//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Management.Sql;
using Microsoft.Azure.Management.Sql.Models;
using Microsoft.WindowsAzure;
using Microsoft.WindowsAzure.Common;
using Microsoft.WindowsAzure.Common.Internals;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace Microsoft.Azure.Management.Sql
{
/// <summary>
/// Represents all the operations for operating on Azure SQL Database
/// Servers. Contains operations to: Create, Retrieve, Update, and Delete
/// servers.
/// </summary>
internal partial class ServerOperations : IServiceOperations<SqlManagementClient>, IServerOperations
{
/// <summary>
/// Initializes a new instance of the ServerOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
internal ServerOperations(SqlManagementClient client)
{
this._client = client;
}
private SqlManagementClient _client;
/// <summary>
/// Gets a reference to the
/// Microsoft.Azure.Management.Sql.SqlManagementClient.
/// </summary>
public SqlManagementClient Client
{
get { return this._client; }
}
/// <summary>
/// Creates a new Azure SQL Database server or updates an existing server.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server to create or
/// update.
/// </param>
/// <param name='parameters'>
/// Required. The required parameters for creating or updating a
/// server.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// Represents the response to a Get Server request.
/// </returns>
public async Task<ServerGetResponse> CreateOrUpdateAsync(string resourceGroupName, string serverName, ServerCreateOrUpdateParameters parameters, CancellationToken cancellationToken)
{
// Validate
if (resourceGroupName == null)
{
throw new ArgumentNullException("resourceGroupName");
}
if (serverName == null)
{
throw new ArgumentNullException("serverName");
}
if (parameters == null)
{
throw new ArgumentNullException("parameters");
}
if (parameters.Location == null)
{
throw new ArgumentNullException("parameters.Location");
}
if (parameters.Properties == null)
{
throw new ArgumentNullException("parameters.Properties");
}
if (parameters.Tags == null)
{
throw new ArgumentNullException("parameters.Tags");
}
// Tracing
bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = Tracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("serverName", serverName);
tracingParameters.Add("parameters", parameters);
Tracing.Enter(invocationId, this, "CreateOrUpdateAsync", tracingParameters);
}
// Construct URL
string url = "/subscriptions/" + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId) + "/resourceGroups/" + Uri.EscapeDataString(resourceGroupName) + "/providers/Microsoft.Sql/servers/" + Uri.EscapeDataString(serverName) + "?";
url = url + "api-version=2014-04-01";
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Put;
httpRequest.RequestUri = new Uri(url);
// Set Headers
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Serialize Request
string requestContent = null;
JToken requestDoc = null;
JObject serverCreateOrUpdateParametersValue = new JObject();
requestDoc = serverCreateOrUpdateParametersValue;
JObject propertiesValue = new JObject();
serverCreateOrUpdateParametersValue["properties"] = propertiesValue;
if (parameters.Properties.Version != null)
{
propertiesValue["version"] = parameters.Properties.Version;
}
if (parameters.Properties.AdministratorLogin != null)
{
propertiesValue["administratorLogin"] = parameters.Properties.AdministratorLogin;
}
if (parameters.Properties.AdministratorLoginPassword != null)
{
propertiesValue["administratorLoginPassword"] = parameters.Properties.AdministratorLoginPassword;
}
serverCreateOrUpdateParametersValue["location"] = parameters.Location;
if (parameters.Tags != null)
{
JObject tagsDictionary = new JObject();
foreach (KeyValuePair<string, string> pair in parameters.Tags)
{
string tagsKey = pair.Key;
string tagsValue = pair.Value;
tagsDictionary[tagsKey] = tagsValue;
}
serverCreateOrUpdateParametersValue["tags"] = tagsDictionary;
}
requestContent = requestDoc.ToString(Formatting.Indented);
httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
Tracing.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
Tracing.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.Created)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
Tracing.Error(invocationId, ex);
}
throw ex;
}
// Create Result
ServerGetResponse result = null;
// Deserialize Response
cancellationToken.ThrowIfCancellationRequested();
string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
result = new ServerGetResponse();
JToken responseDoc = null;
if (string.IsNullOrEmpty(responseContent) == false)
{
responseDoc = JToken.Parse(responseContent);
}
if (responseDoc != null && responseDoc.Type != JTokenType.Null)
{
Server serverInstance = new Server();
result.Server = serverInstance;
JToken nameValue = responseDoc["name"];
if (nameValue != null && nameValue.Type != JTokenType.Null)
{
string nameInstance = ((string)nameValue);
serverInstance.Name = nameInstance;
}
JToken propertiesValue2 = responseDoc["properties"];
if (propertiesValue2 != null && propertiesValue2.Type != JTokenType.Null)
{
ServerProperties propertiesInstance = new ServerProperties();
serverInstance.Properties = propertiesInstance;
JToken fullyQualifiedDomainNameValue = propertiesValue2["fullyQualifiedDomainName"];
if (fullyQualifiedDomainNameValue != null && fullyQualifiedDomainNameValue.Type != JTokenType.Null)
{
string fullyQualifiedDomainNameInstance = ((string)fullyQualifiedDomainNameValue);
propertiesInstance.FullyQualifiedDomainName = fullyQualifiedDomainNameInstance;
}
JToken versionValue = propertiesValue2["version"];
if (versionValue != null && versionValue.Type != JTokenType.Null)
{
string versionInstance = ((string)versionValue);
propertiesInstance.Version = versionInstance;
}
JToken administratorLoginValue = propertiesValue2["administratorLogin"];
if (administratorLoginValue != null && administratorLoginValue.Type != JTokenType.Null)
{
string administratorLoginInstance = ((string)administratorLoginValue);
propertiesInstance.AdministratorLogin = administratorLoginInstance;
}
JToken administratorLoginPasswordValue = propertiesValue2["administratorLoginPassword"];
if (administratorLoginPasswordValue != null && administratorLoginPasswordValue.Type != JTokenType.Null)
{
string administratorLoginPasswordInstance = ((string)administratorLoginPasswordValue);
propertiesInstance.AdministratorLoginPassword = administratorLoginPasswordInstance;
}
}
JToken idValue = responseDoc["id"];
if (idValue != null && idValue.Type != JTokenType.Null)
{
string idInstance = ((string)idValue);
serverInstance.Id = idInstance;
}
JToken typeValue = responseDoc["type"];
if (typeValue != null && typeValue.Type != JTokenType.Null)
{
string typeInstance = ((string)typeValue);
serverInstance.Type = typeInstance;
}
JToken locationValue = responseDoc["location"];
if (locationValue != null && locationValue.Type != JTokenType.Null)
{
string locationInstance = ((string)locationValue);
serverInstance.Location = locationInstance;
}
JToken tagsSequenceElement = ((JToken)responseDoc["tags"]);
if (tagsSequenceElement != null && tagsSequenceElement.Type != JTokenType.Null)
{
foreach (JProperty property in tagsSequenceElement)
{
string tagsKey2 = ((string)property.Name);
string tagsValue2 = ((string)property.Value);
serverInstance.Tags.Add(tagsKey2, tagsValue2);
}
}
}
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
Tracing.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// Deletes an Azure SQL Database Server.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the server to delete.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public async Task<OperationResponse> DeleteAsync(string resourceGroupName, string serverName, CancellationToken cancellationToken)
{
// Validate
if (resourceGroupName == null)
{
throw new ArgumentNullException("resourceGroupName");
}
if (serverName == null)
{
throw new ArgumentNullException("serverName");
}
// Tracing
bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = Tracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("serverName", serverName);
Tracing.Enter(invocationId, this, "DeleteAsync", tracingParameters);
}
// Construct URL
string url = "/subscriptions/" + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId) + "/resourceGroups/" + Uri.EscapeDataString(resourceGroupName) + "/providers/Microsoft.Sql/servers/" + Uri.EscapeDataString(serverName) + "?";
url = url + "api-version=2014-04-01";
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Delete;
httpRequest.RequestUri = new Uri(url);
// Set Headers
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
Tracing.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
Tracing.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
if (statusCode != HttpStatusCode.NoContent)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
Tracing.Error(invocationId, ex);
}
throw ex;
}
// Create Result
OperationResponse result = null;
result = new OperationResponse();
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
Tracing.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// Returns information about an Azure SQL Database Server.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the server to retrieve.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// Represents the response to a Get Server request.
/// </returns>
public async Task<ServerGetResponse> GetAsync(string resourceGroupName, string serverName, CancellationToken cancellationToken)
{
// Validate
if (resourceGroupName == null)
{
throw new ArgumentNullException("resourceGroupName");
}
if (serverName == null)
{
throw new ArgumentNullException("serverName");
}
// Tracing
bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = Tracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("serverName", serverName);
Tracing.Enter(invocationId, this, "GetAsync", tracingParameters);
}
// Construct URL
string url = "/subscriptions/" + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId) + "/resourceGroups/" + Uri.EscapeDataString(resourceGroupName) + "/providers/Microsoft.Sql/servers/" + Uri.EscapeDataString(serverName) + "?";
url = url + "api-version=2014-04-01";
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Get;
httpRequest.RequestUri = new Uri(url);
// Set Headers
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
Tracing.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
Tracing.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
if (statusCode != HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
Tracing.Error(invocationId, ex);
}
throw ex;
}
// Create Result
ServerGetResponse result = null;
// Deserialize Response
cancellationToken.ThrowIfCancellationRequested();
string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
result = new ServerGetResponse();
JToken responseDoc = null;
if (string.IsNullOrEmpty(responseContent) == false)
{
responseDoc = JToken.Parse(responseContent);
}
if (responseDoc != null && responseDoc.Type != JTokenType.Null)
{
Server serverInstance = new Server();
result.Server = serverInstance;
JToken nameValue = responseDoc["name"];
if (nameValue != null && nameValue.Type != JTokenType.Null)
{
string nameInstance = ((string)nameValue);
serverInstance.Name = nameInstance;
}
JToken propertiesValue = responseDoc["properties"];
if (propertiesValue != null && propertiesValue.Type != JTokenType.Null)
{
ServerProperties propertiesInstance = new ServerProperties();
serverInstance.Properties = propertiesInstance;
JToken fullyQualifiedDomainNameValue = propertiesValue["fullyQualifiedDomainName"];
if (fullyQualifiedDomainNameValue != null && fullyQualifiedDomainNameValue.Type != JTokenType.Null)
{
string fullyQualifiedDomainNameInstance = ((string)fullyQualifiedDomainNameValue);
propertiesInstance.FullyQualifiedDomainName = fullyQualifiedDomainNameInstance;
}
JToken versionValue = propertiesValue["version"];
if (versionValue != null && versionValue.Type != JTokenType.Null)
{
string versionInstance = ((string)versionValue);
propertiesInstance.Version = versionInstance;
}
JToken administratorLoginValue = propertiesValue["administratorLogin"];
if (administratorLoginValue != null && administratorLoginValue.Type != JTokenType.Null)
{
string administratorLoginInstance = ((string)administratorLoginValue);
propertiesInstance.AdministratorLogin = administratorLoginInstance;
}
JToken administratorLoginPasswordValue = propertiesValue["administratorLoginPassword"];
if (administratorLoginPasswordValue != null && administratorLoginPasswordValue.Type != JTokenType.Null)
{
string administratorLoginPasswordInstance = ((string)administratorLoginPasswordValue);
propertiesInstance.AdministratorLoginPassword = administratorLoginPasswordInstance;
}
}
JToken idValue = responseDoc["id"];
if (idValue != null && idValue.Type != JTokenType.Null)
{
string idInstance = ((string)idValue);
serverInstance.Id = idInstance;
}
JToken typeValue = responseDoc["type"];
if (typeValue != null && typeValue.Type != JTokenType.Null)
{
string typeInstance = ((string)typeValue);
serverInstance.Type = typeInstance;
}
JToken locationValue = responseDoc["location"];
if (locationValue != null && locationValue.Type != JTokenType.Null)
{
string locationInstance = ((string)locationValue);
serverInstance.Location = locationInstance;
}
JToken tagsSequenceElement = ((JToken)responseDoc["tags"]);
if (tagsSequenceElement != null && tagsSequenceElement.Type != JTokenType.Null)
{
foreach (JProperty property in tagsSequenceElement)
{
string tagsKey = ((string)property.Name);
string tagsValue = ((string)property.Value);
serverInstance.Tags.Add(tagsKey, tagsValue);
}
}
}
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
Tracing.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// Returns information about all the Azure SQL Database Servers in a
/// Resource Group.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// Represents the response to a List Azure SQL Database Servers request.
/// </returns>
public async Task<ServerListResponse> ListAsync(string resourceGroupName, CancellationToken cancellationToken)
{
// Validate
if (resourceGroupName == null)
{
throw new ArgumentNullException("resourceGroupName");
}
// Tracing
bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = Tracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
Tracing.Enter(invocationId, this, "ListAsync", tracingParameters);
}
// Construct URL
string url = "/subscriptions/" + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId) + "/resourceGroups/" + Uri.EscapeDataString(resourceGroupName) + "/providers/Microsoft.Sql/servers?";
url = url + "api-version=2014-04-01";
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Get;
httpRequest.RequestUri = new Uri(url);
// Set Headers
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
Tracing.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
Tracing.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
if (statusCode != HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
Tracing.Error(invocationId, ex);
}
throw ex;
}
// Create Result
ServerListResponse result = null;
// Deserialize Response
cancellationToken.ThrowIfCancellationRequested();
string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
result = new ServerListResponse();
JToken responseDoc = null;
if (string.IsNullOrEmpty(responseContent) == false)
{
responseDoc = JToken.Parse(responseContent);
}
if (responseDoc != null && responseDoc.Type != JTokenType.Null)
{
JToken valueArray = responseDoc["value"];
if (valueArray != null && valueArray.Type != JTokenType.Null)
{
foreach (JToken valueValue in ((JArray)valueArray))
{
Server serverInstance = new Server();
result.Servers.Add(serverInstance);
JToken nameValue = valueValue["name"];
if (nameValue != null && nameValue.Type != JTokenType.Null)
{
string nameInstance = ((string)nameValue);
serverInstance.Name = nameInstance;
}
JToken propertiesValue = valueValue["properties"];
if (propertiesValue != null && propertiesValue.Type != JTokenType.Null)
{
ServerProperties propertiesInstance = new ServerProperties();
serverInstance.Properties = propertiesInstance;
JToken fullyQualifiedDomainNameValue = propertiesValue["fullyQualifiedDomainName"];
if (fullyQualifiedDomainNameValue != null && fullyQualifiedDomainNameValue.Type != JTokenType.Null)
{
string fullyQualifiedDomainNameInstance = ((string)fullyQualifiedDomainNameValue);
propertiesInstance.FullyQualifiedDomainName = fullyQualifiedDomainNameInstance;
}
JToken versionValue = propertiesValue["version"];
if (versionValue != null && versionValue.Type != JTokenType.Null)
{
string versionInstance = ((string)versionValue);
propertiesInstance.Version = versionInstance;
}
JToken administratorLoginValue = propertiesValue["administratorLogin"];
if (administratorLoginValue != null && administratorLoginValue.Type != JTokenType.Null)
{
string administratorLoginInstance = ((string)administratorLoginValue);
propertiesInstance.AdministratorLogin = administratorLoginInstance;
}
JToken administratorLoginPasswordValue = propertiesValue["administratorLoginPassword"];
if (administratorLoginPasswordValue != null && administratorLoginPasswordValue.Type != JTokenType.Null)
{
string administratorLoginPasswordInstance = ((string)administratorLoginPasswordValue);
propertiesInstance.AdministratorLoginPassword = administratorLoginPasswordInstance;
}
}
JToken idValue = valueValue["id"];
if (idValue != null && idValue.Type != JTokenType.Null)
{
string idInstance = ((string)idValue);
serverInstance.Id = idInstance;
}
JToken typeValue = valueValue["type"];
if (typeValue != null && typeValue.Type != JTokenType.Null)
{
string typeInstance = ((string)typeValue);
serverInstance.Type = typeInstance;
}
JToken locationValue = valueValue["location"];
if (locationValue != null && locationValue.Type != JTokenType.Null)
{
string locationInstance = ((string)locationValue);
serverInstance.Location = locationInstance;
}
JToken tagsSequenceElement = ((JToken)valueValue["tags"]);
if (tagsSequenceElement != null && tagsSequenceElement.Type != JTokenType.Null)
{
foreach (JProperty property in tagsSequenceElement)
{
string tagsKey = ((string)property.Name);
string tagsValue = ((string)property.Value);
serverInstance.Tags.Add(tagsKey, tagsValue);
}
}
}
}
}
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
Tracing.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
}
}
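// Usage sketch (illustrative only): the calls below assume the
// SqlManagementClient exposes this operation group through a "Servers"
// property of type IServerOperations, and that the nested properties object
// is named ServerCreateOrUpdateProperties; both names follow the usual
// pattern for this SDK but are assumptions, not confirmed by this file.
//
// var parameters = new ServerCreateOrUpdateParameters
// {
//     Location = "West US",
//     Tags = new Dictionary<string, string>(),
//     Properties = new ServerCreateOrUpdateProperties   // assumed type name
//     {
//         Version = "12.0",
//         AdministratorLogin = "sqladmin",
//         AdministratorLoginPassword = "<placeholder>"
//     }
// };
// ServerGetResponse created = await client.Servers.CreateOrUpdateAsync(
//     "myResourceGroup", "myserver", parameters, CancellationToken.None);
// ServerGetResponse fetched = await client.Servers.GetAsync(
//     "myResourceGroup", "myserver", CancellationToken.None);
// ServerListResponse servers = await client.Servers.ListAsync(
//     "myResourceGroup", CancellationToken.None);
// OperationResponse deleted = await client.Servers.DeleteAsync(
//     "myResourceGroup", "myserver", CancellationToken.None);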
// This source code is dual-licensed under the Apache License, version
// 2.0, and the Mozilla Public License, version 1.1.
//
// The APL v2.0:
//
//---------------------------------------------------------------------------
// Copyright (C) 2007-2014 GoPivotal, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//---------------------------------------------------------------------------
//
// The MPL v1.1:
//
//---------------------------------------------------------------------------
// The contents of this file are subject to the Mozilla Public License
// Version 1.1 (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License
// at http://www.mozilla.org/MPL/
//
// Software distributed under the License is distributed on an "AS IS"
// basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
// the License for the specific language governing rights and
// limitations under the License.
//
// The Original Code is RabbitMQ.
//
// The Initial Developer of the Original Code is GoPivotal, Inc.
// Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved.
//---------------------------------------------------------------------------
using System;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using System.Collections.Generic;
using RabbitMQ.Client;
using RabbitMQ.Client.Impl;
using RabbitMQ.Client.Events;
using RabbitMQ.Client.Exceptions;
using RabbitMQ.Util;
using RabbitMQ.Client.Framing;
namespace RabbitMQ.Client.Framing.Impl {
public class Connection : IConnection, NetworkConnection
{
///<summary>Heartbeat frame for transmission. Reusable across connections.</summary>
public readonly Frame m_heartbeatFrame = new Frame(Constants.FrameHeartbeat,
0,
new byte[0]);
///<summary>Timeout used while waiting for AMQP handshaking to
///complete (milliseconds)</summary>
public static int HandshakeTimeout = 10000;
public IConnectionFactory m_factory;
public IFrameHandler m_frameHandler;
public uint m_frameMax = 0;
public ushort m_heartbeat = 0;
public IDictionary<string, object> m_clientProperties;
public IDictionary<string, object> m_serverProperties;
public AmqpTcpEndpoint[] m_knownHosts = null;
public MainSession m_session0;
public ModelBase m_model0;
public SessionManager m_sessionManager;
public volatile bool m_running = true;
public readonly object m_eventLock = new object();
public ConnectionShutdownEventHandler m_connectionShutdown;
public volatile ShutdownEventArgs m_closeReason = null;
public CallbackExceptionEventHandler m_callbackException;
public ConnectionBlockedEventHandler m_connectionBlocked;
public ConnectionUnblockedEventHandler m_connectionUnblocked;
public ManualResetEvent m_appContinuation = new ManualResetEvent(false);
public AutoResetEvent m_heartbeatRead = new AutoResetEvent(false);
public AutoResetEvent m_heartbeatWrite = new AutoResetEvent(false);
public volatile bool m_closed = false;
public Guid m_id = Guid.NewGuid();
public int m_missedHeartbeats = 0;
public IList<ShutdownReportEntry> m_shutdownReport = new SynchronizedList<ShutdownReportEntry>(new List<ShutdownReportEntry>());
public Connection(IConnectionFactory factory,
bool insist,
IFrameHandler frameHandler)
{
m_factory = factory;
m_frameHandler = frameHandler;
m_sessionManager = new SessionManager(this, 0);
m_session0 = new MainSession(this);
m_session0.Handler = new MainSession.SessionCloseDelegate(NotifyReceivedCloseOk);
m_model0 = (ModelBase)Protocol.CreateModel(m_session0);
StartMainLoop(factory.UseBackgroundThreadsForIO);
Open(insist);
StartHeartbeatLoops(factory.UseBackgroundThreadsForIO);
AppDomain.CurrentDomain.DomainUnload += HandleDomainUnload;
}
public event ConnectionShutdownEventHandler ConnectionShutdown
{
add
{
bool ok = false;
lock (m_eventLock)
{
if (m_closeReason == null)
{
m_connectionShutdown += value;
ok = true;
}
}
if (!ok)
{
value(this, m_closeReason);
}
}
remove
{
lock (m_eventLock)
{
m_connectionShutdown -= value;
}
}
}
public event ConnectionBlockedEventHandler ConnectionBlocked
{
add
{
lock (m_eventLock)
{
m_connectionBlocked += value;
}
}
remove
{
lock (m_eventLock)
{
m_connectionBlocked -= value;
}
}
}
public event ConnectionUnblockedEventHandler ConnectionUnblocked
{
add
{
lock (m_eventLock)
{
m_connectionUnblocked += value;
}
}
remove
{
lock (m_eventLock)
{
m_connectionUnblocked -= value;
}
}
}
public event CallbackExceptionEventHandler CallbackException
{
add
{
lock (m_eventLock)
{
m_callbackException += value;
}
}
remove
{
lock (m_eventLock)
{
m_callbackException -= value;
}
}
}
public AmqpTcpEndpoint Endpoint
{
get
{
return m_frameHandler.Endpoint;
}
}
public EndPoint LocalEndPoint
{
get { return m_frameHandler.LocalEndPoint; }
}
public EndPoint RemoteEndPoint
{
get { return m_frameHandler.RemoteEndPoint; }
}
public int LocalPort
{
get
{
return m_frameHandler.LocalPort;
}
}
public int RemotePort
{
get
{
return m_frameHandler.RemotePort;
}
}
///<summary>Explicit implementation of IConnection.Protocol.</summary>
IProtocol IConnection.Protocol
{
get
{
return Endpoint.Protocol;
}
}
///<summary>A more specifically typed Protocol property, useful
///for exposing a tighter type.</summary>
public ProtocolBase Protocol
{
get
{
return (ProtocolBase)Endpoint.Protocol;
}
}
public void WriteFrame(Frame f)
{
m_frameHandler.WriteFrame(f);
m_heartbeatWrite.Set();
}
public ushort ChannelMax
{
get
{
return m_sessionManager.ChannelMax;
}
}
public uint FrameMax
{
get
{
return m_frameMax;
}
set
{
m_frameMax = value;
}
}
public ushort Heartbeat
{
get
{
return m_heartbeat;
}
set
{
m_heartbeat = value;
// Socket read timeout is twice the heartbeat
// interval, because once we hit the timeout the
// socket is in an unusable state
m_frameHandler.Timeout = value * 2 * 1000;
}
}
public IDictionary<string, object> ClientProperties
{
get
{
return m_clientProperties;
}
set
{
m_clientProperties = value;
}
}
public IDictionary<string, object> ServerProperties
{
get
{
return m_serverProperties;
}
set
{
m_serverProperties = value;
}
}
public AmqpTcpEndpoint[] KnownHosts
{
get { return m_knownHosts; }
set { m_knownHosts = value; }
}
public ShutdownEventArgs CloseReason
{
get
{
return m_closeReason;
}
}
public bool IsOpen
{
get
{
return CloseReason == null;
}
}
public bool AutoClose
{
get
{
return m_sessionManager.AutoClose;
}
set
{
m_sessionManager.AutoClose = value;
}
}
public void EnsureIsOpen()
{
if(!IsOpen)
{
throw new AlreadyClosedException(this.CloseReason);
}
}
public IModel CreateModel()
{
this.EnsureIsOpen();
ISession session = CreateSession();
IFullModel model = (IFullModel)Protocol.CreateModel(session);
model._Private_ChannelOpen("");
return model;
}
public ISession CreateSession()
{
return m_sessionManager.Create();
}
public ISession CreateSession(int channelNumber)
{
return m_sessionManager.Create(channelNumber);
}
public bool SetCloseReason(ShutdownEventArgs reason)
{
lock (m_eventLock)
{
if (m_closeReason == null)
{
m_closeReason = reason;
return true;
}
else
{
return false;
}
}
}
public IList<ShutdownReportEntry> ShutdownReport
{
get
{
return m_shutdownReport;
}
}
void IDisposable.Dispose()
{
Abort();
if (ShutdownReport.Count > 0)
{
foreach (ShutdownReportEntry entry in ShutdownReport)
{
if (entry.Exception != null)
throw entry.Exception;
}
throw new OperationInterruptedException(null);
}
}
///<summary>API-side invocation of connection.close.</summary>
public void Close()
{
Close(Constants.ReplySuccess, "Goodbye", Timeout.Infinite);
}
///<summary>API-side invocation of connection.close.</summary>
public void Close(ushort reasonCode, string reasonText)
{
Close(reasonCode, reasonText, Timeout.Infinite);
}
///<summary>API-side invocation of connection.close with timeout.</summary>
public void Close(int timeout)
{
Close(Constants.ReplySuccess, "Goodbye", timeout);
}
///<summary>API-side invocation of connection.close with timeout.</summary>
public void Close(ushort reasonCode, string reasonText, int timeout)
{
Close(new ShutdownEventArgs(ShutdownInitiator.Application, reasonCode, reasonText), false, timeout);
}
public void Close(ShutdownEventArgs reason)
{
Close(reason, false, Timeout.Infinite);
}
///<summary>API-side invocation of connection abort.</summary>
public void Abort()
{
Abort(Timeout.Infinite);
}
///<summary>API-side invocation of connection abort.</summary>
public void Abort(ushort reasonCode, string reasonText)
{
Abort(reasonCode, reasonText, Timeout.Infinite);
}
///<summary>API-side invocation of connection abort with timeout.</summary>
public void Abort(int timeout)
{
Abort(Constants.ReplySuccess, "Connection close forced", timeout);
}
///<summary>API-side invocation of connection abort with timeout.</summary>
public void Abort(ushort reasonCode, string reasonText, int timeout)
{
Abort(reasonCode, reasonText, ShutdownInitiator.Application, timeout);
}
public void Abort(ushort reasonCode, string reasonText,
ShutdownInitiator initiator, int timeout)
{
Close( new ShutdownEventArgs(initiator, reasonCode, reasonText),
true, timeout);
}
///<summary>Try to close the connection gracefully.</summary>
///<remarks>
///<para>
///The shutdown reason contains the code and text assigned when closing the connection,
///as well as information about what initiated the close.
///</para>
///<para>
///The abort flag, if true, signals that the connection should be closed immediately,
///without reporting an error if it was already closed.
///</para>
///<para>
///The timeout determines how much time the internal close operations are given
///to complete. A negative value or Timeout.Infinite means wait indefinitely.
///</para>
///</remarks>
public void Close(ShutdownEventArgs reason, bool abort, int timeout)
{
if (!SetCloseReason(reason))
{
if (!abort)
throw new AlreadyClosedException(m_closeReason);
}
else
{
OnShutdown();
m_session0.SetSessionClosing(false);
try
{
// Try to send connection.close
// Wait for CloseOk in the MainLoop
m_session0.Transmit(ConnectionCloseWrapper(reason.ReplyCode,
reason.ReplyText));
}
catch (AlreadyClosedException ace)
{
if (!abort)
throw ace;
}
#pragma warning disable 0168
catch (NotSupportedException nse)
{
// the buffered stream had unread data in it and Flush()
// was called; ignore this so as not to confuse the user
}
#pragma warning restore 0168
catch (IOException ioe)
{
if (m_model0.CloseReason == null)
{
if (!abort)
throw ioe;
else
LogCloseError("Couldn't close connection cleanly. "
+ "Socket closed unexpectedly", ioe);
}
}
finally
{
TerminateMainloop();
}
}
if (!m_appContinuation.WaitOne(BlockingCell.validatedTimeout(timeout),true))
m_frameHandler.Close();
}
public delegate void ConnectionCloseDelegate(ushort replyCode,
string replyText,
ushort classId,
ushort methodId);
public void InternalClose(ShutdownEventArgs reason)
{
if (!SetCloseReason(reason))
{
if (m_closed)
throw new AlreadyClosedException(m_closeReason);
// We are quiescing, but still allow for server-close
}
OnShutdown();
m_session0.SetSessionClosing(true);
TerminateMainloop();
}
///<remarks>
/// May be called more than once. Should therefore be idempotent.
///</remarks>
public void TerminateMainloop()
{
m_running = false;
}
public void StartMainLoop(bool useBackgroundThread)
{
Thread mainLoopThread = new Thread(new ThreadStart(MainLoop));
mainLoopThread.Name = "AMQP Connection " + Endpoint.ToString();
mainLoopThread.IsBackground = useBackgroundThread;
mainLoopThread.Start();
}
public void StartHeartbeatLoops(bool useBackgroundThread)
{
if (Heartbeat != 0) {
StartHeartbeatLoop(new ThreadStart(HeartbeatReadLoop), "Inbound", useBackgroundThread);
StartHeartbeatLoop(new ThreadStart(HeartbeatWriteLoop), "Outbound", useBackgroundThread);
}
}
public void StartHeartbeatLoop(ThreadStart loop, string name, bool useBackgroundThread)
{
Thread heartbeatLoop = new Thread(loop);
heartbeatLoop.Name = "AMQP Heartbeat " + name + " for Connection " + Endpoint.ToString();
heartbeatLoop.IsBackground = useBackgroundThread;
heartbeatLoop.Start();
}
public void HeartbeatWriteLoop()
{
try
{
while (!m_closed)
{
if (!m_heartbeatWrite.WaitOne(Heartbeat * 1000, false))
{
WriteFrame(m_heartbeatFrame);
}
}
} catch (Exception e) {
HandleMainLoopException(new ShutdownEventArgs(
ShutdownInitiator.Library,
0,
"End of stream",
e));
}
TerminateMainloop();
FinishClose();
}
public void HeartbeatReadLoop()
{
while (!m_closed)
{
if (!m_heartbeatRead.WaitOne(Heartbeat * 1000, false))
m_missedHeartbeats++;
else
m_missedHeartbeats = 0;
// Has to miss two full heartbeats to force socket close
if (m_missedHeartbeats > 1)
{
String description = "Heartbeat missing with heartbeat == " +
m_heartbeat + " seconds";
EndOfStreamException eose = new EndOfStreamException(description);
m_shutdownReport.Add(new ShutdownReportEntry(description, eose));
HandleMainLoopException(new ShutdownEventArgs(
ShutdownInitiator.Library,
0,
"End of stream",
eose));
break;
}
}
TerminateMainloop();
FinishClose();
}
public void NotifyHeartbeatThread()
{
if (m_heartbeat == 0) {
// Heartbeating not enabled for this connection.
return;
}
m_heartbeatRead.Set();
}
public void MainLoop()
{
try
{
bool shutdownCleanly = false;
try
{
while (m_running)
{
try {
MainLoopIteration();
} catch (SoftProtocolException spe) {
QuiesceChannel(spe);
}
}
shutdownCleanly = true;
}
catch (EndOfStreamException eose)
{
// Possible heartbeat exception
HandleMainLoopException(new ShutdownEventArgs(
ShutdownInitiator.Library,
0,
"End of stream",
eose));
}
catch (HardProtocolException hpe)
{
shutdownCleanly = HardProtocolExceptionHandler(hpe);
}
catch (SocketException se)
{
// Possibly due to handshake timeout
HandleMainLoopException(new ShutdownEventArgs(ShutdownInitiator.Library,
0,
"Socket exception",
se));
}
catch (Exception ex)
{
HandleMainLoopException(new ShutdownEventArgs(ShutdownInitiator.Library,
Constants.InternalError,
"Unexpected Exception",
ex));
}
// If allowed for clean shutdown, run main loop until the
// connection closes.
if (shutdownCleanly)
{
try
{
ClosingLoop();
#pragma warning disable 0168
} catch (SocketException se)
{
// means that socket was closed when frame handler
// attempted to use it. Since we are shutting down,
// ignore it.
}
#pragma warning restore 0168
}
FinishClose();
}
finally
{
m_appContinuation.Set();
}
}
public void MainLoopIteration()
{
Frame frame = m_frameHandler.ReadFrame();
NotifyHeartbeatThread();
// We have received an actual frame.
if (frame.Type == Constants.FrameHeartbeat) {
// Ignore it: we've already just reset the heartbeat
// counter.
return;
}
if (frame.Channel == 0) {
// In theory, we could get non-connection.close-ok
// frames here while we're quiescing (m_closeReason !=
// null). In practice, there's a limited number of
// things the server can ask of us on channel 0 -
// essentially, just connection.close. That, combined
// with the restrictions on pipelining, mean that
// we're OK here to handle channel 0 traffic in a
// quiescing situation, even though technically we
// should be ignoring everything except
// connection.close-ok.
m_session0.HandleFrame(frame);
} else {
// If we're still m_running, but have a m_closeReason,
// then we must be quiescing, which means any inbound
// frames for non-zero channels (and any inbound
// commands on channel zero that aren't
// Connection.CloseOk) must be discarded.
if (m_closeReason == null)
{
// No close reason, not quiescing the
// connection. Handle the frame. (Of course, the
// Session itself may be quiescing this particular
// channel, but that's none of our concern.)
ISession session = m_sessionManager.Lookup(frame.Channel);
if (session == null) {
throw new ChannelErrorException(frame.Channel);
} else {
session.HandleFrame(frame);
}
}
}
}
// Only call at the end of the Mainloop or HeartbeatLoop
public void FinishClose()
{
// Notify the heartbeat loops that they can exit
m_closed = true;
m_heartbeatRead.Set();
m_heartbeatWrite.Set();
m_frameHandler.Close();
m_model0.SetCloseReason(m_closeReason);
m_model0.FinishClose();
}
/// <remarks>
/// We need to close the socket, otherwise attempting to unload the domain
/// could cause a CannotUnloadAppDomainException
/// </remarks>
public void HandleDomainUnload(object sender, EventArgs ea)
{
Abort(Constants.InternalError, "Domain Unload");
}
public bool HardProtocolExceptionHandler(HardProtocolException hpe)
{
if (SetCloseReason(hpe.ShutdownReason))
{
OnShutdown();
m_session0.SetSessionClosing(false);
try
{
m_session0.Transmit(ConnectionCloseWrapper(
hpe.ShutdownReason.ReplyCode,
hpe.ShutdownReason.ReplyText));
return true;
} catch (IOException ioe) {
LogCloseError("Broker closed socket unexpectedly", ioe);
}
} else
LogCloseError("Hard Protocol Exception occured "
+ "while closing the connection", hpe);
return false;
}
///<remarks>
/// Loop used only while quiescing. Use only to cleanly close the connection.
///</remarks>
public void ClosingLoop()
{
try
{
m_frameHandler.Timeout = 0;
// Wait for response/socket closure or timeout
while (!m_closed)
{
MainLoopIteration();
}
}
catch (ObjectDisposedException ode)
{
if (!m_closed)
LogCloseError("Connection didn't close cleanly", ode);
}
catch (EndOfStreamException eose)
{
if (m_model0.CloseReason == null)
LogCloseError("Connection didn't close cleanly. "
+ "Socket closed unexpectedly", eose);
}
catch (IOException ioe)
{
LogCloseError("Connection didn't close cleanly. "
+ "Socket closed unexpectedly", ioe);
}
catch (Exception e)
{
LogCloseError("Unexpected exception while closing: ", e);
}
}
public void NotifyReceivedCloseOk()
{
TerminateMainloop();
m_closed = true;
}
///<summary>
/// Sets the channel named in the SoftProtocolException into
/// "quiescing mode", where we issue a channel.close and
/// ignore everything except for subsequent channel.close
/// messages and the channel.close-ok reply that should
/// eventually arrive.
///</summary>
///<remarks>
///<para>
/// Since a well-behaved peer will not wait indefinitely before
/// issuing the close-ok, we don't bother with a timeout here;
/// compare this to the case of a connection.close-ok, where a
/// timeout is necessary.
///</para>
///<para>
/// We need to send the close method and politely wait for a
/// reply before marking the channel as available for reuse.
///</para>
///<para>
/// As soon as SoftProtocolException is detected, we should stop
/// servicing ordinary application work, and should concentrate
/// on bringing down the channel as quickly and gracefully as
/// possible. The way this is done, as per the close-protocol,
/// is to signal closure up the stack *before* sending the
/// channel.close, by invoking ISession.Close. Once the upper
/// layers have been signalled, we are free to do what we need
/// to do to clean up and shut down the channel.
///</para>
///</remarks>
public void QuiesceChannel(SoftProtocolException pe) {
// Construct the QuiescingSession that we'll use during
// the quiesce process.
ISession newSession = new QuiescingSession(this,
pe.Channel,
pe.ShutdownReason);
// Here we detach the session from the connection. It's
// still alive: it just won't receive any further frames
// from the mainloop (once we return to the mainloop, of
// course). Instead, those frames will be directed at the
// new QuiescingSession.
ISession oldSession = m_sessionManager.Swap(pe.Channel, newSession);
// Now we have all the information we need, and the event
// flow of the *lower* layers is set up properly for
// shutdown. Signal channel closure *up* the stack, toward
// the model and application.
oldSession.Close(pe.ShutdownReason);
// The upper layers have been signalled. Now we can tell
// our peer. The peer will respond through the lower
// layers - specifically, through the QuiescingSession we
// installed above.
newSession.Transmit(ChannelCloseWrapper(pe.ReplyCode, pe.Message));
}
public void HandleMainLoopException(ShutdownEventArgs reason) {
if (!SetCloseReason(reason))
{
LogCloseError("Unexpected Main Loop Exception while closing: "
+ reason.ToString(), null);
return;
}
OnShutdown();
LogCloseError("Unexpected connection closure: " + reason.ToString(), null);
}
public void LogCloseError(String error, Exception ex)
{
m_shutdownReport.Add(new ShutdownReportEntry(error, ex));
}
public void PrettyPrintShutdownReport()
{
if (ShutdownReport.Count == 0)
{
Console.Error.WriteLine("No errors reported when closing connection {0}", this);
} else {
Console.Error.WriteLine("Log of errors while closing connection {0}:", this);
foreach(ShutdownReportEntry entry in ShutdownReport)
{
Console.Error.WriteLine(entry.ToString());
}
}
}
public void HandleConnectionBlocked(string reason)
{
ConnectionBlockedEventArgs args = new ConnectionBlockedEventArgs(reason);
OnConnectionBlocked(args);
}
public void OnConnectionBlocked(ConnectionBlockedEventArgs args)
{
ConnectionBlockedEventHandler handler;
lock (m_eventLock)
{
handler = m_connectionBlocked;
}
if (handler != null)
{
foreach (ConnectionBlockedEventHandler h in handler.GetInvocationList()) {
try {
h(this, args);
} catch (Exception e) {
CallbackExceptionEventArgs cee_args = new CallbackExceptionEventArgs(e);
cee_args.Detail["context"] = "OnConnectionBlocked";
OnCallbackException(cee_args);
}
}
}
}
public void HandleConnectionUnblocked()
{
OnConnectionUnblocked();
}
public void OnConnectionUnblocked()
{
ConnectionUnblockedEventHandler handler;
lock (m_eventLock)
{
handler = m_connectionUnblocked;
}
if (handler != null)
{
foreach (ConnectionUnblockedEventHandler h in handler.GetInvocationList()) {
try {
h(this);
} catch (Exception e) {
CallbackExceptionEventArgs args = new CallbackExceptionEventArgs(e);
args.Detail["context"] = "OnConnectionUnblocked";
OnCallbackException(args);
}
}
}
}
///<summary>Broadcasts notification of the final shutdown of the connection.</summary>
public void OnShutdown()
{
ConnectionShutdownEventHandler handler;
ShutdownEventArgs reason;
lock (m_eventLock)
{
handler = m_connectionShutdown;
reason = m_closeReason;
m_connectionShutdown = null;
}
if (handler != null)
{
foreach (ConnectionShutdownEventHandler h in handler.GetInvocationList()) {
try {
h(this, reason);
} catch (Exception e) {
CallbackExceptionEventArgs args = new CallbackExceptionEventArgs(e);
args.Detail["context"] = "OnShutdown";
OnCallbackException(args);
}
}
}
AppDomain.CurrentDomain.DomainUnload -= HandleDomainUnload;
}
public void OnCallbackException(CallbackExceptionEventArgs args)
{
CallbackExceptionEventHandler handler;
lock (m_eventLock) {
handler = m_callbackException;
}
if (handler != null) {
foreach (CallbackExceptionEventHandler h in handler.GetInvocationList()) {
try {
h(this, args);
} catch {
// Exception in
// Callback-exception-handler. That was the
// app's last chance. Swallow the exception.
// FIXME: proper logging
}
}
}
}
public static IDictionary<string, object> DefaultClientProperties()
{
System.Reflection.Assembly assembly =
System.Reflection.Assembly.GetAssembly(typeof(Connection));
string version = assembly.GetName().Version.ToString();
//TODO: Get the rest of this data from the Assembly Attributes
IDictionary<string, object> table = new Dictionary<string, object>();
table["product"] = Encoding.UTF8.GetBytes("RabbitMQ");
table["version"] = Encoding.UTF8.GetBytes(version);
table["platform"] = Encoding.UTF8.GetBytes(".NET");
table["copyright"] = Encoding.UTF8.GetBytes("Copyright (C) 2007-2014 GoPivotal, Inc.");
table["information"] = Encoding.UTF8.GetBytes("Licensed under the MPL. " +
"See http://www.rabbitmq.com/");
return table;
}
public Command ConnectionCloseWrapper(ushort reasonCode, string reasonText)
{
Command request;
int replyClassId, replyMethodId;
Protocol.CreateConnectionClose(reasonCode,
reasonText,
out request,
out replyClassId,
out replyMethodId);
return request;
}
protected Command ChannelCloseWrapper(ushort reasonCode, string reasonText)
{
Command request;
int replyClassId, replyMethodId;
Protocol.CreateChannelClose(reasonCode,
reasonText,
out request,
out replyClassId,
out replyMethodId);
return request;
}
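// Worked example for NegotiatedMaxValue below (illustrative values): a value
// of 0 means "no limit", so if the client requests frameMax = 0 and the server
// tunes to 131072, Math.Max(0, 131072) = 131072 is used; when both sides give
// non-zero limits, say 4096 and 131072, Math.Min picks the stricter 4096.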
private static uint NegotiatedMaxValue(uint clientValue, uint serverValue)
{
return (clientValue == 0 || serverValue == 0) ?
Math.Max(clientValue, serverValue) :
Math.Min(clientValue, serverValue);
}
protected void StartAndTune()
{
BlockingCell connectionStartCell = new BlockingCell();
m_model0.m_connectionStartCell = connectionStartCell;
m_frameHandler.Timeout = HandshakeTimeout;
m_frameHandler.SendHeader();
ConnectionStartDetails connectionStart = (ConnectionStartDetails)
connectionStartCell.Value;
if (connectionStart == null){
throw new IOException("connection.start was never received, likely due to a network timeout");
}
ServerProperties = connectionStart.m_serverProperties;
AmqpVersion serverVersion = new AmqpVersion(connectionStart.m_versionMajor,
connectionStart.m_versionMinor);
if (!serverVersion.Equals(Protocol.Version))
{
TerminateMainloop();
FinishClose();
throw new ProtocolVersionMismatchException(Protocol.MajorVersion,
Protocol.MinorVersion,
serverVersion.Major,
serverVersion.Minor);
}
m_clientProperties = new Dictionary<string, object>(m_factory.ClientProperties);
m_clientProperties["capabilities"] = Protocol.Capabilities;
// FIXME: parse out locales properly!
ConnectionTuneDetails connectionTune = default(ConnectionTuneDetails);
bool tuned = false;
try
{
string mechanismsString = Encoding.UTF8.GetString(connectionStart.m_mechanisms);
string[] mechanisms = mechanismsString.Split(' ');
AuthMechanismFactory mechanismFactory = m_factory.AuthMechanismFactory(mechanisms);
if (mechanismFactory == null) {
throw new IOException("No compatible authentication mechanism found - " +
"server offered [" + mechanismsString + "]");
}
AuthMechanism mechanism = mechanismFactory.GetInstance();
byte[] challenge = null;
do {
byte[] response = mechanism.handleChallenge(challenge, m_factory);
ConnectionSecureOrTune res;
if (challenge == null) {
res = m_model0.ConnectionStartOk(m_clientProperties,
mechanismFactory.Name,
response,
"en_US");
}
else {
res = m_model0.ConnectionSecureOk(response);
}
if (res.m_challenge == null) {
connectionTune = res.m_tuneDetails;
tuned = true;
} else {
challenge = res.m_challenge;
}
} while (!tuned);
}
catch (OperationInterruptedException e)
{
if (e.ShutdownReason != null && e.ShutdownReason.ReplyCode == Constants.AccessRefused)
{
throw new AuthenticationFailureException(e.ShutdownReason.ReplyText);
}
throw new PossibleAuthenticationFailureException(
"Possibly caused by authentication failure", e);
}
ushort channelMax = (ushort) NegotiatedMaxValue(m_factory.RequestedChannelMax,
connectionTune.m_channelMax);
m_sessionManager = new SessionManager(this, channelMax);
uint frameMax = NegotiatedMaxValue(m_factory.RequestedFrameMax,
connectionTune.m_frameMax);
FrameMax = frameMax;
ushort heartbeat = (ushort) NegotiatedMaxValue(m_factory.RequestedHeartbeat,
connectionTune.m_heartbeat);
Heartbeat = heartbeat;
m_model0.ConnectionTuneOk(channelMax,
frameMax,
heartbeat);
}
public void Open(bool insist)
{
StartAndTune();
m_model0.ConnectionOpen(m_factory.VirtualHost, String.Empty, false);
}
public override string ToString()
{
return string.Format("Connection({0},{1})", m_id, Endpoint);
}
}
}
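// Usage sketch (illustrative only): ConnectionFactory, IModel.QueueDeclare and
// IModel.BasicPublish come from the wider RabbitMQ .NET client API rather than
// this file, and the host name, queue name and payload below are placeholders.
//
// var factory = new ConnectionFactory { HostName = "localhost" };
// using (IConnection conn = factory.CreateConnection())
// using (IModel channel = conn.CreateModel())
// {
//     channel.QueueDeclare("hello", false, false, false, null);
//     channel.BasicPublish("", "hello", null, Encoding.UTF8.GetBytes("ping"));
// }
// // Disposing the connection ends up in Abort()/Close(reason, abort, timeout)
// // on the Connection class above.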
#region License
//
// Copyright (c) 2007-2009, Sean Chambers <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Reflection;
using System.Text;
using FluentMigrator.Expressions;
using FluentMigrator.Infrastructure;
using FluentMigrator.Runner.Initialization;
using FluentMigrator.Runner.Processors;
using FluentMigrator.Runner.Versioning;
using FluentMigrator.Infrastructure.Extensions;
namespace FluentMigrator.Runner
{
public class MigrationRunner : IMigrationRunner
{
private IAssemblyCollection _migrationAssemblies;
private IAnnouncer _announcer;
private IStopWatch _stopWatch;
private bool _alreadyOutputPreviewOnlyModeWarning;
private readonly MigrationValidator _migrationValidator;
private readonly MigrationScopeHandler _migrationScopeHandler;
public bool TransactionPerSession
{
get { return RunnerContext.TransactionPerSession; }
}
public bool SilentlyFail { get; set; }
public IMigrationProcessor Processor { get; private set; }
public IMigrationInformationLoader MigrationLoader { get; set; }
public IProfileLoader ProfileLoader { get; set; }
public IMaintenanceLoader MaintenanceLoader { get; set; }
public IMigrationConventions Conventions { get; private set; }
public IList<Exception> CaughtExceptions { get; private set; }
public IMigrationScope CurrentScope
{
get
{
return _migrationScopeHandler.CurrentScope;
}
set
{
_migrationScopeHandler.CurrentScope = value;
}
}
public IRunnerContext RunnerContext { get; private set; }
public MigrationRunner(Assembly assembly, IRunnerContext runnerContext, IMigrationProcessor processor)
: this(new SingleAssembly(assembly), runnerContext, processor)
{
}
public MigrationRunner(IAssemblyCollection assemblies, IRunnerContext runnerContext, IMigrationProcessor processor)
{
_migrationAssemblies = assemblies;
_announcer = runnerContext.Announcer;
Processor = processor;
_stopWatch = runnerContext.StopWatch;
RunnerContext = runnerContext;
SilentlyFail = false;
CaughtExceptions = null;
Conventions = new MigrationConventions();
if (!string.IsNullOrEmpty(runnerContext.WorkingDirectory))
Conventions.GetWorkingDirectory = () => runnerContext.WorkingDirectory;
_migrationScopeHandler = new MigrationScopeHandler(Processor);
_migrationValidator = new MigrationValidator(_announcer, Conventions);
MigrationLoader = new DefaultMigrationInformationLoader(Conventions, _migrationAssemblies, runnerContext.Namespace, runnerContext.NestedNamespaces, runnerContext.Tags);
ProfileLoader = new ProfileLoader(runnerContext, this, Conventions);
MaintenanceLoader = new MaintenanceLoader(_migrationAssemblies, runnerContext.Tags, Conventions);
if (runnerContext.NoConnection)
{
VersionLoader = new ConnectionlessVersionLoader(this, _migrationAssemblies, Conventions, runnerContext.StartVersion, runnerContext.Version);
}
else
{
VersionLoader = new VersionLoader(this, _migrationAssemblies, Conventions);
}
}
public IVersionLoader VersionLoader { get; set; }
public void ApplyProfiles()
{
ProfileLoader.ApplyProfiles();
}
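/// <summary>
/// Applies every maintenance migration registered for the given stage, honouring each
/// migration's transaction behavior when automatic transaction management is enabled.
/// </summary>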
public void ApplyMaintenance(MigrationStage stage, bool useAutomaticTransactionManagement)
{
var maintenanceMigrations = MaintenanceLoader.LoadMaintenance(stage);
foreach (var maintenanceMigration in maintenanceMigrations)
{
ApplyMigrationUp(maintenanceMigration, useAutomaticTransactionManagement && maintenanceMigration.TransactionBehavior == TransactionBehavior.Default);
}
}
public void MigrateUp()
{
MigrateUp(true);
}
public void MigrateUp(bool useAutomaticTransactionManagement)
{
MigrateUp(long.MaxValue, useAutomaticTransactionManagement);
}
public void MigrateUp(long targetVersion)
{
MigrateUp(targetVersion, true);
}
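/// <summary>
/// Applies all pending migrations up to and including <paramref name="targetVersion"/>,
/// running BeforeAll/BeforeEach/AfterEach/BeforeProfiles/AfterAll maintenance and profiles,
/// optionally wrapping the whole run in a single per-session transaction scope.
/// </summary>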
public void MigrateUp(long targetVersion, bool useAutomaticTransactionManagement)
{
var migrationInfos = GetUpMigrationsToApply(targetVersion);
using (IMigrationScope scope = _migrationScopeHandler.CreateOrWrapMigrationScope(useAutomaticTransactionManagement && TransactionPerSession))
{
try
{
ApplyMaintenance(MigrationStage.BeforeAll, useAutomaticTransactionManagement);
foreach (var migrationInfo in migrationInfos)
{
ApplyMaintenance(MigrationStage.BeforeEach, useAutomaticTransactionManagement);
ApplyMigrationUp(migrationInfo, useAutomaticTransactionManagement && migrationInfo.TransactionBehavior == TransactionBehavior.Default);
ApplyMaintenance(MigrationStage.AfterEach, useAutomaticTransactionManagement);
}
ApplyMaintenance(MigrationStage.BeforeProfiles, useAutomaticTransactionManagement);
ApplyProfiles();
ApplyMaintenance(MigrationStage.AfterAll, useAutomaticTransactionManagement);
scope.Complete();
}
catch
{
if (scope.IsActive)
scope.Cancel(); // SQLAnywhere needs explicit call to rollback transaction
throw;
}
}
VersionLoader.LoadVersionInfo();
}
private IEnumerable<IMigrationInfo> GetUpMigrationsToApply(long version)
{
var migrations = MigrationLoader.LoadMigrations();
return from pair in migrations
where IsMigrationStepNeededForUpMigration(pair.Key, version)
select pair.Value;
}
private bool IsMigrationStepNeededForUpMigration(long versionOfMigration, long targetVersion)
{
if (versionOfMigration <= targetVersion && !VersionLoader.VersionInfo.HasAppliedMigration(versionOfMigration))
{
return true;
}
return false;
}
public void MigrateDown(long targetVersion)
{
MigrateDown(targetVersion, true);
}
public void MigrateDown(long targetVersion, bool useAutomaticTransactionManagement)
{
var migrationInfos = GetDownMigrationsToApply(targetVersion);
using (IMigrationScope scope = _migrationScopeHandler.CreateOrWrapMigrationScope(useAutomaticTransactionManagement && TransactionPerSession))
{
try
{
foreach (var migrationInfo in migrationInfos)
{
ApplyMigrationDown(migrationInfo, useAutomaticTransactionManagement && migrationInfo.TransactionBehavior == TransactionBehavior.Default);
}
ApplyProfiles();
scope.Complete();
}
catch
{
if (scope.IsActive)
scope.Cancel(); // SQLAnywhere needs explicit call to rollback transaction
throw;
}
}
VersionLoader.LoadVersionInfo();
}
private IEnumerable<IMigrationInfo> GetDownMigrationsToApply(long targetVersion)
{
var migrations = MigrationLoader.LoadMigrations();
var migrationsToApply = (from pair in migrations
where IsMigrationStepNeededForDownMigration(pair.Key, targetVersion)
select pair.Value);
return migrationsToApply.OrderByDescending(x => x.Version);
}
private bool IsMigrationStepNeededForDownMigration(long versionOfMigration, long targetVersion)
{
if (versionOfMigration > targetVersion && VersionLoader.VersionInfo.HasAppliedMigration(versionOfMigration))
{
return true;
}
return false;
}
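/// <summary>
/// Applies a single up migration. Attributed migrations already recorded in the version table
/// are skipped; attributed migrations that succeed are recorded through the version loader.
/// </summary>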
public virtual void ApplyMigrationUp(IMigrationInfo migrationInfo, bool useTransaction)
{
if (migrationInfo == null) throw new ArgumentNullException("migrationInfo");
if (!_alreadyOutputPreviewOnlyModeWarning && Processor.Options.PreviewOnly)
{
_announcer.Heading("PREVIEW-ONLY MODE");
_alreadyOutputPreviewOnlyModeWarning = true;
}
if (!migrationInfo.IsAttributed() || !VersionLoader.VersionInfo.HasAppliedMigration(migrationInfo.Version))
{
var name = migrationInfo.GetName();
_announcer.Heading(string.Format("{0} migrating", name));
_stopWatch.Start();
using (IMigrationScope scope = _migrationScopeHandler.CreateOrWrapMigrationScope(useTransaction))
{
try
{
ExecuteMigration(migrationInfo.Migration, (m, c) => m.GetUpExpressions(c));
if (migrationInfo.IsAttributed())
{
VersionLoader.UpdateVersionInfo(migrationInfo.Version, migrationInfo.Description ?? migrationInfo.Migration.GetType().Name);
}
scope.Complete();
}
catch
{
if (useTransaction && scope.IsActive)
scope.Cancel(); // SQLAnywhere needs explicit call to rollback transaction
throw;
}
_stopWatch.Stop();
_announcer.Say(string.Format("{0} migrated", name));
_announcer.ElapsedTime(_stopWatch.ElapsedTime());
}
}
}
public virtual void ApplyMigrationDown(IMigrationInfo migrationInfo, bool useTransaction)
{
if (migrationInfo == null) throw new ArgumentNullException("migrationInfo");
var name = migrationInfo.GetName();
_announcer.Heading(string.Format("{0} reverting", name));
_stopWatch.Start();
using (IMigrationScope scope = _migrationScopeHandler.CreateOrWrapMigrationScope(useTransaction))
{
try
{
ExecuteMigration(migrationInfo.Migration, (m, c) => m.GetDownExpressions(c));
if (migrationInfo.IsAttributed()) VersionLoader.DeleteVersion(migrationInfo.Version);
scope.Complete();
}
catch
{
if (useTransaction && scope.IsActive)
scope.Cancel(); // SQLAnywhere needs explicit call to rollback transaction
throw;
}
_stopWatch.Stop();
_announcer.Say(string.Format("{0} reverted", name));
_announcer.ElapsedTime(_stopWatch.ElapsedTime());
}
}
public void Rollback(int steps)
{
Rollback(steps, true);
}
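/// <summary>
/// Rolls back up to <paramref name="steps"/> applied migrations, in the order reported by the
/// version loader, and removes the version table once no applied migrations remain.
/// </summary>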
public void Rollback(int steps, bool useAutomaticTransactionManagement)
{
var availableMigrations = MigrationLoader.LoadMigrations();
var migrationsToRollback = new List<IMigrationInfo>();
foreach (long version in VersionLoader.VersionInfo.AppliedMigrations())
{
IMigrationInfo migrationInfo;
if (availableMigrations.TryGetValue(version, out migrationInfo)) migrationsToRollback.Add(migrationInfo);
}
using (IMigrationScope scope = _migrationScopeHandler.CreateOrWrapMigrationScope(useAutomaticTransactionManagement && TransactionPerSession))
{
try
{
foreach (IMigrationInfo migrationInfo in migrationsToRollback.Take(steps))
{
ApplyMigrationDown(migrationInfo, useAutomaticTransactionManagement && migrationInfo.TransactionBehavior == TransactionBehavior.Default);
}
scope.Complete();
}
catch
{
if (scope.IsActive)
scope.Cancel(); // SQLAnywhere needs explicit call to rollback transaction
throw;
}
}
VersionLoader.LoadVersionInfo();
if (!VersionLoader.VersionInfo.AppliedMigrations().Any())
VersionLoader.RemoveVersionTable();
}
public void RollbackToVersion(long version)
{
RollbackToVersion(version, true);
}
public void RollbackToVersion(long version, bool useAutomaticTransactionManagement)
{
var availableMigrations = MigrationLoader.LoadMigrations();
var migrationsToRollback = new List<IMigrationInfo>();
foreach (long appliedVersion in VersionLoader.VersionInfo.AppliedMigrations())
{
IMigrationInfo migrationInfo;
if (availableMigrations.TryGetValue(appliedVersion, out migrationInfo)) migrationsToRollback.Add(migrationInfo);
}
using (IMigrationScope scope = _migrationScopeHandler.CreateOrWrapMigrationScope(useAutomaticTransactionManagement && TransactionPerSession))
{
try
{
foreach (IMigrationInfo migrationInfo in migrationsToRollback)
{
if (version >= migrationInfo.Version) continue;
ApplyMigrationDown(migrationInfo, useAutomaticTransactionManagement && migrationInfo.TransactionBehavior == TransactionBehavior.Default);
}
scope.Complete();
}
catch
{
if (scope.IsActive)
scope.Cancel(); // SQLAnywhere needs explicit call to rollback transaction
throw;
}
}
VersionLoader.LoadVersionInfo();
if (version == 0 && !VersionLoader.VersionInfo.AppliedMigrations().Any())
VersionLoader.RemoveVersionTable();
}
public IAssemblyCollection MigrationAssemblies
{
get { return _migrationAssemblies; }
}
public void Up(IMigration migration)
{
var migrationInfoAdapter = new NonAttributedMigrationToMigrationInfoAdapter(migration);
ApplyMigrationUp(migrationInfoAdapter, true);
}
private void ExecuteMigration(IMigration migration, Action<IMigration, IMigrationContext> getExpressions)
{
CaughtExceptions = new List<Exception>();
var context = new MigrationContext(Conventions, Processor, MigrationAssemblies, RunnerContext.ApplicationContext, Processor.ConnectionString);
getExpressions(migration, context);
_migrationValidator.ApplyConventionsToAndValidateExpressions(migration, context.Expressions);
ExecuteExpressions(context.Expressions);
}
public void Down(IMigration migration)
{
var migrationInfoAdapter = new NonAttributedMigrationToMigrationInfoAdapter(migration);
ApplyMigrationDown(migrationInfoAdapter, true);
}
/// <summary>
/// execute each migration expression in the expression collection
/// </summary>
/// <param name="expressions"></param>
protected void ExecuteExpressions(ICollection<IMigrationExpression> expressions)
{
long insertTicks = 0;
int insertCount = 0;
foreach (IMigrationExpression expression in expressions)
{
try
{
if (expression is InsertDataExpression)
{
insertTicks += _stopWatch.Time(() => expression.ExecuteWith(Processor)).Ticks;
insertCount++;
}
else
{
AnnounceTime(expression.ToString(), () => expression.ExecuteWith(Processor));
}
}
catch (Exception er)
{
_announcer.Error(er);
//catch the error and move onto the next expression
if (SilentlyFail)
{
CaughtExceptions.Add(er);
continue;
}
throw;
}
}
if (insertCount > 0)
{
var avg = new TimeSpan(insertTicks / insertCount);
var msg = string.Format("-> {0} Insert operations completed in {1} taking an average of {2}", insertCount, new TimeSpan(insertTicks), avg);
_announcer.Say(msg);
}
}
private void AnnounceTime(string message, Action action)
{
_announcer.Say(message);
_announcer.ElapsedTime(_stopWatch.Time(action));
}
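/// <summary>
/// Throws a <see cref="VersionOrderInvalidException"/> if any unapplied migration has a version
/// number lower than the latest applied migration; otherwise announces that ordering is valid.
/// </summary>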
public void ValidateVersionOrder()
{
var unappliedVersions = MigrationLoader.LoadMigrations().Where(kvp => MigrationVersionLessThanGreatestAppliedMigration(kvp.Key)).ToList();
if (unappliedVersions.Any())
throw new VersionOrderInvalidException(unappliedVersions);
_announcer.Say("Version ordering valid.");
}
public void ListMigrations()
{
IVersionInfo currentVersionInfo = this.VersionLoader.VersionInfo;
long currentVersion = currentVersionInfo.Latest();
_announcer.Heading("Migrations");
foreach(KeyValuePair<long, IMigrationInfo> migration in MigrationLoader.LoadMigrations())
{
string migrationName = migration.Value.GetName();
bool isCurrent = migration.Key == currentVersion;
string message = string.Format("{0}{1}",
migrationName,
isCurrent ? " (current)" : string.Empty);
if(isCurrent)
_announcer.Emphasize(message);
else
_announcer.Say(message);
}
}
private bool MigrationVersionLessThanGreatestAppliedMigration(long version)
{
return !VersionLoader.VersionInfo.HasAppliedMigration(version) && version < VersionLoader.VersionInfo.Latest();
}
public IMigrationScope BeginScope()
{
return _migrationScopeHandler.BeginScope();
}
}
}
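// Illustrative usage sketch only (assumes a concrete IRunnerContext and IMigrationProcessor
// have been constructed elsewhere; the names below are hypothetical):
//
//   var runner = new MigrationRunner(typeof(MyFirstMigration).Assembly, runnerContext, processor);
//   runner.MigrateUp();                  // apply every pending migration
//   runner.RollbackToVersion(20150101);  // or roll back to a known version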
| |
/*
Project Orleans Cloud Service SDK ver. 1.0
Copyright (c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the ""Software""), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
using System;
using System.Collections.Generic;
using System.Net.Sockets;
using System.Text;
using Orleans.Messaging;
namespace Orleans.Runtime.Messaging
{
internal class SiloMessageSender : OutgoingMessageSender
{
private readonly MessageCenter messageCenter;
private const int DEFAULT_MAX_RETRIES = 0;
private readonly Dictionary<SiloAddress, DateTime> lastConnectionFailure;
internal const string RETRY_COUNT_TAG = "RetryCount";
internal static readonly TimeSpan CONNECTION_RETRY_DELAY = TimeSpan.FromMilliseconds(1000);
internal SiloMessageSender(string nameSuffix, MessageCenter msgCtr)
: base(nameSuffix, msgCtr.MessagingConfiguration)
{
messageCenter = msgCtr;
lastConnectionFailure = new Dictionary<SiloAddress, DateTime>();
OnFault = FaultBehavior.RestartOnFault;
}
protected override SocketDirection GetSocketDirection()
{
return SocketDirection.SiloToSilo;
}
protected override bool PrepareMessageForSend(Message msg)
{
// Don't send messages that have already timed out
if (msg.IsExpired)
{
msg.DropExpiredMessage(MessagingStatisticsGroup.Phase.Send);
return false;
}
// Fill in the outbound message with our silo address, if it's not already set
if (!msg.ContainsHeader(Message.Header.SENDING_SILO))
msg.SendingSilo = messageCenter.MyAddress;
// If there's no target silo set, then we shouldn't see this message; send it back
if (msg.TargetSilo == null)
{
FailMessage(msg, "No target silo provided -- internal error");
return false;
}
// If we know this silo is dead, don't bother
if ((messageCenter.SiloDeadOracle != null) && messageCenter.SiloDeadOracle(msg.TargetSilo))
{
FailMessage(msg, String.Format("Target {0} silo is known to be dead", msg.TargetSilo.ToLongString()));
return false;
}
// If we had a bad connection to this address recently, don't even try
DateTime failure;
if (lastConnectionFailure.TryGetValue(msg.TargetSilo, out failure))
{
var since = DateTime.UtcNow.Subtract(failure);
if (since < CONNECTION_RETRY_DELAY)
{
FailMessage(msg, String.Format("Recent ({0} ago, at {1}) connection failure trying to reach target silo {2}. Going to drop {3} msg {4} without sending. CONNECTION_RETRY_DELAY = {5}.",
since, TraceLogger.PrintDate(failure), msg.TargetSilo.ToLongString(), msg.Direction, msg.Id, CONNECTION_RETRY_DELAY));
return false;
}
}
if (Message.WriteMessagingTraces)
msg.AddTimestamp(Message.LifecycleTag.SendOutgoing);
return true;
}
protected override bool GetSendingSocket(Message msg, out Socket socket, out SiloAddress targetSilo, out string error)
{
socket = null;
targetSilo = msg.TargetSilo;
error = null;
try
{
socket = messageCenter.SocketManager.GetSendingSocket(targetSilo.Endpoint);
if (socket.Connected) return true;
messageCenter.SocketManager.InvalidateEntry(targetSilo.Endpoint);
socket = messageCenter.SocketManager.GetSendingSocket(targetSilo.Endpoint);
return true;
}
catch (Exception ex)
{
error = "Exception getting a sending socket to endpoint " + targetSilo.ToString();
Log.Warn(ErrorCode.Messaging_UnableToGetSendingSocket, error, ex);
messageCenter.SocketManager.InvalidateEntry(targetSilo.Endpoint);
lastConnectionFailure[targetSilo] = DateTime.UtcNow;
return false;
}
}
protected override void OnGetSendingSocketFailure(Message msg, string error)
{
FailMessage(msg, error);
}
protected override void OnMessageSerializationFailure(Message msg, Exception exc)
{
// we only get here if we failed to serialize the msg (or any other catastrophic failure).
// Request msg fails to serialize on the sending silo, so we just enqueue a rejection msg.
// Response msg fails to serialize on the responding silo, so we try to send an error response back.
Log.Warn(ErrorCode.MessagingUnexpectedSendError, String.Format("Unexpected error sending message {0}", msg.ToString()), exc);
msg.ReleaseBodyAndHeaderBuffers();
MessagingStatisticsGroup.OnFailedSentMessage(msg);
if (msg.Direction == Message.Directions.Request)
{
messageCenter.SendRejection(msg, Message.RejectionTypes.Unrecoverable, exc.ToString());
}
else if (msg.Direction == Message.Directions.Response && msg.Result != Message.ResponseTypes.Error)
{
// if we failed sending an original response, turn the response body into an error and reply with it.
// unless the response was already an error response (so we don't loop forever).
msg.Result = Message.ResponseTypes.Error;
msg.BodyObject = Response.ExceptionResponse(exc);
messageCenter.SendMessage(msg);
}
else
{
MessagingStatisticsGroup.OnDroppedSentMessage(msg);
}
}
protected override void OnSendFailure(Socket socket, SiloAddress targetSilo)
{
messageCenter.SocketManager.InvalidateEntry(targetSilo.Endpoint);
}
protected override void ProcessMessageAfterSend(Message msg, bool sendError, string sendErrorStr)
{
if (sendError)
{
msg.ReleaseHeadersOnly();
RetryMessage(msg);
}
else
{
msg.ReleaseBodyAndHeaderBuffers();
if (Log.IsVerbose3) Log.Verbose3("Sending queue delay time for: {0} is {1}", msg, DateTime.UtcNow.Subtract((DateTime)msg.GetMetadata(OutboundMessageQueue.QUEUED_TIME_METADATA)));
}
}
protected override void FailMessage(Message msg, string reason)
{
msg.ReleaseBodyAndHeaderBuffers();
MessagingStatisticsGroup.OnFailedSentMessage(msg);
if (msg.Direction == Message.Directions.Request)
{
if (Log.IsVerbose) Log.Verbose(ErrorCode.MessagingSendingRejection, "Silo {0} is rejecting message: {1}. Reason = {2}", messageCenter.MyAddress, msg, reason);
// Done retrying, send back an error instead
messageCenter.SendRejection(msg, Message.RejectionTypes.Transient, String.Format("Silo {0} is rejecting message: {1}. Reason = {2}", messageCenter.MyAddress, msg, reason));
}
else
{
Log.Info(ErrorCode.Messaging_OutgoingMS_DroppingMessage, "Silo {0} is dropping message: {1}. Reason = {2}", messageCenter.MyAddress, msg, reason);
MessagingStatisticsGroup.OnDroppedSentMessage(msg);
}
}
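// Re-enqueues the message for another send attempt, tracking attempts in the RetryCount
// metadata; once the retry count reaches the maximum (MAX_RETRIES metadata, default 0,
// i.e. no retries) the message is failed with the accumulated reason instead.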
private void RetryMessage(Message msg, Exception ex = null)
{
if (msg == null) return;
int maxRetries = DEFAULT_MAX_RETRIES;
if (msg.ContainsMetadata(Message.Metadata.MAX_RETRIES))
maxRetries = (int)msg.GetMetadata(Message.Metadata.MAX_RETRIES);
int retryCount = 0;
if (msg.ContainsMetadata(RETRY_COUNT_TAG))
retryCount = (int)msg.GetMetadata(RETRY_COUNT_TAG);
if (retryCount < maxRetries)
{
msg.SetMetadata(RETRY_COUNT_TAG, retryCount + 1);
messageCenter.OutboundQueue.SendMessage(msg);
}
else
{
var reason = new StringBuilder("Retry count exceeded. ");
if (ex != null)
{
reason.Append("Original exception is: ").Append(ex.ToString());
}
reason.Append("Msg is: ").Append(msg);
FailMessage(msg, reason.ToString());
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Data;
using System.Text;
using System.Windows.Forms;
namespace PTWin
{
public partial class ResourceEdit : WinPart
{
private ProjectTracker.Library.ResourceEdit _resource;
public ProjectTracker.Library.ResourceEdit Resource
{
get { return _resource; }
}
public ResourceEdit(ProjectTracker.Library.ResourceEdit resource)
{
InitializeComponent();
// store object reference
_resource = resource;
}
private void ResourceEdit_Load(object sender, EventArgs e)
{
this.CurrentPrincipalChanged += new EventHandler(ResourceEdit_CurrentPrincipalChanged);
_resource.PropertyChanged += new PropertyChangedEventHandler(mResource_PropertyChanged);
this.RoleListBindingSource.DataSource = ProjectTracker.Library.RoleList.GetList();
BindUI();
ApplyAuthorizationRules();
}
#region WinPart Code
protected internal override object GetIdValue()
{
return _resource;
}
public override string ToString()
{
return _resource.FullName;
}
private void ResourceEdit_CurrentPrincipalChanged(object sender, EventArgs e)
{
ApplyAuthorizationRules();
}
#endregion
private void ApplyAuthorizationRules()
{
bool canEdit = Csla.Rules.BusinessRules.HasPermission(Csla.Rules.AuthorizationActions.EditObject, typeof(ProjectTracker.Library.ResourceEdit));
if (!canEdit)
RebindUI(false, true);
// have the controls enable/disable/etc
this.ReadWriteAuthorization1.ResetControlAuthorization();
// enable/disable appropriate buttons
this.OKButton.Enabled = canEdit;
this.ApplyButton.Enabled = canEdit;
this.Cancel_Button.Enabled = canEdit;
// enable/disable role column in grid
this.AssignmentsDataGridView.Columns[3].ReadOnly = !canEdit;
}
private void OKButton_Click(object sender, EventArgs e)
{
using (StatusBusy busy = new StatusBusy("Saving..."))
{
RebindUI(true, false);
}
this.Close();
}
private void ApplyButton_Click(object sender, EventArgs e)
{
using (StatusBusy busy = new StatusBusy("Saving..."))
{
RebindUI(true, true);
}
}
private void Cancel_Button_Click(object sender, EventArgs e)
{
RebindUI(false, true);
}
private void CloseButton_Click(object sender, EventArgs e)
{
RebindUI(false, false);
this.Close();
}
private void BindUI()
{
_resource.BeginEdit();
this.ResourceBindingSource.DataSource = _resource;
}
private void RebindUI(bool saveObject, bool rebind)
{
// disable events
this.ResourceBindingSource.RaiseListChangedEvents = false;
this.AssignmentsBindingSource.RaiseListChangedEvents = false;
try
{
// unbind the UI
UnbindBindingSource(this.AssignmentsBindingSource, saveObject, false);
UnbindBindingSource(this.ResourceBindingSource, saveObject, true);
this.AssignmentsBindingSource.DataSource = this.ResourceBindingSource;
// save or cancel changes
if (saveObject)
{
_resource.ApplyEdit();
try
{
_resource = _resource.Save();
}
catch (Csla.DataPortalException ex)
{
MessageBox.Show(ex.BusinessException.ToString(),
"Error saving", MessageBoxButtons.OK,
MessageBoxIcon.Exclamation);
}
catch (Exception ex)
{
MessageBox.Show(ex.ToString(),
"Error Saving", MessageBoxButtons.OK,
MessageBoxIcon.Exclamation);
}
}
else
_resource.CancelEdit();
}
finally
{
// rebind UI if requested
if (rebind)
BindUI();
// restore events
this.ResourceBindingSource.RaiseListChangedEvents = true;
this.AssignmentsBindingSource.RaiseListChangedEvents = true;
if (rebind)
{
// refresh the UI if rebinding
this.ResourceBindingSource.ResetBindings(false);
this.AssignmentsBindingSource.ResetBindings(false);
}
}
}
private void AssignButton_Click(object sender, EventArgs e)
{
ProjectSelect dlg = new ProjectSelect();
if (dlg.ShowDialog() == DialogResult.OK)
try
{
_resource.Assignments.AssignTo(dlg.ProjectId);
}
catch (InvalidOperationException ex)
{
MessageBox.Show(ex.ToString(),
"Error Assigning", MessageBoxButtons.OK,
MessageBoxIcon.Information);
}
catch (Exception ex)
{
MessageBox.Show(ex.ToString(),
"Error Assigning", MessageBoxButtons.OK,
MessageBoxIcon.Exclamation);
}
}
private void UnassignButton_Click(object sender, EventArgs e)
{
if (this.AssignmentsDataGridView.SelectedRows.Count > 0)
{
var projectId = (int)this.AssignmentsDataGridView.SelectedRows[0].Cells[0].Value;
_resource.Assignments.Remove(projectId);
}
}
private void mResource_PropertyChanged(object sender, PropertyChangedEventArgs e)
{
if (e.PropertyName == "IsDirty")
{
this.ResourceBindingSource.ResetBindings(true);
this.AssignmentsBindingSource.ResetBindings(true);
}
}
private void AssignmentsDataGridView_CellContentClick(object sender, DataGridViewCellEventArgs e)
{
if (e.ColumnIndex == 1 && e.RowIndex > -1)
{
var projectId = (int)this.AssignmentsDataGridView.Rows[e.RowIndex].Cells[0].Value;
MainForm.Instance.ShowEditProject(projectId);
}
}
}
}
| |
//-----------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//-----------------------------------------------------------------------------
namespace System.Runtime.Diagnostics
{
using System;
using System.Collections;
using System.ComponentModel;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Security;
using System.Text;
using System.Xml;
using System.Xml.XPath;
using System.Diagnostics.CodeAnalysis;
using System.Security.Permissions;
abstract class DiagnosticTraceBase
{
//Diagnostics trace
protected const string DefaultTraceListenerName = "Default";
protected const string TraceRecordVersion = "http://schemas.microsoft.com/2004/10/E2ETraceEvent/TraceRecord";
protected static string AppDomainFriendlyName = AppDomain.CurrentDomain.FriendlyName;
const ushort TracingEventLogCategory = 4;
object thisLock;
bool tracingEnabled = true;
bool calledShutdown;
bool haveListeners;
SourceLevels level;
protected string TraceSourceName;
TraceSource traceSource;
[Fx.Tag.SecurityNote(Critical = "This determines the event source name.")]
[SecurityCritical]
string eventSourceName;
public DiagnosticTraceBase(string traceSourceName)
{
this.thisLock = new object();
this.TraceSourceName = traceSourceName;
this.LastFailure = DateTime.MinValue;
}
protected DateTime LastFailure { get; set; }
[SuppressMessage(FxCop.Category.Security, FxCop.Rule.DoNotIndirectlyExposeMethodsWithLinkDemands,
Justification = "SecurityCritical method. Does not expose critical resources returned by methods with Link Demands")]
[Fx.Tag.SecurityNote(Critical = "Critical because we are invoking TraceSource.Listeners which has a Link Demand for UnmanagedCode permission.",
Miscellaneous = "Asserting Unmanaged Code causes traceSource.Listeners to be successfully initiated and cached. But the Listeners property has a LinkDemand for UnmanagedCode, so it can't be read by partially trusted assemblies in heterogeneous appdomains")]
[SecurityCritical]
[SecurityPermission(SecurityAction.Assert, UnmanagedCode = true)]
static void UnsafeRemoveDefaultTraceListener(TraceSource traceSource)
{
traceSource.Listeners.Remove(DiagnosticTraceBase.DefaultTraceListenerName);
}
public TraceSource TraceSource
{
get
{
return this.traceSource;
}
set
{
SetTraceSource(value);
}
}
[SuppressMessage(FxCop.Category.Security, FxCop.Rule.DoNotIndirectlyExposeMethodsWithLinkDemands,
Justification = "Does not expose critical resources returned by methods with Link Demands")]
[Fx.Tag.SecurityNote(Critical = "Critical because we are invoking TraceSource.Listeners which has a Link Demand for UnmanagedCode permission.",
Safe = "Safe because are only retrieving the count of listeners and removing the default trace listener - we aren't leaking any critical resources.")]
[SecuritySafeCritical]
protected void SetTraceSource(TraceSource traceSource)
{
if (traceSource != null)
{
UnsafeRemoveDefaultTraceListener(traceSource);
this.traceSource = traceSource;
this.haveListeners = this.traceSource.Listeners.Count > 0;
}
}
public bool HaveListeners
{
get
{
return this.haveListeners;
}
}
SourceLevels FixLevel(SourceLevels level)
{
//the bit fixing below is meant to keep the trace level legal even if somebody uses numbers in config
if (((level & ~SourceLevels.Information) & SourceLevels.Verbose) != 0)
{
level |= SourceLevels.Verbose;
}
else if (((level & ~SourceLevels.Warning) & SourceLevels.Information) != 0)
{
level |= SourceLevels.Information;
}
else if (((level & ~SourceLevels.Error) & SourceLevels.Warning) != 0)
{
level |= SourceLevels.Warning;
}
if (((level & ~SourceLevels.Critical) & SourceLevels.Error) != 0)
{
level |= SourceLevels.Error;
}
if ((level & SourceLevels.Critical) != 0)
{
level |= SourceLevels.Critical;
}
// If only the ActivityTracing flag is set, then
// we really have Off. Do not do ActivityTracing then.
if (level == SourceLevels.ActivityTracing)
{
level = SourceLevels.Off;
}
return level;
}
protected virtual void OnSetLevel(SourceLevels level)
{
}
[SuppressMessage(FxCop.Category.Security, FxCop.Rule.DoNotIndirectlyExposeMethodsWithLinkDemands,
Justification = "Does not expose critical resources returned by methods with Link Demands")]
[Fx.Tag.SecurityNote(Critical = "Critical because we are invoking TraceSource.Listeners and SourceSwitch.Level which have Link Demands for UnmanagedCode permission.")]
[SecurityCritical]
void SetLevel(SourceLevels level)
{
SourceLevels fixedLevel = FixLevel(level);
this.level = fixedLevel;
if (this.TraceSource != null)
{
// Need this for setup from places like TransactionBridge.
this.haveListeners = this.TraceSource.Listeners.Count > 0;
OnSetLevel(level);
#pragma warning disable 618
this.tracingEnabled = this.HaveListeners && (level != SourceLevels.Off);
#pragma warning restore 618
this.TraceSource.Switch.Level = level;
}
}
[Fx.Tag.SecurityNote(Critical = "Critical because we are invoking SetLevel.")]
[SecurityCritical]
void SetLevelThreadSafe(SourceLevels level)
{
lock (this.thisLock)
{
SetLevel(level);
}
}
public SourceLevels Level
{
get
{
if (this.TraceSource != null && (this.TraceSource.Switch.Level != this.level))
{
this.level = this.TraceSource.Switch.Level;
}
return this.level;
}
[Fx.Tag.SecurityNote(Critical = "Critical because we are invoking SetLevelTheadSafe.")]
[SecurityCritical]
set
{
SetLevelThreadSafe(value);
}
}
protected string EventSourceName
{
[Fx.Tag.SecurityNote(Critical = "Access critical eventSourceName field",
Safe = "Doesn't leak info\\resources")]
[SecuritySafeCritical]
get
{
return this.eventSourceName;
}
[Fx.Tag.SecurityNote(Critical = "This determines the event source name.")]
[SecurityCritical]
set
{
this.eventSourceName = value;
}
}
public bool TracingEnabled
{
get
{
return this.tracingEnabled && this.traceSource != null;
}
}
protected static string ProcessName
{
[Fx.Tag.SecurityNote(Critical = "Satisfies a LinkDemand for 'PermissionSetAttribute' on type 'Process' when calling method GetCurrentProcess",
Safe = "Does not leak any resource and has been reviewed")]
[SecuritySafeCritical]
get
{
string retval = null;
using (Process process = Process.GetCurrentProcess())
{
retval = process.ProcessName;
}
return retval;
}
}
protected static int ProcessId
{
[Fx.Tag.SecurityNote(Critical = "Satisfies a LinkDemand for 'PermissionSetAttribute' on type 'Process' when calling method GetCurrentProcess",
Safe = "Does not leak any resource and has been reviewed")]
[SecuritySafeCritical]
get
{
int retval = -1;
using (Process process = Process.GetCurrentProcess())
{
retval = process.Id;
}
return retval;
}
}
public virtual bool ShouldTrace(TraceEventLevel level)
{
return ShouldTraceToTraceSource(level);
}
public bool ShouldTrace(TraceEventType type)
{
return this.TracingEnabled && this.HaveListeners &&
(this.TraceSource != null) &&
0 != ((int)type & (int)this.Level);
}
public bool ShouldTraceToTraceSource(TraceEventLevel level)
{
return ShouldTrace(TraceLevelHelper.GetTraceEventType(level));
}
//only used for exceptions, perf is not important
public static string XmlEncode(string text)
{
if (string.IsNullOrEmpty(text))
{
return text;
}
int len = text.Length;
StringBuilder encodedText = new StringBuilder(len + 8); //perf optimization, expecting no more than 2 > characters
for (int i = 0; i < len; ++i)
{
char ch = text[i];
switch (ch)
{
case '<':
encodedText.Append("&lt;");
break;
case '>':
encodedText.Append("&gt;");
break;
case '&':
encodedText.Append("&amp;");
break;
default:
encodedText.Append(ch);
break;
}
}
return encodedText.ToString();
}
[Fx.Tag.SecurityNote(Critical = "Sets global event handlers for the AppDomain",
Safe = "Doesn't leak resources\\Information")]
[SecuritySafeCritical]
[SuppressMessage(FxCop.Category.Security, FxCop.Rule.DoNotIndirectlyExposeMethodsWithLinkDemands,
Justification = "SecuritySafeCritical method, Does not expose critical resources returned by methods with Link Demands")]
protected void AddDomainEventHandlersForCleanup()
{
AppDomain currentDomain = AppDomain.CurrentDomain;
if (this.TraceSource != null)
{
this.haveListeners = this.TraceSource.Listeners.Count > 0;
}
this.tracingEnabled = this.haveListeners;
if (this.TracingEnabled)
{
currentDomain.UnhandledException += new UnhandledExceptionEventHandler(UnhandledExceptionHandler);
this.SetLevel(this.TraceSource.Switch.Level);
#if MONO_FEATURE_MULTIPLE_APPDOMAINS
currentDomain.DomainUnload += new EventHandler(ExitOrUnloadEventHandler);
#endif
currentDomain.ProcessExit += new EventHandler(ExitOrUnloadEventHandler);
}
}
void ExitOrUnloadEventHandler(object sender, EventArgs e)
{
ShutdownTracing();
}
protected abstract void OnUnhandledException(Exception exception);
protected void UnhandledExceptionHandler(object sender, UnhandledExceptionEventArgs args)
{
Exception e = (Exception)args.ExceptionObject;
OnUnhandledException(e);
ShutdownTracing();
}
protected static string CreateSourceString(object source)
{
var traceSourceStringProvider = source as ITraceSourceStringProvider;
if (traceSourceStringProvider != null)
{
return traceSourceStringProvider.GetSourceString();
}
return CreateDefaultSourceString(source);
}
internal static string CreateDefaultSourceString(object source)
{
if (source == null)
{
throw new ArgumentNullException("source");
}
return String.Format(CultureInfo.CurrentCulture, "{0}/{1}", source.GetType().ToString(), source.GetHashCode());
}
protected static void AddExceptionToTraceString(XmlWriter xml, Exception exception)
{
xml.WriteElementString(DiagnosticStrings.ExceptionTypeTag, XmlEncode(exception.GetType().AssemblyQualifiedName));
xml.WriteElementString(DiagnosticStrings.MessageTag, XmlEncode(exception.Message));
xml.WriteElementString(DiagnosticStrings.StackTraceTag, XmlEncode(StackTraceString(exception)));
xml.WriteElementString(DiagnosticStrings.ExceptionStringTag, XmlEncode(exception.ToString()));
Win32Exception win32Exception = exception as Win32Exception;
if (win32Exception != null)
{
xml.WriteElementString(DiagnosticStrings.NativeErrorCodeTag, win32Exception.NativeErrorCode.ToString("X", CultureInfo.InvariantCulture));
}
if (exception.Data != null && exception.Data.Count > 0)
{
xml.WriteStartElement(DiagnosticStrings.DataItemsTag);
foreach (object dataItem in exception.Data.Keys)
{
xml.WriteStartElement(DiagnosticStrings.DataTag);
xml.WriteElementString(DiagnosticStrings.KeyTag, XmlEncode(dataItem.ToString()));
xml.WriteElementString(DiagnosticStrings.ValueTag, XmlEncode(exception.Data[dataItem].ToString()));
xml.WriteEndElement();
}
xml.WriteEndElement();
}
if (exception.InnerException != null)
{
xml.WriteStartElement(DiagnosticStrings.InnerExceptionTag);
AddExceptionToTraceString(xml, exception.InnerException);
xml.WriteEndElement();
}
}
protected static string StackTraceString(Exception exception)
{
string retval = exception.StackTrace;
if (string.IsNullOrEmpty(retval))
{
// This means that the exception hasn't been thrown yet. We need to manufacture the stack then.
StackTrace stackTrace = new StackTrace(false);
// Figure out how many frames should be thrown away
System.Diagnostics.StackFrame[] stackFrames = stackTrace.GetFrames();
int frameCount = 0;
bool breakLoop = false;
foreach (StackFrame frame in stackFrames)
{
string methodName = frame.GetMethod().Name;
switch (methodName)
{
case "StackTraceString":
case "AddExceptionToTraceString":
case "BuildTrace":
case "TraceEvent":
case "TraceException":
case "GetAdditionalPayload":
++frameCount;
break;
default:
if (methodName.StartsWith("ThrowHelper", StringComparison.Ordinal))
{
++frameCount;
}
else
{
breakLoop = true;
}
break;
}
if (breakLoop)
{
break;
}
}
stackTrace = new StackTrace(frameCount, false);
retval = stackTrace.ToString();
}
return retval;
}
//CSDMain:109153, Duplicate code from System.ServiceModel.Diagnostics
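// Writes a "failed to trace" entry to the event log, rate-limited so that at most one failure
// is logged per 10-minute blackout window; exceptions raised by the event logger itself are
// swallowed unless they are fatal.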
[Fx.Tag.SecurityNote(Critical = "Calls unsafe methods, UnsafeCreateEventLogger and UnsafeLogEvent.",
Safe = "Event identities cannot be spoofed as they are constants determined inside the method, Demands the same permission that is asserted by the unsafe method.")]
[SecuritySafeCritical]
[SuppressMessage(FxCop.Category.Security, FxCop.Rule.SecureAsserts,
Justification = "Should not demand permission that is asserted by the EtwProvider ctor.")]
protected void LogTraceFailure(string traceString, Exception exception)
{
const int FailureBlackoutDuration = 10;
TimeSpan FailureBlackout = TimeSpan.FromMinutes(FailureBlackoutDuration);
try
{
lock (this.thisLock)
{
if (DateTime.UtcNow.Subtract(this.LastFailure) >= FailureBlackout)
{
this.LastFailure = DateTime.UtcNow;
#pragma warning disable 618
EventLogger logger = EventLogger.UnsafeCreateEventLogger(this.eventSourceName, this);
#pragma warning restore 618
if (exception == null)
{
logger.UnsafeLogEvent(TraceEventType.Error, TracingEventLogCategory, (uint)System.Runtime.Diagnostics.EventLogEventId.FailedToTraceEvent, false,
traceString);
}
else
{
logger.UnsafeLogEvent(TraceEventType.Error, TracingEventLogCategory, (uint)System.Runtime.Diagnostics.EventLogEventId.FailedToTraceEventWithException, false,
traceString, exception.ToString());
}
}
}
}
catch (Exception eventLoggerException)
{
if (Fx.IsFatal(eventLoggerException))
{
throw;
}
}
}
protected abstract void OnShutdownTracing();
void ShutdownTracing()
{
if (!this.calledShutdown)
{
this.calledShutdown = true;
try
{
OnShutdownTracing();
}
#pragma warning suppress 56500 //[....]; Taken care of by FxCop
catch (Exception exception)
{
if (Fx.IsFatal(exception))
{
throw;
}
//log failure
LogTraceFailure(null, exception);
}
}
}
protected bool CalledShutdown
{
get
{
return this.calledShutdown;
}
}
public static Guid ActivityId
{
[Fx.Tag.SecurityNote(Critical = "gets the CorrelationManager, which does a LinkDemand for UnmanagedCode",
Safe = "only uses the CM to get the ActivityId, which is not protected data, doesn't leak the CM")]
[SecuritySafeCritical]
[SuppressMessage(FxCop.Category.Security, FxCop.Rule.DoNotIndirectlyExposeMethodsWithLinkDemands,
Justification = "SecuritySafeCriticial method")]
get
{
object id = Trace.CorrelationManager.ActivityId;
return id == null ? Guid.Empty : (Guid)id;
}
[Fx.Tag.SecurityNote(Critical = "gets the CorrelationManager, which does a LinkDemand for UnmanagedCode",
Safe = "only uses the CM to get the ActivityId, which is not protected data, doesn't leak the CM")]
[SecuritySafeCritical]
set
{
Trace.CorrelationManager.ActivityId = value;
}
}
#pragma warning restore 56500
protected static string LookupSeverity(TraceEventType type)
{
string s;
switch (type)
{
case TraceEventType.Critical:
s = "Critical";
break;
case TraceEventType.Error:
s = "Error";
break;
case TraceEventType.Warning:
s = "Warning";
break;
case TraceEventType.Information:
s = "Information";
break;
case TraceEventType.Verbose:
s = "Verbose";
break;
case TraceEventType.Start:
s = "Start";
break;
case TraceEventType.Stop:
s = "Stop";
break;
case TraceEventType.Suspend:
s = "Suspend";
break;
case TraceEventType.Transfer:
s = "Transfer";
break;
default:
s = type.ToString();
break;
}
#pragma warning disable 618
Fx.Assert(s == type.ToString(), "Return value should equal the name of the enum");
#pragma warning restore 618
return s;
}
public abstract bool IsEnabled();
public abstract void TraceEventLogEvent(TraceEventType type, TraceRecord traceRecord);
}
}
| |
using System;
using System.IO;
using System.Collections.Generic;
using System.Text;
using System.Text.RegularExpressions;
using System.Windows.Forms;
using Newtonsoft.Json.Linq;
using ExcelDna.Integration;
namespace CellStore.Excel.Tools
{
/// <summary>
/// Represents a collection of functions to help with several tasks
/// </summary>
public class Utils
{
private static String logPathDir =
System.Environment.GetFolderPath(System.Environment.SpecialFolder.LocalApplicationData)
+ "\\28msec";
public static String logPath = logPathDir + "\\CellStore.Excel.log";
private static System.IO.StreamWriter logWriter;
public static void initLogWriter()
{
if (!Directory.Exists(logPathDir))
{
Directory.CreateDirectory(logPathDir);
}
logWriter = new System.IO.StreamWriter(logPath, true);
}
public static void closeLogWriter()
{
logWriter.Close();
}
public static void log(String message)
{
logWriter.WriteLine("{1:HH:mm:ss.fff} LOG {0}", message, DateTime.Now);
logWriter.Flush();
}
private static double? getValueDouble(JObject fact)
{
JValue value = ((JValue)fact["Value"]);
if (value != null && value.Type != JTokenType.Null && value.ToString() != String.Empty)
{
return value.Value<double>();
}
return null;
}
private static string getAspectValue(JObject fact, String aspectName)
{
JObject aspects = (JObject)fact["Aspects"];
JValue aspect = ((JValue)aspects[aspectName]);
if (aspect != null && aspect.Type != JTokenType.Null)
{
return aspect.Value<String>();
}
else if (aspect != null && aspect.Type == JTokenType.Null)
{
return "null";
}
return String.Empty;
}
private static string factToString(JObject fact)
{
StringBuilder sb = new StringBuilder();
JObject aspects = (JObject)fact["Aspects"];
String archive = getAspectValue(fact, "xbrl28:Archive");
String entity = getAspectValue(fact, "xbrl:Entity");
String period = getAspectValue(fact, "xbrl:Period");
String concept = getAspectValue(fact, "xbrl:Concept");
String unit = getAspectValue(fact, "xbrl:Unit");
double? value = getValueDouble(fact);
sb.AppendLine("Value=" + value);
sb.AppendLine("Concept=" + concept + " Entity=" + entity + " Archive=" + archive + " Period=" + period + " Unit=" + unit);
List<string> ignore = new List<string> { "xbrl28:Archive", "xbrl:Entity", "xbrl:Period", "xbrl:Concept", "xbrl:Unit" };
foreach (JValue keyAspect in fact["KeyAspects"].Children())
{
String dim = keyAspect.Value<String>();
if (ignore.Contains(dim))
continue;
String mem = ((JValue)aspects[dim]).Value<String>();
sb.Append(dim + "=" + mem + " ");
}
sb.AppendLine("");
sb.AppendLine("------------------------------------");
return sb.ToString();
}
public static Object[,] defaultErrorHandler(Exception ex)
{
String caption = "ERROR";
String msg = ex.Message;
log(caption + ": " + msg);
Object[,] error = new Object[,] { { "# ERROR " + msg } };
return error;
}
public static Object getFactTableResult(dynamic response, bool debugInfo = false)
{
JArray facts = (JArray)response["FactTable"];
Object[] results;
if (facts.Count > 0)
{
results = new Object[facts.Count];
}
else
{
results = new Object[] { ExcelEmpty.Value };
}
StringBuilder sb = new StringBuilder();
int row = 0;
foreach (JObject fact in facts.Children())
{
double? value = getValueDouble(fact);
if (value != null)
{
results[row] = value;
if (debugInfo)
sb.Append(factToString(fact));
row++;
}
}
if (debugInfo)
{
Utils.log(facts.ToString());
MessageBox.Show(sb.ToString(), "Facts Debug Info");
}
if(results.Length > 1)
{
return new Object[]
{
string.Join("|", results)
};
}
return results;
}
public static bool hasEntityFilter(String eid, String ticker, String tag, Dictionary<string, string> dimensions)
{
return eid != null || ticker != null || tag != null ||
(dimensions != null && dimensions.ContainsKey("xbrl:Entity"));
}
public static bool hasConceptFilter(String concept, Dictionary<string, string> dimensions)
{
return concept != null ||
(dimensions != null && dimensions.ContainsKey("xbrl:Concept"));
}
public static bool hasAdditionalFilter(String fiscalYear, String fiscalPeriod, Dictionary<string, string> dimensions)
{
return fiscalYear != null || fiscalPeriod != null || dimensions != null;
}
public static String castParamString(
Object param, String paramName, bool isMandatory, String defaultVal = null)
{
String param_casted;
if (param is string)
{
param_casted = Convert.ToString(param);
}
else if (param is double)
{
param_casted = Convert.ToString(param);
}
else if (param is Boolean)
{
param_casted = Convert.ToString(param);
}
else if (param is ExcelReference)
{
ExcelReference reference = (ExcelReference)param;
List<ExcelReference> list = reference.InnerReferences;
if (reference.GetValue() is ExcelError && list != null && list.ToArray().Length > 0)
{
param_casted = castParamString(list[0], paramName, isMandatory, defaultVal);
}
else
{
param_casted = castParamString(reference.GetValue(), paramName, isMandatory, defaultVal);
}
}
else if (!isMandatory && (param == null || param is ExcelEmpty || param is ExcelMissing))
{
param_casted = defaultVal;
}
else if (isMandatory && (param == null || param is ExcelEmpty || param is ExcelMissing))
{
throw new ArgumentException("Mandatory Parameter missing: '" + paramName + "'.", paramName);
}
else
{
throw new ArgumentException("Invalid Parameter value '" + param.ToString() + "'.", paramName);
}
return param_casted;
}
public static bool castParamBool(
Object param, String paramName, bool defaultVal)
{
bool param_casted;
if (param is Boolean)
{
param_casted = Convert.ToBoolean(param);
}
else if (param is double)
{
param_casted = Convert.ToBoolean(param);
}
else if (param is string)
{
param_casted = Convert.ToBoolean(param);
}
else if (param == null || param is ExcelEmpty || param is ExcelMissing)
{
param_casted = defaultVal;
}
else
{
throw new ArgumentException("Invalid Boolean Parameter value '" + param.ToString() + "'.", paramName);
}
return param_casted;
}
public static int? castParamInt(
Object param, String paramName, int? defaultVal = null)
{
int? param_casted;
if (param is string)
{
param_casted = Convert.ToInt32(param);
}
else if (param is double)
{
param_casted = Convert.ToInt32(param);
}
else if (param == null || param is ExcelEmpty || param is ExcelMissing)
{
param_casted = defaultVal;
}
else
{
throw new ArgumentException("Invalid Parameter value '" + param.ToString() + "'.", paramName);
}
return param_casted;
}
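// Recursively flattens Excel parameter values (single values, arrays, ranges and references,
// descending into inner references when a reference evaluates to an error) into key/value
// pairs; each string entry is expected to look like 'prefix:Dimension=value' and is validated
// against that pattern, taking the optional suffix into account.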
private static void readFromObject(Object param, String paramName, String suffix, Dictionary<string, string> dict)
{
if (param is Object[])
{
//Utils.log("Object[]");
Object[] param_casted = (Object[])param;
int param_d1 = param_casted.Length;
for (int i = 0; i < param_d1; i++)
{
if (param_casted[i] == null || param_casted[i] is ExcelEmpty || param_casted[i] is ExcelMissing)
{
continue;
}
readFromObject(param_casted[i], paramName, suffix, dict);
}
}
else if (param is Object[,])
{
//Utils.log("Object[,]");
Object[,] param_casted = (Object[,])param;
int param_d1 = param_casted.GetLength(0);
int param_d2 = param_casted.GetLength(1);
for (int i = 0; i < param_d1; i++)
{
for (int j = 0; j < param_d2; j++)
{
if (param_casted[i, j] == null || param_casted[i, j] is ExcelEmpty || param_casted[i, j] is ExcelMissing)
{
continue;
}
readFromObject(param_casted[i, j], paramName, suffix, dict);
}
}
}
else if (param is ExcelReference)
{
//Utils.log("ExcelReference");
ExcelReference reference = (ExcelReference) param;
List<ExcelReference> list = reference.InnerReferences;
if (reference.GetValue() is ExcelError && list != null && list.ToArray().Length > 0)
{
foreach (ExcelReference refer in list)
{
Object val = refer.GetValue();
readFromObject(val, paramName, suffix, dict);
}
}
else
{
readFromObject(reference.GetValue(), paramName, suffix, dict);
}
}
else if (param is string)
{
string param_Val = Convert.ToString(param);
//Utils.log("val: " + param_Val);
string[] tokenz = param_Val.Split('=');
string errormsg = "Invalid Parameter value '" + param_Val + "' for parameter '" + paramName + "'. Accepted format: 'prefix:Dimension=value'.";
if (tokenz.Length != 2)
{
throw new ArgumentException(errormsg, paramName);
}
Regex regex = new Regex("^[^:]+:[^:]+" + suffix + "$");
string param_Key = tokenz[0] + suffix;
if (!regex.IsMatch(param_Key)) // the user might have used the suffix already
{
param_Key = tokenz[0] + "";
}
if (!regex.IsMatch(param_Key))
{
throw new ArgumentException(errormsg, paramName);
}
string param_Value = Convert.ToString(tokenz[1]);
//Utils.log(param_Key + "=" + param_Value);
dict.Add(param_Key, param_Value);
}
else if (param == null || param is ExcelEmpty || param is ExcelMissing)
{
; // skip
}
else
{
throw new ArgumentException("Invalid Parameter value '" + Convert.ToString(param) + "' for '" + paramName + "'.", paramName);
}
}
public static Dictionary<string, string> castStringDictionary(
Object param, String paramName, String suffix)
{
if (param == null || param is ExcelEmpty || param is ExcelMissing)
{
return null;
}
Dictionary<string, string> dict = new Dictionary<string, string>();
readFromObject(param, paramName, suffix, dict);
return dict;
}
public static Dictionary<string, bool> castBoolDictionary(
Object param, String paramName, String suffix)
{
Dictionary<string, string> dictString = castStringDictionary(param, paramName, suffix);
if (dictString == null)
{
return null;
}
Dictionary<string, bool> dict = new Dictionary<string, bool>(dictString.Count);
foreach (KeyValuePair<string, string> entry in dictString)
{
dict.Add(entry.Key, Convert.ToBoolean(entry.Value));
}
return dict;
}
public static Dictionary<string, int> castIntDictionary(
Object param, String paramName, String suffix)
{
Dictionary<string, string> dictString = castStringDictionary(param, paramName, suffix);
if (dictString == null)
{
return null;
}
Dictionary<string, int> dict = new Dictionary<string, int>(dictString.Count);
foreach (KeyValuePair<string, string> entry in dictString)
{
dict.Add(entry.Key, Convert.ToInt32(entry.Value));
}
return dict;
}
}
}
| |
using System;
/// <summary>
/// ToInt64(System.Object)
/// </summary>
public class ConvertToInt64_9
{
#region Public Methods
public bool RunTests()
{
bool retVal = true;
TestLibrary.TestFramework.LogInformation("[Positive]");
retVal = PosTest1() && retVal;
retVal = PosTest2() && retVal;
retVal = PosTest3() && retVal;
retVal = PosTest4() && retVal;
retVal = PosTest5() && retVal;
retVal = PosTest6() && retVal;
retVal = PosTest7() && retVal;
//
// TODO: Add your negative test cases here
//
TestLibrary.TestFramework.LogInformation("[Negative]");
retVal = NegTest1() && retVal;
return retVal;
}
#region Positive Test Cases
public bool PosTest1()
{
bool retVal = true;
// Add your scenario description here
TestLibrary.TestFramework.BeginScenario("PosTest1: Verify method ToInt64((object)random).");
try
{
object random = TestLibrary.Generator.GetInt64(-55);
long actual = Convert.ToInt64(random);
long expected = (long)random;
if (actual != expected)
{
TestLibrary.TestFramework.LogError("001.1", "Method ToInt64 Err.");
TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLE] actual = " + actual + ", expected = " + expected);
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("001.2", "Unexpected exception: " + e);
TestLibrary.TestFramework.LogInformation(e.StackTrace);
retVal = false;
}
return retVal;
}
public bool PosTest2()
{
bool retVal = true;
// Add your scenario description here
TestLibrary.TestFramework.BeginScenario("PosTest2: Verify method ToInt64((object)0)");
try
{
object obj = 0;
long actual = Convert.ToInt64(obj);
long expected = 0;
if (actual != expected)
{
TestLibrary.TestFramework.LogError("002.1", "Method ToInt64 Err.");
TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLE] actual = " + actual + ", expected = " + expected);
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("002.2", "Unexpected exception: " + e);
TestLibrary.TestFramework.LogInformation(e.StackTrace);
retVal = false;
}
return retVal;
}
public bool PosTest3()
{
bool retVal = true;
// Add your scenario description here
TestLibrary.TestFramework.BeginScenario("PosTest3: Verify method ToInt64((object)int64.max)");
try
{
object obj = Int64.MaxValue;
long actual = Convert.ToInt64(obj);
long expected = Int64.MaxValue;
if (actual != expected)
{
TestLibrary.TestFramework.LogError("003.1", "Method ToInt64 Err.");
TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLE] actual = " + actual + ", expected = " + expected);
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("003.2", "Unexpected exception: " + e);
TestLibrary.TestFramework.LogInformation(e.StackTrace);
retVal = false;
}
return retVal;
}
public bool PosTest4()
{
bool retVal = true;
// Add your scenario description here
TestLibrary.TestFramework.BeginScenario("PosTest4: Verify method ToInt64((object)int64.min)");
try
{
object obj = Int64.MinValue;
long actual = Convert.ToInt64(obj);
long expected = Int64.MinValue;
if (actual != expected)
{
TestLibrary.TestFramework.LogError("004.1", "Method ToInt64 Err.");
TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLE] actual = " + actual + ", expected = " + expected);
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("004.2", "Unexpected exception: " + e);
TestLibrary.TestFramework.LogInformation(e.StackTrace);
retVal = false;
}
return retVal;
}
public bool PosTest5()
{
bool retVal = true;
// Add your scenario description here
TestLibrary.TestFramework.BeginScenario("PosTest5: Verify method ToInt64(true)");
try
{
object obj = true;
long actual = Convert.ToInt64(obj);
long expected = 1;
if (actual != expected)
{
TestLibrary.TestFramework.LogError("005.1", "Method ToInt64 Err.");
TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLE] actual = " + actual + ", expected = " + expected);
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("005.2", "Unexpected exception: " + e);
TestLibrary.TestFramework.LogInformation(e.StackTrace);
retVal = false;
}
return retVal;
}
public bool PosTest6()
{
bool retVal = true;
// Add your scenario description here
TestLibrary.TestFramework.BeginScenario("PosTest6: Verify method ToInt64(false)");
try
{
object obj = false;
long actual = Convert.ToInt64(obj);
long expected = 0;
if (actual != expected)
{
TestLibrary.TestFramework.LogError("006.1", "Method ToInt64 Err.");
TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLE] actual = " + actual + ", expected = " + expected);
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("006.2", "Unexpected exception: " + e);
TestLibrary.TestFramework.LogInformation(e.StackTrace);
retVal = false;
}
return retVal;
}
public bool PosTest7()
{
bool retVal = true;
// Add your scenario description here
TestLibrary.TestFramework.BeginScenario("PosTest7: Verify method ToInt64(null)");
try
{
object obj = null;
long actual = Convert.ToInt64(obj);
long expected = 0;
if (actual != expected)
{
TestLibrary.TestFramework.LogError("007.1", "Method ToInt64 Err.");
TestLibrary.TestFramework.LogInformation("WARNING [LOCAL VARIABLE] actual = " + actual + ", expected = " + expected);
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("007.2", "Unexpected exception: " + e);
TestLibrary.TestFramework.LogInformation(e.StackTrace);
retVal = false;
}
return retVal;
}
#endregion
#region Negative Test Cases
public bool NegTest1()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("NegTest1: InvalidCastException is not thrown.");
try
{
object obj = new object();
long r = Convert.ToInt64(obj);
TestLibrary.TestFramework.LogError("101.1", "InvalidCastException is not thrown.");
retVal = false;
}
catch (InvalidCastException)
{ }
catch (Exception e)
{
TestLibrary.TestFramework.LogError("101.2", "Unexpected exception: " + e);
TestLibrary.TestFramework.LogInformation(e.StackTrace);
retVal = false;
}
return retVal;
}
#endregion
#endregion
public static int Main()
{
ConvertToInt64_9 test = new ConvertToInt64_9();
TestLibrary.TestFramework.BeginTestCase("ConvertToInt64_9");
if (test.RunTests())
{
TestLibrary.TestFramework.EndTestCase();
TestLibrary.TestFramework.LogInformation("PASS");
return 100;
}
else
{
TestLibrary.TestFramework.EndTestCase();
TestLibrary.TestFramework.LogInformation("FAIL");
return 0;
}
}
}
| |
/*
* SubSonic - http://subsonicproject.com
*
* The contents of this file are subject to the Mozilla Public
* License Version 1.1 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an
* "AS IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*/
#if ALLPROVIDERS
using System;
using System.Collections;
using System.Data;
using MySql.Data.MySqlClient;
using SubSonic.Utilities;
namespace SubSonic
{
/// <summary>
/// Summary for the MySqlInnoDBDataProvider class
/// </summary>
public class MySqlInnoDBDataProvider : MySqlDataProvider
{
private const string ALL_TABLE_COLUMNS_SQL =
@"SELECT
TABLE_SCHEMA as `Database`,
TABLE_NAME as TableName,
COLUMN_NAME as ColumnName,
ORDINAL_POSITION as OrdinalPosition,
COLUMN_DEFAULT as DefaultSetting,
IS_NULLABLE as IsNullable,
DATA_TYPE as DataType,
CHARACTER_MAXIMUM_LENGTH as MaxLength,
IF(EXTRA = 'auto_increment', 1, 0) as IsIdentity
FROM
INFORMATION_SCHEMA.COLUMNS
WHERE
TABLE_SCHEMA = ?DatabaseName
ORDER BY
OrdinalPosition ASC";
private const string ALL_TABLE_FOREIGN_TABLES =
@"SELECT
table_name as FK_TABLE,
referenced_column_name as PK_COLUMN,
referenced_table_name as PK_TABLE,
column_name as FK_Column,
constraint_name as CONSTRAINT_NAME
FROM
INFORMATION_SCHEMA.KEY_COLUMN_USAGE
WHERE
TABLE_SCHEMA = ?DatabaseName
AND REFERENCED_TABLE_NAME IS NOT NULL";
private const string ALL_TABLE_INDEXES_SQL =
@"SELECT
tc.table_name as TableName,
tc.table_schema as Owner,
kc.column_name as ColumnName,
tc.constraint_type as ConstraintType,
tc.constraint_name as ConstraintName
FROM
INFORMATION_SCHEMA.TABLE_CONSTRAINTS tc
INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE kc
ON tc.table_schema = kc.table_schema
AND tc.table_name = kc.table_name
AND tc.constraint_name = kc.constraint_name
WHERE
tc.table_schema = ?DatabaseName";
private const string ALL_TABLE_PRIMARY_TABLES =
@"SELECT
referenced_table_name as PK_TABLE,
table_name as FK_TABLE,
column_name as FK_COLUMN
FROM
INFORMATION_SCHEMA.KEY_COLUMN_USAGE
WHERE
TABLE_SCHEMA = ?DatabaseName
AND REFERENCED_TABLE_NAME IS NOT NULL";
private const string ALL_TABLES_SQL =
@"SELECT
TABLE_NAME as Table_Name
FROM
INFORMATION_SCHEMA.TABLES
WHERE
TABLE_SCHEMA = ?DatabaseName";
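// Finds foreign keys defined on many-to-many mapping tables, i.e. tables whose primary key
// consists of two or more columns that are themselves foreign keys (see the inner ManyMany query).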
private const string MANY_TO_MANY_CHECK_ALL =
@"SELECT
FK.TABLE_NAME FK_Table,
KC.COLUMN_NAME FK_Column,
KC.REFERENCED_TABLE_NAME PK_Table,
KC.REFERENCED_COLUMN_NAME PK_Column,
FK.CONSTRAINT_NAME Constraint_Name
FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS FK
INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE KC ON KC.CONSTRAINT_NAME = FK.CONSTRAINT_NAME
AND KC.TABLE_NAME = FK.TABLE_NAME AND KC.TABLE_SCHEMA = FK.TABLE_SCHEMA
AND FK.TABLE_SCHEMA = ?DatabaseName
AND FK.CONSTRAINT_TYPE = 'FOREIGN KEY'
JOIN (
SELECT tc.TABLE_NAME FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS tc
JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS kcu ON tc.Constraint_name = kcu.Constraint_Name AND kcu.TABLE_NAME = tc.TABLE_NAME AND kcu.TABLE_SCHEMA = tc.TABLE_SCHEMA
AND tc.Constraint_Type = 'PRIMARY KEY'
AND tc.TABLE_SCHEMA = ?DatabaseName
JOIN
(
SELECT tc1.Table_Name, kcu1.Column_Name AS Column_Name FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS tc1
JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS kcu1 ON tc1.Constraint_name = kcu1.Constraint_Name AND kcu1.TABLE_NAME = tc1.TABLE_NAME AND kcu1.TABLE_SCHEMA = tc1.TABLE_SCHEMA
AND tc1.Constraint_Type = 'FOREIGN KEY'
AND tc1.TABLE_SCHEMA = ?DatabaseName
)
AS t ON t.Table_Name = tc.table_Name AND t.Column_Name = kcu.Column_Name
GROUP BY tc.Constraint_Name, tc.Table_Name HAVING COUNT(*) > 1
) AS ManyMany ON ManyMany.TABLE_NAME = FK.TABLE_NAME";
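// Lists every foreign key column in the schema so the far side of a many-to-many relationship
// can be resolved from its mapping table.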
private const string MANY_TO_MANY_FOREIGN_MAP_ALL =
@"SELECT
FK.TABLE_NAME FK_Table,
KC.COLUMN_NAME FK_Column,
KC.REFERENCED_TABLE_NAME PK_Table,
KC.REFERENCED_COLUMN_NAME PK_Column,
FK.CONSTRAINT_NAME Constraint_Name
FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS FK
INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE KC ON KC.CONSTRAINT_NAME = FK.CONSTRAINT_NAME
AND KC.TABLE_NAME = FK.TABLE_NAME AND KC.TABLE_SCHEMA = FK.TABLE_SCHEMA
AND FK.TABLE_SCHEMA = ?DatabaseName
AND FK.CONSTRAINT_TYPE = 'FOREIGN KEY'
";
private static readonly object _lockColumns = new object();
private static readonly object _lockFK = new object();
private static readonly object _lockIndex = new object();
private static readonly object _lockManyToManyCheck = new object();
private static readonly object _lockManyToManyMap = new object();
private static readonly object _lockPK = new object();
private static readonly DataSet dsColumns = new DataSet();
private static readonly DataSet dsFK = new DataSet();
private static readonly DataSet dsIndex = new DataSet();
private static readonly DataSet dsManyToManyCheck = new DataSet();
private static readonly DataSet dsManyToManyMap = new DataSet();
private static readonly DataSet dsPK = new DataSet();
private static readonly DataSet dsTables = new DataSet();
/// <summary>
/// Gets the name of the foreign key table.
/// </summary>
/// <param name="fkColumnName">Name of the fk column.</param>
/// <returns></returns>
public override string GetForeignKeyTableName(string fkColumnName)
{
string tableName = String.Empty;
if(SupportsInformationSchema(GetDatabaseVersion(Name)))
{
string sql =
"SELECT TABLE_NAME FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE WHERE COLUMN_NAME = ?ColumnName AND CONSTRAINT_NAME = 'PRIMARY' AND TABLE_SCHEMA = ?DatabaseName";
using(AutomaticConnectionScope automaticConnectionScope = new AutomaticConnectionScope(this))
{
MySqlCommand cmd = new MySqlCommand(sql);
cmd.CommandType = CommandType.Text;
cmd.Connection = (MySqlConnection)automaticConnectionScope.Connection;
cmd.Parameters.AddWithValue("?ColumnName", fkColumnName);
cmd.Parameters.AddWithValue("?DatabaseName", cmd.Connection.Database);
object result = cmd.ExecuteScalar();
if(result != null)
tableName = result.ToString();
}
}
return tableName;
}
/// <summary>
/// Gets the name of the foreign key table.
/// </summary>
/// <param name="fkColumnName">Name of the fk column.</param>
/// <param name="tableName">Name of the table.</param>
/// <returns></returns>
public override string GetForeignKeyTableName(string fkColumnName, string tableName)
{
string returnTableName = String.Empty;
if(SupportsInformationSchema(GetDatabaseVersion(Name)))
{
string sql =
"SELECT REFERENCED_TABLE_NAME FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE WHERE COLUMN_NAME = ?ColumnName AND TABLE_NAME = ?TableName AND TABLE_SCHEMA = ?DatabaseName";
using(AutomaticConnectionScope automaticConnectionScope = new AutomaticConnectionScope(this))
{
MySqlCommand cmd = new MySqlCommand(sql);
cmd.CommandType = CommandType.Text;
cmd.Connection = (MySqlConnection)automaticConnectionScope.Connection;
cmd.Parameters.AddWithValue("?ColumnName", fkColumnName);
cmd.Parameters.AddWithValue("?TableName", tableName);
cmd.Parameters.AddWithValue("?DatabaseName", cmd.Connection.Database);
object result = cmd.ExecuteScalar();
if(result != null)
returnTableName = result.ToString();
}
}
return returnTableName;
}
/// <summary>
/// Reloads the cached schema
/// </summary>
public override void ReloadSchema()
{
//not sure how to do this here
}
/// <summary>
/// Gets the table schema.
/// </summary>
/// <param name="tableName">Name of the table.</param>
/// <param name="tableType">Type of the table.</param>
/// <returns></returns>
public override TableSchema.Table GetTableSchema(string tableName, TableType tableType)
{
//return base.GetTableSchema(tableName, tableType);
MySqlConnection conn = new MySqlConnection(DefaultConnectionString);
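// Schema information is loaded once per provider and cached in static DataSets keyed by provider name;
// subsequent calls only filter the cached tables.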
if(dsColumns.Tables[Name] == null)
{
lock(_lockColumns)
{
QueryCommand cmdColumns = new QueryCommand(ALL_TABLE_COLUMNS_SQL, Name);
cmdColumns.Parameters.Add("?DatabaseName", conn.Database, DbType.AnsiString);
DataTable dt = new DataTable(Name);
dt.Load(GetReader(cmdColumns));
dsColumns.Tables.Add(dt);
}
}
DataRow[] drColumns = dsColumns.Tables[Name].Select("TableName ='" + tableName + "'", "OrdinalPosition ASC");
if(drColumns.Length == 0)
return null;
TableSchema.TableColumnCollection columns = new TableSchema.TableColumnCollection();
TableSchema.Table tbl = new TableSchema.Table(tableName, tableType, this);
tbl.ForeignKeys = new TableSchema.ForeignKeyTableCollection();
for(int i = 0; i < drColumns.Length; i++)
{
string nativeDataType = drColumns[i][SqlSchemaVariable.DATA_TYPE].ToString().ToLower();
TableSchema.TableColumn column = new TableSchema.TableColumn(tbl);
column.ColumnName = drColumns[i][SqlSchemaVariable.COLUMN_NAME].ToString();
column.DataType = GetDbType(nativeDataType);
if(SetPropertyDefaultsFromDatabase && drColumns[i][SqlSchemaVariable.COLUMN_DEFAULT] != DBNull.Value &&
drColumns[i][SqlSchemaVariable.COLUMN_DEFAULT].ToString() != "\0")
column.DefaultSetting = drColumns[i][SqlSchemaVariable.COLUMN_DEFAULT].ToString().Trim();
//thanks rauchy!
bool autoIncrement;
bool successfullyParsed = bool.TryParse(drColumns[i][SqlSchemaVariable.IS_IDENTITY].ToString(), out autoIncrement);
if(!successfullyParsed)
autoIncrement = Convert.ToBoolean(drColumns[i][SqlSchemaVariable.IS_IDENTITY]);
column.AutoIncrement = autoIncrement;
int maxLength;
int.TryParse(drColumns[i][SqlSchemaVariable.MAX_LENGTH].ToString(), out maxLength);
column.MaxLength = maxLength;
column.IsNullable = (drColumns[i][SqlSchemaVariable.IS_NULLABLE].ToString() == "YES");
//column.IsReadOnly = (nativeDataType == "timestamp");
columns.Add(column);
}
if(dsIndex.Tables[Name] == null)
{
lock(_lockIndex)
{
QueryCommand cmdIndex = new QueryCommand(ALL_TABLE_INDEXES_SQL, Name);
cmdIndex.Parameters.Add("?DatabaseName", conn.Database, DbType.AnsiString);
DataTable dt = new DataTable(Name);
dt.Load(GetReader(cmdIndex));
dsIndex.Tables.Add(dt);
}
}
DataRow[] drIndexes = dsIndex.Tables[Name].Select("TableName = '" + tableName + "'");
for(int i = 0; i < drIndexes.Length; i++)
{
string colName = drIndexes[i][SqlSchemaVariable.COLUMN_NAME].ToString();
string constraintType = drIndexes[i][SqlSchemaVariable.CONSTRAINT_TYPE].ToString();
TableSchema.TableColumn column = columns.GetColumn(colName);
if(Utility.IsMatch(constraintType, SqlSchemaVariable.PRIMARY_KEY))
column.IsPrimaryKey = true;
else if(Utility.IsMatch(constraintType, SqlSchemaVariable.FOREIGN_KEY))
column.IsForeignKey = true;
//HACK: Allow second pass naming adjust based on whether a column is keyed
column.ColumnName = column.ColumnName;
}
if(dsPK.Tables[Name] == null)
{
lock(_lockPK)
{
QueryCommand cmdPk = new QueryCommand(ALL_TABLE_PRIMARY_TABLES, Name);
cmdPk.Parameters.Add("?DatabaseName", conn.Database, DbType.AnsiString);
DataTable dt = new DataTable(Name);
dt.Load(GetReader(cmdPk));
dsPK.Tables.Add(dt);
}
}
DataRow[] drPK = dsPK.Tables[Name].Select("PK_Table ='" + tableName + "'");
for(int i = 0; i < drPK.Length; i++)
{
string colName = drPK[i]["FK_Column"].ToString();
string fkName = drPK[i]["FK_Table"].ToString();
TableSchema.PrimaryKeyTable pkTable = new TableSchema.PrimaryKeyTable(this);
pkTable.ColumnName = colName;
pkTable.TableName = fkName;
tbl.PrimaryKeyTables.Add(pkTable);
}
if(dsFK.Tables[Name] == null)
{
lock(_lockFK)
{
QueryCommand cmdFK = new QueryCommand(ALL_TABLE_FOREIGN_TABLES, Name);
cmdFK.Parameters.Add("?DatabaseName", conn.Database, DbType.AnsiString);
DataTable dt = new DataTable(Name);
dt.Load(GetReader(cmdFK));
dsFK.Tables.Add(dt);
}
}
DataRow[] drFK = dsFK.Tables[Name].Select("FK_Table ='" + tableName + "'");
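// Composite foreign keys appear once per column; track constraint names so each constraint is only added once.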
ArrayList usedConstraints = new ArrayList();
for(int i = 0; i < drFK.Length; i++)
{
string constraintName = drFK[i]["Constraint_Name"].ToString();
if(!usedConstraints.Contains(constraintName))
{
usedConstraints.Add(constraintName);
string colName = drFK[i]["FK_Column"].ToString();
string fkName = CorrectTableCasing(drFK[i]["PK_Table"].ToString(), conn.Database);
TableSchema.TableColumn column = columns.GetColumn(colName);
if(column != null)
column.ForeignKeyTableName = fkName;
else
continue;
TableSchema.ForeignKeyTable fkTable = new TableSchema.ForeignKeyTable(this);
fkTable.ColumnName = colName;
fkTable.TableName = fkName;
tbl.ForeignKeys.Add(fkTable);
}
}
if(dsManyToManyCheck.Tables[Name] == null)
{
lock(_lockManyToManyCheck)
{
QueryCommand cmdM2M = new QueryCommand(MANY_TO_MANY_CHECK_ALL, Name);
cmdM2M.Parameters.Add("?DatabaseName", conn.Database, DbType.AnsiString);
DataTable dt = new DataTable(Name);
dt.Load(GetReader(cmdM2M));
dsManyToManyCheck.Tables.Add(dt);
}
}
DataRow[] drs = dsManyToManyCheck.Tables[Name].Select("PK_Table = '" + tableName + "'");
if(drs.Length > 0)
{
for(int count = 0; count < drs.Length; count++)
{
string mapTable = drs[count]["FK_Table"].ToString();
string localKey = drs[count]["FK_Column"].ToString();
if(dsManyToManyMap.Tables[Name] == null)
{
lock(_lockManyToManyMap)
{
QueryCommand cmdM2MMap = new QueryCommand(MANY_TO_MANY_FOREIGN_MAP_ALL, Name);
cmdM2MMap.Parameters.Add("?DatabaseName", conn.Database, DbType.AnsiString);
DataTable dt = new DataTable(Name);
dt.Load(GetReader(cmdM2MMap));
dsManyToManyMap.Tables.Add(dt);
}
}
DataRow[] drMap = dsManyToManyMap.Tables[Name].Select("FK_Table = '" + mapTable + "' AND PK_Table <> '" + tableName + "'");
for(int i = 0; i < drMap.Length; i++)
{
TableSchema.ManyToManyRelationship m = new TableSchema.ManyToManyRelationship(mapTable, tbl.Provider);
m.ForeignTableName = drMap[i]["PK_Table"].ToString();
m.ForeignPrimaryKey = drMap[i]["PK_Column"].ToString();
m.MapTableLocalTableKeyColumn = localKey;
m.MapTableForeignTableKeyColumn = drMap[i]["FK_Column"].ToString();
tbl.ManyToManys.Add(m);
}
}
}
tbl.Columns = columns;
return tbl;
}
/// <summary>
/// Hack for windows on mysql and the fact that it does
/// not keep casing in some fields of the information_schema
/// </summary>
/// <param name="TableName">Name of the table.</param>
/// <param name="DatabaseName">Name of the database.</param>
/// <returns></returns>
private string CorrectTableCasing(string TableName, string DatabaseName)
{
if(dsTables.Tables[Name] == null)
{
QueryCommand cmdTables = new QueryCommand(ALL_TABLES_SQL, Name);
cmdTables.Parameters.Add("?DatabaseName", DatabaseName, DbType.AnsiString);
DataTable dt = new DataTable(Name);
dt.Load(GetReader(cmdTables));
dsTables.Tables.Add(dt);
}
DataRow[] table = dsTables.Tables[Name].Select("Table_Name ='" + TableName + "'");
if(table.Length == 1)
return table[0]["Table_Name"].ToString();
return TableName;
}
}
}
#endif
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Tests;
using System.Linq;
using Xunit;
namespace System.Tests
{
public abstract class ArraySegment_Tests<T>: IList_Generic_Tests<T>
{
#region IList<T> Helper Methods
protected override IList<T> GenericIListFactory()
{
return Factory();
}
protected override IList<T> GenericIListFactory(int count)
{
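// Embed the requested number of elements in the middle of a larger backing array so Offset and Count are both non-trivial.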
return Factory(count * 2, count / 2, count);
}
protected override bool Enumerator_Current_UndefinedOperation_Throws => true;
protected override bool Enumerator_ModifiedDuringEnumeration_ThrowsInvalidOperationException => false;
protected override bool IsReadOnly_ValidityValue => true;
protected override bool AddRemoveClear_ThrowsNotSupported => true;
#endregion
#region List<T> Helper Methods
protected virtual ArraySegment<T> Factory()
{
return new ArraySegment<T>();
}
protected virtual ArraySegment<T> Factory(int count, int offset, int length)
{
T[] array = CreateEnumerable(EnumerableType.List, null, count, 0, 0).ToArray();
ArraySegment<T> segment = new ArraySegment<T>(array, offset, length);
Assert.Same(array, segment.Array);
Assert.Equal(offset, segment.Offset);
Assert.Equal(length, segment.Count);
return segment;
}
protected void VerifySegment(List<T> expected, ArraySegment<T> segment)
{
Assert.Equal(expected.Count, segment.Count);
for (int i = 0; i < expected.Count; ++i)
{
Assert.True(expected[i] == null ? (segment as IList<T>)[i] == null : expected[i].Equals((segment as IList<T>)[i]));
}
}
#endregion
[Fact]
public void Ctor_Empty()
{
var segment = new ArraySegment<T>();
Assert.Null(segment.Array);
Assert.Equal(0, segment.Offset);
Assert.Equal(0, segment.Count);
T[] array = new T[10];
segment = new ArraySegment<T>(array, 10, 0);
Assert.Same(array, segment.Array);
Assert.Equal(10, segment.Offset);
Assert.Equal(0, segment.Count);
}
[Fact]
public static void Ctor_Invalid()
{
AssertExtensions.Throws<ArgumentNullException>("array", () => new ArraySegment<T>(null));
AssertExtensions.Throws<ArgumentNullException>("array", () => new ArraySegment<T>(null, -1, 1));
AssertExtensions.Throws<ArgumentOutOfRangeException>("offset", () => new ArraySegment<T>(new T[10], -1, 0)); // Offset < 0
AssertExtensions.Throws<ArgumentOutOfRangeException>("count", () => new ArraySegment<T>(new T[10], 0, -1)); // Count < 0
Assert.Throws<ArgumentException>(null, () => new ArraySegment<T>(new T[10], 10, 1)); // Offset + count > array.Length
Assert.Throws<ArgumentException>(null, () => new ArraySegment<T>(new T[10], 9, 2)); // Offset + count > array.Length
}
}
public class ArraySegment_Tests_string : ArraySegment_Tests<string>
{
protected override string CreateT(int seed)
{
int stringLength = seed % 10 + 5;
Random rand = new Random(seed);
byte[] bytes = new byte[stringLength];
rand.NextBytes(bytes);
return Convert.ToBase64String(bytes);
}
}
public class ArraySegment_Tests_int : ArraySegment_Tests<int>
{
protected override int CreateT(int seed)
{
Random rand = new Random(seed);
return rand.Next();
}
}
public static class ArraySegment_Tests
{
public static IEnumerable<object[]> Equals_TestData()
{
var intArray1 = new int[] { 7, 8, 9, 10, 11, 12 };
var intArray2 = new int[] { 7, 8, 9, 10, 11, 12 };
yield return new object[] { new ArraySegment<int>(intArray1), new ArraySegment<int>(intArray1), true };
yield return new object[] { new ArraySegment<int>(intArray1), new ArraySegment<int>(intArray1, 0, intArray1.Length), true };
yield return new object[] { new ArraySegment<int>(intArray1, 2, 3), new ArraySegment<int>(intArray1, 2, 3), true };
yield return new object[] { new ArraySegment<int>(intArray1, 3, 3), new ArraySegment<int>(intArray1, 2, 3), false };
yield return new object[] { new ArraySegment<int>(intArray1, 2, 4), new ArraySegment<int>(intArray1, 2, 3), false };
yield return new object[] { new ArraySegment<int>(intArray1, 2, 4), new ArraySegment<int>(intArray2, 2, 3), false };
yield return new object[] { new ArraySegment<int>(intArray1), intArray1, false };
yield return new object[] { new ArraySegment<int>(intArray1), null, false };
yield return new object[] { new ArraySegment<int>(intArray1, 2, 4), null, false };
}
[Theory]
[MemberData(nameof(Equals_TestData))]
public static void Equals(ArraySegment<int> segment1, object obj, bool expected)
{
if (obj is ArraySegment<int>)
{
ArraySegment<int> segment2 = (ArraySegment<int>)obj;
Assert.Equal(expected, segment1.Equals(segment2));
Assert.Equal(expected, segment1 == segment2);
Assert.Equal(!expected, segment1 != segment2);
Assert.Equal(expected, segment1.GetHashCode().Equals(segment2.GetHashCode()));
}
Assert.Equal(expected, segment1.Equals(obj));
}
[Fact]
public static void IList_GetSetItem()
{
var intArray = new int[] { 7, 8, 9, 10, 11, 12, 13 };
var segment = new ArraySegment<int>(intArray, 2, 3);
IList<int> iList = segment;
Assert.Equal(segment.Count, iList.Count);
for (int i = 0; i < iList.Count; i++)
{
Assert.Equal(intArray[i + segment.Offset], iList[i]);
iList[i] = 99;
Assert.Equal(99, iList[i]);
Assert.Equal(99, intArray[i + segment.Offset]);
}
}
[Fact]
public static void IList_GetSetItem_Invalid()
{
IList<int> iList = new ArraySegment<int>();
Assert.Throws<InvalidOperationException>(() => iList[0]); // Array is null
Assert.Throws<InvalidOperationException>(() => iList[0] = 0); // Array is null
var intArray = new int[] { 7, 8, 9, 10, 11, 12, 13 };
iList = new ArraySegment<int>(intArray, 2, 3);
AssertExtensions.Throws<ArgumentOutOfRangeException>("index", () => iList[-1]); // Index < 0
AssertExtensions.Throws<ArgumentOutOfRangeException>("index", () => iList[iList.Count]); // Index >= list.Count
AssertExtensions.Throws<ArgumentOutOfRangeException>("index", () => iList[-1] = 0); // Index < 0
AssertExtensions.Throws<ArgumentOutOfRangeException>("index", () => iList[iList.Count] = 0); // Index >= list.Count
}
[Fact]
public static void IReadOnlyList_GetItem()
{
var intArray = new int[] { 7, 8, 9, 10, 11, 12, 13 };
var seg = new ArraySegment<int>(intArray, 2, 3);
IReadOnlyList<int> iList = seg;
for (int i = 0; i < iList.Count; i++)
{
Assert.Equal(intArray[i + seg.Offset], iList[i]);
}
}
[Fact]
public static void IReadOnlyList_GetItem_Invalid()
{
IReadOnlyList<int> iList = new ArraySegment<int>();
Assert.Throws<InvalidOperationException>(() => iList[0]); // Array is null
var intArray = new int[] { 7, 8, 9, 10, 11, 12, 13 };
iList = new ArraySegment<int>(intArray, 2, 3);
AssertExtensions.Throws<ArgumentOutOfRangeException>("index", () => iList[-1]); // Index < 0
AssertExtensions.Throws<ArgumentOutOfRangeException>("index", () => iList[iList.Count]); // List >= seg.Count
}
[Fact]
public static void IList_IndexOf()
{
var intArray = new int[] { 7, 8, 9, 10, 11, 12, 13 };
var segment = new ArraySegment<int>(intArray, 2, 3);
IList<int> iList = segment;
for (int i = segment.Offset; i < segment.Count; i++)
{
Assert.Equal(i - segment.Offset, iList.IndexOf(intArray[i]));
}
Assert.Equal(-1, iList.IndexOf(9999)); // No such value
Assert.Equal(-1, iList.IndexOf(7)); // No such value in range
}
[Fact]
public static void IList_IndexOf_NullArray_ThrowsInvalidOperationException()
{
IList<int> iList = new ArraySegment<int>();
Assert.Throws<InvalidOperationException>(() => iList.IndexOf(0)); // Array is null
}
[Fact]
public static void IList_ModifyingCollection_ThrowsNotSupportedException()
{
var intArray = new int[] { 7, 8, 9, 10, 11, 12, 13 };
var segment = new ArraySegment<int>(intArray, 2, 3);
IList<int> iList = segment;
Assert.True(iList.IsReadOnly);
Assert.Throws<NotSupportedException>(() => iList.Add(2));
Assert.Throws<NotSupportedException>(() => iList.Insert(0, 0));
Assert.Throws<NotSupportedException>(() => iList.Clear());
Assert.Throws<NotSupportedException>(() => iList.Remove(2));
Assert.Throws<NotSupportedException>(() => iList.RemoveAt(2));
}
[Fact]
public static void IList_Contains_NullArray_ThrowsInvalidOperationException()
{
IList<int> iList = new ArraySegment<int>();
Assert.Throws<InvalidOperationException>(() => iList.Contains(0)); // Array is null
}
[Fact]
public static void IList_GetEnumerator()
{
var intArray = new int[] { 7, 8, 9, 10, 11, 12, 13 };
ArraySegment<int> segment = new ArraySegment<int>(intArray, 2, 3);
//ArraySegment<int>.Enumerator enumerator = segment.GetEnumerator();
IEnumerator<int> enumerator = (segment as IEnumerable<int>).GetEnumerator();
for (int i = 0; i < 2; i++)
{
int counter = 0;
while (enumerator.MoveNext())
{
Assert.Equal(intArray[counter + 2], enumerator.Current);
counter++;
}
Assert.Equal(segment.Count, counter);
enumerator.Reset();
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics.Arm\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
namespace JIT.HardwareIntrinsics.Arm
{
public static partial class Program
{
private static void AbsUInt64()
{
var test = new SimpleUnaryOpTest__AbsUInt64();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (AdvSimd.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (AdvSimd.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (AdvSimd.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleUnaryOpTest__AbsUInt64
{
private struct DataTable
{
private byte[] inArray1;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle outHandle;
private ulong alignment;
public DataTable(Int64[] inArray1, UInt64[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Int64>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<UInt64>();
if ((alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
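// Over-allocate the pinned buffers so an aligned window of at least 'alignment' bytes always exists inside them.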
this.inArray1 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Int64, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
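// Round the pointer up to the next multiple of expectedAlignment (expectedAlignment must be a power of two).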
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector128<Int64> _fld1;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = -TestLibrary.Generator.GetInt64(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int64>, byte>(ref testStruct._fld1), ref Unsafe.As<Int64, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int64>>());
return testStruct;
}
public void RunStructFldScenario(SimpleUnaryOpTest__AbsUInt64 testClass)
{
var result = AdvSimd.Arm64.Abs(_fld1);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(SimpleUnaryOpTest__AbsUInt64 testClass)
{
fixed (Vector128<Int64>* pFld1 = &_fld1)
{
var result = AdvSimd.Arm64.Abs(
AdvSimd.LoadVector128((Int64*)(pFld1))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Int64>>() / sizeof(Int64);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<UInt64>>() / sizeof(UInt64);
private static Int64[] _data1 = new Int64[Op1ElementCount];
private static Vector128<Int64> _clsVar1;
private Vector128<Int64> _fld1;
private DataTable _dataTable;
static SimpleUnaryOpTest__AbsUInt64()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = -TestLibrary.Generator.GetInt64(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int64>, byte>(ref _clsVar1), ref Unsafe.As<Int64, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int64>>());
}
public SimpleUnaryOpTest__AbsUInt64()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = -TestLibrary.Generator.GetInt64(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int64>, byte>(ref _fld1), ref Unsafe.As<Int64, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int64>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = -TestLibrary.Generator.GetInt64(); }
_dataTable = new DataTable(_data1, new UInt64[RetElementCount], LargestVectorSize);
}
public bool IsSupported => AdvSimd.Arm64.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = AdvSimd.Arm64.Abs(
Unsafe.Read<Vector128<Int64>>(_dataTable.inArray1Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = AdvSimd.Arm64.Abs(
AdvSimd.LoadVector128((Int64*)(_dataTable.inArray1Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(AdvSimd.Arm64).GetMethod(nameof(AdvSimd.Arm64.Abs), new Type[] { typeof(Vector128<Int64>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<Int64>>(_dataTable.inArray1Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt64>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(AdvSimd.Arm64).GetMethod(nameof(AdvSimd.Arm64.Abs), new Type[] { typeof(Vector128<Int64>) })
.Invoke(null, new object[] {
AdvSimd.LoadVector128((Int64*)(_dataTable.inArray1Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt64>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = AdvSimd.Arm64.Abs(
_clsVar1
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector128<Int64>* pClsVar1 = &_clsVar1)
{
var result = AdvSimd.Arm64.Abs(
AdvSimd.LoadVector128((Int64*)(pClsVar1))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector128<Int64>>(_dataTable.inArray1Ptr);
var result = AdvSimd.Arm64.Abs(op1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = AdvSimd.LoadVector128((Int64*)(_dataTable.inArray1Ptr));
var result = AdvSimd.Arm64.Abs(op1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleUnaryOpTest__AbsUInt64();
var result = AdvSimd.Arm64.Abs(test._fld1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleUnaryOpTest__AbsUInt64();
fixed (Vector128<Int64>* pFld1 = &test._fld1)
{
var result = AdvSimd.Arm64.Abs(
AdvSimd.LoadVector128((Int64*)(pFld1))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = AdvSimd.Arm64.Abs(_fld1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector128<Int64>* pFld1 = &_fld1)
{
var result = AdvSimd.Arm64.Abs(
AdvSimd.LoadVector128((Int64*)(pFld1))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = AdvSimd.Arm64.Abs(test._fld1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = AdvSimd.Arm64.Abs(
AdvSimd.LoadVector128((Int64*)(&test._fld1))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector128<Int64> op1, void* result, [CallerMemberName] string method = "")
{
Int64[] inArray1 = new Int64[Op1ElementCount];
UInt64[] outArray = new UInt64[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Int64, byte>(ref inArray1[0]), op1);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt64>>());
ValidateResult(inArray1, outArray, method);
}
private void ValidateResult(void* op1, void* result, [CallerMemberName] string method = "")
{
Int64[] inArray1 = new Int64[Op1ElementCount];
UInt64[] outArray = new UInt64[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int64, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<Int64>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<UInt64>>());
ValidateResult(inArray1, outArray, method);
}
private void ValidateResult(Int64[] firstOp, UInt64[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
if (result[0] != (ulong)Math.Abs(firstOp[0]))
{
succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if (result[i] != (ulong)Math.Abs(firstOp[i]))
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(AdvSimd.Arm64)}.{nameof(AdvSimd.Arm64.Abs)}<UInt64>(Vector128<Int64>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" firstOp: ({string.Join(", ", firstOp)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http.Connections;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Xunit;
namespace Microsoft.AspNetCore.SignalR.Tests
{
public class MapSignalRTests
{
[Fact]
public void MapSignalRFailsForInvalidHub()
{
var ex = Assert.Throws<NotSupportedException>(() =>
{
using (var host = BuildWebHost(routes => routes.MapHub<InvalidHub>("/overloads")))
{
host.Start();
}
});
Assert.Equal("Duplicate definitions of 'OverloadedMethod'. Overloading is not supported.", ex.Message);
}
[Fact]
public void NotAddingSignalRServiceThrows()
{
var executedConfigure = false;
var builder = new HostBuilder();
builder.ConfigureWebHost(webHostBuilder =>
{
webHostBuilder
.UseKestrel()
.ConfigureServices(services =>
{
services.AddRouting();
})
.Configure(app =>
{
executedConfigure = true;
var ex = Assert.Throws<InvalidOperationException>(() =>
{
app.UseRouting();
app.UseEndpoints(endpoints =>
{
endpoints.MapHub<AuthHub>("/overloads");
});
});
Assert.Equal("Unable to find the required services. Please add all the required services by calling " +
"'IServiceCollection.AddSignalR' inside the call to 'ConfigureServices(...)' in the application startup code.", ex.Message);
})
.UseUrls("http://127.0.0.1:0");
});
using (var host = builder.Build())
{
host.Start();
}
Assert.True(executedConfigure);
}
[Fact]
public void MapHubFindsAuthAttributeOnHub()
{
var authCount = 0;
using (var host = BuildWebHost(routes => routes.MapHub<AuthHub>("/path", options =>
{
authCount += options.AuthorizationData.Count;
})))
{
host.Start();
var dataSource = host.Services.GetRequiredService<EndpointDataSource>();
// We register 2 endpoints (/negotiate and /)
Assert.Collection(dataSource.Endpoints,
endpoint =>
{
Assert.Equal("/path/negotiate", endpoint.DisplayName);
Assert.Equal(1, endpoint.Metadata.GetOrderedMetadata<IAuthorizeData>().Count);
},
endpoint =>
{
Assert.Equal("/path", endpoint.DisplayName);
Assert.Equal(1, endpoint.Metadata.GetOrderedMetadata<IAuthorizeData>().Count);
});
}
Assert.Equal(0, authCount);
}
[Fact]
public void MapHubFindsAuthAttributeOnInheritedHub()
{
var authCount = 0;
using (var host = BuildWebHost(routes => routes.MapHub<InheritedAuthHub>("/path", options =>
{
authCount += options.AuthorizationData.Count;
})))
{
host.Start();
var dataSource = host.Services.GetRequiredService<EndpointDataSource>();
// We register 2 endpoints (/negotiate and /)
Assert.Collection(dataSource.Endpoints,
endpoint =>
{
Assert.Equal("/path/negotiate", endpoint.DisplayName);
Assert.Equal(1, endpoint.Metadata.GetOrderedMetadata<IAuthorizeData>().Count);
},
endpoint =>
{
Assert.Equal("/path", endpoint.DisplayName);
Assert.Equal(1, endpoint.Metadata.GetOrderedMetadata<IAuthorizeData>().Count);
});
}
Assert.Equal(0, authCount);
}
[Fact]
public void MapHubFindsMultipleAuthAttributesOnDoubleAuthHub()
{
var authCount = 0;
using (var host = BuildWebHost(routes => routes.MapHub<DoubleAuthHub>("/path", options =>
{
authCount += options.AuthorizationData.Count;
})))
{
host.Start();
var dataSource = host.Services.GetRequiredService<EndpointDataSource>();
// We register 2 endpoints (/negotiate and /)
Assert.Collection(dataSource.Endpoints,
endpoint =>
{
Assert.Equal("/path/negotiate", endpoint.DisplayName);
Assert.Equal(2, endpoint.Metadata.GetOrderedMetadata<IAuthorizeData>().Count);
},
endpoint =>
{
Assert.Equal("/path", endpoint.DisplayName);
Assert.Equal(2, endpoint.Metadata.GetOrderedMetadata<IAuthorizeData>().Count);
});
}
Assert.Equal(0, authCount);
}
[Fact]
public void MapHubEndPointRoutingFindsAttributesOnHub()
{
var authCount = 0;
using (var host = BuildWebHost(routes => routes.MapHub<AuthHub>("/path", options =>
{
authCount += options.AuthorizationData.Count;
})))
{
host.Start();
var dataSource = host.Services.GetRequiredService<EndpointDataSource>();
// We register 2 endpoints (/negotiate and /)
Assert.Collection(dataSource.Endpoints,
endpoint =>
{
Assert.Equal("/path/negotiate", endpoint.DisplayName);
Assert.Equal(1, endpoint.Metadata.GetOrderedMetadata<IAuthorizeData>().Count);
},
endpoint =>
{
Assert.Equal("/path", endpoint.DisplayName);
Assert.Equal(1, endpoint.Metadata.GetOrderedMetadata<IAuthorizeData>().Count);
});
}
Assert.Equal(0, authCount);
}
[Fact]
public void MapHubEndPointRoutingFindsAttributesOnHubAndFromOptions()
{
var authCount = 0;
HttpConnectionDispatcherOptions configuredOptions = null;
using (var host = BuildWebHost(routes => routes.MapHub<AuthHub>("/path", options =>
{
authCount += options.AuthorizationData.Count;
options.AuthorizationData.Add(new AuthorizeAttribute());
configuredOptions = options;
})))
{
host.Start();
var dataSource = host.Services.GetRequiredService<EndpointDataSource>();
// We register 2 endpoints (/negotiate and /)
Assert.Collection(dataSource.Endpoints,
endpoint =>
{
Assert.Equal("/path/negotiate", endpoint.DisplayName);
Assert.Equal(2, endpoint.Metadata.GetOrderedMetadata<IAuthorizeData>().Count);
},
endpoint =>
{
Assert.Equal("/path", endpoint.DisplayName);
Assert.Equal(2, endpoint.Metadata.GetOrderedMetadata<IAuthorizeData>().Count);
});
}
Assert.Equal(0, authCount);
}
[Fact]
public void MapHubEndPointRoutingAppliesAttributesBeforeConventions()
{
void ConfigureRoutes(IEndpointRouteBuilder endpoints)
{
// This "Foo" policy should override the default auth attribute
endpoints.MapHub<AuthHub>("/path")
.RequireAuthorization(new AuthorizeAttribute("Foo"));
}
using (var host = BuildWebHost(ConfigureRoutes))
{
host.Start();
var dataSource = host.Services.GetRequiredService<EndpointDataSource>();
// We register 2 endpoints (/negotiate and /)
Assert.Collection(dataSource.Endpoints,
endpoint =>
{
Assert.Equal("/path/negotiate", endpoint.DisplayName);
Assert.Collection(endpoint.Metadata.GetOrderedMetadata<IAuthorizeData>(),
auth => { },
auth =>
{
Assert.Equal("Foo", auth?.Policy);
});
},
endpoint =>
{
Assert.Equal("/path", endpoint.DisplayName);
Assert.Collection(endpoint.Metadata.GetOrderedMetadata<IAuthorizeData>(),
auth => { },
auth =>
{
Assert.Equal("Foo", auth?.Policy);
});
});
}
}
[Fact]
public void MapHubEndPointRoutingAppliesHubMetadata()
{
void ConfigureRoutes(IEndpointRouteBuilder endpoints)
{
// This "Foo" policy should override the default auth attribute
endpoints.MapHub<AuthHub>("/path");
}
using (var host = BuildWebHost(ConfigureRoutes))
{
host.Start();
var dataSource = host.Services.GetRequiredService<EndpointDataSource>();
// We register 2 endpoints (/negotiate and /)
Assert.Collection(dataSource.Endpoints,
endpoint =>
{
Assert.Equal("/path/negotiate", endpoint.DisplayName);
Assert.Equal(typeof(AuthHub), endpoint.Metadata.GetMetadata<HubMetadata>()?.HubType);
Assert.NotNull(endpoint.Metadata.GetMetadata<NegotiateMetadata>());
},
endpoint =>
{
Assert.Equal("/path", endpoint.DisplayName);
Assert.Equal(typeof(AuthHub), endpoint.Metadata.GetMetadata<HubMetadata>()?.HubType);
Assert.Null(endpoint.Metadata.GetMetadata<NegotiateMetadata>());
});
}
}
[Fact]
public void MapHubAppliesHubMetadata()
{
void ConfigureRoutes(IEndpointRouteBuilder routes)
{
// This "Foo" policy should override the default auth attribute
routes.MapHub<AuthHub>("/path");
}
using (var host = BuildWebHost(ConfigureRoutes))
{
host.Start();
var dataSource = host.Services.GetRequiredService<EndpointDataSource>();
// We register 2 endpoints (/negotiate and /)
Assert.Collection(dataSource.Endpoints,
endpoint =>
{
Assert.Equal("/path/negotiate", endpoint.DisplayName);
Assert.Equal(typeof(AuthHub), endpoint.Metadata.GetMetadata<HubMetadata>()?.HubType);
Assert.NotNull(endpoint.Metadata.GetMetadata<NegotiateMetadata>());
},
endpoint =>
{
Assert.Equal("/path", endpoint.DisplayName);
Assert.Equal(typeof(AuthHub), endpoint.Metadata.GetMetadata<HubMetadata>()?.HubType);
Assert.Null(endpoint.Metadata.GetMetadata<NegotiateMetadata>());
});
}
}
private class InvalidHub : Hub
{
public void OverloadedMethod(int num)
{
}
public void OverloadedMethod(string message)
{
}
}
[Authorize]
private class DoubleAuthHub : AuthHub
{
}
private class InheritedAuthHub : AuthHub
{
}
[Authorize]
private class AuthHub : Hub
{
}
private IHost BuildWebHost(Action<IEndpointRouteBuilder> configure)
{
return new HostBuilder()
.ConfigureWebHost(webHostBuilder =>
{
webHostBuilder
.UseKestrel()
.ConfigureServices(services =>
{
services.AddSignalR();
})
.Configure(app =>
{
app.UseRouting();
app.UseEndpoints(endpoints => configure(endpoints));
})
.UseUrls("http://127.0.0.1:0");
})
.Build();
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using Xunit;
namespace System.Dynamic.Tests
{
public class BindingRestrictionsTests
{
// Considers itself equal to every instance of its kind, so the tests can distinguish value equality from reference identity.
private class Egalitarian : IEquatable<Egalitarian>
{
public bool Equals(Egalitarian other) => true;
public override bool Equals(object obj) => obj is Egalitarian;
public override int GetHashCode() => 1;
}
[Fact]
public void EmptyAllowsAll()
{
Expression exp = BindingRestrictions.Empty.ToExpression();
Assert.IsType<ConstantExpression>(exp);
Assert.Equal(typeof(bool), exp.Type);
Assert.Equal(true, ((ConstantExpression)exp).Value);
// The above are implementation details that could reasonably change without error.
// The below must still hold so that empty binding restrictions still allows everything.
Assert.True(Expression.Lambda<Func<bool>>(exp).Compile()());
}
[Fact]
public void MergeWithEmptyReturnsSame()
{
BindingRestrictions isTrueBool = BindingRestrictions.GetTypeRestriction(
Expression.Constant(true), typeof(bool));
Assert.Same(isTrueBool, isTrueBool.Merge(BindingRestrictions.Empty));
Assert.Same(isTrueBool, BindingRestrictions.Empty.Merge(isTrueBool));
}
[Fact]
public void MergeWithSelfReturnsNotSame()
{
BindingRestrictions isTrueBool = BindingRestrictions.GetTypeRestriction(
Expression.Constant(true), typeof(bool));
Assert.NotSame(isTrueBool, isTrueBool.Merge(isTrueBool));
}
[Fact]
public void MergeWithSelfHasSameExpression()
{
Expression exp = Expression.Constant(true);
BindingRestrictions allowAll = BindingRestrictions.GetExpressionRestriction(exp);
BindingRestrictions doubled = allowAll.Merge(allowAll);
Assert.Same(exp, doubled.ToExpression());
}
[Fact]
public void MergeNull()
{
AssertExtensions.Throws<ArgumentNullException>("restrictions", () => BindingRestrictions.Empty.Merge(null));
}
[Fact]
public void ExpressionRestrictionFromNull()
{
AssertExtensions.Throws<ArgumentNullException>("expression", () => BindingRestrictions.GetExpressionRestriction(null));
}
[Fact]
public void ExpressionRestrictionFromNonBooleanExpression()
{
AssertExtensions.Throws<ArgumentException>(
"expression", () => BindingRestrictions.GetExpressionRestriction(Expression.Empty()));
AssertExtensions.Throws<ArgumentException>(
"expression", () => BindingRestrictions.GetExpressionRestriction(Expression.Constant("")));
}
[Fact]
public void InstanceRestrictionFromNull()
{
AssertExtensions.Throws<ArgumentNullException>(
"expression", () => BindingRestrictions.GetInstanceRestriction(null, new object()));
}
[Fact]
public void CombineRestrictionsFromNull()
{
Assert.Same(BindingRestrictions.Empty, BindingRestrictions.Combine(null));
}
[Fact]
public void CombineRestrictionsFromEmpty()
{
Assert.Same(BindingRestrictions.Empty, BindingRestrictions.Combine(Array.Empty<DynamicMetaObject>()));
}
[Fact]
public void CombineRestrictionsFromAllNull()
{
Assert.Same(BindingRestrictions.Empty, BindingRestrictions.Combine(new DynamicMetaObject[10]));
}
[Fact]
public void CustomRestrictionsEqualIfExpressionSame()
{
Expression exp = Expression.Constant(false);
BindingRestrictions x = BindingRestrictions.GetExpressionRestriction(exp);
BindingRestrictions y = BindingRestrictions.GetExpressionRestriction(exp);
Assert.Equal(x, y);
Assert.Equal(x.GetHashCode(), y.GetHashCode());
}
[Fact]
public void CustomRestrictionsNotEqualIfExpressionsNotSame()
{
BindingRestrictions x = BindingRestrictions.GetExpressionRestriction(Expression.Constant(false));
BindingRestrictions y = BindingRestrictions.GetExpressionRestriction(Expression.Constant(false));
Assert.NotEqual(x, y);
}
[Fact]
public void CustomRestrictionsNotEqualNull()
{
BindingRestrictions br = BindingRestrictions.GetExpressionRestriction(Expression.Constant(false));
Assert.False(br.Equals(null));
}
[Fact]
public void MergeCombines()
{
foreach (bool x in new[] {false, true})
foreach (bool y in new[] {false, true})
{
BindingRestrictions bX = BindingRestrictions.GetExpressionRestriction(Expression.Constant(x));
BindingRestrictions bY = BindingRestrictions.GetExpressionRestriction(Expression.Constant(y));
BindingRestrictions merged = bX.Merge(bY);
Assert.Equal(x & y, Expression.Lambda<Func<bool>>(merged.ToExpression()).Compile()());
}
}
[Fact]
public void MergeCombinesDeeper()
{
foreach (bool w in new[] {false, true})
{
BindingRestrictions bW = BindingRestrictions.GetExpressionRestriction(Expression.Constant(w));
foreach (bool x in new[] {false, true})
{
BindingRestrictions bX = BindingRestrictions.GetExpressionRestriction(Expression.Constant(x));
foreach (bool y in new[] {false, true})
{
BindingRestrictions bY = BindingRestrictions.GetExpressionRestriction(Expression.Constant(y));
BindingRestrictions merged = bW.Merge(bX).Merge(bY);
Assert.Equal(w & x & y, Expression.Lambda<Func<bool>>(merged.ToExpression()).Compile()());
foreach (bool z in new[] {false, true})
{
BindingRestrictions bZ = BindingRestrictions.GetExpressionRestriction(
Expression.Constant(z));
merged = bW.Merge(bX).Merge(bY).Merge(bZ);
Assert.Equal(
w & x & y & z, Expression.Lambda<Func<bool>>(merged.ToExpression()).Compile()());
}
}
}
}
}
[Fact]
public void InstanceRestrictionRequiresIdentity()
{
Egalitarian instance = new Egalitarian();
Expression exp = Expression.Constant(instance);
BindingRestrictions sameInstance = BindingRestrictions.GetInstanceRestriction(exp, instance);
Assert.True(Expression.Lambda<Func<bool>>(sameInstance.ToExpression()).Compile()());
BindingRestrictions diffInstance = BindingRestrictions.GetInstanceRestriction(exp, new Egalitarian());
Assert.False(Expression.Lambda<Func<bool>>(diffInstance.ToExpression()).Compile()());
BindingRestrictions noInstance = BindingRestrictions.GetInstanceRestriction(exp, null);
Assert.False(Expression.Lambda<Func<bool>>(noInstance.ToExpression()).Compile()());
}
[Fact]
public void InstanceRestrictionForNull()
{
Expression exp = Expression.Default(typeof(Egalitarian));
BindingRestrictions hasNull = BindingRestrictions.GetInstanceRestriction(exp, null);
Assert.True(Expression.Lambda<Func<bool>>(hasNull.ToExpression()).Compile()());
BindingRestrictions hasInst = BindingRestrictions.GetInstanceRestriction(exp, new Egalitarian());
Assert.False(Expression.Lambda<Func<bool>>(hasInst.ToExpression()).Compile()());
}
[Fact]
public void InstanceRestrictionEqualsIfAllSame()
{
Expression exp = Expression.Default(typeof(Egalitarian));
Egalitarian inst = new Egalitarian();
BindingRestrictions x = BindingRestrictions.GetInstanceRestriction(exp, inst);
BindingRestrictions y = BindingRestrictions.GetInstanceRestriction(exp, inst);
Assert.Equal(x, y);
Assert.Equal(x.GetHashCode(), y.GetHashCode());
x = BindingRestrictions.GetInstanceRestriction(exp, null);
y = BindingRestrictions.GetInstanceRestriction(exp, null);
Assert.Equal(x, y);
Assert.Equal(x.GetHashCode(), y.GetHashCode());
}
[Fact]
public void InstanceRestrictionNotEqualIfDifferentInstance()
{
Expression exp = Expression.Default(typeof(Egalitarian));
BindingRestrictions x = BindingRestrictions.GetInstanceRestriction(exp, new Egalitarian());
BindingRestrictions y = BindingRestrictions.GetInstanceRestriction(exp, new Egalitarian());
Assert.NotEqual(x, y);
}
[Fact]
public void InstanceRestrictionNotEqualNull()
{
BindingRestrictions br = BindingRestrictions.GetInstanceRestriction(
Expression.Default(typeof(Egalitarian)), null);
Assert.False(br.Equals(null));
}
[Fact]
public void InstanceRestrictionNotEqualIfDifferentExpression()
{
Egalitarian inst = new Egalitarian();
BindingRestrictions x = BindingRestrictions.GetInstanceRestriction(
Expression.Default(typeof(Egalitarian)), inst);
BindingRestrictions y = BindingRestrictions.GetInstanceRestriction(
Expression.Default(typeof(Egalitarian)), inst);
Assert.NotEqual(x, y);
}
private static IEnumerable<object> SomeObjects()
{
yield return "";
yield return 0;
yield return new Uri("https://example.net/");
yield return DateTime.MaxValue;
}
public static IEnumerable<object[]> ObjectsAsArguments => SomeObjects().Select(o => new[] {o});
public static IEnumerable<object[]> ObjectsAndWrongTypes() => from obj in SomeObjects()
from typeObj in SomeObjects()
where obj.GetType() != typeObj.GetType()
select new[] {obj, typeObj.GetType()};
[Theory, MemberData(nameof(ObjectsAsArguments))]
public void TypeRestrictionTrueForMatchType(object obj)
{
BindingRestrictions isType = BindingRestrictions.GetTypeRestriction(Expression.Constant(obj), obj.GetType());
Assert.True(Expression.Lambda<Func<bool>>(isType.ToExpression()).Compile()());
}
[Theory, MemberData(nameof(ObjectsAndWrongTypes))]
public void TypeRestrictionFalseForOtherType(object obj, Type type)
{
BindingRestrictions isType = BindingRestrictions.GetTypeRestriction(Expression.Constant(obj), type);
Assert.False(Expression.Lambda<Func<bool>>(isType.ToExpression()).Compile()());
}
[Fact]
public void TypeRestrictionEqualIfSameTypeAndExpression()
{
Expression exp = Expression.Default(typeof(Egalitarian));
BindingRestrictions x = BindingRestrictions.GetTypeRestriction(exp, typeof(Egalitarian));
BindingRestrictions y = BindingRestrictions.GetTypeRestriction(
exp, typeof(Egalitarian).MakeArrayType().GetElementType());
Assert.Equal(x, y);
Assert.Equal(x.GetHashCode(), y.GetHashCode());
}
[Fact]
public void TypeRestrictionNotEqualIfDifferentType()
{
Expression exp = Expression.Default(typeof(Egalitarian));
BindingRestrictions x = BindingRestrictions.GetTypeRestriction(exp, typeof(Egalitarian));
BindingRestrictions y = BindingRestrictions.GetTypeRestriction(exp, typeof(string));
Assert.NotEqual(x, y);
}
[Fact]
public void TypeRestrictionNotEqualIfDifferentExpression()
{
BindingRestrictions x = BindingRestrictions.GetTypeRestriction(
Expression.Default(typeof(Egalitarian)), typeof(Egalitarian));
BindingRestrictions y = BindingRestrictions.GetTypeRestriction(
Expression.Default(typeof(Egalitarian)), typeof(Egalitarian));
Assert.NotEqual(x, y);
}
[Fact]
public void TypeRestrictionNotEqualNull()
{
BindingRestrictions br = BindingRestrictions.GetTypeRestriction(
Expression.Default(typeof(Egalitarian)), typeof(Egalitarian));
Assert.False(br.Equals(null));
}
}
}
| |
/* Genuine Channels product.
*
* Copyright (c) 2002-2007 Dmitry Belikov. All rights reserved.
*
* This source code comes under and must be used and distributed according to the Genuine Channels license agreement.
*/
using System;
using System.IO;
namespace Belikov.GenuineChannels.Logbook
{
/// <summary>
/// Implements a transactional writing to the file.
/// </summary>
public class FileWritingStream : Stream
{
/// <summary>
/// Constructs an instance of the FileWritingStream class.
/// </summary>
/// <param name="baseFileName">The base part of the file name.</param>
/// <param name="addSuffixToBaseFileName">true to add suffix (date, time, and extension) to the file name.</param>
public FileWritingStream(string baseFileName, bool addSuffixToBaseFileName)
{
if (baseFileName == null)
throw new ArgumentNullException("baseFileName");
this._addSuffixToBaseFileName = addSuffixToBaseFileName;
this._baseFileName = baseFileName;
}
/// <summary>
/// Constructs an instance of the FileWritingStream class.
/// </summary>
/// <param name="baseFileName">The base part of the file name.</param>
public FileWritingStream(string baseFileName) : this(baseFileName, true) {}
private bool _addSuffixToBaseFileName;
private string _baseFileName;
#region -- Writing methods -----------------------------------------------------------------
private DateTime _lastDateTimeValue = DateTime.MinValue;
private Stream _fileStream;
private bool _isRecordStarted = false;
private int nextTry = GenuineUtility.TickCount;
/// <summary>
/// Ensures that the file is opened. Opens the file if necessary.
/// </summary>
private void CheckThatCorrectFileIsOpened()
{
try
{
DateTime now = DateTime.Today;
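// Reopen the file when the date has changed or the previous open attempt failed, but only once the retry timeout has expired.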
if ( (this._lastDateTimeValue != now || _fileStream == null) && GenuineUtility.IsTimeoutExpired(nextTry) )
{
if (this._fileStream != null)
this._fileStream.Close();
this._lastDateTimeValue = DateTime.Today;
string filename = this._baseFileName;
if (this._addSuffixToBaseFileName)
filename += "." + this._lastDateTimeValue.ToString("yyyy-MM-dd") + ".genchlog";
this._fileStream = new FileStream(filename, FileMode.OpenOrCreate, FileAccess.Write, FileShare.Read);
this._fileStream.Seek(0, SeekOrigin.End);
}
}
catch
{
this._fileStream = null;
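// Opening the file failed; postpone the next attempt by 15 seconds.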
nextTry = GenuineUtility.GetTimeout(15000);
}
}
/// <summary>
/// Writes a sequence of bytes to the current stream and advances the current position within this stream by the number of bytes written.
/// </summary>
/// <param name="buffer">An array of bytes.</param>
/// <param name="offset">The zero-based byte offset in buffer at which to begin copying bytes to the current stream.</param>
/// <param name="count">The number of bytes to be written to the current stream.</param>
public override void Write(byte[] buffer, int offset, int count)
{
if (! this._isRecordStarted)
{
this.CheckThatCorrectFileIsOpened();
this._isRecordStarted = true;
}
if (this._fileStream != null)
this._fileStream.Write(buffer, offset, count);
}
/// <summary>
/// Writes a byte to the current position in the stream and advances the position within the stream by one byte.
/// </summary>
/// <param name="val">The byte to write to the stream.</param>
public override void WriteByte(byte val)
{
if (! this._isRecordStarted)
{
this.CheckThatCorrectFileIsOpened();
this._isRecordStarted = true;
}
if (this._fileStream != null)
this._fileStream.WriteByte(val);
}
/// <summary>
/// Clears all buffers for this stream and causes any buffered data to be written to the underlying device.
/// </summary>
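/// <remarks>
/// Flushing also ends the current record, so the next write re-checks whether a new daily log file must be opened.
/// </remarks>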
public override void Flush()
{
if (this._fileStream != null)
this._fileStream.Flush();
this._isRecordStarted = false;
}
#endregion
#region -- Insignificant Stream members -----------------------------------------------------
/// <summary>
/// Gets a value indicating whether the current stream supports reading.
/// </summary>
public override bool CanRead
{
get
{
return false;
}
}
/// <summary>
/// Gets a value indicating whether the current stream supports seeking.
/// </summary>
public override bool CanSeek
{
get
{
return false;
}
}
/// <summary>
/// Gets a value indicating whether the current stream supports writing.
/// </summary>
public override bool CanWrite
{
get
{
return true;
}
}
/// <summary>
/// Gets the length in bytes of the stream.
/// </summary>
public override long Length
{
get
{
throw new NotSupportedException();
}
}
/// <summary>
/// Gets or sets the position within the current stream.
/// The getter returns the position of the underlying file stream; the setter always throws a NotSupportedException.
/// </summary>
public override long Position
{
get
{
return this._fileStream.Position;
}
set
{
throw new NotSupportedException();
}
}
/// <summary>
/// Closes the stream. This override is a no-op; the underlying log file is opened and closed internally.
/// </summary>
public override void Close()
{
}
/// <summary>
/// Reads a sequence of bytes from the current stream and advances the position within the stream by the number of bytes read.
/// </summary>
/// <param name="buffer">An array of bytes.</param>
/// <param name="offset">The zero-based byte offset in buffer at which to begin storing the data read from the current stream.</param>
/// <param name="count">The maximum number of bytes to be read from the current stream.</param>
/// <returns>The total number of bytes read into the buffer.</returns>
public override int Read(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
/// <summary>
/// Reads a byte from the stream and advances the position within the stream by one byte, or returns -1 if at the end of the stream.
/// </summary>
/// <returns>The unsigned byte cast to an Int32, or -1 if at the end of the stream.</returns>
public override int ReadByte()
{
throw new NotSupportedException();
}
/// <summary>
/// Sets the position within the current stream.
/// </summary>
/// <param name="offset">A byte offset relative to the origin parameter.</param>
/// <param name="origin">A value of type SeekOrigin indicating the reference point used to obtain the new position.</param>
/// <returns>The new position within the current stream.</returns>
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
}
/// <summary>
/// Sets the length of the current stream.
/// </summary>
/// <param name="val">The desired length of the current stream in bytes.</param>
public override void SetLength(long val)
{
throw new NotSupportedException();
}
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Orleans.Runtime.Configuration;
namespace Orleans.Runtime.ReminderService
{
internal class AzureBasedReminderTable : IReminderTable
{
private Logger logger;
private RemindersTableManager remTableManager;
public async Task Init(GlobalConfiguration config, Logger logger)
{
this.logger = logger;
remTableManager = await RemindersTableManager.GetManager(config.ServiceId, config.DeploymentId, config.DataConnectionStringForReminders);
}
#region Utility methods
private ReminderTableData ConvertFromTableEntryList(IEnumerable<Tuple<ReminderTableEntry, string>> entries)
{
var remEntries = new List<ReminderEntry>();
foreach (var entry in entries)
{
try
{
ReminderEntry converted = ConvertFromTableEntry(entry.Item1, entry.Item2);
remEntries.Add(converted);
}
catch (Exception)
{
// Skip the corrupt entry; ConvertFromTableEntry has already logged the parsing failure.
}
}
return new ReminderTableData(remEntries);
}
private ReminderEntry ConvertFromTableEntry(ReminderTableEntry tableEntry, string eTag)
{
try
{
return new ReminderEntry
{
GrainRef = GrainReference.FromKeyString(tableEntry.GrainReference),
ReminderName = tableEntry.ReminderName,
StartAt = LogFormatter.ParseDate(tableEntry.StartAt),
Period = TimeSpan.Parse(tableEntry.Period),
ETag = eTag,
};
}
catch (Exception exc)
{
var error =
$"Failed to parse ReminderTableEntry: {tableEntry}. This entry is corrupt, going to ignore it.";
logger.Error(ErrorCode.AzureTable_49, error, exc);
throw;
}
finally
{
string serviceIdStr = ReminderTableEntry.ConstructServiceIdStr(remTableManager.ServiceId);
if (!tableEntry.ServiceId.Equals(serviceIdStr))
{
var error =
$"Read a reminder entry for wrong Service id. Read {tableEntry}, but my service id is {serviceIdStr}. Going to discard it.";
logger.Warn(ErrorCode.AzureTable_ReadWrongReminder, error);
throw new OrleansException(error);
}
}
}
private static ReminderTableEntry ConvertToTableEntry(ReminderEntry remEntry, Guid serviceId, string deploymentId)
{
string partitionKey = ReminderTableEntry.ConstructPartitionKey(serviceId, remEntry.GrainRef);
string rowKey = ReminderTableEntry.ConstructRowKey(remEntry.GrainRef, remEntry.ReminderName);
string serviceIdStr = ReminderTableEntry.ConstructServiceIdStr(serviceId);
var consistentHash = remEntry.GrainRef.GetUniformHashCode();
return new ReminderTableEntry
{
PartitionKey = partitionKey,
RowKey = rowKey,
ServiceId = serviceIdStr,
DeploymentId = deploymentId,
GrainReference = remEntry.GrainRef.ToKeyString(),
ReminderName = remEntry.ReminderName,
StartAt = LogFormatter.PrintDate(remEntry.StartAt),
Period = remEntry.Period.ToString(),
GrainRefConsistentHash = String.Format("{0:X8}", consistentHash),
ETag = remEntry.ETag,
};
}
#endregion
public Task TestOnlyClearTable()
{
return remTableManager.DeleteTableEntries();
}
public async Task<ReminderTableData> ReadRows(GrainReference key)
{
try
{
var entries = await remTableManager.FindReminderEntries(key);
ReminderTableData data = ConvertFromTableEntryList(entries);
if (logger.IsVerbose2) logger.Verbose2("Read for grain {0} Table=" + Environment.NewLine + "{1}", key, data.ToString());
return data;
}
catch (Exception exc)
{
logger.Warn(ErrorCode.AzureTable_47,
$"Intermediate error reading reminders for grain {key} in table {remTableManager.TableName}.", exc);
throw;
}
}
public async Task<ReminderTableData> ReadRows(uint begin, uint end)
{
try
{
var entries = await remTableManager.FindReminderEntries(begin, end);
ReminderTableData data = ConvertFromTableEntryList(entries);
if (logger.IsVerbose2) logger.Verbose2("Read in {0} Table=" + Environment.NewLine + "{1}", RangeFactory.CreateRange(begin, end), data);
return data;
}
catch (Exception exc)
{
logger.Warn(ErrorCode.AzureTable_40,
$"Intermediate error reading reminders in range {RangeFactory.CreateRange(begin, end)} for table {remTableManager.TableName}.", exc);
throw;
}
}
public async Task<ReminderEntry> ReadRow(GrainReference grainRef, string reminderName)
{
try
{
if (logger.IsVerbose) logger.Verbose("ReadRow grainRef = {0} reminderName = {1}", grainRef, reminderName);
var result = await remTableManager.FindReminderEntry(grainRef, reminderName);
return result == null ? null : ConvertFromTableEntry(result.Item1, result.Item2);
}
catch (Exception exc)
{
logger.Warn(ErrorCode.AzureTable_46,
$"Intermediate error reading row with grainId = {grainRef} reminderName = {reminderName} from table {remTableManager.TableName}.", exc);
throw;
}
}
public async Task<string> UpsertRow(ReminderEntry entry)
{
try
{
if (logger.IsVerbose) logger.Verbose("UpsertRow entry = {0}", entry.ToString());
ReminderTableEntry remTableEntry = ConvertToTableEntry(entry, remTableManager.ServiceId, remTableManager.DeploymentId);
string result = await remTableManager.UpsertRow(remTableEntry);
if (result == null)
{
logger.Warn(ErrorCode.AzureTable_45,
$"Upsert failed on the reminder table. Will retry. Entry = {entry.ToString()}");
}
return result;
}
catch (Exception exc)
{
logger.Warn(ErrorCode.AzureTable_42,
$"Intermediate error upserting reminder entry {entry.ToString()} to the table {remTableManager.TableName}.", exc);
throw;
}
}
public async Task<bool> RemoveRow(GrainReference grainRef, string reminderName, string eTag)
{
var entry = new ReminderTableEntry
{
PartitionKey = ReminderTableEntry.ConstructPartitionKey(remTableManager.ServiceId, grainRef),
RowKey = ReminderTableEntry.ConstructRowKey(grainRef, reminderName),
ETag = eTag,
};
try
{
if (logger.IsVerbose2) logger.Verbose2("RemoveRow entry = {0}", entry.ToString());
bool result = await remTableManager.DeleteReminderEntryConditionally(entry, eTag);
if (result == false)
{
logger.Warn(ErrorCode.AzureTable_43,
$"Delete failed on the reminder table. Will retry. Entry = {entry}");
}
return result;
}
catch (Exception exc)
{
logger.Warn(ErrorCode.AzureTable_44,
$"Intermediate error when deleting reminder entry {entry} to the table {remTableManager.TableName}.", exc);
throw;
}
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Net;
using System.Reflection;
using log4net;
using Mono.Addins;
using Nini.Config;
using OpenSim.Framework;
using OpenSim.Framework.Servers.HttpServer;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Services.Interfaces;
using OpenSim.Server.Base;
using OpenMetaverse;
using OpenMetaverse.StructuredData;
using GridRegion = OpenSim.Services.Interfaces.GridRegion;
namespace OpenSim.Services.Connectors.SimianGrid
{
/// <summary>
/// Connects region registration and neighbor lookups to the SimianGrid
/// backend
/// </summary>
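/// <remarks>
/// Illustrative OpenSim.ini sketch for the section read in <c>Initialise</c> below (the URI is an assumption):
/// <code>
/// [GridService]
///     GridServerURI = "http://grid.example.com/Grid/"
/// </code>
/// </remarks>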
[Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule")]
public class SimianGridServiceConnector : IGridService, ISharedRegionModule
{
private static readonly ILog m_log =
LogManager.GetLogger(
MethodBase.GetCurrentMethod().DeclaringType);
private string m_serverUrl = String.Empty;
#region ISharedRegionModule
public Type ReplaceableInterface { get { return null; } }
public void RegionLoaded(Scene scene) { }
public void PostInitialise() { }
public void Close() { }
public SimianGridServiceConnector() { }
public string Name { get { return "SimianGridServiceConnector"; } }
public void AddRegion(Scene scene) { if (!String.IsNullOrEmpty(m_serverUrl)) { scene.RegisterModuleInterface<IGridService>(this); } }
public void RemoveRegion(Scene scene) { if (!String.IsNullOrEmpty(m_serverUrl)) { scene.UnregisterModuleInterface<IGridService>(this); } }
#endregion ISharedRegionModule
public SimianGridServiceConnector(IConfigSource source)
{
Initialise(source);
}
public void Initialise(IConfigSource source)
{
if (Simian.IsSimianEnabled(source, "GridServices", this.Name))
{
IConfig gridConfig = source.Configs["GridService"];
if (gridConfig == null)
{
m_log.Error("[SIMIAN GRID CONNECTOR]: GridService missing from OpenSim.ini");
throw new Exception("Grid connector init error");
}
string serviceUrl = gridConfig.GetString("GridServerURI");
if (String.IsNullOrEmpty(serviceUrl))
{
m_log.Error("[SIMIAN GRID CONNECTOR]: No Server URI named in section GridService");
throw new Exception("Grid connector init error");
}
m_serverUrl = serviceUrl;
}
}
#region IGridService
public string RegisterRegion(UUID scopeID, GridRegion regionInfo)
{
Vector3d minPosition = new Vector3d(regionInfo.RegionLocX, regionInfo.RegionLocY, 0.0);
Vector3d maxPosition = minPosition + new Vector3d(Constants.RegionSize, Constants.RegionSize, 4096.0);
string httpAddress = "http://" + regionInfo.ExternalHostName + ":" + regionInfo.HttpPort + "/";
OSDMap extraData = new OSDMap
{
{ "ServerURI", OSD.FromString(regionInfo.ServerURI) },
{ "InternalAddress", OSD.FromString(regionInfo.InternalEndPoint.Address.ToString()) },
{ "InternalPort", OSD.FromInteger(regionInfo.InternalEndPoint.Port) },
{ "ExternalAddress", OSD.FromString(regionInfo.ExternalEndPoint.Address.ToString()) },
{ "ExternalPort", OSD.FromInteger(regionInfo.ExternalEndPoint.Port) },
{ "MapTexture", OSD.FromUUID(regionInfo.TerrainImage) },
{ "Access", OSD.FromInteger(regionInfo.Access) },
{ "RegionSecret", OSD.FromString(regionInfo.RegionSecret) },
{ "EstateOwner", OSD.FromUUID(regionInfo.EstateOwner) },
{ "Token", OSD.FromString(regionInfo.Token) }
};
NameValueCollection requestArgs = new NameValueCollection
{
{ "RequestMethod", "AddScene" },
{ "SceneID", regionInfo.RegionID.ToString() },
{ "Name", regionInfo.RegionName },
{ "MinPosition", minPosition.ToString() },
{ "MaxPosition", maxPosition.ToString() },
{ "Address", httpAddress },
{ "Enabled", "1" },
{ "ExtraData", OSDParser.SerializeJsonString(extraData) }
};
OSDMap response = WebUtil.PostToService(m_serverUrl, requestArgs);
if (response["Success"].AsBoolean())
return String.Empty;
else
return "Region registration for " + regionInfo.RegionName + " failed: " + response["Message"].AsString();
}
public bool DeregisterRegion(UUID regionID)
{
NameValueCollection requestArgs = new NameValueCollection
{
{ "RequestMethod", "AddScene" },
{ "SceneID", regionID.ToString() },
{ "Enabled", "0" }
};
OSDMap response = WebUtil.PostToService(m_serverUrl, requestArgs);
bool success = response["Success"].AsBoolean();
if (!success)
m_log.Warn("[SIMIAN GRID CONNECTOR]: Region deregistration for " + regionID + " failed: " + response["Message"].AsString());
return success;
}
public List<GridRegion> GetNeighbours(UUID scopeID, UUID regionID)
{
const int NEIGHBOR_RADIUS = 128;
GridRegion region = GetRegionByUUID(scopeID, regionID);
if (region != null)
{
List<GridRegion> regions = GetRegionRange(scopeID,
region.RegionLocX - NEIGHBOR_RADIUS, region.RegionLocX + (int)Constants.RegionSize + NEIGHBOR_RADIUS,
region.RegionLocY - NEIGHBOR_RADIUS, region.RegionLocY + (int)Constants.RegionSize + NEIGHBOR_RADIUS);
for (int i = 0; i < regions.Count; i++)
{
if (regions[i].RegionID == regionID)
{
regions.RemoveAt(i);
break;
}
}
m_log.Debug("[SIMIAN GRID CONNECTOR]: Found " + regions.Count + " neighbors for region " + regionID);
return regions;
}
return new List<GridRegion>(0);
}
public GridRegion GetRegionByUUID(UUID scopeID, UUID regionID)
{
NameValueCollection requestArgs = new NameValueCollection
{
{ "RequestMethod", "GetScene" },
{ "SceneID", regionID.ToString() }
};
OSDMap response = WebUtil.PostToService(m_serverUrl, requestArgs);
if (response["Success"].AsBoolean())
{
return ResponseToGridRegion(response);
}
else
{
m_log.Warn("[SIMIAN GRID CONNECTOR]: Grid service did not find a match for region " + regionID);
return null;
}
}
public GridRegion GetRegionByPosition(UUID scopeID, int x, int y)
{
// Go one meter in from the requested x/y coords to avoid requesting a position
// that falls on the border of two sims
Vector3d position = new Vector3d(x + 1, y + 1, 0.0);
NameValueCollection requestArgs = new NameValueCollection
{
{ "RequestMethod", "GetScene" },
{ "Position", position.ToString() },
{ "Enabled", "1" }
};
OSDMap response = WebUtil.PostToService(m_serverUrl, requestArgs);
if (response["Success"].AsBoolean())
{
return ResponseToGridRegion(response);
}
else
{
//m_log.InfoFormat("[SIMIAN GRID CONNECTOR]: Grid service did not find a match for region at {0},{1}",
// x / Constants.RegionSize, y / Constants.RegionSize);
return null;
}
}
public GridRegion GetRegionByName(UUID scopeID, string regionName)
{
List<GridRegion> regions = GetRegionsByName(scopeID, regionName, 1);
m_log.Debug("[SIMIAN GRID CONNECTOR]: Got " + regions.Count + " matches for region name " + regionName);
if (regions.Count > 0)
return regions[0];
return null;
}
public List<GridRegion> GetRegionsByName(UUID scopeID, string name, int maxNumber)
{
List<GridRegion> foundRegions = new List<GridRegion>();
NameValueCollection requestArgs = new NameValueCollection
{
{ "RequestMethod", "GetScenes" },
{ "NameQuery", name },
{ "Enabled", "1" }
};
if (maxNumber > 0)
requestArgs["MaxNumber"] = maxNumber.ToString();
OSDMap response = WebUtil.PostToService(m_serverUrl, requestArgs);
if (response["Success"].AsBoolean())
{
OSDArray array = response["Scenes"] as OSDArray;
if (array != null)
{
for (int i = 0; i < array.Count; i++)
{
GridRegion region = ResponseToGridRegion(array[i] as OSDMap);
if (region != null)
foundRegions.Add(region);
}
}
}
return foundRegions;
}
public List<GridRegion> GetRegionRange(UUID scopeID, int xmin, int xmax, int ymin, int ymax)
{
List<GridRegion> foundRegions = new List<GridRegion>();
Vector3d minPosition = new Vector3d(xmin, ymin, 0.0);
Vector3d maxPosition = new Vector3d(xmax, ymax, 4096.0);
NameValueCollection requestArgs = new NameValueCollection
{
{ "RequestMethod", "GetScenes" },
{ "MinPosition", minPosition.ToString() },
{ "MaxPosition", maxPosition.ToString() },
{ "Enabled", "1" }
};
OSDMap response = WebUtil.PostToService(m_serverUrl, requestArgs);
if (response["Success"].AsBoolean())
{
OSDArray array = response["Scenes"] as OSDArray;
if (array != null)
{
for (int i = 0; i < array.Count; i++)
{
GridRegion region = ResponseToGridRegion(array[i] as OSDMap);
if (region != null)
foundRegions.Add(region);
}
}
}
return foundRegions;
}
public List<GridRegion> GetDefaultRegions(UUID scopeID)
{
// TODO: Allow specifying the default grid location
const int DEFAULT_X = 1000 * 256;
const int DEFAULT_Y = 1000 * 256;
GridRegion defRegion = GetNearestRegion(new Vector3d(DEFAULT_X, DEFAULT_Y, 0.0), true);
if (defRegion != null)
return new List<GridRegion>(1) { defRegion };
else
return new List<GridRegion>(0);
}
public List<GridRegion> GetFallbackRegions(UUID scopeID, int x, int y)
{
GridRegion defRegion = GetNearestRegion(new Vector3d(x, y, 0.0), true);
if (defRegion != null)
return new List<GridRegion>(1) { defRegion };
else
return new List<GridRegion>(0);
}
public int GetRegionFlags(UUID scopeID, UUID regionID)
{
const int REGION_ONLINE = 4;
NameValueCollection requestArgs = new NameValueCollection
{
{ "RequestMethod", "GetScene" },
{ "SceneID", regionID.ToString() }
};
OSDMap response = WebUtil.PostToService(m_serverUrl, requestArgs);
if (response["Success"].AsBoolean())
{
return response["Enabled"].AsBoolean() ? REGION_ONLINE : 0;
}
else
{
m_log.Warn("[SIMIAN GRID CONNECTOR]: Grid service did not find a match for region " + regionID + " during region flags check");
return -1;
}
}
#endregion IGridService
private GridRegion GetNearestRegion(Vector3d position, bool onlyEnabled)
{
NameValueCollection requestArgs = new NameValueCollection
{
{ "RequestMethod", "GetScene" },
{ "Position", position.ToString() },
{ "FindClosest", "1" }
};
if (onlyEnabled)
requestArgs["Enabled"] = "1";
OSDMap response = WebUtil.PostToService(m_serverUrl, requestArgs);
if (response["Success"].AsBoolean())
{
return ResponseToGridRegion(response);
}
else
{
m_log.Warn("[SIMIAN GRID CONNECTOR]: Grid service did not find a match for region at " + position);
return null;
}
}
private GridRegion ResponseToGridRegion(OSDMap response)
{
if (response == null)
return null;
OSDMap extraData = response["ExtraData"] as OSDMap;
if (extraData == null)
return null;
GridRegion region = new GridRegion();
region.RegionID = response["SceneID"].AsUUID();
region.RegionName = response["Name"].AsString();
Vector3d minPosition = response["MinPosition"].AsVector3d();
region.RegionLocX = (int)minPosition.X;
region.RegionLocY = (int)minPosition.Y;
Uri httpAddress = response["Address"].AsUri();
region.ExternalHostName = httpAddress.Host;
region.HttpPort = (uint)httpAddress.Port;
region.ServerURI = extraData["ServerURI"].AsString();
IPAddress internalAddress;
IPAddress.TryParse(extraData["InternalAddress"].AsString(), out internalAddress);
if (internalAddress == null)
internalAddress = IPAddress.Any;
region.InternalEndPoint = new IPEndPoint(internalAddress, extraData["InternalPort"].AsInteger());
region.TerrainImage = extraData["MapTexture"].AsUUID();
region.Access = (byte)extraData["Access"].AsInteger();
region.RegionSecret = extraData["RegionSecret"].AsString();
region.EstateOwner = extraData["EstateOwner"].AsUUID();
region.Token = extraData["Token"].AsString();
return region;
}
}
}
| |
// Copyright 2019 DeepMind Technologies Limited
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Xml;
using NUnit.Framework;
using UnityEngine;
using UnityEngine.TestTools.Utils;
namespace Mujoco {
[TestFixture]
public class MjSceneGenerationTests {
[Test]
public void AssigningComponentsUniqueName() {
_fakeBodyA.enabled = true;
_fakeBodyB.enabled = true;
_scene.CreateScene(skipCompile:true);
Assert.That(_fakeBodyA.MujocoName, Is.Not.EqualTo(_fakeBodyB.MujocoName));
}
[Test]
public void AttachingComponentsToMjNodesThroughId() {
_fakeBodyA.enabled = true;
_fakeBodyB.enabled = true;
_scene.CreateScene();
Assert.That(_fakeBodyA.MujocoId, Is.GreaterThanOrEqualTo(0));
Assert.That(_fakeBodyB.MujocoId, Is.GreaterThanOrEqualTo(0));
Assert.That(_fakeBodyA.MujocoId, Is.Not.EqualTo(_fakeBodyB.MujocoId));
}
[Test]
public unsafe void ModelStructureIsInitialized() {
_fakeBodyA.enabled = true;
_fakeBodyB.enabled = true;
_scene.CreateScene();
Assert.That(_scene.Model->nbody, Is.GreaterThanOrEqualTo(2));
}
[Test]
public void SyncingComponentStateOnFixedUpdate() {
_fakeBodyA.enabled = true;
_fakeBodyB.enabled = true;
_scene.CreateScene();
Assert.That(_fakeBodyA.StateSynced, Is.False);
Assert.That(_fakeBodyB.StateSynced, Is.False);
_scene.StepScene();
Assert.That(_fakeBodyA.StateSynced, Is.True);
Assert.That(_fakeBodyB.StateSynced, Is.True);
}
[Test]
public unsafe void PhysicsRuntimeError() {
_scene.CreateScene();
_scene.Data->qpos[0] = float.PositiveInfinity;
Assert.That(
() => { _scene.StepScene(); },
Throws.TypeOf<PhysicsRuntimeException>()
.With.Message.EqualTo("BADQPOS: NaN/inf in qpos."));
}
[Test]
public void MjcfHierarchyMirrorsTheHierarchyOfUnityObjects() {
_fakeBodyA.transform.parent = _fakeBodyB.transform;
_fakeBodyA.enabled = true;
_fakeBodyB.enabled = true;
var mjcf = _scene.CreateScene(skipCompile:true);
// Three components are already in this scene: body, joint and inertia (see SetUp function below)
var mjcfFakeBodyB = mjcf.SelectNodes("/mujoco/worldbody/body")[1] as XmlElement;
var mjcfFakeBodyA = mjcf.SelectNodes("/mujoco/worldbody/body/body")[0] as XmlElement;
Assert.That(mjcfFakeBodyA, Is.Not.Null);
Assert.That(mjcfFakeBodyB, Is.Not.Null);
// B is the parent, so it appears first in the XML
Assert.That(mjcfFakeBodyB.GetAttribute("name"), Is.EqualTo("component_3"));
Assert.That(mjcfFakeBodyA.GetAttribute("name"), Is.EqualTo("component_4"));
}
[Test]
public void ActuatorsAreAddedToDedicatedTag() {
_actuator.enabled = true;
var mjcf = _scene.CreateScene(skipCompile:true);
var mjcfForActuator = mjcf.SelectNodes("/mujoco/actuator/general")[0] as XmlElement;
Assert.That(mjcfForActuator, Is.Not.Null);
}
[Test]
public void SensorsAreAddedToDedicatedTag() {
_sensor.enabled = true;
var mjcf = _scene.CreateScene(skipCompile:true);
var mjcfForSensor = mjcf.SelectNodes("/mujoco/sensor/jointpos")[0] as XmlElement;
Assert.That(mjcfForSensor, Is.Not.Null);
}
[Test]
public unsafe void SceneRecreatedWithAddition() {
_scene.CreateScene();
_scene.Data->qvel[0] = 1;
var nq = _scene.Model->nq;
var tickedRotation = _body.transform.rotation;
var body = new GameObject("body").AddComponent<MjBody>();
var inertia = new GameObject("inertia").AddComponent<MjInertial>();
inertia.transform.parent = body.transform;
var joint = new GameObject("joint").AddComponent<MjHingeJoint>();
joint.transform.parent = body.transform;
_scene.RecreateScene();
Assert.That(_scene.Model->nq, Is.EqualTo(nq+1));
Assert.That(_scene.Data->qvel[joint.DofAddress], Is.EqualTo(0));
Assert.That(_scene.Data->qvel[_joint.DofAddress], Is.EqualTo(1));
UnityEngine.Object.DestroyImmediate(inertia.gameObject);
UnityEngine.Object.DestroyImmediate(joint.gameObject);
UnityEngine.Object.DestroyImmediate(body.gameObject);
}
[Test]
public unsafe void SceneRecreatedAfterDeletion() {
var body = new GameObject("body").AddComponent<MjBody>();
var inertia = new GameObject("inertia").AddComponent<MjInertial>();
inertia.transform.parent = body.transform;
var joint = new GameObject("joint").AddComponent<MjHingeJoint>();
joint.transform.parent = body.transform;
_scene.CreateScene();
_scene.Data->qvel[0] = 1;
_scene.Data->qvel[1] = 2;
var nq = _scene.Model->nq;
UnityEngine.Object.DestroyImmediate(inertia.gameObject);
UnityEngine.Object.DestroyImmediate(joint.gameObject);
UnityEngine.Object.DestroyImmediate(body.gameObject);
_scene.RecreateScene();
Assert.That(_scene.Model->nq, Is.EqualTo(nq-1));
Assert.That(_scene.Data->qvel[0], Is.EqualTo(2));
}
#region Test setup.
public class FakeMjBody : MjBaseBody {
public bool StateSynced = false;
public override MujocoLib.mjtObj ObjectType => MujocoLib.mjtObj.mjOBJ_BODY;
protected override void OnParseMjcf(XmlElement mjcf) {}
protected override XmlElement OnGenerateMjcf(XmlDocument doc) {
// We'll create an actual element because the code will then pass it on to Mujoco scene
// compiler, and we care that the compiler doesn't fail.
var mjcf = doc.CreateElement("body");
return mjcf;
}
public override unsafe void OnSyncState(MujocoLib.mjData_* data) {
StateSynced = true;
}
}
private MjScene _scene;
private FakeMjBody _fakeBodyA;
private FakeMjBody _fakeBodyB;
private MjJointScalarSensor _sensor;
private MjBody _body;
private MjInertial _inertia;
private MjHingeJoint _joint;
private MjActuator _actuator;
private Vector4EqualityComparer _quaternionComparer;
[SetUp]
public void SetUp() {
_quaternionComparer = new Vector4EqualityComparer(1e-5f);
_scene = MjScene.Instance;
_fakeBodyA = new GameObject("component").AddComponent<FakeMjBody>();
_fakeBodyB = new GameObject("component").AddComponent<FakeMjBody>();
_sensor = new GameObject("sensor").AddComponent<MjJointScalarSensor>();
_actuator = new GameObject("actuator").AddComponent<MjActuator>();
// body, joint and inertia are always present so that the actuator and sensor are valid
_body = new GameObject("body").AddComponent<MjBody>();
_inertia = new GameObject("inertia").AddComponent<MjInertial>();
_inertia.transform.parent = _body.transform;
_joint = new GameObject("joint").AddComponent<MjHingeJoint>();
_joint.transform.parent = _body.transform;
_sensor.Joint = _joint;
_sensor.SensorType = MjJointScalarSensor.AvailableSensors.JointPos;
_actuator.Joint = _joint;
_fakeBodyA.enabled = false;
_fakeBodyB.enabled = false;
_sensor.enabled = false;
_actuator.enabled = false;
}
[TearDown]
public void TearDown() {
UnityEngine.Object.DestroyImmediate(_fakeBodyA.gameObject);
UnityEngine.Object.DestroyImmediate(_fakeBodyB.gameObject);
UnityEngine.Object.DestroyImmediate(_sensor.gameObject);
UnityEngine.Object.DestroyImmediate(_actuator.gameObject);
UnityEngine.Object.DestroyImmediate(_joint.gameObject);
UnityEngine.Object.DestroyImmediate(_inertia.gameObject);
UnityEngine.Object.DestroyImmediate(_body.gameObject);
UnityEngine.Object.DestroyImmediate(_scene.gameObject);
}
#endregion
}
}
| |
using System;
using System.Collections;
namespace Python.Runtime
{
/// <summary>
/// Implements a Python type for managed arrays. This type is essentially
/// the same as a ClassObject, except that it provides sequence semantics
/// to support natural array usage (indexing) from Python.
/// </summary>
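/// <remarks>
/// From Python such an array is indexed like a sequence, e.g. <c>arr[0]</c>, <c>arr[-1]</c>, or
/// <c>matrix[1, 2]</c> for multi-dimensional arrays (the names are illustrative).
/// </remarks>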
internal class ArrayObject : ClassBase
{
internal ArrayObject(Type tp) : base(tp)
{
}
internal override bool CanSubclass()
{
return false;
}
public static IntPtr tp_new(IntPtr tp, IntPtr args, IntPtr kw)
{
var self = GetManagedObject(tp) as ArrayObject;
if (Runtime.PyTuple_Size(args) != 1)
{
return Exceptions.RaiseTypeError("array expects 1 argument");
}
IntPtr op = Runtime.PyTuple_GetItem(args, 0);
object result;
if (!Converter.ToManaged(op, self.type, out result, true))
{
return IntPtr.Zero;
}
return CLRObject.GetInstHandle(result, tp);
}
/// <summary>
/// Implements __getitem__ for array types.
/// </summary>
public static IntPtr mp_subscript(IntPtr ob, IntPtr idx)
{
var obj = (CLRObject)GetManagedObject(ob);
var items = obj.inst as Array;
Type itemType = obj.inst.GetType().GetElementType();
int rank = items.Rank;
int index;
object value;
// Note that CLR 1.0 only supports int indexes - methods to
// support long indices were introduced in 1.1. We could
// support long indices automatically, but given that long
// indices are not backward compatible and a relative edge
// case, we won't bother for now.
// Single-dimensional arrays are the most common case and are
// cheaper to deal with than multi-dimensional, so check first.
if (rank == 1)
{
index = Runtime.PyInt_AsLong(idx);
if (Exceptions.ErrorOccurred())
{
return Exceptions.RaiseTypeError("invalid index value");
}
if (index < 0)
{
index = items.Length + index;
}
try
{
value = items.GetValue(index);
}
catch (IndexOutOfRangeException)
{
Exceptions.SetError(Exceptions.IndexError, "array index out of range");
return IntPtr.Zero;
}
return Converter.ToPython(value, itemType);
}
// Multi-dimensional arrays can be indexed a la: list[1, 2, 3].
if (!Runtime.PyTuple_Check(idx))
{
Exceptions.SetError(Exceptions.TypeError, "invalid index value");
return IntPtr.Zero;
}
int count = Runtime.PyTuple_Size(idx);
var args = new int[count];
for (var i = 0; i < count; i++)
{
IntPtr op = Runtime.PyTuple_GetItem(idx, i);
index = Runtime.PyInt_AsLong(op);
if (Exceptions.ErrorOccurred())
{
return Exceptions.RaiseTypeError("invalid index value");
}
if (index < 0)
{
index = items.GetLength(i) + index;
}
args.SetValue(index, i);
}
try
{
value = items.GetValue(args);
}
catch (IndexOutOfRangeException)
{
Exceptions.SetError(Exceptions.IndexError, "array index out of range");
return IntPtr.Zero;
}
return Converter.ToPython(value, itemType);
}
/// <summary>
/// Implements __setitem__ for array types.
/// </summary>
public static int mp_ass_subscript(IntPtr ob, IntPtr idx, IntPtr v)
{
var obj = (CLRObject)GetManagedObject(ob);
var items = obj.inst as Array;
Type itemType = obj.inst.GetType().GetElementType();
int rank = items.Rank;
int index;
object value;
if (items.IsReadOnly)
{
Exceptions.RaiseTypeError("array is read-only");
return -1;
}
if (!Converter.ToManaged(v, itemType, out value, true))
{
return -1;
}
if (rank == 1)
{
index = Runtime.PyInt_AsLong(idx);
if (Exceptions.ErrorOccurred())
{
Exceptions.RaiseTypeError("invalid index value");
return -1;
}
if (index < 0)
{
index = items.Length + index;
}
try
{
items.SetValue(value, index);
}
catch (IndexOutOfRangeException)
{
Exceptions.SetError(Exceptions.IndexError, "array index out of range");
return -1;
}
return 0;
}
if (!Runtime.PyTuple_Check(idx))
{
Exceptions.RaiseTypeError("invalid index value");
return -1;
}
int count = Runtime.PyTuple_Size(idx);
var args = new int[count];
for (var i = 0; i < count; i++)
{
IntPtr op = Runtime.PyTuple_GetItem(idx, i);
index = Runtime.PyInt_AsLong(op);
if (Exceptions.ErrorOccurred())
{
Exceptions.RaiseTypeError("invalid index value");
return -1;
}
if (index < 0)
{
index = items.GetLength(i) + index;
}
args.SetValue(index, i);
}
try
{
items.SetValue(value, args);
}
catch (IndexOutOfRangeException)
{
Exceptions.SetError(Exceptions.IndexError, "array index out of range");
return -1;
}
return 0;
}
/// <summary>
/// Implements __contains__ for array types.
/// </summary>
public static int sq_contains(IntPtr ob, IntPtr v)
{
var obj = (CLRObject)GetManagedObject(ob);
Type itemType = obj.inst.GetType().GetElementType();
var items = obj.inst as IList;
object value;
if (!Converter.ToManaged(v, itemType, out value, false))
{
return 0;
}
if (items.Contains(value))
{
return 1;
}
return 0;
}
/// <summary>
/// Implements __len__ for array types.
/// </summary>
public static int mp_length(IntPtr ob)
{
var self = (CLRObject)GetManagedObject(ob);
var items = self.inst as Array;
return items.Length;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using FluentNHibernate.Mapping;
using FluentNHibernate.Mapping.Providers;
using FluentNHibernate.MappingModel;
using FluentNHibernate.MappingModel.ClassBased;
using FluentNHibernate.Visitors;
using NUnit.Framework;
namespace FluentNHibernate.Testing.PersistenceModelTests
{
[TestFixture]
public class SeparateSubclassVisitorFixture
{
private IList<IIndeterminateSubclassMappingProvider> providers;
private ClassMapping fooMapping;
[SetUp]
public void SetUp()
{
providers = new List<IIndeterminateSubclassMappingProvider>();
}
[Test]
public void Should_add_subclass_that_implements_the_parent_interface()
{
/* The Parent is the IFoo interface; the desired result
 * of this test is the inclusion of Foo<T> through the
 * GenericFooMap<T> subclass mapping.
 */
fooMapping = ((IMappingProvider)new FooMap()).GetClassMapping();
providers.Add(new StringFooMap());
var sut = CreateSut();
sut.ProcessClass(fooMapping);
Assert.AreEqual(1, fooMapping.Subclasses.Count());
Assert.AreEqual(1, fooMapping.Subclasses.Where(sub => sub.Type.Equals(typeof(Foo<string>))).Count());
}
[Test]
public void Should_add_subclass_that_implements_the_parent_base()
{
/* The Parent is the Base class; the desired result
 * of this test is the inclusion of Foo<T> through the
 * GenericFooMap<T> subclass mapping.
 */
fooMapping = ((IMappingProvider)new BaseMap()).GetClassMapping();
providers.Add(new StringFooMap());
var sut = CreateSut();
sut.ProcessClass(fooMapping);
Assert.AreEqual(1, fooMapping.Subclasses.Count());
Assert.AreEqual(1, fooMapping.Subclasses.Where(sub => sub.Type.Equals(typeof(Foo<string>))).Count());
}
[Test]
public void Should_not_add_subclassmap_that_does_not_implement_parent_interface()
{
/* The Parent is the IFoo interface; the desired result
 * of this test is the exclusion of the StandAlone class,
 * since it does not implement the interface.
 */
fooMapping = ((IMappingProvider)new FooMap()).GetClassMapping();
providers.Add(new StandAloneMap());
var sut = CreateSut();
sut.ProcessClass(fooMapping);
Assert.AreEqual(0, fooMapping.Subclasses.Count());
}
[Test]
public void Should_not_add_subclassmap_that_does_not_implement_parent_base()
{
/* The Parent is the Base class; the desired result
 * of this test is the exclusion of the StandAlone class,
 * since it does not derive from that class.
 */
fooMapping = ((IMappingProvider)new BaseMap()).GetClassMapping();
providers.Add(new StandAloneMap());
var sut = CreateSut();
sut.ProcessClass(fooMapping);
Assert.AreEqual(0, fooMapping.Subclasses.Count());
}
[Test]
public void Should_not_add_subclassmap_that_implements_a_subclass_of_the_parent_interface()
{
/* The Parent is the IFoo interface; the desired result
 * of this test is the inclusion of the BaseImpl class and
 * the exclusion of the Foo<T> class, since Foo<T> derives
 * from BaseImpl, which already implements IFoo.
 */
fooMapping = ((IMappingProvider)new FooMap()).GetClassMapping();
providers.Add(new BaseImplMap());
providers.Add(new StringFooMap());
var sut = CreateSut();
sut.ProcessClass(fooMapping);
Assert.AreEqual(1, fooMapping.Subclasses.Count());
Assert.AreEqual(1, fooMapping.Subclasses.Where(sub => sub.Type.Equals(typeof(BaseImpl))).Count());
}
[Test]
public void Should_not_add_subclassmap_that_implements_a_subclass_of_the_parent_base()
{
/* The Parent is the Base class; the desired result
 * of this test is the inclusion of the BaseImpl class and
 * the exclusion of the Foo<T> class, since Foo<T> derives
 * from BaseImpl, which already derives from Base.
 */
fooMapping = ((IMappingProvider)new BaseMap()).GetClassMapping();
providers.Add(new BaseImplMap());
providers.Add(new StringFooMap());
var sut = CreateSut();
sut.ProcessClass(fooMapping);
Assert.AreEqual(1, fooMapping.Subclasses.Count());
Assert.AreEqual(1, fooMapping.Subclasses.Where(sub => sub.Type.Equals(typeof(BaseImpl))).Count());
}
[Test]
public void Should_add_explicit_extend_subclasses_to_their_parent()
{
fooMapping = ((IMappingProvider)new ExtendsParentMap()).GetClassMapping();
providers.Add(new ExtendsChildMap());
var sut = CreateSut();
sut.ProcessClass(fooMapping);
Assert.AreEqual(1, fooMapping.Subclasses.Count());
Assert.AreEqual(1, fooMapping.Subclasses.Where(sub => sub.Type.Equals(typeof(ExtendsChild))).Count());
}
[Test]
public void Should_choose_UnionSubclass_when_the_class_mapping_IsUnionSubclass_is_true()
{
fooMapping = ((IMappingProvider)new BaseMap()).GetClassMapping();
fooMapping.Set(x => x.IsUnionSubclass, Layer.Defaults, true);
providers.Add(new StringFooMap());
var sut = CreateSut();
sut.ProcessClass(fooMapping);
fooMapping.Subclasses.First().SubclassType.ShouldEqual(SubclassType.UnionSubclass);
}
private SeparateSubclassVisitor CreateSut()
{
return new SeparateSubclassVisitor(providers);
}
private interface IFoo
{ }
private class Base : IFoo
{ }
private abstract class BaseImpl : Base
{ }
private class Foo<T> : BaseImpl, IFoo
{ }
private class FooMap : ClassMap<IFoo>
{ }
private class BaseMap : ClassMap<Base>
{ }
private class BaseImplMap : SubclassMap<BaseImpl>
{ }
private abstract class GenericFooMap<T> : SubclassMap<Foo<T>>
{ }
private class StringFooMap : GenericFooMap<string>
{ }
private interface IStand
{ }
private class StandAlone : IStand
{ }
private class StandAloneMap : SubclassMap<StandAlone>
{ }
class ExtendsParent
{}
class ExtendsChild
{}
class ExtendsParentMap : ClassMap<ExtendsParent>
{}
class ExtendsChildMap : SubclassMap<ExtendsChild>
{
public ExtendsChildMap()
{
Extends<ExtendsParent>();
}
}
}
}
| |
// Python Tools for Visual Studio
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.NetworkInformation;
using System.Net.Sockets;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Xml.XPath;
using Microsoft.PythonTools.Analysis;
using Microsoft.PythonTools.Infrastructure;
using Microsoft.PythonTools.Ipc.Json;
using Microsoft.VisualStudio.TestPlatform.ObjectModel;
using Microsoft.VisualStudio.TestPlatform.ObjectModel.Adapter;
using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging;
using TP = Microsoft.PythonTools.TestAdapter.TestProtocol;
namespace Microsoft.PythonTools.TestAdapter {
[SuppressMessage("Microsoft.Design", "CA1001:TypesThatOwnDisposableFieldsShouldBeDisposable",
Justification = "object owned by VS")]
[ExtensionUri(PythonConstants.TestExecutorUriString)]
class TestExecutor : ITestExecutor {
private static readonly Guid PythonRemoteDebugPortSupplierUnsecuredId = new Guid("{FEB76325-D127-4E02-B59D-B16D93D46CF5}");
private static readonly Guid PythonDebugEngineGuid = new Guid("EC1375B7-E2CE-43E8-BF75-DC638DE1F1F9");
private static readonly Guid NativeDebugEngineGuid = new Guid("3B476D35-A401-11D2-AAD4-00C04F990171");
private static readonly string TestLauncherPath = PythonToolsInstallPath.GetFile("visualstudio_py_testlauncher.py");
internal static readonly Uri PythonCodeCoverageUri = new Uri("datacollector://Microsoft/PythonCodeCoverage/1.0");
private readonly ManualResetEvent _cancelRequested = new ManualResetEvent(false);
private readonly VisualStudioProxy _app;
public TestExecutor() {
_app = VisualStudioProxy.FromEnvironmentVariable(PythonConstants.PythonToolsProcessIdEnvironmentVariable);
}
public void Cancel() {
_cancelRequested.Set();
}
public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle) {
ValidateArg.NotNull(sources, "sources");
ValidateArg.NotNull(runContext, "runContext");
ValidateArg.NotNull(frameworkHandle, "frameworkHandle");
_cancelRequested.Reset();
var executorUri = new Uri(PythonConstants.TestExecutorUriString);
var tests = new List<TestCase>();
var doc = new XPathDocument(new StringReader(runContext.RunSettings.SettingsXml));
foreach (var t in TestReader.ReadTests(doc, new HashSet<string>(sources, StringComparer.OrdinalIgnoreCase), m => {
frameworkHandle?.SendMessage(TestMessageLevel.Warning, m);
})) {
tests.Add(new TestCase(t.FullyQualifiedName, executorUri, t.SourceFile) {
DisplayName = t.DisplayName,
LineNumber = t.LineNo,
CodeFilePath = t.SourceFile
});
}
if (_cancelRequested.WaitOne(0)) {
return;
}
RunTestCases(tests, runContext, frameworkHandle);
}
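// Sketch of the RunSettings fragment consumed below; the element and attribute names come from the
// XPath queries in this method, while the concrete values are illustrative assumptions:
//
//   <RunSettings>
//     <Python>
//       <TestCases>
//         <Project home="C:\Project" workingDir="C:\Project" interpreter="C:\Python36\python.exe"
//                  pathEnv="PYTHONPATH" nativeDebugging="false">
//           <Environment>
//             <Variable name="MY_SETTING" value="1" />
//           </Environment>
//           <SearchPaths>
//             <Search value="C:\Project\src" />
//           </SearchPaths>
//           <Test file="C:\Project\tests\test_example.py" />
//         </Project>
//       </TestCases>
//     </Python>
//   </RunSettings>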
private Dictionary<string, PythonProjectSettings> GetSourceToSettings(IRunSettings settings) {
var doc = new XPathDocument(new StringReader(settings.SettingsXml));
XPathNodeIterator nodes = doc.CreateNavigator().Select("/RunSettings/Python/TestCases/Project");
Dictionary<string, PythonProjectSettings> res = new Dictionary<string, PythonProjectSettings>();
foreach (XPathNavigator project in nodes) {
PythonProjectSettings projSettings = new PythonProjectSettings(
project.GetAttribute("home", ""),
project.GetAttribute("workingDir", ""),
project.GetAttribute("interpreter", ""),
project.GetAttribute("pathEnv", ""),
project.GetAttribute("nativeDebugging", "").IsTrue()
);
foreach (XPathNavigator environment in project.Select("Environment/Variable")) {
projSettings.Environment[environment.GetAttribute("name", "")] = environment.GetAttribute("value", "");
}
string djangoSettings = project.GetAttribute("djangoSettingsModule", "");
if (!String.IsNullOrWhiteSpace(djangoSettings)) {
projSettings.Environment["DJANGO_SETTINGS_MODULE"] = djangoSettings;
}
foreach (XPathNavigator searchPath in project.Select("SearchPaths/Search")) {
projSettings.SearchPath.Add(searchPath.GetAttribute("value", ""));
}
foreach (XPathNavigator test in project.Select("Test")) {
string testFile = test.GetAttribute("file", "");
Debug.Assert(!string.IsNullOrWhiteSpace(testFile));
res[testFile] = projSettings;
}
}
return res;
}
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle) {
ValidateArg.NotNull(tests, "tests");
ValidateArg.NotNull(runContext, "runContext");
ValidateArg.NotNull(frameworkHandle, "frameworkHandle");
_cancelRequested.Reset();
RunTestCases(tests, runContext, frameworkHandle);
}
private void RunTestCases(
IEnumerable<TestCase> tests,
IRunContext runContext,
IFrameworkHandle frameworkHandle
) {
bool codeCoverage = EnableCodeCoverage(runContext);
string covPath = null;
if (codeCoverage) {
covPath = GetCoveragePath(tests);
}
// .py file path -> project settings
var sourceToSettings = GetSourceToSettings(runContext.RunSettings);
foreach (var testGroup in tests.GroupBy(x => sourceToSettings[x.CodeFilePath])) {
if (_cancelRequested.WaitOne(0)) {
break;
}
using (var runner = new TestRunner(
frameworkHandle,
runContext,
testGroup,
covPath,
testGroup.Key,
_app,
_cancelRequested
)) {
runner.Run();
}
}
if (codeCoverage) {
if (File.Exists(covPath + ".xml")) {
var set = new AttachmentSet(PythonCodeCoverageUri, "CodeCoverage");
set.Attachments.Add(
new UriDataAttachment(new Uri(covPath + ".xml"), "Coverage Data")
);
frameworkHandle.RecordAttachments(new[] { set });
File.Delete(covPath);
} else {
frameworkHandle.SendMessage(TestMessageLevel.Warning, Strings.Test_NoCoverageProduced);
}
}
}
private static string GetCoveragePath(IEnumerable<TestCase> tests) {
string bestFile = null, bestClass = null, bestMethod = null;
// Try and generate a friendly name for the coverage report. We use
// the filename, class, and method. We include each one if we're
// running from a single filename/class/method. When we have multiple
// we drop the identifying names. If we have multiple files we
// go to the top level directory... If all else fails we do "pycov".
foreach (var test in tests) {
string testFile, testClass, testMethod;
TestReader.ParseFullyQualifiedTestName(
test.FullyQualifiedName,
out testFile,
out testClass,
out testMethod
);
bestFile = UpdateBestFile(bestFile, test.CodeFilePath);
if (bestFile != test.CodeFilePath) {
// Different files, don't include class/methods even
// if they happen to be the same.
bestClass = bestMethod = "";
}
bestClass = UpdateBest(bestClass, testClass);
bestMethod = UpdateBest(bestMethod, testMethod);
}
string filename = "";
if (!String.IsNullOrWhiteSpace(bestFile)) {
if (ModulePath.IsPythonSourceFile(bestFile)) {
filename = ModulePath.FromFullPath(bestFile).ModuleName;
} else {
filename = Path.GetFileName(bestFile);
}
} else {
filename = "pycov";
}
if (!String.IsNullOrWhiteSpace(bestClass)) {
filename += "_" + bestClass;
}
if (!String.IsNullOrWhiteSpace(bestMethod)) {
filename += "_" + bestMethod;
}
filename += "_" + DateTime.Now.ToString("s").Replace(':', '_');
return Path.Combine(Path.GetTempPath(), filename);
}
private static string UpdateBest(string best, string test) {
if (best == null || best == test) {
best = test;
} else if (!string.IsNullOrEmpty(best)) {
best = "";
}
return best;
}
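// Behaviour sketch for UpdateBestFile (not from the original source; the paths are illustrative):
//   UpdateBestFile(null, @"C:\proj\tests\test_a.py")                        -> @"C:\proj\tests\test_a.py"
//   UpdateBestFile(@"C:\proj\tests\test_a.py", @"C:\proj\tests\test_b.py")  -> @"C:\proj\tests"  (common directory)
//   UpdateBestFile("", @"C:\proj\tests\test_b.py")                          -> ""                (already ambiguous)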
internal static string UpdateBestFile(string bestFile, string testFile) {
if (bestFile == null || bestFile == testFile) {
bestFile = testFile;
} else if (!string.IsNullOrEmpty(bestFile)) {
// Get common directory name, trim to the last \\ where we
// have things in common
int lastSlash = 0;
for (int i = 0; i < bestFile.Length && i < testFile.Length; i++) {
if (bestFile[i] != testFile[i]) {
bestFile = bestFile.Substring(0, lastSlash);
break;
} else if (bestFile[i] == '\\' || bestFile[i] == '/') {
lastSlash = i;
}
}
}
return bestFile;
}
private static bool EnableCodeCoverage(IRunContext runContext) {
var doc = new XPathDocument(new StringReader(runContext.RunSettings.SettingsXml));
XPathNodeIterator nodes = doc.CreateNavigator().Select("/RunSettings/Python/EnableCoverage");
bool enableCoverage;
if (nodes.MoveNext()) {
if (Boolean.TryParse(nodes.Current.Value, out enableCoverage)) {
return enableCoverage;
}
}
return false;
}
/// <summary>
/// Returns true if this is a dry run. Dry runs require a
/// <DryRun value="true" /> element under RunSettings/Python.
/// </summary>
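/// <example>
/// A .runsettings fragment that enables dry runs (sketch):
/// <code>
/// <RunSettings>
///   <Python>
///     <DryRun value="true" />
///   </Python>
/// </RunSettings>
/// </code>
/// </example>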
private static bool IsDryRun(IRunSettings settings) {
var doc = new XPathDocument(new StringReader(settings.SettingsXml));
try {
var node = doc.CreateNavigator().SelectSingleNode("/RunSettings/Python/DryRun[@value='true']");
return node != null;
} catch (Exception ex) {
Debug.Fail(ex.ToUnhandledExceptionMessage(typeof(TestExecutor)));
return false;
}
}
/// <summary>
/// Returns true if the console should be shown. This is the default
/// unless a <ShowConsole value="false" /> element exists under
/// RunSettings/Python.
/// </summary>
private static bool ShouldShowConsole(IRunSettings settings) {
var doc = new XPathDocument(new StringReader(settings.SettingsXml));
try {
var node = doc.CreateNavigator().SelectSingleNode("/RunSettings/Python/ShowConsole[@value='false']");
return node == null;
} catch (Exception ex) {
Debug.Fail(ex.ToUnhandledExceptionMessage(typeof(TestExecutor)));
return true;
}
}
sealed class TestRunner : IDisposable {
private readonly IFrameworkHandle _frameworkHandle;
private readonly IRunContext _context;
private readonly TestCase[] _tests;
private readonly string _codeCoverageFile;
private readonly PythonProjectSettings _settings;
private readonly PythonDebugMode _debugMode;
private readonly VisualStudioProxy _app;
private readonly string _searchPaths;
private readonly Dictionary<string, string> _env;
private readonly string _debugSecret;
private readonly int _debugPort;
private readonly ManualResetEvent _cancelRequested;
private readonly AutoResetEvent _connected = new AutoResetEvent(false);
private readonly AutoResetEvent _done = new AutoResetEvent(false);
private Connection _connection;
private readonly Socket _socket;
private readonly StringBuilder _stdOut = new StringBuilder(), _stdErr = new StringBuilder();
private TestCase _curTest;
private readonly bool _dryRun, _showConsole;
public TestRunner(
IFrameworkHandle frameworkHandle,
IRunContext runContext,
IEnumerable<TestCase> tests,
string codeCoverageFile,
PythonProjectSettings settings,
VisualStudioProxy app,
ManualResetEvent cancelRequested) {
_frameworkHandle = frameworkHandle;
_context = runContext;
_tests = tests.ToArray();
_codeCoverageFile = codeCoverageFile;
_settings = settings;
_app = app;
_cancelRequested = cancelRequested;
_dryRun = IsDryRun(runContext.RunSettings);
_showConsole = ShouldShowConsole(runContext.RunSettings);
_env = new Dictionary<string, string>();
_debugMode = PythonDebugMode.None;
if (runContext.IsBeingDebugged && _app != null) {
_debugMode = settings.EnableNativeCodeDebugging ? PythonDebugMode.PythonAndNative : PythonDebugMode.PythonOnly;
}
_searchPaths = GetSearchPaths(tests, settings);
if (_debugMode == PythonDebugMode.PythonOnly) {
var secretBuffer = new byte[24];
RandomNumberGenerator.Create().GetNonZeroBytes(secretBuffer);
_debugSecret = Convert.ToBase64String(secretBuffer)
.Replace('+', '-')
.Replace('/', '_')
.TrimEnd('=');
_debugPort = GetFreePort();
}
_socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.IP);
_socket.Bind(new IPEndPoint(IPAddress.Loopback, 0));
_socket.Listen(0);
_socket.BeginAccept(AcceptConnection, _socket);
}
[SuppressMessage("Microsoft.Usage", "CA2213:DisposableFieldsShouldBeDisposed", MessageId = "_connection")]
public void Dispose() {
_connected.Dispose();
_done.Dispose();
_connection?.Dispose();
_socket.Dispose();
}
private static Task RequestHandler(RequestArgs arg1, Func<Response, Task> arg2) {
throw new NotImplementedException();
}
private void ConnectionReceivedEvent(object sender, EventReceivedEventArgs e) {
switch (e.Name) {
case TP.ResultEvent.Name:
var result = (TP.ResultEvent)e.Event;
TestOutcome outcome = TestOutcome.None;
switch (result.outcome) {
case "passed": outcome = TestOutcome.Passed; break;
case "failed": outcome = TestOutcome.Failed; break;
case "skipped": outcome = TestOutcome.Skipped; break;
}
var testResult = new TestResult(_curTest);
RecordEnd(
_frameworkHandle,
_curTest,
testResult,
_stdOut.ToString(),
_stdErr.ToString(),
outcome,
result
);
_stdOut.Clear();
_stdErr.Clear();
break;
case TP.StartEvent.Name:
var start = (TP.StartEvent)e.Event;
_curTest = null;
foreach (var test in GetTestCases()) {
if (test.Key == start.test) {
_curTest = test.Value;
break;
}
}
if (_curTest != null) {
_frameworkHandle.RecordStart(_curTest);
} else {
Warning(Strings.Test_UnexpectedResult.FormatUI(start.classname, start.method));
}
break;
case TP.StdErrEvent.Name:
var err = (TP.StdErrEvent)e.Event;
_stdErr.Append(err.content);
break;
case TP.StdOutEvent.Name:
var outp = (TP.StdOutEvent)e.Event;
_stdOut.Append(outp.content);
break;
case TP.DoneEvent.Name:
_done.Set();
break;
}
}
private string GetSearchPaths(IEnumerable<TestCase> tests, PythonProjectSettings settings) {
var paths = settings.SearchPath.ToList();
HashSet<string> knownModulePaths = new HashSet<string>();
foreach (var test in tests) {
string testFilePath = PathUtils.GetAbsoluteFilePath(settings.ProjectHome, test.CodeFilePath);
var modulePath = ModulePath.FromFullPath(testFilePath);
if (knownModulePaths.Add(modulePath.LibraryPath)) {
paths.Insert(0, modulePath.LibraryPath);
}
}
paths.Insert(0, settings.WorkingDirectory);
if (_debugMode == PythonDebugMode.PythonOnly) {
paths.Insert(0, PtvsdSearchPath);
}
string searchPaths = string.Join(
";",
paths.Where(Directory.Exists).Distinct(StringComparer.OrdinalIgnoreCase)
);
return searchPaths;
}
private void AcceptConnection(IAsyncResult iar) {
Socket socket;
var socketSource = ((Socket)iar.AsyncState);
try {
socket = socketSource.EndAccept(iar);
} catch (SocketException ex) {
Debug.WriteLine("DebugConnectionListener socket failed");
Debug.WriteLine(ex);
return;
} catch (ObjectDisposedException) {
Debug.WriteLine("DebugConnectionListener socket closed");
return;
}
var stream = new NetworkStream(socket, ownsSocket: true);
_connection = new Connection(
new MemoryStream(),
stream,
RequestHandler,
TP.RegisteredTypes
);
_connection.EventReceived += ConnectionReceivedEvent;
_connection.StartProcessing();
_connected.Set();
}
public void Run() {
if (!File.Exists(_settings.InterpreterPath)) {
Error(Strings.Test_InterpreterDoesNotExist.FormatUI(_settings.InterpreterPath));
return;
}
try {
DetachFromSillyManagedProcess();
var pythonPath = InitializeEnvironment();
string testList = null;
// For a small set of tests, we'll pass them on the command
// line. Once we exceed a certain (arbitrary) number, create
// a test list on disk so that we do not overflow the
// 32K argument limit.
if (_tests.Length > 5) {
testList = CreateTestList();
}
var arguments = GetArguments(testList);
////////////////////////////////////////////////////////////
// Do the test run
using (var proc = ProcessOutput.Run(
_settings.InterpreterPath,
arguments,
_settings.WorkingDirectory,
_env,
_showConsole,
null
)) {
bool killed = false;
DebugInfo("cd " + _settings.WorkingDirectory);
DebugInfo("set " + pythonPath.Key + "=" + pythonPath.Value);
DebugInfo(proc.Arguments);
_connected.WaitOne();
if (proc.ExitCode.HasValue) {
// Process has already exited
proc.Wait();
Error(Strings.Test_FailedToStartExited);
if (proc.StandardErrorLines.Any()) {
foreach (var line in proc.StandardErrorLines) {
Error(line);
}
}
}
if (_debugMode != PythonDebugMode.None) {
try {
if (_debugMode == PythonDebugMode.PythonOnly) {
string qualifierUri = string.Format("tcp://{0}@localhost:{1}", _debugSecret, _debugPort);
while (!_app.AttachToProcess(proc, PythonRemoteDebugPortSupplierUnsecuredId, qualifierUri)) {
if (proc.Wait(TimeSpan.FromMilliseconds(500))) {
break;
}
}
} else {
var engines = new[] { PythonDebugEngineGuid, NativeDebugEngineGuid };
while (!_app.AttachToProcess(proc, engines)) {
if (proc.Wait(TimeSpan.FromMilliseconds(500))) {
break;
}
}
}
} catch (COMException ex) {
Error(Strings.Test_ErrorConnecting);
DebugError(ex.ToString());
try {
proc.Kill();
} catch (InvalidOperationException) {
// Process has already exited
}
killed = true;
}
}
// https://pytools.codeplex.com/workitem/2290
// Check that proc.WaitHandle was not null to avoid crashing if
// a test fails to start running. We will report failure and
// send the error message from stdout/stderr.
var handles = new WaitHandle[] { _cancelRequested, proc.WaitHandle, _done };
if (handles[1] == null) {
killed = true;
}
if (!killed) {
switch (WaitHandle.WaitAny(handles)) {
case 0:
// We've been cancelled
try {
proc.Kill();
} catch (InvalidOperationException) {
// Process has already exited
}
killed = true;
break;
case 1:
// The process has exited, give a chance for our comm channel
// to be flushed...
handles = new WaitHandle[] { _cancelRequested, _done };
if (WaitHandle.WaitAny(handles, 10000) != 1) {
Warning(Strings.Test_NoTestFinishedNotification);
}
break;
case 2:
// We received the done event
break;
}
}
}
if (File.Exists(testList)) {
try {
File.Delete(testList);
} catch (IOException) {
}
}
} catch (Exception e) {
Error(e.ToString());
}
}
[Conditional("DEBUG")]
private void DebugInfo(string message) {
_frameworkHandle.SendMessage(TestMessageLevel.Informational, message);
}
[Conditional("DEBUG")]
private void DebugError(string message) {
_frameworkHandle.SendMessage(TestMessageLevel.Error, message);
}
private void Info(string message) {
_frameworkHandle.SendMessage(TestMessageLevel.Informational, message);
}
private void Error(string message) {
_frameworkHandle.SendMessage(TestMessageLevel.Error, message);
}
private void Warning(string message) {
_frameworkHandle.SendMessage(TestMessageLevel.Warning, message);
}
private void DetachFromSillyManagedProcess() {
var dte = _app != null ? _app.GetDTE() : null;
if (dte != null && _debugMode != PythonDebugMode.None) {
dte.Debugger.DetachAll();
}
}
private KeyValuePair<string, string> InitializeEnvironment() {
var pythonPathVar = _settings.PathEnv;
var pythonPath = _searchPaths;
if (!string.IsNullOrWhiteSpace(pythonPathVar)) {
_env[pythonPathVar] = pythonPath;
}
foreach (var envVar in _settings.Environment) {
_env[envVar.Key] = envVar.Value;
}
return new KeyValuePair<string, string>(pythonPathVar, pythonPath);
}
private IEnumerable<KeyValuePair<string, TestCase>> GetTestCases() {
var moduleCache = new Dictionary<string, ModulePath>();
foreach (var test in _tests) {
string testFile, testClass, testMethod;
TestReader.ParseFullyQualifiedTestName(
test.FullyQualifiedName,
out testFile,
out testClass,
out testMethod
);
ModulePath module;
if (!moduleCache.TryGetValue(testFile, out module)) {
string testFilePath = PathUtils.GetAbsoluteFilePath(_settings.ProjectHome, testFile);
moduleCache[testFile] = module = ModulePath.FromFullPath(testFilePath);
}
yield return new KeyValuePair<string, TestCase>("{0}.{1}.{2}".FormatInvariant(
module.ModuleName,
testClass,
testMethod
), test);
}
}
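// Illustrative sketch (hypothetical test, not from the original source): a test whose
// FullyQualifiedName parses into file "tests\test_math.py", class "MathTests" and method
// "test_add" is keyed as "test_math.MathTests.test_add" (assuming "tests" is not a package);
// the same key format is matched against TP.StartEvent.test when results come back.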
private string CreateTestList() {
var testList = Path.GetTempFileName();
using (var writer = new StreamWriter(testList, false, new UTF8Encoding(false))) {
foreach (var test in GetTestCases()) {
writer.WriteLine(test.Key);
}
}
return testList;
}
private string[] GetArguments(string testList = null) {
var arguments = new List<string>();
arguments.Add(TestLauncherPath);
if (string.IsNullOrEmpty(testList)) {
foreach (var test in GetTestCases()) {
arguments.Add("-t");
arguments.Add(test.Key);
}
} else {
arguments.Add("--test-list");
arguments.Add(testList);
}
if (_dryRun) {
arguments.Add("--dry-run");
}
if (_codeCoverageFile != null) {
arguments.Add("--coverage");
arguments.Add(_codeCoverageFile);
}
if (_debugMode == PythonDebugMode.PythonOnly) {
arguments.AddRange(new[] {
"-s", _debugSecret,
"-p", _debugPort.ToString()
});
} else if (_debugMode == PythonDebugMode.PythonAndNative) {
arguments.Add("-x");
}
arguments.Add("-r");
arguments.Add(((IPEndPoint)_socket.LocalEndPoint).Port.ToString());
return arguments.ToArray();
}
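// Illustrative sketch (hypothetical values, not from the original source): for a couple of
// tests with no debugging, coverage, or dry run, the launcher is invoked roughly as
//   <TestLauncherPath> -t test_math.MathTests.test_add -t test_math.MathTests.test_sub -r 54321
// where 54321 stands in for the local port of _socket that results are reported back to.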
}
private static void RecordEnd(IFrameworkHandle frameworkHandle, TestCase test, TestResult result, string stdout, string stderr, TestOutcome outcome, TP.ResultEvent resultInfo) {
result.EndTime = DateTimeOffset.Now;
result.Duration = result.EndTime - result.StartTime;
result.Outcome = outcome;
// Replace \n with \r\n to be more friendly when copying output...
stdout = stdout.Replace("\r\n", "\n").Replace("\n", "\r\n");
stderr = stderr.Replace("\r\n", "\n").Replace("\n", "\r\n");
result.Messages.Add(new TestResultMessage(TestResultMessage.StandardOutCategory, stdout));
result.Messages.Add(new TestResultMessage(TestResultMessage.StandardErrorCategory, stderr));
result.Messages.Add(new TestResultMessage(TestResultMessage.AdditionalInfoCategory, stderr));
if (resultInfo.traceback != null) {
result.ErrorStackTrace = resultInfo.traceback;
result.Messages.Add(new TestResultMessage(TestResultMessage.DebugTraceCategory, resultInfo.traceback));
}
if (resultInfo.message != null) {
result.ErrorMessage = resultInfo.message;
}
frameworkHandle.RecordResult(result);
frameworkHandle.RecordEnd(test, outcome);
}
class TestReceiver : ITestCaseDiscoverySink {
public List<TestCase> Tests { get; private set; }
public TestReceiver() {
Tests = new List<TestCase>();
}
public void SendTestCase(TestCase discoveredTest) {
Tests.Add(discoveredTest);
}
}
sealed class PythonProjectSettings : IEquatable<PythonProjectSettings> {
public readonly string ProjectHome, WorkingDirectory, InterpreterPath, PathEnv;
public readonly bool EnableNativeCodeDebugging;
public readonly List<string> SearchPath;
public readonly Dictionary<string, string> Environment;
public PythonProjectSettings(string projectHome, string workingDir, string interpreter, string pathEnv, bool nativeDebugging) {
ProjectHome = projectHome;
WorkingDirectory = workingDir;
InterpreterPath = interpreter;
PathEnv = pathEnv;
EnableNativeCodeDebugging = nativeDebugging;
SearchPath = new List<string>();
Environment = new Dictionary<string, string>();
}
public override bool Equals(object obj) {
return Equals(obj as PythonProjectSettings);
}
public override int GetHashCode() {
return ProjectHome.GetHashCode() ^
WorkingDirectory.GetHashCode() ^
InterpreterPath.GetHashCode();
}
public bool Equals(PythonProjectSettings other) {
if (other == null) {
return false;
}
if (ProjectHome == other.ProjectHome &&
WorkingDirectory == other.WorkingDirectory &&
InterpreterPath == other.InterpreterPath &&
PathEnv == other.PathEnv &&
EnableNativeCodeDebugging == other.EnableNativeCodeDebugging) {
if (SearchPath.Count == other.SearchPath.Count &&
Environment.Count == other.Environment.Count) {
for (int i = 0; i < SearchPath.Count; i++) {
if (SearchPath[i] != other.SearchPath[i]) {
return false;
}
}
foreach (var keyValue in Environment) {
string value;
if (!other.Environment.TryGetValue(keyValue.Key, out value) ||
value != keyValue.Value) {
return false;
}
}
return true;
}
}
return false;
}
}
enum PythonDebugMode {
None,
PythonOnly,
PythonAndNative
}
private static string PtvsdSearchPath {
get {
return Path.GetDirectoryName(Path.GetDirectoryName(PythonToolsInstallPath.GetFile("ptvsd\\__init__.py")));
}
}
private static int GetFreePort() {
return Enumerable.Range(new Random().Next(49152, 65536), 60000).Except(
from connection in IPGlobalProperties.GetIPGlobalProperties().GetActiveTcpConnections()
select connection.LocalEndPoint.Port
).First();
}
}
}
#if !UNITY_WINRT || UNITY_EDITOR || (UNITY_WP8 && !UNITY_WP_8_1)
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.Linq;
using System.Reflection;
using System.Runtime.Serialization;
using PlayFab.Json.Linq;
using PlayFab.Json.Utilities;
#if UNITY_IPHONE || UNITY_IOS || UNITY_WEBGL || UNITY_XBOXONE || UNITY_XBOX360 || UNITY_PS4 || UNITY_PS3 || UNITY_WII
using PlayFab.Json.Aot;
#endif
namespace PlayFab.Json.Serialization
{
internal class JsonSerializerInternalReader : JsonSerializerInternalBase
{
private JsonSerializerProxy _internalSerializer;
#if !((UNITY_WP8 || UNITY_WP_8_1) || (UNITY_WINRT && !UNITY_EDITOR))
private JsonFormatterConverter _formatterConverter;
#endif
public JsonSerializerInternalReader(JsonSerializer serializer)
: base(serializer)
{
}
public void Populate(JsonReader reader, object target)
{
ValidationUtils.ArgumentNotNull(target, "target");
Type objectType = target.GetType();
JsonContract contract = Serializer.ContractResolver.ResolveContract(objectType);
if (reader.TokenType == JsonToken.None)
reader.Read();
if (reader.TokenType == JsonToken.StartArray)
{
if (contract is JsonArrayContract)
PopulateList(CollectionUtils.CreateCollectionWrapper(target), reader, null, (JsonArrayContract)contract);
else
throw new JsonSerializationException("Cannot populate JSON array onto type '{0}'.".FormatWith(CultureInfo.InvariantCulture, objectType));
}
else if (reader.TokenType == JsonToken.StartObject)
{
CheckedRead(reader);
string id = null;
if (reader.TokenType == JsonToken.PropertyName && string.Equals(reader.Value.ToString(), JsonTypeReflector.IdPropertyName, StringComparison.Ordinal))
{
CheckedRead(reader);
id = (reader.Value != null) ? reader.Value.ToString() : null;
CheckedRead(reader);
}
if (contract is JsonDictionaryContract)
PopulateDictionary(CollectionUtils.CreateDictionaryWrapper(target), reader, (JsonDictionaryContract)contract, id);
else if (contract is JsonObjectContract)
PopulateObject(target, reader, (JsonObjectContract)contract, id);
else
throw new JsonSerializationException("Cannot populate JSON object onto type '{0}'.".FormatWith(CultureInfo.InvariantCulture, objectType));
}
else
{
throw new JsonSerializationException("Unexpected initial token '{0}' when populating object. Expected JSON object or array.".FormatWith(CultureInfo.InvariantCulture, reader.TokenType));
}
}
private JsonContract GetContractSafe(Type type)
{
if (type == null)
return null;
return Serializer.ContractResolver.ResolveContract(type);
}
private JsonContract GetContractSafe(Type type, object value)
{
if (value == null)
return GetContractSafe(type);
return Serializer.ContractResolver.ResolveContract(value.GetType());
}
public object Deserialize(JsonReader reader, Type objectType)
{
if (reader == null)
throw new ArgumentNullException("reader");
if (reader.TokenType == JsonToken.None && !ReadForType(reader, objectType, null))
return null;
return CreateValueNonProperty(reader, objectType, GetContractSafe(objectType));
}
private JsonSerializerProxy GetInternalSerializer()
{
if (_internalSerializer == null)
_internalSerializer = new JsonSerializerProxy(this);
return _internalSerializer;
}
#if !((UNITY_WP8 || UNITY_WP_8_1) || (UNITY_WINRT && !UNITY_EDITOR))
private JsonFormatterConverter GetFormatterConverter()
{
if (_formatterConverter == null)
_formatterConverter = new JsonFormatterConverter(GetInternalSerializer());
return _formatterConverter;
}
#endif
private JToken CreateJToken(JsonReader reader, JsonContract contract)
{
ValidationUtils.ArgumentNotNull(reader, "reader");
if (contract != null && contract.UnderlyingType == typeof(JRaw))
{
return JRaw.Create(reader);
}
else
{
JToken token;
using (JTokenWriter writer = new JTokenWriter())
{
writer.WriteToken(reader);
token = writer.Token;
}
return token;
}
}
private JToken CreateJObject(JsonReader reader)
{
ValidationUtils.ArgumentNotNull(reader, "reader");
// this is needed because we've already read inside the object, looking for special properties
JToken token;
using (JTokenWriter writer = new JTokenWriter())
{
writer.WriteStartObject();
if (reader.TokenType == JsonToken.PropertyName)
writer.WriteToken(reader, reader.Depth - 1);
else
writer.WriteEndObject();
token = writer.Token;
}
return token;
}
private object CreateValueProperty(JsonReader reader, JsonProperty property, object target, bool gottenCurrentValue, object currentValue)
{
JsonContract contract = GetContractSafe(property.PropertyType, currentValue);
Type objectType = property.PropertyType;
JsonConverter converter = GetConverter(contract, property.MemberConverter);
if (converter != null && converter.CanRead)
{
if (!gottenCurrentValue && target != null && property.Readable)
currentValue = property.ValueProvider.GetValue(target);
return converter.ReadJson(reader, objectType, currentValue, GetInternalSerializer());
}
return CreateValueInternal(reader, objectType, contract, property, currentValue);
}
private object CreateValueNonProperty(JsonReader reader, Type objectType, JsonContract contract)
{
JsonConverter converter = GetConverter(contract, null);
if (converter != null && converter.CanRead)
return converter.ReadJson(reader, objectType, null, GetInternalSerializer());
return CreateValueInternal(reader, objectType, contract, null, null);
}
private object CreateValueInternal(JsonReader reader, Type objectType, JsonContract contract, JsonProperty member, object existingValue)
{
if (contract is JsonLinqContract)
return CreateJToken(reader, contract);
do
{
switch (reader.TokenType)
{
// populate a typed object or generic dictionary/array
// depending upon whether an objectType was supplied
case JsonToken.StartObject:
return CreateObject(reader, objectType, contract, member, existingValue);
case JsonToken.StartArray:
return CreateList(reader, objectType, contract, member, existingValue, null);
case JsonToken.Integer:
case JsonToken.Float:
case JsonToken.Boolean:
case JsonToken.Date:
case JsonToken.Bytes:
return EnsureType(reader.Value, CultureInfo.InvariantCulture, objectType);
case JsonToken.String:
// convert empty string to null automatically for nullable types
if (string.IsNullOrEmpty((string)reader.Value) &&
objectType != null &&
ReflectionUtils.IsNullableType(objectType))
return null;
// string that needs to be returned as a byte array should be base 64 decoded
if (objectType == typeof(byte[]))
return Convert.FromBase64String((string)reader.Value);
return EnsureType(reader.Value, CultureInfo.InvariantCulture, objectType);
case JsonToken.StartConstructor:
case JsonToken.EndConstructor:
string constructorName = reader.Value.ToString();
return constructorName;
case JsonToken.Null:
case JsonToken.Undefined:
if (objectType == typeof(DBNull))
return DBNull.Value;
return EnsureType(reader.Value, CultureInfo.InvariantCulture, objectType);
case JsonToken.Raw:
return new JRaw((string)reader.Value);
case JsonToken.Comment:
// ignore
break;
default:
throw new JsonSerializationException("Unexpected token while deserializing object: " + reader.TokenType);
}
} while (reader.Read());
throw new JsonSerializationException("Unexpected end when deserializing object.");
}
private JsonConverter GetConverter(JsonContract contract, JsonConverter memberConverter)
{
JsonConverter converter = null;
if (memberConverter != null)
{
// member attribute converter
converter = memberConverter;
}
else if (contract != null)
{
JsonConverter matchingConverter;
if (contract.Converter != null)
// class attribute converter
converter = contract.Converter;
else if ((matchingConverter = Serializer.GetMatchingConverter(contract.UnderlyingType)) != null)
// passed in converters
converter = matchingConverter;
else if (contract.InternalConverter != null)
// internally specified converter
converter = contract.InternalConverter;
}
return converter;
}
private object CreateObject(JsonReader reader, Type objectType, JsonContract contract, JsonProperty member, object existingValue)
{
CheckedRead(reader);
string id = null;
if (reader.TokenType == JsonToken.PropertyName)
{
bool specialProperty;
do
{
string propertyName = reader.Value.ToString();
if (string.Equals(propertyName, JsonTypeReflector.RefPropertyName, StringComparison.Ordinal))
{
CheckedRead(reader);
if (reader.TokenType != JsonToken.String && reader.TokenType != JsonToken.Null)
throw new JsonSerializationException("JSON reference {0} property must have a string or null value.".FormatWith(CultureInfo.InvariantCulture, JsonTypeReflector.RefPropertyName));
string reference = (reader.Value != null) ? reader.Value.ToString() : null;
CheckedRead(reader);
if (reference != null)
{
if (reader.TokenType == JsonToken.PropertyName)
throw new JsonSerializationException("Additional content found in JSON reference object. A JSON reference object should only have a {0} property.".FormatWith(CultureInfo.InvariantCulture, JsonTypeReflector.RefPropertyName));
return Serializer.ReferenceResolver.ResolveReference(this, reference);
}
else
{
specialProperty = true;
}
}
else if (string.Equals(propertyName, JsonTypeReflector.TypePropertyName, StringComparison.Ordinal))
{
CheckedRead(reader);
string qualifiedTypeName = reader.Value.ToString();
CheckedRead(reader);
if ((((member != null) ? member.TypeNameHandling : null) ?? Serializer.TypeNameHandling) != TypeNameHandling.None)
{
string typeName;
string assemblyName;
ReflectionUtils.SplitFullyQualifiedTypeName(qualifiedTypeName, out typeName, out assemblyName);
Type specifiedType;
try
{
specifiedType = Serializer.Binder.BindToType(assemblyName, typeName);
}
catch (Exception ex)
{
throw new JsonSerializationException("Error resolving type specified in JSON '{0}'.".FormatWith(CultureInfo.InvariantCulture, qualifiedTypeName), ex);
}
if (specifiedType == null)
throw new JsonSerializationException("Type specified in JSON '{0}' was not resolved.".FormatWith(CultureInfo.InvariantCulture, qualifiedTypeName));
if (objectType != null && !objectType.IsAssignableFrom(specifiedType))
throw new JsonSerializationException("Type specified in JSON '{0}' is not compatible with '{1}'.".FormatWith(CultureInfo.InvariantCulture, specifiedType.AssemblyQualifiedName, objectType.AssemblyQualifiedName));
objectType = specifiedType;
contract = GetContractSafe(specifiedType);
}
specialProperty = true;
}
else if (string.Equals(propertyName, JsonTypeReflector.IdPropertyName, StringComparison.Ordinal))
{
CheckedRead(reader);
id = (reader.Value != null) ? reader.Value.ToString() : null;
CheckedRead(reader);
specialProperty = true;
}
else if (string.Equals(propertyName, JsonTypeReflector.ArrayValuesPropertyName, StringComparison.Ordinal))
{
CheckedRead(reader);
object list = CreateList(reader, objectType, contract, member, existingValue, id);
CheckedRead(reader);
return list;
}
else
{
specialProperty = false;
}
} while (specialProperty
&& reader.TokenType == JsonToken.PropertyName);
}
if (!HasDefinedType(objectType))
return CreateJObject(reader);
if (contract == null)
throw new JsonSerializationException("Could not resolve type '{0}' to a JsonContract.".FormatWith(CultureInfo.InvariantCulture, objectType));
JsonDictionaryContract dictionaryContract = contract as JsonDictionaryContract;
if (dictionaryContract != null)
{
if (existingValue == null)
return CreateAndPopulateDictionary(reader, dictionaryContract, id);
return PopulateDictionary(dictionaryContract.CreateWrapper(existingValue), reader, dictionaryContract, id);
}
JsonObjectContract objectContract = contract as JsonObjectContract;
if (objectContract != null)
{
if (existingValue == null)
return CreateAndPopulateObject(reader, objectContract, id);
return PopulateObject(existingValue, reader, objectContract, id);
}
JsonPrimitiveContract primitiveContract = contract as JsonPrimitiveContract;
if (primitiveContract != null)
{
// if the content is inside $value then read past it
if (reader.TokenType == JsonToken.PropertyName && string.Equals(reader.Value.ToString(), JsonTypeReflector.ValuePropertyName, StringComparison.Ordinal))
{
CheckedRead(reader);
object value = CreateValueInternal(reader, objectType, primitiveContract, member, existingValue);
CheckedRead(reader);
return value;
}
}
#if !((UNITY_WINRT && !UNITY_EDITOR) || (UNITY_WP8 || UNITY_WP_8_1))
JsonISerializableContract serializableContract = contract as JsonISerializableContract;
if (serializableContract != null)
{
return CreateISerializable(reader, serializableContract, id);
}
#endif
throw new JsonSerializationException("Cannot deserialize JSON object into type '{0}'.".FormatWith(CultureInfo.InvariantCulture, objectType));
}
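// Illustrative sketch (assumed example, not from the original source): the special properties
// consumed above correspond to JSON such as
//   { "$id": "1", "$type": "MyApp.Person, MyApp", "Name": "a", "Friend": { "$ref": "1" } }
// "$ref" is resolved through Serializer.ReferenceResolver, "$type" is bound via
// Serializer.Binder, and "$id" registers the created instance for later references.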
private JsonArrayContract EnsureArrayContract(Type objectType, JsonContract contract)
{
if (contract == null)
throw new JsonSerializationException("Could not resolve type '{0}' to a JsonContract.".FormatWith(CultureInfo.InvariantCulture, objectType));
JsonArrayContract arrayContract = contract as JsonArrayContract;
if (arrayContract == null)
throw new JsonSerializationException("Cannot deserialize JSON array into type '{0}'.".FormatWith(CultureInfo.InvariantCulture, objectType));
return arrayContract;
}
private void CheckedRead(JsonReader reader)
{
if (!reader.Read())
throw new JsonSerializationException("Unexpected end when deserializing object.");
}
private object CreateList(JsonReader reader, Type objectType, JsonContract contract, JsonProperty member, object existingValue, string reference)
{
object value;
if (HasDefinedType(objectType))
{
JsonArrayContract arrayContract = EnsureArrayContract(objectType, contract);
if (existingValue == null || objectType == typeof(BitArray))
value = CreateAndPopulateList(reader, reference, arrayContract);
else
value = PopulateList(arrayContract.CreateWrapper(existingValue), reader, reference, arrayContract);
}
else
{
value = CreateJToken(reader, contract);
}
return value;
}
private bool HasDefinedType(Type type)
{
return (type != null && type != typeof(object) && !typeof(JToken).IsAssignableFrom(type));
}
private object EnsureType(object value, CultureInfo culture, Type targetType)
{
if (targetType == null)
return value;
Type valueType = ReflectionUtils.GetObjectType(value);
// type of value and type of target don't match
// attempt to convert value's type to target's type
if (valueType != targetType)
{
try
{
return ConvertUtils.ConvertOrCast(value, culture, targetType);
}
catch (Exception ex)
{
throw new JsonSerializationException("Error converting value {0} to type '{1}'.".FormatWith(CultureInfo.InvariantCulture, FormatValueForPrint(value), targetType), ex);
}
}
return value;
}
private string FormatValueForPrint(object value)
{
if (value == null)
return "{null}";
if (value is string)
return @"""" + value + @"""";
return value.ToString();
}
private void SetPropertyValue(JsonProperty property, JsonReader reader, object target)
{
if (property.Ignored)
{
reader.Skip();
return;
}
object currentValue = null;
bool useExistingValue = false;
bool gottenCurrentValue = false;
ObjectCreationHandling objectCreationHandling =
property.ObjectCreationHandling.GetValueOrDefault(Serializer.ObjectCreationHandling);
if ((objectCreationHandling == ObjectCreationHandling.Auto || objectCreationHandling == ObjectCreationHandling.Reuse)
&& (reader.TokenType == JsonToken.StartArray || reader.TokenType == JsonToken.StartObject)
&& property.Readable)
{
currentValue = property.ValueProvider.GetValue(target);
gottenCurrentValue = true;
useExistingValue = (currentValue != null
&& !property.PropertyType.IsArray
&& !ReflectionUtils.InheritsGenericDefinition(property.PropertyType, typeof(ReadOnlyCollection<>))
&& !property.PropertyType.IsValueType);
}
if (!property.Writable && !useExistingValue)
{
reader.Skip();
return;
}
// test the token type here because null might not be convertible to some types, e.g. ignoring null when applied to DateTime
if (property.NullValueHandling.GetValueOrDefault(Serializer.NullValueHandling) == NullValueHandling.Ignore && reader.TokenType == JsonToken.Null)
{
reader.Skip();
return;
}
// test the token type here because the default value might not be convertible to the actual type, e.g. a default of "" for DateTime
if (HasFlag(property.DefaultValueHandling.GetValueOrDefault(Serializer.DefaultValueHandling), DefaultValueHandling.Ignore)
&& JsonReader.IsPrimitiveToken(reader.TokenType)
&& MiscellaneousUtils.ValueEquals(reader.Value, property.DefaultValue))
{
reader.Skip();
return;
}
object existingValue = (useExistingValue) ? currentValue : null;
object value = CreateValueProperty(reader, property, target, gottenCurrentValue, existingValue);
// always set the value if useExistingValue is false,
// otherwise also set it if CreateValue returns a new value compared to the currentValue
// this could happen because of a JsonConverter against the type
if ((!useExistingValue || value != currentValue)
&& ShouldSetPropertyValue(property, value))
{
property.ValueProvider.SetValue(target, value);
if (property.SetIsSpecified != null)
property.SetIsSpecified(target, true);
}
}
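// Illustrative sketch (not from the original source): with a member declared as
//   [JsonProperty(NullValueHandling = NullValueHandling.Ignore)]
//   public string Name { get; set; }
// a JSON null for "Name" is skipped above instead of overwriting the current value, and
// DefaultValueHandling.Ignore similarly skips tokens equal to the property's default value.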
private bool HasFlag(DefaultValueHandling value, DefaultValueHandling flag)
{
return ((value & flag) == flag);
}
private bool ShouldSetPropertyValue(JsonProperty property, object value)
{
if (property.NullValueHandling.GetValueOrDefault(Serializer.NullValueHandling) == NullValueHandling.Ignore && value == null)
return false;
if (HasFlag(property.DefaultValueHandling.GetValueOrDefault(Serializer.DefaultValueHandling), DefaultValueHandling.Ignore)
&& MiscellaneousUtils.ValueEquals(value, property.DefaultValue))
return false;
if (!property.Writable)
return false;
return true;
}
private object CreateAndPopulateDictionary(JsonReader reader, JsonDictionaryContract contract, string id)
{
object dictionary;
if (contract.DefaultCreator != null &&
(!contract.DefaultCreatorNonPublic || Serializer.ConstructorHandling == ConstructorHandling.AllowNonPublicDefaultConstructor))
dictionary = contract.DefaultCreator();
else
throw new JsonSerializationException("Unable to find a default constructor to use for type {0}.".FormatWith(CultureInfo.InvariantCulture, contract.UnderlyingType));
IWrappedDictionary dictionaryWrapper = contract.CreateWrapper(dictionary);
PopulateDictionary(dictionaryWrapper, reader, contract, id);
return dictionaryWrapper.UnderlyingDictionary;
}
private object PopulateDictionary(IWrappedDictionary dictionary, JsonReader reader, JsonDictionaryContract contract, string id)
{
if (id != null)
Serializer.ReferenceResolver.AddReference(this, id, dictionary.UnderlyingDictionary);
contract.InvokeOnDeserializing(dictionary.UnderlyingDictionary, Serializer.Context);
int initialDepth = reader.Depth;
do
{
switch (reader.TokenType)
{
case JsonToken.PropertyName:
object keyValue = reader.Value;
try
{
try
{
keyValue = EnsureType(keyValue, CultureInfo.InvariantCulture, contract.DictionaryKeyType);
}
catch (Exception ex)
{
throw new JsonSerializationException("Could not convert string '{0}' to dictionary key type '{1}'. Create a TypeConverter to convert from the string to the key type object.".FormatWith(CultureInfo.InvariantCulture, reader.Value, contract.DictionaryKeyType), ex);
}
if (!ReadForType(reader, contract.DictionaryValueType, null))
throw new JsonSerializationException("Unexpected end when deserializing object.");
dictionary[keyValue] = CreateValueNonProperty(reader, contract.DictionaryValueType, GetContractSafe(contract.DictionaryValueType));
}
catch (Exception ex)
{
if (IsErrorHandled(dictionary, contract, keyValue, ex))
HandleError(reader, initialDepth);
else
throw;
}
break;
case JsonToken.Comment:
break;
case JsonToken.EndObject:
contract.InvokeOnDeserialized(dictionary.UnderlyingDictionary, Serializer.Context);
return dictionary.UnderlyingDictionary;
default:
throw new JsonSerializationException("Unexpected token when deserializing object: " + reader.TokenType);
}
} while (reader.Read());
throw new JsonSerializationException("Unexpected end when deserializing object.");
}
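// Illustrative sketch (assumed example): deserializing {"1":"a","2":"b"} into a
// Dictionary<int, string> hits the PropertyName branch above, converting each key token
// ("1", "2") to the int key type via EnsureType before reading the corresponding value.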
private object CreateAndPopulateList(JsonReader reader, string reference, JsonArrayContract contract)
{
return CollectionUtils.CreateAndPopulateList(contract.CreatedType, (l, isTemporaryListReference) =>
{
if (reference != null && isTemporaryListReference)
throw new JsonSerializationException("Cannot preserve reference to array or readonly list: {0}".FormatWith(CultureInfo.InvariantCulture, contract.UnderlyingType));
if (contract.OnSerializing != null && isTemporaryListReference)
throw new JsonSerializationException("Cannot call OnSerializing on an array or readonly list: {0}".FormatWith(CultureInfo.InvariantCulture, contract.UnderlyingType));
if (contract.OnError != null && isTemporaryListReference)
throw new JsonSerializationException("Cannot call OnError on an array or readonly list: {0}".FormatWith(CultureInfo.InvariantCulture, contract.UnderlyingType));
if (!contract.IsMultidimensionalArray)
PopulateList(contract.CreateWrapper(l), reader, reference, contract);
else
PopulateMultidimensionalArray(l, reader, reference, contract);
});
}
private bool ReadForTypeArrayHack(JsonReader reader, Type t)
{
// this is a nasty hack because calling ReadAsDecimal for example will error when we hit the end of the array
// need to think of a better way of doing this
try
{
return ReadForType(reader, t, null);
}
catch (JsonReaderException)
{
if (reader.TokenType == JsonToken.EndArray)
return true;
throw;
}
}
private object PopulateList(IWrappedCollection wrappedList, JsonReader reader, string reference, JsonArrayContract contract)
{
object list = wrappedList.UnderlyingCollection;
// can't populate an existing array
if (wrappedList.IsFixedSize)
{
reader.Skip();
return wrappedList.UnderlyingCollection;
}
if (reference != null)
Serializer.ReferenceResolver.AddReference(this, reference, list);
contract.InvokeOnDeserializing(list, Serializer.Context);
int initialDepth = reader.Depth;
while (ReadForTypeArrayHack(reader, contract.CollectionItemType))
{
switch (reader.TokenType)
{
case JsonToken.EndArray:
contract.InvokeOnDeserialized(list, Serializer.Context);
return wrappedList.UnderlyingCollection;
case JsonToken.Comment:
break;
default:
try
{
object value = CreateValueNonProperty(reader, contract.CollectionItemType, GetContractSafe(contract.CollectionItemType));
wrappedList.Add(value);
}
catch (Exception ex)
{
if (IsErrorHandled(list, contract, wrappedList.Count, ex))
HandleError(reader, initialDepth);
else
throw;
}
break;
}
}
throw new JsonSerializationException("Unexpected end when deserializing array.");
}
private object PopulateMultidimensionalArray(IList list, JsonReader reader, string reference, JsonArrayContract contract)
{
int rank = contract.UnderlyingType.GetArrayRank();
if (reference != null)
Serializer.ReferenceResolver.AddReference(this, reference, list);
contract.InvokeOnDeserializing(list, Serializer.Context);
//JsonContract collectionItemContract = GetContractSafe(contract.CollectionItemType);
//JsonConverter collectionItemConverter = GetConverter(collectionItemContract, null, contract, containerProperty);
//int? previousErrorIndex = null;
Stack<IList> listStack = new Stack<IList>();
listStack.Push(list);
IList currentList = list;
bool finished = false;
do
{
int initialDepth = reader.Depth;
if (listStack.Count == rank)
{
if (ReadForTypeArrayHack(reader, contract.CollectionItemType))
{
switch (reader.TokenType)
{
case JsonToken.EndArray:
listStack.Pop();
currentList = listStack.Peek();
//previousErrorIndex = null;
break;
case JsonToken.Comment:
break;
default:
try
{
object value = CreateValueNonProperty(reader, contract.CollectionItemType, GetContractSafe(contract.CollectionItemType));
currentList.Add(value);
}
catch (Exception ex)
{
if (IsErrorHandled(list, contract, currentList.Count, ex))
HandleError(reader, initialDepth);
else
throw;
}
break;
}
}
else
{
break;
}
}
else
{
if (reader.Read())
{
switch (reader.TokenType)
{
case JsonToken.StartArray:
IList newList = new List<object>();
currentList.Add(newList);
listStack.Push(newList);
currentList = newList;
break;
case JsonToken.EndArray:
listStack.Pop();
if (listStack.Count > 0)
{
currentList = listStack.Peek();
}
else
{
finished = true;
}
break;
case JsonToken.Comment:
break;
default:
throw new JsonSerializationException("Unexpected token when deserializing multidimensional array: " + reader.TokenType);
}
}
else
{
break;
}
}
} while (!finished);
if (!finished)
throw new JsonSerializationException("Unexpected end when deserializing array." + reader.TokenType);
contract.InvokeOnDeserialized(list, Serializer.Context);
return list;
}
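// Illustrative sketch (assumed example): for an int[,] target, JSON like [[1,2],[3,4]] is
// walked with the list stack above into nested lists, which the surrounding
// CollectionUtils.CreateAndPopulateList helper is then responsible for turning into the 2x2 array.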
#if !((UNITY_WINRT && !UNITY_EDITOR) || (UNITY_WP8 || UNITY_WP_8_1))
private object CreateISerializable(JsonReader reader, JsonISerializableContract contract, string id)
{
Type objectType = contract.UnderlyingType;
SerializationInfo serializationInfo = new SerializationInfo(contract.UnderlyingType, GetFormatterConverter());
bool exit = false;
do
{
switch (reader.TokenType)
{
case JsonToken.PropertyName:
string memberName = reader.Value.ToString();
if (!reader.Read())
throw new JsonSerializationException("Unexpected end when setting {0}'s value.".FormatWith(CultureInfo.InvariantCulture, memberName));
serializationInfo.AddValue(memberName, JToken.ReadFrom(reader));
break;
case JsonToken.Comment:
break;
case JsonToken.EndObject:
exit = true;
break;
default:
throw new JsonSerializationException("Unexpected token when deserializing object: " + reader.TokenType);
}
} while (!exit && reader.Read());
if (contract.ISerializableCreator == null)
throw new JsonSerializationException("ISerializable type '{0}' does not have a valid constructor. To correctly implement ISerializable a constructor that takes SerializationInfo and StreamingContext parameters should be present.".FormatWith(CultureInfo.InvariantCulture, objectType));
object createdObject = contract.ISerializableCreator(serializationInfo, Serializer.Context);
if (id != null)
Serializer.ReferenceResolver.AddReference(this, id, createdObject);
// these are together because OnDeserializing takes an object, but for an ISerializable the object is fully created in the constructor
contract.InvokeOnDeserializing(createdObject, Serializer.Context);
contract.InvokeOnDeserialized(createdObject, Serializer.Context);
return createdObject;
}
#endif
private object CreateAndPopulateObject(JsonReader reader, JsonObjectContract contract, string id)
{
object newObject = null;
if (contract.UnderlyingType.IsInterface || contract.UnderlyingType.IsAbstract)
throw new JsonSerializationException("Could not create an instance of type {0}. Type is an interface or abstract class and cannot be instantated.".FormatWith(CultureInfo.InvariantCulture, contract.UnderlyingType));
if (contract.OverrideConstructor != null)
{
if (contract.OverrideConstructor.GetParameters().Length > 0)
return CreateObjectFromNonDefaultConstructor(reader, contract, contract.OverrideConstructor, id);
newObject = contract.OverrideConstructor.Invoke(null);
}
else if (contract.DefaultCreator != null &&
(!contract.DefaultCreatorNonPublic || Serializer.ConstructorHandling == ConstructorHandling.AllowNonPublicDefaultConstructor))
{
newObject = contract.DefaultCreator();
}
else if (contract.ParametrizedConstructor != null)
{
return CreateObjectFromNonDefaultConstructor(reader, contract, contract.ParametrizedConstructor, id);
}
if (newObject == null)
throw new JsonSerializationException("Unable to find a constructor to use for type {0}. A class should either have a default constructor, one constructor with arguments or a constructor marked with the JsonConstructor attribute.".FormatWith(CultureInfo.InvariantCulture, contract.UnderlyingType));
PopulateObject(newObject, reader, contract, id);
return newObject;
}
private object CreateObjectFromNonDefaultConstructor(JsonReader reader, JsonObjectContract contract, ConstructorInfo constructorInfo, string id)
{
ValidationUtils.ArgumentNotNull(constructorInfo, "constructorInfo");
Type objectType = contract.UnderlyingType;
IDictionary<JsonProperty, object> propertyValues = ResolvePropertyAndConstructorValues(contract, reader, objectType);
IDictionary<ParameterInfo, object> constructorParameters = constructorInfo.GetParameters().ToDictionary(p => p, p => (object)null);
IDictionary<JsonProperty, object> remainingPropertyValues = new Dictionary<JsonProperty, object>();
#if !(UNITY_IPHONE || UNITY_IOS || UNITY_WEBGL || UNITY_XBOXONE || UNITY_XBOX360 || UNITY_PS4 || UNITY_PS3 || UNITY_WII) || (UNITY_IOS || UNITY_WEBGL || UNITY_XBOXONE || UNITY_XBOX360 || UNITY_PS4 || UNITY_PS3 || UNITY_WII && !(UNITY_3_5 || UNITY_4_0_1 || UNITY_4_1 || UNITY_4_2 || UNITY_4_3))
foreach (KeyValuePair<JsonProperty, object> propertyValue in propertyValues)
{
ParameterInfo matchingConstructorParameter = constructorParameters.ForgivingCaseSensitiveFind(kv => kv.Key.Name, propertyValue.Key.UnderlyingName).Key;
if (matchingConstructorParameter != null)
constructorParameters[matchingConstructorParameter] = propertyValue.Value;
else
remainingPropertyValues.Add(propertyValue);
}
#else
propertyValues.ForEach(propertyValue => {
ParameterInfo matchingConstructorParameter = constructorParameters.ForgivingCaseSensitiveFind(kv => kv.Key.Name, propertyValue.Key.UnderlyingName).Key;
if (matchingConstructorParameter != null)
constructorParameters[matchingConstructorParameter] = propertyValue.Value;
else
remainingPropertyValues.Add(propertyValue);
});
#endif
object createdObject = constructorInfo.Invoke(constructorParameters.Values.ToArray());
if (id != null)
Serializer.ReferenceResolver.AddReference(this, id, createdObject);
contract.InvokeOnDeserializing(createdObject, Serializer.Context);
#if !(UNITY_IPHONE || UNITY_IOS || UNITY_WEBGL || UNITY_XBOXONE || UNITY_XBOX360 || UNITY_PS4 || UNITY_PS3 || UNITY_WII) || (UNITY_IOS || UNITY_WEBGL || UNITY_XBOXONE || UNITY_XBOX360 || UNITY_PS4 || UNITY_PS3 || UNITY_WII && !(UNITY_3_5 || UNITY_4_0_1 || UNITY_4_1 || UNITY_4_2 || UNITY_4_3))
// go through unused values and set the newly created object's properties
foreach (KeyValuePair<JsonProperty, object> remainingPropertyValue in remainingPropertyValues)
{
JsonProperty property = remainingPropertyValue.Key;
object value = remainingPropertyValue.Value;
if (ShouldSetPropertyValue(remainingPropertyValue.Key, remainingPropertyValue.Value))
{
property.ValueProvider.SetValue(createdObject, value);
}
else if (!property.Writable && value != null)
{
// handle readonly collection/dictionary properties
JsonContract propertyContract = Serializer.ContractResolver.ResolveContract(property.PropertyType);
if (propertyContract is JsonArrayContract)
{
JsonArrayContract propertyArrayContract = propertyContract as JsonArrayContract;
object createdObjectCollection = property.ValueProvider.GetValue(createdObject);
if (createdObjectCollection != null)
{
IWrappedCollection createdObjectCollectionWrapper = propertyArrayContract.CreateWrapper(createdObjectCollection);
IWrappedCollection newValues = propertyArrayContract.CreateWrapper(value);
foreach (object newValue in newValues)
{
createdObjectCollectionWrapper.Add(newValue);
}
}
}
else if (propertyContract is JsonDictionaryContract)
{
JsonDictionaryContract jsonDictionaryContract = propertyContract as JsonDictionaryContract;
object createdObjectDictionary = property.ValueProvider.GetValue(createdObject);
if (createdObjectDictionary != null)
{
IWrappedDictionary createdObjectDictionaryWrapper = jsonDictionaryContract.CreateWrapper(createdObjectDictionary);
IWrappedDictionary newValues = jsonDictionaryContract.CreateWrapper(value);
foreach (DictionaryEntry newValue in newValues)
{
createdObjectDictionaryWrapper.Add(newValue.Key, newValue.Value);
}
}
}
}
}
#else
// go through unused values and set the newly created object's properties
remainingPropertyValues.ForEach(remainingPropertyValue => {
JsonProperty property = remainingPropertyValue.Key;
object value = remainingPropertyValue.Value;
if (ShouldSetPropertyValue(remainingPropertyValue.Key, remainingPropertyValue.Value))
{
property.ValueProvider.SetValue(createdObject, value);
}
else if (!property.Writable && value != null)
{
// handle readonly collection/dictionary properties
JsonContract propertyContract = Serializer.ContractResolver.ResolveContract(property.PropertyType);
if (propertyContract is JsonArrayContract)
{
JsonArrayContract propertyArrayContract = propertyContract as JsonArrayContract;
object createdObjectCollection = property.ValueProvider.GetValue(createdObject);
if (createdObjectCollection != null)
{
IWrappedCollection createdObjectCollectionWrapper = propertyArrayContract.CreateWrapper(createdObjectCollection);
IWrappedCollection newValues = propertyArrayContract.CreateWrapper(value);
foreach (object newValue in newValues)
{
createdObjectCollectionWrapper.Add(newValue);
}
}
}
else if (propertyContract is JsonDictionaryContract)
{
JsonDictionaryContract jsonDictionaryContract = propertyContract as JsonDictionaryContract;
object createdObjectDictionary = property.ValueProvider.GetValue(createdObject);
if (createdObjectDictionary != null)
{
IWrappedDictionary createdObjectDictionaryWrapper = jsonDictionaryContract.CreateWrapper(createdObjectDictionary);
IWrappedDictionary newValues = jsonDictionaryContract.CreateWrapper(value);
foreach (DictionaryEntry newValue in newValues)
{
createdObjectDictionaryWrapper.Add(newValue.Key, newValue.Value);
}
}
}
}
});
#endif
contract.InvokeOnDeserialized(createdObject, Serializer.Context);
return createdObject;
}
private IDictionary<JsonProperty, object> ResolvePropertyAndConstructorValues(JsonObjectContract contract, JsonReader reader, Type objectType)
{
IDictionary<JsonProperty, object> propertyValues = new Dictionary<JsonProperty, object>();
bool exit = false;
do
{
switch (reader.TokenType)
{
case JsonToken.PropertyName:
string memberName = reader.Value.ToString();
// attempt exact case match first
// then try match ignoring case
JsonProperty property = contract.ConstructorParameters.GetClosestMatchProperty(memberName) ??
contract.Properties.GetClosestMatchProperty(memberName);
if (property != null)
{
if (!ReadForType(reader, property.PropertyType, property.Converter))
throw new JsonSerializationException("Unexpected end when setting {0}'s value.".FormatWith(CultureInfo.InvariantCulture, memberName));
if (!property.Ignored)
propertyValues[property] = CreateValueProperty(reader, property, null, true, null);
else
reader.Skip();
}
else
{
if (!reader.Read())
throw new JsonSerializationException("Unexpected end when setting {0}'s value.".FormatWith(CultureInfo.InvariantCulture, memberName));
if (Serializer.MissingMemberHandling == MissingMemberHandling.Error)
throw new JsonSerializationException("Could not find member '{0}' on object of type '{1}'".FormatWith(CultureInfo.InvariantCulture, memberName, objectType.Name));
reader.Skip();
}
break;
case JsonToken.Comment:
break;
case JsonToken.EndObject:
exit = true;
break;
default:
throw new JsonSerializationException("Unexpected token when deserializing object: " + reader.TokenType);
}
} while (!exit && reader.Read());
return propertyValues;
}
private bool ReadForType(JsonReader reader, Type t, JsonConverter propertyConverter)
{
// don't read properties with converters as a specific value;
// the value might be a string that the converter handles itself, and reading it
// as a specific type (e.g. a date) would error
bool hasConverter = (GetConverter(GetContractSafe(t), propertyConverter) != null);
if (hasConverter)
return reader.Read();
if (t == typeof(byte[]))
{
reader.ReadAsBytes();
return true;
}
else if ((t == typeof(decimal) || t == typeof(decimal?)))
{
reader.ReadAsDecimal();
return true;
}
else if ((t == typeof(DateTimeOffset) || t == typeof(DateTimeOffset?)))
{
reader.ReadAsDateTimeOffset();
return true;
}
do
{
if (!reader.Read())
return false;
} while (reader.TokenType == JsonToken.Comment);
return true;
}
private object PopulateObject(object newObject, JsonReader reader, JsonObjectContract contract, string id)
{
contract.InvokeOnDeserializing(newObject, Serializer.Context);
Dictionary<JsonProperty, PropertyPresence> propertiesPresence =
contract.Properties.ToDictionary(m => m, m => PropertyPresence.None);
if (id != null)
Serializer.ReferenceResolver.AddReference(this, id, newObject);
int initialDepth = reader.Depth;
do
{
switch (reader.TokenType)
{
case JsonToken.PropertyName:
string memberName = reader.Value.ToString();
try
{
// attempt exact case match first
// then try match ignoring case
JsonProperty property = contract.Properties.GetClosestMatchProperty(memberName);
if (property == null)
{
if (Serializer.MissingMemberHandling == MissingMemberHandling.Error)
throw new JsonSerializationException("Could not find member '{0}' on object of type '{1}'".FormatWith(CultureInfo.InvariantCulture, memberName, contract.UnderlyingType.Name));
reader.Skip();
continue;
}
if (!ReadForType(reader, property.PropertyType, property.Converter))
throw new JsonSerializationException("Unexpected end when setting {0}'s value.".FormatWith(CultureInfo.InvariantCulture, memberName));
SetPropertyPresence(reader, property, propertiesPresence);
SetPropertyValue(property, reader, newObject);
}
catch (Exception ex)
{
if (IsErrorHandled(newObject, contract, memberName, ex))
HandleError(reader, initialDepth);
else
throw;
}
break;
case JsonToken.EndObject:
foreach (KeyValuePair<JsonProperty, PropertyPresence> propertyPresence in propertiesPresence)
{
JsonProperty property = propertyPresence.Key;
PropertyPresence presence = propertyPresence.Value;
switch (presence)
{
case PropertyPresence.None:
if (property.Required == Required.AllowNull || property.Required == Required.Always)
throw new JsonSerializationException("Required property '{0}' not found in JSON.".FormatWith(CultureInfo.InvariantCulture, property.PropertyName));
if (HasFlag(property.DefaultValueHandling.GetValueOrDefault(Serializer.DefaultValueHandling), DefaultValueHandling.Populate)
&& property.Writable)
property.ValueProvider.SetValue(newObject, EnsureType(property.DefaultValue, CultureInfo.InvariantCulture, property.PropertyType));
break;
case PropertyPresence.Null:
if (property.Required == Required.Always)
throw new JsonSerializationException("Required property '{0}' expects a value but got null.".FormatWith(CultureInfo.InvariantCulture, property.PropertyName));
break;
}
}
contract.InvokeOnDeserialized(newObject, Serializer.Context);
return newObject;
case JsonToken.Comment:
// ignore
break;
default:
throw new JsonSerializationException("Unexpected token when deserializing object: " + reader.TokenType);
}
} while (reader.Read());
throw new JsonSerializationException("Unexpected end when deserializing object.");
}
private void SetPropertyPresence(JsonReader reader, JsonProperty property, Dictionary<JsonProperty, PropertyPresence> requiredProperties)
{
if (property != null)
{
requiredProperties[property] = (reader.TokenType == JsonToken.Null || reader.TokenType == JsonToken.Undefined)
? PropertyPresence.Null
: PropertyPresence.Value;
}
}
private void HandleError(JsonReader reader, int initialDepth)
{
ClearErrorContext();
reader.Skip();
while (reader.Depth > (initialDepth + 1))
{
reader.Read();
}
}
internal enum PropertyPresence
{
None,
Null,
Value
}
}
}
#endif
// Copyright 2016 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using UnityEngine;
using UnityEngine.UI;
using System.Collections;
using System.Runtime.InteropServices;
using System;
using System.Collections.Generic;
/// <summary>
/// Plays video using ExoPlayer, rendering it on the main texture.
/// </summary>
public class GvrVideoPlayerTexture : MonoBehaviour {
/// <summary>
/// The video player pointer used to uniquely identify the player instance.
/// </summary>
private IntPtr videoPlayerPtr;
/// <summary>
/// The video player event base.
/// </summary>
/// <remarks>This is added to the event id when issuing events to
/// the plugin.
/// </remarks>
private int videoPlayerEventBase;
private Texture initialTexture;
private Texture surfaceTexture;
private float[] videoMatrix;
private long lastVideoTimestamp;
private bool initialized;
private int texWidth = 1024;
private int texHeight = 1024;
private long lastBufferedPosition;
private float framecount = 0;
private Graphic graphicComponent;
private Renderer rendererComponent;
/// <summary>
/// The render event function.
/// </summary>
private IntPtr renderEventFunction;
private bool playOnResume;
/// <summary>List of callbacks to invoke when the video is ready.</summary>
private List<Action<int>> onEventCallbacks;
/// <summary>List of callbacks to invoke on exception.</summary>
/// <remarks>The first parameter is the type of exception,
/// the second is the message.
/// </remarks>
private List<Action<string, string>> onExceptionCallbacks;
private readonly static Queue<Action> ExecuteOnMainThread = new Queue<Action>();
// Attach a text component to get some debug status info.
public Text statusText;
/// <summary>
/// Video type.
/// </summary>
public enum VideoType {
Dash = 0,
HLS = 2,
Other = 3
};
public enum VideoResolution {
Lowest = 1,
_720 = 720,
_1080 = 1080,
_2048 = 2048,
Highest = 4096
};
/// <summary>
/// Video player state.
/// </summary>
public enum VideoPlayerState {
Idle = 1,
Preparing = 2,
Buffering = 3,
Ready = 4,
Ended = 5
};
public enum VideoEvents {
VideoReady = 1,
VideoStartPlayback = 2,
VideoFormatChanged = 3,
VideoSurfaceSet = 4,
VideoSizeChanged = 5
};
/// <summary>
/// Plugin render commands.
/// </summary>
/// <remarks>
/// These are added to the eventbase for the specific player object and
/// issued to the plugin.
/// </remarks>
private enum RenderCommand {
None = -1,
InitializePlayer = 0,
UpdateVideo = 1,
RenderMono = 2,
RenderLeftEye = 3,
RenderRightEye = 4,
Shutdown = 5
};
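// Presumably (sketch, not confirmed by this excerpt): a command is issued to the plugin by
// combining it with this player's event base, e.g. something along the lines of
//   GL.IssuePluginEvent(renderEventFunction, videoPlayerEventBase + (int)RenderCommand.UpdateVideo);
// which matches the <remarks> above; the actual IssuePlayerEvent implementation lives outside
// the code shown here.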
/// <summary>
/// The type of the video.
/// </summary>
public VideoType videoType;
public string videoURL;
public string videoContentID;
public string videoProviderId;
public VideoResolution initialResolution = VideoResolution.Highest;
/// <summary>
/// True to adjust the aspect ratio of the renderer.
/// </summary>
public bool adjustAspectRatio;
/// <summary>
/// Whether to use the secure path for DRM-protected video.
/// </summary>
public bool useSecurePath;
public bool VideoReady {
get {
return videoPlayerPtr != IntPtr.Zero && IsVideoReady(videoPlayerPtr);
}
}
public long CurrentPosition {
get {
return videoPlayerPtr != IntPtr.Zero ? GetCurrentPosition(videoPlayerPtr) : 0;
}
set {
// If the position is being set to 0, reset the framecount as well.
// This allows the texture swapping to work correctly at the beginning
// of the stream.
if (value == 0) {
framecount = 0;
}
SetCurrentPosition(videoPlayerPtr, value);
}
}
public long VideoDuration {
get {
return videoPlayerPtr != IntPtr.Zero ? GetDuration(videoPlayerPtr) : 0;
}
}
public long BufferedPosition {
get {
return videoPlayerPtr != IntPtr.Zero ? GetBufferedPosition(videoPlayerPtr) : 0;
}
}
public int BufferedPercentage {
get {
return videoPlayerPtr != IntPtr.Zero ? GetBufferedPercentage(videoPlayerPtr) : 0;
}
}
public bool IsPaused {
get {
return !initialized || videoPlayerPtr == IntPtr.Zero || IsVideoPaused(videoPlayerPtr);
}
}
public VideoPlayerState PlayerState {
get {
return videoPlayerPtr != IntPtr.Zero ? (VideoPlayerState)GetPlayerState(videoPlayerPtr) : VideoPlayerState.Idle;
}
}
public int MaxVolume {
get {
return videoPlayerPtr != IntPtr.Zero ? GetMaxVolume(videoPlayerPtr) : 0;
}
}
public int CurrentVolume {
get {
return videoPlayerPtr != IntPtr.Zero ? GetCurrentVolume(videoPlayerPtr) : 0;
}
set {
SetCurrentVolume(value);
}
}
/// Create the video player instance and the event base id.
void Awake() {
videoMatrix = new float[16];
// Find the components on which to set the video texture.
graphicComponent = GetComponent<Graphic>();
rendererComponent = GetComponent<Renderer>();
CreatePlayer();
}
void CreatePlayer() {
videoPlayerPtr = CreateVideoPlayer();
videoPlayerEventBase = GetVideoPlayerEventBase(videoPlayerPtr);
Debug.Log(" -- " + gameObject.name + " created with base " +
videoPlayerEventBase);
SetOnVideoEventCallback((eventId) => {
Debug.Log("------------- E V E N T " + eventId + " -----------------");
UpdateStatusText();
});
SetOnExceptionCallback((type, msg) => {
Debug.LogError("Exception: " + type + ": " + msg);
});
initialized = false;
if (rendererComponent != null) {
initialTexture = rendererComponent.material.mainTexture;
} else if (graphicComponent != null) {
initialTexture = graphicComponent.mainTexture;
}
}
void OnDisable() {
if (videoPlayerPtr != IntPtr.Zero) {
if (GetPlayerState(videoPlayerPtr) == (int)VideoPlayerState.Ready) {
PauseVideo(videoPlayerPtr);
}
}
}
/// <summary>
/// Sets the display texture.
/// </summary>
/// <param name="texture">Texture to display.
// If null, the initial texture of the renderer is used.</param>
public void SetDisplayTexture(Texture texture) {
if (texture == null) {
texture = initialTexture;
}
if (texture == null) {
return;
}
if (rendererComponent != null) {
rendererComponent.sharedMaterial.mainTexture = texture;
} else if (graphicComponent != null) {
graphicComponent.material.mainTexture = texture;
}
}
public void CleanupVideo() {
Debug.Log("Cleaning Up video!");
if (videoPlayerPtr != IntPtr.Zero) {
DestroyVideoPlayer(videoPlayerPtr);
videoPlayerPtr = IntPtr.Zero;
}
if (surfaceTexture != null) {
Destroy(surfaceTexture);
surfaceTexture = null;
}
if (rendererComponent != null) {
rendererComponent.sharedMaterial.mainTexture = initialTexture;
} else if (graphicComponent != null) {
graphicComponent.material.mainTexture = initialTexture;
}
}
public void ReInitializeVideo() {
if (rendererComponent != null) {
rendererComponent.sharedMaterial.mainTexture = initialTexture;
} else if (graphicComponent != null) {
graphicComponent.material.mainTexture = initialTexture;
}
if (videoPlayerPtr == IntPtr.Zero) {
CreatePlayer();
}
Init();
}
void OnDestroy() {
CleanupVideo();
}
void OnValidate() {
Renderer r = GetComponent<Renderer>();
Graphic g = GetComponent<Graphic>();
if (g == null && r == null) {
Debug.LogError("TexturePlayer object must have either " +
"a Renderer component or a Graphic component.");
}
}
void OnApplicationPause(bool bPause) {
if (videoPlayerPtr != IntPtr.Zero) {
if (bPause) {
playOnResume = !IsPaused;
PauseVideo(videoPlayerPtr);
} else {
if (playOnResume) {
PlayVideo(videoPlayerPtr);
}
}
}
}
void UpdateMaterial() {
// Don't render if not initialized.
if (videoPlayerPtr == IntPtr.Zero) {
return;
}
texWidth = GetWidth(videoPlayerPtr);
texHeight = GetHeight(videoPlayerPtr);
int externalTextureId = GetExternalSurfaceTextureId(videoPlayerPtr);
if (surfaceTexture != null
&& surfaceTexture.GetNativeTexturePtr().ToInt32() != externalTextureId) {
Destroy(surfaceTexture);
surfaceTexture = null;
}
if (surfaceTexture == null && externalTextureId != 0) {
Debug.Log("Creating external texture with surface texture id " + externalTextureId);
// Size of this texture doesn't really matter and can change on the fly anyway.
surfaceTexture = Texture2D.CreateExternalTexture(4, 4, TextureFormat.RGBA32,
false, false, new System.IntPtr(externalTextureId));
}
if (surfaceTexture == null) {
return;
}
// Don't swap the textures if the video ended.
if (PlayerState == VideoPlayerState.Ended) {
return;
}
if (graphicComponent == null && rendererComponent == null) {
Debug.LogError("GvrVideoPlayerTexture: No render or graphic component.");
return;
}
// Extract the shader's simplified scale/offset from the SurfaceTexture's
// transformation matrix.
Vector2 vidTexScale = new Vector2(videoMatrix[0], videoMatrix[5]);
Vector2 vidTexOffset = new Vector2(videoMatrix[12], videoMatrix[13]);
// Handle either the renderer component or the graphic component.
if (rendererComponent != null) {
// Unity may build new a new material instance when assigning
// material.x which can lead to duplicating materials each frame
// whereas using the shared material will modify the original material.
// Update the material's texture if it is different.
if (rendererComponent.sharedMaterial.mainTexture == null ||
rendererComponent.sharedMaterial.mainTexture.GetNativeTexturePtr() !=
surfaceTexture.GetNativeTexturePtr()) {
rendererComponent.sharedMaterial.mainTexture = surfaceTexture;
}
rendererComponent.sharedMaterial.mainTextureScale = vidTexScale;
rendererComponent.sharedMaterial.mainTextureOffset = vidTexOffset;
} else if (graphicComponent != null) {
if (graphicComponent.material.mainTexture == null ||
graphicComponent.material.mainTexture.GetNativeTexturePtr() !=
surfaceTexture.GetNativeTexturePtr()) {
graphicComponent.material.mainTexture = surfaceTexture;
}
graphicComponent.material.mainTextureScale = vidTexScale;
graphicComponent.material.mainTextureOffset = vidTexOffset;
}
}
private void OnRestartVideoEvent(int eventId) {
if (eventId == (int)VideoEvents.VideoReady) {
Debug.Log("Restarting video complete.");
RemoveOnVideoEventCallback(OnRestartVideoEvent);
}
}
/// <summary>
/// Resets the video player.
/// </summary>
public void RestartVideo() {
SetOnVideoEventCallback(OnRestartVideoEvent);
string theUrl = ProcessURL();
InitVideoPlayer(videoPlayerPtr, (int) videoType, theUrl,
videoContentID,
videoProviderId,
useSecurePath,
true);
framecount = 0;
lastVideoTimestamp = -1;
}
public void SetCurrentVolume(int val) {
SetCurrentVolume(videoPlayerPtr, val);
}
/// <summary>
/// Initialize the video player.
/// </summary>
/// <returns>true if successful</returns>
public bool Init() {
if (initialized) {
Debug.Log("Skipping initialization: video player already loaded");
return true;
}
if (videoURL == null || videoURL.Length == 0) {
Debug.LogError("Cannot initialize with null videoURL");
return false;
}
videoURL = videoURL == null ? "" : videoURL.Trim();
videoContentID = videoContentID == null ? "" : videoContentID.Trim();
videoProviderId = videoProviderId == null ? "" : videoProviderId.Trim();
SetInitialResolution(videoPlayerPtr, (int) initialResolution);
string theUrl = ProcessURL();
Debug.Log("Playing " + videoType + " " + theUrl);
Debug.Log("videoContentID = " + videoContentID);
Debug.Log("videoProviderId = " + videoProviderId);
videoPlayerPtr = InitVideoPlayer(videoPlayerPtr, (int) videoType, theUrl,
videoContentID, videoProviderId,
useSecurePath, false);
IssuePlayerEvent(RenderCommand.InitializePlayer);
initialized = true;
framecount = 0;
lastVideoTimestamp = -1;
return videoPlayerPtr != IntPtr.Zero;
}
public bool Play() {
if (!initialized) {
Init();
}
if (videoPlayerPtr != IntPtr.Zero && IsVideoReady(videoPlayerPtr)) {
return PlayVideo(videoPlayerPtr) == 0;
} else {
Debug.LogError("Video player not ready to Play!");
return false;
}
}
public bool Pause() {
if (!initialized) {
Init();
}
if (VideoReady) {
return PauseVideo(videoPlayerPtr) == 0;
} else {
Debug.LogError("Video player not ready to Pause!");
return false;
}
}
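// Illustrative lifecycle sketch (comments only; not part of the original plugin source).
// A controlling script would typically defer Play() until the native player reports
// readiness through the video event callback. The surrounding MonoBehaviour and the
// ordering below are assumptions for the example.
//
//   var player = GetComponent<GvrVideoPlayerTexture>();
//   player.SetOnVideoEventCallback(eventId => {
//     if (eventId == (int)VideoEvents.VideoReady) {
//       player.Play();
//     }
//   });
//   player.Init();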
/// <summary>
/// Adjusts the aspect ratio.
/// </summary>
/// <remarks>
/// This adjusts the transform scale to match the aspect
/// ratio of the texture.
/// </remarks>
private void AdjustAspectRatio() {
float aspectRatio = (float)texWidth / texHeight;
// set the y scale based on the x value
Vector3 newscale = transform.localScale;
newscale.y = Mathf.Min(newscale.y, newscale.x / aspectRatio);
transform.localScale = newscale;
}
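// Worked example for AdjustAspectRatio above (illustrative numbers): for a 1920x1080 video,
// aspectRatio is about 1.778. With an initial localScale of (1, 1, 1) the y scale becomes
// min(1, 1 / 1.778) = 0.5625, so the quad keeps its width and shrinks vertically to match
// the video's aspect ratio.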
private void UpdateStatusText() {
float fps = CurrentPosition > 0 ?
(framecount / (CurrentPosition / 1000f)) : CurrentPosition;
string status = texWidth + " x " + texHeight + " buffer: " +
(BufferedPosition / 1000) + " " + PlayerState + " fps: " + fps;
if (statusText != null) {
if (statusText.text != status) {
statusText.text = status;
}
}
}
/// <summary>
/// Issues the player event.
/// </summary>
/// <param name="evt">The event to send to the video player
/// instance.
/// </param>
private void IssuePlayerEvent(RenderCommand evt) {
if (renderEventFunction == IntPtr.Zero) {
renderEventFunction = GetRenderEventFunc();
}
if (renderEventFunction == IntPtr.Zero || evt == RenderCommand.None) {
Debug.LogError("Attempt to IssuePlayerEvent before renderEventFunction ready.");
return;
}
GL.IssuePluginEvent(renderEventFunction, videoPlayerEventBase + (int)evt);
}
void Update() {
while (ExecuteOnMainThread.Count > 0) {
ExecuteOnMainThread.Dequeue().Invoke();
}
if (VideoReady) {
IssuePlayerEvent(RenderCommand.UpdateVideo);
GetVideoMatrix(videoPlayerPtr, videoMatrix);
long vidTimestamp = GetVideoTimestampNs(videoPlayerPtr);
if (vidTimestamp != lastVideoTimestamp) {
framecount++;
}
lastVideoTimestamp = vidTimestamp;
UpdateMaterial();
if (adjustAspectRatio) {
AdjustAspectRatio();
}
if ((int) framecount % 30 == 0) {
UpdateStatusText();
}
long bp = BufferedPosition;
if (bp != lastBufferedPosition) {
lastBufferedPosition = bp;
UpdateStatusText();
}
}
}
public void RemoveOnVideoEventCallback(Action<int> callback) {
if (onEventCallbacks != null) {
onEventCallbacks.Remove(callback);
}
}
public void SetOnVideoEventCallback(Action<int> callback) {
if (onEventCallbacks == null) {
onEventCallbacks = new List<Action<int>>();
}
onEventCallbacks.Add(callback);
SetOnVideoEventCallback(videoPlayerPtr, InternalOnVideoEventCallback,
ToIntPtr(this));
}
internal void FireVideoEvent(int eventId) {
if (onEventCallbacks == null) {
return;
}
// Copy the collection so the callbacks can remove themselves from the list.
Action<int>[] cblist = onEventCallbacks.ToArray();
foreach (Action<int> cb in cblist) {
try {
cb(eventId);
} catch (Exception e) {
Debug.LogError("exception calling callback: " + e);
}
}
}
[AOT.MonoPInvokeCallback(typeof(OnVideoEventCallback))]
static void InternalOnVideoEventCallback(IntPtr cbdata, int eventId) {
if (cbdata == IntPtr.Zero) {
return;
}
GvrVideoPlayerTexture player;
var gcHandle = GCHandle.FromIntPtr(cbdata);
try {
player = (GvrVideoPlayerTexture) gcHandle.Target;
}
catch (InvalidCastException) {
Debug.LogError("GC Handle pointed to unexpected type: " +
gcHandle.Target + ". Expected " +
typeof(GvrVideoPlayerTexture));
throw;
}
if (player != null) {
ExecuteOnMainThread.Enqueue(() => player.FireVideoEvent(eventId));
}
}
public void SetOnExceptionCallback(Action<string, string> callback) {
if (onExceptionCallbacks == null) {
onExceptionCallbacks = new List<Action<string, string>>();
SetOnExceptionCallback(videoPlayerPtr, InternalOnExceptionCallback,
ToIntPtr(this));
}
onExceptionCallbacks.Add(callback);
}
[AOT.MonoPInvokeCallback(typeof(OnExceptionCallback))]
static void InternalOnExceptionCallback(string type, string msg,
IntPtr cbdata) {
if (cbdata == IntPtr.Zero) {
return;
}
GvrVideoPlayerTexture player;
var gcHandle = GCHandle.FromIntPtr(cbdata);
try {
player = (GvrVideoPlayerTexture) gcHandle.Target;
}
catch (InvalidCastException) {
Debug.LogError("GC Handle pointed to unexpected type: " +
gcHandle.Target + ". Expected " +
typeof(GvrVideoPlayerTexture));
throw;
}
if (player != null) {
ExecuteOnMainThread.Enqueue(() => player.FireOnException(type, msg));
}
}
internal void FireOnException(string type, string msg) {
if (onExceptionCallbacks == null) {
return;
}
foreach (Action<string, string> cb in onExceptionCallbacks) {
try {
cb(type, msg);
} catch (Exception e) {
Debug.LogError("exception calling callback: " + e);
}
}
}
internal static IntPtr ToIntPtr(System.Object obj) {
GCHandle handle = GCHandle.Alloc(obj);
return GCHandle.ToIntPtr(handle);
}
internal string ProcessURL() {
return videoURL.Replace("${Application.dataPath}", Application.dataPath);
}
internal delegate void OnVideoEventCallback(IntPtr cbdata, int eventId);
internal delegate void OnExceptionCallback(string type, string msg,
IntPtr cbdata);
#if UNITY_ANDROID && !UNITY_EDITOR
private const string dllName = "gvrvideo";
[DllImport(dllName)]
private static extern IntPtr GetRenderEventFunc();
[DllImport(dllName)]
private static extern void SetExternalTextures(IntPtr videoPlayerPtr,
int[] texIds,
int size,
int w,
int h);
[DllImport(dllName)]
private static extern IntPtr GetRenderableTextureId(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern int GetExternalSurfaceTextureId(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern void GetVideoMatrix(IntPtr videoPlayerPtr,
float[] videoMatrix);
[DllImport(dllName)]
private static extern long GetVideoTimestampNs(IntPtr videoPlayerPtr);
// Keep public so we can check for the dll being present at runtime.
[DllImport(dllName)]
public static extern IntPtr CreateVideoPlayer();
// Keep public so we can check for the dll being present at runtime.
[DllImport(dllName)]
public static extern void DestroyVideoPlayer(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern int GetVideoPlayerEventBase(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern IntPtr InitVideoPlayer(IntPtr videoPlayerPtr,
int videoType,
string videoURL,
string contentID,
string providerId,
bool useSecurePath,
bool useExisting);
[DllImport(dllName)]
private static extern void SetInitialResolution(IntPtr videoPlayerPtr,
int initialResolution);
[DllImport(dllName)]
private static extern int GetPlayerState(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern int GetWidth(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern int GetHeight(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern int PlayVideo(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern int PauseVideo(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern bool IsVideoReady(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern bool IsVideoPaused(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern long GetDuration(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern long GetBufferedPosition(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern long GetCurrentPosition(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern void SetCurrentPosition(IntPtr videoPlayerPtr,
long pos);
[DllImport(dllName)]
private static extern int GetBufferedPercentage(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern int GetMaxVolume(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern int GetCurrentVolume(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern void SetCurrentVolume(IntPtr videoPlayerPtr,
int value);
[DllImport(dllName)]
private static extern bool SetVideoPlayerSupportClassname(
IntPtr videoPlayerPtr,
string classname);
[DllImport(dllName)]
private static extern IntPtr GetRawPlayer(IntPtr videoPlayerPtr);
[DllImport(dllName)]
private static extern void SetOnVideoEventCallback(IntPtr videoPlayerPtr,
OnVideoEventCallback callback,
IntPtr callback_arg);
[DllImport(dllName)]
private static extern void SetOnExceptionCallback(IntPtr videoPlayerPtr,
OnExceptionCallback callback,
IntPtr callback_arg);
#else
private const string NOT_IMPLEMENTED_MSG =
"Not implemented on this platform";
private static IntPtr GetRenderEventFunc() {
Debug.Log(NOT_IMPLEMENTED_MSG);
return IntPtr.Zero;
}
private static void SetExternalTextures(IntPtr videoPlayerPtr,
int[] texIds,
int size,
int w,
int h) {
Debug.Log(NOT_IMPLEMENTED_MSG);
}
private static IntPtr GetRenderableTextureId(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return IntPtr.Zero;
}
private static int GetExternalSurfaceTextureId(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return 0;
}
private static void GetVideoMatrix(IntPtr videoPlayerPtr,
float[] videoMatrix) {
Debug.Log(NOT_IMPLEMENTED_MSG);
}
private static long GetVideoTimestampNs(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return -1;
}
// Make this public so we can test the loading of the DLL.
public static IntPtr CreateVideoPlayer() {
Debug.Log(NOT_IMPLEMENTED_MSG);
return IntPtr.Zero;
}
// Make this public so we can test the loading of the DLL.
public static void DestroyVideoPlayer(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
}
private static int GetVideoPlayerEventBase(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return 0;
}
private static IntPtr InitVideoPlayer(IntPtr videoPlayerPtr, int videoType,
string videoURL,
string contentID,
string providerId,
bool useSecurePath,
bool useExisting) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return IntPtr.Zero;
}
private static void SetInitialResolution(IntPtr videoPlayerPtr,
int initialResolution) {
Debug.Log(NOT_IMPLEMENTED_MSG);
}
private static int GetPlayerState(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return -1;
}
private static int GetWidth(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return -1;
}
private static int GetHeight(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return -1;
}
private static int PlayVideo(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return 0;
}
private static int PauseVideo(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return 0;
}
private static bool IsVideoReady(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return false;
}
private static bool IsVideoPaused(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return true;
}
private static long GetDuration(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return -1;
}
private static long GetBufferedPosition(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return -1;
}
private static long GetCurrentPosition(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return -1;
}
private static void SetCurrentPosition(IntPtr videoPlayerPtr, long pos) {
Debug.Log(NOT_IMPLEMENTED_MSG);
}
private static int GetBufferedPercentage(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return 0;
}
private static int GetMaxVolume(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return 0;
}
private static int GetCurrentVolume(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return 0;
}
private static void SetCurrentVolume(IntPtr videoPlayerPtr, int value) {
Debug.Log(NOT_IMPLEMENTED_MSG);
}
private static bool SetVideoPlayerSupportClassname(IntPtr videoPlayerPtr,
string classname) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return false;
}
private static IntPtr GetRawPlayer(IntPtr videoPlayerPtr) {
Debug.Log(NOT_IMPLEMENTED_MSG);
return IntPtr.Zero;
}
private static void SetOnVideoEventCallback(IntPtr videoPlayerPtr,
OnVideoEventCallback callback,
IntPtr callback_arg) {
Debug.Log(NOT_IMPLEMENTED_MSG);
}
private static void SetOnExceptionCallback(IntPtr videoPlayerPtr,
OnExceptionCallback callback,
IntPtr callback_arg) {
Debug.Log(NOT_IMPLEMENTED_MSG);
}
#endif // UNITY_ANDROID && !UNITY_EDITOR
}
| |
//-----------------------------------------------------------------------
//
// Microsoft Windows Client Platform
// Copyright (C) Microsoft Corporation, 2002
//
// File: CompositeFontFamily.cs
//
// Contents: Composite font family
//
// Created: 5-25-2003 Worachai Chaoweeraprasit (wchao)
//
//------------------------------------------------------------------------
using System;
using System.Globalization;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Security;
using System.Windows;
using System.Windows.Markup; // for XmlLanguage
using System.Windows.Media;
using System.Windows.Media.TextFormatting;
using MS.Internal.FontCache;
using MS.Internal.FontFace;
using MS.Internal.TextFormatting;
using FontFamily = System.Windows.Media.FontFamily;
using SR=MS.Internal.PresentationCore.SR;
using SRID=MS.Internal.PresentationCore.SRID;
namespace MS.Internal.Shaping
{
/// <summary>
/// Composite font family
/// </summary>
internal sealed class CompositeFontFamily : IFontFamily
{
private readonly CompositeFontInfo _fontInfo;
private IFontFamily _firstFontFamily;
#region Constructors
/// <summary>
/// Construct a default composite font family
/// </summary>
internal CompositeFontFamily()
: this(new CompositeFontInfo())
{}
/// <summary>
/// Construct a composite font family from composite font info
/// </summary>
internal CompositeFontFamily(CompositeFontInfo fontInfo)
{
_fontInfo = fontInfo;
}
/// <summary>
/// Construct a composite font family with a single target family name
/// </summary>
internal CompositeFontFamily(
string friendlyName
) :
this(
friendlyName,
null // firstFontFamily
)
{}
/// <summary>
/// Construct a composite font family with a single target family name
/// after the first font family in the target family is known
/// </summary>
/// <SecurityNote>
/// Critical - as this accesses _firstFontFamily which is marked critical.
/// Safe - as this doesn't expose it.
/// </SecurityNote>
[SecurityCritical, SecurityTreatAsSafe]
internal CompositeFontFamily(
string friendlyName,
IFontFamily firstFontFamily
) :
this()
{
FamilyMaps.Add(
new FontFamilyMap(
0, FontFamilyMap.LastUnicodeScalar,
null, // any language
friendlyName,
1 // scaleInEm
)
);
_firstFontFamily = firstFontFamily;
}
#endregion
#region IFontFamily properties
/// <summary>
/// Font family name table indexed by culture
/// </summary>
IDictionary<XmlLanguage, string> IFontFamily.Names
{
get
{
return _fontInfo.FamilyNames;
}
}
/// <summary>
/// Distance from character cell top to English baseline relative to em size.
/// </summary>
public double Baseline(double emSize, double toReal, double pixelsPerDip, TextFormattingMode textFormattingMode)
{
if (textFormattingMode == TextFormattingMode.Ideal)
{
return ((IFontFamily)this).BaselineDesign * emSize;
}
else
{
// If the composite font has a pre-specified Baseline then we respect it in calculating the
// baseline, but we round it since Compatible metrics are pixel aligned.
if (_fontInfo.Baseline != 0)
{
return Math.Round(_fontInfo.Baseline * emSize);
}
// If the composite font has no specified Baseline then we get the compatible font metrics of the
// first font family in the composite font.
else
{
return GetFirstFontFamily().Baseline(emSize, toReal, pixelsPerDip, textFormattingMode);
}
}
}
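// Worked example for Baseline(...) above (illustrative numbers only): with a composite-font
// Baseline of 0.9 and emSize = 16.0, Ideal mode returns 0.9 * 16.0 = 14.4, while the
// Display/Compatible path returns Math.Round(14.4) = 14.0 so the baseline lands on a whole pixel.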
public void SetBaseline(double value)
{
_fontInfo.Baseline = value;
}
/// <summary>
/// Recommended baseline-to-baseline distance for text in this font.
/// </summary>
public double LineSpacing(double emSize, double toReal, double pixelsPerDip, TextFormattingMode textFormattingMode)
{
if (textFormattingMode == TextFormattingMode.Ideal)
{
return ((IFontFamily)this).LineSpacingDesign * emSize;
}
else
{
// If the composite font has a pre-specified LineSpacing then we respect it in calculating the
// line spacing, but we round it since Compatible metrics are pixel aligned.
if (_fontInfo.LineSpacing != 0)
{
return Math.Round(_fontInfo.LineSpacing * emSize);
}
// If the composite font has no specified LineSpacing then we get the compatible font metrics of the
// first font family in the composite font.
else
{
return GetFirstFontFamily().LineSpacing(emSize, toReal, pixelsPerDip, textFormattingMode);
}
}
}
double IFontFamily.BaselineDesign
{
get
{
if (_fontInfo.Baseline == 0)
{
_fontInfo.Baseline = GetFirstFontFamily().BaselineDesign;
}
return _fontInfo.Baseline;
}
}
double IFontFamily.LineSpacingDesign
{
get
{
if (_fontInfo.LineSpacing == 0)
{
_fontInfo.LineSpacing = GetFirstFontFamily().LineSpacingDesign;
}
return _fontInfo.LineSpacing;
}
}
public void SetLineSpacing(double value)
{
_fontInfo.LineSpacing = value;
}
/// <summary>
/// Get typeface metrics of the specified typeface
/// </summary>
ITypefaceMetrics IFontFamily.GetTypefaceMetrics(
FontStyle style,
FontWeight weight,
FontStretch stretch
)
{
if (_fontInfo.FamilyTypefaces == null &&
_fontInfo.FamilyMaps.Count == 1 &&
_fontInfo.FamilyMaps[0].IsSimpleFamilyMap)
{
// Typical e.g. "MyFont, sans-serif"
return GetFirstFontFamily().GetTypefaceMetrics(style, weight, stretch);
}
return FindTypefaceMetrics(style, weight, stretch);
}
/// <summary>
/// Look up device font for the typeface.
/// </summary>
IDeviceFont IFontFamily.GetDeviceFont(FontStyle style, FontWeight weight, FontStretch stretch)
{
FamilyTypeface bestFace = FindExactFamilyTypeface(style, weight, stretch);
if (bestFace != null && bestFace.DeviceFontName != null)
return bestFace;
else
return null;
}
/// <summary>
/// Get the family name corresponding to the first n characters of the specified character string
/// </summary>
bool IFontFamily.GetMapTargetFamilyNameAndScale(
CharacterBufferRange unicodeString,
CultureInfo culture,
CultureInfo digitCulture,
double defaultSizeInEm,
out int cchAdvance,
out string targetFamilyName,
out double scaleInEm
)
{
Invariant.Assert(unicodeString.CharacterBuffer != null && unicodeString.Length > 0);
Invariant.Assert(culture != null);
// Get the family map. This will find the first family map that matches
// the specified culture, an ancestor neutral culture, or "any" culture.
FontFamilyMap familyMap = GetTargetFamilyMap(
unicodeString,
culture,
digitCulture,
out cchAdvance
);
// Return the values for the matching FontFamilyMap. If there is none this is
// FontFamilyMap.Default which has Target == null and Scale == 1.0.
targetFamilyName = familyMap.Target;
scaleInEm = familyMap.Scale;
return true;
}
ICollection<Typeface> IFontFamily.GetTypefaces(FontFamilyIdentifier familyIdentifier)
{
return new TypefaceCollection(new FontFamily(familyIdentifier), FamilyTypefaces);
}
#endregion
#region collections exposed by FontFamily
internal LanguageSpecificStringDictionary FamilyNames
{
get { return _fontInfo.FamilyNames; }
}
internal FamilyTypefaceCollection FamilyTypefaces
{
get { return _fontInfo.GetFamilyTypefaceList(); }
}
internal FontFamilyMapCollection FamilyMaps
{
get { return _fontInfo.FamilyMaps; }
}
#endregion
private FontFamilyMap GetTargetFamilyMap(
CharacterBufferRange unicodeString,
CultureInfo culture,
CultureInfo digitCulture,
out int cchAdvance
)
{
DigitMap digitMap = new DigitMap(digitCulture);
ushort[] familyMaps = _fontInfo.GetFamilyMapsOfLanguage(XmlLanguage.GetLanguage(culture.IetfLanguageTag));
int sizeofChar = 0;
int ch = 0;
// Skip all the leading joiner characters. They need to be shaped with the
// surrounding strong characters.
cchAdvance = Classification.AdvanceWhile(unicodeString, ItemClass.JoinerClass);
if (cchAdvance >= unicodeString.Length)
{
// It is rare that the run only contains joiner characters.
// If it really happens, just map them to the initial family map.
return _fontInfo.GetFamilyMapOfChar(
familyMaps,
Classification.UnicodeScalar(unicodeString, out sizeofChar)
);
}
//
// If the run starts with combining marks, we will not be able to find base characters for them
// within the run. These combining marks will be mapped to their best fonts as normal characters.
//
ch = Classification.UnicodeScalar(
new CharacterBufferRange(unicodeString, cchAdvance, unicodeString.Length - cchAdvance),
out sizeofChar
);
bool hasBaseChar = !Classification.IsCombining(ch);
ch = digitMap[ch];
FontFamilyMap familyMap = _fontInfo.GetFamilyMapOfChar(familyMaps, ch);
Invariant.Assert(familyMap != null);
for (cchAdvance += sizeofChar; cchAdvance < unicodeString.Length; cchAdvance += sizeofChar)
{
ch = Classification.UnicodeScalar(
new CharacterBufferRange(unicodeString, cchAdvance, unicodeString.Length - cchAdvance),
out sizeofChar
);
if (Classification.IsJoiner(ch))
continue; // continue to advance if current char is a joiner
if (!Classification.IsCombining(ch))
{
hasBaseChar = true;
}
else if (hasBaseChar)
{
continue; // continue to advance for combining mark with base char
}
ch = digitMap[ch];
if (_fontInfo.GetFamilyMapOfChar(familyMaps, ch) != familyMap)
break;
}
return familyMap;
}
/// <summary>
/// Get the first font family of the first target family name
/// </summary>
private IFontFamily GetFirstFontFamily()
{
if(_firstFontFamily == null)
{
if (_fontInfo.FamilyMaps.Count != 0)
{
_firstFontFamily = FontFamily.FindFontFamilyFromFriendlyNameList(_fontInfo.FamilyMaps[0].Target);
}
else
{
_firstFontFamily = FontFamily.LookupFontFamily(FontFamily.NullFontFamilyCanonicalName);
}
Invariant.Assert(_firstFontFamily != null);
}
return _firstFontFamily;
}
private ITypefaceMetrics FindTypefaceMetrics(
FontStyle style,
FontWeight weight,
FontStretch stretch
)
{
FamilyTypeface bestFace = FindNearestFamilyTypeface(style, weight, stretch);
if (bestFace == null)
return new CompositeTypefaceMetrics();
else
return bestFace;
}
/// <summary>
/// Find the face closest to the specified style, weight and stretch.
/// Returns null if there is no matching face.
/// </summary>
private FamilyTypeface FindNearestFamilyTypeface(
FontStyle style,
FontWeight weight,
FontStretch stretch
)
{
if (_fontInfo.FamilyTypefaces == null || _fontInfo.FamilyTypefaces.Count == 0)
{
return null;
}
FamilyTypeface bestFace = (FamilyTypeface)_fontInfo.FamilyTypefaces[0];
MatchingStyle bestMatch = new MatchingStyle(bestFace.Style, bestFace.Weight, bestFace.Stretch);
MatchingStyle target = new MatchingStyle(style, weight, stretch);
for (int i = 1; i < _fontInfo.FamilyTypefaces.Count; i++)
{
FamilyTypeface currentFace = (FamilyTypeface)_fontInfo.FamilyTypefaces[i];
MatchingStyle currentMatch = new MatchingStyle(currentFace.Style, currentFace.Weight, currentFace.Stretch);
if (MatchingStyle.IsBetterMatch(target, bestMatch, ref currentMatch))
{
bestFace = currentFace;
bestMatch = currentMatch;
}
}
return bestFace;
}
/// <summary>
/// Find the face exactly matching the specified style, weight and stretch.
/// Returns null if there is no matching face.
/// </summary>
private FamilyTypeface FindExactFamilyTypeface(
FontStyle style,
FontWeight weight,
FontStretch stretch
)
{
if (_fontInfo.FamilyTypefaces == null || _fontInfo.FamilyTypefaces.Count == 0)
{
return null;
}
MatchingStyle target = new MatchingStyle(style, weight, stretch);
foreach (FamilyTypeface currentFace in _fontInfo.FamilyTypefaces)
{
MatchingStyle currentMatch = new MatchingStyle(currentFace.Style, currentFace.Weight, currentFace.Stretch);
if (currentMatch == target)
{
return currentFace;
}
}
return null;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;
using Orleans.Runtime.Configuration;
namespace Orleans.Runtime
{
internal class SocketManager
{
private readonly LRU<IPEndPoint, Socket> cache;
private const int MAX_SOCKETS = 200;
internal SocketManager(IMessagingConfiguration config)
{
cache = new LRU<IPEndPoint, Socket>(MAX_SOCKETS, config.MaxSocketAge, SendingSocketCreator);
cache.RaiseFlushEvent += FlushHandler;
}
/// <summary>
/// Creates a socket bound to an address for use accepting connections.
/// This is for use by client gateways and other acceptors.
/// </summary>
/// <param name="address">The address to bind to.</param>
/// <returns>The new socket, appropriately bound.</returns>
internal static Socket GetAcceptingSocketForEndpoint(IPEndPoint address)
{
var s = new Socket(address.AddressFamily, SocketType.Stream, ProtocolType.Tcp);
try
{
// Prep the socket so it will reset on close
s.LingerState = new LingerOption(true, 0);
s.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);
// And bind it to the address
s.Bind(address);
}
catch (Exception)
{
CloseSocket(s);
throw;
}
return s;
}
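// Illustrative usage of GetAcceptingSocketForEndpoint above (comments only; endpoint and
// backlog values are assumptions): an acceptor such as a gateway listener would follow the
// bound socket with Listen/Accept.
//
//   var listenSocket = SocketManager.GetAcceptingSocketForEndpoint(myEndpoint);
//   listenSocket.Listen(backlog);            // backlog chosen by the caller
//   var connection = listenSocket.Accept();  // or AcceptAsync in a real accept loop
//
// The LingerOption(true, 0) set above forces an abortive close (RST) instead of lingering
// in TIME_WAIT, which is why the same preparation is reused for sending sockets below.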
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
internal bool CheckSendingSocket(IPEndPoint target)
{
return cache.ContainsKey(target);
}
internal Socket GetSendingSocket(IPEndPoint target)
{
return cache.Get(target);
}
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes")]
private Socket SendingSocketCreator(IPEndPoint target)
{
var s = new Socket(target.AddressFamily, SocketType.Stream, ProtocolType.Tcp);
try
{
s.Connect(target);
// Prep the socket so it will reset on close and won't Nagle
s.LingerState = new LingerOption(true, 0);
s.NoDelay = true;
WriteConnectionPreamble(s, Constants.SiloDirectConnectionId); // Identifies this client as a direct silo-to-silo socket
// Start an async receive off of the socket to detect closure
var receiveAsyncEventArgs = new SocketAsyncEventArgs
{
BufferList = new List<ArraySegment<byte>> { new ArraySegment<byte>(new byte[4]) },
UserToken = new Tuple<Socket, IPEndPoint, SocketManager>(s, target, this)
};
receiveAsyncEventArgs.Completed += ReceiveCallback;
bool receiveCompleted = s.ReceiveAsync(receiveAsyncEventArgs);
NetworkingStatisticsGroup.OnOpenedSendingSocket();
if (!receiveCompleted)
{
ReceiveCallback(this, receiveAsyncEventArgs);
}
}
catch (Exception)
{
try
{
s.Dispose();
}
catch (Exception)
{
// ignore
}
throw;
}
return s;
}
internal static void WriteConnectionPreamble(Socket socket, GrainId grainId)
{
int size = 0;
byte[] grainIdByteArray = null;
if (grainId != null)
{
grainIdByteArray = grainId.ToByteArray();
size += grainIdByteArray.Length;
}
ByteArrayBuilder sizeArray = new ByteArrayBuilder();
sizeArray.Append(size);
socket.Send(sizeArray.ToBytes()); // The size of the data that is coming next.
//socket.Send(guid.ToByteArray()); // The guid of client/silo id
if (grainId != null)
{
// No need to send in a loop.
// From MSDN: If you are using a connection-oriented protocol, Send will block until all of the bytes in the buffer are sent,
// unless a time-out was set by using Socket.SendTimeout.
// If the time-out value was exceeded, the Send call will throw a SocketException.
socket.Send(grainIdByteArray); // The grainId of the client
}
}
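// Preamble layout sketch (derived from WriteConnectionPreamble above, not a separate spec):
// the sender writes a 4-byte length prefix produced by ByteArrayBuilder.Append(size),
// followed by the grain id bytes themselves. For a hypothetical 16-byte grain id:
//
//   [ 4-byte length = 16 ][ 16 grain-id bytes ]
//
// The byte order of the length prefix is whatever ByteArrayBuilder.Append(int) emits
// (assumed little-endian here), so the receiving side must use the matching reader.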
// We start an async receive, with this callback, off of every send socket.
// Since we should never see data coming in on these sockets, having the receive complete means that
// the socket is in an unknown state and we should close it and try again.
private static void ReceiveCallback(object sender, SocketAsyncEventArgs socketAsyncEventArgs)
{
var t = socketAsyncEventArgs.UserToken as Tuple<Socket, IPEndPoint, SocketManager>;
try
{
t?.Item3.InvalidateEntry(t.Item2);
}
catch (Exception ex)
{
LogManager.GetLogger("SocketManager", LoggerType.Runtime).Error(ErrorCode.Messaging_Socket_ReceiveError, $"ReceiveCallback: {t?.Item2}", ex);
}
finally
{
socketAsyncEventArgs.Dispose();
}
}
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1822:MarkMembersAsStatic"), System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA1801:ReviewUnusedParameters", MessageId = "s")]
internal void ReturnSendingSocket(Socket s)
{
// Do nothing -- the socket will get cleaned up when it gets flushed from the cache
}
private static void FlushHandler(Object sender, LRU<IPEndPoint, Socket>.FlushEventArgs args)
{
if (args.Value == null) return;
CloseSocket(args.Value);
NetworkingStatisticsGroup.OnClosedSendingSocket();
}
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes")]
internal void InvalidateEntry(IPEndPoint target)
{
Socket socket;
if (!cache.RemoveKey(target, out socket)) return;
CloseSocket(socket);
NetworkingStatisticsGroup.OnClosedSendingSocket();
}
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes")]
// Note that this method assumes that there are no other threads accessing this object while this method runs.
// Since this is true for the MessageCenter's use of this object, we don't lock around all calls to avoid the overhead.
internal void Stop()
{
// Clear() on an LRU<> calls the flush handler on every item, so no need to manually close the sockets.
cache.Clear();
}
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes")]
internal static void CloseSocket(Socket s)
{
if (s == null)
{
return;
}
try
{
s.Shutdown(SocketShutdown.Both);
}
catch (ObjectDisposedException)
{
// Socket is already closed -- we're done here
return;
}
catch (Exception)
{
// Ignore
}
#if !NETSTANDARD
try
{
s.Disconnect(false);
}
catch (Exception)
{
// Ignore
}
#endif
try
{
s.Dispose();
}
catch (Exception)
{
// Ignore
}
}
}
}
| |
// SPDX-License-Identifier: MIT
// Copyright [email protected]
// Copyright iced contributors
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Generator.Constants;
using Generator.Enums;
using Generator.IO;
using Generator.Tables;
namespace Generator.Formatters.Rust {
[Generator(TargetLanguage.Rust)]
sealed class RustTableGen : TableGen {
readonly GeneratorContext generatorContext;
readonly IdentifierConverter idConverter;
public RustTableGen(GeneratorContext generatorContext)
: base(generatorContext.Types) {
this.generatorContext = generatorContext;
idConverter = RustIdentifierConverter.Create();
}
protected override void Generate(MemorySizeDef[] defs) {
var fmtConsts1 = new Dictionary<string, string>(StringComparer.Ordinal);
var fmtConsts2 = new Dictionary<string, string[]>(StringComparer.Ordinal);
GenerateFast(defs);
GenerateGas(defs, fmtConsts1);
GenerateIntel(defs, fmtConsts1, fmtConsts2);
GenerateMasm(defs, fmtConsts1, fmtConsts2);
GenerateNasm(defs, fmtConsts1, fmtConsts2);
GenerateFmtStrings(fmtConsts1, fmtConsts2);
}
void GenerateFmtStrings(Dictionary<string, string> fmtConsts1, Dictionary<string, string[]> fmtConsts2) {
var consts1 = fmtConsts1.OrderBy(a => a.Key, StringComparer.Ordinal).ToArray();
var consts2 = fmtConsts2.OrderBy(a => a.Key, StringComparer.Ordinal).ToArray();
var filename = generatorContext.Types.Dirs.GetRustFilename("formatter", "fmt_consts.rs");
new FileUpdater(TargetLanguage.Rust, "FormatterConstantsDef", filename).Generate(writer => {
foreach (var kv in consts1)
writer.WriteLine($"pub(super) {kv.Key}: FormatterString,");
});
new FileUpdater(TargetLanguage.Rust, "FormatterConstantsInit", filename).Generate(writer => {
foreach (var kv in consts1)
writer.WriteLine($"{kv.Key}: FormatterString::new_str(\"{kv.Value}\"),");
});
new FileUpdater(TargetLanguage.Rust, "FormatterArrayConstantsDef", filename).Generate(writer => {
foreach (var kv in consts2)
writer.WriteLine($"pub(super) {kv.Key}: [&'static FormatterString; 2],");
});
new FileUpdater(TargetLanguage.Rust, "FormatterArrayConstantsCreate", filename).Generate(writer => {
foreach (var kv in consts2) {
var strings = kv.Value;
writer.WriteLine($"let {kv.Key}: [&'static FormatterString; 2] = [{string.Join(", ", strings.Select(a => $"&c.{a}"))}];");
}
});
new FileUpdater(TargetLanguage.Rust, "FormatterArrayConstantsInit", filename).Generate(writer => {
foreach (var kv in consts2)
writer.WriteLine($"{kv.Key},");
});
}
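// For reference (illustrative output, derived from the WriteLine format strings in
// GenerateFmtStrings above): for a one-word keyword "ptr" and a two-word keyword
// "dword_ptr" the updated fmt_consts.rs regions would contain lines like
//
//   pub(super) ptr: FormatterString,                        // FormatterConstantsDef
//   ptr: FormatterString::new_str("ptr"),                   // FormatterConstantsInit
//   pub(super) dword_ptr: [&'static FormatterString; 2],    // FormatterArrayConstantsDef
//   let dword_ptr: [&'static FormatterString; 2] = [&c.dword, &c.ptr];
//   dword_ptr,                                              // FormatterArrayConstantsInit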
static void Add(Dictionary<string, string> fmtConsts1, BroadcastToKind bcst) {
var s = bcst.ToString();
if (!s.StartsWith("b", StringComparison.Ordinal))
throw new InvalidOperationException();
var value = s[1..];
fmtConsts1[s] = value;
}
static void AddKeywords(Dictionary<string, string> fmtConsts1, Dictionary<string, string[]> fmtConsts2, string name) {
var parts = name.Split('_');
if (parts.Length > 2)
throw new InvalidOperationException();
foreach (var kw in parts)
fmtConsts1[kw] = kw;
if (parts.Length == 2)
fmtConsts2[name] = parts;
}
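// Worked example (derived directly from Add/AddKeywords above): AddKeywords(..., "dword_ptr")
// splits on '_' and records fmtConsts1["dword"] = "dword", fmtConsts1["ptr"] = "ptr" and
// fmtConsts2["dword_ptr"] = ["dword", "ptr"]; Add(..., BroadcastToKind.b1to2) strips the
// leading 'b' and records fmtConsts1["b1to2"] = "1to2".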
void GenerateFast(MemorySizeDef[] defs) {
var filename = generatorContext.Types.Dirs.GetRustFilename("formatter", "fast", "mem_size_tbl.rs");
new FileUpdater(TargetLanguage.Rust, "MemorySizes", filename).Generate(writer => {
writer.WriteLine(RustConstants.AttributeNoRustFmt);
writer.WriteLine($"static MEM_SIZE_TBL_DATA: [u8; {defs.Length}] = [");
using (writer.Indent()) {
foreach (var def in defs) {
writer.WriteByte(checked((byte)def.Fast.Value));
writer.WriteLine();
}
}
writer.WriteLine("];");
});
new FileUpdater(TargetLanguage.Rust, "Match", filename).Generate(writer => {
foreach (var kw in defs.Select(a => a.Fast).Distinct().OrderBy(a => a.Value)) {
var s = (FastMemoryKeywords)kw.Value == FastMemoryKeywords.None ? string.Empty : (kw.RawName + "_").Replace('_', ' ');
writer.WriteLine($"{kw.Value} => \"{s}\",");
}
});
}
void GenerateGas(MemorySizeDef[] defs, Dictionary<string, string> fmtConsts1) {
var icedConstants = genTypes.GetConstantsType(TypeIds.IcedConstants);
var broadcastToKindValues = genTypes[TypeIds.BroadcastToKind].Values;
var filename = generatorContext.Types.Dirs.GetRustFilename("formatter", "gas", "mem_size_tbl.rs");
new FileUpdater(TargetLanguage.Rust, "BcstTo", filename).Generate(writer => {
int first = (int)icedConstants[IcedConstants.FirstBroadcastMemorySizeName].ValueUInt64;
int len = defs.Length - first;
writer.WriteLine(RustConstants.AttributeNoRustFmt);
writer.WriteLine($"static BCST_TO_DATA: [u8; {len}] = [");
using (writer.Indent()) {
for (int i = first; i < defs.Length; i++) {
writer.WriteByte(checked((byte)defs[i].BroadcastToKind.Value));
writer.WriteLine();
}
}
writer.WriteLine("];");
});
new FileUpdater(TargetLanguage.Rust, "BroadcastToKindMatch", filename).Generate(writer => {
foreach (var kw in broadcastToKindValues) {
writer.Write($"0x{kw.Value:X2} => ");
var bcst = (BroadcastToKind)kw.Value;
if (bcst == BroadcastToKind.None)
writer.WriteLine("&c.empty,");
else {
Add(fmtConsts1, bcst);
writer.WriteLine($"&c.{kw.RawName},");
}
}
});
}
void GenerateIntel(MemorySizeDef[] defs, Dictionary<string, string> fmtConsts1, Dictionary<string, string[]> fmtConsts2) {
var broadcastToKindValues = genTypes[TypeIds.BroadcastToKind].Values;
var filename = generatorContext.Types.Dirs.GetRustFilename("formatter", "intel", "mem_size_tbl.rs");
var intelKeywords = genTypes[TypeIds.IntelMemoryKeywords].Values;
const int BroadcastToKindShift = 5;
const int MemoryKeywordsMask = 0x1F;
new FileUpdater(TargetLanguage.Rust, "ConstData", filename).Generate(writer => {
writer.WriteLine($"const {idConverter.Constant(nameof(BroadcastToKindShift))}: u32 = {BroadcastToKindShift};");
writer.WriteLine($"const {idConverter.Constant(nameof(MemoryKeywordsMask))}: u8 = {MemoryKeywordsMask};");
});
new FileUpdater(TargetLanguage.Rust, "MemorySizes", filename).Generate(writer => {
writer.WriteLine(RustConstants.AttributeNoRustFmt);
writer.WriteLine($"static MEM_SIZE_TBL_DATA: [u8; {defs.Length}] = [");
using (writer.Indent()) {
foreach (var def in defs) {
uint value = def.Intel.Value | (def.BroadcastToKind.Value << BroadcastToKindShift);
if (value > 0xFF || def.Intel.Value > MemoryKeywordsMask)
throw new InvalidOperationException();
writer.WriteByte(checked((byte)value));
writer.WriteLine();
}
}
writer.WriteLine("];");
});
new FileUpdater(TargetLanguage.Rust, "MemoryKeywordsMatch", filename).Generate(writer => {
foreach (var kw in intelKeywords) {
writer.Write($"0x{kw.Value:X2} => ");
if ((IntelMemoryKeywords)kw.Value == IntelMemoryKeywords.None)
writer.WriteLine("&ac.nothing,");
else {
AddKeywords(fmtConsts1, fmtConsts2, kw.RawName);
writer.WriteLine($"&ac.{kw.RawName},");
}
}
});
new FileUpdater(TargetLanguage.Rust, "BroadcastToKindMatch", filename).Generate(writer => {
foreach (var kw in broadcastToKindValues) {
writer.Write($"0x{kw.Value:X2} => ");
var bcst = (BroadcastToKind)kw.Value;
if (bcst == BroadcastToKind.None)
writer.WriteLine("&c.empty,");
else {
Add(fmtConsts1, bcst);
writer.WriteLine($"&c.{kw.RawName},");
}
}
});
}
void GenerateMasm(MemorySizeDef[] defs, Dictionary<string, string> fmtConsts1, Dictionary<string, string[]> fmtConsts2) {
var filename = generatorContext.Types.Dirs.GetRustFilename("formatter", "masm", "mem_size_tbl.rs");
var masmKeywords = genTypes[TypeIds.MasmMemoryKeywords].Values;
var sizeToIndex = new Dictionary<uint, uint>();
uint index = 0;
foreach (var size in defs.Select(a => a.Size).Distinct().OrderBy(a => a))
sizeToIndex[size] = index++;
const int SizeKindShift = 5;
const int MemoryKeywordsMask = 0x1F;
new FileUpdater(TargetLanguage.CSharp, "ConstData", filename).Generate(writer => {
writer.WriteLine($"const {idConverter.Constant(nameof(SizeKindShift))}: u32 = {SizeKindShift};");
writer.WriteLine($"const {idConverter.Constant(nameof(MemoryKeywordsMask))}: u16 = {MemoryKeywordsMask};");
writer.WriteLine(RustConstants.AttributeNoRustFmt);
writer.WriteLine($"static SIZES: [u16; {sizeToIndex.Count}] = [");
using (writer.Indent()) {
foreach (var size in sizeToIndex.Select(a => a.Key).OrderBy(a => a))
writer.WriteLine($"{size},");
}
writer.WriteLine("];");
});
new FileUpdater(TargetLanguage.Rust, "MemorySizes", filename).Generate(writer => {
writer.WriteLine(RustConstants.AttributeNoRustFmt);
writer.WriteLine($"static MEM_SIZE_TBL_DATA: [u16; {defs.Length}] = [");
using (writer.Indent()) {
foreach (var def in defs) {
uint value = def.Masm.Value | (sizeToIndex[def.Size] << SizeKindShift);
if (value > 0xFFFF || def.Masm.Value > MemoryKeywordsMask)
throw new InvalidOperationException();
writer.WriteLine($"0x{value:X4},");
}
}
writer.WriteLine("];");
});
new FileUpdater(TargetLanguage.Rust, "MemoryKeywordsMatch", filename).Generate(writer => {
foreach (var kw in masmKeywords) {
writer.Write($"0x{kw.Value:X2} => ");
if ((MasmMemoryKeywords)kw.Value == MasmMemoryKeywords.None)
writer.WriteLine("&ac.nothing,");
else {
AddKeywords(fmtConsts1, fmtConsts2, kw.RawName);
writer.WriteLine($"&ac.{kw.RawName},");
}
}
});
AddKeywords(fmtConsts1, fmtConsts2, "mmword_ptr");
}
void GenerateNasm(MemorySizeDef[] defs, Dictionary<string, string> fmtConsts1, Dictionary<string, string[]> fmtConsts2) {
var icedConstants = genTypes.GetConstantsType(TypeIds.IcedConstants);
var broadcastToKindValues = genTypes[TypeIds.BroadcastToKind].Values;
var filename = generatorContext.Types.Dirs.GetRustFilename("formatter", "nasm", "mem_size_tbl.rs");
var nasmKeywords = genTypes[TypeIds.NasmMemoryKeywords].Values;
new FileUpdater(TargetLanguage.Rust, "BcstTo", filename).Generate(writer => {
int first = (int)icedConstants[IcedConstants.FirstBroadcastMemorySizeName].ValueUInt64;
int len = defs.Length - first;
writer.WriteLine(RustConstants.AttributeNoRustFmt);
writer.WriteLine($"static BCST_TO_DATA: [u8; {len}] = [");
using (writer.Indent()) {
for (int i = first; i < defs.Length; i++) {
writer.WriteByte(checked((byte)defs[i].BroadcastToKind.Value));
writer.WriteLine();
}
}
writer.WriteLine("];");
});
new FileUpdater(TargetLanguage.Rust, "MemorySizes", filename).Generate(writer => {
writer.WriteLine(RustConstants.AttributeNoRustFmt);
writer.WriteLine($"static MEM_SIZE_TBL_DATA: [u8; {defs.Length}] = [");
using (writer.Indent()) {
foreach (var def in defs) {
writer.WriteByte(checked((byte)def.Nasm.Value));
writer.WriteLine();
}
}
writer.WriteLine("];");
});
new FileUpdater(TargetLanguage.Rust, "MemoryKeywordsMatch", filename).Generate(writer => {
foreach (var kw in nasmKeywords) {
writer.Write($"0x{kw.Value:X2} => ");
if ((NasmMemoryKeywords)kw.Value == NasmMemoryKeywords.None)
writer.WriteLine("&c.empty,");
else {
AddKeywords(fmtConsts1, fmtConsts2, kw.RawName);
writer.WriteLine($"&c.{kw.RawName},");
}
}
});
new FileUpdater(TargetLanguage.Rust, "BroadcastToKindMatch", filename).Generate(writer => {
foreach (var kw in broadcastToKindValues) {
writer.Write($"0x{kw.Value:X2} => ");
var bcst = (BroadcastToKind)kw.Value;
if (bcst == BroadcastToKind.None)
writer.WriteLine("&c.empty,");
else {
Add(fmtConsts1, bcst);
writer.WriteLine($"&c.{kw.RawName},");
}
}
});
}
protected override void GenerateRegisters(string[] registers) {
var filename = generatorContext.Types.Dirs.GetRustFilename("formatter", "regs_tbl.rs");
new FileUpdater(TargetLanguage.Rust, "Registers", filename).Generate(writer => {
var totalLen = registers.Length + registers.Sum(a => a.Length);
writer.WriteLine(RustConstants.AttributeNoRustFmt);
writer.WriteLine($"static REGS_DATA: [u8; {totalLen}] = [");
int maxLen = 0;
using (writer.Indent()) {
foreach (var register in registers) {
maxLen = Math.Max(maxLen, register.Length);
var bytes = Encoding.UTF8.GetBytes(register);
writer.Write($"0x{bytes.Length:X2}");
foreach (var b in bytes)
writer.Write($", 0x{b:X2}");
writer.Write(",");
writer.WriteCommentLine(register);
}
}
writer.WriteLine("];");
writer.WriteLine($"pub(super) const MAX_STRING_LENGTH: usize = {maxLen};");
writer.WriteLine($"const STRINGS_COUNT: usize = {registers.Length};");
});
}
protected override void GenerateFormatterFlowControl((EnumValue flowCtrl, EnumValue[] code)[] infos) {
var filename = generatorContext.Types.Dirs.GetRustFilename("formatter", "fmt_utils.rs");
new FileUpdater(TargetLanguage.Rust, "FormatterFlowControlSwitch", filename).Generate(writer => {
var codeStr = genTypes[TypeIds.Code].Name(idConverter);
var flowCtrlStr = genTypes[TypeIds.FormatterFlowControl].Name(idConverter);
foreach (var info in infos) {
if (info.code.Length == 0)
continue;
var bar = string.Empty;
foreach (var c in info.code) {
writer.WriteLine($"{bar}{codeStr}::{c.Name(idConverter)}");
bar = "| ";
}
writer.WriteLine($"=> {flowCtrlStr}::{info.flowCtrl.Name(idConverter)},");
}
});
}
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
namespace NPOI.HSSF.Util
{
using System;
using System.Collections;
using NPOI.HSSF.UserModel;
using NPOI.SS.UserModel;
using System.Collections.Generic;
using NPOI.HSSF.Record;
using NPOI.SS.Util;
/// <summary>
/// Various utility functions that make working with cells and rows easier. The various
/// methods that deal with styles allow you to Create your HSSFCellStyles as you need them.
/// When you apply a style change to a cell, the code will attempt to see if a style already
/// exists that meets your needs. If not, then it will Create a new style. This is to prevent
/// creating too many styles. There is an upper limit in Excel on the number of styles that
/// can be supported.
/// @author Eric Pugh [email protected]
/// </summary>
[Obsolete("deprecated 3.15 beta2. Removed in 3.17. Use {@link org.apache.poi.ss.util.CellUtil} instead.")]
public class HSSFCellUtil
{
private HSSFCellUtil()
{
// no instances of this class
}
/// <summary>
/// Get a row from the spreadsheet, and Create it if it doesn't exist.
/// </summary>
/// <param name="rowCounter">The 0 based row number</param>
/// <param name="sheet">The sheet that the row is part of.</param>
/// <returns>The row indicated by the rowCounter</returns>
[Obsolete("@deprecated 3.15 beta2. Removed in 3.17. Use {@link org.apache.poi.ss.util.CellUtil#getRow} instead.")]
public static IRow GetRow(int rowIndex, HSSFSheet sheet)
{
return (HSSFRow)CellUtil.GetRow(rowIndex, sheet);
}
/// <summary>
/// Get a specific cell from a row. If the cell doesn't exist, it is created.
/// </summary>
/// <param name="row">The row that the cell is part of</param>
/// <param name="column">The column index that the cell is in.</param>
/// <returns>The cell indicated by the column.</returns>
[Obsolete("@deprecated 3.15 beta2. Removed in 3.17. Use {@link org.apache.poi.ss.util.CellUtil#getCell} instead.")]
public static ICell GetCell(IRow row, int columnIndex)
{
return (HSSFCell)CellUtil.GetCell(row, columnIndex);
}
/// <summary>
/// Creates a cell, gives it a value, and applies a style if provided
/// </summary>
/// <param name="row">the row to Create the cell in</param>
/// <param name="column">the column index to Create the cell in</param>
/// <param name="value">The value of the cell</param>
/// <param name="style">If the style is not null, then Set</param>
/// <returns>A new HSSFCell</returns>
[Obsolete("@deprecated 3.15 beta2. Removed in 3.17. Use {@link org.apache.poi.ss.util.CellUtil#createCell} instead.")]
public static ICell CreateCell(IRow row, int column, String value, HSSFCellStyle style)
{
return (HSSFCell)CellUtil.CreateCell(row, column, value, style);
}
/// <summary>
/// Create a cell, and give it a value.
/// </summary>
/// <param name="row">the row to Create the cell in</param>
/// <param name="column">the column index to Create the cell in</param>
/// <param name="value">The value of the cell</param>
/// <returns>A new HSSFCell.</returns>
[Obsolete("@deprecated 3.15 beta2. Removed in 3.17. Use {@link org.apache.poi.ss.util.CellUtil#createCell} instead.")]
public static ICell CreateCell(IRow row, int column, String value)
{
return CreateCell(row, column, value, null);
}
/// <summary>
/// Take a cell, and align it.
/// </summary>
/// <param name="cell">the cell to Set the alignment for</param>
/// <param name="workbook">The workbook that is being worked with.</param>
/// <param name="align">the column alignment to use.</param>
[Obsolete("@deprecated 3.15 beta2. Removed in 3.17. Use {@link org.apache.poi.ss.util.CellUtil#setAlignment} instead.")]
public static void SetAlignment(ICell cell, HSSFWorkbook workbook, short align)
{
CellUtil.SetAlignment(cell, (HorizontalAlignment)align);
}
/// <summary>
/// Take a cell, and apply a font to it
/// </summary>
/// <param name="cell">the cell to Set the alignment for</param>
/// <param name="workbook">The workbook that is being worked with.</param>
/// <param name="font">The HSSFFont that you want to Set...</param>
[Obsolete("@deprecated 3.15 beta2. Removed in 3.17. Use {@link org.apache.poi.ss.util.CellUtil#setFont} instead.")]
public static void SetFont(ICell cell, HSSFWorkbook workbook, HSSFFont font)
{
CellUtil.SetFont(cell, font);
}
/**
* This method attempt to find an already existing HSSFCellStyle that matches
* what you want the style to be. If it does not find the style, then it
* Creates a new one. If it does Create a new one, then it applies the
* propertyName and propertyValue to the style. This is necessary because
* Excel has an upper limit on the number of Styles that it supports.
*
*@param workbook The workbook that is being worked with.
*@param propertyName The name of the property that is to be
* changed.
*@param propertyValue The value of the property that is to be
* changed.
*@param cell The cell that needs its style changes
*@exception NestableException Thrown if an error happens.
*/
[Obsolete("@deprecated 3.15 beta2. Removed in 3.17. Use {@link org.apache.poi.ss.util.CellUtil#setCellStyleProperty} instead.")]
public static void SetCellStyleProperty(ICell cell, HSSFWorkbook workbook, String propertyName, Object propertyValue)
{
CellUtil.SetCellStyleProperty(cell, propertyName, propertyValue);
}
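// Illustrative usage of SetCellStyleProperty above (comments only; the property name
// "borderBottom" and its value are an example pair, not an exhaustive list):
//
//   HSSFCellUtil.SetCellStyleProperty(cell, workbook, "borderBottom", BorderStyle.Thin);
//
// The accepted property names are whatever CellUtil.SetCellStyleProperty understands,
// so consult that class for the supported keys.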
/// <summary>
/// Looks for text in the cell that should be Unicode, like alpha, and provides the
/// Unicode version of it.
/// </summary>
/// <param name="cell">The cell to check for unicode values</param>
/// <returns>The cell, translated to Unicode</returns>
[Obsolete("@deprecated 3.15 beta2. Removed in 3.17. Use {@link org.apache.poi.ss.util.CellUtil#translateUnicodeValues} instead.")]
public static ICell TranslateUnicodeValues(ICell cell)
{
CellUtil.TranslateUnicodeValues(cell);
return cell;
}
/// <summary>
/// Translate color palette entries from the source to the destination sheet
/// </summary>
private static void RemapCellStyle(HSSFCellStyle stylish, Dictionary<short, short> paletteMap)
{
if (paletteMap.ContainsKey(stylish.BorderDiagonalColor))
{
stylish.BorderDiagonalColor = paletteMap[stylish.BorderDiagonalColor];
}
if (paletteMap.ContainsKey(stylish.BottomBorderColor))
{
stylish.BottomBorderColor = paletteMap[stylish.BottomBorderColor];
}
if (paletteMap.ContainsKey(stylish.FillBackgroundColor))
{
stylish.FillBackgroundColor = paletteMap[stylish.FillBackgroundColor];
}
if (paletteMap.ContainsKey(stylish.FillForegroundColor))
{
stylish.FillForegroundColor = paletteMap[stylish.FillForegroundColor];
}
if (paletteMap.ContainsKey(stylish.LeftBorderColor))
{
stylish.LeftBorderColor = paletteMap[stylish.LeftBorderColor];
}
if (paletteMap.ContainsKey(stylish.RightBorderColor))
{
stylish.RightBorderColor = paletteMap[stylish.RightBorderColor];
}
if (paletteMap.ContainsKey(stylish.TopBorderColor))
{
stylish.TopBorderColor = paletteMap[stylish.TopBorderColor];
}
}
public static void CopyCell(HSSFCell oldCell, HSSFCell newCell, IDictionary<Int32, HSSFCellStyle> styleMap, Dictionary<short, short> paletteMap, Boolean keepFormulas)
{
if (styleMap != null)
{
if (oldCell.CellStyle != null)
{
if (oldCell.Sheet.Workbook == newCell.Sheet.Workbook)
{
newCell.CellStyle = oldCell.CellStyle;
}
else
{
int styleHashCode = oldCell.CellStyle.GetHashCode();
if (styleMap.ContainsKey(styleHashCode))
{
newCell.CellStyle = styleMap[styleHashCode];
}
else
{
HSSFCellStyle newCellStyle = (HSSFCellStyle)newCell.Sheet.Workbook.CreateCellStyle();
newCellStyle.CloneStyleFrom(oldCell.CellStyle);
RemapCellStyle(newCellStyle, paletteMap); //Clone copies as-is, we need to remap colors manually
newCell.CellStyle = newCellStyle;
//Clone of cell style always clones the font. This makes my life easier
IFont theFont = newCellStyle.GetFont(newCell.Sheet.Workbook);
if (theFont.Color > 0 && paletteMap.ContainsKey(theFont.Color))
{
theFont.Color = paletteMap[theFont.Color]; //Remap font color
}
styleMap.Add(styleHashCode, newCellStyle);
}
}
}
else
{
newCell.CellStyle = null;
}
}
switch (oldCell.CellType)
{
case CellType.String:
HSSFRichTextString rts = oldCell.RichStringCellValue as HSSFRichTextString;
newCell.SetCellValue(rts);
if (rts != null)
{
for (int j = 0; j < rts.NumFormattingRuns; j++)
{
short fontIndex = rts.GetFontOfFormattingRun(j);
int startIndex = rts.GetIndexOfFormattingRun(j);
int endIndex = 0;
if (j + 1 == rts.NumFormattingRuns)
{
endIndex = rts.Length;
}
else
{
endIndex = rts.GetIndexOfFormattingRun(j + 1);
}
FontRecord fr = newCell.BoundWorkbook.CreateNewFont();
fr.CloneStyleFrom(oldCell.BoundWorkbook.GetFontRecordAt(fontIndex));
HSSFFont font = new HSSFFont((short)(newCell.BoundWorkbook.GetFontIndex(fr)), fr);
newCell.RichStringCellValue.ApplyFont(startIndex, endIndex, font);
}
}
break;
case CellType.Numeric:
newCell.SetCellValue(oldCell.NumericCellValue);
break;
case CellType.Blank:
newCell.SetCellType(CellType.Blank);
break;
case CellType.Boolean:
newCell.SetCellValue(oldCell.BooleanCellValue);
break;
case CellType.Error:
newCell.SetCellValue(oldCell.ErrorCellValue);
break;
case CellType.Formula:
if (keepFormulas)
{
newCell.SetCellType(CellType.Formula);
newCell.CellFormula = oldCell.CellFormula;
}
else
{
try
{
newCell.SetCellType(CellType.Numeric);
newCell.SetCellValue(oldCell.NumericCellValue);
}
catch (Exception)
{
newCell.SetCellType(CellType.String);
newCell.SetCellValue(oldCell.ToString());
}
}
break;
default:
break;
}
}
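// Illustrative usage sketch for CopyCell above (comments only; variable names are assumptions):
// when copying cells across workbooks the caller supplies shared maps so styles and palette
// colors are translated once and then reused for subsequent cells.
//
//   var styleMap = new Dictionary<int, HSSFCellStyle>();
//   var paletteMap = new Dictionary<short, short>();   // built from the source/target palettes
//   HSSFCellUtil.CopyCell(oldCell, newCell, styleMap, paletteMap, keepFormulas: true);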
}
}
| |
using System;
using System.Data;
using Csla;
using Csla.Data;
using SelfLoadSoftDelete.DataAccess;
using SelfLoadSoftDelete.DataAccess.ERCLevel;
namespace SelfLoadSoftDelete.Business.ERCLevel
{
/// <summary>
/// H02_Continent (editable child object).<br/>
/// This is a generated base class of <see cref="H02_Continent"/> business object.
/// </summary>
/// <remarks>
/// This class contains one child collection:<br/>
/// - <see cref="H03_SubContinentObjects"/> of type <see cref="H03_SubContinentColl"/> (1:M relation to <see cref="H04_SubContinent"/>)<br/>
/// This class is an item of <see cref="H01_ContinentColl"/> collection.
/// </remarks>
[Serializable]
public partial class H02_Continent : BusinessBase<H02_Continent>
{
#region Static Fields
private static int _lastID;
#endregion
#region Business Properties
/// <summary>
/// Maintains metadata about <see cref="Continent_ID"/> property.
/// </summary>
public static readonly PropertyInfo<int> Continent_IDProperty = RegisterProperty<int>(p => p.Continent_ID, "Continents ID");
/// <summary>
/// Gets the Continents ID.
/// </summary>
/// <value>The Continents ID.</value>
public int Continent_ID
{
get { return GetProperty(Continent_IDProperty); }
}
/// <summary>
/// Maintains metadata about <see cref="Continent_Name"/> property.
/// </summary>
public static readonly PropertyInfo<string> Continent_NameProperty = RegisterProperty<string>(p => p.Continent_Name, "Continents Name");
/// <summary>
/// Gets or sets the Continents Name.
/// </summary>
/// <value>The Continents Name.</value>
public string Continent_Name
{
get { return GetProperty(Continent_NameProperty); }
set { SetProperty(Continent_NameProperty, value); }
}
/// <summary>
/// Maintains metadata about child <see cref="H03_Continent_SingleObject"/> property.
/// </summary>
public static readonly PropertyInfo<H03_Continent_Child> H03_Continent_SingleObjectProperty = RegisterProperty<H03_Continent_Child>(p => p.H03_Continent_SingleObject, "H03 Continent Single Object", RelationshipTypes.Child);
/// <summary>
/// Gets the H03 Continent Single Object ("self load" child property).
/// </summary>
/// <value>The H03 Continent Single Object.</value>
public H03_Continent_Child H03_Continent_SingleObject
{
get { return GetProperty(H03_Continent_SingleObjectProperty); }
private set { LoadProperty(H03_Continent_SingleObjectProperty, value); }
}
/// <summary>
/// Maintains metadata about child <see cref="H03_Continent_ASingleObject"/> property.
/// </summary>
public static readonly PropertyInfo<H03_Continent_ReChild> H03_Continent_ASingleObjectProperty = RegisterProperty<H03_Continent_ReChild>(p => p.H03_Continent_ASingleObject, "H03 Continent ASingle Object", RelationshipTypes.Child);
/// <summary>
/// Gets the H03 Continent ASingle Object ("self load" child property).
/// </summary>
/// <value>The H03 Continent ASingle Object.</value>
public H03_Continent_ReChild H03_Continent_ASingleObject
{
get { return GetProperty(H03_Continent_ASingleObjectProperty); }
private set { LoadProperty(H03_Continent_ASingleObjectProperty, value); }
}
/// <summary>
/// Maintains metadata about child <see cref="H03_SubContinentObjects"/> property.
/// </summary>
public static readonly PropertyInfo<H03_SubContinentColl> H03_SubContinentObjectsProperty = RegisterProperty<H03_SubContinentColl>(p => p.H03_SubContinentObjects, "H03 SubContinent Objects", RelationshipTypes.Child);
/// <summary>
/// Gets the H03 Sub Continent Objects ("self load" child property).
/// </summary>
/// <value>The H03 Sub Continent Objects.</value>
public H03_SubContinentColl H03_SubContinentObjects
{
get { return GetProperty(H03_SubContinentObjectsProperty); }
private set { LoadProperty(H03_SubContinentObjectsProperty, value); }
}
#endregion
#region Factory Methods
/// <summary>
/// Factory method. Creates a new <see cref="H02_Continent"/> object.
/// </summary>
/// <returns>A reference to the created <see cref="H02_Continent"/> object.</returns>
internal static H02_Continent NewH02_Continent()
{
return DataPortal.CreateChild<H02_Continent>();
}
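// Illustrative use of the factory method (hypothetical caller code, e.g. an
// item-creation routine on the parent H01_ContinentColl collection):
//   var continent = H02_Continent.NewH02_Continent();
//   continent.Continent_Name = "Europe";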
/// <summary>
/// Factory method. Loads a <see cref="H02_Continent"/> object from the given SafeDataReader.
/// </summary>
/// <param name="dr">The SafeDataReader to use.</param>
/// <returns>A reference to the fetched <see cref="H02_Continent"/> object.</returns>
internal static H02_Continent GetH02_Continent(SafeDataReader dr)
{
H02_Continent obj = new H02_Continent();
// show the framework that this is a child object
obj.MarkAsChild();
obj.Fetch(dr);
obj.MarkOld();
return obj;
}
#endregion
#region Constructor
/// <summary>
/// Initializes a new instance of the <see cref="H02_Continent"/> class.
/// </summary>
/// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
[System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
public H02_Continent()
{
// Use factory methods and do not use direct creation.
// show the framework that this is a child object
MarkAsChild();
}
#endregion
#region Data Access
/// <summary>
/// Loads default values for the <see cref="H02_Continent"/> object properties.
/// </summary>
[Csla.RunLocal]
protected override void Child_Create()
{
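// Assign a temporary, unique negative ID by decrementing the static counter;
// the real ID is loaded from the database-generated value during Child_Insert.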
LoadProperty(Continent_IDProperty, System.Threading.Interlocked.Decrement(ref _lastID));
LoadProperty(H03_Continent_SingleObjectProperty, DataPortal.CreateChild<H03_Continent_Child>());
LoadProperty(H03_Continent_ASingleObjectProperty, DataPortal.CreateChild<H03_Continent_ReChild>());
LoadProperty(H03_SubContinentObjectsProperty, DataPortal.CreateChild<H03_SubContinentColl>());
var args = new DataPortalHookArgs();
OnCreate(args);
base.Child_Create();
}
/// <summary>
/// Loads a <see cref="H02_Continent"/> object from the given SafeDataReader.
/// </summary>
/// <param name="dr">The SafeDataReader to use.</param>
private void Fetch(SafeDataReader dr)
{
// Value properties
LoadProperty(Continent_IDProperty, dr.GetInt32("Continent_ID"));
LoadProperty(Continent_NameProperty, dr.GetString("Continent_Name"));
var args = new DataPortalHookArgs(dr);
OnFetchRead(args);
}
/// <summary>
/// Loads child objects.
/// </summary>
internal void FetchChildren()
{
LoadProperty(H03_Continent_SingleObjectProperty, H03_Continent_Child.GetH03_Continent_Child(Continent_ID));
LoadProperty(H03_Continent_ASingleObjectProperty, H03_Continent_ReChild.GetH03_Continent_ReChild(Continent_ID));
LoadProperty(H03_SubContinentObjectsProperty, H03_SubContinentColl.GetH03_SubContinentColl(Continent_ID));
}
/// <summary>
/// Inserts a new <see cref="H02_Continent"/> object in the database.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Insert()
{
using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
{
var args = new DataPortalHookArgs();
OnInsertPre(args);
var dal = dalManager.GetProvider<IH02_ContinentDal>();
using (BypassPropertyChecks)
{
int continent_ID = -1;
dal.Insert(
out continent_ID,
Continent_Name
);
LoadProperty(Continent_IDProperty, continent_ID);
}
OnInsertPost(args);
// flushes all pending data operations
FieldManager.UpdateChildren(this);
}
}
/// <summary>
/// Updates in the database all changes made to the <see cref="H02_Continent"/> object.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Update()
{
if (!IsDirty)
return;
using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
{
var args = new DataPortalHookArgs();
OnUpdatePre(args);
var dal = dalManager.GetProvider<IH02_ContinentDal>();
using (BypassPropertyChecks)
{
dal.Update(
Continent_ID,
Continent_Name
);
}
OnUpdatePost(args);
// flushes all pending data operations
FieldManager.UpdateChildren(this);
}
}
/// <summary>
/// Self deletes the <see cref="H02_Continent"/> object from database.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_DeleteSelf()
{
using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
{
var args = new DataPortalHookArgs();
// flushes all pending data operations
FieldManager.UpdateChildren(this);
OnDeletePre(args);
var dal = dalManager.GetProvider<IH02_ContinentDal>();
using (BypassPropertyChecks)
{
dal.Delete(ReadProperty(Continent_IDProperty));
}
OnDeletePost(args);
}
}
#endregion
#region DataPortal Hooks
/// <summary>
/// Occurs after setting all defaults for object creation.
/// </summary>
partial void OnCreate(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
/// </summary>
partial void OnDeletePre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Delete, after the delete operation, before Commit().
/// </summary>
partial void OnDeletePost(DataPortalHookArgs args);
/// <summary>
/// Occurs after setting query parameters and before the fetch operation.
/// </summary>
partial void OnFetchPre(DataPortalHookArgs args);
/// <summary>
/// Occurs after the fetch operation (object or collection is fully loaded and set up).
/// </summary>
partial void OnFetchPost(DataPortalHookArgs args);
/// <summary>
/// Occurs after the low level fetch operation, before the data reader is destroyed.
/// </summary>
partial void OnFetchRead(DataPortalHookArgs args);
/// <summary>
/// Occurs after setting query parameters and before the update operation.
/// </summary>
partial void OnUpdatePre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
/// </summary>
partial void OnUpdatePost(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
/// </summary>
partial void OnInsertPre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
/// </summary>
partial void OnInsertPost(DataPortalHookArgs args);
#endregion
}
}
| |
//
// Author:
// Jb Evain ([email protected])
//
// Copyright (c) 2008 - 2015 Jb Evain
// Copyright (c) 2008 - 2011 Novell, Inc.
//
// Licensed under the MIT/X11 license.
//
using System;
using System.IO;
using Mono.Cecil.Cil;
using Mono.Cecil.Metadata;
using Mono.Collections.Generic;
using RVA = System.UInt32;
namespace Mono.Cecil.PE {
sealed class ImageReader : BinaryStreamReader {
readonly Image image;
DataDirectory cli;
DataDirectory metadata;
uint table_heap_offset;
public ImageReader (Disposable<Stream> stream, string file_name)
: base (stream.value)
{
image = new Image ();
image.Stream = stream;
image.FileName = file_name;
}
void MoveTo (DataDirectory directory)
{
BaseStream.Position = image.ResolveVirtualAddress (directory.VirtualAddress);
}
void ReadImage ()
{
if (BaseStream.Length < 128)
throw new BadImageFormatException ();
// - DOSHeader
// PE 2
// Start 58
// Lfanew 4
// End 64
if (ReadUInt16 () != 0x5a4d)
throw new BadImageFormatException ();
Advance (58);
MoveTo (ReadUInt32 ());
if (ReadUInt32 () != 0x00004550)
throw new BadImageFormatException ();
// - PEFileHeader
// Machine 2
image.Architecture = ReadArchitecture ();
// NumberOfSections 2
ushort sections = ReadUInt16 ();
// TimeDateStamp 4
image.Timestamp = ReadUInt32 ();
// PointerToSymbolTable 4
// NumberOfSymbols 4
// OptionalHeaderSize 2
Advance (10);
// Characteristics 2
ushort characteristics = ReadUInt16 ();
ushort subsystem, dll_characteristics;
ReadOptionalHeaders (out subsystem, out dll_characteristics);
ReadSections (sections);
ReadCLIHeader ();
ReadMetadata ();
ReadDebugHeader ();
image.Kind = GetModuleKind (characteristics, subsystem);
image.Characteristics = (ModuleCharacteristics) dll_characteristics;
}
TargetArchitecture ReadArchitecture ()
{
return (TargetArchitecture) ReadUInt16 ();
}
static ModuleKind GetModuleKind (ushort characteristics, ushort subsystem)
{
if ((characteristics & 0x2000) != 0) // ImageCharacteristics.Dll
return ModuleKind.Dll;
if (subsystem == 0x2 || subsystem == 0x9) // SubSystem.WindowsGui || SubSystem.WindowsCeGui
return ModuleKind.Windows;
return ModuleKind.Console;
}
void ReadOptionalHeaders (out ushort subsystem, out ushort dll_characteristics)
{
// - PEOptionalHeader
// - StandardFieldsHeader
// Magic 2
bool pe64 = ReadUInt16 () == 0x20b;
// pe32 || pe64
// LMajor 1
// LMinor 1
// CodeSize 4
// InitializedDataSize 4
// UninitializedDataSize 4
// EntryPointRVA 4
// BaseOfCode 4
// BaseOfData 4 || 0
// - NTSpecificFieldsHeader
// ImageBase 4 || 8
// SectionAlignment 4
// FileAlignment 4
// OSMajor 2
// OSMinor 2
// UserMajor 2
// UserMinor 2
// SubSysMajor 2
// SubSysMinor 2
// Reserved 4
// ImageSize 4
// HeaderSize 4
// FileChecksum 4
Advance (66);
// SubSystem 2
subsystem = ReadUInt16 ();
// DLLFlags 2
dll_characteristics = ReadUInt16 ();
// StackReserveSize 4 || 8
// StackCommitSize 4 || 8
// HeapReserveSize 4 || 8
// HeapCommitSize 4 || 8
// LoaderFlags 4
// NumberOfDataDir 4
// - DataDirectoriesHeader
// ExportTable 8
// ImportTable 8
Advance (pe64 ? 56 : 40);
// ResourceTable 8
image.Win32Resources = ReadDataDirectory ();
// ExceptionTable 8
// CertificateTable 8
// BaseRelocationTable 8
Advance (24);
// Debug 8
image.Debug = ReadDataDirectory ();
// Copyright 8
// GlobalPtr 8
// TLSTable 8
// LoadConfigTable 8
// BoundImport 8
// IAT 8
// DelayImportDescriptor 8
Advance (56);
// CLIHeader 8
cli = ReadDataDirectory ();
if (cli.IsZero)
throw new BadImageFormatException ();
// Reserved 8
Advance (8);
}
string ReadAlignedString (int length)
{
int read = 0;
var buffer = new char [length];
while (read < length) {
var current = ReadByte ();
if (current == 0)
break;
buffer [read++] = (char) current;
}
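// Skip the padding so the position lands on the next 4-byte boundary: read + 1
// bytes (the characters plus the terminating zero, when present) have been
// consumed, and the aligned name field occupies ((read + 4) & ~3) bytes.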
Advance (-1 + ((read + 4) & ~3) - read);
return new string (buffer, 0, read);
}
string ReadZeroTerminatedString (int length)
{
int read = 0;
var buffer = new char [length];
var bytes = ReadBytes (length);
while (read < length) {
var current = bytes [read];
if (current == 0)
break;
buffer [read++] = (char) current;
}
return new string (buffer, 0, read);
}
void ReadSections (ushort count)
{
var sections = new Section [count];
for (int i = 0; i < count; i++) {
var section = new Section ();
// Name
section.Name = ReadZeroTerminatedString (8);
// VirtualSize 4
Advance (4);
// VirtualAddress 4
section.VirtualAddress = ReadUInt32 ();
// SizeOfRawData 4
section.SizeOfRawData = ReadUInt32 ();
// PointerToRawData 4
section.PointerToRawData = ReadUInt32 ();
// PointerToRelocations 4
// PointerToLineNumbers 4
// NumberOfRelocations 2
// NumberOfLineNumbers 2
// Characteristics 4
Advance (16);
sections [i] = section;
}
image.Sections = sections;
}
void ReadCLIHeader ()
{
MoveTo (cli);
// - CLIHeader
// Cb 4
// MajorRuntimeVersion 2
// MinorRuntimeVersion 2
Advance (8);
// Metadata 8
metadata = ReadDataDirectory ();
// Flags 4
image.Attributes = (ModuleAttributes) ReadUInt32 ();
// EntryPointToken 4
image.EntryPointToken = ReadUInt32 ();
// Resources 8
image.Resources = ReadDataDirectory ();
// StrongNameSignature 8
image.StrongName = ReadDataDirectory ();
// CodeManagerTable 8
// VTableFixups 8
// ExportAddressTableJumps 8
// ManagedNativeHeader 8
}
void ReadMetadata ()
{
MoveTo (metadata);
if (ReadUInt32 () != 0x424a5342)
throw new BadImageFormatException ();
// MajorVersion 2
// MinorVersion 2
// Reserved 4
Advance (8);
image.RuntimeVersion = ReadZeroTerminatedString (ReadInt32 ());
// Flags 2
Advance (2);
var streams = ReadUInt16 ();
var section = image.GetSectionAtVirtualAddress (metadata.VirtualAddress);
if (section == null)
throw new BadImageFormatException ();
image.MetadataSection = section;
for (int i = 0; i < streams; i++)
ReadMetadataStream (section);
if (image.PdbHeap != null)
ReadPdbHeap ();
if (image.TableHeap != null)
ReadTableHeap ();
}
void ReadDebugHeader ()
{
if (image.Debug.IsZero) {
image.DebugHeader = new ImageDebugHeader (Empty<ImageDebugHeaderEntry>.Array);
return;
}
MoveTo (image.Debug);
var entries = new ImageDebugHeaderEntry [(int) image.Debug.Size / ImageDebugDirectory.Size];
for (int i = 0; i < entries.Length; i++) {
var directory = new ImageDebugDirectory {
Characteristics = ReadInt32 (),
TimeDateStamp = ReadInt32 (),
MajorVersion = ReadInt16 (),
MinorVersion = ReadInt16 (),
Type = (ImageDebugType) ReadInt32 (),
SizeOfData = ReadInt32 (),
AddressOfRawData = ReadInt32 (),
PointerToRawData = ReadInt32 (),
};
if (directory.AddressOfRawData == 0) {
entries [i] = new ImageDebugHeaderEntry (directory, Empty<byte>.Array);
continue;
}
var position = Position;
try {
MoveTo ((uint) directory.PointerToRawData);
var data = ReadBytes (directory.SizeOfData);
entries [i] = new ImageDebugHeaderEntry (directory, data);
} finally {
Position = position;
}
}
image.DebugHeader = new ImageDebugHeader (entries);
}
void ReadMetadataStream (Section section)
{
// Offset 4
uint offset = metadata.VirtualAddress - section.VirtualAddress + ReadUInt32 (); // relative to the section start
// Size 4
uint size = ReadUInt32 ();
var data = ReadHeapData (offset, size);
var name = ReadAlignedString (16);
switch (name) {
case "#~":
case "#-":
image.TableHeap = new TableHeap (data);
table_heap_offset = offset;
break;
case "#Strings":
image.StringHeap = new StringHeap (data);
break;
case "#Blob":
image.BlobHeap = new BlobHeap (data);
break;
case "#GUID":
image.GuidHeap = new GuidHeap (data);
break;
case "#US":
image.UserStringHeap = new UserStringHeap (data);
break;
case "#Pdb":
image.PdbHeap = new PdbHeap (data);
break;
}
}
byte [] ReadHeapData (uint offset, uint size)
{
var position = BaseStream.Position;
MoveTo (offset + image.MetadataSection.PointerToRawData);
var data = ReadBytes ((int) size);
BaseStream.Position = position;
return data;
}
void ReadTableHeap ()
{
var heap = image.TableHeap;
MoveTo (table_heap_offset + image.MetadataSection.PointerToRawData);
// Reserved 4
// MajorVersion 1
// MinorVersion 1
Advance (6);
// HeapSizes 1
var sizes = ReadByte ();
// Reserved2 1
Advance (1);
// Valid 8
heap.Valid = ReadInt64 ();
// Sorted 8
heap.Sorted = ReadInt64 ();
if (image.PdbHeap != null) {
for (int i = 0; i < Mixin.TableCount; i++) {
if (!image.PdbHeap.HasTable ((Table) i))
continue;
heap.Tables [i].Length = image.PdbHeap.TypeSystemTableRows [i];
}
}
for (int i = 0; i < Mixin.TableCount; i++) {
if (!heap.HasTable ((Table) i))
continue;
heap.Tables [i].Length = ReadUInt32 ();
}
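// HeapSizes bit flags (ECMA-335): 0x1, 0x2 and 0x4 select 4-byte indexes into
// the #Strings, #GUID and #Blob heaps respectively; otherwise 2-byte indexes are used.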
SetIndexSize (image.StringHeap, sizes, 0x1);
SetIndexSize (image.GuidHeap, sizes, 0x2);
SetIndexSize (image.BlobHeap, sizes, 0x4);
ComputeTableInformations ();
}
static void SetIndexSize (Heap heap, uint sizes, byte flag)
{
if (heap == null)
return;
heap.IndexSize = (sizes & flag) > 0 ? 4 : 2;
}
int GetTableIndexSize (Table table)
{
return image.GetTableIndexSize (table);
}
int GetCodedIndexSize (CodedIndex index)
{
return image.GetCodedIndexSize (index);
}
void ComputeTableInformations ()
{
uint offset = (uint) BaseStream.Position - table_heap_offset - image.MetadataSection.PointerToRawData; // header
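// 'offset' is now the size of the tables header (including the row counts read
// above), i.e. the offset of the first row of the first present table relative
// to the start of the table heap.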
int stridx_size = image.StringHeap.IndexSize;
int guididx_size = image.GuidHeap != null ? image.GuidHeap.IndexSize : 2;
int blobidx_size = image.BlobHeap != null ? image.BlobHeap.IndexSize : 2;
var heap = image.TableHeap;
var tables = heap.Tables;
for (int i = 0; i < Mixin.TableCount; i++) {
var table = (Table) i;
if (!heap.HasTable (table))
continue;
int size;
switch (table) {
case Table.Module:
size = 2 // Generation
+ stridx_size // Name
+ (guididx_size * 3); // Mvid, EncId, EncBaseId
break;
case Table.TypeRef:
size = GetCodedIndexSize (CodedIndex.ResolutionScope) // ResolutionScope
+ (stridx_size * 2); // Name, Namespace
break;
case Table.TypeDef:
size = 4 // Flags
+ (stridx_size * 2) // Name, Namespace
+ GetCodedIndexSize (CodedIndex.TypeDefOrRef) // BaseType
+ GetTableIndexSize (Table.Field) // FieldList
+ GetTableIndexSize (Table.Method); // MethodList
break;
case Table.FieldPtr:
size = GetTableIndexSize (Table.Field); // Field
break;
case Table.Field:
size = 2 // Flags
+ stridx_size // Name
+ blobidx_size; // Signature
break;
case Table.MethodPtr:
size = GetTableIndexSize (Table.Method); // Method
break;
case Table.Method:
size = 8 // Rva 4, ImplFlags 2, Flags 2
+ stridx_size // Name
+ blobidx_size // Signature
+ GetTableIndexSize (Table.Param); // ParamList
break;
case Table.ParamPtr:
size = GetTableIndexSize (Table.Param); // Param
break;
case Table.Param:
size = 4 // Flags 2, Sequence 2
+ stridx_size; // Name
break;
case Table.InterfaceImpl:
size = GetTableIndexSize (Table.TypeDef) // Class
+ GetCodedIndexSize (CodedIndex.TypeDefOrRef); // Interface
break;
case Table.MemberRef:
size = GetCodedIndexSize (CodedIndex.MemberRefParent) // Class
+ stridx_size // Name
+ blobidx_size; // Signature
break;
case Table.Constant:
size = 2 // Type
+ GetCodedIndexSize (CodedIndex.HasConstant) // Parent
+ blobidx_size; // Value
break;
case Table.CustomAttribute:
size = GetCodedIndexSize (CodedIndex.HasCustomAttribute) // Parent
+ GetCodedIndexSize (CodedIndex.CustomAttributeType) // Type
+ blobidx_size; // Value
break;
case Table.FieldMarshal:
size = GetCodedIndexSize (CodedIndex.HasFieldMarshal) // Parent
+ blobidx_size; // NativeType
break;
case Table.DeclSecurity:
size = 2 // Action
+ GetCodedIndexSize (CodedIndex.HasDeclSecurity) // Parent
+ blobidx_size; // PermissionSet
break;
case Table.ClassLayout:
size = 6 // PackingSize 2, ClassSize 4
+ GetTableIndexSize (Table.TypeDef); // Parent
break;
case Table.FieldLayout:
size = 4 // Offset
+ GetTableIndexSize (Table.Field); // Field
break;
case Table.StandAloneSig:
size = blobidx_size; // Signature
break;
case Table.EventMap:
size = GetTableIndexSize (Table.TypeDef) // Parent
+ GetTableIndexSize (Table.Event); // EventList
break;
case Table.EventPtr:
size = GetTableIndexSize (Table.Event); // Event
break;
case Table.Event:
size = 2 // Flags
+ stridx_size // Name
+ GetCodedIndexSize (CodedIndex.TypeDefOrRef); // EventType
break;
case Table.PropertyMap:
size = GetTableIndexSize (Table.TypeDef) // Parent
+ GetTableIndexSize (Table.Property); // PropertyList
break;
case Table.PropertyPtr:
size = GetTableIndexSize (Table.Property); // Property
break;
case Table.Property:
size = 2 // Flags
+ stridx_size // Name
+ blobidx_size; // Type
break;
case Table.MethodSemantics:
size = 2 // Semantics
+ GetTableIndexSize (Table.Method) // Method
+ GetCodedIndexSize (CodedIndex.HasSemantics); // Association
break;
case Table.MethodImpl:
size = GetTableIndexSize (Table.TypeDef) // Class
+ GetCodedIndexSize (CodedIndex.MethodDefOrRef) // MethodBody
+ GetCodedIndexSize (CodedIndex.MethodDefOrRef); // MethodDeclaration
break;
case Table.ModuleRef:
size = stridx_size; // Name
break;
case Table.TypeSpec:
size = blobidx_size; // Signature
break;
case Table.ImplMap:
size = 2 // MappingFlags
+ GetCodedIndexSize (CodedIndex.MemberForwarded) // MemberForwarded
+ stridx_size // ImportName
+ GetTableIndexSize (Table.ModuleRef); // ImportScope
break;
case Table.FieldRVA:
size = 4 // RVA
+ GetTableIndexSize (Table.Field); // Field
break;
case Table.EncLog:
size = 8;
break;
case Table.EncMap:
size = 4;
break;
case Table.Assembly:
size = 16 // HashAlgId 4, Version 4 * 2, Flags 4
+ blobidx_size // PublicKey
+ (stridx_size * 2); // Name, Culture
break;
case Table.AssemblyProcessor:
size = 4; // Processor
break;
case Table.AssemblyOS:
size = 12; // Platform 4, Version 2 * 4
break;
case Table.AssemblyRef:
size = 12 // Version 2 * 4 + Flags 4
+ (blobidx_size * 2) // PublicKeyOrToken, HashValue
+ (stridx_size * 2); // Name, Culture
break;
case Table.AssemblyRefProcessor:
size = 4 // Processor
+ GetTableIndexSize (Table.AssemblyRef); // AssemblyRef
break;
case Table.AssemblyRefOS:
size = 12 // Platform 4, Version 2 * 4
+ GetTableIndexSize (Table.AssemblyRef); // AssemblyRef
break;
case Table.File:
size = 4 // Flags
+ stridx_size // Name
+ blobidx_size; // HashValue
break;
case Table.ExportedType:
size = 8 // Flags 4, TypeDefId 4
+ (stridx_size * 2) // Name, Namespace
+ GetCodedIndexSize (CodedIndex.Implementation); // Implementation
break;
case Table.ManifestResource:
size = 8 // Offset, Flags
+ stridx_size // Name
+ GetCodedIndexSize (CodedIndex.Implementation); // Implementation
break;
case Table.NestedClass:
size = GetTableIndexSize (Table.TypeDef) // NestedClass
+ GetTableIndexSize (Table.TypeDef); // EnclosingClass
break;
case Table.GenericParam:
size = 4 // Number, Flags
+ GetCodedIndexSize (CodedIndex.TypeOrMethodDef) // Owner
+ stridx_size; // Name
break;
case Table.MethodSpec:
size = GetCodedIndexSize (CodedIndex.MethodDefOrRef) // Method
+ blobidx_size; // Instantiation
break;
case Table.GenericParamConstraint:
size = GetTableIndexSize (Table.GenericParam) // Owner
+ GetCodedIndexSize (CodedIndex.TypeDefOrRef); // Constraint
break;
case Table.Document:
size = blobidx_size // Name
+ guididx_size // HashAlgorithm
+ blobidx_size // Hash
+ guididx_size; // Language
break;
case Table.MethodDebugInformation:
size = GetTableIndexSize (Table.Document) // Document
+ blobidx_size; // SequencePoints
break;
case Table.LocalScope:
size = GetTableIndexSize (Table.Method) // Method
+ GetTableIndexSize (Table.ImportScope) // ImportScope
+ GetTableIndexSize (Table.LocalVariable) // VariableList
+ GetTableIndexSize (Table.LocalConstant) // ConstantList
+ 4 * 2; // StartOffset, Length
break;
case Table.LocalVariable:
size = 2 // Attributes
+ 2 // Index
+ stridx_size; // Name
break;
case Table.LocalConstant:
size = stridx_size // Name
+ blobidx_size; // Signature
break;
case Table.ImportScope:
size = GetTableIndexSize (Table.ImportScope) // Parent
+ blobidx_size;
break;
case Table.StateMachineMethod:
size = GetTableIndexSize (Table.Method) // MoveNextMethod
+ GetTableIndexSize (Table.Method); // KickOffMethod
break;
case Table.CustomDebugInformation:
size = GetCodedIndexSize (CodedIndex.HasCustomDebugInformation) // Parent
+ guididx_size // Kind
+ blobidx_size; // Value
break;
default:
throw new NotSupportedException ();
}
tables [i].RowSize = (uint) size;
tables [i].Offset = offset;
offset += (uint) size * tables [i].Length;
}
}
void ReadPdbHeap ()
{
var heap = image.PdbHeap;
var buffer = new ByteBuffer (heap.data);
heap.Id = buffer.ReadBytes (20);
heap.EntryPoint = buffer.ReadUInt32 ();
heap.TypeSystemTables = buffer.ReadInt64 ();
heap.TypeSystemTableRows = new uint [Mixin.TableCount];
for (int i = 0; i < Mixin.TableCount; i++) {
var table = (Table) i;
if (!heap.HasTable (table))
continue;
heap.TypeSystemTableRows [i] = buffer.ReadUInt32 ();
}
}
public static Image ReadImage (Disposable<Stream> stream, string file_name)
{
try {
var reader = new ImageReader (stream, file_name);
reader.ReadImage ();
return reader.image;
} catch (EndOfStreamException e) {
throw new BadImageFormatException (stream.value.GetFileName (), e);
}
}
public static Image ReadPortablePdb (Disposable<Stream> stream, string file_name)
{
try {
var reader = new ImageReader (stream, file_name);
var length = (uint) stream.value.Length;
reader.image.Sections = new[] {
new Section {
PointerToRawData = 0,
SizeOfRawData = length,
VirtualAddress = 0,
VirtualSize = length,
}
};
reader.metadata = new DataDirectory (0, length);
reader.ReadMetadata ();
return reader.image;
} catch (EndOfStreamException e) {
throw new BadImageFormatException (stream.value.GetFileName (), e);
}
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
// =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// TaskRunSync.cs
//
//
// Test class using UnitTestDriver that ensures that the RunSynchronously method works as expected
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
using Xunit;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Security;
using System.Threading;
using System.Threading.Tasks;
using System.Diagnostics;
namespace System.Threading.Tasks.Tests
{
#region Helper Classes / Enums
public enum PreTaskStatus
{
Created, // task has been created
Continued, // task is a continuation task
Running, // task has started running, could be waiting-to-run in the queue
Canceled, // task has been canceled before running
Completed, // task has been completed
}
public enum PostRunSyncAction
{
Wait, //to test you can wait on a task that was run synchronously
Cancel, //to test you can cancel the token that was used by the task
ContinueWith, //to test you can continuewith on a task that was run synchronously
}
public enum WorkloadType
{
CreateChildTask, //Start an attached child task in the workload
CreateDetachedChildTask, //Start a detached child task in the workload
ContinueInside, //Invoke ContinueWith as the workload inside the task
RunWithUserScheduler, //Create a task with a custom task scheduler that runs the task inline
ThrowException, //Throw an exception
}
public enum TaskSchedulerType
{
Default, //Use the default taskscheduler TaskScheduler.Current
Null, //pass null as the Task Scheduler
CustomWithInlineExecution, //Use a custom TaskScheduler that runs the Task inline
CustomWithoutInlineExecution //Use a custom TaskScheduler that does not run the Task inline
}
/// <summary>
/// An implementation of TaskScheduler that is able to perform RunSynchronously, and
/// keep track of the number of times a Task was executed synchronously
/// </summary>
public class TaskRunSyncTaskScheduler : TaskScheduler, IDisposable
{
public bool AbleToExecuteInline { get; set; }
public int RunSyncCalledCount { get; set; }
private Task[] _threads;
private BlockingCollection<Task> _tasks = new BlockingCollection<Task>();
public TaskRunSyncTaskScheduler(bool ableToExecuteInline)
{
AbleToExecuteInline = ableToExecuteInline;
/*need at least two threads since we might schedule two tasks (parent-child)*/
int numberOfThreads = Math.Max(Environment.ProcessorCount, 2);
_threads = new Task[numberOfThreads];
for (int i = 0; i < numberOfThreads; i++)
{
_threads[i] = Task.Run(() =>
{
foreach (var task in _tasks.GetConsumingEnumerable())
{
if (task.Status == TaskStatus.WaitingToRun)
{
ExecuteTask(task);
}
}
});
}
}
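// Illustrative use (hypothetical): run a task inline on this scheduler and
// verify that it executed synchronously on the calling thread.
//   var scheduler = new TaskRunSyncTaskScheduler(ableToExecuteInline: true);
//   var t = new Task(() => { });
//   t.RunSynchronously(scheduler);   // TryExecuteTaskInline runs the task inline
//   scheduler.Dispose();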
[SecuritySafeCritical]
private bool ExecuteTask(Task task)
{
return TryExecuteTask(task);
}
[SecurityCritical]
protected override void QueueTask(Task task)
{
_tasks.Add(task);
}
public override int MaximumConcurrencyLevel
{
get
{
return _threads.Length;
}
}
[SecurityCritical]
protected override bool TryExecuteTaskInline(Task task, bool taskWasPreviouslyQueued)
{
RunSyncCalledCount++;
if (taskWasPreviouslyQueued)
{
return false;
}
if (AbleToExecuteInline)
{
return ExecuteTask(task);
}
else
{
return false;
}
}
[SecurityCritical]
protected override IEnumerable<Task> GetScheduledTasks()
{
return _tasks;
}
public void Dispose()
{
if (_threads != null)
{
_tasks.CompleteAdding();
Task task;
while (_tasks.TryTake(out task))
;
Task.WaitAll(_threads);
_threads = null;
}
}
}
public class TestParameters_RunSync
{
public readonly PreTaskStatus PreTaskStatus;
public readonly PostRunSyncAction PostRunSyncAction;
public readonly WorkloadType WorkloadType;
public readonly TaskCreationOptions TaskCreationOptions;
public readonly TaskSchedulerType TaskSchedulerType;
public TestParameters_RunSync(PreTaskStatus preTaskStatus, PostRunSyncAction postRunSyncAction, WorkloadType workType, TaskCreationOptions taskCreationOptions, TaskSchedulerType taskScheduler)
{
PreTaskStatus = preTaskStatus;
PostRunSyncAction = postRunSyncAction;
WorkloadType = workType;
TaskCreationOptions = taskCreationOptions;
TaskSchedulerType = taskScheduler;
}
}
#endregion
public sealed class TaskRunSyncTest
{
private PreTaskStatus _preTaskStatus;
private PostRunSyncAction _postRunSyncAction;
private WorkloadType _workloadType;
private TaskCreationOptions _option;
private TaskSchedulerType _taskSchedulerType;
private Task _task; // the main task to be run synchronously
private CancellationTokenSource _cts; // The CancellationTokenSource whose Token is passed to the main task
private int _taskThreadID;
public TaskRunSyncTest(TestParameters_RunSync parameters)
{
_preTaskStatus = parameters.PreTaskStatus;
_postRunSyncAction = parameters.PostRunSyncAction;
_workloadType = parameters.WorkloadType;
_option = parameters.TaskCreationOptions;
_taskSchedulerType = parameters.TaskSchedulerType;
}
/// <summary>
/// The main test method that exercises the API. There are five steps involved in the execution of the test.
/// </summary>
internal void RealRun()
{
TaskScheduler ts = TaskScheduler.Default;
switch (_taskSchedulerType)
{
case TaskSchedulerType.Null:
ts = null;
break;
case TaskSchedulerType.CustomWithInlineExecution:
ts = new TaskRunSyncTaskScheduler(true);
break;
case TaskSchedulerType.CustomWithoutInlineExecution:
ts = new TaskRunSyncTaskScheduler(false);
break;
default:
ts = TaskScheduler.Default;
break;
}
// Stage 1 -- create task
CreateTask();
// Stage 2 - start with the pre-action
switch (_preTaskStatus)
{
case PreTaskStatus.Continued:
_task = _task.ContinueWith((t) => { }, _cts.Token, TaskContinuationOptions.None, ts);
break;
case PreTaskStatus.Running:
_task.Start(ts);
break;
case PreTaskStatus.Canceled:
_cts.Cancel();
break;
case PreTaskStatus.Completed:
_task.Start(ts);
((IAsyncResult)_task).AsyncWaitHandle.WaitOne(); // wait on the AsyncWaitHandle (instead of Wait()) to avoid getting an exception from a faulted task
break;
}
int expectedThreadID = Environment.CurrentManagedThreadId;
// Stage 3 - exercise the API
try
{
if (_taskSchedulerType == TaskSchedulerType.Default)
_task.RunSynchronously();
else
_task.RunSynchronously(ts);
if (ExpectRunSyncFailure)
Assert.True(false, string.Format("Fail to throw expected InvalidOperationException"));
if (_taskSchedulerType == TaskSchedulerType.Null)
Assert.True(false, string.Format("Fail to throw expected ArgumentNullException"));
}
catch (InvalidOperationException ex)
{
if (!ExpectRunSyncFailure)
Assert.True(false, string.Format("Caught un-expected InvalidOperationException - {0}", ex));
else
{
Debug.WriteLine("Caught expected InvalidOperationException");
DisposeScheduler(ts);
return;
}
}
catch (ArgumentNullException ex)
{
if (_taskSchedulerType != TaskSchedulerType.Null)
Assert.True(false, string.Format("Caught un-expected ArgumentNullException - {0}", ex));
else
{
Debug.WriteLine("Caught expected ArgumentNullException");
DisposeScheduler(ts);
return;
}
}
// Stage 4 - do verification against Context, IsCompleted and the TaskStatus
if (_taskSchedulerType == TaskSchedulerType.CustomWithInlineExecution)
Assert.Equal(expectedThreadID, _taskThreadID);
else if (_taskSchedulerType == TaskSchedulerType.CustomWithoutInlineExecution)
Assert.NotEqual(expectedThreadID, _taskThreadID);
else if (_taskThreadID != expectedThreadID)
Debug.WriteLine("Warning: RunSynchronously request ignored -- Task did not run under the same context");
Assert.True(_task.IsCompleted, "RunSynchronously contract broken -- Task is not complete when the call return");
if (_workloadType == WorkloadType.ThrowException)
{
if (_task.Status != TaskStatus.Faulted)
Assert.True(false, string.Format("Wrong final task status on a faulty workload"));
CheckExpectedAggregateException(_task.Exception);
//Assert.True(false, string.Format("Fail to record the test exception in Task.Exception"));
}
else
{
if (_task.Status != TaskStatus.RanToCompletion)
Assert.True(false, string.Format("Wrong final task status on a regular workload"));
}
//
// Extra verification to ensure the Task was RunSynchronously on
// specified TaskScheduler
//
if (_taskSchedulerType == TaskSchedulerType.CustomWithInlineExecution ||
_taskSchedulerType == TaskSchedulerType.CustomWithoutInlineExecution)
{
if (((TaskRunSyncTaskScheduler)ts).RunSyncCalledCount <= 0)
Assert.True(false, string.Format("Task wasn't RunSynchronously with TaskScheduler specified"));
}
// Stage 5 - follow with the post-action
switch (_postRunSyncAction)
{
case PostRunSyncAction.Wait:
try
{
if (_postRunSyncAction == PostRunSyncAction.Wait)
_task.Wait(0);
if (_workloadType == WorkloadType.ThrowException)
Assert.True(false, string.Format("expected failure is not propogated out of Wait"));
}
catch (AggregateException ae)
{
CheckExpectedAggregateException(ae);
}
break;
case PostRunSyncAction.Cancel:
_cts.Cancel();
break;
case PostRunSyncAction.ContinueWith:
_task.ContinueWith((t) => { }).Wait();
break;
}
DisposeScheduler(ts);
}
private static void DisposeScheduler(TaskScheduler ts)
{
if (ts is TaskRunSyncTaskScheduler)
{
((TaskRunSyncTaskScheduler)ts).Dispose();
}
}
private void CreateTask()
{
_cts = new CancellationTokenSource();
_task = new Task((o) =>
{
_taskThreadID = Environment.CurrentManagedThreadId;
switch (_workloadType)
{
case WorkloadType.CreateChildTask:
case WorkloadType.CreateDetachedChildTask:
Task.Factory.StartNew(() => { }, _workloadType == WorkloadType.CreateDetachedChildTask
? TaskCreationOptions.None
: TaskCreationOptions.AttachedToParent);
break;
case WorkloadType.ContinueInside:
_task.ContinueWith((t) => { });
break;
case WorkloadType.RunWithUserScheduler:
TaskScheduler ts = new TaskRunSyncTaskScheduler(true);
Task.Factory.StartNew(() => { }, _cts.Token, TaskCreationOptions.AttachedToParent, ts).ContinueWith((task) => DisposeScheduler(ts), TaskScheduler.Default);
break;
case WorkloadType.ThrowException:
throw new TPLTestException();
}
}, null, _cts.Token, _option);
}
private bool ExpectRunSyncFailure
{
get
{
// The following cases will cause an exception
// 1. Task already started / canceled / disposed / completed
// 2. Task is a continuation task
return (_preTaskStatus != PreTaskStatus.Created);
}
}
/// <summary>
/// Method that checks to ensure that the AggregateException contains the TPLTestException (the one thrown by the workload)
/// </summary>
/// <param name="ae"></param>
/// <returns></returns>
private void CheckExpectedAggregateException(AggregateException ae)
{
if (_workloadType == WorkloadType.ThrowException)
ae.Flatten().Handle((e) => e is TPLTestException);
else
Assert.True(false, string.Format("Caught un-expected exception - {0]. Fail to re-progogate the test exception via Wait", ae));
}
}
public class TaskRunSyncTests
{
#region Test methods
[Fact]
public static void TaskRunSyncTest0()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Canceled, PostRunSyncAction.Wait, WorkloadType.CreateChildTask, TaskCreationOptions.None, TaskSchedulerType.CustomWithInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest1()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Canceled, PostRunSyncAction.Wait, WorkloadType.CreateChildTask, TaskCreationOptions.None, TaskSchedulerType.CustomWithoutInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest2()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Canceled, PostRunSyncAction.Wait, WorkloadType.CreateChildTask, TaskCreationOptions.None, TaskSchedulerType.Default);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
[OuterLoop]
public static void TaskRunSyncTest3()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Completed, PostRunSyncAction.Wait, WorkloadType.CreateChildTask, TaskCreationOptions.None, TaskSchedulerType.CustomWithInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest4()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Completed, PostRunSyncAction.Wait, WorkloadType.CreateChildTask, TaskCreationOptions.None, TaskSchedulerType.CustomWithoutInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest5()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Completed, PostRunSyncAction.Wait, WorkloadType.CreateChildTask, TaskCreationOptions.None, TaskSchedulerType.Default);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest6()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Continued, PostRunSyncAction.Wait, WorkloadType.CreateChildTask, TaskCreationOptions.None, TaskSchedulerType.CustomWithInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest7()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Continued, PostRunSyncAction.Wait, WorkloadType.CreateChildTask, TaskCreationOptions.None, TaskSchedulerType.CustomWithoutInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest8()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Continued, PostRunSyncAction.Wait, WorkloadType.CreateChildTask, TaskCreationOptions.None, TaskSchedulerType.Default);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest9()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Created, PostRunSyncAction.Cancel, WorkloadType.ContinueInside, TaskCreationOptions.LongRunning, TaskSchedulerType.Default);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest10()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Created, PostRunSyncAction.Cancel, WorkloadType.CreateChildTask, TaskCreationOptions.LongRunning, TaskSchedulerType.CustomWithoutInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest11()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Created, PostRunSyncAction.Cancel, WorkloadType.CreateDetachedChildTask, TaskCreationOptions.AttachedToParent, TaskSchedulerType.CustomWithoutInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest12()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Created, PostRunSyncAction.Cancel, WorkloadType.CreateDetachedChildTask, TaskCreationOptions.None, TaskSchedulerType.CustomWithInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest13()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Created, PostRunSyncAction.Cancel, WorkloadType.RunWithUserScheduler, TaskCreationOptions.None, TaskSchedulerType.Default);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest14()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Created, PostRunSyncAction.Cancel, WorkloadType.ThrowException, TaskCreationOptions.AttachedToParent, TaskSchedulerType.CustomWithInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
[OuterLoop]
public static void TaskRunSyncTest15()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Created, PostRunSyncAction.ContinueWith, WorkloadType.ContinueInside, TaskCreationOptions.None, TaskSchedulerType.CustomWithInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest16()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Created, PostRunSyncAction.ContinueWith, WorkloadType.CreateChildTask, TaskCreationOptions.AttachedToParent, TaskSchedulerType.Default);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
[OuterLoop]
public static void TaskRunSyncTest17()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Created, PostRunSyncAction.ContinueWith, WorkloadType.CreateDetachedChildTask, TaskCreationOptions.AttachedToParent, TaskSchedulerType.CustomWithInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
[OuterLoop]
public static void TaskRunSyncTest18()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Created, PostRunSyncAction.ContinueWith, WorkloadType.RunWithUserScheduler, TaskCreationOptions.LongRunning, TaskSchedulerType.CustomWithInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
[OuterLoop]
public static void TaskRunSyncTest19()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Created, PostRunSyncAction.ContinueWith, WorkloadType.ThrowException, TaskCreationOptions.LongRunning, TaskSchedulerType.CustomWithoutInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
[OuterLoop]
public static void TaskRunSyncTest20()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Created, PostRunSyncAction.Wait, WorkloadType.ContinueInside, TaskCreationOptions.AttachedToParent, TaskSchedulerType.CustomWithoutInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest21()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Created, PostRunSyncAction.Wait, WorkloadType.CreateChildTask, TaskCreationOptions.None, TaskSchedulerType.Null);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest22()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Created, PostRunSyncAction.Wait, WorkloadType.CreateDetachedChildTask, TaskCreationOptions.LongRunning, TaskSchedulerType.Default);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
[OuterLoop]
public static void TaskRunSyncTest23()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Created, PostRunSyncAction.Wait, WorkloadType.RunWithUserScheduler, TaskCreationOptions.AttachedToParent, TaskSchedulerType.CustomWithoutInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
[OuterLoop]
public static void TaskRunSyncTest24()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Created, PostRunSyncAction.Wait, WorkloadType.ThrowException, TaskCreationOptions.None, TaskSchedulerType.Default);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest28()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Running, PostRunSyncAction.Wait, WorkloadType.CreateChildTask, TaskCreationOptions.None, TaskSchedulerType.CustomWithInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest29()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Running, PostRunSyncAction.Wait, WorkloadType.CreateChildTask, TaskCreationOptions.None, TaskSchedulerType.CustomWithoutInlineExecution);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
[Fact]
public static void TaskRunSyncTest30()
{
TestParameters_RunSync parameters = new TestParameters_RunSync(PreTaskStatus.Running, PostRunSyncAction.Wait, WorkloadType.CreateChildTask, TaskCreationOptions.None, TaskSchedulerType.Default);
TaskRunSyncTest test = new TaskRunSyncTest(parameters);
test.RealRun();
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if WINDOWS
using GCPerfTestFramework.Metrics.Builders;
using Microsoft.Diagnostics.Tracing;
using Microsoft.Diagnostics.Tracing.Parsers;
using Microsoft.Xunit.Performance;
using Microsoft.Xunit.Performance.Sdk;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using Xunit.Abstractions;
namespace GCPerfTestFramework.Metrics
{
/// <summary>
/// GCMetricDiscoverer is one of two publicly-exposed classes from the library and is the
/// portion of this library that speaks directly to xunit-performance. When a
/// <see cref="CollectGCMetricsAttribute"/> is observed when xunit-performance is enumerating
/// the attributes on a test method, it instantiates an instance of this class and calls
/// GetMetrics on it, which yields the list of metrics that this library provides.
///
/// This class and <see cref="CollectGCMetricsAttribute"/> should be the *only* classes
/// exposed by this namespace.
/// </summary>
public class GCMetricDiscoverer : IPerformanceMetricDiscoverer
{
/// <summary>
/// Yields all current custom GC metrics.
/// </summary>
/// <param name="metricAttribute">Unused.</param>
/// <returns>An enumerator yielding new instances of all of the existing custom GC metrics.</returns>
public IEnumerable<PerformanceMetricInfo> GetMetrics(IAttributeInfo metricAttribute)
{
yield return new GCMaxPauseMetric();
yield return new GCMeanPauseMetric();
yield return new GCPeakVirtualMemoryMetric();
yield return new GCPeakWorkingSetMetric();
yield return new GCTotalPauseTimeMetric();
yield return new GCCpuTimeInGCMetric();
yield return new GCGenZeroMeanPauseDuration();
yield return new GCGenOneMeanPauseDuration();
yield return new GCGenTwoMeanPauseDuration();
yield return new GCGenZeroCount();
yield return new GCGenOneCount();
yield return new GCGenTwoBGCCount();
yield return new GCGenTwoGCCount();
}
}
/// <summary>
/// Base class for all GC-related metrics that handles provider registration for child metrics, since
/// all GC-related metrics will be listening to the same trace providers.
/// </summary>
internal abstract class GCMetric : PerformanceMetric
{
/// <summary>
/// Number of bytes in a megabyte, for convenience.
/// </summary>
public const int BytesInMegabyte = 1048576;
/// <summary>
/// Creates a new GCMetric with the given ID, display name, and unit.
/// </summary>
/// <param name="id">The ID of the metric</param>
/// <param name="displayName">A human-friendly display name of the metric</param>
/// <param name="unit">The unit of the metric</param>
public GCMetric(string id, string displayName, string unit)
: base(id, displayName, unit)
{
}
/// <summary>
/// Indicates to xunit-performance which trace providers these metrics
/// require.
/// </summary>
public override IEnumerable<ProviderInfo> ProviderInfo
{
get
{
yield return new KernelProviderInfo()
{
Keywords = (ulong)(KernelTraceEventParser.Keywords.ContextSwitch
| KernelTraceEventParser.Keywords.Profile
| KernelTraceEventParser.Keywords.ProcessCounters)
};
yield return new UserProviderInfo()
{
ProviderGuid = ClrTraceEventParser.ProviderGuid,
Level = TraceEventLevel.Verbose,
Keywords = (ulong)ClrTraceEventParser.Keywords.GC
};
}
}
/// <summary>
/// Constructs a new PerformanceMetricEvaluator for this metric. Implementors of a custom metric must override
/// this method and instruct it to instantiate the GCEvaluator for that custom metric.
/// </summary>
/// <param name="context"></param>
/// <returns></returns>
public abstract override PerformanceMetricEvaluator CreateEvaluator(PerformanceMetricEvaluationContext context);
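// Illustrative override in a concrete metric (the evaluator type name here is hypothetical):
//   public override PerformanceMetricEvaluator CreateEvaluator(PerformanceMetricEvaluationContext context)
//   {
//       return new GCMaxPauseEvaluator(context);
//   }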
}
/// <summary>
/// Base class for all GC-related metric evaluators that handles the complexity of multiplexing possibly many
/// GC metrics on top of a single "trace session" using a reference-counting strategy.
/// </summary>
internal abstract class GCEvaluator : PerformanceMetricEvaluator
{
/// <summary>
/// The sample rate used by xunit-performance when collecting ETW traces. Used
/// to infer the total time spent in GC based on CPU samples.
/// </summary>
const float SampleRate = 1.0f;
// These three fields are part of a bit of a hack to avoid having to re-parse the ETL file
// every time a new metric is evaluated.
//
// The idea here is that every class that derives from GCEvaluator increments the
// reference count whenever an iteration begins and decrements it whenever an iteration ends.
// When the reference count is zero, the session is nulled out for the next iteration.
// If _session is null when an iteration begins, the first metric to reach it will set it up
// to trace the session. In this way, the first metric in sets up the session and the last one
// out tears it down in preparation for the next iteration.
//
// This scheme is not thread-safe and will break if xunit-performance ever runs benchmarks in
// parallel, although that's pretty unlikely for a benchmarking framework.
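// Illustrative lifecycle, assuming two GC metrics are active for a single iteration:
//   BeginIteration (metric A) -> s_session created, s_sessionRefCount = 1
//   BeginIteration (metric B) -> s_sessionRefCount = 2
//   EndIteration   (metric A) -> s_sessionRefCount = 1
//   EndIteration   (metric B) -> s_sessionRefCount = 0, s_session nulled out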
private static IDictionary<int, GCProcess> s_session;
private static int s_sessionRefCount;
private static bool s_hasComputedRollup;
private readonly PerformanceMetricEvaluationContext _context;
/// <summary>
/// Property exposed to child metrics that automatically ensures that the session is valid and that
/// rollup information has been calculated, calculating it if it has not happened already.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown if this property is unable to determine an
/// appropriate process for analysis. Usually this occurs when
/// the test framework itself crashes and fails to launch a test.
/// </exception>
protected GCProcess ProcessInfo
{
get
{
if (!s_hasComputedRollup)
{
GCProcess.ComputeRollup(s_session);
s_hasComputedRollup = true;
}
// Since we are spawning this process with UseShellExecute set to false,
// the spawned process itself spawns an instance of "conhost.exe" on Windows.
// We want to be sure we don't pick out that one for analysis.
foreach (var candidate in s_session.Values)
{
if (candidate.CommandLine != null)
{
if (!candidate.CommandLine.Contains("conhost.exe"))
{
return candidate;
}
}
}
// This should never happen in GC-related tests, which are always required to spawn an additional process.
throw new InvalidOperationException("Failed to find an appropriate target process for analysis!");
}
}
/// <summary>
/// Constructs a new GCEvaluator and sets its context to the given PerformanceMetricEvaluationContext.
/// </summary>
/// <param name="context">The context received from the test framework</param>
public GCEvaluator(PerformanceMetricEvaluationContext context)
{
Debug.Assert(context.TraceEventSource is TraceEventDispatcher);
_context = context;
}
/// <summary>
/// Creates a session if it does not exist and increments the reference count on the session.
/// </summary>
/// <param name="beginEvent">Unused.</param>
public override void BeginIteration(TraceEvent beginEvent)
{
if (s_session == null)
{
// The filter function here is to filter out events that we are not concerned with collecting, i.e. events from
// processes not spawned by us.
s_session = GCProcess.Collect(_context.TraceEventSource as TraceEventDispatcher, SampleRate, filterFunc: _context.IsTestEvent);
s_hasComputedRollup = false;
}
s_sessionRefCount++;
}
/// <summary>
/// Yields the metric and decrements the reference count on the session, disposing it
/// if the reference count is zero.
/// </summary>
/// <param name="endEvent">Unused.</param>
/// <returns>The value of the metric calculated by this class</returns>
public override double EndIteration(TraceEvent endEvent)
{
var metric = YieldMetric();
s_sessionRefCount--;
if (s_sessionRefCount == 0)
{
s_session = null;
// not doing this results in tremendous memory leaks!
_context.TraceEventSource.Kernel.RemoveCallback<TraceEvent>(null);
_context.TraceEventSource.Clr.RemoveCallback<TraceEvent>(null);
}
return metric;
}
/// <summary>
/// Overridden by child metrics to determine how to yield the value of the metric
/// that the child metric provides. In general, overrides of this method
/// derive their result from the value of the <see cref="ProcessInfo"/> property.
/// </summary>
/// <returns>The value of this metric</returns>
protected abstract double YieldMetric();
}
}
#endif
| |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using Management.Storage.ScenarioTest.Common;
using Management.Storage.ScenarioTest.Util;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Microsoft.WindowsAzure.Storage.Blob;
using MS.Test.Common.MsTestLib;
using StorageTestLib;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using StorageBlob = Microsoft.WindowsAzure.Storage.Blob;
namespace Management.Storage.ScenarioTest.Functional.Blob
{
/// <summary>
/// functional tests for Set-AzureStorageBlobContent
/// </summary>
[TestClass]
class SetBlobContent : TestBase
{
private static string uploadDirRoot;
private static List<string> files = new List<string>();
//TODO upload an already opened read/write file
[ClassInitialize()]
public static void ClassInit(TestContext testContext)
{
TestBase.TestClassInitialize(testContext);
uploadDirRoot = Test.Data.Get("UploadDir");
SetupUploadDir();
}
[ClassCleanup()]
public static void SetBlobContentClassCleanup()
{
TestBase.TestClassCleanup();
}
/// <summary>
/// create upload dir and temp files
/// </summary>
private static void SetupUploadDir()
{
Test.Verbose("Create Upload dir {0}", uploadDirRoot);
if (!Directory.Exists(uploadDirRoot))
{
Directory.CreateDirectory(uploadDirRoot);
}
FileUtil.CleanDirectory(uploadDirRoot);
int minDirDepth = 1, maxDirDepth = 3;
int dirDepth = random.Next(minDirDepth, maxDirDepth);
Test.Info("Generate Temp files for Set-AzureStorageBlobContent");
files = FileUtil.GenerateTempFiles(uploadDirRoot, dirDepth);
files.Sort();
}
/// <summary>
/// set azure blob content by multiple files
/// 8.14 Set-AzureStorageBlobContent
/// 3. Upload a list of new blob files
/// </summary>
[TestMethod()]
[TestCategory(Tag.Function)]
[TestCategory(PsTag.Blob)]
[TestCategory(PsTag.SetBlobContent)]
public void SetBlobContentByMultipleFiles()
{
string containerName = Utility.GenNameString("container");
CloudBlobContainer container = blobUtil.CreateContainer(containerName);
try
{
List<IListBlobItem> blobLists = container.ListBlobs(string.Empty, true, BlobListingDetails.All).ToList();
Test.Assert(blobLists.Count == 0, string.Format("container {0} should contain {1} blobs, and actually it contains {2} blobs", containerName, 0, blobLists.Count));
DirectoryInfo rootDir = new DirectoryInfo(uploadDirRoot);
FileInfo[] rootFiles = rootDir.GetFiles();
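// Build the PowerShell pipeline: the 'ls' (Get-ChildItem) listing of the upload
// directory is piped into the subsequent Set-AzureStorageBlobContent call
// (presumably how AddPipelineScript wires it up), so each top-level file is uploaded.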
((PowerShellAgent)agent).AddPipelineScript(string.Format("ls -File -Path {0}", uploadDirRoot));
Test.Info("Upload files...");
Test.Assert(agent.SetAzureStorageBlobContent(string.Empty, containerName, StorageBlob.BlobType.BlockBlob), "upload multiple files should succeed");
Test.Info("Upload finished...");
blobLists = container.ListBlobs(string.Empty, true, BlobListingDetails.All).ToList();
Test.Assert(blobLists.Count == rootFiles.Count(), string.Format("set-azurestorageblobcontent should upload {0} files, and actually it's {1}", rootFiles.Count(), blobLists.Count));
ICloudBlob blob = null;
for (int i = 0, count = rootFiles.Count(); i < count; i++)
{
blob = blobLists[i] as ICloudBlob;
if (blob == null)
{
Test.AssertFail("blob can't be null");
}
Test.Assert(rootFiles[i].Name == blob.Name, string.Format("blob name should be {0}, and actually it's {1}", rootFiles[i].Name, blob.Name));
string localMd5 = Helper.GetFileContentMD5(Path.Combine(uploadDirRoot, rootFiles[i].Name));
Test.Assert(blob.BlobType == Microsoft.WindowsAzure.Storage.Blob.BlobType.BlockBlob, "blob type should be block blob");
Test.Assert(localMd5 == blob.Properties.ContentMD5, string.Format("blob content md5 should be {0}, and actually it's {1}", localMd5, blob.Properties.ContentMD5));
}
}
finally
{
blobUtil.RemoveContainer(containerName);
}
}
/// <summary>
/// upload files in subdirectory
/// 8.14 Set-AzureStorageBlobContent positive functional cases.
/// 4. Upload a block blob file and a page blob file with a subdirectory
/// </summary>
[TestMethod()]
[TestCategory(Tag.Function)]
[TestCategory(PsTag.Blob)]
[TestCategory(PsTag.SetBlobContent)]
public void SetBlobContentWithSubDirectory()
{
DirectoryInfo rootDir = new DirectoryInfo(uploadDirRoot);
DirectoryInfo[] dirs = rootDir.GetDirectories();
foreach (DirectoryInfo dir in dirs)
{
string containerName = Utility.GenNameString("container");
CloudBlobContainer container = blobUtil.CreateContainer(containerName);
try
{
List<IListBlobItem> blobLists = container.ListBlobs(string.Empty, true, BlobListingDetails.All).ToList();
Test.Assert(blobLists.Count == 0, string.Format("container {0} should contain {1} blobs, but actually it contains {2} blobs", containerName, 0, blobLists.Count));
StorageBlob.BlobType blobType = StorageBlob.BlobType.BlockBlob;
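// By convention in this test data, directories whose names start with "dirpage" hold page blob
// test files; all other directories are uploaded as block blobs.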
if (dir.Name.StartsWith("dirpage"))
{
blobType = Microsoft.WindowsAzure.Storage.Blob.BlobType.PageBlob;
}
((PowerShellAgent)agent).AddPipelineScript(string.Format("ls -File -Recurse -Path {0}", dir.FullName));
Test.Info("Upload files...");
Test.Assert(agent.SetAzureStorageBlobContent(string.Empty, containerName, blobType), "uploading multiple files should succeed");
Test.Info("Upload finished...");
blobLists = container.ListBlobs(string.Empty, true, BlobListingDetails.All).ToList();
List<string> dirFiles = files.FindAll(item => item.StartsWith(dir.Name));
Test.Assert(blobLists.Count == dirFiles.Count(), string.Format("set-azurestorageblobcontent should upload {0} files, and actually it's {1}", dirFiles.Count(), blobLists.Count));
ICloudBlob blob = null;
for (int i = 0, count = dirFiles.Count(); i < count; i++)
{
blob = blobLists[i] as ICloudBlob;
if (blob == null)
{
Test.AssertFail("blob can't be null");
}
string convertedName = blobUtil.ConvertBlobNameToFileName(blob.Name, dir.Name);
Test.Assert(dirFiles[i] == convertedName, string.Format("blob name should be {0}, and actually it's {1}", dirFiles[i], convertedName));
string localMd5 = Helper.GetFileContentMD5(Path.Combine(uploadDirRoot, dirFiles[i]));
Test.Assert(blob.BlobType == blobType, string.Format("blob type should be {0}", blobType));
Test.Assert(localMd5 == blob.Properties.ContentMD5, string.Format("blob content md5 should be {0}, and actually it's {1}", localMd5, blob.Properties.ContentMD5));
}
}
finally
{
blobUtil.RemoveContainer(containerName);
}
}
}
/// <summary>
/// set blob content with an invalid blob name
/// 8.14 Set-AzureStorageBlobContent negative functional cases
/// 1. Upload a blob file with an invalid blob name
/// </summary>
[TestMethod()]
[TestCategory(Tag.Function)]
[TestCategory(PsTag.Blob)]
[TestCategory(PsTag.SetBlobContent)]
public void SetBlobContentWithInvalidBlobName()
{
string containerName = Utility.GenNameString("container");
CloudBlobContainer container = blobUtil.CreateContainer(containerName);
try
{
int MaxBlobNameLength = 1024;
string blobName = new string('a', MaxBlobNameLength + 1);
List<IListBlobItem> blobLists = container.ListBlobs(string.Empty, true, BlobListingDetails.All).ToList();
Test.Assert(blobLists.Count == 0, string.Format("container {0} should contain {1} blobs, but actually it contains {2} blobs", containerName, 0, blobLists.Count));
Test.Assert(!agent.SetAzureStorageBlobContent(Path.Combine(uploadDirRoot, files[0]), containerName, StorageBlob.BlobType.BlockBlob, blobName), "uploading a blob with an invalid blob name should fail");
string expectedErrorMessage = string.Format("Blob name '{0}' is invalid.", blobName);
ExpectedStartsWithErrorMessage(expectedErrorMessage);
}
finally
{
blobUtil.RemoveContainer(containerName);
}
}
/// <summary>
/// set blob content with invalid blob type
/// 8.14 Set-AzureStorageBlobContent negative functional cases
/// 6. Upload a blob file with the same name but with different BlobType
/// </summary>
[TestMethod()]
[TestCategory(Tag.Function)]
[TestCategory(PsTag.Blob)]
[TestCategory(PsTag.SetBlobContent)]
public void SetBlobContentWithInvalidBlobType()
{
string containerName = Utility.GenNameString("container");
CloudBlobContainer container = blobUtil.CreateContainer(containerName);
try
{
string blobName = files[0];
List<IListBlobItem> blobLists = container.ListBlobs(string.Empty, true, BlobListingDetails.All).ToList();
Test.Assert(blobLists.Count == 0, string.Format("container {0} should contain {1} blobs, but actually it contains {2} blobs", containerName, 0, blobLists.Count));
Test.Assert(agent.SetAzureStorageBlobContent(Path.Combine(uploadDirRoot, files[0]), containerName, StorageBlob.BlobType.BlockBlob, blobName), "upload blob should be successful.");
blobLists = container.ListBlobs(string.Empty, true, BlobListingDetails.All).ToList();
Test.Assert(blobLists.Count == 1, string.Format("container {0} should contain {1} blobs, but actually it contains {2} blobs", containerName, 1, blobLists.Count));
string convertBlobName = blobUtil.ConvertFileNameToBlobName(blobName);
Test.Assert(((ICloudBlob)blobLists[0]).Name == convertBlobName, string.Format("blob name should be {0}, actually it's {1}", convertBlobName, ((ICloudBlob)blobLists[0]).Name));
Test.Assert(!agent.SetAzureStorageBlobContent(Path.Combine(uploadDirRoot, files[0]), containerName, StorageBlob.BlobType.PageBlob, blobName), "uploading a blob with a mismatched blob type should fail.");
string expectedErrorMessage = string.Format("Blob type mismatched, the current blob type of '{0}' is BlockBlob.", ((ICloudBlob)blobLists[0]).Name);
Test.Assert(agent.ErrorMessages[0] == expectedErrorMessage, string.Format("Expect error message: {0} != {1}", expectedErrorMessage, agent.ErrorMessages[0]));
}
finally
{
blobUtil.RemoveContainer(containerName);
}
}
/// <summary>
/// upload page blob with invalid file size
/// 8.14 Set-AzureStorageBlobContent negative functional cases
/// 8. Upload a page blob the size of which is not 512*n
/// </summary>
[TestMethod()]
[TestCategory(Tag.Function)]
[TestCategory(PsTag.Blob)]
[TestCategory(PsTag.SetBlobContent)]
public void SetPageBlobWithInvalidFileSize()
{
string fileName = Utility.GenNameString("tinypageblob");
string filePath = Path.Combine(uploadDirRoot, fileName);
int fileSize = 480;
Helper.GenerateTinyFile(filePath, fileSize);
string containerName = Utility.GenNameString("container");
CloudBlobContainer container = blobUtil.CreateContainer(containerName);
try
{
List<IListBlobItem> blobLists = container.ListBlobs(string.Empty, true, BlobListingDetails.All).ToList();
Test.Assert(blobLists.Count == 0, string.Format("container {0} should contain {1} blobs, but actually it contains {2} blobs", containerName, 0, blobLists.Count));
Test.Assert(!agent.SetAzureStorageBlobContent(filePath, containerName, StorageBlob.BlobType.PageBlob), "uploading a page blob with an invalid file size should fail.");
string expectedErrorMessage = "The page blob size must be a multiple of 512 bytes.";
Test.Assert(agent.ErrorMessages[0].StartsWith(expectedErrorMessage), expectedErrorMessage);
blobLists = container.ListBlobs(string.Empty, true, BlobListingDetails.All).ToList();
Test.Assert(blobLists.Count == 0, string.Format("container {0} should contain {1} blobs, but actually it contains {2} blobs", containerName, 0, blobLists.Count));
}
finally
{
blobUtil.RemoveContainer(containerName);
FileUtil.RemoveFile(filePath);
}
}
/// <summary>
/// Set blob content with blob properties
/// </summary>
[TestMethod()]
[TestCategory(Tag.Function)]
[TestCategory(PsTag.Blob)]
[TestCategory(PsTag.SetBlobContent)]
public void SetBlobContentWithProperties()
{
SetBlobContentWithProperties(StorageBlob.BlobType.BlockBlob);
SetBlobContentWithProperties(StorageBlob.BlobType.PageBlob);
}
/// <summary>
/// set blob content with blob meta data
/// </summary>
[TestMethod()]
[TestCategory(Tag.Function)]
[TestCategory(PsTag.Blob)]
[TestCategory(PsTag.SetBlobContent)]
public void SetBlobContentWithMetadata()
{
SetBlobContentWithMetadata(StorageBlob.BlobType.BlockBlob);
SetBlobContentWithMetadata(StorageBlob.BlobType.PageBlob);
}
/// <summary>
/// set blob content for an existing blob without the Force parameter
/// </summary>
[TestMethod()]
[TestCategory(Tag.Function)]
[TestCategory(PsTag.Blob)]
[TestCategory(PsTag.SetBlobContent)]
public void SetBlobContentForExistingBlobWithoutForce()
{
string filePath = FileUtil.GenerateOneTempTestFile();
CloudBlobContainer container = blobUtil.CreateContainer();
string blobName = Utility.GenNameString("blob");
ICloudBlob blob = blobUtil.CreateRandomBlob(container, blobName);
try
{
string previousMd5 = blob.Properties.ContentMD5;
Test.Assert(!agent.SetAzureStorageBlobContent(filePath, container.Name, blob.BlobType, blob.Name, false), "set blob content without force parameter should fail");
ExpectedContainErrorMessage(ConfirmExceptionMessage);
blob.FetchAttributes();
ExpectEqual(previousMd5, blob.Properties.ContentMD5, "content md5");
}
finally
{
blobUtil.RemoveContainer(container.Name);
FileUtil.RemoveFile(filePath);
}
}
public void SetBlobContentWithProperties(StorageBlob.BlobType blobType)
{
string filePath = FileUtil.GenerateOneTempTestFile();
CloudBlobContainer container = blobUtil.CreateContainer();
Hashtable properties = new Hashtable();
properties.Add("CacheControl", Utility.GenNameString(string.Empty));
properties.Add("ContentEncoding", Utility.GenNameString(string.Empty));
properties.Add("ContentLanguage", Utility.GenNameString(string.Empty));
properties.Add("ContentMD5", Utility.GenNameString(string.Empty));
properties.Add("ContentType", Utility.GenNameString(string.Empty));
try
{
Test.Assert(agent.SetAzureStorageBlobContent(filePath, container.Name, blobType, string.Empty, true, -1, properties), "set blob content with property should succeed");
ICloudBlob blob = container.GetBlobReferenceFromServer(Path.GetFileName(filePath));
blob.FetchAttributes();
ExpectEqual(properties["CacheControl"].ToString(), blob.Properties.CacheControl, "Cache control");
ExpectEqual(properties["ContentEncoding"].ToString(), blob.Properties.ContentEncoding, "Content Encoding");
ExpectEqual(properties["ContentLanguage"].ToString(), blob.Properties.ContentLanguage, "Content Language");
ExpectEqual(properties["ContentMD5"].ToString(), blob.Properties.ContentMD5, "Content MD5");
ExpectEqual(properties["ContentType"].ToString(), blob.Properties.ContentType, "Content Type");
}
finally
{
blobUtil.RemoveContainer(container.Name);
FileUtil.RemoveFile(filePath);
}
}
public void SetBlobContentWithMetadata(StorageBlob.BlobType blobType)
{
string filePath = FileUtil.GenerateOneTempTestFile();
CloudBlobContainer container = blobUtil.CreateContainer();
Hashtable metadata = new Hashtable();
int metaCount = GetRandomTestCount();
for (int i = 0; i < metaCount; i++)
{
string key = Utility.GenRandomAlphabetString();
string value = Utility.GenNameString(string.Empty);
if (!metadata.ContainsKey(key))
{
Test.Info(string.Format("Add meta key: {0} value : {1}", key, value));
metadata.Add(key, value);
}
}
try
{
Test.Assert(agent.SetAzureStorageBlobContent(filePath, container.Name, blobType, string.Empty, true, -1, null, metadata), "set blob content with meta should succeed");
ICloudBlob blob = container.GetBlobReferenceFromServer(Path.GetFileName(filePath));
blob.FetchAttributes();
ExpectEqual(metadata.Count, blob.Metadata.Count, "meta data count");
foreach (string key in metadata.Keys)
{
ExpectEqual(metadata[key].ToString(), blob.Metadata[key], "Meta data key " + key);
}
}
finally
{
blobUtil.RemoveContainer(container.Name);
FileUtil.RemoveFile(filePath);
}
}
}
}
| |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
using System;
using System.ComponentModel.Composition;
using QuantConnect.Configuration;
using QuantConnect.Interfaces;
using QuantConnect.Lean.Engine.DataFeeds;
using QuantConnect.Lean.Engine.RealTime;
using QuantConnect.Lean.Engine.Results;
using QuantConnect.Lean.Engine.Setup;
using QuantConnect.Lean.Engine.TransactionHandlers;
using QuantConnect.Util;
namespace QuantConnect.Lean.Engine
{
/// <summary>
/// Provides a container for the algorithm specific handlers
/// </summary>
public class LeanEngineAlgorithmHandlers : IDisposable
{
private readonly IDataFeed _dataFeed;
private readonly ISetupHandler _setup;
private readonly IResultHandler _results;
private readonly IRealTimeHandler _realTime;
private readonly ITransactionHandler _transactions;
private readonly IHistoryProvider _historyProvider;
private readonly ICommandQueueHandler _commandQueue;
private readonly IMapFileProvider _mapFileProvider;
private readonly IFactorFileProvider _factorFileProvider;
/// <summary>
/// Gets the result handler used to communicate results from the algorithm
/// </summary>
public IResultHandler Results
{
get { return _results; }
}
/// <summary>
/// Gets the setup handler used to initialize the algorithm state
/// </summary>
public ISetupHandler Setup
{
get { return _setup; }
}
/// <summary>
/// Gets the data feed handler used to provide data to the algorithm
/// </summary>
public IDataFeed DataFeed
{
get { return _dataFeed; }
}
/// <summary>
/// Gets the transaction handler used to process orders from the algorithm
/// </summary>
public ITransactionHandler Transactions
{
get { return _transactions; }
}
/// <summary>
/// Gets the real time handler used to process real time events
/// </summary>
public IRealTimeHandler RealTime
{
get { return _realTime; }
}
/// <summary>
/// Gets the history provider used to process historical data requests within the algorithm
/// </summary>
public IHistoryProvider HistoryProvider
{
get { return _historyProvider; }
}
/// <summary>
/// Gets the command queue responsible for receiving external commands for the algorithm
/// </summary>
public ICommandQueueHandler CommandQueue
{
get { return _commandQueue; }
}
/// <summary>
/// Gets the map file provider used as a map file source for the data feed
/// </summary>
public IMapFileProvider MapFileProvider
{
get { return _mapFileProvider; }
}
/// <summary>
/// Gets the factor file provider used as a factor file source for the data feed
/// </summary>
public IFactorFileProvider FactorFileProvider
{
get { return _factorFileProvider; }
}
/// <summary>
/// Initializes a new instance of the <see cref="LeanEngineAlgorithmHandlers"/> class from the specified handlers
/// </summary>
/// <param name="results">The result handler for communicating results from the algorithm</param>
/// <param name="setup">The setup handler used to initialize algorithm state</param>
/// <param name="dataFeed">The data feed handler used to pump data to the algorithm</param>
/// <param name="transactions">The transaction handler used to process orders from the algorithm</param>
/// <param name="realTime">The real time handler used to process real time events</param>
/// <param name="historyProvider">The history provider used to process historical data requests</param>
/// <param name="commandQueue">The command queue handler used to receive external commands for the algorithm</param>
/// <param name="mapFileProvider">The map file provider used to retrieve map files for the data feed</param>
public LeanEngineAlgorithmHandlers(IResultHandler results,
ISetupHandler setup,
IDataFeed dataFeed,
ITransactionHandler transactions,
IRealTimeHandler realTime,
IHistoryProvider historyProvider,
ICommandQueueHandler commandQueue,
IMapFileProvider mapFileProvider,
IFactorFileProvider factorFileProvider
)
{
if (results == null)
{
throw new ArgumentNullException("results");
}
if (setup == null)
{
throw new ArgumentNullException("setup");
}
if (dataFeed == null)
{
throw new ArgumentNullException("dataFeed");
}
if (transactions == null)
{
throw new ArgumentNullException("transactions");
}
if (realTime == null)
{
throw new ArgumentNullException("realTime");
}
if (historyProvider == null)
{
throw new ArgumentNullException("realTime");
}
if (commandQueue == null)
{
throw new ArgumentNullException("commandQueue");
}
if (mapFileProvider == null)
{
throw new ArgumentNullException("mapFileProvider");
}
if (factorFileProvider == null)
{
throw new ArgumentNullException("factorFileProvider");
}
_results = results;
_setup = setup;
_dataFeed = dataFeed;
_transactions = transactions;
_realTime = realTime;
_historyProvider = historyProvider;
_commandQueue = commandQueue;
_mapFileProvider = mapFileProvider;
_factorFileProvider = factorFileProvider;
}
/// <summary>
/// Creates a new instance of the <see cref="LeanEngineAlgorithmHandlers"/> class from the specified composer using type names from configuration
/// </summary>
/// <param name="composer">The composer instance to obtain implementations from</param>
/// <returns>A fully hydrated <see cref="LeanEngineAlgorithmHandlers"/> instance.</returns>
/// <exception cref="CompositionException">Throws a CompositionException during failure to load</exception>
public static LeanEngineAlgorithmHandlers FromConfiguration(Composer composer)
{
var setupHandlerTypeName = Config.Get("setup-handler", "ConsoleSetupHandler");
var transactionHandlerTypeName = Config.Get("transaction-handler", "BacktestingTransactionHandler");
var realTimeHandlerTypeName = Config.Get("real-time-handler", "BacktestingRealTimeHandler");
var dataFeedHandlerTypeName = Config.Get("data-feed-handler", "FileSystemDataFeed");
var resultHandlerTypeName = Config.Get("result-handler", "ConsoleResultHandler");
var historyProviderTypeName = Config.Get("history-provider", "SubscriptionDataReaderHistoryProvider");
var commandQueueHandlerTypeName = Config.Get("command-queue-handler", "EmptyCommandQueueHandler");
var mapFileProviderTypeName = Config.Get("map-file-provider", "LocalDiskMapFileProvider");
var factorFileProviderTypeName = Config.Get("factor-file-provider", "LocalDiskFactorFileProvider");
return new LeanEngineAlgorithmHandlers(
composer.GetExportedValueByTypeName<IResultHandler>(resultHandlerTypeName),
composer.GetExportedValueByTypeName<ISetupHandler>(setupHandlerTypeName),
composer.GetExportedValueByTypeName<IDataFeed>(dataFeedHandlerTypeName),
composer.GetExportedValueByTypeName<ITransactionHandler>(transactionHandlerTypeName),
composer.GetExportedValueByTypeName<IRealTimeHandler>(realTimeHandlerTypeName),
composer.GetExportedValueByTypeName<IHistoryProvider>(historyProviderTypeName),
composer.GetExportedValueByTypeName<ICommandQueueHandler>(commandQueueHandlerTypeName),
composer.GetExportedValueByTypeName<IMapFileProvider>(mapFileProviderTypeName),
composer.GetExportedValueByTypeName<IFactorFileProvider>(factorFileProviderTypeName)
);
}
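// Usage sketch (illustrative only, not part of the engine): a composition root would typically
// resolve the handlers once, run the engine, and dispose them afterwards. Composer.Instance is
// assumed here to be the shared composer singleton.
//
//     var algorithmHandlers = LeanEngineAlgorithmHandlers.FromConfiguration(Composer.Instance);
//     try
//     {
//         // run the engine using algorithmHandlers.DataFeed, .Results, .Transactions, ...
//     }
//     finally
//     {
//         algorithmHandlers.Dispose();
//     }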
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
/// <filterpriority>2</filterpriority>
public void Dispose()
{
Setup.Dispose();
CommandQueue.Dispose();
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
namespace Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition
{
using Microsoft.Azure.Management.Network.Fluent;
using Microsoft.Azure.Management.ResourceManager.Fluent.Core.ResourceActions;
using Microsoft.Azure.Management.ResourceManager.Fluent.Core.GroupableResource.Definition;
using Microsoft.Azure.Management.ResourceManager.Fluent.Core.Resource.Definition;
using Microsoft.Azure.Management.Network.Fluent.NicIPConfiguration.Definition;
/// <summary>
/// The stage of the network interface definition allowing to associate it with a load balancer.
/// </summary>
public interface IWithLoadBalancer
{
/// <summary>
/// Associates the network interface's primary IP configuration with a backend of an existing load balancer.
/// </summary>
/// <param name="loadBalancer">An existing load balancer.</param>
/// <param name="backendName">The name of an existing backend on that load balancer.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate WithExistingLoadBalancerBackend(ILoadBalancer loadBalancer, string backendName);
/// <summary>
/// Associates the network interface's primary IP configuration with an inbound NAT rule of an existing load balancer.
/// </summary>
/// <param name="loadBalancer">An existing load balancer.</param>
/// <param name="inboundNatRuleName">The name of an existing inbound NAT rule on the selected load balancer.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate WithExistingLoadBalancerInboundNatRule(ILoadBalancer loadBalancer, string inboundNatRuleName);
}
/// <summary>
/// The stage of the network interface definition allowing to specify the virtual network for
/// primary IP configuration.
/// </summary>
public interface IWithPrimaryNetwork
{
/// <summary>
/// Associate an existing virtual network with the network interface's primary IP configuration.
/// </summary>
/// <param name="network">An existing virtual network.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithPrimaryNetworkSubnet WithExistingPrimaryNetwork(INetwork network);
/// <summary>
/// Create a new virtual network to associate with the network interface's primary IP configuration,
/// based on the provided definition.
/// </summary>
/// <param name="creatable">A creatable definition for a new virtual network.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithPrimaryPrivateIP WithNewPrimaryNetwork(ICreatable<Microsoft.Azure.Management.Network.Fluent.INetwork> creatable);
/// <summary>
/// Creates a new virtual network to associate with the network interface's primary IP configuration.
/// The virtual network will be created in the same resource group and region as the network interface;
/// it will be created with the specified address space and a default subnet covering the entirety of
/// the network IP address space.
/// </summary>
/// <param name="name">The name of the new virtual network.</param>
/// <param name="addressSpace">The address space for rhe virtual network.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithPrimaryPrivateIP WithNewPrimaryNetwork(string name, string addressSpace);
/// <summary>
/// Creates a new virtual network to associate with the network interface's primary IP configuration.
/// The virtual network will be created in the same resource group and region as the network interface;
/// it will be created with the specified address space and a default subnet covering the entirety of
/// the network IP address space.
/// </summary>
/// <param name="addressSpace">The address space for the virtual network.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithPrimaryPrivateIP WithNewPrimaryNetwork(string addressSpace);
}
/// <summary>
/// The stage of the network interface definition allowing to associate a network security group.
/// </summary>
public interface IWithNetworkSecurityGroup
{
/// <summary>
/// Create a new network security group to associate with network interface, based on the provided definition.
/// </summary>
/// <param name="creatable">A creatable definition for a new network security group.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate WithNewNetworkSecurityGroup(ICreatable<Microsoft.Azure.Management.Network.Fluent.INetworkSecurityGroup> creatable);
/// <summary>
/// Associates an existing network security group with the network interface.
/// </summary>
/// <param name="networkSecurityGroup">An existing network security group.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate WithExistingNetworkSecurityGroup(INetworkSecurityGroup networkSecurityGroup);
}
/// <summary>
/// The stage of the network interface definition allowing to specify the resource group.
/// </summary>
public interface IWithGroup :
Microsoft.Azure.Management.ResourceManager.Fluent.Core.GroupableResource.Definition.IWithGroup<Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithPrimaryNetwork>
{
}
/// <summary>
/// The stage of the network interface definition which contains all the minimum required inputs for
/// the resource to be created, but also allows
/// for any other optional settings to be specified.
/// </summary>
public interface IWithCreate :
Microsoft.Azure.Management.ResourceManager.Fluent.Core.ResourceActions.ICreatable<Microsoft.Azure.Management.Network.Fluent.INetworkInterface>,
Microsoft.Azure.Management.ResourceManager.Fluent.Core.Resource.Definition.IDefinitionWithTags<Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate>,
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithPrimaryPublicIPAddress,
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithNetworkSecurityGroup,
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithSecondaryIPConfiguration,
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithAcceleratedNetworking,
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithLoadBalancer
{
/// <summary>
/// Specifies the internal DNS name label for the network interface.
/// </summary>
/// <param name="dnsNameLabel">The internal DNS name label.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate WithInternalDnsNameLabel(string dnsNameLabel);
/// <summary>
/// Enables IP forwarding in the network interface.
/// </summary>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate WithIPForwarding();
/// <summary>
/// Specifies the IP address of the custom DNS server to associate with the network interface.
/// Note this method's effect is additive, i.e. each time it is used, the new DNS server is
/// added to the network interface.
/// </summary>
/// <param name="ipAddress">The IP address of the DNS server.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate WithDnsServer(string ipAddress);
}
/// <summary>
/// The stage of the network interface definition allowing to associate a public IP address with its primary
/// IP configuration.
/// </summary>
public interface IWithPrimaryPublicIPAddress
{
/// <summary>
/// Associates an existing public IP address with the network interface's primary IP configuration.
/// </summary>
/// <param name="publicIPAddress">An existing public IP address.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate WithExistingPrimaryPublicIPAddress(IPublicIPAddress publicIPAddress);
/// <summary>
/// Create a new public IP address to associate with network interface's primary IP configuration, based on
/// the provided definition.
/// </summary>
/// <param name="creatable">A creatable definition for a new public IP.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate WithNewPrimaryPublicIPAddress(ICreatable<Microsoft.Azure.Management.Network.Fluent.IPublicIPAddress> creatable);
/// <summary>
/// Creates a new public IP address in the same region and group as the resource and associates it
/// with the network interface's primary IP configuration.
/// The internal name and DNS label for the public IP address will be derived from the network interface name.
/// </summary>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate WithNewPrimaryPublicIPAddress();
/// <summary>
/// Creates a new public IP address in the same region and group as the resource, with the specified DNS label
/// and associates it with the network interface's primary IP configuration.
/// The internal name for the public IP address will be derived from the DNS label.
/// </summary>
/// <param name="leafDnsLabel">The leaf domain label.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate WithNewPrimaryPublicIPAddress(string leafDnsLabel);
}
/// <summary>
/// The stage of the network interface definition allowing to enable accelerated networking.
/// </summary>
public interface IWithAcceleratedNetworking
{
/// <summary>
/// Enables accelerated networking.
/// Note that additional steps need to be taken in the virtual machine itself for the virtual machine associated with this network interface to be able to
/// take advantage of accelerated networking. This feature might not be available in some regions, virtual machine sizes, or operating system versions.
/// It can be enabled only during the creation of a network interface, not during an update.
/// </summary>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate WithAcceleratedNetworking();
}
/// <summary>
/// The stage of the network interface definition allowing to specify subnet.
/// </summary>
public interface IWithPrimaryNetworkSubnet
{
/// <summary>
/// Associate a subnet with the network interface's primary IP configuration.
/// </summary>
/// <param name="name">The subnet name.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithPrimaryPrivateIP WithSubnet(string name);
}
/// <summary>
/// The stage of the network interface definition allowing to specify private IP address within
/// a virtual network subnet.
/// </summary>
public interface IWithPrimaryPrivateIP
{
/// <summary>
/// Enables dynamic private IP address allocation within the specified existing virtual network
/// subnet for the network interface's primary IP configuration.
/// </summary>
/// <return>The next stage of network interface definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate WithPrimaryPrivateIPAddressDynamic();
/// <summary>
/// Assigns the specified static private IP address within the specified existing virtual network
/// subnet to the network interface's primary IP configuration.
/// </summary>
/// <param name="staticPrivateIPAddress">
/// The static IP address within the specified subnet to assign to
/// the network interface.
/// </param>
/// <return>The next stage of network interface definition.</return>
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate WithPrimaryPrivateIPAddressStatic(string staticPrivateIPAddress);
}
/// <summary>
/// The first stage of the network interface definition.
/// </summary>
public interface IBlank :
Microsoft.Azure.Management.ResourceManager.Fluent.Core.Resource.Definition.IDefinitionWithRegion<Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithGroup>
{
}
/// <summary>
/// The stage of the network interface definition allowing to associate secondary IP configurations.
/// </summary>
public interface IWithSecondaryIPConfiguration
{
/// <summary>
/// Starts definition of a secondary IP configuration.
/// </summary>
/// <param name="name">Name for the IP configuration.</param>
/// <return>The first stage of a secondary IP configuration definition.</return>
Microsoft.Azure.Management.Network.Fluent.NicIPConfiguration.Definition.IBlank<Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate> DefineSecondaryIPConfiguration(string name);
}
/// <summary>
/// The entirety of the network interface definition.
/// </summary>
public interface IDefinition :
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IBlank,
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithGroup,
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithPrimaryNetwork,
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithPrimaryNetworkSubnet,
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithPrimaryPrivateIP,
Microsoft.Azure.Management.Network.Fluent.NetworkInterface.Definition.IWithCreate
{
}
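// Usage sketch (illustrative only): callers walk these stages fluently, typically starting from a
// network interfaces collection on an Azure entry point (the "azure" variable below is an assumption):
//
//     var nic = azure.NetworkInterfaces.Define("nic1")
//         .WithRegion(Region.USEast)
//         .WithExistingResourceGroup("myResourceGroup")
//         .WithNewPrimaryNetwork("10.0.0.0/28")
//         .WithPrimaryPrivateIPAddressDynamic()
//         .WithNewPrimaryPublicIPAddress("mynicdns")
//         .Create();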
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.ErrorReporting;
using Microsoft.CodeAnalysis.Host;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.Notification;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Microsoft.CodeAnalysis.Versions;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.SolutionCrawler
{
internal sealed partial class WorkCoordinatorRegistrationService
{
private sealed partial class WorkCoordinator
{
private sealed partial class IncrementalAnalyzerProcessor
{
private sealed class NormalPriorityProcessor : GlobalOperationAwareIdleProcessor
{
private readonly AsyncDocumentWorkItemQueue _workItemQueue;
private readonly Lazy<ImmutableArray<IIncrementalAnalyzer>> _lazyAnalyzers;
private readonly ConcurrentDictionary<DocumentId, bool> _higherPriorityDocumentsNotProcessed;
private readonly HashSet<ProjectId> _currentSnapshotVersionTrackingSet;
private ProjectId _currentProjectProcessing;
private Solution _processingSolution;
private IDisposable _projectCache;
// whether this processor is running or not
private Task _running;
public NormalPriorityProcessor(
IAsynchronousOperationListener listener,
IncrementalAnalyzerProcessor processor,
Lazy<ImmutableArray<IIncrementalAnalyzer>> lazyAnalyzers,
IGlobalOperationNotificationService globalOperationNotificationService,
int backOffTimeSpanInMs,
CancellationToken shutdownToken) :
base(listener, processor, globalOperationNotificationService, backOffTimeSpanInMs, shutdownToken)
{
_lazyAnalyzers = lazyAnalyzers;
_running = SpecializedTasks.EmptyTask;
_workItemQueue = new AsyncDocumentWorkItemQueue();
_higherPriorityDocumentsNotProcessed = new ConcurrentDictionary<DocumentId, bool>(concurrencyLevel: 2, capacity: 20);
_currentProjectProcessing = default(ProjectId);
_processingSolution = null;
_currentSnapshotVersionTrackingSet = new HashSet<ProjectId>();
Start();
}
internal ImmutableArray<IIncrementalAnalyzer> Analyzers
{
get
{
return _lazyAnalyzers.Value;
}
}
public void Enqueue(WorkItem item)
{
Contract.ThrowIfFalse(item.DocumentId != null, "can only enqueue a document work item");
this.UpdateLastAccessTime();
var added = _workItemQueue.AddOrReplace(item);
Logger.Log(FunctionId.WorkCoordinator_DocumentWorker_Enqueue, s_enqueueLogger, Environment.TickCount, item.DocumentId, !added);
CheckHigherPriorityDocument(item);
SolutionCrawlerLogger.LogWorkItemEnqueue(
this.Processor._logAggregator, item.Language, item.DocumentId, item.InvocationReasons, item.IsLowPriority, item.ActiveMember, added);
}
private void CheckHigherPriorityDocument(WorkItem item)
{
if (item.InvocationReasons.Contains(PredefinedInvocationReasons.DocumentOpened) ||
item.InvocationReasons.Contains(PredefinedInvocationReasons.DocumentClosed))
{
AddHigherPriorityDocument(item.DocumentId);
}
}
private void AddHigherPriorityDocument(DocumentId id)
{
_higherPriorityDocumentsNotProcessed[id] = true;
SolutionCrawlerLogger.LogHigherPriority(this.Processor._logAggregator, id.Id);
}
protected override Task WaitAsync(CancellationToken cancellationToken)
{
if (!_workItemQueue.HasAnyWork)
{
if (_projectCache != null)
{
_projectCache.Dispose();
_projectCache = null;
}
}
return _workItemQueue.WaitAsync(cancellationToken);
}
public Task Running
{
get
{
return _running;
}
}
protected override async Task ExecuteAsync()
{
if (this.CancellationToken.IsCancellationRequested)
{
return;
}
var source = new TaskCompletionSource<object>();
try
{
// mark it as running
_running = source.Task;
// we wait for global operation if there is anything going on
await GlobalOperationWaitAsync().ConfigureAwait(false);
// we wait for the higher priority processor to finish its work
await this.Processor._highPriorityProcessor.Running.ConfigureAwait(false);
// okay, there must be at least one item in the map
await ResetStatesAsync().ConfigureAwait(false);
if (await TryProcessOneHigherPriorityDocumentAsync().ConfigureAwait(false))
{
// successfully processed a high priority document.
return;
}
// process one of the remaining documents
var documentCancellation = default(CancellationTokenSource);
WorkItem workItem;
if (!_workItemQueue.TryTakeAnyWork(_currentProjectProcessing, out workItem, out documentCancellation))
{
return;
}
// check whether we have been shutdown
if (this.CancellationToken.IsCancellationRequested)
{
return;
}
// check whether we have moved to a new project
SetProjectProcessing(workItem.ProjectId);
// process the new document
await ProcessDocumentAsync(this.Analyzers, workItem, documentCancellation).ConfigureAwait(false);
}
catch (Exception e) when(FatalError.ReportUnlessCanceled(e))
{
throw ExceptionUtilities.Unreachable;
}
finally
{
// mark it as done running
source.SetResult(null);
}
}
private void SetProjectProcessing(ProjectId currentProject)
{
if (currentProject != _currentProjectProcessing)
{
if (_projectCache != null)
{
_projectCache.Dispose();
_projectCache = null;
}
var projectCacheService = _processingSolution.Workspace.Services.GetService<IProjectCacheService>();
if (projectCacheService != null)
{
_projectCache = projectCacheService.EnableCaching(currentProject);
}
}
_currentProjectProcessing = currentProject;
}
private IEnumerable<DocumentId> GetPrioritizedPendingDocuments()
{
if (this.Processor._documentTracker != null)
{
// First the active document
var activeDocumentId = this.Processor._documentTracker.GetActiveDocument();
if (activeDocumentId != null && _higherPriorityDocumentsNotProcessed.ContainsKey(activeDocumentId))
{
yield return activeDocumentId;
}
// Now any visible documents
foreach (var visibleDocumentId in this.Processor._documentTracker.GetVisibleDocuments())
{
if (_higherPriorityDocumentsNotProcessed.ContainsKey(visibleDocumentId))
{
yield return visibleDocumentId;
}
}
}
// Any other opened documents
foreach (var documentId in _higherPriorityDocumentsNotProcessed.Keys)
{
yield return documentId;
}
}
private async Task<bool> TryProcessOneHigherPriorityDocumentAsync()
{
try
{
// this is a best-effort algorithm with some shortcomings.
//
// the most obvious issue is if there is a new work item (without a solution change - but very unlikely)
// for an opened document we already processed, the work item will be treated as a regular one rather
// than as a higher priority one (opened document)
CancellationTokenSource documentCancellation;
foreach (var documentId in this.GetPrioritizedPendingDocuments())
{
if (this.CancellationToken.IsCancellationRequested)
{
return true;
}
// see whether we have work item for the document
WorkItem workItem;
if (!_workItemQueue.TryTake(documentId, out workItem, out documentCancellation))
{
continue;
}
// okay now we have work to do
await ProcessDocumentAsync(this.Analyzers, workItem, documentCancellation).ConfigureAwait(false);
// remove the opened document that was just processed
bool dummy;
_higherPriorityDocumentsNotProcessed.TryRemove(documentId, out dummy);
return true;
}
return false;
}
catch (Exception e) when(FatalError.ReportUnlessCanceled(e))
{
throw ExceptionUtilities.Unreachable;
}
}
private async Task ProcessDocumentAsync(ImmutableArray<IIncrementalAnalyzer> analyzers, WorkItem workItem, CancellationTokenSource source)
{
if (this.CancellationToken.IsCancellationRequested)
{
return;
}
var processedEverything = false;
var documentId = workItem.DocumentId;
try
{
using (Logger.LogBlock(FunctionId.WorkCoordinator_ProcessDocumentAsync, source.Token))
{
var cancellationToken = source.Token;
var document = _processingSolution.GetDocument(documentId);
if (document != null)
{
await TrackSemanticVersionsAsync(document, workItem, cancellationToken).ConfigureAwait(false);
// if we are called because a document is opened, we invalidate the document so that
// it can be re-analyzed. otherwise, since a newly opened document has the same version as before,
// the analyzer will simply return the same data back
if (workItem.MustRefresh && !workItem.IsRetry)
{
var isOpen = document.IsOpen();
await ProcessOpenDocumentIfNeeded(analyzers, workItem, document, isOpen, cancellationToken).ConfigureAwait(false);
await ProcessCloseDocumentIfNeeded(analyzers, workItem, document, isOpen, cancellationToken).ConfigureAwait(false);
}
// check whether we have a special reanalyze request
await ProcessReanalyzeDocumentAsync(workItem, document, cancellationToken).ConfigureAwait(false);
await ProcessDocumentAnalyzersAsync(document, analyzers, workItem, cancellationToken).ConfigureAwait(false);
}
else
{
SolutionCrawlerLogger.LogProcessDocumentNotExist(this.Processor._logAggregator);
RemoveDocument(documentId);
}
if (!cancellationToken.IsCancellationRequested)
{
processedEverything = true;
}
}
}
catch (Exception e) when(FatalError.ReportUnlessCanceled(e))
{
throw ExceptionUtilities.Unreachable;
}
finally
{
// we got cancelled in the middle of processing the document.
// let's make sure the newly enqueued work item has all the flags needed.
if (!processedEverything)
{
_workItemQueue.AddOrReplace(workItem.Retry(this.Listener.BeginAsyncOperation("ReenqueueWorkItem")));
}
SolutionCrawlerLogger.LogProcessDocument(this.Processor._logAggregator, documentId.Id, processedEverything);
// remove one that is finished running
_workItemQueue.RemoveCancellationSource(workItem.DocumentId);
}
}
private async Task TrackSemanticVersionsAsync(Document document, WorkItem workItem, CancellationToken cancellationToken)
{
if (workItem.IsRetry ||
workItem.InvocationReasons.Contains(PredefinedInvocationReasons.DocumentAdded) ||
!workItem.InvocationReasons.Contains(PredefinedInvocationReasons.SyntaxChanged))
{
return;
}
var service = document.Project.Solution.Workspace.Services.GetService<ISemanticVersionTrackingService>();
if (service == null)
{
return;
}
// we already reported on this project for the same snapshot; no need to do it again
if (_currentSnapshotVersionTrackingSet.Contains(document.Project.Id))
{
return;
}
await service.RecordSemanticVersionsAsync(document.Project, cancellationToken).ConfigureAwait(false);
// mark this project as already processed.
_currentSnapshotVersionTrackingSet.Add(document.Project.Id);
}
private async Task ProcessOpenDocumentIfNeeded(ImmutableArray<IIncrementalAnalyzer> analyzers, WorkItem workItem, Document document, bool isOpen, CancellationToken cancellationToken)
{
if (!isOpen || !workItem.InvocationReasons.Contains(PredefinedInvocationReasons.DocumentOpened))
{
return;
}
SolutionCrawlerLogger.LogProcessOpenDocument(this.Processor._logAggregator, document.Id.Id);
await RunAnalyzersAsync(analyzers, document, (a, d, c) => a.DocumentOpenAsync(d, c), cancellationToken).ConfigureAwait(false);
}
private async Task ProcessCloseDocumentIfNeeded(ImmutableArray<IIncrementalAnalyzer> analyzers, WorkItem workItem, Document document, bool isOpen, CancellationToken cancellationToken)
{
if (isOpen || !workItem.InvocationReasons.Contains(PredefinedInvocationReasons.DocumentClosed))
{
return;
}
SolutionCrawlerLogger.LogProcessCloseDocument(this.Processor._logAggregator, document.Id.Id);
await RunAnalyzersAsync(analyzers, document, (a, d, c) => a.DocumentResetAsync(d, c), cancellationToken).ConfigureAwait(false);
}
private async Task ProcessReanalyzeDocumentAsync(WorkItem workItem, Document document, CancellationToken cancellationToken)
{
try
{
#if DEBUG
Contract.Requires(!workItem.InvocationReasons.Contains(PredefinedInvocationReasons.Reanalyze) || workItem.Analyzers.Count > 0);
#endif
// no reanalyze request, or we already have a request to re-analyze everything
if (workItem.MustRefresh || !workItem.InvocationReasons.Contains(PredefinedInvocationReasons.Reanalyze))
{
return;
}
// First reset the document state in analyzers.
var reanalyzers = workItem.Analyzers.ToImmutableArray();
await RunAnalyzersAsync(reanalyzers, document, (a, d, c) => a.DocumentResetAsync(d, c), cancellationToken).ConfigureAwait(false);
// no request to re-run syntax change analysis. run it here
if (!workItem.InvocationReasons.Contains(PredefinedInvocationReasons.SyntaxChanged))
{
await RunAnalyzersAsync(reanalyzers, document, (a, d, c) => a.AnalyzeSyntaxAsync(d, c), cancellationToken).ConfigureAwait(false);
}
// no request to re-run semantic change analysis. run it here
if (!workItem.InvocationReasons.Contains(PredefinedInvocationReasons.SemanticChanged))
{
await RunAnalyzersAsync(reanalyzers, document, (a, d, c) => a.AnalyzeDocumentAsync(d, null, c), cancellationToken).ConfigureAwait(false);
}
}
catch (Exception e) when(FatalError.ReportUnlessCanceled(e))
{
throw ExceptionUtilities.Unreachable;
}
}
private void RemoveDocument(DocumentId documentId)
{
RemoveDocument(this.Analyzers, documentId);
}
private static void RemoveDocument(IEnumerable<IIncrementalAnalyzer> analyzers, DocumentId documentId)
{
foreach (var analyzer in analyzers)
{
analyzer.RemoveDocument(documentId);
}
}
private void ResetLogAggregatorIfNeeded(Solution currentSolution)
{
if (currentSolution == null || _processingSolution == null ||
currentSolution.Id == _processingSolution.Id)
{
return;
}
SolutionCrawlerLogger.LogIncrementalAnalyzerProcessorStatistics(
this.Processor._correlationId, _processingSolution, this.Processor._logAggregator, this.Analyzers);
this.Processor.ResetLogAggregator();
}
private async Task ResetStatesAsync()
{
try
{
var currentSolution = this.Processor.CurrentSolution;
if (currentSolution != _processingSolution)
{
ResetLogAggregatorIfNeeded(currentSolution);
// clear version tracking set we already reported.
_currentSnapshotVersionTrackingSet.Clear();
_processingSolution = currentSolution;
await RunAnalyzersAsync(this.Analyzers, currentSolution, (a, s, c) => a.NewSolutionSnapshotAsync(s, c), this.CancellationToken).ConfigureAwait(false);
foreach (var id in this.Processor.GetOpenDocumentIds())
{
AddHigherPriorityDocument(id);
}
SolutionCrawlerLogger.LogResetStates(this.Processor._logAggregator);
}
}
catch (Exception e) when(FatalError.ReportUnlessCanceled(e))
{
throw ExceptionUtilities.Unreachable;
}
}
public override void Shutdown()
{
base.Shutdown();
SolutionCrawlerLogger.LogIncrementalAnalyzerProcessorStatistics(this.Processor._correlationId, _processingSolution, this.Processor._logAggregator, this.Analyzers);
_workItemQueue.Dispose();
if (_projectCache != null)
{
_projectCache.Dispose();
_projectCache = null;
}
}
internal void WaitUntilCompletion_ForTestingPurposesOnly(ImmutableArray<IIncrementalAnalyzer> analyzers, List<WorkItem> items)
{
CancellationTokenSource source = new CancellationTokenSource();
_processingSolution = this.Processor.CurrentSolution;
foreach (var item in items)
{
ProcessDocumentAsync(analyzers, item, source).Wait();
}
}
internal void WaitUntilCompletion_ForTestingPurposesOnly()
{
// this shouldn't happen; fail fast so that we get a diagnostic if it does
while (_workItemQueue.HasAnyWork)
{
Environment.FailFast("How?");
}
}
}
}
}
}
}
| |
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using LldbApi;
using NSubstitute;
using NUnit.Framework;
using System.Collections.Generic;
using System.IO;
namespace DebuggerGrpcServer.Tests
{
[TestFixture]
[Timeout(5000)]
class RemoteTargetTests
{
const string TEST_MNEMONIC = "Test Mnemonic";
const string TEST_OPERANDS = "Test Operands";
const string TEST_COMMENT = "Test Comment";
const ulong TEST_ADDRESS = 0x123456789abc;
const string TEST_SYMBOL = "Test Symbol";
const string TEST_DIRECTORY = "C:\\";
const string TEST_FILENAME = "main.cc";
const uint TEST_LINE = 123456u;
const uint TEST_COLUMN = 654321u;
const string TEST_FUNCTION_NAME = "testFunctionName";
const int EXPECTED_ID = 1234;
SbTarget mockTarget;
RemoteTarget remoteTarget;
SbAddress mockAddress;
SbProcess mockProcess;
SbMemoryRegionInfo mockMemoryRegion;
SbError mockError;
SbBreakpoint mockBreakpoint;
RemoteBreakpoint remoteBreakpoint;
SbFunction mockFunction;
[SetUp]
public void SetUp()
{
mockTarget = Substitute.For<SbTarget>();
remoteTarget = new RemoteTargetFactory(new RemoteBreakpointFactory())
.Create(mockTarget);
mockAddress = Substitute.For<SbAddress>();
mockProcess = Substitute.For<SbProcess>();
mockMemoryRegion = Substitute.For<SbMemoryRegionInfo>();
mockError = Substitute.For<SbError>();
mockBreakpoint = Substitute.For<SbBreakpoint>();
remoteBreakpoint = new RemoteBreakpointFactory().Create(mockBreakpoint);
mockFunction = Substitute.For<SbFunction>();
mockTarget.GetProcess().Returns(mockProcess);
}
[Test]
public void ReadFull()
{
uint numberInstructions = 20;
MockRead(numberInstructions, mockAddress, mockMemoryRegion);
var instructions = remoteTarget.ReadInstructionInfos(mockAddress, numberInstructions,
"intel");
Assert.AreEqual(numberInstructions, instructions.Count);
Assert.IsNull(instructions[0].SymbolName);
}
[Test]
public void ReadEmpty()
{
uint instructionsToRead = 10;
for (uint i = 0; i < instructionsToRead; i++)
{
var mockInvalidAddress = Substitute.For<SbAddress>();
if (i == 0)
{
mockAddress = mockInvalidAddress;
}
ulong address = TEST_ADDRESS + i;
MockRead(0, mockInvalidAddress, mockMemoryRegion, address);
}
var instructions =
remoteTarget.ReadInstructionInfos(mockAddress, instructionsToRead, "intel");
Assert.AreEqual(instructionsToRead, instructions.Count);
}
[Test]
public void ReadInstructionsWithoutAddress()
{
uint numberInstructions = 10;
// Create mock instructions without address
var mockInstructions = MockRead(numberInstructions, mockAddress, mockMemoryRegion,
TEST_ADDRESS, true, false);
mockTarget
.GetInstructionsWithFlavor(mockAddress, Arg.Any<byte[]>(), Arg.Any<ulong>(),
"intel").Returns(mockInstructions);
var instructions = remoteTarget.ReadInstructionInfos(mockAddress, numberInstructions,
"intel");
// Should break and return an empty list
Assert.AreEqual(0, instructions.Count);
}
[Test]
public void ReadWithSingleSymbol()
{
uint numberInstructions = 20;
int symbolPos = 6;
var mockInstructions = MockRead(numberInstructions, mockAddress, mockMemoryRegion);
var mockSbAddress = mockInstructions[symbolPos].GetAddress();
var mockSymbol = Substitute.For<SbSymbol>();
mockSymbol.GetName().Returns(TEST_SYMBOL);
// Make sure the symbol's start address matches the instruction's address
mockSymbol.GetStartAddress().Returns(mockSbAddress);
mockSbAddress.GetSymbol().Returns(mockSymbol);
var instructions = remoteTarget.ReadInstructionInfos(mockAddress, numberInstructions,
"intel");
Assert.AreEqual(numberInstructions, instructions.Count);
Assert.AreEqual(instructions[symbolPos].SymbolName, TEST_SYMBOL);
}
[Test]
public void ReadWithSingleSymbolWrongAddress()
{
uint numberInstructions = 20;
int symbolPos = 8;
var mockInstructions = MockRead(numberInstructions, mockAddress, mockMemoryRegion);
var mockSbAddress = mockInstructions[symbolPos].GetAddress();
var mockSymbol = Substitute.For<SbSymbol>();
mockSymbol.GetName().Returns(TEST_SYMBOL);
// Make sure it returns an address that is not equal to the instruction
mockSymbol.GetStartAddress().GetLoadAddress(mockTarget).
Returns(TEST_ADDRESS + 0xdeadbeef);
mockSbAddress.GetSymbol().Returns(mockSymbol);
var instructions = remoteTarget.ReadInstructionInfos(mockAddress, numberInstructions,
"intel");
Assert.AreEqual(numberInstructions, instructions.Count);
Assert.AreEqual(instructions[symbolPos].SymbolName, null);
}
[Test]
public void ReadWithSingleLineEntry()
{
uint numberInstructions = 20;
int lineEntryPos = 9;
var mockInstructions = MockRead(numberInstructions, mockAddress, mockMemoryRegion);
var mockSbAddress = mockInstructions[lineEntryPos].GetAddress();
var mockLineEntry = Substitute.For<SbLineEntry>();
mockLineEntry.GetFileName().Returns(TEST_FILENAME);
mockLineEntry.GetDirectory().Returns(TEST_DIRECTORY);
mockLineEntry.GetLine().Returns(TEST_LINE);
mockLineEntry.GetColumn().Returns(TEST_COLUMN);
mockSbAddress.GetLineEntry().Returns(mockLineEntry);
var instructions = remoteTarget.ReadInstructionInfos(mockAddress, numberInstructions,
"intel");
Assert.AreEqual(numberInstructions, instructions.Count);
Assert.AreEqual(instructions[lineEntryPos].LineEntry.FileName, TEST_FILENAME);
Assert.AreEqual(instructions[lineEntryPos].LineEntry.Directory, TEST_DIRECTORY);
Assert.AreEqual(instructions[lineEntryPos].LineEntry.Line, TEST_LINE);
Assert.AreEqual(instructions[lineEntryPos].LineEntry.Column, TEST_COLUMN);
}
[Test]
public void ReadWithInvalidInstruction()
{
uint numberInstructions = 20;
uint invalidPos = 7;
uint numberInstructionsAfter = numberInstructions - invalidPos - 1u;
var mockBeforeAddress = Substitute.For<SbAddress>();
var mockAfterAddress = Substitute.For<SbAddress>();
// Make sure that it will resolve to the correct address after the invalid instruction
mockTarget.ResolveLoadAddress(TEST_ADDRESS + invalidPos + 1).Returns(mockAfterAddress);
// Create valid instructions up to |invalidPos|
var mockBeforeInvalidInstructions =
MockRead(invalidPos, mockBeforeAddress, mockMemoryRegion);
var mockAfterInvalidInstructions =
MockRead(numberInstructionsAfter, mockAfterAddress, mockMemoryRegion);
var instructions = remoteTarget.ReadInstructionInfos(mockBeforeAddress,
numberInstructions, "intel");
var invalidInstruction = instructions[(int)invalidPos];
Assert.AreEqual(numberInstructions, instructions.Count);
Assert.AreEqual("??", invalidInstruction.Operands);
Assert.AreEqual("??", invalidInstruction.Mnemonic);
}
[Test]
public void ReadOutsideOfProcessMemory()
{
uint numberInstructions = 20;
MockRead(numberInstructions, mockAddress, mockMemoryRegion, TEST_ADDRESS, false);
var instructions = remoteTarget.ReadInstructionInfos(mockAddress, numberInstructions,
"intel");
// Make sure we did not try to disassemble
mockTarget.DidNotReceiveWithAnyArgs()
.GetInstructionsWithFlavor(mockAddress, Arg.Any<byte[]>(), Arg.Any<ulong>(),
Arg.Any<string>());
Assert.AreEqual(numberInstructions, instructions.Count);
}
// Tries to read across memory regions where the first is mapped and the second is unmapped
[Test]
public void ReadAcrossMemoryRegionsMappedUnmapped()
{
uint instructionsToRead = 20;
uint instructionsToCreate = 10;
var mockFirstAddress = Substitute.For<SbAddress>();
var mockSecondAddress = Substitute.For<SbAddress>();
var mockFirstMemoryRegion = Substitute.For<SbMemoryRegionInfo>();
var mockSecondMemoryRegion = Substitute.For<SbMemoryRegionInfo>();
ulong secondAddressPage = 1;
ulong pageSize = 4096;
ulong secondAddress = secondAddressPage * pageSize;
ulong firstAddress = secondAddress - instructionsToCreate;
MockRead(instructionsToCreate, mockFirstAddress, mockFirstMemoryRegion, firstAddress);
MockRead(0, mockSecondAddress, mockSecondMemoryRegion, secondAddress, false);
var instructions =
remoteTarget.ReadInstructionInfos(mockFirstAddress, instructionsToRead, "intel");
mockTarget.Received(1).GetInstructionsWithFlavor(mockFirstAddress, Arg.Any<byte[]>(),
secondAddress - firstAddress, "intel");
Assert.AreEqual(instructionsToRead, instructions.Count);
Assert.AreNotEqual("??", instructions[(int)instructionsToCreate - 1].Operands);
Assert.AreEqual("??", instructions[(int)instructionsToCreate].Operands);
}
// Tries to read across memory regions where the first is unmapped and the second is mapped
[Test]
public void ReadAcrossMemoryRegionsUnmappedMapped()
{
uint instructionsToRead = 20;
uint instructionsToCreate = 10;
var mockFirstAddress = Substitute.For<SbAddress>();
var mockSecondAddress = Substitute.For<SbAddress>();
var mockFirstMemoryRegion = Substitute.For<SbMemoryRegionInfo>();
var mockSecondMemoryRegion = Substitute.For<SbMemoryRegionInfo>();
ulong firstAddressPage = 1;
ulong pageSize = 4096;
ulong secondAddress = (firstAddressPage + 1) * pageSize;
ulong firstAddress = secondAddress - instructionsToCreate;
MockRead(instructionsToCreate, mockFirstAddress, mockFirstMemoryRegion, firstAddress,
false, true, secondAddress);
MockRead(instructionsToRead - instructionsToCreate, mockSecondAddress,
mockSecondMemoryRegion, secondAddress);
var instructions =
remoteTarget.ReadInstructionInfos(mockFirstAddress, instructionsToRead, "intel");
mockTarget.Received(1).GetInstructionsWithFlavor(mockSecondAddress, Arg.Any<byte[]>(),
pageSize, "intel");
Assert.AreEqual(instructionsToRead, instructions.Count);
Assert.AreEqual("??", instructions[(int)instructionsToCreate - 1].Operands);
Assert.AreNotEqual("??", instructions[(int)instructionsToCreate].Operands);
}
[Test]
public void ReadSamePageTwiceOnlyCheckThatItIsMappedOnce()
{
const ulong firstAddressPage = 1;
const ulong pageSize = 4096;
const uint firstInstructionsCount = 3;
const uint secondInstructionsCount = 4;
const uint invalidInstructionCount = 1;
const uint totalInstructions = firstInstructionsCount + invalidInstructionCount +
secondInstructionsCount;
const ulong firstAddress = firstAddressPage * pageSize;
const ulong invalidInstructionAddress = firstAddress + firstInstructionsCount;
const ulong secondAddress = invalidInstructionAddress + invalidInstructionCount;
var mockFirstAddress = Substitute.For<SbAddress>();
var mockSecondAddress = Substitute.For<SbAddress>();
var mockFirstMemoryRegion = Substitute.For<SbMemoryRegionInfo>();
var mockSecondMemoryRegion = Substitute.For<SbMemoryRegionInfo>();
MockRead(firstInstructionsCount, mockFirstAddress, mockFirstMemoryRegion, firstAddress);
MockRead(secondInstructionsCount, mockSecondAddress, mockSecondMemoryRegion,
secondAddress);
var instructions =
remoteTarget.ReadInstructionInfos(mockFirstAddress, totalInstructions, "intel");
var anyRegionInfo = Arg.Any<SbMemoryRegionInfo>();
mockProcess.Received(1).GetMemoryRegionInfo(firstAddress, out anyRegionInfo);
mockProcess.DidNotReceive().GetMemoryRegionInfo(secondAddress, out anyRegionInfo);
mockProcess.DidNotReceive()
.GetMemoryRegionInfo(invalidInstructionAddress, out anyRegionInfo);
mockTarget.Received(1).GetInstructionsWithFlavor(mockFirstAddress, Arg.Any<byte[]>(),
pageSize, "intel");
mockTarget.Received(1).GetInstructionsWithFlavor(mockSecondAddress, Arg.Any<byte[]>(),
pageSize - firstInstructionsCount -
invalidInstructionCount, "intel");
Assert.That(instructions.Count, Is.EqualTo(totalInstructions));
Assert.That(instructions[(int) (invalidInstructionAddress - firstAddress)].Operands,
Is.EqualTo("??"));
Assert.That(instructions[0].Operands, Is.Not.EqualTo("??"));
Assert.That(instructions[(int) (secondAddress - firstAddress)].Operands,
Is.Not.EqualTo("??"));
}
[Test]
public void ReadWhenInstructionIsOnPageBoundary()
{
const ulong pageNumber = 1;
const ulong pageSize = 4096;
const uint firstInstructionsCount = 1;
const uint secondInstructionsCount = 1;
const uint boundaryInstructionSize = 3;
const uint totalInstructions = firstInstructionsCount + secondInstructionsCount + 1;
const ulong firstAddress = pageNumber * pageSize - 2;
const ulong boundaryInstructionAddress = firstAddress + firstInstructionsCount;
const ulong secondAddress = boundaryInstructionAddress + boundaryInstructionSize;
var mockFirstAddress = Substitute.For<SbAddress>();
var mockSecondAddress = Substitute.For<SbAddress>();
var mockBoundaryInstructionAddress = Substitute.For<SbAddress>();
mockTarget.ResolveLoadAddress(boundaryInstructionAddress)
.Returns(mockBoundaryInstructionAddress);
mockBoundaryInstructionAddress.GetLoadAddress(mockTarget)
.Returns(boundaryInstructionAddress);
var mockFirstMemoryRegion = Substitute.For<SbMemoryRegionInfo>();
var mockSecondMemoryRegion = Substitute.For<SbMemoryRegionInfo>();
MockRead(firstInstructionsCount, mockFirstAddress, mockFirstMemoryRegion, firstAddress);
MockRead(secondInstructionsCount, mockSecondAddress, mockSecondMemoryRegion,
secondAddress);
var instruction = Substitute.For<SbInstruction>();
instruction.GetByteSize().Returns(boundaryInstructionSize);
instruction.GetAddress().Returns(mockBoundaryInstructionAddress);
mockTarget.ReadInstructions(mockBoundaryInstructionAddress, 1, "intel")
.Returns(new List<SbInstruction>() { instruction });
var instructions =
remoteTarget.ReadInstructionInfos(mockFirstAddress, totalInstructions, "intel");
mockTarget.Received(1).ReadInstructions(mockBoundaryInstructionAddress, 1, "intel");
mockTarget.Received(1).GetInstructionsWithFlavor(mockFirstAddress, Arg.Any<byte[]>(),
Arg.Any<ulong>(), "intel");
mockTarget.Received(1).GetInstructionsWithFlavor(mockSecondAddress, Arg.Any<byte[]>(),
Arg.Any<ulong>(), "intel");
Assert.That(instructions.Count, Is.EqualTo(totalInstructions));
Assert.That(instructions[0].Operands, Is.Not.EqualTo("??"));
Assert.That(instructions[1].Operands, Is.Not.EqualTo("??"));
Assert.That(instructions[2].Operands, Is.Not.EqualTo("??"));
}
[Test]
public void InvalidInstructionInsertedWhenNotOnBoundary()
{
const ulong pageNumber = 1;
const ulong pageSize = 4096;
const uint instructionsCount = 1;
const ulong address = pageNumber * pageSize;
var mockInstructionAddress = Substitute.For<SbAddress>();
var mockInstructionMemoryRegion = Substitute.For<SbMemoryRegionInfo>();
MockRead(instructionsCount, mockInstructionAddress, mockInstructionMemoryRegion,
address);
var instructions =
remoteTarget.ReadInstructionInfos(mockInstructionAddress, 2, "intel");
mockTarget.DidNotReceive()
.ReadInstructions(Arg.Any<SbAddress>(), Arg.Any<uint>(), Arg.Any<string>());
mockTarget.Received(1)
.GetInstructionsWithFlavor(mockInstructionAddress, Arg.Any<byte[]>(),
Arg.Any<ulong>(), "intel");
Assert.That(instructions.Count, Is.EqualTo(2));
Assert.That(instructions[0].Operands, Is.Not.EqualTo("??"));
Assert.That(instructions[1].Operands, Is.EqualTo("??"));
}
[Test]
public void ReadMemoryRegionFail()
{
uint numberInstructions = 20;
MockRead(numberInstructions, mockAddress, mockMemoryRegion);
mockError.Fail().Returns(true);
var instructions = remoteTarget.ReadInstructionInfos(mockAddress, numberInstructions,
"intel");
Assert.AreEqual(0, instructions.Count);
mockMemoryRegion.DidNotReceive().IsMapped();
mockTarget.DidNotReceiveWithAnyArgs()
.GetInstructionsWithFlavor(mockAddress, Arg.Any<byte[]>(), Arg.Any<ulong>(),
Arg.Any<string>());
}
[Test]
public void BindFunctionBreakpointWithOffset()
{
MockFunctionBreakpoint(1);
uint startPosition = 75u;
uint endPosition = 100u;
uint offset = 10;
uint newPosition = startPosition + offset + 1;
string path = Path.Combine(TEST_DIRECTORY, TEST_FILENAME);
MockFunctionData(startPosition, endPosition, TEST_DIRECTORY, TEST_FILENAME);
mockTarget.BreakpointCreateByLocation(path, newPosition)
.Returns(mockBreakpoint);
var testBreakpoint = remoteTarget.CreateFunctionOffsetBreakpoint(TEST_FUNCTION_NAME,
offset);
mockTarget.Received().BreakpointDelete(EXPECTED_ID);
mockTarget.Received().BreakpointCreateByLocation(path, newPosition);
            Assert.AreEqual(mockBreakpoint.GetId(), testBreakpoint.breakpoint.GetId());
}
// Test when offset takes you out of the function
[Test]
public void BindInvalidFunctionBreakpointWithOffset()
{
MockFunctionBreakpoint(1);
uint startPosition = 75u;
uint endPosition = 100u;
uint offset = endPosition - startPosition + 1;
uint newPosition = startPosition + offset + 1;
string path = Path.Combine(TEST_DIRECTORY, TEST_FILENAME);
MockFunctionData(startPosition, endPosition, TEST_DIRECTORY, TEST_FILENAME);
mockTarget.BreakpointCreateByLocation(path, newPosition)
.Returns(mockBreakpoint);
var testBreakpoint = remoteTarget.CreateFunctionOffsetBreakpoint(TEST_FUNCTION_NAME,
offset);
mockTarget.Received().BreakpointDelete(EXPECTED_ID);
mockTarget.DidNotReceive().BreakpointCreateByLocation(path, newPosition);
Assert.AreEqual(null, testBreakpoint.breakpoint);
}
// Test when function breakpoint is not bound to any location
[Test]
public void BindFunctionBreakpointWithOffsetZeroLocations()
{
MockFunctionBreakpoint(0);
uint startPosition = 75u;
uint endPosition = 100u;
uint offset = endPosition - startPosition + 1;
uint newPosition = startPosition + offset + 1;
string path = Path.Combine(TEST_DIRECTORY, TEST_FILENAME);
MockFunctionData(startPosition, endPosition, TEST_DIRECTORY, TEST_FILENAME);
mockTarget.BreakpointCreateByLocation(path, newPosition)
.Returns(mockBreakpoint);
var testBreakpoint = remoteTarget.CreateFunctionOffsetBreakpoint(TEST_FUNCTION_NAME,
offset);
mockTarget.DidNotReceive().BreakpointDelete(Arg.Any<int>());
mockTarget.DidNotReceive().BreakpointCreateByLocation(path, newPosition);
Assert.AreEqual(null, testBreakpoint.breakpoint);
}
// Test when function cannot be found
[Test]
public void BindFunctionBreakpointWithOffsetNoFunction()
{
MockFunctionBreakpoint(1);
uint startPosition = 75u;
uint endPosition = 100u;
uint offset = endPosition - startPosition + 1;
uint newPosition = startPosition + offset + 1;
string path = Path.Combine(TEST_DIRECTORY, TEST_FILENAME);
MockFunctionData(startPosition, endPosition, TEST_DIRECTORY, TEST_FILENAME);
mockTarget.BreakpointCreateByLocation(path, newPosition)
.Returns(mockBreakpoint);
var testBreakpoint = remoteTarget.CreateFunctionOffsetBreakpoint(TEST_FUNCTION_NAME,
offset);
mockTarget.Received().BreakpointDelete(EXPECTED_ID);
mockTarget.DidNotReceive().BreakpointCreateByLocation(path, newPosition);
Assert.AreEqual(null, testBreakpoint.breakpoint);
}
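        // Sets up mocks so that the first breakpoint location resolves to a function whose
        // start line entry reports |startPosition| (in |directory|/|fileName|) and whose line
        // entry just before the function's end address reports |endPosition|.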
void MockFunctionData(uint startPosition, uint endPosition, string directory,
string fileName)
{
SbBreakpointLocation location = mockBreakpoint.GetLocationAtIndex(0);
SbAddress mockBreakpointAddress = Substitute.For<SbAddress>();
SbAddress mockStartAddress = Substitute.For<SbAddress>();
SbAddress mockFunctionEndAddress = Substitute.For<SbAddress>();
SbAddress mockActualEndAddress = Substitute.For<SbAddress>();
SbLineEntry mockStartLineEntry = Substitute.For<SbLineEntry>();
SbLineEntry mockEndLineEntry = Substitute.For<SbLineEntry>();
ulong address = 0x1234567;
location.GetAddress().Returns(mockBreakpointAddress);
mockBreakpointAddress.GetFunction().Returns(mockFunction);
mockFunction.GetStartAddress().Returns(mockStartAddress);
mockFunction.GetEndAddress().Returns(mockFunctionEndAddress);
mockFunctionEndAddress.GetLoadAddress(mockTarget).Returns(address);
mockTarget.ResolveLoadAddress(address - 1).Returns(mockActualEndAddress);
mockStartAddress.GetLineEntry().Returns(mockStartLineEntry);
mockActualEndAddress.GetLineEntry().Returns(mockEndLineEntry);
mockStartLineEntry.GetLine().Returns(startPosition);
mockStartLineEntry.GetDirectory().Returns(directory);
mockStartLineEntry.GetFileName().Returns(fileName);
mockEndLineEntry.GetLine().Returns(endPosition);
}
// Create default mocks, and return values for the lldb breakpoint and breakpoint locations
// for a function breakpoint. numBreakpointLocations specifies how many mock breakpoint
// locations to return.
void MockFunctionBreakpoint(int numBreakpointLocations)
{
List<SbBreakpointLocation> breakpointLocations =
CreateMockBreakpointLocations(numBreakpointLocations);
MockFunctionBreakpoint(breakpointLocations);
}
// Create default mocks, and return values for the lldb breakpoint and breakpoint locations
// for a function breakpoint. breakpointLocations is a list of mock breakpoint locations
// that will be returned by the mock lldb breakpoint.
void MockFunctionBreakpoint(List<SbBreakpointLocation> breakpointLocations)
{
for (uint i = 0; i < breakpointLocations.Count; i++)
{
mockBreakpoint.GetLocationAtIndex(i).Returns(breakpointLocations[(int)i]);
}
mockBreakpoint.GetNumLocations().Returns((uint)breakpointLocations.Count);
mockBreakpoint.GetId().Returns(EXPECTED_ID);
mockTarget.BreakpointCreateByName(TEST_FUNCTION_NAME).Returns(mockBreakpoint);
}
List<SbBreakpointLocation> CreateMockBreakpointLocations(int numBreakpointLocations)
{
List<SbBreakpointLocation> breakpointLocations =
new List<SbBreakpointLocation>(numBreakpointLocations);
for (int i = 0; i < numBreakpointLocations; i++)
{
var mockBreakpointLocation = Substitute.For<SbBreakpointLocation>();
mockBreakpointLocation.GetId().Returns(i);
breakpointLocations.Add(mockBreakpointLocation);
}
return breakpointLocations;
}
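        // Mocks a single disassembly read: creates |instructionsToCreate| fake instructions
        // starting at |startAddress|, wires up GetInstructionsWithFlavor for the bytes from
        // |startAddress| to the end of its 4096-byte page, and makes GetMemoryRegionInfo for
        // |startAddress| return |memoryRegion| (mapped or unmapped, ending at |regionEnd|).
        // ReadMemory is stubbed to report the full buffer length as read.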
List<SbInstruction> MockRead(uint instructionsToCreate, SbAddress startSbAddress,
SbMemoryRegionInfo memoryRegion,
ulong startAddress = TEST_ADDRESS, bool isMapped = true,
bool hasAddress = true, ulong regionEnd = ulong.MaxValue)
{
var instructions =
CreateMockInstructions(instructionsToCreate, startAddress, hasAddress);
ulong currentPage = startAddress / 4096;
ulong bytesToRead = (currentPage + 1) * 4096 - startAddress;
mockTarget
.GetInstructionsWithFlavor(startSbAddress, Arg.Any<byte[]>(), bytesToRead, "intel")
.Returns(instructions);
startSbAddress.GetLoadAddress(mockTarget).Returns(startAddress);
mockTarget.ResolveLoadAddress(startAddress).Returns(startSbAddress);
memoryRegion.IsMapped().Returns(isMapped);
if (!isMapped)
{
memoryRegion.GetRegionEnd().Returns(regionEnd);
}
var anyRegion = Arg.Any<SbMemoryRegionInfo>();
mockProcess.GetMemoryRegionInfo(startAddress, out anyRegion).Returns(x =>
{
x[1] = memoryRegion;
return mockError;
});
var anyError = Arg.Any<SbError>();
mockProcess.ReadMemory(default, default, default, out anyError)
.ReturnsForAnyArgs((x =>
{
var bufferArg = (byte[])x[1];
int length = bufferArg?.Length ?? 0;
x[3] = mockError;
return (ulong)length;
}));
return instructions;
}
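        // Creates |count| mock instructions with consecutive load addresses starting at
        // |startAddress| (or with no address at all when |hasAddress| is false), each one
        // byte long and with indexed mnemonics and operands.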
List<SbInstruction> CreateMockInstructions(uint count, ulong startAddress = TEST_ADDRESS,
bool hasAddress = true)
{
var instructions = new List<SbInstruction>();
for (uint i = 0; i < count; i++)
{
SbAddress mockSbAddress = null;
if (hasAddress)
{
mockSbAddress = Substitute.For<SbAddress>();
mockSbAddress
.GetLoadAddress(mockTarget)
.Returns(startAddress + i);
mockSbAddress.GetSymbol().Returns((SbSymbol)null);
mockSbAddress.GetLineEntry().Returns((SbLineEntry)null);
}
var mockInstruction = Substitute.For<SbInstruction>();
mockInstruction.GetAddress().Returns(mockSbAddress);
mockInstruction.GetMnemonic(mockTarget).Returns(TEST_MNEMONIC + i);
mockInstruction.GetOperands(mockTarget).Returns(TEST_OPERANDS + i);
mockInstruction.GetComment(mockTarget).Returns(TEST_COMMENT);
mockInstruction.GetByteSize().Returns(1u);
instructions.Add(mockInstruction);
}
return instructions;
}
}
}
| |
#pragma warning disable CS1591 // Missing XML comment for publicly visible type or member
namespace UnitTest.Rollbar
{
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Linq;
using System.Collections.Generic;
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Threading;
using UnitTest.RollbarTestCommon;
using global::Rollbar;
using global::Rollbar.Infrastructure;
/// <summary>
/// Defines test class RollbarLiveFixtureBase.
/// Implements the <see cref="System.IDisposable" />
/// </summary>
/// <seealso cref="System.IDisposable" />
    /// <remarks>This is a base abstraction for creating live Rollbar unit tests
    /// (ones that are actually expected to communicate with the Rollbar API).
    /// It allows setting expectations for internal Rollbar events
    /// (such as payload delivery to the Rollbar API, or any communication or internal errors).
    /// It has built-in verification of actual event counts against the expected ones per event type.</remarks>
//[TestClass]
//[TestCategory(nameof(RollbarLiveFixtureBase))]
public abstract class RollbarLiveFixtureBase
: IDisposable
{
/// <summary>
/// The logger configuration
/// </summary>
private IRollbarLoggerConfig _loggerConfig;
/// <summary>
/// The disposable rollbar instances
/// </summary>
private readonly List<IRollbar> _disposableRollbarInstances = new List<IRollbar>();
/// <summary>
/// The default rollbar timeout
/// </summary>
protected static readonly TimeSpan defaultRollbarTimeout = TimeSpan.FromSeconds(3);
protected static readonly RollbarInfrastructureConfig infrastructureConfig;
static RollbarLiveFixtureBase()
{
RollbarUnitTestEnvironmentUtil.SetupLiveTestRollbarInfrastructure();
}
/// <summary>
/// Initializes a new instance of the <see cref="RollbarLiveFixtureBase"/> class.
/// </summary>
protected RollbarLiveFixtureBase()
{
RollbarQueueController.Instance.InternalEvent += OnRollbarInternalEvent;
}
/// <summary>
/// Sets the fixture up.
/// </summary>
//[TestInitialize]
public virtual void SetupFixture()
{
RollbarUnitTestEnvironmentUtil.SetupLiveTestRollbarInfrastructure();
RollbarDataSecurityOptions dataSecurityOptions = new RollbarDataSecurityOptions();
dataSecurityOptions.ScrubFields = new string[] { "secret", "super_secret", };
RollbarInfrastructure.Instance
.Config
.RollbarLoggerConfig
.RollbarDataSecurityOptions
.Reconfigure(dataSecurityOptions);
this._loggerConfig = RollbarInfrastructure.Instance.Config.RollbarLoggerConfig;
this.Reset();
}
/// <summary>
/// Tears down this fixture.
/// </summary>
//[TestCleanup]
public virtual void TearDownFixture()
{
TimeSpan timeout = RollbarQueueController.Instance.GetRecommendedTimeout();
Thread.Sleep(timeout);
this.VerifyActualEventsAgainstExpectedTotals();
}
/// <summary>
/// Handles the <see cref="E:RollbarInternalEvent" /> event.
/// </summary>
/// <param name="sender">The sender.</param>
/// <param name="e">The <see cref="RollbarEventArgs"/> instance containing the event data.</param>
private void OnRollbarInternalEvent(object sender, RollbarEventArgs e)
{
            if(!(e is CommunicationEventArgs))
            {
                // Intentionally empty: non-communication events are not treated specially here;
                // every event, communication or not, is registered below.
            }
// for basic RollbarRateLimitVerification test:
//switch(e)
//{
// case RollbarApiErrorEventArgs apiErrorEvent:
// //this.ApiErrorEvents.Add(apiErrorEvent);
// return;
// case CommunicationEventArgs commEvent:
// Console.WriteLine(commEvent.EventTimestamp + " SENT: ");
// return;
// case CommunicationErrorEventArgs commErrorEvent:
// //this.CommunicationErrorEvents.Add(commErrorEvent);
// return;
// case InternalErrorEventArgs internalErrorEvent:
// //this.InternalSdkErrorEvents.Add(internalErrorEvent);
// return;
// case PayloadDropEventArgs payloadDropEvent:
// Console.WriteLine(payloadDropEvent.EventTimestamp + " DROP: " + payloadDropEvent.Reason);
// return;
// default:
// //Assert.Fail("Unexpected RollbarEventArgs specialization type!");
// return;
//}
//Console.WriteLine(e.TraceAsString());
//Trace.WriteLine(e.TraceAsString());
this.Register(e);
}
/// <summary>
/// Resets this instance.
/// </summary>
protected void Reset()
{
this.ClearAllExpectedEventCounts();
this.ClearAllRollbarEvents();
RollbarInfrastructure.Instance.QueueController.FlushQueues();
}
/// <summary>
        /// Makes sure all the payloads have been processed.
/// </summary>
private void MakeSureAllThePayloadsProcessed()
{
Thread.Sleep(RollbarQueueController.Instance.GetRecommendedTimeout().Add(TimeSpan.FromSeconds(1)));
Assert.AreEqual(0, RollbarQueueController.Instance.GetTotalPayloadCount(), "All the payloads are expected to be out of the queues...");
}
/// <summary>
/// Verifies the actual events against expected totals.
/// </summary>
private void VerifyActualEventsAgainstExpectedTotals()
{
MakeSureAllThePayloadsProcessed();
foreach (var eventType in this._expectedEventCountByType.Keys)
{
string message = $"Matching count of {eventType.Name} events...";
Console.WriteLine(message);
Trace.WriteLine(message);
Assert.AreEqual(this._expectedEventCountByType[eventType], this._rollbarEventsByType[eventType].Count, message);
}
}
#region Actual Rollbar events
/// <summary>
/// The rollbar events by type
/// </summary>
private readonly IDictionary<Type, List<RollbarEventArgs>> _rollbarEventsByType = new ConcurrentDictionary<Type, List<RollbarEventArgs>>();
/// <summary>
/// Registers the specified rollbar event.
/// </summary>
/// <param name="rollbarEvent">The <see cref="RollbarEventArgs"/> instance containing the event data.</param>
private void Register(RollbarEventArgs rollbarEvent)
{
if (rollbarEvent == null)
{
return;
}
var eventType = rollbarEvent.GetType();
if (this._rollbarEventsByType.TryGetValue(eventType, out var rollbarEvents))
{
rollbarEvents.Add(rollbarEvent);
}
else
{
this._rollbarEventsByType.Add(
eventType,
new List<RollbarEventArgs>(new[] {rollbarEvent})
);
}
}
/// <summary>
/// Gets all events.
/// </summary>
/// <typeparam name="TRollbarEvent">The type of the t rollbar event.</typeparam>
/// <returns>IReadOnlyCollection<TRollbarEvent>.</returns>
protected IReadOnlyCollection<TRollbarEvent> GetAllEvents<TRollbarEvent>()
where TRollbarEvent : RollbarEventArgs
{
if (this._rollbarEventsByType.TryGetValue(typeof(TRollbarEvent), out var rollbarEvents))
{
return rollbarEvents.Cast<TRollbarEvent>().ToArray();
}
else
{
return Array.Empty<TRollbarEvent>();
}
}
/// <summary>
/// Gets the count.
/// </summary>
        /// <typeparam name="TRollbarEvent">The type of the Rollbar event.</typeparam>
/// <returns>System.Int32.</returns>
protected int GetCount<TRollbarEvent>()
where TRollbarEvent : RollbarEventArgs
{
if (this._rollbarEventsByType.TryGetValue(typeof(TRollbarEvent), out var rollbarEvents))
{
return rollbarEvents.Count;
}
else
{
return 0;
}
}
/// <summary>
/// Clears all rollbar events.
/// </summary>
protected void ClearAllRollbarEvents()
{
foreach (var eventType in this._rollbarEventsByType.Keys)
{
this._rollbarEventsByType[eventType].Clear();
}
}
/// <summary>
/// Clears this instance.
/// </summary>
        /// <typeparam name="TRollbarEvent">The type of the Rollbar event.</typeparam>
protected void Clear<TRollbarEvent>()
where TRollbarEvent : RollbarEventArgs
{
if (this._rollbarEventsByType.TryGetValue(typeof(TRollbarEvent), out var rollbarEvents))
{
rollbarEvents.Clear();
}
}
#endregion Actual Rollbar events
#region Expected Rollbar events
/// <summary>
/// The expected event count by type
/// </summary>
private readonly IDictionary<Type, int> _expectedEventCountByType = new ConcurrentDictionary<Type, int>();
/// <summary>
/// Increments the count.
/// </summary>
        /// <typeparam name="TRollbarEvent">The type of the Rollbar event.</typeparam>
protected void IncrementCount<TRollbarEvent>()
where TRollbarEvent : RollbarEventArgs
{
const int countIncrement = 1;
this.IncrementCount<TRollbarEvent>(countIncrement);
}
/// <summary>
/// Increments the count.
/// </summary>
        /// <typeparam name="TRollbarEvent">The type of the Rollbar event.</typeparam>
/// <param name="countIncrement">The count increment.</param>
protected void IncrementCount<TRollbarEvent>(int countIncrement)
where TRollbarEvent : RollbarEventArgs
{
var eventType = typeof(TRollbarEvent);
if (this._expectedEventCountByType.TryGetValue(eventType, out var eventsCount))
{
this._expectedEventCountByType[eventType] += countIncrement;
}
else
{
this._expectedEventCountByType.Add(
eventType,
countIncrement
);
}
}
/// <summary>
/// Clears all expected event counts.
/// </summary>
protected void ClearAllExpectedEventCounts()
{
foreach (var eventType in this._expectedEventCountByType.Keys)
{
this._expectedEventCountByType[eventType] = 0;
}
}
/// <summary>
/// Clears the expected count.
/// </summary>
        /// <typeparam name="TRollbarEvent">The type of the Rollbar event.</typeparam>
protected void ClearExpectedCount<TRollbarEvent>()
where TRollbarEvent : RollbarEventArgs
{
var eventType = typeof(TRollbarEvent);
if (this._expectedEventCountByType.TryGetValue(eventType, out var eventCount))
{
this._expectedEventCountByType[eventType] = 0;
}
}
        #endregion Expected Rollbar events
/// <summary>
/// Provides the live rollbar configuration.
/// </summary>
        /// <returns>IRollbarLoggerConfig.</returns>
protected IRollbarLoggerConfig ProvideLiveRollbarConfig()
{
return this.ProvideLiveRollbarConfig(RollbarUnitTestSettings.AccessToken, RollbarUnitTestSettings.Environment);
}
/// <summary>
/// Provides the live rollbar configuration.
/// </summary>
/// <param name="rollbarAccessToken">The rollbar access token.</param>
/// <param name="rollbarEnvironment">The rollbar environment.</param>
        /// <returns>IRollbarLoggerConfig.</returns>
protected IRollbarLoggerConfig ProvideLiveRollbarConfig(string rollbarAccessToken, string rollbarEnvironment)
{
if (this._loggerConfig == null)
{
RollbarDestinationOptions destinationOptions =
new RollbarDestinationOptions(rollbarAccessToken, rollbarEnvironment);
RollbarDataSecurityOptions dataSecurityOptions = new RollbarDataSecurityOptions();
dataSecurityOptions.ScrubFields = new string[] { "secret", "super_secret", };
RollbarLoggerConfig loggerConfig = new RollbarLoggerConfig();
loggerConfig.RollbarDestinationOptions.Reconfigure(destinationOptions);
loggerConfig.RollbarDataSecurityOptions.Reconfigure(dataSecurityOptions);
this._loggerConfig = loggerConfig;
}
return this._loggerConfig;
}
/// <summary>
/// Provides the disposable rollbar.
/// </summary>
/// <returns>IRollbar.</returns>
protected IRollbar ProvideDisposableRollbar()
{
IRollbar rollbar = RollbarFactory.CreateNew(config: this.ProvideLiveRollbarConfig());
this._disposableRollbarInstances.Add(rollbar);
return rollbar;
}
/// <summary>
/// Provides the shared rollbar.
/// </summary>
/// <returns>IRollbar.</returns>
protected IRollbar ProvideSharedRollbar()
{
if (!RollbarLocator.RollbarInstance.Equals(ProvideLiveRollbarConfig()))
{
RollbarLocator.RollbarInstance.Configure(ProvideLiveRollbarConfig());
}
return RollbarLocator.RollbarInstance;
}
/// <summary>
/// Verifies the instance operational.
/// </summary>
/// <param name="rollbar">The rollbar.</param>
protected void VerifyInstanceOperational(IRollbar rollbar)
{
MakeSureAllThePayloadsProcessed();
//Assert.IsTrue(0 == RollbarQueueController.Instance.GetTotalPayloadCount(), "Making sure all the queues are clear...");
int initialCommunicationEventsCount = this.GetCount<CommunicationEventArgs>();
this.IncrementCount<CommunicationEventArgs>();
rollbar.AsBlockingLogger(defaultRollbarTimeout).Critical("Making sure Rollbar.NET is operational...");
Assert.AreEqual(this.GetCount<CommunicationEventArgs>(), initialCommunicationEventsCount + 1, "Confirming Rollbar.NET is operational...");
}
[TestMethod]
public void _VerifyInstanceOperationalTest()
{
// this test more about verifying if the test harness itself works well:
this.ClearAllRollbarEvents();
using(IRollbar rollbar = this.ProvideDisposableRollbar())
{
this.VerifyInstanceOperational(rollbar);
}
}
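        // Hedged usage sketch (illustrative only, not part of the original fixture): a derived
        // live test would typically pair each payload it sends with one expected
        // CommunicationEventArgs, so that TearDownFixture() can verify the totals. The test
        // name and message below are assumptions for demonstration.
        //
        //[TestMethod]
        //public void SendsOneInfoPayload()
        //{
        //    using (IRollbar rollbar = this.ProvideDisposableRollbar())
        //    {
        //        this.IncrementCount<CommunicationEventArgs>();
        //        rollbar.AsBlockingLogger(defaultRollbarTimeout).Info("live test payload");
        //    }
        //}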
#region IDisposable Support
/// <summary>
/// The disposed value
/// </summary>
private bool disposedValue = false; // To detect redundant calls
/// <summary>
/// Releases unmanaged and - optionally - managed resources.
/// </summary>
/// <param name="disposing"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
protected virtual void Dispose(bool disposing)
{
if (!disposedValue)
{
if (disposing)
{
// TODO: dispose managed state (managed objects).
TimeSpan timeout = RollbarQueueController.Instance.GetRecommendedTimeout();
Thread.Sleep(timeout);
RollbarQueueController.Instance.InternalEvent -= OnRollbarInternalEvent;
foreach (var rollbar in this._disposableRollbarInstances)
{
rollbar.Dispose();
}
}
// TODO: free unmanaged resources (unmanaged objects) and override a finalizer below.
// TODO: set large fields to null.
disposedValue = true;
}
}
// TODO: override a finalizer only if Dispose(bool disposing) above has code to free unmanaged resources.
// ~RollbarLiveFixtureBase() {
// // Do not change this code. Put cleanup code in Dispose(bool disposing) above.
// Dispose(false);
// }
// This code added to correctly implement the disposable pattern.
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
void IDisposable.Dispose()
{
// Do not change this code. Put cleanup code in Dispose(bool disposing) above.
Dispose(true);
// TODO: uncomment the following line if the finalizer is overridden above.
// GC.SuppressFinalize(this);
}
#endregion IDisposable Support
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Text;
using System.Diagnostics;
using System.Collections.Generic;
namespace System.Xml
{
internal partial class XmlWellFormedWriter : XmlWriter
{
//
// Private types
//
class NamespaceResolverProxy : IXmlNamespaceResolver
{
XmlWellFormedWriter wfWriter;
internal NamespaceResolverProxy(XmlWellFormedWriter wfWriter)
{
this.wfWriter = wfWriter;
}
IDictionary<string, string> IXmlNamespaceResolver.GetNamespacesInScope(XmlNamespaceScope scope)
{
throw NotImplemented.ByDesign;
}
string IXmlNamespaceResolver.LookupNamespace(string prefix)
{
return wfWriter.LookupNamespace(prefix);
}
string IXmlNamespaceResolver.LookupPrefix(string namespaceName)
{
return wfWriter.LookupPrefix(namespaceName);
}
}
partial struct ElementScope
{
internal int prevNSTop;
internal string prefix;
internal string localName;
internal string namespaceUri;
internal XmlSpace xmlSpace;
internal string xmlLang;
internal void Set(string prefix, string localName, string namespaceUri, int prevNSTop)
{
this.prevNSTop = prevNSTop;
this.prefix = prefix;
this.namespaceUri = namespaceUri;
this.localName = localName;
this.xmlSpace = (System.Xml.XmlSpace)(int)-1;
this.xmlLang = null;
}
internal void WriteEndElement(XmlRawWriter rawWriter)
{
rawWriter.WriteEndElement(prefix, localName, namespaceUri);
}
internal void WriteFullEndElement(XmlRawWriter rawWriter)
{
rawWriter.WriteFullEndElement(prefix, localName, namespaceUri);
}
}
enum NamespaceKind
{
Written,
NeedToWrite,
Implied,
Special,
}
partial struct Namespace
{
internal string prefix;
internal string namespaceUri;
internal NamespaceKind kind;
internal int prevNsIndex;
internal void Set(string prefix, string namespaceUri, NamespaceKind kind)
{
this.prefix = prefix;
this.namespaceUri = namespaceUri;
this.kind = kind;
this.prevNsIndex = -1;
}
internal void WriteDecl(XmlWriter writer, XmlRawWriter rawWriter)
{
Debug.Assert(kind == NamespaceKind.NeedToWrite);
if (null != rawWriter)
{
rawWriter.WriteNamespaceDeclaration(prefix, namespaceUri);
}
else
{
if (prefix.Length == 0)
{
writer.WriteStartAttribute(string.Empty, XmlConst.NsXmlNs, XmlConst.ReservedNsXmlNs);
}
else
{
writer.WriteStartAttribute(XmlConst.NsXmlNs, prefix, XmlConst.ReservedNsXmlNs);
}
writer.WriteString(namespaceUri);
writer.WriteEndAttribute();
}
}
}
struct AttrName
{
internal string prefix;
internal string namespaceUri;
internal string localName;
internal int prev;
internal void Set(string prefix, string localName, string namespaceUri)
{
this.prefix = prefix;
this.namespaceUri = namespaceUri;
this.localName = localName;
this.prev = 0;
}
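            // Two attributes collide when their local names match and either the prefixes match
            // (same qualified name) or the namespace URIs match (same expanded name).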
internal bool IsDuplicate(string prefix, string localName, string namespaceUri)
{
return ((this.localName == localName)
&& ((this.prefix == prefix) || (this.namespaceUri == namespaceUri)));
}
}
enum SpecialAttribute
{
No = 0,
DefaultXmlns,
PrefixedXmlns,
XmlSpace,
XmlLang
}
partial class AttributeValueCache
{
enum ItemType
{
EntityRef,
CharEntity,
SurrogateCharEntity,
Whitespace,
String,
StringChars,
Raw,
RawChars,
ValueString,
}
class Item
{
internal ItemType type;
internal object data;
internal Item() { }
internal void Set(ItemType type, object data)
{
this.type = type;
this.data = data;
}
}
class BufferChunk
{
internal char[] buffer;
internal int index;
internal int count;
internal BufferChunk(char[] buffer, int index, int count)
{
this.buffer = buffer;
this.index = index;
this.count = count;
}
}
StringBuilder stringValue = new StringBuilder();
string singleStringValue; // special-case for a single WriteString call
Item[] items;
int firstItem;
int lastItem = -1;
internal string StringValue
{
get
{
if (singleStringValue != null)
{
return singleStringValue;
}
else
{
return stringValue.ToString();
}
}
}
internal void WriteEntityRef(string name)
{
if (singleStringValue != null)
{
StartComplexValue();
}
switch (name)
{
case "lt":
stringValue.Append('<');
break;
case "gt":
stringValue.Append('>');
break;
case "quot":
stringValue.Append('"');
break;
case "apos":
stringValue.Append('\'');
break;
case "amp":
stringValue.Append('&');
break;
default:
stringValue.Append('&');
stringValue.Append(name);
stringValue.Append(';');
break;
}
AddItem(ItemType.EntityRef, name);
}
internal void WriteCharEntity(char ch)
{
if (singleStringValue != null)
{
StartComplexValue();
}
stringValue.Append(ch);
AddItem(ItemType.CharEntity, ch);
}
internal void WriteSurrogateCharEntity(char lowChar, char highChar)
{
if (singleStringValue != null)
{
StartComplexValue();
}
stringValue.Append(highChar);
stringValue.Append(lowChar);
AddItem(ItemType.SurrogateCharEntity, new char[] { lowChar, highChar });
}
internal void WriteWhitespace(string ws)
{
if (singleStringValue != null)
{
StartComplexValue();
}
stringValue.Append(ws);
AddItem(ItemType.Whitespace, ws);
}
internal void WriteString(string text)
{
if (singleStringValue != null)
{
StartComplexValue();
}
else
{
// special-case for a single WriteString
if (lastItem == -1)
{
singleStringValue = text;
return;
}
}
stringValue.Append(text);
AddItem(ItemType.String, text);
}
internal void WriteChars(char[] buffer, int index, int count)
{
if (singleStringValue != null)
{
StartComplexValue();
}
stringValue.Append(buffer, index, count);
AddItem(ItemType.StringChars, new BufferChunk(buffer, index, count));
}
internal void WriteRaw(char[] buffer, int index, int count)
{
if (singleStringValue != null)
{
StartComplexValue();
}
stringValue.Append(buffer, index, count);
AddItem(ItemType.RawChars, new BufferChunk(buffer, index, count));
}
internal void WriteRaw(string data)
{
if (singleStringValue != null)
{
StartComplexValue();
}
stringValue.Append(data);
AddItem(ItemType.Raw, data);
}
internal void WriteValue(string value)
{
if (singleStringValue != null)
{
StartComplexValue();
}
stringValue.Append(value);
AddItem(ItemType.ValueString, value);
}
internal void Replay(XmlWriter writer)
{
if (singleStringValue != null)
{
writer.WriteString(singleStringValue);
return;
}
BufferChunk bufChunk;
for (int i = firstItem; i <= lastItem; i++)
{
Item item = items[i];
switch (item.type)
{
case ItemType.EntityRef:
writer.WriteEntityRef((string)item.data);
break;
case ItemType.CharEntity:
writer.WriteCharEntity((char)item.data);
break;
case ItemType.SurrogateCharEntity:
char[] chars = (char[])item.data;
writer.WriteSurrogateCharEntity(chars[0], chars[1]);
break;
case ItemType.Whitespace:
writer.WriteWhitespace((string)item.data);
break;
case ItemType.String:
writer.WriteString((string)item.data);
break;
case ItemType.StringChars:
bufChunk = (BufferChunk)item.data;
writer.WriteChars(bufChunk.buffer, bufChunk.index, bufChunk.count);
break;
case ItemType.Raw:
writer.WriteRaw((string)item.data);
break;
case ItemType.RawChars:
bufChunk = (BufferChunk)item.data;
writer.WriteChars(bufChunk.buffer, bufChunk.index, bufChunk.count);
break;
case ItemType.ValueString:
writer.WriteValue((string)item.data);
break;
default:
Debug.Assert(false, "Unexpected ItemType value.");
break;
}
}
}
            // This method trims whitespace from the beginning and the end of the string value and of the cached writer events
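            // Illustrative example (assumption, not from the original source): after
            // WriteString("  a "), WriteCharEntity('b'), WriteWhitespace("  "), calling Trim()
            // leaves the cached items so that Replay() writes "a " followed by the character
            // entity for 'b': the leading whitespace of the first string item and the trailing
            // whitespace item are dropped, while interior whitespace is preserved.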
internal void Trim()
{
                // if there is only one string value -> trim the white space directly
if (singleStringValue != null)
{
singleStringValue = XmlConvertEx.TrimString(singleStringValue);
return;
}
// trim the string in StringBuilder
string valBefore = stringValue.ToString();
string valAfter = XmlConvertEx.TrimString(valBefore);
if (valBefore != valAfter)
{
stringValue = new StringBuilder(valAfter);
}
// trim the beginning of the recorded writer events
XmlCharType xmlCharType = XmlCharType.Instance;
int i = firstItem;
while (i == firstItem && i <= lastItem)
{
Item item = items[i];
switch (item.type)
{
case ItemType.Whitespace:
firstItem++;
break;
case ItemType.String:
case ItemType.Raw:
case ItemType.ValueString:
item.data = XmlConvertEx.TrimStringStart((string)item.data);
if (((string)item.data).Length == 0)
{
// no characters left -> move the firstItem index to exclude it from the Replay
firstItem++;
}
break;
case ItemType.StringChars:
case ItemType.RawChars:
BufferChunk bufChunk = (BufferChunk)item.data;
int endIndex = bufChunk.index + bufChunk.count;
while (bufChunk.index < endIndex && xmlCharType.IsWhiteSpace(bufChunk.buffer[bufChunk.index]))
{
bufChunk.index++;
bufChunk.count--;
}
if (bufChunk.index == endIndex)
{
// no characters left -> move the firstItem index to exclude it from the Replay
firstItem++;
}
break;
}
i++;
}
// trim the end of the recorded writer events
i = lastItem;
while (i == lastItem && i >= firstItem)
{
Item item = items[i];
switch (item.type)
{
case ItemType.Whitespace:
lastItem--;
break;
case ItemType.String:
case ItemType.Raw:
case ItemType.ValueString:
item.data = XmlConvertEx.TrimStringEnd((string)item.data);
if (((string)item.data).Length == 0)
{
// no characters left -> move the lastItem index to exclude it from the Replay
lastItem--;
}
break;
case ItemType.StringChars:
case ItemType.RawChars:
BufferChunk bufChunk = (BufferChunk)item.data;
while (bufChunk.count > 0 && xmlCharType.IsWhiteSpace(bufChunk.buffer[bufChunk.index + bufChunk.count - 1]))
{
bufChunk.count--;
}
if (bufChunk.count == 0)
{
// no characters left -> move the lastItem index to exclude it from the Replay
lastItem--;
}
break;
}
i--;
}
}
internal void Clear()
{
singleStringValue = null;
lastItem = -1;
firstItem = 0;
stringValue.Length = 0;
}
private void StartComplexValue()
{
Debug.Assert(singleStringValue != null);
Debug.Assert(lastItem == -1);
stringValue.Append(singleStringValue);
AddItem(ItemType.String, singleStringValue);
singleStringValue = null;
}
void AddItem(ItemType type, object data)
{
int newItemIndex = lastItem + 1;
if (items == null)
{
items = new Item[4];
}
else if (items.Length == newItemIndex)
{
Item[] newItems = new Item[newItemIndex * 2];
Array.Copy(items, newItems, newItemIndex);
items = newItems;
}
if (items[newItemIndex] == null)
{
items[newItemIndex] = new Item();
}
items[newItemIndex].Set(type, data);
lastItem = newItemIndex;
}
}
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gax = Google.Api.Gax;
using gaxgrpc = Google.Api.Gax.Grpc;
using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore;
using lro = Google.LongRunning;
using proto = Google.Protobuf;
using grpccore = Grpc.Core;
using grpcinter = Grpc.Core.Interceptors;
using sys = System;
using scg = System.Collections.Generic;
using sco = System.Collections.ObjectModel;
using st = System.Threading;
using stt = System.Threading.Tasks;
namespace Google.Cloud.DocumentAI.V1
{
/// <summary>Settings for <see cref="DocumentProcessorServiceClient"/> instances.</summary>
public sealed partial class DocumentProcessorServiceSettings : gaxgrpc::ServiceSettingsBase
{
/// <summary>Get a new instance of the default <see cref="DocumentProcessorServiceSettings"/>.</summary>
/// <returns>A new instance of the default <see cref="DocumentProcessorServiceSettings"/>.</returns>
public static DocumentProcessorServiceSettings GetDefault() => new DocumentProcessorServiceSettings();
/// <summary>
/// Constructs a new <see cref="DocumentProcessorServiceSettings"/> object with default settings.
/// </summary>
public DocumentProcessorServiceSettings()
{
}
private DocumentProcessorServiceSettings(DocumentProcessorServiceSettings existing) : base(existing)
{
gax::GaxPreconditions.CheckNotNull(existing, nameof(existing));
ProcessDocumentSettings = existing.ProcessDocumentSettings;
BatchProcessDocumentsSettings = existing.BatchProcessDocumentsSettings;
BatchProcessDocumentsOperationsSettings = existing.BatchProcessDocumentsOperationsSettings.Clone();
ReviewDocumentSettings = existing.ReviewDocumentSettings;
ReviewDocumentOperationsSettings = existing.ReviewDocumentOperationsSettings.Clone();
OnCopy(existing);
}
partial void OnCopy(DocumentProcessorServiceSettings existing);
/// <summary>
/// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
/// <c>DocumentProcessorServiceClient.ProcessDocument</c> and
/// <c>DocumentProcessorServiceClient.ProcessDocumentAsync</c>.
/// </summary>
/// <remarks>
/// <list type="bullet">
/// <item><description>Initial retry delay: 100 milliseconds.</description></item>
/// <item><description>Retry delay multiplier: 1.3</description></item>
/// <item><description>Retry maximum delay: 60000 milliseconds.</description></item>
/// <item><description>Maximum attempts: Unlimited</description></item>
/// <item>
/// <description>
/// Retriable status codes: <see cref="grpccore::StatusCode.DeadlineExceeded"/>,
/// <see cref="grpccore::StatusCode.Unavailable"/>.
/// </description>
/// </item>
/// <item><description>Timeout: 120 seconds.</description></item>
/// </list>
/// </remarks>
public gaxgrpc::CallSettings ProcessDocumentSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(120000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(100), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.DeadlineExceeded, grpccore::StatusCode.Unavailable)));
/// <summary>
/// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
/// <c>DocumentProcessorServiceClient.BatchProcessDocuments</c> and
/// <c>DocumentProcessorServiceClient.BatchProcessDocumentsAsync</c>.
/// </summary>
/// <remarks>
/// <list type="bullet">
/// <item><description>Initial retry delay: 100 milliseconds.</description></item>
/// <item><description>Retry delay multiplier: 1.3</description></item>
/// <item><description>Retry maximum delay: 60000 milliseconds.</description></item>
/// <item><description>Maximum attempts: Unlimited</description></item>
/// <item>
/// <description>
/// Retriable status codes: <see cref="grpccore::StatusCode.DeadlineExceeded"/>,
/// <see cref="grpccore::StatusCode.Unavailable"/>.
/// </description>
/// </item>
/// <item><description>Timeout: 120 seconds.</description></item>
/// </list>
/// </remarks>
public gaxgrpc::CallSettings BatchProcessDocumentsSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(120000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(100), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.DeadlineExceeded, grpccore::StatusCode.Unavailable)));
/// <summary>
/// Long Running Operation settings for calls to <c>DocumentProcessorServiceClient.BatchProcessDocuments</c> and
/// <c>DocumentProcessorServiceClient.BatchProcessDocumentsAsync</c>.
/// </summary>
/// <remarks>
/// Uses default <see cref="gax::PollSettings"/> of:
/// <list type="bullet">
/// <item><description>Initial delay: 20 seconds.</description></item>
/// <item><description>Delay multiplier: 1.5</description></item>
/// <item><description>Maximum delay: 45 seconds.</description></item>
/// <item><description>Total timeout: 24 hours.</description></item>
/// </list>
/// </remarks>
public lro::OperationsSettings BatchProcessDocumentsOperationsSettings { get; set; } = new lro::OperationsSettings
{
DefaultPollSettings = new gax::PollSettings(gax::Expiration.FromTimeout(sys::TimeSpan.FromHours(24)), sys::TimeSpan.FromSeconds(20), 1.5, sys::TimeSpan.FromSeconds(45)),
};
/// <summary>
/// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
/// <c>DocumentProcessorServiceClient.ReviewDocument</c> and
/// <c>DocumentProcessorServiceClient.ReviewDocumentAsync</c>.
/// </summary>
/// <remarks>
/// <list type="bullet">
/// <item><description>Initial retry delay: 100 milliseconds.</description></item>
/// <item><description>Retry delay multiplier: 1.3</description></item>
/// <item><description>Retry maximum delay: 60000 milliseconds.</description></item>
/// <item><description>Maximum attempts: Unlimited</description></item>
/// <item>
/// <description>
/// Retriable status codes: <see cref="grpccore::StatusCode.DeadlineExceeded"/>,
/// <see cref="grpccore::StatusCode.Unavailable"/>.
/// </description>
/// </item>
/// <item><description>Timeout: 120 seconds.</description></item>
/// </list>
/// </remarks>
public gaxgrpc::CallSettings ReviewDocumentSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(120000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(100), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.DeadlineExceeded, grpccore::StatusCode.Unavailable)));
/// <summary>
/// Long Running Operation settings for calls to <c>DocumentProcessorServiceClient.ReviewDocument</c> and
/// <c>DocumentProcessorServiceClient.ReviewDocumentAsync</c>.
/// </summary>
/// <remarks>
/// Uses default <see cref="gax::PollSettings"/> of:
/// <list type="bullet">
/// <item><description>Initial delay: 20 seconds.</description></item>
/// <item><description>Delay multiplier: 1.5</description></item>
/// <item><description>Maximum delay: 45 seconds.</description></item>
/// <item><description>Total timeout: 24 hours.</description></item>
/// </list>
/// </remarks>
public lro::OperationsSettings ReviewDocumentOperationsSettings { get; set; } = new lro::OperationsSettings
{
DefaultPollSettings = new gax::PollSettings(gax::Expiration.FromTimeout(sys::TimeSpan.FromHours(24)), sys::TimeSpan.FromSeconds(20), 1.5, sys::TimeSpan.FromSeconds(45)),
};
/// <summary>Creates a deep clone of this object, with all the same property values.</summary>
/// <returns>A deep clone of this <see cref="DocumentProcessorServiceSettings"/> object.</returns>
public DocumentProcessorServiceSettings Clone() => new DocumentProcessorServiceSettings(this);
}
/// <summary>
/// Builder class for <see cref="DocumentProcessorServiceClient"/> to provide simple configuration of credentials,
/// endpoint etc.
/// </summary>
public sealed partial class DocumentProcessorServiceClientBuilder : gaxgrpc::ClientBuilderBase<DocumentProcessorServiceClient>
{
/// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary>
public DocumentProcessorServiceSettings Settings { get; set; }
/// <summary>Creates a new builder with default settings.</summary>
public DocumentProcessorServiceClientBuilder()
{
UseJwtAccessWithScopes = DocumentProcessorServiceClient.UseJwtAccessWithScopes;
}
partial void InterceptBuild(ref DocumentProcessorServiceClient client);
partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<DocumentProcessorServiceClient> task);
/// <summary>Builds the resulting client.</summary>
public override DocumentProcessorServiceClient Build()
{
DocumentProcessorServiceClient client = null;
InterceptBuild(ref client);
return client ?? BuildImpl();
}
/// <summary>Builds the resulting client asynchronously.</summary>
public override stt::Task<DocumentProcessorServiceClient> BuildAsync(st::CancellationToken cancellationToken = default)
{
stt::Task<DocumentProcessorServiceClient> task = null;
InterceptBuildAsync(cancellationToken, ref task);
return task ?? BuildAsyncImpl(cancellationToken);
}
private DocumentProcessorServiceClient BuildImpl()
{
Validate();
grpccore::CallInvoker callInvoker = CreateCallInvoker();
return DocumentProcessorServiceClient.Create(callInvoker, Settings);
}
private async stt::Task<DocumentProcessorServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken)
{
Validate();
grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false);
return DocumentProcessorServiceClient.Create(callInvoker, Settings);
}
/// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary>
protected override string GetDefaultEndpoint() => DocumentProcessorServiceClient.DefaultEndpoint;
/// <summary>
/// Returns the default scopes for this builder type, used if no scopes are otherwise specified.
/// </summary>
protected override scg::IReadOnlyList<string> GetDefaultScopes() => DocumentProcessorServiceClient.DefaultScopes;
/// <summary>Returns the channel pool to use when no other options are specified.</summary>
protected override gaxgrpc::ChannelPool GetChannelPool() => DocumentProcessorServiceClient.ChannelPool;
        /// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/> to use if not otherwise specified.</summary>
protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance;
}
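    // Hedged usage sketch (illustrative, not generated code): shows one way to override the
    // retry/timeout defaults documented above before building a client. The 30-second timeout
    // and the class/method names here are assumptions for demonstration only.
    internal static class DocumentProcessorServiceClientUsageSketch
    {
        internal static DocumentProcessorServiceClient CreateWithShortTimeout()
        {
            DocumentProcessorServiceSettings settings = DocumentProcessorServiceSettings.GetDefault();
            // CallSettings.FromExpiration is the same helper the generated defaults above use;
            // this replaces the retry-enabled default and simply caps the overall call duration.
            settings.ProcessDocumentSettings =
                gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromSeconds(30)));
            return new DocumentProcessorServiceClientBuilder
            {
                Settings = settings,
            }.Build();
        }
    }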
/// <summary>DocumentProcessorService client wrapper, for convenient use.</summary>
/// <remarks>
/// Service to call Cloud DocumentAI to process documents according to the
/// processor's definition. Processors are built using state-of-the-art Google
/// AI such as natural language, computer vision, and translation to extract
/// structured information from unstructured or semi-structured documents.
/// </remarks>
public abstract partial class DocumentProcessorServiceClient
{
/// <summary>
/// The default endpoint for the DocumentProcessorService service, which is a host of
/// "documentai.googleapis.com" and a port of 443.
/// </summary>
public static string DefaultEndpoint { get; } = "documentai.googleapis.com:443";
/// <summary>The default DocumentProcessorService scopes.</summary>
/// <remarks>
/// The default DocumentProcessorService scopes are:
/// <list type="bullet">
/// <item><description>https://www.googleapis.com/auth/cloud-platform</description></item>
/// </list>
/// </remarks>
public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[]
{
"https://www.googleapis.com/auth/cloud-platform",
});
internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes);
internal static bool UseJwtAccessWithScopes
{
get
{
bool useJwtAccessWithScopes = true;
MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes);
return useJwtAccessWithScopes;
}
}
static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes);
/// <summary>
/// Asynchronously creates a <see cref="DocumentProcessorServiceClient"/> using the default credentials,
/// endpoint and settings. To specify custom credentials or other settings, use
/// <see cref="DocumentProcessorServiceClientBuilder"/>.
/// </summary>
/// <param name="cancellationToken">
/// The <see cref="st::CancellationToken"/> to use while creating the client.
/// </param>
/// <returns>The task representing the created <see cref="DocumentProcessorServiceClient"/>.</returns>
public static stt::Task<DocumentProcessorServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) =>
new DocumentProcessorServiceClientBuilder().BuildAsync(cancellationToken);
/// <summary>
/// Synchronously creates a <see cref="DocumentProcessorServiceClient"/> using the default credentials, endpoint
/// and settings. To specify custom credentials or other settings, use
/// <see cref="DocumentProcessorServiceClientBuilder"/>.
/// </summary>
/// <returns>The created <see cref="DocumentProcessorServiceClient"/>.</returns>
public static DocumentProcessorServiceClient Create() => new DocumentProcessorServiceClientBuilder().Build();
/// <summary>
/// Creates a <see cref="DocumentProcessorServiceClient"/> which uses the specified call invoker for remote
/// operations.
/// </summary>
/// <param name="callInvoker">
/// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null.
/// </param>
/// <param name="settings">Optional <see cref="DocumentProcessorServiceSettings"/>.</param>
/// <returns>The created <see cref="DocumentProcessorServiceClient"/>.</returns>
internal static DocumentProcessorServiceClient Create(grpccore::CallInvoker callInvoker, DocumentProcessorServiceSettings settings = null)
{
gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker));
grpcinter::Interceptor interceptor = settings?.Interceptor;
if (interceptor != null)
{
callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor);
}
DocumentProcessorService.DocumentProcessorServiceClient grpcClient = new DocumentProcessorService.DocumentProcessorServiceClient(callInvoker);
return new DocumentProcessorServiceClientImpl(grpcClient, settings);
}
/// <summary>
/// Shuts down any channels automatically created by <see cref="Create()"/> and
/// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not
/// affected.
/// </summary>
/// <remarks>
/// After calling this method, further calls to <see cref="Create()"/> and
/// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down
/// by another call to this method.
/// </remarks>
/// <returns>A task representing the asynchronous shutdown operation.</returns>
public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync();
/// <summary>The underlying gRPC DocumentProcessorService client</summary>
public virtual DocumentProcessorService.DocumentProcessorServiceClient GrpcClient => throw new sys::NotImplementedException();
/// <summary>
/// Processes a single document.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual ProcessResponse ProcessDocument(ProcessRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Processes a single document.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<ProcessResponse> ProcessDocumentAsync(ProcessRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Processes a single document.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<ProcessResponse> ProcessDocumentAsync(ProcessRequest request, st::CancellationToken cancellationToken) =>
ProcessDocumentAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Processes a single document.
/// </summary>
/// <param name="name">
/// Required. The processor resource name.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual ProcessResponse ProcessDocument(string name, gaxgrpc::CallSettings callSettings = null) =>
ProcessDocument(new ProcessRequest
{
Name = gax::GaxPreconditions.CheckNotNullOrEmpty(name, nameof(name)),
}, callSettings);
/// <summary>
/// Processes a single document.
/// </summary>
/// <param name="name">
/// Required. The processor resource name.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<ProcessResponse> ProcessDocumentAsync(string name, gaxgrpc::CallSettings callSettings = null) =>
ProcessDocumentAsync(new ProcessRequest
{
Name = gax::GaxPreconditions.CheckNotNullOrEmpty(name, nameof(name)),
}, callSettings);
/// <summary>
/// Processes a single document.
/// </summary>
/// <param name="name">
/// Required. The processor resource name.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<ProcessResponse> ProcessDocumentAsync(string name, st::CancellationToken cancellationToken) =>
ProcessDocumentAsync(name, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Processes a single document.
/// </summary>
/// <param name="name">
/// Required. The processor resource name.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual ProcessResponse ProcessDocument(ProcessorName name, gaxgrpc::CallSettings callSettings = null) =>
ProcessDocument(new ProcessRequest
{
ProcessorName = gax::GaxPreconditions.CheckNotNull(name, nameof(name)),
}, callSettings);
/// <summary>
/// Processes a single document.
/// </summary>
/// <param name="name">
/// Required. The processor resource name.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<ProcessResponse> ProcessDocumentAsync(ProcessorName name, gaxgrpc::CallSettings callSettings = null) =>
ProcessDocumentAsync(new ProcessRequest
{
ProcessorName = gax::GaxPreconditions.CheckNotNull(name, nameof(name)),
}, callSettings);
/// <summary>
/// Processes a single document.
/// </summary>
/// <param name="name">
/// Required. The processor resource name.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<ProcessResponse> ProcessDocumentAsync(ProcessorName name, st::CancellationToken cancellationToken) =>
ProcessDocumentAsync(name, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
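// Hedged usage sketch: calling ProcessDocument through the string-name overload above.
// The project, location and processor IDs below are placeholders, not real resources.
//
//   DocumentProcessorServiceClient client = DocumentProcessorServiceClient.Create();
//   ProcessResponse response = client.ProcessDocument(
//       "projects/my-project/locations/us/processors/my-processor");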
/// <summary>
/// LRO endpoint to batch process many documents. The output is written
/// to Cloud Storage as JSON in the [Document] format.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual lro::Operation<BatchProcessResponse, BatchProcessMetadata> BatchProcessDocuments(BatchProcessRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// LRO endpoint to batch process many documents. The output is written
/// to Cloud Storage as JSON in the [Document] format.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<lro::Operation<BatchProcessResponse, BatchProcessMetadata>> BatchProcessDocumentsAsync(BatchProcessRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// LRO endpoint to batch process many documents. The output is written
/// to Cloud Storage as JSON in the [Document] format.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<lro::Operation<BatchProcessResponse, BatchProcessMetadata>> BatchProcessDocumentsAsync(BatchProcessRequest request, st::CancellationToken cancellationToken) =>
BatchProcessDocumentsAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
/// <summary>The long-running operations client for <c>BatchProcessDocuments</c>.</summary>
public virtual lro::OperationsClient BatchProcessDocumentsOperationsClient => throw new sys::NotImplementedException();
/// <summary>
/// Poll an operation once, using an <c>operationName</c> from a previous invocation of
/// <c>BatchProcessDocuments</c>.
/// </summary>
/// <param name="operationName">
/// The name of a previously invoked operation. Must not be <c>null</c> or empty.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The result of polling the operation.</returns>
public virtual lro::Operation<BatchProcessResponse, BatchProcessMetadata> PollOnceBatchProcessDocuments(string operationName, gaxgrpc::CallSettings callSettings = null) =>
lro::Operation<BatchProcessResponse, BatchProcessMetadata>.PollOnceFromName(gax::GaxPreconditions.CheckNotNullOrEmpty(operationName, nameof(operationName)), BatchProcessDocumentsOperationsClient, callSettings);
/// <summary>
/// Asynchronously poll an operation once, using an <c>operationName</c> from a previous invocation of
/// <c>BatchProcessDocuments</c>.
/// </summary>
/// <param name="operationName">
/// The name of a previously invoked operation. Must not be <c>null</c> or empty.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A task representing the result of polling the operation.</returns>
public virtual stt::Task<lro::Operation<BatchProcessResponse, BatchProcessMetadata>> PollOnceBatchProcessDocumentsAsync(string operationName, gaxgrpc::CallSettings callSettings = null) =>
lro::Operation<BatchProcessResponse, BatchProcessMetadata>.PollOnceFromNameAsync(gax::GaxPreconditions.CheckNotNullOrEmpty(operationName, nameof(operationName)), BatchProcessDocumentsOperationsClient, callSettings);
/// <summary>
/// LRO endpoint to batch process many documents. The output is written
/// to Cloud Storage as JSON in the [Document] format.
/// </summary>
/// <param name="name">
/// Required. The processor resource name.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual lro::Operation<BatchProcessResponse, BatchProcessMetadata> BatchProcessDocuments(string name, gaxgrpc::CallSettings callSettings = null) =>
BatchProcessDocuments(new BatchProcessRequest
{
Name = gax::GaxPreconditions.CheckNotNullOrEmpty(name, nameof(name)),
}, callSettings);
/// <summary>
/// LRO endpoint to batch process many documents. The output is written
/// to Cloud Storage as JSON in the [Document] format.
/// </summary>
/// <param name="name">
/// Required. The processor resource name.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<lro::Operation<BatchProcessResponse, BatchProcessMetadata>> BatchProcessDocumentsAsync(string name, gaxgrpc::CallSettings callSettings = null) =>
BatchProcessDocumentsAsync(new BatchProcessRequest
{
Name = gax::GaxPreconditions.CheckNotNullOrEmpty(name, nameof(name)),
}, callSettings);
/// <summary>
/// LRO endpoint to batch process many documents. The output is written
/// to Cloud Storage as JSON in the [Document] format.
/// </summary>
/// <param name="name">
/// Required. The processor resource name.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<lro::Operation<BatchProcessResponse, BatchProcessMetadata>> BatchProcessDocumentsAsync(string name, st::CancellationToken cancellationToken) =>
BatchProcessDocumentsAsync(name, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// LRO endpoint to batch process many documents. The output is written
/// to Cloud Storage as JSON in the [Document] format.
/// </summary>
/// <param name="name">
/// Required. The processor resource name.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual lro::Operation<BatchProcessResponse, BatchProcessMetadata> BatchProcessDocuments(ProcessorName name, gaxgrpc::CallSettings callSettings = null) =>
BatchProcessDocuments(new BatchProcessRequest
{
ProcessorName = gax::GaxPreconditions.CheckNotNull(name, nameof(name)),
}, callSettings);
/// <summary>
/// LRO endpoint to batch process many documents. The output is written
/// to Cloud Storage as JSON in the [Document] format.
/// </summary>
/// <param name="name">
/// Required. The processor resource name.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<lro::Operation<BatchProcessResponse, BatchProcessMetadata>> BatchProcessDocumentsAsync(ProcessorName name, gaxgrpc::CallSettings callSettings = null) =>
BatchProcessDocumentsAsync(new BatchProcessRequest
{
ProcessorName = gax::GaxPreconditions.CheckNotNull(name, nameof(name)),
}, callSettings);
/// <summary>
/// LRO endpoint to batch process many documents. The output is written
/// to Cloud Storage as JSON in the [Document] format.
/// </summary>
/// <param name="name">
/// Required. The processor resource name.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<lro::Operation<BatchProcessResponse, BatchProcessMetadata>> BatchProcessDocumentsAsync(ProcessorName name, st::CancellationToken cancellationToken) =>
BatchProcessDocumentsAsync(name, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
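// Hedged usage sketch: starting the batch LRO via the string-name overload and polling
// it to completion. PollUntilCompleted() is the standard GAX helper on
// lro::Operation&lt;TResponse, TMetadata&gt; and is assumed here rather than shown in this
// file; the resource name is a placeholder.
//
//   lro::Operation<BatchProcessResponse, BatchProcessMetadata> operation =
//       client.BatchProcessDocuments("projects/my-project/locations/us/processors/my-processor");
//   BatchProcessResponse batchResult = operation.PollUntilCompleted().Result;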
/// <summary>
/// Send a document for Human Review. The input document should be processed by
/// the specified processor.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata> ReviewDocument(ReviewDocumentRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Send a document for Human Review. The input document should be processed by
/// the specified processor.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata>> ReviewDocumentAsync(ReviewDocumentRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Send a document for Human Review. The input document should be processed by
/// the specified processor.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata>> ReviewDocumentAsync(ReviewDocumentRequest request, st::CancellationToken cancellationToken) =>
ReviewDocumentAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
/// <summary>The long-running operations client for <c>ReviewDocument</c>.</summary>
public virtual lro::OperationsClient ReviewDocumentOperationsClient => throw new sys::NotImplementedException();
/// <summary>
/// Poll an operation once, using an <c>operationName</c> from a previous invocation of <c>ReviewDocument</c>.
/// </summary>
/// <param name="operationName">
/// The name of a previously invoked operation. Must not be <c>null</c> or empty.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The result of polling the operation.</returns>
public virtual lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata> PollOnceReviewDocument(string operationName, gaxgrpc::CallSettings callSettings = null) =>
lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata>.PollOnceFromName(gax::GaxPreconditions.CheckNotNullOrEmpty(operationName, nameof(operationName)), ReviewDocumentOperationsClient, callSettings);
/// <summary>
/// Asynchronously poll an operation once, using an <c>operationName</c> from a previous invocation of
/// <c>ReviewDocument</c>.
/// </summary>
/// <param name="operationName">
/// The name of a previously invoked operation. Must not be <c>null</c> or empty.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A task representing the result of polling the operation.</returns>
public virtual stt::Task<lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata>> PollOnceReviewDocumentAsync(string operationName, gaxgrpc::CallSettings callSettings = null) =>
lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata>.PollOnceFromNameAsync(gax::GaxPreconditions.CheckNotNullOrEmpty(operationName, nameof(operationName)), ReviewDocumentOperationsClient, callSettings);
/// <summary>
/// Send a document for Human Review. The input document should be processed by
/// the specified processor.
/// </summary>
/// <param name="humanReviewConfig">
/// Required. The resource name of the HumanReviewConfig that the document will be
/// reviewed with.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata> ReviewDocument(string humanReviewConfig, gaxgrpc::CallSettings callSettings = null) =>
ReviewDocument(new ReviewDocumentRequest
{
HumanReviewConfig = gax::GaxPreconditions.CheckNotNullOrEmpty(humanReviewConfig, nameof(humanReviewConfig)),
}, callSettings);
/// <summary>
/// Send a document for Human Review. The input document should be processed by
/// the specified processor.
/// </summary>
/// <param name="humanReviewConfig">
/// Required. The resource name of the HumanReviewConfig that the document will be
/// reviewed with.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata>> ReviewDocumentAsync(string humanReviewConfig, gaxgrpc::CallSettings callSettings = null) =>
ReviewDocumentAsync(new ReviewDocumentRequest
{
HumanReviewConfig = gax::GaxPreconditions.CheckNotNullOrEmpty(humanReviewConfig, nameof(humanReviewConfig)),
}, callSettings);
/// <summary>
/// Send a document for Human Review. The input document should be processed by
/// the specified processor.
/// </summary>
/// <param name="humanReviewConfig">
/// Required. The resource name of the HumanReviewConfig that the document will be
/// reviewed with.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata>> ReviewDocumentAsync(string humanReviewConfig, st::CancellationToken cancellationToken) =>
ReviewDocumentAsync(humanReviewConfig, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Send a document for Human Review. The input document should be processed by
/// the specified processor.
/// </summary>
/// <param name="humanReviewConfig">
/// Required. The resource name of the HumanReviewConfig that the document will be
/// reviewed with.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata> ReviewDocument(HumanReviewConfigName humanReviewConfig, gaxgrpc::CallSettings callSettings = null) =>
ReviewDocument(new ReviewDocumentRequest
{
HumanReviewConfigAsHumanReviewConfigName = gax::GaxPreconditions.CheckNotNull(humanReviewConfig, nameof(humanReviewConfig)),
}, callSettings);
/// <summary>
/// Send a document for Human Review. The input document should be processed by
/// the specified processor.
/// </summary>
/// <param name="humanReviewConfig">
/// Required. The resource name of the HumanReviewConfig that the document will be
/// reviewed with.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata>> ReviewDocumentAsync(HumanReviewConfigName humanReviewConfig, gaxgrpc::CallSettings callSettings = null) =>
ReviewDocumentAsync(new ReviewDocumentRequest
{
HumanReviewConfigAsHumanReviewConfigName = gax::GaxPreconditions.CheckNotNull(humanReviewConfig, nameof(humanReviewConfig)),
}, callSettings);
/// <summary>
/// Send a document for Human Review. The input document should be processed by
/// the specified processor.
/// </summary>
/// <param name="humanReviewConfig">
/// Required. The resource name of the HumanReviewConfig that the document will be
/// reviewed with.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata>> ReviewDocumentAsync(HumanReviewConfigName humanReviewConfig, st::CancellationToken cancellationToken) =>
ReviewDocumentAsync(humanReviewConfig, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
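// Hedged usage sketch: sending a document for human review asynchronously and awaiting
// the long-running operation. PollUntilCompletedAsync() is the standard GAX helper and
// is assumed here; the HumanReviewConfig resource name is a placeholder.
//
//   var reviewOperation = await client.ReviewDocumentAsync(
//       "projects/my-project/locations/us/processors/my-processor/humanReviewConfig");
//   var completedReview = await reviewOperation.PollUntilCompletedAsync();
//   ReviewDocumentResponse reviewResult = completedReview.Result;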
}
/// <summary>DocumentProcessorService client wrapper implementation, for convenient use.</summary>
/// <remarks>
/// Service to call Cloud DocumentAI to process documents according to the
/// processor's definition. Processors are built using state-of-the-art Google
/// AI such as natural language, computer vision, and translation to extract
/// structured information from unstructured or semi-structured documents.
/// </remarks>
public sealed partial class DocumentProcessorServiceClientImpl : DocumentProcessorServiceClient
{
private readonly gaxgrpc::ApiCall<ProcessRequest, ProcessResponse> _callProcessDocument;
private readonly gaxgrpc::ApiCall<BatchProcessRequest, lro::Operation> _callBatchProcessDocuments;
private readonly gaxgrpc::ApiCall<ReviewDocumentRequest, lro::Operation> _callReviewDocument;
/// <summary>
/// Constructs a client wrapper for the DocumentProcessorService service, with the specified gRPC client and
/// settings.
/// </summary>
/// <param name="grpcClient">The underlying gRPC client.</param>
/// <param name="settings">
/// The base <see cref="DocumentProcessorServiceSettings"/> used within this client.
/// </param>
public DocumentProcessorServiceClientImpl(DocumentProcessorService.DocumentProcessorServiceClient grpcClient, DocumentProcessorServiceSettings settings)
{
GrpcClient = grpcClient;
DocumentProcessorServiceSettings effectiveSettings = settings ?? DocumentProcessorServiceSettings.GetDefault();
gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings);
BatchProcessDocumentsOperationsClient = new lro::OperationsClientImpl(grpcClient.CreateOperationsClient(), effectiveSettings.BatchProcessDocumentsOperationsSettings);
ReviewDocumentOperationsClient = new lro::OperationsClientImpl(grpcClient.CreateOperationsClient(), effectiveSettings.ReviewDocumentOperationsSettings);
_callProcessDocument = clientHelper.BuildApiCall<ProcessRequest, ProcessResponse>(grpcClient.ProcessDocumentAsync, grpcClient.ProcessDocument, effectiveSettings.ProcessDocumentSettings).WithGoogleRequestParam("name", request => request.Name);
Modify_ApiCall(ref _callProcessDocument);
Modify_ProcessDocumentApiCall(ref _callProcessDocument);
_callBatchProcessDocuments = clientHelper.BuildApiCall<BatchProcessRequest, lro::Operation>(grpcClient.BatchProcessDocumentsAsync, grpcClient.BatchProcessDocuments, effectiveSettings.BatchProcessDocumentsSettings).WithGoogleRequestParam("name", request => request.Name);
Modify_ApiCall(ref _callBatchProcessDocuments);
Modify_BatchProcessDocumentsApiCall(ref _callBatchProcessDocuments);
_callReviewDocument = clientHelper.BuildApiCall<ReviewDocumentRequest, lro::Operation>(grpcClient.ReviewDocumentAsync, grpcClient.ReviewDocument, effectiveSettings.ReviewDocumentSettings).WithGoogleRequestParam("human_review_config", request => request.HumanReviewConfig);
Modify_ApiCall(ref _callReviewDocument);
Modify_ReviewDocumentApiCall(ref _callReviewDocument);
OnConstruction(grpcClient, effectiveSettings, clientHelper);
}
partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>;
partial void Modify_ProcessDocumentApiCall(ref gaxgrpc::ApiCall<ProcessRequest, ProcessResponse> call);
partial void Modify_BatchProcessDocumentsApiCall(ref gaxgrpc::ApiCall<BatchProcessRequest, lro::Operation> call);
partial void Modify_ReviewDocumentApiCall(ref gaxgrpc::ApiCall<ReviewDocumentRequest, lro::Operation> call);
partial void OnConstruction(DocumentProcessorService.DocumentProcessorServiceClient grpcClient, DocumentProcessorServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);
/// <summary>The underlying gRPC DocumentProcessorService client</summary>
public override DocumentProcessorService.DocumentProcessorServiceClient GrpcClient { get; }
partial void Modify_ProcessRequest(ref ProcessRequest request, ref gaxgrpc::CallSettings settings);
partial void Modify_BatchProcessRequest(ref BatchProcessRequest request, ref gaxgrpc::CallSettings settings);
partial void Modify_ReviewDocumentRequest(ref ReviewDocumentRequest request, ref gaxgrpc::CallSettings settings);
/// <summary>
/// Processes a single document.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public override ProcessResponse ProcessDocument(ProcessRequest request, gaxgrpc::CallSettings callSettings = null)
{
Modify_ProcessRequest(ref request, ref callSettings);
return _callProcessDocument.Sync(request, callSettings);
}
/// <summary>
/// Processes a single document.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public override stt::Task<ProcessResponse> ProcessDocumentAsync(ProcessRequest request, gaxgrpc::CallSettings callSettings = null)
{
Modify_ProcessRequest(ref request, ref callSettings);
return _callProcessDocument.Async(request, callSettings);
}
/// <summary>The long-running operations client for <c>BatchProcessDocuments</c>.</summary>
public override lro::OperationsClient BatchProcessDocumentsOperationsClient { get; }
/// <summary>
/// LRO endpoint to batch process many documents. The output is written
/// to Cloud Storage as JSON in the [Document] format.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public override lro::Operation<BatchProcessResponse, BatchProcessMetadata> BatchProcessDocuments(BatchProcessRequest request, gaxgrpc::CallSettings callSettings = null)
{
Modify_BatchProcessRequest(ref request, ref callSettings);
return new lro::Operation<BatchProcessResponse, BatchProcessMetadata>(_callBatchProcessDocuments.Sync(request, callSettings), BatchProcessDocumentsOperationsClient);
}
/// <summary>
/// LRO endpoint to batch process many documents. The output is written
/// to Cloud Storage as JSON in the [Document] format.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public override async stt::Task<lro::Operation<BatchProcessResponse, BatchProcessMetadata>> BatchProcessDocumentsAsync(BatchProcessRequest request, gaxgrpc::CallSettings callSettings = null)
{
Modify_BatchProcessRequest(ref request, ref callSettings);
return new lro::Operation<BatchProcessResponse, BatchProcessMetadata>(await _callBatchProcessDocuments.Async(request, callSettings).ConfigureAwait(false), BatchProcessDocumentsOperationsClient);
}
/// <summary>The long-running operations client for <c>ReviewDocument</c>.</summary>
public override lro::OperationsClient ReviewDocumentOperationsClient { get; }
/// <summary>
/// Send a document for Human Review. The input document should be processed by
/// the specified processor.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public override lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata> ReviewDocument(ReviewDocumentRequest request, gaxgrpc::CallSettings callSettings = null)
{
Modify_ReviewDocumentRequest(ref request, ref callSettings);
return new lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata>(_callReviewDocument.Sync(request, callSettings), ReviewDocumentOperationsClient);
}
/// <summary>
/// Send a document for Human Review. The input document should be processed by
/// the specified processor.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public override async stt::Task<lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata>> ReviewDocumentAsync(ReviewDocumentRequest request, gaxgrpc::CallSettings callSettings = null)
{
Modify_ReviewDocumentRequest(ref request, ref callSettings);
return new lro::Operation<ReviewDocumentResponse, ReviewDocumentOperationMetadata>(await _callReviewDocument.Async(request, callSettings).ConfigureAwait(false), ReviewDocumentOperationsClient);
}
}
public static partial class DocumentProcessorService
{
public partial class DocumentProcessorServiceClient
{
/// <summary>
/// Creates a new instance of <see cref="lro::Operations.OperationsClient"/> using the same call invoker as
/// this client.
/// </summary>
/// <returns>A new Operations client for the same target as this client.</returns>
public virtual lro::Operations.OperationsClient CreateOperationsClient() =>
new lro::Operations.OperationsClient(CallInvoker);
}
}
}
#region License
/*
* All content copyright Marko Lahma, unless otherwise indicated. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
*/
#endregion
using System;
using System.Diagnostics;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Quartz.Logging;
namespace Quartz.Job
{
/// <summary>
/// Built in job for executing native executables in a separate process.
/// </summary>
/// <remarks>
/// <example>
/// <code>
/// JobDetail job = new JobDetail("dumbJob", null, typeof(Quartz.Job.NativeJob));
/// job.JobDataMap.Put(Quartz.Job.NativeJob.PropertyCommand, "echo \"hi\" >> foobar.txt");
/// Trigger trigger = TriggerUtils.MakeSecondlyTrigger(5);
/// trigger.Name = "dumbTrigger";
/// sched.ScheduleJob(job, trigger);
/// </code>
/// </example>
/// If PropertyWaitForProcess is true, then the integer exit value of the process
/// will be saved as the job execution result in the JobExecutionContext.
/// </remarks>
/// <author>Matthew Payne</author>
/// <author>James House</author>
/// <author>Steinar Overbeck Cook</author>
/// <author>Marko Lahma (.NET)</author>
public class NativeJob : IJob
{
/// <summary>
/// Required parameter that specifies the name of the command (executable)
/// to be run.
/// </summary>
public const string PropertyCommand = "command";
/// <summary>
/// Optional parameter that specifies the parameters to be passed to the
/// executed command.
/// </summary>
public const string PropertyParameters = "parameters";
/// <summary>
/// Optional parameter (value should be 'true' or 'false') that specifies
/// whether the job should wait for the native process to exit before the
/// job itself completes.
///
/// <para>Defaults to <see langword="true" />.</para>
/// </summary>
public const string PropertyWaitForProcess = "waitForProcess";
/// <summary>
/// Optional parameter (value should be 'true' or 'false') that specifies
/// whether the spawned process's stdout and stderr streams should be
/// consumed. If the process writes output and the streams are not consumed,
/// the process may block ('hang') when its output buffers fill.
///
/// <para>Defaults to <see langword="false" />.</para>
/// </summary>
public const string PropertyConsumeStreams = "consumeStreams";
/// <summary>
/// Optional parameter that specifies the working directory to be used by
/// the executed command.
/// </summary>
public const string PropertyWorkingDirectory = "workingDirectory";
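// Hedged usage sketch (assumes the JobBuilder/TriggerBuilder fluent API of current
// Quartz.NET releases): wiring the JobDataMap keys above into a scheduled NativeJob.
//
//   IJobDetail job = JobBuilder.Create<NativeJob>()
//       .WithIdentity("nativeJob")
//       .UsingJobData(NativeJob.PropertyCommand, "echo hello")
//       .UsingJobData(NativeJob.PropertyWaitForProcess, true)
//       .UsingJobData(NativeJob.PropertyConsumeStreams, true)
//       .Build();
//   ITrigger trigger = TriggerBuilder.Create().StartNow().Build();
//   await scheduler.ScheduleJob(job, trigger);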
private const string StreamTypeStandardOutput = "stdout";
private const string StreamTypeError = "stderr";
/// <summary>
/// Gets the log.
/// </summary>
/// <value>The log.</value>
private ILog Log { get; }
/// <summary>
/// Initializes a new instance of the <see cref="NativeJob"/> class.
/// </summary>
public NativeJob()
{
Log = LogProvider.GetLogger(typeof(NativeJob));
}
/// <summary>
/// Called by the <see cref="IScheduler" /> when a <see cref="ITrigger" />
/// fires that is associated with the <see cref="IJob" />.
/// <para>
/// The implementation may wish to set a result object on the
/// JobExecutionContext before this method exits. The result itself
/// is meaningless to Quartz, but may be informative to
/// <see cref="IJobListener" />s or
/// <see cref="ITriggerListener" />s that are watching the job's
/// execution.
/// </para>
/// </summary>
/// <param name="context"></param>
public virtual Task Execute(IJobExecutionContext context)
{
JobDataMap data = context.MergedJobDataMap;
string command = data.GetString(PropertyCommand) ?? throw new JobExecutionException("command missing");
string parameters = data.GetString(PropertyParameters) ?? "";
bool wait = true;
if (data.ContainsKey(PropertyWaitForProcess))
{
wait = data.GetBooleanValue(PropertyWaitForProcess);
}
bool consumeStreams = false;
if (data.ContainsKey(PropertyConsumeStreams))
{
consumeStreams = data.GetBooleanValue(PropertyConsumeStreams);
}
var workingDirectory = data.GetString(PropertyWorkingDirectory);
int exitCode = RunNativeCommand(command, parameters, workingDirectory, wait, consumeStreams);
context.Result = exitCode;
return Task.FromResult(true);
}
private int RunNativeCommand(string command, string parameters, string? workingDirectory, bool wait, bool consumeStreams)
{
string[] cmd;
string[] args = new string[2];
args[0] = command;
args[1] = parameters;
int result = -1;
try
{
// the OS name determines how the command is launched (cmd.exe, /bin/sh, or directly)
string? osName = Environment.GetEnvironmentVariable("OS");
if (osName == null)
{
throw new JobExecutionException("Could not read environment variable for OS");
}
if (osName.ToLower().IndexOf("windows") > -1)
{
cmd = new string[args.Length + 2];
cmd[0] = "cmd.exe";
cmd[1] = "/C";
for (int i = 0; i < args.Length; i++)
{
cmd[i + 2] = args[i];
}
}
else if (osName.ToLower().IndexOf("linux") > -1)
{
cmd = new string[3];
cmd[0] = "/bin/sh";
cmd[1] = "-c";
cmd[2] = args[0] + " " + args[1];
}
else
{
// try this...
cmd = args;
}
// Executes the command
string temp = "";
for (int i = 1; i < cmd.Length; i++)
{
temp += cmd[i] + " ";
}
temp = temp.Trim();
Log.Info($"About to run {cmd[0]} {temp}...");
Process proc = new Process();
proc.StartInfo.FileName = cmd[0];
proc.StartInfo.Arguments = temp;
proc.StartInfo.WindowStyle = ProcessWindowStyle.Hidden;
proc.StartInfo.CreateNoWindow = true;
proc.StartInfo.UseShellExecute = false;
proc.StartInfo.RedirectStandardError = true;
proc.StartInfo.RedirectStandardOutput = true;
if (!string.IsNullOrEmpty(workingDirectory))
{
proc.StartInfo.WorkingDirectory = workingDirectory;
}
proc.Start();
// Consumes the stdout from the process
StreamConsumer stdoutConsumer = new StreamConsumer(this, proc.StandardOutput.BaseStream, StreamTypeStandardOutput);
Thread stdoutConsumerThread = new Thread(stdoutConsumer.Run);
// Consumes the stderr from the process
if (consumeStreams)
{
StreamConsumer stderrConsumer = new StreamConsumer(this, proc.StandardError.BaseStream, StreamTypeError);
Thread stderrConsumerThread = new Thread(stderrConsumer.Run);
stdoutConsumerThread.Start();
stderrConsumerThread.Start();
}
if (wait)
{
proc.WaitForExit();
result = proc.ExitCode;
}
// any error message?
}
catch (Exception x)
{
throw new JobExecutionException("Error launching native command: " + x.Message, x, false);
}
return result;
}
/// <summary>
/// Consumes data from the given input stream until EOF and writes it to the job's log
/// </summary>
/// <author>cooste</author>
/// <author>James House</author>
private class StreamConsumer
{
private readonly NativeJob enclosingInstance;
private readonly Stream inputStream;
private readonly string type;
/// <summary>
/// Initializes a new instance of the <see cref="StreamConsumer"/> class.
/// </summary>
/// <param name="enclosingInstance">The enclosing instance.</param>
/// <param name="inputStream">The input stream.</param>
/// <param name="type">The type.</param>
public StreamConsumer(NativeJob enclosingInstance, Stream inputStream, string type)
{
this.enclosingInstance = enclosingInstance;
this.inputStream = inputStream;
this.type = type;
}
/// <summary>
/// Runs this object as a separate thread, writing the contents of the input stream
/// supplied during instantiation to the job's log (stdout as Info, stderr as Warn).
/// </summary>
public void Run()
{
try
{
using StreamReader br = new StreamReader(inputStream);
string? line;
while ((line = br.ReadLine()) != null)
{
if (type == StreamTypeError)
{
enclosingInstance.Log.Warn($"{type}>{line}");
}
else
{
enclosingInstance.Log.Info($"{type}>{line}");
}
}
}
catch (IOException ioe)
{
enclosingInstance.Log.ErrorException($"Error consuming {type} stream of spawned process.", ioe);
}
}
}
}
}
#if !SILVERLIGHT && !MONOTOUCH && !XBOX
//
// ServiceStack: Useful extensions to simplify parsing xml with XLinq
//
// Authors:
// Demis Bellot ([email protected])
//
// Copyright 2010 Liquidbit Ltd.
//
// Licensed under the same terms as redis and ServiceStack: new BSD license.
//
using System;
using System.Collections.Generic;
using System.Xml;
using System.Xml.Linq;
namespace ServiceStack.ServiceModel
{
public static class XLinqExtensions
{
public static string GetString(this XElement el, string name)
{
return el == null ? null : GetElementValueOrDefault(el, name, x => x.Value);
}
public static string GetStringAttributeOrDefault(this XElement element, string name)
{
var attr = AnyAttribute(element, name);
return attr == null ? null : GetAttributeValueOrDefault(attr, name, x => x.Value);
}
public static bool GetBool(this XElement el, string name)
{
AssertElementHasValue(el, name);
return (bool)GetElement(el, name);
}
public static bool GetBoolOrDefault(this XElement el, string name)
{
return GetElementValueOrDefault(el, name, x => (bool)x);
}
public static bool? GetNullableBool(this XElement el, string name)
{
var childEl = GetElement(el, name);
return childEl == null || string.IsNullOrEmpty(childEl.Value) ? null : (bool?)childEl;
}
public static int GetInt(this XElement el, string name)
{
AssertElementHasValue(el, name);
return (int)GetElement(el, name);
}
public static int GetIntOrDefault(this XElement el, string name)
{
return GetElementValueOrDefault(el, name, x => (int)x);
}
public static int? GetNullableInt(this XElement el, string name)
{
var childEl = GetElement(el, name);
return childEl == null || string.IsNullOrEmpty(childEl.Value) ? null : (int?)childEl;
}
public static long GetLong(this XElement el, string name)
{
AssertElementHasValue(el, name);
return (long)GetElement(el, name);
}
public static long GetLongOrDefault(this XElement el, string name)
{
return GetElementValueOrDefault(el, name, x => (long)x);
}
public static long? GetNullableLong(this XElement el, string name)
{
var childEl = GetElement(el, name);
return childEl == null || string.IsNullOrEmpty(childEl.Value) ? null : (long?)childEl;
}
public static decimal GetDecimal(this XElement el, string name)
{
AssertElementHasValue(el, name);
return (decimal)GetElement(el, name);
}
public static decimal GetDecimalOrDefault(this XElement el, string name)
{
return GetElementValueOrDefault(el, name, x => (decimal)x);
}
public static decimal? GetNullableDecimal(this XElement el, string name)
{
var childEl = GetElement(el, name);
return childEl == null || string.IsNullOrEmpty(childEl.Value) ? null : (decimal?)childEl;
}
public static DateTime GetDateTime(this XElement el, string name)
{
AssertElementHasValue(el, name);
return (DateTime)GetElement(el, name);
}
public static DateTime GetDateTimeOrDefault(this XElement el, string name)
{
return GetElementValueOrDefault(el, name, x => (DateTime)x);
}
public static DateTime? GetNullableDateTime(this XElement el, string name)
{
var childEl = GetElement(el, name);
return childEl == null || string.IsNullOrEmpty(childEl.Value) ? null : (DateTime?)childEl;
}
public static TimeSpan GetTimeSpan(this XElement el, string name)
{
AssertElementHasValue(el, name);
return (TimeSpan)GetElement(el, name);
}
public static TimeSpan GetTimeSpanOrDefault(this XElement el, string name)
{
return GetElementValueOrDefault(el, name, x => (TimeSpan)x);
}
public static TimeSpan? GetNullableTimeSpan(this XElement el, string name)
{
var childEl = GetElement(el, name);
return childEl == null || string.IsNullOrEmpty(childEl.Value) ? null : (TimeSpan?)childEl;
}
public static Guid GetGuid(this XElement el, string name)
{
AssertElementHasValue(el, name);
return (Guid)GetElement(el, name);
}
public static Guid GetGuidOrDefault(this XElement el, string name)
{
return GetElementValueOrDefault(el, name, x => (Guid)x);
}
public static Guid? GetNullableGuid(this XElement el, string name)
{
var childEl = GetElement(el, name);
return childEl == null || string.IsNullOrEmpty(childEl.Value) ? null : (Guid?)childEl;
}
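// Hedged usage sketch: reading typed values with the helpers above. The XML payload is
// illustrative only.
//
//   var customer = XElement.Parse(
//       "<Customer><Id>42</Id><Name>Acme</Name><Active>true</Active></Customer>");
//   int id = customer.GetIntOrDefault("Id");                      // 42
//   string name = customer.GetString("Name");                     // "Acme"
//   bool? active = customer.GetNullableBool("Active");            // true
//   DateTime? created = customer.GetNullableDateTime("Created");  // null (element missing)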
public static T GetElementValueOrDefault<T>(this XElement element, string name, Func<XElement, T> converter)
{
if (converter == null)
{
throw new ArgumentNullException("converter");
}
var el = GetElement(element, name);
return el == null || string.IsNullOrEmpty(el.Value) ? default(T) : converter(el);
}
public static XElement GetElement(this XElement element, string name)
{
if (element == null)
{
throw new ArgumentNullException("element");
}
if (name == null)
{
throw new ArgumentNullException("name");
}
return element.AnyElement(name);
}
public static T GetAttributeValueOrDefault<T>(this XAttribute attr, string name, Func<XAttribute, T> converter)
{
if (converter == null)
{
throw new ArgumentNullException("converter");
}
return attr == null || string.IsNullOrEmpty(attr.Value) ? default(T) : converter(attr);
}
public static void AssertExactlyOneResult(this XElement queryListItems, string referenceNumber, string formType)
{
int count = Convert.ToInt32(queryListItems.AnyAttribute("ItemCount").Value);
if (count == 0)
throw new InvalidOperationException(string.Format("There is no {0} with deal reference number {1}", formType, referenceNumber));
if (count > 1)
throw new InvalidOperationException(
string.Format("There are more than one {0}s with deal reference number {1}", formType, referenceNumber));
}
public static void AssertElementHasValue(this XElement element, string name)
{
if (element == null)
{
throw new ArgumentNullException("element");
}
if (name == null)
{
throw new ArgumentNullException("name");
}
var childEl = element.AnyElement(name);
if (childEl == null || string.IsNullOrEmpty(childEl.Value))
{
throw new ArgumentNullException(name, string.Format("{0} is required", name));
}
}
public static List<string> GetValues(this IEnumerable<XElement> els)
{
var values = new List<string>();
foreach (var el in els)
{
values.Add(el.Value);
}
return values;
}
public static XAttribute AnyAttribute(this XElement element, string name)
{
if (element == null) return null;
foreach (var attribute in element.Attributes())
{
if (attribute.Name.LocalName == name)
{
return attribute;
}
}
return null;
}
public static IEnumerable<XElement> AllElements(this XElement element, string name)
{
var els = new List<XElement>();
if (element == null) return els;
foreach (var node in element.Nodes())
{
if (node.NodeType != XmlNodeType.Element) continue;
var childEl = (XElement)node;
if (childEl.Name.LocalName == name)
{
els.Add(childEl);
}
}
return els;
}
public static XElement AnyElement(this XElement element, string name)
{
if (element == null) return null;
foreach (var node in element.Nodes())
{
if (node.NodeType != XmlNodeType.Element) continue;
var childEl = (XElement)node;
if (childEl.Name.LocalName == name)
{
return childEl;
}
}
return null;
}
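// Hedged sketch: AnyElement and AnyAttribute compare only the LocalName, so lookups
// succeed regardless of the XML namespace of the payload.
//
//   var envelope = XElement.Parse(
//       "<s:Envelope xmlns:s=\"http://schemas.xmlsoap.org/soap/envelope/\"><s:Body /></s:Envelope>");
//   XElement body = envelope.AnyElement("Body");     // found despite the namespace prefix
//   XElement header = envelope.AnyElement("Header"); // null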
public static XElement AnyElement(this IEnumerable<XElement> elements, string name)
{
foreach (var element in elements)
{
if (element.Name.LocalName == name)
{
return element;
}
}
return null;
}
public static IEnumerable<XElement> AllElements(this IEnumerable<XElement> elements, string name)
{
var els = new List<XElement>();
foreach (var element in elements)
{
els.AddRange(AllElements(element, name));
}
return els;
}
public static XElement FirstElement(this XElement element)
{
if (element.FirstNode.NodeType == XmlNodeType.Element)
{
return (XElement)element.FirstNode;
}
return null;
}
}
}
#endif
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel;
using System.Linq;
using Avalonia.Controls.Generators;
using Avalonia.Controls.Selection;
using Avalonia.Data;
using Avalonia.Input;
using Avalonia.Input.Platform;
using Avalonia.Interactivity;
using Avalonia.Threading;
using Avalonia.VisualTree;
#nullable enable
namespace Avalonia.Controls.Primitives
{
/// <summary>
/// An <see cref="ItemsControl"/> that maintains a selection.
/// </summary>
/// <remarks>
/// <para>
/// <see cref="SelectingItemsControl"/> provides a base class for <see cref="ItemsControl"/>s
/// that maintain a selection (single or multiple). By default only its
/// <see cref="SelectedIndex"/> and <see cref="SelectedItem"/> properties are visible; the
/// current multiple <see cref="Selection"/> and <see cref="SelectedItems"/> together with the
/// <see cref="SelectionMode"/> properties are protected, however a derived class can expose
/// these if it wishes to support multiple selection.
/// </para>
/// <para>
/// <see cref="SelectingItemsControl"/> maintains a selection respecting the current
/// <see cref="SelectionMode"/> but it does not react to user input; this must be handled in a
/// derived class. It does, however, respond to <see cref="IsSelectedChangedEvent"/> events
/// from items and updates the selection accordingly.
/// </para>
/// </remarks>
public class SelectingItemsControl : ItemsControl
{
/// <summary>
/// Defines the <see cref="AutoScrollToSelectedItem"/> property.
/// </summary>
public static readonly StyledProperty<bool> AutoScrollToSelectedItemProperty =
AvaloniaProperty.Register<SelectingItemsControl, bool>(
nameof(AutoScrollToSelectedItem),
defaultValue: true);
/// <summary>
/// Defines the <see cref="SelectedIndex"/> property.
/// </summary>
public static readonly DirectProperty<SelectingItemsControl, int> SelectedIndexProperty =
AvaloniaProperty.RegisterDirect<SelectingItemsControl, int>(
nameof(SelectedIndex),
o => o.SelectedIndex,
(o, v) => o.SelectedIndex = v,
unsetValue: -1,
defaultBindingMode: BindingMode.TwoWay);
/// <summary>
/// Defines the <see cref="SelectedItem"/> property.
/// </summary>
public static readonly DirectProperty<SelectingItemsControl, object?> SelectedItemProperty =
AvaloniaProperty.RegisterDirect<SelectingItemsControl, object?>(
nameof(SelectedItem),
o => o.SelectedItem,
(o, v) => o.SelectedItem = v,
defaultBindingMode: BindingMode.TwoWay, enableDataValidation: true);
/// <summary>
/// Defines the <see cref="SelectedItems"/> property.
/// </summary>
protected static readonly DirectProperty<SelectingItemsControl, IList?> SelectedItemsProperty =
AvaloniaProperty.RegisterDirect<SelectingItemsControl, IList?>(
nameof(SelectedItems),
o => o.SelectedItems,
(o, v) => o.SelectedItems = v);
/// <summary>
/// Defines the <see cref="Selection"/> property.
/// </summary>
protected static readonly DirectProperty<SelectingItemsControl, ISelectionModel> SelectionProperty =
AvaloniaProperty.RegisterDirect<SelectingItemsControl, ISelectionModel>(
nameof(Selection),
o => o.Selection,
(o, v) => o.Selection = v);
/// <summary>
/// Defines the <see cref="SelectionMode"/> property.
/// </summary>
protected static readonly StyledProperty<SelectionMode> SelectionModeProperty =
AvaloniaProperty.Register<SelectingItemsControl, SelectionMode>(
nameof(SelectionMode));
/// <summary>
/// Defines the <see cref="IsTextSearchEnabled"/> property.
/// </summary>
public static readonly StyledProperty<bool> IsTextSearchEnabledProperty =
AvaloniaProperty.Register<ItemsControl, bool>(nameof(IsTextSearchEnabled), false);
/// <summary>
/// Event that should be raised by items that implement <see cref="ISelectable"/> to
/// notify the parent <see cref="SelectingItemsControl"/> that their selection state
/// has changed.
/// </summary>
public static readonly RoutedEvent<RoutedEventArgs> IsSelectedChangedEvent =
RoutedEvent.Register<SelectingItemsControl, RoutedEventArgs>(
"IsSelectedChanged",
RoutingStrategies.Bubble);
/// <summary>
/// Defines the <see cref="SelectionChanged"/> event.
/// </summary>
public static readonly RoutedEvent<SelectionChangedEventArgs> SelectionChangedEvent =
RoutedEvent.Register<SelectingItemsControl, SelectionChangedEventArgs>(
"SelectionChanged",
RoutingStrategies.Bubble);
private static readonly IList Empty = Array.Empty<object>();
private string _textSearchTerm = string.Empty;
private DispatcherTimer? _textSearchTimer;
private ISelectionModel? _selection;
private int _oldSelectedIndex;
private object? _oldSelectedItem;
private IList? _oldSelectedItems;
private bool _ignoreContainerSelectionChanged;
private UpdateState? _updateState;
private bool _hasScrolledToSelectedItem;
/// <summary>
/// Initializes static members of the <see cref="SelectingItemsControl"/> class.
/// </summary>
static SelectingItemsControl()
{
IsSelectedChangedEvent.AddClassHandler<SelectingItemsControl>((x, e) => x.ContainerSelectionChanged(e));
}
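// Hedged sketch (not part of this class): a minimal derived control that opts into
// multiple selection and re-exposes the protected members, as the remarks above suggest.
//
//   public class SimpleMultiSelector : SelectingItemsControl
//   {
//       public SimpleMultiSelector() => SelectionMode = SelectionMode.Multiple;
//
//       public new IList? SelectedItems
//       {
//           get => base.SelectedItems;
//           set => base.SelectedItems = value;
//       }
//   }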
/// <summary>
/// Occurs when the control's selection changes.
/// </summary>
public event EventHandler<SelectionChangedEventArgs> SelectionChanged
{
add { AddHandler(SelectionChangedEvent, value); }
remove { RemoveHandler(SelectionChangedEvent, value); }
}
/// <summary>
/// Gets or sets a value indicating whether to automatically scroll to newly selected items.
/// </summary>
public bool AutoScrollToSelectedItem
{
get { return GetValue(AutoScrollToSelectedItemProperty); }
set { SetValue(AutoScrollToSelectedItemProperty, value); }
}
/// <summary>
/// Gets or sets the index of the selected item.
/// </summary>
public int SelectedIndex
{
get
{
// When a Begin/EndInit/DataContext update is in place we return the value to be
// updated here, even though it's not yet active and the property changed notification
// has not yet been raised. If we don't do this then the old value will be written back
// to the source when two-way bound, and the update value will be lost.
return _updateState?.SelectedIndex.HasValue == true ?
_updateState.SelectedIndex.Value :
Selection.SelectedIndex;
}
set
{
if (_updateState is object)
{
_updateState.SelectedIndex = value;
}
else
{
Selection.SelectedIndex = value;
}
}
}
/// <summary>
/// Gets or sets the selected item.
/// </summary>
public object? SelectedItem
{
get
{
// See SelectedIndex setter for more information.
return _updateState?.SelectedItem.HasValue == true ?
_updateState.SelectedItem.Value :
Selection.SelectedItem;
}
set
{
if (_updateState is object)
{
_updateState.SelectedItem = value;
}
else
{
Selection.SelectedItem = value;
}
}
}
/// <summary>
/// Gets or sets the selected items.
/// </summary>
/// <remarks>
/// By default returns a collection that can be modified in order to manipulate the control
/// selection, however this property will return null if <see cref="Selection"/> is
/// re-assigned; you should only use _either_ Selection or SelectedItems.
/// </remarks>
protected IList? SelectedItems
{
get
{
// See SelectedIndex setter for more information.
if (_updateState?.SelectedItems.HasValue == true)
{
return _updateState.SelectedItems.Value;
}
else if (Selection is InternalSelectionModel ism)
{
var result = ism.WritableSelectedItems;
_oldSelectedItems = result;
return result;
}
return null;
}
set
{
if (_updateState is object)
{
_updateState.SelectedItems = new Optional<IList?>(value);
}
else if (Selection is InternalSelectionModel i)
{
i.WritableSelectedItems = value;
}
else
{
throw new InvalidOperationException("Cannot set both Selection and SelectedItems.");
}
}
}
/// <summary>
/// Gets or sets the model that holds the current selection.
/// </summary>
protected ISelectionModel Selection
{
get
{
if (_updateState?.Selection.HasValue == true)
{
return _updateState.Selection.Value;
}
else
{
if (_selection is null)
{
_selection = CreateDefaultSelectionModel();
InitializeSelectionModel(_selection);
}
return _selection;
}
}
set
{
value ??= CreateDefaultSelectionModel();
if (_updateState is object)
{
_updateState.Selection = new Optional<ISelectionModel>(value);
}
else if (_selection != value)
{
if (value.Source != null && value.Source != Items)
{
throw new ArgumentException(
"The supplied ISelectionModel already has an assigned Source but this " +
"collection is different to the Items on the control.");
}
var oldSelection = _selection?.SelectedItems.ToList();
DeinitializeSelectionModel(_selection);
_selection = value;
if (oldSelection?.Count > 0)
{
RaiseEvent(new SelectionChangedEventArgs(
SelectionChangedEvent,
oldSelection,
Array.Empty<object>()));
}
InitializeSelectionModel(_selection);
if (_oldSelectedItems != SelectedItems)
{
RaisePropertyChanged(
SelectedItemsProperty,
new Optional<IList?>(_oldSelectedItems),
new BindingValue<IList?>(SelectedItems));
_oldSelectedItems = SelectedItems;
}
}
}
}
/// <summary>
/// Gets or sets a value that specifies whether a user can jump to a value by typing.
/// </summary>
public bool IsTextSearchEnabled
{
get { return GetValue(IsTextSearchEnabledProperty); }
set { SetValue(IsTextSearchEnabledProperty, value); }
}
/// <summary>
/// Gets or sets the selection mode.
/// </summary>
/// <remarks>
/// Note that the selection mode only applies to selections made via user interaction.
/// Multiple selections can be made programmatically regardless of the value of this property.
/// </remarks>
protected SelectionMode SelectionMode
{
get { return GetValue(SelectionModeProperty); }
set { SetValue(SelectionModeProperty, value); }
}
/// <summary>
/// Gets a value indicating whether <see cref="SelectionMode.AlwaysSelected"/> is set.
/// </summary>
protected bool AlwaysSelected => SelectionMode.HasAllFlags(SelectionMode.AlwaysSelected);
/// <inheritdoc/>
public override void BeginInit()
{
base.BeginInit();
BeginUpdating();
}
/// <inheritdoc/>
public override void EndInit()
{
base.EndInit();
EndUpdating();
}
/// <summary>
/// Scrolls the specified item into view.
/// </summary>
/// <param name="index">The index of the item.</param>
public void ScrollIntoView(int index) => Presenter?.ScrollIntoView(index);
/// <summary>
/// Scrolls the specified item into view.
/// </summary>
/// <param name="item">The item.</param>
public void ScrollIntoView(object item) => ScrollIntoView(IndexOf(Items, item));
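// Hedged usage sketch: bringing the current selection into view from user code, for
// example when AutoScrollToSelectedItem is disabled; "list" is a hypothetical
// SelectingItemsControl-derived instance.
//
//   if (list.SelectedItem is object selected)
//   {
//       list.ScrollIntoView(selected);
//   }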
/// <summary>
/// Tries to get the container that was the source of an event.
/// </summary>
/// <param name="eventSource">The control that raised the event.</param>
/// <returns>The container or null if the event did not originate in a container.</returns>
protected IControl? GetContainerFromEventSource(IInteractive? eventSource)
{
for (var current = eventSource as IVisual; current != null; current = current.VisualParent)
{
if (current is IControl control && control.LogicalParent == this &&
ItemContainerGenerator?.IndexFromContainer(control) != -1)
{
return control;
}
}
return null;
}
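// Hedged sketch: a derived control translating a pointer press on an item container
// into a selection change via GetContainerFromEventSource.
//
//   protected override void OnPointerPressed(PointerPressedEventArgs e)
//   {
//       base.OnPointerPressed(e);
//       var container = GetContainerFromEventSource(e.Source);
//       if (container != null)
//       {
//           SelectedIndex = ItemContainerGenerator.IndexFromContainer(container);
//           e.Handled = true;
//       }
//   }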
protected override void ItemsCollectionChanged(object sender, NotifyCollectionChangedEventArgs e)
{
base.ItemsCollectionChanged(sender, e);
if (AlwaysSelected && SelectedIndex == -1 && ItemCount > 0)
{
SelectedIndex = 0;
}
}
protected override void OnAttachedToVisualTree(VisualTreeAttachmentEventArgs e)
{
base.OnAttachedToVisualTree(e);
AutoScrollToSelectedItemIfNecessary();
}
protected override void OnApplyTemplate(TemplateAppliedEventArgs e)
{
base.OnApplyTemplate(e);
void ExecuteScrollWhenLayoutUpdated(object sender, EventArgs e)
{
LayoutUpdated -= ExecuteScrollWhenLayoutUpdated;
AutoScrollToSelectedItemIfNecessary();
}
if (AutoScrollToSelectedItem)
{
LayoutUpdated += ExecuteScrollWhenLayoutUpdated;
}
}
/// <inheritdoc/>
protected override void OnContainersMaterialized(ItemContainerEventArgs e)
{
base.OnContainersMaterialized(e);
foreach (var container in e.Containers)
{
if ((container.ContainerControl as ISelectable)?.IsSelected == true)
{
Selection.Select(container.Index);
MarkContainerSelected(container.ContainerControl, true);
}
else
{
var selected = Selection.IsSelected(container.Index);
MarkContainerSelected(container.ContainerControl, selected);
}
}
}
/// <inheritdoc/>
protected override void OnContainersDematerialized(ItemContainerEventArgs e)
{
base.OnContainersDematerialized(e);
var panel = (InputElement)Presenter.Panel;
if (panel != null)
{
foreach (var container in e.Containers)
{
if (KeyboardNavigation.GetTabOnceActiveElement(panel) == container.ContainerControl)
{
KeyboardNavigation.SetTabOnceActiveElement(panel, null);
break;
}
}
}
}
protected override void OnContainersRecycled(ItemContainerEventArgs e)
{
foreach (var i in e.Containers)
{
if (i.ContainerControl != null && i.Item != null)
{
bool selected = Selection.IsSelected(i.Index);
MarkContainerSelected(i.ContainerControl, selected);
}
}
}
/// <inheritdoc/>
protected override void OnDataContextBeginUpdate()
{
base.OnDataContextBeginUpdate();
BeginUpdating();
}
/// <inheritdoc/>
protected override void OnDataContextEndUpdate()
{
base.OnDataContextEndUpdate();
EndUpdating();
}
/// <summary>
/// Called to update the validation state for properties for which data validation is
/// enabled.
/// </summary>
/// <param name="property">The property.</param>
/// <param name="value">The new binding value for the property.</param>
protected override void UpdateDataValidation<T>(AvaloniaProperty<T> property, BindingValue<T> value)
{
if (property == SelectedItemProperty)
{
DataValidationErrors.SetError(this, value.Error);
}
}
protected override void OnInitialized()
{
base.OnInitialized();
if (_selection is object)
{
_selection.Source = Items;
}
}
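        // Incremental text search: typed characters accumulate into _textSearchTerm and the first
        // container whose Content starts with the accumulated term (case-insensitive) is selected;
        // the term is cleared when the one-second text-search timer elapses
        // (see StartTextSearchTimer/TextSearchTimer_Tick below).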
protected override void OnTextInput(TextInputEventArgs e)
{
if (!e.Handled)
{
if (!IsTextSearchEnabled)
return;
StopTextSearchTimer();
_textSearchTerm += e.Text;
bool match(ItemContainerInfo info) =>
info.ContainerControl is IContentControl control &&
control.Content?.ToString()?.StartsWith(_textSearchTerm, StringComparison.OrdinalIgnoreCase) == true;
var info = ItemContainerGenerator.Containers.FirstOrDefault(match);
if (info != null)
{
SelectedIndex = info.Index;
}
StartTextSearchTimer();
e.Handled = true;
}
base.OnTextInput(e);
}
protected override void OnKeyDown(KeyEventArgs e)
{
base.OnKeyDown(e);
if (!e.Handled)
{
var keymap = AvaloniaLocator.Current.GetService<PlatformHotkeyConfiguration>();
bool Match(List<KeyGesture> gestures) => gestures.Any(g => g.Matches(e));
if (ItemCount > 0 &&
Match(keymap.SelectAll) &&
SelectionMode.HasAllFlags(SelectionMode.Multiple))
{
Selection.SelectAll();
e.Handled = true;
}
}
}
protected override void OnPropertyChanged<T>(AvaloniaPropertyChangedEventArgs<T> change)
{
base.OnPropertyChanged(change);
if (change.Property == AutoScrollToSelectedItemProperty)
{
AutoScrollToSelectedItemIfNecessary();
}
if (change.Property == ItemsProperty && _updateState is null && _selection is object)
{
var newValue = change.NewValue.GetValueOrDefault<IEnumerable>();
_selection.Source = newValue;
if (newValue is null)
{
_selection.Clear();
}
}
else if (change.Property == SelectionModeProperty && _selection is object)
{
var newValue = change.NewValue.GetValueOrDefault<SelectionMode>();
_selection.SingleSelect = !newValue.HasAllFlags(SelectionMode.Multiple);
}
}
/// <summary>
/// Moves the selection in the specified direction relative to the current selection.
/// </summary>
/// <param name="direction">The direction to move.</param>
/// <param name="wrap">Whether to wrap when the selection reaches the first or last item.</param>
/// <returns>True if the selection was moved; otherwise false.</returns>
protected bool MoveSelection(NavigationDirection direction, bool wrap)
{
var from = SelectedIndex != -1 ? ItemContainerGenerator.ContainerFromIndex(SelectedIndex) : null;
return MoveSelection(from, direction, wrap);
}
/// <summary>
/// Moves the selection in the specified direction relative to the specified container.
/// </summary>
/// <param name="from">The container which serves as a starting point for the movement.</param>
/// <param name="direction">The direction to move.</param>
/// <param name="wrap">Whether to wrap when the selection reaches the first or last item.</param>
/// <returns>True if the selection was moved; otherwise false.</returns>
protected bool MoveSelection(IControl? from, NavigationDirection direction, bool wrap)
{
if (Presenter?.Panel is INavigableContainer container &&
GetNextControl(container, direction, from, wrap) is IControl next)
{
var index = ItemContainerGenerator.IndexFromContainer(next);
if (index != -1)
{
SelectedIndex = index;
return true;
}
}
return false;
}
/// <summary>
/// Updates the selection for an item based on user interaction.
/// </summary>
/// <param name="index">The index of the item.</param>
/// <param name="select">Whether the item should be selected or unselected.</param>
/// <param name="rangeModifier">Whether the range modifier is enabled (i.e. shift key).</param>
/// <param name="toggleModifier">Whether the toggle modifier is enabled (i.e. ctrl key).</param>
/// <param name="rightButton">Whether the event is a right-click.</param>
protected void UpdateSelection(
int index,
bool select = true,
bool rangeModifier = false,
bool toggleModifier = false,
bool rightButton = false)
{
if (index < 0 || index >= ItemCount)
{
return;
}
var mode = SelectionMode;
var multi = mode.HasAllFlags(SelectionMode.Multiple);
var toggle = toggleModifier || mode.HasAllFlags(SelectionMode.Toggle);
var range = multi && rangeModifier;
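            // Precedence of the branches below: explicit deselect, then right-click (which only moves
            // the selection when the item is not already selected), then shift-range, then toggle
            // (ctrl or SelectionMode.Toggle) in multiple-selection mode, then toggle in
            // single-selection mode, then plain single selection.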
if (!select)
{
Selection.Deselect(index);
}
else if (rightButton)
{
if (Selection.IsSelected(index) == false)
{
SelectedIndex = index;
}
}
else if (range)
{
using var operation = Selection.BatchUpdate();
Selection.Clear();
Selection.SelectRange(Selection.AnchorIndex, index);
}
else if (multi && toggle)
{
if (Selection.IsSelected(index) == true)
{
Selection.Deselect(index);
}
else
{
Selection.Select(index);
}
}
else if (toggle)
{
SelectedIndex = (SelectedIndex == index) ? -1 : index;
}
else
{
using var operation = Selection.BatchUpdate();
Selection.Clear();
Selection.Select(index);
}
if (Presenter?.Panel != null)
{
var container = ItemContainerGenerator.ContainerFromIndex(index);
KeyboardNavigation.SetTabOnceActiveElement(
(InputElement)Presenter.Panel,
container);
}
}
/// <summary>
/// Updates the selection for a container based on user interaction.
/// </summary>
/// <param name="container">The container.</param>
/// <param name="select">Whether the container should be selected or unselected.</param>
/// <param name="rangeModifier">Whether the range modifier is enabled (i.e. shift key).</param>
/// <param name="toggleModifier">Whether the toggle modifier is enabled (i.e. ctrl key).</param>
/// <param name="rightButton">Whether the event is a right-click.</param>
protected void UpdateSelection(
IControl container,
bool select = true,
bool rangeModifier = false,
bool toggleModifier = false,
bool rightButton = false)
{
var index = ItemContainerGenerator?.IndexFromContainer(container) ?? -1;
if (index != -1)
{
UpdateSelection(index, select, rangeModifier, toggleModifier, rightButton);
}
}
/// <summary>
/// Updates the selection based on an event that may have originated in a container that
/// belongs to the control.
/// </summary>
/// <param name="eventSource">The control that raised the event.</param>
/// <param name="select">Whether the container should be selected or unselected.</param>
/// <param name="rangeModifier">Whether the range modifier is enabled (i.e. shift key).</param>
/// <param name="toggleModifier">Whether the toggle modifier is enabled (i.e. ctrl key).</param>
/// <param name="rightButton">Whether the event is a right-click.</param>
/// <returns>
/// True if the event originated from a container that belongs to the control; otherwise
/// false.
/// </returns>
protected bool UpdateSelectionFromEventSource(
IInteractive? eventSource,
bool select = true,
bool rangeModifier = false,
bool toggleModifier = false,
bool rightButton = false)
{
var container = GetContainerFromEventSource(eventSource);
if (container != null)
{
UpdateSelection(container, select, rangeModifier, toggleModifier, rightButton);
return true;
}
return false;
}
/// <summary>
/// Called when <see cref="INotifyPropertyChanged.PropertyChanged"/> is raised on
/// <see cref="Selection"/>.
/// </summary>
/// <param name="sender">The sender.</param>
/// <param name="e">The event args.</param>
private void OnSelectionModelPropertyChanged(object sender, PropertyChangedEventArgs e)
{
if (e.PropertyName == nameof(ISelectionModel.AnchorIndex))
{
_hasScrolledToSelectedItem = false;
AutoScrollToSelectedItemIfNecessary();
}
else if (e.PropertyName == nameof(ISelectionModel.SelectedIndex) && _oldSelectedIndex != SelectedIndex)
{
RaisePropertyChanged(SelectedIndexProperty, _oldSelectedIndex, SelectedIndex);
_oldSelectedIndex = SelectedIndex;
}
else if (e.PropertyName == nameof(ISelectionModel.SelectedItem) && _oldSelectedItem != SelectedItem)
{
RaisePropertyChanged(SelectedItemProperty, _oldSelectedItem, SelectedItem);
_oldSelectedItem = SelectedItem;
}
else if (e.PropertyName == nameof(InternalSelectionModel.WritableSelectedItems) &&
_oldSelectedItems != (Selection as InternalSelectionModel)?.SelectedItems)
{
RaisePropertyChanged(
SelectedItemsProperty,
new Optional<IList?>(_oldSelectedItems),
new BindingValue<IList?>(SelectedItems));
_oldSelectedItems = SelectedItems;
}
}
/// <summary>
        /// Called when the <see cref="ISelectionModel.SelectionChanged"/> event is raised on
/// <see cref="Selection"/>.
/// </summary>
/// <param name="sender">The sender.</param>
/// <param name="e">The event args.</param>
private void OnSelectionModelSelectionChanged(object sender, SelectionModelSelectionChangedEventArgs e)
{
void Mark(int index, bool selected)
{
var container = ItemContainerGenerator.ContainerFromIndex(index);
if (container != null)
{
MarkContainerSelected(container, selected);
}
}
foreach (var i in e.SelectedIndexes)
{
Mark(i, true);
}
foreach (var i in e.DeselectedIndexes)
{
Mark(i, false);
}
var route = BuildEventRoute(SelectionChangedEvent);
if (route.HasHandlers)
{
var ev = new SelectionChangedEventArgs(
SelectionChangedEvent,
e.DeselectedItems.ToList(),
e.SelectedItems.ToList());
RaiseEvent(ev);
}
}
/// <summary>
        /// Called when the <see cref="ISelectionModel.LostSelection"/> event is raised on
/// <see cref="Selection"/>.
/// </summary>
/// <param name="sender">The sender.</param>
/// <param name="e">The event args.</param>
private void OnSelectionModelLostSelection(object sender, EventArgs e)
{
if (AlwaysSelected && Items is object)
{
SelectedIndex = 0;
}
}
private void AutoScrollToSelectedItemIfNecessary()
{
if (AutoScrollToSelectedItem &&
!_hasScrolledToSelectedItem &&
Presenter is object &&
Selection.AnchorIndex >= 0 &&
((IVisual)this).IsAttachedToVisualTree)
{
ScrollIntoView(Selection.AnchorIndex);
_hasScrolledToSelectedItem = true;
}
}
/// <summary>
/// Called when a container raises the <see cref="IsSelectedChangedEvent"/>.
/// </summary>
/// <param name="e">The event.</param>
private void ContainerSelectionChanged(RoutedEventArgs e)
{
if (!_ignoreContainerSelectionChanged &&
e.Source is IControl control &&
e.Source is ISelectable selectable &&
control.LogicalParent == this &&
ItemContainerGenerator?.IndexFromContainer(control) != -1)
{
UpdateSelection(control, selectable.IsSelected);
}
if (e.Source != this)
{
e.Handled = true;
}
}
/// <summary>
/// Sets a container's 'selected' class or <see cref="ISelectable.IsSelected"/>.
/// </summary>
/// <param name="container">The container.</param>
/// <param name="selected">Whether the control is selected</param>
/// <returns>The previous selection state.</returns>
private bool MarkContainerSelected(IControl container, bool selected)
{
try
{
bool result;
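                // Re-entrancy guard: setting ISelectable.IsSelected below can raise
                // IsSelectedChangedEvent on the container, and ContainerSelectionChanged skips the
                // notification while this flag is set.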
_ignoreContainerSelectionChanged = true;
if (container is ISelectable selectable)
{
result = selectable.IsSelected;
selectable.IsSelected = selected;
}
else
{
result = container.Classes.Contains(":selected");
((IPseudoClasses)container.Classes).Set(":selected", selected);
}
return result;
}
finally
{
_ignoreContainerSelectionChanged = false;
}
}
private void UpdateContainerSelection()
{
if (Presenter?.Panel is IPanel panel)
{
foreach (var container in panel.Children)
{
MarkContainerSelected(
container,
Selection.IsSelected(ItemContainerGenerator.IndexFromContainer(container)));
}
}
}
private ISelectionModel CreateDefaultSelectionModel()
{
return new InternalSelectionModel
{
SingleSelect = !SelectionMode.HasAllFlags(SelectionMode.Multiple),
};
}
private void InitializeSelectionModel(ISelectionModel model)
{
if (_updateState is null)
{
model.Source = Items;
}
model.PropertyChanged += OnSelectionModelPropertyChanged;
model.SelectionChanged += OnSelectionModelSelectionChanged;
model.LostSelection += OnSelectionModelLostSelection;
if (model.SingleSelect)
{
SelectionMode &= ~SelectionMode.Multiple;
}
else
{
SelectionMode |= SelectionMode.Multiple;
}
_oldSelectedIndex = model.SelectedIndex;
_oldSelectedItem = model.SelectedItem;
if (AlwaysSelected && model.Count == 0)
{
model.SelectedIndex = 0;
}
UpdateContainerSelection();
if (SelectedIndex != -1)
{
RaiseEvent(new SelectionChangedEventArgs(
SelectionChangedEvent,
Array.Empty<object>(),
Selection.SelectedItems.ToList()));
}
}
private void DeinitializeSelectionModel(ISelectionModel? model)
{
if (model is object)
{
model.PropertyChanged -= OnSelectionModelPropertyChanged;
                model.SelectionChanged -= OnSelectionModelSelectionChanged;
                model.LostSelection -= OnSelectionModelLostSelection;
}
}
private void BeginUpdating()
{
_updateState ??= new UpdateState();
_updateState.UpdateCount++;
}
private void EndUpdating()
{
if (_updateState is object && --_updateState.UpdateCount == 0)
{
var state = _updateState;
_updateState = null;
if (state.Selection.HasValue)
{
Selection = state.Selection.Value;
}
if (state.SelectedItems.HasValue)
{
SelectedItems = state.SelectedItems.Value;
}
Selection.Source = Items;
if (Items is null)
{
Selection.Clear();
}
if (state.SelectedIndex.HasValue)
{
SelectedIndex = state.SelectedIndex.Value;
}
else if (state.SelectedItem.HasValue)
{
SelectedItem = state.SelectedItem.Value;
}
}
}
private void StartTextSearchTimer()
{
_textSearchTimer = new DispatcherTimer { Interval = TimeSpan.FromSeconds(1) };
_textSearchTimer.Tick += TextSearchTimer_Tick;
_textSearchTimer.Start();
}
private void StopTextSearchTimer()
{
if (_textSearchTimer == null)
{
return;
}
_textSearchTimer.Tick -= TextSearchTimer_Tick;
_textSearchTimer.Stop();
_textSearchTimer = null;
}
private void TextSearchTimer_Tick(object sender, EventArgs e)
{
_textSearchTerm = string.Empty;
StopTextSearchTimer();
}
// When in a BeginInit..EndInit block, or when the DataContext is updating, we need to
// defer changes to the selection model because we have no idea in which order properties
// will be set. Consider:
//
// - Both Items and SelectedItem are bound
// - The DataContext changes
// - The binding for SelectedItem updates first, producing an item
// - Items is searched to find the index of the new selected item
// - However Items isn't yet updated; the item is not found
// - SelectedIndex is incorrectly set to -1
//
// This logic cannot be encapsulated in SelectionModel because the selection model can also
// be bound, consider:
//
// - Both Items and Selection are bound
// - The DataContext changes
// - The binding for Items updates first
// - The new items are assigned to Selection.Source
// - The binding for Selection updates, producing a new SelectionModel
// - Both the old and new SelectionModels have the incorrect Source
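        //
        // A rough illustration of the deferred flow, assuming the SelectedIndex/SelectedItem setters
        // stash their values in UpdateState while an update is in progress (as EndUpdating applies
        // them below):
        //
        //   control.BeginInit();
        //   control.SelectedItem = newItem;   // captured in _updateState.SelectedItem
        //   control.Items = newItems;         // not pushed to Selection.Source while updating
        //   control.EndInit();                // EndUpdating sets Selection.Source = Items first,
        //                                     // then applies the deferred SelectedItem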
private class UpdateState
{
private Optional<int> _selectedIndex;
private Optional<object?> _selectedItem;
public int UpdateCount { get; set; }
public Optional<ISelectionModel> Selection { get; set; }
public Optional<IList?> SelectedItems { get; set; }
public Optional<int> SelectedIndex
{
get => _selectedIndex;
set
{
_selectedIndex = value;
_selectedItem = default;
}
}
public Optional<object?> SelectedItem
{
get => _selectedItem;
set
{
_selectedItem = value;
_selectedIndex = default;
}
}
}
}
}
| |
/*
* MindTouch Core - open source enterprise collaborative networking
* Copyright (c) 2006-2010 MindTouch Inc.
* www.mindtouch.com [email protected]
*
* For community documentation and downloads visit www.opengarden.org;
* please review the licensing section.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
* http://www.gnu.org/copyleft/gpl.html
*/
using System;
using System.IO;
using MindTouch.Dream;
namespace MindTouch.Deki {
public enum TagType { ALL = -1, TEXT = 0, DATE = 1, USER = 2, DEFINE = 3 };
public enum DekiInstanceStatus : byte {
/// <summary>
/// instance has been created but not yet initialized
/// </summary>
CREATED,
/// <summary>
/// instance is initializing
/// </summary>
INITIALIZING,
/// <summary>
/// instance is serving requests, but some services may not be ready yet
/// </summary>
STARTING_SERVICES,
/// <summary>
/// instance has been initialized and is ready to serve requests
/// </summary>
RUNNING,
/// <summary>
/// (unused)
/// </summary>
STOPPED,
/// <summary>
/// instance has failed to initialize and will reject requests
/// </summary>
ABANDONED
}
public enum RC : uint {
// TODO (steveb): add rc_type for file restore, upload, delete, move, description, wipe
// page related changes
EDIT = 0,
NEW = 1,
MOVE = 2,
LOG = 3,
MOVE_OVER_REDIRECT = 4, // NOTE (steveb): not used, maintained for backwards compatibility
PAGEDELETED = 5,
PAGERESTORED = 6,
// 4x : comment related changes
COMMENT_CREATE = 40,
COMMENT_UPDATE = 41,
COMMENT_DELETE = 42,
// 5x: misc. changes
FILE = 50,
PAGEMETA = 51, // NOTE (steveb): only used to track page language changes since 9.02
TAGS = 52,
GRANTS_ADDED = 54,
GRANTS_REMOVED = 55,
RESTRICTION_UPDATED = 56,
// 6x : user related changes
USER_CREATED = 60,
}
public enum FeedFormat {
ATOM_DAILY,
RAW,
RAW_DAILY,
ATOM_ALL
}
public enum RatioType {
UNDEFINED,
FIXED,
VARIABLE
};
public enum SizeType {
UNDEFINED,
ORIGINAL,
THUMB,
WEBVIEW,
BESTFIT,
CUSTOM
};
public enum FormatType {
UNDEFINED,
JPG,
PNG,
BMP,
GIF
};
public enum GrantType : byte {
GROUP,
USER,
UNDEFINED
};
public enum RoleType : byte {
ROLE,
RESTRICTION,
UNDEFINED
};
public enum ServiceType {
AUTH,
EXT,
UNDEFINED
};
public enum ParserMode {
EDIT,
RAW,
VIEW,
VIEW_NO_EXECUTE,
SAVE
}
[Flags]
public enum Permissions : ulong {
NONE = 0,
LOGIN = 1, // able to log in
BROWSE = 2, // can see page title in navigation
READ = 4, // Can see page and attachment contents
SUBSCRIBE = 8, // subscribe to page changes/rss
UPDATE = 16, // Can edit an existing page and work with attachments
CREATE = 32, // create new page
DELETE = 256, // delete a file or page
CHANGEPERMISSIONS = 1024, // change page permissions/grants
CONTROLPANEL = 2048, // can access the control panel
UNSAFECONTENT = 4096, // can write unsafe content (e.g. <script>, <embed>, <form>, etc.)
ADMIN = 0x8000000000000000UL
}
public static class PermissionSets {
//--- Constants ---
public const Permissions PAGE_INDEPENDENT = Permissions.ADMIN | Permissions.CONTROLPANEL | Permissions.LOGIN;
public const Permissions INVALID_LICENSE_REVOKE_LIST = Permissions.LOGIN;
public const Permissions MINIMAL_ANONYMOUS_PERMISSIONS = Permissions.LOGIN;
public const Permissions ALL = (Permissions)0xFFFFFFFFFFFFFFFFUL;
}
[Serializable]
public struct PermissionStruct {
public PermissionStruct(Permissions userPermissions, Permissions pageRestrictionsMask, Permissions pageGrantPermissions) {
UserPermissions = userPermissions;
PageRestrictionsMask = pageRestrictionsMask;
PageGrantPermissions = pageGrantPermissions;
}
public PermissionStruct(ulong userPermissions, ulong pageRestrictionsMask, ulong pageGrantPermissions) {
UserPermissions = (Permissions)userPermissions;
PageRestrictionsMask = (Permissions)pageRestrictionsMask;
PageGrantPermissions = (Permissions)pageGrantPermissions;
}
public Permissions UserPermissions;
public Permissions PageRestrictionsMask;
public Permissions PageGrantPermissions;
}
public enum CascadeType : byte {
NONE, //Permissions are not cascaded to child pages
DELTA, //Changes between given page's security and proposed security cascaded to child nodes
ABSOLUTE //Proposed security is set on child pages
}
public class DekiMimeType {
//--- Constants ---
public const string MEDIAWIKI_TEXT = "text/x.mediawiki";
public const string DEKI_TEXT = "application/x.deki-text";
public const string DEKI_XML0702 = "application/x.deki0702+xml";
public const string DEKI_XML0805 = "application/x.deki0805+xml";
public const string HTML_TEXT = "text/html";
}
public class TagPrefix {
//--- Constants ---
public static readonly string USER = "@";
public static readonly string DEFINE = "define:";
public static readonly string DATE = "date:";
public static readonly string TEXT = String.Empty;
}
public class Role {
// TODO (MaxM): These names should never be hardcoded
// BUGBUGBUG (steveb): roles must be localizable
public const string CONTRIBUTOR = "Contributor";
}
public class ConfigValue {
//--- Fields ---
public readonly string Value;
public bool IsReadOnly;
public bool IsHidden;
//--- Constructors ---
public ConfigValue(string value) : this(value, false, false) { }
public ConfigValue(string value, bool readOnly, bool hidden) {
this.Value = value;
this.IsReadOnly = readOnly;
this.IsHidden = hidden;
}
//--- Methods ---
public override string ToString() {
return Value;
}
}
public enum CommentFilter {
ANY = 0,
DELETED = 1,
NONDELETED = 2
};
public enum DeletionFilter : int {
ANY = -1,
DELETEDONLY = 1,
ACTIVEONLY = 0
};
public enum SortDirection : byte {
UNDEFINED,
ASC,
DESC
};
public enum GroupsSortField {
UNDEFINED,
ID,
NAME,
ROLE,
SERVICE
}
public enum ServicesSortField {
UNDEFINED,
DESCRIPTION,
ID,
INIT,
LOCAL,
SID,
TYPE,
URI
}
public enum UsersSortField {
UNDEFINED,
DATE_CREATED,
EMAIL,
FULLNAME,
ID,
DATE_LASTLOGIN,
NICK,
ROLE,
SERVICE,
STATUS,
USERNAME
}
public class SetDiscriminator {
//--- Fields ---
public uint Offset;
public uint Limit;
public bool Ascending = true;
public string SortField;
//--- Properties ---
public string SortBy {
get {
return Ascending || string.IsNullOrEmpty(SortField) ? SortField : "-" + SortField;
}
}
//--- Methods ---
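        // Example (illustrative): SetSortInfo("-username", "id") yields Ascending == false and
        // SortField == "username", so SortBy returns "-username"; SetSortInfo(null, "id") falls back
        // to an ascending sort on "id".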
public void SetSortInfo(string sortBy, string defaultSortBy) {
if(string.IsNullOrEmpty(defaultSortBy)) {
throw new ArgumentNullException("defaultSortBy");
}
sortBy = sortBy.IfNullOrEmpty(defaultSortBy);
if(sortBy.StartsWith("-")) {
Ascending = false;
SortField = sortBy.Substring(1);
} else {
Ascending = true;
SortField = sortBy;
}
}
}
[Serializable]
public class ResourceIdMapping {
public readonly uint? ResourceId;
public readonly uint? FileId;
public readonly uint? PageId;
public ResourceIdMapping(uint? resourceId, uint? fileId, uint? pageId) {
this.ResourceId = resourceId;
this.FileId = fileId;
this.PageId = pageId;
}
public ResourceIdMapping Copy() {
return new ResourceIdMapping(ResourceId, FileId, PageId);
}
}
public class StreamInfo : IDisposable {
//--- Fields ---
public readonly Stream Stream;
public readonly long Length;
public readonly MimeType Type;
public readonly DateTime? Modified;
public readonly XUri Uri;
//--- Constructors ---
public StreamInfo(Stream stream, long size) : this(stream, size, null, null) { }
public StreamInfo(Stream stream, long size, MimeType type) : this(stream, size, type, null) { }
public StreamInfo(Stream stream, long size, MimeType type, DateTime? modified) {
if(stream == null) {
throw new ArgumentNullException("stream");
}
this.Stream = stream;
this.Length = size;
this.Type = type ?? MimeType.BINARY;
this.Modified = modified;
}
public StreamInfo(XUri uri) {
this.Uri = uri;
}
//--- Methods ---
public void Close() {
if(Stream != null) {
Stream.Close();
}
}
public void Dispose() {
Close();
}
}
[Serializable]
public class PageTextContainer {
//--- Fields ---
public readonly ulong PageId;
public readonly string Text;
public readonly DateTime TimeStamp;
//--- Constructors ---
public PageTextContainer(ulong pageId, string text, DateTime timestamp) {
this.PageId = pageId;
this.Text = text;
this.TimeStamp = timestamp;
}
}
[Serializable]
public class UserPagePermissionContainer {
//--- Fields ---
public readonly PermissionStruct Permission;
public readonly uint UserId;
public readonly ulong PageId;
//--- Constructors ---
public UserPagePermissionContainer(uint userId, ulong pageId) {
this.UserId = userId;
this.PageId = pageId;
}
public UserPagePermissionContainer(uint userId, ulong pageId, PermissionStruct Permission) {
this.UserId = userId;
this.PageId = pageId;
this.Permission = Permission;
}
}
}
| |
#region License
//
// Copyright (c) 2007-2018, Sean Chambers <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using FluentMigrator.Expressions;
using FluentMigrator.Runner.Announcers;
using FluentMigrator.Runner.Generators;
using FluentMigrator.Runner.Initialization;
using FluentMigrator.Runner.Logging;
using JetBrains.Annotations;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace FluentMigrator.Runner.Processors
{
public class ConnectionlessProcessor: IMigrationProcessor
{
[NotNull] private readonly ILogger _logger;
#pragma warning disable 612
[Obsolete]
private readonly IMigrationProcessorOptions _legacyOptions;
#pragma warning restore 612
[Obsolete]
public ConnectionlessProcessor(
IMigrationGenerator generator,
IRunnerContext context,
IMigrationProcessorOptions options)
{
_logger = new AnnouncerFluentMigratorLogger(context.Announcer);
_legacyOptions = options;
DatabaseType = context.Database;
Generator = generator;
Announcer = context.Announcer;
Options = options.GetProcessorOptions(connectionString: null);
}
public ConnectionlessProcessor(
[NotNull] IGeneratorAccessor generatorAccessor,
[NotNull] ILogger logger,
[NotNull] IOptionsSnapshot<ProcessorOptions> options,
[NotNull] IOptions<SelectingProcessorAccessorOptions> accessorOptions)
{
_logger = logger;
var generator = generatorAccessor.Generator;
DatabaseType = string.IsNullOrEmpty(accessorOptions.Value.ProcessorId) ? generator.GetName() : accessorOptions.Value.ProcessorId;
Generator = generator;
Options = options.Value;
#pragma warning disable 612
Announcer = new LoggerAnnouncer(logger, new AnnouncerOptions() { ShowElapsedTime = true, ShowSql = true });
_legacyOptions = options.Value;
#pragma warning restore 612
}
public ConnectionlessProcessor(
[NotNull] IGeneratorAccessor generatorAccessor,
[NotNull] ILogger logger,
[NotNull] IOptionsSnapshot<ProcessorOptions> options,
[NotNull] IReadOnlyCollection<string> processorIds)
{
_logger = logger;
var generator = generatorAccessor.Generator;
DatabaseType = processorIds.FirstOrDefault() ?? generator.GetName();
DatabaseTypeAliases = processorIds.Count == 0 ? Array.Empty<string>() : processorIds.Skip(1).ToArray();
Generator = generator;
Options = options.Value;
#pragma warning disable 612
Announcer = new LoggerAnnouncer(logger, AnnouncerOptions.AllEnabled);
_legacyOptions = options.Value;
#pragma warning restore 612
}
[Obsolete("Will change from public to protected")]
public string ConnectionString { get; } = "No connection";
public IMigrationGenerator Generator { get; set; }
[Obsolete]
public IAnnouncer Announcer { get; set; }
        public ProcessorOptions Options { get; set; }
[Obsolete]
IMigrationProcessorOptions IMigrationProcessor.Options => _legacyOptions;
/// <inheritdoc />
public void Execute(string sql)
{
Process(sql);
}
public void Execute(string template, params object[] args)
{
Process(string.Format(template, args));
}
public DataSet ReadTableData(string schemaName, string tableName)
{
throw new NotImplementedException("Method is not supported by the connectionless processor");
}
public DataSet Read(string template, params object[] args)
{
throw new NotImplementedException("Method is not supported by the connectionless processor");
}
public bool Exists(string template, params object[] args)
{
throw new NotImplementedException("Method is not supported by the connectionless processor");
}
public void BeginTransaction()
{
}
public void CommitTransaction()
{
}
public void RollbackTransaction()
{
}
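        // The connectionless processor never talks to a database: generated SQL is only written to
        // the logger, which is why the transaction methods above are no-ops and the Read*/Exists
        // methods throw NotImplementedException.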
protected void Process(string sql)
{
_logger.LogSql(sql);
}
public void Process(CreateSchemaExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(DeleteSchemaExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(AlterTableExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(AlterColumnExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(CreateTableExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(CreateColumnExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(DeleteTableExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(DeleteColumnExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(CreateForeignKeyExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(DeleteForeignKeyExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(CreateIndexExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(DeleteIndexExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(RenameTableExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(RenameColumnExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(InsertDataExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(AlterDefaultConstraintExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(PerformDBOperationExpression expression)
{
_logger.LogSay("Performing DB Operation");
}
public void Process(DeleteDataExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(UpdateDataExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(AlterSchemaExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(CreateSequenceExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(DeleteSequenceExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(CreateConstraintExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(DeleteConstraintExpression expression)
{
Process(Generator.Generate(expression));
}
public void Process(DeleteDefaultConstraintExpression expression)
{
Process(Generator.Generate(expression));
}
public bool SchemaExists(string schemaName)
{
throw new NotImplementedException("Method is not supported by the connectionless processor");
}
public bool TableExists(string schemaName, string tableName)
{
throw new NotImplementedException("Method is not supported by the connectionless processor");
}
public bool ColumnExists(string schemaName, string tableName, string columnName)
{
throw new NotImplementedException("Method is not supported by the connectionless processor");
}
public bool ConstraintExists(string schemaName, string tableName, string constraintName)
{
throw new NotImplementedException("Method is not supported by the connectionless processor");
}
public bool IndexExists(string schemaName, string tableName, string indexName)
{
throw new NotImplementedException("Method is not supported by the connectionless processor");
}
public bool SequenceExists(string schemaName, string sequenceName)
{
throw new NotImplementedException("Method is not supported by the connectionless processor");
}
public bool DefaultValueExists(string schemaName, string tableName, string columnName, object defaultValue)
{
throw new NotImplementedException("Method is not supported by the connectionless processor");
}
#pragma warning disable 618
public string DatabaseType { get; }
#pragma warning restore 618
public IList<string> DatabaseTypeAliases { get; } = new List<string>();
public void Dispose()
{
}
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="TransactionalRoot.cs" company="Marimer LLC">
// Copyright (c) Marimer LLC. All rights reserved.
// Website: https://cslanet.com
// </copyright>
// <summary>no summary</summary>
//-----------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Configuration;
namespace Csla.Test.DataPortal
{
[Serializable()]
public class TransactionalRoot : BusinessBase<TransactionalRoot>
{
#region "Business methods"
        // TODO: get the ConfigurationManager to work right
//public static string CONNECTION_STRING = WellKnownValues.ConnectionStrings.DataPortalTestDatabase;
public static PropertyInfo<int> IDProperty = RegisterProperty<int>(c => c.ID);
public int ID
{
get { return GetProperty(IDProperty); }
private set { LoadProperty(IDProperty, value); }
}
public static PropertyInfo<string> FirstNameProperty = RegisterProperty<string>(c => c.FirstName);
public string FirstName
{
get { return GetProperty(FirstNameProperty); }
set { SetProperty(FirstNameProperty, value); }
}
public static PropertyInfo<string> LastNameProperty = RegisterProperty<string>(c => c.LastName);
public string LastName
{
get { return GetProperty(LastNameProperty); }
set { SetProperty(LastNameProperty, value); }
}
public static PropertyInfo<string> SmallColumnProperty = RegisterProperty<string>(c => c.SmallColumn);
public string SmallColumn
{
get { return GetProperty(SmallColumnProperty); }
set { SetProperty(SmallColumnProperty, value); }
}
#endregion
protected override void AddBusinessRules()
{
//normally, we would add a rule that prevents SmallColumn from being too long
//but to easily test the transactional functionality of the server-side dataportal
//we are going to allow strings that are longer than what the database allows
}
#region "Factory Methods"
public static TransactionalRoot NewTransactionalRoot(IDataPortal<TransactionalRoot> dataPortal)
{
return dataPortal.Create();
}
public static TransactionalRoot GetTransactionalRoot(int ID, IDataPortal<TransactionalRoot> dataPortal)
{
return dataPortal.Fetch(new Criteria(ID));
}
public static void DeleteTransactionalRoot(int ID, IDataPortal<TransactionalRoot> dataPortal)
{
dataPortal.Delete(new Criteria(ID));
}
#endregion
#region "Criteria"
[Serializable()]
private class Criteria
{
public int _id;
public Criteria(int id)
{
this._id = id;
}
}
#endregion
#region "Data Access"
[RunLocal()]
[Create]
protected void DataPortal_Create()
{
TestResults.Reinitialise();
TestResults.Add("TransactionalRoot", "Created");
BusinessRules.CheckRules();
Console.WriteLine("DataPortal_Create");
}
protected void DataPortal_Fetch(object criteria)
{
Criteria crit = (Criteria)(criteria);
if (crit._id == 13)
{
throw new System.ApplicationException("DataPortal_Fetch: you chose an unlucky number");
}
Console.WriteLine("DataPortal_Fetch");
TestResults.Reinitialise();
TestResults.Add("TransactionalRoot", "Fetched");
BusinessRules.CheckRules();
}
[Transactional(TransactionalTypes.TransactionScope)]
[Insert]
protected void DataPortal_Insert()
{
SqlConnection cn = new SqlConnection(WellKnownValues.DataPortalTestDatabase);
string firstName = this.FirstName;
string lastName = this.LastName;
string smallColumn = this.SmallColumn;
//this command will always execute successfully
//since it inserts a string less than 5 characters
//into SmallColumn
SqlCommand cm1 = new SqlCommand();
cm1.Connection = cn;
cm1.CommandText = "INSERT INTO Table2(FirstName, LastName, SmallColumn) VALUES('Bill', 'Thompson', 'abc')";
//this command will throw an exception
//if SmallColumn is set to a string longer than
//5 characters
SqlCommand cm2 = new SqlCommand();
cm2.Connection = cn;
            // build the command text by concatenating the raw property values
cm2.CommandText = "INSERT INTO Table2(FirstName, LastName, SmallColumn) VALUES('";
cm2.CommandText += firstName;
cm2.CommandText += "', '" + lastName + "', '" + smallColumn + "')";
cn.Open();
cm1.ExecuteNonQuery();
cm2.ExecuteNonQuery();
cn.Close();
TestResults.Reinitialise();
TestResults.Add("TransactionalRoot", "Inserted");
Console.WriteLine("DataPortal_Insert");
}
[Transactional(TransactionalTypes.TransactionScope)]
[Update]
protected void DataPortal_Update()
{
Console.WriteLine("DataPortal_Update");
TestResults.Reinitialise();
TestResults.Add("TransactionalRoot", "Updated");
}
[DeleteSelf]
protected void DataPortal_DeleteSelf()
{
Console.WriteLine("DataPortal_DeleteSelf");
TestResults.Reinitialise();
TestResults.Add("TransactionalRoot", "Deleted Self");
}
[Delete]
protected void DataPortal_Delete(object criteria)
{
Criteria crit = (Criteria)(criteria);
if (crit._id == 13)
{
throw new System.ApplicationException("DataPortal_Delete: you chose an unlucky number");
}
Console.WriteLine("DataPortal_Delete");
TestResults.Reinitialise();
TestResults.Add("TransactionRoot", "Deleted");
}
#endregion
#region "DataPortalException"
protected override void DataPortal_OnDataPortalException(DataPortalEventArgs e, Exception ex)
{
TestResults.Reinitialise();
TestResults.Add("OnDataPortalException", "Called");
Console.WriteLine("OnDataPortalException called");
}
#endregion
}
}
| |
/*
* Copyright (c) Citrix Systems, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1) Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2) Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;
using Newtonsoft.Json;
namespace XenAPI
{
/// <summary>
/// VM Snapshot Schedule
/// First published in XenServer 7.2.
/// </summary>
public partial class VMSS : XenObject<VMSS>
{
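        // Each API wrapper below dispatches to the session's JSON-RPC client when one is available
        // and falls back to the XML-RPC proxy otherwise.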
#region Constructors
public VMSS()
{
}
public VMSS(string uuid,
string name_label,
string name_description,
bool enabled,
vmss_type type,
long retained_snapshots,
vmss_frequency frequency,
Dictionary<string, string> schedule,
DateTime last_run_time,
List<XenRef<VM>> VMs)
{
this.uuid = uuid;
this.name_label = name_label;
this.name_description = name_description;
this.enabled = enabled;
this.type = type;
this.retained_snapshots = retained_snapshots;
this.frequency = frequency;
this.schedule = schedule;
this.last_run_time = last_run_time;
this.VMs = VMs;
}
/// <summary>
/// Creates a new VMSS from a Hashtable.
/// Note that the fields not contained in the Hashtable
/// will be created with their default values.
/// </summary>
/// <param name="table"></param>
public VMSS(Hashtable table)
: this()
{
UpdateFrom(table);
}
/// <summary>
/// Creates a new VMSS from a Proxy_VMSS.
/// </summary>
/// <param name="proxy"></param>
public VMSS(Proxy_VMSS proxy)
{
UpdateFrom(proxy);
}
#endregion
/// <summary>
/// Updates each field of this instance with the value of
/// the corresponding field of a given VMSS.
/// </summary>
public override void UpdateFrom(VMSS update)
{
uuid = update.uuid;
name_label = update.name_label;
name_description = update.name_description;
enabled = update.enabled;
type = update.type;
retained_snapshots = update.retained_snapshots;
frequency = update.frequency;
schedule = update.schedule;
last_run_time = update.last_run_time;
VMs = update.VMs;
}
internal void UpdateFrom(Proxy_VMSS proxy)
{
uuid = proxy.uuid == null ? null : proxy.uuid;
name_label = proxy.name_label == null ? null : proxy.name_label;
name_description = proxy.name_description == null ? null : proxy.name_description;
enabled = (bool)proxy.enabled;
type = proxy.type == null ? (vmss_type) 0 : (vmss_type)Helper.EnumParseDefault(typeof(vmss_type), (string)proxy.type);
retained_snapshots = proxy.retained_snapshots == null ? 0 : long.Parse(proxy.retained_snapshots);
frequency = proxy.frequency == null ? (vmss_frequency) 0 : (vmss_frequency)Helper.EnumParseDefault(typeof(vmss_frequency), (string)proxy.frequency);
schedule = proxy.schedule == null ? null : Maps.convert_from_proxy_string_string(proxy.schedule);
last_run_time = proxy.last_run_time;
VMs = proxy.VMs == null ? null : XenRef<VM>.Create(proxy.VMs);
}
public Proxy_VMSS ToProxy()
{
Proxy_VMSS result_ = new Proxy_VMSS();
result_.uuid = uuid ?? "";
result_.name_label = name_label ?? "";
result_.name_description = name_description ?? "";
result_.enabled = enabled;
result_.type = vmss_type_helper.ToString(type);
result_.retained_snapshots = retained_snapshots.ToString();
result_.frequency = vmss_frequency_helper.ToString(frequency);
result_.schedule = Maps.convert_to_proxy_string_string(schedule);
result_.last_run_time = last_run_time;
result_.VMs = VMs == null ? new string[] {} : Helper.RefListToStringArray(VMs);
return result_;
}
/// <summary>
/// Given a Hashtable with field-value pairs, it updates the fields of this VMSS
/// with the values listed in the Hashtable. Note that only the fields contained
/// in the Hashtable will be updated and the rest will remain the same.
/// </summary>
/// <param name="table"></param>
public void UpdateFrom(Hashtable table)
{
if (table.ContainsKey("uuid"))
uuid = Marshalling.ParseString(table, "uuid");
if (table.ContainsKey("name_label"))
name_label = Marshalling.ParseString(table, "name_label");
if (table.ContainsKey("name_description"))
name_description = Marshalling.ParseString(table, "name_description");
if (table.ContainsKey("enabled"))
enabled = Marshalling.ParseBool(table, "enabled");
if (table.ContainsKey("type"))
type = (vmss_type)Helper.EnumParseDefault(typeof(vmss_type), Marshalling.ParseString(table, "type"));
if (table.ContainsKey("retained_snapshots"))
retained_snapshots = Marshalling.ParseLong(table, "retained_snapshots");
if (table.ContainsKey("frequency"))
frequency = (vmss_frequency)Helper.EnumParseDefault(typeof(vmss_frequency), Marshalling.ParseString(table, "frequency"));
if (table.ContainsKey("schedule"))
schedule = Maps.convert_from_proxy_string_string(Marshalling.ParseHashTable(table, "schedule"));
if (table.ContainsKey("last_run_time"))
last_run_time = Marshalling.ParseDateTime(table, "last_run_time");
if (table.ContainsKey("VMs"))
VMs = Marshalling.ParseSetRef<VM>(table, "VMs");
}
public bool DeepEquals(VMSS other)
{
if (ReferenceEquals(null, other))
return false;
if (ReferenceEquals(this, other))
return true;
return Helper.AreEqual2(this._uuid, other._uuid) &&
Helper.AreEqual2(this._name_label, other._name_label) &&
Helper.AreEqual2(this._name_description, other._name_description) &&
Helper.AreEqual2(this._enabled, other._enabled) &&
Helper.AreEqual2(this._type, other._type) &&
Helper.AreEqual2(this._retained_snapshots, other._retained_snapshots) &&
Helper.AreEqual2(this._frequency, other._frequency) &&
Helper.AreEqual2(this._schedule, other._schedule) &&
Helper.AreEqual2(this._last_run_time, other._last_run_time) &&
Helper.AreEqual2(this._VMs, other._VMs);
}
internal static List<VMSS> ProxyArrayToObjectList(Proxy_VMSS[] input)
{
var result = new List<VMSS>();
foreach (var item in input)
result.Add(new VMSS(item));
return result;
}
public override string SaveChanges(Session session, string opaqueRef, VMSS server)
{
if (opaqueRef == null)
{
var reference = create(session, this);
return reference == null ? null : reference.opaque_ref;
}
else
{
if (!Helper.AreEqual2(_name_label, server._name_label))
{
VMSS.set_name_label(session, opaqueRef, _name_label);
}
if (!Helper.AreEqual2(_name_description, server._name_description))
{
VMSS.set_name_description(session, opaqueRef, _name_description);
}
if (!Helper.AreEqual2(_enabled, server._enabled))
{
VMSS.set_enabled(session, opaqueRef, _enabled);
}
if (!Helper.AreEqual2(_type, server._type))
{
VMSS.set_type(session, opaqueRef, _type);
}
if (!Helper.AreEqual2(_retained_snapshots, server._retained_snapshots))
{
VMSS.set_retained_snapshots(session, opaqueRef, _retained_snapshots);
}
if (!Helper.AreEqual2(_frequency, server._frequency))
{
VMSS.set_frequency(session, opaqueRef, _frequency);
}
if (!Helper.AreEqual2(_schedule, server._schedule))
{
VMSS.set_schedule(session, opaqueRef, _schedule);
}
return null;
}
}
/// <summary>
/// Get a record containing the current state of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
public static VMSS get_record(Session session, string _vmss)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_get_record(session.opaque_ref, _vmss);
else
return new VMSS(session.XmlRpcProxy.vmss_get_record(session.opaque_ref, _vmss ?? "").parse());
}
/// <summary>
/// Get a reference to the VMSS instance with the specified UUID.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_uuid">UUID of object to return</param>
public static XenRef<VMSS> get_by_uuid(Session session, string _uuid)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_get_by_uuid(session.opaque_ref, _uuid);
else
return XenRef<VMSS>.Create(session.XmlRpcProxy.vmss_get_by_uuid(session.opaque_ref, _uuid ?? "").parse());
}
/// <summary>
/// Create a new VMSS instance, and return its handle.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_record">All constructor arguments</param>
public static XenRef<VMSS> create(Session session, VMSS _record)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_create(session.opaque_ref, _record);
else
return XenRef<VMSS>.Create(session.XmlRpcProxy.vmss_create(session.opaque_ref, _record.ToProxy()).parse());
}
/// <summary>
/// Create a new VMSS instance, and return its handle.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_record">All constructor arguments</param>
public static XenRef<Task> async_create(Session session, VMSS _record)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.async_vmss_create(session.opaque_ref, _record);
else
return XenRef<Task>.Create(session.XmlRpcProxy.async_vmss_create(session.opaque_ref, _record.ToProxy()).parse());
}
/// <summary>
/// Destroy the specified VMSS instance.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
public static void destroy(Session session, string _vmss)
{
if (session.JsonRpcClient != null)
session.JsonRpcClient.vmss_destroy(session.opaque_ref, _vmss);
else
session.XmlRpcProxy.vmss_destroy(session.opaque_ref, _vmss ?? "").parse();
}
/// <summary>
/// Destroy the specified VMSS instance.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
public static XenRef<Task> async_destroy(Session session, string _vmss)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.async_vmss_destroy(session.opaque_ref, _vmss);
else
return XenRef<Task>.Create(session.XmlRpcProxy.async_vmss_destroy(session.opaque_ref, _vmss ?? "").parse());
}
/// <summary>
/// Get all the VMSS instances with the given label.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_label">label of object to return</param>
public static List<XenRef<VMSS>> get_by_name_label(Session session, string _label)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_get_by_name_label(session.opaque_ref, _label);
else
return XenRef<VMSS>.Create(session.XmlRpcProxy.vmss_get_by_name_label(session.opaque_ref, _label ?? "").parse());
}
/// <summary>
/// Get the uuid field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
public static string get_uuid(Session session, string _vmss)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_get_uuid(session.opaque_ref, _vmss);
else
return session.XmlRpcProxy.vmss_get_uuid(session.opaque_ref, _vmss ?? "").parse();
}
/// <summary>
/// Get the name/label field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
public static string get_name_label(Session session, string _vmss)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_get_name_label(session.opaque_ref, _vmss);
else
return session.XmlRpcProxy.vmss_get_name_label(session.opaque_ref, _vmss ?? "").parse();
}
/// <summary>
/// Get the name/description field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
public static string get_name_description(Session session, string _vmss)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_get_name_description(session.opaque_ref, _vmss);
else
return session.XmlRpcProxy.vmss_get_name_description(session.opaque_ref, _vmss ?? "").parse();
}
/// <summary>
/// Get the enabled field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
public static bool get_enabled(Session session, string _vmss)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_get_enabled(session.opaque_ref, _vmss);
else
return (bool)session.XmlRpcProxy.vmss_get_enabled(session.opaque_ref, _vmss ?? "").parse();
}
/// <summary>
/// Get the type field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
public static vmss_type get_type(Session session, string _vmss)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_get_type(session.opaque_ref, _vmss);
else
return (vmss_type)Helper.EnumParseDefault(typeof(vmss_type), (string)session.XmlRpcProxy.vmss_get_type(session.opaque_ref, _vmss ?? "").parse());
}
/// <summary>
/// Get the retained_snapshots field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
public static long get_retained_snapshots(Session session, string _vmss)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_get_retained_snapshots(session.opaque_ref, _vmss);
else
return long.Parse(session.XmlRpcProxy.vmss_get_retained_snapshots(session.opaque_ref, _vmss ?? "").parse());
}
/// <summary>
/// Get the frequency field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
public static vmss_frequency get_frequency(Session session, string _vmss)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_get_frequency(session.opaque_ref, _vmss);
else
return (vmss_frequency)Helper.EnumParseDefault(typeof(vmss_frequency), (string)session.XmlRpcProxy.vmss_get_frequency(session.opaque_ref, _vmss ?? "").parse());
}
/// <summary>
/// Get the schedule field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
public static Dictionary<string, string> get_schedule(Session session, string _vmss)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_get_schedule(session.opaque_ref, _vmss);
else
return Maps.convert_from_proxy_string_string(session.XmlRpcProxy.vmss_get_schedule(session.opaque_ref, _vmss ?? "").parse());
}
/// <summary>
/// Get the last_run_time field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
public static DateTime get_last_run_time(Session session, string _vmss)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_get_last_run_time(session.opaque_ref, _vmss);
else
return session.XmlRpcProxy.vmss_get_last_run_time(session.opaque_ref, _vmss ?? "").parse();
}
/// <summary>
/// Get the VMs field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
public static List<XenRef<VM>> get_VMs(Session session, string _vmss)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_get_vms(session.opaque_ref, _vmss);
else
return XenRef<VM>.Create(session.XmlRpcProxy.vmss_get_vms(session.opaque_ref, _vmss ?? "").parse());
}
/// <summary>
/// Set the name/label field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
/// <param name="_label">New value to set</param>
public static void set_name_label(Session session, string _vmss, string _label)
{
if (session.JsonRpcClient != null)
session.JsonRpcClient.vmss_set_name_label(session.opaque_ref, _vmss, _label);
else
session.XmlRpcProxy.vmss_set_name_label(session.opaque_ref, _vmss ?? "", _label ?? "").parse();
}
/// <summary>
/// Set the name/description field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
/// <param name="_description">New value to set</param>
public static void set_name_description(Session session, string _vmss, string _description)
{
if (session.JsonRpcClient != null)
session.JsonRpcClient.vmss_set_name_description(session.opaque_ref, _vmss, _description);
else
session.XmlRpcProxy.vmss_set_name_description(session.opaque_ref, _vmss ?? "", _description ?? "").parse();
}
/// <summary>
/// Set the enabled field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
/// <param name="_enabled">New value to set</param>
public static void set_enabled(Session session, string _vmss, bool _enabled)
{
if (session.JsonRpcClient != null)
session.JsonRpcClient.vmss_set_enabled(session.opaque_ref, _vmss, _enabled);
else
session.XmlRpcProxy.vmss_set_enabled(session.opaque_ref, _vmss ?? "", _enabled).parse();
}
/// <summary>
/// This call executes the snapshot schedule immediately
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
public static string snapshot_now(Session session, string _vmss)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_snapshot_now(session.opaque_ref, _vmss);
else
return session.XmlRpcProxy.vmss_snapshot_now(session.opaque_ref, _vmss ?? "").parse();
}
/// <summary>
///
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
/// <param name="_value">the value to set</param>
public static void set_retained_snapshots(Session session, string _vmss, long _value)
{
if (session.JsonRpcClient != null)
session.JsonRpcClient.vmss_set_retained_snapshots(session.opaque_ref, _vmss, _value);
else
session.XmlRpcProxy.vmss_set_retained_snapshots(session.opaque_ref, _vmss ?? "", _value.ToString()).parse();
}
/// <summary>
/// Set the value of the frequency field
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
/// <param name="_value">the snapshot schedule frequency</param>
public static void set_frequency(Session session, string _vmss, vmss_frequency _value)
{
if (session.JsonRpcClient != null)
session.JsonRpcClient.vmss_set_frequency(session.opaque_ref, _vmss, _value);
else
session.XmlRpcProxy.vmss_set_frequency(session.opaque_ref, _vmss ?? "", vmss_frequency_helper.ToString(_value)).parse();
}
/// <summary>
/// Set the schedule field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
/// <param name="_value">the value to set</param>
public static void set_schedule(Session session, string _vmss, Dictionary<string, string> _value)
{
if (session.JsonRpcClient != null)
session.JsonRpcClient.vmss_set_schedule(session.opaque_ref, _vmss, _value);
else
session.XmlRpcProxy.vmss_set_schedule(session.opaque_ref, _vmss ?? "", Maps.convert_to_proxy_string_string(_value)).parse();
}
/// <summary>
/// Add the given key-value pair to the schedule field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
/// <param name="_key">the key to add</param>
/// <param name="_value">the value to add</param>
public static void add_to_schedule(Session session, string _vmss, string _key, string _value)
{
if (session.JsonRpcClient != null)
session.JsonRpcClient.vmss_add_to_schedule(session.opaque_ref, _vmss, _key, _value);
else
session.XmlRpcProxy.vmss_add_to_schedule(session.opaque_ref, _vmss ?? "", _key ?? "", _value ?? "").parse();
}
/// <summary>
/// Remove the given key and its corresponding value from the schedule field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
/// <param name="_key">the key to remove</param>
public static void remove_from_schedule(Session session, string _vmss, string _key)
{
if (session.JsonRpcClient != null)
session.JsonRpcClient.vmss_remove_from_schedule(session.opaque_ref, _vmss, _key);
else
session.XmlRpcProxy.vmss_remove_from_schedule(session.opaque_ref, _vmss ?? "", _key ?? "").parse();
}
/// <summary>
/// Set the last_run_time field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
/// <param name="_value">the value to set</param>
public static void set_last_run_time(Session session, string _vmss, DateTime _value)
{
if (session.JsonRpcClient != null)
session.JsonRpcClient.vmss_set_last_run_time(session.opaque_ref, _vmss, _value);
else
session.XmlRpcProxy.vmss_set_last_run_time(session.opaque_ref, _vmss ?? "", _value).parse();
}
/// <summary>
/// Set the type field of the given VMSS.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vmss">The opaque_ref of the given vmss</param>
/// <param name="_value">the snapshot schedule type</param>
public static void set_type(Session session, string _vmss, vmss_type _value)
{
if (session.JsonRpcClient != null)
session.JsonRpcClient.vmss_set_type(session.opaque_ref, _vmss, _value);
else
session.XmlRpcProxy.vmss_set_type(session.opaque_ref, _vmss ?? "", vmss_type_helper.ToString(_value)).parse();
}
/// <summary>
/// Return a list of all the VMSSs known to the system.
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
public static List<XenRef<VMSS>> get_all(Session session)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_get_all(session.opaque_ref);
else
return XenRef<VMSS>.Create(session.XmlRpcProxy.vmss_get_all(session.opaque_ref).parse());
}
/// <summary>
/// Get all the VMSS Records at once, in a single XML RPC call
/// First published in XenServer 7.2.
/// </summary>
/// <param name="session">The session</param>
public static Dictionary<XenRef<VMSS>, VMSS> get_all_records(Session session)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.vmss_get_all_records(session.opaque_ref);
else
return XenRef<VMSS>.Create<Proxy_VMSS>(session.XmlRpcProxy.vmss_get_all_records(session.opaque_ref).parse());
}
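/// <summary>
/// Illustrative usage sketch, not part of the generated bindings: enumerates every snapshot
/// schedule known to the pool via get_all_records and triggers the enabled ones immediately
/// with snapshot_now. Assumes the supplied Session is already logged in.
/// </summary>
public static void ExampleRunEnabledSchedules(Session session)
{
foreach (KeyValuePair<XenRef<VMSS>, VMSS> kvp in get_all_records(session))
{
VMSS record = kvp.Value;
if (record.enabled)
{
// snapshot_now returns the server-side result string for the run it started.
string result = snapshot_now(session, kvp.Key.opaque_ref);
System.Console.WriteLine("{0}: {1}", record.name_label, result);
}
}
}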
/// <summary>
/// Unique identifier/object reference
/// </summary>
public virtual string uuid
{
get { return _uuid; }
set
{
if (!Helper.AreEqual(value, _uuid))
{
_uuid = value;
NotifyPropertyChanged("uuid");
}
}
}
private string _uuid = "";
/// <summary>
/// a human-readable name
/// </summary>
public virtual string name_label
{
get { return _name_label; }
set
{
if (!Helper.AreEqual(value, _name_label))
{
_name_label = value;
NotifyPropertyChanged("name_label");
}
}
}
private string _name_label = "";
/// <summary>
/// a notes field containing human-readable description
/// </summary>
public virtual string name_description
{
get { return _name_description; }
set
{
if (!Helper.AreEqual(value, _name_description))
{
_name_description = value;
NotifyPropertyChanged("name_description");
}
}
}
private string _name_description = "";
/// <summary>
/// enable or disable this snapshot schedule
/// </summary>
public virtual bool enabled
{
get { return _enabled; }
set
{
if (!Helper.AreEqual(value, _enabled))
{
_enabled = value;
NotifyPropertyChanged("enabled");
}
}
}
private bool _enabled = true;
/// <summary>
/// type of the snapshot schedule
/// </summary>
[JsonConverter(typeof(vmss_typeConverter))]
public virtual vmss_type type
{
get { return _type; }
set
{
if (!Helper.AreEqual(value, _type))
{
_type = value;
NotifyPropertyChanged("type");
}
}
}
private vmss_type _type;
/// <summary>
/// maximum number of snapshots that should be stored at any time
/// </summary>
public virtual long retained_snapshots
{
get { return _retained_snapshots; }
set
{
if (!Helper.AreEqual(value, _retained_snapshots))
{
_retained_snapshots = value;
NotifyPropertyChanged("retained_snapshots");
}
}
}
private long _retained_snapshots = 7;
/// <summary>
/// frequency of taking snapshot from snapshot schedule
/// </summary>
[JsonConverter(typeof(vmss_frequencyConverter))]
public virtual vmss_frequency frequency
{
get { return _frequency; }
set
{
if (!Helper.AreEqual(value, _frequency))
{
_frequency = value;
NotifyPropertyChanged("frequency");
}
}
}
private vmss_frequency _frequency;
/// <summary>
/// schedule of the snapshot containing 'hour', 'min', 'days'. Date/time-related information is in Local Timezone
/// </summary>
[JsonConverter(typeof(StringStringMapConverter))]
public virtual Dictionary<string, string> schedule
{
get { return _schedule; }
set
{
if (!Helper.AreEqual(value, _schedule))
{
_schedule = value;
NotifyPropertyChanged("schedule");
}
}
}
private Dictionary<string, string> _schedule = new Dictionary<string, string>() {};
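// Illustrative example (keys taken from the field description above, exact value formats are
// an assumption): a schedule firing daily at 01:30 local time could be expressed as
// new Dictionary<string, string> { { "hour", "1" }, { "min", "30" } }, with a "days" entry
// (e.g. a comma-separated list of weekday names) only relevant for weekly frequency.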
/// <summary>
/// time of the last snapshot
/// </summary>
[JsonConverter(typeof(XenDateTimeConverter))]
public virtual DateTime last_run_time
{
get { return _last_run_time; }
set
{
if (!Helper.AreEqual(value, _last_run_time))
{
_last_run_time = value;
NotifyPropertyChanged("last_run_time");
}
}
}
private DateTime _last_run_time = DateTime.ParseExact("19700101T00:00:00Z", "yyyyMMddTHH:mm:ssZ", CultureInfo.InvariantCulture);
/// <summary>
/// all VMs attached to this snapshot schedule
/// </summary>
[JsonConverter(typeof(XenRefListConverter<VM>))]
public virtual List<XenRef<VM>> VMs
{
get { return _VMs; }
set
{
if (!Helper.AreEqual(value, _VMs))
{
_VMs = value;
NotifyPropertyChanged("VMs");
}
}
}
private List<XenRef<VM>> _VMs = new List<XenRef<VM>>() {};
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Xunit;
namespace System.Linq.Expressions.Tests
{
public static class BinarySubtractTests
{
#region Test methods
[Fact]
public static void CheckByteSubtractTest()
{
byte[] array = new byte[] { 0, 1, byte.MaxValue };
for (int i = 0; i < array.Length; i++)
{
for (int j = 0; j < array.Length; j++)
{
VerifyByteSubtract(array[i], array[j]);
}
}
}
[Fact]
public static void CheckSByteSubtractTest()
{
sbyte[] array = new sbyte[] { 0, 1, -1, sbyte.MinValue, sbyte.MaxValue };
for (int i = 0; i < array.Length; i++)
{
for (int j = 0; j < array.Length; j++)
{
VerifySByteSubtract(array[i], array[j]);
}
}
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckUShortSubtractTest(bool useInterpreter)
{
ushort[] array = new ushort[] { 0, 1, ushort.MaxValue };
for (int i = 0; i < array.Length; i++)
{
for (int j = 0; j < array.Length; j++)
{
VerifyUShortSubtract(array[i], array[j], useInterpreter);
VerifyUShortSubtractOvf(array[i], array[j], useInterpreter);
}
}
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckShortSubtractTest(bool useInterpreter)
{
short[] array = new short[] { 0, 1, -1, short.MinValue, short.MaxValue };
for (int i = 0; i < array.Length; i++)
{
for (int j = 0; j < array.Length; j++)
{
VerifyShortSubtract(array[i], array[j], useInterpreter);
VerifyShortSubtractOvf(array[i], array[j], useInterpreter);
}
}
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckUIntSubtractTest(bool useInterpreter)
{
uint[] array = new uint[] { 0, 1, uint.MaxValue };
for (int i = 0; i < array.Length; i++)
{
for (int j = 0; j < array.Length; j++)
{
VerifyUIntSubtract(array[i], array[j], useInterpreter);
VerifyUIntSubtractOvf(array[i], array[j], useInterpreter);
}
}
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckIntSubtractTest(bool useInterpreter)
{
int[] array = new int[] { 0, 1, -1, int.MinValue, int.MaxValue };
for (int i = 0; i < array.Length; i++)
{
for (int j = 0; j < array.Length; j++)
{
VerifyIntSubtract(array[i], array[j], useInterpreter);
VerifyIntSubtractOvf(array[i], array[j], useInterpreter);
}
}
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckULongSubtractTest(bool useInterpreter)
{
ulong[] array = new ulong[] { 0, 1, ulong.MaxValue };
for (int i = 0; i < array.Length; i++)
{
for (int j = 0; j < array.Length; j++)
{
VerifyULongSubtract(array[i], array[j], useInterpreter);
VerifyULongSubtractOvf(array[i], array[j], useInterpreter);
}
}
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckLongSubtractTest(bool useInterpreter)
{
long[] array = new long[] { 0, 1, -1, long.MinValue, long.MaxValue };
for (int i = 0; i < array.Length; i++)
{
for (int j = 0; j < array.Length; j++)
{
VerifyLongSubtract(array[i], array[j], useInterpreter);
VerifyLongSubtractOvf(array[i], array[j], useInterpreter);
}
}
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckFloatSubtractTest(bool useInterpreter)
{
float[] array = new float[] { 0, 1, -1, float.MinValue, float.MaxValue, float.Epsilon, float.NegativeInfinity, float.PositiveInfinity, float.NaN };
for (int i = 0; i < array.Length; i++)
{
for (int j = 0; j < array.Length; j++)
{
VerifyFloatSubtract(array[i], array[j], useInterpreter);
}
}
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckDoubleSubtractTest(bool useInterpreter)
{
double[] array = new double[] { 0, 1, -1, double.MinValue, double.MaxValue, double.Epsilon, double.NegativeInfinity, double.PositiveInfinity, double.NaN };
for (int i = 0; i < array.Length; i++)
{
for (int j = 0; j < array.Length; j++)
{
VerifyDoubleSubtract(array[i], array[j], useInterpreter);
}
}
}
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckDecimalSubtractTest(bool useInterpreter)
{
decimal[] array = new decimal[] { decimal.Zero, decimal.One, decimal.MinusOne, decimal.MinValue, decimal.MaxValue };
for (int i = 0; i < array.Length; i++)
{
for (int j = 0; j < array.Length; j++)
{
VerifyDecimalSubtract(array[i], array[j], useInterpreter);
}
}
}
[Fact]
public static void CheckCharSubtractTest()
{
char[] array = new char[] { '\0', '\b', 'A', '\uffff' };
for (int i = 0; i < array.Length; i++)
{
for (int j = 0; j < array.Length; j++)
{
VerifyCharSubtract(array[i], array[j]);
}
}
}
#endregion
#region Test verifiers
private static void VerifyByteSubtract(byte a, byte b)
{
Expression aExp = Expression.Constant(a, typeof(byte));
Expression bExp = Expression.Constant(b, typeof(byte));
Assert.Throws<InvalidOperationException>(() => Expression.Subtract(aExp, bExp));
}
private static void VerifySByteSubtract(sbyte a, sbyte b)
{
Expression aExp = Expression.Constant(a, typeof(sbyte));
Expression bExp = Expression.Constant(b, typeof(sbyte));
Assert.Throws<InvalidOperationException>(() => Expression.Subtract(aExp, bExp));
}
private static void VerifyUShortSubtract(ushort a, ushort b, bool useInterpreter)
{
Expression<Func<ushort>> e =
Expression.Lambda<Func<ushort>>(
Expression.Subtract(
Expression.Constant(a, typeof(ushort)),
Expression.Constant(b, typeof(ushort))),
Enumerable.Empty<ParameterExpression>());
Func<ushort> f = e.Compile(useInterpreter);
Assert.Equal(unchecked((ushort)(a - b)), f());
}
private static void VerifyUShortSubtractOvf(ushort a, ushort b, bool useInterpreter)
{
Expression<Func<ushort>> e =
Expression.Lambda<Func<ushort>>(
Expression.SubtractChecked(
Expression.Constant(a, typeof(ushort)),
Expression.Constant(b, typeof(ushort))),
Enumerable.Empty<ParameterExpression>());
Func<ushort> f = e.Compile(useInterpreter);
ushort expected = 0;
try
{
expected = checked((ushort)(a - b));
}
catch (OverflowException)
{
Assert.Throws<OverflowException>(() => f());
return;
}
Assert.Equal(expected, f());
}
private static void VerifyShortSubtract(short a, short b, bool useInterpreter)
{
Expression<Func<short>> e =
Expression.Lambda<Func<short>>(
Expression.Subtract(
Expression.Constant(a, typeof(short)),
Expression.Constant(b, typeof(short))),
Enumerable.Empty<ParameterExpression>());
Func<short> f = e.Compile(useInterpreter);
Assert.Equal(unchecked((short)(a - b)), f());
}
private static void VerifyShortSubtractOvf(short a, short b, bool useInterpreter)
{
Expression<Func<short>> e =
Expression.Lambda<Func<short>>(
Expression.SubtractChecked(
Expression.Constant(a, typeof(short)),
Expression.Constant(b, typeof(short))),
Enumerable.Empty<ParameterExpression>());
Func<short> f = e.Compile(useInterpreter);
short expected = 0;
try
{
expected = checked((short)(a - b));
}
catch (OverflowException)
{
Assert.Throws<OverflowException>(() => f());
return;
}
Assert.Equal(expected, f());
}
private static void VerifyUIntSubtract(uint a, uint b, bool useInterpreter)
{
Expression<Func<uint>> e =
Expression.Lambda<Func<uint>>(
Expression.Subtract(
Expression.Constant(a, typeof(uint)),
Expression.Constant(b, typeof(uint))),
Enumerable.Empty<ParameterExpression>());
Func<uint> f = e.Compile(useInterpreter);
Assert.Equal(unchecked(a - b), f());
}
private static void VerifyUIntSubtractOvf(uint a, uint b, bool useInterpreter)
{
Expression<Func<uint>> e =
Expression.Lambda<Func<uint>>(
Expression.SubtractChecked(
Expression.Constant(a, typeof(uint)),
Expression.Constant(b, typeof(uint))),
Enumerable.Empty<ParameterExpression>());
Func<uint> f = e.Compile(useInterpreter);
uint expected = 0;
try
{
expected = checked(a - b);
}
catch (OverflowException)
{
Assert.Throws<OverflowException>(() => f());
return;
}
Assert.Equal(expected, f());
}
private static void VerifyIntSubtract(int a, int b, bool useInterpreter)
{
Expression<Func<int>> e =
Expression.Lambda<Func<int>>(
Expression.Subtract(
Expression.Constant(a, typeof(int)),
Expression.Constant(b, typeof(int))),
Enumerable.Empty<ParameterExpression>());
Func<int> f = e.Compile(useInterpreter);
Assert.Equal(unchecked(a - b), f());
}
private static void VerifyIntSubtractOvf(int a, int b, bool useInterpreter)
{
Expression<Func<int>> e =
Expression.Lambda<Func<int>>(
Expression.SubtractChecked(
Expression.Constant(a, typeof(int)),
Expression.Constant(b, typeof(int))),
Enumerable.Empty<ParameterExpression>());
Func<int> f = e.Compile(useInterpreter);
int expected = 0;
try
{
expected = checked(a - b);
}
catch (OverflowException)
{
Assert.Throws<OverflowException>(() => f());
return;
}
Assert.Equal(expected, f());
}
private static void VerifyULongSubtract(ulong a, ulong b, bool useInterpreter)
{
Expression<Func<ulong>> e =
Expression.Lambda<Func<ulong>>(
Expression.Subtract(
Expression.Constant(a, typeof(ulong)),
Expression.Constant(b, typeof(ulong))),
Enumerable.Empty<ParameterExpression>());
Func<ulong> f = e.Compile(useInterpreter);
Assert.Equal(unchecked(a - b), f());
}
private static void VerifyULongSubtractOvf(ulong a, ulong b, bool useInterpreter)
{
Expression<Func<ulong>> e =
Expression.Lambda<Func<ulong>>(
Expression.SubtractChecked(
Expression.Constant(a, typeof(ulong)),
Expression.Constant(b, typeof(ulong))),
Enumerable.Empty<ParameterExpression>());
Func<ulong> f = e.Compile(useInterpreter);
ulong expected = 0;
try
{
expected = checked(a - b);
}
catch (OverflowException)
{
Assert.Throws<OverflowException>(() => f());
return;
}
Assert.Equal(expected, f());
}
private static void VerifyLongSubtract(long a, long b, bool useInterpreter)
{
Expression<Func<long>> e =
Expression.Lambda<Func<long>>(
Expression.Subtract(
Expression.Constant(a, typeof(long)),
Expression.Constant(b, typeof(long))),
Enumerable.Empty<ParameterExpression>());
Func<long> f = e.Compile(useInterpreter);
Assert.Equal(unchecked(a - b), f());
}
private static void VerifyLongSubtractOvf(long a, long b, bool useInterpreter)
{
Expression<Func<long>> e =
Expression.Lambda<Func<long>>(
Expression.SubtractChecked(
Expression.Constant(a, typeof(long)),
Expression.Constant(b, typeof(long))),
Enumerable.Empty<ParameterExpression>());
Func<long> f = e.Compile(useInterpreter);
long expected = 0;
try
{
expected = checked(a - b);
}
catch (OverflowException)
{
Assert.Throws<OverflowException>(() => f());
return;
}
Assert.Equal(expected, f());
}
private static void VerifyFloatSubtract(float a, float b, bool useInterpreter)
{
Expression<Func<float>> e =
Expression.Lambda<Func<float>>(
Expression.Subtract(
Expression.Constant(a, typeof(float)),
Expression.Constant(b, typeof(float))),
Enumerable.Empty<ParameterExpression>());
Func<float> f = e.Compile(useInterpreter);
Assert.Equal(a - b, f());
}
private static void VerifyDoubleSubtract(double a, double b, bool useInterpreter)
{
Expression<Func<double>> e =
Expression.Lambda<Func<double>>(
Expression.Subtract(
Expression.Constant(a, typeof(double)),
Expression.Constant(b, typeof(double))),
Enumerable.Empty<ParameterExpression>());
Func<double> f = e.Compile(useInterpreter);
Assert.Equal(a - b, f());
}
private static void VerifyDecimalSubtract(decimal a, decimal b, bool useInterpreter)
{
Expression<Func<decimal>> e =
Expression.Lambda<Func<decimal>>(
Expression.Subtract(
Expression.Constant(a, typeof(decimal)),
Expression.Constant(b, typeof(decimal))),
Enumerable.Empty<ParameterExpression>());
Func<decimal> f = e.Compile(useInterpreter);
decimal expected = 0;
try
{
expected = a - b;
}
catch (OverflowException)
{
Assert.Throws<OverflowException>(() => f());
return;
}
Assert.Equal(expected, f());
}
private static void VerifyCharSubtract(char a, char b)
{
Expression aExp = Expression.Constant(a, typeof(char));
Expression bExp = Expression.Constant(b, typeof(char));
Assert.Throws<InvalidOperationException>(() => Expression.Subtract(aExp, bExp));
}
#endregion
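// Illustrative sketch, not one of the generated test cases above: every verifier pair follows
// the same pattern - Expression.Subtract compiles to an unchecked subtraction that wraps on
// overflow, while Expression.SubtractChecked throws OverflowException at invocation time when
// the checked result would overflow.
private static void ExampleCheckedVersusUncheckedSubtract()
{
Func<int> wrapping = Expression.Lambda<Func<int>>(
Expression.Subtract(Expression.Constant(int.MinValue), Expression.Constant(1))).Compile();
Assert.Equal(int.MaxValue, wrapping()); // int.MinValue - 1 wraps around
Func<int> overflowing = Expression.Lambda<Func<int>>(
Expression.SubtractChecked(Expression.Constant(int.MinValue), Expression.Constant(1))).Compile();
Assert.Throws<OverflowException>(() => overflowing());
}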
[Theory]
[ClassData(typeof(CompilationTypes))]
public static void Subtract_MultipleOverloads_CorrectlyResolvesOperator1(bool useInterpreter)
{
BinaryExpression subtract = Expression.Subtract(Expression.Constant(new DateTime(100)), Expression.Constant(new DateTime(10)));
Func<TimeSpan> lambda = Expression.Lambda<Func<TimeSpan>>(subtract).Compile(useInterpreter);
Assert.Equal(new TimeSpan(90), lambda());
}
[Theory]
[ClassData(typeof(CompilationTypes))]
public static void Subtract_MultipleOverloads_CorrectlyResolvesOperator2(bool useInterpreter)
{
BinaryExpression subtract = Expression.Subtract(Expression.Constant(new DateTime(100)), Expression.Constant(new TimeSpan(10)));
Func<DateTime> lambda = Expression.Lambda<Func<DateTime>>(subtract).Compile(useInterpreter);
Assert.Equal(new DateTime(90), lambda());
}
[Fact]
public static void Subtract_NoSuchOperatorDeclaredOnType_ThrowsInvalidOperationException()
{
Assert.Throws<InvalidOperationException>(() => Expression.Add(Expression.Constant(new SubClass(0)), Expression.Constant(new SubClass(1))));
}
public class BaseClass
{
public BaseClass(int value) { Value = value; }
public int Value { get; }
public static BaseClass operator -(BaseClass i1, BaseClass i2) => new BaseClass(i1.Value - i2.Value);
}
public class SubClass : BaseClass
{
public SubClass(int value) : base(value) { }
}
[Fact]
public static void CannotReduce()
{
Expression exp = Expression.Subtract(Expression.Constant(0), Expression.Constant(0));
Assert.False(exp.CanReduce);
Assert.Same(exp, exp.Reduce());
Assert.Throws<ArgumentException>(null, () => exp.ReduceAndCheck());
}
[Fact]
public static void CannotReduceChecked()
{
Expression exp = Expression.SubtractChecked(Expression.Constant(0), Expression.Constant(0));
Assert.False(exp.CanReduce);
Assert.Same(exp, exp.Reduce());
Assert.Throws<ArgumentException>(null, () => exp.ReduceAndCheck());
}
[Fact]
public static void ThrowsOnLeftNull()
{
AssertExtensions.Throws<ArgumentNullException>("left", () => Expression.Subtract(null, Expression.Constant("")));
}
[Fact]
public static void ThrowsOnRightNull()
{
AssertExtensions.Throws<ArgumentNullException>("right", () => Expression.Subtract(Expression.Constant(""), null));
}
[Fact]
public static void CheckedThrowsOnLeftNull()
{
AssertExtensions.Throws<ArgumentNullException>("left", () => Expression.SubtractChecked(null, Expression.Constant("")));
}
[Fact]
public static void CheckedThrowsOnRightNull()
{
AssertExtensions.Throws<ArgumentNullException>("right", () => Expression.SubtractChecked(Expression.Constant(""), null));
}
private static class Unreadable<T>
{
public static T WriteOnly
{
set { }
}
}
[Fact]
public static void ThrowsOnLeftUnreadable()
{
Expression value = Expression.Property(null, typeof(Unreadable<int>), "WriteOnly");
AssertExtensions.Throws<ArgumentException>("left", () => Expression.Subtract(value, Expression.Constant(1)));
}
[Fact]
public static void ThrowsOnRightUnreadable()
{
Expression value = Expression.Property(null, typeof(Unreadable<int>), "WriteOnly");
AssertExtensions.Throws<ArgumentException>("right", () => Expression.Subtract(Expression.Constant(1), value));
}
[Fact]
public static void CheckedThrowsOnLeftUnreadable()
{
Expression value = Expression.Property(null, typeof(Unreadable<int>), "WriteOnly");
AssertExtensions.Throws<ArgumentException>("left", () => Expression.SubtractChecked(value, Expression.Constant(1)));
}
[Fact]
public static void CheckedThrowsOnRightUnreadable()
{
Expression value = Expression.Property(null, typeof(Unreadable<int>), "WriteOnly");
AssertExtensions.Throws<ArgumentException>("right", () => Expression.SubtractChecked(Expression.Constant(1), value));
}
[Fact]
public static void ToStringTest()
{
BinaryExpression e1 = Expression.Subtract(Expression.Parameter(typeof(int), "a"), Expression.Parameter(typeof(int), "b"));
Assert.Equal("(a - b)", e1.ToString());
BinaryExpression e2 = Expression.SubtractChecked(Expression.Parameter(typeof(int), "a"), Expression.Parameter(typeof(int), "b"));
Assert.Equal("(a - b)", e2.ToString());
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
private static void XorSingle()
{
var test = new SimpleBinaryOpTest__XorSingle();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (Sse.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
// Validates basic functionality works, using LoadAligned
test.RunBasicScenario_LoadAligned();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (Sse.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
// Validates calling via reflection works, using LoadAligned
test.RunReflectionScenario_LoadAligned();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (Sse.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (Sse.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
// Validates passing a local works, using LoadAligned
test.RunLclVarScenario_LoadAligned();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (Sse.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (Sse.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (Sse.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (Sse.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__XorSingle
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(Single[] inArray1, Single[] inArray2, Single[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Single>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Single>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Single>();
if ((alignment != 32 && alignment != 16) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Single, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Single, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
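// Worked example (illustrative): with expectedAlignment = 16 and a pinned buffer starting at
// address 0x1003, Align below computes (0x1003 + 0xF) & ~0xF = 0x1010, i.e. the first
// 16-byte-aligned address at or beyond the start of the over-allocated backing array.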
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector128<Single> _fld1;
public Vector128<Single> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref testStruct._fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref testStruct._fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
return testStruct;
}
public void RunStructFldScenario(SimpleBinaryOpTest__XorSingle testClass)
{
var result = Sse.Xor(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(SimpleBinaryOpTest__XorSingle testClass)
{
fixed (Vector128<Single>* pFld1 = &_fld1)
fixed (Vector128<Single>* pFld2 = &_fld2)
{
var result = Sse.Xor(
Sse.LoadVector128((Single*)(pFld1)),
Sse.LoadVector128((Single*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Single>>() / sizeof(Single);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<Single>>() / sizeof(Single);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<Single>>() / sizeof(Single);
private static Single[] _data1 = new Single[Op1ElementCount];
private static Single[] _data2 = new Single[Op2ElementCount];
private static Vector128<Single> _clsVar1;
private static Vector128<Single> _clsVar2;
private Vector128<Single> _fld1;
private Vector128<Single> _fld2;
private DataTable _dataTable;
static SimpleBinaryOpTest__XorSingle()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _clsVar1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _clsVar2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
}
public SimpleBinaryOpTest__XorSingle()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
_dataTable = new DataTable(_data1, _data2, new Single[RetElementCount], LargestVectorSize);
}
public bool IsSupported => Sse.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Sse.Xor(
Unsafe.Read<Vector128<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<Single>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = Sse.Xor(
Sse.LoadVector128((Single*)(_dataTable.inArray1Ptr)),
Sse.LoadVector128((Single*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));
var result = Sse.Xor(
Sse.LoadAlignedVector128((Single*)(_dataTable.inArray1Ptr)),
Sse.LoadAlignedVector128((Single*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(Sse).GetMethod(nameof(Sse.Xor), new Type[] { typeof(Vector128<Single>), typeof(Vector128<Single>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<Single>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(Sse).GetMethod(nameof(Sse.Xor), new Type[] { typeof(Vector128<Single>), typeof(Vector128<Single>) })
.Invoke(null, new object[] {
Sse.LoadVector128((Single*)(_dataTable.inArray1Ptr)),
Sse.LoadVector128((Single*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));
var result = typeof(Sse).GetMethod(nameof(Sse.Xor), new Type[] { typeof(Vector128<Single>), typeof(Vector128<Single>) })
.Invoke(null, new object[] {
Sse.LoadAlignedVector128((Single*)(_dataTable.inArray1Ptr)),
Sse.LoadAlignedVector128((Single*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Sse.Xor(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector128<Single>* pClsVar1 = &_clsVar1)
fixed (Vector128<Single>* pClsVar2 = &_clsVar2)
{
var result = Sse.Xor(
Sse.LoadVector128((Single*)(pClsVar1)),
Sse.LoadVector128((Single*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector128<Single>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector128<Single>>(_dataTable.inArray2Ptr);
var result = Sse.Xor(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = Sse.LoadVector128((Single*)(_dataTable.inArray1Ptr));
var op2 = Sse.LoadVector128((Single*)(_dataTable.inArray2Ptr));
var result = Sse.Xor(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));
var op1 = Sse.LoadAlignedVector128((Single*)(_dataTable.inArray1Ptr));
var op2 = Sse.LoadAlignedVector128((Single*)(_dataTable.inArray2Ptr));
var result = Sse.Xor(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__XorSingle();
var result = Sse.Xor(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__XorSingle();
fixed (Vector128<Single>* pFld1 = &test._fld1)
fixed (Vector128<Single>* pFld2 = &test._fld2)
{
var result = Sse.Xor(
Sse.LoadVector128((Single*)(pFld1)),
Sse.LoadVector128((Single*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Sse.Xor(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector128<Single>* pFld1 = &_fld1)
fixed (Vector128<Single>* pFld2 = &_fld2)
{
var result = Sse.Xor(
Sse.LoadVector128((Single*)(pFld1)),
Sse.LoadVector128((Single*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Sse.Xor(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = Sse.Xor(
Sse.LoadVector128((Single*)(&test._fld1)),
Sse.LoadVector128((Single*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector128<Single> op1, Vector128<Single> op2, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[Op1ElementCount];
Single[] inArray2 = new Single[Op2ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Single>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[Op1ElementCount];
Single[] inArray2 = new Single[Op2ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<Single>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<Single>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Single>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(Single[] left, Single[] right, Single[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
if ((BitConverter.SingleToInt32Bits(left[0]) ^ BitConverter.SingleToInt32Bits(right[0])) != BitConverter.SingleToInt32Bits(result[0]))
{
succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if ((BitConverter.SingleToInt32Bits(left[i]) ^ BitConverter.SingleToInt32Bits(right[i])) != BitConverter.SingleToInt32Bits(result[i]))
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Sse)}.{nameof(Sse.Xor)}<Single>(Vector128<Single>, Vector128<Single>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| |
using System;
interface IFoo
{
int Foo1(int a); // { return a + 1 };
int Foo2(int a); // { return a + 2 };
int Foo3(int a);
int Foo4(int a);
int Foo5(int a);
int Foo6(int a);
int Foo7(int a);
int Foo8(int a);
int Foo9(int a);
}
interface IBar : IFoo
{
// @OVERRIDE
// IFoo.Foo1/2/3/4/5
int Bar1(int b); // { return a + 11; }
int Bar2(int b); // { return a + 22; }
int Bar3(int b); // { return a + 33; }
int Bar4(int b);
int Bar5(int b);
int Bar6(int b);
int Bar7(int b);
int Bar8(int b);
int Bar9(int b);
}
interface IBlah : IBar
{
// @OVERRIDE IFoo.Foo6/7/8/9
// @OVERRIDE IBar.Bar6/7/8/9
int Blah1(int c);
int Blah2(int c);
int Blah3(int c);
}
class IBarImpl : IBar
{
// @REMOVE all implementation
int IFoo.Foo1(int a)
{
Console.WriteLine("At IFoo.Foo1");
return a + 10;
}
int IFoo.Foo2(int a)
{
Console.WriteLine("At IFoo.Foo2");
return a + 20;
}
int IFoo.Foo3(int a)
{
Console.WriteLine("At IFoo.Foo3");
return a + 30;
}
int IFoo.Foo4(int a)
{
Console.WriteLine("At IFoo.Foo4");
return a + 40;
}
int IFoo.Foo5(int a)
{
Console.WriteLine("At IFoo.Foo5");
return a + 50;
}
int IFoo.Foo6(int a)
{
Console.WriteLine("At IFoo.Foo6");
return a + 60;
}
int IFoo.Foo7(int a)
{
Console.WriteLine("At IFoo.Foo7");
return a + 70;
}
int IFoo.Foo8(int a)
{
Console.WriteLine("At IFoo.Foo8");
return a + 80;
}
int IFoo.Foo9(int a)
{
Console.WriteLine("At IFoo.Foo9");
return a + 90;
}
int IBar.Bar1(int a)
{
Console.WriteLine("At IBar.Bar1");
return a + 110;
}
int IBar.Bar2(int a)
{
Console.WriteLine("At IBar.Bar2");
return a + 220;
}
int IBar.Bar3(int a)
{
Console.WriteLine("At IBar.Bar3");
return a + 330;
}
int IBar.Bar4(int a)
{
Console.WriteLine("At IBar.Bar4");
return a + 440;
}
int IBar.Bar5(int a)
{
Console.WriteLine("At IBar.Bar5");
return a + 550;
}
int IBar.Bar6(int a)
{
Console.WriteLine("At IBar.Bar6");
return a + 660;
}
int IBar.Bar7(int a)
{
Console.WriteLine("At IBar.Bar7");
return a + 770;
}
int IBar.Bar8(int a)
{
Console.WriteLine("At IBar.Bar8");
return a + 880;
}
int IBar.Bar9(int a)
{
Console.WriteLine("At IBar.Bar9");
return a + 990;
}
}
class IBlahImpl : IBarImpl, IBlah
{
// @REMOVE all implementation
// @OVERRIDE IBlah2/3 with + 2220/3330
int IBlah.Blah1(int c)
{
Console.WriteLine("At IBlah.Blah1");
return c+111;
}
int IBlah.Blah2(int c)
{
Console.WriteLine("At IBlah.Blah2");
return c+222;
}
int IBlah.Blah3(int c)
{
Console.WriteLine("At IBlah.Blah3");
return c+333;
}
}
interface IFooBarBlah : IFoo, IBar, IBlah
{
// FooBarBlah1 .override IFoo.Foo1/IBar.Bar1/IBlah.Blah1 return i+11111
// FooBarBlah2 .override IFoo.Foo2/IBar.Bar2/IBlah.Blah2 return i+22222
// FooBarBlah345 .override IFoo.Foo345/IBar.Bar345/IBlah.Blah3 return i+33333
}
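// Illustrative sketch (assumption: the @-annotated overrides above are realised as default
// interface members by the test tooling, which rewrites the IL rather than the C# source).
// In C# 8 source form an override slot such as FooBarBlah1 would look roughly like:
//
// interface IFooBarBlahSketch : IFoo
// {
// int IFoo.Foo1(int i) { return i + 11111; }
// }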
class FooBarBlahImpl :
IBlahImpl, // @REMOVE
IFooBarBlah
{
}
class Program
{
public static int Main()
{
SingleOverride();
MultiOverride();
return Test.Ret();
}
private static void SingleOverride()
{
IBarImpl barImpl = new IBarImpl();
IFoo foo = (IFoo) barImpl;
Console.WriteLine("Calling IFoo.Foo methods on IBarImpl...");
Test.Assert(foo.Foo1(1) == 11, "Calling IFoo.Foo1 on IBarImpl");
Test.Assert(foo.Foo2(2) == 22, "Calling IFoo.Foo2 on IBarImpl");
Test.Assert(foo.Foo3(3) == 33, "Calling IFoo.Foo3 on IBarImpl");
Test.Assert(foo.Foo4(4) == 44, "Calling IFoo.Foo4 on IBarImpl");
Test.Assert(foo.Foo5(5) == 55, "Calling IFoo.Foo5 on IBarImpl");
Test.Assert(foo.Foo6(0) == 6, "Calling IFoo.Foo6 on IBarImpl");
Test.Assert(foo.Foo7(0) == 7, "Calling IFoo.Foo7 on IBarImpl");
Test.Assert(foo.Foo8(0) == 8, "Calling IFoo.Foo8 on IBarImpl");
Test.Assert(foo.Foo9(0) == 9, "Calling IFoo.Foo9 on IBarImpl");
IBar bar = (IBar) barImpl;
Console.WriteLine("Calling IBar.Bar methods on IBarImpl...");
Test.Assert(bar.Bar1(0) == 11, "Calling IBar.Bar1 on IBarImpl");
Test.Assert(bar.Bar2(0) == 22, "Calling IBar.Bar2 on IBarImpl");
Test.Assert(bar.Bar3(0) == 33, "Calling IBar.Bar3 on IBarImpl");
Test.Assert(bar.Bar4(0) == 44, "Calling IBar.Bar4 on IBarImpl");
Test.Assert(bar.Bar5(0) == 55, "Calling IBar.Bar5 on IBarImpl");
Test.Assert(bar.Bar6(0) == 66, "Calling IBar.Bar6 on IBarImpl");
Test.Assert(bar.Bar7(0) == 77, "Calling IBar.Bar7 on IBarImpl");
Test.Assert(bar.Bar8(0) == 88, "Calling IBar.Bar8 on IBarImpl");
Test.Assert(bar.Bar9(0) == 99, "Calling IBar.Bar9 on IBarImpl");
IBlahImpl blahImpl = new IBlahImpl();
foo = (IFoo) blahImpl;
Test.Assert(foo.Foo1(1) == 11, "Calling IFoo.Foo1 on IBlahImpl");
Test.Assert(foo.Foo2(2) == 22, "Calling IFoo.Foo2 on IBlahImpl");
Test.Assert(foo.Foo3(3) == 33, "Calling IFoo.Foo3 on IBlahImpl");
Test.Assert(foo.Foo4(4) == 44, "Calling IFoo.Foo4 on IBlahImpl");
Test.Assert(foo.Foo5(5) == 55, "Calling IFoo.Foo5 on IBlahImpl");
Test.Assert(foo.Foo6(6) == 66, "Calling IFoo.Foo6 on IBlahImpl");
Test.Assert(foo.Foo7(7) == 77, "Calling IFoo.Foo7 on IBlahImpl");
Test.Assert(foo.Foo8(8) == 88, "Calling IFoo.Foo8 on IBlahImpl");
Test.Assert(foo.Foo9(9) == 99, "Calling IFoo.Foo9 on IBlahImpl");
bar = (IBar) blahImpl;
Console.WriteLine("Calling IBar.Bar methods on IBlahImpl...");
Test.Assert(bar.Bar1(1) == 111, "Calling IBar.Bar1 on IBlahImpl");
Test.Assert(bar.Bar2(2) == 222, "Calling IBar.Bar2 on IBlahImpl");
Test.Assert(bar.Bar3(3) == 333, "Calling IBar.Bar3 on IBlahImpl");
Test.Assert(bar.Bar4(4) == 444, "Calling IBar.Bar4 on IBlahImpl");
Test.Assert(bar.Bar5(5) == 555, "Calling IBar.Bar5 on IBlahImpl");
Test.Assert(bar.Bar6(0) == 66, "Calling IBar.Bar6 on IBlahImpl");
Test.Assert(bar.Bar7(0) == 77, "Calling IBar.Bar7 on IBlahImpl");
Test.Assert(bar.Bar8(0) == 88, "Calling IBar.Bar8 on IBlahImpl");
Test.Assert(bar.Bar9(0) == 99, "Calling IBar.Bar9 on IBlahImpl");
IBlah blah = (IBlah) blahImpl;
Console.WriteLine("Calling IBlah.Blah methods on IBlahImpl...");
Test.Assert(blah.Blah1(0) == 111, "Calling IBlah.Blah1 on IBlahImpl");
Test.Assert(blah.Blah2(2) == 2222, "Calling IBlah.Blah2 on IBlahImpl");
Test.Assert(blah.Blah3(3) == 3333, "Calling IBlah.Blah3 on IBlahImpl");
}
private static void MultiOverride()
{
FooBarBlahImpl fooBarBlah = new FooBarBlahImpl();
IFoo foo = (IFoo) fooBarBlah;
Console.WriteLine("Calling IFoo.Foo methods on FooBarBlahImpl...");
Test.Assert(foo.Foo1(0) == 11111, "Calling IFoo.Foo1 on FooBarBlahImpl");
Test.Assert(foo.Foo2(0) == 22222, "Calling IFoo.Foo2 on FooBarBlahImpl");
Test.Assert(foo.Foo3(0) == 33333, "Calling IFoo.Foo3 on FooBarBlahImpl");
Test.Assert(foo.Foo4(0) == 33333, "Calling IFoo.Foo4 on FooBarBlahImpl");
Test.Assert(foo.Foo5(0) == 33333, "Calling IFoo.Foo5 on FooBarBlahImpl");
Test.Assert(foo.Foo6(6) == 66, "Calling IFoo.Foo6 on FooBarBlahImpl");
Test.Assert(foo.Foo7(7) == 77, "Calling IFoo.Foo7 on FooBarBlahImpl");
Test.Assert(foo.Foo8(8) == 88, "Calling IFoo.Foo8 on FooBarBlahImpl");
Test.Assert(foo.Foo9(9) == 99, "Calling IFoo.Foo9 on FooBarBlahImpl");
IBar bar = (IBar) fooBarBlah;
Console.WriteLine("Calling IBar.Bar methods on FooBarBlahImpl...");
Test.Assert(bar.Bar1(0) == 11111, "Calling IBar.Bar1 on FooBarBlahImpl");
Test.Assert(bar.Bar2(0) == 22222, "Calling IBar.Bar2 on FooBarBlahImpl");
Test.Assert(bar.Bar3(0) == 33333, "Calling IBar.Bar3 on FooBarBlahImpl");
Test.Assert(bar.Bar4(0) == 33333, "Calling IBar.Bar4 on FooBarBlahImpl");
Test.Assert(bar.Bar5(0) == 33333, "Calling IBar.Bar5 on FooBarBlahImpl");
Test.Assert(bar.Bar6(0) == 66, "Calling IBar.Bar6 on FooBarBlahImpl");
Test.Assert(bar.Bar7(0) == 77, "Calling IBar.Bar7 on FooBarBlahImpl");
Test.Assert(bar.Bar8(0) == 88, "Calling IBar.Bar8 on FooBarBlahImpl");
Test.Assert(bar.Bar9(0) == 99, "Calling IBar.Bar9 on FooBarBlahImpl");
IBlah blah = (IBlah) fooBarBlah;
Console.WriteLine("Calling IBlah.Blah methods on FooBarBlahImpl...");
Test.Assert(blah.Blah1(0) == 11111, "Calling IBlah.Blah1 on FooBarBlahImpl");
Test.Assert(blah.Blah2(0) == 22222, "Calling IBlah.Blah2 on FooBarBlahImpl");
Test.Assert(blah.Blah3(0) == 33333, "Calling IBlah.Blah3 on FooBarBlahImpl");
}
}
class Test
{
private static bool Pass = true;
public static int Ret()
{
return Pass? 100 : 101;
}
public static void Assert(bool cond, string msg)
{
if (cond)
{
Console.WriteLine("PASS");
}
else
{
Console.WriteLine("FAIL: " + msg);
Pass = false;
}
}
}
| |
/*
* Copyright 2006 Jeremias Maerki in part, and ZXing Authors in part
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This file has been modified from its original form in Barcode4J.
*/
using System;
#if (SILVERLIGHT4 || SILVERLIGHT5 || NET40 || NET45 || NET46 || NET47 || NETFX_CORE || NETSTANDARD) && !NETSTANDARD1_0
using System.Numerics;
#else
using BigIntegerLibrary;
#endif
using System.Text;
using ZXing.Common;
namespace ZXing.PDF417.Internal
{
/// <summary>
/// PDF417 high-level encoder following the algorithm described in ISO/IEC 15438:2001(E) in
/// annex P.
/// </summary>
internal static class PDF417HighLevelEncoder
{
/// <summary>
/// code for Text compaction
/// </summary>
private const int TEXT_COMPACTION = 0;
/// <summary>
/// code for Byte compaction
/// </summary>
private const int BYTE_COMPACTION = 1;
/// <summary>
/// code for Numeric compaction
/// </summary>
private const int NUMERIC_COMPACTION = 2;
/// <summary>
/// Text compaction submode Alpha
/// </summary>
private const int SUBMODE_ALPHA = 0;
/// <summary>
/// Text compaction submode Lower
/// </summary>
private const int SUBMODE_LOWER = 1;
/// <summary>
/// Text compaction submode Mixed
/// </summary>
private const int SUBMODE_MIXED = 2;
/// <summary>
/// Text compaction submode Punctuation
/// </summary>
private const int SUBMODE_PUNCTUATION = 3;
/// <summary>
/// mode latch to Text Compaction mode
/// </summary>
private const int LATCH_TO_TEXT = 900;
/// <summary>
/// mode latch to Byte Compaction mode (number of characters NOT a multiple of 6)
/// </summary>
private const int LATCH_TO_BYTE_PADDED = 901;
/// <summary>
/// mode latch to Numeric Compaction mode
/// </summary>
private const int LATCH_TO_NUMERIC = 902;
/// <summary>
/// mode shift to Byte Compaction mode
/// </summary>
private const int SHIFT_TO_BYTE = 913;
/// <summary>
/// mode latch to Byte Compaction mode (number of characters a multiple of 6)
/// </summary>
private const int LATCH_TO_BYTE = 924;
/// <summary>
/// identifier for a user defined Extended Channel Interpretation (ECI)
/// </summary>
private const int ECI_USER_DEFINED = 925;
/// <summary>
/// identifier for a general purpose ECI format
/// </summary>
private const int ECI_GENERAL_PURPOSE = 926;
/// <summary>
/// identifier for an ECI of a character set or code page
/// </summary>
private const int ECI_CHARSET = 927;
/// <summary>
/// Raw code table for text compaction Mixed sub-mode
/// </summary>
private static readonly sbyte[] TEXT_MIXED_RAW =
{
48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 38, 13, 9, 44, 58,
35, 45, 46, 36, 47, 43, 37, 42, 61, 94, 0, 32, 0, 0, 0
};
/// <summary>
/// Raw code table for text compaction: Punctuation sub-mode
/// </summary>
private static readonly sbyte[] TEXT_PUNCTUATION_RAW =
{
59, 60, 62, 64, 91, 92, 93, 95, 96, 126, 33, 13, 9, 44, 58,
10, 45, 46, 36, 47, 34, 124, 42, 40, 41, 63, 123, 125, 39, 0
};
private static readonly sbyte[] MIXED = new sbyte[128];
private static readonly sbyte[] PUNCTUATION = new sbyte[128];
internal static string DEFAULT_ENCODING_NAME = "ISO-8859-1";
static PDF417HighLevelEncoder()
{
//Construct inverse lookups
for (int idx = 0; idx < MIXED.Length; idx++)
MIXED[idx] = -1;
for (int i = 0; i < TEXT_MIXED_RAW.Length; i++)
{
sbyte b = TEXT_MIXED_RAW[i];
if (b > 0)
{
MIXED[b] = (sbyte)i;
}
}
for (int idx = 0; idx < PUNCTUATION.Length; idx++)
PUNCTUATION[idx] = -1;
for (int i = 0; i < TEXT_PUNCTUATION_RAW.Length; i++)
{
sbyte b = TEXT_PUNCTUATION_RAW[i];
if (b > 0)
{
PUNCTUATION[b] = (sbyte)i;
}
}
}
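// Worked example (illustrative): TEXT_MIXED_RAW[11] is 13 (carriage return), so the loop above
// sets MIXED[13] = 11; a CR character encountered in Mixed sub-mode is therefore emitted as
// text value 11. The PUNCTUATION table is inverted the same way.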
/// <summary>
/// Performs high-level encoding of a PDF417 message using the algorithm described in annex P
/// of ISO/IEC 15438:2001(E). If byte compaction has been selected, then only byte compaction
/// is used.
/// </summary>
/// <param name="msg">the message</param>
/// <param name="compaction">compaction mode to use</param>
/// <param name="encoding">character encoding used to encode in default or byte compaction
/// or null for default / not applicable</param>
/// <param name="disableEci">if true, don't add an ECI segment for different encodings than default</param>
/// <returns>the encoded message (the char values range from 0 to 928)</returns>
internal static String encodeHighLevel(String msg, Compaction compaction, Encoding encoding, bool disableEci)
{
//the codewords 0..928 are encoded as Unicode characters
var sb = new StringBuilder(msg.Length);
if (encoding != null && !disableEci && String.Compare(DEFAULT_ENCODING_NAME, encoding.WebName, StringComparison.Ordinal) != 0)
{
CharacterSetECI eci = CharacterSetECI.getCharacterSetECIByName(encoding.WebName);
if (eci != null)
{
encodingECI(eci.Value, sb);
}
}
int len = msg.Length;
int p = 0;
int textSubMode = SUBMODE_ALPHA;
// User selected encoding mode
switch (compaction)
{
case Compaction.TEXT:
encodeText(msg, p, len, sb, textSubMode);
break;
case Compaction.BYTE:
var msgBytes = toBytes(msg, encoding);
encodeBinary(msgBytes, p, msgBytes.Length, BYTE_COMPACTION, sb);
break;
case Compaction.NUMERIC:
sb.Append((char) LATCH_TO_NUMERIC);
encodeNumeric(msg, p, len, sb);
break;
default:
int encodingMode = TEXT_COMPACTION; //Default mode, see 4.4.2.1
byte[] bytes = null;
while (p < len)
{
int n = determineConsecutiveDigitCount(msg, p);
if (n >= 13)
{
sb.Append((char) LATCH_TO_NUMERIC);
encodingMode = NUMERIC_COMPACTION;
textSubMode = SUBMODE_ALPHA; //Reset after latch
encodeNumeric(msg, p, n, sb);
p += n;
}
else
{
int t = determineConsecutiveTextCount(msg, p);
if (t >= 5 || n == len)
{
if (encodingMode != TEXT_COMPACTION)
{
sb.Append((char) LATCH_TO_TEXT);
encodingMode = TEXT_COMPACTION;
textSubMode = SUBMODE_ALPHA; //start with submode alpha after latch
}
textSubMode = encodeText(msg, p, t, sb, textSubMode);
p += t;
}
else
{
if (bytes == null)
{
bytes = toBytes(msg, encoding);
}
int b = determineConsecutiveBinaryCount(msg, bytes, p, encoding);
if (b == 0)
{
b = 1;
}
if (b == 1 && encodingMode == TEXT_COMPACTION)
{
//Switch for one byte (instead of latch)
encodeBinary(bytes, 0, 1, TEXT_COMPACTION, sb);
}
else
{
//Mode latch performed by encodeBinary()
encodeBinary(bytes,
toBytes(msg.Substring(0, p), encoding).Length,
toBytes(msg.Substring(p, b), encoding).Length,
encodingMode,
sb);
encodingMode = BYTE_COMPACTION;
textSubMode = SUBMODE_ALPHA; //Reset after latch
}
p += b;
}
}
}
break;
}
return sb.ToString();
}
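// Illustrative usage sketch (assumes the Compaction enum also defines an AUTO member that
// maps to the default branch above; that member is not shown in this excerpt):
//
//   var codewords = encodeHighLevel("HELLO 1234567890123", Compaction.AUTO, null, true);
//
// Runs of 13 or more digits latch to numeric compaction, runs of 5 or more text characters
// latch to text compaction, and everything else falls back to byte compaction.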
private static Encoding getEncoder(Encoding encoding)
{
// Defer instantiating the default encoding until needed, since it may not be
// supported on the current platform.
if (encoding == null)
{
try
{
encoding = Encoding.GetEncoding(DEFAULT_ENCODING_NAME);
}
catch (Exception)
{
// continue
}
if (encoding == null)
{
// Fallbacks
try
{
#if WindowsCE
try
{
encoding = Encoding.GetEncoding(1252);
}
catch (PlatformNotSupportedException)
{
// WindowsCE doesn't support all encodings and the available set is device dependent,
// so we try a few alternatives here.
encoding = Encoding.GetEncoding("CP437");
}
#else
// Silverlight supports only UTF-8 and UTF-16 out-of-the-box
encoding = Encoding.GetEncoding("UTF-8");
#endif
}
catch (Exception uce)
{
throw new WriterException("No support for any encoding: " + DEFAULT_ENCODING_NAME, uce);
}
}
}
return encoding;
}
private static byte[] toBytes(String msg, Encoding encoding)
{
return getEncoder(encoding).GetBytes(msg);
}
private static byte[] toBytes(char msg, Encoding encoding)
{
return getEncoder(encoding).GetBytes(new []{msg});
}
/// <summary>
/// Encode parts of the message using Text Compaction as described in ISO/IEC 15438:2001(E),
/// chapter 4.4.2.
/// </summary>
/// <param name="msg">the message</param>
/// <param name="startpos">the start position within the message</param>
/// <param name="count">the number of characters to encode</param>
/// <param name="sb">receives the encoded codewords</param>
/// <param name="initialSubmode">should normally be SUBMODE_ALPHA</param>
/// <returns>the text submode in which this method ends</returns>
private static int encodeText(String msg,
int startpos,
int count,
StringBuilder sb,
int initialSubmode)
{
StringBuilder tmp = new StringBuilder(count);
int submode = initialSubmode;
int idx = 0;
while (true)
{
char ch = msg[startpos + idx];
switch (submode)
{
case SUBMODE_ALPHA:
if (isAlphaUpper(ch))
{
if (ch == ' ')
{
tmp.Append((char) 26); //space
}
else
{
tmp.Append((char) (ch - 65));
}
}
else
{
if (isAlphaLower(ch))
{
submode = SUBMODE_LOWER;
tmp.Append((char) 27); //ll
continue;
}
else if (isMixed(ch))
{
submode = SUBMODE_MIXED;
tmp.Append((char) 28); //ml
continue;
}
else
{
tmp.Append((char) 29); //ps
tmp.Append((char) PUNCTUATION[ch]);
break;
}
}
break;
case SUBMODE_LOWER:
if (isAlphaLower(ch))
{
if (ch == ' ')
{
tmp.Append((char) 26); //space
}
else
{
tmp.Append((char) (ch - 97));
}
}
else
{
if (isAlphaUpper(ch))
{
tmp.Append((char) 27); //as
tmp.Append((char) (ch - 65));
//space cannot happen here, it is also in "Lower"
break;
}
else if (isMixed(ch))
{
submode = SUBMODE_MIXED;
tmp.Append((char) 28); //ml
continue;
}
else
{
tmp.Append((char) 29); //ps
tmp.Append((char) PUNCTUATION[ch]);
break;
}
}
break;
case SUBMODE_MIXED:
if (isMixed(ch))
{
tmp.Append((char) MIXED[ch]);
}
else
{
if (isAlphaUpper(ch))
{
submode = SUBMODE_ALPHA;
tmp.Append((char) 28); //al
continue;
}
else if (isAlphaLower(ch))
{
submode = SUBMODE_LOWER;
tmp.Append((char) 27); //ll
continue;
}
else
{
if (startpos + idx + 1 < count)
{
char next = msg[startpos + idx + 1];
if (isPunctuation(next))
{
submode = SUBMODE_PUNCTUATION;
tmp.Append((char) 25); //pl
continue;
}
}
tmp.Append((char) 29); //ps
tmp.Append((char) PUNCTUATION[ch]);
}
}
break;
default: //SUBMODE_PUNCTUATION
if (isPunctuation(ch))
{
tmp.Append((char) PUNCTUATION[ch]);
}
else
{
submode = SUBMODE_ALPHA;
tmp.Append((char) 29); //al
continue;
}
break;
}
idx++;
if (idx >= count)
{
break;
}
}
char h = (char) 0;
int len = tmp.Length;
for (int i = 0; i < len; i++)
{
bool odd = (i%2) != 0;
if (odd)
{
h = (char) ((h*30) + tmp[i]);
sb.Append(h);
}
else
{
h = tmp[i];
}
}
if ((len%2) != 0)
{
sb.Append((char) ((h*30) + 29)); //ps
}
return submode;
}
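// Worked example of the pairing loop above: in Alpha sub-mode "AB" produces the temporary
// values [0, 1], packed into the single codeword 0*30 + 1 = 1. "ABC" produces [0, 1, 2];
// the odd trailing value is padded with 29 (ps), giving the codewords 1 and 2*30 + 29 = 89.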
/// <summary>
/// Encode parts of the message using Byte Compaction as described in ISO/IEC 15438:2001(E),
/// chapter 4.4.3. The message is expected to have been converted to a byte array beforehand
/// using the selected character encoding (see toBytes).
/// </summary>
/// <param name="bytes">the message converted to a byte array</param>
/// <param name="startpos">the start position within the message</param>
/// <param name="count">the number of bytes to encode</param>
/// <param name="startmode">the mode from which this method starts</param>
/// <param name="sb">receives the encoded codewords</param>
private static void encodeBinary(byte[] bytes,
int startpos,
int count,
int startmode,
StringBuilder sb)
{
if (count == 1 && startmode == TEXT_COMPACTION)
{
sb.Append((char) SHIFT_TO_BYTE);
}
else
{
if ((count % 6) == 0)
{
sb.Append((char)LATCH_TO_BYTE);
}
else
{
sb.Append((char)LATCH_TO_BYTE_PADDED);
}
}
int idx = startpos;
// Encode sixpacks
if (count >= 6)
{
char[] chars = new char[5];
while ((startpos + count - idx) >= 6)
{
long t = 0;
for (int i = 0; i < 6; i++)
{
t <<= 8;
t += bytes[idx + i] & 0xff;
}
for (int i = 0; i < 5; i++)
{
chars[i] = (char) (t%900);
t /= 900;
}
for (int i = chars.Length - 1; i >= 0; i--)
{
sb.Append(chars[i]);
}
idx += 6;
}
}
//Encode rest (remaining n < 6 bytes, if any)
for (int i = idx; i < startpos + count; i++)
{
int ch = bytes[i] & 0xff;
sb.Append((char) ch);
}
}
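// Worked example of the six-byte packing above: each group of 6 bytes is read as a 48-bit
// big-endian integer and re-expressed as 5 base-900 digits, emitted most significant first.
// The bytes { 0, 0, 0, 0, 0, 1 } form the value 1 and yield the codewords 0, 0, 0, 0, 1;
// a trailing group of fewer than 6 bytes is emitted as one codeword per byte.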
private static void encodeNumeric(String msg, int startpos, int count, StringBuilder sb)
{
#if (SILVERLIGHT4 || SILVERLIGHT5 || NET40 || NET45 || NET46 || NET47 || NETFX_CORE || NETSTANDARD) && !NETSTANDARD1_0
int idx = 0;
StringBuilder tmp = new StringBuilder(count/3 + 1);
BigInteger num900 = new BigInteger(900);
BigInteger num0 = new BigInteger(0);
while (idx < count - 1)
{
tmp.Length = 0;
int len = Math.Min(44, count - idx);
String part = '1' + msg.Substring(startpos + idx, len);
#if SILVERLIGHT4 || SILVERLIGHT5
BigInteger bigint = BigIntegerExtensions.Parse(part);
#else
BigInteger bigint = BigInteger.Parse(part);
#endif
do
{
BigInteger c = bigint%num900;
tmp.Append((char) c);
bigint = BigInteger.Divide(bigint, num900);
} while (!bigint.Equals(num0));
//Reverse temporary string
for (int i = tmp.Length - 1; i >= 0; i--)
{
sb.Append(tmp[i]);
}
idx += len;
}
#else
int idx = 0;
StringBuilder tmp = new StringBuilder(count / 3 + 1);
BigInteger num900 = new BigInteger(900);
BigInteger num0 = new BigInteger(0);
while (idx < count - 1)
{
tmp.Length = 0;
int len = Math.Min(44, count - idx);
String part = '1' + msg.Substring(startpos + idx, len);
BigInteger bigint = BigInteger.Parse(part);
do
{
BigInteger c = BigInteger.Modulo(bigint, num900);
tmp.Append((char)c.GetHashCode());
bigint = BigInteger.Division(bigint, num900);
} while (!bigint.Equals(num0));
//Reverse temporary string
for (int i = tmp.Length - 1; i >= 0; i--)
{
sb.Append(tmp[i]);
}
idx += len;
}
#endif
}
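// Worked example: the digit run "000213298174000" is prefixed with '1' (preserving the
// leading zeros), parsed as the integer 1000213298174000 and expressed in base 900,
// yielding the codewords 1, 624, 434, 632, 282, 200 (most significant first).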
private static bool isDigit(char ch)
{
return ch >= '0' && ch <= '9';
}
private static bool isAlphaUpper(char ch)
{
return ch == ' ' || (ch >= 'A' && ch <= 'Z');
}
private static bool isAlphaLower(char ch)
{
return ch == ' ' || (ch >= 'a' && ch <= 'z');
}
private static bool isMixed(char ch)
{
return MIXED[ch] != -1;
}
private static bool isPunctuation(char ch)
{
return PUNCTUATION[ch] != -1;
}
private static bool isText(char ch)
{
return ch == '\t' || ch == '\n' || ch == '\r' || (ch >= 32 && ch <= 126);
}
/// <summary>
/// Determines the number of consecutive characters that are encodable using numeric compaction.
/// </summary>
/// <param name="msg">the message</param>
/// <param name="startpos">the start position within the message</param>
/// <returns>the requested character count</returns>
private static int determineConsecutiveDigitCount(String msg, int startpos)
{
int count = 0;
int len = msg.Length;
int idx = startpos;
if (idx < len)
{
char ch = msg[idx];
while (isDigit(ch) && idx < len)
{
count++;
idx++;
if (idx < len)
{
ch = msg[idx];
}
}
}
return count;
}
/// <summary>
/// Determines the number of consecutive characters that are encodable using text compaction.
/// </summary>
/// <param name="msg">the message</param>
/// <param name="startpos">the start position within the message</param>
/// <returns>the requested character count</returns>
private static int determineConsecutiveTextCount(String msg, int startpos)
{
int len = msg.Length;
int idx = startpos;
while (idx < len)
{
char ch = msg[idx];
int numericCount = 0;
while (numericCount < 13 && isDigit(ch) && idx < len)
{
numericCount++;
idx++;
if (idx < len)
{
ch = msg[idx];
}
}
if (numericCount >= 13)
{
return idx - startpos - numericCount;
}
if (numericCount > 0)
{
//Heuristic: All text-encodable chars or digits are binary encodable
continue;
}
ch = msg[idx];
//Check if character is encodable
if (!isText(ch))
{
break;
}
idx++;
}
return idx - startpos;
}
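// Worked example: for msg = "ABC1234567890123" and startpos = 0 the scan stops at the run
// of 13 digits and returns 3, so only "ABC" is claimed for text compaction and the digit
// run can be handled by numeric compaction instead; digit runs shorter than 13 are simply
// counted as text.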
/// <summary>
/// Determines the number of consecutive characters that are encodable using binary compaction.
/// </summary>
/// <param name="msg">the message</param>
/// <param name="bytes">the message converted to a byte array</param>
/// <param name="startpos">the start position within the message</param>
/// <param name="encoding">the character encoding used to convert the message to bytes</param>
/// <returns>the requested character count</returns>
private static int determineConsecutiveBinaryCount(String msg, byte[] bytes, int startpos, Encoding encoding)
{
int len = msg.Length;
int idx = startpos;
int idxb = idx; // bytes index (may differ from idx for utf-8 and other unicode encodings)
encoding = getEncoder(encoding);
while (idx < len)
{
char ch = msg[idx];
int numericCount = 0;
while (numericCount < 13 && isDigit(ch))
{
numericCount++;
//textCount++;
int i = idx + numericCount;
if (i >= len)
{
break;
}
ch = msg[i];
}
if (numericCount >= 13)
{
return idx - startpos;
}
ch = msg[idx];
// .Net fallback strategy: REPLACEMENT_CHARACTER 0x3F ('?')
if (bytes[idxb] == 63 && ch != '?')
{
throw new WriterException("Non-encodable character detected: " + ch + " (Unicode: " + (int) ch + ')');
}
idx++;
idxb++;
if (toBytes(ch, encoding).Length > 1) // for non-ascii symbols
idxb++;
}
return idx - startpos;
}
private static void encodingECI(int eci, StringBuilder sb)
{
if (eci >= 0 && eci < 900)
{
sb.Append((char) ECI_CHARSET);
sb.Append((char) eci);
}
else if (eci < 810900)
{
sb.Append((char) ECI_GENERAL_PURPOSE);
sb.Append((char) (eci/900 - 1));
sb.Append((char) (eci%900));
}
else if (eci < 811800)
{
sb.Append((char) ECI_USER_DEFINED);
sb.Append((char) (810900 - eci));
}
else
{
throw new WriterException("ECI number not in valid range from 0..811799, but was " + eci);
}
}
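// Example: the UTF-8 character set (ECI 26) is emitted as ECI_CHARSET followed by the
// codeword 26; a general-purpose ECI such as 90000 is emitted as ECI_GENERAL_PURPOSE
// followed by 90000/900 - 1 = 99 and 90000 % 900 = 0.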
}
}
| |
namespace HearThis.UI
{
partial class AdministrativeSettings
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing)
{
if (components != null)
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.components = new System.ComponentModel.Container();
System.Windows.Forms.Label _lblBreakClauses;
System.Windows.Forms.Panel pnlLine;
SIL.Windows.Forms.SettingProtection.SettingsProtectionLauncherButton settingsProtectionLauncherButton1;
System.Windows.Forms.Button _btnCancel;
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(AdministrativeSettings));
this.tabControl1 = new System.Windows.Forms.TabControl();
this.tabPageModes = new System.Windows.Forms.TabPage();
this.lblSelectModes = new System.Windows.Forms.Label();
this._tableLayoutModes = new System.Windows.Forms.TableLayoutPanel();
this.lnkNormalRecordingModeSetAsDefault = new System.Windows.Forms.LinkLabel();
this.NormalRecording = new System.Windows.Forms.CheckBox();
this.lnkAdministrativeModeSetAsDefault = new System.Windows.Forms.LinkLabel();
this.Administrator = new System.Windows.Forms.CheckBox();
this.tabPageSkipping = new System.Windows.Forms.TabPage();
this._tableLayoutPanelSkipping = new System.Windows.Forms.TableLayoutPanel();
this._lblSkippingInstructions = new System.Windows.Forms.Label();
this._lbSkippedStyles = new System.Windows.Forms.CheckedListBox();
this._chkShowSkipButton = new System.Windows.Forms.CheckBox();
this._btnClearAllSkipInfo = new System.Windows.Forms.Button();
this.tabPagePunctuation = new System.Windows.Forms.TabPage();
this._tableLayoutPanelPunctuation = new System.Windows.Forms.TableLayoutPanel();
this._lblClauseSeparators = new System.Windows.Forms.Label();
this._lblAdditionalLineBreakCharacters = new System.Windows.Forms.Label();
this._txtAdditionalBlockSeparators = new System.Windows.Forms.TextBox();
this._txtClauseSeparatorCharacters = new System.Windows.Forms.TextBox();
this._lblBreakBlocks = new System.Windows.Forms.Label();
this._lblWarningExistingRecordings = new System.Windows.Forms.Label();
this._chkBreakAtQuotes = new System.Windows.Forms.CheckBox();
this._chkBreakAtParagraphBreaks = new System.Windows.Forms.CheckBox();
this.tabPageInterface = new System.Windows.Forms.TabPage();
this._groupAdvancedUI = new System.Windows.Forms.GroupBox();
this._tableLayoutPanelAdvancedUI = new System.Windows.Forms.TableLayoutPanel();
this._chkEnableClipShifting = new System.Windows.Forms.CheckBox();
this._lblShiftClipsMenuWarning = new System.Windows.Forms.Label();
this._lblShiftClipsExplanation = new System.Windows.Forms.Label();
this._chkShowBookAndChapterLabels = new System.Windows.Forms.CheckBox();
this.lblColorSchemeChangeRestartWarning = new System.Windows.Forms.Label();
this._cboColorScheme = new System.Windows.Forms.ComboBox();
this.lblInterface = new System.Windows.Forms.Label();
this._btnOk = new System.Windows.Forms.Button();
this.l10NSharpExtender1 = new L10NSharp.UI.L10NSharpExtender(this.components);
_lblBreakClauses = new System.Windows.Forms.Label();
pnlLine = new System.Windows.Forms.Panel();
settingsProtectionLauncherButton1 = new SIL.Windows.Forms.SettingProtection.SettingsProtectionLauncherButton();
_btnCancel = new System.Windows.Forms.Button();
this.tabControl1.SuspendLayout();
this.tabPageModes.SuspendLayout();
this._tableLayoutModes.SuspendLayout();
this.tabPageSkipping.SuspendLayout();
this._tableLayoutPanelSkipping.SuspendLayout();
this.tabPagePunctuation.SuspendLayout();
this._tableLayoutPanelPunctuation.SuspendLayout();
this.tabPageInterface.SuspendLayout();
this._groupAdvancedUI.SuspendLayout();
this._tableLayoutPanelAdvancedUI.SuspendLayout();
((System.ComponentModel.ISupportInitialize)(this.l10NSharpExtender1)).BeginInit();
this.SuspendLayout();
//
// _lblBreakClauses
//
_lblBreakClauses.FlatStyle = System.Windows.Forms.FlatStyle.Flat;
_lblBreakClauses.Image = global::HearThis.Properties.Resources.BottomToolbar_BreakOnCommas;
this.l10NSharpExtender1.SetLocalizableToolTip(_lblBreakClauses, null);
this.l10NSharpExtender1.SetLocalizationComment(_lblBreakClauses, null);
this.l10NSharpExtender1.SetLocalizationPriority(_lblBreakClauses, L10NSharp.LocalizationPriority.NotLocalizable);
this.l10NSharpExtender1.SetLocalizingId(_lblBreakClauses, "AdministrativeSettings._lblBreakClauses");
_lblBreakClauses.Location = new System.Drawing.Point(14, 272);
_lblBreakClauses.Margin = new System.Windows.Forms.Padding(3, 6, 3, 0);
_lblBreakClauses.Name = "_lblBreakClauses";
_lblBreakClauses.Size = new System.Drawing.Size(35, 13);
_lblBreakClauses.TabIndex = 3;
//
// pnlLine
//
pnlLine.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
this._tableLayoutPanelPunctuation.SetColumnSpan(pnlLine, 2);
pnlLine.Dock = System.Windows.Forms.DockStyle.Bottom;
pnlLine.Location = new System.Drawing.Point(14, 256);
pnlLine.Margin = new System.Windows.Forms.Padding(3, 10, 3, 6);
pnlLine.Name = "pnlLine";
pnlLine.Size = new System.Drawing.Size(332, 4);
pnlLine.TabIndex = 10;
//
// settingsProtectionLauncherButton1
//
settingsProtectionLauncherButton1.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.l10NSharpExtender1.SetLocalizableToolTip(settingsProtectionLauncherButton1, null);
this.l10NSharpExtender1.SetLocalizationComment(settingsProtectionLauncherButton1, null);
this.l10NSharpExtender1.SetLocalizingId(settingsProtectionLauncherButton1, "AdministrativeSettings.SettingsProtectionLauncherButton");
settingsProtectionLauncherButton1.Location = new System.Drawing.Point(9, 389);
settingsProtectionLauncherButton1.Margin = new System.Windows.Forms.Padding(0, 5, 0, 0);
settingsProtectionLauncherButton1.Name = "settingsProtectionLauncherButton1";
settingsProtectionLauncherButton1.Size = new System.Drawing.Size(205, 37);
settingsProtectionLauncherButton1.TabIndex = 8;
//
// _btnCancel
//
_btnCancel.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
_btnCancel.DialogResult = System.Windows.Forms.DialogResult.Cancel;
this.l10NSharpExtender1.SetLocalizableToolTip(_btnCancel, null);
this.l10NSharpExtender1.SetLocalizationComment(_btnCancel, null);
this.l10NSharpExtender1.SetLocalizingId(_btnCancel, "Common.Cancel");
_btnCancel.Location = new System.Drawing.Point(298, 403);
_btnCancel.Name = "_btnCancel";
_btnCancel.Size = new System.Drawing.Size(75, 23);
_btnCancel.TabIndex = 9;
_btnCancel.Text = "&Cancel";
_btnCancel.UseVisualStyleBackColor = true;
//
// tabControl1
//
this.tabControl1.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.tabControl1.Controls.Add(this.tabPageModes);
this.tabControl1.Controls.Add(this.tabPageSkipping);
this.tabControl1.Controls.Add(this.tabPagePunctuation);
this.tabControl1.Controls.Add(this.tabPageInterface);
this.tabControl1.Location = new System.Drawing.Point(12, 12);
this.tabControl1.Name = "tabControl1";
this.tabControl1.SelectedIndex = 0;
this.tabControl1.Size = new System.Drawing.Size(368, 369);
this.tabControl1.TabIndex = 3;
//
// tabPageModes
//
this.tabPageModes.BackColor = System.Drawing.SystemColors.ButtonFace;
this.tabPageModes.Controls.Add(this.lblSelectModes);
this.tabPageModes.Controls.Add(this._tableLayoutModes);
this.l10NSharpExtender1.SetLocalizableToolTip(this.tabPageModes, null);
this.l10NSharpExtender1.SetLocalizationComment(this.tabPageModes, null);
this.l10NSharpExtender1.SetLocalizingId(this.tabPageModes, "AdministrativeSettings.tabPageModes");
this.tabPageModes.Location = new System.Drawing.Point(4, 22);
this.tabPageModes.Name = "tabPageModes";
this.tabPageModes.Padding = new System.Windows.Forms.Padding(3);
this.tabPageModes.Size = new System.Drawing.Size(360, 343);
this.tabPageModes.TabIndex = 0;
this.tabPageModes.Text = "Modes";
//
// lblSelectModes
//
this.lblSelectModes.AutoSize = true;
this.l10NSharpExtender1.SetLocalizableToolTip(this.lblSelectModes, null);
this.l10NSharpExtender1.SetLocalizationComment(this.lblSelectModes, null);
this.l10NSharpExtender1.SetLocalizingId(this.lblSelectModes, "AdministrativeSettings.lblSelectModes");
this.lblSelectModes.Location = new System.Drawing.Point(11, 11);
this.lblSelectModes.Margin = new System.Windows.Forms.Padding(0, 0, 3, 10);
this.lblSelectModes.Name = "lblSelectModes";
this.lblSelectModes.Size = new System.Drawing.Size(178, 13);
this.lblSelectModes.TabIndex = 5;
this.lblSelectModes.Text = "Select the modes to make available:";
//
// _tableLayoutModes
//
this._tableLayoutModes.AutoSize = true;
this._tableLayoutModes.ColumnCount = 2;
this._tableLayoutModes.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle());
this._tableLayoutModes.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle());
this._tableLayoutModes.Controls.Add(this.lnkNormalRecordingModeSetAsDefault, 1, 1);
this._tableLayoutModes.Controls.Add(this.NormalRecording, 0, 1);
this._tableLayoutModes.Controls.Add(this.lnkAdministrativeModeSetAsDefault, 1, 0);
this._tableLayoutModes.Controls.Add(this.Administrator, 0, 0);
this._tableLayoutModes.Location = new System.Drawing.Point(11, 37);
this._tableLayoutModes.Margin = new System.Windows.Forms.Padding(0, 3, 3, 3);
this._tableLayoutModes.Name = "_tableLayoutModes";
this._tableLayoutModes.RowCount = 2;
this._tableLayoutModes.RowStyles.Add(new System.Windows.Forms.RowStyle());
this._tableLayoutModes.RowStyles.Add(new System.Windows.Forms.RowStyle());
this._tableLayoutModes.Size = new System.Drawing.Size(230, 64);
this._tableLayoutModes.TabIndex = 3;
//
// lnkNormalRecordingModeSetAsDefault
//
this.lnkNormalRecordingModeSetAsDefault.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.lnkNormalRecordingModeSetAsDefault.AutoSize = true;
this.l10NSharpExtender1.SetLocalizableToolTip(this.lnkNormalRecordingModeSetAsDefault, null);
this.l10NSharpExtender1.SetLocalizationComment(this.lnkNormalRecordingModeSetAsDefault, null);
this.l10NSharpExtender1.SetLocalizingId(this.lnkNormalRecordingModeSetAsDefault, "AdministrativeSettings.lnkSetAsDefault");
this.lnkNormalRecordingModeSetAsDefault.Location = new System.Drawing.Point(150, 33);
this.lnkNormalRecordingModeSetAsDefault.Name = "lnkNormalRecordingModeSetAsDefault";
this.lnkNormalRecordingModeSetAsDefault.Size = new System.Drawing.Size(77, 31);
this.lnkNormalRecordingModeSetAsDefault.TabIndex = 12;
this.lnkNormalRecordingModeSetAsDefault.TabStop = true;
this.lnkNormalRecordingModeSetAsDefault.Text = "Set as Default";
this.lnkNormalRecordingModeSetAsDefault.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
//
// NormalRecording
//
this.NormalRecording.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)));
this.NormalRecording.AutoSize = true;
this.NormalRecording.Checked = true;
this.NormalRecording.CheckState = System.Windows.Forms.CheckState.Checked;
this.l10NSharpExtender1.SetLocalizableToolTip(this.NormalRecording, null);
this.l10NSharpExtender1.SetLocalizationComment(this.NormalRecording, null);
this.l10NSharpExtender1.SetLocalizingId(this.NormalRecording, "AdministrativeSettings._chkNormalRecordingMode");
this.NormalRecording.Location = new System.Drawing.Point(3, 36);
this.NormalRecording.MinimumSize = new System.Drawing.Size(0, 25);
this.NormalRecording.Name = "NormalRecording";
this.NormalRecording.Padding = new System.Windows.Forms.Padding(0, 3, 3, 3);
this.NormalRecording.Size = new System.Drawing.Size(114, 25);
this.NormalRecording.TabIndex = 6;
this.NormalRecording.Text = "Normal Recording";
this.NormalRecording.UseVisualStyleBackColor = true;
//
// lnkAdministrativeModeSetAsDefault
//
this.lnkAdministrativeModeSetAsDefault.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.lnkAdministrativeModeSetAsDefault.AutoSize = true;
this.lnkAdministrativeModeSetAsDefault.Enabled = false;
this.l10NSharpExtender1.SetLocalizableToolTip(this.lnkAdministrativeModeSetAsDefault, null);
this.l10NSharpExtender1.SetLocalizationComment(this.lnkAdministrativeModeSetAsDefault, null);
this.l10NSharpExtender1.SetLocalizingId(this.lnkAdministrativeModeSetAsDefault, "AdministrativeSettings.lnkSetAsDefault");
this.lnkAdministrativeModeSetAsDefault.Location = new System.Drawing.Point(150, 0);
this.lnkAdministrativeModeSetAsDefault.Name = "lnkAdministrativeModeSetAsDefault";
this.lnkAdministrativeModeSetAsDefault.Size = new System.Drawing.Size(77, 33);
this.lnkAdministrativeModeSetAsDefault.TabIndex = 11;
this.lnkAdministrativeModeSetAsDefault.TabStop = true;
this.lnkAdministrativeModeSetAsDefault.Text = "Set as Default";
this.lnkAdministrativeModeSetAsDefault.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
//
// Administrator
//
this.Administrator.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)));
this.Administrator.AutoSize = true;
this.Administrator.Checked = true;
this.Administrator.CheckState = System.Windows.Forms.CheckState.Checked;
this.Administrator.Image = global::HearThis.Properties.Resources._1406663178_tick_circle_frame;
this.l10NSharpExtender1.SetLocalizableToolTip(this.Administrator, null);
this.l10NSharpExtender1.SetLocalizationComment(this.Administrator, null);
this.l10NSharpExtender1.SetLocalizingId(this.Administrator, "AdministrativeSettings._chkAdministrativeMode");
this.Administrator.Location = new System.Drawing.Point(3, 3);
this.Administrator.MinimumSize = new System.Drawing.Size(0, 27);
this.Administrator.Name = "Administrator";
this.Administrator.Padding = new System.Windows.Forms.Padding(0, 3, 3, 3);
this.Administrator.Size = new System.Drawing.Size(141, 27);
this.Administrator.TabIndex = 4;
this.Administrator.Text = "Administrative Setup";
this.Administrator.TextImageRelation = System.Windows.Forms.TextImageRelation.TextBeforeImage;
this.Administrator.UseVisualStyleBackColor = true;
//
// tabPageSkipping
//
this.tabPageSkipping.BackColor = System.Drawing.SystemColors.ButtonFace;
this.tabPageSkipping.Controls.Add(this._tableLayoutPanelSkipping);
this.tabPageSkipping.Controls.Add(this._btnClearAllSkipInfo);
this.l10NSharpExtender1.SetLocalizableToolTip(this.tabPageSkipping, null);
this.l10NSharpExtender1.SetLocalizationComment(this.tabPageSkipping, null);
this.l10NSharpExtender1.SetLocalizingId(this.tabPageSkipping, "AdministrativeSettings.tabPageSkipping");
this.tabPageSkipping.Location = new System.Drawing.Point(4, 22);
this.tabPageSkipping.Name = "tabPageSkipping";
this.tabPageSkipping.Padding = new System.Windows.Forms.Padding(3);
this.tabPageSkipping.Size = new System.Drawing.Size(360, 343);
this.tabPageSkipping.TabIndex = 1;
this.tabPageSkipping.Text = "Skipping";
//
// _tableLayoutPanelSkipping
//
this._tableLayoutPanelSkipping.ColumnCount = 1;
this._tableLayoutPanelSkipping.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 100F));
this._tableLayoutPanelSkipping.Controls.Add(this._lblSkippingInstructions, 0, 1);
this._tableLayoutPanelSkipping.Controls.Add(this._lbSkippedStyles, 0, 2);
this._tableLayoutPanelSkipping.Controls.Add(this._chkShowSkipButton, 0, 0);
this._tableLayoutPanelSkipping.Dock = System.Windows.Forms.DockStyle.Fill;
this._tableLayoutPanelSkipping.Location = new System.Drawing.Point(3, 3);
this._tableLayoutPanelSkipping.Name = "_tableLayoutPanelSkipping";
this._tableLayoutPanelSkipping.Padding = new System.Windows.Forms.Padding(11);
this._tableLayoutPanelSkipping.RowCount = 3;
this._tableLayoutPanelSkipping.RowStyles.Add(new System.Windows.Forms.RowStyle());
this._tableLayoutPanelSkipping.RowStyles.Add(new System.Windows.Forms.RowStyle());
this._tableLayoutPanelSkipping.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 100F));
this._tableLayoutPanelSkipping.Size = new System.Drawing.Size(354, 337);
this._tableLayoutPanelSkipping.TabIndex = 3;
//
// _lblSkippingInstructions
//
this._lblSkippingInstructions.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this._lblSkippingInstructions.AutoSize = true;
this.l10NSharpExtender1.SetLocalizableToolTip(this._lblSkippingInstructions, null);
this.l10NSharpExtender1.SetLocalizationComment(this._lblSkippingInstructions, null);
this.l10NSharpExtender1.SetLocalizingId(this._lblSkippingInstructions, "AdministrativeSettings._lblSkippingInstructions");
this._lblSkippingInstructions.Location = new System.Drawing.Point(11, 44);
this._lblSkippingInstructions.Margin = new System.Windows.Forms.Padding(0, 0, 3, 6);
this._lblSkippingInstructions.Name = "_lblSkippingInstructions";
this._lblSkippingInstructions.Size = new System.Drawing.Size(329, 26);
this._lblSkippingInstructions.TabIndex = 2;
this._lblSkippingInstructions.Text = "Select any styles whose text should never be recorded for project {0}.";
//
// _lbSkippedStyles
//
this._lbSkippedStyles.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this._lbSkippedStyles.FormattingEnabled = true;
this.l10NSharpExtender1.SetLocalizableToolTip(this._lbSkippedStyles, null);
this.l10NSharpExtender1.SetLocalizationComment(this._lbSkippedStyles, null);
this.l10NSharpExtender1.SetLocalizationPriority(this._lbSkippedStyles, L10NSharp.LocalizationPriority.NotLocalizable);
this.l10NSharpExtender1.SetLocalizingId(this._lbSkippedStyles, "AdministrativeSettings._lbSkippedStyles");
this._lbSkippedStyles.Location = new System.Drawing.Point(14, 79);
this._lbSkippedStyles.Name = "_lbSkippedStyles";
this._lbSkippedStyles.Size = new System.Drawing.Size(326, 244);
this._lbSkippedStyles.TabIndex = 0;
//
// _chkShowSkipButton
//
this._chkShowSkipButton.AutoSize = true;
this.l10NSharpExtender1.SetLocalizableToolTip(this._chkShowSkipButton, null);
this.l10NSharpExtender1.SetLocalizationComment(this._chkShowSkipButton, null);
this.l10NSharpExtender1.SetLocalizingId(this._chkShowSkipButton, "AdministrativeSettings._chkShowSkipButton");
this._chkShowSkipButton.Location = new System.Drawing.Point(14, 14);
this._chkShowSkipButton.Name = "_chkShowSkipButton";
this._chkShowSkipButton.Padding = new System.Windows.Forms.Padding(0, 0, 3, 10);
this._chkShowSkipButton.Size = new System.Drawing.Size(114, 27);
this._chkShowSkipButton.TabIndex = 3;
this._chkShowSkipButton.Text = "Show Skip Button";
this._chkShowSkipButton.UseVisualStyleBackColor = true;
//
// _btnClearAllSkipInfo
//
this._btnClearAllSkipInfo.Anchor = System.Windows.Forms.AnchorStyles.Bottom;
this.l10NSharpExtender1.SetLocalizableToolTip(this._btnClearAllSkipInfo, null);
this.l10NSharpExtender1.SetLocalizationComment(this._btnClearAllSkipInfo, null);
this.l10NSharpExtender1.SetLocalizingId(this._btnClearAllSkipInfo, "AdministrativeSettings._btnClearAllSkipInfo");
this._btnClearAllSkipInfo.Location = new System.Drawing.Point(117, 207);
this._btnClearAllSkipInfo.Name = "_btnClearAllSkipInfo";
this._btnClearAllSkipInfo.Size = new System.Drawing.Size(107, 23);
this._btnClearAllSkipInfo.TabIndex = 1;
this._btnClearAllSkipInfo.Text = "Clear All Skips";
this._btnClearAllSkipInfo.UseVisualStyleBackColor = true;
this._btnClearAllSkipInfo.Visible = false;
this._btnClearAllSkipInfo.Click += new System.EventHandler(this.HandleClearAllSkipInfo_Click);
//
// tabPagePunctuation
//
this.tabPagePunctuation.BackColor = System.Drawing.SystemColors.ButtonFace;
this.tabPagePunctuation.Controls.Add(this._tableLayoutPanelPunctuation);
this.l10NSharpExtender1.SetLocalizableToolTip(this.tabPagePunctuation, null);
this.l10NSharpExtender1.SetLocalizationComment(this.tabPagePunctuation, null);
this.l10NSharpExtender1.SetLocalizingId(this.tabPagePunctuation, "AdministrativeSettings.tabPagePunctuation");
this.tabPagePunctuation.Location = new System.Drawing.Point(4, 22);
this.tabPagePunctuation.Name = "tabPagePunctuation";
this.tabPagePunctuation.Size = new System.Drawing.Size(360, 343);
this.tabPagePunctuation.TabIndex = 2;
this.tabPagePunctuation.Text = "Punctuation";
//
// _tableLayoutPanelPunctuation
//
this._tableLayoutPanelPunctuation.ColumnCount = 2;
this._tableLayoutPanelPunctuation.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle());
this._tableLayoutPanelPunctuation.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 100F));
this._tableLayoutPanelPunctuation.Controls.Add(this._lblClauseSeparators, 1, 6);
this._tableLayoutPanelPunctuation.Controls.Add(_lblBreakClauses, 0, 6);
this._tableLayoutPanelPunctuation.Controls.Add(pnlLine, 0, 5);
this._tableLayoutPanelPunctuation.Controls.Add(this._lblAdditionalLineBreakCharacters, 1, 1);
this._tableLayoutPanelPunctuation.Controls.Add(this._txtAdditionalBlockSeparators, 0, 2);
this._tableLayoutPanelPunctuation.Controls.Add(this._txtClauseSeparatorCharacters, 0, 7);
this._tableLayoutPanelPunctuation.Controls.Add(this._lblBreakBlocks, 0, 1);
this._tableLayoutPanelPunctuation.Controls.Add(this._lblWarningExistingRecordings, 0, 4);
this._tableLayoutPanelPunctuation.Controls.Add(this._chkBreakAtQuotes, 1, 0);
this._tableLayoutPanelPunctuation.Controls.Add(this._chkBreakAtParagraphBreaks, 0, 3);
this._tableLayoutPanelPunctuation.Dock = System.Windows.Forms.DockStyle.Fill;
this._tableLayoutPanelPunctuation.Location = new System.Drawing.Point(0, 0);
this._tableLayoutPanelPunctuation.Name = "_tableLayoutPanelPunctuation";
this._tableLayoutPanelPunctuation.Padding = new System.Windows.Forms.Padding(11);
this._tableLayoutPanelPunctuation.RowCount = 1;
this._tableLayoutPanelPunctuation.RowStyles.Add(new System.Windows.Forms.RowStyle());
this._tableLayoutPanelPunctuation.RowStyles.Add(new System.Windows.Forms.RowStyle());
this._tableLayoutPanelPunctuation.RowStyles.Add(new System.Windows.Forms.RowStyle());
this._tableLayoutPanelPunctuation.RowStyles.Add(new System.Windows.Forms.RowStyle());
this._tableLayoutPanelPunctuation.RowStyles.Add(new System.Windows.Forms.RowStyle());
this._tableLayoutPanelPunctuation.RowStyles.Add(new System.Windows.Forms.RowStyle());
this._tableLayoutPanelPunctuation.RowStyles.Add(new System.Windows.Forms.RowStyle());
this._tableLayoutPanelPunctuation.RowStyles.Add(new System.Windows.Forms.RowStyle());
this._tableLayoutPanelPunctuation.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
this._tableLayoutPanelPunctuation.Size = new System.Drawing.Size(360, 343);
this._tableLayoutPanelPunctuation.TabIndex = 1;
//
// _lblClauseSeparators
//
this._lblClauseSeparators.AutoSize = true;
this.l10NSharpExtender1.SetLocalizableToolTip(this._lblClauseSeparators, null);
this.l10NSharpExtender1.SetLocalizationComment(this._lblClauseSeparators, null);
this.l10NSharpExtender1.SetLocalizingId(this._lblClauseSeparators, "AdministrativeSettings._lblClauseSeparators");
this._lblClauseSeparators.Location = new System.Drawing.Point(55, 269);
this._lblClauseSeparators.Margin = new System.Windows.Forms.Padding(3, 3, 3, 0);
this._lblClauseSeparators.Name = "_lblClauseSeparators";
this._lblClauseSeparators.Size = new System.Drawing.Size(285, 26);
this._lblClauseSeparators.TabIndex = 2;
this._lblClauseSeparators.Text = "Pause punctuation (used when option to break blocks into lines is selected):";
//
// _lblAdditionalLineBreakCharacters
//
this._lblAdditionalLineBreakCharacters.AutoSize = true;
this.l10NSharpExtender1.SetLocalizableToolTip(this._lblAdditionalLineBreakCharacters, null);
this.l10NSharpExtender1.SetLocalizationComment(this._lblAdditionalLineBreakCharacters, null);
this.l10NSharpExtender1.SetLocalizingId(this._lblAdditionalLineBreakCharacters, "AdministrativeSettings._lblAdditionalLineBreakCharacters");
this._lblAdditionalLineBreakCharacters.Location = new System.Drawing.Point(55, 41);
this._lblAdditionalLineBreakCharacters.Name = "_lblAdditionalLineBreakCharacters";
this._lblAdditionalLineBreakCharacters.Size = new System.Drawing.Size(233, 26);
this._lblAdditionalLineBreakCharacters.TabIndex = 12;
this._lblAdditionalLineBreakCharacters.Text = "Additional characters (besides sentence-ending punctuation) to break text into bl" +
"ocks:";
//
// _txtAdditionalBlockSeparators
//
this._tableLayoutPanelPunctuation.SetColumnSpan(this._txtAdditionalBlockSeparators, 2);
this._txtAdditionalBlockSeparators.Dock = System.Windows.Forms.DockStyle.Fill;
this._txtAdditionalBlockSeparators.Font = new System.Drawing.Font("Segoe UI Semibold", 9.75F, System.Drawing.FontStyle.Bold);
this.l10NSharpExtender1.SetLocalizableToolTip(this._txtAdditionalBlockSeparators, null);
this.l10NSharpExtender1.SetLocalizationComment(this._txtAdditionalBlockSeparators, null);
this.l10NSharpExtender1.SetLocalizationPriority(this._txtAdditionalBlockSeparators, L10NSharp.LocalizationPriority.NotLocalizable);
this.l10NSharpExtender1.SetLocalizingId(this._txtAdditionalBlockSeparators, "AdministrativeSettings._txtAdditionalBlockSeparators");
this._txtAdditionalBlockSeparators.Location = new System.Drawing.Point(14, 73);
this._txtAdditionalBlockSeparators.Margin = new System.Windows.Forms.Padding(3, 3, 3, 10);
this._txtAdditionalBlockSeparators.Name = "_txtAdditionalBlockSeparators";
this._txtAdditionalBlockSeparators.Size = new System.Drawing.Size(332, 25);
this._txtAdditionalBlockSeparators.TabIndex = 13;
this._txtAdditionalBlockSeparators.TextChanged += new System.EventHandler(this.UpdateWarningTextColor);
this._txtAdditionalBlockSeparators.Leave += new System.EventHandler(this._txtAdditionalBlockSeparators_Leave);
//
// _txtClauseSeparatorCharacters
//
this._tableLayoutPanelPunctuation.SetColumnSpan(this._txtClauseSeparatorCharacters, 2);
this._txtClauseSeparatorCharacters.Dock = System.Windows.Forms.DockStyle.Fill;
this._txtClauseSeparatorCharacters.Font = new System.Drawing.Font("Segoe UI Semibold", 9.75F, System.Drawing.FontStyle.Bold);
this.l10NSharpExtender1.SetLocalizableToolTip(this._txtClauseSeparatorCharacters, null);
this.l10NSharpExtender1.SetLocalizationComment(this._txtClauseSeparatorCharacters, null);
this.l10NSharpExtender1.SetLocalizationPriority(this._txtClauseSeparatorCharacters, L10NSharp.LocalizationPriority.NotLocalizable);
this.l10NSharpExtender1.SetLocalizingId(this._txtClauseSeparatorCharacters, "AdministrativeSettings._txtClauseSeparatorCharacters");
this._txtClauseSeparatorCharacters.Location = new System.Drawing.Point(14, 298);
this._txtClauseSeparatorCharacters.Margin = new System.Windows.Forms.Padding(3, 3, 3, 10);
this._txtClauseSeparatorCharacters.Name = "_txtClauseSeparatorCharacters";
this._txtClauseSeparatorCharacters.Size = new System.Drawing.Size(332, 25);
this._txtClauseSeparatorCharacters.TabIndex = 15;
this._txtClauseSeparatorCharacters.Leave += new System.EventHandler(this._txtClauseSeparatorCharacters_Leave);
//
// _lblBreakBlocks
//
this._lblBreakBlocks.Anchor = System.Windows.Forms.AnchorStyles.Left;
this._lblBreakBlocks.FlatStyle = System.Windows.Forms.FlatStyle.Flat;
this._lblBreakBlocks.Image = global::HearThis.Properties.Resources.Icon_BlockBreak;
this._lblBreakBlocks.ImageAlign = System.Drawing.ContentAlignment.MiddleLeft;
this.l10NSharpExtender1.SetLocalizableToolTip(this._lblBreakBlocks, null);
this.l10NSharpExtender1.SetLocalizationComment(this._lblBreakBlocks, null);
this.l10NSharpExtender1.SetLocalizingId(this._lblBreakBlocks, "AdministrativeSettings._lblBreakBlocks");
this._lblBreakBlocks.Location = new System.Drawing.Point(14, 41);
this._lblBreakBlocks.Name = "_lblBreakBlocks";
this._lblBreakBlocks.Size = new System.Drawing.Size(35, 29);
this._lblBreakBlocks.TabIndex = 14;
//
// _lblWarningExistingRecordings
//
this._lblWarningExistingRecordings.AutoSize = true;
this._tableLayoutPanelPunctuation.SetColumnSpan(this._lblWarningExistingRecordings, 2);
this._lblWarningExistingRecordings.Dock = System.Windows.Forms.DockStyle.Fill;
this._lblWarningExistingRecordings.ForeColor = System.Drawing.Color.Red;
this.l10NSharpExtender1.SetLocalizableToolTip(this._lblWarningExistingRecordings, null);
this.l10NSharpExtender1.SetLocalizationComment(this._lblWarningExistingRecordings, null);
this.l10NSharpExtender1.SetLocalizingId(this._lblWarningExistingRecordings, "AdministrativeSettings._lblWarningExistingRecordings");
this._lblWarningExistingRecordings.Location = new System.Drawing.Point(14, 155);
this._lblWarningExistingRecordings.Name = "_lblWarningExistingRecordings";
this._lblWarningExistingRecordings.Size = new System.Drawing.Size(332, 91);
this._lblWarningExistingRecordings.TabIndex = 11;
this._lblWarningExistingRecordings.Text = resources.GetString("_lblWarningExistingRecordings.Text");
//
// _chkBreakAtQuotes
//
this._chkBreakAtQuotes.AutoSize = true;
this._chkBreakAtQuotes.Checked = true;
this._chkBreakAtQuotes.CheckState = System.Windows.Forms.CheckState.Checked;
this._tableLayoutPanelPunctuation.SetColumnSpan(this._chkBreakAtQuotes, 2);
this.l10NSharpExtender1.SetLocalizableToolTip(this._chkBreakAtQuotes, null);
this.l10NSharpExtender1.SetLocalizationComment(this._chkBreakAtQuotes, null);
this.l10NSharpExtender1.SetLocalizingId(this._chkBreakAtQuotes, "AdministrativeSettings._chkBreakAtQuotes");
this._chkBreakAtQuotes.Location = new System.Drawing.Point(14, 14);
this._chkBreakAtQuotes.Margin = new System.Windows.Forms.Padding(3, 3, 3, 10);
this._chkBreakAtQuotes.Name = "_chkBreakAtQuotes";
this._chkBreakAtQuotes.Size = new System.Drawing.Size(242, 17);
this._chkBreakAtQuotes.TabIndex = 1;
this._chkBreakAtQuotes.Text = "Treat quotations as separate recording blocks";
this._chkBreakAtQuotes.UseVisualStyleBackColor = true;
this._chkBreakAtQuotes.CheckedChanged += new System.EventHandler(this.UpdateWarningTextColor);
//
// _chkBreakAtParagraphBreaks
//
this._tableLayoutPanelPunctuation.SetColumnSpan(this._chkBreakAtParagraphBreaks, 2);
this.l10NSharpExtender1.SetLocalizableToolTip(this._chkBreakAtParagraphBreaks, null);
this.l10NSharpExtender1.SetLocalizationComment(this._chkBreakAtParagraphBreaks, null);
this.l10NSharpExtender1.SetLocalizingId(this._chkBreakAtParagraphBreaks, "AdministrativeSettings._chkBreakAtParagraphBreaks");
this._chkBreakAtParagraphBreaks.Location = new System.Drawing.Point(14, 111);
this._chkBreakAtParagraphBreaks.Name = "_chkBreakAtParagraphBreaks";
this._chkBreakAtParagraphBreaks.Size = new System.Drawing.Size(332, 41);
this._chkBreakAtParagraphBreaks.TabIndex = 16;
this._chkBreakAtParagraphBreaks.Text = "Treat paragraph breaks as separate recording blocks (useful for poetry)";
this._chkBreakAtParagraphBreaks.TextAlign = System.Drawing.ContentAlignment.BottomLeft;
this._chkBreakAtParagraphBreaks.CheckedChanged += new System.EventHandler(this.UpdateWarningTextColor);
//
// tabPageInterface
//
this.tabPageInterface.BackColor = System.Drawing.SystemColors.ButtonFace;
this.tabPageInterface.Controls.Add(this._groupAdvancedUI);
this.tabPageInterface.Controls.Add(this._chkShowBookAndChapterLabels);
this.tabPageInterface.Controls.Add(this.lblColorSchemeChangeRestartWarning);
this.tabPageInterface.Controls.Add(this._cboColorScheme);
this.tabPageInterface.Controls.Add(this.lblInterface);
this.l10NSharpExtender1.SetLocalizableToolTip(this.tabPageInterface, null);
this.l10NSharpExtender1.SetLocalizationComment(this.tabPageInterface, null);
this.l10NSharpExtender1.SetLocalizingId(this.tabPageInterface, "AdministrativeSettings.tabPageInterface");
this.tabPageInterface.Location = new System.Drawing.Point(4, 22);
this.tabPageInterface.Name = "tabPageInterface";
this.tabPageInterface.Size = new System.Drawing.Size(360, 343);
this.tabPageInterface.TabIndex = 3;
this.tabPageInterface.Text = "Interface";
//
// _groupAdvancedUI
//
this._groupAdvancedUI.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this._groupAdvancedUI.Controls.Add(this._tableLayoutPanelAdvancedUI);
this.l10NSharpExtender1.SetLocalizableToolTip(this._groupAdvancedUI, null);
this.l10NSharpExtender1.SetLocalizationComment(this._groupAdvancedUI, null);
this.l10NSharpExtender1.SetLocalizingId(this._groupAdvancedUI, "AdministrativeSettings._groupAdvancedUI");
this._groupAdvancedUI.Location = new System.Drawing.Point(14, 172);
this._groupAdvancedUI.Name = "_groupAdvancedUI";
this._groupAdvancedUI.Size = new System.Drawing.Size(331, 144);
this._groupAdvancedUI.TabIndex = 10;
this._groupAdvancedUI.TabStop = false;
this._groupAdvancedUI.Text = "Advanced";
//
// _tableLayoutPanelAdvancedUI
//
this._tableLayoutPanelAdvancedUI.ColumnCount = 1;
this._tableLayoutPanelAdvancedUI.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 100F));
this._tableLayoutPanelAdvancedUI.Controls.Add(this._chkEnableClipShifting, 0, 0);
this._tableLayoutPanelAdvancedUI.Controls.Add(this._lblShiftClipsMenuWarning, 0, 2);
this._tableLayoutPanelAdvancedUI.Controls.Add(this._lblShiftClipsExplanation, 0, 1);
this._tableLayoutPanelAdvancedUI.Dock = System.Windows.Forms.DockStyle.Fill;
this._tableLayoutPanelAdvancedUI.Location = new System.Drawing.Point(3, 16);
this._tableLayoutPanelAdvancedUI.Name = "_tableLayoutPanelAdvancedUI";
this._tableLayoutPanelAdvancedUI.RowCount = 3;
this._tableLayoutPanelAdvancedUI.RowStyles.Add(new System.Windows.Forms.RowStyle());
this._tableLayoutPanelAdvancedUI.RowStyles.Add(new System.Windows.Forms.RowStyle());
this._tableLayoutPanelAdvancedUI.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 100F));
this._tableLayoutPanelAdvancedUI.Size = new System.Drawing.Size(325, 125);
this._tableLayoutPanelAdvancedUI.TabIndex = 13;
//
// _chkEnableClipShifting
//
this._chkEnableClipShifting.AutoSize = true;
this.l10NSharpExtender1.SetLocalizableToolTip(this._chkEnableClipShifting, null);
this.l10NSharpExtender1.SetLocalizationComment(this._chkEnableClipShifting, null);
this.l10NSharpExtender1.SetLocalizingId(this._chkEnableClipShifting, "AdministrativeSettings._chkEnableClipShifting");
this._chkEnableClipShifting.Location = new System.Drawing.Point(3, 3);
this._chkEnableClipShifting.Name = "_chkEnableClipShifting";
this._chkEnableClipShifting.Size = new System.Drawing.Size(125, 17);
this._chkEnableClipShifting.TabIndex = 11;
this._chkEnableClipShifting.Text = "Enable {0} command";
this._chkEnableClipShifting.UseVisualStyleBackColor = true;
this._chkEnableClipShifting.CheckedChanged += new System.EventHandler(this.chkEnableClipShifting_CheckedChanged);
//
// _lblShiftClipsMenuWarning
//
this._lblShiftClipsMenuWarning.AutoSize = true;
this._lblShiftClipsMenuWarning.ForeColor = System.Drawing.Color.Red;
this.l10NSharpExtender1.SetLocalizableToolTip(this._lblShiftClipsMenuWarning, null);
this.l10NSharpExtender1.SetLocalizationComment(this._lblShiftClipsMenuWarning, "Param 0: name of \"Shift Clips\" menu command; Param 1: HearThis (program name)");
this.l10NSharpExtender1.SetLocalizingId(this._lblShiftClipsMenuWarning, "AdministrativeSettings._lblShiftClipsMenuWarning");
this._lblShiftClipsMenuWarning.Location = new System.Drawing.Point(3, 49);
this._lblShiftClipsMenuWarning.Name = "_lblShiftClipsMenuWarning";
this._lblShiftClipsMenuWarning.Size = new System.Drawing.Size(310, 65);
this._lblShiftClipsMenuWarning.TabIndex = 12;
this._lblShiftClipsMenuWarning.Text = resources.GetString("_lblShiftClipsMenuWarning.Text");
this._lblShiftClipsMenuWarning.Visible = false;
//
// _lblShiftClipsExplanation
//
this._lblShiftClipsExplanation.AutoSize = true;
this.l10NSharpExtender1.SetLocalizableToolTip(this._lblShiftClipsExplanation, null);
this.l10NSharpExtender1.SetLocalizationComment(this._lblShiftClipsExplanation, null);
this.l10NSharpExtender1.SetLocalizingId(this._lblShiftClipsExplanation, "AdministrativeSettings._labelShiftClipsExplanation");
this._lblShiftClipsExplanation.Location = new System.Drawing.Point(3, 23);
this._lblShiftClipsExplanation.Name = "_lblShiftClipsExplanation";
this._lblShiftClipsExplanation.Size = new System.Drawing.Size(290, 26);
this._lblShiftClipsExplanation.TabIndex = 13;
this._lblShiftClipsExplanation.Text = "To use this command, right-click the block slider in the main window.";
this._lblShiftClipsExplanation.Visible = false;
//
// _chkShowBookAndChapterLabels
//
this._chkShowBookAndChapterLabels.AutoSize = true;
this._chkShowBookAndChapterLabels.Checked = true;
this._chkShowBookAndChapterLabels.CheckState = System.Windows.Forms.CheckState.Checked;
this.l10NSharpExtender1.SetLocalizableToolTip(this._chkShowBookAndChapterLabels, null);
this.l10NSharpExtender1.SetLocalizationComment(this._chkShowBookAndChapterLabels, null);
this.l10NSharpExtender1.SetLocalizingId(this._chkShowBookAndChapterLabels, "AdministrativeSettings._chkShowBookAndChapterLabels");
this._chkShowBookAndChapterLabels.Location = new System.Drawing.Point(14, 16);
this._chkShowBookAndChapterLabels.Name = "_chkShowBookAndChapterLabels";
this._chkShowBookAndChapterLabels.Size = new System.Drawing.Size(275, 17);
this._chkShowBookAndChapterLabels.TabIndex = 9;
this._chkShowBookAndChapterLabels.Text = "Show book and chapter labels on navigation buttons";
this._chkShowBookAndChapterLabels.UseVisualStyleBackColor = true;
//
// lblColorSchemeChangeRestartWarning
//
this.lblColorSchemeChangeRestartWarning.AutoSize = true;
this.l10NSharpExtender1.SetLocalizableToolTip(this.lblColorSchemeChangeRestartWarning, null);
this.l10NSharpExtender1.SetLocalizationComment(this.lblColorSchemeChangeRestartWarning, null);
this.l10NSharpExtender1.SetLocalizingId(this.lblColorSchemeChangeRestartWarning, "AdministrativeSettings.lblColorSchemeChangeRestartWarning");
this.lblColorSchemeChangeRestartWarning.Location = new System.Drawing.Point(15, 116);
this.lblColorSchemeChangeRestartWarning.Name = "lblColorSchemeChangeRestartWarning";
this.lblColorSchemeChangeRestartWarning.Size = new System.Drawing.Size(249, 13);
this.lblColorSchemeChangeRestartWarning.TabIndex = 8;
this.lblColorSchemeChangeRestartWarning.Text = "HearThis will restart to apply the new color scheme.";
this.lblColorSchemeChangeRestartWarning.Visible = false;
//
// _cboColorScheme
//
this._cboColorScheme.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
this._cboColorScheme.FormattingEnabled = true;
this.l10NSharpExtender1.SetLocalizableToolTip(this._cboColorScheme, null);
this.l10NSharpExtender1.SetLocalizationComment(this._cboColorScheme, null);
this.l10NSharpExtender1.SetLocalizingId(this._cboColorScheme, "AdministrativeSettings.comboBox1");
this._cboColorScheme.Location = new System.Drawing.Point(14, 79);
this._cboColorScheme.Name = "_cboColorScheme";
this._cboColorScheme.Size = new System.Drawing.Size(155, 21);
this._cboColorScheme.TabIndex = 7;
this._cboColorScheme.SelectedIndexChanged += new System.EventHandler(this.cboColorScheme_SelectedIndexChanged);
//
// lblInterface
//
this.lblInterface.AutoSize = true;
this.l10NSharpExtender1.SetLocalizableToolTip(this.lblInterface, null);
this.l10NSharpExtender1.SetLocalizationComment(this.lblInterface, null);
this.l10NSharpExtender1.SetLocalizingId(this.lblInterface, "AdministrativeSettings.lblInterface");
this.lblInterface.Location = new System.Drawing.Point(11, 53);
this.lblInterface.Margin = new System.Windows.Forms.Padding(0, 0, 3, 10);
this.lblInterface.Name = "lblInterface";
this.lblInterface.Size = new System.Drawing.Size(76, 13);
this.lblInterface.TabIndex = 6;
this.lblInterface.Text = "Color Scheme:";
//
// _btnOk
//
this._btnOk.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
this._btnOk.DialogResult = System.Windows.Forms.DialogResult.OK;
this.l10NSharpExtender1.SetLocalizableToolTip(this._btnOk, null);
this.l10NSharpExtender1.SetLocalizationComment(this._btnOk, null);
this.l10NSharpExtender1.SetLocalizingId(this._btnOk, "Common.OK");
this._btnOk.Location = new System.Drawing.Point(217, 403);
this._btnOk.Name = "_btnOk";
this._btnOk.Size = new System.Drawing.Size(75, 23);
this._btnOk.TabIndex = 2;
this._btnOk.Text = "OK";
this._btnOk.UseVisualStyleBackColor = true;
this._btnOk.Click += new System.EventHandler(this.HandleOkButtonClick);
//
// l10NSharpExtender1
//
this.l10NSharpExtender1.LocalizationManagerId = "HearThis";
this.l10NSharpExtender1.PrefixForNewItems = "";
//
// AdministrativeSettings
//
this.AcceptButton = this._btnOk;
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.CancelButton = _btnCancel;
this.ClientSize = new System.Drawing.Size(392, 438);
this.Controls.Add(_btnCancel);
this.Controls.Add(settingsProtectionLauncherButton1);
this.Controls.Add(this.tabControl1);
this.Controls.Add(this._btnOk);
this.l10NSharpExtender1.SetLocalizableToolTip(this, null);
this.l10NSharpExtender1.SetLocalizationComment(this, null);
this.l10NSharpExtender1.SetLocalizingId(this, "RestrictAdministrativeAccess.WindowTitle");
this.MaximizeBox = false;
this.MinimizeBox = false;
this.MinimumSize = new System.Drawing.Size(234, 477);
this.Name = "AdministrativeSettings";
this.ShowIcon = false;
this.ShowInTaskbar = false;
this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
this.Text = "Settings";
this.tabControl1.ResumeLayout(false);
this.tabPageModes.ResumeLayout(false);
this.tabPageModes.PerformLayout();
this._tableLayoutModes.ResumeLayout(false);
this._tableLayoutModes.PerformLayout();
this.tabPageSkipping.ResumeLayout(false);
this._tableLayoutPanelSkipping.ResumeLayout(false);
this._tableLayoutPanelSkipping.PerformLayout();
this.tabPagePunctuation.ResumeLayout(false);
this._tableLayoutPanelPunctuation.ResumeLayout(false);
this._tableLayoutPanelPunctuation.PerformLayout();
this.tabPageInterface.ResumeLayout(false);
this.tabPageInterface.PerformLayout();
this._groupAdvancedUI.ResumeLayout(false);
this._tableLayoutPanelAdvancedUI.ResumeLayout(false);
this._tableLayoutPanelAdvancedUI.PerformLayout();
((System.ComponentModel.ISupportInitialize)(this.l10NSharpExtender1)).EndInit();
this.ResumeLayout(false);
}
#endregion
private System.Windows.Forms.Button _btnOk;
private System.Windows.Forms.CheckBox Administrator;
private L10NSharp.UI.L10NSharpExtender l10NSharpExtender1;
private System.Windows.Forms.Label lblSelectModes;
private System.Windows.Forms.CheckBox NormalRecording;
private System.Windows.Forms.LinkLabel lnkAdministrativeModeSetAsDefault;
private System.Windows.Forms.LinkLabel lnkNormalRecordingModeSetAsDefault;
private System.Windows.Forms.TableLayoutPanel _tableLayoutModes;
private System.Windows.Forms.TabPage tabPageModes;
private System.Windows.Forms.TabPage tabPageSkipping;
private System.Windows.Forms.Label _lblSkippingInstructions;
private System.Windows.Forms.Button _btnClearAllSkipInfo;
private System.Windows.Forms.CheckedListBox _lbSkippedStyles;
private System.Windows.Forms.TabPage tabPagePunctuation;
private System.Windows.Forms.TableLayoutPanel _tableLayoutPanelPunctuation;
private System.Windows.Forms.Label _lblClauseSeparators;
private System.Windows.Forms.Label _lblWarningExistingRecordings;
private System.Windows.Forms.TabControl tabControl1;
private System.Windows.Forms.TableLayoutPanel _tableLayoutPanelSkipping;
private System.Windows.Forms.CheckBox _chkShowSkipButton;
private System.Windows.Forms.Label _lblAdditionalLineBreakCharacters;
private System.Windows.Forms.Label _lblBreakBlocks;
private System.Windows.Forms.TextBox _txtAdditionalBlockSeparators;
private System.Windows.Forms.TextBox _txtClauseSeparatorCharacters;
private System.Windows.Forms.CheckBox _chkBreakAtQuotes;
private System.Windows.Forms.CheckBox _chkBreakAtParagraphBreaks;
private System.Windows.Forms.TabPage tabPageInterface;
private System.Windows.Forms.ComboBox _cboColorScheme;
private System.Windows.Forms.Label lblInterface;
private System.Windows.Forms.Label lblColorSchemeChangeRestartWarning;
private System.Windows.Forms.CheckBox _chkShowBookAndChapterLabels;
private System.Windows.Forms.GroupBox _groupAdvancedUI;
private System.Windows.Forms.Label _lblShiftClipsMenuWarning;
private System.Windows.Forms.CheckBox _chkEnableClipShifting;
private System.Windows.Forms.TableLayoutPanel _tableLayoutPanelAdvancedUI;
private System.Windows.Forms.Label _lblShiftClipsExplanation;
}
}
| |
using System.Collections.Generic;
using System.Collections;
using System.Threading;
using UnityEngine;
namespace Pathfinding {
using Pathfinding;
#if NETFX_CORE
using Thread = Pathfinding.WindowsStore.Thread;
using ParameterizedThreadStart = Pathfinding.WindowsStore.ParameterizedThreadStart;
#else
using Thread = System.Threading.Thread;
using ParameterizedThreadStart = System.Threading.ParameterizedThreadStart;
#endif
class GraphUpdateProcessor {
public event System.Action OnGraphsUpdated;
/** Reference to the AstarPath object whose graphs this processor updates */
readonly AstarPath astar;
#if !UNITY_WEBGL
/**
* Reference to the thread which handles async graph updates.
* \see ProcessGraphUpdatesAsync
*/
Thread graphUpdateThread;
#endif
/**
* Queue containing all waiting graph update requests. Add to this queue by using \link UpdateGraphs \endlink
* \see UpdateGraphs
*/
readonly Queue<GraphUpdateObject> graphUpdateQueue = new Queue<GraphUpdateObject>();
/** Queue of all async graph updates waiting to be executed */
readonly Queue<GUOSingle> graphUpdateQueueAsync = new Queue<GUOSingle>();
/** Queue of all non-async graph updates waiting to be executed */
readonly Queue<GUOSingle> graphUpdateQueueRegular = new Queue<GUOSingle>();
readonly System.Threading.AutoResetEvent graphUpdateAsyncEvent = new System.Threading.AutoResetEvent(false);
readonly System.Threading.ManualResetEvent asyncGraphUpdatesComplete = new System.Threading.ManualResetEvent(true);
#if !UNITY_WEBGL
readonly System.Threading.AutoResetEvent exitAsyncThread = new System.Threading.AutoResetEvent(false);
#endif
/** True if any graph updates are waiting to be applied */
public bool IsAnyGraphUpdateQueued { get { return graphUpdateQueue.Count > 0; }}
/** The last area index which was used.
* Used for the \link FloodFill(GraphNode node) FloodFill \endlink function to start flood filling with an unused area.
* \see FloodFill(GraphNode node)
*/
uint lastUniqueAreaIndex = 0;
/**
* Stack used for flood-filling the graph.
* It is cached to reduce memory allocations
*/
Stack<GraphNode> floodStack;
/** Order type for updating graphs */
enum GraphUpdateOrder {
GraphUpdate,
FloodFill
}
/** Holds a single update that needs to be performed on a graph */
struct GUOSingle {
public GraphUpdateOrder order;
public IUpdatableGraph graph;
public GraphUpdateObject obj;
}
public GraphUpdateProcessor (AstarPath astar) {
this.astar = astar;
}
/** Work item which can be used to apply all queued updates */
public AstarWorkItem GetWorkItem () {
return new AstarWorkItem(QueueGraphUpdatesInternal, ProcessGraphUpdates);
}
public void EnableMultithreading () {
#if !UNITY_WEBGL
if (graphUpdateThread == null || !graphUpdateThread.IsAlive) {
graphUpdateThread = new Thread (ProcessGraphUpdatesAsync);
graphUpdateThread.IsBackground = true;
// Set the thread priority for graph updates
// Unless compiling for windows store or windows phone which does not support it
#if !UNITY_WINRT
graphUpdateThread.Priority = System.Threading.ThreadPriority.Lowest;
#endif
graphUpdateThread.Start (this);
}
#endif
}
public void DisableMultithreading () {
#if !UNITY_WEBGL
if (graphUpdateThread != null && graphUpdateThread.IsAlive) {
//Resume graph update thread, will cause it to terminate
exitAsyncThread.Set();
if (!graphUpdateThread.Join(20*1000)) {
Debug.LogError("Graph update thread did not exit in 20 seconds");
}
graphUpdateThread = null;
}
#endif
}
/** Update all graphs using the GraphUpdateObject.
* This can be used to, e.g., make all nodes in an area unwalkable or set them to a higher penalty.
* The graphs will be updated as soon as possible (with respect to #limitGraphUpdates)
*
* \see FlushGraphUpdates
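*
* Illustrative usage (a sketch only; the Bounds constructor and the
* modifyWalkability/setWalkability fields on GraphUpdateObject are assumed from the
* public A* Pathfinding Project API):
* \code
* var guo = new GraphUpdateObject (new Bounds (point, new Vector3 (10, 4, 10)));
* guo.modifyWalkability = true;   // change walkability...
* guo.setWalkability = false;     // ...making the nodes unwalkable
* graphUpdateProcessor.UpdateGraphs (guo); // queued; applied when the work item from GetWorkItem runs
* \endcode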
*/
public void UpdateGraphs (GraphUpdateObject ob) {
//Put the GUO in the queue
graphUpdateQueue.Enqueue (ob);
}
/** Schedules graph updates internally */
void QueueGraphUpdatesInternal () {
bool anyRequiresFloodFill = false;
while (graphUpdateQueue.Count > 0) {
GraphUpdateObject ob = graphUpdateQueue.Dequeue ();
if (ob.requiresFloodFill) anyRequiresFloodFill = true;
foreach (IUpdatableGraph g in astar.astarData.GetUpdateableGraphs ()) {
NavGraph gr = g as NavGraph;
if (ob.nnConstraint == null || ob.nnConstraint.SuitableGraph (astar.astarData.GetGraphIndex (gr),gr)) {
var guo = new GUOSingle ();
guo.order = GraphUpdateOrder.GraphUpdate;
guo.obj = ob;
guo.graph = g;
graphUpdateQueueRegular.Enqueue (guo);
}
}
}
if (anyRequiresFloodFill) {
var guo = new GUOSingle();
guo.order = GraphUpdateOrder.FloodFill;
graphUpdateQueueRegular.Enqueue (guo);
}
GraphModifier.TriggerEvent (GraphModifier.EventType.PreUpdate);
}
/** Updates graphs.
* Performs some graph updates directly and may signal another thread to perform others.
* Will only process graph updates added by QueueGraphUpdatesInternal
*
* \param force If true, all graph updates will be processed before this function returns. The return value
* will be True.
*
* \returns True if all graph updates have been done and pathfinding (or other tasks) may resume.
* False if there are still graph updates being done or waiting in the queue.
*
*
*/
bool ProcessGraphUpdates (bool force) {
if (force) {
asyncGraphUpdatesComplete.WaitOne ();
} else {
#if !UNITY_WEBGL
if (!asyncGraphUpdatesComplete.WaitOne (0)) {
return false;
}
#endif
}
if (graphUpdateQueueAsync.Count != 0) throw new System.Exception ("Queue should be empty at this stage");
while (graphUpdateQueueRegular.Count > 0) {
GUOSingle s = graphUpdateQueueRegular.Peek ();
GraphUpdateThreading threading = s.order == GraphUpdateOrder.FloodFill ? GraphUpdateThreading.SeparateThread : s.graph.CanUpdateAsync(s.obj);
#if !UNITY_WEBGL
bool forceUnityThread = force;
// When not playing or when not using a graph update thread (or if it has crashed), everything runs in the Unity thread
if ( !Application.isPlaying || graphUpdateThread == null || !graphUpdateThread.IsAlive ) {
forceUnityThread = true;
}
if (!forceUnityThread && (threading == GraphUpdateThreading.SeparateAndUnityInit)) {
if (graphUpdateQueueAsync.Count > 0) {
//Process async graph updates first.
//Next call to this function will process this object so it is not dequeued now
asyncGraphUpdatesComplete.Reset ();
graphUpdateAsyncEvent.Set ();
return false;
}
s.graph.UpdateAreaInit (s.obj);
//Move GUO to async queue to be updated by another thread
graphUpdateQueueRegular.Dequeue ();
graphUpdateQueueAsync.Enqueue (s);
//Next call to this function will process this object so it is not dequeued now
asyncGraphUpdatesComplete.Reset ();
graphUpdateAsyncEvent.Set ();
return false;
} else if (!forceUnityThread && (threading == GraphUpdateThreading.SeparateThread)) {
//Move GUO to async queue to be updated by another thread
graphUpdateQueueRegular.Dequeue ();
graphUpdateQueueAsync.Enqueue (s);
} else {
#endif
//Everything should be done in the unity thread
if (graphUpdateQueueAsync.Count > 0) {
//Process async graph updates first.
if (force) throw new System.Exception ("This should not happen");
//Next call to this function will process this object so it is not dequeued now
asyncGraphUpdatesComplete.Reset ();
graphUpdateAsyncEvent.Set ();
return false;
}
graphUpdateQueueRegular.Dequeue ();
if (s.order == GraphUpdateOrder.FloodFill) {
FloodFill ();
} else {
if (threading == GraphUpdateThreading.SeparateAndUnityInit) {
try {
s.graph.UpdateAreaInit (s.obj);
} catch (System.Exception e) {
Debug.LogError ("Error while initializing GraphUpdates\n" + e);
}
}
try {
s.graph.UpdateArea (s.obj);
} catch (System.Exception e) {
Debug.LogError ("Error while updating graphs\n"+e);
}
}
#if !UNITY_WEBGL
}
#endif
}
#if !UNITY_WEBGL
if (graphUpdateQueueAsync.Count > 0) {
//Next call to this function will process this object so it is not dequeued now
asyncGraphUpdatesComplete.Reset ();
graphUpdateAsyncEvent.Set ();
return false;
}
#endif
GraphModifier.TriggerEvent (GraphModifier.EventType.PostUpdate);
if (OnGraphsUpdated != null) OnGraphsUpdated();
return true;
}
#if !UNITY_WEBGL
/** Graph update thread.
* Async graph updates will be executed by this method in another thread.
*/
void ProcessGraphUpdatesAsync () {
var handles = new [] { graphUpdateAsyncEvent, exitAsyncThread };
while(true) {
// Wait for the next batch or exit event
var handleIndex = WaitHandle.WaitAny(handles);
if (handleIndex == 1) {
// Exit event was fired
//Abort thread and clear queue
graphUpdateQueueAsync.Clear ();
asyncGraphUpdatesComplete.Set ();
return;
}
while (graphUpdateQueueAsync.Count > 0) {
GUOSingle aguo = graphUpdateQueueAsync.Dequeue ();
try {
if (aguo.order == GraphUpdateOrder.GraphUpdate) {
aguo.graph.UpdateArea (aguo.obj);
} else if (aguo.order == GraphUpdateOrder.FloodFill) {
FloodFill ();
} else {
throw new System.NotSupportedException ("" + aguo.order);
}
} catch (System.Exception e) {
Debug.LogError ("Exception while updating graphs:\n"+e);
}
}
// Done
asyncGraphUpdatesComplete.Set ();
}
}
#endif
/** Floodfills starting from the specified node */
public void FloodFill (GraphNode seed) {
FloodFill (seed, lastUniqueAreaIndex+1);
lastUniqueAreaIndex++;
}
/** Floodfills starting from 'seed' using the specified area */
public void FloodFill (GraphNode seed, uint area) {
if (area > GraphNode.MaxAreaIndex) {
Debug.LogError ("Too high area index - The maximum area index is " + GraphNode.MaxAreaIndex);
return;
}
if (area < 0) {
Debug.LogError ("Too low area index - The minimum area index is 0");
return;
}
if (floodStack == null) {
floodStack = new Stack<GraphNode> (1024);
}
Stack<GraphNode> stack = floodStack;
stack.Clear ();
stack.Push (seed);
seed.Area = (uint)area;
while (stack.Count > 0) {
stack.Pop ().FloodFill (stack,(uint)area);
}
}
/** Floodfills all graphs and updates areas for every node.
* The different colored areas that you see in the scene view when looking at graphs
* are called just 'areas'; this method calculates which nodes are in what areas.
* \see Pathfinding.Node.area
*/
[ContextMenu("Flood Fill Graphs")]
public void FloodFill () {
if (astar.astarData.graphs == null) {
return;
}
uint area = 0;
lastUniqueAreaIndex = 0;
if (floodStack == null) {
floodStack = new Stack<GraphNode> (1024);
}
Stack<GraphNode> stack = floodStack;
var graphs = astar.graphs;
// Iterate through all nodes in all graphs
// and reset their Area field
for (int i = 0;i < graphs.Length;i++) {
NavGraph graph = graphs[i];
if (graph != null) {
graph.GetNodes (node => {
node.Area = 0;
return true;
});
}
}
int smallAreasDetected = 0;
bool warnAboutAreas = false;
List<GraphNode> smallAreaList = Pathfinding.Util.ListPool<GraphNode>.Claim();
for (int i = 0; i < graphs.Length; i++) {
NavGraph graph = graphs[i];
if (graph == null) continue;
GraphNodeDelegateCancelable del = delegate (GraphNode node) {
if (node.Walkable && node.Area == 0) {
area++;
uint thisArea = area;
if (area > GraphNode.MaxAreaIndex) {
if ( smallAreaList.Count > 0 ) {
GraphNode smallOne = smallAreaList[smallAreaList.Count-1];
thisArea = smallOne.Area;
smallAreaList.RemoveAt (smallAreaList.Count-1);
//Flood fill the area again with area ID GraphNode.MaxAreaIndex, which identifies a small area
stack.Clear ();
stack.Push (smallOne);
smallOne.Area = GraphNode.MaxAreaIndex;
while (stack.Count > 0) {
stack.Pop ().FloodFill (stack,GraphNode.MaxAreaIndex);
}
smallAreasDetected++;
} else {
// Forced to consider this a small area
area--;
thisArea = area;
warnAboutAreas = true;
}
}
stack.Clear ();
stack.Push (node);
int counter = 1;
node.Area = thisArea;
while (stack.Count > 0) {
counter++;
stack.Pop ().FloodFill (stack,thisArea);
}
if (counter < astar.minAreaSize) {
smallAreaList.Add ( node );
}
}
return true;
};
graph.GetNodes (del);
}
lastUniqueAreaIndex = area;
if (warnAboutAreas) {
Debug.LogError ("Too many areas - The maximum number of areas is " + GraphNode.MaxAreaIndex +". Try raising the A* Inspector -> Settings -> Min Area Size value. Enable the optimization ASTAR_MORE_AREAS under the Optimizations tab.");
}
if (smallAreasDetected > 0) {
astar.Log (smallAreasDetected +" small areas were detected (fewer than " + astar.minAreaSize + " nodes), " +
"these might have the same IDs as other areas, but it shouldn't affect pathfinding in any significant way (you might get All Nodes Searched as a reason for path failure)." +
"\nWhich areas are defined as 'small' is controlled by the 'Min Area Size' variable, it can be changed in the A* inspector-->Settings-->Min Area Size" +
"\nThe small areas will use the area id "+ GraphNode.MaxAreaIndex);
}
Pathfinding.Util.ListPool<GraphNode>.Release (smallAreaList);
}
}
}
| |
/******************************************************************************
* Copyright (C) Leap Motion, Inc. 2011-2017. *
* Leap Motion proprietary and confidential. *
* *
* Use subject to the terms of the Leap Motion SDK Agreement available at *
* https://developer.leapmotion.com/sdk_agreement, or another agreement *
* between Leap Motion and you, your company or other organization. *
******************************************************************************/
using Leap.Unity.Query;
using UnityEngine.Events;
using UnityEngine;
using UnityEngine.SceneManagement;
using System.Collections.Generic;
namespace Leap.Unity {
/// <summary>
/// Static convenience methods and extension methods for getting useful Hand data.
/// </summary>
public static class Hands {
private static LeapProvider s_provider;
private static GameObject s_leapRig;
static Hands() {
InitStatic();
SceneManager.activeSceneChanged += InitStaticOnNewScene;
}
private static void InitStaticOnNewScene(Scene unused, Scene unused2) {
InitStatic();
}
private static void InitStatic() {
s_provider = GameObject.FindObjectOfType<LeapProvider>();
if (s_provider == null) return;
Camera providerCamera = s_provider.GetComponentInParent<Camera>();
if (providerCamera == null) return;
if (providerCamera.transform.parent == null) return;
s_leapRig = providerCamera.transform.parent.gameObject;
}
/// <summary>
/// Static convenience accessor for the Leap camera rig. This is the parent
/// of the Camera that contains a LeapProvider in one of its children,
/// or null if there is no such GameObject.
/// </summary>
public static GameObject Rig {
get {
if (s_leapRig == null) {
InitStatic();
if (s_leapRig == null) {
Debug.LogWarning("Camera has no parent; Rig will return null.");
}
}
return s_leapRig;
}
}
/// <summary>
/// Static convenience accessor for the LeapProvider.
/// </summary>
public static LeapProvider Provider {
get {
if (s_provider == null) {
InitStatic();
if (s_provider == null) {
Debug.LogWarning("No LeapProvider found in the scene.");
}
}
return s_provider;
}
}
[System.Serializable]
public class HandEvent : UnityEvent<Hand> { }
/// <summary>
/// Returns the first hand of the argument Chirality in the current frame,
/// otherwise returns null if no such hand is found.
/// </summary>
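/// <example>
/// A minimal usage sketch (illustrative; assumes a LeapProvider exists in the scene):
/// <code>
/// Hand left = Hands.Get(Chirality.Left);
/// if (left != null) {
///   Vector3 pinchPos = left.GetPinchPosition();
/// }
/// </code>
/// </example>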
public static Hand Get(Chirality chirality) {
if (chirality == Chirality.Left) return Left;
else return Right;
}
/// <summary>
/// As Get, but returns the FixedUpdate (physics timestep) hand as opposed to the Update hand.
/// </summary>
public static Hand GetFixed(Chirality chirality) {
if (chirality == Chirality.Left) return FixedLeft;
else return FixedRight;
}
/// <summary>
/// Returns the first left hand found by Leap in the current frame, otherwise
/// returns null if no such hand is found.
/// </summary>
public static Hand Left {
get {
if (Provider == null) return null;
if (Provider.CurrentFrame == null) return null;
return Provider.CurrentFrame.Hands.Query().FirstOrDefault(hand => hand.IsLeft);
}
}
/// <summary>
/// Returns the first right hand found by Leap in the current frame, otherwise
/// returns null if no such hand is found.
/// </summary>
public static Hand Right {
get {
if (Provider == null) return null;
if (Provider.CurrentFrame == null) return null;
else return Provider.CurrentFrame.Hands.Query().FirstOrDefault(hand => hand.IsRight);
}
}
/// <summary>
/// Returns the first left hand found by Leap in the current fixed frame, otherwise
/// returns null if no such hand is found. The fixed frame is aligned with the physics timestep.
/// </summary>
public static Hand FixedLeft {
get {
if (Provider == null) return null;
if (Provider.CurrentFixedFrame == null) return null;
return Provider.CurrentFixedFrame.Hands.Query().FirstOrDefault(hand => hand.IsLeft);
}
}
/// <summary>
/// Returns the first right hand found by Leap in the current fixed frame, otherwise
/// returns null if no such hand is found. The fixed frame is aligned with the physics timestep.
/// </summary>
public static Hand FixedRight {
get {
if (Provider == null) return null;
if (Provider.CurrentFixedFrame == null) return null;
else return Provider.CurrentFixedFrame.Hands.Query().FirstOrDefault(hand => hand.IsRight);
}
}
/// <summary>
/// Shorthand for hand.Fingers[(int)Leap.Finger.FingerType.TYPE_THUMB],
/// or, alternatively, hand.Fingers[0].
/// </summary>
public static Finger GetThumb(this Hand hand) {
return hand.Fingers[(int)Leap.Finger.FingerType.TYPE_THUMB];
}
/// <summary>
/// Shorthand for hand.Fingers[(int)Leap.Finger.FingerType.TYPE_INDEX],
/// or, alternatively, hand.Fingers[1].
/// </summary>
public static Finger GetIndex(this Hand hand) {
return hand.Fingers[(int)Leap.Finger.FingerType.TYPE_INDEX];
}
/// <summary>
/// Shorthand for hand.Fingers[(int)Leap.Finger.FingerType.TYPE_MIDDLE],
/// or, alternatively, hand.Fingers[2].
/// </summary>
public static Finger GetMiddle(this Hand hand) {
return hand.Fingers[(int)Leap.Finger.FingerType.TYPE_MIDDLE];
}
/// <summary>
/// Shorthand for hand.Fingers[(int)Leap.Finger.FingerType.TYPE_RING],
/// or, alternatively, hand.Fingers[3].
/// </summary>
public static Finger GetRing(this Hand hand) {
return hand.Fingers[(int)Leap.Finger.FingerType.TYPE_RING];
}
/// <summary>
/// Shorthand for hand.Fingers[(int)Leap.Finger.FingerType.TYPE_PINKY],
/// or, alternatively, hand.Fingers[4].
/// </summary>
public static Finger GetPinky(this Hand hand) {
return hand.Fingers[(int)Leap.Finger.FingerType.TYPE_PINKY];
}
/// <summary>
/// Returns the direction the Hand's palm is facing. For the other two palm-basis
/// directions, see RadialAxis and DistalAxis.
///
/// The direction out of the back of the hand would be called the dorsal axis.
/// </summary>
public static Vector3 PalmarAxis(this Hand hand) {
return -hand.Basis.yBasis.ToVector3();
}
/// <summary>
/// Returns the direction towards the thumb that is perpendicular to the palmar
/// and distal axes. Left and right hands will return opposing directions.
///
/// The direction away from the thumb would be called the ulnar axis.
/// </summary>
public static Vector3 RadialAxis(this Hand hand) {
if (hand.IsRight) {
return -hand.Basis.xBasis.ToVector3();
}
else {
return hand.Basis.xBasis.ToVector3();
}
}
/// <summary>
/// Returns the direction towards the fingers that is perpendicular to the palmar
/// and radial axes.
///
/// The direction towards the wrist would be called the proximal axis.
/// </summary>
public static Vector3 DistalAxis (this Hand hand) {
return hand.Basis.zBasis.ToVector3();
}
/// <summary>
/// Returns whether the pinch strength for the hand is greater than 0.8.
/// For more reliable pinch behavior, try applying hysteresis to the PinchStrength property.
/// </summary>
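/// <example>
/// A sketch of simple hysteresis on PinchStrength (thresholds are illustrative;
/// _wasPinching is assumed to be a field on the caller):
/// <code>
/// float onThreshold = 0.85F, offThreshold = 0.65F;
/// _wasPinching = _wasPinching ? (hand.PinchStrength > offThreshold)
///                             : (hand.PinchStrength > onThreshold);
/// </code>
/// </example>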
public static bool IsPinching(this Hand hand) {
return hand.PinchStrength > 0.8F;
}
/// <summary>
/// Returns approximately where the thumb and index finger will be if they are pinched together.
/// </summary>
public static Vector3 GetPinchPosition(this Hand hand) {
Vector indexPosition = hand.Fingers[(int)Finger.FingerType.TYPE_INDEX].TipPosition;
Vector thumbPosition = hand.Fingers[(int)Finger.FingerType.TYPE_THUMB].TipPosition;
return (2 * thumbPosition + indexPosition).ToVector3() * 0.333333F;
}
/// <summary>
/// Returns a decent approximation of where the hand is pinching, or where it will pinch,
/// even if the index and thumb tips are far apart.
///
/// In general, this will be more stable than GetPinchPosition().
/// </summary>
public static Vector3 GetPredictedPinchPosition(this Hand hand) {
Vector3 indexTip = hand.GetIndex().TipPosition.ToVector3();
Vector3 thumbTip = hand.GetThumb().TipPosition.ToVector3();
// The predicted pinch point is a rigid point in hand-space linearly offset by the
// index finger knuckle position, scaled by the index finger's length, and lightly
// influenced by the actual thumb and index tip positions.
Vector3 indexKnuckle = hand.Fingers[1].bones[1].PrevJoint.ToVector3();
float indexLength = hand.Fingers[1].Length;
Vector3 radialAxis = hand.RadialAxis();
float thumbInfluence = Vector3.Dot((thumbTip - indexKnuckle).normalized, radialAxis).Map(0F, 1F, 0.5F, 0F);
Vector3 predictedPinchPoint = indexKnuckle + hand.PalmarAxis() * indexLength * 0.85F
+ hand.DistalAxis() * indexLength * 0.20F
+ radialAxis * indexLength * 0.20F;
predictedPinchPoint = Vector3.Lerp(predictedPinchPoint, thumbTip, thumbInfluence);
predictedPinchPoint = Vector3.Lerp(predictedPinchPoint, indexTip, 0.15F);
return predictedPinchPoint;
}
/// <summary>
/// Returns whether this vector faces from a given world position towards another world position within a maximum angle of error.
/// </summary>
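/// <example>
/// Illustrative sketch: test whether a hand's palm roughly faces the main camera.
/// <code>
/// bool palmTowardCamera = hand.PalmarAxis().IsFacing(
///   hand.PalmPosition.ToVector3(), Camera.main.transform.position, 45f);
/// </code>
/// </example>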
public static bool IsFacing(this Vector3 facingVector, Vector3 fromWorldPosition, Vector3 towardsWorldPosition, float maxOffsetAngleAllowed) {
Vector3 actualVectorTowardsWorldPosition = (towardsWorldPosition - fromWorldPosition).normalized;
return Vector3.Angle(facingVector, actualVectorTowardsWorldPosition) <= maxOffsetAngleAllowed;
}
/// <summary>
/// Returns a confidence value from 0 to 1 indicating how strongly the Hand is making a fist.
/// </summary>
public static float GetFistStrength(this Hand hand) {
return (Vector3.Dot(hand.Fingers[1].Direction.ToVector3(), -hand.DistalAxis() )
+ Vector3.Dot(hand.Fingers[2].Direction.ToVector3(), -hand.DistalAxis() )
+ Vector3.Dot(hand.Fingers[3].Direction.ToVector3(), -hand.DistalAxis() )
+ Vector3.Dot(hand.Fingers[4].Direction.ToVector3(), -hand.DistalAxis() )
+ Vector3.Dot(hand.Fingers[0].Direction.ToVector3(), -hand.RadialAxis() )
).Map(-5, 5, 0, 1);
}
/// <summary>
/// Transforms a bone by a position and rotation.
/// </summary>
public static void Transform(this Bone bone, Vector3 position, Quaternion rotation) {
bone.Transform(new LeapTransform(position.ToVector(), rotation.ToLeapQuaternion()));
}
/// <summary>
/// Transforms a finger by a position and rotation.
/// </summary>
public static void Transform(this Finger finger, Vector3 position, Quaternion rotation) {
finger.Transform(new LeapTransform(position.ToVector(), rotation.ToLeapQuaternion()));
}
/// <summary>
/// Transforms a hand by a position and rotation.
/// </summary>
public static void Transform(this Hand hand, Vector3 position, Quaternion rotation) {
hand.Transform(new LeapTransform(position.ToVector(), rotation.ToLeapQuaternion()));
}
/// <summary>
/// Transforms a frame by a position and rotation.
/// </summary>
public static void Transform(this Frame frame, Vector3 position, Quaternion rotation) {
frame.Transform(new LeapTransform(position.ToVector(), rotation.ToLeapQuaternion()));
}
/// <summary>
/// Transforms a bone to a position and rotation.
/// </summary>
public static void SetTransform(this Bone bone, Vector3 position, Quaternion rotation) {
bone.Transform(Vector3.zero, (rotation * Quaternion.Inverse(bone.Rotation.ToQuaternion())));
bone.Transform(position - bone.PrevJoint.ToVector3(), Quaternion.identity);
}
/// <summary>
/// Transforms a finger to a position and rotation by its fingertip.
/// </summary>
public static void SetTipTransform(this Finger finger, Vector3 position, Quaternion rotation) {
finger.Transform(Vector3.zero, (rotation * Quaternion.Inverse(finger.bones[3].Rotation.ToQuaternion())));
finger.Transform(position - finger.bones[3].NextJoint.ToVector3(), Quaternion.identity);
}
/// <summary>
/// Transforms a hand to a position and rotation.
/// </summary>
public static void SetTransform(this Hand hand, Vector3 position, Quaternion rotation) {
hand.Transform(Vector3.zero, (rotation * Quaternion.Inverse(hand.Rotation.ToQuaternion())));
hand.Transform(position - hand.PalmPosition.ToVector3(), Quaternion.identity);
}
}
/// <summary>
/// Utility methods for constructing and manipulating Leap hand object data.
/// </summary>
public static class HandUtils {
/// <summary>
/// Fills the Hand object with the provided hand data. You can pass null for the
/// fingers input; this will leave the hand's finger data unmodified.
/// </summary>
public static void Fill(this Hand toFill,
long frameID,
int id,
float confidence,
float grabStrength,
float grabAngle,
float pinchStrength,
float pinchDistance,
float palmWidth,
bool isLeft,
float timeVisible,
/* Arm arm,*/
List<Finger> fingers,
Vector palmPosition,
Vector stabilizedPalmPosition,
Vector palmVelocity,
Vector palmNormal,
LeapQuaternion rotation,
Vector direction,
Vector wristPosition) {
toFill.FrameId = frameID;
toFill.Id = id;
toFill.Confidence = confidence;
toFill.GrabStrength = grabStrength;
toFill.GrabAngle = grabAngle;
toFill.PinchStrength = pinchStrength;
toFill.PinchDistance = pinchDistance;
toFill.PalmWidth = palmWidth;
toFill.IsLeft = isLeft;
toFill.TimeVisible = timeVisible;
if (fingers != null) toFill.Fingers = fingers;
toFill.PalmPosition = palmPosition;
toFill.StabilizedPalmPosition = stabilizedPalmPosition;
toFill.PalmVelocity = palmVelocity;
toFill.PalmNormal = palmNormal;
toFill.Rotation = rotation;
toFill.Direction = direction;
toFill.WristPosition = wristPosition;
}
/// <summary>
/// Fills the Bone object with the provided bone data.
/// </summary>
public static void Fill(this Bone toFill,
Vector prevJoint,
Vector nextJoint,
Vector center,
Vector direction,
float length,
float width,
Bone.BoneType type,
LeapQuaternion rotation) {
toFill.PrevJoint = prevJoint;
toFill.NextJoint = nextJoint;
toFill.Center = center;
toFill.Direction = direction;
toFill.Length = length;
toFill.Width = width;
toFill.Type = type;
toFill.Rotation = rotation;
}
/// <summary>
/// Fills the Finger object with the provided finger data. You can pass null for
/// bones; a null bone will not modify the underlying hand's data for that bone.
/// </summary>
public static void Fill(this Finger toFill,
long frameId,
int handId,
int fingerId,
float timeVisible,
Vector tipPosition,
Vector tipVelocity,
Vector direction,
Vector stabilizedTipPosition,
float width,
float length,
bool isExtended,
Finger.FingerType type,
Bone metacarpal = null,
Bone proximal = null,
Bone intermediate = null,
Bone distal = null) {
toFill.Id = fingerId;
toFill.HandId = handId;
toFill.TimeVisible = timeVisible;
toFill.TipPosition = tipPosition;
toFill.TipVelocity = tipVelocity;
toFill.StabilizedTipPosition = stabilizedTipPosition;
toFill.Direction = direction;
toFill.Width = width;
toFill.Length = length;
toFill.IsExtended = isExtended;
toFill.Type = type;
if (metacarpal != null) toFill.bones[0] = metacarpal;
if (proximal != null) toFill.bones[1] = proximal;
if (intermediate != null) toFill.bones[2] = intermediate;
if (distal != null) toFill.bones[3] = distal;
}
/// <summary>
/// Fills the Arm object with the provided arm data.
/// </summary>
public static void Fill(this Arm toFill,
Vector elbow,
Vector wrist,
Vector center,
Vector direction,
float length,
float width,
LeapQuaternion rotation) {
toFill.PrevJoint = elbow;
toFill.NextJoint = wrist;
toFill.Center = center;
toFill.Direction = direction;
toFill.Length = length;
toFill.Width = width;
toFill.Rotation = rotation;
}
/// <summary>
/// Fills the hand's PalmVelocity and each finger's TipVelocity data based on the
/// previous hand object and the provided delta time between the two hands.
/// </summary>
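/// <example>
/// Illustrative sketch (previousHandCopy is assumed to be a copy of this hand from
/// the previous physics step):
/// <code>
/// handCopy.FillTemporalData(previousHandCopy, Time.fixedDeltaTime);
/// </code>
/// </example>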
public static void FillTemporalData(this Hand toFill,
Hand previousHand, float deltaTime) {
toFill.PalmVelocity = (toFill.PalmPosition - previousHand.PalmPosition)
/ deltaTime;
for (int i = 0; i < toFill.Fingers.Count; i++) {
toFill.Fingers[i].TipVelocity = (toFill.Fingers[i].TipPosition
- previousHand.Fingers[i].TipPosition)
/ deltaTime;
}
}
}
}
| |
// DeflateStream.cs
// ------------------------------------------------------------------
//
// Copyright (c) 2009-2010 Dino Chiesa.
// All rights reserved.
//
// This code module is part of DotNetZip, a zipfile class library.
//
// ------------------------------------------------------------------
//
// This code is licensed under the Microsoft Public License.
// See the file License.txt for the license details.
// More info on: http://dotnetzip.codeplex.com
//
// ------------------------------------------------------------------
//
// last saved (in emacs):
// Time-stamp: <2011-July-31 14:48:11>
//
// ------------------------------------------------------------------
//
// This module defines the DeflateStream class, which can be used as a replacement for
// the System.IO.Compression.DeflateStream class in the .NET BCL.
//
// ------------------------------------------------------------------
using System;
namespace Ionic.Zlib
{
/// <summary>
/// A class for compressing and decompressing streams using the Deflate algorithm.
/// </summary>
///
/// <remarks>
///
/// <para>
/// The DeflateStream is a <see
/// href="http://en.wikipedia.org/wiki/Decorator_pattern">Decorator</see> on a <see
/// cref="System.IO.Stream"/>. It adds DEFLATE compression or decompression to any
/// stream.
/// </para>
///
/// <para>
/// Using this stream, applications can compress or decompress data via stream
/// <c>Read</c> and <c>Write</c> operations. Either compression or decompression
/// can occur through either reading or writing. The compression format used is
/// DEFLATE, which is documented in <see
/// href="http://www.ietf.org/rfc/rfc1951.txt">IETF RFC 1951</see>, "DEFLATE
/// Compressed Data Format Specification version 1.3.".
/// </para>
///
/// <para>
/// This class is similar to <see cref="ZlibStream"/>, except that
/// <c>ZlibStream</c> adds the <see href="http://www.ietf.org/rfc/rfc1950.txt">RFC
/// 1950 - ZLIB</see> framing bytes to a compressed stream when compressing, or
/// expects the RFC1950 framing bytes when decompressing. The <c>DeflateStream</c>
/// does not.
/// </para>
///
/// </remarks>
///
/// <seealso cref="ZlibStream" />
/// <seealso cref="GZipStream" />
internal class DeflateStream : System.IO.Stream
{
internal ZlibBaseStream _baseStream;
internal System.IO.Stream _innerStream;
bool _disposed;
/// <summary>
/// Create a DeflateStream using the specified CompressionMode.
/// </summary>
///
/// <remarks>
/// When mode is <c>CompressionMode.Compress</c>, the DeflateStream will use
/// the default compression level. The "captive" stream will be closed when
/// the DeflateStream is closed.
/// </remarks>
///
/// <example>
/// This example uses a DeflateStream to compress data from a file, and writes
/// the compressed data to another file.
/// <code>
/// using (System.IO.Stream input = System.IO.File.OpenRead(fileToCompress))
/// {
/// using (var raw = System.IO.File.Create(fileToCompress + ".deflated"))
/// {
/// using (Stream compressor = new DeflateStream(raw, CompressionMode.Compress))
/// {
/// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
/// int n;
/// while ((n= input.Read(buffer, 0, buffer.Length)) != 0)
/// {
/// compressor.Write(buffer, 0, n);
/// }
/// }
/// }
/// }
/// </code>
///
/// <code lang="VB">
/// Using input As Stream = File.OpenRead(fileToCompress)
/// Using raw As FileStream = File.Create(fileToCompress & ".deflated")
/// Using compressor As Stream = New DeflateStream(raw, CompressionMode.Compress)
/// Dim buffer As Byte() = New Byte(4096) {}
/// Dim n As Integer = -1
/// Do While (n <> 0)
/// If (n > 0) Then
/// compressor.Write(buffer, 0, n)
/// End If
/// n = input.Read(buffer, 0, buffer.Length)
/// Loop
/// End Using
/// End Using
/// End Using
/// </code>
/// </example>
/// <param name="stream">The stream which will be read or written.</param>
/// <param name="mode">Indicates whether the DeflateStream will compress or decompress.</param>
public DeflateStream(System.IO.Stream stream, CompressionMode mode)
: this(stream, mode, CompressionLevel.Default, false)
{
}
/// <summary>
/// Create a DeflateStream using the specified CompressionMode and the specified CompressionLevel.
/// </summary>
///
/// <remarks>
///
/// <para>
/// When mode is <c>CompressionMode.Decompress</c>, the level parameter is
/// ignored. The "captive" stream will be closed when the DeflateStream is
/// closed.
/// </para>
///
/// </remarks>
///
/// <example>
///
/// This example uses a DeflateStream to compress data from a file, and writes
/// the compressed data to another file.
///
/// <code>
/// using (System.IO.Stream input = System.IO.File.OpenRead(fileToCompress))
/// {
/// using (var raw = System.IO.File.Create(fileToCompress + ".deflated"))
/// {
/// using (Stream compressor = new DeflateStream(raw,
/// CompressionMode.Compress,
/// CompressionLevel.BestCompression))
/// {
/// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
/// int n= -1;
/// while (n != 0)
/// {
/// if (n > 0)
/// compressor.Write(buffer, 0, n);
/// n= input.Read(buffer, 0, buffer.Length);
/// }
/// }
/// }
/// }
/// </code>
///
/// <code lang="VB">
/// Using input As Stream = File.OpenRead(fileToCompress)
/// Using raw As FileStream = File.Create(fileToCompress & ".deflated")
/// Using compressor As Stream = New DeflateStream(raw, CompressionMode.Compress, CompressionLevel.BestCompression)
/// Dim buffer As Byte() = New Byte(4096) {}
/// Dim n As Integer = -1
/// Do While (n <> 0)
/// If (n > 0) Then
/// compressor.Write(buffer, 0, n)
/// End If
/// n = input.Read(buffer, 0, buffer.Length)
/// Loop
/// End Using
/// End Using
/// End Using
/// </code>
/// </example>
/// <param name="stream">The stream to be read or written while deflating or inflating.</param>
/// <param name="mode">Indicates whether the <c>DeflateStream</c> will compress or decompress.</param>
/// <param name="level">A tuning knob to trade speed for effectiveness.</param>
public DeflateStream(System.IO.Stream stream, CompressionMode mode, CompressionLevel level)
: this(stream, mode, level, false)
{
}
/// <summary>
/// Create a <c>DeflateStream</c> using the specified
/// <c>CompressionMode</c>, and explicitly specify whether the
/// stream should be left open after Deflation or Inflation.
/// </summary>
///
/// <remarks>
///
/// <para>
/// This constructor allows the application to request that the captive stream
/// remain open after the deflation or inflation occurs. By default, after
/// <c>Close()</c> is called on the stream, the captive stream is also
/// closed. In some cases this is not desired, for example if the stream is a
/// memory stream that will be re-read after compression. Specify true for
/// the <paramref name="leaveOpen"/> parameter to leave the stream open.
/// </para>
///
/// <para>
/// The <c>DeflateStream</c> will use the default compression level.
/// </para>
///
/// <para>
/// See the other overloads of this constructor for example code.
/// </para>
/// </remarks>
///
/// <param name="stream">
/// The stream which will be read or written. This is called the
/// "captive" stream in other places in this documentation.
/// </param>
///
/// <param name="mode">
/// Indicates whether the <c>DeflateStream</c> will compress or decompress.
/// </param>
///
/// <param name="leaveOpen">true if the application would like the stream to
/// remain open after inflation/deflation.</param>
public DeflateStream(System.IO.Stream stream, CompressionMode mode, bool leaveOpen)
: this(stream, mode, CompressionLevel.Default, leaveOpen)
{
}
/// <summary>
/// Create a <c>DeflateStream</c> using the specified <c>CompressionMode</c>
/// and the specified <c>CompressionLevel</c>, and explicitly specify whether
/// the stream should be left open after Deflation or Inflation.
/// </summary>
///
/// <remarks>
///
/// <para>
/// When mode is <c>CompressionMode.Decompress</c>, the level parameter is ignored.
/// </para>
///
/// <para>
/// This constructor allows the application to request that the captive stream
/// remain open after the deflation or inflation occurs. By default, after
/// <c>Close()</c> is called on the stream, the captive stream is also
/// closed. In some cases this is not desired, for example if the stream is a
/// <see cref="System.IO.MemoryStream"/> that will be re-read after
/// compression. Specify true for the <paramref name="leaveOpen"/> parameter
/// to leave the stream open.
/// </para>
///
/// </remarks>
///
/// <example>
///
/// This example shows how to use a <c>DeflateStream</c> to compress data from
/// a file, and store the compressed data into another file.
///
/// <code>
/// using (var output = System.IO.File.Create(fileToCompress + ".deflated"))
/// {
/// using (System.IO.Stream input = System.IO.File.OpenRead(fileToCompress))
/// {
/// using (Stream compressor = new DeflateStream(output, CompressionMode.Compress, CompressionLevel.BestCompression, true))
/// {
/// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
/// int n= -1;
/// while (n != 0)
/// {
/// if (n > 0)
/// compressor.Write(buffer, 0, n);
/// n= input.Read(buffer, 0, buffer.Length);
/// }
/// }
/// }
/// // can write additional data to the output stream here
/// }
/// </code>
///
/// <code lang="VB">
/// Using output As FileStream = File.Create(fileToCompress & ".deflated")
/// Using input As Stream = File.OpenRead(fileToCompress)
/// Using compressor As Stream = New DeflateStream(output, CompressionMode.Compress, CompressionLevel.BestCompression, True)
/// Dim buffer As Byte() = New Byte(4096) {}
/// Dim n As Integer = -1
/// Do While (n <> 0)
/// If (n > 0) Then
/// compressor.Write(buffer, 0, n)
/// End If
/// n = input.Read(buffer, 0, buffer.Length)
/// Loop
/// End Using
/// End Using
/// ' can write additional data to the output stream here.
/// End Using
/// </code>
/// </example>
/// <param name="stream">The stream which will be read or written.</param>
/// <param name="mode">Indicates whether the DeflateStream will compress or decompress.</param>
/// <param name="leaveOpen">true if the application would like the stream to remain open after inflation/deflation.</param>
/// <param name="level">A tuning knob to trade speed for effectiveness.</param>
public DeflateStream(System.IO.Stream stream, CompressionMode mode, CompressionLevel level, bool leaveOpen)
{
_innerStream = stream;
_baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.DEFLATE, leaveOpen);
}
#region Zlib properties
/// <summary>
/// This property sets the flush behavior on the stream.
/// </summary>
/// <remarks> See the ZLIB documentation for the meaning of the flush behavior.
/// </remarks>
virtual public FlushType FlushMode
{
get { return (this._baseStream._flushMode); }
set
{
if (_disposed) throw new ObjectDisposedException("DeflateStream");
this._baseStream._flushMode = value;
}
}
/// <summary>
/// The size of the working buffer for the compression codec.
/// </summary>
///
/// <remarks>
/// <para>
/// The working buffer is used for all stream operations. The default size is
/// 1024 bytes. The minimum size is 128 bytes. You may get better performance
/// with a larger buffer. Then again, you might not. You would have to test
/// it.
/// </para>
///
/// <para>
/// Set this before the first call to <c>Read()</c> or <c>Write()</c> on the
/// stream. If you try to set it afterwards, it will throw.
/// </para>
/// </remarks>
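/// <example>
/// A sketch (illustrative; <c>output</c> is any writable stream):
/// <code>
/// var compressor = new DeflateStream(output, CompressionMode.Compress);
/// compressor.BufferSize = 64 * 1024; // must be set before the first Read()/Write()
/// </code>
/// </example>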
public int BufferSize
{
get
{
return this._baseStream._bufferSize;
}
set
{
if (_disposed) throw new ObjectDisposedException("DeflateStream");
if (this._baseStream._workingBuffer != null)
throw new ZlibException("The working buffer is already set.");
if (value < ZlibConstants.WorkingBufferSizeMin)
throw new ZlibException(String.Format("Don't be silly. {0} bytes?? Use a bigger buffer, at least {1}.", value, ZlibConstants.WorkingBufferSizeMin));
this._baseStream._bufferSize = value;
}
}
/// <summary>
/// The ZLIB strategy to be used during compression.
/// </summary>
///
/// <remarks>
/// By tweaking this parameter, you may be able to optimize the compression for
/// data with particular characteristics.
/// </remarks>
public CompressionStrategy Strategy
{
get
{
return this._baseStream.Strategy;
}
set
{
if (_disposed) throw new ObjectDisposedException("DeflateStream");
this._baseStream.Strategy = value;
}
}
/// <summary> Returns the total number of bytes input so far.</summary>
virtual public long TotalIn
{
get
{
return this._baseStream._z.TotalBytesIn;
}
}
/// <summary> Returns the total number of bytes output so far.</summary>
virtual public long TotalOut
{
get
{
return this._baseStream._z.TotalBytesOut;
}
}
#endregion
#region System.IO.Stream methods
/// <summary>
/// Dispose the stream.
/// </summary>
/// <remarks>
/// <para>
/// This may or may not result in a <c>Close()</c> call on the captive
/// stream. See the constructors that have a <c>leaveOpen</c> parameter
/// for more information.
/// </para>
/// <para>
/// Application code won't call this code directly. This method may be
/// invoked in two distinct scenarios. If disposing == true, the method
/// has been called directly or indirectly by a user's code, for example
/// via the public Dispose() method. In this case, both managed and
/// unmanaged resources can be referenced and disposed. If disposing ==
/// false, the method has been called by the runtime from inside the
/// object finalizer and this method should not reference other objects;
/// in that case only unmanaged resources must be referenced or
/// disposed.
/// </para>
/// </remarks>
/// <param name="disposing">
/// true if the Dispose method was invoked by user code.
/// </param>
protected override void Dispose(bool disposing)
{
try
{
if (!_disposed)
{
if (disposing && (this._baseStream != null))
this._baseStream.Close();
_disposed = true;
}
}
finally
{
base.Dispose(disposing);
}
}
/// <summary>
/// Indicates whether the stream can be read.
/// </summary>
/// <remarks>
/// The return value depends on whether the captive stream supports reading.
/// </remarks>
public override bool CanRead
{
get
{
if (_disposed) throw new ObjectDisposedException("DeflateStream");
return _baseStream._stream.CanRead;
}
}
/// <summary>
/// Indicates whether the stream supports Seek operations.
/// </summary>
/// <remarks>
/// Always returns false.
/// </remarks>
public override bool CanSeek
{
get { return false; }
}
/// <summary>
/// Indicates whether the stream can be written.
/// </summary>
/// <remarks>
/// The return value depends on whether the captive stream supports writing.
/// </remarks>
public override bool CanWrite
{
get
{
if (_disposed) throw new ObjectDisposedException("DeflateStream");
return _baseStream._stream.CanWrite;
}
}
/// <summary>
/// Flush the stream.
/// </summary>
public override void Flush()
{
if (_disposed) throw new ObjectDisposedException("DeflateStream");
_baseStream.Flush();
}
/// <summary>
/// Reading this property always throws a <see cref="NotImplementedException"/>.
/// </summary>
public override long Length
{
get { throw new NotImplementedException(); }
}
/// <summary>
/// The position of the stream pointer.
/// </summary>
///
/// <remarks>
/// Setting this property always throws a <see
/// cref="NotImplementedException"/>. Reading will return the total bytes
/// written out, if used in writing, or the total bytes read in, if used in
/// reading. The count may refer to compressed bytes or uncompressed bytes,
/// depending on how you've used the stream.
/// </remarks>
public override long Position
{
get
{
if (this._baseStream._streamMode == Ionic.Zlib.ZlibBaseStream.StreamMode.Writer)
return this._baseStream._z.TotalBytesOut;
if (this._baseStream._streamMode == Ionic.Zlib.ZlibBaseStream.StreamMode.Reader)
return this._baseStream._z.TotalBytesIn;
return 0;
}
set { throw new NotImplementedException(); }
}
/// <summary>
/// Read data from the stream.
/// </summary>
/// <remarks>
///
/// <para>
/// If you wish to use the <c>DeflateStream</c> to compress data while
/// reading, you can create a <c>DeflateStream</c> with
/// <c>CompressionMode.Compress</c>, providing an uncompressed data stream.
/// Then call Read() on that <c>DeflateStream</c>, and the data read will be
/// compressed as you read. If you wish to use the <c>DeflateStream</c> to
/// decompress data while reading, you can create a <c>DeflateStream</c> with
/// <c>CompressionMode.Decompress</c>, providing a readable compressed data
/// stream. Then call Read() on that <c>DeflateStream</c>, and the data read
/// will be decompressed as you read.
/// </para>
///
/// <para>
/// A <c>DeflateStream</c> can be used for <c>Read()</c> or <c>Write()</c>, but not both.
/// </para>
///
/// </remarks>
/// <param name="buffer">The buffer into which the read data should be placed.</param>
/// <param name="offset">the offset within that data array to put the first byte read.</param>
/// <param name="count">the number of bytes to read.</param>
/// <returns>the number of bytes actually read</returns>
public override int Read(byte[] buffer, int offset, int count)
{
if (_disposed) throw new ObjectDisposedException("DeflateStream");
return _baseStream.Read(buffer, offset, count);
}
/// <summary>
/// Calling this method always throws a <see cref="NotImplementedException"/>.
/// </summary>
/// <param name="offset">this is irrelevant, since it will always throw!</param>
/// <param name="origin">this is irrelevant, since it will always throw!</param>
/// <returns>irrelevant!</returns>
public override long Seek(long offset, System.IO.SeekOrigin origin)
{
throw new NotImplementedException();
}
/// <summary>
/// Calling this method always throws a <see cref="NotImplementedException"/>.
/// </summary>
/// <param name="value">this is irrelevant, since it will always throw!</param>
public override void SetLength(long value)
{
throw new NotImplementedException();
}
/// <summary>
/// Write data to the stream.
/// </summary>
/// <remarks>
///
/// <para>
/// If you wish to use the <c>DeflateStream</c> to compress data while
/// writing, you can create a <c>DeflateStream</c> with
/// <c>CompressionMode.Compress</c>, and a writable output stream. Then call
/// <c>Write()</c> on that <c>DeflateStream</c>, providing uncompressed data
/// as input. The data sent to the output stream will be the compressed form
/// of the data written. If you wish to use the <c>DeflateStream</c> to
/// decompress data while writing, you can create a <c>DeflateStream</c> with
/// <c>CompressionMode.Decompress</c>, and a writable output stream. Then
/// call <c>Write()</c> on that stream, providing previously compressed
/// data. The data sent to the output stream will be the decompressed form of
/// the data written.
/// </para>
///
/// <para>
/// A <c>DeflateStream</c> can be used for <c>Read()</c> or <c>Write()</c>,
/// but not both.
/// </para>
///
/// </remarks>
///
/// <param name="buffer">The buffer holding data to write to the stream.</param>
/// <param name="offset">the offset within that data array to find the first byte to write.</param>
/// <param name="count">the number of bytes to write.</param>
public override void Write(byte[] buffer, int offset, int count)
{
if (_disposed) throw new ObjectDisposedException("DeflateStream");
_baseStream.Write(buffer, offset, count);
}
#endregion
/// <summary>
/// Compress a string into a byte array using DEFLATE (RFC 1951).
/// </summary>
///
/// <remarks>
/// Uncompress it with <see cref="DeflateStream.UncompressString(byte[])"/>.
/// </remarks>
///
/// <seealso cref="DeflateStream.UncompressString(byte[])">DeflateStream.UncompressString(byte[])</seealso>
/// <seealso cref="DeflateStream.CompressBuffer(byte[])">DeflateStream.CompressBuffer(byte[])</seealso>
/// <seealso cref="GZipStream.CompressString(string)">GZipStream.CompressString(string)</seealso>
/// <seealso cref="ZlibStream.CompressString(string)">ZlibStream.CompressString(string)</seealso>
///
/// <param name="s">
/// A string to compress. The string will first be encoded
/// using UTF8, then compressed.
/// </param>
///
/// <returns>The string in compressed form</returns>
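/// <example>
/// A round-trip sketch (illustrative):
/// <code>
/// byte[] compressed = DeflateStream.CompressString("Hello, DEFLATE!");
/// string original = DeflateStream.UncompressString(compressed);
/// </code>
/// </example>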
public static byte[] CompressString(String s)
{
using (var ms = new System.IO.MemoryStream())
{
System.IO.Stream compressor =
new DeflateStream(ms, CompressionMode.Compress, CompressionLevel.BestCompression);
ZlibBaseStream.CompressString(s, compressor);
return ms.ToArray();
}
}
/// <summary>
/// Compress a byte array into a new byte array using DEFLATE.
/// </summary>
///
/// <remarks>
/// Uncompress it with <see cref="DeflateStream.UncompressBuffer(byte[])"/>.
/// </remarks>
///
/// <seealso cref="DeflateStream.CompressString(string)">DeflateStream.CompressString(string)</seealso>
/// <seealso cref="DeflateStream.UncompressBuffer(byte[])">DeflateStream.UncompressBuffer(byte[])</seealso>
/// <seealso cref="GZipStream.CompressBuffer(byte[])">GZipStream.CompressBuffer(byte[])</seealso>
/// <seealso cref="ZlibStream.CompressBuffer(byte[])">ZlibStream.CompressBuffer(byte[])</seealso>
///
/// <param name="b">
/// A buffer to compress.
/// </param>
///
/// <returns>The data in compressed form</returns>
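/// <example>
/// A round-trip sketch (illustrative):
/// <code>
/// byte[] raw = System.Text.Encoding.UTF8.GetBytes("example payload");
/// byte[] compressed = DeflateStream.CompressBuffer(raw);
/// byte[] restored = DeflateStream.UncompressBuffer(compressed);
/// </code>
/// </example>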
public static byte[] CompressBuffer(byte[] b)
{
using (var ms = new System.IO.MemoryStream())
{
System.IO.Stream compressor =
new DeflateStream( ms, CompressionMode.Compress, CompressionLevel.BestCompression );
ZlibBaseStream.CompressBuffer(b, compressor);
return ms.ToArray();
}
}
/// <summary>
/// Uncompress a DEFLATE'd byte array into a single string.
/// </summary>
///
/// <seealso cref="DeflateStream.CompressString(String)">DeflateStream.CompressString(String)</seealso>
/// <seealso cref="DeflateStream.UncompressBuffer(byte[])">DeflateStream.UncompressBuffer(byte[])</seealso>
/// <seealso cref="GZipStream.UncompressString(byte[])">GZipStream.UncompressString(byte[])</seealso>
/// <seealso cref="ZlibStream.UncompressString(byte[])">ZlibStream.UncompressString(byte[])</seealso>
///
/// <param name="compressed">
/// A buffer containing DEFLATE-compressed data.
/// </param>
///
/// <returns>The uncompressed string</returns>
public static String UncompressString(byte[] compressed)
{
using (var input = new System.IO.MemoryStream(compressed))
{
System.IO.Stream decompressor =
new DeflateStream(input, CompressionMode.Decompress);
return ZlibBaseStream.UncompressString(compressed, decompressor);
}
}
/// <summary>
/// Uncompress a DEFLATE'd byte array into a byte array.
/// </summary>
///
/// <seealso cref="DeflateStream.CompressBuffer(byte[])">DeflateStream.CompressBuffer(byte[])</seealso>
/// <seealso cref="DeflateStream.UncompressString(byte[])">DeflateStream.UncompressString(byte[])</seealso>
/// <seealso cref="GZipStream.UncompressBuffer(byte[])">GZipStream.UncompressBuffer(byte[])</seealso>
/// <seealso cref="ZlibStream.UncompressBuffer(byte[])">ZlibStream.UncompressBuffer(byte[])</seealso>
///
/// <param name="compressed">
/// A buffer containing data that has been compressed with DEFLATE.
/// </param>
///
/// <returns>The data in uncompressed form</returns>
public static byte[] UncompressBuffer(byte[] compressed)
{
using (var input = new System.IO.MemoryStream(compressed))
{
System.IO.Stream decompressor =
new DeflateStream( input, CompressionMode.Decompress );
return ZlibBaseStream.UncompressBuffer(compressed, decompressor);
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Runtime;
using System.Runtime.Serialization;
using System.Text;
using System.Xml;
namespace System.Runtime.Serialization.Json
{
internal class XmlJsonWriter : XmlDictionaryWriter, IXmlJsonWriterInitializer
{
private const char BACK_SLASH = '\\';
private const char FORWARD_SLASH = '/';
private const char HIGH_SURROGATE_START = (char)0xd800;
private const char LOW_SURROGATE_END = (char)0xdfff;
private const char MAX_CHAR = (char)0xfffe;
private const char WHITESPACE = ' ';
private const char CARRIAGE_RETURN = '\r';
private const char NEWLINE = '\n';
private const char BACKSPACE = '\b';
private const char FORM_FEED = '\f';
private const char HORIZONTAL_TABULATION = '\t';
private const string xmlNamespace = "http://www.w3.org/XML/1998/namespace";
private const string xmlnsNamespace = "http://www.w3.org/2000/xmlns/";
// This array was part of a perf improvement for escaping characters < WHITESPACE.
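// For example (assuming TryEscapeControlCharacter maps the standard JSON short escapes),
// s_escapedJsonStringTable['\n'] is "\\n" and s_escapedJsonStringTable[(char)0x01] is "\\u0001".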
private static readonly string[] s_escapedJsonStringTable = CreateEscapedJsonStringTable();
private static BinHexEncoding s_binHexEncoding;
private string _attributeText;
private JsonDataType _dataType;
private int _depth;
private bool _endElementBuffer;
private bool _isWritingDataTypeAttribute;
private bool _isWritingServerTypeAttribute;
private bool _isWritingXmlnsAttribute;
private bool _isWritingXmlnsAttributeDefaultNs;
private NameState _nameState;
private JsonNodeType _nodeType;
private JsonNodeWriter _nodeWriter;
private JsonNodeType[] _scopes;
private string _serverTypeValue;
// Do not read this field's value anywhere other than in the WriteState property.
// It's OK to set this field's value anywhere; the WriteState property will report the state appropriately.
// If it's necessary to check the write state outside of the WriteState property, use the WriteState property rather than this field.
private WriteState _writeState;
private bool _wroteServerTypeAttribute;
private bool _indent;
private string _indentChars;
private int _indentLevel;
public XmlJsonWriter() : this(false, null) { }
public XmlJsonWriter(bool indent, string indentChars)
{
_indent = indent;
if (indent)
{
if (indentChars == null)
{
throw new ArgumentNullException(nameof(indentChars));
}
_indentChars = indentChars;
}
InitializeWriter();
}
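// Usage sketch for the constructor above (illustrative only; this type is internal to the serializer):
//   var writer = new XmlJsonWriter(indent: true, indentChars: "  ");
//   writer.SetOutput(stream, Encoding.UTF8, ownsStream: false);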
private static string[] CreateEscapedJsonStringTable()
{
var table = new string[WHITESPACE];
for (int ch = 0; ch < WHITESPACE; ch++)
{
char abbrev;
table[ch] = TryEscapeControlCharacter((char)ch, out abbrev) ?
string.Concat(BACK_SLASH, abbrev) :
string.Format(CultureInfo.InvariantCulture, "\\u{0:x4}", ch);
}
return table;
}
private enum JsonDataType
{
None,
Null,
Boolean,
Number,
String,
Object,
Array
};
[Flags]
private enum NameState
{
None = 0,
IsWritingNameWithMapping = 1,
IsWritingNameAttribute = 2,
WrittenNameWithMapping = 4,
}
public override XmlWriterSettings Settings
{
// The XmlWriterSettings object used to create this writer instance.
// If this writer was not created using the Create method, this property
// returns a null reference.
get { return null; }
}
public override WriteState WriteState
{
get
{
if (_writeState == WriteState.Closed)
{
return WriteState.Closed;
}
if (HasOpenAttribute)
{
return WriteState.Attribute;
}
switch (_nodeType)
{
case JsonNodeType.None:
return WriteState.Start;
case JsonNodeType.Element:
return WriteState.Element;
case JsonNodeType.QuotedText:
case JsonNodeType.StandaloneText:
case JsonNodeType.EndElement:
return WriteState.Content;
default:
return WriteState.Error;
}
}
}
public override string XmlLang
{
get { return null; }
}
public override XmlSpace XmlSpace
{
get { return XmlSpace.None; }
}
private static BinHexEncoding BinHexEncoding
{
get
{
if (s_binHexEncoding == null)
{
s_binHexEncoding = new BinHexEncoding();
}
return s_binHexEncoding;
}
}
private bool HasOpenAttribute => (_isWritingDataTypeAttribute || _isWritingServerTypeAttribute || IsWritingNameAttribute || _isWritingXmlnsAttribute);
private bool IsClosed => (WriteState == WriteState.Closed);
private bool IsWritingCollection => (_depth > 0) && (_scopes[_depth] == JsonNodeType.Collection);
private bool IsWritingNameAttribute => (_nameState & NameState.IsWritingNameAttribute) == NameState.IsWritingNameAttribute;
private bool IsWritingNameWithMapping => (_nameState & NameState.IsWritingNameWithMapping) == NameState.IsWritingNameWithMapping;
private bool WrittenNameWithMapping => (_nameState & NameState.WrittenNameWithMapping) == NameState.WrittenNameWithMapping;
protected override void Dispose(bool disposing)
{
if (!IsClosed)
{
try
{
WriteEndDocument();
}
finally
{
try
{
_nodeWriter.Flush();
_nodeWriter.Close();
}
finally
{
_writeState = WriteState.Closed;
if (_depth != 0)
{
_depth = 0;
}
}
}
}
base.Dispose(disposing);
}
public override void Flush()
{
if (IsClosed)
{
ThrowClosed();
}
_nodeWriter.Flush();
}
public override string LookupPrefix(string ns)
{
if (ns == null)
{
throw new ArgumentNullException(nameof(ns));
}
if (ns == Globals.XmlnsNamespace)
{
return Globals.XmlnsPrefix;
}
if (ns == xmlNamespace)
{
return JsonGlobals.xmlPrefix;
}
if (ns == string.Empty)
{
return string.Empty;
}
return null;
}
public void SetOutput(Stream stream, Encoding encoding, bool ownsStream)
{
if (stream == null)
{
throw new ArgumentNullException(nameof(stream));
}
if (encoding == null)
{
throw new ArgumentNullException(nameof(encoding));
}
if (encoding.WebName != Encoding.UTF8.WebName)
{
stream = new JsonEncodingStreamWrapper(stream, encoding, false);
}
else
{
encoding = null;
}
if (_nodeWriter == null)
{
_nodeWriter = new JsonNodeWriter();
}
_nodeWriter.SetOutput(stream, ownsStream, encoding);
InitializeWriter();
}
public override void WriteArray(string prefix, string localName, string namespaceUri, bool[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, string localName, string namespaceUri, Int16[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, string localName, string namespaceUri, Int32[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, string localName, string namespaceUri, Int64[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, string localName, string namespaceUri, float[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, string localName, string namespaceUri, double[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, string localName, string namespaceUri, decimal[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, string localName, string namespaceUri, DateTime[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, string localName, string namespaceUri, Guid[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, string localName, string namespaceUri, TimeSpan[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, XmlDictionaryString localName, XmlDictionaryString namespaceUri, bool[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, XmlDictionaryString localName, XmlDictionaryString namespaceUri, decimal[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, XmlDictionaryString localName, XmlDictionaryString namespaceUri, double[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, XmlDictionaryString localName, XmlDictionaryString namespaceUri, float[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, XmlDictionaryString localName, XmlDictionaryString namespaceUri, int[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, XmlDictionaryString localName, XmlDictionaryString namespaceUri, long[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, XmlDictionaryString localName, XmlDictionaryString namespaceUri, short[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, XmlDictionaryString localName, XmlDictionaryString namespaceUri, DateTime[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, XmlDictionaryString localName, XmlDictionaryString namespaceUri, Guid[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteArray(string prefix, XmlDictionaryString localName, XmlDictionaryString namespaceUri, TimeSpan[] array, int offset, int count)
{
throw new NotSupportedException(SR.JsonWriteArrayNotSupported);
}
public override void WriteBase64(byte[] buffer, int index, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
// Not checking upper bound because it will be caught by "count". This is what XmlTextWriter does.
if (index < 0)
{
throw new ArgumentOutOfRangeException(nameof(index), SR.ValueMustBeNonNegative);
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count), SR.ValueMustBeNonNegative);
}
if (count > buffer.Length - index)
{
throw new ArgumentOutOfRangeException(nameof(count), SR.Format(SR.JsonSizeExceedsRemainingBufferSpace, buffer.Length - index));
}
StartText();
_nodeWriter.WriteBase64Text(buffer, 0, buffer, index, count);
}
public override void WriteBinHex(byte[] buffer, int index, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
// Not checking upper bound because it will be caught by "count". This is what XmlTextWriter does.
if (index < 0)
{
throw new ArgumentOutOfRangeException(nameof(index), SR.ValueMustBeNonNegative);
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count), SR.ValueMustBeNonNegative);
}
if (count > buffer.Length - index)
{
throw new ArgumentOutOfRangeException(nameof(count), SR.Format(SR.JsonSizeExceedsRemainingBufferSpace, buffer.Length - index));
}
StartText();
WriteEscapedJsonString(BinHexEncoding.GetString(buffer, index, count));
}
public override void WriteCData(string text)
{
WriteString(text);
}
public override void WriteCharEntity(char ch)
{
WriteString(ch.ToString());
}
public override void WriteChars(char[] buffer, int index, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
// Not checking upper bound because it will be caught by "count". This is what XmlTextWriter does.
if (index < 0)
{
throw new ArgumentOutOfRangeException(nameof(index), SR.ValueMustBeNonNegative);
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count), SR.ValueMustBeNonNegative);
}
if (count > buffer.Length - index)
{
throw new ArgumentOutOfRangeException(nameof(count), SR.Format(SR.JsonSizeExceedsRemainingBufferSpace, buffer.Length - index));
}
WriteString(new string(buffer, index, count));
}
public override void WriteComment(string text)
{
throw new NotSupportedException(SR.Format(SR.JsonMethodNotSupported, "WriteComment"));
}
[SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "2#sysid", Justification = "This method is derived from the base")]
[SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "1#pubid", Justification = "This method is derived from the base")]
public override void WriteDocType(string name, string pubid, string sysid, string subset)
{
throw new NotSupportedException(SR.Format(SR.JsonMethodNotSupported, "WriteDocType"));
}
public override void WriteEndAttribute()
{
if (IsClosed)
{
ThrowClosed();
}
if (!HasOpenAttribute)
{
throw new XmlException(SR.JsonNoMatchingStartAttribute);
}
Fx.Assert(!(_isWritingDataTypeAttribute && _isWritingServerTypeAttribute),
"Can not write type attribute and __type attribute at the same time.");
if (_isWritingDataTypeAttribute)
{
switch (_attributeText)
{
case JsonGlobals.numberString:
{
ThrowIfServerTypeWritten(JsonGlobals.numberString);
_dataType = JsonDataType.Number;
break;
}
case JsonGlobals.stringString:
{
ThrowIfServerTypeWritten(JsonGlobals.stringString);
_dataType = JsonDataType.String;
break;
}
case JsonGlobals.arrayString:
{
ThrowIfServerTypeWritten(JsonGlobals.arrayString);
_dataType = JsonDataType.Array;
break;
}
case JsonGlobals.objectString:
{
_dataType = JsonDataType.Object;
break;
}
case JsonGlobals.nullString:
{
ThrowIfServerTypeWritten(JsonGlobals.nullString);
_dataType = JsonDataType.Null;
break;
}
case JsonGlobals.booleanString:
{
ThrowIfServerTypeWritten(JsonGlobals.booleanString);
_dataType = JsonDataType.Boolean;
break;
}
default:
throw new XmlException(SR.Format(SR.JsonUnexpectedAttributeValue, _attributeText));
}
_attributeText = null;
_isWritingDataTypeAttribute = false;
if (!IsWritingNameWithMapping || WrittenNameWithMapping)
{
WriteDataTypeServerType();
}
}
else if (_isWritingServerTypeAttribute)
{
_serverTypeValue = _attributeText;
_attributeText = null;
_isWritingServerTypeAttribute = false;
// we are writing __type after type="object" (enforced by WSE)
if ((!IsWritingNameWithMapping || WrittenNameWithMapping) && _dataType == JsonDataType.Object)
{
WriteServerTypeAttribute();
}
}
else if (IsWritingNameAttribute)
{
WriteJsonElementName(_attributeText);
_attributeText = null;
_nameState = NameState.IsWritingNameWithMapping | NameState.WrittenNameWithMapping;
WriteDataTypeServerType();
}
else if (_isWritingXmlnsAttribute)
{
if (!string.IsNullOrEmpty(_attributeText) && _isWritingXmlnsAttributeDefaultNs)
{
throw new ArgumentException(SR.Format(SR.JsonNamespaceMustBeEmpty, _attributeText));
}
_attributeText = null;
_isWritingXmlnsAttribute = false;
_isWritingXmlnsAttributeDefaultNs = false;
}
}
public override void WriteEndDocument()
{
if (IsClosed)
{
ThrowClosed();
}
if (_nodeType != JsonNodeType.None)
{
while (_depth > 0)
{
WriteEndElement();
}
}
}
public override void WriteEndElement()
{
if (IsClosed)
{
ThrowClosed();
}
if (_depth == 0)
{
throw new XmlException(SR.JsonEndElementNoOpenNodes);
}
if (HasOpenAttribute)
{
throw new XmlException(SR.Format(SR.JsonOpenAttributeMustBeClosedFirst, "WriteEndElement"));
}
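// Pop the current scope: close the JSON array for a collection, emit the closing quote after quoted text,
// or write an empty string ("") for an element that produced no content.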
_endElementBuffer = false;
JsonNodeType token = ExitScope();
if (token == JsonNodeType.Collection)
{
_indentLevel--;
if (_indent)
{
if (_nodeType == JsonNodeType.Element)
{
_nodeWriter.WriteText(WHITESPACE);
}
else
{
WriteNewLine();
WriteIndent();
}
}
_nodeWriter.WriteText(JsonGlobals.EndCollectionChar);
token = ExitScope();
}
else if (_nodeType == JsonNodeType.QuotedText)
{
// Write the closing quote for the quoted text node.
WriteJsonQuote();
}
else if (_nodeType == JsonNodeType.Element)
{
if ((_dataType == JsonDataType.None) && (_serverTypeValue != null))
{
throw new XmlException(SR.Format(SR.JsonMustSpecifyDataType, JsonGlobals.typeString, JsonGlobals.objectString, JsonGlobals.serverTypeString));
}
if (IsWritingNameWithMapping && !WrittenNameWithMapping)
{
// Ending </item> without writing item attribute
// Not providing a better error message because localization deadline has passed.
throw new XmlException(SR.Format(SR.JsonMustSpecifyDataType, JsonGlobals.itemString, string.Empty, JsonGlobals.itemString));
}
// The element is empty and has no content, so write an empty string ("") as its value.
if ((_dataType == JsonDataType.None) ||
(_dataType == JsonDataType.String))
{
_nodeWriter.WriteText(JsonGlobals.QuoteChar);
_nodeWriter.WriteText(JsonGlobals.QuoteChar);
}
}
else
{
// Assert on only StandaloneText and EndElement because preceding if
// conditions take care of checking for QuotedText and Element.
Fx.Assert((_nodeType == JsonNodeType.StandaloneText) || (_nodeType == JsonNodeType.EndElement),
"nodeType has invalid value " + _nodeType + ". Expected it to be QuotedText, Element, StandaloneText, or EndElement.");
}
if (_depth != 0)
{
if (token == JsonNodeType.Element)
{
_endElementBuffer = true;
}
else if (token == JsonNodeType.Object)
{
_indentLevel--;
if (_indent)
{
if (_nodeType == JsonNodeType.Element)
{
_nodeWriter.WriteText(WHITESPACE);
}
else
{
WriteNewLine();
WriteIndent();
}
}
_nodeWriter.WriteText(JsonGlobals.EndObjectChar);
if ((_depth > 0) && _scopes[_depth] == JsonNodeType.Element)
{
ExitScope();
_endElementBuffer = true;
}
}
}
_dataType = JsonDataType.None;
_nodeType = JsonNodeType.EndElement;
_nameState = NameState.None;
_wroteServerTypeAttribute = false;
}
public override void WriteEntityRef(string name)
{
throw new NotSupportedException(SR.Format(SR.JsonMethodNotSupported, "WriteEntityRef"));
}
public override void WriteFullEndElement()
{
WriteEndElement();
}
public override void WriteProcessingInstruction(string name, string text)
{
if (IsClosed)
{
ThrowClosed();
}
if (!name.Equals("xml", StringComparison.OrdinalIgnoreCase))
{
throw new ArgumentException(SR.JsonXmlProcessingInstructionNotSupported, nameof(name));
}
if (WriteState != WriteState.Start)
{
throw new XmlException(SR.JsonXmlInvalidDeclaration);
}
}
public override void WriteQualifiedName(string localName, string ns)
{
if (localName == null)
{
throw new ArgumentNullException(nameof(localName));
}
if (localName.Length == 0)
{
throw new ArgumentException(SR.JsonInvalidLocalNameEmpty, nameof(localName));
}
if (ns == null)
{
ns = string.Empty;
}
base.WriteQualifiedName(localName, ns);
}
public override void WriteRaw(string data)
{
WriteString(data);
}
public override void WriteRaw(char[] buffer, int index, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
// Not checking upper bound because it will be caught by "count". This is what XmlTextWriter does.
if (index < 0)
{
throw new ArgumentOutOfRangeException(nameof(index), SR.ValueMustBeNonNegative);
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count), SR.ValueMustBeNonNegative);
}
if (count > buffer.Length - index)
{
throw new ArgumentOutOfRangeException(nameof(count), SR.Format(SR.JsonSizeExceedsRemainingBufferSpace, buffer.Length - index));
}
WriteString(new string(buffer, index, count));
}
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Globalization", "CA1308:NormalizeStringsToUppercase")] // Microsoft, ToLowerInvariant is just used in Json error message
public override void WriteStartAttribute(string prefix, string localName, string ns)
{
if (IsClosed)
{
ThrowClosed();
}
if (!string.IsNullOrEmpty(prefix))
{
if (IsWritingNameWithMapping && prefix == JsonGlobals.xmlnsPrefix)
{
if (ns != null && ns != xmlnsNamespace)
{
throw new ArgumentException(SR.Format(SR.XmlPrefixBoundToNamespace, "xmlns", xmlnsNamespace, ns), nameof(ns));
}
}
else
{
throw new ArgumentException(SR.Format(SR.JsonPrefixMustBeNullOrEmpty, prefix), nameof(prefix));
}
}
else
{
if (IsWritingNameWithMapping && ns == xmlnsNamespace && localName != JsonGlobals.xmlnsPrefix)
{
prefix = JsonGlobals.xmlnsPrefix;
}
}
if (!string.IsNullOrEmpty(ns))
{
if (IsWritingNameWithMapping && ns == xmlnsNamespace)
{
prefix = JsonGlobals.xmlnsPrefix;
}
else if (string.IsNullOrEmpty(prefix) && localName == JsonGlobals.xmlnsPrefix && ns == xmlnsNamespace)
{
prefix = JsonGlobals.xmlnsPrefix;
_isWritingXmlnsAttributeDefaultNs = true;
}
else
{
throw new ArgumentException(SR.Format(SR.JsonNamespaceMustBeEmpty, ns), nameof(ns));
}
}
if (localName == null)
{
throw new ArgumentNullException(nameof(localName));
}
if (localName.Length == 0)
{
throw new ArgumentException(SR.JsonInvalidLocalNameEmpty, nameof(localName));
}
if ((_nodeType != JsonNodeType.Element) && !_wroteServerTypeAttribute)
{
throw new XmlException(SR.JsonAttributeMustHaveElement);
}
if (HasOpenAttribute)
{
throw new XmlException(SR.Format(SR.JsonOpenAttributeMustBeClosedFirst, "WriteStartAttribute"));
}
if (prefix == JsonGlobals.xmlnsPrefix)
{
_isWritingXmlnsAttribute = true;
}
else if (localName == JsonGlobals.typeString)
{
if (_dataType != JsonDataType.None)
{
throw new XmlException(SR.Format(SR.JsonAttributeAlreadyWritten, JsonGlobals.typeString));
}
_isWritingDataTypeAttribute = true;
}
else if (localName == JsonGlobals.serverTypeString)
{
if (_serverTypeValue != null)
{
throw new XmlException(SR.Format(SR.JsonAttributeAlreadyWritten, JsonGlobals.serverTypeString));
}
if ((_dataType != JsonDataType.None) && (_dataType != JsonDataType.Object))
{
throw new XmlException(SR.Format(SR.JsonServerTypeSpecifiedForInvalidDataType,
JsonGlobals.serverTypeString, JsonGlobals.typeString, _dataType.ToString().ToLowerInvariant(), JsonGlobals.objectString));
}
_isWritingServerTypeAttribute = true;
}
else if (localName == JsonGlobals.itemString)
{
if (WrittenNameWithMapping)
{
throw new XmlException(SR.Format(SR.JsonAttributeAlreadyWritten, JsonGlobals.itemString));
}
if (!IsWritingNameWithMapping)
{
// Don't write attribute with local name "item" if <item> element is not open.
// Not providing a better error message because localization deadline has passed.
throw new XmlException(SR.JsonEndElementNoOpenNodes);
}
_nameState |= NameState.IsWritingNameAttribute;
}
else
{
throw new ArgumentException(SR.Format(SR.JsonUnexpectedAttributeLocalName, localName), nameof(localName));
}
}
public override void WriteStartDocument(bool standalone)
{
// In XML, writes the XML declaration with the version "1.0" and the standalone attribute.
WriteStartDocument();
}
public override void WriteStartDocument()
{
// In XML, writes the XML declaration with the version "1.0".
if (IsClosed)
{
ThrowClosed();
}
if (WriteState != WriteState.Start)
{
throw new XmlException(SR.Format(SR.JsonInvalidWriteState, "WriteStartDocument", WriteState.ToString()));
}
}
public override void WriteStartElement(string prefix, string localName, string ns)
{
if (localName == null)
{
throw new ArgumentNullException(nameof(localName));
}
if (localName.Length == 0)
{
throw new ArgumentException(SR.JsonInvalidLocalNameEmpty, nameof(localName));
}
if (!string.IsNullOrEmpty(prefix))
{
if (string.IsNullOrEmpty(ns) || !TrySetWritingNameWithMapping(localName, ns))
{
throw new ArgumentException(SR.Format(SR.JsonPrefixMustBeNullOrEmpty, prefix), nameof(prefix));
}
}
if (!string.IsNullOrEmpty(ns))
{
if (!TrySetWritingNameWithMapping(localName, ns))
{
throw new ArgumentException(SR.Format(SR.JsonNamespaceMustBeEmpty, ns), nameof(ns));
}
}
if (IsClosed)
{
ThrowClosed();
}
if (HasOpenAttribute)
{
throw new XmlException(SR.Format(SR.JsonOpenAttributeMustBeClosedFirst, "WriteStartElement"));
}
if ((_nodeType != JsonNodeType.None) && _depth == 0)
{
throw new XmlException(SR.JsonMultipleRootElementsNotAllowedOnWriter);
}
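// Only the synthetic "root" element is accepted at depth 0. Nested elements either start a new JSON
// object member (the name is written here) or, inside a collection, must be named "item" and become
// array entries.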
switch (_nodeType)
{
case JsonNodeType.None:
{
if (!localName.Equals(JsonGlobals.rootString))
{
throw new XmlException(SR.Format(SR.JsonInvalidRootElementName, localName, JsonGlobals.rootString));
}
EnterScope(JsonNodeType.Element);
break;
}
case JsonNodeType.Element:
{
if ((_dataType != JsonDataType.Array) && (_dataType != JsonDataType.Object))
{
throw new XmlException(SR.JsonNodeTypeArrayOrObjectNotSpecified);
}
if (_indent)
{
WriteNewLine();
WriteIndent();
}
if (!IsWritingCollection)
{
if (_nameState != NameState.IsWritingNameWithMapping)
{
WriteJsonElementName(localName);
}
}
else if (!localName.Equals(JsonGlobals.itemString))
{
throw new XmlException(SR.Format(SR.JsonInvalidItemNameForArrayElement, localName, JsonGlobals.itemString));
}
EnterScope(JsonNodeType.Element);
break;
}
case JsonNodeType.EndElement:
{
if (_endElementBuffer)
{
_nodeWriter.WriteText(JsonGlobals.MemberSeparatorChar);
}
if (_indent)
{
WriteNewLine();
WriteIndent();
}
if (!IsWritingCollection)
{
if (_nameState != NameState.IsWritingNameWithMapping)
{
WriteJsonElementName(localName);
}
}
else if (!localName.Equals(JsonGlobals.itemString))
{
throw new XmlException(SR.Format(SR.JsonInvalidItemNameForArrayElement, localName, JsonGlobals.itemString));
}
EnterScope(JsonNodeType.Element);
break;
}
default:
throw new XmlException(SR.JsonInvalidStartElementCall);
}
_isWritingDataTypeAttribute = false;
_isWritingServerTypeAttribute = false;
_isWritingXmlnsAttribute = false;
_wroteServerTypeAttribute = false;
_serverTypeValue = null;
_dataType = JsonDataType.None;
_nodeType = JsonNodeType.Element;
}
public override void WriteString(string text)
{
if (HasOpenAttribute && (text != null))
{
_attributeText += text;
}
else
{
if (text == null)
{
text = string.Empty;
}
// do work only when not indenting whitespace
if (!((_dataType == JsonDataType.Array || _dataType == JsonDataType.Object || _nodeType == JsonNodeType.EndElement) && XmlConverter.IsWhitespace(text)))
{
StartText();
WriteEscapedJsonString(text);
}
}
}
public override void WriteSurrogateCharEntity(char lowChar, char highChar)
{
WriteString(string.Concat(highChar, lowChar));
}
public override void WriteValue(bool value)
{
StartText();
_nodeWriter.WriteBoolText(value);
}
public override void WriteValue(decimal value)
{
StartText();
_nodeWriter.WriteDecimalText(value);
}
public override void WriteValue(double value)
{
StartText();
_nodeWriter.WriteDoubleText(value);
}
public override void WriteValue(float value)
{
StartText();
_nodeWriter.WriteFloatText(value);
}
public override void WriteValue(int value)
{
StartText();
_nodeWriter.WriteInt32Text(value);
}
public override void WriteValue(long value)
{
StartText();
_nodeWriter.WriteInt64Text(value);
}
public override void WriteValue(Guid value)
{
StartText();
_nodeWriter.WriteGuidText(value);
}
public override void WriteValue(DateTime value)
{
StartText();
_nodeWriter.WriteDateTimeText(value);
}
public override void WriteValue(string value)
{
WriteString(value);
}
public override void WriteValue(TimeSpan value)
{
StartText();
_nodeWriter.WriteTimeSpanText(value);
}
public override void WriteValue(UniqueId value)
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
StartText();
_nodeWriter.WriteUniqueIdText(value);
}
public override void WriteValue(object value)
{
if (IsClosed)
{
ThrowClosed();
}
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
if (value is Array)
{
WriteValue((Array)value);
}
else if (value is IStreamProvider)
{
WriteValue((IStreamProvider)value);
}
else
{
WritePrimitiveValue(value);
}
}
[SuppressMessage("Microsoft.Naming", "CA1702:CompoundWordsShouldBeCasedCorrectly", MessageId = "Whitespace", Justification = "This method is derived from the base")]
public override void WriteWhitespace(string ws)
{
if (IsClosed)
{
ThrowClosed();
}
if (ws == null)
{
throw new ArgumentNullException(nameof(ws));
}
for (int i = 0; i < ws.Length; ++i)
{
char c = ws[i];
if (c != ' ' &&
c != '\t' &&
c != '\n' &&
c != '\r')
{
throw new ArgumentException(SR.Format(SR.JsonOnlyWhitespace, c.ToString(), "WriteWhitespace"), nameof(ws));
}
}
WriteString(ws);
}
public override void WriteXmlAttribute(string localName, string value)
{
throw new NotSupportedException(SR.Format(SR.JsonMethodNotSupported, "WriteXmlAttribute"));
}
public override void WriteXmlAttribute(XmlDictionaryString localName, XmlDictionaryString value)
{
throw new NotSupportedException(SR.Format(SR.JsonMethodNotSupported, "WriteXmlAttribute"));
}
public override void WriteXmlnsAttribute(string prefix, string namespaceUri)
{
if (!IsWritingNameWithMapping)
{
throw new NotSupportedException(SR.Format(SR.JsonMethodNotSupported, "WriteXmlnsAttribute"));
}
}
public override void WriteXmlnsAttribute(string prefix, XmlDictionaryString namespaceUri)
{
if (!IsWritingNameWithMapping)
{
throw new NotSupportedException(SR.Format(SR.JsonMethodNotSupported, "WriteXmlnsAttribute"));
}
}
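// True for characters that must be escaped in JSON output: the quote, backslash and forward slash,
// control characters below the space character, and surrogate or out-of-range code points (the latter
// are emitted as \uXXXX escapes by WriteEscapedJsonString).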
internal static bool CharacterNeedsEscaping(char ch)
{
return (ch == FORWARD_SLASH || ch == JsonGlobals.QuoteChar || ch < WHITESPACE || ch == BACK_SLASH
|| (ch >= HIGH_SURROGATE_START && (ch <= LOW_SURROGATE_END || ch >= MAX_CHAR)));
}
private static void ThrowClosed()
{
throw new InvalidOperationException(SR.JsonWriterClosed);
}
private void CheckText(JsonNodeType nextNodeType)
{
if (IsClosed)
{
ThrowClosed();
}
if (_depth == 0)
{
throw new InvalidOperationException(SR.XmlIllegalOutsideRoot);
}
if ((nextNodeType == JsonNodeType.StandaloneText) &&
(_nodeType == JsonNodeType.QuotedText))
{
throw new XmlException(SR.JsonCannotWriteStandaloneTextAfterQuotedText);
}
}
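// Pushes the node type for the new nesting level onto the scope stack, doubling the stack when the
// current depth reaches its length.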
private void EnterScope(JsonNodeType currentNodeType)
{
_depth++;
if (_scopes == null)
{
_scopes = new JsonNodeType[4];
}
else if (_scopes.Length == _depth)
{
JsonNodeType[] newScopes = new JsonNodeType[_depth * 2];
Array.Copy(_scopes, 0, newScopes, 0, _depth);
_scopes = newScopes;
}
_scopes[_depth] = currentNodeType;
}
private JsonNodeType ExitScope()
{
JsonNodeType nodeTypeToReturn = _scopes[_depth];
_scopes[_depth] = JsonNodeType.None;
_depth--;
return nodeTypeToReturn;
}
private void InitializeWriter()
{
_nodeType = JsonNodeType.None;
_dataType = JsonDataType.None;
_isWritingDataTypeAttribute = false;
_wroteServerTypeAttribute = false;
_isWritingServerTypeAttribute = false;
_serverTypeValue = null;
_attributeText = null;
if (_depth != 0)
{
_depth = 0;
}
if ((_scopes != null) && (_scopes.Length > JsonGlobals.maxScopeSize))
{
_scopes = null;
}
// Can't let writeState be at Closed if reinitializing.
_writeState = WriteState.Start;
_endElementBuffer = false;
_indentLevel = 0;
}
private static bool IsUnicodeNewlineCharacter(char c)
{
// Newline characters in JSON strings need to be encoded on the way out (DevDiv #665974)
// See Unicode 6.2, Table 5-1 (http://www.unicode.org/versions/Unicode6.2.0/ch05.pdf]) for the full list.
// We only care about NEL, LS, and PS, since the other newline characters are all
// control characters so are already encoded.
return (c == '\u0085' || c == '\u2028' || c == '\u2029');
}
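// Prepares the writer for text content: string (or untyped) data opens a quoted text node, writing the
// opening quote if one is not already open; number and boolean data are written as standalone, unquoted
// text; any other data type is invalid here.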
private void StartText()
{
if (HasOpenAttribute)
{
throw new InvalidOperationException(SR.JsonMustUseWriteStringForWritingAttributeValues);
}
if ((_dataType == JsonDataType.None) && (_serverTypeValue != null))
{
throw new XmlException(SR.Format(SR.JsonMustSpecifyDataType, JsonGlobals.typeString, JsonGlobals.objectString, JsonGlobals.serverTypeString));
}
if (IsWritingNameWithMapping && !WrittenNameWithMapping)
{
// Don't write out any text content unless the local name has been written.
// Not providing a better error message because localization deadline has passed.
throw new XmlException(SR.Format(SR.JsonMustSpecifyDataType, JsonGlobals.itemString, string.Empty, JsonGlobals.itemString));
}
if ((_dataType == JsonDataType.String) ||
(_dataType == JsonDataType.None))
{
CheckText(JsonNodeType.QuotedText);
if (_nodeType != JsonNodeType.QuotedText)
{
WriteJsonQuote();
}
_nodeType = JsonNodeType.QuotedText;
}
else if ((_dataType == JsonDataType.Number) ||
(_dataType == JsonDataType.Boolean))
{
CheckText(JsonNodeType.StandaloneText);
_nodeType = JsonNodeType.StandaloneText;
}
else
{
ThrowInvalidAttributeContent();
}
}
private void ThrowIfServerTypeWritten(string dataTypeSpecified)
{
if (_serverTypeValue != null)
{
throw new XmlException(SR.Format(SR.JsonInvalidDataTypeSpecifiedForServerType, JsonGlobals.typeString, dataTypeSpecified, JsonGlobals.serverTypeString, JsonGlobals.objectString));
}
}
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Globalization", "CA1308:NormalizeStringsToUppercase")] // Microsoft, ToLowerInvariant is just used in Json error message
private void ThrowInvalidAttributeContent()
{
if (HasOpenAttribute)
{
throw new XmlException(SR.JsonInvalidMethodBetweenStartEndAttribute);
}
else
{
throw new XmlException(SR.Format(SR.JsonCannotWriteTextAfterNonTextAttribute, _dataType.ToString().ToLowerInvariant()));
}
}
private bool TrySetWritingNameWithMapping(string localName, string ns)
{
if (localName.Equals(JsonGlobals.itemString) && ns.Equals(JsonGlobals.itemString))
{
_nameState = NameState.IsWritingNameWithMapping;
return true;
}
return false;
}
private void WriteDataTypeServerType()
{
if (_dataType != JsonDataType.None)
{
switch (_dataType)
{
case JsonDataType.Array:
{
EnterScope(JsonNodeType.Collection);
_nodeWriter.WriteText(JsonGlobals.CollectionChar);
_indentLevel++;
break;
}
case JsonDataType.Object:
{
EnterScope(JsonNodeType.Object);
_nodeWriter.WriteText(JsonGlobals.ObjectChar);
_indentLevel++;
break;
}
case JsonDataType.Null:
{
_nodeWriter.WriteText(JsonGlobals.nullString);
break;
}
default:
break;
}
if (_serverTypeValue != null)
{
// dataType must be object because we throw in all other cases.
WriteServerTypeAttribute();
}
}
}
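// Copies the string to the node writer in unescaped runs: 'i' marks the start of the current run and
// 'j' is the scan position. Quote, forward slash and backslash are backslash-escaped, control characters
// go through the escape table, and surrogates and Unicode newline characters are written as \uXXXX.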
private unsafe void WriteEscapedJsonString(string str)
{
fixed (char* chars = str)
{
int i = 0;
int j;
for (j = 0; j < str.Length; j++)
{
char ch = chars[j];
if (ch <= FORWARD_SLASH)
{
if (ch == FORWARD_SLASH || ch == JsonGlobals.QuoteChar)
{
_nodeWriter.WriteChars(chars + i, j - i);
_nodeWriter.WriteText(BACK_SLASH);
_nodeWriter.WriteText(ch);
i = j + 1;
}
else if (ch < WHITESPACE)
{
_nodeWriter.WriteChars(chars + i, j - i);
_nodeWriter.WriteText(s_escapedJsonStringTable[ch]);
i = j + 1;
}
}
else if (ch == BACK_SLASH)
{
_nodeWriter.WriteChars(chars + i, j - i);
_nodeWriter.WriteText(BACK_SLASH);
_nodeWriter.WriteText(ch);
i = j + 1;
}
else if ((ch >= HIGH_SURROGATE_START && (ch <= LOW_SURROGATE_END || ch >= MAX_CHAR)) || IsUnicodeNewlineCharacter(ch))
{
_nodeWriter.WriteChars(chars + i, j - i);
_nodeWriter.WriteText(BACK_SLASH);
_nodeWriter.WriteText('u');
_nodeWriter.WriteText(string.Format(CultureInfo.InvariantCulture, "{0:x4}", (int)ch));
i = j + 1;
}
}
if (i < j)
{
_nodeWriter.WriteChars(chars + i, j - i);
}
}
}
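// Maps the common control characters to their single-letter JSON escape abbreviations (\b, \f, \n, \r, \t);
// returns false for any other character.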
private static bool TryEscapeControlCharacter(char ch, out char abbrev)
{
switch (ch)
{
case BACKSPACE:
abbrev = 'b';
break;
case FORM_FEED:
abbrev = 'f';
break;
case NEWLINE:
abbrev = 'n';
break;
case CARRIAGE_RETURN:
abbrev = 'r';
break;
case HORIZONTAL_TABULATION:
abbrev = 't';
break;
default:
abbrev = ' ';
return false;
}
return true;
}
private void WriteIndent()
{
for (int i = 0; i < _indentLevel; i++)
{
_nodeWriter.WriteText(_indentChars);
}
}
private void WriteNewLine()
{
_nodeWriter.WriteText(CARRIAGE_RETURN);
_nodeWriter.WriteText(NEWLINE);
}
private void WriteJsonElementName(string localName)
{
WriteJsonQuote();
WriteEscapedJsonString(localName);
WriteJsonQuote();
_nodeWriter.WriteText(JsonGlobals.NameValueSeparatorChar);
if (_indent)
{
_nodeWriter.WriteText(WHITESPACE);
}
}
private void WriteJsonQuote()
{
_nodeWriter.WriteText(JsonGlobals.QuoteChar);
}
private void WritePrimitiveValue(object value)
{
if (IsClosed)
{
ThrowClosed();
}
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
if (value is ulong)
{
WriteValue((ulong)value);
}
else if (value is string)
{
WriteValue((string)value);
}
else if (value is int)
{
WriteValue((int)value);
}
else if (value is long)
{
WriteValue((long)value);
}
else if (value is bool)
{
WriteValue((bool)value);
}
else if (value is double)
{
WriteValue((double)value);
}
else if (value is DateTime)
{
WriteValue((DateTime)value);
}
else if (value is float)
{
WriteValue((float)value);
}
else if (value is decimal)
{
WriteValue((decimal)value);
}
else if (value is XmlDictionaryString)
{
WriteValue((XmlDictionaryString)value);
}
else if (value is UniqueId)
{
WriteValue((UniqueId)value);
}
else if (value is Guid)
{
WriteValue((Guid)value);
}
else if (value is TimeSpan)
{
WriteValue((TimeSpan)value);
}
else if (value.GetType().IsArray)
{
throw new ArgumentException(SR.JsonNestedArraysNotSupported, nameof(value));
}
else
{
base.WriteValue(value);
}
}
private void WriteServerTypeAttribute()
{
string value = _serverTypeValue;
JsonDataType oldDataType = _dataType;
NameState oldNameState = _nameState;
WriteStartElement(JsonGlobals.serverTypeString);
WriteValue(value);
WriteEndElement();
_dataType = oldDataType;
_nameState = oldNameState;
_wroteServerTypeAttribute = true;
}
private void WriteValue(ulong value)
{
StartText();
_nodeWriter.WriteUInt64Text(value);
}
private void WriteValue(Array array)
{
// This method is called only if WriteValue(object) is called with an array
// The contract for XmlWriter.WriteValue(object) requires that this object array be written out as a string.
// E.g. WriteValue(new int[] { 1, 2, 3}) should be equivalent to WriteString("1 2 3").
JsonDataType oldDataType = _dataType;
// Set attribute mode to String because WritePrimitiveValue might write numerical text.
// Calls to methods that write numbers can't be mixed with calls that write quoted text unless the attribute mode is explicitly string.
_dataType = JsonDataType.String;
StartText();
for (int i = 0; i < array.Length; i++)
{
if (i != 0)
{
_nodeWriter.WriteText(JsonGlobals.WhitespaceChar);
}
WritePrimitiveValue(array.GetValue(i));
}
_dataType = oldDataType;
}
private class JsonNodeWriter : XmlUTF8NodeWriter
{
internal unsafe void WriteChars(char* chars, int charCount)
{
base.UnsafeWriteUTF8Chars(chars, charCount);
}
}
}
}
/*
Copyright (C) 2013-2015 MetaMorph Software, Inc
Permission is hereby granted, free of charge, to any person obtaining a
copy of this data, including any software or models in source or binary
form, as well as any drawings, specifications, and documentation
(collectively "the Data"), to deal in the Data without restriction,
including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Data, and to
permit persons to whom the Data is furnished to do so, subject to the
following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Data.
THE DATA IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS, SPONSORS, DEVELOPERS, CONTRIBUTORS, OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE DATA OR THE USE OR OTHER DEALINGS IN THE DATA.
=======================
This version of the META tools is a fork of an original version produced
by Vanderbilt University's Institute for Software Integrated Systems (ISIS).
Their license statement:
Copyright (C) 2011-2014 Vanderbilt University
Developed with the sponsorship of the Defense Advanced Research Projects
Agency (DARPA) and delivered to the U.S. Government with Unlimited Rights
as defined in DFARS 252.227-7013.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this data, including any software or models in source or binary
form, as well as any drawings, specifications, and documentation
(collectively "the Data"), to deal in the Data without restriction,
including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Data, and to
permit persons to whom the Data is furnished to do so, subject to the
following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Data.
THE DATA IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS, SPONSORS, DEVELOPERS, CONTRIBUTORS, OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE DATA OR THE USE OR OTHER DEALINGS IN THE DATA.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
using GME.CSharp;
using GME;
using GME.MGA;
using GME.MGA.Core;
using System.Windows.Forms;
using CyPhy = ISIS.GME.Dsml.CyPhyML.Interfaces;
using CyPhyClasses = ISIS.GME.Dsml.CyPhyML.Classes;
using CyPhy2DesignInterchange;
using System.Linq;
using CyPhyGUIs;
using System.Reflection;
using System.Xml;
using META;
using Ionic.Zip;
namespace CyPhyDesignExporter
{
/// <summary>
/// This class implements the necessary COM interfaces for a GME interpreter component.
/// </summary>
[Guid(ComponentConfig.guid),
ProgId(ComponentConfig.progID),
ClassInterface(ClassInterfaceType.AutoDual)]
[ComVisible(true)]
public class CyPhyDesignExporterInterpreter : IMgaComponentEx, IGMEVersionInfo, ICyPhyInterpreter
{
/// <summary>
/// Contains information about the GUI event that initiated the invocation.
/// </summary>
public enum ComponentStartMode
{
GME_MAIN_START = 0, // Not used by GME
GME_BROWSER_START = 1, // Right click in the GME Tree Browser window
GME_CONTEXT_START = 2, // Using the context menu by right clicking a model element in the GME modeling window
GME_EMBEDDED_START = 3, // Not used by GME
GME_MENU_START = 16, // Clicking on the toolbar icon, or using the main menu
GME_BGCONTEXT_START = 18, // Using the context menu by right clicking the background of the GME modeling window
GME_ICON_START = 32, // Not used by GME
GME_SILENT_MODE = 128 // Not used by GME, available to testers not using GME
}
/// <summary>
/// This function is called for each interpreter invocation before Main.
/// Don't perform MGA operations here unless you open a transaction.
/// </summary>
/// <param name="project">The handle of the project opened in GME, for which the interpreter was called.</param>
public void Initialize(MgaProject project)
{
// TODO: Add your initialization code here...
}
/// <summary>
/// Parameter of this run.
/// </summary>
private InterpreterMainParameters mainParameters { get; set; }
public string InterpreterConfigurationProgId
{
get
{
return (typeof(CyPhyGUIs.NullInterpreterConfiguration).GetCustomAttributes(typeof(ProgIdAttribute), false)[0] as ProgIdAttribute).Value;
}
}
public IInterpreterPreConfiguration PreConfig(IPreConfigParameters parameters)
{
return null;
}
public IInterpreterConfiguration DoGUIConfiguration(IInterpreterPreConfiguration preConfig, IInterpreterConfiguration previousConfig)
{
return new CyPhyGUIs.NullInterpreterConfiguration();
}
public IInterpreterResult Main(IInterpreterMainParameters parameters)
{
this.mainParameters = (InterpreterMainParameters)parameters;
try
{
MgaGateway = new MgaGateway(mainParameters.Project);
parameters.Project.CreateTerritoryWithoutSink(out MgaGateway.territory);
MgaGateway.PerformInTransaction(delegate
{
MainInTransaction((InterpreterMainParameters)parameters);
});
return new InterpreterResult() { Success = true, RunCommand = "" };
}
finally
{
if (MgaGateway != null && MgaGateway.territory != null)
{
MgaGateway.territory.Destroy();
}
MgaGateway = null;
GC.Collect();
GC.WaitForPendingFinalizers();
}
}
public void MainInTransaction(InterpreterMainParameters parameters)
{
this.mainParameters = (InterpreterMainParameters)parameters;
Boolean disposeLogger = false;
if (Logger == null)
{
Logger = new GMELogger(mainParameters.Project, "CyPhyDesignExporter");
disposeLogger = true;
}
var currentObject = mainParameters.CurrentFCO;
var currentOutputDirectory = mainParameters.OutputDirectory;
string artifactName = string.Empty;
string metaBaseName = currentObject.MetaBase.Name;
try
{
if (metaBaseName == typeof(CyPhyClasses.DesignContainer).Name)
{
artifactName = ExportToFile(CyPhyClasses.DesignContainer.Cast(currentObject), currentOutputDirectory);
}
else if (metaBaseName == typeof(CyPhyClasses.ComponentAssembly).Name)
{
artifactName = ExportToFile(CyPhyClasses.ComponentAssembly.Cast(currentObject), currentOutputDirectory);
}
else if (IsTestBenchType(metaBaseName))
{
artifactName = ExportToFile(CyPhyClasses.TestBenchType.Cast(currentObject), currentOutputDirectory);
}
if (!string.IsNullOrWhiteSpace(artifactName))
{
var manifest = AVM.DDP.MetaTBManifest.OpenForUpdate(currentOutputDirectory);
manifest.AddArtifact(Path.GetFileName(artifactName), "Design Model");
manifest.Serialize(currentOutputDirectory);
}
}
finally
{
if (disposeLogger)
{
DisposeLogger();
}
}
}
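// Lazily computed names of every type in the CyPhy interfaces assembly that is assignable to
// TestBenchType; used by IsTestBenchType for a simple name lookup.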
private static Lazy<string[]> m_TestBenchTypeNames = new Lazy<string[]>(() =>
{
var tbt = typeof(CyPhy.TestBenchType);
return Assembly.
GetAssembly(tbt).
GetTypes().
Where(t => tbt.IsAssignableFrom(t)).
Select(x => x.Name).ToArray();
});
private static string[] TestBenchTypeNames
{
get
{
return m_TestBenchTypeNames.Value;
}
}
private bool IsDesignType(string typeName)
{
if (typeName == "ComponentAssembly" ||
typeName == "DesignContainer")
{
return true;
}
return false;
}
private bool IsTestBenchType(string typeName)
{
return TestBenchTypeNames.Contains(typeName);
}
private string ExportToFile(CyPhy.TestBenchType testBench, string outputDirectory)
{
var topLevelSystem = testBench.Children.TopLevelSystemUnderTestCollection.FirstOrDefault();
if (topLevelSystem != null)
{
var design = topLevelSystem.Referred.DesignEntity;
if (design != null)
{
return ExportToFile(design, outputDirectory);
}
}
else
{
var tlsut = ((MgaObject)testBench).ChildObjects.
Cast<MgaObject>().
OfType<MgaFCO>().
Where(x => x.MetaBase.Name == "TopLevelSystemUnderTest")
.Cast<CyPhyClasses.DesignEntity>().FirstOrDefault();
if (tlsut != null)
return ExportToFile(tlsut, outputDirectory);
}
throw new NotSupportedException("No TopLevelSystemUnderTest found");
}
private String Safeify(String s_in)
{
String rtn = s_in;
rtn = rtn.Replace("\\", "_");
rtn = rtn.Replace("/", "_");
return rtn;
}
private string ExportToFile(CyPhy.DesignEntity de, String s_outFolder)
{
// Elaborate first
CallElaborator(de.Impl.Project, de.Impl as MgaFCO, null, 128, true);
var dm = CyPhy2DesignInterchange.CyPhy2DesignInterchange.Convert(de);
String s_outFilePath = String.Format("{0}\\{1}.adm", s_outFolder, Safeify(de.Name));
//dm.SaveToFile(s_outFilePath);
XSD2CSharp.AvmXmlSerializer.SaveToFile(Path.GetFullPath(Path.Combine(s_outFolder, Safeify(de.Name) + ".adm")), dm);
CheckForDuplicateIDs(dm);
return s_outFilePath;
}
public string ExportToPackage(CyPhy.ComponentAssembly ca, String s_outFolder)
{
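// The .adp package is a zip archive containing the component assembly's directory contents
// (with their relative folder structure) plus the exported .adm design file at the archive root.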
// Create a temp folder
var pathTemp = Path.Combine(System.IO.Path.GetTempPath(), Path.GetRandomFileName());
Directory.CreateDirectory(pathTemp);
// Export an ADM file to that temp folder
var pathADM = ExportToFile(ca, pathTemp);
// Generate zip file
String pathADP = Path.Combine(s_outFolder,
Path.GetFileNameWithoutExtension(pathADM) + ".adp");
File.Delete(pathADP);
using (ZipFile zip = new ZipFile(pathADP)
{
CompressionLevel = Ionic.Zlib.CompressionLevel.BestCompression
})
{
var pathCA = ca.GetDirectoryPath(ComponentLibraryManager.PathConvention.ABSOLUTE);
if (false == (pathCA.EndsWith("//") ||
pathCA.EndsWith("\\\\")))
{
pathCA += "//";
}
foreach (var file in Directory.EnumerateFiles(pathCA, "*.*", SearchOption.AllDirectories))
{
var relpath = Path.GetDirectoryName(ComponentLibraryManager.MakeRelativePath(pathCA, file));
zip.AddFile(file, relpath);
}
// Add the ADM file
zip.AddFile(pathADM, "");
zip.Save();
}
// Delete temporary directory
Directory.Delete(pathTemp, true);
return pathADP;
}
public bool CheckForDuplicateIDs(avm.Design d)
{
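// Serializes the design to XML and scans every ID attribute; returns true (and logs an error)
// if any non-empty ID value occurs more than once.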
//String str = d.Serialize();
String str = XSD2CSharp.AvmXmlSerializer.Serialize(d);
XmlDocument doc = new XmlDocument();
doc.LoadXml(str);
XmlNode root = doc.DocumentElement;
var ls_EncounteredIDs = new List<String>();
foreach (XmlAttribute node in root.SelectNodes("//@ID"))
{
ls_EncounteredIDs.Add(node.Value);
}
// Get all duplicate IDs that aren't empty/whitespace
var duplicates = ls_EncounteredIDs.Where(s => !String.IsNullOrWhiteSpace(s))
.GroupBy(s => s)
.Where(g => g.Count() > 1)
.Select(g => g.Key)
.ToList();
if (duplicates.Any())
{
String msg = "Duplicate IDs found in exported design: ";
foreach (var dupe in duplicates)
msg += String.Format("{0}\"{1}\", ", Environment.NewLine, dupe);
if (Logger != null)
Logger.WriteError(msg);
return true;
}
return false;
}
private bool CallElaborator(
MgaProject project,
MgaFCO currentobj,
MgaFCOs selectedobjs,
int param,
bool expand = true)
{
bool result = false;
try
{
if (Logger != null)
Logger.WriteInfo("Elaborating model...");
var elaborator = new CyPhyElaborateCS.CyPhyElaborateCSInterpreter();
elaborator.Initialize(project);
int verbosity = 128;
elaborator.UnrollConnectors = false;
result = elaborator.RunInTransaction(project, currentobj, selectedobjs, verbosity);
if (Logger != null)
Logger.WriteInfo("Elaboration is done.");
}
catch (Exception ex)
{
if (Logger != null)
Logger.WriteError("Exception occurred in Elaborator : {0}", ex.ToString());
result = false;
}
return result;
}
/// <summary>
/// The main entry point of the interpreter. A transaction is already open,
/// GMEConsole is available. A general try-catch block catches all the exceptions
/// coming from this function, you don't need to add it. For more information, see InvokeEx.
/// </summary>
/// <param name="project">The handle of the project opened in GME, for which the interpreter was called.</param>
/// <param name="currentobj">The model open in the active tab in GME. Its value is null if no model is open (no GME modeling windows open). </param>
/// <param name="selectedobjs">
/// A collection for the selected model elements. It is never null.
/// If the interpreter is invoked by the context menu of the GME Tree Browser, then the selected items in the tree browser. Folders
/// are never passed (they are not FCOs).
/// If the interpreter is invoked by clicking on the toolbar icon or the context menu of the modeling window, then the selected items
/// in the active GME modeling window. If nothing is selected, the collection is empty (contains zero elements).
/// </param>
/// <param name="startMode">Contains information about the GUI event that initiated the invocation.</param>
[ComVisible(false)]
public void Main(MgaProject project, MgaFCO currentobj, MgaFCOs selectedobjs, ComponentStartMode startMode)
{
Boolean disposeLogger = false;
if (Logger == null)
{
Logger = new CyPhyGUIs.GMELogger(project, "CyPhyDesignExporter");
disposeLogger = true;
}
// TODO: Add your interpreter code
Logger.WriteInfo("Running Design Exporter...");
#region Prompt for Output Path
// Get an output path from the user.
if (this.OutputDir == null)
{
using (META.FolderBrowserDialog fbd = new META.FolderBrowserDialog()
{
Description = "Choose a path for the generated files.",
//ShowNewFolderButton = true,
SelectedPath = Environment.CurrentDirectory,
})
{
DialogResult dr = fbd.ShowDialog();
if (dr == DialogResult.OK)
{
OutputDir = fbd.SelectedPath;
}
else
{
Logger.WriteWarning("Design Exporter cancelled");
return;
}
}
}
#endregion
Logger.WriteInfo("Beginning Export...");
List<CyPhy.DesignEntity> lde_allCAandDC = new List<CyPhy.DesignEntity>();
List<CyPhy.TestBenchType> ltbt_allTB = new List<CyPhy.TestBenchType>();
if (currentobj != null &&
currentobj.Meta.Name == "ComponentAssembly")
{
lde_allCAandDC.Add(CyPhyClasses.ComponentAssembly.Cast(currentobj));
}
else if (currentobj != null &&
currentobj.Meta.Name == "DesignContainer")
{
lde_allCAandDC.Add(CyPhyClasses.DesignContainer.Cast(currentobj));
}
else if (currentobj != null &&
IsTestBenchType(currentobj.MetaBase.Name))
{
ltbt_allTB.Add(CyPhyClasses.TestBenchType.Cast(currentobj));
}
else if (selectedobjs != null && selectedobjs.Count > 0)
{
foreach (MgaFCO mf in selectedobjs)
{
if (mf.Meta.Name == "ComponentAssembly")
{
lde_allCAandDC.Add(CyPhyClasses.ComponentAssembly.Cast(mf));
}
else if (mf.Meta.Name == "DesignContainer")
{
lde_allCAandDC.Add(CyPhyClasses.DesignContainer.Cast(mf));
}
else if (IsTestBenchType(mf.MetaBase.Name))
{
ltbt_allTB.Add(CyPhyClasses.TestBenchType.Cast(mf));
}
}
}
else
{
CyPhy.RootFolder rootFolder = ISIS.GME.Common.Utils.CreateObject<CyPhyClasses.RootFolder>(project.RootFolder as MgaObject);
MgaFilter filter = project.CreateFilter();
filter.Kind = "ComponentAssembly";
foreach (var item in project.AllFCOs(filter).Cast<MgaFCO>())
{
if (item.ParentFolder != null)
{
lde_allCAandDC.Add(CyPhyClasses.ComponentAssembly.Cast(item));
}
}
filter = project.CreateFilter();
filter.Kind = "DesignContainer";
foreach (var item in project.AllFCOs(filter).Cast<MgaFCO>())
{
if (item.ParentFolder != null)
{
lde_allCAandDC.Add(CyPhyClasses.DesignContainer.Cast(item));
}
}
filter = project.CreateFilter();
filter.Kind = "TestBenchType";
foreach (var item in project.AllFCOs(filter).Cast<MgaFCO>())
{
if (item.ParentFolder != null)
{
ltbt_allTB.Add(CyPhyClasses.TestBenchType.Cast(item));
}
}
}
foreach (CyPhy.DesignEntity de in lde_allCAandDC)
{
System.Windows.Forms.Application.DoEvents();
try
{
if (de is CyPhy.ComponentAssembly)
{
ExportToPackage(de as CyPhy.ComponentAssembly, OutputDir);
}
else
{
ExportToFile(de, OutputDir);
}
}
catch (Exception ex)
{
Logger.WriteError("{0}: Exception encountered ({1})",de.Name,ex.Message);
}
Logger.WriteInfo("{0}: {1}", de.Name, OutputDir);
}
foreach (CyPhy.TestBenchType tbt in ltbt_allTB)
{
System.Windows.Forms.Application.DoEvents();
try
{
ExportToFile(tbt, OutputDir);
}
catch (Exception ex)
{
Logger.WriteError("{0}: Exception encountered ({1})", tbt.Name, ex.Message);
}
Logger.WriteInfo("{0}: {1}", tbt.Name, OutputDir);
}
Logger.WriteInfo(String.Format("{0} model(s) exported", lde_allCAandDC.Count + ltbt_allTB.Count));
Logger.WriteInfo("Design Exporter finished");
if (disposeLogger)
{
DisposeLogger();
}
}
#region IMgaComponentEx Members
MgaGateway MgaGateway { get; set; }
GMELogger Logger { get; set; }
public void DisposeLogger()
{
if (Logger != null)
{
Logger.Dispose();
Logger = null;
}
}
public void InvokeEx(MgaProject project, MgaFCO currentobj, MgaFCOs selectedobjs, int param)
{
if (!enabled)
{
return;
}
try
{
MgaGateway = new MgaGateway(project);
project.CreateTerritoryWithoutSink(out MgaGateway.territory);
MgaGateway.BeginTransaction();
Main(project, currentobj, selectedobjs, Convert(param));
MgaGateway.AbortTransaction();
}
finally
{
if (MgaGateway != null && MgaGateway.territory != null)
{
MgaGateway.territory.Destroy();
}
MgaGateway = null;
project = null;
currentobj = null;
selectedobjs = null;
GC.Collect();
GC.WaitForPendingFinalizers();
}
}
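// Maps the raw integer start-mode parameter passed by GME to the ComponentStartMode enum;
// unrecognized values fall back to GME_SILENT_MODE.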
private ComponentStartMode Convert(int param)
{
switch (param)
{
case (int)ComponentStartMode.GME_BGCONTEXT_START:
return ComponentStartMode.GME_BGCONTEXT_START;
case (int)ComponentStartMode.GME_BROWSER_START:
return ComponentStartMode.GME_BROWSER_START;
case (int)ComponentStartMode.GME_CONTEXT_START:
return ComponentStartMode.GME_CONTEXT_START;
case (int)ComponentStartMode.GME_EMBEDDED_START:
return ComponentStartMode.GME_EMBEDDED_START;
case (int)ComponentStartMode.GME_ICON_START:
return ComponentStartMode.GME_ICON_START;
case (int)ComponentStartMode.GME_MAIN_START:
return ComponentStartMode.GME_MAIN_START;
case (int)ComponentStartMode.GME_MENU_START:
return ComponentStartMode.GME_MENU_START;
case (int)ComponentStartMode.GME_SILENT_MODE:
return ComponentStartMode.GME_SILENT_MODE;
}
return ComponentStartMode.GME_SILENT_MODE;
}
#region Component Information
public string ComponentName
{
get { return GetType().Name; }
}
public string ComponentProgID
{
get
{
return ComponentConfig.progID;
}
}
public componenttype_enum ComponentType
{
get { return ComponentConfig.componentType; }
}
public string Paradigm
{
get { return ComponentConfig.paradigmName; }
}
#endregion
#region Enabling
bool enabled = true;
public void Enable(bool newval)
{
enabled = newval;
}
#endregion
#region Interactive Mode
protected bool interactiveMode = true;
public bool InteractiveMode
{
get
{
return interactiveMode;
}
set
{
interactiveMode = value;
}
}
#endregion
#region Custom Parameters
SortedDictionary<string, object> componentParameters = null;
public object get_ComponentParameter(string Name)
{
if (Name == "type")
return "csharp";
if (Name == "path")
return GetType().Assembly.Location;
if (Name == "fullname")
return GetType().FullName;
object value;
if (componentParameters != null && componentParameters.TryGetValue(Name, out value))
{
return value;
}
return null;
}
public void set_ComponentParameter(string Name, object pVal)
{
if (componentParameters == null)
{
componentParameters = new SortedDictionary<string, object>();
}
componentParameters[Name] = pVal;
}
#endregion
#region Unused Methods
// Old interface, it is never called for MgaComponentEx interfaces
public void Invoke(MgaProject Project, MgaFCOs selectedobjs, int param)
{
throw new NotImplementedException();
}
// Not used by GME
public void ObjectsInvokeEx(MgaProject Project, MgaObject currentobj, MgaObjects selectedobjs, int param)
{
throw new NotImplementedException();
}
#endregion
#endregion
#region IMgaVersionInfo Members
public GMEInterfaceVersion_enum version
{
get { return GMEInterfaceVersion_enum.GMEInterfaceVersion_Current; }
}
#endregion
#region Registration Helpers
[ComRegisterFunctionAttribute]
public static void GMERegister(Type t)
{
Registrar.RegisterComponentsInGMERegistry();
}
[ComUnregisterFunctionAttribute]
public static void GMEUnRegister(Type t)
{
Registrar.UnregisterComponentsInGMERegistry();
}
#endregion
public string OutputDir;
}
}
using System;
using System.Xml;
using NUnit.Framework;
using ServiceStack.Text.Common;
namespace ServiceStack.Text.Tests.Utils
{
[TestFixture]
public class DateTimeSerializerTests
: TestBase
{
public void PrintFormats(DateTime dateTime)
{
Log("dateTime.ToShortDateString(): " + dateTime.ToShortDateString());
Log("dateTime.ToShortTimeString(): " + dateTime.ToShortTimeString());
Log("dateTime.ToLongTimeString(): " + dateTime.ToLongTimeString());
Log("dateTime.ToShortTimeString(): " + dateTime.ToShortTimeString());
Log("dateTime.ToString(): " + dateTime.ToString());
Log("DateTimeSerializer.ToShortestXsdDateTimeString(dateTime): " + DateTimeSerializer.ToShortestXsdDateTimeString(dateTime));
Log("DateTimeSerializer.ToDateTimeString(dateTime): " + DateTimeSerializer.ToDateTimeString(dateTime));
Log("DateTimeSerializer.ToXsdDateTimeString(dateTime): " + DateTimeSerializer.ToXsdDateTimeString(dateTime));
Log("\n");
}
public void PrintFormats(TimeSpan timeSpan)
{
Log("DateTimeSerializer.ToXsdTimeSpanString(timeSpan): " + DateTimeSerializer.ToXsdTimeSpanString(timeSpan));
Log("\n");
}
[Test]
public void PrintDate()
{
PrintFormats(DateTime.Now);
PrintFormats(DateTime.UtcNow);
PrintFormats(new DateTime(1979, 5, 9));
PrintFormats(new DateTime(1979, 5, 9, 0, 0, 1));
PrintFormats(new DateTime(1979, 5, 9, 0, 0, 0, 1));
PrintFormats(new DateTime(2010, 10, 20, 10, 10, 10, 1));
PrintFormats(new DateTime(2010, 11, 22, 11, 11, 11, 1));
}
[Test]
public void PrintTimeSpan()
{
PrintFormats(new TimeSpan());
PrintFormats(new TimeSpan(1));
PrintFormats(new TimeSpan(1, 2, 3));
PrintFormats(new TimeSpan(1, 2, 3, 4));
}
[Test]
public void ToShortestXsdDateTimeString_works()
{
var shortDate = new DateTime(1979, 5, 9);
const string shortDateString = "1979-05-09";
var shortDateTime = new DateTime(1979, 5, 9, 0, 0, 1, DateTimeKind.Utc);
var shortDateTimeString = shortDateTime.Equals(shortDateTime.ToStableUniversalTime())
? "1979-05-09T00:00:01Z"
: "1979-05-08T23:00:01Z";
var longDateTime = new DateTime(1979, 5, 9, 0, 0, 0, 1, DateTimeKind.Utc);
var longDateTimeString = longDateTime.Equals(longDateTime.ToStableUniversalTime())
? "1979-05-09T00:00:00.001Z"
: "1979-05-08T23:00:00.001Z";
Assert.That(shortDateString, Is.EqualTo(DateTimeSerializer.ToShortestXsdDateTimeString(shortDate)));
Assert.That(shortDateTimeString, Is.EqualTo(DateTimeSerializer.ToShortestXsdDateTimeString(shortDateTime)));
Assert.That(longDateTimeString, Is.EqualTo(DateTimeSerializer.ToShortestXsdDateTimeString(longDateTime)));
}
[Test]
public void CanDeserializeDateTimeOffsetWithTimeSpanIsZero()
{
var expectedValue = new DateTimeOffset(2012, 6, 27, 11, 26, 04, 524, TimeSpan.Zero);
var s = DateTimeSerializer.ToWcfJsonDateTimeOffset(expectedValue);
Assert.AreEqual("\\/Date(1340796364524)\\/", s);
var afterValue = DateTimeSerializer.ParseWcfJsonDateOffset(s);
Assert.AreEqual(expectedValue, afterValue);
}
[Test][Ignore]
public void Utc_Local_Equals()
{
var now = DateTime.Now;
var utcNow = now.ToStableUniversalTime();
Assert.That(now.Ticks, Is.EqualTo(utcNow.Ticks), "Ticks are different");
Assert.That(now, Is.EqualTo(utcNow), "DateTimes are different");
}
[Test]
public void ParseShortestXsdDateTime_works()
{
DateTime shortDate = DateTimeSerializer.ParseShortestXsdDateTime("2011-8-4");
Assert.That(shortDate, Is.EqualTo(new DateTime(2011, 8, 4)), "Month and day without leading 0");
shortDate = DateTimeSerializer.ParseShortestXsdDateTime("2011-8-05");
Assert.That(shortDate, Is.EqualTo(new DateTime(2011, 8, 5)), "Month without leading 0");
shortDate = DateTimeSerializer.ParseShortestXsdDateTime("2011-09-4");
Assert.That(shortDate, Is.EqualTo(new DateTime(2011, 9, 4)), "Day without leading 0");
}
[Test]
public void TestSqlServerDateTime()
{
var result = TypeSerializer.DeserializeFromString<DateTime>("2010-06-01 21:52:59.280");
Assert.That(result, Is.Not.Null);
}
[Test]
public void DateTimeWithoutMilliseconds_should_always_be_deserialized_correctly_by_TypeSerializer()
{
var dateWithoutMillisecondsUtc = new DateTime(2013, 4, 9, 15, 20, 0, DateTimeKind.Utc);
var dateWithoutMillisecondsLocal = new DateTime(2013, 4, 9, 15, 20, 0, DateTimeKind.Local);
var dateWithoutMillisecondsUnspecified = new DateTime(2013, 4, 9, 15, 20, 0, DateTimeKind.Unspecified);
string serialized = null;
DateTime deserialized;
serialized = TypeSerializer.SerializeToString(dateWithoutMillisecondsUtc);
deserialized = TypeSerializer.DeserializeFromString<DateTime>(serialized);
Assert.AreEqual(dateWithoutMillisecondsUtc.ToLocalTime(), deserialized);
serialized = TypeSerializer.SerializeToString(dateWithoutMillisecondsLocal);
deserialized = TypeSerializer.DeserializeFromString<DateTime>(serialized);
Assert.AreEqual(dateWithoutMillisecondsLocal, deserialized);
serialized = TypeSerializer.SerializeToString(dateWithoutMillisecondsUnspecified);
deserialized = TypeSerializer.DeserializeFromString<DateTime>(serialized);
Assert.AreEqual(dateWithoutMillisecondsUnspecified, deserialized);
}
[Test, Ignore("Don't pre-serialize into Utc")]
public void UtcDateTime_Is_Deserialized_As_Kind_Utc()
{
//Serializing UTC
var utcNow = new DateTime(2012, 1, 8, 12, 17, 1, 538, DateTimeKind.Utc);
Assert.That(utcNow.Kind, Is.EqualTo(DateTimeKind.Utc));
var serialized = JsonSerializer.SerializeToString(utcNow);
//Deserializing UTC?
var deserialized = JsonSerializer.DeserializeFromString<DateTime>(serialized);
Assert.That(deserialized.Kind, Is.EqualTo(DateTimeKind.Utc)); //fails -> is DateTimeKind.Local
}
/// <summary>
/// These timestamp strings were pulled from SQLite columns written via OrmLite using SQlite.1.88
/// Most of the time, timestamps correctly use the 'T' separator between the date and time,
/// but under some (still unknown) scenarios, SQLite will write timestamps using a space instead of a 'T'.
/// If that happens, OrmLite will fail to read the row, complaining that: The string '...' is not a valid Xsd value.
/// </summary>
private static string[] _problematicXsdStrings = new[] {
"2013-10-10 20:04:04.8773249Z",
"2013-10-10 20:04:04Z",
};
[Test]
[TestCase(0)]
[TestCase(1)]
public void CanParseProblematicXsdStrings(int whichString)
{
var xsdString = _problematicXsdStrings[whichString];
var dateTime = DateTimeSerializer.ParseShortestXsdDateTime(xsdString);
Assert.That(dateTime.Kind, Is.EqualTo(DateTimeKind.Local));
}
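// A minimal sketch (not part of the original fixture) of how such a space-separated
// timestamp could be normalized to the XSD 'T' form before parsing, assuming the
// space only ever appears between the date and time components:
//   var normalized = xsdString.Length > 10 && xsdString[10] == ' '
//       ? xsdString.Substring(0, 10) + "T" + xsdString.Substring(11)
//       : xsdString;
//   var parsed = DateTimeSerializer.ParseShortestXsdDateTime(normalized);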
[Test]
public void CanParseLongAndShortXsdStrings()
{
var shortXsdString = "2013-10-10T13:40:50Z";
var longXsdString = shortXsdString.Substring(0, shortXsdString.Length - 1) + ".0000000" +
shortXsdString.Substring(shortXsdString.Length - 1);
var dateTimeShort = DateTimeSerializer.ParseShortestXsdDateTime(shortXsdString);
var dateTimeLong = DateTimeSerializer.ParseShortestXsdDateTime(longXsdString);
Assert.That(dateTimeShort.Ticks, Is.EqualTo(dateTimeLong.Ticks));
Assert.That(dateTimeShort.Kind, Is.EqualTo(dateTimeLong.Kind));
}
private static DateTime[] _dateTimeTests = new[] {
DateTime.Now,
DateTime.UtcNow,
new DateTime(1979, 5, 9),
new DateTime(1972, 3, 24, 0, 0, 0, DateTimeKind.Local),
new DateTime(1972, 4, 24),
new DateTime(1979, 5, 9, 0, 0, 1),
new DateTime(1979, 5, 9, 0, 0, 0, 1),
new DateTime(2010, 10, 20, 10, 10, 10, 1),
new DateTime(2010, 11, 22, 11, 11, 11, 1),
new DateTime(622119282055250000)
};
[Test]
[TestCase(0)]
[TestCase(1)]
[TestCase(2)]
//[TestCase(3)] //.NET Date BUG see: Test_MS_Dates
[TestCase(4)]
[TestCase(5)]
[TestCase(6)]
[TestCase(7)]
[TestCase(8)]
public void AssertDateIsEqual(int whichDate)
{
DateTime dateTime = _dateTimeTests[whichDate];
//Don't test short dates without time to UTC as you lose precision
var shortDateStr = dateTime.ToString(DateTimeSerializer.ShortDateTimeFormat);
var shortDateTimeStr = dateTime.ToStableUniversalTime().ToString(DateTimeSerializer.XsdDateTimeFormatSeconds);
var longDateTimeStr = DateTimeSerializer.ToXsdDateTimeString(dateTime);
var shortestDateStr = DateTimeSerializer.ToShortestXsdDateTimeString(dateTime);
Log("{0} | {1} | {2} [{3}]",
shortDateStr, shortDateTimeStr, longDateTimeStr, shortestDateStr);
var shortDate = DateTimeSerializer.ParseShortestXsdDateTime(shortDateStr);
var shortDateTime = DateTimeSerializer.ParseShortestXsdDateTime(shortDateTimeStr);
var longDateTime = DateTimeSerializer.ParseShortestXsdDateTime(longDateTimeStr);
Assert.That(shortDate, Is.EqualTo(dateTime.Date));
var shortDateTimeUtc = shortDateTime.ToStableUniversalTime();
Assert.That(shortDateTimeUtc, Is.EqualTo(
new DateTime(
shortDateTimeUtc.Year, shortDateTimeUtc.Month, shortDateTimeUtc.Day,
shortDateTimeUtc.Hour, shortDateTimeUtc.Minute, shortDateTimeUtc.Second,
shortDateTimeUtc.Millisecond, DateTimeKind.Utc)));
AssertDatesAreEqual(longDateTime.ToStableUniversalTime(), dateTime.ToStableUniversalTime());
var toDateTime = DateTimeSerializer.ParseShortestXsdDateTime(shortestDateStr);
AssertDatesAreEqual(toDateTime, dateTime, "shortestDate");
var unixTime = dateTime.ToUnixTimeMs();
var fromUnixTime = DateTimeExtensions.FromUnixTimeMs(unixTime);
AssertDatesAreEqual(fromUnixTime, dateTime, "unixTimeMs");
var wcfDateString = DateTimeSerializer.ToWcfJsonDate(dateTime);
var wcfDate = DateTimeSerializer.ParseWcfJsonDate(wcfDateString);
AssertDatesAreEqual(wcfDate, dateTime, "wcf date");
}
private void AssertDatesAreEqual(DateTime toDateTime, DateTime dateTime, string which=null)
{
Assert.That(toDateTime.ToStableUniversalTime().RoundToMs(), Is.EqualTo(dateTime.ToStableUniversalTime().RoundToMs()), which);
}
[Test]
public void Can_Serialize_new_DateTime()
{
var newDateTime = new DateTime();
var convertedUnixTimeMs = newDateTime.ToUnixTimeMs();
Assert.That(convertedUnixTimeMs.FromUnixTimeMs(), Is.EqualTo(newDateTime));
}
[Explicit("Test .NET Date Serialization behavior")]
[TestCase(0)]
[TestCase(1)]
[TestCase(2)]
[TestCase(3)]
[TestCase(4)]
[TestCase(5)]
[TestCase(6)]
[TestCase(7)]
[TestCase(8)]
public void Test_MS_Dates(int whichDate)
{
var dateTime = _dateTimeTests[whichDate];
var dateTimeStr = XmlConvert.ToString(dateTime.ToStableUniversalTime(), XmlDateTimeSerializationMode.Utc);
dateTimeStr.Print(); //1972-03-24T05:00:00Z
var fromStr = DateTime.Parse(dateTimeStr);
fromStr.ToString().Print();
AssertDatesAreEqual(fromStr, dateTime);
}
}
[TestFixture]
public class DateTimeISO8601Tests
: TestBase
{
public class TestObject
{
public DateTime Date { get; set; }
}
[TestFixtureSetUp]
public void TestFixtureSetUp()
{
JsConfig.DateHandler = JsonDateHandler.ISO8601;
}
[TestFixtureTearDown]
public void TestFixtureTearDown()
{
JsConfig.Reset();
}
[Test]
public void DateTime_Is_Serialized_As_Utc_and_Deserialized_as_local()
{
var testObject = new TestObject
{
Date = new DateTime(2013, 1, 1, 0, 0, 1, DateTimeKind.Utc)
};
Assert.AreEqual(DateTimeKind.Local, TypeSerializer.DeserializeFromString<TestObject>(TypeSerializer.SerializeToString<TestObject>(testObject)).Date.Kind);
//Can change default behavior with config
using (JsConfig.With(alwaysUseUtc: true))
{
Assert.AreEqual(DateTimeKind.Utc, TypeSerializer.DeserializeFromString<TestObject>(TypeSerializer.SerializeToString<TestObject>(testObject)).Date.Kind);
}
testObject = new TestObject
{
Date = new DateTime(2013, 1, 1, 0, 0, 0, DateTimeKind.Utc)
};
Assert.AreEqual(DateTimeKind.Local, TypeSerializer.DeserializeFromString<TestObject>(TypeSerializer.SerializeToString<TestObject>(testObject)).Date.Kind);
//Can change default behavior with config
using (JsConfig.With(alwaysUseUtc: true))
{
Assert.AreEqual(DateTimeKind.Utc, TypeSerializer.DeserializeFromString<TestObject>(TypeSerializer.SerializeToString<TestObject>(testObject)).Date.Kind);
}
}
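// Illustrative note (assumption, not asserted above): with JsonDateHandler.ISO8601 the
// UTC dates serialize to strings such as "2013-01-01T00:00:01.0000000Z"; on
// deserialization ServiceStack converts them to local time unless
// JsConfig.With(alwaysUseUtc: true) is in effect, which is exactly what the
// Assert.AreEqual(DateTimeKind.Utc, ...) checks inside the using blocks verify.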
}
}
| |
using System;
using System.Data;
using System.Data.SqlClient;
using Csla;
using Csla.Data;
namespace ParentLoadSoftDelete.Business.ERCLevel
{
/// <summary>
/// F12_CityRoad (editable child object).<br/>
/// This is a generated base class of <see cref="F12_CityRoad"/> business object.
/// </summary>
/// <remarks>
/// This class is an item of <see cref="F11_CityRoadColl"/> collection.
/// </remarks>
[Serializable]
public partial class F12_CityRoad : BusinessBase<F12_CityRoad>
{
#region Static Fields
private static int _lastID;
#endregion
#region State Fields
[NotUndoable]
[NonSerialized]
internal int parent_City_ID = 0;
#endregion
#region Business Properties
/// <summary>
/// Maintains metadata about <see cref="CityRoad_ID"/> property.
/// </summary>
public static readonly PropertyInfo<int> CityRoad_IDProperty = RegisterProperty<int>(p => p.CityRoad_ID, "CityRoads ID");
/// <summary>
/// Gets the CityRoads ID.
/// </summary>
/// <value>The CityRoads ID.</value>
public int CityRoad_ID
{
get { return GetProperty(CityRoad_IDProperty); }
}
/// <summary>
/// Maintains metadata about <see cref="CityRoad_Name"/> property.
/// </summary>
public static readonly PropertyInfo<string> CityRoad_NameProperty = RegisterProperty<string>(p => p.CityRoad_Name, "CityRoads Name");
/// <summary>
/// Gets or sets the CityRoads Name.
/// </summary>
/// <value>The CityRoads Name.</value>
public string CityRoad_Name
{
get { return GetProperty(CityRoad_NameProperty); }
set { SetProperty(CityRoad_NameProperty, value); }
}
#endregion
#region Factory Methods
/// <summary>
/// Factory method. Creates a new <see cref="F12_CityRoad"/> object.
/// </summary>
/// <returns>A reference to the created <see cref="F12_CityRoad"/> object.</returns>
internal static F12_CityRoad NewF12_CityRoad()
{
return DataPortal.CreateChild<F12_CityRoad>();
}
/// <summary>
/// Factory method. Loads a <see cref="F12_CityRoad"/> object from the given SafeDataReader.
/// </summary>
/// <param name="dr">The SafeDataReader to use.</param>
/// <returns>A reference to the fetched <see cref="F12_CityRoad"/> object.</returns>
internal static F12_CityRoad GetF12_CityRoad(SafeDataReader dr)
{
F12_CityRoad obj = new F12_CityRoad();
// show the framework that this is a child object
obj.MarkAsChild();
obj.Fetch(dr);
obj.MarkOld();
return obj;
}
#endregion
#region Constructor
/// <summary>
/// Initializes a new instance of the <see cref="F12_CityRoad"/> class.
/// </summary>
/// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
[System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
public F12_CityRoad()
{
// Use factory methods and do not use direct creation.
// show the framework that this is a child object
MarkAsChild();
}
#endregion
#region Data Access
/// <summary>
/// Loads default values for the <see cref="F12_CityRoad"/> object properties.
/// </summary>
[Csla.RunLocal]
protected override void Child_Create()
{
LoadProperty(CityRoad_IDProperty, System.Threading.Interlocked.Decrement(ref _lastID));
var args = new DataPortalHookArgs();
OnCreate(args);
base.Child_Create();
}
/// <summary>
/// Loads a <see cref="F12_CityRoad"/> object from the given SafeDataReader.
/// </summary>
/// <param name="dr">The SafeDataReader to use.</param>
private void Fetch(SafeDataReader dr)
{
// Value properties
LoadProperty(CityRoad_IDProperty, dr.GetInt32("CityRoad_ID"));
LoadProperty(CityRoad_NameProperty, dr.GetString("CityRoad_Name"));
// parent properties
parent_City_ID = dr.GetInt32("Parent_City_ID");
var args = new DataPortalHookArgs(dr);
OnFetchRead(args);
}
/// <summary>
/// Inserts a new <see cref="F12_CityRoad"/> object in the database.
/// </summary>
/// <param name="parent">The parent object.</param>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Insert(F10_City parent)
{
using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
{
using (var cmd = new SqlCommand("AddF12_CityRoad", ctx.Connection))
{
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("@Parent_City_ID", parent.City_ID).DbType = DbType.Int32;
cmd.Parameters.AddWithValue("@CityRoad_ID", ReadProperty(CityRoad_IDProperty)).Direction = ParameterDirection.Output;
cmd.Parameters.AddWithValue("@CityRoad_Name", ReadProperty(CityRoad_NameProperty)).DbType = DbType.String;
var args = new DataPortalHookArgs(cmd);
OnInsertPre(args);
cmd.ExecuteNonQuery();
OnInsertPost(args);
LoadProperty(CityRoad_IDProperty, (int) cmd.Parameters["@CityRoad_ID"].Value);
}
}
}
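// Note: @CityRoad_ID is declared as an output parameter so that the identity value
// assigned by the AddF12_CityRoad stored procedure replaces the temporary negative ID
// generated in Child_Create via Interlocked.Decrement.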
/// <summary>
/// Updates in the database all changes made to the <see cref="F12_CityRoad"/> object.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Update()
{
if (!IsDirty)
return;
using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
{
using (var cmd = new SqlCommand("UpdateF12_CityRoad", ctx.Connection))
{
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("@CityRoad_ID", ReadProperty(CityRoad_IDProperty)).DbType = DbType.Int32;
cmd.Parameters.AddWithValue("@CityRoad_Name", ReadProperty(CityRoad_NameProperty)).DbType = DbType.String;
var args = new DataPortalHookArgs(cmd);
OnUpdatePre(args);
cmd.ExecuteNonQuery();
OnUpdatePost(args);
}
}
}
/// <summary>
/// Self deletes the <see cref="F12_CityRoad"/> object from database.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_DeleteSelf()
{
using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
{
using (var cmd = new SqlCommand("DeleteF12_CityRoad", ctx.Connection))
{
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("@CityRoad_ID", ReadProperty(CityRoad_IDProperty)).DbType = DbType.Int32;
var args = new DataPortalHookArgs(cmd);
OnDeletePre(args);
cmd.ExecuteNonQuery();
OnDeletePost(args);
}
}
}
#endregion
#region DataPortal Hooks
/// <summary>
/// Occurs after setting all defaults for object creation.
/// </summary>
partial void OnCreate(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
/// </summary>
partial void OnDeletePre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Delete, after the delete operation, before Commit().
/// </summary>
partial void OnDeletePost(DataPortalHookArgs args);
/// <summary>
/// Occurs after setting query parameters and before the fetch operation.
/// </summary>
partial void OnFetchPre(DataPortalHookArgs args);
/// <summary>
/// Occurs after the fetch operation (object or collection is fully loaded and set up).
/// </summary>
partial void OnFetchPost(DataPortalHookArgs args);
/// <summary>
/// Occurs after the low level fetch operation, before the data reader is destroyed.
/// </summary>
partial void OnFetchRead(DataPortalHookArgs args);
/// <summary>
/// Occurs after setting query parameters and before the update operation.
/// </summary>
partial void OnUpdatePre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Update, after the update operation, before setting back row identifiers (RowVersion) and Commit().
/// </summary>
partial void OnUpdatePost(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
/// </summary>
partial void OnInsertPre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
/// </summary>
partial void OnInsertPost(DataPortalHookArgs args);
#endregion
}
}
| |
//
// (C) Copyright 2003-2011 by Autodesk, Inc.
//
// Permission to use, copy, modify, and distribute this software in
// object code form for any purpose and without fee is hereby granted,
// provided that the above copyright notice appears in all copies and
// that both that copyright notice and the limited warranty and
// restricted rights notice below appear in all supporting
// documentation.
//
// AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS.
// AUTODESK SPECIFICALLY DISCLAIMS ANY IMPLIED WARRANTY OF
// MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE. AUTODESK, INC.
// DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE
// UNINTERRUPTED OR ERROR FREE.
//
// Use, duplication, or disclosure by the U.S. Government is subject to
// restrictions set forth in FAR 52.227-19 (Commercial Computer
// Software - Restricted Rights) and DFAR 252.227-7013(c)(1)(ii)
// (Rights in Technical Data and Computer Software), as applicable.
//
using System;
using System.Collections.Generic;
using System.Text;
using Autodesk.Revit;
using Autodesk.Revit.DB;
using Autodesk.Revit.UI;
using System.Windows.Forms;
namespace Revit.SDK.Samples.DoorSwing.CS
{
/// <summary>
/// An ExternalCommand class that implements the IExternalCommand interface.
/// This command adds the needed shared parameters and initializes them.
/// It initializes the door opening parameter based on the family's actual geometry and
/// the country's standard, and initializes each door instance's opening, ToRoom, FromRoom and
/// internal door flag values according to the door's current geometry.
/// </summary>
[Autodesk.Revit.Attributes.Transaction(Autodesk.Revit.Attributes.TransactionMode.Manual)]
[Autodesk.Revit.Attributes.Regeneration(Autodesk.Revit.Attributes.RegenerationOption.Manual)]
[Autodesk.Revit.Attributes.Journaling(Autodesk.Revit.Attributes.JournalingMode.NoCommandData)]
public class InitializeCommand : IExternalCommand
{
#region IExternalCommand Members
/// <summary>
/// Implement this method as an external command for Revit.
/// </summary>
/// <param name="commandData">An object that is passed to the external application
/// which contains data related to the command,
/// such as the application object and active view.</param>
/// <param name="message">A message that can be set by the external application
/// which will be displayed if a failure or cancellation is returned by
/// the external command.</param>
/// <param name="elements">A set of elements to which the external application
/// can add elements that are to be highlighted in case of failure or cancellation.</param>
/// <returns>Return the status of the external command.
/// A result of Succeeded means that the API external method functioned as expected.
/// Cancelled can be used to signify that the user cancelled the external operation
/// at some point. Failure should be returned if the application is unable to proceed with
/// the operation.</returns>
public Autodesk.Revit.UI.Result Execute(ExternalCommandData commandData,
ref string message,
ElementSet elements)
{
Autodesk.Revit.UI.Result returnCode = Autodesk.Revit.UI.Result.Cancelled;
Transaction tran = new Transaction(commandData.Application.ActiveUIDocument.Document, "Initialize Command");
tran.Start();
try
{
// one instance of DoorSwingData class.
DoorSwingData databuffer = new DoorSwingData(commandData.Application);
using (InitializeForm initForm = new InitializeForm(databuffer))
{
// Show UI
DialogResult dialogResult = initForm.ShowDialog();
if (DialogResult.OK == dialogResult)
{
databuffer.DeleteTempDoorInstances();
// update door type's opening feature based on family's actual geometry and
// country's standard.
databuffer.UpdateDoorFamiliesOpeningFeature();
// update each door instance's Opening feature and internal door flag
returnCode = DoorSwingData.UpdateDoorsInfo(commandData.Application.ActiveUIDocument.Document, false, true, ref message);
}
}
}
catch (Exception ex)
{
// if there is anything wrong, give error information and return failed.
message = ex.Message;
returnCode = Autodesk.Revit.UI.Result.Failed;
}
if (Autodesk.Revit.UI.Result.Succeeded == returnCode)
{
tran.Commit();
}
else
{
tran.RollBack();
}
return returnCode;
}
#endregion
}
/// <summary>
/// An ExternalCommand class that implements the IExternalCommand interface.
/// This command updates each door instance's opening, ToRoom, FromRoom and
/// internal door flag values according to the door's current geometry.
/// </summary>
[Autodesk.Revit.Attributes.Transaction(Autodesk.Revit.Attributes.TransactionMode.Manual)]
[Autodesk.Revit.Attributes.Regeneration(Autodesk.Revit.Attributes.RegenerationOption.Manual)]
public class UpdateParamsCommand : IExternalCommand
{
#region IExternalCommand Members
/// <summary>
/// Implement this method as an external command for Revit.
/// </summary>
/// <param name="commandData">An object that is passed to the external application
/// which contains data related to the command,
/// such as the application object and active view.</param>
/// <param name="message">A message that can be set by the external application
/// which will be displayed if a failure or cancellation is returned by
/// the external command.</param>
/// <param name="elements">A set of elements to which the external application
/// can add elements that are to be highlighted in case of failure or cancellation.</param>
/// <returns>Return the status of the external command.
/// A result of Succeeded means that the API external method functioned as expected.
/// Cancelled can be used to signify that the user cancelled the external operation
/// at some point. Failure should be returned if the application is unable to proceed with
/// the operation.</returns>
public Autodesk.Revit.UI.Result Execute(ExternalCommandData commandData, ref string message, Autodesk.Revit.DB.ElementSet elements)
{
Autodesk.Revit.UI.Result returnCode = Autodesk.Revit.UI.Result.Succeeded;
Autodesk.Revit.UI.UIApplication app = commandData.Application;
UIDocument doc = app.ActiveUIDocument;
Transaction tran = new Transaction(doc.Document, "Update Parameters Command");
tran.Start();
try
{
if (doc.Selection.Elements.IsEmpty)
{
returnCode = DoorSwingData.UpdateDoorsInfo(doc.Document, false, true, ref message);
}
else
{
returnCode = DoorSwingData.UpdateDoorsInfo(doc.Document, true, true, ref message);
}
}
catch (Exception ex)
{
// if there is anything wrong, give error information and return failed.
message = ex.Message;
returnCode = Autodesk.Revit.UI.Result.Failed;
}
if (Autodesk.Revit.UI.Result.Succeeded == returnCode)
{
tran.Commit();
}
else
{
tran.RollBack();
}
return returnCode;
}
#endregion
}
/// <summary>
/// An ExternalCommand class that implements the IExternalCommand interface.
/// This command updates each door instance's geometry according to the door's
/// current To/From Room value.
/// </summary>
[Autodesk.Revit.Attributes.Transaction(Autodesk.Revit.Attributes.TransactionMode.Manual)]
[Autodesk.Revit.Attributes.Regeneration(Autodesk.Revit.Attributes.RegenerationOption.Manual)]
public class UpdateGeometryCommand : IExternalCommand
{
#region IExternalCommand Members
/// <summary>
/// Implement this method as an external command for Revit.
/// </summary>
/// <param name="commandData">An object that is passed to the external application
/// which contains data related to the command,
/// such as the application object and active view.</param>
/// <param name="message">A message that can be set by the external application
/// which will be displayed if a failure or cancellation is returned by
/// the external command.</param>
/// <param name="elements">A set of elements to which the external application
/// can add elements that are to be highlighted in case of failure or cancellation.</param>
/// <returns>Return the status of the external command.
/// A result of Succeeded means that the API external method functioned as expected.
/// Cancelled can be used to signify that the user cancelled the external operation
/// at some point. Failure should be returned if the application is unable to proceed with
/// the operation.</returns>
public Autodesk.Revit.UI.Result Execute(ExternalCommandData commandData, ref string message, Autodesk.Revit.DB.ElementSet elements)
{
Autodesk.Revit.UI.Result returnCode = Autodesk.Revit.UI.Result.Succeeded;
Autodesk.Revit.UI.UIApplication app = commandData.Application;
UIDocument doc = app.ActiveUIDocument;
Transaction tran = new Transaction(doc.Document, "Update Geometry Command");
tran.Start();
try
{
if (doc.Selection.Elements.IsEmpty)
{
DoorSwingData.UpdateDoorsGeometry(doc.Document, false);
}
else
{
DoorSwingData.UpdateDoorsGeometry(doc.Document, true);
}
returnCode = Autodesk.Revit.UI.Result.Succeeded;
}
catch (Exception ex)
{
// if there is anything wrong, give error information and return failed.
message = ex.Message;
returnCode = Autodesk.Revit.UI.Result.Failed;
}
if (Autodesk.Revit.UI.Result.Succeeded == returnCode)
{
tran.Commit();
}
else
{
tran.RollBack();
}
return returnCode;
}
#endregion
}
}
| |
using Q42.HueApi.ColorConverters;
using Q42.HueApi.Streaming;
using Q42.HueApi.Streaming.Effects;
using Q42.HueApi.Streaming.Effects.Examples;
using Q42.HueApi.Streaming.Extensions;
using Q42.HueApi.Streaming.Models;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace Q42.HueApi.Streaming.Sample
{
public class HueStreaming
{
public async Task Start()
{
StreamingGroup stream = await StreamingSetup.SetupAndReturnGroup();
var baseEntLayer = stream.GetNewLayer(isBaseLayer: true);
var effectLayer = stream.GetNewLayer();
//Optional: calculated effects that are placed on this layer
baseEntLayer.AutoCalculateEffectUpdate(new CancellationToken());
effectLayer.AutoCalculateEffectUpdate(new CancellationToken());
//Order lights based on position in the room
var orderedLeft = baseEntLayer.GetLeft().OrderByDescending(x => x.LightLocation.Y).ThenBy(x => x.LightLocation.X).To2DGroup();
var orderedRight = baseEntLayer.GetRight().OrderByDescending(x => x.LightLocation.Y).ThenByDescending(x => x.LightLocation.X);
var allLightsOrdered = baseEntLayer.OrderBy(x => x.LightLocation.X).ThenBy(x => x.LightLocation.Y).ToList().To2DGroup();
var allLightsOrderedFlat = baseEntLayer.OrderBy(x => x.LightLocation.X).ThenBy(x => x.LightLocation.Y).ToList();
var orderedByDistance = baseEntLayer.OrderBy(x => x.LightLocation.Distance(0, 0, 0)).To2DGroup();
var orderedByAngle = baseEntLayer.OrderBy(x => x.LightLocation.Angle(0, 0)).To2DGroup();
var line1 = baseEntLayer.Where(x => x.LightLocation.X <= -0.6).ToList();
var line2 = baseEntLayer.Where(x => x.LightLocation.X > -0.6 && x.LightLocation.X <= -0.1).ToList();
var line3 = baseEntLayer.Where(x => x.LightLocation.X > -0.1 && x.LightLocation.X <= 0.1).ToList();
var line4 = baseEntLayer.Where(x => x.LightLocation.X > 0.1 && x.LightLocation.X <= 0.6).ToList();
var line5 = baseEntLayer.Where(x => x.LightLocation.X > 0.6).ToList();
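//Note: the entertainment area's LightLocation.X runs from -1 (left) to +1 (right),
//so line1..line5 above partition the lights into five vertical slices across the room
//(coordinate range is an assumption based on the Hue entertainment space, not asserted here).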
var allLightsReverse = allLightsOrdered.ToList();
allLightsReverse.Reverse();
CancellationTokenSource cst = new CancellationTokenSource();
//Console.WriteLine("Blue line on 90 degree angle");
//var blueLineEffect = new HorizontalScanLineEffect();
//baseEntLayer.PlaceEffect(blueLineEffect);
//blueLineEffect.Start();
//cst = WaitCancelAndNext(cst);
//blueLineEffect.Stop();
//Ref<int?> stepSize = 20;
//blueLineEffect.Rotate(stepSize);
//Console.ReadLine();
//stepSize.Value -= 5;
//Console.ReadLine();
//stepSize.Value -= 5;
//Console.ReadLine();
//stepSize.Value -= 5;
//Console.ReadLine();
//stepSize.Value -= 5;
//Console.ReadLine();
//stepSize.Value -= 5;
//Console.ReadLine();
//stepSize.Value -= 5;
//Console.ReadLine();
//stepSize.Value -= 5;
//Console.ReadLine();
//stepSize.Value -= 5;
//Console.ReadLine();
//stepSize.Value -= 5;
//Console.ReadLine();
//stepSize.Value -= 5;
//Console.WriteLine("Finished");
//cst = WaitCancelAndNext(cst);
//blueLineEffect.Stop();
var quarter = new[] { baseEntLayer.GetLeft().GetFront(), baseEntLayer.GetLeft().GetBack(), baseEntLayer.GetRight().GetBack(), baseEntLayer.GetRight().GetFront() }.ToList();
baseEntLayer.SetState(cst.Token, new RGBColor("FFFFFF"), 1);
cst = WaitCancelAndNext(cst);
Console.WriteLine("Transition to red in 10 seconds");
baseEntLayer.SetState(cst.Token, new RGBColor("FF0000"), TimeSpan.FromSeconds(10));
Console.ReadLine();
Console.WriteLine("Transition to bri 0.25");
baseEntLayer.SetState(cst.Token, null, default, 0.25, TimeSpan.FromSeconds(1), true);
Console.ReadLine();
Console.WriteLine("Transition to bri 1");
baseEntLayer.SetState(cst.Token, new RGBColor("0000FF"), TimeSpan.FromSeconds(5), 1, TimeSpan.FromSeconds(1), false);
Console.ReadLine();
cst = WaitCancelAndNext(cst);
Console.WriteLine("Random color All / All");
quarter.SetRandomColor(cst.Token, IteratorEffectMode.All, IteratorEffectMode.All, waitTime: () => TimeSpan.FromMilliseconds(500));
cst = WaitCancelAndNext(cst);
Console.WriteLine("Flash on lights Cycle / Random");
quarter.FlashQuick(cst.Token, new Q42.HueApi.ColorConverters.RGBColor("FFFFFF"), IteratorEffectMode.Cycle, IteratorEffectMode.Random, waitTime: () => TimeSpan.FromMilliseconds(50));
cst = WaitCancelAndNext(cst);
Console.WriteLine("SetColor white Single / Single");
quarter.SetColor(cst.Token, new RGBColor("FFFFFF"), IteratorEffectMode.Single, IteratorEffectMode.Single, waitTime: () => TimeSpan.FromMilliseconds(200));
cst = WaitCancelAndNext(cst);
Console.WriteLine("Flash on lights Cycle / All");
quarter.FlashQuick(cst.Token, new Q42.HueApi.ColorConverters.RGBColor("FFFFFF"), IteratorEffectMode.Cycle, IteratorEffectMode.All, waitTime: () => TimeSpan.FromMilliseconds(50));
cst = WaitCancelAndNext(cst);
Console.WriteLine("Flash on lights Cycle / Single");
quarter.FlashQuick(cst.Token, new Q42.HueApi.ColorConverters.RGBColor("FFFFFF"), IteratorEffectMode.Cycle, IteratorEffectMode.Single, waitTime: () => TimeSpan.FromMilliseconds(50));
cst = WaitCancelAndNext(cst);
Console.WriteLine("Random color Cycle / All");
quarter.SetRandomColor(cst.Token, IteratorEffectMode.Cycle, IteratorEffectMode.All, waitTime: () => TimeSpan.FromMilliseconds(500));
cst = WaitCancelAndNext(cst);
Console.WriteLine("Random color Cycle / AllIndividual");
quarter.SetRandomColor(cst.Token, IteratorEffectMode.Cycle, IteratorEffectMode.AllIndividual, waitTime: () => TimeSpan.FromMilliseconds(500));
cst = WaitCancelAndNext(cst);
Console.WriteLine("Random color Cycle / Single");
quarter.SetRandomColor(cst.Token, IteratorEffectMode.Cycle, IteratorEffectMode.Single, waitTime: () => TimeSpan.FromMilliseconds(500));
cst = WaitCancelAndNext(cst);
Console.WriteLine("Random color Cycle / Random");
quarter.SetRandomColor(cst.Token, IteratorEffectMode.Cycle, IteratorEffectMode.Random, waitTime: () => TimeSpan.FromMilliseconds(500));
cst = WaitCancelAndNext(cst);
Console.WriteLine("Random color Cycle / Bounce");
quarter.SetRandomColor(cst.Token, IteratorEffectMode.Cycle, IteratorEffectMode.Bounce, waitTime: () => TimeSpan.FromMilliseconds(500));
cst = WaitCancelAndNext(cst);
Console.WriteLine("Random color on all lights");
baseEntLayer.To2DGroup().SetRandomColor(cst.Token, IteratorEffectMode.All, waitTime: () => TimeSpan.FromMilliseconds(500));
cst = WaitCancelAndNext(cst);
//Uncomment for demo using a secondary layer
//var secondGroup = stream.GetNewLayer();
//secondGroup.FlashQuick(new Q42.HueApi.ColorConverters.RGBColor("FFFFFF"), IteratorEffectMode.Cycle, waitTime: TimeSpan.FromMilliseconds(500));
cst = WaitCancelAndNext(cst);
//Group demo
//Console.WriteLine("Group demo");
////var groups = new List<IEnumerable<EntertainmentLight>>() { line1, line2, line3, line4, line5 };
//var groups = allLightsOrderedFlat.ChunkBy(5);
//var groupstest = allLightsOrderedFlat.ChunkByGroupNumber(4);
//groups.IteratorEffect(async (current, duration) => {
// //var r = new Random();
// //var color = new RGBColor(r.NextDouble(), r.NextDouble(), r.NextDouble());
// //current.SetState(color, 1);
// current.SetRandomColor(IteratorEffectMode.All, TimeSpan.FromMilliseconds(5000), duration: duration);
//}, IteratorEffectMode.All, TimeSpan.FromMilliseconds(500));
//cst = WaitCancelAndNext(cst);
//Random color from center
Console.WriteLine("Fill white color from center");
await orderedByDistance.SetColor(cst.Token, new RGBColor("FFFFFF"), IteratorEffectMode.Single, waitTime: () => TimeSpan.FromMilliseconds(50));
cst = WaitCancelAndNext(cst);
//Random color from center
Console.WriteLine("Fill red color order by angle from center");
await orderedByAngle.SetColor(cst.Token, new RGBColor("FF0000"), IteratorEffectMode.Single, waitTime: () => TimeSpan.FromMilliseconds(50));
cst = WaitCancelAndNext(cst);
Console.WriteLine("A pulse of random color is placed on an XY grid, matching your entertainment setup");
var randomPulseEffect = new RandomPulseEffect();
baseEntLayer.PlaceEffect(randomPulseEffect);
randomPulseEffect.Start();
cst = WaitCancelAndNext(cst);
randomPulseEffect.Stop();
Console.WriteLine("A pulse of random color is placed on an XY grid, matching your entertainment setup");
var randomPulseEffectNoFade = new RandomPulseEffect(false);
baseEntLayer.PlaceEffect(randomPulseEffectNoFade);
randomPulseEffectNoFade.Start();
cst = WaitCancelAndNext(cst);
randomPulseEffectNoFade.Stop();
Console.WriteLine("Different random colors on all lights");
baseEntLayer.To2DGroup().SetRandomColor(cst.Token, IteratorEffectMode.AllIndividual, waitTime: () => TimeSpan.FromMilliseconds(500));
cst = WaitCancelAndNext(cst);
Console.WriteLine("Trailing light effect with transition times");
allLightsOrdered.Flash(cst.Token, new Q42.HueApi.ColorConverters.RGBColor("FF0000"), IteratorEffectMode.Cycle, waitTime: () => TimeSpan.FromMilliseconds(500), transitionTimeOn: () => TimeSpan.FromMilliseconds(1000), transitionTimeOff: () => TimeSpan.FromMilliseconds(1000), waitTillFinished: false);
cst = WaitCancelAndNext(cst);
Console.WriteLine("Knight rider (works best with 6+ lights)");
allLightsOrdered.KnightRider(cst.Token);
cst = WaitCancelAndNext(cst);
Ref<TimeSpan> waitTime = TimeSpan.FromMilliseconds(750);
Console.WriteLine("Flash lights (750ms), press enter to decrease by 200 ms");
allLightsOrdered.FlashQuick(cst.Token, new Q42.HueApi.ColorConverters.RGBColor("FFFFFF"), IteratorEffectMode.Cycle, waitTime: () => waitTime);
Console.ReadLine();
waitTime.Value -= TimeSpan.FromMilliseconds(200);
Console.WriteLine($"Flash ({waitTime.Value.TotalMilliseconds})");
Console.ReadLine();
waitTime.Value -= TimeSpan.FromMilliseconds(200);
Console.WriteLine($"Flash ({waitTime.Value.TotalMilliseconds})");
Console.ReadLine();
waitTime.Value -= TimeSpan.FromMilliseconds(200);
Console.WriteLine($"Flash ({waitTime.Value.TotalMilliseconds})");
Console.ReadLine();
waitTime.Value -= TimeSpan.FromMilliseconds(100);
Console.WriteLine($"Flash ({waitTime.Value.TotalMilliseconds})");
cst = WaitCancelAndNext(cst);
Console.WriteLine("Flash on random lights");
allLightsOrdered.FlashQuick(cst.Token, new Q42.HueApi.ColorConverters.RGBColor("FFFFFF"), IteratorEffectMode.Random, waitTime: () => waitTime);
cst = WaitCancelAndNext(cst);
Console.WriteLine("Flash on ALL lights");
waitTime.Value = TimeSpan.FromMilliseconds(150);
allLightsOrdered.Flash(cst.Token, new Q42.HueApi.ColorConverters.RGBColor("FFFFFF"), IteratorEffectMode.All, waitTime: () => waitTime);
cst = WaitCancelAndNext(cst);
Console.WriteLine("Flash effect with transition times");
baseEntLayer.GetLeft().To2DGroup().Flash(cst.Token, new Q42.HueApi.ColorConverters.RGBColor("FF0000"), IteratorEffectMode.All, waitTime: () => TimeSpan.FromSeconds(1), transitionTimeOn: () => TimeSpan.FromMilliseconds(1000), transitionTimeOff: () => TimeSpan.FromMilliseconds(1000));
await Task.Delay(2000);
baseEntLayer.GetRight().To2DGroup().Flash(cst.Token, new Q42.HueApi.ColorConverters.RGBColor("FF0000"), IteratorEffectMode.All, waitTime: () => TimeSpan.FromSeconds(1), transitionTimeOn: () => TimeSpan.FromMilliseconds(1000), transitionTimeOff: () => TimeSpan.FromMilliseconds(1000));
cst = WaitCancelAndNext(cst);
Console.WriteLine("A red light that is moving in vertical direction and is placed on an XY grid, matching your entertainment setup");
var redLightEffect = new RedLightEffect();
redLightEffect.Radius = 0.7;
redLightEffect.Y = -0.8;
redLightEffect.X = -0.8;
baseEntLayer.PlaceEffect(redLightEffect);
redLightEffect.Start();
Task.Run(async () =>
{
double step = 0.2;
while (true)
{
redLightEffect.Y += step;
await Task.Delay(100);
if (redLightEffect.Y >= 2)
step = -0.1;
if (redLightEffect.Y <= -2)
step = +0.1;
}
}, cst.Token);
cst = WaitCancelAndNext(cst);
redLightEffect.Stop();
Console.WriteLine("Thank you for using Q42.Hue.Streaming. This library was developed during Christmas 2017.");
await allLightsOrdered.Christmas(cst.Token);
cst = WaitCancelAndNext(cst);
Console.WriteLine("Press Enter to Exit");
Console.ReadLine();
}
private static CancellationTokenSource WaitCancelAndNext(CancellationTokenSource cst)
{
Console.WriteLine("Press Enter for next sample");
Console.ReadLine();
cst.Cancel();
cst = new CancellationTokenSource();
return cst;
}
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Features;
using Microsoft.AspNetCore.SignalR.Configuration;
using Microsoft.AspNetCore.SignalR.Infrastructure;
using Microsoft.AspNetCore.SignalR.Json;
using Microsoft.AspNetCore.SignalR.Messaging;
using Microsoft.AspNetCore.SignalR.Transports;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Newtonsoft.Json;
namespace Microsoft.AspNetCore.SignalR
{
/// <summary>
/// Represents a connection between client and server.
/// </summary>
public abstract class PersistentConnection
{
private const string WebSocketsTransportName = "webSockets";
private const string PingJsonPayload = "{ \"Response\": \"pong\" }";
private const string StartJsonPayload = "{ \"Response\": \"started\" }";
private static readonly char[] SplitChars = new[] { ':' };
private static readonly ProtocolResolver _protocolResolver = new ProtocolResolver();
private SignalROptions _options;
private ITransportManager _transportManager;
public virtual void Initialize(IServiceProvider serviceProvider)
{
MessageBus = serviceProvider.GetRequiredService<IMessageBus>();
JsonSerializer = serviceProvider.GetRequiredService<JsonSerializer>();
LoggerFactory = serviceProvider.GetRequiredService<ILoggerFactory>();
Counters = serviceProvider.GetRequiredService<IPerformanceCounterManager>();
AckHandler = serviceProvider.GetRequiredService<IAckHandler>();
ProtectedData = serviceProvider.GetRequiredService<IProtectedData>();
UserIdProvider = serviceProvider.GetRequiredService<IUserIdProvider>();
Pool = serviceProvider.GetRequiredService<IMemoryPool>();
_options = serviceProvider.GetRequiredService<IOptions<SignalROptions>>().Value;
_transportManager = serviceProvider.GetRequiredService<ITransportManager>();
// Ensure that this server is listening for any ACKs sent over the bus.
serviceProvider.GetRequiredService<AckSubscriber>();
}
public bool Authorize(HttpRequest request)
{
return AuthorizeRequest(request);
}
protected virtual ILogger Logger
{
get
{
return LoggerFactory.CreateLogger<PersistentConnection>();
}
}
protected IProtectedData ProtectedData { get; private set; }
protected IMessageBus MessageBus { get; private set; }
protected JsonSerializer JsonSerializer { get; private set; }
protected IAckHandler AckHandler { get; private set; }
protected ILoggerFactory LoggerFactory { get; private set; }
protected IPerformanceCounterManager Counters { get; private set; }
protected ITransport Transport { get; private set; }
protected IUserIdProvider UserIdProvider { get; private set; }
protected IMemoryPool Pool { get; set; }
/// <summary>
/// Gets the <see cref="IConnection"/> for the <see cref="PersistentConnection"/>.
/// </summary>
public IConnection Connection
{
get;
private set;
}
/// <summary>
/// Gets the <see cref="IConnectionGroupManager"/> for the <see cref="PersistentConnection"/>.
/// </summary>
public IConnectionGroupManager Groups
{
get;
private set;
}
private string DefaultSignal
{
get
{
return PrefixHelper.GetPersistentConnectionName(DefaultSignalRaw);
}
}
private string DefaultSignalRaw
{
get
{
return GetType().FullName;
}
}
internal virtual string GroupPrefix
{
get
{
return PrefixHelper.PersistentConnectionGroupPrefix;
}
}
/// <summary>
/// Entry point for processing an incoming SignalR HTTP request.
/// </summary>
/// <param name="context">The <see cref="HttpContext"/> for the current request.</param>
/// <returns>A <see cref="Task"/> that completes when the request has been processed.</returns>
public Task ProcessRequest(HttpContext context)
{
if (context == null)
{
throw new ArgumentNullException("context");
}
var response = context.Response;
// Add the nosniff header for all responses to prevent IE from trying to sniff mime type from contents
context.Response.Headers["X-Content-Type-Options"] = "nosniff";
if (AuthorizeRequest(context.Request))
{
return ProcessRequestCore(context);
}
if (context.User != null &&
context.User.Identity.IsAuthenticated)
{
// If the user is authenticated and authorize failed then 403
response.StatusCode = 403;
}
else
{
// If we failed to authorize the request then return a 401
response.StatusCode = 401;
}
return TaskAsyncHelper.Empty;
}
/// <summary>
/// Handles all requests for <see cref="PersistentConnection"/>s.
/// </summary>
/// <param name="context">The <see cref="HttpContext"/> for the current request.</param>
/// <returns>A <see cref="Task"/> that completes when the <see cref="PersistentConnection"/> pipeline is complete.</returns>
/// <exception cref="T:System.InvalidOperationException">
/// Thrown if the transport wasn't specified.
/// Thrown if the connection id wasn't specified.
/// </exception>
public virtual async Task ProcessRequestCore(HttpContext context)
{
if (context == null)
{
throw new ArgumentNullException("context");
}
if (IsNegotiationRequest(context.Request))
{
await ProcessNegotiationRequest(context).PreserveCulture();
return;
}
else if (IsPingRequest(context.Request))
{
await ProcessPingRequest(context).PreserveCulture();
return;
}
Transport = GetTransport(context);
if (Transport == null)
{
await FailResponse(context.Response, String.Format(CultureInfo.CurrentCulture, Resources.Error_ProtocolErrorUnknownTransport)).PreserveCulture();
return;
}
string connectionToken = context.Request.Query["connectionToken"];
// If there's no connection id then this is a bad request
if (String.IsNullOrEmpty(connectionToken))
{
await FailResponse(context.Response, String.Format(CultureInfo.CurrentCulture, Resources.Error_ProtocolErrorMissingConnectionToken)).PreserveCulture();
return;
}
string connectionId;
string message;
int statusCode;
if (!TryGetConnectionId(context, connectionToken, out connectionId, out message, out statusCode))
{
await FailResponse(context.Response, message, statusCode).PreserveCulture();
return;
}
// Set the transport's connection id to the unprotected one
Transport.ConnectionId = connectionId;
// Get the user id from the request
string userId = UserIdProvider.GetUserId(context.Request);
// Get the groups token from the request
string groupsToken = await Transport.GetGroupsToken().PreserveCulture();
IList<string> signals = GetSignals(userId, connectionId);
IList<string> groups = AppendGroupPrefixes(context, connectionId, groupsToken);
Connection connection = CreateConnection(connectionId, signals, groups);
Connection = connection;
string groupName = PrefixHelper.GetPersistentConnectionGroupName(DefaultSignalRaw);
Groups = new GroupManager(connection, groupName);
// We handle /start requests after the PersistentConnection has been initialized,
// because ProcessStartRequest calls OnConnected.
if (IsStartRequest(context.Request))
{
await ProcessStartRequest(context, connectionId).PreserveCulture();
return;
}
Transport.Connected = () =>
{
return TaskAsyncHelper.FromMethod(() => OnConnected(context.Request, connectionId).OrEmpty());
};
Transport.Reconnected = () =>
{
return TaskAsyncHelper.FromMethod(() => OnReconnected(context.Request, connectionId).OrEmpty());
};
Transport.Received = data =>
{
Counters.ConnectionMessagesSentTotal.Increment();
Counters.ConnectionMessagesSentPerSec.Increment();
return TaskAsyncHelper.FromMethod(() => OnReceived(context.Request, connectionId, data).OrEmpty());
};
Transport.Disconnected = clean =>
{
return TaskAsyncHelper.FromMethod(() => OnDisconnected(context.Request, connectionId, stopCalled: clean).OrEmpty());
};
await Transport.ProcessRequest(connection).OrEmpty().Catch(Logger, Counters.ErrorsAllTotal, Counters.ErrorsAllPerSec).PreserveCulture();
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "We want to catch any exception when unprotecting data.")]
internal bool TryGetConnectionId(HttpContext context,
string connectionToken,
out string connectionId,
out string message,
out int statusCode)
{
string unprotectedConnectionToken = null;
// connectionId is only valid when this method returns true
connectionId = null;
// message and statusCode are only valid when this method returns false
message = null;
statusCode = 400;
try
{
unprotectedConnectionToken = ProtectedData.Unprotect(connectionToken, Purposes.ConnectionToken);
}
catch (Exception ex)
{
Logger.LogInformation(String.Format("Failed to process connectionToken {0}: {1}", connectionToken, ex));
}
if (String.IsNullOrEmpty(unprotectedConnectionToken))
{
message = String.Format(CultureInfo.CurrentCulture, Resources.Error_ConnectionIdIncorrectFormat);
return false;
}
var tokens = unprotectedConnectionToken.Split(SplitChars, 2);
connectionId = tokens[0];
string tokenUserName = tokens.Length > 1 ? tokens[1] : String.Empty;
string userName = GetUserIdentity(context);
if (!String.Equals(tokenUserName, userName, StringComparison.OrdinalIgnoreCase))
{
message = String.Format(CultureInfo.CurrentCulture, Resources.Error_UnrecognizedUserIdentity);
statusCode = 403;
return false;
}
return true;
}
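// The unprotected connection token has the form "<connectionId>:<userName>", e.g. a
// hypothetical "0f8fad5b-d9cb-469f-a165-70867728950e:alice"; the user name portion must
// match the identity on the current request or the caller receives a 403.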
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "We want to prevent any failures in unprotecting")]
internal IList<string> VerifyGroups(string connectionId, string groupsToken)
{
if (String.IsNullOrEmpty(groupsToken))
{
return ListHelper<string>.Empty;
}
string unprotectedGroupsToken = null;
try
{
unprotectedGroupsToken = ProtectedData.Unprotect(groupsToken, Purposes.Groups);
}
catch (Exception ex)
{
Logger.LogInformation(String.Format("Failed to process groupsToken {0}: {1}", groupsToken, ex));
}
if (String.IsNullOrEmpty(unprotectedGroupsToken))
{
return ListHelper<string>.Empty;
}
var tokens = unprotectedGroupsToken.Split(SplitChars, 2);
string groupConnectionId = tokens[0];
string groupsValue = tokens.Length > 1 ? tokens[1] : String.Empty;
if (!String.Equals(groupConnectionId, connectionId, StringComparison.OrdinalIgnoreCase))
{
return ListHelper<string>.Empty;
}
return JsonSerializer.Parse<string[]>(groupsValue);
}
private IList<string> AppendGroupPrefixes(HttpContext context, string connectionId, string groupsToken)
{
return (from g in OnRejoiningGroups(context.Request, VerifyGroups(connectionId, groupsToken), connectionId)
select GroupPrefix + g).ToList();
}
private Connection CreateConnection(string connectionId, IList<string> signals, IList<string> groups)
{
return new Connection(MessageBus,
JsonSerializer,
DefaultSignal,
connectionId,
signals,
groups,
LoggerFactory,
AckHandler,
Counters,
ProtectedData,
Pool);
}
[SuppressMessage("Microsoft.Usage", "CA1801:ReviewUnusedParameters", MessageId = "userId", Justification = "This method is virtual and is used in the derived class")]
private IList<string> GetDefaultSignals(string userId, string connectionId)
{
// The list of default signals this connection cares about:
// 1. The default signal (the type name)
// 2. The connection id (so we can message this particular connection)
return new string[] {
DefaultSignal,
PrefixHelper.GetConnectionId(connectionId)
};
}
/// <summary>
/// Returns the signals used in the <see cref="PersistentConnection"/>.
/// </summary>
/// <param name="userId">The user id for the current connection.</param>
/// <param name="connectionId">The id of the incoming connection.</param>
/// <returns>The signals used for this <see cref="PersistentConnection"/>.</returns>
protected virtual IList<string> GetSignals(string userId, string connectionId)
{
return GetDefaultSignals(userId, connectionId);
}
/// <summary>
/// Called before every request and gives the user an opportunity to authorize the request.
/// </summary>
/// <param name="request">The <see cref="HttpRequest"/> for the current connection.</param>
/// <returns>A boolean value that represents if the request is authorized.</returns>
protected virtual bool AuthorizeRequest(HttpRequest request)
{
return true;
}
/// <summary>
/// Called when a connection reconnects after a timeout to determine which groups should be rejoined.
/// </summary>
/// <param name="request">The <see cref="HttpRequest"/> for the current connection.</param>
/// <param name="groups">The groups the calling connection claims to be part of.</param>
/// <param name="connectionId">The id of the reconnecting client.</param>
/// <returns>A collection of group names that should be joined on reconnect.</returns>
protected virtual IList<string> OnRejoiningGroups(HttpRequest request, IList<string> groups, string connectionId)
{
return groups;
}
/// <summary>
/// Called when a new connection is made.
/// </summary>
/// <param name="request">The <see cref="HttpRequest"/> for the current connection.</param>
/// <param name="connectionId">The id of the connecting client.</param>
/// <returns>A <see cref="Task"/> that completes when the connect operation is complete.</returns>
protected virtual Task OnConnected(HttpRequest request, string connectionId)
{
return TaskAsyncHelper.Empty;
}
/// <summary>
/// Called when a connection reconnects after a timeout.
/// </summary>
/// <param name="request">The <see cref="HttpRequest"/> for the current connection.</param>
/// <param name="connectionId">The id of the re-connecting client.</param>
/// <returns>A <see cref="Task"/> that completes when the re-connect operation is complete.</returns>
protected virtual Task OnReconnected(HttpRequest request, string connectionId)
{
return TaskAsyncHelper.Empty;
}
/// <summary>
/// Called when data is received from a connection.
/// </summary>
/// <param name="request">The <see cref="HttpRequest"/> for the current connection.</param>
/// <param name="connectionId">The id of the connection sending the data.</param>
/// <param name="data">The payload sent to the connection.</param>
/// <returns>A <see cref="Task"/> that completes when the receive operation is complete.</returns>
protected virtual Task OnReceived(HttpRequest request, string connectionId, string data)
{
return TaskAsyncHelper.Empty;
}
/// <summary>
/// Called when a connection disconnects gracefully or due to a timeout.
/// </summary>
/// <param name="request">The <see cref="HttpRequest"/> for the current connection.</param>
/// <param name="connectionId">The id of the disconnected connection.</param>
/// <param name="stopCalled">
/// true, if stop was called on the client closing the connection gracefully;
/// false, if the connection has been lost for longer than the
/// <see cref="Configuration.IConfigurationManager.DisconnectTimeout"/>.
/// Timeouts can occur in scaleout when clients reconnect with another server.
/// </param>
/// <returns>A <see cref="Task"/> that completes when the disconnect operation is complete.</returns>
protected virtual Task OnDisconnected(HttpRequest request, string connectionId, bool stopCalled)
{
return TaskAsyncHelper.Empty;
}
private static Task ProcessPingRequest(HttpContext context)
{
return SendJsonResponse(context, PingJsonPayload);
}
private Task ProcessNegotiationRequest(HttpContext context)
{
// Total amount of time, in seconds, without a keep alive before the client should attempt to reconnect.
var keepAliveTimeout = _options.Transports.KeepAliveTimeout();
string connectionId = Guid.NewGuid().ToString("d");
string connectionToken = connectionId + ':' + GetUserIdentity(context);
var payload = new
{
Url = context.Request.LocalPath().Replace("/negotiate", ""),
ConnectionToken = ProtectedData.Protect(connectionToken, Purposes.ConnectionToken),
ConnectionId = connectionId,
KeepAliveTimeout = keepAliveTimeout != null ? keepAliveTimeout.Value.TotalSeconds : (double?)null,
DisconnectTimeout = _options.Transports.DisconnectTimeout.TotalSeconds,
ConnectionTimeout = _options.Transports.LongPolling.PollTimeout.TotalSeconds,
// TODO: Support websockets
TryWebSockets = _transportManager.SupportsTransport(WebSocketsTransportName) && context.Features.Get<IHttpWebSocketFeature>() != null,
ProtocolVersion = _protocolResolver.Resolve(context.Request).ToString(),
TransportConnectTimeout = _options.Transports.TransportConnectTimeout.TotalSeconds,
LongPollDelay = _options.Transports.LongPolling.PollDelay.TotalSeconds
};
return SendJsonResponse(context, JsonSerializer.Stringify(payload));
}
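// Illustrative shape of the negotiation payload serialized above (all values hypothetical):
// {
//   "Url": "/signalr",
//   "ConnectionToken": "<protected token>",
//   "ConnectionId": "0f8fad5b-d9cb-469f-a165-70867728950e",
//   "KeepAliveTimeout": 20.0,
//   "DisconnectTimeout": 30.0,
//   "ConnectionTimeout": 110.0,
//   "TryWebSockets": true,
//   "ProtocolVersion": "1.5",
//   "TransportConnectTimeout": 5.0,
//   "LongPollDelay": 0.0
// }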
private async Task ProcessStartRequest(HttpContext context, string connectionId)
{
await OnConnected(context.Request, connectionId).OrEmpty().PreserveCulture();
await SendJsonResponse(context, StartJsonPayload).PreserveCulture();
Counters.ConnectionsConnected.Increment();
}
private static Task SendJsonResponse(HttpContext context, string jsonPayload)
{
var callback = context.Request.Query["callback"];
if (String.IsNullOrEmpty(callback))
{
// Send normal JSON response
context.Response.ContentType = JsonUtility.JsonMimeType;
return context.Response.End(jsonPayload);
}
// Send JSONP response since a callback is specified by the query string
var callbackInvocation = JsonUtility.CreateJsonpCallback(callback, jsonPayload);
context.Response.ContentType = JsonUtility.JavaScriptMimeType;
return context.Response.End(callbackInvocation);
}
private static string GetUserIdentity(HttpContext context)
{
if (context.User != null && context.User.Identity.IsAuthenticated)
{
return context.User.Identity.Name ?? String.Empty;
}
return String.Empty;
}
private static Task FailResponse(HttpResponse response, string message, int statusCode = 400)
{
response.StatusCode = statusCode;
return response.End(message);
}
private static bool IsNegotiationRequest(HttpRequest request)
{
return request.LocalPath().EndsWith("/negotiate", StringComparison.OrdinalIgnoreCase);
}
private static bool IsStartRequest(HttpRequest request)
{
return request.LocalPath().EndsWith("/start", StringComparison.OrdinalIgnoreCase);
}
private static bool IsPingRequest(HttpRequest request)
{
return request.LocalPath().EndsWith("/ping", StringComparison.OrdinalIgnoreCase);
}
private ITransport GetTransport(HttpContext context)
{
return _transportManager.GetTransport(context);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.IO;
using System.Text;
using System.Threading.Tasks;
using Xunit;
namespace System.Security.Cryptography.Encryption.Tests.Asymmetric
{
public static partial class CryptoStreamTests
{
[Fact]
public static void Ctor()
{
var transform = new IdentityTransform(1, 1, true);
AssertExtensions.Throws<ArgumentException>(null, () => new CryptoStream(new MemoryStream(), transform, (CryptoStreamMode)12345));
AssertExtensions.Throws<ArgumentException>(null, "stream", () => new CryptoStream(new MemoryStream(new byte[0], writable: false), transform, CryptoStreamMode.Write));
AssertExtensions.Throws<ArgumentException>(null, "stream", () => new CryptoStream(new CryptoStream(new MemoryStream(new byte[0]), transform, CryptoStreamMode.Write), transform, CryptoStreamMode.Read));
}
[Theory]
[InlineData(64, 64, true)]
[InlineData(64, 128, true)]
[InlineData(128, 64, true)]
[InlineData(1, 1, true)]
[InlineData(37, 24, true)]
[InlineData(128, 3, true)]
[InlineData(8192, 64, true)]
[InlineData(64, 64, false)]
public static void Roundtrip(int inputBlockSize, int outputBlockSize, bool canTransformMultipleBlocks)
{
ICryptoTransform encryptor = new IdentityTransform(inputBlockSize, outputBlockSize, canTransformMultipleBlocks);
ICryptoTransform decryptor = new IdentityTransform(inputBlockSize, outputBlockSize, canTransformMultipleBlocks);
var stream = new MemoryStream();
using (CryptoStream encryptStream = new CryptoStream(stream, encryptor, CryptoStreamMode.Write))
{
Assert.True(encryptStream.CanWrite);
Assert.False(encryptStream.CanRead);
Assert.False(encryptStream.CanSeek);
Assert.False(encryptStream.HasFlushedFinalBlock);
Assert.Throws<NotSupportedException>(() => encryptStream.SetLength(1));
Assert.Throws<NotSupportedException>(() => encryptStream.Length);
Assert.Throws<NotSupportedException>(() => encryptStream.Position);
Assert.Throws<NotSupportedException>(() => encryptStream.Position = 0);
Assert.Throws<NotSupportedException>(() => encryptStream.Seek(0, SeekOrigin.Begin));
Assert.Throws<NotSupportedException>(() => encryptStream.Read(new byte[0], 0, 0));
Assert.Throws<NullReferenceException>(() => encryptStream.Write(null, 0, 0)); // No arg validation on buffer?
Assert.Throws<ArgumentOutOfRangeException>(() => encryptStream.Write(new byte[0], -1, 0));
Assert.Throws<ArgumentOutOfRangeException>(() => encryptStream.Write(new byte[0], 0, -1));
Assert.Throws<ArgumentOutOfRangeException>(() => encryptStream.Write(new byte[0], 0, -1));
AssertExtensions.Throws<ArgumentException>(null, () => encryptStream.Write(new byte[3], 1, 4));
byte[] toWrite = Encoding.UTF8.GetBytes(LoremText);
// Write it all at once
encryptStream.Write(toWrite, 0, toWrite.Length);
Assert.False(encryptStream.HasFlushedFinalBlock);
// Write in chunks
encryptStream.Write(toWrite, 0, toWrite.Length / 2);
encryptStream.Write(toWrite, toWrite.Length / 2, toWrite.Length - (toWrite.Length / 2));
Assert.False(encryptStream.HasFlushedFinalBlock);
// Write one byte at a time
for (int i = 0; i < toWrite.Length; i++)
{
encryptStream.WriteByte(toWrite[i]);
}
Assert.False(encryptStream.HasFlushedFinalBlock);
// Write async
encryptStream.WriteAsync(toWrite, 0, toWrite.Length).GetAwaiter().GetResult();
Assert.False(encryptStream.HasFlushedFinalBlock);
// Flush (nops)
encryptStream.Flush();
encryptStream.FlushAsync().GetAwaiter().GetResult();
encryptStream.FlushFinalBlock();
Assert.Throws<NotSupportedException>(() => encryptStream.FlushFinalBlock());
Assert.True(encryptStream.HasFlushedFinalBlock);
Assert.True(stream.Length > 0);
}
// Read/decrypt using Read
stream = new MemoryStream(stream.ToArray()); // CryptoStream.Dispose disposes the stream
using (CryptoStream decryptStream = new CryptoStream(stream, decryptor, CryptoStreamMode.Read))
{
Assert.False(decryptStream.CanWrite);
Assert.True(decryptStream.CanRead);
Assert.False(decryptStream.CanSeek);
Assert.False(decryptStream.HasFlushedFinalBlock);
Assert.Throws<NotSupportedException>(() => decryptStream.SetLength(1));
Assert.Throws<NotSupportedException>(() => decryptStream.Length);
Assert.Throws<NotSupportedException>(() => decryptStream.Position);
Assert.Throws<NotSupportedException>(() => decryptStream.Position = 0);
Assert.Throws<NotSupportedException>(() => decryptStream.Seek(0, SeekOrigin.Begin));
Assert.Throws<NotSupportedException>(() => decryptStream.Write(new byte[0], 0, 0));
Assert.Throws<NullReferenceException>(() => decryptStream.Read(null, 0, 0)); // No arg validation on buffer?
Assert.Throws<ArgumentOutOfRangeException>(() => decryptStream.Read(new byte[0], -1, 0));
Assert.Throws<ArgumentOutOfRangeException>(() => decryptStream.Read(new byte[0], 0, -1));
AssertExtensions.Throws<ArgumentException>(null, () => decryptStream.Read(new byte[3], 1, 4));
using (StreamReader reader = new StreamReader(decryptStream))
{
Assert.Equal(
LoremText + LoremText + LoremText + LoremText,
reader.ReadToEnd());
}
}
// Read/decrypt using ReadToEnd
stream = new MemoryStream(stream.ToArray()); // CryptoStream.Dispose disposes the stream
using (CryptoStream decryptStream = new CryptoStream(stream, decryptor, CryptoStreamMode.Read))
using (StreamReader reader = new StreamReader(decryptStream))
{
Assert.Equal(
LoremText + LoremText + LoremText + LoremText,
reader.ReadToEndAsync().GetAwaiter().GetResult());
}
// Read/decrypt using a small buffer to force multiple calls to Read
stream = new MemoryStream(stream.ToArray()); // CryptoStream.Dispose disposes the stream
using (CryptoStream decryptStream = new CryptoStream(stream, decryptor, CryptoStreamMode.Read))
using (StreamReader reader = new StreamReader(decryptStream, Encoding.UTF8, true, bufferSize: 10))
{
Assert.Equal(
LoremText + LoremText + LoremText + LoremText,
reader.ReadToEndAsync().GetAwaiter().GetResult());
}
// Read/decrypt one byte at a time with ReadByte
stream = new MemoryStream(stream.ToArray()); // CryptoStream.Dispose disposes the stream
using (CryptoStream decryptStream = new CryptoStream(stream, decryptor, CryptoStreamMode.Read))
{
string expectedStr = LoremText + LoremText + LoremText + LoremText;
foreach (char c in expectedStr)
{
Assert.Equal(c, decryptStream.ReadByte()); // relies on LoremText being ASCII
}
Assert.Equal(-1, decryptStream.ReadByte());
}
}
[Fact]
public static void NestedCryptoStreams()
{
ICryptoTransform encryptor = new IdentityTransform(1, 1, true);
using (MemoryStream output = new MemoryStream())
using (CryptoStream encryptStream1 = new CryptoStream(output, encryptor, CryptoStreamMode.Write))
using (CryptoStream encryptStream2 = new CryptoStream(encryptStream1, encryptor, CryptoStreamMode.Write))
{
encryptStream2.Write(new byte[] { 1, 2, 3, 4, 5 }, 0, 5);
}
}
[Fact]
public static void Clear()
{
ICryptoTransform encryptor = new IdentityTransform(1, 1, true);
using (MemoryStream output = new MemoryStream())
using (CryptoStream encryptStream = new CryptoStream(output, encryptor, CryptoStreamMode.Write))
{
encryptStream.Clear();
Assert.Throws<NotSupportedException>(() => encryptStream.Write(new byte[] { 1, 2, 3, 4, 5 }, 0, 5));
}
}
[Fact]
public static void FlushAsync()
{
ICryptoTransform encryptor = new IdentityTransform(1, 1, true);
using (MemoryStream output = new MemoryStream())
using (CryptoStream encryptStream = new CryptoStream(output, encryptor, CryptoStreamMode.Write))
{
encryptStream.WriteAsync(new byte[] { 1, 2, 3, 4, 5 }, 0, 5);
Task waitable = encryptStream.FlushAsync(new Threading.CancellationToken(false));
Assert.False(waitable.IsCanceled);
encryptStream.WriteAsync(new byte[] { 1, 2, 3, 4, 5 }, 0, 5);
waitable = encryptStream.FlushAsync(new Threading.CancellationToken(true));
Assert.True(waitable.IsCanceled);
}
}
[Fact]
public static void FlushCalledOnFlushAsync_DeriveClass()
{
ICryptoTransform encryptor = new IdentityTransform(1, 1, true);
using (MemoryStream output = new MemoryStream())
using (MinimalCryptoStream encryptStream = new MinimalCryptoStream(output, encryptor, CryptoStreamMode.Write))
{
encryptStream.WriteAsync(new byte[] { 1, 2, 3, 4, 5 }, 0, 5);
Task waitable = encryptStream.FlushAsync(new Threading.CancellationToken(false));
Assert.False(waitable.IsCanceled);
waitable.Wait();
Assert.True(encryptStream.FlushCalled);
}
}
[Fact]
public static void MultipleDispose()
{
ICryptoTransform encryptor = new IdentityTransform(1, 1, true);
using (MemoryStream output = new MemoryStream())
{
using (CryptoStream encryptStream = new CryptoStream(output, encryptor, CryptoStreamMode.Write))
{
encryptStream.Dispose();
}
Assert.False(output.CanRead);
}
#if netcoreapp
using (MemoryStream output = new MemoryStream())
{
using (CryptoStream encryptStream = new CryptoStream(output, encryptor, CryptoStreamMode.Write, leaveOpen: false))
{
encryptStream.Dispose();
}
Assert.False(output.CanRead);
}
using (MemoryStream output = new MemoryStream())
{
using (CryptoStream encryptStream = new CryptoStream(output, encryptor, CryptoStreamMode.Write, leaveOpen: true))
{
encryptStream.Dispose();
}
Assert.True(output.CanRead);
}
#endif
}
private const string LoremText =
@"Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Maecenas porttitor congue massa.
Fusce posuere, magna sed pulvinar ultricies, purus lectus malesuada libero, sit amet commodo magna eros quis urna.
Nunc viverra imperdiet enim. Fusce est. Vivamus a tellus.
Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas.
Proin pharetra nonummy pede. Mauris et orci.
Aenean nec lorem. In porttitor. Donec laoreet nonummy augue.
Suspendisse dui purus, scelerisque at, vulputate vitae, pretium mattis, nunc. Mauris eget neque at sem venenatis eleifend.
Ut nonummy.";
private sealed class IdentityTransform : ICryptoTransform
{
private readonly int _inputBlockSize, _outputBlockSize;
private readonly bool _canTransformMultipleBlocks;
private readonly object _lock = new object();
private long _writePos, _readPos;
private MemoryStream _stream;
internal IdentityTransform(int inputBlockSize, int outputBlockSize, bool canTransformMultipleBlocks)
{
_inputBlockSize = inputBlockSize;
_outputBlockSize = outputBlockSize;
_canTransformMultipleBlocks = canTransformMultipleBlocks;
_stream = new MemoryStream();
}
public bool CanReuseTransform { get { return true; } }
public bool CanTransformMultipleBlocks { get { return _canTransformMultipleBlocks; } }
public int InputBlockSize { get { return _inputBlockSize; } }
public int OutputBlockSize { get { return _outputBlockSize; } }
public void Dispose() { }
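// TransformBlock appends the incoming bytes to an internal MemoryStream and returns as
// many previously buffered bytes as fit in the caller's output buffer, tracking separate
// read and write positions so the transform behaves as a pure pass-through.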
public int TransformBlock(byte[] inputBuffer, int inputOffset, int inputCount, byte[] outputBuffer, int outputOffset)
{
lock (_lock)
{
_stream.Position = _writePos;
_stream.Write(inputBuffer, inputOffset, inputCount);
_writePos = _stream.Position;
_stream.Position = _readPos;
int copied = _stream.Read(outputBuffer, outputOffset, outputBuffer.Length - outputOffset);
_readPos = _stream.Position;
return copied;
}
}
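// TransformFinalBlock appends the final chunk, drains every byte that has not been read
// yet into a right-sized buffer, and resets the internal stream so the transform can be
// reused (CanReuseTransform is true).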
public byte[] TransformFinalBlock(byte[] inputBuffer, int inputOffset, int inputCount)
{
lock (_lock)
{
_stream.Position = _writePos;
_stream.Write(inputBuffer, inputOffset, inputCount);
_stream.Position = _readPos;
long len = _stream.Length - _stream.Position;
byte[] outputBuffer = new byte[len];
_stream.Read(outputBuffer, 0, outputBuffer.Length);
_stream = new MemoryStream();
_writePos = 0;
_readPos = 0;
return outputBuffer;
}
}
}
public class MinimalCryptoStream : CryptoStream
{
public bool FlushCalled;
public MinimalCryptoStream(Stream stream, ICryptoTransform transform, CryptoStreamMode mode) : base(stream, transform, mode) { }
public override void Flush()
{
FlushCalled = true;
base.Flush();
}
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.ApiManagement
{
using Microsoft.Azure;
using Microsoft.Azure.Management;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// Extension methods for PolicyOperations.
/// </summary>
public static partial class PolicyOperationsExtensions
{
/// <summary>
/// Lists all the Global Policy definitions of the Api Management service.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// The name of the API Management service.
/// </param>
/// <param name='scope'>
/// Policy scope. Possible values include: 'Tenant', 'Product', 'Api',
/// 'Operation', 'All'
/// </param>
public static PolicyCollection ListByService(this IPolicyOperations operations, string resourceGroupName, string serviceName, PolicyScopeContract? scope = default(PolicyScopeContract?))
{
return operations.ListByServiceAsync(resourceGroupName, serviceName, scope).GetAwaiter().GetResult();
}
/// <summary>
/// Lists all the Global Policy definitions of the Api Management service.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// The name of the API Management service.
/// </param>
/// <param name='scope'>
/// Policy scope. Possible values include: 'Tenant', 'Product', 'Api',
/// 'Operation', 'All'
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<PolicyCollection> ListByServiceAsync(this IPolicyOperations operations, string resourceGroupName, string serviceName, PolicyScopeContract? scope = default(PolicyScopeContract?), CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListByServiceWithHttpMessagesAsync(resourceGroupName, serviceName, scope, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Get the Global policy definition of the Api Management service.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// The name of the API Management service.
/// </param>
public static PolicyContract Get(this IPolicyOperations operations, string resourceGroupName, string serviceName)
{
return operations.GetAsync(resourceGroupName, serviceName).GetAwaiter().GetResult();
}
/// <summary>
/// Get the Global policy definition of the Api Management service.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// The name of the API Management service.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<PolicyContract> GetAsync(this IPolicyOperations operations, string resourceGroupName, string serviceName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetWithHttpMessagesAsync(resourceGroupName, serviceName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Creates or updates the global policy configuration of the Api Management
/// service.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// The name of the API Management service.
/// </param>
/// <param name='parameters'>
/// The policy contents to apply.
/// </param>
public static PolicyContract CreateOrUpdate(this IPolicyOperations operations, string resourceGroupName, string serviceName, PolicyContract parameters)
{
return operations.CreateOrUpdateAsync(resourceGroupName, serviceName, parameters).GetAwaiter().GetResult();
}
/// <summary>
/// Creates or updates the global policy configuration of the Api Management
/// service.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// The name of the API Management service.
/// </param>
/// <param name='parameters'>
/// The policy contents to apply.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<PolicyContract> CreateOrUpdateAsync(this IPolicyOperations operations, string resourceGroupName, string serviceName, PolicyContract parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.CreateOrUpdateWithHttpMessagesAsync(resourceGroupName, serviceName, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Deletes the global policy configuration of the Api Management Service.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// The name of the API Management service.
/// </param>
/// <param name='ifMatch'>
/// The entity state (Etag) version of the policy to be deleted. A value of "*"
/// can be used for If-Match to unconditionally apply the operation.
/// </param>
public static void Delete(this IPolicyOperations operations, string resourceGroupName, string serviceName, string ifMatch)
{
operations.DeleteAsync(resourceGroupName, serviceName, ifMatch).GetAwaiter().GetResult();
}
/// <summary>
/// Deletes the global policy configuration of the Api Management Service.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// The name of the API Management service.
/// </param>
/// <param name='ifMatch'>
/// The entity state (Etag) version of the policy to be deleted. A value of "*"
/// can be used for If-Match to unconditionally apply the operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task DeleteAsync(this IPolicyOperations operations, string resourceGroupName, string serviceName, string ifMatch, CancellationToken cancellationToken = default(CancellationToken))
{
(await operations.DeleteWithHttpMessagesAsync(resourceGroupName, serviceName, ifMatch, null, cancellationToken).ConfigureAwait(false)).Dispose();
}
}
}
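// A minimal usage sketch (not part of the generated client above), assuming an
// IPolicyOperations instance obtained from an authenticated ApiManagement client;
// only the extension methods defined above are used, with their documented signatures.
namespace Microsoft.Azure.Management.ApiManagement.Sketches
{
    using System.Threading.Tasks;
    using Microsoft.Azure.Management.ApiManagement.Models;
    internal static class PolicyUsageSketch
    {
        // Lists, reads, re-applies and finally deletes the global policy, using the "*"
        // If-Match value documented above to delete unconditionally.
        internal static async Task RoundTripGlobalPolicyAsync(IPolicyOperations operations, string resourceGroupName, string serviceName)
        {
            PolicyCollection policies = await operations.ListByServiceAsync(resourceGroupName, serviceName);
            PolicyContract current = await operations.GetAsync(resourceGroupName, serviceName);
            PolicyContract reapplied = await operations.CreateOrUpdateAsync(resourceGroupName, serviceName, current);
            await operations.DeleteAsync(resourceGroupName, serviceName, ifMatch: "*");
        }
    }
}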
| |
using Microsoft.EntityFrameworkCore;
using MvcTemplate.Components.Security.Area.Tests;
using MvcTemplate.Data;
using MvcTemplate.Objects;
using MvcTemplate.Tests;
using NSubstitute;
using System;
using System.Reflection;
using Xunit;
namespace MvcTemplate.Components.Security.Tests
{
public class AuthorizationTests : IDisposable
{
private DbContext context;
private IServiceProvider services;
private Authorization authorization;
public AuthorizationTests()
{
context = TestingContext.Create();
services = Substitute.For<IServiceProvider>();
services.GetService(typeof(IAuthorization)).Returns(Substitute.For<IAuthorization>());
services.GetService(typeof(IUnitOfWork)).Returns(_ => new UnitOfWork(TestingContext.Create()));
authorization = new Authorization(Assembly.GetExecutingAssembly(), services);
}
public void Dispose()
{
context.Dispose();
}
[Fact]
public void IsGrantedFor_AuthorizesControllerByIgnoringCase()
{
Int64 accountId = CreateAccountWithPermissionFor("Area", nameof(AuthorizedController), nameof(AuthorizedController.Action));
Assert.True(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.Action)}".ToUpper()));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeControllerByIgnoringCase()
{
Int64 accountId = CreateAccountWithPermissionFor("Test", "Test", "Test");
Assert.False(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.Action)}".ToUpper()));
}
[Fact]
public void IsGrantedFor_AuthorizesControllerWithoutArea()
{
Int64 accountId = CreateAccountWithPermissionFor("", nameof(AuthorizeController), nameof(AuthorizeController.Action));
Assert.True(authorization.IsGrantedFor(accountId, $"/{nameof(AuthorizeController)}/{nameof(AuthorizeController.Action)}"));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeControllerWithoutArea()
{
Int64 accountId = CreateAccountWithPermissionFor("", "Test", "Test");
Assert.False(authorization.IsGrantedFor(accountId, $"/{nameof(AuthorizeController)}/{nameof(AuthorizeController.Action)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesControllerWithArea()
{
Int64 accountId = CreateAccountWithPermissionFor("Area", nameof(AuthorizedController), nameof(AuthorizedController.Action));
Assert.True(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.Action)}"));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeControllerWithArea()
{
Int64 accountId = CreateAccountWithPermissionFor("Test", "Test", "Test");
Assert.False(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.Action)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesGetAction()
{
Int64 accountId = CreateAccountWithPermissionFor("Area", nameof(AuthorizedController), nameof(AuthorizedController.AuthorizedGetAction));
Assert.True(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.AuthorizedGetAction)}"));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeGetAction()
{
Int64 accountId = CreateAccountWithPermissionFor("Test", "Test", "Test");
Assert.False(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.AuthorizedGetAction)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesNamedGetAction()
{
Int64 accountId = CreateAccountWithPermissionFor("Area", nameof(AuthorizedController), "AuthorizedNamedGetAction");
Assert.True(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/AuthorizedNamedGetAction"));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeNamedGetAction()
{
Int64 accountId = CreateAccountWithPermissionFor("Test", "Test", "Test");
Assert.False(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/AuthorizedNamedGetAction"));
}
[Fact]
public void IsGrantedFor_AuthorizesNotExistingAction()
{
Int64 accountId = CreateAccountWithPermissionFor("", nameof(AuthorizeController), "Test");
Assert.True(authorization.IsGrantedFor(accountId, $"/{nameof(AuthorizeController)}/Test"));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeNotExistingAction()
{
Int64 accountId = CreateAccountWithPermissionFor("", nameof(AuthorizeController), "Other");
Assert.False(authorization.IsGrantedFor(accountId, $"/{nameof(AuthorizeController)}/Test"));
}
[Fact]
public void IsGrantedFor_AuthorizesNonGetAction()
{
Int64 accountId = CreateAccountWithPermissionFor("Area", nameof(AuthorizedController), nameof(AuthorizedController.AuthorizedPostAction));
Assert.True(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.AuthorizedPostAction)}"));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeNonGetAction()
{
Int64 accountId = CreateAccountWithPermissionFor("Test", "Test", "Test");
Assert.False(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.AuthorizedPostAction)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesNamedNonGetAction()
{
Int64 accountId = CreateAccountWithPermissionFor("Area", nameof(AuthorizedController), "AuthorizedNamedPostAction");
Assert.True(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/AuthorizedNamedPostAction"));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeNamedNonGetAction()
{
Int64 accountId = CreateAccountWithPermissionFor("Area", "Test", "Test");
Assert.False(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/AuthorizedNamedPostAction"));
}
[Fact]
public void IsGrantedFor_AuthorizesActionAsAction()
{
Int64 accountId = CreateAccountWithPermissionFor("Area", nameof(AuthorizedController), nameof(AuthorizedController.Action));
Assert.True(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.AuthorizedAsAction)}"));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeActionAsAction()
{
Int64 accountId = CreateAccountWithPermissionFor("Area", nameof(AuthorizedController), nameof(AuthorizedController.AuthorizedAsAction));
Assert.False(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.AuthorizedAsAction)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesActionAsSelf()
{
Int64 accountId = CreateAccountWithPermissionFor("Area", nameof(AuthorizedController), nameof(AuthorizedController.AuthorizedAsSelf));
Assert.True(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.AuthorizedAsSelf)}"));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeActionAsSelf()
{
Int64 accountId = CreateAccountWithPermissionFor("Area", "Test", "Test");
Assert.False(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.AuthorizedAsSelf)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesActionAsOtherAction()
{
Int64 accountId = CreateAccountWithPermissionFor("", nameof(InheritedAuthorizedController), nameof(InheritedAuthorizedController.InheritanceAction));
Assert.True(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.AuthorizedAsOtherAction)}"));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeActionAsOtherAction()
{
Int64 accountId = CreateAccountWithPermissionFor("Area", nameof(AuthorizedController), nameof(AuthorizedController.AuthorizedAsOtherAction));
Assert.False(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.AuthorizedAsOtherAction)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesEmptyAreaAsNull()
{
Int64 accountId = CreateAccountWithPermissionFor("", nameof(AuthorizeController), nameof(AuthorizeController.Action));
Assert.True(authorization.IsGrantedFor(accountId, $"/{nameof(AuthorizeController)}/{nameof(AuthorizeController.Action)}"));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeEmptyAreaAsNull()
{
Int64 accountId = CreateAccountWithPermissionFor("", "Test", "Test");
Assert.False(authorization.IsGrantedFor(accountId, $"/{nameof(AuthorizeController)}/{nameof(AuthorizeController.Action)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesAuthorizedAction()
{
Int64 accountId = CreateAccountWithPermissionFor("", "AllowAnonymous", "AuthorizedAction");
Assert.True(authorization.IsGrantedFor(accountId, $"/{nameof(AllowAnonymousController)}/{nameof(AllowAnonymousController.AuthorizedAction)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesAllowAnonymousAction()
{
Int64 accountId = CreateAccountWithPermissionFor("", "Test", "Test");
Assert.True(authorization.IsGrantedFor(accountId, $"/{nameof(AuthorizeController)}/{nameof(AuthorizeController.AllowAnonymousAction)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesAllowUnauthorizedAction()
{
Int64 accountId = CreateAccountWithPermissionFor("", "Test", "Test");
Assert.True(authorization.IsGrantedFor(accountId, $"/{nameof(AuthorizeController)}/{nameof(AuthorizeController.AllowUnauthorizedAction)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesAuthorizedController()
{
Int64 accountId = CreateAccountWithPermissionFor("", nameof(AuthorizeController), nameof(AuthorizeController.Action));
Assert.True(authorization.IsGrantedFor(accountId, $"/{nameof(AuthorizeController)}/{nameof(AuthorizeController.Action)}"));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeAuthorizedController()
{
Int64 accountId = CreateAccountWithPermissionFor("", "Test", "Test");
Assert.False(authorization.IsGrantedFor(accountId, $"/{nameof(AuthorizeController)}/{nameof(AuthorizeController.Action)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesAllowAnonymousController()
{
Int64 accountId = CreateAccountWithPermissionFor("", "Test", "Test");
Assert.True(authorization.IsGrantedFor(accountId, $"/{nameof(AllowAnonymousController)}/{nameof(AllowAnonymousController.SimpleAction)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesAllowUnauthorizedController()
{
Int64 accountId = CreateAccountWithPermissionFor("", "Test", "Test");
Assert.True(authorization.IsGrantedFor(accountId, $"/{nameof(AllowUnauthorizedController)}/{nameof(AllowUnauthorizedController.AuthorizedAction)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesInheritedAuthorizedController()
{
Int64 accountId = CreateAccountWithPermissionFor("", nameof(InheritedAuthorizedController), nameof(InheritedAuthorizedController.InheritanceAction));
Assert.True(authorization.IsGrantedFor(accountId, $"/{nameof(InheritedAuthorizedController)}/{nameof(InheritedAuthorizedController.InheritanceAction)}"));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeInheritedAuthorizedController()
{
Int64 accountId = CreateAccountWithPermissionFor("", "Test", "Test");
Assert.False(authorization.IsGrantedFor(accountId, $"/{nameof(InheritedAuthorizedController)}/{nameof(InheritedAuthorizedController.InheritanceAction)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesInheritedAllowAnonymousController()
{
Int64 accountId = CreateAccountWithPermissionFor("", "Test", "Test");
Assert.True(authorization.IsGrantedFor(accountId, $"/{nameof(InheritedAllowAnonymousController)}/{nameof(InheritedAllowAnonymousController.InheritanceAction)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesInheritedAllowUnauthorizedController()
{
Int64 accountId = CreateAccountWithPermissionFor("", "Test", "Test");
Assert.True(authorization.IsGrantedFor(accountId, $"/{nameof(InheritedAllowUnauthorizedController)}/{nameof(InheritedAllowUnauthorizedController.InheritanceAction)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesNotAttributedController()
{
Int64 accountId = CreateAccountWithPermissionFor("", "Test", "Test");
Assert.True(authorization.IsGrantedFor(accountId, $"/{nameof(NotAttributedController)}/{nameof(NotAttributedController.Action)}"));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeNotExistingAccount()
{
CreateAccountWithPermissionFor("Area", nameof(AuthorizedController), nameof(AuthorizedController.Action));
Assert.False(authorization.IsGrantedFor(0, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.Action)}"));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeLockedAccount()
{
Int64 accountId = CreateAccountWithPermissionFor("Area", nameof(AuthorizedController), nameof(AuthorizedController.Action), isLocked: true);
Assert.False(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.Action)}"));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeNullAccount()
{
CreateAccountWithPermissionFor("", nameof(AuthorizeController), nameof(AuthorizeController.Action));
Assert.False(authorization.IsGrantedFor(null, $"/{nameof(AuthorizeController)}/{nameof(AuthorizeController.Action)}"));
}
[Fact]
public void IsGrantedFor_AuthorizesByIgnoringCase()
{
Int64 accountId = CreateAccountWithPermissionFor("Area", nameof(AuthorizedController), nameof(AuthorizedController.Action));
Assert.True(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.Action)}".ToLower()));
}
[Fact]
public void IsGrantedFor_DoesNotAuthorizeByIgnoringCase()
{
Int64 accountId = CreateAccountWithPermissionFor("Test", "Test", "Test");
Assert.False(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.Action)}".ToLower()));
}
[Fact]
public void IsGrantedFor_CachesAccountPermissions()
{
Int64 accountId = CreateAccountWithPermissionFor("", nameof(AuthorizeController), nameof(AuthorizeController.Action));
context.Drop();
Assert.True(authorization.IsGrantedFor(accountId, $"/{nameof(AuthorizeController)}/{nameof(AuthorizeController.Action)}"));
}
[Fact]
public void Refresh_Permissions()
{
Int64 accountId = CreateAccountWithPermissionFor("Area", nameof(AuthorizedController), nameof(AuthorizedController.Action));
Assert.True(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.Action)}"));
context.Drop();
authorization.Refresh(services);
Assert.False(authorization.IsGrantedFor(accountId, $"Area/{nameof(AuthorizedController)}/{nameof(AuthorizedController.Action)}"));
}
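// Seeds an account whose role carries a single permission for the given
// area/controller/action, refreshes the authorization cache and returns the account id.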
private Int64 CreateAccountWithPermissionFor(String area, String controller, String action, Boolean isLocked = false)
{
RolePermission rolePermission = ObjectsFactory.CreateRolePermission(0);
rolePermission.Permission.Controller = controller;
rolePermission.Permission.Action = action;
rolePermission.Permission.Area = area;
Account account = ObjectsFactory.CreateAccount(0);
account.Role = rolePermission.Role;
account.IsLocked = isLocked;
context.Drop().Add(rolePermission);
context.Add(account);
context.SaveChanges();
authorization.Refresh(services);
return account.Id;
}
}
}
| |
//
// BansheeMetrics.cs
//
// Author:
// Gabriel Burt <[email protected]>
//
// Copyright (c) 2010 Novell, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.Linq;
using System.Reflection;
using Mono.Unix;
using Hyena;
using Hyena.Metrics;
using Hyena.Data.Sqlite;
using Banshee.Configuration;
using Banshee.ServiceStack;
using Banshee.Networking;
using Banshee.Sources;
using Banshee.PlaybackController;
namespace Banshee.Metrics
{
public class BansheeMetrics : IDisposable
{
private static BansheeMetrics banshee_metrics;
public static BansheeMetrics Instance { get { return banshee_metrics; } }
public static event System.Action Started;
public static event System.Action Stopped;
public static void Start ()
{
// Only enable collection 20% of the time
var one_in_five = new Random ().NextDouble () < 0.2;
if (one_in_five && banshee_metrics == null) {
Log.Information ("Starting collection of anonymous usage data");
try {
banshee_metrics = new BansheeMetrics ();
} catch (Exception e) {
Hyena.Log.Exception ("Failed to start collection of anonymous usage data", e);
banshee_metrics = null;
}
}
}
public static void Stop ()
{
if (banshee_metrics != null) {
Log.Information ("Stopping collection of anonymous usage data");
banshee_metrics.Dispose ();
banshee_metrics = null;
}
}
private MetricsCollection metrics;
private string id_key = "AnonymousUsageData.Userid";
private string last_post_key = "AnonymousUsageData.LastPostStamp";
private Metric shutdown, duration, active_source_changed, sqlite_executed;
private Metric playback_source_changed, shuffle_changed, repeat_changed;
private BansheeMetrics ()
{
banshee_metrics = this;
string unique_userid = DatabaseConfigurationClient.Client.Get<string> (id_key, null);
if (String.IsNullOrEmpty (unique_userid)) {
unique_userid = System.Guid.NewGuid ().ToString ();
DatabaseConfigurationClient.Client.Set<string> (id_key, unique_userid);
}
metrics = new MetricsCollection (unique_userid, new DbSampleStore (
ServiceManager.DbConnection, "AnonymousUsageData"
));
Configuration.Start ();
if (Application.ActiveClient != null && Application.ActiveClient.IsStarted) {
Initialize (null);
} else {
Application.ClientStarted += Initialize;
}
}
private void Initialize (Client client)
{
Application.ClientStarted -= Initialize;
var handler = Started;
if (handler != null) {
handler ();
}
Application.RunTimeout (5*1000, delegate {
if (BansheeMetrics.Instance == null) {
return false;
}
ThreadAssist.SpawnFromMain (delegate {
metrics.AddDefaults ();
AddMetrics ();
if (ApplicationContext.CommandLine.Contains ("debug-metrics")) {
Log.InformationFormat ("Anonymous usage data collected:\n{0}", metrics.ToJsonString ());
System.IO.File.WriteAllText ("usage-data.json", metrics.ToJsonString ());
}
if (!ServiceManager.Get<Network> ().Connected) {
return;
}
// Don't post to the server more than once every four days
var last_post_time = DatabaseConfigurationClient.Client.Get<DateTime> (last_post_key, DateTime.MinValue);
var last_post_rel = (DateTime.Now - last_post_time).TotalDays;
if (last_post_rel < 0 || last_post_rel > 4.0) {
var poster = new HttpPoster ("http://metrics.banshee.fm/submit/", metrics);
bool posted = poster.Post ();
Log.InformationFormat ("Posted usage data? {0}", posted);
// Clear the old metrics, even if we failed to post them; it might be a server-side
// problem w/ the data we want to send (eg too big, out of space) and we don't want
// to keep retrying to send the same data.
metrics.Store.Clear ();
DatabaseConfigurationClient.Client.Set<DateTime> (last_post_key, DateTime.Now);
}
});
return false;
});
}
private void AddMetrics ()
{
Add ("Client", Application.ActiveClient);
Add ("BuildHostCpu", Application.BuildHostCpu);
Add ("BuildHostOS", Application.BuildHostOperatingSystem);
Add ("BuildTime", Application.BuildTime);
Add ("BuildVendor", Application.BuildVendor);
Add ("Version", Application.Version);
Add ("StartedAt", ApplicationContext.StartedAt);
// Query basic stats about what content the user has
foreach (var src in ServiceManager.SourceManager.FindSources<PrimarySource> ()) {
var type_name = src.TypeName;
var reader = new HyenaDataReader (ServiceManager.DbConnection.Query (
@"SELECT COUNT(*),
COUNT(CASE ifnull(Rating, 0) WHEN 0 THEN NULL ELSE 1 END),
COUNT(CASE ifnull(BPM, 0) WHEN 0 THEN NULL ELSE 1 END),
COUNT(CASE ifnull(LastStreamError, 0) WHEN 0 THEN NULL ELSE 1 END),
COUNT(CASE ifnull(Composer, 0) WHEN 0 THEN NULL ELSE 1 END),
COUNT(CASE ifnull(LicenseUri, 0) WHEN 0 THEN NULL ELSE 1 END),
COUNT(CASE ifnull(Grouping, 0) WHEN 0 THEN NULL ELSE 1 END),
COUNT(CASE PlayCount WHEN 0 THEN 1 ELSE NULL END),
AVG(Score),
AVG(BitRate),
SUM(PlayCount),
SUM(SkipCount),
CAST (SUM(PlayCount * (Duration/1000)) AS INTEGER),
SUM(FileSize)
FROM CoreTracks WHERE PrimarySourceID = ?", src.DbId
));
// DateAdded, Grouping
var results = new string [] {
"TrackCount", "RatedTrackCount", "BpmTrackCount", "ErrorTrackCount", "ComposerTrackCount",
"LicenseUriTrackCount", "GroupingTrackCount", "UnplayedTrackCount", "AvgScore",
"AvgBitRate", "TotalPlayCount", "TotalSkipCount", "TotalPlaySeconds", "TotalFileSize"
};
for (int i = 0; i < results.Length; i++) {
Add (String.Format ("{0}/{1}", type_name, results[i]), reader.Get<long> (i));
}
reader.Dispose ();
}
// Wire up event-triggered metrics
active_source_changed = Add ("ActiveSourceChanged");
ServiceManager.SourceManager.ActiveSourceChanged += OnActiveSourceChanged;
shutdown = Add ("ShutdownAt", () => DateTime.Now);
duration = Add ("RunDuration", () => DateTime.Now - ApplicationContext.StartedAt);
Application.ShutdownRequested += OnShutdownRequested;
sqlite_executed = Add ("LongSqliteCommand");
HyenaSqliteCommand.CommandExecuted += OnSqliteCommandExecuted;
HyenaSqliteCommand.RaiseCommandExecuted = true;
HyenaSqliteCommand.RaiseCommandExecutedThresholdMs = 400;
playback_source_changed = Add ("PlaybackSourceChanged");
ServiceManager.PlaybackController.SourceChanged += OnPlaybackSourceChanged;
shuffle_changed = Add ("ShuffleModeChanged");
ServiceManager.PlaybackController.ShuffleModeChanged += OnShuffleModeChanged;
repeat_changed = Add ("RepeatModeChanged");
ServiceManager.PlaybackController.RepeatModeChanged += OnRepeatModeChanged;
}
public Metric Add (string name)
{
return metrics.Add (String.Format ("Banshee/{0}", name));
}
public Metric Add (string name, object value)
{
return metrics.Add (String.Format ("Banshee/{0}", name), value);
}
public Metric Add (string name, Func<object> func)
{
return metrics.Add (String.Format ("Banshee/{0}", name), func);
}
public void Dispose ()
{
var handler = Stopped;
if (handler != null) {
handler ();
}
Configuration.Stop ();
// Disconnect from events we're listening to
ServiceManager.SourceManager.ActiveSourceChanged -= OnActiveSourceChanged;
Application.ShutdownRequested -= OnShutdownRequested;
HyenaSqliteCommand.CommandExecuted -= OnSqliteCommandExecuted;
ServiceManager.PlaybackController.SourceChanged -= OnPlaybackSourceChanged;
ServiceManager.PlaybackController.ShuffleModeChanged -= OnShuffleModeChanged;
ServiceManager.PlaybackController.RepeatModeChanged -= OnRepeatModeChanged;
// Delete any collected data
metrics.Store.Clear ();
metrics.Dispose ();
metrics = null;
// Forget the user's unique id
DatabaseConfigurationClient.Client.Set<string> (id_key, "");
}
private string GetSourceString (Source src)
{
if (src == null)
return null;
var parent = src.Parent;
if (parent == null) {
return src.GetType ().ToString ();
} else {
return String.Format ("{0}/{1}", parent.GetType (), src.GetType ());
}
}
#region Event Handlers
private void OnActiveSourceChanged (SourceEventArgs args)
{
active_source_changed.PushSample (GetSourceString (ServiceManager.SourceManager.ActiveSource));
}
private bool OnShutdownRequested ()
{
shutdown.TakeSample ();
duration.TakeSample ();
return true;
}
private void OnSqliteCommandExecuted (object o, CommandExecutedArgs args)
{
sqlite_executed.PushSample (String.Format ("{0}ms -- {1}", args.Ms, args.Sql));
}
private void OnPlaybackSourceChanged (object o, EventArgs args)
{
playback_source_changed.PushSample (GetSourceString (ServiceManager.PlaybackController.Source as Source));
}
private void OnShuffleModeChanged (object o, EventArgs<string> args)
{
shuffle_changed.PushSample (args.Value);
}
private void OnRepeatModeChanged (object o, EventArgs<PlaybackRepeatMode> args)
{
repeat_changed.PushSample (args.Value);
}
#endregion
public static SchemaEntry<bool> EnableCollection = new SchemaEntry<bool> (
"core", "send_anonymous_usage_data", false, // disabled by default
"Improve Banshee by sending anonymous usage data", null
);
}
}
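// A minimal, self-contained sketch of the posting throttle used in Initialize above:
// compare the stored last-post stamp against the current time and only post again once
// the configured number of days has passed (or the stamp lies in the future, e.g. after
// a clock change). The helper name and the four-day default are illustrative only.
namespace Banshee.Metrics.Sketches
{
    using System;
    internal static class PostThrottleSketch
    {
        // Returns true when a new submission should be attempted.
        internal static bool IsPostDue (DateTime lastPost, DateTime now, double minDaysBetweenPosts = 4.0)
        {
            double daysSinceLastPost = (now - lastPost).TotalDays;
            return daysSinceLastPost < 0 || daysSinceLastPost > minDaysBetweenPosts;
        }
    }
}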
| |
//
// Encog(tm) Core v3.3 - .Net Version
// http://www.heatonresearch.com/encog/
//
// Copyright 2008-2014 Heaton Research, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information on Heaton Research copyrights, licenses
// and trademarks visit:
// http://www.heatonresearch.com/copyright
//
using System;
using Encog.MathUtil.Matrices;
using Encog.ML.Data;
using Encog.ML.Data.Specific;
using Encog.Util;
namespace Encog.Neural.Thermal
{
/// <summary>
/// Implements a Hopfield network.
/// </summary>
[Serializable]
public class HopfieldNetwork : ThermalNetwork
{
/// <summary>
/// Default constructor.
/// </summary>
///
public HopfieldNetwork()
{
}
/// <summary>
/// Construct a Hopfield with the specified neuron count.
/// </summary>
///
/// <param name="neuronCount">The neuron count.</param>
public HopfieldNetwork(int neuronCount) : base(neuronCount)
{
}
/// <inheritdoc/>
public override int InputCount
{
get { return NeuronCount; }
}
/// <inheritdoc/>
public override int OutputCount
{
get { return NeuronCount; }
}
/// <summary>
/// Train the neural network for the specified pattern. The neural network
/// can be trained for more than one pattern. To do this simply call the
/// train method more than once.
/// </summary>
///
/// <param name="pattern">The pattern to train for.</param>
public void AddPattern(IMLData pattern)
{
if (pattern.Count != NeuronCount)
{
throw new NeuralNetworkError("Network with " + NeuronCount
+ " neurons, cannot learn a pattern of size "
+ pattern.Count);
}
// Create a row matrix from the input, convert boolean to bipolar
Matrix m2 = Matrix.CreateRowMatrix(pattern);
// Transpose the matrix and multiply by the original input matrix
Matrix m1 = MatrixMath.Transpose(m2);
Matrix m3 = MatrixMath.Multiply(m1, m2);
// matrix 3 should be square by now, so create an identity
// matrix of the same size.
Matrix identity = MatrixMath.Identity(m3.Rows);
// subtract the identity matrix
Matrix m4 = MatrixMath.Subtract(m3, identity);
// now add the calculated matrix, for this pattern, to the
// existing weight matrix.
ConvertHopfieldMatrix(m4);
}
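// The update above is the classic Hopfield/Hebbian rule for a bipolar (+1/-1) pattern p:
// W <- W + (p^T * p) - I, i.e. the outer product of the pattern with itself minus the
// identity matrix, so each stored pattern reinforces the weights while the diagonal
// (the self-connections) stays at zero.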
/// <summary>
/// Note: for Hopfield networks, you will usually want to call the "run"
/// method to compute the output.
/// This method can be used to copy the input data to the current state. A
/// single iteration is then run, and the new current state is returned.
/// </summary>
///
/// <param name="input">The input pattern.</param>
/// <returns>The new current state.</returns>
public override sealed IMLData Compute(IMLData input)
{
var result = new BiPolarMLData(input.Count);
input.CopyTo(CurrentState.Data, 0, input.Count);
Run();
for (int i = 0; i < CurrentState.Count; i++)
{
result.SetBoolean(i,
BiPolarUtil.Double2bipolar(CurrentState[i]));
}
EngineArray.ArrayCopy(CurrentState.Data, result.Data);
return result;
}
/// <summary>
/// Update the Hopfield weights after training.
/// </summary>
///
/// <param name="delta">The amount to change the weights by.</param>
private void ConvertHopfieldMatrix(Matrix delta)
{
// add the new weight matrix to what is there already
for (int row = 0; row < delta.Rows; row++)
{
for (int col = 0; col < delta.Rows; col++)
{
AddWeight(row, col, delta[row, col]);
}
}
}
/// <summary>
/// Perform one Hopfield iteration.
/// </summary>
///
public void Run()
{
for (int toNeuron = 0; toNeuron < NeuronCount; toNeuron++)
{
double sum = 0;
for (int fromNeuron = 0; fromNeuron < NeuronCount; fromNeuron++)
{
sum += CurrentState[fromNeuron]
*GetWeight(fromNeuron, toNeuron);
}
CurrentState[toNeuron] = sum;
}
}
/// <summary>
/// Run the network until it becomes stable and no longer changes with
/// further runs.
/// </summary>
///
/// <param name="max">The maximum number of cycles to run before giving up.</param>
/// <returns>The number of cycles that were run.</returns>
public int RunUntilStable(int max)
{
bool done = false;
String currentStateStr = CurrentState.ToString();
int cycle = 0;
do
{
Run();
cycle++;
String lastStateStr = CurrentState.ToString();
if (!currentStateStr.Equals(lastStateStr))
{
if (cycle > max)
{
done = true;
}
}
else
{
done = true;
}
currentStateStr = lastStateStr;
} while (!done);
return cycle;
}
/// <summary>
/// Not used; a Hopfield network has no additional properties to update.
/// </summary>
///
public override void UpdateProperties()
{
// nothing needed here
}
}
}
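// A minimal recall sketch for the HopfieldNetwork above. It only uses members that appear
// in this file (the int constructor of BiPolarMLData, SetBoolean, AddPattern and Compute);
// the 4-bit pattern values are illustrative.
namespace Encog.Neural.Thermal.Sketches
{
    using Encog.ML.Data;
    using Encog.ML.Data.Specific;
    internal static class HopfieldRecallSketch
    {
        internal static IMLData StoreAndRecall()
        {
            // Train a 4-neuron network on a single bipolar pattern: + + - -
            var network = new HopfieldNetwork(4);
            var pattern = new BiPolarMLData(4);
            pattern.SetBoolean(0, true);
            pattern.SetBoolean(1, true);
            pattern.SetBoolean(2, false);
            pattern.SetBoolean(3, false);
            network.AddPattern(pattern);
            // Present a copy with one flipped bit; Compute copies it into the current
            // state, runs one iteration and returns the (ideally corrected) state.
            var noisy = new BiPolarMLData(4);
            noisy.SetBoolean(0, true);
            noisy.SetBoolean(1, false); // flipped relative to the stored pattern
            noisy.SetBoolean(2, false);
            noisy.SetBoolean(3, false);
            return network.Compute(noisy);
        }
    }
}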
| |
#if UNITY_EDITOR
using UnityEngine;
using UnityEditor;
using UnityEditorInternal;
using System.Collections.Generic;
using System;
namespace UMA.Editors
{
public partial class RaceInspector
{
private ReorderableList wardrobeSlotList;
private bool wardrobeSlotListInitialized = false;
private int compatibleRacePickerID;
static bool[] _BCFoldouts = new bool[0];
List<SlotData> baseSlotsList = new List<SlotData>();
List<string> baseSlotsNamesList = new List<string>();
// Drop area for Backwards Compatible Races
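// Handles four editor events over the same rect: MouseUp opens an object picker,
// ObjectSelectorUpdated pulls the picked RaceData into the settings, DragUpdated shows
// the copy cursor, and DragPerform accepts dropped RaceData assets (recursing into folders).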
private void CompatibleRacesDropArea(Rect dropArea, SerializedProperty crossCompatibilitySettingsData)
{
Event evt = Event.current;
//make the box clickable so that the user can select raceData assets from the asset selection window
if (evt.type == EventType.MouseUp)
{
if (dropArea.Contains(evt.mousePosition))
{
compatibleRacePickerID = EditorGUIUtility.GetControlID(new GUIContent("crfObjectPicker"), FocusType.Passive);
EditorGUIUtility.ShowObjectPicker<RaceData>(null, false, "", compatibleRacePickerID);
Event.current.Use();//stops the Mismatched LayoutGroup errors
return;
}
}
if (evt.commandName == "ObjectSelectorUpdated" && EditorGUIUtility.GetObjectPickerControlID() == compatibleRacePickerID)
{
RaceData tempRaceDataAsset = EditorGUIUtility.GetObjectPickerObject() as RaceData;
if (tempRaceDataAsset)
{
AddRaceDataAsset(tempRaceDataAsset, crossCompatibilitySettingsData);
}
if(Event.current.type != EventType.Layout)
Event.current.Use();//stops the Mismatched LayoutGroup errors
return;
}
if (evt.type == EventType.DragUpdated)
{
if (dropArea.Contains(evt.mousePosition))
{
DragAndDrop.visualMode = DragAndDropVisualMode.Copy;
}
}
if (evt.type == EventType.DragPerform)
{
if (dropArea.Contains(evt.mousePosition))
{
DragAndDrop.AcceptDrag();
UnityEngine.Object[] draggedObjects = DragAndDrop.objectReferences as UnityEngine.Object[];
for (int i = 0; i < draggedObjects.Length; i++)
{
if (draggedObjects[i])
{
RaceData tempRaceDataAsset = draggedObjects[i] as RaceData;
if (tempRaceDataAsset)
{
AddRaceDataAsset(tempRaceDataAsset, crossCompatibilitySettingsData);
continue;
}
var path = AssetDatabase.GetAssetPath(draggedObjects[i]);
if (System.IO.Directory.Exists(path))
{
RecursiveScanFoldersForAssets(path, crossCompatibilitySettingsData);
}
}
}
}
}
}
private void RecursiveScanFoldersForAssets(string path, SerializedProperty crossCompatibilitySettingsData)
{
var assetFiles = System.IO.Directory.GetFiles(path, "*.asset");
foreach (var assetFile in assetFiles)
{
var tempRaceDataAsset = AssetDatabase.LoadAssetAtPath(assetFile, typeof(RaceData)) as RaceData;
if (tempRaceDataAsset)
{
AddRaceDataAsset(tempRaceDataAsset, crossCompatibilitySettingsData);
}
}
foreach (var subFolder in System.IO.Directory.GetDirectories(path))
{
RecursiveScanFoldersForAssets(subFolder.Replace('\\', '/'), crossCompatibilitySettingsData);
}
}
private void AddRaceDataAsset(RaceData raceDataAsset, SerializedProperty crossCompatibilitySettingsData)
{
if (raceDataAsset.raceName == serializedObject.FindProperty("raceName").stringValue)
return;
bool found = false;
for(int i = 0; i < crossCompatibilitySettingsData.arraySize; i++)
{
var ccRaceName = crossCompatibilitySettingsData.GetArrayElementAtIndex(i).FindPropertyRelative("ccRace").stringValue;
if (ccRaceName == raceDataAsset.raceName)
found = true;
}
if (!found)
{
crossCompatibilitySettingsData.InsertArrayElementAtIndex(crossCompatibilitySettingsData.arraySize);
crossCompatibilitySettingsData.GetArrayElementAtIndex(crossCompatibilitySettingsData.arraySize - 1).FindPropertyRelative("ccRace").stringValue = raceDataAsset.raceName;
serializedObject.ApplyModifiedProperties();
}
//if (!compatibleRaces.Contains(raceDataAsset.raceName))
// compatibleRaces.Add(raceDataAsset.raceName);
}
partial void PreInspectorGUI(ref bool result)
{
if(!wardrobeSlotListInitialized){
InitWardrobeSlotList();
}
result = AddExtraStuff();
}
private void InitWardrobeSlotList(){
var thisWardrobeSlotList = serializedObject.FindProperty ("wardrobeSlots");
if (thisWardrobeSlotList.arraySize == 0) {
race.ValidateWardrobeSlots (true);
thisWardrobeSlotList = serializedObject.FindProperty ("wardrobeSlots");
}
wardrobeSlotList = new ReorderableList (serializedObject, thisWardrobeSlotList, true, true, true, true);
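// The list is draggable with a header and add/remove buttons; the callbacks below draw
// the "Wardrobe Slots" header and a plain text field for each slot name.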
wardrobeSlotList.drawHeaderCallback = (Rect rect) =>{
EditorGUI.LabelField(rect,"Wardrobe Slots");
};
wardrobeSlotList.drawElementCallback = (Rect rect, int index, bool isActive, bool isFocused) =>{
var element = wardrobeSlotList.serializedProperty.GetArrayElementAtIndex(index);
rect.y += 2;
element.stringValue = EditorGUI.TextField(new Rect(rect.x+10, rect.y, rect.width-10, EditorGUIUtility.singleLineHeight),element.stringValue);
};
wardrobeSlotListInitialized = true;
}
public bool AddExtraStuff(){
SerializedProperty baseRaceRecipe = serializedObject.FindProperty("baseRaceRecipe");
EditorGUI.BeginChangeCheck();
EditorGUILayout.PropertyField(baseRaceRecipe, true);
if(EditorGUI.EndChangeCheck()) {
serializedObject.ApplyModifiedProperties();
}
if (wardrobeSlotList == null) {
InitWardrobeSlotList ();
}
EditorGUILayout.Space();
EditorGUI.BeginChangeCheck();
wardrobeSlotList.DoLayoutList();
if(EditorGUI.EndChangeCheck()) {
serializedObject.ApplyModifiedProperties();
if (!race.ValidateWardrobeSlots ()) {
EditorUtility.SetDirty(race);
}
}
//new CrossCompatibilitySettings
//To push any old settings in RaceData.backwardsCompatibleWith into the new crossCompatibilitySettings we have to call GetCrossCompatibleRaces() directly on the target
#pragma warning disable 618
if (race.backwardsCompatibleWith.Count > 0)
{
var cc = race.GetCrossCompatibleRaces();
if (cc.Count > 0)
serializedObject.Update();
}
#pragma warning restore 618
SerializedProperty _crossCompatibilitySettings = serializedObject.FindProperty("_crossCompatibilitySettings");
SerializedProperty _crossCompatibilitySettingsData = _crossCompatibilitySettings.FindPropertyRelative("settingsData");
//draw the new version of the crossCompatibility list that allows users to define what slots in this race's base recipe equate to in the backwards-compatible race's base recipe
_crossCompatibilitySettings.isExpanded = EditorGUILayout.Foldout(_crossCompatibilitySettings.isExpanded, "Cross Compatibility Settings");
if (_crossCompatibilitySettings.isExpanded)
{
//draw an info foldout
EditorGUI.indentLevel++;
_crossCompatibilitySettingsData.isExpanded = EditorGUILayout.Foldout(_crossCompatibilitySettingsData.isExpanded, "Help");
if (_crossCompatibilitySettingsData.isExpanded)
{
var helpText = "CrossCompatibilitySettings allows this race to wear wardrobe slots from another race, if this race has a wardrobe slot that the recipe is set to.";
helpText += " You can further configure the compatibility settings for each compatible race to define 'equivalent' slotdatas in the races' base recipes.";
helpText += " For example you could define that this races 'highpolyMaleChest' slotdata in its base recipe is equivalent to HumanMales 'MaleChest' slot data in its base recipe.";
helpText += " This would mean that any recipes which hid or applied an overlay to 'MaleChest' would hide or apply an overlay to 'highPolyMaleChest' on this race.";
helpText += " If 'Overlays Match' is unchecked then overlays in a recipe wont be applied.";
EditorGUILayout.HelpBox(helpText, MessageType.Info);
}
EditorGUI.indentLevel--;
if (baseRaceRecipe.objectReferenceValue != null)
{
Rect dropArea = GUILayoutUtility.GetRect(0.0f, 50.0f, GUILayout.ExpandWidth(true));
GUI.Box(dropArea, "Drag cross compatible Races here. Click to pick.");
CompatibleRacesDropArea(dropArea, _crossCompatibilitySettingsData);
EditorGUILayout.Space();
//update the foldouts list if the dropbox changes anything
if (_BCFoldouts.Length != _crossCompatibilitySettingsData.arraySize)
{
Array.Resize<bool>(ref _BCFoldouts, _crossCompatibilitySettingsData.arraySize);
}
//we need an up-to-date list of the slots in THIS race's base recipe
baseSlotsList.Clear();
baseSlotsNamesList.Clear();
//editing a race will require a context too because we need to get the base recipes and their slots
if (UMAContext.FindInstance() == null)
{
EditorUMAContext = UMAContext.CreateEditorContext();
}
UMAData.UMARecipe thisBaseRecipe = (baseRaceRecipe.objectReferenceValue as UMARecipeBase).GetCachedRecipe(UMAContext.Instance);
SlotData[] thisBaseSlots = thisBaseRecipe.GetAllSlots();
foreach (SlotData slot in thisBaseSlots)
{
if (slot != null)
{
baseSlotsList.Add(slot);
baseSlotsNamesList.Add(slot.slotName);
}
}
List<int> crossCompatibleSettingsToDelete = new List<int>();
//draw a foldout area for each compatible race that will show an entry for each slot in this race's base recipe
//with a picker to choose the slot from the compatible race's base recipe that it equates to
for (int i = 0; i < _crossCompatibilitySettingsData.arraySize; i++)
{
bool del = false;
var thisCCSettings = _crossCompatibilitySettingsData.GetArrayElementAtIndex(i).FindPropertyRelative("ccSettings");
var ccRaceName = _crossCompatibilitySettingsData.GetArrayElementAtIndex(i).FindPropertyRelative("ccRace").stringValue;
//this could be missing- we should show that
var label = ccRaceName;
if (GetCompatibleRaceData(ccRaceName) == null)
label += " (missing)";
GUIHelper.FoldoutBar(ref _BCFoldouts[i], label, out del);
if (del)
{
crossCompatibleSettingsToDelete.Add(i);
}
if (_BCFoldouts[i])
{
DrawCCUI(ccRaceName, baseRaceRecipe, thisCCSettings);
}
}
if (crossCompatibleSettingsToDelete.Count > 0)
{
foreach (int del in crossCompatibleSettingsToDelete)
{
_crossCompatibilitySettingsData.DeleteArrayElementAtIndex(del);
serializedObject.ApplyModifiedProperties();
}
}
}
else
{
EditorGUILayout.HelpBox("Please define this races baseRaceRecipe before trying to define its cross compatibility settings.", MessageType.Info);
}
}
EditorGUILayout.Space();
EditorGUI.BeginChangeCheck();
EditorGUILayout.PropertyField(serializedObject.FindProperty("raceThumbnails"), true);
if (EditorGUI.EndChangeCheck()) {
serializedObject.ApplyModifiedProperties();
}
return false;
}
private RaceData GetCompatibleRaceData(string raceName)
{
RaceData foundRace = null;
string[] foundRacesStrings = AssetDatabase.FindAssets("t:RaceData");
for (int i = 0; i < foundRacesStrings.Length; i++)
{
RaceData thisFoundRace = AssetDatabase.LoadAssetAtPath<RaceData>(AssetDatabase.GUIDToAssetPath(foundRacesStrings[i]));
if (thisFoundRace.raceName == raceName)
{
foundRace = thisFoundRace;
break;
}
}
return foundRace;
}
private void DrawCCUI( string ccRaceName, SerializedProperty baseRaceRecipe, SerializedProperty thisCCSettings)
{
GUIHelper.BeginVerticalPadded(5, new Color(0.75f, 0.875f, 1f));
EditorGUILayout.LabelField("Equivalent Slots with " + ccRaceName, EditorStyles.centeredGreyMiniLabel);
if (baseRaceRecipe.objectReferenceValue == null)
{
EditorGUILayout.HelpBox("Please set this Races 'Base Race Recipe' before trying to set equivalent Slots.", MessageType.Warning);
}
else
{
//we need to get the base raceRecipeSlots for this compatible race
var ccRaceData = GetCompatibleRaceData(ccRaceName);
if (ccRaceData != null)
{
if (ccRaceData.baseRaceRecipe == null)
{
EditorGUILayout.HelpBox("Please set " + ccRaceData.raceName + " Races 'Base Race Recipe' before trying to set equivalent Slots.", MessageType.Warning);
}
else
{
var ccSlotsList = new List<SlotData>();
var ccSlotsNamesList = new List<string>();
UMAData.UMARecipe ccBaseRecipe = ccRaceData.baseRaceRecipe.GetCachedRecipe(UMAContext.Instance);
SlotData[] ccBaseSlots = ccBaseRecipe.GetAllSlots();
foreach (SlotData slot in ccBaseSlots)
{
if (slot != null)
{
ccSlotsList.Add(slot);
ccSlotsNamesList.Add(slot.slotName);
}
}
//if that worked we can draw the UI for any set values and a button to add new ones
GUIHelper.BeginVerticalPadded(2, new Color(1f, 1f, 1f, 0.5f));
var headerRect = GUILayoutUtility.GetRect(0.0f, (EditorGUIUtility.singleLineHeight * 2), GUILayout.ExpandWidth(true));
var slotLabelRect = headerRect;
var gapRect = headerRect;
var cSlotLabelRect = headerRect;
var overlaysMatchLabelRect = headerRect;
var deleteRect = headerRect;
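// Slice the header row into columns: this race's slot, a small gap for the "==" marker,
// the compatible race's slot, the 'Overlays Match' toggle and a trailing column that
// lines up with each row's delete button.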
slotLabelRect.width = (headerRect.width - 50f - 22f - 22f) / 2;
gapRect.xMin = slotLabelRect.xMax;
gapRect.width = 22f;
cSlotLabelRect.xMin = gapRect.xMax;
cSlotLabelRect.width = slotLabelRect.width;
overlaysMatchLabelRect.xMin = cSlotLabelRect.xMax;
overlaysMatchLabelRect.width = 50f;
deleteRect.xMin = overlaysMatchLabelRect.xMax;
deleteRect.width = 22f;
//move this up
var tableHeaderStyle = new GUIStyle(EditorStyles.wordWrappedMiniLabel); // copy so the shared EditorStyles instance is not mutated
tableHeaderStyle.alignment = TextAnchor.MiddleCenter;
//we need a gui style for this that wraps the text and vertically centers it in the space
EditorGUI.LabelField(slotLabelRect, "This Race's Slot", tableHeaderStyle);
EditorGUI.LabelField(gapRect, "", tableHeaderStyle);
EditorGUI.LabelField(cSlotLabelRect, "Compatible Race's Slot", tableHeaderStyle);
EditorGUI.LabelField(overlaysMatchLabelRect, "Overlays Match", tableHeaderStyle);
GUIHelper.EndVerticalPadded(2);
GUIHelper.BeginVerticalPadded(2, new Color(0.75f, 0.875f, 1f));
if (thisCCSettings.arraySize > 0)
{
for (int ccsd = 0; ccsd < thisCCSettings.arraySize; ccsd++)
{
if(DrawCCUISetting(ccsd, thisCCSettings, ccSlotsNamesList))
serializedObject.ApplyModifiedProperties();
}
}
else
{
EditorGUILayout.LabelField("No equivalent slots defined", EditorStyles.miniLabel);
}
GUIHelper.EndVerticalPadded(2);
var addButtonRect = GUILayoutUtility.GetRect(0.0f, EditorGUIUtility.singleLineHeight, GUILayout.ExpandWidth(true));
addButtonRect.xMin = addButtonRect.xMax - 70f;
addButtonRect.width = 70f;
if (GUI.Button(addButtonRect, "Add"))
{
thisCCSettings.InsertArrayElementAtIndex(thisCCSettings.arraySize);
serializedObject.ApplyModifiedProperties();
}
}
}
else
{
EditorGUILayout.HelpBox("The cross compatible race "+ ccRaceName+" could not be found!", MessageType.Warning);
}
}
GUIHelper.EndVerticalPadded(5);
}
private bool DrawCCUISetting(int ccsd, SerializedProperty thisCCSettings, List<string> ccSlotsNamesList)
{
var changed = false;
var startingRect = GUILayoutUtility.GetRect(0.0f, EditorGUIUtility.singleLineHeight, GUILayout.ExpandWidth(true));
var thisSlot = thisCCSettings.GetArrayElementAtIndex(ccsd).FindPropertyRelative("raceSlot").stringValue;
var thisSlotIndex = baseSlotsNamesList.IndexOf(thisSlot);
var thisCompatibleSlot = thisCCSettings.GetArrayElementAtIndex(ccsd).FindPropertyRelative("compatibleRaceSlot").stringValue;
var thisCompatibleSlotIndex = ccSlotsNamesList.IndexOf(thisCompatibleSlot);
var thisOverlaysMatch = thisCCSettings.GetArrayElementAtIndex(ccsd).FindPropertyRelative("overlaysMatch").boolValue;
var thisSlotRect = startingRect;
var thisEqualsLabelRect = startingRect;
var thisCompatibleSlotRect = startingRect;
//var thisOverlaysLabelRect = startingRect;
var thisOverlaysMatchRect = startingRect;
var thisDeleteRect = startingRect;
thisSlotRect.width = (startingRect.width - 50f - 22f - 22f) / 2;
thisEqualsLabelRect.xMin = thisSlotRect.xMax;
thisEqualsLabelRect.width = 22f;
thisCompatibleSlotRect.xMin = thisEqualsLabelRect.xMax;
thisCompatibleSlotRect.width = thisSlotRect.width;
thisOverlaysMatchRect.xMin = thisCompatibleSlotRect.xMax + 22f;
thisOverlaysMatchRect.width = 50f - 22f;
thisDeleteRect.xMin = thisOverlaysMatchRect.xMax;
thisDeleteRect.width = 22f;
EditorGUI.BeginChangeCheck();
var newSlotIndex = EditorGUI.Popup(thisSlotRect, "", thisSlotIndex, baseSlotsNamesList.ToArray());
if (EditorGUI.EndChangeCheck())
{
if (newSlotIndex != thisSlotIndex)
{
thisCCSettings.GetArrayElementAtIndex(ccsd).FindPropertyRelative("raceSlot").stringValue = baseSlotsNamesList[newSlotIndex];
changed = true;
}
}
EditorGUI.LabelField(thisEqualsLabelRect, "==");
EditorGUI.BeginChangeCheck();
var newCompatibleSlotIndex = EditorGUI.Popup(thisCompatibleSlotRect, "", thisCompatibleSlotIndex, ccSlotsNamesList.ToArray());
if (EditorGUI.EndChangeCheck())
{
if (newCompatibleSlotIndex != thisCompatibleSlotIndex)
{
thisCCSettings.GetArrayElementAtIndex(ccsd).FindPropertyRelative("compatibleRaceSlot").stringValue = ccSlotsNamesList[newCompatibleSlotIndex];
/*var ccSlotsOverlays = ccSlotsList[newCompatibleSlotIndex].GetOverlayList();
thisCCSettings.GetArrayElementAtIndex(ccsd).FindPropertyRelative("compatibleRaceSlotOverlays").arraySize = ccSlotsOverlays.Count;
for (int ccai = 0; ccai < ccSlotsOverlays.Count; ccai++)
thisCCSettings.GetArrayElementAtIndex(ccsd).FindPropertyRelative("compatibleRaceSlotOverlays").GetArrayElementAtIndex(ccai).stringValue = ccSlotsOverlays[ccai].overlayName;*/
changed = true;
}
}
//we need a gui style for this that centers this horizontally
EditorGUI.BeginChangeCheck();
var newOverlaysMatch = EditorGUI.ToggleLeft(thisOverlaysMatchRect, " ", thisOverlaysMatch);
if (EditorGUI.EndChangeCheck())
{
if (newOverlaysMatch != thisOverlaysMatch)
{
thisCCSettings.GetArrayElementAtIndex(ccsd).FindPropertyRelative("overlaysMatch").boolValue = newOverlaysMatch;
changed = true;
}
}
if (GUI.Button(thisDeleteRect, "X", EditorStyles.miniButton))
{
thisCCSettings.DeleteArrayElementAtIndex(ccsd);
changed = true;
}
//ApplyModifiedProperties is deliberately left to the caller, which applies it when this method returns true.
GUILayout.Space(2f);
return changed;
}
}
}
#endif
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.IO;
using System.Xml;
using System.Xml.XPath;
using System.Xml.Schema;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Text;
using System.Globalization;
using System.Reflection;
using System.Reflection.Emit;
using System.Xml.Xsl.Qil;
using System.Xml.Xsl.IlGen;
using System.ComponentModel;
using MS.Internal.Xml.XPath;
using System.Runtime.Versioning;
namespace System.Xml.Xsl.Runtime
{
/// <summary>
/// XmlQueryRuntime is passed as the first parameter to all generated query methods.
///
/// XmlQueryRuntime contains runtime support for generated ILGen queries:
/// 1. Stack of output writers (stack handles nested document construction)
/// 2. Manages list of all xml types that are used within the query
/// 3. Manages list of all atomized names that are used within the query
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class XmlQueryRuntime
{
// Early-Bound Library Objects
private XmlQueryContext _ctxt;
private XsltLibrary _xsltLib;
private EarlyBoundInfo[] _earlyInfo;
private object[] _earlyObjects;
// Global variables and parameters
private string[] _globalNames;
private object[] _globalValues;
// Names, prefix mappings, and name filters
private XmlNameTable _nameTableQuery;
private string[] _atomizedNames; // Names after atomization
private XmlNavigatorFilter[] _filters; // Name filters (contain atomized names)
private StringPair[][] _prefixMappingsList; // Lists of prefix mappings (used to resolve computed names)
// Xml types
private XmlQueryType[] _types;
// Collations
private XmlCollation[] _collations;
// Document ordering
private DocumentOrderComparer _docOrderCmp;
// Indexes
private ArrayList[] _indexes;
// Output construction
private XmlQueryOutput _output;
private Stack<XmlQueryOutput> _stkOutput;
//-----------------------------------------------
// Constructors
//-----------------------------------------------
/// <summary>
/// This constructor is internal so that external users cannot construct it (and therefore we do not have to test it separately).
/// </summary>
internal XmlQueryRuntime(XmlQueryStaticData data, object defaultDataSource, XmlResolver dataSources, XsltArgumentList argList, XmlSequenceWriter seqWrt)
{
Debug.Assert(data != null);
string[] names = data.Names;
Int32Pair[] filters = data.Filters;
WhitespaceRuleLookup wsRules;
int i;
// Early-Bound Library Objects
wsRules = (data.WhitespaceRules != null && data.WhitespaceRules.Count != 0) ? new WhitespaceRuleLookup(data.WhitespaceRules) : null;
_ctxt = new XmlQueryContext(this, defaultDataSource, dataSources, argList, wsRules);
_xsltLib = null;
_earlyInfo = data.EarlyBound;
_earlyObjects = (_earlyInfo != null) ? new object[_earlyInfo.Length] : null;
// Global variables and parameters
_globalNames = data.GlobalNames;
_globalValues = (_globalNames != null) ? new object[_globalNames.Length] : null;
// Names
_nameTableQuery = _ctxt.QueryNameTable;
_atomizedNames = null;
if (names != null)
{
// Atomize all names in "nameTableQuery". Use names from the default data source's
// name table when possible.
XmlNameTable nameTableDefault = _ctxt.DefaultNameTable;
_atomizedNames = new string[names.Length];
if (nameTableDefault != _nameTableQuery && nameTableDefault != null)
{
// Ensure that atomized names from the default data source are added to the
// name table used in this query
for (i = 0; i < names.Length; i++)
{
string name = nameTableDefault.Get(names[i]);
_atomizedNames[i] = _nameTableQuery.Add(name ?? names[i]);
}
}
else
{
// Enter names into nametable used in this query
for (i = 0; i < names.Length; i++)
_atomizedNames[i] = _nameTableQuery.Add(names[i]);
}
}
// Name filters
_filters = null;
if (filters != null)
{
// Construct name filters. Each pair of integers in the filters[] array specifies the
// (localName, namespaceUri) of the NameFilter to be created.
_filters = new XmlNavigatorFilter[filters.Length];
for (i = 0; i < filters.Length; i++)
_filters[i] = XmlNavNameFilter.Create(_atomizedNames[filters[i].Left], _atomizedNames[filters[i].Right]);
}
// Prefix mapping lists
_prefixMappingsList = data.PrefixMappingsList;
// Xml types
_types = data.Types;
// Xml collations
_collations = data.Collations;
// Document ordering
_docOrderCmp = new DocumentOrderComparer();
// Indexes
_indexes = null;
// Output construction
_stkOutput = new Stack<XmlQueryOutput>(16);
_output = new XmlQueryOutput(this, seqWrt);
}
//-----------------------------------------------
// Debugger Utility Methods
//-----------------------------------------------
/// <summary>
/// Return array containing the names of all the global variables and parameters used in this query, in this format:
/// {namespace}prefix:local-name
/// </summary>
public string[] DebugGetGlobalNames()
{
return _globalNames;
}
/// <summary>
/// Get the value of a global value having the specified name. Always return the global value as a list of XPathItem.
/// Return null if there is no global value having the specified name.
/// </summary>
public IList DebugGetGlobalValue(string name)
{
for (int idx = 0; idx < _globalNames.Length; idx++)
{
if (_globalNames[idx] == name)
{
Debug.Assert(IsGlobalComputed(idx), "Cannot get the value of a global value until it has been computed.");
Debug.Assert(_globalValues[idx] is IList<XPathItem>, "Only debugger should call this method, and all global values should have type item* in debugging scenarios.");
return (IList)_globalValues[idx];
}
}
return null;
}
/// <summary>
/// Set the value of a global value having the specified name. If there is no such value, this method is a no-op.
/// </summary>
public void DebugSetGlobalValue(string name, object value)
{
for (int idx = 0; idx < _globalNames.Length; idx++)
{
if (_globalNames[idx] == name)
{
Debug.Assert(IsGlobalComputed(idx), "Cannot get the value of a global value until it has been computed.");
Debug.Assert(_globalValues[idx] is IList<XPathItem>, "Only debugger should call this method, and all global values should have type item* in debugging scenarios.");
// Always convert "value" to a list of XPathItem using the item* converter
_globalValues[idx] = (IList<XPathItem>)XmlAnyListConverter.ItemList.ChangeType(value, typeof(XPathItem[]), null);
break;
}
}
}
/// <summary>
/// Convert sequence to its appropriate XSLT type and return to caller.
/// </summary>
public object DebugGetXsltValue(IList seq)
{
if (seq != null && seq.Count == 1)
{
XPathItem item = seq[0] as XPathItem;
if (item != null && !item.IsNode)
{
return item.TypedValue;
}
else if (item is RtfNavigator)
{
return ((RtfNavigator)item).ToNavigator();
}
}
return seq;
}
//-----------------------------------------------
// Early-Bound Library Objects
//-----------------------------------------------
internal const BindingFlags EarlyBoundFlags = BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static;
internal const BindingFlags LateBoundFlags = BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static;
/// <summary>
/// Return the object that manages external user context information such as data sources, parameters, extension objects, etc.
/// </summary>
public XmlQueryContext ExternalContext
{
get { return _ctxt; }
}
/// <summary>
/// Return the object that manages the state needed to implement various Xslt functions.
/// </summary>
public XsltLibrary XsltFunctions
{
get
{
if (_xsltLib == null)
{
_xsltLib = new XsltLibrary(this);
}
return _xsltLib;
}
}
/// <summary>
/// Get the early-bound extension object identified by "index". If it does not yet exist, create an instance using the
/// corresponding ConstructorInfo.
/// </summary>
public object GetEarlyBoundObject(int index)
{
object obj;
Debug.Assert(_earlyObjects != null && index < _earlyObjects.Length, "Early bound object does not exist");
obj = _earlyObjects[index];
if (obj == null)
{
// Early-bound object does not yet exist, so create it now
obj = _earlyInfo[index].CreateObject();
_earlyObjects[index] = obj;
}
return obj;
}
/// <summary>
/// Return true if the early bound object identified by "namespaceUri" contains a method that matches "name".
/// </summary>
public bool EarlyBoundFunctionExists(string name, string namespaceUri)
{
if (_earlyInfo == null)
return false;
for (int idx = 0; idx < _earlyInfo.Length; idx++)
{
if (namespaceUri == _earlyInfo[idx].NamespaceUri)
return new XmlExtensionFunction(name, namespaceUri, -1, _earlyInfo[idx].EarlyBoundType, EarlyBoundFlags).CanBind();
}
return false;
}
//-----------------------------------------------
// Global variables and parameters
//-----------------------------------------------
/// <summary>
/// Return true if the global value specified by index was previously computed.
/// </summary>
public bool IsGlobalComputed(int index)
{
return _globalValues[index] != null;
}
/// <summary>
/// Return the value that is bound to the global variable or parameter specified by idxValue.
/// If the value has not yet been computed, then compute it now and store it in this.globalValues.
/// </summary>
public object GetGlobalValue(int index)
{
Debug.Assert(IsGlobalComputed(index), "Cannot get the value of a global value until it has been computed.");
return _globalValues[index];
}
/// <summary>
/// Return the value that is bound to the global variable or parameter specified by idxValue.
/// If the value has not yet been computed, then compute it now and store it in this.globalValues.
/// </summary>
public void SetGlobalValue(int index, object value)
{
Debug.Assert(!IsGlobalComputed(index), "Global value should only be set once.");
_globalValues[index] = value;
}
//-----------------------------------------------
// Names, prefix mappings, and name filters
//-----------------------------------------------
/// <summary>
/// Return the name table used to atomize all names used by the query.
/// </summary>
public XmlNameTable NameTable
{
get { return _nameTableQuery; }
}
/// <summary>
/// Get the atomized name at the specified index in the array of names.
/// </summary>
public string GetAtomizedName(int index)
{
Debug.Assert(_atomizedNames != null);
return _atomizedNames[index];
}
/// <summary>
/// Get the name filter at the specified index in the array of filters.
/// </summary>
public XmlNavigatorFilter GetNameFilter(int index)
{
Debug.Assert(_filters != null);
return _filters[index];
}
/// <summary>
/// XPathNodeType.All: Filters all nodes
/// XPathNodeType.Attribute: Filters attributes
/// XPathNodeType.Namespace: Not allowed
/// XPathNodeType.XXX: Filters all nodes *except* those having XPathNodeType.XXX
/// </summary>
public XmlNavigatorFilter GetTypeFilter(XPathNodeType nodeType)
{
if (nodeType == XPathNodeType.All)
return XmlNavNeverFilter.Create();
if (nodeType == XPathNodeType.Attribute)
return XmlNavAttrFilter.Create();
return XmlNavTypeFilter.Create(nodeType);
}
/// <summary>
/// Parse the specified tag name (foo:bar) and resolve the resulting prefix. If the prefix cannot be resolved,
/// then throw an error. Return an XmlQualifiedName.
/// </summary>
public XmlQualifiedName ParseTagName(string tagName, int indexPrefixMappings)
{
string prefix, localName, ns;
// Parse the tagName as a prefix, localName pair and resolve the prefix
ParseTagName(tagName, indexPrefixMappings, out prefix, out localName, out ns);
return new XmlQualifiedName(localName, ns);
}
/// <summary>
/// Parse the specified tag name (foo:bar). Return an XmlQualifiedName consisting of the parsed local name
/// and the specified namespace.
/// </summary>
public XmlQualifiedName ParseTagName(string tagName, string ns)
{
string prefix, localName;
// Parse the tagName as a prefix, localName pair
ValidateNames.ParseQNameThrow(tagName, out prefix, out localName);
return new XmlQualifiedName(localName, ns);
}
/// <summary>
/// Parse the specified tag name (foo:bar) and resolve the resulting prefix. If the prefix cannot be resolved,
/// then throw an error. Return the prefix, localName, and namespace URI.
/// </summary>
internal void ParseTagName(string tagName, int idxPrefixMappings, out string prefix, out string localName, out string ns)
{
Debug.Assert(_prefixMappingsList != null);
// Parse the tagName as a prefix, localName pair
ValidateNames.ParseQNameThrow(tagName, out prefix, out localName);
// Map the prefix to a namespace URI
ns = null;
foreach (StringPair pair in _prefixMappingsList[idxPrefixMappings])
{
if (prefix == pair.Left)
{
ns = pair.Right;
break;
}
}
// Throw exception if prefix could not be resolved
if (ns == null)
{
// Check for mappings that are always in-scope
if (prefix.Length == 0)
ns = "";
else if (prefix.Equals("xml"))
ns = XmlReservedNs.NsXml;
// It is not correct to resolve xmlns prefix in XPath but removing it would be a breaking change.
else if (prefix.Equals("xmlns"))
ns = XmlReservedNs.NsXmlNs;
else
throw new XslTransformException(SR.Xslt_InvalidPrefix, prefix);
}
}
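// Example (illustrative; the mapping values are hypothetical): if _prefixMappingsList[idxPrefixMappings]
// contains ("bk", "urn:books"), then ParseTagName("bk:title", idxPrefixMappings, ...) yields
// prefix == "bk", localName == "title", ns == "urn:books". An unmapped prefix other than "",
// "xml", or "xmlns" results in an XslTransformException.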
/// <summary>
/// Return true if n1's LocalName and NamespaceURI properties equal n2's corresponding properties.
/// </summary>
public bool IsQNameEqual(XPathNavigator n1, XPathNavigator n2)
{
if ((object)n1.NameTable == (object)n2.NameTable)
{
// Use atomized comparison
return (object)n1.LocalName == (object)n2.LocalName && (object)n1.NamespaceURI == (object)n2.NamespaceURI;
}
return (n1.LocalName == n2.LocalName) && (n1.NamespaceURI == n2.NamespaceURI);
}
/// <summary>
/// Return true if the specified navigator's LocalName and NamespaceURI properties equal the argument names.
/// </summary>
public bool IsQNameEqual(XPathNavigator navigator, int indexLocalName, int indexNamespaceUri)
{
if ((object)navigator.NameTable == (object)_nameTableQuery)
{
// Use atomized comparison
return ((object)GetAtomizedName(indexLocalName) == (object)navigator.LocalName &&
(object)GetAtomizedName(indexNamespaceUri) == (object)navigator.NamespaceURI);
}
// Use string comparison
return (GetAtomizedName(indexLocalName) == navigator.LocalName) && (GetAtomizedName(indexNamespaceUri) == navigator.NamespaceURI);
}
/// <summary>
/// Get the Xml query type at the specified index in the array of types.
/// </summary>
internal XmlQueryType GetXmlType(int idxType)
{
Debug.Assert(_types != null);
return _types[idxType];
}
/// <summary>
/// Forward call to ChangeTypeXsltArgument(XmlQueryType, object, Type).
/// </summary>
public object ChangeTypeXsltArgument(int indexType, object value, Type destinationType)
{
return ChangeTypeXsltArgument(GetXmlType(indexType), value, destinationType);
}
/// <summary>
/// Convert from the Clr type of "value" to Clr type "destinationType" using V1 Xslt rules.
/// These rules include converting any Rtf values to Nodes.
/// </summary>
internal object ChangeTypeXsltArgument(XmlQueryType xmlType, object value, Type destinationType)
{
Debug.Assert(XmlILTypeHelper.GetStorageType(xmlType).IsAssignableFrom(value.GetType()),
"Values passed to ChangeTypeXsltArgument should be in ILGen's default Clr representation.");
Debug.Assert(destinationType == XsltConvert.ObjectType || !destinationType.IsAssignableFrom(value.GetType()),
"No need to call ChangeTypeXsltArgument since value is already assignable to destinationType " + destinationType);
switch (xmlType.TypeCode)
{
case XmlTypeCode.String:
if (destinationType == XsltConvert.DateTimeType)
value = XsltConvert.ToDateTime((string)value);
break;
case XmlTypeCode.Double:
if (destinationType != XsltConvert.DoubleType)
value = Convert.ChangeType(value, destinationType, CultureInfo.InvariantCulture);
break;
case XmlTypeCode.Node:
Debug.Assert(xmlType != XmlQueryTypeFactory.Node && xmlType != XmlQueryTypeFactory.NodeS,
"Rtf values should have been eliminated by caller.");
if (destinationType == XsltConvert.XPathNodeIteratorType)
{
value = new XPathArrayIterator((IList)value);
}
else if (destinationType == XsltConvert.XPathNavigatorArrayType)
{
// Copy sequence to XPathNavigator[]
IList<XPathNavigator> seq = (IList<XPathNavigator>)value;
XPathNavigator[] navArray = new XPathNavigator[seq.Count];
for (int i = 0; i < seq.Count; i++)
navArray[i] = seq[i];
value = navArray;
}
break;
case XmlTypeCode.Item:
{
// Only typeof(object) is supported as a destination type
if (destinationType != XsltConvert.ObjectType)
throw new XslTransformException(SR.Xslt_UnsupportedClrType, destinationType.Name);
// Convert to default, backwards-compatible representation
// 1. NodeSet: System.Xml.XPath.XPathNodeIterator
// 2. Rtf: System.Xml.XPath.XPathNavigator
// 3. Other: Default V1 representation
IList<XPathItem> seq = (IList<XPathItem>)value;
if (seq.Count == 1)
{
XPathItem item = seq[0];
if (item.IsNode)
{
// Node or Rtf
RtfNavigator rtf = item as RtfNavigator;
if (rtf != null)
value = rtf.ToNavigator();
else
value = new XPathArrayIterator((IList)value);
}
else
{
// Atomic value
value = item.TypedValue;
}
}
else
{
// Nodeset
value = new XPathArrayIterator((IList)value);
}
break;
}
}
Debug.Assert(destinationType.IsAssignableFrom(value.GetType()), "ChangeType from type " + value.GetType().Name + " to type " + destinationType.Name + " failed");
return value;
}
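// Illustrative examples of the conversions above (values are hypothetical):
//   xmlType String, destinationType DateTime          -> XsltConvert.ToDateTime("2001-01-01")
//   xmlType Node*,  destinationType XPathNodeIterator -> new XPathArrayIterator(nodeList)
//   xmlType Item*,  destinationType object            -> a single atomic item is unwrapped via item.TypedValue,
//                                                        while a node sequence is wrapped in an XPathArrayIterator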
/// <summary>
/// Forward call to ChangeTypeXsltResult(XmlQueryType, object)
/// </summary>
public object ChangeTypeXsltResult(int indexType, object value)
{
return ChangeTypeXsltResult(GetXmlType(indexType), value);
}
/// <summary>
/// Convert from the Clr type of "value" to the default Clr type that ILGen uses to represent the xml type, using
/// the conversion rules of the xml type.
/// </summary>
internal object ChangeTypeXsltResult(XmlQueryType xmlType, object value)
{
if (value == null)
throw new XslTransformException(SR.Xslt_ItemNull, string.Empty);
switch (xmlType.TypeCode)
{
case XmlTypeCode.String:
if (value.GetType() == XsltConvert.DateTimeType)
value = XsltConvert.ToString((DateTime)value);
break;
case XmlTypeCode.Double:
if (value.GetType() != XsltConvert.DoubleType)
value = ((IConvertible)value).ToDouble(null);
break;
case XmlTypeCode.Node:
if (!xmlType.IsSingleton)
{
XPathArrayIterator iter = value as XPathArrayIterator;
// Special-case XPathArrayIterator in order to avoid copies
if (iter != null && iter.AsList is XmlQueryNodeSequence)
{
value = iter.AsList as XmlQueryNodeSequence;
}
else
{
// Iterate over list and ensure it only contains nodes
XmlQueryNodeSequence seq = new XmlQueryNodeSequence();
IList list = value as IList;
if (list != null)
{
for (int i = 0; i < list.Count; i++)
seq.Add(EnsureNavigator(list[i]));
}
else
{
foreach (object o in (IEnumerable)value)
seq.Add(EnsureNavigator(o));
}
value = seq;
}
// Always sort node-set by document order
value = ((XmlQueryNodeSequence)value).DocOrderDistinct(_docOrderCmp);
}
break;
case XmlTypeCode.Item:
{
Type sourceType = value.GetType();
IXPathNavigable navigable;
// If static type is item, then infer type based on dynamic value
switch (XsltConvert.InferXsltType(sourceType).TypeCode)
{
case XmlTypeCode.Boolean:
value = new XmlQueryItemSequence(new XmlAtomicValue(XmlSchemaType.GetBuiltInSimpleType(XmlTypeCode.Boolean), value));
break;
case XmlTypeCode.Double:
value = new XmlQueryItemSequence(new XmlAtomicValue(XmlSchemaType.GetBuiltInSimpleType(XmlTypeCode.Double), ((IConvertible)value).ToDouble(null)));
break;
case XmlTypeCode.String:
if (sourceType == XsltConvert.DateTimeType)
value = new XmlQueryItemSequence(new XmlAtomicValue(XmlSchemaType.GetBuiltInSimpleType(XmlTypeCode.String), XsltConvert.ToString((DateTime)value)));
else
value = new XmlQueryItemSequence(new XmlAtomicValue(XmlSchemaType.GetBuiltInSimpleType(XmlTypeCode.String), value));
break;
case XmlTypeCode.Node:
// Support XPathNavigator[]
value = ChangeTypeXsltResult(XmlQueryTypeFactory.NodeS, value);
break;
case XmlTypeCode.Item:
// Support XPathNodeIterator
if (value is XPathNodeIterator)
{
value = ChangeTypeXsltResult(XmlQueryTypeFactory.NodeS, value);
break;
}
// Support IXPathNavigable and XPathNavigator
navigable = value as IXPathNavigable;
if (navigable != null)
{
if (value is XPathNavigator)
value = new XmlQueryNodeSequence((XPathNavigator)value);
else
value = new XmlQueryNodeSequence(navigable.CreateNavigator());
break;
}
throw new XslTransformException(SR.Xslt_UnsupportedClrType, sourceType.Name);
}
break;
}
}
Debug.Assert(XmlILTypeHelper.GetStorageType(xmlType).IsAssignableFrom(value.GetType()), "Xml type " + xmlType + " is not represented in ILGen as " + value.GetType().Name);
return value;
}
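// Illustrative examples of the reverse conversion above, assuming the declared result type is item*:
//   a double                -> wrapped as an xs:double XmlAtomicValue inside an XmlQueryItemSequence
//   an XPathNodeIterator    -> copied into an XmlQueryNodeSequence, sorted in document order with duplicates removed
//   a single XPathNavigator -> an XmlQueryNodeSequence containing just that navigator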
/// <summary>
/// Ensure that "value" is a navigator and not null.
/// </summary>
private static XPathNavigator EnsureNavigator(object value)
{
XPathNavigator nav = value as XPathNavigator;
if (nav == null)
throw new XslTransformException(SR.Xslt_ItemNull, string.Empty);
return nav;
}
/// <summary>
/// Return true if the type of every item in "seq" matches the xml type identified by "idxType".
/// </summary>
public bool MatchesXmlType(IList<XPathItem> seq, int indexType)
{
XmlQueryType typBase = GetXmlType(indexType);
XmlQueryCardinality card;
switch (seq.Count)
{
case 0: card = XmlQueryCardinality.Zero; break;
case 1: card = XmlQueryCardinality.One; break;
default: card = XmlQueryCardinality.More; break;
}
if (!(card <= typBase.Cardinality))
return false;
typBase = typBase.Prime;
for (int i = 0; i < seq.Count; i++)
{
if (!CreateXmlType(seq[i]).IsSubtypeOf(typBase))
return false;
}
return true;
}
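// Example (illustrative): a two-item sequence maps to XmlQueryCardinality.More, so it can only match
// a type whose cardinality allows more than one item (e.g. node*); in addition every item must be a
// subtype of that type's prime (item) type, which is what the loop above verifies.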
/// <summary>
/// Return true if the type of "item" matches the xml type identified by "idxType".
/// </summary>
public bool MatchesXmlType(XPathItem item, int indexType)
{
return CreateXmlType(item).IsSubtypeOf(GetXmlType(indexType));
}
/// <summary>
/// Return true if the type of "seq" is a subtype of a singleton type identified by "code".
/// </summary>
public bool MatchesXmlType(IList<XPathItem> seq, XmlTypeCode code)
{
if (seq.Count != 1)
return false;
return MatchesXmlType(seq[0], code);
}
/// <summary>
/// Return true if the type of "item" is a subtype of the type identified by "code".
/// </summary>
public bool MatchesXmlType(XPathItem item, XmlTypeCode code)
{
// All atomic type codes appear after AnyAtomicType
if (code > XmlTypeCode.AnyAtomicType)
return !item.IsNode && item.XmlType.TypeCode == code;
// Handle node code and AnyAtomicType
switch (code)
{
case XmlTypeCode.AnyAtomicType: return !item.IsNode;
case XmlTypeCode.Node: return item.IsNode;
case XmlTypeCode.Item: return true;
default:
if (!item.IsNode)
return false;
switch (((XPathNavigator)item).NodeType)
{
case XPathNodeType.Root: return code == XmlTypeCode.Document;
case XPathNodeType.Element: return code == XmlTypeCode.Element;
case XPathNodeType.Attribute: return code == XmlTypeCode.Attribute;
case XPathNodeType.Namespace: return code == XmlTypeCode.Namespace;
case XPathNodeType.Text: return code == XmlTypeCode.Text;
case XPathNodeType.SignificantWhitespace: return code == XmlTypeCode.Text;
case XPathNodeType.Whitespace: return code == XmlTypeCode.Text;
case XPathNodeType.ProcessingInstruction: return code == XmlTypeCode.ProcessingInstruction;
case XPathNodeType.Comment: return code == XmlTypeCode.Comment;
}
break;
}
Debug.Fail("XmlTypeCode " + code + " was not fully handled.");
return false;
}
/// <summary>
/// Create an XmlQueryType that represents the type of "item".
/// </summary>
private XmlQueryType CreateXmlType(XPathItem item)
{
if (item.IsNode)
{
// Rtf
RtfNavigator rtf = item as RtfNavigator;
if (rtf != null)
return XmlQueryTypeFactory.Node;
// Node
XPathNavigator nav = (XPathNavigator)item;
switch (nav.NodeType)
{
case XPathNodeType.Root:
case XPathNodeType.Element:
if (nav.XmlType == null)
return XmlQueryTypeFactory.Type(nav.NodeType, XmlQualifiedNameTest.New(nav.LocalName, nav.NamespaceURI), XmlSchemaComplexType.UntypedAnyType, false);
return XmlQueryTypeFactory.Type(nav.NodeType, XmlQualifiedNameTest.New(nav.LocalName, nav.NamespaceURI), nav.XmlType, nav.SchemaInfo.SchemaElement.IsNillable);
case XPathNodeType.Attribute:
if (nav.XmlType == null)
return XmlQueryTypeFactory.Type(nav.NodeType, XmlQualifiedNameTest.New(nav.LocalName, nav.NamespaceURI), DatatypeImplementation.UntypedAtomicType, false);
return XmlQueryTypeFactory.Type(nav.NodeType, XmlQualifiedNameTest.New(nav.LocalName, nav.NamespaceURI), nav.XmlType, false);
}
return XmlQueryTypeFactory.Type(nav.NodeType, XmlQualifiedNameTest.Wildcard, XmlSchemaComplexType.AnyType, false);
}
// Atomic value
return XmlQueryTypeFactory.Type((XmlSchemaSimpleType)item.XmlType, true);
}
//-----------------------------------------------
// Xml collations
//-----------------------------------------------
/// <summary>
/// Get a collation that was statically created.
/// </summary>
public XmlCollation GetCollation(int index)
{
Debug.Assert(_collations != null);
return _collations[index];
}
/// <summary>
/// Create a collation from a string.
/// </summary>
public XmlCollation CreateCollation(string collation)
{
return XmlCollation.Create(collation);
}
//-----------------------------------------------
// Document Ordering and Identity
//-----------------------------------------------
/// <summary>
/// Compare the relative positions of two navigators. Return -1 if navThis is before navThat, 1 if after, and
/// 0 if they are positioned to the same node.
/// </summary>
public int ComparePosition(XPathNavigator navigatorThis, XPathNavigator navigatorThat)
{
return _docOrderCmp.Compare(navigatorThis, navigatorThat);
}
/// <summary>
/// Sort the specified sequence of nodes in document order and remove duplicates, using a comparer that guarantees a stable ordering even among nodes from different documents.
/// </summary>
public IList<XPathNavigator> DocOrderDistinct(IList<XPathNavigator> seq)
{
if (seq.Count <= 1)
return seq;
XmlQueryNodeSequence nodeSeq = seq as XmlQueryNodeSequence;
if (nodeSeq == null)
nodeSeq = new XmlQueryNodeSequence(seq);
return nodeSeq.DocOrderDistinct(_docOrderCmp);
}
/// <summary>
/// Generate a unique string identifier for the specified node. Do this by asking the navigator for an identifier
/// that is unique within the document, and then prepend a document index.
/// </summary>
public string GenerateId(XPathNavigator navigator)
{
return string.Concat("ID", _docOrderCmp.GetDocumentIndex(navigator).ToString(CultureInfo.InvariantCulture), navigator.UniqueId);
}
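// Example (illustrative; the UniqueId value is hypothetical): for a node in the first document
// (document index 0) whose UniqueId is "0x2F1A", the generated identifier is "ID00x2F1A".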
//-----------------------------------------------
// Indexes
//-----------------------------------------------
/// <summary>
/// If an index having the specified Id has already been created over the "context" document, then return it
/// in "index" and return true. Otherwise, create a new, empty index and return false.
/// </summary>
public bool FindIndex(XPathNavigator context, int indexId, out XmlILIndex index)
{
XPathNavigator navRoot;
ArrayList docIndexes;
Debug.Assert(context != null);
// Get root of document
navRoot = context.Clone();
navRoot.MoveToRoot();
// Search pre-existing indexes in order to determine whether the specified index has already been created
if (_indexes != null && indexId < _indexes.Length)
{
docIndexes = (ArrayList)_indexes[indexId];
if (docIndexes != null)
{
// Search for an index defined over the specified document
for (int i = 0; i < docIndexes.Count; i += 2)
{
// If we find a matching document, then return the index saved in the next slot
if (((XPathNavigator)docIndexes[i]).IsSamePosition(navRoot))
{
index = (XmlILIndex)docIndexes[i + 1];
return true;
}
}
}
}
// Return a new, empty index
index = new XmlILIndex();
return false;
}
/// <summary>
/// Add a newly built index over the specified "context" document to the existing collection of indexes.
/// </summary>
public void AddNewIndex(XPathNavigator context, int indexId, XmlILIndex index)
{
XPathNavigator navRoot;
ArrayList docIndexes;
Debug.Assert(context != null);
// Get root of document
navRoot = context.Clone();
navRoot.MoveToRoot();
// Ensure that a slot exists for the new index
if (_indexes == null)
{
_indexes = new ArrayList[indexId + 4];
}
else if (indexId >= _indexes.Length)
{
// Resize array
ArrayList[] indexesNew = new ArrayList[indexId + 4];
Array.Copy(_indexes, 0, indexesNew, 0, _indexes.Length);
_indexes = indexesNew;
}
docIndexes = (ArrayList)_indexes[indexId];
if (docIndexes == null)
{
docIndexes = new ArrayList();
_indexes[indexId] = docIndexes;
}
docIndexes.Add(navRoot);
docIndexes.Add(index);
}
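// Layout of each docIndexes list (follows directly from the two Add calls above):
//   [rootNavA, indexA, rootNavB, indexB, ...]
// The document root stored at position i identifies the index stored at position i + 1, which is
// why FindIndex walks the list in steps of two.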
//-----------------------------------------------
// Output construction
//-----------------------------------------------
/// <summary>
/// Get output writer object.
/// </summary>
public XmlQueryOutput Output
{
get { return _output; }
}
/// <summary>
/// Start construction of a nested sequence of items. Return a new XmlQueryOutput that will be
/// used to construct this new sequence.
/// </summary>
public void StartSequenceConstruction(out XmlQueryOutput output)
{
// Push current writer
_stkOutput.Push(_output);
// Create new writers
output = _output = new XmlQueryOutput(this, new XmlCachedSequenceWriter());
}
/// <summary>
/// End construction of a nested sequence of items and return the items as an IList<XPathItem>.
/// Return the previous XmlQueryOutput via the out parameter.
/// </summary>
public IList<XPathItem> EndSequenceConstruction(out XmlQueryOutput output)
{
IList<XPathItem> seq = ((XmlCachedSequenceWriter)_output.SequenceWriter).ResultSequence;
// Restore previous XmlQueryOutput
output = _output = _stkOutput.Pop();
return seq;
}
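// Illustrative sketch of how generated code typically uses this pair (WriteString comes from
// XmlQueryOutput's XmlWriter base; the exact calls emitted by ILGen may differ):
//   runtime.StartSequenceConstruction(out output);
//   output.WriteString("item");
//   IList<XPathItem> items = runtime.EndSequenceConstruction(out output);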
/// <summary>
/// Start construction of an Rtf. Return a new XmlQueryOutput object that will be used to construct this Rtf.
/// </summary>
public void StartRtfConstruction(string baseUri, out XmlQueryOutput output)
{
// Push current writer
_stkOutput.Push(_output);
// Create new XmlQueryOutput over an Rtf writer
output = _output = new XmlQueryOutput(this, new XmlEventCache(baseUri, true));
}
/// <summary>
/// End construction of an Rtf and return it as an RtfNavigator. Return previous XmlQueryOutput object.
/// </summary>
public XPathNavigator EndRtfConstruction(out XmlQueryOutput output)
{
XmlEventCache events;
events = (XmlEventCache)_output.Writer;
// Restore previous XmlQueryOutput
output = _output = _stkOutput.Pop();
// Return Rtf as an RtfNavigator
events.EndEvents();
return new RtfTreeNavigator(events, _nameTableQuery);
}
/// <summary>
/// Construct a new RtfTextNavigator from the specified "text". This is much more efficient than
/// building the Rtf via StartRtfConstruction(), writing the string, and calling EndRtfConstruction().
/// </summary>
public XPathNavigator TextRtfConstruction(string text, string baseUri)
{
return new RtfTextNavigator(text, baseUri);
}
//-----------------------------------------------
// Miscellaneous
//-----------------------------------------------
/// <summary>
/// Report query execution information to event handler.
/// </summary>
public void SendMessage(string message)
{
_ctxt.OnXsltMessageEncountered(message);
}
/// <summary>
/// Throw an Xml exception having the specified message text.
/// </summary>
public void ThrowException(string text)
{
throw new XslTransformException(text);
}
/// <summary>
/// Position navThis to the same location as navThat.
/// </summary>
internal static XPathNavigator SyncToNavigator(XPathNavigator navigatorThis, XPathNavigator navigatorThat)
{
if (navigatorThis == null || !navigatorThis.MoveTo(navigatorThat))
return navigatorThat.Clone();
return navigatorThis;
}
/// <summary>
/// This function is called in debug mode each time the context node changes.
/// </summary>
public static int OnCurrentNodeChanged(XPathNavigator currentNode)
{
IXmlLineInfo lineInfo = currentNode as IXmlLineInfo;
// In case of a namespace node, check whether it is inherited or locally defined
if (lineInfo != null && !(currentNode.NodeType == XPathNodeType.Namespace && IsInheritedNamespace(currentNode)))
{
OnCurrentNodeChanged2(currentNode.BaseURI, lineInfo.LineNumber, lineInfo.LinePosition);
}
return 0;
}
// Returns 'true' if the current namespace node is inherited from its parent rather than defined locally.
private static bool IsInheritedNamespace(XPathNavigator node)
{
Debug.Assert(node.NodeType == XPathNodeType.Namespace);
XPathNavigator nav = node.Clone();
if (nav.MoveToParent())
{
if (nav.MoveToFirstNamespace(XPathNamespaceScope.Local))
{
do
{
if ((object)nav.LocalName == (object)node.LocalName)
{
return false;
}
} while (nav.MoveToNextNamespace(XPathNamespaceScope.Local));
}
}
return true;
}
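// Intentionally left empty: presumably a well-known location where a debugger can set a breakpoint
// to observe the base URI and line information of the new context node.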
private static void OnCurrentNodeChanged2(string baseUri, int lineNumber, int linePosition) { }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Xunit;
namespace System.Diagnostics.TraceSourceTests
{
using Method = TestTraceListener.Method;
public sealed class TraceInternalTests
{
[Fact]
public void Unindent()
{
Trace.IndentLevel = 1;
Trace.Unindent();
Assert.Equal(0, Trace.IndentLevel);
Trace.Unindent();
Assert.Equal(0, Trace.IndentLevel);
}
}
public sealed class TraceInternalTests_Default : TraceInternalTestsBase
{
// default mode: GlobalLock = true, AutoFlush = false, ThreadSafeListener = false
}
public sealed class TraceInternalTests_AutoFlush : TraceInternalTestsBase
{
internal override bool AutoFlush
{
get { return true; }
}
}
public sealed class TraceInternalTests_NoGlobalLock : TraceInternalTestsBase
{
internal override bool UseGlobalLock
{
get { return false; }
}
}
public sealed class TraceInternalTests_NoGlobalLock_AutoFlush : TraceInternalTestsBase
{
internal override bool UseGlobalLock
{
get { return false; }
}
internal override bool AutoFlush
{
get { return true; }
}
}
public sealed class TraceInternalTests_ThreadSafeListener : TraceInternalTestsBase
{
internal override bool ThreadSafeListener
{
get { return true; }
}
}
public sealed class TraceInternalTests_ThreadSafeListener_AutoFlush : TraceInternalTestsBase
{
internal override bool ThreadSafeListener
{
get { return true; }
}
internal override bool AutoFlush
{
get { return true; }
}
}
// Defines abstract tests that will be executed in different modes via the above concrete classes.
public abstract class TraceInternalTestsBase
{
public TraceInternalTestsBase()
{
TraceTestHelper.ResetState();
Trace.AutoFlush = AutoFlush;
Trace.UseGlobalLock = UseGlobalLock;
}
// properties are overridden to define different "modes" of execution
internal virtual bool UseGlobalLock
{
get
{
// ThreadSafeListener is only meaningful when not using a global lock,
// so UseGlobalLock will be auto-disabled in that mode.
return true && !ThreadSafeListener;
}
}
internal virtual bool AutoFlush
{
get { return false; }
}
internal virtual bool ThreadSafeListener
{
get { return false; }
}
private TestTraceListener GetTraceListener()
{
return new TestTraceListener(ThreadSafeListener);
}
[Fact]
public void FlushTest()
{
var listener = GetTraceListener();
Trace.Listeners.Add(listener);
Trace.Flush();
Assert.Equal(1, listener.GetCallCount(Method.Flush));
}
[Fact]
public void TraceEvent1Test()
{
var listener = GetTraceListener();
Trace.Listeners.Add(listener);
Trace.TraceError("Message");
Assert.Equal(1, listener.GetCallCount(Method.TraceEvent));
}
[Fact]
public void TraceEvent2Test()
{
var listener = GetTraceListener();
Trace.Listeners.Add(listener);
Trace.TraceError("Message", "Arg1", "Arg2");
Assert.Equal(1, listener.GetCallCount(Method.TraceEvent));
var flushExpected = AutoFlush ? 1 : 0;
Assert.Equal(flushExpected, listener.GetCallCount(Method.Flush));
}
[Fact]
public void WriteObjectTest()
{
var listener = GetTraceListener();
Trace.Listeners.Add(listener);
Trace.Write((object)"Message");
Assert.Equal(1, listener.GetCallCount(Method.Write));
var flushExpected = AutoFlush ? 1 : 0;
Assert.Equal(flushExpected, listener.GetCallCount(Method.Flush));
}
[Fact]
public void WriteTest()
{
var listener = GetTraceListener();
Trace.Listeners.Add(listener);
Trace.Write("Message");
Assert.Equal(1, listener.GetCallCount(Method.Write));
var flushExpected = AutoFlush ? 1 : 0;
Assert.Equal(flushExpected, listener.GetCallCount(Method.Flush));
}
[Fact]
public void Write2Test()
{
var listener = GetTraceListener();
Trace.Listeners.Add(listener);
Trace.Write("Message", "Category");
Assert.Equal(1, listener.GetCallCount(Method.Write));
var flushExpected = AutoFlush ? 1 : 0;
Assert.Equal(flushExpected, listener.GetCallCount(Method.Flush));
}
[Fact]
public void WriteObject2Test()
{
var listener = GetTraceListener();
Trace.Listeners.Add(listener);
Trace.Write((object)"Message", "Category");
Assert.Equal(1, listener.GetCallCount(Method.Write));
var flushExpected = AutoFlush ? 1 : 0;
Assert.Equal(flushExpected, listener.GetCallCount(Method.Flush));
}
[Fact]
public void WriteLineTest()
{
var listener = GetTraceListener();
Trace.Listeners.Add(listener);
Trace.WriteLine("Message");
Assert.Equal(1, listener.GetCallCount(Method.WriteLine));
var flushExpected = AutoFlush ? 1 : 0;
Assert.Equal(flushExpected, listener.GetCallCount(Method.Flush));
}
[Fact]
public void WriteLineObjectTest()
{
var listener = GetTraceListener();
Trace.Listeners.Add(listener);
Trace.WriteLine((object)"Message");
Assert.Equal(1, listener.GetCallCount(Method.WriteLine));
var flushExpected = AutoFlush ? 1 : 0;
Assert.Equal(flushExpected, listener.GetCallCount(Method.Flush));
}
[Fact]
public void WriteLine2Test()
{
var listener = GetTraceListener();
Trace.Listeners.Add(listener);
Trace.WriteLine("Message", "Category");
Assert.Equal(1, listener.GetCallCount(Method.WriteLine));
var flushExpected = AutoFlush ? 1 : 0;
Assert.Equal(flushExpected, listener.GetCallCount(Method.Flush));
}
[Fact]
public void WriteLineObject2Test()
{
var listener = GetTraceListener();
Trace.Listeners.Add(listener);
Trace.WriteLine((object)"Message", "Category");
Assert.Equal(1, listener.GetCallCount(Method.WriteLine));
var flushExpected = AutoFlush ? 1 : 0;
Assert.Equal(flushExpected, listener.GetCallCount(Method.Flush));
}
[Fact]
public void FailTest()
{
var listener = GetTraceListener();
// We have to clear the listeners list on Trace since there is a trace listener by default with AssertUiEnabled = true in Desktop and that will pop up an assert window with Trace.Fail
Trace.Listeners.Clear();
Trace.Listeners.Add(listener);
Trace.Fail("Message");
Assert.Equal(1, listener.GetCallCount(Method.Fail));
var flushExpected = AutoFlush ? 1 : 0;
Assert.Equal(flushExpected, listener.GetCallCount(Method.Flush));
}
[Fact]
public void Fail2Test()
{
var listener = GetTraceListener();
// We have to clear the listeners list on Trace since there is a trace listener by default with AssertUiEnabled = true in Desktop and that will pop up an assert window with Trace.Fail
Trace.Listeners.Clear();
Trace.Listeners.Add(listener);
Trace.Fail("Message", "Category");
Assert.Equal(1, listener.GetCallCount(Method.Fail));
var flushExpected = AutoFlush ? 1 : 0;
Assert.Equal(flushExpected, listener.GetCallCount(Method.Flush));
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Http;
using System.Web.Http.Description;
using System.Xml.Serialization;
using Newtonsoft.Json;
namespace AllHttpMethods.Areas.HelpPage.ModelDescriptions
{
/// <summary>
/// Generates model descriptions for given types.
/// </summary>
public class ModelDescriptionGenerator
{
// Modify this to support more data annotation attributes.
private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>>
{
{ typeof(RequiredAttribute), a => "Required" },
{ typeof(RangeAttribute), a =>
{
RangeAttribute range = (RangeAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum);
}
},
{ typeof(MaxLengthAttribute), a =>
{
MaxLengthAttribute maxLength = (MaxLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length);
}
},
{ typeof(MinLengthAttribute), a =>
{
MinLengthAttribute minLength = (MinLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length);
}
},
{ typeof(StringLengthAttribute), a =>
{
StringLengthAttribute strLength = (StringLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength);
}
},
{ typeof(DataTypeAttribute), a =>
{
DataTypeAttribute dataType = (DataTypeAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString());
}
},
{ typeof(RegularExpressionAttribute), a =>
{
RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern);
}
},
};
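// Example (illustrative): a property annotated with [Range(1, 10)] produces the annotation text
// "Range: inclusive between 1 and 10", and [StringLength(20, MinimumLength = 5)] produces
// "String length: inclusive between 5 and 20".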
// Modify this to add more default documentation entries.
private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string>
{
{ typeof(Int16), "integer" },
{ typeof(Int32), "integer" },
{ typeof(Int64), "integer" },
{ typeof(UInt16), "unsigned integer" },
{ typeof(UInt32), "unsigned integer" },
{ typeof(UInt64), "unsigned integer" },
{ typeof(Byte), "byte" },
{ typeof(Char), "character" },
{ typeof(SByte), "signed byte" },
{ typeof(Uri), "URI" },
{ typeof(Single), "decimal number" },
{ typeof(Double), "decimal number" },
{ typeof(Decimal), "decimal number" },
{ typeof(String), "string" },
{ typeof(Guid), "globally unique identifier" },
{ typeof(TimeSpan), "time interval" },
{ typeof(DateTime), "date" },
{ typeof(DateTimeOffset), "date" },
{ typeof(Boolean), "boolean" },
};
private Lazy<IModelDocumentationProvider> _documentationProvider;
public ModelDescriptionGenerator(HttpConfiguration config)
{
if (config == null)
{
throw new ArgumentNullException("config");
}
_documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider);
GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase);
}
public Dictionary<string, ModelDescription> GeneratedModels { get; private set; }
private IModelDocumentationProvider DocumentationProvider
{
get
{
return _documentationProvider.Value;
}
}
public ModelDescription GetOrCreateModelDescription(Type modelType)
{
if (modelType == null)
{
throw new ArgumentNullException("modelType");
}
Type underlyingType = Nullable.GetUnderlyingType(modelType);
if (underlyingType != null)
{
modelType = underlyingType;
}
ModelDescription modelDescription;
string modelName = ModelNameHelper.GetModelName(modelType);
if (GeneratedModels.TryGetValue(modelName, out modelDescription))
{
if (modelType != modelDescription.ModelType)
{
throw new InvalidOperationException(
String.Format(
CultureInfo.CurrentCulture,
"A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. " +
"Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.",
modelName,
modelDescription.ModelType.FullName,
modelType.FullName));
}
return modelDescription;
}
if (DefaultTypeDocumentation.ContainsKey(modelType))
{
return GenerateSimpleTypeModelDescription(modelType);
}
if (modelType.IsEnum)
{
return GenerateEnumTypeModelDescription(modelType);
}
if (modelType.IsGenericType)
{
Type[] genericArguments = modelType.GetGenericArguments();
if (genericArguments.Length == 1)
{
Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments);
if (enumerableType.IsAssignableFrom(modelType))
{
return GenerateCollectionModelDescription(modelType, genericArguments[0]);
}
}
if (genericArguments.Length == 2)
{
Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments);
if (dictionaryType.IsAssignableFrom(modelType))
{
return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]);
}
Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments);
if (keyValuePairType.IsAssignableFrom(modelType))
{
return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]);
}
}
}
if (modelType.IsArray)
{
Type elementType = modelType.GetElementType();
return GenerateCollectionModelDescription(modelType, elementType);
}
if (modelType == typeof(NameValueCollection))
{
return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string));
}
if (typeof(IDictionary).IsAssignableFrom(modelType))
{
return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object));
}
if (typeof(IEnumerable).IsAssignableFrom(modelType))
{
return GenerateCollectionModelDescription(modelType, typeof(object));
}
return GenerateComplexTypeModelDescription(modelType);
}
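// Example dispatch (illustrative; Customer is a hypothetical model type):
//   typeof(int)                      -> simple type description "integer"
//   typeof(List<Customer>)           -> CollectionModelDescription whose ElementDescription describes Customer
//   typeof(Dictionary<string, int>)  -> DictionaryModelDescription
//   any other class                  -> complex type description built from its public properties and fields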
// Change this to provide a different name for the member.
private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute)
{
JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>();
if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName))
{
return jsonProperty.PropertyName;
}
if (hasDataContractAttribute)
{
DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>();
if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name))
{
return dataMember.Name;
}
}
return member.Name;
}
private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute)
{
JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>();
XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>();
IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>();
NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>();
ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>();
bool hasMemberAttribute = member.DeclaringType.IsEnum ?
member.GetCustomAttribute<EnumMemberAttribute>() != null :
member.GetCustomAttribute<DataMemberAttribute>() != null;
// Display the member only if all of the following are true:
// no JsonIgnoreAttribute
// no XmlIgnoreAttribute
// no IgnoreDataMemberAttribute
// no NonSerializedAttribute
// no ApiExplorerSettingsAttribute with IgnoreApi set to true
// no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute
return jsonIgnore == null &&
xmlIgnore == null &&
ignoreDataMember == null &&
nonSerialized == null &&
(apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) &&
(!hasDataContractAttribute || hasMemberAttribute);
}
private string CreateDefaultDocumentation(Type type)
{
string documentation;
if (DefaultTypeDocumentation.TryGetValue(type, out documentation))
{
return documentation;
}
if (DocumentationProvider != null)
{
documentation = DocumentationProvider.GetDocumentation(type);
}
return documentation;
}
private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel)
{
List<ParameterAnnotation> annotations = new List<ParameterAnnotation>();
IEnumerable<Attribute> attributes = property.GetCustomAttributes();
foreach (Attribute attribute in attributes)
{
Func<object, string> textGenerator;
if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator))
{
annotations.Add(
new ParameterAnnotation
{
AnnotationAttribute = attribute,
Documentation = textGenerator(attribute)
});
}
}
// Rearrange the annotations
annotations.Sort((x, y) =>
{
// Special-case RequiredAttribute so that it shows up on top
if (x.AnnotationAttribute is RequiredAttribute)
{
return -1;
}
if (y.AnnotationAttribute is RequiredAttribute)
{
return 1;
}
// Sort the rest based on alphabetic order of the documentation
return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase);
});
foreach (ParameterAnnotation annotation in annotations)
{
propertyModel.Annotations.Add(annotation);
}
}
private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType)
{
ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType);
if (collectionModelDescription != null)
{
return new CollectionModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
ElementDescription = collectionModelDescription
};
}
return null;
}
private ModelDescription GenerateComplexTypeModelDescription(Type modelType)
{
ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
GeneratedModels.Add(complexModelDescription.Name, complexModelDescription);
bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
foreach (PropertyInfo property in properties)
{
if (ShouldDisplayMember(property, hasDataContractAttribute))
{
ParameterDescription propertyModel = new ParameterDescription
{
Name = GetMemberName(property, hasDataContractAttribute)
};
if (DocumentationProvider != null)
{
propertyModel.Documentation = DocumentationProvider.GetDocumentation(property);
}
GenerateAnnotations(property, propertyModel);
complexModelDescription.Properties.Add(propertyModel);
propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType);
}
}
FieldInfo[] fields = modelType.GetFields(BindingFlags.Public | BindingFlags.Instance);
foreach (FieldInfo field in fields)
{
if (ShouldDisplayMember(field, hasDataContractAttribute))
{
ParameterDescription propertyModel = new ParameterDescription
{
Name = GetMemberName(field, hasDataContractAttribute)
};
if (DocumentationProvider != null)
{
propertyModel.Documentation = DocumentationProvider.GetDocumentation(field);
}
complexModelDescription.Properties.Add(propertyModel);
propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType);
}
}
return complexModelDescription;
}
private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType)
{
ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
return new DictionaryModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
KeyModelDescription = keyModelDescription,
ValueModelDescription = valueModelDescription
};
}
private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType)
{
EnumTypeModelDescription enumDescription = new EnumTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static))
{
if (ShouldDisplayMember(field, hasDataContractAttribute))
{
EnumValueDescription enumValue = new EnumValueDescription
{
Name = field.Name,
Value = field.GetRawConstantValue().ToString()
};
if (DocumentationProvider != null)
{
enumValue.Documentation = DocumentationProvider.GetDocumentation(field);
}
enumDescription.Values.Add(enumValue);
}
}
GeneratedModels.Add(enumDescription.Name, enumDescription);
return enumDescription;
}
private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType)
{
ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
return new KeyValuePairModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
KeyModelDescription = keyModelDescription,
ValueModelDescription = valueModelDescription
};
}
private ModelDescription GenerateSimpleTypeModelDescription(Type modelType)
{
SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription);
return simpleModelDescription;
}
}
}
| |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="BehaviorTree.cs" company="Slash Games">
// Copyright (c) Slash Games. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Slash.AI.BehaviorTrees
{
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using Slash.AI.BehaviorTrees.Editor;
using Slash.AI.BehaviorTrees.Enums;
using Slash.AI.BehaviorTrees.Interfaces;
using Slash.AI.BehaviorTrees.Tree;
using Slash.Serialization.Utils;
/// <summary>
/// Implementation of a behavior tree.
/// </summary>
[Serializable]
public class BehaviorTree : IBehaviorTree, ICloneable
{
#region Fields
/// <summary>
/// Root of behavior tree.
/// </summary>
private ITask root;
#endregion
#region Properties
/// <summary>
/// Gets or sets the root.
/// </summary>
[XmlIgnore]
public ITask Root
{
get
{
return this.root;
}
set
{
this.root = value;
}
}
/// <summary>
/// Xml serialization for root decider.
/// </summary>
[XmlElement("Root")]
public XmlWrapper RootSerialized
{
get
{
return new XmlWrapper(this.root);
}
set
{
this.root = value.Task;
}
}
#endregion
#region Public Methods and Operators
/// <summary>
/// Creates a new object that is a copy of the current instance.
/// </summary>
/// <returns> A new object that is a copy of this instance. </returns>
/// <filterpriority>2</filterpriority>
public object Clone()
{
// Serialize and deserialize again to clone.
return SerializationUtils.DeepCopy(this);
}
/// <summary>
/// Deactivates the behavior tree for the specified agent.
/// </summary>
/// <param name="agentData"> Agent data. </param>
public void Deactivate(IAgentData agentData)
{
if (this.root == null)
{
return;
}
agentData.CurrentDeciderLevel = 0;
switch (agentData.ExecutionStatus)
{
case ExecutionStatus.Running:
{
// Deactivate root task.
this.root.Deactivate(agentData);
}
break;
}
}
/// <summary>
/// Determines whether the specified <see cref="T:BehaviorTree" /> is equal to the current <see cref="T:BehaviorTree" />.
/// </summary>
/// <returns>
/// true if the specified <see cref="T:BehaviorTree" /> is equal to the current <see cref="T:BehaviorTree" />; otherwise,
/// false.
/// </returns>
/// <param name="other">The <see cref="T:BehaviorTree" /> to compare with the current <see cref="T:BehaviorTree" />.</param>
public bool Equals(BehaviorTree other)
{
if (ReferenceEquals(null, other))
{
return false;
}
if (ReferenceEquals(this, other))
{
return true;
}
return Equals(other.root, this.root);
}
/// <summary>
/// Determines whether the specified <see cref="T:System.Object" /> is equal to the current
/// <see cref="T:System.Object" />.
/// </summary>
/// <returns>
/// true if the specified <see cref="T:System.Object" /> is equal to the current <see cref="T:System.Object" />;
/// otherwise, false.
/// </returns>
/// <param name="obj">The <see cref="T:System.Object" /> to compare with the current <see cref="T:System.Object" />. </param>
/// <filterpriority>2</filterpriority>
public override bool Equals(object obj)
{
if (ReferenceEquals(null, obj))
{
return false;
}
if (ReferenceEquals(this, obj))
{
return true;
}
if (obj.GetType() != typeof(BehaviorTree))
{
return false;
}
return this.Equals((BehaviorTree)obj);
}
/// <summary>
/// Generates a collection of active task nodes in this tree. Used for debugging only.
/// </summary>
/// <param name="agentData"> Agent data. </param>
/// <returns> Collection of active task nodes. </returns>
public ICollection<TaskNode> GetActiveTasks(IAgentData agentData)
{
if (this.root == null || agentData.ExecutionStatus != ExecutionStatus.Running)
{
return null;
}
agentData.CurrentDeciderLevel = 0;
TaskNode taskNode = new TaskNode { Task = this.root };
ICollection<TaskNode> activeTasks = new List<TaskNode> { taskNode };
this.root.GetActiveTasks(agentData, taskNode, ref activeTasks);
return activeTasks;
}
/// <summary>
/// Serves as a hash function for a particular type.
/// </summary>
/// <returns>
/// A hash code for the current <see cref="T:System.Object" />.
/// </returns>
/// <filterpriority>2</filterpriority>
public override int GetHashCode()
{
return this.root != null ? this.root.GetHashCode() : 0;
}
/// <summary>
/// Per-frame update of the behavior tree.
/// </summary>
/// <param name="agentData"> Agent data. </param>
public void Update(IAgentData agentData)
{
if (this.Root == null)
{
return;
}
agentData.CurrentDeciderLevel = 0;
agentData.PreUpdate();
switch (agentData.ExecutionStatus)
{
case ExecutionStatus.None:
case ExecutionStatus.Failed:
case ExecutionStatus.Success:
{
IDecisionData decisionData = null;
if (this.Root.Decide(agentData, ref decisionData) > 0.0f)
{
// Activate decider.
agentData.ExecutionStatus = this.Root.Activate(agentData, decisionData);
if (agentData.ExecutionStatus == ExecutionStatus.Running)
{
// Update decider.
agentData.ExecutionStatus = this.Root.Update(agentData);
}
}
}
break;
case ExecutionStatus.Running:
{
// Update decider.
agentData.ExecutionStatus = this.Root.Update(agentData);
}
break;
}
agentData.PostUpdate();
}
#endregion
}
}
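// Usage sketch (not part of the original file): shows how the BehaviorTree API above is
// typically driven each frame. Concrete ITask and IAgentData implementations are assumed
// to be supplied by the caller; none are invented here.
namespace Slash.AI.BehaviorTrees.Examples
{
    using Slash.AI.BehaviorTrees.Interfaces;
    public static class BehaviorTreeUsageSketch
    {
        public static void RunOneFrame(ITask rootTask, IAgentData agentData)
        {
            // Build a tree around the supplied root task.
            var tree = new BehaviorTree { Root = rootTask };
            // Decide, activate and update the root task for this frame.
            tree.Update(agentData);
            // Inspect the currently active tasks (null unless the tree is running).
            var activeTasks = tree.GetActiveTasks(agentData);
            // Stop execution when the agent is done.
            tree.Deactivate(agentData);
        }
    }
}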
| |
// FastZip.cs
//
// Copyright 2005 John Reilly
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; either version 2
// of the License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//
// Linking this library statically or dynamically with other modules is
// making a combined work based on this library. Thus, the terms and
// conditions of the GNU General Public License cover the whole
// combination.
//
// As a special exception, the copyright holders of this library give you
// permission to link this library with independent modules to produce an
// executable, regardless of the license terms of these independent
// modules, and to copy and distribute the resulting executable under
// terms of your choice, provided that you also meet, for each linked
// independent module, the terms and conditions of the license of that
// module. An independent module is a module which is not derived from
// or based on this library. If you modify this library, you may extend
// this exception to your version of the library, but you are not
// obligated to do so. If you do not wish to do so, delete this
// exception statement from your version.
using System;
using System.IO;
using ICSharpCode.SharpZipLib.Core;
namespace ICSharpCode.SharpZipLib.Zip
{
/// <summary>
/// FastZipEvents supports all events applicable to <see cref="FastZip">FastZip</see> operations.
/// </summary>
public class FastZipEvents
{
/// <summary>
/// Delegate to invoke when processing directories.
/// </summary>
public ProcessDirectoryHandler ProcessDirectory;
/// <summary>
/// Delegate to invoke when processing files.
/// </summary>
public ProcessFileHandler ProcessFile;
/// <summary>
/// Delegate to invoke during processing of files.
/// </summary>
public ProgressHandler Progress;
/// <summary>
/// Delegate to invoke when processing for a file has been completed.
/// </summary>
public CompletedFileHandler CompletedFile;
/// <summary>
/// Delegate to invoke when processing directory failures.
/// </summary>
public DirectoryFailureHandler DirectoryFailure;
/// <summary>
/// Delegate to invoke when processing file failures.
/// </summary>
public FileFailureHandler FileFailure;
/// <summary>
/// Raise the <see cref="DirectoryFailure">directory failure</see> event.
/// </summary>
/// <param name="directory">The directory causing the failure.</param>
/// <param name="e">The exception for this event.</param>
/// <returns>A boolean indicating if execution should continue or not.</returns>
public bool OnDirectoryFailure(string directory, Exception e)
{
bool result = false;
DirectoryFailureHandler handler = DirectoryFailure;
if ( handler != null ) {
ScanFailureEventArgs args = new ScanFailureEventArgs(directory, e);
handler(this, args);
result = args.ContinueRunning;
}
return result;
}
/// <summary>
/// Fires the <see cref="FileFailure"> file failure handler delegate</see>.
/// </summary>
/// <param name="file">The file causing the failure.</param>
/// <param name="e">The exception for this failure.</param>
/// <returns>A boolean indicating if execution should continue or not.</returns>
public bool OnFileFailure(string file, Exception e)
{
FileFailureHandler handler = FileFailure;
bool result = (handler != null);
if ( result ) {
ScanFailureEventArgs args = new ScanFailureEventArgs(file, e);
handler(this, args);
result = args.ContinueRunning;
}
return result;
}
/// <summary>
/// Fires the <see cref="ProcessFile">ProcessFile delegate</see>.
/// </summary>
/// <param name="file">The file being processed.</param>
/// <returns>A boolean indicating if execution should continue or not.</returns>
public bool OnProcessFile(string file)
{
bool result = true;
ProcessFileHandler handler = ProcessFile;
if ( handler != null ) {
ScanEventArgs args = new ScanEventArgs(file);
handler(this, args);
result = args.ContinueRunning;
}
return result;
}
/// <summary>
/// Fires the <see cref="CompletedFile"/> delegate
/// </summary>
/// <param name="file">The file whose processing has been completed.</param>
/// <returns>A boolean indicating if execution should continue or not.</returns>
public bool OnCompletedFile(string file)
{
bool result = true;
CompletedFileHandler handler = CompletedFile;
if ( handler != null ) {
ScanEventArgs args = new ScanEventArgs(file);
handler(this, args);
result = args.ContinueRunning;
}
return result;
}
/// <summary>
/// Fires the <see cref="ProcessDirectory">process directory</see> delegate.
/// </summary>
/// <param name="directory">The directory being processed.</param>
/// <param name="hasMatchingFiles">Flag indicating if the directory has matching files as determined by the current filter.</param>
/// <returns>A <see cref="bool"/> of true if the operation should continue; false otherwise.</returns>
public bool OnProcessDirectory(string directory, bool hasMatchingFiles)
{
bool result = true;
ProcessDirectoryHandler handler = ProcessDirectory;
if ( handler != null ) {
DirectoryEventArgs args = new DirectoryEventArgs(directory, hasMatchingFiles);
handler(this, args);
result = args.ContinueRunning;
}
return result;
}
/// <summary>
/// The minimum timespan between <see cref="Progress"/> events.
/// </summary>
/// <value>The minimum period of time between <see cref="Progress"/> events.</value>
/// <seealso cref="Progress"/>
/// <remarks>The default interval is three seconds.</remarks>
public TimeSpan ProgressInterval
{
get { return progressInterval_; }
set { progressInterval_ = value; }
}
#region Instance Fields
TimeSpan progressInterval_ = TimeSpan.FromSeconds(3);
#endregion
}
/// <summary>
/// FastZip provides facilities for creating and extracting zip files.
/// </summary>
public class FastZip
{
#region Enumerations
/// <summary>
/// Defines the desired handling when overwriting files during extraction.
/// </summary>
public enum Overwrite
{
/// <summary>
/// Prompt the user to confirm overwriting
/// </summary>
Prompt,
/// <summary>
/// Never overwrite files.
/// </summary>
Never,
/// <summary>
/// Always overwrite files.
/// </summary>
Always
}
#endregion
#region Constructors
/// <summary>
/// Initialise a default instance of <see cref="FastZip"/>.
/// </summary>
public FastZip()
{
}
/// <summary>
/// Initialise a new instance of <see cref="FastZip"/>
/// </summary>
/// <param name="events">The <see cref="FastZipEvents">events</see> to use during operations.</param>
public FastZip(FastZipEvents events)
{
events_ = events;
}
#endregion
#region Properties
/// <summary>
/// Get/set a value indicating whether empty directories should be created.
/// </summary>
public bool CreateEmptyDirectories
{
get { return createEmptyDirectories_; }
set { createEmptyDirectories_ = value; }
}
#if !NETCF_1_0
/// <summary>
/// Get / set the password value.
/// </summary>
public string Password
{
get { return password_; }
set { password_ = value; }
}
#endif
/// <summary>
/// Get or set the <see cref="INameTransform"></see> active when creating Zip files.
/// </summary>
/// <seealso cref="EntryFactory"></seealso>
public INameTransform NameTransform
{
get { return entryFactory_.NameTransform; }
set {
entryFactory_.NameTransform = value;
}
}
/// <summary>
/// Get or set the <see cref="IEntryFactory"></see> active when creating Zip files.
/// </summary>
public IEntryFactory EntryFactory
{
get { return entryFactory_; }
set {
if ( value == null ) {
entryFactory_ = new ZipEntryFactory();
}
else {
entryFactory_ = value;
}
}
}
/// <summary>
/// Gets or sets the setting for <see cref="UseZip64">Zip64 handling when writing.</see>
/// </summary>
/// <remarks>
/// The default value is dynamic which is not backwards compatible with old
/// programs and can cause problems with XP's built in compression which can't
/// read Zip64 archives. However it does avoid the situation where a large file
/// is added and cannot be completed correctly.
/// NOTE: Setting the size for entries before they are added is the best solution!
/// By default the EntryFactory used by FastZip will set the file size.
/// </remarks>
public UseZip64 UseZip64
{
get { return useZip64_; }
set { useZip64_ = value; }
}
/// <summary>
/// Get/set a value indicating whether file dates and times should
/// be restored when extracting files from an archive.
/// </summary>
/// <remarks>The default value is false.</remarks>
public bool RestoreDateTimeOnExtract
{
get {
return restoreDateTimeOnExtract_;
}
set {
restoreDateTimeOnExtract_ = value;
}
}
/// <summary>
/// Get/set a value indicating whether file attributes should
/// be restored during extract operations.
/// </summary>
public bool RestoreAttributesOnExtract
{
get { return restoreAttributesOnExtract_; }
set { restoreAttributesOnExtract_ = value; }
}
#endregion
#region Delegates
/// <summary>
/// Delegate called when confirming overwriting of files.
/// </summary>
public delegate bool ConfirmOverwriteDelegate(string fileName);
#endregion
#region CreateZip
/// <summary>
/// Create a zip file.
/// </summary>
/// <param name="zipFileName">The name of the zip file to create.</param>
/// <param name="sourceDirectory">The directory to source files from.</param>
/// <param name="recurse">True to recurse directories, false for no recursion.</param>
/// <param name="fileFilter">The <see cref="PathFilter">file filter</see> to apply.</param>
/// <param name="directoryFilter">The <see cref="PathFilter">directory filter</see> to apply.</param>
public void CreateZip(string zipFileName, string sourceDirectory,
bool recurse, string fileFilter, string directoryFilter)
{
CreateZip(File.Create(zipFileName), sourceDirectory, recurse, fileFilter, directoryFilter);
}
/// <summary>
/// Create a zip file/archive.
/// </summary>
/// <param name="zipFileName">The name of the zip file to create.</param>
/// <param name="sourceDirectory">The directory to obtain files and directories from.</param>
/// <param name="recurse">True to recurse directories, false for no recursion.</param>
/// <param name="fileFilter">The file filter to apply.</param>
public void CreateZip(string zipFileName, string sourceDirectory, bool recurse, string fileFilter)
{
CreateZip(File.Create(zipFileName), sourceDirectory, recurse, fileFilter, null);
}
/// <summary>
/// Create a zip archive sending output to the <paramref name="outputStream"/> passed.
/// </summary>
/// <param name="outputStream">The stream to write archive data to.</param>
/// <param name="sourceDirectory">The directory to source files from.</param>
/// <param name="recurse">True to recurse directories, false for no recursion.</param>
/// <param name="fileFilter">The <see cref="PathFilter">file filter</see> to apply.</param>
/// <param name="directoryFilter">The <see cref="PathFilter">directory filter</see> to apply.</param>
/// <remarks>The <paramref name="outputStream"/> is closed after creation.</remarks>
public void CreateZip(Stream outputStream, string sourceDirectory, bool recurse, string fileFilter, string directoryFilter)
{
NameTransform = new ZipNameTransform(sourceDirectory);
sourceDirectory_ = sourceDirectory;
using ( outputStream_ = new ZipOutputStream(outputStream) ) {
#if !NETCF_1_0
if ( password_ != null ) {
outputStream_.Password = password_;
}
#endif
outputStream_.UseZip64 = UseZip64;
FileSystemScanner scanner = new FileSystemScanner(fileFilter, directoryFilter);
scanner.ProcessFile += new ProcessFileHandler(ProcessFile);
if ( this.CreateEmptyDirectories ) {
scanner.ProcessDirectory += new ProcessDirectoryHandler(ProcessDirectory);
}
if (events_ != null) {
if ( events_.FileFailure != null ) {
scanner.FileFailure += events_.FileFailure;
}
if ( events_.DirectoryFailure != null ) {
scanner.DirectoryFailure += events_.DirectoryFailure;
}
}
scanner.Scan(sourceDirectory, recurse);
}
}
#endregion
#region ExtractZip
/// <summary>
/// Extract the contents of a zip file.
/// </summary>
/// <param name="zipFileName">The zip file to extract from.</param>
/// <param name="targetDirectory">The directory to save extracted information in.</param>
/// <param name="fileFilter">A filter to apply to files.</param>
public void ExtractZip(string zipFileName, string targetDirectory, string fileFilter)
{
ExtractZip(zipFileName, targetDirectory, Overwrite.Always, null, fileFilter, null, restoreDateTimeOnExtract_);
}
/// <summary>
/// Extract the contents of a zip file.
/// </summary>
/// <param name="zipFileName">The zip file to extract from.</param>
/// <param name="targetDirectory">The directory to save extracted information in.</param>
/// <param name="overwrite">The style of <see cref="Overwrite">overwriting</see> to apply.</param>
/// <param name="confirmDelegate">A delegate to invoke when confirming overwriting.</param>
/// <param name="fileFilter">A filter to apply to files.</param>
/// <param name="directoryFilter">A filter to apply to directories.</param>
/// <param name="restoreDateTime">Flag indicating whether to restore the date and time for extracted files.</param>
public void ExtractZip(string zipFileName, string targetDirectory,
Overwrite overwrite, ConfirmOverwriteDelegate confirmDelegate,
string fileFilter, string directoryFilter, bool restoreDateTime)
{
Stream inputStream = File.Open(zipFileName, FileMode.Open, FileAccess.Read, FileShare.Read);
ExtractZip(inputStream, targetDirectory, overwrite, confirmDelegate, fileFilter, directoryFilter, restoreDateTime, true);
}
/// <summary>
/// Extract the contents of a zip file held in a stream.
/// </summary>
/// <param name="inputStream">The seekable input stream containing the zip to extract from.</param>
/// <param name="targetDirectory">The directory to save extracted information in.</param>
/// <param name="overwrite">The style of <see cref="Overwrite">overwriting</see> to apply.</param>
/// <param name="confirmDelegate">A delegate to invoke when confirming overwriting.</param>
/// <param name="fileFilter">A filter to apply to files.</param>
/// <param name="directoryFilter">A filter to apply to directories.</param>
/// <param name="restoreDateTime">Flag indicating whether to restore the date and time for extracted files.</param>
/// <param name="isStreamOwner">Flag indicating whether the inputStream will be closed by this method.</param>
public void ExtractZip(Stream inputStream, string targetDirectory,
Overwrite overwrite, ConfirmOverwriteDelegate confirmDelegate,
string fileFilter, string directoryFilter, bool restoreDateTime,
bool isStreamOwner)
{
if ((overwrite == Overwrite.Prompt) && (confirmDelegate == null)) {
throw new ArgumentNullException("confirmDelegate");
}
continueRunning_ = true;
overwrite_ = overwrite;
confirmDelegate_ = confirmDelegate;
extractNameTransform_ = new WindowsNameTransform(targetDirectory);
fileFilter_ = new NameFilter(fileFilter);
directoryFilter_ = new NameFilter(directoryFilter);
restoreDateTimeOnExtract_ = restoreDateTime;
using (zipFile_ = new ZipFile(inputStream)) {
#if !NETCF_1_0
if (password_ != null) {
zipFile_.Password = password_;
}
#endif
zipFile_.IsStreamOwner = isStreamOwner;
System.Collections.IEnumerator enumerator = zipFile_.GetEnumerator();
while (continueRunning_ && enumerator.MoveNext()) {
ZipEntry entry = (ZipEntry)enumerator.Current;
if (entry.IsFile)
{
// TODO Path.GetDirectory can fail here on invalid characters.
if (directoryFilter_.IsMatch(Path.GetDirectoryName(entry.Name)) && fileFilter_.IsMatch(entry.Name)) {
ExtractEntry(entry);
}
}
else if (entry.IsDirectory) {
if (directoryFilter_.IsMatch(entry.Name) && CreateEmptyDirectories) {
ExtractEntry(entry);
}
}
else {
// Do nothing for volume labels etc...
}
}
}
}
#endregion
#region Internal Processing
void ProcessDirectory(object sender, DirectoryEventArgs e)
{
if ( !e.HasMatchingFiles && CreateEmptyDirectories ) {
if ( events_ != null ) {
events_.OnProcessDirectory(e.Name, e.HasMatchingFiles);
}
if ( e.ContinueRunning ) {
if (e.Name != sourceDirectory_) {
ZipEntry entry = entryFactory_.MakeDirectoryEntry(e.Name);
outputStream_.PutNextEntry(entry);
}
}
}
}
void ProcessFile(object sender, ScanEventArgs e)
{
if ( (events_ != null) && (events_.ProcessFile != null) ) {
events_.ProcessFile(sender, e);
}
if ( e.ContinueRunning ) {
try {
// The open below is equivalent to OpenRead which guarantees that if opened the
// file will not be changed by subsequent openers, but precludes opening in some cases
// where it could succeed.
using (FileStream stream = File.Open(e.Name, FileMode.Open, FileAccess.Read, FileShare.Read)) {
ZipEntry entry = entryFactory_.MakeFileEntry(e.Name);
outputStream_.PutNextEntry(entry);
AddFileContents(e.Name, stream);
}
}
catch(Exception ex) {
if (events_ != null) {
continueRunning_ = events_.OnFileFailure(e.Name, ex);
}
else {
continueRunning_ = false;
throw;
}
}
}
}
void AddFileContents(string name, Stream stream)
{
if( stream==null ) {
throw new ArgumentNullException("stream");
}
if( buffer_==null ) {
buffer_=new byte[4096];
}
if( (events_!=null)&&(events_.Progress!=null) ) {
StreamUtils.Copy(stream, outputStream_, buffer_,
events_.Progress, events_.ProgressInterval, this, name);
}
else {
StreamUtils.Copy(stream, outputStream_, buffer_);
}
if( events_!=null ) {
continueRunning_=events_.OnCompletedFile(name);
}
}
void ExtractFileEntry(ZipEntry entry, string targetName)
{
bool proceed = true;
if ( overwrite_ != Overwrite.Always ) {
if ( File.Exists(targetName) ) {
if ( (overwrite_ == Overwrite.Prompt) && (confirmDelegate_ != null) ) {
proceed = confirmDelegate_(targetName);
}
else {
proceed = false;
}
}
}
if ( proceed ) {
if ( events_ != null ) {
continueRunning_ = events_.OnProcessFile(entry.Name);
}
if ( continueRunning_ ) {
try {
using ( FileStream outputStream = File.Create(targetName) ) {
if ( buffer_ == null ) {
buffer_ = new byte[4096];
}
if ((events_ != null) && (events_.Progress != null))
{
StreamUtils.Copy(zipFile_.GetInputStream(entry), outputStream, buffer_,
events_.Progress, events_.ProgressInterval, this, entry.Name, entry.Size);
}
else
{
StreamUtils.Copy(zipFile_.GetInputStream(entry), outputStream, buffer_);
}
if (events_ != null) {
continueRunning_ = events_.OnCompletedFile(entry.Name);
}
}
#if !NETCF_1_0 && !NETCF_2_0
if ( restoreDateTimeOnExtract_ ) {
File.SetLastWriteTime(targetName, entry.DateTime);
}
if ( RestoreAttributesOnExtract && entry.IsDOSEntry && (entry.ExternalFileAttributes != -1)) {
FileAttributes fileAttributes = (FileAttributes) entry.ExternalFileAttributes;
// TODO: FastZip - Setting of other file attributes on extraction is a little trickier.
fileAttributes &= (FileAttributes.Archive | FileAttributes.Normal | FileAttributes.ReadOnly | FileAttributes.Hidden);
File.SetAttributes(targetName, fileAttributes);
}
#endif
}
catch(Exception ex) {
if ( events_ != null ) {
continueRunning_ = events_.OnFileFailure(targetName, ex);
}
else {
continueRunning_ = false;
throw;
}
}
}
}
}
void ExtractEntry(ZipEntry entry)
{
bool doExtraction = entry.IsCompressionMethodSupported();
string targetName = entry.Name;
if ( doExtraction ) {
if ( entry.IsFile ) {
targetName = extractNameTransform_.TransformFile(targetName);
}
else if ( entry.IsDirectory ) {
targetName = extractNameTransform_.TransformDirectory(targetName);
}
doExtraction = !((targetName == null) || (targetName.Length == 0));
}
// TODO: Fire delegate/throw exception when the compression method is not supported, or the name is invalid?
string dirName = null;
if ( doExtraction ) {
if ( entry.IsDirectory ) {
dirName = targetName;
}
else {
dirName = Path.GetDirectoryName(Path.GetFullPath(targetName));
}
}
if ( doExtraction && !Directory.Exists(dirName) ) {
if ( !entry.IsDirectory || CreateEmptyDirectories ) {
try {
Directory.CreateDirectory(dirName);
}
catch (Exception ex) {
doExtraction = false;
if ( events_ != null ) {
if ( entry.IsDirectory ) {
continueRunning_ = events_.OnDirectoryFailure(targetName, ex);
}
else {
continueRunning_ = events_.OnFileFailure(targetName, ex);
}
}
else {
continueRunning_ = false;
throw;
}
}
}
}
if ( doExtraction && entry.IsFile ) {
ExtractFileEntry(entry, targetName);
}
}
static int MakeExternalAttributes(FileInfo info)
{
return (int)info.Attributes;
}
#if NET_1_0 || NET_1_1 || NETCF_1_0
static bool NameIsValid(string name)
{
return (name != null) &&
(name.Length > 0) &&
(name.IndexOfAny(Path.InvalidPathChars) < 0);
}
#else
static bool NameIsValid(string name)
{
return (name != null) &&
(name.Length > 0) &&
(name.IndexOfAny(Path.GetInvalidPathChars()) < 0);
}
#endif
#endregion
#region Instance Fields
bool continueRunning_;
byte[] buffer_;
ZipOutputStream outputStream_;
ZipFile zipFile_;
string sourceDirectory_;
NameFilter fileFilter_;
NameFilter directoryFilter_;
Overwrite overwrite_;
ConfirmOverwriteDelegate confirmDelegate_;
bool restoreDateTimeOnExtract_;
bool restoreAttributesOnExtract_;
bool createEmptyDirectories_;
FastZipEvents events_;
IEntryFactory entryFactory_ = new ZipEntryFactory();
INameTransform extractNameTransform_;
UseZip64 useZip64_=UseZip64.Dynamic;
#if !NETCF_1_0
string password_;
#endif
#endregion
}
}
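// Usage sketch (not part of the original file): exercises the CreateZip/ExtractZip
// overloads and the FastZipEvents hooks defined above. Paths are placeholders.
namespace ICSharpCode.SharpZipLib.Zip.Examples
{
    using System;
    public static class FastZipUsageSketch
    {
        public static void Run()
        {
            var events = new FastZipEvents();
            events.ProcessFile += (sender, e) => Console.WriteLine("Adding " + e.Name);
            var fastZip = new FastZip(events);
            fastZip.CreateEmptyDirectories = true;
            // Recursively zip a directory tree, applying no file filter.
            fastZip.CreateZip("archive.zip", "sourceDir", true, null);
            // Extract everything back out (this overload uses Overwrite.Always).
            fastZip.ExtractZip("archive.zip", "targetDir", null);
        }
    }
}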
| |
using System;
using SFML.Window;
namespace SFML
{
namespace Graphics
{
////////////////////////////////////////////////////////////
/// <summary>
/// Decomposed transform defined by a position, a rotation and a scale
/// </summary>
////////////////////////////////////////////////////////////
public class Transformable : ObjectBase
{
////////////////////////////////////////////////////////////
/// <summary>
/// Default constructor
/// </summary>
////////////////////////////////////////////////////////////
public Transformable() :
base(IntPtr.Zero)
{
}
////////////////////////////////////////////////////////////
/// <summary>
/// Construct the transformable from another transformable
/// </summary>
/// <param name="transformable">Transformable to copy</param>
////////////////////////////////////////////////////////////
public Transformable(Transformable transformable) :
base(IntPtr.Zero)
{
Origin = transformable.Origin;
Position = transformable.Position;
Rotation = transformable.Rotation;
Scale = transformable.Scale;
}
////////////////////////////////////////////////////////////
/// <summary>
/// Position of the object
/// </summary>
////////////////////////////////////////////////////////////
public Vector2f Position
{
get
{
return myPosition;
}
set
{
myPosition = value;
myTransformNeedUpdate = true;
myInverseNeedUpdate = true;
}
}
////////////////////////////////////////////////////////////
/// <summary>
/// Rotation of the object
/// </summary>
////////////////////////////////////////////////////////////
public float Rotation
{
get
{
return myRotation;
}
set
{
myRotation = value;
myTransformNeedUpdate = true;
myInverseNeedUpdate = true;
}
}
////////////////////////////////////////////////////////////
/// <summary>
/// Scale of the object
/// </summary>
////////////////////////////////////////////////////////////
public Vector2f Scale
{
get
{
return myScale;
}
set
{
myScale = value;
myTransformNeedUpdate = true;
myInverseNeedUpdate = true;
}
}
////////////////////////////////////////////////////////////
/// <summary>
/// The origin of an object defines the center point for
/// all transformations (position, scale, rotation).
/// The coordinates of this point must be relative to the
/// top-left corner of the object, and ignore all
/// transformations (position, scale, rotation).
/// </summary>
////////////////////////////////////////////////////////////
public Vector2f Origin
{
get
{
return myOrigin;
}
set
{
myOrigin = value;
myTransformNeedUpdate = true;
myInverseNeedUpdate = true;
}
}
////////////////////////////////////////////////////////////
/// <summary>
/// The combined transform of the object
/// </summary>
////////////////////////////////////////////////////////////
public Transform Transform
{
get
{
if (myTransformNeedUpdate)
{
myTransformNeedUpdate = false;
float angle = -myRotation * 3.141592654F / 180.0F;
float cosine = (float)Math.Cos(angle);
float sine = (float)Math.Sin(angle);
float sxc = myScale.X * cosine;
float syc = myScale.Y * cosine;
float sxs = myScale.X * sine;
float sys = myScale.Y * sine;
float tx = -myOrigin.X * sxc - myOrigin.Y * sys + myPosition.X;
float ty = myOrigin.X * sxs - myOrigin.Y * syc + myPosition.Y;
myTransform = new Transform( sxc, sys, tx,
-sxs, syc, ty,
0.0F, 0.0F, 1.0F);
}
return myTransform;
}
}
////////////////////////////////////////////////////////////
/// <summary>
/// The inverse of the combined transform of the object
/// </summary>
////////////////////////////////////////////////////////////
public Transform InverseTransform
{
get
{
if (myInverseNeedUpdate)
{
myInverseTransform = Transform.GetInverse();
myInverseNeedUpdate = false;
}
return myInverseTransform;
}
}
////////////////////////////////////////////////////////////
/// <summary>
/// Construct the object from its internal C pointer
/// </summary>
/// <param name="cPointer">Pointer to the object in the C library</param>
////////////////////////////////////////////////////////////
protected Transformable(IntPtr cPointer) :
base(cPointer)
{
}
////////////////////////////////////////////////////////////
/// <summary>
/// Handle the destruction of the object
/// </summary>
/// <param name="disposing">Is the GC disposing the object, or is it an explicit call?</param>
////////////////////////////////////////////////////////////
protected override void Destroy(bool disposing)
{
// Does nothing, this instance is either pure C# (if created by the user)
// or not the final object (if used as a base for a drawable class)
}
private Vector2f myOrigin = new Vector2f(0, 0);
private Vector2f myPosition = new Vector2f(0, 0);
private float myRotation = 0;
private Vector2f myScale = new Vector2f(1, 1);
private Transform myTransform;
private Transform myInverseTransform;
private bool myTransformNeedUpdate = true;
private bool myInverseNeedUpdate = true;
}
}
}
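// Usage sketch (not part of the original file): sets the decomposed components and reads
// back the lazily rebuilt combined transform and its cached inverse.
namespace SFML.Graphics.Examples
{
    using SFML.Graphics;
    using SFML.Window;
    public static class TransformableUsageSketch
    {
        public static void Run()
        {
            var transformable = new Transformable();
            transformable.Origin = new Vector2f(16, 16);
            transformable.Position = new Vector2f(100, 50);
            transformable.Rotation = 45;
            transformable.Scale = new Vector2f(2, 2);
            // Rebuilt on access because a component changed since the last read.
            Transform combined = transformable.Transform;
            // Cached until the next component change invalidates it.
            Transform inverse = transformable.InverseTransform;
        }
    }
}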
| |
/* Generated SBE (Simple Binary Encoding) message codec */
#pragma warning disable 1591 // disable warning on missing comments
using System;
using Adaptive.SimpleBinaryEncoding;
namespace Adaptive.SimpleBinaryEncoding.Tests.Generated
{
public class SecurityStatus
{
public const ushort TemplateId = (ushort)24;
public const byte TemplateVersion = (byte)1;
public const ushort BlockLength = (ushort)29;
public const string SematicType = "f";
private readonly SecurityStatus _parentMessage;
private DirectBuffer _buffer;
private int _offset;
private int _limit;
private int _actingBlockLength;
private int _actingVersion;
public int Offset { get { return _offset; } }
public SecurityStatus()
{
_parentMessage = this;
}
public void WrapForEncode(DirectBuffer buffer, int offset)
{
_buffer = buffer;
_offset = offset;
_actingBlockLength = BlockLength;
_actingVersion = TemplateVersion;
Limit = offset + _actingBlockLength;
}
public void WrapForDecode(DirectBuffer buffer, int offset,
int actingBlockLength, int actingVersion)
{
_buffer = buffer;
_offset = offset;
_actingBlockLength = actingBlockLength;
_actingVersion = actingVersion;
Limit = offset + _actingBlockLength;
}
public int Size
{
get
{
return _limit - _offset;
}
}
public int Limit
{
get
{
return _limit;
}
set
{
_buffer.CheckLimit(_limit);
_limit = value;
}
}
public const int TransactTimeSchemaId = 60;
public static string TransactTimeMetaAttribute(MetaAttribute metaAttribute)
{
switch (metaAttribute)
{
case MetaAttribute.Epoch: return "unix";
case MetaAttribute.TimeUnit: return "nanosecond";
case MetaAttribute.SemanticType: return "UTCTimestamp";
}
return "";
}
public const ulong TransactTimeNullValue = 0x8000000000000000UL;
public const ulong TransactTimeMinValue = 0x0UL;
public const ulong TransactTimeMaxValue = 0x7fffffffffffffffUL;
public ulong TransactTime
{
get
{
return _buffer.Uint64GetLittleEndian(_offset + 0);
}
set
{
_buffer.Uint64PutLittleEndian(_offset + 0, value);
}
}
public const int TradeDateSchemaId = 75;
public static string TradeDateMetaAttribute(MetaAttribute metaAttribute)
{
switch (metaAttribute)
{
case MetaAttribute.Epoch: return "unix";
case MetaAttribute.TimeUnit: return "nanosecond";
case MetaAttribute.SemanticType: return "LocalMktDate";
}
return "";
}
public const ushort TradeDateNullValue = (ushort)65535;
public const ushort TradeDateMinValue = (ushort)0;
public const ushort TradeDateMaxValue = (ushort)65534;
public ushort TradeDate
{
get
{
return _buffer.Uint16GetLittleEndian(_offset + 8);
}
set
{
_buffer.Uint16PutLittleEndian(_offset + 8, value);
}
}
public const int SecurityGroupSchemaId = 1151;
public static string SecurityGroupMetaAttribute(MetaAttribute metaAttribute)
{
switch (metaAttribute)
{
case MetaAttribute.Epoch: return "unix";
case MetaAttribute.TimeUnit: return "nanosecond";
case MetaAttribute.SemanticType: return "String";
}
return "";
}
public const byte SecurityGroupNullValue = (byte)0;
public const byte SecurityGroupMinValue = (byte)32;
public const byte SecurityGroupMaxValue = (byte)126;
public const int SecurityGroupLength = 6;
public byte GetSecurityGroup(int index)
{
if (index < 0 || index >= 6)
{
throw new IndexOutOfRangeException("index out of range: index=" + index);
}
return _buffer.CharGet(_offset + 10 + (index * 1));
}
public void SetSecurityGroup(int index, byte value)
{
if (index < 0 || index >= 6)
{
throw new IndexOutOfRangeException("index out of range: index=" + index);
}
_buffer.CharPut(_offset + 10 + (index * 1), value);
}
public const string SecurityGroupCharacterEncoding = "UTF-8";
public int GetSecurityGroup(byte[] dst, int dstOffset)
{
const int length = 6;
if (dstOffset < 0 || dstOffset > (dst.Length - length))
{
throw new IndexOutOfRangeException("dstOffset out of range for copy: offset=" + dstOffset);
}
_buffer.GetBytes(_offset + 10, dst, dstOffset, length);
return length;
}
public void SetSecurityGroup(byte[] src, int srcOffset)
{
const int length = 6;
if (srcOffset < 0 || srcOffset > (src.Length - length))
{
throw new IndexOutOfRangeException("srcOffset out of range for copy: offset=" + srcOffset);
}
_buffer.SetBytes(_offset + 10, src, srcOffset, length);
}
public const int AssetSchemaId = 6937;
public static string AssetMetaAttribute(MetaAttribute metaAttribute)
{
switch (metaAttribute)
{
case MetaAttribute.Epoch: return "unix";
case MetaAttribute.TimeUnit: return "nanosecond";
case MetaAttribute.SemanticType: return "String";
}
return "";
}
public const byte AssetNullValue = (byte)0;
public const byte AssetMinValue = (byte)32;
public const byte AssetMaxValue = (byte)126;
public const int AssetLength = 6;
public byte GetAsset(int index)
{
if (index < 0 || index >= 6)
{
throw new IndexOutOfRangeException("index out of range: index=" + index);
}
return _buffer.CharGet(_offset + 16 + (index * 1));
}
public void SetAsset(int index, byte value)
{
if (index < 0 || index >= 6)
{
throw new IndexOutOfRangeException("index out of range: index=" + index);
}
_buffer.CharPut(_offset + 16 + (index * 1), value);
}
public const string AssetCharacterEncoding = "UTF-8";
public int GetAsset(byte[] dst, int dstOffset)
{
const int length = 6;
if (dstOffset < 0 || dstOffset > (dst.Length - length))
{
throw new IndexOutOfRangeException("dstOffset out of range for copy: offset=" + dstOffset);
}
_buffer.GetBytes(_offset + 16, dst, dstOffset, length);
return length;
}
public void SetAsset(byte[] src, int srcOffset)
{
const int length = 6;
if (srcOffset < 0 || srcOffset > (src.Length - length))
{
throw new IndexOutOfRangeException("srcOffset out of range for copy: offset=" + srcOffset);
}
_buffer.SetBytes(_offset + 16, src, srcOffset, length);
}
public const int SecurityIDSchemaId = 48;
public static string SecurityIDMetaAttribute(MetaAttribute metaAttribute)
{
switch (metaAttribute)
{
case MetaAttribute.Epoch: return "unix";
case MetaAttribute.TimeUnit: return "nanosecond";
case MetaAttribute.SemanticType: return "int";
}
return "";
}
public const int SecurityIDNullValue = 2147483647;
public const int SecurityIDMinValue = -2147483647;
public const int SecurityIDMaxValue = 2147483647;
public int SecurityID
{
get
{
return _buffer.Int32GetLittleEndian(_offset + 22);
}
set
{
_buffer.Int32PutLittleEndian(_offset + 22, value);
}
}
public const int SecurityTradingStatusSchemaId = 326;
public static string SecurityTradingStatusMetaAttribute(MetaAttribute metaAttribute)
{
switch (metaAttribute)
{
case MetaAttribute.Epoch: return "unix";
case MetaAttribute.TimeUnit: return "nanosecond";
case MetaAttribute.SemanticType: return "int";
}
return "";
}
public SecurityTradingStatus SecurityTradingStatus
{
get
{
return (SecurityTradingStatus)_buffer.Uint8Get(_offset + 26);
}
set
{
_buffer.Uint8Put(_offset + 26, (byte)value);
}
}
public const int HaltReasonSchemaId = 327;
public static string HaltReasonMetaAttribute(MetaAttribute metaAttribute)
{
switch (metaAttribute)
{
case MetaAttribute.Epoch: return "unix";
case MetaAttribute.TimeUnit: return "nanosecond";
case MetaAttribute.SemanticType: return "int";
}
return "";
}
public HaltReason HaltReason
{
get
{
return (HaltReason)_buffer.Uint8Get(_offset + 27);
}
set
{
_buffer.Uint8Put(_offset + 27, (byte)value);
}
}
public const int SecurityTradingEventSchemaId = 1174;
public static string SecurityTradingEventMetaAttribute(MetaAttribute metaAttribute)
{
switch (metaAttribute)
{
case MetaAttribute.Epoch: return "unix";
case MetaAttribute.TimeUnit: return "nanosecond";
case MetaAttribute.SemanticType: return "int";
}
return "";
}
public SecurityTradingEvent SecurityTradingEvent
{
get
{
return (SecurityTradingEvent)_buffer.Uint8Get(_offset + 28);
}
set
{
_buffer.Uint8Put(_offset + 28, (byte)value);
}
}
}
}
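// Usage sketch (not part of the original file): encodes and then decodes a SecurityStatus
// message using the flyweight API above. The DirectBuffer(byte[]) constructor is assumed
// to be available in Adaptive.SimpleBinaryEncoding.
namespace Adaptive.SimpleBinaryEncoding.Tests.Generated.Examples
{
    public static class SecurityStatusUsageSketch
    {
        public static void Run()
        {
            var buffer = new DirectBuffer(new byte[128]);
            var message = new SecurityStatus();
            // Encode a message starting at offset 0.
            message.WrapForEncode(buffer, 0);
            message.TransactTime = 1234567890UL;
            message.TradeDate = 18500;
            message.SecurityID = 42;
            // Decode it back using the schema's block length and template version.
            message.WrapForDecode(buffer, 0, SecurityStatus.BlockLength, SecurityStatus.TemplateVersion);
            ulong transactTime = message.TransactTime;
            int securityId = message.SecurityID;
        }
    }
}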
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections;
using System.Collections.Specialized;
using System.Reflection;
using System.IO;
using System.Web;
using log4net;
using Nini.Config;
using OpenMetaverse;
using OpenMetaverse.StructuredData;
using OpenSim.Framework;
using OpenSim.Framework.Servers;
using OpenSim.Framework.Servers.HttpServer;
using OpenSim.Services.Interfaces;
using Caps = OpenSim.Framework.Capabilities.Caps;
namespace OpenSim.Capabilities.Handlers
{
public class GetMeshHandler
{
private static readonly ILog m_log =
LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
private IAssetService m_assetService;
public const string DefaultFormat = "vnd.ll.mesh";
public GetMeshHandler(IAssetService assService)
{
m_assetService = assService;
}
public Hashtable Handle(Hashtable request)
{
Hashtable ret = new Hashtable();
ret["int_response_code"] = (int)System.Net.HttpStatusCode.NotFound;
ret["content_type"] = "text/plain";
ret["int_bytes"] = 0;
string MeshStr = (string)request["mesh_id"];
//m_log.DebugFormat("[GETMESH]: called {0}", MeshStr);
if (m_assetService == null)
{
m_log.Error("[GETMESH]: Cannot fetch mesh " + MeshStr + " without an asset service");
ret["keepalive"] = false;
return ret;
}
UUID meshID;
if (!String.IsNullOrEmpty(MeshStr) && UUID.TryParse(MeshStr, out meshID))
{
// m_log.DebugFormat("[GETMESH]: Received request for mesh id {0}", meshID);
ret = ProcessGetMesh(request, UUID.Zero, null);
}
else
{
m_log.Warn("[GETMESH]: Failed to parse a mesh_id from GetMesh request: " + (string)request["uri"]);
}
return ret;
}
public Hashtable ProcessGetMesh(Hashtable request, UUID AgentId, Caps cap)
{
Hashtable responsedata = new Hashtable();
responsedata["int_response_code"] = 400; //501; //410; //404;
responsedata["content_type"] = "text/plain";
responsedata["int_bytes"] = 0;
string meshStr = string.Empty;
if (request.ContainsKey("mesh_id"))
meshStr = request["mesh_id"].ToString();
UUID meshID = UUID.Zero;
if (!String.IsNullOrEmpty(meshStr) && UUID.TryParse(meshStr, out meshID))
{
if (m_assetService == null)
{
responsedata["int_response_code"] = 404; //501; //410; //404;
responsedata["keepalive"] = false;
responsedata["str_response_string"] = "The asset service is unavailable. So is your mesh.";
return responsedata;
}
AssetBase mesh = m_assetService.Get(meshID.ToString());
if (mesh != null)
{
if (mesh.Type == (SByte)AssetType.Mesh)
{
Hashtable headers = new Hashtable();
responsedata["headers"] = headers;
string range = String.Empty;
if (((Hashtable)request["headers"])["range"] != null)
range = (string)((Hashtable)request["headers"])["range"];
else if (((Hashtable)request["headers"])["Range"] != null)
range = (string)((Hashtable)request["headers"])["Range"];
if (!String.IsNullOrEmpty(range)) // Mesh Asset LOD // Physics
{
// Range request
int start, end;
if (TryParseRange(range, out start, out end))
{
// Before clamping start make sure we can satisfy it in order to avoid
// sending back the last byte instead of an error status
if (start >= mesh.Data.Length)
{
responsedata["int_response_code"] = 404; //501; //410; //404;
responsedata["content_type"] = "text/plain";
responsedata["str_response_string"] = "This range doesn't exist.";
return responsedata;
}
else
{
end = Utils.Clamp(end, 0, mesh.Data.Length - 1);
start = Utils.Clamp(start, 0, end);
int len = end - start + 1;
//m_log.Debug("Serving " + start + " to " + end + " of " + texture.Data.Length + " bytes for texture " + texture.ID);
if (start == 0 && len == mesh.Data.Length) // well, redundant maybe
{
responsedata["int_response_code"] = (int)System.Net.HttpStatusCode.OK;
responsedata["bin_response_data"] = mesh.Data;
responsedata["int_bytes"] = mesh.Data.Length;
}
else
{
responsedata["int_response_code"] =
(int)System.Net.HttpStatusCode.PartialContent;
headers["Content-Range"] = String.Format("bytes {0}-{1}/{2}", start, end,
mesh.Data.Length);
byte[] d = new byte[len];
Array.Copy(mesh.Data, start, d, 0, len);
responsedata["bin_response_data"] = d;
responsedata["int_bytes"] = len;
}
}
}
else
{
m_log.Warn("[GETMESH]: Failed to parse a range from GetMesh request, sending full asset: " + (string)request["uri"]);
responsedata["str_response_string"] = Convert.ToBase64String(mesh.Data);
responsedata["content_type"] = "application/vnd.ll.mesh";
responsedata["int_response_code"] = 200;
}
}
else
{
responsedata["str_response_string"] = Convert.ToBase64String(mesh.Data);
responsedata["content_type"] = "application/vnd.ll.mesh";
responsedata["int_response_code"] = 200;
}
}
// Optionally add additional mesh types here
else
{
responsedata["int_response_code"] = 404; //501; //410; //404;
responsedata["content_type"] = "text/plain";
responsedata["str_response_string"] = "Unfortunately, this asset isn't a mesh.";
return responsedata;
}
}
else
{
responsedata["int_response_code"] = 404; //501; //410; //404;
responsedata["content_type"] = "text/plain";
responsedata["str_response_string"] = "Your Mesh wasn't found. Sorry!";
return responsedata;
}
}
return responsedata;
}
private bool TryParseRange(string header, out int start, out int end)
{
if (header.StartsWith("bytes="))
{
string[] rangeValues = header.Substring(6).Split('-');
if (rangeValues.Length == 2)
{
if (Int32.TryParse(rangeValues[0], out start) && Int32.TryParse(rangeValues[1], out end))
return true;
}
}
start = end = 0;
return false;
}
}
}
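// Usage sketch (not part of the original file): drives the handler above through its
// Hashtable-based entry point. A configured IAssetService is assumed to be supplied by
// the caller; none is invented here.
namespace OpenSim.Capabilities.Handlers.Examples
{
    using System.Collections;
    using OpenSim.Services.Interfaces;
    public static class GetMeshHandlerUsageSketch
    {
        public static Hashtable FetchMesh(IAssetService assetService, string meshId)
        {
            var handler = new GetMeshHandler(assetService);
            var request = new Hashtable();
            request["mesh_id"] = meshId;
            request["uri"] = "/caps/GetMesh?mesh_id=" + meshId;
            request["headers"] = new Hashtable();
            // Returns a response Hashtable; int_response_code is 404 when the mesh cannot
            // be found and 200 (or 206 for range requests) on success.
            return handler.Handle(request);
        }
    }
}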
| |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using NodaTime;
using NUnit.Framework;
using Python.Runtime;
using QuantConnect.Data;
using QuantConnect.Data.Custom;
using QuantConnect.Data.Custom.IconicTypes;
using QuantConnect.Data.Market;
using QuantConnect.Indicators;
using QuantConnect.Python;
namespace QuantConnect.Tests.Common.Data
{
[TestFixture]
public class SliceTests
{
private readonly DateTime _dataTime = DateTime.UtcNow;
[Test]
public void AccessesByDataType()
{
var now = DateTime.UtcNow;
var tradeBar = new TradeBar { Symbol = Symbols.SPY, Time = now };
var unlinkedData = new UnlinkedData { Symbol = Symbols.SPY, Time = now };
var quoteBar = new QuoteBar { Symbol = Symbols.SPY, Time = now };
var tick = new Tick(now, Symbols.SPY, 1.1m, 2.1m) {TickType = TickType.Trade};
var openInterest = new OpenInterest(now, Symbols.SPY, 1);
var split = new Split(Symbols.SPY, now, 1, 1, SplitType.SplitOccurred);
var delisting = new Delisting(Symbols.SPY, now, 1, DelistingType.Delisted);
var slice = new Slice(now, new BaseData[] {quoteBar, tradeBar, unlinkedData, tick, split, delisting, openInterest }, now);
Assert.AreEqual(slice.Get(typeof(TradeBar))[Symbols.SPY], tradeBar);
Assert.AreEqual(slice.Get(typeof(UnlinkedData))[Symbols.SPY], unlinkedData);
Assert.AreEqual(slice.Get(typeof(QuoteBar))[Symbols.SPY], quoteBar);
Assert.AreEqual(slice.Get(typeof(Tick))[Symbols.SPY], tick);
Assert.AreEqual(slice.Get(typeof(Split))[Symbols.SPY], split);
Assert.AreEqual(slice.Get(typeof(Delisting))[Symbols.SPY], delisting);
Assert.AreEqual(slice.Get(typeof(OpenInterest))[Symbols.SPY], openInterest);
}
[Test]
public void AccessesBaseBySymbol()
{
IndicatorDataPoint tick = new IndicatorDataPoint(Symbols.SPY, DateTime.Now, 1);
Slice slice = new Slice(DateTime.Now, new[] { tick }, DateTime.Now);
IndicatorDataPoint data = slice[tick.Symbol];
Assert.AreEqual(tick, data);
}
[Test]
public void AccessesTradeBarBySymbol()
{
TradeBar tradeBar = new TradeBar { Symbol = Symbols.SPY, Time = DateTime.Now };
Slice slice = new Slice(DateTime.Now, new[] { tradeBar }, DateTime.Now);
TradeBar data = slice[tradeBar.Symbol];
Assert.AreEqual(tradeBar, data);
}
[Test]
public void EquitiesIgnoreQuoteBars()
{
var quoteBar = new QuoteBar { Symbol = Symbols.SPY, Time = DateTime.Now };
var slice = new Slice(DateTime.Now, new[] { quoteBar }, DateTime.Now);
Assert.IsFalse(slice.HasData);
Assert.IsTrue(slice.ToList().Count == 0);
Assert.IsFalse(slice.ContainsKey(Symbols.SPY));
Assert.Throws<KeyNotFoundException>(() => { var data = slice[Symbols.SPY]; });
Assert.AreEqual(0, slice.Count);
var tickQuoteBar = new Tick { Symbol = Symbols.SPY, Time = DateTime.Now, TickType = TickType.Quote };
slice = new Slice(DateTime.Now, new[] { tickQuoteBar }, DateTime.Now);
Assert.IsFalse(slice.HasData);
Assert.IsTrue(slice.ToList().Count == 0);
Assert.IsFalse(slice.ContainsKey(Symbols.SPY));
Assert.Throws<KeyNotFoundException>(() => { var data = slice[Symbols.SPY]; });
Assert.AreEqual(0, slice.Count);
}
[Test]
public void AccessesTradeBarCollection()
{
TradeBar tradeBar1 = new TradeBar { Symbol = Symbols.SPY, Time = DateTime.Now };
TradeBar tradeBar2 = new TradeBar { Symbol = Symbols.AAPL, Time = DateTime.Now };
Slice slice = new Slice(DateTime.Now, new[] { tradeBar1, tradeBar2 }, DateTime.Now);
TradeBars tradeBars = slice.Bars;
Assert.AreEqual(2, tradeBars.Count);
}
[Test]
public void AccessesTicksBySymbol()
{
Tick tick1 = new Tick { Time = DateTime.Now, Symbol = Symbols.SPY, Value = 1m, Quantity = 2m };
Tick tick2 = new Tick { Time = DateTime.Now, Symbol = Symbols.SPY, Value = 1.1m, Quantity = 2.1m };
Slice slice = new Slice(DateTime.Now, new[] { tick1, tick2 }, DateTime.Now);
List<Tick> data = slice[tick1.Symbol];
Assert.IsInstanceOf(typeof(List<Tick>), data);
Assert.AreEqual(2, data.Count);
}
[Test]
public void AccessesTicksCollection()
{
Tick tick1 = new Tick { Time = DateTime.Now, Symbol = Symbols.SPY, Value = 1, Quantity = 2 };
Tick tick2 = new Tick { Time = DateTime.Now, Symbol = Symbols.SPY, Value = 1.1m, Quantity = 2.1m };
Tick tick3 = new Tick { Time = DateTime.Now, Symbol = Symbols.AAPL, Value = 1, Quantity = 2 };
Tick tick4 = new Tick { Time = DateTime.Now, Symbol = Symbols.AAPL, Value = 1.1m, Quantity = 2.1m };
Slice slice = new Slice(DateTime.Now, new[] { tick1, tick2, tick3, tick4 }, DateTime.Now);
Ticks ticks = slice.Ticks;
Assert.AreEqual(2, ticks.Count);
Assert.AreEqual(2, ticks[Symbols.SPY].Count);
Assert.AreEqual(2, ticks[Symbols.AAPL].Count);
}
[Test]
public void DifferentCollectionsAreCorrectlyGeneratedSameSymbol()
{
var quoteBar = new QuoteBar(DateTime.Now, Symbols.SPY,
new Bar(3100, 3100, 3100, 3100), 0,
new Bar(3101, 3101, 3101, 3101), 0,
Time.OneMinute);
var tradeBar = new TradeBar { Symbol = Symbols.SPY, Time = DateTime.Now };
var slice = new Slice(DateTime.Now, new BaseData[] { quoteBar, tradeBar }, DateTime.Now);
Assert.AreEqual(1, slice.QuoteBars.Count);
Assert.AreEqual(1, slice.Bars.Count);
Assert.AreEqual(1, slice.Get<QuoteBar>().Count);
Assert.AreEqual(1, slice.Get<TradeBar>().Count);
}
[Test]
public void AccessesCustomGenericallyByTypeOtherTypesPresent()
{
var tradeBar = new TradeBar { Symbol = Symbols.SPY, Time = DateTime.Now };
var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now };
var slice = new Slice(DateTime.Now, new BaseData[] { unlinkedDataSpy, tradeBar }, DateTime.Now);
var unlinkedData = slice.Get<UnlinkedData>();
Assert.AreEqual(1, unlinkedData.Count);
}
[Test]
public void AccessesCustomGenericallyByType()
{
var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now };
var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now };
var slice = new Slice(DateTime.Now, new[] { unlinkedDataSpy, unlinkedDataAapl }, DateTime.Now);
var unlinkedData = slice.Get<UnlinkedData>();
Assert.AreEqual(2, unlinkedData.Count);
}
[Test]
public void AccessesTickGenericallyByType()
{
Tick TickSpy = new Tick { Symbol = Symbols.SPY, Time = DateTime.Now };
Tick TickAapl = new Tick { Symbol = Symbols.AAPL, Time = DateTime.Now };
Slice slice = new Slice(DateTime.Now, new[] { TickSpy, TickAapl }, DateTime.Now);
DataDictionary<Tick> TickData = slice.Get<Tick>();
Assert.AreEqual(2, TickData.Count);
}
[Test]
public void AccessesTradeBarGenericallyByType()
{
TradeBar TradeBarSpy = new TradeBar { Symbol = Symbols.SPY, Time = DateTime.Now };
TradeBar TradeBarAapl = new TradeBar { Symbol = Symbols.AAPL, Time = DateTime.Now };
Slice slice = new Slice(DateTime.Now, new[] { TradeBarSpy, TradeBarAapl }, DateTime.Now);
DataDictionary<TradeBar> TradeBarData = slice.Get<TradeBar>();
Assert.AreEqual(2, TradeBarData.Count);
}
[Test]
public void AccessesGenericallyByTypeAndSymbol()
{
var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now };
var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now };
var slice = new Slice(DateTime.Now, new[] { unlinkedDataSpy, unlinkedDataAapl }, DateTime.Now);
var unlinkedData = slice.Get<UnlinkedData>(Symbols.SPY);
Assert.AreEqual(unlinkedDataSpy, unlinkedData);
}
[Test]
public void MergeSlice()
{
var tradeBar1 = new TradeBar { Symbol = Symbols.SPY, Time = _dataTime };
var tradeBar2 = new TradeBar { Symbol = Symbols.AAPL, Time = _dataTime, Open = 23 };
var quoteBar1 = new QuoteBar { Symbol = Symbols.SPY, Time = _dataTime };
var tick1 = new Tick(_dataTime, Symbols.SPY, 1.1m, 2.1m) { TickType = TickType.Trade };
var split1 = new Split(Symbols.SPY, _dataTime, 1, 1, SplitType.SplitOccurred);
var dividend1 = new Dividend(Symbols.SPY, _dataTime, 1, 1);
var delisting1 = new Delisting(Symbols.SPY, _dataTime, 1, DelistingType.Delisted);
var symbolChangedEvent1 = new SymbolChangedEvent(Symbols.SPY, _dataTime, "SPY", "SP");
var slice1 = new Slice(_dataTime, new BaseData[] { tradeBar1, tradeBar2,
quoteBar1, tick1, split1, dividend1, delisting1, symbolChangedEvent1
}, _dataTime);
var tradeBar3 = new TradeBar { Symbol = Symbols.AAPL, Time = _dataTime, Open = 24 };
var tradeBar4 = new TradeBar { Symbol = Symbols.SBIN, Time = _dataTime };
var tradeBar3_4 = new TradeBar { Symbol = Symbols.BTCEUR, Time = _dataTime };
var quoteBar2 = new QuoteBar { Symbol = Symbols.SBIN, Time = _dataTime };
var tick2 = new Tick(_dataTime, Symbols.SBIN, 1.1m, 2.1m) { TickType = TickType.Trade };
var split2 = new Split(Symbols.SBIN, _dataTime, 1, 1, SplitType.SplitOccurred);
var dividend2 = new Dividend(Symbols.SBIN, _dataTime, 1, 1);
var delisting2 = new Delisting(Symbols.SBIN, _dataTime, 1, DelistingType.Delisted);
var symbolChangedEvent2 = new SymbolChangedEvent(Symbols.SBIN, _dataTime, "SBIN", "BIN");
var slice2 = new Slice(_dataTime, new BaseData[] { tradeBar3, tradeBar4, tradeBar3_4,
quoteBar2, tick2, split2, dividend2, delisting2, symbolChangedEvent2
}, _dataTime);
slice1.MergeSlice(slice2);
Assert.AreEqual(4, slice1.Bars.Count);
Assert.AreEqual(2, slice1.QuoteBars.Count);
Assert.AreEqual(2, slice1.Ticks.Count);
Assert.AreEqual(2, slice1.Splits.Count);
Assert.AreEqual(2, slice1.Dividends.Count);
Assert.AreEqual(2, slice1.Delistings.Count);
Assert.AreEqual(2, slice1.SymbolChangedEvents.Count);
}
[Test]
public void CheckMergeUpdatePrivateAttributes()
{
var tradeBar0 = new TradeBar { Symbol = Symbols.BTCUSD, Time = _dataTime };
var slice1 = new Slice(_dataTime, new BaseData[] { tradeBar0 }, _dataTime);
var tradeBar1 = new TradeBar { Symbol = Symbols.SPY, Time = _dataTime };
var tradeBar2 = new TradeBar { Symbol = Symbols.AAPL, Time = _dataTime, Open = 23 };
var slice2 = new Slice(_dataTime, new BaseData[] { tradeBar1, tradeBar2 }, _dataTime);
slice1.MergeSlice(slice2);
// Check private _data is updated
Assert.AreEqual(3, slice1.Values.Count);
var tradeBar3 = new TradeBar { Symbol = Symbols.AAPL, Time = _dataTime, Open = 24 };
var tradeBar4 = new TradeBar { Symbol = Symbols.SBIN, Time = _dataTime };
var tradeBar3_4 = new TradeBar { Symbol = Symbols.BTCEUR, Time = _dataTime };
var slice3 = new Slice(_dataTime, new BaseData[] { tradeBar3, tradeBar4, tradeBar3_4 }, _dataTime);
slice1.MergeSlice(slice3);
// Should use the first non-null value
var testTradeBar = (TradeBar)slice1.Values.Where(datum => datum.DataType == MarketDataType.TradeBar && datum.Symbol.Value == "AAPL").Single();
Assert.AreEqual(23, testTradeBar.Open);
// Check private _rawDataList is updated
Assert.AreEqual(5, slice1.Values.Count);
}
[Test]
public void MergeTicks()
{
var tradeBar1 = new TradeBar { Symbol = Symbols.SPY, Time = _dataTime };
var tick1 = new Tick(_dataTime, Symbols.SPY, 1.1m, 2.1m) { TickType = TickType.Trade };
var slice1 = new Slice(_dataTime, new BaseData[] { tradeBar1, tick1 }, _dataTime);
// Build a Ticks collection keyed by symbol, each entry holding a List<Tick>
var ticks = new Ticks { { Symbols.MSFT, new List<Tick> { tick1 } } };
var slice2 = new Slice(_dataTime, new List<BaseData>(), null, null, ticks, null, null, null, null, null, null, _dataTime);
slice1.MergeSlice(slice2);
Assert.AreEqual(2, slice1.Ticks.Count);
// Ticks for a symbol already present in the target slice should not be merged again
var tick2 = new Tick(_dataTime, Symbols.MSFT, 1.1m, 2.1m) { TickType = TickType.Trade };
var slice3 = new Slice(_dataTime, new BaseData[] { tradeBar1, tick2 }, _dataTime);
slice2.MergeSlice(slice3);
Assert.AreEqual(1, slice2.Ticks.Count);
}
[Test]
public void MergeOptionsAndFuturesChain()
{
// Merge OptionChains and FuturesChains
var optionChain1 = new OptionChains();
var optionChain2 = new OptionChains();
optionChain1.Add(Symbols.SPY, new OptionChain(Symbols.SPY, _dataTime));
optionChain2.Add(Symbols.AAPL, new OptionChain(Symbols.SPY, _dataTime));
var futuresChain1 = new FuturesChains();
var futuresChain2 = new FuturesChains();
futuresChain1.Add(Symbols.SPY, new FuturesChain(Symbols.SPY, _dataTime));
futuresChain2.Add(Symbols.AAPL, new FuturesChain(Symbols.SPY, _dataTime));
var slice4 = new Slice(_dataTime, new List<BaseData>(),
new TradeBars(_dataTime), new QuoteBars(),
new Ticks(), optionChain1,
futuresChain1, new Splits(),
new Dividends(_dataTime), new Delistings(),
new SymbolChangedEvents(), _dataTime);
var slice5 = new Slice(_dataTime, new List<BaseData>(),
new TradeBars(_dataTime), new QuoteBars(),
new Ticks(), optionChain2,
futuresChain2, new Splits(),
new Dividends(_dataTime), new Delistings(),
new SymbolChangedEvents(), _dataTime);
slice4.MergeSlice(slice5);
Assert.AreEqual(2, slice4.OptionChains.Count);
Assert.AreEqual(2, slice4.FutureChains.Count);
}
[Test]
public void MergeCustomData()
{
var tradeBar1 = new TradeBar { Symbol = Symbols.SPY, Time = _dataTime };
var tradeBar2 = new TradeBar { Symbol = Symbols.AAPL, Time = _dataTime, Open = 23 };
var custom1 = new FxcmVolume { DataType = MarketDataType.Base, Symbol = Symbols.MSFT };
var custom2 = new FxcmVolume { DataType = MarketDataType.Base, Symbol = Symbols.SBIN };
var custom3 = new FxcmVolume { DataType = MarketDataType.Base, Symbol = Symbols.MSFT };
var custom4 = new FxcmVolume { DataType = MarketDataType.Base, Symbol = Symbols.SBIN };
var slice6 = new Slice(_dataTime, new BaseData[] { custom1, custom2, custom3, tradeBar2 }, _dataTime);
var slice5 = new Slice(_dataTime, new BaseData[] { tradeBar1, custom4 }, _dataTime);
slice5.MergeSlice(slice6);
Assert.AreEqual(4, slice5.Values.Count);
Assert.AreEqual(2, slice5.Values.Where(x => x.DataType == MarketDataType.Base).Count());
}
[Test]
public void PythonGetCustomData()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice):
data = slice.Get(UnlinkedData)
return data").GetAttr("Test");
var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 10 };
var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 };
var slice = new Slice(DateTime.Now, new[] { unlinkedDataSpy, unlinkedDataAapl }, DateTime.Now);
var data = test(new PythonSlice(slice));
Assert.AreEqual(2, (int)data.Count);
Assert.AreEqual(10, (int)data[Symbols.SPY].Value);
Assert.AreEqual(11, (int)data[Symbols.AAPL].Value);
}
}
[Test]
public void PythonGetPythonCustomData()
{
using (Py.GIL())
{
dynamic testModule = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
class CustomDataTest(PythonData):
def Reader(self, config, line, date, isLiveMode):
result = CustomDataTest()
result.Symbol = config.Symbol
result.Value = 10
return result
def GetSource(config, date, isLiveMode):
return None
class CustomDataTest2(PythonData):
def Reader(self, config, line, date, isLiveMode):
result = CustomDataTest2()
result.Symbol = config.Symbol
result.Value = 11
return result
def GetSource(config, date, isLiveMode):
return None
def Test(slice):
data = slice.Get(CustomDataTest)
return data");
var test = testModule.GetAttr("Test");
var type = Extensions.CreateType(testModule.GetAttr("CustomDataTest"));
var customDataTest = new PythonData(testModule.GetAttr("CustomDataTest")());
var config = new SubscriptionDataConfig(type, Symbols.SPY, Resolution.Daily, DateTimeZone.Utc,
DateTimeZone.Utc, false, false, false, isCustom: true);
var data1 = customDataTest.Reader(config, "something", DateTime.UtcNow, false);
var customDataTest2 = new PythonData(testModule.GetAttr("CustomDataTest2")());
var config2 = new SubscriptionDataConfig(config, Extensions.CreateType(testModule.GetAttr("CustomDataTest2")));
var data2 = customDataTest2.Reader(config2, "something2", DateTime.UtcNow, false);
var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.UtcNow, Value = 10 };
var slice = new Slice(DateTime.UtcNow, new[] { unlinkedDataSpy, data2, data1 }, DateTime.UtcNow);
var data = test(new PythonSlice(slice));
Assert.AreEqual(1, (int)data.Count);
Assert.AreEqual(10, (int)data[Symbols.SPY].Value);
}
}
[Test]
public void PythonEnumerationWorks()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice):
for dataPoint in slice:
return dataPoint").GetAttr("Test");
var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 };
var slice = new Slice(DateTime.Now, new[] { unlinkedDataAapl }, DateTime.Now);
var data = test(new PythonSlice(slice)) as PyObject;
var keyValuePair = data.As<KeyValuePair<Symbol, BaseData>>();
Assert.IsNotNull(keyValuePair);
Assert.AreEqual(11, keyValuePair.Value.Value);
}
}
[Test]
public void PythonGetBySymbolCustomData()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
from QuantConnect.Tests import *
def Test(slice):
data = slice.Get(UnlinkedData)
value = data[Symbols.AAPL].Value
if value != 11:
raise Exception('Unexpected value')").GetAttr("Test");
var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 10 };
var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 };
var slice = new Slice(DateTime.Now, new[] { unlinkedDataSpy, unlinkedDataAapl }, DateTime.Now);
Assert.DoesNotThrow(() => test(new PythonSlice(slice)));
}
}
[Test]
public void PythonGetAndSymbolCustomData()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
from QuantConnect.Tests import *
def Test(slice):
data = slice.Get(UnlinkedData, Symbols.AAPL)
value = data.Value
if value != 11:
raise Exception('Unexpected value')").GetAttr("Test");
var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 10 };
var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 };
var slice = new Slice(DateTime.Now, new[] { unlinkedDataSpy, unlinkedDataAapl }, DateTime.Now);
Assert.DoesNotThrow(() => test(new PythonSlice(slice)));
}
}
[Test]
public void PythonGetTradeBar()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice):
data = slice.Get(TradeBar)
return data").GetAttr("Test");
var TradeBarSpy = new TradeBar { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 8 };
var TradeBarAapl = new TradeBar { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 9 };
var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 10 };
var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 };
var slice = new Slice(DateTime.Now, new BaseData[] { unlinkedDataSpy, TradeBarAapl, unlinkedDataAapl, TradeBarSpy }, DateTime.Now);
var data = test(new PythonSlice(slice));
Assert.AreEqual(2, (int)data.Count);
Assert.AreEqual(8, (int)data[Symbols.SPY].Value);
Assert.AreEqual(9, (int)data[Symbols.AAPL].Value);
}
}
[Test]
public void PythonGetBySymbolOpenInterest()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
from QuantConnect.Tests import *
def Test(slice):
data = slice.Get(OpenInterest)
value = data[Symbols.AAPL].Value
if value != 33:
raise Exception('Unexpected value')").GetAttr("Test");
var now = DateTime.UtcNow;
var TradeBarSpy = new TradeBar { Symbol = Symbols.SPY, Time = now, Value = 8 };
var TradeBarAapl = new TradeBar { Symbol = Symbols.AAPL, Time = now, Value = 9 };
var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = now, Value = 10 };
var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = now, Value = 11 };
var openInterest = new OpenInterest(now, Symbols.AAPL, 33);
var slice = new Slice(now, new BaseData[] { unlinkedDataSpy, TradeBarAapl, unlinkedDataAapl, TradeBarSpy, openInterest }, now);
Assert.DoesNotThrow(() => test(new PythonSlice(slice)));
}
}
[Test]
public void PythonGetBySymbolTradeBar()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
from QuantConnect.Tests import *
def Test(slice):
data = slice.Get(TradeBar)
value = data[Symbols.AAPL].Value
if value != 9:
raise Exception('Unexpected value')").GetAttr("Test");
var TradeBarSpy = new TradeBar { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 8 };
var TradeBarAapl = new TradeBar { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 9 };
var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 10 };
var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 };
var slice = new Slice(DateTime.Now, new BaseData[] { unlinkedDataSpy, TradeBarAapl, unlinkedDataAapl, TradeBarSpy }, DateTime.Now);
Assert.DoesNotThrow(() => test(new PythonSlice(slice)));
}
}
[Test]
public void PythonGetAndSymbolTradeBar()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
from QuantConnect.Tests import *
def Test(slice):
data = slice.Get(TradeBar, Symbols.AAPL)
value = data.Value
if value != 9:
raise Exception('Unexpected value')").GetAttr("Test");
var TradeBarSpy = new TradeBar { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 8 };
var TradeBarAapl = new TradeBar { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 9 };
var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 10 };
var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 };
var slice = new Slice(DateTime.Now, new BaseData[] { unlinkedDataSpy, TradeBarAapl, unlinkedDataAapl, TradeBarSpy }, DateTime.Now);
Assert.DoesNotThrow(() => test(new PythonSlice(slice)));
}
}
[Test]
public void PythonGetCustomData_Iterate_IndexedLinkedData()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
from QuantConnect.Data.Custom.IconicTypes import *
from QuantConnect.Logging import *
def Test(slice):
data = slice.Get(IndexedLinkedData)
count = 0
for singleData in data:
Log.Trace(str(singleData))
count += 1
if count != 2:
raise Exception('Unexpected value')").GetAttr("Test");
var indexedLinkedDataSpy = new IndexedLinkedData { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 10 };
var tradeBarAapl = new TradeBar { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 9 };
var indexedLinkedDataAapl = new IndexedLinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 };
var slice = new Slice(DateTime.Now, new BaseData[] { indexedLinkedDataSpy, tradeBarAapl, indexedLinkedDataAapl }, DateTime.Now);
Assert.DoesNotThrow(() => test(new PythonSlice(slice)));
}
}
[Test]
public void PythonGetCustomData_Iterate_IndexedLinkedData_Empty()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
from QuantConnect.Data.Custom.IconicTypes import *
def Test(slice):
data = slice.Get(IndexedLinkedData)
for singleData in data:
raise Exception('Unexpected iteration')
for singleData in data.Values:
raise Exception('Unexpected iteration')
data = slice.Get(IndexedLinkedData)
for singleData in data:
raise Exception('Unexpected iteration')
for singleData in data.Values:
raise Exception('Unexpected iteration')").GetAttr("Test");
var tradeBarAapl = new TradeBar { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 9 };
var slice = new Slice(DateTime.Now, new List<BaseData> { tradeBarAapl }, DateTime.Now);
Assert.DoesNotThrow(() => test(new PythonSlice(slice)));
}
}
[Test]
public void PythonGetCustomData_Iterate()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice):
data = slice.Get(UnlinkedData)
count = 0
for singleData in data:
count += 1
if count != 2:
raise Exception('Unexpected value')").GetAttr("Test");
var unlinkedDataSpy = new UnlinkedData { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 10 };
var unlinkedDataAapl = new UnlinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 };
var slice = new Slice(DateTime.Now, new[] { unlinkedDataSpy, unlinkedDataAapl }, DateTime.Now);
Assert.DoesNotThrow(() => test(new PythonSlice(slice)));
}
}
[Test]
public void EnumeratorDoesNotThrowWithTicks()
{
var slice = new Slice(DateTime.Now, new[]
{
new Tick {Time = DateTime.Now, Symbol = Symbols.SPY, Value = 1, Quantity = 2},
new Tick{Time = DateTime.Now, Symbol = Symbols.SPY, Value = 1.1m, Quantity = 2.1m},
new Tick{Time = DateTime.Now, Symbol = Symbols.AAPL, Value = 1, Quantity = 2},
new Tick{Time = DateTime.Now, Symbol = Symbols.AAPL, Value = 1.1m, Quantity = 2.1m}
}, DateTime.Now);
Assert.AreEqual(4, slice.Count());
}
[Test]
public void AccessesTradeBarAndQuoteBarForSameSymbol()
{
var tradeBar = new TradeBar(DateTime.Now, Symbols.BTCUSD,
3000, 3000, 3000, 3000, 100, Time.OneMinute);
var quoteBar = new QuoteBar(DateTime.Now, Symbols.BTCUSD,
new Bar(3100, 3100, 3100, 3100), 0,
new Bar(3101, 3101, 3101, 3101), 0,
Time.OneMinute);
var tradeBars = new TradeBars { { Symbols.BTCUSD, tradeBar } };
var quoteBars = new QuoteBars { { Symbols.BTCUSD, quoteBar } };
var slice = new Slice(DateTime.Now, new List<BaseData>(){ tradeBar, quoteBar }, tradeBars, quoteBars, null, null, null, null, null, null, null, DateTime.Now);
var tradeBarData = slice.Get<TradeBar>();
Assert.AreEqual(1, tradeBarData.Count);
Assert.AreEqual(3000, tradeBarData[Symbols.BTCUSD].Close);
var quoteBarData = slice.Get<QuoteBar>();
Assert.AreEqual(1, quoteBarData.Count);
Assert.AreEqual(3100, quoteBarData[Symbols.BTCUSD].Bid.Close);
Assert.AreEqual(3101, quoteBarData[Symbols.BTCUSD].Ask.Close);
slice = new Slice(DateTime.Now, new BaseData[] { tradeBar, quoteBar }, DateTime.Now);
tradeBarData = slice.Get<TradeBar>();
Assert.AreEqual(1, tradeBarData.Count);
Assert.AreEqual(3000, tradeBarData[Symbols.BTCUSD].Close);
quoteBarData = slice.Get<QuoteBar>();
Assert.AreEqual(1, quoteBarData.Count);
Assert.AreEqual(3100, quoteBarData[Symbols.BTCUSD].Bid.Close);
Assert.AreEqual(3101, quoteBarData[Symbols.BTCUSD].Ask.Close);
}
[Test]
public void PythonSlice_clear()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice):
slice.clear()").GetAttr("Test");
Assert.Throws<PythonException>(() => test(GetPythonSlice()), "Slice is read-only: cannot clear the collection");
}
}
[Test]
public void PythonSlice_popitem()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice):
slice.popitem()").GetAttr("Test");
Assert.Throws<PythonException>(() => test(GetPythonSlice()), "Slice is read-only: cannot pop an item from the collection");
}
}
[Test]
public void PythonSlice_pop()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice, symbol):
slice.pop(symbol)").GetAttr("Test");
Assert.Throws<PythonException>(() => test(GetPythonSlice(), Symbols.SPY), $"Slice is read-only: cannot pop the value for {Symbols.SPY} from the collection");
}
}
[Test]
public void PythonSlice_pop_default()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice, symbol, default_value):
slice.pop(symbol, default_value)").GetAttr("Test");
Assert.Throws<PythonException>(() => test(GetPythonSlice(), Symbols.SPY, null), $"Slice is read-only: cannot pop the value for {Symbols.SPY} from the collection");
}
}
[Test]
public void PythonSlice_update_fails()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice, symbol):
item = { symbol: 1 }
slice.update(item)").GetAttr("Test");
Assert.Throws<PythonException>(() => test(GetPythonSlice(), Symbols.SPY), "Slice is read-only: cannot update the collection");
}
}
[Test]
public void PythonSlice_update_success()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice, symbol, bar):
item = { symbol: bar }
slice.Bars.update(item)").GetAttr("Test");
var expected = new TradeBar();
var pythonSlice = GetPythonSlice();
Assert.DoesNotThrow(() => test(pythonSlice, Symbols.SPY, expected));
Assert.AreEqual(expected, pythonSlice.Bars[Symbols.SPY]);
}
}
[Test]
public void PythonSlice_contains()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
AddReference(""QuantConnect.Tests"")
from QuantConnect.Tests.Common.Data import *
def Test(slice, symbol):
return symbol in slice").GetAttr("Test");
bool result = false;
Assert.DoesNotThrow(() => result = test(GetSlice(), Symbols.SPY));
Assert.IsTrue(result);
result = false;
Assert.DoesNotThrow(() => result = test(GetPythonSlice(), Symbols.SPY));
Assert.IsTrue(result);
}
}
[Test, Ignore("Performance test")]
public void PythonSlice_performance()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
AddReference(""QuantConnect.Tests"")
from QuantConnect.Tests.Common.Data import *
def Test(slice, symbol):
msg = '__contains__'
if 'SPY' in slice:
msg += ' Py'
now = datetime.now()
for i in range(0,1000000):
result = 'SPY' in slice
span1 = (datetime.now()-now).total_seconds()
if slice.ContainsKey('SPY'):
msg += ' C#\n'
now = datetime.now()
for i in range(0,1000000):
result = slice.ContainsKey('SPY')
span2 = (datetime.now()-now).total_seconds()
msg += f'Py: {span1}\nC#: {span2}\nRatio: {span1/span2}'
msg += '\n\n__len__'
if len(slice) > 0:
msg += ' Py'
now = datetime.now()
for i in range(0,1000000):
result = len(slice)
span1 = (datetime.now()-now).total_seconds()
if slice.Count > 0:
msg += ' C#\n'
now = datetime.now()
for i in range(0,1000000):
result = slice.Count
span2 = (datetime.now()-now).total_seconds()
msg += f'Py: {span1}\nC#: {span2}\nRatio: {span1/span2}'
msg += '\n\nkeys()'
if len(slice.keys()) > 0:
msg += ' Py'
now = datetime.now()
for i in range(0,1000000):
result = slice.keys()
span1 = (datetime.now()-now).total_seconds()
if len(slice.Keys) > 0:
msg += ' C#\n'
now = datetime.now()
for i in range(0,1000000):
result = slice.Keys
span2 = (datetime.now()-now).total_seconds()
msg += f'Py: {span1}\nC#: {span2}\nRatio: {span1/span2}'
msg += '\n\nvalues()'
if len(slice.values()) > 0:
msg += ' Py'
now = datetime.now()
for i in range(0,1000000):
result = slice.values()
span1 = (datetime.now()-now).total_seconds()
if len(slice.Values) > 0:
msg += ' C#\n'
now = datetime.now()
for i in range(0,1000000):
result = slice.Values
span2 = (datetime.now()-now).total_seconds()
msg += f'Py: {span1}\nC#: {span2}\nRatio: {span1/span2}'
msg += '\n\nget()'
if slice.get(symbol):
msg += ' Py'
now = datetime.now()
for i in range(0,1000000):
result = slice.get(symbol)
span1 = (datetime.now()-now).total_seconds()
dummy = None
if slice.TryGetValue(symbol, dummy):
msg += ' C#\n'
now = datetime.now()
for i in range(0,1000000):
result = slice.TryGetValue(symbol, dummy)
span2 = (datetime.now()-now).total_seconds()
msg += f'Py: {span1}\nC#: {span2}\nRatio: {span1/span2}'
msg += '\n\nitems()'
if slice.items():
msg += ' Py'
now = datetime.now()
for i in range(0,1000000):
result = list(slice.items())
span1 = (datetime.now()-now).total_seconds()
msg += ' C#\n'
now = datetime.now()
for i in range(0,1000000):
result = [x for x in slice]
span2 = (datetime.now()-now).total_seconds()
msg += f'Py: {span1}\nC#: {span2}\nRatio: {span1/span2}'
return msg").GetAttr("Test");
var message = string.Empty;
Assert.DoesNotThrow(() => message = test(GetPythonSlice(), Symbols.SPY));
Assert.Ignore(message);
}
}
[Test]
public void PythonSlice_len()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
AddReference(""QuantConnect.Tests"")
from QuantConnect.Tests.Common.Data import *
def Test(slice, symbol):
return len(slice)").GetAttr("Test");
var result = -1;
Assert.DoesNotThrow(() => result = test(GetSlice(), Symbols.SPY));
Assert.AreEqual(2, result);
result = -1;
Assert.DoesNotThrow(() => result = test(GetPythonSlice(), Symbols.SPY));
Assert.AreEqual(2, result);
}
}
[Test]
public void PythonSlice_copy()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice, symbol):
copy = slice.copy()
return ', '.join([f'{k}: {v.Value}' for k,v in copy.items()])").GetAttr("Test");
var result = string.Empty;
Assert.DoesNotThrow(() => result = test(GetPythonSlice(), Symbols.SPY));
Assert.AreEqual("SPY R735QTJ8XC9X: 10.0, AAPL R735QTJ8XC9X: 11.0", result);
}
}
[Test]
public void PythonSlice_items()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice):
return ', '.join([f'{k}: {v.Value}' for k,v in slice.items()])").GetAttr("Test");
var result = string.Empty;
Assert.DoesNotThrow(() => result = test(GetPythonSlice()));
Assert.AreEqual("SPY R735QTJ8XC9X: 10.0, AAPL R735QTJ8XC9X: 11.0", result);
}
}
[Test]
public void PythonSlice_keys()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice):
return slice.keys()").GetAttr("Test");
var slice = GetPythonSlice();
var result = new List<Symbol>();
Assert.DoesNotThrow(() => result = test(slice));
foreach (var key in slice.Keys)
{
Assert.IsTrue(result.Contains(key));
}
}
}
[Test]
public void PythonSlice_values()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice):
return slice.values()").GetAttr("Test");
var slice = GetPythonSlice();
var result = new List<BaseData>();
Assert.DoesNotThrow(() => result = test(slice));
foreach (var value in slice.Values)
{
Assert.IsTrue(result.Contains(value));
}
}
}
[Test]
public void PythonSlice_fromkeys()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice, keys):
newDict = slice.fromkeys(keys)
return ', '.join([f'{k}: {v.Value}' for k,v in newDict.items()])").GetAttr("Test");
var result = string.Empty;
Assert.DoesNotThrow(() => result = test(GetPythonSlice(), new[] { Symbols.SPY }));
Assert.AreEqual("SPY R735QTJ8XC9X: 10.0", result);
}
}
[Test]
public void PythonSlice_fromkeys_default()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice, keys, default_value):
newDict = slice.fromkeys(keys, default_value)
return ', '.join([f'{k}: {v.Value}' for k,v in newDict.items()])").GetAttr("Test");
var result = string.Empty;
Assert.DoesNotThrow(() => result = test(GetPythonSlice(), new[] { Symbols.EURUSD }, new Tick()));
Assert.AreEqual("EURUSD 8G: 0.0", result);
}
}
[Test]
public void PythonSlice_get_success()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice, symbol):
return slice.get(symbol)").GetAttr("Test");
var pythonSlice = GetPythonSlice();
dynamic expected = pythonSlice[Symbols.SPY];
PyObject result = null;
Assert.DoesNotThrow(() => result = test(GetPythonSlice(), Symbols.SPY ));
BaseData actual;
Assert.IsTrue(result.TryConvert(out actual));
Assert.AreEqual(expected.Symbol, actual.Symbol);
Assert.AreEqual(expected.Value, actual.Value);
}
}
[Test]
public void PythonSlice_get_default()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice, symbol, default_value):
return slice.get(symbol, default_value)").GetAttr("Test");
var pythonSlice = GetPythonSlice();
var expected = new QuoteBar { Symbol = Symbols.EURUSD, Time = DateTime.Now, Value = 9 };
PyObject result = null;
Assert.DoesNotThrow(() => result = test(GetPythonSlice(), Symbols.EURUSD, expected));
BaseData actual;
Assert.IsTrue(result.TryConvert(out actual));
Assert.AreEqual(expected.Symbol, actual.Symbol);
Assert.AreEqual(expected.Value, actual.Value);
}
}
[Test]
public void PythonSlice_get_NoneIfKeyNotFound()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice, symbol):
return slice.get(symbol)").GetAttr("Test");
Assert.IsNull(test(GetPythonSlice(), Symbols.EURUSD));
}
}
[Test]
public void PythonSlice_setdefault_success()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice, symbol):
return slice.setdefault(symbol)").GetAttr("Test");
var pythonSlice = GetPythonSlice();
dynamic expected = pythonSlice[Symbols.SPY];
PyObject result = null;
Assert.DoesNotThrow(() => result = test(GetPythonSlice(), Symbols.SPY));
BaseData actual;
Assert.IsTrue(result.TryConvert(out actual));
Assert.AreEqual(expected.Symbol, actual.Symbol);
Assert.AreEqual(expected.Value, actual.Value);
}
}
[Test]
public void PythonSlice_setdefault_default_success()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice, symbol, default_value):
return slice.setdefault(symbol, default_value)").GetAttr("Test");
var value = new Tick();
var pythonSlice = GetPythonSlice();
dynamic expected = pythonSlice[Symbols.SPY];
PyObject result = null;
// Since SPY is found, no need to set the default. Therefore it does not throw.
Assert.DoesNotThrow(() => result = test(GetPythonSlice(), Symbols.SPY, value));
BaseData actual;
Assert.IsTrue(result.TryConvert(out actual));
Assert.AreEqual(expected.Symbol, actual.Symbol);
Assert.AreEqual(expected.Value, actual.Value);
}
}
[Test]
public void PythonSlice_setdefault_keynotfound()
{
using (Py.GIL())
{
dynamic test = PythonEngine.ModuleFromString("testModule",
@"
from AlgorithmImports import *
def Test(slice, symbol):
return slice.setdefault(symbol)").GetAttr("Test");
var symbol = Symbols.EURUSD;
Assert.Throws<PythonException>(() => test(GetPythonSlice(), symbol),
$"Slice is read-only: cannot set default value to for {symbol}");
}
}
private Slice GetSlice()
{
SymbolCache.Clear();
var indexedLinkedDataSpy = new IndexedLinkedData { Symbol = Symbols.SPY, Time = DateTime.Now, Value = 10 };
var tradeBarAapl = new TradeBar { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 9 };
var indexedLinkedDataAapl = new IndexedLinkedData { Symbol = Symbols.AAPL, Time = DateTime.Now, Value = 11 };
return new Slice(DateTime.Now, new BaseData[] { indexedLinkedDataSpy, tradeBarAapl, indexedLinkedDataAapl }, DateTime.Now);
}
private PythonSlice GetPythonSlice() => new PythonSlice(GetSlice());
}
public class PublicArrayTest
{
public int[] items;
public PublicArrayTest()
{
items = new int[5] { 0, 1, 2, 3, 4 };
}
}
}
| |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gax = Google.Api.Gax;
using gcsv = Google.Cloud.SecurityCenter.V1;
using sys = System;
namespace Google.Cloud.SecurityCenter.V1
{
/// <summary>Resource name for the <c>Asset</c> resource.</summary>
public sealed partial class AssetName : gax::IResourceName, sys::IEquatable<AssetName>
{
/// <summary>The possible contents of <see cref="AssetName"/>.</summary>
public enum ResourceNameType
{
/// <summary>An unparsed resource name.</summary>
Unparsed = 0,
/// <summary>A resource name with pattern <c>organizations/{organization}/assets/{asset}</c>.</summary>
OrganizationAsset = 1,
/// <summary>A resource name with pattern <c>folders/{folder}/assets/{asset}</c>.</summary>
FolderAsset = 2,
/// <summary>A resource name with pattern <c>projects/{project}/assets/{asset}</c>.</summary>
ProjectAsset = 3,
}
private static gax::PathTemplate s_organizationAsset = new gax::PathTemplate("organizations/{organization}/assets/{asset}");
private static gax::PathTemplate s_folderAsset = new gax::PathTemplate("folders/{folder}/assets/{asset}");
private static gax::PathTemplate s_projectAsset = new gax::PathTemplate("projects/{project}/assets/{asset}");
/// <summary>Creates a <see cref="AssetName"/> containing an unparsed resource name.</summary>
/// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
/// <returns>
/// A new instance of <see cref="AssetName"/> containing the provided <paramref name="unparsedResourceName"/>.
/// </returns>
public static AssetName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
new AssetName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));
/// <summary>
/// Creates a <see cref="AssetName"/> with the pattern <c>organizations/{organization}/assets/{asset}</c>.
/// </summary>
/// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="assetId">The <c>Asset</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>A new instance of <see cref="AssetName"/> constructed from the provided ids.</returns>
public static AssetName FromOrganizationAsset(string organizationId, string assetId) =>
new AssetName(ResourceNameType.OrganizationAsset, organizationId: gax::GaxPreconditions.CheckNotNullOrEmpty(organizationId, nameof(organizationId)), assetId: gax::GaxPreconditions.CheckNotNullOrEmpty(assetId, nameof(assetId)));
/// <summary>
/// Creates a <see cref="AssetName"/> with the pattern <c>folders/{folder}/assets/{asset}</c>.
/// </summary>
/// <param name="folderId">The <c>Folder</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="assetId">The <c>Asset</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>A new instance of <see cref="AssetName"/> constructed from the provided ids.</returns>
public static AssetName FromFolderAsset(string folderId, string assetId) =>
new AssetName(ResourceNameType.FolderAsset, folderId: gax::GaxPreconditions.CheckNotNullOrEmpty(folderId, nameof(folderId)), assetId: gax::GaxPreconditions.CheckNotNullOrEmpty(assetId, nameof(assetId)));
/// <summary>
/// Creates a <see cref="AssetName"/> with the pattern <c>projects/{project}/assets/{asset}</c>.
/// </summary>
/// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="assetId">The <c>Asset</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>A new instance of <see cref="AssetName"/> constructed from the provided ids.</returns>
public static AssetName FromProjectAsset(string projectId, string assetId) =>
new AssetName(ResourceNameType.ProjectAsset, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), assetId: gax::GaxPreconditions.CheckNotNullOrEmpty(assetId, nameof(assetId)));
/// <summary>
/// Formats the IDs into the string representation of this <see cref="AssetName"/> with pattern
/// <c>organizations/{organization}/assets/{asset}</c>.
/// </summary>
/// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="assetId">The <c>Asset</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="AssetName"/> with pattern
/// <c>organizations/{organization}/assets/{asset}</c>.
/// </returns>
public static string Format(string organizationId, string assetId) => FormatOrganizationAsset(organizationId, assetId);
/// <summary>
/// Formats the IDs into the string representation of this <see cref="AssetName"/> with pattern
/// <c>organizations/{organization}/assets/{asset}</c>.
/// </summary>
/// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="assetId">The <c>Asset</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="AssetName"/> with pattern
/// <c>organizations/{organization}/assets/{asset}</c>.
/// </returns>
public static string FormatOrganizationAsset(string organizationId, string assetId) =>
s_organizationAsset.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(organizationId, nameof(organizationId)), gax::GaxPreconditions.CheckNotNullOrEmpty(assetId, nameof(assetId)));
/// <summary>
/// Formats the IDs into the string representation of this <see cref="AssetName"/> with pattern
/// <c>folders/{folder}/assets/{asset}</c>.
/// </summary>
/// <param name="folderId">The <c>Folder</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="assetId">The <c>Asset</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="AssetName"/> with pattern <c>folders/{folder}/assets/{asset}</c>
/// .
/// </returns>
public static string FormatFolderAsset(string folderId, string assetId) =>
s_folderAsset.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(folderId, nameof(folderId)), gax::GaxPreconditions.CheckNotNullOrEmpty(assetId, nameof(assetId)));
/// <summary>
/// Formats the IDs into the string representation of this <see cref="AssetName"/> with pattern
/// <c>projects/{project}/assets/{asset}</c>.
/// </summary>
/// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="assetId">The <c>Asset</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="AssetName"/> with pattern <c>projects/{project}/assets/{asset}</c>
/// .
/// </returns>
public static string FormatProjectAsset(string projectId, string assetId) =>
s_projectAsset.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), gax::GaxPreconditions.CheckNotNullOrEmpty(assetId, nameof(assetId)));
/// <summary>Parses the given resource name string into a new <see cref="AssetName"/> instance.</summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>organizations/{organization}/assets/{asset}</c></description></item>
/// <item><description><c>folders/{folder}/assets/{asset}</c></description></item>
/// <item><description><c>projects/{project}/assets/{asset}</c></description></item>
/// </list>
/// </remarks>
/// <param name="assetName">The resource name in string form. Must not be <c>null</c>.</param>
/// <returns>The parsed <see cref="AssetName"/> if successful.</returns>
public static AssetName Parse(string assetName) => Parse(assetName, false);
/// <summary>
/// Parses the given resource name string into a new <see cref="AssetName"/> instance; optionally allowing an
/// unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>organizations/{organization}/assets/{asset}</c></description></item>
/// <item><description><c>folders/{folder}/assets/{asset}</c></description></item>
/// <item><description><c>projects/{project}/assets/{asset}</c></description></item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="assetName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <returns>The parsed <see cref="AssetName"/> if successful.</returns>
public static AssetName Parse(string assetName, bool allowUnparsed) =>
TryParse(assetName, allowUnparsed, out AssetName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="AssetName"/> instance.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>organizations/{organization}/assets/{asset}</c></description></item>
/// <item><description><c>folders/{folder}/assets/{asset}</c></description></item>
/// <item><description><c>projects/{project}/assets/{asset}</c></description></item>
/// </list>
/// </remarks>
/// <param name="assetName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="result">
/// When this method returns, the parsed <see cref="AssetName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string assetName, out AssetName result) => TryParse(assetName, false, out result);
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="AssetName"/> instance; optionally
/// allowing an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>organizations/{organization}/assets/{asset}</c></description></item>
/// <item><description><c>folders/{folder}/assets/{asset}</c></description></item>
/// <item><description><c>projects/{project}/assets/{asset}</c></description></item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="assetName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <param name="result">
/// When this method returns, the parsed <see cref="AssetName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string assetName, bool allowUnparsed, out AssetName result)
{
gax::GaxPreconditions.CheckNotNull(assetName, nameof(assetName));
gax::TemplatedResourceName resourceName;
if (s_organizationAsset.TryParseName(assetName, out resourceName))
{
result = FromOrganizationAsset(resourceName[0], resourceName[1]);
return true;
}
if (s_folderAsset.TryParseName(assetName, out resourceName))
{
result = FromFolderAsset(resourceName[0], resourceName[1]);
return true;
}
if (s_projectAsset.TryParseName(assetName, out resourceName))
{
result = FromProjectAsset(resourceName[0], resourceName[1]);
return true;
}
if (allowUnparsed)
{
if (gax::UnparsedResourceName.TryParse(assetName, out gax::UnparsedResourceName unparsedResourceName))
{
result = FromUnparsed(unparsedResourceName);
return true;
}
}
result = null;
return false;
}
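// Illustrative usage sketch (not part of the generated surface; the IDs below are placeholder
// assumptions): the factory, Format and Parse members round-trip the supported patterns.
//
//   AssetName name = AssetName.FromOrganizationAsset("example-org", "example-asset");
//   string formatted = name.ToString();   // "organizations/example-org/assets/example-asset"
//   AssetName parsed = AssetName.Parse(formatted);   // Type == ResourceNameType.OrganizationAsset
//   bool ok = AssetName.TryParse("projects/example-project/assets/a1", out AssetName projectAsset);   // true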
private AssetName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string assetId = null, string folderId = null, string organizationId = null, string projectId = null)
{
Type = type;
UnparsedResource = unparsedResourceName;
AssetId = assetId;
FolderId = folderId;
OrganizationId = organizationId;
ProjectId = projectId;
}
/// <summary>
/// Constructs a new instance of a <see cref="AssetName"/> class from the component parts of pattern
/// <c>organizations/{organization}/assets/{asset}</c>
/// </summary>
/// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="assetId">The <c>Asset</c> ID. Must not be <c>null</c> or empty.</param>
public AssetName(string organizationId, string assetId) : this(ResourceNameType.OrganizationAsset, organizationId: gax::GaxPreconditions.CheckNotNullOrEmpty(organizationId, nameof(organizationId)), assetId: gax::GaxPreconditions.CheckNotNullOrEmpty(assetId, nameof(assetId)))
{
}
/// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
public ResourceNameType Type { get; }
/// <summary>
/// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
/// unparsed resource name.
/// </summary>
public gax::UnparsedResourceName UnparsedResource { get; }
/// <summary>
/// The <c>Asset</c> ID. May be <c>null</c>, depending on which resource name is contained by this instance.
/// </summary>
public string AssetId { get; }
/// <summary>
/// The <c>Folder</c> ID. May be <c>null</c>, depending on which resource name is contained by this instance.
/// </summary>
public string FolderId { get; }
/// <summary>
/// The <c>Organization</c> ID. May be <c>null</c>, depending on which resource name is contained by this
/// instance.
/// </summary>
public string OrganizationId { get; }
/// <summary>
/// The <c>Project</c> ID. May be <c>null</c>, depending on which resource name is contained by this instance.
/// </summary>
public string ProjectId { get; }
/// <summary>Whether this instance contains a resource name with a known pattern.</summary>
public bool IsKnownPattern => Type != ResourceNameType.Unparsed;
/// <summary>The string representation of the resource name.</summary>
/// <returns>The string representation of the resource name.</returns>
public override string ToString()
{
switch (Type)
{
case ResourceNameType.Unparsed: return UnparsedResource.ToString();
case ResourceNameType.OrganizationAsset: return s_organizationAsset.Expand(OrganizationId, AssetId);
case ResourceNameType.FolderAsset: return s_folderAsset.Expand(FolderId, AssetId);
case ResourceNameType.ProjectAsset: return s_projectAsset.Expand(ProjectId, AssetId);
default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
}
}
/// <summary>Returns a hash code for this resource name.</summary>
public override int GetHashCode() => ToString().GetHashCode();
/// <inheritdoc/>
public override bool Equals(object obj) => Equals(obj as AssetName);
/// <inheritdoc/>
public bool Equals(AssetName other) => ToString() == other?.ToString();
/// <inheritdoc/>
public static bool operator ==(AssetName a, AssetName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);
/// <inheritdoc/>
public static bool operator !=(AssetName a, AssetName b) => !(a == b);
}
public partial class Asset
{
/// <summary>
/// <see cref="gcsv::AssetName"/>-typed view over the <see cref="Name"/> resource name property.
/// </summary>
public gcsv::AssetName AssetName
{
get => string.IsNullOrEmpty(Name) ? null : gcsv::AssetName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.RecoveryServices.Backup
{
using System.Linq;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// BackupOperationStatusesOperations operations.
/// </summary>
internal partial class BackupOperationStatusesOperations : Microsoft.Rest.IServiceOperations<RecoveryServicesBackupClient>, IBackupOperationStatusesOperations
{
/// <summary>
/// Initializes a new instance of the BackupOperationStatusesOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
internal BackupOperationStatusesOperations(RecoveryServicesBackupClient client)
{
if (client == null)
{
throw new System.ArgumentNullException("client");
}
this.Client = client;
}
/// <summary>
/// Gets a reference to the RecoveryServicesBackupClient
/// </summary>
public RecoveryServicesBackupClient Client { get; private set; }
/// <summary>
/// Fetches the status of an operation such as triggering a backup or restore.
/// The status can be in progress, completed or failed. You can refer to the
/// OperationStatus enum for all the possible states of an operation. Some
/// operations create jobs. This method returns the list of jobs when the
/// operation is complete.
/// </summary>
/// <param name='vaultName'>
/// The name of the recovery services vault.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group where the recovery services vault is
/// present.
/// </param>
/// <param name='operationId'>
/// OperationID which represents the operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse<OperationStatus>> GetWithHttpMessagesAsync(string vaultName, string resourceGroupName, string operationId, System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
if (vaultName == null)
{
throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "vaultName");
}
if (resourceGroupName == null)
{
throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "resourceGroupName");
}
if (this.Client.SubscriptionId == null)
{
throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
if (operationId == null)
{
throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "operationId");
}
string apiVersion = "2016-06-01";
// Tracing
bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("vaultName", vaultName);
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("operationId", operationId);
tracingParameters.Add("cancellationToken", cancellationToken);
Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "Subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupOperations/{operationId}").ToString();
_url = _url.Replace("{vaultName}", System.Uri.EscapeDataString(vaultName));
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(this.Client.SubscriptionId));
_url = _url.Replace("{operationId}", System.Uri.EscapeDataString(operationId));
System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
System.Net.Http.HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new System.Net.Http.HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (this.Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (this.Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new Microsoft.Rest.Azure.CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new Microsoft.Rest.Azure.CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (Newtonsoft.Json.JsonException)
{
// Ignore the exception
}
ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new Microsoft.Rest.Azure.AzureOperationResponse<OperationStatus>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<OperationStatus>(_responseContent, this.Client.DeserializationSettings);
}
catch (Newtonsoft.Json.JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new Microsoft.Rest.SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
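// Hedged usage sketch (not generated code; the vault and resource-group names are placeholder
// assumptions): callers typically reach this operation through the client's
// BackupOperationStatuses property and read the deserialized body from the response.
//
//   var response = await client.BackupOperationStatuses.GetWithHttpMessagesAsync(
//       "exampleVault", "exampleResourceGroup", operationId, cancellationToken: ct);
//   OperationStatus status = response.Body;   // e.g. InProgress, Completed or Failed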
}
}
| |
using System;
using System.Data;
using System.Configuration;
using System.Collections;
using System.Web;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.WebControls.WebParts;
using System.Web.UI.HtmlControls;
using Rainbow.Framework.Web.UI.WebControls;
using Rainbow.Framework.Security;
using Rainbow.Framework.Data;
using Rainbow.Framework;
using Rainbow.Framework.Users.Data;
using System.Text;
using Rainbow.Framework.Helpers;
using Rainbow.Framework.Settings;
using Rainbow.Framework.Site.Configuration;
using Rainbow.Framework.Providers.RainbowMembershipProvider;
namespace Rainbow.Content.Web.Modules {
public partial class Register2 : PortalModuleControl, IEditUserProfile {
public override Guid GuidID {
get {
return new Guid( "6D601CA1-BEB9-42ac-B559-4020A64A9707" );
}
}
#region Private Fields
private string _redirectPage;
#endregion
#region Properties
/// <summary>
/// Gets a value indicating whether an existing user is being edited (true when a user name was supplied).
/// </summary>
public bool EditMode {
get {
return ( userName.Length != 0 );
}
}
/// <summary>
/// Gets or sets the page to redirect to after registration. Defaults to the current page
/// with TabID, mID and username query string parameters when no explicit value has been set.
/// </summary>
public string RedirectPage {
get {
if ( _redirectPage == null ) {
// changed by Mario Endara <[email protected]> (2004/11/05)
// the ModuleID is required in the URL so that security checking can be applied in the target page
return
Request.Url.Segments[Request.Url.Segments.Length - 1] + "?TabID=" + PageID + "&mID=" + ModuleID +
"&username=" + EmailField.Text;
}
return _redirectPage;
}
set {
_redirectPage = value;
}
}
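// Illustrative example of the default redirect URL built above (page name and values are hypothetical):
//   SomePage.aspx?TabID=12&mID=34&username=user@example.com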
private string userName {
get {
string uid = string.Empty;
if ( Request.Params["userName"] != null )
uid = Request.Params["userName"];
if ( uid.Length == 0 && HttpContext.Current.Items["userName"] != null )
uid = HttpContext.Current.Items["userName"].ToString();
return uid;
}
}
private Guid originalUserID {
get {
if ( ViewState["originalUserID"] != null )
return ( Guid )ViewState["originalUserID"];
else
return Guid.Empty;
}
set {
ViewState["originalUserID"] = value;
}
}
private bool selfEdit {
get {
if ( ViewState["selfEdit"] != null )
return ( bool )ViewState["selfEdit"];
else
return false;
}
set {
ViewState["selfEdit"] = value;
}
}
#endregion
#region Methods
private void BindCountry() {
DataTable dtCountries = new DataTable( "Countries" );
string sql = "Select CountryID, Country From rb_Countries Order By Country";
dtCountries = DBHelper.GetDataSet( sql ).Tables[0];
if ( dtCountries == null || dtCountries.Rows.Count == 0 )
ErrorHandler.Publish( LogLevel.Warn, "No countries returned in BindCountry" );
// TODO : Fix this
//dtCountries.ReadXml(Rainbow.Framework.Settings.Path.ApplicationPhysicalPath + "\\App_GlobalResources\\Countries.xml");
//if (CountriesFilter.Length != 0)
//{
// CountryField.DataSource = CountryInfo.GetCountries(CountriesFilter);
//}
//else
//{
// CountryField.DataSource = CountryInfo.GetCountries(CountryTypes.InhabitedCountries,CountryFields.DisplayName);
//}
CountryField.DataSource = dtCountries;
CountryField.DataBind();
}
private void BindState() {
StateRow.Visible = false;
if ( CountryField.SelectedItem != null ) {
string currentCountry = CountryField.SelectedItem.Value.ToString();
// Added the next line to clear the list: the StateField seems to remember its values
// even when the DataSource is set to null.
//Michel Barneveld [email protected]
StateField.Items.Clear();
// TODO: Check fixing country info comments in BindCountry()
//StateField.DataSource = new CountryInfo(currentCountry).Childs;
//StateField.DataBind();
if ( StateField.Items.Count > 0 ) {
StateRow.Visible = true;
ThisCountryLabel.Text = CountryField.SelectedItem.Text;
}
else {
StateRow.Visible = false;
}
}
}
/// <summary>
/// Save user data
/// </summary>
/// <returns></returns>
public Guid SaveUserData() {
Guid returnID = Guid.Empty;
//if (PasswordField.Text.Length > 0 || ConfirmPasswordField.Text.Length > 0)
//{
// if (PasswordField.Text != ConfirmPasswordField.Text)
// ComparePasswords.IsValid = false;
//}
// Only attempt a login if all form fields on the page are valid
if ( Page.IsValid ) {
UsersDB accountSystem = new UsersDB();
string CountryID = string.Empty;
if ( CountryField.SelectedItem != null )
CountryID = CountryField.SelectedItem.Value;
int StateID = 0;
if ( StateField.SelectedItem != null )
StateID = Convert.ToInt32( StateField.SelectedItem.Value );
try {
if ( userName == string.Empty ) {
// Add New User to Portal User Database
returnID =
accountSystem.AddUser( NameField.Text, CompanyField.Text,
AddressField.Text, CityField.Text, ZipField.Text, CountryID, StateID,
PhoneField.Text, FaxField.Text,
PasswordField.Text, EmailField.Text, SendNewsletter.Checked );
}
else {
// Update user
if ( PasswordField.Text.Equals( string.Empty ) ) {
accountSystem.UpdateUser( originalUserID, NameField.Text, CompanyField.Text, AddressField.Text,
CityField.Text, ZipField.Text, CountryID, StateID, PhoneField.Text, FaxField.Text, EmailField.Text, SendNewsletter.Checked );
}
else {
accountSystem.UpdateUser( originalUserID, NameField.Text, CompanyField.Text, AddressField.Text,
CityField.Text, ZipField.Text, CountryID, StateID, PhoneField.Text, PasswordField.Text,
FaxField.Text, EmailField.Text, SendNewsletter.Checked );
}
}
//If we are here no error occurred
}
catch ( Exception ex ) {
Message.Text = General.GetString( "REGISTRATION_FAILED", "Registration failed", Message ) + " - ";
ErrorHandler.Publish( LogLevel.Error, "Error registering user", ex );
}
}
return returnID;
}
/// <summary>
/// Sends registration information to portal administrator.
/// </summary>
public void SendRegistrationNoticeToAdmin() {
StringBuilder sb = new StringBuilder();
sb.Append( "New User Registration\n" );
sb.Append( "---------------------\n" );
sb.Append( "PORTAL : " + portalSettings.PortalTitle + "\n" );
sb.Append( "Name : " + NameField.Text + "\n" );
sb.Append( "Company : " + CompanyField.Text + "\n" );
sb.Append( "Address : " + AddressField.Text + "\n" );
sb.Append( " " + CityField.Text + ", " );
if ( StateField.SelectedItem != null )
sb.Append( StateField.SelectedItem.Text + " " );
sb.Append( ZipField.Text + "\n" );
sb.Append( " " + CountryField.SelectedItem.Text + "\n" );
sb.Append( " " + PhoneField.Text + "\n" );
sb.Append( "Fax : " + FaxField.Text + "\n" );
sb.Append( "Email : " + EmailField.Text + "\n" );
sb.Append( "Send Newsletter: " + SendNewsletter.Checked.ToString() + "\n" );
MailHelper.SendMailNoAttachment(
portalSettings.CustomSettings["SITESETTINGS_ON_REGISTER_SEND_TO"].ToString(),
portalSettings.CustomSettings["SITESETTINGS_ON_REGISTER_SEND_TO"].ToString(),
"New User Registration for " + portalSettings.PortalAlias,
sb.ToString(),
string.Empty,
string.Empty,
Config.SmtpServer );
}
#endregion
#region Events
/// <summary>
/// Registers the new user, optionally notifies the portal administrator and signs the user in.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
protected void RegisterBtn_Click( object sender, EventArgs e ) {
Guid returnID = SaveUserData();
if ( returnID != Guid.Empty ) {
if ( portalSettings.CustomSettings["SITESETTINGS_ON_REGISTER_SEND_TO"].ToString().Length > 0 )
SendRegistrationNoticeToAdmin();
//Full signon
PortalSecurity.SignOn( EmailField.Text, PasswordField.Text, false, RedirectPage );
}
}
/// <summary>
/// Saves changes to an existing user and redirects back to the configured page.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
protected void SaveChangesBtn_Click( object sender, EventArgs e ) {
Guid returnID = SaveUserData();
if ( returnID == Guid.Empty ) {
if ( selfEdit ) {
// TODO: do we need to signout? if we are hashing pwds, we can't retrieve old pwd
////All should be ok now
////Try logoff user
//PortalSecurity.SignOut( string.Empty, true );
////Logon user again with new settings
//string actualPassword;
//if ( PasswordField.Text.Length != 0 )
// actualPassword = PasswordField.Text;
//else
// throw new NotSupportedException( "Changing passwords is not yet supported" );
////Full signon
//PortalSecurity.SignOn( EmailField.Text, actualPassword, false, RedirectPage );
}
else if ( RedirectPage == string.Empty ) {
// Redirect browser back to home page
PortalSecurity.PortalHome();
}
else {
Response.Redirect( RedirectPage );
}
}
}
/// <summary>
/// Binds the country and state lists and switches the form between register and edit mode.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
protected void Page_Load( object sender, EventArgs e ) {
if ( Page.IsPostBack == false ) {
BindCountry();
BindState();
// Edit check
if ( EditMode ) // Someone requested edit this record
{
//True if the user is editing himself, false if edited by an admin
selfEdit = ( userName == PortalSettings.CurrentUser.Identity.Email );
// Removed by Mario Endara <[email protected]> (2004/11/04)
// if (PortalSecurity.IsInRoles("Admins") || selfEdit)
if ( PortalSecurity.HasEditPermissions( ModuleID ) || PortalSecurity.HasAddPermissions( ModuleID ) ||
selfEdit ) {
//We can edit
// Hide
RequiredPassword.Visible = false;
RequiredConfirm.Visible = false;
EditPasswordRow.Visible = true;
SaveChangesBtn.Visible = true;
RegisterBtn.Visible = false;
// Obtain a single row of event information
UsersDB accountSystem = new UsersDB();
RainbowUser memberUser = accountSystem.GetSingleUser( userName );
try {
originalUserID = memberUser.ProviderUserKey;
NameField.Text = memberUser.Name;
EmailField.Text = memberUser.Email;
CompanyField.Text = memberUser.Company;
AddressField.Text = memberUser.Address;
ZipField.Text = memberUser.Zip;
CityField.Text = memberUser.City;
CountryField.ClearSelection();
if ( CountryField.Items.FindByValue( memberUser.CountryID ) != null )
CountryField.Items.FindByValue( memberUser.CountryID ).Selected = true;
BindState();
StateField.ClearSelection();
if ( StateField.Items.Count > 0 &&
StateField.Items.FindByValue( memberUser.StateID.ToString() ) != null )
StateField.Items.FindByValue( memberUser.StateID.ToString() ).Selected = true;
FaxField.Text = memberUser.Fax;
PhoneField.Text = memberUser.Phone;
SendNewsletter.Checked = memberUser.SendNewsletter;
//stores original password for later check
// originalPassword = memberUser.GetPassword(); NOT YET SUPPORTED
}
catch ( System.ArgumentNullException ) {
// the user does not exist
}
}
else {
//We do not have rights to do it!
PortalSecurity.AccessDeniedEdit();
}
}
else {
BindState();
//No edit
RequiredPassword.Visible = true;
RequiredConfirm.Visible = true;
EditPasswordRow.Visible = false;
SaveChangesBtn.Visible = false;
RegisterBtn.Visible = true;
}
string termsOfService = portalSettings.GetTermsOfService;
//Verify if we have to show conditions
if ( termsOfService.Length != 0 ) {
//Shows conditions
FieldConditions.Text = termsOfService;
ConditionsRow.Visible = true;
}
else {
//Hides conditions
ConditionsRow.Visible = false;
}
}
}
/// <summary>
/// Rebinds the state list when the selected country changes.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
protected void CountryField_SelectedIndexChanged( object sender, EventArgs e ) {
BindState();
}
/// <summary>
/// Returns the user to the referring page without saving.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
protected void cancelButton_Click( object sender, EventArgs e ) {
Page.RedirectBackToReferringPage();
}
/// <summary>
/// Validates that the terms-of-service checkbox has been accepted.
/// </summary>
/// <param name="source"></param>
/// <param name="args"></param>
protected void CheckTermsValidator_ServerValidate( object source, ServerValidateEventArgs args ) {
args.IsValid = Accept.Checked;
}
#endregion
#region Web Form Designer generated code
/// <summary>
/// Raises the Init event.
/// </summary>
/// <param name="e"></param>
protected override void OnInit( EventArgs e ) {
InitializeComponent();
base.OnInit( e );
}
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent() {
this.Load += new EventHandler( this.Page_Load );
}
#endregion
}
}
| |
/****************************************************************************
Tilde
Copyright (c) 2008 Tantalus Media Pty
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using System.Threading;
using Tilde.Framework.Controller;
using Tilde.Framework.View;
namespace Tilde.LuaDebugger
{
[ToolWindowAttribute(Group = "Debug")]
public partial class PendingDownloadsWindow : Tilde.Framework.View.ToolWindow
{
IManager m_manager;
DebugManager m_debugger;
bool m_downloadInProgress = false;
enum FileType
{
Unknown,
LuaScript
}
class ListItemTag
{
public ListItemTag(string name, FileType type)
{
m_name = name;
m_type = type;
m_locked = false;
}
public string m_name;
public FileType m_type;
public bool m_locked;   // set once the item has been queued for download so it cannot be removed
}
public PendingDownloadsWindow(IManager manager)
{
InitializeComponent();
m_manager = manager;
m_debugger = ((LuaPlugin)manager.GetPlugin(typeof(LuaPlugin))).Debugger;
m_debugger.DebuggerConnected += new DebuggerConnectedEventHandler(Debugger_DebuggerConnected);
m_debugger.DebuggerDisconnected += new DebuggerDisconnectedEventHandler(Debugger_DebuggerDisconnected);
pendingFileListView.Enabled = false;
}
public bool DownloadInProgress
{
get { return m_downloadInProgress; }
}
void SetDownloadInProgress(bool inprogress)
{
m_downloadInProgress = inprogress;
progressBarDownloading.Style = inprogress ? ProgressBarStyle.Marquee : ProgressBarStyle.Continuous;
}
void Debugger_DebuggerConnected(DebugManager sender, Target target)
{
pendingFileListView.Items.Clear();
pendingFileListView.Enabled = true;
m_manager.FileWatcher.FileModified += new FileModifiedEventHandler(FileWatcher_FileModified);
}
void Debugger_DebuggerDisconnected(DebugManager sender)
{
m_manager.FileWatcher.FileModified -= new FileModifiedEventHandler(FileWatcher_FileModified);
pendingFileListView.Enabled = false;
pendingFileListView.Items.Clear();
}
private void FileWatcher_FileModified(object sender, string fileName)
{
// Re-post the notification onto the UI thread if it arrived on a worker thread.
if (IsHandleCreated && !IsDisposed && InvokeRequired)
BeginInvoke(new FileModifiedEventHandler(FileWatcher_FileModified), new object[] { sender, fileName });
else
{
// Only queue files that belong to the current project while a target is connected.
if (m_debugger.ConnectionStatus == ConnectionStatus.Connected && m_manager.Project != null && m_manager.Project.FindDocument(fileName) != null)
{
if (fileName.EndsWith(".lua", StringComparison.InvariantCultureIgnoreCase))
{
AddFile(fileName, FileType.LuaScript);
}
}
}
}
private void AddFile(string fileName, FileType type)
{
if(!pendingFileListView.Items.ContainsKey(fileName.ToLowerInvariant()))
{
ListViewItem item = new ListViewItem(fileName);
item.Tag = new ListItemTag(fileName, type);
item.Name = fileName.ToLowerInvariant();
item.SubItems.Add(type.ToString());
item.SubItems.Add("");
pendingFileListView.Items.Add(item);
}
}
private bool DownloadLuaFile(string fileName)
{
try
{
return m_debugger.ConnectedTarget.DownloadFile(fileName);
}
catch (Exception ex)
{
m_manager.MainWindow.Invoke(new MethodInvoker(delegate() { MessageBox.Show(m_manager.MainWindow, String.Format("Could not download file\r\n\r\n{0}\r\n\r\n{1}", fileName, ex.ToString()), "Download Failed"); }));
return false;
}
}
private void PendingDownloadsWindow_FormClosing(object sender, FormClosingEventArgs e)
{
m_manager.FileWatcher.FileModified -= new FileModifiedEventHandler(FileWatcher_FileModified);
}
private void contextMenuStrip_Opening(object sender, CancelEventArgs e)
{
bool selection = pendingFileListView.SelectedItems.Count > 0;
menuItemDownloadNow.Enabled = selection;
menuItemRemove.Enabled = selection;
}
private void menuItemDownloadNow_Click(object sender, EventArgs e)
{
List<ListViewItem> items = new List<ListViewItem>(pendingFileListView.SelectedItems.Count);
foreach (ListViewItem item in pendingFileListView.SelectedItems)
{
((ListItemTag) item.Tag).m_locked = true;
item.SubItems[2].Text = "Queued";
items.Add(item);
}
SetDownloadInProgress(true);
ThreadPool.QueueUserWorkItem(new WaitCallback(delegate(object state) { DownloadThreadProc(items); }));
}
void DownloadThreadProc(List<ListViewItem> items)
{
try
{
foreach (ListViewItem item in items)
{
ListItemTag tag = (ListItemTag)item.Tag;
if (tag.m_type == FileType.LuaScript)
{
this.Invoke(new MethodInvoker(delegate() { item.SubItems[2].Text = "Downloading..."; }));
bool result = DownloadLuaFile(tag.m_name);
if (!result)
break;
}
this.Invoke(new MethodInvoker(delegate() { if(pendingFileListView.Items.Contains(item)) pendingFileListView.Items.Remove(item); }));
}
}
finally
{
this.Invoke(new MethodInvoker(delegate() { SetDownloadInProgress(false); }));
}
}
private void menuItemRemove_Click(object sender, EventArgs e)
{
List<ListViewItem> items = new List<ListViewItem>(pendingFileListView.SelectedItems.Count);
foreach (ListViewItem item in pendingFileListView.SelectedItems)
{
if (!((ListItemTag)item.Tag).m_locked)
items.Add(item);
}
foreach (ListViewItem item in items)
pendingFileListView.Items.Remove(item);
}
private void menuItemOpenInEditor_Click(object sender, EventArgs e)
{
foreach(ListViewItem item in pendingFileListView.Items)
{
m_manager.OpenDocument(((ListItemTag)item.Tag).m_name);
}
}
private void pendingFileListView_ItemActivate(object sender, EventArgs e)
{
m_manager.ShowDocument(((ListItemTag) (pendingFileListView.SelectedItems[0].Tag)).m_name);
}
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
using NPOI.SS.UserModel;
using System;
using Spreadsheet=NPOI.OpenXmlFormats.Spreadsheet;
using NPOI.OpenXmlFormats.Spreadsheet;
using NPOI.OpenXmlFormats.Dml;
using Dml = NPOI.OpenXmlFormats.Dml;
using NPOI.XSSF.Model;
namespace NPOI.XSSF.UserModel
{
/**
* Represents a font used in a workbook.
*
* @author Gisella Bronzetti
*/
public class XSSFFont : IFont
{
/**
* By default, Microsoft Office Excel 2007 uses the Calibri font at font size 11
*/
public const String DEFAULT_FONT_NAME = "Calibri";
/**
* By default, Microsoft Office Excel 2007 uses the Calibri font at font size 11
*/
public const short DEFAULT_FONT_SIZE = 11;
/**
* Default font color is black
* @see NPOI.SS.usermodel.IndexedColors#BLACK
*/
public static short DEFAULT_FONT_COLOR = IndexedColors.Black.Index;
private ThemesTable _themes;
private CT_Font _ctFont;
private short _index;
/**
* Create a new XSSFFont
*
* @param font the underlying CT_Font bean
*/
public XSSFFont(CT_Font font)
{
_ctFont = font;
_index = 0;
}
public XSSFFont(CT_Font font, int index)
{
_ctFont = font;
_index = (short)index;
}
/**
* Create a new XSSFFont. This constructor is intended to be used only by XSSFWorkbook
*/
public XSSFFont()
{
this._ctFont = new CT_Font();
FontName = DEFAULT_FONT_NAME;
FontHeight =DEFAULT_FONT_SIZE;
}
/**
* get the underlying CT_Font font
*/
public CT_Font GetCTFont()
{
return _ctFont;
}
/**
* get a bool value for the boldness to use.
*
* @return bool - bold
*/
public bool IsBold
{
get
{
CT_BooleanProperty bold = _ctFont.SizeOfBArray() == 0 ? null : _ctFont.GetBArray(0);
return (bold != null && bold.val);
}
set
{
if (value)
{
CT_BooleanProperty ctBold = _ctFont.SizeOfBArray() == 0 ? _ctFont.AddNewB() : _ctFont.GetBArray(0);
ctBold.val = value;
}
else
{
_ctFont.SetBArray(null);
}
}
}
/**
* get character-set to use.
*
* @return int - character-set (0-255)
* @see NPOI.SS.usermodel.FontCharset
*/
public short Charset
{
get
{
CT_IntProperty charset = _ctFont.sizeOfCharsetArray() == 0 ? null : _ctFont.GetCharsetArray(0);
int val = charset == null ? FontCharset.ANSI.Value : FontCharset.ValueOf(charset.val).Value;
return (short)val;
}
set
{
// no-op in this version; use SetCharSet(...) below to change the charset
}
}
/**
* get the indexed color value for the font
* References a color defined in IndexedColors.
*
* @return short - indexed color to use
* @see IndexedColors
*/
public short Color
{
get
{
Spreadsheet.CT_Color color = _ctFont.sizeOfColorArray() == 0 ? null : _ctFont.GetColorArray(0);
if (color == null) return IndexedColors.Black.Index;
if (!color.indexedSpecified) return IndexedColors.Black.Index;
long index = color.indexed;
if (index == XSSFFont.DEFAULT_FONT_COLOR)
{
return IndexedColors.Black.Index;
}
else if (index == IndexedColors.Red.Index)
{
return IndexedColors.Red.Index;
}
else
{
return (short)index;
}
}
set
{
Spreadsheet.CT_Color ctColor = _ctFont.sizeOfColorArray() == 0 ? _ctFont.AddNewColor() : _ctFont.GetColorArray(0);
switch (value)
{
case (short)FontColor.Normal:
ctColor.indexed = (uint)(XSSFFont.DEFAULT_FONT_COLOR);
ctColor.indexedSpecified = true;
break;
case (short)FontColor.Red:
ctColor.indexed = (uint)(IndexedColors.Red.Index);
ctColor.indexedSpecified = true;
break;
default:
ctColor.indexed = (uint)(value);
ctColor.indexedSpecified = true;
break;
}
}
}
/**
* get the color value for the font
* References a color defined as Standard Alpha Red Green Blue color value (ARGB).
*
* @return XSSFColor - rgb color to use
*/
public XSSFColor GetXSSFColor()
{
Spreadsheet.CT_Color ctColor = _ctFont.sizeOfColorArray() == 0 ? null : _ctFont.GetColorArray(0);
if (ctColor != null)
{
XSSFColor color = new XSSFColor(ctColor);
if (_themes != null)
{
_themes.InheritFromThemeAsRequired(color);
}
return color;
}
else
{
return null;
}
}
/**
* get the color value for the font
* References a color defined in theme.
*
* @return short - theme defined to use
*/
public short GetThemeColor()
{
Spreadsheet.CT_Color color = _ctFont.sizeOfColorArray() == 0 ? null : _ctFont.GetColorArray(0);
long index = ((color == null) || !color.themeSpecified) ? 0 : color.theme;
return (short)index;
}
/**
* get the font height in point.
*
* @return short - height in point
*/
public double FontHeight
{
get
{
CT_FontSize size = _ctFont.sizeOfSzArray() == 0 ? null : _ctFont.GetSzArray(0);
if (size != null)
{
double fontHeight = size.val;
return (short)(fontHeight * 20);
}
return (short)(DEFAULT_FONT_SIZE * 20);
}
set
{
CT_FontSize fontSize = _ctFont.sizeOfSzArray() == 0 ? _ctFont.AddNewSz() : _ctFont.GetSzArray(0);
fontSize.val = value;
}
}
/**
* @see #GetFontHeight()
*/
public short FontHeightInPoints
{
get
{
return (short)(FontHeight / 20);
}
set
{
CT_FontSize fontSize = _ctFont.sizeOfSzArray() == 0 ? _ctFont.AddNewSz() : _ctFont.GetSzArray(0);
fontSize.val = value;
}
}
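// Unit note: the FontHeight getter above reports the size in 1/20-point units (the default
// 11 pt font reads back as 220), while FontHeightInPoints converts that value back to whole
// points. Both setters write the supplied value directly into the underlying CT_FontSize element.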
/**
* get the name of the font (i.e. Arial)
*
* @return String - a string representing the name of the font to use
*/
public String FontName
{
get
{
CT_FontName name = _ctFont.name;
return name == null ? DEFAULT_FONT_NAME : name.val;
}
set
{
CT_FontName fontName = _ctFont.name==null?_ctFont.AddNewName():_ctFont.name;
fontName.val = value == null ? DEFAULT_FONT_NAME : value;
}
}
/**
* get a bool value that specify whether to use italics or not
*
* @return bool - value for italic
*/
public bool IsItalic
{
get
{
CT_BooleanProperty italic = _ctFont.sizeOfIArray() == 0 ? null : _ctFont.GetIArray(0);
return italic != null && italic.val;
}
set
{
if (value)
{
CT_BooleanProperty bool1 = _ctFont.sizeOfIArray() == 0 ? _ctFont.AddNewI() : _ctFont.GetIArray(0);
bool1.val = value;
}
else
{
_ctFont.SetIArray(null);
}
}
}
/**
* get a bool value that specify whether to use a strikeout horizontal line through the text or not
*
* @return bool - value for strikeout
*/
public bool IsStrikeout
{
get
{
CT_BooleanProperty strike = _ctFont.sizeOfStrikeArray() == 0 ? null : _ctFont.GetStrikeArray(0);
return strike != null && strike.val;
}
set
{
if (!value) _ctFont.SetStrikeArray(null);
else
{
CT_BooleanProperty strike = _ctFont.sizeOfStrikeArray() == 0 ? _ctFont.AddNewStrike() : _ctFont.GetStrikeArray(0);
strike.val = value;
}
}
}
/**
* get normal,super or subscript.
*
* @return short - offset type to use (none,super,sub)
* @see Font#SS_NONE
* @see Font#SS_SUPER
* @see Font#SS_SUB
*/
public FontSuperScript TypeOffset
{
get
{
CT_VerticalAlignFontProperty vAlign = _ctFont.sizeOfVertAlignArray() == 0 ? null : _ctFont.GetVertAlignArray(0);
if (vAlign == null)
{
return FontSuperScript.None;
}
ST_VerticalAlignRun val = vAlign.val;
switch (val)
{
case ST_VerticalAlignRun.baseline:
return FontSuperScript.None;
case ST_VerticalAlignRun.subscript:
return FontSuperScript.Sub;
case ST_VerticalAlignRun.superscript:
return FontSuperScript.Super;
default:
throw new POIXMLException("Wrong offset value " + val);
}
}
set
{
if (value == (short)FontSuperScript.None)
{
_ctFont.SetVertAlignArray(null);
}
else
{
CT_VerticalAlignFontProperty offSetProperty = _ctFont.sizeOfVertAlignArray() == 0 ? _ctFont.AddNewVertAlign() : _ctFont.GetVertAlignArray(0);
switch (value)
{
case FontSuperScript.None:
offSetProperty.val = ST_VerticalAlignRun.baseline;
break;
case FontSuperScript.Sub:
offSetProperty.val = ST_VerticalAlignRun.subscript;
break;
case FontSuperScript.Super:
offSetProperty.val = ST_VerticalAlignRun.superscript;
break;
}
}
}
}
/**
* get type of text underlining to use
*
* @return byte - underlining type
* @see NPOI.SS.usermodel.FontUnderline
*/
public FontUnderlineType Underline
{
get
{
CT_UnderlineProperty underline = _ctFont.sizeOfUArray() == 0 ? null : _ctFont.GetUArray(0);
if (underline != null)
{
FontUnderline val = FontUnderline.ValueOf((int)underline.val);
return (FontUnderlineType)val.ByteValue;
}
return (FontUnderlineType)FontUnderline.NONE.ByteValue;
}
set
{
SetUnderline(value);
}
}
/**
* get the boldness to use
* @return boldweight
* @see #BOLDWEIGHT_NORMAL
* @see #BOLDWEIGHT_BOLD
*/
public short Boldweight
{
get
{
return (IsBold ? (short)FontBoldWeight.Bold : (short)FontBoldWeight.Normal);
}
set
{
this.IsBold = (value == (short)FontBoldWeight.Bold);
}
}
/**
* set character-set to use.
*
* @param charset - charset
* @see FontCharset
*/
public void SetCharSet(byte charSet)
{
int cs = (int)charSet;
if (cs < 0)
{
cs += 256;
}
SetCharSet(cs);
}
/**
* set character-set to use.
*
* @param charset - charset
* @see FontCharset
*/
public void SetCharSet(int charset)
{
FontCharset fontCharset = FontCharset.ValueOf(charset);
if (fontCharset != null)
{
SetCharSet(fontCharset);
}
else
{
throw new POIXMLException("Attention: an attempt to set a type of unknow charset and charSet");
}
}
/**
* set character-set to use.
*
* @param charSet
*/
public void SetCharSet(FontCharset charset)
{
CT_IntProperty charSetProperty;
if (_ctFont.sizeOfCharsetArray() == 0)
{
charSetProperty = _ctFont.AddNewCharset();
}
else
{
charSetProperty = _ctFont.GetCharsetArray(0);
}
// We know that FontCharset only has valid entries in it,
// so we can just set the int value from it
charSetProperty.val = charset.Value;
}
/**
* set the color for the font in Standard Alpha Red Green Blue color value
*
* @param color - color to use
*/
public void SetColor(XSSFColor color)
{
if (color == null) _ctFont.SetColorArray(null);
else
{
Spreadsheet.CT_Color ctColor = _ctFont.sizeOfColorArray() == 0 ? _ctFont.AddNewColor() : _ctFont.GetColorArray(0);
ctColor.SetRgb(color.RGB);
}
}
/**
* set the theme color for the font to use
*
* @param theme - theme color to use
*/
public void SetThemeColor(short theme)
{
Spreadsheet.CT_Color ctColor = _ctFont.sizeOfColorArray() == 0 ? _ctFont.AddNewColor() : _ctFont.GetColorArray(0);
ctColor.theme = (uint)theme;
}
/**
* set an enumeration representing the style of underlining that is used.
* The none style is equivalent to not using underlining at all.
* The possible values for this attribute are defined by the FontUnderline
*
* @param underline - FontUnderline enum value
*/
internal void SetUnderline(FontUnderlineType underline)
{
if (underline == FontUnderlineType.None && _ctFont.sizeOfUArray() > 0)
{
_ctFont.SetUArray(null);
}
else
{
CT_UnderlineProperty ctUnderline = _ctFont.sizeOfUArray() == 0 ? _ctFont.AddNewU() : _ctFont.GetUArray(0);
ST_UnderlineValues val = (ST_UnderlineValues)FontUnderline.ValueOf(underline).Value;
ctUnderline.val = val;
}
}
public override String ToString()
{
return _ctFont.ToString();
}
/**
 * Perform a registration of ourselves
 * to the style table
 */
public long RegisterTo(StylesTable styles)
{
this._themes = styles.GetTheme();
short idx = (short)styles.PutFont(this, true);
this._index = idx;
return idx;
}
/**
* Records the Themes Table that is associated with
* the current font, used when looking up theme
* based colours and properties.
*/
public void SetThemesTable(ThemesTable themes)
{
this._themes = themes;
}
/**
* get the font scheme property.
* is used only in StylesTable to create the default instance of font
*
* @return FontScheme
* @see NPOI.XSSF.model.StylesTable#CreateDefaultFont()
*/
public FontScheme GetScheme()
{
NPOI.OpenXmlFormats.Spreadsheet.CT_FontScheme scheme = _ctFont.sizeOfSchemeArray() == 0 ? null : _ctFont.GetSchemeArray(0);
return scheme == null ? FontScheme.NONE : FontScheme.ValueOf((int)scheme.val);
}
/**
* set font scheme property
*
* @param scheme - FontScheme enum value
* @see FontScheme
*/
public void SetScheme(FontScheme scheme)
{
NPOI.OpenXmlFormats.Spreadsheet.CT_FontScheme ctFontScheme = _ctFont.sizeOfSchemeArray() == 0 ? _ctFont.AddNewScheme() : _ctFont.GetSchemeArray(0);
ST_FontScheme val = (ST_FontScheme)scheme.Value;
ctFontScheme.val = val;
}
/**
* get the font family to use.
*
* @return the font family to use
* @see NPOI.SS.usermodel.FontFamily
*/
public int Family
{
get
{
CT_IntProperty family = _ctFont.sizeOfFamilyArray() == 0 ? _ctFont.AddNewFamily() : _ctFont.GetFamilyArray(0);
return family == null ? FontFamily.NOT_APPLICABLE.Value : FontFamily.ValueOf(family.val).Value;
}
set
{
CT_IntProperty family = _ctFont.sizeOfFamilyArray() == 0 ? _ctFont.AddNewFamily() : _ctFont.GetFamilyArray(0);
family.val = value;
}
}
/**
* set an enumeration representing the font family this font belongs to.
* A font family is a set of fonts having common stroke width and serif characteristics.
*
* @param family font family
* @link #SetFamily(int value)
*/
public void SetFamily(FontFamily family)
{
Family = family.Value;
}
/**
* get the index within the XSSFWorkbook (sequence within the collection of Font objects)
* @return unique index number of the underlying record this Font represents (probably you don't care
* unless you're comparing which one is which)
*/
public short Index
{
get
{
return _index;
}
}
public override int GetHashCode()
{
return _ctFont.ToString().GetHashCode();
}
public override bool Equals(Object o)
{
if (!(o is XSSFFont)) return false;
XSSFFont cf = (XSSFFont)o;
return _ctFont.ToString().Equals(cf.GetCTFont().ToString());
}
}
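// Minimal usage sketch (illustrative only, not part of the NPOI sources; the class and method
// names below are hypothetical): builds a standalone XSSFFont and exercises a few of the
// properties defined above. In a real workbook a font is typically obtained from
// XSSFWorkbook.CreateFont() and registered against the StylesTable (see RegisterTo above).
internal static class XSSFFontUsageSketch
{
    internal static XSSFFont CreateBoldRedFont()
    {
        XSSFFont font = new XSSFFont();       // defaults to Calibri, 11 pt
        font.FontName = "Arial";
        font.FontHeightInPoints = 12;
        font.IsBold = true;
        font.Color = IndexedColors.Red.Index; // indexed colour, see the Color property above
        return font;
    }
}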
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Runtime;
using System.Runtime.CompilerServices;
using System.Runtime.Diagnostics;
using System.Security;
using System.ServiceModel.Channels;
using System.ServiceModel.Diagnostics;
using System.Xml;
namespace System.ServiceModel.Dispatcher
{
internal delegate void MessageRpcProcessor(ref MessageRpc rpc);
internal struct MessageRpc
{
internal readonly ServiceChannel Channel;
internal readonly ChannelHandler channelHandler;
internal readonly object[] Correlation;
internal readonly OperationContext OperationContext;
internal ServiceModelActivity Activity;
internal Guid ResponseActivityId;
internal IAsyncResult AsyncResult;
internal bool CanSendReply;
internal bool SuccessfullySendReply;
internal object[] InputParameters;
internal object[] OutputParameters;
internal object ReturnParameter;
internal bool ParametersDisposed;
internal bool DidDeserializeRequestBody;
internal Exception Error;
internal MessageRpcProcessor ErrorProcessor;
internal ErrorHandlerFaultInfo FaultInfo;
internal bool HasSecurityContext;
internal object Instance;
internal bool MessageRpcOwnsInstanceContextThrottle;
internal MessageRpcProcessor NextProcessor;
internal Collection<MessageHeaderInfo> NotUnderstoodHeaders;
internal DispatchOperationRuntime Operation;
internal Message Request;
internal RequestContext RequestContext;
internal bool RequestContextThrewOnReply;
internal UniqueId RequestID;
internal Message Reply;
internal TimeoutHelper ReplyTimeoutHelper;
internal RequestReplyCorrelator.ReplyToInfo ReplyToInfo;
internal MessageVersion RequestVersion;
internal ServiceSecurityContext SecurityContext;
internal InstanceContext InstanceContext;
internal bool SuccessfullyBoundInstance;
internal bool SuccessfullyIncrementedActivity;
internal bool SuccessfullyLockedInstance;
internal MessageRpcInvokeNotification InvokeNotification;
internal EventTraceActivity EventTraceActivity;
private bool _paused;
private bool _switchedThreads;
private bool _isInstanceContextSingleton;
private SignalGate<IAsyncResult> _invokeContinueGate;
internal MessageRpc(RequestContext requestContext, Message request, DispatchOperationRuntime operation,
ServiceChannel channel, ChannelHandler channelHandler, bool cleanThread,
OperationContext operationContext, InstanceContext instanceContext, EventTraceActivity eventTraceActivity)
{
Fx.Assert((operationContext != null), "System.ServiceModel.Dispatcher.MessageRpc.MessageRpc(), operationContext == null");
Fx.Assert(channelHandler != null, "System.ServiceModel.Dispatcher.MessageRpc.MessageRpc(), channelHandler == null");
this.Activity = null;
this.EventTraceActivity = eventTraceActivity;
this.AsyncResult = null;
this.CanSendReply = true;
this.Channel = channel;
this.channelHandler = channelHandler;
this.Correlation = EmptyArray<object>.Allocate(operation.Parent.CorrelationCount);
this.DidDeserializeRequestBody = false;
this.Error = null;
this.ErrorProcessor = null;
this.FaultInfo = new ErrorHandlerFaultInfo(request.Version.Addressing.DefaultFaultAction);
this.HasSecurityContext = false;
this.Instance = null;
this.MessageRpcOwnsInstanceContextThrottle = false;
this.NextProcessor = null;
this.NotUnderstoodHeaders = null;
this.Operation = operation;
this.OperationContext = operationContext;
_paused = false;
this.ParametersDisposed = false;
this.Request = request;
this.RequestContext = requestContext;
this.RequestContextThrewOnReply = false;
this.SuccessfullySendReply = false;
this.RequestVersion = request.Version;
this.Reply = null;
this.ReplyTimeoutHelper = new TimeoutHelper();
this.SecurityContext = null;
this.InstanceContext = instanceContext;
this.SuccessfullyBoundInstance = false;
this.SuccessfullyIncrementedActivity = false;
this.SuccessfullyLockedInstance = false;
_switchedThreads = !cleanThread;
this.InputParameters = null;
this.OutputParameters = null;
this.ReturnParameter = null;
_isInstanceContextSingleton = false;
_invokeContinueGate = null;
if (!operation.IsOneWay && !operation.Parent.ManualAddressing)
{
this.RequestID = request.Headers.MessageId;
this.ReplyToInfo = new RequestReplyCorrelator.ReplyToInfo(request);
}
else
{
this.RequestID = null;
this.ReplyToInfo = new RequestReplyCorrelator.ReplyToInfo();
}
if (DiagnosticUtility.ShouldUseActivity)
{
this.Activity = TraceUtility.ExtractActivity(this.Request);
}
if (DiagnosticUtility.ShouldUseActivity || TraceUtility.ShouldPropagateActivity)
{
this.ResponseActivityId = ActivityIdHeader.ExtractActivityId(this.Request);
}
else
{
this.ResponseActivityId = Guid.Empty;
}
this.InvokeNotification = new MessageRpcInvokeNotification(this.Activity, this.channelHandler);
if (this.EventTraceActivity == null && FxTrace.Trace.IsEnd2EndActivityTracingEnabled)
{
if (this.Request != null)
{
this.EventTraceActivity = EventTraceActivityHelper.TryExtractActivity(this.Request, true);
}
}
}
internal bool IsPaused
{
get { return _paused; }
}
internal bool SwitchedThreads
{
get { return _switchedThreads; }
}
internal void Abort()
{
this.AbortRequestContext();
this.AbortChannel();
this.AbortInstanceContext();
}
private void AbortRequestContext(RequestContext requestContext)
{
try
{
requestContext.Abort();
}
catch (Exception e)
{
if (Fx.IsFatal(e))
{
throw;
}
this.channelHandler.HandleError(e);
}
}
internal void AbortRequestContext()
{
if (this.OperationContext.RequestContext != null)
{
this.AbortRequestContext(this.OperationContext.RequestContext);
}
if ((this.RequestContext != null) && (this.RequestContext != this.OperationContext.RequestContext))
{
this.AbortRequestContext(this.RequestContext);
}
TraceCallDurationInDispatcherIfNecessary(false);
}
private void TraceCallDurationInDispatcherIfNecessary(bool requestContextWasClosedSuccessfully)
{
}
internal void CloseRequestContext()
{
if (this.OperationContext.RequestContext != null)
{
this.DisposeRequestContext(this.OperationContext.RequestContext);
}
if ((this.RequestContext != null) && (this.RequestContext != this.OperationContext.RequestContext))
{
this.DisposeRequestContext(this.RequestContext);
}
TraceCallDurationInDispatcherIfNecessary(true);
}
private void DisposeRequestContext(RequestContext context)
{
try
{
context.Close();
}
catch (Exception e)
{
if (Fx.IsFatal(e))
{
throw;
}
this.AbortRequestContext(context);
this.channelHandler.HandleError(e);
}
}
internal void AbortChannel()
{
if ((this.Channel != null) && this.Channel.HasSession)
{
try
{
this.Channel.Abort();
}
catch (Exception e)
{
if (Fx.IsFatal(e))
{
throw;
}
this.channelHandler.HandleError(e);
}
}
}
internal void CloseChannel()
{
if ((this.Channel != null) && this.Channel.HasSession)
{
try
{
this.Channel.Close(ChannelHandler.CloseAfterFaultTimeout);
}
catch (Exception e)
{
if (Fx.IsFatal(e))
{
throw;
}
this.channelHandler.HandleError(e);
}
}
}
internal void AbortInstanceContext()
{
if (this.InstanceContext != null && !_isInstanceContextSingleton)
{
try
{
this.InstanceContext.Abort();
}
catch (Exception e)
{
if (Fx.IsFatal(e))
{
throw;
}
this.channelHandler.HandleError(e);
}
}
}
internal void EnsureReceive()
{
using (ServiceModelActivity.BoundOperation(this.Activity))
{
ChannelHandler.Register(this.channelHandler);
}
}
private bool ProcessError(Exception e)
{
MessageRpcProcessor handler = this.ErrorProcessor;
try
{
if (TraceUtility.MessageFlowTracingOnly)
{
TraceUtility.SetActivityId(this.Request.Properties);
if (Guid.Empty == DiagnosticTraceBase.ActivityId)
{
Guid receivedActivityId = TraceUtility.ExtractActivityId(this.Request);
if (Guid.Empty != receivedActivityId)
{
DiagnosticTraceBase.ActivityId = receivedActivityId;
}
}
}
this.Error = e;
if (this.ErrorProcessor != null)
{
this.ErrorProcessor(ref this);
}
return (this.Error == null);
}
#pragma warning suppress 56500 // covered by FxCOP
catch (Exception e2)
{
if (Fx.IsFatal(e2))
{
throw;
}
return ((handler != this.ErrorProcessor) && this.ProcessError(e2));
}
}
internal void DisposeParameters(bool excludeInput)
{
if (this.Operation.DisposeParameters)
{
this.DisposeParametersCore(excludeInput);
}
}
internal void DisposeParametersCore(bool excludeInput)
{
if (!this.ParametersDisposed)
{
if (!excludeInput)
{
this.DisposeParameterList(this.InputParameters);
}
this.DisposeParameterList(this.OutputParameters);
IDisposable disposableParameter = this.ReturnParameter as IDisposable;
if (disposableParameter != null)
{
try
{
disposableParameter.Dispose();
}
catch (Exception e)
{
if (Fx.IsFatal(e))
{
throw;
}
this.channelHandler.HandleError(e);
}
}
this.ParametersDisposed = true;
}
}
void DisposeParameterList(object[] parameters)
{
IDisposable disposableParameter = null;
if (parameters != null)
{
foreach (Object obj in parameters)
{
disposableParameter = obj as IDisposable;
if (disposableParameter != null)
{
try
{
disposableParameter.Dispose();
}
catch (Exception e)
{
if (Fx.IsFatal(e))
{
throw;
}
this.channelHandler.HandleError(e);
}
}
}
}
}
// See notes on UnPause and Resume (mutually exclusive)
// Pausing will Increment the BusyCount for the hosting environment
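// Lifecycle summary: Pause hands out a Wrapper copy of this rpc and increments the busy count.
// Afterwards exactly one of two things happens: UnPause is called on the original rpc to keep
// working on the current thread (the copy is discarded), or Wrapper.Resume is called on the copy
// from a completing thread and processing continues there. Either path decrements the busy count
// exactly once.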
internal IResumeMessageRpc Pause()
{
Wrapper wrapper = new Wrapper(ref this);
_paused = true;
return wrapper;
}
[Fx.Tag.SecurityNote(Critical = "Calls SecurityCritical method ApplyHostingIntegrationContext.",
Safe = "Does call properly and calls Dispose, doesn't leak control of the IDisposable out of the function.")]
[SecuritySafeCritical]
internal bool Process(bool isOperationContextSet)
{
using (ServiceModelActivity.BoundOperation(this.Activity))
{
bool completed = true;
if (this.NextProcessor != null)
{
MessageRpcProcessor processor = this.NextProcessor;
this.NextProcessor = null;
OperationContext originalContext;
OperationContext.Holder contextHolder;
if (!isOperationContextSet)
{
contextHolder = OperationContext.CurrentHolder;
originalContext = contextHolder.Context;
}
else
{
contextHolder = null;
originalContext = null;
}
IncrementBusyCount();
try
{
if (!isOperationContextSet)
{
contextHolder.Context = this.OperationContext;
}
processor(ref this);
if (!_paused)
{
this.OperationContext.SetClientReply(null, false);
}
}
#pragma warning suppress 56500 // covered by FxCOP
catch (Exception e)
{
if (Fx.IsFatal(e))
{
throw;
}
if (!this.ProcessError(e) && this.FaultInfo.Fault == null)
{
this.Abort();
}
}
finally
{
try
{
DecrementBusyCount();
if (!isOperationContextSet)
{
contextHolder.Context = originalContext;
}
completed = !_paused;
if (completed)
{
this.channelHandler.DispatchDone();
this.OperationContext.ClearClientReplyNoThrow();
}
}
#pragma warning suppress 56500 // covered by FxCOP
catch (Exception e)
{
if (Fx.IsFatal(e))
{
throw;
}
throw DiagnosticUtility.ExceptionUtility.ThrowHelperFatal(e.Message, e);
}
}
}
return completed;
}
}
// UnPause is called on the original MessageRpc to continue work on the current thread, and the copy is ignored.
// Since the copy is ignored, Decrement the BusyCount
internal void UnPause()
{
_paused = false;
DecrementBusyCount();
}
internal bool UnlockInvokeContinueGate(out IAsyncResult result)
{
return _invokeContinueGate.Unlock(out result);
}
internal void PrepareInvokeContinueGate()
{
_invokeContinueGate = new SignalGate<IAsyncResult>();
}
private void IncrementBusyCount()
{
}
private void DecrementBusyCount()
{
}
private class CallbackState
{
public ChannelHandler ChannelHandler
{
get;
set;
}
}
internal class Wrapper : IResumeMessageRpc
{
private MessageRpc _rpc;
private bool _alreadyResumed;
internal Wrapper(ref MessageRpc rpc)
{
_rpc = rpc;
if (rpc.NextProcessor == null)
{
Fx.Assert("MessageRpc.Wrapper.Wrapper: (rpc.NextProcessor != null)");
}
_rpc.IncrementBusyCount();
}
public InstanceContext GetMessageInstanceContext()
{
return _rpc.InstanceContext;
}
// Resume is called on the copy on some completing thread, whereupon work continues on that thread.
// BusyCount is Decremented as the copy is now complete
public void Resume(out bool alreadyResumedNoLock)
{
try
{
alreadyResumedNoLock = _alreadyResumed;
_alreadyResumed = true;
_rpc._switchedThreads = true;
if (_rpc.Process(false) && !_rpc.InvokeNotification.DidInvokerEnsurePump)
{
_rpc.EnsureReceive();
}
}
finally
{
_rpc.DecrementBusyCount();
}
}
public void Resume(IAsyncResult result)
{
_rpc.AsyncResult = result;
this.Resume();
}
public void Resume(object instance)
{
_rpc.Instance = instance;
this.Resume();
}
public void Resume()
{
using (ServiceModelActivity.BoundOperation(_rpc.Activity, true))
{
bool alreadyResumedNoLock;
this.Resume(out alreadyResumedNoLock);
if (alreadyResumedNoLock)
{
string text = SR.Format(SR.SFxMultipleCallbackFromAsyncOperation,
String.Empty);
Exception error = new InvalidOperationException(text);
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(error);
}
}
}
public void SignalConditionalResume(IAsyncResult result)
{
if (_rpc._invokeContinueGate.Signal(result))
{
_rpc.AsyncResult = result;
Resume();
}
}
}
}
internal class MessageRpcInvokeNotification : IInvokeReceivedNotification
{
private ServiceModelActivity _activity;
private ChannelHandler _handler;
public MessageRpcInvokeNotification(ServiceModelActivity activity, ChannelHandler handler)
{
_activity = activity;
_handler = handler;
}
public bool DidInvokerEnsurePump { get; set; }
public void NotifyInvokeReceived()
{
using (ServiceModelActivity.BoundOperation(_activity))
{
ChannelHandler.Register(_handler);
}
this.DidInvokerEnsurePump = true;
}
public void NotifyInvokeReceived(RequestContext request)
{
using (ServiceModelActivity.BoundOperation(_activity))
{
ChannelHandler.Register(_handler, request);
}
this.DidInvokerEnsurePump = true;
}
}
}
| |
using System;
using System.Windows;
using DevExpress.Mvvm.UI.Interactivity.Internal;
#if !NETFX_CORE
using System.Windows.Data;
using DevExpress.Mvvm.UI.Native;
#else
using Windows.UI.Xaml;
using Windows.UI.Xaml.Data;
using Windows.ApplicationModel;
using DevExpress.Mvvm.UI.Native;
#if FREE && !NETFX_CORE
using DevExpress.Mvvm.UI.Native;
#else
#endif
#endif
namespace DevExpress.Mvvm.UI.Interactivity {
public abstract class TriggerBase : Behavior {
internal TriggerBase(Type type)
: base(type) {
}
}
#if !NETFX_CORE
public abstract class TriggerBase<T> : TriggerBase where T : DependencyObject {
#else
public abstract class TriggerBase<T> : TriggerBase where T : FrameworkElement {
#endif
protected TriggerBase()
: base(typeof(T)) {
}
protected new T AssociatedObject {
get { return (T)base.AssociatedObject; }
}
}
#if !NETFX_CORE
public class EventTriggerBase<T> : TriggerBase<T> where T : DependencyObject {
#else
public class EventTriggerBase<T> : TriggerBase<T> where T : FrameworkElement {
#endif
#region Static
[IgnoreDependencyPropertiesConsistencyChecker]
public static readonly DependencyProperty EventNameProperty =
DependencyProperty.Register("EventName", typeof(string), typeof(EventTriggerBase<T>),
new PropertyMetadata("Loaded", (d, e) => ((EventTriggerBase<T>)d).OnEventNameChanged((string)e.OldValue, (string)e.NewValue)));
#if !SILVERLIGHT && !NETFX_CORE
[IgnoreDependencyPropertiesConsistencyChecker]
public static readonly DependencyProperty EventProperty =
DependencyProperty.Register("Event", typeof(RoutedEvent), typeof(EventTriggerBase<T>),
new PropertyMetadata(null, (d, e) => ((EventTriggerBase<T>)d).OnEventChanged((RoutedEvent)e.OldValue, (RoutedEvent)e.NewValue)));
#endif
[IgnoreDependencyPropertiesConsistencyChecker]
public static readonly DependencyProperty SourceNameProperty =
DependencyProperty.Register("SourceName", typeof(string), typeof(EventTriggerBase<T>),
new PropertyMetadata(null, (d, e) => ((EventTriggerBase<T>)d).OnSourceNameChanged()));
[IgnoreDependencyPropertiesConsistencyChecker]
public static readonly DependencyProperty SourceObjectProperty =
DependencyProperty.Register("SourceObject", typeof(object), typeof(EventTriggerBase<T>),
new PropertyMetadata(null, (d, e) => ((EventTriggerBase<T>)d).OnSourceObjectChanged()));
static BindingExpression GetBindingExp(DependencyObject d, DependencyProperty dp) {
#if !SILVERLIGHT && !NETFX_CORE
return BindingOperations.GetBindingExpression(d, dp);
#else
if(d is FrameworkElement)
return ((FrameworkElement)d).GetBindingExpression(dp);
return d.ReadLocalValue(dp) as BindingExpression;
#endif
}
static string GetObjectName(object obj) {
FrameworkElement fe = obj as FrameworkElement;
if(fe != null)
return fe.Name;
#if !SILVERLIGHT && !NETFX_CORE
FrameworkContentElement fce = obj as FrameworkContentElement;
if(fce != null)
return fce.Name;
#endif
return null;
}
static DependencyObject FindObject(DependencyObject root, string elementName, bool useVisualTree) {
if(GetObjectName(root) == elementName) return root;
DependencyObject res = null;
FrameworkElement fe = root as FrameworkElement;
FrameworkElement feParent = fe != null ? fe.Parent as FrameworkElement : null;
FrameworkElement el = feParent ?? fe;
#if !SILVERLIGHT && !NETFX_CORE
try {
res = LogicalTreeHelper.FindLogicalNode(el, elementName);
} catch { }
if(res != null) return res;
FrameworkContentElement fce = root as FrameworkContentElement;
res = fce != null ? (DependencyObject)fce.FindName(elementName) : null;
if(res != null) return res;
#endif
res = el != null ? (DependencyObject)el.FindName(elementName) : null;
if(res != null) return res;
if(useVisualTree) {
res = feParent != null ? LayoutHelper.FindElementByName(feParent, elementName) : null;
if(res != null) return res;
res = fe != null ? LayoutHelper.FindElementByName(fe, elementName) : null;
if(res != null) return res;
}
return null;
}
#endregion
internal int RaiseSourceChangedCount = 0;
public string EventName {
get { return (string)GetValue(EventNameProperty); }
set { SetValue(EventNameProperty, value); }
}
#if !SILVERLIGHT && !NETFX_CORE
public RoutedEvent Event {
get { return (RoutedEvent)GetValue(EventProperty); }
set { SetValue(EventProperty, value); }
}
#endif
public string SourceName {
get { return (string)GetValue(SourceNameProperty); }
set { SetValue(SourceNameProperty, value); }
}
public object SourceObject {
get { return (object)GetValue(SourceObjectProperty); }
set { SetValue(SourceObjectProperty, value); }
}
object source;
public object Source {
get {
VerifyRead();
return source;
}
private set {
VerifyRead();
if(source == value) return;
VerifyWrite();
object oldValue = source;
source = value;
NotifyChanged();
OnSourceChanged(oldValue, source);
}
}
void ResolveSource(bool forceResolving, bool? useVisualTree = null) {
if(ViewModelBase.IsInDesignMode) return;
if(!IsAttached) return;
if(Source != null && !forceResolving)
return;
if(SourceObject != null) {
Source = SourceObject;
return;
}
#if !SILVERLIGHT && !NETFX_CORE
bool useVisualTreeCore = useVisualTree ?? false;
#else
bool useVisualTreeCore = useVisualTree ?? true;
#endif
var sourceObjectBinding = GetBindingExp(this, SourceObjectProperty);
if(sourceObjectBinding != null) {
string elementName = null;
if(sourceObjectBinding.ParentBinding != null)
elementName = sourceObjectBinding.ParentBinding.ElementName;
Source = FindObject(AssociatedObject, elementName, useVisualTreeCore);
return;
}
var sourceNameBinding = GetBindingExp(this, SourceNameProperty);
if(!string.IsNullOrEmpty(SourceName) || sourceNameBinding != null) {
Source = FindObject(AssociatedObject, SourceName, useVisualTreeCore);
return;
}
Source = AssociatedObject;
return;
}
void OnSourceNameChanged() {
ResolveSource(true);
}
void OnSourceObjectChanged() {
ResolveSource(true);
}
EventTriggerEventSubscriber EventHelper;
public EventTriggerBase()
: base() {
EventHelper = new EventTriggerEventSubscriber(OnEvent);
}
protected virtual void OnEvent(object sender, object eventArgs) { }
protected virtual void OnSourceChanged(object oldSource, object newSource) {
RaiseSourceChangedCount++;
#if !SILVERLIGHT && !NETFX_CORE
EventHelper.UnsubscribeFromEvent(oldSource, Event);
#endif
EventHelper.UnsubscribeFromEvent(oldSource, EventName);
EventHelper.SubscribeToEvent(newSource, EventName);
#if !SILVERLIGHT && !NETFX_CORE
EventHelper.SubscribeToEvent(newSource, Event);
#endif
}
protected virtual void OnEventNameChanged(string oldEventName, string newEventName) {
#if !SILVERLIGHT && !NETFX_CORE
if(newEventName != null)
Event = null;
#endif
if(!IsAttached) return;
EventHelper.UnsubscribeFromEvent(Source, oldEventName);
EventHelper.SubscribeToEvent(Source, newEventName);
}
#if !SILVERLIGHT && !NETFX_CORE
protected virtual void OnEventChanged(RoutedEvent oldRoutedEvent, RoutedEvent newRoutedEvent) {
if(newRoutedEvent != null)
EventName = null;
if(!IsAttached) return;
EventHelper.UnsubscribeFromEvent(Source, oldRoutedEvent);
EventHelper.SubscribeToEvent(Source, newRoutedEvent);
}
#endif
protected override void OnAttached() {
base.OnAttached();
#if !SILVERLIGHT && !NETFX_CORE
EventHelper.UnsubscribeFromEvent(Source, Event);
#endif
EventHelper.UnsubscribeFromEvent(Source, EventName);
EventHelper.SubscribeToEvent(Source, EventName);
#if !SILVERLIGHT && !NETFX_CORE
EventHelper.SubscribeToEvent(Source, Event);
#endif
ResolveSource(false);
#if !NETFX_CORE
Dispatcher.BeginInvoke(new Action(() => ResolveSource(false)));
#else
#pragma warning disable 4014
if (!DesignMode.DesignModeEnabled)
Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Low, new Windows.UI.Core.DispatchedHandler(() => ResolveSource(false)));
#pragma warning restore 4014
#endif
SubsribeAssociatedObject();
}
protected override void OnDetaching() {
UnsubscribeAssociatedObject();
EventHelper.UnsubscribeFromEvent(Source, EventName);
#if !SILVERLIGHT && !NETFX_CORE
EventHelper.UnsubscribeFromEvent(Source, Event);
#endif
Source = null;
base.OnDetaching();
}
void SubsribeAssociatedObject() {
UnsubscribeAssociatedObject();
FrameworkElement fe = AssociatedObject as FrameworkElement;
if(fe != null) {
#if !SILVERLIGHT && !NETFX_CORE
fe.Initialized += OnAssociatedObjectUpdated;
#endif
fe.LayoutUpdated += AssociatedObjectLayoutUpdated;
fe.SizeChanged += AssociatedObjectSizeChanged;
fe.Loaded += AssociatedObjectLoaded;
return;
}
#if !SILVERLIGHT && !NETFX_CORE
FrameworkContentElement fce = AssociatedObject as FrameworkContentElement;
if(fce != null) {
fce.Initialized += OnAssociatedObjectUpdated;
fce.Loaded += OnAssociatedObjectUpdated;
return;
}
#endif
}
void AssociatedObjectLoaded(object sender, RoutedEventArgs e) {
OnAssociatedObjectUpdated(sender, EventArgs.Empty);
}
void AssociatedObjectSizeChanged(object sender, SizeChangedEventArgs e) {
OnAssociatedObjectUpdated(sender, EventArgs.Empty);
}
void AssociatedObjectLayoutUpdated(object sender, object e) {
OnAssociatedObjectUpdated(sender, EventArgs.Empty);
}
void UnsubscribeAssociatedObject() {
FrameworkElement fe = AssociatedObject as FrameworkElement;
if(fe != null) {
#if !SILVERLIGHT && !NETFX_CORE
fe.Initialized -= OnAssociatedObjectUpdated;
#endif
fe.LayoutUpdated -= AssociatedObjectLayoutUpdated;
fe.SizeChanged -= AssociatedObjectSizeChanged;
fe.Loaded -= AssociatedObjectLoaded;
}
#if !SILVERLIGHT && !NETFX_CORE
FrameworkContentElement fce = AssociatedObject as FrameworkContentElement;
if(fce != null) {
fce.Initialized -= OnAssociatedObjectUpdated;
fce.Loaded -= OnAssociatedObjectUpdated;
return;
}
#endif
}
void OnAssociatedObjectUpdated(object sender, EventArgs e) {
ResolveSource(false);
FrameworkElement associatedObject = AssociatedObject as FrameworkElement;
if(associatedObject == null) return;
if(LayoutHelper.IsElementLoaded(associatedObject) || Source != null) {
UnsubscribeAssociatedObject();
if(Source == null) {
ResolveSource(false, true);
}
}
}
}
#if !NETFX_CORE
public class EventTrigger : EventTriggerBase<DependencyObject> {
#else
public class EventTrigger : EventTriggerBase<FrameworkElement> {
#endif
public EventTrigger() { }
public EventTrigger(string eventName)
: this() {
EventName = eventName;
}
}
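// Illustrative sketch (not part of the DevExpress sources; the class name and OnEvent body are
// hypothetical): a concrete trigger derived from EventTriggerBase<T> that relies only on members
// defined above (the EventName property and the OnEvent virtual) and runs when the resolved
// Source raises the configured event.
public class LoadedNotificationTrigger : EventTriggerBase<FrameworkElement> {
    public LoadedNotificationTrigger()
        : this("Loaded") {
    }
    public LoadedNotificationTrigger(string eventName) {
        EventName = eventName;
    }
    protected override void OnEvent(object sender, object eventArgs) {
        base.OnEvent(sender, eventArgs);
        // Source has been resolved by ResolveSource at this point; react to the event here.
        System.Diagnostics.Debug.WriteLine("Event '" + EventName + "' raised by " + sender);
    }
}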
}
| |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
namespace Google.Apis.Datastore.v1beta1
{
/// <summary>The Datastore Service.</summary>
public class DatastoreService : Google.Apis.Services.BaseClientService
{
/// <summary>The API version.</summary>
public const string Version = "v1beta1";
/// <summary>The discovery version used to generate this service.</summary>
public static Google.Apis.Discovery.DiscoveryVersion DiscoveryVersionUsed = Google.Apis.Discovery.DiscoveryVersion.Version_1_0;
/// <summary>Constructs a new service.</summary>
public DatastoreService() : this(new Google.Apis.Services.BaseClientService.Initializer())
{
}
/// <summary>Constructs a new service.</summary>
/// <param name="initializer">The service initializer.</param>
public DatastoreService(Google.Apis.Services.BaseClientService.Initializer initializer) : base(initializer)
{
Projects = new ProjectsResource(this);
}
/// <summary>Gets the service supported features.</summary>
public override System.Collections.Generic.IList<string> Features => new string[0];
/// <summary>Gets the service name.</summary>
public override string Name => "datastore";
/// <summary>Gets the service base URI.</summary>
public override string BaseUri =>
#if NETSTANDARD1_3 || NETSTANDARD2_0 || NET45
BaseUriOverride ?? "https://datastore.googleapis.com/";
#else
"https://datastore.googleapis.com/";
#endif
/// <summary>Gets the service base path.</summary>
public override string BasePath => "";
#if !NET40
/// <summary>Gets the batch base URI; <c>null</c> if unspecified.</summary>
public override string BatchUri => "https://datastore.googleapis.com/batch";
/// <summary>Gets the batch base path; <c>null</c> if unspecified.</summary>
public override string BatchPath => "batch";
#endif
/// <summary>Available OAuth 2.0 scopes for use with the Cloud Datastore API.</summary>
public class Scope
{
/// <summary>
/// See, edit, configure, and delete your Google Cloud data and see the email address for your Google
/// Account.
/// </summary>
public static string CloudPlatform = "https://www.googleapis.com/auth/cloud-platform";
/// <summary>View and manage your Google Cloud Datastore data</summary>
public static string Datastore = "https://www.googleapis.com/auth/datastore";
}
/// <summary>Available OAuth 2.0 scope constants for use with the Cloud Datastore API.</summary>
public static class ScopeConstants
{
/// <summary>
/// See, edit, configure, and delete your Google Cloud data and see the email address for your Google
/// Account.
/// </summary>
public const string CloudPlatform = "https://www.googleapis.com/auth/cloud-platform";
/// <summary>View and manage your Google Cloud Datastore data</summary>
public const string Datastore = "https://www.googleapis.com/auth/datastore";
}
/// <summary>Gets the Projects resource.</summary>
public virtual ProjectsResource Projects { get; }
}
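    /// <summary>
    /// Construction sketch (illustrative only, not generated code; the class name and the
    /// ApiKey / ApplicationName values are placeholders): shows how the service above is
    /// typically created with a BaseClientService.Initializer.
    /// </summary>
    internal static class DatastoreServiceUsageSketch
    {
        internal static DatastoreService Create()
        {
            return new DatastoreService(new Google.Apis.Services.BaseClientService.Initializer
            {
                ApiKey = "YOUR_API_KEY",
                ApplicationName = "datastore-v1beta1-sample",
            });
        }
    }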
/// <summary>A base abstract class for Datastore requests.</summary>
public abstract class DatastoreBaseServiceRequest<TResponse> : Google.Apis.Requests.ClientServiceRequest<TResponse>
{
/// <summary>Constructs a new DatastoreBaseServiceRequest instance.</summary>
protected DatastoreBaseServiceRequest(Google.Apis.Services.IClientService service) : base(service)
{
}
/// <summary>V1 error format.</summary>
[Google.Apis.Util.RequestParameterAttribute("$.xgafv", Google.Apis.Util.RequestParameterType.Query)]
public virtual System.Nullable<XgafvEnum> Xgafv { get; set; }
/// <summary>V1 error format.</summary>
public enum XgafvEnum
{
/// <summary>v1 error format</summary>
[Google.Apis.Util.StringValueAttribute("1")]
Value1 = 0,
/// <summary>v2 error format</summary>
[Google.Apis.Util.StringValueAttribute("2")]
Value2 = 1,
}
/// <summary>OAuth access token.</summary>
[Google.Apis.Util.RequestParameterAttribute("access_token", Google.Apis.Util.RequestParameterType.Query)]
public virtual string AccessToken { get; set; }
/// <summary>Data format for response.</summary>
[Google.Apis.Util.RequestParameterAttribute("alt", Google.Apis.Util.RequestParameterType.Query)]
public virtual System.Nullable<AltEnum> Alt { get; set; }
/// <summary>Data format for response.</summary>
public enum AltEnum
{
/// <summary>Responses with Content-Type of application/json</summary>
[Google.Apis.Util.StringValueAttribute("json")]
Json = 0,
/// <summary>Media download with context-dependent Content-Type</summary>
[Google.Apis.Util.StringValueAttribute("media")]
Media = 1,
/// <summary>Responses with Content-Type of application/x-protobuf</summary>
[Google.Apis.Util.StringValueAttribute("proto")]
Proto = 2,
}
/// <summary>JSONP</summary>
[Google.Apis.Util.RequestParameterAttribute("callback", Google.Apis.Util.RequestParameterType.Query)]
public virtual string Callback { get; set; }
/// <summary>Selector specifying which fields to include in a partial response.</summary>
[Google.Apis.Util.RequestParameterAttribute("fields", Google.Apis.Util.RequestParameterType.Query)]
public virtual string Fields { get; set; }
/// <summary>
/// API key. Your API key identifies your project and provides you with API access, quota, and reports. Required
/// unless you provide an OAuth 2.0 token.
/// </summary>
[Google.Apis.Util.RequestParameterAttribute("key", Google.Apis.Util.RequestParameterType.Query)]
public virtual string Key { get; set; }
/// <summary>OAuth 2.0 token for the current user.</summary>
[Google.Apis.Util.RequestParameterAttribute("oauth_token", Google.Apis.Util.RequestParameterType.Query)]
public virtual string OauthToken { get; set; }
/// <summary>Returns response with indentations and line breaks.</summary>
[Google.Apis.Util.RequestParameterAttribute("prettyPrint", Google.Apis.Util.RequestParameterType.Query)]
public virtual System.Nullable<bool> PrettyPrint { get; set; }
/// <summary>
/// Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a
/// user, but should not exceed 40 characters.
/// </summary>
[Google.Apis.Util.RequestParameterAttribute("quotaUser", Google.Apis.Util.RequestParameterType.Query)]
public virtual string QuotaUser { get; set; }
/// <summary>Legacy upload protocol for media (e.g. "media", "multipart").</summary>
[Google.Apis.Util.RequestParameterAttribute("uploadType", Google.Apis.Util.RequestParameterType.Query)]
public virtual string UploadType { get; set; }
/// <summary>Upload protocol for media (e.g. "raw", "multipart").</summary>
[Google.Apis.Util.RequestParameterAttribute("upload_protocol", Google.Apis.Util.RequestParameterType.Query)]
public virtual string UploadProtocol { get; set; }
/// <summary>Initializes Datastore parameter list.</summary>
protected override void InitParameters()
{
base.InitParameters();
RequestParameters.Add("$.xgafv", new Google.Apis.Discovery.Parameter
{
Name = "$.xgafv",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("access_token", new Google.Apis.Discovery.Parameter
{
Name = "access_token",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("alt", new Google.Apis.Discovery.Parameter
{
Name = "alt",
IsRequired = false,
ParameterType = "query",
DefaultValue = "json",
Pattern = null,
});
RequestParameters.Add("callback", new Google.Apis.Discovery.Parameter
{
Name = "callback",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("fields", new Google.Apis.Discovery.Parameter
{
Name = "fields",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("key", new Google.Apis.Discovery.Parameter
{
Name = "key",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("oauth_token", new Google.Apis.Discovery.Parameter
{
Name = "oauth_token",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("prettyPrint", new Google.Apis.Discovery.Parameter
{
Name = "prettyPrint",
IsRequired = false,
ParameterType = "query",
DefaultValue = "true",
Pattern = null,
});
RequestParameters.Add("quotaUser", new Google.Apis.Discovery.Parameter
{
Name = "quotaUser",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("uploadType", new Google.Apis.Discovery.Parameter
{
Name = "uploadType",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("upload_protocol", new Google.Apis.Discovery.Parameter
{
Name = "upload_protocol",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
}
}
/// <summary>The "projects" collection of methods.</summary>
public class ProjectsResource
{
private const string Resource = "projects";
/// <summary>The service which this resource belongs to.</summary>
private readonly Google.Apis.Services.IClientService service;
/// <summary>Constructs a new resource.</summary>
public ProjectsResource(Google.Apis.Services.IClientService service)
{
this.service = service;
}
/// <summary>
/// Exports a copy of all or a subset of entities from Google Cloud Datastore to another storage system, such as
/// Google Cloud Storage. Recent updates to entities may not be reflected in the export. The export occurs in
/// the background and its progress can be monitored and managed via the Operation resource that is created. The
/// output of an export may only be used once the associated operation is done. If an export operation is
/// cancelled before completion it may leave partial data behind in Google Cloud Storage.
/// </summary>
/// <param name="body">The body of the request.</param>
/// <param name="projectId">Project ID against which to make the request.</param>
public virtual ExportRequest Export(Google.Apis.Datastore.v1beta1.Data.GoogleDatastoreAdminV1beta1ExportEntitiesRequest body, string projectId)
{
return new ExportRequest(service, body, projectId);
}
/// <summary>
/// Exports a copy of all or a subset of entities from Google Cloud Datastore to another storage system, such as
/// Google Cloud Storage. Recent updates to entities may not be reflected in the export. The export occurs in
/// the background and its progress can be monitored and managed via the Operation resource that is created. The
/// output of an export may only be used once the associated operation is done. If an export operation is
/// cancelled before completion it may leave partial data behind in Google Cloud Storage.
/// </summary>
public class ExportRequest : DatastoreBaseServiceRequest<Google.Apis.Datastore.v1beta1.Data.GoogleLongrunningOperation>
{
/// <summary>Constructs a new Export request.</summary>
public ExportRequest(Google.Apis.Services.IClientService service, Google.Apis.Datastore.v1beta1.Data.GoogleDatastoreAdminV1beta1ExportEntitiesRequest body, string projectId) : base(service)
{
ProjectId = projectId;
Body = body;
InitParameters();
}
/// <summary>Project ID against which to make the request.</summary>
[Google.Apis.Util.RequestParameterAttribute("projectId", Google.Apis.Util.RequestParameterType.Path)]
public virtual string ProjectId { get; private set; }
/// <summary>Gets or sets the body of this request.</summary>
Google.Apis.Datastore.v1beta1.Data.GoogleDatastoreAdminV1beta1ExportEntitiesRequest Body { get; set; }
/// <summary>Returns the body of the request.</summary>
protected override object GetBody() => Body;
/// <summary>Gets the method name.</summary>
public override string MethodName => "export";
/// <summary>Gets the HTTP method.</summary>
public override string HttpMethod => "POST";
/// <summary>Gets the REST path.</summary>
public override string RestPath => "v1beta1/projects/{projectId}:export";
/// <summary>Initializes Export parameter list.</summary>
protected override void InitParameters()
{
base.InitParameters();
RequestParameters.Add("projectId", new Google.Apis.Discovery.Parameter
{
Name = "projectId",
IsRequired = true,
ParameterType = "path",
DefaultValue = null,
Pattern = null,
});
}
}
/// <summary>
/// Imports entities into Google Cloud Datastore. Existing entities with the same key are overwritten. The
/// import occurs in the background and its progress can be monitored and managed via the Operation resource
/// that is created. If an ImportEntities operation is cancelled, it is possible that a subset of the data has
/// already been imported to Cloud Datastore.
/// </summary>
/// <param name="body">The body of the request.</param>
/// <param name="projectId">Project ID against which to make the request.</param>
public virtual ImportRequest Import(Google.Apis.Datastore.v1beta1.Data.GoogleDatastoreAdminV1beta1ImportEntitiesRequest body, string projectId)
{
return new ImportRequest(service, body, projectId);
}
/// <summary>
/// Imports entities into Google Cloud Datastore. Existing entities with the same key are overwritten. The
/// import occurs in the background and its progress can be monitored and managed via the Operation resource
/// that is created. If an ImportEntities operation is cancelled, it is possible that a subset of the data has
/// already been imported to Cloud Datastore.
/// </summary>
public class ImportRequest : DatastoreBaseServiceRequest<Google.Apis.Datastore.v1beta1.Data.GoogleLongrunningOperation>
{
/// <summary>Constructs a new Import request.</summary>
public ImportRequest(Google.Apis.Services.IClientService service, Google.Apis.Datastore.v1beta1.Data.GoogleDatastoreAdminV1beta1ImportEntitiesRequest body, string projectId) : base(service)
{
ProjectId = projectId;
Body = body;
InitParameters();
}
/// <summary>Project ID against which to make the request.</summary>
[Google.Apis.Util.RequestParameterAttribute("projectId", Google.Apis.Util.RequestParameterType.Path)]
public virtual string ProjectId { get; private set; }
/// <summary>Gets or sets the body of this request.</summary>
Google.Apis.Datastore.v1beta1.Data.GoogleDatastoreAdminV1beta1ImportEntitiesRequest Body { get; set; }
/// <summary>Returns the body of the request.</summary>
protected override object GetBody() => Body;
/// <summary>Gets the method name.</summary>
public override string MethodName => "import";
/// <summary>Gets the HTTP method.</summary>
public override string HttpMethod => "POST";
/// <summary>Gets the REST path.</summary>
public override string RestPath => "v1beta1/projects/{projectId}:import";
/// <summary>Initializes Import parameter list.</summary>
protected override void InitParameters()
{
base.InitParameters();
RequestParameters.Add("projectId", new Google.Apis.Discovery.Parameter
{
Name = "projectId",
IsRequired = true,
ParameterType = "path",
DefaultValue = null,
Pattern = null,
});
}
}
}
}
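// Illustrative usage sketch (not part of the generated client): one way the service surface
// above might be used to start an entity export and read back the resulting long-running
// operation. The namespace, class and method names below, the application name, and the
// Cloud Storage bucket are assumptions for illustration only; credential setup (an OAuth 2.0
// HttpClientInitializer authorized for Scope.Datastore or Scope.CloudPlatform) is omitted and
// would be required for a real call.
namespace Google.Apis.Datastore.v1beta1.UsageSketch
{
    public static class ExportUsageExample
    {
        public static string StartExport(string projectId)
        {
            // Build the service client; a real application must also supply OAuth credentials.
            var service = new Google.Apis.Datastore.v1beta1.DatastoreService(
                new Google.Apis.Services.BaseClientService.Initializer
                {
                    ApplicationName = "datastore-export-sketch", // placeholder application name
                });
            // Describe what to export and where the output should go (placeholder bucket).
            var body = new Google.Apis.Datastore.v1beta1.Data.GoogleDatastoreAdminV1beta1ExportEntitiesRequest
            {
                OutputUrlPrefix = "gs://your-bucket/exports",
                EntityFilter = new Google.Apis.Datastore.v1beta1.Data.GoogleDatastoreAdminV1beta1EntityFilter
                {
                    Kinds = new System.Collections.Generic.List<string>(),        // empty = all kinds
                    NamespaceIds = new System.Collections.Generic.List<string>(), // empty = all namespaces
                },
            };
            // Issue the request; the response is a google.longrunning.Operation resource whose
            // name can be used to monitor the export's progress.
            Google.Apis.Datastore.v1beta1.Data.GoogleLongrunningOperation operation =
                service.Projects.Export(body, projectId).Execute();
            return operation.Name;
        }
    }
}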
namespace Google.Apis.Datastore.v1beta1.Data
{
/// <summary>Metadata common to all Datastore Admin operations.</summary>
public class GoogleDatastoreAdminV1CommonMetadata : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>The time the operation ended, either successfully or otherwise.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("endTime")]
public virtual object EndTime { get; set; }
/// <summary>
/// The client-assigned labels which were provided when the operation was created. May also include additional
/// labels.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("labels")]
public virtual System.Collections.Generic.IDictionary<string, string> Labels { get; set; }
/// <summary>The type of the operation. Can be used as a filter in ListOperationsRequest.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("operationType")]
public virtual string OperationType { get; set; }
/// <summary>The time that work began on the operation.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("startTime")]
public virtual object StartTime { get; set; }
/// <summary>The current state of the Operation.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("state")]
public virtual string State { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>
/// Metadata for Datastore to Firestore migration operations. The DatastoreFirestoreMigration operation is not
/// started by the end-user via an explicit "creation" method. This is an intentional deviation from the LRO design
/// pattern. This singleton resource can be accessed at:
/// "projects/{project_id}/operations/datastore-firestore-migration"
/// </summary>
public class GoogleDatastoreAdminV1DatastoreFirestoreMigrationMetadata : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>The current state of migration from Cloud Datastore to Cloud Firestore in Datastore mode.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("migrationState")]
public virtual string MigrationState { get; set; }
/// <summary>The current step of migration from Cloud Datastore to Cloud Firestore in Datastore mode.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("migrationStep")]
public virtual string MigrationStep { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>
/// Identifies a subset of entities in a project. This is specified as combinations of kinds and namespaces (either
/// or both of which may be all, as described in the following examples). Example usage: Entire project: kinds=[],
/// namespace_ids=[] Kinds Foo and Bar in all namespaces: kinds=['Foo', 'Bar'], namespace_ids=[] Kinds Foo and Bar
/// only in the default namespace: kinds=['Foo', 'Bar'], namespace_ids=[''] Kinds Foo and Bar in both the default
/// and Baz namespaces: kinds=['Foo', 'Bar'], namespace_ids=['', 'Baz'] The entire Baz namespace: kinds=[],
/// namespace_ids=['Baz']
/// </summary>
public class GoogleDatastoreAdminV1EntityFilter : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>If empty, then this represents all kinds.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("kinds")]
public virtual System.Collections.Generic.IList<string> Kinds { get; set; }
/// <summary>
/// An empty list represents all namespaces. This is the preferred usage for projects that don't use namespaces.
/// An empty string element represents the default namespace. This should be used if the project has data in
/// non-default namespaces, but doesn't want to include them. Each namespace in this list must be unique.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("namespaceIds")]
public virtual System.Collections.Generic.IList<string> NamespaceIds { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>Metadata for ExportEntities operations.</summary>
public class GoogleDatastoreAdminV1ExportEntitiesMetadata : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>Metadata common to all Datastore Admin operations.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("common")]
public virtual GoogleDatastoreAdminV1CommonMetadata Common { get; set; }
/// <summary>Description of which entities are being exported.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("entityFilter")]
public virtual GoogleDatastoreAdminV1EntityFilter EntityFilter { get; set; }
/// <summary>
/// Location for the export metadata and data files. This will be the same value as the
/// google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix field. The final output location is
/// provided in google.datastore.admin.v1.ExportEntitiesResponse.output_url.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("outputUrlPrefix")]
public virtual string OutputUrlPrefix { get; set; }
/// <summary>An estimate of the number of bytes processed.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("progressBytes")]
public virtual GoogleDatastoreAdminV1Progress ProgressBytes { get; set; }
/// <summary>An estimate of the number of entities processed.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("progressEntities")]
public virtual GoogleDatastoreAdminV1Progress ProgressEntities { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>The response for google.datastore.admin.v1.DatastoreAdmin.ExportEntities.</summary>
public class GoogleDatastoreAdminV1ExportEntitiesResponse : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>
/// Location of the output metadata file. This can be used to begin an import into Cloud Datastore (this project
/// or another project). See google.datastore.admin.v1.ImportEntitiesRequest.input_url. Only present if the
/// operation completed successfully.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("outputUrl")]
public virtual string OutputUrl { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>Metadata for ImportEntities operations.</summary>
public class GoogleDatastoreAdminV1ImportEntitiesMetadata : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>Metadata common to all Datastore Admin operations.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("common")]
public virtual GoogleDatastoreAdminV1CommonMetadata Common { get; set; }
/// <summary>Description of which entities are being imported.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("entityFilter")]
public virtual GoogleDatastoreAdminV1EntityFilter EntityFilter { get; set; }
/// <summary>
/// The location of the import metadata file. This will be the same value as the
/// google.datastore.admin.v1.ExportEntitiesResponse.output_url field.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("inputUrl")]
public virtual string InputUrl { get; set; }
/// <summary>An estimate of the number of bytes processed.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("progressBytes")]
public virtual GoogleDatastoreAdminV1Progress ProgressBytes { get; set; }
/// <summary>An estimate of the number of entities processed.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("progressEntities")]
public virtual GoogleDatastoreAdminV1Progress ProgressEntities { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>Metadata for Index operations.</summary>
public class GoogleDatastoreAdminV1IndexOperationMetadata : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>Metadata common to all Datastore Admin operations.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("common")]
public virtual GoogleDatastoreAdminV1CommonMetadata Common { get; set; }
/// <summary>The index resource ID that this operation is acting on.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("indexId")]
public virtual string IndexId { get; set; }
/// <summary>An estimate of the number of entities processed.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("progressEntities")]
public virtual GoogleDatastoreAdminV1Progress ProgressEntities { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>
/// An event signifying the start of a new step in a [migration from Cloud Datastore to Cloud Firestore in Datastore
/// mode](https://cloud.google.com/datastore/docs/upgrade-to-firestore).
/// </summary>
public class GoogleDatastoreAdminV1MigrationProgressEvent : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>Details for the `PREPARE` step.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("prepareStepDetails")]
public virtual GoogleDatastoreAdminV1PrepareStepDetails PrepareStepDetails { get; set; }
/// <summary>Details for the `REDIRECT_WRITES` step.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("redirectWritesStepDetails")]
public virtual GoogleDatastoreAdminV1RedirectWritesStepDetails RedirectWritesStepDetails { get; set; }
/// <summary>
/// The step that is starting. An event with step set to `START` indicates that the migration has been reverted
/// back to the initial pre-migration state.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("step")]
public virtual string Step { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>
/// An event signifying a change in state of a [migration from Cloud Datastore to Cloud Firestore in Datastore
/// mode](https://cloud.google.com/datastore/docs/upgrade-to-firestore).
/// </summary>
public class GoogleDatastoreAdminV1MigrationStateEvent : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>The new state of the migration.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("state")]
public virtual string State { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>Details for the `PREPARE` step.</summary>
public class GoogleDatastoreAdminV1PrepareStepDetails : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>The concurrency mode this database will use when it reaches the `REDIRECT_WRITES` step.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("concurrencyMode")]
public virtual string ConcurrencyMode { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>Measures the progress of a particular metric.</summary>
public class GoogleDatastoreAdminV1Progress : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>
/// The amount of work that has been completed. Note that this may be greater than work_estimated.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("workCompleted")]
public virtual System.Nullable<long> WorkCompleted { get; set; }
/// <summary>
/// An estimate of how much work needs to be performed. May be zero if the work estimate is unavailable.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("workEstimated")]
public virtual System.Nullable<long> WorkEstimated { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>Details for the `REDIRECT_WRITES` step.</summary>
public class GoogleDatastoreAdminV1RedirectWritesStepDetails : Google.Apis.Requests.IDirectResponseSchema
{
        /// <summary>The concurrency mode for this database.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("concurrencyMode")]
public virtual string ConcurrencyMode { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>Metadata common to all Datastore Admin operations.</summary>
public class GoogleDatastoreAdminV1beta1CommonMetadata : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>The time the operation ended, either successfully or otherwise.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("endTime")]
public virtual object EndTime { get; set; }
/// <summary>
/// The client-assigned labels which were provided when the operation was created. May also include additional
/// labels.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("labels")]
public virtual System.Collections.Generic.IDictionary<string, string> Labels { get; set; }
/// <summary>The type of the operation. Can be used as a filter in ListOperationsRequest.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("operationType")]
public virtual string OperationType { get; set; }
/// <summary>The time that work began on the operation.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("startTime")]
public virtual object StartTime { get; set; }
/// <summary>The current state of the Operation.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("state")]
public virtual string State { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>
/// Identifies a subset of entities in a project. This is specified as combinations of kinds and namespaces (either
/// or both of which may be all, as described in the following examples). Example usage: Entire project: kinds=[],
/// namespace_ids=[] Kinds Foo and Bar in all namespaces: kinds=['Foo', 'Bar'], namespace_ids=[] Kinds Foo and Bar
/// only in the default namespace: kinds=['Foo', 'Bar'], namespace_ids=[''] Kinds Foo and Bar in both the default
/// and Baz namespaces: kinds=['Foo', 'Bar'], namespace_ids=['', 'Baz'] The entire Baz namespace: kinds=[],
/// namespace_ids=['Baz']
/// </summary>
public class GoogleDatastoreAdminV1beta1EntityFilter : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>If empty, then this represents all kinds.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("kinds")]
public virtual System.Collections.Generic.IList<string> Kinds { get; set; }
/// <summary>
/// An empty list represents all namespaces. This is the preferred usage for projects that don't use namespaces.
/// An empty string element represents the default namespace. This should be used if the project has data in
/// non-default namespaces, but doesn't want to include them. Each namespace in this list must be unique.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("namespaceIds")]
public virtual System.Collections.Generic.IList<string> NamespaceIds { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
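    // Illustrative sketch (not generated code): the kind/namespace combinations described in the
    // summary above, expressed as filter instances. The class and method names are hypothetical.
    public static class EntityFilterExamples
    {
        /// <summary>Entire project: kinds=[], namespace_ids=[].</summary>
        public static GoogleDatastoreAdminV1beta1EntityFilter EntireProject()
        {
            return new GoogleDatastoreAdminV1beta1EntityFilter
            {
                Kinds = new System.Collections.Generic.List<string>(),
                NamespaceIds = new System.Collections.Generic.List<string>(),
            };
        }
        /// <summary>Kinds Foo and Bar in the default namespace only.</summary>
        public static GoogleDatastoreAdminV1beta1EntityFilter FooAndBarInDefaultNamespace()
        {
            return new GoogleDatastoreAdminV1beta1EntityFilter
            {
                Kinds = new System.Collections.Generic.List<string> { "Foo", "Bar" },
                NamespaceIds = new System.Collections.Generic.List<string> { "" }, // "" = default namespace
            };
        }
    }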
/// <summary>Metadata for ExportEntities operations.</summary>
public class GoogleDatastoreAdminV1beta1ExportEntitiesMetadata : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>Metadata common to all Datastore Admin operations.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("common")]
public virtual GoogleDatastoreAdminV1beta1CommonMetadata Common { get; set; }
/// <summary>Description of which entities are being exported.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("entityFilter")]
public virtual GoogleDatastoreAdminV1beta1EntityFilter EntityFilter { get; set; }
/// <summary>
/// Location for the export metadata and data files. This will be the same value as the
/// google.datastore.admin.v1beta1.ExportEntitiesRequest.output_url_prefix field. The final output location is
/// provided in google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("outputUrlPrefix")]
public virtual string OutputUrlPrefix { get; set; }
/// <summary>An estimate of the number of bytes processed.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("progressBytes")]
public virtual GoogleDatastoreAdminV1beta1Progress ProgressBytes { get; set; }
/// <summary>An estimate of the number of entities processed.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("progressEntities")]
public virtual GoogleDatastoreAdminV1beta1Progress ProgressEntities { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>The request for google.datastore.admin.v1beta1.DatastoreAdmin.ExportEntities.</summary>
public class GoogleDatastoreAdminV1beta1ExportEntitiesRequest : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>Description of what data from the project is included in the export.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("entityFilter")]
public virtual GoogleDatastoreAdminV1beta1EntityFilter EntityFilter { get; set; }
/// <summary>Client-assigned labels.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("labels")]
public virtual System.Collections.Generic.IDictionary<string, string> Labels { get; set; }
/// <summary>
/// Location for the export metadata and data files. The full resource URL of the external storage location.
/// Currently, only Google Cloud Storage is supported. So output_url_prefix should be of the form:
/// `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the name of the Cloud Storage bucket and
/// `NAMESPACE_PATH` is an optional Cloud Storage namespace path (this is not a Cloud Datastore namespace). For
/// more information about Cloud Storage namespace paths, see [Object name
/// considerations](https://cloud.google.com/storage/docs/naming#object-considerations). The resulting files
/// will be nested deeper than the specified URL prefix. The final output URL will be provided in the
/// google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url field. That value should be used for
/// subsequent ImportEntities operations. By nesting the data files deeper, the same Cloud Storage bucket can be
/// used in multiple ExportEntities operations without conflict.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("outputUrlPrefix")]
public virtual string OutputUrlPrefix { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>The response for google.datastore.admin.v1beta1.DatastoreAdmin.ExportEntities.</summary>
public class GoogleDatastoreAdminV1beta1ExportEntitiesResponse : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>
/// Location of the output metadata file. This can be used to begin an import into Cloud Datastore (this project
/// or another project). See google.datastore.admin.v1beta1.ImportEntitiesRequest.input_url. Only present if the
/// operation completed successfully.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("outputUrl")]
public virtual string OutputUrl { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>Metadata for ImportEntities operations.</summary>
public class GoogleDatastoreAdminV1beta1ImportEntitiesMetadata : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>Metadata common to all Datastore Admin operations.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("common")]
public virtual GoogleDatastoreAdminV1beta1CommonMetadata Common { get; set; }
/// <summary>Description of which entities are being imported.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("entityFilter")]
public virtual GoogleDatastoreAdminV1beta1EntityFilter EntityFilter { get; set; }
/// <summary>
/// The location of the import metadata file. This will be the same value as the
/// google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url field.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("inputUrl")]
public virtual string InputUrl { get; set; }
/// <summary>An estimate of the number of bytes processed.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("progressBytes")]
public virtual GoogleDatastoreAdminV1beta1Progress ProgressBytes { get; set; }
/// <summary>An estimate of the number of entities processed.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("progressEntities")]
public virtual GoogleDatastoreAdminV1beta1Progress ProgressEntities { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>The request for google.datastore.admin.v1beta1.DatastoreAdmin.ImportEntities.</summary>
public class GoogleDatastoreAdminV1beta1ImportEntitiesRequest : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>
/// Optionally specify which kinds/namespaces are to be imported. If provided, the list must be a subset of the
/// EntityFilter used in creating the export, otherwise a FAILED_PRECONDITION error will be returned. If no
/// filter is specified then all entities from the export are imported.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("entityFilter")]
public virtual GoogleDatastoreAdminV1beta1EntityFilter EntityFilter { get; set; }
/// <summary>
/// The full resource URL of the external storage location. Currently, only Google Cloud Storage is supported.
/// So input_url should be of the form: `gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE`, where
/// `BUCKET_NAME` is the name of the Cloud Storage bucket, `NAMESPACE_PATH` is an optional Cloud Storage
/// namespace path (this is not a Cloud Datastore namespace), and `OVERALL_EXPORT_METADATA_FILE` is the metadata
/// file written by the ExportEntities operation. For more information about Cloud Storage namespace paths, see
/// [Object name considerations](https://cloud.google.com/storage/docs/naming#object-considerations). For more
/// information, see google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("inputUrl")]
public virtual string InputUrl { get; set; }
/// <summary>Client-assigned labels.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("labels")]
public virtual System.Collections.Generic.IDictionary<string, string> Labels { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
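    // Illustrative sketch (not generated code): wiring an import to a previous export. As the
    // summaries above describe, the OutputUrl returned by a successful ExportEntities operation
    // is used verbatim as the InputUrl of the import. The class and method names are hypothetical.
    public static class ImportFromExportExample
    {
        public static GoogleDatastoreAdminV1beta1ImportEntitiesRequest BuildImportRequest(
            GoogleDatastoreAdminV1beta1ExportEntitiesResponse exportResponse)
        {
            return new GoogleDatastoreAdminV1beta1ImportEntitiesRequest
            {
                // OutputUrl is only present once the export operation completed successfully.
                InputUrl = exportResponse.OutputUrl,
                // Omitting EntityFilter imports every entity contained in the export.
            };
        }
    }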
/// <summary>Measures the progress of a particular metric.</summary>
public class GoogleDatastoreAdminV1beta1Progress : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>
/// The amount of work that has been completed. Note that this may be greater than work_estimated.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("workCompleted")]
public virtual System.Nullable<long> WorkCompleted { get; set; }
/// <summary>
/// An estimate of how much work needs to be performed. May be zero if the work estimate is unavailable.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("workEstimated")]
public virtual System.Nullable<long> WorkEstimated { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>This resource represents a long-running operation that is the result of a network API call.</summary>
public class GoogleLongrunningOperation : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>
/// If the value is `false`, it means the operation is still in progress. If `true`, the operation is completed,
/// and either `error` or `response` is available.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("done")]
public virtual System.Nullable<bool> Done { get; set; }
/// <summary>The error result of the operation in case of failure or cancellation.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("error")]
public virtual Status Error { get; set; }
/// <summary>
/// Service-specific metadata associated with the operation. It typically contains progress information and
/// common metadata such as create time. Some services might not provide such metadata. Any method that returns
/// a long-running operation should document the metadata type, if any.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("metadata")]
public virtual System.Collections.Generic.IDictionary<string, object> Metadata { get; set; }
/// <summary>
/// The server-assigned name, which is only unique within the same service that originally returns it. If you
/// use the default HTTP mapping, the `name` should be a resource name ending with `operations/{unique_id}`.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("name")]
public virtual string Name { get; set; }
/// <summary>
/// The normal response of the operation in case of success. If the original method returns no data on success,
/// such as `Delete`, the response is `google.protobuf.Empty`. If the original method is standard
/// `Get`/`Create`/`Update`, the response should be the resource. For other methods, the response should have
/// the type `XxxResponse`, where `Xxx` is the original method name. For example, if the original method name is
/// `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("response")]
public virtual System.Collections.Generic.IDictionary<string, object> Response { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
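    // Illustrative sketch (not generated code): reading the state of a returned operation. The
    // class and method names are hypothetical and error handling is deliberately minimal.
    public static class OperationInspectionExample
    {
        public static string Describe(GoogleLongrunningOperation operation)
        {
            if (operation.Done != true)
            {
                return "In progress: " + operation.Name;
            }
            if (operation.Error != null)
            {
                // A failed or cancelled operation reports a google.rpc.Status.
                return "Failed (" + operation.Error.Code + "): " + operation.Error.Message;
            }
            // On success, the Response dictionary holds the operation-specific result, e.g. the
            // outputUrl of an ExportEntities operation.
            return "Done: " + operation.Name;
        }
    }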
/// <summary>
/// The `Status` type defines a logical error model that is suitable for different programming environments,
/// including REST APIs and RPC APIs. It is used by [gRPC](https://github.com/grpc). Each `Status` message contains
/// three pieces of data: error code, error message, and error details. You can find out more about this error model
/// and how to work with it in the [API Design Guide](https://cloud.google.com/apis/design/errors).
/// </summary>
public class Status : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>The status code, which should be an enum value of google.rpc.Code.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("code")]
public virtual System.Nullable<int> Code { get; set; }
/// <summary>
/// A list of messages that carry the error details. There is a common set of message types for APIs to use.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("details")]
public virtual System.Collections.Generic.IList<System.Collections.Generic.IDictionary<string, object>> Details { get; set; }
/// <summary>
/// A developer-facing error message, which should be in English. Any user-facing error message should be
/// localized and sent in the google.rpc.Status.details field, or localized by the client.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("message")]
public virtual string Message { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using MysteryRiddles.Areas.HelpPage.ModelDescriptions;
using MysteryRiddles.Areas.HelpPage.Models;
namespace MysteryRiddles.Areas.HelpPage
{
public static class HelpPageConfigurationExtensions
{
private const string ApiModelPrefix = "MS_HelpPageApiModel_";
/// <summary>
/// Sets the documentation provider for help page.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="documentationProvider">The documentation provider.</param>
public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
{
config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
}
/// <summary>
/// Sets the objects that will be used by the formatters to produce sample requests/responses.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleObjects">The sample objects.</param>
public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
{
config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects;
}
/// <summary>
/// Sets the sample request directly for the specified media type and action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample request.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample);
}
/// <summary>
/// Sets the sample request directly for the specified media type and action with parameters.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample request.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample);
}
/// <summary>
        /// Sets the sample response directly for the specified media type of the action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample response.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample);
}
/// <summary>
/// Sets the sample response directly for the specified media type of the action with specific parameters.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample response.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample);
}
/// <summary>
/// Sets the sample directly for all actions with the specified media type.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample.</param>
/// <param name="mediaType">The media type.</param>
public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType), sample);
}
/// <summary>
/// Sets the sample directly for all actions with the specified type and media type.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="type">The parameter type or return type of an action.</param>
public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type);
}
/// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpResponseMessage"/> in an action.
/// The help page will use this information to produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type);
}
/// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpResponseMessage"/> in an action.
/// The help page will use this information to produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type);
}
/// <summary>
/// Gets the help page sample generator.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <returns>The help page sample generator.</returns>
public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
{
return (HelpPageSampleGenerator)config.Properties.GetOrAdd(
typeof(HelpPageSampleGenerator),
k => new HelpPageSampleGenerator());
}
/// <summary>
/// Sets the help page sample generator.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleGenerator">The help page sample generator.</param>
public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
{
config.Properties.AddOrUpdate(
typeof(HelpPageSampleGenerator),
k => sampleGenerator,
(k, o) => sampleGenerator);
}
/// <summary>
/// Gets the model description generator.
/// </summary>
/// <param name="config">The configuration.</param>
/// <returns>The <see cref="ModelDescriptionGenerator"/></returns>
public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config)
{
return (ModelDescriptionGenerator)config.Properties.GetOrAdd(
typeof(ModelDescriptionGenerator),
k => InitializeModelDescriptionGenerator(config));
}
/// <summary>
/// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
/// <returns>
/// An <see cref="HelpPageApiModel"/>
/// </returns>
public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
{
object model;
string modelId = ApiModelPrefix + apiDescriptionId;
if (!config.Properties.TryGetValue(modelId, out model))
{
Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
if (apiDescription != null)
{
model = GenerateApiModel(apiDescription, config);
config.Properties.TryAdd(modelId, model);
}
}
return (HelpPageApiModel)model;
}
private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config)
{
HelpPageApiModel apiModel = new HelpPageApiModel()
{
ApiDescription = apiDescription,
};
ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator();
HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
GenerateUriParameters(apiModel, modelGenerator);
GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator);
GenerateResourceDescription(apiModel, modelGenerator);
GenerateSamples(apiModel, sampleGenerator);
return apiModel;
}
private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
ApiDescription apiDescription = apiModel.ApiDescription;
foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
{
if (apiParameter.Source == ApiParameterSource.FromUri)
{
HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor;
Type parameterType = null;
ModelDescription typeDescription = null;
ComplexTypeModelDescription complexTypeDescription = null;
if (parameterDescriptor != null)
{
parameterType = parameterDescriptor.ParameterType;
typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
complexTypeDescription = typeDescription as ComplexTypeModelDescription;
}
// Example:
// [TypeConverter(typeof(PointConverter))]
// public class Point
// {
// public Point(int x, int y)
// {
// X = x;
// Y = y;
// }
// public int X { get; set; }
// public int Y { get; set; }
// }
// Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection.
//
// public class Point
// {
// public int X { get; set; }
// public int Y { get; set; }
// }
// Regular complex class Point will have properties X and Y added to UriParameters collection.
if (complexTypeDescription != null
&& !IsBindableWithTypeConverter(parameterType))
{
foreach (ParameterDescription uriParameter in complexTypeDescription.Properties)
{
apiModel.UriParameters.Add(uriParameter);
}
}
else if (parameterDescriptor != null)
{
ParameterDescription uriParameter =
AddParameterDescription(apiModel, apiParameter, typeDescription);
if (!parameterDescriptor.IsOptional)
{
uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" });
}
object defaultValue = parameterDescriptor.DefaultValue;
if (defaultValue != null)
{
uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) });
}
}
else
{
Debug.Assert(parameterDescriptor == null);
// If parameterDescriptor is null, this is an undeclared route parameter which only occurs
// when source is FromUri. Ignored in request model and among resource parameters but listed
// as a simple string here.
ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string));
AddParameterDescription(apiModel, apiParameter, modelDescription);
}
}
}
}
private static bool IsBindableWithTypeConverter(Type parameterType)
{
if (parameterType == null)
{
return false;
}
return TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string));
}
private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel,
ApiParameterDescription apiParameter, ModelDescription typeDescription)
{
ParameterDescription parameterDescription = new ParameterDescription
{
Name = apiParameter.Name,
Documentation = apiParameter.Documentation,
TypeDescription = typeDescription,
};
apiModel.UriParameters.Add(parameterDescription);
return parameterDescription;
}
private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator)
{
ApiDescription apiDescription = apiModel.ApiDescription;
foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
{
if (apiParameter.Source == ApiParameterSource.FromBody)
{
Type parameterType = apiParameter.ParameterDescriptor.ParameterType;
apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
apiModel.RequestDocumentation = apiParameter.Documentation;
}
else if (apiParameter.ParameterDescriptor != null &&
apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))
{
Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
if (parameterType != null)
{
apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
}
}
}
}
private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
ResponseDescription response = apiModel.ApiDescription.ResponseDescription;
Type responseType = response.ResponseType ?? response.DeclaredType;
if (responseType != null && responseType != typeof(void))
{
apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType);
}
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator)
{
try
{
foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription))
{
apiModel.SampleRequests.Add(item.Key, item.Value);
LogInvalidSampleAsError(apiModel, item.Value);
}
foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription))
{
apiModel.SampleResponses.Add(item.Key, item.Value);
LogInvalidSampleAsError(apiModel, item.Value);
}
}
catch (Exception e)
{
apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture,
"An exception has occurred while generating the sample. Exception message: {0}",
HelpPageSampleGenerator.UnwrapException(e).Message));
}
}
private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType)
{
parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault(
p => p.Source == ApiParameterSource.FromBody ||
(p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)));
if (parameterDescription == null)
{
resourceType = null;
return false;
}
resourceType = parameterDescription.ParameterDescriptor.ParameterType;
if (resourceType == typeof(HttpRequestMessage))
{
HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
}
if (resourceType == null)
{
parameterDescription = null;
return false;
}
return true;
}
private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config)
{
ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config);
Collection<ApiDescription> apis = config.Services.GetApiExplorer().ApiDescriptions;
foreach (ApiDescription api in apis)
{
ApiParameterDescription parameterDescription;
Type parameterType;
if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType))
{
modelGenerator.GetOrCreateModelDescription(parameterType);
}
}
return modelGenerator;
}
private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
{
InvalidSample invalidSample = sample as InvalidSample;
if (invalidSample != null)
{
apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
}
}
}
}
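// Illustrative sketch (not part of the HelpPage area): the extension methods above are typically
// called from the area's HelpPageConfig.Register method. The controller and action names, the
// sample payload, and the response type below are assumptions for illustration only.
namespace MysteryRiddles.Areas.HelpPage.UsageSketch
{
    public static class HelpPageConfigSketch
    {
        public static void Register(HttpConfiguration config)
        {
            // Provide a canned JSON response sample for a hypothetical RiddlesController.Get action.
            config.SetSampleResponse(
                "{ \"id\": 1, \"question\": \"...\" }",
                new MediaTypeHeaderValue("application/json"),
                "Riddles",
                "Get");
            // Tell the help page what an action returning HttpResponseMessage actually wraps, so
            // more accurate response samples can be generated for it.
            config.SetActualResponseType(typeof(string), "Riddles", "Get");
        }
    }
}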
| |
/* Copyright (c) Citrix Systems, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms,
* with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the
* following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the
* following disclaimer in the documentation and/or other
* materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
using System;
using System.IO;
using System.Text;
namespace CommandLib
{
/// <summary>
/// Thrown if we fail to verify a tar header checksum
/// </summary>
public class HeaderChecksumFailed : ApplicationException
{
private uint expected;
private uint received;
public HeaderChecksumFailed(uint expected, uint received)
{
this.expected = expected;
this.received = received;
}
public override string ToString()
{
string expected = Convert.ToString(this.expected);
string received = Convert.ToString(this.received);
return "Failed to verify the tar header checksum: received = " + received + "; expected = " + expected;
}
}
/// <summary>
/// Thrown when we find the end of archive marker (two zero blocks)
/// </summary>
class EndOfArchive : ApplicationException
{
public EndOfArchive()
{
}
public override string ToString()
{
return "End of tar archive";
}
}
public class Header
{
public string file_name;
public int file_mode;
public int user_id;
public int group_id;
public uint file_size;
public uint mod_time;
public bool link;
        public string link_name;
/* Length of a header block */
public static uint length = 512;
/* http://en.wikipedia.org/w/index.php?title=Tar_%28file_format%29&oldid=83554041 */
private static int file_name_off = 0;
private static int file_name_len = 100;
private static int file_mode_off = 100;
private static int file_mode_len = 8;
private static int user_id_off = 108;
private static int user_id_len = 8;
private static int group_id_off = 116;
private static int group_id_len = 8;
private static int file_size_off = 124;
private static int file_size_len = 12;
private static int mod_time_off = 136;
private static int mod_time_len = 12;
private static int chksum_off = 148;
private static int chksum_len = 8;
private static int link_off = 156;
private static int link_len = 1;
        private static int link_name_off = 157;
private static int link_name_len = 100;
/// <summary>
/// True if a buffer contains all zeroes
/// </summary>
public static bool all_zeroes(byte[] buffer)
{
bool zeroes = true;
for (int i = 0; i < buffer.Length && zeroes; i++)
{
if (buffer[i] != 0) zeroes = false;
}
return zeroes;
}
/// <summary>
/// Return a sub-array of bytes
/// </summary>
private byte[] slice(byte[] input, int offset, int length)
{
byte[] result = new byte[length];
for (int i = 0; i < length; i++)
{
result[i] = input[offset + i];
}
return result;
}
/// <summary>
/// Remove NULLs and spaces from the end of a string
/// </summary>
private string trim_trailing_stuff(string x)
{
char[] trimmed = {'\0', ' '};
return x.TrimEnd(trimmed);
}
/// <summary>
/// Convert the byte array into a string (assume UTF8)
/// </summary>
private string unmarshal_string(byte[] buffer)
{
Decoder decoder = Encoding.UTF8.GetDecoder();
char[] chars = new char[decoder.GetCharCount(buffer, 0, (int)buffer.Length)];
decoder.GetChars(buffer, 0, (int)buffer.Length, chars, 0);
return trim_trailing_stuff(new string(chars));
}
/// <summary>
/// Unmarshal an octal string into an int32
/// </summary>
private uint unmarshal_int32(byte[] buffer)
{
string octal = "0" + unmarshal_string(buffer);
return System.Convert.ToUInt32(octal, 8);
}
/// <summary>
/// Unmarshal an octal string into an int
/// </summary>
private int unmarshal_int(byte[] buffer)
{
string octal = "0" + unmarshal_string(buffer);
return System.Convert.ToInt32(octal, 8);
}
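        // Illustrative note (not from the original source): the numeric header fields are
        // ASCII octal strings padded with NULs/spaces. For example, a size field containing
        // "00000001750" (an assumed sample value) parses as octal 1750, i.e. 1000 bytes.
        // The leading "0" prepended above also lets an all-NUL field parse safely as zero.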
/// <summary>
/// Recompute the (weak) header checksum
/// </summary>
private uint compute_checksum(byte[] buffer)
{
uint total = 0;
for (int i = 0; i < buffer.Length; i++)
{
/* treat the checksum digits as ' ' */
if ((i >= chksum_off) && (i < (chksum_off + chksum_len)))
{
total += 32; /* ' ' */
}
else
{
total += buffer[i];
}
}
return total;
}
/// <summary>
/// Compute the required length of padding data to follow the data payload
/// </summary>
public uint paddingLength()
{
/* round up to the next whole number of blocks */
uint next_block_length = (file_size + length - 1) / length * length;
return next_block_length - file_size;
}
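        // Illustrative example (assumed values): with the standard 512-byte block length,
        // a file_size of 600 rounds up to 1024, so paddingLength() returns 424; a file_size
        // of exactly 512 already fills a whole block and returns 0.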
/// <summary>
/// pretty-print a header
/// </summary>
public override string ToString()
{
return String.Format("{0}/{1} {2:000000000000} {3:000000000000} {4}",
user_id, group_id, file_size, mod_time, file_name);
}
/// <summary>
/// Unmarshal a header from a buffer, throw an exception if the checksum doesn't validate
/// </summary>
public Header(byte[] buffer)
{
file_name = unmarshal_string(slice(buffer, file_name_off, file_name_len));
file_mode = unmarshal_int(slice(buffer, file_mode_off, file_mode_len));
user_id = unmarshal_int(slice(buffer, user_id_off, user_id_len));
group_id = unmarshal_int(slice(buffer, group_id_off, group_id_len));
file_size = unmarshal_int32(slice(buffer, file_size_off, file_size_len));
mod_time = unmarshal_int32(slice(buffer, mod_time_off, mod_time_len));
link = unmarshal_string(slice(buffer, link_off, link_len)) == "1";
            link_name = unmarshal_string(slice(buffer, link_name_off, link_name_len));
uint chksum = unmarshal_int32(slice(buffer, chksum_off, chksum_len));
uint recomputed = compute_checksum(buffer);
if (chksum != recomputed)
throw new HeaderChecksumFailed(recomputed, chksum);
}
/// <summary>
/// Read a tar header from a stream
/// </summary>
public static Header fromStream(Stream input)
{
byte[] one = IO.unmarshal_n(input, length);
if (all_zeroes(one))
{
byte[] two = IO.unmarshal_n(input, length);
if (all_zeroes(two))
throw new EndOfArchive();
return new Header(two);
}
return new Header(one);
}
}
public class Archive
{
public static void list(Stream stream)
{
try
{
while (true)
{
Header x = Header.fromStream(stream);
Console.WriteLine(x);
IO.skip(stream, x.file_size);
IO.skip(stream, x.paddingLength());
}
}
catch (EndOfArchive)
{
Console.WriteLine("EOF");
}
}
}
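    // Illustrative usage (a sketch, not part of the original library; the file path below
    // is hypothetical):
    //
    //     using (FileStream fs = File.OpenRead(@"C:\temp\export.tar"))
    //     {
    //         Archive.list(fs);
    //     }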
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace FormsAuthTest.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
public class ObjectGenerator
{
internal const int DefaultCollectionSize = 2;
private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();
/// <summary>
/// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
/// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
/// Complex types: POCO types.
/// Nullables: <see cref="Nullable{T}"/>.
/// Arrays: arrays of simple types or complex types.
/// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
/// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
/// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
/// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
/// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>An object of the given type.</returns>
public object GenerateObject(Type type)
{
return GenerateObject(type, new Dictionary<Type, object>());
}
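        // Illustrative usage (a sketch, not from the original source; the types shown are
        // arbitrary examples):
        //
        //     ObjectGenerator generator = new ObjectGenerator();
        //     object sampleList = generator.GenerateObject(typeof(List<int>));
        //     object sampleDict = generator.GenerateObject(typeof(Dictionary<string, DateTime>));
        //     // Returns null if the type cannot be populated (e.g. no public default constructor).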
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
try
{
if (SimpleTypeObjectGenerator.CanGenerateObject(type))
{
return SimpleObjectGenerator.GenerateObject(type);
}
if (type.IsArray)
{
return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsGenericType)
{
return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IDictionary))
{
return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IDictionary).IsAssignableFrom(type))
{
return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IList) ||
type == typeof(IEnumerable) ||
type == typeof(ICollection))
{
return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IList).IsAssignableFrom(type))
{
return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IQueryable))
{
return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsEnum)
{
return GenerateEnum(type);
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
}
catch
{
// Returns null if anything fails
return null;
}
return null;
}
private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
{
Type genericTypeDefinition = type.GetGenericTypeDefinition();
if (genericTypeDefinition == typeof(Nullable<>))
{
return GenerateNullable(type, createdObjectReferences);
}
if (genericTypeDefinition == typeof(KeyValuePair<,>))
{
return GenerateKeyValuePair(type, createdObjectReferences);
}
if (IsTuple(genericTypeDefinition))
{
return GenerateTuple(type, createdObjectReferences);
}
Type[] genericArguments = type.GetGenericArguments();
if (genericArguments.Length == 1)
{
if (genericTypeDefinition == typeof(IList<>) ||
genericTypeDefinition == typeof(IEnumerable<>) ||
genericTypeDefinition == typeof(ICollection<>))
{
Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
}
if (genericTypeDefinition == typeof(IQueryable<>))
{
return GenerateQueryable(type, collectionSize, createdObjectReferences);
}
Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
if (closedCollectionType.IsAssignableFrom(type))
{
return GenerateCollection(type, collectionSize, createdObjectReferences);
}
}
if (genericArguments.Length == 2)
{
if (genericTypeDefinition == typeof(IDictionary<,>))
{
Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
}
Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
if (closedDictionaryType.IsAssignableFrom(type))
{
return GenerateDictionary(type, collectionSize, createdObjectReferences);
}
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
return null;
}
private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = type.GetGenericArguments();
object[] parameterValues = new object[genericArgs.Length];
bool failedToCreateTuple = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < genericArgs.Length; i++)
{
parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
failedToCreateTuple &= parameterValues[i] == null;
}
if (failedToCreateTuple)
{
return null;
}
object result = Activator.CreateInstance(type, parameterValues);
return result;
}
private static bool IsTuple(Type genericTypeDefinition)
{
return genericTypeDefinition == typeof(Tuple<>) ||
genericTypeDefinition == typeof(Tuple<,>) ||
genericTypeDefinition == typeof(Tuple<,,>) ||
genericTypeDefinition == typeof(Tuple<,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,,>);
}
private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = keyValuePairType.GetGenericArguments();
Type typeK = genericArgs[0];
Type typeV = genericArgs[1];
ObjectGenerator objectGenerator = new ObjectGenerator();
object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
if (keyObject == null && valueObject == null)
{
// Failed to create key and values
return null;
}
object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
return result;
}
private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = arrayType.GetElementType();
Array result = Array.CreateInstance(type, size);
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
result.SetValue(element, i);
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type typeK = typeof(object);
Type typeV = typeof(object);
if (dictionaryType.IsGenericType)
{
Type[] genericArgs = dictionaryType.GetGenericArguments();
typeK = genericArgs[0];
typeV = genericArgs[1];
}
object result = Activator.CreateInstance(dictionaryType);
MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
if (newKey == null)
{
// Cannot generate a valid key
return null;
}
bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
if (!containsKey)
{
object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
addMethod.Invoke(result, new object[] { newKey, newValue });
}
}
return result;
}
private static object GenerateEnum(Type enumType)
{
Array possibleValues = Enum.GetValues(enumType);
if (possibleValues.Length > 0)
{
return possibleValues.GetValue(0);
}
return null;
}
private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
{
bool isGeneric = queryableType.IsGenericType;
object list;
if (isGeneric)
{
Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
list = GenerateCollection(listType, size, createdObjectReferences);
}
else
{
list = GenerateArray(typeof(object[]), size, createdObjectReferences);
}
if (list == null)
{
return null;
}
if (isGeneric)
{
Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
return asQueryableMethod.Invoke(null, new[] { list });
}
return Queryable.AsQueryable((IEnumerable)list);
}
private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = collectionType.IsGenericType ?
collectionType.GetGenericArguments()[0] :
typeof(object);
object result = Activator.CreateInstance(collectionType);
MethodInfo addMethod = collectionType.GetMethod("Add");
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
addMethod.Invoke(result, new object[] { element });
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
{
Type type = nullableType.GetGenericArguments()[0];
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type, createdObjectReferences);
}
private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
object result = null;
if (createdObjectReferences.TryGetValue(type, out result))
{
// The object has been created already, just return it. This will handle the circular reference case.
return result;
}
if (type.IsValueType)
{
result = Activator.CreateInstance(type);
}
else
{
ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
if (defaultCtor == null)
{
// Cannot instantiate the type because it doesn't have a default constructor
return null;
}
result = defaultCtor.Invoke(new object[0]);
}
createdObjectReferences.Add(type, result);
SetPublicProperties(type, result, createdObjectReferences);
SetPublicFields(type, result, createdObjectReferences);
return result;
}
private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (PropertyInfo property in properties)
{
if (property.CanWrite)
{
object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
property.SetValue(obj, propertyValue, null);
}
}
}
private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (FieldInfo field in fields)
{
object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
field.SetValue(obj, fieldValue);
}
}
private class SimpleTypeObjectGenerator
{
private long _index = 0;
private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();
[SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
private static Dictionary<Type, Func<long, object>> InitializeGenerators()
{
return new Dictionary<Type, Func<long, object>>
{
{ typeof(Boolean), index => true },
{ typeof(Byte), index => (Byte)64 },
{ typeof(Char), index => (Char)65 },
{ typeof(DateTime), index => DateTime.Now },
{ typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
{ typeof(DBNull), index => DBNull.Value },
{ typeof(Decimal), index => (Decimal)index },
{ typeof(Double), index => (Double)(index + 0.1) },
{ typeof(Guid), index => Guid.NewGuid() },
{ typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
{ typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
{ typeof(Int64), index => (Int64)index },
{ typeof(Object), index => new object() },
{ typeof(SByte), index => (SByte)64 },
{ typeof(Single), index => (Single)(index + 0.1) },
{
typeof(String), index =>
{
return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
}
},
{
typeof(TimeSpan), index =>
{
return TimeSpan.FromTicks(1234567);
}
},
{ typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
{ typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
{ typeof(UInt64), index => (UInt64)index },
{
typeof(Uri), index =>
{
return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
}
},
};
}
public static bool CanGenerateObject(Type type)
{
return DefaultGenerators.ContainsKey(type);
}
public object GenerateObject(Type type)
{
return DefaultGenerators[type](++_index);
}
}
}
}
| |
//
// EncryptedXmlTest.cs
//
// Author:
// Atsushi Enomoto <[email protected]>
//
// Copyright (C) 2006 Novell, Inc (http://www.novell.com)
//
// Licensed to the .NET Foundation under one or more agreements.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Security.Cryptography.X509Certificates;
using System.Text;
using System.Xml;
using Xunit;
namespace System.Security.Cryptography.Xml.Tests
{
public class EncryptedXmlTest
{
private class NotSupportedSymmetricAlgorithm : SymmetricAlgorithm
{
public override ICryptoTransform CreateDecryptor(byte[] rgbKey, byte[] rgbIV)
{
throw new NotImplementedException();
}
public override ICryptoTransform CreateEncryptor(byte[] rgbKey, byte[] rgbIV)
{
throw new NotImplementedException();
}
public override void GenerateIV()
{
throw new NotImplementedException();
}
public override void GenerateKey()
{
throw new NotImplementedException();
}
}
[Fact]
public void Constructor_Default()
{
EncryptedXml encryptedXml = new EncryptedXml();
Assert.Equal(Encoding.UTF8, encryptedXml.Encoding);
Assert.Equal(CipherMode.CBC, encryptedXml.Mode);
Assert.Equal(PaddingMode.ISO10126, encryptedXml.Padding);
Assert.Equal(string.Empty, encryptedXml.Recipient);
Assert.Equal(null, encryptedXml.Resolver);
Assert.Equal(20, encryptedXml.XmlDSigSearchDepth);
}
[Fact]
public void Constructor_XmlDocument()
{
EncryptedXml encryptedXml = new EncryptedXml(null);
Assert.Equal(Encoding.UTF8, encryptedXml.Encoding);
Assert.Equal(CipherMode.CBC, encryptedXml.Mode);
Assert.Equal(PaddingMode.ISO10126, encryptedXml.Padding);
Assert.Equal(string.Empty, encryptedXml.Recipient);
Assert.Equal(null, encryptedXml.Resolver);
Assert.Equal(20, encryptedXml.XmlDSigSearchDepth);
}
[Theory]
[InlineData("System.Security.Cryptography.Xml.Tests.EncryptedXmlSample1.xml")]
[InlineData("System.Security.Cryptography.Xml.Tests.EncryptedXmlSample3.xml")]
public void RsaDecryption(string resourceName)
{
XmlDocument doc = new XmlDocument();
doc.PreserveWhitespace = true;
string originalXml;
using (Stream stream = TestHelpers.LoadResourceStream(resourceName))
using (StreamReader streamReader = new StreamReader(stream))
{
originalXml = streamReader.ReadToEnd();
doc.LoadXml(originalXml);
}
EncryptedXml encxml = new EncryptedXml(doc);
using (X509Certificate2 certificate = TestHelpers.GetSampleX509Certificate())
using (RSA rsa = certificate.GetRSAPrivateKey())
{
Assert.NotNull(rsa);
XmlNamespaceManager nm = new XmlNamespaceManager(doc.NameTable);
nm.AddNamespace("s", "http://www.w3.org/2003/05/soap-envelope");
nm.AddNamespace("o", "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd");
nm.AddNamespace("e", EncryptedXml.XmlEncNamespaceUrl);
XmlElement el = doc.SelectSingleNode("/s:Envelope/s:Header/o:Security/e:EncryptedKey", nm) as XmlElement;
EncryptedKey ekey = new EncryptedKey();
ekey.LoadXml(el);
byte[] key = rsa.Decrypt(ekey.CipherData.CipherValue, RSAEncryptionPadding.OaepSHA1);
using (Aes aes = Aes.Create())
{
aes.Key = key;
aes.Mode = CipherMode.CBC;
List<XmlElement> elements = new List<XmlElement>();
foreach (XmlElement encryptedDataElement in doc.SelectNodes("//e:EncryptedData", nm))
{
elements.Add(encryptedDataElement);
}
foreach (XmlElement encryptedDataElement in elements)
{
EncryptedData edata = new EncryptedData();
edata.LoadXml(encryptedDataElement);
encxml.ReplaceData(encryptedDataElement, encxml.DecryptData(edata, aes));
}
}
}
}
[Fact]
public void Sample2()
{
using (Aes aes = Aes.Create())
{
aes.Mode = CipherMode.CBC;
aes.KeySize = 256;
aes.Key = Convert.FromBase64String("o/ilseZu+keLBBWGGPlUHweqxIPc4gzZEFWr2nBt640=");
aes.Padding = PaddingMode.Zeros;
XmlDocument doc = new XmlDocument();
doc.PreserveWhitespace = true;
doc.Load(TestHelpers.LoadResourceStream("System.Security.Cryptography.Xml.Tests.EncryptedXmlSample2.xml"));
EncryptedXml encxml = new EncryptedXml(doc);
EncryptedData edata = new EncryptedData();
edata.LoadXml(doc.DocumentElement);
encxml.ReplaceData(doc.DocumentElement, encxml.DecryptData(edata, aes));
}
}
[Fact]
public void RoundtripSample1()
{
using (StringWriter sw = new StringWriter())
{
// Encryption
{
XmlDocument doc = new XmlDocument();
doc.PreserveWhitespace = true;
doc.LoadXml("<root> <child>sample</child> </root>");
XmlElement body = doc.DocumentElement;
using (Aes aes = Aes.Create())
{
aes.Mode = CipherMode.CBC;
aes.KeySize = 256;
aes.IV = Convert.FromBase64String("pBUM5P03rZ6AE4ZK5EyBrw==");
aes.Key = Convert.FromBase64String("o/ilseZu+keLBBWGGPlUHweqxIPc4gzZEFWr2nBt640=");
aes.Padding = PaddingMode.Zeros;
EncryptedXml exml = new EncryptedXml();
byte[] encrypted = exml.EncryptData(body, aes, false);
EncryptedData edata = new EncryptedData();
edata.Type = EncryptedXml.XmlEncElementUrl;
edata.EncryptionMethod = new EncryptionMethod(EncryptedXml.XmlEncAES256Url);
EncryptedKey ekey = new EncryptedKey();
// omit key encryption, here for testing
byte[] encKeyBytes = aes.Key;
ekey.CipherData = new CipherData(encKeyBytes);
ekey.EncryptionMethod = new EncryptionMethod(EncryptedXml.XmlEncRSA15Url);
DataReference dr = new DataReference();
dr.Uri = "_0";
ekey.AddReference(dr);
edata.KeyInfo.AddClause(new KeyInfoEncryptedKey(ekey));
ekey.KeyInfo.AddClause(new RSAKeyValue(RSA.Create()));
edata.CipherData.CipherValue = encrypted;
EncryptedXml.ReplaceElement(doc.DocumentElement, edata, false);
doc.Save(new XmlTextWriter(sw));
}
}
// Decryption
{
using (Aes aes = Aes.Create())
{
aes.Mode = CipherMode.CBC;
aes.KeySize = 256;
aes.Key = Convert.FromBase64String(
"o/ilseZu+keLBBWGGPlUHweqxIPc4gzZEFWr2nBt640=");
aes.Padding = PaddingMode.Zeros;
XmlDocument doc = new XmlDocument();
doc.PreserveWhitespace = true;
doc.LoadXml(sw.ToString());
EncryptedXml encxml = new EncryptedXml(doc);
EncryptedData edata = new EncryptedData();
edata.LoadXml(doc.DocumentElement);
encxml.ReplaceData(doc.DocumentElement, encxml.DecryptData(edata, aes));
}
}
}
}
[Fact]
public void Encrypt_DecryptDocument_AES()
{
XmlDocument doc = new XmlDocument();
doc.PreserveWhitespace = true;
string xml = "<root> <child>sample</child> </root>";
doc.LoadXml(xml);
using (Aes aes = Aes.Create())
{
EncryptedXml exml = new EncryptedXml();
exml.AddKeyNameMapping("aes", aes);
EncryptedData ed = exml.Encrypt(doc.DocumentElement, "aes");
doc.LoadXml(ed.GetXml().OuterXml);
EncryptedXml exmlDecryptor = new EncryptedXml(doc);
exmlDecryptor.AddKeyNameMapping("aes", aes);
exmlDecryptor.DecryptDocument();
Assert.Equal(xml, doc.OuterXml);
}
}
[Fact]
public void Encrypt_X509()
{
XmlDocument doc = new XmlDocument();
doc.PreserveWhitespace = true;
string xml = "<root> <child>sample</child> </root>";
doc.LoadXml(xml);
using (X509Certificate2 certificate = TestHelpers.GetSampleX509Certificate())
{
EncryptedXml exml = new EncryptedXml();
EncryptedData ed = exml.Encrypt(doc.DocumentElement, certificate);
Assert.NotNull(ed);
doc.LoadXml(ed.GetXml().OuterXml);
XmlNamespaceManager nm = new XmlNamespaceManager(doc.NameTable);
nm.AddNamespace("enc", EncryptedXml.XmlEncNamespaceUrl);
Assert.NotNull(doc.SelectSingleNode("//enc:EncryptedKey", nm));
Assert.DoesNotContain("sample", doc.OuterXml);
}
}
[Fact]
public void Encrypt_X509_XmlNull()
{
using (X509Certificate2 certificate = TestHelpers.GetSampleX509Certificate())
{
EncryptedXml exml = new EncryptedXml();
Assert.Throws<ArgumentNullException>(() => exml.Encrypt(null, certificate));
}
}
[Fact]
public void Encrypt_X509_CertificateNull()
{
XmlDocument doc = new XmlDocument();
doc.LoadXml("<root />");
EncryptedXml exml = new EncryptedXml();
X509Certificate2 certificate = null;
Assert.Throws<ArgumentNullException>(() => exml.Encrypt(doc.DocumentElement, certificate));
}
[Fact]
public void Encrypt_XmlNull()
{
EncryptedXml exml = new EncryptedXml();
Assert.Throws<ArgumentNullException>(() => exml.Encrypt(null, "aes"));
}
[Fact]
public void Encrypt_KeyNameNull()
{
XmlDocument doc = new XmlDocument();
doc.LoadXml("<root />");
EncryptedXml exml = new EncryptedXml();
string keyName = null;
Assert.Throws<ArgumentNullException>(() => exml.Encrypt(doc.DocumentElement, keyName));
}
[Fact]
public void Encrypt_MissingKey()
{
XmlDocument doc = new XmlDocument();
doc.LoadXml("<root />");
EncryptedXml exml = new EncryptedXml();
Assert.Throws<CryptographicException>(() => exml.Encrypt(doc.DocumentElement, "aes"));
}
[Fact]
public void Encrypt_RSA()
{
using (RSA rsa = RSA.Create())
{
CheckEncryptionMethod(rsa, EncryptedXml.XmlEncRSA15Url);
}
}
[Fact]
public void Encrypt_TripleDES()
{
using (TripleDES tripleDes = TripleDES.Create())
{
CheckEncryptionMethod(tripleDes, EncryptedXml.XmlEncTripleDESKeyWrapUrl);
}
}
[Fact]
public void Encrypt_AES128()
{
using (Aes aes = Aes.Create())
{
aes.KeySize = 128;
CheckEncryptionMethod(aes, EncryptedXml.XmlEncAES128KeyWrapUrl);
}
}
[Fact]
public void Encrypt_AES192()
{
using (Aes aes = Aes.Create())
{
aes.KeySize = 192;
CheckEncryptionMethod(aes, EncryptedXml.XmlEncAES192KeyWrapUrl);
}
}
[Fact]
public void Encrypt_NotSupportedAlgorithm()
{
Assert.Throws<CryptographicException>(() => CheckEncryptionMethod(new NotSupportedSymmetricAlgorithm(), EncryptedXml.XmlEncAES192KeyWrapUrl));
}
[Fact]
public void AddKeyNameMapping_KeyNameNull()
{
EncryptedXml exml = new EncryptedXml();
using (Aes aes = Aes.Create())
{
Assert.Throws<ArgumentNullException>(() => exml.AddKeyNameMapping(null, aes));
}
}
[Fact]
public void AddKeyNameMapping_KeyObjectNull()
{
EncryptedXml exml = new EncryptedXml();
Assert.Throws<ArgumentNullException>(() => exml.AddKeyNameMapping("no_object", null));
}
[Fact]
public void AddKeyNameMapping_KeyObjectWrongType()
{
EncryptedXml exml = new EncryptedXml();
Assert.Throws<CryptographicException>(() => exml.AddKeyNameMapping("string", ""));
}
[Fact]
public void ReplaceData_XmlElementNull()
{
EncryptedXml ex = new EncryptedXml();
Assert.Throws<ArgumentNullException>(() => ex.ReplaceData(null, new byte[0]));
}
[Fact]
public void ReplaceData_EncryptedDataNull()
{
EncryptedXml ex = new EncryptedXml();
XmlDocument doc = new XmlDocument();
doc.LoadXml("<root />");
Assert.Throws<ArgumentNullException>(() => ex.ReplaceData(doc.DocumentElement, null));
}
[Fact]
public void ReplaceElement_XmlElementNull()
{
Assert.Throws<ArgumentNullException>(() => EncryptedXml.ReplaceElement(null, new EncryptedData(), true));
}
[Fact]
public void ReplaceElement_EncryptedDataNull()
{
XmlDocument doc = new XmlDocument();
doc.LoadXml("<root />");
Assert.Throws<ArgumentNullException>(() => EncryptedXml.ReplaceElement(doc.DocumentElement, null, false));
}
[Fact]
public void ReplaceElement_ContentTrue()
{
XmlDocument doc = new XmlDocument();
doc.LoadXml("<root />");
EncryptedData edata = new EncryptedData();
edata.CipherData.CipherValue = new byte[16];
EncryptedXml.ReplaceElement(doc.DocumentElement, edata, true);
Assert.Equal("root", doc.DocumentElement.Name);
Assert.Equal("EncryptedData", doc.DocumentElement.FirstChild.Name);
}
[Fact]
public void GetIdElement_XmlDocumentNull()
{
EncryptedXml ex = new EncryptedXml();
Assert.Null(ex.GetIdElement(null, "value"));
}
[Fact]
public void GetIdElement_StringNull()
{
EncryptedXml ex = new EncryptedXml();
Assert.Throws<ArgumentNullException>(() => ex.GetIdElement(new XmlDocument(), null));
}
[Fact]
public void GetDecryptionKey_EncryptedDataNull()
{
EncryptedXml ex = new EncryptedXml();
Assert.Throws<ArgumentNullException>(() => ex.GetDecryptionKey(null, EncryptedXml.XmlEncAES128Url));
}
[Fact]
public void GetDecryptionKey_NoEncryptionMethod()
{
EncryptedData edata = new EncryptedData();
edata.KeyInfo = new KeyInfo();
edata.KeyInfo.AddClause(new KeyInfoEncryptedKey(new EncryptedKey()));
EncryptedXml exml = new EncryptedXml();
Assert.Throws<CryptographicException>(() => exml.GetDecryptionKey(edata, null));
}
[Fact]
public void GetDecryptionKey_StringNull()
{
EncryptedXml ex = new EncryptedXml();
Assert.Null(ex.GetDecryptionKey(new EncryptedData(), null));
}
[Fact]
public void GetDecryptionKey_KeyInfoName()
{
using (Aes aes = Aes.Create())
{
EncryptedData edata = new EncryptedData();
edata.KeyInfo = new KeyInfo();
edata.KeyInfo.AddClause(new KeyInfoName("aes"));
EncryptedXml exml = new EncryptedXml();
exml.AddKeyNameMapping("aes", aes);
SymmetricAlgorithm decryptedAlg = exml.GetDecryptionKey(edata, null);
Assert.Equal(aes.Key, decryptedAlg.Key);
}
}
[Fact]
public void GetDecryptionKey_CarriedKeyName()
{
using (Aes aes = Aes.Create())
using (Aes innerAes = Aes.Create())
{
innerAes.KeySize = 128;
EncryptedData edata = new EncryptedData();
edata.KeyInfo = new KeyInfo();
edata.KeyInfo.AddClause(new KeyInfoName("aes"));
EncryptedKey ekey = new EncryptedKey();
byte[] encKeyBytes = EncryptedXml.EncryptKey(innerAes.Key, aes);
ekey.CipherData = new CipherData(encKeyBytes);
ekey.EncryptionMethod = new EncryptionMethod(EncryptedXml.XmlEncAES256Url);
ekey.CarriedKeyName = "aes";
ekey.KeyInfo = new KeyInfo();
ekey.KeyInfo.AddClause(new KeyInfoName("another_aes"));
XmlDocument doc = new XmlDocument();
doc.LoadXml(ekey.GetXml().OuterXml);
EncryptedXml exml = new EncryptedXml(doc);
exml.AddKeyNameMapping("another_aes", aes);
SymmetricAlgorithm decryptedAlg = exml.GetDecryptionKey(edata, EncryptedXml.XmlEncAES256Url);
Assert.Equal(innerAes.Key, decryptedAlg.Key);
}
}
[Fact]
public void GetDecryptionIV_EncryptedDataNull()
{
EncryptedXml ex = new EncryptedXml();
Assert.Throws<ArgumentNullException>(() => ex.GetDecryptionIV(null, EncryptedXml.XmlEncAES128Url));
}
[Fact]
public void GetDecryptionIV_StringNull()
{
EncryptedXml ex = new EncryptedXml();
EncryptedData encryptedData = new EncryptedData();
encryptedData.EncryptionMethod = new EncryptionMethod(EncryptedXml.XmlEncAES256Url);
encryptedData.CipherData = new CipherData(new byte[16]);
Assert.Equal(new byte[16], ex.GetDecryptionIV(encryptedData, null));
}
[Fact]
public void GetDecryptionIV_StringNullWithoutEncryptionMethod()
{
EncryptedXml ex = new EncryptedXml();
EncryptedData encryptedData = new EncryptedData();
encryptedData.CipherData = new CipherData(new byte[16]);
Assert.Throws<CryptographicException>(() => ex.GetDecryptionIV(encryptedData, null));
}
[Fact]
public void GetDecryptionIV_InvalidAlgorithmUri()
{
EncryptedXml ex = new EncryptedXml();
EncryptedData encryptedData = new EncryptedData();
encryptedData.CipherData = new CipherData(new byte[16]);
Assert.Throws<CryptographicException>(() => ex.GetDecryptionIV(encryptedData, "invalid"));
}
[Fact]
public void GetDecryptionIV_TripleDesUri()
{
EncryptedXml ex = new EncryptedXml();
EncryptedData encryptedData = new EncryptedData();
encryptedData.CipherData = new CipherData(new byte[16]);
Assert.Equal(8, ex.GetDecryptionIV(encryptedData, EncryptedXml.XmlEncTripleDESUrl).Length);
}
[Fact]
public void DecryptKey_KeyNull()
{
using (Aes aes = Aes.Create())
{
Assert.Throws<ArgumentNullException>(() => EncryptedXml.DecryptKey(null, aes));
}
}
[Fact]
public void DecryptKey_SymmetricAlgorithmNull()
{
Assert.Throws<ArgumentNullException>(() => EncryptedXml.DecryptKey(new byte[16], null));
}
[Fact]
public void EncryptKey_KeyNull()
{
using (Aes aes = Aes.Create())
{
Assert.Throws<ArgumentNullException>(() => EncryptedXml.EncryptKey(null, aes));
}
}
[Fact]
public void EncryptKey_SymmetricAlgorithmNull()
{
Assert.Throws<ArgumentNullException>(() => EncryptedXml.EncryptKey(new byte[16], null));
}
[Fact]
public void EncryptKey_WrongSymmetricAlgorithm()
{
Assert.Throws<CryptographicException>(() => EncryptedXml.EncryptKey(new byte[16], new NotSupportedSymmetricAlgorithm()));
}
[Fact]
public void EncryptKey_RSA_KeyDataNull()
{
using (RSA rsa = RSA.Create())
{
Assert.Throws<ArgumentNullException>(() => EncryptedXml.EncryptKey(null, rsa, false));
}
}
[Fact]
public void EncryptKey_RSA_RSANull()
{
Assert.Throws<ArgumentNullException>(() => EncryptedXml.EncryptKey(new byte[16], null, false));
}
[Fact]
public void EncryptKey_RSA_UseOAEP()
{
byte[] data = Encoding.ASCII.GetBytes("12345678");
using (RSA rsa = RSA.Create())
{
byte[] encryptedData = EncryptedXml.EncryptKey(data, rsa, true);
byte[] decryptedData = EncryptedXml.DecryptKey(encryptedData, rsa, true);
Assert.Equal(data, decryptedData);
}
}
[Fact]
public void DecryptData_EncryptedDataNull()
{
EncryptedXml ex = new EncryptedXml();
using (Aes aes = Aes.Create())
{
Assert.Throws<ArgumentNullException>(() => ex.DecryptData(null, aes));
}
}
[Fact]
public void DecryptData_SymmetricAlgorithmNull()
{
EncryptedXml ex = new EncryptedXml();
Assert.Throws<ArgumentNullException>(() => ex.DecryptData(new EncryptedData(), null));
}
[Fact]
public void DecryptData_CipherReference_InvalidUri()
{
XmlDocument doc = new XmlDocument();
doc.PreserveWhitespace = true;
string xml = "<root> <child>sample</child> </root>";
doc.LoadXml(xml);
using (Aes aes = Aes.Create())
{
EncryptedXml exml = new EncryptedXml();
exml.AddKeyNameMapping("aes", aes);
EncryptedData ed = exml.Encrypt(doc.DocumentElement, "aes");
ed.CipherData = new CipherData();
ed.CipherData.CipherReference = new CipherReference("invaliduri");
// https://github.com/dotnet/corefx/issues/19272
Action decrypt = () => exml.DecryptData(ed, aes);
if (PlatformDetection.IsFullFramework)
Assert.Throws<ArgumentNullException>(decrypt);
else
Assert.Throws<CryptographicException>(decrypt);
}
}
[Fact]
public void DecryptData_CipherReference_IdUri()
{
XmlDocument doc = new XmlDocument();
doc.PreserveWhitespace = true;
string xml = "<root> <child>sample</child> </root>";
doc.LoadXml(xml);
using (Aes aes = Aes.Create())
{
EncryptedXml exml = new EncryptedXml(doc);
string cipherValue = Convert.ToBase64String(exml.EncryptData(Encoding.UTF8.GetBytes(xml), aes));
EncryptedData ed = new EncryptedData();
ed.Type = EncryptedXml.XmlEncElementUrl;
ed.EncryptionMethod = new EncryptionMethod(EncryptedXml.XmlEncAES256Url);
ed.CipherData = new CipherData();
// Create CipherReference: first extract node value, then convert from base64 using Transforms
ed.CipherData.CipherReference = new CipherReference("#ID_0");
string xslt = "<xsl:stylesheet version=\"1.0\" xmlns:xsl=\"http://www.w3.org/1999/XSL/Transform\"><xsl:template match = \"/\"><xsl:value-of select=\".\" /></xsl:template></xsl:stylesheet>";
XmlDsigXsltTransform xsltTransform = new XmlDsigXsltTransform();
XmlDocument xsltDoc = new XmlDocument();
xsltDoc.LoadXml(xslt);
xsltTransform.LoadInnerXml(xsltDoc.ChildNodes);
ed.CipherData.CipherReference.AddTransform(xsltTransform);
ed.CipherData.CipherReference.AddTransform(new XmlDsigBase64Transform());
// Create a document with EncryptedData and node with the actual cipher data (with the ID)
doc.LoadXml("<root></root>");
XmlNode encryptedDataNode = doc.ImportNode(ed.GetXml(), true);
doc.DocumentElement.AppendChild(encryptedDataNode);
XmlElement cipherDataByReference = doc.CreateElement("CipherData");
cipherDataByReference.SetAttribute("ID", "ID_0");
cipherDataByReference.InnerText = cipherValue;
doc.DocumentElement.AppendChild(cipherDataByReference);
if (PlatformDetection.IsXmlDsigXsltTransformSupported)
{
string decryptedXmlString = Encoding.UTF8.GetString(exml.DecryptData(ed, aes));
Assert.Equal(xml, decryptedXmlString);
}
}
}
[Fact]
public void EncryptData_DataNull()
{
EncryptedXml ex = new EncryptedXml();
using (Aes aes = Aes.Create())
{
Assert.Throws<ArgumentNullException>(() => ex.EncryptData(null, aes));
}
}
[Fact]
public void EncryptData_SymmetricAlgorithmNull()
{
EncryptedXml ex = new EncryptedXml();
Assert.Throws<ArgumentNullException>(() => ex.EncryptData(new byte[16], null));
}
[Fact]
public void EncryptData_Xml_SymmetricAlgorithmNull()
{
XmlDocument doc = new XmlDocument();
doc.LoadXml("<root />");
EncryptedXml ex = new EncryptedXml();
Assert.Throws<ArgumentNullException>(() => ex.EncryptData(doc.DocumentElement, null, true));
}
[Fact]
public void EncryptData_Xml_XmlElementNull()
{
EncryptedXml ex = new EncryptedXml();
using (Aes aes = Aes.Create())
{
Assert.Throws<ArgumentNullException>(() => ex.EncryptData(null, aes, true));
}
}
[Fact]
public void DecryptEncryptedKey_Null()
{
EncryptedXml ex = new EncryptedXml();
Assert.Throws<ArgumentNullException>(() => ex.DecryptEncryptedKey(null));
}
[Fact]
public void DecryptEncryptedKey_Empty()
{
EncryptedXml ex = new EncryptedXml();
EncryptedKey ek = new EncryptedKey();
Assert.Null(ex.DecryptEncryptedKey(ek));
}
[Fact]
public void DecryptEncryptedKey_KeyInfoRetrievalMethod()
{
XmlDocument doc = new XmlDocument();
doc.PreserveWhitespace = true;
string xml = "<root> <child>sample</child> </root>";
doc.LoadXml(xml);
using (Aes aes = Aes.Create())
using (Aes innerAes = Aes.Create())
{
innerAes.KeySize = 128;
EncryptedXml exml = new EncryptedXml(doc);
exml.AddKeyNameMapping("aes", aes);
EncryptedKey ekey = new EncryptedKey();
byte[] encKeyBytes = EncryptedXml.EncryptKey(innerAes.Key, aes);
ekey.CipherData = new CipherData(encKeyBytes);
ekey.EncryptionMethod = new EncryptionMethod(EncryptedXml.XmlEncAES256Url);
ekey.Id = "Key_ID";
ekey.KeyInfo = new KeyInfo();
ekey.KeyInfo.AddClause(new KeyInfoName("aes"));
doc.LoadXml(ekey.GetXml().OuterXml);
EncryptedKey ekeyRetrieval = new EncryptedKey();
KeyInfo keyInfoRetrieval = new KeyInfo();
keyInfoRetrieval.AddClause(new KeyInfoRetrievalMethod("#Key_ID"));
ekeyRetrieval.KeyInfo = keyInfoRetrieval;
byte[] decryptedKey = exml.DecryptEncryptedKey(ekeyRetrieval);
Assert.Equal(innerAes.Key, decryptedKey);
EncryptedData eData = new EncryptedData();
eData.EncryptionMethod = new EncryptionMethod(EncryptedXml.XmlEncAES256Url);
eData.KeyInfo = keyInfoRetrieval;
SymmetricAlgorithm decryptedAlg = exml.GetDecryptionKey(eData, null);
Assert.Equal(innerAes.Key, decryptedAlg.Key);
}
}
[Fact]
public void DecryptEncryptedKey_KeyInfoEncryptedKey()
{
XmlDocument doc = new XmlDocument();
doc.PreserveWhitespace = true;
string xml = "<root> <child>sample</child> </root>";
doc.LoadXml(xml);
using (Aes aes = Aes.Create())
using (Aes outerAes = Aes.Create())
using (Aes innerAes = Aes.Create())
{
outerAes.KeySize = 192;
innerAes.KeySize = 128;
EncryptedXml exml = new EncryptedXml(doc);
exml.AddKeyNameMapping("aes", aes);
EncryptedKey ekey = new EncryptedKey();
byte[] encKeyBytes = EncryptedXml.EncryptKey(outerAes.Key, aes);
ekey.CipherData = new CipherData(encKeyBytes);
ekey.EncryptionMethod = new EncryptionMethod(EncryptedXml.XmlEncAES256Url);
ekey.Id = "Key_ID";
ekey.KeyInfo = new KeyInfo();
ekey.KeyInfo.AddClause(new KeyInfoName("aes"));
KeyInfo topLevelKeyInfo = new KeyInfo();
topLevelKeyInfo.AddClause(new KeyInfoEncryptedKey(ekey));
EncryptedKey ekeyTopLevel = new EncryptedKey();
byte[] encTopKeyBytes = EncryptedXml.EncryptKey(innerAes.Key, outerAes);
ekeyTopLevel.CipherData = new CipherData(encTopKeyBytes);
ekeyTopLevel.EncryptionMethod = new EncryptionMethod(EncryptedXml.XmlEncAES256Url);
ekeyTopLevel.KeyInfo = topLevelKeyInfo;
doc.LoadXml(ekeyTopLevel.GetXml().OuterXml);
byte[] decryptedKey = exml.DecryptEncryptedKey(ekeyTopLevel);
Assert.Equal(innerAes.Key, decryptedKey);
EncryptedData eData = new EncryptedData();
eData.EncryptionMethod = new EncryptionMethod(EncryptedXml.XmlEncAES256Url);
eData.KeyInfo = topLevelKeyInfo;
SymmetricAlgorithm decryptedAlg = exml.GetDecryptionKey(eData, null);
Assert.Equal(outerAes.Key, decryptedAlg.Key);
}
}
[Fact]
public void EncryptKey_TripleDES()
{
using (TripleDES tripleDES = TripleDES.Create())
{
byte[] key = Encoding.ASCII.GetBytes("123456781234567812345678");
byte[] encryptedKey = EncryptedXml.EncryptKey(key, tripleDES);
Assert.NotNull(encryptedKey);
Assert.Equal(key, EncryptedXml.DecryptKey(encryptedKey, tripleDES));
}
}
[Fact]
public void EncryptKey_AES()
{
using (Aes aes = Aes.Create())
{
byte[] key = Encoding.ASCII.GetBytes("123456781234567812345678");
byte[] encryptedKey = EncryptedXml.EncryptKey(key, aes);
Assert.NotNull(encryptedKey);
Assert.Equal(key, EncryptedXml.DecryptKey(encryptedKey, aes));
}
}
[Fact]
public void EncryptKey_AES8Bytes()
{
using (Aes aes = Aes.Create())
{
byte[] key = Encoding.ASCII.GetBytes("12345678");
byte[] encryptedKey = EncryptedXml.EncryptKey(key, aes);
Assert.NotNull(encryptedKey);
Assert.Equal(key, EncryptedXml.DecryptKey(encryptedKey, aes));
}
}
[Fact]
public void EncryptKey_AESNotDivisibleBy8()
{
using (Aes aes = Aes.Create())
{
byte[] key = Encoding.ASCII.GetBytes("1234567");
Assert.Throws<CryptographicException>(() => EncryptedXml.EncryptKey(key, aes));
}
}
[Fact]
public void DecryptKey_TripleDESWrongKeySize()
{
using (TripleDES tripleDES = TripleDES.Create())
{
byte[] key = Encoding.ASCII.GetBytes("123");
Assert.Throws<CryptographicException>(() => EncryptedXml.DecryptKey(key, tripleDES));
}
}
[Fact]
public void DecryptKey_TripleDESCorruptedKey()
{
using (TripleDES tripleDES = TripleDES.Create())
{
byte[] key = Encoding.ASCII.GetBytes("123456781234567812345678");
byte[] encryptedKey = EncryptedXml.EncryptKey(key, tripleDES);
encryptedKey[0] ^= 0xFF;
Assert.Throws<CryptographicException>(() => EncryptedXml.DecryptKey(encryptedKey, tripleDES));
}
}
[Fact]
public void DecryptKey_AESWrongKeySize()
{
using (Aes aes = Aes.Create())
{
byte[] key = Encoding.ASCII.GetBytes("123");
Assert.Throws<CryptographicException>(() => EncryptedXml.DecryptKey(key, aes));
}
}
[Fact]
public void DecryptKey_AESCorruptedKey()
{
using (Aes aes = Aes.Create())
{
byte[] key = Encoding.ASCII.GetBytes("123456781234567812345678");
byte[] encryptedKey = EncryptedXml.EncryptKey(key, aes);
encryptedKey[0] ^= 0xFF;
Assert.Throws<CryptographicException>(() => EncryptedXml.DecryptKey(encryptedKey, aes));
}
}
[Fact]
public void DecryptKey_AESCorruptedKey8Bytes()
{
using (Aes aes = Aes.Create())
{
byte[] key = Encoding.ASCII.GetBytes("12345678");
byte[] encryptedKey = EncryptedXml.EncryptKey(key, aes);
encryptedKey[0] ^= 0xFF;
Assert.Throws<CryptographicException>(() => EncryptedXml.DecryptKey(encryptedKey, aes));
}
}
[Fact]
public void DecryptKey_NotSupportedAlgorithm()
{
Assert.Throws<CryptographicException>(() => EncryptedXml.DecryptKey(new byte[16], new NotSupportedSymmetricAlgorithm()));
}
[Fact]
public void DecryptKey_RSA_KeyDataNull()
{
using (RSA rsa = RSA.Create())
{
Assert.Throws<ArgumentNullException>(() => EncryptedXml.DecryptKey(null, rsa, false));
}
}
[Fact]
public void DecryptKey_RSA_RSANull()
{
Assert.Throws<ArgumentNullException>(() => EncryptedXml.DecryptKey(new byte[16], null, false));
}
[Fact]
public void Properties()
{
EncryptedXml exml = new EncryptedXml();
exml.XmlDSigSearchDepth = 10;
exml.Resolver = null;
exml.Padding = PaddingMode.None;
exml.Mode = CipherMode.CBC;
exml.Encoding = Encoding.ASCII;
exml.Recipient = "Recipient";
Assert.Equal(10, exml.XmlDSigSearchDepth);
Assert.Null(exml.Resolver);
Assert.Equal(PaddingMode.None, exml.Padding);
Assert.Equal(CipherMode.CBC, exml.Mode);
Assert.Equal(Encoding.ASCII, exml.Encoding);
Assert.Equal("Recipient", exml.Recipient);
}
private void CheckEncryptionMethod(object algorithm, string uri)
{
XmlDocument doc = new XmlDocument();
doc.LoadXml("<root />");
EncryptedXml exml = new EncryptedXml();
exml.AddKeyNameMapping("key", algorithm);
EncryptedData edata = exml.Encrypt(doc.DocumentElement, "key");
IEnumerator keyInfoEnum = edata.KeyInfo.GetEnumerator();
keyInfoEnum.MoveNext();
KeyInfoEncryptedKey kiEncKey = keyInfoEnum.Current as KeyInfoEncryptedKey;
Assert.NotNull(edata);
Assert.Equal(uri, kiEncKey.EncryptedKey.EncryptionMethod.KeyAlgorithm);
Assert.NotNull(edata.CipherData.CipherValue);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using JeffBot2LAPI.Areas.HelpPage.ModelDescriptions;
using JeffBot2LAPI.Areas.HelpPage.Models;
namespace JeffBot2LAPI.Areas.HelpPage
{
public static class HelpPageConfigurationExtensions
{
private const string ApiModelPrefix = "MS_HelpPageApiModel_";
/// <summary>
/// Sets the documentation provider for help page.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="documentationProvider">The documentation provider.</param>
public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
{
config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
}
/// <summary>
/// Sets the objects that will be used by the formatters to produce sample requests/responses.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleObjects">The sample objects.</param>
public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
{
config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects;
}
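        // Illustrative usage (a sketch; this is typically called from the help page's
        // HelpPageConfig.Register method, which is an assumption here, and the sample
        // values are arbitrary):
        //
        //     config.SetSampleObjects(new Dictionary<Type, object>
        //     {
        //         { typeof(string), "sample string" },
        //         { typeof(int), 42 }
        //     });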
/// <summary>
/// Sets the sample request directly for the specified media type and action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample request.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample);
}
/// <summary>
/// Sets the sample request directly for the specified media type and action with parameters.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample request.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample);
}
/// <summary>
        /// Sets the sample response directly for the specified media type of the action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample response.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample);
}
/// <summary>
/// Sets the sample response directly for the specified media type of the action with specific parameters.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample response.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample);
}
/// <summary>
/// Sets the sample directly for all actions with the specified media type.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample.</param>
/// <param name="mediaType">The media type.</param>
public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType), sample);
}
/// <summary>
/// Sets the sample directly for all actions with the specified type and media type.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="type">The parameter type or return type of an action.</param>
public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type);
}
/// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpResponseMessage"/> in an action.
/// The help page will use this information to produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type);
}
/// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpResponseMessage"/> in an action.
/// The help page will use this information to produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type);
}
/// <summary>
/// Gets the help page sample generator.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <returns>The help page sample generator.</returns>
public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
{
return (HelpPageSampleGenerator)config.Properties.GetOrAdd(
typeof(HelpPageSampleGenerator),
k => new HelpPageSampleGenerator());
}
/// <summary>
/// Sets the help page sample generator.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleGenerator">The help page sample generator.</param>
public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
{
config.Properties.AddOrUpdate(
typeof(HelpPageSampleGenerator),
k => sampleGenerator,
(k, o) => sampleGenerator);
}
/// <summary>
/// Gets the model description generator.
/// </summary>
/// <param name="config">The configuration.</param>
/// <returns>The <see cref="ModelDescriptionGenerator"/></returns>
public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config)
{
return (ModelDescriptionGenerator)config.Properties.GetOrAdd(
typeof(ModelDescriptionGenerator),
k => InitializeModelDescriptionGenerator(config));
}
/// <summary>
/// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
/// <returns>
/// An <see cref="HelpPageApiModel"/>
/// </returns>
public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
{
object model;
string modelId = ApiModelPrefix + apiDescriptionId;
if (!config.Properties.TryGetValue(modelId, out model))
{
Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
if (apiDescription != null)
{
model = GenerateApiModel(apiDescription, config);
config.Properties.TryAdd(modelId, model);
}
}
return (HelpPageApiModel)model;
}
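// Example (sketch): a help page controller can resolve the cached model like this, assuming it
// exposes the current HttpConfiguration via a Configuration property. The MVC members shown
// (ActionResult, View, HttpNotFound) are not part of this file.
//
// public ActionResult Api(string apiId)
// {
//     HelpPageApiModel apiModel = Configuration.GetHelpPageApiModel(apiId);
//     return apiModel != null ? View(apiModel) : (ActionResult)HttpNotFound();
// }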
private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config)
{
HelpPageApiModel apiModel = new HelpPageApiModel()
{
ApiDescription = apiDescription,
};
ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator();
HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
GenerateUriParameters(apiModel, modelGenerator);
GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator);
GenerateResourceDescription(apiModel, modelGenerator);
GenerateSamples(apiModel, sampleGenerator);
return apiModel;
}
private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
ApiDescription apiDescription = apiModel.ApiDescription;
foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
{
if (apiParameter.Source == ApiParameterSource.FromUri)
{
HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor;
Type parameterType = null;
ModelDescription typeDescription = null;
ComplexTypeModelDescription complexTypeDescription = null;
if (parameterDescriptor != null)
{
parameterType = parameterDescriptor.ParameterType;
typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
complexTypeDescription = typeDescription as ComplexTypeModelDescription;
}
// Example:
// [TypeConverter(typeof(PointConverter))]
// public class Point
// {
// public Point(int x, int y)
// {
// X = x;
// Y = y;
// }
// public int X { get; set; }
// public int Y { get; set; }
// }
// Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection.
//
// public class Point
// {
// public int X { get; set; }
// public int Y { get; set; }
// }
// Regular complex class Point will have properties X and Y added to UriParameters collection.
if (complexTypeDescription != null
&& !IsBindableWithTypeConverter(parameterType))
{
foreach (ParameterDescription uriParameter in complexTypeDescription.Properties)
{
apiModel.UriParameters.Add(uriParameter);
}
}
else if (parameterDescriptor != null)
{
ParameterDescription uriParameter =
AddParameterDescription(apiModel, apiParameter, typeDescription);
if (!parameterDescriptor.IsOptional)
{
uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" });
}
object defaultValue = parameterDescriptor.DefaultValue;
if (defaultValue != null)
{
uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) });
}
}
else
{
Debug.Assert(parameterDescriptor == null);
// If parameterDescriptor is null, this is an undeclared route parameter which only occurs
// when source is FromUri. Ignored in request model and among resource parameters but listed
// as a simple string here.
ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string));
AddParameterDescription(apiModel, apiParameter, modelDescription);
}
}
}
}
private static bool IsBindableWithTypeConverter(Type parameterType)
{
if (parameterType == null)
{
return false;
}
return TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string));
}
private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel,
ApiParameterDescription apiParameter, ModelDescription typeDescription)
{
ParameterDescription parameterDescription = new ParameterDescription
{
Name = apiParameter.Name,
Documentation = apiParameter.Documentation,
TypeDescription = typeDescription,
};
apiModel.UriParameters.Add(parameterDescription);
return parameterDescription;
}
private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator)
{
ApiDescription apiDescription = apiModel.ApiDescription;
foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
{
if (apiParameter.Source == ApiParameterSource.FromBody)
{
Type parameterType = apiParameter.ParameterDescriptor.ParameterType;
apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
apiModel.RequestDocumentation = apiParameter.Documentation;
}
else if (apiParameter.ParameterDescriptor != null &&
apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))
{
Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
if (parameterType != null)
{
apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
}
}
}
}
private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
ResponseDescription response = apiModel.ApiDescription.ResponseDescription;
Type responseType = response.ResponseType ?? response.DeclaredType;
if (responseType != null && responseType != typeof(void))
{
apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType);
}
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator)
{
try
{
foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription))
{
apiModel.SampleRequests.Add(item.Key, item.Value);
LogInvalidSampleAsError(apiModel, item.Value);
}
foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription))
{
apiModel.SampleResponses.Add(item.Key, item.Value);
LogInvalidSampleAsError(apiModel, item.Value);
}
}
catch (Exception e)
{
apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture,
"An exception has occurred while generating the sample. Exception message: {0}",
HelpPageSampleGenerator.UnwrapException(e).Message));
}
}
private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType)
{
parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault(
p => p.Source == ApiParameterSource.FromBody ||
(p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)));
if (parameterDescription == null)
{
resourceType = null;
return false;
}
resourceType = parameterDescription.ParameterDescriptor.ParameterType;
if (resourceType == typeof(HttpRequestMessage))
{
HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
}
if (resourceType == null)
{
parameterDescription = null;
return false;
}
return true;
}
private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config)
{
ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config);
Collection<ApiDescription> apis = config.Services.GetApiExplorer().ApiDescriptions;
foreach (ApiDescription api in apis)
{
ApiParameterDescription parameterDescription;
Type parameterType;
if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType))
{
modelGenerator.GetOrCreateModelDescription(parameterType);
}
}
return modelGenerator;
}
private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
{
InvalidSample invalidSample = sample as InvalidSample;
if (invalidSample != null)
{
apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
}
}
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime;
using Internal.Runtime.Augments;
namespace System.Collections.ObjectModel
{
[DebuggerTypeProxy(typeof(Mscorlib_CollectionDebugView<>))]
[DebuggerDisplay("Count = {Count}")]
public class Collection<T> : IList<T>, IList, IReadOnlyList<T>
{
private IList<T> _items;
private Object _syncRoot;
public Collection()
{
// We must implement our backing list using List<T>() as we have store apps that call Collection<T>.Items and cast
// the result to List<T>.
_items = new List<T>();
}
public Collection(IList<T> list)
{
if (list == null)
{
throw new ArgumentNullException(nameof(list));
}
_items = list;
}
public int Count
{
get { return _items.Count; }
}
protected IList<T> Items
{
get { return _items; }
}
public T this[int index]
{
get { return _items[index]; }
set
{
if (_items.IsReadOnly)
{
throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
}
if (index < 0 || index >= _items.Count)
{
throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_ListItem);
}
SetItem(index, value);
}
}
public void Add(T item)
{
if (_items.IsReadOnly)
{
throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
}
int index = _items.Count;
InsertItem(index, item);
}
public void Clear()
{
if (_items.IsReadOnly)
{
throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
}
ClearItems();
}
public void CopyTo(T[] array, int index)
{
_items.CopyTo(array, index);
}
public bool Contains(T item)
{
return _items.Contains(item);
}
public IEnumerator<T> GetEnumerator()
{
return _items.GetEnumerator();
}
public int IndexOf(T item)
{
return _items.IndexOf(item);
}
public void Insert(int index, T item)
{
if (_items.IsReadOnly)
{
throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
}
if (index < 0 || index > _items.Count)
{
throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_ListInsert);
}
InsertItem(index, item);
}
public bool Remove(T item)
{
if (_items.IsReadOnly)
{
throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
}
int index = _items.IndexOf(item);
if (index < 0) return false;
RemoveItem(index);
return true;
}
public void RemoveAt(int index)
{
if (_items.IsReadOnly)
{
throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
}
if (index < 0 || index >= _items.Count)
{
throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_ListRemoveAt);
}
RemoveItem(index);
}
protected virtual void ClearItems()
{
_items.Clear();
}
protected virtual void InsertItem(int index, T item)
{
_items.Insert(index, item);
}
protected virtual void RemoveItem(int index)
{
_items.RemoveAt(index);
}
protected virtual void SetItem(int index, T item)
{
_items[index] = item;
}
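// Example (illustrative sketch, not part of this type): the protected virtual methods above are
// the intended extension points. A derived collection can override them to add validation or
// change notifications while reusing the base class bookkeeping; all public mutators (Add,
// Insert, Remove, RemoveAt, this[int], Clear) funnel through these four methods.
//
// public class NonNullCollection<T> : Collection<T> where T : class
// {
//     protected override void InsertItem(int index, T item)
//     {
//         if (item == null) throw new ArgumentNullException(nameof(item));
//         base.InsertItem(index, item);
//     }
//     protected override void SetItem(int index, T item)
//     {
//         if (item == null) throw new ArgumentNullException(nameof(item));
//         base.SetItem(index, item);
//     }
// }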
bool ICollection<T>.IsReadOnly
{
get
{
return _items.IsReadOnly;
}
}
IEnumerator IEnumerable.GetEnumerator()
{
return ((IEnumerable)_items).GetEnumerator();
}
bool ICollection.IsSynchronized
{
get { return false; }
}
object ICollection.SyncRoot
{
get
{
if (_syncRoot == null)
{
ICollection c = _items as ICollection;
if (c != null)
{
_syncRoot = c.SyncRoot;
}
else
{
System.Threading.Interlocked.CompareExchange<Object>(ref _syncRoot, new Object(), null);
}
}
return _syncRoot;
}
}
void ICollection.CopyTo(Array array, int index)
{
if (array == null)
{
throw new ArgumentNullException(nameof(array));
}
if (array.Rank != 1)
{
throw new ArgumentException(SR.Arg_RankMultiDimNotSupported);
}
if (array.GetLowerBound(0) != 0)
{
throw new ArgumentException(SR.Arg_NonZeroLowerBound);
}
if (index < 0)
{
throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
}
if (array.Length - index < Count)
{
throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall);
}
T[] tArray = array as T[];
if (tArray != null)
{
_items.CopyTo(tArray, index);
}
else
{
/* ProjectN port note: IsAssignable no longer available on Type surface area. This is a non-reliable check so we should be able to do without.
//
// Catch the obvious case assignment will fail.
// We can't verify all possible problems by doing the check though.
// For example, if the element type of the Array is derived from T,
// we can't figure out if we can successfully copy the element beforehand.
//
IResolvedRuntimeType targetType = array.GetType().GetElementType().ResolvedType;
IResolvedRuntimeType sourceType = typeof(T).ResolvedType;
if(!(targetType.IsAssignableFrom(sourceType) || sourceType.IsAssignableFrom(targetType))) {
throw new ArgumentException(SR.Argument_InvalidArrayType);
}
*/
//
// We can't cast array of value type to object[], so we don't support
// widening of primitive types here.
//
object[] objects = array as object[];
if (objects == null)
{
throw new ArgumentException(SR.Argument_InvalidArrayType);
}
int count = _items.Count;
try
{
for (int i = 0; i < count; i++)
{
objects[index++] = _items[i];
}
}
catch (ArrayTypeMismatchException)
{
throw new ArgumentException(SR.Argument_InvalidArrayType);
}
}
}
object IList.this[int index]
{
get { return _items[index]; }
set
{
if (value == null && !(default(T) == null))
{
throw new ArgumentNullException(nameof(value));
}
try
{
this[index] = (T)value;
}
catch (InvalidCastException)
{
throw new ArgumentException(SR.Format(SR.Arg_WrongType, value, typeof(T)), nameof(value));
}
}
}
bool IList.IsReadOnly
{
get
{
return _items.IsReadOnly;
}
}
bool IList.IsFixedSize
{
get
{
// There is no IList<T>.IsFixedSize, so we must assume that only
// readonly collections are fixed size, if our internal item
// collection does not implement IList. Note that Array implements
// IList, and therefore T[] and U[] will be fixed-size.
IList list = _items as IList;
if (list != null)
{
return list.IsFixedSize;
}
return _items.IsReadOnly;
}
}
int IList.Add(object value)
{
if (_items.IsReadOnly)
{
throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
}
if (value == null && !(default(T) == null))
{
throw new ArgumentNullException(nameof(value));
}
try
{
Add((T)value);
}
catch (InvalidCastException)
{
throw new ArgumentException(SR.Format(SR.Arg_WrongType, value, typeof(T)), nameof(value));
}
return this.Count - 1;
}
bool IList.Contains(object value)
{
if (IsCompatibleObject(value))
{
return Contains((T)value);
}
return false;
}
int IList.IndexOf(object value)
{
if (IsCompatibleObject(value))
{
return IndexOf((T)value);
}
return -1;
}
void IList.Insert(int index, object value)
{
if (_items.IsReadOnly)
{
throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
}
if (value == null && !(default(T) == null))
{
throw new ArgumentNullException(nameof(value));
}
try
{
Insert(index, (T)value);
}
catch (InvalidCastException)
{
throw new ArgumentException(SR.Format(SR.Arg_WrongType, value, typeof(T)), nameof(value));
}
}
void IList.Remove(object value)
{
if (_items.IsReadOnly)
{
throw new NotSupportedException(SR.NotSupported_ReadOnlyCollection);
}
if (IsCompatibleObject(value))
{
Remove((T)value);
}
}
private static bool IsCompatibleObject(object value)
{
// Non-null values are fine. Only accept nulls if T is a class or Nullable<U>.
// Note that default(T) is not equal to null for value types except when T is Nullable<U>.
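// Example: for Collection<int>, null is rejected here (default(int) != null); for
// Collection<string> or Collection<int?>, null is accepted as a valid item.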
return ((value is T) || (value == null && default(T) == null));
}
}
}
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus SDK License Version 3.4.1 (the "License");
you may not use the Oculus SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using UnityEditor;
using UnityEditor.Callbacks;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using System.IO;
using System.Diagnostics;
[InitializeOnLoad]
class OVRPluginUpdater
{
enum PluginPlatform
{
Android,
AndroidUniversal,
OSXUniversal,
Win,
Win64,
}
class PluginPackage
{
public string RootPath;
public System.Version Version;
public Dictionary<PluginPlatform, string> Plugins = new Dictionary<PluginPlatform, string>();
public bool IsBundledPluginPackage()
{
return (RootPath == GetBundledPluginRootPath());
}
public bool IsEnabled()
{
// TODO: Check each individual platform rather than using the Win64 DLL status for the overall package status.
string path = "";
if (Plugins.TryGetValue(PluginPlatform.Win64, out path))
{
return File.Exists(path);
}
return false;
}
public bool IsAndroidUniversalEnabled()
{
string path = "";
if (Plugins.TryGetValue(PluginPlatform.AndroidUniversal, out path))
{
if (File.Exists(path))
{
string basePath = GetCurrentProjectPath();
string relPath = path.Substring(basePath.Length + 1);
PluginImporter pi = PluginImporter.GetAtPath(relPath) as PluginImporter;
if (pi != null)
{
return pi.GetCompatibleWithPlatform(BuildTarget.Android);
}
}
}
return false;
}
public bool IsAndroidUniversalPresent()
{
string path = "";
if (Plugins.TryGetValue(PluginPlatform.AndroidUniversal, out path))
{
string disabledPath = path + GetDisabledPluginSuffix();
if (File.Exists(path) || File.Exists(disabledPath))
{
return true;
}
}
return false;
}
}
private static bool restartPending = false;
private static bool unityRunningInBatchmode = false;
private static bool unityVersionSupportsAndroidUniversal = false;
private static bool enableAndroidUniversalSupport = true;
private static System.Version invalidVersion = new System.Version("0.0.0");
static OVRPluginUpdater()
{
EditorApplication.delayCall += OnDelayCall;
}
static void OnDelayCall()
{
if (System.Environment.CommandLine.Contains("-batchmode"))
{
unityRunningInBatchmode = true;
}
if (enableAndroidUniversalSupport)
{
#if UNITY_2018_3_OR_NEWER
unityVersionSupportsAndroidUniversal = true;
#endif
}
if (ShouldAttemptPluginUpdate())
{
AttemptPluginUpdate(true);
}
}
private static PluginPackage GetPluginPackage(string rootPath)
{
return new PluginPackage()
{
RootPath = rootPath,
Version = GetPluginVersion(rootPath),
Plugins = new Dictionary<PluginPlatform, string>()
{
{ PluginPlatform.Android, rootPath + GetPluginBuildTargetSubPath(PluginPlatform.Android) },
{ PluginPlatform.AndroidUniversal, rootPath + GetPluginBuildTargetSubPath(PluginPlatform.AndroidUniversal) },
{ PluginPlatform.OSXUniversal, rootPath + GetPluginBuildTargetSubPath(PluginPlatform.OSXUniversal) },
{ PluginPlatform.Win, rootPath + GetPluginBuildTargetSubPath(PluginPlatform.Win) },
{ PluginPlatform.Win64, rootPath + GetPluginBuildTargetSubPath(PluginPlatform.Win64) },
}
};
}
private static PluginPackage GetBundledPluginPackage()
{
return GetPluginPackage(GetBundledPluginRootPath());
}
private static List<PluginPackage> GetAllUtilitiesPluginPackages()
{
string pluginRootPath = GetUtilitiesPluginRootPath();
List<PluginPackage> packages = new List<PluginPackage>();
if (Directory.Exists(pluginRootPath))
{
var dirs = Directory.GetDirectories(pluginRootPath);
foreach(string dir in dirs)
{
packages.Add(GetPluginPackage(dir));
}
}
return packages;
}
private static string GetCurrentProjectPath()
{
return Directory.GetParent(Application.dataPath).FullName;
}
private static string GetUtilitiesPluginRootPath()
{
return GetUtilitiesRootPath() + @"/Plugins";
}
private static string GetUtilitiesRootPath()
{
var so = ScriptableObject.CreateInstance(typeof(OVRPluginUpdaterStub));
var script = MonoScript.FromScriptableObject(so);
string assetPath = AssetDatabase.GetAssetPath(script);
string editorDir = Directory.GetParent(assetPath).FullName;
string ovrDir = Directory.GetParent(editorDir).FullName;
return ovrDir;
}
private static string GetBundledPluginRootPath()
{
string basePath = EditorApplication.applicationContentsPath;
string pluginPath = @"/UnityExtensions/Unity/VR";
return basePath + pluginPath;
}
private static string GetPluginBuildTargetSubPath(PluginPlatform target)
{
string path = string.Empty;
switch (target)
{
case PluginPlatform.Android:
path = @"/Android/OVRPlugin.aar";
break;
case PluginPlatform.AndroidUniversal:
path = @"/AndroidUniversal/OVRPlugin.aar";
break;
case PluginPlatform.OSXUniversal:
path = @"/OSXUniversal/OVRPlugin.bundle";
break;
case PluginPlatform.Win:
path = @"/Win/OVRPlugin.dll";
break;
case PluginPlatform.Win64:
path = @"/Win64/OVRPlugin.dll";
break;
default:
throw new ArgumentException("Attempted GetPluginBuildTargetSubPath() for unsupported BuildTarget: " + target);
}
return path;
}
private static string GetDisabledPluginSuffix()
{
return @".disabled";
}
private static System.Version GetPluginVersion(string path)
{
System.Version pluginVersion = invalidVersion;
try
{
pluginVersion = new System.Version(Path.GetFileName(path));
}
catch
{
pluginVersion = invalidVersion;
}
if (pluginVersion == invalidVersion)
{
// Unable to determine version from path; fall back to the Win64 DLL file metadata.
path += GetPluginBuildTargetSubPath(PluginPlatform.Win64);
if (!File.Exists(path))
{
path += GetDisabledPluginSuffix();
if (!File.Exists(path))
{
return invalidVersion;
}
}
FileVersionInfo pluginVersionInfo = FileVersionInfo.GetVersionInfo(path);
if (pluginVersionInfo == null || pluginVersionInfo.ProductVersion == null || pluginVersionInfo.ProductVersion == "")
{
return invalidVersion;
}
pluginVersion = new System.Version(pluginVersionInfo.ProductVersion);
}
return pluginVersion;
}
public static string GetVersionDescription(System.Version version)
{
bool isVersionValid = (version != invalidVersion);
return isVersionValid ? version.ToString() : "(Unknown)";
}
private static bool ShouldAttemptPluginUpdate()
{
if (unityRunningInBatchmode)
{
return false;
}
else
{
return !UnitySupportsEnabledAndroidPlugin() || (autoUpdateEnabled && !restartPending && !Application.isPlaying);
}
}
private static void DisableAllUtilitiesPluginPackages()
{
List<PluginPackage> allUtilsPluginPkgs = GetAllUtilitiesPluginPackages();
foreach(PluginPackage pluginPkg in allUtilsPluginPkgs)
{
foreach(string path in pluginPkg.Plugins.Values)
{
if ((Directory.Exists(path)) || (File.Exists(path)))
{
string basePath = GetCurrentProjectPath();
string relPath = path.Substring(basePath.Length + 1);
string relDisabledPath = relPath + GetDisabledPluginSuffix();
AssetDatabase.MoveAsset(relPath, relDisabledPath);
AssetDatabase.ImportAsset(relDisabledPath, ImportAssetOptions.ForceUpdate);
}
}
}
AssetDatabase.Refresh();
AssetDatabase.SaveAssets();
}
private static void EnablePluginPackage(PluginPackage pluginPkg)
{
foreach(var kvp in pluginPkg.Plugins)
{
PluginPlatform platform = kvp.Key;
string path = kvp.Value;
if ((Directory.Exists(path + GetDisabledPluginSuffix())) || (File.Exists(path + GetDisabledPluginSuffix())))
{
string basePath = GetCurrentProjectPath();
string relPath = path.Substring(basePath.Length + 1);
string relDisabledPath = relPath + GetDisabledPluginSuffix();
AssetDatabase.MoveAsset(relDisabledPath, relPath);
AssetDatabase.ImportAsset(relPath, ImportAssetOptions.ForceUpdate);
PluginImporter pi = PluginImporter.GetAtPath(relPath) as PluginImporter;
if (pi == null)
{
continue;
}
// Disable support for all platforms, then conditionally enable desired support below
pi.SetCompatibleWithEditor(false);
pi.SetCompatibleWithAnyPlatform(false);
pi.SetCompatibleWithPlatform(BuildTarget.Android, false);
pi.SetCompatibleWithPlatform(BuildTarget.StandaloneWindows, false);
pi.SetCompatibleWithPlatform(BuildTarget.StandaloneWindows64, false);
#if UNITY_2017_3_OR_NEWER
pi.SetCompatibleWithPlatform(BuildTarget.StandaloneOSX, false);
#else
pi.SetCompatibleWithPlatform(BuildTarget.StandaloneOSXUniversal, false);
pi.SetCompatibleWithPlatform(BuildTarget.StandaloneOSXIntel, false);
pi.SetCompatibleWithPlatform(BuildTarget.StandaloneOSXIntel64, false);
#endif
switch (platform)
{
case PluginPlatform.Android:
pi.SetCompatibleWithPlatform(BuildTarget.Android, !unityVersionSupportsAndroidUniversal);
if (!unityVersionSupportsAndroidUniversal)
{
pi.SetPlatformData(BuildTarget.Android, "CPU", "ARMv7");
}
break;
case PluginPlatform.AndroidUniversal:
pi.SetCompatibleWithPlatform(BuildTarget.Android, unityVersionSupportsAndroidUniversal);
break;
case PluginPlatform.OSXUniversal:
#if UNITY_2017_3_OR_NEWER
pi.SetCompatibleWithPlatform(BuildTarget.StandaloneOSX, true);
#else
pi.SetCompatibleWithPlatform(BuildTarget.StandaloneOSXUniversal, true);
pi.SetCompatibleWithPlatform(BuildTarget.StandaloneOSXIntel, true);
pi.SetCompatibleWithPlatform(BuildTarget.StandaloneOSXIntel64, true);
#endif
pi.SetCompatibleWithEditor(true);
pi.SetEditorData("CPU", "AnyCPU");
pi.SetEditorData("OS", "OSX");
pi.SetPlatformData("Editor", "CPU", "AnyCPU");
pi.SetPlatformData("Editor", "OS", "OSX");
break;
case PluginPlatform.Win:
pi.SetCompatibleWithPlatform(BuildTarget.StandaloneWindows, true);
pi.SetCompatibleWithEditor(true);
pi.SetEditorData("CPU", "X86");
pi.SetEditorData("OS", "Windows");
pi.SetPlatformData("Editor", "CPU", "X86");
pi.SetPlatformData("Editor", "OS", "Windows");
break;
case PluginPlatform.Win64:
pi.SetCompatibleWithPlatform(BuildTarget.StandaloneWindows64, true);
pi.SetCompatibleWithEditor(true);
pi.SetEditorData("CPU", "X86_64");
pi.SetEditorData("OS", "Windows");
pi.SetPlatformData("Editor", "CPU", "X86_64");
pi.SetPlatformData("Editor", "OS", "Windows");
break;
default:
throw new ArgumentException("Attempted EnablePluginPackage() for unsupported BuildTarget: " + platform);
}
AssetDatabase.ImportAsset(relPath, ImportAssetOptions.ForceUpdate);
}
}
AssetDatabase.Refresh();
AssetDatabase.SaveAssets();
}
private static readonly string autoUpdateEnabledKey = "Oculus_Utilities_OVRPluginUpdater_AutoUpdate_" + OVRManager.utilitiesVersion;
private static bool autoUpdateEnabled
{
get {
return PlayerPrefs.GetInt(autoUpdateEnabledKey, 1) == 1;
}
set {
PlayerPrefs.SetInt(autoUpdateEnabledKey, value ? 1 : 0);
}
}
[MenuItem("Oculus/Tools/Disable OVR Utilities Plugin")]
private static void AttemptPluginDisable()
{
PluginPackage bundledPluginPkg = GetBundledPluginPackage();
List<PluginPackage> allUtilsPluginPkgs = GetAllUtilitiesPluginPackages();
PluginPackage enabledUtilsPluginPkg = null;
foreach(PluginPackage pluginPkg in allUtilsPluginPkgs)
{
if (pluginPkg.IsEnabled())
{
if ((enabledUtilsPluginPkg == null) || (pluginPkg.Version > enabledUtilsPluginPkg.Version))
{
enabledUtilsPluginPkg = pluginPkg;
}
}
}
if (enabledUtilsPluginPkg == null)
{
if (unityRunningInBatchmode
#if UNITY_2018_3_OR_NEWER
|| EditorUtility.DisplayDialog("Disable Oculus Utilities Plugin",
"The OVRPlugin included with Oculus Utilities is already disabled."
+ " The OVRPlugin installed through the Package Manager will continue to be used.\n",
"Ok",
""))
#else
|| EditorUtility.DisplayDialog("Disable Oculus Utilities Plugin",
"The OVRPlugin included with Oculus Utilities is already disabled."
+ " The OVRPlugin bundled with the Unity Editor will continue to be used.\n\n"
+ "Bundled version: "
+ GetVersionDescription(bundledPluginPkg.Version),
"Ok",
""))
#endif
{
return;
}
}
else
{
if (unityRunningInBatchmode
#if UNITY_2018_3_OR_NEWER
|| EditorUtility.DisplayDialog("Disable Oculus Utilities Plugin",
"Do you want to disable the OVRPlugin included with Oculus Utilities and revert to the OVRPlugin installed through the Package Manager?\n\n"
+ "Current version: " + GetVersionDescription(enabledUtilsPluginPkg.Version),
"Yes",
"No"))
#else
|| EditorUtility.DisplayDialog("Disable Oculus Utilities Plugin",
"Do you want to disable the OVRPlugin included with Oculus Utilities and revert to the OVRPlugin bundled with the Unity Editor?\n\n"
+ "Current version: " + GetVersionDescription(enabledUtilsPluginPkg.Version)
+ "\nBundled version: " + GetVersionDescription(bundledPluginPkg.Version),
"Yes",
"No"))
#endif
{
DisableAllUtilitiesPluginPackages();
if (unityRunningInBatchmode
#if UNITY_2018_3_OR_NEWER
|| EditorUtility.DisplayDialog("Restart Unity",
"Now you will be using the OVRPlugin installed through Package Manager."
+ "\n\nPlease restart the Unity Editor to complete the update process.",
"Restart",
"Not Now"))
#else
|| EditorUtility.DisplayDialog("Restart Unity",
"OVRPlugin has been updated to "
+ GetVersionDescription(bundledPluginPkg.Version)
+ ".\n\nPlease restart the Unity Editor to complete the update process."
#if !UNITY_2017_1_OR_NEWER
+ " You may need to manually relaunch Unity if you are using Unity 5.6 and higher."
#endif
,
"Restart",
"Not Now"))
#endif
{
RestartUnityEditor();
}
}
}
}
[MenuItem("Oculus/Tools/Update OVR Utilities Plugin")]
private static void RunPluginUpdate()
{
autoUpdateEnabled = true;
AttemptPluginUpdate(false);
}
// Separate entry point needed since "-executeMethod" does not support parameters or default parameter values
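// Example invocation (sketch; the project path is a placeholder and the Unity executable name
// varies by platform):
//   Unity -batchmode -projectPath <PathToProject> -executeMethod OVRPluginUpdater.BatchmodePluginUpdate -quit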
private static void BatchmodePluginUpdate()
{
OnDelayCall(); // manually invoke when running editor in batchmode
AttemptPluginUpdate(false);
}
private static void AttemptPluginUpdate(bool triggeredByAutoUpdate)
{
OVRPlugin.SendEvent("attempt_plugin_update_auto", triggeredByAutoUpdate.ToString());
PluginPackage bundledPluginPkg = GetBundledPluginPackage();
List<PluginPackage> allUtilsPluginPkgs = GetAllUtilitiesPluginPackages();
PluginPackage enabledUtilsPluginPkg = null;
PluginPackage newestUtilsPluginPkg = null;
foreach(PluginPackage pluginPkg in allUtilsPluginPkgs)
{
if ((newestUtilsPluginPkg == null) || (pluginPkg.Version > newestUtilsPluginPkg.Version))
{
newestUtilsPluginPkg = pluginPkg;
}
if (pluginPkg.IsEnabled())
{
if ((enabledUtilsPluginPkg == null) || (pluginPkg.Version > enabledUtilsPluginPkg.Version))
{
enabledUtilsPluginPkg = pluginPkg;
}
}
}
bool reenableCurrentPluginPkg = false;
PluginPackage targetPluginPkg = null;
if ((newestUtilsPluginPkg != null) && (newestUtilsPluginPkg.Version > bundledPluginPkg.Version))
{
if ((enabledUtilsPluginPkg == null) || (enabledUtilsPluginPkg.Version != newestUtilsPluginPkg.Version))
{
targetPluginPkg = newestUtilsPluginPkg;
}
}
else if ((enabledUtilsPluginPkg != null) && (enabledUtilsPluginPkg.Version < bundledPluginPkg.Version))
{
targetPluginPkg = bundledPluginPkg;
}
PluginPackage currentPluginPkg = (enabledUtilsPluginPkg != null) ? enabledUtilsPluginPkg : bundledPluginPkg;
if ((targetPluginPkg == null) && !UnitySupportsEnabledAndroidPlugin())
{
// Force reenabling the current package to configure the correct android plugin for this unity version.
reenableCurrentPluginPkg = true;
targetPluginPkg = currentPluginPkg;
}
if (targetPluginPkg == null)
{
if (!triggeredByAutoUpdate && !unityRunningInBatchmode)
{
#if UNITY_2018_3_OR_NEWER
EditorUtility.DisplayDialog("Update Oculus Utilities Plugin",
"OVRPlugin is already up to date.\n\nCurrent version: "
+ GetVersionDescription(currentPluginPkg.Version),
"Ok",
"");
#else
EditorUtility.DisplayDialog("Update Oculus Utilities Plugin",
"OVRPlugin is already up to date.\n\nCurrent version: "
+ GetVersionDescription(currentPluginPkg.Version) + "\nBundled version: "
+ GetVersionDescription(bundledPluginPkg.Version),
"Ok",
"");
#endif
}
return; // No update necessary.
}
System.Version targetVersion = targetPluginPkg.Version;
bool userAcceptsUpdate = false;
if (unityRunningInBatchmode)
{
userAcceptsUpdate = true;
}
else
{
string dialogBody = "Oculus Utilities has detected that a newer OVRPlugin is available."
+ " Using the newest version is recommended. Do you want to enable it?\n\n"
+ "Current version: "
+ GetVersionDescription(currentPluginPkg.Version)
+ "\nAvailable version: "
+ targetVersion;
if (reenableCurrentPluginPkg)
{
dialogBody = "Oculus Utilities has detected a configuration change that requires re-enabling the current OVRPlugin."
+ " Do you want to proceed?\n\nCurrent version: "
+ GetVersionDescription(currentPluginPkg.Version);
}
int dialogResult = EditorUtility.DisplayDialogComplex("Update Oculus Utilities Plugin", dialogBody, "Yes", "No, Don't Ask Again", "No");
switch (dialogResult)
{
case 0: // "Yes"
userAcceptsUpdate = true;
break;
case 1: // "No, Don't Ask Again"
autoUpdateEnabled = false;
EditorUtility.DisplayDialog("Oculus Utilities OVRPlugin",
"To manually update in the future, use the following menu option:\n\n"
+ "[Oculus -> Tools -> Update OVR Utilities Plugin]",
"Ok",
"");
return;
case 2: // "No"
return;
}
}
if (userAcceptsUpdate)
{
DisableAllUtilitiesPluginPackages();
if (!targetPluginPkg.IsBundledPluginPackage())
{
EnablePluginPackage(targetPluginPkg);
}
if (unityRunningInBatchmode
|| EditorUtility.DisplayDialog("Restart Unity",
"OVRPlugin has been updated to "
+ GetVersionDescription(targetPluginPkg.Version)
+ ".\n\nPlease restart the Unity Editor to complete the update process."
#if !UNITY_2017_1_OR_NEWER
+ " You may need to manually relaunch Unity if you are using Unity 5.6 and higher."
#endif
,
"Restart",
"Not Now"))
{
RestartUnityEditor();
}
}
}
private static bool UnitySupportsEnabledAndroidPlugin()
{
List<PluginPackage> allUtilsPluginPkgs = GetAllUtilitiesPluginPackages();
foreach(PluginPackage pluginPkg in allUtilsPluginPkgs)
{
if (pluginPkg.IsEnabled())
{
if (pluginPkg.IsAndroidUniversalEnabled() && !unityVersionSupportsAndroidUniversal)
{
// Android Universal should only be enabled on supported Unity versions since it can prevent app launch.
return false;
}
else if (!pluginPkg.IsAndroidUniversalEnabled() && pluginPkg.IsAndroidUniversalPresent() && unityVersionSupportsAndroidUniversal)
{
// Android Universal is present and should be enabled on supported Unity versions since ARM64 config will fail otherwise.
return false;
}
}
}
return true;
}
private static void RestartUnityEditor()
{
if (unityRunningInBatchmode)
{
EditorApplication.Exit(0);
}
else
{
restartPending = true;
EditorApplication.OpenProject(GetCurrentProjectPath());
}
}
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
//
// This file was autogenerated by a tool.
// Do not modify it.
//
namespace Microsoft.Azure.Batch
{
using FileStaging;
using Models = Microsoft.Azure.Batch.Protocol.Models;
using System;
using System.Collections.Generic;
using System.Linq;
/// <summary>
/// An Azure Batch task. A task is a piece of work that is associated with a job and runs on a compute node.
/// </summary>
/// <remarks>
/// Batch will retry tasks when a recovery operation is triggered on a compute node. Examples of recovery operations
/// include (but are not limited to) when an unhealthy compute node is rebooted or a compute node disappeared due to
/// host failure. Retries due to recovery operations are independent of and are not counted against the <see cref="TaskConstraints.MaxTaskRetryCount"
/// />. Even if the <see cref="TaskConstraints.MaxTaskRetryCount" /> is 0, an internal retry due to a recovery operation
/// may occur. Because of this, all tasks should be idempotent. This means tasks need to tolerate being interrupted and
/// restarted without causing any corruption or duplicate data. The best practice for long running tasks is to use some
/// form of checkpointing. The maximum lifetime of a task from addition to completion is 180 days. If a task has not
/// completed within 180 days of being added it will be terminated by the Batch service and left in whatever state it
/// was in at that time.
/// </remarks>
public partial class CloudTask : ITransportObjectProvider<Models.TaskAddParameter>, IInheritedBehaviors, IPropertyMetadata
{
private class PropertyContainer : PropertyCollection
{
public readonly PropertyAccessor<AffinityInformation> AffinityInformationProperty;
public readonly PropertyAccessor<IList<ApplicationPackageReference>> ApplicationPackageReferencesProperty;
public readonly PropertyAccessor<AuthenticationTokenSettings> AuthenticationTokenSettingsProperty;
public readonly PropertyAccessor<string> CommandLineProperty;
public readonly PropertyAccessor<ComputeNodeInformation> ComputeNodeInformationProperty;
public readonly PropertyAccessor<TaskConstraints> ConstraintsProperty;
public readonly PropertyAccessor<TaskContainerSettings> ContainerSettingsProperty;
public readonly PropertyAccessor<DateTime?> CreationTimeProperty;
public readonly PropertyAccessor<TaskDependencies> DependsOnProperty;
public readonly PropertyAccessor<string> DisplayNameProperty;
public readonly PropertyAccessor<IList<EnvironmentSetting>> EnvironmentSettingsProperty;
public readonly PropertyAccessor<string> ETagProperty;
public readonly PropertyAccessor<TaskExecutionInformation> ExecutionInformationProperty;
public readonly PropertyAccessor<ExitConditions> ExitConditionsProperty;
public readonly PropertyAccessor<IList<IFileStagingProvider>> FilesToStageProperty;
public readonly PropertyAccessor<string> IdProperty;
public readonly PropertyAccessor<DateTime?> LastModifiedProperty;
public readonly PropertyAccessor<MultiInstanceSettings> MultiInstanceSettingsProperty;
public readonly PropertyAccessor<IList<OutputFile>> OutputFilesProperty;
public readonly PropertyAccessor<Common.TaskState?> PreviousStateProperty;
public readonly PropertyAccessor<DateTime?> PreviousStateTransitionTimeProperty;
public readonly PropertyAccessor<IList<ResourceFile>> ResourceFilesProperty;
public readonly PropertyAccessor<Common.TaskState?> StateProperty;
public readonly PropertyAccessor<DateTime?> StateTransitionTimeProperty;
public readonly PropertyAccessor<TaskStatistics> StatisticsProperty;
public readonly PropertyAccessor<string> UrlProperty;
public readonly PropertyAccessor<UserIdentity> UserIdentityProperty;
public PropertyContainer() : base(BindingState.Unbound)
{
this.AffinityInformationProperty = this.CreatePropertyAccessor<AffinityInformation>(nameof(AffinityInformation), BindingAccess.Read | BindingAccess.Write);
this.ApplicationPackageReferencesProperty = this.CreatePropertyAccessor<IList<ApplicationPackageReference>>(nameof(ApplicationPackageReferences), BindingAccess.Read | BindingAccess.Write);
this.AuthenticationTokenSettingsProperty = this.CreatePropertyAccessor<AuthenticationTokenSettings>(nameof(AuthenticationTokenSettings), BindingAccess.Read | BindingAccess.Write);
this.CommandLineProperty = this.CreatePropertyAccessor<string>(nameof(CommandLine), BindingAccess.Read | BindingAccess.Write);
this.ComputeNodeInformationProperty = this.CreatePropertyAccessor<ComputeNodeInformation>(nameof(ComputeNodeInformation), BindingAccess.None);
this.ConstraintsProperty = this.CreatePropertyAccessor<TaskConstraints>(nameof(Constraints), BindingAccess.Read | BindingAccess.Write);
this.ContainerSettingsProperty = this.CreatePropertyAccessor<TaskContainerSettings>(nameof(ContainerSettings), BindingAccess.Read | BindingAccess.Write);
this.CreationTimeProperty = this.CreatePropertyAccessor<DateTime?>(nameof(CreationTime), BindingAccess.None);
this.DependsOnProperty = this.CreatePropertyAccessor<TaskDependencies>(nameof(DependsOn), BindingAccess.Read | BindingAccess.Write);
this.DisplayNameProperty = this.CreatePropertyAccessor<string>(nameof(DisplayName), BindingAccess.Read | BindingAccess.Write);
this.EnvironmentSettingsProperty = this.CreatePropertyAccessor<IList<EnvironmentSetting>>(nameof(EnvironmentSettings), BindingAccess.Read | BindingAccess.Write);
this.ETagProperty = this.CreatePropertyAccessor<string>(nameof(ETag), BindingAccess.None);
this.ExecutionInformationProperty = this.CreatePropertyAccessor<TaskExecutionInformation>(nameof(ExecutionInformation), BindingAccess.None);
this.ExitConditionsProperty = this.CreatePropertyAccessor<ExitConditions>(nameof(ExitConditions), BindingAccess.Read | BindingAccess.Write);
this.FilesToStageProperty = this.CreatePropertyAccessor<IList<IFileStagingProvider>>(nameof(FilesToStage), BindingAccess.Read | BindingAccess.Write);
this.IdProperty = this.CreatePropertyAccessor<string>(nameof(Id), BindingAccess.Read | BindingAccess.Write);
this.LastModifiedProperty = this.CreatePropertyAccessor<DateTime?>(nameof(LastModified), BindingAccess.None);
this.MultiInstanceSettingsProperty = this.CreatePropertyAccessor<MultiInstanceSettings>(nameof(MultiInstanceSettings), BindingAccess.Read | BindingAccess.Write);
this.OutputFilesProperty = this.CreatePropertyAccessor<IList<OutputFile>>(nameof(OutputFiles), BindingAccess.Read | BindingAccess.Write);
this.PreviousStateProperty = this.CreatePropertyAccessor<Common.TaskState?>(nameof(PreviousState), BindingAccess.None);
this.PreviousStateTransitionTimeProperty = this.CreatePropertyAccessor<DateTime?>(nameof(PreviousStateTransitionTime), BindingAccess.None);
this.ResourceFilesProperty = this.CreatePropertyAccessor<IList<ResourceFile>>(nameof(ResourceFiles), BindingAccess.Read | BindingAccess.Write);
this.StateProperty = this.CreatePropertyAccessor<Common.TaskState?>(nameof(State), BindingAccess.None);
this.StateTransitionTimeProperty = this.CreatePropertyAccessor<DateTime?>(nameof(StateTransitionTime), BindingAccess.None);
this.StatisticsProperty = this.CreatePropertyAccessor<TaskStatistics>(nameof(Statistics), BindingAccess.None);
this.UrlProperty = this.CreatePropertyAccessor<string>(nameof(Url), BindingAccess.None);
this.UserIdentityProperty = this.CreatePropertyAccessor<UserIdentity>(nameof(UserIdentity), BindingAccess.Read | BindingAccess.Write);
}
public PropertyContainer(Models.CloudTask protocolObject) : base(BindingState.Bound)
{
this.AffinityInformationProperty = this.CreatePropertyAccessor(
UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.AffinityInfo, o => new AffinityInformation(o).Freeze()),
nameof(AffinityInformation),
BindingAccess.Read);
this.ApplicationPackageReferencesProperty = this.CreatePropertyAccessor(
ApplicationPackageReference.ConvertFromProtocolCollectionAndFreeze(protocolObject.ApplicationPackageReferences),
nameof(ApplicationPackageReferences),
BindingAccess.Read);
this.AuthenticationTokenSettingsProperty = this.CreatePropertyAccessor(
UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.AuthenticationTokenSettings, o => new AuthenticationTokenSettings(o).Freeze()),
nameof(AuthenticationTokenSettings),
BindingAccess.Read);
this.CommandLineProperty = this.CreatePropertyAccessor(
protocolObject.CommandLine,
nameof(CommandLine),
BindingAccess.Read);
this.ComputeNodeInformationProperty = this.CreatePropertyAccessor(
UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.NodeInfo, o => new ComputeNodeInformation(o).Freeze()),
nameof(ComputeNodeInformation),
BindingAccess.Read);
this.ConstraintsProperty = this.CreatePropertyAccessor(
UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.Constraints, o => new TaskConstraints(o)),
nameof(Constraints),
BindingAccess.Read | BindingAccess.Write);
this.ContainerSettingsProperty = this.CreatePropertyAccessor(
UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.ContainerSettings, o => new TaskContainerSettings(o).Freeze()),
nameof(ContainerSettings),
BindingAccess.Read);
this.CreationTimeProperty = this.CreatePropertyAccessor(
protocolObject.CreationTime,
nameof(CreationTime),
BindingAccess.Read);
this.DependsOnProperty = this.CreatePropertyAccessor(
UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.DependsOn, o => new TaskDependencies(o).Freeze()),
nameof(DependsOn),
BindingAccess.Read);
this.DisplayNameProperty = this.CreatePropertyAccessor(
protocolObject.DisplayName,
nameof(DisplayName),
BindingAccess.Read);
this.EnvironmentSettingsProperty = this.CreatePropertyAccessor(
EnvironmentSetting.ConvertFromProtocolCollectionAndFreeze(protocolObject.EnvironmentSettings),
nameof(EnvironmentSettings),
BindingAccess.Read);
this.ETagProperty = this.CreatePropertyAccessor(
protocolObject.ETag,
nameof(ETag),
BindingAccess.Read);
this.ExecutionInformationProperty = this.CreatePropertyAccessor(
UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.ExecutionInfo, o => new TaskExecutionInformation(o).Freeze()),
nameof(ExecutionInformation),
BindingAccess.Read);
this.ExitConditionsProperty = this.CreatePropertyAccessor(
UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.ExitConditions, o => new ExitConditions(o).Freeze()),
nameof(ExitConditions),
BindingAccess.Read);
this.FilesToStageProperty = this.CreatePropertyAccessor<IList<IFileStagingProvider>>(
nameof(FilesToStage),
BindingAccess.None);
this.IdProperty = this.CreatePropertyAccessor(
protocolObject.Id,
nameof(Id),
BindingAccess.Read);
this.LastModifiedProperty = this.CreatePropertyAccessor(
protocolObject.LastModified,
nameof(LastModified),
BindingAccess.Read);
this.MultiInstanceSettingsProperty = this.CreatePropertyAccessor(
UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.MultiInstanceSettings, o => new MultiInstanceSettings(o).Freeze()),
nameof(MultiInstanceSettings),
BindingAccess.Read);
this.OutputFilesProperty = this.CreatePropertyAccessor(
OutputFile.ConvertFromProtocolCollectionAndFreeze(protocolObject.OutputFiles),
nameof(OutputFiles),
BindingAccess.Read);
this.PreviousStateProperty = this.CreatePropertyAccessor(
UtilitiesInternal.MapNullableEnum<Models.TaskState, Common.TaskState>(protocolObject.PreviousState),
nameof(PreviousState),
BindingAccess.Read);
this.PreviousStateTransitionTimeProperty = this.CreatePropertyAccessor(
protocolObject.PreviousStateTransitionTime,
nameof(PreviousStateTransitionTime),
BindingAccess.Read);
this.ResourceFilesProperty = this.CreatePropertyAccessor(
ResourceFile.ConvertFromProtocolCollectionAndFreeze(protocolObject.ResourceFiles),
nameof(ResourceFiles),
BindingAccess.Read);
this.StateProperty = this.CreatePropertyAccessor(
UtilitiesInternal.MapNullableEnum<Models.TaskState, Common.TaskState>(protocolObject.State),
nameof(State),
BindingAccess.Read);
this.StateTransitionTimeProperty = this.CreatePropertyAccessor(
protocolObject.StateTransitionTime,
nameof(StateTransitionTime),
BindingAccess.Read);
this.StatisticsProperty = this.CreatePropertyAccessor(
UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.Stats, o => new TaskStatistics(o).Freeze()),
nameof(Statistics),
BindingAccess.Read);
this.UrlProperty = this.CreatePropertyAccessor(
protocolObject.Url,
nameof(Url),
BindingAccess.Read);
this.UserIdentityProperty = this.CreatePropertyAccessor(
UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.UserIdentity, o => new UserIdentity(o).Freeze()),
nameof(UserIdentity),
BindingAccess.Read);
}
}
private PropertyContainer propertyContainer;
private readonly BatchClient parentBatchClient;
private readonly string parentJobId;
internal string ParentJobId
{
get
{
return this.parentJobId;
}
}
#region Constructors
internal CloudTask(
BatchClient parentBatchClient,
string parentJobId,
Models.CloudTask protocolObject,
IEnumerable<BatchClientBehavior> baseBehaviors)
{
this.parentJobId = parentJobId;
this.parentBatchClient = parentBatchClient;
InheritUtil.InheritClientBehaviorsAndSetPublicProperty(this, baseBehaviors);
this.propertyContainer = new PropertyContainer(protocolObject);
}
#endregion Constructors
#region IInheritedBehaviors
/// <summary>
/// Gets or sets a list of behaviors that modify or customize requests to the Batch service
/// made via this <see cref="CloudTask"/>.
/// </summary>
/// <remarks>
/// <para>These behaviors are inherited by child objects.</para>
/// <para>Modifications are applied in the order of the collection. The last write wins.</para>
/// </remarks>
public IList<BatchClientBehavior> CustomBehaviors { get; set; }
#endregion IInheritedBehaviors
#region CloudTask
/// <summary>
/// Gets or sets a locality hint that can be used by the Batch service to select a node on which to start the task.
/// </summary>
public AffinityInformation AffinityInformation
{
get { return this.propertyContainer.AffinityInformationProperty.Value; }
set { this.propertyContainer.AffinityInformationProperty.Value = value; }
}
/// <summary>
/// Gets or sets a list of application packages that the Batch service will deploy to the compute node before running
/// the command line.
/// </summary>
public IList<ApplicationPackageReference> ApplicationPackageReferences
{
get { return this.propertyContainer.ApplicationPackageReferencesProperty.Value; }
set
{
this.propertyContainer.ApplicationPackageReferencesProperty.Value = ConcurrentChangeTrackedModifiableList<ApplicationPackageReference>.TransformEnumerableToConcurrentModifiableList(value);
}
}
/// <summary>
/// Gets or sets the settings for an authentication token that the task can use to perform Batch service operations.
/// </summary>
/// <remarks>
/// If this property is set, the Batch service provides the task with an authentication token which can be used to
/// authenticate Batch service operations without requiring an account access key. The token is provided via the
/// AZ_BATCH_AUTHENTICATION_TOKEN environment variable. The operations that the task can carry out using the token
/// depend on the settings. For example, a task can request job permissions in order to add other tasks to the job,
/// or check the status of the job or of other tasks.
/// </remarks>
public AuthenticationTokenSettings AuthenticationTokenSettings
{
get { return this.propertyContainer.AuthenticationTokenSettingsProperty.Value; }
set { this.propertyContainer.AuthenticationTokenSettingsProperty.Value = value; }
}
/// <summary>
/// Gets or sets the command line of the task.
/// </summary>
/// <remarks>
/// The command line does not run under a shell, and therefore cannot take advantage of shell features such as environment
/// variable expansion. If you want to take advantage of such features, you should invoke the shell in the command
/// line, for example using "cmd /c MyCommand" in Windows or "/bin/sh -c MyCommand" in Linux. If the command
/// line refers to file paths, it should use a relative path (relative to the task working directory), or use the
/// Batch provided environment variables (https://docs.microsoft.com/en-us/azure/batch/batch-compute-node-environment-variables).
/// </remarks>
public string CommandLine
{
get { return this.propertyContainer.CommandLineProperty.Value; }
set { this.propertyContainer.CommandLineProperty.Value = value; }
}
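// Example (sketch): echoing the remarks above, invoke a shell explicitly when shell features are
// needed. The public CloudTask(id, commandLine) constructor is assumed from the wider Batch
// client library and is not shown in this excerpt.
//
// CloudTask windowsTask = new CloudTask("task1", "cmd /c MyCommand");
// CloudTask linuxTask = new CloudTask("task2", "/bin/sh -c MyCommand");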
/// <summary>
/// Gets information about the compute node on which the task ran.
/// </summary>
public ComputeNodeInformation ComputeNodeInformation
{
get { return this.propertyContainer.ComputeNodeInformationProperty.Value; }
}
/// <summary>
/// Gets or sets the execution constraints that apply to this task.
/// </summary>
public TaskConstraints Constraints
{
get { return this.propertyContainer.ConstraintsProperty.Value; }
set { this.propertyContainer.ConstraintsProperty.Value = value; }
}
/// <summary>
/// Gets or sets the settings for the container under which the task runs.
/// </summary>
/// <remarks>
/// If the pool that will run this task has <see cref="VirtualMachineConfiguration.ContainerConfiguration"/> set,
/// this must be set as well. If the pool that will run this task doesn't have <see cref="VirtualMachineConfiguration.ContainerConfiguration"/>
/// set, this must not be set. When this is specified, all directories recursively below the AZ_BATCH_NODE_ROOT_DIR
/// (the root of Azure Batch directories on the node) are mapped into the container, all task environment variables
/// are mapped into the container, and the task command line is executed in the container. Files produced in the
/// container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file
/// APIs will not be able to access them.
/// </remarks>
public TaskContainerSettings ContainerSettings
{
get { return this.propertyContainer.ContainerSettingsProperty.Value; }
set { this.propertyContainer.ContainerSettingsProperty.Value = value; }
}
/// <summary>
/// Gets the creation time of the task.
/// </summary>
public DateTime? CreationTime
{
get { return this.propertyContainer.CreationTimeProperty.Value; }
}
/// <summary>
/// Gets or sets any other tasks that this <see cref="CloudTask"/> depends on. The task will not be scheduled until
/// all depended-on tasks have completed successfully.
/// </summary>
/// <remarks>
/// The job must set <see cref="CloudJob.UsesTaskDependencies"/> to true in order to use task dependencies. If UsesTaskDependencies
/// is false (the default), adding a task with dependencies will fail with an error.
/// </remarks>
public TaskDependencies DependsOn
{
get { return this.propertyContainer.DependsOnProperty.Value; }
set { this.propertyContainer.DependsOnProperty.Value = value; }
}
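// Example (sketch): TaskDependencies.OnIds is assumed from the wider Batch client library and is
// not defined in this excerpt; the parent job must set UsesTaskDependencies = true as noted above.
//
// task.DependsOn = TaskDependencies.OnIds("setupTask1", "setupTask2");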
/// <summary>
/// Gets or sets the display name of the task.
/// </summary>
public string DisplayName
{
get { return this.propertyContainer.DisplayNameProperty.Value; }
set { this.propertyContainer.DisplayNameProperty.Value = value; }
}
/// <summary>
/// Gets or sets a list of environment variable settings for the task.
/// </summary>
public IList<EnvironmentSetting> EnvironmentSettings
{
get { return this.propertyContainer.EnvironmentSettingsProperty.Value; }
set
{
this.propertyContainer.EnvironmentSettingsProperty.Value = ConcurrentChangeTrackedModifiableList<EnvironmentSetting>.TransformEnumerableToConcurrentModifiableList(value);
}
}
/// <summary>
/// Gets the ETag for the task.
/// </summary>
public string ETag
{
get { return this.propertyContainer.ETagProperty.Value; }
}
/// <summary>
/// Gets the execution information for the task.
/// </summary>
public TaskExecutionInformation ExecutionInformation
{
get { return this.propertyContainer.ExecutionInformationProperty.Value; }
}
/// <summary>
/// Gets or sets how the Batch service should respond when the task completes.
/// </summary>
public ExitConditions ExitConditions
{
get { return this.propertyContainer.ExitConditionsProperty.Value; }
set { this.propertyContainer.ExitConditionsProperty.Value = value; }
}
/// <summary>
/// Gets or sets a list of files to be staged for the task.
/// </summary>
public IList<IFileStagingProvider> FilesToStage
{
get { return this.propertyContainer.FilesToStageProperty.Value; }
set
{
this.propertyContainer.FilesToStageProperty.Value = ConcurrentChangeTrackedList<IFileStagingProvider>.TransformEnumerableToConcurrentList(value);
}
}
/// <summary>
/// Gets or sets the id of the task.
/// </summary>
public string Id
{
get { return this.propertyContainer.IdProperty.Value; }
set { this.propertyContainer.IdProperty.Value = value; }
}
/// <summary>
/// Gets the last modified time of the task.
/// </summary>
public DateTime? LastModified
{
get { return this.propertyContainer.LastModifiedProperty.Value; }
}
/// <summary>
/// Gets or sets information about how to run the multi-instance task.
/// </summary>
public MultiInstanceSettings MultiInstanceSettings
{
get { return this.propertyContainer.MultiInstanceSettingsProperty.Value; }
set { this.propertyContainer.MultiInstanceSettingsProperty.Value = value; }
}
/// <summary>
/// Gets or sets a list of files that the Batch service will upload from the compute node after running the command
/// line.
/// </summary>
public IList<OutputFile> OutputFiles
{
get { return this.propertyContainer.OutputFilesProperty.Value; }
set
{
this.propertyContainer.OutputFilesProperty.Value = ConcurrentChangeTrackedModifiableList<OutputFile>.TransformEnumerableToConcurrentModifiableList(value);
}
}
/// <summary>
/// Gets the previous state of the task.
/// </summary>
/// <remarks>
/// If the task is in its initial <see cref="Common.TaskState.Active"/> state, the PreviousState property is not
/// defined.
/// </remarks>
public Common.TaskState? PreviousState
{
get { return this.propertyContainer.PreviousStateProperty.Value; }
}
/// <summary>
/// Gets the time at which the task entered its previous state.
/// </summary>
/// <remarks>
/// If the task is in its initial <see cref="Common.TaskState.Active"/> state, the PreviousStateTransitionTime property
/// is not defined.
/// </remarks>
public DateTime? PreviousStateTransitionTime
{
get { return this.propertyContainer.PreviousStateTransitionTimeProperty.Value; }
}
/// <summary>
/// Gets or sets a list of files that the Batch service will download to the compute node before running the command
/// line.
/// </summary>
/// <remarks>
/// There is a maximum size for the list of resource files. When the max size is exceeded, the request will fail
/// and the response error code will be RequestEntityTooLarge. If this occurs, the collection of resource files must
/// be reduced in size. This can be achieved using .zip files, Application Packages, or Docker Containers.
/// </remarks>
public IList<ResourceFile> ResourceFiles
{
get { return this.propertyContainer.ResourceFilesProperty.Value; }
set
{
this.propertyContainer.ResourceFilesProperty.Value = ConcurrentChangeTrackedModifiableList<ResourceFile>.TransformEnumerableToConcurrentModifiableList(value);
}
}
/// <summary>
/// Gets the current state of the task.
/// </summary>
public Common.TaskState? State
{
get { return this.propertyContainer.StateProperty.Value; }
}
/// <summary>
/// Gets the time at which the task entered its current state.
/// </summary>
public DateTime? StateTransitionTime
{
get { return this.propertyContainer.StateTransitionTimeProperty.Value; }
}
/// <summary>
/// Gets resource usage statistics for the task.
/// </summary>
/// <remarks>
/// This property is populated only if the <see cref="CloudTask"/> was retrieved with an <see cref="ODATADetailLevel.ExpandClause"/>
/// including the 'stats' attribute; otherwise it is null.
/// </remarks>
public TaskStatistics Statistics
{
get { return this.propertyContainer.StatisticsProperty.Value; }
}
/// <summary>
/// Gets the URL of the task.
/// </summary>
public string Url
{
get { return this.propertyContainer.UrlProperty.Value; }
}
/// <summary>
/// Gets or sets the user identity under which the task runs.
/// </summary>
/// <remarks>
/// If omitted, the task runs as a non-administrative user unique to the task.
/// </remarks>
public UserIdentity UserIdentity
{
get { return this.propertyContainer.UserIdentityProperty.Value; }
set { this.propertyContainer.UserIdentityProperty.Value = value; }
}
#endregion // CloudTask
#region IPropertyMetadata
bool IModifiable.HasBeenModified
{
get { return this.propertyContainer.HasBeenModified; }
}
bool IReadOnly.IsReadOnly
{
get { return this.propertyContainer.IsReadOnly; }
set { this.propertyContainer.IsReadOnly = value; }
}
#endregion //IPropertyMetadata
#region Internal/private methods
/// <summary>
/// Return a protocol object of the requested type.
/// </summary>
/// <returns>The protocol object of the requested type.</returns>
Models.TaskAddParameter ITransportObjectProvider<Models.TaskAddParameter>.GetTransportObject()
{
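            // Copy each object-model property onto the corresponding REST protocol (TaskAddParameter)
            // field, converting nested object-model types to their protocol counterparts when present.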
Models.TaskAddParameter result = new Models.TaskAddParameter()
{
AffinityInfo = UtilitiesInternal.CreateObjectWithNullCheck(this.AffinityInformation, (o) => o.GetTransportObject()),
ApplicationPackageReferences = UtilitiesInternal.ConvertToProtocolCollection(this.ApplicationPackageReferences),
AuthenticationTokenSettings = UtilitiesInternal.CreateObjectWithNullCheck(this.AuthenticationTokenSettings, (o) => o.GetTransportObject()),
CommandLine = this.CommandLine,
Constraints = UtilitiesInternal.CreateObjectWithNullCheck(this.Constraints, (o) => o.GetTransportObject()),
ContainerSettings = UtilitiesInternal.CreateObjectWithNullCheck(this.ContainerSettings, (o) => o.GetTransportObject()),
DependsOn = UtilitiesInternal.CreateObjectWithNullCheck(this.DependsOn, (o) => o.GetTransportObject()),
DisplayName = this.DisplayName,
EnvironmentSettings = UtilitiesInternal.ConvertToProtocolCollection(this.EnvironmentSettings),
ExitConditions = UtilitiesInternal.CreateObjectWithNullCheck(this.ExitConditions, (o) => o.GetTransportObject()),
Id = this.Id,
MultiInstanceSettings = UtilitiesInternal.CreateObjectWithNullCheck(this.MultiInstanceSettings, (o) => o.GetTransportObject()),
OutputFiles = UtilitiesInternal.ConvertToProtocolCollection(this.OutputFiles),
ResourceFiles = UtilitiesInternal.ConvertToProtocolCollection(this.ResourceFiles),
UserIdentity = UtilitiesInternal.CreateObjectWithNullCheck(this.UserIdentity, (o) => o.GetTransportObject()),
};
return result;
}
#endregion // Internal/private methods
}
}
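// Illustrative usage sketch for the DependsOn / UsesTaskDependencies pair documented above; it is not
// part of the generated SDK source. The BatchClient instance, job and pool ids, and command lines are
// assumptions, and the TaskDependencies.OnIds factory name is quoted from memory of the client API.
namespace Microsoft.Azure.Batch.UsageSketches
{
    using System.Threading.Tasks;

    internal static class TaskDependencySketch
    {
        internal static async Task AddDependentTasksAsync(BatchClient batchClient)
        {
            // The job must opt in to task dependencies; otherwise adding a task whose
            // DependsOn is set fails with an error (see the remarks on DependsOn above).
            CloudJob job = batchClient.JobOperations.CreateJob("sample-job", new PoolInformation { PoolId = "sample-pool" });
            job.UsesTaskDependencies = true;
            await job.CommitAsync();

            CloudTask parent = new CloudTask("parent", "cmd /c echo parent");
            CloudTask child = new CloudTask("child", "cmd /c echo child")
            {
                // The child task only starts after the "parent" task completes successfully.
                DependsOn = TaskDependencies.OnIds(new[] { "parent" })
            };
            await batchClient.JobOperations.AddTaskAsync(job.Id, new[] { parent, child });
        }
    }
}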
| |
using UnityEngine;
using UnityStandardAssets.CrossPlatformInput;
using UnityStandardAssets.Utility;
using Random = UnityEngine.Random;
namespace UnityStandardAssets.Characters.FirstPerson
{
[RequireComponent(typeof (CharacterController))]
[RequireComponent(typeof (AudioSource))]
public class FirstPersonController : MonoBehaviour
{
[SerializeField] private bool m_IsWalking;
[SerializeField] private bool m_IsCrouching;
[SerializeField] private float m_WalkSpeed;
[SerializeField] private float m_RunSpeed;
[SerializeField] private float crouchSmoothness;
[SerializeField] [Range(0f, 1f)] private float m_RunstepLenghten;
[SerializeField] private float m_JumpSpeed;
[SerializeField] private float m_StickToGroundForce;
[SerializeField] private float m_GravityMultiplier;
//[SerializeField] private MouseLook m_MouseLook; DO NOT UNCOMMENT THIS. Had to create my own mouse_look script.
[SerializeField] private bool m_UseFovKick;
[SerializeField] private FOVKick m_FovKick = new FOVKick();
[SerializeField] private bool m_UseHeadBob;
[SerializeField] private CurveControlledBob m_HeadBob = new CurveControlledBob();
[SerializeField] private LerpControlledBob m_JumpBob = new LerpControlledBob();
[SerializeField] private float m_StepInterval;
[SerializeField] private AudioClip[] m_FootstepSounds; // an array of footstep sounds that will be randomly selected from.
[SerializeField] private AudioClip m_JumpSound; // the sound played when character leaves the ground.
[SerializeField] private AudioClip m_LandSound; // the sound played when character touches back on ground.
private Camera m_Camera;
private bool m_Jump;
private float m_YRotation;
private Vector2 m_Input;
private Vector3 m_MoveDir = Vector3.zero;
private CharacterController m_CharacterController;
private CollisionFlags m_CollisionFlags;
private bool m_PreviouslyGrounded;
private Vector3 m_OriginalCameraPosition;
private float m_StepCycle;
private float m_NextStep;
private bool m_Jumping;
private AudioSource m_AudioSource;
private float originalCharacterHeight;
private float crouchCharacterHeight;
// Use this for initialization
private void Start()
{
m_CharacterController = GetComponent<CharacterController>();
m_Camera = Camera.main;
m_OriginalCameraPosition = m_Camera.transform.localPosition;
m_FovKick.Setup(m_Camera);
m_HeadBob.Setup(m_Camera, m_StepInterval);
m_StepCycle = 0f;
m_NextStep = m_StepCycle/2f;
m_Jumping = false;
m_AudioSource = GetComponent<AudioSource>();
originalCharacterHeight = m_CharacterController.height;
crouchCharacterHeight = originalCharacterHeight * 0.5f;
//m_MouseLook.Init(transform , m_Camera.transform);
}
// Update is called once per frame
private void Update()
{
//RotateView();
            // the jump state needs to be read here to make sure it is not missed
if (!m_Jump)
{
m_Jump = CrossPlatformInputManager.GetButtonDown("Jump");
}
if(CrossPlatformInputManager.GetButtonDown("Crouching"))
{
m_IsCrouching = !m_IsCrouching;
}
float lastHeight = m_CharacterController.height;
if (m_IsCrouching)
{
m_CharacterController.height = Mathf.Lerp(m_CharacterController.height, crouchCharacterHeight, crouchSmoothness * Time.deltaTime);
}
else
{
m_CharacterController.height = Mathf.Lerp(m_CharacterController.height, originalCharacterHeight, crouchSmoothness * Time.deltaTime);
}
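            // The CharacterController's height changes symmetrically about its center, so shift the
            // transform by half of the height delta to keep the capsule's feet anchored to the ground.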
Vector3 heightOffset = new Vector3(0, (m_CharacterController.height - lastHeight) * 0.5f, 0);
transform.position += heightOffset;
if (!m_PreviouslyGrounded && m_CharacterController.isGrounded)
{
StartCoroutine(m_JumpBob.DoBobCycle());
PlayLandingSound();
m_MoveDir.y = 0f;
m_Jumping = false;
}
if (!m_CharacterController.isGrounded && !m_Jumping && m_PreviouslyGrounded)
{
m_MoveDir.y = 0f;
}
m_PreviouslyGrounded = m_CharacterController.isGrounded;
}
private void PlayLandingSound()
{
m_AudioSource.clip = m_LandSound;
m_AudioSource.Play();
m_NextStep = m_StepCycle + .5f;
}
private void FixedUpdate()
{
float speed;
GetInput(out speed);
            // always move along the camera forward as it is the direction that is being aimed at
Vector3 desiredMove = transform.forward*m_Input.y + transform.right*m_Input.x;
// get a normal for the surface that is being touched to move along it
RaycastHit hitInfo;
Physics.SphereCast(transform.position, m_CharacterController.radius, Vector3.down, out hitInfo,
crouchCharacterHeight, Physics.AllLayers, QueryTriggerInteraction.Ignore);
desiredMove = Vector3.ProjectOnPlane(desiredMove, hitInfo.normal).normalized;
m_MoveDir.x = desiredMove.x*speed;
m_MoveDir.z = desiredMove.z*speed;
if (m_CharacterController.isGrounded)
{
m_MoveDir.y = -m_StickToGroundForce;
if (m_Jump)
{
m_MoveDir.y = m_JumpSpeed;
PlayJumpSound();
m_Jump = false;
m_Jumping = true;
}
}
else
{
m_MoveDir += Physics.gravity*m_GravityMultiplier*Time.fixedDeltaTime;
}
m_CollisionFlags = m_CharacterController.Move(m_MoveDir*Time.fixedDeltaTime);
ProgressStepCycle(speed);
UpdateCameraPosition(speed);
//m_MouseLook.UpdateCursorLock();
}
private void PlayJumpSound()
{
m_AudioSource.clip = m_JumpSound;
m_AudioSource.Play();
}
private void ProgressStepCycle(float speed)
{
if (m_CharacterController.velocity.sqrMagnitude > 0 && (m_Input.x != 0 || m_Input.y != 0))
{
m_StepCycle += (m_CharacterController.velocity.magnitude + (speed*(m_IsWalking ? 1f : m_RunstepLenghten)))*
Time.fixedDeltaTime;
}
if (!(m_StepCycle > m_NextStep))
{
return;
}
m_NextStep = m_StepCycle + m_StepInterval;
PlayFootStepAudio();
}
private void PlayFootStepAudio()
{
if (!m_CharacterController.isGrounded)
{
return;
}
// pick & play a random footstep sound from the array,
// excluding sound at index 0
int n = Random.Range(1, m_FootstepSounds.Length);
m_AudioSource.clip = m_FootstepSounds[n];
m_AudioSource.PlayOneShot(m_AudioSource.clip);
// move picked sound to index 0 so it's not picked next time
m_FootstepSounds[n] = m_FootstepSounds[0];
m_FootstepSounds[0] = m_AudioSource.clip;
}
private void UpdateCameraPosition(float speed)
{
Vector3 newCameraPosition;
if (!m_UseHeadBob)
{
return;
}
if (m_CharacterController.velocity.magnitude > 0 && m_CharacterController.isGrounded)
{
m_Camera.transform.localPosition =
m_HeadBob.DoHeadBob(m_CharacterController.velocity.magnitude +
(speed*(m_IsWalking ? 1f : m_RunstepLenghten)));
newCameraPosition = m_Camera.transform.localPosition;
newCameraPosition.y = m_Camera.transform.localPosition.y - m_JumpBob.Offset();
}
else
{
newCameraPosition = m_Camera.transform.localPosition;
newCameraPosition.y = m_OriginalCameraPosition.y - m_JumpBob.Offset();
}
m_Camera.transform.localPosition = newCameraPosition;
}
private void GetInput(out float speed)
{
// Read input
float horizontal = CrossPlatformInputManager.GetAxis("Horizontal");
float vertical = CrossPlatformInputManager.GetAxis("Vertical");
bool waswalking = m_IsWalking;
#if !MOBILE_INPUT
// On standalone builds, walk/run speed is modified by a key press.
// keep track of whether or not the character is walking or running
            m_IsWalking = !CrossPlatformInputManager.GetButton("Running");
#endif
// set the desired speed to be walking or running
speed = m_IsWalking ? m_WalkSpeed : m_RunSpeed;
m_Input = new Vector2(horizontal, vertical);
// normalize input if it exceeds 1 in combined length:
if (m_Input.sqrMagnitude > 1)
{
m_Input.Normalize();
}
            // handle speed changes to give an FOV kick,
            // but only when the walk/run state changes, the FOV kick is enabled and the player is moving
if (m_IsWalking != waswalking && m_UseFovKick && m_CharacterController.velocity.sqrMagnitude > 0)
{
StopAllCoroutines();
StartCoroutine(!m_IsWalking ? m_FovKick.FOVKickUp() : m_FovKick.FOVKickDown());
}
}
private void RotateView()
{
//m_MouseLook.LookRotation (transform, m_Camera.transform);
}
private void OnControllerColliderHit(ControllerColliderHit hit)
{
Rigidbody body = hit.collider.attachedRigidbody;
            // don't move the rigidbody if the character is on top of it
if (m_CollisionFlags == CollisionFlags.Below)
{
return;
}
if (body == null || body.isKinematic)
{
return;
}
body.AddForceAtPosition(m_CharacterController.velocity*0.1f, hit.point, ForceMode.Impulse);
}
}
}
| |
#nullable enable
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using BTCPayServer.Abstractions.Constants;
using BTCPayServer.Abstractions.Extensions;
using BTCPayServer.Client;
using BTCPayServer.Client.Models;
using BTCPayServer.Payments;
using BTCPayServer.Services;
using BTCPayServer.Services.Invoices;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Cors;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Routing;
using NBitcoin;
using CreateInvoiceRequest = BTCPayServer.Client.Models.CreateInvoiceRequest;
using InvoiceData = BTCPayServer.Client.Models.InvoiceData;
namespace BTCPayServer.Controllers.Greenfield
{
[ApiController]
[Authorize(AuthenticationSchemes = AuthenticationSchemes.Greenfield)]
[EnableCors(CorsPolicies.All)]
public class GreenfieldInvoiceController : Controller
{
private readonly UIInvoiceController _invoiceController;
private readonly InvoiceRepository _invoiceRepository;
private readonly LinkGenerator _linkGenerator;
private readonly BTCPayNetworkProvider _btcPayNetworkProvider;
private readonly EventAggregator _eventAggregator;
private readonly PaymentMethodHandlerDictionary _paymentMethodHandlerDictionary;
public LanguageService LanguageService { get; }
public GreenfieldInvoiceController(UIInvoiceController invoiceController, InvoiceRepository invoiceRepository,
LinkGenerator linkGenerator, LanguageService languageService, BTCPayNetworkProvider btcPayNetworkProvider,
EventAggregator eventAggregator, PaymentMethodHandlerDictionary paymentMethodHandlerDictionary)
{
_invoiceController = invoiceController;
_invoiceRepository = invoiceRepository;
_linkGenerator = linkGenerator;
_btcPayNetworkProvider = btcPayNetworkProvider;
_eventAggregator = eventAggregator;
_paymentMethodHandlerDictionary = paymentMethodHandlerDictionary;
LanguageService = languageService;
}
[Authorize(Policy = Policies.CanViewInvoices,
AuthenticationSchemes = AuthenticationSchemes.Greenfield)]
[HttpGet("~/api/v1/stores/{storeId}/invoices")]
public async Task<IActionResult> GetInvoices(string storeId, [FromQuery] string[]? orderId = null, [FromQuery] string[]? status = null,
[FromQuery]
[ModelBinder(typeof(ModelBinders.DateTimeOffsetModelBinder))]
DateTimeOffset? startDate = null,
[FromQuery]
[ModelBinder(typeof(ModelBinders.DateTimeOffsetModelBinder))]
DateTimeOffset? endDate = null,
[FromQuery] string? textSearch = null,
[FromQuery] bool includeArchived = false,
[FromQuery] int? skip = null,
[FromQuery] int? take = null
)
{
var store = HttpContext.GetStoreData();
if (store == null)
{
return StoreNotFound();
}
if (startDate is DateTimeOffset s &&
endDate is DateTimeOffset e &&
s > e)
{
this.ModelState.AddModelError(nameof(startDate), "startDate should not be above endDate");
this.ModelState.AddModelError(nameof(endDate), "endDate should not be below startDate");
}
if (!ModelState.IsValid)
return this.CreateValidationError(ModelState);
var invoices =
await _invoiceRepository.GetInvoices(new InvoiceQuery()
{
Skip = skip,
Take = take,
StoreId = new[] { store.Id },
IncludeArchived = includeArchived,
StartDate = startDate,
EndDate = endDate,
OrderId = orderId,
Status = status,
TextSearch = textSearch
});
return Ok(invoices.Select(ToModel));
}
[Authorize(Policy = Policies.CanViewInvoices,
AuthenticationSchemes = AuthenticationSchemes.Greenfield)]
[HttpGet("~/api/v1/stores/{storeId}/invoices/{invoiceId}")]
public async Task<IActionResult> GetInvoice(string storeId, string invoiceId)
{
var store = HttpContext.GetStoreData();
if (store == null)
{
return InvoiceNotFound();
}
var invoice = await _invoiceRepository.GetInvoice(invoiceId, true);
if (invoice?.StoreId != store.Id)
{
return InvoiceNotFound();
}
return Ok(ToModel(invoice));
}
[Authorize(Policy = Policies.CanModifyInvoices,
AuthenticationSchemes = AuthenticationSchemes.Greenfield)]
[HttpDelete("~/api/v1/stores/{storeId}/invoices/{invoiceId}")]
public async Task<IActionResult> ArchiveInvoice(string storeId, string invoiceId)
{
var store = HttpContext.GetStoreData();
if (store == null)
{
return InvoiceNotFound();
}
var invoice = await _invoiceRepository.GetInvoice(invoiceId, true);
if (invoice?.StoreId != store.Id)
{
return InvoiceNotFound();
}
await _invoiceRepository.ToggleInvoiceArchival(invoiceId, true, storeId);
return Ok();
}
[Authorize(Policy = Policies.CanModifyInvoices,
AuthenticationSchemes = AuthenticationSchemes.Greenfield)]
[HttpPut("~/api/v1/stores/{storeId}/invoices/{invoiceId}")]
public async Task<IActionResult> UpdateInvoice(string storeId, string invoiceId, UpdateInvoiceRequest request)
{
var store = HttpContext.GetStoreData();
if (store == null)
{
return InvoiceNotFound();
}
var result = await _invoiceRepository.UpdateInvoiceMetadata(invoiceId, storeId, request.Metadata);
if (result != null)
{
return Ok(ToModel(result));
}
return InvoiceNotFound();
}
[Authorize(Policy = Policies.CanCreateInvoice,
AuthenticationSchemes = AuthenticationSchemes.Greenfield)]
[HttpPost("~/api/v1/stores/{storeId}/invoices")]
public async Task<IActionResult> CreateInvoice(string storeId, CreateInvoiceRequest request)
{
var store = HttpContext.GetStoreData();
if (store == null)
{
return StoreNotFound();
}
if (request.Amount < 0.0m)
{
ModelState.AddModelError(nameof(request.Amount), "The amount should be 0 or more.");
}
request.Checkout = request.Checkout ?? new CreateInvoiceRequest.CheckoutOptions();
if (request.Checkout.PaymentMethods?.Any() is true)
{
for (int i = 0; i < request.Checkout.PaymentMethods.Length; i++)
{
if (!PaymentMethodId.TryParse(request.Checkout.PaymentMethods[i], out _))
{
request.AddModelError(invoiceRequest => invoiceRequest.Checkout.PaymentMethods[i],
"Invalid payment method", this);
}
}
}
if (request.Checkout.Expiration != null && request.Checkout.Expiration < TimeSpan.FromSeconds(30.0))
{
request.AddModelError(invoiceRequest => invoiceRequest.Checkout.Expiration,
"Expiration time must be at least 30 seconds", this);
}
if (request.Checkout.PaymentTolerance != null &&
(request.Checkout.PaymentTolerance < 0 || request.Checkout.PaymentTolerance > 100))
{
request.AddModelError(invoiceRequest => invoiceRequest.Checkout.PaymentTolerance,
"PaymentTolerance can only be between 0 and 100 percent", this);
}
if (request.Checkout.DefaultLanguage != null)
{
var lang = LanguageService.FindLanguage(request.Checkout.DefaultLanguage);
if (lang == null)
{
request.AddModelError(invoiceRequest => invoiceRequest.Checkout.DefaultLanguage,
"The requested defaultLang does not exists, Browse the ~/misc/lang page of your BTCPay Server instance to see the list of supported languages.", this);
}
else
{
                    // Ensure the language code is stored with its canonical casing
request.Checkout.DefaultLanguage = lang.Code;
}
}
if (!ModelState.IsValid)
return this.CreateValidationError(ModelState);
try
{
var invoice = await _invoiceController.CreateInvoiceCoreRaw(request, store,
Request.GetAbsoluteRoot());
return Ok(ToModel(invoice));
}
catch (BitpayHttpException e)
{
return this.CreateAPIError(null, e.Message);
}
}
[Authorize(Policy = Policies.CanModifyInvoices,
AuthenticationSchemes = AuthenticationSchemes.Greenfield)]
[HttpPost("~/api/v1/stores/{storeId}/invoices/{invoiceId}/status")]
public async Task<IActionResult> MarkInvoiceStatus(string storeId, string invoiceId,
MarkInvoiceStatusRequest request)
{
var store = HttpContext.GetStoreData();
if (store == null)
{
return InvoiceNotFound();
}
var invoice = await _invoiceRepository.GetInvoice(invoiceId, true);
            if (invoice?.StoreId != store.Id)
{
return InvoiceNotFound();
}
if (!await _invoiceRepository.MarkInvoiceStatus(invoice.Id, request.Status))
{
ModelState.AddModelError(nameof(request.Status),
"Status can only be marked to invalid or settled within certain conditions.");
}
if (!ModelState.IsValid)
return this.CreateValidationError(ModelState);
return await GetInvoice(storeId, invoiceId);
}
[Authorize(Policy = Policies.CanModifyInvoices,
AuthenticationSchemes = AuthenticationSchemes.Greenfield)]
[HttpPost("~/api/v1/stores/{storeId}/invoices/{invoiceId}/unarchive")]
public async Task<IActionResult> UnarchiveInvoice(string storeId, string invoiceId)
{
var store = HttpContext.GetStoreData();
if (store == null)
{
return InvoiceNotFound();
}
var invoice = await _invoiceRepository.GetInvoice(invoiceId, true);
            if (invoice?.StoreId != store.Id)
{
return InvoiceNotFound();
}
if (!invoice.Archived)
{
return this.CreateAPIError("already-unarchived", "Invoice is already unarchived");
}
if (!ModelState.IsValid)
return this.CreateValidationError(ModelState);
await _invoiceRepository.ToggleInvoiceArchival(invoiceId, false, storeId);
return await GetInvoice(storeId, invoiceId);
}
[Authorize(Policy = Policies.CanViewInvoices,
AuthenticationSchemes = AuthenticationSchemes.Greenfield)]
[HttpGet("~/api/v1/stores/{storeId}/invoices/{invoiceId}/payment-methods")]
public async Task<IActionResult> GetInvoicePaymentMethods(string storeId, string invoiceId, bool onlyAccountedPayments = true)
{
var store = HttpContext.GetStoreData();
if (store == null)
{
return InvoiceNotFound();
}
var invoice = await _invoiceRepository.GetInvoice(invoiceId, true);
if (invoice?.StoreId != store.Id)
{
return InvoiceNotFound();
}
return Ok(ToPaymentMethodModels(invoice, onlyAccountedPayments));
}
[Authorize(Policy = Policies.CanViewInvoices,
AuthenticationSchemes = AuthenticationSchemes.Greenfield)]
[HttpPost("~/api/v1/stores/{storeId}/invoices/{invoiceId}/payment-methods/{paymentMethod}/activate")]
public async Task<IActionResult> ActivateInvoicePaymentMethod(string storeId, string invoiceId, string paymentMethod)
{
var store = HttpContext.GetStoreData();
if (store == null)
{
return InvoiceNotFound();
}
var invoice = await _invoiceRepository.GetInvoice(invoiceId, true);
if (invoice?.StoreId != store.Id)
{
return InvoiceNotFound();
}
if (PaymentMethodId.TryParse(paymentMethod, out var paymentMethodId))
{
await _invoiceRepository.ActivateInvoicePaymentMethod(_eventAggregator, _btcPayNetworkProvider,
_paymentMethodHandlerDictionary, store, invoice, paymentMethodId);
return Ok();
}
ModelState.AddModelError(nameof(paymentMethod), "Invalid payment method");
return this.CreateValidationError(ModelState);
}
private IActionResult InvoiceNotFound()
{
return this.CreateAPIError(404, "invoice-not-found", "The invoice was not found");
}
private IActionResult StoreNotFound()
{
return this.CreateAPIError(404, "store-not-found", "The store was not found");
}
private InvoicePaymentMethodDataModel[] ToPaymentMethodModels(InvoiceEntity entity, bool includeAccountedPaymentOnly)
{
return entity.GetPaymentMethods().Select(
method =>
{
var accounting = method.Calculate();
var details = method.GetPaymentMethodDetails();
var payments = method.ParentEntity.GetPayments(includeAccountedPaymentOnly).Where(paymentEntity =>
paymentEntity.GetPaymentMethodId() == method.GetId());
return new InvoicePaymentMethodDataModel()
{
Activated = details.Activated,
PaymentMethod = method.GetId().ToStringNormalized(),
CryptoCode = method.GetId().CryptoCode,
Destination = details.GetPaymentDestination(),
Rate = method.Rate,
Due = accounting.DueUncapped.ToDecimal(MoneyUnit.BTC),
TotalPaid = accounting.Paid.ToDecimal(MoneyUnit.BTC),
PaymentMethodPaid = accounting.CryptoPaid.ToDecimal(MoneyUnit.BTC),
Amount = accounting.TotalDue.ToDecimal(MoneyUnit.BTC),
NetworkFee = accounting.NetworkFee.ToDecimal(MoneyUnit.BTC),
PaymentLink =
method.GetId().PaymentType.GetPaymentLink(method.Network, details, accounting.Due,
Request.GetAbsoluteRoot()),
Payments = payments.Select(paymentEntity => ToPaymentModel(entity, paymentEntity)).ToList(),
AdditionalData = details.GetAdditionalData()
};
}).ToArray();
}
public static InvoicePaymentMethodDataModel.Payment ToPaymentModel(InvoiceEntity entity, PaymentEntity paymentEntity)
{
var data = paymentEntity.GetCryptoPaymentData();
return new InvoicePaymentMethodDataModel.Payment()
{
Destination = data.GetDestination(),
Id = data.GetPaymentId(),
Status = !paymentEntity.Accounted
? InvoicePaymentMethodDataModel.Payment.PaymentStatus.Invalid
: data.PaymentConfirmed(paymentEntity, entity.SpeedPolicy) || data.PaymentCompleted(paymentEntity)
? InvoicePaymentMethodDataModel.Payment.PaymentStatus.Settled
: InvoicePaymentMethodDataModel.Payment.PaymentStatus.Processing,
Fee = paymentEntity.NetworkFee,
Value = data.GetValue(),
ReceivedDate = paymentEntity.ReceivedTime.DateTime
};
}
private InvoiceData ToModel(InvoiceEntity entity)
{
var statuses = new List<InvoiceStatus>();
var state = entity.GetInvoiceState();
if (state.CanMarkComplete())
{
statuses.Add(InvoiceStatus.Settled);
}
if (state.CanMarkInvalid())
{
statuses.Add(InvoiceStatus.Invalid);
}
return new InvoiceData()
{
StoreId = entity.StoreId,
ExpirationTime = entity.ExpirationTime,
MonitoringExpiration = entity.MonitoringExpiration,
CreatedTime = entity.InvoiceTime,
Amount = entity.Price,
Type = entity.Type,
Id = entity.Id,
CheckoutLink = _linkGenerator.CheckoutLink(entity.Id, Request.Scheme, Request.Host, Request.PathBase),
Status = entity.Status.ToModernStatus(),
AdditionalStatus = entity.ExceptionStatus,
Currency = entity.Currency,
Archived = entity.Archived,
Metadata = entity.Metadata.ToJObject(),
AvailableStatusesForManualMarking = statuses.ToArray(),
Checkout = new CreateInvoiceRequest.CheckoutOptions()
{
Expiration = entity.ExpirationTime - entity.InvoiceTime,
Monitoring = entity.MonitoringExpiration - entity.ExpirationTime,
PaymentTolerance = entity.PaymentTolerance,
PaymentMethods =
entity.GetPaymentMethods().Select(method => method.GetId().ToStringNormalized()).ToArray(),
DefaultPaymentMethod = entity.DefaultPaymentMethod,
SpeedPolicy = entity.SpeedPolicy,
DefaultLanguage = entity.DefaultLanguage,
RedirectAutomatically = entity.RedirectAutomatically,
RequiresRefundEmail = entity.RequiresRefundEmail,
RedirectURL = entity.RedirectURLTemplate
}
};
}
}
}
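// Illustrative client-side sketch for the invoice creation endpoint above, using the BTCPayServer.Client
// Greenfield client. It is not part of the controller source; the server URL, API key, store id, and
// the exact client method signatures are assumptions.
namespace BTCPayServer.Controllers.Greenfield.UsageSketches
{
    using System;
    using System.Threading.Tasks;
    using BTCPayServer.Client;
    using BTCPayServer.Client.Models;

    internal static class CreateInvoiceSketch
    {
        internal static async Task<InvoiceData> CreateAsync()
        {
            var client = new BTCPayServerClient(new Uri("https://btcpay.example.com"), "API_KEY");
            var request = new CreateInvoiceRequest
            {
                Amount = 10m,
                Currency = "USD",
                Checkout = new CreateInvoiceRequest.CheckoutOptions
                {
                    // Expiration must be at least 30 seconds and the tolerance between 0 and 100,
                    // otherwise the controller above returns a validation error.
                    Expiration = TimeSpan.FromMinutes(15),
                    PaymentTolerance = 1
                }
            };
            return await client.CreateInvoice("STORE_ID", request);
        }
    }
}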
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace FunMatchGame.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
public class ObjectGenerator
{
internal const int DefaultCollectionSize = 2;
private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();
/// <summary>
/// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
/// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
/// Complex types: POCO types.
/// Nullables: <see cref="Nullable{T}"/>.
/// Arrays: arrays of simple types or complex types.
        /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>.
        /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc.
/// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
/// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
/// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>An object of the given type.</returns>
public object GenerateObject(Type type)
{
return GenerateObject(type, new Dictionary<Type, object>());
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
try
{
if (SimpleTypeObjectGenerator.CanGenerateObject(type))
{
return SimpleObjectGenerator.GenerateObject(type);
}
if (type.IsArray)
{
return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsGenericType)
{
return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IDictionary))
{
return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IDictionary).IsAssignableFrom(type))
{
return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IList) ||
type == typeof(IEnumerable) ||
type == typeof(ICollection))
{
return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IList).IsAssignableFrom(type))
{
return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IQueryable))
{
return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsEnum)
{
return GenerateEnum(type);
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
}
catch
{
// Returns null if anything fails
return null;
}
return null;
}
private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
{
Type genericTypeDefinition = type.GetGenericTypeDefinition();
if (genericTypeDefinition == typeof(Nullable<>))
{
return GenerateNullable(type, createdObjectReferences);
}
if (genericTypeDefinition == typeof(KeyValuePair<,>))
{
return GenerateKeyValuePair(type, createdObjectReferences);
}
if (IsTuple(genericTypeDefinition))
{
return GenerateTuple(type, createdObjectReferences);
}
Type[] genericArguments = type.GetGenericArguments();
if (genericArguments.Length == 1)
{
if (genericTypeDefinition == typeof(IList<>) ||
genericTypeDefinition == typeof(IEnumerable<>) ||
genericTypeDefinition == typeof(ICollection<>))
{
Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
}
if (genericTypeDefinition == typeof(IQueryable<>))
{
return GenerateQueryable(type, collectionSize, createdObjectReferences);
}
Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
if (closedCollectionType.IsAssignableFrom(type))
{
return GenerateCollection(type, collectionSize, createdObjectReferences);
}
}
if (genericArguments.Length == 2)
{
if (genericTypeDefinition == typeof(IDictionary<,>))
{
Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
}
Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
if (closedDictionaryType.IsAssignableFrom(type))
{
return GenerateDictionary(type, collectionSize, createdObjectReferences);
}
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
return null;
}
private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = type.GetGenericArguments();
object[] parameterValues = new object[genericArgs.Length];
bool failedToCreateTuple = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < genericArgs.Length; i++)
{
parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
failedToCreateTuple &= parameterValues[i] == null;
}
if (failedToCreateTuple)
{
return null;
}
object result = Activator.CreateInstance(type, parameterValues);
return result;
}
private static bool IsTuple(Type genericTypeDefinition)
{
return genericTypeDefinition == typeof(Tuple<>) ||
genericTypeDefinition == typeof(Tuple<,>) ||
genericTypeDefinition == typeof(Tuple<,,>) ||
genericTypeDefinition == typeof(Tuple<,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,,>);
}
private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = keyValuePairType.GetGenericArguments();
Type typeK = genericArgs[0];
Type typeV = genericArgs[1];
ObjectGenerator objectGenerator = new ObjectGenerator();
object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
if (keyObject == null && valueObject == null)
{
// Failed to create key and values
return null;
}
object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
return result;
}
private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = arrayType.GetElementType();
Array result = Array.CreateInstance(type, size);
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
result.SetValue(element, i);
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type typeK = typeof(object);
Type typeV = typeof(object);
if (dictionaryType.IsGenericType)
{
Type[] genericArgs = dictionaryType.GetGenericArguments();
typeK = genericArgs[0];
typeV = genericArgs[1];
}
object result = Activator.CreateInstance(dictionaryType);
MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
if (newKey == null)
{
// Cannot generate a valid key
return null;
}
bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
if (!containsKey)
{
object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
addMethod.Invoke(result, new object[] { newKey, newValue });
}
}
return result;
}
private static object GenerateEnum(Type enumType)
{
Array possibleValues = Enum.GetValues(enumType);
if (possibleValues.Length > 0)
{
return possibleValues.GetValue(0);
}
return null;
}
private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
{
bool isGeneric = queryableType.IsGenericType;
object list;
if (isGeneric)
{
Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
list = GenerateCollection(listType, size, createdObjectReferences);
}
else
{
list = GenerateArray(typeof(object[]), size, createdObjectReferences);
}
if (list == null)
{
return null;
}
if (isGeneric)
{
Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
return asQueryableMethod.Invoke(null, new[] { list });
}
return Queryable.AsQueryable((IEnumerable)list);
}
private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = collectionType.IsGenericType ?
collectionType.GetGenericArguments()[0] :
typeof(object);
object result = Activator.CreateInstance(collectionType);
MethodInfo addMethod = collectionType.GetMethod("Add");
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
addMethod.Invoke(result, new object[] { element });
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
{
Type type = nullableType.GetGenericArguments()[0];
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type, createdObjectReferences);
}
private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
object result = null;
if (createdObjectReferences.TryGetValue(type, out result))
{
// The object has been created already, just return it. This will handle the circular reference case.
return result;
}
if (type.IsValueType)
{
result = Activator.CreateInstance(type);
}
else
{
ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
if (defaultCtor == null)
{
// Cannot instantiate the type because it doesn't have a default constructor
return null;
}
result = defaultCtor.Invoke(new object[0]);
}
createdObjectReferences.Add(type, result);
SetPublicProperties(type, result, createdObjectReferences);
SetPublicFields(type, result, createdObjectReferences);
return result;
}
private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (PropertyInfo property in properties)
{
if (property.CanWrite)
{
object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
property.SetValue(obj, propertyValue, null);
}
}
}
private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (FieldInfo field in fields)
{
object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
field.SetValue(obj, fieldValue);
}
}
private class SimpleTypeObjectGenerator
{
private long _index = 0;
private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();
[SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
private static Dictionary<Type, Func<long, object>> InitializeGenerators()
{
return new Dictionary<Type, Func<long, object>>
{
{ typeof(Boolean), index => true },
{ typeof(Byte), index => (Byte)64 },
{ typeof(Char), index => (Char)65 },
{ typeof(DateTime), index => DateTime.Now },
{ typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
{ typeof(DBNull), index => DBNull.Value },
{ typeof(Decimal), index => (Decimal)index },
{ typeof(Double), index => (Double)(index + 0.1) },
{ typeof(Guid), index => Guid.NewGuid() },
{ typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
{ typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
{ typeof(Int64), index => (Int64)index },
{ typeof(Object), index => new object() },
{ typeof(SByte), index => (SByte)64 },
{ typeof(Single), index => (Single)(index + 0.1) },
{
typeof(String), index =>
{
return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
}
},
{
typeof(TimeSpan), index =>
{
return TimeSpan.FromTicks(1234567);
}
},
{ typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
{ typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
{ typeof(UInt64), index => (UInt64)index },
{
typeof(Uri), index =>
{
return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
}
},
};
}
public static bool CanGenerateObject(Type type)
{
return DefaultGenerators.ContainsKey(type);
}
public object GenerateObject(Type type)
{
return DefaultGenerators[type](++_index);
}
}
}
}
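// Illustrative sketch showing how ObjectGenerator above can be used directly (for example from a test
// or from the help page sample generation). It is not part of the generated HelpPage code; the sample
// POCO below is an assumption.
namespace FunMatchGame.Areas.HelpPage.UsageSketches
{
    using System.Collections.Generic;

    internal static class ObjectGeneratorSketch
    {
        // A public nested type with a default constructor and settable properties,
        // which is what GenerateComplexObject requires to populate sample data.
        public class SamplePoco
        {
            public int Id { get; set; }
            public string Name { get; set; }
        }

        internal static object[] GenerateSamples()
        {
            var generator = new ObjectGenerator();
            return new[]
            {
                generator.GenerateObject(typeof(SamplePoco)),                // complex type: public members populated
                generator.GenerateObject(typeof(List<string>)),              // collection: two sample elements
                generator.GenerateObject(typeof(KeyValuePair<int, string>))  // key/value pair
            };
        }
    }
}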
| |
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading.Tasks;
using Abp.Extensions;
using Abp.MultiTenancy;
using Abp.Runtime.Session;
namespace Abp.Domain.Uow
{
/// <summary>
/// Base for all Unit Of Work classes.
/// </summary>
public abstract class UnitOfWorkBase : IUnitOfWork
{
/// <inheritdoc/>
public event EventHandler Completed;
/// <inheritdoc/>
public event EventHandler<UnitOfWorkFailedEventArgs> Failed;
/// <inheritdoc/>
public event EventHandler Disposed;
/// <inheritdoc/>
public UnitOfWorkOptions Options { get; private set; }
/// <inheritdoc/>
public IReadOnlyList<DataFilterConfiguration> Filters
{
get { return _filters.ToImmutableList(); }
}
private readonly List<DataFilterConfiguration> _filters;
/// <summary>
        /// Gets a value indicating whether this unit of work is disposed.
/// </summary>
public bool IsDisposed { get; private set; }
/// <summary>
/// Reference to current ABP session.
/// </summary>
public IAbpSession AbpSession { private get; set; }
/// <summary>
/// Is <see cref="Begin"/> method called before?
/// </summary>
private bool _isBeginCalledBefore;
/// <summary>
/// Is <see cref="Complete"/> method called before?
/// </summary>
private bool _isCompleteCalledBefore;
/// <summary>
        /// Indicates whether this unit of work completed successfully.
/// </summary>
private bool _succeed;
/// <summary>
/// A reference to the exception if this unit of work failed.
/// </summary>
private Exception _exception;
/// <summary>
/// Constructor.
/// </summary>
protected UnitOfWorkBase(IUnitOfWorkDefaultOptions defaultOptions)
{
_filters = defaultOptions.Filters.ToList();
AbpSession = NullAbpSession.Instance;
}
/// <inheritdoc/>
public void Begin(UnitOfWorkOptions options)
{
if (options == null)
{
throw new ArgumentNullException("options");
}
PreventMultipleBegin();
Options = options; //TODO: Do not set options like that!
SetFilters(options.FilterOverrides);
BeginUow();
}
/// <inheritdoc/>
public abstract void SaveChanges();
/// <inheritdoc/>
public abstract Task SaveChangesAsync();
/// <inheritdoc/>
public IDisposable DisableFilter(params string[] filterNames)
{
//TODO: Check if filters exists?
var disabledFilters = new List<string>();
foreach (var filterName in filterNames)
{
var filterIndex = GetFilterIndex(filterName);
if (_filters[filterIndex].IsEnabled)
{
disabledFilters.Add(filterName);
_filters[filterIndex] = new DataFilterConfiguration(filterName, false);
}
}
disabledFilters.ForEach(ApplyDisableFilter);
return new DisposeAction(() => EnableFilter(disabledFilters.ToArray()));
}
/// <inheritdoc/>
public IDisposable EnableFilter(params string[] filterNames)
{
//TODO: Check if filters exists?
var enabledFilters = new List<string>();
foreach (var filterName in filterNames)
{
var filterIndex = GetFilterIndex(filterName);
if (!_filters[filterIndex].IsEnabled)
{
enabledFilters.Add(filterName);
_filters[filterIndex] = new DataFilterConfiguration(filterName, true);
}
}
enabledFilters.ForEach(ApplyEnableFilter);
return new DisposeAction(() => DisableFilter(enabledFilters.ToArray()));
}
/// <inheritdoc/>
public bool IsFilterEnabled(string filterName)
{
return GetFilter(filterName).IsEnabled;
}
/// <inheritdoc/>
public void SetFilterParameter(string filterName, string parameterName, object value)
{
var filterIndex = GetFilterIndex(filterName);
var newfilter = new DataFilterConfiguration(_filters[filterIndex]);
newfilter.FilterParameters[parameterName] = value;
_filters[filterIndex] = newfilter;
ApplyFilterParameterValue(filterName, parameterName, value);
}
/// <inheritdoc/>
public void Complete()
{
PreventMultipleComplete();
try
{
CompleteUow();
_succeed = true;
OnCompleted();
}
catch (Exception ex)
{
_exception = ex;
throw;
}
}
/// <inheritdoc/>
public async Task CompleteAsync()
{
PreventMultipleComplete();
try
{
await CompleteUowAsync();
_succeed = true;
OnCompleted();
}
catch (Exception ex)
{
_exception = ex;
throw;
}
}
/// <inheritdoc/>
public void Dispose()
{
if (IsDisposed)
{
return;
}
IsDisposed = true;
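            // If Complete/CompleteAsync never succeeded, disposal counts as a failure and the
            // Failed event is raised with the captured exception (which may be null).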
if (!_succeed)
{
OnFailed(_exception);
}
DisposeUow();
OnDisposed();
}
/// <summary>
/// Should be implemented by derived classes to start UOW.
/// </summary>
protected abstract void BeginUow();
/// <summary>
/// Should be implemented by derived classes to complete UOW.
/// </summary>
protected abstract void CompleteUow();
/// <summary>
/// Should be implemented by derived classes to complete UOW.
/// </summary>
protected abstract Task CompleteUowAsync();
/// <summary>
/// Should be implemented by derived classes to dispose UOW.
/// </summary>
protected abstract void DisposeUow();
/// <summary>
/// Concrete Unit of work classes should implement this
/// method in order to disable a filter.
/// Should not call base method since it throws <see cref="NotImplementedException"/>.
/// </summary>
/// <param name="filterName">Filter name</param>
protected virtual void ApplyDisableFilter(string filterName)
{
throw new NotImplementedException("DisableFilter is not implemented for " + GetType().FullName);
}
/// <summary>
/// Concrete Unit of work classes should implement this
/// method in order to enable a filter.
/// Should not call base method since it throws <see cref="NotImplementedException"/>.
/// </summary>
/// <param name="filterName">Filter name</param>
protected virtual void ApplyEnableFilter(string filterName)
{
throw new NotImplementedException("EnableFilter is not implemented for " + GetType().FullName);
}
/// <summary>
/// Concrete Unit of work classes should implement this
/// method in order to set a parameter's value.
/// Should not call base method since it throws <see cref="NotImplementedException"/>.
/// </summary>
/// <param name="filterName">Filter name</param>
protected virtual void ApplyFilterParameterValue(string filterName, string parameterName, object value)
{
throw new NotImplementedException("SetFilterParameterValue is not implemented for " + GetType().FullName);
}
/// <summary>
/// Called to trigger <see cref="Completed"/> event.
/// </summary>
protected virtual void OnCompleted()
{
Completed.InvokeSafely(this);
}
/// <summary>
/// Called to trigger <see cref="Failed"/> event.
/// </summary>
/// <param name="exception">Exception that cause failure</param>
protected virtual void OnFailed(Exception exception)
{
Failed.InvokeSafely(this, new UnitOfWorkFailedEventArgs(exception));
}
/// <summary>
/// Called to trigger <see cref="Disposed"/> event.
/// </summary>
protected virtual void OnDisposed()
{
Disposed.InvokeSafely(this);
}
private void PreventMultipleBegin()
{
if (_isBeginCalledBefore)
{
throw new AbpException("This unit of work has started before. Can not call Start method more than once.");
}
_isBeginCalledBefore = true;
}
private void PreventMultipleComplete()
{
if (_isCompleteCalledBefore)
{
throw new AbpException("Complete is called before!");
}
_isCompleteCalledBefore = true;
}
private void SetFilters(List<DataFilterConfiguration> filterOverrides)
{
for (var i = 0; i < _filters.Count; i++)
{
var filterOverride = filterOverrides.FirstOrDefault(f => f.FilterName == _filters[i].FilterName);
if (filterOverride != null)
{
_filters[i] = filterOverride;
}
}
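            // Without an authenticated user, or when the session runs on the host side of a multi-tenant
            // setup, there is no tenant to scope queries to, so the MustHaveTenant filter is disabled
            // unless an explicit override was supplied.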
if (!AbpSession.UserId.HasValue || AbpSession.MultiTenancySide == MultiTenancySides.Host)
{
ChangeFilterIsEnabledIfNotOverrided(filterOverrides, AbpDataFilters.MustHaveTenant, false);
}
}
private void ChangeFilterIsEnabledIfNotOverrided(List<DataFilterConfiguration> filterOverrides, string filterName, bool isEnabled)
{
if (filterOverrides.Any(f => f.FilterName == filterName))
{
return;
}
var index = _filters.FindIndex(f => f.FilterName == filterName);
if (index < 0)
{
return;
}
if (_filters[index].IsEnabled == isEnabled)
{
return;
}
_filters[index] = new DataFilterConfiguration(filterName, isEnabled);
}
private DataFilterConfiguration GetFilter(string filterName)
{
var filter = _filters.FirstOrDefault(f => f.FilterName == filterName);
if (filter == null)
{
throw new AbpException("Unknown filter name: " + filterName + ". Be sure this filter is registered before.");
}
return filter;
}
private int GetFilterIndex(string filterName)
{
var filterIndex = _filters.FindIndex(f => f.FilterName == filterName);
if (filterIndex < 0)
{
throw new AbpException("Unknown filter name: " + filterName + ". Be sure this filter is registered before.");
}
return filterIndex;
}
}
}
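// Illustrative sketch of the filter-scoping pattern enabled by the DisposeAction returned from
// DisableFilter above; it is not part of the framework source. The IUnitOfWork instance is assumed
// to come from the ambient unit of work (for example via IUnitOfWorkManager).
namespace Abp.Domain.Uow.UsageSketches
{
    internal static class FilterScopeSketch
    {
        internal static void QueryIncludingSoftDeleted(IUnitOfWork unitOfWork)
        {
            // Disposing the returned scope restores the previous filter state automatically.
            using (unitOfWork.DisableFilter(AbpDataFilters.SoftDelete))
            {
                // Repository queries executed here also see soft-deleted entities.
            }
        }
    }
}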
| |
/*++
Copyright (c) 2003 Microsoft Corporation
Module Name:
SslStream.cs
Abstract:
A public implementation of authenticated stream using SSL protocol
Author:
Alexei Vopilov Sept 28-2003
Revision History:
--*/
#if MONO_FEATURE_NEW_TLS && SECURITY_DEP
#if MONO_X509_ALIAS
extern alias PrebuiltSystem;
using X509CertificateCollection = PrebuiltSystem::System.Security.Cryptography.X509Certificates.X509CertificateCollection;
#endif
using System.Security.Cryptography.X509Certificates;
namespace System.Net.Security {
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using System.Security.Authentication;
using System.Security.Authentication.ExtendedProtection;
using System.Security.Permissions;
using System.Security.Principal;
using System.Net.Configuration;
[Flags]
public enum SslPolicyErrors
{
None = 0x0,
RemoteCertificateNotAvailable = 0x1,
RemoteCertificateNameMismatch = 0x2,
RemoteCertificateChainErrors = 0x4
}
public enum EncryptionPolicy
{
// Prohibit null ciphers (current system defaults)
RequireEncryption = 0,
// Add null ciphers to current system defaults
AllowNoEncryption,
// Request null ciphers only
NoEncryption
}
// A user delegate used to verify remote SSL certificate
public delegate bool RemoteCertificateValidationCallback(object sender, X509Certificate certificate, X509Chain chain, SslPolicyErrors sslPolicyErrors);
// A user delegate used to select local SSL certificate
public delegate X509Certificate LocalCertificateSelectionCallback(object sender, string targetHost, X509CertificateCollection localCertificates, X509Certificate remoteCertificate, string[] acceptableIssuers);
// Internal versions of the above delegates
internal delegate bool RemoteCertValidationCallback(string host, X509Certificate certificate, X509Chain chain, SslPolicyErrors sslPolicyErrors);
internal delegate X509Certificate LocalCertSelectionCallback(string targetHost, X509CertificateCollection localCertificates, X509Certificate remoteCertificate, string[] acceptableIssuers);
//
//
//
public partial class SslStream: AuthenticatedStream
{
private SslState _SslState;
private RemoteCertificateValidationCallback _userCertificateValidationCallback;
private LocalCertificateSelectionCallback _userCertificateSelectionCallback;
private object m_RemoteCertificateOrBytes;
public SslStream(Stream innerStream)
:this(innerStream, false, null, null)
{
}
public SslStream(Stream innerStream, bool leaveInnerStreamOpen)
:this(innerStream, leaveInnerStreamOpen, null, null, EncryptionPolicy.RequireEncryption)
{
}
public SslStream(Stream innerStream, bool leaveInnerStreamOpen, RemoteCertificateValidationCallback userCertificateValidationCallback)
:this(innerStream, leaveInnerStreamOpen, userCertificateValidationCallback, null, EncryptionPolicy.RequireEncryption)
{
}
public SslStream(Stream innerStream, bool leaveInnerStreamOpen, RemoteCertificateValidationCallback userCertificateValidationCallback,
LocalCertificateSelectionCallback userCertificateSelectionCallback)
:this(innerStream, leaveInnerStreamOpen, userCertificateValidationCallback, userCertificateSelectionCallback, EncryptionPolicy.RequireEncryption)
{
}
public SslStream(Stream innerStream, bool leaveInnerStreamOpen, RemoteCertificateValidationCallback userCertificateValidationCallback,
LocalCertificateSelectionCallback userCertificateSelectionCallback, EncryptionPolicy encryptionPolicy)
:base(innerStream, leaveInnerStreamOpen)
{
if (encryptionPolicy != EncryptionPolicy.RequireEncryption && encryptionPolicy != EncryptionPolicy.AllowNoEncryption && encryptionPolicy != EncryptionPolicy.NoEncryption)
throw new ArgumentException(SR.GetString(SR.net_invalid_enum, "EncryptionPolicy"), "encryptionPolicy");
_userCertificateValidationCallback = userCertificateValidationCallback;
_userCertificateSelectionCallback = userCertificateSelectionCallback;
RemoteCertValidationCallback _userCertValidationCallbackWrapper = new RemoteCertValidationCallback(userCertValidationCallbackWrapper);
LocalCertSelectionCallback _userCertSelectionCallbackWrapper = userCertificateSelectionCallback==null ? null : new LocalCertSelectionCallback(userCertSelectionCallbackWrapper);
_SslState = new SslState(innerStream, _userCertValidationCallbackWrapper, _userCertSelectionCallbackWrapper, encryptionPolicy);
}
private bool userCertValidationCallbackWrapper(string hostName, X509Certificate certificate, X509Chain chain, SslPolicyErrors sslPolicyErrors)
{
m_RemoteCertificateOrBytes = certificate == null? null: certificate.GetRawCertData();
if (_userCertificateValidationCallback == null)
{
if (!_SslState.RemoteCertRequired)
sslPolicyErrors &= ~SslPolicyErrors.RemoteCertificateNotAvailable;
return (sslPolicyErrors == SslPolicyErrors.None);
}
else
return _userCertificateValidationCallback(this, certificate, chain, sslPolicyErrors);
}
private X509Certificate userCertSelectionCallbackWrapper(string targetHost, X509CertificateCollection localCertificates, X509Certificate remoteCertificate, string[] acceptableIssuers)
{
return _userCertificateSelectionCallback(this, targetHost, localCertificates, remoteCertificate, acceptableIssuers);
}
private SslProtocols DefaultProtocols()
{
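            // Default to TLS 1.0-1.2; fall back to the legacy TLS 1.0 + SSL 3.0 set only when
            // strong crypto has been explicitly disabled via ServicePointManager.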
SslProtocols protocols = SslProtocols.Tls12 | SslProtocols.Tls11 | SslProtocols.Tls;
if (ServicePointManager.DisableStrongCrypto)
{
protocols = SslProtocols.Tls | SslProtocols.Ssl3;
}
return protocols;
}
//
// Client side auth
//
public virtual void AuthenticateAsClient(string targetHost)
{
AuthenticateAsClient(targetHost, new X509CertificateCollection(), DefaultProtocols(), false);
}
//
public virtual void AuthenticateAsClient(string targetHost, X509CertificateCollection clientCertificates, SslProtocols enabledSslProtocols, bool checkCertificateRevocation)
{
_SslState.ValidateCreateContext(false, targetHost, enabledSslProtocols, null, clientCertificates, true, checkCertificateRevocation);
_SslState.ProcessAuthentication(null);
}
//
[HostProtection(ExternalThreading=true)]
public virtual IAsyncResult BeginAuthenticateAsClient(string targetHost, AsyncCallback asyncCallback, object asyncState)
{
return BeginAuthenticateAsClient(targetHost, new X509CertificateCollection(), DefaultProtocols(), false,
asyncCallback, asyncState);
}
//
[HostProtection(ExternalThreading=true)]
public virtual IAsyncResult BeginAuthenticateAsClient(string targetHost, X509CertificateCollection clientCertificates,
SslProtocols enabledSslProtocols, bool checkCertificateRevocation,
AsyncCallback asyncCallback, object asyncState)
{
_SslState.ValidateCreateContext(false, targetHost, enabledSslProtocols, null, clientCertificates, true, checkCertificateRevocation);
LazyAsyncResult result = new LazyAsyncResult(_SslState, asyncState, asyncCallback);
_SslState.ProcessAuthentication(result);
return result;
}
//
public virtual void EndAuthenticateAsClient(IAsyncResult asyncResult)
{
_SslState.EndProcessAuthentication(asyncResult);
}
//
//
//server side auth
//
public virtual void AuthenticateAsServer(X509Certificate serverCertificate)
{
AuthenticateAsServer(serverCertificate, false, DefaultProtocols(), false);
}
//
public virtual void AuthenticateAsServer(X509Certificate serverCertificate, bool clientCertificateRequired,
SslProtocols enabledSslProtocols, bool checkCertificateRevocation)
{
_SslState.ValidateCreateContext(true, string.Empty, enabledSslProtocols, serverCertificate, null, clientCertificateRequired, checkCertificateRevocation);
_SslState.ProcessAuthentication(null);
}
//
[HostProtection(ExternalThreading=true)]
public virtual IAsyncResult BeginAuthenticateAsServer(X509Certificate serverCertificate, AsyncCallback asyncCallback, object asyncState)
{
return BeginAuthenticateAsServer(serverCertificate, false, DefaultProtocols(), false,
asyncCallback,
asyncState);
}
//
[HostProtection(ExternalThreading=true)]
public virtual IAsyncResult BeginAuthenticateAsServer(X509Certificate serverCertificate, bool clientCertificateRequired,
SslProtocols enabledSslProtocols, bool checkCertificateRevocation,
AsyncCallback asyncCallback,
object asyncState)
{
_SslState.ValidateCreateContext(true, string.Empty, enabledSslProtocols, serverCertificate, null, clientCertificateRequired, checkCertificateRevocation);
LazyAsyncResult result = new LazyAsyncResult(_SslState, asyncState, asyncCallback);
_SslState.ProcessAuthentication(result);
return result;
}
//
public virtual void EndAuthenticateAsServer(IAsyncResult asyncResult)
{
_SslState.EndProcessAuthentication(asyncResult);
}
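        //
        // Illustrative usage sketch (not part of this class): a server typically wraps an accepted
        // connection's stream and authenticates with its own certificate before exchanging data.
        // The certificate path and password below are placeholders.
        //
        //   using (SslStream ssl = new SslStream(tcpClient.GetStream(), false))
        //   {
        //       X509Certificate2 serverCert = new X509Certificate2("server.pfx", "password");
        //       ssl.AuthenticateAsServer(serverCert, false, SslProtocols.Tls12, false);
        //       // ... read and write over ssl ...
        //   }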
public TransportContext TransportContext
{
get
{
#if MONO_NOT_SUPPORTED
return new SslStreamContext(this);
#else
throw new NotSupportedException();
#endif
}
}
internal ChannelBinding GetChannelBinding(ChannelBindingKind kind)
{
return _SslState.GetChannelBinding(kind);
}
//************* Task-based async public methods *************************
[HostProtection(ExternalThreading = true)]
public virtual Task AuthenticateAsClientAsync(string targetHost)
{
return Task.Factory.FromAsync(BeginAuthenticateAsClient, EndAuthenticateAsClient, targetHost, null);
}
[HostProtection(ExternalThreading = true)]
public virtual Task AuthenticateAsClientAsync(string targetHost, X509CertificateCollection clientCertificates, SslProtocols enabledSslProtocols, bool checkCertificateRevocation)
{
return Task.Factory.FromAsync((callback, state) => BeginAuthenticateAsClient(targetHost, clientCertificates, enabledSslProtocols, checkCertificateRevocation, callback, state), EndAuthenticateAsClient, null);
}
[HostProtection(ExternalThreading = true)]
public virtual Task AuthenticateAsServerAsync(X509Certificate serverCertificate)
{
return Task.Factory.FromAsync(BeginAuthenticateAsServer, EndAuthenticateAsServer, serverCertificate, null);
}
[HostProtection(ExternalThreading = true)]
public virtual Task AuthenticateAsServerAsync(X509Certificate serverCertificate, bool clientCertificateRequired, SslProtocols enabledSslProtocols, bool checkCertificateRevocation)
{
return Task.Factory.FromAsync((callback, state) => BeginAuthenticateAsServer(serverCertificate, clientCertificateRequired, enabledSslProtocols, checkCertificateRevocation, callback, state), EndAuthenticateAsServer, null);
}
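        //
        // Illustrative usage sketch (not part of this class): a client typically wraps a TcpClient's
        // stream in an SslStream and authenticates before reading or writing. The host name and port
        // are placeholders, and the await requires an async caller.
        //
        //   using (TcpClient client = new TcpClient("server.example.com", 443))
        //   using (SslStream ssl = new SslStream(client.GetStream(), false))
        //   {
        //       await ssl.AuthenticateAsClientAsync("server.example.com");
        //       byte[] request = Encoding.ASCII.GetBytes("GET / HTTP/1.1\r\nHost: server.example.com\r\n\r\n");
        //       ssl.Write(request, 0, request.Length);
        //   }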
//
//
// Base class properties
//
public override bool IsAuthenticated {
get {
return _SslState.IsAuthenticated;
}
}
//
public override bool IsMutuallyAuthenticated {
get {
return _SslState.IsMutuallyAuthenticated;
}
}
//
public override bool IsEncrypted {
get {
return IsAuthenticated;
}
}
//
public override bool IsSigned {
get {
return IsAuthenticated;
}
}
//
public override bool IsServer {
get {
return _SslState.IsServer;
}
}
//
//
//SSL specific properties
//
//
public virtual SslProtocols SslProtocol {
get {
return _SslState.SslProtocol;
}
}
//
public virtual bool CheckCertRevocationStatus {
get {
return _SslState.CheckCertRevocationStatus;
}
}
//
public virtual X509Certificate LocalCertificate {
get {
return _SslState.LocalCertificate;
}
}
//
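        // The remote certificate is captured as raw bytes during the handshake and is only
        // materialized into an X509Certificate instance on first access below.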
public virtual X509Certificate RemoteCertificate {
get {
_SslState.CheckThrow(true);
object chkCertificateOrBytes = m_RemoteCertificateOrBytes;
if (chkCertificateOrBytes != null && chkCertificateOrBytes.GetType() == typeof(byte[]))
return (X509Certificate)(m_RemoteCertificateOrBytes = new X509Certificate((byte[]) chkCertificateOrBytes));
else
return chkCertificateOrBytes as X509Certificate;
}
}
//
// More informational properties
//
public virtual CipherAlgorithmType CipherAlgorithm {
get {
return _SslState.CipherAlgorithm;
}
}
//
public virtual int CipherStrength {
get {
return _SslState.CipherStrength;
}
}
//
public virtual HashAlgorithmType HashAlgorithm {
get {
return _SslState.HashAlgorithm;
}
}
//
public virtual int HashStrength {
get {
return _SslState.HashStrength;
}
}
//
public virtual ExchangeAlgorithmType KeyExchangeAlgorithm {
get {
return _SslState.KeyExchangeAlgorithm;
}
}
//
public virtual int KeyExchangeStrength {
get {
return _SslState.KeyExchangeStrength;
}
}
//
//
// Stream contract implementation
//
//
//
public override bool CanSeek {
get {
return false;
}
}
//
public override bool CanRead {
get {
return _SslState.IsAuthenticated && InnerStream.CanRead;
}
}
//
public override bool CanTimeout {
get {
return InnerStream.CanTimeout;
}
}
//
public override bool CanWrite {
get {
return _SslState.IsAuthenticated && InnerStream.CanWrite;
}
}
//
//
public override int ReadTimeout {
get {
return InnerStream.ReadTimeout;
}
set {
InnerStream.ReadTimeout = value;
}
}
//
//
public override int WriteTimeout {
get {
return InnerStream.WriteTimeout;
}
set {
InnerStream.WriteTimeout = value;
}
}
//
public override long Length {
get {
return InnerStream.Length;
}
}
//
public override long Position {
get {
return InnerStream.Position;
}
set {
throw new NotSupportedException(SR.GetString(SR.net_noseek));
}
}
//
public override void SetLength(long value) {
InnerStream.SetLength(value);
}
//
public override long Seek(long offset, SeekOrigin origin) {
throw new NotSupportedException(SR.GetString(SR.net_noseek));
}
//
public override void Flush() {
_SslState.Flush();
}
//
//
protected override void Dispose(bool disposing) {
try {
_SslState.Close();
}
finally {
base.Dispose(disposing);
}
}
//
public override int Read(byte[] buffer, int offset, int count) {
return _SslState.SecureStream.Read(buffer, offset, count);
}
//
public void Write(byte[] buffer) {
_SslState.SecureStream.Write(buffer, 0, buffer.Length);
}
//
public override void Write(byte[] buffer, int offset, int count) {
_SslState.SecureStream.Write(buffer, offset, count);
}
//
[HostProtection(ExternalThreading=true)]
public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback asyncCallback, object asyncState) {
return _SslState.SecureStream.BeginRead(buffer, offset, count, asyncCallback, asyncState);
}
//
public override int EndRead(IAsyncResult asyncResult) {
return _SslState.SecureStream.EndRead(asyncResult);
}
//
[HostProtection(ExternalThreading=true)]
public override IAsyncResult BeginWrite( byte[] buffer, int offset, int count, AsyncCallback asyncCallback, object asyncState) {
return _SslState.SecureStream.BeginWrite(buffer, offset, count, asyncCallback, asyncState);
}
//
public override void EndWrite(IAsyncResult asyncResult) {
_SslState.SecureStream.EndWrite(asyncResult);
}
}
}
#endif
| |
using CrystalDecisions.CrystalReports.Engine;
using CrystalDecisions.Windows.Forms;
using DpSdkEngLib;
using DPSDKOPSLib;
using Microsoft.VisualBasic;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Drawing;
using System.Diagnostics;
using System.Windows.Forms;
using System.Linq;
using System.Xml.Linq;
// ERROR: Not supported in C#: OptionDeclaration
namespace _4PosBackOffice.NET
{
internal partial class frmPastelVariables : System.Windows.Forms.Form
{
private ADODB.Recordset withEventsField_adoPrimaryRS;
public ADODB.Recordset adoPrimaryRS {
get { return withEventsField_adoPrimaryRS; }
set {
if (withEventsField_adoPrimaryRS != null) {
withEventsField_adoPrimaryRS.MoveComplete -= adoPrimaryRS_MoveComplete;
withEventsField_adoPrimaryRS.WillChangeRecord -= adoPrimaryRS_WillChangeRecord;
}
withEventsField_adoPrimaryRS = value;
if (withEventsField_adoPrimaryRS != null) {
withEventsField_adoPrimaryRS.MoveComplete += adoPrimaryRS_MoveComplete;
withEventsField_adoPrimaryRS.WillChangeRecord += adoPrimaryRS_WillChangeRecord;
}
}
}
bool mbChangedByCode;
int mvBookMark;
bool mbEditFlag;
bool mbAddNewFlag;
bool mbDataChanged;
bool blHandHeld;
string gFilter;
string gFilterSQL;
int gID;
private void loadLanguage()
{
//frmPastelVariables = No Code [Edit Export Variable]
//rsLang.filter = "LanguageLayoutLnk_LanguageID=" & 0000
//If rsLang.RecordCount Then frmPastelVariable.Caption = rsLang("LanguageLayoutLnk_Description"): frmPastelVariables.RightToLeft = rsLang("LanguageLayoutLnk_RightTL")
modRecordSet.rsLang.filter = "LanguageLayoutLnk_LanguageID=" + 1004;
//Exit|Checked
            if (modRecordSet.rsLang.RecordCount != 0) {
                cmdClose.Text = modRecordSet.rsLang.Fields("LanguageLayoutLnk_Description").Value;
                cmdClose.RightToLeft = modRecordSet.rsLang.Fields("LanguageLayoutLnk_RightTL").Value;
            }
//Label1 = No Code [Note: Account Number.......]
//rsLang.filter = "LanguageLayoutLnk_LanguageID=" & 0000
//If rsLang.RecordCount Then Label1.Caption = rsLang("LanguageLayoutLnk_Description"): Label1.RightToLeft = rsLang("LanguageLayoutLnk_RightTL")
modRecordSet.rsHelp.filter = "Help_Section=0 AND Help_Form='" + this.Name + "'";
//UPGRADE_ISSUE: Form property frmPastelVariables.ToolTip1 was not upgraded. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="CC4C7EC0-C903-48FC-ACCC-81861D12DA4A"'
            if (modRecordSet.rsHelp.RecordCount != 0)
                this.ToolTip1 = modRecordSet.rsHelp.Fields("Help_ContextID").Value;
}
public void loadItem(ref int id)
{
ADODB.Recordset rs = default(ADODB.Recordset);
gID = id;
getNamespace();
mbDataChanged = false;
loadLanguage();
ShowDialog();
}
private void getNamespace()
{
System.Windows.Forms.TextBox oText = null;
ADODB.Recordset rs = default(ADODB.Recordset);
ADODB.Recordset rj = default(ADODB.Recordset);
            // C# cannot pass a string literal by ref; store the query in a local first.
            string sql = "SELECT IDDescription,GDC,Decription1,AccountNumber,Reference,Period FROM PastelDescription";
            adoPrimaryRS = modRecordSet.getRS(ref sql);
//Display the list of Titles in the DataCombo
grdDataGrid.DataSource = adoPrimaryRS;
grdDataGrid.Columns[0].HeaderText = "ID No";
grdDataGrid.Columns[0].DefaultCellStyle.Alignment = MSDataGridLib.AlignmentConstants.dbgLeft;
grdDataGrid.Columns[0].Width = sizeConvertors.twipsToPixels(800, true);
grdDataGrid.Columns[0].Frozen = true;
grdDataGrid.Columns[1].HeaderText = "GDC";
grdDataGrid.Columns[1].DefaultCellStyle.Alignment = MSDataGridLib.AlignmentConstants.dbgLeft;
grdDataGrid.Columns[1].Width = sizeConvertors.twipsToPixels(800, true);
grdDataGrid.Columns[1].Frozen = true;
            grdDataGrid.Columns[2].HeaderText = "Description";
grdDataGrid.Columns[2].DefaultCellStyle.Alignment = MSDataGridLib.AlignmentConstants.dbgLeft;
grdDataGrid.Columns[2].Width = sizeConvertors.twipsToPixels(3890.124, true);
grdDataGrid.Columns[2].Frozen = false;
grdDataGrid.Columns[3].HeaderText = "Account Number";
grdDataGrid.Columns[3].DefaultCellStyle.Alignment = MSDataGridLib.AlignmentConstants.dbgLeft;
grdDataGrid.Columns[3].Width = sizeConvertors.twipsToPixels(1890.124, true);
            grdDataGrid.Columns[3].DefaultCellStyle.Format = "N0"; // Format expects a format string; "N0" approximates the original VB DataFormat
grdDataGrid.Columns[3].Frozen = false;
grdDataGrid.Columns[4].HeaderText = "Reference";
grdDataGrid.Columns[4].DefaultCellStyle.Alignment = MSDataGridLib.AlignmentConstants.dbgRight;
grdDataGrid.Columns[4].Width = sizeConvertors.twipsToPixels(1000, true);
            grdDataGrid.Columns[4].DefaultCellStyle.Format = "N0"; // Format expects a format string; "N0" approximates the original VB DataFormat
grdDataGrid.Columns[4].Frozen = false;
frmPastelVariables_Resize(this, new System.EventArgs());
mbDataChanged = false;
}
private void frmPastelVariables_Load(System.Object eventSender, System.EventArgs eventArgs)
{
            // C# cannot pass 'this' or expressions by ref; copy them into locals first
            // (local types assumed to match modBResolutions.ResizeForm's signature).
            frmPastelVariables form = this;
            var formWidth = sizeConvertors.pixelToTwips(this.Width, true);
            var formHeight = sizeConvertors.pixelToTwips(this.Height, false);
            int resizeMode = 2;
            modBResolutions.ResizeForm(ref form, ref formWidth, ref formHeight, ref resizeMode);
}
//UPGRADE_WARNING: Event frmPastelVariables.Resize may fire when form is initialized. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="88B12AE1-6DE0-48A0-86F1-60C0686C026A"'
private void frmPastelVariables_Resize(System.Object eventSender, System.EventArgs eventArgs)
{
            // The original VB used an On Error statement here; ignore resize errors (e.g. during initialization).
            try {
                //This will resize the grid when the form is resized
                System.Windows.Forms.Application.DoEvents();
                grdDataGrid.Height = sizeConvertors.twipsToPixels(sizeConvertors.pixelToTwips(this.ClientRectangle.Height, false) - 30 - sizeConvertors.pixelToTwips(picButtons.Height, false), false);
                //grdDataGrid.Columns(1).Width = grdDataGrid.Width
                //grdDataGrid.Columns(1).Width = grdDataGrid.Width - 5000
            } catch {
            }
}
private void frmPastelVariables_KeyPress(System.Object eventSender, System.Windows.Forms.KeyPressEventArgs eventArgs)
{
            short KeyAscii = (short)Strings.Asc(eventArgs.KeyChar);
if (KeyAscii == 27) {
KeyAscii = 0;
cmdClose_Click(cmdClose, new System.EventArgs());
}
eventArgs.KeyChar = Strings.Chr(KeyAscii);
if (KeyAscii == 0) {
eventArgs.Handled = true;
}
}
private void frmPastelVariables_FormClosed(System.Object eventSender, System.Windows.Forms.FormClosedEventArgs eventArgs)
{
System.Windows.Forms.Cursor.Current = System.Windows.Forms.Cursors.Default;
}
private void adoPrimaryRS_MoveComplete(ADODB.EventReasonEnum adReason, ADODB.Error pError, ref ADODB.EventStatusEnum adStatus, ADODB.Recordset pRecordset)
{
//This will display the current record position for this recordset
}
private void adoPrimaryRS_WillChangeRecord(ADODB.EventReasonEnum adReason, int cRecords, ref ADODB.EventStatusEnum adStatus, ADODB.Recordset pRecordset)
{
int lQuantity = 0;
if (adoPrimaryRS.Fields("Reference").OriginalValue != adoPrimaryRS.Fields("Reference").Value) {
//cnndb.Execute "Update PastelDescription Set Narrative ='
}
if (adoPrimaryRS.Fields("AccountNumber").OriginalValue != adoPrimaryRS.Fields("AccountNumber").Value) {
//cnnDB.Execute "Update PastelDescription Set AccountNumber =' "
}
}
//UPGRADE_NOTE: update was upgraded to update_Renamed. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="A9E4979A-37FA-4718-9994-97DD76ED70A7"'
        private void update_Renamed()
        {
            // The original VB used On Error GoTo UpdateErr; the equivalent here is a try/catch.
            try {
                adoPrimaryRS.UpdateBatch(ADODB.AffectEnum.adAffectAll);
                if (mbAddNewFlag) {
                    adoPrimaryRS.MoveLast();
                    //Move to the new record
                }
                mbEditFlag = false;
                mbAddNewFlag = false;
                mbDataChanged = false;
            } catch (Exception ex) {
                Interaction.MsgBox(ex.Message);
            }
        }
private void cmdClose_Click(System.Object eventSender, System.EventArgs eventArgs)
{
//On Error Resume Next
//If Val(txtPeriod(0).Text) >= 1 And Val(txtPeriod(0).Text) <= 12 Then
// cnnDB.Execute "UPDATE PastelDescription Set Period = " & Val(txtPeriod(0).Text)
update_Renamed();
this.Close();
//Else
// MsgBox "Period Value must be in range of 1 - 12", vbApplicationModal + vbInformation + vbOKOnly, "Pastel Variables"
//End If
}
        private void goFirst()
        {
            // Original VB: On Error GoTo GoFirstError
            try {
                adoPrimaryRS.MoveFirst();
                mbDataChanged = false;
            } catch (Exception ex) {
                Interaction.MsgBox(ex.Message);
            }
        }
        private void goLast()
        {
            // Original VB: On Error GoTo GoLastError
            try {
                adoPrimaryRS.MoveLast();
                mbDataChanged = false;
            } catch (Exception ex) {
                Interaction.MsgBox(ex.Message);
            }
        }
//Private Sub grdDataGrid_CellValueChanged(ByVal eventSender As System.Object, ByVal eventArgs As AxMSDataGridLib.DDataGridEvents_CellValueChangedEvent) Handles grdDataGrid.CellValueChanged
// If grdDataGrid.Columns(ColIndex).DataFormat.Format = "#,##0.00" Then
// grdDataGrid.Columns(ColIndex).DataFormat = 0
// End If
//End Sub
}
}
| |
namespace KabMan.Client
{
partial class NewVTPort
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.layoutControl1 = new DevExpress.XtraLayout.LayoutControl();
this.chkboxForDataCenterConn = new DevExpress.XtraEditors.CheckEdit();
this.BtnCancel = new DevExpress.XtraEditors.SimpleButton();
this.BtnSave = new DevExpress.XtraEditors.SimpleButton();
this.LookUpLocation = new DevExpress.XtraEditors.LookUpEdit();
this.LookUpRoom = new DevExpress.XtraEditors.LookUpEdit();
this.LookUpSan = new DevExpress.XtraEditors.LookUpEdit();
this.layoutControlGroup1 = new DevExpress.XtraLayout.LayoutControlGroup();
this.layoutControlItem1 = new DevExpress.XtraLayout.LayoutControlItem();
this.layoutControlItem2 = new DevExpress.XtraLayout.LayoutControlItem();
this.layoutControlItem3 = new DevExpress.XtraLayout.LayoutControlItem();
this.layoutControlItem4 = new DevExpress.XtraLayout.LayoutControlItem();
this.layoutControlItem5 = new DevExpress.XtraLayout.LayoutControlItem();
this.layoutControlItem6 = new DevExpress.XtraLayout.LayoutControlItem();
((System.ComponentModel.ISupportInitialize)(this.layoutControl1)).BeginInit();
this.layoutControl1.SuspendLayout();
((System.ComponentModel.ISupportInitialize)(this.chkboxForDataCenterConn.Properties)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.LookUpLocation.Properties)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.LookUpRoom.Properties)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.LookUpSan.Properties)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlGroup1)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem1)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem2)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem3)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem4)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem5)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem6)).BeginInit();
this.SuspendLayout();
//
// layoutControl1
//
this.layoutControl1.Controls.Add(this.chkboxForDataCenterConn);
this.layoutControl1.Controls.Add(this.BtnCancel);
this.layoutControl1.Controls.Add(this.BtnSave);
this.layoutControl1.Controls.Add(this.LookUpLocation);
this.layoutControl1.Controls.Add(this.LookUpRoom);
this.layoutControl1.Controls.Add(this.LookUpSan);
this.layoutControl1.Dock = System.Windows.Forms.DockStyle.Fill;
this.layoutControl1.Location = new System.Drawing.Point(0, 0);
this.layoutControl1.Name = "layoutControl1";
this.layoutControl1.Root = this.layoutControlGroup1;
this.layoutControl1.Size = new System.Drawing.Size(400, 128);
this.layoutControl1.TabIndex = 0;
this.layoutControl1.Text = "layoutControl1";
//
// chkboxForDataCenterConn
//
this.chkboxForDataCenterConn.Location = new System.Drawing.Point(7, 100);
this.chkboxForDataCenterConn.Name = "chkboxForDataCenterConn";
this.chkboxForDataCenterConn.Properties.Caption = "for Data Center Connection";
this.chkboxForDataCenterConn.Size = new System.Drawing.Size(184, 19);
this.chkboxForDataCenterConn.StyleController = this.layoutControl1;
this.chkboxForDataCenterConn.TabIndex = 1;
//
// BtnCancel
//
this.BtnCancel.Appearance.BackColor = System.Drawing.SystemColors.ButtonFace;
this.BtnCancel.Appearance.BackColor2 = System.Drawing.SystemColors.ButtonHighlight;
this.BtnCancel.Appearance.BorderColor = System.Drawing.Color.DimGray;
this.BtnCancel.Appearance.Options.UseBackColor = true;
this.BtnCancel.Appearance.Options.UseBorderColor = true;
this.BtnCancel.Appearance.Options.UseForeColor = true;
this.BtnCancel.Location = new System.Drawing.Point(305, 100);
this.BtnCancel.Name = "BtnCancel";
this.BtnCancel.Size = new System.Drawing.Size(89, 22);
this.BtnCancel.StyleController = this.layoutControl1;
this.BtnCancel.TabIndex = 1;
this.BtnCancel.Text = "Cancel";
this.BtnCancel.Click += new System.EventHandler(this.BtnCancel_Click);
//
// BtnSave
//
this.BtnSave.Appearance.BackColor = System.Drawing.SystemColors.ButtonFace;
this.BtnSave.Appearance.BackColor2 = System.Drawing.SystemColors.ButtonHighlight;
this.BtnSave.Appearance.BorderColor = System.Drawing.Color.DimGray;
this.BtnSave.Appearance.Options.UseBackColor = true;
this.BtnSave.Appearance.Options.UseBorderColor = true;
this.BtnSave.Appearance.Options.UseForeColor = true;
this.BtnSave.Location = new System.Drawing.Point(202, 100);
this.BtnSave.Name = "BtnSave";
this.BtnSave.Size = new System.Drawing.Size(92, 22);
this.BtnSave.StyleController = this.layoutControl1;
this.BtnSave.TabIndex = 1;
this.BtnSave.Text = "Save";
this.BtnSave.Click += new System.EventHandler(this.BtnSave_Click);
//
// LookUpLocation
//
this.LookUpLocation.Location = new System.Drawing.Point(78, 7);
this.LookUpLocation.Name = "LookUpLocation";
this.LookUpLocation.Properties.Buttons.AddRange(new DevExpress.XtraEditors.Controls.EditorButton[] {
new DevExpress.XtraEditors.Controls.EditorButton(DevExpress.XtraEditors.Controls.ButtonPredefines.Combo)});
this.LookUpLocation.Properties.Columns.AddRange(new DevExpress.XtraEditors.Controls.LookUpColumnInfo[] {
new DevExpress.XtraEditors.Controls.LookUpColumnInfo("LocationName", "Location", 20, DevExpress.Utils.FormatType.None, "", true, DevExpress.Utils.HorzAlignment.Default, DevExpress.Data.ColumnSortOrder.None)});
            this.LookUpLocation.Properties.NullText = "Select a Location!";
this.LookUpLocation.Size = new System.Drawing.Size(316, 20);
this.LookUpLocation.StyleController = this.layoutControl1;
this.LookUpLocation.TabIndex = 1;
this.LookUpLocation.EditValueChanged += new System.EventHandler(this.LookUpLocation_EditValueChanged);
//
// LookUpRoom
//
this.LookUpRoom.Location = new System.Drawing.Point(78, 38);
this.LookUpRoom.Name = "LookUpRoom";
this.LookUpRoom.Properties.Buttons.AddRange(new DevExpress.XtraEditors.Controls.EditorButton[] {
new DevExpress.XtraEditors.Controls.EditorButton(DevExpress.XtraEditors.Controls.ButtonPredefines.Combo)});
this.LookUpRoom.Properties.Columns.AddRange(new DevExpress.XtraEditors.Controls.LookUpColumnInfo[] {
new DevExpress.XtraEditors.Controls.LookUpColumnInfo("RoomName", "Room", 20, DevExpress.Utils.FormatType.None, "", true, DevExpress.Utils.HorzAlignment.Default, DevExpress.Data.ColumnSortOrder.None)});
            this.LookUpRoom.Properties.NullText = "Select a Data Center!";
this.LookUpRoom.Size = new System.Drawing.Size(316, 20);
this.LookUpRoom.StyleController = this.layoutControl1;
this.LookUpRoom.TabIndex = 1;
//
// LookUpSan
//
this.LookUpSan.Location = new System.Drawing.Point(78, 69);
this.LookUpSan.Name = "LookUpSan";
this.LookUpSan.Properties.Buttons.AddRange(new DevExpress.XtraEditors.Controls.EditorButton[] {
new DevExpress.XtraEditors.Controls.EditorButton(DevExpress.XtraEditors.Controls.ButtonPredefines.Combo)});
this.LookUpSan.Properties.Columns.AddRange(new DevExpress.XtraEditors.Controls.LookUpColumnInfo[] {
new DevExpress.XtraEditors.Controls.LookUpColumnInfo("San", "SAN", 20, DevExpress.Utils.FormatType.None, "", true, DevExpress.Utils.HorzAlignment.Default, DevExpress.Data.ColumnSortOrder.None)});
            this.LookUpSan.Properties.NullText = "Select a SAN!";
this.LookUpSan.Size = new System.Drawing.Size(316, 20);
this.LookUpSan.StyleController = this.layoutControl1;
this.LookUpSan.TabIndex = 4;
//
// layoutControlGroup1
//
this.layoutControlGroup1.CustomizationFormText = "layoutControlGroup1";
this.layoutControlGroup1.Items.AddRange(new DevExpress.XtraLayout.BaseLayoutItem[] {
this.layoutControlItem1,
this.layoutControlItem2,
this.layoutControlItem3,
this.layoutControlItem4,
this.layoutControlItem5,
this.layoutControlItem6});
this.layoutControlGroup1.Location = new System.Drawing.Point(0, 0);
this.layoutControlGroup1.Name = "Root";
this.layoutControlGroup1.Size = new System.Drawing.Size(400, 128);
this.layoutControlGroup1.Spacing = new DevExpress.XtraLayout.Utils.Padding(0, 0, 0, 0);
this.layoutControlGroup1.Text = "Root";
this.layoutControlGroup1.TextVisible = false;
//
// layoutControlItem1
//
this.layoutControlItem1.Control = this.LookUpSan;
this.layoutControlItem1.CustomizationFormText = "San :";
this.layoutControlItem1.Location = new System.Drawing.Point(0, 62);
this.layoutControlItem1.Name = "layoutControlItem1";
this.layoutControlItem1.Size = new System.Drawing.Size(398, 31);
this.layoutControlItem1.Text = "San :";
this.layoutControlItem1.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem1.TextSize = new System.Drawing.Size(66, 20);
//
// layoutControlItem2
//
this.layoutControlItem2.Control = this.LookUpRoom;
this.layoutControlItem2.CustomizationFormText = "Room :";
this.layoutControlItem2.Location = new System.Drawing.Point(0, 31);
this.layoutControlItem2.Name = "layoutControlItem2";
this.layoutControlItem2.Size = new System.Drawing.Size(398, 31);
this.layoutControlItem2.Text = "Data Center :";
this.layoutControlItem2.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem2.TextSize = new System.Drawing.Size(66, 20);
//
// layoutControlItem3
//
this.layoutControlItem3.Control = this.LookUpLocation;
this.layoutControlItem3.CustomizationFormText = "Location :";
this.layoutControlItem3.Location = new System.Drawing.Point(0, 0);
this.layoutControlItem3.Name = "layoutControlItem3";
this.layoutControlItem3.Size = new System.Drawing.Size(398, 31);
this.layoutControlItem3.Text = "Location :";
this.layoutControlItem3.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem3.TextSize = new System.Drawing.Size(66, 20);
//
// layoutControlItem4
//
this.layoutControlItem4.Control = this.BtnSave;
this.layoutControlItem4.CustomizationFormText = "layoutControlItem4";
this.layoutControlItem4.Location = new System.Drawing.Point(195, 93);
this.layoutControlItem4.Name = "layoutControlItem4";
this.layoutControlItem4.Size = new System.Drawing.Size(103, 33);
this.layoutControlItem4.Text = "layoutControlItem4";
this.layoutControlItem4.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem4.TextSize = new System.Drawing.Size(0, 0);
this.layoutControlItem4.TextToControlDistance = 0;
this.layoutControlItem4.TextVisible = false;
//
// layoutControlItem5
//
this.layoutControlItem5.Control = this.BtnCancel;
this.layoutControlItem5.CustomizationFormText = "layoutControlItem5";
this.layoutControlItem5.Location = new System.Drawing.Point(298, 93);
this.layoutControlItem5.Name = "layoutControlItem5";
this.layoutControlItem5.Size = new System.Drawing.Size(100, 33);
this.layoutControlItem5.Text = "layoutControlItem5";
this.layoutControlItem5.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem5.TextSize = new System.Drawing.Size(0, 0);
this.layoutControlItem5.TextToControlDistance = 0;
this.layoutControlItem5.TextVisible = false;
//
// layoutControlItem6
//
this.layoutControlItem6.Control = this.chkboxForDataCenterConn;
this.layoutControlItem6.CustomizationFormText = "layoutControlItem6";
this.layoutControlItem6.Location = new System.Drawing.Point(0, 93);
this.layoutControlItem6.Name = "layoutControlItem6";
this.layoutControlItem6.Size = new System.Drawing.Size(195, 33);
this.layoutControlItem6.Text = "layoutControlItem6";
this.layoutControlItem6.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem6.TextSize = new System.Drawing.Size(0, 0);
this.layoutControlItem6.TextToControlDistance = 0;
this.layoutControlItem6.TextVisible = false;
//
// NewVTPort
//
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.ClientSize = new System.Drawing.Size(400, 128);
this.Controls.Add(this.layoutControl1);
this.Name = "NewVTPort";
this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen;
this.Text = "New VT Port";
this.Load += new System.EventHandler(this.NewVTPort_Load);
((System.ComponentModel.ISupportInitialize)(this.layoutControl1)).EndInit();
this.layoutControl1.ResumeLayout(false);
((System.ComponentModel.ISupportInitialize)(this.chkboxForDataCenterConn.Properties)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.LookUpLocation.Properties)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.LookUpRoom.Properties)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.LookUpSan.Properties)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlGroup1)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem1)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem2)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem3)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem4)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem5)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem6)).EndInit();
this.ResumeLayout(false);
}
#endregion
private DevExpress.XtraLayout.LayoutControl layoutControl1;
private DevExpress.XtraLayout.LayoutControlGroup layoutControlGroup1;
private DevExpress.XtraEditors.LookUpEdit LookUpLocation;
private DevExpress.XtraEditors.LookUpEdit LookUpRoom;
private DevExpress.XtraEditors.LookUpEdit LookUpSan;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem1;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem2;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem3;
private DevExpress.XtraEditors.SimpleButton BtnCancel;
private DevExpress.XtraEditors.SimpleButton BtnSave;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem4;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem5;
private DevExpress.XtraEditors.CheckEdit chkboxForDataCenterConn;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem6;
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using com.calitha.goldparser;
using System.Data;
using Epi.Core;
namespace Epi.Core.AnalysisInterpreter.Rules
{
/// <summary>
/// The rule for the LIST command
/// </summary>
public partial class Rule_List : AnalysisRule
{
private bool HasRun = false;
List<string> IdentifierList = new List<string>();
bool IsExceptionList = false;
string CommandText = null;
string ListOption = null;
bool isHTMLOption = false;
Configuration config = null;
string repOfYes;
string repOfNo;
string repOfMissing;
bool isEpi7Project = false;
bool isEpi7View = false;
View CurrentView = null;
/// <summary>
/// Constructor for Rule_List
/// </summary>
/// <param name="pToken">The token used to build the reduction.</param>
public Rule_List(Rule_Context pContext, NonterminalToken pToken) : base(pContext)
{
config = Configuration.GetNewInstance();
repOfYes = config.Settings.RepresentationOfYes;
repOfNo = config.Settings.RepresentationOfNo;
repOfMissing = config.Settings.RepresentationOfMissing;
CommandText = this.ExtractTokens(pToken.Tokens);
foreach (Token T in pToken.Tokens)
{
if (T is NonterminalToken)
{
NonterminalToken NT = (NonterminalToken)T;
switch (NT.Symbol.ToString())
{
case "<List_Identifier_List>":
this.SetIdentifierList(NT);
break;
case "<ListOpt>":
this.SetListOpt(NT);
break;
case "<ListGridOpt>":
this.SetListOpt(NT);
break;
}
}
else
{
TerminalToken TT = (TerminalToken)T;
switch (TT.Symbol.ToString())
{
case "*":
this.IdentifierList.Add("*");
break;
case "EXCEPT":
this.IsExceptionList = true;
break;
}
}
}
if (this.IdentifierList.Count > 1 && this.IdentifierList[0] == "*")
{
this.IdentifierList.Remove("*");
}
/*
!*** List Statement ***!
<List_Statement> ::= LIST
| LIST <ListOpt>
| LIST '*'
| LIST '*' <ListOpt>
| LIST <List_Identifier_List>
| LIST <List_Identifier_List> <ListOpt>
| LIST '*' EXCEPT <List_Identifier_List>
| LIST '*' EXCEPT <List_Identifier_List> <ListOpt>
<ListOpt> ::= <ListGridOpt>
| <ListUpdateOpt>
| <ListHTMLOpt>
<ListGridOpt> ::= GRIDTABLE
<ListUpdateOpt> ::= UPDATE
<ListHTMLOpt> ::= <ListHTMLOptOneColumn>
| <ListHTMLOptTwoColumns>
| <ListHTMLOptThreeColumns>
| <ListHTMLOptNoImage>
| <ListHTMLOptNowrap>
| <ListHTMLOptLine>
| !Null
<ListHTMLOptOneColumn> ::= <ListHTMLOpt> COULMNSIZE '=' DecLiteral
<ListHTMLOptTwoColumns> ::= <ListHTMLOpt> COULMNSIZE '=' DecLiteral ',' DecLiteral
<ListHTMLOptThreeColumns> ::= <ListHTMLOpt> COULMNSIZE '=' DecLiteral ',' DecLiteral ',' DecLiteral
<ListHTMLOptNoImage> ::= <ListHTMLOpt> NOIMAGE
<ListHTMLOptNoWrap> ::= <ListHTMLOpt> NOWRAP
<ListHTMLOptLine> ::= <ListHTMLOpt> LINENUMBERS
!*** End ***!
*/
}
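        // Illustrative examples of statements accepted by the grammar above (field names are placeholders):
        //   LIST
        //   LIST *
        //   LIST * GRIDTABLE
        //   LIST Age Sex County LINENUMBERS
        //   LIST * EXCEPT GlobalRecordId UPDATE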
private void SetListOpt(NonterminalToken pT)
{
/*<ListOpt>::= <ListGridOpt>
| <ListUpdateOpt>
| <ListHTMLOpt>*/
switch (pT.Symbol.ToString())
{
case "<ListGridOpt>":
this.ListOption = "GRIDTABLE";
break;
case "<ListUpdateOpt>":
this.ListOption = "UPDATE";
break;
case "<ListHTMLOpt>":
this.SetHTMLOption((NonterminalToken)pT.Tokens[0]);
break;
}
}
private void SetHTMLOption(NonterminalToken pT)
{
/*<ListHTMLOpt> ::= <ListHTMLOptOneColumn>
| <ListHTMLOptTwoColumns>
| <ListHTMLOptThreeColumns>
| <ListHTMLOptNoImage>
| <ListHTMLOptNowrap>
| <ListHTMLOptLine>
| !Null
*/
switch (pT.Symbol.ToString())
{
case "<ListHTMLOptOneColumn>":
case "<ListHTMLOptTwoColumns>":
case "<ListHTMLOptThreeColumns>":
case "<ListHTMLOptNoImage>":
case "<ListHTMLOptNowrap>":
case "<ListHTMLOptLine>":
this.ListOption = this.GetCommandElement(pT.Tokens, 0).Trim();
this.isHTMLOption = true;
break;
}
}
private void SetIdentifierList(NonterminalToken pT)
{
//<List_Identifier_List>::= Identifier | Identifier <List_Identifier_List>
this.IdentifierList.Add(this.GetCommandElement(pT.Tokens, 0).ToUpperInvariant().Trim( new char[] { '[', ']'}) );
if (pT.Tokens.Length > 1)
{
this.SetIdentifierList((NonterminalToken)pT.Tokens[1]);
}
}
/// <summary>
/// performs execution of the List command
/// </summary>
/// <returns>object</returns>
public override object Execute()
{
object result = null;
//System.Data.IDataReader DATAReader = Context.GetCurrentDataTableReader();
if (!HasRun)
{
if (this.Context.CurrentRead != null)
{
this.isEpi7Project = this.Context.CurrentRead.IsEpi7ProjectRead;
}
if (this.isEpi7Project)
{
if (this.Context.CurrentProject.Views.Exists(this.Context.CurrentRead.Identifier))
{
this.CurrentView = this.Context.CurrentProject.GetViewByName(this.Context.CurrentRead.Identifier);
}
}
List<DataRow> DR = this.Context.GetOutput(new List<string>());//.Select("", this.Context.SortExpression.ToString());
if (this.IdentifierList.Count == 1 && this.IdentifierList[0] == "*")
{
this.IdentifierList.Clear();
foreach (DataColumn C in this.Context.DataSet.Tables["Output"].Columns)
{
this.IdentifierList.Add(C.ColumnName.ToUpperInvariant());
}
}
this.Context.ExpandGroupVariables(this.IdentifierList, ref this.IsExceptionList);
StringBuilder builder = new StringBuilder();
if (!string.IsNullOrEmpty(this.ListOption) && this.ListOption.ToLowerInvariant().Equals("gridtable"))
{
this.Context.AnalysisCheckCodeInterface.ShowGridTable(DR, IdentifierList, this.CurrentView);
}
else
{
builder.Append("<table cellpadding=\"2\">");
PrintHeaderRow(this.Context.DataSet.Tables["Output"], builder);
foreach (DataRow dataRow in DR)
{
PrintRow(this.Context.DataSet.Tables["Output"], dataRow, builder);
}
builder.Append("</table>");
}
//result = string.Format("number of records read {0}", Output.Rows.Count);
Dictionary<string, string> args = new Dictionary<string, string>();
args.Add("COMMANDNAME", CommandNames.LIST);
args.Add("DATA", builder.ToString());
args.Add("COMMANDTEXT", CommandText);
this.Context.AnalysisCheckCodeInterface.Display(args);
this.HasRun = true;
}
return result;
}
private string GetImageExtension(System.IO.MemoryStream stream)
{
string result = "";
try
{
System.Drawing.Image image = System.Drawing.Image.FromStream(stream);
Type Type = typeof(System.Drawing.Imaging.ImageFormat);
System.Reflection.PropertyInfo[] imageFormatList = Type.GetProperties(System.Reflection.BindingFlags.Static | System.Reflection.BindingFlags.Public);
for (int i = 0; i != imageFormatList.Length; i++)
{
System.Drawing.Imaging.ImageFormat formatClass = (System.Drawing.Imaging.ImageFormat)imageFormatList[i].GetValue(null, null);
if (formatClass.Guid.Equals(image.RawFormat.Guid))
{
result = imageFormatList[i].Name.ToLowerInvariant();
}
}
}
catch (Exception ex)
{
// do nothing
}
return result;
}
private void PrintRow(System.Data.DataTable pDataTable, System.Data.DataRow pRow, StringBuilder pBuilder)
{
pBuilder.Append("<tr>");
//object[] Items = pRow.ItemArray;
for (int i = 0; i < this.IdentifierList.Count; i++)
{
string ColumnName = this.IdentifierList[i];
DataColumn C = pDataTable.Columns[ColumnName];
if (C == null) continue;
if (PrintColumn(C.ColumnName))
{
string columnDataType = C.DataType.ToString();
switch (columnDataType)
{
case "System.Int16":
case "System.Int32":
case "System.Int64":
case "System.Double":
case "System.Single":
case "System.Decimal":
pBuilder.Append("<td align=\"right\">");
break;
default:
pBuilder.Append("<td>");
break;
}
//pBuilder.Append("<td>");
if (pRow[C.ColumnName] == DBNull.Value)
{
pBuilder.Append(repOfMissing);
}
else switch (C.DataType.Name)
{
case "Boolean":
pBuilder.Append((Convert.ToBoolean(pRow[C.ColumnName]) ? repOfYes : repOfNo));
break;
case "Byte":
if (this.isEpi7Project)
{
pBuilder.Append((Convert.ToBoolean(pRow[C.ColumnName]) ? repOfYes : repOfNo));
}
else
{
pBuilder.Append(pRow[C.ColumnName]);
}
break;
case "Byte[]":
string extension = GetImageExtension(new System.IO.MemoryStream((byte[])pRow[C.ColumnName]));
string imgFileName = System.IO.Path.GetTempPath() + Guid.NewGuid().ToString("N") + "." + extension;
System.IO.FileStream imgStream = System.IO.File.OpenWrite(imgFileName);
System.IO.BinaryWriter imgWriter = new System.IO.BinaryWriter(imgStream);
imgWriter.Write((byte[])pRow[C.ColumnName]);
imgWriter.Close();
imgStream.Close();
pBuilder.Append("<img src=\"" + imgFileName + "\"/>");
break;
case "Single":
pBuilder.Append(string.Format("{0:0.##}", pRow[C.ColumnName]));
break;
case "Double":
case "Float":
pBuilder.Append(string.Format("{0:0.##########}", pRow[C.ColumnName]));
break;
case "DateTime":
if (this.CurrentView == null)
{
pBuilder.Append(pRow[C.ColumnName]);
}
else
{
IVariable var = (IVariable) this.Context.GetVariable(C.ColumnName);
if (var == null) break;
if (var.VarType == VariableType.DataSource)
{
try
{
if (this.CurrentView.Fields.Exists(C.ColumnName) && this.CurrentView.Fields[C.ColumnName] is Epi.Fields.DateField)
{
pBuilder.Append(((DateTime)pRow[C.ColumnName]).ToShortDateString());//
}
else if (this.CurrentView.Fields.Exists(C.ColumnName) && this.CurrentView.Fields[C.ColumnName] is Epi.Fields.TimeField)
{
pBuilder.Append(((DateTime)pRow[C.ColumnName]).ToShortTimeString());//
}
else
{
pBuilder.Append(pRow[C.ColumnName]);
}
}
catch (Exception ex)
{
pBuilder.Append(pRow[C.ColumnName]);
}
}
else
{
if (pRow[C.ColumnName] != DBNull.Value)
{
System.DateTime PrintDate = (System.DateTime)pRow[C.ColumnName];
if (var.DataType == DataType.Date)
{
pBuilder.Append(PrintDate.ToShortDateString());
}
else if (var.DataType == DataType.Time)
{
pBuilder.Append(PrintDate.ToShortTimeString());
}
else
{
pBuilder.Append(pRow[C.ColumnName]);
}
}
else
{
pBuilder.Append(pRow[C.ColumnName]);
}
}
}
break;
default:
pBuilder.Append(pRow[C.ColumnName]);
break;
}
pBuilder.Append("</td>");
}
}
pBuilder.Append("</tr>");
}
private void PrintHeaderRow(System.Data.DataTable pDataTable, StringBuilder pBuilder)
{
pBuilder.Append("<tr>");
for (int i = 0; i < this.IdentifierList.Count; i++)
{
string ColumnName = this.IdentifierList[i];
DataColumn C = pDataTable.Columns[ColumnName];
if (C != null)
{
if (PrintColumn(C.ColumnName))
{
pBuilder.Append("<th>");
pBuilder.Append(C.ColumnName);
pBuilder.Append("</th>");
}
}
}
pBuilder.Append("</tr>");
}
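        // Example of the logic below: for "LIST * EXCEPT AGE", IdentifierList contains "AGE" and
        // IsExceptionList is true, so PrintColumn("Age") returns false and every other column prints;
        // without EXCEPT, only the columns named in IdentifierList (or all of them for "*") print.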
private bool PrintColumn(string pName)
{
bool result = false;
if (this.IsExceptionList)
{
return !this.IdentifierList.Contains(pName.ToUpperInvariant());
}
else
{
if (this.IdentifierList.Count == 1 && this.IdentifierList[0] == "*")
{
result = true;
}
else
{
return this.IdentifierList.Contains(pName.ToUpperInvariant());
}
}
return result;
}
}
}
| |
#region Apache License
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to you under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System;
using System.Text;
using System.Globalization;
using Ctrip.Log4.Core;
using Ctrip.Log4.Layout;
using Ctrip.Log4.Util;
namespace Ctrip.Log4.Appender
{
/// <summary>
/// Appends logging events to the terminal using ANSI color escape sequences.
/// </summary>
/// <remarks>
/// <para>
/// AnsiColorTerminalAppender appends log events to the standard output stream
/// or the error output stream using a layout specified by the
/// user. It also allows the color of a specific level of message to be set.
/// </para>
/// <note>
/// This appender expects the terminal to understand the VT100 control set
/// in order to interpret the color codes. If the terminal or console does not
/// understand the control codes the behavior is not defined.
/// </note>
/// <para>
/// By default, all output is written to the console's standard output stream.
/// The <see cref="Target"/> property can be set to direct the output to the
/// error stream.
/// </para>
/// <para>
/// NOTE: This appender writes each message to the <c>System.Console.Out</c> or
/// <c>System.Console.Error</c> that is set at the time the event is appended.
/// Therefore it is possible to programmatically redirect the output of this appender
/// (for example NUnit does this to capture program output). While this is the desired
/// behavior of this appender it may have security implications in your application.
/// </para>
/// <para>
/// When configuring the ANSI colored terminal appender, a mapping should be
/// specified to map a logging level to a color. For example:
/// </para>
/// <code lang="XML" escaped="true">
/// <mapping>
/// <level value="ERROR" />
/// <foreColor value="White" />
/// <backColor value="Red" />
/// <attributes value="Bright,Underscore" />
/// </mapping>
/// <mapping>
/// <level value="DEBUG" />
/// <backColor value="Green" />
/// </mapping>
/// </code>
/// <para>
/// The Level is the standard Ctrip logging level and ForeColor and BackColor can be any
/// of the following values:
/// <list type="bullet">
/// <item><term>Blue</term><description></description></item>
/// <item><term>Green</term><description></description></item>
/// <item><term>Red</term><description></description></item>
/// <item><term>White</term><description></description></item>
/// <item><term>Yellow</term><description></description></item>
/// <item><term>Purple</term><description></description></item>
/// <item><term>Cyan</term><description></description></item>
/// </list>
/// These color values cannot be combined together to make new colors.
/// </para>
/// <para>
/// The attributes can be any combination of the following:
/// <list type="bullet">
/// <item><term>Bright</term><description>foreground is brighter</description></item>
/// <item><term>Dim</term><description>foreground is dimmer</description></item>
/// <item><term>Underscore</term><description>message is underlined</description></item>
/// <item><term>Blink</term><description>foreground is blinking (does not work on all terminals)</description></item>
/// <item><term>Reverse</term><description>foreground and background are reversed</description></item>
/// <item><term>Hidden</term><description>output is hidden</description></item>
/// <item><term>Strikethrough</term><description>message has a line through it</description></item>
/// </list>
/// While any of these attributes may be combined together not all combinations
/// work well together, for example setting both <i>Bright</i> and <i>Dim</i> attributes makes
/// no sense.
/// </para>
/// </remarks>
/// <author>Patrick Wagstrom</author>
/// <author>Nicko Cadell</author>
public class AnsiColorTerminalAppender : AppenderSkeleton
{
#region Colors Enum
/// <summary>
/// The enum of possible display attributes
/// </summary>
/// <remarks>
/// <para>
/// The following flags can be combined together to
/// form the ANSI color attributes.
/// </para>
/// </remarks>
/// <seealso cref="AnsiColorTerminalAppender" />
[Flags]
public enum AnsiAttributes : int
{
/// <summary>
/// text is bright
/// </summary>
Bright = 1,
/// <summary>
/// text is dim
/// </summary>
Dim = 2,
/// <summary>
/// text is underlined
/// </summary>
Underscore = 4,
/// <summary>
/// text is blinking
/// </summary>
/// <remarks>
/// Not all terminals support this attribute
/// </remarks>
Blink = 8,
/// <summary>
/// text and background colors are reversed
/// </summary>
Reverse = 16,
/// <summary>
/// text is hidden
/// </summary>
Hidden = 32,
/// <summary>
/// text is displayed with a strikethrough
/// </summary>
Strikethrough = 64,
/// <summary>
/// text color is light
/// </summary>
Light = 128
}
/// <summary>
/// The enum of possible foreground or background color values for
/// use with the color mapping method
/// </summary>
/// <remarks>
/// <para>
/// The output can be in one for the following ANSI colors.
/// </para>
/// </remarks>
/// <seealso cref="AnsiColorTerminalAppender" />
public enum AnsiColor : int
{
/// <summary>
/// color is black
/// </summary>
Black = 0,
/// <summary>
/// color is red
/// </summary>
Red = 1,
/// <summary>
/// color is green
/// </summary>
Green = 2,
/// <summary>
/// color is yellow
/// </summary>
Yellow = 3,
/// <summary>
/// color is blue
/// </summary>
Blue = 4,
/// <summary>
/// color is magenta
/// </summary>
Magenta = 5,
/// <summary>
/// color is cyan
/// </summary>
Cyan = 6,
/// <summary>
/// color is white
/// </summary>
White = 7
}
#endregion
#region Public Instance Constructors
/// <summary>
/// Initializes a new instance of the <see cref="AnsiColorTerminalAppender" /> class.
/// </summary>
/// <remarks>
/// The instance of the <see cref="AnsiColorTerminalAppender" /> class is set up to write
/// to the standard output stream.
/// </remarks>
public AnsiColorTerminalAppender()
{
}
#endregion Public Instance Constructors
#region Public Instance Properties
/// <summary>
/// Target is the value of the console output stream.
/// </summary>
/// <value>
/// Target is the value of the console output stream.
/// This is either <c>"Console.Out"</c> or <c>"Console.Error"</c>.
/// </value>
/// <remarks>
/// <para>
/// Target is the value of the console output stream.
/// This is either <c>"Console.Out"</c> or <c>"Console.Error"</c>.
/// </para>
/// </remarks>
virtual public string Target
{
get { return m_writeToErrorStream ? ConsoleError : ConsoleOut; }
set
{
string trimmedTargetName = value.Trim();
if (string.Compare(ConsoleError, trimmedTargetName, true, CultureInfo.InvariantCulture) == 0)
{
m_writeToErrorStream = true;
}
else
{
m_writeToErrorStream = false;
}
}
}
/// <summary>
/// Add a mapping of level to color
/// </summary>
/// <param name="mapping">The mapping to add</param>
/// <remarks>
/// <para>
/// Add a <see cref="LevelColors"/> mapping to this appender.
/// Each mapping defines the foreground and background colours
/// for a level.
/// </para>
/// </remarks>
public void AddMapping(LevelColors mapping)
{
m_levelMapping.Add(mapping);
}
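		// Illustrative sketch: mappings can also be added in code before the appender is activated
		// (assumes LevelColors inherits a Level property from LevelMappingEntry, as in log4net).
		//
		//   AnsiColorTerminalAppender appender = new AnsiColorTerminalAppender();
		//   LevelColors errorColors = new LevelColors();
		//   errorColors.Level = Level.Error;
		//   errorColors.ForeColor = AnsiColor.White;
		//   errorColors.BackColor = AnsiColor.Red;
		//   errorColors.Attributes = AnsiAttributes.Bright;
		//   appender.AddMapping(errorColors);
		//   appender.ActivateOptions();   // compiles the combined escape code for each mapping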
#endregion Public Instance Properties
#region Override implementation of AppenderSkeleton
/// <summary>
/// This method is called by the <see cref="M:AppenderSkeleton.DoAppend(LoggingEvent)"/> method.
/// </summary>
/// <param name="loggingEvent">The event to log.</param>
/// <remarks>
/// <para>
/// Writes the event to the console.
/// </para>
/// <para>
/// The format of the output will depend on the appender's layout.
/// </para>
/// </remarks>
override protected void Append(Ctrip.Log4.Core.LoggingEvent loggingEvent)
{
string loggingMessage = RenderLoggingEvent(loggingEvent);
// see if there is a specified lookup.
LevelColors levelColors = m_levelMapping.Lookup(loggingEvent.Level) as LevelColors;
if (levelColors != null)
{
// Prepend the Ansi Color code
loggingMessage = levelColors.CombinedColor + loggingMessage;
}
// on most terminals there are weird effects if we don't clear the background color
// before the new line. This checks to see if it ends with a newline, and if
// so, inserts the clear codes before the newline, otherwise the clear codes
// are inserted afterwards.
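			// For example (illustrative): a rendered message "ERROR failed\r\n" with a mapped color code
			// becomes "<color code>ERROR failed<reset>\r\n", keeping the reset ("\x1b[0m") before the
			// trailing newline so the background color does not bleed onto the next terminal line.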
if (loggingMessage.Length > 1)
{
if (loggingMessage.EndsWith("\r\n") || loggingMessage.EndsWith("\n\r"))
{
loggingMessage = loggingMessage.Insert(loggingMessage.Length - 2, PostEventCodes);
}
else if (loggingMessage.EndsWith("\n") || loggingMessage.EndsWith("\r"))
{
loggingMessage = loggingMessage.Insert(loggingMessage.Length - 1, PostEventCodes);
}
else
{
loggingMessage = loggingMessage + PostEventCodes;
}
}
			else
			{
				// Guard against an empty rendered message before indexing into it.
				if (loggingMessage.Length > 0 && (loggingMessage[0] == '\n' || loggingMessage[0] == '\r'))
				{
					loggingMessage = PostEventCodes + loggingMessage;
				}
				else
				{
					loggingMessage = loggingMessage + PostEventCodes;
				}
			}
#if NETCF_1_0
// Write to the output stream
Console.Write(loggingMessage);
#else
if (m_writeToErrorStream)
{
// Write to the error stream
Console.Error.Write(loggingMessage);
}
else
{
// Write to the output stream
Console.Write(loggingMessage);
}
#endif
}
/// <summary>
/// This appender requires a <see cref="Layout"/> to be set.
/// </summary>
/// <value><c>true</c></value>
/// <remarks>
/// <para>
/// This appender requires a <see cref="Layout"/> to be set.
/// </para>
/// </remarks>
override protected bool RequiresLayout
{
get { return true; }
}
/// <summary>
/// Initialize the options for this appender
/// </summary>
/// <remarks>
/// <para>
/// Initialize the level to color mappings set on this appender.
/// </para>
/// </remarks>
public override void ActivateOptions()
{
base.ActivateOptions();
m_levelMapping.ActivateOptions();
}
#endregion Override implementation of AppenderSkeleton
#region Public Static Fields
/// <summary>
/// The <see cref="AnsiColorTerminalAppender.Target"/> to use when writing to the Console
/// standard output stream.
/// </summary>
/// <remarks>
/// <para>
/// The <see cref="AnsiColorTerminalAppender.Target"/> to use when writing to the Console
/// standard output stream.
/// </para>
/// </remarks>
public const string ConsoleOut = "Console.Out";
/// <summary>
/// The <see cref="AnsiColorTerminalAppender.Target"/> to use when writing to the Console
/// standard error output stream.
/// </summary>
/// <remarks>
/// <para>
/// The <see cref="AnsiColorTerminalAppender.Target"/> to use when writing to the Console
/// standard error output stream.
/// </para>
/// </remarks>
public const string ConsoleError = "Console.Error";
#endregion Public Static Fields
#region Private Instances Fields
/// <summary>
/// Flag to write output to the error stream rather than the standard output stream
/// </summary>
private bool m_writeToErrorStream = false;
/// <summary>
/// Mapping from level object to color value
/// </summary>
private LevelMapping m_levelMapping = new LevelMapping();
/// <summary>
/// Ansi code to reset terminal
/// </summary>
private const string PostEventCodes = "\x1b[0m";
#endregion Private Instances Fields
#region LevelColors LevelMapping Entry
/// <summary>
/// A class to act as a mapping between the level that a logging call is made at and
/// the color it should be displayed as.
/// </summary>
/// <remarks>
/// <para>
/// Defines the mapping between a level and the color it should be displayed in.
/// </para>
/// </remarks>
public class LevelColors : LevelMappingEntry
{
private AnsiColor m_foreColor;
private AnsiColor m_backColor;
private AnsiAttributes m_attributes;
private string m_combinedColor = "";
/// <summary>
/// The mapped foreground color for the specified level
/// </summary>
/// <remarks>
/// <para>
/// Required property.
/// The mapped foreground color for the specified level
/// </para>
/// </remarks>
public AnsiColor ForeColor
{
get { return m_foreColor; }
set { m_foreColor = value; }
}
/// <summary>
/// The mapped background color for the specified level
/// </summary>
/// <remarks>
/// <para>
/// Required property.
/// The mapped background color for the specified level
/// </para>
/// </remarks>
public AnsiColor BackColor
{
get { return m_backColor; }
set { m_backColor = value; }
}
/// <summary>
/// The color attributes for the specified level
/// </summary>
/// <remarks>
/// <para>
/// Required property.
/// The color attributes for the specified level
/// </para>
/// </remarks>
public AnsiAttributes Attributes
{
get { return m_attributes; }
set { m_attributes = value; }
}
/// <summary>
/// Initialize the options for the object
/// </summary>
/// <remarks>
/// <para>
/// Combine the <see cref="ForeColor"/> and <see cref="BackColor"/> together
/// and append the attributes.
/// </para>
/// </remarks>
public override void ActivateOptions()
{
base.ActivateOptions();
StringBuilder buf = new StringBuilder();
// Reset any existing codes
buf.Append("\x1b[0;");
int lightAdjustment = ((m_attributes & AnsiAttributes.Light) > 0) ? 60 : 0;
// set the foreground color
buf.Append(30 + lightAdjustment + (int)m_foreColor);
buf.Append(';');
// set the background color
buf.Append(40 + lightAdjustment + (int)m_backColor);
// set the attributes
if ((m_attributes & AnsiAttributes.Bright) > 0)
{
buf.Append(";1");
}
if ((m_attributes & AnsiAttributes.Dim) > 0)
{
buf.Append(";2");
}
if ((m_attributes & AnsiAttributes.Underscore) > 0)
{
buf.Append(";4");
}
if ((m_attributes & AnsiAttributes.Blink) > 0)
{
buf.Append(";5");
}
if ((m_attributes & AnsiAttributes.Reverse) > 0)
{
buf.Append(";7");
}
if ((m_attributes & AnsiAttributes.Hidden) > 0)
{
buf.Append(";8");
}
if ((m_attributes & AnsiAttributes.Strikethrough) > 0)
{
buf.Append(";9");
}
buf.Append('m');
m_combinedColor = buf.ToString();
}
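			// Worked example (illustrative): ForeColor = White (7), BackColor = Red (1) and
			// Attributes = Bright produce "\x1b[0;37;41;1m" - reset, foreground 30+7,
			// background 40+1, then the Bright attribute.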
/// <summary>
/// The combined <see cref="ForeColor"/>, <see cref="BackColor"/> and
/// <see cref="Attributes"/> suitable for setting the ansi terminal color.
/// </summary>
internal string CombinedColor
{
get { return m_combinedColor; }
}
}
#endregion // LevelColors LevelMapping Entry
}
}