// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// File System.DirectoryServices.ActiveDirectory.DirectoryServer.cs
// Automatically generated contract file.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics.Contracts;
using System;
// Disable the "this variable is not used" warning as every field would imply it.
#pragma warning disable 0414
// Disable the "this variable is never assigned to".
#pragma warning disable 0067
// Disable the "this event is never assigned to".
#pragma warning disable 0649
// Disable the "this variable is never used".
#pragma warning disable 0169
// Disable the "new keyword not required" warning.
#pragma warning disable 0109
// Disable the "extern without DllImport" warning.
#pragma warning disable 0626
// Disable the "could hide other member" warning, can happen on certain properties.
#pragma warning disable 0108
namespace System.DirectoryServices.ActiveDirectory
{
[ContractClass(typeof(DirectoryServerContracts))]
abstract public partial class DirectoryServer : IDisposable
{
#region Methods and constructors
public abstract void CheckReplicationConsistency();
protected DirectoryServer()
{
}
public void Dispose()
{
}
protected virtual new void Dispose(bool disposing)
{
}
public abstract ReplicationNeighborCollection GetAllReplicationNeighbors();
public System.DirectoryServices.DirectoryEntry GetDirectoryEntry()
{
Contract.Ensures(Contract.Result<DirectoryEntry>() != null);
return default(System.DirectoryServices.DirectoryEntry);
}
public abstract ReplicationFailureCollection GetReplicationConnectionFailures();
public abstract ReplicationCursorCollection GetReplicationCursors(string partition);
public abstract ActiveDirectoryReplicationMetadata GetReplicationMetadata(string objectPath);
public abstract ReplicationNeighborCollection GetReplicationNeighbors(string partition);
public abstract ReplicationOperationInformation GetReplicationOperationInformation();
public void MoveToAnotherSite(string siteName)
{
}
public abstract void SyncReplicaFromAllServers(string partition, SyncFromAllServersOptions options);
public abstract void SyncReplicaFromServer(string partition, string sourceServer);
public abstract void TriggerSyncReplicaFromNeighbors(string partition);
#endregion
#region Properties and indexers
public abstract ReplicationConnectionCollection InboundConnections
{
get;
}
public abstract string IPAddress
{
get;
}
public string Name
{
get
{
return default(string);
}
}
public abstract ReplicationConnectionCollection OutboundConnections
{
get;
}
public ReadOnlyStringCollection Partitions
{
get
{
return default(ReadOnlyStringCollection);
}
}
public abstract string SiteName
{
get;
}
public abstract SyncUpdateCallback SyncFromAllServersCallback
{
get;
set;
}
#endregion
}
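// Note: the buddy class below supplies the postconditions for the abstract members
// declared above; a Code Contracts checker reads the Contract.Ensures calls in
// DirectoryServerContracts as if they were attached to DirectoryServer itself.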
[ContractClassFor(typeof(DirectoryServer))]
abstract class DirectoryServerContracts : DirectoryServer
{
public override void CheckReplicationConsistency()
{
throw new NotImplementedException();
}
public override ReplicationNeighborCollection GetAllReplicationNeighbors()
{
Contract.Ensures(Contract.Result<ReplicationNeighborCollection>() != null);
throw new NotImplementedException();
}
public override ReplicationFailureCollection GetReplicationConnectionFailures()
{
Contract.Ensures(Contract.Result<ReplicationFailureCollection>() != null);
throw new NotImplementedException();
}
public override ReplicationCursorCollection GetReplicationCursors(string partition)
{
Contract.Ensures(Contract.Result<ReplicationCursorCollection>() != null);
throw new NotImplementedException();
}
public override ActiveDirectoryReplicationMetadata GetReplicationMetadata(string objectPath)
{
Contract.Ensures(Contract.Result<ActiveDirectoryReplicationMetadata>() != null);
throw new NotImplementedException();
}
public override ReplicationNeighborCollection GetReplicationNeighbors(string partition)
{
Contract.Ensures(Contract.Result<ReplicationNeighborCollection>() != null);
throw new NotImplementedException();
}
public override ReplicationOperationInformation GetReplicationOperationInformation()
{
Contract.Ensures(Contract.Result<ReplicationOperationInformation>() != null);
throw new NotImplementedException();
}
public override void SyncReplicaFromAllServers(string partition, SyncFromAllServersOptions options)
{
throw new NotImplementedException();
}
public override void SyncReplicaFromServer(string partition, string sourceServer)
{
throw new NotImplementedException();
}
public override void TriggerSyncReplicaFromNeighbors(string partition)
{
throw new NotImplementedException();
}
public override ReplicationConnectionCollection InboundConnections
{
get {
Contract.Ensures(Contract.Result<ReplicationConnectionCollection>() != null);
throw new NotImplementedException(); }
}
public override string IPAddress
{
get { throw new NotImplementedException(); }
}
public override ReplicationConnectionCollection OutboundConnections
{
get {
Contract.Ensures(Contract.Result<ReplicationConnectionCollection>() != null);
throw new NotImplementedException(); }
}
public override string SiteName
{
get { throw new NotImplementedException(); }
}
public override SyncUpdateCallback SyncFromAllServersCallback
{
get
{
throw new NotImplementedException();
}
set
{
throw new NotImplementedException();
}
}
}
}
//------------------------------------------------------------------------------
// Symbooglix
//
//
// Copyright 2014-2017 Daniel Liew
//
// This file is licensed under the MIT license.
// See LICENSE.txt for details.
//------------------------------------------------------------------------------
using System;
using NUnit.Framework;
using Symbooglix;
using Microsoft.Boogie;
namespace ExprBuilderTests.ConstantFoldingTests
{
[TestFixture()]
public class FoldBVEXTRACT : ConstantFoldingExprBuilderTests
{
// Extract single bit
[TestCase(15, 4, 4, 3, 1)]
[TestCase(15, 4, 1, 0, 1)]
[TestCase(0, 4, 4, 3, 0)]
[TestCase(10, 4, 4, 3, 1)]
[TestCase(10, 4, 3, 2, 0)]
[TestCase(10, 4, 2, 1, 1)]
[TestCase(10, 4, 1, 0, 0)]
[TestCase(-1, 32, 32, 31, 1)]
// Extract half
[TestCase(15, 4, 4, 2, 3)]
[TestCase(10, 4, 4, 2, 2)]
[TestCase(10, 4, 2, 0, 2)]
[TestCase(12, 4, 2, 0, 0)]
public void simpleConstants(int decimalValue, int bitWidth, int end, int start, int expectedValueInDecimalRepr)
{
Assert.IsTrue(end > start);
var cfb = GetConstantFoldingBuilder();
var result = cfb.BVEXTRACT(cfb.ConstantBV(decimalValue, bitWidth), end, start);
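// BVEXTRACT(expr, end, start) selects bits [end-1:start], so the folded
// constant should be a bitvector of width (end - start).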
CheckIsBvType(result, end - start);
var asLit = ExprUtil.AsLiteral(result);
Assert.IsNotNull(asLit);
Assert.IsTrue(asLit.isBvConst);
Assert.AreEqual(Microsoft.Basetypes.BigNum.FromInt(expectedValueInDecimalRepr), asLit.asBvConst.Value);
}
[TestCase(15, 4, true)]
[TestCase(7, 4, false)]
[TestCase(-1, 256, true)]
[TestCase(0, 256, false)]
public void nestedBVEXTRACTConstant(int initialValue, int bitWidth, bool bitIsTrue)
{
var cfb = GetConstantFoldingBuilder();
var constant = cfb.ConstantBV(initialValue, bitWidth);
Expr result = constant;
// Keep peeling off the least significant bit until we're only left
// with the most significant bit
for (int count = 0; count < bitWidth - 1; ++count)
{
int topBitPlusOne = bitWidth - count;
result = cfb.BVEXTRACT(result, topBitPlusOne, 1);
}
var asLit = ExprUtil.AsLiteral(result);
Assert.IsNotNull(asLit);
CheckIsBvType(asLit, 1);
if (bitIsTrue)
Assert.AreEqual(Microsoft.Basetypes.BigNum.FromInt(1), asLit.asBvConst.Value);
else
Assert.AreEqual(Microsoft.Basetypes.BigNum.FromInt(0), asLit.asBvConst.Value);
}
[TestCase(5)]
[TestCase(256)]
[TestCase(512)]
public void SelectWholeBitVector(int bitWidth)
{
var cfb = GetConstantFoldingBuilder();
var id = GetVarAndIdExpr("foo", BasicType.GetBvType(bitWidth)).Item2;
var result = cfb.BVEXTRACT(id, bitWidth, 0);
CheckIsBvType(result, bitWidth);
Assert.AreSame(id, result);
}
[TestCase(4)]
[TestCase(8)]
[TestCase(32)]
[TestCase(64)]
[TestCase(128)]
[TestCase(256)]
[TestCase(512)]
public void nestedBVEXTRACTVariableStart(int bitWidth)
{
var cfb = GetConstantFoldingBuilder();
var id = GetVarAndIdExpr("x", BasicType.GetBvType(bitWidth)).Item2;
Expr result = id;
// Keep peeling off the most significant bit until we're only left
// with the least significant bit
for (int count = 0; count < bitWidth - 1; ++count)
{
int topBitMinusOne = result.Type.BvBits - 1;
result = cfb.BVEXTRACT(result, topBitMinusOne, 0);
CheckIsBvType(result, bitWidth - count - 1);
}
var asBvExtract = ExprUtil.AsBVEXTRACT(result);
Assert.IsNotNull(asBvExtract);
CheckIsBvType(asBvExtract, 1);
// Check there is only a single BvExtractExpr
Assert.AreSame(id, asBvExtract.Bitvector);
Assert.AreEqual(1, asBvExtract.End);
Assert.AreEqual(0, asBvExtract.Start);
}
[TestCase(4)]
[TestCase(8)]
[TestCase(32)]
[TestCase(64)]
[TestCase(128)]
[TestCase(256)]
[TestCase(512)]
public void nestedBVEXTRACTVariableEnd(int bitWidth)
{
var cfb = GetConstantFoldingBuilder();
var id = GetVarAndIdExpr("x", BasicType.GetBvType(bitWidth)).Item2;
Expr result = id;
// Keep peeling off the least significant bit until we're only left
// with the most significant bit
for (int count = 0; count < bitWidth - 1; ++count)
{
int topBitPlusOne = bitWidth - count;
result = cfb.BVEXTRACT(result, topBitPlusOne, 1);
CheckIsBvType(result, bitWidth - count - 1);
}
var asBvExtract = ExprUtil.AsBVEXTRACT(result);
Assert.IsNotNull(asBvExtract);
CheckIsBvType(asBvExtract, 1);
// Check there is only a single BvExtractExpr
Assert.AreSame(id, asBvExtract.Bitvector);
Assert.AreEqual(bitWidth, asBvExtract.End);
Assert.AreEqual(bitWidth - 1, asBvExtract.Start);
}
[TestCase(3)]
[TestCase(5)]
[TestCase(7)]
[TestCase(9)]
[TestCase(11)]
[TestCase(13)]
[TestCase(25)]
[TestCase(31)]
[TestCase(127)]
[TestCase(129)]
public void nestedBVEXTRACTVariableMiddle(int bitWidth)
{
Assert.IsTrue(bitWidth >= 3 && (bitWidth % 2 == 1));
var cfb = GetConstantFoldingBuilder();
var id = GetVarAndIdExpr("x", BasicType.GetBvType(bitWidth)).Item2;
Expr result = id;
// Keep peeling off the least significant bit and the most significant bit
// until we're only left with the middle bit in the original "id"
for (int count = 0; count < bitWidth/2 ; ++count)
{
int topBit = result.Type.BvBits - 1;
result = cfb.BVEXTRACT(result, topBit, 1);
CheckIsBvType(result, bitWidth - (2 * (count + 1)));
}
var asBvExtract = ExprUtil.AsBVEXTRACT(result);
Assert.IsNotNull(asBvExtract);
CheckIsBvType(asBvExtract, 1);
// Check there is only a single BvExtractExpr
Assert.AreSame(id, asBvExtract.Bitvector);
var middleBitIndex = bitWidth / 2;
Assert.AreEqual(middleBitIndex + 1, asBvExtract.End);
Assert.AreEqual(middleBitIndex, asBvExtract.Start);
}
[Test()]
public void noFold()
{
var pair = GetSimpleAndConstantFoldingBuilder();
var sb = pair.Item1;
var cfb = pair.Item2;
var id = GetVarAndIdExpr("foo", BasicType.GetBvType(8)).Item2;
var foldedResult = cfb.BVEXTRACT(id, 7, 0);
var simpleResult = sb.BVEXTRACT(id, 7, 0);
CheckIsBvType(foldedResult, 7);
CheckIsBvType(simpleResult, 7);
Assert.AreEqual(simpleResult, foldedResult);
var asBvExtract = ExprUtil.AsBVEXTRACT(foldedResult);
Assert.IsNotNull(asBvExtract);
Assert.AreSame(id, asBvExtract.Bitvector);
Assert.AreEqual(0, asBvExtract.Start);
Assert.AreEqual(7, asBvExtract.End);
}
}
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/wrappers.proto
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace Google.Protobuf.WellKnownTypes {
/// <summary>Holder for reflection information generated from google/protobuf/wrappers.proto</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public static partial class WrappersReflection {
#region Descriptor
/// <summary>File descriptor for google/protobuf/wrappers.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static WrappersReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"Ch5nb29nbGUvcHJvdG9idWYvd3JhcHBlcnMucHJvdG8SD2dvb2dsZS5wcm90",
"b2J1ZiIcCgtEb3VibGVWYWx1ZRINCgV2YWx1ZRgBIAEoASIbCgpGbG9hdFZh",
"bHVlEg0KBXZhbHVlGAEgASgCIhsKCkludDY0VmFsdWUSDQoFdmFsdWUYASAB",
"KAMiHAoLVUludDY0VmFsdWUSDQoFdmFsdWUYASABKAQiGwoKSW50MzJWYWx1",
"ZRINCgV2YWx1ZRgBIAEoBSIcCgtVSW50MzJWYWx1ZRINCgV2YWx1ZRgBIAEo",
"DSIaCglCb29sVmFsdWUSDQoFdmFsdWUYASABKAgiHAoLU3RyaW5nVmFsdWUS",
"DQoFdmFsdWUYASABKAkiGwoKQnl0ZXNWYWx1ZRINCgV2YWx1ZRgBIAEoDEJT",
"ChNjb20uZ29vZ2xlLnByb3RvYnVmQg1XcmFwcGVyc1Byb3RvUAGgAQH4AQGi",
"AgNHUEKqAh5Hb29nbGUuUHJvdG9idWYuV2VsbEtub3duVHlwZXNiBnByb3Rv",
"Mw=="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { },
new pbr::GeneratedCodeInfo(null, new pbr::GeneratedCodeInfo[] {
new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.WellKnownTypes.DoubleValue), global::Google.Protobuf.WellKnownTypes.DoubleValue.Parser, new[]{ "Value" }, null, null, null),
new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.WellKnownTypes.FloatValue), global::Google.Protobuf.WellKnownTypes.FloatValue.Parser, new[]{ "Value" }, null, null, null),
new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.WellKnownTypes.Int64Value), global::Google.Protobuf.WellKnownTypes.Int64Value.Parser, new[]{ "Value" }, null, null, null),
new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.WellKnownTypes.UInt64Value), global::Google.Protobuf.WellKnownTypes.UInt64Value.Parser, new[]{ "Value" }, null, null, null),
new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.WellKnownTypes.Int32Value), global::Google.Protobuf.WellKnownTypes.Int32Value.Parser, new[]{ "Value" }, null, null, null),
new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.WellKnownTypes.UInt32Value), global::Google.Protobuf.WellKnownTypes.UInt32Value.Parser, new[]{ "Value" }, null, null, null),
new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.WellKnownTypes.BoolValue), global::Google.Protobuf.WellKnownTypes.BoolValue.Parser, new[]{ "Value" }, null, null, null),
new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.WellKnownTypes.StringValue), global::Google.Protobuf.WellKnownTypes.StringValue.Parser, new[]{ "Value" }, null, null, null),
new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.WellKnownTypes.BytesValue), global::Google.Protobuf.WellKnownTypes.BytesValue.Parser, new[]{ "Value" }, null, null, null)
}));
}
#endregion
}
#region Messages
/// <summary>
/// Wrapper message for `double`.
///
/// The JSON representation for `DoubleValue` is JSON number.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class DoubleValue : pb::IMessage<DoubleValue> {
private static readonly pb::MessageParser<DoubleValue> _parser = new pb::MessageParser<DoubleValue>(() => new DoubleValue());
public static pb::MessageParser<DoubleValue> Parser { get { return _parser; } }
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Protobuf.WellKnownTypes.WrappersReflection.Descriptor.MessageTypes[0]; }
}
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
public DoubleValue() {
OnConstruction();
}
partial void OnConstruction();
public DoubleValue(DoubleValue other) : this() {
value_ = other.value_;
}
public DoubleValue Clone() {
return new DoubleValue(this);
}
/// <summary>Field number for the "value" field.</summary>
public const int ValueFieldNumber = 1;
private double value_;
/// <summary>
/// The double value.
/// </summary>
public double Value {
get { return value_; }
set {
value_ = value;
}
}
public override bool Equals(object other) {
return Equals(other as DoubleValue);
}
public bool Equals(DoubleValue other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Value != other.Value) return false;
return true;
}
public override int GetHashCode() {
int hash = 1;
if (Value != 0D) hash ^= Value.GetHashCode();
return hash;
}
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
public void WriteTo(pb::CodedOutputStream output) {
if (Value != 0D) {
output.WriteRawTag(9);
output.WriteDouble(Value);
}
}
public int CalculateSize() {
int size = 0;
if (Value != 0D) {
size += 1 + 8;
}
return size;
}
public void MergeFrom(DoubleValue other) {
if (other == null) {
return;
}
if (other.Value != 0D) {
Value = other.Value;
}
}
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 9: {
Value = input.ReadDouble();
break;
}
}
}
}
}
/// <summary>
/// Wrapper message for `float`.
///
/// The JSON representation for `FloatValue` is JSON number.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class FloatValue : pb::IMessage<FloatValue> {
private static readonly pb::MessageParser<FloatValue> _parser = new pb::MessageParser<FloatValue>(() => new FloatValue());
public static pb::MessageParser<FloatValue> Parser { get { return _parser; } }
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Protobuf.WellKnownTypes.WrappersReflection.Descriptor.MessageTypes[1]; }
}
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
public FloatValue() {
OnConstruction();
}
partial void OnConstruction();
public FloatValue(FloatValue other) : this() {
value_ = other.value_;
}
public FloatValue Clone() {
return new FloatValue(this);
}
/// <summary>Field number for the "value" field.</summary>
public const int ValueFieldNumber = 1;
private float value_;
/// <summary>
/// The float value.
/// </summary>
public float Value {
get { return value_; }
set {
value_ = value;
}
}
public override bool Equals(object other) {
return Equals(other as FloatValue);
}
public bool Equals(FloatValue other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Value != other.Value) return false;
return true;
}
public override int GetHashCode() {
int hash = 1;
if (Value != 0F) hash ^= Value.GetHashCode();
return hash;
}
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
public void WriteTo(pb::CodedOutputStream output) {
if (Value != 0F) {
output.WriteRawTag(13);
output.WriteFloat(Value);
}
}
public int CalculateSize() {
int size = 0;
if (Value != 0F) {
size += 1 + 4;
}
return size;
}
public void MergeFrom(FloatValue other) {
if (other == null) {
return;
}
if (other.Value != 0F) {
Value = other.Value;
}
}
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 13: {
Value = input.ReadFloat();
break;
}
}
}
}
}
/// <summary>
/// Wrapper message for `int64`.
///
/// The JSON representation for `Int64Value` is JSON string.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class Int64Value : pb::IMessage<Int64Value> {
private static readonly pb::MessageParser<Int64Value> _parser = new pb::MessageParser<Int64Value>(() => new Int64Value());
public static pb::MessageParser<Int64Value> Parser { get { return _parser; } }
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Protobuf.WellKnownTypes.WrappersReflection.Descriptor.MessageTypes[2]; }
}
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
public Int64Value() {
OnConstruction();
}
partial void OnConstruction();
public Int64Value(Int64Value other) : this() {
value_ = other.value_;
}
public Int64Value Clone() {
return new Int64Value(this);
}
/// <summary>Field number for the "value" field.</summary>
public const int ValueFieldNumber = 1;
private long value_;
/// <summary>
/// The int64 value.
/// </summary>
public long Value {
get { return value_; }
set {
value_ = value;
}
}
public override bool Equals(object other) {
return Equals(other as Int64Value);
}
public bool Equals(Int64Value other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Value != other.Value) return false;
return true;
}
public override int GetHashCode() {
int hash = 1;
if (Value != 0L) hash ^= Value.GetHashCode();
return hash;
}
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
public void WriteTo(pb::CodedOutputStream output) {
if (Value != 0L) {
output.WriteRawTag(8);
output.WriteInt64(Value);
}
}
public int CalculateSize() {
int size = 0;
if (Value != 0L) {
size += 1 + pb::CodedOutputStream.ComputeInt64Size(Value);
}
return size;
}
public void MergeFrom(Int64Value other) {
if (other == null) {
return;
}
if (other.Value != 0L) {
Value = other.Value;
}
}
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 8: {
Value = input.ReadInt64();
break;
}
}
}
}
}
/// <summary>
/// Wrapper message for `uint64`.
///
/// The JSON representation for `UInt64Value` is JSON string.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class UInt64Value : pb::IMessage<UInt64Value> {
private static readonly pb::MessageParser<UInt64Value> _parser = new pb::MessageParser<UInt64Value>(() => new UInt64Value());
public static pb::MessageParser<UInt64Value> Parser { get { return _parser; } }
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Protobuf.WellKnownTypes.WrappersReflection.Descriptor.MessageTypes[3]; }
}
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
public UInt64Value() {
OnConstruction();
}
partial void OnConstruction();
public UInt64Value(UInt64Value other) : this() {
value_ = other.value_;
}
public UInt64Value Clone() {
return new UInt64Value(this);
}
/// <summary>Field number for the "value" field.</summary>
public const int ValueFieldNumber = 1;
private ulong value_;
/// <summary>
/// The uint64 value.
/// </summary>
public ulong Value {
get { return value_; }
set {
value_ = value;
}
}
public override bool Equals(object other) {
return Equals(other as UInt64Value);
}
public bool Equals(UInt64Value other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Value != other.Value) return false;
return true;
}
public override int GetHashCode() {
int hash = 1;
if (Value != 0UL) hash ^= Value.GetHashCode();
return hash;
}
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
public void WriteTo(pb::CodedOutputStream output) {
if (Value != 0UL) {
output.WriteRawTag(8);
output.WriteUInt64(Value);
}
}
public int CalculateSize() {
int size = 0;
if (Value != 0UL) {
size += 1 + pb::CodedOutputStream.ComputeUInt64Size(Value);
}
return size;
}
public void MergeFrom(UInt64Value other) {
if (other == null) {
return;
}
if (other.Value != 0UL) {
Value = other.Value;
}
}
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 8: {
Value = input.ReadUInt64();
break;
}
}
}
}
}
/// <summary>
/// Wrapper message for `int32`.
///
/// The JSON representation for `Int32Value` is JSON number.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class Int32Value : pb::IMessage<Int32Value> {
private static readonly pb::MessageParser<Int32Value> _parser = new pb::MessageParser<Int32Value>(() => new Int32Value());
public static pb::MessageParser<Int32Value> Parser { get { return _parser; } }
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Protobuf.WellKnownTypes.WrappersReflection.Descriptor.MessageTypes[4]; }
}
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
public Int32Value() {
OnConstruction();
}
partial void OnConstruction();
public Int32Value(Int32Value other) : this() {
value_ = other.value_;
}
public Int32Value Clone() {
return new Int32Value(this);
}
/// <summary>Field number for the "value" field.</summary>
public const int ValueFieldNumber = 1;
private int value_;
/// <summary>
/// The int32 value.
/// </summary>
public int Value {
get { return value_; }
set {
value_ = value;
}
}
public override bool Equals(object other) {
return Equals(other as Int32Value);
}
public bool Equals(Int32Value other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Value != other.Value) return false;
return true;
}
public override int GetHashCode() {
int hash = 1;
if (Value != 0) hash ^= Value.GetHashCode();
return hash;
}
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
public void WriteTo(pb::CodedOutputStream output) {
if (Value != 0) {
output.WriteRawTag(8);
output.WriteInt32(Value);
}
}
public int CalculateSize() {
int size = 0;
if (Value != 0) {
size += 1 + pb::CodedOutputStream.ComputeInt32Size(Value);
}
return size;
}
public void MergeFrom(Int32Value other) {
if (other == null) {
return;
}
if (other.Value != 0) {
Value = other.Value;
}
}
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 8: {
Value = input.ReadInt32();
break;
}
}
}
}
}
/// <summary>
/// Wrapper message for `uint32`.
///
/// The JSON representation for `UInt32Value` is JSON number.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class UInt32Value : pb::IMessage<UInt32Value> {
private static readonly pb::MessageParser<UInt32Value> _parser = new pb::MessageParser<UInt32Value>(() => new UInt32Value());
public static pb::MessageParser<UInt32Value> Parser { get { return _parser; } }
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Protobuf.WellKnownTypes.WrappersReflection.Descriptor.MessageTypes[5]; }
}
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
public UInt32Value() {
OnConstruction();
}
partial void OnConstruction();
public UInt32Value(UInt32Value other) : this() {
value_ = other.value_;
}
public UInt32Value Clone() {
return new UInt32Value(this);
}
/// <summary>Field number for the "value" field.</summary>
public const int ValueFieldNumber = 1;
private uint value_;
/// <summary>
/// The uint32 value.
/// </summary>
public uint Value {
get { return value_; }
set {
value_ = value;
}
}
public override bool Equals(object other) {
return Equals(other as UInt32Value);
}
public bool Equals(UInt32Value other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Value != other.Value) return false;
return true;
}
public override int GetHashCode() {
int hash = 1;
if (Value != 0) hash ^= Value.GetHashCode();
return hash;
}
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
public void WriteTo(pb::CodedOutputStream output) {
if (Value != 0) {
output.WriteRawTag(8);
output.WriteUInt32(Value);
}
}
public int CalculateSize() {
int size = 0;
if (Value != 0) {
size += 1 + pb::CodedOutputStream.ComputeUInt32Size(Value);
}
return size;
}
public void MergeFrom(UInt32Value other) {
if (other == null) {
return;
}
if (other.Value != 0) {
Value = other.Value;
}
}
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 8: {
Value = input.ReadUInt32();
break;
}
}
}
}
}
/// <summary>
/// Wrapper message for `bool`.
///
/// The JSON representation for `BoolValue` is JSON `true` and `false`.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class BoolValue : pb::IMessage<BoolValue> {
private static readonly pb::MessageParser<BoolValue> _parser = new pb::MessageParser<BoolValue>(() => new BoolValue());
public static pb::MessageParser<BoolValue> Parser { get { return _parser; } }
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Protobuf.WellKnownTypes.WrappersReflection.Descriptor.MessageTypes[6]; }
}
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
public BoolValue() {
OnConstruction();
}
partial void OnConstruction();
public BoolValue(BoolValue other) : this() {
value_ = other.value_;
}
public BoolValue Clone() {
return new BoolValue(this);
}
/// <summary>Field number for the "value" field.</summary>
public const int ValueFieldNumber = 1;
private bool value_;
/// <summary>
/// The bool value.
/// </summary>
public bool Value {
get { return value_; }
set {
value_ = value;
}
}
public override bool Equals(object other) {
return Equals(other as BoolValue);
}
public bool Equals(BoolValue other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Value != other.Value) return false;
return true;
}
public override int GetHashCode() {
int hash = 1;
if (Value != false) hash ^= Value.GetHashCode();
return hash;
}
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
public void WriteTo(pb::CodedOutputStream output) {
if (Value != false) {
output.WriteRawTag(8);
output.WriteBool(Value);
}
}
public int CalculateSize() {
int size = 0;
if (Value != false) {
size += 1 + 1;
}
return size;
}
public void MergeFrom(BoolValue other) {
if (other == null) {
return;
}
if (other.Value != false) {
Value = other.Value;
}
}
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 8: {
Value = input.ReadBool();
break;
}
}
}
}
}
/// <summary>
/// Wrapper message for `string`.
///
/// The JSON representation for `StringValue` is JSON string.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class StringValue : pb::IMessage<StringValue> {
private static readonly pb::MessageParser<StringValue> _parser = new pb::MessageParser<StringValue>(() => new StringValue());
public static pb::MessageParser<StringValue> Parser { get { return _parser; } }
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Protobuf.WellKnownTypes.WrappersReflection.Descriptor.MessageTypes[7]; }
}
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
public StringValue() {
OnConstruction();
}
partial void OnConstruction();
public StringValue(StringValue other) : this() {
value_ = other.value_;
}
public StringValue Clone() {
return new StringValue(this);
}
/// <summary>Field number for the "value" field.</summary>
public const int ValueFieldNumber = 1;
private string value_ = "";
/// <summary>
/// The string value.
/// </summary>
public string Value {
get { return value_; }
set {
value_ = pb::Preconditions.CheckNotNull(value, "value");
}
}
public override bool Equals(object other) {
return Equals(other as StringValue);
}
public bool Equals(StringValue other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Value != other.Value) return false;
return true;
}
public override int GetHashCode() {
int hash = 1;
if (Value.Length != 0) hash ^= Value.GetHashCode();
return hash;
}
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
public void WriteTo(pb::CodedOutputStream output) {
if (Value.Length != 0) {
output.WriteRawTag(10);
output.WriteString(Value);
}
}
public int CalculateSize() {
int size = 0;
if (Value.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(Value);
}
return size;
}
public void MergeFrom(StringValue other) {
if (other == null) {
return;
}
if (other.Value.Length != 0) {
Value = other.Value;
}
}
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
Value = input.ReadString();
break;
}
}
}
}
}
/// <summary>
/// Wrapper message for `bytes`.
///
/// The JSON representation for `BytesValue` is JSON string.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class BytesValue : pb::IMessage<BytesValue> {
private static readonly pb::MessageParser<BytesValue> _parser = new pb::MessageParser<BytesValue>(() => new BytesValue());
public static pb::MessageParser<BytesValue> Parser { get { return _parser; } }
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Protobuf.WellKnownTypes.WrappersReflection.Descriptor.MessageTypes[8]; }
}
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
public BytesValue() {
OnConstruction();
}
partial void OnConstruction();
public BytesValue(BytesValue other) : this() {
value_ = other.value_;
}
public BytesValue Clone() {
return new BytesValue(this);
}
/// <summary>Field number for the "value" field.</summary>
public const int ValueFieldNumber = 1;
private pb::ByteString value_ = pb::ByteString.Empty;
/// <summary>
/// The bytes value.
/// </summary>
public pb::ByteString Value {
get { return value_; }
set {
value_ = pb::Preconditions.CheckNotNull(value, "value");
}
}
public override bool Equals(object other) {
return Equals(other as BytesValue);
}
public bool Equals(BytesValue other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Value != other.Value) return false;
return true;
}
public override int GetHashCode() {
int hash = 1;
if (Value.Length != 0) hash ^= Value.GetHashCode();
return hash;
}
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
public void WriteTo(pb::CodedOutputStream output) {
if (Value.Length != 0) {
output.WriteRawTag(10);
output.WriteBytes(Value);
}
}
public int CalculateSize() {
int size = 0;
if (Value.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeBytesSize(Value);
}
return size;
}
public void MergeFrom(BytesValue other) {
if (other == null) {
return;
}
if (other.Value.Length != 0) {
Value = other.Value;
}
}
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
Value = input.ReadBytes();
break;
}
}
}
}
}
#endregion
}
#endregion Designer generated code
#region License
// Copyright (c) Jeremy Skinner (http://www.jeremyskinner.co.uk)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// The latest version of this file can be found at http://www.codeplex.com/FluentValidation
#endregion
namespace FluentValidation.Tests {
using System;
using System.Collections;
using System.Globalization;
using System.Linq;
using System.Threading;
using System.Web;
using System.Web.Mvc;
using Attributes;
using Internal;
using Moq;
using Mvc;
using NUnit.Framework;
using Results;
[TestFixture]
public class ModelBinderTester {
FluentValidationModelValidatorProvider provider;
DefaultModelBinder binder;
ControllerContext controllerContext;
[SetUp]
public void Setup() {
Thread.CurrentThread.CurrentUICulture = new CultureInfo("en-US");
provider = new FluentValidationModelValidatorProvider(new AttributedValidatorFactory());
ModelValidatorProviders.Providers.Add(provider);
DataAnnotationsModelValidatorProvider.AddImplicitRequiredAttributeForValueTypes = false;
binder = new DefaultModelBinder();
controllerContext = new ControllerContext { HttpContext = MockHttpContext.Create() };
}
[TearDown]
public void Teardown() {
//Cleanup
ModelValidatorProviders.Providers.Remove(provider);
}
protected ModelMetadata CreateMetaData(Type type) {
var meta = new DataAnnotationsModelMetadataProvider();
return meta.GetMetadataForType(null, type);
//return new ModelMetadata(new EmptyModelMetadataProvider(), null, null, type, null);
}
public class TestModel2 {
}
[Validator(typeof(TestModelValidator))]
public class TestModel {
public string Name { get; set; }
}
public class TestModelValidator : AbstractValidator<TestModel> {
public TestModelValidator() {
RuleFor(x => x.Name).NotNull().WithMessage("Validation Failed");
}
}
[Validator(typeof(TestModelValidator3))]
public class TestModel3 {
public int Id { get; set; }
}
public class TestModelValidator3 : AbstractValidator<TestModel3> {
public TestModelValidator3() {
RuleFor(x => x.Id).NotNull().WithMessage("Validation failed");
}
}
public class TestModelWithoutValidator {
public int Id { get; set; }
}
[Validator(typeof(TestModel4Validator))]
public class TestModel4 {
public string Surname { get; set; }
public string Forename { get; set; }
public string Email { get; set; }
public DateTime DateOfBirth { get; set; }
public string Address1 { get; set; }
}
public class TestModel4Validator : AbstractValidator<TestModel4> {
public TestModel4Validator() {
RuleFor(x => x.Surname).NotEqual(x => x.Forename);
RuleFor(x => x.Email)
.EmailAddress();
RuleFor(x => x.Address1).NotEmpty();
}
}
[Validator(typeof(TestModel6Validator))]
public class TestModel6 {
public int Id { get; set; }
}
public class TestModel6Validator : AbstractValidator<TestModel6> {
public TestModel6Validator() {
//This ctor is intentionally blank.
}
}
[Test]
public void Should_add_all_errors_in_one_go() {
var form = new FormCollection {
{ "Email", "foo" },
{ "Surname", "foo" },
{ "Forename", "foo" },
{ "DateOfBirth", null },
{ "Address1", null }
};
var context = new ModelBindingContext {
ModelName = "test",
ModelMetadata = CreateMetaData(typeof(TestModel4)),
ModelState = new ModelStateDictionary(),
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider(),
};
binder.BindModel(controllerContext, context);
context.ModelState.IsValidField("Email").ShouldBeFalse(); //Email validation failed
context.ModelState.IsValidField("DateOfBirth").ShouldBeFalse(); //Date of Birth not specified (implicit required error)
context.ModelState.IsValidField("Surname").ShouldBeFalse(); //cross-property
}
[Validator(typeof(TestModel5Validator))]
public class TestModel5 {
public int Id { get; set; }
public bool SomeBool { get; set; }
}
public class TestModel5Validator : AbstractValidator<TestModel5> {
public TestModel5Validator() {
//force a complex rule
RuleFor(x => x.SomeBool).Must(x => x == true);
RuleFor(x => x.Id).NotEmpty();
}
}
[Test]
public void Should_add_all_errors_in_one_go_when_NotEmpty_rule_specified_for_non_nullable_value_type() {
var form = new FormCollection {
{ "SomeBool", "False" },
{ "Id", "0" }
};
var context = new ModelBindingContext {
ModelName = "test",
ModelMetadata = CreateMetaData(typeof(TestModel5)),
ModelState = new ModelStateDictionary(),
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider(),
};
binder.BindModel(controllerContext, context);
context.ModelState.IsValidField("SomeBool").ShouldBeFalse(); //Complex rule
context.ModelState.IsValidField("Id").ShouldBeFalse(); //NotEmpty for non-nullable value type
}
[Test]
public void When_a_validation_error_occurs_the_error_should_be_added_to_modelstate() {
var form = new FormCollection {
{ "test.Name", null }
};
var bindingContext = new ModelBindingContext {
ModelName = "test",
ModelMetadata = CreateMetaData(typeof(TestModel)),
ModelState = new ModelStateDictionary(),
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider()
};
binder.BindModel(controllerContext, bindingContext);
bindingContext.ModelState["test.Name"].Errors.Single().ErrorMessage.ShouldEqual("Validation Failed");
}
[Test]
public void When_a_validation_error_occurs_the_error_should_be_added_to_Modelstate_without_prefix() {
var form = new FormCollection {
{ "Name", null }
};
var bindingContext = new ModelBindingContext {
ModelName = "foo",
ModelMetadata = CreateMetaData(typeof(TestModel)),
ModelState = new ModelStateDictionary(),
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider()
};
binder.BindModel(controllerContext, bindingContext);
bindingContext.ModelState["Name"].Errors.Count().ShouldEqual(1);
}
[Test]
public void Should_not_fail_when_no_validator_can_be_found() {
var bindingContext = new ModelBindingContext {
ModelName = "test",
ModelMetadata = CreateMetaData(typeof(TestModel2)),
ModelState = new ModelStateDictionary(),
FallbackToEmptyPrefix = true,
ValueProvider = new FormCollection().ToValueProvider()
};
binder.BindModel(controllerContext, bindingContext).ShouldNotBeNull();
}
[Test]
public void Should_not_add_default_message_to_modelstate() {
var form = new FormCollection {
{ "Id", "" }
};
var bindingContext = new ModelBindingContext {
ModelName = "test",
ModelMetadata = CreateMetaData(typeof(TestModel3)),
ModelState = new ModelStateDictionary(),
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider()
};
binder.BindModel(controllerContext, bindingContext);
bindingContext.ModelState["Id"].Errors.Single().ErrorMessage.ShouldEqual("Validation failed");
}
[Test]
public void Should_not_add_default_message_to_modelstate_when_there_is_no_required_validator_explicitly_specified() {
var form = new FormCollection {
{ "Id", "" }
};
var bindingContext = new ModelBindingContext {
ModelName = "test",
ModelMetadata = CreateMetaData(typeof(TestModel6)),
ModelState = new ModelStateDictionary(),
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider()
};
binder.BindModel(controllerContext, bindingContext);
bindingContext.ModelState["Id"].Errors.Single().ErrorMessage.ShouldEqual("'Id' must not be empty.");
}
[Test]
public void Should_add_Default_message_to_modelstate_when_no_validator_specified() {
var form = new FormCollection {
{ "Id", "" }
};
var bindingContext = new ModelBindingContext {
ModelName = "test",
ModelMetadata = CreateMetaData(typeof(TestModelWithoutValidator)),
ModelState = new ModelStateDictionary(),
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider()
};
binder.BindModel(controllerContext, bindingContext);
bindingContext.ModelState["Id"].Errors.Single().ErrorMessage.ShouldEqual("A value is required.");
}
[Test]
public void Allows_override_of_required_message_for_non_nullable_value_types() {
var form = new FormCollection {
{ "Id", "" }
};
var bindingContext = new ModelBindingContext {
ModelName = "test",
ModelMetadata = CreateMetaData(typeof(TestModelWithOverridenMessageValueType)),
ModelState = new ModelStateDictionary(),
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider()
};
binder.BindModel(controllerContext, bindingContext);
//TODO: Localise test.
bindingContext.ModelState["Id"].Errors.Single().ErrorMessage.ShouldEqual("Foo");
}
[Test]
public void Allows_override_of_required_property_name_for_non_nullable_value_types() {
var form = new FormCollection {
{ "Id", "" }
};
var bindingContext = new ModelBindingContext {
ModelName = "test",
ModelMetadata = CreateMetaData(typeof(TestModelWithOverridenPropertyNameValueType)),
ModelState = new ModelStateDictionary(),
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider()
};
binder.BindModel(controllerContext, bindingContext);
//TODO: Localise test.
bindingContext.ModelState["Id"].Errors.Single().ErrorMessage.ShouldEqual("'Foo' must not be empty."
);
}
[Test]
public void Should_add_default_message_to_modelstate_when_both_fv_and_DataAnnotations_have_implicit_required_validation_disabled() {
DataAnnotationsModelValidatorProvider.AddImplicitRequiredAttributeForValueTypes = false;
provider.AddImplicitRequiredValidator = false;
var form = new FormCollection {
{ "Id", "" }
};
var bindingContext = new ModelBindingContext {
ModelName = "test",
ModelMetadata = CreateMetaData(typeof(TestModelWithoutValidator)),
ModelState = new ModelStateDictionary(),
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider()
};
binder.BindModel(controllerContext, bindingContext);
bindingContext.ModelState["Id"].Errors.Single().ErrorMessage.ShouldEqual("A value is required.");
provider.AddImplicitRequiredValidator = true;
DataAnnotationsModelValidatorProvider.AddImplicitRequiredAttributeForValueTypes = true;
}
[Test]
public void Should_only_validate_specified_ruleset() {
var form = new FormCollection {
{ "Email", "foo" },
{ "Surname", "foo" },
{ "Forename", "foo" },
};
var context = new ModelBindingContext {
ModelName = "test",
ModelMetadata = CreateMetaData(typeof(RulesetTestModel)),
ModelState = new ModelStateDictionary(),
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider(),
};
var binder = new CustomizeValidatorAttribute { RuleSet = "Names" };
binder.BindModel(controllerContext, context);
context.ModelState.IsValidField("Forename").ShouldBeFalse();
context.ModelState.IsValidField("Surname").ShouldBeFalse();
context.ModelState.IsValidField("Email").ShouldBeTrue();
}
[Test]
public void Should_only_validate_specified_properties() {
var form = new FormCollection {
{ "Email", "foo" },
{ "Surname", "foo" },
{ "Forename", "foo" },
};
var context = new ModelBindingContext {
ModelName = "test",
ModelMetadata = CreateMetaData(typeof(PropertiesTestModel)),
ModelState = new ModelStateDictionary(),
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider(),
};
var binder = new CustomizeValidatorAttribute { Properties = "Surname,Forename" };
binder.BindModel(controllerContext, context);
context.ModelState.IsValidField("Forename").ShouldBeFalse();
context.ModelState.IsValidField("Surname").ShouldBeFalse();
context.ModelState.IsValidField("Email").ShouldBeTrue();
}
[Test]
public void When_interceptor_specified_Intercepts_validation() {
var form = new FormCollection {
{ "Email", "foo" },
{ "Surname", "foo" },
{ "Forename", "foo" },
};
var context = new ModelBindingContext {
ModelName = "test",
ModelMetadata = CreateMetaData(typeof(PropertiesTestModel)),
ModelState = new ModelStateDictionary(),
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider(),
};
var binder = new CustomizeValidatorAttribute { Interceptor = typeof(SimplePropertyInterceptor) };
binder.BindModel(controllerContext, context);
context.ModelState.IsValidField("Forename").ShouldBeFalse();
context.ModelState.IsValidField("Surname").ShouldBeFalse();
context.ModelState.IsValidField("Email").ShouldBeTrue();
}
[Test]
public void When_interceptor_specified_Intercepts_validation_provides_custom_errors() {
var form = new FormCollection {
{ "Email", "foo" },
{ "Surname", "foo" },
{ "Forename", "foo" },
};
var context = new ModelBindingContext {
ModelName = "test",
ModelMetadata = CreateMetaData(typeof(PropertiesTestModel)),
ModelState = new ModelStateDictionary(),
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider(),
};
var binder = new CustomizeValidatorAttribute { Interceptor = typeof(ClearErrorsInterceptor) };
binder.BindModel(controllerContext, context);
context.ModelState.IsValid.ShouldBeTrue();
}
[Test]
public void When_validator_implements_IValidatorInterceptor_directly_interceptor_invoked() {
var form = new FormCollection {
{ "Email", "foo" },
{ "Surname", "foo" },
{ "Forename", "foo" },
};
var context = new ModelBindingContext {
ModelName = "test",
ModelMetadata = CreateMetaData(typeof(PropertiesTestModel2)),
ModelState = new ModelStateDictionary(),
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider(),
};
binder.BindModel(controllerContext, context);
context.ModelState.IsValid.ShouldBeTrue();
}
[Test]
public void Validator_customizations_should_only_apply_to_single_parameter() {
var form = new FormCollection {
{ "first.Email", "foo" },
{ "first.Surname", "foo" },
{ "first.Forename", "foo" },
{ "second.Email", "foo" },
{ "second.Surname", "foo" },
{ "second.Forename", "foo" }
};
var modelstate = new ModelStateDictionary();
var firstContext = new ModelBindingContext {
ModelName = "first",
ModelMetadata = CreateMetaData(typeof(RulesetTestModel)),
ModelState = modelstate,
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider(),
};
var secondContext = new ModelBindingContext {
ModelName = "second",
ModelMetadata = CreateMetaData(typeof(RulesetTestModel)),
ModelState = modelstate,
FallbackToEmptyPrefix = true,
ValueProvider = form.ToValueProvider()
};
// Use the customizations for the first
var binder = new CustomizeValidatorAttribute { RuleSet = "Names" };
binder.BindModel(controllerContext, firstContext);
// ...but not for the second.
this.binder.BindModel(controllerContext, secondContext);
//customizations should only apply to the first validator
modelstate.IsValidField("first.Forename").ShouldBeFalse();
modelstate.IsValidField("first.Surname").ShouldBeFalse();
modelstate.IsValidField("second.Forename").ShouldBeTrue();
modelstate.IsValidField("second.Surname").ShouldBeTrue();
}
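// Test helper: restricts validation to the Surname/Forename members before MVC
// invokes the validator, and leaves the validation result untouched afterwards.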
private class SimplePropertyInterceptor : IValidatorInterceptor {
readonly string[] properties = new[] { "Surname", "Forename" };
public ValidationContext BeforeMvcValidation(ControllerContext cc, ValidationContext context) {
var newContext = context.Clone(selector: new MemberNameValidatorSelector(properties));
return newContext;
}
public ValidationResult AfterMvcValidation(ControllerContext cc, ValidationContext context, ValidationResult result) {
return result;
}
}
private class ClearErrorsInterceptor : IValidatorInterceptor {
public ValidationContext BeforeMvcValidation(ControllerContext cc, ValidationContext context) {
return null;
}
public ValidationResult AfterMvcValidation(ControllerContext cc, ValidationContext context, ValidationResult result) {
return new ValidationResult();
}
}
[Validator(typeof(PropertiesValidator2))]
private class PropertiesTestModel2 {
public string Email { get; set; }
public string Surname { get; set; }
public string Forename { get; set; }
}
private class PropertiesValidator2 : AbstractValidator<PropertiesTestModel2>, IValidatorInterceptor {
public PropertiesValidator2() {
RuleFor(x => x.Email).NotEqual("foo");
RuleFor(x => x.Surname).NotEqual("foo");
RuleFor(x => x.Forename).NotEqual("foo");
}
public ValidationContext BeforeMvcValidation(ControllerContext controllerContext, ValidationContext validationContext) {
return validationContext;
}
public ValidationResult AfterMvcValidation(ControllerContext controllerContext, ValidationContext validationContext, ValidationResult result) {
return new ValidationResult(); //empty errors
}
}
[Validator(typeof(PropertiesValidator))]
private class PropertiesTestModel {
public string Email { get; set; }
public string Surname { get; set; }
public string Forename { get; set; }
}
private class PropertiesValidator : AbstractValidator<PropertiesTestModel> {
public PropertiesValidator() {
RuleFor(x => x.Email).NotEqual("foo");
RuleFor(x => x.Surname).NotEqual("foo");
RuleFor(x => x.Forename).NotEqual("foo");
}
}
[Validator(typeof(RulesetTestValidator))]
private class RulesetTestModel {
public string Email { get; set; }
public string Surname { get; set; }
public string Forename { get; set; }
}
private class RulesetTestValidator : AbstractValidator<RulesetTestModel> {
public RulesetTestValidator() {
RuleFor(x => x.Email).NotEqual("foo");
RuleSet("Names", () => {
RuleFor(x => x.Surname).NotEqual("foo");
RuleFor(x => x.Forename).NotEqual("foo");
});
}
}
[Validator(typeof(TestModelWithOverridenMessageValueTypeValidator))]
private class TestModelWithOverridenMessageValueType {
public int Id { get; set; }
}
[Validator(typeof(TestModelWithOverridenPropertyNameValidator))]
private class TestModelWithOverridenPropertyNameValueType {
public int Id { get; set; }
}
private class TestModelWithOverridenMessageValueTypeValidator : AbstractValidator<TestModelWithOverridenMessageValueType> {
public TestModelWithOverridenMessageValueTypeValidator() {
RuleFor(x => x.Id).NotNull().WithMessage("Foo");
}
}
private class TestModelWithOverridenPropertyNameValidator : AbstractValidator<TestModelWithOverridenPropertyNameValueType> {
public TestModelWithOverridenPropertyNameValidator() {
RuleFor(x => x.Id).NotNull().WithName("Foo");
}
}
public class MockHttpContext : Mock<HttpContextBase> {
public MockHttpContext() {
Setup(x => x.Items).Returns(new Hashtable());
}
public static HttpContextBase Create() {
return new MockHttpContext().Object;
}
}
}
}
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using ELFSharp.ELF;
using ELFSharp.ELF.Sections;
using YetiCommon.SSH;
namespace YetiCommon
{
public class ModuleParser : IModuleParser
{
readonly string _debugLinkName = ".gnu_debuglink";
readonly string _debugDirName = ".note.debug_info_dir";
readonly string _buildIdName = ".note.gnu.build-id";
readonly string _debugInfoName = ".debug_info";
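// Illustrative line from "objdump -s --section=.note.gnu.build-id <file>":
//  " 0370 04000000 14000000 03000000 474e5500  ............GNU."
// The regex below skips the leading offset and captures the one to four
// hex word groups on each line.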
static readonly Regex _hexDumpRegex =
new Regex(@"^\s*[0-9a-fA-F]+(?:\s([0-9a-fA-F]+)){1,4}");
public DebugLinkLocationInfo ParseDebugLinkInfo(string filepath)
{
var output = new DebugLinkLocationInfo();
if (!File.Exists(filepath))
{
output.AddError($"{filepath} not found");
return output;
}
if (!ELFReader.TryLoad(filepath, out IELF elfReader))
{
output.AddError(ErrorStrings.InvalidSymbolFileFormat(filepath));
return output;
}
using (elfReader)
{
if (elfReader.TryGetSection(_debugDirName, out ISection directorySection))
{
byte[] contents = directorySection.GetContents();
output.SetDirectory(ParseStringValue(contents));
}
else
{
output.AddError(ErrorStrings.FailedToReadSymbolFileDir(
filepath, ErrorStrings.NoDebugDir));
}
if (elfReader.TryGetSection(_debugLinkName, out ISection debugLinkSection))
{
byte[] contents = debugLinkSection.GetContents();
output.SetFilename(ParseStringValue(contents));
}
else
{
output.AddError(ErrorStrings.FailedToReadSymbolFileName(
filepath, ErrorStrings.NoDebugLink));
}
}
return output;
}
public BuildIdInfo ParseBuildIdInfo(string filepath, bool isElf)
{
var output = new BuildIdInfo();
if (!File.Exists(filepath))
{
output.AddError($"{filepath} not found");
return output;
}
if (isElf)
{
ParseBuildIdFromElf(filepath, ref output);
}
else
{
// TODO: add PE-modules processing
output.AddError("Cannot read BuildId from PE module");
}
return output;
}
void ParseBuildIdFromElf(string filepath, ref BuildIdInfo output)
{
if (!ELFReader.TryLoad(filepath, out IELF elfReader))
{
output.AddError(ErrorStrings.InvalidSymbolFileFormat(filepath));
return;
}
using (elfReader)
{
if (!elfReader.TryGetSection(_buildIdName, out ISection buildIdSection))
{
output.AddError(
ErrorStrings.FailedToReadBuildId(filepath, ErrorStrings.EmptyBuildId));
return;
}
if (buildIdSection is INoteSection buildIdNoteSection)
{
byte[] contents = buildIdNoteSection.Description;
output.Data = ParseBuildIdValue(contents);
}
else
{
output.AddError(
ErrorStrings.FailedToReadBuildId(filepath, ErrorStrings.EmptyBuildId));
}
}
}
public async Task<BuildId> ParseRemoteBuildIdInfoAsync(string filepath, SshTarget target)
{
if (target == null)
{
throw new BinaryFileUtilException(ErrorStrings.FailedToReadBuildId(filepath,
"Remote target was not provided"));
}
string objDumpArgs =
$"-s --section={_buildIdName} {ProcessUtil.QuoteArgument(filepath)}";
ProcessStartInfo startInfo = ProcessStartInfoBuilder.BuildForSsh(
$"{YetiConstants.ObjDumpLinuxExecutable} {objDumpArgs}",
new List<string>(),
target);
var processFactory = new ManagedProcess.Factory();
try
{
List<string> outputLines;
using (IProcess process = processFactory.Create(startInfo))
{
outputLines = await process.RunToExitWithSuccessCapturingOutputAsync();
}
string hexString = ParseHexDump(outputLines);
BuildId buildId = ParseBuildIdOutput(hexString);
return buildId;
}
catch (ProcessExecutionException e)
{
LogObjdumpOutput(e);
                // objdump returned an error code, possibly because the file being parsed is not
                // actually an ELF file. With an SSH target, exit code 255 means SSH failed
                // before it had a chance to execute the remote command.
if (e.ExitCode < 255)
{
// The remote command failed, so we need to fix the exception message.
// TODO: ManagedProcess should report the remote filename.
throw new BinaryFileUtilException(
ErrorStrings.FailedToReadBuildId(
filepath, ErrorStrings.ProcessExitedWithErrorCode(
YetiConstants.ObjDumpLinuxExecutable, e.ExitCode)),
e);
}
throw new BinaryFileUtilException(
ErrorStrings.FailedToReadBuildId(filepath, e.Message), e);
}
catch (ProcessException e)
{
// objdump failed to launch, possibly because the SDK was not found. With an SSH
// target, this indicates that SSH failed to launch. In either case, the specific
// filepath was never accessed, so it is not part of the error.
throw new BinaryFileUtilException(
ErrorStrings.FailedToReadBuildId(filepath, e.Message), e);
}
catch (FormatException e)
{
// Indicates the build ID section is malformed.
throw new InvalidBuildIdException(
ErrorStrings.FailedToReadBuildId(
filepath, ErrorStrings.MalformedBuildId),
e);
}
void LogObjdumpOutput(ProcessExecutionException e)
{
Trace.WriteLine("objdump invocation failed \nstdout: \n" +
string.Join("\n", e.OutputLines) +
"\nstderr: \n" +
string.Join("\n", e.ErrorLines));
}
}
/// <summary>
/// Parses a hex dump in the format outputted by objdump, and returns just the hex digits.
/// </summary>
/// <param name="hexDumpOutput">The raw output of the 'objdump' process.</param>
/// <returns>The hexadecimal characters concatenated together without whitespace.</returns>
string ParseHexDump(IList<string> hexDumpOutput)
{
var hexString = new StringBuilder();
foreach (string line in hexDumpOutput)
{
Match match = _hexDumpRegex.Match(line);
foreach (Capture capture in match.Groups[1].Captures)
{
hexString.Append(capture.Value);
}
}
return hexString.ToString();
}
/// <summary>
/// Given the content of the build ID section, returns the build ID.
/// </summary>
/// <param name="hexString">The content of the section represented in hex.</param>
        /// <returns>A valid, non-empty build ID.</returns>
/// <exception cref="FormatException">
/// Thrown when the input does not have enough leading bytes, or when it does not encode a
/// valid build ID.
/// </exception>
BuildId ParseBuildIdOutput(string hexString)
{
// A note segment consists of a 4 byte namesz field, a 4 byte descsz field,
// a 4 byte type field, a namesz-length name field, and a descsz-length desc field.
// In the case of the gnu.build-id segment, name is a 4 byte string with the
// contents "GNU\0", and desc is the actual build ID. All together, there are
// 16 bytes preceding the actual build ID, which makes for 32 hex digits that we
// want to skip.
if (hexString.Length < 32)
{
throw new FormatException(
$"Got {hexString.Length} hex digits, " +
"but wanted at least 32 leading digits");
}
var buildId = new BuildId(hexString.Substring(32));
if (buildId == BuildId.Empty)
{
throw new FormatException(ErrorStrings.EmptyBuildId);
}
return buildId;
}
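        // Illustrative example (assumed objdump -s output, not taken from any real binary):
        // a dump line such as
        //   " 0000 04000000 14000000 03000000 474e5500"
        // matches _hexDumpRegex; group 1 captures the hex words after the leading offset,
        // and ParseHexDump concatenates them into a single hex string. ParseBuildIdOutput
        // then drops the first 32 hex digits (namesz, descsz, type and the 4-byte "GNU\0"
        // name, 16 bytes in total), leaving only the build ID digits.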
public bool IsValidElf(string filepath, bool isDebugInfoFile, out string errorMessage)
{
errorMessage = "";
if (!File.Exists(filepath))
{
errorMessage = $"{filepath} not found";
return false;
}
if (!ELFReader.TryLoad(filepath, out IELF elfReader))
{
return false;
}
using (elfReader)
{
if (!isDebugInfoFile)
{
                    // If we don't need to check for the presence of .debug_info, it is
                    // sufficient that the file is in a valid ELF format.
return true;
}
if (elfReader.TryGetSection(_debugInfoName, out ISection _))
{
return true;
}
errorMessage += ErrorStrings.MissingDebugInfoInSymbolFile(filepath);
}
return false;
}
public string ParseStringValue(byte[] contents)
{
IEnumerable<byte> stringBytes = contents.TakeWhile(x => x != 0);
return Encoding.ASCII.GetString(stringBytes.ToArray());
}
public BuildId ParseBuildIdValue(byte[] contents) => new BuildId(contents);
}
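    // Minimal usage sketch (assumed caller code, not part of this file): reading the
    // .gnu_debuglink reference and the build ID from a local ELF binary.
    //
    //   var parser = new ModuleParser();
    //   DebugLinkLocationInfo link = parser.ParseDebugLinkInfo("/path/to/binary");
    //   BuildIdInfo buildId = parser.ParseBuildIdInfo("/path/to/binary", isElf: true);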
}
| |
using System;
using System.Collections.Generic;
using System.IO;
namespace mapgenerator
{
class mapgenerator
{
static void WriteFieldMember(TextWriter writer, MapField field)
{
string type = String.Empty;
switch (field.Type)
{
case FieldType.BOOL:
type = "bool";
break;
case FieldType.F32:
type = "float";
break;
case FieldType.F64:
type = "double";
break;
case FieldType.IPPORT:
case FieldType.U16:
type = "ushort";
break;
case FieldType.IPADDR:
case FieldType.U32:
type = "uint";
break;
case FieldType.LLQuaternion:
type = "Quaternion";
break;
case FieldType.LLUUID:
type = "UUID";
break;
case FieldType.LLVector3:
type = "Vector3";
break;
case FieldType.LLVector3d:
type = "Vector3d";
break;
case FieldType.LLVector4:
type = "Vector4";
break;
case FieldType.S16:
type = "short";
break;
case FieldType.S32:
type = "int";
break;
case FieldType.S8:
type = "sbyte";
break;
case FieldType.U64:
type = "ulong";
break;
case FieldType.U8:
type = "byte";
break;
case FieldType.Fixed:
type = "byte[]";
break;
}
if (field.Type != FieldType.Variable)
{
//writer.WriteLine(" /// <summary>" + field.Name + " field</summary>");
writer.WriteLine(" public " + type + " " + field.Name + ";");
}
else
{
writer.WriteLine(" private byte[] _" + field.Name.ToLower() + ";");
//writer.WriteLine(" /// <summary>" + field.Name + " field</summary>");
writer.WriteLine(" public byte[] " + field.Name + Environment.NewLine + " {");
writer.WriteLine(" get { return _" + field.Name.ToLower() + "; }");
writer.WriteLine(" set" + Environment.NewLine + " {");
writer.WriteLine(" if (value == null) { _" +
field.Name.ToLower() + " = null; return; }");
writer.WriteLine(" if (value.Length > " +
((field.Count == 1) ? "255" : "1100") + ") { throw new OverflowException(" +
"\"Value exceeds " + ((field.Count == 1) ? "255" : "1100") + " characters\"); }");
writer.WriteLine(" else { _" + field.Name.ToLower() +
" = new byte[value.Length]; Buffer.BlockCopy(value, 0, _" +
field.Name.ToLower() + ", 0, value.Length); }");
writer.WriteLine(" }" + Environment.NewLine + " }");
}
}
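        // For reference (roughly, per the strings written above): a Variable field named
        // "Data" with Count == 1 is emitted as a private backing array plus a property
        // whose setter copies the value and rejects anything longer than 255 bytes:
        //   private byte[] _data;
        //   public byte[] Data { get { return _data; } set { /* null check, length check, copy */ } }
        // Fixed-size fields become plain public members, e.g. "public uint SomeField;".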
static void WriteFieldFromBytes(TextWriter writer, MapField field)
{
switch (field.Type)
{
case FieldType.BOOL:
writer.WriteLine(" " +
field.Name + " = (bytes[i++] != 0) ? (bool)true : (bool)false;");
break;
case FieldType.F32:
writer.WriteLine(" " +
field.Name + " = Utils.BytesToFloat(bytes, i); i += 4;");
break;
case FieldType.F64:
writer.WriteLine(" " +
field.Name + " = Utils.BytesToDouble(bytes, i); i += 8;");
break;
case FieldType.Fixed:
writer.WriteLine(" " + field.Name + " = new byte[" + field.Count + "];");
writer.WriteLine(" Buffer.BlockCopy(bytes, i, " + field.Name +
", 0, " + field.Count + "); i += " + field.Count + ";");
break;
case FieldType.IPADDR:
case FieldType.U32:
writer.WriteLine(" " + field.Name +
" = (uint)(bytes[i++] + (bytes[i++] << 8) + (bytes[i++] << 16) + (bytes[i++] << 24));");
break;
case FieldType.IPPORT:
// IPPORT is big endian while U16/S16 are little endian. Go figure
writer.WriteLine(" " + field.Name +
" = (ushort)((bytes[i++] << 8) + bytes[i++]);");
break;
case FieldType.U16:
writer.WriteLine(" " + field.Name +
" = (ushort)(bytes[i++] + (bytes[i++] << 8));");
break;
case FieldType.LLQuaternion:
writer.WriteLine(" " + field.Name + ".FromBytes(bytes, i, true); i += 12;");
break;
case FieldType.LLUUID:
writer.WriteLine(" " + field.Name + ".FromBytes(bytes, i); i += 16;");
break;
case FieldType.LLVector3:
writer.WriteLine(" " + field.Name + ".FromBytes(bytes, i); i += 12;");
break;
case FieldType.LLVector3d:
writer.WriteLine(" " + field.Name + ".FromBytes(bytes, i); i += 24;");
break;
case FieldType.LLVector4:
writer.WriteLine(" " + field.Name + ".FromBytes(bytes, i); i += 16;");
break;
case FieldType.S16:
writer.WriteLine(" " + field.Name +
" = (short)(bytes[i++] + (bytes[i++] << 8));");
break;
case FieldType.S32:
writer.WriteLine(" " + field.Name +
" = (int)(bytes[i++] + (bytes[i++] << 8) + (bytes[i++] << 16) + (bytes[i++] << 24));");
break;
case FieldType.S8:
writer.WriteLine(" " + field.Name +
" = (sbyte)bytes[i++];");
break;
case FieldType.U64:
writer.WriteLine(" " + field.Name +
" = (ulong)((ulong)bytes[i++] + ((ulong)bytes[i++] << 8) + " +
"((ulong)bytes[i++] << 16) + ((ulong)bytes[i++] << 24) + " +
"((ulong)bytes[i++] << 32) + ((ulong)bytes[i++] << 40) + " +
"((ulong)bytes[i++] << 48) + ((ulong)bytes[i++] << 56));");
break;
case FieldType.U8:
writer.WriteLine(" " + field.Name +
" = (byte)bytes[i++];");
break;
case FieldType.Variable:
if (field.Count == 1)
{
writer.WriteLine(" length = (ushort)bytes[i++];");
}
else
{
writer.WriteLine(" length = (ushort)(bytes[i++] + (bytes[i++] << 8));");
}
writer.WriteLine(" _" + field.Name.ToLower() + " = new byte[length];");
writer.WriteLine(" Buffer.BlockCopy(bytes, i, _" + field.Name.ToLower() +
", 0, length); i += length;");
break;
default:
writer.WriteLine("!!! ERROR: Unhandled FieldType: " + field.Type.ToString() + " !!!");
break;
}
}
static void WriteFieldToBytes(TextWriter writer, MapField field)
{
writer.Write(" ");
switch (field.Type)
{
case FieldType.BOOL:
writer.WriteLine("bytes[i++] = (byte)((" + field.Name + ") ? 1 : 0);");
break;
case FieldType.F32:
writer.WriteLine("Utils.FloatToBytes(" + field.Name + ", bytes, i); i += 4;");
break;
case FieldType.F64:
writer.WriteLine("Utils.DoubleToBytes(" + field.Name + ", bytes, i); i += 8;");
break;
case FieldType.Fixed:
writer.WriteLine("Buffer.BlockCopy(" + field.Name + ", 0, bytes, i, " + field.Count + ");" +
"i += " + field.Count + ";");
break;
case FieldType.IPPORT:
                    // IPPORT is big endian while U16/S16 are little endian. Go figure
writer.WriteLine("bytes[i++] = (byte)((" + field.Name + " >> 8) % 256);");
writer.WriteLine(" bytes[i++] = (byte)(" + field.Name + " % 256);");
break;
case FieldType.U16:
case FieldType.S16:
writer.WriteLine("bytes[i++] = (byte)(" + field.Name + " % 256);");
writer.WriteLine(" bytes[i++] = (byte)((" + field.Name + " >> 8) % 256);");
break;
case FieldType.LLUUID:
writer.WriteLine("Buffer.BlockCopy(" + field.Name + ".GetBytes(), 0, bytes, i, 16); i += 16;");
break;
case FieldType.LLVector4:
writer.WriteLine("Buffer.BlockCopy(" + field.Name + ".GetBytes(), 0, bytes, i, 16); i += 16;");
break;
case FieldType.LLQuaternion:
case FieldType.LLVector3:
writer.WriteLine("Buffer.BlockCopy(" + field.Name + ".GetBytes(), 0, bytes, i, 12); i += 12;");
break;
case FieldType.LLVector3d:
writer.WriteLine("Buffer.BlockCopy(" + field.Name + ".GetBytes(), 0, bytes, i, 24); i += 24;");
break;
case FieldType.U8:
writer.WriteLine("bytes[i++] = " + field.Name + ";");
break;
case FieldType.S8:
writer.WriteLine("bytes[i++] = (byte)" + field.Name + ";");
break;
case FieldType.IPADDR:
case FieldType.U32:
writer.WriteLine("Utils.UIntToBytes(" + field.Name + ", bytes, i); i += 4;");
break;
case FieldType.S32:
writer.WriteLine("Utils.IntToBytes(" + field.Name + ", bytes, i); i += 4;");
break;
case FieldType.U64:
writer.WriteLine("Utils.UInt64ToBytes(" + field.Name + ", bytes, i); i += 8;");
break;
case FieldType.Variable:
writer.WriteLine("if(" + field.Name + " == null) { Console.WriteLine(\"Warning: " + field.Name + " is null, in \" + this.GetType()); }");
writer.Write(" ");
if (field.Count == 1)
{
writer.WriteLine("bytes[i++] = (byte)" + field.Name + ".Length;");
}
else
{
writer.WriteLine("bytes[i++] = (byte)(" + field.Name + ".Length % 256);");
writer.WriteLine(" bytes[i++] = (byte)((" +
field.Name + ".Length >> 8) % 256);");
}
writer.WriteLine(" Buffer.BlockCopy(" + field.Name + ", 0, bytes, i, " +
field.Name + ".Length); " + "i += " + field.Name + ".Length;");
break;
default:
writer.WriteLine("!!! ERROR: Unhandled FieldType: " + field.Type.ToString() + " !!!");
break;
}
}
static int GetFieldLength(TextWriter writer, MapField field)
{
switch(field.Type)
{
case FieldType.BOOL:
case FieldType.U8:
case FieldType.S8:
return 1;
case FieldType.U16:
case FieldType.S16:
case FieldType.IPPORT:
return 2;
case FieldType.U32:
case FieldType.S32:
case FieldType.F32:
case FieldType.IPADDR:
return 4;
case FieldType.U64:
case FieldType.F64:
return 8;
case FieldType.LLVector3:
case FieldType.LLQuaternion:
return 12;
case FieldType.LLUUID:
case FieldType.LLVector4:
return 16;
case FieldType.LLVector3d:
return 24;
case FieldType.Fixed:
return field.Count;
case FieldType.Variable:
return 0;
default:
writer.WriteLine("!!! ERROR: Unhandled FieldType " + field.Type.ToString() + " !!!");
return 0;
}
}
static void WriteBlockClass(TextWriter writer, MapBlock block, MapPacket packet)
{
bool variableFields = false;
//writer.WriteLine(" /// <summary>" + block.Name + " block</summary>");
writer.WriteLine(" /// <exclude/>");
writer.WriteLine(" public class " + block.Name + "Block : PacketBlock" + Environment.NewLine + " {");
foreach (MapField field in block.Fields)
{
WriteFieldMember(writer, field);
if (field.Type == FieldType.Variable) { variableFields = true; }
}
// Length property
writer.WriteLine("");
//writer.WriteLine(" /// <summary>Length of this block serialized in bytes</summary>");
writer.WriteLine(" public override int Length" + Environment.NewLine +
" {" + Environment.NewLine +
" get" + Environment.NewLine +
" {");
int length = 0;
// Figure out the length of this block
foreach (MapField field in block.Fields)
{
length += GetFieldLength(writer, field);
}
if (!variableFields)
{
writer.WriteLine(" return " + length + ";");
}
else
{
writer.WriteLine(" int length = " + length + ";");
foreach (MapField field in block.Fields)
{
if (field.Type == FieldType.Variable)
{
writer.WriteLine(" if (" + field.Name +
" != null) { length += " + field.Count + " + " + field.Name + ".Length; }");
}
}
writer.WriteLine(" return length;");
}
writer.WriteLine(" }" + Environment.NewLine + " }" + Environment.NewLine);
// Default constructor
//writer.WriteLine(" /// <summary>Default constructor</summary>");
writer.WriteLine(" public " + block.Name + "Block() { }");
// Constructor for building the class from bytes
//writer.WriteLine(" /// <summary>Constructor for building the block from a byte array</summary>");
writer.WriteLine(" public " + block.Name + "Block(byte[] bytes, ref int i)" + Environment.NewLine +
" {" + Environment.NewLine +
" FromBytes(bytes, ref i);" + Environment.NewLine +
" }" + Environment.NewLine);
// Initiates instance variables from a byte message
writer.WriteLine(" public override void FromBytes(byte[] bytes, ref int i)" + Environment.NewLine +
" {");
// Declare a length variable if we need it for variable fields in this constructor
if (variableFields) { writer.WriteLine(" int length;"); }
// Start of the try catch block
writer.WriteLine(" try" + Environment.NewLine + " {");
foreach (MapField field in block.Fields)
{
WriteFieldFromBytes(writer, field);
}
writer.WriteLine(" }" + Environment.NewLine +
" catch (Exception)" + Environment.NewLine +
" {" + Environment.NewLine +
" throw new MalformedDataException();" + Environment.NewLine +
" }" + Environment.NewLine + " }" + Environment.NewLine);
// ToBytes() function
//writer.WriteLine(" /// <summary>Serialize this block to a byte array</summary>");
writer.WriteLine(" public override void ToBytes(byte[] bytes, ref int i)" + Environment.NewLine +
" {");
foreach (MapField field in block.Fields)
{
WriteFieldToBytes(writer, field);
}
writer.WriteLine(" }" + Environment.NewLine);
// ToString() function
writer.WriteLine(" public override string ToString()" + Environment.NewLine + " {");
writer.WriteLine(" StringBuilder output = new StringBuilder();");
writer.WriteLine(" output.AppendLine(\"-- " + block.Name + " --\");");
for (int i = 0; i < block.Fields.Count; i++)
{
MapField field = block.Fields[i];
if (field.Type == FieldType.Variable || field.Type == FieldType.Fixed)
{
writer.WriteLine(" Helpers.FieldToString(output, " + field.Name + ", \"" + field.Name + "\");");
if (i != block.Fields.Count - 1) writer.WriteLine(" output.Append(Environment.NewLine);");
}
else
{
if (i != block.Fields.Count - 1) writer.WriteLine(" output.AppendLine(String.Format(\"" + field.Name + ": {0}\", " + field.Name + "));");
else writer.WriteLine(" output.Append(String.Format(\"" + field.Name + ": {0}\", " + field.Name + "));");
}
}
writer.WriteLine(" return output.ToString();" + Environment.NewLine + " }");
writer.WriteLine(" }" + Environment.NewLine);
}
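        // Taken together, the writer calls above emit (roughly) a block class of this shape:
        //   public class FooBlock : PacketBlock
        //   {
        //       /* one member per MapField */
        //       public override int Length { get { ... } }
        //       public FooBlock() { }
        //       public FooBlock(byte[] bytes, ref int i) { FromBytes(bytes, ref i); }
        //       public override void FromBytes(byte[] bytes, ref int i) { ... }
        //       public override void ToBytes(byte[] bytes, ref int i) { ... }
        //       public override string ToString() { ... }
        //   }
        // where "FooBlock" stands in for the actual block name.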
static void WritePacketClass(TextWriter writer, MapPacket packet)
{
string sanitizedName;
//writer.WriteLine(" /// <summary>" + packet.Name + " packet</summary>");
writer.WriteLine(" /// <exclude/>");
writer.WriteLine(" public class " + packet.Name + "Packet : Packet" + Environment.NewLine + " {");
// Write out each block class
foreach (MapBlock block in packet.Blocks)
{
WriteBlockClass(writer, block, packet);
}
// Header member
writer.WriteLine(" private Header header;");
//writer.WriteLine(" /// <summary>The header for this packet</summary>");
writer.WriteLine(" public override Header Header { get { return header; } set { header = value; } }");
// PacketType member
//writer.WriteLine(" /// <summary>Will return PacketType." + packet.Name+ "</summary>");
writer.WriteLine(" public override PacketType Type { get { return PacketType." +
packet.Name + "; } }");
// Block members
foreach (MapBlock block in packet.Blocks)
{
// TODO: More thorough name blacklisting
if (block.Name == "Header") { sanitizedName = "_" + block.Name; }
else { sanitizedName = block.Name; }
//writer.WriteLine(" /// <summary>" + block.Name + " block</summary>");
writer.WriteLine(" public " + block.Name + "Block" +
((block.Count != 1) ? "[]" : "") + " " + sanitizedName + ";");
}
writer.WriteLine("");
// Default constructor
//writer.WriteLine(" /// <summary>Default constructor</summary>");
writer.WriteLine(" public " + packet.Name + "Packet()" + Environment.NewLine + " {");
writer.WriteLine(" Header = new " + packet.Frequency.ToString() + "Header();");
writer.WriteLine(" Header.ID = " + packet.ID + ";");
writer.WriteLine(" Header.Reliable = true;"); // Turn the reliable flag on by default
if (packet.Encoded) { writer.WriteLine(" Header.Zerocoded = true;"); }
foreach (MapBlock block in packet.Blocks)
{
if (block.Name == "Header") { sanitizedName = "_" + block.Name; }
else { sanitizedName = block.Name; }
if (block.Count == 1)
{
// Single count block
writer.WriteLine(" " + sanitizedName + " = new " + block.Name + "Block();");
}
else if (block.Count == -1)
{
// Variable count block
writer.WriteLine(" " + sanitizedName + " = new " + block.Name + "Block[0];");
}
else
{
// Multiple count block
writer.WriteLine(" " + sanitizedName + " = new " + block.Name + "Block[" + block.Count + "];");
}
}
writer.WriteLine(" }" + Environment.NewLine);
// Constructor that takes a byte array and beginning position only (no prebuilt header)
bool seenVariable = false;
//writer.WriteLine(" /// <summary>Constructor that takes a byte array and beginning position (no prebuilt header)</summary>");
writer.WriteLine(" public " + packet.Name + "Packet(byte[] bytes, ref int i) : this()" + Environment.NewLine +
" {" + Environment.NewLine +
" int packetEnd = bytes.Length - 1;" + Environment.NewLine +
" FromBytes(bytes, ref i, ref packetEnd, null);" + Environment.NewLine +
" }" + Environment.NewLine);
writer.WriteLine(" override public void FromBytes(byte[] bytes, ref int i, ref int packetEnd, byte[] zeroBuffer)" + Environment.NewLine + " {");
writer.WriteLine(" header.FromBytes(bytes, ref i, ref packetEnd);");
writer.WriteLine(" if (header.Zerocoded && zeroBuffer != null)");
writer.WriteLine(" {");
writer.WriteLine(" packetEnd = Helpers.ZeroDecode(bytes, packetEnd + 1, zeroBuffer) - 1;");
writer.WriteLine(" bytes = zeroBuffer;");
writer.WriteLine(" }");
foreach (MapBlock block in packet.Blocks)
{
if (block.Name == "Header") { sanitizedName = "_" + block.Name; }
else { sanitizedName = block.Name; }
if (block.Count == 1)
{
// Single count block
writer.WriteLine(" " + sanitizedName + ".FromBytes(bytes, ref i);");
}
else if (block.Count == -1)
{
// Variable count block
if (!seenVariable)
{
writer.WriteLine(" int count = (int)bytes[i++];");
seenVariable = true;
}
else
{
writer.WriteLine(" count = (int)bytes[i++];");
}
writer.WriteLine(" if(" + sanitizedName + ".Length < count) {");
writer.WriteLine(" " + sanitizedName + " = new " + block.Name + "Block[count];");
writer.WriteLine(" for(int j = 0; j < count; j++) " + sanitizedName + "[j] = new " + block.Name + "Block();");
writer.WriteLine(" }");
writer.WriteLine(" for (int j = 0; j < count; j++)");
writer.WriteLine(" { " + sanitizedName + "[j].FromBytes(bytes, ref i); }");
}
else
{
// Multiple count block
writer.WriteLine(" if(" + sanitizedName + ".Length < " + block.Count+") {");
writer.WriteLine(" " + sanitizedName + " = new " + block.Name + "Block[" + block.Count + "];");
writer.WriteLine(" for(int j = 0; j < " + block.Count + "; j++) " + sanitizedName + "[j] = new " + block.Name + "Block();");
writer.WriteLine(" }");
writer.WriteLine(" for (int j = 0; j < " + block.Count + "; j++)");
writer.WriteLine(" { " + sanitizedName + "[j].FromBytes(bytes, ref i); }");
}
}
writer.WriteLine(" }" + Environment.NewLine);
seenVariable = false;
// Constructor that takes a byte array and a prebuilt header
//writer.WriteLine(" /// <summary>Constructor that takes a byte array and a prebuilt header</summary>");
writer.WriteLine(" public " + packet.Name + "Packet(Header head, byte[] bytes, ref int i): this()" + Environment.NewLine +
" {" + Environment.NewLine +
" int packetEnd = bytes.Length - 1;" + Environment.NewLine +
" FromBytes(head, bytes, ref i, ref packetEnd, null);" + Environment.NewLine +
" }" + Environment.NewLine);
writer.WriteLine(" override public void FromBytes(Header head, byte[] bytes, ref int i, ref int packetEnd, byte[] zeroBuffer)" + Environment.NewLine + " {");
writer.WriteLine(" Header = head;");
writer.WriteLine(" if (head.Zerocoded && zeroBuffer != null)");
writer.WriteLine(" {");
writer.WriteLine(" packetEnd = Helpers.ZeroDecode(bytes, packetEnd + 1, zeroBuffer) - 1;");
writer.WriteLine(" bytes = zeroBuffer;");
writer.WriteLine(" }");
foreach (MapBlock block in packet.Blocks)
{
if (block.Name == "Header") { sanitizedName = "_" + block.Name; }
else { sanitizedName = block.Name; }
if (block.Count == 1)
{
// Single count block
writer.WriteLine(" " + sanitizedName + ".FromBytes(bytes, ref i);");
}
else if (block.Count == -1)
{
// Variable count block
if (!seenVariable)
{
writer.WriteLine(" int count = (int)bytes[i++];");
seenVariable = true;
}
else
{
writer.WriteLine(" count = (int)bytes[i++];");
}
writer.WriteLine(" if(" + sanitizedName + ".Length < count) {");
writer.WriteLine(" " + sanitizedName + " = new " + block.Name + "Block[count];");
writer.WriteLine(" for(int j = 0; j < count; j++) " + sanitizedName + "[j] = new " + block.Name + "Block();");
writer.WriteLine(" }");
writer.WriteLine(" for (int j = 0; j < count; j++)");
writer.WriteLine(" { " + sanitizedName + "[j].FromBytes(bytes, ref i); }");
}
else
{
// Multiple count block
writer.WriteLine(" if(" + sanitizedName + ".Length < " + block.Count+") {");
writer.WriteLine(" " + sanitizedName + " = new " + block.Name + "Block[" + block.Count + "];");
writer.WriteLine(" for(int j = 0; j < " + block.Count + "; j++) " + sanitizedName + "[j] = new " + block.Name + "Block();");
writer.WriteLine(" }");
writer.WriteLine(" for (int j = 0; j < " + block.Count + "; j++)");
writer.WriteLine(" { " + sanitizedName + "[j].FromBytes(bytes, ref i); }");
}
}
writer.WriteLine(" }" + Environment.NewLine);
// ToBytes() function
//writer.WriteLine(" /// <summary>Serialize this packet to a byte array</summary><returns>A byte array containing the serialized packet</returns>");
writer.WriteLine(" public override byte[] ToBytes()" + Environment.NewLine + " {");
writer.Write(" int length = ");
if (packet.Frequency == PacketFrequency.Low) { writer.WriteLine("10;"); }
else if (packet.Frequency == PacketFrequency.Medium) { writer.WriteLine("8;"); }
else { writer.WriteLine("7;"); }
foreach (MapBlock block in packet.Blocks)
{
if (block.Name == "Header") { sanitizedName = "_" + block.Name; }
else { sanitizedName = block.Name; }
if (block.Count == 1)
{
// Single count block
writer.Write(" length += " + sanitizedName + ".Length;");
}
}
writer.WriteLine(";");
foreach (MapBlock block in packet.Blocks)
{
if (block.Name == "Header") { sanitizedName = "_" + block.Name; }
else { sanitizedName = block.Name; }
if (block.Count == -1)
{
writer.WriteLine(" length++;");
writer.WriteLine(" for (int j = 0; j < " + sanitizedName +
".Length; j++) { length += " + sanitizedName + "[j].Length; }");
}
else if (block.Count > 1)
{
writer.WriteLine(" for (int j = 0; j < " + block.Count +
"; j++) { length += " + sanitizedName + "[j].Length; }");
}
}
writer.WriteLine(" if (header.AckList.Length > 0) { length += header.AckList.Length * 4 + 1; }");
writer.WriteLine(" byte[] bytes = new byte[length];");
writer.WriteLine(" int i = 0;");
writer.WriteLine(" header.ToBytes(bytes, ref i);");
foreach (MapBlock block in packet.Blocks)
{
if (block.Name == "Header") { sanitizedName = "_" + block.Name; }
else { sanitizedName = block.Name; }
if (block.Count == -1)
{
// Variable count block
writer.WriteLine(" bytes[i++] = (byte)" + sanitizedName + ".Length;");
writer.WriteLine(" for (int j = 0; j < " + sanitizedName +
".Length; j++) { " + sanitizedName + "[j].ToBytes(bytes, ref i); }");
}
else if (block.Count == 1)
{
writer.WriteLine(" " + sanitizedName + ".ToBytes(bytes, ref i);");
}
else
{
// Multiple count block
writer.WriteLine(" for (int j = 0; j < " + block.Count +
"; j++) { " + sanitizedName + "[j].ToBytes(bytes, ref i); }");
}
}
writer.WriteLine(" if (header.AckList.Length > 0) { header.AcksToBytes(bytes, ref i); }");
writer.WriteLine(" return bytes;" + Environment.NewLine + " }" + Environment.NewLine);
// ToString() function
//writer.WriteLine(" /// <summary>Serialize this packet to a string</summary><returns>A string containing the serialized packet</returns>");
writer.WriteLine(" public override string ToString()" + Environment.NewLine + " {");
writer.WriteLine(" string output = \"--- " + packet.Name + " ---\" + Environment.NewLine;");
foreach (MapBlock block in packet.Blocks)
{
if (block.Name == "Header") { sanitizedName = "_" + block.Name; }
else { sanitizedName = block.Name; }
if (block.Count == -1)
{
// Variable count block
writer.WriteLine(" for (int j = 0; j < " +
sanitizedName + ".Length; j++)" + Environment.NewLine + " {");
writer.WriteLine(" output += " + sanitizedName +
"[j].ToString() + Environment.NewLine;" + Environment.NewLine + " }");
}
else if (block.Count == 1)
{
writer.WriteLine(" output += " + sanitizedName + ".ToString() + Environment.NewLine;");
}
else
{
// Multiple count block
writer.WriteLine(" for (int j = 0; j < " +
block.Count + "; j++)" + Environment.NewLine + " {");
writer.WriteLine(" output += " + sanitizedName +
"[j].ToString() + Environment.NewLine;" + Environment.NewLine + " }");
}
}
writer.WriteLine(" return output;" + Environment.NewLine + " }" + Environment.NewLine);
// Closing function bracket
writer.WriteLine(" }" + Environment.NewLine);
}
static int Main(string[] args)
{
ProtocolManager protocol;
List<string> unused = new List<string>();
TextWriter writer;
try
{
if (args.Length != 4)
{
Console.WriteLine("Usage: [message_template.msg] [template.cs] [unusedpackets.txt] [_Packets_.cs]");
return -1;
}
writer = new StreamWriter(args[3]);
protocol = new ProtocolManager(args[0]);
// Build a list of unused packets
using (StreamReader unusedReader = new StreamReader(args[2]))
{
while (unusedReader.Peek() >= 0)
{
unused.Add(unusedReader.ReadLine().Trim());
}
}
// Read in the template.cs file and write it to our output
TextReader reader = new StreamReader(args[1]);
writer.WriteLine(reader.ReadToEnd());
reader.Close();
}
catch (Exception e)
{
Console.WriteLine(e.ToString());
return -2;
}
// Prune all of the unused packets out of the protocol
int i = 0;
foreach (MapPacket packet in protocol.LowMaps)
{
if (packet != null && unused.Contains(packet.Name))
protocol.LowMaps[i] = null;
i++;
}
i = 0;
foreach (MapPacket packet in protocol.MediumMaps)
{
if (packet != null && unused.Contains(packet.Name))
protocol.MediumMaps[i] = null;
i++;
}
i = 0;
foreach (MapPacket packet in protocol.HighMaps)
{
if (packet != null && unused.Contains(packet.Name))
protocol.HighMaps[i] = null;
i++;
}
// Write the PacketType enum
writer.WriteLine(" public enum PacketType" + Environment.NewLine + " {" + Environment.NewLine +
" /// <summary>A generic value, not an actual packet type</summary>" + Environment.NewLine +
" Default,");
foreach (MapPacket packet in protocol.LowMaps)
if (packet != null)
writer.WriteLine(" " + packet.Name + " = " + (0x10000 | packet.ID) + ",");
foreach (MapPacket packet in protocol.MediumMaps)
if (packet != null)
writer.WriteLine(" " + packet.Name + " = " + (0x20000 | packet.ID) + ",");
foreach (MapPacket packet in protocol.HighMaps)
if (packet != null)
writer.WriteLine(" " + packet.Name + " = " + (0x30000 | packet.ID) + ",");
writer.WriteLine(" }" + Environment.NewLine);
// Write the base Packet class
writer.WriteLine(
" public abstract partial class Packet" + Environment.NewLine + " {" + Environment.NewLine +
" public abstract Header Header { get; set; }" + Environment.NewLine +
" public abstract PacketType Type { get; }" + Environment.NewLine +
" public abstract void FromBytes(byte[] bytes, ref int i, ref int packetEnd, byte[] zeroBuffer);" + Environment.NewLine +
" public abstract void FromBytes(Header header, byte[] bytes, ref int i, ref int packetEnd, byte[] zeroBuffer);" + Environment.NewLine +
" public abstract byte[] ToBytes();"
);
// Write the Packet.GetType() function
writer.WriteLine(
" public static PacketType GetType(ushort id, PacketFrequency frequency)" + Environment.NewLine +
" {" + Environment.NewLine +
" switch (frequency)" + Environment.NewLine +
" {" + Environment.NewLine +
" case PacketFrequency.Low:" + Environment.NewLine +
" switch (id)" + Environment.NewLine +
" {");
foreach (MapPacket packet in protocol.LowMaps)
if (packet != null)
writer.WriteLine(" case " + packet.ID + ": return PacketType." + packet.Name + ";");
writer.WriteLine(" }" + Environment.NewLine +
" break;" + Environment.NewLine +
" case PacketFrequency.Medium:" + Environment.NewLine +
" switch (id)" + Environment.NewLine + " {");
foreach (MapPacket packet in protocol.MediumMaps)
if (packet != null)
writer.WriteLine(" case " + packet.ID + ": return PacketType." + packet.Name + ";");
writer.WriteLine(" }" + Environment.NewLine +
" break;" + Environment.NewLine +
" case PacketFrequency.High:" + Environment.NewLine +
" switch (id)" + Environment.NewLine + " {");
foreach (MapPacket packet in protocol.HighMaps)
if (packet != null)
writer.WriteLine(" case " + packet.ID + ": return PacketType." + packet.Name + ";");
writer.WriteLine(" }" + Environment.NewLine +
" break;" + Environment.NewLine + " }" + Environment.NewLine + Environment.NewLine +
" return PacketType.Default;" + Environment.NewLine + " }" + Environment.NewLine);
// Write the Packet.BuildPacket() function
writer.WriteLine(
" public static Packet BuildPacket(byte[] packetBuffer, ref int packetEnd, byte[] zeroBuffer)" + Environment.NewLine +
" {" + Environment.NewLine +
" byte[] bytes; ushort id; PacketFrequency freq;" + Environment.NewLine +
" int i = 0;" + Environment.NewLine +
" Header header = Header.BuildHeader(packetBuffer, ref i, ref packetEnd);" + Environment.NewLine +
" if (header.Zerocoded)" + Environment.NewLine +
" {" + Environment.NewLine +
" packetEnd = Helpers.ZeroDecode(packetBuffer, packetEnd + 1, zeroBuffer) - 1;" + Environment.NewLine +
" bytes = zeroBuffer;" + Environment.NewLine +
" }" + Environment.NewLine +
" else" + Environment.NewLine +
" {" + Environment.NewLine +
" bytes = packetBuffer;" + Environment.NewLine +
" }" + Environment.NewLine + Environment.NewLine +
" if (bytes[6] == 0xFF)" + Environment.NewLine +
" {" + Environment.NewLine +
" if (bytes[7] == 0xFF)" + Environment.NewLine +
" {" + Environment.NewLine +
" id = (ushort)((bytes[8] << 8) + bytes[9]); freq = PacketFrequency.Low;" + Environment.NewLine +
" switch (id)" + Environment.NewLine +
" {");
foreach (MapPacket packet in protocol.LowMaps)
if (packet != null)
writer.WriteLine(" case " + packet.ID + ": return new " + packet.Name + "Packet(header, bytes, ref i);");
writer.WriteLine(" }" + Environment.NewLine + " }" + Environment.NewLine +
" else" + Environment.NewLine +
" {" + Environment.NewLine + " id = (ushort)bytes[7]; freq = PacketFrequency.Medium;" + Environment.NewLine +
" switch (id)" + Environment.NewLine + " {");
foreach (MapPacket packet in protocol.MediumMaps)
if (packet != null)
writer.WriteLine(" case " + packet.ID + ": return new " + packet.Name + "Packet(header, bytes, ref i);");
writer.WriteLine(" }" + Environment.NewLine + " }" + Environment.NewLine + " }" + Environment.NewLine +
" else" + Environment.NewLine + " {" + Environment.NewLine +
" id = (ushort)bytes[6]; freq = PacketFrequency.High;" + Environment.NewLine +
" switch (id)" + Environment.NewLine + " {");
foreach (MapPacket packet in protocol.HighMaps)
if (packet != null)
writer.WriteLine(" case " + packet.ID + ": return new " + packet.Name + "Packet(header, bytes, ref i);");
writer.WriteLine(" }" + Environment.NewLine + " }" + Environment.NewLine + Environment.NewLine +
" throw new MalformedDataException(\"Unknown packet ID \"+freq+\" \"+id);" + Environment.NewLine +
" }" + Environment.NewLine + " }" + Environment.NewLine);
// Write the packet classes
foreach (MapPacket packet in protocol.LowMaps)
if (packet != null) { WritePacketClass(writer, packet); }
foreach (MapPacket packet in protocol.MediumMaps)
if (packet != null) { WritePacketClass(writer, packet); }
foreach (MapPacket packet in protocol.HighMaps)
if (packet != null) { WritePacketClass(writer, packet); }
// Finish up
writer.WriteLine("}");
writer.Close();
return 0;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Reactive.Linq;
using System.Threading.Tasks;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
using Xunit;
using Refit; // InterfaceStubGenerator looks for this
namespace Refit.Tests
{
public class RootObject
{
public string _id { get; set; }
public string _rev { get; set; }
public string name { get; set; }
}
[Headers("User-Agent: Refit Integration Tests")]
public interface INpmJs
{
[Get("/congruence")]
Task<RootObject> GetCongruence();
}
public interface IRequestBin
{
[Post("/1h3a5jm1")]
Task Post();
}
public interface INoRefitHereBuddy
{
Task Post();
}
public interface IAmHalfRefit
{
[Post("/anything")]
Task Post();
Task Get();
}
public interface IHttpBinApi<TResponse, in TParam, in THeader>
where TResponse : class
where THeader : struct
{
[Get("")]
Task<TResponse> Get(TParam param, [Header("X-Refit")] THeader header);
}
public interface IBrokenWebApi
{
[Post("/what-spec")]
Task<bool> PostAValue([Body] string derp);
}
public interface IHttpContentApi
{
[Post("/blah")]
Task<HttpContent> PostFileUpload([Body] HttpContent content);
}
public class HttpBinGet
{
public Dictionary<string, string> Args { get; set; }
public Dictionary<string, string> Headers { get; set; }
public string Origin { get; set; }
public string Url { get; set; }
}
public class RestServiceIntegrationTests
{
[Fact]
public async Task HitTheGitHubUserApi()
{
var fixture = RestService.For<IGitHubApi>("https://api.github.com");
JsonConvert.DefaultSettings =
() => new JsonSerializerSettings() { ContractResolver = new SnakeCasePropertyNamesContractResolver() };
var result = await fixture.GetUser("octocat");
Assert.Equal("octocat", result.Login);
Assert.False(String.IsNullOrEmpty(result.AvatarUrl));
}
[Fact]
public async Task HitWithCamelCaseParameter()
{
var fixture = RestService.For<IGitHubApi>("https://api.github.com");
JsonConvert.DefaultSettings =
() => new JsonSerializerSettings() { ContractResolver = new SnakeCasePropertyNamesContractResolver() };
var result = await fixture.GetUserCamelCase("octocat");
Assert.Equal("octocat", result.Login);
Assert.False(String.IsNullOrEmpty(result.AvatarUrl));
}
[Fact]
public async Task HitTheGitHubOrgMembersApi()
{
var fixture = RestService.For<IGitHubApi>("https://api.github.com");
JsonConvert.DefaultSettings =
() => new JsonSerializerSettings { ContractResolver = new SnakeCasePropertyNamesContractResolver() };
var result = await fixture.GetOrgMembers("github");
Assert.True(result.Count > 0);
Assert.True(result.Any(member => member.Type == "User"));
}
[Fact]
public async Task HitTheGitHubUserSearchApi()
{
var fixture = RestService.For<IGitHubApi>("https://api.github.com");
JsonConvert.DefaultSettings =
() => new JsonSerializerSettings { ContractResolver = new SnakeCasePropertyNamesContractResolver() };
var result = await fixture.FindUsers("tom repos:>42 followers:>1000");
Assert.True(result.TotalCount > 0);
Assert.True(result.Items.Any(member => member.Type == "User"));
}
[Fact]
public async Task HitTheGitHubUserApiAsObservable()
{
var fixture = RestService.For<IGitHubApi>("https://api.github.com");
JsonConvert.DefaultSettings =
() => new JsonSerializerSettings() { ContractResolver = new SnakeCasePropertyNamesContractResolver() };
var result = await fixture.GetUserObservable("octocat")
.Timeout(TimeSpan.FromSeconds(10));
Assert.Equal("octocat", result.Login);
Assert.False(String.IsNullOrEmpty(result.AvatarUrl));
}
[Fact]
public async Task HitTheGitHubUserApiAsObservableAndSubscribeAfterTheFact()
{
var fixture = RestService.For<IGitHubApi>("https://api.github.com");
JsonConvert.DefaultSettings =
() => new JsonSerializerSettings() { ContractResolver = new SnakeCasePropertyNamesContractResolver() };
var obs = fixture.GetUserObservable("octocat")
.Timeout(TimeSpan.FromSeconds(10));
// NB: We're gonna await twice, so that the 2nd await is definitely
// after the result has completed.
await obs;
var result2 = await obs;
Assert.Equal("octocat", result2.Login);
Assert.False(String.IsNullOrEmpty(result2.AvatarUrl));
}
[Fact]
public async Task HitTheGitHubUserApiWithSettingsObj()
{
JsonConvert.DefaultSettings = () => new JsonSerializerSettings() { ContractResolver = new CamelCasePropertyNamesContractResolver() };
var fixture = RestService.For<IGitHubApi>(
"https://api.github.com",
new RefitSettings{
JsonSerializerSettings = new JsonSerializerSettings() { ContractResolver = new SnakeCasePropertyNamesContractResolver() }
});
var result = await fixture.GetUser("octocat");
Assert.Equal("octocat", result.Login);
Assert.False(String.IsNullOrEmpty(result.AvatarUrl));
}
[Fact]
public async Task HitWithCamelCaseParameterWithSettingsObj()
{
JsonConvert.DefaultSettings = () => new JsonSerializerSettings() { ContractResolver = new CamelCasePropertyNamesContractResolver() };
var fixture = RestService.For<IGitHubApi>(
"https://api.github.com",
new RefitSettings
{
JsonSerializerSettings = new JsonSerializerSettings() { ContractResolver = new SnakeCasePropertyNamesContractResolver() }
});
var result = await fixture.GetUserCamelCase("octocat");
Assert.Equal("octocat", result.Login);
Assert.False(String.IsNullOrEmpty(result.AvatarUrl));
}
[Fact]
public async Task HitTheGitHubOrgMembersApiWithSettingsObj()
{
JsonConvert.DefaultSettings = () => new JsonSerializerSettings() { ContractResolver = new CamelCasePropertyNamesContractResolver() };
var fixture = RestService.For<IGitHubApi>(
"https://api.github.com",
new RefitSettings
{
JsonSerializerSettings = new JsonSerializerSettings() { ContractResolver = new SnakeCasePropertyNamesContractResolver() }
});
var result = await fixture.GetOrgMembers("github");
Assert.True(result.Count > 0);
Assert.True(result.Any(member => member.Type == "User"));
}
[Fact]
public async Task HitTheGitHubUserSearchApiWithSettingsObj()
{
JsonConvert.DefaultSettings = () => new JsonSerializerSettings() { ContractResolver = new CamelCasePropertyNamesContractResolver() };
var fixture = RestService.For<IGitHubApi>(
"https://api.github.com",
new RefitSettings
{
JsonSerializerSettings = new JsonSerializerSettings() { ContractResolver = new SnakeCasePropertyNamesContractResolver() }
});
var result = await fixture.FindUsers("tom repos:>42 followers:>1000");
Assert.True(result.TotalCount > 0);
Assert.True(result.Items.Any(member => member.Type == "User"));
}
[Fact]
public async Task HitTheGitHubUserApiAsObservableWithSettingsObj()
{
JsonConvert.DefaultSettings = () => new JsonSerializerSettings() { ContractResolver = new CamelCasePropertyNamesContractResolver() };
var fixture = RestService.For<IGitHubApi>(
"https://api.github.com",
new RefitSettings
{
JsonSerializerSettings = new JsonSerializerSettings() { ContractResolver = new SnakeCasePropertyNamesContractResolver() }
});
var result = await fixture.GetUserObservable("octocat")
.Timeout(TimeSpan.FromSeconds(10));
Assert.Equal("octocat", result.Login);
Assert.False(String.IsNullOrEmpty(result.AvatarUrl));
}
[Fact]
public async Task HitTheGitHubUserApiAsObservableAndSubscribeAfterTheFactWithSettingsObj()
{
JsonConvert.DefaultSettings = () => new JsonSerializerSettings() { ContractResolver = new CamelCasePropertyNamesContractResolver() };
var fixture = RestService.For<IGitHubApi>(
"https://api.github.com",
new RefitSettings
{
JsonSerializerSettings = new JsonSerializerSettings() { ContractResolver = new SnakeCasePropertyNamesContractResolver() }
});
var obs = fixture.GetUserObservable("octocat")
.Timeout(TimeSpan.FromSeconds(10));
// NB: We're gonna await twice, so that the 2nd await is definitely
// after the result has completed.
await obs;
var result2 = await obs;
Assert.Equal("octocat", result2.Login);
Assert.False(String.IsNullOrEmpty(result2.AvatarUrl));
}
[Fact]
public async Task TwoSubscriptionsResultInTwoRequests()
{
var input = new TestHttpMessageHandler();
            // We need to use a factory here to ensure each request gets its own HttpContent instance.
input.ContentFactory = () => new StringContent("test");
var client = new HttpClient(input) { BaseAddress = new Uri("http://foo") };
var fixture = RestService.For<IGitHubApi>(client);
Assert.Equal(0, input.MessagesSent);
var obs = fixture.GetIndexObservable()
.Timeout(TimeSpan.FromSeconds(10));
var result1 = await obs;
Assert.Equal(1, input.MessagesSent);
var result2 = await obs;
Assert.Equal(2, input.MessagesSent);
// NB: TestHttpMessageHandler returns what we tell it to ('test' by default)
Assert.True(result1.Contains("test"));
Assert.True(result2.Contains("test"));
}
[Fact]
public async Task ShouldRetHttpResponseMessage()
{
var fixture = RestService.For<IGitHubApi>("https://api.github.com");
var result = await fixture.GetIndex();
Assert.NotNull(result);
Assert.True(result.IsSuccessStatusCode);
}
[Fact]
public async Task HitTheNpmJs()
{
var fixture = RestService.For<INpmJs>("https://registry.npmjs.org");
var result = await fixture.GetCongruence();
Assert.Equal("congruence", result._id);
}
[Fact]
public async Task PostToRequestBin()
{
var fixture = RestService.For<IRequestBin>("http://httpbin.org/");
try {
await fixture.Post();
            } catch (ApiException) {
                // We should be good, but a 404 may have occurred.
            }
}
[Fact]
public async Task CanGetDataOutOfErrorResponses()
{
var fixture = RestService.For<IGitHubApi>("https://api.github.com");
try {
await fixture.NothingToSeeHere();
Assert.True(false);
} catch (ApiException exception) {
Assert.Equal(HttpStatusCode.NotFound, exception.StatusCode);
var content = exception.GetContentAs<Dictionary<string, string>>();
Assert.Equal("Not Found", content["message"]);
Assert.NotNull(content["documentation_url"]);
}
}
[Fact]
public void NonRefitInterfacesThrowMeaningfulExceptions()
{
try {
RestService.For<INoRefitHereBuddy>("http://example.com");
} catch (InvalidOperationException exception) {
Assert.StartsWith("INoRefitHereBuddy", exception.Message);
}
}
[Fact]
public async Task NonRefitMethodsThrowMeaningfulExceptions()
{
try {
var fixture = RestService.For<IAmHalfRefit>("http://example.com");
await fixture.Get();
} catch (NotImplementedException exception) {
Assert.Contains("no Refit HTTP method attribute", exception.Message);
}
}
[Fact]
public async Task GenericsWork()
{
var fixture = RestService.For<IHttpBinApi<HttpBinGet, string, int>>("http://httpbin.org/get");
var result = await fixture.Get("foo", 99);
Assert.Equal("http://httpbin.org/get?param=foo", result.Url);
Assert.Equal("foo", result.Args["param"]);
Assert.Equal("99", result.Headers["X-Refit"]);
}
[Fact]
public async Task ValueTypesArentValidButTheyWorkAnyway()
{
var handler = new TestHttpMessageHandler("true");
var fixture = RestService.For<IBrokenWebApi>(new HttpClient(handler) { BaseAddress = new Uri("http://nowhere.com") });
var result = await fixture.PostAValue("Does this work?");
            Assert.True(result);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Reflection;
using BenchmarkDotNet.Attributes;
using Microsoft.Extensions.DependencyInjection;
using Orleans;
using Orleans.Configuration;
using Orleans.Runtime.Configuration;
using Orleans.Serialization;
using Orleans.Serialization.ProtobufNet;
using UnitTests.GrainInterfaces;
namespace Benchmarks.Serialization
{
public enum SerializerToUse
{
Default,
IlBasedFallbackSerializer,
ProtoBufNet
}
[MemoryDiagnoser]
public class SerializationBenchmarks
{
private void InitializeSerializer(SerializerToUse serializerToUse)
{
Type fallback = null;
switch (serializerToUse)
{
case SerializerToUse.Default:
break;
case SerializerToUse.IlBasedFallbackSerializer:
fallback = typeof(ILBasedSerializer);
break;
case SerializerToUse.ProtoBufNet:
fallback = typeof(ProtobufNetSerializer);
break;
default:
throw new InvalidOperationException("Invalid Serializer was selected");
}
var client = new ClientBuilder()
.UseLocalhostClustering()
.Configure<ClusterOptions>(options =>
{
options.ClusterId = nameof(SerializationBenchmarks);
options.ServiceId = Guid.NewGuid().ToString();
})
.Configure<SerializationProviderOptions>(
options => options.FallbackSerializationProvider = fallback)
.Build();
this.serializationManager = client.ServiceProvider.GetRequiredService<SerializationManager>();
}
[Params(SerializerToUse.IlBasedFallbackSerializer, SerializerToUse.Default, SerializerToUse.ProtoBufNet)]
public SerializerToUse Serializer { get; set; }
private OuterClass.SomeConcreteClass complexClass;
private byte[] serializedBytes;
private LargeTestData largeTestData;
private SerializationManager serializationManager;
[GlobalSetup]
public void BenchmarkSetup()
{
this.InitializeSerializer(this.Serializer);
this.complexClass = OuterClass.GetPrivateClassInstance();
this.complexClass.Int = 89;
this.complexClass.String = Guid.NewGuid().ToString();
this.complexClass.NonSerializedInt = 39;
var classes = new List<SomeAbstractClass>
{
this.complexClass,
new AnotherConcreteClass
{
AnotherString = "hi",
Interfaces = new List<ISomeInterface>
{
this.complexClass
}
},
new AnotherConcreteClass(),
OuterClass.GetPrivateClassInstance()
};
this.complexClass.Classes = classes.ToArray();
this.complexClass.Enum = SomeAbstractClass.SomeEnum.Something;
this.complexClass.SetObsoleteInt(38);
this.complexClass.Struct = new SomeStruct(10)
{
Id = Guid.NewGuid(),
PublicValue = 6,
ValueWithPrivateGetter = 7
};
this.complexClass.Struct.SetValueWithPrivateSetter(8);
this.complexClass.Struct.SetPrivateValue(9);
this.largeTestData = new LargeTestData
{
Description = "This is a test. This is only a test. In the event of a real execution, this would contain actual data.",
EnumValue = TestEnum.First
};
this.largeTestData.SetBit(13);
this.largeTestData.SetEnemy(17, CampaignEnemyTestType.Enemy1);
this.serializedBytes = this.serializationManager.SerializeToByteArray(this.largeTestData);
}
[Benchmark]
public byte[] SerializerBenchmark()
{
return this.serializationManager.SerializeToByteArray(this.largeTestData);
}
[Benchmark]
public object DeserializerBenchmark()
{
return this.serializationManager.DeserializeFromByteArray<LargeTestData>(this.serializedBytes);
}
/// <summary>
/// Performs a full serialization loop using a type which has not had code generation performed.
/// </summary>
/// <returns></returns>
[Benchmark]
public object FallbackFullLoop()
{
return OrleansSerializationLoop(this.complexClass);
}
internal object OrleansSerializationLoop(object input, bool includeWire = true)
{
var copy = this.serializationManager.DeepCopy(input);
if (includeWire)
{
copy = this.serializationManager.RoundTripSerializationForTesting(copy);
}
return copy;
}
}
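    // Minimal launcher sketch (assumed; the project's real entry point is not shown in this
    // file). BenchmarkDotNet discovers the [Benchmark] methods above via reflection, so a
    // console host only needs a call like the one below. The class name is hypothetical.
    internal static class SerializationBenchmarksProgram
    {
        static void Main() =>
            BenchmarkDotNet.Running.BenchmarkRunner.Run<SerializationBenchmarks>();
    }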
[Serializable]
internal struct SomeStruct
{
public Guid Id { get; set; }
public int PublicValue { get; set; }
public int ValueWithPrivateSetter { get; private set; }
public int ValueWithPrivateGetter { private get; set; }
private int PrivateValue { get; set; }
public readonly int ReadonlyField;
public SomeStruct(int readonlyField)
: this()
{
this.ReadonlyField = readonlyField;
}
public int GetValueWithPrivateGetter()
{
return this.ValueWithPrivateGetter;
}
public int GetPrivateValue()
{
return this.PrivateValue;
}
public void SetPrivateValue(int value)
{
this.PrivateValue = value;
}
public void SetValueWithPrivateSetter(int value)
{
this.ValueWithPrivateSetter = value;
}
}
internal interface ISomeInterface { int Int { get; set; } }
[Serializable]
internal abstract class SomeAbstractClass : ISomeInterface
{
[NonSerialized]
private int nonSerializedIntField;
public abstract int Int { get; set; }
public List<ISomeInterface> Interfaces { get; set; }
public SomeAbstractClass[] Classes { get; set; }
[Obsolete("This field should not be serialized", true)]
public int ObsoleteIntWithError { get; set; }
[Obsolete("This field should be serialized")]
public int ObsoleteInt { get; set; }
#pragma warning disable 618
public int GetObsoleteInt() => this.ObsoleteInt;
public void SetObsoleteInt(int value)
{
this.ObsoleteInt = value;
}
#pragma warning restore 618
public SomeEnum Enum { get; set; }
public int NonSerializedInt
{
get
{
return this.nonSerializedIntField;
}
set
{
this.nonSerializedIntField = value;
}
}
[Serializable]
public enum SomeEnum
{
None,
Something,
SomethingElse
}
}
internal class OuterClass
{
public static SomeConcreteClass GetPrivateClassInstance() => new PrivateConcreteClass(Guid.NewGuid());
public static Type GetPrivateClassType() => typeof(PrivateConcreteClass);
[Serializable]
public class SomeConcreteClass : SomeAbstractClass
{
public override int Int { get; set; }
public string String { get; set; }
public SomeStruct Struct { get; set; }
private PrivateConcreteClass secretPrivateClass;
public void ConfigureSecretPrivateClass()
{
this.secretPrivateClass = new PrivateConcreteClass(Guid.NewGuid());
}
public bool AreSecretBitsIdentitcal(SomeConcreteClass other)
{
return other.secretPrivateClass?.Identity == this.secretPrivateClass?.Identity;
}
}
[Serializable]
private class PrivateConcreteClass : SomeConcreteClass
{
public PrivateConcreteClass(Guid identity)
{
this.Identity = identity;
}
public readonly Guid Identity;
}
}
[Serializable]
internal class AnotherConcreteClass : SomeAbstractClass
{
public override int Int { get; set; }
public string AnotherString { get; set; }
}
[Serializable]
internal class InnerType
{
public InnerType()
{
this.Id = Guid.NewGuid();
this.Something = this.Id.ToString();
}
public Guid Id { get; set; }
public string Something { get; set; }
public override bool Equals(object obj)
{
if (ReferenceEquals(null, obj)) return false;
if (ReferenceEquals(this, obj)) return true;
if (obj.GetType() != this.GetType()) return false;
return Equals((InnerType)obj);
}
protected bool Equals(InnerType other)
{
return this.Id.Equals(other.Id) && string.Equals(this.Something, other.Something);
}
public override int GetHashCode()
{
unchecked
{
return (this.Id.GetHashCode() * 397) ^ (this.Something?.GetHashCode() ?? 0);
}
}
}
}
| |
// ZlibStream.cs
// ------------------------------------------------------------------
//
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
// All rights reserved.
//
// This code module is part of DotNetZip, a zipfile class library.
//
// ------------------------------------------------------------------
//
// This code is licensed under the Microsoft Public License.
// See the file License.txt for the license details.
// More info on: http://dotnetzip.codeplex.com
//
// ------------------------------------------------------------------
//
// last saved (in emacs):
// Time-stamp: <2010-January-09 12:03:25>
//
// ------------------------------------------------------------------
//
// This module defines the ZlibStream class, which is similar in idea to
// the System.IO.Compression.DeflateStream and
// System.IO.Compression.GZipStream classes in the .NET BCL.
//
// ------------------------------------------------------------------
using System;
using System.IO;
namespace Ionic.Zlib
{
/// <summary>
/// Represents a Zlib stream for compression or decompression.
/// </summary>
/// <remarks>
///
/// <para>
/// The ZlibStream is a <see
/// href="http://en.wikipedia.org/wiki/Decorator_pattern">Decorator</see> on a <see
/// cref="System.IO.Stream"/>. It adds ZLIB compression or decompression to any
/// stream.
/// </para>
///
/// <para> Using this stream, applications can compress or decompress data via
    /// stream <c>Read()</c> and <c>Write()</c> operations. Either compression or
/// decompression can occur through either reading or writing. The compression
/// format used is ZLIB, which is documented in <see
/// href="http://www.ietf.org/rfc/rfc1950.txt">IETF RFC 1950</see>, "ZLIB Compressed
/// Data Format Specification version 3.3". This implementation of ZLIB always uses
/// DEFLATE as the compression method. (see <see
/// href="http://www.ietf.org/rfc/rfc1951.txt">IETF RFC 1951</see>, "DEFLATE
/// Compressed Data Format Specification version 1.3.") </para>
///
/// <para>
/// The ZLIB format allows for varying compression methods, window sizes, and dictionaries.
/// This implementation always uses the DEFLATE compression method, a preset dictionary,
/// and 15 window bits by default.
/// </para>
///
/// <para>
/// This class is similar to <see cref="DeflateStream"/>, except that it adds the
/// RFC1950 header and trailer bytes to a compressed stream when compressing, or expects
/// the RFC1950 header and trailer bytes when decompressing. It is also similar to the
/// <see cref="GZipStream"/>.
/// </para>
/// </remarks>
/// <seealso cref="DeflateStream" />
/// <seealso cref="GZipStream" />
public class ZlibStream : System.IO.Stream
{
internal ZlibBaseStream _baseStream;
bool _disposed;
/// <summary>
/// Create a <c>ZlibStream</c> using the specified <c>CompressionMode</c>.
/// </summary>
/// <remarks>
///
/// <para>
/// When mode is <c>CompressionMode.Compress</c>, the <c>ZlibStream</c> will use the
/// default compression level. The "captive" stream will be closed when the
/// <c>ZlibStream</c> is closed.
/// </para>
///
/// </remarks>
///
/// <example>
/// This example uses a <c>ZlibStream</c> to compress a file, and writes the compressed
/// data to another file.
/// <code>
/// using (System.IO.Stream input = System.IO.File.OpenRead(fileToCompress))
/// {
/// using (var raw = System.IO.File.Create(fileToCompress + ".zlib"))
/// {
/// using (Stream compressor = new ZlibStream(raw, CompressionMode.Compress))
/// {
/// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
/// int n;
/// while ((n= input.Read(buffer, 0, buffer.Length)) != 0)
/// {
/// compressor.Write(buffer, 0, n);
/// }
/// }
/// }
/// }
/// </code>
/// <code lang="VB">
/// Using input As Stream = File.OpenRead(fileToCompress)
/// Using raw As FileStream = File.Create(fileToCompress & ".zlib")
/// Using compressor As Stream = New ZlibStream(raw, CompressionMode.Compress)
/// Dim buffer As Byte() = New Byte(4096) {}
/// Dim n As Integer = -1
/// Do While (n <> 0)
/// If (n > 0) Then
/// compressor.Write(buffer, 0, n)
/// End If
/// n = input.Read(buffer, 0, buffer.Length)
/// Loop
/// End Using
/// End Using
/// End Using
/// </code>
/// </example>
///
/// <param name="stream">The stream which will be read or written.</param>
/// <param name="mode">Indicates whether the ZlibStream will compress or decompress.</param>
public ZlibStream(System.IO.Stream stream, CompressionMode mode)
: this(stream, mode, CompressionLevel.Default, false)
{
}
/// <summary>
/// Create a <c>ZlibStream</c> using the specified <c>CompressionMode</c> and
/// the specified <c>CompressionLevel</c>.
/// </summary>
///
/// <remarks>
///
/// <para>
/// When mode is <c>CompressionMode.Decompress</c>, the level parameter is ignored.
/// The "captive" stream will be closed when the <c>ZlibStream</c> is closed.
/// </para>
///
/// </remarks>
///
/// <example>
/// This example uses a <c>ZlibStream</c> to compress data from a file, and writes the
/// compressed data to another file.
///
/// <code>
/// using (System.IO.Stream input = System.IO.File.OpenRead(fileToCompress))
/// {
/// using (var raw = System.IO.File.Create(fileToCompress + ".zlib"))
/// {
/// using (Stream compressor = new ZlibStream(raw,
/// CompressionMode.Compress,
/// CompressionLevel.BestCompression))
/// {
/// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
/// int n;
/// while ((n= input.Read(buffer, 0, buffer.Length)) != 0)
/// {
/// compressor.Write(buffer, 0, n);
/// }
/// }
/// }
/// }
/// </code>
///
/// <code lang="VB">
/// Using input As Stream = File.OpenRead(fileToCompress)
/// Using raw As FileStream = File.Create(fileToCompress & ".zlib")
/// Using compressor As Stream = New ZlibStream(raw, CompressionMode.Compress, CompressionLevel.BestCompression)
/// Dim buffer As Byte() = New Byte(4096) {}
/// Dim n As Integer = -1
/// Do While (n <> 0)
/// If (n > 0) Then
/// compressor.Write(buffer, 0, n)
/// End If
/// n = input.Read(buffer, 0, buffer.Length)
/// Loop
/// End Using
/// End Using
/// End Using
/// </code>
/// </example>
///
/// <param name="stream">The stream to be read or written while deflating or inflating.</param>
/// <param name="mode">Indicates whether the ZlibStream will compress or decompress.</param>
/// <param name="level">A tuning knob to trade speed for effectiveness.</param>
public ZlibStream(System.IO.Stream stream, CompressionMode mode, CompressionLevel level)
: this(stream, mode, level, false)
{
}
/// <summary>
/// Create a <c>ZlibStream</c> using the specified <c>CompressionMode</c>, and
/// explicitly specify whether the captive stream should be left open after
/// Deflation or Inflation.
/// </summary>
///
/// <remarks>
///
/// <para>
/// When mode is <c>CompressionMode.Compress</c>, the <c>ZlibStream</c> will use
/// the default compression level.
/// </para>
///
/// <para>
/// This constructor allows the application to request that the captive stream
/// remain open after the deflation or inflation occurs. By default, after
/// <c>Close()</c> is called on the stream, the captive stream is also
/// closed. In some cases this is not desired, for example if the stream is a
/// <see cref="System.IO.MemoryStream"/> that will be re-read after
/// compression. Specify true for the <paramref name="leaveOpen"/> parameter to leave the stream
/// open.
/// </para>
///
/// <para>
/// See the other overloads of this constructor for example code.
/// </para>
///
/// </remarks>
///
/// <param name="stream">The stream which will be read or written. This is called the
/// "captive" stream in other places in this documentation.</param>
/// <param name="mode">Indicates whether the ZlibStream will compress or decompress.</param>
/// <param name="leaveOpen">true if the application would like the stream to remain
/// open after inflation/deflation.</param>
public ZlibStream(System.IO.Stream stream, CompressionMode mode, bool leaveOpen)
: this(stream, mode, CompressionLevel.Default, leaveOpen)
{
}
/// <summary>
/// Create a <c>ZlibStream</c> using the specified <c>CompressionMode</c> and
/// the specified <c>CompressionLevel</c>, and explicitly specify whether the
/// stream should be left open after Deflation or Inflation.
/// </summary>
///
/// <remarks>
///
/// <para>
/// This constructor allows the application to request that the captive stream
/// remain open after the deflation or inflation occurs. By default, after
/// <c>Close()</c> is called on the stream, the captive stream is also closed. In
/// some cases this is not desired, for example if the stream is a <see
/// cref="System.IO.MemoryStream"/> that will be re-read after compression.
/// Specify true for the <paramref name="leaveOpen"/> parameter to leave the stream open.
/// </para>
///
/// <para>
/// When mode is <c>CompressionMode.Decompress</c>, the level parameter is ignored.
/// </para>
///
/// </remarks>
///
/// <example>
/// This example shows how to use a ZlibStream to compress the data from a file,
/// and store the result into another file. The filestream remains open to allow
/// additional data to be written to it.
/// <code>
/// using (var output = System.IO.File.Create(fileToCompress + ".zlib"))
/// {
/// using (System.IO.Stream input = System.IO.File.OpenRead(fileToCompress))
/// {
/// using (Stream compressor = new ZlibStream(output, CompressionMode.Compress, CompressionLevel.BestCompression, true))
/// {
/// byte[] buffer = new byte[WORKING_BUFFER_SIZE];
/// int n;
/// while ((n= input.Read(buffer, 0, buffer.Length)) != 0)
/// {
/// compressor.Write(buffer, 0, n);
/// }
/// }
/// }
/// // can write additional data to the output stream here
/// }
/// </code>
/// <code lang="VB">
/// Using output As FileStream = File.Create(fileToCompress & ".zlib")
/// Using input As Stream = File.OpenRead(fileToCompress)
/// Using compressor As Stream = New ZlibStream(output, CompressionMode.Compress, CompressionLevel.BestCompression, True)
/// Dim buffer As Byte() = New Byte(4096) {}
/// Dim n As Integer = -1
/// Do While (n <> 0)
/// If (n > 0) Then
/// compressor.Write(buffer, 0, n)
/// End If
/// n = input.Read(buffer, 0, buffer.Length)
/// Loop
/// End Using
/// End Using
/// ' can write additional data to the output stream here.
/// End Using
/// </code>
/// </example>
///
/// <param name="stream">The stream which will be read or written.</param>
///
/// <param name="mode">Indicates whether the ZlibStream will compress or decompress.</param>
///
/// <param name="leaveOpen">
/// true if the application would like the stream to remain open after inflation/deflation.
/// </param>
///
/// <param name="level">
/// A tuning knob to trade speed for effectiveness. This parameter is effective only when
/// mode is <c>CompressionMode.Compress</c>.
/// </param>
public ZlibStream(System.IO.Stream stream, CompressionMode mode, CompressionLevel level, bool leaveOpen)
{
_baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.ZLIB, leaveOpen);
}
#region Zlib properties
/// <summary>
/// This property sets the flush behavior on the stream.
/// See the <see cref="FlushType"/> enumeration for the available settings and their effects.
/// </summary>
virtual public FlushType FlushMode
{
get { return (this._baseStream._flushMode); }
set
{
if (_disposed) throw new ObjectDisposedException("ZlibStream");
this._baseStream._flushMode = value;
}
}
/// <summary>
/// The size of the working buffer for the compression codec.
/// </summary>
///
/// <remarks>
/// <para>
/// The working buffer is used for all stream operations. The default size is
/// 1024 bytes. The minimum size is 128 bytes. You may get better performance
/// with a larger buffer. Then again, you might not. You would have to test
/// it.
/// </para>
///
/// <para>
/// Set this before the first call to <c>Read()</c> or <c>Write()</c> on the
/// stream. If you try to set it afterwards, it will throw.
/// </para>
/// </remarks>
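/// <example>
/// A minimal sketch of setting the buffer size before the first use of the stream
/// (the output stream, data array, and the 8192-byte size are illustrative only):
/// <code>
/// using (var compressor = new ZlibStream(output, CompressionMode.Compress))
/// {
///     compressor.BufferSize = 8192; // must be set before the first Read() or Write()
///     compressor.Write(data, 0, data.Length);
/// }
/// </code>
/// </example>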
public int BufferSize
{
get
{
return this._baseStream._bufferSize;
}
set
{
if (_disposed) throw new ObjectDisposedException("ZlibStream");
if (this._baseStream._workingBuffer != null)
throw new ZlibException("The working buffer is already set.");
if (value < ZlibConstants.WorkingBufferSizeMin)
throw new ZlibException(String.Format("Don't be silly. {0} bytes?? Use a bigger buffer, at least {1}.", value, ZlibConstants.WorkingBufferSizeMin));
this._baseStream._bufferSize = value;
}
}
/// <summary> Returns the total number of bytes input so far.</summary>
virtual public long TotalIn
{
get { return this._baseStream._z.TotalBytesIn; }
}
/// <summary> Returns the total number of bytes output so far.</summary>
virtual public long TotalOut
{
get { return this._baseStream._z.TotalBytesOut; }
}
#endregion
#region System.IO.Stream methods
/// <summary>
/// Dispose the stream.
/// </summary>
/// <remarks>
/// This may or may not result in a <c>Close()</c> call on the captive stream.
/// See the constructors that have a <c>leaveOpen</c> parameter for more information.
/// </remarks>
protected override void Dispose(bool disposing)
{
try
{
if (!_disposed)
{
if (disposing && (this._baseStream != null))
this._baseStream.Close();
_disposed = true;
}
}
finally
{
base.Dispose(disposing);
}
}
/// <summary>
/// Indicates whether the stream can be read.
/// </summary>
/// <remarks>
/// The return value depends on whether the captive stream supports reading.
/// </remarks>
public override bool CanRead
{
get
{
if (_disposed) throw new ObjectDisposedException("ZlibStream");
return _baseStream._stream.CanRead;
}
}
/// <summary>
/// Indicates whether the stream supports Seek operations.
/// </summary>
/// <remarks>
/// Always returns false.
/// </remarks>
public override bool CanSeek
{
get { return false; }
}
/// <summary>
/// Indicates whether the stream can be written.
/// </summary>
/// <remarks>
/// The return value depends on whether the captive stream supports writing.
/// </remarks>
public override bool CanWrite
{
get
{
if (_disposed) throw new ObjectDisposedException("ZlibStream");
return _baseStream._stream.CanWrite;
}
}
/// <summary>
/// Flush the stream.
/// </summary>
public override void Flush()
{
if (_disposed) throw new ObjectDisposedException("ZlibStream");
_baseStream.Flush();
}
/// <summary>
/// Reading this property always throws a <see cref="NotImplementedException"/>.
/// </summary>
public override long Length
{
get { throw new NotImplementedException(); }
}
/// <summary>
/// The position of the stream pointer.
/// </summary>
///
/// <remarks>
/// Setting this property always throws a <see
/// cref="NotImplementedException"/>. Reading will return the total bytes
/// written out, if used in writing, or the total bytes read in, if used in
/// reading. The count may refer to compressed bytes or uncompressed bytes,
/// depending on how you've used the stream.
/// </remarks>
public override long Position
{
get
{
if (this._baseStream._streamMode == Ionic.Zlib.ZlibBaseStream.StreamMode.Writer)
return this._baseStream._z.TotalBytesOut;
if (this._baseStream._streamMode == Ionic.Zlib.ZlibBaseStream.StreamMode.Reader)
return this._baseStream._z.TotalBytesIn;
return 0;
}
set { throw new NotImplementedException(); }
}
/// <summary>
/// Read data from the stream.
/// </summary>
///
/// <remarks>
///
/// <para>
/// If you wish to use the <c>ZlibStream</c> to compress data while reading,
/// you can create a <c>ZlibStream</c> with <c>CompressionMode.Compress</c>,
/// providing an uncompressed data stream. Then call <c>Read()</c> on that
/// <c>ZlibStream</c>, and the data read will be compressed. If you wish to
/// use the <c>ZlibStream</c> to decompress data while reading, you can create
/// a <c>ZlibStream</c> with <c>CompressionMode.Decompress</c>, providing a
/// readable compressed data stream. Then call <c>Read()</c> on that
/// <c>ZlibStream</c>, and the data will be decompressed as it is read.
/// </para>
///
/// <para>
/// A <c>ZlibStream</c> can be used for <c>Read()</c> or <c>Write()</c>, but
/// not both.
/// </para>
///
/// </remarks>
/// <param name="buffer">The buffer into which the read data should be placed.</param>
/// <param name="offset">the offset within that data array to put the first byte read.</param>
/// <param name="count">the number of bytes to read.</param>
public override int Read(byte[] buffer, int offset, int count)
{
if (_disposed) throw new ObjectDisposedException("ZlibStream");
return _baseStream.Read(buffer, offset, count);
}
/// <summary>
/// Calling this method always throws a <see cref="NotImplementedException"/>.
/// </summary>
public override long Seek(long offset, System.IO.SeekOrigin origin)
{
throw new NotImplementedException();
}
/// <summary>
/// Calling this method always throws a <see cref="NotImplementedException"/>.
/// </summary>
public override void SetLength(long value)
{
throw new NotImplementedException();
}
/// <summary>
/// Write data to the stream.
/// </summary>
///
/// <remarks>
///
/// <para>
/// If you wish to use the <c>ZlibStream</c> to compress data while writing,
/// you can create a <c>ZlibStream</c> with <c>CompressionMode.Compress</c>,
/// and a writable output stream. Then call <c>Write()</c> on that
/// <c>ZlibStream</c>, providing uncompressed data as input. The data sent to
/// the output stream will be the compressed form of the data written. If you
/// wish to use the <c>ZlibStream</c> to decompress data while writing, you
/// can create a <c>ZlibStream</c> with <c>CompressionMode.Decompress</c>, and a
/// writable output stream. Then call <c>Write()</c> on that stream,
/// providing previously compressed data. The data sent to the output stream
/// will be the decompressed form of the data written.
/// </para>
///
/// <para>
/// A <c>ZlibStream</c> can be used for <c>Read()</c> or <c>Write()</c>, but not both.
/// </para>
/// </remarks>
/// <param name="buffer">The buffer holding data to write to the stream.</param>
/// <param name="offset">the offset within that data array to find the first byte to write.</param>
/// <param name="count">the number of bytes to write.</param>
public override void Write(byte[] buffer, int offset, int count)
{
if (_disposed) throw new ObjectDisposedException("ZlibStream");
_baseStream.Write(buffer, offset, count);
}
#endregion
/// <summary>
/// Compress a string into a byte array using ZLIB.
/// </summary>
///
/// <remarks>
/// Uncompress it with <see cref="ZlibStream.UncompressString(byte[])"/>.
/// </remarks>
///
/// <seealso cref="ZlibStream.UncompressString(byte[])"/>
/// <seealso cref="ZlibStream.CompressBuffer(byte[])"/>
///
/// <param name="s">
/// A string to compress. The string will first be encoded
/// using UTF8, then compressed.
/// </param>
///
/// <returns>The string in compressed form</returns>
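/// <example>
/// A minimal round-trip sketch (the literal text is illustrative only):
/// <code>
/// byte[] compressed = ZlibStream.CompressString("Hello, ZLIB!");
/// string roundTripped = ZlibStream.UncompressString(compressed);
/// // roundTripped is "Hello, ZLIB!" again
/// </code>
/// </example>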
public static byte[] CompressString(String s)
{
using (var ms = new MemoryStream())
{
Stream compressor =
new ZlibStream(ms, CompressionMode.Compress, CompressionLevel.BestCompression);
ZlibBaseStream.CompressString(s, compressor);
return ms.ToArray();
}
}
/// <summary>
/// Compress a byte array into a new byte array using ZLIB.
/// </summary>
///
/// <remarks>
/// Uncompress it with <see cref="ZlibStream.UncompressBuffer(byte[])"/>.
/// </remarks>
///
/// <seealso cref="ZlibStream.CompressString(string)"/>
/// <seealso cref="ZlibStream.UncompressBuffer(byte[])"/>
///
/// <param name="b">
/// A buffer to compress.
/// </param>
///
/// <returns>The data in compressed form</returns>
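/// <example>
/// A minimal round-trip sketch (the source array is illustrative only):
/// <code>
/// byte[] original = System.Text.Encoding.UTF8.GetBytes("some data to compress");
/// byte[] compressed = ZlibStream.CompressBuffer(original);
/// byte[] restored = ZlibStream.UncompressBuffer(compressed);
/// // restored contains the same bytes as original
/// </code>
/// </example>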
public static byte[] CompressBuffer(byte[] b)
{
using (var ms = new MemoryStream())
{
Stream compressor =
new ZlibStream( ms, CompressionMode.Compress, CompressionLevel.BestCompression );
ZlibBaseStream.CompressBuffer(b, compressor);
return ms.ToArray();
}
}
/// <summary>
/// Uncompress a ZLIB-compressed byte array into a single string.
/// </summary>
///
/// <seealso cref="ZlibStream.CompressString(String)"/>
/// <seealso cref="ZlibStream.UncompressBuffer(byte[])"/>
///
/// <param name="compressed">
/// A buffer containing ZLIB-compressed data.
/// </param>
///
/// <returns>The uncompressed string</returns>
public static String UncompressString(byte[] compressed)
{
using (var input = new MemoryStream(compressed))
{
Stream decompressor =
new ZlibStream(input, CompressionMode.Decompress);
return ZlibBaseStream.UncompressString(compressed, decompressor);
}
}
/// <summary>
/// Uncompress a ZLIB-compressed byte array into a byte array.
/// </summary>
///
/// <seealso cref="ZlibStream.CompressBuffer(byte[])"/>
/// <seealso cref="ZlibStream.UncompressString(byte[])"/>
///
/// <param name="compressed">
/// A buffer containing ZLIB-compressed data.
/// </param>
///
/// <returns>The data in uncompressed form</returns>
public static byte[] UncompressBuffer(byte[] compressed)
{
using (var input = new MemoryStream(compressed))
{
Stream decompressor =
new ZlibStream( input, CompressionMode.Decompress );
return ZlibBaseStream.UncompressBuffer(compressed, decompressor);
}
}
}
}
| |
/*
Project Orleans Cloud Service SDK ver. 1.0
Copyright (c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the ""Software""), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
using System;
using System.Linq;
using System.Collections.Generic;
using System.Collections.Concurrent;
using System.Threading.Tasks;
using System.Threading;
using Orleans.Runtime.Configuration;
namespace Orleans.Runtime.Placement
{
internal class ActivationCountPlacementDirector : RandomPlacementDirector, ISiloStatisticsChangeListener
{
private class CachedLocalStat
{
public SiloAddress Address { get; private set; }
public SiloRuntimeStatistics SiloStats { get; private set; }
public int ActivationCount { get { return activationCount; } }
private int activationCount;
internal CachedLocalStat(SiloAddress address, SiloRuntimeStatistics siloStats)
{
Address = address;
SiloStats = siloStats;
}
public void IncrementActivationCount(int delta)
{
Interlocked.Add(ref activationCount, delta);
}
}
// internal for unit tests
internal Func<PlacementStrategy, GrainId, IPlacementContext, Task<PlacementResult>> SelectSilo;
// Track created activations on this silo between statistic intervals.
private readonly ConcurrentDictionary<SiloAddress, CachedLocalStat> localCache = new ConcurrentDictionary<SiloAddress, CachedLocalStat>();
private readonly TraceLogger logger;
private readonly bool useLocalCache = true;
// For: SelectSiloPowerOfK
private readonly SafeRandom random = new SafeRandom();
private int chooseHowMany = 2;
public ActivationCountPlacementDirector()
{
logger = TraceLogger.GetLogger(this.GetType().Name);
}
public void Initialize(GlobalConfiguration globalConfig)
{
DeploymentLoadPublisher.Instance.SubscribeToStatisticsChangeEvents(this);
SelectSilo = SelectSiloPowerOfK;
if (globalConfig.ActivationCountBasedPlacementChooseOutOf <= 0)
throw new ArgumentException("GlobalConfig.ActivationCountBasedPlacementChooseOutOf is " + globalConfig.ActivationCountBasedPlacementChooseOutOf);
chooseHowMany = globalConfig.ActivationCountBasedPlacementChooseOutOf;
}
private static bool IsSiloOverloaded(SiloRuntimeStatistics stats)
{
return stats.IsOverloaded || stats.CpuUsage >= 100;
}
private int SiloLoad_ByActivations(CachedLocalStat cachedStats)
{
return useLocalCache ?
cachedStats.ActivationCount + cachedStats.SiloStats.ActivationCount :
cachedStats.SiloStats.ActivationCount;
}
private int SiloLoad_ByRecentActivations(CachedLocalStat cachedStats)
{
return useLocalCache ?
cachedStats.ActivationCount + cachedStats.SiloStats.RecentlyUsedActivationCount :
cachedStats.SiloStats.RecentlyUsedActivationCount;
}
private Task<PlacementResult> MakePlacement(PlacementStrategy strategy, GrainId grain, IPlacementContext context, CachedLocalStat minLoadedSilo)
{
// Increment the local activation count by the number of silos instead of by one.
// This is our trick to get more balanced placement, accounting for the likely
// case where multiple silos place activations on the same target at the same time, before stats are refreshed.
minLoadedSilo.IncrementActivationCount(localCache.Count);
return Task.FromResult(PlacementResult.SpecifyCreation(
minLoadedSilo.Address,
strategy,
context.GetGrainTypeName(grain)));
}
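/// <summary>
/// Selects a placement target using the "power of k choices" heuristic: up to
/// <c>chooseHowMany</c> non-overloaded silos are sampled at random and the least
/// loaded silo of that sample (by recent activations) is chosen.
/// </summary>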
public Task<PlacementResult> SelectSiloPowerOfK(PlacementStrategy strategy, GrainId grain, IPlacementContext context)
{
// Exclude overloaded silos
var relevantSilos = new List<CachedLocalStat>();
foreach (CachedLocalStat current in localCache.Values)
{
if (IsSiloOverloaded(current.SiloStats)) continue;
relevantSilos.Add(current);
}
if (relevantSilos.Count > 0)
{
int chooseFrom = Math.Min(relevantSilos.Count, chooseHowMany);
var chooseFromThoseSilos = new List<CachedLocalStat>();
while (chooseFromThoseSilos.Count < chooseFrom)
{
int index = random.Next(relevantSilos.Count);
var pickedSilo = relevantSilos[index];
relevantSilos.RemoveAt(index);
chooseFromThoseSilos.Add(pickedSilo);
}
CachedLocalStat minLoadedSilo = chooseFromThoseSilos.First();
foreach (CachedLocalStat s in chooseFromThoseSilos)
{
if (SiloLoad_ByRecentActivations(s) < SiloLoad_ByRecentActivations(minLoadedSilo))
minLoadedSilo = s;
}
return MakePlacement(strategy, grain, context, minLoadedSilo);
}
var debugLog = string.Format("Unable to select a candidate from {0} silos: {1}", localCache.Count,
Utils.EnumerableToString(
localCache,
kvp => String.Format("SiloAddress = {0} -> {1}", kvp.Key.ToString(), kvp.Value.ToString())));
logger.Warn(ErrorCode.Placement_ActivationCountBasedDirector_NoSilos, debugLog);
throw new OrleansException(debugLog);
}
/// <summary>
/// Selects the best match from list of silos, updates local statistics.
/// </summary>
/// <note>
/// This is equivalent to SelectSiloPowerOfK() with chooseHowMany = #Silos
/// </note>
private Task<PlacementResult> SelectSiloGreedy(PlacementStrategy strategy, GrainId grain, IPlacementContext context)
{
int minLoad = int.MaxValue;
CachedLocalStat minLoadedSilo = null;
foreach (CachedLocalStat current in localCache.Values)
{
if (IsSiloOverloaded(current.SiloStats)) continue;
int load = SiloLoad_ByRecentActivations(current);
if (load >= minLoad) continue;
minLoadedSilo = current;
minLoad = load;
}
if (minLoadedSilo != null)
return MakePlacement(strategy, grain, context, minLoadedSilo);
var debugLog = string.Format("Unable to select a candidate from {0} silos: {1}", localCache.Count,
Utils.EnumerableToString(
localCache,
kvp => String.Format("SiloAddress = {0} -> {1}", kvp.Key.ToString(), kvp.Value.ToString())));
logger.Warn(ErrorCode.Placement_ActivationCountBasedDirector_NoSilos, debugLog);
throw new OrleansException(debugLog);
}
internal override Task<PlacementResult> OnAddActivation(
PlacementStrategy strategy, GrainId grain, IPlacementContext context)
{
return SelectSilo(strategy, grain, context);
}
public void SiloStatisticsChangeNotification(SiloAddress updatedSilo, SiloRuntimeStatistics newSiloStats)
{
// just create a new empty CachedLocalStat and throw away the old one.
var updatedCacheEntry = new CachedLocalStat(updatedSilo, newSiloStats);
localCache.AddOrUpdate(updatedSilo, k => updatedCacheEntry, (k, v) => updatedCacheEntry);
}
public void RemoveSilo(SiloAddress removedSilo)
{
CachedLocalStat ignore;
localCache.TryRemove(removedSilo, out ignore);
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Timers;
using OpenMetaverse;
using log4net;
using Nini.Config;
using OpenSim.Framework;
using OpenSim.Region.CoreModules.Framework.InterfaceCommander;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using System.Xml;
using System.Xml.Serialization;
using System.IO;
namespace OpenSim.Region.OptionalModules.World.TreePopulator
{
/// <summary>
/// Version 2.02 - Still hacky
/// </summary>
public class TreePopulatorModule : IRegionModule, ICommandableModule, IVegetationModule
{
private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
private readonly Commander m_commander = new Commander("tree");
private Scene m_scene;
[XmlRootAttribute(ElementName = "Copse", IsNullable = false)]
public class Copse
{
public string m_name;
public Boolean m_frozen;
public Tree m_tree_type;
public int m_tree_quantity;
public float m_treeline_low;
public float m_treeline_high;
public Vector3 m_seed_point;
public double m_range;
public Vector3 m_initial_scale;
public Vector3 m_maximum_scale;
public Vector3 m_rate;
[XmlIgnore]
public Boolean m_planted;
[XmlIgnore]
public List<UUID> m_trees;
public Copse()
{
}
public Copse(string fileName, Boolean planted)
{
Copse cp = (Copse)DeserializeObject(fileName);
this.m_name = cp.m_name;
this.m_frozen = cp.m_frozen;
this.m_tree_quantity = cp.m_tree_quantity;
this.m_treeline_high = cp.m_treeline_high;
this.m_treeline_low = cp.m_treeline_low;
this.m_range = cp.m_range;
this.m_tree_type = cp.m_tree_type;
this.m_seed_point = cp.m_seed_point;
this.m_initial_scale = cp.m_initial_scale;
this.m_maximum_scale = cp.m_maximum_scale;
this.m_rate = cp.m_rate;
this.m_planted = planted;
this.m_trees = new List<UUID>();
}
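// Parses a copse definition string in the format produced by ToString(): a frozen-state
// character ('F' = frozen, 'A' = active) followed by "TPM:" and the semicolon-separated
// fields name; quantity; treeline high; treeline low; range; tree type; seed point;
// initial scale; maximum scale; rate. (Descriptive summary only; see ToString() below.)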
public Copse(string copsedef)
{
char[] delimiterChars = {':', ';'};
string[] field = copsedef.Split(delimiterChars);
this.m_name = field[1].Trim();
this.m_frozen = (copsedef[0] == 'F');
this.m_tree_quantity = int.Parse(field[2]);
this.m_treeline_high = float.Parse(field[3], Culture.NumberFormatInfo);
this.m_treeline_low = float.Parse(field[4], Culture.NumberFormatInfo);
this.m_range = double.Parse(field[5], Culture.NumberFormatInfo);
this.m_tree_type = (Tree) Enum.Parse(typeof(Tree),field[6]);
this.m_seed_point = Vector3.Parse(field[7]);
this.m_initial_scale = Vector3.Parse(field[8]);
this.m_maximum_scale = Vector3.Parse(field[9]);
this.m_rate = Vector3.Parse(field[10]);
this.m_planted = true;
this.m_trees = new List<UUID>();
}
public Copse(string name, int quantity, float high, float low, double range, Vector3 point, Tree type, Vector3 scale, Vector3 max_scale, Vector3 rate, List<UUID> trees)
{
this.m_name = name;
this.m_frozen = false;
this.m_tree_quantity = quantity;
this.m_treeline_high = high;
this.m_treeline_low = low;
this.m_range = range;
this.m_tree_type = type;
this.m_seed_point = point;
this.m_initial_scale = scale;
this.m_maximum_scale = max_scale;
this.m_rate = rate;
this.m_planted = false;
this.m_trees = trees;
}
public override string ToString()
{
string frozen = (this.m_frozen ? "F" : "A");
return string.Format("{0}TPM: {1}; {2}; {3:0.0}; {4:0.0}; {5:0.0}; {6}; {7:0.0}; {8:0.0}; {9:0.0}; {10:0.00};",
frozen,
this.m_name,
this.m_tree_quantity,
this.m_treeline_high,
this.m_treeline_low,
this.m_range,
this.m_tree_type,
this.m_seed_point.ToString(),
this.m_initial_scale.ToString(),
this.m_maximum_scale.ToString(),
this.m_rate.ToString());
}
}
private List<Copse> m_copse;
private double m_update_ms = 1000.0; // msec between updates
private bool m_active_trees = false;
Timer CalculateTrees;
#region ICommandableModule Members
public ICommander CommandInterface
{
get { return m_commander; }
}
#endregion
#region IRegionModule Members
public void Initialise(Scene scene, IConfigSource config)
{
m_scene = scene;
m_scene.RegisterModuleInterface<IRegionModule>(this);
m_scene.EventManager.OnPluginConsole += EventManager_OnPluginConsole;
// ini file settings
try
{
m_active_trees = config.Configs["Trees"].GetBoolean("active_trees", m_active_trees);
}
catch (Exception)
{
m_log.Debug("[TREES]: ini failure for active_trees - using default");
}
try
{
m_update_ms = config.Configs["Trees"].GetDouble("update_rate", m_update_ms);
}
catch (Exception)
{
m_log.Debug("[TREES]: ini failure for update_rate - using default");
}
InstallCommands();
m_log.Debug("[TREES]: Initialised tree module");
}
public void PostInitialise()
{
ReloadCopse();
if (m_copse.Count > 0)
m_log.Info("[TREES]: Copse load complete");
if (m_active_trees)
activeizeTreeze(true);
}
public void Close()
{
}
public string Name
{
get { return "TreePopulatorModule"; }
}
public bool IsSharedModule
{
get { return false; }
}
#endregion
//--------------------------------------------------------------
#region ICommandableModule Members
private void HandleTreeActive(Object[] args)
{
if ((Boolean)args[0] && !m_active_trees)
{
m_log.InfoFormat("[TREES]: Activating Trees");
m_active_trees = true;
activeizeTreeze(m_active_trees);
}
else if (!(Boolean)args[0] && m_active_trees)
{
m_log.InfoFormat("[TREES]: Trees module is no longer active");
m_active_trees = false;
activeizeTreeze(m_active_trees);
}
else
{
m_log.InfoFormat("[TREES]: Trees module is already in the required state");
}
}
private void HandleTreeFreeze(Object[] args)
{
string copsename = ((string)args[0]).Trim();
Boolean freezeState = (Boolean) args[1];
foreach (Copse cp in m_copse)
{
if (cp.m_name == copsename && (!cp.m_frozen && freezeState || cp.m_frozen && !freezeState))
{
cp.m_frozen = freezeState;
foreach (UUID tree in cp.m_trees)
{
SceneObjectPart sop = ((SceneObjectGroup)m_scene.Entities[tree]).RootPart;
sop.Name = (freezeState ? sop.Name.Replace("ATPM", "FTPM") : sop.Name.Replace("FTPM", "ATPM"));
sop.ParentGroup.HasGroupChanged = true;
}
m_log.InfoFormat("[TREES]: Activity for copse {0} is frozen {1}", copsename, freezeState);
return;
}
else if (cp.m_name == copsename && (cp.m_frozen && freezeState || !cp.m_frozen && !freezeState))
{
m_log.InfoFormat("[TREES]: Copse {0} is already in the requested freeze state", copsename);
return;
}
}
m_log.InfoFormat("[TREES]: Copse {0} was not found - command failed", copsename);
}
private void HandleTreeLoad(Object[] args)
{
Copse copse;
m_log.InfoFormat("[TREES]: Loading copse definition....");
copse = new Copse(((string)args[0]), false);
foreach (Copse cp in m_copse)
{
if (cp.m_name == copse.m_name)
{
m_log.InfoFormat("[TREES]: Copse: {0} is already defined - command failed", copse.m_name);
return;
}
}
m_copse.Add(copse);
m_log.InfoFormat("[TREES]: Loaded copse: {0}", copse.ToString());
}
private void HandleTreePlant(Object[] args)
{
string copsename = ((string)args[0]).Trim();
m_log.InfoFormat("[TREES]: New tree planting for copse {0}", copsename);
UUID uuid = m_scene.RegionInfo.EstateSettings.EstateOwner;
foreach (Copse copse in m_copse)
{
if (copse.m_name == copsename)
{
if (!copse.m_planted)
{
// The first tree for a copse is created here
CreateTree(uuid, copse, copse.m_seed_point);
copse.m_planted = true;
return;
}
else
{
m_log.InfoFormat("[TREES]: Copse {0} has already been planted", copsename);
}
}
}
m_log.InfoFormat("[TREES]: Copse {0} not found for planting", copsename);
}
private void HandleTreeRate(Object[] args)
{
m_update_ms = (double)args[0];
if (m_update_ms >= 1000.0)
{
if (m_active_trees)
{
activeizeTreeze(false);
activeizeTreeze(true);
}
m_log.InfoFormat("[TREES]: Update rate set to {0} mSec", m_update_ms);
}
else
{
m_log.InfoFormat("[TREES]: minimum rate is 1000.0 mSec - command failed");
}
}
private void HandleTreeReload(Object[] args)
{
if (m_active_trees)
{
CalculateTrees.Stop();
}
ReloadCopse();
if (m_active_trees)
{
CalculateTrees.Start();
}
}
private void HandleTreeRemove(Object[] args)
{
string copsename = ((string)args[0]).Trim();
Copse copseIdentity = null;
foreach (Copse cp in m_copse)
{
if (cp.m_name == copsename)
{
copseIdentity = cp;
}
}
if (copseIdentity != null)
{
foreach (UUID tree in copseIdentity.m_trees)
{
if (m_scene.Entities.ContainsKey(tree))
{
SceneObjectPart selectedTree = ((SceneObjectGroup)m_scene.Entities[tree]).RootPart;
m_scene.DeleteSceneObject(selectedTree.ParentGroup, false);
m_scene.ForEachClient(delegate(IClientAPI controller)
{
controller.SendKillObject(m_scene.RegionInfo.RegionHandle,
selectedTree.LocalId);
});
}
else
{
m_log.DebugFormat("[TREES]: Tree not in scene {0}", tree);
}
}
copseIdentity.m_trees = new List<UUID>();
m_copse.Remove(copseIdentity);
m_log.InfoFormat("[TREES]: Copse {0} has been removed", copsename);
}
else
{
m_log.InfoFormat("[TREES]: Copse {0} was not found - command failed", copsename);
}
}
private void HandleTreeStatistics(Object[] args)
{
m_log.InfoFormat("[TREES]: Activity State: {0}; Update Rate: {1}", m_active_trees, m_update_ms);
foreach (Copse cp in m_copse)
{
m_log.InfoFormat("[TREES]: Copse {0}; {1} trees; frozen {2}", cp.m_name, cp.m_trees.Count, cp.m_frozen);
}
}
private void InstallCommands()
{
Command treeActiveCommand =
new Command("active", CommandIntentions.COMMAND_HAZARDOUS, HandleTreeActive, "Change activity state for the trees module");
treeActiveCommand.AddArgument("activeTF", "The required activity state", "Boolean");
Command treeFreezeCommand =
new Command("freeze", CommandIntentions.COMMAND_HAZARDOUS, HandleTreeFreeze, "Freeze/Unfreeze activity for a defined copse");
treeFreezeCommand.AddArgument("copse", "The required copse", "String");
treeFreezeCommand.AddArgument("freezeTF", "The required freeze state", "Boolean");
Command treeLoadCommand =
new Command("load", CommandIntentions.COMMAND_HAZARDOUS, HandleTreeLoad, "Load a copse definition from an xml file");
treeLoadCommand.AddArgument("filename", "The (xml) file you wish to load", "String");
Command treePlantCommand =
new Command("plant", CommandIntentions.COMMAND_HAZARDOUS, HandleTreePlant, "Start the planting on a copse");
treePlantCommand.AddArgument("copse", "The required copse", "String");
Command treeRateCommand =
new Command("rate", CommandIntentions.COMMAND_HAZARDOUS, HandleTreeRate, "Reset the tree update rate (mSec)");
treeRateCommand.AddArgument("updateRate", "The required update rate (minimum 1000.0)", "Double");
Command treeReloadCommand =
new Command("reload", CommandIntentions.COMMAND_HAZARDOUS, HandleTreeReload, "Reload copse definitions from the in-scene trees");
Command treeRemoveCommand =
new Command("remove", CommandIntentions.COMMAND_HAZARDOUS, HandleTreeRemove, "Remove a copse definition and all its in-scene trees");
treeRemoveCommand.AddArgument("copse", "The required copse", "String");
Command treeStatisticsCommand =
new Command("statistics", CommandIntentions.COMMAND_STATISTICAL, HandleTreeStatistics, "Log statistics about the trees");
m_commander.RegisterCommand("active", treeActiveCommand);
m_commander.RegisterCommand("freeze", treeFreezeCommand);
m_commander.RegisterCommand("load", treeLoadCommand);
m_commander.RegisterCommand("plant", treePlantCommand);
m_commander.RegisterCommand("rate", treeRateCommand);
m_commander.RegisterCommand("reload", treeReloadCommand);
m_commander.RegisterCommand("remove", treeRemoveCommand);
m_commander.RegisterCommand("statistics", treeStatisticsCommand);
m_scene.RegisterModuleCommander(m_commander);
}
/// <summary>
/// Processes commandline input. Do not call directly.
/// </summary>
/// <param name="args">Commandline arguments</param>
private void EventManager_OnPluginConsole(string[] args)
{
if (args[0] == "tree")
{
if (args.Length == 1)
{
m_commander.ProcessConsoleCommand("help", new string[0]);
return;
}
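// args[1] is the tree sub-command name; the remaining entries are its arguments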
string[] tmpArgs = new string[args.Length - 2];
int i;
for (i = 2; i < args.Length; i++)
{
tmpArgs[i - 2] = args[i];
}
m_commander.ProcessConsoleCommand(args[1], tmpArgs);
}
}
#endregion
#region IVegetationModule Members
public SceneObjectGroup AddTree(
UUID uuid, UUID groupID, Vector3 scale, Quaternion rotation, Vector3 position, Tree treeType, bool newTree)
{
PrimitiveBaseShape treeShape = new PrimitiveBaseShape();
treeShape.PathCurve = 16;
treeShape.PathEnd = 49900;
treeShape.PCode = newTree ? (byte)PCode.NewTree : (byte)PCode.Tree;
treeShape.Scale = scale;
treeShape.State = (byte)treeType;
return m_scene.AddNewPrim(uuid, groupID, position, rotation, treeShape);
}
#endregion
#region IEntityCreator Members
protected static readonly PCode[] creationCapabilities = new PCode[] { PCode.NewTree, PCode.Tree };
public PCode[] CreationCapabilities { get { return creationCapabilities; } }
public SceneObjectGroup CreateEntity(
UUID ownerID, UUID groupID, Vector3 pos, Quaternion rot, PrimitiveBaseShape shape)
{
if (Array.IndexOf(creationCapabilities, (PCode)shape.PCode) < 0)
{
m_log.DebugFormat("[VEGETATION]: PCode {0} not handled by {1}", shape.PCode, Name);
return null;
}
SceneObjectGroup sceneObject = new SceneObjectGroup(ownerID, pos, rot, shape);
SceneObjectPart rootPart = sceneObject.GetChildPart(sceneObject.UUID);
rootPart.AddFlag(PrimFlags.Phantom);
m_scene.AddNewSceneObject(sceneObject, true);
sceneObject.SetGroup(groupID, null);
return sceneObject;
}
#endregion
//--------------------------------------------------------------
#region Tree Utilities
static public void SerializeObject(string fileName, Object obj)
{
try
{
XmlSerializer xs = new XmlSerializer(typeof(Copse));
using (XmlTextWriter writer = new XmlTextWriter(fileName, Util.UTF8))
{
writer.Formatting = Formatting.Indented;
xs.Serialize(writer, obj);
}
}
catch (SystemException ex)
{
throw new ApplicationException("Unexpected failure in Tree serialization", ex);
}
}
static public object DeserializeObject(string fileName)
{
try
{
XmlSerializer xs = new XmlSerializer(typeof(Copse));
using (FileStream fs = new FileStream(fileName, FileMode.Open, FileAccess.Read))
return xs.Deserialize(fs);
}
catch (SystemException ex)
{
throw new ApplicationException("Unexpected failure in Tree de-serialization", ex);
}
}
private void ReloadCopse()
{
m_copse = new List<Copse>();
List<EntityBase> objs = m_scene.GetEntities();
foreach (EntityBase obj in objs)
{
if (obj is SceneObjectGroup)
{
SceneObjectGroup grp = (SceneObjectGroup)obj;
if (grp.Name.Length > 5 && (grp.Name.Substring(0, 5) == "ATPM:" || grp.Name.Substring(0, 5) == "FTPM:"))
{
// Create a new copse definition or add uuid to an existing definition
try
{
Boolean copsefound = false;
Copse copse = new Copse(grp.Name);
foreach (Copse cp in m_copse)
{
if (cp.m_name == copse.m_name)
{
copsefound = true;
cp.m_trees.Add(grp.UUID);
//m_log.DebugFormat("[TREES]: Found tree {0}", grp.UUID);
}
}
if (!copsefound)
{
m_log.InfoFormat("[TREES]: Found copse {0}", grp.Name);
m_copse.Add(copse);
copse.m_trees.Add(grp.UUID);
}
}
catch
{
m_log.InfoFormat("[TREES]: Ill formed copse definition {0} - ignoring", grp.Name);
}
}
}
}
}
#endregion
private void activeizeTreeze(bool activeYN)
{
if (activeYN)
{
CalculateTrees = new Timer(m_update_ms);
CalculateTrees.Elapsed += CalculateTrees_Elapsed;
CalculateTrees.Start();
}
else
{
CalculateTrees.Stop();
}
}
private void growTrees()
{
foreach (Copse copse in m_copse)
{
if (!copse.m_frozen)
{
foreach (UUID tree in copse.m_trees)
{
if (m_scene.Entities.ContainsKey(tree))
{
SceneObjectPart s_tree = ((SceneObjectGroup)m_scene.Entities[tree]).RootPart;
if (s_tree.Scale.X < copse.m_maximum_scale.X && s_tree.Scale.Y < copse.m_maximum_scale.Y && s_tree.Scale.Z < copse.m_maximum_scale.Z)
{
s_tree.Scale += copse.m_rate;
s_tree.ParentGroup.HasGroupChanged = true;
s_tree.ScheduleFullUpdate();
}
}
else
{
m_log.DebugFormat("[TREES]: Tree not in scene {0}", tree);
}
}
}
}
}
private void seedTrees()
{
foreach (Copse copse in m_copse)
{
if (!copse.m_frozen)
{
foreach (UUID tree in copse.m_trees)
{
if (m_scene.Entities.ContainsKey(tree))
{
SceneObjectPart s_tree = ((SceneObjectGroup)m_scene.Entities[tree]).RootPart;
if (copse.m_trees.Count < copse.m_tree_quantity)
{
// A tree has grown enough to seed once it has covered at least 25% of the height difference between its seeded and fully grown scale
if (s_tree.Scale.Z > copse.m_initial_scale.Z + (copse.m_maximum_scale.Z - copse.m_initial_scale.Z) / 4.0)
{
if (Util.RandomClass.NextDouble() > 0.75)
{
SpawnChild(copse, s_tree);
}
}
}
}
else
{
m_log.DebugFormat("[TREES]: Tree not in scene {0}", tree);
}
}
}
}
}
private void killTrees()
{
foreach (Copse copse in m_copse)
{
if (!copse.m_frozen && copse.m_trees.Count >= copse.m_tree_quantity)
{
foreach (UUID tree in copse.m_trees)
{
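// Accumulate a crowding-based removal likelihood for this tree: each neighbouring tree
// contributes more the closer it is and the smaller it is relative to the selected tree.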
double killLikelyhood = 0.0;
if (m_scene.Entities.ContainsKey(tree))
{
SceneObjectPart selectedTree = ((SceneObjectGroup)m_scene.Entities[tree]).RootPart;
double selectedTreeScale = Math.Sqrt(Math.Pow(selectedTree.Scale.X, 2) +
Math.Pow(selectedTree.Scale.Y, 2) +
Math.Pow(selectedTree.Scale.Z, 2));
foreach (UUID picktree in copse.m_trees)
{
if (picktree != tree)
{
SceneObjectPart pickedTree = ((SceneObjectGroup)m_scene.Entities[picktree]).RootPart;
double pickedTreeScale = Math.Sqrt(Math.Pow(pickedTree.Scale.X, 2) +
Math.Pow(pickedTree.Scale.Y, 2) +
Math.Pow(pickedTree.Scale.Z, 2));
double pickedTreeDistance = Vector3.Distance(pickedTree.AbsolutePosition, selectedTree.AbsolutePosition);
killLikelyhood += (selectedTreeScale / (pickedTreeScale * pickedTreeDistance)) * 0.1;
}
}
if (Util.RandomClass.NextDouble() < killLikelyhood)
{
m_scene.DeleteSceneObject(selectedTree.ParentGroup, false);
copse.m_trees.Remove(selectedTree.ParentGroup.UUID);
m_scene.ForEachClient(delegate(IClientAPI controller)
{
controller.SendKillObject(m_scene.RegionInfo.RegionHandle,
selectedTree.LocalId);
});
break;
}
}
else
{
m_log.DebugFormat("[TREES]: Tree not in scene {0}", tree);
}
}
}
}
}
private void SpawnChild(Copse copse, SceneObjectPart s_tree)
{
Vector3 position = new Vector3();
double randX = ((Util.RandomClass.NextDouble() * 2.0) - 1.0) * (s_tree.Scale.X * 3);
double randY = ((Util.RandomClass.NextDouble() * 2.0) - 1.0) * (s_tree.Scale.X * 3);
position.X = s_tree.AbsolutePosition.X + (float)randX;
position.Y = s_tree.AbsolutePosition.Y + (float)randY;
if (position.X <= ((int)Constants.RegionSize - 1) && position.X >= 0 &&
position.Y <= ((int)Constants.RegionSize - 1) && position.Y >= 0 &&
Util.GetDistanceTo(position, copse.m_seed_point) <= copse.m_range)
{
UUID uuid = m_scene.RegionInfo.EstateSettings.EstateOwner;
CreateTree(uuid, copse, position);
}
}
private void CreateTree(UUID uuid, Copse copse, Vector3 position)
{
position.Z = (float)m_scene.Heightmap[(int)position.X, (int)position.Y];
if (position.Z >= copse.m_treeline_low && position.Z <= copse.m_treeline_high)
{
SceneObjectGroup tree = AddTree(uuid, UUID.Zero, copse.m_initial_scale, Quaternion.Identity, position, copse.m_tree_type, false);
tree.Name = copse.ToString();
copse.m_trees.Add(tree.UUID);
tree.SendGroupFullUpdate();
}
}
private void CalculateTrees_Elapsed(object sender, ElapsedEventArgs e)
{
growTrees();
seedTrees();
killTrees();
}
}
}
| |
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using CommandLine;
using Google.Ads.GoogleAds.Lib;
using Google.Ads.GoogleAds.V10.Common;
using Google.Ads.GoogleAds.V10.Errors;
using Google.Ads.GoogleAds.V10.Resources;
using Google.Ads.GoogleAds.V10.Services;
using System;
using System.Collections.Generic;
using static Google.Ads.GoogleAds.V10.Enums.AdGroupCriterionStatusEnum.Types;
using static Google.Ads.GoogleAds.V10.Enums.KeywordMatchTypeEnum.Types;
namespace Google.Ads.GoogleAds.Examples.V10
{
/// <summary>
/// This code example demonstrates how to request an exemption for policy violations of a
/// keyword. Note that the example uses an exemptible policy-violating keyword by default.
/// If you use a keyword that contains non-exemptible policy violations, no exemption
/// request will be sent for them and the keyword will still fail to be created.
/// If you specify a keyword that doesn't violate any policies, this example will just add the
/// keyword as usual, similar to what the AddKeywords example does.
/// Note that once you've requested a policy exemption for a keyword, a subsequent request
/// to add it again will succeed just as it would for a non-violating keyword.
/// </summary>
public class HandleKeywordPolicyViolations : ExampleBase
{
/// <summary>
/// Command line options for running the <see cref="HandleKeywordPolicyViolations"/> example.
/// </summary>
public class Options : OptionsBase
{
/// <summary>
/// The customer ID for which the call is made.
/// </summary>
[Option("customerId", Required = true, HelpText =
"The customer ID for which the call is made.")]
public long CustomerId { get; set; }
/// <summary>
/// ID of the ad group to which keywords are added.
/// </summary>
[Option("adGroupId", Required = true, HelpText =
"ID of the ad group to which keywords are added.")]
public long AdGroupId { get; set; }
/// <summary>
/// The keyword text to add to the ad group.
/// </summary>
[Option("keywordText", Required = false, HelpText =
"The keyword text to add to the ad group.")]
public string KeywordText { get; set; }
}
/// <summary>
/// Main method, to run this code example as a standalone application.
/// </summary>
/// <param name="args">The command line arguments.</param>
public static void Main(string[] args)
{
Options options = new Options();
CommandLine.Parser.Default.ParseArguments<Options>(args).MapResult(
delegate (Options o)
{
options = o;
return 0;
}, delegate (IEnumerable<Error> errors)
{
// The customer ID for which the call is made.
options.CustomerId = long.Parse("INSERT_CUSTOMER_ID_HERE");
// ID of the ad group to which keywords are added.
options.AdGroupId = long.Parse("INSERT_AD_GROUP_ID_HERE");
// The keyword text to add to the ad group.
options.KeywordText = "INSERT_KEYWORD_TEXT_HERE";
return 0;
});
HandleKeywordPolicyViolations codeExample = new HandleKeywordPolicyViolations();
Console.WriteLine(codeExample.Description);
codeExample.Run(new GoogleAdsClient(), options.CustomerId, options.AdGroupId,
options.KeywordText);
}
/// <summary>
/// The default keyword to be used if keyword is not provided.
/// </summary>
private const string DEFAULT_KEYWORD = "medication";
/// <summary>
/// Returns a description about the code example.
/// </summary>
public override string Description =>
"This code example demonstrates how to request an exemption for policy violations of " +
"a keyword. Note that the example uses an exemptible policy-violating keyword by " +
"default. If you use a keyword that contains non-exemptible policy violations, they " +
"will not be sent for exemption request and you will still fail to create a keyword. " +
"If you specify a keyword that doesn't violate any policies, this example will just " +
"add the keyword as usual, similar to what the AddKeywords example does. Note that " +
"once you've requested policy exemption for a keyword, when you send a request for " +
"adding it again, the request will pass like when you add a non-violating keyword.";
/// <summary>
/// Runs the code example.
/// </summary>
/// <param name="client">The Google Ads client.</param>
/// <param name="customerId">The customer ID for which the call is made.</param>
/// <param name="adGroupId">ID of the ad group to which keywords are added.</param>
/// <param name="keywordText">The keyword text to add to the ad group.</param>
public void Run(GoogleAdsClient client, long customerId, long adGroupId,
string keywordText)
{
// Get the AdGroupCriterionServiceClient.
AdGroupCriterionServiceClient service = client.GetService(
Services.V10.AdGroupCriterionService);
if (string.IsNullOrEmpty(keywordText))
{
keywordText = DEFAULT_KEYWORD;
}
// Configures the keyword text and match type settings.
KeywordInfo keywordInfo = new KeywordInfo()
{
Text = keywordText,
MatchType = KeywordMatchType.Exact
};
// Constructs an ad group criterion using the keyword text info above.
AdGroupCriterion adGroupCriterion = new AdGroupCriterion()
{
AdGroup = ResourceNames.AdGroup(customerId, adGroupId),
Status = AdGroupCriterionStatus.Paused,
Keyword = keywordInfo
};
AdGroupCriterionOperation operation = new AdGroupCriterionOperation()
{
Create = adGroupCriterion
};
try
{
try
{
// Try sending a mutate request to add the keyword.
MutateAdGroupCriteriaResponse response = service.MutateAdGroupCriteria(
customerId.ToString(), new[] { operation });
Console.WriteLine($"Added a keyword with resource name " +
$"'{response.Results[0].ResourceName}'.");
}
catch (GoogleAdsException ex)
{
PolicyViolationKey[] exemptPolicyViolationKeys =
FetchExemptPolicyViolationKeys(ex);
// Try sending exemption requests for creating a keyword. However, if your
// keyword contains many policy violations, but not all of them are exemptible,
// the request will not be sent.
RequestExemption(customerId, service, operation, exemptPolicyViolationKeys);
}
}
catch (GoogleAdsException e)
{
Console.WriteLine("Failure:");
Console.WriteLine($"Message: {e.Message}");
Console.WriteLine($"Failure: {e.Failure}");
Console.WriteLine($"Request ID: {e.RequestId}");
throw;
}
}
/// <summary>
/// Collects all policy violation keys that can be exempted, for sending an exemption
/// request later.
/// </summary>
/// <param name="ex">The Google Ads exception.</param>
/// <returns>The exemptible policy violation keys.</returns>
// [START handle_keyword_policy_violations]
private static PolicyViolationKey[] FetchExemptPolicyViolationKeys(GoogleAdsException ex)
{
bool isFullyExemptable = true;
List<PolicyViolationKey> exemptPolicyViolationKeys = new List<PolicyViolationKey>();
Console.WriteLine("Google Ads failure details:");
foreach (GoogleAdsError error in ex.Failure.Errors)
{
if (error.ErrorCode.ErrorCodeCase !=
ErrorCode.ErrorCodeOneofCase.PolicyViolationError)
{
Console.WriteLine("No exemption request is sent because there are other " +
"non-policy related errors thrown.");
throw ex;
}
if (error.Details != null && error.Details.PolicyViolationDetails != null)
{
PolicyViolationDetails details = error.Details.PolicyViolationDetails;
Console.WriteLine($"- Policy violation details:");
Console.WriteLine(" - Policy violation details:");
Console.WriteLine($" - External policy name: '{details.ExternalPolicyName}'");
Console.WriteLine($" - External policy description: " +
$"'{details.ExternalPolicyDescription}'");
Console.WriteLine($" - Is exemptable: '{details.IsExemptible}'");
if (details.IsExemptible && details.Key != null)
{
PolicyViolationKey key = details.Key;
Console.WriteLine($" - Policy violation key:");
Console.WriteLine($" - Name: {key.PolicyName}");
Console.WriteLine($" - Violating Text: {key.ViolatingText}");
exemptPolicyViolationKeys.Add(key);
}
else
{
isFullyExemptable = false;
}
}
}
if (!isFullyExemptable)
{
Console.WriteLine("No exemption request is sent because your keyword " +
"contained some non-exemptible policy violations.");
throw ex;
}
return exemptPolicyViolationKeys.ToArray();
}
// [END handle_keyword_policy_violations]
/// <summary>
/// Sends exemption requests for creating a keyword.
/// </summary>
/// <param name="customerId">The customer ID for which the call is made.</param>
/// <param name="service">The ad group criterion service.</param>
/// <param name="operation">The ad group criterion operation to request exemption for.
/// </param>
/// <param name="exemptPolicyViolationKeys">The exemptable policy violation keys.</param>
// [START handle_keyword_policy_violations_1]
private static void RequestExemption(
long customerId, AdGroupCriterionServiceClient service,
AdGroupCriterionOperation operation, PolicyViolationKey[] exemptPolicyViolationKeys)
{
Console.WriteLine("Try adding a keyword again by requesting exemption for its policy "
+ "violations.");
PolicyValidationParameter validationParameter = new PolicyValidationParameter();
validationParameter.ExemptPolicyViolationKeys.AddRange(exemptPolicyViolationKeys);
operation.ExemptPolicyViolationKeys.AddRange(exemptPolicyViolationKeys);
MutateAdGroupCriteriaResponse response = service.MutateAdGroupCriteria(
customerId.ToString(), new[] { operation });
Console.WriteLine($"Successfully added a keyword with resource name " +
$"'{response.Results[0].ResourceName}' by requesting for policy violation " +
$"exemption.");
}
// [END handle_keyword_policy_violations_1]
}
}
| |
namespace codingfreaks.WadLogTail.Ui.WindowsApp.ViewModel
{
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Globalization;
using codingfreaks.cfUtils.Logic.Portable.Extensions;
using System.Linq;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using codingfreaks.cfUtils.Logic.Azure;
using codingfreaks.cfUtils.Logic.Wpf.Components;
using codingfreaks.cfUtils.Logic.Wpf.MvvmLight;
using codingfreaks.WadLogTail.Ui.WindowsApp.Enumerations;
using codingfreaks.WadLogTail.Ui.WindowsApp.Helper;
using codingfreaks.WadLogTail.Ui.WindowsApp.Models;
using GalaSoft.MvvmLight.Command;
using GalaSoft.MvvmLight.Messaging;
using GalaSoft.MvvmLight.Threading;
/// <summary>
/// The view model for the <see cref="MainWindow"/>.
/// </summary>
public class MainViewModel : BroadcastViewModelBase
{
#region member vars
private readonly TableHelper<WadLogEntity> _helper = new TableHelper<WadLogEntity>();
private readonly object _listLock = new object();
private readonly List<WadLogItemViewModel> _rawItems = new List<WadLogItemViewModel>();
private bool _lastSortAsc;
private string _lastSortMemberPath = "timestamp";
private long _receiveCounter;
#endregion
#region constants
private static readonly object _lock = new object();
#endregion
#region constructors and destructors
public MainViewModel()
{
BindingOperations.EnableCollectionSynchronization(Entries, _lock);
if (!IsInDesignMode)
{
DispatcherHelper.Initialize();
// handle event when new results are received from monitoring
_helper.MonitoringReceivedNewEntries += (s, e) =>
{
if (!e.Entries.Any())
{
return;
}
StatusText = "Receiving entries...";
DispatcherHelper.UIDispatcher.Invoke(
() =>
{
try
{
LastResultReceived = DateTime.Now;
_rawItems.ToList().ForEach(ent => ent.IsNew = false);
_rawItems.AddRange(
e.Entries.Select(
entry => new WadLogItemViewModel
{
EntityItem = entry,
IsNew = true,
ReceiveCounter = ++_receiveCounter
}));
FilterAndSortEntities("timestamp", false);
StatusText = e.Entries.Count() + " entries received.";
}
catch (Exception ex)
{
Trace.TraceError(ex.Message);
StatusText = "Error";
}
});
};
// define the command for opening the storage account selection window
OpenSelectStorageWindowCommand = new AutoRelayCommand(
() =>
{
Messenger.Default.Send(
new WindowRequestOpenMessage
{
WindowTarget = WindowTarget.ModalDialog,
WindowType = WindowType.SelectStorageAccountView
});
},
() => !IsRunning);
// define the command for starting and stopping
StartStopMonitoringCommand = new AutoRelayCommand(
() =>
{
if (IsRunning)
{
Task.Run(
() =>
{
StatusText = "Stopping monitoring...";
_helper.StopMonitoringTable();
IsRunning = false;
StatusText = "Monitoring stopped...";
});
return;
}
IsRunning = true;
Task.Run(
() =>
{
_receiveCounter = 0;
var table = StorageHelper.GetTableReference("WADLogsTable", StorageConnectionString);
_helper.StartMonitoringTable(table, 5, TimeSpan.FromDays(30).TotalSeconds);
});
},
() => IsRunning || !StorageConnectionString.IsNullOrEmpty());
// ensure that the StartStopMonitoringCommand is re-evaluated when the StorageConnectionString changes
StartStopMonitoringCommand.DependsOn(() => StorageConnectionString);
// init the command which will be bound to the grid's sorting event
GridSortingCommand = new RelayCommand<DataGridSortingEventArgs>(
e =>
{
var newSortAscending = (e.Column.SortDirection ?? 0) == ListSortDirection.Descending;
FilterAndSortEntities(e.Column.SortMemberPath, newSortAscending);
e.Column.SortDirection = newSortAscending ? ListSortDirection.Ascending : ListSortDirection.Descending;
e.Handled = true;
});
// handle the property-changed event to re-execute filtering when someone enters text in the filter box
PropertyChanged += (s, e) =>
{
if (e.PropertyName.Equals(nameof(FilterText), StringComparison.OrdinalIgnoreCase))
{
FilterAndSortEntities(_lastSortMemberPath, _lastSortAsc);
}
};
SetAccounts();
// show the requested window whenever a window-open message is received
Messenger.Default.Register<WindowOpenMessage>(
this,
o =>
{
var win = o.Window as Window;
switch (o.WindowTarget)
{
case WindowTarget.Dialog:
win?.Show();
break;
case WindowTarget.ModalDialog:
win?.ShowDialog();
break;
}
});
Messenger.Default.Register<SettingsChangedMessage>(this, m => SetAccounts());
}
else
{
// fill sample values for the design mode
StorageConnectionString = "StorageConnectionString";
Entries = new OptimizedObservableCollection<WadLogItemViewModel>
{
new WadLogItemViewModel
{
EntityItem = new WadLogEntity
{
Timestamp = DateTimeOffset.Now,
Message = "Test"
}
},
new WadLogItemViewModel
{
EntityItem = new WadLogEntity
{
Timestamp = DateTimeOffset.Now,
Message = "Test2"
}
},
new WadLogItemViewModel
{
EntityItem = new WadLogEntity
{
Timestamp = DateTimeOffset.Now,
Message = "Test3"
}
},
new WadLogItemViewModel
{
EntityItem = new WadLogEntity
{
Timestamp = DateTimeOffset.Now,
Message = "Test4"
}
}
};
}
}
#endregion
#region methods
public override void Cleanup()
{
try
{
_helper.StopMonitoringTable();
}
catch
{
}
base.Cleanup();
}
/// <summary>
/// Filters and sorts the currently loaded entries and rebuilds the <see cref="Entries"/> collection.
/// </summary>
/// <param name="sortMemberPath">The member path for the column that should be sorted.</param>
/// <param name="ascending"><c>true</c> if the new sort direction should be ascending.</param>
private void FilterAndSortEntities(string sortMemberPath, bool ascending = true)
{
var entries = _rawItems.ToList();
_lastSortMemberPath = sortMemberPath;
_lastSortAsc = ascending;
switch (sortMemberPath.ToLower())
{
case "timestamp":
entries = ascending
? entries.OrderBy(e => e.EntityItem.Timestamp).ThenBy(e => e.ReceiveCounter).ToList()
: entries.OrderByDescending(e => e.EntityItem.Timestamp).ThenByDescending(e => e.ReceiveCounter).ToList();
break;
}
if (!FilterText.IsNullOrEmpty())
{
entries = entries.Where(item => item.EntityItem.MessageCleaned.ToLower().Contains(FilterText.ToLower())).ToList();
}
Entries = new OptimizedObservableCollection<WadLogItemViewModel>();
Entries.AddRange(entries);
}
private void SetAccounts()
{
Accounts = new ObservableCollection<StorageAccountSetting>(Variables.Settings.Accounts);
}
#endregion
#region properties
/// <summary>
/// The list of available storage accounts.
/// </summary>
public ObservableCollection<StorageAccountSetting> Accounts { get; private set; }
/// <summary>
/// Entries already read from Azure.
/// </summary>
public OptimizedObservableCollection<WadLogItemViewModel> Entries { get; set; } = new OptimizedObservableCollection<WadLogItemViewModel>();
/// <summary>
/// The text to filter for.
/// </summary>
public string FilterText { get; set; }
public string FormattedLastResultReceived => LastResultReceived?.ToString("G", CultureInfo.CurrentUICulture);
/// <summary>
/// Is called when the grid's Sorting event is raised.
/// </summary>
public RelayCommand<DataGridSortingEventArgs> GridSortingCommand { get; private set; }
/// <summary>
/// Indicates whether the table monitoring is currently running.
/// </summary>
public bool IsRunning { get; private set; }
/// <summary>
/// The time when the last result was received.
/// </summary>
public DateTime? LastResultReceived { get; set; }
/// <summary>
/// Opens the window for selecting the storage account.
/// </summary>
public AutoRelayCommand OpenSelectStorageWindowCommand { get; private set; }
/// <summary>
/// The caption for the start/stop button.
/// </summary>
public string StartStopCaption => IsRunning ? "Stop" : "Start";
/// <summary>
/// Starts or stops the monitoring.
/// </summary>
public AutoRelayCommand StartStopMonitoringCommand { get; private set; }
/// <summary>
/// The text to display in the status bar.
/// </summary>
public string StatusText { get; set; }
/// <summary>
/// The Azure storage connection string.
/// </summary>
public string StorageConnectionString { get; set; }
/// <summary>
/// The title for the window.
/// </summary>
public string Title => "codingfreaks Azure Log Table Watcher";
#endregion
}
}
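// A minimal, self-contained sketch (not part of the original project) of the sort-then-filter
// pipeline that FilterAndSortEntities above applies: order by timestamp with the receive counter
// as a tie-breaker for identical timestamps, then apply a case-insensitive substring filter.
// The WadLogSample type and the Samples namespace are hypothetical and exist only for this example.
namespace codingfreaks.WadLogTail.Ui.WindowsApp.ViewModel.Samples
{
    using System;
    using System.Collections.Generic;
    using System.Linq;

    internal static class FilterAndSortSample
    {
        internal sealed class WadLogSample
        {
            public DateTimeOffset Timestamp { get; set; }

            public long ReceiveCounter { get; set; }

            public string Message { get; set; }
        }

        internal static List<WadLogSample> Apply(IEnumerable<WadLogSample> items, string filterText, bool ascending)
        {
            // sort by timestamp and keep the receive order stable for equal timestamps
            var sorted = ascending
                ? items.OrderBy(i => i.Timestamp).ThenBy(i => i.ReceiveCounter)
                : items.OrderByDescending(i => i.Timestamp).ThenByDescending(i => i.ReceiveCounter);
            // apply the case-insensitive text filter only when a filter text was entered
            return string.IsNullOrEmpty(filterText)
                ? sorted.ToList()
                : sorted.Where(i => i.Message != null && i.Message.IndexOf(filterText, StringComparison.OrdinalIgnoreCase) >= 0).ToList();
        }
    }
}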
| |
using System;
using System.Reflection;
using System.Threading.Tasks;
namespace Abp.Threading
{
internal static class InternalAsyncHelper
{
public static async Task AwaitTaskWithFinally(Task actualReturnValue, Action<Exception> finalAction)
{
Exception exception = null;
try
{
await actualReturnValue;
}
catch (Exception ex)
{
exception = ex;
throw;
}
finally
{
finalAction(exception);
}
}
public static async Task AwaitTaskWithPostActionAndFinally(Task actualReturnValue, Func<Task> postAction, Action<Exception> finalAction)
{
Exception exception = null;
try
{
await actualReturnValue;
await postAction();
}
catch (Exception ex)
{
exception = ex;
throw;
}
finally
{
finalAction(exception);
}
}
public static async Task AwaitTaskWithPreActionAndPostActionAndFinally(Func<Task> actualReturnValue, Func<Task> preAction = null, Func<Task> postAction = null, Action<Exception> finalAction = null)
{
Exception exception = null;
try
{
if (preAction != null)
{
await preAction();
}
await actualReturnValue();
if (postAction != null)
{
await postAction();
}
}
catch (Exception ex)
{
exception = ex;
throw;
}
finally
{
if (finalAction != null)
{
finalAction(exception);
}
}
}
public static async Task<T> AwaitTaskWithFinallyAndGetResult<T>(Task<T> actualReturnValue, Action<Exception, Task> finalAction)
{
Exception exception = null;
try
{
return await actualReturnValue;
}
catch (Exception ex)
{
exception = ex;
throw;
}
finally
{
finalAction(exception, actualReturnValue);
}
}
public static object CallAwaitTaskWithFinallyAndGetResult(Type taskReturnType, object actualReturnValue, Action<Exception, Task> finalAction)
{
return typeof(InternalAsyncHelper)
.GetMethod("AwaitTaskWithFinallyAndGetResult", BindingFlags.Public | BindingFlags.Static)
.MakeGenericMethod(taskReturnType)
.Invoke(null, new object[] { actualReturnValue, finalAction });
}
public static async Task<T> AwaitTaskWithPostActionAndFinallyAndGetResult<T>(Task<T> actualReturnValue, Func<Task> postAction, Action<Exception> finalAction)
{
Exception exception = null;
try
{
var result = await actualReturnValue;
await postAction();
return result;
}
catch (Exception ex)
{
exception = ex;
throw;
}
finally
{
finalAction(exception);
}
}
public static object CallAwaitTaskWithPostActionAndFinallyAndGetResult(Type taskReturnType, object actualReturnValue, Func<Task> action, Action<Exception> finalAction)
{
return typeof(InternalAsyncHelper)
.GetMethod("AwaitTaskWithPostActionAndFinallyAndGetResult", BindingFlags.Public | BindingFlags.Static)
.MakeGenericMethod(taskReturnType)
.Invoke(null, new object[] { actualReturnValue, action, finalAction });
}
public static async Task<T> AwaitTaskWithPreActionAndPostActionAndFinallyAndGetResult<T>(Func<Task<T>> actualReturnValue, Func<Task> preAction = null, Func<Task> postAction = null, Action<Exception> finalAction = null)
{
Exception exception = null;
try
{
if (preAction != null)
{
await preAction();
}
var result = await actualReturnValue();
if (postAction != null)
{
await postAction();
}
return result;
}
catch (Exception ex)
{
exception = ex;
throw;
}
finally
{
if (finalAction != null)
{
finalAction(exception);
}
}
}
public static object CallAwaitTaskWithPreActionAndPostActionAndFinallyAndGetResult(Type taskReturnType, Func<object> actualReturnValue, Func<Task> preAction = null, Func<Task> postAction = null, Action<Exception> finalAction = null)
{
return typeof(InternalAsyncHelper)
.GetMethod("AwaitTaskWithPreActionAndPostActionAndFinallyAndGetResult", BindingFlags.Public | BindingFlags.Static)
.MakeGenericMethod(taskReturnType)
.Invoke(null, new object[] { actualReturnValue, preAction, postAction, finalAction });
}
public static async Task AwaitTaskWithUsingActionAndFinally(Func<IDisposable> usingAction, Func<Task> action, Action<Exception> finalAction)
{
Exception exception = null;
try
{
using (usingAction())
{
await action();
}
}
catch (Exception ex)
{
exception = ex;
throw;
}
finally
{
finalAction(exception);
}
}
public static async Task<T> AwaitTaskWithUsingActionAndFinallyAndGetResult<T>(Func<IDisposable> usingAction, Func<object> action, Action<Exception> finalAction)
{
Exception exception = null;
try
{
using (usingAction())
{
return await (Task<T>)action();
}
}
catch (Exception ex)
{
exception = ex;
throw;
}
finally
{
finalAction(exception);
}
}
public static object CallAwaitTaskWithUsingActionAndFinallyAndGetResult(Type taskReturnType, Func<IDisposable> usingAction, Func<object> action, Action<Exception> finalAction)
{
return typeof(InternalAsyncHelper)
.GetMethod("AwaitTaskWithUsingActionAndFinallyAndGetResult", BindingFlags.Public | BindingFlags.Static)
.MakeGenericMethod(taskReturnType)
.Invoke(null, new object[] { usingAction, action, finalAction });
}
}
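    // A minimal usage sketch (not part of the original ABP code base) for the helpers above:
    // the generic overloads are awaited directly when the result type is known at compile time,
    // while the reflection-based Call* wrappers close the generic method over a System.Type that
    // is only known at runtime, e.g. inside a proxy/interceptor inspecting the intercepted
    // method's return type. The post/final actions below are placeholders.
    internal static class InternalAsyncHelperUsageSample
    {
        public static async Task<int> DirectUsageAsync()
        {
            // wrap an existing Task<int> with a post action and a final action
            return await InternalAsyncHelper.AwaitTaskWithPostActionAndFinallyAndGetResult(
                Task.FromResult(42),
                postAction: () => Task.FromResult(0),
                finalAction: exception => { /* e.g. commit on success, roll back when exception != null */ });
        }

        public static object ReflectionUsage(Type resultType, object actualReturnValue)
        {
            // actualReturnValue is expected to be a Task<T> whose T equals resultType;
            // the wrapper returns the decorated Task<T> boxed as object
            return InternalAsyncHelper.CallAwaitTaskWithPostActionAndFinallyAndGetResult(
                resultType,
                actualReturnValue,
                action: () => Task.FromResult(0),
                finalAction: exception => { });
        }
    }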
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// ------------------------------------------------------------------------------
// Changes to this file must follow the http://aka.ms/api-review process.
// ------------------------------------------------------------------------------
namespace System.ComponentModel.Composition
{
public static partial class AdaptationConstants
{
public const string AdapterContractName = "System.ComponentModel.Composition.AdapterContract";
public const string AdapterFromContractMetadataName = "FromContract";
public const string AdapterToContractMetadataName = "ToContract";
}
public static partial class AttributedModelServices
{
public static System.ComponentModel.Composition.Primitives.ComposablePart AddExportedValue<T>(this System.ComponentModel.Composition.Hosting.CompositionBatch batch, string contractName, T exportedValue) { throw null; }
public static System.ComponentModel.Composition.Primitives.ComposablePart AddExportedValue<T>(this System.ComponentModel.Composition.Hosting.CompositionBatch batch, T exportedValue) { throw null; }
public static System.ComponentModel.Composition.Primitives.ComposablePart AddPart(this System.ComponentModel.Composition.Hosting.CompositionBatch batch, object attributedPart) { throw null; }
public static void ComposeExportedValue<T>(this System.ComponentModel.Composition.Hosting.CompositionContainer container, string contractName, T exportedValue) { }
public static void ComposeExportedValue<T>(this System.ComponentModel.Composition.Hosting.CompositionContainer container, T exportedValue) { }
public static void ComposeParts(this System.ComponentModel.Composition.Hosting.CompositionContainer container, params object[] attributedParts) { }
public static System.ComponentModel.Composition.Primitives.ComposablePart CreatePart(System.ComponentModel.Composition.Primitives.ComposablePartDefinition partDefinition, object attributedPart) { throw null; }
public static System.ComponentModel.Composition.Primitives.ComposablePart CreatePart(object attributedPart) { throw null; }
public static System.ComponentModel.Composition.Primitives.ComposablePart CreatePart(object attributedPart, System.Reflection.ReflectionContext reflectionContext) { throw null; }
public static System.ComponentModel.Composition.Primitives.ComposablePartDefinition CreatePartDefinition(System.Type type, System.ComponentModel.Composition.Primitives.ICompositionElement origin) { throw null; }
public static System.ComponentModel.Composition.Primitives.ComposablePartDefinition CreatePartDefinition(System.Type type, System.ComponentModel.Composition.Primitives.ICompositionElement origin, bool ensureIsDiscoverable) { throw null; }
public static bool Exports(this System.ComponentModel.Composition.Primitives.ComposablePartDefinition part, System.Type contractType) { throw null; }
public static bool Exports<T>(this System.ComponentModel.Composition.Primitives.ComposablePartDefinition part) { throw null; }
public static string GetContractName(System.Type type) { throw null; }
public static TMetadataView GetMetadataView<TMetadataView>(System.Collections.Generic.IDictionary<string, object> metadata) { throw null; }
public static string GetTypeIdentity(System.Reflection.MethodInfo method) { throw null; }
public static string GetTypeIdentity(System.Type type) { throw null; }
public static bool Imports(this System.ComponentModel.Composition.Primitives.ComposablePartDefinition part, System.Type contractType) { throw null; }
public static bool Imports(this System.ComponentModel.Composition.Primitives.ComposablePartDefinition part, System.Type contractType, System.ComponentModel.Composition.Primitives.ImportCardinality importCardinality) { throw null; }
public static bool Imports<T>(this System.ComponentModel.Composition.Primitives.ComposablePartDefinition part) { throw null; }
public static bool Imports<T>(this System.ComponentModel.Composition.Primitives.ComposablePartDefinition part, System.ComponentModel.Composition.Primitives.ImportCardinality importCardinality) { throw null; }
public static System.ComponentModel.Composition.Primitives.ComposablePart SatisfyImportsOnce(this System.ComponentModel.Composition.ICompositionService compositionService, object attributedPart) { throw null; }
public static System.ComponentModel.Composition.Primitives.ComposablePart SatisfyImportsOnce(this System.ComponentModel.Composition.ICompositionService compositionService, object attributedPart, System.Reflection.ReflectionContext reflectionContext) { throw null; }
}
[System.AttributeUsageAttribute((System.AttributeTargets)(1), AllowMultiple=false, Inherited=true)]
public partial class CatalogReflectionContextAttribute : System.Attribute
{
public CatalogReflectionContextAttribute(System.Type reflectionContextType) { }
public System.Reflection.ReflectionContext CreateReflectionContext() { throw null; }
}
public partial class ChangeRejectedException : System.ComponentModel.Composition.CompositionException
{
public ChangeRejectedException() { }
public ChangeRejectedException(System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.CompositionError> errors) { }
public ChangeRejectedException(string message) { }
public ChangeRejectedException(string message, System.Exception innerException) { }
public override string Message { get { throw null; } }
}
public partial class CompositionContractMismatchException : System.Exception
{
public CompositionContractMismatchException() { }
protected CompositionContractMismatchException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { }
public CompositionContractMismatchException(string message) { }
public CompositionContractMismatchException(string message, System.Exception innerException) { }
}
public partial class CompositionError
{
public CompositionError(string message) { }
public CompositionError(string message, System.ComponentModel.Composition.Primitives.ICompositionElement element) { }
public CompositionError(string message, System.ComponentModel.Composition.Primitives.ICompositionElement element, System.Exception exception) { }
public CompositionError(string message, System.Exception exception) { }
public string Description { get { throw null; } }
public System.ComponentModel.Composition.Primitives.ICompositionElement Element { get { throw null; } }
public System.Exception Exception { get { throw null; } }
public override string ToString() { throw null; }
}
public partial class CompositionException : System.Exception
{
public CompositionException() { }
public CompositionException(System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.CompositionError> errors) { }
public CompositionException(string message) { }
public CompositionException(string message, System.Exception innerException) { }
public System.Collections.ObjectModel.ReadOnlyCollection<System.ComponentModel.Composition.CompositionError> Errors { get { throw null; } }
public override string Message { get { throw null; } }
public System.Collections.ObjectModel.ReadOnlyCollection<System.Exception> RootCauses { get { throw null; } }
}
public enum CreationPolicy
{
Any = 0,
NonShared = 2,
Shared = 1,
}
[System.AttributeUsageAttribute((System.AttributeTargets)(452), AllowMultiple=true, Inherited=false)]
public partial class ExportAttribute : System.Attribute
{
public ExportAttribute() { }
public ExportAttribute(string contractName) { }
public ExportAttribute(string contractName, System.Type contractType) { }
public ExportAttribute(System.Type contractType) { }
public string ContractName { get { throw null; } }
public System.Type ContractType { get { throw null; } }
}
public partial class ExportFactory<T>
{
public ExportFactory(System.Func<System.Tuple<T, System.Action>> exportLifetimeContextCreator) { }
public System.ComponentModel.Composition.ExportLifetimeContext<T> CreateExport() { throw null; }
}
public partial class ExportFactory<T, TMetadata> : System.ComponentModel.Composition.ExportFactory<T>
{
public ExportFactory(System.Func<System.Tuple<T, System.Action>> exportLifetimeContextCreator, TMetadata metadata) : base (default(System.Func<System.Tuple<T, System.Action>>)) { }
public TMetadata Metadata { get { throw null; } }
}
public sealed partial class ExportLifetimeContext<T> : System.IDisposable
{
public ExportLifetimeContext(T value, System.Action disposeAction) { }
public T Value { get { throw null; } }
public void Dispose() { }
}
[System.AttributeUsageAttribute((System.AttributeTargets)(1476), AllowMultiple=true, Inherited=false)]
public sealed partial class ExportMetadataAttribute : System.Attribute
{
public ExportMetadataAttribute(string name, object value) { }
public bool IsMultiple { get { throw null; } set { } }
public string Name { get { throw null; } }
public object Value { get { throw null; } }
}
public partial interface ICompositionService
{
void SatisfyImportsOnce(System.ComponentModel.Composition.Primitives.ComposablePart part);
}
[System.AttributeUsageAttribute((System.AttributeTargets)(2432), AllowMultiple=false, Inherited=false)]
public partial class ImportAttribute : System.Attribute
{
public ImportAttribute() { }
public ImportAttribute(string contractName) { }
public ImportAttribute(string contractName, System.Type contractType) { }
public ImportAttribute(System.Type contractType) { }
public bool AllowDefault { get { throw null; } set { } }
public bool AllowRecomposition { get { throw null; } set { } }
public string ContractName { get { throw null; } }
public System.Type ContractType { get { throw null; } }
public System.ComponentModel.Composition.CreationPolicy RequiredCreationPolicy { get { throw null; } set { } }
public System.ComponentModel.Composition.ImportSource Source { get { throw null; } set { } }
}
public partial class ImportCardinalityMismatchException : System.Exception
{
public ImportCardinalityMismatchException() { }
protected ImportCardinalityMismatchException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { }
public ImportCardinalityMismatchException(string message) { }
public ImportCardinalityMismatchException(string message, System.Exception innerException) { }
}
[System.AttributeUsageAttribute((System.AttributeTargets)(32), AllowMultiple=false, Inherited=false)]
public partial class ImportingConstructorAttribute : System.Attribute
{
public ImportingConstructorAttribute() { }
}
[System.AttributeUsageAttribute((System.AttributeTargets)(2432), AllowMultiple=false, Inherited=false)]
public partial class ImportManyAttribute : System.Attribute
{
public ImportManyAttribute() { }
public ImportManyAttribute(string contractName) { }
public ImportManyAttribute(string contractName, System.Type contractType) { }
public ImportManyAttribute(System.Type contractType) { }
public bool AllowRecomposition { get { throw null; } set { } }
public string ContractName { get { throw null; } }
public System.Type ContractType { get { throw null; } }
public System.ComponentModel.Composition.CreationPolicy RequiredCreationPolicy { get { throw null; } set { } }
public System.ComponentModel.Composition.ImportSource Source { get { throw null; } set { } }
}
public enum ImportSource
{
Any = 0,
Local = 1,
NonLocal = 2,
}
[System.AttributeUsageAttribute((System.AttributeTargets)(1028), AllowMultiple=true, Inherited=true)]
public partial class InheritedExportAttribute : System.ComponentModel.Composition.ExportAttribute
{
public InheritedExportAttribute() { }
public InheritedExportAttribute(string contractName) { }
public InheritedExportAttribute(string contractName, System.Type contractType) { }
public InheritedExportAttribute(System.Type contractType) { }
}
public partial interface IPartImportsSatisfiedNotification
{
void OnImportsSatisfied();
}
[System.AttributeUsageAttribute((System.AttributeTargets)(4), AllowMultiple=false, Inherited=true)]
public sealed partial class MetadataAttributeAttribute : System.Attribute
{
public MetadataAttributeAttribute() { }
}
[System.AttributeUsageAttribute((System.AttributeTargets)(1024), AllowMultiple=false, Inherited=false)]
public sealed partial class MetadataViewImplementationAttribute : System.Attribute
{
public MetadataViewImplementationAttribute(System.Type implementationType) { }
public System.Type ImplementationType { get { throw null; } }
}
[System.AttributeUsageAttribute((System.AttributeTargets)(4), AllowMultiple=false, Inherited=false)]
public sealed partial class PartCreationPolicyAttribute : System.Attribute
{
public PartCreationPolicyAttribute(System.ComponentModel.Composition.CreationPolicy creationPolicy) { }
public System.ComponentModel.Composition.CreationPolicy CreationPolicy { get { throw null; } }
}
[System.AttributeUsageAttribute((System.AttributeTargets)(4), AllowMultiple=true, Inherited=false)]
public sealed partial class PartMetadataAttribute : System.Attribute
{
public PartMetadataAttribute(string name, object value) { }
public string Name { get { throw null; } }
public object Value { get { throw null; } }
}
[System.AttributeUsageAttribute((System.AttributeTargets)(4), AllowMultiple=false, Inherited=false)]
public sealed partial class PartNotDiscoverableAttribute : System.Attribute
{
public PartNotDiscoverableAttribute() { }
}
}
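// A minimal, self-contained sketch (not part of this reference source) of the attributed MEF
// programming model described by the types above: a part exported via ExportAttribute, an import
// declared via ImportAttribute, and composition performed by a CompositionContainer over an
// AssemblyCatalog. The IMessageSender/ConsoleMessageSender/Notifier names are hypothetical.
namespace System.ComponentModel.Composition.Samples
{
    using System.ComponentModel.Composition;
    using System.ComponentModel.Composition.Hosting;
    using System.Reflection;

    public interface IMessageSender
    {
        void Send(string message);
    }

    [Export(typeof(IMessageSender))]
    public class ConsoleMessageSender : IMessageSender
    {
        public void Send(string message)
        {
            Console.WriteLine(message);
        }
    }

    public class Notifier
    {
        // satisfied by the single export matching the IMessageSender contract
        [Import(typeof(IMessageSender))]
        public IMessageSender Sender { get; set; }
    }

    public static class CompositionSample
    {
        public static void Run()
        {
            // discover parts in the current assembly and satisfy the Notifier's imports
            using (var catalog = new AssemblyCatalog(Assembly.GetExecutingAssembly()))
            using (var container = new CompositionContainer(catalog))
            {
                var notifier = new Notifier();
                container.ComposeParts(notifier);
                notifier.Sender.Send("composed");
            }
        }
    }
}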
namespace System.ComponentModel.Composition.Hosting
{
public partial class AggregateCatalog : System.ComponentModel.Composition.Primitives.ComposablePartCatalog, System.ComponentModel.Composition.Hosting.INotifyComposablePartCatalogChanged
{
public AggregateCatalog() { }
public AggregateCatalog(System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ComposablePartCatalog> catalogs) { }
public AggregateCatalog(params System.ComponentModel.Composition.Primitives.ComposablePartCatalog[] catalogs) { }
public System.Collections.Generic.ICollection<System.ComponentModel.Composition.Primitives.ComposablePartCatalog> Catalogs { get { throw null; } }
public event System.EventHandler<System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs> Changed { add { } remove { } }
public event System.EventHandler<System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs> Changing { add { } remove { } }
protected override void Dispose(bool disposing) { }
public override System.Collections.Generic.IEnumerator<System.ComponentModel.Composition.Primitives.ComposablePartDefinition> GetEnumerator() { throw null; }
public override System.Collections.Generic.IEnumerable<System.Tuple<System.ComponentModel.Composition.Primitives.ComposablePartDefinition, System.ComponentModel.Composition.Primitives.ExportDefinition>> GetExports(System.ComponentModel.Composition.Primitives.ImportDefinition definition) { throw null; }
protected virtual void OnChanged(System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs e) { }
protected virtual void OnChanging(System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs e) { }
}
public partial class AggregateExportProvider : System.ComponentModel.Composition.Hosting.ExportProvider, System.IDisposable
{
public AggregateExportProvider(System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Hosting.ExportProvider> providers) { }
public AggregateExportProvider(params System.ComponentModel.Composition.Hosting.ExportProvider[] providers) { }
public System.Collections.ObjectModel.ReadOnlyCollection<System.ComponentModel.Composition.Hosting.ExportProvider> Providers { get { throw null; } }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
protected override System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.Export> GetExportsCore(System.ComponentModel.Composition.Primitives.ImportDefinition definition, System.ComponentModel.Composition.Hosting.AtomicComposition atomicComposition) { throw null; }
}
public partial class ApplicationCatalog : System.ComponentModel.Composition.Primitives.ComposablePartCatalog, System.ComponentModel.Composition.Primitives.ICompositionElement
{
public ApplicationCatalog() { }
public ApplicationCatalog(System.ComponentModel.Composition.Primitives.ICompositionElement definitionOrigin) { }
public ApplicationCatalog(System.Reflection.ReflectionContext reflectionContext) { }
public ApplicationCatalog(System.Reflection.ReflectionContext reflectionContext, System.ComponentModel.Composition.Primitives.ICompositionElement definitionOrigin) { }
string System.ComponentModel.Composition.Primitives.ICompositionElement.DisplayName { get { throw null; } }
System.ComponentModel.Composition.Primitives.ICompositionElement System.ComponentModel.Composition.Primitives.ICompositionElement.Origin { get { throw null; } }
protected override void Dispose(bool disposing) { }
public override System.Collections.Generic.IEnumerator<System.ComponentModel.Composition.Primitives.ComposablePartDefinition> GetEnumerator() { throw null; }
public override System.Collections.Generic.IEnumerable<System.Tuple<System.ComponentModel.Composition.Primitives.ComposablePartDefinition, System.ComponentModel.Composition.Primitives.ExportDefinition>> GetExports(System.ComponentModel.Composition.Primitives.ImportDefinition definition) { throw null; }
public override string ToString() { throw null; }
}
public partial class AssemblyCatalog : System.ComponentModel.Composition.Primitives.ComposablePartCatalog, System.ComponentModel.Composition.Primitives.ICompositionElement
{
public AssemblyCatalog(System.Reflection.Assembly assembly) { }
public AssemblyCatalog(System.Reflection.Assembly assembly, System.ComponentModel.Composition.Primitives.ICompositionElement definitionOrigin) { }
public AssemblyCatalog(System.Reflection.Assembly assembly, System.Reflection.ReflectionContext reflectionContext) { }
public AssemblyCatalog(System.Reflection.Assembly assembly, System.Reflection.ReflectionContext reflectionContext, System.ComponentModel.Composition.Primitives.ICompositionElement definitionOrigin) { }
public AssemblyCatalog(string codeBase) { }
public AssemblyCatalog(string codeBase, System.ComponentModel.Composition.Primitives.ICompositionElement definitionOrigin) { }
public AssemblyCatalog(string codeBase, System.Reflection.ReflectionContext reflectionContext) { }
public AssemblyCatalog(string codeBase, System.Reflection.ReflectionContext reflectionContext, System.ComponentModel.Composition.Primitives.ICompositionElement definitionOrigin) { }
public System.Reflection.Assembly Assembly { get { throw null; } }
string System.ComponentModel.Composition.Primitives.ICompositionElement.DisplayName { get { throw null; } }
System.ComponentModel.Composition.Primitives.ICompositionElement System.ComponentModel.Composition.Primitives.ICompositionElement.Origin { get { throw null; } }
protected override void Dispose(bool disposing) { }
public override System.Collections.Generic.IEnumerator<System.ComponentModel.Composition.Primitives.ComposablePartDefinition> GetEnumerator() { throw null; }
public override System.Collections.Generic.IEnumerable<System.Tuple<System.ComponentModel.Composition.Primitives.ComposablePartDefinition, System.ComponentModel.Composition.Primitives.ExportDefinition>> GetExports(System.ComponentModel.Composition.Primitives.ImportDefinition definition) { throw null; }
public override string ToString() { throw null; }
}
public partial class AtomicComposition : System.IDisposable
{
public AtomicComposition() { }
public AtomicComposition(System.ComponentModel.Composition.Hosting.AtomicComposition outerAtomicComposition) { }
public void AddCompleteAction(System.Action completeAction) { }
public void AddRevertAction(System.Action revertAction) { }
public void Complete() { }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
public void SetValue(object key, object value) { }
public bool TryGetValue<T>(object key, bool localAtomicCompositionOnly, out T value) { throw null; }
public bool TryGetValue<T>(object key, out T value) { throw null; }
}
public partial class CatalogExportProvider : System.ComponentModel.Composition.Hosting.ExportProvider, System.IDisposable
{
public CatalogExportProvider(System.ComponentModel.Composition.Primitives.ComposablePartCatalog catalog) { }
public CatalogExportProvider(System.ComponentModel.Composition.Primitives.ComposablePartCatalog catalog, bool isThreadSafe) { }
public CatalogExportProvider(System.ComponentModel.Composition.Primitives.ComposablePartCatalog catalog, System.ComponentModel.Composition.Hosting.CompositionOptions compositionOptions) { }
public System.ComponentModel.Composition.Primitives.ComposablePartCatalog Catalog { get { throw null; } }
public System.ComponentModel.Composition.Hosting.ExportProvider SourceProvider { get { throw null; } set { } }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
protected override System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.Export> GetExportsCore(System.ComponentModel.Composition.Primitives.ImportDefinition definition, System.ComponentModel.Composition.Hosting.AtomicComposition atomicComposition) { throw null; }
}
public static partial class CatalogExtensions
{
public static System.ComponentModel.Composition.Hosting.CompositionService CreateCompositionService(this System.ComponentModel.Composition.Primitives.ComposablePartCatalog composablePartCatalog) { throw null; }
}
public partial class ComposablePartCatalogChangeEventArgs : System.EventArgs
{
public ComposablePartCatalogChangeEventArgs(System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ComposablePartDefinition> addedDefinitions, System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ComposablePartDefinition> removedDefinitions, System.ComponentModel.Composition.Hosting.AtomicComposition atomicComposition) { }
public System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ComposablePartDefinition> AddedDefinitions { get { throw null; } }
public System.ComponentModel.Composition.Hosting.AtomicComposition AtomicComposition { get { throw null; } }
public System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ComposablePartDefinition> RemovedDefinitions { get { throw null; } }
}
public partial class ComposablePartExportProvider : System.ComponentModel.Composition.Hosting.ExportProvider, System.IDisposable
{
public ComposablePartExportProvider() { }
public ComposablePartExportProvider(bool isThreadSafe) { }
public ComposablePartExportProvider(System.ComponentModel.Composition.Hosting.CompositionOptions compositionOptions) { }
public System.ComponentModel.Composition.Hosting.ExportProvider SourceProvider { get { throw null; } set { } }
public void Compose(System.ComponentModel.Composition.Hosting.CompositionBatch batch) { }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
protected override System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.Export> GetExportsCore(System.ComponentModel.Composition.Primitives.ImportDefinition definition, System.ComponentModel.Composition.Hosting.AtomicComposition atomicComposition) { throw null; }
}
public partial class CompositionBatch
{
public CompositionBatch() { }
public CompositionBatch(System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ComposablePart> partsToAdd, System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ComposablePart> partsToRemove) { }
public System.Collections.ObjectModel.ReadOnlyCollection<System.ComponentModel.Composition.Primitives.ComposablePart> PartsToAdd { get { throw null; } }
public System.Collections.ObjectModel.ReadOnlyCollection<System.ComponentModel.Composition.Primitives.ComposablePart> PartsToRemove { get { throw null; } }
public System.ComponentModel.Composition.Primitives.ComposablePart AddExport(System.ComponentModel.Composition.Primitives.Export export) { throw null; }
public void AddPart(System.ComponentModel.Composition.Primitives.ComposablePart part) { }
public void RemovePart(System.ComponentModel.Composition.Primitives.ComposablePart part) { }
}
public static partial class CompositionConstants
{
public const string ExportTypeIdentityMetadataName = "ExportTypeIdentity";
public const string GenericContractMetadataName = "System.ComponentModel.Composition.GenericContractName";
public const string GenericParametersMetadataName = "System.ComponentModel.Composition.GenericParameters";
public const string ImportSourceMetadataName = "System.ComponentModel.Composition.ImportSource";
public const string IsGenericPartMetadataName = "System.ComponentModel.Composition.IsGenericPart";
public const string PartCreationPolicyMetadataName = "System.ComponentModel.Composition.CreationPolicy";
}
public partial class CompositionContainer : System.ComponentModel.Composition.Hosting.ExportProvider, System.ComponentModel.Composition.ICompositionService, System.IDisposable
{
public CompositionContainer() { }
public CompositionContainer(System.ComponentModel.Composition.Hosting.CompositionOptions compositionOptions, params System.ComponentModel.Composition.Hosting.ExportProvider[] providers) { }
public CompositionContainer(params System.ComponentModel.Composition.Hosting.ExportProvider[] providers) { }
public CompositionContainer(System.ComponentModel.Composition.Primitives.ComposablePartCatalog catalog, bool isThreadSafe, params System.ComponentModel.Composition.Hosting.ExportProvider[] providers) { }
public CompositionContainer(System.ComponentModel.Composition.Primitives.ComposablePartCatalog catalog, System.ComponentModel.Composition.Hosting.CompositionOptions compositionOptions, params System.ComponentModel.Composition.Hosting.ExportProvider[] providers) { }
public CompositionContainer(System.ComponentModel.Composition.Primitives.ComposablePartCatalog catalog, params System.ComponentModel.Composition.Hosting.ExportProvider[] providers) { }
public System.ComponentModel.Composition.Primitives.ComposablePartCatalog Catalog { get { throw null; } }
public System.Collections.ObjectModel.ReadOnlyCollection<System.ComponentModel.Composition.Hosting.ExportProvider> Providers { get { throw null; } }
public void Compose(System.ComponentModel.Composition.Hosting.CompositionBatch batch) { }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
protected override System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.Export> GetExportsCore(System.ComponentModel.Composition.Primitives.ImportDefinition definition, System.ComponentModel.Composition.Hosting.AtomicComposition atomicComposition) { throw null; }
public void ReleaseExport(System.ComponentModel.Composition.Primitives.Export export) { }
public void ReleaseExports(System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.Export> exports) { }
public void ReleaseExports<T>(System.Collections.Generic.IEnumerable<System.Lazy<T>> exports) { }
public void ReleaseExports<T, TMetadataView>(System.Collections.Generic.IEnumerable<System.Lazy<T, TMetadataView>> exports) { }
public void ReleaseExport<T>(System.Lazy<T> export) { }
public void SatisfyImportsOnce(System.ComponentModel.Composition.Primitives.ComposablePart part) { }
}
[System.FlagsAttribute]
public enum CompositionOptions
{
Default = 0,
DisableSilentRejection = 1,
ExportCompositionService = 4,
IsThreadSafe = 2,
}
public partial class CompositionScopeDefinition : System.ComponentModel.Composition.Primitives.ComposablePartCatalog, System.ComponentModel.Composition.Hosting.INotifyComposablePartCatalogChanged
{
protected CompositionScopeDefinition() { }
public CompositionScopeDefinition(System.ComponentModel.Composition.Primitives.ComposablePartCatalog catalog, System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Hosting.CompositionScopeDefinition> children) { }
public CompositionScopeDefinition(System.ComponentModel.Composition.Primitives.ComposablePartCatalog catalog, System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Hosting.CompositionScopeDefinition> children, System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ExportDefinition> publicSurface) { }
public virtual System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Hosting.CompositionScopeDefinition> Children { get { throw null; } }
public virtual System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ExportDefinition> PublicSurface { get { throw null; } }
public event System.EventHandler<System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs> Changed { add { } remove { } }
public event System.EventHandler<System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs> Changing { add { } remove { } }
protected override void Dispose(bool disposing) { }
public override System.Collections.Generic.IEnumerator<System.ComponentModel.Composition.Primitives.ComposablePartDefinition> GetEnumerator() { throw null; }
public override System.Collections.Generic.IEnumerable<System.Tuple<System.ComponentModel.Composition.Primitives.ComposablePartDefinition, System.ComponentModel.Composition.Primitives.ExportDefinition>> GetExports(System.ComponentModel.Composition.Primitives.ImportDefinition definition) { throw null; }
protected virtual void OnChanged(System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs e) { }
protected virtual void OnChanging(System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs e) { }
}
public partial class CompositionService : System.ComponentModel.Composition.ICompositionService, System.IDisposable
{
internal CompositionService() { }
public void Dispose() { }
public void SatisfyImportsOnce(System.ComponentModel.Composition.Primitives.ComposablePart part) { }
}
public partial class DirectoryCatalog : System.ComponentModel.Composition.Primitives.ComposablePartCatalog, System.ComponentModel.Composition.Hosting.INotifyComposablePartCatalogChanged, System.ComponentModel.Composition.Primitives.ICompositionElement
{
public DirectoryCatalog(string path) { }
public DirectoryCatalog(string path, System.ComponentModel.Composition.Primitives.ICompositionElement definitionOrigin) { }
public DirectoryCatalog(string path, System.Reflection.ReflectionContext reflectionContext) { }
public DirectoryCatalog(string path, System.Reflection.ReflectionContext reflectionContext, System.ComponentModel.Composition.Primitives.ICompositionElement definitionOrigin) { }
public DirectoryCatalog(string path, string searchPattern) { }
public DirectoryCatalog(string path, string searchPattern, System.ComponentModel.Composition.Primitives.ICompositionElement definitionOrigin) { }
public DirectoryCatalog(string path, string searchPattern, System.Reflection.ReflectionContext reflectionContext) { }
public DirectoryCatalog(string path, string searchPattern, System.Reflection.ReflectionContext reflectionContext, System.ComponentModel.Composition.Primitives.ICompositionElement definitionOrigin) { }
public string FullPath { get { throw null; } }
public System.Collections.ObjectModel.ReadOnlyCollection<string> LoadedFiles { get { throw null; } }
public string Path { get { throw null; } }
public string SearchPattern { get { throw null; } }
string System.ComponentModel.Composition.Primitives.ICompositionElement.DisplayName { get { throw null; } }
System.ComponentModel.Composition.Primitives.ICompositionElement System.ComponentModel.Composition.Primitives.ICompositionElement.Origin { get { throw null; } }
public event System.EventHandler<System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs> Changed { add { } remove { } }
public event System.EventHandler<System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs> Changing { add { } remove { } }
protected override void Dispose(bool disposing) { }
public override System.Collections.Generic.IEnumerator<System.ComponentModel.Composition.Primitives.ComposablePartDefinition> GetEnumerator() { throw null; }
public override System.Collections.Generic.IEnumerable<System.Tuple<System.ComponentModel.Composition.Primitives.ComposablePartDefinition, System.ComponentModel.Composition.Primitives.ExportDefinition>> GetExports(System.ComponentModel.Composition.Primitives.ImportDefinition definition) { throw null; }
protected virtual void OnChanged(System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs e) { }
protected virtual void OnChanging(System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs e) { }
public void Refresh() { }
public override string ToString() { throw null; }
}
public abstract partial class ExportProvider
{
protected ExportProvider() { }
public event System.EventHandler<System.ComponentModel.Composition.Hosting.ExportsChangeEventArgs> ExportsChanged { add { } remove { } }
public event System.EventHandler<System.ComponentModel.Composition.Hosting.ExportsChangeEventArgs> ExportsChanging { add { } remove { } }
public T GetExportedValueOrDefault<T>() { throw null; }
public T GetExportedValueOrDefault<T>(string contractName) { throw null; }
public System.Collections.Generic.IEnumerable<T> GetExportedValues<T>() { throw null; }
public System.Collections.Generic.IEnumerable<T> GetExportedValues<T>(string contractName) { throw null; }
public T GetExportedValue<T>() { throw null; }
public T GetExportedValue<T>(string contractName) { throw null; }
public System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.Export> GetExports(System.ComponentModel.Composition.Primitives.ImportDefinition definition) { throw null; }
public System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.Export> GetExports(System.ComponentModel.Composition.Primitives.ImportDefinition definition, System.ComponentModel.Composition.Hosting.AtomicComposition atomicComposition) { throw null; }
public System.Collections.Generic.IEnumerable<System.Lazy<object, object>> GetExports(System.Type type, System.Type metadataViewType, string contractName) { throw null; }
protected abstract System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.Export> GetExportsCore(System.ComponentModel.Composition.Primitives.ImportDefinition definition, System.ComponentModel.Composition.Hosting.AtomicComposition atomicComposition);
public System.Collections.Generic.IEnumerable<System.Lazy<T>> GetExports<T>() { throw null; }
public System.Collections.Generic.IEnumerable<System.Lazy<T>> GetExports<T>(string contractName) { throw null; }
public System.Collections.Generic.IEnumerable<System.Lazy<T, TMetadataView>> GetExports<T, TMetadataView>() { throw null; }
public System.Collections.Generic.IEnumerable<System.Lazy<T, TMetadataView>> GetExports<T, TMetadataView>(string contractName) { throw null; }
public System.Lazy<T> GetExport<T>() { throw null; }
public System.Lazy<T> GetExport<T>(string contractName) { throw null; }
public System.Lazy<T, TMetadataView> GetExport<T, TMetadataView>() { throw null; }
public System.Lazy<T, TMetadataView> GetExport<T, TMetadataView>(string contractName) { throw null; }
protected virtual void OnExportsChanged(System.ComponentModel.Composition.Hosting.ExportsChangeEventArgs e) { }
protected virtual void OnExportsChanging(System.ComponentModel.Composition.Hosting.ExportsChangeEventArgs e) { }
public bool TryGetExports(System.ComponentModel.Composition.Primitives.ImportDefinition definition, System.ComponentModel.Composition.Hosting.AtomicComposition atomicComposition, out System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.Export> exports) { throw null; }
}
public partial class ExportsChangeEventArgs : System.EventArgs
{
public ExportsChangeEventArgs(System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ExportDefinition> addedExports, System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ExportDefinition> removedExports, System.ComponentModel.Composition.Hosting.AtomicComposition atomicComposition) { }
public System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ExportDefinition> AddedExports { get { throw null; } }
public System.ComponentModel.Composition.Hosting.AtomicComposition AtomicComposition { get { throw null; } }
public System.Collections.Generic.IEnumerable<string> ChangedContractNames { get { throw null; } }
public System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ExportDefinition> RemovedExports { get { throw null; } }
}
public partial class FilteredCatalog : System.ComponentModel.Composition.Primitives.ComposablePartCatalog, System.ComponentModel.Composition.Hosting.INotifyComposablePartCatalogChanged
{
public FilteredCatalog(System.ComponentModel.Composition.Primitives.ComposablePartCatalog catalog, System.Func<System.ComponentModel.Composition.Primitives.ComposablePartDefinition, bool> filter) { }
public System.ComponentModel.Composition.Hosting.FilteredCatalog Complement { get { throw null; } }
public event System.EventHandler<System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs> Changed { add { } remove { } }
public event System.EventHandler<System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs> Changing { add { } remove { } }
protected override void Dispose(bool disposing) { }
public override System.Collections.Generic.IEnumerator<System.ComponentModel.Composition.Primitives.ComposablePartDefinition> GetEnumerator() { throw null; }
public override System.Collections.Generic.IEnumerable<System.Tuple<System.ComponentModel.Composition.Primitives.ComposablePartDefinition, System.ComponentModel.Composition.Primitives.ExportDefinition>> GetExports(System.ComponentModel.Composition.Primitives.ImportDefinition definition) { throw null; }
public System.ComponentModel.Composition.Hosting.FilteredCatalog IncludeDependencies() { throw null; }
public System.ComponentModel.Composition.Hosting.FilteredCatalog IncludeDependencies(System.Func<System.ComponentModel.Composition.Primitives.ImportDefinition, bool> importFilter) { throw null; }
public System.ComponentModel.Composition.Hosting.FilteredCatalog IncludeDependents() { throw null; }
public System.ComponentModel.Composition.Hosting.FilteredCatalog IncludeDependents(System.Func<System.ComponentModel.Composition.Primitives.ImportDefinition, bool> importFilter) { throw null; }
protected virtual void OnChanged(System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs e) { }
protected virtual void OnChanging(System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs e) { }
}
public partial class ImportEngine : System.ComponentModel.Composition.ICompositionService, System.IDisposable
{
public ImportEngine(System.ComponentModel.Composition.Hosting.ExportProvider sourceProvider) { }
public ImportEngine(System.ComponentModel.Composition.Hosting.ExportProvider sourceProvider, bool isThreadSafe) { }
public ImportEngine(System.ComponentModel.Composition.Hosting.ExportProvider sourceProvider, System.ComponentModel.Composition.Hosting.CompositionOptions compositionOptions) { }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
public void PreviewImports(System.ComponentModel.Composition.Primitives.ComposablePart part, System.ComponentModel.Composition.Hosting.AtomicComposition atomicComposition) { }
public void ReleaseImports(System.ComponentModel.Composition.Primitives.ComposablePart part, System.ComponentModel.Composition.Hosting.AtomicComposition atomicComposition) { }
public void SatisfyImports(System.ComponentModel.Composition.Primitives.ComposablePart part) { }
public void SatisfyImportsOnce(System.ComponentModel.Composition.Primitives.ComposablePart part) { }
}
public partial interface INotifyComposablePartCatalogChanged
{
event System.EventHandler<System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs> Changed;
event System.EventHandler<System.ComponentModel.Composition.Hosting.ComposablePartCatalogChangeEventArgs> Changing;
}
public static partial class ScopingExtensions
{
public static bool ContainsPartMetadataWithKey(this System.ComponentModel.Composition.Primitives.ComposablePartDefinition part, string key) { throw null; }
public static bool ContainsPartMetadata<T>(this System.ComponentModel.Composition.Primitives.ComposablePartDefinition part, string key, T value) { throw null; }
public static bool Exports(this System.ComponentModel.Composition.Primitives.ComposablePartDefinition part, string contractName) { throw null; }
public static System.ComponentModel.Composition.Hosting.FilteredCatalog Filter(this System.ComponentModel.Composition.Primitives.ComposablePartCatalog catalog, System.Func<System.ComponentModel.Composition.Primitives.ComposablePartDefinition, bool> filter) { throw null; }
public static bool Imports(this System.ComponentModel.Composition.Primitives.ComposablePartDefinition part, string contractName) { throw null; }
public static bool Imports(this System.ComponentModel.Composition.Primitives.ComposablePartDefinition part, string contractName, System.ComponentModel.Composition.Primitives.ImportCardinality importCardinality) { throw null; }
}
public partial class TypeCatalog : System.ComponentModel.Composition.Primitives.ComposablePartCatalog, System.ComponentModel.Composition.Primitives.ICompositionElement
{
public TypeCatalog(System.Collections.Generic.IEnumerable<System.Type> types) { }
public TypeCatalog(System.Collections.Generic.IEnumerable<System.Type> types, System.ComponentModel.Composition.Primitives.ICompositionElement definitionOrigin) { }
public TypeCatalog(System.Collections.Generic.IEnumerable<System.Type> types, System.Reflection.ReflectionContext reflectionContext) { }
public TypeCatalog(System.Collections.Generic.IEnumerable<System.Type> types, System.Reflection.ReflectionContext reflectionContext, System.ComponentModel.Composition.Primitives.ICompositionElement definitionOrigin) { }
public TypeCatalog(params System.Type[] types) { }
string System.ComponentModel.Composition.Primitives.ICompositionElement.DisplayName { get { throw null; } }
System.ComponentModel.Composition.Primitives.ICompositionElement System.ComponentModel.Composition.Primitives.ICompositionElement.Origin { get { throw null; } }
protected override void Dispose(bool disposing) { }
public override System.Collections.Generic.IEnumerator<System.ComponentModel.Composition.Primitives.ComposablePartDefinition> GetEnumerator() { throw null; }
public override string ToString() { throw null; }
}
}
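// A minimal hosting sketch (not part of this reference source) showing how the catalog types
// above are commonly combined: concrete catalogs aggregated in an AggregateCatalog, the result
// filtered via ScopingExtensions.Filter, and the filtered catalog handed to a thread-safe
// CompositionContainer. The "plugins" directory and the "Enabled" metadata key are placeholders.
namespace System.ComponentModel.Composition.Hosting.Samples
{
    using System.ComponentModel.Composition.Hosting;
    using System.Reflection;

    public static class CatalogSample
    {
        public static CompositionContainer BuildContainer()
        {
            // combine parts from the executing assembly with parts discovered in a plugin folder
            var aggregate = new AggregateCatalog(
                new AssemblyCatalog(Assembly.GetExecutingAssembly()),
                new DirectoryCatalog("plugins", "*.dll"));

            // keep only parts that carry an "Enabled" entry in their part metadata
            var filtered = aggregate.Filter(definition => definition.ContainsPartMetadataWithKey("Enabled"));

            // CompositionOptions.IsThreadSafe makes the container safe for concurrent use
            return new CompositionContainer(filtered, CompositionOptions.IsThreadSafe);
        }
    }
}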
namespace System.ComponentModel.Composition.Primitives
{
public abstract partial class ComposablePart
{
protected ComposablePart() { }
public abstract System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ExportDefinition> ExportDefinitions { get; }
public abstract System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ImportDefinition> ImportDefinitions { get; }
public virtual System.Collections.Generic.IDictionary<string, object> Metadata { get { throw null; } }
public virtual void Activate() { }
public abstract object GetExportedValue(System.ComponentModel.Composition.Primitives.ExportDefinition definition);
public abstract void SetImport(System.ComponentModel.Composition.Primitives.ImportDefinition definition, System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.Export> exports);
}
public abstract partial class ComposablePartCatalog : System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ComposablePartDefinition>, System.Collections.IEnumerable, System.IDisposable
{
protected ComposablePartCatalog() { }
[System.ComponentModel.EditorBrowsableAttribute((System.ComponentModel.EditorBrowsableState)(1))]
public virtual System.Linq.IQueryable<System.ComponentModel.Composition.Primitives.ComposablePartDefinition> Parts { get { throw null; } }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
public virtual System.Collections.Generic.IEnumerator<System.ComponentModel.Composition.Primitives.ComposablePartDefinition> GetEnumerator() { throw null; }
public virtual System.Collections.Generic.IEnumerable<System.Tuple<System.ComponentModel.Composition.Primitives.ComposablePartDefinition, System.ComponentModel.Composition.Primitives.ExportDefinition>> GetExports(System.ComponentModel.Composition.Primitives.ImportDefinition definition) { throw null; }
System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { throw null; }
}
public abstract partial class ComposablePartDefinition
{
protected ComposablePartDefinition() { }
public abstract System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ExportDefinition> ExportDefinitions { get; }
public abstract System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ImportDefinition> ImportDefinitions { get; }
public virtual System.Collections.Generic.IDictionary<string, object> Metadata { get { throw null; } }
public abstract System.ComponentModel.Composition.Primitives.ComposablePart CreatePart();
}
public partial class ComposablePartException : System.Exception
{
public ComposablePartException() { }
public ComposablePartException(string message) { }
public ComposablePartException(string message, System.ComponentModel.Composition.Primitives.ICompositionElement element) { }
public ComposablePartException(string message, System.ComponentModel.Composition.Primitives.ICompositionElement element, System.Exception innerException) { }
public ComposablePartException(string message, System.Exception innerException) { }
public System.ComponentModel.Composition.Primitives.ICompositionElement Element { get { throw null; } }
}
public partial class ContractBasedImportDefinition : System.ComponentModel.Composition.Primitives.ImportDefinition
{
protected ContractBasedImportDefinition() { }
public ContractBasedImportDefinition(string contractName, string requiredTypeIdentity, System.Collections.Generic.IEnumerable<System.Collections.Generic.KeyValuePair<string, System.Type>> requiredMetadata, System.ComponentModel.Composition.Primitives.ImportCardinality cardinality, bool isRecomposable, bool isPrerequisite, System.ComponentModel.Composition.CreationPolicy requiredCreationPolicy) { }
public ContractBasedImportDefinition(string contractName, string requiredTypeIdentity, System.Collections.Generic.IEnumerable<System.Collections.Generic.KeyValuePair<string, System.Type>> requiredMetadata, System.ComponentModel.Composition.Primitives.ImportCardinality cardinality, bool isRecomposable, bool isPrerequisite, System.ComponentModel.Composition.CreationPolicy requiredCreationPolicy, System.Collections.Generic.IDictionary<string, object> metadata) { }
public override System.Linq.Expressions.Expression<System.Func<System.ComponentModel.Composition.Primitives.ExportDefinition, bool>> Constraint { get { throw null; } }
public virtual System.ComponentModel.Composition.CreationPolicy RequiredCreationPolicy { get { throw null; } }
public virtual System.Collections.Generic.IEnumerable<System.Collections.Generic.KeyValuePair<string, System.Type>> RequiredMetadata { get { throw null; } }
public virtual string RequiredTypeIdentity { get { throw null; } }
public override bool IsConstraintSatisfiedBy(System.ComponentModel.Composition.Primitives.ExportDefinition exportDefinition) { throw null; }
public override string ToString() { throw null; }
}
public partial class Export
{
protected Export() { }
public Export(System.ComponentModel.Composition.Primitives.ExportDefinition definition, System.Func<object> exportedValueGetter) { }
public Export(string contractName, System.Collections.Generic.IDictionary<string, object> metadata, System.Func<object> exportedValueGetter) { }
public Export(string contractName, System.Func<object> exportedValueGetter) { }
public virtual System.ComponentModel.Composition.Primitives.ExportDefinition Definition { get { throw null; } }
public System.Collections.Generic.IDictionary<string, object> Metadata { get { throw null; } }
public object Value { get { throw null; } }
protected virtual object GetExportedValueCore() { throw null; }
}
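// Illustrative sketch (not part of the reference surface above): an Export pairs an
// ExportDefinition with a lazily evaluated value getter; the contract name and the
// ConsoleLogger type below are assumed values used only for this example.
//
//   var definition = new ExportDefinition("ILogger", new System.Collections.Generic.Dictionary<string, object>());
//   var export = new Export(definition, () => new ConsoleLogger());
//   object value = export.Value;   // the getter runs when Value is first read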
public partial class ExportDefinition
{
protected ExportDefinition() { }
public ExportDefinition(string contractName, System.Collections.Generic.IDictionary<string, object> metadata) { }
public virtual string ContractName { get { throw null; } }
public virtual System.Collections.Generic.IDictionary<string, object> Metadata { get { throw null; } }
public override string ToString() { throw null; }
}
public partial class ExportedDelegate
{
protected ExportedDelegate() { }
public ExportedDelegate(object instance, System.Reflection.MethodInfo method) { }
public virtual System.Delegate CreateDelegate(System.Type delegateType) { throw null; }
}
public partial interface ICompositionElement
{
string DisplayName { get; }
System.ComponentModel.Composition.Primitives.ICompositionElement Origin { get; }
}
public enum ImportCardinality
{
ExactlyOne = 1,
ZeroOrMore = 2,
ZeroOrOne = 0,
}
public partial class ImportDefinition
{
protected ImportDefinition() { }
public ImportDefinition(System.Linq.Expressions.Expression<System.Func<System.ComponentModel.Composition.Primitives.ExportDefinition, bool>> constraint, string contractName, System.ComponentModel.Composition.Primitives.ImportCardinality cardinality, bool isRecomposable, bool isPrerequisite) { }
public ImportDefinition(System.Linq.Expressions.Expression<System.Func<System.ComponentModel.Composition.Primitives.ExportDefinition, bool>> constraint, string contractName, System.ComponentModel.Composition.Primitives.ImportCardinality cardinality, bool isRecomposable, bool isPrerequisite, System.Collections.Generic.IDictionary<string, object> metadata) { }
public virtual System.ComponentModel.Composition.Primitives.ImportCardinality Cardinality { get { throw null; } }
public virtual System.Linq.Expressions.Expression<System.Func<System.ComponentModel.Composition.Primitives.ExportDefinition, bool>> Constraint { get { throw null; } }
public virtual string ContractName { get { throw null; } }
public virtual bool IsPrerequisite { get { throw null; } }
public virtual bool IsRecomposable { get { throw null; } }
public virtual System.Collections.Generic.IDictionary<string, object> Metadata { get { throw null; } }
public virtual bool IsConstraintSatisfiedBy(System.ComponentModel.Composition.Primitives.ExportDefinition exportDefinition) { throw null; }
public override string ToString() { throw null; }
}
}
namespace System.ComponentModel.Composition.ReflectionModel
{
public partial struct LazyMemberInfo
{
private object _dummy;
public LazyMemberInfo(System.Reflection.MemberInfo member) { throw null; }
public LazyMemberInfo(System.Reflection.MemberTypes memberType, System.Func<System.Reflection.MemberInfo[]> accessorsCreator) { throw null; }
public LazyMemberInfo(System.Reflection.MemberTypes memberType, params System.Reflection.MemberInfo[] accessors) { throw null; }
public System.Reflection.MemberTypes MemberType { get { throw null; } }
public override bool Equals(object obj) { throw null; }
public System.Reflection.MemberInfo[] GetAccessors() { throw null; }
public override int GetHashCode() { throw null; }
public static bool operator ==(System.ComponentModel.Composition.ReflectionModel.LazyMemberInfo left, System.ComponentModel.Composition.ReflectionModel.LazyMemberInfo right) { throw null; }
public static bool operator !=(System.ComponentModel.Composition.ReflectionModel.LazyMemberInfo left, System.ComponentModel.Composition.ReflectionModel.LazyMemberInfo right) { throw null; }
}
public static partial class ReflectionModelServices
{
public static System.ComponentModel.Composition.Primitives.ExportDefinition CreateExportDefinition(System.ComponentModel.Composition.ReflectionModel.LazyMemberInfo exportingMember, string contractName, System.Lazy<System.Collections.Generic.IDictionary<string, object>> metadata, System.ComponentModel.Composition.Primitives.ICompositionElement origin) { throw null; }
public static System.ComponentModel.Composition.Primitives.ContractBasedImportDefinition CreateImportDefinition(System.ComponentModel.Composition.ReflectionModel.LazyMemberInfo importingMember, string contractName, string requiredTypeIdentity, System.Collections.Generic.IEnumerable<System.Collections.Generic.KeyValuePair<string, System.Type>> requiredMetadata, System.ComponentModel.Composition.Primitives.ImportCardinality cardinality, bool isRecomposable, bool isPreRequisite, System.ComponentModel.Composition.CreationPolicy requiredCreationPolicy, System.Collections.Generic.IDictionary<string, object> metadata, bool isExportFactory, System.ComponentModel.Composition.Primitives.ICompositionElement origin) { throw null; }
public static System.ComponentModel.Composition.Primitives.ContractBasedImportDefinition CreateImportDefinition(System.ComponentModel.Composition.ReflectionModel.LazyMemberInfo importingMember, string contractName, string requiredTypeIdentity, System.Collections.Generic.IEnumerable<System.Collections.Generic.KeyValuePair<string, System.Type>> requiredMetadata, System.ComponentModel.Composition.Primitives.ImportCardinality cardinality, bool isRecomposable, System.ComponentModel.Composition.CreationPolicy requiredCreationPolicy, System.Collections.Generic.IDictionary<string, object> metadata, bool isExportFactory, System.ComponentModel.Composition.Primitives.ICompositionElement origin) { throw null; }
public static System.ComponentModel.Composition.Primitives.ContractBasedImportDefinition CreateImportDefinition(System.ComponentModel.Composition.ReflectionModel.LazyMemberInfo importingMember, string contractName, string requiredTypeIdentity, System.Collections.Generic.IEnumerable<System.Collections.Generic.KeyValuePair<string, System.Type>> requiredMetadata, System.ComponentModel.Composition.Primitives.ImportCardinality cardinality, bool isRecomposable, System.ComponentModel.Composition.CreationPolicy requiredCreationPolicy, System.ComponentModel.Composition.Primitives.ICompositionElement origin) { throw null; }
public static System.ComponentModel.Composition.Primitives.ContractBasedImportDefinition CreateImportDefinition(System.Lazy<System.Reflection.ParameterInfo> parameter, string contractName, string requiredTypeIdentity, System.Collections.Generic.IEnumerable<System.Collections.Generic.KeyValuePair<string, System.Type>> requiredMetadata, System.ComponentModel.Composition.Primitives.ImportCardinality cardinality, System.ComponentModel.Composition.CreationPolicy requiredCreationPolicy, System.Collections.Generic.IDictionary<string, object> metadata, bool isExportFactory, System.ComponentModel.Composition.Primitives.ICompositionElement origin) { throw null; }
public static System.ComponentModel.Composition.Primitives.ContractBasedImportDefinition CreateImportDefinition(System.Lazy<System.Reflection.ParameterInfo> parameter, string contractName, string requiredTypeIdentity, System.Collections.Generic.IEnumerable<System.Collections.Generic.KeyValuePair<string, System.Type>> requiredMetadata, System.ComponentModel.Composition.Primitives.ImportCardinality cardinality, System.ComponentModel.Composition.CreationPolicy requiredCreationPolicy, System.ComponentModel.Composition.Primitives.ICompositionElement origin) { throw null; }
public static System.ComponentModel.Composition.Primitives.ComposablePartDefinition CreatePartDefinition(System.Lazy<System.Type> partType, bool isDisposalRequired, System.Lazy<System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ImportDefinition>> imports, System.Lazy<System.Collections.Generic.IEnumerable<System.ComponentModel.Composition.Primitives.ExportDefinition>> exports, System.Lazy<System.Collections.Generic.IDictionary<string, object>> metadata, System.ComponentModel.Composition.Primitives.ICompositionElement origin) { throw null; }
public static System.ComponentModel.Composition.Primitives.ContractBasedImportDefinition GetExportFactoryProductImportDefinition(System.ComponentModel.Composition.Primitives.ImportDefinition importDefinition) { throw null; }
public static System.ComponentModel.Composition.ReflectionModel.LazyMemberInfo GetExportingMember(System.ComponentModel.Composition.Primitives.ExportDefinition exportDefinition) { throw null; }
public static System.ComponentModel.Composition.ReflectionModel.LazyMemberInfo GetImportingMember(System.ComponentModel.Composition.Primitives.ImportDefinition importDefinition) { throw null; }
public static System.Lazy<System.Reflection.ParameterInfo> GetImportingParameter(System.ComponentModel.Composition.Primitives.ImportDefinition importDefinition) { throw null; }
public static System.Lazy<System.Type> GetPartType(System.ComponentModel.Composition.Primitives.ComposablePartDefinition partDefinition) { throw null; }
public static bool IsDisposalRequired(System.ComponentModel.Composition.Primitives.ComposablePartDefinition partDefinition) { throw null; }
public static bool IsExportFactoryImportDefinition(System.ComponentModel.Composition.Primitives.ImportDefinition importDefinition) { throw null; }
public static bool IsImportingParameter(System.ComponentModel.Composition.Primitives.ImportDefinition importDefinition) { throw null; }
public static bool TryMakeGenericPartDefinition(System.ComponentModel.Composition.Primitives.ComposablePartDefinition partDefinition, System.Collections.Generic.IEnumerable<System.Type> genericParameters, out System.ComponentModel.Composition.Primitives.ComposablePartDefinition specialization) { throw null; }
}
}
| |
using System;
using System.Data;
using System.Data.SqlClient;
using Csla;
using Csla.Data;
namespace ParentLoadROSoftDelete.Business.ERLevel
{
/// <summary>
/// E02Level1 (read only object).<br/>
/// This is a generated base class of <see cref="E02Level1"/> business object.
/// This class is a root object.
/// </summary>
/// <remarks>
/// This class contains one child collection:<br/>
/// - <see cref="E03Level11Objects"/> of type <see cref="E03Level11Coll"/> (1:M relation to <see cref="E04Level11"/>)
/// </remarks>
[Serializable]
public partial class E02Level1 : ReadOnlyBase<E02Level1>
{
#region Business Properties
/// <summary>
/// Maintains metadata about <see cref="Level_1_ID"/> property.
/// </summary>
public static readonly PropertyInfo<int> Level_1_IDProperty = RegisterProperty<int>(p => p.Level_1_ID, "Level_1 ID", -1);
/// <summary>
/// Gets the Level_1 ID.
/// </summary>
/// <value>The Level_1 ID.</value>
public int Level_1_ID
{
get { return GetProperty(Level_1_IDProperty); }
}
/// <summary>
/// Maintains metadata about <see cref="Level_1_Name"/> property.
/// </summary>
public static readonly PropertyInfo<string> Level_1_NameProperty = RegisterProperty<string>(p => p.Level_1_Name, "Level_1 Name");
/// <summary>
/// Gets the Level_1 Name.
/// </summary>
/// <value>The Level_1 Name.</value>
public string Level_1_Name
{
get { return GetProperty(Level_1_NameProperty); }
}
/// <summary>
/// Maintains metadata about child <see cref="E03Level11SingleObject"/> property.
/// </summary>
public static readonly PropertyInfo<E03Level11Child> E03Level11SingleObjectProperty = RegisterProperty<E03Level11Child>(p => p.E03Level11SingleObject, "A3 Level11 Single Object");
/// <summary>
/// Gets the E03 Level11 Single Object ("parent load" child property).
/// </summary>
/// <value>The E03 Level11 Single Object.</value>
public E03Level11Child E03Level11SingleObject
{
get { return GetProperty(E03Level11SingleObjectProperty); }
private set { LoadProperty(E03Level11SingleObjectProperty, value); }
}
/// <summary>
/// Maintains metadata about child <see cref="E03Level11ASingleObject"/> property.
/// </summary>
public static readonly PropertyInfo<E03Level11ReChild> E03Level11ASingleObjectProperty = RegisterProperty<E03Level11ReChild>(p => p.E03Level11ASingleObject, "A3 Level11 ASingle Object");
/// <summary>
/// Gets the E03 Level11 ASingle Object ("parent load" child property).
/// </summary>
/// <value>The E03 Level11 ASingle Object.</value>
public E03Level11ReChild E03Level11ASingleObject
{
get { return GetProperty(E03Level11ASingleObjectProperty); }
private set { LoadProperty(E03Level11ASingleObjectProperty, value); }
}
/// <summary>
/// Maintains metadata about child <see cref="E03Level11Objects"/> property.
/// </summary>
public static readonly PropertyInfo<E03Level11Coll> E03Level11ObjectsProperty = RegisterProperty<E03Level11Coll>(p => p.E03Level11Objects, "A3 Level11 Objects");
/// <summary>
/// Gets the E03 Level11 Objects ("parent load" child property).
/// </summary>
/// <value>The E03 Level11 Objects.</value>
public E03Level11Coll E03Level11Objects
{
get { return GetProperty(E03Level11ObjectsProperty); }
private set { LoadProperty(E03Level11ObjectsProperty, value); }
}
#endregion
#region Factory Methods
/// <summary>
/// Factory method. Loads a <see cref="E02Level1"/> object, based on given parameters.
/// </summary>
/// <param name="level_1_ID">The Level_1_ID parameter of the E02Level1 to fetch.</param>
/// <returns>A reference to the fetched <see cref="E02Level1"/> object.</returns>
public static E02Level1 GetE02Level1(int level_1_ID)
{
return DataPortal.Fetch<E02Level1>(level_1_ID);
}
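// Illustrative usage (assumed: a configured CSLA data portal and the "DeepLoad"
// database connection referenced in DataPortal_Fetch below; 42 is a hypothetical Level_1_ID):
//
//   var level1 = E02Level1.GetE02Level1(42);
//   Console.WriteLine(level1.Level_1_Name);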
#endregion
#region Constructor
/// <summary>
/// Initializes a new instance of the <see cref="E02Level1"/> class.
/// </summary>
/// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
private E02Level1()
{
// Prevent direct creation
}
#endregion
#region Data Access
/// <summary>
/// Loads a <see cref="E02Level1"/> object from the database, based on given criteria.
/// </summary>
/// <param name="level_1_ID">The Level_1 ID.</param>
protected void DataPortal_Fetch(int level_1_ID)
{
using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
{
using (var cmd = new SqlCommand("GetE02Level1", ctx.Connection))
{
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("@Level_1_ID", level_1_ID).DbType = DbType.Int32;
var args = new DataPortalHookArgs(cmd, level_1_ID);
OnFetchPre(args);
Fetch(cmd);
OnFetchPost(args);
}
}
}
private void Fetch(SqlCommand cmd)
{
using (var dr = new SafeDataReader(cmd.ExecuteReader()))
{
if (dr.Read())
{
Fetch(dr);
FetchChildren(dr);
}
BusinessRules.CheckRules();
}
}
/// <summary>
/// Loads a <see cref="E02Level1"/> object from the given SafeDataReader.
/// </summary>
/// <param name="dr">The SafeDataReader to use.</param>
private void Fetch(SafeDataReader dr)
{
// Value properties
LoadProperty(Level_1_IDProperty, dr.GetInt32("Level_1_ID"));
LoadProperty(Level_1_NameProperty, dr.GetString("Level_1_Name"));
var args = new DataPortalHookArgs(dr);
OnFetchRead(args);
}
/// <summary>
/// Loads child objects from the given SafeDataReader.
/// </summary>
/// <param name="dr">The SafeDataReader to use.</param>
private void FetchChildren(SafeDataReader dr)
{
dr.NextResult();
if (dr.Read())
LoadProperty(E03Level11SingleObjectProperty, E03Level11Child.GetE03Level11Child(dr));
dr.NextResult();
if (dr.Read())
LoadProperty(E03Level11ASingleObjectProperty, E03Level11ReChild.GetE03Level11ReChild(dr));
dr.NextResult();
LoadProperty(E03Level11ObjectsProperty, E03Level11Coll.GetE03Level11Coll(dr));
dr.NextResult();
while (dr.Read())
{
var child = E05Level111Child.GetE05Level111Child(dr);
var obj = E03Level11Objects.FindE04Level11ByParentProperties(child.cMarentID1);
obj.LoadChild(child);
}
dr.NextResult();
while (dr.Read())
{
var child = E05Level111ReChild.GetE05Level111ReChild(dr);
var obj = E03Level11Objects.FindE04Level11ByParentProperties(child.cMarentID2);
obj.LoadChild(child);
}
dr.NextResult();
var e05Level111Coll = E05Level111Coll.GetE05Level111Coll(dr);
e05Level111Coll.LoadItems(E03Level11Objects);
dr.NextResult();
while (dr.Read())
{
var child = E07Level1111Child.GetE07Level1111Child(dr);
var obj = e05Level111Coll.FindE06Level111ByParentProperties(child.cLarentID1);
obj.LoadChild(child);
}
dr.NextResult();
while (dr.Read())
{
var child = E07Level1111ReChild.GetE07Level1111ReChild(dr);
var obj = e05Level111Coll.FindE06Level111ByParentProperties(child.cLarentID2);
obj.LoadChild(child);
}
dr.NextResult();
var e07Level1111Coll = E07Level1111Coll.GetE07Level1111Coll(dr);
e07Level1111Coll.LoadItems(e05Level111Coll);
dr.NextResult();
while (dr.Read())
{
var child = E09Level11111Child.GetE09Level11111Child(dr);
var obj = e07Level1111Coll.FindE08Level1111ByParentProperties(child.cNarentID1);
obj.LoadChild(child);
}
dr.NextResult();
while (dr.Read())
{
var child = E09Level11111ReChild.GetE09Level11111ReChild(dr);
var obj = e07Level1111Coll.FindE08Level1111ByParentProperties(child.cNarentID2);
obj.LoadChild(child);
}
dr.NextResult();
var e09Level11111Coll = E09Level11111Coll.GetE09Level11111Coll(dr);
e09Level11111Coll.LoadItems(e07Level1111Coll);
dr.NextResult();
while (dr.Read())
{
var child = E11Level111111Child.GetE11Level111111Child(dr);
var obj = e09Level11111Coll.FindE10Level11111ByParentProperties(child.cQarentID1);
obj.LoadChild(child);
}
dr.NextResult();
while (dr.Read())
{
var child = E11Level111111ReChild.GetE11Level111111ReChild(dr);
var obj = e09Level11111Coll.FindE10Level11111ByParentProperties(child.cQarentID2);
obj.LoadChild(child);
}
dr.NextResult();
var e11Level111111Coll = E11Level111111Coll.GetE11Level111111Coll(dr);
e11Level111111Coll.LoadItems(e09Level11111Coll);
}
#endregion
#region Pseudo Events
/// <summary>
/// Occurs after setting query parameters and before the fetch operation.
/// </summary>
partial void OnFetchPre(DataPortalHookArgs args);
/// <summary>
/// Occurs after the fetch operation (object or collection is fully loaded and set up).
/// </summary>
partial void OnFetchPost(DataPortalHookArgs args);
/// <summary>
/// Occurs after the low level fetch operation, before the data reader is destroyed.
/// </summary>
partial void OnFetchRead(DataPortalHookArgs args);
#endregion
}
}
| |
using System;
using System.IO;
using System.Linq;
using Crane.Core.Api;
using Crane.Core.Api.Model;
using Crane.Core.Configuration;
using Crane.Core.Runners;
using Crane.Tests.Common;
using Crane.Tests.Common.Context;
using Crane.Tests.Common.Runners;
using FluentAssertions;
using Xbehave;
namespace Crane.Integration.Tests.Features.Api
{
public class PatchAssemblyInfoFeature
{
[ScenarioIgnoreOnMono("suspect fubucsprojfile does not work on mono")]
public void patch_assembly_info(ICraneApi craneApi,
SolutionBuilderContext context, ISolutionContext solutionContext, Project project, AssemblyInfo updatedInfo, string updatedRawInfo)
{
"Given I have a crane api"
._(() => craneApi = ServiceLocator.Resolve<CraneApi>());
"And I have a solution builder context"
._(() => context = ServiceLocator.Resolve<SolutionBuilderContext>());
"And I have a solution with a project and an assembly info file"
._(() => solutionContext = context.CreateBuilder()
.WithSolution(item => item.Path = Path.Combine(context.RootDirectory, "Sally.sln"))
.WithProject(item => item.Name = "FrodoFx")
.WithFile<AssemblyInfo>(item =>
{
item.Title = "Sally";
item.Description = "Next generation web server";
item.Version = new Version(0, 0, 0, 1);
item.FileVersion = new Version(0, 0, 0, 2);
item.InformationalVersion = "RELEASE";
}).Build());
"And I have an updated assembly info with different version, file number and informational attribute"
._(() =>
{
updatedInfo = solutionContext.Solution.Projects.First().AssemblyInfo;
updatedInfo.Version = new Version(0, 1, 0, 0);
updatedInfo.FileVersion = new Version(0, 1, 20);
updatedInfo.InformationalVersion = "DEBUG";
});
"When I patch the assemble info"
._(() =>
{
craneApi.PatchAssemblyInfo(updatedInfo);
solutionContext = craneApi.GetSolutionContext(solutionContext.Path); // reload to get updated model
project = solutionContext.Solution.Projects.First();
updatedRawInfo = File.ReadAllText(project.AssemblyInfo.Path);
});
"Then it should update the assembly file version"
._(() =>
{
updatedRawInfo.Should().Contain("[assembly: AssemblyFileVersionAttribute(\"0.1.20\")]");
project.AssemblyInfo.FileVersion.Should().Be(new Version(0, 1, 20));
});
"Then it should update the informational version attribute"
._(() =>
{
updatedRawInfo.Should().Contain("[assembly: AssemblyInformationalVersionAttribute(\"DEBUG\")]");
project.AssemblyInfo.InformationalVersion.Should().Be("DEBUG");
});
"Then it should update the assembly version"
._(() =>
{
updatedRawInfo.Should().Contain("[assembly: AssemblyVersionAttribute(\"0.1.0.0\")]");
project.AssemblyInfo.Version.Should().Be(new Version(0, 1, 0, 0));
});
"Then it not update the assembly title as it was not changed"
._(() =>
{
updatedRawInfo.Should().Contain("[assembly: AssemblyTitleAttribute(\"Sally\")]");
project.AssemblyInfo.Title.Should().Be("Sally");
})
.Teardown(() => context.TearDown());
}
[ScenarioIgnoreOnMono("suspect fubucsprojfile does not work on mono")]
public void patch_solution_assembly_info(ICraneApi craneApi,
SolutionBuilderContext context, ISolutionContext solutionContext, Project project,
AssemblyInfo updatedInfo, string updatedRawInfo)
{
"Given I have a crane api"
._(() => craneApi = ServiceLocator.Resolve<CraneApi>());
"And I have a solution builder context"
._(() => context = ServiceLocator.Resolve<SolutionBuilderContext>());
"And I have a solution with two projects"
._(() => context.CreateBuilder()
.WithSolution(item => item.Path = Path.Combine(context.RootDirectory, "Sally.sln"))
.WithProject(item => item.Name = "FrodoFx")
.WithFile<AssemblyInfo>(item =>
{
item.Title = "Sally";
item.Description = "Next generation web server";
item.Version = new Version(0, 0, 0, 1);
item.FileVersion = new Version(0, 0, 0, 2);
item.InformationalVersion = "RELEASE";
})
.WithProject(item => item.Name = "BobFx")
.WithFile<AssemblyInfo>(item =>
{
item.Title = "Bob";
item.Description = "Old school";
item.Version = new Version(0, 0, 0, 1);
item.FileVersion = new Version(0, 0, 0, 2);
item.InformationalVersion = "RELEASE";
}).Build());
"And I have got the solution context"
._(() => solutionContext = craneApi.GetSolutionContext(context.RootDirectory));
"When I path the solution assembly info for all projects"
._(() => craneApi.PatchSolutionAssemblyInfo(solutionContext, "1.2.3.4"));
"Then file version should be set to 1.2.3.4 in all assembly info files"
._(() =>
{
solutionContext = craneApi.GetSolutionContext(context.RootDirectory);
solutionContext.Solution.Projects.All(p => p.AssemblyInfo.FileVersion.ToString() == "1.2.3.4")
.Should()
.BeTrue();
});
"Then version should be set to 1.2.3.4 in all assembly info files"
._(() => solutionContext.Solution.Projects.All(p => p.AssemblyInfo.Version.ToString() == "1.2.3.4")
.Should()
.BeTrue());
"Then file informational version should be set to 1.2.3.4"
._(() => solutionContext.Solution.Projects.All(p => p.AssemblyInfo.InformationalVersion.ToString() == "1.2.3.4")
.Should()
.BeTrue()
).Teardown(() => context.TearDown());
}
[ScenarioIgnoreOnMono("suspect fubucsprojfile does not work on mono")]
public void patch_solution_assembly_info_when_project_in_git(ICraneApi craneApi,
SolutionBuilderContext context, ISolutionContext solutionContext, Project project,
AssemblyInfo updatedInfo, string updatedRawInfo, Git git)
{
"Given I have a crane api"
._(() => craneApi = ServiceLocator.Resolve<CraneApi>());
"And I have a solution builder context"
._(() => context = ServiceLocator.Resolve<SolutionBuilderContext>());
"And I have a solution with two projects"
._(() => context.CreateBuilder()
.WithSolution(item => item.Path = Path.Combine(context.RootDirectory, "Sally.sln"))
.WithProject(item => item.Name = "FrodoFx")
.WithFile<AssemblyInfo>(item =>
{
item.Title = "Sally";
item.Description = "Next generation web server";
item.Version = new Version(0, 0, 0, 1);
item.FileVersion = new Version(0, 0, 0, 2);
item.InformationalVersion = "RELEASE";
})
.WithProject(item => item.Name = "BobFx")
.WithFile<AssemblyInfo>(item =>
{
item.Title = "Bob";
item.Description = "Old school";
item.Version = new Version(0, 0, 0, 1);
item.FileVersion = new Version(0, 0, 0, 2);
item.InformationalVersion = "RELEASE";
}).Build());
"And I have got the solution context"
._(() => solutionContext = craneApi.GetSolutionContext(context.RootDirectory));
"And I initialize that as a git repository"
._(() =>
{
git = ServiceLocator.Resolve<Git>();
git.Run("init", context.RootDirectory).ErrorOutput.Should().BeEmpty();
git.Run("config user.email [email protected]", context.RootDirectory).ErrorOutput.Should().BeEmpty();
});
"And I have a previous commit"
._(() =>
{
git.Run("add -A", context.RootDirectory).ErrorOutput.Should().BeEmpty();
git.Run("config user.email [email protected]", context.RootDirectory).ErrorOutput.Should().BeEmpty();
git.Run("commit -m \"My brand new project\"", context.RootDirectory).ErrorOutput.Should().BeEmpty();
});
"When I path the solution assembly info for all projects"
._(() => craneApi.PatchSolutionAssemblyInfo(solutionContext, "1.2.3.4"));
"Then file version should be set to 1.2.3.4 in all assembly info files"
._(() =>
{
solutionContext = craneApi.GetSolutionContext(context.RootDirectory);
solutionContext.Solution.Projects.All(p => p.AssemblyInfo.FileVersion.ToString() == "1.2.3.4")
.Should()
.BeTrue();
});
"Then version should be set to 1.2.3.4 in all assembly info files"
._(() => solutionContext.Solution.Projects.All(p => p.AssemblyInfo.Version.ToString() == "1.2.3.4")
.Should()
.BeTrue());
"Then file informational version should start with 1.2.3.4"
._(() => solutionContext.Solution.Projects.All(p => p.AssemblyInfo.InformationalVersion.ToString().StartsWith("1.2.3.4"))
.Should()
.BeTrue());
"Then file informational version should end with the commit message 'My brand new project'"
._(() => solutionContext.Solution.Projects.All(p => p.AssemblyInfo.InformationalVersion.ToString().EndsWith("My brand new project"))
.Should()
.BeTrue())
.Teardown(() => context.TearDown());
}
[ScenarioIgnoreOnMono("suspect fubucsprojfile does not work on mono")]
public void patch_solution_assembly_should_only_patch_non_test_projects(CraneRunner craneRunner, RunResult result, CraneTestContext craneTestContext,
ISolutionContext solutionContext, ICraneApi craneApi, AssemblyInfo originalAssemblyInfo)
{
"Given I have my own private copy of the crane console"
._(() => craneTestContext = ServiceLocator.Resolve<CraneTestContext>());
"And I have a run context"
._(() => craneRunner = new CraneRunner());
"And I have an instance of the crane api"
._(() => craneApi = ServiceLocator.Resolve<ICraneApi>());
"And I have run crane init ServiceStack"
._(() => result = craneRunner.Command(craneTestContext.BuildOutputDirectory, "crane init ServiceStack"));
"And I have got the solution context using the api"
._(() =>
{
craneApi = ServiceLocator.Resolve<ICraneApi>();
solutionContext = craneApi.GetSolutionContext(Path.Combine(craneTestContext.BuildOutputDirectory, "ServiceStack"));
originalAssemblyInfo =
solutionContext.Solution.Projects.First(p => p.Name == "ServiceStack.UnitTests").AssemblyInfo;
});
"And I have created one a solution with one code project called ServiceStack"
._(() => solutionContext.Solution.Projects.First(p => p.Name == "ServiceStack").TestProject.Should().BeFalse());
"And I have created one a solution with one test project called ServiceStack.UnitTests"
._(() => solutionContext.Solution.Projects.First(p => p.Name == "ServiceStack.UnitTests").TestProject.Should().BeTrue());
"When I path the solution assembly info for all projects"
._(() =>
{
craneApi.PatchSolutionAssemblyInfo(solutionContext, "1.2.3.4");
solutionContext = craneApi.GetSolutionContext(Path.Combine(craneTestContext.BuildOutputDirectory, "ServiceStack"));
});
"It should patch the code project's assemblyinfo version"
._(() => solutionContext.Solution.Projects.First(p => !p.TestProject)
.AssemblyInfo.Version.ToString().Should().Be("1.2.3.4"));
"It should patch the code project's assemblyinfo file version"
._(() => solutionContext.Solution.Projects.First(p => !p.TestProject)
.AssemblyInfo.FileVersion.ToString().Should().Be("1.2.3.4"));
"It should patch the code project's assemblyinfo file informational version"
._(() => solutionContext.Solution.Projects.First(p => !p.TestProject)
.AssemblyInfo.InformationalVersion.Should().Be("1.2.3.4"));
"It should not patch the test project's assemblyinfo version"
._(() => solutionContext.Solution.Projects.First(p => p.TestProject)
.AssemblyInfo.Version.Should().Be(originalAssemblyInfo.Version));
"It should not patch the test project's assemblyinfo file version"
._(() => solutionContext.Solution.Projects.First(p => p.TestProject)
.AssemblyInfo.FileVersion.Should().Be(originalAssemblyInfo.FileVersion));
"It should not patch the test project's file informational version"
._(() => solutionContext.Solution.Projects.First(p => p.TestProject)
.AssemblyInfo.InformationalVersion.Should().Be(originalAssemblyInfo.InformationalVersion))
.Teardown(() => craneTestContext.TearDown());
}
}
}
| |
using System;
using System.Data;
using System.Data.SqlClient;
using Csla;
using Csla.Data;
namespace ParentLoadSoftDelete.Business.ERLevel
{
/// <summary>
/// E08_Region (editable child object).<br/>
/// This is a generated base class of <see cref="E08_Region"/> business object.
/// </summary>
/// <remarks>
/// This class contains one child collection:<br/>
/// - <see cref="E09_CityObjects"/> of type <see cref="E09_CityColl"/> (1:M relation to <see cref="E10_City"/>)<br/>
/// This class is an item of <see cref="E07_RegionColl"/> collection.
/// </remarks>
[Serializable]
public partial class E08_Region : BusinessBase<E08_Region>
{
#region Static Fields
private static int _lastID;
#endregion
#region State Fields
[NotUndoable]
[NonSerialized]
internal int parent_Country_ID = 0;
#endregion
#region Business Properties
/// <summary>
/// Maintains metadata about <see cref="Region_ID"/> property.
/// </summary>
public static readonly PropertyInfo<int> Region_IDProperty = RegisterProperty<int>(p => p.Region_ID, "Regions ID");
/// <summary>
/// Gets the Regions ID.
/// </summary>
/// <value>The Regions ID.</value>
public int Region_ID
{
get { return GetProperty(Region_IDProperty); }
}
/// <summary>
/// Maintains metadata about <see cref="Region_Name"/> property.
/// </summary>
public static readonly PropertyInfo<string> Region_NameProperty = RegisterProperty<string>(p => p.Region_Name, "Regions Name");
/// <summary>
/// Gets or sets the Regions Name.
/// </summary>
/// <value>The Regions Name.</value>
public string Region_Name
{
get { return GetProperty(Region_NameProperty); }
set { SetProperty(Region_NameProperty, value); }
}
/// <summary>
/// Maintains metadata about child <see cref="E09_Region_SingleObject"/> property.
/// </summary>
public static readonly PropertyInfo<E09_Region_Child> E09_Region_SingleObjectProperty = RegisterProperty<E09_Region_Child>(p => p.E09_Region_SingleObject, "E09 Region Single Object", RelationshipTypes.Child);
/// <summary>
/// Gets the E09 Region Single Object ("parent load" child property).
/// </summary>
/// <value>The E09 Region Single Object.</value>
public E09_Region_Child E09_Region_SingleObject
{
get { return GetProperty(E09_Region_SingleObjectProperty); }
private set { LoadProperty(E09_Region_SingleObjectProperty, value); }
}
/// <summary>
/// Maintains metadata about child <see cref="E09_Region_ASingleObject"/> property.
/// </summary>
public static readonly PropertyInfo<E09_Region_ReChild> E09_Region_ASingleObjectProperty = RegisterProperty<E09_Region_ReChild>(p => p.E09_Region_ASingleObject, "E09 Region ASingle Object", RelationshipTypes.Child);
/// <summary>
/// Gets the E09 Region ASingle Object ("parent load" child property).
/// </summary>
/// <value>The E09 Region ASingle Object.</value>
public E09_Region_ReChild E09_Region_ASingleObject
{
get { return GetProperty(E09_Region_ASingleObjectProperty); }
private set { LoadProperty(E09_Region_ASingleObjectProperty, value); }
}
/// <summary>
/// Maintains metadata about child <see cref="E09_CityObjects"/> property.
/// </summary>
public static readonly PropertyInfo<E09_CityColl> E09_CityObjectsProperty = RegisterProperty<E09_CityColl>(p => p.E09_CityObjects, "E09 City Objects", RelationshipTypes.Child);
/// <summary>
/// Gets the E09 City Objects ("parent load" child property).
/// </summary>
/// <value>The E09 City Objects.</value>
public E09_CityColl E09_CityObjects
{
get { return GetProperty(E09_CityObjectsProperty); }
private set { LoadProperty(E09_CityObjectsProperty, value); }
}
#endregion
#region Factory Methods
/// <summary>
/// Factory method. Creates a new <see cref="E08_Region"/> object.
/// </summary>
/// <returns>A reference to the created <see cref="E08_Region"/> object.</returns>
internal static E08_Region NewE08_Region()
{
return DataPortal.CreateChild<E08_Region>();
}
/// <summary>
/// Factory method. Loads a <see cref="E08_Region"/> object from the given SafeDataReader.
/// </summary>
/// <param name="dr">The SafeDataReader to use.</param>
/// <returns>A reference to the fetched <see cref="E08_Region"/> object.</returns>
internal static E08_Region GetE08_Region(SafeDataReader dr)
{
E08_Region obj = new E08_Region();
// show the framework that this is a child object
obj.MarkAsChild();
obj.Fetch(dr);
obj.LoadProperty(E09_CityObjectsProperty, E09_CityColl.NewE09_CityColl());
obj.MarkOld();
return obj;
}
#endregion
#region Constructor
/// <summary>
/// Initializes a new instance of the <see cref="E08_Region"/> class.
/// </summary>
/// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
[System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
public E08_Region()
{
// Use factory methods and do not use direct creation.
// show the framework that this is a child object
MarkAsChild();
}
#endregion
#region Data Access
/// <summary>
/// Loads default values for the <see cref="E08_Region"/> object properties.
/// </summary>
[Csla.RunLocal]
protected override void Child_Create()
{
LoadProperty(Region_IDProperty, System.Threading.Interlocked.Decrement(ref _lastID));
LoadProperty(E09_Region_SingleObjectProperty, DataPortal.CreateChild<E09_Region_Child>());
LoadProperty(E09_Region_ASingleObjectProperty, DataPortal.CreateChild<E09_Region_ReChild>());
LoadProperty(E09_CityObjectsProperty, DataPortal.CreateChild<E09_CityColl>());
var args = new DataPortalHookArgs();
OnCreate(args);
base.Child_Create();
}
/// <summary>
/// Loads a <see cref="E08_Region"/> object from the given SafeDataReader.
/// </summary>
/// <param name="dr">The SafeDataReader to use.</param>
private void Fetch(SafeDataReader dr)
{
// Value properties
LoadProperty(Region_IDProperty, dr.GetInt32("Region_ID"));
LoadProperty(Region_NameProperty, dr.GetString("Region_Name"));
// parent properties
parent_Country_ID = dr.GetInt32("Parent_Country_ID");
var args = new DataPortalHookArgs(dr);
OnFetchRead(args);
}
/// <summary>
/// Loads child <see cref="E09_Region_Child"/> object.
/// </summary>
/// <param name="child">The child object to load.</param>
internal void LoadChild(E09_Region_Child child)
{
LoadProperty(E09_Region_SingleObjectProperty, child);
}
/// <summary>
/// Loads child <see cref="E09_Region_ReChild"/> object.
/// </summary>
/// <param name="child">The child object to load.</param>
internal void LoadChild(E09_Region_ReChild child)
{
LoadProperty(E09_Region_ASingleObjectProperty, child);
}
/// <summary>
/// Inserts a new <see cref="E08_Region"/> object in the database.
/// </summary>
/// <param name="parent">The parent object.</param>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Insert(E06_Country parent)
{
using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
{
using (var cmd = new SqlCommand("AddE08_Region", ctx.Connection))
{
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("@Parent_Country_ID", parent.Country_ID).DbType = DbType.Int32;
cmd.Parameters.AddWithValue("@Region_ID", ReadProperty(Region_IDProperty)).Direction = ParameterDirection.Output;
cmd.Parameters.AddWithValue("@Region_Name", ReadProperty(Region_NameProperty)).DbType = DbType.String;
var args = new DataPortalHookArgs(cmd);
OnInsertPre(args);
cmd.ExecuteNonQuery();
OnInsertPost(args);
LoadProperty(Region_IDProperty, (int) cmd.Parameters["@Region_ID"].Value);
}
// flushes all pending data operations
FieldManager.UpdateChildren(this);
}
}
/// <summary>
/// Updates in the database all changes made to the <see cref="E08_Region"/> object.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Update()
{
if (!IsDirty)
return;
using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
{
using (var cmd = new SqlCommand("UpdateE08_Region", ctx.Connection))
{
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("@Region_ID", ReadProperty(Region_IDProperty)).DbType = DbType.Int32;
cmd.Parameters.AddWithValue("@Region_Name", ReadProperty(Region_NameProperty)).DbType = DbType.String;
var args = new DataPortalHookArgs(cmd);
OnUpdatePre(args);
cmd.ExecuteNonQuery();
OnUpdatePost(args);
}
// flushes all pending data operations
FieldManager.UpdateChildren(this);
}
}
/// <summary>
/// Self deletes the <see cref="E08_Region"/> object from database.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_DeleteSelf()
{
using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
{
// flushes all pending data operations
FieldManager.UpdateChildren(this);
using (var cmd = new SqlCommand("DeleteE08_Region", ctx.Connection))
{
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("@Region_ID", ReadProperty(Region_IDProperty)).DbType = DbType.Int32;
var args = new DataPortalHookArgs(cmd);
OnDeletePre(args);
cmd.ExecuteNonQuery();
OnDeletePost(args);
}
}
// removes all previous references to children
LoadProperty(E09_Region_SingleObjectProperty, DataPortal.CreateChild<E09_Region_Child>());
LoadProperty(E09_Region_ASingleObjectProperty, DataPortal.CreateChild<E09_Region_ReChild>());
LoadProperty(E09_CityObjectsProperty, DataPortal.CreateChild<E09_CityColl>());
}
#endregion
#region DataPortal Hooks
/// <summary>
/// Occurs after setting all defaults for object creation.
/// </summary>
partial void OnCreate(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
/// </summary>
partial void OnDeletePre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Delete, after the delete operation, before Commit().
/// </summary>
partial void OnDeletePost(DataPortalHookArgs args);
/// <summary>
/// Occurs after setting query parameters and before the fetch operation.
/// </summary>
partial void OnFetchPre(DataPortalHookArgs args);
/// <summary>
/// Occurs after the fetch operation (object or collection is fully loaded and set up).
/// </summary>
partial void OnFetchPost(DataPortalHookArgs args);
/// <summary>
/// Occurs after the low level fetch operation, before the data reader is destroyed.
/// </summary>
partial void OnFetchRead(DataPortalHookArgs args);
/// <summary>
/// Occurs after setting query parameters and before the update operation.
/// </summary>
partial void OnUpdatePre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
/// </summary>
partial void OnUpdatePost(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
/// </summary>
partial void OnInsertPre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
/// </summary>
partial void OnInsertPost(DataPortalHookArgs args);
#endregion
}
}
| |
#region CopyrightHeader
//
// Copyright by Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0.txt
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Text;
using gov.va.medora.mdo.dao;
using gov.va.medora.mdo.exceptions;
namespace gov.va.medora.mdo
{
public class User : Person
{
string userName;
string nickName;
string pwd;
string permissionString;
string uid;
SiteId logonSiteId;
Team team;
string office;
Service service;
string voicePager;
string digitalPager;
string title;
string phone;
string sigText;
string esig;
string userClass;
string orderRole;
string greeting;
string emailAddress;
string currentContext;
bool defunct;
bool canWriteMedOrders;
string inactiveDate;
bool isInactive;
string providerClass;
string providerType;
bool requiresCosigner;
User usualCosigner;
AbstractPermission primaryPermission;
public User() { }
public string Uid
{
get { return uid; }
set { uid = value; }
}
public string Pwd
{
get { return pwd; }
set { pwd = value; }
}
public string PermissionString
{
get { return permissionString; }
set { permissionString = value; }
}
public string UserName
{
get { return userName; }
set { userName = value; }
}
public SiteId LogonSiteId
{
get { return logonSiteId; }
set { logonSiteId = value; }
}
public string SigText
{
get { return sigText; }
set { sigText = value; }
}
public string Esig
{
get { return esig; }
set { esig = value; }
}
public string UserClass
{
get { return userClass; }
set { userClass = value; }
}
public Team Team
{
get { return team; }
set { team = value; }
}
public string Office
{
get { return office; }
set { office = value; }
}
public Service Service
{
get { return service; }
set { service = value; }
}
public string VoicePager
{
get { return voicePager; }
set { voicePager = value; }
}
public string DigitalPager
{
get { return digitalPager; }
set { digitalPager = value; }
}
public string Title
{
get { return title; }
set { title = value; }
}
public string Phone
{
get { return phone; }
set { phone = value; }
}
public string OrderRole
{
get { return orderRole; }
set { orderRole = value; }
}
public string Greeting
{
get { return greeting; }
set { greeting = value; }
}
public string EmailAddress
{
get { return emailAddress; }
set { emailAddress = value; }
}
public string CurrentContext
{
get { return currentContext; }
set { currentContext = value; }
}
public bool Defunct
{
get { return defunct; }
set { defunct = value; }
}
public string NickName
{
get { return nickName; }
set { nickName = value; }
}
public bool CanWriteMedOrders
{
get { return canWriteMedOrders; }
set { canWriteMedOrders = value; }
}
public string InactiveDate
{
get { return inactiveDate; }
set
{
inactiveDate = value;
if (inactiveDate != "")
{
isInactive = true;
}
}
}
public bool IsInactive
{
get { return isInactive; }
set { isInactive = value; }
}
public string ProviderClass
{
get { return providerClass; }
set { providerClass = value; }
}
public string ProviderType
{
get { return providerType; }
set { providerType = value; }
}
public bool RequiresCosigner
{
get { return requiresCosigner; }
set { requiresCosigner = value; }
}
public User UsualCosigner
{
get { return usualCosigner; }
set { usualCosigner = value; }
}
public AbstractPermission PrimaryPermission
{
get { return primaryPermission; }
set { primaryPermission = value; }
}
internal static IUserDao getDao(AbstractConnection cxn)
{
if (!cxn.IsConnected)
{
throw new MdoException(MdoExceptionCode.USAGE_NO_CONNECTION, "Unable to instantiate DAO: unconnected");
}
AbstractDaoFactory f = AbstractDaoFactory.getDaoFactory(AbstractDaoFactory.getConstant(cxn.DataSource.Protocol));
return f.getUserDao(cxn);
}
public static OrderedDictionary getUsersWithOption(AbstractConnection cxn, string optionName)
{
return getDao(cxn).getUsersWithOption(optionName);
}
public static bool hasPermission(AbstractConnection cxn, string userId, AbstractPermission permission)
{
return getDao(cxn).hasPermission(userId, permission);
}
public static Dictionary<string, AbstractPermission> getPermissions(AbstractConnection cxn, string uid, PermissionType type)
{
return getDao(cxn).getPermissions(type, uid);
}
}
}
| |
#region License
/*
* All content copyright Terracotta, Inc., unless otherwise indicated. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
*/
#endregion
using System;
using System.Runtime.Serialization;
using System.Security;
namespace Quartz.Impl.Calendar
{
/// <summary>
/// This implementation of the Calendar may be used (you don't have to) as a
/// base class for more sophisticated ones. It merely implements the base
/// functionality required by each Calendar.
/// </summary>
/// <remarks>
/// The base functionality provided here is the treatment of base calendars. Base
/// calendars allow you to chain (stack) as many calendars as you need. For
/// example, to exclude weekends you may use a WeeklyCalendar. To exclude
/// holidays as well, you may define a WeeklyCalendar instance to be the base
/// calendar for a HolidayCalendar instance.
/// </remarks>
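/// <example>
/// A minimal sketch of chaining calendars (assumes Quartz's WeeklyCalendar and HolidayCalendar,
/// both of which derive from this class; the excluded date is a hypothetical holiday):
/// <code>
/// var weekends = new WeeklyCalendar();                         // excludes Saturday and Sunday
/// var holidays = new HolidayCalendar { CalendarBase = weekends };
/// holidays.AddExcludedDate(new DateTime(2020, 12, 25));
/// bool included = holidays.IsTimeIncluded(DateTimeOffset.UtcNow);
/// </code>
/// </example>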
/// <seealso cref="ICalendar" />
/// <author>Juergen Donnerstag</author>
/// <author>James House</author>
/// <author>Marko Lahma (.NET)</author>
[Serializable]
public class BaseCalendar : ICalendar, ISerializable
{
// An optional base calendar.
private ICalendar baseCalendar;
private string description;
private TimeZoneInfo timeZone;
/// <summary>
/// Initializes a new instance of the <see cref="BaseCalendar"/> class.
/// </summary>
public BaseCalendar()
{
}
/// <summary>
/// Initializes a new instance of the <see cref="BaseCalendar"/> class.
/// </summary>
/// <param name="baseCalendar">The base calendar.</param>
public BaseCalendar(ICalendar baseCalendar)
{
CalendarBase = baseCalendar;
}
/// <summary>
/// Initializes a new instance of the <see cref="BaseCalendar"/> class.
/// </summary>
/// <param name="timeZone">The time zone.</param>
public BaseCalendar(TimeZoneInfo timeZone)
{
this.timeZone = timeZone;
}
/// <summary>
/// Initializes a new instance of the <see cref="BaseCalendar"/> class.
/// </summary>
/// <param name="baseCalendar">The base calendar.</param>
/// <param name="timeZone">The time zone.</param>
public BaseCalendar(ICalendar baseCalendar, TimeZoneInfo timeZone)
{
this.baseCalendar = baseCalendar;
this.timeZone = timeZone;
}
/// <summary>
/// Serialization constructor.
/// </summary>
/// <param name="info"></param>
/// <param name="context"></param>
protected BaseCalendar(SerializationInfo info, StreamingContext context)
{
string prefix = "";
try
{
info.GetValue("description", typeof(string));
}
catch
{
// base class for other
prefix = "BaseCalendar+";
}
baseCalendar = (ICalendar) info.GetValue(prefix + "baseCalendar", typeof(ICalendar));
description = (string)info.GetValue(prefix + "description", typeof(string));
timeZone = (TimeZoneInfo)info.GetValue(prefix + "timeZone", typeof(TimeZoneInfo));
}
[SecurityCritical]
public virtual void GetObjectData(SerializationInfo info, StreamingContext context)
{
info.AddValue("baseCalendar", baseCalendar);
info.AddValue("description", description);
info.AddValue("timeZone", timeZone);
}
/// <summary>
/// Gets or sets the time zone.
/// </summary>
/// <value>The time zone.</value>
public virtual TimeZoneInfo TimeZone
{
get
{
if (timeZone == null)
{
timeZone = TimeZoneInfo.Local;
}
return timeZone;
}
set { timeZone = value; }
}
/// <summary>
/// Checks whether two arrays have the same length and contain equal elements
/// at every position.
/// </summary>
/// <returns><c>true</c> if the arrays are element-wise equal; otherwise <c>false</c>.</returns>
protected static bool ArraysEqualElementsOnEqualPlaces(Array array1, Array array2)
{
if (array1.Length != array2.Length)
{
return false;
}
bool toReturn = true;
for (int i = 0; i < array1.Length; i++)
{
toReturn = toReturn && (array1.GetValue(i).Equals(array2.GetValue(i)));
}
return toReturn;
}
/// <summary>
/// Gets or sets the description given to the <see cref="ICalendar" /> instance by
/// its creator (if any).
/// </summary>
public virtual string Description
{
get { return description; }
set { description = value; }
}
/// <summary>
/// Set a new base calendar or remove the existing one
/// </summary>
/// <value></value>
public ICalendar CalendarBase
{
set { baseCalendar = value; }
get { return baseCalendar; }
}
/// <summary>
/// Get the base calendar. Will be null, if not set.
/// </summary>
public ICalendar GetBaseCalendar()
{
return baseCalendar;
}
/// <summary>
/// Checks whether the date/time represented by timeStampUtc is included. If included,
/// returns true. The implementation of BaseCalendar simply calls the base
/// calendar's IsTimeIncluded() method if a base calendar is set.
/// </summary>
/// <seealso cref="ICalendar.IsTimeIncluded" />
public virtual bool IsTimeIncluded(DateTimeOffset timeStampUtc)
{
if (timeStampUtc == DateTimeOffset.MinValue)
{
throw new ArgumentException("timeStampUtc must be greater 0");
}
if (baseCalendar != null)
{
if (!baseCalendar.IsTimeIncluded(timeStampUtc))
{
return false;
}
}
return true;
}
/// <summary>
/// Determine the next UTC time (in milliseconds) that is 'included' by the
/// Calendar after the given time. Return the original value if timeStamp is
/// included. Return 0 if all days are excluded.
/// </summary>
/// <seealso cref="ICalendar.GetNextIncludedTimeUtc" />
public virtual DateTimeOffset GetNextIncludedTimeUtc(DateTimeOffset timeUtc)
{
if (timeUtc == DateTimeOffset.MinValue)
{
throw new ArgumentException("timeStamp must be greater DateTimeOffset.MinValue");
}
if (baseCalendar != null)
{
return baseCalendar.GetNextIncludedTimeUtc(timeUtc);
}
return timeUtc;
}
/// <summary>
/// Creates a new object that is a copy of the current instance.
/// </summary>
/// <returns>A new object that is a copy of this instance.</returns>
public virtual object Clone()
{
BaseCalendar clone = (BaseCalendar) MemberwiseClone();
if (GetBaseCalendar() != null)
{
clone.baseCalendar = (ICalendar) GetBaseCalendar().Clone();
}
return clone;
}
}
}
| |
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
// **NOTE** This file was generated by a tool and any changes will be overwritten.
// Template Source: Templates\CSharp\Requests\EntityRequest.cs.tt
namespace Microsoft.Graph
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Http;
using System.Threading;
using System.Linq.Expressions;
/// <summary>
/// The type SubscriptionRequest.
/// </summary>
public partial class SubscriptionRequest : BaseRequest, ISubscriptionRequest
{
/// <summary>
/// Constructs a new SubscriptionRequest.
/// </summary>
/// <param name="requestUrl">The URL for the built request.</param>
/// <param name="client">The <see cref="IBaseClient"/> for handling requests.</param>
/// <param name="options">Query and header option name value pairs for the request.</param>
public SubscriptionRequest(
string requestUrl,
IBaseClient client,
IEnumerable<Option> options)
: base(requestUrl, client, options)
{
}
/// <summary>
/// Creates the specified Subscription using POST.
/// </summary>
/// <param name="subscriptionToCreate">The Subscription to create.</param>
/// <returns>The created Subscription.</returns>
public System.Threading.Tasks.Task<Subscription> CreateAsync(Subscription subscriptionToCreate)
{
return this.CreateAsync(subscriptionToCreate, CancellationToken.None);
}
/// <summary>
/// Creates the specified Subscription using POST.
/// </summary>
/// <param name="subscriptionToCreate">The Subscription to create.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The created Subscription.</returns>
public async System.Threading.Tasks.Task<Subscription> CreateAsync(Subscription subscriptionToCreate, CancellationToken cancellationToken)
{
this.ContentType = "application/json";
this.Method = "POST";
var newEntity = await this.SendAsync<Subscription>(subscriptionToCreate, cancellationToken).ConfigureAwait(false);
this.InitializeCollectionProperties(newEntity);
return newEntity;
}
/// <summary>
/// Deletes the specified Subscription.
/// </summary>
/// <returns>The task to await.</returns>
public System.Threading.Tasks.Task DeleteAsync()
{
return this.DeleteAsync(CancellationToken.None);
}
/// <summary>
/// Deletes the specified Subscription.
/// </summary>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The task to await.</returns>
public async System.Threading.Tasks.Task DeleteAsync(CancellationToken cancellationToken)
{
this.Method = "DELETE";
await this.SendAsync<Subscription>(null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets the specified Subscription.
/// </summary>
/// <returns>The Subscription.</returns>
public System.Threading.Tasks.Task<Subscription> GetAsync()
{
return this.GetAsync(CancellationToken.None);
}
/// <summary>
/// Gets the specified Subscription.
/// </summary>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The Subscription.</returns>
public async System.Threading.Tasks.Task<Subscription> GetAsync(CancellationToken cancellationToken)
{
this.Method = "GET";
var retrievedEntity = await this.SendAsync<Subscription>(null, cancellationToken).ConfigureAwait(false);
this.InitializeCollectionProperties(retrievedEntity);
return retrievedEntity;
}
/// <summary>
/// Updates the specified Subscription using PATCH.
/// </summary>
/// <param name="subscriptionToUpdate">The Subscription to update.</param>
/// <returns>The updated Subscription.</returns>
public System.Threading.Tasks.Task<Subscription> UpdateAsync(Subscription subscriptionToUpdate)
{
return this.UpdateAsync(subscriptionToUpdate, CancellationToken.None);
}
/// <summary>
/// Updates the specified Subscription using PATCH.
/// </summary>
/// <param name="subscriptionToUpdate">The Subscription to update.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The updated Subscription.</returns>
public async System.Threading.Tasks.Task<Subscription> UpdateAsync(Subscription subscriptionToUpdate, CancellationToken cancellationToken)
{
this.ContentType = "application/json";
this.Method = "PATCH";
var updatedEntity = await this.SendAsync<Subscription>(subscriptionToUpdate, cancellationToken).ConfigureAwait(false);
this.InitializeCollectionProperties(updatedEntity);
return updatedEntity;
}
/// <summary>
/// Adds the specified expand value to the request.
/// </summary>
/// <param name="value">The expand value.</param>
/// <returns>The request object to send.</returns>
public ISubscriptionRequest Expand(string value)
{
this.QueryOptions.Add(new QueryOption("$expand", value));
return this;
}
/// <summary>
/// Adds the specified expand value to the request.
/// </summary>
/// <param name="expandExpression">The expression from which to calculate the expand value.</param>
/// <returns>The request object to send.</returns>
public ISubscriptionRequest Expand(Expression<Func<Subscription, object>> expandExpression)
{
if (expandExpression == null)
{
throw new ArgumentNullException(nameof(expandExpression));
}
string error;
string value = ExpressionExtractHelper.ExtractMembers(expandExpression, out error);
if (value == null)
{
throw new ArgumentException(error, nameof(expandExpression));
}
else
{
this.QueryOptions.Add(new QueryOption("$expand", value));
}
return this;
}
/// <summary>
/// Adds the specified select value to the request.
/// </summary>
/// <param name="value">The select value.</param>
/// <returns>The request object to send.</returns>
public ISubscriptionRequest Select(string value)
{
this.QueryOptions.Add(new QueryOption("$select", value));
return this;
}
/// <summary>
/// Adds the specified select value to the request.
/// </summary>
/// <param name="selectExpression">The expression from which to calculate the select value.</param>
/// <returns>The request object to send.</returns>
public ISubscriptionRequest Select(Expression<Func<Subscription, object>> selectExpression)
{
if (selectExpression == null)
{
throw new ArgumentNullException(nameof(selectExpression));
}
string error;
string value = ExpressionExtractHelper.ExtractMembers(selectExpression, out error);
if (value == null)
{
throw new ArgumentException(error, nameof(selectExpression));
}
else
{
this.QueryOptions.Add(new QueryOption("$select", value));
}
return this;
}
/// <summary>
/// Initializes any collection properties after deserialization, like next requests for paging.
/// </summary>
/// <param name="subscriptionToInitialize">The <see cref="Subscription"/> with the collection properties to initialize.</param>
private void InitializeCollectionProperties(Subscription subscriptionToInitialize)
{
}
}
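        // Illustrative usage sketch (not part of the generated request class). It assumes a
        // variable `subscriptionRequest` of type ISubscriptionRequest obtained from a request
        // builder elsewhere in the client library; only members defined in this file
        // (Select, GetAsync, UpdateAsync) are exercised.
        //
        //   var subscription = await subscriptionRequest
        //       .Select("id,resource,expirationDateTime")
        //       .GetAsync();
        //
        //   // ...modify the retrieved subscription as needed, then send the PATCH...
        //   var updated = await subscriptionRequest.UpdateAsync(subscription);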
}
// ListOfTransactionsPanel.cs
//
// Copyright (c) 2013 Brent Knowles (http://www.brentknowles.com)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
// Review documentation at http://www.yourothermind.com for updated implementation notes, license updates
// or other general information.
//
// Author information available at http://www.brentknowles.com or http://www.amazon.com/Brent-Knowles/e/B0035WW7OW
// Full source code: https://github.com/BrentKnowles/YourOtherMind
//###
using System;
using System.Windows.Forms;
using CoreUtilities;
using System.Collections.Generic;
using Layout;
using Transactions;
using System.IO;
namespace Worklog
{
public class ListOfTransactionsPanel : Panel
{
#region GUI
ListBox EventList = null;
		RichTextBox Summary = null;
#endregion
#region variables
		string _GUID = Constants.BLANK;
		Action<string> RefreshPanels = null;
		Func<DateTime> CurrentDate = null;
		Button Add = null;
#endregion
		public string SummaryText {
			set { Summary.Text = value; }
		}
/// <summary>
/// Initializes a new instance of the <see cref="Worklog.ListOfTransactionsPanel"/> class.
///
/// This panel contains:
///
		/// - A listbox showing events assigned to this layout
		/// - A summary of the week's activity ON THIS layout
		/// - An Add button
		/// - The ability to edit existing entries from the listbox
/// </summary>
public ListOfTransactionsPanel (string LayoutGUID, Action<string> _RefreshPanels, Func<DateTime> _CurrentDate,Func<bool> _BringFront)
{
_GUID = LayoutGUID;
CurrentDate = _CurrentDate;
RefreshPanels = _RefreshPanels;
EventList = new ListBox();
			EventList.DoubleClick += HandleTransactionListDoubleClick;
EventList.Dock = DockStyle.Fill;
Add = new Button();
Add.Text = Loc.Instance.GetString ("Add Worklog");
Add.Dock = DockStyle.Top;
Add.Click += HandleAddClick;
Summary = new RichTextBox();
			Summary.Click += (object sender, EventArgs e) => _BringFront();
Summary.Height = 200;
Summary.Dock = DockStyle.Top;
Summary.ReadOnly = true;
this.Controls.Add(EventList);
this.Controls.Add (Add);
this.Controls.Add (Summary);
}
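		// Illustrative usage sketch (not part of this class): wiring the panel into a host
		// form. `layoutGuid`, `RefreshAllPanels`, `BringPanelToFront` and `parentForm` are
		// assumptions for demonstration; only members defined in this file are used.
		//
		//   var panel = new ListOfTransactionsPanel (
		//       layoutGuid,
		//       refreshedGuid => RefreshAllPanels (refreshedGuid),    // RefreshPanels callback
		//       () => DateTime.Now,                                   // CurrentDate provider
		//       () => { BringPanelToFront (); return true; });        // BringFront callback
		//
		//   panel.Dock = DockStyle.Fill;
		//   parentForm.Controls.Add (panel);
		//   panel.BuildList ();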
static void ExportIfNeeded (int minutes)
{
if (minutes >= 60 && LayoutDetails.Instance.CurrentLayout != null) {
				// Export the layout if the logged work was at least 60 minutes
string exportDirectory = Path.Combine (LayoutDetails.Instance.Path, "export");
exportDirectory = Path.Combine (exportDirectory, "worklog");
if (!Directory.Exists (exportDirectory)) {
Directory.CreateDirectory (exportDirectory);
}
				// Use a 24-hour clock (HH) so morning and evening exports cannot collide on the same filename.
				string datename = string.Format("{0:yyyy-MM-dd_HH-mm-ss}", DateTime.Now);
string filename = Path.Combine (exportDirectory, LayoutDetails.Instance.CurrentLayout.GUID + datename + ".xml");
// NewMessage.Show (filename);
MasterOfLayouts.ExportLayout (LayoutDetails.Instance.CurrentLayout.GUID, filename);
}
}
void HandleTransactionListDoubleClick (object sender, EventArgs e)
{
// Edit an existing transaction entry
if (EventList.SelectedItem != null) {
string IDOfRowForTransaction = EventList.SelectedValue.ToString ();
TransactionWorkLog foundNote = (TransactionWorkLog)LayoutDetails.Instance.TransactionsList.GetExisting (new database.ColumnConstant[2]
{TransactionsTable.TYPE, TransactionsTable.ID}, new string[2] {
TransactionsTable.T_USER.ToString (),
IDOfRowForTransaction.ToString ()
});
if (foundNote != null) {
int Words = foundNote.Words;
int Minutes = foundNote.Minutes;
					string Category = foundNote.Category;
string Notes = foundNote.Notes;
AddEditWorkForm AddEdit = new AddEditWorkForm (Words, Minutes, Category, Notes);
if (AddEdit.ShowDialog () == DialogResult.OK) {
// TransactionWorkLog Work = new TransactionWorkLog(oldDate, this._GUID, AddEdit.Note, AddEdit.Words, AddEdit.Minutes, AddEdit.CategoryText);
// if in add mode we Add a new entry
//LayoutDetails.Instance.TransactionsList.UpdateEvent(new TransactionWorkLog(DateTime.Now, _GUID, AddEdit.Note, AddEdit.Words, AddEdit.Minutes, AddEdit.CategoryText));
foundNote.Words = AddEdit.Words;
foundNote.Minutes = AddEdit.Minutes;
foundNote.Notes = AddEdit.Note;
foundNote.Category = AddEdit.CategoryText;
LayoutDetails.Instance.TransactionsList.UpdateEvent (foundNote);
UpdateScreen ();
ExportIfNeeded (foundNote.Minutes);
}
} else {
NewMessage.Show (Loc.Instance.GetStringFmt ("Unusual Error... unable to retrieve this transaction. ID = {0}", IDOfRowForTransaction));
}
}
}
void UpdateScreen ()
{
if (null != RefreshPanels) {
RefreshPanels ("");
}
}
void AddEntry ()
{
AddEditWorkForm AddEdit = new AddEditWorkForm ();
if (AddEdit.ShowDialog () == DialogResult.OK) {
if (CurrentDate != null)
{
// if in add mode we Add a new entry
LayoutDetails.Instance.TransactionsList.AddEvent(new TransactionWorkLog(CurrentDate(), _GUID,
AddEdit.Note, AddEdit.Words, AddEdit.Minutes, AddEdit.CategoryText));
ExportIfNeeded (AddEdit.Minutes);
}
else
{
NewMessage.Show ("A date was not passed into the AddEntry method.");
}
UpdateScreen ();
}
}
void HandleAddClick (object sender, EventArgs e)
{
AddEntry();
}
public void BuildList ()
{
if (null == LayoutDetails.Instance.TransactionsList) {
throw new Exception("Transaction Table not created yet");
}
try {
List<Transactions.TransactionBase> LayoutEvents = LayoutDetails.Instance.TransactionsList.GetEventsForLayoutGuid (_GUID, " and type='5' ");
if (LayoutEvents != null)
{
LayoutEvents.Sort ();
LayoutEvents.Reverse ();
					EventList.DataSource = LayoutEvents;
					EventList.DisplayMember = "Display";
					EventList.ValueMember = "ID";
}
else
{
lg.Instance.Line ("ListOfTransactionsPanel.buildList", ProblemType.MESSAGE,"Transaction list for this note was empty. Remove me after debugging.");
}
} catch (Exception ex) {
NewMessage.Show (ex.ToString());
}
}
public void UpdateAppearance(AppearanceClass app)
{
EventList.BackColor = app.mainBackground;
			EventList.ForeColor = app.secondaryForeground;
EventList.Font = app.captionFont;
Add.BackColor = app.mainBackground;
Add.ForeColor = app.secondaryForeground;
Add.Font = app.captionFont;
Summary.Font = app.captionFont;
Summary.BackColor = app.mainBackground;
Summary.ForeColor = app.secondaryForeground;
}
}
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
namespace Google.Apis.RealTimeBidding.v1alpha
{
/// <summary>The RealTimeBidding Service.</summary>
public class RealTimeBiddingService : Google.Apis.Services.BaseClientService
{
/// <summary>The API version.</summary>
public const string Version = "v1alpha";
/// <summary>The discovery version used to generate this service.</summary>
public static Google.Apis.Discovery.DiscoveryVersion DiscoveryVersionUsed = Google.Apis.Discovery.DiscoveryVersion.Version_1_0;
/// <summary>Constructs a new service.</summary>
public RealTimeBiddingService() : this(new Google.Apis.Services.BaseClientService.Initializer())
{
}
/// <summary>Constructs a new service.</summary>
/// <param name="initializer">The service initializer.</param>
public RealTimeBiddingService(Google.Apis.Services.BaseClientService.Initializer initializer) : base(initializer)
{
Bidders = new BiddersResource(this);
}
/// <summary>Gets the service supported features.</summary>
public override System.Collections.Generic.IList<string> Features => new string[0];
/// <summary>Gets the service name.</summary>
public override string Name => "realtimebidding";
/// <summary>Gets the service base URI.</summary>
public override string BaseUri =>
#if NETSTANDARD1_3 || NETSTANDARD2_0 || NET45
BaseUriOverride ?? "https://realtimebidding.googleapis.com/";
#else
"https://realtimebidding.googleapis.com/";
#endif
/// <summary>Gets the service base path.</summary>
public override string BasePath => "";
#if !NET40
/// <summary>Gets the batch base URI; <c>null</c> if unspecified.</summary>
public override string BatchUri => "https://realtimebidding.googleapis.com/batch";
/// <summary>Gets the batch base path; <c>null</c> if unspecified.</summary>
public override string BatchPath => "batch";
#endif
/// <summary>Available OAuth 2.0 scopes for use with the Real-time Bidding API.</summary>
public class Scope
{
/// <summary>
/// See, create, edit, and delete your Authorized Buyers and Open Bidding account entities
/// </summary>
public static string RealtimeBidding = "https://www.googleapis.com/auth/realtime-bidding";
}
/// <summary>Available OAuth 2.0 scope constants for use with the Real-time Bidding API.</summary>
public static class ScopeConstants
{
/// <summary>
/// See, create, edit, and delete your Authorized Buyers and Open Bidding account entities
/// </summary>
public const string RealtimeBidding = "https://www.googleapis.com/auth/realtime-bidding";
}
/// <summary>Gets the Bidders resource.</summary>
public virtual BiddersResource Bidders { get; }
}
/// <summary>A base abstract class for RealTimeBidding requests.</summary>
public abstract class RealTimeBiddingBaseServiceRequest<TResponse> : Google.Apis.Requests.ClientServiceRequest<TResponse>
{
/// <summary>Constructs a new RealTimeBiddingBaseServiceRequest instance.</summary>
protected RealTimeBiddingBaseServiceRequest(Google.Apis.Services.IClientService service) : base(service)
{
}
/// <summary>V1 error format.</summary>
[Google.Apis.Util.RequestParameterAttribute("$.xgafv", Google.Apis.Util.RequestParameterType.Query)]
public virtual System.Nullable<XgafvEnum> Xgafv { get; set; }
/// <summary>V1 error format.</summary>
public enum XgafvEnum
{
/// <summary>v1 error format</summary>
[Google.Apis.Util.StringValueAttribute("1")]
Value1 = 0,
/// <summary>v2 error format</summary>
[Google.Apis.Util.StringValueAttribute("2")]
Value2 = 1,
}
/// <summary>OAuth access token.</summary>
[Google.Apis.Util.RequestParameterAttribute("access_token", Google.Apis.Util.RequestParameterType.Query)]
public virtual string AccessToken { get; set; }
/// <summary>Data format for response.</summary>
[Google.Apis.Util.RequestParameterAttribute("alt", Google.Apis.Util.RequestParameterType.Query)]
public virtual System.Nullable<AltEnum> Alt { get; set; }
/// <summary>Data format for response.</summary>
public enum AltEnum
{
/// <summary>Responses with Content-Type of application/json</summary>
[Google.Apis.Util.StringValueAttribute("json")]
Json = 0,
/// <summary>Media download with context-dependent Content-Type</summary>
[Google.Apis.Util.StringValueAttribute("media")]
Media = 1,
/// <summary>Responses with Content-Type of application/x-protobuf</summary>
[Google.Apis.Util.StringValueAttribute("proto")]
Proto = 2,
}
/// <summary>JSONP</summary>
[Google.Apis.Util.RequestParameterAttribute("callback", Google.Apis.Util.RequestParameterType.Query)]
public virtual string Callback { get; set; }
/// <summary>Selector specifying which fields to include in a partial response.</summary>
[Google.Apis.Util.RequestParameterAttribute("fields", Google.Apis.Util.RequestParameterType.Query)]
public virtual string Fields { get; set; }
/// <summary>
/// API key. Your API key identifies your project and provides you with API access, quota, and reports. Required
/// unless you provide an OAuth 2.0 token.
/// </summary>
[Google.Apis.Util.RequestParameterAttribute("key", Google.Apis.Util.RequestParameterType.Query)]
public virtual string Key { get; set; }
/// <summary>OAuth 2.0 token for the current user.</summary>
[Google.Apis.Util.RequestParameterAttribute("oauth_token", Google.Apis.Util.RequestParameterType.Query)]
public virtual string OauthToken { get; set; }
/// <summary>Returns response with indentations and line breaks.</summary>
[Google.Apis.Util.RequestParameterAttribute("prettyPrint", Google.Apis.Util.RequestParameterType.Query)]
public virtual System.Nullable<bool> PrettyPrint { get; set; }
/// <summary>
/// Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a
/// user, but should not exceed 40 characters.
/// </summary>
[Google.Apis.Util.RequestParameterAttribute("quotaUser", Google.Apis.Util.RequestParameterType.Query)]
public virtual string QuotaUser { get; set; }
/// <summary>Legacy upload protocol for media (e.g. "media", "multipart").</summary>
[Google.Apis.Util.RequestParameterAttribute("uploadType", Google.Apis.Util.RequestParameterType.Query)]
public virtual string UploadType { get; set; }
/// <summary>Upload protocol for media (e.g. "raw", "multipart").</summary>
[Google.Apis.Util.RequestParameterAttribute("upload_protocol", Google.Apis.Util.RequestParameterType.Query)]
public virtual string UploadProtocol { get; set; }
/// <summary>Initializes RealTimeBidding parameter list.</summary>
protected override void InitParameters()
{
base.InitParameters();
RequestParameters.Add("$.xgafv", new Google.Apis.Discovery.Parameter
{
Name = "$.xgafv",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("access_token", new Google.Apis.Discovery.Parameter
{
Name = "access_token",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("alt", new Google.Apis.Discovery.Parameter
{
Name = "alt",
IsRequired = false,
ParameterType = "query",
DefaultValue = "json",
Pattern = null,
});
RequestParameters.Add("callback", new Google.Apis.Discovery.Parameter
{
Name = "callback",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("fields", new Google.Apis.Discovery.Parameter
{
Name = "fields",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("key", new Google.Apis.Discovery.Parameter
{
Name = "key",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("oauth_token", new Google.Apis.Discovery.Parameter
{
Name = "oauth_token",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("prettyPrint", new Google.Apis.Discovery.Parameter
{
Name = "prettyPrint",
IsRequired = false,
ParameterType = "query",
DefaultValue = "true",
Pattern = null,
});
RequestParameters.Add("quotaUser", new Google.Apis.Discovery.Parameter
{
Name = "quotaUser",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("uploadType", new Google.Apis.Discovery.Parameter
{
Name = "uploadType",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("upload_protocol", new Google.Apis.Discovery.Parameter
{
Name = "upload_protocol",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
}
}
/// <summary>The "bidders" collection of methods.</summary>
public class BiddersResource
{
private const string Resource = "bidders";
/// <summary>The service which this resource belongs to.</summary>
private readonly Google.Apis.Services.IClientService service;
/// <summary>Constructs a new resource.</summary>
public BiddersResource(Google.Apis.Services.IClientService service)
{
this.service = service;
BiddingFunctions = new BiddingFunctionsResource(service);
}
/// <summary>Gets the BiddingFunctions resource.</summary>
public virtual BiddingFunctionsResource BiddingFunctions { get; }
/// <summary>The "biddingFunctions" collection of methods.</summary>
public class BiddingFunctionsResource
{
private const string Resource = "biddingFunctions";
/// <summary>The service which this resource belongs to.</summary>
private readonly Google.Apis.Services.IClientService service;
/// <summary>Constructs a new resource.</summary>
public BiddingFunctionsResource(Google.Apis.Services.IClientService service)
{
this.service = service;
}
/// <summary>
/// Activates an existing bidding function. An activated function is available for invocation for the
/// server-side TURTLEDOVE simulations.
/// </summary>
/// <param name="body">The body of the request.</param>
/// <param name="name">
/// Required. The name of the bidding function to activate. Format:
/// `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
/// </param>
public virtual ActivateRequest Activate(Google.Apis.RealTimeBidding.v1alpha.Data.ActivateBiddingFunctionRequest body, string name)
{
return new ActivateRequest(service, body, name);
}
/// <summary>
/// Activates an existing bidding function. An activated function is available for invocation for the
/// server-side TURTLEDOVE simulations.
/// </summary>
public class ActivateRequest : RealTimeBiddingBaseServiceRequest<Google.Apis.RealTimeBidding.v1alpha.Data.BiddingFunction>
{
/// <summary>Constructs a new Activate request.</summary>
public ActivateRequest(Google.Apis.Services.IClientService service, Google.Apis.RealTimeBidding.v1alpha.Data.ActivateBiddingFunctionRequest body, string name) : base(service)
{
Name = name;
Body = body;
InitParameters();
}
/// <summary>
/// Required. The name of the bidding function to activate. Format:
/// `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
/// </summary>
[Google.Apis.Util.RequestParameterAttribute("name", Google.Apis.Util.RequestParameterType.Path)]
public virtual string Name { get; private set; }
/// <summary>Gets or sets the body of this request.</summary>
Google.Apis.RealTimeBidding.v1alpha.Data.ActivateBiddingFunctionRequest Body { get; set; }
/// <summary>Returns the body of the request.</summary>
protected override object GetBody() => Body;
/// <summary>Gets the method name.</summary>
public override string MethodName => "activate";
/// <summary>Gets the HTTP method.</summary>
public override string HttpMethod => "POST";
/// <summary>Gets the REST path.</summary>
public override string RestPath => "v1alpha/{+name}:activate";
/// <summary>Initializes Activate parameter list.</summary>
protected override void InitParameters()
{
base.InitParameters();
RequestParameters.Add("name", new Google.Apis.Discovery.Parameter
{
Name = "name",
IsRequired = true,
ParameterType = "path",
DefaultValue = null,
Pattern = @"^bidders/[^/]+/biddingFunctions/[^/]+$",
});
}
}
/// <summary>
/// Archives an existing bidding function. An archived function will not be available for function
/// invocation for the server-side TURTLEDOVE simulations unless it is activated.
/// </summary>
/// <param name="body">The body of the request.</param>
/// <param name="name">
/// Required. The name of the bidding function to archive. Format:
/// `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
/// </param>
public virtual ArchiveRequest Archive(Google.Apis.RealTimeBidding.v1alpha.Data.ArchiveBiddingFunctionRequest body, string name)
{
return new ArchiveRequest(service, body, name);
}
/// <summary>
/// Archives an existing bidding function. An archived function will not be available for function
/// invocation for the server-side TURTLEDOVE simulations unless it is activated.
/// </summary>
public class ArchiveRequest : RealTimeBiddingBaseServiceRequest<Google.Apis.RealTimeBidding.v1alpha.Data.BiddingFunction>
{
/// <summary>Constructs a new Archive request.</summary>
public ArchiveRequest(Google.Apis.Services.IClientService service, Google.Apis.RealTimeBidding.v1alpha.Data.ArchiveBiddingFunctionRequest body, string name) : base(service)
{
Name = name;
Body = body;
InitParameters();
}
/// <summary>
/// Required. The name of the bidding function to archive. Format:
/// `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
/// </summary>
[Google.Apis.Util.RequestParameterAttribute("name", Google.Apis.Util.RequestParameterType.Path)]
public virtual string Name { get; private set; }
/// <summary>Gets or sets the body of this request.</summary>
Google.Apis.RealTimeBidding.v1alpha.Data.ArchiveBiddingFunctionRequest Body { get; set; }
/// <summary>Returns the body of the request.</summary>
protected override object GetBody() => Body;
/// <summary>Gets the method name.</summary>
public override string MethodName => "archive";
/// <summary>Gets the HTTP method.</summary>
public override string HttpMethod => "POST";
/// <summary>Gets the REST path.</summary>
public override string RestPath => "v1alpha/{+name}:archive";
/// <summary>Initializes Archive parameter list.</summary>
protected override void InitParameters()
{
base.InitParameters();
RequestParameters.Add("name", new Google.Apis.Discovery.Parameter
{
Name = "name",
IsRequired = true,
ParameterType = "path",
DefaultValue = null,
Pattern = @"^bidders/[^/]+/biddingFunctions/[^/]+$",
});
}
}
/// <summary>Creates a new bidding function.</summary>
/// <param name="body">The body of the request.</param>
/// <param name="parent">
/// Required. The name of the bidder for which to create the bidding function. Format:
/// `bidders/{bidderAccountId}`
/// </param>
public virtual CreateRequest Create(Google.Apis.RealTimeBidding.v1alpha.Data.BiddingFunction body, string parent)
{
return new CreateRequest(service, body, parent);
}
/// <summary>Creates a new bidding function.</summary>
public class CreateRequest : RealTimeBiddingBaseServiceRequest<Google.Apis.RealTimeBidding.v1alpha.Data.BiddingFunction>
{
/// <summary>Constructs a new Create request.</summary>
public CreateRequest(Google.Apis.Services.IClientService service, Google.Apis.RealTimeBidding.v1alpha.Data.BiddingFunction body, string parent) : base(service)
{
Parent = parent;
Body = body;
InitParameters();
}
/// <summary>
/// Required. The name of the bidder for which to create the bidding function. Format:
/// `bidders/{bidderAccountId}`
/// </summary>
[Google.Apis.Util.RequestParameterAttribute("parent", Google.Apis.Util.RequestParameterType.Path)]
public virtual string Parent { get; private set; }
/// <summary>Gets or sets the body of this request.</summary>
Google.Apis.RealTimeBidding.v1alpha.Data.BiddingFunction Body { get; set; }
/// <summary>Returns the body of the request.</summary>
protected override object GetBody() => Body;
/// <summary>Gets the method name.</summary>
public override string MethodName => "create";
/// <summary>Gets the HTTP method.</summary>
public override string HttpMethod => "POST";
/// <summary>Gets the REST path.</summary>
public override string RestPath => "v1alpha/{+parent}/biddingFunctions";
/// <summary>Initializes Create parameter list.</summary>
protected override void InitParameters()
{
base.InitParameters();
RequestParameters.Add("parent", new Google.Apis.Discovery.Parameter
{
Name = "parent",
IsRequired = true,
ParameterType = "path",
DefaultValue = null,
Pattern = @"^bidders/[^/]+$",
});
}
}
/// <summary>Lists the bidding functions that a bidder currently has registered.</summary>
/// <param name="parent">
/// Required. Name of the bidder whose bidding functions will be listed. Format:
/// `bidders/{bidder_account_id}`
/// </param>
public virtual ListRequest List(string parent)
{
return new ListRequest(service, parent);
}
/// <summary>Lists the bidding functions that a bidder currently has registered.</summary>
public class ListRequest : RealTimeBiddingBaseServiceRequest<Google.Apis.RealTimeBidding.v1alpha.Data.ListBiddingFunctionsResponse>
{
/// <summary>Constructs a new List request.</summary>
public ListRequest(Google.Apis.Services.IClientService service, string parent) : base(service)
{
Parent = parent;
InitParameters();
}
/// <summary>
/// Required. Name of the bidder whose bidding functions will be listed. Format:
/// `bidders/{bidder_account_id}`
/// </summary>
[Google.Apis.Util.RequestParameterAttribute("parent", Google.Apis.Util.RequestParameterType.Path)]
public virtual string Parent { get; private set; }
/// <summary>The maximum number of bidding functions to return.</summary>
[Google.Apis.Util.RequestParameterAttribute("pageSize", Google.Apis.Util.RequestParameterType.Query)]
public virtual System.Nullable<int> PageSize { get; set; }
/// <summary>
/// A token identifying a page of results the server should return. This value is received from a
/// previous `ListBiddingFunctions` call in ListBiddingFunctionsResponse.nextPageToken.
/// </summary>
[Google.Apis.Util.RequestParameterAttribute("pageToken", Google.Apis.Util.RequestParameterType.Query)]
public virtual string PageToken { get; set; }
/// <summary>Gets the method name.</summary>
public override string MethodName => "list";
/// <summary>Gets the HTTP method.</summary>
public override string HttpMethod => "GET";
/// <summary>Gets the REST path.</summary>
public override string RestPath => "v1alpha/{+parent}/biddingFunctions";
/// <summary>Initializes List parameter list.</summary>
protected override void InitParameters()
{
base.InitParameters();
RequestParameters.Add("parent", new Google.Apis.Discovery.Parameter
{
Name = "parent",
IsRequired = true,
ParameterType = "path",
DefaultValue = null,
Pattern = @"^bidders/[^/]+$",
});
RequestParameters.Add("pageSize", new Google.Apis.Discovery.Parameter
{
Name = "pageSize",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add("pageToken", new Google.Apis.Discovery.Parameter
{
Name = "pageToken",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
}
}
}
}
}
namespace Google.Apis.RealTimeBidding.v1alpha.Data
{
/// <summary>The request to activate a bidding function.</summary>
public class ActivateBiddingFunctionRequest : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>A request to archive a bidding function.</summary>
public class ArchiveBiddingFunctionRequest : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>
/// The bidding function to be executed as part of the TURTLEDOVE simulation experiment bidding flow.
/// </summary>
public class BiddingFunction : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>The raw Javascript source code of the bidding function.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("biddingFunction")]
public virtual string BiddingFunctionValue { get; set; }
/// <summary>
/// The name of the bidding function that must follow the pattern:
/// `bidders/{bidder_account_id}/biddingFunctions/{bidding_function_name}`.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("name")]
public virtual string Name { get; set; }
/// <summary>Output only. The state of the bidding function.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("state")]
public virtual string State { get; set; }
/// <summary>The type of the bidding function to be created.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("type")]
public virtual string Type { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>A response containing a list of a bidder's bidding functions.</summary>
public class ListBiddingFunctionsResponse : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>A list of a bidder's bidding functions.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("biddingFunctions")]
public virtual System.Collections.Generic.IList<BiddingFunction> BiddingFunctions { get; set; }
/// <summary>
        /// A token which can be passed as ListBiddingFunctionsRequest.pageToken in a subsequent call to the
        /// `ListBiddingFunctions` method to retrieve the next page of results.
/// </summary>
[Newtonsoft.Json.JsonPropertyAttribute("nextPageToken")]
public virtual string NextPageToken { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
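    // Illustrative usage sketch (not part of the generated library): paging through a
    // bidder's bidding functions using the ListRequest and NextPageToken defined above.
    // The service initializer values are assumptions for demonstration; ExecuteAsync is
    // inherited from the Google.Apis ClientServiceRequest base class.
    //
    //   var service = new Google.Apis.RealTimeBidding.v1alpha.RealTimeBiddingService(
    //       new Google.Apis.Services.BaseClientService.Initializer { ApplicationName = "example-app" });
    //
    //   string pageToken = null;
    //   do
    //   {
    //       var request = service.Bidders.BiddingFunctions.List("bidders/12345");
    //       request.PageSize = 50;
    //       request.PageToken = pageToken;
    //       var response = await request.ExecuteAsync();
    //       // ...consume response.BiddingFunctions...
    //       pageToken = response.NextPageToken;
    //   } while (!string.IsNullOrEmpty(pageToken));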
}
/*
* Exchange Web Services Managed API
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
*
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this
* software and associated documentation files (the "Software"), to deal in the Software
* without restriction, including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
* to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
* FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
namespace Microsoft.Exchange.WebServices.Data
{
using System.Diagnostics.CodeAnalysis;
/// <summary>
/// Represents the schema for e-mail messages.
/// </summary>
[Schema]
public class EmailMessageSchema : ItemSchema
{
/// <summary>
/// Field URIs for EmailMessage.
/// </summary>
private static class FieldUris
{
public const string ConversationIndex = "message:ConversationIndex";
public const string ConversationTopic = "message:ConversationTopic";
public const string InternetMessageId = "message:InternetMessageId";
public const string IsRead = "message:IsRead";
public const string IsResponseRequested = "message:IsResponseRequested";
public const string IsReadReceiptRequested = "message:IsReadReceiptRequested";
public const string IsDeliveryReceiptRequested = "message:IsDeliveryReceiptRequested";
public const string References = "message:References";
public const string ReplyTo = "message:ReplyTo";
public const string From = "message:From";
public const string Sender = "message:Sender";
public const string ToRecipients = "message:ToRecipients";
public const string CcRecipients = "message:CcRecipients";
public const string BccRecipients = "message:BccRecipients";
public const string ReceivedBy = "message:ReceivedBy";
public const string ReceivedRepresenting = "message:ReceivedRepresenting";
public const string ApprovalRequestData = "message:ApprovalRequestData";
public const string VotingInformation = "message:VotingInformation";
public const string Likers = "message:Likers";
}
/// <summary>
/// Defines the ToRecipients property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition ToRecipients =
new ComplexPropertyDefinition<EmailAddressCollection>(
XmlElementNames.ToRecipients,
FieldUris.ToRecipients,
PropertyDefinitionFlags.AutoInstantiateOnRead | PropertyDefinitionFlags.CanSet | PropertyDefinitionFlags.CanUpdate | PropertyDefinitionFlags.CanDelete,
ExchangeVersion.Exchange2007_SP1,
delegate() { return new EmailAddressCollection(); });
/// <summary>
/// Defines the BccRecipients property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition BccRecipients =
new ComplexPropertyDefinition<EmailAddressCollection>(
XmlElementNames.BccRecipients,
FieldUris.BccRecipients,
PropertyDefinitionFlags.AutoInstantiateOnRead | PropertyDefinitionFlags.CanSet | PropertyDefinitionFlags.CanUpdate | PropertyDefinitionFlags.CanDelete,
ExchangeVersion.Exchange2007_SP1,
delegate() { return new EmailAddressCollection(); });
/// <summary>
/// Defines the CcRecipients property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition CcRecipients =
new ComplexPropertyDefinition<EmailAddressCollection>(
XmlElementNames.CcRecipients,
FieldUris.CcRecipients,
PropertyDefinitionFlags.AutoInstantiateOnRead | PropertyDefinitionFlags.CanSet | PropertyDefinitionFlags.CanUpdate | PropertyDefinitionFlags.CanDelete,
ExchangeVersion.Exchange2007_SP1,
delegate() { return new EmailAddressCollection(); });
/// <summary>
/// Defines the ConversationIndex property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition ConversationIndex =
new ByteArrayPropertyDefinition(
XmlElementNames.ConversationIndex,
FieldUris.ConversationIndex,
PropertyDefinitionFlags.CanFind,
ExchangeVersion.Exchange2007_SP1);
/// <summary>
/// Defines the ConversationTopic property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition ConversationTopic =
new StringPropertyDefinition(
XmlElementNames.ConversationTopic,
FieldUris.ConversationTopic,
PropertyDefinitionFlags.CanFind,
ExchangeVersion.Exchange2007_SP1);
/// <summary>
/// Defines the From property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition From =
new ContainedPropertyDefinition<EmailAddress>(
XmlElementNames.From,
FieldUris.From,
XmlElementNames.Mailbox,
PropertyDefinitionFlags.CanSet | PropertyDefinitionFlags.CanUpdate | PropertyDefinitionFlags.CanDelete | PropertyDefinitionFlags.CanFind,
ExchangeVersion.Exchange2007_SP1,
delegate() { return new EmailAddress(); });
/// <summary>
/// Defines the IsDeliveryReceiptRequested property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition IsDeliveryReceiptRequested =
new BoolPropertyDefinition(
XmlElementNames.IsDeliveryReceiptRequested,
FieldUris.IsDeliveryReceiptRequested,
PropertyDefinitionFlags.CanSet | PropertyDefinitionFlags.CanUpdate | PropertyDefinitionFlags.CanDelete | PropertyDefinitionFlags.CanFind,
ExchangeVersion.Exchange2007_SP1);
/// <summary>
/// Defines the IsRead property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition IsRead =
new BoolPropertyDefinition(
XmlElementNames.IsRead,
FieldUris.IsRead,
PropertyDefinitionFlags.CanSet | PropertyDefinitionFlags.CanUpdate | PropertyDefinitionFlags.CanFind,
ExchangeVersion.Exchange2007_SP1);
/// <summary>
/// Defines the IsReadReceiptRequested property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition IsReadReceiptRequested =
new BoolPropertyDefinition(
XmlElementNames.IsReadReceiptRequested,
FieldUris.IsReadReceiptRequested,
PropertyDefinitionFlags.CanSet | PropertyDefinitionFlags.CanUpdate | PropertyDefinitionFlags.CanDelete | PropertyDefinitionFlags.CanFind,
ExchangeVersion.Exchange2007_SP1);
/// <summary>
/// Defines the IsResponseRequested property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition IsResponseRequested =
new BoolPropertyDefinition(
XmlElementNames.IsResponseRequested,
FieldUris.IsResponseRequested,
PropertyDefinitionFlags.CanSet | PropertyDefinitionFlags.CanUpdate | PropertyDefinitionFlags.CanDelete | PropertyDefinitionFlags.CanFind,
ExchangeVersion.Exchange2007_SP1,
true); // isNullable
/// <summary>
/// Defines the InternetMessageId property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition InternetMessageId =
new StringPropertyDefinition(
XmlElementNames.InternetMessageId,
FieldUris.InternetMessageId,
PropertyDefinitionFlags.CanFind,
ExchangeVersion.Exchange2007_SP1);
/// <summary>
/// Defines the References property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition References =
new StringPropertyDefinition(
XmlElementNames.References,
FieldUris.References,
PropertyDefinitionFlags.CanSet | PropertyDefinitionFlags.CanUpdate | PropertyDefinitionFlags.CanDelete | PropertyDefinitionFlags.CanFind,
ExchangeVersion.Exchange2007_SP1);
/// <summary>
/// Defines the ReplyTo property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition ReplyTo =
new ComplexPropertyDefinition<EmailAddressCollection>(
XmlElementNames.ReplyTo,
FieldUris.ReplyTo,
PropertyDefinitionFlags.AutoInstantiateOnRead | PropertyDefinitionFlags.CanSet | PropertyDefinitionFlags.CanUpdate | PropertyDefinitionFlags.CanDelete,
ExchangeVersion.Exchange2007_SP1,
delegate() { return new EmailAddressCollection(); });
/// <summary>
/// Defines the Sender property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition Sender =
new ContainedPropertyDefinition<EmailAddress>(
XmlElementNames.Sender,
FieldUris.Sender,
XmlElementNames.Mailbox,
PropertyDefinitionFlags.CanSet | PropertyDefinitionFlags.CanFind,
ExchangeVersion.Exchange2007_SP1,
delegate() { return new EmailAddress(); });
/// <summary>
/// Defines the ReceivedBy property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition ReceivedBy =
new ContainedPropertyDefinition<EmailAddress>(
XmlElementNames.ReceivedBy,
FieldUris.ReceivedBy,
XmlElementNames.Mailbox,
PropertyDefinitionFlags.CanFind,
ExchangeVersion.Exchange2007_SP1,
delegate() { return new EmailAddress(); });
/// <summary>
/// Defines the ReceivedRepresenting property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition ReceivedRepresenting =
new ContainedPropertyDefinition<EmailAddress>(
XmlElementNames.ReceivedRepresenting,
FieldUris.ReceivedRepresenting,
XmlElementNames.Mailbox,
PropertyDefinitionFlags.CanFind,
ExchangeVersion.Exchange2007_SP1,
delegate() { return new EmailAddress(); });
/// <summary>
/// Defines the ApprovalRequestData property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition ApprovalRequestData =
new ComplexPropertyDefinition<ApprovalRequestData>(
XmlElementNames.ApprovalRequestData,
FieldUris.ApprovalRequestData,
ExchangeVersion.Exchange2013,
delegate() { return new ApprovalRequestData(); });
/// <summary>
/// Defines the VotingInformation property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition VotingInformation =
new ComplexPropertyDefinition<VotingInformation>(
XmlElementNames.VotingInformation,
FieldUris.VotingInformation,
ExchangeVersion.Exchange2013,
delegate() { return new VotingInformation(); });
/// <summary>
        /// Defines the Likers property.
/// </summary>
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "Immutable type")]
public static readonly PropertyDefinition Likers =
new ComplexPropertyDefinition<EmailAddressCollection>(
XmlElementNames.Likers,
FieldUris.Likers,
PropertyDefinitionFlags.AutoInstantiateOnRead,
ExchangeVersion.Exchange2015,
delegate() { return new EmailAddressCollection(); });
// This must be after the declaration of property definitions
internal static new readonly EmailMessageSchema Instance = new EmailMessageSchema();
/// <summary>
/// Registers properties.
/// </summary>
/// <remarks>
/// IMPORTANT NOTE: PROPERTIES MUST BE REGISTERED IN SCHEMA ORDER (i.e. the same order as they are defined in types.xsd)
/// </remarks>
internal override void RegisterProperties()
{
base.RegisterProperties();
this.RegisterProperty(Sender);
this.RegisterProperty(ToRecipients);
this.RegisterProperty(CcRecipients);
this.RegisterProperty(BccRecipients);
this.RegisterProperty(IsReadReceiptRequested);
this.RegisterProperty(IsDeliveryReceiptRequested);
this.RegisterProperty(ConversationIndex);
this.RegisterProperty(ConversationTopic);
this.RegisterProperty(From);
this.RegisterProperty(InternetMessageId);
this.RegisterProperty(IsRead);
this.RegisterProperty(IsResponseRequested);
this.RegisterProperty(References);
this.RegisterProperty(ReplyTo);
this.RegisterProperty(ReceivedBy);
this.RegisterProperty(ReceivedRepresenting);
this.RegisterProperty(ApprovalRequestData);
this.RegisterProperty(VotingInformation);
this.RegisterProperty(Likers);
}
/// <summary>
/// Initializes a new instance of the <see cref="EmailMessageSchema"/> class.
/// </summary>
internal EmailMessageSchema()
: base()
{
}
}
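    // Illustrative usage sketch (not part of the schema definition): the property definitions
    // above are typically supplied to a PropertySet when binding to a message. The `service`
    // and `itemId` variables are assumptions for demonstration.
    //
    //   var propertySet = new PropertySet(
    //       BasePropertySet.IdOnly,
    //       EmailMessageSchema.From,
    //       EmailMessageSchema.ToRecipients,
    //       EmailMessageSchema.IsRead);
    //
    //   EmailMessage message = EmailMessage.Bind(service, itemId, propertySet);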
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Xunit;
namespace System.Collections.Tests
{
public static class DictionaryBaseTests
{
private static FooKey CreateKey(int i) => new FooKey(i, i.ToString());
private static FooValue CreateValue(int i) => new FooValue(i, i.ToString());
private static MyDictionary CreateDictionary(int count)
{
var dictionary = new MyDictionary();
for (int i = 0; i < count; i++)
{
dictionary.Add(CreateKey(i), CreateValue(i));
}
return dictionary;
}
[Fact]
public static void Add()
{
var dictBase = new MyDictionary();
for (int i = 0; i < 100; i++)
{
FooKey key = CreateKey(i);
dictBase.Add(key, CreateValue(i));
Assert.True(dictBase.Contains(key));
}
Assert.Equal(100, dictBase.Count);
for (int i = 0; i < dictBase.Count; i++)
{
Assert.Equal(CreateValue(i), dictBase[CreateKey(i)]);
}
FooKey nullKey = CreateKey(101);
dictBase.Add(nullKey, null);
Assert.Equal(null, dictBase[nullKey]);
}
[Fact]
public static void Remove()
{
MyDictionary dictBase = CreateDictionary(100);
for (int i = 0; i < 100; i++)
{
FooKey key = CreateKey(i);
dictBase.Remove(key);
Assert.False(dictBase.Contains(key));
}
Assert.Equal(0, dictBase.Count);
dictBase.Remove(new FooKey()); // Doesn't exist, but doesn't throw
}
[Fact]
public static void Contains()
{
MyDictionary dictBase = CreateDictionary(100);
for (int i = 0; i < dictBase.Count; i++)
{
Assert.True(dictBase.Contains(CreateKey(i)));
}
Assert.False(dictBase.Contains(new FooKey()));
}
[Fact]
public static void Keys()
{
MyDictionary dictBase = CreateDictionary(100);
ICollection keys = dictBase.Keys;
Assert.Equal(dictBase.Count, keys.Count);
foreach (FooKey key in keys)
{
Assert.True(dictBase.Contains(key));
}
}
[Fact]
public static void Values()
{
MyDictionary dictBase = CreateDictionary(100);
ICollection values = dictBase.Values;
Assert.Equal(dictBase.Count, values.Count);
foreach (FooValue value in values)
{
FooKey key = CreateKey(value.IntValue);
Assert.Equal(value, dictBase[key]);
}
}
[Fact]
public static void Item_Get()
{
MyDictionary dictBase = CreateDictionary(100);
for (int i = 0; i < dictBase.Count; i++)
{
Assert.Equal(CreateValue(i), dictBase[CreateKey(i)]);
}
Assert.Equal(null, dictBase[new FooKey()]);
}
[Fact]
public static void Item_Get_NullKey_ThrowsArgumentNullException()
{
var dictBase = new MyDictionary();
Assert.Throws<ArgumentNullException>("key", () => dictBase[null]);
}
[Fact]
public static void Item_Set()
{
MyDictionary dictBase = CreateDictionary(100);
for (int i = 0; i < dictBase.Count; i++)
{
FooKey key = CreateKey(i);
FooValue value = CreateValue(dictBase.Count - i - 1);
dictBase[key] = value;
Assert.Equal(value, dictBase[key]);
}
FooKey nonExistentKey = CreateKey(101);
dictBase[nonExistentKey] = null;
            Assert.Equal(101, dictBase.Count); // Setting a non-existent key adds a new key/value pair
Assert.Equal(null, dictBase[nonExistentKey]);
}
[Fact]
public static void Indexer_Set_NullKey_ThrowsArgumentNullException()
{
var dictBase = new MyDictionary();
Assert.Throws<ArgumentNullException>("key", () => dictBase[null] = new FooValue());
}
[Fact]
public static void Clear()
{
MyDictionary dictBase = CreateDictionary(100);
dictBase.Clear();
Assert.Equal(0, dictBase.Count);
}
[Fact]
public static void CopyTo()
{
MyDictionary dictBase = CreateDictionary(100);
// Basic
var entries = new DictionaryEntry[dictBase.Count];
dictBase.CopyTo(entries, 0);
Assert.Equal(dictBase.Count, entries.Length);
for (int i = 0; i < entries.Length; i++)
{
DictionaryEntry entry = entries[i];
Assert.Equal(CreateKey(entries.Length - i - 1), entry.Key);
Assert.Equal(CreateValue(entries.Length - i - 1), entry.Value);
}
// With index
entries = new DictionaryEntry[dictBase.Count * 2];
dictBase.CopyTo(entries, dictBase.Count);
Assert.Equal(dictBase.Count * 2, entries.Length);
for (int i = dictBase.Count; i < entries.Length; i++)
{
DictionaryEntry entry = entries[i];
Assert.Equal(CreateKey(entries.Length - i - 1), entry.Key);
Assert.Equal(CreateValue(entries.Length - i - 1), entry.Value);
}
}
[Fact]
public static void CopyTo_Invalid()
{
MyDictionary dictBase = CreateDictionary(100);
Assert.Throws<ArgumentNullException>("array", () => dictBase.CopyTo(null, 0)); // Array is null
Assert.Throws<ArgumentException>("array", () => dictBase.CopyTo(new object[100, 100], 0)); // Array is multidimensional
Assert.Throws<ArgumentOutOfRangeException>("arrayIndex", () => dictBase.CopyTo(new DictionaryEntry[100], -1)); // Index < 0
Assert.Throws<ArgumentException>(null, () => dictBase.CopyTo(new DictionaryEntry[100], 100)); // Index >= count
Assert.Throws<ArgumentException>(null, () => dictBase.CopyTo(new DictionaryEntry[100], 50)); // Index + array.Count >= count
}
[Fact]
public static void GetEnumerator_IDictionaryEnumerator()
{
MyDictionary dictBase = CreateDictionary(100);
IDictionaryEnumerator enumerator = dictBase.GetEnumerator();
Assert.NotNull(enumerator);
int count = 0;
while (enumerator.MoveNext())
{
DictionaryEntry entry1 = (DictionaryEntry)enumerator.Current;
DictionaryEntry entry2 = enumerator.Entry;
Assert.Equal(entry1.Key, entry2.Key);
Assert.Equal(entry1.Value, entry2.Value);
Assert.Equal(enumerator.Key, entry1.Key);
Assert.Equal(enumerator.Value, entry1.Value);
Assert.Equal(enumerator.Value, dictBase[(FooKey)enumerator.Key]);
count++;
}
Assert.Equal(dictBase.Count, count);
}
[Fact]
public static void GetEnumerator_IDictionaryEnumerator_Invalid()
{
MyDictionary dictBase = CreateDictionary(100);
IDictionaryEnumerator enumerator = dictBase.GetEnumerator();
// Index < 0
Assert.Throws<InvalidOperationException>(() => enumerator.Current);
Assert.Throws<InvalidOperationException>(() => enumerator.Entry);
Assert.Throws<InvalidOperationException>(() => enumerator.Key);
Assert.Throws<InvalidOperationException>(() => enumerator.Value);
// Index > dictionary.Count
while (enumerator.MoveNext()) ;
Assert.False(enumerator.MoveNext());
Assert.Throws<InvalidOperationException>(() => enumerator.Current);
Assert.Throws<InvalidOperationException>(() => enumerator.Entry);
Assert.Throws<InvalidOperationException>(() => enumerator.Key);
Assert.Throws<InvalidOperationException>(() => enumerator.Value);
// Current throws after resetting
enumerator.Reset();
Assert.True(enumerator.MoveNext());
enumerator.Reset();
Assert.Throws<InvalidOperationException>(() => enumerator.Current);
Assert.Throws<InvalidOperationException>(() => enumerator.Entry);
Assert.Throws<InvalidOperationException>(() => enumerator.Key);
Assert.Throws<InvalidOperationException>(() => enumerator.Value);
}
[Fact]
public static void GetEnumerator_IEnumerator()
{
MyDictionary dictBase = CreateDictionary(100);
IEnumerator enumerator = ((IEnumerable)dictBase).GetEnumerator();
Assert.NotNull(enumerator);
int count = 0;
while (enumerator.MoveNext())
{
DictionaryEntry entry = (DictionaryEntry)enumerator.Current;
Assert.Equal((FooValue)entry.Value, dictBase[(FooKey)entry.Key]);
count++;
}
Assert.Equal(dictBase.Count, count);
}
[Fact]
public static void GetEnumerator_IEnumerator_Invalid()
{
MyDictionary dictBase = CreateDictionary(100);
IEnumerator enumerator = ((IEnumerable)dictBase).GetEnumerator();
// Index < 0
Assert.Throws<InvalidOperationException>(() => enumerator.Current);
// Index >= dictionary.Count
while (enumerator.MoveNext()) ;
Assert.Throws<InvalidOperationException>(() => enumerator.Current);
Assert.False(enumerator.MoveNext());
// Current throws after resetting
enumerator.Reset();
Assert.True(enumerator.MoveNext());
enumerator.Reset();
Assert.Throws<InvalidOperationException>(() => enumerator.Current);
}
[Fact]
public static void SyncRoot()
{
// SyncRoot should be the reference to the underlying dictionary, not to MyDictionary
var dictBase = new MyDictionary();
object syncRoot = dictBase.SyncRoot;
Assert.NotEqual(syncRoot, dictBase);
Assert.Equal(dictBase.SyncRoot, dictBase.SyncRoot);
}
[Fact]
public static void IDictionaryProperties()
{
var dictBase = new MyDictionary();
Assert.False(dictBase.IsFixedSize);
Assert.False(dictBase.IsReadOnly);
Assert.False(dictBase.IsSynchronized);
}
[Fact]
public static void Add_Called()
{
var f = new FooKey(0, "0");
var dictBase = new OnMethodCalledDictionary();
dictBase.Add(f, "hello");
Assert.True(dictBase.OnValidateCalled);
Assert.True(dictBase.OnInsertCalled);
Assert.True(dictBase.OnInsertCompleteCalled);
Assert.True(dictBase.Contains(f));
}
[Fact]
public static void Add_Throws_Called()
{
var f = new FooKey(0, "0");
// Throw OnValidate
var dictBase = new OnMethodCalledDictionary();
dictBase.OnValidateThrow = true;
Assert.Throws<Exception>(() => dictBase.Add(f, ""));
Assert.Equal(0, dictBase.Count);
// Throw OnInsert
dictBase = new OnMethodCalledDictionary();
dictBase.OnInsertThrow = true;
Assert.Throws<Exception>(() => dictBase.Add(f, ""));
Assert.Equal(0, dictBase.Count);
// Throw OnInsertComplete
dictBase = new OnMethodCalledDictionary();
dictBase.OnInsertCompleteThrow = true;
Assert.Throws<Exception>(() => dictBase.Add(f, ""));
Assert.Equal(0, dictBase.Count);
}
[Fact]
public static void Remove_Called()
{
var f = new FooKey(0, "0");
var dictBase = new OnMethodCalledDictionary();
dictBase.Add(f, "");
dictBase.OnValidateCalled = false;
dictBase.Remove(f);
Assert.True(dictBase.OnValidateCalled);
Assert.True(dictBase.OnRemoveCalled);
Assert.True(dictBase.OnRemoveCompleteCalled);
Assert.False(dictBase.Contains(f));
}
[Fact]
public static void Remove_Throws_Called()
{
var f = new FooKey(0, "0");
// Throw OnValidate
var dictBase = new OnMethodCalledDictionary();
dictBase.Add(f, "");
dictBase.OnValidateThrow = true;
Assert.Throws<Exception>(() => dictBase.Remove(f));
Assert.Equal(1, dictBase.Count);
// Throw OnRemove
dictBase = new OnMethodCalledDictionary();
dictBase.Add(f, "");
dictBase.OnRemoveThrow = true;
Assert.Throws<Exception>(() => dictBase.Remove(f));
Assert.Equal(1, dictBase.Count);
// Throw OnRemoveComplete
dictBase = new OnMethodCalledDictionary();
dictBase.Add(f, "");
dictBase.OnRemoveCompleteThrow = true;
Assert.Throws<Exception>(() => dictBase.Remove(f));
Assert.Equal(1, dictBase.Count);
}
[Fact]
public static void Clear_Called()
{
var f = new FooKey(0, "0");
var dictBase = new OnMethodCalledDictionary();
dictBase.Add(f, "");
dictBase.Clear();
Assert.True(dictBase.OnClearCalled);
Assert.True(dictBase.OnClearCompleteCalled);
Assert.Equal(0, dictBase.Count);
}
[Fact]
public static void Clear_Throws_Called()
{
var f = new FooKey(0, "0");
// Throw OnValidate
var dictBase = new OnMethodCalledDictionary();
dictBase.Add(f, "");
dictBase.OnValidateThrow = true;
dictBase.Clear();
Assert.Equal(0, dictBase.Count);
// Throw OnClear
dictBase = new OnMethodCalledDictionary();
dictBase.Add(f, "");
dictBase.OnClearThrow = true;
Assert.Throws<Exception>(() => dictBase.Clear());
Assert.Equal(1, dictBase.Count);
// Throw OnClearComplete
dictBase = new OnMethodCalledDictionary();
dictBase.Add(f, "");
dictBase.OnClearCompleteThrow = true;
Assert.Throws<Exception>(() => dictBase.Clear());
Assert.Equal(0, dictBase.Count);
}
[Fact]
public static void Set_New_Called()
{
var f = new FooKey(1, "1");
var dictBase = new OnMethodCalledDictionary();
dictBase.OnValidateCalled = false;
dictBase[f] = "hello";
Assert.True(dictBase.OnValidateCalled);
Assert.True(dictBase.OnSetCalled);
Assert.True(dictBase.OnSetCompleteCalled);
Assert.Equal(1, dictBase.Count);
Assert.Equal("hello", dictBase[f]);
}
[Fact]
public static void Set_New_Throws_Called()
{
var f = new FooKey(0, "0");
// Throw OnValidate
var dictBase = new OnMethodCalledDictionary();
dictBase.OnValidateThrow = true;
Assert.Throws<Exception>(() => dictBase[f] = "hello");
Assert.Equal(0, dictBase.Count);
// Throw OnSet
dictBase = new OnMethodCalledDictionary();
dictBase.OnSetThrow = true;
Assert.Throws<Exception>(() => dictBase[f] = "hello");
Assert.Equal(0, dictBase.Count);
// Throw OnSetComplete
dictBase = new OnMethodCalledDictionary();
dictBase.OnSetCompleteThrow = true;
Assert.Throws<Exception>(() => dictBase[f] = "hello");
Assert.Equal(0, dictBase.Count);
}
[Fact]
public static void Set_Existing_Called()
{
var f = new FooKey(1, "1");
var dictBase = new OnMethodCalledDictionary();
dictBase.Add(new FooKey(), "");
dictBase.OnValidateCalled = false;
dictBase[f] = "hello";
Assert.True(dictBase.OnValidateCalled);
Assert.True(dictBase.OnSetCalled);
Assert.True(dictBase.OnSetCompleteCalled);
Assert.Equal("hello", dictBase[f]);
}
[Fact]
public static void Set_Existing_Throws_Called()
{
var f = new FooKey(0, "0");
// Throw OnValidate
var dictBase = new OnMethodCalledDictionary();
dictBase.Add(f, "");
dictBase.OnValidateThrow = true;
Assert.Throws<Exception>(() => dictBase[f] = "hello");
Assert.Equal("", dictBase[f]);
// Throw OnSet
dictBase = new OnMethodCalledDictionary();
dictBase.Add(f, "");
dictBase.OnSetThrow = true;
Assert.Throws<Exception>(() => dictBase[f] = "hello");
Assert.Equal("", dictBase[f]);
// Throw OnSetComplete
dictBase = new OnMethodCalledDictionary();
dictBase.Add(f, "");
dictBase.OnSetCompleteThrow = true;
Assert.Throws<Exception>(() => dictBase[f] = "hello");
Assert.Equal("", dictBase[f]);
}
// DictionaryBase is provided to be used as the base class for strongly typed collections. Let's use one of our own here (an illustrative usage sketch follows this class).
private class MyDictionary : DictionaryBase
{
public void Add(FooKey key, FooValue value) => Dictionary.Add(key, value);
public FooValue this[FooKey key]
{
get { return (FooValue)Dictionary[key]; }
set { Dictionary[key] = value; }
}
public bool IsSynchronized
{
get { return Dictionary.IsSynchronized; }
}
public object SyncRoot
{
get { return Dictionary.SyncRoot; }
}
public bool Contains(FooKey key) => Dictionary.Contains(key);
public void Remove(FooKey key) => Dictionary.Remove(key);
public bool IsFixedSize
{
get { return Dictionary.IsFixedSize; }
}
public bool IsReadOnly
{
get { return Dictionary.IsReadOnly; }
}
public ICollection Keys
{
get { return Dictionary.Keys; }
}
public ICollection Values
{
get { return Dictionary.Values; }
}
}
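// Illustrative only (not part of the original test suite): a minimal sketch of how the strongly
// typed MyDictionary wrapper above is consumed; the method is unused by the tests and exists
// purely as documentation. FooKey/FooValue are the helper types defined later in this class.
private static void MyDictionaryUsageSketch()
{
var dict = new MyDictionary();
var key = new FooKey(1, "1");
dict.Add(key, new FooValue(10, "ten"));   // forwards to the protected DictionaryBase.Dictionary
FooValue stored = dict[key];              // typed indexer casts the stored object back to FooValue
bool found = dict.Contains(key);          // membership check against the underlying IDictionary
dict.Remove(key);                         // removal also goes through the protected Dictionary
}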
private class FooKey : IComparable
{
public FooKey()
{
}
public FooKey(int i, string str)
{
IntValue = i;
StringValue = str;
}
public int IntValue { get; set; }
public string StringValue { get; set; }
public override bool Equals(object obj)
{
FooKey foo = obj as FooKey;
if (foo == null)
return false;
return foo.IntValue == IntValue && foo.StringValue == StringValue;
}
public override int GetHashCode() => IntValue;
public int CompareTo(object obj)
{
FooKey temp = (FooKey)obj;
return IntValue.CompareTo(temp.IntValue);
}
}
private class FooValue : IComparable
{
public FooValue()
{
}
public FooValue(int intValue, string stringValue)
{
IntValue = intValue;
StringValue = stringValue;
}
public int IntValue { get; set; }
public string StringValue { get; set; }
public override bool Equals(object obj)
{
FooValue foo = obj as FooValue;
if (foo == null)
return false;
return foo.IntValue == IntValue && foo.StringValue == StringValue;
}
public override int GetHashCode() => IntValue;
public int CompareTo(object obj)
{
FooValue temp = (FooValue)obj;
return IntValue.CompareTo(temp.IntValue);
}
}
// DictionaryBase is provided to be used as the base class for strongly typed collections. Let's use one of our own here.
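// Summary of the hook protocol exercised by the tests above (editorial comment): Add runs
// OnValidate -> OnInsert -> store -> OnInsertComplete, Remove runs OnValidate -> OnRemove ->
// remove -> OnRemoveComplete, and the indexer setter runs OnValidate -> OnSet -> store ->
// OnSetComplete; a throw from any of these leaves the dictionary unchanged. Clear skips
// OnValidate and runs OnClear -> clear -> OnClearComplete; a throw from OnClear keeps the
// contents, while a throw from OnClearComplete happens after the contents are already gone.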
private class OnMethodCalledDictionary : DictionaryBase
{
public bool OnValidateCalled;
public bool OnSetCalled;
public bool OnSetCompleteCalled;
public bool OnInsertCalled;
public bool OnInsertCompleteCalled;
public bool OnClearCalled;
public bool OnClearCompleteCalled;
public bool OnRemoveCalled;
public bool OnRemoveCompleteCalled;
public bool OnValidateThrow;
public bool OnSetThrow;
public bool OnSetCompleteThrow;
public bool OnInsertThrow;
public bool OnInsertCompleteThrow;
public bool OnClearThrow;
public bool OnClearCompleteThrow;
public bool OnRemoveThrow;
public bool OnRemoveCompleteThrow;
public void Add(FooKey key, string value) => Dictionary.Add(key, value);
public string this[FooKey key]
{
get { return (string)Dictionary[key]; }
set { Dictionary[key] = value; }
}
public bool Contains(FooKey key) => Dictionary.Contains(key);
public void Remove(FooKey key) => Dictionary.Remove(key);
protected override void OnSet(object key, object oldValue, object newValue)
{
Assert.True(OnValidateCalled);
Assert.Equal(oldValue, this[(FooKey)key]);
OnSetCalled = true;
if (OnSetThrow)
throw new Exception("OnSet");
}
protected override void OnInsert(object key, object value)
{
Assert.True(OnValidateCalled);
Assert.NotEqual(value, this[(FooKey)key]);
OnInsertCalled = true;
if (OnInsertThrow)
throw new Exception("OnInsert");
}
protected override void OnClear()
{
OnClearCalled = true;
if (OnClearThrow)
throw new Exception("OnClear");
}
protected override void OnRemove(object key, object value)
{
Assert.True(OnValidateCalled);
Assert.Equal(value, this[(FooKey)key]);
OnRemoveCalled = true;
if (OnRemoveThrow)
throw new Exception("OnRemove");
}
protected override void OnValidate(object key, object value)
{
OnValidateCalled = true;
if (OnValidateThrow)
throw new Exception("OnValidate");
}
protected override void OnSetComplete(object key, object oldValue, object newValue)
{
Assert.True(OnSetCalled);
Assert.Equal(newValue, this[(FooKey)key]);
OnSetCompleteCalled = true;
if (OnSetCompleteThrow)
throw new Exception("OnSetComplete");
}
protected override void OnInsertComplete(object key, object value)
{
Assert.True(OnInsertCalled);
Assert.Equal(value, this[(FooKey)key]);
OnInsertCompleteCalled = true;
if (OnInsertCompleteThrow)
throw new Exception("OnInsertComplete");
}
protected override void OnClearComplete()
{
Assert.True(OnClearCalled);
OnClearCompleteCalled = true;
if (OnClearCompleteThrow)
throw new Exception("OnClearComplete");
}
protected override void OnRemoveComplete(object key, object value)
{
Assert.True(OnRemoveCalled);
Assert.False(Contains((FooKey)key));
OnRemoveCompleteCalled = true;
if (OnRemoveCompleteThrow)
throw new Exception("OnRemoveComplete");
}
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Reflection;
using NUnit.Framework;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Framework.Communications;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Tests.Common;
using OpenSim.Tests.Common.Mock;
namespace OpenSim.Region.Framework.Scenes.Tests
{
/// <summary>
/// Basic scene object status tests
/// </summary>
[TestFixture]
public class SceneObjectStatusTests : OpenSimTestCase
{
private TestScene m_scene;
private UUID m_ownerId = TestHelpers.ParseTail(0x1);
private SceneObjectGroup m_so1;
private SceneObjectGroup m_so2;
[SetUp]
public void Init()
{
m_scene = new SceneHelpers().SetupScene();
m_so1 = SceneHelpers.CreateSceneObject(1, m_ownerId, "so1", 0x10);
m_so2 = SceneHelpers.CreateSceneObject(1, m_ownerId, "so2", 0x20);
}
[Test]
public void TestSetPhantomSinglePrim()
{
TestHelpers.InMethod();
m_scene.AddSceneObject(m_so1);
SceneObjectPart rootPart = m_so1.RootPart;
Assert.That(rootPart.Flags, Is.EqualTo(PrimFlags.None));
m_so1.ScriptSetPhantomStatus(true);
// Console.WriteLine("so.RootPart.Flags [{0}]", so.RootPart.Flags);
Assert.That(rootPart.Flags, Is.EqualTo(PrimFlags.Phantom));
m_so1.ScriptSetPhantomStatus(false);
Assert.That(rootPart.Flags, Is.EqualTo(PrimFlags.None));
}
[Test]
public void TestSetNonPhysicsVolumeDetectSinglePrim()
{
TestHelpers.InMethod();
m_scene.AddSceneObject(m_so1);
SceneObjectPart rootPart = m_so1.RootPart;
Assert.That(rootPart.Flags, Is.EqualTo(PrimFlags.None));
m_so1.ScriptSetVolumeDetect(true);
// Console.WriteLine("so.RootPart.Flags [{0}]", so.RootPart.Flags);
Assert.That(rootPart.Flags, Is.EqualTo(PrimFlags.Phantom));
m_so1.ScriptSetVolumeDetect(false);
Assert.That(rootPart.Flags, Is.EqualTo(PrimFlags.None));
}
[Test]
public void TestSetPhysicsSinglePrim()
{
TestHelpers.InMethod();
m_scene.AddSceneObject(m_so1);
SceneObjectPart rootPart = m_so1.RootPart;
Assert.That(rootPart.Flags, Is.EqualTo(PrimFlags.None));
m_so1.ScriptSetPhysicsStatus(true);
Assert.That(rootPart.Flags, Is.EqualTo(PrimFlags.Physics));
m_so1.ScriptSetPhysicsStatus(false);
Assert.That(rootPart.Flags, Is.EqualTo(PrimFlags.None));
}
[Test]
public void TestSetPhysicsVolumeDetectSinglePrim()
{
TestHelpers.InMethod();
m_scene.AddSceneObject(m_so1);
SceneObjectPart rootPart = m_so1.RootPart;
Assert.That(rootPart.Flags, Is.EqualTo(PrimFlags.None));
m_so1.ScriptSetPhysicsStatus(true);
m_so1.ScriptSetVolumeDetect(true);
Assert.That(rootPart.Flags, Is.EqualTo(PrimFlags.Phantom | PrimFlags.Physics));
m_so1.ScriptSetVolumeDetect(false);
Assert.That(rootPart.Flags, Is.EqualTo(PrimFlags.Physics));
}
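// Summary of the flag behaviour exercised above (editorial comment): at the SceneObjectPart
// level volume detect surfaces as the Phantom flag, so a non-physical volume-detect prim
// reports PrimFlags.Phantom, a physical one reports PrimFlags.Phantom | PrimFlags.Physics,
// and clearing volume detect removes only the Phantom flag.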
[Test]
public void TestSetPhysicsLinkset()
{
TestHelpers.InMethod();
m_scene.AddSceneObject(m_so1);
m_scene.AddSceneObject(m_so2);
m_scene.LinkObjects(m_ownerId, m_so1.LocalId, new List<uint>() { m_so2.LocalId });
m_so1.ScriptSetPhysicsStatus(true);
Assert.That(m_so1.RootPart.Flags, Is.EqualTo(PrimFlags.Physics));
Assert.That(m_so1.Parts[1].Flags, Is.EqualTo(PrimFlags.Physics));
m_so1.ScriptSetPhysicsStatus(false);
Assert.That(m_so1.RootPart.Flags, Is.EqualTo(PrimFlags.None));
Assert.That(m_so1.Parts[1].Flags, Is.EqualTo(PrimFlags.None));
m_so1.ScriptSetPhysicsStatus(true);
Assert.That(m_so1.RootPart.Flags, Is.EqualTo(PrimFlags.Physics));
Assert.That(m_so1.Parts[1].Flags, Is.EqualTo(PrimFlags.Physics));
}
/// <summary>
/// Test that linking results in the correct physical status for all linkees.
/// </summary>
[Test]
public void TestLinkPhysicsBothPhysical()
{
TestHelpers.InMethod();
m_scene.AddSceneObject(m_so1);
m_scene.AddSceneObject(m_so2);
m_so1.ScriptSetPhysicsStatus(true);
m_so2.ScriptSetPhysicsStatus(true);
m_scene.LinkObjects(m_ownerId, m_so1.LocalId, new List<uint>() { m_so2.LocalId });
Assert.That(m_so1.RootPart.Flags, Is.EqualTo(PrimFlags.Physics));
Assert.That(m_so1.Parts[1].Flags, Is.EqualTo(PrimFlags.Physics));
}
/// <summary>
/// Test that linking results in the correct physical status for all linkees.
/// </summary>
[Test]
public void TestLinkPhysicsRootPhysicalOnly()
{
TestHelpers.InMethod();
m_scene.AddSceneObject(m_so1);
m_scene.AddSceneObject(m_so2);
m_so1.ScriptSetPhysicsStatus(true);
m_scene.LinkObjects(m_ownerId, m_so1.LocalId, new List<uint>() { m_so2.LocalId });
Assert.That(m_so1.RootPart.Flags, Is.EqualTo(PrimFlags.Physics));
Assert.That(m_so1.Parts[1].Flags, Is.EqualTo(PrimFlags.Physics));
}
/// <summary>
/// Test that linking results in the correct physical status for all linkees.
/// </summary>
[Test]
public void TestLinkPhysicsChildPhysicalOnly()
{
TestHelpers.InMethod();
m_scene.AddSceneObject(m_so1);
m_scene.AddSceneObject(m_so2);
m_so2.ScriptSetPhysicsStatus(true);
m_scene.LinkObjects(m_ownerId, m_so1.LocalId, new List<uint>() { m_so2.LocalId });
Assert.That(m_so1.RootPart.Flags, Is.EqualTo(PrimFlags.None));
Assert.That(m_so1.Parts[1].Flags, Is.EqualTo(PrimFlags.None));
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CodeGeneration;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Extensions;
using Microsoft.CodeAnalysis.CSharp.Symbols;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Editing;
using Microsoft.CodeAnalysis.ExtractMethod;
using Microsoft.CodeAnalysis.Formatting;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Simplification;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.CSharp.ExtractMethod
{
internal partial class CSharpMethodExtractor
{
private abstract partial class CSharpCodeGenerator : CodeGenerator<StatementSyntax, ExpressionSyntax, SyntaxNode>
{
private SyntaxToken _methodName;
public static async Task<GeneratedCode> GenerateAsync(
InsertionPoint insertionPoint,
SelectionResult selectionResult,
AnalyzerResult analyzerResult,
CancellationToken cancellationToken)
{
var codeGenerator = Create(insertionPoint, selectionResult, analyzerResult);
return await codeGenerator.GenerateAsync(cancellationToken).ConfigureAwait(false);
}
private static CSharpCodeGenerator Create(
InsertionPoint insertionPoint,
SelectionResult selectionResult,
AnalyzerResult analyzerResult)
{
if (ExpressionCodeGenerator.IsExtractMethodOnExpression(selectionResult))
{
return new ExpressionCodeGenerator(insertionPoint, selectionResult, analyzerResult);
}
if (SingleStatementCodeGenerator.IsExtractMethodOnSingleStatement(selectionResult))
{
return new SingleStatementCodeGenerator(insertionPoint, selectionResult, analyzerResult);
}
if (MultipleStatementsCodeGenerator.IsExtractMethodOnMultipleStatements(selectionResult))
{
return new MultipleStatementsCodeGenerator(insertionPoint, selectionResult, analyzerResult);
}
return Contract.FailWithReturn<CSharpCodeGenerator>("Unknown selection");
}
protected CSharpCodeGenerator(
InsertionPoint insertionPoint,
SelectionResult selectionResult,
AnalyzerResult analyzerResult) :
base(insertionPoint, selectionResult, analyzerResult)
{
Contract.ThrowIfFalse(this.SemanticDocument == selectionResult.SemanticDocument);
var nameToken = CreateMethodName();
_methodName = nameToken.WithAdditionalAnnotations(this.MethodNameAnnotation);
}
private CSharpSelectionResult CSharpSelectionResult
{
get { return (CSharpSelectionResult)this.SelectionResult; }
}
protected override SyntaxNode GetPreviousMember(SemanticDocument document)
{
var node = this.InsertionPoint.With(document).GetContext();
return (node.Parent is GlobalStatementSyntax) ? node.Parent : node;
}
protected override OperationStatus<IMethodSymbol> GenerateMethodDefinition(CancellationToken cancellationToken)
{
var result = CreateMethodBody(cancellationToken);
var methodSymbol = CodeGenerationSymbolFactory.CreateMethodSymbol(
attributes: SpecializedCollections.EmptyList<AttributeData>(),
accessibility: Accessibility.Private,
modifiers: CreateMethodModifiers(),
returnType: this.AnalyzerResult.ReturnType,
explicitInterfaceSymbol: null,
name: _methodName.ToString(),
typeParameters: CreateMethodTypeParameters(cancellationToken),
parameters: CreateMethodParameters(),
statements: result.Data);
return result.With(
this.MethodDefinitionAnnotation.AddAnnotationToSymbol(
Formatter.Annotation.AddAnnotationToSymbol(methodSymbol)));
}
protected override async Task<SyntaxNode> GenerateBodyForCallSiteContainerAsync(CancellationToken cancellationToken)
{
var container = this.GetOutermostCallSiteContainerToProcess(cancellationToken);
var variableMapToRemove = CreateVariableDeclarationToRemoveMap(
this.AnalyzerResult.GetVariablesToMoveIntoMethodDefinition(cancellationToken), cancellationToken);
var firstStatementToRemove = GetFirstStatementOrInitializerSelectedAtCallSite();
var lastStatementToRemove = GetLastStatementOrInitializerSelectedAtCallSite();
Contract.ThrowIfFalse(firstStatementToRemove.Parent == lastStatementToRemove.Parent);
var statementsToInsert = await CreateStatementsOrInitializerToInsertAtCallSiteAsync(cancellationToken).ConfigureAwait(false);
var callSiteGenerator =
new CallSiteContainerRewriter(
container,
variableMapToRemove,
firstStatementToRemove,
lastStatementToRemove,
statementsToInsert);
return container.CopyAnnotationsTo(callSiteGenerator.Generate()).WithAdditionalAnnotations(Formatter.Annotation);
}
private async Task<IEnumerable<SyntaxNode>> CreateStatementsOrInitializerToInsertAtCallSiteAsync(CancellationToken cancellationToken)
{
var selectedNode = this.GetFirstStatementOrInitializerSelectedAtCallSite();
// field initializer, constructor initializer, expression bodied member case
if (selectedNode is ConstructorInitializerSyntax ||
selectedNode is FieldDeclarationSyntax ||
IsExpressionBodiedMember(selectedNode))
{
var statement = await GetStatementOrInitializerContainingInvocationToExtractedMethodAsync(this.CallSiteAnnotation, cancellationToken).ConfigureAwait(false);
return SpecializedCollections.SingletonEnumerable(statement);
}
// regular case
var semanticModel = this.SemanticDocument.SemanticModel;
var context = this.InsertionPoint.GetContext();
var postProcessor = new PostProcessor(semanticModel, context.SpanStart);
var statements = SpecializedCollections.EmptyEnumerable<StatementSyntax>();
statements = AddSplitOrMoveDeclarationOutStatementsToCallSite(statements, cancellationToken);
statements = postProcessor.MergeDeclarationStatements(statements);
statements = AddAssignmentStatementToCallSite(statements, cancellationToken);
statements = await AddInvocationAtCallSiteAsync(statements, cancellationToken).ConfigureAwait(false);
statements = AddReturnIfUnreachable(statements, cancellationToken);
return statements;
}
private bool IsExpressionBodiedMember(SyntaxNode node)
{
return node is MemberDeclarationSyntax && ((MemberDeclarationSyntax)node).GetExpressionBody() != null;
}
private SimpleNameSyntax CreateMethodNameForInvocation()
{
return this.AnalyzerResult.MethodTypeParametersInDeclaration.Count == 0
? (SimpleNameSyntax)SyntaxFactory.IdentifierName(_methodName)
: SyntaxFactory.GenericName(_methodName, SyntaxFactory.TypeArgumentList(CreateMethodCallTypeVariables()));
}
private SeparatedSyntaxList<TypeSyntax> CreateMethodCallTypeVariables()
{
Contract.ThrowIfTrue(this.AnalyzerResult.MethodTypeParametersInDeclaration.Count == 0);
// propagate any type variable used in extracted code
var typeVariables = new List<TypeSyntax>();
foreach (var methodTypeParameter in this.AnalyzerResult.MethodTypeParametersInDeclaration)
{
typeVariables.Add(SyntaxFactory.ParseTypeName(methodTypeParameter.Name));
}
return SyntaxFactory.SeparatedList(typeVariables);
}
protected SyntaxNode GetCallSiteContainerFromOutermostMoveInVariable(CancellationToken cancellationToken)
{
var outmostVariable = GetOutermostVariableToMoveIntoMethodDefinition(cancellationToken);
if (outmostVariable == null)
{
return null;
}
var idToken = outmostVariable.GetIdentifierTokenAtDeclaration(this.SemanticDocument);
var declStatement = idToken.GetAncestor<LocalDeclarationStatementSyntax>();
Contract.ThrowIfNull(declStatement);
Contract.ThrowIfFalse(declStatement.Parent.IsStatementContainerNode());
return declStatement.Parent;
}
private DeclarationModifiers CreateMethodModifiers()
{
var isUnsafe = this.CSharpSelectionResult.ShouldPutUnsafeModifier();
var isAsync = this.CSharpSelectionResult.ShouldPutAsyncModifier();
return new DeclarationModifiers(
isUnsafe: isUnsafe,
isAsync: isAsync,
isStatic: !this.AnalyzerResult.UseInstanceMember);
}
private static SyntaxKind GetParameterRefSyntaxKind(ParameterBehavior parameterBehavior)
{
return parameterBehavior == ParameterBehavior.Ref ?
SyntaxKind.RefKeyword :
parameterBehavior == ParameterBehavior.Out ?
SyntaxKind.OutKeyword : SyntaxKind.None;
}
private OperationStatus<List<SyntaxNode>> CreateMethodBody(CancellationToken cancellationToken)
{
var statements = GetInitialStatementsForMethodDefinitions();
statements = SplitOrMoveDeclarationIntoMethodDefinition(statements, cancellationToken);
statements = MoveDeclarationOutFromMethodDefinition(statements, cancellationToken);
statements = AppendReturnStatementIfNeeded(statements);
statements = CleanupCode(statements);
// set output so that we can use it in negative preview
var wrapped = WrapInCheckStatementIfNeeded(statements);
return CheckActiveStatements(statements).With(wrapped.ToList<SyntaxNode>());
}
private IEnumerable<StatementSyntax> WrapInCheckStatementIfNeeded(IEnumerable<StatementSyntax> statements)
{
var kind = this.CSharpSelectionResult.UnderCheckedStatementContext();
if (kind == SyntaxKind.None)
{
return statements;
}
if (statements.Skip(1).Any())
{
return SpecializedCollections.SingletonEnumerable<StatementSyntax>(SyntaxFactory.CheckedStatement(kind, SyntaxFactory.Block(statements)));
}
var block = statements.Single() as BlockSyntax;
if (block != null)
{
return SpecializedCollections.SingletonEnumerable<StatementSyntax>(SyntaxFactory.CheckedStatement(kind, block));
}
return SpecializedCollections.SingletonEnumerable<StatementSyntax>(SyntaxFactory.CheckedStatement(kind, SyntaxFactory.Block(statements)));
}
private IEnumerable<StatementSyntax> CleanupCode(IEnumerable<StatementSyntax> statements)
{
var semanticModel = this.SemanticDocument.SemanticModel;
var context = this.InsertionPoint.GetContext();
var postProcessor = new PostProcessor(semanticModel, context.SpanStart);
statements = postProcessor.RemoveRedundantBlock(statements);
statements = postProcessor.RemoveDeclarationAssignmentPattern(statements);
statements = postProcessor.RemoveInitializedDeclarationAndReturnPattern(statements);
return statements;
}
private OperationStatus CheckActiveStatements(IEnumerable<StatementSyntax> statements)
{
var count = statements.Count();
if (count == 0)
{
return OperationStatus.NoActiveStatement;
}
if (count == 1)
{
var returnStatement = statements.Single() as ReturnStatementSyntax;
if (returnStatement != null && returnStatement.Expression == null)
{
return OperationStatus.NoActiveStatement;
}
}
foreach (var statement in statements)
{
var declStatement = statement as LocalDeclarationStatementSyntax;
if (declStatement == null)
{
// found one
return OperationStatus.Succeeded;
}
foreach (var variable in declStatement.Declaration.Variables)
{
if (variable.Initializer != null)
{
// found one
return OperationStatus.Succeeded;
}
}
}
return OperationStatus.NoActiveStatement;
}
private IEnumerable<StatementSyntax> MoveDeclarationOutFromMethodDefinition(
IEnumerable<StatementSyntax> statements, CancellationToken cancellationToken)
{
var variableToRemoveMap = CreateVariableDeclarationToRemoveMap(
this.AnalyzerResult.GetVariablesToMoveOutToCallSiteOrDelete(cancellationToken), cancellationToken);
foreach (var statement in statements)
{
var declarationStatement = statement as LocalDeclarationStatementSyntax;
if (declarationStatement == null)
{
// if the given statement is not a declaration statement, pass it through unchanged.
yield return statement;
continue;
}
var expressionStatements = new List<StatementSyntax>();
var list = new List<VariableDeclaratorSyntax>();
var triviaList = new List<SyntaxTrivia>();
// When we turn a declaration into an assignment we have to preserve the leading trivia
var firstVariableToAttachTrivia = true;
// go through each variable declarator in the declaration statement, and create a new
// assignment statement if the variable is initialized at its declaration.
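// For illustration (hypothetical input, editorial comment): if the extracted code contains
//     int x = F(), y;
// and 'x' has to be moved out to the call site, the loop below keeps 'y' as a declaration and
// turns the initializer of 'x' into an assignment, so the extracted method ends up with
//     int y;
//     x = F();
// while the declaration of 'x' itself is emitted at the call site elsewhere in the generator.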
foreach (var variableDeclaration in declarationStatement.Declaration.Variables)
{
if (variableToRemoveMap.HasSyntaxAnnotation(variableDeclaration))
{
if (variableDeclaration.Initializer != null)
{
SyntaxToken identifier = ApplyTriviaFromDeclarationToAssignmentIdentifier(declarationStatement, firstVariableToAttachTrivia, variableDeclaration);
// move comments with the variable here
expressionStatements.Add(CreateAssignmentExpressionStatement(identifier, variableDeclaration.Initializer.Value));
}
else
{
// keep the trivia around the tokens we remove so that it can be re-attached below
triviaList.AddRange(variableDeclaration.GetLeadingTrivia());
triviaList.AddRange(variableDeclaration.GetTrailingTrivia());
}
firstVariableToAttachTrivia = false;
continue;
}
// Prepend the trivia from the declarations without initialization to the next persisting variable declaration
if (triviaList.Count > 0)
{
list.Add(variableDeclaration.WithPrependedLeadingTrivia(triviaList));
triviaList.Clear();
firstVariableToAttachTrivia = false;
continue;
}
firstVariableToAttachTrivia = false;
list.Add(variableDeclaration);
}
if (list.Count == 0 && triviaList.Count > 0)
{
// well, there is trivia associated with the node.
// we can't just delete the node, since we would then lose
// the trivia. unfortunately, it is not easy to attach the trivia
// to the next token. for now, create an empty statement and associate the
// trivia with that statement.
// TODO : think about a way to attach the trivia to the next token
yield return SyntaxFactory.EmptyStatement(SyntaxFactory.Token(SyntaxFactory.TriviaList(triviaList), SyntaxKind.SemicolonToken, SyntaxTriviaList.Create(SyntaxFactory.ElasticMarker)));
triviaList.Clear();
}
// return the surviving variable declarators
if (list.Count > 0)
{
yield return
SyntaxFactory.LocalDeclarationStatement(
declarationStatement.Modifiers,
SyntaxFactory.VariableDeclaration(
declarationStatement.Declaration.Type,
SyntaxFactory.SeparatedList(list)),
declarationStatement.SemicolonToken.WithPrependedLeadingTrivia(triviaList));
triviaList.Clear();
}
// return any expression statements that were created above
foreach (var expressionStatement in expressionStatements)
{
yield return expressionStatement;
}
}
}
private static SyntaxToken ApplyTriviaFromDeclarationToAssignmentIdentifier(LocalDeclarationStatementSyntax declarationStatement, bool firstVariableToAttachTrivia, VariableDeclaratorSyntax variable)
{
var identifier = variable.Identifier;
var typeSyntax = declarationStatement.Declaration.Type;
if (firstVariableToAttachTrivia && typeSyntax != null)
{
var identifierLeadingTrivia = new SyntaxTriviaList();
if (typeSyntax.HasLeadingTrivia)
{
identifierLeadingTrivia = identifierLeadingTrivia.AddRange(typeSyntax.GetLeadingTrivia());
}
identifierLeadingTrivia = identifierLeadingTrivia.AddRange(identifier.LeadingTrivia);
identifier = identifier.WithLeadingTrivia(identifierLeadingTrivia);
}
return identifier;
}
private static SyntaxToken GetIdentifierTokenAndTrivia(SyntaxToken identifier, TypeSyntax typeSyntax)
{
if (typeSyntax != null)
{
var identifierLeadingTrivia = new SyntaxTriviaList();
var identifierTrailingTrivia = new SyntaxTriviaList();
if (typeSyntax.HasLeadingTrivia)
{
identifierLeadingTrivia = identifierLeadingTrivia.AddRange(typeSyntax.GetLeadingTrivia());
}
if (typeSyntax.HasTrailingTrivia)
{
identifierLeadingTrivia = identifierLeadingTrivia.AddRange(typeSyntax.GetTrailingTrivia());
}
identifierLeadingTrivia = identifierLeadingTrivia.AddRange(identifier.LeadingTrivia);
identifierTrailingTrivia = identifierTrailingTrivia.AddRange(identifier.TrailingTrivia);
identifier = identifier.WithLeadingTrivia(identifierLeadingTrivia)
.WithTrailingTrivia(identifierTrailingTrivia);
}
return identifier;
}
private IEnumerable<StatementSyntax> SplitOrMoveDeclarationIntoMethodDefinition(
IEnumerable<StatementSyntax> statements,
CancellationToken cancellationToken)
{
var semanticModel = this.SemanticDocument.SemanticModel;
var context = this.InsertionPoint.GetContext();
var postProcessor = new PostProcessor(semanticModel, context.SpanStart);
var declStatements = CreateDeclarationStatements(AnalyzerResult.GetVariablesToSplitOrMoveIntoMethodDefinition(cancellationToken), cancellationToken);
declStatements = postProcessor.MergeDeclarationStatements(declStatements);
return declStatements.Concat(statements);
}
private ExpressionSyntax CreateAssignmentExpression(SyntaxToken identifier, ExpressionSyntax rvalue)
{
return SyntaxFactory.AssignmentExpression(
SyntaxKind.SimpleAssignmentExpression,
SyntaxFactory.IdentifierName(identifier),
rvalue);
}
protected override bool LastStatementOrHasReturnStatementInReturnableConstruct()
{
var lastStatement = this.GetLastStatementOrInitializerSelectedAtCallSite();
var container = lastStatement.GetAncestorsOrThis<SyntaxNode>().FirstOrDefault(n => n.IsReturnableConstruct());
if (container == null)
{
// case such as field initializer
return false;
}
var blockBody = container.GetBlockBody();
if (blockBody == null)
{
// such as an expression lambda; there is no statement body
return false;
}
// check whether it is the last statement, ignoring a trailing return statement
var statements = blockBody.Statements;
if (statements.Last() == lastStatement)
{
return true;
}
var index = statements.IndexOf((StatementSyntax)lastStatement);
return statements[index + 1].Kind() == SyntaxKind.ReturnStatement;
}
protected override SyntaxToken CreateIdentifier(string name)
{
return SyntaxFactory.Identifier(name);
}
protected override StatementSyntax CreateReturnStatement(string identifierName = null)
{
return string.IsNullOrEmpty(identifierName)
? SyntaxFactory.ReturnStatement()
: SyntaxFactory.ReturnStatement(SyntaxFactory.IdentifierName(identifierName));
}
protected override ExpressionSyntax CreateCallSignature()
{
var methodName = CreateMethodNameForInvocation().WithAdditionalAnnotations(Simplifier.Annotation);
var arguments = new List<ArgumentSyntax>();
foreach (var argument in this.AnalyzerResult.MethodParameters)
{
var modifier = GetParameterRefSyntaxKind(argument.ParameterModifier);
var refOrOut = modifier == SyntaxKind.None ? default(SyntaxToken) : SyntaxFactory.Token(modifier);
arguments.Add(SyntaxFactory.Argument(SyntaxFactory.IdentifierName(argument.Name)).WithRefOrOutKeyword(refOrOut));
}
var invocation = SyntaxFactory.InvocationExpression(methodName,
SyntaxFactory.ArgumentList(SyntaxFactory.SeparatedList(arguments)));
var shouldPutAsyncModifier = this.CSharpSelectionResult.ShouldPutAsyncModifier();
if (!shouldPutAsyncModifier)
{
return invocation;
}
return SyntaxFactory.AwaitExpression(invocation);
}
protected override StatementSyntax CreateAssignmentExpressionStatement(SyntaxToken identifier, ExpressionSyntax rvalue)
{
return SyntaxFactory.ExpressionStatement(CreateAssignmentExpression(identifier, rvalue));
}
protected override StatementSyntax CreateDeclarationStatement(
VariableInfo variable,
CancellationToken cancellationToken,
ExpressionSyntax initialValue = null)
{
var type = variable.GetVariableType(this.SemanticDocument);
var typeNode = type.GenerateTypeSyntax();
var equalsValueClause = initialValue == null ? null : SyntaxFactory.EqualsValueClause(value: initialValue);
return SyntaxFactory.LocalDeclarationStatement(
SyntaxFactory.VariableDeclaration(typeNode)
.AddVariables(SyntaxFactory.VariableDeclarator(SyntaxFactory.Identifier(variable.Name)).WithInitializer(equalsValueClause)));
}
protected override async Task<GeneratedCode> CreateGeneratedCodeAsync(OperationStatus status, SemanticDocument newDocument, CancellationToken cancellationToken)
{
if (status.Succeeded())
{
// in hybrid code cases such as extract method, the formatter has difficulty deciding where to break lines.
// here, we explicitly insert a newline after the "{" of the auto-generated method declaration so that the anchor can
// determine the indentation of the inserted statements (from the user's code) while preserving the user's code style.
var root = newDocument.Root;
var methodDefinition = root.GetAnnotatedNodes<MethodDeclarationSyntax>(this.MethodDefinitionAnnotation).First();
var newMethodDefinition =
methodDefinition.ReplaceToken(
methodDefinition.Body.OpenBraceToken,
methodDefinition.Body.OpenBraceToken.WithAppendedTrailingTrivia(
SpecializedCollections.SingletonEnumerable(SyntaxFactory.CarriageReturnLineFeed)));
newDocument = await newDocument.WithSyntaxRootAsync(root.ReplaceNode(methodDefinition, newMethodDefinition), cancellationToken).ConfigureAwait(false);
}
return await base.CreateGeneratedCodeAsync(status, newDocument, cancellationToken).ConfigureAwait(false);
}
protected StatementSyntax GetStatementContainingInvocationToExtractedMethodWorker()
{
var callSignature = CreateCallSignature();
if (this.AnalyzerResult.HasReturnType)
{
Contract.ThrowIfTrue(this.AnalyzerResult.HasVariableToUseAsReturnValue);
return SyntaxFactory.ReturnStatement(callSignature);
}
return SyntaxFactory.ExpressionStatement(callSignature);
}
}
}
}
| |
// ***********************************************************************
// Copyright (c) 2014-2015 Charlie Poole
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
using System;
using NUnit.Framework.Interfaces;
namespace NUnitLite
{
/// <summary>
/// Helper class used to summarize the result of a test run
/// </summary>
public class ResultSummary
{
#region Constructor
/// <summary>
/// Initializes a new instance of the <see cref="ResultSummary"/> class.
/// </summary>
/// <param name="result">The result.</param>
public ResultSummary(ITestResult result)
{
InitializeCounters();
ResultState = result.ResultState;
StartTime = result.StartTime;
EndTime = result.EndTime;
Duration = result.Duration;
Summarize(result);
}
#endregion
#region Properties
/// <summary>
/// Gets the number of test cases for which results
/// have been summarized. Any tests excluded by use of
/// Category or Explicit attributes are not counted.
/// </summary>
public int TestCount { get; private set; }
/// <summary>
/// Returns the number of test cases actually run.
/// </summary>
public int RunCount
{
get { return PassCount + ErrorCount + FailureCount + InconclusiveCount; }
}
/// <summary>
/// Gets the number of tests not run for any reason.
/// </summary>
public int NotRunCount
{
get { return InvalidCount + SkipCount + IgnoreCount + ExplicitCount; }
}
/// <summary>
/// Returns the number of failed test cases (including errors and invalid tests)
/// </summary>
public int FailedCount
{
get { return FailureCount + InvalidCount + ErrorCount; }
}
/// <summary>
/// Returns the sum of skipped test cases, including ignored and explicit tests
/// </summary>
public int TotalSkipCount
{
get { return SkipCount + IgnoreCount + ExplicitCount; }
}
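// Worked example (hypothetical numbers): with PassCount = 5, FailureCount = 1, ErrorCount = 1,
// InconclusiveCount = 0, InvalidCount = 1, SkipCount = 2, IgnoreCount = 1 and ExplicitCount = 1,
// the derived counts are RunCount = 5 + 1 + 1 + 0 = 7, NotRunCount = 1 + 2 + 1 + 1 = 5,
// FailedCount = 1 + 1 + 1 = 3 and TotalSkipCount = 2 + 1 + 1 = 4.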
/// <summary>
/// Gets the count of passed tests
/// </summary>
public int PassCount { get; private set; }
/// <summary>
/// Gets the count of failed tests, excluding errors and invalid tests
/// </summary>
public int FailureCount { get; private set; }
/// <summary>
/// Gets the error count
/// </summary>
public int ErrorCount { get; private set; }
/// <summary>
/// Gets the count of inconclusive tests
/// </summary>
public int InconclusiveCount { get; private set; }
/// <summary>
/// Returns the number of test cases that were not runnable
/// due to errors in the signature of the class or method.
/// Such tests are also counted as Errors.
/// </summary>
public int InvalidCount { get; private set; }
/// <summary>
/// Gets the count of skipped tests, excluding ignored tests
/// </summary>
public int SkipCount { get; private set; }
/// <summary>
/// Gets the ignore count
/// </summary>
public int IgnoreCount { get; private set; }
/// <summary>
/// Gets the explicit count
/// </summary>
public int ExplicitCount { get; private set; }
/// <summary>
/// Gets the ResultState of the test result, which
/// indicates the success or failure of the test.
/// </summary>
public ResultState ResultState { get; private set; }
/// <summary>
/// Gets the time the test started running.
/// </summary>
public DateTime StartTime { get; private set; }
/// <summary>
/// Gets the time the test finished running.
/// </summary>
public DateTime EndTime { get; private set; }
/// <summary>
/// Gets the elapsed time for running the test, in seconds.
/// </summary>
public double Duration { get; private set; }
#endregion
#region Helper Methods
private void InitializeCounters()
{
TestCount = 0;
PassCount = 0;
FailureCount = 0;
ErrorCount = 0;
InconclusiveCount = 0;
SkipCount = 0;
IgnoreCount = 0;
ExplicitCount = 0;
InvalidCount = 0;
}
private void Summarize(ITestResult result)
{
if (result.Test.IsSuite)
{
foreach (ITestResult r in result.Children)
Summarize(r);
}
else
{
TestCount++;
var label = result.ResultState.Label;
switch (result.ResultState.Status)
{
case TestStatus.Passed:
PassCount++;
break;
case TestStatus.Skipped:
if (label == "Ignored")
IgnoreCount++;
else if (label == "Explicit")
ExplicitCount++;
else
SkipCount++;
break;
case TestStatus.Failed:
if (label == "Invalid")
InvalidCount++;
else if (label == "Error")
ErrorCount++;
else
FailureCount++;
break;
case TestStatus.Inconclusive:
InconclusiveCount++;
break;
}
return;
}
}
#endregion
}
}
| |
//------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------------------------
namespace System.ServiceModel.Configuration
{
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Configuration;
using System.IdentityModel.Tokens;
using System.Runtime;
using System.ServiceModel.Channels;
using System.ServiceModel.Security;
using System.ServiceModel.Security.Tokens;
using System.Xml;
using System.Linq;
public partial class SecurityElementBase : BindingElementExtensionElement
{
internal const AuthenticationMode defaultAuthenticationMode = AuthenticationMode.SspiNegotiated;
// if you add another variable, make sure to adjust the CopyFrom and UnMerge methods accordingly.
SecurityBindingElement failedSecurityBindingElement = null;
bool willX509IssuerReferenceAssertionBeWritten;
SecurityKeyType templateKeyType = IssuedSecurityTokenParameters.defaultKeyType;
internal SecurityElementBase()
{
}
internal bool HasImportFailed { get { return this.failedSecurityBindingElement != null; } }
internal bool IsSecurityElementBootstrap { get; set; } // Used in serialization path to optimize Xml representation
[ConfigurationProperty(ConfigurationStrings.DefaultAlgorithmSuite, DefaultValue = SecurityBindingElement.defaultAlgorithmSuiteString)]
[TypeConverter(typeof(SecurityAlgorithmSuiteConverter))]
public SecurityAlgorithmSuite DefaultAlgorithmSuite
{
get { return (SecurityAlgorithmSuite)base[ConfigurationStrings.DefaultAlgorithmSuite]; }
set { base[ConfigurationStrings.DefaultAlgorithmSuite] = value; }
}
[ConfigurationProperty(ConfigurationStrings.AllowSerializedSigningTokenOnReply, DefaultValue = AsymmetricSecurityBindingElement.defaultAllowSerializedSigningTokenOnReply)]
public bool AllowSerializedSigningTokenOnReply
{
get { return (bool)base[ConfigurationStrings.AllowSerializedSigningTokenOnReply]; }
set { base[ConfigurationStrings.AllowSerializedSigningTokenOnReply] = value; }
}
[ConfigurationProperty(ConfigurationStrings.EnableUnsecuredResponse, DefaultValue = SecurityBindingElement.defaultEnableUnsecuredResponse)]
public bool EnableUnsecuredResponse
{
get { return (bool)base[ConfigurationStrings.EnableUnsecuredResponse]; }
set { base[ConfigurationStrings.EnableUnsecuredResponse] = value; }
}
[ConfigurationProperty(ConfigurationStrings.AuthenticationMode, DefaultValue = defaultAuthenticationMode)]
[ServiceModelEnumValidator(typeof(AuthenticationModeHelper))]
public AuthenticationMode AuthenticationMode
{
get { return (AuthenticationMode)base[ConfigurationStrings.AuthenticationMode]; }
set { base[ConfigurationStrings.AuthenticationMode] = value; }
}
public override Type BindingElementType
{
get { return typeof(SecurityBindingElement); }
}
[ConfigurationProperty(ConfigurationStrings.RequireDerivedKeys, DefaultValue = SecurityTokenParameters.defaultRequireDerivedKeys)]
public bool RequireDerivedKeys
{
get { return (bool)base[ConfigurationStrings.RequireDerivedKeys]; }
set { base[ConfigurationStrings.RequireDerivedKeys] = value; }
}
[ConfigurationProperty(ConfigurationStrings.SecurityHeaderLayout, DefaultValue = SecurityProtocolFactory.defaultSecurityHeaderLayout)]
[ServiceModelEnumValidator(typeof(SecurityHeaderLayoutHelper))]
public SecurityHeaderLayout SecurityHeaderLayout
{
get { return (SecurityHeaderLayout)base[ConfigurationStrings.SecurityHeaderLayout]; }
set { base[ConfigurationStrings.SecurityHeaderLayout] = value; }
}
[ConfigurationProperty(ConfigurationStrings.IncludeTimestamp, DefaultValue = SecurityBindingElement.defaultIncludeTimestamp)]
public bool IncludeTimestamp
{
get { return (bool)base[ConfigurationStrings.IncludeTimestamp]; }
set { base[ConfigurationStrings.IncludeTimestamp] = value; }
}
[ConfigurationProperty(ConfigurationStrings.AllowInsecureTransport, DefaultValue = SecurityBindingElement.defaultAllowInsecureTransport)]
public bool AllowInsecureTransport
{
get { return (bool)base[ConfigurationStrings.AllowInsecureTransport]; }
set { base[ConfigurationStrings.AllowInsecureTransport] = value; }
}
[ConfigurationProperty(ConfigurationStrings.KeyEntropyMode, DefaultValue = System.ServiceModel.Security.AcceleratedTokenProvider.defaultKeyEntropyMode)]
[ServiceModelEnumValidator(typeof(SecurityKeyEntropyModeHelper))]
public SecurityKeyEntropyMode KeyEntropyMode
{
get { return (SecurityKeyEntropyMode)base[ConfigurationStrings.KeyEntropyMode]; }
set { base[ConfigurationStrings.KeyEntropyMode] = value; }
}
[ConfigurationProperty(ConfigurationStrings.IssuedTokenParameters)]
public IssuedTokenParametersElement IssuedTokenParameters
{
get { return (IssuedTokenParametersElement)base[ConfigurationStrings.IssuedTokenParameters]; }
}
[ConfigurationProperty(ConfigurationStrings.LocalClientSettings)]
public LocalClientSecuritySettingsElement LocalClientSettings
{
get { return (LocalClientSecuritySettingsElement)base[ConfigurationStrings.LocalClientSettings]; }
}
[ConfigurationProperty(ConfigurationStrings.LocalServiceSettings)]
public LocalServiceSecuritySettingsElement LocalServiceSettings
{
get { return (LocalServiceSecuritySettingsElement)base[ConfigurationStrings.LocalServiceSettings]; }
}
[ConfigurationProperty(ConfigurationStrings.MessageProtectionOrder, DefaultValue = SecurityBindingElement.defaultMessageProtectionOrder)]
[ServiceModelEnumValidator(typeof(MessageProtectionOrderHelper))]
public MessageProtectionOrder MessageProtectionOrder
{
get { return (MessageProtectionOrder)base[ConfigurationStrings.MessageProtectionOrder]; }
set { base[ConfigurationStrings.MessageProtectionOrder] = value; }
}
[ConfigurationProperty(ConfigurationStrings.ProtectTokens, DefaultValue = false)]
public bool ProtectTokens
{
get { return (bool)base[ConfigurationStrings.ProtectTokens]; }
set { base[ConfigurationStrings.ProtectTokens] = value; }
}
[ConfigurationProperty(ConfigurationStrings.MessageSecurityVersion, DefaultValue = ConfigurationStrings.Default)]
[TypeConverter(typeof(MessageSecurityVersionConverter))]
public MessageSecurityVersion MessageSecurityVersion
{
get { return (MessageSecurityVersion)base[ConfigurationStrings.MessageSecurityVersion]; }
set { base[ConfigurationStrings.MessageSecurityVersion] = value; }
}
[ConfigurationProperty(ConfigurationStrings.RequireSecurityContextCancellation, DefaultValue = SecureConversationSecurityTokenParameters.defaultRequireCancellation)]
public bool RequireSecurityContextCancellation
{
get { return (bool)base[ConfigurationStrings.RequireSecurityContextCancellation]; }
set { base[ConfigurationStrings.RequireSecurityContextCancellation] = value; }
}
[ConfigurationProperty(ConfigurationStrings.RequireSignatureConfirmation, DefaultValue = SecurityBindingElement.defaultRequireSignatureConfirmation)]
public bool RequireSignatureConfirmation
{
get { return (bool)base[ConfigurationStrings.RequireSignatureConfirmation]; }
set { base[ConfigurationStrings.RequireSignatureConfirmation] = value; }
}
[ConfigurationProperty(ConfigurationStrings.CanRenewSecurityContextToken, DefaultValue = SecureConversationSecurityTokenParameters.defaultCanRenewSession)]
public bool CanRenewSecurityContextToken
{
get { return (bool)base[ConfigurationStrings.CanRenewSecurityContextToken]; }
set { base[ConfigurationStrings.CanRenewSecurityContextToken] = value; }
}
public override void ApplyConfiguration(BindingElement bindingElement)
{
base.ApplyConfiguration(bindingElement);
SecurityBindingElement sbe = (SecurityBindingElement)bindingElement;
#pragma warning disable 56506 //[....]; base.CopyFrom() checks for 'from' being null
if (PropertyValueOrigin.Default != this.ElementInformation.Properties[ConfigurationStrings.DefaultAlgorithmSuite].ValueOrigin)
sbe.DefaultAlgorithmSuite = this.DefaultAlgorithmSuite;
if (PropertyValueOrigin.Default != this.ElementInformation.Properties[ConfigurationStrings.IncludeTimestamp].ValueOrigin)
sbe.IncludeTimestamp = this.IncludeTimestamp;
if (PropertyValueOrigin.Default != this.ElementInformation.Properties[ConfigurationStrings.MessageSecurityVersion].ValueOrigin)
sbe.MessageSecurityVersion = this.MessageSecurityVersion;
if (PropertyValueOrigin.Default != this.ElementInformation.Properties[ConfigurationStrings.KeyEntropyMode].ValueOrigin)
sbe.KeyEntropyMode = this.KeyEntropyMode;
if (PropertyValueOrigin.Default != this.ElementInformation.Properties[ConfigurationStrings.SecurityHeaderLayout].ValueOrigin)
sbe.SecurityHeaderLayout = this.SecurityHeaderLayout;
if (PropertyValueOrigin.Default != this.ElementInformation.Properties[ConfigurationStrings.RequireDerivedKeys].ValueOrigin)
sbe.SetKeyDerivation(this.RequireDerivedKeys);
if (PropertyValueOrigin.Default != this.ElementInformation.Properties[ConfigurationStrings.AllowInsecureTransport].ValueOrigin)
sbe.AllowInsecureTransport = this.AllowInsecureTransport;
if (PropertyValueOrigin.Default != this.ElementInformation.Properties[ConfigurationStrings.EnableUnsecuredResponse].ValueOrigin)
sbe.EnableUnsecuredResponse = this.EnableUnsecuredResponse;
if (PropertyValueOrigin.Default != this.ElementInformation.Properties[ConfigurationStrings.ProtectTokens].ValueOrigin)
sbe.ProtectTokens = this.ProtectTokens;
#pragma warning restore
SymmetricSecurityBindingElement ssbe = sbe as SymmetricSecurityBindingElement;
if (ssbe != null)
{
if (PropertyValueOrigin.Default != this.ElementInformation.Properties[ConfigurationStrings.MessageProtectionOrder].ValueOrigin)
ssbe.MessageProtectionOrder = this.MessageProtectionOrder;
if (PropertyValueOrigin.Default != this.ElementInformation.Properties[ConfigurationStrings.RequireSignatureConfirmation].ValueOrigin)
ssbe.RequireSignatureConfirmation = this.RequireSignatureConfirmation;
SecureConversationSecurityTokenParameters scParameters = ssbe.ProtectionTokenParameters as SecureConversationSecurityTokenParameters;
if (scParameters != null)
{
scParameters.CanRenewSession = this.CanRenewSecurityContextToken;
}
}
AsymmetricSecurityBindingElement asbe = sbe as AsymmetricSecurityBindingElement;
if (asbe != null)
{
if (PropertyValueOrigin.Default != this.ElementInformation.Properties[ConfigurationStrings.MessageProtectionOrder].ValueOrigin)
asbe.MessageProtectionOrder = this.MessageProtectionOrder;
if (PropertyValueOrigin.Default != this.ElementInformation.Properties[ConfigurationStrings.RequireSignatureConfirmation].ValueOrigin)
asbe.RequireSignatureConfirmation = this.RequireSignatureConfirmation;
if (PropertyValueOrigin.Default != this.ElementInformation.Properties[ConfigurationStrings.AllowSerializedSigningTokenOnReply].ValueOrigin)
asbe.AllowSerializedSigningTokenOnReply = this.AllowSerializedSigningTokenOnReply;
}
TransportSecurityBindingElement tsbe = sbe as TransportSecurityBindingElement;
if (tsbe != null)
{
if (tsbe.EndpointSupportingTokenParameters.Endorsing.Count == 1)
{
SecureConversationSecurityTokenParameters scParameters = tsbe.EndpointSupportingTokenParameters.Endorsing[0] as SecureConversationSecurityTokenParameters;
if (scParameters != null)
{
scParameters.CanRenewSession = this.CanRenewSecurityContextToken;
}
}
}
if (PropertyValueOrigin.Default != this.ElementInformation.Properties[ConfigurationStrings.LocalClientSettings].ValueOrigin)
{
this.LocalClientSettings.ApplyConfiguration(sbe.LocalClientSettings);
}
if (PropertyValueOrigin.Default != this.ElementInformation.Properties[ConfigurationStrings.LocalServiceSettings].ValueOrigin)
{
this.LocalServiceSettings.ApplyConfiguration(sbe.LocalServiceSettings);
}
}
public override void CopyFrom(ServiceModelExtensionElement from)
{
base.CopyFrom(from);
SecurityElementBase source = (SecurityElementBase)from;
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.AllowSerializedSigningTokenOnReply].ValueOrigin)
this.AllowSerializedSigningTokenOnReply = source.AllowSerializedSigningTokenOnReply;
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.DefaultAlgorithmSuite].ValueOrigin)
this.DefaultAlgorithmSuite = source.DefaultAlgorithmSuite;
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.EnableUnsecuredResponse].ValueOrigin)
this.EnableUnsecuredResponse = source.EnableUnsecuredResponse;
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.AllowInsecureTransport].ValueOrigin)
this.AllowInsecureTransport = source.AllowInsecureTransport;
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.RequireDerivedKeys].ValueOrigin)
this.RequireDerivedKeys = source.RequireDerivedKeys;
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.IncludeTimestamp].ValueOrigin)
this.IncludeTimestamp = source.IncludeTimestamp;
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.IssuedTokenParameters].ValueOrigin)
this.IssuedTokenParameters.Copy(source.IssuedTokenParameters);
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.MessageProtectionOrder].ValueOrigin)
this.MessageProtectionOrder = source.MessageProtectionOrder;
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.ProtectTokens].ValueOrigin)
this.ProtectTokens = source.ProtectTokens;
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.MessageSecurityVersion].ValueOrigin)
this.MessageSecurityVersion = source.MessageSecurityVersion;
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.RequireSignatureConfirmation].ValueOrigin)
this.RequireSignatureConfirmation = source.RequireSignatureConfirmation;
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.RequireSecurityContextCancellation].ValueOrigin)
this.RequireSecurityContextCancellation = source.RequireSecurityContextCancellation;
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.CanRenewSecurityContextToken].ValueOrigin)
this.CanRenewSecurityContextToken = source.CanRenewSecurityContextToken;
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.KeyEntropyMode].ValueOrigin)
this.KeyEntropyMode = source.KeyEntropyMode;
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.SecurityHeaderLayout].ValueOrigin)
this.SecurityHeaderLayout = source.SecurityHeaderLayout;
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.LocalClientSettings].ValueOrigin)
this.LocalClientSettings.CopyFrom(source.LocalClientSettings);
if (PropertyValueOrigin.Default != source.ElementInformation.Properties[ConfigurationStrings.LocalServiceSettings].ValueOrigin)
this.LocalServiceSettings.CopyFrom(source.LocalServiceSettings);
this.failedSecurityBindingElement = source.failedSecurityBindingElement;
this.willX509IssuerReferenceAssertionBeWritten = source.willX509IssuerReferenceAssertionBeWritten;
}
protected internal override BindingElement CreateBindingElement()
{
return this.CreateBindingElement(false);
}
protected internal virtual BindingElement CreateBindingElement(bool createTemplateOnly)
{
SecurityBindingElement result;
switch (this.AuthenticationMode)
{
case AuthenticationMode.AnonymousForCertificate:
result = SecurityBindingElement.CreateAnonymousForCertificateBindingElement();
break;
case AuthenticationMode.AnonymousForSslNegotiated:
result = SecurityBindingElement.CreateSslNegotiationBindingElement(false, this.RequireSecurityContextCancellation);
break;
case AuthenticationMode.CertificateOverTransport:
result = SecurityBindingElement.CreateCertificateOverTransportBindingElement(this.MessageSecurityVersion);
break;
case AuthenticationMode.IssuedToken:
result = SecurityBindingElement.CreateIssuedTokenBindingElement(this.IssuedTokenParameters.Create(createTemplateOnly, this.templateKeyType));
break;
case AuthenticationMode.IssuedTokenForCertificate:
result = SecurityBindingElement.CreateIssuedTokenForCertificateBindingElement(this.IssuedTokenParameters.Create(createTemplateOnly, this.templateKeyType));
break;
case AuthenticationMode.IssuedTokenForSslNegotiated:
result = SecurityBindingElement.CreateIssuedTokenForSslBindingElement(this.IssuedTokenParameters.Create(createTemplateOnly, this.templateKeyType), this.RequireSecurityContextCancellation);
break;
case AuthenticationMode.IssuedTokenOverTransport:
result = SecurityBindingElement.CreateIssuedTokenOverTransportBindingElement(this.IssuedTokenParameters.Create(createTemplateOnly, this.templateKeyType));
break;
case AuthenticationMode.Kerberos:
result = SecurityBindingElement.CreateKerberosBindingElement();
break;
case AuthenticationMode.KerberosOverTransport:
result = SecurityBindingElement.CreateKerberosOverTransportBindingElement();
break;
case AuthenticationMode.MutualCertificateDuplex:
result = SecurityBindingElement.CreateMutualCertificateDuplexBindingElement(this.MessageSecurityVersion);
break;
case AuthenticationMode.MutualCertificate:
result = SecurityBindingElement.CreateMutualCertificateBindingElement(this.MessageSecurityVersion);
break;
case AuthenticationMode.MutualSslNegotiated:
result = SecurityBindingElement.CreateSslNegotiationBindingElement(true, this.RequireSecurityContextCancellation);
break;
case AuthenticationMode.SspiNegotiated:
result = SecurityBindingElement.CreateSspiNegotiationBindingElement(this.RequireSecurityContextCancellation);
break;
case AuthenticationMode.UserNameForCertificate:
result = SecurityBindingElement.CreateUserNameForCertificateBindingElement();
break;
case AuthenticationMode.UserNameForSslNegotiated:
result = SecurityBindingElement.CreateUserNameForSslBindingElement(this.RequireSecurityContextCancellation);
break;
case AuthenticationMode.UserNameOverTransport:
result = SecurityBindingElement.CreateUserNameOverTransportBindingElement();
break;
case AuthenticationMode.SspiNegotiatedOverTransport:
result = SecurityBindingElement.CreateSspiNegotiationOverTransportBindingElement(this.RequireSecurityContextCancellation);
break;
default:
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidEnumArgumentException("AuthenticationMode", (int)this.AuthenticationMode, typeof(AuthenticationMode)));
}
this.ApplyConfiguration(result);
return result;
}
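// Illustrative configuration fragment (assumed markup, editorial comment): a custom binding
// <security> element that this configuration class parses, e.g.
//
//   <customBinding>
//     <binding name="secureBinding">
//       <security authenticationMode="UserNameOverTransport" includeTimestamp="true" />
//       <httpsTransport />
//     </binding>
//   </customBinding>
//
// With that markup CreateBindingElement above takes the UserNameOverTransport branch, and
// ApplyConfiguration then copies the explicitly set attributes (here includeTimestamp) onto
// the resulting SecurityBindingElement.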
protected void AddBindingTemplate(Dictionary<AuthenticationMode, SecurityBindingElement> bindingTemplates, AuthenticationMode mode)
{
this.AuthenticationMode = mode;
try
{
bindingTemplates[mode] = (SecurityBindingElement)this.CreateBindingElement(true);
}
#pragma warning suppress 56500 // covered by FxCOP
catch (Exception e)
{
if (Fx.IsFatal(e))
{
throw;
}
}
}
static bool AreTokenParametersMatching(SecurityTokenParameters p1, SecurityTokenParameters p2, bool skipRequireDerivedKeysComparison, bool exactMessageSecurityVersion)
{
if (p1 == null || p2 == null)
return false;
if (p1.GetType() != p2.GetType())
return false;
if (p1.InclusionMode != p2.InclusionMode)
return false;
if (skipRequireDerivedKeysComparison == false && p1.RequireDerivedKeys != p2.RequireDerivedKeys)
return false;
if (p1.ReferenceStyle != p2.ReferenceStyle)
return false;
// mutual ssl and anonymous ssl differ in the client cert requirement
if (p1 is SslSecurityTokenParameters)
{
if (((SslSecurityTokenParameters)p1).RequireClientCertificate != ((SslSecurityTokenParameters)p2).RequireClientCertificate)
return false;
}
else if (p1 is SecureConversationSecurityTokenParameters)
{
SecureConversationSecurityTokenParameters sc1 = (SecureConversationSecurityTokenParameters)p1;
SecureConversationSecurityTokenParameters sc2 = (SecureConversationSecurityTokenParameters)p2;
if (sc1.RequireCancellation != sc2.RequireCancellation)
return false;
if (sc1.CanRenewSession != sc2.CanRenewSession)
return false;
if (!AreBindingsMatching(sc1.BootstrapSecurityBindingElement, sc2.BootstrapSecurityBindingElement, exactMessageSecurityVersion))
return false;
}
else if (p1 is IssuedSecurityTokenParameters)
{
if (((IssuedSecurityTokenParameters)p1).KeyType != ((IssuedSecurityTokenParameters)p2).KeyType)
return false;
}
return true;
}
static bool AreTokenParameterCollectionsMatching(Collection<SecurityTokenParameters> c1, Collection<SecurityTokenParameters> c2, bool exactMessageSecurityVersion)
{
if (c1.Count != c2.Count)
return false;
for (int i = 0; i < c1.Count; i++)
if (!AreTokenParametersMatching(c1[i], c2[i], true, exactMessageSecurityVersion))
return false;
return true;
}
internal static bool AreBindingsMatching(SecurityBindingElement b1, SecurityBindingElement b2)
{
return AreBindingsMatching(b1, b2, true);
}
internal static bool AreBindingsMatching(SecurityBindingElement b1, SecurityBindingElement b2, bool exactMessageSecurityVersion)
{
if (b1 == null || b2 == null)
return b1 == b2;
if (b1.GetType() != b2.GetType())
return false;
if (b1.MessageSecurityVersion != b2.MessageSecurityVersion)
{
                // when exactMessageSecurityVersion is false, only a BasicSecurityProfile (BSP) mismatch is tolerated;
                // the individual version components checked below must still match
if (exactMessageSecurityVersion)
return false;
if (b1.MessageSecurityVersion.SecurityVersion != b2.MessageSecurityVersion.SecurityVersion
|| b1.MessageSecurityVersion.TrustVersion != b2.MessageSecurityVersion.TrustVersion
|| b1.MessageSecurityVersion.SecureConversationVersion != b2.MessageSecurityVersion.SecureConversationVersion
|| b1.MessageSecurityVersion.SecurityPolicyVersion != b2.MessageSecurityVersion.SecurityPolicyVersion)
{
return false;
}
}
if (b1.SecurityHeaderLayout != b2.SecurityHeaderLayout)
return false;
if (b1.DefaultAlgorithmSuite != b2.DefaultAlgorithmSuite)
return false;
if (b1.IncludeTimestamp != b2.IncludeTimestamp)
return false;
if (b1.KeyEntropyMode != b2.KeyEntropyMode)
return false;
if (!AreTokenParameterCollectionsMatching(b1.EndpointSupportingTokenParameters.Endorsing, b2.EndpointSupportingTokenParameters.Endorsing, exactMessageSecurityVersion))
return false;
if (!AreTokenParameterCollectionsMatching(b1.EndpointSupportingTokenParameters.SignedEncrypted, b2.EndpointSupportingTokenParameters.SignedEncrypted, exactMessageSecurityVersion))
return false;
if (!AreTokenParameterCollectionsMatching(b1.EndpointSupportingTokenParameters.Signed, b2.EndpointSupportingTokenParameters.Signed, exactMessageSecurityVersion))
return false;
if (!AreTokenParameterCollectionsMatching(b1.EndpointSupportingTokenParameters.SignedEndorsing, b2.EndpointSupportingTokenParameters.SignedEndorsing, exactMessageSecurityVersion))
return false;
if (b1.OperationSupportingTokenParameters.Count != b2.OperationSupportingTokenParameters.Count)
return false;
foreach (KeyValuePair<string, SupportingTokenParameters> operation1 in b1.OperationSupportingTokenParameters)
{
if (!b2.OperationSupportingTokenParameters.ContainsKey(operation1.Key))
return false;
SupportingTokenParameters stp2 = b2.OperationSupportingTokenParameters[operation1.Key];
if (!AreTokenParameterCollectionsMatching(operation1.Value.Endorsing, stp2.Endorsing, exactMessageSecurityVersion))
return false;
if (!AreTokenParameterCollectionsMatching(operation1.Value.SignedEncrypted, stp2.SignedEncrypted, exactMessageSecurityVersion))
return false;
if (!AreTokenParameterCollectionsMatching(operation1.Value.Signed, stp2.Signed, exactMessageSecurityVersion))
return false;
if (!AreTokenParameterCollectionsMatching(operation1.Value.SignedEndorsing, stp2.SignedEndorsing, exactMessageSecurityVersion))
return false;
}
SymmetricSecurityBindingElement ssbe1 = b1 as SymmetricSecurityBindingElement;
if (ssbe1 != null)
{
SymmetricSecurityBindingElement ssbe2 = (SymmetricSecurityBindingElement)b2;
if (ssbe1.MessageProtectionOrder != ssbe2.MessageProtectionOrder)
return false;
if (!AreTokenParametersMatching(ssbe1.ProtectionTokenParameters, ssbe2.ProtectionTokenParameters, false, exactMessageSecurityVersion))
return false;
}
AsymmetricSecurityBindingElement asbe1 = b1 as AsymmetricSecurityBindingElement;
if (asbe1 != null)
{
AsymmetricSecurityBindingElement asbe2 = (AsymmetricSecurityBindingElement)b2;
if (asbe1.MessageProtectionOrder != asbe2.MessageProtectionOrder)
return false;
if (asbe1.RequireSignatureConfirmation != asbe2.RequireSignatureConfirmation)
return false;
if (!AreTokenParametersMatching(asbe1.InitiatorTokenParameters, asbe2.InitiatorTokenParameters, true, exactMessageSecurityVersion)
|| !AreTokenParametersMatching(asbe1.RecipientTokenParameters, asbe2.RecipientTokenParameters, true, exactMessageSecurityVersion))
return false;
}
return true;
}
protected virtual void AddBindingTemplates(Dictionary<AuthenticationMode, SecurityBindingElement> bindingTemplates)
{
AddBindingTemplate(bindingTemplates, AuthenticationMode.AnonymousForCertificate);
AddBindingTemplate(bindingTemplates, AuthenticationMode.AnonymousForSslNegotiated);
AddBindingTemplate(bindingTemplates, AuthenticationMode.CertificateOverTransport);
if (this.templateKeyType == SecurityKeyType.SymmetricKey)
{
AddBindingTemplate(bindingTemplates, AuthenticationMode.IssuedToken);
}
AddBindingTemplate(bindingTemplates, AuthenticationMode.IssuedTokenForCertificate);
AddBindingTemplate(bindingTemplates, AuthenticationMode.IssuedTokenForSslNegotiated);
AddBindingTemplate(bindingTemplates, AuthenticationMode.IssuedTokenOverTransport);
AddBindingTemplate(bindingTemplates, AuthenticationMode.Kerberos);
AddBindingTemplate(bindingTemplates, AuthenticationMode.KerberosOverTransport);
AddBindingTemplate(bindingTemplates, AuthenticationMode.MutualCertificate);
AddBindingTemplate(bindingTemplates, AuthenticationMode.MutualCertificateDuplex);
AddBindingTemplate(bindingTemplates, AuthenticationMode.MutualSslNegotiated);
AddBindingTemplate(bindingTemplates, AuthenticationMode.SspiNegotiated);
AddBindingTemplate(bindingTemplates, AuthenticationMode.UserNameForCertificate);
AddBindingTemplate(bindingTemplates, AuthenticationMode.UserNameForSslNegotiated);
AddBindingTemplate(bindingTemplates, AuthenticationMode.UserNameOverTransport);
AddBindingTemplate(bindingTemplates, AuthenticationMode.SspiNegotiatedOverTransport);
}
bool TryInitializeAuthenticationMode(SecurityBindingElement sbe)
{
bool result;
if (sbe.OperationSupportingTokenParameters.Count > 0)
result = false;
else
{
SetIssuedTokenKeyType(sbe);
Dictionary<AuthenticationMode, SecurityBindingElement> bindingTemplates = new Dictionary<AuthenticationMode, SecurityBindingElement>();
this.AddBindingTemplates(bindingTemplates);
result = false;
foreach (AuthenticationMode mode in bindingTemplates.Keys)
{
SecurityBindingElement candidate = bindingTemplates[mode];
if (AreBindingsMatching(sbe, candidate))
{
this.AuthenticationMode = mode;
result = true;
break;
}
}
}
return result;
}
void SetIssuedTokenKeyType(SecurityBindingElement sbe)
{
            // Set the key type used when building the IssuedToken binding template,
            // because a different supporting token is defined depending on the key type.
if (sbe.EndpointSupportingTokenParameters.Endorsing.Count > 0 &&
sbe.EndpointSupportingTokenParameters.Endorsing[0] is IssuedSecurityTokenParameters)
{
this.templateKeyType = ((IssuedSecurityTokenParameters)sbe.EndpointSupportingTokenParameters.Endorsing[0]).KeyType;
}
else if (sbe.EndpointSupportingTokenParameters.Signed.Count > 0 &&
sbe.EndpointSupportingTokenParameters.Signed[0] is IssuedSecurityTokenParameters)
{
this.templateKeyType = ((IssuedSecurityTokenParameters)sbe.EndpointSupportingTokenParameters.Signed[0]).KeyType;
}
else if (sbe.EndpointSupportingTokenParameters.SignedEncrypted.Count > 0 &&
sbe.EndpointSupportingTokenParameters.SignedEncrypted[0] is IssuedSecurityTokenParameters)
{
this.templateKeyType = ((IssuedSecurityTokenParameters)sbe.EndpointSupportingTokenParameters.SignedEncrypted[0]).KeyType;
}
else
{
this.templateKeyType = IssuedSecurityTokenParameters.defaultKeyType;
}
}
protected virtual void InitializeNestedTokenParameterSettings(SecurityTokenParameters sp, bool initializeNestedBindings)
{
if (sp is SspiSecurityTokenParameters)
SetPropertyValueIfNotDefaultValue(ConfigurationStrings.RequireSecurityContextCancellation, ((SspiSecurityTokenParameters)sp).RequireCancellation);
else if (sp is SslSecurityTokenParameters)
SetPropertyValueIfNotDefaultValue(ConfigurationStrings.RequireSecurityContextCancellation, ((SslSecurityTokenParameters)sp).RequireCancellation);
else if (sp is IssuedSecurityTokenParameters)
this.IssuedTokenParameters.InitializeFrom((IssuedSecurityTokenParameters)sp, initializeNestedBindings);
}
internal void InitializeFrom(BindingElement bindingElement, bool initializeNestedBindings)
{
if (bindingElement == null)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("bindingElement");
}
SecurityBindingElement sbe = (SecurityBindingElement)bindingElement;
// Can't apply default value optimization to properties like DefaultAlgorithmSuite because the defaults are computed at runtime and don't match config defaults
this.DefaultAlgorithmSuite = sbe.DefaultAlgorithmSuite;
this.IncludeTimestamp = sbe.IncludeTimestamp;
if (sbe.MessageSecurityVersion != MessageSecurityVersion.Default)
{
this.MessageSecurityVersion = sbe.MessageSecurityVersion;
}
// Still safe to apply the optimization here because the runtime defaults are the same as config defaults in all cases
SetPropertyValueIfNotDefaultValue(ConfigurationStrings.KeyEntropyMode, sbe.KeyEntropyMode);
SetPropertyValueIfNotDefaultValue(ConfigurationStrings.SecurityHeaderLayout, sbe.SecurityHeaderLayout);
SetPropertyValueIfNotDefaultValue(ConfigurationStrings.ProtectTokens, sbe.ProtectTokens);
SetPropertyValueIfNotDefaultValue(ConfigurationStrings.AllowInsecureTransport, sbe.AllowInsecureTransport);
SetPropertyValueIfNotDefaultValue(ConfigurationStrings.EnableUnsecuredResponse, sbe.EnableUnsecuredResponse);
Nullable<bool> requireDerivedKeys = new Nullable<bool>();
if (sbe.EndpointSupportingTokenParameters.Endorsing.Count == 1)
{
this.InitializeNestedTokenParameterSettings(sbe.EndpointSupportingTokenParameters.Endorsing[0], initializeNestedBindings);
}
else if (sbe.EndpointSupportingTokenParameters.SignedEncrypted.Count == 1)
{
this.InitializeNestedTokenParameterSettings(sbe.EndpointSupportingTokenParameters.SignedEncrypted[0], initializeNestedBindings);
}
else if (sbe.EndpointSupportingTokenParameters.Signed.Count == 1)
{
this.InitializeNestedTokenParameterSettings(sbe.EndpointSupportingTokenParameters.Signed[0], initializeNestedBindings);
}
bool initializationFailure = false;
foreach (SecurityTokenParameters t in sbe.EndpointSupportingTokenParameters.Endorsing)
{
if (t.HasAsymmetricKey == false)
{
if (requireDerivedKeys.HasValue && requireDerivedKeys.Value != t.RequireDerivedKeys)
initializationFailure = true;
else
requireDerivedKeys = t.RequireDerivedKeys;
}
}
SymmetricSecurityBindingElement ssbe = sbe as SymmetricSecurityBindingElement;
if ( ssbe != null )
{
SetPropertyValueIfNotDefaultValue(ConfigurationStrings.MessageProtectionOrder, ssbe.MessageProtectionOrder);
this.RequireSignatureConfirmation = ssbe.RequireSignatureConfirmation;
if ( ssbe.ProtectionTokenParameters != null )
{
this.InitializeNestedTokenParameterSettings( ssbe.ProtectionTokenParameters, initializeNestedBindings );
if ( requireDerivedKeys.HasValue && requireDerivedKeys.Value != ssbe.ProtectionTokenParameters.RequireDerivedKeys )
initializationFailure = true;
else
requireDerivedKeys = ssbe.ProtectionTokenParameters.RequireDerivedKeys;
}
}
else
{
AsymmetricSecurityBindingElement asbe = sbe as AsymmetricSecurityBindingElement;
if ( asbe != null )
{
SetPropertyValueIfNotDefaultValue(ConfigurationStrings.MessageProtectionOrder, asbe.MessageProtectionOrder);
this.RequireSignatureConfirmation = asbe.RequireSignatureConfirmation;
if ( asbe.InitiatorTokenParameters != null )
{
this.InitializeNestedTokenParameterSettings( asbe.InitiatorTokenParameters, initializeNestedBindings );
                    //
                    // Copy the require-derived-keys flag from the token parameters; the token parameters were
                    // populated when the WSDL was imported during SecurityBindingElementImporter.ImportPolicy.
                    //
if ( requireDerivedKeys.HasValue && requireDerivedKeys.Value != asbe.InitiatorTokenParameters.RequireDerivedKeys )
initializationFailure = true;
else
requireDerivedKeys = asbe.InitiatorTokenParameters.RequireDerivedKeys;
}
}
}
this.willX509IssuerReferenceAssertionBeWritten = DoesSecurityBindingElementContainClauseTypeofIssuerSerial(sbe);
this.RequireDerivedKeys = requireDerivedKeys.GetValueOrDefault(SecurityTokenParameters.defaultRequireDerivedKeys);
this.LocalClientSettings.InitializeFrom(sbe.LocalClientSettings);
this.LocalServiceSettings.InitializeFrom(sbe.LocalServiceSettings);
if (!initializationFailure)
initializationFailure = !this.TryInitializeAuthenticationMode(sbe);
if (initializationFailure)
this.failedSecurityBindingElement = sbe;
}
protected internal override void InitializeFrom(BindingElement bindingElement)
{
this.InitializeFrom(bindingElement, true);
}
        /// <summary>
        /// Returns true if any of the endpoint or optional endpoint supporting token parameter collections,
        /// or the protection/initiator/recipient token parameters, contain an X509 token parameter whose
        /// reference style is IssuerSerial.
        /// </summary>
        /// <param name="sbe">The security binding element to inspect.</param>
        /// <returns>True if an X509 IssuerSerial reference would be written; otherwise false.</returns>
bool DoesSecurityBindingElementContainClauseTypeofIssuerSerial( SecurityBindingElement sbe )
{
if ( sbe == null )
return false;
if ( sbe is SymmetricSecurityBindingElement )
{
                X509SecurityTokenParameters tokenParameters = ( (SymmetricSecurityBindingElement)sbe ).ProtectionTokenParameters as X509SecurityTokenParameters;
                if ( tokenParameters != null && tokenParameters.X509ReferenceStyle == X509KeyIdentifierClauseType.IssuerSerial )
                    return true;
}
else if ( sbe is AsymmetricSecurityBindingElement )
{
                X509SecurityTokenParameters initiatorParameters = ( (AsymmetricSecurityBindingElement)sbe ).InitiatorTokenParameters as X509SecurityTokenParameters;
                if ( initiatorParameters != null && initiatorParameters.X509ReferenceStyle == X509KeyIdentifierClauseType.IssuerSerial )
                    return true;
                X509SecurityTokenParameters recipientParameters = ( (AsymmetricSecurityBindingElement)sbe ).RecipientTokenParameters as X509SecurityTokenParameters;
                if ( recipientParameters != null && recipientParameters.X509ReferenceStyle == X509KeyIdentifierClauseType.IssuerSerial )
                    return true;
}
if ( DoesX509TokenParametersContainClauseTypeofIssuerSerial( sbe.EndpointSupportingTokenParameters.Endorsing ) )
return true;
if ( DoesX509TokenParametersContainClauseTypeofIssuerSerial( sbe.EndpointSupportingTokenParameters.Signed ) )
return true;
if ( DoesX509TokenParametersContainClauseTypeofIssuerSerial( sbe.EndpointSupportingTokenParameters.SignedEncrypted ) )
return true;
if ( DoesX509TokenParametersContainClauseTypeofIssuerSerial( sbe.EndpointSupportingTokenParameters.SignedEndorsing ) )
return true;
if ( DoesX509TokenParametersContainClauseTypeofIssuerSerial( sbe.OptionalEndpointSupportingTokenParameters.Endorsing ) )
return true;
if ( DoesX509TokenParametersContainClauseTypeofIssuerSerial( sbe.OptionalEndpointSupportingTokenParameters.Signed ) )
return true;
if ( DoesX509TokenParametersContainClauseTypeofIssuerSerial( sbe.OptionalEndpointSupportingTokenParameters.SignedEncrypted ) )
return true;
if ( DoesX509TokenParametersContainClauseTypeofIssuerSerial( sbe.OptionalEndpointSupportingTokenParameters.SignedEndorsing ) )
return true;
return false;
}
bool DoesX509TokenParametersContainClauseTypeofIssuerSerial( Collection<SecurityTokenParameters> tokenParameters )
{
foreach ( SecurityTokenParameters tokenParameter in tokenParameters )
{
X509SecurityTokenParameters x509TokenParameter = tokenParameter as X509SecurityTokenParameters;
if ( x509TokenParameter != null )
{
if ( x509TokenParameter.X509ReferenceStyle == X509KeyIdentifierClauseType.IssuerSerial )
return true;
}
}
return false;
}
protected override bool SerializeToXmlElement(XmlWriter writer, String elementName)
{
bool result;
if (this.failedSecurityBindingElement != null && writer != null)
{
writer.WriteComment(SR.GetString(SR.ConfigurationSchemaInsuffientForSecurityBindingElementInstance));
writer.WriteComment(this.failedSecurityBindingElement.ToString());
result = true;
}
else
{
if ( writer != null && this.willX509IssuerReferenceAssertionBeWritten )
writer.WriteComment( SR.GetString(SR.ConfigurationSchemaContainsX509IssuerSerialReference));
result = base.SerializeToXmlElement(writer, elementName);
}
return result;
}
protected override bool SerializeElement(XmlWriter writer, bool serializeCollectionKey)
{
bool nontrivial = base.SerializeElement(writer, serializeCollectionKey);
            // A SecurityElement can copy properties from a "bootstrap" SecurityElementBase.
            // In this case, a trivial bootstrap (no properties set) is equivalent to not having one at all, so we can omit it.
Func<PropertyInformation, bool> nontrivialProperty = property => property.ValueOrigin == PropertyValueOrigin.SetHere;
if (this.IsSecurityElementBootstrap && !this.ElementInformation.Properties.OfType<PropertyInformation>().Any(nontrivialProperty))
{
nontrivial = false;
}
return nontrivial;
}
protected override void Unmerge(ConfigurationElement sourceElement, ConfigurationElement parentElement, ConfigurationSaveMode saveMode)
{
if ( sourceElement is SecurityElementBase )
{
this.failedSecurityBindingElement = ( (SecurityElementBase)sourceElement ).failedSecurityBindingElement;
this.willX509IssuerReferenceAssertionBeWritten = ( (SecurityElementBase)sourceElement ).willX509IssuerReferenceAssertionBeWritten;
}
base.Unmerge(sourceElement, parentElement, saveMode);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
private static void AndInt64()
{
var test = new SimpleBinaryOpTest__AndInt64();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
// Validates basic functionality works, using LoadAligned
test.RunBasicScenario_LoadAligned();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
// Validates calling via reflection works, using LoadAligned
test.RunReflectionScenario_LoadAligned();
}
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
// Validates passing a local works, using LoadAligned
test.RunLclVarScenario_LoadAligned();
}
// Validates passing the field of a local works
test.RunLclFldScenario();
// Validates passing an instance member works
test.RunFldScenario();
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__AndInt64
{
private const int VectorSize = 32;
private const int Op1ElementCount = VectorSize / sizeof(Int64);
private const int Op2ElementCount = VectorSize / sizeof(Int64);
private const int RetElementCount = VectorSize / sizeof(Int64);
private static Int64[] _data1 = new Int64[Op1ElementCount];
private static Int64[] _data2 = new Int64[Op2ElementCount];
private static Vector256<Int64> _clsVar1;
private static Vector256<Int64> _clsVar2;
private Vector256<Int64> _fld1;
private Vector256<Int64> _fld2;
private SimpleBinaryOpTest__DataTable<Int64, Int64, Int64> _dataTable;
static SimpleBinaryOpTest__AndInt64()
{
var random = new Random();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (long)(random.Next(int.MinValue, int.MaxValue)); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Int64>, byte>(ref _clsVar1), ref Unsafe.As<Int64, byte>(ref _data1[0]), VectorSize);
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (long)(random.Next(int.MinValue, int.MaxValue)); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Int64>, byte>(ref _clsVar2), ref Unsafe.As<Int64, byte>(ref _data2[0]), VectorSize);
}
public SimpleBinaryOpTest__AndInt64()
{
Succeeded = true;
var random = new Random();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (long)(random.Next(int.MinValue, int.MaxValue)); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Int64>, byte>(ref _fld1), ref Unsafe.As<Int64, byte>(ref _data1[0]), VectorSize);
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (long)(random.Next(int.MinValue, int.MaxValue)); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Int64>, byte>(ref _fld2), ref Unsafe.As<Int64, byte>(ref _data2[0]), VectorSize);
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (long)(random.Next(int.MinValue, int.MaxValue)); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (long)(random.Next(int.MinValue, int.MaxValue)); }
_dataTable = new SimpleBinaryOpTest__DataTable<Int64, Int64, Int64>(_data1, _data2, new Int64[RetElementCount], VectorSize);
}
public bool IsSupported => Avx2.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
var result = Avx2.And(
Unsafe.Read<Vector256<Int64>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector256<Int64>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
var result = Avx2.And(
Avx.LoadVector256((Int64*)(_dataTable.inArray1Ptr)),
Avx.LoadVector256((Int64*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_LoadAligned()
{
var result = Avx2.And(
Avx.LoadAlignedVector256((Int64*)(_dataTable.inArray1Ptr)),
Avx.LoadAlignedVector256((Int64*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
var result = typeof(Avx2).GetMethod(nameof(Avx2.And), new Type[] { typeof(Vector256<Int64>), typeof(Vector256<Int64>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector256<Int64>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector256<Int64>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Int64>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
var result = typeof(Avx2).GetMethod(nameof(Avx2.And), new Type[] { typeof(Vector256<Int64>), typeof(Vector256<Int64>) })
.Invoke(null, new object[] {
Avx.LoadVector256((Int64*)(_dataTable.inArray1Ptr)),
Avx.LoadVector256((Int64*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Int64>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_LoadAligned()
{
var result = typeof(Avx2).GetMethod(nameof(Avx2.And), new Type[] { typeof(Vector256<Int64>), typeof(Vector256<Int64>) })
.Invoke(null, new object[] {
Avx.LoadAlignedVector256((Int64*)(_dataTable.inArray1Ptr)),
Avx.LoadAlignedVector256((Int64*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Int64>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
var result = Avx2.And(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
var left = Unsafe.Read<Vector256<Int64>>(_dataTable.inArray1Ptr);
var right = Unsafe.Read<Vector256<Int64>>(_dataTable.inArray2Ptr);
var result = Avx2.And(left, right);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
var left = Avx.LoadVector256((Int64*)(_dataTable.inArray1Ptr));
var right = Avx.LoadVector256((Int64*)(_dataTable.inArray2Ptr));
var result = Avx2.And(left, right);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_LoadAligned()
{
var left = Avx.LoadAlignedVector256((Int64*)(_dataTable.inArray1Ptr));
var right = Avx.LoadAlignedVector256((Int64*)(_dataTable.inArray2Ptr));
var result = Avx2.And(left, right);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunLclFldScenario()
{
var test = new SimpleBinaryOpTest__AndInt64();
var result = Avx2.And(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunFldScenario()
{
var result = Avx2.And(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunUnsupportedScenario()
{
Succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
Succeeded = true;
}
}
private void ValidateResult(Vector256<Int64> left, Vector256<Int64> right, void* result, [CallerMemberName] string method = "")
{
Int64[] inArray1 = new Int64[Op1ElementCount];
Int64[] inArray2 = new Int64[Op2ElementCount];
Int64[] outArray = new Int64[RetElementCount];
Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left);
Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int64, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "")
{
Int64[] inArray1 = new Int64[Op1ElementCount];
Int64[] inArray2 = new Int64[Op2ElementCount];
Int64[] outArray = new Int64[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int64, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int64, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int64, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(Int64[] left, Int64[] right, Int64[] result, [CallerMemberName] string method = "")
{
if ((long)(left[0] & right[0]) != result[0])
{
Succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if ((long)(left[i] & right[i]) != result[i])
{
Succeeded = false;
break;
}
}
}
if (!Succeeded)
{
Console.WriteLine($"{nameof(Avx2)}.{nameof(Avx2.And)}<Int64>(Vector256<Int64>, Vector256<Int64>): {method} failed:");
Console.WriteLine($" left: ({string.Join(", ", left)})");
Console.WriteLine($" right: ({string.Join(", ", right)})");
Console.WriteLine($" result: ({string.Join(", ", result)})");
Console.WriteLine();
}
}
}
}
| |
using System.Text.RegularExpressions;
using System.Diagnostics;
using System;
using System.Drawing.Drawing2D;
using System.Windows.Forms;
using System.Collections;
using System.Drawing;
using Microsoft.VisualBasic;
using System.Data.SqlClient;
using System.Data;
using System.Collections.Generic;
using WeifenLuo.WinFormsUI;
using Microsoft.Win32;
using WeifenLuo;
using System.ComponentModel;
namespace SoftLogik.Win
{
namespace Reporting
{
#region Report Filter Classes
public class SPReportFilter
{
private SPReportFilterFieldTypes _Type = SPReportFilterFieldTypes.General;
private string _DisplayMember = string.Empty;
private string _ValueMember = string.Empty;
private string _Operation = string.Empty;
private string _OperationText = string.Empty;
private object _Value = null;
#region Properties
protected internal SPReportFilterFieldTypes @Type
{
get
{
return _Type;
}
}
protected internal string Operation
{
get
{
return _Operation;
}
set
{
_Operation = value;
}
}
public string OperationText
{
get
{
return _OperationText;
}
set
{
_OperationText = value;
}
}
public string DisplayMember
{
get
{
return _DisplayMember;
}
set
{
_DisplayMember = value;
}
}
public string ValueMember
{
get
{
return _ValueMember;
}
set
{
_ValueMember = value;
}
}
protected internal object Value
{
get
{
return _Value;
}
set
{
_Value = value;
}
}
#endregion
public SPReportFilter(string DisplayMember, string ValueMember, SPReportFilterFieldTypes @Type)
{
this._DisplayMember = DisplayMember;
this._ValueMember = ValueMember;
this._Type = @Type;
}
public override string ToString()
{
return this._Operation.Replace("#ValueMember#", this._ValueMember);
}
public string ToFormattedString()
{
return this._OperationText.Replace("#DisplayMember#", this._DisplayMember);
}
}
  [DataObject()]
  public class SPReportFilterCollection : List<SPReportFilter>
{
public SPReportFilter Add(string DisplayMember, string ValueMember, SPReportFilterFieldTypes @Type)
{
SPReportFilter newItem = new SPReportFilter(DisplayMember, ValueMember, @Type);
try
{
this.Add(newItem);
}
catch (Exception)
{
}
return newItem;
}
public SPReportFilter this[string DisplayMember]
{
get
{
foreach (SPReportFilter itm in this)
{
if (itm.DisplayMember == DisplayMember)
{
return itm;
}
}
return null;
}
}
public void Remove(string DisplayMember)
{
//On Error Resume Next VBConversions Warning: On Error Resume Next not supported in C#
this.Remove(this[DisplayMember]);
}
public override string ToString()
{
string strFilterQuery = string.Empty;
foreach (SPReportFilter qry in this)
{
strFilterQuery += qry.ToString();
strFilterQuery += " AND ";
}
                if (strFilterQuery.Trim().EndsWith("AND"))
                {
                    // strip the trailing " AND " appended by the loop above
                    strFilterQuery = strFilterQuery.Trim();
                    strFilterQuery = strFilterQuery.Substring(0, strFilterQuery.Length - 3).Trim();
                }
return strFilterQuery;
}
}
#endregion
#region Report Group Classes
[Description("Specify a Grouping used in a Report.")]public class SPReportGrouping
{
private string _DisplayMember = string.Empty;
private string _ValueMember = string.Empty;
public string DisplayMember
{
get
{
return _DisplayMember;
}
set
{
_DisplayMember = value;
}
}
public SPReportGrouping(string DisplayMember, string ValueMember)
{
this._DisplayMember = DisplayMember;
this._ValueMember = ValueMember;
}
}
[Description("Specify a List of Groupings used in a Report.")]public class SPReportGroupingCollection : List<SPReportGrouping>
{
public SPReportGrouping Add(string DisplayMember, string ValueMember)
{
SPReportGrouping newItem = new SPReportGrouping(DisplayMember, ValueMember);
try
{
this.Add(newItem);
}
catch (Exception)
{
}
return newItem;
}
public SPReportGrouping this[string DisplayMember]
{
get
{
foreach (SPReportGrouping itm in this)
{
if (itm.DisplayMember == DisplayMember)
{
return itm;
}
}
return null;
}
}
public void Remove(string DisplayMember)
{
//On Error Resume Next VBConversions Warning: On Error Resume Next not supported in C#
this.Remove(this[DisplayMember]);
}
}
#endregion
#region Report Parameter Classes
public class SPReportParameter
{
private string _Name = string.Empty;
private object _Value = null;
public string Name
{
get
{
return _Name;
}
set
{
_Name = value;
}
}
public object Value
{
get
{
return _Value;
}
set
{
_Value = value;
}
}
public SPReportParameter(string Name, object Value)
{
this._Name = Name;
this._Value = Value;
}
}
public class SPReportParameterCollection : List<SPReportParameter>
{
public SPReportParameter Add(string Name, object Value)
{
SPReportParameter newItem = new SPReportParameter(Name, Value);
try
{
this.Add(newItem);
}
catch (Exception)
{
}
return newItem;
}
public SPReportParameter this[string Name]
{
get
{
foreach (SPReportParameter itm in this)
{
if (itm.Name == Name)
{
return itm;
}
}
return null;
}
}
public void Remove(string Name)
{
//On Error Resume Next VBConversions Warning: On Error Resume Next not supported in C#
this.Remove(this[Name]);
}
}
#endregion
#region Report Filter Operator Classes
public class SPReportFilterOperator
{
private string _Name = string.Empty;
public string Name
{
get
{
return _Name;
}
set
{
_Name = value;
}
}
private SPComparisons _Value = SPComparisons.Equals;
public SPComparisons Value
{
get
{
return _Value;
}
set
{
_Value = value;
}
}
public SPReportFilterOperator()
{
}
public SPReportFilterOperator(string Name, SPComparisons Value)
{
this._Name = Name;
this._Value = Value;
}
}
#endregion
}
}
| |
using System;
using System.Runtime.InteropServices;
using System.Text;
namespace DbgEng.NoExceptions
{
[ComImport, ComConversionLoss, Guid("8C31E98C-983A-48A5-9016-6FE5D667A950"), InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
public interface IDebugSymbols
{
[PreserveSig]
int GetSymbolOptions(
[Out] out uint Options);
[PreserveSig]
int AddSymbolOptions(
[In] uint Options);
[PreserveSig]
int RemoveSymbolOptions(
[In] uint Options);
[PreserveSig]
int SetSymbolOptions(
[In] uint Options);
[PreserveSig]
int GetNameByOffset(
[In] ulong Offset,
[Out, MarshalAs(UnmanagedType.LPStr)] StringBuilder NameBuffer,
[In] uint NameBufferSize,
[Out] out uint NameSize,
[Out] out ulong Displacement);
[PreserveSig]
int GetOffsetByName(
[In, MarshalAs(UnmanagedType.LPStr)] string Symbol,
[Out] out ulong Offset);
[PreserveSig]
int GetNearNameByOffset(
[In] ulong Offset,
[In] int Delta,
[Out, MarshalAs(UnmanagedType.LPStr)] StringBuilder NameBuffer,
[In] uint NameBufferSize,
[Out] out uint NameSize,
[Out] out ulong Displacement);
[PreserveSig]
int GetLineByOffset(
[In] ulong Offset,
[Out] out uint Line,
[Out, MarshalAs(UnmanagedType.LPStr)] StringBuilder FileBuffer,
[In] uint FileBufferSize,
[Out] out uint FileSize,
[Out] out ulong Displacement);
[PreserveSig]
int GetOffsetByLine(
[In] uint Line,
[In, MarshalAs(UnmanagedType.LPStr)] string File,
[Out] out ulong Offset);
[PreserveSig]
int GetNumberModules(
[Out] out uint Loaded,
[Out] out uint Unloaded);
[PreserveSig]
int GetModuleByIndex(
[In] uint Index,
[Out] out ulong Base);
[PreserveSig]
int GetModuleByModuleName(
[In, MarshalAs(UnmanagedType.LPStr)] string Name,
[In] uint StartIndex,
[Out] out uint Index,
[Out] out ulong Base);
[PreserveSig]
int GetModuleByOffset(
[In] ulong Offset,
[In] uint StartIndex,
[Out] out uint Index,
[Out] out ulong Base);
[PreserveSig]
int GetModuleNames(
[In] uint Index,
[In] ulong Base,
[Out, MarshalAs(UnmanagedType.LPStr)] StringBuilder ImageNameBuffer,
[In] uint ImageNameBufferSize,
[Out] out uint ImageNameSize,
[Out, MarshalAs(UnmanagedType.LPStr)] StringBuilder ModuleNameBuffer,
[In] uint ModuleNameBufferSize,
[Out] out uint ModuleNameSize,
[Out, MarshalAs(UnmanagedType.LPStr)] StringBuilder LoadedImageNameBuffer,
[In] uint LoadedImageNameBufferSize,
[Out] out uint LoadedImageNameSize);
[PreserveSig]
int GetModuleParameters(
[In] uint Count,
[In] ref ulong Bases,
[In] uint Start = default(uint),
[Out, MarshalAs(UnmanagedType.LPArray)] _DEBUG_MODULE_PARAMETERS[] Params = null);
[PreserveSig]
int GetSymbolModule(
[In, MarshalAs(UnmanagedType.LPStr)] string Symbol,
[Out] out ulong Base);
[PreserveSig]
int GetTypeName(
[In] ulong Module,
[In] uint TypeId,
[Out, MarshalAs(UnmanagedType.LPStr)] StringBuilder NameBuffer,
[In] uint NameBufferSize,
[Out] out uint NameSize);
[PreserveSig]
int GetTypeId(
[In] ulong Module,
[In, MarshalAs(UnmanagedType.LPStr)] string Name,
[Out] out uint Id);
[PreserveSig]
int GetTypeSize(
[In] ulong Module,
[In] uint TypeId,
[Out] out uint Size);
[PreserveSig]
int GetFieldOffset(
[In] ulong Module,
[In] uint TypeId,
[In, MarshalAs(UnmanagedType.LPStr)] string Field,
[Out] out uint Offset);
[PreserveSig]
int GetSymbolTypeId(
[In, MarshalAs(UnmanagedType.LPStr)] string Symbol,
[Out] out uint TypeId,
[Out] out ulong Module);
[PreserveSig]
int GetOffsetTypeId(
[In] ulong Offset,
[Out] out uint TypeId,
[Out] out ulong Module);
[PreserveSig]
int ReadTypedDataVirtual(
[In] ulong Offset,
[In] ulong Module,
[In] uint TypeId,
[Out, MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 4)] byte[] Buffer,
[In] uint BufferSize,
[Out] out uint BytesRead);
[PreserveSig]
int WriteTypedDataVirtual(
[In] ulong Offset,
[In] ulong Module,
[In] uint TypeId,
[In] IntPtr Buffer,
[In] uint BufferSize,
[Out] out uint BytesWritten);
[PreserveSig]
int OutputTypedDataVirtual(
[In] uint OutputControl,
[In] ulong Offset,
[In] ulong Module,
[In] uint TypeId,
[In] uint Flags);
[PreserveSig]
int ReadTypedDataPhysical(
[In] ulong Offset,
[In] ulong Module,
[In] uint TypeId,
[Out] IntPtr Buffer,
[In] uint BufferSize,
[Out] out uint BytesRead);
[PreserveSig]
int WriteTypedDataPhysical(
[In] ulong Offset,
[In] ulong Module,
[In] uint TypeId,
[In] IntPtr Buffer,
[In] uint BufferSize,
[Out] out uint BytesWritten);
[PreserveSig]
int OutputTypedDataPhysical(
[In] uint OutputControl,
[In] ulong Offset,
[In] ulong Module,
[In] uint TypeId,
[In] uint Flags);
[PreserveSig]
int GetScope(
[Out] out ulong InstructionOffset,
[Out] out _DEBUG_STACK_FRAME ScopeFrame,
[Out] IntPtr ScopeContext = default(IntPtr),
[In] uint ScopeContextSize = default(uint));
[PreserveSig]
int SetScope(
[In] ulong InstructionOffset,
[In] ref _DEBUG_STACK_FRAME ScopeFrame,
[In] IntPtr ScopeContext = default(IntPtr),
[In] uint ScopeContextSize = default(uint));
[PreserveSig]
int ResetScope();
[PreserveSig]
int GetScopeSymbolGroup(
[In] uint Flags,
[In, MarshalAs(UnmanagedType.Interface)] IDebugSymbolGroup Update,
[Out, MarshalAs(UnmanagedType.Interface)] out IDebugSymbolGroup Symbols);
[PreserveSig]
int CreateSymbolGroup(
[Out, MarshalAs(UnmanagedType.Interface)] out IDebugSymbolGroup Symbols);
[PreserveSig]
int StartSymbolMatch(
[In, MarshalAs(UnmanagedType.LPStr)] string Pattern,
[Out] out ulong Handle);
[PreserveSig]
int GetNextSymbolMatch(
[In] ulong Handle,
[Out, MarshalAs(UnmanagedType.LPStr)] StringBuilder Buffer,
[In] uint BufferSize, [Out] out uint MatchSize,
[Out] out ulong Offset);
[PreserveSig]
int EndSymbolMatch(
[In] ulong Handle);
[PreserveSig]
int Reload(
[In, MarshalAs(UnmanagedType.LPStr)] string Module);
[PreserveSig]
int GetSymbolPath(
[Out, MarshalAs(UnmanagedType.LPStr)] StringBuilder Buffer,
[In] uint BufferSize,
[Out] out uint PathSize);
[PreserveSig]
int SetSymbolPath(
[In, MarshalAs(UnmanagedType.LPStr)] string Path);
[PreserveSig]
int AppendSymbolPath(
[In, MarshalAs(UnmanagedType.LPStr)] string Addition);
[PreserveSig]
int GetImagePath(
[Out, MarshalAs(UnmanagedType.LPStr)] StringBuilder Buffer,
[In] uint BufferSize,
[Out] out uint PathSize);
[PreserveSig]
int SetImagePath(
[In, MarshalAs(UnmanagedType.LPStr)] string Path);
[PreserveSig]
int AppendImagePath(
[In, MarshalAs(UnmanagedType.LPStr)] string Addition);
[PreserveSig]
int GetSourcePath(
[Out, MarshalAs(UnmanagedType.LPStr)] StringBuilder Buffer,
[In] uint BufferSize,
[Out] out uint PathSize);
[PreserveSig]
int GetSourcePathElement(
[In] uint Index,
[Out, MarshalAs(UnmanagedType.LPStr)] StringBuilder Buffer,
[In] uint BufferSize,
[Out] out uint ElementSize);
[PreserveSig]
int SetSourcePath(
[In, MarshalAs(UnmanagedType.LPStr)] string Path);
[PreserveSig]
int AppendSourcePath(
[In, MarshalAs(UnmanagedType.LPStr)] string Addition);
[PreserveSig]
int FindSourceFile(
[In] uint StartElement,
[In, MarshalAs(UnmanagedType.LPStr)] string File,
[In] uint Flags,
[Out] out uint FoundElement,
[Out, MarshalAs(UnmanagedType.LPStr)] StringBuilder Buffer,
[In] uint BufferSize,
[Out] out uint FoundSize);
[PreserveSig]
int GetSourceFileLineOffsets(
[In, MarshalAs(UnmanagedType.LPStr)] string File,
[Out] out ulong Buffer,
[In] uint BufferLines,
[Out] out uint FileLines);
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Newtonsoft.Json;
using Umbraco.Core.IO;
using Umbraco.Core.Logging;
using Umbraco.Core.Models;
namespace Umbraco.Core.PropertyEditors
{
/// <summary>
/// Defines a pre-value editor
/// </summary>
/// <remarks>
/// A pre-value editor is made up of multiple pre-value fields, each field defines a key that the value is stored against.
/// Each field can have any editor and the value from each field can store any data such as a simple string or a json structure.
///
/// The Json serialization attributes are required for manifest property editors to work.
/// </remarks>
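    /// <example>
    /// A minimal, hypothetical sketch (not taken from this codebase) of how a pre-value editor is typically
    /// declared: each field is a property decorated with a <see cref="PreValueFieldAttribute"/>, and the
    /// constructor below discovers those attributes via reflection. The editor name, key and view below are
    /// illustrative assumptions only, and the exact attribute signature may differ.
    /// <code>
    /// public class MyColorsPreValueEditor : PreValueEditor
    /// {
    ///     // "items" becomes the key the pre-value is stored against in the database.
    ///     [PreValueField("items", "Available colors", "textarea")]
    ///     public string Items { get; set; }
    /// }
    /// </code>
    /// </example>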
public class PreValueEditor
{
public PreValueEditor()
{
var fields = new List<PreValueField>();
//the ctor checks if we have PreValueFieldAttributes applied and if so we construct our fields from them
var props = TypeHelper.CachedDiscoverableProperties(GetType())
.Where(x => x.Name != "Fields");
foreach (var p in props)
{
var att = p.GetCustomAttributes(typeof (PreValueFieldAttribute), false).OfType<PreValueFieldAttribute>().SingleOrDefault();
if (att != null)
{
if (att.PreValueFieldType != null)
{
//try to create it
try
{
var instance = (PreValueField) Activator.CreateInstance(att.PreValueFieldType);
//overwrite values if they are assigned
if (!att.Key.IsNullOrWhiteSpace())
{
instance.Key = att.Key;
}
//if the key is still empty then assign it to be the property name
if (instance.Key.IsNullOrWhiteSpace())
{
instance.Key = p.Name;
}
if (!att.Name.IsNullOrWhiteSpace())
instance.Name = att.Name;
if (!att.View.IsNullOrWhiteSpace())
instance.View = att.View;
if (!att.Description.IsNullOrWhiteSpace())
instance.Description = att.Description;
if (att.HideLabel)
instance.HideLabel = att.HideLabel;
//add the custom field
fields.Add(instance);
}
catch (Exception ex)
{
LogHelper.WarnWithException<PreValueEditor>("Could not create an instance of " + att.PreValueFieldType, ex);
}
}
else
{
fields.Add(MapAttributeToField(att, p));
}
}
}
Fields = fields;
}
private static PreValueField MapAttributeToField(PreValueFieldAttribute att, PropertyInfo prop)
{
return new PreValueField
{
//set the key to the property name if it is empty
Key = att.Key.IsNullOrWhiteSpace() ? prop.Name : att.Key,
Name = att.Name,
Description = att.Description,
HideLabel = att.HideLabel,
View = att.View.StartsWith("~/") ? IOHelper.ResolveUrl(att.View) : att.View
};
}
/// <summary>
/// A collection of pre-value fields to be edited
/// </summary>
/// <remarks>
/// If fields are specified then the master View and Validators will be ignored
/// </remarks>
[JsonProperty("fields")]
public List<PreValueField> Fields { get; private set; }
/// <summary>
/// A method to format the posted values from the editor to the values to be persisted
/// </summary>
/// <param name="editorValue"></param>
/// <param name="currentValue">
        /// The current value that has been persisted to the database for this pre-value editor. This value may be useful
        /// when deciding how the posted value should be deserialized and re-persisted. In most cases it will not be used.
/// </param>
/// <returns></returns>
/// <remarks>
/// By default this will just return the Posted editorValue.
///
/// This can be overridden if perhaps you have a comma delimited string posted value but want to convert those to individual rows, or to convert
/// a json structure to multiple rows.
/// </remarks>
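        /// <example>
        /// A hedged, hypothetical override illustrating the comma delimited conversion described above; the
        /// editor key "items" is an assumption made for the example, not something defined by this class.
        /// <code>
        /// public override IDictionary&lt;string, PreValue&gt; ConvertEditorToDb(
        ///     IDictionary&lt;string, object&gt; editorValue, PreValueCollection currentValue)
        /// {
        ///     var result = new Dictionary&lt;string, PreValue&gt;();
        ///     var posted = editorValue["items"] as string ?? string.Empty;
        ///     var rows = posted.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries);
        ///     for (var i = 0; i &lt; rows.Length; i++)
        ///     {
        ///         // one pre-value row per posted entry
        ///         result[i.ToInvariantString()] = new PreValue(rows[i].Trim());
        ///     }
        ///     return result;
        /// }
        /// </code>
        /// </example>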
public virtual IDictionary<string, PreValue> ConvertEditorToDb(IDictionary<string, object> editorValue, PreValueCollection currentValue)
{
//convert to a string based value to be saved in the db
return editorValue.ToDictionary(x => x.Key, x => new PreValue(x.Value == null ? null : x.Value.ToString()));
}
/// <summary>
/// This can be used to re-format the currently saved pre-values that will be passed to the editor,
/// by default this returns the merged default and persisted pre-values.
/// </summary>
/// <param name="defaultPreVals">
/// The default/static pre-vals for the property editor
/// </param>
/// <param name="persistedPreVals">
/// The persisted pre-vals for the property editor
/// </param>
/// <returns></returns>
/// <remarks>
/// This is generally not going to be used by anything unless a property editor wants to change the merging
        /// functionality, needs to convert legacy persisted data, or wants to convert the string values to strongly typed JSON values (e.g. booleans).
///
/// IMPORTANT! When using this method the default pre values dictionary should not be modified which would change the property editor's global
/// singleton pre-values!
/// </remarks>
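        /// <example>
        /// A hedged sketch of an override that converts a persisted string pre-value into a real boolean so the
        /// client receives a typed value; the "enableStuff" key is an illustrative assumption, and the default
        /// dictionary passed in is left untouched as required above.
        /// <code>
        /// public override IDictionary&lt;string, object&gt; ConvertDbToEditor(
        ///     IDictionary&lt;string, object&gt; defaultPreVals, PreValueCollection persistedPreVals)
        /// {
        ///     var merged = base.ConvertDbToEditor(defaultPreVals, persistedPreVals);
        ///     object raw;
        ///     bool parsed;
        ///     if (merged.TryGetValue("enableStuff", out raw) &amp;&amp; raw is string &amp;&amp; bool.TryParse((string)raw, out parsed))
        ///     {
        ///         merged["enableStuff"] = parsed;
        ///     }
        ///     return merged;
        /// }
        /// </code>
        /// </example>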
public virtual IDictionary<string, object> ConvertDbToEditor(IDictionary<string, object> defaultPreVals, PreValueCollection persistedPreVals)
{
//we'll make a copy since we'll merge into the defaults - but we don't want to overwrite the global singleton ones passed in!
var defaultPreValCopy = new Dictionary<string, object>();
if (defaultPreVals != null)
{
defaultPreValCopy = new Dictionary<string, object>(defaultPreVals);
}
if (persistedPreVals.IsDictionaryBased)
{
//we just need to merge the dictionaries now, the persisted will replace default.
foreach (var item in persistedPreVals.PreValuesAsDictionary)
{
                //The persisted dictionary contains values of type PreValue, which carry both the Id and the Value;
                // we only care about the value, so the Id is ignored.
defaultPreValCopy[item.Key] = item.Value.Value;
}
//now we're going to try to see if any of the values are JSON, if they are we'll convert them to real JSON objects
// so they can be consumed as real json in angular!
ConvertItemsToJsonIfDetected(defaultPreValCopy);
return defaultPreValCopy;
}
//it's an array so need to format it
var result = new Dictionary<string, object>();
var asArray = persistedPreVals.PreValuesAsArray.ToArray();
for (var i = 0; i < asArray.Length; i++)
{
//each item is of type PreValue but we don't want the ID, just the value so ignore the ID
result.Add(i.ToInvariantString(), asArray[i].Value);
}
//now we're going to try to see if any of the values are JSON, if they are we'll convert them to real JSON objects
// so they can be consumed as real json in angular!
ConvertItemsToJsonIfDetected(result);
return result;
}
protected void ConvertItemsToJsonIfDetected(IDictionary<string, object> result)
{
// convert values that are Json to true Json objects that can be consumed by Angular
var keys = result.Keys.ToArray();
for (var i = 0; i < keys.Length; i++)
{
if ((result[keys[i]] is string) == false) continue;
var asString = result[keys[i]].ToString();
if (asString.DetectIsJson() == false) continue;
try
{
result[keys[i]] = JsonConvert.DeserializeObject(asString);
}
catch
{
// swallow this exception, we thought it was Json but it really isn't so continue returning a string
}
}
}
}
}
| |
/*
* Copyright (c) Citrix Systems, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1) Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2) Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using CookComputing.XmlRpc;
namespace XenAPI
{
/// <summary>
/// The metrics associated with a virtual network device
/// First published in XenServer 4.0.
/// </summary>
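    /// <example>
    /// A hedged usage sketch, assuming an already authenticated <c>Session</c> and a VIF_metrics opaque
    /// reference obtained elsewhere (both variable names below are illustrative):
    /// <code>
    /// // string vifMetricsRef = ...; // opaque_ref of the VIF_metrics, e.g. obtained via VIF.get_metrics
    /// VIF_metrics metrics = VIF_metrics.get_record(session, vifMetricsRef);
    /// Console.WriteLine("read: {0} KiB/s, write: {1} KiB/s, updated: {2}",
    ///     metrics.io_read_kbs, metrics.io_write_kbs, metrics.last_updated);
    /// </code>
    /// </example>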
public partial class VIF_metrics : XenObject<VIF_metrics>
{
public VIF_metrics()
{
}
public VIF_metrics(string uuid,
double io_read_kbs,
double io_write_kbs,
DateTime last_updated,
Dictionary<string, string> other_config)
{
this.uuid = uuid;
this.io_read_kbs = io_read_kbs;
this.io_write_kbs = io_write_kbs;
this.last_updated = last_updated;
this.other_config = other_config;
}
/// <summary>
/// Creates a new VIF_metrics from a Proxy_VIF_metrics.
/// </summary>
/// <param name="proxy"></param>
public VIF_metrics(Proxy_VIF_metrics proxy)
{
this.UpdateFromProxy(proxy);
}
public override void UpdateFrom(VIF_metrics update)
{
uuid = update.uuid;
io_read_kbs = update.io_read_kbs;
io_write_kbs = update.io_write_kbs;
last_updated = update.last_updated;
other_config = update.other_config;
}
internal void UpdateFromProxy(Proxy_VIF_metrics proxy)
{
uuid = proxy.uuid == null ? null : (string)proxy.uuid;
io_read_kbs = Convert.ToDouble(proxy.io_read_kbs);
io_write_kbs = Convert.ToDouble(proxy.io_write_kbs);
last_updated = proxy.last_updated;
other_config = proxy.other_config == null ? null : Maps.convert_from_proxy_string_string(proxy.other_config);
}
public Proxy_VIF_metrics ToProxy()
{
Proxy_VIF_metrics result_ = new Proxy_VIF_metrics();
result_.uuid = (uuid != null) ? uuid : "";
result_.io_read_kbs = io_read_kbs;
result_.io_write_kbs = io_write_kbs;
result_.last_updated = last_updated;
result_.other_config = Maps.convert_to_proxy_string_string(other_config);
return result_;
}
/// <summary>
/// Creates a new VIF_metrics from a Hashtable.
/// </summary>
/// <param name="table"></param>
public VIF_metrics(Hashtable table)
{
uuid = Marshalling.ParseString(table, "uuid");
io_read_kbs = Marshalling.ParseDouble(table, "io_read_kbs");
io_write_kbs = Marshalling.ParseDouble(table, "io_write_kbs");
last_updated = Marshalling.ParseDateTime(table, "last_updated");
other_config = Maps.convert_from_proxy_string_string(Marshalling.ParseHashTable(table, "other_config"));
}
public bool DeepEquals(VIF_metrics other)
{
if (ReferenceEquals(null, other))
return false;
if (ReferenceEquals(this, other))
return true;
return Helper.AreEqual2(this._uuid, other._uuid) &&
Helper.AreEqual2(this._io_read_kbs, other._io_read_kbs) &&
Helper.AreEqual2(this._io_write_kbs, other._io_write_kbs) &&
Helper.AreEqual2(this._last_updated, other._last_updated) &&
Helper.AreEqual2(this._other_config, other._other_config);
}
public override string SaveChanges(Session session, string opaqueRef, VIF_metrics server)
{
if (opaqueRef == null)
{
System.Diagnostics.Debug.Assert(false, "Cannot create instances of this type on the server");
return "";
}
else
{
if (!Helper.AreEqual2(_other_config, server._other_config))
{
VIF_metrics.set_other_config(session, opaqueRef, _other_config);
}
return null;
}
}
/// <summary>
/// Get a record containing the current state of the given VIF_metrics.
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vif_metrics">The opaque_ref of the given vif_metrics</param>
public static VIF_metrics get_record(Session session, string _vif_metrics)
{
return new VIF_metrics((Proxy_VIF_metrics)session.proxy.vif_metrics_get_record(session.uuid, (_vif_metrics != null) ? _vif_metrics : "").parse());
}
/// <summary>
/// Get a reference to the VIF_metrics instance with the specified UUID.
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_uuid">UUID of object to return</param>
public static XenRef<VIF_metrics> get_by_uuid(Session session, string _uuid)
{
return XenRef<VIF_metrics>.Create(session.proxy.vif_metrics_get_by_uuid(session.uuid, (_uuid != null) ? _uuid : "").parse());
}
/// <summary>
/// Get the uuid field of the given VIF_metrics.
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vif_metrics">The opaque_ref of the given vif_metrics</param>
public static string get_uuid(Session session, string _vif_metrics)
{
return (string)session.proxy.vif_metrics_get_uuid(session.uuid, (_vif_metrics != null) ? _vif_metrics : "").parse();
}
/// <summary>
/// Get the io/read_kbs field of the given VIF_metrics.
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vif_metrics">The opaque_ref of the given vif_metrics</param>
public static double get_io_read_kbs(Session session, string _vif_metrics)
{
return Convert.ToDouble(session.proxy.vif_metrics_get_io_read_kbs(session.uuid, (_vif_metrics != null) ? _vif_metrics : "").parse());
}
/// <summary>
/// Get the io/write_kbs field of the given VIF_metrics.
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vif_metrics">The opaque_ref of the given vif_metrics</param>
public static double get_io_write_kbs(Session session, string _vif_metrics)
{
return Convert.ToDouble(session.proxy.vif_metrics_get_io_write_kbs(session.uuid, (_vif_metrics != null) ? _vif_metrics : "").parse());
}
/// <summary>
/// Get the last_updated field of the given VIF_metrics.
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vif_metrics">The opaque_ref of the given vif_metrics</param>
public static DateTime get_last_updated(Session session, string _vif_metrics)
{
return session.proxy.vif_metrics_get_last_updated(session.uuid, (_vif_metrics != null) ? _vif_metrics : "").parse();
}
/// <summary>
/// Get the other_config field of the given VIF_metrics.
/// First published in XenServer 5.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vif_metrics">The opaque_ref of the given vif_metrics</param>
public static Dictionary<string, string> get_other_config(Session session, string _vif_metrics)
{
return Maps.convert_from_proxy_string_string(session.proxy.vif_metrics_get_other_config(session.uuid, (_vif_metrics != null) ? _vif_metrics : "").parse());
}
/// <summary>
/// Set the other_config field of the given VIF_metrics.
/// First published in XenServer 5.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vif_metrics">The opaque_ref of the given vif_metrics</param>
/// <param name="_other_config">New value to set</param>
public static void set_other_config(Session session, string _vif_metrics, Dictionary<string, string> _other_config)
{
session.proxy.vif_metrics_set_other_config(session.uuid, (_vif_metrics != null) ? _vif_metrics : "", Maps.convert_to_proxy_string_string(_other_config)).parse();
}
/// <summary>
/// Add the given key-value pair to the other_config field of the given VIF_metrics.
/// First published in XenServer 5.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vif_metrics">The opaque_ref of the given vif_metrics</param>
/// <param name="_key">Key to add</param>
/// <param name="_value">Value to add</param>
public static void add_to_other_config(Session session, string _vif_metrics, string _key, string _value)
{
session.proxy.vif_metrics_add_to_other_config(session.uuid, (_vif_metrics != null) ? _vif_metrics : "", (_key != null) ? _key : "", (_value != null) ? _value : "").parse();
}
/// <summary>
/// Remove the given key and its corresponding value from the other_config field of the given VIF_metrics. If the key is not in that Map, then do nothing.
/// First published in XenServer 5.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_vif_metrics">The opaque_ref of the given vif_metrics</param>
/// <param name="_key">Key to remove</param>
public static void remove_from_other_config(Session session, string _vif_metrics, string _key)
{
session.proxy.vif_metrics_remove_from_other_config(session.uuid, (_vif_metrics != null) ? _vif_metrics : "", (_key != null) ? _key : "").parse();
}
/// <summary>
/// Return a list of all the VIF_metrics instances known to the system.
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
public static List<XenRef<VIF_metrics>> get_all(Session session)
{
return XenRef<VIF_metrics>.Create(session.proxy.vif_metrics_get_all(session.uuid).parse());
}
/// <summary>
/// Get all the VIF_metrics Records at once, in a single XML RPC call
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
public static Dictionary<XenRef<VIF_metrics>, VIF_metrics> get_all_records(Session session)
{
return XenRef<VIF_metrics>.Create<Proxy_VIF_metrics>(session.proxy.vif_metrics_get_all_records(session.uuid).parse());
}
/// <summary>
/// Unique identifier/object reference
/// </summary>
public virtual string uuid
{
get { return _uuid; }
set
{
if (!Helper.AreEqual(value, _uuid))
{
_uuid = value;
Changed = true;
NotifyPropertyChanged("uuid");
}
}
}
private string _uuid;
/// <summary>
/// Read bandwidth (KiB/s)
/// </summary>
public virtual double io_read_kbs
{
get { return _io_read_kbs; }
set
{
if (!Helper.AreEqual(value, _io_read_kbs))
{
_io_read_kbs = value;
Changed = true;
NotifyPropertyChanged("io_read_kbs");
}
}
}
private double _io_read_kbs;
/// <summary>
/// Write bandwidth (KiB/s)
/// </summary>
public virtual double io_write_kbs
{
get { return _io_write_kbs; }
set
{
if (!Helper.AreEqual(value, _io_write_kbs))
{
_io_write_kbs = value;
Changed = true;
NotifyPropertyChanged("io_write_kbs");
}
}
}
private double _io_write_kbs;
/// <summary>
/// Time at which this information was last updated
/// </summary>
public virtual DateTime last_updated
{
get { return _last_updated; }
set
{
if (!Helper.AreEqual(value, _last_updated))
{
_last_updated = value;
Changed = true;
NotifyPropertyChanged("last_updated");
}
}
}
private DateTime _last_updated;
/// <summary>
/// additional configuration
/// First published in XenServer 5.0.
/// </summary>
public virtual Dictionary<string, string> other_config
{
get { return _other_config; }
set
{
if (!Helper.AreEqual(value, _other_config))
{
_other_config = value;
Changed = true;
NotifyPropertyChanged("other_config");
}
}
}
private Dictionary<string, string> _other_config;
}
}
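// Illustrative usage sketch, not part of the generated binding above. The `XenAPI` namespace name is
// an assumption (the binding's own namespace declaration falls outside this excerpt), `session` is
// assumed to be an already-authenticated Session, and the opaque reference is an example value.
// Each getter below is a separate XML-RPC round trip; get_all_records fetches everything at once.
namespace XenAPI
{
    using System;
    using System.Collections.Generic;

    internal static class VifMetricsUsageSketch
    {
        internal static void PrintMetrics(Session session, string vifMetricsRef)
        {
            double readKbs = VIF_metrics.get_io_read_kbs(session, vifMetricsRef);
            double writeKbs = VIF_metrics.get_io_write_kbs(session, vifMetricsRef);
            DateTime lastUpdated = VIF_metrics.get_last_updated(session, vifMetricsRef);
            Dictionary<string, string> otherConfig = VIF_metrics.get_other_config(session, vifMetricsRef);

            Console.WriteLine("read={0} KiB/s write={1} KiB/s updated={2:u} extra-keys={3}",
                readKbs, writeKbs, lastUpdated, otherConfig.Count);
        }
    }
}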
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.Generic;
using Microsoft.Build.Execution;
using Microsoft.Build.Shared;
using System;
using Microsoft.Build.Evaluation;
using Microsoft.Build.Construction;
using Microsoft.Build.UnitTests.BackEnd;
using TaskItem = Microsoft.Build.Execution.ProjectItemInstance.TaskItem;
using System.Xml;
using Microsoft.Build.Framework;
using System.IO;
using Xunit;
namespace Microsoft.Build.UnitTests.OM.Instance
{
/// <summary>
/// Tests for ProjectItemInstance.TaskItem internal members
/// </summary>
public class TaskItem_Tests
{
/// <summary>
/// Test serialization
/// </summary>
[Fact]
public void Serialization()
{
TaskItem item = new TaskItem("foo", "bar.proj");
item.SetMetadata("a", "b");
TranslationHelpers.GetWriteTranslator().Translate(ref item, TaskItem.FactoryForDeserialization);
TaskItem deserializedItem = null;
TranslationHelpers.GetReadTranslator().Translate(ref deserializedItem, TaskItem.FactoryForDeserialization);
Assert.Equal(item.ItemSpec, deserializedItem.ItemSpec);
Assert.Equal(item.MetadataCount, deserializedItem.MetadataCount);
Assert.Equal(item.GetMetadata("a"), deserializedItem.GetMetadata("a"));
Assert.Equal(item.GetMetadata(FileUtilities.ItemSpecModifiers.DefiningProjectFullPath), deserializedItem.GetMetadata(FileUtilities.ItemSpecModifiers.DefiningProjectFullPath));
}
/// <summary>
/// Ensure an item is equivalent to itself.
/// </summary>
[Fact]
public void TestEquivalenceIdentity()
{
TaskItem left = new TaskItem("foo", "bar.proj");
Assert.True(left.Equals(left));
}
/// <summary>
/// Ensure two items with the same item spec and no metadata are equivalent
/// </summary>
[Fact]
public void TestEquivalence()
{
TaskItem left = new TaskItem("foo", "bar.proj");
TaskItem right = new TaskItem("foo", "bar.proj");
Assert.Equal(left, right);
Assert.Equal(right, left);
}
/// <summary>
/// Ensure two items with the same custom metadata are equivalent
/// </summary>
[Fact]
public void TestEquivalenceWithCustomMetadata()
{
TaskItem left = new TaskItem("foo", "bar.proj");
left.SetMetadata("a", "b");
TaskItem right = new TaskItem("foo", "bar.proj");
right.SetMetadata("a", "b");
Assert.Equal(left, right);
Assert.Equal(right, left);
}
/// <summary>
/// Ensure two items with different custom metadata values are not equivalent
/// </summary>
[Fact]
public void TestInequivalenceWithDifferentCustomMetadataValues()
{
TaskItem left = new TaskItem("foo", "bar.proj");
left.SetMetadata("a", "b");
TaskItem right = new TaskItem("foo", "bar.proj");
right.SetMetadata("a", "c");
Assert.NotEqual(left, right);
Assert.NotEqual(right, left);
}
/// <summary>
/// Ensure two items with different custom metadata keys are not equivalent
/// </summary>
[Fact]
public void TestInequivalenceWithDifferentCustomMetadataKeys()
{
TaskItem left = new TaskItem("foo", "bar.proj");
left.SetMetadata("a", "b");
TaskItem right = new TaskItem("foo", "bar.proj");
right.SetMetadata("b", "b");
Assert.NotEqual(left, right);
Assert.NotEqual(right, left);
}
/// <summary>
/// Ensure two items with different numbers of custom metadata are not equivalent
/// </summary>
[Fact]
public void TestInequivalenceWithDifferentCustomMetadataCount()
{
TaskItem left = new TaskItem("foo", "bar.proj");
left.SetMetadata("a", "b");
TaskItem right = new TaskItem("foo", "bar.proj");
Assert.NotEqual(left, right);
Assert.NotEqual(right, left);
}
/// <summary>
/// Ensure two items with different numbers of custom metadata are not equivalent
/// </summary>
[Fact]
public void TestInequivalenceWithDifferentCustomMetadataCount2()
{
TaskItem left = new TaskItem("foo", "bar.proj");
left.SetMetadata("a", "b");
TaskItem right = new TaskItem("foo", "bar.proj");
right.SetMetadata("a", "b");
right.SetMetadata("c", "d");
Assert.NotEqual(left, right);
Assert.NotEqual(right, left);
}
/// <summary>
/// Ensure when cloning an Item that the clone is equivalent to the parent item and that they are not the same object.
/// </summary>
[Fact]
public void TestDeepClone()
{
TaskItem parent = new TaskItem("foo", "bar.proj");
parent.SetMetadata("a", "b");
parent.SetMetadata("c", "d");
TaskItem clone = parent.DeepClone();
Assert.True(parent.Equals(clone)); // "The parent and the clone should be equal"
Assert.False(object.ReferenceEquals(parent, clone)); // "The parent and the child should not be the same object"
}
/// <summary>
/// Flushing an item through a task should not mess up special characters on the metadata.
/// </summary>
[Fact]
public void Escaping1()
{
string content = ObjectModelHelpers.CleanupFileContents(@"
<Project ToolsVersion='msbuilddefaulttoolsversion' xmlns='msbuildnamespace'>
<ItemGroup>
<i Include='i1'>
<m>i1m1;i1m2</m>
</i>
<j Include='j1'>
<m>j1m1;j1m2</m>
</j>
</ItemGroup>
<Target Name='Build'>
<CallTarget Targets='%(i.m)'/>
<CreateItem Include='@(j)'>
<Output TaskParameter='Include' ItemName='j2'/>
</CreateItem>
<CallTarget Targets='%(j2.m)'/>
</Target>
<Target Name='i1m1'>
<Warning Text='[i1m1]'/>
</Target>
<Target Name='i1m2'>
<Warning Text='[i1m2]'/>
</Target>
<Target Name='j1m1'>
<Warning Text='[j1m1]'/>
</Target>
<Target Name='j1m2'>
<Warning Text='[j1m2]'/>
</Target>
</Project>
");
ProjectRootElement xml = ProjectRootElement.Create(XmlTextReader.Create(new StringReader(content)));
Project project = new Project(xml);
MockLogger logger = new MockLogger();
project.Build("Build", new ILogger[] { logger });
logger.AssertLogContains("[i1m1]");
logger.AssertLogContains("[i1m2]");
logger.AssertLogContains("[j1m1]");
logger.AssertLogContains("[j1m2]");
}
/// <summary>
/// Flushing an item through a task run in the task host also should not mess up special characters on the metadata.
/// </summary>
#if RUNTIME_TYPE_NETCORE || MONO
[Fact(Skip = "FEATURE: TASKHOST")]
#else
[Fact]
#endif
public void Escaping2()
{
string content = ObjectModelHelpers.CleanupFileContents(@"
<Project ToolsVersion='msbuilddefaulttoolsversion' xmlns='msbuildnamespace'>
<UsingTask TaskName='CreateItem' AssemblyFile='$(MSBuildToolsPath)\Microsoft.Build.Tasks.Core.dll' TaskFactory='TaskHostFactory' />
<ItemGroup>
<i Include='i1'>
<m>i1m1;i1m2</m>
</i>
<j Include='j1'>
<m>j1m1;j1m2</m>
</j>
</ItemGroup>
<Target Name='Build'>
<CallTarget Targets='%(i.m)'/>
<CreateItem Include='@(j)'>
<Output TaskParameter='Include' ItemName='j2'/>
</CreateItem>
<CallTarget Targets='%(j2.m)'/>
</Target>
<Target Name='i1m1'>
<Warning Text='[i1m1]'/>
</Target>
<Target Name='i1m2'>
<Warning Text='[i1m2]'/>
</Target>
<Target Name='j1m1'>
<Warning Text='[j1m1]'/>
</Target>
<Target Name='j1m2'>
<Warning Text='[j1m2]'/>
</Target>
</Project>
");
ProjectRootElement xml = ProjectRootElement.Create(XmlTextReader.Create(new StringReader(content)));
Project project = new Project(xml);
MockLogger logger = new MockLogger();
project.Build("Build", new ILogger[] { logger });
logger.AssertLogContains("[i1m1]");
logger.AssertLogContains("[i1m2]");
logger.AssertLogContains("[j1m1]");
logger.AssertLogContains("[j1m2]");
}
/// <summary>
/// Flushing an item through a task run in the task host also should not mess up the escaping of the itemspec either.
/// </summary>
#if RUNTIME_TYPE_NETCORE || MONO
[Fact(Skip = "FEATURE: TASKHOST")]
#else
[Fact]
#endif
public void Escaping3()
{
string content = ObjectModelHelpers.CleanupFileContents(@"
<Project ToolsVersion='msbuilddefaulttoolsversion' xmlns='msbuildnamespace'>
<UsingTask TaskName='AssignCulture' AssemblyFile='$(MSBuildToolsPath)\Microsoft.Build.Tasks.Core.dll' TaskFactory='TaskHostFactory' />
<ItemGroup>
<i Include='i1%252ai2' />
</ItemGroup>
<Target Name='Build'>
<AssignCulture Files='@(i)'>
<Output TaskParameter='AssignedFiles' ItemName='i1'/>
</AssignCulture>
<Message Text='@(i1)'/>
</Target>
</Project>
");
ProjectRootElement xml = ProjectRootElement.Create(XmlTextReader.Create(new StringReader(content)));
Project project = new Project(xml);
MockLogger logger = new MockLogger();
project.Build("Build", new ILogger[] { logger });
logger.AssertLogContains("i1%2ai2");
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
namespace System.Xml.Linq
{
internal class XNodeBuilder : XmlWriter
{
private List<object> _content;
private XContainer _parent;
private XName _attrName;
private string _attrValue;
private readonly XContainer _root;
public XNodeBuilder(XContainer container)
{
_root = container;
}
public override XmlWriterSettings Settings
{
get
{
XmlWriterSettings settings = new XmlWriterSettings();
settings.ConformanceLevel = ConformanceLevel.Auto;
return settings;
}
}
public override WriteState WriteState
{
get { throw new NotSupportedException(); } // nop
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
Close();
}
}
public override void Close()
{
_root.Add(_content);
}
public override void Flush()
{
}
public override string LookupPrefix(string namespaceName)
{
throw new NotSupportedException(); // nop
}
public override void WriteBase64(byte[] buffer, int index, int count)
{
throw new NotSupportedException(SR.NotSupported_WriteBase64);
}
public override void WriteCData(string text)
{
AddNode(new XCData(text));
}
public override void WriteCharEntity(char ch)
{
AddString(char.ToString(ch));
}
public override void WriteChars(char[] buffer, int index, int count)
{
AddString(new string(buffer, index, count));
}
public override void WriteComment(string text)
{
AddNode(new XComment(text));
}
public override void WriteDocType(string name, string pubid, string sysid, string subset)
{
AddNode(new XDocumentType(name, pubid, sysid, subset));
}
public override void WriteEndAttribute()
{
XAttribute a = new XAttribute(_attrName, _attrValue);
_attrName = null;
_attrValue = null;
if (_parent != null)
{
_parent.Add(a);
}
else
{
Add(a);
}
}
public override void WriteEndDocument()
{
}
public override void WriteEndElement()
{
_parent = ((XElement)_parent).parent;
}
public override void WriteEntityRef(string name)
{
switch (name)
{
case "amp":
AddString("&");
break;
case "apos":
AddString("'");
break;
case "gt":
AddString(">");
break;
case "lt":
AddString("<");
break;
case "quot":
AddString("\"");
break;
default:
throw new NotSupportedException(SR.NotSupported_WriteEntityRef);
}
}
public override void WriteFullEndElement()
{
XElement e = (XElement)_parent;
if (e.IsEmpty)
{
e.Add(string.Empty);
}
_parent = e.parent;
}
public override void WriteProcessingInstruction(string name, string text)
{
if (name == "xml")
{
return;
}
AddNode(new XProcessingInstruction(name, text));
}
public override void WriteRaw(char[] buffer, int index, int count)
{
AddString(new string(buffer, index, count));
}
public override void WriteRaw(string data)
{
AddString(data);
}
public override void WriteStartAttribute(string prefix, string localName, string namespaceName)
{
if (prefix == null) throw new ArgumentNullException(nameof(prefix));
_attrName = XNamespace.Get(prefix.Length == 0 ? string.Empty : namespaceName).GetName(localName);
_attrValue = string.Empty;
}
public override void WriteStartDocument()
{
}
public override void WriteStartDocument(bool standalone)
{
}
public override void WriteStartElement(string prefix, string localName, string namespaceName)
{
AddNode(new XElement(XNamespace.Get(namespaceName).GetName(localName)));
}
public override void WriteString(string text)
{
AddString(text);
}
public override void WriteSurrogateCharEntity(char lowCh, char highCh)
{
ReadOnlySpan<char> entity = stackalloc char[] { highCh, lowCh };
AddString(new string(entity));
}
public override void WriteValue(DateTimeOffset value)
{
// For compatibility with custom writers, XmlWriter writes DateTimeOffset as DateTime.
// Our internal writers should use the DateTimeOffset-String conversion from XmlConvert.
WriteString(XmlConvert.ToString(value));
}
public override void WriteWhitespace(string ws)
{
AddString(ws);
}
private void Add(object o)
{
if (_content == null)
{
_content = new List<object>();
}
_content.Add(o);
}
private void AddNode(XNode n)
{
if (_parent != null)
{
_parent.Add(n);
}
else
{
Add(n);
}
XContainer c = n as XContainer;
if (c != null)
{
_parent = c;
}
}
private void AddString(string s)
{
if (s == null)
{
return;
}
if (_attrValue != null)
{
_attrValue += s;
}
else if (_parent != null)
{
_parent.Add(s);
}
else
{
Add(s);
}
}
}
}
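// Illustrative sketch, not part of the file above: XNodeBuilder is internal and is normally reached
// through the public XContainer.CreateWriter(), which returns an XmlWriter whose writes are buffered
// as XNodes and added to the container when the writer is closed. The element, attribute and text
// values here are arbitrary examples.
namespace XLinqWriterExamples
{
    using System;
    using System.Xml;
    using System.Xml.Linq;

    internal static class XNodeBuilderUsageSketch
    {
        internal static void Run()
        {
            XElement root = new XElement("root");
            using (XmlWriter writer = root.CreateWriter())
            {
                writer.WriteStartElement("item");
                writer.WriteAttributeString("id", "42"); // routed through WriteStartAttribute/WriteEndAttribute
                writer.WriteString("hello");             // becomes text content via AddString
                writer.WriteEndElement();
            } // Dispose -> Close() -> the buffered content is added to 'root'

            Console.WriteLine(root); // <root><item id="42">hello</item></root>
        }
    }
}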
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if ES_BUILD_STANDALONE
using System;
using System.Diagnostics;
#endif
using System.Collections.Generic;
using System.Threading;
#if ES_BUILD_STANDALONE
namespace Microsoft.Diagnostics.Tracing
#else
namespace System.Diagnostics.Tracing
#endif
{
internal class CounterGroup
{
private readonly EventSource _eventSource;
private readonly List<DiagnosticCounter> _counters;
private static readonly object s_counterGroupLock = new object();
internal CounterGroup(EventSource eventSource)
{
_eventSource = eventSource;
_counters = new List<DiagnosticCounter>();
RegisterCommandCallback();
}
internal void Add(DiagnosticCounter eventCounter)
{
lock (s_counterGroupLock) // Lock the CounterGroup
_counters.Add(eventCounter);
}
internal void Remove(DiagnosticCounter eventCounter)
{
lock (s_counterGroupLock) // Lock the CounterGroup
_counters.Remove(eventCounter);
}
#region EventSource Command Processing
private void RegisterCommandCallback()
{
_eventSource.EventCommandExecuted += OnEventSourceCommand;
}
private void OnEventSourceCommand(object? sender, EventCommandEventArgs e)
{
if (e.Command == EventCommand.Enable || e.Command == EventCommand.Update)
{
Debug.Assert(e.Arguments != null);
if (e.Arguments.TryGetValue("EventCounterIntervalSec", out string? valueStr) && float.TryParse(valueStr, out float value))
{
lock (s_counterGroupLock) // Lock the CounterGroup
{
EnableTimer(value);
}
}
}
else if (e.Command == EventCommand.Disable)
{
lock (s_counterGroupLock)
{
DisableTimer();
}
}
}
#endregion // EventSource Command Processing
#region Global CounterGroup Array management
// We need eventCounters to 'attach' themselves to a particular EventSource.
// this table provides the mapping from EventSource -> CounterGroup
// which represents this 'attached' information.
private static WeakReference<CounterGroup>[]? s_counterGroups;
private static void EnsureEventSourceIndexAvailable(int eventSourceIndex)
{
Debug.Assert(Monitor.IsEntered(s_counterGroupLock));
if (CounterGroup.s_counterGroups == null)
{
CounterGroup.s_counterGroups = new WeakReference<CounterGroup>[eventSourceIndex + 1];
}
else if (eventSourceIndex >= CounterGroup.s_counterGroups.Length)
{
WeakReference<CounterGroup>[] newCounterGroups = new WeakReference<CounterGroup>[eventSourceIndex + 1];
Array.Copy(CounterGroup.s_counterGroups, 0, newCounterGroups, 0, CounterGroup.s_counterGroups.Length);
CounterGroup.s_counterGroups = newCounterGroups;
}
}
internal static CounterGroup GetCounterGroup(EventSource eventSource)
{
lock (s_counterGroupLock)
{
int eventSourceIndex = EventListener.EventSourceIndex(eventSource);
EnsureEventSourceIndexAvailable(eventSourceIndex);
Debug.Assert(s_counterGroups != null);
WeakReference<CounterGroup> weakRef = CounterGroup.s_counterGroups[eventSourceIndex];
CounterGroup? ret = null;
if (weakRef == null || !weakRef.TryGetTarget(out ret))
{
ret = new CounterGroup(eventSource);
CounterGroup.s_counterGroups[eventSourceIndex] = new WeakReference<CounterGroup>(ret);
}
return ret;
}
}
#endregion // Global CounterGroup Array management
#region Timer Processing
private DateTime _timeStampSinceCollectionStarted;
private int _pollingIntervalInMilliseconds;
private DateTime _nextPollingTimeStamp;
private void EnableTimer(float pollingIntervalInSeconds)
{
Debug.Assert(Monitor.IsEntered(s_counterGroupLock));
if (pollingIntervalInSeconds <= 0)
{
_pollingIntervalInMilliseconds = 0;
}
else if (_pollingIntervalInMilliseconds == 0 || pollingIntervalInSeconds * 1000 < _pollingIntervalInMilliseconds)
{
_pollingIntervalInMilliseconds = (int)(pollingIntervalInSeconds * 1000);
ResetCounters(); // Reset statistics for counters before we start the thread.
_timeStampSinceCollectionStarted = DateTime.UtcNow;
// Don't capture the current ExecutionContext and its AsyncLocals onto the timer causing them to live forever
bool restoreFlow = false;
try
{
if (!ExecutionContext.IsFlowSuppressed())
{
ExecutionContext.SuppressFlow();
restoreFlow = true;
}
_nextPollingTimeStamp = DateTime.UtcNow + new TimeSpan(0, 0, (int)pollingIntervalInSeconds);
// Create the polling thread and init all the shared state if needed
if (s_pollingThread == null)
{
s_pollingThreadSleepEvent = new AutoResetEvent(false);
s_counterGroupEnabledList = new List<CounterGroup>();
s_pollingThread = new Thread(PollForValues) { IsBackground = true };
s_pollingThread.Start();
}
if (!s_counterGroupEnabledList!.Contains(this))
{
s_counterGroupEnabledList.Add(this);
}
// notify the polling thread that the polling interval may have changed and the sleep should
// be recomputed
s_pollingThreadSleepEvent!.Set();
}
finally
{
// Restore the current ExecutionContext
if (restoreFlow)
ExecutionContext.RestoreFlow();
}
}
}
private void DisableTimer()
{
_pollingIntervalInMilliseconds = 0;
s_counterGroupEnabledList?.Remove(this);
}
private void ResetCounters()
{
lock (s_counterGroupLock) // Lock the CounterGroup
{
foreach (DiagnosticCounter counter in _counters)
{
if (counter is IncrementingEventCounter ieCounter)
{
ieCounter.UpdateMetric();
}
else if (counter is IncrementingPollingCounter ipCounter)
{
ipCounter.UpdateMetric();
}
else if (counter is EventCounter eCounter)
{
eCounter.ResetStatistics();
}
}
}
}
private void OnTimer()
{
Debug.Assert(Monitor.IsEntered(s_counterGroupLock));
if (_eventSource.IsEnabled())
{
DateTime now = DateTime.UtcNow;
TimeSpan elapsed = now - _timeStampSinceCollectionStarted;
foreach (DiagnosticCounter counter in _counters)
{
counter.WritePayload((float)elapsed.TotalSeconds, _pollingIntervalInMilliseconds);
}
_timeStampSinceCollectionStarted = now;
do
{
_nextPollingTimeStamp += new TimeSpan(0, 0, 0, 0, _pollingIntervalInMilliseconds);
} while (_nextPollingTimeStamp <= now);
}
}
private static Thread? s_pollingThread;
// Used for sleeping for a certain amount of time while allowing the thread to be woken up
private static AutoResetEvent? s_pollingThreadSleepEvent;
private static List<CounterGroup>? s_counterGroupEnabledList;
private static void PollForValues()
{
AutoResetEvent? sleepEvent = null;
while (true)
{
int sleepDurationInMilliseconds = int.MaxValue;
lock (s_counterGroupLock)
{
sleepEvent = s_pollingThreadSleepEvent;
foreach (CounterGroup counterGroup in s_counterGroupEnabledList!)
{
DateTime now = DateTime.UtcNow;
if (counterGroup._nextPollingTimeStamp < now + new TimeSpan(0, 0, 0, 0, 1))
{
counterGroup.OnTimer();
}
int millisecondsTillNextPoll = (int)((counterGroup._nextPollingTimeStamp - now).TotalMilliseconds);
millisecondsTillNextPoll = Math.Max(1, millisecondsTillNextPoll);
sleepDurationInMilliseconds = Math.Min(sleepDurationInMilliseconds, millisecondsTillNextPoll);
}
}
if (sleepDurationInMilliseconds == int.MaxValue)
{
sleepDurationInMilliseconds = -1; // WaitOne uses -1 to mean infinite
}
sleepEvent?.WaitOne(sleepDurationInMilliseconds);
}
}
#endregion // Timer Processing
}
}
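// Illustrative sketch, not part of the file above: CounterGroup is internal plumbing behind the public
// counter types. Enabling an EventSource with an "EventCounterIntervalSec" argument is what reaches
// OnEventSourceCommand/EnableTimer above and starts the shared polling thread. The source name
// "Example-Counters", the counter name and the 1-second interval are example values, and this uses the
// public System.Diagnostics.Tracing surface rather than the internal types in this file.
namespace EventCounterExamples
{
    using System.Collections.Generic;
    using System.Diagnostics.Tracing;

    [EventSource(Name = "Example-Counters")]
    internal sealed class ExampleEventSource : EventSource
    {
        public static readonly ExampleEventSource Log = new ExampleEventSource();
        private readonly EventCounter _requestTime;

        private ExampleEventSource()
        {
            _requestTime = new EventCounter("request-time", this);
        }

        public void ReportRequest(double milliseconds)
        {
            _requestTime.WriteMetric((float)milliseconds); // aggregated and flushed by the polling thread
        }
    }

    internal sealed class CounterEnablingListener : EventListener
    {
        protected override void OnEventSourceCreated(EventSource eventSource)
        {
            if (eventSource.Name == "Example-Counters")
            {
                // The interval string is parsed in OnEventSourceCommand and passed to EnableTimer.
                EnableEvents(eventSource, EventLevel.LogAlways, EventKeywords.All,
                    new Dictionary<string, string> { ["EventCounterIntervalSec"] = "1" });
            }
        }
    }
}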
| |
//
// Copyright (c) 2003-2006 Jaroslaw Kowalski <[email protected]>
// Copyright (c) 2006-2014 Piotr Fusik <[email protected]>
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
using Sooda.Schema;
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Xml;
using System.Xml.Serialization;
namespace Sooda.CodeGen
{
public enum PrimitiveRepresentation
{
Boxed,
SqlType,
Raw,
Nullable,
RawWithIsNull
}
public class ExternalProjectInfo
{
public ExternalProjectInfo()
{
}
public ExternalProjectInfo(string projectType)
{
this.ProjectType = projectType;
}
public ExternalProjectInfo(string projectType, string projectFile)
{
this.ProjectType = projectType;
this.ProjectFile = projectFile;
}
[XmlIgnore]
public IProjectFile ProjectProvider;
[XmlIgnore]
public string ActualProjectFile;
[XmlAttribute("type")]
public string ProjectType;
[XmlAttribute("file")]
public string ProjectFile;
}
[XmlRoot("sooda-project", Namespace = "http://www.sooda.org/schemas/SoodaProject.xsd")]
public class SoodaProject
{
public static string NamespaceURI = "http://www.sooda.org/schemas/SoodaProject.xsd";
public static Stream GetSoodaProjectXsdStream()
{
Assembly ass = typeof(SoodaProject).Assembly;
foreach (string name in ass.GetManifestResourceNames())
{
if (name.EndsWith(".SoodaProject.xsd"))
{
return ass.GetManifestResourceStream(name);
}
}
throw new SoodaSchemaException("SoodaProject.xsd not embedded in Sooda.CodeGen assembly");
}
public static XmlReader GetSoodaProjectXsdStreamXmlReader()
{
return new XmlTextReader(GetSoodaProjectXsdStream());
}
[XmlElement("schema-file")]
public string SchemaFile;
[XmlElement("language")]
public string Language = "c#";
[XmlElement("output-assembly")]
public string AssemblyName;
[XmlElement("output-namespace")]
public string OutputNamespace;
[XmlElement("output-path")]
public string OutputPath;
[XmlElement("output-partial-path")]
public string OutputPartialPath;
[XmlElement("nullable-representation")]
public PrimitiveRepresentation NullableRepresentation = PrimitiveRepresentation.SqlType;
[XmlElement("not-null-representation")]
public PrimitiveRepresentation NotNullRepresentation = PrimitiveRepresentation.Raw;
[XmlElement("null-propagation")]
[System.ComponentModel.DefaultValue(false)]
public bool NullPropagation = false;
[XmlElement("base-class-name")]
public string BaseClassName = null;
[XmlElement("with-typed-queries")]
[System.ComponentModel.DefaultValue(true)]
public bool WithTypedQueryWrappers = true;
[XmlElement("with-soql")]
[System.ComponentModel.DefaultValue(true)]
public bool WithSoql = true;
[XmlElement("file-per-namespace")]
[System.ComponentModel.DefaultValue(false)]
public bool FilePerNamespace = false;
[XmlElement("loader-class")]
[System.ComponentModel.DefaultValue(false)]
public bool LoaderClass = false;
[XmlElement("stubs-compiled-separately")]
[System.ComponentModel.DefaultValue(false)]
public bool SeparateStubs = false;
[XmlElement("embedded-schema-type")]
public EmbedSchema EmbedSchema = EmbedSchema.Binary;
[XmlArray("external-projects")]
[XmlArrayItem("project")]
public List<ExternalProjectInfo> ExternalProjects = new List<ExternalProjectInfo>();
[XmlElement("use-partial")]
[System.ComponentModel.DefaultValue(false)]
public bool UsePartial = false;
[XmlElement("partial-suffix")]
public string PartialSuffix = "";
public void WriteTo(string fileName)
{
using (FileStream fs = File.Create(fileName))
{
WriteTo(fs);
}
}
public void WriteTo(Stream stream)
{
using (StreamWriter sw = new StreamWriter(stream))
{
WriteTo(sw);
}
}
public void WriteTo(TextWriter tw)
{
XmlTextWriter xtw = new XmlTextWriter(tw);
xtw.Indentation = 4;
xtw.Formatting = Formatting.Indented;
WriteTo(xtw);
}
public void WriteTo(XmlTextWriter xtw)
{
XmlSerializer ser = new XmlSerializer(typeof(SoodaProject));
XmlSerializerNamespaces ns = new XmlSerializerNamespaces();
ns.Add(String.Empty, "http://www.sooda.org/schemas/SoodaProject.xsd");
ser.Serialize(xtw, this, ns);
xtw.Flush();
}
public static SoodaProject LoadFrom(string fileName)
{
using (FileStream fs = File.OpenRead(fileName))
{
return LoadFrom(fs);
}
}
public static SoodaProject LoadFrom(Stream stream)
{
using (StreamReader sr = new StreamReader(stream))
{
return LoadFrom(sr);
}
}
public static SoodaProject LoadFrom(TextReader reader)
{
XmlTextReader xmlreader = new XmlTextReader(reader);
return LoadFrom(xmlreader);
}
public static SoodaProject LoadFrom(XmlTextReader reader)
{
#if SOODA_NO_VALIDATING_READER
XmlSerializer ser = new XmlSerializer(typeof(SoodaProject));
SoodaProject project = (SoodaProject)ser.Deserialize(reader);
#else
XmlReaderSettings readerSettings = new XmlReaderSettings();
readerSettings.ValidationType = ValidationType.Schema;
readerSettings.Schemas.Add(NamespaceURI, GetSoodaProjectXsdStreamXmlReader());
XmlReader validatingReader = XmlReader.Create(reader, readerSettings);
XmlSerializer ser = new XmlSerializer(typeof(SoodaProject));
SoodaProject project = (SoodaProject)ser.Deserialize(validatingReader);
#endif
return project;
}
}
}
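// Illustrative sketch, not part of the file above: it round-trips a project definition through the
// XmlSerializer-based WriteTo/LoadFrom helpers. The schema file, assembly and namespace names, the
// "csproj" project type and the "MyProject.soodaprj" file name are all arbitrary example values.
namespace SoodaProjectExamples
{
    using System;
    using Sooda.CodeGen;

    internal static class SoodaProjectSketch
    {
        internal static void Run()
        {
            SoodaProject project = new SoodaProject
            {
                SchemaFile = "SoodaSchema.xml",
                AssemblyName = "MyProject.Objects",
                OutputNamespace = "MyProject.Objects",
                OutputPath = "Generated",
                NullableRepresentation = PrimitiveRepresentation.Nullable
            };
            project.ExternalProjects.Add(new ExternalProjectInfo("csproj", "MyProject.Objects.csproj"));

            project.WriteTo("MyProject.soodaprj"); // serialized under the SoodaProject.xsd namespace
            SoodaProject reloaded = SoodaProject.LoadFrom("MyProject.soodaprj"); // validated against the embedded XSD unless SOODA_NO_VALIDATING_READER is defined
            Console.WriteLine(reloaded.OutputNamespace);
        }
    }
}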
| |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
#if !(PORTABLE || PORTABLE40 || NET35 || NET20)
#endif
#if NET20
using Util.Json.Utilities.LinqBridge;
#else
#endif
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Numerics;
using System.Reflection;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters;
using System.Text;
namespace Json.Net.Utilities
{
#if (NETFX_CORE || PORTABLE || PORTABLE40)
internal enum MemberTypes
{
Property,
Field,
Event,
Method,
Other
}
#endif
#if NETFX_CORE || PORTABLE
[Flags]
internal enum BindingFlags
{
Default = 0,
IgnoreCase = 1,
DeclaredOnly = 2,
Instance = 4,
Static = 8,
Public = 16,
NonPublic = 32,
FlattenHierarchy = 64,
InvokeMethod = 256,
CreateInstance = 512,
GetField = 1024,
SetField = 2048,
GetProperty = 4096,
SetProperty = 8192,
PutDispProperty = 16384,
ExactBinding = 65536,
PutRefDispProperty = 32768,
SuppressChangeType = 131072,
OptionalParamBinding = 262144,
IgnoreReturn = 16777216
}
#endif
internal static class ReflectionUtils
{
public static readonly Type[] EmptyTypes;
static ReflectionUtils()
{
#if !(NETFX_CORE || PORTABLE40 || PORTABLE)
EmptyTypes = Type.EmptyTypes;
#else
EmptyTypes = new Type[0];
#endif
}
public static bool IsVirtual(this PropertyInfo propertyInfo)
{
ValidationUtils.ArgumentNotNull(propertyInfo, "propertyInfo");
MethodInfo m = propertyInfo.GetGetMethod();
if (m != null && m.IsVirtual)
return true;
m = propertyInfo.GetSetMethod();
if (m != null && m.IsVirtual)
return true;
return false;
}
public static MethodInfo GetBaseDefinition(this PropertyInfo propertyInfo)
{
ValidationUtils.ArgumentNotNull(propertyInfo, "propertyInfo");
MethodInfo m = propertyInfo.GetGetMethod();
if (m != null)
return m.GetBaseDefinition();
m = propertyInfo.GetSetMethod();
if (m != null)
return m.GetBaseDefinition();
return null;
}
public static bool IsPublic(PropertyInfo property)
{
if (property.GetGetMethod() != null && property.GetGetMethod().IsPublic)
return true;
if (property.GetSetMethod() != null && property.GetSetMethod().IsPublic)
return true;
return false;
}
public static Type GetObjectType(object v)
{
return (v != null) ? v.GetType() : null;
}
public static string GetTypeName(Type t, FormatterAssemblyStyle assemblyFormat, SerializationBinder binder)
{
string fullyQualifiedTypeName;
#if !(NET20 || NET35)
if (binder != null)
{
string assemblyName, typeName;
binder.BindToName(t, out assemblyName, out typeName);
fullyQualifiedTypeName = typeName + (assemblyName == null ? "" : ", " + assemblyName);
}
else
{
fullyQualifiedTypeName = t.AssemblyQualifiedName;
}
#else
fullyQualifiedTypeName = t.AssemblyQualifiedName;
#endif
switch (assemblyFormat)
{
case FormatterAssemblyStyle.Simple:
return RemoveAssemblyDetails(fullyQualifiedTypeName);
case FormatterAssemblyStyle.Full:
return fullyQualifiedTypeName;
default:
throw new ArgumentOutOfRangeException();
}
}
private static string RemoveAssemblyDetails(string fullyQualifiedTypeName)
{
StringBuilder builder = new StringBuilder();
// loop through the type name and filter out qualified assembly details from nested type names
bool writingAssemblyName = false;
bool skippingAssemblyDetails = false;
for (int i = 0; i < fullyQualifiedTypeName.Length; i++)
{
char current = fullyQualifiedTypeName[i];
switch (current)
{
case '[':
writingAssemblyName = false;
skippingAssemblyDetails = false;
builder.Append(current);
break;
case ']':
writingAssemblyName = false;
skippingAssemblyDetails = false;
builder.Append(current);
break;
case ',':
if (!writingAssemblyName)
{
writingAssemblyName = true;
builder.Append(current);
}
else
{
skippingAssemblyDetails = true;
}
break;
default:
if (!skippingAssemblyDetails)
builder.Append(current);
break;
}
}
return builder.ToString();
}
public static bool HasDefaultConstructor(Type t, bool nonPublic)
{
ValidationUtils.ArgumentNotNull(t, "t");
if (t.IsValueType())
return true;
return (GetDefaultConstructor(t, nonPublic) != null);
}
public static ConstructorInfo GetDefaultConstructor(Type t)
{
return GetDefaultConstructor(t, false);
}
public static ConstructorInfo GetDefaultConstructor(Type t, bool nonPublic)
{
BindingFlags bindingFlags = BindingFlags.Instance | BindingFlags.Public;
if (nonPublic)
bindingFlags = bindingFlags | BindingFlags.NonPublic;
return t.GetConstructors(bindingFlags).SingleOrDefault(c => !c.GetParameters().Any());
}
public static bool IsNullable(Type t)
{
ValidationUtils.ArgumentNotNull(t, "t");
if (t.IsValueType())
return IsNullableType(t);
return true;
}
public static bool IsNullableType(Type t)
{
ValidationUtils.ArgumentNotNull(t, "t");
return (t.IsGenericType() && t.GetGenericTypeDefinition() == typeof(Nullable<>));
}
public static Type EnsureNotNullableType(Type t)
{
return (IsNullableType(t))
? Nullable.GetUnderlyingType(t)
: t;
}
public static bool IsGenericDefinition(Type type, Type genericInterfaceDefinition)
{
if (!type.IsGenericType())
return false;
Type t = type.GetGenericTypeDefinition();
return (t == genericInterfaceDefinition);
}
public static bool ImplementsGenericDefinition(Type type, Type genericInterfaceDefinition)
{
Type implementingType;
return ImplementsGenericDefinition(type, genericInterfaceDefinition, out implementingType);
}
public static bool ImplementsGenericDefinition(Type type, Type genericInterfaceDefinition, out Type implementingType)
{
ValidationUtils.ArgumentNotNull(type, "type");
ValidationUtils.ArgumentNotNull(genericInterfaceDefinition, "genericInterfaceDefinition");
if (!genericInterfaceDefinition.IsInterface() || !genericInterfaceDefinition.IsGenericTypeDefinition())
throw new ArgumentNullException("'{0}' is not a generic interface definition.".FormatWith(CultureInfo.InvariantCulture, genericInterfaceDefinition));
if (type.IsInterface())
{
if (type.IsGenericType())
{
Type interfaceDefinition = type.GetGenericTypeDefinition();
if (genericInterfaceDefinition == interfaceDefinition)
{
implementingType = type;
return true;
}
}
}
foreach (Type i in type.GetInterfaces())
{
if (i.IsGenericType())
{
Type interfaceDefinition = i.GetGenericTypeDefinition();
if (genericInterfaceDefinition == interfaceDefinition)
{
implementingType = i;
return true;
}
}
}
implementingType = null;
return false;
}
public static bool InheritsGenericDefinition(Type type, Type genericClassDefinition)
{
Type implementingType;
return InheritsGenericDefinition(type, genericClassDefinition, out implementingType);
}
public static bool InheritsGenericDefinition(Type type, Type genericClassDefinition, out Type implementingType)
{
ValidationUtils.ArgumentNotNull(type, "type");
ValidationUtils.ArgumentNotNull(genericClassDefinition, "genericClassDefinition");
if (!genericClassDefinition.IsClass() || !genericClassDefinition.IsGenericTypeDefinition())
throw new ArgumentNullException("'{0}' is not a generic class definition.".FormatWith(CultureInfo.InvariantCulture, genericClassDefinition));
return InheritsGenericDefinitionInternal(type, genericClassDefinition, out implementingType);
}
private static bool InheritsGenericDefinitionInternal(Type currentType, Type genericClassDefinition, out Type implementingType)
{
if (currentType.IsGenericType())
{
Type currentGenericClassDefinition = currentType.GetGenericTypeDefinition();
if (genericClassDefinition == currentGenericClassDefinition)
{
implementingType = currentType;
return true;
}
}
if (currentType.BaseType() == null)
{
implementingType = null;
return false;
}
return InheritsGenericDefinitionInternal(currentType.BaseType(), genericClassDefinition, out implementingType);
}
/// <summary>
/// Gets the type of the typed collection's items.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The type of the typed collection's items.</returns>
public static Type GetCollectionItemType(Type type)
{
ValidationUtils.ArgumentNotNull(type, "type");
Type genericListType;
if (type.IsArray)
{
return type.GetElementType();
}
if (ImplementsGenericDefinition(type, typeof(IEnumerable<>), out genericListType))
{
if (genericListType.IsGenericTypeDefinition())
throw new Exception("Type {0} is not a collection.".FormatWith(CultureInfo.InvariantCulture, type));
return genericListType.GetGenericArguments()[0];
}
if (typeof(IEnumerable).IsAssignableFrom(type))
{
return null;
}
throw new Exception("Type {0} is not a collection.".FormatWith(CultureInfo.InvariantCulture, type));
}
public static void GetDictionaryKeyValueTypes(Type dictionaryType, out Type keyType, out Type valueType)
{
ValidationUtils.ArgumentNotNull(dictionaryType, "type");
Type genericDictionaryType;
if (ImplementsGenericDefinition(dictionaryType, typeof(IDictionary<,>), out genericDictionaryType))
{
if (genericDictionaryType.IsGenericTypeDefinition())
throw new Exception("Type {0} is not a dictionary.".FormatWith(CultureInfo.InvariantCulture, dictionaryType));
Type[] dictionaryGenericArguments = genericDictionaryType.GetGenericArguments();
keyType = dictionaryGenericArguments[0];
valueType = dictionaryGenericArguments[1];
return;
}
if (typeof(IDictionary).IsAssignableFrom(dictionaryType))
{
keyType = null;
valueType = null;
return;
}
throw new Exception("Type {0} is not a dictionary.".FormatWith(CultureInfo.InvariantCulture, dictionaryType));
}
/// <summary>
/// Gets the member's underlying type.
/// </summary>
/// <param name="member">The member.</param>
/// <returns>The underlying type of the member.</returns>
public static Type GetMemberUnderlyingType(MemberInfo member)
{
ValidationUtils.ArgumentNotNull(member, "member");
switch (member.MemberType())
{
case MemberTypes.Field:
return ((FieldInfo)member).FieldType;
case MemberTypes.Property:
return ((PropertyInfo)member).PropertyType;
case MemberTypes.Event:
return ((EventInfo)member).EventHandlerType;
case MemberTypes.Method:
return ((MethodInfo)member).ReturnType;
default:
throw new ArgumentException("MemberInfo must be of type FieldInfo, PropertyInfo, EventInfo or MethodInfo", "member");
}
}
/// <summary>
/// Determines whether the member is an indexed property.
/// </summary>
/// <param name="member">The member.</param>
/// <returns>
/// <c>true</c> if the member is an indexed property; otherwise, <c>false</c>.
/// </returns>
public static bool IsIndexedProperty(MemberInfo member)
{
ValidationUtils.ArgumentNotNull(member, "member");
PropertyInfo propertyInfo = member as PropertyInfo;
if (propertyInfo != null)
return IsIndexedProperty(propertyInfo);
else
return false;
}
/// <summary>
/// Determines whether the property is an indexed property.
/// </summary>
/// <param name="property">The property.</param>
/// <returns>
/// <c>true</c> if the property is an indexed property; otherwise, <c>false</c>.
/// </returns>
public static bool IsIndexedProperty(PropertyInfo property)
{
ValidationUtils.ArgumentNotNull(property, "property");
return (property.GetIndexParameters().Length > 0);
}
/// <summary>
/// Gets the member's value on the object.
/// </summary>
/// <param name="member">The member.</param>
/// <param name="target">The target object.</param>
/// <returns>The member's value on the object.</returns>
public static object GetMemberValue(MemberInfo member, object target)
{
ValidationUtils.ArgumentNotNull(member, "member");
ValidationUtils.ArgumentNotNull(target, "target");
switch (member.MemberType())
{
case MemberTypes.Field:
return ((FieldInfo)member).GetValue(target);
case MemberTypes.Property:
try
{
return ((PropertyInfo)member).GetValue(target, null);
}
catch (TargetParameterCountException e)
{
throw new ArgumentException("MemberInfo '{0}' has index parameters".FormatWith(CultureInfo.InvariantCulture, member.Name), e);
}
default:
throw new ArgumentException("MemberInfo '{0}' is not of type FieldInfo or PropertyInfo".FormatWith(CultureInfo.InvariantCulture, CultureInfo.InvariantCulture, member.Name), "member");
}
}
/// <summary>
/// Sets the member's value on the target object.
/// </summary>
/// <param name="member">The member.</param>
/// <param name="target">The target.</param>
/// <param name="value">The value.</param>
public static void SetMemberValue(MemberInfo member, object target, object value)
{
ValidationUtils.ArgumentNotNull(member, "member");
ValidationUtils.ArgumentNotNull(target, "target");
switch (member.MemberType())
{
case MemberTypes.Field:
((FieldInfo)member).SetValue(target, value);
break;
case MemberTypes.Property:
((PropertyInfo)member).SetValue(target, value, null);
break;
default:
throw new ArgumentException("MemberInfo '{0}' must be of type FieldInfo or PropertyInfo".FormatWith(CultureInfo.InvariantCulture, member.Name), "member");
}
}
/// <summary>
/// Determines whether the specified MemberInfo can be read.
/// </summary>
/// <param name="member">The MemberInfo to determine whether can be read.</param>
/// /// <param name="nonPublic">if set to <c>true</c> then allow the member to be gotten non-publicly.</param>
/// <returns>
/// <c>true</c> if the specified MemberInfo can be read; otherwise, <c>false</c>.
/// </returns>
public static bool CanReadMemberValue(MemberInfo member, bool nonPublic)
{
switch (member.MemberType())
{
case MemberTypes.Field:
FieldInfo fieldInfo = (FieldInfo)member;
if (nonPublic)
return true;
else if (fieldInfo.IsPublic)
return true;
return false;
case MemberTypes.Property:
PropertyInfo propertyInfo = (PropertyInfo)member;
if (!propertyInfo.CanRead)
return false;
if (nonPublic)
return true;
return (propertyInfo.GetGetMethod(nonPublic) != null);
default:
return false;
}
}
/// <summary>
/// Determines whether the specified MemberInfo can be set.
/// </summary>
/// <param name="member">The MemberInfo to determine whether can be set.</param>
/// <param name="nonPublic">if set to <c>true</c> then allow the member to be set non-publicly.</param>
/// <param name="canSetReadOnly">if set to <c>true</c> then allow the member to be set if read-only.</param>
/// <returns>
/// <c>true</c> if the specified MemberInfo can be set; otherwise, <c>false</c>.
/// </returns>
public static bool CanSetMemberValue(MemberInfo member, bool nonPublic, bool canSetReadOnly)
{
switch (member.MemberType())
{
case MemberTypes.Field:
FieldInfo fieldInfo = (FieldInfo)member;
if (fieldInfo.IsInitOnly && !canSetReadOnly)
return false;
if (nonPublic)
return true;
else if (fieldInfo.IsPublic)
return true;
return false;
case MemberTypes.Property:
PropertyInfo propertyInfo = (PropertyInfo)member;
if (!propertyInfo.CanWrite)
return false;
if (nonPublic)
return true;
return (propertyInfo.GetSetMethod(nonPublic) != null);
default:
return false;
}
}
public static List<MemberInfo> GetFieldsAndProperties(Type type, BindingFlags bindingAttr)
{
List<MemberInfo> targetMembers = new List<MemberInfo>();
targetMembers.AddRange(GetFields(type, bindingAttr));
targetMembers.AddRange(GetProperties(type, bindingAttr));
// for some reason .NET returns multiple members when overriding a generic member on a base class
// http://social.msdn.microsoft.com/Forums/en-US/b5abbfee-e292-4a64-8907-4e3f0fb90cd9/reflection-overriden-abstract-generic-properties?forum=netfxbcl
// filter members to only return the override on the topmost class
// update: I think this is fixed in .NET 3.5 SP1 - leave this in for now...
List<MemberInfo> distinctMembers = new List<MemberInfo>(targetMembers.Count);
foreach (var groupedMember in targetMembers.GroupBy(m => m.Name))
{
int count = groupedMember.Count();
IList<MemberInfo> members = groupedMember.ToList();
if (count == 1)
{
distinctMembers.Add(members.First());
}
else
{
IList<MemberInfo> resolvedMembers = new List<MemberInfo>();
foreach (MemberInfo memberInfo in members)
{
// this is a bit hacky
// if the hiding property is hiding a base property and it is virtual
// then this ensures the derived property gets used
if (resolvedMembers.Count == 0)
resolvedMembers.Add(memberInfo);
else if (!IsOverridenGenericMember(memberInfo, bindingAttr) || memberInfo.Name == "Item")
resolvedMembers.Add(memberInfo);
}
distinctMembers.AddRange(resolvedMembers);
}
}
return distinctMembers;
}
private static bool IsOverridenGenericMember(MemberInfo memberInfo, BindingFlags bindingAttr)
{
if (memberInfo.MemberType() != MemberTypes.Property)
return false;
PropertyInfo propertyInfo = (PropertyInfo)memberInfo;
if (!IsVirtual(propertyInfo))
return false;
Type declaringType = propertyInfo.DeclaringType;
if (!declaringType.IsGenericType())
return false;
Type genericTypeDefinition = declaringType.GetGenericTypeDefinition();
if (genericTypeDefinition == null)
return false;
MemberInfo[] members = genericTypeDefinition.GetMember(propertyInfo.Name, bindingAttr);
if (members.Length == 0)
return false;
Type memberUnderlyingType = GetMemberUnderlyingType(members[0]);
if (!memberUnderlyingType.IsGenericParameter)
return false;
return true;
}
public static T GetAttribute<T>(object attributeProvider) where T : Attribute
{
return GetAttribute<T>(attributeProvider, true);
}
public static T GetAttribute<T>(object attributeProvider, bool inherit) where T : Attribute
{
T[] attributes = GetAttributes<T>(attributeProvider, inherit);
return (attributes != null) ? attributes.SingleOrDefault() : null;
}
#if !(NETFX_CORE || PORTABLE)
public static T[] GetAttributes<T>(object attributeProvider, bool inherit) where T : Attribute
{
return (T[])GetAttributes(attributeProvider, typeof(T), inherit);
}
public static Attribute[] GetAttributes(object attributeProvider, Type attributeType, bool inherit)
{
ValidationUtils.ArgumentNotNull(attributeProvider, "attributeProvider");
object provider = attributeProvider;
// http://hyperthink.net/blog/getcustomattributes-gotcha/
// ICustomAttributeProvider doesn't do inheritance
if (provider is Type)
return (Attribute[])((Type)provider).GetCustomAttributes(attributeType, inherit);
if (provider is Assembly)
return Attribute.GetCustomAttributes((Assembly)provider, attributeType);
if (provider is MemberInfo)
return Attribute.GetCustomAttributes((MemberInfo)provider, attributeType, inherit);
#if !PORTABLE40
if (provider is Module)
return Attribute.GetCustomAttributes((Module)provider, attributeType, inherit);
#endif
if (provider is ParameterInfo)
return Attribute.GetCustomAttributes((ParameterInfo)provider, attributeType, inherit);
#if !PORTABLE40
return (Attribute[])((ICustomAttributeProvider)attributeProvider).GetCustomAttributes(attributeType, inherit);
#else
throw new Exception("Cannot get attributes from '{0}'.".FormatWith(CultureInfo.InvariantCulture, provider));
#endif
}
#else
public static T[] GetAttributes<T>(object attributeProvider, bool inherit) where T : Attribute
{
return GetAttributes(attributeProvider, typeof(T), inherit).Cast<T>().ToArray();
}
public static Attribute[] GetAttributes(object provider, Type attributeType, bool inherit)
{
if (provider is Type)
return ((Type) provider).GetTypeInfo().GetCustomAttributes(attributeType, inherit).ToArray();
if (provider is Assembly)
return ((Assembly) provider).GetCustomAttributes(attributeType).ToArray();
if (provider is MemberInfo)
return ((MemberInfo) provider).GetCustomAttributes(attributeType, inherit).ToArray();
if (provider is Module)
return ((Module) provider).GetCustomAttributes(attributeType).ToArray();
if (provider is ParameterInfo)
return ((ParameterInfo) provider).GetCustomAttributes(attributeType, inherit).ToArray();
throw new Exception("Cannot get attributes from '{0}'.".FormatWith(CultureInfo.InvariantCulture, provider));
}
#endif
public static void SplitFullyQualifiedTypeName(string fullyQualifiedTypeName, out string typeName, out string assemblyName)
{
int? assemblyDelimiterIndex = GetAssemblyDelimiterIndex(fullyQualifiedTypeName);
if (assemblyDelimiterIndex != null)
{
typeName = fullyQualifiedTypeName.Substring(0, assemblyDelimiterIndex.Value).Trim();
assemblyName = fullyQualifiedTypeName.Substring(assemblyDelimiterIndex.Value + 1, fullyQualifiedTypeName.Length - assemblyDelimiterIndex.Value - 1).Trim();
}
else
{
typeName = fullyQualifiedTypeName;
assemblyName = null;
}
}
private static int? GetAssemblyDelimiterIndex(string fullyQualifiedTypeName)
{
// we need to get the first comma following all surrounded in brackets because of generic types
// e.g. System.Collections.Generic.Dictionary`2[[System.String, mscorlib,Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089],[System.String, mscorlib, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089]], mscorlib, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
int scope = 0;
for (int i = 0; i < fullyQualifiedTypeName.Length; i++)
{
char current = fullyQualifiedTypeName[i];
switch (current)
{
case '[':
scope++;
break;
case ']':
scope--;
break;
case ',':
if (scope == 0)
return i;
break;
}
}
return null;
}
public static MemberInfo GetMemberInfoFromType(Type targetType, MemberInfo memberInfo)
{
const BindingFlags bindingAttr = BindingFlags.Instance | BindingFlags.Static | BindingFlags.Public | BindingFlags.NonPublic;
switch (memberInfo.MemberType())
{
case MemberTypes.Property:
PropertyInfo propertyInfo = (PropertyInfo)memberInfo;
Type[] types = propertyInfo.GetIndexParameters().Select(p => p.ParameterType).ToArray();
return targetType.GetProperty(propertyInfo.Name, bindingAttr, null, propertyInfo.PropertyType, types, null);
default:
return targetType.GetMember(memberInfo.Name, memberInfo.MemberType(), bindingAttr).SingleOrDefault();
}
}
public static IEnumerable<FieldInfo> GetFields(Type targetType, BindingFlags bindingAttr)
{
ValidationUtils.ArgumentNotNull(targetType, "targetType");
List<MemberInfo> fieldInfos = new List<MemberInfo>(targetType.GetFields(bindingAttr));
#if !(NETFX_CORE || PORTABLE)
// Type.GetFields doesn't return inherited private fields
// manually find private fields from base class
GetChildPrivateFields(fieldInfos, targetType, bindingAttr);
#endif
return fieldInfos.Cast<FieldInfo>();
}
#if !(NETFX_CORE || PORTABLE)
private static void GetChildPrivateFields(IList<MemberInfo> initialFields, Type targetType, BindingFlags bindingAttr)
{
// fix weirdness with private FieldInfos only being returned for the current Type
// find base type fields and add them to result
if ((bindingAttr & BindingFlags.NonPublic) != 0)
{
// modify flags to not search for public fields
BindingFlags nonPublicBindingAttr = bindingAttr.RemoveFlag(BindingFlags.Public);
while ((targetType = targetType.BaseType()) != null)
{
// filter out protected fields
IEnumerable<MemberInfo> childPrivateFields =
targetType.GetFields(nonPublicBindingAttr).Where(f => f.IsPrivate).Cast<MemberInfo>();
initialFields.AddRange(childPrivateFields);
}
}
}
#endif
public static IEnumerable<PropertyInfo> GetProperties(Type targetType, BindingFlags bindingAttr)
{
ValidationUtils.ArgumentNotNull(targetType, "targetType");
List<PropertyInfo> propertyInfos = new List<PropertyInfo>(targetType.GetProperties(bindingAttr));
GetChildPrivateProperties(propertyInfos, targetType, bindingAttr);
// a base class private getter/setter will be inaccessible unless the property was gotten from the base class
for (int i = 0; i < propertyInfos.Count; i++)
{
PropertyInfo member = propertyInfos[i];
if (member.DeclaringType != targetType)
{
PropertyInfo declaredMember = (PropertyInfo)GetMemberInfoFromType(member.DeclaringType, member);
propertyInfos[i] = declaredMember;
}
}
return propertyInfos;
}
public static BindingFlags RemoveFlag(this BindingFlags bindingAttr, BindingFlags flag)
{
return ((bindingAttr & flag) == flag)
? bindingAttr ^ flag
: bindingAttr;
}
private static void GetChildPrivateProperties(IList<PropertyInfo> initialProperties, Type targetType, BindingFlags bindingAttr)
{
// fix weirdness with private PropertyInfos only being returned for the current Type
// find base type properties and add them to result
// also find base properties that have been hidden by subtype properties with the same name
while ((targetType = targetType.BaseType()) != null)
{
foreach (PropertyInfo propertyInfo in targetType.GetProperties(bindingAttr))
{
PropertyInfo subTypeProperty = propertyInfo;
if (!IsPublic(subTypeProperty))
{
// have to test on name rather than reference because instances are different
// depending on the type that GetProperties was called on
int index = initialProperties.IndexOf(p => p.Name == subTypeProperty.Name);
if (index == -1)
{
initialProperties.Add(subTypeProperty);
}
else
{
PropertyInfo childProperty = initialProperties[index];
// don't replace public child with private base
if (!IsPublic(childProperty))
{
// replace nonpublic properties for a child, but gotten from
// the parent with the one from the child
// the property gotten from the child will have access to private getter/setter
initialProperties[index] = subTypeProperty;
}
}
}
else
{
if (!subTypeProperty.IsVirtual())
{
int index = initialProperties.IndexOf(p => p.Name == subTypeProperty.Name
&& p.DeclaringType == subTypeProperty.DeclaringType);
if (index == -1)
initialProperties.Add(subTypeProperty);
}
else
{
int index = initialProperties.IndexOf(p => p.Name == subTypeProperty.Name
&& p.IsVirtual()
&& p.GetBaseDefinition() != null
&& p.GetBaseDefinition().DeclaringType.IsAssignableFrom(subTypeProperty.DeclaringType));
if (index == -1)
initialProperties.Add(subTypeProperty);
}
}
}
}
}
public static bool IsMethodOverridden(Type currentType, Type methodDeclaringType, string method)
{
bool isMethodOverriden = currentType.GetMethods(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance)
.Any(info =>
info.Name == method &&
// check that the method overrides the original on DynamicObjectProxy
info.DeclaringType != methodDeclaringType
&& info.GetBaseDefinition().DeclaringType == methodDeclaringType
);
return isMethodOverriden;
}
public static object GetDefaultValue(Type type)
{
if (!type.IsValueType())
return null;
switch (ConvertUtils.GetTypeCode(type))
{
case PrimitiveTypeCode.Boolean:
return false;
case PrimitiveTypeCode.Char:
case PrimitiveTypeCode.SByte:
case PrimitiveTypeCode.Byte:
case PrimitiveTypeCode.Int16:
case PrimitiveTypeCode.UInt16:
case PrimitiveTypeCode.Int32:
case PrimitiveTypeCode.UInt32:
return 0;
case PrimitiveTypeCode.Int64:
case PrimitiveTypeCode.UInt64:
return 0L;
case PrimitiveTypeCode.Single:
return 0f;
case PrimitiveTypeCode.Double:
return 0.0;
case PrimitiveTypeCode.Decimal:
return 0m;
case PrimitiveTypeCode.DateTime:
return new DateTime();
#if !(PORTABLE || PORTABLE40 || NET35 || NET20)
case PrimitiveTypeCode.BigInteger:
return new BigInteger();
#endif
case PrimitiveTypeCode.Guid:
return new Guid();
#if !NET20
case PrimitiveTypeCode.DateTimeOffset:
return new DateTimeOffset();
#endif
}
if (IsNullable(type))
return null;
// possibly use IL initobj for perf here?
return Activator.CreateInstance(type);
}
}
}
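// Illustrative sketch, not part of the file above; it assumes it is compiled into the same assembly,
// since ReflectionUtils is internal. It exercises the type-name helpers on hand-written inputs:
// GetAssemblyDelimiterIndex only honours the first comma at bracket depth zero, so the generic
// arguments keep their own assembly names, and FormatterAssemblyStyle.Simple strips version, culture
// and key details from every assembly reference.
namespace Json.Net.Utilities.Examples
{
    using System;
    using System.Collections.Generic;
    using System.Runtime.Serialization.Formatters;

    internal static class ReflectionUtilsSketch
    {
        internal static void Run()
        {
            string typeName;
            string assemblyName;
            ReflectionUtils.SplitFullyQualifiedTypeName(
                "System.Collections.Generic.List`1[[System.Int32, mscorlib]], mscorlib",
                out typeName, out assemblyName);
            Console.WriteLine(typeName);      // System.Collections.Generic.List`1[[System.Int32, mscorlib]]
            Console.WriteLine(assemblyName);  // mscorlib

            Console.WriteLine(ReflectionUtils.GetTypeName(typeof(Dictionary<string, int>), FormatterAssemblyStyle.Simple, null));

            // Collection item types: arrays report their element type, IEnumerable<T> implementations report T.
            Console.WriteLine(ReflectionUtils.GetCollectionItemType(typeof(int[])));        // System.Int32
            Console.WriteLine(ReflectionUtils.GetCollectionItemType(typeof(List<string>))); // System.String
        }
    }
}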
| |
//
// Copyright (c)1998-2011 Pearson Education, Inc. or its affiliate(s).
// All rights reserved.
//
using System;
using System.Collections;
using System.Xml;
namespace OpenADK.Util
{
public class XmlUtils
{
/// <summary>
/// This method returns the attribute value, or null if the attribute does not exist. This differs from
/// XmlElement.GetAttribute, which returns an empty string when the attribute is missing.
/// </summary>
/// <param name="element"></param>
/// <param name="attributeName"></param>
/// <returns></returns>
public static string GetAttributeValue( XmlElement element,
string attributeName )
{
XmlAttribute attr = element.GetAttributeNode( attributeName );
return attr == null ? null : attr.Value;
}
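// Illustrative usage (not part of the original source), showing the difference from
// XmlElement.GetAttribute, which returns an empty string for a missing attribute:
//
//   string viaHelper = XmlUtils.GetAttributeValue( element, "enabled" ); // null when absent
//   string viaCore   = element.GetAttribute( "enabled" );                // "" when absent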
/// <summary>
/// Builds an array of XmlElements from an XmlNodeList
/// </summary>
/// <param name="list">an XmlNodeList</param>
/// <param name="filter">If true, returns only the elements with an "enabled" property set to "True" or "Yes"</param>
/// <returns>the array of elements</returns>
/// <remarks>
/// If there are no elements in the list or that are enabled, an empty array will be returned
/// </remarks>
public static XmlElement [] ElementArrayFromNodeList( XmlNodeList list,
bool filter )
{
ArrayList v = new ArrayList();
IEnumerator enumerator;
if ( filter ) {
enumerator = new FilteredElementList( list ).GetEnumerator();
}
else {
enumerator = list.GetEnumerator();
}
while ( enumerator.MoveNext() ) {
v.Add( enumerator.Current );
}
return (XmlElement []) v.ToArray( typeof ( XmlElement ) );
}
public static XmlElement GetElementByAttribute( XmlElement parent,
string elementName,
string attributeName,
string attributeValue,
bool filtered )
{
if ( parent == null || elementName == null || attributeName == null ||
attributeValue == null ) {
return null;
}
// TODO: Implement better filtering in the xpath expression so that the multiple steps of operations are not necessary
XmlElement element = null;
string xPath = elementName + "[@" + attributeName + "=\"" + attributeValue + "\"]";
if ( filtered ) {
FilteredElementList list = new FilteredElementList( parent.SelectNodes( xPath ) );
IEnumerator enumerator = list.GetEnumerator();
if ( enumerator.MoveNext() ) {
element = (XmlElement) enumerator.Current;
}
}
else {
element = (XmlElement) parent.SelectSingleNode( xPath );
}
return element;
}
/// <summary>
/// Returns true if the element has no "enabled" attribute, or if that attribute is set to "True" or "Yes" (case-insensitive)
/// </summary>
/// <param name="element"></param>
/// <returns></returns>
public static bool IsElementEnabled( XmlElement element )
{
string val = element.GetAttribute( "enabled" ).ToUpper();
return val.Length == 0 || val == "TRUE" || val == "YES";
}
/// <summary> Sets the value of a <code>&lt;property&gt;</code> child of the specified
/// node. If a <code>&lt;property&gt;</code> element already exists, its value
/// is updated; otherwise a new element is appended to the node.
/// </summary>
/// <param name="parentNode">The parent node of the property
/// </param>
/// <param name="property">The name of the property
/// </param>
/// <param name="val">The property value
/// </param>
public static void SetProperty( XmlElement parentNode,
string property,
string val )
{
XmlElement propN =
GetElementByAttribute
( parentNode, AdkXmlConstants.Property.ELEMENT, AdkXmlConstants.Property.NAME,
property, false );
if ( propN == null ) {
propN = parentNode.OwnerDocument.CreateElement( AdkXmlConstants.Property.ELEMENT );
// Search for another node in the config that matches the prefix of the current
// property element and insert the node immediately after it, if found.
// This helps to keep the property file in an easier to read format.
int loc = property.Length - 1;
XmlNode lastSibling = null;
while ( lastSibling == null && (loc = property.LastIndexOf( '.', loc - 1 )) > -1 ) {
string prefix = property.Substring( 0, loc + 1 );
lastSibling = FindLastPropertySibling( parentNode, prefix );
}
if ( lastSibling == null ) {
// Find the last property element
lastSibling = FindLastPropertySibling( parentNode, null );
}
if ( lastSibling != null ) {
parentNode.InsertAfter( propN, lastSibling );
}
else {
parentNode.AppendChild( propN );
}
}
propN.SetAttribute( AdkXmlConstants.Property.NAME, property );
propN.SetAttribute( AdkXmlConstants.Property.VALUE, val );
}
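// Illustrative usage (not part of the original source); assumes AdkXmlConstants.Property maps to
// the conventional <property name="..." value="..."/> element, and agentElement is a hypothetical
// parent node:
//
//   XmlUtils.SetProperty( agentElement, "adk.log.level", "Debug" );
//
// Either updates an existing <property name="adk.log.level"/> child or appends a new one, trying
// to keep properties that share a name prefix grouped together.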
/// <summary>
/// Searches for the last property element with the specified prefix
/// </summary>
/// <param name="parent">The parent node to search</param>
/// <param name="propertyPrefix">The prefix to search for, or NULL if the last property element is to be returned</param>
/// <returns>The last property element found with the specified prefix or null, if none found</returns>
private static XmlElement FindLastPropertySibling( XmlElement parent,
string propertyPrefix )
{
XmlNodeList children = parent.ChildNodes;
for ( int a = children.Count - 1; a > -1; a-- ) {
XmlElement child = children[a] as XmlElement;
if ( child != null &&
child.Name == AdkXmlConstants.Property.ELEMENT ) {
if ( propertyPrefix == null ||
child.GetAttribute( AdkXmlConstants.Property.NAME ).StartsWith
( propertyPrefix ) ) {
return child;
}
}
}
return null;
}
/// <summary>
/// Filters the list of elements, and returns only those with an "enabled" property set to "True" or "Yes"
/// </summary>
public class FilteredElementList : IEnumerable
{
public FilteredElementList( XmlNodeList list )
{
fNodeList = list;
}
public IEnumerator GetEnumerator()
{
return new FilteredEnumerator( fNodeList.GetEnumerator() );
}
private XmlNodeList fNodeList;
private class FilteredEnumerator : EnumeratorWrapper
{
public FilteredEnumerator( IEnumerator enumerator )
: base( enumerator ) {}
public override bool MoveNext()
{
while ( this.WrappedEnumerator.MoveNext() ) {
XmlElement element = (XmlElement) this.Current;
if ( IsElementEnabled( element ) ) {
return true;
}
}
return false;
}
}
}
/// <summary>
/// Allows safe iteration of Xml Child elements of a given element
/// </summary>
/// <remarks>
/// Example of code without using this class
/// <code>
/// foreach( XmlNode a_o in a_XmlDoc.DocumentElement )
/// {
/// if( a_o is XmlElement )
/// {
/// XmlElement a_Element = (XmlElement)a_o;
/// Console.WriteLine( a_Element.Name );
/// foreach( XmlNode a_ChildElement in a_Element )
/// {
/// if( a_ChildElement is XmlElement )
/// {
/// Console.WriteLine( " " + ((XmlElement)a_ChildElement).Name );
/// }
/// }
/// }
/// }
/// </code>
/// Example Usage using the XmlElementEnumerator
/// <code>
/// foreach( XmlElement a_Element in new XmlElementEnumerator( a_XmlDoc.DocumentElement ) )
/// {
/// Console.WriteLine( a_Element.Name );
/// foreach( XmlElement a_ChildElement in new XmlElementEnumerator( a_Element ) )
/// {
/// Console.WriteLine( " " + a_ChildElement.Name );
/// }
/// }
/// </code>
/// </remarks>
public sealed class XmlElementEnumerator : IEnumerable
{
public XmlElementEnumerator( XmlElement parentNode )
{
fParent = parentNode;
}
public XmlElementEnumerator( XmlNodeList parentList )
{
fParent = parentList;
}
IEnumerator IEnumerable.GetEnumerator()
{
return new ElementEnumerator( fParent.GetEnumerator() );
}
private class ElementEnumerator : IEnumerator
{
internal ElementEnumerator( IEnumerator enumerator )
{
fNodeEnumerator = enumerator;
}
public bool MoveNext()
{
while ( fNodeEnumerator.MoveNext() ) {
if ( fNodeEnumerator.Current is XmlElement ) {
return true;
}
}
return false;
}
public void Reset()
{
fNodeEnumerator.Reset();
}
public object Current
{
get { return fNodeEnumerator.Current; }
}
private IEnumerator fNodeEnumerator;
}
private IEnumerable fParent;
}
/// <summary>
/// Sets an XML Attribute, or removes it if the value is null
/// </summary>
/// <param name="element"></param>
/// <param name="attributeName"></param>
/// <param name="value"></param>
public static void SetOrRemoveAttribute(XmlElement element, string attributeName, string value)
{
if( value == null )
{
element.RemoveAttribute( attributeName );
} else
{
element.SetAttribute(attributeName, value);
}
}
/// <summary>
/// Returns the first child element found with the specified name, ignoring differences in case
/// </summary>
/// <param name="element">The element to search</param>
/// <param name="nodeName">The element tag name to search for</param>
/// <returns>The matching child element with the specified name, or NULL if not found</returns>
public static XmlElement GetFirstElementIgnoreCase(XmlElement element, string nodeName )
{
if (nodeName != null && element != null)
{
foreach( XmlNode child in element.ChildNodes )
{
if( child.NodeType == XmlNodeType.Element && String.Compare( nodeName, child.Name, true ) == 0 )
{
return (XmlElement)child;
}
}
}
return null;
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
namespace Microsoft.Azure.Management.Dns.Fluent
{
using System.Collections.Generic;
internal partial class DnsRecordSetImpl
{
/// <summary>
/// Gets the etag associated with the record set.
/// </summary>
string Microsoft.Azure.Management.Dns.Fluent.IDnsRecordSet.ETag
{
get
{
return this.ETag();
}
}
/// <summary>
/// Gets the fully qualified domain name of the record set.
/// </summary>
string Microsoft.Azure.Management.Dns.Fluent.IDnsRecordSet.Fqdn
{
get
{
return this.Fqdn();
}
}
/// <summary>
/// Gets the metadata associated with this record set.
/// </summary>
System.Collections.Generic.IReadOnlyDictionary<string, string> Microsoft.Azure.Management.Dns.Fluent.IDnsRecordSet.Metadata
{
get
{
return this.Metadata();
}
}
/// <summary>
/// Gets the type of records in this record set.
/// </summary>
Models.RecordType Microsoft.Azure.Management.Dns.Fluent.IDnsRecordSet.RecordType
{
get
{
return this.RecordType();
}
}
/// <summary>
/// Gets TTL of the records in this record set.
/// </summary>
long Microsoft.Azure.Management.Dns.Fluent.IDnsRecordSet.TimeToLive
{
get
{
return this.TimeToLive();
}
}
/// <summary>
/// Attaches the child definition to the parent resource update.
/// </summary>
/// <return>The next stage of the parent definition.</return>
DnsZone.Update.IUpdate Microsoft.Azure.Management.ResourceManager.Fluent.Core.ChildResource.Update.IInUpdate<DnsZone.Update.IUpdate>.Attach()
{
return this.Attach();
}
/// <summary>
/// Attaches the child definition to the parent resource definition.
/// </summary>
/// <return>The next stage of the parent definition.</return>
DnsZone.Definition.IWithCreate Microsoft.Azure.Management.ResourceManager.Fluent.Core.ChildResource.Definition.IInDefinition<DnsZone.Definition.IWithCreate>.Attach()
{
return this.Attach();
}
/// <summary>
/// Creates a CNAME record with the provided alias.
/// </summary>
/// <param name="alias">The alias.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.UpdateDefinition.IWithCNameRecordSetAttachable<DnsZone.Update.IUpdate> DnsRecordSet.UpdateDefinition.IWithCNameRecordAlias<DnsZone.Update.IUpdate>.WithAlias(string alias)
{
return this.WithAlias(alias);
}
/// <summary>
/// Creates a CNAME record with the provided alias.
/// </summary>
/// <param name="alias">The alias.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.Definition.IWithCNameRecordSetAttachable<DnsZone.Definition.IWithCreate> DnsRecordSet.Definition.IWithCNameRecordAlias<DnsZone.Definition.IWithCreate>.WithAlias(string alias)
{
return this.WithAlias(alias);
}
/// <summary>
/// The new alias for the CNAME record set.
/// </summary>
/// <param name="alias">The alias.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateCNameRecordSet.IUpdateCNameRecordSet DnsRecordSet.Update.IWithCNameRecordAlias.WithAlias(string alias)
{
return this.WithAlias(alias);
}
/// <summary>
/// Specifies the email server associated with the SOA record.
/// </summary>
/// <param name="emailServerHostName">The email server.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateSoaRecord.IUpdateSoaRecord DnsRecordSet.Update.IWithSoaRecordAttributes.WithEmailServer(string emailServerHostName)
{
return this.WithEmailServer(emailServerHostName);
}
/// <summary>
/// Specifies that the If-None-Match header needs to be set to * to prevent updating an existing record set.
/// </summary>
/// <return>The next stage of the definition.</return>
DnsRecordSet.UpdateDefinition.IWithAttach<DnsZone.Update.IUpdate> DnsRecordSet.UpdateDefinition.IWithETagCheck<DnsZone.Update.IUpdate>.WithETagCheck()
{
return this.WithETagCheck();
}
/// <summary>
/// Specifies that the If-None-Match header needs to be set to * to prevent updating an existing record set.
/// </summary>
/// <return>The next stage of the definition.</return>
DnsRecordSet.Definition.IWithAttach<DnsZone.Definition.IWithCreate> DnsRecordSet.Definition.IWithETagCheck<DnsZone.Definition.IWithCreate>.WithETagCheck()
{
return this.WithETagCheck();
}
/// <summary>
/// Specifies that the If-Match header needs to be set to the current eTag value associated
/// with the record set.
/// </summary>
/// <return>The next stage of the update.</return>
DnsRecordSet.Update.IUpdate DnsRecordSet.Update.IWithETagCheck.WithETagCheck()
{
return this.WithETagCheck();
}
/// <summary>
/// Specifies that the If-Match header needs to be set to the given eTag value.
/// </summary>
/// <param name="eTagValue">The eTag value.</param>
/// <return>The next stage of the update.</return>
DnsRecordSet.Update.IUpdate DnsRecordSet.Update.IWithETagCheck.WithETagCheck(string eTagValue)
{
return this.WithETagCheck(eTagValue);
}
/// <summary>
/// Specifies the time in seconds that a secondary name server will treat its cached zone file as valid
/// when the primary name server cannot be contacted.
/// </summary>
/// <param name="expireTimeInSeconds">The expire time in seconds.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateSoaRecord.IUpdateSoaRecord DnsRecordSet.Update.IWithSoaRecordAttributes.WithExpireTimeInSeconds(long expireTimeInSeconds)
{
return this.WithExpireTimeInSeconds(expireTimeInSeconds);
}
/// <summary>
/// Creates an A record with the provided IPv4 address in this record set.
/// </summary>
/// <param name="ipv4Address">The IPv4 address.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.UpdateDefinition.IWithARecordIPv4AddressOrAttachable<DnsZone.Update.IUpdate> DnsRecordSet.UpdateDefinition.IWithARecordIPv4Address<DnsZone.Update.IUpdate>.WithIPv4Address(string ipv4Address)
{
return this.WithIPv4Address(ipv4Address);
}
/// <summary>
/// Creates an A record with the provided IPv4 address in this record set.
/// </summary>
/// <param name="ipv4Address">The IPv4 address.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.Definition.IWithARecordIPv4AddressOrAttachable<DnsZone.Definition.IWithCreate> DnsRecordSet.Definition.IWithARecordIPv4Address<DnsZone.Definition.IWithCreate>.WithIPv4Address(string ipv4Address)
{
return this.WithIPv4Address(ipv4Address);
}
/// <summary>
/// Creates an A record with the provided IPv4 address in the record set.
/// </summary>
/// <param name="ipv4Address">An IPv4 address.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateARecordSet.IUpdateARecordSet DnsRecordSet.Update.IWithARecordIPv4Address.WithIPv4Address(string ipv4Address)
{
return this.WithIPv4Address(ipv4Address);
}
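// Illustrative (hypothetical) fluent usage combining the definition stages implemented in this
// class; the zone entry point (azure.DnsZones.Define) and resource group name are assumptions and
// are not defined in this file:
//
//   var zone = azure.DnsZones.Define("contoso.com")
//       .WithExistingResourceGroup("rg-dns")
//       .DefineARecordSet("www")
//           .WithIPv4Address("203.0.113.10")
//           .WithIPv4Address("203.0.113.11")
//           .WithTimeToLive(3600)
//           .Attach()
//       .Create();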
/// <summary>
/// Creates an AAAA record with the provided IPv6 address in this record set.
/// </summary>
/// <param name="ipv6Address">The IPv6 address.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.UpdateDefinition.IWithAaaaRecordIPv6AddressOrAttachable<DnsZone.Update.IUpdate> DnsRecordSet.UpdateDefinition.IWithAaaaRecordIPv6Address<DnsZone.Update.IUpdate>.WithIPv6Address(string ipv6Address)
{
return this.WithIPv6Address(ipv6Address);
}
/// <summary>
/// Creates an AAAA record with the provided IPv6 address in this record set.
/// </summary>
/// <param name="ipv6Address">An IPv6 address.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.Definition.IWithAaaaRecordIPv6AddressOrAttachable<DnsZone.Definition.IWithCreate> DnsRecordSet.Definition.IWithAaaaRecordIPv6Address<DnsZone.Definition.IWithCreate>.WithIPv6Address(string ipv6Address)
{
return this.WithIPv6Address(ipv6Address);
}
/// <summary>
/// Creates an AAAA record with the provided IPv6 address in this record set.
/// </summary>
/// <param name="ipv6Address">The IPv6 address.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateAaaaRecordSet.IUpdateAaaaRecordSet DnsRecordSet.Update.IWithAaaaRecordIPv6Address.WithIPv6Address(string ipv6Address)
{
return this.WithIPv6Address(ipv6Address);
}
/// <summary>
/// Creates and assigns priority to a MX record with the provided mail exchange server in this record set.
/// </summary>
/// <param name="mailExchangeHostName">The host name of the mail exchange server.</param>
/// <param name="priority">The priority for the mail exchange host, lower the value higher the priority.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateMXRecordSet.IUpdateMXRecordSet DnsRecordSet.Update.IWithMXRecordMailExchange.WithMailExchange(string mailExchangeHostName, int priority)
{
return this.WithMailExchange(mailExchangeHostName, priority);
}
/// <summary>
/// Creates and assigns priority to a MX record with the provided mail exchange server in this record set.
/// </summary>
/// <param name="mailExchangeHostName">The host name of the mail exchange server.</param>
/// <param name="priority">The priority for the mail exchange host, lower the value higher the priority.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.UpdateDefinition.IWithMXRecordMailExchangeOrAttachable<DnsZone.Update.IUpdate> DnsRecordSet.UpdateDefinition.IWithMXRecordMailExchange<DnsZone.Update.IUpdate>.WithMailExchange(string mailExchangeHostName, int priority)
{
return this.WithMailExchange(mailExchangeHostName, priority);
}
/// <summary>
/// Creates and assigns priority to a MX record with the provided mail exchange server in this record set.
/// </summary>
/// <param name="mailExchangeHostName">The host name of the mail exchange server.</param>
/// <param name="priority">The priority for the mail exchange host, lower the value higher the priority.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.Definition.IWithMXRecordMailExchangeOrAttachable<DnsZone.Definition.IWithCreate> DnsRecordSet.Definition.IWithMXRecordMailExchange<DnsZone.Definition.IWithCreate>.WithMailExchange(string mailExchangeHostName, int priority)
{
return this.WithMailExchange(mailExchangeHostName, priority);
}
/// <summary>
/// Adds metadata to the record set.
/// </summary>
/// <param name="key">The key for the metadata.</param>
/// <param name="value">The value for the metadata.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.Update.IUpdate DnsRecordSet.Update.IWithMetadata.WithMetadata(string key, string value)
{
return this.WithMetadata(key, value);
}
/// <summary>
/// Adds metadata to the record set.
/// </summary>
/// <param name="key">The key for the metadata.</param>
/// <param name="value">The value for the metadata.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.UpdateDefinition.IWithAttach<DnsZone.Update.IUpdate> DnsRecordSet.UpdateDefinition.IWithMetadata<DnsZone.Update.IUpdate>.WithMetadata(string key, string value)
{
return this.WithMetadata(key, value);
}
/// <summary>
/// Adds metadata to the record set.
/// </summary>
/// <param name="key">The key for the metadata.</param>
/// <param name="value">The value for the metadata.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.Definition.IWithAttach<DnsZone.Definition.IWithCreate> DnsRecordSet.Definition.IWithMetadata<DnsZone.Definition.IWithCreate>.WithMetadata(string key, string value)
{
return this.WithMetadata(key, value);
}
/// <summary>
/// Creates a NS record with the provided name server in this record set.
/// </summary>
/// <param name="nameServerHostName">The name server host name.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.UpdateDefinition.IWithNSRecordNameServerOrAttachable<DnsZone.Update.IUpdate> DnsRecordSet.UpdateDefinition.IWithNSRecordNameServer<DnsZone.Update.IUpdate>.WithNameServer(string nameServerHostName)
{
return this.WithNameServer(nameServerHostName);
}
/// <summary>
/// Creates a NS record with the provided name server in this record set.
/// </summary>
/// <param name="nameServerHostName">The name server host name.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.Definition.IWithNSRecordNameServerOrAttachable<DnsZone.Definition.IWithCreate> DnsRecordSet.Definition.IWithNSRecordNameServer<DnsZone.Definition.IWithCreate>.WithNameServer(string nameServerHostName)
{
return this.WithNameServer(nameServerHostName);
}
/// <summary>
/// Creates a NS record with the provided name server in this record set.
/// </summary>
/// <param name="nameServerHostName">The name server host name.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateNSRecordSet.IUpdateNSRecordSet DnsRecordSet.Update.IWithNSRecordNameServer.WithNameServer(string nameServerHostName)
{
return this.WithNameServer(nameServerHostName);
}
/// <summary>
/// Specifies the time in seconds that any name server or resolver should cache a negative response.
/// </summary>
/// <param name="negativeCachingTimeToLive">The TTL for cached negative response.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateSoaRecord.IUpdateSoaRecord DnsRecordSet.Update.IWithSoaRecordAttributes.WithNegativeResponseCachingTimeToLiveInSeconds(long negativeCachingTimeToLive)
{
return this.WithNegativeResponseCachingTimeToLiveInSeconds(negativeCachingTimeToLive);
}
/// <summary>
/// Removes the A record with the provided IPv4 address from the record set.
/// </summary>
/// <param name="ipv4Address">An IPv4 address.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateARecordSet.IUpdateARecordSet DnsRecordSet.Update.IWithARecordIPv4Address.WithoutIPv4Address(string ipv4Address)
{
return this.WithoutIPv4Address(ipv4Address);
}
/// <summary>
/// Removes an AAAA record with the provided IPv6 address from this record set.
/// </summary>
/// <param name="ipv6Address">The IPv6 address.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateAaaaRecordSet.IUpdateAaaaRecordSet DnsRecordSet.Update.IWithAaaaRecordIPv6Address.WithoutIPv6Address(string ipv6Address)
{
return this.WithoutIPv6Address(ipv6Address);
}
/// <summary>
/// Removes MX record with the provided mail exchange server and priority from this record set.
/// </summary>
/// <param name="mailExchangeHostName">The host name of the mail exchange server.</param>
/// <param name="priority">The priority for the mail exchange host, lower the value higher the priority.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateMXRecordSet.IUpdateMXRecordSet DnsRecordSet.Update.IWithMXRecordMailExchange.WithoutMailExchange(string mailExchangeHostName, int priority)
{
return this.WithoutMailExchange(mailExchangeHostName, priority);
}
/// <summary>
/// Removes a metadata from the record set.
/// </summary>
/// <param name="key">The key of the metadata to remove.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.Update.IUpdate DnsRecordSet.Update.IWithMetadata.WithoutMetadata(string key)
{
return this.WithoutMetadata(key);
}
/// <summary>
/// Removes an NS record with the provided name server from this record set.
/// </summary>
/// <param name="nameServerHostName">The name server host name.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateNSRecordSet.IUpdateNSRecordSet DnsRecordSet.Update.IWithNSRecordNameServer.WithoutNameServer(string nameServerHostName)
{
return this.WithoutNameServer(nameServerHostName);
}
/// <summary>
/// Removes a service record for a service.
/// </summary>
/// <param name="target">The canonical name of the target host running the service.</param>
/// <param name="port">The port on which the service is bounded.</param>
/// <param name="priority">The priority of the target host.</param>
/// <param name="weight">The relative weight (preference) of the records.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateSrvRecordSet.IUpdateSrvRecordSet DnsRecordSet.Update.IWithSrvRecordEntry.WithoutRecord(string target, int port, int priority, int weight)
{
return this.WithoutRecord(target, port, priority, weight);
}
/// <summary>
/// Removes a Caa record for a service.
/// </summary>
/// <param name="flags">The flags for this CAA record as an integer between 0 and 255.</param>
/// <param name="tag">The tag for this CAA record.</param>
/// <param name="value">The value for this CAA record.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateCaaRecordSet.IUpdateCaaRecordSet DnsRecordSet.Update.IWithCaaRecordEntry.WithoutRecord(int flags, string tag, string value)
{
return this.WithoutRecord(flags, tag, value);
}
/// <summary>
/// Removes the PTR record with the provided target domain name from this record set.
/// </summary>
/// <param name="targetDomainName">The target domain name.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdatePtrRecordSet.IUpdatePtrRecordSet DnsRecordSet.Update.IWithPtrRecordTargetDomainName.WithoutTargetDomainName(string targetDomainName)
{
return this.WithoutTargetDomainName(targetDomainName);
}
/// <summary>
/// Removes a Txt record with the given text from this record set.
/// </summary>
/// <param name="text">The text value.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateTxtRecordSet.IUpdateTxtRecordSet DnsRecordSet.Update.IWithTxtRecordTextValue.WithoutText(string text)
{
return this.WithoutText(text);
}
/// <summary>
/// Removes a Txt record with the given text (split into 255 char chunks) from this record set.
/// </summary>
/// <param name="textChunks">The text value as list.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateTxtRecordSet.IUpdateTxtRecordSet DnsRecordSet.Update.IWithTxtRecordTextValue.WithoutText(IList<string> textChunks)
{
return this.WithoutText(textChunks);
}
/// <summary>
/// Specifies a service record for a service.
/// </summary>
/// <param name="target">The canonical name of the target host running the service.</param>
/// <param name="port">The port on which the service is bounded.</param>
/// <param name="priority">The priority of the target host, lower the value higher the priority.</param>
/// <param name="weight">The relative weight (preference) of the records with the same priority, higher the value more the preference.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateSrvRecordSet.IUpdateSrvRecordSet DnsRecordSet.Update.IWithSrvRecordEntry.WithRecord(string target, int port, int priority, int weight)
{
return this.WithRecord(target, port, priority, weight);
}
/// <summary>
/// Specifies a service record for a service.
/// </summary>
/// <param name="target">The canonical name of the target host running the service.</param>
/// <param name="port">The port on which the service is bounded.</param>
/// <param name="priority">The priority of the target host, lower the value higher the priority.</param>
/// <param name="weight">The relative weight (preference) of the records with the same priority, higher the value more the preference.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.UpdateDefinition.IWithSrvRecordEntryOrAttachable<DnsZone.Update.IUpdate> DnsRecordSet.UpdateDefinition.IWithSrvRecordEntry<DnsZone.Update.IUpdate>.WithRecord(string target, int port, int priority, int weight)
{
return this.WithRecord(target, port, priority, weight);
}
/// <summary>
/// Specifies a service record for a service.
/// </summary>
/// <param name="target">The canonical name of the target host running the service.</param>
/// <param name="port">The port on which the service is bounded.</param>
/// <param name="priority">The priority of the target host, lower the value higher the priority.</param>
/// <param name="weight">The relative weight (preference) of the records with the same priority, higher the value more the preference.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.Definition.IWithSrvRecordEntryOrAttachable<DnsZone.Definition.IWithCreate> DnsRecordSet.Definition.IWithSrvRecordEntry<DnsZone.Definition.IWithCreate>.WithRecord(string target, int port, int priority, int weight)
{
return this.WithRecord(target, port, priority, weight);
}
/// <summary>
/// Specifies a Caa record for a service.
/// </summary>
/// <param name="flags">The flags for this CAA record as an integer between 0 and 255.</param>
/// <param name="tag">The tag for this CAA record.</param>
/// <param name="value">The value for this CAA record.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.UpdateDefinition.IWithCaaRecordEntryOrAttachable<DnsZone.Update.IUpdate> DnsRecordSet.UpdateDefinition.IWithCaaRecordEntry<DnsZone.Update.IUpdate>.WithRecord(int flags, string tag, string value)
{
return this.WithRecord(flags, tag, value);
}
/// <summary>
/// Specifies a Caa record for a service.
/// </summary>
/// <param name="flags">The flags for this CAA record as an integer between 0 and 255.</param>
/// <param name="tag">The tag for this CAA record.</param>
/// <param name="value">The value for this CAA record.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.Definition.IWithCaaRecordEntryOrAttachable<DnsZone.Definition.IWithCreate> DnsRecordSet.Definition.IWithCaaRecordEntry<DnsZone.Definition.IWithCreate>.WithRecord(int flags, string tag, string value)
{
return this.WithRecord(flags, tag, value);
}
/// <summary>
/// Specifies a Caa record for a service.
/// </summary>
/// <param name="flags">The flags for this CAA record as an integer between 0 and 255.</param>
/// <param name="tag">The tag for this CAA record.</param>
/// <param name="value">The value for this CAA record.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateCaaRecordSet.IUpdateCaaRecordSet DnsRecordSet.Update.IWithCaaRecordEntry.WithRecord(int flags, string tag, string value)
{
return this.WithRecord(flags, tag, value);
}
/// <summary>
/// Specifies the time in seconds that a secondary name server should wait before trying to contact
/// the primary name server for a zone file update.
/// </summary>
/// <param name="refreshTimeInSeconds">The refresh time in seconds.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateSoaRecord.IUpdateSoaRecord DnsRecordSet.Update.IWithSoaRecordAttributes.WithRefreshTimeInSeconds(long refreshTimeInSeconds)
{
return this.WithRefreshTimeInSeconds(refreshTimeInSeconds);
}
/// <summary>
/// Specifies the time in seconds that a secondary name server should wait before trying to contact
/// the primary name server again after a failed attempt to check for a zone file update.
/// </summary>
/// <param name="refreshTimeInSeconds">The retry time in seconds.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateSoaRecord.IUpdateSoaRecord DnsRecordSet.Update.IWithSoaRecordAttributes.WithRetryTimeInSeconds(long refreshTimeInSeconds)
{
return this.WithRetryTimeInSeconds(refreshTimeInSeconds);
}
/// <summary>
/// Specifies the serial number for the zone file.
/// </summary>
/// <param name="serialNumber">The serial number.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateSoaRecord.IUpdateSoaRecord DnsRecordSet.Update.IWithSoaRecordAttributes.WithSerialNumber(long serialNumber)
{
return this.WithSerialNumber(serialNumber);
}
/// <summary>
/// Creates a PTR record with the provided target domain name in this record set.
/// </summary>
/// <param name="targetDomainName">The target domain name.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.UpdateDefinition.IWithPtrRecordTargetDomainNameOrAttachable<DnsZone.Update.IUpdate> DnsRecordSet.UpdateDefinition.IWithPtrRecordTargetDomainName<DnsZone.Update.IUpdate>.WithTargetDomainName(string targetDomainName)
{
return this.WithTargetDomainName(targetDomainName);
}
/// <summary>
/// Creates a PTR record with the provided target domain name in this record set.
/// </summary>
/// <param name="targetDomainName">The target domain name.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.Definition.IWithPtrRecordTargetDomainNameOrAttachable<DnsZone.Definition.IWithCreate> DnsRecordSet.Definition.IWithPtrRecordTargetDomainName<DnsZone.Definition.IWithCreate>.WithTargetDomainName(string targetDomainName)
{
return this.WithTargetDomainName(targetDomainName);
}
/// <summary>
/// Creates a PTR record with the provided target domain name in this record set.
/// </summary>
/// <param name="targetDomainName">The target domain name.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdatePtrRecordSet.IUpdatePtrRecordSet DnsRecordSet.Update.IWithPtrRecordTargetDomainName.WithTargetDomainName(string targetDomainName)
{
return this.WithTargetDomainName(targetDomainName);
}
/// <summary>
/// Creates a Txt record with the given text in this record set.
/// </summary>
/// <param name="text">The text value.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.UpdateTxtRecordSet.IUpdateTxtRecordSet DnsRecordSet.Update.IWithTxtRecordTextValue.WithText(string text)
{
return this.WithText(text);
}
/// <summary>
/// Creates a TXT record with the given text in this record set.
/// </summary>
/// <param name="text">The text value.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.UpdateDefinition.IWithTxtRecordTextValueOrAttachable<DnsZone.Update.IUpdate> DnsRecordSet.UpdateDefinition.IWithTxtRecordTextValue<DnsZone.Update.IUpdate>.WithText(string text)
{
return this.WithText(text);
}
/// <summary>
/// Creates a Txt record with the given text in this record set.
/// </summary>
/// <param name="text">The text value.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.Definition.IWithTxtRecordTextValueOrAttachable<DnsZone.Definition.IWithCreate> DnsRecordSet.Definition.IWithTxtRecordTextValue<DnsZone.Definition.IWithCreate>.WithText(string text)
{
return this.WithText(text);
}
/// <summary>
/// Specifies the TTL for the records in the record set.
/// </summary>
/// <param name="ttlInSeconds">TTL in seconds.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.UpdateDefinition.IWithAttach<DnsZone.Update.IUpdate> DnsRecordSet.UpdateDefinition.IWithTtl<DnsZone.Update.IUpdate>.WithTimeToLive(long ttlInSeconds)
{
return this.WithTimeToLive(ttlInSeconds);
}
/// <summary>
/// Specifies the Time To Live for the records in the record set.
/// </summary>
/// <param name="ttlInSeconds">TTL in seconds.</param>
/// <return>The next stage of the definition.</return>
DnsRecordSet.Definition.IWithAttach<DnsZone.Definition.IWithCreate> DnsRecordSet.Definition.IWithTtl<DnsZone.Definition.IWithCreate>.WithTimeToLive(long ttlInSeconds)
{
return this.WithTimeToLive(ttlInSeconds);
}
/// <summary>
/// Specifies the TTL for the records in the record set.
/// </summary>
/// <param name="ttlInSeconds">TTL in seconds.</param>
/// <return>The next stage of the record set update.</return>
DnsRecordSet.Update.IUpdate DnsRecordSet.Update.IWithTtl.WithTimeToLive(long ttlInSeconds)
{
return this.WithTimeToLive(ttlInSeconds);
}
}
public partial class ETagState
{
}
}
| |
using Signum.Engine;
using Signum.Engine.Engine;
using Signum.Engine.Linq;
using Signum.Engine.Maps;
using Signum.Engine.PostgresCatalog;
using Signum.Engine.SchemaInfoTables;
using Signum.Entities;
using Signum.Utilities;
using Signum.Utilities.ExpressionTrees;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Data.Common;
using System.Linq;
using System.Linq.Expressions;
using System.Text;
namespace Signum.Engine
{
public static class Administrator
{
public static void TotalGeneration()
{
foreach (var db in Schema.Current.DatabaseNames())
{
Connector.Current.CleanDatabase(db);
SafeConsole.WriteColor(ConsoleColor.DarkGray, '.');
}
SqlPreCommandConcat totalScript = (SqlPreCommandConcat)Schema.Current.GenerationScipt()!;
foreach (SqlPreCommand command in totalScript.Commands)
{
command.ExecuteLeaves();
SafeConsole.WriteColor(ConsoleColor.DarkGray, '.');
}
}
public static string GenerateViewCodes(params string[] tableNames) => tableNames.ToString(tn => GenerateViewCode(tn), "\r\n\r\n");
public static string GenerateViewCode(string tableName) => GenerateViewCode(ObjectName.Parse(tableName, Schema.Current.Settings.IsPostgres));
public static string GenerateViewCode(ObjectName tableName)
{
var columns =
(from t in Database.View<SysTables>()
where t.name == tableName.Name && t.Schema().name == tableName.Schema.Name
from c in t.Columns()
select new DiffColumn
{
Name = c.name,
DbType = new AbstractDbType(SysTablesSchema.ToSqlDbType(c.Type()!.name)),
UserTypeName = null,
PrimaryKey = t.Indices().Any(i => i.is_primary_key && i.IndexColumns().Any(ic => ic.column_id == c.column_id)),
Nullable = c.is_nullable,
}).ToList();
StringBuilder sb = new StringBuilder();
sb.AppendLine($@"[TableName(""{tableName.ToString()}"")]");
sb.AppendLine($"public class {tableName.Name} : IView");
sb.AppendLine(@"{");
foreach (var c in columns)
{
sb.Append(GenerateColumnCode(c).Indent(4));
}
sb.AppendLine(@"}");
return sb.ToString();
}
private static string GenerateColumnCode(DiffColumn c)
{
var type = CodeGeneration.CodeGenerator.Entities.GetValueType(c);
StringBuilder sb = new StringBuilder();
if (c.PrimaryKey)
sb.AppendLine("[ViewPrimaryKey]");
sb.AppendLine($"public {type.TypeName()}{(c.Nullable ? "?" : "")} {c.Name};");
return sb.ToString();
}
public static SqlPreCommand? TotalGenerationScript()
{
return Schema.Current.GenerationScipt();
}
public static SqlPreCommand? TotalSynchronizeScript(bool interactive = true, bool schemaOnly = false)
{
var command = Schema.Current.SynchronizationScript(interactive, schemaOnly);
if (command == null)
return null;
return SqlPreCommand.Combine(Spacing.Double,
new SqlPreCommandSimple(SynchronizerMessage.StartOfSyncScriptGeneratedOn0.NiceToString().FormatWith(DateTime.Now)),
new SqlPreCommandSimple("use {0}".FormatWith(Connector.Current.DatabaseName())),
command,
new SqlPreCommandSimple(SynchronizerMessage.EndOfSyncScript.NiceToString()));
}
public static void CreateTemporaryTable<T>()
where T : IView
{
if (!Transaction.HasTransaction)
throw new InvalidOperationException("You need to be inside of a transaction to create a Temporary table");
var view = Schema.Current.View<T>();
if (!view.Name.IsTemporal)
throw new InvalidOperationException($"Temporary tables should start with # (i.e. #myTable). Consider using {nameof(TableNameAttribute)}");
Connector.Current.SqlBuilder.CreateTableSql(view).ExecuteLeaves();
}
public static IDisposable TemporaryTable<T>() where T : IView
{
CreateTemporaryTable<T>();
return new Disposable(() => DropTemporaryTable<T>());
}
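// Illustrative usage (not part of the original source); MyTempView is a hypothetical IView whose
// TableNameAttribute starts with '#':
//
//   using (Transaction tr = new Transaction())
//   using (Administrator.TemporaryTable<MyTempView>())
//   {
//       // bulk-insert into the temp table, join against it in queries, etc.
//       tr.Commit();
//   }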
public static void DropTemporaryTable<T>()
where T : IView
{
if (!Transaction.HasTransaction)
throw new InvalidOperationException("You need to be inside of a transaction to create a Temporary table");
var view = Schema.Current.View<T>();
if (!view.Name.IsTemporal)
throw new InvalidOperationException($"Temporary tables should start with # (i.e. #myTable). Consider using {nameof(TableNameAttribute)}");
Connector.Current.SqlBuilder.DropTable(view.Name).ExecuteNonQuery();
}
public static void CreateTemporaryIndex<T>(Expression<Func<T, object>> fields, bool unique = false)
where T : IView
{
var view = Schema.Current.View<T>();
IColumn[] columns = IndexKeyColumns.Split(view, fields);
var index = unique ?
new UniqueTableIndex(view, columns) :
new TableIndex(view, columns);
Connector.Current.SqlBuilder.CreateIndex(index, checkUnique: null).ExecuteLeaves();
}
internal static readonly ThreadVariable<Func<ObjectName, ObjectName>?> registeredViewNameReplacer = Statics.ThreadVariable<Func<ObjectName, ObjectName>?>("overrideDatabase");
public static IDisposable OverrideViewNameReplacer(Func<ObjectName, ObjectName> replacer)
{
var old = registeredViewNameReplacer.Value;
registeredViewNameReplacer.Value = old == null ? replacer : n =>
{
var rep = replacer(n);
if (rep != n)
return rep;
return old!(n);
};
return new Disposable(() => registeredViewNameReplacer.Value = old);
}
public static ObjectName ReplaceViewName(ObjectName name)
{
var replacer = registeredViewNameReplacer.Value;
return replacer == null ? name : replacer(name);
}
public static IDisposable OverrideDatabaseInSysViews(DatabaseName? database)
{
return OverrideViewNameReplacer(n => n.Schema.Name == "sys" ? n.OnDatabase(database) : n);
}
public static bool ExistsTable<T>()
where T : Entity
{
return ExistsTable(Schema.Current.Table<T>());
}
public static bool ExistsTable(Type type)
{
return ExistsTable(Schema.Current.Table(type));
}
public static bool ExistsTable(ITable table)
{
SchemaName schema = table.Name.Schema;
if (Schema.Current.Settings.IsPostgres)
{
return (from t in Database.View<PgClass>()
join ns in Database.View<PgNamespace>() on t.relnamespace equals ns.oid
where t.relname == table.Name.Name && ns.nspname == schema.Name
select t).Any();
}
if (schema.Database != null && schema.Database.Server != null && !Database.View<SysServers>().Any(ss => ss.name == schema.Database!.Server!.Name))
return false;
if (schema.Database != null && !Database.View<SysDatabases>().Any(ss => ss.name == schema.Database!.Name))
return false;
using (schema.Database == null ? null : Administrator.OverrideDatabaseInSysViews(schema.Database))
{
return (from t in Database.View<SysTables>()
join s in Database.View<SysSchemas>() on t.schema_id equals s.schema_id
where t.name == table.Name.Name && s.name == schema.Name
select t).Any();
}
}
public static List<T> TryRetrieveAll<T>(Replacements replacements)
where T : Entity
{
return TryRetrieveAll(typeof(T), replacements).Cast<T>().ToList();
}
public static List<Entity> TryRetrieveAll(Type type, Replacements replacements)
{
Table table = Schema.Current.Table(type);
using (Synchronizer.UseOldTableName(table, replacements))
using (ExecutionMode.DisableCache())
{
if (ExistsTable(table))
return Database.RetrieveAll(type);
return new List<Entity>();
}
}
public static IDisposable DisableIdentity<T>()
where T : Entity
{
Table table = Schema.Current.Table<T>();
return DisableIdentity(table);
}
public static IDisposable? DisableIdentity<T, V>(Expression<Func<T, MList<V>>> mListField)
where T : Entity
{
TableMList table = ((FieldMList)Schema.Current.Field(mListField)).TableMList;
return DisableIdentity(table);
}
public static bool IsIdentityBehaviourDisabled(ITable table)
{
return identityBehaviourDisabled.Value?.Contains(table) == true;
}
static ThreadVariable<ImmutableStack<ITable>?> identityBehaviourDisabled = Statics.ThreadVariable<ImmutableStack<ITable>?>("identityBehaviourOverride");
public static IDisposable DisableIdentity(ITable table, bool behaviourOnly = false)
{
if (!table.IdentityBehaviour)
throw new InvalidOperationException("Identity is false already");
var sqlBuilder = Connector.Current.SqlBuilder;
var oldValue = identityBehaviourDisabled.Value ?? ImmutableStack<ITable>.Empty;
identityBehaviourDisabled.Value = oldValue.Push(table);
if (table.PrimaryKey.Default == null && !sqlBuilder.IsPostgres && !behaviourOnly)
sqlBuilder.SetIdentityInsert(table.Name, true).ExecuteNonQuery();
return new Disposable(() =>
{
identityBehaviourDisabled.Value = oldValue.IsEmpty ? null : oldValue;
if (table.PrimaryKey.Default == null && !sqlBuilder.IsPostgres && !behaviourOnly)
sqlBuilder.SetIdentityInsert(table.Name, false).ExecuteNonQuery();
});
}
public static void SaveDisableIdentity<T>(T entities)
where T : Entity
{
using (Transaction tr = new Transaction())
using (Administrator.DisableIdentity<T>())
{
Database.Save(entities);
tr.Commit();
}
}
public static void SaveListDisableIdentity<T>(IEnumerable<T> entities)
where T : Entity
{
using (Transaction tr = new Transaction())
using (Administrator.DisableIdentity<T>())
{
Database.SaveList(entities);
tr.Commit();
}
}
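// Illustrative usage (not part of the original source); importedCountries is a hypothetical list of
// entities that already carry explicit Ids (e.g. when copying data between environments), which are
// preserved because identity insert is enabled for the duration of the save:
//
//   Administrator.SaveListDisableIdentity(importedCountries);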
public static int UnsafeDeleteDuplicates<E, K>(this IQueryable<E> query, Expression<Func<E, K>> key, string? message = null)
where E : Entity
{
return (from e in query
where !query.GroupBy(key).Select(gr => gr.Min(a => a.id)).Contains(e.Id)
select e).UnsafeDelete(message);
}
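// Illustrative usage (not part of the original source); PersonEntity and its Email property are
// hypothetical. For every group sharing the same key, the row with the lowest Id is kept and the
// rest are deleted:
//
//   Database.Query<PersonEntity>().UnsafeDeleteDuplicates(p => p.Email);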
public static int UnsafeDeleteMListDuplicates<E, V, K>(this IQueryable<MListElement<E,V>> query, Expression<Func<MListElement<E, V>, K>> key, string? message = null)
where E : Entity
{
return (from e in query
where !query.GroupBy(key).Select(gr => gr.Min(a => a.RowId)).Contains(e.RowId)
select e).UnsafeDeleteMList(message);
}
public static SqlPreCommandSimple QueryPreCommand<T>(IQueryable<T> query)
{
var prov = ((DbQueryProvider)query.Provider);
return prov.Translate(query.Expression, tr => tr.MainCommand);
}
public static SqlPreCommandSimple? UnsafeDeletePreCommand<T>(IQueryable<T> query)
where T : Entity
{
if (!Administrator.ExistsTable<T>() || !query.Any())
return null;
var prov = ((DbQueryProvider)query.Provider);
using (PrimaryKeyExpression.PreferVariableName())
return prov.Delete<SqlPreCommandSimple>(query, cm => cm, removeSelectRowCount: true);
}
public static SqlPreCommandSimple? UnsafeDeletePreCommandMList<E, V>(Expression<Func<E, MList<V>>> mListProperty, IQueryable<MListElement<E, V>> query)
where E : Entity
{
if (!Administrator.ExistsTable(Schema.Current.TableMList(mListProperty)) || !query.Any())
return null;
var prov = ((DbQueryProvider)query.Provider);
using (PrimaryKeyExpression.PreferVariableName())
return prov.Delete<SqlPreCommandSimple>(query, cm => cm, removeSelectRowCount: true);
}
public static SqlPreCommandSimple UnsafeUpdatePartPreCommand(IUpdateable update)
{
var prov = ((DbQueryProvider)update.Query.Provider);
return prov.Update(update, sql => sql, removeSelectRowCount: true);
}
public static void UpdateToStrings<T>() where T : Entity, new()
{
UpdateToStrings(Database.Query<T>());
}
public static void UpdateToStrings<T>(IQueryable<T> query) where T : Entity, new()
{
SafeConsole.WriteLineColor(ConsoleColor.Cyan, "Saving toStr for {0}".FormatWith(typeof(T).TypeName()));
if (!query.Any())
return;
query.Select(a => a.Id).IntervalsOf(100).ProgressForeach(inter => inter.ToString(), (interval) =>
{
var list = query.Where(a => interval.Contains(a.Id)).ToList();
foreach (var item in list)
{
if (item.ToString() != item.toStr)
item.InDB().UnsafeUpdate()
.Set(a => a.toStr, a => item.ToString())
.Execute();
}
});
}
public static void UpdateToStrings<T>(Expression<Func<T, string?>> expression) where T : Entity, new()
{
UpdateToStrings(Database.Query<T>(), expression);
}
public static void UpdateToStrings<T>(IQueryable<T> query, Expression<Func<T, string?>> expression) where T : Entity, new()
{
SafeConsole.WaitRows("UnsafeUpdate toStr for {0}".FormatWith(typeof(T).TypeName()), () =>
query.UnsafeUpdate().Set(a => a.toStr, expression).Execute());
}
public static void UpdateToString<T>(T entity) where T : Entity, new()
{
entity.InDB().UnsafeUpdate()
.Set(e => e.toStr, e => entity.ToString())
.Execute();
}
public static void UpdateToString<T>(T entity, Expression<Func<T, string?>> expression) where T : Entity, new()
{
entity.InDB().UnsafeUpdate()
.Set(e => e.toStr, expression)
.Execute();
}
public static IDisposable PrepareForBatchLoadScope<T>(bool disableForeignKeys = true, bool disableMultipleIndexes = true, bool disableUniqueIndexes = false) where T : Entity
{
Table table = Schema.Current.Table(typeof(T));
return table.PrepareForBatchLoadScope(disableForeignKeys, disableMultipleIndexes, disableUniqueIndexes);
}
static IDisposable PrepareForBatchLoadScope(this Table table, bool disableForeignKeys, bool disableMultipleIndexes, bool disableUniqueIndexes)
{
IDisposable disp = PrepareTableForBatchLoadScope(table, disableForeignKeys, disableMultipleIndexes, disableUniqueIndexes);
var list = table.TablesMList().Select(rt => PrepareTableForBatchLoadScope(rt, disableForeignKeys, disableMultipleIndexes, disableUniqueIndexes)).ToList();
return new Disposable(() =>
{
disp.Dispose();
foreach (var d in list)
d.Dispose();
});
}
public static IDisposable PrepareTableForBatchLoadScope(ITable table, bool disableForeignKeys, bool disableMultipleIndexes, bool disableUniqueIndexes)
{
var sqlBuilder = Connector.Current.SqlBuilder;
SafeConsole.WriteColor(ConsoleColor.Magenta, table.Name + ":");
Action onDispose = () => SafeConsole.WriteColor(ConsoleColor.Magenta, table.Name + ":");
if (disableForeignKeys)
{
SafeConsole.WriteColor(ConsoleColor.DarkMagenta, " NOCHECK Foreign Keys");
Executor.ExecuteNonQuery("ALTER TABLE {0} NOCHECK CONSTRAINT ALL".FormatWith(table.Name));
onDispose += () =>
{
SafeConsole.WriteColor(ConsoleColor.DarkMagenta, " RE-CHECK Foreign Keys");
Executor.ExecuteNonQuery("ALTER TABLE {0} WITH CHECK CHECK CONSTRAINT ALL".FormatWith(table.Name));
};
}
if (disableMultipleIndexes)
{
var multiIndexes = GetIndixesNames(table, unique: false);
if (multiIndexes.Any())
{
SafeConsole.WriteColor(ConsoleColor.DarkMagenta, " DISABLE Multiple Indexes");
multiIndexes.Select(i => sqlBuilder.DisableIndex(table.Name, i)).Combine(Spacing.Simple)!.ExecuteLeaves();
Executor.ExecuteNonQuery(multiIndexes.ToString(i => "ALTER INDEX [{0}] ON {1} DISABLE".FormatWith(i, table.Name), "\r\n"));
onDispose += () =>
{
SafeConsole.WriteColor(ConsoleColor.DarkMagenta, " REBUILD Multiple Indexes");
multiIndexes.Select(i => sqlBuilder.RebuildIndex(table.Name, i)).Combine(Spacing.Simple)!.ExecuteLeaves();
};
}
}
if (disableUniqueIndexes)
{
var uniqueIndexes = GetIndixesNames(table, unique: true);
if (uniqueIndexes.Any())
{
SafeConsole.WriteColor(ConsoleColor.DarkMagenta, " DISABLE Unique Indexes");
uniqueIndexes.Select(i => sqlBuilder.DisableIndex(table.Name, i)).Combine(Spacing.Simple)!.ExecuteLeaves();
onDispose += () =>
{
SafeConsole.WriteColor(ConsoleColor.DarkMagenta, " REBUILD Unique Indexes");
uniqueIndexes.Select(i => sqlBuilder.RebuildIndex(table.Name, i)).Combine(Spacing.Simple)!.ExecuteLeaves();
};
}
}
Console.WriteLine();
onDispose += () => Console.WriteLine();
return new Disposable(onDispose);
}
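// Illustrative usage (not part of the original source); OrderEntity is a hypothetical entity type:
//
//   using (Administrator.PrepareForBatchLoadScope<OrderEntity>(
//       disableForeignKeys: true, disableMultipleIndexes: true, disableUniqueIndexes: false))
//   {
//       // perform the bulk load; foreign keys are re-checked and indexes rebuilt on dispose
//   }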
public static void TruncateTable<T>() where T : Entity => TruncateTable(typeof(T));
public static void TruncateTable(Type type)
{
var table = Schema.Current.Table(type);
using (Transaction tr = new Transaction())
{
table.TablesMList().ToList().ForEach(mlist =>
{
TruncateTableSystemVersioning(mlist);
});
using (DropAndCreateIncommingForeignKeys(table))
TruncateTableSystemVersioning(table);
tr.Commit();
}
}
public static void TruncateTableSystemVersioning(ITable table)
{
var sqlBuilder = Connector.Current.SqlBuilder;
if(table.SystemVersioned == null)
sqlBuilder.TruncateTable(table.Name).ExecuteLeaves();
else
{
sqlBuilder.AlterTableDisableSystemVersioning(table.Name).ExecuteLeaves();
sqlBuilder.TruncateTable(table.Name).ExecuteLeaves();
sqlBuilder.TruncateTable(table.SystemVersioned.TableName).ExecuteLeaves();
sqlBuilder.AlterTableEnableSystemVersioning(table).ExecuteLeaves();
}
}
public static IDisposable DropAndCreateIncommingForeignKeys(Table table)
{
var sqlBuilder = Connector.Current.SqlBuilder;
var isPostgres = Schema.Current.Settings.IsPostgres;
var foreignKeys = Administrator.OverrideDatabaseInSysViews(table.Name.Schema.Database).Using(_ =>
(from targetTable in Database.View<SysTables>()
where targetTable.name == table.Name.Name && targetTable.Schema().name == table.Name.Schema.Name
from ifk in targetTable.IncommingForeignKeys()
let parentTable = ifk.ParentTable()
select new
{
Name = ifk.name,
ParentTable = new ObjectName(new SchemaName(table.Name.Schema.Database, parentTable.Schema().name, isPostgres), parentTable.name, isPostgres),
ParentColumn = parentTable.Columns().SingleEx(c => c.column_id == ifk.ForeignKeyColumns().SingleEx().parent_column_id).name,
}).ToList());
foreignKeys.ForEach(fk => sqlBuilder.AlterTableDropConstraint(fk.ParentTable!, fk.Name! /*CSBUG*/).ExecuteLeaves());
return new Disposable(() =>
{
foreignKeys.ToList().ForEach(fk => sqlBuilder.AlterTableAddConstraintForeignKey(fk.ParentTable!, fk.ParentColumn!, table.Name, table.PrimaryKey.Name)!.ExecuteLeaves());
});
}
public static IDisposable DisableUniqueIndex(UniqueTableIndex index)
{
var sqlBuilder = Connector.Current.SqlBuilder;
SafeConsole.WriteLineColor(ConsoleColor.DarkMagenta, " DISABLE Unique Index " + index.IndexName);
sqlBuilder.DisableIndex(index.Table.Name, index.IndexName).ExecuteLeaves();
return new Disposable(() =>
{
SafeConsole.WriteLineColor(ConsoleColor.DarkMagenta, " REBUILD Unique Index " + index.IndexName);
sqlBuilder.RebuildIndex(index.Table.Name, index.IndexName).ExecuteLeaves();
});
}
public static List<string> GetIndixesNames(this ITable table, bool unique)
{
using (OverrideDatabaseInSysViews(table.Name.Schema.Database))
{
return (from s in Database.View<SysSchemas>()
where s.name == table.Name.Schema.Name
from t in s.Tables()
where t.name == table.Name.Name
from i in t.Indices()
where i.is_unique == unique && !i.is_primary_key
select i.name).ToList();
}
}
public static void DropUniqueIndexes<T>() where T : Entity
{
var sqlBuilder = Connector.Current.SqlBuilder;
var table = Schema.Current.Table<T>();
var indexesNames = Administrator.GetIndixesNames(table, unique: true);
if (indexesNames.HasItems())
indexesNames.Select(n => sqlBuilder.DropIndex(table.Name, n)).Combine(Spacing.Simple)!.ExecuteLeaves();
}
public static void MoveAllForeignKeys<T>(Lite<T> fromEntity, Lite<T> toEntity, Func<ITable, IColumn, bool>? shouldMove = null)
where T : Entity
{
using (Transaction tr = new Transaction())
{
MoveAllForeignKeysPrivate<T>(fromEntity, toEntity, shouldMove).Select(a => a.UpdateScript).Combine(Spacing.Double)!.ExecuteLeaves();
tr.Commit();
}
}
public static SqlPreCommand? MoveAllForeignKeysScript<T>(Lite<T> fromEntity, Lite<T> toEntity, Func<ITable, IColumn, bool>? shouldMove = null)
where T : Entity
{
return MoveAllForeignKeysPrivate<T>(fromEntity, toEntity, shouldMove).Select(a => a.UpdateScript).Combine(Spacing.Double);
}
public static void MoveAllForeignKeysConsole<T>(Lite<T> fromEntity, Lite<T> toEntity, Func<ITable, IColumn, bool>? shouldMove = null)
where T : Entity
{
var tuples = MoveAllForeignKeysPrivate<T>(fromEntity, toEntity, shouldMove);
foreach (var t in tuples)
{
SafeConsole.WaitRows("{0}.{1}".FormatWith(t.ColumnTable.Table.Name.Name, t.ColumnTable.Column.Name), () => t.UpdateScript.ExecuteNonQuery());
}
}
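// Illustrative usage (not part of the original source); CompanyEntity and the two Lite references are
// hypothetical. Every foreign key pointing at duplicateLite is repointed to survivorLite:
//
//   Administrator.MoveAllForeignKeys<CompanyEntity>(duplicateLite, survivorLite);
//
// MoveAllForeignKeysScript returns the same UPDATE statements without executing them, and
// MoveAllForeignKeysConsole executes them table by table with progress output.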
class ColumnTableScript
{
public ColumnTable ColumnTable;
public SqlPreCommandSimple UpdateScript;
public ColumnTableScript(ColumnTable columnTable, SqlPreCommandSimple updateScript)
{
ColumnTable = columnTable;
UpdateScript = updateScript;
}
}
static List<ColumnTableScript> MoveAllForeignKeysPrivate<T>(Lite<T> fromEntity, Lite<T> toEntity, Func<ITable, IColumn, bool>? shouldMove)
where T : Entity
{
if (fromEntity.GetType() != toEntity.GetType())
throw new ArgumentException("fromEntity and toEntity should have the same type");
if (fromEntity.Is(toEntity))
throw new ArgumentException("fromEntity and toEntity should not be the same ");
Schema s = Schema.Current;
Table refTable = s.Table(typeof(T));
List<ColumnTable> columns = GetColumnTables(s, refTable);
if (shouldMove != null)
columns = columns.Where(p => shouldMove!(p.Table, p.Column)).ToList();
var isPostgres = Schema.Current.Settings.IsPostgres;
var pb = Connector.Current.ParameterBuilder;
return columns.Select(ct => new ColumnTableScript(ct, new SqlPreCommandSimple("UPDATE {0}\r\nSET {1} = @toEntity\r\nWHERE {1} = @fromEntity".FormatWith(ct.Table.Name, ct.Column.Name.SqlEscape(isPostgres)), new List<DbParameter>
{
pb.CreateReferenceParameter("@fromEntity", fromEntity.Id, ct.Column),
pb.CreateReferenceParameter("@toEntity", toEntity.Id, ct.Column),
}))).ToList();
}
class ColumnTable
{
public ITable Table;
public IColumn Column;
public ColumnTable(ITable table, IColumn column)
{
Table = table;
Column = column;
}
}
static ConcurrentDictionary<Table, List<ColumnTable>> columns = new ConcurrentDictionary<Table, List<ColumnTable>>();
static List<ColumnTable> GetColumnTables(Schema schema, Table refTable)
{
return columns.GetOrAdd(refTable, rt =>
{
return (from t in schema.GetDatabaseTables()
from c in t.Columns.Values
where c.ReferenceTable == rt
select new ColumnTable(t,c))
.ToList();
});
}
public static T GetSetTicks<T>(this T entity) where T : Entity
{
entity.Ticks = entity.InDBEntity(e => e.Ticks);
return entity;
}
public static SqlPreCommand DeleteWhereScript(Table table, IColumn column, PrimaryKey id)
{
if (table.TablesMList().Any())
throw new InvalidOperationException($"DeleteWhereScript can not be used for {table.Type.Name} because contains MLists");
if(id.VariableName.HasText())
return new SqlPreCommandSimple("DELETE FROM {0} WHERE {1} = {2};".FormatWith(table.Name, column.Name, id.VariableName));
var param = Connector.Current.ParameterBuilder.CreateReferenceParameter("@id", id, column);
return new SqlPreCommandSimple("DELETE FROM {0} WHERE {1} = {2}:".FormatWith(table.Name, column.Name, param.ParameterName), new List<DbParameter> { param });
}
public static SqlPreCommand DeleteWhereScript<T, R>(Expression<Func<T, R>> field, R value)
where T : Entity
where R : Entity
{
var table = Schema.Current.Table<T>();
var column = (IColumn)Schema.Current.Field(field);
return DeleteWhereScript(table, column, value.Id);
}
}
}
| |
#region License
/*
* All content copyright Marko Lahma, unless otherwise indicated. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
*/
#endregion
using System;
using System.Runtime.Serialization;
using System.Text;
namespace Quartz.Impl.Calendar
{
/// <summary>
/// This implementation of the Calendar excludes the set of times expressed by a
/// given CronExpression.
/// </summary>
/// <remarks>
/// For example, you could use this calendar to exclude all but business hours (8AM - 5PM) every
/// day using the expression "* * 0-7,18-23 ? * *".
/// <para>
/// It is important to remember that the cron expression here describes a set of
    /// times to be <i>excluded</i> from firing, whereas the cron expression in
    /// CronTrigger describes a set of times that can
/// be <i>included</i> for firing. Thus, if a <see cref="ICronTrigger" /> has a
/// given cron expression and is associated with a <see cref="CronCalendar" /> with
/// the <i>same</i> expression, the calendar will exclude all the times the
/// trigger includes, and they will cancel each other out.
/// </para>
/// </remarks>
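    /// <example>
    /// Illustrative sketch (not part of the original documentation): exclude everything outside
    /// business hours so an associated trigger can only fire between 8AM and 5PM.
    /// <code>
    /// var calendar = new CronCalendar("* * 0-7,18-23 ? * *");
    /// bool canFireNow = calendar.IsTimeIncluded(DateTimeOffset.UtcNow);
    /// </code>
    /// </example>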
/// <author>Aaron Craven</author>
/// <author>Marko Lahma (.NET)</author>
[Serializable]
public class CronCalendar : BaseCalendar
{
private CronExpression cronExpression = null!;
// ReSharper disable once UnusedMember.Local
private CronCalendar()
{
}
/// <summary>
/// Initializes a new instance of the <see cref="CronCalendar"/> class.
/// </summary>
/// <param name="expression">a string representation of the desired cron expression</param>
public CronCalendar(string expression) : this(null, expression)
{
}
/// <summary>
/// Create a <see cref="CronCalendar" /> with the given cron expression and
/// <see cref="BaseCalendar" />.
/// </summary>
/// <param name="baseCalendar">
/// the base calendar for this calendar instance
/// see BaseCalendar for more information on base
/// calendar functionality
/// </param>
/// <param name="expression">a string representation of the desired cron expression</param>
public CronCalendar(ICalendar? baseCalendar, string expression) : this(baseCalendar, expression, null)
{
}
/// <summary>
/// Create a <see cref="CronCalendar" /> with the given cron expression and
/// <see cref="BaseCalendar" />.
/// </summary>
/// <param name="baseCalendar">
/// the base calendar for this calendar instance
/// see BaseCalendar for more information on base
/// calendar functionality
/// </param>
/// <param name="expression">a string representation of the desired cron expression</param>
/// <param name="timeZone"></param>
public CronCalendar(ICalendar? baseCalendar, string expression, TimeZoneInfo? timeZone) : base(baseCalendar, timeZone)
{
cronExpression = new CronExpression(expression);
}
/// <summary>
/// Serialization constructor.
/// </summary>
/// <param name="info"></param>
/// <param name="context"></param>
protected CronCalendar(SerializationInfo info, StreamingContext context) : base(info, context)
{
int version;
try
{
version = info.GetInt32("version");
}
catch
{
version = 0;
}
switch (version)
{
case 0:
case 1:
cronExpression = (CronExpression) info.GetValue("cronExpression", typeof(CronExpression))!;
break;
default:
throw new NotSupportedException("Unknown serialization version");
}
}
[System.Security.SecurityCritical]
public override void GetObjectData(SerializationInfo info, StreamingContext context)
{
base.GetObjectData(info, context);
info.AddValue("version", 1);
info.AddValue("cronExpression", cronExpression);
}
public override TimeZoneInfo TimeZone
{
get => cronExpression.TimeZone;
set => cronExpression.TimeZone = value;
}
/// <summary>
/// Determine whether the given time is 'included' by the
/// Calendar.
/// </summary>
/// <param name="timeUtc">the time to test</param>
/// <returns>a boolean indicating whether the specified time is 'included' by the CronCalendar</returns>
public override bool IsTimeIncluded(DateTimeOffset timeUtc)
{
if (CalendarBase != null &&
CalendarBase.IsTimeIncluded(timeUtc) == false)
{
return false;
}
return !cronExpression.IsSatisfiedBy(timeUtc);
}
/// <summary>
/// Determine the next time that is 'included' by the
        /// Calendar after the given time. Return the original value if timeUtc is
/// included. Return 0 if all days are excluded.
/// </summary>
/// <param name="timeUtc"></param>
/// <returns></returns>
public override DateTimeOffset GetNextIncludedTimeUtc(DateTimeOffset timeUtc)
{
            DateTimeOffset nextIncludedTime = timeUtc.AddMilliseconds(1); // plus one millisecond
while (!IsTimeIncluded(nextIncludedTime))
{
//If the time is in a range excluded by this calendar, we can
// move to the end of the excluded time range and continue testing
// from there. Otherwise, if nextIncludedTime is excluded by the
// baseCalendar, ask it the next time it includes and begin testing
// from there. Failing this, add one millisecond and continue
// testing.
if (cronExpression.IsSatisfiedBy(nextIncludedTime))
{
nextIncludedTime = cronExpression.GetNextValidTimeAfter(nextIncludedTime)!.Value;
}
else if (CalendarBase != null &&
!CalendarBase.IsTimeIncluded(nextIncludedTime))
{
nextIncludedTime =
CalendarBase.GetNextIncludedTimeUtc(nextIncludedTime);
}
else
{
nextIncludedTime = nextIncludedTime.AddMilliseconds(1);
}
}
return nextIncludedTime;
}
/// <summary>
/// Creates a new object that is a copy of the current instance.
/// </summary>
/// <returns>A new object that is a copy of this instance.</returns>
public override ICalendar Clone()
{
var clone = new CronCalendar();
clone.cronExpression = (CronExpression) cronExpression.Clone();
CloneFields(clone);
return clone;
}
/// <summary>
/// Returns a <see cref="T:System.String"/> that represents the current <see cref="T:System.Object"/>.
/// </summary>
/// <returns>
/// A <see cref="T:System.String"/> that represents the current <see cref="T:System.Object"/>.
/// </returns>
public override string ToString()
{
StringBuilder buffer = new StringBuilder();
buffer.Append("base calendar: [");
if (CalendarBase != null)
{
buffer.Append(CalendarBase);
}
else
{
buffer.Append("null");
}
buffer.Append("], excluded cron expression: '");
buffer.Append(cronExpression);
buffer.Append("'");
return buffer.ToString();
}
/// <summary>
/// Returns the object representation of the cron expression that defines the
/// dates and times this calendar excludes.
/// </summary>
public CronExpression CronExpression
{
get => cronExpression;
set => cronExpression = value ?? throw new ArgumentException("expression cannot be null");
}
/// <summary>
/// Sets the cron expression for the calendar to a new value.
/// </summary>
/// <param name="expression">The expression.</param>
public void SetCronExpressionString(string expression)
{
CronExpression newExp = new CronExpression(expression);
cronExpression = newExp;
}
public override int GetHashCode()
{
int baseHash = 0;
if (CalendarBase != null)
{
baseHash = CalendarBase.GetHashCode();
}
return CronExpression.GetHashCode() + 5*baseHash;
}
public bool Equals(CronCalendar obj)
{
if (obj == null)
{
return false;
}
bool baseEqual = CalendarBase == null || CalendarBase.Equals(obj.CalendarBase);
return baseEqual && CronExpression.Equals(obj.CronExpression);
}
public override bool Equals(object? obj)
{
if (!(obj is CronCalendar))
{
return false;
}
return Equals((CronCalendar) obj);
}
}
}
| |
//
// Author:
// Jb Evain ([email protected])
//
// Copyright (c) 2008 - 2015 Jb Evain
// Copyright (c) 2008 - 2011 Novell, Inc.
//
// Licensed under the MIT/X11 license.
//
using System;
using Zenject.ReflectionBaking.Mono.Cecil.Metadata;
namespace Zenject.ReflectionBaking.Mono.Cecil {
static partial class Mixin {
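		// Decodes an ECMA-335 compressed unsigned integer: the two high bits of the first byte
		// select a 1-byte (0xxxxxxx), 2-byte (10xxxxxx) or 4-byte (110xxxxx) big-endian form.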
public static uint ReadCompressedUInt32 (this byte [] data, ref int position)
{
uint integer;
if ((data [position] & 0x80) == 0) {
integer = data [position];
position++;
} else if ((data [position] & 0x40) == 0) {
integer = (uint) (data [position] & ~0x80) << 8;
integer |= data [position + 1];
position += 2;
} else {
integer = (uint) (data [position] & ~0xc0) << 24;
integer |= (uint) data [position + 1] << 16;
integer |= (uint) data [position + 2] << 8;
integer |= (uint) data [position + 3];
position += 4;
}
return integer;
}
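		// A coded index packs a row id and a table tag into one value: the low bits (1, 2, 3 or 5,
		// depending on the CodedIndex kind) select the target table; the remaining bits are the RID.
		// This method undoes that packing; CompressMetadataToken below performs the inverse.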
public static MetadataToken GetMetadataToken (this CodedIndex self, uint data)
{
uint rid;
TokenType token_type;
switch (self) {
case CodedIndex.TypeDefOrRef:
rid = data >> 2;
switch (data & 3) {
case 0:
token_type = TokenType.TypeDef; goto ret;
case 1:
token_type = TokenType.TypeRef; goto ret;
case 2:
token_type = TokenType.TypeSpec; goto ret;
default:
goto exit;
}
case CodedIndex.HasConstant:
rid = data >> 2;
switch (data & 3) {
case 0:
token_type = TokenType.Field; goto ret;
case 1:
token_type = TokenType.Param; goto ret;
case 2:
token_type = TokenType.Property; goto ret;
default:
goto exit;
}
case CodedIndex.HasCustomAttribute:
rid = data >> 5;
switch (data & 31) {
case 0:
token_type = TokenType.Method; goto ret;
case 1:
token_type = TokenType.Field; goto ret;
case 2:
token_type = TokenType.TypeRef; goto ret;
case 3:
token_type = TokenType.TypeDef; goto ret;
case 4:
token_type = TokenType.Param; goto ret;
case 5:
token_type = TokenType.InterfaceImpl; goto ret;
case 6:
token_type = TokenType.MemberRef; goto ret;
case 7:
token_type = TokenType.Module; goto ret;
case 8:
token_type = TokenType.Permission; goto ret;
case 9:
token_type = TokenType.Property; goto ret;
case 10:
token_type = TokenType.Event; goto ret;
case 11:
token_type = TokenType.Signature; goto ret;
case 12:
token_type = TokenType.ModuleRef; goto ret;
case 13:
token_type = TokenType.TypeSpec; goto ret;
case 14:
token_type = TokenType.Assembly; goto ret;
case 15:
token_type = TokenType.AssemblyRef; goto ret;
case 16:
token_type = TokenType.File; goto ret;
case 17:
token_type = TokenType.ExportedType; goto ret;
case 18:
token_type = TokenType.ManifestResource; goto ret;
case 19:
token_type = TokenType.GenericParam; goto ret;
default:
goto exit;
}
case CodedIndex.HasFieldMarshal:
rid = data >> 1;
switch (data & 1) {
case 0:
token_type = TokenType.Field; goto ret;
case 1:
token_type = TokenType.Param; goto ret;
default:
goto exit;
}
case CodedIndex.HasDeclSecurity:
rid = data >> 2;
switch (data & 3) {
case 0:
token_type = TokenType.TypeDef; goto ret;
case 1:
token_type = TokenType.Method; goto ret;
case 2:
token_type = TokenType.Assembly; goto ret;
default:
goto exit;
}
case CodedIndex.MemberRefParent:
rid = data >> 3;
switch (data & 7) {
case 0:
token_type = TokenType.TypeDef; goto ret;
case 1:
token_type = TokenType.TypeRef; goto ret;
case 2:
token_type = TokenType.ModuleRef; goto ret;
case 3:
token_type = TokenType.Method; goto ret;
case 4:
token_type = TokenType.TypeSpec; goto ret;
default:
goto exit;
}
case CodedIndex.HasSemantics:
rid = data >> 1;
switch (data & 1) {
case 0:
token_type = TokenType.Event; goto ret;
case 1:
token_type = TokenType.Property; goto ret;
default:
goto exit;
}
case CodedIndex.MethodDefOrRef:
rid = data >> 1;
switch (data & 1) {
case 0:
token_type = TokenType.Method; goto ret;
case 1:
token_type = TokenType.MemberRef; goto ret;
default:
goto exit;
}
case CodedIndex.MemberForwarded:
rid = data >> 1;
switch (data & 1) {
case 0:
token_type = TokenType.Field; goto ret;
case 1:
token_type = TokenType.Method; goto ret;
default:
goto exit;
}
case CodedIndex.Implementation:
rid = data >> 2;
switch (data & 3) {
case 0:
token_type = TokenType.File; goto ret;
case 1:
token_type = TokenType.AssemblyRef; goto ret;
case 2:
token_type = TokenType.ExportedType; goto ret;
default:
goto exit;
}
case CodedIndex.CustomAttributeType:
rid = data >> 3;
switch (data & 7) {
case 2:
token_type = TokenType.Method; goto ret;
case 3:
token_type = TokenType.MemberRef; goto ret;
default:
goto exit;
}
case CodedIndex.ResolutionScope:
rid = data >> 2;
switch (data & 3) {
case 0:
token_type = TokenType.Module; goto ret;
case 1:
token_type = TokenType.ModuleRef; goto ret;
case 2:
token_type = TokenType.AssemblyRef; goto ret;
case 3:
token_type = TokenType.TypeRef; goto ret;
default:
goto exit;
}
case CodedIndex.TypeOrMethodDef:
rid = data >> 1;
switch (data & 1) {
case 0:
token_type = TokenType.TypeDef; goto ret;
case 1:
token_type = TokenType.Method; goto ret;
default: goto exit;
}
default:
goto exit;
}
ret:
return new MetadataToken (token_type, rid);
exit:
return MetadataToken.Zero;
}
#if !READ_ONLY
public static uint CompressMetadataToken (this CodedIndex self, MetadataToken token)
{
uint ret = 0;
if (token.RID == 0)
return ret;
switch (self) {
case CodedIndex.TypeDefOrRef:
ret = token.RID << 2;
switch (token.TokenType) {
case TokenType.TypeDef:
return ret | 0;
case TokenType.TypeRef:
return ret | 1;
case TokenType.TypeSpec:
return ret | 2;
default:
goto exit;
}
case CodedIndex.HasConstant:
ret = token.RID << 2;
switch (token.TokenType) {
case TokenType.Field:
return ret | 0;
case TokenType.Param:
return ret | 1;
case TokenType.Property:
return ret | 2;
default:
goto exit;
}
case CodedIndex.HasCustomAttribute:
ret = token.RID << 5;
switch (token.TokenType) {
case TokenType.Method:
return ret | 0;
case TokenType.Field:
return ret | 1;
case TokenType.TypeRef:
return ret | 2;
case TokenType.TypeDef:
return ret | 3;
case TokenType.Param:
return ret | 4;
case TokenType.InterfaceImpl:
return ret | 5;
case TokenType.MemberRef:
return ret | 6;
case TokenType.Module:
return ret | 7;
case TokenType.Permission:
return ret | 8;
case TokenType.Property:
return ret | 9;
case TokenType.Event:
return ret | 10;
case TokenType.Signature:
return ret | 11;
case TokenType.ModuleRef:
return ret | 12;
case TokenType.TypeSpec:
return ret | 13;
case TokenType.Assembly:
return ret | 14;
case TokenType.AssemblyRef:
return ret | 15;
case TokenType.File:
return ret | 16;
case TokenType.ExportedType:
return ret | 17;
case TokenType.ManifestResource:
return ret | 18;
case TokenType.GenericParam:
return ret | 19;
default:
goto exit;
}
case CodedIndex.HasFieldMarshal:
ret = token.RID << 1;
switch (token.TokenType) {
case TokenType.Field:
return ret | 0;
case TokenType.Param:
return ret | 1;
default:
goto exit;
}
case CodedIndex.HasDeclSecurity:
ret = token.RID << 2;
switch (token.TokenType) {
case TokenType.TypeDef:
return ret | 0;
case TokenType.Method:
return ret | 1;
case TokenType.Assembly:
return ret | 2;
default:
goto exit;
}
case CodedIndex.MemberRefParent:
ret = token.RID << 3;
switch (token.TokenType) {
case TokenType.TypeDef:
return ret | 0;
case TokenType.TypeRef:
return ret | 1;
case TokenType.ModuleRef:
return ret | 2;
case TokenType.Method:
return ret | 3;
case TokenType.TypeSpec:
return ret | 4;
default:
goto exit;
}
case CodedIndex.HasSemantics:
ret = token.RID << 1;
switch (token.TokenType) {
case TokenType.Event:
return ret | 0;
case TokenType.Property:
return ret | 1;
default:
goto exit;
}
case CodedIndex.MethodDefOrRef:
ret = token.RID << 1;
switch (token.TokenType) {
case TokenType.Method:
return ret | 0;
case TokenType.MemberRef:
return ret | 1;
default:
goto exit;
}
case CodedIndex.MemberForwarded:
ret = token.RID << 1;
switch (token.TokenType) {
case TokenType.Field:
return ret | 0;
case TokenType.Method:
return ret | 1;
default:
goto exit;
}
case CodedIndex.Implementation:
ret = token.RID << 2;
switch (token.TokenType) {
case TokenType.File:
return ret | 0;
case TokenType.AssemblyRef:
return ret | 1;
case TokenType.ExportedType:
return ret | 2;
default:
goto exit;
}
case CodedIndex.CustomAttributeType:
ret = token.RID << 3;
switch (token.TokenType) {
case TokenType.Method:
return ret | 2;
case TokenType.MemberRef:
return ret | 3;
default:
goto exit;
}
case CodedIndex.ResolutionScope:
ret = token.RID << 2;
switch (token.TokenType) {
case TokenType.Module:
return ret | 0;
case TokenType.ModuleRef:
return ret | 1;
case TokenType.AssemblyRef:
return ret | 2;
case TokenType.TypeRef:
return ret | 3;
default:
goto exit;
}
case CodedIndex.TypeOrMethodDef:
ret = token.RID << 1;
switch (token.TokenType) {
case TokenType.TypeDef:
return ret | 0;
case TokenType.Method:
return ret | 1;
default:
goto exit;
}
default:
goto exit;
}
exit:
throw new ArgumentException ();
}
#endif
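		// A coded index column is stored in 2 bytes as long as the largest table it can reference
		// keeps its row count below 2^(16 - tag bits); otherwise the column widens to 4 bytes.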
public static int GetSize (this CodedIndex self, Func<Table, int> counter)
{
int bits;
Table [] tables;
switch (self) {
case CodedIndex.TypeDefOrRef:
bits = 2;
tables = new [] { Table.TypeDef, Table.TypeRef, Table.TypeSpec };
break;
case CodedIndex.HasConstant:
bits = 2;
tables = new [] { Table.Field, Table.Param, Table.Property };
break;
case CodedIndex.HasCustomAttribute:
bits = 5;
tables = new [] {
Table.Method, Table.Field, Table.TypeRef, Table.TypeDef, Table.Param, Table.InterfaceImpl, Table.MemberRef,
Table.Module, Table.DeclSecurity, Table.Property, Table.Event, Table.StandAloneSig, Table.ModuleRef,
Table.TypeSpec, Table.Assembly, Table.AssemblyRef, Table.File, Table.ExportedType,
Table.ManifestResource, Table.GenericParam
};
break;
case CodedIndex.HasFieldMarshal:
bits = 1;
tables = new [] { Table.Field, Table.Param };
break;
case CodedIndex.HasDeclSecurity:
bits = 2;
tables = new [] { Table.TypeDef, Table.Method, Table.Assembly };
break;
case CodedIndex.MemberRefParent:
bits = 3;
tables = new [] { Table.TypeDef, Table.TypeRef, Table.ModuleRef, Table.Method, Table.TypeSpec };
break;
case CodedIndex.HasSemantics:
bits = 1;
tables = new [] { Table.Event, Table.Property };
break;
case CodedIndex.MethodDefOrRef:
bits = 1;
tables = new [] { Table.Method, Table.MemberRef };
break;
case CodedIndex.MemberForwarded:
bits = 1;
tables = new [] { Table.Field, Table.Method };
break;
case CodedIndex.Implementation:
bits = 2;
tables = new [] { Table.File, Table.AssemblyRef, Table.ExportedType };
break;
case CodedIndex.CustomAttributeType:
bits = 3;
tables = new [] { Table.Method, Table.MemberRef };
break;
case CodedIndex.ResolutionScope:
bits = 2;
tables = new [] { Table.Module, Table.ModuleRef, Table.AssemblyRef, Table.TypeRef };
break;
case CodedIndex.TypeOrMethodDef:
bits = 1;
tables = new [] { Table.TypeDef, Table.Method };
break;
default:
throw new ArgumentException ();
}
int max = 0;
for (int i = 0; i < tables.Length; i++) {
max = System.Math.Max (counter (tables [i]), max);
}
return max < (1 << (16 - bits)) ? 2 : 4;
}
}
}
| |
//-----------------------------------------------------------------------------
// Copyright (c) 2012 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
function initializeForestEditor()
{
echo(" % - Initializing Forest Editor");
exec( "./forestEditor.cs" );
exec( "./forestEditorGui.gui" );
exec( "./forestEditToolbar.ed.gui" );
exec( "./forestEditorGui.cs" );
exec( "./tools.cs" );
ForestEditorGui.setVisible( false );
ForestEditorPalleteWindow.setVisible( false );
ForestEditorPropertiesWindow.setVisible( false );
ForestEditToolbar.setVisible( false );
EditorGui.add( ForestEditorGui );
EditorGui.add( ForestEditorPalleteWindow );
EditorGui.add( ForestEditorPropertiesWindow );
EditorGui.add( ForestEditToolbar );
new ScriptObject( ForestEditorPlugin )
{
superClass = "EditorPlugin";
editorGui = ForestEditorGui;
};
new SimSet(ForestTools)
{
new ForestBrushTool()
{
internalName = "BrushTool";
toolTip = "Paint Tool";
buttonImage = "tools/forest/images/brushTool";
};
new ForestSelectionTool()
{
internalName = "SelectionTool";
toolTip = "Selection Tool";
buttonImage = "tools/forest/images/selectionTool";
};
};
%map = new ActionMap();
%map.bindCmd( keyboard, "1", "ForestEditorSelectModeBtn.performClick();", "" ); // Select
%map.bindCmd( keyboard, "2", "ForestEditorMoveModeBtn.performClick();", "" ); // Move
%map.bindCmd( keyboard, "3", "ForestEditorRotateModeBtn.performClick();", "" ); // Rotate
%map.bindCmd( keyboard, "4", "ForestEditorScaleModeBtn.performClick();", "" ); // Scale
%map.bindCmd( keyboard, "5", "ForestEditorPaintModeBtn.performClick();", "" ); // Paint
%map.bindCmd( keyboard, "6", "ForestEditorEraseModeBtn.performClick();", "" ); // Erase
%map.bindCmd( keyboard, "7", "ForestEditorEraseSelectedModeBtn.performClick();", "" ); // EraseSelected
//%map.bindCmd( keyboard, "backspace", "ForestEditorGui.onDeleteKey();", "" );
//%map.bindCmd( keyboard, "delete", "ForestEditorGui.onDeleteKey();", "" );
ForestEditorPlugin.map = %map;
}
function destroyForestEditor()
{
}
// NOTE: debugging helper.
function reinitForest()
{
exec( "./main.cs" );
exec( "./forestEditorGui.cs" );
exec( "./tools.cs" );
}
function ForestEditorPlugin::onWorldEditorStartup( %this )
{
new PersistenceManager( ForestDataManager );
%brushPath = "art/forest/brushes.cs";
if ( !isFile( %brushPath ) )
createPath( %brushPath );
// This creates the ForestBrushGroup, all brushes, and elements.
exec( %brushpath );
if ( !isObject( ForestBrushGroup ) )
{
new SimGroup( ForestBrushGroup );
%this.showError = true;
}
ForestEditBrushTree.open( ForestBrushGroup );
if ( !isObject( ForestItemDataSet ) )
new SimSet( ForestItemDataSet );
ForestEditMeshTree.open( ForestItemDataSet );
// Add ourselves to the window menu.
%accel = EditorGui.addToEditorsMenu( "Forest Editor", "", ForestEditorPlugin );
// Add ourselves to the tools menu.
%tooltip = "Forest Editor (" @ %accel @ ")";
EditorGui.addToToolsToolbar( "ForestEditorPlugin", "ForestEditorPalette", expandFilename("tools/forestEditor/images/forest-editor-btn"), %tooltip );
//connect editor windows
GuiWindowCtrl::attach( ForestEditorPropertiesWindow, ForestEditorPalleteWindow );
ForestEditTabBook.selectPage(0);
}
function ForestEditorPlugin::onWorldEditorShutdown( %this )
{
if ( isObject( ForestBrushGroup ) )
ForestBrushGroup.delete();
if ( isObject( ForestDataManager ) )
ForestDataManager.delete();
}
function ForestEditorPlugin::onActivated( %this )
{
EditorGui.bringToFront( ForestEditorGui );
ForestEditorGui.setVisible( true );
ForestEditorPalleteWindow.setVisible( true );
ForestEditorPropertiesWindow.setVisible( true );
ForestEditorGui.makeFirstResponder( true );
//ForestEditToolbar.setVisible( true );
%this.map.push();
Parent::onActivated(%this);
ForestEditBrushTree.open( ForestBrushGroup );
ForestEditMeshTree.open( ForestItemDataSet );
// Open the Brush tab.
ForestEditTabBook.selectPage(0);
   // Sync the palette button state
// And toolbar.
%tool = ForestEditorGui.getActiveTool();
if ( isObject( %tool ) )
%tool.onActivated();
if ( !isObject( %tool ) )
{
ForestEditorPaintModeBtn.performClick();
if ( ForestEditBrushTree.getItemCount() > 0 )
{
ForestEditBrushTree.selectItem( 0, true );
}
}
else if ( %tool == ForestTools->SelectionTool )
{
%mode = GlobalGizmoProfile.mode;
switch$ (%mode)
{
case "None":
ForestEditorSelectModeBtn.performClick();
case "Move":
ForestEditorMoveModeBtn.performClick();
case "Rotate":
ForestEditorRotateModeBtn.performClick();
case "Scale":
ForestEditorScaleModeBtn.performClick();
}
}
else if ( %tool == ForestTools->BrushTool )
{
%mode = ForestTools->BrushTool.mode;
switch$ (%mode)
{
case "Paint":
ForestEditorPaintModeBtn.performClick();
case "Erase":
ForestEditorEraseModeBtn.performClick();
case "EraseSelected":
ForestEditorEraseSelectedModeBtn.performClick();
}
}
if ( %this.showError )
MessageBoxOK( "Error", "Your art/forest folder does not contain a valid brushes.cs. Brushes you create will not be saved!" );
}
function ForestEditorPlugin::onDeactivated( %this )
{
ForestEditorGui.setVisible( false );
ForestEditorPalleteWindow.setVisible( false );
ForestEditorPropertiesWindow.setVisible( false );
%tool = ForestEditorGui.getActiveTool();
if ( isObject( %tool ) )
%tool.onDeactivated();
// Also take this opportunity to save.
ForestDataManager.saveDirty();
%this.map.pop();
Parent::onDeactivated(%this);
}
function ForestEditorPlugin::isDirty( %this )
{
%dirty = %this.dirty || ForestEditorGui.isDirty();
return %dirty;
}
function ForestEditorPlugin::clearDirty( %this )
{
%this.dirty = false;
}
function ForestEditorPlugin::onSaveMission( %this, %missionFile )
{
ForestDataManager.saveDirty();
if ( isObject( theForest ) )
theForest.saveDataFile();
ForestBrushGroup.save( "art/forest/brushes.cs" );
}
function ForestEditorPlugin::onEditorSleep( %this )
{
}
function ForestEditorPlugin::onEditMenuSelect( %this, %editMenu )
{
%hasSelection = false;
%selTool = ForestTools->SelectionTool;
if ( ForestEditorGui.getActiveTool() == %selTool )
if ( %selTool.getSelectionCount() > 0 )
%hasSelection = true;
%editMenu.enableItem( 3, %hasSelection ); // Cut
%editMenu.enableItem( 4, %hasSelection ); // Copy
%editMenu.enableItem( 5, %hasSelection ); // Paste
%editMenu.enableItem( 6, %hasSelection ); // Delete
%editMenu.enableItem( 8, %hasSelection ); // Deselect
}
function ForestEditorPlugin::handleDelete( %this )
{
ForestTools->SelectionTool.deleteSelection();
}
function ForestEditorPlugin::handleDeselect( %this )
{
ForestTools->SelectionTool.clearSelection();
}
function ForestEditorPlugin::handleCut( %this )
{
ForestTools->SelectionTool.cutSelection();
}
function ForestEditorPlugin::handleCopy( %this )
{
ForestTools->SelectionTool.copySelection();
}
function ForestEditorPlugin::handlePaste( %this )
{
ForestTools->SelectionTool.pasteSelection();
}
| |
//
// Box.cs
//
// Author:
// Lluis Sanchez <[email protected]>
//
// Copyright (c) 2011 Xamarin Inc
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.Linq;
using System.Collections.Generic;
using System.ComponentModel;
using Xwt.Backends;
namespace Xwt
{
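	/// <summary>
	/// Base container that lays out its children in a single row or column; the direction is fixed
	/// by subclasses such as HBox and VBox.
	/// </summary>
	/// <example>
	/// Illustrative sketch (not part of the original file), assuming the usual HBox subclass:
	/// <code>
	/// var box = new HBox { Spacing = 12 };
	/// box.PackStart (new Label ("Name:"));
	/// box.PackStart (new TextEntry (), expand: true);
	/// </code>
	/// </example>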
[BackendType (typeof(IBoxBackend))]
public class Box: Widget
{
ChildrenCollection<BoxPlacement> children;
Orientation direction;
double spacing = 6;
protected new class WidgetBackendHost: Widget.WidgetBackendHost, ICollectionEventSink<BoxPlacement>, IContainerEventSink<BoxPlacement>
{
public void AddedItem (BoxPlacement item, int index)
{
((Box)Parent).OnAdd (item.Child, item);
}
public void RemovedItem (BoxPlacement item, int index)
{
((Box)Parent).OnRemove (item.Child);
}
public void ChildChanged (BoxPlacement child, string hint)
{
((Box)Parent).OnChildChanged (child, hint);
}
public void ChildReplaced (BoxPlacement child, Widget oldWidget, Widget newWidget)
{
((Box)Parent).OnReplaceChild (child, oldWidget, newWidget);
}
}
protected override BackendHost CreateBackendHost ()
{
return new WidgetBackendHost ();
}
IBoxBackend Backend {
get { return (IBoxBackend) BackendHost.Backend; }
}
internal Box (Orientation dir)
{
children = new ChildrenCollection<BoxPlacement> ((WidgetBackendHost)BackendHost);
direction = dir;
}
public double Spacing {
get { return spacing; }
set {
spacing = value > 0 ? value : 0;
OnPreferredSizeChanged ();
}
}
public ChildrenCollection<BoxPlacement> Placements {
get { return children; }
}
public IEnumerable<Widget> Children {
get { return children.Select (c => c.Child); }
}
public void PackStart (Widget widget)
{
if (widget == null)
throw new ArgumentNullException ("widget");
Pack (widget, false, WidgetPlacement.Fill, PackOrigin.Start);
}
public void PackStart (Widget widget, bool expand)
{
if (widget == null)
throw new ArgumentNullException ("widget");
Pack (widget, expand, WidgetPlacement.Fill, PackOrigin.Start);
}
public void PackStart (Widget widget, bool expand, bool fill)
{
if (widget == null)
throw new ArgumentNullException ("widget");
WidgetPlacement align = fill ? WidgetPlacement.Fill : WidgetPlacement.Center;
Pack (widget, expand, align, PackOrigin.Start);
}
public void PackStart (Widget widget, bool expand = false, WidgetPlacement vpos = WidgetPlacement.Fill, WidgetPlacement hpos = WidgetPlacement.Fill, double marginLeft = -1, double marginTop = -1, double marginRight = -1, double marginBottom = -1, double margin = -1)
{
if (widget == null)
throw new ArgumentNullException ("widget");
Pack (widget, expand, vpos, hpos, marginLeft, marginTop, marginRight, marginBottom, margin, PackOrigin.Start);
}
[Obsolete ("BoxMode is going away")]
public void PackStart (Widget widget, BoxMode mode)
{
if (widget == null)
throw new ArgumentNullException ("widget");
bool expand = (mode & BoxMode.Expand) != 0;
bool fill = (mode & BoxMode.Fill) != 0;
PackStart (widget, expand, fill);
}
public void PackEnd (Widget widget)
{
if (widget == null)
throw new ArgumentNullException ("widget");
Pack (widget, false, WidgetPlacement.Fill, PackOrigin.End);
}
public void PackEnd (Widget widget, bool expand)
{
if (widget == null)
throw new ArgumentNullException ("widget");
Pack (widget, expand, WidgetPlacement.Fill, PackOrigin.End);
}
public void PackEnd (Widget widget, bool expand, bool fill)
{
if (widget == null)
throw new ArgumentNullException ("widget");
WidgetPlacement align = fill ? WidgetPlacement.Fill : WidgetPlacement.Center;
Pack (widget, expand, align, PackOrigin.End);
}
public void PackEnd (Widget widget, bool expand = false, WidgetPlacement hpos = WidgetPlacement.Fill, WidgetPlacement vpos = WidgetPlacement.Fill, double marginLeft = -1, double marginTop = -1, double marginRight = -1, double marginBottom = -1, double margin = -1)
{
if (widget == null)
throw new ArgumentNullException ("widget");
Pack (widget, expand, vpos, hpos, marginLeft, marginTop, marginRight, marginBottom, margin, PackOrigin.End);
}
[Obsolete ("BoxMode is going away")]
public void PackEnd (Widget widget, BoxMode mode)
{
bool expand = (mode & BoxMode.Expand) != 0;
bool fill = (mode & BoxMode.Fill) != 0;
PackEnd (widget, expand, fill);
}
void Pack (Widget widget, bool expand, WidgetPlacement vpos, WidgetPlacement hpos, double marginLeft, double marginTop, double marginRight, double marginBottom, double margin, PackOrigin ptype)
{
WidgetPlacement align;
if (direction == Orientation.Horizontal) {
align = hpos;
if (vpos != default (WidgetPlacement))
widget.VerticalPlacement = vpos;
} else {
align = vpos;
if (hpos != default (WidgetPlacement))
widget.HorizontalPlacement = hpos;
}
if (margin != -1)
widget.Margin = margin;
if (marginLeft != -1)
widget.MarginLeft = marginLeft;
if (marginTop != -1)
widget.MarginTop = marginTop;
			if (marginRight != -1)
widget.MarginRight = marginRight;
if (marginBottom != -1)
widget.MarginBottom = marginBottom;
Pack (widget, expand, align, ptype);
}
void Pack (Widget widget, bool? expand, WidgetPlacement align, PackOrigin ptype)
		{
			if (widget == null)
				throw new ArgumentNullException ("widget");
			if (expand.HasValue) {
				if (direction == Orientation.Vertical)
					widget.ExpandVertical = expand.Value;
				else
					widget.ExpandHorizontal = expand.Value;
			}
			if (align != default (WidgetPlacement)) {
				if (direction == Orientation.Vertical)
					widget.VerticalPlacement = align;
				else
					widget.HorizontalPlacement = align;
			}
var p = new BoxPlacement ((WidgetBackendHost)BackendHost, widget);
p.PackOrigin = ptype;
children.Add (p);
}
public bool Remove (Widget widget)
{
if (widget == null)
throw new ArgumentNullException ("widget");
for (int n=0; n<children.Count; n++) {
if (children[n].Child == widget) {
children.RemoveAt (n);
return true;
}
}
return false;
}
/// <summary>
/// Removes all children
/// </summary>
public void Clear ()
{
children.Clear ();
}
void OnAdd (Widget child, BoxPlacement placement)
{
RegisterChild (child);
Backend.Add ((IWidgetBackend)GetBackend (child));
OnPreferredSizeChanged ();
}
void OnRemove (Widget child)
{
UnregisterChild (child);
Backend.Remove ((IWidgetBackend)GetBackend (child));
OnPreferredSizeChanged ();
}
void OnChildChanged (BoxPlacement placement, object hint)
{
OnPreferredSizeChanged ();
}
internal protected virtual void OnReplaceChild (BoxPlacement placement, Widget oldWidget, Widget newWidget)
{
if (oldWidget != null)
OnRemove (oldWidget);
OnAdd (newWidget, placement);
}
protected override void OnReallocate ()
{
var size = Backend.Size;
var visibleChildren = children.Where (c => c.Child.Visible).ToArray ();
IWidgetBackend[] widgets = new IWidgetBackend [visibleChildren.Length];
Rectangle[] rects = new Rectangle [visibleChildren.Length];
if (size.Width <= 0 || size.Height <= 0) {
var ws = visibleChildren.Select (bp => bp.Child.GetBackend ()).ToArray ();
Backend.SetAllocation (ws, new Rectangle[visibleChildren.Length]);
return;
}
if (direction == Orientation.Horizontal) {
CalcDefaultSizes (size.Width, size.Height, true);
double xs = 0;
double xe = size.Width + spacing;
for (int n=0; n<visibleChildren.Length; n++) {
var bp = visibleChildren [n];
double availableWidth = bp.NextSize >= 0 ? bp.NextSize : 0;
if (bp.PackOrigin == PackOrigin.End)
xe -= availableWidth + spacing;
var slot = new Rectangle (bp.PackOrigin == PackOrigin.Start ? xs : xe, 0, availableWidth, size.Height);
widgets[n] = (IWidgetBackend)GetBackend (bp.Child);
rects[n] = bp.Child.Surface.GetPlacementInRect (slot).Round ().WithPositiveSize ();
if (bp.PackOrigin == PackOrigin.Start)
xs += availableWidth + spacing;
}
} else {
CalcDefaultSizes (size.Width, size.Height, true);
double ys = 0;
double ye = size.Height + spacing;
for (int n=0; n<visibleChildren.Length; n++) {
var bp = visibleChildren [n];
double availableHeight = bp.NextSize >= 0 ? bp.NextSize : 0;
if (bp.PackOrigin == PackOrigin.End)
ye -= availableHeight + spacing;
var slot = new Rectangle (0, bp.PackOrigin == PackOrigin.Start ? ys : ye, size.Width, availableHeight);
widgets[n] = (IWidgetBackend)GetBackend (bp.Child);
rects[n] = bp.Child.Surface.GetPlacementInRect (slot).Round ().WithPositiveSize ();
if (bp.PackOrigin == PackOrigin.Start)
ys += availableHeight + spacing;
}
}
Backend.SetAllocation (widgets, rects);
}
void CalcDefaultSizes (SizeConstraint width, SizeConstraint height, bool allowShrink)
{
bool vertical = direction == Orientation.Vertical;
int nexpands = 0;
double requiredSize = 0;
double availableSize = vertical ? height.AvailableSize : width.AvailableSize;
var widthConstraint = vertical ? width : SizeConstraint.Unconstrained;
var heightConstraint = vertical ? SizeConstraint.Unconstrained : height;
var visibleChildren = children.Where (b => b.Child.Visible).ToArray ();
var sizes = new Dictionary<BoxPlacement,double> ();
// Get the natural size of each child
foreach (var bp in visibleChildren) {
Size s;
s = bp.Child.Surface.GetPreferredSize (widthConstraint, heightConstraint, true);
bp.NextSize = vertical ? s.Height : s.Width;
sizes [bp] = bp.NextSize;
requiredSize += bp.NextSize;
if (bp.Child.ExpandsForOrientation (direction))
nexpands++;
}
double remaining = availableSize - requiredSize - (spacing * (double)(visibleChildren.Length - 1));
if (remaining > 0) {
var expandRemaining = new SizeSplitter (remaining, nexpands);
foreach (var bp in visibleChildren) {
if (bp.Child.ExpandsForOrientation (direction))
bp.NextSize += expandRemaining.NextSizePart ();
}
}
else if (allowShrink && remaining < 0) {
// The box is not big enough to fit the widgets using its natural size.
// We have to shrink the widgets.
// The total amount we have to shrink
double shrinkSize = -remaining;
var sizePart = new SizeSplitter (shrinkSize, visibleChildren.Length);
foreach (var bp in visibleChildren)
bp.NextSize -= sizePart.NextSizePart ();
}
}
protected override Size OnGetPreferredSize (SizeConstraint widthConstraint, SizeConstraint heightConstraint)
{
Size s = new Size ();
int count = 0;
var visibleChildren = children.Where (b => b.Child.Visible).ToArray ();
if (direction == Orientation.Horizontal) {
// If the width is constrained then we have a total width, and we can calculate the exact width assigned to each child.
// We can then use that width as a width constraint for the child.
if (widthConstraint.IsConstrained)
CalcDefaultSizes (widthConstraint, heightConstraint, false); // Calculates the width assigned to each child
foreach (var cw in visibleChildren) {
// Use the calculated width if available
var wsize = cw.Child.Surface.GetPreferredSize (widthConstraint.IsConstrained ? cw.NextSize : SizeConstraint.Unconstrained, heightConstraint, true);
s.Width += wsize.Width;
if (wsize.Height > s.Height)
s.Height = wsize.Height;
count++;
}
if (count > 0)
s.Width += spacing * (double)(count - 1);
} else {
if (heightConstraint.IsConstrained)
CalcDefaultSizes (widthConstraint, heightConstraint, false);
foreach (var cw in visibleChildren) {
var wsize = cw.Child.Surface.GetPreferredSize (widthConstraint, heightConstraint.IsConstrained ? cw.NextSize : SizeConstraint.Unconstrained, true);
s.Height += wsize.Height;
if (wsize.Width > s.Width)
s.Width = wsize.Width;
count++;
}
if (count > 0)
s.Height += spacing * (double)(count - 1);
}
return s;
}
}
[Flags]
public enum BoxMode
{
None = 0,
Fill = 1,
Expand = 2,
FillAndExpand = 3
}
public class BoxPlacement
{
IContainerEventSink<BoxPlacement> parent;
int position;
PackOrigin packType = PackOrigin.Start;
Widget child;
internal BoxPlacement (IContainerEventSink<BoxPlacement> parent, Widget child)
{
this.parent = parent;
this.child = child;
}
internal double NextSize;
public int Position {
get {
return this.position;
}
set {
if (value < 0)
throw new ArgumentException ("Position can't be negative");
position = value;
parent.ChildChanged (this, "Position");
}
}
[DefaultValue (PackOrigin.Start)]
public PackOrigin PackOrigin {
get {
return this.packType;
}
set {
packType = value;
parent.ChildChanged (this, "PackType");
}
}
public Widget Child {
get { return child; }
set {
if (value == null)
throw new ArgumentNullException ();
var old = child;
child = value;
parent.ChildReplaced (this, old, value);
}
}
}
public enum PackOrigin
{
Start,
End
}
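	// Splits an integral total into numParts near-equal parts: each call to NextSizePart returns the
	// base share, and the first 'rem' calls get one extra unit so the parts add back up to the
	// (truncated) total.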
class SizeSplitter
{
int rem;
int part;
public SizeSplitter (double total, int numParts)
{
if (numParts > 0) {
part = ((int)total) / numParts;
rem = ((int)total) % numParts;
}
}
public double NextSizePart ()
{
if (rem > 0) {
rem--;
return part + 1;
}
else
return part;
}
}
}
| |
/*
MIT License
Copyright (c) 2017 Saied Zarrinmehr
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Media;
using System.Windows.Shapes;
using System.Windows.Controls;
using System.Windows.Input;
using SpatialAnalysis.FieldUtility;
using SpatialAnalysis.CellularEnvironment;
using SpatialAnalysis.Geometry;
using System.ComponentModel;
namespace SpatialAnalysis.Visualization
{
/// <summary>
    /// The type of scene rendered by a <see cref="ScenelVisualHost"/>.
/// </summary>
public enum SceneType
{
/// <summary>
/// The grid view model
/// </summary>
Grid,
/// <summary>
/// The visual barriers model
/// </summary>
VisualBarriers,
/// <summary>
/// The physical barriers
/// </summary>
PhysicalBarriers,
/// <summary>
/// The walkable field model
/// </summary>
Field
}
/// <summary>
    /// Visualizes a scene.
/// </summary>
public class ScenelVisualHost : FrameworkElement, INotifyPropertyChanged
{
private OSMDocument _host { get; set; }
private System.Windows.Media.Geometry geometry;
/// <summary>
/// Gets or sets the geometry that this control visualizes.
/// </summary>
/// <value>The geometry.</value>
public System.Windows.Media.Geometry Geometry
{
get { return geometry; }
            set
            {
                geometry = value;
                if (PropertyChanged != null)
                {
                    PropertyChanged(this, new PropertyChangedEventArgs("Geometry"));
                }
            }
}
private double boarderThickness { get; set; }
private Brush boarderBrush { get; set; }
private VisualCollection _children { get; set; }
private Brush fillBrush { get; set; }
private MenuItem visualizationMenu { get; set; }
private MenuItem hide_Show_Menu { get; set; }
private MenuItem boarderThickness_Menu { get; set; }
private MenuItem boarderBrush_Menu { get; set; }
private MenuItem fillBrush_Menu { get; set; }
/// <summary>
/// Initializes a new instance of the <see cref="ScenelVisualHost"/> class.
/// </summary>
public ScenelVisualHost()
{
this._children = new VisualCollection(this);
this.boarderThickness = 1;
this.boarderBrush = Brushes.Black;
this.visualizationMenu = new MenuItem();
            this.boarderBrush_Menu = new MenuItem() { Header = "Border Brush" };
            this.boarderThickness_Menu = new MenuItem() { Header = "Border Thickness" };
this.fillBrush_Menu = new MenuItem() { Header = "Fill Brush" };
this.hide_Show_Menu = new MenuItem() { Header = "Hide" };
this.visualizationMenu.Items.Add(this.hide_Show_Menu);
this.visualizationMenu.Items.Add(this.boarderThickness_Menu);
this.visualizationMenu.Items.Add(this.fillBrush_Menu);
this.visualizationMenu.Items.Add(this.boarderBrush_Menu);
this.boarderThickness_Menu.Click += new RoutedEventHandler(boarderThickness_Menu_Click);
this.fillBrush_Menu.Click += new RoutedEventHandler(fillBrush_Menu_Click);
this.boarderBrush_Menu.Click += new RoutedEventHandler(boarderBrush_Menu_Click);
this.hide_Show_Menu.Click += new RoutedEventHandler(hide_Show_Menu_Click);
}
private void hide_Show_Menu_Click(object sender, RoutedEventArgs e)
{
if (this.Visibility == System.Windows.Visibility.Visible)
{
this.Visibility = System.Windows.Visibility.Collapsed;
this.hide_Show_Menu.Header = "Show";
this.boarderBrush_Menu.IsEnabled = false;
this.fillBrush_Menu.IsEnabled = false;
this.boarderThickness_Menu.IsEnabled = false;
}
else
{
this.Visibility = System.Windows.Visibility.Visible;
this.hide_Show_Menu.Header = "Hide";
this.boarderBrush_Menu.IsEnabled = true;
this.fillBrush_Menu.IsEnabled = true;
this.boarderThickness_Menu.IsEnabled = true;
}
}
private void setMenuName(String name)
{
this.visualizationMenu.Header = name;
}
private void boarderBrush_Menu_Click(object sender, RoutedEventArgs e)
{
BrushPicker colorPicker = new BrushPicker(this.boarderBrush);
colorPicker.Owner = this._host;
colorPicker.ShowDialog();
this.boarderBrush = colorPicker._Brush;
this.draw();
colorPicker = null;
}
private void fillBrush_Menu_Click(object sender, RoutedEventArgs e)
{
BrushPicker colorPicker = new BrushPicker(this.fillBrush);
colorPicker.Owner = this._host;
colorPicker.ShowDialog();
this.fillBrush = colorPicker._Brush;
this.draw();
colorPicker = null;
}
private void boarderThickness_Menu_Click(object sender, RoutedEventArgs e)
{
GetNumber gn = new GetNumber("Enter New Thickness Value", "New thickness value will be applied to the edges of barriers", this.boarderThickness);
gn.Owner = this._host;
gn.ShowDialog();
this.boarderThickness = gn.NumberValue;
this.draw();
gn = null;
}
// Provide a required override for the VisualChildrenCount property.
protected override int VisualChildrenCount
{
get { return _children.Count; }
}
// Provide a required override for the GetVisualChild method.
protected override Visual GetVisualChild(int index)
{
if (index < 0 || index >= _children.Count)
{
throw new ArgumentOutOfRangeException();
}
return _children[index];
}
private void setGeometry(BarrierPolygon[] barriers)
{
StreamGeometry sg = new StreamGeometry();
using (StreamGeometryContext sgc = sg.Open())
{
foreach (BarrierPolygon barrier in barriers)
{
sgc.BeginFigure(this.toPoint(barrier.BoundaryPoints[0]), true, true);
for (int i = 1; i < barrier.Length; i++)
{
sgc.LineTo(this.toPoint(barrier.BoundaryPoints[i]), true, true);
}
}
}
sg.FillRule = FillRule.EvenOdd;
this.geometry = sg;
}
private void setGeometry(CellularFloor cellularFloor)
{
UV up = new UV(0, cellularFloor.CellSize);
UV right = new UV(cellularFloor.CellSize, 0);
StreamGeometry sg = new StreamGeometry();
using (StreamGeometryContext sgc = sg.Open())
{
UV start = null, end = null;
sgc.BeginFigure(this.toPoint(cellularFloor.Cells[0, 0]), false, false);
sgc.LineTo(this.toPoint(cellularFloor.Cells[0, cellularFloor.GridHeight - 1] + up), true, false);
for (int i = 1; i < cellularFloor.GridWidth; i++)
{
start = cellularFloor.Cells[i, 0];
end = cellularFloor.Cells[i, cellularFloor.GridHeight - 1] + up;
sgc.LineTo(this.toPoint(start), false, false);
sgc.LineTo(this.toPoint(end), true, false);
}
start = start + right;
end = end + right;
sgc.LineTo(this.toPoint(start), false, false);
sgc.LineTo(this.toPoint(end), true, false);
for (int i = 0; i < cellularFloor.GridHeight; i++)
{
start = cellularFloor.Cells[0, i];
end = cellularFloor.Cells[cellularFloor.GridWidth - 1, i] + right;
sgc.LineTo(this.toPoint(start), false, false);
sgc.LineTo(this.toPoint(end), true, false);
}
start = start + up;
end = end + up;
sgc.LineTo(this.toPoint(start), false, false);
sgc.LineTo(this.toPoint(end), true, false);
}
this.geometry = sg;
}
private void draw()
{
if (this.geometry == null)
{
MessageBox.Show("Cannot Draw Null Geometry");
return;
}
try
{
this._children.Clear();
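                // Divide the requested thickness by the current render scale (the length of the
                // transformed X basis vector) so borders keep a constant on-screen width at any zoom level.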
double scale = this.RenderTransform.Value.M11 * this.RenderTransform.Value.M11 +
this.RenderTransform.Value.M12 * this.RenderTransform.Value.M12;
scale = Math.Sqrt(scale);
double t = this.boarderThickness / scale;
Pen _pen = new Pen(this.boarderBrush, t);
DrawingVisual drawingVisual = new DrawingVisual();
using (DrawingContext drawingContext = drawingVisual.RenderOpen())
{
drawingContext.DrawGeometry(this.fillBrush, _pen, this.geometry);
}
drawingVisual.Drawing.Freeze();
this._children.Add(drawingVisual);
}
catch (Exception e)
{
MessageBox.Show(e.Message);
}
}
/// <summary>
/// Clears this instance.
/// </summary>
public void Clear()
{
this._children.Clear();
this.geometry = null;
this._host = null;
this.boarderBrush = null;
this._children = null;
this.fillBrush = null;
this.visualizationMenu.Items.Clear();
this.boarderThickness_Menu.Click -= boarderThickness_Menu_Click;
this.fillBrush_Menu.Click -= fillBrush_Menu_Click;
this.boarderBrush_Menu.Click -= boarderBrush_Menu_Click;
this.hide_Show_Menu.Click -= hide_Show_Menu_Click;
this.hide_Show_Menu = null;
this.boarderThickness_Menu = null;
this.boarderBrush_Menu = null;
this.fillBrush_Menu = null;
this.visualizationMenu = null;
}
private Point toPoint(UV uv)
{
return new Point(uv.U, uv.V);
}
/// <summary>
/// Sets the host.
/// </summary>
/// <param name="host">The main document to which this control belongs.</param>
/// <param name="menueName">Name of the menu.</param>
/// <param name="type">The type.</param>
public void SetHost(OSMDocument host, string menueName, SceneType type)
{
this._host = host;
this.RenderTransform = this._host.RenderTransformation;
this.setMenuName(menueName);
this._host.ViewUtil.Items.Add(this.visualizationMenu);
switch (type)
{
case SceneType.Grid:
this.setGeometry(this._host.cellularFloor);
this.boarderBrush = new SolidColorBrush(Colors.DarkRed) { Opacity = .6 };
this.boarderThickness = .15;
break;
case SceneType.VisualBarriers:
this.setGeometry(this._host.BIM_To_OSM.VisualBarriers);
this.fillBrush = new SolidColorBrush(Colors.Black) { Opacity = .8 };
this.boarderThickness = .1;
break;
case SceneType.PhysicalBarriers:
this.setGeometry(this._host.BIM_To_OSM.PhysicalBarriers);
this.fillBrush = new SolidColorBrush(Colors.Black) { Opacity = .6 };
this.boarderThickness = .1;
break;
case SceneType.Field:
this.setGeometry(this._host.BIM_To_OSM.FieldBarriers);
this.fillBrush = new SolidColorBrush(Color.FromRgb(170,161,156)) { Opacity = 1.0 };
this.boarderThickness = .01;
break;
default:
break;
}
this.draw();
}
/// <summary>
/// Occurs when a property value changes.
/// </summary>
public event PropertyChangedEventHandler PropertyChanged;
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Html;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Rendering;
using Microsoft.AspNetCore.Mvc.ViewFeatures;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Localization;
using OrchardCore.DisplayManagement;
using OrchardCore.DisplayManagement.Descriptors;
using OrchardCore.DisplayManagement.Implementation;
using OrchardCore.DisplayManagement.Shapes;
namespace OrchardCore.Navigation
{
public class PagerShapesTableProvider : IShapeTableProvider
{
public void Discover(ShapeTableBuilder builder)
{
builder.Describe("Pager")
.OnCreated(created =>
{
                        // Initializes the common properties of a Pager shape
// such that views can safely add values to them.
created.Shape.Properties["ItemClasses"] = new List<string>();
created.Shape.Properties["ItemAttributes"] = new Dictionary<string, string>();
})
.OnDisplaying(displaying =>
{
if (displaying.Shape.Properties.TryGetValue("PagerId", out var value) && value is string pagerId)
{
if (!String.IsNullOrEmpty(pagerId))
{
displaying.Shape.Metadata.Alternates.Add("Pager__" + EncodeAlternateElement(pagerId));
}
                        }
});
builder.Describe("PagerSlim")
.OnCreated(created =>
{
                        // Initializes the common properties of a Pager shape
// such that views can safely add values to them.
created.Shape.Properties["ItemClasses"] = new List<string>();
created.Shape.Properties["ItemAttributes"] = new Dictionary<string, string>();
});
builder.Describe("Pager_Gap")
.OnDisplaying(displaying =>
{
if (displaying.Shape.Properties.TryGetValue("PagerId", out var value) && value is string pagerId)
{
if (!String.IsNullOrEmpty(pagerId))
{
displaying.Shape.Metadata.Alternates.Add("Pager_Gap__" + EncodeAlternateElement(pagerId));
}
}
});
builder.Describe("Pager_First")
.OnDisplaying(displaying =>
{
if (displaying.Shape.Properties.TryGetValue("PagerId", out var value) && value is string pagerId)
{
if (!String.IsNullOrEmpty(pagerId))
{
displaying.Shape.Metadata.Alternates.Add("Pager_First__" + EncodeAlternateElement(pagerId));
}
}
});
builder.Describe("Pager_Previous")
.OnDisplaying(displaying =>
{
if (displaying.Shape.Properties.TryGetValue("PagerId", out var value) && value is string pagerId)
{
if (!String.IsNullOrEmpty(pagerId))
{
displaying.Shape.Metadata.Alternates.Add("Pager_Previous__" + EncodeAlternateElement(pagerId));
}
}
});
builder.Describe("Pager_Next")
.OnDisplaying(displaying =>
{
if (displaying.Shape.Properties.TryGetValue("PagerId", out var value) && value is string pagerId)
{
if (!String.IsNullOrEmpty(pagerId))
{
displaying.Shape.Metadata.Alternates.Add("Pager_Next__" + EncodeAlternateElement(pagerId));
}
}
});
builder.Describe("Pager_Last")
.OnDisplaying(displaying =>
{
if (displaying.Shape.Properties.TryGetValue("PagerId", out var value) && value is string pagerId)
{
if (!String.IsNullOrEmpty(pagerId))
{
displaying.Shape.Metadata.Alternates.Add("Pager_Last__" + EncodeAlternateElement(pagerId));
}
}
});
builder.Describe("Pager_CurrentPage")
.OnDisplaying(displaying =>
{
if (displaying.Shape.Properties.TryGetValue("PagerId", out var value) && value is string pagerId)
{
if (!String.IsNullOrEmpty(pagerId))
{
displaying.Shape.Metadata.Alternates.Add("Pager_CurrentPage__" + EncodeAlternateElement(pagerId));
}
}
});
builder.Describe("Pager_Links")
.OnDisplaying(displaying =>
{
if (displaying.Shape.Properties.TryGetValue("PagerId", out var value) && value is string pagerId)
{
if (!String.IsNullOrEmpty(pagerId))
{
displaying.Shape.Metadata.Alternates.Add("Pager_Links__" + EncodeAlternateElement(pagerId));
}
}
});
}
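        // Turns a pager id into a token that is safe inside a shape alternate name,
        // e.g. an id such as "main-blog.list" (illustrative) becomes "main__blog_list".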
private string EncodeAlternateElement(string alternateElement)
{
return alternateElement.Replace("-", "__").Replace('.', '_');
}
}
public class PagerShapes : IShapeAttributeProvider
{
private readonly IStringLocalizer S;
public PagerShapes(IStringLocalizer<PagerShapes> localizer)
{
S = localizer;
}
[Shape]
public async Task<IHtmlContent> Pager_Links(Shape shape, dynamic DisplayAsync, dynamic New, IHtmlHelper Html, DisplayContext DisplayContext,
string PagerId,
int Page,
int PageSize,
double TotalItemCount,
int? Quantity,
object FirstText,
object PreviousText,
object NextText,
object LastText,
object GapText,
bool ShowNext,
string ItemTagName,
IDictionary<string, string> ItemAttributes
            // parameter omitted to work around an issue where a NullRef is thrown
// when an anonymous object is bound to an object shape parameter
/*object RouteValues*/)
{
var currentPage = Page;
if (currentPage < 1)
currentPage = 1;
var pageSize = PageSize;
var numberOfPagesToShow = Quantity ?? 0;
if (Quantity == null || Quantity < 0)
numberOfPagesToShow = 7;
var totalPageCount = pageSize > 0 ? (int)Math.Ceiling(TotalItemCount / pageSize) : 1;
// return shape early if pager is not needed.
if (totalPageCount < 2)
{
shape.Metadata.Type = "List";
return await DisplayAsync(shape);
}
var firstText = FirstText ?? S["<<"];
var previousText = PreviousText ?? S["<"];
var nextText = NextText ?? S[">"];
var lastText = LastText ?? S[">>"];
var gapText = GapText ?? S["..."];
var httpContextAccessor = DisplayContext.ServiceProvider.GetService<IHttpContextAccessor>();
var httpContext = httpContextAccessor.HttpContext;
var routeData = new RouteValueDictionary(Html.ViewContext.RouteData.Values);
if (httpContext != null)
{
var queryString = httpContext.Request.Query;
if (queryString != null)
{
foreach (var key in from string key in queryString.Keys where key != null && !routeData.ContainsKey(key) let value = queryString[key] select key)
{
routeData[key] = queryString[key];
}
}
}
            // Specific cross-request route data can be passed to the shape directly (e.g., OrchardCore.Users).
var shapeRoute = (object)((dynamic)shape).RouteData;
if (shapeRoute != null)
{
var shapeRouteData = shapeRoute as RouteValueDictionary;
if (shapeRouteData == null)
{
var route = shapeRoute as RouteData;
if (route != null)
{
shapeRouteData = new RouteValueDictionary(route.Values);
}
}
if (shapeRouteData != null)
{
foreach (var rd in shapeRouteData)
{
routeData[rd.Key] = rd.Value;
}
}
}
var firstPage = Math.Max(1, Page - (numberOfPagesToShow / 2));
var lastPage = Math.Min(totalPageCount, Page + (int)(numberOfPagesToShow / 2));
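// Illustrative note (added): with the default window of 7 pages, a current Page of 5 yields
// firstPage = Max(1, 5 - 3) = 2 and lastPage = Min(totalPageCount, 5 + 3) = 8, so pages 2..8 are rendered.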
var pageKey = String.IsNullOrEmpty(PagerId) ? "page" : PagerId;
shape.Classes.Add("pager");
shape.Metadata.Alternates.Clear();
shape.Metadata.Type = "List";
// first and previous pages
if ((Page > 1) && (routeData.ContainsKey(pageKey)))
{
routeData.Remove(pageKey); // to keep from having "page=1" in the query string
}
// first
shape.Add(await New.Pager_First(Value: firstText, RouteValues: new RouteValueDictionary(routeData), Pager: shape, Disabled: Page < 2));
// previous
if ((Page > 1) && (currentPage > 2))
{ // also to keep from having "page=1" in the query string
routeData[pageKey] = currentPage - 1;
}
shape.Add(await New.Pager_Previous(Value: previousText, RouteValues: new RouteValueDictionary(routeData), Pager: shape, Disabled: Page < 2));
// gap at the beginning of the pager
if (firstPage > 1 && numberOfPagesToShow > 0)
{
shape.Add(await New.Pager_Gap(Value: gapText, Pager: shape));
}
// page numbers
if (numberOfPagesToShow > 0 && lastPage > 1)
{
for (var p = firstPage; p <= lastPage; p++)
{
if (p == currentPage)
{
routeData[pageKey] = currentPage;
shape.Add(await New.Pager_CurrentPage(Value: p, RouteValues: new RouteValueDictionary(routeData), Pager: shape));
}
else
{
if (p == 1)
routeData.Remove(pageKey);
else
routeData[pageKey] = p;
shape.Add(await New.Pager_Link(Value: p, RouteValues: new RouteValueDictionary(routeData), Pager: shape));
}
}
}
// gap at the end of the pager
if (lastPage < totalPageCount && numberOfPagesToShow > 0)
{
shape.Add(await New.Pager_Gap(Value: gapText, Pager: shape));
}
// Next
routeData[pageKey] = Page + 1;
shape.Add(await New.Pager_Next(Value: nextText, RouteValues: new RouteValueDictionary(routeData), Pager: shape, Disabled: Page >= totalPageCount && !ShowNext));
// Last
routeData[pageKey] = totalPageCount;
shape.Add(await New.Pager_Last(Value: lastText, RouteValues: new RouteValueDictionary(routeData), Pager: shape, Disabled: Page >= totalPageCount));
return await DisplayAsync(shape);
}
[Shape]
public Task<IHtmlContent> Pager(Shape shape, dynamic DisplayAsync)
{
shape.Metadata.Alternates.Clear();
shape.Metadata.Type = "Pager_Links";
return DisplayAsync(shape);
}
[Shape]
public async Task<IHtmlContent> PagerSlim(Shape shape, dynamic DisplayAsync, dynamic New, IHtmlHelper Html, DisplayContext DisplayContext,
object PreviousText,
object NextText,
string PreviousClass,
string NextClass,
string ItemTagName,
IDictionary<string, string> ItemAttributes,
Dictionary<string, string> UrlParams)
{
var previousText = PreviousText ?? S["<"];
var nextText = NextText ?? S[">"];
shape.Classes.Add("pager");
shape.Metadata.Alternates.Clear();
shape.Metadata.Type = "List";
var routeData = new RouteValueDictionary(Html.ViewContext.RouteData.Values);
// Allows custom URL parameters to be passed to PagerSlim.
if (UrlParams != null)
{
foreach (var item in UrlParams)
{
routeData.Add(item.Key, item.Value);
}
}
if (shape.Properties.TryGetValue("Before", out var beforeValue) && beforeValue is string before)
{
var beforeRouteData = new RouteValueDictionary(routeData)
{
["before"] = before
};
shape.Add(await New.Pager_Previous(Value: previousText, RouteValues: beforeRouteData, Pager: shape));
shape.Properties["FirstClass"] = PreviousClass;
}
if (shape.Properties.TryGetValue("After", out var afterValue) && afterValue is string after)
{
var afterRouteData = new RouteValueDictionary(routeData)
{
["after"] = after
};
shape.Add(await New.Pager_Next(Value: nextText, RouteValues: afterRouteData, Pager: shape));
shape.Properties["LastClass"] = NextClass;
}
return await DisplayAsync(shape);
}
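// Illustrative note (added): PagerSlim implements cursor-style paging. Callers set the shape's
// "Before" and/or "After" properties to opaque cursor values, and the shape then emits previous/next
// links whose route values carry those cursors as "before"/"after" parameters.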
[Shape]
public Task<IHtmlContent> Pager_First(Shape shape, dynamic DisplayAsync)
{
shape.Metadata.Alternates.Clear();
shape.Metadata.Type = "Pager_Link";
return DisplayAsync(shape);
}
[Shape]
public Task<IHtmlContent> Pager_Previous(Shape shape, dynamic DisplayAsync)
{
shape.Metadata.Alternates.Clear();
shape.Metadata.Type = "Pager_Link";
return DisplayAsync(shape);
}
[Shape]
public Task<IHtmlContent> Pager_CurrentPage(Shape shape, dynamic DisplayAsync)
{
shape.Metadata.Alternates.Clear();
shape.Metadata.Type = "Pager_Link";
var parentTag = (TagBuilder)shape.Properties["Tag"];
parentTag.AddCssClass("active");
return DisplayAsync(shape);
}
[Shape]
public Task<IHtmlContent> Pager_Next(Shape shape, dynamic DisplayAsync)
{
shape.Metadata.Alternates.Clear();
shape.Metadata.Type = "Pager_Link";
return DisplayAsync(shape);
}
[Shape]
public Task<IHtmlContent> Pager_Last(Shape shape, dynamic DisplayAsync)
{
shape.Metadata.Alternates.Clear();
shape.Metadata.Type = "Pager_Link";
return DisplayAsync(shape);
}
[Shape]
public Task<IHtmlContent> Pager_Link(Shape shape, IHtmlHelper Html, dynamic DisplayAsync, object Value)
{
shape.Metadata.Alternates.Clear();
shape.Metadata.Type = "ActionLink";
return DisplayAsync(shape);
}
[Shape]
public IHtmlContent ActionLink(Shape shape, IUrlHelper Url, object Value, bool Disabled = false)
{
if (Disabled)
{
if (shape.Properties.TryGetValue("Tag", out var value) && value is TagBuilder tagBuilder)
{
tagBuilder.AddCssClass("disabled");
}
}
var RouteValues = (object)((dynamic)shape).RouteValues;
RouteValueDictionary rvd;
if (RouteValues == null)
{
rvd = new RouteValueDictionary();
}
else
{
rvd = RouteValues as RouteValueDictionary ?? new RouteValueDictionary(RouteValues);
}
shape.Attributes["href"] = Url.Action((string)rvd["action"], (string)rvd["controller"], rvd);
var tag = Shape.GetTagBuilder("a", null, shape.Classes, shape.Attributes);
tag.InnerHtml.AppendHtml(CoerceHtmlString(Value));
return tag;
}
[Shape]
public Task<IHtmlContent> Pager_Gap(Shape shape, dynamic DisplayAsync)
{
shape.Metadata.Alternates.Clear();
shape.Metadata.Type = "Pager_Link";
var parentTag = (TagBuilder)shape.Properties["Tag"];
parentTag.AddCssClass("disabled");
return DisplayAsync(shape);
}
private IHtmlContent CoerceHtmlString(object value)
{
if (value == null)
{
return null;
}
if (value is IHtmlContent result)
{
return result;
}
return new StringHtmlContent(value.ToString());
}
}
}
| |
#if !BESTHTTP_DISABLE_ALTERNATE_SSL && (!UNITY_WEBGL || UNITY_EDITOR)
using System;
using Org.BouncyCastle.Crypto;
using Org.BouncyCastle.Crypto.Parameters;
using Org.BouncyCastle.Math;
namespace Org.BouncyCastle.Crypto.Encodings
{
/**
* ISO 9796-1 padding. Note that in light of recent results you should
* only use this with RSA (rather than the "simpler" Rabin keys) and you
* should never use it with anything other than a hash (i.e. even if the
* message is small, don't sign the message itself; sign its hash) or some
* "random" value. See your favorite search engine for details.
*/
public class ISO9796d1Encoding
: IAsymmetricBlockCipher
{
private static readonly BigInteger Sixteen = BigInteger.ValueOf(16);
private static readonly BigInteger Six = BigInteger.ValueOf(6);
private static readonly byte[] shadows = { 0xe, 0x3, 0x5, 0x8, 0x9, 0x4, 0x2, 0xf,
0x0, 0xd, 0xb, 0x6, 0x7, 0xa, 0xc, 0x1 };
private static readonly byte[] inverse = { 0x8, 0xf, 0x6, 0x1, 0x5, 0x2, 0xb, 0xc,
0x3, 0x4, 0xd, 0xa, 0xe, 0x9, 0x0, 0x7 };
private readonly IAsymmetricBlockCipher engine;
private bool forEncryption;
private int bitSize;
private int padBits = 0;
private BigInteger modulus;
public ISO9796d1Encoding(
IAsymmetricBlockCipher cipher)
{
this.engine = cipher;
}
public string AlgorithmName
{
get { return engine.AlgorithmName + "/ISO9796-1Padding"; }
}
public IAsymmetricBlockCipher GetUnderlyingCipher()
{
return engine;
}
public void Init(
bool forEncryption,
ICipherParameters parameters)
{
RsaKeyParameters kParam;
if (parameters is ParametersWithRandom)
{
ParametersWithRandom rParam = (ParametersWithRandom)parameters;
kParam = (RsaKeyParameters)rParam.Parameters;
}
else
{
kParam = (RsaKeyParameters)parameters;
}
engine.Init(forEncryption, parameters);
modulus = kParam.Modulus;
bitSize = modulus.BitLength;
this.forEncryption = forEncryption;
}
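// Illustrative usage sketch (added, not part of the original source; the key parameters and the
// RsaEngine from Org.BouncyCastle.Crypto.Engines are assumed to be available):
//
//   IAsymmetricBlockCipher signer = new ISO9796d1Encoding(new Org.BouncyCastle.Crypto.Engines.RsaEngine());
//   signer.Init(true, rsaPrivateKeyParameters);                      // encode direction
//   byte[] encoded = signer.ProcessBlock(digest, 0, digest.Length);
//
// The verifying side calls Init(false, rsaPublicKeyParameters) and recovers the message via ProcessBlock.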
/**
* return the input block size. The largest message we can process
* is (key_size_in_bits + 3)/16, which in our world comes to
* key_size_in_bytes / 2.
*/
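/*
* Worked example (illustrative addition): for a 2048-bit modulus the underlying RSA engine
* typically reports a 255-byte input block, so this wrapper accepts (255 + 1) / 2 = 128 bytes,
* matching (2048 + 3) / 16 = 128.
*/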
public int GetInputBlockSize()
{
int baseBlockSize = engine.GetInputBlockSize();
if (forEncryption)
{
return (baseBlockSize + 1) / 2;
}
else
{
return baseBlockSize;
}
}
/**
* return the maximum possible size for the output.
*/
public int GetOutputBlockSize()
{
int baseBlockSize = engine.GetOutputBlockSize();
if (forEncryption)
{
return baseBlockSize;
}
else
{
return (baseBlockSize + 1) / 2;
}
}
/**
* set the number of bits in the next message to be treated as
* pad bits.
*/
public void SetPadBits(
int padBits)
{
if (padBits > 7)
{
throw new ArgumentException("padBits > 7");
}
this.padBits = padBits;
}
/**
* retrieve the number of pad bits in the last decoded message.
*/
public int GetPadBits()
{
return padBits;
}
public byte[] ProcessBlock(
byte[] input,
int inOff,
int length)
{
if (forEncryption)
{
return EncodeBlock(input, inOff, length);
}
else
{
return DecodeBlock(input, inOff, length);
}
}
private byte[] EncodeBlock(
byte[] input,
int inOff,
int inLen)
{
byte[] block = new byte[(bitSize + 7) / 8];
int r = padBits + 1;
int z = inLen;
int t = (bitSize + 13) / 16;
for (int i = 0; i < t; i += z)
{
if (i > t - z)
{
Array.Copy(input, inOff + inLen - (t - i),
block, block.Length - t, t - i);
}
else
{
Array.Copy(input, inOff, block, block.Length - (i + z), z);
}
}
for (int i = block.Length - 2 * t; i != block.Length; i += 2)
{
byte val = block[block.Length - t + i / 2];
block[i] = (byte)((shadows[(uint) (val & 0xff) >> 4] << 4)
| shadows[val & 0x0f]);
block[i + 1] = val;
}
block[block.Length - 2 * z] ^= (byte) r;
block[block.Length - 1] = (byte)((block[block.Length - 1] << 4) | 0x06);
int maxBit = (8 - (bitSize - 1) % 8);
int offSet = 0;
if (maxBit != 8)
{
block[0] &= (byte) ((ushort) 0xff >> maxBit);
block[0] |= (byte) ((ushort) 0x80 >> maxBit);
}
else
{
block[0] = 0x00;
block[1] |= 0x80;
offSet = 1;
}
return engine.ProcessBlock(block, offSet, block.Length - offSet);
}
/**
* @exception InvalidCipherTextException if the decrypted block is not a valid ISO 9796 bit string
*/
private byte[] DecodeBlock(
byte[] input,
int inOff,
int inLen)
{
byte[] block = engine.ProcessBlock(input, inOff, inLen);
int r = 1;
int t = (bitSize + 13) / 16;
BigInteger iS = new BigInteger(1, block);
BigInteger iR;
if (iS.Mod(Sixteen).Equals(Six))
{
iR = iS;
}
else
{
iR = modulus.Subtract(iS);
if (!iR.Mod(Sixteen).Equals(Six))
throw new InvalidCipherTextException("resulting integer iS or (modulus - iS) is not congruent to 6 mod 16");
}
block = iR.ToByteArrayUnsigned();
if ((block[block.Length - 1] & 0x0f) != 0x6)
throw new InvalidCipherTextException("invalid forcing byte in block");
block[block.Length - 1] =
(byte)(((ushort)(block[block.Length - 1] & 0xff) >> 4)
| ((inverse[(block[block.Length - 2] & 0xff) >> 4]) << 4));
block[0] = (byte)((shadows[(uint) (block[1] & 0xff) >> 4] << 4)
| shadows[block[1] & 0x0f]);
bool boundaryFound = false;
int boundary = 0;
for (int i = block.Length - 1; i >= block.Length - 2 * t; i -= 2)
{
int val = ((shadows[(uint) (block[i] & 0xff) >> 4] << 4)
| shadows[block[i] & 0x0f]);
if (((block[i - 1] ^ val) & 0xff) != 0)
{
if (!boundaryFound)
{
boundaryFound = true;
r = (block[i - 1] ^ val) & 0xff;
boundary = i - 1;
}
else
{
throw new InvalidCipherTextException("invalid tsums in block");
}
}
}
block[boundary] = 0;
byte[] nblock = new byte[(block.Length - boundary) / 2];
for (int i = 0; i < nblock.Length; i++)
{
nblock[i] = block[2 * i + boundary + 1];
}
padBits = r - 1;
return nblock;
}
}
}
#endif
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Reflection;
using System.Text;
using System.Xml;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Security;
using System.Runtime.CompilerServices;
#if !uapaot
using ExtensionDataObject = System.Object;
#endif
namespace System.Runtime.Serialization
{
#if USE_REFEMIT || uapaot
public class XmlObjectSerializerWriteContext : XmlObjectSerializerContext
#else
internal class XmlObjectSerializerWriteContext : XmlObjectSerializerContext
#endif
{
private ObjectReferenceStack _byValObjectsInScope = new ObjectReferenceStack();
private XmlSerializableWriter _xmlSerializableWriter;
private const int depthToCheckCyclicReference = 512;
private ObjectToIdCache _serializedObjects;
private bool _isGetOnlyCollection;
private readonly bool _unsafeTypeForwardingEnabled;
protected bool serializeReadOnlyTypes;
protected bool preserveObjectReferences;
internal static XmlObjectSerializerWriteContext CreateContext(DataContractSerializer serializer, DataContract rootTypeDataContract, DataContractResolver dataContractResolver)
{
return (serializer.PreserveObjectReferences || serializer.SerializationSurrogateProvider != null)
? new XmlObjectSerializerWriteContextComplex(serializer, rootTypeDataContract, dataContractResolver)
: new XmlObjectSerializerWriteContext(serializer, rootTypeDataContract, dataContractResolver);
}
protected XmlObjectSerializerWriteContext(DataContractSerializer serializer, DataContract rootTypeDataContract, DataContractResolver resolver)
: base(serializer, rootTypeDataContract, resolver)
{
this.serializeReadOnlyTypes = serializer.SerializeReadOnlyTypes;
// Known types restrict the set of types that can be deserialized.
_unsafeTypeForwardingEnabled = true;
}
internal XmlObjectSerializerWriteContext(XmlObjectSerializer serializer, int maxItemsInObjectGraph, StreamingContext streamingContext, bool ignoreExtensionDataObject)
: base(serializer, maxItemsInObjectGraph, streamingContext, ignoreExtensionDataObject)
{
// Known types restrict the set of types that can be deserialized.
_unsafeTypeForwardingEnabled = true;
}
#if USE_REFEMIT || uapaot
internal ObjectToIdCache SerializedObjects
#else
protected ObjectToIdCache SerializedObjects
#endif
{
get
{
if (_serializedObjects == null)
_serializedObjects = new ObjectToIdCache();
return _serializedObjects;
}
}
internal override bool IsGetOnlyCollection
{
get { return _isGetOnlyCollection; }
set { _isGetOnlyCollection = value; }
}
internal bool SerializeReadOnlyTypes
{
get { return this.serializeReadOnlyTypes; }
}
internal bool UnsafeTypeForwardingEnabled
{
get { return _unsafeTypeForwardingEnabled; }
}
#if USE_REFEMIT
public void StoreIsGetOnlyCollection()
#else
internal void StoreIsGetOnlyCollection()
#endif
{
_isGetOnlyCollection = true;
}
internal void ResetIsGetOnlyCollection()
{
_isGetOnlyCollection = false;
}
#if USE_REFEMIT
public void InternalSerializeReference(XmlWriterDelegator xmlWriter, object obj, bool isDeclaredType, bool writeXsiType, int declaredTypeID, RuntimeTypeHandle declaredTypeHandle)
#else
internal void InternalSerializeReference(XmlWriterDelegator xmlWriter, object obj, bool isDeclaredType, bool writeXsiType, int declaredTypeID, RuntimeTypeHandle declaredTypeHandle)
#endif
{
if (!OnHandleReference(xmlWriter, obj, true /*canContainCyclicReference*/))
InternalSerialize(xmlWriter, obj, isDeclaredType, writeXsiType, declaredTypeID, declaredTypeHandle);
OnEndHandleReference(xmlWriter, obj, true /*canContainCyclicReference*/);
}
#if USE_REFEMIT
public virtual void InternalSerialize(XmlWriterDelegator xmlWriter, object obj, bool isDeclaredType, bool writeXsiType, int declaredTypeID, RuntimeTypeHandle declaredTypeHandle)
#else
internal virtual void InternalSerialize(XmlWriterDelegator xmlWriter, object obj, bool isDeclaredType, bool writeXsiType, int declaredTypeID, RuntimeTypeHandle declaredTypeHandle)
#endif
{
if (writeXsiType)
{
Type declaredType = Globals.TypeOfObject;
SerializeWithXsiType(xmlWriter, obj, obj.GetType().TypeHandle, null/*type*/, -1, declaredType.TypeHandle, declaredType);
}
else if (isDeclaredType)
{
DataContract contract = GetDataContract(declaredTypeID, declaredTypeHandle);
SerializeWithoutXsiType(contract, xmlWriter, obj, declaredTypeHandle);
}
else
{
RuntimeTypeHandle objTypeHandle = obj.GetType().TypeHandle;
if (declaredTypeHandle.GetHashCode() == objTypeHandle.GetHashCode()) // semantically the same as Value == Value; Value is not available in Silverlight
{
DataContract dataContract = (declaredTypeID >= 0)
? GetDataContract(declaredTypeID, declaredTypeHandle)
: GetDataContract(declaredTypeHandle, null /*type*/);
SerializeWithoutXsiType(dataContract, xmlWriter, obj, declaredTypeHandle);
}
else
{
SerializeWithXsiType(xmlWriter, obj, objTypeHandle, null /*type*/, declaredTypeID, declaredTypeHandle, Type.GetTypeFromHandle(declaredTypeHandle));
}
}
}
internal void SerializeWithoutXsiType(DataContract dataContract, XmlWriterDelegator xmlWriter, object obj, RuntimeTypeHandle declaredTypeHandle)
{
if (OnHandleIsReference(xmlWriter, dataContract, obj))
return;
if (dataContract.KnownDataContracts != null)
{
scopedKnownTypes.Push(dataContract.KnownDataContracts);
WriteDataContractValue(dataContract, xmlWriter, obj, declaredTypeHandle);
scopedKnownTypes.Pop();
}
else
{
WriteDataContractValue(dataContract, xmlWriter, obj, declaredTypeHandle);
}
}
internal virtual void SerializeWithXsiTypeAtTopLevel(DataContract dataContract, XmlWriterDelegator xmlWriter, object obj, RuntimeTypeHandle originalDeclaredTypeHandle, Type graphType)
{
bool verifyKnownType = false;
Type declaredType = rootTypeDataContract.UnderlyingType;
if (declaredType.IsInterface && CollectionDataContract.IsCollectionInterface(declaredType))
{
if (DataContractResolver != null)
{
WriteResolvedTypeInfo(xmlWriter, graphType, declaredType);
}
}
else if (!declaredType.IsArray) // Array covariance is not supported in XSD. If the declared type is an array, do not write xsi:type; instead write xsi:type for each item.
{
verifyKnownType = WriteTypeInfo(xmlWriter, dataContract, rootTypeDataContract);
}
SerializeAndVerifyType(dataContract, xmlWriter, obj, verifyKnownType, originalDeclaredTypeHandle, declaredType);
}
protected virtual void SerializeWithXsiType(XmlWriterDelegator xmlWriter, object obj, RuntimeTypeHandle objectTypeHandle, Type objectType, int declaredTypeID, RuntimeTypeHandle declaredTypeHandle, Type declaredType)
{
bool verifyKnownType = false;
DataContract dataContract;
if (declaredType.IsInterface && CollectionDataContract.IsCollectionInterface(declaredType))
{
#if !uapaot
dataContract = GetDataContractSkipValidation(DataContract.GetId(objectTypeHandle), objectTypeHandle, objectType);
if (OnHandleIsReference(xmlWriter, dataContract, obj))
return;
dataContract = GetDataContract(declaredTypeHandle, declaredType);
#else
dataContract = DataContract.GetDataContract(declaredType);
if (OnHandleIsReference(xmlWriter, dataContract, obj))
return;
if (this.Mode == SerializationMode.SharedType && dataContract.IsValidContract(this.Mode))
dataContract = dataContract.GetValidContract(this.Mode);
else
dataContract = GetDataContract(declaredTypeHandle, declaredType);
#endif
if (!WriteClrTypeInfo(xmlWriter, dataContract) && DataContractResolver != null)
{
if (objectType == null)
{
objectType = Type.GetTypeFromHandle(objectTypeHandle);
}
WriteResolvedTypeInfo(xmlWriter, objectType, declaredType);
}
}
else if (declaredType.IsArray) // Array covariance is not supported in XSD. If the declared type is an array, do not write xsi:type; instead write xsi:type for each item.
{
// A call to OnHandleIsReference is not necessary here -- arrays cannot be IsReference
dataContract = GetDataContract(objectTypeHandle, objectType);
WriteClrTypeInfo(xmlWriter, dataContract);
dataContract = GetDataContract(declaredTypeHandle, declaredType);
}
else
{
dataContract = GetDataContract(objectTypeHandle, objectType);
if (OnHandleIsReference(xmlWriter, dataContract, obj))
return;
if (!WriteClrTypeInfo(xmlWriter, dataContract))
{
DataContract declaredTypeContract = (declaredTypeID >= 0)
? GetDataContract(declaredTypeID, declaredTypeHandle)
: GetDataContract(declaredTypeHandle, declaredType);
verifyKnownType = WriteTypeInfo(xmlWriter, dataContract, declaredTypeContract);
}
}
SerializeAndVerifyType(dataContract, xmlWriter, obj, verifyKnownType, declaredTypeHandle, declaredType);
}
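// Illustrative note (added): for data contracts marked IsReference = true, the first occurrence of
// an object is written with an Id attribute (values "i1", "i2", ... under the serialization prefix)
// and later occurrences are written as a Ref attribute pointing back to that id instead of being
// re-serialized; OnHandleIsReference below implements that bookkeeping.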
internal bool OnHandleIsReference(XmlWriterDelegator xmlWriter, DataContract contract, object obj)
{
if (preserveObjectReferences || !contract.IsReference || _isGetOnlyCollection)
{
return false;
}
bool isNew = true;
int objectId = SerializedObjects.GetId(obj, ref isNew);
_byValObjectsInScope.EnsureSetAsIsReference(obj);
if (isNew)
{
xmlWriter.WriteAttributeString(Globals.SerPrefix, DictionaryGlobals.IdLocalName,
DictionaryGlobals.SerializationNamespace, string.Format(CultureInfo.InvariantCulture, "i{0}", objectId));
return false;
}
else
{
xmlWriter.WriteAttributeString(Globals.SerPrefix, DictionaryGlobals.RefLocalName, DictionaryGlobals.SerializationNamespace, string.Format(CultureInfo.InvariantCulture, "i{0}", objectId));
return true;
}
}
protected void SerializeAndVerifyType(DataContract dataContract, XmlWriterDelegator xmlWriter, object obj, bool verifyKnownType, RuntimeTypeHandle declaredTypeHandle, Type declaredType)
{
bool knownTypesAddedInCurrentScope = false;
if (dataContract.KnownDataContracts != null)
{
scopedKnownTypes.Push(dataContract.KnownDataContracts);
knownTypesAddedInCurrentScope = true;
}
#if !uapaot
if (verifyKnownType)
{
if (!IsKnownType(dataContract, declaredType))
{
DataContract knownContract = ResolveDataContractFromKnownTypes(dataContract.StableName.Name, dataContract.StableName.Namespace, null /*memberTypeContract*/, declaredType);
if (knownContract == null || knownContract.UnderlyingType != dataContract.UnderlyingType)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.DcTypeNotFoundOnSerialize, DataContract.GetClrTypeFullName(dataContract.UnderlyingType), dataContract.StableName.Name, dataContract.StableName.Namespace)));
}
}
}
#endif
WriteDataContractValue(dataContract, xmlWriter, obj, declaredTypeHandle);
if (knownTypesAddedInCurrentScope)
{
scopedKnownTypes.Pop();
}
}
internal virtual bool WriteClrTypeInfo(XmlWriterDelegator xmlWriter, DataContract dataContract)
{
return false;
}
internal virtual bool WriteClrTypeInfo(XmlWriterDelegator xmlWriter, string clrTypeName, string clrAssemblyName)
{
return false;
}
internal virtual bool WriteClrTypeInfo(XmlWriterDelegator xmlWriter, Type dataContractType, string clrTypeName, string clrAssemblyName)
{
return false;
}
internal virtual bool WriteClrTypeInfo(XmlWriterDelegator xmlWriter, Type dataContractType, SerializationInfo serInfo)
{
return false;
}
#if USE_REFEMIT || uapaot
public virtual void WriteAnyType(XmlWriterDelegator xmlWriter, object value)
#else
internal virtual void WriteAnyType(XmlWriterDelegator xmlWriter, object value)
#endif
{
xmlWriter.WriteAnyType(value);
}
#if USE_REFEMIT || uapaot
public virtual void WriteString(XmlWriterDelegator xmlWriter, string value)
#else
internal virtual void WriteString(XmlWriterDelegator xmlWriter, string value)
#endif
{
xmlWriter.WriteString(value);
}
#if USE_REFEMIT || uapaot
public virtual void WriteString(XmlWriterDelegator xmlWriter, string value, XmlDictionaryString name, XmlDictionaryString ns)
#else
internal virtual void WriteString(XmlWriterDelegator xmlWriter, string value, XmlDictionaryString name, XmlDictionaryString ns)
#endif
{
if (value == null)
WriteNull(xmlWriter, typeof(string), true/*isMemberTypeSerializable*/, name, ns);
else
{
xmlWriter.WriteStartElementPrimitive(name, ns);
xmlWriter.WriteString(value);
xmlWriter.WriteEndElementPrimitive();
}
}
#if USE_REFEMIT || uapaot
public virtual void WriteBase64(XmlWriterDelegator xmlWriter, byte[] value)
#else
internal virtual void WriteBase64(XmlWriterDelegator xmlWriter, byte[] value)
#endif
{
xmlWriter.WriteBase64(value);
}
#if USE_REFEMIT || uapaot
public virtual void WriteBase64(XmlWriterDelegator xmlWriter, byte[] value, XmlDictionaryString name, XmlDictionaryString ns)
#else
internal virtual void WriteBase64(XmlWriterDelegator xmlWriter, byte[] value, XmlDictionaryString name, XmlDictionaryString ns)
#endif
{
if (value == null)
WriteNull(xmlWriter, typeof(byte[]), true/*isMemberTypeSerializable*/, name, ns);
else
{
xmlWriter.WriteStartElementPrimitive(name, ns);
xmlWriter.WriteBase64(value);
xmlWriter.WriteEndElementPrimitive();
}
}
#if USE_REFEMIT || uapaot
public virtual void WriteUri(XmlWriterDelegator xmlWriter, Uri value)
#else
internal virtual void WriteUri(XmlWriterDelegator xmlWriter, Uri value)
#endif
{
xmlWriter.WriteUri(value);
}
#if USE_REFEMIT || uapaot
public virtual void WriteUri(XmlWriterDelegator xmlWriter, Uri value, XmlDictionaryString name, XmlDictionaryString ns)
#else
internal virtual void WriteUri(XmlWriterDelegator xmlWriter, Uri value, XmlDictionaryString name, XmlDictionaryString ns)
#endif
{
if (value == null)
WriteNull(xmlWriter, typeof(Uri), true/*isMemberTypeSerializable*/, name, ns);
else
{
xmlWriter.WriteStartElementPrimitive(name, ns);
xmlWriter.WriteUri(value);
xmlWriter.WriteEndElementPrimitive();
}
}
#if USE_REFEMIT || uapaot
public virtual void WriteQName(XmlWriterDelegator xmlWriter, XmlQualifiedName value)
#else
internal virtual void WriteQName(XmlWriterDelegator xmlWriter, XmlQualifiedName value)
#endif
{
xmlWriter.WriteQName(value);
}
#if USE_REFEMIT || uapaot
public virtual void WriteQName(XmlWriterDelegator xmlWriter, XmlQualifiedName value, XmlDictionaryString name, XmlDictionaryString ns)
#else
internal virtual void WriteQName(XmlWriterDelegator xmlWriter, XmlQualifiedName value, XmlDictionaryString name, XmlDictionaryString ns)
#endif
{
if (value == null)
WriteNull(xmlWriter, typeof(XmlQualifiedName), true/*isMemberTypeSerializable*/, name, ns);
else
{
if (ns != null && ns.Value != null && ns.Value.Length > 0)
xmlWriter.WriteStartElement(Globals.ElementPrefix, name, ns);
else
xmlWriter.WriteStartElement(name, ns);
xmlWriter.WriteQName(value);
xmlWriter.WriteEndElement();
}
}
internal void HandleGraphAtTopLevel(XmlWriterDelegator writer, object obj, DataContract contract)
{
writer.WriteXmlnsAttribute(Globals.XsiPrefix, DictionaryGlobals.SchemaInstanceNamespace);
if (contract.IsISerializable)
{
writer.WriteXmlnsAttribute(Globals.XsdPrefix, DictionaryGlobals.SchemaNamespace);
}
OnHandleReference(writer, obj, true /*canContainReferences*/);
}
internal virtual bool OnHandleReference(XmlWriterDelegator xmlWriter, object obj, bool canContainCyclicReference)
{
if (xmlWriter.depth < depthToCheckCyclicReference)
return false;
if (canContainCyclicReference)
{
if (_byValObjectsInScope.Contains(obj))
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.CannotSerializeObjectWithCycles, DataContract.GetClrTypeFullName(obj.GetType()))));
_byValObjectsInScope.Push(obj);
}
return false;
}
internal virtual void OnEndHandleReference(XmlWriterDelegator xmlWriter, object obj, bool canContainCyclicReference)
{
if (xmlWriter.depth < depthToCheckCyclicReference)
return;
if (canContainCyclicReference)
{
_byValObjectsInScope.Pop(obj);
}
}
#if USE_REFEMIT
public void WriteNull(XmlWriterDelegator xmlWriter, Type memberType, bool isMemberTypeSerializable)
#else
internal void WriteNull(XmlWriterDelegator xmlWriter, Type memberType, bool isMemberTypeSerializable)
#endif
{
CheckIfTypeSerializable(memberType, isMemberTypeSerializable);
WriteNull(xmlWriter);
}
internal void WriteNull(XmlWriterDelegator xmlWriter, Type memberType, bool isMemberTypeSerializable, XmlDictionaryString name, XmlDictionaryString ns)
{
xmlWriter.WriteStartElement(name, ns);
WriteNull(xmlWriter, memberType, isMemberTypeSerializable);
xmlWriter.WriteEndElement();
}
#if USE_REFEMIT
public void IncrementArrayCount(XmlWriterDelegator xmlWriter, Array array)
#else
internal void IncrementArrayCount(XmlWriterDelegator xmlWriter, Array array)
#endif
{
IncrementCollectionCount(xmlWriter, array.GetLength(0));
}
#if USE_REFEMIT
public void IncrementCollectionCount(XmlWriterDelegator xmlWriter, ICollection collection)
#else
internal void IncrementCollectionCount(XmlWriterDelegator xmlWriter, ICollection collection)
#endif
{
IncrementCollectionCount(xmlWriter, collection.Count);
}
#if USE_REFEMIT
public void IncrementCollectionCountGeneric<T>(XmlWriterDelegator xmlWriter, ICollection<T> collection)
#else
internal void IncrementCollectionCountGeneric<T>(XmlWriterDelegator xmlWriter, ICollection<T> collection)
#endif
{
IncrementCollectionCount(xmlWriter, collection.Count);
}
private void IncrementCollectionCount(XmlWriterDelegator xmlWriter, int size)
{
IncrementItemCount(size);
WriteArraySize(xmlWriter, size);
}
internal virtual void WriteArraySize(XmlWriterDelegator xmlWriter, int size)
{
}
#if USE_REFEMIT
public static bool IsMemberTypeSameAsMemberValue(object obj, Type memberType)
#else
internal static bool IsMemberTypeSameAsMemberValue(object obj, Type memberType)
#endif
{
if (obj == null || memberType == null)
return false;
return obj.GetType().TypeHandle.Equals(memberType.TypeHandle);
}
#if USE_REFEMIT
public static T GetDefaultValue<T>()
#else
internal static T GetDefaultValue<T>()
#endif
{
return default(T);
}
#if USE_REFEMIT
public static T GetNullableValue<T>(Nullable<T> value) where T : struct
#else
internal static T GetNullableValue<T>(Nullable<T> value) where T : struct
#endif
{
// value.Value will throw if HasValue is false
return value.Value;
}
#if USE_REFEMIT
public static void ThrowRequiredMemberMustBeEmitted(string memberName, Type type)
#else
internal static void ThrowRequiredMemberMustBeEmitted(string memberName, Type type)
#endif
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new SerializationException(SR.Format(SR.RequiredMemberMustBeEmitted, memberName, type.FullName)));
}
#if USE_REFEMIT
public static bool GetHasValue<T>(Nullable<T> value) where T : struct
#else
internal static bool GetHasValue<T>(Nullable<T> value) where T : struct
#endif
{
return value.HasValue;
}
internal void WriteIXmlSerializable(XmlWriterDelegator xmlWriter, object obj)
{
if (_xmlSerializableWriter == null)
_xmlSerializableWriter = new XmlSerializableWriter();
WriteIXmlSerializable(xmlWriter, obj, _xmlSerializableWriter);
}
internal static void WriteRootIXmlSerializable(XmlWriterDelegator xmlWriter, object obj)
{
WriteIXmlSerializable(xmlWriter, obj, new XmlSerializableWriter());
}
private static void WriteIXmlSerializable(XmlWriterDelegator xmlWriter, object obj, XmlSerializableWriter xmlSerializableWriter)
{
xmlSerializableWriter.BeginWrite(xmlWriter.Writer, obj);
IXmlSerializable xmlSerializable = obj as IXmlSerializable;
if (xmlSerializable != null)
xmlSerializable.WriteXml(xmlSerializableWriter);
else
{
XmlElement xmlElement = obj as XmlElement;
if (xmlElement != null)
xmlElement.WriteTo(xmlSerializableWriter);
else
{
XmlNode[] xmlNodes = obj as XmlNode[];
if (xmlNodes != null)
foreach (XmlNode xmlNode in xmlNodes)
xmlNode.WriteTo(xmlSerializableWriter);
else
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.UnknownXmlType, DataContract.GetClrTypeFullName(obj.GetType()))));
}
}
xmlSerializableWriter.EndWrite();
}
[MethodImpl(MethodImplOptions.NoInlining)]
internal void GetObjectData(ISerializable obj, SerializationInfo serInfo, StreamingContext context)
{
obj.GetObjectData(serInfo, context);
}
public void WriteISerializable(XmlWriterDelegator xmlWriter, ISerializable obj)
{
Type objType = obj.GetType();
var serInfo = new SerializationInfo(objType, XmlObjectSerializer.FormatterConverter /*!UnsafeTypeForwardingEnabled is always false*/);
GetObjectData(obj, serInfo, GetStreamingContext());
if (!UnsafeTypeForwardingEnabled && serInfo.AssemblyName == Globals.MscorlibAssemblyName)
{
// Throw if a malicious type tries to set its assembly name to "0" to get deserialized in mscorlib
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ISerializableAssemblyNameSetToZero, DataContract.GetClrTypeFullName(obj.GetType()))));
}
WriteSerializationInfo(xmlWriter, objType, serInfo);
}
internal void WriteSerializationInfo(XmlWriterDelegator xmlWriter, Type objType, SerializationInfo serInfo)
{
if (DataContract.GetClrTypeFullName(objType) != serInfo.FullTypeName)
{
if (DataContractResolver != null)
{
XmlDictionaryString typeName, typeNs;
if (ResolveType(serInfo.ObjectType, objType, out typeName, out typeNs))
{
xmlWriter.WriteAttributeQualifiedName(Globals.SerPrefix, DictionaryGlobals.ISerializableFactoryTypeLocalName, DictionaryGlobals.SerializationNamespace, typeName, typeNs);
}
}
else
{
string typeName, typeNs;
DataContract.GetDefaultStableName(serInfo.FullTypeName, out typeName, out typeNs);
xmlWriter.WriteAttributeQualifiedName(Globals.SerPrefix, DictionaryGlobals.ISerializableFactoryTypeLocalName, DictionaryGlobals.SerializationNamespace, DataContract.GetClrTypeString(typeName), DataContract.GetClrTypeString(typeNs));
}
}
WriteClrTypeInfo(xmlWriter, objType, serInfo);
IncrementItemCount(serInfo.MemberCount);
foreach (SerializationEntry serEntry in serInfo)
{
XmlDictionaryString name = DataContract.GetClrTypeString(DataContract.EncodeLocalName(serEntry.Name));
xmlWriter.WriteStartElement(name, DictionaryGlobals.EmptyString);
object obj = serEntry.Value;
if (obj == null)
{
WriteNull(xmlWriter);
}
else
{
InternalSerializeReference(xmlWriter, obj, false /*isDeclaredType*/, false /*writeXsiType*/, -1, Globals.TypeOfObject.TypeHandle);
}
xmlWriter.WriteEndElement();
}
}
protected virtual void WriteDataContractValue(DataContract dataContract, XmlWriterDelegator xmlWriter, object obj, RuntimeTypeHandle declaredTypeHandle)
{
dataContract.WriteXmlValue(xmlWriter, obj, this);
}
protected virtual void WriteNull(XmlWriterDelegator xmlWriter)
{
XmlObjectSerializer.WriteNull(xmlWriter);
}
private void WriteResolvedTypeInfo(XmlWriterDelegator writer, Type objectType, Type declaredType)
{
XmlDictionaryString typeName, typeNamespace;
if (ResolveType(objectType, declaredType, out typeName, out typeNamespace))
{
WriteTypeInfo(writer, typeName, typeNamespace);
}
}
private bool ResolveType(Type objectType, Type declaredType, out XmlDictionaryString typeName, out XmlDictionaryString typeNamespace)
{
if (!DataContractResolver.TryResolveType(objectType, declaredType, KnownTypeResolver, out typeName, out typeNamespace))
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ResolveTypeReturnedFalse, DataContract.GetClrTypeFullName(DataContractResolver.GetType()), DataContract.GetClrTypeFullName(objectType))));
}
if (typeName == null)
{
if (typeNamespace == null)
{
return false;
}
else
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ResolveTypeReturnedNull, DataContract.GetClrTypeFullName(DataContractResolver.GetType()), DataContract.GetClrTypeFullName(objectType))));
}
}
if (typeNamespace == null)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ResolveTypeReturnedNull, DataContract.GetClrTypeFullName(DataContractResolver.GetType()), DataContract.GetClrTypeFullName(objectType))));
}
return true;
}
protected virtual bool WriteTypeInfo(XmlWriterDelegator writer, DataContract contract, DataContract declaredContract)
{
if (!XmlObjectSerializer.IsContractDeclared(contract, declaredContract))
{
if (DataContractResolver == null)
{
WriteTypeInfo(writer, contract.Name, contract.Namespace);
return true;
}
else
{
WriteResolvedTypeInfo(writer, contract.OriginalUnderlyingType, declaredContract.OriginalUnderlyingType);
return false;
}
}
return false;
}
protected virtual void WriteTypeInfo(XmlWriterDelegator writer, string dataContractName, string dataContractNamespace)
{
writer.WriteAttributeQualifiedName(Globals.XsiPrefix, DictionaryGlobals.XsiTypeLocalName, DictionaryGlobals.SchemaInstanceNamespace, dataContractName, dataContractNamespace);
}
protected virtual void WriteTypeInfo(XmlWriterDelegator writer, XmlDictionaryString dataContractName, XmlDictionaryString dataContractNamespace)
{
writer.WriteAttributeQualifiedName(Globals.XsiPrefix, DictionaryGlobals.XsiTypeLocalName, DictionaryGlobals.SchemaInstanceNamespace, dataContractName, dataContractNamespace);
}
public void WriteExtensionData(XmlWriterDelegator xmlWriter, ExtensionDataObject extensionData, int memberIndex)
{
if (IgnoreExtensionDataObject || extensionData == null)
return;
IList<ExtensionDataMember> members = extensionData.Members;
if (members != null)
{
for (int i = 0; i < extensionData.Members.Count; i++)
{
ExtensionDataMember member = extensionData.Members[i];
if (member.MemberIndex == memberIndex)
{
WriteExtensionDataMember(xmlWriter, member);
}
}
}
}
private void WriteExtensionDataMember(XmlWriterDelegator xmlWriter, ExtensionDataMember member)
{
xmlWriter.WriteStartElement(member.Name, member.Namespace);
IDataNode dataNode = member.Value;
WriteExtensionDataValue(xmlWriter, dataNode);
xmlWriter.WriteEndElement();
}
internal virtual void WriteExtensionDataTypeInfo(XmlWriterDelegator xmlWriter, IDataNode dataNode)
{
if (dataNode.DataContractName != null)
WriteTypeInfo(xmlWriter, dataNode.DataContractName, dataNode.DataContractNamespace);
WriteClrTypeInfo(xmlWriter, dataNode.DataType, dataNode.ClrTypeName, dataNode.ClrAssemblyName);
}
internal void WriteExtensionDataValue(XmlWriterDelegator xmlWriter, IDataNode dataNode)
{
IncrementItemCount(1);
if (dataNode == null)
{
WriteNull(xmlWriter);
return;
}
if (dataNode.PreservesReferences
&& OnHandleReference(xmlWriter, (dataNode.Value == null ? dataNode : dataNode.Value), true /*canContainCyclicReference*/))
return;
Type dataType = dataNode.DataType;
if (dataType == Globals.TypeOfClassDataNode)
WriteExtensionClassData(xmlWriter, (ClassDataNode)dataNode);
else if (dataType == Globals.TypeOfCollectionDataNode)
WriteExtensionCollectionData(xmlWriter, (CollectionDataNode)dataNode);
else if (dataType == Globals.TypeOfXmlDataNode)
WriteExtensionXmlData(xmlWriter, (XmlDataNode)dataNode);
else if (dataType == Globals.TypeOfISerializableDataNode)
WriteExtensionISerializableData(xmlWriter, (ISerializableDataNode)dataNode);
else
{
WriteExtensionDataTypeInfo(xmlWriter, dataNode);
if (dataType == Globals.TypeOfObject)
{
// NOTE: serialize the value in DataNode<object> since it may contain a non-primitive
// deserialized object (e.g. an empty class).
object o = dataNode.Value;
if (o != null)
InternalSerialize(xmlWriter, o, false /*isDeclaredType*/, false /*writeXsiType*/, -1, o.GetType().TypeHandle);
}
else
xmlWriter.WriteExtensionData(dataNode);
}
if (dataNode.PreservesReferences)
OnEndHandleReference(xmlWriter, (dataNode.Value == null ? dataNode : dataNode.Value), true /*canContainCyclicReference*/);
}
internal bool TryWriteDeserializedExtensionData(XmlWriterDelegator xmlWriter, IDataNode dataNode)
{
object o = dataNode.Value;
if (o == null)
return false;
Type declaredType = (dataNode.DataContractName == null) ? o.GetType() : Globals.TypeOfObject;
InternalSerialize(xmlWriter, o, false /*isDeclaredType*/, false /*writeXsiType*/, -1, declaredType.TypeHandle);
return true;
}
private void WriteExtensionClassData(XmlWriterDelegator xmlWriter, ClassDataNode dataNode)
{
if (!TryWriteDeserializedExtensionData(xmlWriter, dataNode))
{
WriteExtensionDataTypeInfo(xmlWriter, dataNode);
IList<ExtensionDataMember> members = dataNode.Members;
if (members != null)
{
for (int i = 0; i < members.Count; i++)
{
WriteExtensionDataMember(xmlWriter, members[i]);
}
}
}
}
private void WriteExtensionCollectionData(XmlWriterDelegator xmlWriter, CollectionDataNode dataNode)
{
if (!TryWriteDeserializedExtensionData(xmlWriter, dataNode))
{
WriteExtensionDataTypeInfo(xmlWriter, dataNode);
WriteArraySize(xmlWriter, dataNode.Size);
IList<IDataNode> items = dataNode.Items;
if (items != null)
{
for (int i = 0; i < items.Count; i++)
{
xmlWriter.WriteStartElement(dataNode.ItemName, dataNode.ItemNamespace);
WriteExtensionDataValue(xmlWriter, items[i]);
xmlWriter.WriteEndElement();
}
}
}
}
private void WriteExtensionISerializableData(XmlWriterDelegator xmlWriter, ISerializableDataNode dataNode)
{
if (!TryWriteDeserializedExtensionData(xmlWriter, dataNode))
{
WriteExtensionDataTypeInfo(xmlWriter, dataNode);
if (dataNode.FactoryTypeName != null)
xmlWriter.WriteAttributeQualifiedName(Globals.SerPrefix, DictionaryGlobals.ISerializableFactoryTypeLocalName, DictionaryGlobals.SerializationNamespace, dataNode.FactoryTypeName, dataNode.FactoryTypeNamespace);
IList<ISerializableDataMember> members = dataNode.Members;
if (members != null)
{
for (int i = 0; i < members.Count; i++)
{
ISerializableDataMember member = members[i];
xmlWriter.WriteStartElement(member.Name, string.Empty);
WriteExtensionDataValue(xmlWriter, member.Value);
xmlWriter.WriteEndElement();
}
}
}
}
private void WriteExtensionXmlData(XmlWriterDelegator xmlWriter, XmlDataNode dataNode)
{
if (!TryWriteDeserializedExtensionData(xmlWriter, dataNode))
{
IList<XmlAttribute> xmlAttributes = dataNode.XmlAttributes;
if (xmlAttributes != null)
{
foreach (XmlAttribute attribute in xmlAttributes)
attribute.WriteTo(xmlWriter.Writer);
}
WriteExtensionDataTypeInfo(xmlWriter, dataNode);
IList<XmlNode> xmlChildNodes = dataNode.XmlChildNodes;
if (xmlChildNodes != null)
{
foreach (XmlNode node in xmlChildNodes)
node.WriteTo(xmlWriter.Writer);
}
}
}
}
}
| |
// ==++==
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// ==--==
//---------------------------------------------------------------------------
//
// CLASS: SecurityDocument.cs
//
// <OWNER>[....]</OWNER>
//
// PURPOSE: Represent an XML document
//
//
//---------------------------------------------------------------------------
namespace System.Security
{
using System;
using System.Collections;
using System.Security.Util;
using System.Text;
using System.Globalization;
using System.IO;
using System.Diagnostics.Contracts;
using StringMaker = System.Security.Util.Tokenizer.StringMaker;
#if !MONO
[Serializable]
sealed internal class SecurityDocumentElement : ISecurityElementFactory
{
private int m_position;
private SecurityDocument m_document;
internal SecurityDocumentElement( SecurityDocument document, int position )
{
m_document = document;
m_position = position;
}
SecurityElement ISecurityElementFactory.CreateSecurityElement()
{
return m_document.GetElement( m_position, true );
}
Object ISecurityElementFactory.Copy()
{
return new SecurityDocumentElement( m_document, m_position );
}
String ISecurityElementFactory.GetTag()
{
return m_document.GetTagForElement( m_position );
}
String ISecurityElementFactory.Attribute( String attributeName )
{
return m_document.GetAttributeForElement( m_position, attributeName );
}
}
#endif
[Serializable]
sealed internal class SecurityDocument
{
internal byte[] m_data;
internal const byte c_element = 1;
internal const byte c_attribute = 2;
internal const byte c_text = 3;
internal const byte c_children = 4;
internal const int c_growthSize = 32;
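// Illustrative note (added): the byte stream written by ConvertElement is, per element,
// c_element, the tag string, zero or more (c_attribute, name, value) triples, an optional
// (c_text, text) pair, the recursively encoded child elements, and a closing c_children token.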
public SecurityDocument( int numData )
{
m_data = new byte[numData];
}
public SecurityDocument( byte[] data )
{
this.m_data = data;
}
public SecurityDocument( SecurityElement elRoot )
{
m_data = new byte[c_growthSize];
int position = 0;
ConvertElement( elRoot, ref position );
}
public void GuaranteeSize( int size )
{
if (m_data.Length < size)
{
byte[] m_newData = new byte[((size / c_growthSize) + 1) * c_growthSize];
Array.Copy( m_data, 0, m_newData, 0, m_data.Length );
m_data = m_newData;
}
}
public void AddString( String str, ref int position )
{
// Encode the string as big-endian UTF-16 code units and append a two-byte zero terminator.
GuaranteeSize( position + str.Length * 2 + 2 );
for (int i = 0; i < str.Length; ++i)
{
m_data[position+(2*i)] = (byte)(str[i] >> 8);
m_data[position+(2*i)+1] = (byte)(str[i] & 0x00FF);
}
m_data[position + str.Length * 2] = 0;
m_data[position + str.Length * 2 + 1] = 0;
position += str.Length * 2 + 2;
}
public void AppendString( String str, ref int position )
{
if (position <= 1 ||
m_data[position - 1] != 0 ||
m_data[position - 2] != 0 )
throw new XmlSyntaxException();
position -= 2;
AddString( str, ref position );
}
public static int EncodedStringSize( String str )
{
return str.Length * 2 + 2;
}
public String GetString( ref int position )
{
return GetString( ref position, true );
}
public String GetString( ref int position, bool bCreate )
{
int stringEnd;
bool bFoundEnd = false;
for (stringEnd = position; stringEnd < m_data.Length-1; stringEnd += 2)
{
if (m_data[stringEnd] == 0 && m_data[stringEnd + 1] == 0)
{
bFoundEnd = true;
break;
}
}
Contract.Assert(bFoundEnd, "Malformed string in parse data");
StringMaker m = System.SharedStatics.GetSharedStringMaker();
try
{
if (bCreate)
{
m._outStringBuilder = null;
m._outIndex = 0;
for (int i = position; i < stringEnd; i += 2)
{
char c = (char)(m_data[i] << 8 | m_data[i+1]);
// add character to the string
if (m._outIndex < StringMaker.outMaxSize)
{
// easy case
m._outChars[m._outIndex++] = c;
}
else
{
if (m._outStringBuilder == null)
{
// OK, first check if we have to init the StringBuilder
m._outStringBuilder = new StringBuilder();
}
// OK, copy from _outChars to _outStringBuilder
m._outStringBuilder.Append(m._outChars, 0, StringMaker.outMaxSize);
// reset _outChars pointer
m._outChars[0] = c;
m._outIndex = 1;
}
}
}
position = stringEnd + 2;
if (bCreate)
return m.MakeString();
else
return null;
}
finally
{
System.SharedStatics.ReleaseSharedStringMaker(ref m);
}
}
public void AddToken( byte b, ref int position )
{
GuaranteeSize( position + 1 );
m_data[position++] = b;
}
public void ConvertElement( SecurityElement elCurrent, ref int position )
{
AddToken( c_element, ref position );
AddString( elCurrent.m_strTag, ref position );
if (elCurrent.m_lAttributes != null)
{
for (int i = 0; i < elCurrent.m_lAttributes.Count; i+=2)
{
AddToken( c_attribute, ref position );
AddString( (String)elCurrent.m_lAttributes[i], ref position );
AddString( (String)elCurrent.m_lAttributes[i+1], ref position );
}
}
if (elCurrent.m_strText != null)
{
AddToken( c_text, ref position );
AddString( elCurrent.m_strText, ref position );
}
if (elCurrent.InternalChildren != null)
{
for (int i = 0; i < elCurrent.InternalChildren.Count; ++i)
{
ConvertElement( (SecurityElement)elCurrent.Children[i], ref position );
}
}
AddToken( c_children, ref position );
}
public SecurityElement GetRootElement()
{
return GetElement( 0, true );
}
public SecurityElement GetElement( int position, bool bCreate )
{
SecurityElement elRoot = InternalGetElement( ref position, bCreate );
return elRoot;
}
internal SecurityElement InternalGetElement( ref int position, bool bCreate )
{
if (m_data.Length <= position)
throw new XmlSyntaxException();
if (m_data[position++] != c_element)
throw new XmlSyntaxException();
SecurityElement elCurrent = null;
String strTag = GetString( ref position, bCreate );
if (bCreate)
elCurrent = new SecurityElement( strTag );
while (m_data[position] == c_attribute)
{
position++;
String strName = GetString( ref position, bCreate );
String strValue = GetString( ref position, bCreate );
if (bCreate)
elCurrent.AddAttribute( strName, strValue );
}
if (m_data[position] == c_text)
{
position++;
String strText = GetString( ref position, bCreate );
if (bCreate)
elCurrent.m_strText = strText;
}
while (m_data[position] != c_children)
{
SecurityElement elChild = InternalGetElement( ref position, bCreate );
if (bCreate)
elCurrent.AddChild( elChild );
}
position++;
return elCurrent;
}
public String GetTagForElement( int position )
{
if (m_data.Length <= position)
throw new XmlSyntaxException();
if (m_data[position++] != c_element)
throw new XmlSyntaxException();
String strTag = GetString( ref position );
return strTag;
}
public ArrayList GetChildrenPositionForElement( int position )
{
if (m_data.Length <= position)
throw new XmlSyntaxException();
if (m_data[position++] != c_element)
throw new XmlSyntaxException();
ArrayList children = new ArrayList();
// This is to move past the tag string
GetString( ref position );
while (m_data[position] == c_attribute)
{
position++;
// Read name and value, then throw them away
GetString( ref position, false );
GetString( ref position, false );
}
if (m_data[position] == c_text)
{
position++;
// Read text, then throw it away.
GetString( ref position, false );
}
while (m_data[position] != c_children)
{
children.Add( position );
InternalGetElement( ref position, false );
}
position++;
return children;
}
public String GetAttributeForElement( int position, String attributeName )
{
if (m_data.Length <= position)
throw new XmlSyntaxException();
if (m_data[position++] != c_element)
throw new XmlSyntaxException();
String strRetValue = null;
// This is to move past the tag string.
GetString( ref position, false );
while (m_data[position] == c_attribute)
{
position++;
String strName = GetString( ref position );
String strValue = GetString( ref position );
if (String.Equals( strName, attributeName ))
{
strRetValue = strValue;
break;
}
}
return strRetValue;
}
}
}
| |
//---------------------------------------------------------------------------
//
// <copyright file="PropertyCondition.cs" company="Microsoft">
// Copyright (C) Microsoft Corporation. All rights reserved.
// </copyright>
//
//
// Description:
//
// History:
// 10/14/2003 : BrendanM - Created
//
//---------------------------------------------------------------------------
using System;
using System.Globalization;
using System.Windows.Automation;
using MS.Internal.Automation;
namespace System.Windows.Automation
{
/// <summary>
/// Flags that affect how a property value is compared in a PropertyCondition
/// </summary>
[Flags]
#if (INTERNAL_COMPILE)
internal enum PropertyConditionFlags
#else
public enum PropertyConditionFlags
#endif
{
///<summary>Properties are to be compared using default options (e.g. case-sensitive comparison for strings)</summary>
None = 0x00,
///<summary>For string comparisons, specifies that a case-insensitive comparison should be used</summary>
IgnoreCase = 0x01,
}
/// <summary>
/// Condition that checks whether a property has the specified value
/// </summary>
#if (INTERNAL_COMPILE)
internal class PropertyCondition : Condition
#else
public class PropertyCondition : Condition
#endif
{
//------------------------------------------------------
//
// Constructors
//
//------------------------------------------------------
#region Constructors
/// <summary>
/// Constructor to create a condition that checks whether a property has the specified value
/// </summary>
/// <param name="property">The property to check</param>
/// <param name="value">The value to check the property for</param>
public PropertyCondition( AutomationProperty property, object value )
{
Init(property, value, PropertyConditionFlags.None);
}
/// <summary>
/// Constructor to create a condition that checks whether a property has the specified value
/// </summary>
/// <param name="property">The property to check</param>
/// <param name="value">The value to check the property for</param>
/// <param name="flags">Flags that affect the comparison</param>
public PropertyCondition( AutomationProperty property, object value, PropertyConditionFlags flags )
{
Init(property, value, flags);
}
#endregion Constructors
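// Illustrative usage sketch (added; the element, property, and value are assumptions based on the
// standard UI Automation client API):
//
//   var condition = new PropertyCondition(AutomationElement.NameProperty, "OK",
//                                         PropertyConditionFlags.IgnoreCase);
//   AutomationElement okButton = rootElement.FindFirst(TreeScope.Descendants, condition);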
//------------------------------------------------------
//
// Public Properties
//
//------------------------------------------------------
#region Public Properties
/// <summary>
/// Returns the property that this condition is checking for
/// </summary>
public AutomationProperty Property
{
get
{
return _property;
}
}
/// <summary>
/// Returns the value of the property that this condition is checking for
/// </summary>
public object Value
{
get
{
return _val;
}
}
/// <summary>
/// Returns the flags used in this property comparison
/// </summary>
public PropertyConditionFlags Flags
{
get
{
return _flags;
}
}
#endregion Public Properties
//------------------------------------------------------
//
// Private Methods
//
//------------------------------------------------------
#region Private Methods
void Init(AutomationProperty property, object val, PropertyConditionFlags flags )
{
Misc.ValidateArgumentNonNull(property, "property");
AutomationPropertyInfo info;
if (!Schema.GetPropertyInfo(property, out info))
{
throw new ArgumentException(SR.Get(SRID.UnsupportedProperty));
}
// Check that the type is appropriate: NotSupported is allowed against any property,
// null is allowed for any reference type (i.e. not for value types), and otherwise
// the value's type must be assignable to the expected type.
Type expectedType = info.Type;
if (val != AutomationElement.NotSupported &&
((val == null && expectedType.IsValueType)
|| (val != null && !expectedType.IsAssignableFrom(val.GetType()))))
{
throw new ArgumentException(SR.Get(SRID.PropertyConditionIncorrectType, property.ProgrammaticName, expectedType.Name));
}
if ((flags & PropertyConditionFlags.IgnoreCase) != 0)
{
Misc.ValidateArgument(val is string, SRID.IgnoreCaseRequiresString);
}
// Some types are handled differently in managed vs unmanaged - handle those here...
if (val is AutomationElement)
{
// If this is a comparison against a Raw/LogicalElement,
// save the runtime ID instead of the element so that we
// can take it cross-proc if needed.
val = ((AutomationElement)val).GetRuntimeId();
}
else if (val is ControlType)
{
// If this is a control type, use the ID, not the CLR object
val = ((ControlType)val).Id;
}
else if (val is Rect)
{
Rect rc = (Rect)val;
val = new double[] { rc.Left, rc.Top, rc.Width, rc.Height };
}
else if (val is Point)
{
Point pt = (Point)val;
val = new double[] { pt.X, pt.Y };
}
else if (val is CultureInfo)
{
val = ((CultureInfo)val).LCID;
}
_property = property;
_val = val;
_flags = flags;
SetMarshalData(new UiaCoreApi.UiaPropertyCondition(_property.Id, _val, _flags));
}
#endregion Private Methods
//------------------------------------------------------
//
// Private Fields
//
//------------------------------------------------------
#region Private Fields
private AutomationProperty _property;
private object _val;
private PropertyConditionFlags _flags;
#endregion Private Fields
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gaxgrpc = Google.Api.Gax.Grpc;
using lro = Google.LongRunning;
using wkt = Google.Protobuf.WellKnownTypes;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using xunit = Xunit;
namespace Google.Cloud.Notebooks.V1Beta1.Tests
{
/// <summary>Generated unit tests.</summary>
public sealed class GeneratedNotebookServiceClientTest
{
[xunit::FactAttribute]
public void GetInstanceRequestObject()
{
moq::Mock<NotebookService.NotebookServiceClient> mockGrpcClient = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
GetInstanceRequest request = new GetInstanceRequest
{
Name = "name1c9368b0",
};
Instance expectedResponse = new Instance
{
InstanceName = InstanceName.FromProjectInstance("[PROJECT]", "[INSTANCE]"),
VmImage = new VmImage(),
ContainerImage = new ContainerImage(),
PostStartupScript = "post_startup_script0d185c71",
ProxyUri = "proxy_uri73c5439d",
InstanceOwners =
{
"instance_owners34378c03",
},
ServiceAccount = "service_accounta3c1b923",
MachineType = "machine_type68ce40fa",
AcceleratorConfig = new Instance.Types.AcceleratorConfig(),
State = Instance.Types.State.Initializing,
InstallGpuDriver = true,
CustomGpuDriverPath = "custom_gpu_driver_path24577c2a",
BootDiskType = Instance.Types.DiskType.PdSsd,
BootDiskSizeGb = -3633932070397520346L,
DiskEncryption = Instance.Types.DiskEncryption.Cmek,
KmsKey = "kms_key0b21601b",
NoPublicIp = true,
NoProxyAccess = false,
Network = "networkd22ce091",
Subnet = "subnet0666554f",
Labels =
{
{
"key8a0b6e3c",
"value60c16320"
},
},
Metadata =
{
{
"key8a0b6e3c",
"value60c16320"
},
},
CreateTime = new wkt::Timestamp(),
UpdateTime = new wkt::Timestamp(),
DataDiskType = Instance.Types.DiskType.Unspecified,
DataDiskSizeGb = 8597103336273737467L,
NoRemoveDataDisk = true,
};
mockGrpcClient.Setup(x => x.GetInstance(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
NotebookServiceClient client = new NotebookServiceClientImpl(mockGrpcClient.Object, null);
Instance response = client.GetInstance(request);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetInstanceRequestObjectAsync()
{
moq::Mock<NotebookService.NotebookServiceClient> mockGrpcClient = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
GetInstanceRequest request = new GetInstanceRequest
{
Name = "name1c9368b0",
};
Instance expectedResponse = new Instance
{
InstanceName = InstanceName.FromProjectInstance("[PROJECT]", "[INSTANCE]"),
VmImage = new VmImage(),
ContainerImage = new ContainerImage(),
PostStartupScript = "post_startup_script0d185c71",
ProxyUri = "proxy_uri73c5439d",
InstanceOwners =
{
"instance_owners34378c03",
},
ServiceAccount = "service_accounta3c1b923",
MachineType = "machine_type68ce40fa",
AcceleratorConfig = new Instance.Types.AcceleratorConfig(),
State = Instance.Types.State.Initializing,
InstallGpuDriver = true,
CustomGpuDriverPath = "custom_gpu_driver_path24577c2a",
BootDiskType = Instance.Types.DiskType.PdSsd,
BootDiskSizeGb = -3633932070397520346L,
DiskEncryption = Instance.Types.DiskEncryption.Cmek,
KmsKey = "kms_key0b21601b",
NoPublicIp = true,
NoProxyAccess = false,
Network = "networkd22ce091",
Subnet = "subnet0666554f",
Labels =
{
{
"key8a0b6e3c",
"value60c16320"
},
},
Metadata =
{
{
"key8a0b6e3c",
"value60c16320"
},
},
CreateTime = new wkt::Timestamp(),
UpdateTime = new wkt::Timestamp(),
DataDiskType = Instance.Types.DiskType.Unspecified,
DataDiskSizeGb = 8597103336273737467L,
NoRemoveDataDisk = true,
};
mockGrpcClient.Setup(x => x.GetInstanceAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Instance>(stt::Task.FromResult(expectedResponse), null, null, null, null));
NotebookServiceClient client = new NotebookServiceClientImpl(mockGrpcClient.Object, null);
Instance responseCallSettings = await client.GetInstanceAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Instance responseCancellationToken = await client.GetInstanceAsync(request, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void IsInstanceUpgradeableRequestObject()
{
moq::Mock<NotebookService.NotebookServiceClient> mockGrpcClient = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
IsInstanceUpgradeableRequest request = new IsInstanceUpgradeableRequest
{
NotebookInstance = "notebook_instance62ef1454",
};
IsInstanceUpgradeableResponse expectedResponse = new IsInstanceUpgradeableResponse
{
Upgradeable = false,
UpgradeVersion = "upgrade_version716e0863",
UpgradeInfo = "upgrade_info29eca0a2",
};
mockGrpcClient.Setup(x => x.IsInstanceUpgradeable(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
NotebookServiceClient client = new NotebookServiceClientImpl(mockGrpcClient.Object, null);
IsInstanceUpgradeableResponse response = client.IsInstanceUpgradeable(request);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task IsInstanceUpgradeableRequestObjectAsync()
{
moq::Mock<NotebookService.NotebookServiceClient> mockGrpcClient = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
IsInstanceUpgradeableRequest request = new IsInstanceUpgradeableRequest
{
NotebookInstance = "notebook_instance62ef1454",
};
IsInstanceUpgradeableResponse expectedResponse = new IsInstanceUpgradeableResponse
{
Upgradeable = false,
UpgradeVersion = "upgrade_version716e0863",
UpgradeInfo = "upgrade_info29eca0a2",
};
mockGrpcClient.Setup(x => x.IsInstanceUpgradeableAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<IsInstanceUpgradeableResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
NotebookServiceClient client = new NotebookServiceClientImpl(mockGrpcClient.Object, null);
IsInstanceUpgradeableResponse responseCallSettings = await client.IsInstanceUpgradeableAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
IsInstanceUpgradeableResponse responseCancellationToken = await client.IsInstanceUpgradeableAsync(request, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void GetEnvironmentRequestObject()
{
moq::Mock<NotebookService.NotebookServiceClient> mockGrpcClient = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
GetEnvironmentRequest request = new GetEnvironmentRequest
{
Name = "name1c9368b0",
};
Environment expectedResponse = new Environment
{
EnvironmentName = EnvironmentName.FromProjectEnvironment("[PROJECT]", "[ENVIRONMENT]"),
DisplayName = "display_name137f65c2",
Description = "description2cf9da67",
VmImage = new VmImage(),
ContainerImage = new ContainerImage(),
PostStartupScript = "post_startup_script0d185c71",
CreateTime = new wkt::Timestamp(),
};
mockGrpcClient.Setup(x => x.GetEnvironment(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
NotebookServiceClient client = new NotebookServiceClientImpl(mockGrpcClient.Object, null);
Environment response = client.GetEnvironment(request);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetEnvironmentRequestObjectAsync()
{
moq::Mock<NotebookService.NotebookServiceClient> mockGrpcClient = new moq::Mock<NotebookService.NotebookServiceClient>(moq::MockBehavior.Strict);
mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
GetEnvironmentRequest request = new GetEnvironmentRequest
{
Name = "name1c9368b0",
};
Environment expectedResponse = new Environment
{
EnvironmentName = EnvironmentName.FromProjectEnvironment("[PROJECT]", "[ENVIRONMENT]"),
DisplayName = "display_name137f65c2",
Description = "description2cf9da67",
VmImage = new VmImage(),
ContainerImage = new ContainerImage(),
PostStartupScript = "post_startup_script0d185c71",
CreateTime = new wkt::Timestamp(),
};
mockGrpcClient.Setup(x => x.GetEnvironmentAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Environment>(stt::Task.FromResult(expectedResponse), null, null, null, null));
NotebookServiceClient client = new NotebookServiceClientImpl(mockGrpcClient.Object, null);
Environment responseCallSettings = await client.GetEnvironmentAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Environment responseCancellationToken = await client.GetEnvironmentAsync(request, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
}
}
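// Pattern note (illustration only; the file above is generated and must not be edited):
// each async test stubs the unary RPC by wrapping the expected response in an
// AsyncUnaryCall whose header/status/trailer/dispose callbacks are unused by the client
// under test, e.g.:
/*
mockGrpcClient
    .Setup(x => x.GetInstanceAsync(request, moq::It.IsAny<grpccore::CallOptions>()))
    .Returns(new grpccore::AsyncUnaryCall<Instance>(
        stt::Task.FromResult(expectedResponse), // response task observed by the client
        null, null, null, null));               // headers, status, trailers, dispose
*/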
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Immutable;
using System.ComponentModel;
using System.ComponentModel.Composition;
using System.Linq;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.VisualStudio.LanguageServices.Implementation.TaskList;
using Microsoft.VisualStudio.Shell;
using Microsoft.VisualStudio.Shell.Interop;
using Microsoft.VisualStudio.Shell.TableManager;
using Microsoft.VisualStudio.Text;
namespace Microsoft.VisualStudio.LanguageServices.Implementation.TableDataSource
{
[Export(typeof(VisualStudioDiagnosticListTable))]
internal class VisualStudioDiagnosticListTable : VisualStudioBaseDiagnosticListTable
{
internal const string IdentifierString = nameof(VisualStudioDiagnosticListTable);
private readonly IErrorList _errorList;
private readonly LiveTableDataSource _liveTableSource;
private readonly BuildTableDataSource _buildTableSource;
[ImportingConstructor]
public VisualStudioDiagnosticListTable(
SVsServiceProvider serviceProvider,
VisualStudioWorkspace workspace,
IDiagnosticService diagnosticService,
ExternalErrorDiagnosticUpdateSource errorSource,
ITableManagerProvider provider) :
this(serviceProvider, (Workspace)workspace, diagnosticService, errorSource, provider)
{
ConnectWorkspaceEvents();
_errorList = serviceProvider.GetService(typeof(SVsErrorList)) as IErrorList;
if (_errorList == null)
{
AddInitialTableSource(workspace.CurrentSolution, _liveTableSource);
return;
}
_errorList.PropertyChanged += OnErrorListPropertyChanged;
AddInitialTableSource(workspace.CurrentSolution, GetCurrentDataSource());
}
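// The error list either shows only build diagnostics or also shows entries from other
// (live) sources; pick the table data source that matches the current setting.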
private ITableDataSource GetCurrentDataSource()
{
return _errorList.AreOtherErrorSourceEntriesShown ? (ITableDataSource)_liveTableSource : _buildTableSource;
}
/// <summary>This constructor is for test purposes only.</summary>
internal VisualStudioDiagnosticListTable(Workspace workspace, IDiagnosticService diagnosticService, ITableManagerProvider provider) :
this(null, workspace, diagnosticService, null, provider)
{
AddInitialTableSource(workspace.CurrentSolution, _liveTableSource);
}
private VisualStudioDiagnosticListTable(
SVsServiceProvider serviceProvider,
Workspace workspace,
IDiagnosticService diagnosticService,
ExternalErrorDiagnosticUpdateSource errorSource,
ITableManagerProvider provider) :
base(serviceProvider, workspace, diagnosticService, provider)
{
_liveTableSource = new LiveTableDataSource(serviceProvider, workspace, diagnosticService, IdentifierString);
_buildTableSource = new BuildTableDataSource(workspace, errorSource);
}
protected override void AddTableSourceIfNecessary(Solution solution)
{
if (solution.ProjectIds.Count == 0)
{
return;
}
RemoveTableSourcesIfNecessary();
AddTableSource(GetCurrentDataSource());
}
protected override void RemoveTableSourceIfNecessary(Solution solution)
{
if (solution.ProjectIds.Count > 0)
{
return;
}
RemoveTableSourcesIfNecessary();
}
private void RemoveTableSourcesIfNecessary()
{
RemoveTableSourceIfNecessary(_buildTableSource);
RemoveTableSourceIfNecessary(_liveTableSource);
}
private void RemoveTableSourceIfNecessary(ITableDataSource source)
{
if (!this.TableManager.Sources.Any(s => s == source))
{
return;
}
this.TableManager.RemoveSource(source);
}
protected override void ShutdownSource()
{
_liveTableSource.Shutdown();
_buildTableSource.Shutdown();
}
private void OnErrorListPropertyChanged(object sender, PropertyChangedEventArgs e)
{
if (e.PropertyName == nameof(IErrorList.AreOtherErrorSourceEntriesShown))
{
AddTableSourceIfNecessary(this.Workspace.CurrentSolution);
}
}
private class BuildTableDataSource : AbstractTableDataSource<DiagnosticData>
{
private readonly Workspace _workspace;
private readonly ExternalErrorDiagnosticUpdateSource _buildErrorSource;
public BuildTableDataSource(Workspace workspace, ExternalErrorDiagnosticUpdateSource errorSource)
{
_workspace = workspace;
_buildErrorSource = errorSource;
ConnectToBuildUpdateSource(errorSource);
}
private void ConnectToBuildUpdateSource(ExternalErrorDiagnosticUpdateSource errorSource)
{
if (errorSource == null)
{
return;
}
SetStableState(errorSource.IsInProgress);
errorSource.BuildStarted += OnBuildStarted;
}
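// A build in progress marks this source as not stable; once the build finishes,
// the accumulated build diagnostics are pushed into the table in a single update.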
private void OnBuildStarted(object sender, bool started)
{
SetStableState(started);
if (!started)
{
OnDataAddedOrChanged(this, _buildErrorSource.GetBuildErrors().Length);
}
}
private void SetStableState(bool started)
{
IsStable = !started;
ChangeStableState(IsStable);
}
public override string DisplayName => ServicesVSResources.BuildTableSourceName;
public override string SourceTypeIdentifier => StandardTableDataSources.ErrorTableDataSource;
public override string Identifier => IdentifierString;
protected void OnDataAddedOrChanged(object key, int itemCount)
{
// Reuse the factory. It is okay to reuse it because we always remove the factory
// before adding it back.
bool newFactory = false;
ImmutableArray<SubscriptionWithoutLock> snapshot;
AbstractTableEntriesFactory<DiagnosticData> factory;
lock (Gate)
{
snapshot = Subscriptions;
if (!Map.TryGetValue(key, out factory))
{
factory = new TableEntriesFactory(this, _workspace);
Map.Add(key, factory);
newFactory = true;
}
}
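// Work outside the lock: tell the factory about the new item count, then let every
// subscription captured in the snapshot pick up the (possibly new) factory.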
factory.OnUpdated(itemCount);
for (var i = 0; i < snapshot.Length; i++)
{
snapshot[i].AddOrUpdate(factory, newFactory);
}
}
private class TableEntriesFactory : AbstractTableEntriesFactory<DiagnosticData>
{
private readonly BuildTableDataSource _source;
private readonly Workspace _workspace;
public TableEntriesFactory(BuildTableDataSource source, Workspace workspace) :
base(source)
{
_source = source;
_workspace = workspace;
}
protected override ImmutableArray<DiagnosticData> GetItems()
{
return _source._buildErrorSource.GetBuildErrors();
}
protected override ImmutableArray<ITrackingPoint> GetTrackingPoints(ImmutableArray<DiagnosticData> items)
{
return ImmutableArray<ITrackingPoint>.Empty;
}
protected override AbstractTableEntriesSnapshot<DiagnosticData> CreateSnapshot(
int version, ImmutableArray<DiagnosticData> items, ImmutableArray<ITrackingPoint> trackingPoints)
{
return new TableEntriesSnapshot(this, version, items);
}
private class TableEntriesSnapshot : AbstractTableEntriesSnapshot<DiagnosticData>
{
private readonly TableEntriesFactory _factory;
public TableEntriesSnapshot(
TableEntriesFactory factory, int version, ImmutableArray<DiagnosticData> items) :
base(version, Guid.Empty, items, ImmutableArray<ITrackingPoint>.Empty)
{
_factory = factory;
}
public override bool TryGetValue(int index, string columnName, out object content)
{
// REVIEW: this method is too chatty to make async, but otherwise how could it be implemented asynchronously?
// Also, what would the cancellation mechanism be?
var item = GetItem(index);
if (item == null)
{
content = null;
return false;
}
switch (columnName)
{
case StandardTableKeyNames.ErrorRank:
content = WellKnownDiagnosticTags.Build;
return true;
case StandardTableKeyNames.ErrorSeverity:
content = GetErrorCategory(item.Severity);
return true;
case StandardTableKeyNames.ErrorCode:
content = item.Id;
return true;
case StandardTableKeyNames.ErrorCodeToolTip:
content = GetHelpLinkToolTipText(item);
return content != null;
case StandardTableKeyNames.HelpLink:
content = GetHelpLink(item);
return content != null;
case StandardTableKeyNames.ErrorCategory:
content = item.Category;
return true;
case StandardTableKeyNames.ErrorSource:
content = ErrorSource.Build;
return true;
case StandardTableKeyNames.BuildTool:
content = PredefinedBuildTools.Build;
return true;
case StandardTableKeyNames.Text:
content = item.Message;
return true;
case StandardTableKeyNames.DocumentName:
content = GetFileName(item.OriginalFilePath, item.MappedFilePath);
return true;
case StandardTableKeyNames.Line:
content = item.MappedStartLine;
return true;
case StandardTableKeyNames.Column:
content = item.MappedStartColumn;
return true;
case StandardTableKeyNames.ProjectName:
content = GetProjectName(_factory._workspace, item.ProjectId);
return content != null;
case StandardTableKeyNames.ProjectGuid:
content = GetProjectGuid(_factory._workspace, item.ProjectId);
return (Guid)content != Guid.Empty;
default:
content = null;
return false;
}
}
public override bool TryNavigateTo(int index, bool previewTab)
{
var item = GetItem(index);
if (item == null)
{
return false;
}
// this item is not navigable
if (item.DocumentId == null)
{
return false;
}
return TryNavigateTo(_factory._workspace, item.DocumentId, item.OriginalStartLine, item.OriginalStartColumn, previewTab);
}
protected override bool IsEquivalent(DiagnosticData item1, DiagnosticData item2)
{
// everything is the same except the location
return item1.Id == item2.Id &&
item1.ProjectId == item2.ProjectId &&
item1.DocumentId == item2.DocumentId &&
item1.Category == item2.Category &&
item1.Severity == item2.Severity &&
item1.WarningLevel == item2.WarningLevel &&
item1.Message == item2.Message;
}
}
}
}
}
}
| |
namespace AbpKendoDemo.Migrations
{
using System;
using System.Collections.Generic;
using System.Data.Entity.Infrastructure.Annotations;
using System.Data.Entity.Migrations;
public partial class Upgraded_To_V0_9 : DbMigration
{
public override void Up()
{
DropForeignKey("dbo.AbpSettings", "TenantId", "dbo.AbpTenants");
DropForeignKey("dbo.AbpUsers", "TenantId", "dbo.AbpTenants");
DropForeignKey("dbo.AbpRoles", "TenantId", "dbo.AbpTenants");
DropIndex("dbo.AbpUserLoginAttempts", new[] { "TenancyName", "UserNameOrEmailAddress", "Result" });
CreateTable(
"dbo.AbpTenantNotifications",
c => new
{
Id = c.Guid(nullable: false),
TenantId = c.Int(),
NotificationName = c.String(nullable: false, maxLength: 96),
Data = c.String(),
DataTypeName = c.String(maxLength: 512),
EntityTypeName = c.String(maxLength: 250),
EntityTypeAssemblyQualifiedName = c.String(maxLength: 512),
EntityId = c.String(maxLength: 96),
Severity = c.Byte(nullable: false),
CreationTime = c.DateTime(nullable: false),
CreatorUserId = c.Long(),
},
annotations: new Dictionary<string, object>
{
{ "DynamicFilter_TenantNotificationInfo_MayHaveTenant", "EntityFramework.DynamicFilters.DynamicFilterDefinition" },
})
.PrimaryKey(t => t.Id);
AlterTableAnnotations(
"dbo.AbpFeatures",
c => new
{
Id = c.Long(nullable: false, identity: true),
Name = c.String(nullable: false, maxLength: 128),
Value = c.String(nullable: false, maxLength: 2000),
CreationTime = c.DateTime(nullable: false),
CreatorUserId = c.Long(),
EditionId = c.Int(),
TenantId = c.Int(),
Discriminator = c.String(nullable: false, maxLength: 128),
},
annotations: new Dictionary<string, AnnotationValues>
{
{
"DynamicFilter_TenantFeatureSetting_MustHaveTenant",
new AnnotationValues(oldValue: null, newValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition")
},
});
AlterTableAnnotations(
"dbo.AbpNotificationSubscriptions",
c => new
{
Id = c.Guid(nullable: false),
TenantId = c.Int(),
UserId = c.Long(nullable: false),
NotificationName = c.String(maxLength: 96),
EntityTypeName = c.String(maxLength: 250),
EntityTypeAssemblyQualifiedName = c.String(maxLength: 512),
EntityId = c.String(maxLength: 96),
CreationTime = c.DateTime(nullable: false),
CreatorUserId = c.Long(),
},
annotations: new Dictionary<string, AnnotationValues>
{
{
"DynamicFilter_NotificationSubscriptionInfo_MayHaveTenant",
new AnnotationValues(oldValue: null, newValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition")
},
});
AlterTableAnnotations(
"dbo.AbpPermissions",
c => new
{
Id = c.Long(nullable: false, identity: true),
TenantId = c.Int(),
Name = c.String(nullable: false, maxLength: 128),
IsGranted = c.Boolean(nullable: false),
CreationTime = c.DateTime(nullable: false),
CreatorUserId = c.Long(),
RoleId = c.Int(),
UserId = c.Long(),
Discriminator = c.String(nullable: false, maxLength: 128),
},
annotations: new Dictionary<string, AnnotationValues>
{
{
"DynamicFilter_PermissionSetting_MayHaveTenant",
new AnnotationValues(oldValue: null, newValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition")
},
{
"DynamicFilter_RolePermissionSetting_MayHaveTenant",
new AnnotationValues(oldValue: null, newValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition")
},
{
"DynamicFilter_UserPermissionSetting_MayHaveTenant",
new AnnotationValues(oldValue: null, newValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition")
},
});
AlterTableAnnotations(
"dbo.AbpUserLogins",
c => new
{
Id = c.Long(nullable: false, identity: true),
TenantId = c.Int(),
UserId = c.Long(nullable: false),
LoginProvider = c.String(nullable: false, maxLength: 128),
ProviderKey = c.String(nullable: false, maxLength: 256),
},
annotations: new Dictionary<string, AnnotationValues>
{
{
"DynamicFilter_UserLogin_MayHaveTenant",
new AnnotationValues(oldValue: null, newValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition")
},
});
AlterTableAnnotations(
"dbo.AbpUserRoles",
c => new
{
Id = c.Long(nullable: false, identity: true),
TenantId = c.Int(),
UserId = c.Long(nullable: false),
RoleId = c.Int(nullable: false),
CreationTime = c.DateTime(nullable: false),
CreatorUserId = c.Long(),
},
annotations: new Dictionary<string, AnnotationValues>
{
{
"DynamicFilter_UserRole_MayHaveTenant",
new AnnotationValues(oldValue: null, newValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition")
},
});
AlterTableAnnotations(
"dbo.AbpSettings",
c => new
{
Id = c.Long(nullable: false, identity: true),
TenantId = c.Int(),
UserId = c.Long(),
Name = c.String(nullable: false, maxLength: 256),
Value = c.String(maxLength: 2000),
LastModificationTime = c.DateTime(),
LastModifierUserId = c.Long(),
CreationTime = c.DateTime(nullable: false),
CreatorUserId = c.Long(),
},
annotations: new Dictionary<string, AnnotationValues>
{
{
"DynamicFilter_Setting_MayHaveTenant",
new AnnotationValues(oldValue: null, newValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition")
},
});
AlterTableAnnotations(
"dbo.AbpUserLoginAttempts",
c => new
{
Id = c.Long(nullable: false, identity: true),
TenantId = c.Int(),
TenancyName = c.String(maxLength: 64),
UserId = c.Long(),
UserNameOrEmailAddress = c.String(maxLength: 255),
ClientIpAddress = c.String(maxLength: 64),
ClientName = c.String(maxLength: 128),
BrowserInfo = c.String(maxLength: 256),
Result = c.Byte(nullable: false),
CreationTime = c.DateTime(nullable: false),
},
annotations: new Dictionary<string, AnnotationValues>
{
{
"DynamicFilter_UserLoginAttempt_MayHaveTenant",
new AnnotationValues(oldValue: null, newValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition")
},
});
AlterTableAnnotations(
"dbo.AbpUserNotifications",
c => new
{
Id = c.Guid(nullable: false),
TenantId = c.Int(),
UserId = c.Long(nullable: false),
NotificationId = c.Guid(nullable: false),
State = c.Int(nullable: false),
CreationTime = c.DateTime(nullable: false),
},
annotations: new Dictionary<string, AnnotationValues>
{
{
"DynamicFilter_UserNotificationInfo_MayHaveTenant",
new AnnotationValues(oldValue: null, newValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition")
},
});
AddColumn("dbo.AbpPermissions", "TenantId", c => c.Int());
AddColumn("dbo.AbpUserLogins", "TenantId", c => c.Int());
AddColumn("dbo.AbpUserRoles", "TenantId", c => c.Int());
AddColumn("dbo.AbpUserNotifications", "TenantId", c => c.Int());
AlterColumn("dbo.AbpUserLoginAttempts", "UserNameOrEmailAddress", c => c.String(maxLength: 255));
CreateIndex("dbo.AbpUserLoginAttempts", new[] { "UserId", "TenantId" });
CreateIndex("dbo.AbpUserLoginAttempts", new[] { "TenancyName", "UserNameOrEmailAddress", "Result" });
//Update current AbpUserRoles.TenantId values
Sql(@"UPDATE AbpUserRoles
SET TenantId = AbpUsers.TenantId
FROM AbpUsers
WHERE AbpUserRoles.UserId = AbpUsers.Id");
//Update current AbpUserLogins.TenantId values
Sql(@"UPDATE AbpUserLogins
SET TenantId = AbpUsers.TenantId
FROM AbpUsers
WHERE AbpUserLogins.UserId = AbpUsers.Id");
//Update current AbpPermissions.TenantId values
Sql(@"UPDATE AbpPermissions
SET TenantId = AbpUsers.TenantId
FROM AbpUsers
WHERE AbpPermissions.UserId = AbpUsers.Id");
Sql(@"UPDATE AbpPermissions
SET TenantId = AbpRoles.TenantId
FROM AbpRoles
WHERE AbpPermissions.RoleId = AbpRoles.Id");
//Update current AbpUserNotifications.TenantId values
Sql(@"UPDATE AbpUserNotifications
SET TenantId = AbpUsers.TenantId
FROM AbpUsers
WHERE AbpUserNotifications.UserId = AbpUsers.Id");
//Update current AbpSettings.TenantId values
Sql(@"UPDATE AbpSettings
SET TenantId = AbpUsers.TenantId
FROM AbpUsers
WHERE AbpSettings.UserId = AbpUsers.Id");
}
public override void Down()
{
DropIndex("dbo.AbpUserLoginAttempts", new[] { "TenancyName", "UserNameOrEmailAddress", "Result" });
DropIndex("dbo.AbpUserLoginAttempts", new[] { "UserId", "TenantId" });
AlterColumn("dbo.AbpUserLoginAttempts", "UserNameOrEmailAddress", c => c.String(maxLength: 256));
DropColumn("dbo.AbpUserNotifications", "TenantId");
DropColumn("dbo.AbpUserRoles", "TenantId");
DropColumn("dbo.AbpUserLogins", "TenantId");
DropColumn("dbo.AbpPermissions", "TenantId");
AlterTableAnnotations(
"dbo.AbpUserNotifications",
c => new
{
Id = c.Guid(nullable: false),
TenantId = c.Int(),
UserId = c.Long(nullable: false),
NotificationId = c.Guid(nullable: false),
State = c.Int(nullable: false),
CreationTime = c.DateTime(nullable: false),
},
annotations: new Dictionary<string, AnnotationValues>
{
{
"DynamicFilter_UserNotificationInfo_MayHaveTenant",
new AnnotationValues(oldValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition", newValue: null)
},
});
AlterTableAnnotations(
"dbo.AbpUserLoginAttempts",
c => new
{
Id = c.Long(nullable: false, identity: true),
TenantId = c.Int(),
TenancyName = c.String(maxLength: 64),
UserId = c.Long(),
UserNameOrEmailAddress = c.String(maxLength: 255),
ClientIpAddress = c.String(maxLength: 64),
ClientName = c.String(maxLength: 128),
BrowserInfo = c.String(maxLength: 256),
Result = c.Byte(nullable: false),
CreationTime = c.DateTime(nullable: false),
},
annotations: new Dictionary<string, AnnotationValues>
{
{
"DynamicFilter_UserLoginAttempt_MayHaveTenant",
new AnnotationValues(oldValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition", newValue: null)
},
});
AlterTableAnnotations(
"dbo.AbpSettings",
c => new
{
Id = c.Long(nullable: false, identity: true),
TenantId = c.Int(),
UserId = c.Long(),
Name = c.String(nullable: false, maxLength: 256),
Value = c.String(maxLength: 2000),
LastModificationTime = c.DateTime(),
LastModifierUserId = c.Long(),
CreationTime = c.DateTime(nullable: false),
CreatorUserId = c.Long(),
},
annotations: new Dictionary<string, AnnotationValues>
{
{
"DynamicFilter_Setting_MayHaveTenant",
new AnnotationValues(oldValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition", newValue: null)
},
});
AlterTableAnnotations(
"dbo.AbpUserRoles",
c => new
{
Id = c.Long(nullable: false, identity: true),
TenantId = c.Int(),
UserId = c.Long(nullable: false),
RoleId = c.Int(nullable: false),
CreationTime = c.DateTime(nullable: false),
CreatorUserId = c.Long(),
},
annotations: new Dictionary<string, AnnotationValues>
{
{
"DynamicFilter_UserRole_MayHaveTenant",
new AnnotationValues(oldValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition", newValue: null)
},
});
AlterTableAnnotations(
"dbo.AbpUserLogins",
c => new
{
Id = c.Long(nullable: false, identity: true),
TenantId = c.Int(),
UserId = c.Long(nullable: false),
LoginProvider = c.String(nullable: false, maxLength: 128),
ProviderKey = c.String(nullable: false, maxLength: 256),
},
annotations: new Dictionary<string, AnnotationValues>
{
{
"DynamicFilter_UserLogin_MayHaveTenant",
new AnnotationValues(oldValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition", newValue: null)
},
});
AlterTableAnnotations(
"dbo.AbpPermissions",
c => new
{
Id = c.Long(nullable: false, identity: true),
TenantId = c.Int(),
Name = c.String(nullable: false, maxLength: 128),
IsGranted = c.Boolean(nullable: false),
CreationTime = c.DateTime(nullable: false),
CreatorUserId = c.Long(),
RoleId = c.Int(),
UserId = c.Long(),
Discriminator = c.String(nullable: false, maxLength: 128),
},
annotations: new Dictionary<string, AnnotationValues>
{
{
"DynamicFilter_PermissionSetting_MayHaveTenant",
new AnnotationValues(oldValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition", newValue: null)
},
{
"DynamicFilter_RolePermissionSetting_MayHaveTenant",
new AnnotationValues(oldValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition", newValue: null)
},
{
"DynamicFilter_UserPermissionSetting_MayHaveTenant",
new AnnotationValues(oldValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition", newValue: null)
},
});
AlterTableAnnotations(
"dbo.AbpNotificationSubscriptions",
c => new
{
Id = c.Guid(nullable: false),
TenantId = c.Int(),
UserId = c.Long(nullable: false),
NotificationName = c.String(maxLength: 96),
EntityTypeName = c.String(maxLength: 250),
EntityTypeAssemblyQualifiedName = c.String(maxLength: 512),
EntityId = c.String(maxLength: 96),
CreationTime = c.DateTime(nullable: false),
CreatorUserId = c.Long(),
},
annotations: new Dictionary<string, AnnotationValues>
{
{
"DynamicFilter_NotificationSubscriptionInfo_MayHaveTenant",
new AnnotationValues(oldValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition", newValue: null)
},
});
AlterTableAnnotations(
"dbo.AbpFeatures",
c => new
{
Id = c.Long(nullable: false, identity: true),
Name = c.String(nullable: false, maxLength: 128),
Value = c.String(nullable: false, maxLength: 2000),
CreationTime = c.DateTime(nullable: false),
CreatorUserId = c.Long(),
EditionId = c.Int(),
TenantId = c.Int(),
Discriminator = c.String(nullable: false, maxLength: 128),
},
annotations: new Dictionary<string, AnnotationValues>
{
{
"DynamicFilter_TenantFeatureSetting_MustHaveTenant",
new AnnotationValues(oldValue: "EntityFramework.DynamicFilters.DynamicFilterDefinition", newValue: null)
},
});
DropTable("dbo.AbpTenantNotifications",
removedAnnotations: new Dictionary<string, object>
{
{ "DynamicFilter_TenantNotificationInfo_MayHaveTenant", "EntityFramework.DynamicFilters.DynamicFilterDefinition" },
});
CreateIndex("dbo.AbpUserLoginAttempts", new[] { "TenancyName", "UserNameOrEmailAddress", "Result" });
AddForeignKey("dbo.AbpRoles", "TenantId", "dbo.AbpTenants", "Id");
AddForeignKey("dbo.AbpUsers", "TenantId", "dbo.AbpTenants", "Id");
AddForeignKey("dbo.AbpSettings", "TenantId", "dbo.AbpTenants", "Id");
}
}
}
| |
/****************************************************************************
Copyright (c) 2010-2012 cocos2d-x.org
Copyright (c) 2008-2010 Ricardo Quesada
Copyright (c) 2011 Zynga Inc.
Copyright (c) 2011-2012 openxlive.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
using Microsoft.Xna.Framework.Graphics;
namespace CocosSharp
{
public class CCTiledGrid3D : CCGridBase
{
bool dirty;
CCIndexBuffer<short> indexBuffer;
CCVertexBuffer<CCV3F_T2F> vertexBuffer;
#region Properties
protected short[] Indices { get; private set; }
protected CCQuad3[] OriginalVertices { get; private set; }
internal CCV3F_T2F[] Vertices { get; private set; }
#endregion Properties
#region Constructors
public CCTiledGrid3D(CCGridSize gridSize, CCTexture2D texture, bool flipped = false) : base(gridSize, texture, flipped)
{
}
#endregion Constructors
#region Tile Indexers and accessors
public CCQuad3 this[CCGridSize pos]
{
get { return this[pos.X, pos.Y]; }
set {
this[pos.X, pos.Y] = value;
}
}
public CCQuad3 this[int x, int y]
{
get
{
int idx = (GridSize.Y * x + y) * 4;
CCV3F_T2F[] vertArray = Vertices;
return new CCQuad3
{
BottomLeft = vertArray[idx + 0].Vertices,
BottomRight = vertArray[idx + 1].Vertices,
TopLeft = vertArray[idx + 2].Vertices,
TopRight = vertArray[idx + 3].Vertices
};
}
set
{
int idx = (GridSize.Y * x + y) * 4;
CCV3F_T2F[] vertArray = Vertices;
vertArray[idx + 0].Vertices = value.BottomLeft;
vertArray[idx + 1].Vertices = value.BottomRight;
vertArray[idx + 2].Vertices = value.TopLeft;
vertArray[idx + 3].Vertices = value.TopRight;
dirty = true;
}
}
// returns the original tile (untransformed) at the given position
public CCQuad3 OriginalTile(CCGridSize pos)
{
return OriginalTile(pos.X, pos.Y);
}
// returns the original tile (untransformed) at the given position
public CCQuad3 OriginalTile(int x, int y)
{
int idx = (GridSize.Y * x + y);
return OriginalVertices[idx];
}
#endregion Tile Indexers and accessors
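// Hedged usage sketch (illustration only): grid effects typically read a tile quad,
// displace its vertices, and assign it back; the setter marks the vertex buffer dirty
// so the change is uploaded on the next Blit. The grid, tile coordinates and `amount`
// below are assumed to come from the surrounding effect.
/*
CCQuad3 quad = grid[x, y];
quad.BottomLeft.Y += amount;
quad.BottomRight.Y += amount;
quad.TopLeft.Y += amount;
quad.TopRight.Y += amount;
grid[x, y] = quad; // sets dirty = true
*/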
public override void Blit()
{
if (dirty)
{
vertexBuffer.UpdateBuffer();
}
CCDrawManager drawManager = Scene.Window.DrawManager;
bool save = drawManager.VertexColorEnabled;
drawManager.VertexColorEnabled = false;
drawManager.DrawBuffer(vertexBuffer, indexBuffer, 0, Indices.Length / 3);
drawManager.VertexColorEnabled = save;
}
public override void Reuse()
{
if (ReuseGrid > 0)
{
int numQuads = GridSize.X * GridSize.Y;
CCQuad3[] orig = OriginalVertices;
CCV3F_T2F[] verts = Vertices;
for (int i = 0; i < numQuads; i++)
{
int i4 = i * 4;
orig[i].BottomLeft = verts[i4 + 0].Vertices;
orig[i].BottomRight = verts[i4 + 1].Vertices;
orig[i].TopLeft = verts[i4 + 2].Vertices;
orig[i].TopRight = verts[i4 + 3].Vertices;
}
--ReuseGrid;
}
}
public override void CalculateVertexPoints()
{
float width = Texture.PixelsWide;
float height = Texture.PixelsHigh;
float imageH = Texture.ContentSizeInPixels.Height;
int numQuads = GridSize.X * GridSize.Y;
vertexBuffer = new CCVertexBuffer<CCV3F_T2F>(numQuads * 4, CCBufferUsage.WriteOnly);
vertexBuffer.Count = numQuads * 4;
indexBuffer = new CCIndexBuffer<short>(numQuads * 6, BufferUsage.WriteOnly);
indexBuffer.Count = numQuads * 6;
Vertices = vertexBuffer.Data.Elements;
Indices = indexBuffer.Data.Elements;
OriginalVertices = new CCQuad3[numQuads];
CCV3F_T2F[] vertArray = Vertices;
short[] idxArray = Indices;
int index = 0;
for (int x = 0; x < GridSize.X; x++)
{
for (int y = 0; y < GridSize.Y; y++)
{
float x1 = x * Step.X;
float x2 = x1 + Step.X;
float y1 = y * Step.Y;
float y2 = y1 + Step.Y;
vertArray[index + 0].Vertices = new CCVertex3F(x1, y1, 0);
vertArray[index + 1].Vertices = new CCVertex3F(x2, y1, 0);
vertArray[index + 2].Vertices = new CCVertex3F(x1, y2, 0);
vertArray[index + 3].Vertices = new CCVertex3F(x2, y2, 0);
float newY1 = y1;
float newY2 = y2;
if (!TextureFlipped)
{
newY1 = imageH - y1;
newY2 = imageH - y2;
}
vertArray[index + 0].TexCoords = new CCTex2F(x1 / width, newY1 / height);
vertArray[index + 1].TexCoords = new CCTex2F(x2 / width, newY1 / height);
vertArray[index + 2].TexCoords = new CCTex2F(x1 / width, newY2 / height);
vertArray[index + 3].TexCoords = new CCTex2F(x2 / width, newY2 / height);
index += 4;
}
}
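// Each quad owns four vertices (i4 + 0 = bottom-left, i4 + 1 = bottom-right,
// i4 + 2 = top-left, i4 + 3 = top-right) and is split into the two triangles
// (BL, TL, BR) and (BR, TL, TR) below, i.e. six indices per quad.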
for (int x = 0; x < numQuads; x++)
{
int i6 = x * 6;
int i4 = x * 4;
idxArray[i6 + 0] = (short) (i4 + 0);
idxArray[i6 + 1] = (short) (i4 + 2);
idxArray[i6 + 2] = (short) (i4 + 1);
idxArray[i6 + 3] = (short) (i4 + 1);
idxArray[i6 + 4] = (short) (i4 + 2);
idxArray[i6 + 5] = (short) (i4 + 3);
}
indexBuffer.UpdateBuffer();
for (int i = 0; i < numQuads; i++)
{
int i4 = i * 4;
OriginalVertices[i].BottomLeft = vertArray[i4 + 0].Vertices;
OriginalVertices[i].BottomRight = vertArray[i4 + 1].Vertices;
OriginalVertices[i].TopLeft = vertArray[i4 + 2].Vertices;
OriginalVertices[i].TopRight = vertArray[i4 + 3].Vertices;
}
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Runtime.CompilerServices;
using System.IO;
using System.Collections;
using System.Globalization;
using System.Text;
using System.Threading;
using Xunit;
public class DirectoryInfo_GetFileSystemInfos_str
{
public static String s_strActiveBugNums = "28509";
public static String s_strClassMethod = "DirectoryInfo.GetFileSystemInfos(String)";
public static String s_strTFName = "GetFileSystemInfos_str.cs";
public static String s_strTFPath = Directory.GetCurrentDirectory();
[Fact]
public static void runTest()
{
//////////// Global Variables used for all tests
int iCountErrors = 0;
int iCountTestcases = 0;
String strLoc = "Loc_000oo";
String strValue = String.Empty;
try
{
///////////////////////// START TESTS ////////////////////////////
///////////////////////////////////////////////////////////////////
DirectoryInfo dir2;
String dirName = Path.GetRandomFileName();
FileSystemInfo[] fsArr;
if (Directory.Exists(dirName))
Directory.Delete(dirName, true);
// [] Should throw ArgumentNullException for null argument
//-----------------------------------------------------------------
strLoc = "Loc_477g8";
dir2 = new DirectoryInfo(".");
iCountTestcases++;
try
{
dir2.GetFileSystemInfos(null);
iCountErrors++;
printerr("Error_2988b! Expected exception not thrown");
}
catch (ArgumentNullException)
{
}
catch (Exception exc)
{
iCountErrors++;
printerr("Error_0707t! Incorrect exception thrown, exc==" + exc.ToString());
}
//-----------------------------------------------------------------
// [] ArgumentException for String.Empty
//-----------------------------------------------------------------
strLoc = "Loc_4yg7b";
dir2 = new DirectoryInfo(".");
iCountTestcases++;
try
{
FileSystemInfo[] strInfos = dir2.GetFileSystemInfos(String.Empty);
if (strInfos.Length != 0)
{
iCountErrors++;
printerr("Error_8ytbm! Unexpected number of file infos returned" + strInfos.Length);
}
}
catch (Exception exc)
{
iCountErrors++;
printerr("Error_2908y! Incorrect exception thrown, exc==" + exc.ToString());
}
//-----------------------------------------------------------------
// [] ArgumentException for all whitespace
//-----------------------------------------------------------------
strLoc = "Loc_1190x";
dir2 = new DirectoryInfo(".");
iCountTestcases++;
try
{
dir2.GetFileSystemInfos(Path.Combine("..ab ab.. .. abc..d", "abc.."));
iCountErrors++;
printerr("Error_2198y! Expected exception not thrown");
}
catch (ArgumentException)
{
}
catch (Exception exc)
{
iCountErrors++;
printerr("Error_17888! Incorrect exception thrown, exc==" + exc.ToString());
}
//-----------------------------------------------------------------
// [] Should return zero length array for an empty directory
//-----------------------------------------------------------------
strLoc = "Loc_4y982";
dir2 = Directory.CreateDirectory(dirName);
fsArr = dir2.GetFileSystemInfos();
iCountTestcases++;
if (fsArr.Length != 0)
{
iCountErrors++;
printerr("Error_207v7! Incorrect number of files returned");
}
//-----------------------------------------------------------------
// [] Create a directory structure and try different search criteria
//-----------------------------------------------------------------
strLoc = "Loc_2398c";
dir2.CreateSubdirectory("TestDir1");
dir2.CreateSubdirectory("TestDir2");
dir2.CreateSubdirectory("TestDir3");
dir2.CreateSubdirectory("Test1Dir1");
dir2.CreateSubdirectory("Test1Dir2");
new FileInfo(Path.Combine(dir2.FullName, "TestFile1")).Create();
new FileInfo(Path.Combine(dir2.FullName, "TestFile2")).Create();
new FileInfo(Path.Combine(dir2.FullName, "TestFile3")).Create();
new FileInfo(Path.Combine(dir2.FullName, "Test1File1")).Create();
new FileInfo(Path.Combine(dir2.FullName, "Test1File2")).Create();
// [] Search criteria ending with '*'
iCountTestcases++;
fsArr = dir2.GetFileSystemInfos("TestFile*");
iCountTestcases++;
if (fsArr.Length != 3)
{
iCountErrors++;
printerr("Error_1yt75! Incorrect number of files returned");
}
String[] names = new String[fsArr.Length];
int i = 0;
foreach (FileSystemInfo f in fsArr)
names[i++] = f.Name;
if (!Interop.IsWindows) // test is expecting sorted order as provided by Windows
{
Array.Sort(names);
}
iCountTestcases++;
if (Array.IndexOf(names, "TestFile1") < 0)
{
iCountErrors++;
printerr("Error_3y775! Incorrect name==" + fsArr[0].Name);
}
iCountTestcases++;
if (Array.IndexOf(names, "TestFile2") < 0)
{
iCountErrors++;
printerr("Error_90885! Incorrect name==" + fsArr[1].Name);
}
iCountTestcases++;
if (Array.IndexOf(names, "TestFile3") < 0)
{
iCountErrors++;
printerr("Error_879by! Incorrect name==" + fsArr[2].Name);
}
// [] Search criteria is '*'
fsArr = dir2.GetFileSystemInfos("*");
iCountTestcases++;
if (fsArr.Length != 10)
{
iCountErrors++;
printerr("Error_t5792! Incorrect number of files==" + fsArr.Length);
}
names = new String[fsArr.Length];
i = 0;
foreach (FileSystemInfo f in fsArr)
names[i++] = f.Name;
if (!Interop.IsWindows) // test is expecting sorted order as provided by Windows
{
Array.Sort(names);
}
iCountTestcases++;
if (Array.IndexOf(names, "Test1Dir1") < 0)
{
iCountErrors++;
printerr("Error_4898v! Incorrect name==" + fsArr[0].Name);
}
iCountTestcases++;
if (Array.IndexOf(names, "Test1Dir2") < 0)
{
iCountErrors++;
printerr("Error_4598c! Incorrect name==" + fsArr[1].Name);
}
iCountTestcases++;
if (Array.IndexOf(names, "TestDir1") < 0)
{
iCountErrors++;
printerr("Error_209d8! Incorrect name==" + fsArr[2].Name);
}
iCountTestcases++;
if (Array.IndexOf(names, "TestDir2") < 0)
{
iCountErrors++;
printerr("Error_10vtu! Incorrect name==" + fsArr[3].Name);
}
iCountTestcases++;
if (Array.IndexOf(names, "TestDir3") < 0)
{
iCountErrors++;
printerr("Error_190vh! Incorrect name==" + fsArr[4].Name);
}
iCountTestcases++;
if (Array.IndexOf(names, "Test1File1") < 0)
{
iCountErrors++;
printerr("Error_4898v! Incorrect name==" + fsArr[5].Name);
}
iCountTestcases++;
if (Array.IndexOf(names, "Test1File2") < 0)
{
iCountErrors++;
printerr("Error_4598c! Incorrect name==" + fsArr[6].Name);
}
iCountTestcases++;
if (Array.IndexOf(names, "TestFile1") < 0)
{
iCountErrors++;
printerr("Error_209d8! Incorrect name==" + fsArr[7].Name);
}
iCountTestcases++;
if (Array.IndexOf(names, "TestFile2") < 0)
{
iCountErrors++;
printerr("Error_10vtu! Incorrect name==" + fsArr[8].Name);
}
iCountTestcases++;
if (Array.IndexOf(names, "TestFile3") < 0)
{
iCountErrors++;
printerr("Error_190vh! Incorrect name==" + fsArr[9].Name);
}
// [] Search criteria beginning with '*'
fsArr = dir2.GetFileSystemInfos("*2");
iCountTestcases++;
if (fsArr.Length != 4)
{
iCountErrors++;
printerr("Error_8019x! Incorrect number of files==" + fsArr.Length);
}
names = new String[fsArr.Length];
i = 0;
foreach (FileSystemInfo fs in fsArr)
names[i++] = fs.Name;
iCountTestcases++;
if (Array.IndexOf(names, "Test1Dir2") < 0)
{
iCountErrors++;
printerr("Error_247yg! Incorrect name==" + fsArr[0].Name);
}
iCountTestcases++;
if (Array.IndexOf(names, "TestDir2") < 0)
{
iCountErrors++;
printerr("Error_24gy7! Incorrect name==" + fsArr[1].Name);
}
iCountTestcases++;
if (Array.IndexOf(names, "Test1File2") < 0)
{
iCountErrors++;
printerr("Error_167yb! Incorrect name==" + fsArr[2].Name);
}
iCountTestcases++;
if (Array.IndexOf(names, "TestFile2") < 0)
{
iCountErrors++;
printerr("Error_49yb7! Incorrect name==" + fsArr[3].Name);
}
fsArr = dir2.GetFileSystemInfos("*Dir2");
iCountTestcases++;
if (fsArr.Length != 2)
{
iCountErrors++;
printerr("Error_948yv! Incorrect number of files==" + fsArr.Length);
}
names = new String[fsArr.Length];
i = 0;
foreach (FileSystemInfo fs in fsArr)
names[i++] = fs.Name;
iCountTestcases++;
if (Array.IndexOf(names, "Test1Dir2") < 0)
{
iCountErrors++;
printerr("Error_247yg! Incorrect name==" + fsArr[0].Name);
}
iCountTestcases++;
if (Array.IndexOf(names, "TestDir2") < 0)
{
iCountErrors++;
printerr("Error_24gy7! Incorrect name==" + fsArr[1].Name);
}
// [] Search criteria Beginning and ending with '*'
new FileInfo(Path.Combine(dir2.FullName, "AAABB")).Create();
Directory.CreateDirectory(Path.Combine(dir2.FullName, "aaabbcc"));
fsArr = dir2.GetFileSystemInfos("*BB*");
iCountTestcases++;
if (fsArr.Length != (Interop.IsWindows ? 2 : 1))
{
iCountErrors++;
printerr("Error_4y190! Incorrect number of files==" + fsArr.Length);
}
names = new String[fsArr.Length];
i = 0;
foreach (FileSystemInfo fs in fsArr)
names[i++] = fs.Name;
if (Interop.IsWindows)
{
iCountTestcases++;
if (Array.IndexOf(names, "aaabbcc") < 0)
{
iCountErrors++;
printerr("Error_956yb! Incorrect name==" + fsArr[0]);
foreach (FileSystemInfo s in fsArr)
Console.WriteLine(s.Name);
}
}
iCountTestcases++;
if (Array.IndexOf(names, "AAABB") < 0)
{
iCountErrors++;
printerr("Error_48yg7! Incorrect name==" + fsArr[1]);
foreach (FileSystemInfo s in fsArr)
Console.WriteLine(s.Name);
}
strLoc = "Loc_0001";
// [] Should not search on fullpath
// [] Search Criteria without match should return empty array
fsArr = dir2.GetFileSystemInfos("Directory");
iCountTestcases++;
if (fsArr.Length != 0)
{
iCountErrors++;
printerr("Error_209v7! Incorrect number of files==" + fsArr.Length);
}
new FileInfo(Path.Combine(dir2.FullName, "TestDir1", "Test.tmp")).Create();
fsArr = dir2.GetFileSystemInfos(Path.Combine("TestDir1", "*"));
iCountTestcases++;
if (fsArr.Length != 1)
{
iCountErrors++;
printerr("Error_28gyb! Incorrect number of files");
}
}
catch (Exception exc_general)
{
++iCountErrors;
Console.WriteLine("Error Err_8888yyy! strLoc==" + strLoc + ", exc_general==" + exc_general.ToString());
}
//// Finish Diagnostics
if (iCountErrors != 0)
{
Console.WriteLine("FAiL! " + s_strTFName + " ,iCountErrors==" + iCountErrors.ToString());
}
Assert.Equal(0, iCountErrors);
}
public static void printerr(String err, [CallerMemberName] string memberName = "", [CallerFilePath] string filePath = "", [CallerLineNumber] int lineNumber = 0)
{
Console.WriteLine("ERROR: ({0}, {1}, {2}) {3}", memberName, filePath, lineNumber, err);
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Versioning;
using Moq;
using NuGet.Test.Mocks;
using Xunit;
namespace NuGet.Test
{
public class PackageRepositoryTest
{
// Test that when dependencyVersion is DependencyVersion.Lowest,
// the lowest version that satisfies the dependency is picked.
[Fact]
public void FindDependencyPicksLowestMajorAndMinorVersion()
{
// Arrange
var repository = new MockPackageRepository() {
PackageUtility.CreatePackage("B", "2.0"),
PackageUtility.CreatePackage("B", "1.0"),
PackageUtility.CreatePackage("B", "1.0.1"),
PackageUtility.CreatePackage("B", "1.0.9"),
PackageUtility.CreatePackage("B", "1.1")
};
// B >= 1.0
PackageDependency dependency1 = PackageDependency.CreateDependency("B", "1.0");
// B >= 1.0.0
PackageDependency dependency2 = PackageDependency.CreateDependency("B", "1.0.0");
// B >= 1.0.0.0
PackageDependency dependency3 = PackageDependency.CreateDependency("B", "1.0.0.0");
// B = 1.0
PackageDependency dependency4 = PackageDependency.CreateDependency("B", "[1.0]");
// B >= 1.0.0 && <= 1.0.8
PackageDependency dependency5 = PackageDependency.CreateDependency("B", "[1.0.0, 1.0.8]");
// Act
IPackage package1 = DependencyResolveUtility.ResolveDependency(
repository,
dependency1, constraintProvider: null, allowPrereleaseVersions: false,
preferListedPackages: false, dependencyVersion: DependencyVersion.Lowest);
IPackage package2 = DependencyResolveUtility.ResolveDependency(
repository,
dependency2, constraintProvider: null, allowPrereleaseVersions: false,
preferListedPackages: false, dependencyVersion: DependencyVersion.Lowest);
IPackage package3 = repository.ResolveDependency(
dependency3, constraintProvider: null, allowPrereleaseVersions: false,
preferListedPackages: false, dependencyVersion: DependencyVersion.Lowest);
IPackage package4 = repository.ResolveDependency(
dependency4, constraintProvider: null, allowPrereleaseVersions: false,
preferListedPackages: false, dependencyVersion: DependencyVersion.Lowest);
IPackage package5 = repository.ResolveDependency(
dependency5, constraintProvider: null, allowPrereleaseVersions: false,
preferListedPackages: false, dependencyVersion: DependencyVersion.Lowest);
// Assert
Assert.Equal("B", package1.Id);
Assert.Equal(new SemanticVersion("1.0"), package1.Version);
Assert.Equal("B", package2.Id);
Assert.Equal(new SemanticVersion("1.0"), package2.Version);
Assert.Equal("B", package3.Id);
Assert.Equal(new SemanticVersion("1.0"), package3.Version);
Assert.Equal("B", package4.Id);
Assert.Equal(new SemanticVersion("1.0"), package4.Version);
Assert.Equal("B", package5.Id);
Assert.Equal(new SemanticVersion("1.0"), package5.Version);
}
[Fact]
public void FindByIdReturnsPackage()
{
// Arrange
var repo = GetLocalRepository();
// Act
var package = repo.FindPackage(packageId: "A");
// Assert
Assert.NotNull(package);
Assert.Equal("A", package.Id);
}
[Fact]
public void FindByIdReturnsNullWhenPackageNotFound()
{
// Arrange
var repo = GetLocalRepository();
// Act
var package = repo.FindPackage(packageId: "X");
// Assert
Assert.Null(package);
}
[Fact]
public void FindByIdAndVersionReturnsPackage()
{
// Arrange
var repo = GetRemoteRepository();
// Act
var package = repo.FindPackage(packageId: "A", version: SemanticVersion.Parse("1.0"));
// Assert
Assert.NotNull(package);
Assert.Equal("A", package.Id);
Assert.Equal(SemanticVersion.Parse("1.0"), package.Version);
}
[Fact]
public void FindByIdAndVersionReturnsNullWhenPackageNotFound()
{
// Arrange
var repo = GetLocalRepository();
// Act
var package1 = repo.FindPackage(packageId: "X", version: SemanticVersion.Parse("1.0"));
var package2 = repo.FindPackage(packageId: "A", version: SemanticVersion.Parse("1.1"));
// Assert
Assert.Null(package1 ?? package2);
}
[Fact]
public void FindByIdAndVersionRangeReturnsPackage()
{
// Arrange
var repo = GetRemoteRepository();
var versionSpec = VersionUtility.ParseVersionSpec("[0.9, 1.1]");
// Act
var package = repo.FindPackage("A", versionSpec, allowPrereleaseVersions: false, allowUnlisted: true);
// Assert
Assert.NotNull(package);
Assert.Equal("A", package.Id);
Assert.Equal(SemanticVersion.Parse("1.0"), package.Version);
}
[Fact]
public void FindByIdAndVersionRangeReturnsNullWhenPackageNotFound()
{
// Arrange
var repo = GetLocalRepository();
// Act
var package1 = repo.FindPackage("X", VersionUtility.ParseVersionSpec("[0.9, 1.1]"), allowPrereleaseVersions: false, allowUnlisted: true);
var package2 = repo.FindPackage("A", VersionUtility.ParseVersionSpec("[1.4, 1.5]"), allowPrereleaseVersions: false, allowUnlisted: true);
// Assert
Assert.Null(package1 ?? package2);
}
[Fact]
public void FindPackageByIdVersionAndVersionRangesUsesRangeIfExactVersionIsNull()
{
// Arrange
var repo = GetRemoteRepository();
// Act
var package = repo.FindPackage("A", VersionUtility.ParseVersionSpec("[0.6, 1.1.5]"), allowPrereleaseVersions: false, allowUnlisted: true);
// Assert
Assert.NotNull(package);
Assert.Equal("A", package.Id);
Assert.Equal(SemanticVersion.Parse("1.0"), package.Version);
}
[Fact]
public void FindPackagesReturnsPackagesWithTermInPackageTagOrDescriptionOrId()
{
// Arrange
var term = "TAG";
var repo = new MockPackageRepository();
repo.Add(CreateMockPackage("A", "1.0", "Description", " TAG "));
repo.Add(CreateMockPackage("B", "2.0", "Description", "Tags"));
repo.Add(CreateMockPackage("C", "1.0", "This description has tags in it"));
repo.Add(CreateMockPackage("D", "1.0", "Description"));
repo.Add(CreateMockPackage("TagCloud", "1.0", "Description"));
// Act
var packages = repo.GetPackages().Find(term).ToList();
// Assert
Assert.Equal(3, packages.Count);
Assert.Equal("A", packages[0].Id);
Assert.Equal("C", packages[1].Id);
Assert.Equal("TagCloud", packages[2].Id);
}
[Fact]
public void FindPackagesReturnsPrereleasePackagesIfTheFlagIsSetToTrue()
{
// Arrange
var term = "B";
var repo = GetRemoteRepository(includePrerelease: true);
// Act
var packages = repo.GetPackages().Find(term);
// Assert
Assert.Equal(packages.Count(), 2);
packages = packages.OrderBy(p => p.Id);
Assert.Equal(packages.ElementAt(0).Id, "B");
Assert.Equal(packages.ElementAt(0).Version, new SemanticVersion("1.0"));
Assert.Equal(packages.ElementAt(1).Id, "B");
Assert.Equal(packages.ElementAt(1).Version, new SemanticVersion("1.0-beta"));
}
[Fact]
public void FindPackagesReturnsPackagesWithTerm()
{
// Arrange
var term = "B xaml";
var repo = GetRemoteRepository();
// Act
var packages = repo.GetPackages().Find(term);
// Assert
Assert.Equal(packages.Count(), 2);
packages = packages.OrderBy(p => p.Id);
Assert.Equal(packages.ElementAt(0).Id, "B");
Assert.Equal(packages.ElementAt(1).Id, "C");
}
[Fact]
public void FindPackagesReturnsEmptyCollectionWhenNoPackageContainsTerm()
{
// Arrange
var term = "does-not-exist";
var repo = GetRemoteRepository();
// Act
var packages = repo.GetPackages().Find(term);
// Assert
Assert.False(packages.Any());
}
[Fact]
public void FindPackagesReturnsAllPackagesWhenSearchTermIsNullOrEmpty()
{
// Arrange
var repo = GetLocalRepository();
// Act
var packages1 = repo.GetPackages().Find(String.Empty);
var packages2 = repo.GetPackages().Find(null);
var packages3 = repo.GetPackages();
// Assert
Assert.Equal(packages1.ToList(), packages2.ToList());
Assert.Equal(packages2.ToList(), packages3.ToList());
}
[Fact]
public void SearchUsesInterfaceIfImplementedByRepository()
{
// Arrange
var repo = new Mock<MockPackageRepository>(MockBehavior.Strict);
repo.Setup(m => m.GetPackages()).Returns(Enumerable.Empty<IPackage>().AsQueryable());
repo.As<IServiceBasedRepository>().Setup(m => m.Search(It.IsAny<string>(), It.IsAny<IEnumerable<string>>(), false, false))
.Returns(new[] { PackageUtility.CreatePackage("A") }.AsQueryable());
// Act
var packages = repo.Object.Search("Hello", new[] { ".NETFramework" }, allowPrereleaseVersions: false).ToList();
// Assert
Assert.Equal(1, packages.Count);
Assert.Equal("A", packages[0].Id);
}
[Fact]
public void GetUpdatesReturnsPackagesWithUpdates()
{
// Arrange
var localRepo = GetLocalRepository();
var remoteRepo = GetRemoteRepository();
// Act
var packages = remoteRepo.GetUpdates(localRepo.GetPackages(), includePrerelease: false, includeAllVersions: false);
// Assert
Assert.True(packages.Any());
Assert.Equal(packages.First().Id, "A");
Assert.Equal(packages.First().Version, SemanticVersion.Parse("1.2"));
}
[Fact]
public void GetUpdatesDoesNotInvokeServiceMethodIfLocalRepositoryDoesNotHaveAnyPackages()
{
// Arrange
var localRepo = new MockPackageRepository();
var serviceRepository = new Mock<IServiceBasedRepository>(MockBehavior.Strict);
var remoteRepo = serviceRepository.As<IPackageRepository>().Object;
// Act
remoteRepo.GetUpdates(localRepo.GetPackages(), includePrerelease: false, includeAllVersions: false);
// Assert
serviceRepository.Verify(s => s.GetUpdates(It.IsAny<IEnumerable<IPackage>>(), false, false, It.IsAny<IEnumerable<FrameworkName>>(), It.IsAny<IEnumerable<IVersionSpec>>()), Times.Never());
}
[Fact]
public void GetUpdatesReturnsEmptyCollectionWhenSourceRepositoryIsEmpty()
{
// Arrange
var localRepo = GetLocalRepository();
var remoteRepo = GetEmptyRepository();
// Act
var packages = remoteRepo.GetUpdates(localRepo.GetPackages(), includePrerelease: false, includeAllVersions: false);
// Assert
Assert.False(packages.Any());
}
[Fact]
public void FindDependencyPicksHighestVersionIfNotSpecified()
{
// Arrange
var repository = new MockPackageRepository() {
PackageUtility.CreatePackage("B", "2.0"),
PackageUtility.CreatePackage("B", "1.0"),
PackageUtility.CreatePackage("B", "1.0.1"),
PackageUtility.CreatePackage("B", "1.0.9"),
PackageUtility.CreatePackage("B", "1.1")
};
var dependency = new PackageDependency("B");
// Act
IPackage package = PackageRepositoryExtensions.ResolveDependency(repository, dependency, allowPrereleaseVersions: false, preferListedPackages: false);
// Assert
Assert.Equal("B", package.Id);
Assert.Equal(new SemanticVersion("2.0"), package.Version);
}
[Fact]
public void FindPackageNormalizesVersionBeforeComparing()
{
// Arrange
var repository = new MockPackageRepository() {
PackageUtility.CreatePackage("B", "1.0.0"),
PackageUtility.CreatePackage("B", "1.0.0.1")
};
// Act
IPackage package = repository.FindPackage("B", new SemanticVersion("1.0"));
// Assert
Assert.Equal("B", package.Id);
Assert.Equal(new SemanticVersion("1.0.0"), package.Version);
}
// Test that when dependencyVersion is DependencyVersion.Highest,
// the highest version that satisfies the dependency is picked.
[Fact]
public void FindDependencyPicksHighest()
{
// Arrange
var repository = new MockPackageRepository() {
PackageUtility.CreatePackage("B", "3.0"),
PackageUtility.CreatePackage("B", "2.0"),
PackageUtility.CreatePackage("B", "1.0"),
PackageUtility.CreatePackage("B", "1.0.1"),
PackageUtility.CreatePackage("B", "1.0.9"),
PackageUtility.CreatePackage("B", "1.1")
};
// B >= 1.0
PackageDependency dependency1 = PackageDependency.CreateDependency("B", "1.0");
// B >= 1.0.0
PackageDependency dependency2 = PackageDependency.CreateDependency("B", "1.0.0");
// B >= 1.0.0.0
PackageDependency dependency3 = PackageDependency.CreateDependency("B", "1.0.0.0");
// B = 1.0
PackageDependency dependency4 = PackageDependency.CreateDependency("B", "[1.0]");
// B >= 1.0.0 && <= 2.0
PackageDependency dependency5 = PackageDependency.CreateDependency("B", "[1.0.0, 2.0]");
// Act
IPackage package1 = repository.ResolveDependency(
dependency1, constraintProvider: null, allowPrereleaseVersions: false,
preferListedPackages: false, dependencyVersion: DependencyVersion.Highest);
IPackage package2 = repository.ResolveDependency(
dependency2, constraintProvider: null, allowPrereleaseVersions: false,
preferListedPackages: false, dependencyVersion: DependencyVersion.Highest);
IPackage package3 = repository.ResolveDependency(
dependency3, constraintProvider: null, allowPrereleaseVersions: false,
preferListedPackages: false, dependencyVersion: DependencyVersion.Highest);
IPackage package4 = repository.ResolveDependency(
dependency4, constraintProvider: null, allowPrereleaseVersions: false,
preferListedPackages: false, dependencyVersion: DependencyVersion.Highest);
IPackage package5 = repository.ResolveDependency(
dependency5, constraintProvider: null, allowPrereleaseVersions: false,
preferListedPackages: false, dependencyVersion: DependencyVersion.Highest);
// Assert
Assert.Equal("B", package1.Id);
Assert.Equal(new SemanticVersion("3.0"), package1.Version);
Assert.Equal("B", package2.Id);
Assert.Equal(new SemanticVersion("3.0"), package2.Version);
Assert.Equal("B", package3.Id);
Assert.Equal(new SemanticVersion("3.0"), package3.Version);
Assert.Equal("B", package4.Id);
Assert.Equal(new SemanticVersion("1.0"), package4.Version);
Assert.Equal("B", package5.Id);
Assert.Equal(new SemanticVersion("2.0"), package5.Version);
}
// Test that when dependencyVersion is DependencyVersion.HighestMinor,
// the dependency with the highest minor version is picked.
[Fact]
public void FindDependencyPicksHighestMinor()
{
// Arrange
var repository = new MockPackageRepository() {
PackageUtility.CreatePackage("B", "1.0"),
PackageUtility.CreatePackage("B", "1.0.1"),
PackageUtility.CreatePackage("B", "1.0.9"),
PackageUtility.CreatePackage("B", "1.1"),
PackageUtility.CreatePackage("B", "2.0"),
PackageUtility.CreatePackage("B", "3.0")
};
// B >= 1.0
PackageDependency dependency1 = PackageDependency.CreateDependency("B", "1.0");
// B >= 1.0.0
PackageDependency dependency2 = PackageDependency.CreateDependency("B", "1.0.0");
// B >= 1.0.0.0
PackageDependency dependency3 = PackageDependency.CreateDependency("B", "1.0.0.0");
// B = 1.0
PackageDependency dependency4 = PackageDependency.CreateDependency("B", "[1.0]");
// B >= 1.0.0 && <= 2.0
PackageDependency dependency5 = PackageDependency.CreateDependency("B", "[1.0.0, 2.0]");
// Act
IPackage package1 = repository.ResolveDependency(
dependency1, constraintProvider: null, allowPrereleaseVersions: false,
preferListedPackages: false, dependencyVersion: DependencyVersion.HighestMinor);
IPackage package2 = repository.ResolveDependency(
dependency2, constraintProvider: null, allowPrereleaseVersions: false,
preferListedPackages: false, dependencyVersion: DependencyVersion.HighestMinor);
IPackage package3 = repository.ResolveDependency(
dependency3, constraintProvider: null, allowPrereleaseVersions: false,
preferListedPackages: false, dependencyVersion: DependencyVersion.HighestMinor);
IPackage package4 = repository.ResolveDependency(
dependency4, constraintProvider: null, allowPrereleaseVersions: false,
preferListedPackages: false, dependencyVersion: DependencyVersion.HighestMinor);
IPackage package5 = repository.ResolveDependency(
dependency5, constraintProvider: null, allowPrereleaseVersions: false,
preferListedPackages: false, dependencyVersion: DependencyVersion.HighestMinor);
// Assert
Assert.Equal("B", package1.Id);
Assert.Equal(new SemanticVersion("1.1"), package1.Version);
Assert.Equal("B", package2.Id);
Assert.Equal(new SemanticVersion("1.1"), package2.Version);
Assert.Equal("B", package3.Id);
Assert.Equal(new SemanticVersion("1.1"), package3.Version);
Assert.Equal("B", package4.Id);
Assert.Equal(new SemanticVersion("1.0"), package4.Version);
Assert.Equal("B", package5.Id);
Assert.Equal(new SemanticVersion("1.1"), package5.Version);
}
[Fact]
public void FindDependencyPicksLowestMajorAndMinorVersionButHighestBuildAndRevision()
{
// Arrange
var repository = new MockPackageRepository() {
PackageUtility.CreatePackage("B", "2.0"),
PackageUtility.CreatePackage("B", "1.0"),
PackageUtility.CreatePackage("B", "1.0.1"),
PackageUtility.CreatePackage("B", "1.0.9"),
PackageUtility.CreatePackage("B", "1.1")
};
// B >= 1.0
PackageDependency dependency1 = PackageDependency.CreateDependency("B", "1.0");
// B >= 1.0.0
PackageDependency dependency2 = PackageDependency.CreateDependency("B", "1.0.0");
// B >= 1.0.0.0
PackageDependency dependency3 = PackageDependency.CreateDependency("B", "1.0.0.0");
// B = 1.0
PackageDependency dependency4 = PackageDependency.CreateDependency("B", "[1.0]");
// B >= 1.0.0 && <= 1.0.8
PackageDependency dependency5 = PackageDependency.CreateDependency("B", "[1.0.0, 1.0.8]");
// Act
IPackage package1 = repository.ResolveDependency(dependency1, constraintProvider: null, allowPrereleaseVersions: false, preferListedPackages: false, dependencyVersion: DependencyVersion.HighestPatch);
IPackage package2 = repository.ResolveDependency(dependency2, constraintProvider: null, allowPrereleaseVersions: false, preferListedPackages: false, dependencyVersion: DependencyVersion.HighestPatch);
IPackage package3 = repository.ResolveDependency(dependency3, constraintProvider: null, allowPrereleaseVersions: false, preferListedPackages: false, dependencyVersion: DependencyVersion.HighestPatch);
IPackage package4 = repository.ResolveDependency(dependency4, constraintProvider: null, allowPrereleaseVersions: false, preferListedPackages: false, dependencyVersion: DependencyVersion.HighestPatch);
IPackage package5 = repository.ResolveDependency(dependency5, constraintProvider: null, allowPrereleaseVersions: false, preferListedPackages: false, dependencyVersion: DependencyVersion.HighestPatch);
// Assert
Assert.Equal("B", package1.Id);
Assert.Equal(new SemanticVersion("1.0.9"), package1.Version);
Assert.Equal("B", package2.Id);
Assert.Equal(new SemanticVersion("1.0.9"), package2.Version);
Assert.Equal("B", package3.Id);
Assert.Equal(new SemanticVersion("1.0.9"), package3.Version);
Assert.Equal("B", package4.Id);
Assert.Equal(new SemanticVersion("1.0"), package4.Version);
Assert.Equal("B", package5.Id);
Assert.Equal(new SemanticVersion("1.0.1"), package5.Version);
}
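// Consolidated view of the DependencyVersion behaviour exercised by the three tests above,
// for an open constraint such as "B >= 1.0" (candidate versions as listed in each test's repository).
// The calls mirror the ResolveDependency overload used in the tests; "dep" is a placeholder and
// pinned ranges such as "[1.0]" resolve to the pinned version regardless of the dependencyVersion option.
//
//     repository.ResolveDependency(dep, null, false, false, DependencyVersion.Highest);      // picks 3.0
//     repository.ResolveDependency(dep, null, false, false, DependencyVersion.HighestMinor);  // picks 1.1   (lowest major, highest minor)
//     repository.ResolveDependency(dep, null, false, false, DependencyVersion.HighestPatch);  // picks 1.0.9 (lowest major.minor, highest patch)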
private static IPackageRepository GetEmptyRepository()
{
Mock<IPackageRepository> repository = new Mock<IPackageRepository>();
repository.Setup(c => c.GetPackages()).Returns(() => Enumerable.Empty<IPackage>().AsQueryable());
return repository.Object;
}
private static IPackageRepository GetRemoteRepository(bool includePrerelease = false)
{
Mock<IPackageRepository> repository = new Mock<IPackageRepository>();
var packages = new List<IPackage> {
CreateMockPackage("A", "1.0", "scripts style"),
CreateMockPackage("B", "1.0", "testing"),
CreateMockPackage("C", "2.0", "xaml"),
CreateMockPackage("A", "1.2", "a updated desc") };
if (includePrerelease)
{
packages.Add(CreateMockPackage("A", "2.0-alpha", "a prerelease package"));
packages.Add(CreateMockPackage("B", "1.0-beta", "another prerelease package"));
}
repository.Setup(c => c.GetPackages()).Returns(() => packages.AsQueryable());
return repository.Object;
}
private static IPackageRepository GetLocalRepository()
{
Mock<IPackageRepository> repository = new Mock<IPackageRepository>();
var packages = new[] { CreateMockPackage("A", "1.0"), CreateMockPackage("B", "1.0") };
repository.Setup(c => c.GetPackages()).Returns(() => packages.AsQueryable());
return repository.Object;
}
private static IPackage CreateMockPackage(string name, string version, string desc = null, string tags = null)
{
Mock<IPackage> package = new Mock<IPackage>();
package.SetupGet(p => p.Id).Returns(name);
package.SetupGet(p => p.Version).Returns(SemanticVersion.Parse(version));
package.SetupGet(p => p.Description).Returns(desc);
package.SetupGet(p => p.Tags).Returns(tags);
package.SetupGet(p => p.Listed).Returns(true);
return package.Object;
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
using Microsoft.Azure.Management.BatchAI.Fluent.Models;
namespace Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition
{
using Microsoft.Azure.Management.BatchAI.Fluent;
using Microsoft.Azure.Management.ResourceManager.Fluent.Core.Resource.Definition;
using Microsoft.Azure.Management.ResourceManager.Fluent.Core.ResourceActions;
using Microsoft.Azure.Management.BatchAI.Fluent.AzureBlobFileSystem.Definition;
using Microsoft.Azure.Management.BatchAI.Fluent.AzureFileShare.Definition;
using Microsoft.Azure.Management.BatchAI.Fluent.FileServer.Definition;
using Microsoft.Azure.Management.BatchAI.Fluent.NodeSetupTask.Definition;
public interface IWithVMPriority
{
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithCreate WithLowPriority();
}
/// <summary>
/// The stage of the definition which contains all the minimum required inputs for the resource to be created
/// but also allows for any other optional settings to be specified.
/// </summary>
public interface IWithCreate :
Microsoft.Azure.Management.ResourceManager.Fluent.Core.ResourceActions.ICreatable<Microsoft.Azure.Management.BatchAI.Fluent.IBatchAICluster>,
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithUserCredentials,
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithVMPriority,
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithSetupTask,
Microsoft.Azure.Management.BatchAI.Fluent.Models.HasMountVolumes.Definition.IWithMountVolumes<Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithCreate>,
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithAppInsightsResourceId,
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithVirtualMachineImage,
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithSubnet
{
}
/// <summary>
/// Specifies the credentials to use for authentication on each of the nodes of a cluster.
/// </summary>
public interface IWithUserCredentials
{
/// <param name="sshPublicKey">SSH public keys used to authenticate with linux based VMs.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithScaleSettings WithSshPublicKey(string sshPublicKey);
/// <param name="password">Admin user password (linux only).</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithScaleSettings WithPassword(string password);
}
/// <summary>
/// Specifies a setup task which can be used to customize the compute nodes
/// of the cluster. The task runs every time a VM is rebooted. For
/// that reason the task code needs to be idempotent. Generally it is used
/// to either download static data that is required for all jobs that run on
/// the cluster VMs or to download/install software.
/// NOTE: The volumes specified in mountVolumes are mounted first and then the setupTask is run.
/// Therefore the setup task can use local mountPaths in its execution.
/// </summary>
public interface IWithSetupTask
{
/// <summary>
/// Begins the definition of setup task.
/// </summary>
/// <return>The first stage of the setup task definition.</return>
NodeSetupTask.Definition.IBlank<Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithCreate> DefineSetupTask();
}
/// <summary>
/// The first stage of a Batch AI cluster definition.
/// </summary>
public interface IBlank : IWithVMSize
{
}
/// <summary>
/// Specifies Azure Application Insights information for performance counters reporting.
/// </summary>
public interface IWithAppInsightsResourceId
{
/// <param name="resoureId">Azure Application Insights component resource id.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithAppInsightsKey WithAppInsightsComponentId(string resoureId);
}
public interface IWithAppInsightsKey :
Microsoft.Azure.Management.ResourceManager.Fluent.Core.IBeta
{
/// <param name="instrumentationKey">Value of the Azure Application Insights instrumentation key.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithCreate WithInstrumentationKey(string instrumentationKey);
/// <summary>
/// Specifies KeyVault Store and Secret which contains the value for the instrumentation key.
/// </summary>
/// <param name="keyVaultId">Fully qualified resource Id for the Key Vault.</param>
/// <param name="secretUrl">The URL referencing a secret in a Key Vault.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithCreate WithInstrumentationKeySecretReference(string keyVaultId, string secretUrl);
}
/// <summary>
/// Specifies virtual machine image.
/// </summary>
public interface IWithVirtualMachineImage :
Microsoft.Azure.Management.ResourceManager.Fluent.Core.IBeta
{
/// <summary>
/// Specifies virtual machine image.
/// </summary>
/// <param name="publisher">Publisher of the image.</param>
/// <param name="offer">Offer of the image.</param>
/// <param name="sku">Sku of the image.</param>
/// <param name="version">Version of the image.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithCreate WithVirtualMachineImage(string publisher, string offer, string sku, string version);
/// <summary>
/// Specifies virtual machine image.
/// </summary>
/// <param name="publisher">Publisher of the image.</param>
/// <param name="offer">Offer of the image.</param>
/// <param name="sku">Sku of the image.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithCreate WithVirtualMachineImage(string publisher, string offer, string sku);
/// <summary>
/// Compute nodes of the cluster will be created using this custom image. This is of the form
/// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Compute/images/{imageName}.
/// The virtual machine image must be in the same region and subscription as
/// the cluster. For information about the firewall settings for the Batch
/// node agent to communicate with the Batch service see
/// https://docs.microsoft.com/en-us/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration.
/// Note: you need to provide the publisher, offer and sku of the base OS image
/// from which the custom image has been derived.
/// </summary>
/// <param name="virtualMachineImageId">The ARM resource identifier of the virtual machine image.</param>
/// <param name="publisher">Publisher of the image.</param>
/// <param name="offer">Offer of the image.</param>
/// <param name="sku">Sku of the image.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithCreate WithVirtualMachineImageId(string virtualMachineImageId, string publisher, string offer, string sku);
}
/// <summary>
/// Defines subnet for the cluster.
/// </summary>
public interface IWithSubnet :
Microsoft.Azure.Management.ResourceManager.Fluent.Core.IBeta
{
/// <param name="subnetId">Identifier of the subnet.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithCreate WithSubnet(string subnetId);
/// <param name="networkId">Identifier of the network.</param>
/// <param name="subnetName">Subnet name.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithCreate WithSubnet(string networkId, string subnetName);
}
/// <summary>
/// Specifies the name of the administrator account that gets created on each of the nodes of a cluster.
/// </summary>
public interface IWithUserName
{
/// <param name="userName">The name of the administrator account.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithUserCredentials WithUserName(string userName);
}
/// <summary>
/// The stage of a Batch AI cluster definition allowing the virtual machine size to be specified. All virtual machines in a cluster are the same size.
/// For information about available VM sizes for clusters using images from the Virtual
/// Machines Marketplace, see Sizes for Virtual Machines (Linux) or Sizes for Virtual Machines (Windows). The Batch AI service supports all Azure VM
/// sizes except STANDARD_A0 and those with premium storage (STANDARD_GS, STANDARD_DS, and STANDARD_DSV2 series).
/// </summary>
public interface IWithVMSize
{
/// <param name="vmSize">Virtual machine size.</param>
/// <return>Next stage of the definition.</return>
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithUserName WithVMSize(string vmSize);
}
/// <summary>
/// Specifies scale settings for the cluster.
/// </summary>
public interface IWithScaleSettings
{
/// <summary>
/// If autoScale settings are specified, the system automatically scales the cluster up and down (within
/// the supplied limits) based on the pending jobs on the cluster.
/// </summary>
/// <param name="minimumNodeCount">The minimum number of compute nodes the cluster can have.</param>
/// <param name="maximumNodeCount">The maximum number of compute nodes the cluster can have.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithCreate WithAutoScale(int minimumNodeCount, int maximumNodeCount);
/// <summary>
/// If autoScale settings are specified, the system automatically scales the cluster up and down (within
/// the supplied limits) based on the pending jobs on the cluster.
/// </summary>
/// <param name="minimumNodeCount">The minimum number of compute nodes the cluster can have.</param>
/// <param name="maximumNodeCount">The maximum number of compute nodes the cluster can have.</param>
/// <param name="initialNodeCount">
/// The number of compute nodes to allocate on cluster creation.
/// Note that this value is used only during cluster creation.
/// </param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithCreate WithAutoScale(int minimumNodeCount, int maximumNodeCount, int initialNodeCount);
/// <summary>
/// Specifies that the cluster should be scaled by manual settings.
/// </summary>
/// <param name="targetNodeCount">The desired number of compute nodes in the Cluster.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithCreate WithManualScale(int targetNodeCount);
/// <summary>
/// Specifies that the cluster should be scaled by manual settings.
/// </summary>
/// <param name="targetNodeCount">The desired number of compute nodes in the Cluster.</param>
/// <param name="deallocationOption">Determines what to do with the job(s) running on compute node if the cluster size is decreasing. The default value is requeue.</param>
/// <return>The next stage of the definition.</return>
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithCreate WithManualScale(int targetNodeCount, DeallocationOption deallocationOption);
}
/// <summary>
/// The entirety of a Batch AI cluster definition.
/// </summary>
public interface IDefinition :
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IBlank,
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithUserName,
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithUserCredentials,
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithScaleSettings,
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithAppInsightsKey,
Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithCreate
{
}
/// <summary>
/// The stage of a Batch AI cluster definition allowing the resource group to be specified.
/// </summary>
public interface IWithGroup :
Microsoft.Azure.Management.ResourceManager.Fluent.Core.GroupableResource.Definition.IWithGroup<Microsoft.Azure.Management.BatchAI.Fluent.BatchAICluster.Definition.IWithVMSize>
{
}
}
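// Illustrative sketch (not generated code): how the staged definition interfaces above chain
// together when defining a cluster. Only the WithVMSize -> WithUserName -> credentials ->
// scale settings -> Create() ordering is taken from the interfaces in this file; the
// azure.BatchAIClusters.Define(...) entry point and the WithRegion/WithNewResourceGroup calls
// are assumptions based on the common Azure Fluent pattern and are not defined here.
//
//     IBatchAICluster cluster = azure.BatchAIClusters.Define("myCluster")
//         .WithRegion(Region.USWest2)               // assumed GroupableResource stage
//         .WithNewResourceGroup("myResourceGroup")  // assumed IWithGroup stage
//         .WithVMSize("STANDARD_NC6")               // IWithVMSize
//         .WithUserName("azureadmin")               // IWithUserName
//         .WithPassword("<admin password>")         // IWithUserCredentials
//         .WithAutoScale(1, 4)                      // IWithScaleSettings
//         .WithLowPriority()                        // IWithVMPriority (optional, part of IWithCreate)
//         .Create();                                // ICreatable<IBatchAICluster>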
| |
using System;
using Csla;
using SelfLoad.DataAccess;
using SelfLoad.DataAccess.ERLevel;
namespace SelfLoad.Business.ERLevel
{
/// <summary>
/// C06_Country (editable child object).<br/>
/// This is a generated base class of <see cref="C06_Country"/> business object.
/// </summary>
/// <remarks>
/// This class contains two child objects:<br/>
/// - <see cref="C07_Country_SingleObject"/> of type <see cref="C07_Country_Child"/><br/>
/// - <see cref="C07_Country_ASingleObject"/> of type <see cref="C07_Country_ReChild"/><br/>
/// and one child collection:<br/>
/// - <see cref="C07_RegionObjects"/> of type <see cref="C07_RegionColl"/> (1:M relation to <see cref="C08_Region"/>)<br/>
/// This class is an item of the <see cref="C05_CountryColl"/> collection.
/// </remarks>
[Serializable]
public partial class C06_Country : BusinessBase<C06_Country>
{
#region Static Fields
private static int _lastID;
#endregion
#region State Fields
[NotUndoable]
private byte[] _rowVersion = new byte[] {};
#endregion
#region Business Properties
/// <summary>
/// Maintains metadata about <see cref="Country_ID"/> property.
/// </summary>
public static readonly PropertyInfo<int> Country_IDProperty = RegisterProperty<int>(p => p.Country_ID, "Countries ID");
/// <summary>
/// Gets the Countries ID.
/// </summary>
/// <value>The Countries ID.</value>
public int Country_ID
{
get { return GetProperty(Country_IDProperty); }
}
/// <summary>
/// Maintains metadata about <see cref="Country_Name"/> property.
/// </summary>
public static readonly PropertyInfo<string> Country_NameProperty = RegisterProperty<string>(p => p.Country_Name, "Countries Name");
/// <summary>
/// Gets or sets the Countries Name.
/// </summary>
/// <value>The Countries Name.</value>
public string Country_Name
{
get { return GetProperty(Country_NameProperty); }
set { SetProperty(Country_NameProperty, value); }
}
/// <summary>
/// Maintains metadata about <see cref="ParentSubContinentID"/> property.
/// </summary>
public static readonly PropertyInfo<int> ParentSubContinentIDProperty = RegisterProperty<int>(p => p.ParentSubContinentID, "ParentSubContinentID");
/// <summary>
/// Gets or sets the ParentSubContinentID.
/// </summary>
/// <value>The ParentSubContinentID.</value>
public int ParentSubContinentID
{
get { return GetProperty(ParentSubContinentIDProperty); }
set { SetProperty(ParentSubContinentIDProperty, value); }
}
/// <summary>
/// Maintains metadata about child <see cref="C07_Country_SingleObject"/> property.
/// </summary>
public static readonly PropertyInfo<C07_Country_Child> C07_Country_SingleObjectProperty = RegisterProperty<C07_Country_Child>(p => p.C07_Country_SingleObject, "C07 Country Single Object", RelationshipTypes.Child);
/// <summary>
/// Gets the C07 Country Single Object ("self load" child property).
/// </summary>
/// <value>The C07 Country Single Object.</value>
public C07_Country_Child C07_Country_SingleObject
{
get { return GetProperty(C07_Country_SingleObjectProperty); }
private set { LoadProperty(C07_Country_SingleObjectProperty, value); }
}
/// <summary>
/// Maintains metadata about child <see cref="C07_Country_ASingleObject"/> property.
/// </summary>
public static readonly PropertyInfo<C07_Country_ReChild> C07_Country_ASingleObjectProperty = RegisterProperty<C07_Country_ReChild>(p => p.C07_Country_ASingleObject, "C07 Country ASingle Object", RelationshipTypes.Child);
/// <summary>
/// Gets the C07 Country ASingle Object ("self load" child property).
/// </summary>
/// <value>The C07 Country ASingle Object.</value>
public C07_Country_ReChild C07_Country_ASingleObject
{
get { return GetProperty(C07_Country_ASingleObjectProperty); }
private set { LoadProperty(C07_Country_ASingleObjectProperty, value); }
}
/// <summary>
/// Maintains metadata about child <see cref="C07_RegionObjects"/> property.
/// </summary>
public static readonly PropertyInfo<C07_RegionColl> C07_RegionObjectsProperty = RegisterProperty<C07_RegionColl>(p => p.C07_RegionObjects, "C07 Region Objects", RelationshipTypes.Child);
/// <summary>
/// Gets the C07 Region Objects ("self load" child property).
/// </summary>
/// <value>The C07 Region Objects.</value>
public C07_RegionColl C07_RegionObjects
{
get { return GetProperty(C07_RegionObjectsProperty); }
private set { LoadProperty(C07_RegionObjectsProperty, value); }
}
#endregion
#region Factory Methods
/// <summary>
/// Factory method. Creates a new <see cref="C06_Country"/> object.
/// </summary>
/// <returns>A reference to the created <see cref="C06_Country"/> object.</returns>
internal static C06_Country NewC06_Country()
{
return DataPortal.CreateChild<C06_Country>();
}
/// <summary>
/// Factory method. Loads a <see cref="C06_Country"/> object from the given C06_CountryDto.
/// </summary>
/// <param name="data">The <see cref="C06_CountryDto"/>.</param>
/// <returns>A reference to the fetched <see cref="C06_Country"/> object.</returns>
internal static C06_Country GetC06_Country(C06_CountryDto data)
{
C06_Country obj = new C06_Country();
// show the framework that this is a child object
obj.MarkAsChild();
obj.Fetch(data);
obj.MarkOld();
return obj;
}
#endregion
#region Constructor
/// <summary>
/// Initializes a new instance of the <see cref="C06_Country"/> class.
/// </summary>
/// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
[System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
public C06_Country()
{
// Use factory methods and do not use direct creation.
// show the framework that this is a child object
MarkAsChild();
}
#endregion
#region Data Access
/// <summary>
/// Loads default values for the <see cref="C06_Country"/> object properties.
/// </summary>
[Csla.RunLocal]
protected override void Child_Create()
{
LoadProperty(Country_IDProperty, System.Threading.Interlocked.Decrement(ref _lastID));
LoadProperty(C07_Country_SingleObjectProperty, DataPortal.CreateChild<C07_Country_Child>());
LoadProperty(C07_Country_ASingleObjectProperty, DataPortal.CreateChild<C07_Country_ReChild>());
LoadProperty(C07_RegionObjectsProperty, DataPortal.CreateChild<C07_RegionColl>());
var args = new DataPortalHookArgs();
OnCreate(args);
base.Child_Create();
}
/// <summary>
/// Loads a <see cref="C06_Country"/> object from the given <see cref="C06_CountryDto"/>.
/// </summary>
/// <param name="data">The C06_CountryDto to use.</param>
private void Fetch(C06_CountryDto data)
{
// Value properties
LoadProperty(Country_IDProperty, data.Country_ID);
LoadProperty(Country_NameProperty, data.Country_Name);
LoadProperty(ParentSubContinentIDProperty, data.ParentSubContinentID);
_rowVersion = data.RowVersion;
var args = new DataPortalHookArgs(data);
OnFetchRead(args);
}
/// <summary>
/// Loads child objects.
/// </summary>
internal void FetchChildren()
{
LoadProperty(C07_Country_SingleObjectProperty, C07_Country_Child.GetC07_Country_Child(Country_ID));
LoadProperty(C07_Country_ASingleObjectProperty, C07_Country_ReChild.GetC07_Country_ReChild(Country_ID));
LoadProperty(C07_RegionObjectsProperty, C07_RegionColl.GetC07_RegionColl(Country_ID));
}
/// <summary>
/// Inserts a new <see cref="C06_Country"/> object in the database.
/// </summary>
/// <param name="parent">The parent object.</param>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Insert(C04_SubContinent parent)
{
var dto = new C06_CountryDto();
dto.Parent_SubContinent_ID = parent.SubContinent_ID;
dto.Country_Name = Country_Name;
using (var dalManager = DalFactorySelfLoad.GetManager())
{
var args = new DataPortalHookArgs(dto);
OnInsertPre(args);
var dal = dalManager.GetProvider<IC06_CountryDal>();
using (BypassPropertyChecks)
{
var resultDto = dal.Insert(dto);
LoadProperty(Country_IDProperty, resultDto.Country_ID);
_rowVersion = resultDto.RowVersion;
args = new DataPortalHookArgs(resultDto);
}
OnInsertPost(args);
// flushes all pending data operations
FieldManager.UpdateChildren(this);
}
}
/// <summary>
/// Updates in the database all changes made to the <see cref="C06_Country"/> object.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Update()
{
if (!IsDirty)
return;
var dto = new C06_CountryDto();
dto.Country_ID = Country_ID;
dto.Country_Name = Country_Name;
dto.ParentSubContinentID = ParentSubContinentID;
dto.RowVersion = _rowVersion;
using (var dalManager = DalFactorySelfLoad.GetManager())
{
var args = new DataPortalHookArgs(dto);
OnUpdatePre(args);
var dal = dalManager.GetProvider<IC06_CountryDal>();
using (BypassPropertyChecks)
{
var resultDto = dal.Update(dto);
_rowVersion = resultDto.RowVersion;
args = new DataPortalHookArgs(resultDto);
}
OnUpdatePost(args);
// flushes all pending data operations
FieldManager.UpdateChildren(this);
}
}
/// <summary>
/// Self deletes the <see cref="C06_Country"/> object from database.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_DeleteSelf()
{
using (var dalManager = DalFactorySelfLoad.GetManager())
{
var args = new DataPortalHookArgs();
// flushes all pending data operations
FieldManager.UpdateChildren(this);
OnDeletePre(args);
var dal = dalManager.GetProvider<IC06_CountryDal>();
using (BypassPropertyChecks)
{
dal.Delete(ReadProperty(Country_IDProperty));
}
OnDeletePost(args);
}
}
#endregion
#region DataPortal Hooks
/// <summary>
/// Occurs after setting all defaults for object creation.
/// </summary>
partial void OnCreate(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
/// </summary>
partial void OnDeletePre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Delete, after the delete operation, before Commit().
/// </summary>
partial void OnDeletePost(DataPortalHookArgs args);
/// <summary>
/// Occurs after setting query parameters and before the fetch operation.
/// </summary>
partial void OnFetchPre(DataPortalHookArgs args);
/// <summary>
/// Occurs after the fetch operation (object or collection is fully loaded and set up).
/// </summary>
partial void OnFetchPost(DataPortalHookArgs args);
/// <summary>
/// Occurs after the low level fetch operation, before the data reader is destroyed.
/// </summary>
partial void OnFetchRead(DataPortalHookArgs args);
/// <summary>
/// Occurs after setting query parameters and before the update operation.
/// </summary>
partial void OnUpdatePre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Update, after the update operation, before setting back row identifiers (RowVersion) and Commit().
/// </summary>
partial void OnUpdatePost(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
/// </summary>
partial void OnInsertPre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
/// </summary>
partial void OnInsertPost(DataPortalHookArgs args);
#endregion
}
}
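// Illustrative sketch (not generated code): how the internal factory methods above are typically
// driven from the parent collection in this "self load" pattern. The C05_CountryColl fetch loop
// and the dal.Fetch(...) call shown here are hypothetical; only NewC06_Country, GetC06_Country
// and FetchChildren are taken from the class above.
//
//     // adding a new, empty child to an editable parent collection
//     var country = C06_Country.NewC06_Country();      // DataPortal.CreateChild -> Child_Create()
//     parentColl.Add(country);
//
//     // materialising existing children from DTOs returned by the DAL
//     foreach (C06_CountryDto dto in dal.Fetch(parentSubContinentId))
//     {
//         var item = C06_Country.GetC06_Country(dto);  // MarkAsChild(), Fetch(dto), MarkOld()
//         item.FetchChildren();                        // self-loads the C07_* child objects
//         parentColl.Add(item);
//     }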
| |
#region Apache License
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to you under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System;
using System.IO;
using System.Text;
using System.Threading;
using Ctrip.Log4.Util;
using Ctrip.Log4.Layout;
using Ctrip.Log4.Core;
namespace Ctrip.Log4.Appender
{
#if !NETCF
/// <summary>
/// Appends logging events to a file.
/// </summary>
/// <remarks>
/// <para>
/// Logging events are sent to the file specified by
/// the <see cref="File"/> property.
/// </para>
/// <para>
/// The file can be opened in either append or overwrite mode
/// by specifying the <see cref="AppendToFile"/> property.
/// If the file path is relative it is taken as relative from
/// the application base directory. The file encoding can be
/// specified by setting the <see cref="Encoding"/> property.
/// </para>
/// <para>
/// The layout's <see cref="ILayout.Header"/> and <see cref="ILayout.Footer"/>
/// values will be written each time the file is opened and closed
/// respectively. If the <see cref="AppendToFile"/> property is <see langword="true"/>
/// then the file may contain multiple copies of the header and footer.
/// </para>
/// <para>
/// This appender will first try to open the file for writing when <see cref="ActivateOptions"/>
/// is called. This will typically be during configuration.
/// If the file cannot be opened for writing the appender will attempt
/// to open the file again each time a message is logged to the appender.
/// If the file cannot be opened for writing when a message is logged then
/// the message will be discarded by this appender.
/// </para>
/// <para>
/// The <see cref="FileAppender"/> supports pluggable file locking models via
/// the <see cref="LockingModel"/> property.
/// The default behavior, implemented by <see cref="FileAppender.ExclusiveLock"/>
/// is to obtain an exclusive write lock on the file until this appender is closed.
/// The alternative models only hold a
/// write lock while the appender is writing a logging event (<see cref="FileAppender.MinimalLock"/>)
/// or synchronize by using a named system wide Mutex (<see cref="FileAppender.InterProcessLock"/>).
/// </para>
/// <para>
/// All locking strategies have issues and you should seriously consider using a different strategy that
/// avoids having multiple processes logging to the same file.
/// </para>
/// </remarks>
/// <author>Nicko Cadell</author>
/// <author>Gert Driesen</author>
/// <author>Rodrigo B. de Oliveira</author>
/// <author>Douglas de la Torre</author>
/// <author>Niall Daley</author>
#else
/// <summary>
/// Appends logging events to a file.
/// </summary>
/// <remarks>
/// <para>
/// Logging events are sent to the file specified by
/// the <see cref="File"/> property.
/// </para>
/// <para>
/// The file can be opened in either append or overwrite mode
/// by specifying the <see cref="AppendToFile"/> property.
/// If the file path is relative it is taken as relative from
/// the application base directory. The file encoding can be
/// specified by setting the <see cref="Encoding"/> property.
/// </para>
/// <para>
/// The layout's <see cref="ILayout.Header"/> and <see cref="ILayout.Footer"/>
/// values will be written each time the file is opened and closed
/// respectively. If the <see cref="AppendToFile"/> property is <see langword="true"/>
/// then the file may contain multiple copies of the header and footer.
/// </para>
/// <para>
/// This appender will first try to open the file for writing when <see cref="ActivateOptions"/>
/// is called. This will typically be during configuration.
/// If the file cannot be opened for writing the appender will attempt
/// to open the file again each time a message is logged to the appender.
/// If the file cannot be opened for writing when a message is logged then
/// the message will be discarded by this appender.
/// </para>
/// <para>
/// The <see cref="FileAppender"/> supports pluggable file locking models via
/// the <see cref="LockingModel"/> property.
/// The default behavior, implemented by <see cref="FileAppender.ExclusiveLock"/>
/// is to obtain an exclusive write lock on the file until this appender is closed.
/// The alternative model only holds a
/// write lock while the appender is writing a logging event (<see cref="FileAppender.MinimalLock"/>).
/// </para>
/// <para>
/// All locking strategies have issues and you should seriously consider using a different strategy that
/// avoids having multiple processes logging to the same file.
/// </para>
/// </remarks>
/// <author>Nicko Cadell</author>
/// <author>Gert Driesen</author>
/// <author>Rodrigo B. de Oliveira</author>
/// <author>Douglas de la Torre</author>
/// <author>Niall Daley</author>
#endif
public class FileAppender : TextWriterAppender
{
#region LockingStream Inner Class
/// <summary>
/// Write only <see cref="Stream"/> that uses the <see cref="LockingModelBase"/>
/// to manage access to an underlying resource.
/// </summary>
private sealed class LockingStream : Stream, IDisposable
{
public sealed class LockStateException : LogException
{
public LockStateException(string message): base(message)
{
}
}
private Stream m_realStream=null;
private LockingModelBase m_lockingModel=null;
private int m_readTotal=-1;
private int m_lockLevel=0;
public LockingStream(LockingModelBase locking) : base()
{
if (locking==null)
{
throw new ArgumentException("Locking model may not be null","locking");
}
m_lockingModel=locking;
}
#region Override Implementation of Stream
// Methods
public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback callback, object state)
{
AssertLocked();
IAsyncResult ret=m_realStream.BeginRead(buffer,offset,count,callback,state);
m_readTotal=EndRead(ret);
return ret;
}
/// <summary>
/// True asynchronous writes are not supported; the implementation forces a synchronous write.
/// </summary>
public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback callback, object state)
{
AssertLocked();
IAsyncResult ret=m_realStream.BeginWrite(buffer,offset,count,callback,state);
EndWrite(ret);
return ret;
}
public override void Close()
{
m_lockingModel.CloseFile();
}
public override int EndRead(IAsyncResult asyncResult)
{
AssertLocked();
return m_readTotal;
}
public override void EndWrite(IAsyncResult asyncResult)
{
//No-op, it has already been handled
}
public override void Flush()
{
AssertLocked();
m_realStream.Flush();
}
public override int Read(byte[] buffer, int offset, int count)
{
return m_realStream.Read(buffer,offset,count);
}
public override int ReadByte()
{
return m_realStream.ReadByte();
}
public override long Seek(long offset, SeekOrigin origin)
{
AssertLocked();
return m_realStream.Seek(offset,origin);
}
public override void SetLength(long value)
{
AssertLocked();
m_realStream.SetLength(value);
}
void IDisposable.Dispose()
{
Close();
}
public override void Write(byte[] buffer, int offset, int count)
{
AssertLocked();
m_realStream.Write(buffer,offset,count);
}
public override void WriteByte(byte value)
{
AssertLocked();
m_realStream.WriteByte(value);
}
// Properties
public override bool CanRead
{
get { return false; }
}
public override bool CanSeek
{
get
{
AssertLocked();
return m_realStream.CanSeek;
}
}
public override bool CanWrite
{
get
{
AssertLocked();
return m_realStream.CanWrite;
}
}
public override long Length
{
get
{
AssertLocked();
return m_realStream.Length;
}
}
public override long Position
{
get
{
AssertLocked();
return m_realStream.Position;
}
set
{
AssertLocked();
m_realStream.Position=value;
}
}
#endregion Override Implementation of Stream
#region Locking Methods
private void AssertLocked()
{
if (m_realStream == null)
{
throw new LockStateException("The file is not currently locked");
}
}
public bool AcquireLock()
{
bool ret=false;
lock(this)
{
if (m_lockLevel==0)
{
// The lock is not currently held; acquire it from the locking model
m_realStream=m_lockingModel.AcquireLock();
}
if (m_realStream!=null)
{
m_lockLevel++;
ret=true;
}
}
return ret;
}
public void ReleaseLock()
{
lock(this)
{
m_lockLevel--;
if (m_lockLevel==0)
{
// Last release for this stream; let the locking model release the underlying lock
m_lockingModel.ReleaseLock();
m_realStream=null;
}
}
}
#endregion Locking Methods
}
#endregion LockingStream Inner Class
#region Locking Models
/// <summary>
/// Locking model base class
/// </summary>
/// <remarks>
/// <para>
/// Base class for the locking models available to the <see cref="FileAppender"/> derived loggers.
/// </para>
/// </remarks>
public abstract class LockingModelBase
{
private FileAppender m_appender=null;
/// <summary>
/// Open the output file
/// </summary>
/// <param name="filename">The filename to use</param>
/// <param name="append">Whether to append to the file, or overwrite</param>
/// <param name="encoding">The encoding to use</param>
/// <remarks>
/// <para>
/// Open the file specified and prepare for logging.
/// No writes will be made until <see cref="AcquireLock"/> is called.
/// Must be called before any calls to <see cref="AcquireLock"/>,
/// <see cref="ReleaseLock"/> and <see cref="CloseFile"/>.
/// </para>
/// </remarks>
public abstract void OpenFile(string filename, bool append,Encoding encoding);
/// <summary>
/// Close the file
/// </summary>
/// <remarks>
/// <para>
/// Close the file. No further writes will be made.
/// </para>
/// </remarks>
public abstract void CloseFile();
/// <summary>
/// Acquire the lock on the file
/// </summary>
/// <returns>A stream that is ready to be written to.</returns>
/// <remarks>
/// <para>
/// Acquire the lock on the file in preparation for writing to it.
/// Return a stream pointing to the file. <see cref="ReleaseLock"/>
/// must be called to release the lock on the output file.
/// </para>
/// </remarks>
public abstract Stream AcquireLock();
/// <summary>
/// Release the lock on the file
/// </summary>
/// <remarks>
/// <para>
/// Release the lock on the file. No further writes will be made to the
/// stream until <see cref="AcquireLock"/> is called again.
/// </para>
/// </remarks>
public abstract void ReleaseLock();
/// <summary>
/// Gets or sets the <see cref="FileAppender"/> for this LockingModel
/// </summary>
/// <value>
/// The <see cref="FileAppender"/> for this LockingModel
/// </value>
/// <remarks>
/// <para>
/// The file appender this locking model is attached to and working on
/// behalf of.
/// </para>
/// <para>
/// The file appender is used to locate the security context and the error handler to use.
/// </para>
/// <para>
/// The value of this property will be set before <see cref="OpenFile"/> is
/// called.
/// </para>
/// </remarks>
public FileAppender CurrentAppender
{
get { return m_appender; }
set { m_appender = value; }
}
/// <summary>
/// Helper method that creates a FileStream under CurrentAppender's SecurityContext.
/// </summary>
/// <remarks>
/// <para>
/// Typically called during OpenFile or AcquireLock.
/// </para>
/// <para>
/// If the directory portion of the <paramref name="filename"/> does not exist, it is created
/// via Directory.CreateDirectory.
/// </para>
/// </remarks>
/// <param name="filename"></param>
/// <param name="append"></param>
/// <param name="fileShare"></param>
/// <returns></returns>
protected Stream CreateStream(string filename, bool append, FileShare fileShare)
{
using (CurrentAppender.SecurityContext.Impersonate(this))
{
// Ensure that the directory structure exists
string directoryFullName = Path.GetDirectoryName(filename);
// Only create the directory if it does not exist
// doing this check here resolves some permissions failures
if (!Directory.Exists(directoryFullName))
{
Directory.CreateDirectory(directoryFullName);
}
FileMode fileOpenMode = append ? FileMode.Append : FileMode.Create;
return new FileStream(filename, fileOpenMode, FileAccess.Write, fileShare);
}
}
/// <summary>
/// Helper method to close <paramref name="stream"/> under CurrentAppender's SecurityContext.
/// </summary>
/// <remarks>
/// Does not set <paramref name="stream"/> to null.
/// </remarks>
/// <param name="stream"></param>
protected void CloseStream(Stream stream)
{
using (CurrentAppender.SecurityContext.Impersonate(this))
{
stream.Close();
}
}
}
/// <summary>
/// Hold an exclusive lock on the output file
/// </summary>
/// <remarks>
/// <para>
/// Open the file once for writing and hold it open until <see cref="CloseFile"/> is called.
/// Maintains an exclusive lock on the file during this time.
/// </para>
/// </remarks>
public class ExclusiveLock : LockingModelBase
{
private Stream m_stream = null;
/// <summary>
/// Open the file specified and prepare for logging.
/// </summary>
/// <param name="filename">The filename to use</param>
/// <param name="append">Whether to append to the file, or overwrite</param>
/// <param name="encoding">The encoding to use</param>
/// <remarks>
/// <para>
/// Open the file specified and prepare for logging.
/// No writes will be made until <see cref="AcquireLock"/> is called.
/// Must be called before any calls to <see cref="AcquireLock"/>,
/// <see cref="ReleaseLock"/> and <see cref="CloseFile"/>.
/// </para>
/// </remarks>
public override void OpenFile(string filename, bool append,Encoding encoding)
{
try
{
m_stream = CreateStream(filename, append, FileShare.Read);
}
catch (Exception e1)
{
CurrentAppender.ErrorHandler.Error("Unable to acquire lock on file "+filename+". "+e1.Message);
}
}
/// <summary>
/// Close the file
/// </summary>
/// <remarks>
/// <para>
/// Close the file. No further writes will be made.
/// </para>
/// </remarks>
public override void CloseFile()
{
CloseStream(m_stream);
m_stream = null;
}
/// <summary>
/// Acquire the lock on the file
/// </summary>
/// <returns>A stream that is ready to be written to.</returns>
/// <remarks>
/// <para>
/// Does nothing. The lock is already taken
/// </para>
/// </remarks>
public override Stream AcquireLock()
{
return m_stream;
}
/// <summary>
/// Release the lock on the file
/// </summary>
/// <remarks>
/// <para>
/// Does nothing. The lock will be released when the file is closed.
/// </para>
/// </remarks>
public override void ReleaseLock()
{
//NOP
}
}
/// <summary>
/// Acquires the file lock for each write
/// </summary>
/// <remarks>
/// <para>
/// Opens the file once for each <see cref="AcquireLock"/>/<see cref="ReleaseLock"/> cycle,
/// thus holding the lock for the minimal amount of time. This method of locking
/// is considerably slower than <see cref="FileAppender.ExclusiveLock"/> but allows
/// other processes to move/delete the log file whilst logging continues.
/// </para>
/// </remarks>
public class MinimalLock : LockingModelBase
{
private string m_filename;
private bool m_append;
private Stream m_stream=null;
/// <summary>
/// Prepares to open the file when the first message is logged.
/// </summary>
/// <param name="filename">The filename to use</param>
/// <param name="append">Whether to append to the file, or overwrite</param>
/// <param name="encoding">The encoding to use</param>
/// <remarks>
/// <para>
/// Open the file specified and prepare for logging.
/// No writes will be made until <see cref="AcquireLock"/> is called.
/// Must be called before any calls to <see cref="AcquireLock"/>,
/// <see cref="ReleaseLock"/> and <see cref="CloseFile"/>.
/// </para>
/// </remarks>
public override void OpenFile(string filename, bool append, Encoding encoding)
{
m_filename=filename;
m_append=append;
}
/// <summary>
/// Close the file
/// </summary>
/// <remarks>
/// <para>
/// Close the file. No further writes will be made.
/// </para>
/// </remarks>
public override void CloseFile()
{
// NOP
}
/// <summary>
/// Acquire the lock on the file
/// </summary>
/// <returns>A stream that is ready to be written to.</returns>
/// <remarks>
/// <para>
/// Acquire the lock on the file in preparation for writing to it.
/// Return a stream pointing to the file. <see cref="ReleaseLock"/>
/// must be called to release the lock on the output file.
/// </para>
/// </remarks>
public override Stream AcquireLock()
{
if (m_stream==null)
{
try
{
m_stream = CreateStream(m_filename, m_append, FileShare.Read);
m_append = true;
}
catch (Exception e1)
{
CurrentAppender.ErrorHandler.Error("Unable to acquire lock on file "+m_filename+". "+e1.Message);
}
}
return m_stream;
}
/// <summary>
/// Release the lock on the file
/// </summary>
/// <remarks>
/// <para>
/// Release the lock on the file. No further writes will be made to the
/// stream until <see cref="AcquireLock"/> is called again.
/// </para>
/// </remarks>
public override void ReleaseLock()
{
CloseStream(m_stream);
m_stream = null;
}
}
#if !NETCF
/// <summary>
/// Provides cross-process file locking.
/// </summary>
/// <author>Ron Grabowski</author>
/// <author>Steve Wranovsky</author>
public class InterProcessLock : LockingModelBase
{
private Mutex m_mutex = null;
private bool m_mutexClosed = false;
private Stream m_stream = null;
/// <summary>
/// Open the file specified and prepare for logging.
/// </summary>
/// <param name="filename">The filename to use</param>
/// <param name="append">Whether to append to the file, or overwrite</param>
/// <param name="encoding">The encoding to use</param>
/// <remarks>
/// <para>
/// Open the file specified and prepare for logging.
/// No writes will be made until <see cref="AcquireLock"/> is called.
/// Must be called before any calls to <see cref="AcquireLock"/>,
/// <see cref="ReleaseLock"/> and <see cref="CloseFile"/>.
/// </para>
/// </remarks>
#if NET_4_0
[System.Security.SecuritySafeCritical]
#endif
public override void OpenFile(string filename, bool append, Encoding encoding)
{
try
{
m_stream = CreateStream(filename, append, FileShare.ReadWrite);
string mutexFriendlyFilename = filename
.Replace("\\", "_")
.Replace(":", "_")
.Replace("/", "_");
m_mutex = new Mutex(false, mutexFriendlyFilename);
}
catch (Exception e1)
{
CurrentAppender.ErrorHandler.Error("Unable to acquire lock on file " + filename + ". " + e1.Message);
}
}
/// <summary>
/// Close the file
/// </summary>
/// <remarks>
/// <para>
/// Close the file. No further writes will be made.
/// </para>
/// </remarks>
public override void CloseFile()
{
try {
CloseStream(m_stream);
m_stream = null;
}
finally {
m_mutex.ReleaseMutex();
m_mutex.Close();
m_mutexClosed = true;
}
}
/// <summary>
/// Acquire the lock on the file
/// </summary>
/// <returns>A stream that is ready to be written to.</returns>
/// <remarks>
/// <para>
/// Does nothing. The lock is already taken
/// </para>
/// </remarks>
public override Stream AcquireLock()
{
if (m_mutex != null) {
// TODO: add timeout?
m_mutex.WaitOne();
// should always be true (and fast) for FileStream
if (m_stream.CanSeek) {
m_stream.Seek(0, SeekOrigin.End);
}
}
return m_stream;
}
/// <summary>
///
/// </summary>
public override void ReleaseLock()
{
if (m_mutexClosed == false && m_mutex != null)
{
m_mutex.ReleaseMutex();
}
}
}
#endif
#endregion Locking Models
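// Illustrative sketch (not part of this appender): programmatic configuration selecting a
// locking model, as described by the LockingModel property below. The PatternLayout pattern
// string and the way the appender is attached to a logger repository are assumptions based on
// the usual log4net-style API and are not defined in this file.
//
//     var appender = new FileAppender();
//     appender.File = "logs/app.log";                          // relative to the application base directory
//     appender.AppendToFile = true;
//     appender.Encoding = Encoding.UTF8;
//     appender.LockingModel = new FileAppender.MinimalLock();  // lock only while each event is written
//     appender.Layout = new PatternLayout("%date [%thread] %-5level %logger - %message%newline");
//     appender.ActivateOptions();                              // opens the file (or retries on first append)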
#region Public Instance Constructors
/// <summary>
/// Default constructor
/// </summary>
/// <remarks>
/// <para>
/// Default constructor
/// </para>
/// </remarks>
public FileAppender()
{
}
/// <summary>
/// Construct a new appender using the layout, file and append mode.
/// </summary>
/// <param name="layout">the layout to use with this appender</param>
/// <param name="filename">the full path to the file to write to</param>
/// <param name="append">flag to indicate if the file should be appended to</param>
/// <remarks>
/// <para>
/// Obsolete constructor.
/// </para>
/// </remarks>
[Obsolete("Instead use the default constructor and set the Layout, File & AppendToFile properties")]
public FileAppender(ILayout layout, string filename, bool append)
{
Layout = layout;
File = filename;
AppendToFile = append;
ActivateOptions();
}
/// <summary>
/// Construct a new appender using the layout and file specified.
/// The file will be appended to.
/// </summary>
/// <param name="layout">the layout to use with this appender</param>
/// <param name="filename">the full path to the file to write to</param>
/// <remarks>
/// <para>
/// Obsolete constructor.
/// </para>
/// </remarks>
[Obsolete("Instead use the default constructor and set the Layout & File properties")]
public FileAppender(ILayout layout, string filename) : this(layout, filename, true)
{
}
#endregion Public Instance Constructors
#region Public Instance Properties
/// <summary>
/// Gets or sets the path to the file that logging will be written to.
/// </summary>
/// <value>
/// The path to the file that logging will be written to.
/// </value>
/// <remarks>
/// <para>
/// If the path is relative it is taken as relative from
/// the application base directory.
/// </para>
/// </remarks>
virtual public string File
{
get { return m_fileName; }
set { m_fileName = value; }
}
/// <summary>
/// Gets or sets a flag that indicates whether the file should be
/// appended to or overwritten.
/// </summary>
/// <value>
/// Indicates whether the file should be appended to or overwritten.
/// </value>
/// <remarks>
/// <para>
/// If the value is set to false then the file will be overwritten; if
/// it is set to true then the file will be appended to.
/// </para>
/// The default value is true.
/// </remarks>
public bool AppendToFile
{
get { return m_appendToFile; }
set { m_appendToFile = value; }
}
/// <summary>
/// Gets or sets <see cref="Encoding"/> used to write to the file.
/// </summary>
/// <value>
/// The <see cref="Encoding"/> used to write to the file.
/// </value>
/// <remarks>
/// <para>
/// The default encoding set is <see cref="System.Text.Encoding.Default"/>
/// which is the encoding for the system's current ANSI code page.
/// </para>
/// </remarks>
public Encoding Encoding
{
get { return m_encoding; }
set { m_encoding = value; }
}
/// <summary>
/// Gets or sets the <see cref="SecurityContext"/> used to write to the file.
/// </summary>
/// <value>
/// The <see cref="SecurityContext"/> used to write to the file.
/// </value>
/// <remarks>
/// <para>
/// Unless a <see cref="SecurityContext"/> is specified here for this appender,
/// the <see cref="SecurityContextProvider.DefaultProvider"/> is queried for the
/// security context to use. The default behavior is to use the security context
/// of the current thread.
/// </para>
/// </remarks>
public SecurityContext SecurityContext
{
get { return m_securityContext; }
set { m_securityContext = value; }
}
#if NETCF
/// <summary>
/// Gets or sets the <see cref="FileAppender.LockingModel"/> used to handle locking of the file.
/// </summary>
/// <value>
/// The <see cref="FileAppender.LockingModel"/> used to lock the file.
/// </value>
/// <remarks>
/// <para>
/// Gets or sets the <see cref="FileAppender.LockingModel"/> used to handle locking of the file.
/// </para>
/// <para>
/// There are two built in locking models, <see cref="FileAppender.ExclusiveLock"/> and <see cref="FileAppender.MinimalLock"/>.
/// The first locks the file from the start of logging to the end; the
/// second locks only for the minimal amount of time when logging each message.
/// </para>
/// <para>
/// The default locking model is the <see cref="FileAppender.ExclusiveLock"/>.
/// </para>
/// </remarks>
#else
/// <summary>
/// Gets or sets the <see cref="FileAppender.LockingModel"/> used to handle locking of the file.
/// </summary>
/// <value>
/// The <see cref="FileAppender.LockingModel"/> used to lock the file.
/// </value>
/// <remarks>
/// <para>
/// Gets or sets the <see cref="FileAppender.LockingModel"/> used to handle locking of the file.
/// </para>
/// <para>
/// There are three built in locking models, <see cref="FileAppender.ExclusiveLock"/>, <see cref="FileAppender.MinimalLock"/> and <see cref="FileAppender.InterProcessLock"/>.
/// The first locks the file from the start of logging to the end, the
/// second locks only for the minimal amount of time when logging each message
/// and the last synchronizes processes using a named system wide Mutex.
/// </para>
/// <para>
/// The default locking model is the <see cref="FileAppender.ExclusiveLock"/>.
/// </para>
/// </remarks>
#endif
public FileAppender.LockingModelBase LockingModel
{
get { return m_lockingModel; }
set { m_lockingModel = value; }
}
#endregion Public Instance Properties
#region Override implementation of AppenderSkeleton
/// <summary>
/// Activate the options on the file appender.
/// </summary>
/// <remarks>
/// <para>
/// This is part of the <see cref="IOptionHandler"/> delayed object
/// activation scheme. The <see cref="ActivateOptions"/> method must
/// be called on this object after the configuration properties have
/// been set. Until <see cref="ActivateOptions"/> is called this
/// object is in an undefined state and must not be used.
/// </para>
/// <para>
/// If any of the configuration properties are modified then
/// <see cref="ActivateOptions"/> must be called again.
/// </para>
/// <para>
/// This will cause the file to be opened.
/// </para>
/// </remarks>
override public void ActivateOptions()
{
base.ActivateOptions();
if (m_securityContext == null)
{
m_securityContext = SecurityContextProvider.DefaultProvider.CreateSecurityContext(this);
}
if (m_lockingModel == null)
{
m_lockingModel = new FileAppender.ExclusiveLock();
}
m_lockingModel.CurrentAppender=this;
// Check the file name before dereferencing it so that a missing File option
// falls through to the warning below instead of throwing a NullReferenceException.
if (m_fileName != null)
{
using(SecurityContext.Impersonate(this))
{
m_fileName = ConvertToFullPath(m_fileName.Trim());
}
SafeOpenFile(m_fileName, m_appendToFile);
}
else
{
LogLog.Warn(declaringType, "FileAppender: File option not set for appender ["+Name+"].");
LogLog.Warn(declaringType, "FileAppender: Are you using FileAppender instead of ConsoleAppender?");
}
}
#endregion Override implementation of AppenderSkeleton
#region Override implementation of TextWriterAppender
/// <summary>
/// Closes any previously opened file and calls the parent's <see cref="TextWriterAppender.Reset"/>.
/// </summary>
/// <remarks>
/// <para>
/// Resets the filename and the file stream.
/// </para>
/// </remarks>
override protected void Reset()
{
base.Reset();
m_fileName = null;
}
/// <summary>
/// Called to initialize the file writer
/// </summary>
/// <remarks>
/// <para>
/// Will be called for each logged message until the file is
/// successfully opened.
/// </para>
/// </remarks>
override protected void PrepareWriter()
{
SafeOpenFile(m_fileName, m_appendToFile);
}
/// <summary>
/// This method is called by the <see cref="M:AppenderSkeleton.DoAppend(LoggingEvent)"/>
/// method.
/// </summary>
/// <param name="loggingEvent">The event to log.</param>
/// <remarks>
/// <para>
/// Writes a log statement to the output stream if the output stream exists
/// and is writable.
/// </para>
/// <para>
/// The format of the output will depend on the appender's layout.
/// </para>
/// </remarks>
override protected void Append(LoggingEvent loggingEvent)
{
if (m_stream.AcquireLock())
{
try
{
base.Append(loggingEvent);
}
finally
{
m_stream.ReleaseLock();
}
}
}
/// <summary>
/// This method is called by the <see cref="M:AppenderSkeleton.DoAppend(LoggingEvent[])"/>
/// method.
/// </summary>
/// <param name="loggingEvents">The array of events to log.</param>
/// <remarks>
/// <para>
/// Acquires the output file lock once before writing all the events to
/// the stream.
/// </para>
/// </remarks>
override protected void Append(LoggingEvent[] loggingEvents)
{
if (m_stream.AcquireLock())
{
try
{
base.Append(loggingEvents);
}
finally
{
m_stream.ReleaseLock();
}
}
}
/// <summary>
/// Writes a footer as produced by the embedded layout's <see cref="ILayout.Footer"/> property.
/// </summary>
/// <remarks>
/// <para>
/// Writes a footer as produced by the embedded layout's <see cref="ILayout.Footer"/> property.
/// </para>
/// </remarks>
protected override void WriteFooter()
{
if (m_stream!=null)
{
//WriteFooter can be called even before a file is opened
m_stream.AcquireLock();
try
{
base.WriteFooter();
}
finally
{
m_stream.ReleaseLock();
}
}
}
/// <summary>
/// Writes a header produced by the embedded layout's <see cref="ILayout.Header"/> property.
/// </summary>
/// <remarks>
/// <para>
/// Writes a header produced by the embedded layout's <see cref="ILayout.Header"/> property.
/// </para>
/// </remarks>
protected override void WriteHeader()
{
if (m_stream!=null)
{
if (m_stream.AcquireLock())
{
try
{
base.WriteHeader();
}
finally
{
m_stream.ReleaseLock();
}
}
}
}
/// <summary>
/// Closes the underlying <see cref="TextWriter"/>.
/// </summary>
/// <remarks>
/// <para>
/// Closes the underlying <see cref="TextWriter"/>.
/// </para>
/// </remarks>
protected override void CloseWriter()
{
if (m_stream!=null)
{
m_stream.AcquireLock();
try
{
base.CloseWriter();
}
finally
{
m_stream.ReleaseLock();
}
}
}
#endregion Override implementation of TextWriterAppender
#region Public Instance Methods
/// <summary>
/// Closes the previously opened file.
/// </summary>
/// <remarks>
/// <para>
/// Writes the <see cref="ILayout.Footer"/> to the file and then
/// closes the file.
/// </para>
/// </remarks>
protected void CloseFile()
{
WriteFooterAndCloseWriter();
}
#endregion Public Instance Methods
#region Protected Instance Methods
/// <summary>
/// Sets and <i>opens</i> the file where the log output will go. The specified file must be writable.
/// </summary>
/// <param name="fileName">The path to the log file. Must be a fully qualified path.</param>
/// <param name="append">If true will append to fileName. Otherwise will truncate fileName</param>
/// <remarks>
/// <para>
/// Calls <see cref="OpenFile"/> but guarantees not to throw an exception.
/// Errors are passed to the <see cref="TextWriterAppender.ErrorHandler"/>.
/// </para>
/// </remarks>
virtual protected void SafeOpenFile(string fileName, bool append)
{
try
{
OpenFile(fileName, append);
}
catch(Exception e)
{
ErrorHandler.Error("OpenFile("+fileName+","+append+") call failed.", e, ErrorCode.FileOpenFailure);
}
}
/// <summary>
/// Sets and <i>opens</i> the file where the log output will go. The specified file must be writable.
/// </summary>
/// <param name="fileName">The path to the log file. Must be a fully qualified path.</param>
/// <param name="append">If true will append to fileName. Otherwise will truncate fileName</param>
/// <remarks>
/// <para>
/// If there was already an opened file, then the previous file
/// is closed first.
/// </para>
/// <para>
/// This method will ensure that the directory structure
/// for the <paramref name="fileName"/> specified exists.
/// </para>
/// </remarks>
virtual protected void OpenFile(string fileName, bool append)
{
if (LogLog.IsErrorEnabled)
{
// Internal check that the fileName passed in is a rooted path
bool isPathRooted = false;
using(SecurityContext.Impersonate(this))
{
isPathRooted = Path.IsPathRooted(fileName);
}
if (!isPathRooted)
{
LogLog.Error(declaringType, "INTERNAL ERROR. OpenFile("+fileName+"): File name is not fully qualified.");
}
}
lock(this)
{
Reset();
LogLog.Debug(declaringType, "Opening file for writing ["+fileName+"] append ["+append+"]");
// Save these for later, allowing retries if file open fails
m_fileName = fileName;
m_appendToFile = append;
LockingModel.CurrentAppender=this;
LockingModel.OpenFile(fileName,append,m_encoding);
m_stream=new LockingStream(LockingModel);
if (m_stream != null)
{
m_stream.AcquireLock();
try
{
SetQWForFiles(new StreamWriter(m_stream, m_encoding));
}
finally
{
m_stream.ReleaseLock();
}
}
WriteHeader();
}
}
/// <summary>
/// Sets the quiet writer used for file output
/// </summary>
/// <param name="fileStream">the file stream that has been opened for writing</param>
/// <remarks>
/// <para>
/// This implementation of <see cref="M:SetQWForFiles(Stream)"/> creates a <see cref="StreamWriter"/>
/// over the <paramref name="fileStream"/> and passes it to the
/// <see cref="M:SetQWForFiles(TextWriter)"/> method.
/// </para>
/// <para>
/// This method can be overridden by sub classes that want to wrap the
/// <see cref="Stream"/> in some way, for example to encrypt the output
/// data using a <c>System.Security.Cryptography.CryptoStream</c>.
/// </para>
/// </remarks>
virtual protected void SetQWForFiles(Stream fileStream)
{
SetQWForFiles(new StreamWriter(fileStream, m_encoding));
}
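// Illustrative sketch (added for documentation; not part of the original
// source): a subclass that wraps the file stream before the writer is created,
// as suggested by the remarks above. GZipStream is used here only as an example
// of a wrapping stream; a CryptoStream could be substituted in the same place.
// Shown as a comment so the class body is unchanged.
//
//   public class CompressedFileAppender : FileAppender
//   {
//       protected override void SetQWForFiles(System.IO.Stream fileStream)
//       {
//           var wrapped = new System.IO.Compression.GZipStream(
//               fileStream, System.IO.Compression.CompressionMode.Compress);
//           SetQWForFiles(new System.IO.StreamWriter(wrapped, Encoding));
//       }
//   }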
/// <summary>
/// Sets the quiet writer being used.
/// </summary>
/// <param name="writer">the writer over the file stream that has been opened for writing</param>
/// <remarks>
/// <para>
/// This method can be overridden by sub classes that want to
/// wrap the <see cref="TextWriter"/> in some way.
/// </para>
/// </remarks>
virtual protected void SetQWForFiles(TextWriter writer)
{
QuietWriter = new QuietTextWriter(writer, ErrorHandler);
}
#endregion Protected Instance Methods
#region Protected Static Methods
/// <summary>
/// Convert a path into a fully qualified path.
/// </summary>
/// <param name="path">The path to convert.</param>
/// <returns>The fully qualified path.</returns>
/// <remarks>
/// <para>
/// Converts the path specified to a fully
/// qualified path. If the path is relative it is
/// taken as relative from the application base
/// directory.
/// </para>
/// </remarks>
protected static string ConvertToFullPath(string path)
{
return SystemInfo.ConvertToFullPath(path);
}
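// Illustrative behaviour (assumption: SystemInfo.ConvertToFullPath resolves
// relative paths against the application base directory):
//   ConvertToFullPath(@"logs\app.log")   -> e.g. @"C:\MyApp\logs\app.log"
//   ConvertToFullPath(@"C:\tmp\app.log") -> @"C:\tmp\app.log" (already rooted, returned unchanged)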
#endregion Protected Static Methods
#region Private Instance Fields
/// <summary>
/// Flag to indicate if we should append to the file
/// or overwrite the file. The default is to append.
/// </summary>
private bool m_appendToFile = true;
/// <summary>
/// The name of the log file.
/// </summary>
private string m_fileName = null;
/// <summary>
/// The encoding to use for the file stream.
/// </summary>
private Encoding m_encoding = Encoding.Default;
/// <summary>
/// The security context to use for privileged calls
/// </summary>
private SecurityContext m_securityContext;
/// <summary>
/// The stream to log to. Has added locking semantics
/// </summary>
private FileAppender.LockingStream m_stream = null;
/// <summary>
/// The locking model to use
/// </summary>
private FileAppender.LockingModelBase m_lockingModel = new FileAppender.ExclusiveLock();
#endregion Private Instance Fields
#region Private Static Fields
/// <summary>
/// The fully qualified type of the FileAppender class.
/// </summary>
/// <remarks>
/// Used by the internal logger to record the Type of the
/// log message.
/// </remarks>
private readonly static Type declaringType = typeof(FileAppender);
#endregion Private Static Fields
}
}
// Copyright (c) Microsoft Corporation
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
namespace Microsoft.Xbox.Services.TitleStorage
{
using global::System;
using global::System.Runtime.InteropServices;
using Microsoft.Xbox.Services.System;
/// <summary>
/// Metadata about a blob.
/// </summary>
public class TitleStorageBlobMetadata
{
internal IntPtr metadataPtr;
/// <summary>
/// Gets the number of bytes of the blob data.
/// </summary>
public ulong Length { get; private set; }
/// <summary>
/// [optional] Timestamp assigned by the client.
/// </summary>
public DateTimeOffset ClientTimeStamp { get; private set; }
/// <summary>
/// ETag for the file used in read and write requests.
/// </summary>
public string ETag { get; private set; }
/// <summary>
/// [optional] Friendly display name to show in app UI.
/// </summary>
public string DisplayName { get; private set; }
/// <summary>
/// Type of storage.
/// </summary>
public TitleStorageType StorageType { get; private set; }
/// <summary>
/// Type of blob data. Possible values are: Binary, Json, and Config.
/// </summary>
public TitleStorageBlobType BlobType { get; private set; }
/// <summary>
/// Blob path is a unique string that conforms to a SubPath\file format (example: "foo\bar\blob.txt").
/// </summary>
public string BlobPath { get; private set; }
/// <summary>
/// The service configuration ID of the title
/// </summary>
public string ServiceConfigurationId { get; private set; }
/// <summary>
/// The Xbox User ID of the player that this file belongs to.
/// This value is null for Global and Session files.
/// </summary>
public string XboxUserId { get; private set; }
/// <summary>
/// Initializes a new instance of the <see cref="TitleStorageBlobMetadata"/> class.
/// </summary>
/// <param name="serviceConfigurationId">The service configuration ID (SCID) of the title</param>
/// <param name="storageType">Type of storage.</param>
/// <param name="blobPath">Blob path is a unique string that conforms to a SubPath\file format (example: "foo\bar\blob.txt").</param>
/// <param name="blobType">Type of blob data. Possible values are: Binary, Json, and Config.</param>
/// <param name="xboxUserId">The Xbox User ID of the title storage to enumerate. Ignored when dealing with GlobalStorage, so passing nullptr is acceptable in that case. (Optional)</param>
/// <param name="displayName">[optional] Friendly display name to show in app UI.</param>
/// <param name="eTag">ETag for the file used in read and write requests.</param>
/// <param name="length">Length of the content of the blob</param>
public TitleStorageBlobMetadata(string serviceConfigurationId, TitleStorageType storageType, string blobPath, TitleStorageBlobType blobType, string xboxUserId, string displayName, string eTag, ulong length)
{
if (string.IsNullOrEmpty(blobPath))
throw new ArgumentNullException("blobPath");
this.ServiceConfigurationId = serviceConfigurationId;
this.StorageType = storageType;
this.BlobType = blobType;
this.BlobPath = blobPath;
this.DisplayName = displayName;
this.ETag = eTag;
this.Length = length;
this.XboxUserId = xboxUserId;
CreateCMetadata();
}
/// <summary>
/// Initializes a new instance of the <see cref="TitleStorageBlobMetadata"/> class.
/// </summary>
/// <param name="serviceConfigurationId">The service configuration ID (SCID) of the title</param>
/// <param name="storageType">Type of storage.</param>
/// <param name="blobPath">Blob path is a unique string that conforms to a SubPath\file format (example: "foo\bar\blob.txt").</param>
/// <param name="blobType">Type of blob data. Possible values are: Binary, Json, and Config.</param>
/// <param name="xboxUserId">The Xbox User ID of the title storage to enumerate. Ignored when dealing with GlobalStorage, so passing nullptr is acceptable in that case. (Optional)</param>
/// <param name="displayName">[optional] Friendly display name to show in app UI.</param>
/// <param name="eTag">ETag for the file used in read and write requests.</param>
public TitleStorageBlobMetadata(string serviceConfigurationId, TitleStorageType storageType, string blobPath, TitleStorageBlobType blobType, string xboxUserId, string displayName, string eTag)
{
if (string.IsNullOrEmpty(blobPath))
throw new ArgumentNullException("blobPath");
this.ServiceConfigurationId = serviceConfigurationId;
this.StorageType = storageType;
this.BlobType = blobType;
this.BlobPath = blobPath;
this.DisplayName = displayName;
this.ETag = eTag;
this.XboxUserId = xboxUserId;
CreateCMetadata();
}
/// <summary>
/// Initializes a new instance of the <see cref="TitleStorageBlobMetadata"/> class.
/// </summary>
/// <param name="serviceConfigurationId">The service configuration ID (SCID) of the title</param>
/// <param name="storageType">Type of storage.</param>
/// <param name="blobPath">Blob path is a unique string that conforms to a SubPath\file format (example: "foo\bar\blob.txt").</param>
/// <param name="blobType">Type of blob data. Possible values are: Binary, Json, and Config.</param>
/// <param name="xboxUserId">The Xbox User ID of the title storage to enumerate. Ignored when dealing with GlobalStorage, so passing nullptr is acceptable in that case. (Optional)</param>
public TitleStorageBlobMetadata(string serviceConfigurationId, TitleStorageType storageType, string blobPath, TitleStorageBlobType blobType, string xboxUserId)
{
if (string.IsNullOrEmpty(blobPath))
throw new ArgumentNullException("blobPath");
this.ServiceConfigurationId = serviceConfigurationId;
this.StorageType = storageType;
this.BlobType = blobType;
this.BlobPath = blobPath;
this.XboxUserId = xboxUserId;
CreateCMetadata();
}
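// Illustrative usage sketch (added for documentation; not part of the original
// source): constructing metadata for a JSON blob in global storage. The SCID
// below is a placeholder, and the enum member names (GlobalStorage, Json) are
// taken from the remarks above; treat them as assumptions about the
// TitleStorageType/TitleStorageBlobType enums.
//
//   var metadata = new TitleStorageBlobMetadata(
//       serviceConfigurationId: "00000000-0000-0000-0000-000000000000",  // placeholder SCID
//       storageType: TitleStorageType.GlobalStorage,
//       blobPath: @"foo\bar\blob.txt",
//       blobType: TitleStorageBlobType.Json,
//       xboxUserId: null);                                               // ignored for GlobalStorage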
internal TitleStorageBlobMetadata(IntPtr structPtr)
{
this.metadataPtr = structPtr;
this.Refresh();
}
~TitleStorageBlobMetadata()
{
TitleStorageReleaseBlobMetadata(metadataPtr);
}
private void CreateCMetadata()
{
var scid = MarshalingHelpers.StringToHGlobalUtf8(this.ServiceConfigurationId);
var path = MarshalingHelpers.StringToHGlobalUtf8(this.BlobPath);
var xuid = MarshalingHelpers.StringToHGlobalUtf8(this.XboxUserId);
var displayName = MarshalingHelpers.StringToHGlobalUtf8(this.DisplayName);
var etag = MarshalingHelpers.StringToHGlobalUtf8(this.ETag);
IntPtr clientTimePtr = IntPtr.Zero;
// ClientTimeStamp is a non-nullable DateTimeOffset, so a null comparison is always true;
// compare against the default value instead to keep the timestamp genuinely optional.
if (this.ClientTimeStamp != default(DateTimeOffset))
{
var clientTime = this.ClientTimeStamp.ToUnixTimeSeconds();
clientTimePtr = Marshal.AllocHGlobal(MarshalingHelpers.SizeOf<UInt64>());
Marshal.WriteInt64(clientTimePtr, clientTime);
}
TitleStorageCreateBlobMetadata(scid, StorageType, path, BlobType, xuid, displayName, etag, clientTimePtr, out metadataPtr);
Marshal.FreeHGlobal(scid);
Marshal.FreeHGlobal(path);
Marshal.FreeHGlobal(xuid);
Marshal.FreeHGlobal(displayName);
Marshal.FreeHGlobal(etag);
Marshal.FreeHGlobal(clientTimePtr);
}
internal void Refresh()
{
var CStruct = MarshalingHelpers.PtrToStructure<XSAPI_TITLE_STORAGE_BLOB_METADATA>(this.metadataPtr);
this.ServiceConfigurationId = MarshalingHelpers.Utf8ToString(CStruct.serviceConfigurationId);
this.BlobPath = MarshalingHelpers.Utf8ToString(CStruct.blobPath);
this.BlobType = CStruct.blobType;
this.StorageType = CStruct.storageType;
this.DisplayName = MarshalingHelpers.Utf8ToString(CStruct.displayName);
this.ETag = MarshalingHelpers.Utf8ToString(CStruct.ETag);
this.Length = CStruct.length;
this.XboxUserId = MarshalingHelpers.Utf8ToString(CStruct.xboxUserId);
this.ClientTimeStamp = MarshalingHelpers.FromUnixTimeSeconds(CStruct.clientTimestamp);
}
[DllImport(XboxLive.FlatCDllName, CallingConvention = CallingConvention.Cdecl)]
private static extern XSAPI_RESULT TitleStorageCreateBlobMetadata(
IntPtr serviceConfigurationId,
TitleStorageType storageType,
IntPtr blobPath,
TitleStorageBlobType blobType,
IntPtr xboxUserId,
IntPtr displayName,
IntPtr etag,
IntPtr pClientTimestamp,
out IntPtr ppMetadata);
[DllImport(XboxLive.FlatCDllName, CallingConvention = CallingConvention.Cdecl)]
private static extern XSAPI_RESULT TitleStorageReleaseBlobMetadata(
IntPtr pMetadata);
}
[StructLayout(LayoutKind.Sequential)]
internal class XSAPI_TITLE_STORAGE_BLOB_METADATA
{
public IntPtr blobPath;
public TitleStorageBlobType blobType;
public TitleStorageType storageType;
public IntPtr displayName;
public IntPtr ETag;
public Int64 clientTimestamp;
public UInt64 length;
public IntPtr serviceConfigurationId;
public IntPtr xboxUserId;
}
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 1.0.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.CustomerInsights
{
using Azure;
using Management;
using Rest;
using Rest.Azure;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// WidgetTypesOperations operations.
/// </summary>
internal partial class WidgetTypesOperations : IServiceOperations<CustomerInsightsManagementClient>, IWidgetTypesOperations
{
/// <summary>
/// Initializes a new instance of the WidgetTypesOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
internal WidgetTypesOperations(CustomerInsightsManagementClient client)
{
if (client == null)
{
throw new System.ArgumentNullException("client");
}
Client = client;
}
/// <summary>
/// Gets a reference to the CustomerInsightsManagementClient
/// </summary>
public CustomerInsightsManagementClient Client { get; private set; }
/// <summary>
/// Gets all available widget types in the specified hub.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='hubName'>
/// The name of the hub.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IPage<WidgetTypeResourceFormat>>> ListByHubWithHttpMessagesAsync(string resourceGroupName, string hubName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (hubName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "hubName");
}
if (Client.ApiVersion == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("hubName", hubName);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "ListByHub", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CustomerInsights/hubs/{hubName}/widgetTypes").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{hubName}", System.Uri.EscapeDataString(hubName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (Client.ApiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new System.Net.Http.HttpRequestMessage();
System.Net.Http.HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new System.Net.Http.HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<IPage<WidgetTypeResourceFormat>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<WidgetTypeResourceFormat>>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
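// Illustrative usage sketch (added for documentation; not part of the generated
// client): enumerating widget types page by page. Assumes a configured
// CustomerInsightsManagementClient named "client" that exposes these operations
// through a WidgetTypes property (the property name is an assumption typical of
// AutoRest-generated clients); the resource group and hub names are placeholders.
//
//   var page = (await client.WidgetTypes.ListByHubWithHttpMessagesAsync(
//       "myResourceGroup", "myHub")).Body;
//   while (page != null)
//   {
//       foreach (var widgetType in page)
//       {
//           // inspect each WidgetTypeResourceFormat here
//       }
//       page = string.IsNullOrEmpty(page.NextPageLink)
//           ? null
//           : (await client.WidgetTypes.ListByHubNextWithHttpMessagesAsync(page.NextPageLink)).Body;
//   }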
/// <summary>
/// Gets a widget type in the specified hub.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='hubName'>
/// The name of the hub.
/// </param>
/// <param name='widgetTypeName'>
/// The name of the widget type.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<WidgetTypeResourceFormat>> GetWithHttpMessagesAsync(string resourceGroupName, string hubName, string widgetTypeName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (hubName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "hubName");
}
if (widgetTypeName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "widgetTypeName");
}
if (Client.ApiVersion == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("hubName", hubName);
tracingParameters.Add("widgetTypeName", widgetTypeName);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CustomerInsights/hubs/{hubName}/widgetTypes/{widgetTypeName}").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{hubName}", System.Uri.EscapeDataString(hubName));
_url = _url.Replace("{widgetTypeName}", System.Uri.EscapeDataString(widgetTypeName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (Client.ApiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new System.Net.Http.HttpRequestMessage();
System.Net.Http.HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new System.Net.Http.HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<WidgetTypeResourceFormat>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<WidgetTypeResourceFormat>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Gets all available widget types in the specified hub.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IPage<WidgetTypeResourceFormat>>> ListByHubNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (nextPageLink == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("nextPageLink", nextPageLink);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "ListByHubNext", tracingParameters);
}
// Construct URL
string _url = "{nextLink}";
_url = _url.Replace("{nextLink}", nextPageLink);
List<string> _queryParameters = new List<string>();
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new System.Net.Http.HttpRequestMessage();
System.Net.Http.HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new System.Net.Http.HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<IPage<WidgetTypeResourceFormat>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<WidgetTypeResourceFormat>>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
}
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.ObjectModel;
using System.Diagnostics;
using Microsoft.VisualStudio.Debugger.Evaluation;
using Microsoft.VisualStudio.Debugger.Evaluation.ClrCompilation;
using Microsoft.VisualStudio.Debugger.Metadata;
using Type = Microsoft.VisualStudio.Debugger.Metadata.Type;
namespace Microsoft.CodeAnalysis.ExpressionEvaluator
{
/// <summary>
/// Type member expansion.
/// </summary>
/// <remarks>
/// Includes accesses to static members with instance receivers and
/// accesses to instance members with dynamic receivers.
/// </remarks>
internal sealed class MemberExpansion : Expansion
{
internal static Expansion CreateExpansion(
DkmInspectionContext inspectionContext,
Type declaredType,
DkmClrValue value,
ExpansionFlags flags,
Predicate<MemberInfo> predicate,
Formatter formatter)
{
var runtimeType = value.Type.GetLmrType();
// Primitives, enums and null values with a declared type that is an interface have no visible members.
Debug.Assert(!runtimeType.IsInterface || value.IsNull);
if (formatter.IsPredefinedType(runtimeType) || runtimeType.IsEnum || runtimeType.IsInterface)
{
return null;
}
var expansions = ArrayBuilder<Expansion>.GetInstance();
// From the members, collect the fields and properties,
// separated into static and instance members.
var staticMembers = ArrayBuilder<MemberAndDeclarationInfo>.GetInstance();
var instanceMembers = ArrayBuilder<MemberAndDeclarationInfo>.GetInstance();
var appDomain = value.Type.AppDomain;
// Expand members. (Ideally, this should be done lazily.)
var allMembers = ArrayBuilder<MemberAndDeclarationInfo>.GetInstance();
var includeInherited = (flags & ExpansionFlags.IncludeBaseMembers) == ExpansionFlags.IncludeBaseMembers;
var hideNonPublic = (inspectionContext.EvaluationFlags & DkmEvaluationFlags.HideNonPublicMembers) == DkmEvaluationFlags.HideNonPublicMembers;
runtimeType.AppendTypeMembers(allMembers, predicate, declaredType, appDomain, includeInherited, hideNonPublic);
foreach (var member in allMembers)
{
var name = member.Name;
if (name.IsCompilerGenerated())
{
continue;
}
if (member.IsStatic)
{
staticMembers.Add(member);
}
else if (!value.IsNull)
{
instanceMembers.Add(member);
}
}
allMembers.Free();
// Public and non-public instance members.
Expansion publicInstanceExpansion;
Expansion nonPublicInstanceExpansion;
GetPublicAndNonPublicMembers(
instanceMembers,
out publicInstanceExpansion,
out nonPublicInstanceExpansion);
// Public and non-public static members.
Expansion publicStaticExpansion;
Expansion nonPublicStaticExpansion;
GetPublicAndNonPublicMembers(
staticMembers,
out publicStaticExpansion,
out nonPublicStaticExpansion);
if (publicInstanceExpansion != null)
{
expansions.Add(publicInstanceExpansion);
}
if ((publicStaticExpansion != null) || (nonPublicStaticExpansion != null))
{
var staticExpansions = ArrayBuilder<Expansion>.GetInstance();
if (publicStaticExpansion != null)
{
staticExpansions.Add(publicStaticExpansion);
}
if (nonPublicStaticExpansion != null)
{
staticExpansions.Add(nonPublicStaticExpansion);
}
Debug.Assert(staticExpansions.Count > 0);
var staticMembersExpansion = new StaticMembersExpansion(
runtimeType,
AggregateExpansion.CreateExpansion(staticExpansions));
staticExpansions.Free();
expansions.Add(staticMembersExpansion);
}
if (value.NativeComPointer != 0)
{
expansions.Add(new NativeViewExpansion());
}
if (nonPublicInstanceExpansion != null)
{
expansions.Add(nonPublicInstanceExpansion);
}
// Include Results View if necessary.
if ((flags & ExpansionFlags.IncludeResultsView) != 0)
{
var resultsViewExpansion = ResultsViewExpansion.CreateExpansion(inspectionContext, value, formatter);
if (resultsViewExpansion != null)
{
expansions.Add(resultsViewExpansion);
}
}
var result = AggregateExpansion.CreateExpansion(expansions);
expansions.Free();
return result;
}
private static void GetPublicAndNonPublicMembers(
ArrayBuilder<MemberAndDeclarationInfo> allMembers,
out Expansion publicExpansion,
out Expansion nonPublicExpansion)
{
var publicExpansions = ArrayBuilder<Expansion>.GetInstance();
var publicMembers = ArrayBuilder<MemberAndDeclarationInfo>.GetInstance();
var nonPublicMembers = ArrayBuilder<MemberAndDeclarationInfo>.GetInstance();
foreach (var member in allMembers)
{
if (member.BrowsableState.HasValue)
{
switch (member.BrowsableState.Value)
{
case DkmClrDebuggerBrowsableAttributeState.RootHidden:
if (publicMembers.Count > 0)
{
publicExpansions.Add(new MemberExpansion(publicMembers.ToArray()));
publicMembers.Clear();
}
publicExpansions.Add(new RootHiddenExpansion(member));
continue;
case DkmClrDebuggerBrowsableAttributeState.Never:
continue;
}
}
if (member.HideNonPublic && !member.IsPublic)
{
nonPublicMembers.Add(member);
}
else
{
publicMembers.Add(member);
}
}
if (publicMembers.Count > 0)
{
publicExpansions.Add(new MemberExpansion(publicMembers.ToArray()));
}
publicMembers.Free();
publicExpansion = AggregateExpansion.CreateExpansion(publicExpansions);
publicExpansions.Free();
nonPublicExpansion = (nonPublicMembers.Count > 0) ?
new NonPublicMembersExpansion(
declaredType: null,
members: new MemberExpansion(nonPublicMembers.ToArray())) :
null;
nonPublicMembers.Free();
}
private readonly MemberAndDeclarationInfo[] _members;
private MemberExpansion(MemberAndDeclarationInfo[] members)
{
Debug.Assert(members != null);
Debug.Assert(members.Length > 0);
_members = members;
}
internal override void GetRows(
ResultProvider resultProvider,
ArrayBuilder<EvalResultDataItem> rows,
DkmInspectionContext inspectionContext,
EvalResultDataItem parent,
DkmClrValue value,
int startIndex,
int count,
bool visitAll,
ref int index)
{
int startIndex2;
int count2;
GetIntersection(startIndex, count, index, _members.Length, out startIndex2, out count2);
int offset = startIndex2 - index;
for (int i = 0; i < count2; i++)
{
rows.Add(GetMemberRow(resultProvider, inspectionContext, value, _members[i + offset], parent));
}
index += _members.Length;
}
private static EvalResultDataItem GetMemberRow(
ResultProvider resultProvider,
DkmInspectionContext inspectionContext,
DkmClrValue value,
MemberAndDeclarationInfo member,
EvalResultDataItem parent)
{
var memberValue = GetMemberValue(value, member, inspectionContext);
return CreateMemberDataItem(
resultProvider,
inspectionContext,
member,
memberValue,
parent,
ExpansionFlags.All);
}
private static DkmClrValue GetMemberValue(DkmClrValue container, MemberAndDeclarationInfo member, DkmInspectionContext inspectionContext)
{
// Note: GetMemberValue() may return special value
// when func-eval of properties is disabled.
return container.GetMemberValue(member.Name, (int)member.MemberType, member.DeclaringType.FullName, inspectionContext);
}
private sealed class RootHiddenExpansion : Expansion
{
private readonly MemberAndDeclarationInfo _member;
internal RootHiddenExpansion(MemberAndDeclarationInfo member)
{
_member = member;
}
internal override void GetRows(
ResultProvider resultProvider,
ArrayBuilder<EvalResultDataItem> rows,
DkmInspectionContext inspectionContext,
EvalResultDataItem parent,
DkmClrValue value,
int startIndex,
int count,
bool visitAll,
ref int index)
{
var memberValue = GetMemberValue(value, _member, inspectionContext);
if (memberValue.IsError())
{
if (InRange(startIndex, count, index))
{
var row = new EvalResultDataItem(Resources.ErrorName, errorMessage: (string)memberValue.HostObjectValue);
rows.Add(row);
}
index++;
}
else
{
parent = CreateMemberDataItem(
resultProvider,
inspectionContext,
_member,
memberValue,
parent,
ExpansionFlags.IncludeBaseMembers | ExpansionFlags.IncludeResultsView);
var expansion = parent.Expansion;
if (expansion != null)
{
expansion.GetRows(resultProvider, rows, inspectionContext, parent, parent.Value, startIndex, count, visitAll, ref index);
}
}
}
}
/// <summary>
/// An explicit user request to bypass "Just My Code" and display
/// the inaccessible members of an instance of an imported type.
/// </summary>
private sealed class NonPublicMembersExpansion : Expansion
{
private readonly Type _declaredType;
private readonly Expansion _members;
internal NonPublicMembersExpansion(Type declaredType, Expansion members)
{
_declaredType = declaredType;
_members = members;
}
internal override void GetRows(
ResultProvider resultProvider,
ArrayBuilder<EvalResultDataItem> rows,
DkmInspectionContext inspectionContext,
EvalResultDataItem parent,
DkmClrValue value,
int startIndex,
int count,
bool visitAll,
ref int index)
{
if (InRange(startIndex, count, index))
{
rows.Add(GetRow(
resultProvider,
inspectionContext,
_declaredType,
value,
_members,
parent));
}
index++;
}
private static readonly ReadOnlyCollection<string> s_hiddenFormatSpecifiers = new ReadOnlyCollection<string>(new[] { "hidden" });
private static EvalResultDataItem GetRow(
ResultProvider resultProvider,
DkmInspectionContext inspectionContext,
Type declaredType,
DkmClrValue value,
Expansion expansion,
EvalResultDataItem parent)
{
return new EvalResultDataItem(
ExpansionKind.NonPublicMembers,
name: Resources.NonPublicMembers,
typeDeclaringMember: null,
declaredType: declaredType,
parent: null,
value: value,
displayValue: null,
expansion: expansion,
childShouldParenthesize: parent.ChildShouldParenthesize,
fullName: parent.FullNameWithoutFormatSpecifiers,
childFullNamePrefixOpt: parent.ChildFullNamePrefix,
formatSpecifiers: s_hiddenFormatSpecifiers,
category: DkmEvaluationResultCategory.Data,
flags: DkmEvaluationResultFlags.ReadOnly,
editableValue: null,
inspectionContext: inspectionContext);
}
}
/// <summary>
/// A transition from an instance of a type to the type itself (for inspecting static members).
/// </summary>
private sealed class StaticMembersExpansion : Expansion
{
private readonly Type _declaredType;
private readonly Expansion _members;
internal StaticMembersExpansion(Type declaredType, Expansion members)
{
_declaredType = declaredType;
_members = members;
}
internal override void GetRows(
ResultProvider resultProvider,
ArrayBuilder<EvalResultDataItem> rows,
DkmInspectionContext inspectionContext,
EvalResultDataItem parent,
DkmClrValue value,
int startIndex,
int count,
bool visitAll,
ref int index)
{
if (InRange(startIndex, count, index))
{
rows.Add(GetRow(
resultProvider,
inspectionContext,
_declaredType,
value,
_members));
}
index++;
}
private static EvalResultDataItem GetRow(
ResultProvider resultProvider,
DkmInspectionContext inspectionContext,
Type declaredType,
DkmClrValue value,
Expansion expansion)
{
var formatter = resultProvider.Formatter;
var fullName = formatter.GetTypeName(declaredType, escapeKeywordIdentifiers: true);
return new EvalResultDataItem(
ExpansionKind.StaticMembers,
name: formatter.StaticMembersString,
typeDeclaringMember: null,
declaredType: declaredType,
parent: null,
value: value,
displayValue: null,
expansion: expansion,
childShouldParenthesize: false,
fullName: fullName,
childFullNamePrefixOpt: fullName,
formatSpecifiers: Formatter.NoFormatSpecifiers,
category: DkmEvaluationResultCategory.Class,
flags: DkmEvaluationResultFlags.ReadOnly,
editableValue: null,
inspectionContext: inspectionContext);
}
}
private static EvalResultDataItem CreateMemberDataItem(
ResultProvider resultProvider,
DkmInspectionContext inspectionContext,
MemberAndDeclarationInfo member,
DkmClrValue memberValue,
EvalResultDataItem parent,
ExpansionFlags flags)
{
var formatter = resultProvider.Formatter;
string memberName;
var typeDeclaringMember = member.GetExplicitlyImplementedInterface(out memberName) ?? member.DeclaringType;
memberName = formatter.GetIdentifierEscapingPotentialKeywords(memberName);
var fullName = MakeFullName(
formatter,
memberName,
typeDeclaringMember,
member.RequiresExplicitCast,
member.IsStatic,
parent);
return resultProvider.CreateDataItem(
inspectionContext,
memberName,
typeDeclaringMember: (member.IncludeTypeInMemberName || typeDeclaringMember.IsInterface) ? typeDeclaringMember : null,
declaredType: member.Type,
value: memberValue,
parent: parent,
expansionFlags: flags,
childShouldParenthesize: false,
fullName: fullName,
formatSpecifiers: Formatter.NoFormatSpecifiers,
category: DkmEvaluationResultCategory.Other,
flags: memberValue.EvalFlags,
evalFlags: DkmEvaluationFlags.None);
}
private static string MakeFullName(
Formatter formatter,
string name,
Type typeDeclaringMember,
bool memberAccessRequiresExplicitCast,
bool memberIsStatic,
EvalResultDataItem parent)
{
// If the parent is an exception thrown during evaluation,
// there is no valid fullname expression for the child.
if (parent.Value.EvalFlags.Includes(DkmEvaluationResultFlags.ExceptionThrown))
{
return null;
}
var parentFullName = parent.ChildFullNamePrefix;
if (parentFullName == null)
{
return null;
}
if (parent.ChildShouldParenthesize)
{
parentFullName = $"({parentFullName})";
}
if (!typeDeclaringMember.IsInterface)
{
string qualifier;
if (memberIsStatic)
{
qualifier = formatter.GetTypeName(typeDeclaringMember, escapeKeywordIdentifiers: false);
}
else if (memberAccessRequiresExplicitCast)
{
var typeName = formatter.GetTypeName(typeDeclaringMember, escapeKeywordIdentifiers: true);
qualifier = formatter.GetCastExpression(
parentFullName,
typeName,
parenthesizeEntireExpression: true);
}
else
{
qualifier = parentFullName;
}
return $"{qualifier}.{name}";
}
else
{
// NOTE: This should never interact with debugger proxy types:
// 1) Interfaces cannot have debugger proxy types.
// 2) Debugger proxy types cannot be interfaces.
if (typeDeclaringMember.Equals(parent.DeclaredType))
{
var memberAccessTemplate = parent.ChildShouldParenthesize
? "({0}).{1}"
: "{0}.{1}";
return string.Format(memberAccessTemplate, parent.ChildFullNamePrefix, name);
}
else
{
var interfaceName = formatter.GetTypeName(typeDeclaringMember, escapeKeywordIdentifiers: true);
var memberAccessTemplate = parent.ChildShouldParenthesize
? "(({0})({1})).{2}"
: "(({0}){1}).{2}";
return string.Format(memberAccessTemplate, interfaceName, parent.ChildFullNamePrefix, name);
}
}
}
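// Illustrative examples of the full names produced by MakeFullName (added for
// documentation only; the exact strings depend on the language-specific formatter):
//   plain instance member:                    o.Field
//   parent expression needs parentheses:      (a + b).Length
//   static member:                            SomeType.StaticField
//   member access requiring an explicit cast: ((Base)(derived)).HiddenField
//   member declared on a different interface: ((IFoo)o).Bar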
}
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Localization;
using Microsoft.Extensions.Localization;
using Orchard.Admin;
using Orchard.Deployment.ViewModels;
using Orchard.DisplayManagement;
using Orchard.DisplayManagement.ModelBinding;
using Orchard.DisplayManagement.Notify;
using Orchard.Settings;
using YesSql;
namespace Orchard.Deployment.Controllers
{
[Admin]
public class StepController : Controller, IUpdateModel
{
private readonly IAuthorizationService _authorizationService;
private readonly IDisplayManager<DeploymentStep> _displayManager;
private readonly IEnumerable<IDeploymentStepFactory> _factories;
private readonly ISession _session;
private readonly ISiteService _siteService;
private readonly INotifier _notifier;
public StepController(
IAuthorizationService authorizationService,
IDisplayManager<DeploymentStep> displayManager,
IEnumerable<IDeploymentStepFactory> factories,
ISession session,
ISiteService siteService,
IShapeFactory shapeFactory,
IStringLocalizer<StepController> stringLocalizer,
IHtmlLocalizer<StepController> htmlLocalizer,
INotifier notifier)
{
_displayManager = displayManager;
_factories = factories;
_authorizationService = authorizationService;
_session = session;
_siteService = siteService;
New = shapeFactory;
_notifier = notifier;
T = stringLocalizer;
H = htmlLocalizer;
}
public dynamic New { get; set; }
public IStringLocalizer T { get; set; }
public IHtmlLocalizer H { get; set; }
public async Task<IActionResult> Create(int id, string type)
{
if (!await _authorizationService.AuthorizeAsync(User, Permissions.ManageDeploymentPlan))
{
return Unauthorized();
}
var deploymentPlan = await _session.GetAsync<DeploymentPlan>(id);
if (deploymentPlan == null)
{
return NotFound();
}
var step = _factories.FirstOrDefault(x => x.Name == type)?.Create();
if (step == null)
{
return NotFound();
}
step.Id = Guid.NewGuid().ToString("n");
var model = new EditDeploymentPlanStepViewModel
{
DeploymentPlanId = id,
DeploymentStep = step,
DeploymentStepId = step.Id,
DeploymentStepType = type,
Editor = await _displayManager.BuildEditorAsync(step, this)
};
model.Editor.DeploymentStep = step;
return View(model);
}
[HttpPost]
public async Task<IActionResult> Create(EditDeploymentPlanStepViewModel model)
{
if (!await _authorizationService.AuthorizeAsync(User, Permissions.ManageDeploymentPlan))
{
return Unauthorized();
}
var deploymentPlan = await _session.GetAsync<DeploymentPlan>(model.DeploymentPlanId);
if (deploymentPlan == null)
{
return NotFound();
}
var step = _factories.FirstOrDefault(x => x.Name == model.DeploymentStepType)?.Create();
if (step == null)
{
return NotFound();
}
var editor = await _displayManager.UpdateEditorAsync(step, this);
editor.DeploymentStep = step;
if (ModelState.IsValid)
{
step.Id = model.DeploymentStepId;
deploymentPlan.DeploymentSteps.Add(step);
_session.Save(deploymentPlan);
_notifier.Success(H["Deployment plan step added successfully"]);
return RedirectToAction("Display", "DeploymentPlan", new { id = model.DeploymentPlanId });
}
model.Editor = editor;
// If we got this far, something failed, redisplay form
return View(model);
}
public async Task<IActionResult> Edit(int id, string stepId)
{
if (!await _authorizationService.AuthorizeAsync(User, Permissions.ManageDeploymentPlan))
{
return Unauthorized();
}
var deploymentPlan = await _session.GetAsync<DeploymentPlan>(id);
if (deploymentPlan == null)
{
return NotFound();
}
var step = deploymentPlan.DeploymentSteps.FirstOrDefault(x => String.Equals(x.Id, stepId, StringComparison.OrdinalIgnoreCase));
if (step == null)
{
return NotFound();
}
var model = new EditDeploymentPlanStepViewModel
{
DeploymentPlanId = id,
DeploymentStep = step,
DeploymentStepId = step.Id,
DeploymentStepType = step.GetType().Name,
Editor = await _displayManager.BuildEditorAsync(step, this)
};
model.Editor.DeploymentStep = step;
return View(model);
}
[HttpPost]
public async Task<IActionResult> Edit(EditDeploymentPlanStepViewModel model)
{
if (!await _authorizationService.AuthorizeAsync(User, Permissions.ManageDeploymentPlan))
{
return Unauthorized();
}
var deploymentPlan = await _session.GetAsync<DeploymentPlan>(model.DeploymentPlanId);
if (deploymentPlan == null)
{
return NotFound();
}
var step = deploymentPlan.DeploymentSteps.FirstOrDefault(x => String.Equals(x.Id, model.DeploymentStepId, StringComparison.OrdinalIgnoreCase));
if (step == null)
{
return NotFound();
}
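// UpdateEditorAsync binds the posted form values onto the existing step through
// its editor drivers, so a valid ModelState means the plan can be saved as-is;
// on failure the rebuilt editor shape is redisplayed with an error notification.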
var editor = await _displayManager.UpdateEditorAsync(step, this);
if (ModelState.IsValid)
{
_session.Save(deploymentPlan);
_notifier.Success(H["Deployment plan step updated successfully"]);
return RedirectToAction("Display", "DeploymentPlan", new { id = model.DeploymentPlanId });
}
_notifier.Error(H["The deployment plan step has validation errors"]);
model.Editor = editor;
// If we got this far, something failed, redisplay form
return View(model);
}
[HttpPost]
public async Task<IActionResult> Delete(int id, string stepId)
{
if (!await _authorizationService.AuthorizeAsync(User, Permissions.ManageDeploymentPlan))
{
return Unauthorized();
}
var deploymentPlan = await _session.GetAsync<DeploymentPlan>(id);
if (deploymentPlan == null)
{
return NotFound();
}
var step = deploymentPlan.DeploymentSteps.FirstOrDefault(x => String.Equals(x.Id, stepId, StringComparison.OrdinalIgnoreCase));
if (step == null)
{
return NotFound();
}
deploymentPlan.DeploymentSteps.Remove(step);
_session.Save(deploymentPlan);
_notifier.Success(H["Deployment step deleted successfully"]);
return RedirectToAction("Display", "DeploymentPlan", new { id });
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using NUnit.Framework;
using OpenMetaverse;
using OpenSim.Tests.Common;
namespace OpenSim.Framework.Tests
{
[TestFixture]
public class UtilTests : OpenSimTestCase
{
[Test]
public void VectorOperationTests()
{
Vector3 v1, v2;
double expectedDistance;
double expectedMagnitude;
double lowPrecisionTolerance = 0.001;
// Let's test a simple case of <0,0,0> and <5,5,5>
{
v1 = new Vector3(0, 0, 0);
v2 = new Vector3(5, 5, 5);
expectedDistance = 8.66;
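// 8.66 ≈ 5 * sqrt(3): the Euclidean distance from <0,0,0> to <5,5,5> (and, below, the magnitude of v2).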
Assert.That(Util.GetDistanceTo(v1, v2),
new DoubleToleranceConstraint(expectedDistance, lowPrecisionTolerance),
"Calculated distance between two vectors was not within tolerances.");
expectedMagnitude = 0;
Assert.That(Util.GetMagnitude(v1), Is.EqualTo(0), "Magnitude of null vector was not zero.");
expectedMagnitude = 8.66;
Assert.That(Util.GetMagnitude(v2),
new DoubleToleranceConstraint(expectedMagnitude, lowPrecisionTolerance),
"Magnitude of vector was incorrect.");
TestDelegate d = delegate() { Util.GetNormalizedVector(v1); };
bool causesArgumentException = TestHelpers.AssertThisDelegateCausesArgumentException(d);
Assert.That(causesArgumentException, Is.True,
"Normalizing the zero vector did not cause an ArgumentException.");
Vector3 expectedNormalizedVector = new Vector3(.577f, .577f, .577f);
double expectedNormalizedMagnitude = 1;
Vector3 normalizedVector = Util.GetNormalizedVector(v2);
Assert.That(normalizedVector,
new VectorToleranceConstraint(expectedNormalizedVector, lowPrecisionTolerance),
"Normalized vector generated from vector was not what was expected.");
Assert.That(Util.GetMagnitude(normalizedVector),
new DoubleToleranceConstraint(expectedNormalizedMagnitude, lowPrecisionTolerance),
"Normalized vector generated from vector does not have magnitude of 1.");
}
// Let's test a simple case of <0,0,0> and <0,0,0>
{
v1 = new Vector3(0, 0, 0);
v2 = new Vector3(0, 0, 0);
expectedDistance = 0;
Assert.That(Util.GetDistanceTo(v1, v2),
new DoubleToleranceConstraint(expectedDistance, lowPrecisionTolerance),
"Calculated distance between two vectors was not within tolerances.");
expectedMagnitude = 0;
Assert.That(Util.GetMagnitude(v1), Is.EqualTo(0), "Magnitude of null vector was not zero.");
expectedMagnitude = 0;
Assert.That(Util.GetMagnitude(v2),
new DoubleToleranceConstraint(expectedMagnitude, lowPrecisionTolerance),
"Magnitude of vector was incorrect.");
TestDelegate d = delegate() { Util.GetNormalizedVector(v1); };
bool causesArgumentException = TestHelpers.AssertThisDelegateCausesArgumentException(d);
Assert.That(causesArgumentException, Is.True,
"Normalizing the zero vector did not cause an ArgumentException.");
d = delegate() { Util.GetNormalizedVector(v2); };
causesArgumentException = TestHelpers.AssertThisDelegateCausesArgumentException(d);
Assert.That(causesArgumentException, Is.True,
"Normalizing the zero vector did not cause an ArgumentException.");
}
// Let's test a simple case of <0,0,0> and <-5,-5,-5>
{
v1 = new Vector3(0, 0, 0);
v2 = new Vector3(-5, -5, -5);
expectedDistance = 8.66;
Assert.That(Util.GetDistanceTo(v1, v2),
new DoubleToleranceConstraint(expectedDistance, lowPrecisionTolerance),
"Calculated distance between two vectors was not within tolerances.");
expectedMagnitude = 0;
Assert.That(Util.GetMagnitude(v1), Is.EqualTo(0), "Magnitude of null vector was not zero.");
expectedMagnitude = 8.66;
Assert.That(Util.GetMagnitude(v2),
new DoubleToleranceConstraint(expectedMagnitude, lowPrecisionTolerance),
"Magnitude of vector was incorrect.");
TestDelegate d = delegate() { Util.GetNormalizedVector(v1); };
bool causesArgumentException = TestHelpers.AssertThisDelegateCausesArgumentException(d);
Assert.That(causesArgumentException, Is.True,
"Normalizing the zero vector did not cause an ArgumentException.");
Vector3 expectedNormalizedVector = new Vector3(-.577f, -.577f, -.577f);
double expectedNormalizedMagnitude = 1;
Vector3 normalizedVector = Util.GetNormalizedVector(v2);
Assert.That(normalizedVector,
new VectorToleranceConstraint(expectedNormalizedVector, lowPrecisionTolerance),
"Normalized vector generated from vector was not what was expected.");
Assert.That(Util.GetMagnitude(normalizedVector),
new DoubleToleranceConstraint(expectedNormalizedMagnitude, lowPrecisionTolerance),
"Normalized vector generated from vector does not have magnitude of 1.");
}
}
[Test]
public void UUIDTests()
{
Assert.IsTrue(Util.isUUID("01234567-89ab-Cdef-0123-456789AbCdEf"),
"A correct UUID wasn't recognized.");
Assert.IsFalse(Util.isUUID("FOOBAR67-89ab-Cdef-0123-456789AbCdEf"),
"UUIDs with non-hex characters are recognized as correct UUIDs.");
Assert.IsFalse(Util.isUUID("01234567"),
"Too short UUIDs are recognized as correct UUIDs.");
Assert.IsFalse(Util.isUUID("01234567-89ab-Cdef-0123-456789AbCdEf0"),
"Too long UUIDs are recognized as correct UUIDs.");
Assert.IsFalse(Util.isUUID("01234567-89ab-Cdef-0123+456789AbCdEf"),
"UUIDs with wrong format are recognized as correct UUIDs.");
}
[Test]
public void GetHashGuidTests()
{
string string1 = "This is one string";
string string2 = "This is another";
// Two consecutive runs over the same input should produce equal hashes
Assert.AreEqual(Util.GetHashGuid(string1, "secret1"), Util.GetHashGuid(string1, "secret1"));
Assert.AreEqual(Util.GetHashGuid(string2, "secret1"), Util.GetHashGuid(string2, "secret1"));
// Varying the data should not produce equal hashes
Assert.AreNotEqual(Util.GetHashGuid(string1, "secret1"), Util.GetHashGuid(string2, "secret1"));
// Varying the secret should not produce equal hashes
Assert.AreNotEqual(Util.GetHashGuid(string1, "secret1"), Util.GetHashGuid(string1, "secret2"));
}
[Test]
public void SLUtilTypeConvertTests()
{
int[] assettypes = new int[]{-1,0,1,2,3,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22
,23,24,25,46,47,48};
string[] contenttypes = new string[]
{
"application/octet-stream",
"image/x-j2c",
"audio/ogg",
"application/vnd.ll.callingcard",
"application/vnd.ll.landmark",
"application/vnd.ll.clothing",
"application/vnd.ll.primitive",
"application/vnd.ll.notecard",
"application/vnd.ll.folder",
"application/vnd.ll.rootfolder",
"application/vnd.ll.lsltext",
"application/vnd.ll.lslbyte",
"image/tga",
"application/vnd.ll.bodypart",
"application/vnd.ll.trashfolder",
"application/vnd.ll.snapshotfolder",
"application/vnd.ll.lostandfoundfolder",
"audio/x-wav",
"image/tga",
"image/jpeg",
"application/vnd.ll.animation",
"application/vnd.ll.gesture",
"application/x-metaverse-simstate",
"application/vnd.ll.favoritefolder",
"application/vnd.ll.link",
"application/vnd.ll.linkfolder",
"application/vnd.ll.currentoutfitfolder",
"application/vnd.ll.outfitfolder",
"application/vnd.ll.myoutfitsfolder"
};
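// The two arrays are parallel: assettypes[i] is expected to map to contenttypes[i]
// and (except for the ambiguous image/tga entries) back again in the second loop.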
for (int i=0;i<assettypes.Length;i++)
{
Assert.That(SLUtil.SLAssetTypeToContentType(assettypes[i]) == contenttypes[i], "Expecting {0} but got {1}", contenttypes[i], SLUtil.SLAssetTypeToContentType(assettypes[i]));
}
for (int i = 0; i < contenttypes.Length; i++)
{
int expected;
if (contenttypes[i] == "image/tga")
expected = 12; // if we know only the content-type "image/tga", then we assume the asset type is TextureTGA; not ImageTGA
else
expected = assettypes[i];
Assert.AreEqual(expected, SLUtil.ContentTypeToSLAssetType(contenttypes[i]),
String.Format("Incorrect AssetType mapped from Content-Type {0}", contenttypes[i]));
}
int[] inventorytypes = new int[] {-1,0,1,2,3,6,7,8,9,10,15,17,18,20};
string[] invcontenttypes = new string[]
{
"application/octet-stream",
"image/x-j2c",
"audio/ogg",
"application/vnd.ll.callingcard",
"application/vnd.ll.landmark",
"application/vnd.ll.primitive",
"application/vnd.ll.notecard",
"application/vnd.ll.folder",
"application/vnd.ll.rootfolder",
"application/vnd.ll.lsltext",
"image/x-j2c",
"application/vnd.ll.primitive",
"application/vnd.ll.clothing",
"application/vnd.ll.gesture"
};
for (int i=0;i<inventorytypes.Length;i++)
{
Assert.AreEqual(invcontenttypes[i], SLUtil.SLInvTypeToContentType(inventorytypes[i]),
String.Format("Incorrect Content-Type mapped from InventoryType {0}", inventorytypes[i]));
}
invcontenttypes = new string[]
{
"image/x-j2c","image/jp2","image/tga",
"image/jpeg","application/ogg","audio/ogg",
"audio/x-wav","application/vnd.ll.callingcard",
"application/x-metaverse-callingcard",
"application/vnd.ll.landmark",
"application/x-metaverse-landmark",
"application/vnd.ll.clothing",
"application/x-metaverse-clothing","application/vnd.ll.bodypart",
"application/x-metaverse-bodypart","application/vnd.ll.primitive",
"application/x-metaverse-primitive","application/vnd.ll.notecard",
"application/x-metaverse-notecard","application/vnd.ll.folder",
"application/vnd.ll.rootfolder","application/vnd.ll.lsltext",
"application/x-metaverse-lsl","application/vnd.ll.lslbyte",
"application/x-metaverse-lso","application/vnd.ll.trashfolder",
"application/vnd.ll.snapshotfolder",
"application/vnd.ll.lostandfoundfolder","application/vnd.ll.animation",
"application/x-metaverse-animation","application/vnd.ll.gesture",
"application/x-metaverse-gesture","application/x-metaverse-simstate",
"application/octet-stream"
};
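// Expected InventoryType for each Content-Type above, in the same order; several
// content types collapse onto a single inventory type (e.g. every image content type maps to 0).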
sbyte[] invtypes = new sbyte[]
{
0, 0, 0, 0, 1, 1, 1, 2, 2, 3, 3, 18, 18, 18, 18, 6, 6, 7, 7, 8, 9, 10, 10, 10, 10
, 8, 8, 8, 19, 19, 20, 20, 15, -1
};
for (int i = 0; i < invtypes.Length; i++)
{
Assert.AreEqual(invtypes[i], SLUtil.ContentTypeToSLInvType(invcontenttypes[i]),
String.Format("Incorrect InventoryType mapped from Content-Type {0}", invcontenttypes[i]));
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System.Xml.Xsl
{
using System.Reflection;
using System.Diagnostics;
using System.IO;
using System.Xml.XPath;
using System.Xml.Xsl.XsltOld;
using MS.Internal.Xml.XPath;
using MS.Internal.Xml.Cache;
using System.Collections.Generic;
using System.Xml.Xsl.XsltOld.Debugger;
using System.Security.Policy;
using System.Runtime.Versioning;
using System.Xml.XmlConfiguration;
[Obsolete("This class has been deprecated. Please use System.Xml.Xsl.XslCompiledTransform instead. http://go.microsoft.com/fwlink/?linkid=14202")]
public sealed class XslTransform
{
private XmlResolver _documentResolver = null;
private bool _isDocumentResolverSet = false;
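// Resolver used for document() lookups during Transform: once XmlResolver has
// been assigned (even with null) the user-supplied value wins, otherwise the
// configuration-default resolver is created on demand.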
private XmlResolver _DocumentResolver
{
get
{
if (_isDocumentResolverSet)
return _documentResolver;
else
return XsltConfigSection.CreateDefaultResolver();
}
}
//
// Compiled stylesheet state
//
private Stylesheet _CompiledStylesheet;
private List<TheQuery> _QueryStore;
private RootAction _RootAction;
private IXsltDebugger _debugger;
public XslTransform() { }
public XmlResolver XmlResolver
{
set
{
_documentResolver = value;
_isDocumentResolverSet = true;
}
}
public void Load(XmlReader stylesheet)
{
Load(stylesheet, XsltConfigSection.CreateDefaultResolver());
}
public void Load(XmlReader stylesheet, XmlResolver resolver)
{
Load(new XPathDocument(stylesheet, XmlSpace.Preserve), resolver);
}
public void Load(IXPathNavigable stylesheet)
{
Load(stylesheet, XsltConfigSection.CreateDefaultResolver());
}
public void Load(IXPathNavigable stylesheet, XmlResolver resolver)
{
if (stylesheet == null)
{
throw new ArgumentNullException(nameof(stylesheet));
}
Load(stylesheet.CreateNavigator(), resolver);
}
public void Load(XPathNavigator stylesheet)
{
if (stylesheet == null)
{
throw new ArgumentNullException(nameof(stylesheet));
}
Load(stylesheet, XsltConfigSection.CreateDefaultResolver());
}
public void Load(XPathNavigator stylesheet, XmlResolver resolver)
{
if (stylesheet == null)
{
throw new ArgumentNullException(nameof(stylesheet));
}
Compile(stylesheet, resolver, /*evidence:*/null);
}
public void Load(string url)
{
XmlTextReaderImpl tr = new XmlTextReaderImpl(url);
Evidence evidence = XmlSecureResolver.CreateEvidenceForUrl(tr.BaseURI); // We should ask BaseURI before we start reading because it's changing with each node
Compile(Compiler.LoadDocument(tr).CreateNavigator(), XsltConfigSection.CreateDefaultResolver(), evidence);
}
public void Load(string url, XmlResolver resolver)
{
XmlTextReaderImpl tr = new XmlTextReaderImpl(url);
tr.XmlResolver = resolver;
Evidence evidence = XmlSecureResolver.CreateEvidenceForUrl(tr.BaseURI); // We should ask BaseURI before we start reading because it's changing with each node
Compile(Compiler.LoadDocument(tr).CreateNavigator(), resolver, evidence);
}
internal void Load(IXPathNavigable stylesheet, XmlResolver resolver, Evidence evidence)
{
if (stylesheet == null)
{
throw new ArgumentNullException(nameof(stylesheet));
}
Load(stylesheet.CreateNavigator(), resolver, evidence);
}
internal void Load(XmlReader stylesheet, XmlResolver resolver, Evidence evidence)
{
if (stylesheet == null)
{
throw new ArgumentNullException(nameof(stylesheet));
}
Load(new XPathDocument(stylesheet, XmlSpace.Preserve), resolver, evidence);
}
internal void Load(XPathNavigator stylesheet, XmlResolver resolver, Evidence evidence)
{
if (stylesheet == null)
{
throw new ArgumentNullException(nameof(stylesheet));
}
if (evidence == null)
{
evidence = new Evidence();
}
Compile(stylesheet, resolver, evidence);
}
// ------------------------------------ Transform() ------------------------------------ //
private void CheckCommand()
{
if (_CompiledStylesheet == null)
{
throw new InvalidOperationException(SR.Xslt_NoStylesheetLoaded);
}
}
public XmlReader Transform(XPathNavigator input, XsltArgumentList args, XmlResolver resolver)
{
CheckCommand();
Processor processor = new Processor(input, args, resolver, _CompiledStylesheet, _QueryStore, _RootAction, _debugger);
return processor.StartReader();
}
public XmlReader Transform(XPathNavigator input, XsltArgumentList args)
{
return Transform(input, args, _DocumentResolver);
}
public void Transform(XPathNavigator input, XsltArgumentList args, XmlWriter output, XmlResolver resolver)
{
CheckCommand();
Processor processor = new Processor(input, args, resolver, _CompiledStylesheet, _QueryStore, _RootAction, _debugger);
processor.Execute(output);
}
public void Transform(XPathNavigator input, XsltArgumentList args, XmlWriter output)
{
Transform(input, args, output, _DocumentResolver);
}
public void Transform(XPathNavigator input, XsltArgumentList args, Stream output, XmlResolver resolver)
{
CheckCommand();
Processor processor = new Processor(input, args, resolver, _CompiledStylesheet, _QueryStore, _RootAction, _debugger);
processor.Execute(output);
}
public void Transform(XPathNavigator input, XsltArgumentList args, Stream output)
{
Transform(input, args, output, _DocumentResolver);
}
public void Transform(XPathNavigator input, XsltArgumentList args, TextWriter output, XmlResolver resolver)
{
CheckCommand();
Processor processor = new Processor(input, args, resolver, _CompiledStylesheet, _QueryStore, _RootAction, _debugger);
processor.Execute(output);
}
public void Transform(XPathNavigator input, XsltArgumentList args, TextWriter output)
{
CheckCommand();
Processor processor = new Processor(input, args, _DocumentResolver, _CompiledStylesheet, _QueryStore, _RootAction, _debugger);
processor.Execute(output);
}
public XmlReader Transform(IXPathNavigable input, XsltArgumentList args, XmlResolver resolver)
{
if (input == null)
{
throw new ArgumentNullException(nameof(input));
}
return Transform(input.CreateNavigator(), args, resolver);
}
public XmlReader Transform(IXPathNavigable input, XsltArgumentList args)
{
if (input == null)
{
throw new ArgumentNullException(nameof(input));
}
return Transform(input.CreateNavigator(), args, _DocumentResolver);
}
public void Transform(IXPathNavigable input, XsltArgumentList args, TextWriter output, XmlResolver resolver)
{
if (input == null)
{
throw new ArgumentNullException(nameof(input));
}
Transform(input.CreateNavigator(), args, output, resolver);
}
public void Transform(IXPathNavigable input, XsltArgumentList args, TextWriter output)
{
if (input == null)
{
throw new ArgumentNullException(nameof(input));
}
Transform(input.CreateNavigator(), args, output, _DocumentResolver);
}
public void Transform(IXPathNavigable input, XsltArgumentList args, Stream output, XmlResolver resolver)
{
if (input == null)
{
throw new ArgumentNullException(nameof(input));
}
Transform(input.CreateNavigator(), args, output, resolver);
}
public void Transform(IXPathNavigable input, XsltArgumentList args, Stream output)
{
if (input == null)
{
throw new ArgumentNullException(nameof(input));
}
Transform(input.CreateNavigator(), args, output, _DocumentResolver);
}
public void Transform(IXPathNavigable input, XsltArgumentList args, XmlWriter output, XmlResolver resolver)
{
if (input == null)
{
throw new ArgumentNullException(nameof(input));
}
Transform(input.CreateNavigator(), args, output, resolver);
}
public void Transform(IXPathNavigable input, XsltArgumentList args, XmlWriter output)
{
if (input == null)
{
throw new ArgumentNullException(nameof(input));
}
Transform(input.CreateNavigator(), args, output, _DocumentResolver);
}
public void Transform(String inputfile, String outputfile, XmlResolver resolver)
{
FileStream fs = null;
try
{
// We should read doc before creating output file in case they are the same
XPathDocument doc = new XPathDocument(inputfile);
fs = new FileStream(outputfile, FileMode.Create, FileAccess.ReadWrite);
Transform(doc, /*args:*/null, fs, resolver);
}
finally
{
if (fs != null)
{
fs.Dispose();
}
}
}
public void Transform(String inputfile, String outputfile)
{
Transform(inputfile, outputfile, _DocumentResolver);
}
// Implementation
private void Compile(XPathNavigator stylesheet, XmlResolver resolver, Evidence evidence)
{
Debug.Assert(stylesheet != null);
Compiler compiler = (Debugger == null) ? new Compiler() : new DbgCompiler(this.Debugger);
NavigatorInput input = new NavigatorInput(stylesheet);
compiler.Compile(input, resolver ?? XmlNullResolver.Singleton, evidence);
Debug.Assert(compiler.CompiledStylesheet != null);
Debug.Assert(compiler.QueryStore != null);
_CompiledStylesheet = compiler.CompiledStylesheet;
_QueryStore = compiler.QueryStore;
_RootAction = compiler.RootAction;
}
internal IXsltDebugger Debugger
{
get { return _debugger; }
}
#if false
internal XslTransform(IXsltDebugger debugger) {
this.debugger = debugger;
}
#endif
internal XslTransform(object debugger)
{
if (debugger != null)
{
_debugger = new DebuggerAddapter(debugger);
}
}
private class DebuggerAddapter : IXsltDebugger
{
private object _unknownDebugger;
private MethodInfo _getBltIn;
private MethodInfo _onCompile;
private MethodInfo _onExecute;
public DebuggerAddapter(object unknownDebugger)
{
_unknownDebugger = unknownDebugger;
BindingFlags flags = BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static;
Type unknownType = unknownDebugger.GetType();
_getBltIn = unknownType.GetMethod("GetBuiltInTemplatesUri", flags);
_onCompile = unknownType.GetMethod("OnInstructionCompile", flags);
_onExecute = unknownType.GetMethod("OnInstructionExecute", flags);
}
// ------------------ IXsltDebugger ---------------
public string GetBuiltInTemplatesUri()
{
if (_getBltIn == null)
{
return null;
}
return (string)_getBltIn.Invoke(_unknownDebugger, new object[] { });
}
public void OnInstructionCompile(XPathNavigator styleSheetNavigator)
{
if (_onCompile != null)
{
_onCompile.Invoke(_unknownDebugger, new object[] { styleSheetNavigator });
}
}
public void OnInstructionExecute(IXsltProcessor xsltProcessor)
{
if (_onExecute != null)
{
_onExecute.Invoke(_unknownDebugger, new object[] { xsltProcessor });
}
}
}
}
}
| |
/* ****************************************************************************
*
* Copyright (c) Microsoft Corporation.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0. A
* copy of the license can be found in the License.html file at the root of this distribution. If
* you cannot locate the Apache License, Version 2.0, please send an email to
* [email protected]. By using this source code in any fashion, you are agreeing to be bound
* by the terms of the Apache License, Version 2.0.
*
* You must not remove this notice, or any other, from this software.
* ***************************************************************************/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.PythonTools.Analysis;
using Microsoft.PythonTools.Intellisense;
using Microsoft.PythonTools.Interpreter;
using Microsoft.PythonTools.Parsing.Ast;
using Microsoft.VisualStudio.Language.StandardClassification;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Classification;
namespace Microsoft.PythonTools {
struct CachedClassification {
public ITrackingSpan Span;
public string Classification;
public CachedClassification(ITrackingSpan span, string classification) {
Span = span;
Classification = classification;
}
}
/// <summary>
/// Provides classification based upon the AST and analysis.
/// </summary>
internal class PythonAnalysisClassifier : IClassifier {
private List<List<CachedClassification>> _spanCache;
private readonly object _spanCacheLock = new object();
private readonly PythonAnalysisClassifierProvider _provider;
private readonly ITextBuffer _buffer;
private IPythonProjectEntry _entry;
internal PythonAnalysisClassifier(PythonAnalysisClassifierProvider provider, ITextBuffer buffer) {
buffer.Changed += BufferChanged;
buffer.ContentTypeChanged += BufferContentTypeChanged;
_provider = provider;
_buffer = buffer;
EnsureAnalysis();
}
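// Called when the buffer's project entry may have changed: atomically swaps the
// tracked entry, moves the OnNewAnalysis subscription to the new entry, and, if
// that entry has already been analyzed, refreshes classifications immediately.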
public void NewVersion() {
var newEntry = _buffer.GetPythonProjectEntry();
var oldEntry = Interlocked.Exchange(ref _entry, newEntry);
if (oldEntry != null && oldEntry != newEntry) {
oldEntry.OnNewAnalysis -= OnNewAnalysis;
}
if (newEntry != null) {
newEntry.OnNewAnalysis += OnNewAnalysis;
if (newEntry.IsAnalyzed) {
// Ensure we get classifications if we've already been
// analyzed
OnNewAnalysis(_entry, EventArgs.Empty);
}
}
}
private void EnsureAnalysis() {
if (_entry == null) {
NewVersion();
}
}
private void OnNewAnalysis(object sender, EventArgs e) {
if (_provider._serviceProvider.GetPythonToolsService().AdvancedOptions.ColorNames == false) {
lock (_spanCacheLock) {
if (_spanCache != null) {
_spanCache = null;
OnNewClassifications(_buffer.CurrentSnapshot);
}
}
return;
}
PythonAst tree;
IAnalysisCookie cookie;
_entry.GetTreeAndCookie(out tree, out cookie);
var sCookie = cookie as SnapshotCookie;
var snapshot = sCookie != null ? sCookie.Snapshot : null;
if (tree == null || snapshot == null) {
return;
}
var moduleAnalysis = (_provider._serviceProvider.GetPythonToolsService().AdvancedOptions.ColorNamesWithAnalysis)
? _entry.Analysis
: null;
var walker = new ClassifierWalker(tree, moduleAnalysis, snapshot, Provider.CategoryMap);
tree.Walk(walker);
var newCache = walker.Spans;
lock (_spanCacheLock) {
if (snapshot == snapshot.TextBuffer.CurrentSnapshot) {
// Ensure we have not raced with another update
_spanCache = newCache;
} else {
snapshot = null;
}
}
if (snapshot != null) {
OnNewClassifications(snapshot);
}
}
private void OnNewClassifications(ITextSnapshot snapshot) {
var changed = ClassificationChanged;
if (changed != null) {
changed(this, new ClassificationChangedEventArgs(new SnapshotSpan(snapshot, 0, snapshot.Length)));
}
}
public event EventHandler<ClassificationChangedEventArgs> ClassificationChanged;
public IList<ClassificationSpan> GetClassificationSpans(SnapshotSpan span) {
EnsureAnalysis();
var classifications = new List<ClassificationSpan>();
var snapshot = span.Snapshot;
var spans = _spanCache;
if (span.Length <= 0 || span.Snapshot.IsReplBufferWithCommand() || spans == null) {
return classifications;
}
var firstLine = 0; // span.Start.GetContainingLine().LineNumber;
var lastLine = int.MaxValue; // span.End.GetContainingLine().LineNumber;
for (int line = firstLine; line <= lastLine && line < spans.Count; ++line) {
var lineSpan = spans[line];
if (lineSpan != null) {
foreach (var cc in lineSpan) {
if (cc.Span.TextBuffer != snapshot.TextBuffer) {
continue;
}
var cs = cc.Span.GetSpan(snapshot);
if (!cs.IntersectsWith(span)) {
continue;
}
IClassificationType classification;
if (_provider.CategoryMap.TryGetValue(cc.Classification, out classification)) {
Debug.Assert(classification != null, "Did not find " + cc.Classification);
classifications.Add(new ClassificationSpan(cc.Span.GetSpan(snapshot), classification));
}
}
}
}
return classifications;
}
public PythonAnalysisClassifierProvider Provider {
get {
return _provider;
}
}
#region Private Members
private void BufferContentTypeChanged(object sender, ContentTypeChangedEventArgs e) {
_spanCache = null;
_buffer.Changed -= BufferChanged;
_buffer.ContentTypeChanged -= BufferContentTypeChanged;
if (_entry != null) {
_entry.OnNewAnalysis -= OnNewAnalysis;
_entry = null;
}
_buffer.Properties.RemoveProperty(typeof(PythonAnalysisClassifier));
}
private void BufferChanged(object sender, TextContentChangedEventArgs e) {
EnsureAnalysis();
}
#endregion
}
internal static partial class ClassifierExtensions {
public static PythonAnalysisClassifier GetPythonAnalysisClassifier(this ITextBuffer buffer) {
PythonAnalysisClassifier res;
if (buffer.Properties.TryGetProperty<PythonAnalysisClassifier>(typeof(PythonAnalysisClassifier), out res)) {
return res;
}
return null;
}
}
class ClassifierWalker : PythonWalker {
class StackData {
public readonly string Name;
public readonly HashSet<string> Parameters;
public readonly HashSet<string> Functions;
public readonly HashSet<string> Types;
public readonly HashSet<string> Modules;
public readonly List<Tuple<string, Span>> Names;
public readonly StackData Previous;
public StackData(string name, StackData previous) {
Name = name;
Previous = previous;
Parameters = new HashSet<string>();
Functions = new HashSet<string>();
Types = new HashSet<string>();
Modules = new HashSet<string>();
Names = new List<Tuple<string, Span>>();
}
public IEnumerable<StackData> EnumerateTowardsGlobal {
get {
for (var sd = this; sd != null; sd = sd.Previous) {
yield return sd;
}
}
}
}
private readonly PythonAst _ast;
private readonly ModuleAnalysis _analysis;
private readonly ITextSnapshot _snapshot;
private readonly Dictionary<string, IClassificationType> _formatMap;
private StackData _head;
public readonly List<List<CachedClassification>> Spans;
public ClassifierWalker(PythonAst ast, ModuleAnalysis analysis, ITextSnapshot snapshot, Dictionary<string, IClassificationType> formatMap) {
_ast = ast;
_analysis = analysis;
_snapshot = snapshot;
_formatMap = formatMap;
Spans = new List<List<CachedClassification>>();
}
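// Records a classification for the given (name, span) pair, bucketed by snapshot
// line number; the per-line lists in Spans are created lazily as lines are hit.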
private void AddSpan(Tuple<string, Span> node, string type) {
int lineNo;
try {
lineNo = _snapshot.GetLineNumberFromPosition(node.Item2.Start);
} catch (ArgumentException) {
return;
}
var existing = lineNo < Spans.Count ? Spans[lineNo] : null;
if (existing == null) {
while (lineNo >= Spans.Count) {
Spans.Add(null);
}
Spans[lineNo] = existing = new List<CachedClassification>();
}
existing.Add(new CachedClassification(
_snapshot.CreateTrackingSpan(node.Item2, SpanTrackingMode.EdgeExclusive),
type
));
}
private void BeginScope(string name = null) {
if (_head != null) {
if (name == null) {
name = _head.Name;
} else if (_head.Name != null) {
name = _head.Name + "." + name;
}
}
_head = new StackData(name, _head);
}
private void AddParameter(Parameter node) {
Debug.Assert(_head != null);
_head.Parameters.Add(node.Name);
_head.Names.Add(Tuple.Create(node.Name, new Span(node.StartIndex, node.Name.Length)));
}
private void AddParameter(Node node) {
NameExpression name;
TupleExpression tuple;
Debug.Assert(_head != null);
if ((name = node as NameExpression) != null) {
_head.Parameters.Add(name.Name);
} else if ((tuple = node as TupleExpression) != null) {
foreach (var expr in tuple.Items) {
AddParameter(expr);
}
} else {
Trace.TraceWarning("Unable to find parameter in {0}", node);
}
}
public override bool Walk(NameExpression node) {
_head.Names.Add(Tuple.Create(node.Name, Span.FromBounds(node.StartIndex, node.EndIndex)));
return base.Walk(node);
}
private static string GetFullName(MemberExpression expr) {
var ne = expr.Target as NameExpression;
if (ne != null) {
return ne.Name + "." + (expr.Name ?? string.Empty);
}
var me = expr.Target as MemberExpression;
if (me != null) {
var baseName = GetFullName(me);
if (baseName == null) {
return null;
}
return baseName + "." + (expr.Name ?? string.Empty);
}
return null;
}
public override bool Walk(MemberExpression node) {
var fullname = GetFullName(node);
if (fullname != null) {
_head.Names.Add(Tuple.Create(fullname, Span.FromBounds(node.NameHeader, node.EndIndex)));
}
return base.Walk(node);
}
public override bool Walk(DottedName node) {
string totalName = "";
foreach (var name in node.Names) {
_head.Names.Add(Tuple.Create(totalName + name.Name, Span.FromBounds(name.StartIndex, name.EndIndex)));
totalName += name.Name + ".";
}
return base.Walk(node);
}
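// Resolves the classification for a name by first consulting the lexical scope
// stack (parameters, then functions, classes and modules declared in enclosing
// scopes) and falling back to the analysis engine's member type at that
// position; returns null when the name should not be colored.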
private string ClassifyName(Tuple<string, Span> node) {
var name = node.Item1;
foreach (var sd in _head.EnumerateTowardsGlobal) {
if (sd.Parameters.Contains(name)) {
return PythonPredefinedClassificationTypeNames.Parameter;
} else if (sd.Functions.Contains(name)) {
return PythonPredefinedClassificationTypeNames.Function;
} else if (sd.Types.Contains(name)) {
return PythonPredefinedClassificationTypeNames.Class;
} else if (sd.Modules.Contains(name)) {
return PythonPredefinedClassificationTypeNames.Module;
}
}
if (_analysis != null) {
var memberType = PythonMemberType.Unknown;
lock (_analysis) {
memberType = _analysis
.GetValuesByIndex(name, node.Item2.Start)
.Select(v => v.MemberType)
.DefaultIfEmpty(PythonMemberType.Unknown)
.Aggregate((a, b) => a == b ? a : PythonMemberType.Unknown);
}
if (memberType == PythonMemberType.Module) {
return PythonPredefinedClassificationTypeNames.Module;
} else if (memberType == PythonMemberType.Class) {
return PythonPredefinedClassificationTypeNames.Class;
} else if (memberType == PythonMemberType.Function || memberType == PythonMemberType.Method) {
return PythonPredefinedClassificationTypeNames.Function;
}
}
return null;
}
private void EndScope(bool mergeNames) {
var sd = _head;
foreach (var node in sd.Names) {
var classificationName = ClassifyName(node);
if (classificationName != null) {
AddSpan(node, classificationName);
if (mergeNames && sd.Previous != null) {
if (classificationName == PythonPredefinedClassificationTypeNames.Module) {
sd.Previous.Modules.Add(sd.Name + "." + node.Item1);
} else if (classificationName == PythonPredefinedClassificationTypeNames.Class) {
sd.Previous.Types.Add(sd.Name + "." + node.Item1);
} else if (classificationName == PythonPredefinedClassificationTypeNames.Function) {
sd.Previous.Functions.Add(sd.Name + "." + node.Item1);
}
}
}
}
_head = sd.Previous;
}
public override bool Walk(PythonAst node) {
Debug.Assert(_head == null);
_head = new StackData(string.Empty, null);
return base.Walk(node);
}
public override void PostWalk(PythonAst node) {
EndScope(false);
Debug.Assert(_head == null);
base.PostWalk(node);
}
public override bool Walk(ClassDefinition node) {
Debug.Assert(_head != null);
_head.Types.Add(node.NameExpression.Name);
node.NameExpression.Walk(this);
BeginScope(node.NameExpression.Name);
return base.Walk(node);
}
public override bool Walk(FunctionDefinition node) {
if (node.IsCoroutine) {
AddSpan(Tuple.Create("", new Span(node.StartIndex, 5)), PredefinedClassificationTypeNames.Keyword);
}
Debug.Assert(_head != null);
_head.Functions.Add(node.NameExpression.Name);
node.NameExpression.Walk(this);
BeginScope();
return base.Walk(node);
}
public override bool Walk(DictionaryComprehension node) {
BeginScope();
return base.Walk(node);
}
public override bool Walk(ListComprehension node) {
BeginScope();
return base.Walk(node);
}
public override bool Walk(GeneratorExpression node) {
BeginScope();
return base.Walk(node);
}
public override bool Walk(ComprehensionFor node) {
AddParameter(node.Left);
return base.Walk(node);
}
public override bool Walk(Parameter node) {
AddParameter(node);
return base.Walk(node);
}
public override bool Walk(ImportStatement node) {
Debug.Assert(_head != null);
if (node.AsNames != null) {
foreach (var name in node.AsNames) {
if (name != null && !string.IsNullOrEmpty(name.Name)) {
_head.Modules.Add(name.Name);
_head.Names.Add(Tuple.Create(name.Name, Span.FromBounds(name.StartIndex, name.EndIndex)));
}
}
}
if (node.Names != null) {
for (int i = 0; i < node.Names.Count; ++i) {
var dottedName = node.Names[i];
var hasAsName = (node.AsNames != null && node.AsNames.Count > i) ? node.AsNames[i] != null : false;
foreach (var name in dottedName.Names) {
if (name != null && !string.IsNullOrEmpty(name.Name)) {
if (!hasAsName) {
_head.Modules.Add(name.Name);
_head.Names.Add(Tuple.Create(name.Name, Span.FromBounds(name.StartIndex, name.EndIndex)));
} else {
// Only want to highlight this instance of the
// name, since it isn't going to be bound in the
// rest of the module.
AddSpan(Tuple.Create(name.Name, Span.FromBounds(name.StartIndex, name.EndIndex)), PythonPredefinedClassificationTypeNames.Module);
}
}
}
}
}
return base.Walk(node);
}
public override bool Walk(FromImportStatement node) {
Debug.Assert(_head != null);
if (node.Root != null) {
foreach (var name in node.Root.Names) {
if (name != null && !string.IsNullOrEmpty(name.Name)) {
AddSpan(Tuple.Create(name.Name, Span.FromBounds(name.StartIndex, name.EndIndex)), PythonPredefinedClassificationTypeNames.Module);
}
}
}
if (node.Names != null) {
foreach (var name in node.Names) {
if (name != null && !string.IsNullOrEmpty(name.Name)) {
_head.Names.Add(Tuple.Create(name.Name, Span.FromBounds(name.StartIndex, name.EndIndex)));
}
}
}
return base.Walk(node);
}
public override void PostWalk(ClassDefinition node) {
EndScope(true);
Debug.Assert(_head != null);
base.PostWalk(node);
}
public override void PostWalk(FunctionDefinition node) {
EndScope(false);
Debug.Assert(_head != null);
base.PostWalk(node);
}
public override void PostWalk(DictionaryComprehension node) {
EndScope(false);
Debug.Assert(_head != null);
base.PostWalk(node);
}
public override void PostWalk(ListComprehension node) {
EndScope(false);
Debug.Assert(_head != null);
base.PostWalk(node);
}
public override void PostWalk(GeneratorExpression node) {
EndScope(false);
Debug.Assert(_head != null);
base.PostWalk(node);
}
public override bool Walk(AwaitExpression node) {
AddSpan(Tuple.Create("", new Span(node.StartIndex, 5)), PredefinedClassificationTypeNames.Keyword);
return base.Walk(node);
}
public override bool Walk(ForStatement node) {
if (node.IsAsync) {
AddSpan(Tuple.Create("", new Span(node.StartIndex, 5)), PredefinedClassificationTypeNames.Keyword);
}
return base.Walk(node);
}
public override bool Walk(WithStatement node) {
if (node.IsAsync) {
AddSpan(Tuple.Create("", new Span(node.StartIndex, 5)), PredefinedClassificationTypeNames.Keyword);
}
return base.Walk(node);
}
}
}
| |
using System;
using System.Collections;
using System.ComponentModel;
using System.Drawing;
using System.Data;
using System.Globalization;
using System.Windows.Forms;
//using ActiveWave.RfidDb;
namespace ActiveWave.Mapper
{
public class ReaderListView : System.Windows.Forms.UserControl, IComparer
{
//public event ReaderEventHandler ReaderSelected = null;
//public event ReaderEventHandler ReaderActivated = null;
//private RfidDbController m_rfid = RfidDbController.theRfidDbController;
private int m_sortColumn = -1;
private bool m_sortReverse = false;
private Comparer m_comparer = new Comparer(CultureInfo.CurrentCulture);
//private int m_statusColumn = -1;
private System.Windows.Forms.ListView m_listView;
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.Container components = null;
#region Constructor
public ReaderListView()
{
InitializeComponent();
//m_rfid.ReaderAdded += new RfidDb.ReaderEventHandler(OnReaderAdded);
//m_rfid.ReaderChanged += new RfidDb.ReaderEventHandler(OnReaderAdded);
//m_rfid.ReaderRemoved += new RfidDb.ReaderEventHandler(OnReaderRemoved);
}
#endregion
#region Properties
/*public IRfidReader SelectedReader
{
get
{
if (m_listView.SelectedItems.Count > 0)
return m_listView.SelectedItems[0].Tag as IRfidReader;
else
return null;
}
set
{
m_listView.SelectedItems.Clear();
ListViewItem item = FindItem(value);
if (item != null) item.Selected = true;
}
}*/
#endregion
/// <summary>
/// Clean up any resources being used.
/// </summary>
protected override void Dispose( bool disposing )
{
if( disposing )
{
if(components != null)
{
components.Dispose();
}
}
base.Dispose( disposing );
}
#region Component Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.m_listView = new System.Windows.Forms.ListView();
this.SuspendLayout();
//
// m_listView
//
this.m_listView.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.m_listView.FullRowSelect = true;
this.m_listView.GridLines = true;
this.m_listView.HideSelection = false;
this.m_listView.Location = new System.Drawing.Point(0, 0);
this.m_listView.MultiSelect = false;
this.m_listView.Name = "m_listView";
this.m_listView.Size = new System.Drawing.Size(384, 152);
this.m_listView.TabIndex = 0;
this.m_listView.View = System.Windows.Forms.View.Details;
this.m_listView.ItemActivate += new System.EventHandler(this.ListView_ItemActivate);
this.m_listView.ColumnClick += new System.Windows.Forms.ColumnClickEventHandler(this.ListView_ColumnClick);
this.m_listView.SelectedIndexChanged += new System.EventHandler(this.ListView_SelectedIndexChanged);
//
// ReaderListView
//
this.Controls.Add(this.m_listView);
this.Name = "ReaderListView";
this.Size = new System.Drawing.Size(384, 150);
this.ResumeLayout(false);
}
#endregion
#region User events
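// Clicking the header of the current sort column toggles between ascending and
// descending order; clicking a different column always starts ascending.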
private void ListView_ColumnClick(object sender, System.Windows.Forms.ColumnClickEventArgs e)
{
m_sortReverse = (e.Column == m_sortColumn) ? !m_sortReverse : false;
m_sortColumn = e.Column;
m_listView.ListViewItemSorter = this;
m_listView.Sort();
m_listView.ListViewItemSorter = null;
}
private void ListView_ItemActivate(object sender, System.EventArgs e)
{
//if (ReaderActivated != null) ReaderActivated(this.SelectedReader);
}
private void ListView_SelectedIndexChanged(object sender, System.EventArgs e)
{
//if (ReaderSelected != null) ReaderSelected(this.SelectedReader);
}
#endregion
#region RFID events
/*private void OnReaderAdded(IRfidReader reader)
{
InitListView(reader);
AddItem(reader);
}
private void OnReaderRemoved(IRfidReader reader)
{
ListViewItem item = FindItem(reader);
if (item != null) item.Remove();
}*/
#endregion
#region Misc
/*private void AddItem(IRfidReader reader)
{
ListViewItem item = FindItem(reader);
if (item == null)
{
item = m_listView.Items.Add(string.Empty);
item.UseItemStyleForSubItems = false;
}
item.Tag = reader;
object[] data = reader.DisplayData.ItemArray;
for (int idx = 0; idx < data.Length; idx++)
{
if (idx >= item.SubItems.Count)
item.SubItems.Add(data[idx].ToString());
else
item.SubItems[idx].Text = data[idx].ToString();
}
UpdateStatus(item);
}
private ListViewItem FindItem(IRfidReader reader)
{
if (reader != null)
{
foreach (ListViewItem item in m_listView.Items)
{
IRfidReader r = item.Tag as IRfidReader;
if (r.Id == reader.Id) return item;
}
}
return null;
}*/
private void UpdateStatus(ListViewItem item)
{
// If there is a status column, set color based on reader status
/*if (m_statusColumn >= 0)
{
IRfidReader reader = item.Tag as IRfidReader;
Color fc, bc;
switch (reader.Status.ToLower())
{
case "online":
fc = m_listView.ForeColor;
bc = m_listView.BackColor;
break;
case "offline":
fc = Color.White;
bc = Color.Red;
break;
default:
fc = Color.Black;
bc = Color.Yellow;
break;
}
item.SubItems[m_statusColumn].ForeColor = fc;
item.SubItems[m_statusColumn].BackColor = bc;
}*/
}
/*private void InitListView(IRfidReader reader)
{
if (m_listView.Columns.Count == 0)
{
m_statusColumn = -1;
for (int idx = 0; idx < reader.DisplayData.Table.Columns.Count; idx++)
{
DataColumn col = reader.DisplayData.Table.Columns[idx];
m_listView.Columns.Add(col.Caption, 100, HorizontalAlignment.Left);
if (string.Compare(col.Caption, "status", true) == 0)
{
m_statusColumn = idx;
}
}
}
}*/
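// IComparer.Compare: the original column-based comparison (commented out above
// together with the RFID data source) is disabled, so sorting currently leaves
// the item order unchanged.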
public int Compare(object x, object y)
{
/*ListViewItem item1 = x as ListViewItem;
ListViewItem item2 = y as ListViewItem;
IRfidReader reader1 = item1.Tag as IRfidReader;
IRfidReader reader2 = item2.Tag as IRfidReader;
object data1 = reader1.DisplayData.ItemArray[m_sortColumn];
object data2 = reader2.DisplayData.ItemArray[m_sortColumn];
int rc = 0;
if (data1 == System.DBNull.Value)
rc = -1;
else if (data2 == System.DBNull.Value)
rc = 1;
else rc = m_comparer.Compare(data1, data2);
return m_sortReverse ? -rc : rc;*/
return 0;
}
#endregion
}
}
| |
using System;
using System.Collections;
using System.IO;
using System.Text;
using Raksha.Asn1.Cms;
using Raksha.Cms;
using Raksha.Asn1;
using Raksha.Asn1.Pkcs;
using Raksha.Asn1.X509;
using Raksha.OpenSsl;
using Raksha.Security.Certificates;
using Raksha.Utilities;
using Raksha.Utilities.Collections;
using Raksha.X509;
namespace Raksha.Pkix
{
/**
* An immutable sequence of certificates (a certification path).<br />
* <br />
* This is an abstract class that defines the methods common to all CertPaths.
* Subclasses can handle different kinds of certificates (X.509, PGP, etc.).<br />
* <br />
* All CertPath objects have a type, a list of Certificates, and one or more
* supported encodings. Because the CertPath class is immutable, a CertPath
* cannot change in any externally visible way after being constructed. This
* stipulation applies to all public fields and methods of this class and any
* added or overridden by subclasses.<br />
* <br />
* The type is a string that identifies the type of Certificates in the
* certification path. For each certificate cert in a certification path
* certPath, cert.getType().equals(certPath.getType()) must be true.<br />
* <br />
* The list of Certificates is an ordered List of zero or more Certificates.
* This List and all of the Certificates contained in it must be immutable.<br />
* <br />
* Each CertPath object must support one or more encodings so that the object
* can be translated into a byte array for storage or transmission to other
* parties. Preferably, these encodings should be well-documented standards
* (such as PKCS#7). One of the encodings supported by a CertPath is considered
* the default encoding. This encoding is used if no encoding is explicitly
* requested (for the {@link #getEncoded()} method, for instance).<br />
* <br />
* All CertPath objects are also Serializable. CertPath objects are resolved
* into an alternate {@link CertPathRep} object during serialization. This
* allows a CertPath object to be serialized into an equivalent representation
* regardless of its underlying implementation.<br />
* <br />
* CertPath objects can be created with a CertificateFactory or they can be
* returned by other classes, such as a CertPathBuilder.<br />
* <br />
* By convention, X.509 CertPaths (consisting of X509Certificates), are ordered
* starting with the target certificate and ending with a certificate issued by
* the trust anchor. That is, the issuer of one certificate is the subject of
* the following one. The certificate representing the
* {@link TrustAnchor TrustAnchor} should not be included in the certification
* path. Unvalidated X.509 CertPaths may not follow these conventions. PKIX
* CertPathValidators will detect any departure from these conventions that
* cause the certification path to be invalid and throw a
* CertPathValidatorException.<br />
* <br />
* <strong>Concurrent Access</strong><br />
* <br />
* All CertPath objects must be thread-safe. That is, multiple threads may
* concurrently invoke the methods defined in this class on a single CertPath
* object (or more than one) with no ill effects. This is also true for the List
* returned by CertPath.getCertificates.<br />
* <br />
* Requiring CertPath objects to be immutable and thread-safe allows them to be
* passed around to various pieces of code without worrying about coordinating
* access. Providing this thread-safety is generally not difficult, since the
* CertPath and List objects in question are immutable.
*
* @see CertificateFactory
* @see CertPathBuilder
*/
/// <summary>
/// CertPath implementation for X.509 certificates.
/// </summary>
public class PkixCertPath
// : CertPath
{
internal static readonly IList certPathEncodings;
static PkixCertPath()
{
IList encodings = Platform.CreateArrayList();
encodings.Add("PkiPath");
encodings.Add("PEM");
encodings.Add("PKCS7");
certPathEncodings = CollectionUtilities.ReadOnly(encodings);
}
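// Illustrative usage (not part of the original file): build a path from an
// ICollection of X509Certificate instances and serialize it in one of the
// supported encodings, e.g.
//     var path = new PkixCertPath(certs);
//     byte[] pem = path.GetEncoded("PEM");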
private readonly IList certificates;
/**
* @param certs
*/
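// Orders the certificates end-entity first, each followed by its issuer. If the
// list is already chained it is returned as-is; otherwise the single certificate
// that issued no other certificate in the list is taken as the end-entity and
// the chain is rebuilt by following issuer names. If no unique end-entity is
// found, or certificates are left over, the original ordering is returned.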
private static IList SortCerts(
IList certs)
{
if (certs.Count < 2)
return certs;
X509Name issuer = ((X509Certificate)certs[0]).IssuerDN;
bool okay = true;
for (int i = 1; i != certs.Count; i++)
{
X509Certificate cert = (X509Certificate)certs[i];
if (issuer.Equivalent(cert.SubjectDN, true))
{
issuer = ((X509Certificate)certs[i]).IssuerDN;
}
else
{
okay = false;
break;
}
}
if (okay)
return certs;
// find end-entity cert
IList retList = Platform.CreateArrayList(certs.Count);
IList orig = Platform.CreateArrayList(certs);
for (int i = 0; i < certs.Count; i++)
{
X509Certificate cert = (X509Certificate)certs[i];
bool found = false;
X509Name subject = cert.SubjectDN;
foreach (X509Certificate c in certs)
{
if (c.IssuerDN.Equivalent(subject, true))
{
found = true;
break;
}
}
if (!found)
{
retList.Add(cert);
certs.RemoveAt(i);
}
}
// can only have one end entity cert - something's wrong, give up.
if (retList.Count > 1)
return orig;
for (int i = 0; i != retList.Count; i++)
{
issuer = ((X509Certificate)retList[i]).IssuerDN;
for (int j = 0; j < certs.Count; j++)
{
X509Certificate c = (X509Certificate)certs[j];
if (issuer.Equivalent(c.SubjectDN, true))
{
retList.Add(c);
certs.RemoveAt(j);
break;
}
}
}
// make sure all certificates are accounted for.
if (certs.Count > 0)
return orig;
return retList;
}
/**
* Creates a CertPath of the specified type.
* This constructor is protected because most users should use
* a CertificateFactory to create CertPaths.
* @param type the standard name of the type of Certificates in this path
**/
public PkixCertPath(
ICollection certificates)
// : base("X.509")
{
this.certificates = SortCerts(Platform.CreateArrayList(certificates));
}
public PkixCertPath(
Stream inStream)
: this(inStream, "PkiPath")
{
}
/**
* Creates a CertPath of the specified type.
* This constructor is protected because most users should use
* a CertificateFactory to create CertPaths.
*
* @param type the standard name of the type of Certificates in this path
**/
public PkixCertPath(
Stream inStream,
string encoding)
// : base("X.509")
{
string upper = encoding.ToUpper();
IList certs;
try
{
if (upper.Equals("PkiPath".ToUpper()))
{
Asn1InputStream derInStream = new Asn1InputStream(inStream);
Asn1Object derObject = derInStream.ReadObject();
if (!(derObject is Asn1Sequence))
{
throw new CertificateException(
"input stream does not contain an ASN.1 SEQUENCE while reading PkiPath encoded data to load CertPath");
}
certs = Platform.CreateArrayList();
foreach (Asn1Encodable ae in (Asn1Sequence)derObject)
{
byte[] derBytes = ae.GetEncoded(Asn1Encodable.Der);
Stream certInStream = new MemoryStream(derBytes, false);
// TODO Is inserting at the front important (list will be sorted later anyway)?
certs.Insert(0, new X509CertificateParser().ReadCertificate(certInStream));
}
}
else if (upper.Equals("PKCS7") || upper.Equals("PEM"))
{
certs = Platform.CreateArrayList(new X509CertificateParser().ReadCertificates(inStream));
}
else
{
throw new CertificateException("unsupported encoding: " + encoding);
}
}
catch (IOException ex)
{
throw new CertificateException(
"IOException thrown while decoding CertPath:\n"
+ ex.ToString());
}
this.certificates = SortCerts(certs);
}
/**
* Returns an iteration of the encodings supported by this
* certification path, with the default encoding
* first. Attempts to modify the returned Iterator via its
* remove method result in an UnsupportedOperationException.
*
* @return an Iterator over the names of the supported encodings (as Strings)
**/
public virtual IEnumerable Encodings
{
get { return new EnumerableProxy(certPathEncodings); }
}
/**
* Compares this certification path for equality with the specified object.
* Two CertPaths are equal if and only if their types are equal and their
* certificate Lists (and by implication the Certificates in those Lists)
* are equal. A CertPath is never equal to an object that is not a CertPath.<br />
* <br />
* This algorithm is implemented by this method. If it is overridden, the
* behavior specified here must be maintained.
*
* @param other
* the object to test for equality with this certification path
*
* @return true if the specified object is equal to this certification path,
* false otherwise
*
* @see Object#hashCode() Object.hashCode()
*/
public override bool Equals(
object obj)
{
if (this == obj)
return true;
PkixCertPath other = obj as PkixCertPath;
if (other == null)
return false;
// if (!this.Type.Equals(other.Type))
// return false;
//return this.Certificates.Equals(other.Certificates);
// TODO Extract this to a utility class
IList thisCerts = this.Certificates;
IList otherCerts = other.Certificates;
if (thisCerts.Count != otherCerts.Count)
return false;
IEnumerator e1 = thisCerts.GetEnumerator();
IEnumerator e2 = otherCerts.GetEnumerator();
while (e1.MoveNext())
{
e2.MoveNext();
if (!Platform.Equals(e1.Current, e2.Current))
return false;
}
return true;
}
public override int GetHashCode()
{
// FIXME?
return this.Certificates.GetHashCode();
}
/**
* Returns the encoded form of this certification path, using
* the default encoding.
*
* @return the encoded bytes
* @exception CertificateEncodingException if an encoding error occurs
**/
public virtual byte[] GetEncoded()
{
foreach (object enc in Encodings)
{
if (enc is string)
{
return GetEncoded((string)enc);
}
}
return null;
}
/**
* Returns the encoded form of this certification path, using
* the specified encoding.
*
* @param encoding the name of the encoding to use
* @return the encoded bytes
* @exception CertificateEncodingException if an encoding error
* occurs or the encoding requested is not supported
*
*/
public virtual byte[] GetEncoded(
string encoding)
{
if (Platform.CompareIgnoreCase(encoding, "PkiPath") == 0)
{
Asn1EncodableVector v = new Asn1EncodableVector();
for (int i = certificates.Count - 1; i >= 0; i--)
{
v.Add(ToAsn1Object((X509Certificate) certificates[i]));
}
return ToDerEncoded(new DerSequence(v));
}
else if (Platform.CompareIgnoreCase(encoding, "PKCS7") == 0)
{
Asn1.Pkcs.ContentInfo encInfo = new Asn1.Pkcs.ContentInfo(
PkcsObjectIdentifiers.Data, null);
Asn1EncodableVector v = new Asn1EncodableVector();
for (int i = 0; i != certificates.Count; i++)
{
v.Add(ToAsn1Object((X509Certificate)certificates[i]));
}
Asn1.Pkcs.SignedData sd = new Asn1.Pkcs.SignedData(
new DerInteger(1),
new DerSet(),
encInfo,
new DerSet(v),
null,
new DerSet());
return ToDerEncoded(new Asn1.Pkcs.ContentInfo(PkcsObjectIdentifiers.SignedData, sd));
}
else if (Platform.CompareIgnoreCase(encoding, "PEM") == 0)
{
using (var bOut = new MemoryStream())
{
using (var writer = new StreamWriter(bOut))
{
var pWrt = new PemWriter(writer);
try
{
for (int i = 0; i != certificates.Count; i++)
{
pWrt.WriteObject(certificates[i]);
}
pWrt.Writer.Dispose();
}
catch (Exception)
{
throw new CertificateEncodingException("can't encode certificate for PEM encoded path");
}
}
return bOut.ToArray();
}
}
else
{
throw new CertificateEncodingException("unsupported encoding: " + encoding);
}
}
/// <summary>
/// Returns the list of certificates in this certification
/// path.
/// </summary>
public virtual IList Certificates
{
get { return CollectionUtilities.ReadOnly(certificates); }
}
/**
* Return a DERObject containing the encoded certificate.
*
* @param cert the X509Certificate object to be encoded
*
* @return the DERObject
**/
private Asn1Object ToAsn1Object(
X509Certificate cert)
{
try
{
return Asn1Object.FromByteArray(cert.GetEncoded());
}
catch (Exception e)
{
throw new CertificateEncodingException("Exception while encoding certificate", e);
}
}
private byte[] ToDerEncoded(Asn1Encodable obj)
{
try
{
return obj.GetEncoded(Asn1Encodable.Der);
}
catch (IOException e)
{
throw new CertificateEncodingException("Exception thrown", e);
}
}
}
}
| |
// Copyright 2013 The Noda Time Authors. All rights reserved.
// Use of this source code is governed by the Apache License 2.0,
// as found in the LICENSE.txt file.
using System.Globalization;
using System.Text;
using NodaTime.Annotations;
using NodaTime.Globalization;
using NodaTime.Text.Patterns;
using NodaTime.TimeZones;
using NodaTime.Utility;
using JetBrains.Annotations;
namespace NodaTime.Text
{
/// <summary>
/// Represents a pattern for parsing and formatting <see cref="ZonedDateTime"/> values.
/// </summary>
/// <threadsafety>
/// When used with a read-only <see cref="CultureInfo" />, this type is immutable and instances
/// may be shared freely between threads. We recommend only using read-only cultures for patterns, although this is
/// not currently enforced.
/// </threadsafety>
[Immutable] // Well, assuming an immutable culture...
public sealed class ZonedDateTimePattern : IPattern<ZonedDateTime>
{
internal static ZonedDateTime DefaultTemplateValue { get; } = new LocalDateTime(2000, 1, 1, 0, 0).InUtc();
/// <summary>
/// Gets a zoned local date/time pattern based on ISO-8601 (down to the second) including offset from UTC and zone ID.
/// It corresponds to a custom pattern of "yyyy'-'MM'-'dd'T'HH':'mm':'ss z '('o<g>')'" and is available
/// as the 'G' standard pattern.
/// </summary>
/// <remarks>
/// The calendar system is not formatted as part of this pattern, and it cannot be used for parsing as no time zone
/// provider is included. Call <see cref="WithZoneProvider"/> on the value of this property to obtain a
/// pattern which can be used for parsing.
/// </remarks>
/// <value>A zoned local date/time pattern based on ISO-8601 (down to the second) including offset from UTC and zone ID.</value>
public static ZonedDateTimePattern GeneralFormatOnlyIsoPattern => Patterns.GeneralFormatOnlyPatternImpl;
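// Illustrative sketch (not part of the original source): as the remarks above describe, this
// format-only pattern becomes usable for parsing once a zone provider is attached. The provider
// used below (DateTimeZoneProviders.Tzdb) and the sample text are assumptions for the example.
//
//     var pattern = ZonedDateTimePattern.GeneralFormatOnlyIsoPattern
//         .WithZoneProvider(DateTimeZoneProviders.Tzdb);
//     ParseResult<ZonedDateTime> result = pattern.Parse("2013-07-17T07:17:00 Europe/London (+01)");
//     if (result.Success) { ZonedDateTime value = result.Value; }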
// TODO(2.0): Add tests for this and other patterns from properties.
/// <summary>
/// Returns an invariant zoned date/time pattern based on ISO-8601 (down to the nanosecond) including offset from UTC and zone ID.
/// It corresponds to a custom pattern of "yyyy'-'MM'-'dd'T'HH':'mm':'ss;FFFFFFFFF z '('o<g>')'" and is available
/// as the 'F' standard pattern.
/// </summary>
/// <remarks>
/// The calendar system is not formatted as part of this pattern, and it cannot be used for parsing as no time zone
/// provider is included. Call <see cref="WithZoneProvider"/> on the value of this property to obtain a
/// pattern which can be used for parsing.
/// </remarks>
/// <value>An invariant zoned date/time pattern based on ISO-8601 (down to the nanosecond) including offset from UTC and zone ID.</value>
public static ZonedDateTimePattern ExtendedFormatOnlyIsoPattern => Patterns.ExtendedFormatOnlyPatternImpl;
private readonly IPattern<ZonedDateTime> pattern;
/// <summary>
/// Class whose existence is solely to avoid type initialization order issues, most of which stem
/// from needing NodaFormatInfo.InvariantInfo...
/// </summary>
internal static class Patterns
{
internal static readonly ZonedDateTimePattern GeneralFormatOnlyPatternImpl = CreateWithInvariantCulture("yyyy'-'MM'-'dd'T'HH':'mm':'ss z '('o<g>')'", null);
internal static readonly ZonedDateTimePattern ExtendedFormatOnlyPatternImpl = CreateWithInvariantCulture("yyyy'-'MM'-'dd'T'HH':'mm':'ss;FFFFFFFFF z '('o<g>')'", null);
internal static readonly PatternBclSupport<ZonedDateTime> BclSupport = new PatternBclSupport<ZonedDateTime>("G", fi => fi.ZonedDateTimePatternParser);
}
/// <summary>
/// Gets the pattern text for this pattern, as supplied on creation.
/// </summary>
/// <value>The pattern text for this pattern, as supplied on creation.</value>
public string PatternText { get; }
/// <summary>
/// Gets the localization information used in this pattern.
/// </summary>
internal NodaFormatInfo FormatInfo { get; }
/// <summary>
/// Gets the value used as a template for parsing: any field values unspecified
/// in the pattern are taken from the template.
/// </summary>
/// <value>The value used as a template for parsing.</value>
public ZonedDateTime TemplateValue { get; }
/// <summary>
/// Gets the resolver which is used to map local date/times to zoned date/times,
/// handling skipped and ambiguous times appropriately (where the offset isn't specified in the pattern).
/// </summary>
/// <value>The resolver which is used to map local date/times to zoned date/times.</value>
public ZoneLocalMappingResolver Resolver { get; }
/// <summary>
/// Gets the provider which is used to look up time zones when parsing a pattern
/// which contains a time zone identifier. This may be null, in which case the pattern can
/// only be used for formatting (not parsing).
/// </summary>
/// <value>The provider which is used to look up time zones when parsing a pattern
/// which contains a time zone identifier.</value>
public IDateTimeZoneProvider ZoneProvider { get; }
private ZonedDateTimePattern(string patternText, NodaFormatInfo formatInfo, ZonedDateTime templateValue,
ZoneLocalMappingResolver resolver, IDateTimeZoneProvider zoneProvider, IPattern<ZonedDateTime> pattern)
{
this.PatternText = patternText;
this.FormatInfo = formatInfo;
this.TemplateValue = templateValue;
this.Resolver = resolver;
this.ZoneProvider = zoneProvider;
this.pattern = pattern;
}
/// <summary>
/// Parses the given text value according to the rules of this pattern.
/// </summary>
/// <remarks>
/// This method never throws an exception (barring a bug in Noda Time itself). Even errors such as
/// the argument being null are wrapped in a parse result.
/// </remarks>
/// <param name="text">The text value to parse.</param>
/// <returns>The result of parsing, which may be successful or unsuccessful.</returns>
public ParseResult<ZonedDateTime> Parse(string text) => pattern.Parse(text);
/// <summary>
/// Formats the given zoned date/time as text according to the rules of this pattern.
/// </summary>
/// <param name="value">The zoned date/time to format.</param>
/// <returns>The zoned date/time formatted according to this pattern.</returns>
public string Format(ZonedDateTime value) => pattern.Format(value);
/// <summary>
/// Formats the given value as text according to the rules of this pattern,
/// appending to the given <see cref="StringBuilder"/>.
/// </summary>
/// <param name="value">The value to format.</param>
/// <param name="builder">The <c>StringBuilder</c> to append to.</param>
/// <returns>The builder passed in as <paramref name="builder"/>.</returns>
public StringBuilder AppendFormat(ZonedDateTime value, [NotNull] StringBuilder builder) => pattern.AppendFormat(value, builder);
/// <summary>
/// Creates a pattern for the given pattern text, format info, template value, mapping resolver and time zone provider.
/// </summary>
/// <param name="patternText">Pattern text to create the pattern for</param>
/// <param name="formatInfo">The format info to use in the pattern</param>
/// <param name="templateValue">Template value to use for unspecified fields</param>
/// <param name="resolver">Resolver to apply when mapping local date/time values into the zone.</param>
/// <param name="zoneProvider">Time zone provider, used when parsing text which contains a time zone identifier.</param>
/// <returns>A pattern for parsing and formatting zoned date/times.</returns>
/// <exception cref="InvalidPatternException">The pattern text was invalid.</exception>
private static ZonedDateTimePattern Create([NotNull] string patternText, [NotNull] NodaFormatInfo formatInfo,
[NotNull] ZoneLocalMappingResolver resolver, IDateTimeZoneProvider zoneProvider, ZonedDateTime templateValue)
{
Preconditions.CheckNotNull(patternText, nameof(patternText));
Preconditions.CheckNotNull(formatInfo, nameof(formatInfo));
Preconditions.CheckNotNull(resolver, nameof(resolver));
var pattern = new ZonedDateTimePatternParser(templateValue, resolver, zoneProvider).ParsePattern(patternText, formatInfo);
return new ZonedDateTimePattern(patternText, formatInfo, templateValue, resolver, zoneProvider, pattern);
}
/// <summary>
/// Creates a pattern for the given pattern text, culture, resolver, time zone provider, and template value.
/// </summary>
/// <remarks>
/// See the user guide for the available pattern text options.
/// If <paramref name="zoneProvider"/> is null, the resulting pattern can be used for formatting
/// but not parsing.
/// </remarks>
/// <param name="patternText">Pattern text to create the pattern for</param>
/// <param name="cultureInfo">The culture to use in the pattern</param>
/// <param name="resolver">Resolver to apply when mapping local date/time values into the zone.</param>
/// <param name="zoneProvider">Time zone provider, used when parsing text which contains a time zone identifier.</param>
/// <param name="templateValue">Template value to use for unspecified fields</param>
/// <returns>A pattern for parsing and formatting zoned date/times.</returns>
/// <exception cref="InvalidPatternException">The pattern text was invalid.</exception>
public static ZonedDateTimePattern Create([NotNull] string patternText, [NotNull] CultureInfo cultureInfo,
[NotNull] ZoneLocalMappingResolver resolver, IDateTimeZoneProvider zoneProvider, ZonedDateTime templateValue) =>
Create(patternText, NodaFormatInfo.GetFormatInfo(cultureInfo), resolver, zoneProvider, templateValue);
/// <summary>
/// Creates a pattern for the given pattern text and time zone provider, using a strict resolver, the invariant
/// culture, and a default template value of midnight January 1st 2000 UTC.
/// </summary>
/// <remarks>
/// The resolver is only used if the pattern text doesn't include an offset.
/// If <paramref name="zoneProvider"/> is null, the resulting pattern can be used for formatting
/// but not parsing.
/// </remarks>
/// <param name="patternText">Pattern text to create the pattern for</param>
/// <param name="zoneProvider">Time zone provider, used when parsing text which contains a time zone identifier.</param>
/// <returns>A pattern for parsing and formatting zoned date/times.</returns>
public static ZonedDateTimePattern CreateWithInvariantCulture([NotNull] string patternText, IDateTimeZoneProvider zoneProvider) =>
Create(patternText, NodaFormatInfo.InvariantInfo, Resolvers.StrictResolver, zoneProvider, DefaultTemplateValue);
/// <summary>
/// Creates a pattern for the given pattern text and time zone provider, using a strict resolver, the current
/// culture, and a default template value of midnight January 1st 2000 UTC.
/// </summary>
/// <remarks>
/// The resolver is only used if the pattern text doesn't include an offset.
/// If <paramref name="zoneProvider"/> is null, the resulting pattern can be used for formatting
/// but not parsing. Note that the current culture is captured at the time this method is called
/// - it is not captured at the point of parsing or formatting values.
/// </remarks>
/// <param name="patternText">Pattern text to create the pattern for</param>
/// <param name="zoneProvider">Time zone provider, used when parsing text which contains a time zone identifier.</param>
/// <returns>A pattern for parsing and formatting zoned date/times.</returns>
public static ZonedDateTimePattern CreateWithCurrentCulture([NotNull] string patternText, IDateTimeZoneProvider zoneProvider) =>
Create(patternText, NodaFormatInfo.CurrentInfo, Resolvers.StrictResolver, zoneProvider, DefaultTemplateValue);
/// <summary>
/// Creates a pattern for the same original localization information as this pattern, but with the specified
/// pattern text.
/// </summary>
/// <param name="patternText">The pattern text to use in the new pattern.</param>
/// <returns>A new pattern with the given pattern text.</returns>
public ZonedDateTimePattern WithPatternText([NotNull] string patternText) =>
Create(patternText, FormatInfo, Resolver, ZoneProvider, TemplateValue);
/// <summary>
/// Creates a pattern for the same original pattern text as this pattern, but with the specified
/// localization information.
/// </summary>
/// <param name="formatInfo">The localization information to use in the new pattern.</param>
/// <returns>A new pattern with the given localization information.</returns>
private ZonedDateTimePattern WithFormatInfo([NotNull] NodaFormatInfo formatInfo) =>
Create(PatternText, formatInfo, Resolver, ZoneProvider, TemplateValue);
/// <summary>
/// Creates a pattern for the same original pattern text as this pattern, but with the specified
/// culture.
/// </summary>
/// <param name="cultureInfo">The culture to use in the new pattern.</param>
/// <returns>A new pattern with the given culture.</returns>
public ZonedDateTimePattern WithCulture([NotNull] CultureInfo cultureInfo) =>
WithFormatInfo(NodaFormatInfo.GetFormatInfo(cultureInfo));
/// <summary>
/// Creates a pattern for the same original pattern text as this pattern, but with the specified
/// resolver.
/// </summary>
/// <param name="resolver">The new local mapping resolver to use.</param>
/// <returns>A new pattern with the given resolver.</returns>
public ZonedDateTimePattern WithResolver([NotNull] ZoneLocalMappingResolver resolver) =>
Resolver == resolver ? this : Create(PatternText, FormatInfo, resolver, ZoneProvider, TemplateValue);
/// <summary>
/// Creates a pattern for the same original pattern text as this pattern, but with the specified
/// time zone provider.
/// </summary>
/// <remarks>
/// If <paramref name="newZoneProvider"/> is null, the resulting pattern can be used for formatting
/// but not parsing.
/// </remarks>
/// <param name="newZoneProvider">The new time zone provider to use.</param>
/// <returns>A new pattern with the given time zone provider.</returns>
public ZonedDateTimePattern WithZoneProvider(IDateTimeZoneProvider newZoneProvider) =>
newZoneProvider == ZoneProvider ? this : Create(PatternText, FormatInfo, Resolver, newZoneProvider, TemplateValue);
/// <summary>
/// Creates a pattern like this one, but with the specified template value.
/// </summary>
/// <param name="newTemplateValue">The template value for the new pattern, used to fill in unspecified fields.</param>
/// <returns>A new pattern with the given template value.</returns>
public ZonedDateTimePattern WithTemplateValue(ZonedDateTime newTemplateValue) =>
newTemplateValue == TemplateValue ? this : Create(PatternText, FormatInfo, Resolver, ZoneProvider, newTemplateValue);
}
}
| |
using System;
using System.Web;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Text.RegularExpressions;
using gov.va.medora.mdo;
using gov.va.medora.mdo.dao;
using gov.va.medora.mdo.dao.vista;
using gov.va.medora.mdo.api;
using gov.va.medora.utils;
using gov.va.medora.mdws.dto;
namespace gov.va.medora.mdws
{
public class ConnectionLib
{
MySession mySession;
AbstractConnection myCxn;
internal const string NO_CONNECTIONS = "There are no open connections";
internal const string ALREADY_CONNECTED_TO_SITE = "You are already connected to that site";
internal const string SITE_NOT_IN_SITE_TABLE = "Site not in site table";
internal const string NO_SITECODE = "Missing sitecode";
internal const string NO_DUZ = "Missing DUZ";
internal const string NO_DFN = "Missing DFN";
internal const string NO_USER_NAME = "Missing User Name";
internal const string NO_SSN = "Missing SSN";
internal const string NO_MPI_PID = "Missing MPI Patient ID";
internal const string NO_SECURITY_PHRASE = "Missing Security Phrase";
internal const string NO_SITE_TABLE = "MDWS can't load your sites table";
internal const string NO_LOGGED_IN_USER = "No logged in user";
internal const string NO_PATIENT = "No patient has been selected";
public ConnectionLib(MySession mySession)
{
this.mySession = mySession;
}
public DataSourceArray connectToLoginSite(string sitecode)
{
DataSourceArray result = new DataSourceArray();
if (String.IsNullOrEmpty(sitecode))
{
result.fault = new FaultTO(NO_SITECODE);
}
else if (mySession.SiteTable == null)
{
result.fault = new FaultTO(NO_SITE_TABLE);
}
else if (mySession.SiteTable.getSite(sitecode) == null)
{
result.fault = new FaultTO(SITE_NOT_IN_SITE_TABLE);
}
else if (mySession.ConnectionSet != null && mySession.ConnectionSet.Count > 0 && mySession.ConnectionSet.HasConnection(sitecode))
{
result.fault = new FaultTO(ALREADY_CONNECTED_TO_SITE);
}
if (result.fault != null)
{
return result;
}
try
{
Site site = mySession.SiteTable.getSite(sitecode);
DataSource src = site.getDataSourceByModality("HIS");
AbstractDaoFactory factory = AbstractDaoFactory.getDaoFactory(AbstractDaoFactory.getConstant(src.Protocol));
AbstractConnection c = factory.getConnection(src);
c.connect();
result = new DataSourceArray(src);
result.items[0].welcomeMessage = c.getWelcomeMessage();
mySession.ConnectionSet.Add(c);
}
catch (Exception e)
{
result.fault = new FaultTO(e.Message);
}
return result;
}
public DataSourceTO connectSite(string sitecode)
{
DataSourceTO result = new DataSourceTO();
if (String.IsNullOrEmpty(sitecode))
{
result.fault = new FaultTO(NO_SITECODE);
}
else if (mySession.SiteTable == null || mySession.SiteTable.getSite(sitecode) == null)
{
result.fault = new FaultTO(NO_SITE_TABLE);
}
else if (mySession.ConnectionSet != null && mySession.ConnectionSet.Count > 0 && mySession.ConnectionSet.HasConnection(sitecode))
{
result.fault = new FaultTO(ALREADY_CONNECTED_TO_SITE);
}
if (result.fault != null)
{
return result;
}
try
{
Site site = (Site)mySession.SiteTable.Sites[sitecode];
DataSource dataSource = site.getDataSourceByModality("HIS");
AbstractDaoFactory factory = AbstractDaoFactory.getDaoFactory(AbstractDaoFactory.getConstant(dataSource.Protocol));
AbstractConnection c = factory.getConnection(dataSource);
c.connect();
result = new DataSourceTO(dataSource);
result.welcomeMessage = c.getWelcomeMessage();
mySession.ConnectionSet.Add(c);
}
catch (Exception e)
{
result.fault = new FaultTO(e.Message);
}
return result;
}
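// Illustrative usage sketch (not part of the original source; variable names are hypothetical):
// callers are expected to inspect the fault field rather than rely on exceptions.
//
//     DataSourceTO source = connectionLib.connectSite("523");
//     if (source.fault != null)
//     {
//         // e.g. NO_SITECODE, NO_SITE_TABLE, ALREADY_CONNECTED_TO_SITE, or a connection error
//     }
//     else
//     {
//         Console.WriteLine(source.welcomeMessage);
//     }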
public TaggedTextArray disconnectAll()
{
TaggedTextArray result = new TaggedTextArray();
if (mySession.ConnectionSet == null || mySession.ConnectionSet.Count == 0)
{
result.fault = new FaultTO(NO_CONNECTIONS);
}
if (result.fault != null)
{
return result;
}
try
{
IndexedHashtable t = mySession.ConnectionSet.disconnectAll();
result = new TaggedTextArray(t);
mySession.ConnectionSet.Clear();
}
catch (Exception e)
{
result.fault = new FaultTO(e.Message);
}
return result;
}
public TextTO disconnectSite()
{
TextTO result = new TextTO();
if (mySession.ConnectionSet == null || mySession.ConnectionSet.Count == 0)
{
result.fault = new FaultTO(NO_CONNECTIONS);
return result;
}
try
{
mySession.ConnectionSet.disconnectAll();
result.text = "OK";
}
catch (Exception e)
{
result.fault = new FaultTO(e.Message);
}
return result;
}
// This is still in ToolsService - need to get rid of it
public TaggedTextArray disconnectSites()
{
return disconnectAll();
}
public TaggedTextArray disconnectRemoteSites()
{
TaggedTextArray result = new TaggedTextArray();
if (mySession.ConnectionSet == null || mySession.ConnectionSet.Count == 0)
{
result.fault = new FaultTO(NO_CONNECTIONS);
}
if (result.fault != null)
{
return result;
}
try
{
IndexedHashtable t = mySession.ConnectionSet.disconnectRemotes();
result = new TaggedTextArray(t);
}
catch (Exception e)
{
result.fault = new FaultTO(e.Message);
}
return result;
}
public TaggedTextArray getVistaTimestamps()
{
TaggedTextArray result = new TaggedTextArray();
if(mySession.ConnectionSet.Count == 0 || !mySession.ConnectionSet.IsAuthorized)
{
result.fault = new FaultTO(NO_CONNECTIONS);
}
if (result.fault != null)
{
return result;
}
try
{
IndexedHashtable t = mySession.ConnectionSet.query("IToolsDao", "getTimestamp", new object[] { });
result = new TaggedTextArray(t);
}
catch (Exception e)
{
result.fault = new FaultTO(e.Message);
}
return result;
}
public TaggedText siteHasPatch(string patchId)
{
TaggedText result = new TaggedText();
if (mySession.ConnectionSet.Count == 0 || !mySession.ConnectionSet.IsAuthorized)
{
result.fault = new FaultTO(NO_CONNECTIONS);
}
else if (String.IsNullOrEmpty(patchId))
{
result.fault = new FaultTO("Missing patchId");
}
if (result.fault != null)
{
return result;
}
try
{
bool f = mySession.ConnectionSet.BaseConnection.hasPatch(patchId);
result = new TaggedText(mySession.ConnectionSet.BaseConnection.DataSource.SiteId.Id,
f ? "Y" : "N");
}
catch (Exception e)
{
result.fault = new FaultTO(e.Message);
}
return result;
}
public TaggedTextArray sitesHavePatch(string sitelist, string patchId)
{
TaggedTextArray result = new TaggedTextArray();
//if (mySession.ConnectionSet.Count == 0 || !mySession.ConnectionSet.IsAuthorized)
//{
// result.fault = new FaultTO(NO_CONNECTIONS);
//}
if (String.IsNullOrEmpty(sitelist))
{
result.fault = new FaultTO("Missing sitelist");
}
else if (String.IsNullOrEmpty(patchId))
{
result.fault = new FaultTO("Missing patchId");
}
if (result.fault != null)
{
return result;
}
try
{
AccountLib acctLib = new AccountLib(mySession);
TaggedTextArray sites = acctLib.visitSites(MdwsConstants.MY_SECURITY_PHRASE, sitelist, MdwsConstants.CPRS_CONTEXT);
ToolsApi api = new ToolsApi();
IndexedHashtable t = api.hasPatch(mySession.ConnectionSet, patchId);
result = new TaggedTextArray(t);
}
catch (Exception e)
{
result.fault = new FaultTO(e.Message);
}
finally
{
mySession.ConnectionSet.disconnectAll();
}
return result;
}
public TextArray getRpcs()
{
if (mySession == null || mySession.ConnectionSet == null || mySession.ConnectionSet.Count == 0 ||
!(mySession.ConnectionSet.BaseConnection is VistaConnection) ||
((VistaConnection)(mySession.ConnectionSet.BaseConnection)).Rpcs == null ||
((VistaConnection)(mySession.ConnectionSet.BaseConnection)).Rpcs.Count == 0)
{
TextArray result = new TextArray();
result.fault = new FaultTO("No active connections");
return result;
}
return new TextArray(((VistaConnection)(mySession.ConnectionSet.BaseConnection)).Rpcs);
}
}
}
| |
//---------------------------------------------------------------------------
//
// <copyright file="WindowsUpDown.cs" company="Microsoft">
// Copyright (C) Microsoft Corporation. All rights reserved.
// </copyright>
//
//
// Description: Win32 Up/Down proxy
//
// History:
// Jean-Francois Peyroux, alexsn - Created (in DotNet)
//
//---------------------------------------------------------------------------
using System;
using System.Windows;
using System.Collections;
using System.ComponentModel;
using System.Windows.Automation;
using System.Windows.Automation.Provider;
using System.Text;
using System.Runtime.InteropServices;
using MS.Win32;
using NativeMethodsSetLastError = MS.Internal.UIAutomationClientSideProviders.NativeMethodsSetLastError;
namespace MS.Internal.AutomationProxies
{
class WindowsUpDown : ProxyHwnd, IRangeValueProvider
{
// ------------------------------------------------------
//
// Constructors
//
// ------------------------------------------------------
#region Constructors
// Constructor for the WindowsUpDown proxy class. Calls the base class constructor.
internal WindowsUpDown (IntPtr hwnd, ProxyFragment parent, int item)
: base (hwnd, parent, item)
{
// Set the strings needed to return the properties properly.
_cControlType = ControlType.Spinner;
// support for events
_createOnEvent = new WinEventTracker.ProxyRaiseEvents (RaiseEvents);
// If the spin control is embedded in a tab control, exclude it from the content view.
_fIsContent = !IsInsideOfTab();
}
#endregion
#region Proxy Create
// Static Create method called by UIAutomation to create this proxy.
// returns null if unsuccessful
internal static IRawElementProviderSimple Create(IntPtr hwnd, int idChild, int idObject)
{
return Create(hwnd, idChild);
}
internal static IRawElementProviderSimple Create(IntPtr hwnd, int idChild)
{
// Something is wrong if idChild is not zero
if (idChild != 0)
{
System.Diagnostics.Debug.Assert (idChild == 0, "Invalid Child Id, idChild != 0");
throw new ArgumentOutOfRangeException("idChild", idChild, SR.Get(SRID.ShouldBeZero));
}
return new WindowsUpDown(hwnd, null, idChild);
}
// Called by the event tracker system.
internal static void RaiseEvents (IntPtr hwnd, int eventId, object idProp, int idObject, int idChild)
{
if(idObject == NativeMethods.OBJID_CLIENT
&& eventId == NativeMethods.EventObjectInvoke
&& idProp == InvokePattern.InvokedEvent)
{
RaiseInvokedEvent(hwnd, idObject, idChild);
}
else if (idObject != NativeMethods.OBJID_VSCROLL && idObject != NativeMethods.OBJID_HSCROLL)
{
WindowsUpDown wtv = new WindowsUpDown (hwnd, null, -1);
wtv.DispatchEvents (eventId, idProp, idObject, idChild);
}
}
private static void RaiseInvokedEvent(IntPtr hwnd, int idObject, int idChild)
{
ProxySimple button = null;
if (idChild == 1)
{
WindowsUpDown wtv = new WindowsUpDown(hwnd, null, -1);
button = wtv.CreateSpinButtonItem(SpinItem.DownArrow);
}
else if (idChild == 2)
{
WindowsUpDown wtv = new WindowsUpDown(hwnd, null, -1);
button = wtv.CreateSpinButtonItem(SpinItem.UpArrow);
}
if (button != null)
{
button.DispatchEvents(NativeMethods.EventObjectInvoke, InvokePattern.InvokedEvent, idObject, idChild);
}
}
// Creates a ProxySimple element for the specified spin button (up or down arrow).
private ProxySimple CreateSpinButtonItem (SpinItem item)
{
return new SpinButtonItem(_hwnd, IsSpinnerElement()? _parent : this, (int)item);
}
#endregion Proxy Create
//------------------------------------------------------
//
// Patterns Implementation
//
//------------------------------------------------------
#region ProxySimple Interface
// Returns a pattern interface if supported.
internal override object GetPatternProvider (AutomationPattern iid)
{
return (iid == RangeValuePattern.Pattern) ? this : null;
}
internal override object GetElementProperty(AutomationProperty idProp)
{
if (idProp == AutomationElement.IsControlElementProperty)
{
// Hide spin portion in the logical tree
// in the case when it is embedded inside of a WinForms spinner
if (WindowsFormsHelper.IsWindowsFormsControl(_hwnd) && IsWinformUpdown(_hwnd))
{
return false;
}
}
return base.GetElementProperty(idProp);
}
//Gets the localized name
internal override string LocalizedName
{
get
{
return ST.Get(STID.LocalizedNameWindowsUpDown);
}
}
#endregion
#region ProxyFragment Interface
// Returns the next sibling element in the raw hierarchy.
// Peripheral controls always have negative values.
// Returns null if no next child
internal override ProxySimple GetNextSibling (ProxySimple child)
{
// The up arrow is the only sibling following the down arrow.
if (child._item == (int)SpinItem.DownArrow)
{
return CreateSpinButtonItem (SpinItem.UpArrow);
}
return null;
}
// Returns the previous sibling element in the raw hierarchy.
// Peripheral controls always have negative values.
// Returns null if no previous child.
internal override ProxySimple GetPreviousSibling (ProxySimple child)
{
if (child._item == (int)SpinItem.UpArrow)
{
return CreateSpinButtonItem (SpinItem.DownArrow);
}
return null;
}
// Returns the first child element in the raw hierarchy.
internal override ProxySimple GetFirstChild ()
{
return CreateSpinButtonItem (SpinItem.DownArrow);
}
// Returns the last child element in the raw hierarchy.
internal override ProxySimple GetLastChild ()
{
return CreateSpinButtonItem (SpinItem.UpArrow);
}
// Returns a Proxy element corresponding to the specified screen coordinates.
internal override ProxySimple ElementProviderFromPoint (int x, int y)
{
for (SpinItem item = SpinItem.DownArrow; item <= SpinItem.UpArrow; item++)
{
NativeMethods.Win32Rect rc = new NativeMethods.Win32Rect (SpinButtonItem.GetBoundingRectangle (_hwnd, item));
if (Misc.PtInRect(ref rc, x, y))
{
return CreateSpinButtonItem (item);
}
}
return this;
}
#endregion
#region RangeValue Pattern
// Change the position of the Up/Down
void IRangeValueProvider.SetValue (double val)
{
// Make sure that the control is enabled
if (!SafeNativeMethods.IsWindowEnabled (_hwnd))
{
throw new ElementNotEnabledException();
}
if (double.IsNaN(val))
{
throw new ArgumentException(SR.Get(SRID.InvalidParameter));
}
if (val > Max)
{
throw new ArgumentOutOfRangeException("value", val, SR.Get(SRID.RangeValueMax));
}
else if (val < Min)
{
throw new ArgumentOutOfRangeException("value", val, SR.Get(SRID.RangeValueMin));
}
short newPos = Convert.ToInt16(val);
Misc.ProxySendMessage(_hwnd, NativeMethods.UDM_SETPOS, IntPtr.Zero, NativeMethods.Util.MAKELPARAM(newPos, 0));
// Scroll the buddy
Misc.ProxySendMessage(HwndBuddy(_hwnd), NativeMethods.WM_HSCROLL, NativeMethods.Util.MAKELPARAM(NativeMethods.SB_THUMBPOSITION, newPos), IntPtr.Zero);
}
// Request to get the value that this UI element is representing in a native format
double IRangeValueProvider.Value
{
get
{
return Pos;
}
}
bool IRangeValueProvider.IsReadOnly
{
get
{
return false;
}
}
double IRangeValueProvider.Maximum
{
get
{
return Max;
}
}
double IRangeValueProvider.Minimum
{
get
{
return Min;
}
}
double IRangeValueProvider.SmallChange
{
get
{
return 1.0;
}
}
double IRangeValueProvider.LargeChange
{
get
{
return Double.NaN;
}
}
#endregion RangeValuePattern
//------------------------------------------------------
//
// Internal Methods
//
//------------------------------------------------------
#region Internal Methods
internal bool IsInsideOfTab()
{
IntPtr hwndParent = NativeMethodsSetLastError.GetAncestor(_hwnd, NativeMethods.GA_PARENT);
if (hwndParent != IntPtr.Zero)
{
// Test for tab control
return Misc.ProxyGetClassName(hwndParent).Contains("SysTabControl32");
}
return false;
}
// Verifies whether the window or one of its intermediate children (in terms of the IAccessible tree) is a spin button.
internal static bool IsWinformUpdown (IntPtr hwnd)
{
Accessible acc = null;
int hr = Accessible.AccessibleObjectFromWindow(hwnd, NativeMethods.OBJID_CLIENT, ref acc);
// Verify the role
return hr == NativeMethods.S_OK && acc != null && acc.Role == AccessibleRole.SpinButton;
}
#endregion
//------------------------------------------------------
//
// Internal Fields
//
//------------------------------------------------------
#region Internal Fields
internal enum SpinItem
{
DownArrow = 0,
UpArrow = 1,
}
#endregion
//------------------------------------------------------
//
// Private Methods
//
//------------------------------------------------------
#region Private Methods
private double Pos
{
get
{
int pos = Misc.ProxySendMessageInt(_hwnd, NativeMethods.UDM_GETPOS, IntPtr.Zero, IntPtr.Zero);
// From the documentation: if successful, the high-order word is set to zero and the
// low-order word is set to the control's current position; if an error occurs, the
// high-order word is set to a nonzero value. In practice the high word is often 1 even
// when the value is fine, so ignore the error indicator and just return the position.
return (double)NativeMethods.Util.LOWORD(pos);
}
}
private static IntPtr HwndBuddy(IntPtr hwnd)
{
IntPtr hwndBuddy = Misc.ProxySendMessage(hwnd, NativeMethods.UDM_GETBUDDY, IntPtr.Zero, IntPtr.Zero);
// if no buddy window, then all notifications are sent to the parent
if (hwndBuddy == IntPtr.Zero)
{
hwndBuddy = Misc.GetParent(hwnd);
}
return hwndBuddy;
}
private bool IsSpinnerElement()
{
// If this is a Spinner UpDown Control, the buddy window should be a control with
// the class of EDIT.
IntPtr hwndBuddy = HwndBuddy(_hwnd);
return hwndBuddy != IntPtr.Zero && Misc.ProxyGetClassName(hwndBuddy).IndexOf("EDIT", StringComparison.OrdinalIgnoreCase) != -1;
}
private double Max
{
get
{
// The low-order word is the maximum position for the control, and the
// high-order word is the minimum position.
int range = Misc.ProxySendMessageInt(_hwnd, NativeMethods.UDM_GETRANGE, IntPtr.Zero, IntPtr.Zero);
int min = NativeMethods.Util.HIWORD(range);
int max = NativeMethods.Util.LOWORD(range);
return (double)(max > min ? max : min);
}
}
private double Min
{
get
{
// The low-order word is the maximum position for the control, and the
// high-order word is the minimum position.
int range = Misc.ProxySendMessageInt(_hwnd, NativeMethods.UDM_GETRANGE, IntPtr.Zero, IntPtr.Zero);
int min = NativeMethods.Util.HIWORD(range);
int max = NativeMethods.Util.LOWORD(range);
return (double)(max > min ? min : max);
}
}
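// Worked example (illustrative, not part of the original source): UDM_GETRANGE packs both
// bounds into a single 32-bit value, maximum in the low word and minimum in the high word.
// For a control whose range is min = 10, max = 100 the reply is 0x000A0064:
// LOWORD = 0x0064 = 100, HIWORD = 0x000A = 10. Because the range can also be set "reversed",
// Max and Min above return the larger and smaller of the two words respectively.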
#endregion
//------------------------------------------------------
//
// SpinButtonItem Private Class
//
//------------------------------------------------------
#region SpinButtonItem
class SpinButtonItem: ProxySimple, IInvokeProvider
{
//------------------------------------------------------
//
// Constructors
//
//------------------------------------------------------
#region Constructors
// Constructor for the SpinButtonItem class. Calls the base class constructor.
internal SpinButtonItem (IntPtr hwnd, ProxyFragment parent, int item)
: base(hwnd, parent, item)
{
// Set the strings needed to return the properties properly.
_fIsContent = false;
_cControlType = ControlType.Button;
WindowsUpDown upDownParent = parent as WindowsUpDown;
if (upDownParent != null)
{
_isInsideOfTab = upDownParent.IsInsideOfTab();
}
// The buttons are swapped on a tab control compared to the spinner.
if (_isInsideOfTab)
{
item = 1 - item;
}
_sAutomationId = _asAutomationId[item];
}
#endregion
//------------------------------------------------------
//
// Patterns Implementation
//
//------------------------------------------------------
#region ProxySimple Interface
// Returns a pattern interface if supported.
internal override object GetPatternProvider (AutomationPattern iid)
{
return iid == InvokePattern.Pattern ? this : null;
}
// Gets the bounding rectangle for this element
internal override Rect BoundingRectangle
{
get
{
return GetBoundingRectangle(_hwnd, (WindowsUpDown.SpinItem)_item);
}
}
// Process all the Logical and Raw Element Properties
internal override object GetElementProperty(AutomationProperty idProp)
{
if (idProp == AutomationElement.IsControlElementProperty)
{
IntPtr hwndTabParent = GetTabParent();
if (hwndTabParent != IntPtr.Zero)
{
return WindowsTab.IsValidControl(hwndTabParent);
}
}
return base.GetElementProperty(idProp);
}
//Gets the localized name
internal override string LocalizedName
{
get
{
int item = _item;
// The buttons are swapped on a tab control compared to the spinner.
if (_isInsideOfTab)
{
item = 1 - item;
}
return ST.Get(_asNames[item]);
}
}
#endregion ProxySimple Interface
#region Invoke Pattern
// Same as a click on one of the button Up or Down
void IInvokeProvider.Invoke ()
{
// Make sure that the control is enabled
if (!SafeNativeMethods.IsWindowEnabled(_hwnd))
{
throw new ElementNotEnabledException();
}
// NOTE: GetBoundingRectangle() swaps the buttons to retrieve the correct rectangle
// when the WS_EX_LAYOUTRTL bit is set. The WM_LBUTTONDOWN and WM_LBUTTONUP messages
// also swap the buttons on the WS_EX_LAYOUTRTL bit, so the center point of the button
// must be computed before the swap for SendMessage to hit the correct button.
int item = _item;
// If the control is horizontal and the WS_EX_LAYOUTRTL is set need to
// swap the button order
if (IsHorizontal(_hwnd) && Misc.IsLayoutRTL(_hwnd))
{
item = 1 - item;
}
// Compute the bounding rectangle of the button to be clicked.
Rect rc = GetBoundingRectangle(_hwnd, (WindowsUpDown.SpinItem)item);
NativeMethods.Win32Rect updownRect = new NativeMethods.Win32Rect();
if (!Misc.GetWindowRect(_hwnd, ref updownRect))
{
throw new InvalidOperationException(SR.Get(SRID.OperationCannotBePerformed));
}
int x = (int) rc.Left - updownRect.left + (int) rc.Width / 2;
int y = (int) rc.Top - updownRect.top + (int) rc.Height / 2;
IntPtr center = NativeMethods.Util.MAKELPARAM (x, y);
// the up/down message does not seem to work here, so fake a mouse click instead
Misc.ProxySendMessage(_hwnd, NativeMethods.WM_LBUTTONDOWN, (IntPtr)NativeMethods.MK_LBUTTON, center);
Misc.ProxySendMessage(_hwnd, NativeMethods.WM_LBUTTONUP, (IntPtr)NativeMethods.MK_LBUTTON, center);
}
#endregion Invoke Pattern
//------------------------------------------------------
//
// Internal Methods
//
//------------------------------------------------------
#region Internal Methods
static internal Rect GetBoundingRectangle(IntPtr hwnd, WindowsUpDown.SpinItem item)
{
NativeMethods.Win32Rect updownRect = new NativeMethods.Win32Rect();
if (!Misc.GetWindowRect(hwnd, ref updownRect))
{
return Rect.Empty;
}
bool fHorz = IsHorizontal(hwnd);
// If the control is horizontal and the WS_EX_LAYOUTRTL is set need to
// swap the button order
if (fHorz && Misc.IsLayoutRTL(hwnd))
{
item = item == SpinItem.DownArrow ? SpinItem.UpArrow : SpinItem.DownArrow;
}
switch (item)
{
case WindowsUpDown.SpinItem.DownArrow:
if (fHorz)
{
int width = (updownRect.right - updownRect.left);
updownRect.right = updownRect.left + width / 2;
}
else
{
int height = (updownRect.bottom - updownRect.top);
updownRect.bottom = updownRect.top + height / 2;
}
// Don't need to normalize, GetWindowRect returns screen coordinates.
return updownRect.ToRect(false);
case WindowsUpDown.SpinItem.UpArrow:
if (fHorz)
{
int width = (updownRect.right - updownRect.left);
updownRect.left = updownRect.left + width / 2;
}
else
{
int height = (updownRect.bottom - updownRect.top);
updownRect.top = updownRect.top + height / 2;
}
// Don't need to normalize, GetWindowRect returns screen coordinates.
return updownRect.ToRect(false);
}
return Rect.Empty;
}
#endregion
// ------------------------------------------------------
//
// Private Methods
//
//------------------------------------------------------
#region Private Methods
private IntPtr GetTabParent()
{
IntPtr hwndParent = NativeMethodsSetLastError.GetAncestor(_hwnd, NativeMethods.GA_PARENT);
if (hwndParent != IntPtr.Zero)
{
// Test for tab control
hwndParent = Misc.ProxyGetClassName(hwndParent).Contains("SysTabControl32") ? hwndParent : IntPtr.Zero;
}
return hwndParent;
}
private static bool IsHorizontal(IntPtr hwnd)
{
return Misc.IsBitSet(Misc.GetWindowStyle(hwnd), NativeMethods.UDS_HORZ);
}
#endregion
// ------------------------------------------------------
//
// Private Fields
//
// ------------------------------------------------------
#region Private Fields
private bool _isInsideOfTab;
private static STID [] _asNames = {
STID.LocalizedNameWindowsSpinButtonItemForward,
STID.LocalizedNameWindowsSpinButtonItemBackward
};
private static string[] _asAutomationId = new string[] {
"SmallIncrement", "SmallDecrement" // This string is a non-localizable string
};
#endregion
}
#endregion
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using BTDB.Buffer;
using BTDB.FieldHandler;
using BTDB.KVDBLayer;
using BTDB.StreamLayer;
namespace BTDB.ODBLayer
{
public class ODBDictionary<TKey, TValue> : IOrderedDictionary<TKey, TValue>, IQuerySizeDictionary<TKey>
{
readonly IInternalObjectDBTransaction _tr;
readonly IFieldHandler _keyHandler;
readonly IFieldHandler _valueHandler;
readonly Func<AbstractBufferedReader, IReaderCtx, TKey> _keyReader;
readonly Action<TKey, AbstractBufferedWriter, IWriterCtx> _keyWriter;
readonly Func<AbstractBufferedReader, IReaderCtx, TValue> _valueReader;
readonly Action<TValue, AbstractBufferedWriter, IWriterCtx> _valueWriter;
readonly IKeyValueDBTransaction _keyValueTr;
readonly KeyValueDBTransactionProtector _keyValueTrProtector;
readonly ulong _id;
byte[] _prefix;
int _count;
int _modificationCounter;
KeysCollection _keysCollection;
ValuesCollection _valuesCollection;
public ODBDictionary(IInternalObjectDBTransaction tr, ODBDictionaryConfiguration config, ulong id)
{
_tr = tr;
_keyHandler = config.KeyHandler;
_valueHandler = config.ValueHandler;
_id = id;
GeneratePrefix();
_keyReader = (Func<AbstractBufferedReader, IReaderCtx, TKey>)config.KeyReader;
_keyWriter = (Action<TKey, AbstractBufferedWriter, IWriterCtx>)config.KeyWriter;
_valueReader = (Func<AbstractBufferedReader, IReaderCtx, TValue>)config.ValueReader;
_valueWriter = (Action<TValue, AbstractBufferedWriter, IWriterCtx>)config.ValueWriter;
_keyValueTr = _tr.KeyValueDBTransaction;
_keyValueTrProtector = _tr.TransactionProtector;
_count = -1;
}
public ODBDictionary(IInternalObjectDBTransaction tr, ODBDictionaryConfiguration config)
{
_tr = tr;
_keyHandler = config.KeyHandler;
_valueHandler = config.ValueHandler;
_id = tr.AllocateDictionaryId();
GeneratePrefix();
_keyReader = (Func<AbstractBufferedReader, IReaderCtx, TKey>)config.KeyReader;
_keyWriter = (Action<TKey, AbstractBufferedWriter, IWriterCtx>)config.KeyWriter;
_valueReader = (Func<AbstractBufferedReader, IReaderCtx, TValue>)config.ValueReader;
_valueWriter = (Action<TValue, AbstractBufferedWriter, IWriterCtx>)config.ValueWriter;
_keyValueTr = _tr.KeyValueDBTransaction;
_keyValueTrProtector = _tr.TransactionProtector;
_count = -1;
}
static void throwModifiedDuringEnum()
{
throw new InvalidOperationException("DB modified during iteration");
}
public static void DoSave(IWriterCtx ctx, IDictionary<TKey, TValue> dictionary, int cfgId)
{
var dbctx = (IDBWriterCtx)ctx;
var goodDict = dictionary as ODBDictionary<TKey, TValue>;
if (goodDict == null)
{
var tr = dbctx.GetTransaction();
var id = tr.AllocateDictionaryId();
goodDict = new ODBDictionary<TKey, TValue>(tr, (ODBDictionaryConfiguration)dbctx.FindInstance(cfgId), id);
if (dictionary != null)
foreach (var pair in dictionary)
goodDict.Add(pair.Key, pair.Value);
}
ctx.Writer().WriteVUInt64(goodDict._id);
}
void GeneratePrefix()
{
int o = ObjectDB.AllDictionariesPrefix.Length;
_prefix = new byte[o + PackUnpack.LengthVUInt(_id)];
Array.Copy(ObjectDB.AllDictionariesPrefix, _prefix, o);
PackUnpack.PackVUInt(_prefix, ref o, _id);
}
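// Layout note (illustrative, not part of the original source): every key stored by this
// dictionary lives under the prefix built here, i.e.
//
//     full key = ObjectDB.AllDictionariesPrefix + VUInt(_id) + serialized TKey bytes
//
// so SetKeyPrefix(_prefix) scopes all subsequent key/value operations to this one dictionary.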
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
public void Add(KeyValuePair<TKey, TValue> item)
{
Add(item.Key, item.Value);
}
public void Clear()
{
var taken = false;
try
{
_keyValueTrProtector.Start(ref taken);
_modificationCounter++;
_keyValueTr.SetKeyPrefix(_prefix);
_keyValueTr.EraseAll();
_count = 0;
}
finally
{
_keyValueTrProtector.Stop(ref taken);
}
}
public bool Contains(KeyValuePair<TKey, TValue> item)
{
TValue value;
if (!TryGetValue(item.Key, out value)) return false;
return EqualityComparer<TValue>.Default.Equals(value, item.Value);
}
public void CopyTo(KeyValuePair<TKey, TValue>[] array, int arrayIndex)
{
if (array == null) throw new ArgumentNullException(nameof(array));
if ((arrayIndex < 0) || (arrayIndex > array.Length))
{
throw new ArgumentOutOfRangeException(nameof(arrayIndex), arrayIndex, "Needs to be nonnegative and no larger than the array length");
}
if ((array.Length - arrayIndex) < Count)
{
throw new ArgumentException("Array too small");
}
foreach (var item in this)
{
array[arrayIndex++] = item;
}
}
public bool Remove(KeyValuePair<TKey, TValue> item)
{
if (Contains(item))
{
Remove(item.Key);
return true;
}
return false;
}
public int Count
{
get
{
if (_count == -1)
{
bool taken = false;
try
{
_keyValueTrProtector.Start(ref taken);
_keyValueTr.SetKeyPrefix(_prefix);
_count = (int)Math.Min(_keyValueTr.GetKeyValueCount(), int.MaxValue);
}
finally
{
_keyValueTrProtector.Stop(ref taken);
}
}
return _count;
}
}
public bool IsReadOnly
{
get { return false; }
}
byte[] KeyToByteArray(TKey key)
{
var writer = new ByteBufferWriter();
IWriterCtx ctx = null;
if (_keyHandler.NeedsCtx()) ctx = new DBWriterCtx(_tr, writer);
_keyWriter(key, writer, ctx);
return writer.Data.ToByteArray();
}
byte[] ValueToByteArray(TValue value)
{
var writer = new ByteBufferWriter();
IWriterCtx ctx = null;
if (_valueHandler.NeedsCtx()) ctx = new DBWriterCtx(_tr, writer);
_valueWriter(value, writer, ctx);
return writer.Data.ToByteArray();
}
TKey ByteArrayToKey(byte[] data)
{
var reader = new ByteArrayReader(data);
IReaderCtx ctx = null;
if (_keyHandler.NeedsCtx()) ctx = new DBReaderCtx(_tr, reader);
return _keyReader(reader, ctx);
}
TValue ByteArrayToValue(byte[] data)
{
var reader = new ByteArrayReader(data);
IReaderCtx ctx = null;
if (_valueHandler.NeedsCtx()) ctx = new DBReaderCtx(_tr, reader);
return _valueReader(reader, ctx);
}
public bool ContainsKey(TKey key)
{
bool taken = false;
var keyBytes = KeyToByteArray(key);
try
{
_keyValueTrProtector.Start(ref taken);
_keyValueTr.SetKeyPrefix(_prefix);
return _keyValueTr.FindExactKey(keyBytes);
}
finally
{
_keyValueTrProtector.Stop(ref taken);
}
}
public void Add(TKey key, TValue value)
{
bool taken = false;
var keyBytes = KeyToByteArray(key);
var valueBytes = ValueToByteArray(value);
try
{
_keyValueTrProtector.Start(ref taken);
_modificationCounter++;
_keyValueTr.SetKeyPrefix(_prefix);
if (_keyValueTr.FindExactKey(keyBytes))
{
throw new ArgumentException("Cannot Add duplicate key to Dictionary");
}
_keyValueTr.CreateOrUpdateKeyValueUnsafe(keyBytes, valueBytes);
NotifyAdded();
}
finally
{
_keyValueTrProtector.Stop(ref taken);
}
}
public bool Remove(TKey key)
{
bool taken = false;
var keyBytes = KeyToByteArray(key);
try
{
_keyValueTrProtector.Start(ref taken);
_modificationCounter++;
_keyValueTr.SetKeyPrefix(_prefix);
bool found = _keyValueTr.FindExactKey(keyBytes);
if (found)
{
_keyValueTr.EraseCurrent();
NotifyRemoved();
}
return found;
}
finally
{
_keyValueTrProtector.Stop(ref taken);
}
}
public bool TryGetValue(TKey key, out TValue value)
{
bool taken = false;
var keyBytes = KeyToByteArray(key);
try
{
_keyValueTrProtector.Start(ref taken);
_keyValueTr.SetKeyPrefix(_prefix);
bool found = _keyValueTr.FindExactKey(keyBytes);
if (!found)
{
value = default(TValue);
return false;
}
var valueBytes = _keyValueTr.GetValueAsByteArray();
_keyValueTrProtector.Stop(ref taken);
value = ByteArrayToValue(valueBytes);
return true;
}
finally
{
_keyValueTrProtector.Stop(ref taken);
}
}
public TValue this[TKey key]
{
get
{
bool taken = false;
var keyBytes = KeyToByteArray(key);
try
{
_keyValueTrProtector.Start(ref taken);
_keyValueTr.SetKeyPrefix(_prefix);
bool found = _keyValueTr.FindExactKey(keyBytes);
if (!found)
{
throw new ArgumentException("Key not found in Dictionary");
}
var valueBytes = _keyValueTr.GetValueAsByteArray();
_keyValueTrProtector.Stop(ref taken);
return ByteArrayToValue(valueBytes);
}
finally
{
_keyValueTrProtector.Stop(ref taken);
}
}
set
{
bool taken = false;
var keyBytes = KeyToByteArray(key);
var valueBytes = ValueToByteArray(value);
try
{
_keyValueTrProtector.Start(ref taken);
_keyValueTr.SetKeyPrefix(_prefix);
if (_keyValueTr.CreateOrUpdateKeyValue(keyBytes, valueBytes))
{
_modificationCounter++;
NotifyAdded();
}
}
finally
{
_keyValueTrProtector.Stop(ref taken);
}
}
}
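// Illustrative note (not part of the original source): Add throws on an existing key, while
// the indexer setter performs an upsert via CreateOrUpdateKeyValue. A hypothetical usage:
//
//     dict.Add(1, "a"); // throws ArgumentException if key 1 already exists
//     dict[1] = "b";    // creates the key or silently overwrites the value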
void NotifyAdded()
{
if (_count != -1)
{
if (_count != int.MaxValue) _count++;
}
}
void NotifyRemoved()
{
if (_count != -1)
{
if (_count == int.MaxValue)
{
_count = (int)Math.Min(_keyValueTr.GetKeyValueCount(), int.MaxValue);
}
else
{
_count--;
}
}
}
class KeysCollection : ICollection<TKey>
{
readonly ODBDictionary<TKey, TValue> _parent;
public KeysCollection(ODBDictionary<TKey, TValue> parent)
{
_parent = parent;
}
public IEnumerator<TKey> GetEnumerator()
{
bool taken = false;
try
{
long prevProtectionCounter = 0;
int prevModificationCounter = 0;
long pos = 0;
while (true)
{
if (!taken) _parent._keyValueTrProtector.Start(ref taken);
if (pos == 0)
{
prevModificationCounter = _parent._modificationCounter;
prevProtectionCounter = _parent._keyValueTrProtector.ProtectionCounter;
_parent._keyValueTr.SetKeyPrefix(_parent._prefix);
if (!_parent._keyValueTr.FindFirstKey()) break;
}
else
{
if (_parent._keyValueTrProtector.WasInterupted(prevProtectionCounter))
{
if (prevModificationCounter != _parent._modificationCounter)
throwModifiedDuringEnum();
_parent._keyValueTr.SetKeyPrefix(_parent._prefix);
if (!_parent._keyValueTr.SetKeyIndex(pos)) break;
}
else
{
if (!_parent._keyValueTr.FindNextKey()) break;
}
}
var keyBytes = _parent._keyValueTr.GetKeyAsByteArray();
_parent._keyValueTrProtector.Stop(ref taken);
var key = _parent.ByteArrayToKey(keyBytes);
yield return key;
pos++;
}
}
finally
{
_parent._keyValueTrProtector.Stop(ref taken);
}
}
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
public void Add(TKey item)
{
_parent.Add(item, default(TValue));
}
public void Clear()
{
_parent.Clear();
}
public bool Contains(TKey item)
{
return _parent.ContainsKey(item);
}
public void CopyTo(TKey[] array, int arrayIndex)
{
if (array == null) throw new ArgumentNullException(nameof(array));
if ((arrayIndex < 0) || (arrayIndex > array.Length))
{
throw new ArgumentOutOfRangeException(nameof(arrayIndex), arrayIndex, "Needs to be nonnegative and no larger than the array length");
}
if ((array.Length - arrayIndex) < Count)
{
throw new ArgumentException("Array too small");
}
foreach (var item in this)
{
array[arrayIndex++] = item;
}
}
public bool Remove(TKey item)
{
return _parent.Remove(item);
}
public int Count
{
get { return _parent.Count; }
}
public bool IsReadOnly
{
get { return false; }
}
}
public ICollection<TKey> Keys
{
get { return _keysCollection ?? (_keysCollection = new KeysCollection(this)); }
}
class ValuesCollection : ICollection<TValue>
{
readonly ODBDictionary<TKey, TValue> _parent;
public ValuesCollection(ODBDictionary<TKey, TValue> parent)
{
_parent = parent;
}
public IEnumerator<TValue> GetEnumerator()
{
bool taken = false;
try
{
long prevProtectionCounter = 0;
int prevModificationCounter = 0;
long pos = 0;
while (true)
{
if (!taken) _parent._keyValueTrProtector.Start(ref taken);
if (pos == 0)
{
prevModificationCounter = _parent._modificationCounter;
prevProtectionCounter = _parent._keyValueTrProtector.ProtectionCounter;
_parent._keyValueTr.SetKeyPrefix(_parent._prefix);
if (!_parent._keyValueTr.FindFirstKey()) break;
}
else
{
if (_parent._keyValueTrProtector.WasInterupted(prevProtectionCounter))
{
if (prevModificationCounter != _parent._modificationCounter)
throwModifiedDuringEnum();
_parent._keyValueTr.SetKeyPrefix(_parent._prefix);
if (!_parent._keyValueTr.SetKeyIndex(pos)) break;
}
else
{
if (!_parent._keyValueTr.FindNextKey()) break;
}
}
var valueBytes = _parent._keyValueTr.GetValueAsByteArray();
_parent._keyValueTrProtector.Stop(ref taken);
var value = _parent.ByteArrayToValue(valueBytes);
yield return value;
pos++;
}
}
finally
{
_parent._keyValueTrProtector.Stop(ref taken);
}
}
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
public void Add(TValue item)
{
throw new NotSupportedException();
}
public void Clear()
{
_parent.Clear();
}
public bool Contains(TValue item)
{
return this.Any(i => EqualityComparer<TValue>.Default.Equals(i, item));
}
public void CopyTo(TValue[] array, int arrayIndex)
{
if (array == null) throw new ArgumentNullException(nameof(array));
if ((arrayIndex < 0) || (arrayIndex > array.Length))
{
throw new ArgumentOutOfRangeException(nameof(arrayIndex), arrayIndex, "Needs to be nonnegative and no larger than the array length");
}
if ((array.Length - arrayIndex) < Count)
{
throw new ArgumentException("Array too small");
}
foreach (var item in this)
{
array[arrayIndex++] = item;
}
}
public bool Remove(TValue item)
{
throw new NotSupportedException();
}
public int Count
{
get { return _parent.Count; }
}
public bool IsReadOnly
{
get { return true; }
}
}
public ICollection<TValue> Values
{
get { return _valuesCollection ?? (_valuesCollection = new ValuesCollection(this)); }
}
public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator()
{
var taken = false;
try
{
long prevProtectionCounter = 0;
int prevModificationCounter = 0;
long pos = 0;
while (true)
{
if (!taken) _keyValueTrProtector.Start(ref taken);
if (pos == 0)
{
prevModificationCounter = _modificationCounter;
prevProtectionCounter = _keyValueTrProtector.ProtectionCounter;
_keyValueTr.SetKeyPrefix(_prefix);
if (!_keyValueTr.FindFirstKey()) break;
}
else
{
if (_keyValueTrProtector.WasInterupted(prevProtectionCounter))
{
if (prevModificationCounter != _modificationCounter)
throwModifiedDuringEnum();
_keyValueTr.SetKeyPrefix(_prefix);
if (!_keyValueTr.SetKeyIndex(pos)) break;
}
else
{
if (!_keyValueTr.FindNextKey()) break;
}
}
var keyBytes = _keyValueTr.GetKeyAsByteArray();
var valueBytes = _keyValueTr.GetValueAsByteArray();
_keyValueTrProtector.Stop(ref taken);
var key = ByteArrayToKey(keyBytes);
var value = ByteArrayToValue(valueBytes);
yield return new KeyValuePair<TKey, TValue>(key, value);
pos++;
}
}
finally
{
_keyValueTrProtector.Stop(ref taken);
}
}
public IEnumerable<KeyValuePair<TKey, TValue>> GetReverseEnumerator()
{
var taken = false;
try
{
long prevProtectionCounter = 0;
int prevModificationCounter = 0;
long pos = long.MaxValue;
while (true)
{
if (!taken) _keyValueTrProtector.Start(ref taken);
if (pos == long.MaxValue)
{
prevModificationCounter = _modificationCounter;
prevProtectionCounter = _keyValueTrProtector.ProtectionCounter;
_keyValueTr.SetKeyPrefix(_prefix);
if (!_keyValueTr.FindLastKey()) break;
pos = _keyValueTr.GetKeyIndex();
}
else
{
if (_keyValueTrProtector.WasInterupted(prevProtectionCounter))
{
if (prevModificationCounter != _modificationCounter)
throwModifiedDuringEnum();
_keyValueTr.SetKeyPrefix(_prefix);
if (!_keyValueTr.SetKeyIndex(pos)) break;
}
else
{
if (!_keyValueTr.FindPreviousKey()) break;
}
}
var keyBytes = _keyValueTr.GetKeyAsByteArray();
var valueBytes = _keyValueTr.GetValueAsByteArray();
_keyValueTrProtector.Stop(ref taken);
var key = ByteArrayToKey(keyBytes);
var value = ByteArrayToValue(valueBytes);
yield return new KeyValuePair<TKey, TValue>(key, value);
pos--;
}
}
finally
{
_keyValueTrProtector.Stop(ref taken);
}
}
public IEnumerable<KeyValuePair<TKey, TValue>> GetIncreasingEnumerator(TKey start)
{
var startKeyBytes = KeyToByteArray(start);
var taken = false;
try
{
long prevProtectionCounter = 0;
int prevModificationCounter = 0;
long pos = 0;
while (true)
{
if (!taken) _keyValueTrProtector.Start(ref taken);
if (pos == 0)
{
prevModificationCounter = _modificationCounter;
prevProtectionCounter = _keyValueTrProtector.ProtectionCounter;
_keyValueTr.SetKeyPrefix(_prefix);
bool startOk;
switch (_keyValueTr.Find(ByteBuffer.NewSync(startKeyBytes)))
{
case FindResult.Exact:
case FindResult.Next:
startOk = true;
break;
case FindResult.Previous:
startOk = _keyValueTr.FindNextKey();
break;
case FindResult.NotFound:
startOk = false;
break;
default:
throw new ArgumentOutOfRangeException();
}
if (!startOk) break;
pos = _keyValueTr.GetKeyIndex();
}
else
{
if (_keyValueTrProtector.WasInterupted(prevProtectionCounter))
{
if (prevModificationCounter != _modificationCounter)
throwModifiedDuringEnum();
_keyValueTr.SetKeyPrefix(_prefix);
if (!_keyValueTr.SetKeyIndex(pos)) break;
}
else
{
if (!_keyValueTr.FindNextKey()) break;
}
}
var keyBytes = _keyValueTr.GetKeyAsByteArray();
var valueBytes = _keyValueTr.GetValueAsByteArray();
_keyValueTrProtector.Stop(ref taken);
var key = ByteArrayToKey(keyBytes);
var value = ByteArrayToValue(valueBytes);
yield return new KeyValuePair<TKey, TValue>(key, value);
pos++;
}
}
finally
{
_keyValueTrProtector.Stop(ref taken);
}
}
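// Illustrative usage sketch (not part of the original source; names are hypothetical): the
// ordered enumerators above can be combined with LINQ to scan a key range lazily.
//
//     foreach (var pair in orders.GetIncreasingEnumerator(startKey)
//         .TakeWhile(p => comparer.Compare(p.Key, endKey) <= 0))
//     {
//         Process(pair.Key, pair.Value);
//     }
//
// If another operation interrupts the transaction protector, enumeration resumes from the
// remembered key index; if the dictionary itself was modified meanwhile, it throws.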
public IEnumerable<KeyValuePair<TKey, TValue>> GetDecreasingEnumerator(TKey start)
{
var startKeyBytes = KeyToByteArray(start);
var taken = false;
try
{
long prevProtectionCounter = 0;
int prevModificationCounter = 0;
long pos = long.MaxValue;
while (true)
{
if (!taken) _keyValueTrProtector.Start(ref taken);
if (pos == long.MaxValue)
{
prevModificationCounter = _modificationCounter;
prevProtectionCounter = _keyValueTrProtector.ProtectionCounter;
_keyValueTr.SetKeyPrefix(_prefix);
bool startOk;
switch (_keyValueTr.Find(ByteBuffer.NewSync(startKeyBytes)))
{
case FindResult.Exact:
case FindResult.Previous:
startOk = true;
break;
case FindResult.Next:
startOk = _keyValueTr.FindPreviousKey();
break;
case FindResult.NotFound:
startOk = false;
break;
default:
throw new ArgumentOutOfRangeException();
}
if (!startOk) break;
pos = _keyValueTr.GetKeyIndex();
}
else
{
if (_keyValueTrProtector.WasInterupted(prevProtectionCounter))
{
if (prevModificationCounter != _modificationCounter)
throwModifiedDuringEnum();
_keyValueTr.SetKeyPrefix(_prefix);
if (!_keyValueTr.SetKeyIndex(pos)) break;
}
else
{
if (!_keyValueTr.FindPreviousKey()) break;
}
}
var keyBytes = _keyValueTr.GetKeyAsByteArray();
var valueBytes = _keyValueTr.GetValueAsByteArray();
_keyValueTrProtector.Stop(ref taken);
var key = ByteArrayToKey(keyBytes);
var value = ByteArrayToValue(valueBytes);
yield return new KeyValuePair<TKey, TValue>(key, value);
pos--;
}
}
finally
{
_keyValueTrProtector.Stop(ref taken);
}
}
public long RemoveRange(TKey start, bool includeStart, TKey end, bool includeEnd)
{
var taken = false;
var startKeyBytes = KeyToByteArray(start);
var endKeyBytes = KeyToByteArray(end);
try
{
_keyValueTrProtector.Start(ref taken);
_modificationCounter++;
_keyValueTr.SetKeyPrefix(_prefix);
var result = _keyValueTr.Find(ByteBuffer.NewAsync(startKeyBytes));
if (result == FindResult.NotFound) return 0;
var startIndex = _keyValueTr.GetKeyIndex();
if (result == FindResult.Exact)
{
if (!includeStart) startIndex++;
}
else if (result == FindResult.Previous)
{
startIndex++;
}
result = _keyValueTr.Find(ByteBuffer.NewAsync(endKeyBytes));
var endIndex = _keyValueTr.GetKeyIndex();
if (result == FindResult.Exact)
{
if (!includeEnd) endIndex--;
}
else if (result == FindResult.Next)
{
endIndex--;
}
_keyValueTr.EraseRange(startIndex, endIndex);
_keyValueTrProtector.Stop(ref taken);
return Math.Max(0, endIndex - startIndex + 1);
}
finally
{
_keyValueTrProtector.Stop(ref taken);
}
}
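// Illustrative note (not part of the original source): RemoveRange resolves both bounds to
// key indexes and erases that index range. With keys { 1, 2, 3, 4, 5 } a hypothetical call
//
//     dict.RemoveRange(start: 2, includeStart: true, end: 4, includeEnd: false);
//
// erases keys 2 and 3 and returns 2.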
public IEnumerable<KeyValuePair<uint, uint>> QuerySizeEnumerator()
{
var taken = false;
try
{
long prevProtectionCounter = 0;
int prevModificationCounter = 0;
long pos = 0;
while (true)
{
if (!taken) _keyValueTrProtector.Start(ref taken);
if (pos == 0)
{
prevModificationCounter = _modificationCounter;
prevProtectionCounter = _keyValueTrProtector.ProtectionCounter;
_keyValueTr.SetKeyPrefix(_prefix);
if (!_keyValueTr.FindFirstKey()) break;
}
else
{
if (_keyValueTrProtector.WasInterupted(prevProtectionCounter))
{
if (prevModificationCounter != _modificationCounter)
throwModifiedDuringEnum();
_keyValueTr.SetKeyPrefix(_prefix);
if (!_keyValueTr.SetKeyIndex(pos)) break;
}
else
{
if (!_keyValueTr.FindNextKey()) break;
}
}
var size = _keyValueTr.GetStorageSizeOfCurrentKey();
_keyValueTrProtector.Stop(ref taken);
yield return size;
pos++;
}
}
finally
{
_keyValueTrProtector.Stop(ref taken);
}
}
public KeyValuePair<uint, uint> QuerySizeByKey(TKey key)
{
bool taken = false;
var keyBytes = KeyToByteArray(key);
try
{
_keyValueTrProtector.Start(ref taken);
_keyValueTr.SetKeyPrefix(_prefix);
bool found = _keyValueTr.FindExactKey(keyBytes);
if (!found)
{
throw new ArgumentException("Key not found in Dictionary");
}
var size = _keyValueTr.GetStorageSizeOfCurrentKey();
_keyValueTrProtector.Stop(ref taken);
return size;
}
finally
{
_keyValueTrProtector.Stop(ref taken);
}
}
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using Microsoft.Azure.Commands.Compute.Automation.Models;
using Microsoft.Azure.Management.Compute;
using Microsoft.Azure.Management.Compute.Models;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Management.Automation;
using System.Reflection;
namespace Microsoft.Azure.Commands.Compute.Automation
{
public abstract class ComputeAutomationBaseCmdlet : Microsoft.Azure.Commands.Compute.ComputeClientBaseCmdlet
{
public override void ExecuteCmdlet()
{
base.ExecuteCmdlet();
ComputeAutomationAutoMapperProfile.Initialize();
}
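// Pairs each value in 'objects' with the corresponding entry in 'names' and wraps them
// as PSArgument instances; the two arrays are expected to be of the same length.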
protected static PSArgument[] ConvertFromObjectsToArguments(string[] names, object[] objects)
{
var arguments = new PSArgument[objects.Length];
for (int index = 0; index < objects.Length; index++)
{
arguments[index] = new PSArgument
{
Name = names[index],
Type = objects[index].GetType(),
Value = objects[index]
};
}
return arguments;
}
protected static object[] ConvertFromArgumentsToObjects(object[] arguments)
{
if (arguments == null)
{
return null;
}
var objects = new object[arguments.Length];
for (int index = 0; index < arguments.Length; index++)
{
if (arguments[index] is PSArgument)
{
objects[index] = ((PSArgument)arguments[index]).Value;
}
else
{
objects[index] = arguments[index];
}
}
return objects;
}
public IVirtualMachineScaleSetsOperations VirtualMachineScaleSetsClient
{
get
{
return ComputeClient.ComputeManagementClient.VirtualMachineScaleSets;
}
}
public IVirtualMachineScaleSetVMsOperations VirtualMachineScaleSetVMsClient
{
get
{
return ComputeClient.ComputeManagementClient.VirtualMachineScaleSetVMs;
}
}
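// Renders an object as an aligned "Name : Value" listing. GetTabLength walks the
// object's public properties recursively, collecting (name, value, depth) tuples and
// the column width needed for alignment; FormatObject then prints each tuple indented
// by its depth.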
public static string FormatObject(Object obj)
{
var objType = obj.GetType();
System.Reflection.PropertyInfo[] pros = objType.GetProperties();
string result = "\n";
var resultTuples = new List<Tuple<string, string, int>>();
var totalTab = GetTabLength(obj, 0, 0, resultTuples) + 1;
foreach (var t in resultTuples)
{
string preTab = new string(' ', t.Item3 * 2);
string postTab = new string(' ', totalTab - t.Item3 * 2 - t.Item1.Length);
result += preTab + t.Item1 + postTab + ": " + t.Item2 + "\n";
}
return result;
}
private static int GetTabLength(Object obj, int max, int depth, List<Tuple<string, string, int>> tupleList)
{
var objType = obj.GetType();
var propertySet = new List<PropertyInfo>();
if (objType.BaseType != null)
{
foreach (var property in objType.BaseType.GetProperties())
{
propertySet.Add(property);
}
}
foreach (var property in objType.GetProperties(BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public))
{
propertySet.Add(property);
}
foreach (var property in propertySet)
{
Object childObject = property.GetValue(obj, null);
var isJObject = childObject as Newtonsoft.Json.Linq.JObject;
if (isJObject != null)
{
var objStringValue = Newtonsoft.Json.JsonConvert.SerializeObject(childObject);
int i = objStringValue.IndexOf("xmlCfg");
if (i >= 0)
{
var xmlCfgString = objStringValue.Substring(i + 7);
int start = xmlCfgString.IndexOf('"');
int end = xmlCfgString.IndexOf('"', start + 1);
xmlCfgString = xmlCfgString.Substring(start + 1, end - start - 1);
objStringValue = objStringValue.Replace(xmlCfgString, "...");
}
tupleList.Add(MakeTuple(property.Name, objStringValue, depth));
max = Math.Max(max, depth * 2 + property.Name.Length);
}
else
{
var elem = childObject as IList;
if (elem != null)
{
if (elem.Count != 0)
{
max = Math.Max(max, depth * 2 + property.Name.Length + 4);
for (int i = 0; i < elem.Count; i++)
{
Type propType = elem[i].GetType();
if (propType.IsSerializable)
{
tupleList.Add(MakeTuple(property.Name + "[" + i + "]", elem[i].ToString(), depth));
}
else
{
tupleList.Add(MakeTuple(property.Name + "[" + i + "]", "", depth));
max = Math.Max(max, GetTabLength((Object)elem[i], max, depth + 1, tupleList));
}
}
}
}
else
{
if (property.PropertyType.IsSerializable)
{
if (childObject != null)
{
tupleList.Add(MakeTuple(property.Name, childObject.ToString(), depth));
max = Math.Max(max, depth * 2 + property.Name.Length);
}
}
else
{
var isDictionary = childObject as IDictionary;
if (isDictionary != null)
{
tupleList.Add(MakeTuple(property.Name, Newtonsoft.Json.JsonConvert.SerializeObject(childObject), depth));
max = Math.Max(max, depth * 2 + property.Name.Length);
}
else if (childObject != null)
{
tupleList.Add(MakeTuple(property.Name, "", depth));
max = Math.Max(max, GetTabLength(childObject, max, depth + 1, tupleList));
}
}
}
}
}
return max;
}
private static Tuple<string, string, int> MakeTuple(string key, string value, int depth)
{
return new Tuple<string, string, int>(key, value, depth);
}
}
}
| |
/* ****************************************************************************
*
* Copyright (c) Microsoft Corporation.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0. A
* copy of the license can be found in the License.html file at the root of this distribution. If
* you cannot locate the Apache License, Version 2.0, please send an email to
* [email protected]. By using this source code in any fashion, you are agreeing to be bound
* by the terms of the Apache License, Version 2.0.
*
* You must not remove this notice, or any other, from this software.
*
* ***************************************************************************/
using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Runtime.InteropServices;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.Shell;
using Microsoft.VisualStudio.Shell.Interop;
namespace Microsoft.VisualStudio.Project
{
[CLSCompliant(false), ComVisible(true)]
public class ProjectReferenceNode : ReferenceNode
{
#region fields
/// <summary>
/// The name of the assembly this reference represents
/// </summary>
private Guid referencedProjectGuid;
private string referencedProjectName = String.Empty;
private string referencedProjectRelativePath = String.Empty;
private string referencedProjectFullPath = String.Empty;
private BuildDependency buildDependency;
/// <summary>
/// This is a reference to the automation object for the referenced project.
/// </summary>
private EnvDTE.Project referencedProject;
/// <summary>
/// This state is controlled by the solution events.
/// The state is set to false by OnBeforeUnloadProject.
/// The state is set to true by OnBeforeCloseProject event.
/// </summary>
private bool canRemoveReference = true;
/// <summary>
/// Allows a solution listener to update the state of a dangling reference.
/// It is set in OnBeforeUnloadProject, the node is then invalidated, and afterwards it is reset to false.
/// </summary>
private bool isNodeValid;
#endregion
#region properties
public override string Url
{
get
{
return this.referencedProjectFullPath;
}
}
public override string Caption
{
get
{
return this.referencedProjectName;
}
}
internal Guid ReferencedProjectGuid
{
get
{
return this.referencedProjectGuid;
}
}
/// <summary>
/// Possibility to shortcut and set the dangling project reference icon.
/// It is usually manipulated by solution listeners who handle reference updates.
/// </summary>
internal protected bool IsNodeValid
{
get
{
return this.isNodeValid;
}
set
{
this.isNodeValid = value;
}
}
/// <summary>
/// Controls whether this reference can be removed or not. Think of the project unload scenario where the project reference should not be deleted.
/// </summary>
internal bool CanRemoveReference
{
get
{
return this.canRemoveReference;
}
set
{
this.canRemoveReference = value;
}
}
internal string ReferencedProjectName
{
get { return this.referencedProjectName; }
}
/// <summary>
/// Gets the automation object for the referenced project.
/// </summary>
internal EnvDTE.Project ReferencedProjectObject
{
get
{
// If the referenced project is null then re-read.
if (this.referencedProject == null)
{
// Search for the project in the collection of the projects in the
// current solution.
EnvDTE.DTE dte = (EnvDTE.DTE)this.ProjectMgr.GetService(typeof(EnvDTE.DTE));
if ((null == dte) || (null == dte.Solution))
{
return null;
}
foreach (EnvDTE.Project prj in dte.Solution.Projects)
{
//Skip this project if it is an unmodeled project (unloaded)
if (string.Compare(EnvDTE.Constants.vsProjectKindUnmodeled, prj.Kind, StringComparison.OrdinalIgnoreCase) == 0)
{
continue;
}
// Get the full path of the current project.
EnvDTE.Property pathProperty = null;
try
{
if (prj.Properties == null)
{
continue;
}
pathProperty = prj.Properties.Item("FullPath");
if (null == pathProperty)
{
// The full path should always be available, but if this is not the
// case then we have to skip it.
continue;
}
}
catch (ArgumentException)
{
continue;
}
string prjPath = pathProperty.Value.ToString();
EnvDTE.Property fileNameProperty = null;
// Get the name of the project file.
try
{
fileNameProperty = prj.Properties.Item("FileName");
if (null == fileNameProperty)
{
// Again, this should never be the case, but we handle it anyway.
continue;
}
}
catch (ArgumentException)
{
continue;
}
prjPath = System.IO.Path.Combine(prjPath, fileNameProperty.Value.ToString());
// If the full path of this project is the same as the one of this
// reference, then we have found the right project.
if (NativeMethods.IsSamePath(prjPath, referencedProjectFullPath))
{
this.referencedProject = prj;
break;
}
}
}
return this.referencedProject;
}
set
{
this.referencedProject = value;
}
}
/// <summary>
/// Gets the full path to the assembly generated by this project.
/// </summary>
internal string ReferencedProjectOutputPath
{
get
{
// Make sure that the referenced project implements the automation object.
if(null == this.ReferencedProjectObject)
{
return null;
}
// Get the configuration manager from the project.
EnvDTE.ConfigurationManager confManager = this.ReferencedProjectObject.ConfigurationManager;
if(null == confManager)
{
return null;
}
// Get the active configuration.
EnvDTE.Configuration config = confManager.ActiveConfiguration;
if(null == config)
{
return null;
}
// Get the output path for the current configuration.
EnvDTE.Property outputPathProperty = config.Properties.Item("OutputPath");
if (null == outputPathProperty || outputPathProperty.Value == null)
{
return null;
}
string outputPath = outputPathProperty.Value.ToString();
// Usually the output path is relative to the project path, but it is possible
// to set it as an absolute path. If it is not absolute, then evaluate its value
// based on the project directory.
if(!System.IO.Path.IsPathRooted(outputPath))
{
string projectDir = System.IO.Path.GetDirectoryName(referencedProjectFullPath);
outputPath = System.IO.Path.Combine(projectDir, outputPath);
}
// Now get the name of the assembly from the project.
// Some project systems throw if the property does not exist. We expect an ArgumentException.
EnvDTE.Property assemblyNameProperty = null;
try
{
assemblyNameProperty = this.ReferencedProjectObject.Properties.Item("OutputFileName");
}
catch(ArgumentException)
{
}
if(null == assemblyNameProperty)
{
return null;
}
// build the full path adding the name of the assembly to the output path.
outputPath = System.IO.Path.Combine(outputPath, assemblyNameProperty.Value.ToString());
return outputPath;
}
}
private Automation.OAProjectReference projectReference;
internal override object Object
{
get
{
if(null == projectReference)
{
projectReference = new Automation.OAProjectReference(this);
}
return projectReference;
}
}
#endregion
#region ctors
/// <summary>
/// Constructor for the ReferenceNode. It is called when the project is reloaded, when the project element representing the reference exists.
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2234:PassSystemUriObjectsInsteadOfStrings")]
public ProjectReferenceNode(ProjectNode root, ProjectElement element)
: base(root, element)
{
this.referencedProjectRelativePath = this.ItemNode.GetMetadata(ProjectFileConstants.Include);
Debug.Assert(!String.IsNullOrEmpty(this.referencedProjectRelativePath), "Could not retrieve referenced project path from project file");
string guidString = this.ItemNode.GetMetadata(ProjectFileConstants.Project);
// Continue even if project settings cannot be read.
try
{
this.referencedProjectGuid = new Guid(guidString);
this.buildDependency = new BuildDependency(this.ProjectMgr, this.referencedProjectGuid);
this.ProjectMgr.AddBuildDependency(this.buildDependency);
}
finally
{
Debug.Assert(this.referencedProjectGuid != Guid.Empty, "Could not retrieve referenced project guid from project file");
this.referencedProjectName = this.ItemNode.GetMetadata(ProjectFileConstants.Name);
Debug.Assert(!String.IsNullOrEmpty(this.referencedProjectName), "Could not retrieve referenced project name from project file");
}
Uri uri = new Uri(this.ProjectMgr.BaseURI.Uri, this.referencedProjectRelativePath);
if(uri != null)
{
this.referencedProjectFullPath = Microsoft.VisualStudio.Shell.Url.Unescape(uri.LocalPath, true);
}
}
/// <summary>
/// constructor for the ProjectReferenceNode
/// </summary>
public ProjectReferenceNode(ProjectNode root, string referencedProjectName, string projectPath, string projectReference)
: base(root)
{
Debug.Assert(root != null && !String.IsNullOrEmpty(referencedProjectName) && !String.IsNullOrEmpty(projectReference)
&& !String.IsNullOrEmpty(projectPath), "Can not add a reference because the input for adding one is invalid.");
if (projectReference == null)
{
throw new ArgumentNullException("projectReference");
}
this.referencedProjectName = referencedProjectName;
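// The projectReference string is expected to have the form
// "<project guid>|<project file name>|..."; extract the guid and the file name from
// the first two '|'-separated segments.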
int indexOfSeparator = projectReference.IndexOf('|');
string fileName = String.Empty;
// Unfortunately we cannot use the path part of the projectReference string since it does not resolve relative paths correctly.
if(indexOfSeparator != -1)
{
string projectGuid = projectReference.Substring(0, indexOfSeparator);
this.referencedProjectGuid = new Guid(projectGuid);
if(indexOfSeparator + 1 < projectReference.Length)
{
string remaining = projectReference.Substring(indexOfSeparator + 1);
indexOfSeparator = remaining.IndexOf('|');
if(indexOfSeparator == -1)
{
fileName = remaining;
}
else
{
fileName = remaining.Substring(0, indexOfSeparator);
}
}
}
Debug.Assert(!String.IsNullOrEmpty(fileName), "Can not add a project reference because the input for adding one is invalid.");
// Did we get just a file or a relative path?
Uri uri = new Uri(projectPath);
string referenceDir = PackageUtilities.GetPathDistance(this.ProjectMgr.BaseURI.Uri, uri);
Debug.Assert(!String.IsNullOrEmpty(referenceDir), "Can not add a project reference because the input for adding one is invalid.");
string justTheFileName = Path.GetFileName(fileName);
this.referencedProjectRelativePath = Path.Combine(referenceDir, justTheFileName);
this.referencedProjectFullPath = Path.Combine(projectPath, justTheFileName);
this.buildDependency = new BuildDependency(this.ProjectMgr, this.referencedProjectGuid);
}
#endregion
#region methods
protected override NodeProperties CreatePropertiesObject()
{
return new ProjectReferencesProperties(this);
}
/// <summary>
/// Adds the node to the hierarchy and then updates the build dependency list.
/// </summary>
public override void AddReference()
{
if(this.ProjectMgr == null)
{
return;
}
base.AddReference();
this.ProjectMgr.AddBuildDependency(this.buildDependency);
return;
}
/// <summary>
/// Overridden method. The method updates the build dependency list before removing the node from the hierarchy.
/// </summary>
public override void Remove(bool removeFromStorage)
{
if(this.ProjectMgr == null || !this.CanRemoveReference)
{
return;
}
this.ProjectMgr.RemoveBuildDependency(this.buildDependency);
base.Remove(removeFromStorage);
return;
}
/// <summary>
/// Links a reference node to the project file.
/// </summary>
protected override void BindReferenceData()
{
Debug.Assert(!String.IsNullOrEmpty(this.referencedProjectName), "The referencedProjectName field has not been initialized");
Debug.Assert(this.referencedProjectGuid != Guid.Empty, "The referencedProjectName field has not been initialized");
this.ItemNode = new ProjectElement(this.ProjectMgr, this.referencedProjectRelativePath, ProjectFileConstants.ProjectReference);
this.ItemNode.SetMetadata(ProjectFileConstants.Name, this.referencedProjectName);
this.ItemNode.SetMetadata(ProjectFileConstants.Project, this.referencedProjectGuid.ToString("B"));
this.ItemNode.SetMetadata(ProjectFileConstants.Private, true.ToString());
}
/// <summary>
/// Defines whether this node is a valid node for painting the reference icon.
/// </summary>
/// <returns></returns>
protected override bool CanShowDefaultIcon()
{
if(this.referencedProjectGuid == Guid.Empty || this.ProjectMgr == null || this.ProjectMgr.IsClosed || this.isNodeValid)
{
return false;
}
IVsHierarchy hierarchy = null;
hierarchy = VsShellUtilities.GetHierarchy(this.ProjectMgr.Site, this.referencedProjectGuid);
if(hierarchy == null)
{
return false;
}
//If the Project is unloaded return false
if(this.ReferencedProjectObject == null)
{
return false;
}
return (!String.IsNullOrEmpty(this.referencedProjectFullPath) && File.Exists(this.referencedProjectFullPath));
}
/// <summary>
/// Checks if a project reference can be added to the hierarchy. It calls base to see if the reference is not already there, then checks for circular references.
/// </summary>
/// <param name="errorHandler">The error handler delegate to return</param>
/// <returns></returns>
protected override bool CanAddReference(out CannotAddReferenceErrorMessage errorHandler)
{
// When this method is called this reference has not yet been added to the hierarchy, only instantiated.
if(!base.CanAddReference(out errorHandler))
{
return false;
}
errorHandler = null;
if(this.IsThisProjectReferenceInCycle())
{
errorHandler = new CannotAddReferenceErrorMessage(ShowCircularReferenceErrorMessage);
return false;
}
return true;
}
private bool IsThisProjectReferenceInCycle()
{
return IsReferenceInCycle(this.referencedProjectGuid);
}
private void ShowCircularReferenceErrorMessage()
{
string message = String.Format(CultureInfo.CurrentCulture, SR.GetString(SR.ProjectContainsCircularReferences, CultureInfo.CurrentUICulture), this.referencedProjectName);
string title = string.Empty;
OLEMSGICON icon = OLEMSGICON.OLEMSGICON_CRITICAL;
OLEMSGBUTTON buttons = OLEMSGBUTTON.OLEMSGBUTTON_OK;
OLEMSGDEFBUTTON defaultButton = OLEMSGDEFBUTTON.OLEMSGDEFBUTTON_FIRST;
VsShellUtilities.ShowMessageBox(this.ProjectMgr.Site, title, message, icon, buttons, defaultButton);
}
/// <summary>
/// Recursively search if this project reference guid is in cycle.
/// </summary>
private bool IsReferenceInCycle(Guid projectGuid)
{
IVsHierarchy hierarchy = VsShellUtilities.GetHierarchy(this.ProjectMgr.Site, projectGuid);
IReferenceContainerProvider provider = hierarchy as IReferenceContainerProvider;
if(provider != null)
{
IReferenceContainer referenceContainer = provider.GetReferenceContainer();
Debug.Assert(referenceContainer != null, "Could not find the References virtual node");
foreach(ReferenceNode refNode in referenceContainer.EnumReferences())
{
ProjectReferenceNode projRefNode = refNode as ProjectReferenceNode;
if(projRefNode != null)
{
if(projRefNode.ReferencedProjectGuid == this.ProjectMgr.ProjectIDGuid)
{
return true;
}
if(this.IsReferenceInCycle(projRefNode.ReferencedProjectGuid))
{
return true;
}
}
}
}
return false;
}
#endregion
}
}
| |
namespace SoftDemo
{
static class TorusMesh
{
public static readonly float[] Vertices = new[]
{
2.5f, 0f, 0f,
2.405f, 0.294f, 0f,
2.155f, 0.476f, 0f,
1.845f, 0.476f, 0f,
1.595f, 0.294f, 0f,
1.5f, 0f, 0f,
1.595f, -0.294f, 0f,
1.845f, -0.476f, 0f,
2.155f, -0.476f, 0f,
2.405f, -0.294f, 0f,
2.445f, 0f, 0.52f,
2.352f, 0.294f, 0.5f,
2.107f, 0.476f, 0.448f,
1.805f, 0.476f, 0.384f,
1.561f, 0.294f, 0.332f,
1.467f, 0f, 0.312f,
1.561f, -0.294f, 0.332f,
1.805f, -0.476f, 0.384f,
2.107f, -0.476f, 0.448f,
2.352f, -0.294f, 0.5f,
2.284f, 0f, 1.017f,
2.197f, 0.294f, 0.978f,
1.968f, 0.476f, 0.876f,
1.686f, 0.476f, 0.751f,
1.458f, 0.294f, 0.649f,
1.37f, 0f, 0.61f,
1.458f, -0.294f, 0.649f,
1.686f, -0.476f, 0.751f,
1.968f, -0.476f, 0.876f,
2.197f, -0.294f, 0.978f,
2.023f, 0f, 1.469f,
1.945f, 0.294f, 1.413f,
1.743f, 0.476f, 1.266f,
1.493f, 0.476f, 1.085f,
1.291f, 0.294f, 0.938f,
1.214f, 0f, 0.882f,
1.291f, -0.294f, 0.938f,
1.493f, -0.476f, 1.085f,
1.743f, -0.476f, 1.266f,
1.945f, -0.294f, 1.413f,
1.673f, 0f, 1.858f,
1.609f, 0.294f, 1.787f,
1.442f, 0.476f, 1.601f,
1.235f, 0.476f, 1.371f,
1.068f, 0.294f, 1.186f,
1.004f, 0f, 1.115f,
1.068f, -0.294f, 1.186f,
1.235f, -0.476f, 1.371f,
1.442f, -0.476f, 1.601f,
1.609f, -0.294f, 1.787f,
1.25f, 0f, 2.165f,
1.202f, 0.294f, 2.082f,
1.077f, 0.476f, 1.866f,
0.923f, 0.476f, 1.598f,
0.798f, 0.294f, 1.382f,
0.75f, 0f, 1.299f,
0.798f, -0.294f, 1.382f,
0.923f, -0.476f, 1.598f,
1.077f, -0.476f, 1.866f,
1.202f, -0.294f, 2.082f,
0.773f, 0f, 2.378f,
0.743f, 0.294f, 2.287f,
0.666f, 0.476f, 2.049f,
0.57f, 0.476f, 1.755f,
0.493f, 0.294f, 1.517f,
0.464f, 0f, 1.427f,
0.493f, -0.294f, 1.517f,
0.57f, -0.476f, 1.755f,
0.666f, -0.476f, 2.049f,
0.743f, -0.294f, 2.287f,
0.261f, 0f, 2.486f,
0.251f, 0.294f, 2.391f,
0.225f, 0.476f, 2.143f,
0.193f, 0.476f, 1.835f,
0.167f, 0.294f, 1.587f,
0.157f, 0f, 1.492f,
0.167f, -0.294f, 1.587f,
0.193f, -0.476f, 1.835f,
0.225f, -0.476f, 2.143f,
0.251f, -0.294f, 2.391f,
-0.261f, 0f, 2.486f,
-0.251f, 0.294f, 2.391f,
-0.225f, 0.476f, 2.143f,
-0.193f, 0.476f, 1.835f,
-0.167f, 0.294f, 1.587f,
-0.157f, 0f, 1.492f,
-0.167f, -0.294f, 1.587f,
-0.193f, -0.476f, 1.835f,
-0.225f, -0.476f, 2.143f,
-0.251f, -0.294f, 2.391f,
-0.773f, 0f, 2.378f,
-0.743f, 0.294f, 2.287f,
-0.666f, 0.476f, 2.049f,
-0.57f, 0.476f, 1.755f,
-0.493f, 0.294f, 1.517f,
-0.464f, 0f, 1.427f,
-0.493f, -0.294f, 1.517f,
-0.57f, -0.476f, 1.755f,
-0.666f, -0.476f, 2.049f,
-0.743f, -0.294f, 2.287f,
-1.25f, 0f, 2.165f,
-1.202f, 0.294f, 2.082f,
-1.077f, 0.476f, 1.866f,
-0.923f, 0.476f, 1.598f,
-0.798f, 0.294f, 1.382f,
-0.75f, 0f, 1.299f,
-0.798f, -0.294f, 1.382f,
-0.923f, -0.476f, 1.598f,
-1.077f, -0.476f, 1.866f,
-1.202f, -0.294f, 2.082f,
-1.673f, 0f, 1.858f,
-1.609f, 0.294f, 1.787f,
-1.442f, 0.476f, 1.601f,
-1.235f, 0.476f, 1.371f,
-1.068f, 0.294f, 1.186f,
-1.004f, 0f, 1.115f,
-1.068f, -0.294f, 1.186f,
-1.235f, -0.476f, 1.371f,
-1.442f, -0.476f, 1.601f,
-1.609f, -0.294f, 1.787f,
-2.023f, 0f, 1.469f,
-1.945f, 0.294f, 1.413f,
-1.743f, 0.476f, 1.266f,
-1.493f, 0.476f, 1.085f,
-1.291f, 0.294f, 0.938f,
-1.214f, 0f, 0.882f,
-1.291f, -0.294f, 0.938f,
-1.493f, -0.476f, 1.085f,
-1.743f, -0.476f, 1.266f,
-1.945f, -0.294f, 1.413f,
-2.284f, 0f, 1.017f,
-2.197f, 0.294f, 0.978f,
-1.968f, 0.476f, 0.876f,
-1.686f, 0.476f, 0.751f,
-1.458f, 0.294f, 0.649f,
-1.37f, 0f, 0.61f,
-1.458f, -0.294f, 0.649f,
-1.686f, -0.476f, 0.751f,
-1.968f, -0.476f, 0.876f,
-2.197f, -0.294f, 0.978f,
-2.445f, 0f, 0.52f,
-2.352f, 0.294f, 0.5f,
-2.107f, 0.476f, 0.448f,
-1.805f, 0.476f, 0.384f,
-1.561f, 0.294f, 0.332f,
-1.467f, 0f, 0.312f,
-1.561f, -0.294f, 0.332f,
-1.805f, -0.476f, 0.384f,
-2.107f, -0.476f, 0.448f,
-2.352f, -0.294f, 0.5f,
-2.5f, 0f, 0f,
-2.405f, 0.294f, 0f,
-2.155f, 0.476f, 0f,
-1.845f, 0.476f, 0f,
-1.595f, 0.294f, 0f,
-1.5f, 0f, 0f,
-1.595f, -0.294f, 0f,
-1.845f, -0.476f, 0f,
-2.155f, -0.476f, 0f,
-2.405f, -0.294f, 0f,
-2.445f, 0f, -0.52f,
-2.352f, 0.294f, -0.5f,
-2.107f, 0.476f, -0.448f,
-1.805f, 0.476f, -0.384f,
-1.561f, 0.294f, -0.332f,
-1.467f, 0f, -0.312f,
-1.561f, -0.294f, -0.332f,
-1.805f, -0.476f, -0.384f,
-2.107f, -0.476f, -0.448f,
-2.352f, -0.294f, -0.5f,
-2.284f, 0f, -1.017f,
-2.197f, 0.294f, -0.978f,
-1.968f, 0.476f, -0.876f,
-1.686f, 0.476f, -0.751f,
-1.458f, 0.294f, -0.649f,
-1.37f, 0f, -0.61f,
-1.458f, -0.294f, -0.649f,
-1.686f, -0.476f, -0.751f,
-1.968f, -0.476f, -0.876f,
-2.197f, -0.294f, -0.978f,
-2.023f, 0f, -1.469f,
-1.945f, 0.294f, -1.413f,
-1.743f, 0.476f, -1.266f,
-1.493f, 0.476f, -1.085f,
-1.291f, 0.294f, -0.938f,
-1.214f, 0f, -0.882f,
-1.291f, -0.294f, -0.938f,
-1.493f, -0.476f, -1.085f,
-1.743f, -0.476f, -1.266f,
-1.945f, -0.294f, -1.413f,
-1.673f, 0f, -1.858f,
-1.609f, 0.294f, -1.787f,
-1.442f, 0.476f, -1.601f,
-1.235f, 0.476f, -1.371f,
-1.068f, 0.294f, -1.186f,
-1.004f, 0f, -1.115f,
-1.068f, -0.294f, -1.186f,
-1.235f, -0.476f, -1.371f,
-1.442f, -0.476f, -1.601f,
-1.609f, -0.294f, -1.787f,
-1.25f, 0f, -2.165f,
-1.202f, 0.294f, -2.082f,
-1.077f, 0.476f, -1.866f,
-0.923f, 0.476f, -1.598f,
-0.798f, 0.294f, -1.382f,
-0.75f, 0f, -1.299f,
-0.798f, -0.294f, -1.382f,
-0.923f, -0.476f, -1.598f,
-1.077f, -0.476f, -1.866f,
-1.202f, -0.294f, -2.082f,
-0.773f, 0f, -2.378f,
-0.743f, 0.294f, -2.287f,
-0.666f, 0.476f, -2.049f,
-0.57f, 0.476f, -1.755f,
-0.493f, 0.294f, -1.517f,
-0.464f, 0f, -1.427f,
-0.493f, -0.294f, -1.517f,
-0.57f, -0.476f, -1.755f,
-0.666f, -0.476f, -2.049f,
-0.743f, -0.294f, -2.287f,
-0.261f, 0f, -2.486f,
-0.251f, 0.294f, -2.391f,
-0.225f, 0.476f, -2.143f,
-0.193f, 0.476f, -1.835f,
-0.167f, 0.294f, -1.587f,
-0.157f, 0f, -1.492f,
-0.167f, -0.294f, -1.587f,
-0.193f, -0.476f, -1.835f,
-0.225f, -0.476f, -2.143f,
-0.251f, -0.294f, -2.391f,
0.261f, 0f, -2.486f,
0.251f, 0.294f, -2.391f,
0.225f, 0.476f, -2.143f,
0.193f, 0.476f, -1.835f,
0.167f, 0.294f, -1.587f,
0.157f, 0f, -1.492f,
0.167f, -0.294f, -1.587f,
0.193f, -0.476f, -1.835f,
0.225f, -0.476f, -2.143f,
0.251f, -0.294f, -2.391f,
0.773f, 0f, -2.378f,
0.743f, 0.294f, -2.287f,
0.666f, 0.476f, -2.049f,
0.57f, 0.476f, -1.755f,
0.493f, 0.294f, -1.517f,
0.464f, 0f, -1.427f,
0.493f, -0.294f, -1.517f,
0.57f, -0.476f, -1.755f,
0.666f, -0.476f, -2.049f,
0.743f, -0.294f, -2.287f,
1.25f, 0f, -2.165f,
1.202f, 0.294f, -2.082f,
1.077f, 0.476f, -1.866f,
0.923f, 0.476f, -1.598f,
0.798f, 0.294f, -1.382f,
0.75f, 0f, -1.299f,
0.798f, -0.294f, -1.382f,
0.923f, -0.476f, -1.598f,
1.077f, -0.476f, -1.866f,
1.202f, -0.294f, -2.082f,
1.673f, 0f, -1.858f,
1.609f, 0.294f, -1.787f,
1.442f, 0.476f, -1.601f,
1.235f, 0.476f, -1.371f,
1.068f, 0.294f, -1.186f,
1.004f, 0f, -1.115f,
1.068f, -0.294f, -1.186f,
1.235f, -0.476f, -1.371f,
1.442f, -0.476f, -1.601f,
1.609f, -0.294f, -1.787f,
2.023f, 0f, -1.469f,
1.945f, 0.294f, -1.413f,
1.743f, 0.476f, -1.266f,
1.493f, 0.476f, -1.085f,
1.291f, 0.294f, -0.938f,
1.214f, 0f, -0.882f,
1.291f, -0.294f, -0.938f,
1.493f, -0.476f, -1.085f,
1.743f, -0.476f, -1.266f,
1.945f, -0.294f, -1.413f,
2.284f, 0f, -1.017f,
2.197f, 0.294f, -0.978f,
1.968f, 0.476f, -0.876f,
1.686f, 0.476f, -0.751f,
1.458f, 0.294f, -0.649f,
1.37f, 0f, -0.61f,
1.458f, -0.294f, -0.649f,
1.686f, -0.476f, -0.751f,
1.968f, -0.476f, -0.876f,
2.197f, -0.294f, -0.978f,
2.445f, 0f, -0.52f,
2.352f, 0.294f, -0.5f,
2.107f, 0.476f, -0.448f,
1.805f, 0.476f, -0.384f,
1.561f, 0.294f, -0.332f,
1.467f, 0f, -0.312f,
1.561f, -0.294f, -0.332f,
1.805f, -0.476f, -0.384f,
2.107f, -0.476f, -0.448f,
2.352f, -0.294f, -0.5f
};
public static readonly int[] Indices = new[]
{
0, 1, 11,
1, 2, 12,
2, 3, 13,
3, 4, 14,
4, 5, 15,
5, 6, 16,
6, 7, 17,
7, 8, 18,
8, 9, 19,
9, 0, 10,
10, 11, 21,
11, 12, 22,
12, 13, 23,
13, 14, 24,
14, 15, 25,
15, 16, 26,
16, 17, 27,
17, 18, 28,
18, 19, 29,
19, 10, 20,
20, 21, 31,
21, 22, 32,
22, 23, 33,
23, 24, 34,
24, 25, 35,
25, 26, 36,
26, 27, 37,
27, 28, 38,
28, 29, 39,
29, 20, 30,
30, 31, 41,
31, 32, 42,
32, 33, 43,
33, 34, 44,
34, 35, 45,
35, 36, 46,
36, 37, 47,
37, 38, 48,
38, 39, 49,
39, 30, 40,
40, 41, 51,
41, 42, 52,
42, 43, 53,
43, 44, 54,
44, 45, 55,
45, 46, 56,
46, 47, 57,
47, 48, 58,
48, 49, 59,
49, 40, 50,
50, 51, 61,
51, 52, 62,
52, 53, 63,
53, 54, 64,
54, 55, 65,
55, 56, 66,
56, 57, 67,
57, 58, 68,
58, 59, 69,
59, 50, 60,
60, 61, 71,
61, 62, 72,
62, 63, 73,
63, 64, 74,
64, 65, 75,
65, 66, 76,
66, 67, 77,
67, 68, 78,
68, 69, 79,
69, 60, 70,
70, 71, 81,
71, 72, 82,
72, 73, 83,
73, 74, 84,
74, 75, 85,
75, 76, 86,
76, 77, 87,
77, 78, 88,
78, 79, 89,
79, 70, 80,
80, 81, 91,
81, 82, 92,
82, 83, 93,
83, 84, 94,
84, 85, 95,
85, 86, 96,
86, 87, 97,
87, 88, 98,
88, 89, 99,
89, 80, 90,
90, 91, 101,
91, 92, 102,
92, 93, 103,
93, 94, 104,
94, 95, 105,
95, 96, 106,
96, 97, 107,
97, 98, 108,
98, 99, 109,
99, 90, 100,
100, 101, 111,
101, 102, 112,
102, 103, 113,
103, 104, 114,
104, 105, 115,
105, 106, 116,
106, 107, 117,
107, 108, 118,
108, 109, 119,
109, 100, 110,
110, 111, 121,
111, 112, 122,
112, 113, 123,
113, 114, 124,
114, 115, 125,
115, 116, 126,
116, 117, 127,
117, 118, 128,
118, 119, 129,
119, 110, 120,
120, 121, 131,
121, 122, 132,
122, 123, 133,
123, 124, 134,
124, 125, 135,
125, 126, 136,
126, 127, 137,
127, 128, 138,
128, 129, 139,
129, 120, 130,
130, 131, 141,
131, 132, 142,
132, 133, 143,
133, 134, 144,
134, 135, 145,
135, 136, 146,
136, 137, 147,
137, 138, 148,
138, 139, 149,
139, 130, 140,
140, 141, 151,
141, 142, 152,
142, 143, 153,
143, 144, 154,
144, 145, 155,
145, 146, 156,
146, 147, 157,
147, 148, 158,
148, 149, 159,
149, 140, 150,
150, 151, 161,
151, 152, 162,
152, 153, 163,
153, 154, 164,
154, 155, 165,
155, 156, 166,
156, 157, 167,
157, 158, 168,
158, 159, 169,
159, 150, 160,
160, 161, 171,
161, 162, 172,
162, 163, 173,
163, 164, 174,
164, 165, 175,
165, 166, 176,
166, 167, 177,
167, 168, 178,
168, 169, 179,
169, 160, 170,
170, 171, 181,
171, 172, 182,
172, 173, 183,
173, 174, 184,
174, 175, 185,
175, 176, 186,
176, 177, 187,
177, 178, 188,
178, 179, 189,
179, 170, 180,
180, 181, 191,
181, 182, 192,
182, 183, 193,
183, 184, 194,
184, 185, 195,
185, 186, 196,
186, 187, 197,
187, 188, 198,
188, 189, 199,
189, 180, 190,
190, 191, 201,
191, 192, 202,
192, 193, 203,
193, 194, 204,
194, 195, 205,
195, 196, 206,
196, 197, 207,
197, 198, 208,
198, 199, 209,
199, 190, 200,
200, 201, 211,
201, 202, 212,
202, 203, 213,
203, 204, 214,
204, 205, 215,
205, 206, 216,
206, 207, 217,
207, 208, 218,
208, 209, 219,
209, 200, 210,
210, 211, 221,
211, 212, 222,
212, 213, 223,
213, 214, 224,
214, 215, 225,
215, 216, 226,
216, 217, 227,
217, 218, 228,
218, 219, 229,
219, 210, 220,
220, 221, 231,
221, 222, 232,
222, 223, 233,
223, 224, 234,
224, 225, 235,
225, 226, 236,
226, 227, 237,
227, 228, 238,
228, 229, 239,
229, 220, 230,
230, 231, 241,
231, 232, 242,
232, 233, 243,
233, 234, 244,
234, 235, 245,
235, 236, 246,
236, 237, 247,
237, 238, 248,
238, 239, 249,
239, 230, 240,
240, 241, 251,
241, 242, 252,
242, 243, 253,
243, 244, 254,
244, 245, 255,
245, 246, 256,
246, 247, 257,
247, 248, 258,
248, 249, 259,
249, 240, 250,
250, 251, 261,
251, 252, 262,
252, 253, 263,
253, 254, 264,
254, 255, 265,
255, 256, 266,
256, 257, 267,
257, 258, 268,
258, 259, 269,
259, 250, 260,
260, 261, 271,
261, 262, 272,
262, 263, 273,
263, 264, 274,
264, 265, 275,
265, 266, 276,
266, 267, 277,
267, 268, 278,
268, 269, 279,
269, 260, 270,
270, 271, 281,
271, 272, 282,
272, 273, 283,
273, 274, 284,
274, 275, 285,
275, 276, 286,
276, 277, 287,
277, 278, 288,
278, 279, 289,
279, 270, 280,
280, 281, 291,
281, 282, 292,
282, 283, 293,
283, 284, 294,
284, 285, 295,
285, 286, 296,
286, 287, 297,
287, 288, 298,
288, 289, 299,
289, 280, 290,
290, 291, 1,
291, 292, 2,
292, 293, 3,
293, 294, 4,
294, 295, 5,
295, 296, 6,
296, 297, 7,
297, 298, 8,
298, 299, 9,
299, 290, 0,
0, 11, 10,
1, 12, 11,
2, 13, 12,
3, 14, 13,
4, 15, 14,
5, 16, 15,
6, 17, 16,
7, 18, 17,
8, 19, 18,
9, 10, 19,
10, 21, 20,
11, 22, 21,
12, 23, 22,
13, 24, 23,
14, 25, 24,
15, 26, 25,
16, 27, 26,
17, 28, 27,
18, 29, 28,
19, 20, 29,
20, 31, 30,
21, 32, 31,
22, 33, 32,
23, 34, 33,
24, 35, 34,
25, 36, 35,
26, 37, 36,
27, 38, 37,
28, 39, 38,
29, 30, 39,
30, 41, 40,
31, 42, 41,
32, 43, 42,
33, 44, 43,
34, 45, 44,
35, 46, 45,
36, 47, 46,
37, 48, 47,
38, 49, 48,
39, 40, 49,
40, 51, 50,
41, 52, 51,
42, 53, 52,
43, 54, 53,
44, 55, 54,
45, 56, 55,
46, 57, 56,
47, 58, 57,
48, 59, 58,
49, 50, 59,
50, 61, 60,
51, 62, 61,
52, 63, 62,
53, 64, 63,
54, 65, 64,
55, 66, 65,
56, 67, 66,
57, 68, 67,
58, 69, 68,
59, 60, 69,
60, 71, 70,
61, 72, 71,
62, 73, 72,
63, 74, 73,
64, 75, 74,
65, 76, 75,
66, 77, 76,
67, 78, 77,
68, 79, 78,
69, 70, 79,
70, 81, 80,
71, 82, 81,
72, 83, 82,
73, 84, 83,
74, 85, 84,
75, 86, 85,
76, 87, 86,
77, 88, 87,
78, 89, 88,
79, 80, 89,
80, 91, 90,
81, 92, 91,
82, 93, 92,
83, 94, 93,
84, 95, 94,
85, 96, 95,
86, 97, 96,
87, 98, 97,
88, 99, 98,
89, 90, 99,
90, 101, 100,
91, 102, 101,
92, 103, 102,
93, 104, 103,
94, 105, 104,
95, 106, 105,
96, 107, 106,
97, 108, 107,
98, 109, 108,
99, 100, 109,
100, 111, 110,
101, 112, 111,
102, 113, 112,
103, 114, 113,
104, 115, 114,
105, 116, 115,
106, 117, 116,
107, 118, 117,
108, 119, 118,
109, 110, 119,
110, 121, 120,
111, 122, 121,
112, 123, 122,
113, 124, 123,
114, 125, 124,
115, 126, 125,
116, 127, 126,
117, 128, 127,
118, 129, 128,
119, 120, 129,
120, 131, 130,
121, 132, 131,
122, 133, 132,
123, 134, 133,
124, 135, 134,
125, 136, 135,
126, 137, 136,
127, 138, 137,
128, 139, 138,
129, 130, 139,
130, 141, 140,
131, 142, 141,
132, 143, 142,
133, 144, 143,
134, 145, 144,
135, 146, 145,
136, 147, 146,
137, 148, 147,
138, 149, 148,
139, 140, 149,
140, 151, 150,
141, 152, 151,
142, 153, 152,
143, 154, 153,
144, 155, 154,
145, 156, 155,
146, 157, 156,
147, 158, 157,
148, 159, 158,
149, 150, 159,
150, 161, 160,
151, 162, 161,
152, 163, 162,
153, 164, 163,
154, 165, 164,
155, 166, 165,
156, 167, 166,
157, 168, 167,
158, 169, 168,
159, 160, 169,
160, 171, 170,
161, 172, 171,
162, 173, 172,
163, 174, 173,
164, 175, 174,
165, 176, 175,
166, 177, 176,
167, 178, 177,
168, 179, 178,
169, 170, 179,
170, 181, 180,
171, 182, 181,
172, 183, 182,
173, 184, 183,
174, 185, 184,
175, 186, 185,
176, 187, 186,
177, 188, 187,
178, 189, 188,
179, 180, 189,
180, 191, 190,
181, 192, 191,
182, 193, 192,
183, 194, 193,
184, 195, 194,
185, 196, 195,
186, 197, 196,
187, 198, 197,
188, 199, 198,
189, 190, 199,
190, 201, 200,
191, 202, 201,
192, 203, 202,
193, 204, 203,
194, 205, 204,
195, 206, 205,
196, 207, 206,
197, 208, 207,
198, 209, 208,
199, 200, 209,
200, 211, 210,
201, 212, 211,
202, 213, 212,
203, 214, 213,
204, 215, 214,
205, 216, 215,
206, 217, 216,
207, 218, 217,
208, 219, 218,
209, 210, 219,
210, 221, 220,
211, 222, 221,
212, 223, 222,
213, 224, 223,
214, 225, 224,
215, 226, 225,
216, 227, 226,
217, 228, 227,
218, 229, 228,
219, 220, 229,
220, 231, 230,
221, 232, 231,
222, 233, 232,
223, 234, 233,
224, 235, 234,
225, 236, 235,
226, 237, 236,
227, 238, 237,
228, 239, 238,
229, 230, 239,
230, 241, 240,
231, 242, 241,
232, 243, 242,
233, 244, 243,
234, 245, 244,
235, 246, 245,
236, 247, 246,
237, 248, 247,
238, 249, 248,
239, 240, 249,
240, 251, 250,
241, 252, 251,
242, 253, 252,
243, 254, 253,
244, 255, 254,
245, 256, 255,
246, 257, 256,
247, 258, 257,
248, 259, 258,
249, 250, 259,
250, 261, 260,
251, 262, 261,
252, 263, 262,
253, 264, 263,
254, 265, 264,
255, 266, 265,
256, 267, 266,
257, 268, 267,
258, 269, 268,
259, 260, 269,
260, 271, 270,
261, 272, 271,
262, 273, 272,
263, 274, 273,
264, 275, 274,
265, 276, 275,
266, 277, 276,
267, 278, 277,
268, 279, 278,
269, 270, 279,
270, 281, 280,
271, 282, 281,
272, 283, 282,
273, 284, 283,
274, 285, 284,
275, 286, 285,
276, 287, 286,
277, 288, 287,
278, 289, 288,
279, 280, 289,
280, 291, 290,
281, 292, 291,
282, 293, 292,
283, 294, 293,
284, 295, 294,
285, 296, 295,
286, 297, 296,
287, 298, 297,
288, 299, 298,
289, 290, 299,
290, 1, 0,
291, 2, 1,
292, 3, 2,
293, 4, 3,
294, 5, 4,
295, 6, 5,
296, 7, 6,
297, 8, 7,
298, 9, 8,
299, 0, 9
};
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for Additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
/* ================================================================
* About NPOI
* Author: Tony Qu
* Author's email: tonyqus (at) gmail.com
* Author's Blog: tonyqus.wordpress.com.cn (wp.tonyqus.cn)
* HomePage: http://www.codeplex.com/npoi
* Contributors:
*
* ==============================================================*/
namespace NPOI.POIFS.EventFileSystem
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using NPOI.POIFS.FileSystem;
using NPOI.POIFS.Properties;
using NPOI.POIFS.Storage;
/// <summary>
/// An event-driven Reader for POIFS file systems. Users of this class
/// first Create an instance of it, then use the RegisterListener
/// methods to Register POIFSReaderListener instances for specific
/// documents. Once all the listeners have been Registered, the Read()
/// method is called, which results in the listeners being notified as
/// their documents are Read.
/// @author Marc Johnson (mjohnson at apache dot org)
/// </summary>
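/// <example>
/// A minimal usage sketch (MyListener, the stream name and the file name are
/// illustrative, not part of this library):
/// <code>
/// POIFSReader reader = new POIFSReader();
/// reader.RegisterListener(new MyListener(), "WordDocument");
/// using (FileStream stream = File.OpenRead("sample.doc"))
/// {
///     reader.Read(stream);
/// }
/// </code>
/// </example>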
public class POIFSReader
{
public event POIFSReaderEventHandler StreamReaded;
private POIFSReaderRegistry registry;
private bool registryClosed;
protected virtual void OnStreamReaded(POIFSReaderEventArgs e)
{
if (StreamReaded != null)
{
StreamReaded(this, e);
}
}
/// <summary>
/// Initializes a new instance of the <see cref="POIFSReader"/> class.
/// </summary>
public POIFSReader()
{
registry = new POIFSReaderRegistry();
registryClosed = false;
}
/// <summary>
/// Read from an InputStream and Process the documents we Get
/// </summary>
/// <param name="stream">the InputStream from which to Read the data</param>
/// <returns>POIFSDocument list</returns>
public List<DocumentDescriptor> Read(Stream stream)
{
registryClosed = true;
// Read the header block from the stream
HeaderBlock header_block = new HeaderBlock(stream);
// Read the rest of the stream into blocks
RawDataBlockList data_blocks = new RawDataBlockList(stream, header_block.BigBlockSize);
// Set up the block allocation table (necessary for the
// data_blocks to be manageable)
new BlockAllocationTableReader(header_block.BigBlockSize,
header_block.BATCount,
header_block.BATArray,
header_block.XBATCount,
header_block.XBATIndex,
data_blocks);
// Get property table from the document
PropertyTable properties = new PropertyTable(header_block, data_blocks);
// Process documents
return ProcessProperties(SmallBlockTableReader.GetSmallDocumentBlocks
(header_block.BigBlockSize, data_blocks,
properties.Root,
header_block.SBATStart),
data_blocks, properties.Root.Children, new POIFSDocumentPath()
);
}
/**
* Register a POIFSReaderListener for all documents
*
* @param listener the listener to be registered
*
* @exception NullPointerException if listener is null
* @exception IllegalStateException if read() has already been
* called
*/
public void RegisterListener(POIFSReaderListener listener)
{
if (listener == null)
{
throw new NullReferenceException();
}
if (registryClosed)
{
throw new InvalidOperationException();
}
registry.RegisterListener(listener);
}
/**
* Register a POIFSReaderListener for a document in the root
* directory
*
* @param listener the listener to be registered
* @param name the document name
*
* @exception NullPointerException if listener is null or name is
* null or empty
* @exception IllegalStateException if read() has already been
* called
*/
public void RegisterListener(POIFSReaderListener listener,
String name)
{
RegisterListener(listener, null, name);
}
/**
* Register a POIFSReaderListener for a document in the specified
* directory
*
* @param listener the listener to be registered
* @param path the document path; if null, the root directory is
* assumed
* @param name the document name
*
* @exception NullPointerException if listener is null or name is
* null or empty
* @exception IllegalStateException if read() has already been
* called
*/
public void RegisterListener(POIFSReaderListener listener,
POIFSDocumentPath path,
String name)
{
if ((listener == null) || (name == null) || (name.Length == 0))
{
throw new NullReferenceException();
}
if (registryClosed)
{
throw new InvalidOperationException();
}
registry.RegisterListener(listener,
(path == null) ? new POIFSDocumentPath()
: path, name);
}
/// <summary>
/// Processes the properties.
/// </summary>
/// <param name="small_blocks">The small_blocks.</param>
/// <param name="big_blocks">The big_blocks.</param>
/// <param name="properties">The properties.</param>
/// <param name="path">The path.</param>
/// <returns></returns>
private List<DocumentDescriptor> ProcessProperties(BlockList small_blocks,
BlockList big_blocks,
IEnumerator properties,
POIFSDocumentPath path)
{
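// Recursively walk the property tree: directory properties recurse with an extended
// path; document properties are materialized into POIFSDocument instances and handed
// to any registered listeners, otherwise their blocks are simply consumed and
// discarded.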
List<DocumentDescriptor> documents =
new List<DocumentDescriptor>();
while (properties.MoveNext())
{
Property property = (Property)properties.Current;
String name = property.Name;
if (property.IsDirectory)
{
POIFSDocumentPath new_path = new POIFSDocumentPath(path,
new String[]
{
name
});
ProcessProperties(
small_blocks, big_blocks,
((DirectoryProperty)property).Children, new_path);
}
else
{
int startBlock = property.StartBlock;
IEnumerator listeners = registry.GetListeners(path, name);
POIFSDocument document = null;
if (listeners.MoveNext())
{
listeners.Reset();
int size = property.Size;
if (property.ShouldUseSmallBlocks)
{
document =
new POIFSDocument(name, small_blocks
.FetchBlocks(startBlock, -1), size);
}
else
{
document =
new POIFSDocument(name, big_blocks
.FetchBlocks(startBlock, -1), size);
}
//POIFSReaderListener listener =
// (POIFSReaderListener)listeners.Current;
//listener.ProcessPOIFSReaderEvent(
// new POIFSReaderEvent(
// new DocumentInputStream(document), path,
// name));
while (listeners.MoveNext())
{
POIFSReaderListener listener =
(POIFSReaderListener)listeners.Current;
listener.ProcessPOIFSReaderEvent(
new POIFSReaderEvent(
new DocumentInputStream(document), path,
name));
}
}
else
{
// consume the document's data and discard it
if (property.ShouldUseSmallBlocks)
{
small_blocks.FetchBlocks(startBlock, -1);
}
else
{
big_blocks.FetchBlocks(startBlock, -1);
}
//documents.Add(
// new DocumentDescriptor(path, name));
//fire event
//OnStreamReaded(new POIFSReaderEventArgs(name, path, document));
}
}
}
return documents;
}
}
}
| |
using System;
using ChainUtils.BouncyCastle.Crypto.Modes.Gcm;
using ChainUtils.BouncyCastle.Crypto.Parameters;
using ChainUtils.BouncyCastle.Crypto.Utilities;
using ChainUtils.BouncyCastle.Utilities;
namespace ChainUtils.BouncyCastle.Crypto.Modes
{
/// <summary>
/// Implements the Galois/Counter mode (GCM) detailed in
/// NIST Special Publication 800-38D.
/// </summary>
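/// <example>
/// A minimal encryption sketch. It assumes an AES engine such as
/// ChainUtils.BouncyCastle.Crypto.Engines.AesEngine is available; key, nonce and
/// plaintext are illustrative byte arrays:
/// <code>
/// var gcm = new GcmBlockCipher(new AesEngine());
/// gcm.Init(true, new AeadParameters(new KeyParameter(key), 128, nonce));
/// var output = new byte[gcm.GetOutputSize(plaintext.Length)];
/// int len = gcm.ProcessBytes(plaintext, 0, plaintext.Length, output, 0);
/// gcm.DoFinal(output, len);
/// </code>
/// </example>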
public class GcmBlockCipher
: IAeadBlockCipher
{
private const int BlockSize = 16;
private readonly IBlockCipher cipher;
private readonly IGcmMultiplier multiplier;
private IGcmExponentiator exp;
// These fields are set by Init and not modified by processing
private bool forEncryption;
private int macSize;
private byte[] nonce;
private byte[] initialAssociatedText;
private byte[] H;
private byte[] J0;
// These fields are modified during processing
private byte[] bufBlock;
private byte[] macBlock;
private byte[] S, S_at, S_atPre;
private byte[] counter;
private int bufOff;
private ulong totalLength;
private byte[] atBlock;
private int atBlockPos;
private ulong atLength;
private ulong atLengthPre;
public GcmBlockCipher(
IBlockCipher c)
: this(c, null)
{
}
public GcmBlockCipher(
IBlockCipher c,
IGcmMultiplier m)
{
if (c.GetBlockSize() != BlockSize)
throw new ArgumentException("cipher required with a block size of " + BlockSize + ".");
if (m == null)
{
// TODO Consider a static property specifying default multiplier
m = new Tables8kGcmMultiplier();
}
cipher = c;
multiplier = m;
}
public virtual string AlgorithmName
{
get { return cipher.AlgorithmName + "/GCM"; }
}
public IBlockCipher GetUnderlyingCipher()
{
return cipher;
}
public virtual int GetBlockSize()
{
return BlockSize;
}
/// <remarks>
/// MAC sizes from 32 bits to 128 bits (must be a multiple of 8) are supported. The default is 128 bits.
/// Sizes less than 96 are not recommended, but are supported for specialized applications.
/// </remarks>
public virtual void Init(
bool forEncryption,
ICipherParameters parameters)
{
this.forEncryption = forEncryption;
macBlock = null;
KeyParameter keyParam;
if (parameters is AeadParameters)
{
var param = (AeadParameters)parameters;
nonce = param.GetNonce();
initialAssociatedText = param.GetAssociatedText();
var macSizeBits = param.MacSize;
if (macSizeBits < 32 || macSizeBits > 128 || macSizeBits % 8 != 0)
{
throw new ArgumentException("Invalid value for MAC size: " + macSizeBits);
}
macSize = macSizeBits / 8;
keyParam = param.Key;
}
else if (parameters is ParametersWithIV)
{
var param = (ParametersWithIV)parameters;
nonce = param.GetIV();
initialAssociatedText = null;
macSize = 16;
keyParam = (KeyParameter)param.Parameters;
}
else
{
throw new ArgumentException("invalid parameters passed to GCM");
}
var bufLength = forEncryption ? BlockSize : (BlockSize + macSize);
bufBlock = new byte[bufLength];
if (nonce == null || nonce.Length < 1)
{
throw new ArgumentException("IV must be at least 1 byte");
}
// TODO Restrict macSize to 16 if nonce length not 12?
// Cipher always used in forward mode
// if keyParam is null we're reusing the last key.
if (keyParam != null)
{
cipher.Init(true, keyParam);
H = new byte[BlockSize];
cipher.ProcessBlock(H, 0, H, 0);
// if keyParam is null we're reusing the last key and the multiplier doesn't need re-init
multiplier.Init(H);
exp = null;
}
else if (H == null)
{
throw new ArgumentException("Key must be specified in initial init");
}
J0 = new byte[BlockSize];
if (nonce.Length == 12)
{
Array.Copy(nonce, 0, J0, 0, nonce.Length);
J0[BlockSize - 1] = 0x01;
}
else
{
gHASH(J0, nonce, nonce.Length);
var X = new byte[BlockSize];
Pack.UInt64_To_BE((ulong)nonce.Length * 8UL, X, 8);
gHASHBlock(J0, X);
}
S = new byte[BlockSize];
S_at = new byte[BlockSize];
S_atPre = new byte[BlockSize];
atBlock = new byte[BlockSize];
atBlockPos = 0;
atLength = 0;
atLengthPre = 0;
counter = Arrays.Clone(J0);
bufOff = 0;
totalLength = 0;
if (initialAssociatedText != null)
{
ProcessAadBytes(initialAssociatedText, 0, initialAssociatedText.Length);
}
}
public virtual byte[] GetMac()
{
return Arrays.Clone(macBlock);
}
public virtual int GetOutputSize(
int len)
{
var totalData = len + bufOff;
if (forEncryption)
{
return totalData + macSize;
}
return totalData < macSize ? 0 : totalData - macSize;
}
public virtual int GetUpdateOutputSize(
int len)
{
var totalData = len + bufOff;
if (!forEncryption)
{
if (totalData < macSize)
{
return 0;
}
totalData -= macSize;
}
return totalData - totalData % BlockSize;
}
public virtual void ProcessAadByte(byte input)
{
atBlock[atBlockPos] = input;
if (++atBlockPos == BlockSize)
{
// Hash each block as it fills
gHASHBlock(S_at, atBlock);
atBlockPos = 0;
atLength += BlockSize;
}
}
public virtual void ProcessAadBytes(byte[] inBytes, int inOff, int len)
{
for (var i = 0; i < len; ++i)
{
atBlock[atBlockPos] = inBytes[inOff + i];
if (++atBlockPos == BlockSize)
{
// Hash each block as it fills
gHASHBlock(S_at, atBlock);
atBlockPos = 0;
atLength += BlockSize;
}
}
}
private void InitCipher()
{
if (atLength > 0)
{
Array.Copy(S_at, 0, S_atPre, 0, BlockSize);
atLengthPre = atLength;
}
// Finish hash for partial AAD block
if (atBlockPos > 0)
{
gHASHPartial(S_atPre, atBlock, 0, atBlockPos);
atLengthPre += (uint)atBlockPos;
}
if (atLengthPre > 0)
{
Array.Copy(S_atPre, 0, S, 0, BlockSize);
}
}
public virtual int ProcessByte(
byte input,
byte[] output,
int outOff)
{
bufBlock[bufOff] = input;
if (++bufOff == bufBlock.Length)
{
OutputBlock(output, outOff);
return BlockSize;
}
return 0;
}
public virtual int ProcessBytes(
byte[] input,
int inOff,
int len,
byte[] output,
int outOff)
{
var resultLen = 0;
for (var i = 0; i < len; ++i)
{
bufBlock[bufOff] = input[inOff + i];
if (++bufOff == bufBlock.Length)
{
OutputBlock(output, outOff + resultLen);
resultLen += BlockSize;
}
}
return resultLen;
}
private void OutputBlock(byte[] output, int offset)
{
if (totalLength == 0)
{
InitCipher();
}
gCTRBlock(bufBlock, output, offset);
if (forEncryption)
{
bufOff = 0;
}
else
{
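// When decrypting, the trailing macSize bytes of the buffer may be the authentication
// tag, so they are held back: move them to the front of the buffer rather than
// decrypting them yet.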
Array.Copy(bufBlock, BlockSize, bufBlock, 0, macSize);
bufOff = macSize;
}
}
public int DoFinal(byte[] output, int outOff)
{
if (totalLength == 0)
{
InitCipher();
}
var extra = bufOff;
if (!forEncryption)
{
if (extra < macSize)
throw new InvalidCipherTextException("data too short");
extra -= macSize;
}
if (extra > 0)
{
gCTRPartial(bufBlock, 0, extra, output, outOff);
}
atLength += (uint)atBlockPos;
if (atLength > atLengthPre)
{
/*
* Some AAD was sent after the cipher started. We determine the difference between the hash value
* we actually used when the cipher started (S_atPre) and the final hash value calculated (S_at).
* Then we carry this difference forward by multiplying by H^c, where c is the number of (full or
* partial) cipher-text blocks produced, and adjust the current hash.
*/
// Finish hash for partial AAD block
if (atBlockPos > 0)
{
gHASHPartial(S_at, atBlock, 0, atBlockPos);
}
// Find the difference between the AAD hashes
if (atLengthPre > 0)
{
GcmUtilities.Xor(S_at, S_atPre);
}
// Number of cipher-text blocks produced
var c = (long)(((totalLength * 8) + 127) >> 7);
// Calculate the adjustment factor
var H_c = new byte[16];
if (exp == null)
{
exp = new Tables1kGcmExponentiator();
exp.Init(H);
}
exp.ExponentiateX(c, H_c);
// Carry the difference forward
GcmUtilities.Multiply(S_at, H_c);
// Adjust the current hash
GcmUtilities.Xor(S, S_at);
}
// Final gHASH
var X = new byte[BlockSize];
Pack.UInt64_To_BE(atLength * 8UL, X, 0);
Pack.UInt64_To_BE(totalLength * 8UL, X, 8);
gHASHBlock(S, X);
// T = MSBt(GCTRk(J0,S))
var tag = new byte[BlockSize];
cipher.ProcessBlock(J0, 0, tag, 0);
GcmUtilities.Xor(tag, S);
var resultLen = extra;
// We place into macBlock our calculated value for T
macBlock = new byte[macSize];
Array.Copy(tag, 0, macBlock, 0, macSize);
if (forEncryption)
{
// Append T to the message
Array.Copy(macBlock, 0, output, outOff + bufOff, macSize);
resultLen += macSize;
}
else
{
// Retrieve the T value from the message and compare to calculated one
var msgMac = new byte[macSize];
Array.Copy(bufBlock, extra, msgMac, 0, macSize);
if (!Arrays.ConstantTimeAreEqual(macBlock, msgMac))
throw new InvalidCipherTextException("mac check in GCM failed");
}
Reset(false);
return resultLen;
}
public virtual void Reset()
{
Reset(true);
}
private void Reset(
bool clearMac)
{
cipher.Reset();
S = new byte[BlockSize];
S_at = new byte[BlockSize];
S_atPre = new byte[BlockSize];
atBlock = new byte[BlockSize];
atBlockPos = 0;
atLength = 0;
atLengthPre = 0;
counter = Arrays.Clone(J0);
bufOff = 0;
totalLength = 0;
if (bufBlock != null)
{
Arrays.Fill(bufBlock, 0);
}
if (clearMac)
{
macBlock = null;
}
if (initialAssociatedText != null)
{
ProcessAadBytes(initialAssociatedText, 0, initialAssociatedText.Length);
}
}
private void gCTRBlock(byte[] block, byte[] output, int outOff)
{
var tmp = GetNextCounterBlock();
GcmUtilities.Xor(tmp, block);
Array.Copy(tmp, 0, output, outOff, BlockSize);
gHASHBlock(S, forEncryption ? tmp : block);
totalLength += BlockSize;
}
private void gCTRPartial(byte[] buf, int off, int len, byte[] output, int outOff)
{
var tmp = GetNextCounterBlock();
GcmUtilities.Xor(tmp, buf, off, len);
Array.Copy(tmp, 0, output, outOff, len);
gHASHPartial(S, forEncryption ? tmp : buf, 0, len);
totalLength += (uint)len;
}
private void gHASH(byte[] Y, byte[] b, int len)
{
for (var pos = 0; pos < len; pos += BlockSize)
{
var num = System.Math.Min(len - pos, BlockSize);
gHASHPartial(Y, b, pos, num);
}
}
private void gHASHBlock(byte[] Y, byte[] b)
{
GcmUtilities.Xor(Y, b);
multiplier.MultiplyH(Y);
}
private void gHASHPartial(byte[] Y, byte[] b, int off, int len)
{
GcmUtilities.Xor(Y, b, off, len);
multiplier.MultiplyH(Y);
}
private byte[] GetNextCounterBlock()
{
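// Increment the rightmost 32 bits of the counter (bytes 12..15, big-endian), i.e.
// GCM's inc32 operation, then encrypt the counter to produce the next keystream block.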
for (var i = 15; i >= 12; --i)
{
if (++counter[i] != 0) break;
}
var tmp = new byte[BlockSize];
// TODO Sure would be nice if ciphers could operate on int[]
cipher.ProcessBlock(counter, 0, tmp, 0);
return tmp;
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.Generic;
using System.Linq;
using Xunit;
using Assembly = System.Reflection.Tests;
[module: Assembly.MyAttribute_Single_P("single"), Assembly.MyAttribute_AllowMultiple_P("multiple1"), Assembly.MyAttribute_AllowMultiple_P("multiple2")]
namespace System.Reflection.Tests
{
public class GetCustomAttributes_ParameterInfo
{
[Fact]
public void IsDefined_Inherit()
{
Type type = typeof(TestClass_P);
MethodInfo miWithAttributes = type.GetTypeInfo().GetDeclaredMethod("methodWithAttribute");
ParameterInfo piWithAttributes = miWithAttributes.GetParameters()[0];
Assert.False(CustomAttributeExtensions.IsDefined(piWithAttributes,
typeof(CLSCompliantAttribute), false));
}
[Fact]
public void IsDefined()
{
Type type = typeof(TestClass_P);
MethodInfo miWithAttributes = type.GetTypeInfo().GetDeclaredMethod("methodWithAttribute");
ParameterInfo piWithAttributes = miWithAttributes.GetParameters()[0];
Assert.True(CustomAttributeExtensions.IsDefined(piWithAttributes, typeof(MyAttribute_Single_P)));
Assert.Throws<ArgumentException>(() =>
{
CustomAttributeExtensions.IsDefined(piWithAttributes, typeof(String));
});
Assert.Throws<ArgumentNullException>(() =>
{
CustomAttributeExtensions.IsDefined(piWithAttributes, null);
});
}
[Fact]
public void GetCustomAttributeOfT_Single_NoInherit()
{
Type type = typeof(TestClass_P);
MethodInfo miWithAttributes = type.GetTypeInfo().GetDeclaredMethod("methodWithAttribute");
ParameterInfo piWithAttributes = miWithAttributes.GetParameters()[0];
Attribute attribute = CustomAttributeExtensions.GetCustomAttribute<MyAttribute_Single_P>(piWithAttributes, false);
Assert.NotNull(attribute);
}
[Fact]
public void GetCustomAttributeOfT_Single()
{
Type type = typeof(TestClass_P);
MethodInfo miWithAttributes = type.GetTypeInfo().GetDeclaredMethod("methodWithAttribute");
ParameterInfo piWithAttributes = miWithAttributes.GetParameters()[0];
Attribute attribute = CustomAttributeExtensions.GetCustomAttribute<MyAttribute_Single_P>(piWithAttributes);
Assert.NotNull(attribute);
Assert.Throws<AmbiguousMatchException>(() =>
{
attribute = CustomAttributeExtensions.GetCustomAttribute<MyAttribute_AllowMultiple_P>(piWithAttributes);
});
}
[Fact]
public void GetCustomAttributeOfT_Multiple_NoInherit()
{
Type type = typeof(TestClass_P);
MethodInfo miWithAttributes = type.GetTypeInfo().GetDeclaredMethod("methodWithAttribute");
ParameterInfo piWithAttributes = miWithAttributes.GetParameters()[0];
IEnumerable<Attribute> attributes;
attributes = CustomAttributeExtensions.GetCustomAttributes<MyAttribute_AllowMultiple_P>(piWithAttributes, false);
Assert.Equal(2, attributes.Count());
}
[Fact]
public void GetCustomAttributeOfT()
{
Type type = typeof(TestClass_P);
MethodInfo miWithAttributes = type.GetTypeInfo().GetDeclaredMethod("methodWithAttribute");
ParameterInfo piWithAttributes = miWithAttributes.GetParameters()[0];
IEnumerable<Attribute> attributes;
attributes = CustomAttributeExtensions.GetCustomAttributes<MyAttribute_Single_P>(piWithAttributes);
Assert.Equal(1, attributes.Count());
attributes = CustomAttributeExtensions.GetCustomAttributes<CLSCompliantAttribute>(piWithAttributes);
Assert.Equal(0, attributes.Count());
}
[Fact]
public void GetCustomAttribute_Single_NoInherit()
{
Type type = typeof(TestClass_P);
MethodInfo miWithAttributes = type.GetTypeInfo().GetDeclaredMethod("methodWithAttribute");
ParameterInfo piWithAttributes = miWithAttributes.GetParameters()[0];
Attribute attribute = CustomAttributeExtensions.GetCustomAttribute(piWithAttributes, typeof(MyAttribute_Single_P), false);
Assert.NotNull(attribute);
}
[Fact]
public void GetCustomAttribute_Single()
{
Type type = typeof(TestClass_P);
MethodInfo miWithAttributes = type.GetTypeInfo().GetDeclaredMethod("methodWithAttribute");
ParameterInfo piWithAttributes = miWithAttributes.GetParameters()[0];
Attribute attribute = CustomAttributeExtensions.GetCustomAttribute(piWithAttributes, typeof(MyAttribute_Single_P));
Assert.NotNull(attribute);
Assert.Throws<AmbiguousMatchException>(() =>
{
attribute = CustomAttributeExtensions.GetCustomAttribute(piWithAttributes, typeof(MyAttribute_AllowMultiple_P));
});
Assert.Throws<ArgumentException>(() =>
{
attribute = CustomAttributeExtensions.GetCustomAttribute(piWithAttributes, typeof(String));
});
Assert.Throws<ArgumentNullException>(() =>
{
attribute = CustomAttributeExtensions.GetCustomAttribute(piWithAttributes, null);
});
}
[Fact]
public void GetCustomAttribute_Multiple_NoInherit()
{
Type type = typeof(TestClass_P);
MethodInfo miWithAttributes = type.GetTypeInfo().GetDeclaredMethod("methodWithAttribute");
ParameterInfo piWithAttributes = miWithAttributes.GetParameters()[0];
IEnumerable<Attribute> attributes;
attributes = CustomAttributeExtensions.GetCustomAttributes(piWithAttributes, typeof(MyAttribute_AllowMultiple_P), false);
Assert.Equal(2, attributes.Count());
}
[Fact]
public void GetCustomAttribute_Multiple()
{
Type type = typeof(TestClass_P);
MethodInfo miWithAttributes = type.GetTypeInfo().GetDeclaredMethod("methodWithAttribute");
ParameterInfo piWithAttributes = miWithAttributes.GetParameters()[0];
IEnumerable<Attribute> attributes;
attributes = CustomAttributeExtensions.GetCustomAttributes(piWithAttributes, typeof(MyAttribute_AllowMultiple_P));
Assert.Equal(2, attributes.Count());
attributes = CustomAttributeExtensions.GetCustomAttributes(piWithAttributes, typeof(CLSCompliantAttribute));
Assert.Equal(0, attributes.Count());
Assert.Throws<ArgumentException>(() =>
{
attributes = CustomAttributeExtensions.GetCustomAttributes(piWithAttributes, typeof(String));
});
Assert.Throws<ArgumentNullException>(() =>
{
attributes = CustomAttributeExtensions.GetCustomAttributes(piWithAttributes, null);
});
}
[Fact]
public void GetCustomAttribute_General_NoInherit()
{
Type type = typeof(TestClass_P);
MethodInfo miWithoutAttributes = type.GetTypeInfo().GetDeclaredMethod("methodWithoutAttribute");
ParameterInfo piWithoutAttributes = miWithoutAttributes.GetParameters()[0];
MethodInfo miWithAttributes = type.GetTypeInfo().GetDeclaredMethod("methodWithAttribute");
ParameterInfo piWithAttributes = miWithAttributes.GetParameters()[0];
IEnumerable<Attribute> attributes;
attributes = CustomAttributeExtensions.GetCustomAttributes(piWithoutAttributes, false);
Assert.Equal(0, attributes.Count());
attributes = CustomAttributeExtensions.GetCustomAttributes(piWithAttributes, false);
Assert.Equal(5, attributes.Count());
Assert.Equal(1, attributes.Count(attr => attr.ToString().Equals("System.Reflection.Tests.MyAttribute_Single_P single", StringComparison.Ordinal)));
Assert.Equal(1, attributes.Count(attr => attr.ToString().Equals("System.Reflection.Tests.MyAttribute_AllowMultiple_P multiple1", StringComparison.Ordinal)));
Assert.Equal(1, attributes.Count(attr => attr.ToString().Equals("System.Reflection.Tests.MyAttribute_AllowMultiple_P multiple2", StringComparison.Ordinal)));
Assert.Equal(1, attributes.Count(attr => attr.ToString().Equals("System.Reflection.Tests.MyAttribute_Single_Inherited_P single", StringComparison.Ordinal)));
Assert.Equal(1, attributes.Count(attr => attr.ToString().Equals("System.Reflection.Tests.MyAttribute_AllowMultiple_Inherited_P multiple", StringComparison.Ordinal)));
}
[Fact]
public void GetCustomAttribute_General()
{
Type type = typeof(TestClass_P);
MethodInfo miWithAttributes = type.GetTypeInfo().GetDeclaredMethod("methodWithAttribute");
ParameterInfo piWithAttributes = miWithAttributes.GetParameters()[0];
MethodInfo miWithoutAttributes = type.GetTypeInfo().GetDeclaredMethod("methodWithoutAttribute");
ParameterInfo piWithoutAttributes = miWithoutAttributes.GetParameters()[0];
IEnumerable<Attribute> attributes = CustomAttributeExtensions.GetCustomAttributes(piWithoutAttributes);
Assert.Equal(0, attributes.Count());
IEnumerable<CustomAttributeData> attributeData = piWithoutAttributes.CustomAttributes;
Assert.Equal(0, attributeData.Count());
attributes = CustomAttributeExtensions.GetCustomAttributes(piWithAttributes);
Assert.Equal(5, attributes.Count());
Assert.Equal(1, attributes.Count(attr => attr.ToString().Equals("System.Reflection.Tests.MyAttribute_Single_P single", StringComparison.Ordinal)));
Assert.Equal(1, attributes.Count(attr => attr.ToString().Equals("System.Reflection.Tests.MyAttribute_AllowMultiple_P multiple1", StringComparison.Ordinal)));
Assert.Equal(1, attributes.Count(attr => attr.ToString().Equals("System.Reflection.Tests.MyAttribute_AllowMultiple_P multiple2", StringComparison.Ordinal)));
Assert.Equal(1, attributes.Count(attr => attr.ToString().Equals("System.Reflection.Tests.MyAttribute_Single_Inherited_P single", StringComparison.Ordinal)));
Assert.Equal(1, attributes.Count(attr => attr.ToString().Equals("System.Reflection.Tests.MyAttribute_AllowMultiple_Inherited_P multiple", StringComparison.Ordinal)));
attributeData = piWithAttributes.CustomAttributes;
Assert.Equal(5, attributeData.Count());
Assert.Equal(2, attributeData.Count(attr => attr.AttributeType.ToString().Equals("System.Reflection.Tests.MyAttribute_AllowMultiple_P", StringComparison.Ordinal)));
Assert.Equal(1, attributeData.Count(attr => attr.AttributeType.ToString().Equals("System.Reflection.Tests.MyAttribute_Single_P", StringComparison.Ordinal)));
Assert.Equal(1, attributeData.Count(attr => attr.AttributeType.ToString().Equals("System.Reflection.Tests.MyAttribute_Single_Inherited_P", StringComparison.Ordinal)));
Assert.Equal(1, attributeData.Count(attr => attr.AttributeType.ToString().Equals("System.Reflection.Tests.MyAttribute_AllowMultiple_Inherited_P", StringComparison.Ordinal)));
}
}
public class ParameterInfoAttributeBase : Attribute
{
private String _name;
public ParameterInfoAttributeBase(String name)
{
_name = name;
}
public override String ToString() { return this.GetType() + " " + _name; }
}
[AttributeUsage(AttributeTargets.All, AllowMultiple = false, Inherited = false)]
public class MyAttribute_Single_P : ParameterInfoAttributeBase
{
public MyAttribute_Single_P(String name) : base(name) { }
}
[AttributeUsage(AttributeTargets.All, AllowMultiple = true, Inherited = false)]
public class MyAttribute_AllowMultiple_P : ParameterInfoAttributeBase
{
public MyAttribute_AllowMultiple_P(String name) : base(name) { }
}
[AttributeUsage(AttributeTargets.All, AllowMultiple = false, Inherited = true)]
public class MyAttribute_Single_Inherited_P : ParameterInfoAttributeBase
{
public MyAttribute_Single_Inherited_P(String name) : base(name) { }
}
[AttributeUsage(AttributeTargets.All, AllowMultiple = true, Inherited = true)]
public class MyAttribute_AllowMultiple_Inherited_P : ParameterInfoAttributeBase
{
public MyAttribute_AllowMultiple_Inherited_P(String name) : base(name) { }
}
public class TestClass_P
{
public void methodWithoutAttribute(int param) { }
public void methodWithAttribute([MyAttribute_Single_P("single"),
MyAttribute_AllowMultiple_P("multiple1"),
MyAttribute_AllowMultiple_P("multiple2"),
MyAttribute_Single_Inherited_P("single"),
MyAttribute_AllowMultiple_Inherited_P("multiple")] int param)
{ }
}
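// Illustrative usage sketch (added; not part of the original test suite). It condenses
// the behaviour the tests above exercise, using the same static-call style. The class
// and method names below are hypothetical, and the usings already present in this file
// (System, System.Collections.Generic, System.Linq, System.Reflection) are assumed.
public static class ParameterInfoAttributeUsageExample
{
public static void Demonstrate()
{
ParameterInfo parameter = typeof(TestClass_P).GetTypeInfo()
.GetDeclaredMethod("methodWithAttribute").GetParameters()[0];
// Single-use attribute: exactly one instance is applied, so GetCustomAttribute<T> succeeds.
MyAttribute_Single_P single =
CustomAttributeExtensions.GetCustomAttribute<MyAttribute_Single_P>(parameter);
// Multi-use attribute: GetCustomAttribute<T> would throw AmbiguousMatchException here,
// so GetCustomAttributes<T> is used to retrieve both instances.
IEnumerable<MyAttribute_AllowMultiple_P> multiple =
CustomAttributeExtensions.GetCustomAttributes<MyAttribute_AllowMultiple_P>(parameter);
Console.WriteLine("{0}; {1} multi-use instances", single, multiple.Count());
}
}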
}
| |
using System;
using System.Globalization;
using System.Collections.Generic;
using Sasoma.Utils;
using Sasoma.Microdata.Interfaces;
using Sasoma.Languages.Core;
using Sasoma.Microdata.Properties;
namespace Sasoma.Microdata.Types
{
/// <summary>
/// User interaction: Visit to a web page.
/// </summary>
public class UserPageVisits_Core : TypeCore, IUserInteraction
{
public UserPageVisits_Core()
{
this._TypeId = 279;
this._Id = "UserPageVisits";
this._Schema_Org_Url = "http://schema.org/UserPageVisits";
string label = "";
GetLabel(out label, "UserPageVisits", typeof(UserPageVisits_Core));
this._Label = label;
this._Ancestors = new int[]{266,98,277};
this._SubTypes = new int[0];
this._SuperTypes = new int[]{277};
this._Properties = new int[]{67,108,143,229,19,71,82,130,151,158,214,216,218};
}
/// <summary>
/// A person attending the event.
/// </summary>
private Attendees_Core attendees;
public Attendees_Core Attendees
{
get
{
return attendees;
}
set
{
attendees = value;
SetPropertyInstance(attendees);
}
}
/// <summary>
/// A short description of the item.
/// </summary>
private Description_Core description;
public Description_Core Description
{
get
{
return description;
}
set
{
description = value;
SetPropertyInstance(description);
}
}
/// <summary>
/// The duration of the item (movie, audio recording, event, etc.) in <a href="http://en.wikipedia.org/wiki/ISO_8601" target="new">ISO 8601 date format</a>.
/// </summary>
private Properties.Duration_Core duration;
public Properties.Duration_Core Duration
{
get
{
return duration;
}
set
{
duration = value;
SetPropertyInstance(duration);
}
}
/// <summary>
/// The end date and time of the event (in <a href="http://en.wikipedia.org/wiki/ISO_8601" target="new">ISO 8601 date format</a>).
/// </summary>
private EndDate_Core endDate;
public EndDate_Core EndDate
{
get
{
return endDate;
}
set
{
endDate = value;
SetPropertyInstance(endDate);
}
}
/// <summary>
/// URL of an image of the item.
/// </summary>
private Image_Core image;
public Image_Core Image
{
get
{
return image;
}
set
{
image = value;
SetPropertyInstance(image);
}
}
/// <summary>
/// The location of the event or organization.
/// </summary>
private Location_Core location;
public Location_Core Location
{
get
{
return location;
}
set
{
location = value;
SetPropertyInstance(location);
}
}
/// <summary>
/// The name of the item.
/// </summary>
private Name_Core name;
public Name_Core Name
{
get
{
return name;
}
set
{
name = value;
SetPropertyInstance(name);
}
}
/// <summary>
/// An offer to sell this item; for example, an offer to sell a product, the DVD of a movie, or tickets to an event.
/// </summary>
private Offers_Core offers;
public Offers_Core Offers
{
get
{
return offers;
}
set
{
offers = value;
SetPropertyInstance(offers);
}
}
/// <summary>
/// The main performer or performers of the event; for example, a presenter, musician, or actor.
/// </summary>
private Performers_Core performers;
public Performers_Core Performers
{
get
{
return performers;
}
set
{
performers = value;
SetPropertyInstance(performers);
}
}
/// <summary>
/// The start date and time of the event (in <a href="http://en.wikipedia.org/wiki/ISO_8601" target="new">ISO 8601 date format</a>).
/// </summary>
private StartDate_Core startDate;
public StartDate_Core StartDate
{
get
{
return startDate;
}
set
{
startDate = value;
SetPropertyInstance(startDate);
}
}
/// <summary>
/// Events that are a part of this event. For example, a conference event includes many presentations, each are subEvents of the conference.
/// </summary>
private SubEvents_Core subEvents;
public SubEvents_Core SubEvents
{
get
{
return subEvents;
}
set
{
subEvents = value;
SetPropertyInstance(subEvents);
}
}
/// <summary>
/// An event that this event is a part of. For example, a collection of individual music performances might each have a music festival as their superEvent.
/// </summary>
private SuperEvent_Core superEvent;
public SuperEvent_Core SuperEvent
{
get
{
return superEvent;
}
set
{
superEvent = value;
SetPropertyInstance(superEvent);
}
}
/// <summary>
/// URL of the item.
/// </summary>
private Properties.URL_Core uRL;
public Properties.URL_Core URL
{
get
{
return uRL;
}
set
{
uRL = value;
SetPropertyInstance(uRL);
}
}
}
}
| |
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using OwaspHeaders.Core;
using OwaspHeaders.Core.Enums;
using OwaspHeaders.Core.Extensions;
using OwaspHeaders.Core.Models;
using Xunit;
namespace tests
{
public class SecureHeadersInjectedTest
{
private int _onNextCalledTimes;
private readonly Task _onNextResult = Task.FromResult(0);
private readonly RequestDelegate _onNext;
private readonly DefaultHttpContext _context;
public SecureHeadersInjectedTest()
{
_onNext = _ =>
{
Interlocked.Increment(ref _onNextCalledTimes);
return _onNextResult;
};
_context = new DefaultHttpContext();
}
[Fact]
public async Task Invoke_StrictTransportSecurityHeaderName_HeaderIsPresent()
{
// arrange
var headerPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().UseHsts().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.True(headerPresentConfig.UseHsts);
Assert.True(_context.Response.Headers.ContainsKey(Constants.StrictTransportSecurityHeaderName));
Assert.Equal("max-age=63072000;includeSubDomains",
_context.Response.Headers[Constants.StrictTransportSecurityHeaderName]);
}
[Fact]
public async Task Invoke_StrictTransportSecurityHeaderName_HeaderIsNotPresent()
{
// arrange
var headerNotPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerNotPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.False(headerNotPresentConfig.UseHsts);
Assert.False(_context.Response.Headers.ContainsKey(Constants.StrictTransportSecurityHeaderName));
}
[Fact]
public async Task Invoke_XFrameOptionsHeaderName_HeaderIsPresent()
{
// arrange
var headerPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().UseXFrameOptions().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.True(headerPresentConfig.UseXFrameOptions);
Assert.True(_context.Response.Headers.ContainsKey(Constants.XFrameOptionsHeaderName));
Assert.Equal("DENY", _context.Response.Headers[Constants.XFrameOptionsHeaderName]);
}
[Fact]
public async Task Invoke_XFrameOptionsHeaderName_HeaderIsNotPresentInDefault()
{
// arrange
var headerNotPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerNotPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.False(headerNotPresentConfig.UseXFrameOptions);
Assert.False(_context.Response.Headers.ContainsKey(Constants.XFrameOptionsHeaderName));
}
[Fact]
public async Task Invoke_XssProtectionHeaderName_HeaderIsPresent()
{
// arrange
var headerPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().UseXSSProtection().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.True(headerPresentConfig.UseXssProtection);
Assert.True(_context.Response.Headers.ContainsKey(Constants.XssProtectionHeaderName));
Assert.Equal("1;mode=block", _context.Response.Headers[Constants.XssProtectionHeaderName]);
}
[Fact]
public async Task Invoke_XssProtectionHeaderName_HeaderIsNotPresent()
{
// arrange
var headerNotPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerNotPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.False(headerNotPresentConfig.UseXssProtection);
Assert.False(_context.Response.Headers.ContainsKey(Constants.XssProtectionHeaderName));
}
[Fact]
public async Task Invoke_XContentTypeOptionsHeaderName_HeaderIsPresent()
{
// arrange
var headerPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().UseContentTypeOptions().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.True(headerPresentConfig.UseXContentTypeOptions);
Assert.True(_context.Response.Headers.ContainsKey(Constants.XContentTypeOptionsHeaderName));
Assert.Equal("nosniff", _context.Response.Headers[Constants.XContentTypeOptionsHeaderName]);
}
[Fact]
public async Task Invoke_XContentTypeOptionsHeaderName_HeaderIsNotPresent()
{
// arrange
var headerNotPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerNotPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.False(headerNotPresentConfig.UseXContentTypeOptions);
Assert.False(_context.Response.Headers.ContainsKey(Constants.XContentTypeOptionsHeaderName));
}
[Fact]
public async Task Invoke_ContentSecurityPolicyHeaderName_HeaderIsPresent()
{
// arrange
var headerPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().UseContentDefaultSecurityPolicy().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
if (headerPresentConfig.UseContentSecurityPolicy)
{
Assert.True(_context.Response.Headers.ContainsKey(Constants.ContentSecurityPolicyHeaderName));
Assert.Equal("script-src 'self';object-src 'self';block-all-mixed-content;upgrade-insecure-requests;",
_context.Response.Headers[Constants.ContentSecurityPolicyHeaderName]);
}
else
{
Assert.False(_context.Response.Headers.ContainsKey(Constants.ContentSecurityPolicyHeaderName));
}
}
[Fact]
public async Task Invoke_NullConfig_ExceptionThrown()
{
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, null);
var exception = await Record.ExceptionAsync(() => secureHeadersMiddleware.Invoke(_context));
Assert.NotNull(exception);
Assert.IsAssignableFrom<ArgumentException>(exception);
var argEx = exception as ArgumentException;
Assert.NotNull(argEx);
Assert.Contains(nameof(SecureHeadersMiddlewareConfiguration), exception.Message);
}
[Fact]
public async Task Invoke_ContentSecurityPolicyHeaderName_HeaderIsPresent_WithMultipleCspSandboxTypes()
{
// arrange
var headerPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().UseContentSecurityPolicy().Build();
headerPresentConfig.SetCspSandBox(CspSandboxType.allowForms, CspSandboxType.allowScripts, CspSandboxType.allowSameOrigin);
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.True(_context.Response.Headers.ContainsKey(Constants.ContentSecurityPolicyHeaderName));
Assert.Equal("sandbox allow-forms allow-scripts allow-same-origin;block-all-mixed-content;upgrade-insecure-requests;",
_context.Response.Headers[Constants.ContentSecurityPolicyHeaderName]);
}
[Fact]
public async Task Invoke_ContentSecurityPolicyHeaderName_HeaderIsNotPresent()
{
// arrange
var headerNotPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerNotPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.False(headerNotPresentConfig.UseContentSecurityPolicy);
Assert.False(_context.Response.Headers.ContainsKey(Constants.ContentSecurityPolicyHeaderName));
}
[Fact]
public async Task Invoke_ContentSecurityPolicyReportOnly_HeaderIsPresent_WithMultipleCspSandboxTypes()
{
const string reportUri = "https://localhost:5001/report-uri";
// arrange
var headerPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().UseContentSecurityPolicyReportOnly(reportUri).Build();
headerPresentConfig.SetCspSandBox(CspSandboxType.allowForms, CspSandboxType.allowScripts, CspSandboxType.allowSameOrigin);
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.True(_context.Response.Headers.ContainsKey(Constants.ContentSecurityPolicyReportOnlyHeaderName));
Assert.Equal($"block-all-mixed-content;upgrade-insecure-requests;report-uri {reportUri};",
_context.Response.Headers[Constants.ContentSecurityPolicyReportOnlyHeaderName]);
}
[Fact]
public async Task Invoke_ContentSecurityPolicyReportOnly_HeaderIsNotPresent()
{
// arrange
var headerNotPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerNotPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.False(headerNotPresentConfig.UseContentSecurityPolicyReportOnly);
Assert.False(_context.Response.Headers.ContainsKey(Constants.ContentSecurityPolicyReportOnlyHeaderName));
}
[Fact]
public async Task Invoke_XContentSecurityPolicyHeaderName_HeaderIsPresent()
{
// arrange
var headerPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().UseContentSecurityPolicy(useXContentSecurityPolicy: true).Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.True(headerPresentConfig.UseXContentSecurityPolicy);
Assert.True(_context.Response.Headers.ContainsKey(Constants.XContentSecurityPolicyHeaderName));
Assert.Equal("block-all-mixed-content;upgrade-insecure-requests;",
_context.Response.Headers[Constants.XContentSecurityPolicyHeaderName]);
}
[Fact]
public async Task Invoke_XContentSecurityPolicyHeaderName_HeaderIsNotPresent()
{
// arrange
var headerNotPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerNotPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.False(headerNotPresentConfig.UseXContentSecurityPolicy);
Assert.False(_context.Response.Headers.ContainsKey(Constants.XContentSecurityPolicyHeaderName));
}
[Fact]
public async Task Invoke_PermittedCrossDomainPoliciesHeaderName_HeaderIsPresent()
{
// arrange
var headerPresentConfig =
SecureHeadersMiddlewareBuilder.CreateBuilder().UsePermittedCrossDomainPolicies().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.True(headerPresentConfig.UsePermittedCrossDomainPolicy);
Assert.True(_context.Response.Headers.ContainsKey(Constants.PermittedCrossDomainPoliciesHeaderName));
Assert.Equal("none;",
_context.Response.Headers[Constants.PermittedCrossDomainPoliciesHeaderName]);
}
[Fact]
public async Task Invoke_PermittedCrossDomainPoliciesHeaderName_HeaderIsNotPresent()
{
// arrange
var headerNotPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerNotPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.False(headerNotPresentConfig.UsePermittedCrossDomainPolicy);
Assert.False(_context.Response.Headers.ContainsKey(Constants.PermittedCrossDomainPoliciesHeaderName));
}
[Fact]
public async Task Invoke_ReferrerPolicyHeaderName_HeaderIsPresent()
{
// arrange
var headerPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().UseReferrerPolicy().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.True(headerPresentConfig.UseReferrerPolicy);
Assert.True(_context.Response.Headers.ContainsKey(Constants.ReferrerPolicyHeaderName));
Assert.Equal("no-referrer", _context.Response.Headers[Constants.ReferrerPolicyHeaderName]);
}
[Fact]
public async Task Invoke_ReferrerPolicyHeaderName_HeaderIsNotPresent()
{
// arrange
var headerNotPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerNotPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.False(headerNotPresentConfig.UseReferrerPolicy);
Assert.False(_context.Response.Headers.ContainsKey(Constants.ReferrerPolicyHeaderName));
}
[Fact]
public async Task Invoke_ExpectCtHeaderName_HeaderIsPresent()
{
// arrange
var headerPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder()
.UseExpectCt("https://test.com/report").Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.True(headerPresentConfig.UseExpectCt);
Assert.True(_context.Response.Headers.ContainsKey(Constants.ExpectCtHeaderName));
Assert.Equal(headerPresentConfig.ExpectCt.BuildHeaderValue(),
_context.Response.Headers[Constants.ExpectCtHeaderName]);
}
[Fact]
public async Task Invoke_ExpectCtHeaderName_HeaderIsPresent_ReportUri_Optional()
{
// arrange
var headerPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder()
.UseExpectCt(string.Empty).Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.True(headerPresentConfig.UseExpectCt);
Assert.True(_context.Response.Headers.ContainsKey(Constants.ExpectCtHeaderName));
Assert.Equal(headerPresentConfig.ExpectCt.BuildHeaderValue(),
_context.Response.Headers[Constants.ExpectCtHeaderName]);
}
[Fact]
public async Task Invoke_ExpectCtHeaderName_HeaderIsNotPresent()
{
// arrange
var headerNotPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerNotPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.False(headerNotPresentConfig.UseExpectCt);
Assert.False(_context.Response.Headers.ContainsKey(Constants.ExpectCtHeaderName));
}
[Fact]
public async Task Invoke_XPoweredByHeader_RemoveHeader()
{
// arrange
var headerPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().RemovePoweredByHeader().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.True(headerPresentConfig.RemoveXPoweredByHeader);
Assert.False(_context.Response.Headers.ContainsKey(Constants.PoweredByHeaderName));
Assert.False(_context.Response.Headers.ContainsKey(Constants.ServerHeaderName));
}
[Fact]
public async Task Invoke_XPoweredByHeader_DoNotRemoveHeader()
{
// arrange
var headerPresentConfig = SecureHeadersMiddlewareBuilder.CreateBuilder().Build();
var secureHeadersMiddleware = new SecureHeadersMiddleware(_onNext, headerPresentConfig);
// act
await secureHeadersMiddleware.Invoke(_context);
// assert
Assert.False(headerPresentConfig.RemoveXPoweredByHeader);
// Am currently running the 2.1.300 Preview 1 build of the SDK
// and the server doesn't seem to add this header.
// Therefore this assert is commented out, as it will always fail
//Assert.True(_context.Response.Headers.ContainsKey(Constants.PoweredByHeaderName));
}
}
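// Illustrative wiring sketch (added; not part of the original test suite). Outside of
// unit tests the middleware is attached to the ASP.NET Core pipeline; this sketch uses
// the framework's generic UseMiddleware<T> registration, fully qualified because this
// file does not import Microsoft.AspNetCore.Builder. OwaspHeaders.Core may also expose
// its own registration extension, which is deliberately not assumed here.
public static class SecureHeadersWiringExample
{
public static void Configure(Microsoft.AspNetCore.Builder.IApplicationBuilder app)
{
var config = SecureHeadersMiddlewareBuilder.CreateBuilder()
.UseHsts()
.UseXFrameOptions()
.UseContentDefaultSecurityPolicy()
.Build();
// The configuration is forwarded as a constructor argument, matching
// SecureHeadersMiddleware(RequestDelegate next, SecureHeadersMiddlewareConfiguration config).
Microsoft.AspNetCore.Builder.UseMiddlewareExtensions.UseMiddleware<SecureHeadersMiddleware>(app, config);
}
}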
}
| |
using System;
using ChainUtils.BouncyCastle.Utilities;
namespace ChainUtils.BouncyCastle.Crypto.Digests
{
/**
* Implementation of WhirlpoolDigest, based on Java source published by Barreto
* and Rijmen.
*
*/
public sealed class WhirlpoolDigest
: IDigest, IMemoable
{
private const int BYTE_LENGTH = 64;
private const int DIGEST_LENGTH_BYTES = 512 / 8;
private const int ROUNDS = 10;
private const int REDUCTION_POLYNOMIAL = 0x011d; // 2^8 + 2^4 + 2^3 + 2 + 1;
private static readonly int[] SBOX =
{
0x18, 0x23, 0xc6, 0xe8, 0x87, 0xb8, 0x01, 0x4f, 0x36, 0xa6, 0xd2, 0xf5, 0x79, 0x6f, 0x91, 0x52,
0x60, 0xbc, 0x9b, 0x8e, 0xa3, 0x0c, 0x7b, 0x35, 0x1d, 0xe0, 0xd7, 0xc2, 0x2e, 0x4b, 0xfe, 0x57,
0x15, 0x77, 0x37, 0xe5, 0x9f, 0xf0, 0x4a, 0xda, 0x58, 0xc9, 0x29, 0x0a, 0xb1, 0xa0, 0x6b, 0x85,
0xbd, 0x5d, 0x10, 0xf4, 0xcb, 0x3e, 0x05, 0x67, 0xe4, 0x27, 0x41, 0x8b, 0xa7, 0x7d, 0x95, 0xd8,
0xfb, 0xee, 0x7c, 0x66, 0xdd, 0x17, 0x47, 0x9e, 0xca, 0x2d, 0xbf, 0x07, 0xad, 0x5a, 0x83, 0x33,
0x63, 0x02, 0xaa, 0x71, 0xc8, 0x19, 0x49, 0xd9, 0xf2, 0xe3, 0x5b, 0x88, 0x9a, 0x26, 0x32, 0xb0,
0xe9, 0x0f, 0xd5, 0x80, 0xbe, 0xcd, 0x34, 0x48, 0xff, 0x7a, 0x90, 0x5f, 0x20, 0x68, 0x1a, 0xae,
0xb4, 0x54, 0x93, 0x22, 0x64, 0xf1, 0x73, 0x12, 0x40, 0x08, 0xc3, 0xec, 0xdb, 0xa1, 0x8d, 0x3d,
0x97, 0x00, 0xcf, 0x2b, 0x76, 0x82, 0xd6, 0x1b, 0xb5, 0xaf, 0x6a, 0x50, 0x45, 0xf3, 0x30, 0xef,
0x3f, 0x55, 0xa2, 0xea, 0x65, 0xba, 0x2f, 0xc0, 0xde, 0x1c, 0xfd, 0x4d, 0x92, 0x75, 0x06, 0x8a,
0xb2, 0xe6, 0x0e, 0x1f, 0x62, 0xd4, 0xa8, 0x96, 0xf9, 0xc5, 0x25, 0x59, 0x84, 0x72, 0x39, 0x4c,
0x5e, 0x78, 0x38, 0x8c, 0xd1, 0xa5, 0xe2, 0x61, 0xb3, 0x21, 0x9c, 0x1e, 0x43, 0xc7, 0xfc, 0x04,
0x51, 0x99, 0x6d, 0x0d, 0xfa, 0xdf, 0x7e, 0x24, 0x3b, 0xab, 0xce, 0x11, 0x8f, 0x4e, 0xb7, 0xeb,
0x3c, 0x81, 0x94, 0xf7, 0xb9, 0x13, 0x2c, 0xd3, 0xe7, 0x6e, 0xc4, 0x03, 0x56, 0x44, 0x7f, 0xa9,
0x2a, 0xbb, 0xc1, 0x53, 0xdc, 0x0b, 0x9d, 0x6c, 0x31, 0x74, 0xf6, 0x46, 0xac, 0x89, 0x14, 0xe1,
0x16, 0x3a, 0x69, 0x09, 0x70, 0xb6, 0xd0, 0xed, 0xcc, 0x42, 0x98, 0xa4, 0x28, 0x5c, 0xf8, 0x86
};
private static readonly long[] C0 = new long[256];
private static readonly long[] C1 = new long[256];
private static readonly long[] C2 = new long[256];
private static readonly long[] C3 = new long[256];
private static readonly long[] C4 = new long[256];
private static readonly long[] C5 = new long[256];
private static readonly long[] C6 = new long[256];
private static readonly long[] C7 = new long[256];
private readonly long[] _rc = new long[ROUNDS + 1];
/*
* increment() can be implemented in this way using 2 arrays or
* by having some temporary variables that are used to set the
* value provided by EIGHT[i] and carry within the loop.
*
* not having done any timing, this seems likely to be faster
* at the slight expense of 32*(sizeof short) bytes
*/
private static readonly short[] EIGHT = new short[BITCOUNT_ARRAY_SIZE];
static WhirlpoolDigest()
{
EIGHT[BITCOUNT_ARRAY_SIZE - 1] = 8;
for (var i = 0; i < 256; i++)
{
var v1 = SBOX[i];
var v2 = maskWithReductionPolynomial(v1 << 1);
var v4 = maskWithReductionPolynomial(v2 << 1);
var v5 = v4 ^ v1;
var v8 = maskWithReductionPolynomial(v4 << 1);
var v9 = v8 ^ v1;
C0[i] = packIntoLong(v1, v1, v4, v1, v8, v5, v2, v9);
C1[i] = packIntoLong(v9, v1, v1, v4, v1, v8, v5, v2);
C2[i] = packIntoLong(v2, v9, v1, v1, v4, v1, v8, v5);
C3[i] = packIntoLong(v5, v2, v9, v1, v1, v4, v1, v8);
C4[i] = packIntoLong(v8, v5, v2, v9, v1, v1, v4, v1);
C5[i] = packIntoLong(v1, v8, v5, v2, v9, v1, v1, v4);
C6[i] = packIntoLong(v4, v1, v8, v5, v2, v9, v1, v1);
C7[i] = packIntoLong(v1, v4, v1, v8, v5, v2, v9, v1);
}
}
public WhirlpoolDigest()
{
_rc[0] = 0L;
for (var r = 1; r <= ROUNDS; r++)
{
var i = 8 * (r - 1);
_rc[r] = (long)((ulong)C0[i] & 0xff00000000000000L) ^
(C1[i + 1] & (long) 0x00ff000000000000L) ^
(C2[i + 2] & (long) 0x0000ff0000000000L) ^
(C3[i + 3] & (long) 0x000000ff00000000L) ^
(C4[i + 4] & (long) 0x00000000ff000000L) ^
(C5[i + 5] & (long) 0x0000000000ff0000L) ^
(C6[i + 6] & (long) 0x000000000000ff00L) ^
(C7[i + 7] & (long) 0x00000000000000ffL);
}
}
private static long packIntoLong(int b7, int b6, int b5, int b4, int b3, int b2, int b1, int b0)
{
return
((long)b7 << 56) ^
((long)b6 << 48) ^
((long)b5 << 40) ^
((long)b4 << 32) ^
((long)b3 << 24) ^
((long)b2 << 16) ^
((long)b1 << 8) ^
b0;
}
/*
* int's are used to prevent sign extension. The values that are really being used are
* actually just 0..255
*/
private static int maskWithReductionPolynomial(int input)
{
var rv = input;
if (rv >= 0x100L) // high bit set
{
rv ^= REDUCTION_POLYNOMIAL; // reduced by the polynomial
}
return rv;
}
// --------------------------------------------------------------------------------------//
// -- buffer information --
private const int BITCOUNT_ARRAY_SIZE = 32;
private byte[] _buffer = new byte[64];
private int _bufferPos;
private short[] _bitCount = new short[BITCOUNT_ARRAY_SIZE];
// -- internal hash state --
private long[] _hash = new long[8];
private long[] _K = new long[8]; // the round key
private long[] _L = new long[8];
private long[] _block = new long[8]; // mu (buffer)
private long[] _state = new long[8]; // the current "cipher" state
/**
* Copy constructor. This will copy the state of the provided message
* digest.
*/
public WhirlpoolDigest(WhirlpoolDigest originalDigest)
{
Reset(originalDigest);
}
public string AlgorithmName
{
get { return "Whirlpool"; }
}
public int GetDigestSize()
{
return DIGEST_LENGTH_BYTES;
}
public int DoFinal(byte[] output, int outOff)
{
// writes output[outOff] .. output[outOff + DIGEST_LENGTH_BYTES - 1]
finish();
for (var i = 0; i < 8; i++)
{
convertLongToByteArray(_hash[i], output, outOff + (i * 8));
}
Reset();
return GetDigestSize();
}
/**
* Reset the chaining variables
*/
public void Reset()
{
// set variables to null, blank, whatever
_bufferPos = 0;
Array.Clear(_bitCount, 0, _bitCount.Length);
Array.Clear(_buffer, 0, _buffer.Length);
Array.Clear(_hash, 0, _hash.Length);
Array.Clear(_K, 0, _K.Length);
Array.Clear(_L, 0, _L.Length);
Array.Clear(_block, 0, _block.Length);
Array.Clear(_state, 0, _state.Length);
}
// this takes a buffer of information and fills the block
private void processFilledBuffer()
{
// copies into the block...
for (var i = 0; i < _state.Length; i++)
{
_block[i] = bytesToLongFromBuffer(_buffer, i * 8);
}
processBlock();
_bufferPos = 0;
Array.Clear(_buffer, 0, _buffer.Length);
}
private static long bytesToLongFromBuffer(byte[] buffer, int startPos)
{
var rv = (((buffer[startPos + 0] & 0xffL) << 56) |
((buffer[startPos + 1] & 0xffL) << 48) |
((buffer[startPos + 2] & 0xffL) << 40) |
((buffer[startPos + 3] & 0xffL) << 32) |
((buffer[startPos + 4] & 0xffL) << 24) |
((buffer[startPos + 5] & 0xffL) << 16) |
((buffer[startPos + 6] & 0xffL) << 8) |
((buffer[startPos + 7]) & 0xffL));
return rv;
}
private static void convertLongToByteArray(long inputLong, byte[] outputArray, int offSet)
{
for (var i = 0; i < 8; i++)
{
outputArray[offSet + i] = (byte)((inputLong >> (56 - (i * 8))) & 0xff);
}
}
private void processBlock()
{
// buffer contents have been transferred to the _block[] array via
// processFilledBuffer
// compute and apply K^0
for (var i = 0; i < 8; i++)
{
_state[i] = _block[i] ^ (_K[i] = _hash[i]);
}
// iterate over the rounds
for (var round = 1; round <= ROUNDS; round++)
{
for (var i = 0; i < 8; i++)
{
_L[i] = 0;
_L[i] ^= C0[(int)(_K[(i - 0) & 7] >> 56) & 0xff];
_L[i] ^= C1[(int)(_K[(i - 1) & 7] >> 48) & 0xff];
_L[i] ^= C2[(int)(_K[(i - 2) & 7] >> 40) & 0xff];
_L[i] ^= C3[(int)(_K[(i - 3) & 7] >> 32) & 0xff];
_L[i] ^= C4[(int)(_K[(i - 4) & 7] >> 24) & 0xff];
_L[i] ^= C5[(int)(_K[(i - 5) & 7] >> 16) & 0xff];
_L[i] ^= C6[(int)(_K[(i - 6) & 7] >> 8) & 0xff];
_L[i] ^= C7[(int)(_K[(i - 7) & 7]) & 0xff];
}
Array.Copy(_L, 0, _K, 0, _K.Length);
_K[0] ^= _rc[round];
// apply the round transformation
for (var i = 0; i < 8; i++)
{
_L[i] = _K[i];
_L[i] ^= C0[(int)(_state[(i - 0) & 7] >> 56) & 0xff];
_L[i] ^= C1[(int)(_state[(i - 1) & 7] >> 48) & 0xff];
_L[i] ^= C2[(int)(_state[(i - 2) & 7] >> 40) & 0xff];
_L[i] ^= C3[(int)(_state[(i - 3) & 7] >> 32) & 0xff];
_L[i] ^= C4[(int)(_state[(i - 4) & 7] >> 24) & 0xff];
_L[i] ^= C5[(int)(_state[(i - 5) & 7] >> 16) & 0xff];
_L[i] ^= C6[(int)(_state[(i - 6) & 7] >> 8) & 0xff];
_L[i] ^= C7[(int)(_state[(i - 7) & 7]) & 0xff];
}
// save the current state
Array.Copy(_L, 0, _state, 0, _state.Length);
}
// apply the Miyaguchi-Preneel compression function
for (var i = 0; i < 8; i++)
{
_hash[i] ^= _state[i] ^ _block[i];
}
}
public void Update(byte input)
{
_buffer[_bufferPos] = input;
//Console.WriteLine("adding to buffer = "+_buffer[_bufferPos]);
++_bufferPos;
if (_bufferPos == _buffer.Length)
{
processFilledBuffer();
}
increment();
}
private void increment()
{
var carry = 0;
for (var i = _bitCount.Length - 1; i >= 0; i--)
{
var sum = (_bitCount[i] & 0xff) + EIGHT[i] + carry;
carry = sum >> 8;
_bitCount[i] = (short)(sum & 0xff);
}
}
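// Worked example (illustrative, added comment): _bitCount acts as a 256-bit big-endian
// counter split into 32 byte-sized cells. Each Update() call adds EIGHT, whose only
// non-zero cell is the last one with value 8, so after three single-byte updates the
// last cell holds 24 (0x18); once a cell's sum exceeds 0xff the carry ripples leftwards.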
public void BlockUpdate(byte[] input, int inOff, int length)
{
while (length > 0)
{
Update(input[inOff]);
++inOff;
--length;
}
}
private void finish()
{
/*
* this makes a copy of the current bit length. at the expense of an
* object creation of 32 bytes rather than providing a _stopCounting
* boolean which was the alternative I could think of.
*/
var bitLength = copyBitLength();
_buffer[_bufferPos++] |= 0x80;
if (_bufferPos == _buffer.Length)
{
processFilledBuffer();
}
/*
* Final block contains
* [ ... data .... ][0][0][0][ length ]
*
* if [ length ] cannot fit. Need to create a new block.
*/
if (_bufferPos > 32)
{
while (_bufferPos != 0)
{
Update((byte)0);
}
}
while (_bufferPos <= 32)
{
Update((byte)0);
}
// copy the length information to the final 32 bytes of the
// 64 byte block....
Array.Copy(bitLength, 0, _buffer, 32, bitLength.Length);
processFilledBuffer();
}
private byte[] copyBitLength()
{
var rv = new byte[BITCOUNT_ARRAY_SIZE];
for (var i = 0; i < rv.Length; i++)
{
rv[i] = (byte)(_bitCount[i] & 0xff);
}
return rv;
}
public int GetByteLength()
{
return BYTE_LENGTH;
}
public IMemoable Copy()
{
return new WhirlpoolDigest(this);
}
public void Reset(IMemoable other)
{
var originalDigest = (WhirlpoolDigest)other;
Array.Copy(originalDigest._rc, 0, _rc, 0, _rc.Length);
Array.Copy(originalDigest._buffer, 0, _buffer, 0, _buffer.Length);
_bufferPos = originalDigest._bufferPos;
Array.Copy(originalDigest._bitCount, 0, _bitCount, 0, _bitCount.Length);
// -- internal hash state --
Array.Copy(originalDigest._hash, 0, _hash, 0, _hash.Length);
Array.Copy(originalDigest._K, 0, _K, 0, _K.Length);
Array.Copy(originalDigest._L, 0, _L, 0, _L.Length);
Array.Copy(originalDigest._block, 0, _block, 0, _block.Length);
Array.Copy(originalDigest._state, 0, _state, 0, _state.Length);
}
}
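// Minimal usage sketch (added for illustration; the helper class name is hypothetical).
// It shows the streaming IDigest pattern used by this implementation: absorb input via
// BlockUpdate, then finalize into a caller-supplied buffer sized via GetDigestSize().
public static class WhirlpoolDigestExample
{
public static byte[] Hash(byte[] input)
{
var digest = new WhirlpoolDigest();
digest.BlockUpdate(input, 0, input.Length); // absorb the whole buffer
var output = new byte[digest.GetDigestSize()]; // 64 bytes for Whirlpool
digest.DoFinal(output, 0); // pads, runs the final compression and resets the state
return output;
}
}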
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using Microsoft.Msagl.Core;
using Microsoft.Msagl.Core.DataStructures;
using Microsoft.Msagl.Core.GraphAlgorithms;
namespace Microsoft.Msagl.Layout.Layered {
/// <summary>
/// The algorithm follows "A technique for Drawing Directed Graphs", Gansner, Koutsofios, North, Vo.
/// Consider re-implementing this algorithm following Chvatal. The algorithm works for a connected graph.
/// </summary>
internal class NetworkSimplex : AlgorithmBase, LayerCalculator {
static BasicGraph<IntEdge> CreateGraphWithIEEdges(BasicGraph<IntEdge> bg) {
List<IntEdge> ieEdges = new List<IntEdge>();
foreach (IntEdge e in bg.Edges)
ieEdges.Add(new NetworkEdge(e));
return new BasicGraph<IntEdge>(ieEdges, bg.NodeCount);
}
int[] layers;
internal NetworkSimplex(BasicGraph<IntEdge> graph, CancelToken cancelToken)
{
this.graph = CreateGraphWithIEEdges(graph);
inTree = new bool[graph.NodeCount];
NetworkCancelToken = cancelToken;
}
public int[] GetLayers() {
if (layers == null)
Run(NetworkCancelToken);
return layers;
}
private void ShiftLayerToZero() {
int minLayer = NetworkEdge.Infinity;
foreach (int i in layers)
if (i < minLayer)
minLayer = i;
for (int i = 0; i < graph.NodeCount; i++)
layers[i] -= minLayer;
}
/// <summary>
/// The function FeasibleTree constructs an initial feasible spanning tree.
/// </summary>
void FeasibleTree() {
InitLayer();
while (TightTree() < this.graph.NodeCount) {
IntEdge e = GetNonTreeEdgeIncidentToTheTreeWithMinimalAmountOfSlack();
if (e == null)
break; //all edges are tree edges
int slack = Slack(e);
if (slack == 0)
throw new InvalidOperationException();//"the tree should be tight");
if (inTree[e.Source])
slack = -slack;
//shift the tree rigidly up or down and make e tight ; since the slack is the minimum of slacks
//the layering will still remain feasible
foreach (int i in treeVertices)
layers[i] += slack;
}
InitCutValues();
}
/// <summary>
/// treeEdge, belonging to the tree, divides the vertices to source and target components
/// If v belongs to the source component we return 1, otherwise we return 0
/// </summary>
/// <param name="v">a vertex</param>
/// <param name="treeEdge">an edge from the tree</param>
/// <returns>1 if v lies in the source component, 0 otherwise</returns>
int VertexSourceTargetVal(int v, NetworkEdge treeEdge) {
#if DEBUGNW
if (treeEdge.inTree == false)
throw new Exception("wrong params for VertexSourceTargetVal");
#endif
int s = treeEdge.Source;
int t = treeEdge.Target;
if (lim[s] > lim[t])//s belongs to the tree root component
if (lim[v] <= lim[t] && low[t] <= lim[v])
return 0;
else
return 1;
else //t belongs to the tree root component
if (lim[v] <= lim[s] && low[s] <= lim[v])
return 1;
else
return 0;
}
/// <summary>
/// a convenient wrapper of IncEdges
/// </summary>
/// <param name="v"></param>
/// <returns>edges incident to v</returns>
IncEdges IncidentEdges(int v) {
return new IncEdges(v, this);
}
bool AllLowCutsHaveBeenDone(int v) {
foreach (NetworkEdge ie in IncidentEdges(v))
if (ie.inTree && ie.Cut == NetworkEdge.Infinity && ie != parent[v])
return false;
return true;
}
/// <summary>
/// treeEdge, belonging to the tree, divides the vertices to source and target components
/// e does not belong to the tree . If e goes from the source component to target component
/// then the return value is 1,
/// if e goes from the target component to the source then the return value is -1
/// otherwise it is zero
/// </summary>
/// <param name="e">a non-tree edge</param>
/// <param name="treeEdge">a tree edge</param>
/// <returns></returns>
int EdgeSourceTargetVal(NetworkEdge e, NetworkEdge treeEdge) {
// if (e.inTree || treeEdge.inTree == false)
// throw new Exception("wrong params for EdgeSOurceTargetVal");
return VertexSourceTargetVal(e.Source, treeEdge) - VertexSourceTargetVal(e.Target, treeEdge);
}
/// <summary>
/// The init_cutvalues function computes the cut values of the tree edges.
/// For each tree edge, this is computed by marking the nodes as belonging to the source or
/// target component, and then performing the sum of the signed weights of all
/// edges whose source and target are in different components, the sign being negative for those edges
/// going from the source to the target component.
/// To reduce this cost, we note that the cut values can be computed using information local to an edge
/// if the search is ordered from the leaves of the feasible tree inward. It is trivial to compute the
/// cut value of a tree edge with one of its endpoints a leaf in the tree,
/// since either the source or the target component consists of a single node.
/// Now, assuming the cut values are known for all the edges incident on a given
/// node except one, the cut value of the remaining edge is the sum of the known cut
/// values plus a term dependent only on the edges incident to the given node.
/// </summary>
void InitCutValues() {
InitLimLowAndParent();
//going up from the leaves following parents
Stack<int> front = new Stack<int>();
foreach (int i in leaves)
front.Push(i);
Stack<int> newFront = new Stack<int>();
while (front.Count > 0) {
while (front.Count > 0) {
int w = front.Pop();
NetworkEdge cutEdge = parent[w]; //have to find the cut of e
if (cutEdge == null)
continue;
int cut = 0;
foreach (NetworkEdge e in IncidentEdges(w)) {
if (e.inTree == false) {
int e0Val = EdgeSourceTargetVal(e, cutEdge);
if (e0Val != 0)
cut += e0Val * e.Weight;
} else //e0 is a tree edge
{
if (e == cutEdge)
cut += e.Weight;
else {
int impact = cutEdge.Source == e.Target || cutEdge.Target == e.Source ? 1 : -1;
int edgeContribution = EdgeContribution(e, w);
cut += edgeContribution * impact;
}
}
}
cutEdge.Cut = cut;
int v = cutEdge.Source == w ? cutEdge.Target : cutEdge.Source;
if (AllLowCutsHaveBeenDone(v))
newFront.Push(v);
}
//swap newFrontAndFront
Stack<int> t = front;
front = newFront;
newFront = t;
}
}
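// Illustrative note (added comment): for a tree edge one of whose endpoints is a leaf,
// that endpoint's component is the single leaf node, so its cut is simply the signed sum
// of the weights of the edges incident to the leaf; the loops above then work inward
// from the leaves via parent[], reusing the already-computed cuts.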
/// <summary>
/// e is a tree edge for which the cut has been calculated already.
/// EdgeContribution gives an amount that edge e brings to the cut of parent[w].
/// The contribution is the cut value minus the weight of e. Let S be the component of e source.
/// We should also subtract W(ie) for every ie going from S to w and add W(ie) going from w to S.
/// These numbers appear in e.Cut but with opposite signs.
/// </summary>
/// <param name="e">tree edge</param>
/// <param name="w">parent[w] is in the process of the cut calculation</param>
/// <returns></returns>
int EdgeContribution(NetworkEdge e, int w) {
int ret = e.Cut - e.Weight;
foreach (NetworkEdge ie in IncidentEdges(w)) {
if (ie.inTree == false) {
int sign = EdgeSourceTargetVal(ie, e);
if (sign == -1)
ret += ie.Weight;
else if (sign == 1)
ret -= ie.Weight;
}
}
return ret;
}
int[] lim;
int[] low;
NetworkEdge[] parent;
internal struct StackStruct {
internal int v;
internal IEnumerator outEnum;
internal IEnumerator inEnum;
internal StackStruct(int v,
IEnumerator outEnum,
IEnumerator inEnum) {
this.v = v;
this.outEnum = outEnum;
this.inEnum = inEnum;
}
}
List<int> leaves = new List<int>();
/// <summary>
/// A quote:
/// Another valuable optimization, similar to a technique described in [Ch],
/// is to perform a postorder traversal of the tree, starting from some fixed
/// root node vroot, and labeling each node v with its postorder
/// traversal number lim(v), the least number low(v) of any descendant in the search,
/// and the edge parent(v) by which the node was reached (see figure 2-5).
/// This provides an inexpensive way to test whether a node lies in the
/// source or target component of a tree edge, and thus whether a non-tree edge
/// crosses between the two components. For example, if e = (u,v) is a
/// tree edge and vroot is in the source component of the edge (i.e., lim(u) less lim(v)),
/// then a node w is in the target component of e if and only if low(u) is less or equal than lim(w)
/// is less or equal than lim(u). These numbers can also be used to update the tree efficiently
/// during the network simplex iterations. If f = (w,x) is the entering edge, the
/// only edges whose cut values must be adjusted are those in the path
/// connecting w and x in the tree. This path is determined by following
/// the parent edges back from w and x until the least common ancestor is reached,
/// i.e., the first node l such that low(l) is less than or equal to lim(w) and
/// lim(x) is less than or equal to lim(l).
/// Of course, these postorder parameters must also be adjusted when
/// exchanging tree edges, but only for nodes below l.
/// </summary>
void InitLimLowAndParent() {
lim = new int[graph.NodeCount];
low = new int[graph.NodeCount];
parent = new NetworkEdge[graph.NodeCount];
int curLim = 1;
int v = 0;
InitLowLimParentAndLeavesOnSubtree(ref curLim, ref v);
}
/// <summary>
/// initializes lim and low in the subtree
/// </summary>
/// <param name="curLim">the root of the subtree</param>
/// <param name="v">the low[v]</param>
private void InitLowLimParentAndLeavesOnSubtree(ref int curLim, ref int v) {
Stack<StackStruct> stack = new Stack<StackStruct>();
IEnumerator outEnum = this.graph.OutEdges(v).GetEnumerator();
IEnumerator inEnum = this.graph.InEdges(v).GetEnumerator();
stack.Push(new StackStruct(v, outEnum, inEnum));//vroot is 0 here
low[v] = curLim;
while (stack.Count > 0) {
StackStruct ss = stack.Pop();
v = ss.v;
outEnum = ss.outEnum;
inEnum = ss.inEnum;
//for sure we will have a descendant with the lowest number curLim since curLim may only grow
//from the current value
ProgressStep();
bool done;
do {
done = true;
while (outEnum.MoveNext()) {
NetworkEdge e = outEnum.Current as NetworkEdge;
if (!e.inTree || low[e.Target] > 0)
continue;
stack.Push(new StackStruct(v, outEnum, inEnum));
v = e.Target;
parent[v] = e;
low[v] = curLim;
outEnum = this.graph.OutEdges(v).GetEnumerator();
inEnum = this.graph.InEdges(v).GetEnumerator();
}
while (inEnum.MoveNext()) {
NetworkEdge e = inEnum.Current as NetworkEdge;
if (!e.inTree || low[e.Source] > 0) {
continue;
}
stack.Push(new StackStruct(v, outEnum, inEnum));
v = e.Source;
low[v] = curLim;
parent[v] = e;
outEnum = this.graph.OutEdges(v).GetEnumerator();
inEnum = this.graph.InEdges(v).GetEnumerator();
done = false;
break;
}
} while (!done);
//finally done with v
lim[v] = curLim++;
if (lim[v] == low[v])
leaves.Add(v);
}
}
/// <summary>
/// here we update values lim and low for the subtree with the root l
/// </summary>
/// <param name="l"></param>
void UpdateLimLowLeavesAndParentsUnderNode(int l) {
//first we zero all low values in the subtree since they are an indication when positive that
//the node has been processed
//We are updating leaves also
int llow = low[l];
int llim = lim[l];
leaves.Clear();
for (int i = 0; i < this.graph.NodeCount; i++) {
if (llow <= lim[i] && lim[i] <= llim)
low[i] = 0;
else if (low[i] == lim[i])
leaves.Add(i);
}
int v = l;
InitLowLimParentAndLeavesOnSubtree(ref llow, ref v);
}
int Slack(IntEdge e) {
int ret = layers[e.Source] - layers[e.Target] - e.Separation;
#if DEBUGNW
if (ret < 0)
throw new Exception("separation is not satisfied");
#endif
return ret;
}
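// Worked example (illustrative, added comment): with layers[e.Source] = 3,
// layers[e.Target] = 1 and e.Separation = 1, Slack(e) = 3 - 1 - 1 = 1. An edge is
// "tight" when its slack is 0, i.e. the layer difference equals the required separation;
// FeasibleTree shifts whole tree components by the minimal slack to create such edges.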
/// <summary>
/// one of the returned edge vertices does not belong to the tree but another does
/// </summary>
/// <returns></returns>
NetworkEdge GetNonTreeEdgeIncidentToTheTreeWithMinimalAmountOfSlack() {
IntEdge eret = null;
int minSlack = NetworkEdge.Infinity;
foreach (int v in this.treeVertices) {
foreach (NetworkEdge e in this.graph.OutEdges(v)) {
if (inTree[e.Source] && inTree[e.Target])
continue;
int slack = Slack(e);
if (slack < minSlack) {
eret = e;
minSlack = slack;
if (slack == 1)
return e;
}
}
foreach (NetworkEdge e in this.graph.InEdges(v)) {
if (inTree[e.Source] && inTree[e.Target])
continue;
int slack = Slack(e);
if (slack < minSlack) {
eret = e;
minSlack = slack;
if (slack == 1)
return e;
}
}
}
return eret as NetworkEdge;
}
List<int> treeVertices = new List<int>();
bool[] inTree;
/// <summary>
/// The function TightTree finds a maximal tree of tight edges containing
/// some fixed node and returns the number of nodes in the tree.
/// Note that such a maximal tree is just a spanning tree for the subgraph
/// induced by all nodes reachable from the fixed node in the underlying
/// undirected graph using only tight edges. In particular, all such trees have the same number of nodes.
/// The function also builds the tree.
/// </summary>
/// <returns>number of vertices in a tight tree</returns>
int TightTree() {
treeVertices.Clear();
foreach (NetworkEdge ie in this.graph.Edges)
ie.inTree = false;
for (int i = 1; i < inTree.Length; i++)
inTree[i] = false;
//the vertex 0 is a fixed node
inTree[0] = true;
treeVertices.Add(0);
Stack<int> q = new Stack<int>();
q.Push(0);
while (q.Count > 0) {
int v = q.Pop();
foreach (NetworkEdge e in graph.OutEdges(v)) {
if (inTree[e.Target])
continue;
if (layers[e.Source] - layers[e.Target] == e.Separation) {
q.Push(e.Target);
inTree[e.Target] = true;
treeVertices.Add(e.Target);
e.inTree = true;
}
}
foreach (NetworkEdge e in graph.InEdges(v)) {
if (inTree[e.Source])
continue;
if (layers[e.Source] - layers[e.Target] == e.Separation) {
q.Push(e.Source);
inTree[e.Source] = true;
treeVertices.Add(e.Source);
e.inTree = true;
}
}
}
return treeVertices.Count;
}
Random random = new Random(1);
///// <summary>
///// LeaveEnterEdge finds a non-tree edge to replace e.
///// This is done by breaking the edge e, which divides
///// the tree into the source and the target components.
///// All edges going from the source component to the
///// target are considered, with an edge of minimum
///// slack being chosen. This is necessary to maintain feasibility.
///// </summary>
///// <param name="leavingEdge">a leaving edge</param>
///// <param name="enteringEdge">an entering edge</param>
///// <returns>returns true if a pair is chosen</returns>
Tuple<NetworkEdge, NetworkEdge> LeaveEnterEdge() {
NetworkEdge leavingEdge = null;
NetworkEdge enteringEdge = null; //to keep the compiler happy
int minCut = 0;
foreach (NetworkEdge e in graph.Edges) {
if (e.inTree) {
if (e.Cut < minCut) {
minCut = e.Cut;
leavingEdge = e;
}
}
}
if (leavingEdge == null)
return null;
//now we are looking for a non-tree edge with a minimal slack belonging to TS
bool continuation = false;
int minSlack = NetworkEdge.Infinity;
foreach (NetworkEdge f in graph.Edges) {
int slack = Slack(f);
if (f.inTree == false && EdgeSourceTargetVal(f, leavingEdge) == -1 &&
(slack < minSlack || (slack == minSlack && (continuation = (random.Next(2) == 1))))
) {
minSlack = slack;
enteringEdge = f;
if (minSlack == 0 && !continuation)
break;
continuation = false;
}
}
#if TEST_MSAGL
if (enteringEdge == null)
{
throw new InvalidOperationException();
}
#endif
return new Tuple<NetworkEdge, NetworkEdge>(leavingEdge, enteringEdge);
}
/// <summary>
/// If f = (w,x) is the entering edge, the
/// only edges whose cut values must be adjusted are those in the path
/// connecting w and x in the tree, excluding e. This path is determined by
/// following the parent edges back from w and x until the least common ancestor is
/// reached, i.e., the first node l such that low(l) is less than or equal to lim(w) and lim(x) is less than or equal to lim(l).
/// Of course, these postorder parameters must also be adjusted when
/// exchanging tree edges, but only for nodes below l.
/// </summary>
/// <param name="e">exiting edge</param>
/// <param name="f">entering edge</param>
void Exchange(NetworkEdge e, NetworkEdge f) {
int l = CommonPredecessorOfSourceAndTargetOfF(f);
CreatePathForCutUpdates(e, f, l);
UpdateLimLowLeavesAndParentsUnderNode(l);
UpdateCuts(e);
UpdateLayersUnderNode(l);
}
private void UpdateLayersUnderNode(int l) {
//update the layers under l
Stack<int> front = new Stack<int>();
front.Push(l);
//set layers to infinity under l
for (int i = 0; i < this.graph.NodeCount; i++)
if (low[l] <= lim[i] && lim[i] <= lim[l] && i != l)
layers[i] = NetworkEdge.Infinity;
while (front.Count > 0) {
int u = front.Pop();
foreach (NetworkEdge oe in this.graph.OutEdges(u)) {
if (oe.inTree && layers[oe.Target] == NetworkEdge.Infinity) {
layers[oe.Target] = layers[u] - oe.Separation;
front.Push(oe.Target);
}
}
foreach (NetworkEdge ie in this.graph.InEdges(u)) {
if (ie.inTree && layers[ie.Source] == NetworkEdge.Infinity) {
layers[ie.Source] = layers[u] + ie.Separation;
front.Push(ie.Source);
}
}
}
}
private void UpdateCuts(NetworkEdge e) {
//going up from the leaves of the branch following parents
Stack<int> front = new Stack<int>();
Stack<int> newFront = new Stack<int>();
//We start cut updates from the vertices of e. It will work only if in the new tree
// the parents of the vertices of e are end edges on the path connecting the two vertices.
//Let e be (w,x) and let f be (u,v). Let T be the tree containing e but not f,
//and T0 be the tree without e but containing f. Let us consider the path with no edge repetitions from u to v in T.
//It has to contain e since there is a path from u to v in T containing e, because v lies in the component of w in T
//and u lies in the component of x in T, if there is a path without e then we have a cycle in T.
// Now if we remove e from this path and add f to it we get a path without edge repetitions connecting w to x.
// The edge adjacent in this path to w is parent[w] in T0, and the edge of the path adjacent to x is
//parent[x] in T0. If it is not true then we can get a cycle by constructing another path from w to x going up through the
//parents to the common ancestor of w and x.
front.Push(e.Source);
front.Push(e.Target);
while (front.Count > 0) {
while (front.Count > 0) {
int w = front.Pop();
ProgressStep();
NetworkEdge cutEdge = parent[w]; //have to find the cut of cutEdge
if (cutEdge == null)
continue;
if (cutEdge.Cut != NetworkEdge.Infinity)
continue; //the value of this cut has not been changed
int cut = 0;
foreach (NetworkEdge ce in IncidentEdges(w)) {
if (ce.inTree == false) {
int e0Val = EdgeSourceTargetVal(ce, cutEdge);
if (e0Val != 0)
cut += e0Val * ce.Weight;
} else //e0 is a tree edge
{
if (ce == cutEdge)
cut += ce.Weight;
else {
int impact = cutEdge.Source == ce.Target || cutEdge.Target == ce.Source ? 1 : -1;
int edgeContribution = EdgeContribution(ce, w);
cut += edgeContribution * impact;
}
}
}
cutEdge.Cut = cut;
int u = cutEdge.Source == w ? cutEdge.Target : cutEdge.Source;
if (AllLowCutsHaveBeenDone(u))
newFront.Push(u);
}
//swap newFrontAndFront
Stack<int> t = front;
front = newFront;
newFront = t;
}
}
private void CreatePathForCutUpdates(NetworkEdge e, NetworkEdge f, int l) {
//we mark the path by setting the cut value to infinity
int v = f.Target;
while (v != l) {
NetworkEdge p = parent[v];
p.Cut = NetworkEdge.Infinity;
v = p.Source == v ? p.Target : p.Source;
}
f.Cut = NetworkEdge.Infinity; //have to do it because f will be in the path between end points of e in the new tree
//remove e from the tree and put f inside of it
e.inTree = false; f.inTree = true;
}
private int CommonPredecessorOfSourceAndTargetOfF(NetworkEdge f) {
//find the common predecessor of f.Source and f.Target
int fMin, fmax;
if (lim[f.Source] < lim[f.Target]) {
fMin = lim[f.Source];
fmax = lim[f.Target];
} else {
fMin = lim[f.Target];
fmax = lim[f.Source];
}
//it is the best to walk up from the highest of nodes f
//but we don't know the depths
//so just start walking up from the source
int l = f.Source;
while ((low[l] <= fMin && fmax <= lim[l]) == false) {
NetworkEdge p = parent[l];
p.Cut = NetworkEdge.Infinity;
l = p.Source == l ? p.Target : p.Source;
}
return l;
}
#if TEST_MSAGL
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Globalization", "CA1303:Do not pass literals as localized parameters", MessageId = "System.Console.WriteLine(System.String,System.Object,System.Object,System.Object)"), System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
void CheckCutValues() {
foreach (NetworkEdge e in this.graph.Edges) {
if (e.inTree) {
int cut = 0;
foreach (NetworkEdge f in graph.Edges) {
cut += EdgeSourceTargetVal(f, e) * f.Weight;
}
if (e.Cut != cut)
Console.WriteLine("cuts are wrong for {0}; should be {1} but is {2}", e, cut, e.Cut);
}
}
}
#endif
void InitLayer() {
LongestPathLayering lp = new LongestPathLayering(this.graph);
this.layers = lp.GetLayers();
}
#region Enumerators
/// <summary>
/// to enumerate over all edges incident to v
/// </summary>
internal class IncEdges : IEnumerable<NetworkEdge> {
int v;
NetworkSimplex nw;
#if SHARPKIT //http://code.google.com/p/sharpkit/issues/detail?id=203
//SharpKit/Colin - also https://code.google.com/p/sharpkit/issues/detail?id=332
public IEnumerator<NetworkEdge> GetEnumerator()
{
#else
IEnumerator<NetworkEdge> IEnumerable<NetworkEdge>.GetEnumerator() {
#endif
return new IncEdgeEnumerator(nw.graph.OutEdges(v).GetEnumerator(), nw.graph.InEdges(v).GetEnumerator());
}
IEnumerator IEnumerable.GetEnumerator() {
return new IncEdgeEnumerator(nw.graph.OutEdges(v).GetEnumerator(), nw.graph.InEdges(v).GetEnumerator());
}
internal IncEdges(int v, NetworkSimplex nw) {
this.v = v;
this.nw = nw;
}
}
internal class IncEdgeEnumerator : IEnumerator<NetworkEdge> {
IEnumerator outEdges;
IEnumerator inEdges;
bool outIsActive;
bool inIsActive;
public void Dispose() { GC.SuppressFinalize(this); }
internal IncEdgeEnumerator(IEnumerator outEdges, IEnumerator inEdges) {
this.outEdges = outEdges;
this.inEdges = inEdges;
}
void IEnumerator.Reset() {
outEdges.Reset();
inEdges.Reset();
}
public bool MoveNext() {
outIsActive = outEdges.MoveNext();
if (!outIsActive)
inIsActive = inEdges.MoveNext();
return outIsActive || inIsActive;
}
#if SHARPKIT //https://code.google.com/p/sharpkit/issues/detail?id=203
//SharpKit/Colin - https://code.google.com/p/sharpkit/issues/detail?id=332
public NetworkEdge Current
{
#else
NetworkEdge IEnumerator<NetworkEdge>.Current {
#endif
get {
if (outIsActive)
return outEdges.Current as NetworkEdge;
if (inIsActive)
return inEdges.Current as NetworkEdge;
throw new InvalidOperationException();
}
}
object IEnumerator.Current {
get {
if (outIsActive)
return outEdges.Current as NetworkEdge;
if (inIsActive)
return inEdges.Current as NetworkEdge;
throw new InvalidOperationException();//"bug in the IncEdge enumerator");
}
}
}
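//Equivalent sketch (illustrative only, not used by the layering code): IncEdges/IncEdgeEnumerator
//above simply chain the out-edges and the in-edges of a vertex. With C# iterators the same
//traversal can be expressed as below; the helper is an addition for clarity only.
static IEnumerable<NetworkEdge> ChainedIncidentEdgesSketch(IEnumerable<NetworkEdge> outEdges, IEnumerable<NetworkEdge> inEdges) {
foreach (NetworkEdge e in outEdges)
yield return e;
foreach (NetworkEdge e in inEdges)
yield return e;
}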
#endregion
BasicGraph<IntEdge> graph;
private CancelToken NetworkCancelToken;
protected override void RunInternal()
{
if (graph.Edges.Count == 0 && graph.NodeCount == 0) {
layers = new int[0];
return;
}
FeasibleTree();
Tuple<NetworkEdge, NetworkEdge> leaveEnter;
while ((leaveEnter = LeaveEnterEdge()) != null)
{
ProgressStep();
Exchange(leaveEnter.Item1, leaveEnter.Item2);
}
ShiftLayerToZero();
}
}
}
| |
using System;
using NUnit.Framework;
using OpenQA.Selenium.Environment;
namespace OpenQA.Selenium
{
[TestFixture]
public class FormHandlingTests : DriverTestFixture
{
[Test]
public void ShouldClickOnSubmitInputElements()
{
driver.Url = formsPage;
driver.FindElement(By.Id("submitButton")).Click();
WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
Assert.AreEqual(driver.Title, "We Arrive Here");
}
[Test]
public void ClickingOnUnclickableElementsDoesNothing()
{
driver.Url = formsPage;
driver.FindElement(By.XPath("//body")).Click();
}
[Test]
public void ShouldBeAbleToClickImageButtons()
{
driver.Url = formsPage;
driver.FindElement(By.Id("imageButton")).Click();
WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
Assert.AreEqual(driver.Title, "We Arrive Here");
}
[Test]
public void ShouldBeAbleToSubmitForms()
{
driver.Url = formsPage;
driver.FindElement(By.Name("login")).Submit();
WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
Assert.AreEqual(driver.Title, "We Arrive Here");
}
[Test]
public void ShouldSubmitAFormWhenAnyInputElementWithinThatFormIsSubmitted()
{
driver.Url = formsPage;
driver.FindElement(By.Id("checky")).Submit();
WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
Assert.AreEqual(driver.Title, "We Arrive Here");
}
[Test]
public void ShouldSubmitAFormWhenAnyElementWithinThatFormIsSubmitted()
{
driver.Url = formsPage;
driver.FindElement(By.XPath("//form/p")).Submit();
WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
Assert.AreEqual(driver.Title, "We Arrive Here");
}
[Test]
[IgnoreBrowser(Browser.Android)]
[IgnoreBrowser(Browser.Chrome)]
[IgnoreBrowser(Browser.IPhone)]
[IgnoreBrowser(Browser.Opera)]
[IgnoreBrowser(Browser.PhantomJS)]
[IgnoreBrowser(Browser.Safari)]
public void ShouldNotBeAbleToSubmitAFormThatDoesNotExist()
{
driver.Url = formsPage;
Assert.Throws<NoSuchElementException>(() => driver.FindElement(By.Name("SearchableText")).Submit());
}
[Test]
public void ShouldBeAbleToEnterTextIntoATextAreaBySettingItsValue()
{
driver.Url = javascriptPage;
IWebElement textarea = driver.FindElement(By.Id("keyUpArea"));
string cheesey = "Brie and cheddar";
textarea.SendKeys(cheesey);
Assert.AreEqual(textarea.GetAttribute("value"), cheesey);
}
[Test]
public void SendKeysKeepsCapitalization()
{
driver.Url = javascriptPage;
IWebElement textarea = driver.FindElement(By.Id("keyUpArea"));
string cheesey = "BrIe And CheDdar";
textarea.SendKeys(cheesey);
Assert.AreEqual(textarea.GetAttribute("value"), cheesey);
}
[Test]
public void ShouldSubmitAFormUsingTheNewlineLiteral()
{
driver.Url = formsPage;
IWebElement nestedForm = driver.FindElement(By.Id("nested_form"));
IWebElement input = nestedForm.FindElement(By.Name("x"));
input.SendKeys("\n");
WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
Assert.AreEqual("We Arrive Here", driver.Title);
Assert.IsTrue(driver.Url.EndsWith("?x=name"));
}
[Test]
public void ShouldSubmitAFormUsingTheEnterKey()
{
driver.Url = formsPage;
IWebElement nestedForm = driver.FindElement(By.Id("nested_form"));
IWebElement input = nestedForm.FindElement(By.Name("x"));
input.SendKeys(Keys.Enter);
WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
Assert.AreEqual("We Arrive Here", driver.Title);
Assert.IsTrue(driver.Url.EndsWith("?x=name"));
}
[Test]
public void ShouldEnterDataIntoFormFields()
{
driver.Url = xhtmlTestPage;
IWebElement element = driver.FindElement(By.XPath("//form[@name='someForm']/input[@id='username']"));
String originalValue = element.GetAttribute("value");
Assert.AreEqual(originalValue, "change");
element.Clear();
element.SendKeys("some text");
element = driver.FindElement(By.XPath("//form[@name='someForm']/input[@id='username']"));
String newFormValue = element.GetAttribute("value");
Assert.AreEqual(newFormValue, "some text");
}
[Test]
[IgnoreBrowser(Browser.Android, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.IPhone, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.Safari, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.WindowsPhone, "Does not yet support file uploads")]
public void ShouldBeAbleToAlterTheContentsOfAFileUploadInputElement()
{
driver.Url = formsPage;
IWebElement uploadElement = driver.FindElement(By.Id("upload"));
Assert.IsTrue(string.IsNullOrEmpty(uploadElement.GetAttribute("value")));
System.IO.FileInfo inputFile = new System.IO.FileInfo("test.txt");
System.IO.StreamWriter inputFileWriter = inputFile.CreateText();
inputFileWriter.WriteLine("Hello world");
inputFileWriter.Close();
uploadElement.SendKeys(inputFile.FullName);
System.IO.FileInfo outputFile = new System.IO.FileInfo(uploadElement.GetAttribute("value"));
Assert.AreEqual(inputFile.Name, outputFile.Name);
inputFile.Delete();
}
[Test]
[IgnoreBrowser(Browser.Android, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.IPhone, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.Safari, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.WindowsPhone, "Does not yet support file uploads")]
public void ShouldBeAbleToSendKeysToAFileUploadInputElementInAnXhtmlDocument()
{
// IE before 9 doesn't handle pages served with an XHTML content type, and just prompts
// the user to download it
if (TestUtilities.IsOldIE(driver))
{
return;
}
driver.Url = xhtmlFormPage;
IWebElement uploadElement = driver.FindElement(By.Id("file"));
Assert.AreEqual(string.Empty, uploadElement.GetAttribute("value"));
System.IO.FileInfo inputFile = new System.IO.FileInfo("test.txt");
System.IO.StreamWriter inputFileWriter = inputFile.CreateText();
inputFileWriter.WriteLine("Hello world");
inputFileWriter.Close();
uploadElement.SendKeys(inputFile.FullName);
System.IO.FileInfo outputFile = new System.IO.FileInfo(uploadElement.GetAttribute("value"));
Assert.AreEqual(inputFile.Name, outputFile.Name);
inputFile.Delete();
}
[Test]
[IgnoreBrowser(Browser.Android, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.IPhone, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.Safari, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.WindowsPhone, "Does not yet support file uploads")]
public void ShouldBeAbleToUploadTheSameFileTwice()
{
System.IO.FileInfo inputFile = new System.IO.FileInfo("test.txt");
System.IO.StreamWriter inputFileWriter = inputFile.CreateText();
inputFileWriter.WriteLine("Hello world");
inputFileWriter.Close();
driver.Url = formsPage;
IWebElement uploadElement = driver.FindElement(By.Id("upload"));
Assert.IsTrue(string.IsNullOrEmpty(uploadElement.GetAttribute("value")));
uploadElement.SendKeys(inputFile.FullName);
uploadElement.Submit();
driver.Url = formsPage;
uploadElement = driver.FindElement(By.Id("upload"));
Assert.IsTrue(string.IsNullOrEmpty(uploadElement.GetAttribute("value")));
uploadElement.SendKeys(inputFile.FullName);
uploadElement.Submit();
// If we get this far, then we're all good.
}
[Test]
public void SendingKeyboardEventsShouldAppendTextInInputs()
{
driver.Url = formsPage;
IWebElement element = driver.FindElement(By.Id("working"));
element.SendKeys("Some");
String value = element.GetAttribute("value");
Assert.AreEqual(value, "Some");
element.SendKeys(" text");
value = element.GetAttribute("value");
Assert.AreEqual(value, "Some text");
}
[Test]
public void SendingKeyboardEventsShouldAppendTextInInputsWithExistingValue()
{
driver.Url = formsPage;
IWebElement element = driver.FindElement(By.Id("inputWithText"));
element.SendKeys(". Some text");
string value = element.GetAttribute("value");
Assert.AreEqual("Example text. Some text", value);
}
[Test]
[IgnoreBrowser(Browser.HtmlUnit, "Not implemented going to the end of the line first")]
public void SendingKeyboardEventsShouldAppendTextInTextAreas()
{
driver.Url = formsPage;
IWebElement element = driver.FindElement(By.Id("withText"));
element.SendKeys(". Some text");
String value = element.GetAttribute("value");
Assert.AreEqual(value, "Example text. Some text");
}
[Test]
public void ShouldBeAbleToClearTextFromInputElements()
{
driver.Url = formsPage;
IWebElement element = driver.FindElement(By.Id("working"));
element.SendKeys("Some text");
String value = element.GetAttribute("value");
Assert.IsTrue(value.Length > 0);
element.Clear();
value = element.GetAttribute("value");
Assert.AreEqual(value.Length, 0);
}
[Test]
public void EmptyTextBoxesShouldReturnAnEmptyStringNotNull()
{
driver.Url = formsPage;
IWebElement emptyTextBox = driver.FindElement(By.Id("working"));
Assert.AreEqual(emptyTextBox.GetAttribute("value"), "");
IWebElement emptyTextArea = driver.FindElement(By.Id("emptyTextArea"));
Assert.AreEqual(emptyTextArea.GetAttribute("value"), "");
}
[Test]
public void ShouldBeAbleToClearTextFromTextAreas()
{
driver.Url = formsPage;
IWebElement element = driver.FindElement(By.Id("withText"));
element.SendKeys("Some text");
String value = element.GetAttribute("value");
Assert.IsTrue(value.Length > 0);
element.Clear();
value = element.GetAttribute("value");
Assert.AreEqual(value.Length, 0);
}
[Test]
[IgnoreBrowser(Browser.Android, "Untested")]
[IgnoreBrowser(Browser.HtmlUnit, "Untested")]
[IgnoreBrowser(Browser.IPhone, "Untested")]
[IgnoreBrowser(Browser.Opera, "Untested")]
[IgnoreBrowser(Browser.PhantomJS, "Untested")]
[IgnoreBrowser(Browser.Safari, "Untested")]
[IgnoreBrowser(Browser.WindowsPhone, "Does not yet support alert handling")]
public void HandleFormWithJavascriptAction()
{
string url = EnvironmentManager.Instance.UrlBuilder.WhereIs("form_handling_js_submit.html");
driver.Url = url;
IWebElement element = driver.FindElement(By.Id("theForm"));
element.Submit();
IAlert alert = driver.SwitchTo().Alert();
string text = alert.Text;
alert.Dismiss();
Assert.AreEqual("Tasty cheese", text);
}
[Test]
[IgnoreBrowser(Browser.Android, "Untested")]
[IgnoreBrowser(Browser.IPhone, "Untested")]
[IgnoreBrowser(Browser.Safari, "Untested")]
public void CanClickOnASubmitButton()
{
CheckSubmitButton("internal_explicit_submit");
}
[Test]
[IgnoreBrowser(Browser.Android, "Untested")]
[IgnoreBrowser(Browser.IPhone, "Untested")]
[IgnoreBrowser(Browser.Safari, "Untested")]
public void CanClickOnAnImplicitSubmitButton()
{
CheckSubmitButton("internal_implicit_submit");
}
[Test]
[IgnoreBrowser(Browser.Android, "Untested")]
[IgnoreBrowser(Browser.IPhone, "Untested")]
[IgnoreBrowser(Browser.Safari, "Untested")]
[IgnoreBrowser(Browser.HtmlUnit, "Fails on HtmlUnit")]
[IgnoreBrowser(Browser.IE, "Fails on IE")]
public void CanClickOnAnExternalSubmitButton()
{
CheckSubmitButton("external_explicit_submit");
}
[Test]
[IgnoreBrowser(Browser.Android, "Untested")]
[IgnoreBrowser(Browser.IPhone, "Untested")]
[IgnoreBrowser(Browser.Safari, "Untested")]
[IgnoreBrowser(Browser.HtmlUnit, "Fails on HtmlUnit")]
[IgnoreBrowser(Browser.IE, "Fails on IE")]
public void CanClickOnAnExternalImplicitSubmitButton()
{
CheckSubmitButton("external_implicit_submit");
}
private void CheckSubmitButton(string buttonId)
{
driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("click_tests/html5_submit_buttons.html");
string name = "Gromit";
driver.FindElement(By.Id("name")).SendKeys(name);
driver.FindElement(By.Id(buttonId)).Click();
WaitFor(TitleToBe("Submitted Successfully!"), "Browser title is not 'Submitted Successfully!'");
Assert.That(driver.Url.Contains("name=" + name), "URL does not contain 'name=" + name + "'. Actual URL:" + driver.Url);
}
private Func<bool> TitleToBe(string desiredTitle)
{
return () =>
{
return driver.Title == desiredTitle;
};
}
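// Illustrative helper in the same style as TitleToBe (not part of the original test class):
// a predicate suitable for use with WaitFor that checks the current URL. The name is an
// assumption added here; it is not referenced by the tests above.
private Func<bool> UrlToContain(string fragment)
{
return () =>
{
return driver.Url.Contains(fragment);
};
}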
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="LinkGrep.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
namespace System.Web.Services.Discovery {
using System;
using System.IO;
using System.Text.RegularExpressions;
using System.Web.Services.Protocols;
using System.Globalization;
internal class LinkGrep {
private LinkGrep() { }
private static string ReadEntireStream(TextReader input) {
char[] buffer = new char[4096];
int bufferpos = 0;
int delta;
//
for (;;) {
delta = input.Read(buffer, bufferpos, buffer.Length - bufferpos);
if (delta == 0)
break;
bufferpos += delta;
if (bufferpos == buffer.Length) {
char[] newbuf = new char[buffer.Length * 2];
System.Array.Copy(buffer, 0, newbuf, 0, buffer.Length);
buffer = newbuf;
}
}
return new string(buffer, 0, bufferpos);
}
internal static string SearchForLink(Stream stream) {
string text = null;
text = ReadEntireStream(new StreamReader(stream));
int textpos = 0;
Match match;
if ((match = doctypeDirectiveRegex.Match(text, textpos)).Success) {
textpos += match.Length;
}
bool oneMatch;
for (;;) {
// Reset match flag
oneMatch = false;
// 1: scan for text up to the next tag.
// First case: check for whitespace going all the way to the next tag
if ((match = whitespaceRegex.Match(text, textpos)).Success) {
oneMatch = true;
}
// Second case: there may be some nonwhitespace; scan it
else if ((match = textRegex.Match(text, textpos)).Success) {
oneMatch = true;
}
// we might be done now
textpos += match.Length;
if (textpos == text.Length)
break;
// 2: handle constructs that start with <
// First, check to see if it's a tag
if ((match = tagRegex.Match(text, textpos)).Success)
{
oneMatch = true;
string tag = match.Groups["tagname"].Value;
if (String.Compare(tag, "link", StringComparison.OrdinalIgnoreCase) == 0) {
CaptureCollection attrnames = match.Groups["attrname"].Captures;
CaptureCollection attrvalues = match.Groups["attrval"].Captures;
int count = attrnames.Count;
bool rightType = false;
bool rightRel = false;
string href = null;
for (int i = 0; i < count; i++) {
string attrName = attrnames[i].ToString();
string attrValue = attrvalues[i].ToString();
if (String.Compare(attrName, "type", StringComparison.OrdinalIgnoreCase) == 0 &&
ContentType.MatchesBase(attrValue, ContentType.TextXml)) {
rightType = true;
}
else if (String.Compare(attrName, "rel", StringComparison.OrdinalIgnoreCase) == 0 &&
String.Compare(attrValue, "alternate", StringComparison.OrdinalIgnoreCase) == 0) {
rightRel = true;
}
else if (String.Compare(attrName, "href", StringComparison.OrdinalIgnoreCase) == 0) {
href = attrValue;
}
if (rightType && rightRel && href != null) {
// Got a link to a disco file!
return href;
}
}
}
else if (String.Compare(tag, "body", StringComparison.OrdinalIgnoreCase) == 0) {
// If body begins, get out, since link tags should only be defined in the head
break;
}
}
// check to see if it's an end tag
else if ((match = endtagRegex.Match(text, textpos)).Success) {
oneMatch = true;
}
// check to see if it's a comment
else if ((match = commentRegex.Match(text, textpos)).Success) {
oneMatch = true;
}
// we might be done now
textpos += match.Length;
if (textpos == text.Length)
break;
// If we couldn't get one single match, it means that it's probably not HTML, so bail
if (!oneMatch) {
break;
}
}
return null;
}
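// Illustrative usage sketch (not part of the original class): SearchForLink scans HTML for a
// <link type="text/xml" rel="alternate" href="..."> element in the head and returns its href,
// or null if none is found. The sample markup and the expected result are assumptions added here.
private static string SearchForLinkSketch() {
string html = "<html><head><link type=\"text/xml\" rel=\"alternate\" href=\"service.disco\"/></head><body></body></html>";
Stream stream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(html));
return SearchForLink(stream); // expected to return "service.disco"
}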
private readonly static Regex tagRegex = new Regex
(
"\\G<" + // leading <
"(?<prefix>[\\w:.-]+(?=:)|):?" + // optional prefix:
"(?<tagname>[\\w.-]+)" + // tagname
"(?:\\s+" + // zero or more attributes
"(?<attrprefix>[\\w:.-]+(?=:)|):?" + // optional attrprefix:
"(?<attrname>[\\w.-]+)" + // attrname
"\\s*=\\s*" + // required equals
"(?:" + // quoted value
"\"(?<attrval>[^\"]*)\"" + // double quoted attrval
"|'(?<attrval>[^\']*)'" + // single quoted attrval
"|(?<attrval>[a-zA-Z0-9\\-._:]+)" + // attrval with no quotes (SGML-approved chars)
")" + // end quoted value
")*" + // end attribute
"\\s*(?<empty>/)?>" // optional trailing /, and trailing >
);
private readonly static Regex doctypeDirectiveRegex = new Regex
(
@"\G<!doctype\b(([\s\w]+)|("".*""))*>",
RegexOptions.IgnoreCase | RegexOptions.Multiline | RegexOptions.IgnorePatternWhitespace
);
private readonly static Regex endtagRegex = new Regex
(
"\\G</" + // leading </
"(?<prefix>[\\w:-]+(?=:)|):?" + // optional prefix:
"(?<tagname>[\\w-]+)\\s*>" // tagname
);
private readonly static Regex commentRegex = new Regex
(
"\\G<!--" + // leading <!--
"(?>[^-]*-)+?" + // one or more chunks of text ending with -, minimal
"->" // trailing ->
);
private readonly static Regex whitespaceRegex = new Regex
(
"\\G\\s+" + // at least one char of whitespace
"(?=<|\\Z)" // ending with either '<' or the end of the string
);
private readonly static Regex textRegex = new Regex
(
"\\G[^<]+" // at least one char on non-'<', maximal
);
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Configuration;
using System.ComponentModel;
using System.Data;
using System.Data.Common;
namespace Artem.Data.Access {
/// <summary>
/// Database utility object.
/// </summary>
public partial class DataAccess : IDisposable {
#region Fields //////////////////////////////////////////////////////////////////
IDbConnection _connection;
IDbTransaction _transaction;
//List<DataAccessCommand> _commands;
IDbCommand _command;
List<IDataReader> _readers;
string _connectionName;
string _connectionString;
bool _isDisposed = false;
bool _isInScope = false;
#endregion
#region Properties //////////////////////////////////////////////////////////////
/// <summary>
/// Gets a provider-independent database connection
/// </summary>
protected internal IDbConnection Connection {
get { return this._connection; }
}
/// <summary>
/// Gets the name of the used connection
/// </summary>
/// <value>The name of the connection.</value>
public string ConnectionName {
get {
if (string.IsNullOrEmpty(_connectionName)) {
return DataAccess.Provider.ConnectionName;
}
else {
return _connectionName;
}
}
set { _connectionName = value; }
}
/// <summary>
/// Gets or sets the connection string.
/// </summary>
/// <value>The connection string.</value>
public string ConnectionString {
get { return _connectionString; }
set { _connectionString = value; }
}
/// <summary>
/// Gets a value indicating whether this instance is disposed.
/// </summary>
/// <value>
/// <c>true</c> if this instance is disposed; otherwise, <c>false</c>.
/// </value>
public bool IsDisposed {
get { return _isDisposed; }
}
/// <summary>
/// Returns <code>true</code> if the database object has at least
/// one transaction started; otherwise returns <code>false</code>
/// </summary>
/// <value>
/// <c>true</c> if this instance has transaction; otherwise, <c>false</c>.
/// </value>
public bool HasTransaction {
get { return this._transaction != null; }
}
/// <summary>
/// Returns the list of all <code>IDataReader</code>s created by
/// the database object, if any
/// </summary>
/// <value>The readers.</value>
protected IList<IDataReader> Readers {
get {
if (_readers == null) {
_readers = new List<IDataReader>();
}
return _readers;
}
}
/// <summary>
///
/// </summary>
/// <param name="index"></param>
/// <returns></returns>
public IDataParameter this[int index] {
get { return (IDataParameter)_command.Parameters[index]; }
}
/// <summary>
///
/// </summary>
public IDbCommand Command {
get { return _command; }
}
/// <summary>
///
/// </summary>
public string CommandText {
get { return _command.CommandText; }
set { _command.CommandText = value; }
}
/// <summary>
///
/// </summary>
public int CommandTimeout {
get { return _command.CommandTimeout; }
set { _command.CommandTimeout = value; }
}
/// <summary>
///
/// </summary>
public CommandType CommandType {
get { return _command.CommandType; }
set { _command.CommandType = value; }
}
/// <summary>
///
/// </summary>
public IDataParameterCollection Parameters {
get { return _command.Parameters; }
}
/// <summary>
///
/// </summary>
public UpdateRowSource UpdatedRowSource {
get { return _command.UpdatedRowSource; }
set { _command.UpdatedRowSource = value; }
}
#endregion
#region Construct / Destruct ////////////////////////////////////////////////////
/// <summary>
/// Creates a new <code>DataAccess</code> object.
/// </summary>
/// <param name="commandText">The command text.</param>
public DataAccess(string commandText) {
if (DataScope.Current != null) {
this._isInScope = true;
this._connection = DataScope.Current.Connection;
this._transaction = DataScope.Current.Transaction;
}
else {
this._connection = DataAccess.Provider.CreateConnection();
}
//this._transactionStack = new Stack<IDbTransaction>();
this._command = this.CreateCommand(commandText);
// new List<DataAccessCommand>(Default.CommandsSize);
//if (commands != null) {
// foreach (string commandText in commands) {
// }
//}
}
///// <summary>
/////
///// </summary>
///// <param name="transaction"></param>
///// <param name="commands"></param>
//public Database(IDbTransaction transaction, params string[] commands)
// : this(commands) {
// this._transaction = transaction;
// //this._transactionStack.Push(transaction);
//}
/// <summary>
/// Creates a new <code>DataAccess</code> object.
/// </summary>
public DataAccess()
: this(null) {
}
#region Disposing
/// <summary>
/// Releases all resources used by <code>DataAccess</code>
/// </summary>
public void Dispose() {
Dispose(true);
GC.SuppressFinalize(this);
}
/// <summary>
///
/// </summary>
/// <param name="disposing"></param>
protected virtual void Dispose(bool disposing) {
if (!_isDisposed) {
if (disposing) {
this.CloseReaders();
this.CloseCommands();
if (this.HasTransaction) this.Commit();
this.CloseConnection(true);
}
}
_isDisposed = true;
}
#endregion
#endregion
#region Methods /////////////////////////////////////////////////////////////////
/// <summary>
/// Adds the parameter.
/// </summary>
/// <param name="name">The name.</param>
/// <param name="dbType">Type of the db.</param>
/// <param name="direction">The direction.</param>
/// <param name="size">The size.</param>
/// <param name="value">The value.</param>
/// <returns></returns>
public IDbDataParameter AddParameter(
string name, int dbType, ParameterDirection direction, int size, object value) {
IDbDataParameter __parameter =
DataAccess.CreateParameter(name, dbType, direction, size, value);
this.Parameters.Add(__parameter);
return __parameter;
}
/// <summary>
/// Adds the parameter.
/// </summary>
/// <param name="name">The name.</param>
/// <param name="dbType">Type of the db.</param>
/// <param name="direction">The direction.</param>
/// <param name="value">The value.</param>
/// <returns></returns>
public IDbDataParameter AddParameter(
string name, int dbType, ParameterDirection direction, object value) {
IDbDataParameter __parameter = DataAccess.CreateParameter(name, dbType, direction, value);
this.Parameters.Add(__parameter);
return __parameter;
}
/// <summary>
/// Adds the parameter.
/// </summary>
/// <param name="name">The name.</param>
/// <param name="value">The value.</param>
/// <returns></returns>
public IDbDataParameter AddParameter(string name, object value) {
IDbDataParameter __parameter = DataAccess.CreateParameter(name, value);
this.Parameters.Add(__parameter);
return __parameter;
}
/// <summary>
/// Adds the parameter.
/// </summary>
/// <param name="parameter">The parameter.</param>
/// <returns></returns>
public IDbDataParameter AddParameter(IDbDataParameter parameter) {
this.Parameters.Add(parameter);
return parameter;
}
/// <summary>
/// Starts a new database transaction. The current implementation allows nested
/// transactions to be used. If there are nested transactions then the
/// <code>RollbackBehaviour</code> is used to manage rollback behaviour
/// </summary>
/// <param name="level">the transaction isolation level</param>
/// <returns>returns the created transaction</returns>
public IDbTransaction BeginTransaction(IsolationLevel level) {
if (_isInScope) {
throw new DataAccessException(DataAccessError.Transaction_CanNotBeInScope);
}
this.Prepare();
_transaction = _connection.BeginTransaction(level);
return _transaction;
}
/// <summary>
/// Starts a new database transaction. The current implementation allows nested
/// transactions to be used. If there are nested transactions then the
/// <code>RollbackBehaviour</code> is used to manage rollback behaviour
/// </summary>
/// <returns>returns the created transaction</returns>
public IDbTransaction BeginTransaction() {
if (_isInScope) {
throw new DataAccessException(DataAccessError.Transaction_CanNotBeInScope);
}
this.Prepare();
this._transaction = _connection.BeginTransaction(IsolationLevel.RepeatableRead);
return this._transaction;
}
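/// <summary>
/// Illustrative usage sketch only (not part of the original API). It exercises only members
/// visible in this file; the SQL text, the "Default" connection name and the omitted execute
/// call are assumptions added here for clarity.
/// </summary>
private static void UsageSketch() {
using (DataAccess db = new DataAccess("UPDATE Users SET IsActive = 0 WHERE LastLogin < @cutoff")) {
db.ConnectionName = "Default"; // assumed configuration entry
db.AddParameter("@cutoff", DateTime.UtcNow.AddYears(-1));
db.BeginTransaction();
// ... execute the command via the execute methods defined in the other part of this partial class ...
db.Commit(); // also performed implicitly when the object is disposed
}
}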
/// <summary>
/// Creates a database command using the default database provider
/// </summary>
/// <param name="commandText">the command text</param>
/// <param name="commandType">the command type</param>
/// <param name="commandParams">the list of command parameters</param>
/// <returns>returns the created command</returns>
public IDbCommand CreateCommand(
string commandText, CommandType commandType, params IDbDataParameter[] commandParams) {
IDbTransaction __transaction = null;
if (this.HasTransaction) {
__transaction = this._transaction;
}
IDbCommand __command = DataAccess.Provider.CreateCommand(
commandText, commandType, _connection, __transaction);
foreach (IDbDataParameter __parameter in commandParams) {
__command.Parameters.Add(__parameter);
}
return __command;
}
/// <summary>
/// Creates a database command using the default database provider
/// </summary>
/// <param name="commandText">the command text</param>
/// <param name="commandParams">the list of command parameters</param>
/// <returns>returns the created command</returns>
public IDbCommand CreateCommand(string commandText, params IDbDataParameter[] commandParams) {
IDbTransaction __transaction = null;
if (this.HasTransaction) {
__transaction = this._transaction;// _transactionStack.Peek();
}
IDbCommand __command = DataAccess.Provider.CreateCommand(
commandText, DataAccess.Provider.DefaultCommandType, _connection, __transaction);
foreach (IDbDataParameter __parameter in commandParams) {
__command.Parameters.Add(__parameter);
}
return __command;
}
/// <summary>
/// Creates a database command using the default database provider
/// </summary>
/// <param name="commandText">the command text</param>
/// <param name="commandType">the command type</param>
/// <returns>returns the created command</returns>
public IDbCommand CreateCommand(string commandText, CommandType commandType) {
IDbTransaction __transaction = null;
if (this.HasTransaction) {
__transaction = this._transaction;// _transactionStack.Peek();
}
IDbCommand __command = DataAccess.Provider.CreateCommand(
commandText, commandType, _connection, __transaction);
return __command;
}
/// <summary>
/// Creates a database command using the default database provider
/// </summary>
/// <param name="commandText">the command text</param>
/// <returns>returns the created command</returns>
public IDbCommand CreateCommand(string commandText) {
IDbTransaction __transaction = null;
if (this.HasTransaction) {
__transaction = this._transaction;// _transactionStack.Peek();
}
IDbCommand __command = DataAccess.Provider.CreateCommand(
commandText, DataAccess.Provider.DefaultCommandType, _connection, __transaction);
return __command;
}
/// <summary>
/// Prepares the database object for use.
/// </summary>
protected void Prepare() {
if (_connection.State == ConnectionState.Closed || _connection.State == ConnectionState.Broken) {
if (string.IsNullOrEmpty(this.ConnectionString)) {
_connection.ConnectionString =
ConnectionStringSettings[this.ConnectionName].ConnectionString;
}
else {
_connection.ConnectionString = _connectionString;
}
_connection.Open();
}
else {
// close any forgotten readers before the next command execution
this.CloseReaders();
}
}
///// <summary>
///// Prepares the command at index for use.
///// </summary>
///// <param name="index"></param>
//protected void Prepare(int index) {
// //IDbCommand __dbCommand = this[index].Command;
// //if (this.HasTransaction && __dbCommand.Transaction == null) {
// // __dbCommand.Transaction = this._transaction;// _transactionStack.Peek();
// //}
// // TODO: resolve this problem
// //INullableValue __nullable;
// //foreach (IDbDataParameter __param in __dbCommand.Parameters) {
// // if (__param.Value is INullableValue) {
// // __nullable = __param.Value as INullableValue;
// // __param.Value = (__nullable.HasValue) ? __nullable.Value : DBNull.Value;
// // }
// //}
//}
/// <summary>
/// Commits the transaction, if this object is transactional.
/// It is invoked when the object is disposed,
/// so the explicit call of this method can be omitted
/// if a single transaction is to be committed.
/// </summary>
public void Commit() {
if (this.HasTransaction && !_isInScope) {
_transaction.Commit();
_transaction.Dispose();
_transaction = null;
}
}
/// <summary>
/// Rolls back the transaction, if this object is transactional.
/// It is invoked by default if an exception occurs,
/// so the explicit call of this method can be omitted.
/// </summary>
public void Rollback() {
if (this.HasTransaction && !_isInScope) {
_transaction.Rollback();
_transaction.Dispose();
_transaction = null;
}
}
#region Utility Methods
/// <summary>
/// Closes the used connection.
/// </summary>
/// <param name="disposing"></param>
private void CloseConnection(bool disposing) {
if ((!disposing && this.HasTransaction) || _isInScope) return;
if (_connection != null && _connection.State != ConnectionState.Closed) {
try {
_connection.Close();
}
catch { }
}
}
/// <summary>
/// Closes all created commands
/// </summary>
private void CloseCommands() {
if (_command != null) {
_command.Dispose();
}
}
/// <summary>
/// Closes all readers opened, if any.
/// </summary>
private void CloseReaders() {
if (_readers != null) {
foreach (IDataReader __reader in _readers) {
if (__reader != null) {
if (!__reader.IsClosed) {
__reader.Close();
}
__reader.Dispose();
}
}
_readers.Clear();
}
}
/// <summary>
/// Handles the exception.
/// </summary>
/// <param name="ex">The ex.</param>
private void HandleException(Exception ex) {
if (!_isInScope) {
this.Rollback();
throw ex;
}
else {
DataScope.Current.InnerException = ex;
}
}
/// <summary>
/// Traces the start.
/// </summary>
/// <param name="method">The method.</param>
private void TraceStart(string method) {
if (TraceEnabled && Trace != null) {
Trace.Write("data.access", string.Format("{0}_Start: {1}", method, this.Command.CommandText));
}
}
/// <summary>
/// Traces the end.
/// </summary>
/// <param name="method">The method.</param>
private void TraceEnd(string method) {
if (TraceEnabled && Trace != null) {
Trace.Write("data.access", string.Format("{0}_End: {1}", method, this.Command.CommandText));
}
}
#endregion
#endregion
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apache.Ignite.Core.Tests
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Apache.Ignite.Core.Common;
using Apache.Ignite.Core.Messaging;
using Apache.Ignite.Core.Tests.Process;
using NUnit.Framework;
/// <summary>
/// Ignite start/stop tests.
/// </summary>
[Category(TestUtils.CategoryIntensive)]
public class IgniteStartStopTest
{
/// <summary>
///
/// </summary>
[SetUp]
public void SetUp()
{
TestUtils.KillProcesses();
}
/// <summary>
///
/// </summary>
[TearDown]
public void TearDown()
{
TestUtils.KillProcesses();
Ignition.StopAll(true);
}
/// <summary>
///
/// </summary>
[Test]
public void TestStartDefault()
{
var cfg = new IgniteConfiguration {JvmClasspath = TestUtils.CreateTestClasspath()};
var grid = Ignition.Start(cfg);
Assert.IsNotNull(grid);
Assert.AreEqual(1, grid.GetCluster().GetNodes().Count);
}
/// <summary>
///
/// </summary>
[Test]
public void TestStartWithConfigPath()
{
var cfg = new IgniteConfiguration
{
SpringConfigUrl = "config/default-config.xml",
JvmClasspath = TestUtils.CreateTestClasspath()
};
var grid = Ignition.Start(cfg);
Assert.IsNotNull(grid);
Assert.AreEqual(1, grid.GetCluster().GetNodes().Count);
}
/// <summary>
///
/// </summary>
[Test]
public void TestStartGetStop()
{
var cfgs = new List<string> { "config\\start-test-grid1.xml", "config\\start-test-grid2.xml", "config\\start-test-grid3.xml" };
var cfg = new IgniteConfiguration
{
SpringConfigUrl = cfgs[0],
JvmOptions = TestUtils.TestJavaOptions(),
JvmClasspath = TestUtils.CreateTestClasspath()
};
var grid1 = Ignition.Start(cfg);
Assert.AreEqual("grid1", grid1.Name);
cfg.SpringConfigUrl = cfgs[1];
var grid2 = Ignition.Start(cfg);
Assert.AreEqual("grid2", grid2.Name);
cfg.SpringConfigUrl = cfgs[2];
var grid3 = Ignition.Start(cfg);
Assert.IsNull(grid3.Name);
Assert.AreSame(grid1, Ignition.GetIgnite("grid1"));
Assert.AreSame(grid2, Ignition.GetIgnite("grid2"));
Assert.AreSame(grid3, Ignition.GetIgnite(null));
try
{
Ignition.GetIgnite("invalid_name");
}
catch (IgniteException e)
{
Console.WriteLine("Expected exception: " + e);
}
Assert.IsTrue(Ignition.Stop("grid1", true));
try
{
Ignition.GetIgnite("grid1");
}
catch (IgniteException e)
{
Console.WriteLine("Expected exception: " + e);
}
grid2.Dispose();
try
{
Ignition.GetIgnite("grid2");
}
catch (IgniteException e)
{
Console.WriteLine("Expected exception: " + e);
}
grid3.Dispose();
try
{
Ignition.GetIgnite(null);
}
catch (IgniteException e)
{
Console.WriteLine("Expected exception: " + e);
}
foreach (var cfgName in cfgs)
{
cfg.SpringConfigUrl = cfgName;
cfg.JvmOptions = TestUtils.TestJavaOptions();
Ignition.Start(cfg);
}
foreach (var gridName in new List<string> { "grid1", "grid2", null })
Assert.IsNotNull(Ignition.GetIgnite(gridName));
Ignition.StopAll(true);
foreach (var gridName in new List<string> { "grid1", "grid2", null })
{
try
{
Ignition.GetIgnite(gridName);
}
catch (IgniteException e)
{
Console.WriteLine("Expected exception: " + e);
}
}
}
/// <summary>
///
/// </summary>
[Test]
public void TestStartTheSameName()
{
var cfg = new IgniteConfiguration
{
SpringConfigUrl = "config\\start-test-grid1.xml",
JvmOptions = TestUtils.TestJavaOptions(),
JvmClasspath = TestUtils.CreateTestClasspath()
};
var grid1 = Ignition.Start(cfg);
Assert.AreEqual("grid1", grid1.Name);
try
{
Ignition.Start(cfg);
Assert.Fail("Start should fail.");
}
catch (IgniteException e)
{
Console.WriteLine("Expected exception: " + e);
}
}
/// <summary>
///
/// </summary>
[Test]
public void TestUsageAfterStop()
{
var cfg = new IgniteConfiguration
{
SpringConfigUrl = "config\\start-test-grid1.xml",
JvmOptions = TestUtils.TestJavaOptions(),
JvmClasspath = TestUtils.CreateTestClasspath()
};
var grid = Ignition.Start(cfg);
Assert.IsNotNull(grid.GetCache<int, int>("cache1"));
grid.Dispose();
try
{
grid.GetCache<int, int>("cache1");
Assert.Fail();
}
catch (InvalidOperationException e)
{
Console.WriteLine("Expected exception: " + e);
}
}
/// <summary>
///
/// </summary>
[Test]
public void TestStartStopLeak()
{
var cfg = new IgniteConfiguration
{
SpringConfigUrl = "config\\start-test-grid1.xml",
JvmOptions = new List<string> {"-Xcheck:jni", "-Xms256m", "-Xmx256m", "-XX:+HeapDumpOnOutOfMemoryError"},
JvmClasspath = TestUtils.CreateTestClasspath()
};
for (var i = 0; i < 20; i++)
{
Console.WriteLine("Iteration: " + i);
var grid = Ignition.Start(cfg);
UseIgnite(grid);
if (i % 2 == 0) // Try to stop ignite from another thread.
{
var t = new Thread(() => {
grid.Dispose();
});
t.Start();
t.Join();
}
else
grid.Dispose();
GC.Collect(); // At the time of writing, Java references are cleaned up from the finalizer, so a GC is needed.
}
}
/// <summary>
/// Tests the client mode flag.
/// </summary>
[Test]
public void TestClientMode()
{
var servCfg = new IgniteConfiguration
{
SpringConfigUrl = "config\\start-test-grid1.xml",
JvmOptions = TestUtils.TestJavaOptions(),
JvmClasspath = TestUtils.CreateTestClasspath()
};
var clientCfg = new IgniteConfiguration
{
SpringConfigUrl = "config\\start-test-grid2.xml",
JvmOptions = TestUtils.TestJavaOptions(),
JvmClasspath = TestUtils.CreateTestClasspath()
};
try
{
using (var serv = Ignition.Start(servCfg)) // start server-mode ignite first
{
Assert.IsFalse(serv.GetCluster().GetLocalNode().IsClient);
Ignition.ClientMode = true;
using (var grid = Ignition.Start(clientCfg))
{
Assert.IsTrue(grid.GetCluster().GetLocalNode().IsClient);
UseIgnite(grid);
}
}
}
finally
{
Ignition.ClientMode = false;
}
}
/// <summary>
/// Uses the ignite.
/// </summary>
/// <param name="ignite">The ignite.</param>
private static void UseIgnite(IIgnite ignite)
{
// Create objects holding references to java objects.
var comp = ignite.GetCompute();
// ReSharper disable once RedundantAssignment
comp = comp.WithKeepBinary();
var prj = ignite.GetCluster().ForOldest();
Assert.IsTrue(prj.GetNodes().Count > 0);
Assert.IsNotNull(prj.GetCompute());
var cache = ignite.GetCache<int, int>("cache1");
Assert.IsNotNull(cache);
cache.GetAndPut(1, 1);
Assert.AreEqual(1, cache.Get(1));
}
/// <summary>
/// Tests the processor initialization and grid usage right after topology enter.
/// </summary>
[Test]
public void TestProcessorInit()
{
var cfg = new IgniteConfiguration
{
SpringConfigUrl = "config\\start-test-grid1.xml",
JvmOptions = TestUtils.TestJavaOptions(),
JvmClasspath = TestUtils.CreateTestClasspath()
};
// Start local node
var grid = Ignition.Start(cfg);
// Start remote node in a separate process
// ReSharper disable once UnusedVariable
var proc = new IgniteProcess(
"-jvmClasspath=" + TestUtils.CreateTestClasspath(),
"-springConfigUrl=" + Path.GetFullPath(cfg.SpringConfigUrl),
"-J-Xms512m", "-J-Xmx512m");
var cts = new CancellationTokenSource();
var token = cts.Token;
// Spam message subscriptions on a separate thread
// to test race conditions during processor init on remote node
var listenTask = Task.Factory.StartNew(() =>
{
var filter = new MessageListener();
while (!token.IsCancellationRequested)
{
var listenId = grid.GetMessaging().RemoteListen(filter);
grid.GetMessaging().StopRemoteListen(listenId);
}
// ReSharper disable once FunctionNeverReturns
});
// Wait for remote node to join
Assert.IsTrue(grid.WaitTopology(2, 30000));
// Wait some more for initialization
Thread.Sleep(1000);
// Cancel listen task and check that it finishes
cts.Cancel();
Assert.IsTrue(listenTask.Wait(5000));
}
/// <summary>
/// Noop message filter.
/// </summary>
[Serializable]
private class MessageListener : IMessageListener<int>
{
/** <inheritdoc /> */
public bool Invoke(Guid nodeId, int message)
{
return true;
}
}
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
namespace NPOI.SS.Formula.Functions
{
using System;
using NPOI.SS.Formula.Eval;
using System.Diagnostics;
/**
* Implementation for the Excel function INDEX
*
* Syntax : <br/>
* INDEX ( reference, row_num[, column_num [, area_num]])<br/>
* INDEX ( array, row_num[, column_num])
* <table border="0" cellpadding="1" cellspacing="0" summary="Parameter descriptions">
* <tr><th>reference</th><td>typically an area reference, possibly a union of areas</td></tr>
* <tr><th>array</th><td>a literal array value (currently not supported)</td></tr>
* <tr><th>row_num</th><td>selects the row within the array or area reference</td></tr>
* <tr><th>column_num</th><td>selects column within the array or area reference. default is 1</td></tr>
* <tr><th>area_num</th><td>used when reference is a union of areas</td></tr>
* </table>
*
* @author Josh Micich
*/
public class Index : Function2Arg, Function3Arg, Function4Arg
{
public ValueEval Evaluate(int srcRowIndex, int srcColumnIndex, ValueEval arg0, ValueEval arg1)
{
//AreaEval reference = ConvertFirstArg(arg0);
//bool colArgWasPassed = false;
//int columnIx = 0;
//try
//{
// int rowIx = ResolveIndexArg(arg1, srcRowIndex, srcColumnIndex);
// return GetValueFromArea(reference, rowIx, columnIx, colArgWasPassed, srcRowIndex, srcColumnIndex);
//}
//catch (EvaluationException e)
//{
// return e.GetErrorEval();
//}
TwoDEval reference = ConvertFirstArg(arg0);
int columnIx = 0;
try
{
int rowIx = ResolveIndexArg(arg1, srcRowIndex, srcColumnIndex);
if (!reference.IsColumn)
{
if (!reference.IsRow)
{
// always an error with 2-D area refs
// Note - the type of error changes if the pRowArg is negative
return ErrorEval.REF_INVALID;
}
// When the two-arg version of INDEX() has been invoked and the reference
// is a single column ref, the row arg seems to get used as the column index
columnIx = rowIx;
rowIx = 0;
}
return GetValueFromArea(reference, rowIx, columnIx);
}
catch (EvaluationException e)
{
return e.GetErrorEval();
}
}
public ValueEval Evaluate(int srcRowIndex, int srcColumnIndex, ValueEval arg0, ValueEval arg1,
ValueEval arg2)
{
//AreaEval reference = ConvertFirstArg(arg0);
//bool colArgWasPassed = true;
//try
//{
// int columnIx = ResolveIndexArg(arg2, srcRowIndex, srcColumnIndex);
// int rowIx = ResolveIndexArg(arg1, srcRowIndex, srcColumnIndex);
// return GetValueFromArea(reference, rowIx, columnIx, colArgWasPassed, srcRowIndex, srcColumnIndex);
//}
//catch (EvaluationException e)
//{
// return e.GetErrorEval();
//}
TwoDEval reference = ConvertFirstArg(arg0);
try
{
int columnIx = ResolveIndexArg(arg2, srcRowIndex, srcColumnIndex);
int rowIx = ResolveIndexArg(arg1, srcRowIndex, srcColumnIndex);
return GetValueFromArea(reference, rowIx, columnIx);
}
catch (EvaluationException e)
{
return e.GetErrorEval();
}
}
public ValueEval Evaluate(int srcRowIndex, int srcColumnIndex, ValueEval arg0, ValueEval arg1,
ValueEval arg2, ValueEval arg3)
{
throw new Exception("Incomplete code"
+ " - don't know how to support the 'area_num' parameter yet)");
// Excel expression might look like this: "INDEX( (A1:B4, C3:D6, D2:E5 ), 1, 2, 3)"
// In this example, the 3rd area would be used i.e. D2:E5, and the overall result would be E2
// Token array might be encoded like this: MemAreaPtg, AreaPtg, AreaPtg, UnionPtg, UnionPtg, ParenthesesPtg
// The formula parser doesn't seem to support this yet. Not sure if the evaluator does either
}
private static TwoDEval ConvertFirstArg(ValueEval arg0)
{
ValueEval firstArg = arg0;
if (firstArg is RefEval)
{
// Convert to area ref for simpler code in getValueFromArea()
return ((RefEval)firstArg).Offset(0, 0, 0, 0);
}
if ((firstArg is TwoDEval))
{
return (TwoDEval)firstArg;
}
// else the other variation of this function takes an array as the first argument
// it seems like interface 'ArrayEval' does not even exist yet
throw new Exception("Incomplete code - cannot handle first arg of type ("
+ firstArg.GetType().Name + ")");
}
public ValueEval Evaluate(ValueEval[] args, int srcRowIndex, int srcColumnIndex)
{
switch (args.Length)
{
case 2:
return Evaluate(srcRowIndex, srcColumnIndex, args[0], args[1]);
case 3:
return Evaluate(srcRowIndex, srcColumnIndex, args[0], args[1], args[2]);
case 4:
return Evaluate(srcRowIndex, srcColumnIndex, args[0], args[1], args[2], args[3]);
}
return ErrorEval.VALUE_INVALID;
}
private static ValueEval GetValueFromArea(TwoDEval ae, int pRowIx, int pColumnIx)
{
Debug.Assert(pRowIx >= 0);
Debug.Assert(pColumnIx >= 0);
TwoDEval result = ae;
if (pRowIx != 0)
{
// Slightly irregular logic for bounds checking errors
if (pRowIx > ae.Height)
{
// high bounds check fail gives #REF! if arg was explicitly passed
throw new EvaluationException(ErrorEval.REF_INVALID);
}
result = result.GetRow(pRowIx - 1);
}
if (pColumnIx != 0)
{
// Slightly irregular logic for bounds checking errors
if (pColumnIx > ae.Width)
{
// high bounds check fail gives #REF! if arg was explicitly passed
throw new EvaluationException(ErrorEval.REF_INVALID);
}
result = result.GetColumn(pColumnIx - 1);
}
return result;
}
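// Worked example (illustrative only, never called by the evaluator): for an area A1:B4,
// INDEX(A1:B4, 3, 2) asks for the 3rd row and 2nd column, so GetValueFromArea above calls
// GetRow(3 - 1) followed by GetColumn(2 - 1); a row_num or column_num of 0 leaves that
// dimension untouched and selects the whole row or column.
private static void IndexArgumentConventionSketch(TwoDEval area)
{
TwoDEval thirdRow = area.GetRow(3 - 1); // row_num = 3
TwoDEval cellB3 = thirdRow.GetColumn(2 - 1); // column_num = 2
Debug.Assert(cellB3 != null);
}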
/**
* @param colArgWasPassed <c>false</c> if the INDEX argument list had just 2 items
* (exactly 1 comma). If anything is passed for the <c>column_num</c> argument
* (including {@link BlankEval} or {@link MissingArgEval}) this parameter will be
* <c>true</c>. This parameter is needed because error codes are slightly
* different when only 2 args are passed.
*/
[Obsolete]
private static ValueEval GetValueFromArea(AreaEval ae, int pRowIx, int pColumnIx,
bool colArgWasPassed, int srcRowIx, int srcColIx)
{
bool rowArgWasEmpty = pRowIx == 0;
bool colArgWasEmpty = pColumnIx == 0;
int rowIx;
int columnIx;
// when the area ref is a single row or a single column,
// there are special rules for conversion of rowIx and columnIx
if (ae.IsRow)
{
if (ae.IsColumn)
{
// single cell ref
rowIx = rowArgWasEmpty ? 0 : pRowIx - 1;
columnIx = colArgWasEmpty ? 0 : pColumnIx - 1;
}
else
{
if (colArgWasPassed)
{
rowIx = rowArgWasEmpty ? 0 : pRowIx - 1;
columnIx = pColumnIx - 1;
}
else
{
// special case - row arg seems to get used as the column index
rowIx = 0;
// transfer both the index value and the empty flag from 'row' to 'column':
columnIx = pRowIx - 1;
colArgWasEmpty = rowArgWasEmpty;
}
}
}
else if (ae.IsColumn)
{
if (rowArgWasEmpty)
{
rowIx = srcRowIx - ae.FirstRow;
}
else
{
rowIx = pRowIx - 1;
}
if (colArgWasEmpty)
{
columnIx = 0;
}
else
{
columnIx = colArgWasEmpty ? 0 : pColumnIx - 1;
}
}
else
{
// ae is an area (not single row or column)
if (!colArgWasPassed)
{
// always an error with 2-D area refs
// Note - the type of error changes if the pRowArg is negative
throw new EvaluationException(pRowIx < 0 ? ErrorEval.VALUE_INVALID : ErrorEval.REF_INVALID);
}
// Normal case - area ref is 2-D, and both index args were provided
// if either arg is missing (or blank) the logic is similar to OperandResolver.getSingleValue()
if (rowArgWasEmpty)
{
rowIx = srcRowIx - ae.FirstRow;
}
else
{
rowIx = pRowIx - 1;
}
if (colArgWasEmpty)
{
columnIx = srcColIx - ae.FirstColumn;
}
else
{
columnIx = pColumnIx - 1;
}
}
int width = ae.Width;
int height = ae.Height;
// Slightly irregular logic for bounds checking errors
if (!rowArgWasEmpty && rowIx >= height || !colArgWasEmpty && columnIx >= width)
{
// high bounds check fail gives #REF! if arg was explicitly passed
throw new EvaluationException(ErrorEval.REF_INVALID);
}
if (rowIx < 0 || columnIx < 0 || rowIx >= height || columnIx >= width)
{
throw new EvaluationException(ErrorEval.VALUE_INVALID);
}
return ae.GetRelativeValue(rowIx, columnIx);
}
/**
* @param arg a 1-based index.
* @return the resolved 1-based index. Zero if the arg was missing or blank
* @throws EvaluationException if the arg is an error value or evaluates to a negative numeric value
*/
private static int ResolveIndexArg(ValueEval arg, int srcCellRow, int srcCellCol)
{
ValueEval ev = OperandResolver.GetSingleValue(arg, srcCellRow, srcCellCol);
if (ev == MissingArgEval.instance)
{
return 0;
}
if (ev == BlankEval.instance)
{
return 0;
}
int result = OperandResolver.CoerceValueToInt(ev);
if (result < 0)
{
throw new EvaluationException(ErrorEval.VALUE_INVALID);
}
return result;
}
}
}
| |
using System;
using System.Reflection;
using UnityEditor;
using UnityEngine;
using Vexe.Editor.Helpers;
using UnityObject = UnityEngine.Object;
namespace Vexe.Editor.GUIs
{
public class TurtleGUI : BaseGUI
{
private static HorizontalBlock horizontal;
private static VerticalBlock vertical;
private static MethodInfo gradientFieldMethod;
public override Rect LastRect
{
get { return GUILayoutUtility.GetLastRect(); }
}
public override void OnGUI(Action code, Vector4 padding, int targetId)
{
code();
}
public override Bounds BoundsField(GUIContent content, Bounds value, Layout option)
{
return EditorGUILayout.BoundsField(content, value, option);
}
public override void Box(GUIContent content, GUIStyle style, Layout option)
{
GUILayout.Box(content, style, option);
}
public override void HelpBox(string message, MessageType type)
{
EditorGUILayout.HelpBox(message, type);
}
public override bool Button(GUIContent content, GUIStyle style, Layout option, ControlType buttonType)
{
return GUILayout.Button(content, style, option);
}
public override Color ColorField(GUIContent content, Color value, Layout option)
{
return EditorGUILayout.ColorField(content, value, option);
}
public override Enum EnumPopup(GUIContent content, System.Enum selected, GUIStyle style, Layout option)
{
return EditorGUILayout.EnumPopup(content, selected, style, option);
}
public override float FloatField(GUIContent content, float value, Layout option)
{
return EditorGUILayout.FloatField(content, value, option);
}
public override char CharField(GUIContent content, char value, Layout option)
{
return CharField(content, ref value, option);
}
public override sbyte SByteField(GUIContent content, sbyte value, Layout option)
{
return SByteField(content, ref value, option);
}
public override byte ByteField(GUIContent content, byte value, Layout option)
{
return ByteField(content, ref value, option);
}
public override short ShortField(GUIContent content, short value, Layout option)
{
return ShortField(content, ref value, option);
}
public override ushort UShortField(GUIContent content, ushort value, Layout option)
{
return UShortField(content, ref value, option);
}
public override int IntField(GUIContent content, int value, Layout option)
{
return EditorGUILayout.IntField(content, value, option);
}
public override uint UIntField(GUIContent content, uint value, Layout option)
{
return UIntField(content, ref value, option);
}
public override bool Foldout(GUIContent content, bool value, GUIStyle style, Layout option)
{
var rect = GUILayoutUtility.GetRect(content, style, option);
return EditorGUI.Foldout(rect, value, content, true, style);
}
public override void Label(GUIContent content, GUIStyle style, Layout option)
{
GUILayout.Label(content, style, option);
}
public override int MaskField(GUIContent content, int mask, string[] displayedOptions, GUIStyle style, Layout option)
{
return EditorGUILayout.MaskField(content, mask, displayedOptions, style, option);
}
public override UnityObject ObjectField(GUIContent content, UnityObject value, System.Type type, bool allowSceneObjects, Layout option)
{
// If we pass an empty content, ObjectField will still reserve space for an empty label ~__~
return string.IsNullOrEmpty(content.text) ?
EditorGUILayout.ObjectField(value, type, allowSceneObjects, option) :
EditorGUILayout.ObjectField(content, value, type, allowSceneObjects, option);
}
public override int Popup(string text, int selectedIndex, string[] displayedOptions, GUIStyle style, Layout option)
{
return EditorGUILayout.Popup(text, selectedIndex, displayedOptions, style, option);
}
public override int Popup(string text, int selectedIndex, GUIContent[] displayedOptions, GUIStyle style, Layout option)
{
var content = GetContent(text);
return EditorGUILayout.Popup(content, selectedIndex, displayedOptions, style, option);
}
public override Rect RectField(GUIContent content, Rect value, Layout option)
{
return EditorGUILayout.RectField(content, value, option);
}
public override AnimationCurve CurveField(GUIContent content, AnimationCurve value, Layout option)
{
return EditorGUILayout.CurveField(content, value, option);
}
public override Gradient GradientField(GUIContent content, Gradient value, Layout option)
{
if (value == null)
value = new Gradient();
return (Gradient) gradientFieldMethod.Invoke(null, new object[] { content, value, option });
}
protected override void BeginScrollView(ref Vector2 pos, bool alwaysShowHorizontal, bool alwaysShowVertical, GUIStyle horizontalScrollbar, GUIStyle verticalScrollbar, GUIStyle background, Layout option)
{
pos = GUILayout.BeginScrollView(pos, alwaysShowHorizontal, alwaysShowVertical, horizontalScrollbar, verticalScrollbar, background, option);
}
protected override void EndScrollView()
{
GUILayout.EndScrollView();
}
public override float FloatSlider(GUIContent content, float value, float leftValue, float rightValue, Layout option)
{
return EditorGUILayout.Slider(content, value, leftValue, rightValue, option);
}
public override void Space(float pixels)
{
GUILayout.Space(pixels);
}
public override void FlexibleSpace()
{
GUILayout.FlexibleSpace();
}
public override string TextField(GUIContent content, string value, GUIStyle style, Layout option)
{
return EditorGUILayout.TextField(content, value, style, option);
}
public override string ToolbarSearch(string value, Layout option)
{
return GUIHelper.ToolbarSearchField_GL(null, new object[] { value, option.ToGLOptions() }) as string;
}
public override bool Toggle(GUIContent content, bool value, GUIStyle style, Layout option)
{
return EditorGUILayout.Toggle(content, value, style, option);
}
public override bool ToggleLeft(GUIContent content, bool value, GUIStyle labelStyle, Layout option)
{
return EditorGUILayout.ToggleLeft(content, value, labelStyle, option);
}
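// Note added for clarity: EditorGUILayout.GradientField is not public in the editor API targeted
// here (see the NonPublic binding flag below), so the static constructor resolves it once via
// reflection and GradientField(...) above invokes it through gradientFieldMethod.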
static TurtleGUI()
{
horizontal = new HorizontalBlock();
vertical = new VerticalBlock();
Type tyEditorGUILayout = typeof(EditorGUILayout);
gradientFieldMethod = tyEditorGUILayout.GetMethod("GradientField", BindingFlags.NonPublic | BindingFlags.Static, null, new Type[] { typeof(GUIContent), typeof(Gradient), typeof(GUILayoutOption[]) }, null);
}
public TurtleGUI()
{
horizontal.onDisposed = EndHorizontal;
vertical.onDisposed = EndVertical;
}
protected override HorizontalBlock BeginHorizontal(GUIStyle style)
{
GUILayout.BeginHorizontal(style);
return horizontal;
}
protected override VerticalBlock BeginVertical(GUIStyle style)
{
GUILayout.BeginVertical(style);
return vertical;
}
protected override void EndHorizontal()
{
GUILayout.EndHorizontal();
}
protected override void EndVertical()
{
GUILayout.EndVertical();
}
public override string TextArea(string value, Layout option)
{
return EditorGUILayout.TextArea(value, option);
}
public override bool InspectorTitlebar(bool foldout, UnityObject target)
{
return EditorGUILayout.InspectorTitlebar(foldout, target);
}
public override string Tag(GUIContent content, string tag, GUIStyle style, Layout layout)
{
return EditorGUILayout.TagField(content, tag, style, layout);
}
public override int LayerField(GUIContent label, int layer, GUIStyle style, Layout layout)
{
return EditorGUILayout.LayerField(label, layer, style, layout);
}
public override void Prefix(GUIContent content)
{
EditorGUILayout.PrefixLabel(content);
}
public override void Prefix(string label)
{
EditorGUILayout.PrefixLabel(label);
}
public override string ScrollableTextArea(string value, ref Vector2 scrollPos, GUIStyle style, Layout option)
{
throw new NotImplementedException();
}
public override string TextFieldDropDown(GUIContent label, string text, string[] displayedOptions, Layout option)
{
throw new NotImplementedException();
}
public override double DoubleField(GUIContent content, double value, Layout option)
{
return EditorGUILayout.DoubleField(content, value, option);
}
public override long LongField(GUIContent content, long value, Layout option)
{
return EditorGUILayout.LongField(content, value, option);
}
public override ulong ULongField(GUIContent content, ulong value, Layout option)
{
return ULongField(content, ref value, option);
}
public override IDisposable If(bool condition, IDisposable body)
{
throw new NotImplementedException();
}
public override void MinMaxSlider(GUIContent label, ref float minValue, ref float maxValue, float minLimit, float maxLimit, Layout option)
{
throw new NotImplementedException();
}
}
}
| |
/*
* Original author: Nick Shulman <nicksh .at. u.washington.edu>,
* MacCoss Lab, Department of Genome Sciences, UW
*
* Copyright 2012 University of Washington - Seattle, WA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using ZedGraph;
namespace pwiz.Common.Graph
{
/// <summary>
/// Default implementation of <see cref="ICurveDataHandler"/> which tries
/// to deal gracefully with curves of all types.
/// One can override the way that a curve's data is rendered into text on
/// the clipboard by placing a <see cref="CurveDataHandlerAttribute"/>
/// over the class definition, or by modifying the logic in
/// <see cref="CurveDataHandlers.FindCurveHandler"/>.
/// </summary>
public class CurveDataHandler : ICurveDataHandler
{
#region ICurveDataHandler members
public virtual DataFrameBuilder CreateDataFrame(DataFrameBuilder dataFrameBuilder)
{
dataFrameBuilder = dataFrameBuilder.SetDataFrame(new DataFrame(dataFrameBuilder.CurveItem.Label.Text, dataFrameBuilder.Points.Count));
dataFrameBuilder = AddColumns(dataFrameBuilder);
return dataFrameBuilder;
}
/// <summary>
/// Filters the <see cref="DataFrameBuilder.Points"/> down to just those that are
/// visible if the graph has been zoomed in.
/// This method only filters based on the value on the Base Axis
/// (i.e. the X-Axis for most curves).
/// </summary>
public virtual DataFrameBuilder FilterToZoomedRange(DataFrameBuilder dataFrameBuilder)
{
var graphPane = dataFrameBuilder.GraphPane;
var pointList = dataFrameBuilder.Points;
var baseAxis = dataFrameBuilder.CurveItem.BaseAxis(graphPane);
if (baseAxis.Scale.IsAnyOrdinal)
{
// If the Scale is either Ordinal or Text, then don't
// filter the PointList, because the point values
// are derived from their position in the list.
return dataFrameBuilder;
}
Func<PointPair, double> valueOfPointFunc = ValueOfPointFuncForAxis(dataFrameBuilder, baseAxis);
if (null == valueOfPointFunc)
{
return dataFrameBuilder;
}
double min = baseAxis.Scale.Min;
double max = baseAxis.Scale.Max;
var pointPairList = new PointPairList();
for (int i = 0; i < pointList.Count; i++)
{
var pointPair = pointList[i];
double value = valueOfPointFunc(pointPair);
if (value < min || value > max)
{
continue;
}
pointPairList.Add(pointPair);
}
return dataFrameBuilder.SetPoints(pointPairList);
}
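// Illustrative example (not part of the original source): with a linear base axis whose
// Scale.Min is 2 and Scale.Max is 5, base-axis point values { 1, 3, 4, 6 } are filtered
// down to { 3, 4 }; ordinal and text scales are returned unfiltered because their point
// values are positional.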
#endregion
/// <summary>
/// Adds columns to the <see cref="DataFrameBuilder.DataFrame"/> for all
/// of the data in the <see cref="DataFrameBuilder.Points"/>.
/// </summary>
protected virtual DataFrameBuilder AddColumns(DataFrameBuilder dataFrameBuilder)
{
dataFrameBuilder = AddColumnForAxis(dataFrameBuilder, dataFrameBuilder.BaseAxis);
dataFrameBuilder = AddColumnForAxis(dataFrameBuilder, dataFrameBuilder.ValueAxis);
if (HasZAxis(dataFrameBuilder))
{
dataFrameBuilder = dataFrameBuilder.AddColumn(GetZAxisColumn(dataFrameBuilder.Points));
}
dataFrameBuilder = dataFrameBuilder.AddColumn(GetTagColumn(dataFrameBuilder.Points));
return dataFrameBuilder;
}
/// <summary>
/// Adds the data for the <paramref name="axis"/> to the <see cref="DataFrameBuilder.DataFrame"/>.
/// If <paramref name="axis"/> is the <see cref="CurveItem.BaseAxis"/> then the column
/// is added as the <see cref="DataFrame.RowHeader"/>, otherwise it is added
/// to <see cref="DataFrame.ColumnGroups"/>.
/// The X-Axis is usually the base axis, but for bar graphs that display horizontally,
/// the Y-Axis is the base axis.
/// </summary>
protected virtual DataFrameBuilder AddColumnForAxis(DataFrameBuilder dataFrameBuilder, Axis axis)
{
var column = GetColumnForAxis(dataFrameBuilder, axis);
if (column == null)
{
return dataFrameBuilder;
}
var dataFrame = dataFrameBuilder.DataFrame;
if (dataFrame.RowHeader == null && ReferenceEquals(axis, dataFrameBuilder.BaseAxis))
{
dataFrame = dataFrame.SetRowHeaders(column);
}
else
{
dataFrame = dataFrame.AddColumn(column);
}
return dataFrameBuilder.SetDataFrame(dataFrame);
}
/// <summary>
/// Returns a <see cref="DataColumn"/> containing the values on the <paramref name="axis"/>.
/// If <see cref="Scale.IsText"/> is true for the <see cref="Axis.Scale"/>,
/// then the DataColumn will contain string values.
/// </summary>
protected virtual DataColumn GetColumnForAxis(DataFrameBuilder dataFrameBuilder, Axis axis)
{
if (axis == null)
{
return null;
}
if (axis.Scale.IsText)
{
var textValues = new string[dataFrameBuilder.Points.Count];
Array.Copy(axis.Scale.TextLabels, 0, textValues, 0, Math.Min(textValues.Length, axis.Scale.TextLabels.Length));
return new DataColumn<string>(axis.Title.Text, textValues);
}
if (axis.Scale.IsOrdinal)
{
return new DataColumn<int>(axis.Title.Text, Enumerable.Range(0, dataFrameBuilder.Points.Count));
}
var values = new double[dataFrameBuilder.Points.Count];
var valueOfPoint = ValueOfPointFuncForAxis(dataFrameBuilder, axis);
if (valueOfPoint != null)
{
for (int i = 0; i < dataFrameBuilder.Points.Count; i++)
{
values[i] = valueOfPoint(dataFrameBuilder.Points[i]);
}
}
if (values.Any(value=>PointPairBase.Missing == value))
{
var valuesWithNull = values.Select(value => PointPairBase.Missing == value ? (double?) null : value);
return new DataColumn<double?>(axis.Title.Text, valuesWithNull);
}
return new DataColumn<double>(axis.Title.Text, values);
}
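// Illustrative example (not part of the original source): an axis whose Scale.IsText is true
// with TextLabels { "A", "B", "C" } produces a DataColumn<string> titled with the axis title;
// a purely ordinal axis produces a DataColumn<int> holding 0..Points.Count-1; and any point whose
// value equals PointPairBase.Missing causes the numeric column to be widened to DataColumn<double?>.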
/// <summary>
/// Returns true if any of the values in <see cref="PointPair.Z"/> are nonzero.
/// </summary>
protected bool HasZAxis(DataFrameBuilder dataFrameBuilder)
{
return AsEnumerable(dataFrameBuilder.Points).Any(point => 0 != point.Z);
}
/// <summary>
/// Returns a column with the data values in <see cref="PointPair.Z"/>.
/// </summary>
protected virtual DataColumn GetZAxisColumn(IPointList points)
{
var values = new double?[points.Count];
for (int i = 0; i < points.Count; i++)
{
var point = points[i];
if (point.IsMissing)
{
values[i] = null;
}
else
{
values[i] = point.Z;
}
}
if (values.Contains(null))
{
return new DataColumn<double?>("Z", values); // Not L10N
}
return new DataColumn<double>("Z", values.Cast<double>()); // Not L10N
}
/// <summary>
/// If any of the <see cref="PointPair.Tag"/> properties on the points is a string,
/// then this method returns a <see cref="DataColumn"/> containing those string values.
/// If none of the tags are strings, then this method returns null.
///
/// A <see cref="ZedGraphControl"/> will show the tag as a tooltip if it is
/// a string and <see cref="ZedGraphControl.IsShowPointValues"/> is true.
/// </summary>
protected virtual DataColumn GetTagColumn(IPointList points)
{
var values = AsEnumerable(points).Select(point => point.Tag as string).ToArray();
if (values.All(value=>null == value))
{
return null;
}
return new DataColumn<string>("Label", values); // Not L10N
}
/// <summary>
/// Determines whether <paramref name="axis"/> is the X-Axis or the Y-Axis,
/// and returns a function that returns either <see cref="PointPair.X"/> or
/// <see cref="PointPair.Y"/>.
/// Returns null if the axis is neither.
/// </summary>
protected virtual Func<PointPair, double> ValueOfPointFuncForAxis(DataFrameBuilder dataFrameBuilder, Axis axis)
{
if (axis is XAxis || axis is X2Axis || ReferenceEquals(axis, dataFrameBuilder.XAxis))
{
return point => point.X;
}
if (axis is YAxis || axis is Y2Axis || ReferenceEquals(axis, dataFrameBuilder.YAxis))
{
return point => point.Y;
}
Trace.TraceError("Could not determine type of axis {0}", axis); // Not L10N
return null;
}
public static IEnumerable<PointPair> AsEnumerable(IPointList pointList)
{
for (int i = 0; i < pointList.Count; i++)
{
yield return pointList[i];
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// See the LICENSE file in the project root for more information.
//
// System.Drawing.Icon.cs
//
// Authors:
// Gary Barnett ([email protected])
// Dennis Hayes ([email protected])
// Andreas Nahr ([email protected])
// Sanjay Gupta ([email protected])
// Peter Dennis Bartok ([email protected])
// Sebastien Pouliot <[email protected]>
//
// Copyright (C) 2002 Ximian, Inc. http://www.ximian.com
// Copyright (C) 2004-2008 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System.Collections;
using System.ComponentModel;
using System.Drawing.Imaging;
using System.IO;
using System.Reflection;
using System.Runtime.Serialization;
using System.Runtime.InteropServices;
namespace System.Drawing
{
#if !NETCORE
#if !MONOTOUCH
[Editor ("System.Drawing.Design.IconEditor, " + Consts.AssemblySystem_Drawing_Design, typeof (System.Drawing.Design.UITypeEditor))]
#endif
[TypeConverter(typeof(IconConverter))]
#endif
public sealed partial class Icon : MarshalByRefObject, ISerializable, ICloneable, IDisposable
{
[StructLayout(LayoutKind.Sequential)]
internal struct IconDirEntry
{
internal byte width; // Width of icon
internal byte height; // Height of icon
internal byte colorCount; // colors in icon
internal byte reserved; // Reserved
internal ushort planes; // Color Planes
internal ushort bitCount; // Bits per pixel
internal uint bytesInRes; // bytes in resource
internal uint imageOffset; // position in file
internal bool ignore; // for unsupported images (vista 256 png)
};
[StructLayout(LayoutKind.Sequential)]
internal struct IconDir
{
internal ushort idReserved; // Reserved
internal ushort idType; // resource type (1 for icons)
internal ushort idCount; // how many images?
internal IconDirEntry[] idEntries; // the entries for each image
};
[StructLayout(LayoutKind.Sequential)]
internal struct BitmapInfoHeader
{
internal uint biSize;
internal int biWidth;
internal int biHeight;
internal ushort biPlanes;
internal ushort biBitCount;
internal uint biCompression;
internal uint biSizeImage;
internal int biXPelsPerMeter;
internal int biYPelsPerMeter;
internal uint biClrUsed;
internal uint biClrImportant;
};
[StructLayout(LayoutKind.Sequential)] // added baseclass for non bmp image format support
internal abstract class ImageData
{
};
[StructLayout(LayoutKind.Sequential)]
internal class IconImage : ImageData
{
internal BitmapInfoHeader iconHeader; //image header
internal uint[] iconColors; //colors table
internal byte[] iconXOR; // bits for XOR mask
internal byte[] iconAND; //bits for AND mask
};
[StructLayout(LayoutKind.Sequential)]
internal class IconDump : ImageData
{
internal byte[] data;
};
private Size iconSize;
private IntPtr handle = IntPtr.Zero;
private IconDir iconDir;
private ushort id;
private ImageData[] imageData;
private bool undisposable;
private bool disposed;
private Bitmap bitmap;
private Icon()
{
}
#if !MONOTOUCH
private Icon(IntPtr handle)
{
this.handle = handle;
bitmap = Bitmap.FromHicon(handle);
iconSize = new Size(bitmap.Width, bitmap.Height);
// FIXME: we need to convert the bitmap into an icon
undisposable = true;
}
#endif
public Icon(Icon original, int width, int height)
: this(original, new Size(width, height))
{
}
public Icon(Icon original, Size size)
{
if (original == null)
throw new ArgumentNullException(nameof(original));
iconSize = size;
iconDir = original.iconDir;
int count = iconDir.idCount;
if (count > 0)
{
imageData = original.imageData;
id = UInt16.MaxValue;
for (ushort i = 0; i < count; i++)
{
IconDirEntry ide = iconDir.idEntries[i];
if (((ide.height == size.Height) || (ide.width == size.Width)) && !ide.ignore)
{
id = i;
break;
}
}
// if a perfect match isn't found we look for the biggest icon *smaller* than specified
if (id == UInt16.MaxValue)
{
int requested = Math.Min(size.Height, size.Width);
// previously 'best' was set to the first image; since that might not be the smallest, the loop was changed to check all entries
IconDirEntry? best = null;
for (ushort i = 0; i < count; i++)
{
IconDirEntry ide = iconDir.idEntries[i];
if (((ide.height < requested) || (ide.width < requested)) && !ide.ignore)
{
if (best == null)
{
best = ide;
id = i;
}
else if ((ide.height > best.Value.height) || (ide.width > best.Value.width))
{
best = ide;
id = i;
}
}
}
}
// last one, if nothing better can be found
if (id == UInt16.MaxValue)
{
int i = count;
while (id == UInt16.MaxValue && i > 0)
{
i--;
if (!iconDir.idEntries[i].ignore)
id = (ushort)i;
}
}
if (id == UInt16.MaxValue)
throw new ArgumentException("Icon", "No valid icon image found");
iconSize.Height = iconDir.idEntries[id].height;
iconSize.Width = iconDir.idEntries[id].width;
}
else
{
iconSize.Height = size.Height;
iconSize.Width = size.Width;
}
if (original.bitmap != null)
bitmap = (Bitmap)original.bitmap.Clone();
}
public Icon(Stream stream) : this(stream, 32, 32)
{
}
public Icon(Stream stream, int width, int height)
{
InitFromStreamWithSize(stream, width, height);
}
public Icon(string fileName)
{
using (FileStream fs = File.OpenRead(fileName))
{
InitFromStreamWithSize(fs, 32, 32);
}
}
public Icon(Type type, string resource)
{
if (resource == null)
throw new ArgumentException("resource");
// For compatibility with the .NET Framework
if (type == null)
throw new NullReferenceException();
using (Stream s = type.GetTypeInfo().Assembly.GetManifestResourceStream(type, resource))
{
if (s == null)
{
throw new ArgumentException(null);
}
InitFromStreamWithSize(s, 32, 32); // 32x32 is default
}
}
internal Icon(string resourceName, bool undisposable)
{
using (Stream s = typeof(Icon).GetTypeInfo().Assembly.GetManifestResourceStream(resourceName))
{
if (s == null)
{
string msg = string.Format("Resource '{0}' was not found.", resourceName);
throw new FileNotFoundException(msg);
}
InitFromStreamWithSize(s, 32, 32); // 32x32 is default
}
this.undisposable = true;
}
public Icon(Stream stream, Size size) :
this(stream, size.Width, size.Height)
{
}
public Icon(string fileName, int width, int height)
{
using (FileStream fs = File.OpenRead(fileName))
{
InitFromStreamWithSize(fs, width, height);
}
}
public Icon(string fileName, Size size)
{
using (FileStream fs = File.OpenRead(fileName))
{
InitFromStreamWithSize(fs, size.Width, size.Height);
}
}
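// Illustrative usage sketch (assumption, not part of the original source); "app.ico" and
// "copy.ico" are hypothetical paths:
//
//   using (var icon = new Icon("app.ico", 16, 16))
//   using (var output = File.Create("copy.ico"))
//   {
//       Bitmap bmp = icon.ToBitmap(); // 32bpp ARGB bitmap of the selected image
//       icon.Save(output);            // writes every image from the original .ico
//   }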
[MonoLimitation("The same icon, SystemIcons.WinLogo, is returned for all file types.")]
public static Icon ExtractAssociatedIcon(string filePath)
{
if (filePath == null)
throw new ArgumentNullException(nameof(filePath));
if (String.IsNullOrEmpty(filePath))
throw new ArgumentException("Null or empty path.", "path");
if (!File.Exists(filePath))
throw new FileNotFoundException("Couldn't find specified file.", filePath);
return SystemIcons.WinLogo;
}
public void Dispose()
{
// SystemIcons requires this
if (undisposable)
return;
if (!disposed)
{
if (bitmap != null)
{
bitmap.Dispose();
bitmap = null;
}
GC.SuppressFinalize(this);
}
disposed = true;
}
public object Clone()
{
return new Icon(this, Size);
}
#if !MONOTOUCH
public static Icon FromHandle(IntPtr handle)
{
if (handle == IntPtr.Zero)
throw new ArgumentException("handle");
return new Icon(handle);
}
#endif
private void SaveIconImage(BinaryWriter writer, IconImage ii)
{
BitmapInfoHeader bih = ii.iconHeader;
writer.Write(bih.biSize);
writer.Write(bih.biWidth);
writer.Write(bih.biHeight);
writer.Write(bih.biPlanes);
writer.Write(bih.biBitCount);
writer.Write(bih.biCompression);
writer.Write(bih.biSizeImage);
writer.Write(bih.biXPelsPerMeter);
writer.Write(bih.biYPelsPerMeter);
writer.Write(bih.biClrUsed);
writer.Write(bih.biClrImportant);
//now write color table
int colCount = ii.iconColors.Length;
for (int j = 0; j < colCount; j++)
writer.Write(ii.iconColors[j]);
//now write XOR Mask
writer.Write(ii.iconXOR);
//now write AND Mask
writer.Write(ii.iconAND);
}
private void SaveIconDump(BinaryWriter writer, IconDump id)
{
writer.Write(id.data);
}
private void SaveIconDirEntry(BinaryWriter writer, IconDirEntry ide, uint offset)
{
writer.Write(ide.width);
writer.Write(ide.height);
writer.Write(ide.colorCount);
writer.Write(ide.reserved);
writer.Write(ide.planes);
writer.Write(ide.bitCount);
writer.Write(ide.bytesInRes);
writer.Write((offset == UInt32.MaxValue) ? ide.imageOffset : offset);
}
private void SaveAll(BinaryWriter writer)
{
writer.Write(iconDir.idReserved);
writer.Write(iconDir.idType);
ushort count = iconDir.idCount;
writer.Write(count);
for (int i = 0; i < (int)count; i++)
{
SaveIconDirEntry(writer, iconDir.idEntries[i], UInt32.MaxValue);
}
for (int i = 0; i < (int)count; i++)
{
//FIXME: HACK: 1 (out of the 8) Vista-type icons had additional zero-valued bytes
//between images. This fixes the issue, but perhaps it shouldn't be included in production?
while (writer.BaseStream.Length < iconDir.idEntries[i].imageOffset)
writer.Write((byte)0);
if (imageData[i] is IconDump)
SaveIconDump(writer, (IconDump)imageData[i]);
else
SaveIconImage(writer, (IconImage)imageData[i]);
}
}
// TODO: check that the image is not ignored (presently this method doesn't seem to be called
// unless width/height refer to an actual image)
private void SaveBestSingleIcon(BinaryWriter writer, int width, int height)
{
writer.Write(iconDir.idReserved);
writer.Write(iconDir.idType);
writer.Write((ushort)1);
// find best entry and save it
int best = 0;
int bitCount = 0;
for (int i = 0; i < iconDir.idCount; i++)
{
IconDirEntry ide = iconDir.idEntries[i];
if ((width == ide.width) && (height == ide.height))
{
if (ide.bitCount >= bitCount)
{
bitCount = ide.bitCount;
best = i;
}
}
}
SaveIconDirEntry(writer, iconDir.idEntries[best], 22);
SaveIconImage(writer, (IconImage)imageData[best]);
}
private void SaveBitmapAsIcon(BinaryWriter writer)
{
writer.Write((ushort)0); // idReserved must be 0
writer.Write((ushort)1); // idType must be 1
writer.Write((ushort)1); // only one icon
// when transformed into a bitmap only a single image exists
IconDirEntry ide = new IconDirEntry();
ide.width = (byte)bitmap.Width;
ide.height = (byte)bitmap.Height;
ide.colorCount = 0; // 32 bbp == 0, for palette size
ide.reserved = 0; // always 0
ide.planes = 0;
ide.bitCount = 32;
ide.imageOffset = 22; // 22 is the first icon position (for single icon files)
BitmapInfoHeader bih = new BitmapInfoHeader();
bih.biSize = (uint)Marshal.SizeOf(typeof(BitmapInfoHeader));
bih.biWidth = bitmap.Width;
bih.biHeight = 2 * bitmap.Height; // include both XOR and AND images
bih.biPlanes = 1;
bih.biBitCount = 32;
bih.biCompression = 0;
bih.biSizeImage = 0;
bih.biXPelsPerMeter = 0;
bih.biYPelsPerMeter = 0;
bih.biClrUsed = 0;
bih.biClrImportant = 0;
IconImage ii = new IconImage();
ii.iconHeader = bih;
ii.iconColors = new uint[0]; // no palette
int xor_size = (((bih.biBitCount * bitmap.Width + 31) & ~31) >> 3) * bitmap.Height;
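// Worked example (illustrative): for a 32x32 bitmap at 32bpp,
// xor_size = (((32 * 32 + 31) & ~31) >> 3) * 32 = 128 * 32 = 4096 bytes.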
ii.iconXOR = new byte[xor_size];
int p = 0;
for (int y = bitmap.Height - 1; y >= 0; y--)
{
for (int x = 0; x < bitmap.Width; x++)
{
Color c = bitmap.GetPixel(x, y);
ii.iconXOR[p++] = c.B;
ii.iconXOR[p++] = c.G;
ii.iconXOR[p++] = c.R;
ii.iconXOR[p++] = c.A;
}
}
int and_line_size = (((Width + 31) & ~31) >> 3); // must be a multiple of 4 bytes
int and_size = and_line_size * bitmap.Height;
ii.iconAND = new byte[and_size];
ide.bytesInRes = (uint)(bih.biSize + xor_size + and_size);
SaveIconDirEntry(writer, ide, UInt32.MaxValue);
SaveIconImage(writer, ii);
}
private void Save(Stream outputStream, int width, int height)
{
BinaryWriter writer = new BinaryWriter(outputStream);
// if we have the icon information then save from this
if (iconDir.idEntries != null)
{
if ((width == -1) && (height == -1))
SaveAll(writer);
else
SaveBestSingleIcon(writer, width, height);
}
else if (bitmap != null)
{
// if the icon was created from a bitmap then convert it
SaveBitmapAsIcon(writer);
}
writer.Flush();
}
public void Save(Stream outputStream)
{
if (outputStream == null)
throw new NullReferenceException("outputStream");
// save every icon available
Save(outputStream, -1, -1);
}
#if !MONOTOUCH
internal Bitmap BuildBitmapOnWin32()
{
Bitmap bmp;
if (imageData == null)
return new Bitmap(32, 32);
IconImage ii = (IconImage)imageData[id];
BitmapInfoHeader bih = ii.iconHeader;
int biHeight = bih.biHeight / 2;
int ncolors = (int)bih.biClrUsed;
if ((ncolors == 0) && (bih.biBitCount < 24))
ncolors = (int)(1 << bih.biBitCount);
switch (bih.biBitCount)
{
case 1:
bmp = new Bitmap(bih.biWidth, biHeight, PixelFormat.Format1bppIndexed);
break;
case 4:
bmp = new Bitmap(bih.biWidth, biHeight, PixelFormat.Format4bppIndexed);
break;
case 8:
bmp = new Bitmap(bih.biWidth, biHeight, PixelFormat.Format8bppIndexed);
break;
case 24:
bmp = new Bitmap(bih.biWidth, biHeight, PixelFormat.Format24bppRgb);
break;
case 32:
bmp = new Bitmap(bih.biWidth, biHeight, PixelFormat.Format32bppArgb);
break;
default:
string msg = string.Format("Unexpected number of bits: {0}", bih.biBitCount);
throw new Exception(msg);
}
if (bih.biBitCount < 24)
{
ColorPalette pal = bmp.Palette; // Managed palette
for (int i = 0; i < ii.iconColors.Length; i++)
{
pal.Entries[i] = Color.FromArgb((int)ii.iconColors[i] | unchecked((int)0xff000000));
}
bmp.Palette = pal;
}
int bytesPerLine = (int)((((bih.biWidth * bih.biBitCount) + 31) & ~31) >> 3);
BitmapData bits = bmp.LockBits(new Rectangle(0, 0, bmp.Width, bmp.Height), ImageLockMode.WriteOnly, bmp.PixelFormat);
for (int y = 0; y < biHeight; y++)
{
Marshal.Copy(ii.iconXOR, bytesPerLine * y,
(IntPtr)(bits.Scan0.ToInt64() + bits.Stride * (biHeight - 1 - y)), bytesPerLine);
}
bmp.UnlockBits(bits);
bmp = new Bitmap(bmp); // This makes a 32bpp image out of an indexed one
// Apply the mask to make properly transparent
bytesPerLine = (int)((((bih.biWidth) + 31) & ~31) >> 3);
for (int y = 0; y < biHeight; y++)
{
for (int x = 0; x < bih.biWidth / 8; x++)
{
for (int bit = 7; bit >= 0; bit--)
{
if (((ii.iconAND[y * bytesPerLine + x] >> bit) & 1) != 0)
{
bmp.SetPixel(x * 8 + 7 - bit, biHeight - y - 1, Color.Transparent);
}
}
}
}
return bmp;
}
internal Bitmap GetInternalBitmap()
{
if (bitmap == null)
{
// Mono's libgdiplus doesn't require keeping the stream alive when loading images
using (MemoryStream ms = new MemoryStream())
{
// save the current icon
Save(ms, Width, Height);
ms.Position = 0;
// libgdiplus can now decode icons
bitmap = (Bitmap)Image.LoadFromStream(ms, false);
}
}
return bitmap;
}
// note: all bitmaps are 32bits ARGB - no matter what the icon format (bitcount) was
public Bitmap ToBitmap()
{
if (disposed)
throw new ObjectDisposedException("Icon instance was disposed.");
// note: we can't return the original image because
// (a) we have no control over the bitmap instance we return (i.e. it could be disposed)
// (b) the palette, flags won't match MS results. See MonoTests.System.Drawing.Imaging.IconCodecTest.
// Image16 for the differences
return new Bitmap(GetInternalBitmap());
}
#endif
public override string ToString()
{
// Is this correct? It is what .NET returns.
return "<Icon>";
}
#if !MONOTOUCH
[Browsable(false)]
public IntPtr Handle
{
get
{
if (disposed)
{
throw new ObjectDisposedException(GetType().Name);
}
// note: this handle doesn't survive the lifespan of the icon instance
if (handle == IntPtr.Zero)
{
handle = GetInternalBitmap().nativeImage;
}
return handle;
}
}
#endif
[Browsable(false)]
public int Height
{
get
{
if (disposed)
{
throw new ObjectDisposedException(GetType().Name);
}
return iconSize.Height;
}
}
public Size Size
{
get
{
if (disposed)
{
throw new ObjectDisposedException(GetType().Name);
}
return iconSize;
}
}
[Browsable(false)]
public int Width
{
get
{
if (disposed)
{
throw new ObjectDisposedException(GetType().Name);
}
return iconSize.Width;
}
}
~Icon()
{
Dispose();
}
private void InitFromStreamWithSize(Stream stream, int width, int height)
{
if (stream == null)
throw new ArgumentNullException(nameof(stream));
if (stream.Length == 0)
throw new System.ArgumentException("The argument 'stream' must be a picture that can be used as a Icon", "stream");
//read the icon header
BinaryReader reader = new BinaryReader(stream);
//iconDir = new IconDir ();
iconDir.idReserved = reader.ReadUInt16();
if (iconDir.idReserved != 0) //must be 0
throw new System.ArgumentException("Invalid Argument", "stream");
iconDir.idType = reader.ReadUInt16();
if (iconDir.idType != 1) //must be 1
throw new System.ArgumentException("Invalid Argument", "stream");
ushort dirEntryCount = reader.ReadUInt16();
imageData = new ImageData[dirEntryCount];
iconDir.idCount = dirEntryCount;
iconDir.idEntries = new IconDirEntry[dirEntryCount];
bool sizeObtained = false;
// now read in the IconDirEntry structures
for (int i = 0; i < dirEntryCount; i++)
{
IconDirEntry ide;
ide.width = reader.ReadByte();
ide.height = reader.ReadByte();
ide.colorCount = reader.ReadByte();
ide.reserved = reader.ReadByte();
ide.planes = reader.ReadUInt16();
ide.bitCount = reader.ReadUInt16();
ide.bytesInRes = reader.ReadUInt32();
ide.imageOffset = reader.ReadUInt32();
// Vista 256x256 icons point directly to a PNG bitmap
// 256x256 icons are decoded as 0x0 (width and height are encoded as BYTE)
// and we ignore them just like MS does (at least up to fx 2.0)
// Added: storing data so it can be saved back
if ((ide.width == 0) && (ide.height == 0))
ide.ignore = true;
else
ide.ignore = false;
iconDir.idEntries[i] = ide;
// is this the best fit?
if (!sizeObtained)
{
if (((ide.height == height) || (ide.width == width)) && !ide.ignore)
{
this.id = (ushort)i;
sizeObtained = true;
this.iconSize.Height = ide.height;
this.iconSize.Width = ide.width;
}
}
}
// throw error if no valid entries found
int valid = 0;
for (int i = 0; i < dirEntryCount; i++)
{
if (!(iconDir.idEntries[i].ignore))
valid++;
}
if (valid == 0)
throw new Win32Exception(0, "No valid icon entries were found.");
// if we haven't found the best match, return the one with the
// largest size. Is this approach correct?
if (!sizeObtained)
{
uint largestSize = 0;
for (int j = 0; j < dirEntryCount; j++)
{
if (iconDir.idEntries[j].bytesInRes >= largestSize && !iconDir.idEntries[j].ignore)
{
largestSize = iconDir.idEntries[j].bytesInRes;
this.id = (ushort)j;
this.iconSize.Height = iconDir.idEntries[j].height;
this.iconSize.Width = iconDir.idEntries[j].width;
}
}
}
//now read in the icon data
for (int j = 0; j < dirEntryCount; j++)
{
// process ignored into IconDump
if (iconDir.idEntries[j].ignore)
{
IconDump id = new IconDump();
stream.Seek(iconDir.idEntries[j].imageOffset, SeekOrigin.Begin);
id.data = new byte[iconDir.idEntries[j].bytesInRes];
stream.Read(id.data, 0, id.data.Length);
imageData[j] = id;
continue;
}
// standard image
IconImage iidata = new IconImage();
BitmapInfoHeader bih = new BitmapInfoHeader();
stream.Seek(iconDir.idEntries[j].imageOffset, SeekOrigin.Begin);
byte[] buffer = new byte[iconDir.idEntries[j].bytesInRes];
stream.Read(buffer, 0, buffer.Length);
BinaryReader bihReader = new BinaryReader(new MemoryStream(buffer));
bih.biSize = bihReader.ReadUInt32();
bih.biWidth = bihReader.ReadInt32();
bih.biHeight = bihReader.ReadInt32();
bih.biPlanes = bihReader.ReadUInt16();
bih.biBitCount = bihReader.ReadUInt16();
bih.biCompression = bihReader.ReadUInt32();
bih.biSizeImage = bihReader.ReadUInt32();
bih.biXPelsPerMeter = bihReader.ReadInt32();
bih.biYPelsPerMeter = bihReader.ReadInt32();
bih.biClrUsed = bihReader.ReadUInt32();
bih.biClrImportant = bihReader.ReadUInt32();
iidata.iconHeader = bih;
//Read the number of colors used and corresponding memory occupied by
//color table. Fill this memory chunk into rgbquad[]
int numColors;
switch (bih.biBitCount)
{
case 1:
numColors = 2;
break;
case 4:
numColors = 16;
break;
case 8:
numColors = 256;
break;
default:
numColors = 0;
break;
}
iidata.iconColors = new uint[numColors];
for (int i = 0; i < numColors; i++)
iidata.iconColors[i] = bihReader.ReadUInt32();
//XOR mask is immediately after ColorTable and its size is
//icon height* no. of bytes per line
//icon height is half of BITMAPINFOHEADER.biHeight, since it contains
//both XOR as well as AND mask bytes
int iconHeight = bih.biHeight / 2;
//bytes per line should be uint (32-bit) aligned
int numBytesPerLine = ((((bih.biWidth * bih.biPlanes * bih.biBitCount) + 31) >> 5) << 2);
//Determine the XOR array Size
int xorSize = numBytesPerLine * iconHeight;
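// Worked example (illustrative): a 32x32, 32bpp image stores biHeight = 64 (XOR + AND),
// so iconHeight = 32 and numBytesPerLine = ((32 * 1 * 32 + 31) >> 5) << 2 = 128,
// giving xorSize = 128 * 32 = 4096 bytes.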
iidata.iconXOR = new byte[xorSize];
int nread = bihReader.Read(iidata.iconXOR, 0, xorSize);
if (nread != xorSize)
{
string msg = string.Format("{0} data length expected {1}, read {2}", "XOR", xorSize, nread);
throw new ArgumentException(msg, "stream");
}
//Determine the AND array size
numBytesPerLine = (int)((((bih.biWidth) + 31) & ~31) >> 3);
int andSize = numBytesPerLine * iconHeight;
iidata.iconAND = new byte[andSize];
nread = bihReader.Read(iidata.iconAND, 0, andSize);
if (nread != andSize)
{
string msg = string.Format("{0} data length expected {1}, read {2}", "AND", andSize, nread);
throw new ArgumentException(msg, "stream");
}
imageData[j] = iidata;
bihReader.Dispose();
}
reader.Dispose();
}
}
}
| |
// Copyright (c) .NET Foundation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.IO;
using FluentAssertions;
using Microsoft.DotNet.TestFramework;
using Microsoft.DotNet.Tools.Test.Utilities;
using NuGet.Frameworks;
using Xunit;
namespace Microsoft.DotNet.Cli.Utils.Tests
{
public class GivenAProjectDependenciesCommandFactory : TestBase
{
private static readonly NuGetFramework s_desktopTestFramework = FrameworkConstants.CommonFrameworks.Net451;
private RepoDirectoriesProvider _repoDirectoriesProvider;
public GivenAProjectDependenciesCommandFactory()
{
_repoDirectoriesProvider = new RepoDirectoriesProvider();
Environment.SetEnvironmentVariable(
Constants.MSBUILD_EXE_PATH,
Path.Combine(_repoDirectoriesProvider.Stage2Sdk, "MSBuild.dll"));
}
[WindowsOnlyFact]
public void It_resolves_desktop_apps_defaulting_to_Debug_Configuration()
{
var configuration = "Debug";
var testInstance = TestAssets.Get(TestAssetKinds.DesktopTestProjects, "AppWithProjTool2Fx")
.CreateInstance()
.WithSourceFiles()
.WithNuGetConfig(_repoDirectoriesProvider.TestPackages);
var restoreCommand = new RestoreCommand()
.WithWorkingDirectory(testInstance.Root)
.ExecuteWithCapturedOutput()
.Should().Pass();
var buildCommand = new BuildCommand()
.WithWorkingDirectory(testInstance.Root)
.WithConfiguration(configuration)
.WithCapturedOutput()
.Execute()
.Should().Pass();
var factory = new ProjectDependenciesCommandFactory(
s_desktopTestFramework,
null,
null,
null,
testInstance.Root.FullName);
var command = factory.Create("dotnet-desktop-and-portable", null);
command.CommandName.Should().Contain(testInstance.Root.GetDirectory("bin", configuration).FullName);
Path.GetFileName(command.CommandName).Should().Be("dotnet-desktop-and-portable.exe");
}
[WindowsOnlyFact]
public void It_resolves_desktop_apps_when_configuration_is_Debug()
{
var configuration = "Debug";
var testInstance = TestAssets.Get(TestAssetKinds.DesktopTestProjects, "AppWithProjTool2Fx")
.CreateInstance()
.WithSourceFiles()
.WithNuGetConfig(_repoDirectoriesProvider.TestPackages);
var restoreCommand = new RestoreCommand()
.WithWorkingDirectory(testInstance.Root)
.ExecuteWithCapturedOutput()
.Should().Pass();
var buildCommand = new BuildCommand()
.WithWorkingDirectory(testInstance.Root)
.WithConfiguration(configuration)
.Execute()
.Should().Pass();
var factory = new ProjectDependenciesCommandFactory(
s_desktopTestFramework,
configuration,
null,
null,
testInstance.Root.FullName);
var command = factory.Create("dotnet-desktop-and-portable", null);
command.CommandName.Should().Contain(testInstance.Root.GetDirectory("bin", configuration).FullName);
Path.GetFileName(command.CommandName).Should().Be("dotnet-desktop-and-portable.exe");
}
[WindowsOnlyFact]
public void It_resolves_desktop_apps_when_configuration_is_Release()
{
var configuration = "Release";
var testInstance = TestAssets.Get(TestAssetKinds.DesktopTestProjects, "AppWithProjTool2Fx")
.CreateInstance()
.WithSourceFiles()
.WithNuGetConfig(_repoDirectoriesProvider.TestPackages);
var restoreCommand = new RestoreCommand()
.WithWorkingDirectory(testInstance.Root)
.ExecuteWithCapturedOutput()
.Should().Pass();
var buildCommand = new BuildCommand()
.WithWorkingDirectory(testInstance.Root)
.WithConfiguration(configuration)
.WithCapturedOutput()
.Execute()
.Should().Pass();
var factory = new ProjectDependenciesCommandFactory(
s_desktopTestFramework,
configuration,
null,
null,
testInstance.Root.FullName);
var command = factory.Create("dotnet-desktop-and-portable", null);
command.CommandName.Should().Contain(testInstance.Root.GetDirectory("bin", configuration).FullName);
Path.GetFileName(command.CommandName).Should().Be("dotnet-desktop-and-portable.exe");
}
[WindowsOnlyFact]
public void It_resolves_desktop_apps_using_configuration_passed_to_create()
{
var configuration = "Release";
var testInstance = TestAssets.Get(TestAssetKinds.DesktopTestProjects, "AppWithProjTool2Fx")
.CreateInstance()
.WithSourceFiles()
.WithNuGetConfig(_repoDirectoriesProvider.TestPackages);
var restoreCommand = new RestoreCommand()
.WithWorkingDirectory(testInstance.Root)
.ExecuteWithCapturedOutput()
.Should().Pass();
var buildCommand = new BuildCommand()
.WithWorkingDirectory(testInstance.Root)
.WithConfiguration(configuration)
.WithCapturedOutput()
.Execute()
.Should().Pass();
var factory = new ProjectDependenciesCommandFactory(
s_desktopTestFramework,
"Debug",
null,
null,
testInstance.Root.FullName);
var command = factory.Create("dotnet-desktop-and-portable", null, configuration: configuration);
command.CommandName.Should().Contain(testInstance.Root.GetDirectory("bin", configuration).FullName);
Path.GetFileName(command.CommandName).Should().Be("dotnet-desktop-and-portable.exe");
}
[Fact]
public void It_resolves_tools_whose_package_name_is_different_than_dll_name()
{
Environment.SetEnvironmentVariable(
Constants.MSBUILD_EXE_PATH,
Path.Combine(new RepoDirectoriesProvider().Stage2Sdk, "MSBuild.dll"));
var configuration = "Debug";
var testInstance = TestAssets.Get("AppWithDirectDepWithOutputName")
.CreateInstance()
.WithSourceFiles()
.WithRestoreFiles();
var buildCommand = new BuildCommand()
.WithProjectDirectory(testInstance.Root)
.WithConfiguration(configuration)
.WithCapturedOutput()
.Execute()
.Should().Pass();
var factory = new ProjectDependenciesCommandFactory(
FrameworkConstants.CommonFrameworks.NetCoreApp10,
configuration,
null,
null,
testInstance.Root.FullName);
var command = factory.Create("dotnet-tool-with-output-name", null);
command.CommandArgs.Should().Contain(
Path.Combine("toolwithoutputname", "1.0.0", "lib", "netcoreapp1.0", "dotnet-tool-with-output-name.dll"));
}
}
}
| |
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
using System;
using System.Linq;
using System.Reactive.Disposables;
using System.Reactive.Linq;
using Avalonia.Controls.Primitives;
using Avalonia.Input;
using Avalonia.Layout;
using Avalonia.VisualTree;
namespace Avalonia.Controls.Presenters
{
/// <summary>
/// Presents a scrolling view of content inside a <see cref="ScrollViewer"/>.
/// </summary>
public class ScrollContentPresenter : ContentPresenter, IPresenter, IScrollable
{
/// <summary>
/// Defines the <see cref="Extent"/> property.
/// </summary>
public static readonly DirectProperty<ScrollContentPresenter, Size> ExtentProperty =
ScrollViewer.ExtentProperty.AddOwner<ScrollContentPresenter>(
o => o.Extent,
(o, v) => o.Extent = v);
/// <summary>
/// Defines the <see cref="Offset"/> property.
/// </summary>
public static readonly DirectProperty<ScrollContentPresenter, Vector> OffsetProperty =
ScrollViewer.OffsetProperty.AddOwner<ScrollContentPresenter>(
o => o.Offset,
(o, v) => o.Offset = v);
/// <summary>
/// Defines the <see cref="Viewport"/> property.
/// </summary>
public static readonly DirectProperty<ScrollContentPresenter, Size> ViewportProperty =
ScrollViewer.ViewportProperty.AddOwner<ScrollContentPresenter>(
o => o.Viewport,
(o, v) => o.Viewport = v);
/// <summary>
/// Defines the <see cref="CanScrollHorizontally"/> property.
/// </summary>
public static readonly StyledProperty<bool> CanScrollHorizontallyProperty =
ScrollViewer.CanScrollHorizontallyProperty.AddOwner<ScrollContentPresenter>();
private Size _extent;
private Size _measuredExtent;
private Vector _offset;
private IDisposable _logicalScrollSubscription;
private Size _viewport;
/// <summary>
/// Initializes static members of the <see cref="ScrollContentPresenter"/> class.
/// </summary>
static ScrollContentPresenter()
{
ClipToBoundsProperty.OverrideDefaultValue(typeof(ScrollContentPresenter), true);
ChildProperty.Changed.AddClassHandler<ScrollContentPresenter>(x => x.ChildChanged);
AffectsArrange(OffsetProperty);
}
/// <summary>
/// Initializes a new instance of the <see cref="ScrollContentPresenter"/> class.
/// </summary>
public ScrollContentPresenter()
{
AddHandler(RequestBringIntoViewEvent, BringIntoViewRequested);
this.GetObservable(ChildProperty).Subscribe(UpdateScrollableSubscription);
}
/// <summary>
/// Gets the extent of the scrollable content.
/// </summary>
public Size Extent
{
get { return _extent; }
private set { SetAndRaise(ExtentProperty, ref _extent, value); }
}
/// <summary>
/// Gets or sets the current scroll offset.
/// </summary>
public Vector Offset
{
get { return _offset; }
set { SetAndRaise(OffsetProperty, ref _offset, value); }
}
/// <summary>
/// Gets the size of the viewport on the scrollable content.
/// </summary>
public Size Viewport
{
get { return _viewport; }
private set { SetAndRaise(ViewportProperty, ref _viewport, value); }
}
/// <summary>
/// Gets a value indicating whether the content can be scrolled horizontally.
/// </summary>
public bool CanScrollHorizontally => GetValue(CanScrollHorizontallyProperty);
/// <summary>
/// Attempts to bring a portion of the target visual into view by scrolling the content.
/// </summary>
/// <param name="target">The target visual.</param>
/// <param name="targetRect">The portion of the target visual to bring into view.</param>
/// <returns>True if the scroll offset was changed; otherwise false.</returns>
public bool BringDescendentIntoView(IVisual target, Rect targetRect)
{
if (Child == null)
{
return false;
}
var scrollable = Child as ILogicalScrollable;
var control = target as IControl;
if (scrollable?.IsLogicalScrollEnabled == true && control != null)
{
return scrollable.BringIntoView(control, targetRect);
}
var transform = target.TransformToVisual(Child);
if (transform == null)
{
return false;
}
var rect = targetRect * transform.Value;
var offset = Offset;
var result = false;
if (rect.Bottom > offset.Y + Viewport.Height)
{
offset = offset.WithY((rect.Bottom - Viewport.Height) + Child.Margin.Top);
result = true;
}
if (rect.Y < offset.Y)
{
offset = offset.WithY(rect.Y);
result = true;
}
if (rect.Right > offset.X + Viewport.Width)
{
offset = offset.WithX((rect.Right - Viewport.Width) + Child.Margin.Left);
result = true;
}
if (rect.X < offset.X)
{
offset = offset.WithX(rect.X);
result = true;
}
if (result)
{
Offset = offset;
}
return result;
}
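// Illustrative usage sketch (assumption, not part of the original source); 'presenter' and
// 'focusedControl' are hypothetical variables:
//
//   bool scrolled = presenter.BringDescendentIntoView(
//       focusedControl,
//       new Rect(focusedControl.Bounds.Size));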
/// <inheritdoc/>
protected override Size MeasureOverride(Size availableSize)
{
var child = Child;
if (child != null)
{
var measureSize = availableSize;
if (_logicalScrollSubscription == null)
{
measureSize = new Size(double.PositiveInfinity, double.PositiveInfinity);
if (!CanScrollHorizontally)
{
measureSize = measureSize.WithWidth(availableSize.Width);
}
}
child.Measure(measureSize);
var size = child.DesiredSize;
_measuredExtent = size;
return size.Constrain(availableSize);
}
else
{
return Extent = new Size();
}
}
/// <inheritdoc/>
protected override Size ArrangeOverride(Size finalSize)
{
var child = this.GetVisualChildren().SingleOrDefault() as ILayoutable;
var logicalScroll = _logicalScrollSubscription != null;
if (!logicalScroll)
{
Viewport = finalSize;
Extent = _measuredExtent;
if (child != null)
{
var size = new Size(
Math.Max(finalSize.Width, child.DesiredSize.Width),
Math.Max(finalSize.Height, child.DesiredSize.Height));
child.Arrange(new Rect((Point)(-Offset), size));
return finalSize;
}
}
else if (child != null)
{
child.Arrange(new Rect(finalSize));
return finalSize;
}
return new Size();
}
/// <inheritdoc/>
protected override void OnPointerWheelChanged(PointerWheelEventArgs e)
{
if (Extent.Height > Viewport.Height || Extent.Width > Viewport.Width)
{
var scrollable = Child as ILogicalScrollable;
bool isLogical = scrollable?.IsLogicalScrollEnabled == true;
double x = Offset.X;
double y = Offset.Y;
if (Extent.Height > Viewport.Height)
{
double height = isLogical ? scrollable.ScrollSize.Height : 50;
y += -e.Delta.Y * height;
y = Math.Max(y, 0);
y = Math.Min(y, Extent.Height - Viewport.Height);
}
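// Worked example (illustrative): with Extent.Height = 500, Viewport.Height = 200 and a
// non-logical wheel delta of +1, y decreases by 50 and is then clamped to [0, 300].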
if (Extent.Width > Viewport.Width)
{
double width = isLogical ? scrollable.ScrollSize.Width : 50;
x += -e.Delta.X * width;
x = Math.Max(x, 0);
x = Math.Min(x, Extent.Width - Viewport.Width);
}
Offset = new Vector(x, y);
e.Handled = true;
}
}
private void BringIntoViewRequested(object sender, RequestBringIntoViewEventArgs e)
{
e.Handled = BringDescendentIntoView(e.TargetObject, e.TargetRect);
}
private void ChildChanged(AvaloniaPropertyChangedEventArgs e)
{
UpdateScrollableSubscription((IControl)e.NewValue);
if (e.OldValue != null)
{
Offset = default(Vector);
}
}
private void UpdateScrollableSubscription(IControl child)
{
var scrollable = child as ILogicalScrollable;
_logicalScrollSubscription?.Dispose();
_logicalScrollSubscription = null;
if (scrollable != null)
{
scrollable.InvalidateScroll = () => UpdateFromScrollable(scrollable);
if (scrollable.IsLogicalScrollEnabled == true)
{
_logicalScrollSubscription = new CompositeDisposable(
this.GetObservable(OffsetProperty).Skip(1).Subscribe(x => scrollable.Offset = x),
Disposable.Create(() => scrollable.InvalidateScroll = null));
UpdateFromScrollable(scrollable);
}
}
}
private void UpdateFromScrollable(ILogicalScrollable scrollable)
{
var logicalScroll = _logicalScrollSubscription != null;
if (logicalScroll != scrollable.IsLogicalScrollEnabled)
{
UpdateScrollableSubscription(Child);
Offset = default(Vector);
InvalidateMeasure();
}
else if (scrollable.IsLogicalScrollEnabled)
{
Viewport = scrollable.Viewport;
Extent = scrollable.Extent;
Offset = scrollable.Offset;
}
}
}
}
| |
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.Linq;
using Avalonia.Rendering;
namespace Avalonia.VisualTree
{
/// <summary>
/// Provides extension methods for working with the visual tree.
/// </summary>
public static class VisualExtensions
{
/// <summary>
/// Calculates the distance from a visual to an ancestor in the visual tree.
/// </summary>
/// <param name="visual">The visual.</param>
/// <param name="ancestor">The ancestor visual.</param>
/// <returns>
/// The number of steps from the visual to the ancestor or -1 if
/// <paramref name="visual"/> is not a descendent of <paramref name="ancestor"/>.
/// </returns>
public static int CalculateDistanceFromAncestor(this IVisual visual, IVisual ancestor)
{
Contract.Requires<ArgumentNullException>(visual != null);
var result = 0;
while (visual != null && visual != ancestor)
{
++result;
visual = visual.VisualParent;
}
return visual != null ? result : -1;
}
/// <summary>
/// Tries to get the first common ancestor of two visuals.
/// </summary>
/// <param name="visual">The first visual.</param>
/// <param name="target">The second visual.</param>
/// <returns>The common ancestor, or null if not found.</returns>
public static IVisual FindCommonVisualAncestor(this IVisual visual, IVisual target)
{
Contract.Requires<ArgumentNullException>(visual != null);
return visual.GetSelfAndVisualAncestors().Intersect(target.GetSelfAndVisualAncestors())
.FirstOrDefault();
}
/// <summary>
/// Enumerates the ancestors of an <see cref="IVisual"/> in the visual tree.
/// </summary>
/// <param name="visual">The visual.</param>
/// <returns>The visual's ancestors.</returns>
public static IEnumerable<IVisual> GetVisualAncestors(this IVisual visual)
{
Contract.Requires<ArgumentNullException>(visual != null);
visual = visual.VisualParent;
while (visual != null)
{
yield return visual;
visual = visual.VisualParent;
}
}
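// Illustrative usage sketch (assumption, not part of the original source): a common pattern is
// to search upwards for a particular ancestor type, e.g. from a control:
//
//   var scrollViewer = control.GetVisualAncestors()
//       .OfType<ScrollViewer>()
//       .FirstOrDefault();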
/// <summary>
/// Enumerates an <see cref="IVisual"/> and its ancestors in the visual tree.
/// </summary>
/// <param name="visual">The visual.</param>
/// <returns>The visual and its ancestors.</returns>
public static IEnumerable<IVisual> GetSelfAndVisualAncestors(this IVisual visual)
{
Contract.Requires<ArgumentNullException>(visual != null);
yield return visual;
foreach (var ancestor in visual.GetVisualAncestors())
{
yield return ancestor;
}
}
/// <summary>
/// Gets the first visual in the visual tree whose bounds contain a point.
/// </summary>
/// <param name="visual">The root visual to test.</param>
/// <param name="p">The point.</param>
/// <returns>The first visual at the requested point, or null if none was found.</returns>
public static IVisual GetVisualAt(this IVisual visual, Point p)
{
Contract.Requires<ArgumentNullException>(visual != null);
return visual.GetVisualsAt(p).FirstOrDefault();
}
/// <summary>
/// Enumerates the visible visuals in the visual tree whose bounds contain a point.
/// </summary>
/// <param name="visual">The root visual to test.</param>
/// <param name="p">The point.</param>
/// <returns>The visuals at the requested point.</returns>
public static IEnumerable<IVisual> GetVisualsAt(
this IVisual visual,
Point p)
{
Contract.Requires<ArgumentNullException>(visual != null);
return visual.GetVisualsAt(p, x => x.IsVisible);
}
/// <summary>
/// Enumerates the visuals in the visual tree whose bounds contain a point.
/// </summary>
/// <param name="visual">The root visual to test.</param>
/// <param name="p">The point.</param>
/// <param name="filter">
/// A filter predicate. If the predicate returns false then the visual and all its
/// children will be excluded from the results.
/// </param>
/// <returns>The visuals at the requested point.</returns>
public static IEnumerable<IVisual> GetVisualsAt(
this IVisual visual,
Point p,
Func<IVisual, bool> filter)
{
Contract.Requires<ArgumentNullException>(visual != null);
var root = visual.GetVisualRoot();
p = visual.TranslatePoint(p, root);
return root.Renderer.HitTest(p, filter);
}
/// <summary>
/// Enumerates the children of an <see cref="IVisual"/> in the visual tree.
/// </summary>
/// <param name="visual">The visual.</param>
/// <returns>The visual children.</returns>
public static IEnumerable<IVisual> GetVisualChildren(this IVisual visual)
{
return visual.VisualChildren;
}
/// <summary>
/// Enumerates the descendants of an <see cref="IVisual"/> in the visual tree.
/// </summary>
/// <param name="visual">The visual.</param>
/// <returns>The visual's descendants.</returns>
public static IEnumerable<IVisual> GetVisualDescendants(this IVisual visual)
{
foreach (IVisual child in visual.VisualChildren)
{
yield return child;
foreach (IVisual descendant in child.GetVisualDescendants())
{
yield return descendant;
}
}
}
/// <summary>
/// Enumerates an <see cref="IVisual"/> and its descendants in the visual tree.
/// </summary>
/// <param name="visual">The visual.</param>
/// <returns>The visual and its descendants.</returns>
public static IEnumerable<IVisual> GetSelfAndVisualDescendants(this IVisual visual)
{
yield return visual;
foreach (var ancestor in visual.GetVisualDescendants())
{
yield return ancestor;
}
}
/// <summary>
/// Gets the visual parent of an <see cref="IVisual"/>.
/// </summary>
/// <param name="visual">The visual.</param>
/// <returns>The parent, or null if the visual is unparented.</returns>
public static IVisual GetVisualParent(this IVisual visual)
{
return visual.VisualParent;
}
/// <summary>
/// Gets the visual parent of an <see cref="IVisual"/>.
/// </summary>
/// <typeparam name="T">The type of the visual parent.</typeparam>
/// <param name="visual">The visual.</param>
/// <returns>
/// The parent, or null if the visual is unparented or its parent is not of type <typeparamref name="T"/>.
/// </returns>
public static T GetVisualParent<T>(this IVisual visual) where T : class
{
return visual.VisualParent as T;
}
/// <summary>
/// Gets the root visual for an <see cref="IVisual"/>.
/// </summary>
/// <param name="visual">The visual.</param>
/// <returns>
/// The root visual or null if the visual is not rooted.
/// </returns>
public static IRenderRoot GetVisualRoot(this IVisual visual)
{
Contract.Requires<ArgumentNullException>(visual != null);
return visual as IRenderRoot ?? visual.VisualRoot;
}
/// <summary>
/// Tests whether an <see cref="IVisual"/> is an ancestor of another visual.
/// </summary>
/// <param name="visual">The visual.</param>
/// <param name="target">The potential descendant.</param>
/// <returns>
/// True if <paramref name="visual"/> is an ancestor of <paramref name="target"/>;
/// otherwise false.
/// </returns>
public static bool IsVisualAncestorOf(this IVisual visual, IVisual target)
{
return target.GetVisualAncestors().Any(x => x == visual);
}
public static IEnumerable<IVisual> SortByZIndex(this IEnumerable<IVisual> elements)
{
return elements
.Select((element, index) => new ZOrderElement
{
Element = element,
Index = index,
ZIndex = element.ZIndex,
})
.OrderBy(x => x, null)
.Select(x => x.Element);
}
private class ZOrderElement : IComparable<ZOrderElement>
{
public IVisual Element { get; set; }
public int Index { get; set; }
public int ZIndex { get; set; }
public int CompareTo(ZOrderElement other)
{
var z = other.ZIndex - ZIndex;
if (z != 0)
{
return z;
}
else
{
return other.Index - Index;
}
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.IO.Pipelines;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Features;
using Microsoft.Extensions.Primitives;
using Microsoft.Net.Http.Headers;
namespace Microsoft.AspNetCore.ResponseCompression
{
/// <summary>
/// Stream wrapper that creates the specific compression stream only when necessary.
/// </summary>
internal class ResponseCompressionBody : Stream, IHttpResponseBodyFeature, IHttpsCompressionFeature
{
private readonly HttpContext _context;
private readonly IResponseCompressionProvider _provider;
private readonly IHttpResponseBodyFeature _innerBodyFeature;
private readonly Stream _innerStream;
private ICompressionProvider? _compressionProvider;
private bool _compressionChecked;
private Stream? _compressionStream;
private PipeWriter? _pipeAdapter;
private bool _providerCreated;
private bool _autoFlush;
private bool _complete;
internal ResponseCompressionBody(HttpContext context, IResponseCompressionProvider provider,
IHttpResponseBodyFeature innerBodyFeature)
{
_context = context;
_provider = provider;
_innerBodyFeature = innerBodyFeature;
_innerStream = innerBodyFeature.Stream;
}
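// Illustrative sketch (assumption, not part of this source): this body is installed by the
// response compression middleware; applications typically enable that middleware with the
// standard registration calls rather than constructing this type directly:
//
//   services.AddResponseCompression(options => options.EnableForHttps = true);
//   // ...
//   app.UseResponseCompression();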
internal async Task FinishCompressionAsync()
{
if (_complete)
{
return;
}
_complete = true;
if (_pipeAdapter != null)
{
await _pipeAdapter.CompleteAsync();
}
if (_compressionStream != null)
{
await _compressionStream.DisposeAsync();
}
// Adds the compression headers for HEAD requests even if the body was not used.
if (!_compressionChecked && HttpMethods.IsHead(_context.Request.Method))
{
InitializeCompressionHeaders();
}
}
HttpsCompressionMode IHttpsCompressionFeature.Mode { get; set; } = HttpsCompressionMode.Default;
public override bool CanRead => false;
public override bool CanSeek => false;
public override bool CanWrite => _innerStream.CanWrite;
public override long Length
{
get { throw new NotSupportedException(); }
}
public override long Position
{
get { throw new NotSupportedException(); }
set { throw new NotSupportedException(); }
}
public Stream Stream => this;
public PipeWriter Writer
{
get
{
if (_pipeAdapter == null)
{
_pipeAdapter = PipeWriter.Create(Stream, new StreamPipeWriterOptions(leaveOpen: true));
}
return _pipeAdapter;
}
}
public override void Flush()
{
if (!_compressionChecked)
{
OnWrite();
// Flush the original stream to send the headers. Flushing the compression stream won't
// flush the original stream if no data has been written yet.
_innerStream.Flush();
return;
}
if (_compressionStream != null)
{
_compressionStream.Flush();
}
else
{
_innerStream.Flush();
}
}
public override Task FlushAsync(CancellationToken cancellationToken)
{
if (!_compressionChecked)
{
OnWrite();
// Flush the original stream to send the headers. Flushing the compression stream won't
// flush the original stream if no data has been written yet.
return _innerStream.FlushAsync(cancellationToken);
}
if (_compressionStream != null)
{
return _compressionStream.FlushAsync(cancellationToken);
}
return _innerStream.FlushAsync(cancellationToken);
}
public override int Read(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
}
public override void SetLength(long value)
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
OnWrite();
if (_compressionStream != null)
{
_compressionStream.Write(buffer, offset, count);
if (_autoFlush)
{
_compressionStream.Flush();
}
}
else
{
_innerStream.Write(buffer, offset, count);
}
}
public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback? callback, object? state)
=> TaskToApm.Begin(WriteAsync(buffer, offset, count, CancellationToken.None), callback, state);
public override void EndWrite(IAsyncResult asyncResult)
=> TaskToApm.End(asyncResult);
public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
=> await WriteAsync(buffer.AsMemory(offset, count), cancellationToken);
public override async ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken)
{
OnWrite();
if (_compressionStream != null)
{
await _compressionStream.WriteAsync(buffer, cancellationToken);
if (_autoFlush)
{
await _compressionStream.FlushAsync(cancellationToken);
}
}
else
{
await _innerStream.WriteAsync(buffer, cancellationToken);
}
}
/// <summary>
/// Checks if the response should be compressed and sets the response headers.
/// </summary>
/// <returns>The compression provider to use if compression is enabled, otherwise null.</returns>
private ICompressionProvider? InitializeCompressionHeaders()
{
if (_provider.ShouldCompressResponse(_context))
{
var headers = _context.Response.Headers;
// If the MIME type indicates that the response could be compressed, caches will need to vary by the Accept-Encoding header
var varyValues = headers.GetCommaSeparatedValues(HeaderNames.Vary);
var varyByAcceptEncoding = false;
for (var i = 0; i < varyValues.Length; i++)
{
if (string.Equals(varyValues[i], HeaderNames.AcceptEncoding, StringComparison.OrdinalIgnoreCase))
{
varyByAcceptEncoding = true;
break;
}
}
if (!varyByAcceptEncoding)
{
// Can't use += as StringValues does not override operator+
// and the implicit conversions will cause an incorrect string concat https://github.com/dotnet/runtime/issues/52507
headers.Vary = StringValues.Concat(headers.Vary, HeaderNames.AcceptEncoding);
}
var compressionProvider = ResolveCompressionProvider();
if (compressionProvider != null)
{
// Can't use += as StringValues does not override operator+
// and the implicit conversions will cause an incorrect string concat https://github.com/dotnet/runtime/issues/52507
headers.ContentEncoding = StringValues.Concat(headers.ContentEncoding, compressionProvider.EncodingName);
headers.ContentMD5 = default; // Reset the MD5 because the content changed.
headers.ContentLength = default;
}
return compressionProvider;
}
return null;
}
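// Runs once, on the first write or flush: decides whether the response should be compressed
// and, if so, wraps the inner stream in the selected provider's compression stream.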
private void OnWrite()
{
if (!_compressionChecked)
{
_compressionChecked = true;
var compressionProvider = InitializeCompressionHeaders();
if (compressionProvider != null)
{
_compressionStream = compressionProvider.CreateStream(_innerStream);
}
}
}
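// Caches the provider lookup (including a null result) so the registered providers are only
// consulted once per response.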
private ICompressionProvider? ResolveCompressionProvider()
{
if (!_providerCreated)
{
_providerCreated = true;
_compressionProvider = _provider.GetCompressionProvider(_context);
}
return _compressionProvider;
}
// For this to be effective it needs to be called before the first write.
public void DisableBuffering()
{
if (ResolveCompressionProvider()?.SupportsFlush == false)
{
// Don't compress, some of the providers don't implement Flush (e.g. .NET 4.5.1 GZip/Deflate stream)
// which would block real-time responses like SignalR.
_compressionChecked = true;
}
else
{
_autoFlush = true;
}
_innerBodyFeature.DisableBuffering();
}
public Task SendFileAsync(string path, long offset, long? count, CancellationToken cancellation)
{
OnWrite();
if (_compressionStream != null)
{
return SendFileFallback.SendFileAsync(Stream, path, offset, count, cancellation);
}
return _innerBodyFeature.SendFileAsync(path, offset, count, cancellation);
}
public Task StartAsync(CancellationToken token = default)
{
OnWrite();
return _innerBodyFeature.StartAsync(token);
}
public async Task CompleteAsync()
{
if (_complete)
{
return;
}
await FinishCompressionAsync(); // Sets _complete
await _innerBodyFeature.CompleteAsync();
}
}
}
| |
using Lucene.Net.Attributes;
using NUnit.Framework;
using System;
using System.Collections.Generic;
namespace Lucene.Net.Support
{
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
[TestFixture]
public class TestHashMap
{
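// These tests pin down the Java-like semantics of HashMap<TKey, TValue>: null keys are allowed,
// and the indexer returns null (default) for missing keys instead of throwing.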
protected virtual HashMap<TKey, TValue> GetNewHashMap<TKey, TValue>()
{
return new HashMap<TKey, TValue>();
}
private HashMap<string,string> GetDefaultHashMap1()
{
var hm = GetNewHashMap<string, string>();
hm.Add("key1", "value1");
hm.Add("key2", "value2");
return hm;
}
[Test, LuceneNetSpecific]
public virtual void TestKeyEnumeration()
{
var keys = new List<string> {"key1", "key2"};
var dict = GetDefaultHashMap1();
foreach (var key in dict.Keys)
{
Assert.IsTrue(keys.Contains(key));
}
keys.Add(null);
dict[null] = "nullvalue";
foreach (var key in dict.Keys)
{
Assert.IsTrue(keys.Contains(key));
}
}
[Test, LuceneNetSpecific]
public virtual void TestValueEnumeration()
{
var values = new List<string> { "value1", "value2" };
var dict = GetDefaultHashMap1();
foreach (var value in dict.Values)
{
Assert.IsTrue(values.Contains(value));
}
values.Add("nullvalue");
dict[null] = "nullvalue";
foreach (var value in dict.Values)
{
Assert.IsTrue(values.Contains(value));
}
}
[Test, LuceneNetSpecific]
public virtual void TestKeyValuePairEnumeration()
{
var dict = GetDefaultHashMap1();
Action<KeyValuePair<string, string>> act = kvp =>
{
Assert.IsNotNull(kvp);
if (kvp.Key == "key1")
{
Assert.AreEqual("value1", kvp.Value);
}
else if (kvp.Key == "key2")
{
Assert.AreEqual("value2", kvp.Value);
}
else if (kvp.Key == null)
{
Assert.AreEqual("nullval", kvp.Value);
}
};
foreach (var kvp in dict)
{
act.Invoke(kvp);
}
dict.Add(null, "nullval");
foreach (var kvp in dict)
{
act.Invoke(kvp);
}
}
[Test, LuceneNetSpecific]
public virtual void TestContainsNullKey()
{
var dict = GetDefaultHashMap1();
Assert.IsFalse(dict.ContainsKey(null));
Assert.IsNull(dict[null]);
dict.Add(null, "value");
Assert.IsTrue(dict.ContainsKey(null));
Assert.AreEqual("value", dict[null]);
}
[Test, LuceneNetSpecific]
public virtual void TestContainsKey()
{
var dict = GetDefaultHashMap1();
Assert.IsTrue(dict.ContainsKey("key1"));
Assert.IsTrue(dict.ContainsKey("key2"));
}
[Test, LuceneNetSpecific]
public virtual void TestAdd_NoNullKeys_NullValues()
{
var dict = GetNewHashMap<string, string>();
dict.Add("key1", null);
dict.Add("key2", "value2");
Assert.AreEqual(2, dict.Count);
}
[Test, LuceneNetSpecific]
public virtual void TestAdd_WithNullKeys_NoNullValues()
{
var dict = GetNewHashMap<string, string>();
dict.Add("key1", "value1");
dict.Add(null, "nullValue");
Assert.AreEqual(2, dict.Count);
}
[Test, LuceneNetSpecific]
public virtual void TestGetWithNonExistantKey_EmptyCollection()
{
var dict = GetNewHashMap<string, string>();
Assert.IsNull(dict["nothing"]);
Assert.IsNull(dict[null]);
}
[Test, LuceneNetSpecific]
public virtual void TestGetWithNonExistantKey()
{
var dict = GetDefaultHashMap1();
Assert.IsNull(dict["nothing"]);
Assert.IsNull(dict[null]);
}
[Test, LuceneNetSpecific]
public virtual void TestAddsUpdate_NotThrowException()
{
var dict = GetNewHashMap<string, string>();
dict.Add("key1", "value1");
Assert.AreEqual("value1", dict["key1"]);
Assert.AreEqual(1, dict.Count);
dict.Add("key1", "value2");
Assert.AreEqual("value2", dict["key1"], "Value was not updated by Add!");
Assert.AreEqual(1, dict.Count);
}
[Test, LuceneNetSpecific]
public virtual void TestIndexersUpdate_NotThrowException()
{
var dict = GetNewHashMap<string, string>();
dict["key1"] = "value1";
Assert.AreEqual("value1", dict["key1"]);
Assert.AreEqual(1, dict.Count);
dict["key1"] = "value2";
Assert.AreEqual("value2", dict["key1"], "Value was not updated by Add!");
Assert.AreEqual(1, dict.Count);
}
[Test, LuceneNetSpecific]
public virtual void TestWithValueType()
{
// Make sure default value types are stored in the internal dictionary
// and not the _nullValue variable
var dict = GetNewHashMap<int, string>();
dict[2] = "MyString";
dict[0] = "OtherString";
Assert.AreEqual("MyString", dict[2]);
Assert.AreEqual("OtherString", dict[0]);
Assert.AreEqual(2, dict.Count);
Assert.AreEqual(2, dict.Count, "0 (default(int)) was not stored in internal dict!");
}
}
}
| |
using System;
namespace TestFu.Grammars
{
/// <summary>
/// Static helper class for creating rules.
/// </summary>
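/// <remarks>
/// For example, given <see cref="IRule"/> instances A, B and C, the EBNF production
/// A (B | C)* can be composed as <c>Rules.Seq(A, Rules.Kleene(Rules.Alt(B, C)))</c>.
/// </remarks>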
public sealed class Rules
{
#region Constructor
private Rules()
{}
#endregion
/// <summary>
/// Creates an alternative of rules.
/// </summary>
/// <param name="rules">
/// Set of rules to choose from.
/// </param>
/// <remarks>
/// <code>
/// [EBNF]
/// rule := A | B | C
///
/// [C#]
/// IRule rule = Rules.Alt(A,B,C);
/// </code>
/// </remarks>
/// <returns>
/// An <see cref="AlternativeRule"/> instance implementing
/// the alternative choice between the rules.
/// </returns>
public static AlternativeRule Alt(params IRule[] rules)
{
AlternativeRule alt = new AlternativeRule();
foreach(IRule rule in rules)
{
alt.Rules.Add(rule);
}
return alt;
}
/// <summary>
/// Creates a weighted alternative of rules.
/// </summary>
/// <param name="rules">
/// Set of rules to choose from.
/// </param>
/// <remarks>
/// <para>
/// The <see cref="IRule.Weight"/> property of each rule is used to
/// weight the probability of choosing that rule.
/// </para>
/// <code>
/// [EBNF]
/// rule := A | B | C where A is chosen with P(A)=A.Weight / ABC.Weight
/// and ABC.Weight = A.Weight + B.Weight + C.Weight
///
/// [C#]
/// IRule rule = Rules.WeightedAlt(A,B,C);
/// </code>
/// </remarks>
/// <returns>
/// An <see cref="AlternativeRule"/> instance implementing
/// the alternative choice between the rules.
/// </returns>
public static AlternativeRule WeightedAlt(params IRule[] rules)
{
AlternativeRule alt = Alt(rules);
alt.Selector = new WeightedRandomRuleSelector();
return alt;
}
/// <summary>
/// Creates a sequence of rules.
/// </summary>
/// <param name="rules">
/// Set of rules to execute in sequence.
/// </param>
/// <remarks>
/// <code>
/// [EBNF]
/// rule := A B C
///
/// [C#]
/// IRule rule = Rules.Seq(A,B,C);
/// </code>
/// </remarks>
/// <returns>
/// A <see cref="SequenceRule"/> instance implementing
/// the sequence of rules.
/// </returns>
public static SequenceRule Seq(params IRule[] rules)
{
SequenceRule seq = new SequenceRule();
foreach(IRule rule in rules)
{
seq.Rules.Add(rule);
}
return seq;
}
/// <summary>
/// Creates an optional rule.
/// </summary>
/// <param name="rule">
/// Rule to execute optionally.
/// </param>
/// <remarks>
/// <code>
/// [EBNF]
/// rule := A?
///
/// [C#]
/// IRule rule = Rules.Opt(A);
/// </code>
/// </remarks>
/// <returns>
/// A <see cref="RepetitionRule"/> instance implementing
/// the ? operator.
/// </returns>
public static RepetitionRule Opt(IRule rule)
{
return Repetition(rule, 0, 1);
}
/// <summary>
/// Creates a rule to be executed one or more times.
/// </summary>
/// <param name="rule">
/// Rule to be executed.
/// </param>
/// <remarks>
/// <code>
/// [EBNF]
/// rule := A+
///
/// [C#]
/// IRule rule = Rules.Pos(A);
/// </code>
/// </remarks>
/// <returns>
/// A <see cref="RepetitionRule"/> instance implementing
/// the + operator.
/// </returns>
public static RepetitionRule Pos(IRule rule)
{
return Repetition(rule, 1, int.MaxValue);
}
/// <summary>
/// Creates a rule to be executed zero or more times.
/// </summary>
/// <param name="rule">
/// Rule to be executed.
/// </param>
/// <remarks>
/// <code>
/// [EBNF]
/// rule := A*
///
/// [C#]
/// IRule rule = Rules.Kleene(A);
/// </code>
/// </remarks>
/// <returns>
/// A <see cref="RepetitionRule"/> instance implementing
/// the * operator.
/// </returns>
public static RepetitionRule Kleene(IRule rule)
{
return Repetition(rule, 0, int.MaxValue);
}
/// <summary>
/// Creates a rule to be executed between <paramref name="minOccurence"/>
/// and <paramref name="maxOccurence"/> times.
/// </summary>
/// <param name="rule">
/// Rule to be executed.
/// </param>
/// <remarks>
/// <code>
/// [EBNF]
/// rule := A{m,n}
///
/// [C#]
/// IRule rule = Rules.Repetition(A,m,n);
/// </code>
/// </remarks>
/// <param name="minOccurence">
/// minimum number of executions of <paramref name="rule"/>
/// </param>
/// <param name="maxOccurence">
/// maximum number of executions of <paramref name="rule"/>
/// </param>
/// <returns>
/// A <see cref="RepetitionRule"/> instance implementing
/// the {m,n} operator.
/// </returns>
public static RepetitionRule Repetition(IRule rule, int minOccurence, int maxOccurence)
{
return new RepetitionRule(rule, minOccurence, maxOccurence);
}
/// <summary>
/// Creates an <see cref="IRule"/> that executes an <see cref="EventHandler"/>.
/// </summary>
/// <param name="handler">
/// <see cref="EventHandler"/> to execute
/// </param>
/// <returns>
/// <see cref="EventHandlerRule"/> instance that contains <paramref name="handler"/>
/// </returns>
public static EventHandlerRule EventHandler(EventHandler handler)
{
return new EventHandlerRule(handler);
}
/// <summary>
/// Creates an <see cref="IRule"/> that executes a <see cref="MethodInvoker"/>.
/// </summary>
/// <param name="del">
/// <see cref="MethodInvoker"/> to execute
/// </param>
/// <returns>
/// <see cref="MethodInvokerRule"/> instance that contains
/// <paramref name="del"/>
/// </returns>
public static MethodInvokerRule Method(MethodInvoker del)
{
return new MethodInvokerRule(del);
}
/// <summary>
/// Creates an <see cref="IRule"/> that executes a <see cref="ProductionTokenDelegate"/>.
/// </summary>
/// <param name="del">
/// <see cref="ProductionTokenDelegate"/> to execute
/// </param>
/// <returns>
/// <see cref="ProductionTokenDelegateRule"/> instance that contains
/// <paramref name="del"/>
/// </returns>
public static ProductionTokenDelegateRule Method(ProductionTokenDelegate del)
{
return new ProductionTokenDelegateRule(del);
}
/// <summary>
/// Guards the execution of an <see cref="IRule"/> against an expected
/// <see cref="Exception"/> type.
/// </summary>
/// <param name="rule">
/// <see cref="IRule"/> instance to guard.
/// </param>
/// <param name="exceptionType">
/// Exception type expected to be thrown when <paramref name="rule"/> is executed
/// </param>
/// <returns>
/// A <see cref="GuardedRule"/> instance guarding <paramref name="rule"/>
/// </returns>
public static GuardedRule Guard(IRule rule, Type exceptionType)
{
return new GuardedRule(rule, exceptionType);
}
/// <summary>
/// Creates a conditional rule with "if" rule.
/// </summary>
/// <param name="cond">
/// Condition expression
/// </param>
/// <param name="rule">
/// <see cref="IRule"/> to execute if condition is true.
/// </param>
/// <returns>
/// A <see cref="ConditionalRule"/> implementing condition rule execution.
/// </returns>
public static ConditionalRule If(IPredicate cond, IRule rule)
{
return new ConditionalRule(cond,rule);
}
/// <summary>
/// Creates a conditional rule with "if" rule and "else" rule.
/// </summary>
/// <param name="cond">
/// Condition expression
/// </param>
/// <param name="rule">
/// <see cref="IRule"/> to execute if condition is true.
/// </param>
/// <param name="elseRule">
/// <see cref="IRule"/> to execute if condition is false.
/// </param>
/// <returns>
/// A <see cref="ConditionalRule"/> implementing condition rule execution.
/// </returns>
public static ConditionalRule If(IPredicate cond, IRule rule, IRule elseRule)
{
return new ConditionalRule(cond,rule,elseRule);
}
/// <summary>
/// Creates a conditional rule with "if" rule and "else" rule.
/// </summary>
/// <param name="cond">
/// Condition expression
/// </param>
/// <param name="rule">
/// <see cref="IRule"/> to execute if condition is true.
/// </param>
/// <param name="elseRule">
/// <see cref="IRule"/> to execute if condition is false.
/// </param>
/// <returns>
/// A <see cref="ConditionalRule"/> implementing condition rule execution.
/// </returns>
public static ConditionalRule If(ConditionDelegate cond, IRule rule, IRule elseRule)
{
return new ConditionalRule(Predicates.If(cond),rule,elseRule);
}
/// <summary>
/// Creates a conditional rule with "if" rule.
/// </summary>
/// <param name="cond">
/// Condition expression
/// </param>
/// <param name="rule">
/// <see cref="IRule"/> to execute if condition is true.
/// </param>
/// <returns>
/// A <see cref="ConditionalRule"/> implementing condition rule execution.
/// </returns>
public static ConditionalRule If(ConditionDelegate cond, IRule rule)
{
return new ConditionalRule(Predicates.If(cond),rule);
}
}
}
| |
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
using System.Collections.Specialized;
using System.Linq;
using Avalonia.Collections;
using Avalonia.Controls.Presenters;
using Avalonia.Controls.Templates;
using Avalonia.LogicalTree;
using Avalonia.VisualTree;
using Xunit;
namespace Avalonia.Controls.UnitTests
{
public class ItemsControlTests
{
[Fact]
public void Should_Use_ItemTemplate_To_Create_Control()
{
var target = new ItemsControl
{
Template = GetTemplate(),
ItemTemplate = new FuncDataTemplate<string>(_ => new Canvas()),
};
target.Items = new[] { "Foo" };
target.ApplyTemplate();
target.Presenter.ApplyTemplate();
var container = (ContentPresenter)target.Presenter.Panel.Children[0];
container.UpdateChild();
Assert.IsType<Canvas>(container.Child);
}
[Fact]
public void Panel_Should_Have_TemplatedParent_Set_To_ItemsControl()
{
var target = new ItemsControl();
target.Template = GetTemplate();
target.Items = new[] { "Foo" };
target.ApplyTemplate();
target.Presenter.ApplyTemplate();
Assert.Equal(target, target.Presenter.Panel.TemplatedParent);
}
[Fact]
public void Container_Should_Have_TemplatedParent_Set_To_Null()
{
var target = new ItemsControl();
target.Template = GetTemplate();
target.Items = new[] { "Foo" };
target.ApplyTemplate();
target.Presenter.ApplyTemplate();
var container = (ContentPresenter)target.Presenter.Panel.Children[0];
Assert.Null(container.TemplatedParent);
}
[Fact]
public void Control_Item_Should_Be_Logical_Child_Before_ApplyTemplate()
{
var target = new ItemsControl();
var child = new Control();
target.Template = GetTemplate();
target.Items = new[] { child };
Assert.Equal(child.Parent, target);
Assert.Equal(child.GetLogicalParent(), target);
Assert.Equal(new[] { child }, target.GetLogicalChildren());
}
[Fact]
public void Control_Item_Should_Be_Removed_From_Logical_Children_Before_ApplyTemplate()
{
var target = new ItemsControl();
var child = new Control();
var items = new AvaloniaList<Control>(child);
target.Template = GetTemplate();
target.Items = items;
items.RemoveAt(0);
Assert.Null(child.Parent);
Assert.Null(child.GetLogicalParent());
Assert.Empty(target.GetLogicalChildren());
}
[Fact]
public void Clearing_Items_Should_Clear_Child_Controls_Parent_Before_ApplyTemplate()
{
var target = new ItemsControl();
var child = new Control();
target.Template = GetTemplate();
target.Items = new[] { child };
target.Items = null;
Assert.Null(child.Parent);
Assert.Null(((ILogical)child).LogicalParent);
}
[Fact]
public void Clearing_Items_Should_Clear_Child_Controls_Parent()
{
var target = new ItemsControl();
var child = new Control();
target.Template = GetTemplate();
target.Items = new[] { child };
target.ApplyTemplate();
target.Items = null;
Assert.Null(child.Parent);
Assert.Null(((ILogical)child).LogicalParent);
}
[Fact]
public void Adding_Control_Item_Should_Make_Control_Appear_In_LogicalChildren()
{
var target = new ItemsControl();
var child = new Control();
target.Template = GetTemplate();
target.Items = new[] { child };
// Should appear both before and after applying template.
Assert.Equal(new ILogical[] { child }, target.GetLogicalChildren());
target.ApplyTemplate();
Assert.Equal(new ILogical[] { child }, target.GetLogicalChildren());
}
[Fact]
public void Adding_String_Item_Should_Make_ContentPresenter_Appear_In_LogicalChildren()
{
var target = new ItemsControl();
var child = new Control();
target.Template = GetTemplate();
target.Items = new[] { "Foo" };
target.ApplyTemplate();
target.Presenter.ApplyTemplate();
var logical = (ILogical)target;
Assert.Equal(1, logical.LogicalChildren.Count);
Assert.IsType<ContentPresenter>(logical.LogicalChildren[0]);
}
[Fact]
public void Setting_Items_To_Null_Should_Remove_LogicalChildren()
{
var target = new ItemsControl();
var child = new Control();
target.Template = GetTemplate();
target.Items = new[] { "Foo" };
target.ApplyTemplate();
target.Presenter.ApplyTemplate();
Assert.NotEmpty(target.GetLogicalChildren());
target.Items = null;
Assert.Equal(new ILogical[0], target.GetLogicalChildren());
}
[Fact]
public void Setting_Items_Should_Fire_LogicalChildren_CollectionChanged()
{
var target = new ItemsControl();
var child = new Control();
var called = false;
target.Template = GetTemplate();
target.ApplyTemplate();
((ILogical)target).LogicalChildren.CollectionChanged += (s, e) =>
called = e.Action == NotifyCollectionChangedAction.Add;
target.Items = new[] { child };
Assert.True(called);
}
[Fact]
public void Setting_Items_To_Null_Should_Fire_LogicalChildren_CollectionChanged()
{
var target = new ItemsControl();
var child = new Control();
var called = false;
target.Template = GetTemplate();
target.Items = new[] { child };
target.ApplyTemplate();
((ILogical)target).LogicalChildren.CollectionChanged += (s, e) =>
called = e.Action == NotifyCollectionChangedAction.Remove;
target.Items = null;
Assert.True(called);
}
[Fact]
public void Changing_Items_Should_Fire_LogicalChildren_CollectionChanged()
{
var target = new ItemsControl();
var child = new Control();
var called = false;
target.Template = GetTemplate();
target.Items = new[] { child };
target.ApplyTemplate();
((ILogical)target).LogicalChildren.CollectionChanged += (s, e) => called = true;
target.Items = new[] { "Foo" };
Assert.True(called);
}
[Fact]
public void Adding_Items_Should_Fire_LogicalChildren_CollectionChanged()
{
var target = new ItemsControl();
var items = new AvaloniaList<string> { "Foo" };
var called = false;
target.Template = GetTemplate();
target.Items = items;
target.ApplyTemplate();
target.Presenter.ApplyTemplate();
((ILogical)target).LogicalChildren.CollectionChanged += (s, e) =>
called = e.Action == NotifyCollectionChangedAction.Add;
items.Add("Bar");
Assert.True(called);
}
[Fact]
public void Removing_Items_Should_Fire_LogicalChildren_CollectionChanged()
{
var target = new ItemsControl();
var items = new AvaloniaList<string> { "Foo", "Bar" };
var called = false;
target.Template = GetTemplate();
target.Items = items;
target.ApplyTemplate();
target.Presenter.ApplyTemplate();
((ILogical)target).LogicalChildren.CollectionChanged += (s, e) =>
called = e.Action == NotifyCollectionChangedAction.Remove;
items.Remove("Bar");
Assert.True(called);
}
[Fact]
public void LogicalChildren_Should_Not_Change_Instance_When_Template_Changed()
{
var target = new ItemsControl()
{
Template = GetTemplate(),
};
var before = ((ILogical)target).LogicalChildren;
target.Template = null;
target.Template = GetTemplate();
var after = ((ILogical)target).LogicalChildren;
Assert.NotNull(before);
Assert.NotNull(after);
Assert.Same(before, after);
}
[Fact]
public void Empty_Class_Should_Initially_Be_Applied()
{
var target = new ItemsControl()
{
Template = GetTemplate(),
};
Assert.True(target.Classes.Contains(":empty"));
}
[Fact]
public void Empty_Class_Should_Be_Cleared_When_Items_Added()
{
var target = new ItemsControl()
{
Template = GetTemplate(),
Items = new[] { 1, 2, 3 },
};
Assert.False(target.Classes.Contains(":empty"));
}
[Fact]
public void Empty_Class_Should_Be_Set_When_Empty_Collection_Set()
{
var target = new ItemsControl()
{
Template = GetTemplate(),
Items = new[] { 1, 2, 3 },
};
target.Items = new int[0];
Assert.True(target.Classes.Contains(":empty"));
}
[Fact]
public void Setting_Presenter_Explicitly_Should_Set_Item_Parent()
{
var target = new TestItemsControl();
var child = new Control();
var presenter = new ItemsPresenter
{
TemplatedParent = target,
[~ItemsPresenter.ItemsProperty] = target[~ItemsControl.ItemsProperty],
};
presenter.ApplyTemplate();
target.Presenter = presenter;
target.Items = new[] { child };
target.ApplyTemplate();
Assert.Equal(target, child.Parent);
Assert.Equal(target, ((ILogical)child).LogicalParent);
}
[Fact]
public void DataContexts_Should_Be_Correctly_Set()
{
var items = new object[]
{
"Foo",
new Item("Bar"),
new TextBlock { Text = "Baz" },
new ListBoxItem { Content = "Qux" },
};
var target = new ItemsControl
{
Template = GetTemplate(),
DataContext = "Base",
DataTemplates = new DataTemplates
{
new FuncDataTemplate<Item>(x => new Button { Content = x })
},
Items = items,
};
target.ApplyTemplate();
target.Presenter.ApplyTemplate();
var dataContexts = target.Presenter.Panel.Children
.Do(x => (x as ContentPresenter)?.UpdateChild())
.Cast<Control>()
.Select(x => x.DataContext)
.ToList();
Assert.Equal(
new object[] { items[0], items[1], "Base", "Base" },
dataContexts);
}
[Fact]
public void MemberSelector_Should_Select_Member()
{
var target = new ItemsControl
{
Template = GetTemplate(),
Items = new[] { new Item("Foo"), new Item("Bar") },
MemberSelector = new FuncMemberSelector<Item, string>(x => x.Value),
};
target.ApplyTemplate();
target.Presenter.ApplyTemplate();
var text = target.Presenter.Panel.Children
.Cast<ContentPresenter>()
.Select(x => x.Content)
.ToList();
Assert.Equal(new[] { "Foo", "Bar" }, text);
}
[Fact]
public void Control_Item_Should_Not_Be_NameScope()
{
var items = new object[]
{
new TextBlock(),
};
var target = new ItemsControl
{
Template = GetTemplate(),
Items = items,
};
target.ApplyTemplate();
target.Presenter.ApplyTemplate();
var item = target.Presenter.Panel.LogicalChildren[0];
Assert.Null(NameScope.GetNameScope((TextBlock)item));
}
[Fact]
public void DataTemplate_Created_Content_Should_Be_NameScope()
{
var items = new object[]
{
"foo",
};
var target = new ItemsControl
{
Template = GetTemplate(),
Items = items,
};
target.ApplyTemplate();
target.Presenter.ApplyTemplate();
var container = (ContentPresenter)target.Presenter.Panel.LogicalChildren[0];
container.UpdateChild();
Assert.NotNull(NameScope.GetNameScope((TextBlock)container.Child));
}
private class Item
{
public Item(string value)
{
Value = value;
}
public string Value { get; }
}
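// Minimal control template used by these tests: a Border hosting an ItemsPresenter whose
// Items property is bound to the templated ItemsControl's Items.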
private FuncControlTemplate GetTemplate()
{
return new FuncControlTemplate<ItemsControl>(parent =>
{
return new Border
{
Background = new Media.SolidColorBrush(0xffffffff),
Child = new ItemsPresenter
{
Name = "PART_ItemsPresenter",
MemberSelector = parent.MemberSelector,
[~ItemsPresenter.ItemsProperty] = parent[~ItemsControl.ItemsProperty],
}
};
});
}
private class TestItemsControl : ItemsControl
{
public new IItemsPresenter Presenter
{
get { return base.Presenter; }
set { base.Presenter = value; }
}
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// Licensed under the MIT License. See LICENSE.txt in the project root for license information.
using Microsoft.Graphics.Canvas;
using Microsoft.Graphics.Canvas.Effects;
using Microsoft.Graphics.Canvas.Text;
using Microsoft.Graphics.Canvas.UI.Xaml;
using System;
using Windows.Foundation;
using Windows.UI;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
namespace ExampleGallery
{
public sealed class GlowTextCustomControl : UserControl
{
public string Text
{
get { return (string)GetValue(TextProperty); }
set { SetValue(TextProperty, value); }
}
public Color TextColor
{
get { return (Color)GetValue(TextColorProperty); }
set { SetValue(TextColorProperty, value); }
}
public double GlowAmount
{
get { return (double)GetValue(GlowAmountProperty); }
set { SetValue(GlowAmountProperty, value); }
}
public double MaxGlowAmount
{
get { return (double)GetValue(MaxGlowAmountProperty); }
set { SetValue(MaxGlowAmountProperty, value); }
}
public Color GlowColor
{
get { return (Color)GetValue(GlowColorProperty); }
set { SetValue(GlowColorProperty, value); }
}
public static readonly DependencyProperty TextProperty =
DependencyProperty.Register(
"Text",
typeof(string),
typeof(GlowTextCustomControl),
new PropertyMetadata("", new PropertyChangedCallback(OnPropertyChanged)));
public static readonly DependencyProperty TextColorProperty =
DependencyProperty.Register(
"TextColor",
typeof(Color),
typeof(GlowTextCustomControl),
new PropertyMetadata(Colors.White, new PropertyChangedCallback(OnPropertyChanged)));
public static readonly DependencyProperty GlowAmountProperty =
DependencyProperty.Register(
"GlowAmount",
typeof(double),
typeof(GlowTextCustomControl),
new PropertyMetadata(5.0, new PropertyChangedCallback(OnPropertyChanged)));
public static readonly DependencyProperty MaxGlowAmountProperty =
DependencyProperty.Register(
"MaxGlowAmount",
typeof(double),
typeof(GlowTextCustomControl),
new PropertyMetadata(5.0, new PropertyChangedCallback(OnPropertyChanged)));
public static readonly DependencyProperty GlowColorProperty =
DependencyProperty.Register(
"GlowColor",
typeof(Color),
typeof(GlowTextCustomControl),
new PropertyMetadata(Colors.Green, new PropertyChangedCallback(OnPropertyChanged)));
private GlowEffectGraph glowEffectGraph = new GlowEffectGraph();
private CanvasControl canvas;
public GlowTextCustomControl()
{
Loaded += UserControl_Loaded;
Unloaded += UserControl_Unloaded;
}
private void UserControl_Loaded(object sender, RoutedEventArgs e)
{
canvas = new CanvasControl();
if (ThumbnailGenerator.IsDrawingThumbnail)
canvas.ClearColor = Colors.Black;
canvas.Draw += OnDraw;
Content = canvas;
}
private void UserControl_Unloaded(object sender, RoutedEventArgs e)
{
// Explicitly remove references to allow the Win2D controls to get garbage collected
if (canvas != null)
{
canvas.RemoveFromVisualTree();
canvas = null;
}
}
private static void OnPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
var instance = d as GlowTextCustomControl;
if (instance == null)
return;
if (instance.canvas != null)
{
instance.canvas.Invalidate();
instance.InvalidateMeasure();
}
}
// This is the amount that we grow the desired size by (to account for the glow)
private double ExpandAmount { get { return Math.Max(GlowAmount, MaxGlowAmount) * 4; } }
protected override Size MeasureOverride(Size availableSize)
{
// CanvasTextLayout cannot cope with infinite sizes, so we change
// infinite to some-large-value.
if (double.IsInfinity(availableSize.Width))
availableSize.Width = 6000;
if (double.IsInfinity(availableSize.Height))
availableSize.Height = 6000;
var device = CanvasDevice.GetSharedDevice(false);
var layout = CreateTextLayout(device, availableSize);
var bounds = layout.LayoutBounds;
return new Size(Math.Min(availableSize.Width, bounds.Width + ExpandAmount), Math.Min(availableSize.Height, bounds.Height + ExpandAmount));
}
private void OnDraw(CanvasControl sender, CanvasDrawEventArgs args)
{
DoEffect(args.DrawingSession, sender.Size, (float)GlowAmount);
}
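// Draws the text into a command list, runs it through the glow effect graph, then draws the
// resulting glow and the sharp text on top of it at the same offset.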
private void DoEffect(CanvasDrawingSession ds, Size size, float amount)
{
size.Width = size.Width - ExpandAmount;
size.Height = size.Height - ExpandAmount;
var offset = (float)(ExpandAmount / 2);
using (var textLayout = CreateTextLayout(ds, size))
using (var textCommandList = new CanvasCommandList(ds))
{
using (var textDs = textCommandList.CreateDrawingSession())
{
textDs.DrawTextLayout(textLayout, 0, 0, GlowColor);
}
glowEffectGraph.Setup(textCommandList, amount);
ds.DrawImage(glowEffectGraph.Output, offset, offset);
ds.DrawTextLayout(textLayout, offset, offset, TextColor);
}
}
private CanvasTextLayout CreateTextLayout(ICanvasResourceCreator resourceCreator, Size size)
{
var format = new CanvasTextFormat()
{
HorizontalAlignment = GetCanvasHorizontalAlignemnt(),
VerticalAlignment = GetCanvasVerticalAlignment()
};
return new CanvasTextLayout(
resourceCreator,
Text,
format,
(float)size.Width,
(float)size.Height);
}
private CanvasHorizontalAlignment GetCanvasHorizontalAlignemnt()
{
switch (HorizontalContentAlignment)
{
case HorizontalAlignment.Center:
return CanvasHorizontalAlignment.Center;
case HorizontalAlignment.Left:
return CanvasHorizontalAlignment.Left;
case HorizontalAlignment.Right:
return CanvasHorizontalAlignment.Right;
default:
return CanvasHorizontalAlignment.Left;
}
}
private CanvasVerticalAlignment GetCanvasVerticalAlignment()
{
switch (VerticalContentAlignment)
{
case VerticalAlignment.Center:
return CanvasVerticalAlignment.Center;
case VerticalAlignment.Top:
return CanvasVerticalAlignment.Top;
case VerticalAlignment.Bottom:
return CanvasVerticalAlignment.Bottom;
default:
return CanvasVerticalAlignment.Top;
}
}
}
class GlowEffectGraph
{
public ICanvasImage Output
{
get
{
return blur;
}
}
MorphologyEffect morphology = new MorphologyEffect()
{
Mode = MorphologyEffectMode.Dilate,
Width = 1,
Height = 1
};
GaussianBlurEffect blur = new GaussianBlurEffect()
{
BlurAmount = 0,
BorderMode = EffectBorderMode.Soft
};
public GlowEffectGraph()
{
blur.Source = morphology;
}
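// Splits the glow amount between the dilate (which thickens the glyph edges) and the blur
// (which softens them); the per-stage amount is clamped to 100.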
public void Setup(ICanvasImage source, float amount)
{
morphology.Source = source;
var halfAmount = Math.Min(amount / 2, 100);
morphology.Width = (int)Math.Ceiling(halfAmount);
morphology.Height = (int)Math.Ceiling(halfAmount);
blur.BlurAmount = halfAmount;
}
}
}
|