context
stringlengths 2.52k
185k
| gt
stringclasses 1
value |
---|---|
//-----------------------------------------------------------------------
// <copyright file="Serialization.cs" company="Akka.NET Project">
// Copyright (C) 2009-2016 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Serialization;
using Akka.Actor;
using Akka.Util.Internal;
namespace Akka.Serialization
{
/// <summary>
/// Holds the transport details (remote address and owning actor system) that are
/// published thread-locally while a serialization call is in flight, so serializers
/// can consult them.
/// </summary>
public class Information
{
    /// <summary>The address of the transport performing the serialization.</summary>
    public Address Address { get; set; }

    /// <summary>The actor system the serialization is taking place in.</summary>
    public ActorSystem System { get; set; }
}
/// <summary>
/// Registry of serializers for an actor system. Serializers are looked up either by
/// numeric identifier (for deserialization) or by message type (for serialization),
/// and are initially populated from the "akka.actor.serializers" and
/// "akka.actor.serialization-bindings" configuration sections.
/// </summary>
public class Serialization
{
    // Thread-local transport information made visible to serializers while a
    // SerializeWithTransport call is in flight.
    [ThreadStatic] private static Information _currentTransportInformation;

    /// <summary>
    /// Runs <paramref name="action"/> with transport <see cref="Information"/> about the
    /// given system and address published in a thread-static slot.
    /// </summary>
    /// <typeparam name="T">Result type of the serialization action.</typeparam>
    /// <param name="system">The actor system performing the serialization.</param>
    /// <param name="address">The address of the transport in use.</param>
    /// <param name="action">The serialization work to run.</param>
    /// <returns>The value produced by <paramref name="action"/>.</returns>
    public static T SerializeWithTransport<T>(ActorSystem system, Address address, Func<T> action)
    {
        _currentTransportInformation = new Information()
        {
            System = system,
            Address = address
        };
        try
        {
            return action();
        }
        finally
        {
            // BUGFIX: clear the slot even when the action throws; previously an exception
            // left stale transport info visible to later serializations on this thread.
            _currentTransportInformation = null;
        }
    }

    private readonly Serializer _nullSerializer;

    // message type -> serializer (from serialization-bindings config and AddSerializationMap)
    private readonly Dictionary<Type, Serializer> _serializerMap = new Dictionary<Type, Serializer>();
    // serializer id -> serializer (from serializers config and AddSerializer)
    private readonly Dictionary<int, Serializer> _serializers = new Dictionary<int, Serializer>();

    /// <summary>
    /// Builds the serializer registry from the system configuration. Entries whose type
    /// names do not resolve are logged as warnings and skipped rather than failing startup.
    /// </summary>
    /// <param name="system">The actor system owning this serialization subsystem.</param>
    public Serialization(ExtendedActorSystem system)
    {
        System = system;
        _nullSerializer = new NullSerializer(system);
        _serializers.Add(_nullSerializer.Identifier, _nullSerializer);
        var serializersConfig = system.Settings.Config.GetConfig("akka.actor.serializers").AsEnumerable().ToList();
        var serializerBindingConfig = system.Settings.Config.GetConfig("akka.actor.serialization-bindings").AsEnumerable().ToList();
        var namedSerializers = new Dictionary<string, Serializer>();
        foreach (var kvp in serializersConfig)
        {
            var serializerTypeName = kvp.Value.GetString();
            var serializerType = Type.GetType(serializerTypeName);
            if (serializerType == null)
            {
                system.Log.Warning("The type name for serializer '{0}' did not resolve to an actual Type: '{1}'", kvp.Key, serializerTypeName);
                continue;
            }
            var serializer = (Serializer)Activator.CreateInstance(serializerType, system);
            _serializers.Add(serializer.Identifier, serializer);
            namedSerializers.Add(kvp.Key, serializer);
        }
        foreach (var kvp in serializerBindingConfig)
        {
            var typename = kvp.Key;
            var serializerName = kvp.Value.GetString();
            var messageType = Type.GetType(typename);
            if (messageType == null)
            {
                system.Log.Warning("The type name for message/serializer binding '{0}' did not resolve to an actual Type: '{1}'", serializerName, typename);
                continue;
            }
            // BUGFIX: Dictionary's indexer throws KeyNotFoundException for a missing key,
            // so the original null check could never fire; TryGetValue makes the
            // "non existing serializer" warning reachable instead of crashing startup.
            Serializer serializer;
            if (!namedSerializers.TryGetValue(serializerName, out serializer))
            {
                system.Log.Warning("Serialization binding to non existing serializer: '{0}'", serializerName);
                continue;
            }
            _serializerMap.Add(messageType, serializer);
        }
    }

    /// <summary>The actor system this serialization subsystem belongs to.</summary>
    public ActorSystem System { get; private set; }

    /// <summary>Registers a serializer under its <c>Identifier</c>.</summary>
    /// <param name="serializer">The serializer to register.</param>
    public void AddSerializer(Serializer serializer)
    {
        _serializers.Add(serializer.Identifier, serializer);
    }

    /// <summary>Binds messages of <paramref name="type"/> to <paramref name="serializer"/>.</summary>
    /// <param name="type">The message type to bind.</param>
    /// <param name="serializer">The serializer handling that type.</param>
    public void AddSerializationMap(Type type, Serializer serializer)
    {
        _serializerMap.Add(type, serializer);
    }

    /// <summary>
    /// Deserializes <paramref name="bytes"/> into an instance of <paramref name="type"/>
    /// using the serializer registered under <paramref name="serializerId"/>.
    /// </summary>
    /// <exception cref="SerializationException">If no serializer with that id is registered.</exception>
    public object Deserialize(byte[] bytes, int serializerId, Type type)
    {
        Serializer serializer;
        if (!_serializers.TryGetValue(serializerId, out serializer))
            throw new SerializationException(
                $"Cannot find serializer with id [{serializerId}]. The most probable reason" +
                " is that the configuration entry akka.actor.serializers is not in sync between the two systems.");
        return serializer.FromBinary(bytes, type);
    }

    /// <summary>
    /// Deserializes <paramref name="bytes"/> using a string <paramref name="manifest"/> to
    /// resolve the target type (either via a <see cref="SerializerWithStringManifest"/> or
    /// by treating the manifest as an assembly-qualified type name).
    /// </summary>
    /// <exception cref="SerializationException">
    /// If no serializer with that id is registered, or the manifest does not resolve to a type.
    /// </exception>
    public object Deserialize(byte[] bytes, int serializerId, string manifest)
    {
        Serializer serializer;
        if (!_serializers.TryGetValue(serializerId, out serializer))
            throw new SerializationException(
                $"Cannot find serializer with id [{serializerId}]. The most probable reason" +
                " is that the configuration entry akka.actor.serializers is not in sync between the two systems.");
        if (serializer is SerializerWithStringManifest)
            return ((SerializerWithStringManifest)serializer).FromBinary(bytes, manifest);
        if (string.IsNullOrEmpty(manifest))
            return serializer.FromBinary(bytes, null);
        Type type;
        try
        {
            // BUGFIX: throwOnError=true. Type.GetType(string) returns null (it does not
            // throw) for an unresolvable name, which previously bypassed this catch and
            // passed a null type on to FromBinary.
            type = Type.GetType(manifest, true);
        }
        catch
        {
            throw new SerializationException($"Cannot find manifest class [{manifest}] for serializer with id [{serializerId}].");
        }
        return serializer.FromBinary(bytes, type);
    }

    /// <summary>
    /// Returns the configured serializer for <paramref name="obj"/>'s runtime type;
    /// null values are handled by the dedicated null serializer.
    /// </summary>
    public Serializer FindSerializerFor(object obj)
    {
        if (obj == null)
            return _nullSerializer;
        Type type = obj.GetType();
        return FindSerializerForType(type);
    }

    //cache to eliminate lots of typeof operator calls
    private readonly Type _objectType = typeof (object);

    /// <summary>
    /// Finds a serializer whose bound type is assignable from <paramref name="objectType"/>.
    /// The binding for <see cref="object"/> is deliberately tried last so that more
    /// specific bindings win.
    /// </summary>
    /// <exception cref="Exception">If no binding matches the type.</exception>
    public Serializer FindSerializerForType(Type objectType)
    {
        Type type = objectType;
        //TODO: see if we can do a better job with proper type sorting here - most specific to least specific (object serializer goes last)
        foreach (var serializerType in _serializerMap)
        {
            //force deferral of the base "object" serializer until all other higher-level types have been evaluated
            if (serializerType.Key.IsAssignableFrom(type) && serializerType.Key != _objectType)
                return serializerType.Value;
        }
        //do a final check for the "object" serializer (TryGetValue avoids the ContainsKey + indexer double lookup)
        Serializer objectSerializer;
        if (_serializerMap.TryGetValue(_objectType, out objectSerializer) && _objectType.IsAssignableFrom(type))
            return objectSerializer;
        throw new Exception("Serializer not found for type " + objectType.Name);
    }

    /// <summary>
    /// Serializes an actor reference to a string path, using the thread-local transport
    /// information (when present) to pick the externally visible address.
    /// </summary>
    /// <param name="actorRef">The actor reference to serialize; NoSender becomes the empty string.</param>
    /// <returns>The serialized path string.</returns>
    public static string SerializedActorPath(IActorRef actorRef)
    {
        if (Equals(actorRef, ActorRefs.NoSender))
            return String.Empty;
        var path = actorRef.Path;
        ExtendedActorSystem originalSystem = null;
        if (actorRef is ActorRefWithCell)
        {
            originalSystem = actorRef.AsInstanceOf<ActorRefWithCell>().Underlying.System.AsInstanceOf<ExtendedActorSystem>();
        }
        if (_currentTransportInformation == null)
        {
            // No transport in flight: fall back to the ref's own system default address (if known).
            if (originalSystem == null)
            {
                var res = path.ToSerializationFormat();
                return res;
            }
            else
            {
                var defaultAddress = originalSystem.Provider.DefaultAddress;
                var res = path.ToStringWithAddress(defaultAddress);
                return res;
            }
        }
        //CurrentTransportInformation exists
        var system = _currentTransportInformation.System;
        var address = _currentTransportInformation.Address;
        if (originalSystem == null || originalSystem == system)
        {
            var res = path.ToStringWithAddress(address);
            return res;
        }
        else
        {
            // The ref belongs to a different system: ask its provider for the address
            // it is reachable at from the current transport.
            var provider = originalSystem.Provider;
            var res =
                path.ToStringWithAddress(provider.GetExternalAddressFor(address).GetOrElse(provider.DefaultAddress));
            return res;
        }
    }

    /// <summary>
    /// Returns the serializer registered under <paramref name="serializerId"/>.
    /// Throws <see cref="KeyNotFoundException"/> for unknown ids (behavior preserved).
    /// </summary>
    public Serializer GetSerializerById(int serializerId)
    {
        return _serializers[serializerId];
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Support;
using Lucene.Net.Util;
using IndexReader = Lucene.Net.Index.IndexReader;
using Term = Lucene.Net.Index.Term;
using BooleanClause = Lucene.Net.Search.BooleanClause;
using DefaultSimilarity = Lucene.Net.Search.DefaultSimilarity;
using TermQuery = Lucene.Net.Search.TermQuery;
using BooleanQuery = Lucene.Net.Search.BooleanQuery;
using IndexSearcher = Lucene.Net.Search.IndexSearcher;
using Query = Lucene.Net.Search.Query;
using Analyzer = Lucene.Net.Analysis.Analyzer;
using TokenStream = Lucene.Net.Analysis.TokenStream;
using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
using Document = Lucene.Net.Documents.Document;
using Lucene.Net.Analysis.Tokenattributes;
namespace Lucene.Net.Search.Similar
{
/// <summary> Generate "more like this" similarity queries.
/// Based on this mail:
/// <pre>
/// Lucene does let you access the document frequency of terms, with IndexReader.DocFreq().
/// Term frequencies can be computed by re-tokenizing the text, which, for a single document,
/// is usually fast enough. But looking up the DocFreq() of every term in the document is
/// probably too slow.
///
/// You can use some heuristics to prune the set of terms, to avoid calling DocFreq() too much,
/// or at all. Since you're trying to maximize a tf*idf score, you're probably most interested
/// in terms with a high tf. Choosing a tf threshold even as low as two or three will radically
/// reduce the number of terms under consideration. Another heuristic is that terms with a
/// high idf (i.e., a low df) tend to be longer. So you could threshold the terms by the
/// number of characters, not selecting anything less than, e.g., six or seven characters.
/// With these sorts of heuristics you can usually find small set of, e.g., ten or fewer terms
/// that do a pretty good job of characterizing a document.
///
/// It all depends on what you're trying to do. If you're trying to eek out that last percent
/// of precision and recall regardless of computational difficulty so that you can win a TREC
/// competition, then the techniques I mention above are useless. But if you're trying to
/// provide a "more like this" button on a search results page that does a decent job and has
/// good performance, such techniques might be useful.
///
/// An efficient, effective "more-like-this" query generator would be a great contribution, if
/// anyone's interested. I'd imagine that it would take a Reader or a String (the document's
/// text), analyzer Analyzer, and return a set of representative terms using heuristics like those
/// above. The frequency and length thresholds could be parameters, etc.
///
/// Doug
/// </pre>
///
///
/// <p/>
/// <h3>Initial Usage</h3>
///
/// This class has lots of options to try to make it efficient and flexible.
/// See the body of <see cref="Main"/> below in the source for real code, or
/// if you want pseudo code, the simplest possible usage is as follows. The bold
/// fragment is specific to this class.
///
/// <pre>
///
/// IndexReader ir = ...
/// IndexSearcher is = ...
/// <b>
/// MoreLikeThis mlt = new MoreLikeThis(ir);
/// Reader target = ... </b><em>// orig source of doc you want to find similarities to</em><b>
/// Query query = mlt.Like( target);
/// </b>
/// Hits hits = is.Search(query);
/// <em>// now the usual iteration thru 'hits' - the only thing to watch for is to make sure
/// you ignore the doc if it matches your 'target' document, as it should be similar to itself </em>
///
/// </pre>
///
/// Thus you:
/// <ol>
/// <li> do your normal, Lucene setup for searching,</li>
/// <li> create a MoreLikeThis,</li>
/// <li> get the text of the doc you want to find similarities to</li>
/// <li> then call one of the Like() calls to generate a similarity query</li>
/// <li> call the searcher to find the similar docs</li>
/// </ol>
///
/// <h3>More Advanced Usage</h3>
///
/// You may want to use <see cref="SetFieldNames"/> so you can examine
/// multiple fields (e.g. body and title) for similarity.
/// <p/>
///
/// Depending on the size of your index and the size and makeup of your documents you
/// may want to call the other set methods to control how the similarity queries are
/// generated:
/// <ul>
/// <li> <see cref="MinTermFreq"/> </li>
/// <li> <see cref="MinDocFreq"/> </li>
/// <li> <see cref="MaxDocFreq"/></li>
/// <li> <see cref="SetMaxDocFreqPct(int)"/></li>
/// <li> <see cref="MinWordLen"/> </li>
/// <li> <see cref="MaxWordLen"/></li>
/// <li> <see cref="MaxQueryTerms"/></li>
/// <li> <see cref="MaxNumTokensParsed"/></li>
/// <li> <see cref="SetStopWords(ISet{string})"/> </li>
/// </ul>
///
/// <hr/>
/// <pre>
/// Changes: Mark Harwood 29/02/04
/// Some bugfixing, some refactoring, some optimisation.
/// - bugfix: retrieveTerms(int docNum) was not working for indexes without a termvector -added missing code
/// - bugfix: No significant terms being created for fields with a termvector - because
/// was only counting one occurrence per term/field pair in calculations (i.e. not including frequency info from TermVector)
/// - refactor: moved common code into isNoiseWord()
/// - optimise: when no termvector support available - used maxNumTermsParsed to limit amount of tokenization
/// </pre>
/// </summary>
public sealed class MoreLikeThis
{
/// <summary> Default maximum number of tokens to parse in each example doc field that is not stored with TermVector support.</summary>
/// <seealso cref="MaxNumTokensParsed">
/// </seealso>
public const int DEFAULT_MAX_NUM_TOKENS_PARSED = 5000;
/// <summary> Default analyzer to parse source doc with.</summary>
/// <seealso cref="Analyzer">
/// </seealso>
public static readonly Analyzer DEFAULT_ANALYZER = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
/// <summary> Ignore terms with less than this frequency in the source doc.</summary>
/// <seealso cref="MinTermFreq">
/// </seealso>
public const int DEFAULT_MIN_TERM_FREQ = 2;
/// <summary> Ignore words which do not occur in at least this many docs.</summary>
/// <seealso cref="MinDocFreq">
/// </seealso>
public const int DEFAULT_MIN_DOC_FREQ = 5;
/// <summary>
/// Ignore words which occur in more than this many docs
/// </summary>
/// <seealso cref="MaxDocFreq"/>
public const int DEFAULT_MAX_DOC_FREQ = int.MaxValue;
/// <summary> Boost terms in query based on score.</summary>
/// <seealso cref="Boost">
/// </seealso>
public const bool DEFAULT_BOOST = false;
/// <summary> Default field names. Null is used to specify that the field names should be looked
/// up at runtime from the provided reader.
/// </summary>
public static readonly System.String[] DEFAULT_FIELD_NAMES = new System.String[] { "contents" };
/// <summary> Ignore words less than this length or if 0 then this has no effect.</summary>
/// <seealso cref="MinWordLen">
/// </seealso>
public const int DEFAULT_MIN_WORD_LENGTH = 0;
/// <summary> Ignore words greater than this length or if 0 then this has no effect.</summary>
/// <seealso cref="MaxWordLen">
/// </seealso>
public const int DEFAULT_MAX_WORD_LENGTH = 0;
/// <summary> Default set of stopwords.
/// If null means to allow stop words.
/// </summary>
/// <seealso cref="SetStopWords">
/// </seealso>
/// <seealso cref="GetStopWords">
/// </seealso>
public static readonly ISet<string> DEFAULT_STOP_WORDS = null;
/// <summary> Current set of stop words (null means stop words are allowed).</summary>
private ISet<string> stopWords = DEFAULT_STOP_WORDS;
/// <summary> Return a Query with no more than this many terms.
/// </summary>
/// <seealso cref="BooleanQuery.MaxClauseCount">
/// </seealso>
/// <seealso cref="MaxQueryTerms">
/// </seealso>
public const int DEFAULT_MAX_QUERY_TERMS = 25;
/// <summary> Analyzer that will be used to parse the doc.</summary>
private Analyzer analyzer = DEFAULT_ANALYZER;
/// <summary> Ignore words less frequent than this.</summary>
private int minTermFreq = DEFAULT_MIN_TERM_FREQ;
/// <summary> Ignore words which do not occur in at least this many docs.</summary>
private int minDocFreq = DEFAULT_MIN_DOC_FREQ;
/// <summary>
/// Ignore words which occur in more than this many docs.
/// </summary>
private int maxDocfreq = DEFAULT_MAX_DOC_FREQ;
/// <summary> Should we apply a boost to the Query based on the scores?</summary>
private bool boost = DEFAULT_BOOST;
/// <summary> Field names we'll analyze. Null means resolve the indexed fields from the IndexReader at query time.</summary>
private System.String[] fieldNames = DEFAULT_FIELD_NAMES;
/// <summary> The maximum number of tokens to parse in each example doc field that is not stored with TermVector support</summary>
private int maxNumTokensParsed = DEFAULT_MAX_NUM_TOKENS_PARSED;
/// <summary> Ignore words if less than this len.</summary>
private int minWordLen = DEFAULT_MIN_WORD_LENGTH;
/// <summary> Ignore words if greater than this len.</summary>
private int maxWordLen = DEFAULT_MAX_WORD_LENGTH;
/// <summary> Don't return a query longer than this.</summary>
private int maxQueryTerms = DEFAULT_MAX_QUERY_TERMS;
/// <summary> For idf() calculations.</summary>
private Lucene.Net.Search.Similarity similarity = null;
/// <summary> IndexReader to use</summary>
private IndexReader ir;
/// <summary> Boost factor to use when boosting the terms </summary>
private float boostFactor = 1;
/// <summary>
/// Gets or sets the boost factor used when boosting terms
/// (only applied when <see cref="Boost"/> is true).
/// </summary>
public float BoostFactor
{
get { return boostFactor; }
set { this.boostFactor = value; }
}
/// <summary> Constructor requiring an IndexReader. Uses a <c>DefaultSimilarity</c> for idf calculations.</summary>
/// <param name="ir">index reader used to look up document frequencies</param>
public MoreLikeThis(IndexReader ir) : this(ir,new DefaultSimilarity())
{
}
/// <summary> Constructor allowing a custom Similarity implementation for idf calculations.</summary>
/// <param name="ir">index reader used to look up document frequencies</param>
/// <param name="sim">similarity used when scoring candidate terms</param>
public MoreLikeThis(IndexReader ir, Lucene.Net.Search.Similarity sim)
{
this.ir = ir;
this.similarity = sim;
}
/// <summary> Gets or sets the similarity used for idf calculations when scoring terms.</summary>
public Similarity Similarity
{
get { return similarity; }
set { this.similarity = value; }
}
/// <summary> Gets or sets the analyzer used to parse source doc with. The default analyzer
/// is the <see cref="DEFAULT_ANALYZER"/>.
/// <para />
/// An analyzer is not required for generating a query with the
/// <see cref="Like(int)"/> method, all other 'like' methods require an analyzer.
/// </summary>
/// <value> the analyzer that will be used to parse source doc with. </value>
/// <seealso cref="DEFAULT_ANALYZER">
/// </seealso>
public Analyzer Analyzer
{
get { return analyzer; }
set { this.analyzer = value; }
}
/// <summary>
/// Gets or sets the frequency below which terms will be ignored in the source doc. The default
/// frequency is the <see cref="DEFAULT_MIN_TERM_FREQ"/>.
/// </summary>
public int MinTermFreq
{
get { return minTermFreq; }
set { this.minTermFreq = value; }
}
/// <summary>
/// Gets or sets the frequency at which words will be ignored which do not occur in at least this
/// many docs. The default frequency is <see cref="DEFAULT_MIN_DOC_FREQ"/>.
/// </summary>
public int MinDocFreq
{
get { return minDocFreq; }
set { this.minDocFreq = value; }
}
/// <summary>
/// Gets or sets the maximum frequency in which words may still appear.
/// Words that appear in more than this many docs will be ignored. The default frequency is
/// <see cref="DEFAULT_MAX_DOC_FREQ"/>
/// </summary>
public int MaxDocFreq
{
get { return this.maxDocfreq; }
set { this.maxDocfreq = value; }
}
/// <summary>
/// Set the maximum percentage in which words may still appear. Words that appear
/// in more than this many percent of all docs will be ignored.
/// </summary>
/// <param name="maxPercentage">
/// the maximum percentage of documents (0-100) that a term may appear
/// in to be still considered relevant
/// </param>
public void SetMaxDocFreqPct(int maxPercentage)
{
    // BUGFIX: use 64-bit intermediate math; maxPercentage * NumDocs() can overflow
    // Int32 for large indexes (e.g. 100 * ~30M docs exceeds int.MaxValue).
    this.maxDocfreq = (int)((long)maxPercentage * ir.NumDocs() / 100);
}
/// <summary> Gets or sets a boolean indicating whether to boost terms in query based
/// on "score" or not. The default is <see cref="DEFAULT_BOOST"/>.
/// </summary>
public bool Boost
{
get { return boost; }
set { this.boost = value; }
}
/// <summary> Returns the field names that will be used when generating the 'More Like This' query.
/// The default field names that will be used is <see cref="DEFAULT_FIELD_NAMES"/>.
/// </summary>
/// <returns> the field names that will be used when generating the 'More Like This' query.
/// </returns>
public System.String[] GetFieldNames()
{
return fieldNames;
}
/// <summary> Sets the field names that will be used when generating the 'More Like This' query.
/// Set this to null for the field names to be determined at runtime from the IndexReader
/// provided in the constructor.
/// </summary>
/// <param name="fieldNames">the field names that will be used when generating the 'More Like This'
/// query.
/// </param>
public void SetFieldNames(System.String[] fieldNames)
{
this.fieldNames = fieldNames;
}
/// <summary>
/// Gets or sets the minimum word length below which words will be ignored.
/// Set this to 0 for no minimum word length. The default is <see cref="DEFAULT_MIN_WORD_LENGTH"/>.
/// </summary>
public int MinWordLen
{
get { return minWordLen; }
set { this.minWordLen = value; }
}
/// <summary>
/// Gets or sets the maximum word length above which words will be ignored. Set this to 0 for no
/// maximum word length. The default is <see cref="DEFAULT_MAX_WORD_LENGTH"/>.
/// </summary>
public int MaxWordLen
{
get { return maxWordLen; }
set { this.maxWordLen = value; }
}
/// <summary> Set the set of stopwords.
/// Any word in this set is considered "uninteresting" and ignored.
/// Even if your Analyzer allows stopwords, you might want to tell the MoreLikeThis code to ignore them, as
/// for the purposes of document similarity it seems reasonable to assume that "a stop word is never interesting".
/// </summary>
/// <param name="stopWords">set of stopwords, if null it means to allow stop words
/// </param>
/// <seealso cref="Lucene.Net.Analysis.StopFilter.MakeStopSet(string[])">
/// </seealso>
/// <seealso cref="GetStopWords">
/// </seealso>
public void SetStopWords(ISet<string> stopWords)
{
this.stopWords = stopWords;
}
/// <summary> Get the current stop words being used.</summary>
/// <seealso cref="SetStopWords">
/// </seealso>
public ISet<string> GetStopWords()
{
return stopWords;
}
/// <summary>
/// Gets or sets the maximum number of query terms that will be included in any generated query.
/// The default is <see cref="DEFAULT_MAX_QUERY_TERMS"/>.
/// </summary>
public int MaxQueryTerms
{
get { return maxQueryTerms; }
set { this.maxQueryTerms = value; }
}
/// <summary>
/// Gets or sets the maximum number of tokens to parse in each example doc
/// field that is not stored with TermVector support
/// </summary>
/// <seealso cref="DEFAULT_MAX_NUM_TOKENS_PARSED" />
public int MaxNumTokensParsed
{
get { return maxNumTokensParsed; }
set { maxNumTokensParsed = value; }
}
/// <summary>Return a query that will return docs like the passed lucene document ID.</summary>
/// <param name="docNum">the documentID of the lucene doc to generate the 'More Like This" query for.</param>
/// <returns> a query that will return docs like the passed lucene document ID.</returns>
public Query Like(int docNum)
{
    // Lazily resolve the set of indexed fields when none were configured explicitly.
    if (fieldNames == null)
    {
        fieldNames = ir.GetFieldNames(IndexReader.FieldOption.INDEXED).ToArray();
    }
    return CreateQuery(RetrieveTerms(docNum));
}
/// <summary> Return a query that will return docs like the passed file.
/// </summary>
/// <param name="f">the file whose contents are analyzed for representative terms</param>
/// <returns> a query that will return docs like the passed file.
/// </returns>
public Query Like(System.IO.FileInfo f)
{
    if (fieldNames == null)
    {
        // gather list of valid fields from lucene
        ICollection<string> fields = ir.GetFieldNames(IndexReader.FieldOption.INDEXED);
        fieldNames = fields.ToArray();
    }
    // BUGFIX: dispose the reader (and its underlying file handle) once the terms have
    // been consumed; the original leaked the StreamReader.
    using (var reader = new System.IO.StreamReader(f.FullName, System.Text.Encoding.Default))
    {
        return Like(reader);
    }
}
/// <summary> Return a query that will return docs like the passed URL.
/// </summary>
/// <param name="u">the URL whose downloaded contents are analyzed for representative terms</param>
/// <returns> a query that will return docs like the passed URL.
/// </returns>
public Query Like(System.Uri u)
{
    // BUGFIX: dispose the web response and the reader over its stream; the original
    // leaked both, keeping the underlying connection resources alive.
    using (var response = System.Net.WebRequest.Create(u).GetResponse())
    using (var reader = new System.IO.StreamReader(response.GetResponseStream(), System.Text.Encoding.Default))
    {
        return Like(reader);
    }
}
/// <summary> Return a query that will return docs like the passed stream.
/// </summary>
/// <param name="is_Renamed">the stream whose contents are analyzed; the caller retains ownership and must dispose it</param>
/// <returns> a query that will return docs like the passed stream.
/// </returns>
public Query Like(System.IO.Stream is_Renamed)
{
    // The reader is intentionally not disposed here: disposing it would close the
    // caller-owned stream (behavior preserved from the original).
    var reader = new System.IO.StreamReader(is_Renamed, System.Text.Encoding.Default);
    return Like(reader);
}
/// <summary> Return a query that will return docs like the passed Reader.
/// </summary>
/// <param name="r">the reader whose text is analyzed for representative terms</param>
/// <returns> a query that will return docs like the passed Reader.
/// </returns>
public Query Like(System.IO.TextReader r)
{
    var interestingTerms = RetrieveTerms(r);
    return CreateQuery(interestingTerms);
}
/// <summary> Create the More like query from a PriorityQueue of scored term entries.</summary>
/// <param name="q">queue of object[] entries: [0]=word, [1]=field, [2]=score</param>
private Query CreateQuery(PriorityQueue<object[]> q)
{
    var query = new BooleanQuery();
    int termCount = 0;
    float bestScore = 0;
    object popped;
    while ((popped = q.Pop()) != null)
    {
        var entry = (System.Object[])popped;
        var tq = new TermQuery(new Term((System.String)entry[1], (System.String)entry[0]));
        if (boost)
        {
            // The first (best) entry popped establishes the normalization baseline.
            if (termCount == 0)
            {
                bestScore = (float)entry[2];
            }
            tq.Boost = boostFactor * (float)entry[2] / bestScore;
        }
        try
        {
            query.Add(tq, Occur.SHOULD);
        }
        catch (BooleanQuery.TooManyClauses)
        {
            break; // query is full - stop adding clauses
        }
        termCount++;
        if (maxQueryTerms > 0 && termCount >= maxQueryTerms)
        {
            break;
        }
    }
    return query;
}
/// <summary> Create a PriorityQueue from a word->tf map, filtering by the configured
/// term-frequency, doc-frequency and word-length thresholds and scoring each surviving
/// word by tf*idf.
/// </summary>
/// <param name="words">a map of words keyed on the word(String) with Int objects as the values.
/// </param>
/// <returns>a queue of object[] entries ordered by score: [word, topField, score, idf, docFreq, tf]</returns>
private PriorityQueue<object[]> CreateQueue(IDictionary<string,Int> words)
{
    // have collected all words in doc and their freqs
    int numDocs = ir.NumDocs();
    FreqQ res = new FreqQ(words.Count); // will order words by score
    // IMPROVED: iterate key/value pairs directly; the original enumerated the keys and
    // then re-indexed the dictionary, performing a redundant lookup per word.
    foreach (var entry in words)
    {
        System.String word = entry.Key;
        int tf = entry.Value.x; // term freq in the source doc
        if (minTermFreq > 0 && tf < minTermFreq)
        {
            continue; // filter out words that don't occur enough times in the source
        }
        // go through all the fields and find the largest document frequency
        System.String topField = fieldNames[0];
        int docFreq = 0;
        for (int i = 0; i < fieldNames.Length; i++)
        {
            int freq = ir.DocFreq(new Term(fieldNames[i], word));
            topField = (freq > docFreq) ? fieldNames[i] : topField;
            docFreq = (freq > docFreq) ? freq : docFreq;
        }
        if (minDocFreq > 0 && docFreq < minDocFreq)
        {
            continue; // filter out words that don't occur in enough docs
        }
        if (docFreq > maxDocfreq)
        {
            continue; // filter out words that occur in too many docs
        }
        if (docFreq == 0)
        {
            continue; // index update problem?
        }
        float idf = similarity.Idf(docFreq, numDocs);
        float score = tf * idf;
        // only really need 1st 3 entries, other ones are for troubleshooting
        res.InsertWithOverflow(new System.Object[] { word, topField, score, idf, docFreq, tf });
    }
    return res;
}
/// <summary> Describe the parameters that control how the "more like this" query is formed.</summary>
/// <returns>a human-readable, tab-indented listing of the current parameter values</returns>
public System.String DescribeParams()
{
    var sb = new System.Text.StringBuilder();
    sb.Append("\t").Append("maxQueryTerms : ").Append(maxQueryTerms).Append("\n");
    sb.Append("\t").Append("minWordLen : ").Append(minWordLen).Append("\n");
    sb.Append("\t").Append("maxWordLen : ").Append(maxWordLen).Append("\n");
    sb.Append("\t").Append("fieldNames : \"");
    // Comma-separated field list (same output as the original delimiter loop).
    sb.Append(string.Join(", ", fieldNames));
    sb.Append("\n");
    sb.Append("\t").Append("boost : ").Append(boost).Append("\n");
    sb.Append("\t").Append("minTermFreq : ").Append(minTermFreq).Append("\n");
    sb.Append("\t").Append("minDocFreq : ").Append(minDocFreq).Append("\n");
    return sb.ToString();
}
/// <summary> Test driver.
/// Pass in "-i INDEX" and then either "-fn FILE" or "-url URL".
/// NOTE(review): the parser below actually matches "-f", not "-fn" - confirm which flag is intended.
/// </summary>
[STAThread]
public static void Main(System.String[] a)
{
// Defaults used when no command-line flags are supplied.
System.String indexName = "localhost_index";
System.String fn = "c:/Program Files/Apache Group/Apache/htdocs/manual/vhosts/index.html.en";
System.Uri url = null;
// Parse "-i INDEX", "-f FILE" and "-url URL" flag/value pairs.
for (int i = 0; i < a.Length; i++)
{
if (a[i].Equals("-i"))
{
indexName = a[++i];
}
else if (a[i].Equals("-f"))
{
fn = a[++i];
}
else if (a[i].Equals("-url"))
{
url = new System.Uri(a[++i]);
}
}
// Auto-flushing writer over stdout so progress is visible immediately.
System.IO.StreamWriter temp_writer;
temp_writer = new System.IO.StreamWriter(System.Console.OpenStandardOutput(), System.Console.Out.Encoding);
temp_writer.AutoFlush = true;
System.IO.StreamWriter o = temp_writer;
// Open the index read-only and report its size.
FSDirectory dir = FSDirectory.Open(new DirectoryInfo(indexName));
IndexReader r = IndexReader.Open(dir, true);
o.WriteLine("Open index " + indexName + " which has " + r.NumDocs() + " docs");
MoreLikeThis mlt = new MoreLikeThis(r);
o.WriteLine("Query generation parameters:");
o.WriteLine(mlt.DescribeParams());
o.WriteLine();
// Build the "more like this" query from the URL if given, otherwise from the file.
Query query = null;
if (url != null)
{
o.WriteLine("Parsing URL: " + url);
query = mlt.Like(url);
}
else if (fn != null)
{
o.WriteLine("Parsing file: " + fn);
query = mlt.Like(new System.IO.FileInfo(fn));
}
o.WriteLine("q: " + query);
o.WriteLine();
// Run the generated query and print the top 25 similar documents.
IndexSearcher searcher = new IndexSearcher(dir, true);
TopDocs hits = searcher.Search(query, null, 25);
int len = hits.TotalHits;
o.WriteLine("found: " + len + " documents matching");
o.WriteLine();
ScoreDoc[] scoreDocs = hits.ScoreDocs;
for (int i = 0; i < System.Math.Min(25, len); i++)
{
Document d = searcher.Doc(scoreDocs[i].Doc);
System.String summary = d.Get("summary");
o.WriteLine("score : " + scoreDocs[i].Score);
o.WriteLine("url : " + d.Get("url"));
o.WriteLine("\ttitle : " + d.Get("title"));
if (summary != null)
o.WriteLine("\tsummary: " + d.Get("summary"));
o.WriteLine();
}
}
/// <summary> Find words for a more-like-this query former.
/// </summary>
/// <param name="docNum">the id of the lucene document from which to find terms
/// </param>
private PriorityQueue<object[]> RetrieveTerms(int docNum)
{
    IDictionary<string,Int> termFreqMap = new HashMap<string,Int>();
    foreach (System.String fieldName in fieldNames)
    {
        ITermFreqVector vector = ir.GetTermFreqVector(docNum, fieldName);
        if (vector == null)
        {
            // No term vector stored for this field: fall back to re-analyzing the
            // stored field values.
            Document d = ir.Document(docNum);
            System.String[] text = d.GetValues(fieldName);
            if (text != null)
            {
                foreach (System.String value in text)
                {
                    AddTermFrequencies(new System.IO.StringReader(value), termFreqMap, fieldName);
                }
            }
        }
        else
        {
            // Term vector available: read frequencies directly, no re-tokenization needed.
            AddTermFrequencies(termFreqMap, vector);
        }
    }
    return CreateQueue(termFreqMap);
}
/// <summary> Adds terms and frequencies found in vector into the Map termFreqMap</summary>
/// <param name="termFreqMap">a Map of terms and their frequencies
/// </param>
/// <param name="vector">List of terms and their frequencies for a doc/field
/// </param>
private void AddTermFrequencies(IDictionary<string, Int> termFreqMap, ITermFreqVector vector)
{
    System.String[] terms = vector.GetTerms();
    int[] freqs = vector.GetTermFrequencies();
    for (int j = 0; j < terms.Length; j++)
    {
        System.String term = terms[j];
        if (IsNoiseWord(term))
        {
            continue;
        }
        // IMPROVED: TryGetValue avoids a second dictionary lookup and does not depend
        // on the HashMap indexer returning null for missing keys (a plain Dictionary
        // would throw KeyNotFoundException there).
        Int cnt;
        if (!termFreqMap.TryGetValue(term, out cnt))
        {
            cnt = new Int();
            termFreqMap[term] = cnt;
            cnt.x = freqs[j];
        }
        else
        {
            cnt.x += freqs[j];
        }
    }
}
/// <summary> Adds term frequencies found by tokenizing text from reader into the Map words</summary>
/// <param name="r">a source of text to be tokenized
/// </param>
/// <param name="termFreqMap">a Map of terms and their frequencies
/// </param>
/// <param name="fieldName">Used by analyzer for any special per-field analysis
/// </param>
private void AddTermFrequencies(System.IO.TextReader r, IDictionary<string,Int> termFreqMap, System.String fieldName)
{
    TokenStream ts = analyzer.TokenStream(fieldName, r);
    int tokenCount = 0;
    // for every token
    ITermAttribute termAtt = ts.AddAttribute<ITermAttribute>();
    while (ts.IncrementToken())
    {
        string word = termAtt.Term;
        tokenCount++;
        if (tokenCount > maxNumTokensParsed)
        {
            break; // cap the amount of tokenization for fields without term vectors
        }
        if (IsNoiseWord(word))
        {
            continue;
        }
        // IMPROVED: TryGetValue avoids a second dictionary lookup and does not depend
        // on the HashMap indexer returning null for missing keys (a plain Dictionary
        // would throw KeyNotFoundException there).
        // NOTE(review): a freshly constructed Int appears to represent a count of 1
        // (its x field is not set here) - confirm against the Int helper class.
        Int cnt;
        if (!termFreqMap.TryGetValue(word, out cnt))
        {
            termFreqMap[word] = new Int();
        }
        else
        {
            cnt.x++;
        }
    }
}
/// <summary>determines if the passed term is likely to be of interest in "more like" comparisons
/// </summary>
/// <param name="term">The word being considered
/// </param>
/// <returns> true if should be ignored, false if should be used in further analysis
/// </returns>
private bool IsNoiseWord(System.String term)
{
    int len = term.Length;
    // A term is noise when it falls outside the configured length bounds (0 disables
    // a bound) or is a configured stop word; short-circuit order matches the original.
    return (minWordLen > 0 && len < minWordLen)
        || (maxWordLen > 0 && len > maxWordLen)
        || (stopWords != null && stopWords.Contains(term));
}
/// <summary> Find words for a more-like-this query former.
/// The result is a priority queue of arrays with one entry for <b>every word</b> in the document.
/// Each array has 6 elements:
/// <ol>
/// <li> The word (String)</li>
/// <li> The top field that this word comes from (String)</li>
/// <li> The score for this word (Float)</li>
/// <li> The IDF value (Float)</li>
/// <li> The frequency of this word in the index (Integer)</li>
/// <li> The frequency of this word in the source document (Integer)</li>
/// </ol>
/// This is a somewhat "advanced" routine, and in general only the 1st entry in the array is of interest.
/// This method is exposed so that you can identify the "interesting words" in a document.
/// For an easier method to call see <see cref="RetrieveInterestingTerms(System.IO.TextReader)"/>.
/// </summary>
/// <param name="r">the reader that has the content of the document</param>
/// <returns> the most interesting words in the document ordered by score, with the highest scoring, or best entry, first</returns>
/// <seealso cref="RetrieveInterestingTerms(System.IO.TextReader)"/>
public PriorityQueue<object[]> RetrieveTerms(System.IO.TextReader r)
{
    IDictionary<string, Int> frequencies = new HashMap<string, Int>();
    foreach (System.String field in fieldNames)
    {
        AddTermFrequencies(r, frequencies, field);
    }

    return CreateQueue(frequencies);
}
/// <summary>Returns the most interesting words found in the given indexed document,
/// ordered by score, limited to <c>maxQueryTerms</c> entries.</summary>
/// <param name="docNum">the id of the lucene document from which to find terms</param>
public System.String[] RetrieveInterestingTerms(int docNum)
{
    var found = new List<object>(maxQueryTerms);
    PriorityQueue<object[]> pq = RetrieveTerms(docNum);

    // RetrieveTerms returns an entry for every word in the document;
    // callers only want the top-scoring maxQueryTerms of them.
    int remaining = maxQueryTerms;
    object popped;
    while ((popped = pq.Pop()) != null && remaining-- > 0)
    {
        var entry = (object[])popped;
        found.Add(entry[0]); // element 0 is the interesting word itself
    }

    return found.Select(x => x.ToString()).ToArray();
}
/// <summary> Convenience routine to make it easy to return the most interesting words in a document.
/// More advanced users will call <see cref="RetrieveTerms(System.IO.TextReader)"/> directly.
/// </summary>
/// <param name="r">the source document</param>
/// <returns> the most interesting words in the document</returns>
/// <seealso cref="RetrieveTerms(System.IO.TextReader)"/>
/// <seealso cref="MaxQueryTerms"/>
public System.String[] RetrieveInterestingTerms(System.IO.TextReader r)
{
    var found = new List<object>(maxQueryTerms);
    PriorityQueue<object[]> pq = RetrieveTerms(r);

    // RetrieveTerms returns an entry for every word in the document;
    // callers only want the top-scoring maxQueryTerms of them.
    int remaining = maxQueryTerms;
    object popped;
    while ((popped = pq.Pop()) != null && remaining-- > 0)
    {
        var entry = (object[])popped;
        found.Add(entry[0]); // element 0 is the interesting word itself
    }

    return found.Select(x => x.ToString()).ToArray();
}
/// <summary>PriorityQueue that orders word entries by score (element 2 of each array).</summary>
private class FreqQ : PriorityQueue<object[]>
{
    internal FreqQ(int s)
    {
        Initialize(s);
    }

    // Deliberately inverted comparison: reporting "greater" as "less than"
    // makes Pop() yield the highest-scoring entry first.
    public override bool LessThan(System.Object[] aa, System.Object[] bb)
    {
        return (float)aa[2] > (float)bb[2];
    }
}
/// <summary> Use for frequencies and to avoid renewing Integers.</summary>
private class Int
{
    // Mutable occurrence counter; a freshly constructed Int already counts one hit.
    internal int x;

    internal Int()
    {
        x = 1;
    }
}
}
}
| |
//
// AttachmentInternal.cs
//
// Author:
// Zachary Gramana <[email protected]>
//
// Copyright (c) 2014 Xamarin Inc
// Copyright (c) 2014 .NET Foundation
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//
// Copyright (c) 2014 Couchbase, Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
//
using Couchbase.Lite;
using Couchbase.Lite.Internal;
using Sharpen;
using System.Collections.Generic;
using System.IO;
using System;
using System.Diagnostics;
using System.Linq;
using System.IO.Compression;
using Couchbase.Lite.Util;
namespace Couchbase.Lite.Internal
{
// Storage encodings supported for attachment bodies.
internal enum AttachmentEncoding
{
    None,   // raw bytes
    GZIP    // gzip-compressed bytes
}
/// <summary>
/// Internal representation of a document attachment: metadata (name, content type,
/// lengths, encoding, revision position) plus access to the body, which lives either
/// inline (<c>_data</c>) or in the owning database's blob store (via <c>BlobKey</c>).
/// </summary>
internal sealed class AttachmentInternal
{
    private const string TAG = "AttachmentInternal";

    // Inline body bytes, if the attachment was supplied inline; otherwise null
    // and the body is fetched from the blob store.
    private IEnumerable<byte> _data;
    private BlobKey _blobKey;
    private string _digest;

    // Decoded (uncompressed) length of the attachment body.
    public long Length { get; set; }

    // Length of the stored (possibly compressed) body; 0 when not encoded.
    public long EncodedLength { get; set; }

    public AttachmentEncoding Encoding { get; set; }

    // Revision generation in which this attachment was added.
    public int RevPos { get; set; }

    public Database Database { get; set; }

    public string Name { get; private set; }

    public string ContentType { get; private set; }

    public BlobKey BlobKey {
        get { return _blobKey; }
        set {
            // Invalidate the cached digest; Digest derives it from the key on demand.
            _digest = null;
            _blobKey = value;
        }
    }

    // Base64 digest of the body; falls back to deriving it from the blob key.
    public string Digest {
        get {
            if (_digest != null) {
                return _digest;
            }

            if (_blobKey != null) {
                return _blobKey.Base64Digest();
            }

            return null;
        }
    }

    // Body exactly as stored (still compressed when Encoding == GZIP);
    // only available if inline or stored in the db blob-store.
    public IEnumerable<byte> EncodedContent {
        get {
            if (_data != null) {
                return _data;
            }

            if (Database == null || Database.Attachments == null) {
                return null;
            }

            return Database.Attachments.BlobForKey(_blobKey);
        }
    }

    // Decoded (decompressed) body bytes, or null if unavailable.
    public IEnumerable<byte> Content {
        get {
            var data = EncodedContent;
            switch (Encoding) {
                case AttachmentEncoding.None:
                    break;
                case AttachmentEncoding.GZIP:
                    data = data.Decompress();
                    break;
            }

            if (data == null) {
                Log.W(TAG, "Unable to decode attachment!");
            }

            return data;
        }
    }

    // Stream over the *decoded* body.
    public Stream ContentStream {
        get {
            if (Encoding == AttachmentEncoding.None) {
                return Database.Attachments.BlobStreamForKey(_blobKey);
            }

            // FIX: decompress the encoded bytes. The previous code wrapped Content
            // (which the getter above has ALREADY decompressed for GZIP) in another
            // decompressing GZipStream, double-decompressing and failing on valid data.
            var ms = new MemoryStream(EncodedContent.ToArray());
            return new GZipStream(ms, CompressionMode.Decompress, true);
        }
    }

    // File URL of the blob-store file; only if already stored in db blob-store.
    public Uri ContentUrl {
        get {
            string path = Database.Attachments.PathForKey(_blobKey);
            return path != null ? new Uri(path) : null;
        }
    }

    // Metadata sanity checks: encoded/plain lengths must be consistent with the
    // encoding, revpos must be set, and (debug builds only) a blob key must exist.
    public bool IsValid {
        get {
            if (Encoding != AttachmentEncoding.None) {
                if (EncodedLength == 0 && Length > 0) {
                    return false;
                }
            } else if (EncodedLength > 0) {
                return false;
            }

            if (RevPos == 0) {
                return false;
            }

            #if DEBUG
            if(_blobKey == null) {
                return false;
            }
            #endif

            return true;
        }
    }

    public AttachmentInternal(string name, string contentType)
    {
        Debug.Assert(name != null);
        Name = name;
        ContentType = contentType;
    }

    /// <summary>
    /// Builds an attachment from its JSON dictionary form. The dictionary must carry
    /// either inline "data", a "stub" marker, or a "follows" marker; anything else is
    /// rejected with StatusCode.BadAttachment.
    /// </summary>
    /// <exception cref="CouchbaseLiteException">On an unknown encoding, undecodable
    /// inline data, invalid revpos, or a missing digest for a "follows" entry.</exception>
    public AttachmentInternal(string name, IDictionary<string, object> info)
        : this(name, info.GetCast<string>("content_type"))
    {
        Length = info.GetCast<long>("length");
        EncodedLength = info.GetCast<long>("encoded_length");
        _digest = info.GetCast<string>("digest");
        if (_digest != null) {
            BlobKey = new BlobKey(_digest);
        }

        string encodingString = info.GetCast<string>("encoding");
        if (encodingString != null) {
            if (encodingString.Equals("gzip")) {
                Encoding = AttachmentEncoding.GZIP;
            } else {
                throw new CouchbaseLiteException(StatusCode.BadEncoding);
            }
        }

        var data = info.Get("data");
        if (data != null) {
            // If there's inline attachment data, decode and store it:
            if (data is string) {
                _data = Convert.FromBase64String((string)data);
            } else {
                _data = data as IEnumerable<byte>;
            }

            if (_data == null) {
                throw new CouchbaseLiteException(StatusCode.BadEncoding);
            }

            SetPossiblyEncodedLength(_data.LongCount());
        } else if (info.GetCast<bool>("stub", false)) {
            // This item is just a stub; validate and skip it
            if(info.ContainsKey("revpos")) {
                var revPos = info.GetCast<int>("revpos");
                if (revPos <= 0) {
                    throw new CouchbaseLiteException(StatusCode.BadAttachment);
                }

                RevPos = revPos;
            }
        } else if (info.GetCast<bool>("follows", false)) {
            // I can't handle this myself; my caller will look it up from the digest
            if (Digest == null) {
                throw new CouchbaseLiteException(StatusCode.BadAttachment);
            }

            if(info.ContainsKey("revpos")) {
                var revPos = info.GetCast<int>("revpos");
                if (revPos <= 0) {
                    throw new CouchbaseLiteException(StatusCode.BadAttachment);
                }

                RevPos = revPos;
            }
        } else {
            throw new CouchbaseLiteException(StatusCode.BadAttachment);
        }
    }

    /// <summary>Serializes the attachment metadata as a "stub" dictionary (no body bytes).</summary>
    public IDictionary<string, object> AsStubDictionary()
    {
        var retVal = new NonNullDictionary<string, object> {
            { "stub", true },
            { "digest", Digest },
            { "content_type", ContentType },
            { "revpos", RevPos },
            { "length", Length }
        };

        if (EncodedLength > 0) {
            retVal["encoded_length"] = EncodedLength;
        }

        switch (Encoding) {
            case AttachmentEncoding.GZIP:
                retVal["encoding"] = "gzip";
                break;
            case AttachmentEncoding.None:
                break;
        }

        return retVal;
    }

    /// <summary>Records a body length as encoded or plain depending on the current Encoding.</summary>
    public void SetPossiblyEncodedLength(long length)
    {
        if (Encoding != AttachmentEncoding.None) {
            EncodedLength = length;
        } else {
            Length = length;
        }
    }
}
}
| |
using StructureMap.Building;
using StructureMap.Building.Interception;
using StructureMap.Diagnostics;
using StructureMap.Graph;
using StructureMap.TypeRules;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
namespace StructureMap.Pipeline
{
/// <summary>
/// Base class for all configured "instances" in a StructureMap container. Carries
/// naming/identity, lifecycle, per-instance interceptors and policies, and the
/// cached IBuildPlan used to construct the concrete object.
/// </summary>
public abstract class Instance : HasLifecycle, IDescribed
{
    internal readonly IList<IInstancePolicy> AppliedPolicies = new List<IInstancePolicy>();

    private readonly string _originalName;
    private readonly int _hashCode;
    private string _name;

    private readonly IList<IInterceptor> _interceptors = new List<IInterceptor>();

    // Caches the per-plugin-type hash computed by InstanceKey().
    private readonly ConcurrentDictionary<Type, int> _hashCodes = new ConcurrentDictionary<Type, int>();

    // Guards creation/clearing of the cached build plan.
    private readonly object _buildLock = new object();
    private IBuildPlan _plan;

    protected Instance()
    {
        Id = Guid.NewGuid();

        // Until a user assigns a name, the name is the Guid string. This also seeds
        // the identity hash and lets HasExplicitName() detect user-assigned names.
        _originalName = _name = Id.ToString();
        _hashCode = _originalName.GetHashCode();
    }

    /// <summary>
    /// Add an interceptor to only this Instance
    /// </summary>
    /// <param name="interceptor"></param>
    /// <exception cref="ArgumentOutOfRangeException">
    /// If the interceptor cannot accept this Instance's ReturnedType.
    /// </exception>
    public void AddInterceptor(IInterceptor interceptor)
    {
        if (ReturnedType != null && !ReturnedType.CanBeCastTo(interceptor.Accepts))
        {
            // FIX: the single-string ArgumentOutOfRangeException constructor treats its
            // argument as the parameter *name*, not the message, so the formatted text
            // was never shown. Use the (paramName, message) overload instead.
            throw new ArgumentOutOfRangeException(
                "interceptor",
                "ReturnedType {0} cannot be cast to the Interceptor Accepts type {1}".ToFormat(
                    ReturnedType.GetFullName(), interceptor.Accepts.GetFullName()));
        }

        _interceptors.Add(interceptor);
    }

    /// <summary>
    /// Strategy for how this Instance would be built as
    /// an inline dependency in the parent Instance's
    /// "Build Plan"
    /// </summary>
    /// <param name="pluginType"></param>
    /// <returns></returns>
    public abstract IDependencySource ToDependencySource(Type pluginType);

    /// <summary>
    /// Creates an IDependencySource that can be used to build the object
    /// represented by this Instance
    /// </summary>
    /// <param name="pluginType"></param>
    /// <param name="policies"></param>
    /// <returns></returns>
    public virtual IDependencySource ToBuilder(Type pluginType, Policies policies)
    {
        return ToDependencySource(pluginType);
    }

    /// <summary>Interceptors registered against only this Instance.</summary>
    public IEnumerable<IInterceptor> Interceptors
    {
        get { return _interceptors; }
    }

    public virtual string Name
    {
        get { return _name; }
        set { _name = value; }
    }

    public abstract string Description { get; }

    public InstanceToken CreateToken()
    {
        return new InstanceToken(Name, Description);
    }

    /// <summary>
    /// The known .Net Type built by this Instance. May be null when indeterminate.
    /// </summary>
    public abstract Type ReturnedType { get; }

    /// <summary>
    /// Does this Instance have a user-defined name?
    /// </summary>
    /// <returns></returns>
    public virtual bool HasExplicitName()
    {
        // The name differs from the original Guid-derived name only if a user set it.
        return _name != _originalName;
    }

    /// <summary>
    /// Return the closed type value for this Instance
    /// when starting from an open generic type
    /// </summary>
    /// <param name="types"></param>
    /// <returns></returns>
    public virtual Instance CloseType(Type[] types)
    {
        return this;
    }

    /// <summary>
    /// Resolves the IBuildPlan for this Instance. The result is remembered
    /// for subsequent requests
    /// </summary>
    /// <param name="pluginType"></param>
    /// <param name="policies"></param>
    /// <returns></returns>
    public IBuildPlan ResolveBuildPlan(Type pluginType, Policies policies)
    {
        lock (_buildLock)
        {
            return _plan ?? (_plan = buildPlan(pluginType, policies));
        }
    }

    /// <summary>
    /// Clears out any remembered IBuildPlan for this Instance
    /// </summary>
    public void ClearBuildPlan()
    {
        lock (_buildLock)
        {
            _plan = null;
        }
    }

    /// <summary>
    /// Has a build plan already been created for this instance?
    /// </summary>
    /// <returns></returns>
    public bool HasBuildPlan()
    {
        return _plan != null;
    }

    // Human-readable description used in build-plan error messages.
    private string toDescription(Type pluginType)
    {
        var typeName = (pluginType ?? ReturnedType).GetFullName();
        if (HasExplicitName())
        {
            return "Instance of {0} ({1}) -- {2}".ToFormat(typeName, Name, Description);
        }

        return "Instance of {0} -- {1}".ToFormat(typeName, Description);
    }

    public void ApplyAllPolicies(Type pluginType, Policies policies)
    {
        lock (_buildLock)
        {
            policies.Apply(pluginType, this);
        }
    }

    // Builds the IBuildPlan, wrapping any failure with enough context to diagnose it.
    protected virtual IBuildPlan buildPlan(Type pluginType, Policies policies)
    {
        try
        {
            policies.Apply(pluginType, this);

            var builderSource = ToBuilder(pluginType, policies);
            return new BuildPlan(pluginType, this, builderSource, policies, Interceptors);
        }
        catch (StructureMapException e)
        {
            e.Push("Attempting to create a BuildPlan for " + toDescription(pluginType));
            throw;
        }
        catch (Exception e)
        {
            throw new StructureMapBuildPlanException(
                "Error while trying to create the BuildPlan for {0}.\nPlease check the inner exception".ToFormat(
                    toDescription(pluginType)), e);
        }
    }

    /// <summary>
    /// Creates a hash that is unique for this Instance and PluginType combination
    /// </summary>
    /// <param name="pluginType"></param>
    /// <returns></returns>
    public int InstanceKey(Type pluginType)
    {
        if (pluginType == null)
        {
            return _hashCode;
        }

        return _hashCodes.GetOrAdd(pluginType, t =>
        {
            unchecked
            {
                return _hashCode * 397 ^ pluginType.AssemblyQualifiedName.GetHashCode();
            }
        });
    }

    /// <summary>Effective lifecycle: this Instance's own, else the parent's, else Transient.</summary>
    public ILifecycle DetermineLifecycle(ILifecycle parent)
    {
        return Lifecycle ?? parent ?? Lifecycles.Transient;
    }

    // Identity is based on the original Guid-derived name, so renaming an
    // Instance does not change its equality or hash code.
    protected bool Equals(Instance other)
    {
        return string.Equals(_originalName, other._originalName);
    }

    public override bool Equals(object obj)
    {
        if (ReferenceEquals(null, obj)) return false;
        if (ReferenceEquals(this, obj)) return true;
        if (obj.GetType() != this.GetType()) return false;
        return Equals((Instance)obj);
    }

    public override int GetHashCode()
    {
        return _hashCode;
    }

    public Guid Id { get; private set; }

    // SAMPLE: ToNamedClose
    public virtual Instance ToNamedClone(string name)
    // ENDSAMPLE
    {
        throw new NotSupportedException("Instance's of type '{0}' does not support ToNamedClose() and cannot be used as a MissingInstance".ToFormat(GetType().FullName));
    }
}
}
| |
// ==++==
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// ==--==
// <OWNER>[....]</OWNER>
//
//
// ClaimsIdentity.cs
//
namespace System.Security.Claims
{
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics.Contracts;
using System.IO;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary;
using System.Security.Permissions;
using System.Security.Principal;
/// <summary>
/// An Identity that is represented by a set of claims.
/// </summary>
[Serializable]
[ComVisible(true)]
public class ClaimsIdentity : IIdentity
{
// Bit flags recording which optional sections are present in the binary
// serialization format produced by WriteTo / consumed by the BinaryReader ctor.
private enum SerializationMask
{
    None = 0,
    AuthenticationType = 1,
    BootstrapConext = 2,    // NOTE(review): misspelling of "BootstrapContext"; private, but may be referenced by serialization code outside this view — confirm before renaming.
    NameClaimType = 4,
    RoleClaimType = 8,
    HasClaims = 16,
    HasLabel = 32,
    Actor = 64,
    UserData = 128,
}
// Opaque data supplied by derived types via WriteTo(BinaryWriter, byte[]).
[NonSerialized]
private byte[] m_userSerializationData;

// Keys used for the SerializationInfo-based (ISerializable) format.
[NonSerialized]
const string PreFix = "System.Security.ClaimsIdentity.";
[NonSerialized]
const string ActorKey = PreFix + "actor";
[NonSerialized]
const string AuthenticationTypeKey = PreFix + "authenticationType";
[NonSerialized]
const string BootstrapContextKey = PreFix + "bootstrapContext";
[NonSerialized]
const string ClaimsKey = PreFix + "claims";
[NonSerialized]
const string LabelKey = PreFix + "label";
[NonSerialized]
const string NameClaimTypeKey = PreFix + "nameClaimType";
[NonSerialized]
const string RoleClaimTypeKey = PreFix + "roleClaimType";
[NonSerialized]
const string VersionKey = PreFix + "version";

[NonSerialized]
public const string DefaultIssuer = @"LOCAL AUTHORITY";
[NonSerialized]
public const string DefaultNameClaimType = ClaimTypes.Name;
[NonSerialized]
public const string DefaultRoleClaimType = ClaimTypes.Role;

// === Important
//
// adding claims to this list will affect the Authorization for this Identity
// originally marked this as SecurityCritical, however because enumerators access it
// we would need to extend SecuritySafeCritical to the enumerator methods AND the constructors.
// In the end, this requires additional [SecuritySafeCritical] attributes. So if any additional access
// is added to 'm_instanceClaims' then this must be carefully monitored. This is equivalent to adding sids to the
// NTToken and will be used up the stack to make Authorization decisions.
//
// these are claims that are added by using the AddClaim, AddClaims methods or passed in the constructor.
[NonSerialized]
List<Claim> m_instanceClaims = new List<Claim>();

// These are claims that are external to the identity. .Net runtime attaches roles owned by principals GenericPrincpal and RolePrincipal here.
// They are not serialized OR remembered when cloned. Access through public method: ClaimProviders.
[NonSerialized]
Collection<IEnumerable<Claim>> m_externalClaims = new Collection<IEnumerable<Claim>>();

[NonSerialized]
string m_nameType = DefaultNameClaimType;
[NonSerialized]
string m_roleType = DefaultRoleClaimType;

// Fields below participate in classic binary serialization (version 2 of the format).
[OptionalField(VersionAdded=2)]
string m_version = "1.0";
[OptionalField(VersionAdded = 2)]
ClaimsIdentity m_actor;
[OptionalField(VersionAdded = 2)]
string m_authenticationType;
[OptionalField(VersionAdded = 2)]
object m_bootstrapContext;
[OptionalField(VersionAdded = 2)]
string m_label;
[OptionalField(VersionAdded = 2)]
string m_serializedNameType;
[OptionalField(VersionAdded = 2)]
string m_serializedRoleType;
[OptionalField(VersionAdded = 2)]
string m_serializedClaims;
#region ClaimsIdentity Constructors
/// <summary>
/// Initializes an instance of <see cref="ClaimsIdentity"/> with an empty claims collection.
/// </summary>
/// <remarks>
/// <see cref="ClaimsIdentity.AuthenticationType"/> is set to null.
/// </remarks>
public ClaimsIdentity()
    : this((Claim[])null)
{
}

/// <summary>
/// Initializes an instance of <see cref="ClaimsIdentity"/> using the name and authentication type from
/// an <see cref="IIdentity"/> instance.
/// </summary>
/// <param name="identity"><see cref="IIdentity"/> to draw the name and authentication type from.</param>
public ClaimsIdentity(IIdentity identity)
    : this(identity, (IEnumerable<Claim>)null)
{
}

/// <summary>
/// Initializes an instance of <see cref="ClaimsIdentity"/> using an enumerated collection of
/// <see cref="Claim"/> objects.
/// </summary>
/// <param name="claims">
/// The collection of <see cref="Claim"/> objects to populate <see cref="ClaimsIdentity.Claims"/> with.
/// </param>
/// <remarks>
/// <see cref="ClaimsIdentity.AuthenticationType"/> is set to null.
/// </remarks>
public ClaimsIdentity(IEnumerable<Claim> claims)
    : this((IIdentity) null, claims, null, null, null)
{
}

/// <summary>
/// Initializes an instance of <see cref="ClaimsIdentity"/> with an empty <see cref="Claim"/> collection
/// and the specified authentication type.
/// </summary>
/// <param name="authenticationType">The type of authentication used.</param>
public ClaimsIdentity(string authenticationType)
    : this((IIdentity) null, (IEnumerable<Claim>)null, authenticationType, (string)null, (string)null)
{
}

/// <summary>
/// Initializes an instance of <see cref="ClaimsIdentity"/> using an enumerated collection of
/// <see cref="Claim"/> objects and the specified authentication type.
/// </summary>
/// <param name="claims">
/// The collection of <see cref="Claim"/> objects to populate <see cref="ClaimsIdentity.Claims"/> with.
/// </param>
/// <param name="authenticationType">The type of authentication used.</param>
public ClaimsIdentity(IEnumerable<Claim> claims, string authenticationType)
    : this((IIdentity)null, claims, authenticationType, null, null)
{
}

/// <summary>
/// Initializes an instance of <see cref="ClaimsIdentity"/> using the name and authentication type from
/// an <see cref="IIdentity"/> instance, plus an additional collection of claims.
/// </summary>
/// <param name="identity"><see cref="IIdentity"/> to draw the name and authentication type from.</param>
/// <param name="claims">Additional claims to populate <see cref="ClaimsIdentity.Claims"/> with. May be null.</param>
public ClaimsIdentity(IIdentity identity, IEnumerable<Claim> claims)
    : this(identity, claims, (string)null, (string)null, (string)null)
{
}

/// <summary>
/// Initializes an instance of <see cref="ClaimsIdentity"/> with an empty <see cref="Claim"/> collection,
/// the specified authentication type, name claim type, and role claim type.
/// </summary>
/// <param name="authenticationType">The type of authentication used.</param>
/// <param name="nameType">The claim type to use for <see cref="ClaimsIdentity.Name"/>.</param>
/// <param name="roleType">The claim type to use for IClaimsPrincipal.IsInRole(string).</param>
public ClaimsIdentity(string authenticationType, string nameType, string roleType )
    : this((IIdentity) null, (IEnumerable<Claim>)null, authenticationType, nameType, roleType)
{
}

/// <summary>
/// Initializes an instance of <see cref="ClaimsIdentity"/> using an enumeration of type
/// <see cref="Claim"/>, authentication type, name claim type, and role claim type.
/// </summary>
/// <param name="claims">An enumeration of type <see cref="Claim"/> to initialize this identity</param>
/// <param name="authenticationType">The type of authentication used.</param>
/// <param name="nameType">The claim type to identify NameClaims.</param>
/// <param name="roleType">The claim type to identify RoleClaims.</param>
public ClaimsIdentity(IEnumerable<Claim> claims, string authenticationType, string nameType, string roleType)
    : this((IIdentity)null, claims, authenticationType, nameType, roleType)
{
}

/// <summary>
/// Initializes an instance of <see cref="ClaimsIdentity"/> using an enumeration of type
/// <see cref="Claim"/>, authentication type, name claim type, and role claim type, based on
/// an existing <see cref="IIdentity"/>.
/// </summary>
/// <param name="identity">The initial identity to base this identity from.</param>
/// <param name="claims">An enumeration of type <see cref="Claim"/> to initialize this identity.</param>
/// <param name="authenticationType">The type of authentication used.</param>
/// <param name="nameType">The claim type to identify NameClaims.</param>
/// <param name="roleType">The claim type to identify RoleClaims.</param>
public ClaimsIdentity(IIdentity identity, IEnumerable<Claim> claims, string authenticationType, string nameType, string roleType)
    : this(identity, claims, authenticationType, nameType, roleType, true)
{
}
/// <summary>
/// This constructor was added so that the WindowsIdentity could control if the authenticationType should be checked. For WindowsIdentities this
/// leads to a priviledged call and will fail where the caller has low priviledge.
/// </summary>
/// <param name="identity">The initial identity to base this identity from.</param>
/// <param name="claims">An enumeration of type <see cref="Claim"/> to initialize this identity.</param>
/// <param name="authenticationType">The type of authentication used.</param>
/// <param name="nameType">The claim type to identify NameClaims.</param>
/// <param name="roleType">The claim type to identify RoleClaims.</param>
/// <param name="checkAuthType">This boolean flag controls if we blindly set the authenticationType, since call WindowsIdentity.AuthenticationType is a priviledged call.</param>
internal ClaimsIdentity(IIdentity identity, IEnumerable<Claim> claims, string authenticationType, string nameType, string roleType, bool checkAuthType)
{
    bool nameTypeSet = false;
    bool roleTypeSet = false;

    // move the authtype, nameType and roleType over from the identity ONLY if they weren't specifically set.
    if(checkAuthType && null != identity && string.IsNullOrEmpty(authenticationType))
    {
        // can safely ignore UnauthorizedAccessException from WindowsIdentity,
        // LSA didn't allow the call and WindowsIdentity throws if property is never accessed, no reason to fail.
        if (identity is WindowsIdentity)
        {
            try
            {
                m_authenticationType = identity.AuthenticationType;
            }
            catch (UnauthorizedAccessException)
            {
                m_authenticationType = null;
            }
        }
        else
        {
            m_authenticationType = identity.AuthenticationType;
        }
    }
    else
    {
        m_authenticationType = authenticationType;
    }

    if(!string.IsNullOrEmpty(nameType))
    {
        m_nameType = nameType;
        nameTypeSet = true;
    }

    if(!string.IsNullOrEmpty(roleType))
    {
        m_roleType = roleType;
        roleTypeSet = true;
    }

    ClaimsIdentity claimsIdentity = identity as ClaimsIdentity;
    if (claimsIdentity != null)
    {
        m_label = claimsIdentity.m_label;

        // give preference to parameters (the explicit nameType/roleType args win
        // over whatever the source ClaimsIdentity carries)
        if (!nameTypeSet)
        {
            m_nameType = claimsIdentity.m_nameType;
        }

        if (!roleTypeSet)
        {
            m_roleType = claimsIdentity.m_roleType;
        }

        m_bootstrapContext = claimsIdentity.m_bootstrapContext;

        if (claimsIdentity.Actor != null)
        {
            //
            // Check if the Actor is circular before copying. That check is done while setting
            // the Actor property and so not really needed here. But checking just for sanity sake
            //
            if(!IsCircular(claimsIdentity.Actor))
            {
                m_actor = claimsIdentity.Actor;
            }
            else
            {
                throw new InvalidOperationException(Environment.GetResourceString("InvalidOperationException_ActorGraphCircular"));
            }
        }

        // We can only copy over the claims we own, it is up to the derived
        // to copy over claims they own.
        // BUT we need to special case WindowsIdentity as it keeps its own claims.
        // In the case where we are not a windowsIdentity and the claimsIdentity is
        // we need to copy the claims
        if ((claimsIdentity is WindowsIdentity) && (!(this is WindowsIdentity)))
            SafeAddClaims(claimsIdentity.Claims);
        else
            SafeAddClaims(claimsIdentity.m_instanceClaims);
    }
    else
    {
        // Plain IIdentity: capture its Name as a name claim issued by the local authority.
        if (identity != null && !string.IsNullOrEmpty(identity.Name))
        {
            SafeAddClaim(new Claim(m_nameType, identity.Name, ClaimValueTypes.String, DefaultIssuer, DefaultIssuer, this));
        }
    }

    if (claims != null)
    {
        SafeAddClaims(claims);
    }
}
/// <summary>
/// Initializes an instance of <see cref="ClaimsIdentity"/> using a <see cref="BinaryReader"/>.
/// Normally the reader is constructed from the bytes returned from <see cref="WriteTo"/>
/// </summary>
/// <param name="reader">a <see cref="BinaryReader"/> pointing to a <see cref="ClaimsIdentity"/>.</param>
/// <exception cref="ArgumentNullException">if 'reader' is null.</exception>
public ClaimsIdentity(BinaryReader reader)
{
    if (reader == null)
        throw new ArgumentNullException("reader");

    Initialize(reader);
}
/// <summary>
/// Copy constructor.
/// </summary>
/// <param name="other"><see cref="ClaimsIdentity"/> to copy.</param>
/// <exception cref="ArgumentNullException">if 'other' is null.</exception>
protected ClaimsIdentity(ClaimsIdentity other)
{
    if (other == null)
    {
        throw new ArgumentNullException("other");
    }

    // Actor is deep-cloned; bootstrap context is copied by reference.
    if (other.m_actor != null)
    {
        m_actor = other.m_actor.Clone();
    }

    m_authenticationType = other.m_authenticationType;
    m_bootstrapContext = other.m_bootstrapContext;
    m_label = other.m_label;
    m_nameType = other.m_nameType;
    m_roleType = other.m_roleType;
    if (other.m_userSerializationData != null)
    {
        m_userSerializationData = other.m_userSerializationData.Clone() as byte[];
    }

    // Only instance claims are copied; external claims are intentionally not cloned.
    SafeAddClaims(other.m_instanceClaims);
}
/// <summary>
/// Initializes an instance of <see cref="ClaimsIdentity"/> from a serialized stream created via
/// <see cref="ISerializable"/>.
/// </summary>
/// <param name="info">
/// The <see cref="SerializationInfo"/> to read from.
/// </param>
/// <param name="context">The <see cref="StreamingContext"/> for serialization. Can be null.</param>
/// <exception cref="ArgumentNullException">Thrown if the <paramref name="info"/> is null.</exception>
[SecurityCritical]
protected ClaimsIdentity(SerializationInfo info, StreamingContext context)
{
    if (null == info)
    {
        throw new ArgumentNullException("info");
    }

    Deserialize(info, context, true);
}
/// <summary>
/// Initializes an instance of <see cref="ClaimsIdentity"/> from a serialized stream created via
/// <see cref="ISerializable"/>, using a default <see cref="StreamingContext"/>.
/// </summary>
/// <param name="info">
/// The <see cref="SerializationInfo"/> to read from.
/// </param>
/// <exception cref="ArgumentNullException">Thrown if the <paramref name="info"/> is null.</exception>
[SecurityCritical]
protected ClaimsIdentity(SerializationInfo info)
{
    if (null == info)
    {
        throw new ArgumentNullException("info");
    }

    StreamingContext sc = new StreamingContext();
    Deserialize(info, sc, false);
}
#endregion
/// <summary>
/// Gets the authentication type.
/// </summary>
public virtual string AuthenticationType
{
    get { return m_authenticationType; }
}

/// <summary>
/// Gets a value that indicates whether the user has been authenticated.
/// An identity counts as authenticated when it has a non-empty authentication type.
/// </summary>
public virtual bool IsAuthenticated
{
    get { return !string.IsNullOrEmpty(m_authenticationType); }
}

/// <summary>
/// Gets or sets a <see cref="ClaimsIdentity"/> that was granted delegation rights.
/// </summary>
/// <exception cref="InvalidOperationException">If setting the value would create a cycle in the Actor chain.</exception>
public ClaimsIdentity Actor
{
    get { return m_actor; }
    set
    {
        if(value != null)
        {
            if(IsCircular(value))
            {
                throw new InvalidOperationException(Environment.GetResourceString("InvalidOperationException_ActorGraphCircular"));
            }
        }
        m_actor = value;
    }
}

/// <summary>
/// Gets or sets a context that was used to create this <see cref="ClaimsIdentity"/>.
/// </summary>
public object BootstrapContext
{
    get { return m_bootstrapContext; }

    // Setter is SecurityCritical: the bootstrap context may carry the original credential.
    [SecurityCritical]
    set { m_bootstrapContext = value; }
}
/// <summary>
/// Gets the claims as <see cref="IEnumerable{Claim}"/>, associated with this <see cref="ClaimsIdentity"/>.
/// Instance claims are yielded first, followed by any externally attached claim collections.
/// </summary>
/// <remarks>May contain nulls.</remarks>
public virtual IEnumerable<Claim> Claims
{
    get
    {
        for (int i = 0; i < m_instanceClaims.Count; i++)
        {
            yield return m_instanceClaims[i];
        }

        if (m_externalClaims != null)
        {
            for (int j = 0; j < m_externalClaims.Count; j++)
            {
                if (m_externalClaims[j] != null)
                {
                    foreach (Claim claim in m_externalClaims[j])
                    {
                        yield return claim;
                    }
                }
            }
        }
    }
}
/// <summary>
/// Contains any additional data provided by a derived type, typically set when calling <see cref="WriteTo(BinaryWriter, byte[])"/>.</param>
/// </summary>
protected virtual byte[] CustomSerializationData
{
get
{
return m_userSerializationData;
}
}
/// <summary>
/// Allow the association of claims with this instance of <see cref="ClaimsIdentity"/>.
/// The claims will not be serialized or added in Clone(). They will be included in searches,
/// finds and returned from the call to Claims. It is recommended the creator of the claims
/// ensures the subject of the claims reflects this <see cref="ClaimsIdentity"/>.
/// </summary>
internal Collection<IEnumerable<Claim>> ExternalClaims
{
    [FriendAccessAllowed]
    get => m_externalClaims;
}
/// <summary>
/// Gets or sets the label for this <see cref="ClaimsIdentity"/>.
/// </summary>
public string Label
{
    get => m_label;
    set => m_label = value;
}
/// <summary>
/// Gets the value of the first claim whose type equals NameClaimType; null if no such claim exists.
/// </summary>
public virtual string Name
{
    get
    {
        // The name is simply the value of the first matching name claim.
        Claim nameClaim = FindFirst(m_nameType);
        return nameClaim == null ? null : nameClaim.Value;
    }
}
/// <summary>
/// Gets the claim type used to distinguish claims that refer to the name.
/// </summary>
public string NameClaimType => m_nameType;
/// <summary>
/// Gets the claim type used to distinguish claims that refer to roles.
/// </summary>
public string RoleClaimType => m_roleType;
/// <summary>
/// Returns a new <see cref="ClaimsIdentity"/> with values copied from this object.
/// Instance claims are copied via the claims constructor; external claims are NOT copied.
/// </summary>
/// <returns>A new <see cref="ClaimsIdentity"/> copied from this object.</returns>
/// <exception cref="InvalidOperationException">if the actor chain is circular.</exception>
public virtual ClaimsIdentity Clone()
{
    ClaimsIdentity copy = new ClaimsIdentity(m_instanceClaims);
    copy.m_authenticationType = m_authenticationType;
    copy.m_bootstrapContext = m_bootstrapContext;
    copy.m_label = m_label;
    copy.m_nameType = m_nameType;
    copy.m_roleType = m_roleType;

    if (Actor != null)
    {
        // The Actor setter performs the same circularity check when we assign below;
        // this earlier check is kept purely as a defensive sanity check.
        if (IsCircular(Actor))
        {
            throw new InvalidOperationException(Environment.GetResourceString("InvalidOperationException_ActorGraphCircular"));
        }
        copy.Actor = Actor;
    }

    return copy;
}
/// <summary>
/// Adds a single claim to this ClaimsIdentity. If the claim's Subject is not this identity,
/// a copy with the correct subject is stored instead (claim.Clone(this)).
/// </summary>
/// <param name="claim">The claim to add.</param>
/// <exception cref="ArgumentNullException">if 'claim' is null.</exception>
/// This is SecurityCritical as we need to control who can add claims to the Identity; further
/// down the pipe authorization decisions will be made based on the claims found in this collection.
[SecurityCritical]
public virtual void AddClaim(Claim claim)
{
    if (claim == null)
    {
        throw new ArgumentNullException("claim");
    }
    Contract.EndContractBlock();

    m_instanceClaims.Add(object.ReferenceEquals(claim.Subject, this) ? claim : claim.Clone(this));
}
/// <summary>
/// Adds a list of claims to this ClaimsIdentity. Null entries are skipped; each non-null
/// claim goes through <see cref="AddClaim"/>, which re-subjects foreign claims to this identity.
/// </summary>
/// <param name="claims">Enumeration of claims to add.</param>
/// <exception cref="ArgumentNullException">if 'claims' is null.</exception>
/// This is SecurityCritical as we need to control who can add claims to the Identity; further
/// down the pipe authorization decisions will be made based on the claims found in this collection.
[SecurityCritical]
public virtual void AddClaims(IEnumerable<Claim> claims)
{
    if (claims == null)
    {
        throw new ArgumentNullException("claims");
    }
    Contract.EndContractBlock();

    foreach (Claim claim in claims)
    {
        if (claim != null)
        {
            AddClaim(claim);
        }
    }
}
/// <summary>
/// Attempts to remove a claim from the identity. It is possible that the claim cannot be
/// removed since it is not owned by the identity — e.g. role claims owned by the Principal.
/// Matches by object reference.
/// </summary>
/// <returns>true if the claim was found (by reference) and removed; false otherwise.</returns>
[SecurityCritical]
public virtual bool TryRemoveClaim(Claim claim)
{
    for (int index = 0; index < m_instanceClaims.Count; index++)
    {
        if (object.ReferenceEquals(m_instanceClaims[index], claim))
        {
            m_instanceClaims.RemoveAt(index);
            return true;
        }
    }
    return false;
}
// Removes a claim (by reference) or throws if it is not owned by this identity.
[SecurityCritical]
public virtual void RemoveClaim(Claim claim)
{
    if (TryRemoveClaim(claim))
    {
        return;
    }
    throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_ClaimCannotBeRemoved", claim));
}
/// <summary>
/// Called from constructor, isolated for easy review.
/// This is called from the constructor, which implies the base class has ownership of
/// holding onto the claims. We can't call AddClaim as that is virtual and the derived
/// class may not be constructed yet.
/// </summary>
/// <param name="claims"></param>
[SecuritySafeCritical]
void SafeAddClaims(IEnumerable<Claim> claims)
{
    foreach (Claim claim in claims)
    {
        // Re-subject foreign claims to this identity, exactly as AddClaim does.
        m_instanceClaims.Add(object.ReferenceEquals(claim.Subject, this) ? claim : claim.Clone(this));
    }
}
/// <summary>
/// Called from constructor, isolated for easy review.
/// This is called from the constructor, which implies the base class has ownership of
/// holding onto the claims. We can't call AddClaim as that is virtual and the derived
/// class may not be constructed yet.
/// </summary>
/// <param name="claim"></param>
[SecuritySafeCritical]
void SafeAddClaim(Claim claim)
{
    // Re-subject a foreign claim to this identity, exactly as AddClaim does.
    m_instanceClaims.Add(object.ReferenceEquals(claim.Subject, this) ? claim : claim.Clone(this));
}
/// <summary>
/// Retrieves a <see cref="IEnumerable{Claim}"/> containing every claim matched by <paramref name="match"/>.
/// </summary>
/// <param name="match">The function that performs the matching logic.</param>
/// <returns>A read-only <see cref="IEnumerable{Claim}"/> of matched claims.</returns>
/// <exception cref="ArgumentNullException">if 'match' is null.</exception>
public virtual IEnumerable<Claim> FindAll(Predicate<Claim> match)
{
    if (match == null)
    {
        throw new ArgumentNullException("match");
    }
    Contract.EndContractBlock();

    List<Claim> matched = new List<Claim>();
    foreach (Claim claim in Claims)
    {
        if (match(claim))
        {
            matched.Add(claim);
        }
    }
    return matched.AsReadOnly();
}
/// <summary>
/// Retrieves a <see cref="IEnumerable{Claim}"/> where each Claim.Type equals <paramref name="type"/>.
/// </summary>
/// <param name="type">The type of the claim to match.</param>
/// <returns>A read-only <see cref="IEnumerable{Claim}"/> of matched claims.</returns>
/// <exception cref="ArgumentNullException">if 'type' is null.</exception>
/// <remarks>Comparison is made using Ordinal, case in-sensitive on type.</remarks>
public virtual IEnumerable<Claim> FindAll(string type)
{
    if (type == null)
    {
        throw new ArgumentNullException("type");
    }
    Contract.EndContractBlock();

    List<Claim> matched = new List<Claim>();
    foreach (Claim claim in Claims)
    {
        // Claims may contain nulls; skip them.
        if (claim != null && string.Equals(claim.Type, type, StringComparison.OrdinalIgnoreCase))
        {
            matched.Add(claim);
        }
    }
    return matched.AsReadOnly();
}
/// <summary>
/// Determines if any claim matched by <paramref name="match"/> is contained within this ClaimsIdentity.
/// </summary>
/// <param name="match">The function that performs the matching logic.</param>
/// <returns>true if a claim is found, false otherwise.</returns>
/// <exception cref="ArgumentNullException">if 'match' is null.</exception>
public virtual bool HasClaim(Predicate<Claim> match)
{
    if (match == null)
    {
        throw new ArgumentNullException("match");
    }
    Contract.EndContractBlock();

    foreach (Claim claim in Claims)
    {
        if (match(claim))
        {
            return true;
        }
    }
    return false;
}
/// <summary>
/// Determines if a claim with type AND value is contained in the claims within this ClaimsIdentity.
/// </summary>
/// <param name="type"> the type of the claim to match.</param>
/// <param name="value"> the value of the claim to match.</param>
/// <returns>true if a claim is matched, false otherwise.</returns>
/// <exception cref="ArgumentNullException">if 'type' or 'value' is null.</exception>
/// <remarks>Does not check Issuer or OriginalIssuer. Comparison is made using Ordinal, case sensitive on value, case in-sensitive on type.</remarks>
public virtual bool HasClaim(string type, string value)
{
    if (type == null)
    {
        throw new ArgumentNullException("type");
    }
    if (value == null)
    {
        throw new ArgumentNullException("value");
    }
    Contract.EndContractBlock();

    foreach (Claim claim in Claims)
    {
        // Single null guard — the original tested 'claim != null' twice in a row.
        if (claim != null
            && string.Equals(claim.Type, type, StringComparison.OrdinalIgnoreCase)
            && string.Equals(claim.Value, value, StringComparison.Ordinal))
        {
            return true;
        }
    }
    return false;
}
/// <summary>
/// Retrieves the first <see cref="Claim"/> matched by <paramref name="match"/>.
/// </summary>
/// <param name="match">The function that performs the matching logic.</param>
/// <returns>A <see cref="Claim"/>, null if nothing matches.</returns>
/// <exception cref="ArgumentNullException">if 'match' is null.</exception>
public virtual Claim FindFirst(Predicate<Claim> match)
{
    if (match == null)
    {
        throw new ArgumentNullException("match");
    }
    Contract.EndContractBlock();

    foreach (Claim candidate in Claims)
    {
        if (match(candidate))
        {
            return candidate;
        }
    }
    return null;
}
/// <summary>
/// Retrieves the first <see cref="Claim"/> where Claim.Type equals <paramref name="type"/>.
/// </summary>
/// <param name="type">The type of the claim to match.</param>
/// <returns>A <see cref="Claim"/>, null if nothing matches.</returns>
/// <exception cref="ArgumentNullException">if 'type' is null.</exception>
/// <remarks>Comparison is made using Ordinal, case in-sensitive.</remarks>
public virtual Claim FindFirst(string type)
{
    if (type == null)
    {
        throw new ArgumentNullException("type");
    }
    Contract.EndContractBlock();

    foreach (Claim candidate in Claims)
    {
        // Claims may contain nulls; skip them.
        if (candidate != null && string.Equals(candidate.Type, type, StringComparison.OrdinalIgnoreCase))
        {
            return candidate;
        }
    }
    return null;
}
// Serialization callback: before a formatter serializes this instance, capture the
// instance claims and the name/role claim types into the fields the formatter persists.
[OnSerializing()]
[SecurityCritical]
private void OnSerializingMethod(StreamingContext context)
{
    // A runtime type implementing ISerializable controls its own serialization; skip.
    if (this is ISerializable)
        return;

    m_serializedClaims = SerializeClaims();
    m_serializedNameType = m_nameType;
    m_serializedRoleType = m_roleType;
}
// Serialization callback: after a formatter deserializes this instance, rebuild the
// claim list from the serialized blob and restore (or default) the name/role claim types.
[OnDeserialized()]
[SecurityCritical]
private void OnDeserializedMethod(StreamingContext context)
{
    // A runtime type implementing ISerializable controls its own deserialization; skip.
    if (this is ISerializable)
        return;

    if (!String.IsNullOrEmpty(m_serializedClaims))
    {
        DeserializeClaims(m_serializedClaims);
        m_serializedClaims = null; // drop the blob once rehydrated
    }

    // Fall back to the defaults when the serialized stream carried no explicit types.
    m_nameType = string.IsNullOrEmpty(m_serializedNameType) ? DefaultNameClaimType : m_serializedNameType;
    m_roleType = string.IsNullOrEmpty(m_serializedRoleType) ? DefaultRoleClaimType : m_serializedRoleType;
}
// Serialization callback: before deserialization populates fields, give the claim
// collections fresh empty instances (formatters do not run constructors).
[OnDeserializing()]
private void OnDeserializingMethod(StreamingContext context)
{
    if (this is ISerializable)
        return;

    m_instanceClaims = new List<Claim>();
    m_externalClaims = new Collection<IEnumerable<Claim>>();
}
/// <summary>
/// Populates the specified <see cref="SerializationInfo"/> with the serialization data for the ClaimsIdentity.
/// Writes version, authentication type, name/role claim types and label as plain values; the actor,
/// claims and bootstrap context are BinaryFormatter-serialized and stored base64-encoded.
/// </summary>
/// <param name="info">The serialization information stream to write to. Satisfies ISerializable contract.</param>
/// <param name="context">Context for serialization. Can be null.</param>
/// <exception cref="ArgumentNullException">Thrown if the info parameter is null.</exception>
[SecurityCritical]
[SecurityPermission(SecurityAction.Assert, SerializationFormatter = true)]
protected virtual void GetObjectData(SerializationInfo info, StreamingContext context)
{
    if (null == info)
    {
        throw new ArgumentNullException("info");
    }
    Contract.EndContractBlock();

    // NOTE(review): BinaryFormatter is insecure for untrusted input and removed in
    // modern .NET; retained here for wire-format compatibility.
    BinaryFormatter formatter = new BinaryFormatter();

    info.AddValue(VersionKey, m_version);

    // Optional simple values are only written when non-empty.
    if (!string.IsNullOrEmpty(m_authenticationType))
    {
        info.AddValue(AuthenticationTypeKey, m_authenticationType);
    }

    info.AddValue(NameClaimTypeKey, m_nameType);
    info.AddValue(RoleClaimTypeKey, m_roleType);

    if (!string.IsNullOrEmpty(m_label))
    {
        info.AddValue(LabelKey, m_label);
    }

    //
    // actor — BinaryFormatter-serialized, base64-encoded
    //
    if (m_actor != null)
    {
        using (MemoryStream ms = new MemoryStream())
        {
            formatter.Serialize(ms, m_actor, null, false);
            info.AddValue(ActorKey, Convert.ToBase64String(ms.GetBuffer(), 0, (int)ms.Length));
        }
    }

    //
    // claims — serialized via SerializeClaims() (already base64)
    //
    info.AddValue(ClaimsKey, SerializeClaims());

    //
    // bootstrapContext — BinaryFormatter-serialized, base64-encoded
    //
    if (m_bootstrapContext != null)
    {
        using (MemoryStream ms = new MemoryStream())
        {
            formatter.Serialize(ms, m_bootstrapContext, null, false);
            info.AddValue(BootstrapContextKey, Convert.ToBase64String(ms.GetBuffer(), 0, (int)ms.Length));
        }
    }
}
// Rebuilds m_instanceClaims from a base64-encoded BinaryFormatter blob produced by
// SerializeClaims(). Always leaves m_instanceClaims non-null.
[SecurityCritical]
private void DeserializeClaims(string serializedClaims)
{
    if (!string.IsNullOrEmpty(serializedClaims))
    {
        using (MemoryStream stream = new MemoryStream(Convert.FromBase64String(serializedClaims)))
        {
            m_instanceClaims = (List<Claim>)(new BinaryFormatter()).Deserialize(stream, null, false);

            // Re-point each deserialized claim's Subject at this identity.
            for (int i = 0; i < m_instanceClaims.Count; i++)
            {
                m_instanceClaims[i].Subject = this;
            }
        }
    }

    if (m_instanceClaims == null)
    {
        m_instanceClaims = new List<Claim>();
    }
}
// Serializes m_instanceClaims with BinaryFormatter and returns the bytes base64-encoded.
// Inverse of DeserializeClaims().
[SecurityCritical]
private string SerializeClaims()
{
    using (MemoryStream ms = new MemoryStream())
    {
        (new BinaryFormatter()).Serialize(ms, m_instanceClaims, null, false);
        return Convert.ToBase64String(ms.GetBuffer(), 0, (int)ms.Length);
    }
}
/// <summary>
/// Checks if a circular reference back to 'this' exists anywhere in the actor chain
/// starting at <paramref name="subject"/>.
/// </summary>
/// <param name="subject">head of the chain to inspect (callers pass non-null).</param>
/// <returns>true if 'this' appears in the chain.</returns>
bool IsCircular(ClaimsIdentity subject)
{
    ClaimsIdentity node = subject;
    while (true)
    {
        if (ReferenceEquals(this, node))
        {
            return true;
        }
        node = node.Actor;
        if (node == null)
        {
            return false;
        }
    }
}
/// <summary>
/// Initializes from a <see cref="BinaryReader"/>. The reader is expected to be positioned at
/// data written by <see cref="WriteTo(BinaryWriter)"/> (same field order, mask-driven).
/// </summary>
/// <param name="reader">a <see cref="BinaryReader"/> pointing to a <see cref="ClaimsIdentity"/>.</param>
/// <exception cref="ArgumentNullException">if 'reader' is null.</exception>
private void Initialize(BinaryReader reader)
{
    if (reader == null)
    {
        throw new ArgumentNullException("reader");
    }

    // The leading Int32 is a SerializationMask bit-field saying which optional
    // fields follow, in the fixed order written by WriteTo.
    SerializationMask mask = (SerializationMask)reader.ReadInt32();

    if ((mask & SerializationMask.AuthenticationType) == SerializationMask.AuthenticationType)
    {
        m_authenticationType = reader.ReadString();
    }

    // NOTE: "BootstrapConext" (sic) is the enum member's actual name elsewhere in the file.
    if ((mask & SerializationMask.BootstrapConext) == SerializationMask.BootstrapConext)
    {
        m_bootstrapContext = reader.ReadString();
    }

    if ((mask & SerializationMask.NameClaimType) == SerializationMask.NameClaimType)
    {
        m_nameType = reader.ReadString();
    }
    else
    {
        m_nameType = ClaimsIdentity.DefaultNameClaimType;
    }

    if ((mask & SerializationMask.RoleClaimType) == SerializationMask.RoleClaimType)
    {
        m_roleType = reader.ReadString();
    }
    else
    {
        m_roleType = ClaimsIdentity.DefaultRoleClaimType;
    }

    if ((mask & SerializationMask.HasClaims) == SerializationMask.HasClaims)
    {
        // Claim count followed by that many serialized claims.
        int numberOfClaims = reader.ReadInt32();
        for (int index = 0; index < numberOfClaims; ++index)
        {
            Claim claim = new Claim(reader, this);
            m_instanceClaims.Add(claim);
        }
    }
}
/// <summary>
/// Provides an extensibility point for derived types to create a custom <see cref="Claim"/>.
/// </summary>
/// <param name="reader">the <see cref="BinaryReader"/> that points at the claim.</param>
/// <returns>a new <see cref="Claim"/> whose subject is this identity.</returns>
/// <exception cref="ArgumentNullException">if 'reader' is null.</exception>
protected virtual Claim CreateClaim(BinaryReader reader)
{
    if (reader == null)
    {
        throw new ArgumentNullException("reader");
    }

    return new Claim(reader, this);
}
/// <summary>
/// Serializes using a <see cref="BinaryWriter"/>.
/// </summary>
/// <param name="writer">the <see cref="BinaryWriter"/> to use for data storage.</param>
/// <exception cref="ArgumentNullException">if 'writer' is null.</exception>
public virtual void WriteTo(BinaryWriter writer)
{
    // Delegate to the full overload with no derived-type user data.
    WriteTo(writer, null);
}
/// <summary>
/// Serializes using a <see cref="BinaryWriter"/>. The format is: a SerializationMask bit-field,
/// a count of the properties written, then each present field in a fixed order. It must stay in
/// sync with <see cref="Initialize(BinaryReader)"/>.
/// </summary>
/// <param name="writer">the <see cref="BinaryWriter"/> to use for data storage.</param>
/// <param name="userData">additional data provided by derived type.</param>
/// <exception cref="ArgumentNullException">if 'writer' is null.</exception>
protected virtual void WriteTo(BinaryWriter writer, byte[] userData)
{
    if (writer == null)
    {
        throw new ArgumentNullException("writer");
    }

    // Pass 1: compute the mask of which optional fields are present.
    int numberOfPropertiesWritten = 0;
    var mask = SerializationMask.None;
    if (m_authenticationType != null)
    {
        mask |= SerializationMask.AuthenticationType;
        numberOfPropertiesWritten++;
    }

    // Bootstrap context is only serialized here when it is a string.
    if (m_bootstrapContext != null)
    {
        string rawData = m_bootstrapContext as string;
        if (rawData != null)
        {
            mask |= SerializationMask.BootstrapConext;
            numberOfPropertiesWritten++;
        }
    }

    // Name/role claim types are only written when they differ from the defaults.
    if (!string.Equals(m_nameType, ClaimsIdentity.DefaultNameClaimType, StringComparison.Ordinal))
    {
        mask |= SerializationMask.NameClaimType;
        numberOfPropertiesWritten++;
    }

    if (!string.Equals(m_roleType, ClaimsIdentity.DefaultRoleClaimType, StringComparison.Ordinal))
    {
        mask |= SerializationMask.RoleClaimType;
        numberOfPropertiesWritten++;
    }

    if (!string.IsNullOrWhiteSpace(m_label))
    {
        mask |= SerializationMask.HasLabel;
        numberOfPropertiesWritten++;
    }

    if (m_instanceClaims.Count > 0)
    {
        mask |= SerializationMask.HasClaims;
        numberOfPropertiesWritten++;
    }

    if (m_actor != null)
    {
        mask |= SerializationMask.Actor;
        numberOfPropertiesWritten++;
    }

    if (userData != null && userData.Length > 0)
    {
        numberOfPropertiesWritten++;
        mask |= SerializationMask.UserData;
    }

    // Pass 2: write the mask, the count, then each present field in the same order
    // that Initialize(BinaryReader) reads them.
    writer.Write((Int32)mask);
    writer.Write((Int32)numberOfPropertiesWritten);
    if ((mask & SerializationMask.AuthenticationType) == SerializationMask.AuthenticationType)
    {
        writer.Write(m_authenticationType);
    }

    if ((mask & SerializationMask.BootstrapConext) == SerializationMask.BootstrapConext)
    {
        writer.Write(m_bootstrapContext as string);
    }

    if ((mask & SerializationMask.NameClaimType) == SerializationMask.NameClaimType)
    {
        writer.Write(m_nameType);
    }

    if ((mask & SerializationMask.RoleClaimType) == SerializationMask.RoleClaimType)
    {
        writer.Write(m_roleType);
    }

    if ((mask & SerializationMask.HasLabel) == SerializationMask.HasLabel)
    {
        writer.Write(m_label);
    }

    if ((mask & SerializationMask.HasClaims) == SerializationMask.HasClaims)
    {
        // Claim count followed by each claim's own serialized form.
        writer.Write((Int32)m_instanceClaims.Count);
        foreach (var claim in m_instanceClaims)
        {
            claim.WriteTo(writer);
        }
    }

    if ((mask & SerializationMask.Actor) == SerializationMask.Actor)
    {
        m_actor.WriteTo(writer);
    }

    if ((mask & SerializationMask.UserData) == SerializationMask.UserData)
    {
        // Length-prefixed raw bytes supplied by the derived type.
        writer.Write((Int32)userData.Length);
        writer.Write(userData);
    }

    writer.Flush();
}
// Restores state from a SerializationInfo produced by GetObjectData. Unknown entries are
// ignored for forward compatibility.
// <param name="useContext"></param> The reason for this param is due to WindowsIdentity deciding to have an
// api that doesn't pass the context to its internal constructor.
[SecurityCritical]
[SecurityPermission(SecurityAction.Assert, SerializationFormatter = true)]
private void Deserialize(SerializationInfo info, StreamingContext context, bool useContext)
{
    if (null == info)
    {
        throw new ArgumentNullException("info");
    }

    // NOTE(review): BinaryFormatter deserialization of persisted data is a known security
    // risk on untrusted input; retained for compatibility with GetObjectData's format.
    BinaryFormatter bf;
    if (useContext)
        bf = new BinaryFormatter(null, context);
    else
        bf = new BinaryFormatter();

    SerializationInfoEnumerator enumerator = info.GetEnumerator();
    while (enumerator.MoveNext())
    {
        switch (enumerator.Name)
        {
            case VersionKey:
                // Read (and currently discard) the serialized version marker.
                string version = info.GetString(VersionKey);
                break;

            case AuthenticationTypeKey:
                m_authenticationType = info.GetString(AuthenticationTypeKey);
                break;

            case NameClaimTypeKey:
                m_nameType = info.GetString(NameClaimTypeKey);
                break;

            case RoleClaimTypeKey:
                m_roleType = info.GetString(RoleClaimTypeKey);
                break;

            case LabelKey:
                m_label = info.GetString(LabelKey);
                break;

            case ActorKey:
                // Actor was stored as a base64-encoded BinaryFormatter blob.
                using (MemoryStream stream = new MemoryStream(Convert.FromBase64String(info.GetString(ActorKey))))
                {
                    m_actor = (ClaimsIdentity)bf.Deserialize(stream, null, false);
                }
                break;

            case ClaimsKey:
                DeserializeClaims(info.GetString(ClaimsKey));
                break;

            case BootstrapContextKey:
                // Bootstrap context was stored as a base64-encoded BinaryFormatter blob.
                using (MemoryStream ms = new MemoryStream(Convert.FromBase64String(info.GetString(BootstrapContextKey))))
                {
                    m_bootstrapContext = bf.Deserialize(ms, null, false);
                }
                break;

            default:
                // Ignore other fields for forward compatibility.
                break;
        }
    }
}
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="SessionStateModule.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
/*
* SessionStateModule
*
* Copyright (c) 1998-2002, Microsoft Corporation
*
*/
namespace System.Web.SessionState {
using System;
using System.Threading;
using System.Collections;
using System.Configuration;
using System.IO;
using System.Web.Caching;
using System.Web.Util;
using System.Web.Configuration;
using System.Xml;
using System.Security.Cryptography;
using System.Data.SqlClient;
using System.Globalization;
using System.Security.Permissions;
using System.Text;
using System.Web.Hosting;
using System.Web.Management;
using Microsoft.Win32;
// Callback signature used to notify interested parties that a session state item has
// expired; receives the session id and the expired item.
public delegate void SessionStateItemExpireCallback(
        string id, SessionStateStoreData item);
/*
 * Pairs a SessionOnEndTarget with the session state it applies to, so the
 * Session_OnEnd notification can be posted to the thread pool as one work item.
 */
class SessionOnEndTargetWorkItem {
    readonly SessionOnEndTarget _target;
    readonly HttpSessionState _sessionState;

    internal SessionOnEndTargetWorkItem(SessionOnEndTarget target, HttpSessionState sessionState) {
        _target = target;
        _sessionState = sessionState;
    }

    // Invoked by the thread-pool callback to raise OnEnd for the captured session.
    internal void RaiseOnEndCallback() {
        _target.RaiseOnEnd(_sessionState);
    }
}
/*
 * Calls the OnSessionEnd event. We use an object other than the SessionStateModule
 * because the state of the module is unknown - it could have been disposed
 * when a session ends.
 */
class SessionOnEndTarget {
    // Number of registered Session_OnEnd handlers; OnEnd is only raised when > 0.
    internal int _sessionEndEventHandlerCount;

    internal SessionOnEndTarget() {
    }

    internal int SessionEndEventHandlerCount {
        get {
            return _sessionEndEventHandlerCount;
        }

        set {
            _sessionEndEventHandlerCount = value;
        }
    }

    // Fires Session_OnEnd for the given session, if any handlers are registered.
    internal void RaiseOnEnd(HttpSessionState sessionState) {
        Debug.Trace("SessionOnEnd", "Firing OnSessionEnd for " + sessionState.SessionID);

        if (_sessionEndEventHandlerCount > 0) {
            HttpApplicationFactory.EndSession(sessionState, this, EventArgs.Empty);
        }
    }

    // Wraps an expired store item in a session-state container and raises OnEnd,
    // either synchronously (during shutdown) or via the thread pool.
    internal void RaiseSessionOnEnd(String id, SessionStateStoreData item) {
        HttpSessionStateContainer sessionStateContainer = new HttpSessionStateContainer(
                id,
                item.Items,
                item.StaticObjects,
                item.Timeout,
                false,
                SessionStateModule.s_configCookieless,
                SessionStateModule.s_configMode,
                true);

        HttpSessionState sessionState = new HttpSessionState(sessionStateContainer);

        if (HttpRuntime.ShutdownInProgress) {
            // call directly when shutting down
            RaiseOnEnd(sessionState);
        }
        else {
            // post via thread pool
            SessionOnEndTargetWorkItem workItem = new SessionOnEndTargetWorkItem(this, sessionState);
            WorkItem.PostInternal(new WorkItemCallback(workItem.RaiseOnEndCallback));
        }
    }
}
/*
* The sesssion state module provides session state services
* for an application.
*/
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
public sealed class SessionStateModule : IHttpModule {
// Defaults applied when the corresponding config attributes are absent.
internal const string SQL_CONNECTION_STRING_DEFAULT = "data source=localhost;Integrated Security=SSPI";
internal const string STATE_CONNECTION_STRING_DEFAULT = "tcpip=loopback:42424";
internal const int TIMEOUT_DEFAULT = 20;
internal const SessionStateMode MODE_DEFAULT = SessionStateMode.InProc;

// Mutable (not readonly) — presumably overridable, e.g. from the registry via
// s_PollIntervalRegLookedUp below; confirm against the rest of the class.
private static long LOCKED_ITEM_POLLING_INTERVAL = 500; // in milliseconds
static readonly TimeSpan LOCKED_ITEM_POLLING_DELTA = new TimeSpan(250 * TimeSpan.TicksPerMillisecond);

static readonly TimeSpan DEFAULT_DBG_EXECUTION_TIMEOUT = new TimeSpan(0, 0, System.Web.Compilation.PageCodeDomTreeGenerator.DebugScriptTimeout);

// When we are using Cache to store session state (InProc and StateServer),
// can't specify a timeout value larger than 1 year because CacheEntry ctor
// will throw an exception.
internal const int MAX_CACHE_BASED_TIMEOUT_MINUTES = 365 * 24 * 60;

// NOTE(review): declared as an INSTANCE field despite the s_ (static) prefix —
// looks intentional in the original source, but worth confirming.
bool s_oneTimeInit;
static int s_timeout;
#pragma warning disable 0649
static ReadWriteSpinLock s_lock;
#pragma warning restore 0649
static bool s_trustLevelInsufficient;
static TimeSpan s_configExecutionTimeout;
static bool s_configRegenerateExpiredSessionId;
static bool s_useHostingIdentity;
internal static HttpCookieMode s_configCookieless;
internal static SessionStateMode s_configMode;

// This is used as a perf optimization for IIS7 Integrated Mode. If session state is released
// in ReleaseState, we can disable the EndRequest notification if the mode is InProc or StateServer
// because neither InProcSessionStateStore.EndRequest nor OutOfProcSessionStateStore.EndRequest
// are implemented.
static bool s_canSkipEndRequestCall;

private static bool s_PollIntervalRegLookedUp = false;
private static object s_PollIntervalRegLock = new object();
//
// Check if we can optmize for InProc case.
// Optimization details:
//
// If we are in InProc mode, and cookieless=false, in certain scenarios we
// can avoid reading the session ID from the cookies because that's an expensive operation.
// To allow that, we use s_sessionEverSet to keep track of whether we've ever created
// any session state.
//
// If no session has ever be created, we can optimize in the following two cases:
//
// Case 1: Page has disabled session state
// In BeginAcquireState, we usually read the session ID, and reset the timeout value
// of the session state. However, since no session has ever been created, we can
// skip both reading the session id and resetting the timeout.
//
// Case 2: Page has enabled session state
// In this case, we will delay reading (and creating it if not found) the session ID
// until it's really needed. (e.g. from HttpSessionStateContainer.SessionID)
//
// Please note that we optimize only if the app is using SessionIDManager
// as the session ID provider; otherwise, we do not have knowledge about
// the provider in order to optimize safely.
//
// And we will delay reading the id only if we are using cookie to store the session ID. If we
// use cookieless, in the delayed session ID creation scenario, cookieless requires a redirect,
// and it'll be bad to do that in the middle of a page execution.
//
// See the optimization discussion in the comment block above.
static bool s_allowInProcOptimization;
static bool s_sessionEverSet;

//
// Another optimization is to delay the creation of a new session state store item
// until it's needed.
static bool s_allowDelayedStateStoreItemCreation;
static HttpSessionStateContainer s_delayedSessionState = new HttpSessionStateContainer();

/* per application vars */
EventHandler _sessionStartEventHandler;
Timer _timer;
TimerCallback _timerCallback;
volatile int _timerId;
ISessionIDManager _idManager;
bool _usingAspnetSessionIdManager;   // true when the built-in SessionIDManager is in use
SessionStateStoreProviderBase _store;
bool _supportSessionExpiry;
IPartitionResolver _partitionResolver;
bool _ignoreImpersonation;
readonly SessionOnEndTarget _onEndTarget = new SessionOnEndTarget();

/* per request data goes in _rq* variables */
bool _acquireCalled;
bool _releaseCalled;
HttpSessionStateContainer _rqSessionState;
String _rqId;
bool _rqIdNew;
ISessionStateItemCollection _rqSessionItems;
HttpStaticObjectsCollection _rqStaticObjects;
bool _rqIsNewSession;
bool _rqSessionStateNotFound;
bool _rqReadonly;
HttpContext _rqContext;
HttpAsyncResult _rqAr;
SessionStateStoreData _rqItem;
object _rqLockId; // The id of its SessionStateItem ownership
// If the ownership change hands (e.g. this ownership
// times out), the lockId of the item at the store
// will change.
int _rqInCallback;
DateTime _rqLastPollCompleted;
TimeSpan _rqExecutionTimeout;
bool _rqAddedCookie;
SessionStateActions _rqActionFlags;
ImpersonationContext _rqIctx;
internal int _rqChangeImpersonationRefCount;
ImpersonationContext _rqTimerThreadImpersonationIctx;
bool _rqSupportSessionIdReissue;
/// <devdoc>
///    <para>
///       Initializes a new instance of the <see cref='System.Web.State.SessionStateModule'/>
///       class. Demands unrestricted permission, so only fully trusted code can create the module.
///    </para>
/// </devdoc>
[SecurityPermission(SecurityAction.Demand, Unrestricted=true)]
public SessionStateModule() {
}
// Out-of-proc modes (SQLServer/StateServer) require at least Medium trust;
// Off and InProc have no trust requirement (part of ASURT 124513).
static bool CheckTrustLevel(SessionStateSection config) {
    switch (config.Mode) {
        case SessionStateMode.SQLServer:
        case SessionStateMode.StateServer:
            return HttpRuntime.HasAspNetHostingPermission(AspNetHostingPermissionLevel.Medium);

        default:
            // Off, InProc, and any other mode: always allowed.
            return true;
    }
}
// Asserts Low hosting permission so custom providers can be instantiated even in
// partially trusted applications.
[AspNetHostingPermission(SecurityAction.Assert, Level=AspNetHostingPermissionLevel.Low)]
private SessionStateStoreProviderBase SecureInstantiateProvider(ProviderSettings settings) {
    return (SessionStateStoreProviderBase)ProvidersHelper.InstantiateProvider(
            settings, typeof(SessionStateStoreProviderBase));
}
// Create an instance of the custom session-state store named by the config file;
// throws ConfigurationErrorsException (pointing at the customProvider attribute)
// when the name is missing or does not match a registered provider.
SessionStateStoreProviderBase InitCustomStore(SessionStateSection config) {
    string providerName = config.CustomProvider;

    if (String.IsNullOrEmpty(providerName)) {
        throw new ConfigurationErrorsException(
                SR.GetString(SR.Invalid_session_custom_provider, providerName),
                config.ElementInformation.Properties["customProvider"].Source, config.ElementInformation.Properties["customProvider"].LineNumber);
    }

    ProviderSettings settings = config.Providers[providerName];
    if (settings == null) {
        throw new ConfigurationErrorsException(
                SR.GetString(SR.Missing_session_custom_provider, providerName),
                config.ElementInformation.Properties["customProvider"].Source, config.ElementInformation.Properties["customProvider"].LineNumber);
    }

    return SecureInstantiateProvider(settings);
}
// Instantiates and initializes the configured partition resolver, or returns null when
// none is configured. Only valid for the out-of-proc modes (StateServer/SQLServer).
IPartitionResolver InitPartitionResolver(SessionStateSection config) {
    string partitionResolverType = config.PartitionResolverType;

    if (String.IsNullOrEmpty(partitionResolverType)) {
        return null;
    }

    if (config.Mode != SessionStateMode.StateServer &&
        config.Mode != SessionStateMode.SQLServer) {
        throw new ConfigurationErrorsException(SR.GetString(SR.Cant_use_partition_resolve),
            config.ElementInformation.Properties["partitionResolverType"].Source, config.ElementInformation.Properties["partitionResolverType"].LineNumber);
    }

    Type resolverType = ConfigUtil.GetType(partitionResolverType, "partitionResolverType", config);
    ConfigUtil.CheckAssignableType(typeof(IPartitionResolver), resolverType, config, "partitionResolverType");

    IPartitionResolver resolver = (IPartitionResolver)HttpRuntime.CreatePublicInstance(resolverType);
    resolver.Initialize();
    return resolver;
}
// Creates and initializes the session ID manager: the built-in SessionIDManager when no
// custom type is configured (recording that fact for later optimizations), otherwise the
// configured ISessionIDManager implementation.
ISessionIDManager InitSessionIDManager(SessionStateSection config) {
    string sessionIDManagerType = config.SessionIDManagerType;
    ISessionIDManager manager;

    if (String.IsNullOrEmpty(sessionIDManagerType)) {
        manager = new SessionIDManager();
        _usingAspnetSessionIdManager = true;
    }
    else {
        Type managerType = ConfigUtil.GetType(sessionIDManagerType, "sessionIDManagerType", config);
        ConfigUtil.CheckAssignableType(typeof(ISessionIDManager), managerType, config, "sessionIDManagerType");
        manager = (ISessionIDManager)HttpRuntime.CreatePublicInstance(managerType);
    }

    manager.Initialize();
    return manager;
}
// Wires this module into the application pipeline and creates the configured session
// state store. No-op when session state is Off.
void InitModuleFromConfig(HttpApplication app, SessionStateSection config) {
    if (config.Mode == SessionStateMode.Off) {
        return;
    }

    // Pipeline hooks: async acquire, plus release and end-request handlers.
    app.AddOnAcquireRequestStateAsync(
            new BeginEventHandler(this.BeginAcquireState),
            new EndEventHandler(this.EndAcquireState));

    app.ReleaseRequestState += new EventHandler(this.OnReleaseState);
    app.EndRequest += new EventHandler(this.OnEndRequest);

    _partitionResolver = InitPartitionResolver(config);

    switch (config.Mode) {
        case SessionStateMode.InProc:
            // InProc/StateServer EndRequest is a no-op, so it can be skipped on IIS7+.
            if (HttpRuntime.UseIntegratedPipeline) {
                s_canSkipEndRequestCall = true;
            }
            _store = new InProcSessionStateStore();
            _store.Initialize(null, null);
            break;

#if !FEATURE_PAL // FEATURE_PAL does not enable out of proc session state
        case SessionStateMode.StateServer:
            if (HttpRuntime.UseIntegratedPipeline) {
                s_canSkipEndRequestCall = true;
            }
            _store = new OutOfProcSessionStateStore();
            ((OutOfProcSessionStateStore)_store).Initialize(null, null, _partitionResolver);
            break;

        case SessionStateMode.SQLServer:
            _store = new SqlSessionStateStore();
            ((SqlSessionStateStore)_store).Initialize(null, null, _partitionResolver);
#if DBG
            ((SqlSessionStateStore)_store).SetModule(this);
#endif
            break;
#else // !FEATURE_PAL
        case SessionStateMode.StateServer:
            throw new NotImplementedException("ROTORTODO");
            break;

        case SessionStateMode.SQLServer:
            throw new NotImplementedException("ROTORTODO");
            break;
#endif // !FEATURE_PAL

        case SessionStateMode.Custom:
            _store = InitCustomStore(config);
            break;

        default:
            break;
    }

    // We depend on SessionIDManager to manage session id
    _idManager = InitSessionIDManager(config);

    if ((config.Mode == SessionStateMode.InProc || config.Mode == SessionStateMode.StateServer) &&
        _usingAspnetSessionIdManager) {
        // If we're using InProc mode or StateServer mode, and also using our own session id module,
        // we know we don't care about impersonation in our all session state store read/write
        // and session id read/write.
        _ignoreImpersonation = true;
    }
}
// IHttpModule.Init. Performs process-wide one-time configuration under a writer
// lock (double-checked via s_oneTimeInit), then makes sure this particular module
// instance is hooked into the pipeline. Throws if the trust level is too low for
// the configured session state mode.
public void Init(HttpApplication app) {
bool initModuleCalled = false;
SessionStateSection config = RuntimeConfig.GetAppConfig().SessionState;
if (!s_oneTimeInit) {
s_lock.AcquireWriterLock();
try {
if (!s_oneTimeInit) {
InitModuleFromConfig(app, config);
initModuleCalled = true;
if (!CheckTrustLevel(config))
s_trustLevelInsufficient = true;
s_timeout = (int)config.Timeout.TotalMinutes;
s_useHostingIdentity = config.UseHostingIdentity;
// See if we can try InProc optimization. See inline doc of s_allowInProcOptimization
// for details.
if (config.Mode == SessionStateMode.InProc &&
_usingAspnetSessionIdManager) {
s_allowInProcOptimization = true;
}
if (config.Mode != SessionStateMode.Custom &&
config.Mode != SessionStateMode.Off &&
!config.RegenerateExpiredSessionId) {
s_allowDelayedStateStoreItemCreation = true;
}
s_configExecutionTimeout = RuntimeConfig.GetConfig().HttpRuntime.ExecutionTimeout;
s_configRegenerateExpiredSessionId = config.RegenerateExpiredSessionId;
s_configCookieless = config.Cookieless;
s_configMode = config.Mode;
// The last thing to set in this if-block.
// (Other threads test s_oneTimeInit without taking the lock, so it must
// only become true after every static above has been assigned.)
s_oneTimeInit = true;
Debug.Trace("SessionStateModuleInit",
"Configuration: _mode=" + config.Mode +
";Timeout=" + config.Timeout +
";CookieMode=" + config.Cookieless +
";SqlConnectionString=" + config.SqlConnectionString +
";StateConnectionString=" + config.StateConnectionString +
";s_allowInProcOptimization=" + s_allowInProcOptimization +
";s_allowDelayedStateStoreItemCreation=" + s_allowDelayedStateStoreItemCreation);
}
}
finally {
s_lock.ReleaseWriterLock();
}
}
// Instances that lost the one-time-init race still need their own pipeline hookup.
if (!initModuleCalled) {
InitModuleFromConfig(app, config);
}
if (s_trustLevelInsufficient) {
throw new HttpException(SR.GetString(SR.Session_state_need_higher_trust));
}
}
/// <devdoc>
///    <para>IHttpModule.Dispose: stops the lock-polling timer and disposes the
///    session state store provider, when either has been created.</para>
/// </devdoc>
public void Dispose() {
    var timer = _timer;
    if (timer != null) {
        ((IDisposable)timer).Dispose();
    }
    var store = _store;
    if (store != null) {
        store.Dispose();
    }
}
// Clears every per-request (_rq*) field so this module instance can service the
// next request with a clean slate. Asserts that impersonation has been fully
// unwound before the reset.
void ResetPerRequestFields() {
Debug.Assert(_rqIctx == null, "_rqIctx == null");
Debug.Assert(_rqChangeImpersonationRefCount == 0, "_rqChangeImpersonationRefCount == 0");
_rqSessionState = null;
_rqId = null;
_rqSessionItems = null;
_rqStaticObjects = null;
_rqIsNewSession = false;
// Note the non-default reset: "not found" is the assumed state until a store item is retrieved.
_rqSessionStateNotFound = true;
_rqReadonly = false;
_rqItem = null;
_rqContext = null;
_rqAr = null;
_rqLockId = null;
_rqInCallback = 0;
_rqLastPollCompleted = DateTime.MinValue;
_rqExecutionTimeout = TimeSpan.Zero;
_rqAddedCookie = false;
_rqIdNew = false;
_rqActionFlags = 0;
_rqIctx = null;
_rqChangeImpersonationRefCount = 0;
_rqTimerThreadImpersonationIctx = null;
_rqSupportSessionIdReissue = false;
}
/*
* Add a OnStart event handler.
*
* @param sessionEventHandler
*/
/// <devdoc>
///    <para>Raised (via RaiseOnStart) for a request whose session was newly
///    created. Handlers accumulate on the private _sessionStartEventHandler
///    multicast delegate rather than a compiler-generated event field.</para>
/// </devdoc>
public event EventHandler Start {
add {
_sessionStartEventHandler += value;
}
remove {
_sessionStartEventHandler -= value;
}
}
// Invokes the Start (Session_OnStart) handlers for the current request, choosing
// between the ASPCOMPAT/STA dispatch path and a direct delegate invocation.
void RaiseOnStart(EventArgs e) {
if (_sessionStartEventHandler == null)
return;
Debug.Trace("SessionStateModuleRaiseOnStart",
"Session_Start called for session id:" + _rqId);
// Session_OnStart for ASPCOMPAT pages has to be raised from an STA thread
//
if (HttpRuntime.ApartmentThreading || _rqContext.InAspCompatMode) {
#if !FEATURE_PAL // FEATURE_PAL does not enable COM
AspCompatApplicationStep.RaiseAspCompatEvent(
_rqContext,
_rqContext.ApplicationInstance,
null,
_sessionStartEventHandler,
this,
e);
#else // !FEATURE_PAL
throw new NotImplementedException ("ROTORTODO");
#endif // !FEATURE_PAL
}
else {
if (HttpContext.Current == null) {
// This can happen if it's called by a timer thread
DisposableHttpContextWrapper.SwitchContext(_rqContext);
}
_sessionStartEventHandler(this, e);
}
}
/*
* Fire the OnStart event.
*
* @param e
*/
// Thin wrapper around RaiseOnStart, kept for symmetry with the other event raisers.
void OnStart(EventArgs e) {
RaiseOnStart(e);
}
/*
* Add a OnEnd event handler.
*
* @param sessionEventHandler
*/
/// <devdoc>
///    <para>Session_End support. The first subscriber registers an expire
///    callback with the store provider (if the provider supports one, as
///    reported by SetItemExpireCallback's return value); the last
///    unsubscriber removes it. A subscriber count is kept on the shared
///    _onEndTarget helper, which also serves as the lock object.</para>
/// </devdoc>
public event EventHandler End {
add {
lock(_onEndTarget) {
if (_store != null && _onEndTarget.SessionEndEventHandlerCount == 0) {
_supportSessionExpiry = _store.SetItemExpireCallback(
new SessionStateItemExpireCallback(_onEndTarget.RaiseSessionOnEnd));
}
++_onEndTarget.SessionEndEventHandlerCount;
}
}
remove {
lock(_onEndTarget) {
--_onEndTarget.SessionEndEventHandlerCount;
// Last subscriber gone: stop the provider from firing expiry callbacks.
if (_store != null && _onEndTarget.SessionEndEventHandlerCount == 0) {
_store.SetItemExpireCallback(null);
_supportSessionExpiry = false;
}
}
}
}
/*
* Acquire session state
*/
// Async AcquireRequestState handler. Reads (or creates) the session id, fetches
// the state item from the store — possibly scheduling a poll if another request
// holds the item's lock — and completes _rqAr once done. When GetSessionStateItem
// returns false, completion happens later from PollLockedSessionCallback.
IAsyncResult BeginAcquireState(Object source, EventArgs e, AsyncCallback cb, Object extraData) {
bool requiresState;
bool isCompleted = true;
bool skipReadingId = false;
Debug.Trace("SessionStateModuleOnAcquireState", "Beginning SessionStateModule::OnAcquireState");
_acquireCalled = true;
_releaseCalled = false;
ResetPerRequestFields();
_rqContext = ((HttpApplication)source).Context;
_rqAr = new HttpAsyncResult(cb, extraData);
ChangeImpersonation(_rqContext, false);
try {
if (EtwTrace.IsTraceEnabled(EtwTraceLevel.Information, EtwTraceFlags.AppSvc)) EtwTrace.Trace(EtwTraceType.ETW_TYPE_SESSION_DATA_BEGIN, _rqContext.WorkerRequest);
/* Notify the store we are beginning to get process request */
_store.InitializeRequest(_rqContext);
/* determine if the request requires state at all */
requiresState = _rqContext.RequiresSessionState;
// SessionIDManager may need to do a redirect if cookieless setting is AutoDetect
if (_idManager.InitializeRequest(_rqContext, false, out _rqSupportSessionIdReissue)) {
_rqAr.Complete(true, null, null);
if (EtwTrace.IsTraceEnabled(EtwTraceLevel.Information, EtwTraceFlags.AppSvc)) EtwTrace.Trace(EtwTraceType.ETW_TYPE_SESSION_DATA_END, _rqContext.WorkerRequest);
return _rqAr;
}
// See if we can skip reading the session id. See inline doc of s_allowInProcOptimization
// for details.
if (s_allowInProcOptimization &&
!s_sessionEverSet &&
(!requiresState || // Case 1
!((SessionIDManager)_idManager).UseCookieless(_rqContext)) ) { // Case 2
skipReadingId = true;
#if DBG
if (!requiresState) {
// Case 1
Debug.Trace("SessionStateModuleOnAcquireState", "Skip reading id because page has disabled session state");
}
else {
// Case 2
Debug.Trace("SessionStateModuleOnAcquireState", "Delay reading id because we're using InProc optimization, and we are not using cookieless");
}
#endif
}
else {
/* Get sessionid */
_rqId = _idManager.GetSessionID(_rqContext);
Debug.Trace("SessionStateModuleOnAcquireState", "Current request id=" + _rqId);
}
if (!requiresState) {
if (_rqId == null) {
Debug.Trace("SessionStateModuleOnAcquireState",
"Handler does not require state, " +
"session id skipped or no id found, " +
"skipReadingId=" + skipReadingId +
"\nReturning from SessionStateModule::OnAcquireState");
}
else {
Debug.Trace("SessionStateModuleOnAcquireState",
"Handler does not require state, " +
"resetting timeout for SessionId=" + _rqId +
"\nReturning from SessionStateModule::OnAcquireState");
// Still need to update the sliding timeout to keep session alive.
// There is a plan to skip this for perf reason. But it was postponed to
// after Whidbey.
_store.ResetItemTimeout(_rqContext, _rqId);
}
_rqAr.Complete(true, null, null);
if (EtwTrace.IsTraceEnabled(EtwTraceLevel.Information, EtwTraceFlags.AppSvc)) EtwTrace.Trace(EtwTraceType.ETW_TYPE_SESSION_DATA_END, _rqContext.WorkerRequest);
return _rqAr;
}
_rqExecutionTimeout = _rqContext.Timeout;
// If the page is marked as DEBUG, HttpContext.Timeout will return a very large value (~1 year)
// In this case, we want to use the executionTimeout value specified in the config to avoid
// PollLockedSession to run forever.
if (_rqExecutionTimeout == DEFAULT_DBG_EXECUTION_TIMEOUT) {
_rqExecutionTimeout = s_configExecutionTimeout;
}
/* determine if we need just read-only access */
_rqReadonly = _rqContext.ReadOnlySessionState;
if (_rqId != null) {
/* get the session state corresponding to this session id */
isCompleted = GetSessionStateItem();
}
else if (!skipReadingId) {
/* if there's no id yet, create it */
bool redirected = CreateSessionId();
_rqIdNew = true;
if (redirected) {
if (s_configRegenerateExpiredSessionId) {
// See inline comments in CreateUninitializedSessionState()
CreateUninitializedSessionState();
}
_rqAr.Complete(true, null, null);
if (EtwTrace.IsTraceEnabled(EtwTraceLevel.Information, EtwTraceFlags.AppSvc)) EtwTrace.Trace(EtwTraceType.ETW_TYPE_SESSION_DATA_END, _rqContext.WorkerRequest);
return _rqAr;
}
}
if (isCompleted) {
CompleteAcquireState();
_rqAr.Complete(true, null, null);
}
return _rqAr;
}
finally {
RestoreImpersonation();
}
}
// Creates a brand new session id (or re-issues one for an expired session) and
// saves it to the response via the id manager. Returns true if saving the id
// caused a redirect (cookieless modes), in which case the caller must complete
// the request instead of continuing acquisition.
internal bool CreateSessionId() {
// CreateSessionId should be called only if:
Debug.Assert(_rqId == null || // Session id isn't found in the request, OR
(_rqSessionStateNotFound && // The session state isn't found, AND
s_configRegenerateExpiredSessionId && // We are regenerating expired session id, AND
_rqSupportSessionIdReissue && // This request supports session id re-issue, AND
!_rqIdNew), // The above three condition should imply the session id
// isn't just created, but is sent by the request.
"CreateSessionId should be called only if we're generating new id, or re-generating expired one");
Debug.Assert(_rqChangeImpersonationRefCount > 0, "Must call ChangeImpersonation first");
bool redirected;
_rqId = _idManager.CreateSessionID(_rqContext);
_idManager.SaveSessionID(_rqContext, _rqId, out redirected, out _rqAddedCookie);
return redirected;
}
// DevDiv 665141:
// Ensure ownership of the session state item here as the session ID now can be put on the wire (by Response.Flush)
// and the client can initiate a request before this one reaches OnReleaseState and thus causing a race condition.
// Note: It changes when we call into the Session Store provider. Now it may happen at BeginAcquireState instead of OnReleaseState.
internal void EnsureStateStoreItemLocked() {
    // Item is locked yet here only if this is a new session
    if (!_rqSessionStateNotFound) {
        return;
    }
    Debug.Assert(_rqId != null, "Session State ID must exist");
    Debug.Assert(_rqItem != null, "Session State item must exist");
    ChangeImpersonation(_rqContext, false);
    try {
        // Store the item if already have been created
        _store.SetAndReleaseItemExclusive(_rqContext, _rqId, _rqItem, _rqLockId, true /*_rqSessionStateNotFound*/);
        // Lock Session State Item in Session State Store
        LockSessionStateItem();
    }
    finally {
        // (A redundant "catch { throw; }" was removed here: it neither handled
        // nor augmented the exception, so try/finally alone is equivalent.)
        RestoreImpersonation();
    }
    // Mark as old session here. The SessionState is fully initialized, the item is locked
    _rqSessionStateNotFound = false;
    s_sessionEverSet = true;
}
// Called when AcquireState is done. This function will add the returned
// SessionStateStore item to the request context, raise Session_Start for new
// sessions, and — when permitted — defer store item creation entirely
// (s_allowDelayedStateStoreItemCreation) until the session is first written.
void CompleteAcquireState() {
Debug.Trace("SessionStateModuleOnAcquireState", "Item retrieved=" + (_rqItem != null).ToString(CultureInfo.InvariantCulture));
bool delayInitStateStoreItem = false;
Debug.Assert(!(s_allowDelayedStateStoreItemCreation && s_configRegenerateExpiredSessionId),
"!(s_allowDelayedStateStoreItemCreation && s_configRegenerateExpiredSessionId)");
try {
if (_rqItem != null) {
_rqSessionStateNotFound = false;
if ((_rqActionFlags & SessionStateActions.InitializeItem) != 0) {
Debug.Trace("SessionStateModuleOnAcquireState", "Initialize an uninit item");
_rqIsNewSession = true;
}
else {
_rqIsNewSession = false;
}
}
else {
_rqIsNewSession = true;
_rqSessionStateNotFound = true;
if (s_allowDelayedStateStoreItemCreation) {
Debug.Trace("SessionStateModuleOnAcquireState", "Delay creating new session state");
delayInitStateStoreItem = true;
}
// We couldn't find the session state.
if (!_rqIdNew && // If the request has a session id, that means the session state has expired
s_configRegenerateExpiredSessionId && // And we're asked to regenerate expired session
_rqSupportSessionIdReissue) { // And this request support session id reissue
// We will generate a new session id for this expired session state
bool redirected = CreateSessionId();
Debug.Trace("SessionStateModuleOnAcquireState", "Complete re-creating new id; redirected=" + redirected);
if (redirected) {
Debug.Trace("SessionStateModuleOnAcquireState", "Will redirect because we've reissued a new id and it's cookieless");
CreateUninitializedSessionState();
return;
}
}
}
if (delayInitStateStoreItem) {
// Placeholder container; the real item is created lazily on first use.
_rqSessionState = s_delayedSessionState;
}
else {
InitStateStoreItem(true);
}
// Set session state module
SessionStateUtility.AddHttpSessionStateModuleToContext(_rqContext, this, delayInitStateStoreItem);
if (_rqIsNewSession) {
Debug.Trace("SessionStateModuleOnAcquireState", "Calling OnStart");
OnStart(EventArgs.Empty);
}
}
finally {
if (EtwTrace.IsTraceEnabled(EtwTraceLevel.Information, EtwTraceFlags.AppSvc)) EtwTrace.Trace(EtwTraceType.ETW_TYPE_SESSION_DATA_END, _rqContext.WorkerRequest);
}
#if DBG
if (_rqIsNewSession) {
if (_rqId == null) {
Debug.Assert(s_allowInProcOptimization, "s_allowInProcOptimization");
Debug.Trace("SessionStateModuleOnAcquireState", "New session: session id reading is delayed"+
"\nReturning from SessionStateModule::OnAcquireState");
}
else {
Debug.Trace("SessionStateModuleOnAcquireState", "New session: SessionId= " + _rqId +
"\nReturning from SessionStateModule::OnAcquireState");
}
}
else {
Debug.Trace("SessionStateModuleOnAcquireState", "Retrieved old session, SessionId= " + _rqId +
"\nReturning from SessionStateModule::OnAcquireState");
}
#endif
}
// Persists an empty, uninitialized state item under the current session id so a
// follow-up request is not mistaken for an expired session.
void CreateUninitializedSessionState() {
Debug.Assert(_rqChangeImpersonationRefCount > 0, "Must call ChangeImpersonation first");
// When we generate a new session id in cookieless case, and if "reissueExpiredSession" is
// true, we need to generate a new temporary empty session and save it
// under the new session id, otherwise when the next request (i.e. when the browser is
// redirected back to the web server) comes in, we will think it's accessing an expired session.
_store.CreateUninitializedItem(_rqContext, _rqId, s_timeout);
}
// Builds the per-request HttpSessionStateContainer from the retrieved store item,
// creating a fresh store item first if none was fetched during acquire. When
// addToContext is true the container is attached to the current HttpContext.
internal void InitStateStoreItem(bool addToContext) {
Debug.Assert(_rqId != null || s_allowInProcOptimization, "_rqId != null || s_allowInProcOptimization");
ChangeImpersonation(_rqContext, false);
try {
if (_rqItem == null) {
Debug.Trace("InitStateStoreItem", "Creating new session state");
_rqItem = _store.CreateNewStoreData(_rqContext, s_timeout);
}
_rqSessionItems = _rqItem.Items;
if (_rqSessionItems == null) {
throw new HttpException(SR.GetString(SR.Null_value_for_SessionStateItemCollection));
}
// No check for null because we allow our custom provider to return a null StaticObjects.
_rqStaticObjects = _rqItem.StaticObjects;
// Start clean: nothing is dirty until user code touches the collection.
_rqSessionItems.Dirty = false;
_rqSessionState = new HttpSessionStateContainer(
this,
_rqId, // could be null if we're using InProc optimization
_rqSessionItems,
_rqStaticObjects,
_rqItem.Timeout,
_rqIsNewSession,
s_configCookieless,
s_configMode,
_rqReadonly);
if (addToContext) {
SessionStateUtility.AddHttpSessionStateToContext(_rqContext, _rqSessionState);
}
}
finally {
RestoreImpersonation();
}
}
// Used for InProc session id optimization
// Reads the session id that BeginAcquireState deliberately skipped; creates one
// if the request carried none. Only legal on the InProc-optimized, non-cookieless
// path (creating an id can therefore never redirect — asserted below).
internal string DelayedGetSessionId() {
Debug.Assert(s_allowInProcOptimization, "Shouldn't be called if we don't allow InProc optimization");
Debug.Assert(_rqId == null, "Shouldn't be called if we already have the id");
Debug.Assert(!((SessionIDManager)_idManager).UseCookieless(_rqContext), "We can delay session id only if we are not using cookieless");
Debug.Trace("DelayedOperation", "Delayed getting session id");
bool redirected;
ChangeImpersonation(_rqContext, false);
try {
_rqId = _idManager.GetSessionID(_rqContext);
if (_rqId == null) {
Debug.Trace("DelayedOperation", "Delayed creating session id");
redirected = CreateSessionId();
Debug.Assert(!redirected, "DelayedGetSessionId shouldn't redirect us here.");
}
}
finally {
RestoreImpersonation();
}
return _rqId;
}
// Re-takes the exclusive lock on the current session state item (writable
// requests only). Called from EnsureStateStoreItemLocked right after the item
// has been written back to the store.
void LockSessionStateItem() {
bool locked;
TimeSpan lockAge;
Debug.Assert(_rqId != null, "_rqId != null");
Debug.Assert(_rqChangeImpersonationRefCount > 0, "Must call ChangeImpersonation first");
if (!_rqReadonly) {
// storedItem is consumed only by the Debug.Assert below; in retail builds
// the call is made purely for its locking side effect.
SessionStateStoreData storedItem = _store.GetItemExclusive(_rqContext, _rqId, out locked, out lockAge, out _rqLockId, out _rqActionFlags);
Debug.Assert(storedItem != null, "Must succeed in locking session state item.");
}
}
// Fetches the state item for _rqId from the store — shared or exclusive access
// depending on _rqReadonly. Returns true when acquisition can complete now;
// returns false after scheduling PollLockedSession when another request holds
// the item's lock.
bool GetSessionStateItem() {
bool isCompleted = true;
bool locked;
TimeSpan lockAge;
Debug.Assert(_rqId != null, "_rqId != null");
Debug.Assert(_rqChangeImpersonationRefCount > 0, "Must call ChangeImpersonation first");
if (_rqReadonly) {
_rqItem = _store.GetItem(_rqContext, _rqId, out locked, out lockAge, out _rqLockId, out _rqActionFlags);
}
else {
_rqItem = _store.GetItemExclusive(_rqContext, _rqId, out locked, out lockAge, out _rqLockId, out _rqActionFlags);
// DevDiv Bugs 146875: WebForm and WebService Session Access Concurrency Issue
// If we have an expired session, we need to insert the state in the store here to
// ensure serialized access in case more than one entity requests it simultaneously.
// If the state has already been created before, CreateUninitializedSessionState is a no-op.
if (_rqItem == null && locked == false && _rqId != null) {
if (!(s_configCookieless == HttpCookieMode.UseUri && s_configRegenerateExpiredSessionId == true)) {
CreateUninitializedSessionState();
_rqItem = _store.GetItemExclusive(_rqContext, _rqId, out locked, out lockAge, out _rqLockId, out _rqActionFlags);
}
}
}
// We didn't get it because it's locked....
if (_rqItem == null && locked) {
// If the current holder has held the lock longer than this request's
// execution timeout, force the lock free so the session can't be wedged.
if (lockAge >= _rqExecutionTimeout) {
/* Release the lock on the item, which is held by another thread*/
Debug.Trace("SessionStateModuleOnAcquireState",
"Lock timed out, lockAge=" + lockAge +
", id=" + _rqId);
_store.ReleaseItemExclusive(_rqContext, _rqId, _rqLockId);
}
Debug.Trace("SessionStateModuleOnAcquireState",
"Item is locked, will poll, id=" + _rqId);
isCompleted = false;
PollLockedSession();
}
return isCompleted;
}
// Starts (if not already running) the timer that periodically re-tries
// GetSessionStateItem while another request holds the item's lock.
void PollLockedSession() {
if (_timerCallback == null) {
_timerCallback = new TimerCallback(this.PollLockedSessionCallback);
}
if (_timer == null) {
// New timer generation: stale callbacks compare their captured id against
// _timerId and bail out (see PollLockedSessionCallback / ResetPollTimer).
_timerId++;
#if DBG
if (!Debug.IsTagPresent("Timer") || Debug.IsTagEnabled("Timer"))
#endif
{
if (!s_PollIntervalRegLookedUp)
LookUpRegForPollInterval();
_timer = new Timer(_timerCallback, _timerId, LOCKED_ITEM_POLLING_INTERVAL, LOCKED_ITEM_POLLING_INTERVAL);
}
}
}
[RegistryPermission(SecurityAction.Assert, Unrestricted = true)]
private static void LookUpRegForPollInterval() {
    // Reads an optional machine-wide override for how often a locked session
    // state item is polled. The result is cached for the process lifetime.
    lock (s_PollIntervalRegLock) {
        if (s_PollIntervalRegLookedUp)
            return;
        try {
            object o = Registry.GetValue(@"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\ASP.NET", "SessionStateLockedItemPollInterval", 0);
            if (o is int && ((int)o) > 0) {
                LOCKED_ITEM_POLLING_INTERVAL = (int)o;
            }
            else if (o is uint && ((uint)o) > 0 && ((uint)o) <= (uint)Int32.MaxValue) {
                // Bug fix: the value must be unboxed as uint first. The previous
                // code cast the boxed uint straight to int, which throws
                // InvalidCastException and silently discarded the setting (the
                // exception was swallowed below and the flag was never set,
                // causing the registry to be re-read on every poll).
                LOCKED_ITEM_POLLING_INTERVAL = (int)(uint)o;
            }
            s_PollIntervalRegLookedUp = true;
        }
        catch { // ignore exceptions
        }
    }
}
// Invalidates any outstanding poll callbacks and stops the polling timer.
void ResetPollTimer() {
    // Bumping the generation id turns stale PollLockedSessionCallback
    // invocations into no-ops.
    _timerId++;
    var timer = _timer;
    if (timer == null)
        return;
    _timer = null;
    ((IDisposable)timer).Dispose();
}
// Enters the impersonation context appropriate for session state work and bumps
// a ref count that RestoreImpersonation later unwinds. timerThread distinguishes
// calls from PollLockedSessionCallback (which must re-adopt the request's client
// identity) from ordinary request-thread calls.
void ChangeImpersonation(HttpContext context, bool timerThread) {
#if !FEATURE_PAL // FEATURE_PAL doesn't enable impersonation
_rqChangeImpersonationRefCount++;
if (_ignoreImpersonation) {
return;
}
// If SQL store isn't using integrated security, and we're using our own session id module,
// we know we don't care about impersonation in our all session state store read/write
// and session id read/write.
if (s_configMode == SessionStateMode.SQLServer &&
((SqlSessionStateStore)_store).KnowForSureNotUsingIntegratedSecurity &&
_usingAspnetSessionIdManager) {
return;
}
// Please note that there are two types of calls coming in. One is from a request thread,
// where timerThread==false; the other is from PollLockedSessionCallback, where
// timerThread==true.
if (s_useHostingIdentity) {
// If we're told to use Application Identity, in each case we should impersonate,
// if not called yet.
if (_rqIctx == null) {
_rqIctx = new ApplicationImpersonationContext();
}
}
else {
if (timerThread) {
// For the timer thread, we should explicitly impersonate back to what the HttpContext was
// originally impersonating.
_rqTimerThreadImpersonationIctx = new ClientImpersonationContext(context, false);
}
else {
// For a request thread, if we're told to not use hosting id, there's no need
// to do anything special.
Debug.Assert(_rqIctx == null, "_rqIctx == null");
return;
}
}
#endif // !FEATURE_PAL
}
// Decrements the impersonation ref count and, once it reaches zero, undoes
// whichever impersonation context ChangeImpersonation established (the two
// kinds are mutually exclusive — asserted below).
void RestoreImpersonation() {
Debug.Assert(_rqChangeImpersonationRefCount != 0, "_rqChangeImpersonationRefCount != 0");
_rqChangeImpersonationRefCount--;
if (_rqChangeImpersonationRefCount == 0) {
Debug.Assert(!(_rqIctx != null && _rqTimerThreadImpersonationIctx != null), "Should not have mixed mode of impersonation");
if (_rqIctx != null) {
_rqIctx.Undo();
_rqIctx = null;
}
if (_rqTimerThreadImpersonationIctx != null) {
Debug.Assert(_rqContext != null, "_rqContext != null");
_rqTimerThreadImpersonationIctx.Undo();
_rqTimerThreadImpersonationIctx = null;
}
}
}
// Timer callback that re-attempts to acquire a locked session state item.
// Reentrancy is prevented via the _rqInCallback interlocked flag; stale timer
// generations are filtered by comparing the id baked into 'state' with _timerId.
void PollLockedSessionCallback(object state) {
Debug.Assert(_rqId != null, "_rqId != null");
Debug.Trace("SessionStateModuleOnAcquireState",
"Polling callback called from timer, id=" + _rqId);
bool isCompleted = false;
Exception error = null;
/* check whether we are currently in a callback */
if (Interlocked.CompareExchange(ref _rqInCallback, 1, 0) != 0)
return;
try {
/*
* check whether this callback is for the current request,
* and whether sufficient time has passed since the last poll
* to try again.
*/
int timerId = (int) state;
if ( (timerId == _timerId) &&
(DateTime.UtcNow - _rqLastPollCompleted >= LOCKED_ITEM_POLLING_DELTA)) {
ChangeImpersonation(_rqContext, true);
try {
isCompleted = GetSessionStateItem();
_rqLastPollCompleted = DateTime.UtcNow;
if (isCompleted) {
Debug.Assert(_timer != null, "_timer != null");
ResetPollTimer();
CompleteAcquireState();
}
}
finally {
RestoreImpersonation();
}
}
}
catch (Exception e) {
ResetPollTimer();
error = e;
}
finally {
Interlocked.Exchange(ref _rqInCallback, 0);
}
// Complete the async result outside the try/catch; 'false' marks the
// completion as asynchronous, and any captured error is propagated.
if (isCompleted || error != null) {
_rqAr.Complete(false, null, error);
}
}
// Completion handler for the async AcquireRequestState subscription; ends the
// HttpAsyncResult produced by BeginAcquireState.
void EndAcquireState(IAsyncResult ar) {
((HttpAsyncResult)ar).End();
}
// Called by OnReleaseState to get the session id.
// With the InProc optimization the id read may have been skipped during acquire;
// in that case read (and if necessary create) the id now.
string ReleaseStateGetSessionID() {
if (_rqId == null) {
Debug.Assert(s_allowInProcOptimization, "s_allowInProcOptimization");
DelayedGetSessionId();
}
Debug.Assert(_rqId != null, "_rqId != null");
return _rqId;
}
/*
* Release session state
*/
/// <devdoc>
///    <para>ReleaseRequestState handler: writes the session back to the store
///    (or removes/releases it) according to what happened during the request —
///    untouched new session, abandoned session, dirty writable session, or
///    read-only access — then detaches the state from the context.</para>
/// </devdoc>
void OnReleaseState(Object source, EventArgs eventArgs) {
HttpApplication app;
HttpContext context;
bool setItemCalled = false;
Debug.Trace("SessionStateOnReleaseState", "Beginning SessionStateModule::OnReleaseState");
Debug.Assert(!(_rqAddedCookie && !_rqIsNewSession),
"If session id was added to the cookie, it must be a new session.");
// !!!
// Please note that due to InProc session id optimization, this function should not
// use _rqId directly because it can still be null. Instead, use DelayedGetSessionId().
_releaseCalled = true;
app = (HttpApplication)source;
context = app.Context;
ChangeImpersonation(context, false);
try {
if (_rqSessionState != null) {
bool delayedSessionState = (_rqSessionState == s_delayedSessionState);
Debug.Trace("SessionStateOnReleaseState", "Remove session state from context");
SessionStateUtility.RemoveHttpSessionStateFromContext(_rqContext, delayedSessionState);
/*
* Don't store untouched new sessions.
*/
if (
// The store doesn't have the session state.
// ( Please note we aren't checking _rqIsNewSession because _rqIsNewSession
// is also true if the item is converted from temp to perm in a GetItemXXX() call.)
_rqSessionStateNotFound
// OnStart is not defined
&& _sessionStartEventHandler == null
// Nothing has been stored in session state
&& (delayedSessionState || !_rqSessionItems.Dirty)
&& (delayedSessionState || _rqStaticObjects == null || _rqStaticObjects.NeverAccessed)
) {
Debug.Trace("SessionStateOnReleaseState", "Not storing unused new session.");
}
else if (_rqSessionState.IsAbandoned) {
Debug.Trace("SessionStateOnReleaseState", "Removing session due to abandonment, SessionId=" + _rqId);
if (_rqSessionStateNotFound) {
// The store provider doesn't have it, and so we don't need to remove it from the store.
// However, if the store provider supports session expiry, and we have a Session_End in global.asax,
// we need to explicitly call Session_End.
if (_supportSessionExpiry) {
if (delayedSessionState) {
Debug.Assert(s_allowDelayedStateStoreItemCreation, "s_allowDelayedStateStoreItemCreation");
Debug.Assert(_rqItem == null, "_rqItem == null");
InitStateStoreItem(false /*addToContext*/);
}
_onEndTarget.RaiseSessionOnEnd(ReleaseStateGetSessionID(), _rqItem);
}
}
else {
Debug.Assert(_rqItem != null, "_rqItem cannot null if it's not a new session");
// Remove it from the store because the session is abandoned.
_store.RemoveItem(_rqContext, ReleaseStateGetSessionID(), _rqLockId, _rqItem);
}
}
else if (!_rqReadonly ||
(_rqReadonly &&
_rqIsNewSession &&
_sessionStartEventHandler != null &&
!SessionIDManagerUseCookieless)) {
// We need to save it since it isn't read-only
// See Dev10 588711: Issuing a redirect from inside of Session_Start event
// triggers an infinite loop when using pages with read-only session state
// We save it only if there is no error, and if something has changed (unless it's a new session)
if ( context.Error == null // no error
&& ( _rqSessionStateNotFound
|| _rqSessionItems.Dirty // SessionItems has changed.
|| (_rqStaticObjects != null && !_rqStaticObjects.NeverAccessed) // Static objects have been accessed
|| _rqItem.Timeout != _rqSessionState.Timeout // Timeout value has changed
)
) {
if (delayedSessionState) {
Debug.Assert(_rqIsNewSession, "Saving a session and delayedSessionState is true: _rqIsNewSession must be true");
Debug.Assert(s_allowDelayedStateStoreItemCreation, "Saving a session and delayedSessionState is true: s_allowDelayedStateStoreItemCreation");
Debug.Assert(_rqItem == null, "Saving a session and delayedSessionState is true: _rqItem == null");
InitStateStoreItem(false /*addToContext*/);
}
#if DBG
if (_rqSessionItems.Dirty) {
Debug.Trace("SessionStateOnReleaseState", "Setting new session due to dirty SessionItems, SessionId=" + _rqId);
}
else if (_rqStaticObjects != null && !_rqStaticObjects.NeverAccessed) {
Debug.Trace("SessionStateOnReleaseState", "Setting new session due to accessed Static Objects, SessionId=" + _rqId);
}
else if (_rqSessionStateNotFound) {
Debug.Trace("SessionStateOnReleaseState", "Setting new session because it's not found, SessionId=" + _rqId);
}
else {
Debug.Trace("SessionStateOnReleaseState", "Setting new session due to options change, SessionId=" + _rqId +
"\n\t_rq.timeout=" + _rqItem.Timeout.ToString(CultureInfo.InvariantCulture) +
", _rqSessionState.timeout=" + _rqSessionState.Timeout.ToString(CultureInfo.InvariantCulture));
}
#endif
if (_rqItem.Timeout != _rqSessionState.Timeout) {
_rqItem.Timeout = _rqSessionState.Timeout;
}
s_sessionEverSet = true;
setItemCalled = true;
_store.SetAndReleaseItemExclusive(_rqContext, ReleaseStateGetSessionID(), _rqItem, _rqLockId, _rqSessionStateNotFound);
}
else {
// Can't save it because of various reason. Just release our exclusive lock on it.
Debug.Trace("SessionStateOnReleaseState", "Release exclusive lock on session, SessionId=" + _rqId);
if (!_rqSessionStateNotFound) {
Debug.Assert(_rqItem != null, "_rqItem cannot null if it's not a new session");
_store.ReleaseItemExclusive(_rqContext, ReleaseStateGetSessionID(), _rqLockId);
}
}
}
#if DBG
else {
Debug.Trace("SessionStateOnReleaseState", "Session is read-only, ignoring SessionId=" + _rqId);
}
#endif
Debug.Trace("SessionStateOnReleaseState", "Returning from SessionStateModule::OnReleaseState");
}
// If the id cookie was added but the session was never saved, take the
// cookie back off the (still buffered) response.
if (_rqAddedCookie && !setItemCalled && context.Response.IsBuffered()) {
_idManager.RemoveSessionID(_rqContext);
}
}
finally {
RestoreImpersonation();
}
// WOS 1679798: PERF: Session State Module should disable EndRequest on successful cleanup
bool implementsIRequiresSessionState = context.RequiresSessionState;
if (HttpRuntime.UseIntegratedPipeline
&& (context.NotificationContext.CurrentNotification == RequestNotification.ReleaseRequestState)
&& (s_canSkipEndRequestCall || !implementsIRequiresSessionState)) {
context.DisableNotifications(RequestNotification.EndRequest, 0 /*postNotifications*/);
_acquireCalled = false;
_releaseCalled = false;
ResetPerRequestFields();
}
}
/*
* End of request processing. Possibly does release if skipped due to errors
*/
/// <devdoc>
///    <para>EndRequest handler. If release was skipped (e.g. the request
///    short-circuited on an error) it is performed here; if acquire never ran,
///    the session's sliding timeout is refreshed instead. Always notifies the
///    store that the request is finished and resets per-request state.</para>
/// </devdoc>
void OnEndRequest(Object source, EventArgs eventArgs) {
HttpApplication app;
HttpContext context;
String id;
Debug.Trace("SessionStateOnEndRequest", "Beginning SessionStateModule::OnEndRequest");
app = (HttpApplication)source;
context = app.Context;
/* determine if the request requires state at all */
if (!context.RequiresSessionState) {
return;
}
ChangeImpersonation(context, false);
try {
if (!_releaseCalled) {
if (_acquireCalled) {
/*
* need to do release here if the request short-circuited due to an error
*/
OnReleaseState(source, eventArgs);
}
else {
/*
* 'advise' -- update session timeout
*/
if (_rqContext == null) {
_rqContext = context;
}
// We haven't called BeginAcquireState. So we have to call these InitializeRequest
// methods here.
bool dummy;
_store.InitializeRequest(_rqContext);
_idManager.InitializeRequest(_rqContext, true, out dummy);
id = _idManager.GetSessionID(context);
if (id != null) {
Debug.Trace("SessionStateOnEndRequest", "Resetting timeout for SessionId=" + id);
_store.ResetItemTimeout(context, id);
}
#if DBG
else {
Debug.Trace("SessionStateOnEndRequest", "No session id found.");
}
#endif
}
}
/* Notify the store we are finishing a request */
_store.EndRequest(_rqContext);
}
finally {
_acquireCalled = false;
_releaseCalled = false;
RestoreImpersonation();
ResetPerRequestFields();
}
Debug.Trace("SessionStateOnEndRequest", "Returning from SessionStateModule::OnEndRequest");
}
// Resolves a session state connection string attribute (possibly indirected
// through the registry — see ConfigsHelper.GetRegistryStringAttribute) and
// validates it via HandlerBase.
internal static void ReadConnectionString(SessionStateSection config, ref string cntString, string propName) {
ConfigsHelper.GetRegistryStringAttribute(ref cntString, config, propName);
HandlerBase.CheckAndReadConnectionString(ref cntString, true);
}
// See VSWhidbey 399907
// True when the effective session id transport is cookieless (URI-embedded).
internal bool SessionIDManagerUseCookieless {
    get {
        // With our own SessionIDManager, ask it directly (it can auto-detect);
        // with a custom manager, fall back to the configured cookie mode.
        return _usingAspnetSessionIdManager
            ? ((SessionIDManager)_idManager).UseCookieless(_rqContext)
            : s_configCookieless == HttpCookieMode.UseUri;
    }
}
// DevDiv Bugs 151914: Release session state before executing child request
internal void EnsureReleaseState(HttpApplication app) {
    if (!HttpRuntime.UseIntegratedPipeline || !_acquireCalled || _releaseCalled)
        return;
    try {
        OnReleaseState(app, null);
    }
    catch {
        // Deliberate best effort: failures while releasing state ahead of a
        // child request are ignored, matching the original behavior.
    }
}
}
}
| |
//
// Rectangle.cs
//
// Author:
// Lluis Sanchez <[email protected]>
//
// Copyright (c) 2011 Xamarin Inc
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.Collections;
using System.Globalization;
namespace Xwt
{
[Serializable]
public struct Rectangle
{
public double X { get; set; }
public double Y { get; set; }
public double Width { get; set; }
public double Height { get; set; }
public static Rectangle Zero = new Rectangle ();
public override string ToString ()
{
return String.Format ("{{X={0} Y={1} Width={2} Height={3}}}", X.ToString (CultureInfo.InvariantCulture), Y.ToString (CultureInfo.InvariantCulture), Width.ToString (CultureInfo.InvariantCulture), Height.ToString (CultureInfo.InvariantCulture));
}
// constructors
public Rectangle (double x, double y, double width, double height): this ()
{
X = x;
Y = y;
Width = width;
Height = height;
}
public Rectangle (Point loc, Size sz) : this (loc.X, loc.Y, sz.Width, sz.Height) {}
public static Rectangle FromLTRB (double left, double top, double right, double bottom)
{
return new Rectangle (left, top, right - left, bottom - top);
}
// Equality
public override bool Equals (object o)
{
if (!(o is Rectangle))
return false;
return (this == (Rectangle) o);
}
public override int GetHashCode ()
{
unchecked {
var hash = X.GetHashCode ();
hash = (hash * 397) ^ Y.GetHashCode ();
hash = (hash * 397) ^ Width.GetHashCode ();
hash = (hash * 397) ^ Height.GetHashCode ();
return hash;
}
}
public static bool operator == (Rectangle r1, Rectangle r2)
{
return ((r1.Location == r2.Location) && (r1.Size == r2.Size));
}
public static bool operator != (Rectangle r1, Rectangle r2)
{
return !(r1 == r2);
}
// Hit Testing / Intersection / Union
public bool Contains (Rectangle rect)
{
return X <= rect.X && Right >= rect.Right && Y <= rect.Y && Bottom >= rect.Bottom;
}
public bool Contains (Point pt)
{
return Contains (pt.X, pt.Y);
}
public bool Contains (double x, double y)
{
return ((x >= Left) && (x < Right) &&
(y >= Top) && (y < Bottom));
}
public bool IntersectsWith (Rectangle r)
{
return !((Left >= r.Right) || (Right <= r.Left) ||
(Top >= r.Bottom) || (Bottom <= r.Top));
}
public Rectangle Union (Rectangle r)
{
return Union (this, r);
}
public static Rectangle Union (Rectangle r1, Rectangle r2)
{
return FromLTRB (Math.Min (r1.Left, r2.Left),
Math.Min (r1.Top, r2.Top),
Math.Max (r1.Right, r2.Right),
Math.Max (r1.Bottom, r2.Bottom));
}
public Rectangle Intersect (Rectangle r)
{
return Intersect (this, r);
}
public static Rectangle Intersect (Rectangle r1, Rectangle r2)
{
var x = Math.Max (r1.X, r2.X);
var y = Math.Max (r1.Y, r2.Y);
var width = Math.Min (r1.Right, r2.Right) - x;
var height = Math.Min (r1.Bottom, r2.Bottom) - y;
if (width < 0 || height < 0)
{
return Rectangle.Zero;
}
return new Rectangle (x, y, width, height);
}
// Position/Size
public double Top {
get { return Y; }
set { Y = value; }
}
public double Bottom {
get { return Y + Height; }
set { Height = value - Y; }
}
public double Right {
get { return X + Width; }
set { Width = value - X; }
}
public double Left {
get { return X; }
set { X = value; }
}
public bool IsEmpty {
get { return (Width <= 0) || (Height <= 0); }
}
public Size Size {
get {
return new Size (Width, Height);
}
set {
Width = value.Width;
Height = value.Height;
}
}
public Point Location {
get {
return new Point (X, Y);
}
set {
X = value.X;
Y = value.Y;
}
}
public Point Center {
get {
return new Point (X + Width / 2, Y + Height / 2);
}
}
// Inflate and Offset
public Rectangle Inflate (Size sz)
{
return Inflate (sz.Width, sz.Height);
}
public Rectangle Inflate (double width, double height)
{
Rectangle r = this;
r.X -= width;
r.Y -= height;
r.Width += width * 2;
r.Height += height * 2;
return r;
}
public Rectangle Offset (double dx, double dy)
{
Rectangle r = this;
r.X += dx;
r.Y += dy;
return r;
}
public Rectangle Offset (Point dr)
{
return Offset (dr.X, dr.Y);
}
public Rectangle Round ()
{
return new Rectangle (
Math.Round (X),
Math.Round (Y),
Math.Round (Width),
Math.Round (Height)
);
}
/// <summary>
/// Returns a copy of the rectangle, ensuring that the width and height are greater or equal to zero
/// </summary>
/// <returns>The new rectangle</returns>
public Rectangle WithPositiveSize ()
{
return new Rectangle (
X,
Y,
Width >= 0 ? Width : 0,
Height >= 0 ? Height : 0
);
}
}
}
| |
using System;
using System.Collections.Generic;
namespace Wc3o {
	// Central static service hub for the game: holds the global GameData/PortalData
	// stores, the Logger and Ticker singletons, and lookup/formatting/UI helpers used
	// by the web pages.  NOTE(review): all state is process-global and unsynchronized -
	// confirm the hosting model serializes access (System.Random, for one, is not
	// thread-safe).
	public static class Game {
		#region " Game Data "
		static GameData gameData;
		// Global game state (players, sectors, ...); assigned at application start-up.
		public static GameData GameData {
			get {
				return gameData;
			}
			set {
				gameData = value;
			}
		}
		#endregion
		#region " Portal Data "
		static PortalData portalData;
		// Portal (site-level) state; assigned at application start-up.
		public static PortalData PortalData {
			get {
				return portalData;
			}
			set {
				portalData = value;
			}
		}
		#endregion
		#region " Properties "
		// Shared RNG for game logic (single instance avoids identical seeds).
		static Random random = new Random();
		public static Random Random {
			get {
				return Game.random;
			}
		}
		// Graphics theme path for the current request: the authenticated player's
		// choice when available, otherwise the configured default.
		public static string Gfx {
			get {
				if (System.Web.HttpContext.Current.User.Identity.IsAuthenticated) {
					Player p = CurrentPlayer;
					if (p != null)
						return p.Gfx;
				}
				return Configuration.Default_Gfx_Path;
			}
		}
		#endregion
		#region " Logger "
		static Logger logger;
		// Application-wide logger; assigned at start-up.
		public static Logger Logger {
			get {
				return logger;
			}
			set {
				logger = value;
			}
		}
		#endregion
		#region " Ticker "
		static Ticker ticker;
		// Background game-tick driver; assigned at start-up.
		public static Ticker Ticker {
			get {
				return ticker;
			}
			set {
				ticker = value;
			}
		}
		#endregion
		#region " Email/Message methods "
		// Sends a plain-text mail through the configured SMTP server.  Failures are
		// logged and swallowed so a mail outage never breaks game flow.
		// NOTE(review): UTF-7 body encoding is obsolete and insecure (SYSLIB0001) -
		// consider UTF-8.
		public static void SendEmail(string recipient, string subject, string body) {
			System.Net.Mail.MailMessage mail = new System.Net.Mail.MailMessage();
			mail.BodyEncoding = System.Text.Encoding.UTF7;
			mail.IsBodyHtml = false;
			mail.From = new System.Net.Mail.MailAddress(Configuration.Email_From_Address);
			mail.To.Add(new System.Net.Mail.MailAddress(recipient));
			mail.Subject = subject;
			mail.Body = body;
			System.Net.Mail.SmtpClient smtp = new System.Net.Mail.SmtpClient(Configuration.Email_Server);
			smtp.Credentials = new System.Net.NetworkCredential(Configuration.Email_Username, Configuration.Email_Password);
			try {
				smtp.Send(mail);
			} catch {
				Game.Logger.Log("An error occured while sending an email to '" + recipient + "'.");
			}
		}
		#endregion
		#region " Finder methods "
		// Linear scan for a sector by exact (case-sensitive) name; null when absent.
		public static Sector GetSectorByName(string name) {
			foreach (Sector s in Game.GameData.Sectors.Values)
				if (s.Name == name)
					return s;
			return null;
		}
		// Linear scan for a player by e-mail, case-insensitive; null when absent.
		public static Player GetPlayerByEmail(string email) {
			foreach (Player p in Game.GameData.Players.Values)
				if (p.Email.ToLower() == email.ToLower())
					return p;
			return null;
		}
		// All players currently assigned to the given league number.
		public static List<Player> GetPlayers(int league) {
			List<Player> l = new List<Player>();
			foreach (Player p in GameData.Players.Values)
				if (p.League == league)
					l.Add(p);
			return l;
		}
		// Player record for the authenticated user of the current request.  Any
		// lookup failure (unknown/deleted account) signs the user out and yields null.
		public static Player CurrentPlayer {
			get {
				try {
					return Game.GameData.Players[System.Web.HttpContext.Current.User.Identity.Name];
				} catch {
					System.Web.Security.FormsAuthentication.SignOut();
				}
				return null;
			}
		}
		// Sector selected for this session: a "Sector" query-string coordinate wins
		// and is cached in session state; otherwise the cached value, defaulting to
		// the player's first sector.  NOTE(review): a malformed coordinate or a
		// player without sectors would throw here - confirm callers guarantee both.
		public static Sector CurrentSector {
			get {
				if (System.Web.HttpContext.Current.Request.QueryString["Sector"] != null)
					System.Web.HttpContext.Current.Session["Sector"] = Game.GameData.Sectors[new Coordinate(System.Web.HttpContext.Current.Request.QueryString["Sector"])];
				else
					if (System.Web.HttpContext.Current.Session["Sector"] == null)
						System.Web.HttpContext.Current.Session["Sector"] = CurrentPlayer.Sectors[0];
				return (Sector)System.Web.HttpContext.Current.Session["Sector"];
			}
		}
		#endregion
		#region " Misc methods "
		// Smaller of two ints.
		public static int Min(int a, int b) {
			if (a > b)
				return b;
			return a;
		}
		// Larger of two ints.
		public static int Max(int a, int b) {
			if (a > b)
				return a;
			return b;
		}
		// True while the (corrected) clock is inside the configured night window;
		// the window may wrap around midnight, hence the OR.
		public static bool IsNight {
			get {
				if (Game.GetCorrectedDate().Hour >= Configuration.Start_Night || Game.GetCorrectedDate().Hour < Configuration.End_Night)
					return true;
				return false;
			}
		}
		// Damage taken as an integer percentage (0 = unhurt, 100 = destroyed).
		public static int GetDamage(Entity e) {
			return 100 - 100 * e.Hitpoints / e.Info.Hitpoints;
		}
		public static List<Entity> Merge(List<Entity> l) {
			return Merge(l, false);
		}
		// Collapses the list into stacks: entities with the same Info (and, unless
		// ignored, the same hitpoints) are merged by summing Number; the absorbed
		// entity is destroyed.  O(n^2) - fine for the small lists used here.
		public static List<Entity> Merge(List<Entity> l, bool ignoreHitpoints) {
			List<Entity> m = new List<Entity>();
			foreach (Entity e in l) {
				bool b = false;
				foreach (Entity f in m)
					if (e.Info == f.Info && (ignoreHitpoints || e.Hitpoints == f.Hitpoints)) {
						f.Number += e.Number;
						e.Destroy();
						b = true;
					}
				if (!b)
					m.Add(e);
			}
			return m;
		}
		// Inverse of Merge: expands every stack into single-unit entities.  Clones
		// carry Number == 1; the original keeps the last unit of its stack.
		public static List<Entity> Split(List<Entity> l) {
			List<Entity> m = new List<Entity>();
			foreach (Entity e in l) {
				while (e.Number > 1) {
					Entity f = e.Clone();
					f.Destroy();
					f.Number = 1;
					m.Add(f);
					e.Number--;
				}
				m.Add(e);
			}
			return m;
		}
		public static bool IsAvailable(Player p, Sector s, BuildingInfo i) {
			return IsAvailable(p, s, i, false);
		}
		// Whether player p can train unit i in sector s: checks buildability,
		// resources, the training building, tech requirements and food upkeep.
		public static bool IsAvailable(Player p, Sector s, UnitInfo i) {
			if (i.Type == UnitType.None || !i.Buildable || p.Gold < i.Gold || p.Lumber < i.Lumber)
				return false;
			if (!s.HasBuildingForRequirement(i.TrainedAt))
				return false;
			foreach (BuildingType t in i.Requirements)
				if (!p.HasBuildingForRequirement(t))
					return false;
			int upkeep = 0;
			foreach (Unit u in p.Units)
				upkeep += u.Number * u.Info.Food;
			if (p.Food < upkeep + i.Food)
				return false;
			return true;
		}
		// Whether player p can construct building i in sector s.  Besides resources
		// and tech requirements this enforces: faction match, the main-hall upgrade
		// chains (a base hall cannot coexist with its upgrades), and the per-sector
		// and total count limits.
		public static bool IsAvailable(Player p, Sector s, BuildingInfo i, bool isAnUpgrade) {
			if (i.Type == BuildingType.None || p.Fraction != i.Fraction)
				return false;
			if (!isAnUpgrade && !i.Buildable)
				return false;
			if (p.Gold < i.Gold || p.Lumber < i.Lumber)
				return false;
			if (i.Type == BuildingType.TownHall && (s.HasBuilding(BuildingType.Keep) || s.HasBuilding(BuildingType.Castle)))
				return false;
			else if (i.Type == BuildingType.GreatHall && (s.HasBuilding(BuildingType.Stronghold) || s.HasBuilding(BuildingType.Fortress)))
				return false;
			else if (i.Type == BuildingType.TreeOfLife && (s.HasBuilding(BuildingType.TreeOfAges) || s.HasBuilding(BuildingType.TreeOfEternity)))
				return false;
			else if (i.Type == BuildingType.Necropolis && (s.HasBuilding(BuildingType.HallsOfTheDead) || s.HasBuilding(BuildingType.BlackCitadel)))
				return false;
			foreach (BuildingType t in i.Requirements)
				if (!p.HasBuildingForRequirement(t))
					return false;
			int j = 0;
			foreach (Building b in s.Buildings) //check number per sector
				if (b.BuildingInfo == i)
					j += b.Number;
			if (j >= i.NumberPerSector)
				return false;
			j = 0;
			foreach (Building b in p.Buildings) //check total number
				if (b.BuildingInfo == i)
					j += b.Number;
			if (j >= i.Number)
				return false;
			return true;
		}
		// League for a 1-based rank, Player_Per_League players per league; rank 0
		// (unranked) maps to league 0.
		public static int GetLeague(int rank) {
			if (rank == 0)
				return 0;
			if (rank % Configuration.Player_Per_League == 0)
				return rank / Configuration.Player_Per_League;
			else
				return rank / Configuration.Player_Per_League + 1;
		}
		// Removes every element of 'remove' from 'from'.  NOTE(review): passing the
		// same live collection as both arguments would modify it while enumerating.
		public static void RemoveRange<T>(ICollection<T> from, IEnumerable<T> remove) {
			foreach (T t in remove)
				from.Remove(t);
		}
		public static bool TrainedInSameBuilding(UnitInfo a, UnitInfo b) {
			return a.TrainedAt == b.TrainedAt;
		}
		#endregion
		#region " UI methods "
		// Theme name for the current user = faction name.  Unlike Gfx this does not
		// guard against a missing player record.
		public static string Theme {
			get {
				return Game.GameData.Players[System.Web.HttpContext.Current.User.Identity.Name].Fraction.ToString();
			}
		}
		// Selects the drop-down item whose Value matches s; returns whether a match
		// was found.
		public static bool SelectByValue(System.Web.UI.WebControls.DropDownList l, string s) {
			l.SelectedIndex = -1;
			foreach (System.Web.UI.WebControls.ListItem i in l.Items)
				if (i.Value == s) {
					i.Selected = true;
					return true;
				}
			return false;
		}
		// Depth-first search of the control tree for a control with the given ID.
		public static System.Web.UI.Control GetControlByName(System.Web.UI.Control parent, string name) {
			foreach (System.Web.UI.Control c in parent.Controls) {
				if (c.ID == name)
					return c;
				System.Web.UI.Control cntrl = GetControlByName(c, name);
				if (cntrl != null)
					return cntrl;
			}
			return null;
		}
		// Renders a styled message into the page's "lblMessage" literal; the type
		// selects the CSS class.  NOTE(review): message is inserted as raw HTML -
		// callers must not pass unencoded user input.
		public static void Message(System.Web.UI.Control parent, string message, MessageType type) {
			System.Web.UI.WebControls.Literal l = (System.Web.UI.WebControls.Literal)Game.GetControlByName(parent, "lblMessage");
			l.Text = "<div style=\"text-align:center;\">";
			switch (type) {
				case MessageType.Acknowledgement:
					l.Text += "<div class=\"Message_Acknowledgement\">" + message + "</div>";
					break;
				case MessageType.Normal:
					l.Text += "<div class=\"Message_Normal\">" + message + "</div>";
					break;
				case MessageType.Error:
					l.Text += "<div class=\"Message_Error\">" + message + "</div>";
					break;
			}
			l.Text += "</div>";
		}
		public static void Message(System.Web.UI.Control parent, string message) {
			Game.Message(parent, message, MessageType.Normal);
		}
		#endregion
		#region " Format Methods "
		// Formats an int with '.' as thousands separator by taking the "N" format,
		// stripping the decimal part (last 3 chars: separator + 2 digits) and
		// swapping ',' for '.'.  NOTE(review): assumes the server culture uses
		// ',' groups and 2 decimal places - breaks under other cultures.
		public static string Format(int number) {
			if (number == 0)
				return "0";
			else
				return number.ToString("N").Substring(0, number.ToString("N").Length - 3).Replace(",", ".");
		}
		// Formats a double swapping '.' and ',' (German-style output) via a
		// placeholder character; same culture caveat as Format(int).
		public static string Format(double number) {
			if (number == 0)
				return "0";
			else {
				return number.ToString("N").Replace(".", "_").Replace(",", ".").Replace("_", ",");
			}
		}
		public static string Format(DateTime input, bool WriteSeconds) {
			return Format(input, true, WriteSeconds);
		}
		// English long-form date ("Monday, January 1, 13:05[:07]") after applying the
		// configured hour correction.  NOTE(review): relies on enum GetHashCode()
		// returning the underlying DayOfWeek value - works, but (int)input.DayOfWeek
		// would state the intent.
		public static string Format(DateTime input, bool writeTime, bool writeSeconds) {
			input = GetCorrectedDate(input);
			string weekday = "";
			switch (input.DayOfWeek.GetHashCode()) {
				case 0:
					weekday = "Sunday, ";
					break;
				case 1:
					weekday = "Monday, ";
					break;
				case 2:
					weekday = "Tuesday, ";
					break;
				case 3:
					weekday = "Wednesday, ";
					break;
				case 4:
					weekday = "Thursday, ";
					break;
				case 5:
					weekday = "Friday, ";
					break;
				case 6:
					weekday = "Saturday, ";
					break;
			}
			string month = "";
			switch (input.Month) {
				case 1:
					month = "January";
					break;
				case 2:
					month = "February";
					break;
				case 3:
					month = "March";
					break;
				case 4:
					month = "April";
					break;
				case 5:
					month = "May";
					break;
				case 6:
					month = "June";
					break;
				case 7:
					month = "July";
					break;
				case 8:
					month = "August";
					break;
				case 9:
					month = "September";
					break;
				case 10:
					month = "October";
					break;
				case 11:
					month = "November";
					break;
				case 12:
					month = "December";
					break;
			}
			if (writeTime) {
				// Zero-pad minutes/seconds by hand (hours are left unpadded).
				string minuteCorrection = "";
				string secondCorrection = "";
				if (input.Minute < 10) {
					minuteCorrection = "0";
				}
				if (input.Second < 10) {
					secondCorrection = "0";
				}
				string seconds;
				if (writeSeconds)
					seconds = ":" + secondCorrection + input.Second;
				else
					seconds = "";
				return weekday + month + " " + input.Day + ", " + input.Hour + ":" + minuteCorrection + input.Minute + seconds;
			}
			else
				return weekday + month + " " + input.Day;
		}
		// Absolute distance from now as "HH:MM:SS" (hours include days) or "MM:SS"
		// when under an hour; components are zero-padded to two digits.
		public static string TimeSpan(DateTime d) {
			TimeSpan t;
			if (d > DateTime.Now)
				t = d - DateTime.Now;
			else
				t = DateTime.Now - d;
			int hour = t.Hours + t.Days * 24;
			string hours, minutes, seconds;
			if (hour > 9)
				hours = hour.ToString();
			else
				hours = "0" + hour.ToString();
			if (t.Minutes > 9)
				minutes = t.Minutes.ToString();
			else
				minutes = "0" + t.Minutes.ToString();
			if (t.Seconds > 9)
				seconds = t.Seconds.ToString();
			else
				seconds = "0" + t.Seconds.ToString();
			if (hour > 0)
				return hours + ":" + minutes + ":" + seconds;
			else
				return minutes + ":" + seconds;
		}
		// Applies the configured timezone offset to a timestamp.
		public static DateTime GetCorrectedDate(DateTime d) {
			return d.AddHours(Configuration.Hour_Correction);
		}
		public static DateTime GetCorrectedDate() {
			return GetCorrectedDate(DateTime.Now);
		}
		// Display name for an attack type; null for unknown values.
		public static string Format(AttackType type) {
			switch (type) {
				case AttackType.Chaos:
					return "Chaos";
				case AttackType.Hero:
					return "Hero";
				case AttackType.Magic:
					return "Magic";
				case AttackType.None:
					return "-";
				case AttackType.Normal:
					return "Normal";
				case AttackType.Pierce:
					return "Pierce";
				case AttackType.Siege:
					return "Siege";
				case AttackType.Spells:
					return "Spell";
			}
			return null;
		}
		// Display name for an armor type; null for unknown values.
		public static string Format(ArmorType type) {
			switch (type) {
				case ArmorType.Fort:
					return "Fortified";
				case ArmorType.Heavy:
					return "Heavy";
				case ArmorType.Hero:
					return "Hero";
				case ArmorType.Light:
					return "Light";
				case ArmorType.Medium:
					return "Medium";
				case ArmorType.Unarmored:
					return "Unarmored";
			}
			return null;
		}
		// Display name for a visibility mode; null for unknown values.
		public static string Format(Visibility visibility) {
			switch (visibility) {
				case Visibility.Always:
					return "Always";
				case Visibility.AtDay:
					return "At day";
				case Visibility.AtNight:
					return "At night";
				case Visibility.Never:
					return "Never";
			}
			return null;
		}
		// Faction-flavored title for an alliance rank; empty string when the
		// combination is not mapped.
		public static string Format(AllianceRank rank, Fraction fraction) {
			if (fraction == Fraction.Humans) {
				if (rank == AllianceRank.Level1)
					return "Lieutnant";
				else if (rank == AllianceRank.Level2)
					return "Commisasr";
				else if (rank == AllianceRank.Level3)
					return "Marshal";
			}
			else if (fraction == Fraction.Orcs) {
				if (rank == AllianceRank.Level1)
					return "Raider";
				else if (rank == AllianceRank.Level2)
					return "Pack Leader";
				else if (rank == AllianceRank.Level3)
					return "War Chief";
			}
			else if (fraction == Fraction.Undead) {
				if (rank == AllianceRank.Level1)
					return "Evocator";
				else if (rank == AllianceRank.Level2)
					return "Necrolyte Master";
				else if (rank == AllianceRank.Level3)
					return "Wraithlord";
			}
			else if (fraction == Fraction.NightElves) {
				if (rank == AllianceRank.Level1)
					return "Shadow Guard";
				else if (rank == AllianceRank.Level2)
					return "Captain of the Guard";
				else if (rank == AllianceRank.Level3)
					return "Commandant";
			}
			return "";
		}
		#endregion
	}
	// Severity of a UI message rendered by Game.Message; selects the CSS class used.
	public enum MessageType { Error, Acknowledgement, Normal }
}
| |
using System;
using System.Reflection;
using System.Windows.Forms;
using Anycmd.Xacml.Policy.TargetItems;
using pol = Anycmd.Xacml.Policy;
namespace Anycmd.Xacml.ControlCenter.CustomControls
{
/// <summary>
/// Summary description for Condition.
/// </summary>
public class Condition : BaseControl
{
		// Tree rendering of the condition being edited.
		private System.Windows.Forms.TreeView tvwCondition;
		// The condition model; shared with the caller, so edits are applied in place.
		private pol.ConditionElementReadWrite _condition;
		private System.Windows.Forms.GroupBox grpCondition;
		private System.Windows.Forms.Label label1;
		// Data type of the selected element; enabled only where a data type applies.
		private System.Windows.Forms.ComboBox cmbDataType;
		private System.Windows.Forms.Label label2;
		// Free-text editor for attribute ids, values and XPath expressions.
		private System.Windows.Forms.TextBox txtValue;
		private System.Windows.Forms.GroupBox grpElement;
		private System.Windows.Forms.ContextMenu contextMenu;
		private System.Windows.Forms.MenuItem mniAdd;
		private System.Windows.Forms.MenuItem mniDelete;
		// Function selector; swapped in for txtValue when a function node is selected.
		private System.Windows.Forms.ComboBox cmbInternalFunctions;
		/// <summary>
		/// Required designer variable.
		/// </summary>
		private System.ComponentModel.Container components = null;
		/// <summary>
		/// Creates a control for editing the given condition: renders the condition
		/// as a tree and fills the data-type and function combo boxes from the
		/// constants declared in Consts.Schema1 (read via reflection).
		/// </summary>
		/// <param name="condition">Condition element to edit; held by reference, so
		/// edits made through this control mutate the caller's instance.</param>
		public Condition( pol.ConditionElementReadWrite condition )
		{
			// This call is required by the Windows.Forms Form Designer.
			InitializeComponent();
			_condition = condition;
			tvwCondition.Nodes.Add( new TreeNodes.FunctionExecution( condition ) );
			tvwCondition.ExpandAll();
			foreach( FieldInfo field in typeof(Consts.Schema1.InternalDataTypes).GetFields() )
			{
				cmbDataType.Items.Add( field.GetValue( null ) );
			}
			foreach( FieldInfo field in typeof(Consts.Schema1.InternalFunctions).GetFields() )
			{
				cmbInternalFunctions.Items.Add( field.GetValue( null ) );
			}
		}
		/// <summary>
		/// Clean up any resources being used.
		/// </summary>
		/// <param name="disposing">True when called from Dispose() rather than a
		/// finalizer; only then is it safe to touch other managed objects.</param>
		protected override void Dispose( bool disposing )
		{
			if( disposing )
			{
				if(components != null)
				{
					components.Dispose();
				}
			}
			base.Dispose( disposing );
		}
#region Component Designer generated code
		/// <summary>
		/// Required method for Designer support - do not modify
		/// the contents of this method with the code editor.
		/// </summary>
		// Designer-generated layout: builds the condition tree view, its context menu
		// (Add/Delete) and the element editing panel (data-type combo, value text box
		// and the overlapping internal-functions combo).
		private void InitializeComponent()
		{
			this.tvwCondition = new System.Windows.Forms.TreeView();
			this.contextMenu = new System.Windows.Forms.ContextMenu();
			this.mniAdd = new System.Windows.Forms.MenuItem();
			this.mniDelete = new System.Windows.Forms.MenuItem();
			this.grpCondition = new System.Windows.Forms.GroupBox();
			this.grpElement = new System.Windows.Forms.GroupBox();
			this.cmbDataType = new System.Windows.Forms.ComboBox();
			this.txtValue = new System.Windows.Forms.TextBox();
			this.label1 = new System.Windows.Forms.Label();
			this.label2 = new System.Windows.Forms.Label();
			this.cmbInternalFunctions = new System.Windows.Forms.ComboBox();
			this.grpCondition.SuspendLayout();
			this.grpElement.SuspendLayout();
			this.SuspendLayout();
			//
			// tvwCondition
			//
			this.tvwCondition.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
				| System.Windows.Forms.AnchorStyles.Right)));
			this.tvwCondition.ContextMenu = this.contextMenu;
			this.tvwCondition.ImageIndex = -1;
			this.tvwCondition.Location = new System.Drawing.Point(8, 24);
			this.tvwCondition.Name = "tvwCondition";
			this.tvwCondition.SelectedImageIndex = -1;
			this.tvwCondition.Size = new System.Drawing.Size(632, 248);
			this.tvwCondition.TabIndex = 0;
			this.tvwCondition.MouseDown += new System.Windows.Forms.MouseEventHandler(this.tvwCondition_MouseDown);
			this.tvwCondition.AfterSelect += new System.Windows.Forms.TreeViewEventHandler(this.tvwCondition_AfterSelect);
			this.tvwCondition.BeforeSelect += new System.Windows.Forms.TreeViewCancelEventHandler(this.tvwCondition_BeforeSelect);
			//
			// contextMenu
			//
			this.contextMenu.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
																						this.mniAdd,
																						this.mniDelete});
			this.contextMenu.Popup += new System.EventHandler(this.contextMenu_Popup);
			//
			// mniAdd
			//
			this.mniAdd.Index = 0;
			this.mniAdd.Text = "Add";
			//
			// mniDelete
			//
			this.mniDelete.Index = 1;
			this.mniDelete.Text = "Delete";
			this.mniDelete.Click += new System.EventHandler(this.mniDelete_Click);
			//
			// grpCondition
			//
			this.grpCondition.Controls.Add(this.grpElement);
			this.grpCondition.Controls.Add(this.tvwCondition);
			this.grpCondition.Location = new System.Drawing.Point(8, 8);
			this.grpCondition.Name = "grpCondition";
			this.grpCondition.Size = new System.Drawing.Size(656, 456);
			this.grpCondition.TabIndex = 1;
			this.grpCondition.TabStop = false;
			this.grpCondition.Text = "Condition";
			//
			// grpElement
			//
			this.grpElement.Controls.Add(this.cmbInternalFunctions);
			this.grpElement.Controls.Add(this.cmbDataType);
			this.grpElement.Controls.Add(this.txtValue);
			this.grpElement.Controls.Add(this.label1);
			this.grpElement.Controls.Add(this.label2);
			this.grpElement.Location = new System.Drawing.Point(8, 288);
			this.grpElement.Name = "grpElement";
			this.grpElement.Size = new System.Drawing.Size(632, 144);
			this.grpElement.TabIndex = 9;
			this.grpElement.TabStop = false;
			this.grpElement.Text = "Element";
			//
			// cmbDataType
			//
			this.cmbDataType.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
				| System.Windows.Forms.AnchorStyles.Right)));
			this.cmbDataType.Location = new System.Drawing.Point(72, 40);
			this.cmbDataType.Name = "cmbDataType";
			this.cmbDataType.Size = new System.Drawing.Size(536, 21);
			this.cmbDataType.TabIndex = 6;
			//
			// txtValue
			//
			this.txtValue.Location = new System.Drawing.Point(72, 88);
			this.txtValue.Name = "txtValue";
			this.txtValue.Size = new System.Drawing.Size(536, 20);
			this.txtValue.TabIndex = 8;
			this.txtValue.Text = "";
			//
			// label1
			//
			this.label1.Location = new System.Drawing.Point(8, 40);
			this.label1.Name = "label1";
			this.label1.Size = new System.Drawing.Size(64, 24);
			this.label1.TabIndex = 1;
			this.label1.Text = "Data type:";
			//
			// label2
			//
			this.label2.Location = new System.Drawing.Point(8, 88);
			this.label2.Name = "label2";
			this.label2.Size = new System.Drawing.Size(64, 23);
			this.label2.TabIndex = 7;
			this.label2.Text = "Value:";
			//
			// cmbInternalFunctions
			//
			this.cmbInternalFunctions.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
				| System.Windows.Forms.AnchorStyles.Right)));
			this.cmbInternalFunctions.Location = new System.Drawing.Point(72, 88);
			this.cmbInternalFunctions.Name = "cmbInternalFunctions";
			this.cmbInternalFunctions.Size = new System.Drawing.Size(536, 21);
			this.cmbInternalFunctions.TabIndex = 9;
			this.cmbInternalFunctions.Visible = false;
			//
			// Condition
			//
			this.Controls.Add(this.grpCondition);
			this.Name = "Condition";
			this.Size = new System.Drawing.Size(680, 488);
			this.grpCondition.ResumeLayout(false);
			this.grpElement.ResumeLayout(false);
			this.ResumeLayout(false);
		}
#endregion
		// Populates the element-editing panel for the newly selected node.  For
		// function nodes the function combo replaces the value text box; the data-type
		// combo is enabled only for elements that carry a data type.
		private void tvwCondition_AfterSelect(object sender, System.Windows.Forms.TreeViewEventArgs e)
		{
			if( e.Node is TreeNodes.FunctionExecution )
			{
				grpElement.Text = "Function execution";
				TreeNodes.FunctionExecution node = (TreeNodes.FunctionExecution)e.Node;
				txtValue.Visible = false;
				cmbInternalFunctions.Visible = true;
				cmbInternalFunctions.SelectedIndex = cmbInternalFunctions.FindStringExact( node.ApplyBaseDefinition.FunctionId );
				label2.Text = "FunctionId:";
				cmbDataType.Enabled = false;
			}
			else if( e.Node is TreeNodes.FunctionParameter )
			{
				grpElement.Text = "Function parameter";
				TreeNodes.FunctionParameter node = (TreeNodes.FunctionParameter)e.Node;
				txtValue.Visible = false;
				cmbInternalFunctions.Visible = true;
				cmbInternalFunctions.SelectedIndex = cmbInternalFunctions.FindStringExact( node.FunctionDefinition.FunctionId );
				label2.Text = "FunctionId:";
				cmbDataType.Enabled = false;
			}
			else if( e.Node is TreeNodes.AttributeValue )
			{
				grpElement.Text = "Attribute value";
				TreeNodes.AttributeValue node = (TreeNodes.AttributeValue)e.Node;
				txtValue.Visible = true;
				cmbInternalFunctions.Visible = false;
				txtValue.Text = node.AttributeValueDefinition.Contents;
				label2.Text = "Value:";
				cmbDataType.Enabled = true;
				cmbDataType.SelectedIndex = cmbDataType.FindStringExact( node.AttributeValueDefinition.DataType );
			}
			else if( e.Node is TreeNodes.AttributeDesignator )
			{
				grpElement.Text = "Attribute designator";
				TreeNodes.AttributeDesignator node = (TreeNodes.AttributeDesignator)e.Node;
				txtValue.Visible = true;
				cmbInternalFunctions.Visible = false;
				txtValue.Text = node.AttributeDesignatorDefinition.AttributeId;
				label2.Text = "AttributeId:";
				cmbDataType.Enabled = true;
				cmbDataType.SelectedIndex = cmbDataType.FindStringExact( node.AttributeDesignatorDefinition.DataType );
			}
			else if( e.Node is TreeNodes.AttributeSelector )
			{
				grpElement.Text = "Attribute selector";
				TreeNodes.AttributeSelector node = (TreeNodes.AttributeSelector)e.Node;
				txtValue.Visible = true;
				cmbInternalFunctions.Visible = false;
				txtValue.Text = node.AttributeSelectorDefinition.RequestContextPath;
				label2.Text = "XPath:";
				cmbDataType.Enabled = false;
			}
		}
private void tvwCondition_MouseDown(object sender, System.Windows.Forms.MouseEventArgs e)
{
if( e.Button == MouseButtons.Right )
{
tvwCondition.SelectedNode = tvwCondition.GetNodeAt( e.X, e.Y );
}
}
private void tvwCondition_BeforeSelect(object sender, System.Windows.Forms.TreeViewCancelEventArgs e)
{
TreeNodes.NoBoldNode node = (TreeNodes.NoBoldNode)tvwCondition.SelectedNode;
if( node is TreeNodes.FunctionExecution )
{
TreeNodes.FunctionExecution funcNode = ((TreeNodes.FunctionExecution)node);
funcNode.ApplyBaseDefinition.FunctionId = cmbInternalFunctions.Text;
tvwCondition.SelectedNode = funcNode;
tvwCondition.SelectedNode.Text = "[" + "dataType" + "] " + funcNode.ApplyBaseDefinition.FunctionId;
}
else if( node is TreeNodes.FunctionParameter )
{
TreeNodes.FunctionParameter funcNode = ((TreeNodes.FunctionParameter)node);
funcNode.FunctionDefinition.FunctionId = cmbInternalFunctions.Text;
tvwCondition.SelectedNode = funcNode;
tvwCondition.SelectedNode.Text = "Function: " + funcNode.FunctionDefinition.FunctionId;;
}
else if( node is TreeNodes.AttributeValue )
{
TreeNodes.AttributeValue attNode = ((TreeNodes.AttributeValue)node);
attNode.AttributeValueDefinition.Value = txtValue.Text;
attNode.AttributeValueDefinition.DataType = cmbDataType.Text;
tvwCondition.SelectedNode = attNode;
tvwCondition.SelectedNode.Text = "[" + attNode.AttributeValueDefinition.DataType + "] " + attNode.AttributeValueDefinition.Contents;
}
else if( node is TreeNodes.AttributeDesignator )
{
TreeNodes.AttributeDesignator attNode = ((TreeNodes.AttributeDesignator)node);
attNode.AttributeDesignatorDefinition.AttributeId = txtValue.Text;
attNode.AttributeDesignatorDefinition.DataType = cmbDataType.Text;
tvwCondition.SelectedNode = attNode;
tvwCondition.SelectedNode.Text = "[" + attNode.AttributeDesignatorDefinition.DataType + "]:" + attNode.AttributeDesignatorDefinition.AttributeId;
}
else if( node is TreeNodes.AttributeSelector )
{
TreeNodes.AttributeSelector attNode = ((TreeNodes.AttributeSelector)node);
attNode.AttributeSelectorDefinition.RequestContextPath = txtValue.Text;
tvwCondition.SelectedNode = attNode;
tvwCondition.SelectedNode.Text = "XPath: " + attNode.AttributeSelectorDefinition.RequestContextPath;
}
}
#region Context menu
private void contextMenu_Popup(object sender, System.EventArgs e)
{
mniAdd.MenuItems.Clear();
if( tvwCondition.SelectedNode == null)
{
if(tvwCondition.Nodes.Count == 0)
{
mniAdd.MenuItems.Add( "Function execution", new System.EventHandler( CreateFunctionExecution ) );
mniAdd.MenuItems.Add( "Function parameter", new System.EventHandler( CreateFunctionParameter ) );
mniAdd.MenuItems.Add( "Attribute value", new System.EventHandler( CreateAttributeValue ) );
mniAdd.MenuItems.Add( "Action attribute designator", new System.EventHandler( CreateActionAttributeDesignator ) );
mniAdd.MenuItems.Add( "Subject attribute designator", new System.EventHandler( CreateSubjectAttributeDesignator ) );
mniAdd.MenuItems.Add( "Resource designator", new System.EventHandler( CreateResourceAttributeDesignator ) );
mniAdd.MenuItems.Add( "Attribute selector", new System.EventHandler( CreateAttributeSelector ) );
}
else
{
mniDelete.Visible = false;
}
}
else if( tvwCondition.SelectedNode is TreeNodes.FunctionExecution)
{
mniAdd.MenuItems.Add( "Function execution", new System.EventHandler( CreateFunctionExecutionFromFunction ) );
mniAdd.MenuItems.Add( "Function parameter", new System.EventHandler( CreateFunctionParameterFromFunction ) );
mniAdd.MenuItems.Add( "Attribute value", new System.EventHandler( CreateAttributeValueFromFunction ) );
mniAdd.MenuItems.Add( "Action attribute designator", new System.EventHandler( CreateActionAttributeDesignatorFromFunction ) );
mniAdd.MenuItems.Add( "Subject attribute designator", new System.EventHandler( CreateSubjectAttributeDesignatorFromFunction ) );
mniAdd.MenuItems.Add( "Resource designator", new System.EventHandler( CreateResourceAttributeDesignatorFromFunction ) );
mniAdd.MenuItems.Add( "Attribute selector", new System.EventHandler( CreateAttributeSelectorFromFunction ) );
}
if( mniAdd.MenuItems.Count == 0 )
{
mniAdd.Visible = false;
}
else
{
mniAdd.Visible = true;
}
}
private void CreateFunctionExecution( object sender, EventArgs args )
{
pol.ApplyElement apply = new pol.ApplyElement("urn:new_function", new pol.ExpressionReadWriteCollection(), Xacml.XacmlVersion.Version11);
TreeNodes.FunctionExecution node = new TreeNodes.FunctionExecution( apply );
tvwCondition.Nodes.Add( node );
_condition.Arguments.Add( apply );
}
private void CreateFunctionExecutionFromFunction( object sender, EventArgs args )
{
TreeNodes.FunctionExecution func = (TreeNodes.FunctionExecution)tvwCondition.SelectedNode;
pol.ApplyBaseReadWrite parentApply = func.ApplyBaseDefinition;
pol.ApplyElement apply = new pol.ApplyElement("urn:new_function", new pol.ExpressionReadWriteCollection(), Xacml.XacmlVersion.Version11);
TreeNodes.FunctionExecution node = new TreeNodes.FunctionExecution( apply );
func.Nodes.Add( node );
parentApply.Arguments.Add( apply );
}
private void CreateFunctionParameter( object sender, EventArgs args )
{
pol.FunctionElementReadWrite function = new pol.FunctionElementReadWrite( "urn:new_function_param", Xacml.XacmlVersion.Version11 );
TreeNodes.FunctionParameter node = new TreeNodes.FunctionParameter( function );
tvwCondition.Nodes.Add( node );
_condition.Arguments.Add( function );
}
private void CreateFunctionParameterFromFunction( object sender, EventArgs args )
{
TreeNodes.FunctionExecution func = (TreeNodes.FunctionExecution)tvwCondition.SelectedNode;
pol.ApplyBaseReadWrite parentApply = func.ApplyBaseDefinition;
pol.FunctionElementReadWrite function = new pol.FunctionElementReadWrite( "urn:new_function_param", Xacml.XacmlVersion.Version11 );
TreeNodes.FunctionParameter node = new TreeNodes.FunctionParameter( function );
func.Nodes.Add( node );
parentApply.Arguments.Add( function );
}
private void CreateAttributeValueFromFunction( object sender, EventArgs args )
{
TreeNodes.FunctionExecution func = (TreeNodes.FunctionExecution)tvwCondition.SelectedNode;
pol.ApplyBaseReadWrite parentApply = func.ApplyBaseDefinition;
pol.AttributeValueElementReadWrite attr = new pol.AttributeValueElementReadWrite( Xacml.Consts.Schema1.InternalDataTypes.XsdString, "TODO: Add content", Xacml.XacmlVersion.Version11 );
TreeNodes.AttributeValue node = new TreeNodes.AttributeValue( attr );
func.Nodes.Add( node );
parentApply.Arguments.Add( attr );
}
/// <summary>
/// Adds a new string attribute value as a top-level argument of the condition.
/// </summary>
private void CreateAttributeValue( object sender, EventArgs args )
{
    pol.AttributeValueElementReadWrite newValue = new pol.AttributeValueElementReadWrite(Xacml.Consts.Schema1.InternalDataTypes.XsdString, "TODO: Add content", Xacml.XacmlVersion.Version11);
    tvwCondition.Nodes.Add(new TreeNodes.AttributeValue(newValue));
    _condition.Arguments.Add(newValue);
}
/// <summary>
/// Adds a new action attribute designator as a top-level argument of the condition.
/// </summary>
private void CreateActionAttributeDesignator( object sender, EventArgs args )
{
    ActionAttributeDesignatorElement designator = new ActionAttributeDesignatorElement(string.Empty, false, "TODO: Add attribute id", string.Empty, Xacml.XacmlVersion.Version11);
    tvwCondition.Nodes.Add(new TreeNodes.AttributeDesignator(designator));
    _condition.Arguments.Add(designator);
}
/// <summary>
/// Adds a new subject attribute designator as a top-level argument of the condition.
/// </summary>
private void CreateSubjectAttributeDesignator( object sender, EventArgs args )
{
    SubjectAttributeDesignatorElement designator = new SubjectAttributeDesignatorElement(string.Empty, false, "TODO: Add attribute id", string.Empty, string.Empty, Xacml.XacmlVersion.Version11);
    tvwCondition.Nodes.Add(new TreeNodes.AttributeDesignator(designator));
    _condition.Arguments.Add(designator);
}
/// <summary>
/// Adds a new resource attribute designator as a top-level argument of the condition.
/// </summary>
private void CreateResourceAttributeDesignator( object sender, EventArgs args )
{
    ResourceAttributeDesignatorElement designator = new ResourceAttributeDesignatorElement(string.Empty, false, "TODO: Add attribute id", string.Empty, Xacml.XacmlVersion.Version11);
    tvwCondition.Nodes.Add(new TreeNodes.AttributeDesignator(designator));
    _condition.Arguments.Add(designator);
}
/// <summary>
/// Adds a new action attribute designator as a child argument of the function
/// node currently selected in the condition tree.
/// </summary>
private void CreateActionAttributeDesignatorFromFunction( object sender, EventArgs args )
{
    TreeNodes.FunctionExecution selectedFunction = (TreeNodes.FunctionExecution)tvwCondition.SelectedNode;
    pol.ApplyBaseReadWrite owningApply = selectedFunction.ApplyBaseDefinition;
    ActionAttributeDesignatorElement designator = new ActionAttributeDesignatorElement(string.Empty, false, "TODO: Add attribute id", string.Empty, Xacml.XacmlVersion.Version11);
    selectedFunction.Nodes.Add(new TreeNodes.AttributeDesignator(designator));
    owningApply.Arguments.Add(designator);
}
/// <summary>
/// Adds a new subject attribute designator as a child argument of the function
/// node currently selected in the condition tree.
/// </summary>
private void CreateSubjectAttributeDesignatorFromFunction( object sender, EventArgs args )
{
    TreeNodes.FunctionExecution selectedFunction = (TreeNodes.FunctionExecution)tvwCondition.SelectedNode;
    pol.ApplyBaseReadWrite owningApply = selectedFunction.ApplyBaseDefinition;
    SubjectAttributeDesignatorElement designator = new SubjectAttributeDesignatorElement(string.Empty, false, "TODO: Add attribute id", string.Empty, string.Empty, Xacml.XacmlVersion.Version11);
    selectedFunction.Nodes.Add(new TreeNodes.AttributeDesignator(designator));
    owningApply.Arguments.Add(designator);
}
/// <summary>
/// Adds a new resource attribute designator as a child argument of the function
/// node currently selected in the condition tree.
/// </summary>
private void CreateResourceAttributeDesignatorFromFunction( object sender, EventArgs args )
{
    TreeNodes.FunctionExecution selectedFunction = (TreeNodes.FunctionExecution)tvwCondition.SelectedNode;
    pol.ApplyBaseReadWrite owningApply = selectedFunction.ApplyBaseDefinition;
    ResourceAttributeDesignatorElement designator = new ResourceAttributeDesignatorElement(string.Empty, false, "TODO: Add attribute id", string.Empty, Xacml.XacmlVersion.Version11);
    selectedFunction.Nodes.Add(new TreeNodes.AttributeDesignator(designator));
    owningApply.Arguments.Add(designator);
}
/// <summary>
/// Adds a new attribute selector (XPath based) as a top-level argument of the condition.
/// </summary>
private void CreateAttributeSelector( object sender, EventArgs args )
{
    pol.AttributeSelectorElement selector = new pol.AttributeSelectorElement(string.Empty, false, "TODO: Add XPath", Xacml.XacmlVersion.Version11);
    tvwCondition.Nodes.Add(new TreeNodes.AttributeSelector(selector));
    _condition.Arguments.Add(selector);
}
/// <summary>
/// Adds a new attribute selector (XPath based) as a child argument of the
/// function node currently selected in the condition tree.
/// </summary>
private void CreateAttributeSelectorFromFunction( object sender, EventArgs args )
{
    TreeNodes.FunctionExecution selectedFunction = (TreeNodes.FunctionExecution)tvwCondition.SelectedNode;
    pol.ApplyBaseReadWrite owningApply = selectedFunction.ApplyBaseDefinition;
    pol.AttributeSelectorElement selector = new pol.AttributeSelectorElement(string.Empty, false, "TODO: Add XPath", Xacml.XacmlVersion.Version11);
    selectedFunction.Nodes.Add(new TreeNodes.AttributeSelector(selector));
    owningApply.Arguments.Add(selector);
}
/// <summary>
/// Deletes the selected tree node and removes its backing XACML element from the
/// parent function's argument collection. Each branch resolves the element's index
/// via the type-specific <c>GetIndex</c> overload before removing it.
/// </summary>
// NOTE(review): assumes the selected node always has a FunctionExecution parent;
// if this handler can fire for a top-level argument (parent is the tree root or
// null), the cast below throws — confirm the menu item is only enabled for
// nested nodes.
private void mniDelete_Click(object sender, System.EventArgs e)
{
    TreeNodes.FunctionExecution functionNode = (TreeNodes.FunctionExecution)tvwCondition.SelectedNode.Parent;
    TreeNodes.NoBoldNode node = (TreeNodes.NoBoldNode)tvwCondition.SelectedNode;
    if( node is TreeNodes.FunctionExecution )
    {
        TreeNodes.FunctionExecution funcNode = ((TreeNodes.FunctionExecution)node);
        // The ApplyElement cast appears to select the matching GetIndex overload;
        // presumably nested function nodes always wrap an ApplyElement — verify.
        int index = functionNode.ApplyBaseDefinition.Arguments.GetIndex( (pol.ApplyElement)funcNode.ApplyBaseDefinition );
        functionNode.ApplyBaseDefinition.Arguments.RemoveAt( index );
        functionNode.Nodes.Remove( funcNode );
    }
    else if( node is TreeNodes.FunctionParameter )
    {
        TreeNodes.FunctionParameter funcNode = ((TreeNodes.FunctionParameter)node);
        int index = functionNode.ApplyBaseDefinition.Arguments.GetIndex( funcNode.FunctionDefinition );
        functionNode.ApplyBaseDefinition.Arguments.RemoveAt( index );
        functionNode.Nodes.Remove( funcNode );
    }
    else if( node is TreeNodes.AttributeValue )
    {
        TreeNodes.AttributeValue attNode = ((TreeNodes.AttributeValue)node);
        int index = functionNode.ApplyBaseDefinition.Arguments.GetIndex( attNode.AttributeValueDefinition );
        functionNode.ApplyBaseDefinition.Arguments.RemoveAt( index );
        functionNode.Nodes.Remove( attNode );
    }
    else if( node is TreeNodes.AttributeDesignator )
    {
        TreeNodes.AttributeDesignator attNode = ((TreeNodes.AttributeDesignator)node);
        int index = functionNode.ApplyBaseDefinition.Arguments.GetIndex( attNode.AttributeDesignatorDefinition );
        functionNode.ApplyBaseDefinition.Arguments.RemoveAt( index );
        functionNode.Nodes.Remove( attNode );
    }
    else if( node is TreeNodes.AttributeSelector )
    {
        TreeNodes.AttributeSelector attNode = ((TreeNodes.AttributeSelector)node);
        int index = functionNode.ApplyBaseDefinition.Arguments.GetIndex( attNode.AttributeSelectorDefinition );
        functionNode.ApplyBaseDefinition.Arguments.RemoveAt( index );
        functionNode.Nodes.Remove( attNode );
    }
}
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
namespace Lucene.Net.Util
{
using Lucene.Net.Support;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using DirectAllocator = Lucene.Net.Util.ByteBlockPool.DirectAllocator;
/// <summary>
/// <seealso cref="BytesRefHash"/> is a special purpose hash-map like data-structure
/// optimized for <seealso cref="BytesRef"/> instances. BytesRefHash maintains mappings of
/// byte arrays to ids (Map<BytesRef,int>) storing the hashed bytes
/// efficiently in continuous storage. The mapping to the id is
/// encapsulated inside <seealso cref="BytesRefHash"/> and is guaranteed to be increased
/// for each added <seealso cref="BytesRef"/>.
///
/// <p>
/// Note: The maximum capacity <seealso cref="BytesRef"/> instance passed to
/// <seealso cref="#add(BytesRef)"/> must not be longer than <seealso cref="ByteBlockPool#BYTE_BLOCK_SIZE"/>-2.
/// The internal storage is limited to 2GB total byte storage.
/// </p>
///
/// @lucene.internal
/// </summary>
public sealed class BytesRefHash
{
    public const int DEFAULT_CAPACITY = 16;

    // the following fields are needed by comparator,
    // so package private to prevent access$-methods:
    internal readonly ByteBlockPool Pool;       // backing storage for the hashed byte content
    internal int[] BytesStart;                  // entry id -> start offset of the entry's bytes in Pool

    private readonly BytesRef Scratch1 = new BytesRef(); // reused scratch ref; makes instances non-thread-safe
    private int HashSize;                       // current table capacity; always a power of two
    private int HashHalfSize;                   // HashSize / 2; rehash trigger threshold
    private int HashMask;                       // HashSize - 1; used to fold hash codes into slots
    private int Count;                          // number of live entries
    private int LastCount = -1;                 // entry count at the last Clear(); -1 until first Clear
    private int[] Ids;                          // hash slot -> entry id; -1 marks an empty slot
    private readonly BytesStartArray bytesStartArray;
    private Counter BytesUsed;                  // tracks memory attributed to this hash

    /// <summary>
    /// Creates a new <seealso cref="BytesRefHash"/> with a <seealso cref="ByteBlockPool"/> using a
    /// <seealso cref="DirectAllocator"/>.
    /// </summary>
    public BytesRefHash()
        : this(new ByteBlockPool(new DirectAllocator()))
    {
    }

    /// <summary>
    /// Creates a new <seealso cref="BytesRefHash"/>
    /// </summary>
    public BytesRefHash(ByteBlockPool pool)
        : this(pool, DEFAULT_CAPACITY, new DirectBytesStartArray(DEFAULT_CAPACITY))
    {
    }

    /// <summary>
    /// Creates a new <seealso cref="BytesRefHash"/>
    /// </summary>
    // NOTE(review): capacity is presumably required to be a power of two (HashMask
    // arithmetic and Shrink both assume it) — not validated here; confirm callers.
    public BytesRefHash(ByteBlockPool pool, int capacity, BytesStartArray bytesStartArray)
    {
        HashSize = capacity;
        HashHalfSize = HashSize >> 1;
        HashMask = HashSize - 1;
        this.Pool = pool;
        Ids = new int[HashSize];
        Arrays.Fill(Ids, -1);
        this.bytesStartArray = bytesStartArray;
        BytesStart = bytesStartArray.Init();
        // Fall back to a private counter when the start array does not supply one.
        BytesUsed = bytesStartArray.BytesUsed() == null ? Counter.NewCounter() : bytesStartArray.BytesUsed();
        BytesUsed.AddAndGet(HashSize * RamUsageEstimator.NUM_BYTES_INT);
    }

    /// <summary>
    /// Returns the number of <seealso cref="BytesRef"/> values in this <seealso cref="BytesRefHash"/>.
    /// </summary>
    /// <returns> the number of <seealso cref="BytesRef"/> values in this <seealso cref="BytesRefHash"/>. </returns>
    public int Size()
    {
        return Count;
    }

    /// <summary>
    /// Populates and returns a <seealso cref="BytesRef"/> with the bytes for the given
    /// bytesID.
    /// <p>
    /// Note: the given bytesID must be a positive integer less than the current
    /// size (<seealso cref="#size()"/>)
    /// </summary>
    /// <param name="bytesID">
    ///          the id </param>
    /// <param name="ref">
    ///          the <seealso cref="BytesRef"/> to populate
    /// </param>
    /// <returns> the given BytesRef instance populated with the bytes for the given
    ///         bytesID </returns>
    public BytesRef Get(int bytesID, BytesRef @ref)
    {
        Debug.Assert(BytesStart != null, "bytesStart is null - not initialized");
        Debug.Assert(bytesID < BytesStart.Length, "bytesID exceeds byteStart len: " + BytesStart.Length);
        Pool.SetBytesRef(@ref, BytesStart[bytesID]);
        return @ref;
    }

    /// <summary>
    /// Returns the ids array in arbitrary order. Valid ids start at offset of 0
    /// and end at a limit of <seealso cref="#size()"/> - 1
    /// <p>
    /// Note: this is a destructive operation. <seealso cref="#clear()"/> must be called in
    /// order to reuse this <seealso cref="BytesRefHash"/> instance.
    /// </p>
    /// </summary>
    public int[] Compact()
    {
        Debug.Assert(BytesStart != null, "bytesStart is null - not initialized");
        int upto = 0;
        // Move every live id to the front of the table; the table no longer
        // works as a hash after this (hence "destructive").
        for (int i = 0; i < HashSize; i++)
        {
            if (Ids[i] != -1)
            {
                if (upto < i)
                {
                    Ids[upto] = Ids[i];
                    Ids[i] = -1;
                }
                upto++;
            }
        }

        Debug.Assert(upto == Count);
        LastCount = Count;
        return Ids;
    }

    /// <summary>
    /// Returns the values array sorted by the referenced byte values.
    /// <p>
    /// Note: this is a destructive operation. <seealso cref="#clear()"/> must be called in
    /// order to reuse this <seealso cref="BytesRefHash"/> instance.
    /// </p>
    /// </summary>
    /// <param name="comp">
    ///          the <seealso cref="Comparator"/> used for sorting </param>
    public int[] Sort(IComparer<BytesRef> comp)
    {
        int[] compact = Compact();
        new IntroSorterAnonymousInnerClassHelper(this, comp, compact).Sort(0, Count);
        return compact;
    }

    // Port of the Java anonymous IntroSorter used by Sort(); sorts the compacted
    // id array by comparing the byte content each id refers to.
    private class IntroSorterAnonymousInnerClassHelper : IntroSorter
    {
        private BytesRefHash OuterInstance;

        private IComparer<BytesRef> Comp;
        private int[] Compact;
        // NOTE(review): scratch1 here is unused (Compare uses OuterInstance.Scratch1);
        // kept as-is to stay faithful to the port.
        private readonly BytesRef pivot = new BytesRef(), scratch1 = new BytesRef(), scratch2 = new BytesRef();

        public IntroSorterAnonymousInnerClassHelper(BytesRefHash outerInstance, IComparer<BytesRef> comp, int[] compact)
        {
            this.OuterInstance = outerInstance;
            this.Comp = comp;
            this.Compact = compact;
        }

        protected override void Swap(int i, int j)
        {
            int o = Compact[i];
            Compact[i] = Compact[j];
            Compact[j] = o;
        }

        protected override int Compare(int i, int j)
        {
            int id1 = Compact[i], id2 = Compact[j];
            Debug.Assert(OuterInstance.BytesStart.Length > id1 && OuterInstance.BytesStart.Length > id2);
            OuterInstance.Pool.SetBytesRef(OuterInstance.Scratch1, OuterInstance.BytesStart[id1]);
            OuterInstance.Pool.SetBytesRef(scratch2, OuterInstance.BytesStart[id2]);
            return Comp.Compare(OuterInstance.Scratch1, scratch2);
        }

        // Setter-only property standing in for Java's setPivot(int).
        protected internal override int Pivot
        {
            set
            {
                int id = Compact[value];
                Debug.Assert(OuterInstance.BytesStart.Length > id);
                OuterInstance.Pool.SetBytesRef(pivot, OuterInstance.BytesStart[id]);
            }
        }

        protected internal override int ComparePivot(int j)
        {
            int id = Compact[j];
            Debug.Assert(OuterInstance.BytesStart.Length > id);
            OuterInstance.Pool.SetBytesRef(scratch2, OuterInstance.BytesStart[id]);
            return Comp.Compare(pivot, scratch2);
        }
    }

    // True when the stored bytes for entry 'id' equal 'b'. Uses Scratch1, so
    // concurrent use of this instance is unsafe.
    private bool Equals(int id, BytesRef b)
    {
        Pool.SetBytesRef(Scratch1, BytesStart[id]);
        return Scratch1.BytesEquals(b);
    }

    // Halves the table (keeping a power-of-two size) until it fits targetSize;
    // returns true when a new (already-empty) table was allocated.
    private bool Shrink(int targetSize)
    {
        // Cannot use ArrayUtil.shrink because we require power
        // of 2:
        int newSize = HashSize;
        while (newSize >= 8 && newSize / 4 > targetSize)
        {
            newSize /= 2;
        }
        if (newSize != HashSize)
        {
            BytesUsed.AddAndGet(RamUsageEstimator.NUM_BYTES_INT * -(HashSize - newSize));
            HashSize = newSize;
            Ids = new int[HashSize];
            Arrays.Fill(Ids, -1);
            HashHalfSize = newSize / 2;
            HashMask = newSize - 1;
            return true;
        }
        else
        {
            return false;
        }
    }

    /// <summary>
    /// Clears the <seealso cref="BytesRef"/> which maps to the given <seealso cref="BytesRef"/>
    /// </summary>
    public void Clear(bool resetPool)
    {
        LastCount = Count;
        Count = 0;
        if (resetPool)
        {
            Pool.Reset(false, false); // we don't need to 0-fill the buffers
        }
        BytesStart = bytesStartArray.Clear();
        if (LastCount != -1 && Shrink(LastCount))
        {
            // shrink clears the hash entries
            return;
        }
        Arrays.Fill(Ids, -1);
    }

    public void Clear()
    {
        Clear(true);
    }

    /// <summary>
    /// Closes the BytesRefHash and releases all internally used memory
    /// </summary>
    public void Close()
    {
        Clear(true);
        Ids = null;
        BytesUsed.AddAndGet(RamUsageEstimator.NUM_BYTES_INT * -HashSize);
    }

    /// <summary>
    /// Adds a new <seealso cref="BytesRef"/>
    /// </summary>
    /// <param name="bytes">
    ///          the bytes to hash </param>
    /// <returns> the id the given bytes are hashed if there was no mapping for the
    ///         given bytes, otherwise <code>(-(id)-1)</code>. this guarantees
    ///         that the return value will always be &gt;= 0 if the given bytes
    ///         haven't been hashed before.
    /// </returns>
    /// <exception cref="MaxBytesLengthExceededException">
    ///           if the given bytes are longer than
    ///           <seealso cref="ByteBlockPool#BYTE_BLOCK_SIZE"/> - 2 </exception>
    public int Add(BytesRef bytes)
    {
        Debug.Assert(BytesStart != null, "Bytesstart is null - not initialized");
        int length = bytes.Length;
        // final position
        int hashPos = FindHash(bytes);
        int e = Ids[hashPos];

        if (e == -1)
        {
            // new entry
            // 2 extra bytes reserve room for the vInt-style length prefix below.
            int len2 = 2 + bytes.Length;
            if (len2 + Pool.ByteUpto > ByteBlockPool.BYTE_BLOCK_SIZE)
            {
                if (len2 > ByteBlockPool.BYTE_BLOCK_SIZE)
                {
                    throw new MaxBytesLengthExceededException("bytes can be at most " + (ByteBlockPool.BYTE_BLOCK_SIZE - 2) + " in length; got " + bytes.Length);
                }
                Pool.NextBuffer();
            }
            var buffer = Pool.Buffer;
            int bufferUpto = Pool.ByteUpto;
            if (Count >= BytesStart.Length)
            {
                BytesStart = bytesStartArray.Grow();
                Debug.Assert(Count < BytesStart.Length + 1, "count: " + Count + " len: " + BytesStart.Length);
            }
            e = Count++;

            BytesStart[e] = bufferUpto + Pool.ByteOffset;

            // We first encode the length, followed by the
            // bytes. Length is encoded as vInt, but will consume
            // 1 or 2 bytes at most (we reject too-long terms,
            // above).
            if (length < 128)
            {
                // 1 byte to store length
                buffer[bufferUpto] = (byte)length;
                Pool.ByteUpto += length + 1;
                Debug.Assert(length >= 0, "Length must be positive: " + length);
                Array.Copy(bytes.Bytes, bytes.Offset, buffer, bufferUpto + 1, length);
            }
            else
            {
                // 2 byte to store length
                buffer[bufferUpto] = unchecked((byte)(0x80 | (length & 0x7f)));
                buffer[bufferUpto + 1] = unchecked((byte)((length >> 7) & 0xff));
                Pool.ByteUpto += length + 2;
                Array.Copy(bytes.Bytes, bytes.Offset, buffer, bufferUpto + 2, length);
            }
            Debug.Assert(Ids[hashPos] == -1);
            Ids[hashPos] = e;

            // Grow once the table is half full to keep probe chains short.
            if (Count == HashHalfSize)
            {
                Rehash(2 * HashSize, true);
            }
            return e;
        }
        return -(e + 1);
    }

    /// <summary>
    /// Returns the id of the given <seealso cref="BytesRef"/>.
    /// </summary>
    /// <param name="bytes">
    ///          the bytes to look for
    /// </param>
    /// <returns> the id of the given bytes, or {@code -1} if there is no mapping for the
    ///         given bytes. </returns>
    public int Find(BytesRef bytes)
    {
        return Ids[FindHash(bytes)];
    }

    // Returns the slot holding 'bytes', or the empty slot where it would be
    // inserted. Collisions are resolved with linear probing.
    private int FindHash(BytesRef bytes)
    {
        Debug.Assert(BytesStart != null, "bytesStart is null - not initialized");

        int code = DoHash(bytes.Bytes, bytes.Offset, bytes.Length);

        // final position
        int hashPos = code & HashMask;
        int e = Ids[hashPos];
        if (e != -1 && !Equals(e, bytes))
        {
            // Conflict; use linear probe to find an open slot
            // (see LUCENE-5604):
            do
            {
                code++;
                hashPos = code & HashMask;
                e = Ids[hashPos];
            } while (e != -1 && !Equals(e, bytes));
        }

        return hashPos;
    }

    /// <summary>
    /// Adds a "arbitrary" int offset instead of a BytesRef
    ///  term.  this is used in the indexer to hold the hash for term
    ///  vectors, because they do not redundantly store the byte[] term
    ///  directly and instead reference the byte[] term
    ///  already stored by the postings BytesRefHash.  See
    ///  add(int textStart) in TermsHashPerField.
    /// </summary>
    public int AddByPoolOffset(int offset)
    {
        Debug.Assert(BytesStart != null, "Bytesstart is null - not initialized");
        // final position
        // The offset itself serves as the hash code here.
        int code = offset;
        int hashPos = offset & HashMask;
        int e = Ids[hashPos];
        if (e != -1 && BytesStart[e] != offset)
        {
            // Conflict; use linear probe to find an open slot
            // (see LUCENE-5604):
            do
            {
                code++;
                hashPos = code & HashMask;
                e = Ids[hashPos];
            } while (e != -1 && BytesStart[e] != offset);
        }
        if (e == -1)
        {
            // new entry
            if (Count >= BytesStart.Length)
            {
                BytesStart = bytesStartArray.Grow();
                Debug.Assert(Count < BytesStart.Length + 1, "count: " + Count + " len: " + BytesStart.Length);
            }
            e = Count++;
            BytesStart[e] = offset;
            Debug.Assert(Ids[hashPos] == -1);
            Ids[hashPos] = e;

            if (Count == HashHalfSize)
            {
                // hashOnData=false: re-slot by stored offsets, not byte content.
                Rehash(2 * HashSize, false);
            }
            return e;
        }
        return -(e + 1);
    }

    /// <summary>
    /// Called when hash is too small (> 50% occupied) or too large (< 20%
    /// occupied).
    /// </summary>
    // hashOnData=true re-hashes the stored byte content (entries added via Add);
    // hashOnData=false uses the raw pool offsets (entries added via AddByPoolOffset).
    private void Rehash(int newSize, bool hashOnData)
    {
        int newMask = newSize - 1;
        BytesUsed.AddAndGet(RamUsageEstimator.NUM_BYTES_INT * (newSize));
        int[] newHash = new int[newSize];
        Arrays.Fill(newHash, -1);
        for (int i = 0; i < HashSize; i++)
        {
            int e0 = Ids[i];
            if (e0 != -1)
            {
                int code;
                if (hashOnData)
                {
                    int off = BytesStart[e0];
                    int start = off & ByteBlockPool.BYTE_BLOCK_MASK;
                    var bytes = Pool.Buffers[off >> ByteBlockPool.BYTE_BLOCK_SHIFT];
                    int len;
                    int pos;
                    // Decode the 1- or 2-byte length prefix written by Add().
                    if ((bytes[start] & 0x80) == 0)
                    {
                        // length is 1 byte
                        len = bytes[start];
                        pos = start + 1;
                    }
                    else
                    {
                        len = (bytes[start] & 0x7f) + ((bytes[start + 1] & 0xff) << 7);
                        pos = start + 2;
                    }
                    code = DoHash(bytes, pos, len);
                }
                else
                {
                    code = BytesStart[e0];
                }

                int hashPos = code & newMask;
                Debug.Assert(hashPos >= 0);
                if (newHash[hashPos] != -1)
                {
                    // Conflict; use linear probe to find an open slot
                    // (see LUCENE-5604):
                    do
                    {
                        code++;
                        hashPos = code & newMask;
                    } while (newHash[hashPos] != -1);
                }
                newHash[hashPos] = e0;
            }
        }

        HashMask = newMask;
        // Release the accounting for the old table now that it is replaced.
        BytesUsed.AddAndGet(RamUsageEstimator.NUM_BYTES_INT * (-Ids.Length));
        Ids = newHash;
        HashSize = newSize;
        HashHalfSize = newSize / 2;
    }

    // TODO: maybe use long?  But our keys are typically short...
    private static int DoHash(byte[] bytes, int offset, int length)
    {
        return StringHelper.Murmurhash3_x86_32(bytes, offset, length, StringHelper.GOOD_FAST_HASH_SEED);
    }

    /// <summary>
    /// reinitializes the <seealso cref="BytesRefHash"/> after a previous <seealso cref="#clear()"/>
    /// call. If <seealso cref="#clear()"/> has not been called previously this method has no
    /// effect.
    /// </summary>
    public void Reinit()
    {
        if (BytesStart == null)
        {
            BytesStart = bytesStartArray.Init();
        }

        if (Ids == null)
        {
            Ids = new int[HashSize];
            BytesUsed.AddAndGet(RamUsageEstimator.NUM_BYTES_INT * HashSize);
        }
    }

    /// <summary>
    /// Returns the bytesStart offset into the internally used
    /// <seealso cref="ByteBlockPool"/> for the given bytesID
    /// </summary>
    /// <param name="bytesID">
    ///          the id to look up </param>
    /// <returns> the bytesStart offset into the internally used
    ///         <seealso cref="ByteBlockPool"/> for the given id </returns>
    public int ByteStart(int bytesID)
    {
        Debug.Assert(BytesStart != null, "bytesStart is null - not initialized");
        Debug.Assert(bytesID >= 0 && bytesID < Count, bytesID.ToString());
        return BytesStart[bytesID];
    }

    /// <summary>
    /// Thrown if a <seealso cref="BytesRef"/> exceeds the <seealso cref="BytesRefHash"/> limit of
    /// <seealso cref="ByteBlockPool#BYTE_BLOCK_SIZE"/>-2.
    /// </summary>
    public class MaxBytesLengthExceededException : Exception
    {
        internal MaxBytesLengthExceededException(string message)
            : base(message)
        {
        }
    }

    /// <summary>
    /// Manages allocation of the per-term addresses. </summary>
    public abstract class BytesStartArray
    {
        /// <summary>
        /// Initializes the BytesStartArray. this call will allocate memory
        /// </summary>
        /// <returns> the initialized bytes start array </returns>
        public abstract int[] Init();

        /// <summary>
        /// Grows the <seealso cref="BytesStartArray"/>
        /// </summary>
        /// <returns> the grown array </returns>
        public abstract int[] Grow();

        /// <summary>
        /// clears the <seealso cref="BytesStartArray"/> and returns the cleared instance.
        /// </summary>
        /// <returns> the cleared instance, this might be <code>null</code> </returns>
        public abstract int[] Clear();

        /// <summary>
        /// A <seealso cref="Counter"/> reference holding the number of bytes used by this
        /// <seealso cref="BytesStartArray"/>. The <seealso cref="BytesRefHash"/> uses this reference to
        /// track it memory usage
        /// </summary>
        /// <returns> a <seealso cref="AtomicLong"/> reference holding the number of bytes used
        ///         by this <seealso cref="BytesStartArray"/>. </returns>
        public abstract Counter BytesUsed();
    }

    /// <summary>
    /// A simple <seealso cref="BytesStartArray"/> that tracks
    ///  memory allocation using a private <seealso cref="Counter"/>
    ///  instance.
    /// </summary>
    public class DirectBytesStartArray : BytesStartArray
    {
        // TODO: can't we just merge this w/
        // TrackingDirectBytesStartArray...?  Just add a ctor
        // that makes a private bytesUsed?

        protected internal readonly int InitSize;
        internal int[] BytesStart;
        internal readonly Counter BytesUsed_Renamed; // "_Renamed" avoids clashing with the BytesUsed() method

        public DirectBytesStartArray(int initSize, Counter counter)
        {
            this.BytesUsed_Renamed = counter;
            this.InitSize = initSize;
        }

        public DirectBytesStartArray(int initSize)
            : this(initSize, Counter.NewCounter())
        {
        }

        public override int[] Clear()
        {
            return BytesStart = null;
        }

        public override int[] Grow()
        {
            Debug.Assert(BytesStart != null);
            return BytesStart = ArrayUtil.Grow(BytesStart, BytesStart.Length + 1);
        }

        public override int[] Init()
        {
            return BytesStart = new int[ArrayUtil.Oversize(InitSize, RamUsageEstimator.NUM_BYTES_INT)];
        }

        public override Counter BytesUsed()
        {
            return BytesUsed_Renamed;
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.AspNetCore.Razor.Language.CodeGeneration;
using Microsoft.AspNetCore.Razor.Language.Extensions;
using Microsoft.AspNetCore.Razor.Language.Intermediate;
namespace Microsoft.AspNetCore.Razor.Language.Components
{
// Based on the DesignTimeNodeWriter from Razor repo.
internal class ComponentDesignTimeNodeWriter : ComponentNodeWriter
{
private readonly ScopeStack _scopeStack = new ScopeStack();
private const string DesignTimeVariable = "__o";
// Markup produces no design-time C#; only the arguments are validated.
public override void WriteMarkupBlock(CodeRenderingContext context, MarkupBlockIntermediateNode node)
{
    if (context is null)
    {
        throw new ArgumentNullException(nameof(context));
    }

    if (node is null)
    {
        throw new ArgumentNullException(nameof(node));
    }

    // Intentionally a no-op at design time.
}
// A markup element emits nothing itself at design time, but its children may
// contain C# that must still be rendered.
public override void WriteMarkupElement(CodeRenderingContext context, MarkupElementIntermediateNode node)
{
    if (context is null)
    {
        throw new ArgumentNullException(nameof(context));
    }

    if (node is null)
    {
        throw new ArgumentNullException(nameof(node));
    }

    context.RenderChildren(node);
}
// Writes a using directive; when source info is available it is wrapped in a
// line pragma and a source mapping so design-time tooling can map back to it.
public override void WriteUsingDirective(CodeRenderingContext context, UsingDirectiveIntermediateNode node)
{
    if (context is null)
    {
        throw new ArgumentNullException(nameof(context));
    }

    if (node is null)
    {
        throw new ArgumentNullException(nameof(node));
    }

    if (!node.Source.HasValue)
    {
        context.CodeWriter.WriteUsing(node.Content);
        return;
    }

    using (context.CodeWriter.BuildLinePragma(node.Source.Value, context))
    {
        context.AddSourceMappingFor(node);
        context.CodeWriter.WriteUsing(node.Content);
    }
}
// Validates arguments, then delegates to the shared expression writer.
public override void WriteCSharpExpression(CodeRenderingContext context, CSharpExpressionIntermediateNode node)
{
    if (context is null)
    {
        throw new ArgumentNullException(nameof(context));
    }

    if (node is null)
    {
        throw new ArgumentNullException(nameof(node));
    }

    WriteCSharpExpressionInnards(context, node);
}
// Emits "__o = <expression>;" (optionally "__o = (type)<expression>;") so the
// expression gets design-time IntelliSense and diagnostics. When the node has
// source info, the assignment is wrapped in a line pragma, padded so the
// expression text lands at its original column, and source mappings are added
// for each C# token; without source info, a bare assignment is written.
private void WriteCSharpExpressionInnards(CodeRenderingContext context, CSharpExpressionIntermediateNode node, string type = null)
{
    if (node.Children.Count == 0)
    {
        return;
    }

    if (node.Source != null)
    {
        using (context.CodeWriter.BuildLinePragma(node.Source.Value, context))
        {
            // Account for the "__o = " prefix (and the cast, if any) so the
            // padding aligns the expression with its original source column.
            var offset = DesignTimeVariable.Length + " = ".Length;
            if (type != null)
            {
                offset += type.Length + 2; // two parenthesis
            }
            context.CodeWriter.WritePadding(offset, node.Source, context);
            context.CodeWriter.WriteStartAssignment(DesignTimeVariable);

            if (type != null)
            {
                context.CodeWriter.Write("(");
                context.CodeWriter.Write(type);
                context.CodeWriter.Write(")");
            }

            for (var i = 0; i < node.Children.Count; i++)
            {
                if (node.Children[i] is IntermediateToken token && token.IsCSharp)
                {
                    context.AddSourceMappingFor(token);
                    context.CodeWriter.Write(token.Content);
                }
                else
                {
                    // There may be something else inside the expression like a Template or another extension node.
                    context.RenderNode(node.Children[i]);
                }
            }

            context.CodeWriter.WriteLine(";");
        }
    }
    else
    {
        context.CodeWriter.WriteStartAssignment(DesignTimeVariable);
        for (var i = 0; i < node.Children.Count; i++)
        {
            if (node.Children[i] is IntermediateToken token && token.IsCSharp)
            {
                context.CodeWriter.Write(token.Content);
            }
            else
            {
                // There may be something else inside the expression like a Template or another extension node.
                context.RenderNode(node.Children[i]);
            }
        }

        context.CodeWriter.WriteLine(";");
    }
}
// Emits a raw C# statement block. Whitespace-only code with no source info is
// dropped entirely; whitespace WITH source info is still written (padded, no
// pragma) so source mappings stay stable. Non-whitespace code with source info
// is wrapped in a line pragma.
public override void WriteCSharpCode(CodeRenderingContext context, CSharpCodeIntermediateNode node)
{
    if (context == null)
    {
        throw new ArgumentNullException(nameof(context));
    }

    if (node == null)
    {
        throw new ArgumentNullException(nameof(node));
    }

    // The block is "whitespace" only if every child is a token whose content
    // is blank; any non-token child makes it non-whitespace.
    var isWhitespaceStatement = true;
    for (var i = 0; i < node.Children.Count; i++)
    {
        var token = node.Children[i] as IntermediateToken;
        if (token == null || !string.IsNullOrWhiteSpace(token.Content))
        {
            isWhitespaceStatement = false;
            break;
        }
    }

    IDisposable linePragmaScope = null;
    if (node.Source != null)
    {
        if (!isWhitespaceStatement)
        {
            linePragmaScope = context.CodeWriter.BuildLinePragma(node.Source.Value, context);
        }

        context.CodeWriter.WritePadding(0, node.Source.Value, context);
    }
    else if (isWhitespaceStatement)
    {
        // Don't write whitespace if there is no line mapping for it.
        return;
    }

    for (var i = 0; i < node.Children.Count; i++)
    {
        if (node.Children[i] is IntermediateToken token && token.IsCSharp)
        {
            context.AddSourceMappingFor(token);
            context.CodeWriter.Write(token.Content);
        }
        else
        {
            // There may be something else inside the statement like an extension node.
            context.RenderNode(node.Children[i]);
        }
    }

    // Close the pragma when one was opened; otherwise terminate the line so the
    // generated code stays well-formed.
    if (linePragmaScope != null)
    {
        linePragmaScope.Dispose();
    }
    else
    {
        context.CodeWriter.WriteLine();
    }
}
// Renders a dynamic attribute-name expression (so the design-time experience —
// completion and diagnostics — keeps working), then renders the children.
public override void WriteHtmlAttribute(CodeRenderingContext context, HtmlAttributeIntermediateNode node)
{
    if (context is null)
    {
        throw new ArgumentNullException(nameof(context));
    }

    if (node is null)
    {
        throw new ArgumentNullException(nameof(node));
    }

    if (node.AttributeNameExpression is CSharpExpressionIntermediateNode nameExpression)
    {
        WriteCSharpExpressionInnards(context, nameExpression, "string");
    }

    context.RenderChildren(node);
}
// HTML attribute values cannot contain C#, so nothing is emitted at design time.
public override void WriteHtmlAttributeValue(CodeRenderingContext context, HtmlAttributeValueIntermediateNode node)
{
    if (context is null)
    {
        throw new ArgumentNullException(nameof(context));
    }

    if (node is null)
    {
        throw new ArgumentNullException(nameof(node));
    }

    // Intentionally a no-op.
}
// Emits "__o = <expression>;" for a C# attribute value so the expression gets
// design-time IntelliSense and diagnostics.
public override void WriteCSharpExpressionAttributeValue(CodeRenderingContext context, CSharpExpressionAttributeValueIntermediateNode node)
{
    if (context is null)
    {
        throw new ArgumentNullException(nameof(context));
    }

    if (node is null)
    {
        throw new ArgumentNullException(nameof(node));
    }

    if (node.Children.Count == 0)
    {
        return;
    }

    context.CodeWriter.WriteStartAssignment(DesignTimeVariable);
    foreach (var child in node.Children)
    {
        if (child is IntermediateToken token && token.IsCSharp)
        {
            WriteCSharpToken(context, token);
        }
        else
        {
            // There may be something else inside the expression like a Template or another extension node.
            context.RenderNode(child);
        }
    }

    context.CodeWriter.WriteLine(";");
}
// Static HTML content has no design-time C# representation.
public override void WriteHtmlContent(CodeRenderingContext context, HtmlContentIntermediateNode node)
{
    if (context is null)
    {
        throw new ArgumentNullException(nameof(context));
    }

    if (node is null)
    {
        throw new ArgumentNullException(nameof(node));
    }

    // Intentionally a no-op.
}
// Starts an AddAttribute(...) call with a placeholder sequence number (-1, the
// design-time convention) and the literal attribute name.
protected override void BeginWriteAttribute(CodeRenderingContext context, string key)
{
    if (context is null)
    {
        throw new ArgumentNullException(nameof(context));
    }

    if (key is null)
    {
        throw new ArgumentNullException(nameof(key));
    }

    var writer = context.CodeWriter;
    writer.WriteStartMethodInvocation($"{_scopeStack.BuilderVarName}.{nameof(ComponentsApi.RenderTreeBuilder.AddAttribute)}");
    writer.Write("-1");
    writer.WriteParameterSeparator();
    writer.WriteStringLiteral(key);
}
// Starts an AddAttribute(...) call with a placeholder sequence number (-1) and
// an attribute name produced by a C# expression.
protected override void BeginWriteAttribute(CodeRenderingContext context, IntermediateNode expression)
{
    if (context is null)
    {
        throw new ArgumentNullException(nameof(context));
    }

    if (expression is null)
    {
        throw new ArgumentNullException(nameof(expression));
    }

    context.CodeWriter
        .WriteStartMethodInvocation($"{_scopeStack.BuilderVarName}.{ComponentsApi.RenderTreeBuilder.AddAttribute}")
        .Write("-1")
        .WriteParameterSeparator();

    foreach (var token in GetCSharpTokens(expression))
    {
        context.CodeWriter.Write(token.Content);
    }
}
/// <summary>
/// Emits design-time code for a component tag. Components that need generic
/// type inference are routed through a generated inference method; all others
/// are emitted inline. Always ends with a "__o = typeof(...)" reference so the
/// component's using directive is not flagged as unnecessary.
/// </summary>
public override void WriteComponent(CodeRenderingContext context, ComponentIntermediateNode node)
{
    if (context == null)
    {
        throw new ArgumentNullException(nameof(context));
    }
    if (node == null)
    {
        throw new ArgumentNullException(nameof(node));
    }
    if (node.TypeInferenceNode == null)
    {
        // Writes something like:
        //
        // __builder.OpenComponent<MyComponent>(0);
        // __builder.AddAttribute(1, "Foo", ...);
        // __builder.AddAttribute(2, "ChildContent", ...);
        // __builder.SetKey(someValue);
        // __builder.AddElementCapture(3, (__value) => _field = __value);
        // __builder.CloseComponent();
        foreach (var typeArgument in node.TypeArguments)
        {
            context.RenderNode(typeArgument);
        }
        // We need to preserve order for attributes and attribute splats since the ordering
        // has a semantic effect.
        foreach (var child in node.Children)
        {
            if (child is ComponentAttributeIntermediateNode attribute)
            {
                context.RenderNode(attribute);
            }
            else if (child is SplatIntermediateNode splat)
            {
                context.RenderNode(splat);
            }
        }
        if (node.ChildContents.Any())
        {
            foreach (var childContent in node.ChildContents)
            {
                context.RenderNode(childContent);
            }
        }
        else
        {
            // We eliminate 'empty' child content when building the tree so that usage like
            // '<MyComponent>\r\n</MyComponent>' doesn't create a child content.
            //
            // Consider what would happen if the user's cursor was inside the element. At
            // design-time we want to render an empty lambda to provide proper scoping
            // for any code that the user types.
            context.RenderNode(new ComponentChildContentIntermediateNode()
            {
                TypeName = ComponentsApi.RenderFragment.FullTypeName,
            });
        }
        foreach (var setKey in node.SetKeys)
        {
            context.RenderNode(setKey);
        }
        foreach (var capture in node.Captures)
        {
            context.RenderNode(capture);
        }
    }
    else
    {
        var parameters = GetTypeInferenceMethodParameters(node.TypeInferenceNode);
        // If this component is going to cascade any of its generic types, we have to split its type inference
        // into two parts. First we call an inference method that captures all the parameters in local variables,
        // then we use those to call the real type inference method that emits the component. The reason for this
        // is so the captured variables can be used by descendants without re-evaluating the expressions.
        CodeWriterExtensions.CSharpCodeWritingScope? typeInferenceCaptureScope = null;
        if (node.Component.SuppliesCascadingGenericParameters())
        {
            typeInferenceCaptureScope = context.CodeWriter.BuildScope();
            context.CodeWriter.Write(node.TypeInferenceNode.FullTypeName);
            context.CodeWriter.Write(".");
            context.CodeWriter.Write(node.TypeInferenceNode.MethodName);
            context.CodeWriter.Write("_CaptureParameters(");
            var isFirst = true;
            foreach (var parameter in parameters.Where(p => p.UsedForTypeInference))
            {
                if (isFirst)
                {
                    isFirst = false;
                }
                else
                {
                    context.CodeWriter.Write(", ");
                }
                WriteTypeInferenceMethodParameterInnards(context, parameter);
                context.CodeWriter.Write(", out var ");
                // Uniquely named local so descendants can reference the captured value.
                var variableName = $"__typeInferenceArg_{_scopeStack.Depth}_{parameter.ParameterName}";
                context.CodeWriter.Write(variableName);
                UseCapturedCascadingGenericParameterVariable(node, parameter, variableName);
            }
            context.CodeWriter.WriteLine(");");
        }
        // When we're doing type inference, we can't write all of the code inline to initialize
        // the component on the builder. We generate a method elsewhere, and then pass all of the information
        // to that method. We pass in all of the attribute values + the sequence numbers.
        //
        // __Blazor.MyComponent.TypeInference.CreateMyComponent_0(__builder, 0, 1, ..., 2, ..., 3, ....);
        context.CodeWriter.Write(node.TypeInferenceNode.FullTypeName);
        context.CodeWriter.Write(".");
        context.CodeWriter.Write(node.TypeInferenceNode.MethodName);
        context.CodeWriter.Write("(");
        context.CodeWriter.Write(_scopeStack.BuilderVarName);
        context.CodeWriter.Write(", ");
        context.CodeWriter.Write("-1");
        foreach (var parameter in parameters)
        {
            context.CodeWriter.Write(", ");
            // Sequence numbers are not meaningful at design time; always pass -1.
            if (!string.IsNullOrEmpty(parameter.SeqName))
            {
                context.CodeWriter.Write("-1");
                context.CodeWriter.Write(", ");
            }
            WriteTypeInferenceMethodParameterInnards(context, parameter);
        }
        context.CodeWriter.Write(");");
        context.CodeWriter.WriteLine();
        if (typeInferenceCaptureScope.HasValue)
        {
            typeInferenceCaptureScope.Value.Dispose();
        }
    }
    // We want to generate something that references the Component type to avoid
    // the "usings directive is unnecessary" message.
    // Looks like:
    // __o = typeof(SomeNamespace.SomeComponent);
    using (context.CodeWriter.BuildLinePragma(node.Source.Value, context))
    {
        context.CodeWriter.Write(DesignTimeVariable);
        context.CodeWriter.Write(" = ");
        context.CodeWriter.Write("typeof(");
        context.CodeWriter.Write(node.TagName);
        if (node.Component.IsGenericTypedComponent())
        {
            // Open generic form, e.g. typeof(MyComponent<,>): one comma per
            // additional type parameter.
            context.CodeWriter.Write("<");
            var typeArgumentCount = node.Component.GetTypeParameters().Count();
            for (var i = 1; i < typeArgumentCount; i++)
            {
                context.CodeWriter.Write(",");
            }
            context.CodeWriter.Write(">");
        }
        context.CodeWriter.Write(");");
        context.CodeWriter.WriteLine();
    }
}
/// <summary>
/// Writes the argument expression for a single type-inference method parameter,
/// dispatching on the kind of intermediate node the parameter came from.
/// </summary>
private void WriteTypeInferenceMethodParameterInnards(CodeRenderingContext context, TypeInferenceMethodParameter parameter)
{
    var source = parameter.Source;
    if (source is ComponentAttributeIntermediateNode attribute)
    {
        // Don't type check generics, since we can't actually write the type name.
        // The type checking will happen anyway since we defined a method and we're
        // generating a call to it.
        WriteComponentAttributeInnards(context, attribute, canTypeCheck: false);
    }
    else if (source is SplatIntermediateNode splat)
    {
        WriteSplatInnards(context, splat, canTypeCheck: false);
    }
    else if (source is ComponentChildContentIntermediateNode childContent)
    {
        WriteComponentChildContentInnards(context, childContent);
    }
    else if (source is SetKeyIntermediateNode setKey)
    {
        WriteSetKeyInnards(context, setKey);
    }
    else if (source is ReferenceCaptureIntermediateNode capture)
    {
        WriteReferenceCaptureInnards(context, capture, shouldTypeCheck: false);
    }
    else if (source is CascadingGenericTypeParameter syntheticArg)
    {
        // The value should be populated before we use it, because we emit code for
        // creating ancestors first, and that's where it's populated. However if this
        // goes wrong somehow, we don't want to throw, so use a fallback.
        context.CodeWriter.Write(syntheticArg.ValueExpression ?? "default");
    }
    else if (source is TypeInferenceCapturedVariable capturedVariable)
    {
        context.CodeWriter.Write(capturedVariable.VariableName);
    }
    else
    {
        throw new InvalidOperationException($"Not implemented: type inference method parameter from source {source}");
    }
}
/// <summary>
/// Emits a design-time assignment of the attribute's value expression, e.g.
///   __o = 17;
/// so the IDE gets completion/diagnostics for the expression.
/// </summary>
public override void WriteComponentAttribute(CodeRenderingContext context, ComponentAttributeIntermediateNode node)
{
    if (context == null)
    {
        throw new ArgumentNullException(nameof(context));
    }
    if (node == null)
    {
        throw new ArgumentNullException(nameof(node));
    }

    var writer = context.CodeWriter;
    writer.Write(DesignTimeVariable);
    writer.Write(" = ");

    // The value expression itself follows the same shape as the runtime codegen.
    WriteComponentAttributeInnards(context, node, canTypeCheck: true);

    writer.Write(";");
    writer.WriteLine();
}
/// <summary>
/// Writes just the value expression for a component attribute (no assignment or
/// AddAttribute call), handling the different attribute shapes: minimized,
/// HTML-literal, delegate/child-content, EventCallback, and plain C# code.
/// When <paramref name="canTypeCheck"/> is false (type-inference call sites),
/// no TypeCheck/constructor wrapping is emitted.
/// </summary>
private void WriteComponentAttributeInnards(CodeRenderingContext context, ComponentAttributeIntermediateNode node, bool canTypeCheck)
{
    // We limit component attributes to simple cases. However there is still a lot of complexity
    // to handle here, since there are a few different cases for how an attribute might be structured.
    //
    // This roughly follows the design of the runtime writer for simplicity.
    if (node.AttributeStructure == AttributeStructure.Minimized)
    {
        // Minimized attributes always map to 'true'
        context.CodeWriter.Write("true");
    }
    else if (node.Children.Count > 1)
    {
        // We don't expect this to happen, we just want to know if it can.
        throw new InvalidOperationException("Attribute nodes should either be minimized or a single type of content." + string.Join(", ", node.Children));
    }
    else if (node.Children.Count == 1 && node.Children[0] is HtmlContentIntermediateNode)
    {
        // We don't actually need the content at designtime, an empty string will do.
        context.CodeWriter.Write("\"\"");
    }
    else
    {
        // There are a few different forms that could be used to contain all of the tokens, but we don't really care
        // exactly what it looks like - we just want all of the content.
        //
        // This can include an empty list in some cases like the following (sic):
        // <MyComponent Value="
        //
        // Or a CSharpExpressionIntermediateNode when the attribute has an explicit transition like:
        // <MyComponent Value="@value" />
        //
        // Or a list of tokens directly in the attribute.
        var tokens = GetCSharpTokens(node);
        if ((node.BoundAttribute?.IsDelegateProperty() ?? false) ||
            (node.BoundAttribute?.IsChildContentProperty() ?? false))
        {
            // We always surround the expression with the delegate constructor. This makes type
            // inference inside lambdas, and method group conversion do the right thing.
            if (canTypeCheck)
            {
                context.CodeWriter.Write("new ");
                context.CodeWriter.Write(node.TypeName);
                context.CodeWriter.Write("(");
            }
            context.CodeWriter.WriteLine();
            for (var i = 0; i < tokens.Count; i++)
            {
                WriteCSharpToken(context, tokens[i]);
            }
            if (canTypeCheck)
            {
                context.CodeWriter.Write(")");
            }
        }
        else if (node.BoundAttribute?.IsEventCallbackProperty() ?? false)
        {
            // This is the case where we are writing an EventCallback (a delegate with super-powers).
            //
            // An event callback can either be passed verbatim, or it can be created by the EventCallbackFactory.
            // Since we don't look at the code the user typed inside the attribute value, this is always
            // resolved via overloading.
            if (canTypeCheck && NeedsTypeCheck(node))
            {
                context.CodeWriter.Write(ComponentsApi.RuntimeHelpers.TypeCheck);
                context.CodeWriter.Write("<");
                context.CodeWriter.Write(node.TypeName);
                context.CodeWriter.Write(">");
                context.CodeWriter.Write("(");
            }
            // Microsoft.AspNetCore.Components.EventCallback.Factory.Create(this, ...) OR
            // Microsoft.AspNetCore.Components.EventCallback.Factory.Create<T>(this, ...)
            context.CodeWriter.Write(ComponentsApi.EventCallback.FactoryAccessor);
            context.CodeWriter.Write(".");
            context.CodeWriter.Write(ComponentsApi.EventCallbackFactory.CreateMethod);
            if (node.TryParseEventCallbackTypeArgument(out StringSegment argument))
            {
                context.CodeWriter.Write("<");
                context.CodeWriter.Write(argument);
                context.CodeWriter.Write(">");
            }
            context.CodeWriter.Write("(");
            context.CodeWriter.Write("this");
            context.CodeWriter.Write(", ");
            context.CodeWriter.WriteLine();
            for (var i = 0; i < tokens.Count; i++)
            {
                WriteCSharpToken(context, tokens[i]);
            }
            context.CodeWriter.Write(")");
            if (canTypeCheck && NeedsTypeCheck(node))
            {
                context.CodeWriter.Write(")");
            }
        }
        else
        {
            // This is the case when an attribute contains C# code
            //
            // If we have a parameter type, then add a type check.
            if (canTypeCheck && NeedsTypeCheck(node))
            {
                context.CodeWriter.Write(ComponentsApi.RuntimeHelpers.TypeCheck);
                context.CodeWriter.Write("<");
                context.CodeWriter.Write(node.TypeName);
                context.CodeWriter.Write(">");
                context.CodeWriter.Write("(");
            }
            for (var i = 0; i < tokens.Count; i++)
            {
                WriteCSharpToken(context, tokens[i]);
            }
            if (canTypeCheck && NeedsTypeCheck(node))
            {
                context.CodeWriter.Write(")");
            }
        }
    }
    static bool NeedsTypeCheck(ComponentAttributeIntermediateNode n)
    {
        // Weakly typed attributes will have their TypeName set to null.
        return n.BoundAttribute != null && n.TypeName != null;
    }
}
/// <summary>
/// Collects the C# tokens under <paramref name="node"/>. Children are expected
/// to all be C#; the filter is just defensive.
/// </summary>
private IReadOnlyList<IntermediateToken> GetCSharpTokens(IntermediateNode node)
{
    var descendants = node.FindDescendantNodes<IntermediateToken>();
    return descendants.Where(t => t.IsCSharp).ToArray();
}
/// <summary>
/// Emits an AddAttribute call for child content, e.g.:
///   __builder.AddAttribute(-1, "ChildContent", (RenderFragment)((__builder73) => { ... }));
/// or, for parameterized content:
///   __builder.AddAttribute(-1, "ChildContent", (RenderFragment&lt;Person&gt;)((person) => (__builder73) => { ... }));
/// </summary>
public override void WriteComponentChildContent(CodeRenderingContext context, ComponentChildContentIntermediateNode node)
{
    if (context == null)
    {
        throw new ArgumentNullException(nameof(context));
    }
    if (node == null)
    {
        throw new ArgumentNullException(nameof(node));
    }

    var writer = context.CodeWriter;
    BeginWriteAttribute(context, node.AttributeName);
    writer.WriteParameterSeparator();
    writer.Write($"({node.TypeName})(");
    WriteComponentChildContentInnards(context, node);
    writer.Write(")");
    writer.WriteEndMethodInvocation();
}
/// <summary>
/// Writes the lambda body for child content:
///   ((__builder73) => { ... })
/// or, when parameterized:
///   ((person) => (__builder73) => { ... })
/// </summary>
private void WriteComponentChildContentInnards(CodeRenderingContext context, ComponentChildContentIntermediateNode node)
{
    var parameterName = node.IsParameterized ? node.ParameterName : null;
    _scopeStack.OpenComponentScope(context, node.AttributeName, parameterName);

    foreach (var child in node.Children)
    {
        context.RenderNode(child);
    }

    _scopeStack.CloseScope(context);
}
/// <summary>
/// At design time, emits the equivalent of:
///   __o = typeof(TItem);
/// so the type argument expression gets IDE support.
/// </summary>
public override void WriteComponentTypeArgument(CodeRenderingContext context, ComponentTypeArgumentIntermediateNode node)
{
    if (context == null)
    {
        throw new ArgumentNullException(nameof(context));
    }
    if (node == null)
    {
        throw new ArgumentNullException(nameof(node));
    }

    var writer = context.CodeWriter;
    writer.Write(DesignTimeVariable);
    writer.Write(" = ");
    writer.Write("typeof(");
    foreach (var token in GetTypeArgumentTokens(node))
    {
        WriteCSharpToken(context, token);
    }
    writer.Write(");");
    writer.WriteLine();

    IReadOnlyList<IntermediateToken> GetTypeArgumentTokens(ComponentTypeArgumentIntermediateNode arg)
    {
        // Children are expected to all be C#; the filter is just defensive.
        return arg.FindDescendantNodes<IntermediateToken>().Where(t => t.IsCSharp).ToArray();
    }
}
/// <summary>
/// Emits a template as a render-fragment lambda:
///   (__builder73) => { ... }
/// rendering the template's children inside the opened scope.
/// </summary>
public override void WriteTemplate(CodeRenderingContext context, TemplateIntermediateNode node)
{
    if (context == null)
    {
        throw new ArgumentNullException(nameof(context));
    }
    if (node == null)
    {
        throw new ArgumentNullException(nameof(node));
    }
    // Looks like:
    //
    // (__builder73) => { ... }
    _scopeStack.OpenTemplateScope(context);
    context.RenderChildren(node);
    _scopeStack.CloseScope(context);
}
/// <summary>
/// Emits a SetKey call:
///   __builder.SetKey(_keyValue);
/// </summary>
public override void WriteSetKey(CodeRenderingContext context, SetKeyIntermediateNode node)
{
    if (context == null)
    {
        throw new ArgumentNullException(nameof(context));
    }
    if (node == null)
    {
        throw new ArgumentNullException(nameof(node));
    }

    var writer = context.CodeWriter;
    writer.WriteStartMethodInvocation($"{_scopeStack.BuilderVarName}.{ComponentsApi.RenderTreeBuilder.SetKey}");
    WriteSetKeyInnards(context, node);
    writer.WriteEndMethodInvocation();
}
/// <summary>
/// Writes the key-value expression by wrapping the key token in a C# code node
/// and delegating to the standard C# code writer.
/// </summary>
private void WriteSetKeyInnards(CodeRenderingContext context, SetKeyIntermediateNode node)
{
    var code = new CSharpCodeIntermediateNode
    {
        Source = node.Source,
    };
    code.Children.Add(node.KeyValueToken);
    WriteCSharpCode(context, code);
}
/// <summary>
/// Emits an attribute splat:
///   __builder.AddMultipleAttributes(-1, ...);
/// </summary>
public override void WriteSplat(CodeRenderingContext context, SplatIntermediateNode node)
{
    if (context == null)
    {
        throw new ArgumentNullException(nameof(context));
    }
    if (node == null)
    {
        throw new ArgumentNullException(nameof(node));
    }

    var writer = context.CodeWriter;
    writer.WriteStartMethodInvocation($"{_scopeStack.BuilderVarName}.{ComponentsApi.RenderTreeBuilder.AddMultipleAttributes}");
    writer.Write("-1");
    writer.WriteParameterSeparator();
    WriteSplatInnards(context, node, canTypeCheck: true);
    writer.WriteEndMethodInvocation();
}
/// <summary>
/// Writes the splat value expression, optionally wrapped in a TypeCheck call so
/// type errors surface at design time.
/// </summary>
private void WriteSplatInnards(CodeRenderingContext context, SplatIntermediateNode node, bool canTypeCheck)
{
    var writer = context.CodeWriter;
    var tokens = GetCSharpTokens(node);

    if (canTypeCheck)
    {
        writer.Write(ComponentsApi.RuntimeHelpers.TypeCheck);
        writer.Write("<");
        writer.Write(ComponentsApi.AddMultipleAttributesTypeFullName);
        writer.Write(">");
        writer.Write("(");
    }

    foreach (var token in tokens)
    {
        WriteCSharpToken(context, token);
    }

    if (canTypeCheck)
    {
        writer.Write(")");
    }
}
/// <summary>
/// Emits design-time code for a @ref capture. Looks like:
///   __field = default(MyComponent);
/// </summary>
public override void WriteReferenceCapture(CodeRenderingContext context, ReferenceCaptureIntermediateNode node)
{
    if (context == null)
    {
        throw new ArgumentNullException(nameof(context));
    }
    if (node == null)
    {
        throw new ArgumentNullException(nameof(node));
    }
    // Looks like:
    //
    // __field = default(MyComponent);
    WriteReferenceCaptureInnards(context, node, shouldTypeCheck: true);
}
/// <summary>
/// Writes the capture assignment. With type checking, emits
/// "identifier = default(CaptureType);" so unknown-identifier/type errors match
/// what the runtime writer would produce; without it (type-inference call
/// sites), emits a lambda assigning the untyped "__value" parameter.
/// </summary>
protected override void WriteReferenceCaptureInnards(CodeRenderingContext context, ReferenceCaptureIntermediateNode node, bool shouldTypeCheck)
{
    // We specialize this code based on whether or not we can type check. When we're calling into
    // a type-inferenced component, we can't do the type check. See the comments in WriteTypeInferenceMethod.
    if (shouldTypeCheck)
    {
        // The runtime node writer moves the call elsewhere. At design time we
        // just want sufficiently similar code that any unknown-identifier or type
        // errors will be equivalent
        var captureTypeName = node.IsComponentCapture
            ? node.ComponentCaptureTypeName
            : ComponentsApi.ElementReference.FullTypeName;
        WriteCSharpCode(context, new CSharpCodeIntermediateNode
        {
            Source = node.Source,
            Children =
            {
                node.IdentifierToken,
                new IntermediateToken
                {
                    Kind = TokenKind.CSharp,
                    Content = $" = default({captureTypeName});"
                }
            }
        });
    }
    else
    {
        // Looks like:
        //
        // (__value) = { _field = (MyComponent)__value; }
        // OR
        // (__value) = { _field = (ElementRef)__value; }
        const string refCaptureParamName = "__value";
        using (var lambdaScope = context.CodeWriter.BuildLambda(refCaptureParamName))
        {
            WriteCSharpCode(context, new CSharpCodeIntermediateNode
            {
                Source = node.Source,
                Children =
                {
                    node.IdentifierToken,
                    new IntermediateToken
                    {
                        Kind = TokenKind.CSharp,
                        Content = $" = {refCaptureParamName};"
                    }
                }
            });
        }
    }
}
/// <summary>
/// Writes a single C# token. Tokens with a document location get a line pragma
/// and a source mapping; tokens without one are written verbatim.
/// </summary>
private void WriteCSharpToken(CodeRenderingContext context, IntermediateToken token)
{
    var content = token.Content;
    if (string.IsNullOrWhiteSpace(content))
    {
        return;
    }

    if (token.Source?.FilePath == null)
    {
        // No file location: nothing to map, just emit the raw content.
        context.CodeWriter.Write(content);
        return;
    }

    using (context.CodeWriter.BuildLinePragma(token.Source, context))
    {
        context.CodeWriter.WritePadding(0, token.Source.Value, context);
        context.AddSourceMappingFor(token);
        context.CodeWriter.Write(content);
    }
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Linq;
using System.Text;
using System.Web.UI.WebControls;
using ILPathways.Common;
using Isle.DTO;
using LRWarehouse.Business;
using LRWarehouse.DAL;
using LDBM = LRWarehouse.DAL.DatabaseManager;
using ResBiz = IOERBusinessEntities;
using RV2 = LRWarehouse.Business.ResourceV2;
namespace Isle.BizServices
{
/// <summary>
/// Business-service facade for code tables and site filter lookups. Mostly thin
/// delegation to EFCodesManager / CodeTableManager, plus DTO transforms for the
/// web API.
/// </summary>
public class CodeTableBizService : ServiceHelper
{
    #region ==== Site filter methods ====

    /// <summary>
    /// Get a site and its filter categories by site title.
    /// </summary>
    /// <param name="siteName">Site title to look up.</param>
    /// <param name="mustHaveValues">If true, only include codes with a warehouse total greater than zero.</param>
    public static CodesSite Site_SelectFilters( string siteName, bool mustHaveValues )
    {
        return ResBiz.EFCodesManager.Codes_Site_GetByTitle( siteName, mustHaveValues );
    } //

    /// <summary>
    /// Get a site and its filter categories by site id.
    /// </summary>
    /// <param name="siteId">Site id to look up.</param>
    /// <param name="mustHaveValues">If true, only include codes with a warehouse total greater than zero.</param>
    public static CodesSite Site_SelectFilters( int siteId, bool mustHaveValues )
    {
        return ResBiz.EFCodesManager.Codes_Site_Get( siteId, mustHaveValues );
    } //

    /// <summary>
    /// Get a site, and list of filter categories
    /// </summary>
    /// <param name="siteId"></param>
    /// <returns></returns>
    public static Isle.DTO.Site Site_Get( int siteId )
    {
        return ResBiz.EFCodesManager.Codes_Site_Get( siteId );
    } //

    /// <summary>
    /// Get all filter categories for a site
    /// </summary>
    /// <param name="siteId"></param>
    /// <returns></returns>
    public static List<Isle.DTO.SiteTagCategory> Site_SelectFilterCategories( int siteId )
    {
        return ResBiz.EFCodesManager.Codes_TagCategory_Fill( siteId );
    } //
    #endregion

    #region === Site filter methods for WebApi =====================

    /// <summary>
    /// Get a site by title and transform it into a web-api friendly DTO.
    /// </summary>
    public static SiteFiltersDTO Site_SelectAsDto( string siteName, bool mustHaveValues )
    {
        CodesSite site = ResBiz.EFCodesManager.Codes_Site_GetByTitle( siteName, mustHaveValues );
        return TransformSite( site );
    } //

    /// <summary>
    /// Get a site by id and transform it into a web-api friendly DTO.
    /// </summary>
    public static SiteFiltersDTO Site_SelectAsDto( int siteId, bool mustHaveValues )
    {
        CodesSite site = ResBiz.EFCodesManager.Codes_Site_Get( siteId, mustHaveValues );
        return TransformSite( site );
    } //

    /// <summary>
    /// Map a CodesSite business object to a SiteFiltersDTO.
    /// Returns a DTO with IsValid == false when the site has no usable data.
    /// </summary>
    public static SiteFiltersDTO TransformSite( CodesSite site )
    {
        SiteFiltersDTO siteFilters = new SiteFiltersDTO();
        if ( site == null || site.Id == 0 || site.SiteTagCategories == null || site.SiteTagCategories.Count == 0 )
        {
            siteFilters.IsValid = false;
            siteFilters.Message = "No data";
            return siteFilters;
        }
        siteFilters.Id = site.Id;
        siteFilters.IsValid = true;
        siteFilters.SiteName = site.Title;
        // NOTE(review): FiltersCount counts all categories, while FilterList below only
        // receives categories that actually have tag values, so the two can disagree.
        // Preserved as-is - confirm whether the mismatch is intentional.
        siteFilters.FiltersCount = site.SiteTagCategories.Count;
        siteFilters.FilterList = new List<SiteFiltersTagsDTO>();
        foreach ( CodesSiteTagCategory tag in site.SiteTagCategories )
        {
            SiteFiltersTagsDTO filter = new SiteFiltersTagsDTO();
            filter.Id = tag.Id;
            filter.SiteId = tag.SiteId;
            filter.CategoryId = tag.CategoryId;
            filter.Title = tag.Title;
            filter.Description = tag.Description;
            filter.SchemaTag = tag.SchemaTag;
            filter.SortOrder = tag.SortOrder;
            if ( tag.TagCategory.TagValues != null && tag.TagCategory.TagValues.Count > 0 )
            {
                filter.FilterValues = new List<SiteFilterValueDTO>();
                foreach ( CodesTagValue val in tag.TagCategory.TagValues )
                {
                    SiteFilterValueDTO fv = new SiteFilterValueDTO();
                    fv.Id = val.Id;
                    fv.CategoryId = val.CategoryId;
                    fv.CodeId = val.CodeId;
                    fv.Title = val.Title;
                    fv.Description = val.Description;
                    fv.SortOrder = val.SortOrder;
                    fv.SchemaTag = val.SchemaTag;
                    fv.WarehouseTotal = val.WarehouseTotal;
                    filter.FilterValues.Add( fv );
                }
                // Only categories that have values are returned to the caller.
                siteFilters.FilterList.Add( filter );
            }
        }
        return siteFilters;
    }
    #endregion

    #region === generic handling of resource code tables ===

    /// <summary>
    /// Return all active Code.TagValue rows
    /// </summary>
    /// <param name="pWithValuesOnly">Set to true to only return codes with warehouse value > 0</param>
    /// <returns></returns>
    public static DataSet Codes_TagValue_GetAll( bool pWithValuesOnly )
    {
        return CodeTableManager.Codes_TagValue_GetAll( 0, pWithValuesOnly );
    }

    /// <summary>
    /// Return values for a code table, optionally specify where to return all rows or only those with total used > 0
    /// </summary>
    /// <param name="tableName"></param>
    /// <param name="mustHaveValues">If true, only return rows with total</param>
    /// <returns></returns>
    public static List<CodeItem> Resource_CodeTableSelectList( string tableName, bool mustHaveValues )
    {
        return Resource_CodeTableSelectLists( tableName, mustHaveValues );
    } //

    /// <summary>
    /// Return all values for a code table
    /// </summary>
    /// <param name="tableName"></param>
    /// <returns></returns>
    public static List<CodeItem> Resource_CodeTableSelectList( string tableName )
    {
        return Resource_CodeTableSelectLists( tableName, false );
    } //

    /// <summary>
    /// Shared implementation: query the code table and map each row to a CodeItem.
    /// </summary>
    private static List<CodeItem> Resource_CodeTableSelectLists( string tableName, bool mustHaveValues )
    {
        List<CodeItem> list = new List<CodeItem>();
        DataSet ds = Resource_CodeTableSelect( tableName );
        if ( DoesDataSetHaveRows( ds ) )
        {
            foreach ( DataRow row in ds.Tables[ 0 ].Rows )
            {
                CodeItem code = new CodeItem();
                code.Id = GetRowColumn( row, "Id", 0 );
                code.Title = GetRowColumn( row, "Title", "Missing" );
                code.Description = GetRowColumn( row, "Description", "Missing" );
                code.WarehouseTotal = GetRowColumn( row, "WarehouseTotal", 0 );
                // SortOrder is not part of the SELECT list in Resource_CodeTableSelect,
                // so use the "possible column" helper with a default of 10.
                code.SortOrder = GetRowPossibleColumn( row, "SortOrder", 10 );
                if ( mustHaveValues == false || code.WarehouseTotal > 0 )
                    list.Add( code );
            }
        }
        return list;
    } //

    /// <summary>
    /// Select Id/Title/Description/WarehouseTotal for all active rows of a code table,
    /// ordered by title.
    /// </summary>
    public static DataSet Resource_CodeTableSelect( string tableName )
    {
        // A table name cannot be passed as a SQL parameter, so escape any closing
        // bracket to keep the bracket-quoted identifier intact (prevents SQL
        // injection via a crafted table name). Legitimate names are unaffected.
        string safeTableName = ( tableName ?? string.Empty ).Replace( "]", "]]" );
        string sql = string.Format( "SELECT [Id],[Title] ,[Description] ,isnull([WarehouseTotal],0) As [WarehouseTotal] FROM [dbo].[{0}] where IsActive= 1 order by title", safeTableName );
        DataSet ds = LDBM.DoQuery( sql );
        return ds;
    } //
    #endregion

    #region == Mapping searches =====================

    /// <summary>
    /// Map Career Cluster Search
    /// </summary>
    /// <param name="pFilter"></param>
    /// <param name="pOrderBy"></param>
    /// <param name="pStartPageIndex"></param>
    /// <param name="pMaximumRows"></param>
    /// <param name="pTotalRows"></param>
    /// <returns></returns>
    public static DataSet MapCareerCluster_Search( string pFilter, string pOrderBy, int pStartPageIndex, int pMaximumRows, ref int pTotalRows )
    {
        return CodeTableManager.MapCareerCluster_Search( pFilter, pOrderBy, pStartPageIndex, pMaximumRows, ref pTotalRows );
    }

    /// <summary>
    /// Map K12 Subject Search
    /// </summary>
    /// <param name="pFilter"></param>
    /// <param name="pOrderBy"></param>
    /// <param name="pStartPageIndex"></param>
    /// <param name="pMaximumRows"></param>
    /// <param name="pTotalRows"></param>
    /// <returns></returns>
    public static DataSet MapK12Subject_Search( string pFilter, string pOrderBy, int pStartPageIndex, int pMaximumRows, ref int pTotalRows )
    {
        return CodeTableManager.MapK12Subject_Search( pFilter, pOrderBy, pStartPageIndex, pMaximumRows, ref pTotalRows );
    }
    #endregion

    /// <summary>
    /// Delegates age-range determination for a resource to CodeTableManager.
    /// </summary>
    public static DataSet DeterminingAgeRanges( RV2.ResourceDTO input )
    {
        return CodeTableManager.DeterminingAgeRanges( input );
    } //

    /// <summary>
    /// Populate a grid page-size dropdown from the GridPageSize code values.
    /// </summary>
    public static void PopulateGridPageSizeList( ref DropDownList list )
    {
        DataSet ds = LDBM.GetCodeValues( "GridPageSize", "SortOrder" );
        LDBM.PopulateList( list, ds, "StringValue", "StringValue", "Select Size" );
    } //
}
}
| |
using System;
using System.Data;
using Csla;
using Csla.Data;
using SelfLoadSoftDelete.DataAccess;
using SelfLoadSoftDelete.DataAccess.ERCLevel;
namespace SelfLoadSoftDelete.Business.ERCLevel
{
/// <summary>
/// H07_Country_Child (editable child object).<br/>
/// This is a generated base class of <see cref="H07_Country_Child"/> business object.
/// </summary>
/// <remarks>
/// This class is an item of <see cref="H06_Country"/> collection.
/// Generated CSLA code: edits here may be overwritten by the generator; extend
/// behavior through the partial "On*" hook methods instead.
/// </remarks>
[Serializable]
public partial class H07_Country_Child : BusinessBase<H07_Country_Child>
{

    #region Business Properties

    /// <summary>
    /// Maintains metadata about <see cref="Country_Child_Name"/> property.
    /// </summary>
    // NOTE(review): the friendly name "Regions Child Name" looks copy-pasted from a
    // sibling hierarchy level - confirm against the generator template.
    public static readonly PropertyInfo<string> Country_Child_NameProperty = RegisterProperty<string>(p => p.Country_Child_Name, "Regions Child Name");
    /// <summary>
    /// Gets or sets the Regions Child Name.
    /// </summary>
    /// <value>The Regions Child Name.</value>
    public string Country_Child_Name
    {
        get { return GetProperty(Country_Child_NameProperty); }
        set { SetProperty(Country_Child_NameProperty, value); }
    }

    #endregion

    #region Factory Methods

    /// <summary>
    /// Factory method. Creates a new <see cref="H07_Country_Child"/> object.
    /// </summary>
    /// <returns>A reference to the created <see cref="H07_Country_Child"/> object.</returns>
    internal static H07_Country_Child NewH07_Country_Child()
    {
        return DataPortal.CreateChild<H07_Country_Child>();
    }

    /// <summary>
    /// Factory method. Loads a <see cref="H07_Country_Child"/> object, based on given parameters.
    /// </summary>
    /// <param name="country_ID1">The Country_ID1 parameter of the H07_Country_Child to fetch.</param>
    /// <returns>A reference to the fetched <see cref="H07_Country_Child"/> object.</returns>
    internal static H07_Country_Child GetH07_Country_Child(int country_ID1)
    {
        return DataPortal.FetchChild<H07_Country_Child>(country_ID1);
    }

    #endregion

    #region Constructor

    /// <summary>
    /// Initializes a new instance of the <see cref="H07_Country_Child"/> class.
    /// </summary>
    /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
    [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
    public H07_Country_Child()
    {
        // Use factory methods and do not use direct creation.

        // show the framework that this is a child object
        MarkAsChild();
    }

    #endregion

    #region Data Access

    /// <summary>
    /// Loads default values for the <see cref="H07_Country_Child"/> object properties.
    /// </summary>
    [Csla.RunLocal]
    protected override void Child_Create()
    {
        var args = new DataPortalHookArgs();
        OnCreate(args);
        base.Child_Create();
    }

    /// <summary>
    /// Loads a <see cref="H07_Country_Child"/> object from the database, based on given criteria.
    /// </summary>
    /// <param name="country_ID1">The Country ID1.</param>
    protected void Child_Fetch(int country_ID1)
    {
        var args = new DataPortalHookArgs(country_ID1);
        OnFetchPre(args);
        using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
        {
            var dal = dalManager.GetProvider<IH07_Country_ChildDal>();
            var data = dal.Fetch(country_ID1);
            Fetch(data);
        }
        OnFetchPost(args);
    }

    // Wraps the raw reader in a SafeDataReader and loads the first row, if any.
    private void Fetch(IDataReader data)
    {
        using (var dr = new SafeDataReader(data))
        {
            if (dr.Read())
            {
                Fetch(dr);
            }
        }
    }

    /// <summary>
    /// Loads a <see cref="H07_Country_Child"/> object from the given SafeDataReader.
    /// </summary>
    /// <param name="dr">The SafeDataReader to use.</param>
    private void Fetch(SafeDataReader dr)
    {
        // Value properties
        LoadProperty(Country_Child_NameProperty, dr.GetString("Country_Child_Name"));
        var args = new DataPortalHookArgs(dr);
        OnFetchRead(args);
    }

    /// <summary>
    /// Inserts a new <see cref="H07_Country_Child"/> object in the database.
    /// </summary>
    /// <param name="parent">The parent object.</param>
    [Transactional(TransactionalTypes.TransactionScope)]
    private void Child_Insert(H06_Country parent)
    {
        using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
        {
            var args = new DataPortalHookArgs();
            OnInsertPre(args);
            var dal = dalManager.GetProvider<IH07_Country_ChildDal>();
            using (BypassPropertyChecks)
            {
                // The child row is keyed by the parent's Country_ID.
                dal.Insert(
                    parent.Country_ID,
                    Country_Child_Name
                    );
            }
            OnInsertPost(args);
        }
    }

    /// <summary>
    /// Updates in the database all changes made to the <see cref="H07_Country_Child"/> object.
    /// </summary>
    /// <param name="parent">The parent object.</param>
    [Transactional(TransactionalTypes.TransactionScope)]
    private void Child_Update(H06_Country parent)
    {
        // Skip the round trip when nothing changed.
        if (!IsDirty)
            return;

        using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
        {
            var args = new DataPortalHookArgs();
            OnUpdatePre(args);
            var dal = dalManager.GetProvider<IH07_Country_ChildDal>();
            using (BypassPropertyChecks)
            {
                dal.Update(
                    parent.Country_ID,
                    Country_Child_Name
                    );
            }
            OnUpdatePost(args);
        }
    }

    /// <summary>
    /// Self deletes the <see cref="H07_Country_Child"/> object from database.
    /// </summary>
    /// <param name="parent">The parent object.</param>
    /// <remarks>
    /// NOTE(review): given the SelfLoadSoftDelete DAL, dal.Delete presumably
    /// soft-deletes the row - confirm in the DAL implementation.
    /// </remarks>
    [Transactional(TransactionalTypes.TransactionScope)]
    private void Child_DeleteSelf(H06_Country parent)
    {
        using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
        {
            var args = new DataPortalHookArgs();
            OnDeletePre(args);
            var dal = dalManager.GetProvider<IH07_Country_ChildDal>();
            using (BypassPropertyChecks)
            {
                dal.Delete(parent.Country_ID);
            }
            OnDeletePost(args);
        }
    }

    #endregion

    #region DataPortal Hooks

    /// <summary>
    /// Occurs after setting all defaults for object creation.
    /// </summary>
    partial void OnCreate(DataPortalHookArgs args);

    /// <summary>
    /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
    /// </summary>
    partial void OnDeletePre(DataPortalHookArgs args);

    /// <summary>
    /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
    /// </summary>
    partial void OnDeletePost(DataPortalHookArgs args);

    /// <summary>
    /// Occurs after setting query parameters and before the fetch operation.
    /// </summary>
    partial void OnFetchPre(DataPortalHookArgs args);

    /// <summary>
    /// Occurs after the fetch operation (object or collection is fully loaded and set up).
    /// </summary>
    partial void OnFetchPost(DataPortalHookArgs args);

    /// <summary>
    /// Occurs after the low level fetch operation, before the data reader is destroyed.
    /// </summary>
    partial void OnFetchRead(DataPortalHookArgs args);

    /// <summary>
    /// Occurs after setting query parameters and before the update operation.
    /// </summary>
    partial void OnUpdatePre(DataPortalHookArgs args);

    /// <summary>
    /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
    /// </summary>
    partial void OnUpdatePost(DataPortalHookArgs args);

    /// <summary>
    /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
    /// </summary>
    partial void OnInsertPre(DataPortalHookArgs args);

    /// <summary>
    /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
    /// </summary>
    partial void OnInsertPost(DataPortalHookArgs args);

    #endregion

}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Linq;
using System.Xml.Linq;
namespace System.Xml.XPath
{
internal class XNodeNavigator : XPathNavigator, IXmlLineInfo
{
internal static readonly string xmlPrefixNamespace = XNamespace.Xml.NamespaceName;
internal static readonly string xmlnsPrefixNamespace = XNamespace.Xmlns.NamespaceName;

// Bit mask of XmlNodeType values allowed as direct content of a document.
private const int DocumentContentMask =
    (1 << (int)XmlNodeType.Element) |
    (1 << (int)XmlNodeType.ProcessingInstruction) |
    (1 << (int)XmlNodeType.Comment);

// Bit masks of XmlNodeType values allowed as element content, one entry per
// node-type slot (see the inline labels).
private static readonly int[] s_ElementContentMasks = {
    0,                                              // Root
    (1 << (int)XmlNodeType.Element),                // Element
    0,                                              // Attribute
    0,                                              // Namespace
    (1 << (int)XmlNodeType.CDATA) |
    (1 << (int)XmlNodeType.Text),                   // Text
    0,                                              // SignificantWhitespace
    0,                                              // Whitespace
    (1 << (int)XmlNodeType.ProcessingInstruction),  // ProcessingInstruction
    (1 << (int)XmlNodeType.Comment),                // Comment
    (1 << (int)XmlNodeType.Element) |
    (1 << (int)XmlNodeType.CDATA) |
    (1 << (int)XmlNodeType.Text) |
    (1 << (int)XmlNodeType.ProcessingInstruction) |
    (1 << (int)XmlNodeType.Comment)                 // All
};

// Bit mask of the text-like node types (CDATA and Text).
private const int TextMask =
    (1 << (int)XmlNodeType.CDATA) |
    (1 << (int)XmlNodeType.Text);

// NOTE(review): populated lazily by code outside this chunk - appears to cache
// the implicit xmlns:xml namespace-declaration attribute; confirm at use site.
private static XAttribute s_XmlNamespaceDeclaration;

// The navigator position is encoded by the tuple (source, parent).
// Namespace declaration uses (instance, parent element).
// Common XObjects uses (instance, null).
private XObject _source;
private XElement _parent;
private XmlNameTable _nameTable;
public XNodeNavigator(XNode node, XmlNameTable nameTable)
{
    // Position the navigator on the given node. When the caller does not
    // supply a name table, fall back to a private one pre-seeded with the
    // well-known namespace strings.
    _source = node;
    _nameTable = nameTable ?? CreateNameTable();
}
public XNodeNavigator(XNodeNavigator other)
{
_source = other._source;
_parent = other._parent;
_nameTable = other._nameTable;
}
public override string BaseURI
{
get
{
if (_source != null)
{
return _source.BaseUri;
}
if (_parent != null)
{
return _parent.BaseUri;
}
return string.Empty;
}
}
public override bool HasAttributes
{
    get
    {
        // Only non-namespace-declaration attributes count here: XPath
        // models xmlns/xmlns:* declarations as namespace nodes, not
        // attribute nodes.
        XElement element = _source as XElement;
        if (element == null)
        {
            return false;
        }
        return element.Attributes().Any(a => !a.IsNamespaceDeclaration);
    }
}
public override bool HasChildren
{
get
{
XContainer container = _source as XContainer;
if (container != null)
{
foreach (XNode node in container.Nodes())
{
if (IsContent(container, node))
{
return true;
}
}
}
return false;
}
}
public override bool IsEmptyElement
{
get
{
XElement e = _source as XElement;
return e != null && e.IsEmpty;
}
}
public override string LocalName
{
get { return _nameTable.Add(GetLocalName()); }
}
string GetLocalName()
{
XElement e = _source as XElement;
if (e != null)
{
return e.Name.LocalName;
}
XAttribute a = _source as XAttribute;
if (a != null)
{
if (_parent != null && a.Name.NamespaceName.Length == 0)
{
return string.Empty; // backcompat
}
return a.Name.LocalName;
}
XProcessingInstruction p = _source as XProcessingInstruction;
if (p != null)
{
return p.Target;
}
return string.Empty;
}
public override string Name
{
get
{
string prefix = GetPrefix();
if (prefix.Length == 0)
{
return _nameTable.Add(GetLocalName());
}
return _nameTable.Add(string.Concat(prefix, ":", GetLocalName()));
}
}
public override string NamespaceURI
{
get { return _nameTable.Add(GetNamespaceURI()); }
}
string GetNamespaceURI()
{
XElement e = _source as XElement;
if (e != null)
{
return e.Name.NamespaceName;
}
XAttribute a = _source as XAttribute;
if (a != null)
{
if (_parent != null)
{
return string.Empty; // backcompat
}
return a.Name.NamespaceName;
}
return string.Empty;
}
public override XmlNameTable NameTable
{
get { return _nameTable; }
}
public override XPathNodeType NodeType
{
get
{
if (_source != null)
{
switch (_source.NodeType)
{
case XmlNodeType.Element:
return XPathNodeType.Element;
case XmlNodeType.Attribute:
XAttribute attribute = (XAttribute)_source;
return attribute.IsNamespaceDeclaration ? XPathNodeType.Namespace : XPathNodeType.Attribute;
case XmlNodeType.Document:
return XPathNodeType.Root;
case XmlNodeType.Comment:
return XPathNodeType.Comment;
case XmlNodeType.ProcessingInstruction:
return XPathNodeType.ProcessingInstruction;
default:
return XPathNodeType.Text;
}
}
return XPathNodeType.Text;
}
}
public override string Prefix
{
get { return _nameTable.Add(GetPrefix()); }
}
string GetPrefix()
{
XElement e = _source as XElement;
if (e != null)
{
string prefix = e.GetPrefixOfNamespace(e.Name.Namespace);
if (prefix != null)
{
return prefix;
}
return string.Empty;
}
XAttribute a = _source as XAttribute;
if (a != null)
{
if (_parent != null)
{
return string.Empty; // backcompat
}
string prefix = a.GetPrefixOfNamespace(a.Name.Namespace);
if (prefix != null)
{
return prefix;
}
}
return string.Empty;
}
public override object UnderlyingObject
{
get
{
return _source;
}
}
public override string Value
{
get
{
if (_source != null)
{
switch (_source.NodeType)
{
case XmlNodeType.Element:
return ((XElement)_source).Value;
case XmlNodeType.Attribute:
return ((XAttribute)_source).Value;
case XmlNodeType.Document:
XElement root = ((XDocument)_source).Root;
return root != null ? root.Value : string.Empty;
case XmlNodeType.Text:
case XmlNodeType.CDATA:
return CollectText((XText)_source);
case XmlNodeType.Comment:
return ((XComment)_source).Value;
case XmlNodeType.ProcessingInstruction:
return ((XProcessingInstruction)_source).Data;
default:
return string.Empty;
}
}
return string.Empty;
}
}
public override XPathNavigator Clone()
{
return new XNodeNavigator(this);
}
public override bool IsSamePosition(XPathNavigator navigator)
{
XNodeNavigator other = navigator as XNodeNavigator;
if (other == null)
{
return false;
}
return IsSamePosition(this, other);
}
public override bool MoveTo(XPathNavigator navigator)
{
XNodeNavigator other = navigator as XNodeNavigator;
if (other != null)
{
_source = other._source;
_parent = other._parent;
return true;
}
return false;
}
public override bool MoveToAttribute(string localName, string namespaceName)
{
XElement e = _source as XElement;
if (e != null)
{
foreach (XAttribute attribute in e.Attributes())
{
if (attribute.Name.LocalName == localName &&
attribute.Name.NamespaceName == namespaceName &&
!attribute.IsNamespaceDeclaration)
{
_source = attribute;
return true;
}
}
}
return false;
}
public override bool MoveToChild(string localName, string namespaceName)
{
XContainer c = _source as XContainer;
if (c != null)
{
foreach (XElement element in c.Elements())
{
if (element.Name.LocalName == localName &&
element.Name.NamespaceName == namespaceName)
{
_source = element;
return true;
}
}
}
return false;
}
public override bool MoveToChild(XPathNodeType type)
{
XContainer c = _source as XContainer;
if (c != null)
{
int mask = GetElementContentMask(type);
if ((TextMask & mask) != 0 && c.GetParent() == null && c is XDocument)
{
mask &= ~TextMask;
}
foreach (XNode node in c.Nodes())
{
if (((1 << (int)node.NodeType) & mask) != 0)
{
_source = node;
return true;
}
}
}
return false;
}
public override bool MoveToFirstAttribute()
{
XElement e = _source as XElement;
if (e != null)
{
foreach (XAttribute attribute in e.Attributes())
{
if (!attribute.IsNamespaceDeclaration)
{
_source = attribute;
return true;
}
}
}
return false;
}
public override bool MoveToFirstChild()
{
XContainer container = _source as XContainer;
if (container != null)
{
foreach (XNode node in container.Nodes())
{
if (IsContent(container, node))
{
_source = node;
return true;
}
}
}
return false;
}
public override bool MoveToFirstNamespace(XPathNamespaceScope scope)
{
XElement e = _source as XElement;
if (e != null)
{
XAttribute a = null;
switch (scope)
{
case XPathNamespaceScope.Local:
a = GetFirstNamespaceDeclarationLocal(e);
break;
case XPathNamespaceScope.ExcludeXml:
a = GetFirstNamespaceDeclarationGlobal(e);
while (a != null && a.Name.LocalName == "xml")
{
a = GetNextNamespaceDeclarationGlobal(a);
}
break;
case XPathNamespaceScope.All:
a = GetFirstNamespaceDeclarationGlobal(e);
if (a == null)
{
a = GetXmlNamespaceDeclaration();
}
break;
}
if (a != null)
{
_source = a;
_parent = e;
return true;
}
}
return false;
}
public override bool MoveToId(string id)
{
throw new NotSupportedException(SR.NotSupported_MoveToId);
}
public override bool MoveToNamespace(string localName)
{
XElement e = _source as XElement;
if (e != null)
{
if (localName == "xmlns")
{
return false; // backcompat
}
if (localName != null && localName.Length == 0)
{
localName = "xmlns"; // backcompat
}
XAttribute a = GetFirstNamespaceDeclarationGlobal(e);
while (a != null)
{
if (a.Name.LocalName == localName)
{
_source = a;
_parent = e;
return true;
}
a = GetNextNamespaceDeclarationGlobal(a);
}
if (localName == "xml")
{
_source = GetXmlNamespaceDeclaration();
_parent = e;
return true;
}
}
return false;
}
public override bool MoveToNext()
{
    // Moves to the next sibling that counts as content for the parent
    // container. Runs of adjacent XText siblings are presented as a single
    // logical XPath text node, so a text node is never followed directly by
    // another text node (the !(node is XText && next is XText) test).
    XNode currentNode = _source as XNode;
    if (currentNode != null)
    {
        XContainer container = currentNode.GetParent();
        if (container != null)
        {
            XNode next = null;
            for (XNode node = currentNode; node != null; node = next)
            {
                next = node.NextNode;
                if (next == null)
                {
                    // No more siblings to examine.
                    break;
                }
                if (IsContent(container, next) && !(node is XText && next is XText))
                {
                    _source = next;
                    return true;
                }
            }
        }
    }
    // Not positioned on a node, no parent, or no following content sibling.
    return false;
}
public override bool MoveToNext(string localName, string namespaceName)
{
XNode currentNode = _source as XNode;
if (currentNode != null)
{
foreach (XElement element in currentNode.ElementsAfterSelf())
{
if (element.Name.LocalName == localName &&
element.Name.NamespaceName == namespaceName)
{
_source = element;
return true;
}
}
}
return false;
}
public override bool MoveToNext(XPathNodeType type)
{
    // Moves to the next sibling whose node type matches the given XPath
    // node type. Adjacent XText siblings form one logical text node, as in
    // the parameterless MoveToNext().
    XNode currentNode = _source as XNode;
    if (currentNode != null)
    {
        XContainer container = currentNode.GetParent();
        if (container != null)
        {
            int mask = GetElementContentMask(type);
            // Text is not legal content directly under a document root, so
            // strip text/CDATA bits from the mask in that case.
            if ((TextMask & mask) != 0 && container.GetParent() == null && container is XDocument)
            {
                mask &= ~TextMask;
            }
            XNode next = null;
            for (XNode node = currentNode; node != null; node = next)
            {
                next = node.NextNode;
                if (next == null)
                {
                    // Fix: the original dereferenced next.NodeType without this
                    // guard, throwing NullReferenceException whenever the current
                    // node was the last sibling. Mirrors the guard in MoveToNext().
                    break;
                }
                if (((1 << (int)next.NodeType) & mask) != 0 && !(node is XText && next is XText))
                {
                    _source = next;
                    return true;
                }
            }
        }
    }
    return false;
}
public override bool MoveToNextAttribute()
{
XAttribute currentAttribute = _source as XAttribute;
if (currentAttribute != null && _parent == null)
{
XElement e = (XElement)currentAttribute.GetParent();
if (e != null)
{
for (XAttribute attribute = currentAttribute.NextAttribute; attribute != null; attribute = attribute.NextAttribute)
{
if (!attribute.IsNamespaceDeclaration)
{
_source = attribute;
return true;
}
}
}
}
return false;
}
public override bool MoveToNextNamespace(XPathNamespaceScope scope)
{
XAttribute a = _source as XAttribute;
if (a != null && _parent != null && !IsXmlNamespaceDeclaration(a))
{
switch (scope)
{
case XPathNamespaceScope.Local:
if (a.GetParent() != _parent)
{
return false;
}
a = GetNextNamespaceDeclarationLocal(a);
break;
case XPathNamespaceScope.ExcludeXml:
do
{
a = GetNextNamespaceDeclarationGlobal(a);
} while (a != null &&
(a.Name.LocalName == "xml" ||
HasNamespaceDeclarationInScope(a, _parent)));
break;
case XPathNamespaceScope.All:
do
{
a = GetNextNamespaceDeclarationGlobal(a);
} while (a != null &&
HasNamespaceDeclarationInScope(a, _parent));
if (a == null &&
!HasNamespaceDeclarationInScope(GetXmlNamespaceDeclaration(), _parent))
{
a = GetXmlNamespaceDeclaration();
}
break;
}
if (a != null)
{
_source = a;
return true;
}
}
return false;
}
public override bool MoveToParent()
{
if (_parent != null)
{
_source = _parent;
_parent = null;
return true;
}
XNode parentNode = _source.GetParent();
if (parentNode != null)
{
_source = parentNode;
return true;
}
return false;
}
public override bool MoveToPrevious()
{
    // Moves to the previous sibling content node, if any. XNode has no
    // previous-content pointer that respects the content rules, so this
    // walks the parent's children from the start, remembering the last
    // content node seen before reaching the current one.
    XNode currentNode = _source as XNode;
    if (currentNode != null)
    {
        XContainer container = currentNode.GetParent();
        if (container != null)
        {
            XNode previous = null;
            foreach (XNode node in container.Nodes())
            {
                if (node == currentNode)
                {
                    if (previous != null)
                    {
                        _source = previous;
                        return true;
                    }
                    // Current node is the first content child: nowhere to go.
                    return false;
                }
                if (IsContent(container, node))
                {
                    previous = node;
                }
            }
        }
    }
    return false;
}
public override XmlReader ReadSubtree()
{
XContainer c = _source as XContainer;
if (c == null) throw new InvalidOperationException(SR.Format(SR.InvalidOperation_BadNodeType, NodeType));
return c.CreateReader();
}
bool IXmlLineInfo.HasLineInfo()
{
IXmlLineInfo li = _source as IXmlLineInfo;
if (li != null)
{
return li.HasLineInfo();
}
return false;
}
int IXmlLineInfo.LineNumber
{
get
{
IXmlLineInfo li = _source as IXmlLineInfo;
if (li != null)
{
return li.LineNumber;
}
return 0;
}
}
int IXmlLineInfo.LinePosition
{
get
{
IXmlLineInfo li = _source as IXmlLineInfo;
if (li != null)
{
return li.LinePosition;
}
return 0;
}
}
static string CollectText(XText n)
{
    // An XPath text node spans every adjacent text/CDATA sibling, so the
    // value is n's text followed by the values of all immediately
    // following XText siblings (stopping at the first non-text node).
    // Uses a StringBuilder instead of repeated string concatenation, which
    // was O(n^2) when many adjacent text nodes are coalesced.
    System.Text.StringBuilder sb = new System.Text.StringBuilder(n.Value);
    if (n.GetParent() != null)
    {
        foreach (XNode node in n.NodesAfterSelf())
        {
            XText t = node as XText;
            if (t == null) break;
            sb.Append(t.Value);
        }
    }
    return sb.ToString();
}
static XmlNameTable CreateNameTable()
{
XmlNameTable nameTable = new NameTable();
nameTable.Add(string.Empty);
nameTable.Add(xmlnsPrefixNamespace);
nameTable.Add(xmlPrefixNamespace);
return nameTable;
}
static bool IsContent(XContainer c, XNode n)
{
if (c.GetParent() != null || c is XElement)
{
return true;
}
return ((1 << (int)n.NodeType) & DocumentContentMask) != 0;
}
static bool IsSamePosition(XNodeNavigator n1, XNodeNavigator n2)
{
return n1._source == n2._source && n1._source.GetParent() == n2._source.GetParent();
}
static bool IsXmlNamespaceDeclaration(XAttribute a)
{
return (object)a == (object)GetXmlNamespaceDeclaration();
}
static int GetElementContentMask(XPathNodeType type)
{
return s_ElementContentMasks[(int)type];
}
static XAttribute GetFirstNamespaceDeclarationGlobal(XElement e)
{
do
{
XAttribute a = GetFirstNamespaceDeclarationLocal(e);
if (a != null)
{
return a;
}
e = e.Parent;
} while (e != null);
return null;
}
static XAttribute GetFirstNamespaceDeclarationLocal(XElement e)
{
    // Returns the first xmlns/xmlns:* attribute declared directly on e,
    // or null when the element declares no namespaces itself.
    return e.Attributes().FirstOrDefault(a => a.IsNamespaceDeclaration);
}
static XAttribute GetNextNamespaceDeclarationGlobal(XAttribute a)
{
XElement e = (XElement)a.GetParent();
if (e == null)
{
return null;
}
XAttribute next = GetNextNamespaceDeclarationLocal(a);
if (next != null)
{
return next;
}
e = e.Parent;
if (e == null)
{
return null;
}
return GetFirstNamespaceDeclarationGlobal(e);
}
static XAttribute GetNextNamespaceDeclarationLocal(XAttribute a)
{
    // Returns the next namespace declaration after a on the same element,
    // or null when there is none (or when a has been detached).
    if (a.Parent == null)
    {
        return null;
    }
    for (XAttribute candidate = a.NextAttribute; candidate != null; candidate = candidate.NextAttribute)
    {
        if (candidate.IsNamespaceDeclaration)
        {
            return candidate;
        }
    }
    return null;
}
static XAttribute GetXmlNamespaceDeclaration()
{
if (s_XmlNamespaceDeclaration == null)
{
System.Threading.Interlocked.CompareExchange(ref s_XmlNamespaceDeclaration, new XAttribute(XNamespace.Xmlns.GetName("xml"), xmlPrefixNamespace), null);
}
return s_XmlNamespaceDeclaration;
}
static bool HasNamespaceDeclarationInScope(XAttribute a, XElement e)
{
XName name = a.Name;
while (e != null && e != a.GetParent())
{
if (e.Attribute(name) != null)
{
return true;
}
e = e.Parent;
}
return false;
}
}
struct XPathEvaluator
{
    // Helper that runs an XPath expression against an XNode and maps the
    // result back to LINQ-to-XML objects.
    public object Evaluate<T>(XNode node, string expression, IXmlNamespaceResolver resolver) where T : class
    {
        // Node-set results become a lazy sequence of T; scalar results
        // (bool, double, string) are returned directly after a type check.
        XPathNavigator navigator = node.CreateNavigator();
        object result = navigator.Evaluate(expression, resolver);
        XPathNodeIterator iterator = result as XPathNodeIterator;
        if (iterator != null)
        {
            return EvaluateIterator<T>(iterator);
        }
        if (!(result is T)) throw new InvalidOperationException(SR.Format(SR.InvalidOperation_UnexpectedEvaluation, result.GetType()));
        return (T)result;
    }
    IEnumerable<T> EvaluateIterator<T>(XPathNodeIterator result)
    {
        // Deferred execution: items are produced only as the caller enumerates.
        foreach (XPathNavigator navigator in result)
        {
            object r = navigator.UnderlyingObject;
            if (!(r is T)) throw new InvalidOperationException(SR.Format(SR.InvalidOperation_UnexpectedEvaluation, r.GetType()));
            yield return (T)r;
            // When the result is a text node, also walk the parent's child
            // list from the start, yielding each XText until the first
            // non-text child (a navigator text node covers the whole run).
            XText t = r as XText;
            if (t != null && t.GetParent() != null)
            {
                foreach (XNode node in t.GetParent().Nodes())
                {
                    t = node as XText;
                    if (t == null) break;
                    yield return (T)(object)t;
                }
            }
        }
    }
}
/// <summary>
/// Extension methods
/// </summary>
public static class Extensions
{
/// <summary>
/// Creates an <see cref="XPathNavigator"/> for a given <see cref="XNode"/>
/// </summary>
/// <param name="node">Extension point <see cref="XNode"/></param>
/// <returns>An <see cref="XPathNavigator"/></returns>
public static XPathNavigator CreateNavigator(this XNode node)
{
return node.CreateNavigator(null);
}
/// <summary>
/// Creates an <see cref="XPathNavigator"/> for a given <see cref="XNode"/>
/// </summary>
/// <param name="node">Extension point <see cref="XNode"/></param>
/// <param name="nameTable">The <see cref="XmlNameTable"/> to be used by
/// the <see cref="XPathNavigator"/></param>
/// <returns>An <see cref="XPathNavigator"/></returns>
public static XPathNavigator CreateNavigator(this XNode node, XmlNameTable nameTable)
{
if (node == null) throw new ArgumentNullException("node");
if (node is XDocumentType) throw new ArgumentException(SR.Format(SR.Argument_CreateNavigator, XmlNodeType.DocumentType));
XText text = node as XText;
if (text != null)
{
if (text.GetParent() is XDocument) throw new ArgumentException(SR.Format(SR.Argument_CreateNavigator, XmlNodeType.Whitespace));
node = CalibrateText(text);
}
return new XNodeNavigator(node, nameTable);
}
/// <summary>
/// Evaluates an XPath expression
/// </summary>
/// <param name="node">Extension point <see cref="XNode"/></param>
/// <param name="expression">The XPath expression</param>
/// <returns>The result of evaluating the expression which can be typed as bool, double, string or
/// IEnumerable</returns>
public static object XPathEvaluate(this XNode node, string expression)
{
return node.XPathEvaluate(expression, null);
}
/// <summary>
/// Evaluates an XPath expression
/// </summary>
/// <param name="node">Extension point <see cref="XNode"/></param>
/// <param name="expression">The XPath expression</param>
/// <param name="resolver">A <see cref="IXmlNamespaceResolver"> for the namespace
/// prefixes used in the XPath expression</see></param>
/// <returns>The result of evaluating the expression which can be typed as bool, double, string or
/// IEnumerable</returns>
public static object XPathEvaluate(this XNode node, string expression, IXmlNamespaceResolver resolver)
{
if (node == null) throw new ArgumentNullException("node");
return new XPathEvaluator().Evaluate<object>(node, expression, resolver);
}
/// <summary>
/// Select an <see cref="XElement"/> using a XPath expression
/// </summary>
/// <param name="node">Extension point <see cref="XNode"/></param>
/// <param name="expression">The XPath expression</param>
/// <returns>An <see cref="XElement"> or null</see></returns>
public static XElement XPathSelectElement(this XNode node, string expression)
{
return node.XPathSelectElement(expression, null);
}
/// <summary>
/// Select an <see cref="XElement"/> using a XPath expression
/// </summary>
/// <param name="node">Extension point <see cref="XNode"/></param>
/// <param name="expression">The XPath expression</param>
/// <param name="resolver">A <see cref="IXmlNamespaceResolver"/> for the namespace
/// prefixes used in the XPath expression</param>
/// <returns>An <see cref="XElement"> or null</see></returns>
public static XElement XPathSelectElement(this XNode node, string expression, IXmlNamespaceResolver resolver)
{
return node.XPathSelectElements(expression, resolver).FirstOrDefault();
}
/// <summary>
/// Select a set of <see cref="XElement"/> using a XPath expression
/// </summary>
/// <param name="node">Extension point <see cref="XNode"/></param>
/// <param name="expression">The XPath expression</param>
/// <returns>An <see cref="IEnumerable<XElement>"/> corresponding to the resulting set of elements</returns>
public static IEnumerable<XElement> XPathSelectElements(this XNode node, string expression)
{
return node.XPathSelectElements(expression, null);
}
/// <summary>
/// Select a set of <see cref="XElement"/> using a XPath expression
/// </summary>
/// <param name="node">Extension point <see cref="XNode"/></param>
/// <param name="expression">The XPath expression</param>
/// <param name="resolver">A <see cref="IXmlNamespaceResolver"/> for the namespace
/// prefixes used in the XPath expression</param>
/// <returns>An <see cref="IEnumerable<XElement>"/> corresponding to the resulting set of elements</returns>
public static IEnumerable<XElement> XPathSelectElements(this XNode node, string expression, IXmlNamespaceResolver resolver)
{
if (node == null) throw new ArgumentNullException("node");
return (IEnumerable<XElement>)new XPathEvaluator().Evaluate<XElement>(node, expression, resolver);
}
static XText CalibrateText(XText n)
{
    // Locates n within its parent's child list and returns that instance;
    // a detached text node is returned unchanged. Debug-fails if the
    // parent does not actually contain n.
    XContainer parentNode = n.GetParent();
    if (parentNode == null)
    {
        return n;
    }
    foreach (XNode node in parentNode.Nodes())
    {
        if (node == n)
        {
            return (XText)node;
        }
    }
    System.Diagnostics.Debug.Fail("Parent node doesn't contain itself.");
    return null;
}
}
}
| |
using System;
using System.Collections;
using System.Text;
using System.Collections.Generic;
using UnityEngine;
// Source: UIToolkit -- https://github.com/prime31/UIToolkit/blob/master/Assets/Plugins/MiniJSON.cs
// Based on the JSON parser from
// http://techblog.procurios.nl/k/618/news/view/14605/14863/How-do-I-write-my-own-parser-for-JSON.html
/// <summary>
/// This class encodes and decodes JSON strings.
/// Spec. details, see http://www.json.org/
///
/// JSON uses Arrays and Objects. These correspond here to the datatypes ArrayList and Hashtable.
/// All numbers are parsed to doubles.
/// </summary>
public class NGUIJson
{
private const int TOKEN_NONE = 0;
private const int TOKEN_CURLY_OPEN = 1;
private const int TOKEN_CURLY_CLOSE = 2;
private const int TOKEN_SQUARED_OPEN = 3;
private const int TOKEN_SQUARED_CLOSE = 4;
private const int TOKEN_COLON = 5;
private const int TOKEN_COMMA = 6;
private const int TOKEN_STRING = 7;
private const int TOKEN_NUMBER = 8;
private const int TOKEN_TRUE = 9;
private const int TOKEN_FALSE = 10;
private const int TOKEN_NULL = 11;
private const int BUILDER_CAPACITY = 2000;
/// <summary>
/// On decoding, this value holds the position at which the parse failed (-1 = no error).
/// </summary>
protected static int lastErrorIndex = -1;
protected static string lastDecode = "";
/// <summary>
/// Parse the specified JSon file, loading sprite information for the specified atlas.
/// </summary>
static public void LoadSpriteData (UIAtlas atlas, TextAsset asset)
{
    // Decodes the TexturePacker JSON held by 'asset' and fills the atlas
    // with it. Silently does nothing when either argument is missing.
    if (asset == null || atlas == null) return;
    Hashtable decodedHash = jsonDecode(asset.text) as Hashtable;
    if (decodedHash == null)
    {
        Debug.LogWarning("Unable to parse Json file: " + asset.name);
    }
    else
    {
        LoadSpriteData(atlas, decodedHash);
    }
    // Drop the reference and reclaim the text asset's memory.
    asset = null;
    Resources.UnloadUnusedAssets();
}
/// <summary>
/// Parse the specified JSon file, loading sprite information for the specified atlas.
/// </summary>
static public void LoadSpriteData (UIAtlas atlas, string jsonData)
{
    // Decodes raw TexturePacker JSON text and fills the atlas with it.
    // Silently does nothing when either argument is missing.
    if (atlas == null || string.IsNullOrEmpty(jsonData)) return;
    Hashtable decodedHash = jsonDecode(jsonData) as Hashtable;
    if (decodedHash == null)
    {
        Debug.LogWarning("Unable to parse the provided Json string");
    }
    else
    {
        LoadSpriteData(atlas, decodedHash);
    }
}
/// <summary>
/// Parse the specified JSon file, loading sprite information for the specified atlas.
/// </summary>
static void LoadSpriteData (UIAtlas atlas, Hashtable decodedHash)
{
    // Rebuilds the atlas sprite list from a decoded TexturePacker "frames"
    // hash, preserving border (inner-rect) data from any sprites that
    // already existed under the same name.
    if (decodedHash == null || atlas == null) return;
    List<UISpriteData> oldSprites = atlas.spriteList;
    atlas.spriteList = new List<UISpriteData>();
    Hashtable frames = (Hashtable)decodedHash["frames"];
    foreach (System.Collections.DictionaryEntry item in frames)
    {
        UISpriteData newSprite = new UISpriteData();
        newSprite.name = item.Key.ToString();
        bool exists = false;
        // Check to see if this sprite exists
        foreach (UISpriteData oldSprite in oldSprites)
        {
            if (oldSprite.name.Equals(newSprite.name, StringComparison.OrdinalIgnoreCase))
            {
                exists = true;
                break;
            }
        }
        // Get rid of the extension if the sprite doesn't exist
        // The extension is kept for backwards compatibility so it's still possible to update older atlases.
        if (!exists)
        {
            newSprite.name = newSprite.name.Replace(".png", "");
            newSprite.name = newSprite.name.Replace(".tga", "");
        }
        // Extract the info we need from the TexturePacker json file, mainly uvRect and size
        Hashtable table = (Hashtable)item.Value;
        Hashtable frame = (Hashtable)table["frame"];
        int frameX = int.Parse(frame["x"].ToString());
        int frameY = int.Parse(frame["y"].ToString());
        int frameW = int.Parse(frame["w"].ToString());
        int frameH = int.Parse(frame["h"].ToString());
        // Read the rotation value
        //newSprite.rotated = (bool)table["rotated"];
        newSprite.x = frameX;
        newSprite.y = frameY;
        newSprite.width = frameW;
        newSprite.height = frameH;
        // Support for trimmed sprites
        Hashtable sourceSize = (Hashtable)table["sourceSize"];
        Hashtable spriteSize = (Hashtable)table["spriteSourceSize"];
        if (spriteSize != null && sourceSize != null)
        {
            // TODO: Account for rotated sprites
            if (frameW > 0)
            {
                int spriteX = int.Parse(spriteSize["x"].ToString());
                int spriteW = int.Parse(spriteSize["w"].ToString());
                int sourceW = int.Parse(sourceSize["w"].ToString());
                // Padding reconstructs the trimmed transparent margins.
                newSprite.paddingLeft = spriteX;
                newSprite.paddingRight = sourceW - (spriteX + spriteW);
            }
            if (frameH > 0)
            {
                int spriteY = int.Parse(spriteSize["y"].ToString());
                int spriteH = int.Parse(spriteSize["h"].ToString());
                int sourceH = int.Parse(sourceSize["h"].ToString());
                newSprite.paddingTop = spriteY;
                newSprite.paddingBottom = sourceH - (spriteY + spriteH);
            }
        }
        // If the sprite was present before, see if we can copy its inner rect
        foreach (UISpriteData oldSprite in oldSprites)
        {
            if (oldSprite.name.Equals(newSprite.name, StringComparison.OrdinalIgnoreCase))
            {
                newSprite.borderLeft = oldSprite.borderLeft;
                newSprite.borderRight = oldSprite.borderRight;
                newSprite.borderBottom = oldSprite.borderBottom;
                newSprite.borderTop = oldSprite.borderTop;
            }
        }
        // Add this new sprite
        atlas.spriteList.Add(newSprite);
    }
    // Sort imported sprites alphabetically
    atlas.spriteList.Sort(CompareSprites);
    Debug.Log("Imported " + atlas.spriteList.Count + " sprites");
}
/// <summary>
/// Sprite comparison function for sorting.
/// </summary>
static int CompareSprites (UISpriteData a, UISpriteData b) { return a.name.CompareTo(b.name); }
/// <summary>
/// Parses the string json into a value
/// </summary>
/// <param name="json">A JSON string.</param>
/// <returns>An ArrayList, a Hashtable, a double, a string, null, true, or false</returns>
public static object jsonDecode( string json )
{
    // Keep the raw input around so getLastErrorSnippet() can show context
    // if parsing fails.
    NGUIJson.lastDecode = json;
    if( json == null )
    {
        return null;
    }
    char[] charArray = json.ToCharArray();
    int index = 0;
    bool success = true;
    object value = NGUIJson.parseValue( charArray, ref index, ref success );
    // Record where parsing stopped; -1 means no error.
    NGUIJson.lastErrorIndex = success ? -1 : index;
    return value;
}
/// <summary>
/// Converts a Hashtable / ArrayList / Dictionary(string,string) object into a JSON string
/// </summary>
/// <param name="json">A Hashtable / ArrayList</param>
/// <returns>A JSON encoded string, or null if object 'json' is not serializable</returns>
public static string jsonEncode( object json )
{
    // Serializes a Hashtable/ArrayList graph to JSON text; returns null
    // when the root (or any nested value) is not a serializable type.
    StringBuilder builder = new StringBuilder( BUILDER_CAPACITY );
    bool success = NGUIJson.serializeValue( json, builder );
    if( !success )
    {
        return null;
    }
    return builder.ToString();
}
/// <summary>
/// On decoding, this function returns the position at which the parse failed (-1 = no error).
/// </summary>
/// <returns></returns>
public static bool lastDecodeSuccessful()
{
return ( NGUIJson.lastErrorIndex == -1 );
}
/// <summary>
/// On decoding, this function returns the position at which the parse failed (-1 = no error).
/// </summary>
/// <returns></returns>
public static int getLastErrorIndex()
{
return NGUIJson.lastErrorIndex;
}
/// <summary>
/// If a decoding error occurred, this function returns a piece of the JSON string
/// at which the error took place. To ease debugging.
/// </summary>
/// <returns></returns>
public static string getLastErrorSnippet()
{
    if( NGUIJson.lastErrorIndex == -1 )
    {
        return "";
    }
    // Show up to 5 characters before and 15 after the failure point,
    // clamped to the bounds of the last decoded string.
    int startIndex = Math.Max( NGUIJson.lastErrorIndex - 5, 0 );
    int endIndex = Math.Min( NGUIJson.lastErrorIndex + 15, NGUIJson.lastDecode.Length - 1 );
    return NGUIJson.lastDecode.Substring( startIndex, endIndex - startIndex + 1 );
}
#region Parsing
protected static Hashtable parseObject( char[] json, ref int index )
{
    // Parses a JSON object starting at '{' into a Hashtable.
    // Returns null on any syntax error (unterminated object, bad name,
    // missing ':' or an unparsable value).
    Hashtable table = new Hashtable();
    int token;
    // {
    nextToken( json, ref index );
    bool done = false;
    while( !done )
    {
        token = lookAhead( json, index );
        if( token == NGUIJson.TOKEN_NONE )
        {
            // Ran out of input before the closing brace.
            return null;
        }
        else if( token == NGUIJson.TOKEN_COMMA )
        {
            nextToken( json, ref index );
        }
        else if( token == NGUIJson.TOKEN_CURLY_CLOSE )
        {
            nextToken( json, ref index );
            return table;
        }
        else
        {
            // name
            string name = parseString( json, ref index );
            if( name == null )
            {
                return null;
            }
            // :
            token = nextToken( json, ref index );
            if( token != NGUIJson.TOKEN_COLON )
                return null;
            // value
            bool success = true;
            object value = parseValue( json, ref index, ref success );
            if( !success )
                return null;
            // Later duplicate keys overwrite earlier ones.
            table[name] = value;
        }
    }
    return table;
}
protected static ArrayList parseArray( char[] json, ref int index )
{
    // Parses a JSON array starting at '[' into an ArrayList.
    // Returns null on a syntax error (unterminated array or unparsable element).
    ArrayList array = new ArrayList();
    // [
    nextToken( json, ref index );
    bool done = false;
    while( !done )
    {
        int token = lookAhead( json, index );
        if( token == NGUIJson.TOKEN_NONE )
        {
            // Ran out of input before the closing bracket.
            return null;
        }
        else if( token == NGUIJson.TOKEN_COMMA )
        {
            nextToken( json, ref index );
        }
        else if( token == NGUIJson.TOKEN_SQUARED_CLOSE )
        {
            nextToken( json, ref index );
            break;
        }
        else
        {
            bool success = true;
            object value = parseValue( json, ref index, ref success );
            if( !success )
                return null;
            array.Add( value );
        }
    }
    return array;
}
protected static object parseValue( char[] json, ref int index, ref bool success )
{
    // Parses any JSON value, dispatching on the next token.
    // Sets success to false and returns null when no valid token is found.
    switch( lookAhead( json, index ) )
    {
        case NGUIJson.TOKEN_STRING:
            return parseString( json, ref index );
        case NGUIJson.TOKEN_NUMBER:
            return parseNumber( json, ref index );
        case NGUIJson.TOKEN_CURLY_OPEN:
            return parseObject( json, ref index );
        case NGUIJson.TOKEN_SQUARED_OPEN:
            return parseArray( json, ref index );
        case NGUIJson.TOKEN_TRUE:
            nextToken( json, ref index );
            // Return the boolean literal directly instead of round-tripping
            // through Boolean.Parse("TRUE"), which produced the same value
            // with pointless string parsing.
            return true;
        case NGUIJson.TOKEN_FALSE:
            nextToken( json, ref index );
            return false;
        case NGUIJson.TOKEN_NULL:
            nextToken( json, ref index );
            return null;
        case NGUIJson.TOKEN_NONE:
            break;
    }
    success = false;
    return null;
}
protected static string parseString( char[] json, ref int index )
{
    // Parses a JSON string literal starting at the opening quote and
    // advances index past the closing quote.
    // Returns the decoded string, or null when the literal is unterminated.
    // Uses a StringBuilder instead of repeated string concatenation, which
    // was O(n^2) for long strings.
    StringBuilder s = new StringBuilder();
    char c;
    eatWhitespace( json, ref index );
    // Consume the opening quote.
    c = json[index++];
    bool complete = false;
    while( !complete )
    {
        if( index == json.Length )
            break;
        c = json[index++];
        if( c == '"' )
        {
            complete = true;
            break;
        }
        else if( c == '\\' )
        {
            if( index == json.Length )
                break;
            c = json[index++];
            if( c == '"' )
            {
                s.Append( '"' );
            }
            else if( c == '\\' )
            {
                s.Append( '\\' );
            }
            else if( c == '/' )
            {
                s.Append( '/' );
            }
            else if( c == 'b' )
            {
                s.Append( '\b' );
            }
            else if( c == 'f' )
            {
                s.Append( '\f' );
            }
            else if( c == 'n' )
            {
                s.Append( '\n' );
            }
            else if( c == 'r' )
            {
                s.Append( '\r' );
            }
            else if( c == 't' )
            {
                s.Append( '\t' );
            }
            else if( c == 'u' )
            {
                int remainingLength = json.Length - index;
                if( remainingLength >= 4 )
                {
                    char[] unicodeCharArray = new char[4];
                    Array.Copy( json, index, unicodeCharArray, 0, 4 );
                    // Drop in the HTML markup for the unicode character
                    // (kept for NGUI's embedded-markup rendering rather
                    // than decoding the code point).
                    s.Append( "&#x" ).Append( new string( unicodeCharArray ) ).Append( ';' );
                    // skip 4 chars
                    index += 4;
                }
                else
                {
                    break;
                }
            }
            // Unknown escape characters are silently dropped, matching the
            // original behavior.
        }
        else
        {
            s.Append( c );
        }
    }
    if( !complete )
        return null;
    return s.ToString();
}
protected static double parseNumber( char[] json, ref int index )
{
    // Parses a JSON number and advances index past it.
    eatWhitespace( json, ref index );
    int lastIndex = getLastIndexOfNumber( json, index );
    int charLength = ( lastIndex - index ) + 1;
    char[] numberCharArray = new char[charLength];
    Array.Copy( json, index, numberCharArray, 0, charLength );
    index = lastIndex + 1;
    // Parse with the invariant culture: JSON always uses '.' as the decimal
    // separator, so the previous culture-sensitive Double.Parse misparsed
    // (or threw on) numbers in locales that use ',' as the decimal separator.
    return Double.Parse( new string( numberCharArray ), System.Globalization.CultureInfo.InvariantCulture );
}
protected static int getLastIndexOfNumber( char[] json, int index )
{
    // Scans forward from 'index' while characters can belong to a JSON
    // number and returns the index of the last such character
    // (index - 1 when the character at 'index' is not a number character).
    const string numberChars = "0123456789+-.eE";
    int i = index;
    while( i < json.Length && numberChars.IndexOf( json[i] ) != -1 )
    {
        i++;
    }
    return i - 1;
}
protected static void eatWhitespace( char[] json, ref int index )
{
    // Advances index past any spaces, tabs, newlines and carriage returns.
    while( index < json.Length && " \t\n\r".IndexOf( json[index] ) != -1 )
    {
        index++;
    }
}
protected static int lookAhead( char[] json, int index )
{
    // Classifies the next token without consuming it by tokenizing a
    // throwaway copy of the cursor.
    int probeIndex = index;
    return nextToken( json, ref probeIndex );
}
protected static int nextToken( char[] json, ref int index )
{
    // Consumes and classifies the next JSON token, advancing index past it.
    // Returns one of the TOKEN_* constants, or TOKEN_NONE at end of input
    // or on an unrecognized character (index is left pointing at it).
    eatWhitespace( json, ref index );
    if( index == json.Length )
    {
        return NGUIJson.TOKEN_NONE;
    }
    char c = json[index];
    index++;
    switch( c )
    {
        case '{': return NGUIJson.TOKEN_CURLY_OPEN;
        case '}': return NGUIJson.TOKEN_CURLY_CLOSE;
        case '[': return NGUIJson.TOKEN_SQUARED_OPEN;
        case ']': return NGUIJson.TOKEN_SQUARED_CLOSE;
        case ',': return NGUIJson.TOKEN_COMMA;
        case '"': return NGUIJson.TOKEN_STRING;
        case ':': return NGUIJson.TOKEN_COLON;
        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
        case '-':
            return NGUIJson.TOKEN_NUMBER;
    }
    // Not a single-character token: try the keyword literals.
    index--;
    string[] literals = { "false", "true", "null" };
    int[] literalTokens = { NGUIJson.TOKEN_FALSE, NGUIJson.TOKEN_TRUE, NGUIJson.TOKEN_NULL };
    for( int i = 0; i < literals.Length; i++ )
    {
        string word = literals[i];
        if( json.Length - index >= word.Length )
        {
            bool match = true;
            for( int j = 0; j < word.Length; j++ )
            {
                if( json[index + j] != word[j] )
                {
                    match = false;
                    break;
                }
            }
            if( match )
            {
                index += word.Length;
                return literalTokens[i];
            }
        }
    }
    return NGUIJson.TOKEN_NONE;
}
#endregion
#region Serialization
/// <summary>
/// Serializes a top-level JSON value, which must be either a
/// <see cref="Hashtable"/> (object) or an <see cref="ArrayList"/> (array).
/// </summary>
/// <returns><c>false</c> if the value is neither type or serialization fails.</returns>
protected static bool serializeObjectOrArray( object objectOrArray, StringBuilder builder )
{
    Hashtable table = objectOrArray as Hashtable;
    if( table != null )
        return serializeObject( table, builder );

    ArrayList list = objectOrArray as ArrayList;
    if( list != null )
        return serializeArray( list, builder );

    return false;
}
/// <summary>
/// Serializes a <see cref="Hashtable"/> as a JSON object. Keys are converted
/// with <c>ToString()</c>; values go through <see cref="serializeValue"/>.
/// </summary>
/// <returns><c>false</c> (with the builder left mid-object) if any value is unserializable.</returns>
protected static bool serializeObject( Hashtable anObject, StringBuilder builder )
{
    builder.Append( "{" );
    bool needsSeparator = false;
    foreach( DictionaryEntry entry in anObject )
    {
        if( needsSeparator )
        {
            builder.Append( ", " );
        }
        serializeString( entry.Key.ToString(), builder );
        builder.Append( ":" );
        if( !serializeValue( entry.Value, builder ) )
        {
            return false;
        }
        needsSeparator = true;
    }
    builder.Append( "}" );
    return true;
}
/// <summary>
/// Serializes a string-to-string dictionary as a JSON object.
/// Always succeeds (both keys and values are strings).
/// </summary>
protected static bool serializeDictionary( Dictionary<string,string> dict, StringBuilder builder )
{
    builder.Append( "{" );
    int written = 0;
    foreach( KeyValuePair<string,string> pair in dict )
    {
        if( written++ > 0 )
            builder.Append( ", " );
        serializeString( pair.Key, builder );
        builder.Append( ":" );
        serializeString( pair.Value, builder );
    }
    builder.Append( "}" );
    return true;
}
/// <summary>
/// Serializes an <see cref="ArrayList"/> as a JSON array, delegating each
/// element to <see cref="serializeValue"/>.
/// </summary>
/// <returns><c>false</c> (with the builder left mid-array) if any element is unserializable.</returns>
protected static bool serializeArray( ArrayList anArray, StringBuilder builder )
{
    builder.Append( "[" );
    bool needsSeparator = false;
    foreach( object element in anArray )
    {
        if( needsSeparator )
        {
            builder.Append( ", " );
        }
        if( !serializeValue( element, builder ) )
        {
            return false;
        }
        needsSeparator = true;
    }
    builder.Append( "]" );
    return true;
}
/// <summary>
/// Serializes a single value to JSON. Supported: null, arrays,
/// strings/chars, <see cref="Hashtable"/>, string dictionaries,
/// <see cref="ArrayList"/>, booleans, and primitive numerics
/// (emitted via <see cref="serializeNumber"/> after conversion to double).
/// </summary>
/// <returns><c>false</c> for any unsupported type.</returns>
protected static bool serializeValue( object value, StringBuilder builder )
{
    if( value == null )
    {
        builder.Append( "null" );
        return true;
    }
    // Order matters: arrays first, and char/bool before the generic
    // primitive branch (both are primitives but need string/keyword form).
    if( value.GetType().IsArray )
    {
        serializeArray( new ArrayList( (ICollection)value ), builder );
        return true;
    }
    if( value is string )
    {
        serializeString( (string)value, builder );
        return true;
    }
    if( value is Char )
    {
        serializeString( Convert.ToString( (char)value ), builder );
        return true;
    }
    if( value is Hashtable )
    {
        serializeObject( (Hashtable)value, builder );
        return true;
    }
    if( value is Dictionary<string,string> )
    {
        serializeDictionary( (Dictionary<string,string>)value, builder );
        return true;
    }
    if( value is ArrayList )
    {
        serializeArray( (ArrayList)value, builder );
        return true;
    }
    if( value is Boolean )
    {
        builder.Append( (Boolean)value ? "true" : "false" );
        return true;
    }
    if( value.GetType().IsPrimitive )
    {
        serializeNumber( Convert.ToDouble( value ), builder );
        return true;
    }
    return false;
}
/// <summary>
/// Appends <paramref name="aString"/> to the builder as a quoted JSON string:
/// standard escapes for quote/backslash/control characters, printable ASCII
/// (32..126) verbatim, and everything else as a <c>\uXXXX</c> escape.
/// </summary>
protected static void serializeString( string aString, StringBuilder builder )
{
    builder.Append( "\"" );
    foreach( char c in aString )
    {
        switch( c )
        {
            case '"':  builder.Append( "\\\"" ); break;
            case '\\': builder.Append( "\\\\" ); break;
            case '\b': builder.Append( "\\b" );  break;
            case '\f': builder.Append( "\\f" );  break;
            case '\n': builder.Append( "\\n" );  break;
            case '\r': builder.Append( "\\r" );  break;
            case '\t': builder.Append( "\\t" );  break;
            default:
                int codepoint = Convert.ToInt32( c );
                if( codepoint >= 32 && codepoint <= 126 )
                {
                    builder.Append( c );
                }
                else
                {
                    // Lower-case hex, zero-padded to four digits.
                    builder.Append( "\\u" + Convert.ToString( codepoint, 16 ).PadLeft( 4, '0' ) );
                }
                break;
        }
    }
    builder.Append( "\"" );
}
/// <summary>
/// Appends a numeric value to the builder in JSON form.
/// </summary>
protected static void serializeNumber( double number, StringBuilder builder )
{
    // Format with the invariant culture so the decimal separator is always
    // '.': the culture-sensitive Convert.ToString(double) would emit "1,5"
    // in e.g. a German locale, producing invalid JSON.
    builder.Append( number.ToString( System.Globalization.CultureInfo.InvariantCulture ) );
}
#endregion
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace SampleSPAApp.Areas.HelpPage
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
    /// <summary>
    /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
    /// </summary>
    public HelpPageSampleGenerator()
    {
        ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
        ActionSamples = new Dictionary<HelpPageSampleKey, object>();
        SampleObjects = new Dictionary<Type, object>();
        // Only the default factory is registered here; callers customize via
        // SampleObjectFactories.Insert(0, ...) / .Add(...) (see property remarks).
        SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
        {
            DefaultSampleObjectFactory,
        };
    }
    /// <summary>
    /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
    /// </summary>
    public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
    /// <summary>
    /// Gets the objects that are used directly as samples for certain actions.
    /// </summary>
    public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
    /// <summary>
    /// Gets the objects that are serialized as samples by the supported formatters.
    /// </summary>
    public IDictionary<Type, object> SampleObjects { get; internal set; }
    /// <summary>
    /// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
    /// stopping when the factory successfully returns a non-<see langword="null"/> object.
    /// </summary>
    /// <remarks>
    /// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
    /// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
    /// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
    [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
        Justification = "This is an appropriate nesting of generic types")]
    public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }
    /// <summary>
    /// Gets the request body samples for a given <see cref="ApiDescription"/>.
    /// </summary>
    /// <param name="api">The <see cref="ApiDescription"/>.</param>
    /// <returns>The samples keyed by media type.</returns>
    public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
    {
        return GetSample(api, SampleDirection.Request);
    }
    /// <summary>
    /// Gets the response body samples for a given <see cref="ApiDescription"/>.
    /// </summary>
    /// <param name="api">The <see cref="ApiDescription"/>.</param>
    /// <returns>The samples keyed by media type.</returns>
    public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
    {
        return GetSample(api, SampleDirection.Response);
    }
    /// <summary>
    /// Gets the request or response body samples.
    /// </summary>
    /// <param name="api">The <see cref="ApiDescription"/>.</param>
    /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
    /// <returns>The samples keyed by media type.</returns>
    public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
    {
        if (api == null)
        {
            throw new ArgumentNullException("api");
        }
        string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
        string actionName = api.ActionDescriptor.ActionName;
        IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
        Collection<MediaTypeFormatter> formatters;
        Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
        var samples = new Dictionary<MediaTypeHeaderValue, object>();
        // Use the samples provided directly for actions
        // (these take precedence: media types filled here are skipped below).
        var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
        foreach (var actionSample in actionSamples)
        {
            samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
        }
        // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
        // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
        if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
        {
            object sampleObject = GetSampleObject(type);
            foreach (var formatter in formatters)
            {
                foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
                {
                    if (!samples.ContainsKey(mediaType))
                    {
                        object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
                        // If no sample found, try generate sample using formatter and sample object
                        if (sample == null && sampleObject != null)
                        {
                            sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                        }
                        samples.Add(mediaType, WrapSampleIfString(sample));
                    }
                }
            }
        }
        return samples;
    }
    /// <summary>
    /// Search for samples that are provided directly through <see cref="ActionSamples"/>.
    /// </summary>
    /// <param name="controllerName">Name of the controller.</param>
    /// <param name="actionName">Name of the action.</param>
    /// <param name="parameterNames">The parameter names.</param>
    /// <param name="type">The CLR type.</param>
    /// <param name="formatter">The formatter.</param>
    /// <param name="mediaType">The media type.</param>
    /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
    /// <returns>The sample that matches the parameters.</returns>
    public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
    {
        object sample;
        // First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
        // If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
        // If still not found, try to get the sample provided for the specified mediaType and type.
        // Finally, try to get the sample provided for the specified mediaType.
        if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
            ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
            ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
            ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
        {
            return sample;
        }
        return null;
    }
    /// <summary>
    /// Gets the sample object that will be serialized by the formatters.
    /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
    /// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
    /// factories in <see cref="SampleObjectFactories"/>.
    /// </summary>
    /// <param name="type">The type.</param>
    /// <returns>The sample object.</returns>
    [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
        Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
    public virtual object GetSampleObject(Type type)
    {
        object sampleObject;
        if (!SampleObjects.TryGetValue(type, out sampleObject))
        {
            // No specific object available, try our factories.
            foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
            {
                if (factory == null)
                {
                    continue;
                }
                try
                {
                    sampleObject = factory(this, type);
                    if (sampleObject != null)
                    {
                        break;
                    }
                }
                catch
                {
                    // Ignore any problems encountered in the factory; go on to the next one (if any).
                }
            }
        }
        // May still be null here if no entry existed and every factory failed;
        // callers treat a null sample object as "no sample available".
        return sampleObject;
    }
    /// <summary>
    /// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
    /// </summary>
    /// <param name="api">The <see cref="ApiDescription"/>.</param>
    /// <returns>The type.</returns>
    public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
    {
        string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
        string actionName = api.ActionDescriptor.ActionName;
        IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
        Collection<MediaTypeFormatter> formatters;
        return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
    }
    /// <summary>
    /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
    /// </summary>
    /// <param name="api">The <see cref="ApiDescription"/>.</param>
    /// <param name="controllerName">Name of the controller.</param>
    /// <param name="actionName">Name of the action.</param>
    /// <param name="parameterNames">The parameter names.</param>
    /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
    /// <param name="formatters">The formatters.</param>
    [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
    public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
    {
        if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
        {
            throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
        }
        if (api == null)
        {
            throw new ArgumentNullException("api");
        }
        Type type;
        // An explicit entry in ActualHttpMessageTypes overrides what the API
        // description reports ("*" acts as a wildcard for the parameter list).
        if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
            ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
        {
            // Re-compute the supported formatters based on type
            Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
            foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
            {
                if (IsFormatSupported(sampleDirection, formatter, type))
                {
                    newFormatters.Add(formatter);
                }
            }
            formatters = newFormatters;
        }
        else
        {
            switch (sampleDirection)
            {
                case SampleDirection.Request:
                    ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                    type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
                    formatters = api.SupportedRequestBodyFormatters;
                    break;
                case SampleDirection.Response:
                default:
                    type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                    formatters = api.SupportedResponseFormatters;
                    break;
            }
        }
        return type;
    }
    /// <summary>
    /// Writes the sample object using formatter.
    /// </summary>
    /// <param name="formatter">The formatter.</param>
    /// <param name="value">The value.</param>
    /// <param name="type">The type.</param>
    /// <param name="mediaType">Type of the media.</param>
    /// <returns>A <see cref="TextSample"/> on success, or an <see cref="InvalidSample"/> describing the failure.</returns>
    [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
    public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
    {
        if (formatter == null)
        {
            throw new ArgumentNullException("formatter");
        }
        if (mediaType == null)
        {
            throw new ArgumentNullException("mediaType");
        }
        object sample = String.Empty;
        MemoryStream ms = null;
        HttpContent content = null;
        try
        {
            if (formatter.CanWriteType(type))
            {
                ms = new MemoryStream();
                content = new ObjectContent(type, value, formatter, mediaType);
                // Synchronous wait is acceptable here; any AggregateException
                // is unwrapped in the catch block via UnwrapException.
                formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
                ms.Position = 0;
                // The reader is not disposed directly; the underlying
                // MemoryStream is disposed in the finally block.
                StreamReader reader = new StreamReader(ms);
                string serializedSampleString = reader.ReadToEnd();
                if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
                {
                    serializedSampleString = TryFormatXml(serializedSampleString);
                }
                else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
                {
                    serializedSampleString = TryFormatJson(serializedSampleString);
                }
                sample = new TextSample(serializedSampleString);
            }
            else
            {
                sample = new InvalidSample(String.Format(
                    CultureInfo.CurrentCulture,
                    "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                    mediaType,
                    formatter.GetType().Name,
                    type.Name));
            }
        }
        catch (Exception e)
        {
            sample = new InvalidSample(String.Format(
                CultureInfo.CurrentCulture,
                "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
                formatter.GetType().Name,
                mediaType.MediaType,
                UnwrapException(e).Message));
        }
        finally
        {
            if (ms != null)
            {
                ms.Dispose();
            }
            if (content != null)
            {
                content.Dispose();
            }
        }
        return sample;
    }
    // Unwraps the AggregateException produced by Task.Wait() (used above) so
    // error messages surface the root cause instead of the wrapper.
    internal static Exception UnwrapException(Exception exception)
    {
        AggregateException aggregateException = exception as AggregateException;
        if (aggregateException != null)
        {
            return aggregateException.Flatten().InnerException;
        }
        return exception;
    }
    // Default factory for sample objects
    private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
    {
        // Try to create a default sample object
        ObjectGenerator objectGenerator = new ObjectGenerator();
        return objectGenerator.GenerateObject(type);
    }
    [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
    private static string TryFormatJson(string str)
    {
        try
        {
            // Round-trip through Json.NET purely to pretty-print with indentation.
            object parsedJson = JsonConvert.DeserializeObject(str);
            return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
        }
        catch
        {
            // can't parse JSON, return the original string
            return str;
        }
    }
    [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
    private static string TryFormatXml(string str)
    {
        try
        {
            // Round-trip through XDocument purely to pretty-print.
            XDocument xml = XDocument.Parse(str);
            return xml.ToString();
        }
        catch
        {
            // can't parse XML, return the original string
            return str;
        }
    }
    private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
    {
        switch (sampleDirection)
        {
            case SampleDirection.Request:
                return formatter.CanReadType(type);
            case SampleDirection.Response:
                return formatter.CanWriteType(type);
        }
        return false;
    }
    // Enumerates ActionSamples entries matching the given action; a sample key
    // whose parameter names are exactly { "*" } acts as a wildcard for any
    // parameter list. Comparisons are case-insensitive.
    private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
    {
        HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
        foreach (var sample in ActionSamples)
        {
            HelpPageSampleKey sampleKey = sample.Key;
            if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
                String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
                (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
                sampleDirection == sampleKey.SampleDirection)
            {
                yield return sample;
            }
        }
    }
    // Raw string samples are wrapped in TextSample so the help page renders
    // them uniformly with formatter-generated samples.
    private static object WrapSampleIfString(object sample)
    {
        string stringSample = sample as string;
        if (stringSample != null)
        {
            return new TextSample(stringSample);
        }
        return sample;
    }
}
}
| |
/*
* Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the ec2-2015-10-01.normal.json service model.
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
namespace Amazon.EC2.Model
{
/// <summary>
/// Describes the configuration of a Spot fleet request.
/// </summary>
// NOTE: generated from the ec2-2015-10-01 service model (see the file header);
// do not hand-edit. Nullable backing fields let the internal IsSet* methods
// distinguish "never assigned" from a default value — presumably consumed by
// the request marshaller to decide which fields to serialize (marshaller not
// visible here; confirm before relying on this).
public partial class SpotFleetRequestConfigData
{
    private AllocationStrategy _allocationStrategy;
    private string _clientToken;
    private ExcessCapacityTerminationPolicy _excessCapacityTerminationPolicy;
    private string _iamFleetRole;
    private List<SpotFleetLaunchSpecification> _launchSpecifications = new List<SpotFleetLaunchSpecification>();
    private string _spotPrice;
    private int? _targetCapacity;
    private bool? _terminateInstancesWithExpiration;
    private DateTime? _validFrom;
    private DateTime? _validUntil;
    /// <summary>
    /// Gets and sets the property AllocationStrategy.
    /// <para>
    /// Indicates how to allocate the target capacity across the Spot pools specified by the
    /// Spot fleet request. The default is <code>lowestPrice</code>.
    /// </para>
    /// </summary>
    public AllocationStrategy AllocationStrategy
    {
        get { return this._allocationStrategy; }
        set { this._allocationStrategy = value; }
    }
    // Check to see if AllocationStrategy property is set
    internal bool IsSetAllocationStrategy()
    {
        return this._allocationStrategy != null;
    }
    /// <summary>
    /// Gets and sets the property ClientToken.
    /// <para>
    /// A unique, case-sensitive identifier you provide to ensure idempotency of your listings.
    /// This helps avoid duplicate listings. For more information, see <a href="http://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html">Ensuring
    /// Idempotency</a>.
    /// </para>
    /// </summary>
    public string ClientToken
    {
        get { return this._clientToken; }
        set { this._clientToken = value; }
    }
    // Check to see if ClientToken property is set
    internal bool IsSetClientToken()
    {
        return this._clientToken != null;
    }
    /// <summary>
    /// Gets and sets the property ExcessCapacityTerminationPolicy.
    /// <para>
    /// Indicates whether running Spot instances should be terminated if the target capacity
    /// of the Spot fleet request is decreased below the current size of the Spot fleet.
    /// </para>
    /// </summary>
    public ExcessCapacityTerminationPolicy ExcessCapacityTerminationPolicy
    {
        get { return this._excessCapacityTerminationPolicy; }
        set { this._excessCapacityTerminationPolicy = value; }
    }
    // Check to see if ExcessCapacityTerminationPolicy property is set
    internal bool IsSetExcessCapacityTerminationPolicy()
    {
        return this._excessCapacityTerminationPolicy != null;
    }
    /// <summary>
    /// Gets and sets the property IamFleetRole.
    /// <para>
    /// Grants the Spot fleet permission to terminate Spot instances on your behalf when you
    /// cancel its Spot fleet request using <a>CancelSpotFleetRequests</a> or when the Spot
    /// fleet request expires, if you set <code>terminateInstancesWithExpiration</code>.
    /// </para>
    /// </summary>
    public string IamFleetRole
    {
        get { return this._iamFleetRole; }
        set { this._iamFleetRole = value; }
    }
    // Check to see if IamFleetRole property is set
    internal bool IsSetIamFleetRole()
    {
        return this._iamFleetRole != null;
    }
    /// <summary>
    /// Gets and sets the property LaunchSpecifications.
    /// <para>
    /// Information about the launch specifications for the Spot fleet request.
    /// </para>
    /// </summary>
    public List<SpotFleetLaunchSpecification> LaunchSpecifications
    {
        get { return this._launchSpecifications; }
        set { this._launchSpecifications = value; }
    }
    // Check to see if LaunchSpecifications property is set
    // (an empty list counts as "not set", unlike the reference-type properties above)
    internal bool IsSetLaunchSpecifications()
    {
        return this._launchSpecifications != null && this._launchSpecifications.Count > 0;
    }
    /// <summary>
    /// Gets and sets the property SpotPrice.
    /// <para>
    /// The bid price per unit hour.
    /// </para>
    /// </summary>
    public string SpotPrice
    {
        get { return this._spotPrice; }
        set { this._spotPrice = value; }
    }
    // Check to see if SpotPrice property is set
    internal bool IsSetSpotPrice()
    {
        return this._spotPrice != null;
    }
    /// <summary>
    /// Gets and sets the property TargetCapacity.
    /// <para>
    /// The number of units to request. You can choose to set the target capacity in terms
    /// of instances or a performance characteristic that is important to your application
    /// workload, such as vCPUs, memory, or I/O.
    /// </para>
    /// </summary>
    public int TargetCapacity
    {
        get { return this._targetCapacity.GetValueOrDefault(); }
        set { this._targetCapacity = value; }
    }
    // Check to see if TargetCapacity property is set
    internal bool IsSetTargetCapacity()
    {
        return this._targetCapacity.HasValue;
    }
    /// <summary>
    /// Gets and sets the property TerminateInstancesWithExpiration.
    /// <para>
    /// Indicates whether running Spot instances should be terminated when the Spot fleet
    /// request expires.
    /// </para>
    /// </summary>
    public bool TerminateInstancesWithExpiration
    {
        get { return this._terminateInstancesWithExpiration.GetValueOrDefault(); }
        set { this._terminateInstancesWithExpiration = value; }
    }
    // Check to see if TerminateInstancesWithExpiration property is set
    internal bool IsSetTerminateInstancesWithExpiration()
    {
        return this._terminateInstancesWithExpiration.HasValue;
    }
    /// <summary>
    /// Gets and sets the property ValidFrom.
    /// <para>
    /// The start date and time of the request, in UTC format (for example, <i>YYYY</i>-<i>MM</i>-<i>DD</i>T<i>HH</i>:<i>MM</i>:<i>SS</i>Z).
    /// The default is to start fulfilling the request immediately.
    /// </para>
    /// </summary>
    public DateTime ValidFrom
    {
        get { return this._validFrom.GetValueOrDefault(); }
        set { this._validFrom = value; }
    }
    // Check to see if ValidFrom property is set
    internal bool IsSetValidFrom()
    {
        return this._validFrom.HasValue;
    }
    /// <summary>
    /// Gets and sets the property ValidUntil.
    /// <para>
    /// The end date and time of the request, in UTC format (for example, <i>YYYY</i>-<i>MM</i>-<i>DD</i>T<i>HH</i>:<i>MM</i>:<i>SS</i>Z).
    /// At this point, no new Spot instance requests are placed or enabled to fulfill the
    /// request.
    /// </para>
    /// </summary>
    public DateTime ValidUntil
    {
        get { return this._validUntil.GetValueOrDefault(); }
        set { this._validUntil = value; }
    }
    // Check to see if ValidUntil property is set
    internal bool IsSetValidUntil()
    {
        return this._validUntil.HasValue;
    }
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gagvr = Google.Ads.GoogleAds.V8.Resources;
using gax = Google.Api.Gax;
using gaxgrpc = Google.Api.Gax.Grpc;
using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore;
using proto = Google.Protobuf;
using grpccore = Grpc.Core;
using grpcinter = Grpc.Core.Interceptors;
using sys = System;
using scg = System.Collections.Generic;
using sco = System.Collections.ObjectModel;
using st = System.Threading;
using stt = System.Threading.Tasks;
namespace Google.Ads.GoogleAds.V8.Services
{
/// <summary>Settings for <see cref="KeywordPlanCampaignServiceClient"/> instances.</summary>
public sealed partial class KeywordPlanCampaignServiceSettings : gaxgrpc::ServiceSettingsBase
{
/// <summary>Get a new instance of the default <see cref="KeywordPlanCampaignServiceSettings"/>.</summary>
/// <returns>A new instance of the default <see cref="KeywordPlanCampaignServiceSettings"/>.</returns>
public static KeywordPlanCampaignServiceSettings GetDefault() => new KeywordPlanCampaignServiceSettings();
/// <summary>
/// Constructs a new <see cref="KeywordPlanCampaignServiceSettings"/> object with default settings.
/// </summary>
public KeywordPlanCampaignServiceSettings()
{
}
private KeywordPlanCampaignServiceSettings(KeywordPlanCampaignServiceSettings existing) : base(existing)
{
gax::GaxPreconditions.CheckNotNull(existing, nameof(existing));
GetKeywordPlanCampaignSettings = existing.GetKeywordPlanCampaignSettings;
MutateKeywordPlanCampaignsSettings = existing.MutateKeywordPlanCampaignsSettings;
OnCopy(existing);
}
partial void OnCopy(KeywordPlanCampaignServiceSettings existing);
/// <summary>
/// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
/// <c>KeywordPlanCampaignServiceClient.GetKeywordPlanCampaign</c> and
/// <c>KeywordPlanCampaignServiceClient.GetKeywordPlanCampaignAsync</c>.
/// </summary>
/// <remarks>
/// <list type="bullet">
/// <item><description>Initial retry delay: 5000 milliseconds.</description></item>
/// <item><description>Retry delay multiplier: 1.3</description></item>
/// <item><description>Retry maximum delay: 60000 milliseconds.</description></item>
/// <item><description>Maximum attempts: Unlimited</description></item>
/// <item>
/// <description>
/// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>,
/// <see cref="grpccore::StatusCode.DeadlineExceeded"/>.
/// </description>
/// </item>
/// <item><description>Timeout: 3600 seconds.</description></item>
/// </list>
/// </remarks>
public gaxgrpc::CallSettings GetKeywordPlanCampaignSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded)));
/// <summary>
/// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
/// <c>KeywordPlanCampaignServiceClient.MutateKeywordPlanCampaigns</c> and
/// <c>KeywordPlanCampaignServiceClient.MutateKeywordPlanCampaignsAsync</c>.
/// </summary>
/// <remarks>
/// Calls retry transparently on <see cref="grpccore::StatusCode.Unavailable"/> and
/// <see cref="grpccore::StatusCode.DeadlineExceeded"/> using exponential backoff:
/// initial delay 5000 milliseconds, multiplier 1.3, maximum delay 60000 milliseconds,
/// unlimited attempts, with an overall timeout of 3600 seconds.
/// </remarks>
public gaxgrpc::CallSettings MutateKeywordPlanCampaignsSettings { get; set; } =
    gaxgrpc::CallSettingsExtensions.WithRetry(
        gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))),
        gaxgrpc::RetrySettings.FromExponentialBackoff(
            maxAttempts: 2147483647,
            initialBackoff: sys::TimeSpan.FromMilliseconds(5000),
            maxBackoff: sys::TimeSpan.FromMilliseconds(60000),
            backoffMultiplier: 1.3,
            retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded)));
/// <summary>Creates a deep clone of this object, with all the same property values.</summary>
/// <returns>A deep clone of this <see cref="KeywordPlanCampaignServiceSettings"/> object.</returns>
public KeywordPlanCampaignServiceSettings Clone()
{
    return new KeywordPlanCampaignServiceSettings(this);
}
}
/// <summary>
/// Builder class for <see cref="KeywordPlanCampaignServiceClient"/> to provide simple configuration of credentials,
/// endpoint etc.
/// </summary>
internal sealed partial class KeywordPlanCampaignServiceClientBuilder : gaxgrpc::ClientBuilderBase<KeywordPlanCampaignServiceClient>
{
    /// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary>
    public KeywordPlanCampaignServiceSettings Settings { get; set; }
    /// <summary>Creates a new builder with default settings.</summary>
    public KeywordPlanCampaignServiceClientBuilder()
    {
        UseJwtAccessWithScopes = KeywordPlanCampaignServiceClient.UseJwtAccessWithScopes;
    }
    /// <summary>Hook allowing a manually written partial class to supply or replace the built client.</summary>
    partial void InterceptBuild(ref KeywordPlanCampaignServiceClient client);
    /// <summary>Hook allowing a manually written partial class to supply or replace the asynchronously built client.</summary>
    partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<KeywordPlanCampaignServiceClient> task);
    /// <summary>Builds the resulting client.</summary>
    public override KeywordPlanCampaignServiceClient Build()
    {
        // If the partial method supplied a client, use it; otherwise build normally.
        KeywordPlanCampaignServiceClient client = null;
        InterceptBuild(ref client);
        return client ?? BuildImpl();
    }
    /// <summary>Builds the resulting client asynchronously.</summary>
    public override stt::Task<KeywordPlanCampaignServiceClient> BuildAsync(st::CancellationToken cancellationToken = default)
    {
        // If the partial method supplied a task, use it; otherwise build normally.
        stt::Task<KeywordPlanCampaignServiceClient> task = null;
        InterceptBuildAsync(cancellationToken, ref task);
        return task ?? BuildAsyncImpl(cancellationToken);
    }
    private KeywordPlanCampaignServiceClient BuildImpl()
    {
        Validate();
        grpccore::CallInvoker callInvoker = CreateCallInvoker();
        return KeywordPlanCampaignServiceClient.Create(callInvoker, Settings);
    }
    private async stt::Task<KeywordPlanCampaignServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken)
    {
        Validate();
        grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false);
        return KeywordPlanCampaignServiceClient.Create(callInvoker, Settings);
    }
    /// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary>
    protected override string GetDefaultEndpoint() => KeywordPlanCampaignServiceClient.DefaultEndpoint;
    /// <summary>
    /// Returns the default scopes for this builder type, used if no scopes are otherwise specified.
    /// </summary>
    protected override scg::IReadOnlyList<string> GetDefaultScopes() => KeywordPlanCampaignServiceClient.DefaultScopes;
    /// <summary>Returns the channel pool to use when no other options are specified.</summary>
    protected override gaxgrpc::ChannelPool GetChannelPool() => KeywordPlanCampaignServiceClient.ChannelPool;
    /// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/> to use if not otherwise specified.</summary>
    protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance;
}
/// <summary>KeywordPlanCampaignService client wrapper, for convenient use.</summary>
/// <remarks>
/// Service to manage Keyword Plan campaigns.
/// </remarks>
public abstract partial class KeywordPlanCampaignServiceClient
{
    /// <summary>
    /// The default endpoint for the KeywordPlanCampaignService service, which is a host of
    /// "googleads.googleapis.com" and a port of 443.
    /// </summary>
    public static string DefaultEndpoint { get; } = "googleads.googleapis.com:443";
    /// <summary>The default KeywordPlanCampaignService scopes.</summary>
    /// <remarks>
    /// The default KeywordPlanCampaignService scopes are:
    /// <list type="bullet"><item><description>https://www.googleapis.com/auth/adwords</description></item></list>
    /// </remarks>
    public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[]
    {
        "https://www.googleapis.com/auth/adwords",
    });
    /// <summary>Channel pool shared by clients created with <see cref="Create()"/>/<see cref="CreateAsync(st::CancellationToken)"/>, built from <see cref="DefaultScopes"/>.</summary>
    internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes);
    /// <summary>Whether self-signed JWTs with scopes are used; defaults to <c>true</c> unless overridden via the partial method below.</summary>
    internal static bool UseJwtAccessWithScopes
    {
        get
        {
            bool useJwtAccessWithScopes = true;
            MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes);
            return useJwtAccessWithScopes;
        }
    }
    /// <summary>Hook allowing a manually written partial class to override <see cref="UseJwtAccessWithScopes"/>.</summary>
    static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes);
    /// <summary>
    /// Asynchronously creates a <see cref="KeywordPlanCampaignServiceClient"/> using the default credentials,
    /// endpoint and settings. To specify custom credentials or other settings, use
    /// <see cref="KeywordPlanCampaignServiceClientBuilder"/>.
    /// </summary>
    /// <param name="cancellationToken">
    /// The <see cref="st::CancellationToken"/> to use while creating the client.
    /// </param>
    /// <returns>The task representing the created <see cref="KeywordPlanCampaignServiceClient"/>.</returns>
    public static stt::Task<KeywordPlanCampaignServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) =>
        new KeywordPlanCampaignServiceClientBuilder().BuildAsync(cancellationToken);
    /// <summary>
    /// Synchronously creates a <see cref="KeywordPlanCampaignServiceClient"/> using the default credentials,
    /// endpoint and settings. To specify custom credentials or other settings, use
    /// <see cref="KeywordPlanCampaignServiceClientBuilder"/>.
    /// </summary>
    /// <returns>The created <see cref="KeywordPlanCampaignServiceClient"/>.</returns>
    public static KeywordPlanCampaignServiceClient Create() => new KeywordPlanCampaignServiceClientBuilder().Build();
    /// <summary>
    /// Creates a <see cref="KeywordPlanCampaignServiceClient"/> which uses the specified call invoker for remote
    /// operations.
    /// </summary>
    /// <param name="callInvoker">
    /// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null.
    /// </param>
    /// <param name="settings">Optional <see cref="KeywordPlanCampaignServiceSettings"/>.</param>
    /// <returns>The created <see cref="KeywordPlanCampaignServiceClient"/>.</returns>
    internal static KeywordPlanCampaignServiceClient Create(grpccore::CallInvoker callInvoker, KeywordPlanCampaignServiceSettings settings = null)
    {
        gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker));
        // Wrap the invoker with the settings-supplied interceptor, if any.
        grpcinter::Interceptor interceptor = settings?.Interceptor;
        if (interceptor != null)
        {
            callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor);
        }
        KeywordPlanCampaignService.KeywordPlanCampaignServiceClient grpcClient = new KeywordPlanCampaignService.KeywordPlanCampaignServiceClient(callInvoker);
        return new KeywordPlanCampaignServiceClientImpl(grpcClient, settings);
    }
    /// <summary>
    /// Shuts down any channels automatically created by <see cref="Create()"/> and
    /// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not
    /// affected.
    /// </summary>
    /// <remarks>
    /// After calling this method, further calls to <see cref="Create()"/> and
    /// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down
    /// by another call to this method.
    /// </remarks>
    /// <returns>A task representing the asynchronous shutdown operation.</returns>
    public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync();
    /// <summary>The underlying gRPC KeywordPlanCampaignService client</summary>
    public virtual KeywordPlanCampaignService.KeywordPlanCampaignServiceClient GrpcClient => throw new sys::NotImplementedException();
    /// <summary>
    /// Returns the requested Keyword Plan campaign in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public virtual gagvr::KeywordPlanCampaign GetKeywordPlanCampaign(GetKeywordPlanCampaignRequest request, gaxgrpc::CallSettings callSettings = null) =>
        throw new sys::NotImplementedException();
    /// <summary>
    /// Returns the requested Keyword Plan campaign in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::KeywordPlanCampaign> GetKeywordPlanCampaignAsync(GetKeywordPlanCampaignRequest request, gaxgrpc::CallSettings callSettings = null) =>
        throw new sys::NotImplementedException();
    /// <summary>
    /// Returns the requested Keyword Plan campaign in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::KeywordPlanCampaign> GetKeywordPlanCampaignAsync(GetKeywordPlanCampaignRequest request, st::CancellationToken cancellationToken) =>
        GetKeywordPlanCampaignAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
    /// <summary>
    /// Returns the requested Keyword Plan campaign in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. The resource name of the Keyword Plan campaign to fetch.
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public virtual gagvr::KeywordPlanCampaign GetKeywordPlanCampaign(string resourceName, gaxgrpc::CallSettings callSettings = null) =>
        GetKeywordPlanCampaign(new GetKeywordPlanCampaignRequest
        {
            ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)),
        }, callSettings);
    /// <summary>
    /// Returns the requested Keyword Plan campaign in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. The resource name of the Keyword Plan campaign to fetch.
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::KeywordPlanCampaign> GetKeywordPlanCampaignAsync(string resourceName, gaxgrpc::CallSettings callSettings = null) =>
        GetKeywordPlanCampaignAsync(new GetKeywordPlanCampaignRequest
        {
            ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)),
        }, callSettings);
    /// <summary>
    /// Returns the requested Keyword Plan campaign in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. The resource name of the Keyword Plan campaign to fetch.
    /// </param>
    /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::KeywordPlanCampaign> GetKeywordPlanCampaignAsync(string resourceName, st::CancellationToken cancellationToken) =>
        GetKeywordPlanCampaignAsync(resourceName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
    /// <summary>
    /// Returns the requested Keyword Plan campaign in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. The resource name of the Keyword Plan campaign to fetch.
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public virtual gagvr::KeywordPlanCampaign GetKeywordPlanCampaign(gagvr::KeywordPlanCampaignName resourceName, gaxgrpc::CallSettings callSettings = null) =>
        GetKeywordPlanCampaign(new GetKeywordPlanCampaignRequest
        {
            ResourceNameAsKeywordPlanCampaignName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)),
        }, callSettings);
    /// <summary>
    /// Returns the requested Keyword Plan campaign in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. The resource name of the Keyword Plan campaign to fetch.
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::KeywordPlanCampaign> GetKeywordPlanCampaignAsync(gagvr::KeywordPlanCampaignName resourceName, gaxgrpc::CallSettings callSettings = null) =>
        GetKeywordPlanCampaignAsync(new GetKeywordPlanCampaignRequest
        {
            ResourceNameAsKeywordPlanCampaignName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)),
        }, callSettings);
    /// <summary>
    /// Returns the requested Keyword Plan campaign in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="resourceName">
    /// Required. The resource name of the Keyword Plan campaign to fetch.
    /// </param>
    /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<gagvr::KeywordPlanCampaign> GetKeywordPlanCampaignAsync(gagvr::KeywordPlanCampaignName resourceName, st::CancellationToken cancellationToken) =>
        GetKeywordPlanCampaignAsync(resourceName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
    /// <summary>
    /// Creates, updates, or removes Keyword Plan campaigns. Operation statuses are
    /// returned.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [DatabaseError]()
    /// [FieldError]()
    /// [FieldMaskError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [KeywordPlanCampaignError]()
    /// [KeywordPlanError]()
    /// [ListOperationError]()
    /// [MutateError]()
    /// [QuotaError]()
    /// [RangeError]()
    /// [RequestError]()
    /// [ResourceCountLimitExceededError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public virtual MutateKeywordPlanCampaignsResponse MutateKeywordPlanCampaigns(MutateKeywordPlanCampaignsRequest request, gaxgrpc::CallSettings callSettings = null) =>
        throw new sys::NotImplementedException();
    /// <summary>
    /// Creates, updates, or removes Keyword Plan campaigns. Operation statuses are
    /// returned.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [DatabaseError]()
    /// [FieldError]()
    /// [FieldMaskError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [KeywordPlanCampaignError]()
    /// [KeywordPlanError]()
    /// [ListOperationError]()
    /// [MutateError]()
    /// [QuotaError]()
    /// [RangeError]()
    /// [RequestError]()
    /// [ResourceCountLimitExceededError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<MutateKeywordPlanCampaignsResponse> MutateKeywordPlanCampaignsAsync(MutateKeywordPlanCampaignsRequest request, gaxgrpc::CallSettings callSettings = null) =>
        throw new sys::NotImplementedException();
    /// <summary>
    /// Creates, updates, or removes Keyword Plan campaigns. Operation statuses are
    /// returned.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [DatabaseError]()
    /// [FieldError]()
    /// [FieldMaskError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [KeywordPlanCampaignError]()
    /// [KeywordPlanError]()
    /// [ListOperationError]()
    /// [MutateError]()
    /// [QuotaError]()
    /// [RangeError]()
    /// [RequestError]()
    /// [ResourceCountLimitExceededError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<MutateKeywordPlanCampaignsResponse> MutateKeywordPlanCampaignsAsync(MutateKeywordPlanCampaignsRequest request, st::CancellationToken cancellationToken) =>
        MutateKeywordPlanCampaignsAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
    /// <summary>
    /// Creates, updates, or removes Keyword Plan campaigns. Operation statuses are
    /// returned.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [DatabaseError]()
    /// [FieldError]()
    /// [FieldMaskError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [KeywordPlanCampaignError]()
    /// [KeywordPlanError]()
    /// [ListOperationError]()
    /// [MutateError]()
    /// [QuotaError]()
    /// [RangeError]()
    /// [RequestError]()
    /// [ResourceCountLimitExceededError]()
    /// </summary>
    /// <param name="customerId">
    /// Required. The ID of the customer whose Keyword Plan campaigns are being modified.
    /// </param>
    /// <param name="operations">
    /// Required. The list of operations to perform on individual Keyword Plan campaigns.
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public virtual MutateKeywordPlanCampaignsResponse MutateKeywordPlanCampaigns(string customerId, scg::IEnumerable<KeywordPlanCampaignOperation> operations, gaxgrpc::CallSettings callSettings = null) =>
        MutateKeywordPlanCampaigns(new MutateKeywordPlanCampaignsRequest
        {
            CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)),
            Operations =
            {
                gax::GaxPreconditions.CheckNotNull(operations, nameof(operations)),
            },
        }, callSettings);
    /// <summary>
    /// Creates, updates, or removes Keyword Plan campaigns. Operation statuses are
    /// returned.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [DatabaseError]()
    /// [FieldError]()
    /// [FieldMaskError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [KeywordPlanCampaignError]()
    /// [KeywordPlanError]()
    /// [ListOperationError]()
    /// [MutateError]()
    /// [QuotaError]()
    /// [RangeError]()
    /// [RequestError]()
    /// [ResourceCountLimitExceededError]()
    /// </summary>
    /// <param name="customerId">
    /// Required. The ID of the customer whose Keyword Plan campaigns are being modified.
    /// </param>
    /// <param name="operations">
    /// Required. The list of operations to perform on individual Keyword Plan campaigns.
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<MutateKeywordPlanCampaignsResponse> MutateKeywordPlanCampaignsAsync(string customerId, scg::IEnumerable<KeywordPlanCampaignOperation> operations, gaxgrpc::CallSettings callSettings = null) =>
        MutateKeywordPlanCampaignsAsync(new MutateKeywordPlanCampaignsRequest
        {
            CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)),
            Operations =
            {
                gax::GaxPreconditions.CheckNotNull(operations, nameof(operations)),
            },
        }, callSettings);
    /// <summary>
    /// Creates, updates, or removes Keyword Plan campaigns. Operation statuses are
    /// returned.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [DatabaseError]()
    /// [FieldError]()
    /// [FieldMaskError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [KeywordPlanCampaignError]()
    /// [KeywordPlanError]()
    /// [ListOperationError]()
    /// [MutateError]()
    /// [QuotaError]()
    /// [RangeError]()
    /// [RequestError]()
    /// [ResourceCountLimitExceededError]()
    /// </summary>
    /// <param name="customerId">
    /// Required. The ID of the customer whose Keyword Plan campaigns are being modified.
    /// </param>
    /// <param name="operations">
    /// Required. The list of operations to perform on individual Keyword Plan campaigns.
    /// </param>
    /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<MutateKeywordPlanCampaignsResponse> MutateKeywordPlanCampaignsAsync(string customerId, scg::IEnumerable<KeywordPlanCampaignOperation> operations, st::CancellationToken cancellationToken) =>
        MutateKeywordPlanCampaignsAsync(customerId, operations, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
}
/// <summary>KeywordPlanCampaignService client wrapper implementation, for convenient use.</summary>
/// <remarks>
/// Service to manage Keyword Plan campaigns.
/// </remarks>
public sealed partial class KeywordPlanCampaignServiceClientImpl : KeywordPlanCampaignServiceClient
{
    private readonly gaxgrpc::ApiCall<GetKeywordPlanCampaignRequest, gagvr::KeywordPlanCampaign> _callGetKeywordPlanCampaign;
    private readonly gaxgrpc::ApiCall<MutateKeywordPlanCampaignsRequest, MutateKeywordPlanCampaignsResponse> _callMutateKeywordPlanCampaigns;
    /// <summary>
    /// Constructs a client wrapper for the KeywordPlanCampaignService service, with the specified gRPC client and
    /// settings.
    /// </summary>
    /// <param name="grpcClient">The underlying gRPC client.</param>
    /// <param name="settings">
    /// The base <see cref="KeywordPlanCampaignServiceSettings"/> used within this client.
    /// </param>
    public KeywordPlanCampaignServiceClientImpl(KeywordPlanCampaignService.KeywordPlanCampaignServiceClient grpcClient, KeywordPlanCampaignServiceSettings settings)
    {
        GrpcClient = grpcClient;
        KeywordPlanCampaignServiceSettings effectiveSettings = settings ?? KeywordPlanCampaignServiceSettings.GetDefault();
        gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings);
        // Each ApiCall is built from the per-RPC settings, then passed through the
        // generic and RPC-specific Modify_* partial hooks for manual customization.
        _callGetKeywordPlanCampaign = clientHelper.BuildApiCall<GetKeywordPlanCampaignRequest, gagvr::KeywordPlanCampaign>(grpcClient.GetKeywordPlanCampaignAsync, grpcClient.GetKeywordPlanCampaign, effectiveSettings.GetKeywordPlanCampaignSettings).WithGoogleRequestParam("resource_name", request => request.ResourceName);
        Modify_ApiCall(ref _callGetKeywordPlanCampaign);
        Modify_GetKeywordPlanCampaignApiCall(ref _callGetKeywordPlanCampaign);
        _callMutateKeywordPlanCampaigns = clientHelper.BuildApiCall<MutateKeywordPlanCampaignsRequest, MutateKeywordPlanCampaignsResponse>(grpcClient.MutateKeywordPlanCampaignsAsync, grpcClient.MutateKeywordPlanCampaigns, effectiveSettings.MutateKeywordPlanCampaignsSettings).WithGoogleRequestParam("customer_id", request => request.CustomerId);
        Modify_ApiCall(ref _callMutateKeywordPlanCampaigns);
        Modify_MutateKeywordPlanCampaignsApiCall(ref _callMutateKeywordPlanCampaigns);
        OnConstruction(grpcClient, effectiveSettings, clientHelper);
    }
    /// <summary>Hook applied to every ApiCall; no-op unless implemented in a manual partial class.</summary>
    partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>;
    /// <summary>Hook for customizing the GetKeywordPlanCampaign ApiCall.</summary>
    partial void Modify_GetKeywordPlanCampaignApiCall(ref gaxgrpc::ApiCall<GetKeywordPlanCampaignRequest, gagvr::KeywordPlanCampaign> call);
    /// <summary>Hook for customizing the MutateKeywordPlanCampaigns ApiCall.</summary>
    partial void Modify_MutateKeywordPlanCampaignsApiCall(ref gaxgrpc::ApiCall<MutateKeywordPlanCampaignsRequest, MutateKeywordPlanCampaignsResponse> call);
    /// <summary>Hook invoked at the end of construction.</summary>
    partial void OnConstruction(KeywordPlanCampaignService.KeywordPlanCampaignServiceClient grpcClient, KeywordPlanCampaignServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);
    /// <summary>The underlying gRPC KeywordPlanCampaignService client</summary>
    public override KeywordPlanCampaignService.KeywordPlanCampaignServiceClient GrpcClient { get; }
    /// <summary>Hook for modifying each GetKeywordPlanCampaign request/settings before the call.</summary>
    partial void Modify_GetKeywordPlanCampaignRequest(ref GetKeywordPlanCampaignRequest request, ref gaxgrpc::CallSettings settings);
    /// <summary>Hook for modifying each MutateKeywordPlanCampaigns request/settings before the call.</summary>
    partial void Modify_MutateKeywordPlanCampaignsRequest(ref MutateKeywordPlanCampaignsRequest request, ref gaxgrpc::CallSettings settings);
    /// <summary>
    /// Returns the requested Keyword Plan campaign in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public override gagvr::KeywordPlanCampaign GetKeywordPlanCampaign(GetKeywordPlanCampaignRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_GetKeywordPlanCampaignRequest(ref request, ref callSettings);
        return _callGetKeywordPlanCampaign.Sync(request, callSettings);
    }
    /// <summary>
    /// Returns the requested Keyword Plan campaign in full detail.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [QuotaError]()
    /// [RequestError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public override stt::Task<gagvr::KeywordPlanCampaign> GetKeywordPlanCampaignAsync(GetKeywordPlanCampaignRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_GetKeywordPlanCampaignRequest(ref request, ref callSettings);
        return _callGetKeywordPlanCampaign.Async(request, callSettings);
    }
    /// <summary>
    /// Creates, updates, or removes Keyword Plan campaigns. Operation statuses are
    /// returned.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [DatabaseError]()
    /// [FieldError]()
    /// [FieldMaskError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [KeywordPlanCampaignError]()
    /// [KeywordPlanError]()
    /// [ListOperationError]()
    /// [MutateError]()
    /// [QuotaError]()
    /// [RangeError]()
    /// [RequestError]()
    /// [ResourceCountLimitExceededError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public override MutateKeywordPlanCampaignsResponse MutateKeywordPlanCampaigns(MutateKeywordPlanCampaignsRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_MutateKeywordPlanCampaignsRequest(ref request, ref callSettings);
        return _callMutateKeywordPlanCampaigns.Sync(request, callSettings);
    }
    /// <summary>
    /// Creates, updates, or removes Keyword Plan campaigns. Operation statuses are
    /// returned.
    ///
    /// List of thrown errors:
    /// [AuthenticationError]()
    /// [AuthorizationError]()
    /// [DatabaseError]()
    /// [FieldError]()
    /// [FieldMaskError]()
    /// [HeaderError]()
    /// [InternalError]()
    /// [KeywordPlanCampaignError]()
    /// [KeywordPlanError]()
    /// [ListOperationError]()
    /// [MutateError]()
    /// [QuotaError]()
    /// [RangeError]()
    /// [RequestError]()
    /// [ResourceCountLimitExceededError]()
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public override stt::Task<MutateKeywordPlanCampaignsResponse> MutateKeywordPlanCampaignsAsync(MutateKeywordPlanCampaignsRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_MutateKeywordPlanCampaignsRequest(ref request, ref callSettings);
        return _callMutateKeywordPlanCampaigns.Async(request, callSettings);
    }
}
}
| |
using System;
using System.IO;
using System.Net;
using System.Linq;
using System.Threading;
using MonoTorrent;
using MonoTorrent.Client;
using MonoTorrent.BEncoding;
using MonoTorrent.Client.Encryption;
using MonoTorrent.Dht.Listeners;
using MonoTorrent.Dht;
using MonoTorrent.Common;
using System.Collections.Generic;
using BTDeploy.Helpers;
namespace BTDeploy.ServiceDaemon.TorrentClients
{
public class MonoTorrentClient : ITorrentClient
{
// Listen port used for both the torrent engine and the DHT listener (see Start()).
public int Port = 55999;
// Per-torrent defaults fed into DefaultTorrentSettings in the constructor.
public int DefaultTorrentUploadSlots = 4;
public int DefaultTorrentOpenConnections = 150;
// Directory containing the managed .torrent files.
protected readonly string TorrentFileDirectory;
// Directory where .torrent files that fail to add are copied for inspection.
protected readonly string BrokenTorrentFileDirectory;
// Path of the persisted DHT node bytes.
protected readonly string DHTNodeFile;
// Path of the persisted BEncoded fast-resume dictionary.
protected readonly string FastResumeFile;
// Path of the cache mapping torrent info-hashes to output directories.
protected readonly string TorrentMappingsCacheFile;
protected ClientEngine Engine;
protected TorrentSettings DefaultTorrentSettings;
protected BEncodedDictionary FastResume;
protected ListFile<TorrentMapping> TorrentMappingsCache;
/// <summary>
/// Creates the client's working directories and state-file paths under
/// <paramref name="applicationDataDirectoryPath"/>, and initialises the
/// mappings cache and default torrent settings. The engine itself is not
/// created until <see cref="Start"/> is called.
/// </summary>
/// <param name="applicationDataDirectoryPath">Root directory for this client's persisted state.</param>
public MonoTorrentClient(string applicationDataDirectoryPath)
{
	// Directory.CreateDirectory is documented to be a no-op when the directory
	// already exists, so the previous Directory.Exists guards were redundant
	// (and racy); create unconditionally.
	var monoTorrentClientApplicationDataDirectoryPath = Path.Combine (applicationDataDirectoryPath, GetType().Name);
	Directory.CreateDirectory (monoTorrentClientApplicationDataDirectoryPath);
	TorrentFileDirectory = Path.Combine (monoTorrentClientApplicationDataDirectoryPath, "torrents");
	Directory.CreateDirectory (TorrentFileDirectory);
	BrokenTorrentFileDirectory = Path.Combine (monoTorrentClientApplicationDataDirectoryPath, "broken");
	Directory.CreateDirectory (BrokenTorrentFileDirectory);
	// Paths of state persisted between runs (read back in Start()).
	DHTNodeFile = Path.Combine (monoTorrentClientApplicationDataDirectoryPath, "dhtNodes");
	FastResumeFile = Path.Combine (monoTorrentClientApplicationDataDirectoryPath, "fastResume");
	TorrentMappingsCacheFile = Path.Combine (monoTorrentClientApplicationDataDirectoryPath, "torrentMappingsCache");
	// Cache mapping torrent info-hashes to their output directories.
	TorrentMappingsCache = new ListFile<TorrentMapping> (TorrentMappingsCacheFile);
	// Default per-torrent settings; the two trailing zeros are the rate
	// arguments — presumably "unlimited" in MonoTorrent; confirm against the
	// TorrentSettings constructor.
	DefaultTorrentSettings = new TorrentSettings (DefaultTorrentUploadSlots, DefaultTorrentOpenConnections, 0, 0);
}
/// <summary>
/// Creates the MonoTorrent engine, starts DHT, loads persisted fast-resume and
/// mapping state, reconciles the on-disk .torrent files with the mappings cache,
/// and re-adds every surviving torrent to the engine.
/// </summary>
public void Start()
{
// Create an instance of the engine.
Engine = new ClientEngine (new EngineSettings
{
PreferEncryption = false,
AllowedEncryption = EncryptionTypes.All
});
Engine.ChangeListenEndpoint (new IPEndPoint (IPAddress.Any, Port));
// Setup DHT listener.
byte[] nodes = null;
try
{
nodes = File.ReadAllBytes(DHTNodeFile);
}
catch
{
// Best-effort: missing/unreadable node file just means a cold DHT start
// (DhtEngine.Start accepts null nodes).
}
var dhtListner = new DhtListener (new IPEndPoint (IPAddress.Any, Port));
Engine.RegisterDht (new DhtEngine (dhtListner));
dhtListner.Start ();
Engine.DhtEngine.Start (nodes);
// Fast resume.
try
{
FastResume = BEncodedValue.Decode<BEncodedDictionary>(File.ReadAllBytes(FastResumeFile));
}
catch
{
// Best-effort: fall back to an empty dictionary if the file is absent or corrupt.
FastResume = new BEncodedDictionary();
}
// Try load the cache file.
try
{
TorrentMappingsCache.Load();
}
catch
{
// Best-effort: an unreadable cache is treated as empty.
}
// Cross reference torrent files against cache entries (sync).
// 1) Drop cache entries whose .torrent file no longer exists...
var torrents = Directory.GetFiles (TorrentFileDirectory, "*.torrent").Select (Torrent.Load).ToList();
TorrentMappingsCache.RemoveAll (tmc => !torrents.Any (t => t.InfoHash.ToString () == tmc.InfoHash));
TorrentMappingsCache.Save ();
// 2) ...then delete .torrent files that have no cache entry.
torrents.Where (t => !TorrentMappingsCache.Any (tmc => tmc.InfoHash == t.InfoHash.ToString ()))
.ToList ().ForEach (t => File.Delete(t.TorrentPath));
// Reload the torrents and add them.
Directory.GetFiles (TorrentFileDirectory, "*.torrent").ToList ().ForEach (torrentFile =>
{
var torrent = Torrent.Load(torrentFile);
// Every surviving file is guaranteed a cache entry by the sync above.
var outputDirectoryPath = TorrentMappingsCache.First(tmc => tmc.InfoHash == torrent.InfoHash.ToString()).OutputDirectoryPath;
try
{
Add(torrent, outputDirectoryPath);
}
catch
{
// Quarantine torrents that fail to add, then purge them from the client.
var brokenTorrentFileName = Path.GetFileName(torrentFile);
var brokenTorrentFilePath = System.IO.Path.Combine(BrokenTorrentFileDirectory, brokenTorrentFileName);
File.Copy(torrentFile, brokenTorrentFilePath, true);
Remove(torrent.InfoHash.ToString());
}
});
}
/// <summary>
/// Returns a snapshot of the details of every torrent currently registered with the engine.
/// </summary>
public ITorrentDetails[] List ()
{
    var details = new List<ITorrentDetails> ();
    foreach (var torrentManager in Engine.Torrents)
        details.Add (Convert (torrentManager));
    return details.ToArray ();
}
/// <summary>
/// Adds a torrent from a stream and starts downloading it into
/// <paramref name="outputDirectoryPath"/>. The .torrent file is persisted to the
/// application data directory so it can be re-added on restart.
/// </summary>
/// <param name="torrentFile">Stream containing the raw .torrent file.</param>
/// <param name="outputDirectoryPath">Directory the torrent's data is written to.</param>
/// <returns>The torrent's info hash, used as its identifier.</returns>
/// <exception cref="TorrentAlreadyAddedException">The torrent is already registered.</exception>
/// <exception cref="InvalidOutputDirectoryException">The output path exists as a file.</exception>
/// <exception cref="OutputDirectoryAlreadyInUseException">Another torrent already saves into the path.</exception>
public string Add (Stream torrentFile, string outputDirectoryPath)
{
    // Load the torrent.
    var torrent = Torrent.Load(torrentFile);
    // Check if torrent already added.
    if(Engine.Torrents.Any(t => t.InfoHash.ToString() == torrent.InfoHash.ToString()))
        throw new TorrentAlreadyAddedException();
    // Check if outputDirectoryPath is not a file.
    if (File.Exists (outputDirectoryPath))
        throw new InvalidOutputDirectoryException ("Already exists as a file. Can either not exist or must be a directory.");
    // Check if output directory already in use.
    if (Engine.Torrents.Any (t => t.SavePath.StartsWith (outputDirectoryPath, StringComparison.CurrentCultureIgnoreCase)))
        throw new OutputDirectoryAlreadyInUseException ();
    // Save torrent file. File.Create truncates any pre-existing file; the previous
    // File.OpenWrite (OpenOrCreate) would leave trailing bytes from a longer stale
    // file in place and corrupt the stored torrent.
    torrentFile.Position = 0;
    var applicationDataTorrentFilePath = Path.Combine (TorrentFileDirectory, torrent.InfoHash.ToString() + ".torrent");
    using (var file = File.Create(applicationDataTorrentFilePath))
        StreamHelpers.CopyStream (torrentFile, file);
    // Reload the torrent from disk so its TorrentPath points at the persisted copy.
    torrent = Torrent.Load(applicationDataTorrentFilePath);
    // Create output directory.
    if (!Directory.Exists (outputDirectoryPath))
        Directory.CreateDirectory (outputDirectoryPath);
    // Finally add.
    return Add (torrent, outputDirectoryPath);
}
/// <summary>
/// Registers an already-loaded torrent with the engine, applying any cached
/// fast-resume data, records its output directory in the mappings cache, and starts it.
/// </summary>
/// <param name="torrent">The torrent to register.</param>
/// <param name="outputDirectoryPath">Directory the torrent's data is written to.</param>
/// <returns>The torrent's info hash, used as its identifier.</returns>
protected string Add(Torrent torrent, string outputDirectoryPath)
{
    // Create the torrent manager.
    var torrentManager = new TorrentManager(torrent, outputDirectoryPath, DefaultTorrentSettings, "");
    // Setup fast resume. Note: fast-resume entries are keyed by hex info hash,
    // whereas the mappings cache below uses InfoHash.ToString().
    if (FastResume.ContainsKey (torrent.InfoHash.ToHex ()))
        torrentManager.LoadFastResume (new FastResume ((BEncodedDictionary)FastResume [torrent.InfoHash.ToHex ()]));
    // Add to mappings cache, replacing any stale entry for the same hash.
    TorrentMappingsCache.RemoveAll (tmc => tmc.InfoHash == torrent.InfoHash.ToString ());
    TorrentMappingsCache.Add(new TorrentMapping
    {
        InfoHash = torrent.InfoHash.ToString(),
        OutputDirectoryPath = outputDirectoryPath
    });
    TorrentMappingsCache.Save ();
    // Register and start.
    Engine.Register(torrentManager);
    torrentManager.Start ();
    // Return Id.
    return torrentManager.InfoHash.ToString();
}
/// <summary>
/// Removes a torrent: deletes its persisted .torrent file and cache entry,
/// stops and unregisters it from the engine, and optionally deletes its downloaded data.
/// </summary>
/// <param name="Id">The torrent's info hash string, as returned by Add.</param>
/// <param name="deleteFiles">When true, the torrent's output directory is deleted recursively.</param>
public void Remove (string Id, bool deleteFiles = false)
{
    // Get the torrent manager.
    var torrentManager = Engine.Torrents.First(tm => tm.InfoHash.ToString() == Id);
    // Capture the save path now: the manager is disposed below, and reading
    // SavePath from a disposed manager is a use-after-dispose bug.
    var savePath = torrentManager.SavePath;
    // Delete the torrent file.
    File.Delete (torrentManager.Torrent.TorrentPath);
    // Delete the cache reference.
    TorrentMappingsCache.RemoveAll (tmc => tmc.InfoHash == torrentManager.Torrent.InfoHash.ToString ());
    TorrentMappingsCache.Save ();
    // Stop and remove the torrent from the engine.
    torrentManager.Stop();
    Engine.Unregister(torrentManager);
    torrentManager.Dispose();
    // Delete files if required.
    if(deleteFiles)
        Directory.Delete(savePath, true);
}
/// <summary>
/// Creates a .torrent for the contents of <paramref name="sourceDirectoryPath"/>
/// and returns it as a readable stream positioned at the start.
/// </summary>
/// <param name="name">Torrent name.</param>
/// <param name="sourceDirectoryPath">Directory whose files the torrent describes.</param>
/// <param name="trackers">Optional tracker URLs added as a single tier.</param>
/// <returns>A stream containing the bencoded torrent, positioned at 0.</returns>
/// <exception cref="InvalidSourceDirectoryException">The source directory does not exist.</exception>
public Stream Create (string name, string sourceDirectoryPath, IEnumerable<string> trackers = null)
{
    // Check sourceDirectoryPath is a directory.
    if (!Directory.Exists (sourceDirectoryPath))
        throw new InvalidSourceDirectoryException ("Was not found or is not a directory.");
    // Create torrent file mappings.
    var fileMappings = new TorrentFileSource (sourceDirectoryPath, true).Files.Select (fm =>
    {
        var info = new FileInfo (fm.Source);
        return new TorrentFile (fm.Destination, info.Length, fm.Source);
    }).ToList();
    // Make creator.
    var creator = new TorrentCreator ();
    creator.PieceLength = TorrentCreator.RecommendedPieceSize (fileMappings);
    if (trackers != null)
        creator.Announces.Add (new RawTrackerTier(trackers));
    // Make torrent, convert to stream and return.
    var torrentStream = new MemoryStream ();
    var torrentRAW = creator.Create (name, fileMappings).Encode ();
    torrentStream.Write(torrentRAW, 0, torrentRAW.Length);
    // Rewind before returning: without this the stream is positioned at its end
    // and callers reading from it immediately see EOF.
    torrentStream.Position = 0;
    return torrentStream;
}
/// <summary>
/// Maps a MonoTorrent <c>TorrentManager</c> onto the public <c>TorrentDetails</c> DTO,
/// collapsing the engine's fine-grained states into the coarse TorrentStatus values.
/// </summary>
private TorrentDetails Convert(TorrentManager torrentManager)
{
    var torrent = torrentManager.Torrent;

    // Collapse the engine state first; unmapped states fall through to the
    // enum's default value, matching the previous behaviour of leaving Status unset.
    TorrentStatus status;
    switch (torrentManager.State)
    {
        case TorrentState.Hashing:
            status = TorrentStatus.Hashing;
            break;
        case TorrentState.Downloading:
        case TorrentState.Stopping:
        case TorrentState.Metadata:
            status = TorrentStatus.Downloading;
            break;
        case TorrentState.Seeding:
            status = TorrentStatus.Seeding;
            break;
        case TorrentState.Stopped:
        case TorrentState.Paused:
            status = TorrentStatus.Stopped;
            break;
        case TorrentState.Error:
            status = TorrentStatus.Error;
            break;
        default:
            status = default(TorrentStatus);
            break;
    }

    return new TorrentDetails
    {
        Id = torrent.InfoHash.ToString(),
        Name = torrent.Name,
        Files = torrent.Files.Select(f => f.Path).ToArray(),
        OutputDirectory = torrentManager.SavePath,
        Size = torrent.Size,
        Progress = torrentManager.Progress,
        DownloadBytesPerSecond = torrentManager.Monitor.DownloadSpeed,
        UploadBytesPerSecond = torrentManager.Monitor.UploadSpeed,
        Status = status
    };
}
/// <summary>
/// Persisted pairing of a torrent's info hash with the directory its data is saved to;
/// serialized via ListFile, hence the public parameterless constructor.
/// </summary>
public class TorrentMapping
{
    // Info hash string, as produced by InfoHash.ToString().
    public string InfoHash { get; set; }
    // Absolute path the torrent's data is written to.
    public string OutputDirectoryPath { get; set; }
    public TorrentMapping() {}
}
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
namespace NPOI.HWPF.Model
{
using System.Collections.Generic;
using NPOI.Util;
using System;
using NPOI.HWPF.Model.IO;
/**
* Represents a PAP FKP. The style properties for paragraph and character Runs
* are stored in fkps. There are PAP fkps for paragraph properties and CHP fkps
* for character run properties. The first part of the fkp for both CHP and PAP
* fkps consists of an array of 4 byte int offsets in the main stream for that
* Paragraph's or Character Run's text. The ending offset is the next
* value in the array. For example, if an fkp has X number of Paragraph's
* stored in it then there are (x + 1) 4 byte ints in the beginning array. The
* number X is determined by the last byte in a 512 byte fkp.
*
* CHP and PAP fkps also store the compressed styles(grpprl) that correspond to
* the offsets on the front of the fkp. The offset of the grpprls is determined
* differently for CHP fkps and PAP fkps.
*
* @author Ryan Ackley
*/
public class PAPFormattedDiskPage : FormattedDiskPage
{
    // A BX entry is 1 byte (word offset of the PAPX) + 12 bytes of paragraph height (PHE).
    private static int BX_SIZE = 13;
    // File character (FC) offsets at the front of the FKP are 4-byte ints.
    private static int FC_SIZE = 4;
    private List<PAPX> _papxList = new List<PAPX>();
    // PAPXs that did not fit into this 512-byte FKP during ToByteArray; null until overflow occurs.
    private List<PAPX> _overFlow;

    public PAPFormattedDiskPage(byte[] dataStream):this()
    {
    }

    public PAPFormattedDiskPage()
    {
    }

    /**
     * Creates a PAPFormattedDiskPage from a 512 byte array.
     * Deprecated: fcMin/tpt are unused; use the CharIndexTranslator overload.
     */
    [Obsolete]
    public PAPFormattedDiskPage(byte[] documentStream, byte[] dataStream, int offset, int fcMin, TextPieceTable tpt)
        : this(documentStream, dataStream, offset, tpt )
    {
    }

    /**
     * Creates a PAPFormattedDiskPage from a 512 byte array, converting the byte
     * offsets stored in the FKP into character offsets via the translator.
     */
    public PAPFormattedDiskPage(byte[] documentStream, byte[] dataStream,
        int offset, CharIndexTranslator translator)
        :base(documentStream, offset)
    {
        for (int x = 0; x < _crun; x++)
        {
            int bytesStartAt = GetStart(x);
            int bytesEndAt = GetEnd(x);
            int charStartAt = translator.GetCharIndex(bytesStartAt);
            int charEndAt = translator.GetCharIndex(bytesEndAt, charStartAt);
            PAPX papx = new PAPX(charStartAt, charEndAt, GetGrpprl(x), GetParagraphHeight(x), dataStream);
            _papxList.Add(papx);
        }
        // Release the raw 512-byte page; everything needed now lives in _papxList.
        _fkp = null;
    }

    /**
     * Fills the queue for writing.
     *
     * @param Filler a List of PAPXs
     */
    public void Fill(List<PAPX> Filler)
    {
        _papxList.AddRange(Filler);
    }

    /**
     * Used when writing out a Word document. This method is part of a sequence
     * that is necessary because there is no easy and efficient way to
     * determine the number of PAPXs that will fit into one FKP. The sequence is
     * as follows:
     *
     * Fill()
     * ToByteArray()
     * GetOverflow()
     *
     * @return The remaining PAPXs that didn't fit into this FKP (null if all fit).
     */
    internal List<PAPX> GetOverflow()
    {
        return _overFlow;
    }

    /**
     * Gets the PAPX at index.
     * @param index The index to get the PAPX for.
     * @return The PAPX at index.
     */
    public PAPX GetPAPX(int index)
    {
        return _papxList[index];
    }

    /**
     * Gets the papx grpprl for the paragraph at index in this fkp.
     *
     * @param index The index of the papx to Get.
     * @return a papx grpprl.
     */
    protected override byte[] GetGrpprl(int index)
    {
        // The BX entry's first byte is the grpprl's offset in words (hence * 2).
        int papxOffset = 2 * LittleEndian.GetUByte(_fkp, _offset + (((_crun + 1) * FC_SIZE) + (index * BX_SIZE)));
        int size = 2 * LittleEndian.GetUByte(_fkp, _offset + papxOffset);
        if (size == 0)
        {
            // A zero first byte means the real size byte follows (padding variant).
            size = 2 * LittleEndian.GetUByte(_fkp, _offset + ++papxOffset);
        }
        else
        {
            // Otherwise the stored word count includes the istd padding byte.
            size--;
        }
        byte[] papx = new byte[size];
        Array.Copy(_fkp, _offset + ++papxOffset, papx, 0, size);
        return papx;
    }

    /**
     * Creates a 512-byte FKP representation of as many queued PAPXs as fit.
     * Oversized grpprls (> 488 bytes) are spilled to the data stream and replaced
     * by an 8-byte sprmPHugePapx2 referencing them. PAPXs that do not fit are
     * left in the overflow list (see GetOverflow()).
     *
     * @return A byte array representing this data structure.
     */
    internal byte[] ToByteArray(HWPFStream dataStream,
        CharIndexTranslator translator)
    {
        byte[] buf = new byte[512];
        int size = _papxList.Count;
        int grpprlOffset = 0;
        int bxOffset = 0;
        int fcOffset = 0;
        byte[] lastGrpprl = new byte[0];

        // total size is currently the size of one FC (the trailing end-offset).
        int totalSize = FC_SIZE;
        int index = 0;
        // First pass: count how many PAPXs fit in 512 bytes (identical grpprls are shared).
        for (; index < size; index++)
        {
            byte[] grpprl = ((PAPX)_papxList[index]).GetGrpprl();
            int grpprlLength = grpprl.Length;
            // is grpprl huge?
            if (grpprlLength > 488)
            {
                grpprlLength = 8; // set equal to size of sprmPHugePapx grpprl
            }
            // check to see if we have enough room for an FC, a BX, and the grpprl
            // and the 1 byte size of the grpprl.
            int addition = 0;
            if (!Arrays.Equals(grpprl, lastGrpprl))
            {
                addition = (FC_SIZE + BX_SIZE + grpprlLength + 1);
            }
            else
            {
                addition = (FC_SIZE + BX_SIZE);
            }
            totalSize += addition;
            // if size is uneven we will have to add one so the first grpprl falls
            // on a word boundary
            if (totalSize > 511 + (index % 2))
            {
                totalSize -= addition;
                break;
            }
            // grpprls must fall on word boundaries
            if (grpprlLength % 2 > 0)
            {
                totalSize += 1;
            }
            else
            {
                totalSize += 2;
            }
            lastGrpprl = grpprl;
        }
        // see if we couldn't fit some
        if (index != size)
        {
            _overFlow = new List<PAPX>();
            _overFlow.AddRange(_papxList.GetRange(index, size-index));
        }
        // index should equal number of papxs that will be in this fkp now.
        // The last byte of the FKP holds that count.
        buf[511] = (byte)index;
        bxOffset = (FC_SIZE * index) + FC_SIZE;
        // grpprls are written backwards from the end of the page.
        grpprlOffset = 511;
        // NOTE(review): if index == 0 (first PAPX alone exceeds the page), papx stays
        // null and the final PutInt below would throw — confirm callers never hit this.
        PAPX papx = null;
        lastGrpprl = new byte[0];
        // Second pass: emit FC offsets, BX entries and (deduplicated) grpprls.
        for (int x = 0; x < index; x++)
        {
            papx = _papxList[x];
            byte[] phe = papx.GetParagraphHeight().ToArray();
            byte[] grpprl = papx.GetGrpprl();
            // is grpprl huge?
            if (grpprl.Length > 488)
            {
                /*
                // if so do we have storage at GetHugeGrpprloffset()
                int hugeGrpprlOffset = papx.GetHugeGrpprlOffset();
                if (hugeGrpprlOffset == -1) // then we have no storage...
                {
                    throw new InvalidOperationException(
                        "This Paragraph has no dataStream storage.");
                }
                // we have some storage...
                // get the size of the existing storage
                int maxHugeGrpprlSize = LittleEndian.GetUShort(_dataStream, hugeGrpprlOffset);
                if (maxHugeGrpprlSize < grpprl.Length - 2)
                { // grpprl.Length-2 because we don't store the istd
                    throw new InvalidOperationException(
                        "This Paragraph's dataStream storage is too small.");
                }
                // store grpprl at hugeGrpprlOffset
                Array.Copy(grpprl, 2, _dataStream, hugeGrpprlOffset + 2,
                           grpprl.Length - 2); // grpprl.Length-2 because we don't store the istd
                LittleEndian.PutUShort(_dataStream, hugeGrpprlOffset, grpprl.Length - 2);
                */
                // Spill the grpprl (minus its 2-byte istd) into the data stream...
                byte[] hugePapx = new byte[grpprl.Length - 2];
                System.Array.Copy(grpprl, 2, hugePapx, 0, grpprl.Length - 2);
                int dataStreamOffset = dataStream.Offset;
                dataStream.Write(hugePapx);
                // ...and replace it with a grpprl containing only a sprmPHugePapx2.
                int istd = LittleEndian.GetUShort(grpprl, 0);
                grpprl = new byte[8];
                LittleEndian.PutUShort(grpprl, 0, istd);
                LittleEndian.PutUShort(grpprl, 2, 0x6646); // sprmPHugePapx2
                LittleEndian.PutInt(grpprl, 4, dataStreamOffset);
            }
            // Consecutive identical grpprls share one stored copy.
            bool same = Arrays.Equals(lastGrpprl, grpprl);
            if (!same)
            {
                grpprlOffset -= (grpprl.Length + (2 - grpprl.Length % 2));
                grpprlOffset -= (grpprlOffset % 2);
            }
            LittleEndian.PutInt(buf, fcOffset, translator.GetByteIndex(papx.Start));
            // BX entry: word offset of the grpprl, then the 12-byte PHE.
            buf[bxOffset] = (byte)(grpprlOffset / 2);
            Array.Copy(phe, 0, buf, bxOffset + 1, phe.Length);
            // refer to the section on PAPX in the spec. Places a size on the front
            // of the PAPX. Has to do with how the grpprl stays on word
            // boundaries.
            if (!same)
            {
                int copyOffset = grpprlOffset;
                if ((grpprl.Length % 2) > 0)
                {
                    buf[copyOffset++] = (byte)((grpprl.Length + 1) / 2);
                }
                else
                {
                    buf[++copyOffset] = (byte)((grpprl.Length) / 2);
                    copyOffset++;
                }
                Array.Copy(grpprl, 0, buf, copyOffset, grpprl.Length);
                lastGrpprl = grpprl;
            }
            bxOffset += BX_SIZE;
            fcOffset += FC_SIZE;
        }
        // Trailing FC: the end offset of the last paragraph written.
        LittleEndian.PutInt(buf, fcOffset, translator.GetByteIndex(papx.End));
        return buf;
    }

    /**
     * Used to get the ParagraphHeight of a PAPX at a particular index.
     * @param index
     * @return The ParagraphHeight
     */
    private ParagraphHeight GetParagraphHeight(int index)
    {
        // PHE lives 1 byte into the BX entry (after the grpprl word-offset byte).
        int pheOffset = _offset + 1 + (((_crun + 1) * 4) + (index * 13));
        ParagraphHeight phe = new ParagraphHeight(_fkp, pheOffset);
        return phe;
    }
}
}
| |
// Copyright (c) ppy Pty Ltd <[email protected]>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using System.Collections.Generic;
using System.Linq;
using osu.Framework.Allocation;
using osu.Framework.Caching;
using osu.Framework.Extensions.EnumExtensions;
using osu.Framework.Layout;
using osuTK;
namespace osu.Framework.Graphics.Containers
{
/// <summary>
/// A container which allows laying out <see cref="Drawable"/>s in a grid.
/// </summary>
public class GridContainer : CompositeDrawable
{
    public GridContainer()
    {
        AddLayout(cellLayout);
        AddLayout(cellChildLayout);
    }

    [BackgroundDependencyLoader]
    private void load()
    {
        layoutContent();
    }

    private GridContainerContent content;

    /// <summary>
    /// The content of this <see cref="GridContainer"/>, arranged in a 2D grid array, where each array
    /// of <see cref="Drawable"/>s represents a row and each element of that array represents a column.
    /// <para>
    /// Null elements are allowed to represent blank rows/cells.
    /// </para>
    /// </summary>
    public GridContainerContent Content
    {
        get => content;
        set
        {
            if (content?.Equals(value) == true)
                return;

            // Re-wire the per-element change notification to the new content.
            if (content != null)
                content.ArrayElementChanged -= onContentChange;

            content = value;

            onContentChange();

            if (content != null)
                content.ArrayElementChanged += onContentChange;
        }
    }

    private void onContentChange()
    {
        // Cells are rebuilt lazily on the next Update().
        cellContent.Invalidate();
    }

    private Dimension[] rowDimensions = Array.Empty<Dimension>();

    /// <summary>
    /// Explicit dimensions for rows. Each index of this array applies to the respective row index inside <see cref="Content"/>.
    /// </summary>
    public Dimension[] RowDimensions
    {
        set
        {
            if (value == null)
                throw new ArgumentNullException(nameof(value));

            if (rowDimensions == value)
                return;

            rowDimensions = value;

            cellLayout.Invalidate();
        }
    }

    private Dimension[] columnDimensions = Array.Empty<Dimension>();

    /// <summary>
    /// Explicit dimensions for columns. Each index of this array applies to the respective column index inside <see cref="Content"/>.
    /// </summary>
    public Dimension[] ColumnDimensions
    {
        set
        {
            if (value == null)
                throw new ArgumentNullException(nameof(value));

            if (columnDimensions == value)
                return;

            columnDimensions = value;

            cellLayout.Invalidate();
        }
    }

    /// <summary>
    /// Controls which <see cref="Axes"/> are automatically sized w.r.t. <see cref="CompositeDrawable.InternalChildren"/>.
    /// Children's <see cref="Drawable.BypassAutoSizeAxes"/> are ignored for automatic sizing.
    /// Most notably, <see cref="Drawable.RelativePositionAxes"/> and <see cref="Drawable.RelativeSizeAxes"/> of children
    /// do not affect automatic sizing to avoid circular size dependencies.
    /// It is not allowed to manually set <see cref="Drawable.Size"/> (or <see cref="Drawable.Width"/> / <see cref="Drawable.Height"/>)
    /// on any <see cref="Axes"/> which are automatically sized.
    /// </summary>
    public new Axes AutoSizeAxes
    {
        get => base.AutoSizeAxes;
        set => base.AutoSizeAxes = value;
    }

    protected override void Update()
    {
        base.Update();

        // Content must be distributed into cells before the cells can be laid out.
        layoutContent();
        layoutCells();
    }

    // Tracks whether Content has been distributed into cell containers.
    private readonly Cached cellContent = new Cached();
    // Tracks whether cell positions/sizes are current w.r.t. our draw size.
    private readonly LayoutValue cellLayout = new LayoutValue(Invalidation.DrawInfo | Invalidation.RequiredParentSizeToFit);
    // Tracks child-driven invalidations (size/presence) that require re-layout.
    private readonly LayoutValue cellChildLayout = new LayoutValue(Invalidation.RequiredParentSizeToFit | Invalidation.Presence, InvalidationSource.Child);

    private CellContainer[,] cells = new CellContainer[0, 0];
    private int cellRows => cells.GetLength(0);
    private int cellColumns => cells.GetLength(1);

    /// <summary>
    /// Moves content from <see cref="Content"/> into cells.
    /// </summary>
    private void layoutContent()
    {
        if (cellContent.IsValid)
            return;

        int requiredRows = Content?.Count ?? 0;
        int requiredColumns = requiredRows == 0 ? 0 : Content?.Max(c => c?.Count ?? 0) ?? 0;

        // Clear cell containers without disposing, as the content might be reused
        foreach (var cell in cells)
            cell.Clear(false);

        // It's easier to just re-construct the cell containers instead of resizing
        // If this becomes a bottleneck we can transition to using lists, but this keeps the structure clean...
        ClearInternal();
        cellLayout.Invalidate();

        // Create the new cell containers and add content
        cells = new CellContainer[requiredRows, requiredColumns];

        for (int r = 0; r < cellRows; r++)
        {
            for (int c = 0; c < cellColumns; c++)
            {
                // Add cell
                cells[r, c] = new CellContainer();

                // Allow empty rows
                if (Content[r] == null)
                    continue;

                // Allow non-square grids
                if (c >= Content[r].Count)
                    continue;

                // Allow empty cells
                if (Content[r][c] == null)
                    continue;

                // Add content
                cells[r, c].Add(Content[r][c]);
                cells[r, c].Depth = Content[r][c].Depth;

                AddInternal(cells[r, c]);
            }
        }

        cellContent.Validate();
    }

    /// <summary>
    /// Repositions/resizes cells.
    /// </summary>
    private void layoutCells()
    {
        // A child invalidation is promoted into a full cell re-layout.
        if (!cellChildLayout.IsValid)
        {
            cellLayout.Invalidate();
            cellChildLayout.Validate();
        }

        if (cellLayout.IsValid)
            return;

        float[] widths = distribute(columnDimensions, DrawWidth, getCellSizesAlongAxis(Axes.X, DrawWidth));
        float[] heights = distribute(rowDimensions, DrawHeight, getCellSizesAlongAxis(Axes.Y, DrawHeight));

        // Cells are stacked: each cell starts where its left/top neighbour ends.
        for (int col = 0; col < cellColumns; col++)
        {
            for (int row = 0; row < cellRows; row++)
            {
                cells[row, col].Size = new Vector2(widths[col], heights[row]);

                if (col > 0)
                    cells[row, col].X = cells[row, col - 1].X + cells[row, col - 1].Width;

                if (row > 0)
                    cells[row, col].Y = cells[row - 1, col].Y + cells[row - 1, col].Height;
            }
        }

        cellLayout.Validate();
    }

    /// <summary>
    /// Retrieves the size of all cells along the span of an axis.
    /// For the X-axis, this retrieves the size of all columns.
    /// For the Y-axis, this retrieves the size of all rows.
    /// </summary>
    /// <param name="axis">The axis span.</param>
    /// <param name="spanLength">The absolute length of the span.</param>
    /// <returns>The size of all cells along the span of <paramref name="axis"/>.</returns>
    /// <exception cref="InvalidOperationException">If the <see cref="Dimension"/> for a cell is unsupported.</exception>
    private float[] getCellSizesAlongAxis(Axes axis, float spanLength)
    {
        var spanDimensions = axis == Axes.X ? columnDimensions : rowDimensions;
        int spanCount = axis == Axes.X ? cellColumns : cellRows;

        float[] sizes = new float[spanCount];

        for (int i = 0; i < spanCount; i++)
        {
            // Unspecified dimensions default to Distributed (size filled in later).
            if (i >= spanDimensions.Length)
                break;

            var dimension = spanDimensions[i];

            switch (dimension.Mode)
            {
                default:
                    throw new InvalidOperationException($"Unsupported dimension: {dimension.Mode}.");

                case GridSizeMode.Distributed:
                    // Sized later by distribute(); leave as 0 for now.
                    break;

                case GridSizeMode.Relative:
                    sizes[i] = dimension.Size * spanLength;
                    break;

                case GridSizeMode.Absolute:
                    sizes[i] = dimension.Size;
                    break;

                case GridSizeMode.AutoSize:
                    float size = 0;

                    if (axis == Axes.X)
                    {
                        // Go through each row and get the width of the cell at the indexed column
                        for (int r = 0; r < cellRows; r++)
                        {
                            var cell = Content[r]?[i];

                            // Relatively-sized cells are excluded to avoid circular size dependencies.
                            if (cell == null || cell.RelativeSizeAxes.HasFlagFast(axis))
                                continue;

                            size = Math.Max(size, getCellWidth(cell));
                        }
                    }
                    else
                    {
                        // Go through each column and get the height of the cell at the indexed row
                        for (int c = 0; c < cellColumns; c++)
                        {
                            var cell = Content[i]?[c];

                            if (cell == null || cell.RelativeSizeAxes.HasFlagFast(axis))
                                continue;

                            size = Math.Max(size, getCellHeight(cell));
                        }
                    }

                    sizes[i] = size;
                    break;
            }

            sizes[i] = Math.Clamp(sizes[i], dimension.MinSize, dimension.MaxSize);
        }

        return sizes;
    }

    // Dead/invisible cells contribute no size to AutoSize dimensions.
    private static bool shouldConsiderCell(Drawable cell) => cell != null && cell.IsAlive && cell.IsPresent;
    private static float getCellWidth(Drawable cell) => shouldConsiderCell(cell) ? cell.BoundingBox.Width : 0;
    private static float getCellHeight(Drawable cell) => shouldConsiderCell(cell) ? cell.BoundingBox.Height : 0;

    /// <summary>
    /// Distributes any available length along all distributed dimensions, if required.
    /// </summary>
    /// <param name="dimensions">The full dimensions of the row or column.</param>
    /// <param name="spanLength">The total available length.</param>
    /// <param name="cellSizes">An array containing pre-filled sizes of any non-distributed cells. This array will be mutated.</param>
    /// <returns><paramref name="cellSizes"/>.</returns>
    private float[] distribute(Dimension[] dimensions, float spanLength, float[] cellSizes)
    {
        // Indices of all distributed cells (indices past the dimensions array are implicitly Distributed)
        int[] distributedIndices = Enumerable.Range(0, cellSizes.Length).Where(i => i >= dimensions.Length || dimensions[i].Mode == GridSizeMode.Distributed).ToArray();

        // The dimensions corresponding to all distributed cells
        IEnumerable<DimensionEntry> distributedDimensions = distributedIndices.Select(i => new DimensionEntry(i, i >= dimensions.Length ? new Dimension() : dimensions[i]));

        // Total number of distributed cells
        int distributionCount = distributedIndices.Length;

        // Non-distributed size
        float requiredSize = cellSizes.Sum();

        // Distribution size for _each_ distributed cell
        float distributionSize = Math.Max(0, spanLength - requiredSize) / distributionCount;

        // Write the sizes of distributed cells. Ordering is important to maximize excess at every step
        foreach (var entry in distributedDimensions.OrderBy(d => d.Dimension.Range))
        {
            // Cells start off at their minimum size, and the total size should not exceed their maximum size
            cellSizes[entry.Index] = Math.Min(entry.Dimension.MaxSize, entry.Dimension.MinSize + distributionSize);

            // If there's no excess, any further distributions are guaranteed to also have no excess, so this becomes a null-op
            // If there is an excess, the excess should be re-distributed among all other n-1 distributed cells
            if (--distributionCount > 0)
                distributionSize += Math.Max(0, distributionSize - entry.Dimension.Range) / distributionCount;
        }

        return cellSizes;
    }

    // Pairs a span index with its Dimension so distributed cells can be sorted by Range.
    private readonly struct DimensionEntry
    {
        public readonly int Index;
        public readonly Dimension Dimension;

        public DimensionEntry(int index, Dimension dimension)
        {
            Index = index;
            Dimension = dimension;
        }
    }

    /// <summary>
    /// Represents one cell of the <see cref="GridContainer"/>.
    /// </summary>
    private class CellContainer : Container
    {
        protected override bool OnInvalidate(Invalidation invalidation, InvalidationSource source)
        {
            bool result = base.OnInvalidate(invalidation, source);

            // Bubble child size/presence changes up to the grid so it re-layouts.
            if (source == InvalidationSource.Child && (invalidation & (Invalidation.RequiredParentSizeToFit | Invalidation.Presence)) > 0)
                result |= Parent?.Invalidate(invalidation, InvalidationSource.Child) ?? false;

            return result;
        }
    }
}
/// <summary>
/// Defines the size of a row or column in a <see cref="GridContainer"/>.
/// </summary>
public class Dimension
{
    /// <summary>
    /// The mode in which this row or column <see cref="GridContainer"/> is sized.
    /// </summary>
    public readonly GridSizeMode Mode;

    /// <summary>
    /// The size of the row or column which this <see cref="Dimension"/> applies to.
    /// Only has an effect if <see cref="Mode"/> is not <see cref="GridSizeMode.Distributed"/>.
    /// </summary>
    public readonly float Size;

    /// <summary>
    /// The minimum size of the row or column which this <see cref="Dimension"/> applies to.
    /// </summary>
    public readonly float MinSize;

    /// <summary>
    /// The maximum size of the row or column which this <see cref="Dimension"/> applies to.
    /// </summary>
    public readonly float MaxSize;

    /// <summary>
    /// Constructs a new <see cref="Dimension"/>.
    /// </summary>
    /// <param name="mode">The sizing mode to use.</param>
    /// <param name="size">The size of this row or column. This only has an effect if <paramref name="mode"/> is not <see cref="GridSizeMode.Distributed"/>.</param>
    /// <param name="minSize">The minimum size of this row or column.</param>
    /// <param name="maxSize">The maximum size of this row or column.</param>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="minSize"/> is negative or exceeds <paramref name="maxSize"/>.</exception>
    public Dimension(GridSizeMode mode = GridSizeMode.Distributed, float size = 0, float minSize = 0, float maxSize = float.MaxValue)
    {
        // Messages fixed to match the guards: minSize == 0 and minSize == maxSize are both valid.
        if (minSize < 0)
            throw new ArgumentOutOfRangeException(nameof(minSize), "Must be greater than or equal to 0.");

        if (minSize > maxSize)
            throw new ArgumentOutOfRangeException(nameof(minSize), $"Must be less than or equal to {nameof(maxSize)}.");

        Mode = mode;
        Size = size;
        MinSize = minSize;
        MaxSize = maxSize;
    }

    /// <summary>
    /// The range of the size of this <see cref="Dimension"/>.
    /// </summary>
    internal float Range => MaxSize - MinSize;
}
/// <summary>
/// Sizing strategies for a <see cref="GridContainer"/> row or column.
/// </summary>
public enum GridSizeMode
{
    /// <summary>
    /// Any remaining area of the <see cref="GridContainer"/> will be divided amongst this and all
    /// other elements which use <see cref="GridSizeMode.Distributed"/>. This is the default.
    /// </summary>
    Distributed,

    /// <summary>
    /// This element should be sized relative to the dimensions of the <see cref="GridContainer"/>.
    /// </summary>
    Relative,

    /// <summary>
    /// This element has a size independent of the <see cref="GridContainer"/>.
    /// </summary>
    Absolute,

    /// <summary>
    /// This element will be sized to the maximum size along its span.
    /// </summary>
    AutoSize
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Globalization;
using System.Numerics;
using Xunit;
namespace System.Tests
{
public partial class Int64Tests
{
[Fact]
public static void Ctor_Empty()
{
    // The parameterless constructor and default(long) are equivalent: both yield zero.
    long i = default(long);
    Assert.Equal(0, i);
}
[Fact]
public static void Ctor_Value()
{
    // A long initialized from an int literal holds that exact value.
    const long i = 41;
    Assert.Equal(41, i);
}
[Fact]
public static void MaxValue()
{
    // 2^63 - 1, written in decimal rather than hex.
    Assert.Equal(9223372036854775807, long.MaxValue);
}
[Fact]
public static void MinValue()
{
    // -2^63, written as a decimal literal (the compiler special-cases this value).
    Assert.Equal(-9223372036854775808, long.MinValue);
}
[Theory]
[InlineData((long)234, (long)234, 0)]
[InlineData((long)234, long.MinValue, 1)]
[InlineData((long)-234, long.MinValue, 1)]
[InlineData((long)long.MinValue, long.MinValue, 0)]
[InlineData((long)234, (long)-123, 1)]
[InlineData((long)234, (long)0, 1)]
[InlineData((long)234, (long)123, 1)]
[InlineData((long)234, (long)456, -1)]
[InlineData((long)234, long.MaxValue, -1)]
[InlineData((long)-234, long.MaxValue, -1)]
[InlineData(long.MaxValue, long.MaxValue, 0)]
[InlineData((long)-234, (long)-234, 0)]
[InlineData((long)-234, (long)234, -1)]
[InlineData((long)-234, (long)-432, 1)]
[InlineData((long)234, null, 1)]
// expected is the sign of the comparison; only the sign of CompareTo is contractual.
public void CompareTo_Other_ReturnsExpected(long i, object value, int expected)
{
    if (value is long longValue)
    {
        // Typed IComparable<long> path; also verify antisymmetry.
        Assert.Equal(expected, Math.Sign(i.CompareTo(longValue)));
        Assert.Equal(-expected, Math.Sign(longValue.CompareTo(i)));
    }

    // Untyped IComparable path (handles null and boxed longs).
    Assert.Equal(expected, Math.Sign(i.CompareTo(value)));
}
[Theory]
[InlineData("a")]
[InlineData(234)] // boxed int, not long — CompareTo(object) requires the exact type
public void CompareTo_ObjectNotLong_ThrowsArgumentException(object value)
{
    AssertExtensions.Throws<ArgumentException>(null, () => ((long)123).CompareTo(value));
}
[Theory]
[InlineData((long)789, (long)789, true)]
[InlineData((long)789, (long)-789, false)]
[InlineData((long)789, (long)0, false)]
[InlineData((long)0, (long)0, true)]
[InlineData((long)-789, (long)-789, true)]
[InlineData((long)-789, (long)789, false)]
[InlineData((long)789, null, false)]
[InlineData((long)789, "789", false)]
[InlineData((long)789, 789, false)]
public static void Equals(long i1, object obj, bool expected)
{
    // Typed path: equal values must also produce equal hash codes.
    if (obj is long i2)
    {
        Assert.Equal(expected, i1.Equals(i2));
        Assert.Equal(expected, i1.GetHashCode().Equals(i2.GetHashCode()));
    }

    // Untyped path: null, strings and boxed ints are never equal to a long.
    Assert.Equal(expected, i1.Equals(obj));
}
[Fact]
public void GetTypeCode_Invoke_ReturnsInt64()
{
    long value = 1;
    Assert.Equal(TypeCode.Int64, value.GetTypeCode());
}
// Cases: { value, format, provider, expected }. Null provider means "use current culture".
// Format strings containing '\0' verify that parsing stops at the first NUL.
public static IEnumerable<object[]> ToString_TestData()
{
    foreach (NumberFormatInfo defaultFormat in new[] { null, NumberFormatInfo.CurrentInfo })
    {
        // All of these specifiers behave as general ("G") formatting.
        foreach (string defaultSpecifier in new[] { "G", "G\0", "\0N222", "\0", "" })
        {
            yield return new object[] { long.MinValue, defaultSpecifier, defaultFormat, "-9223372036854775808" };
            yield return new object[] { (long)-4567, defaultSpecifier, defaultFormat, "-4567" };
            yield return new object[] { (long)0, defaultSpecifier, defaultFormat, "0" };
            yield return new object[] { (long)4567, defaultSpecifier, defaultFormat, "4567" };
            yield return new object[] { long.MaxValue, defaultSpecifier, defaultFormat, "9223372036854775807" };
        }

        // Decimal ("D") with zero-padding precision, hex ("x"), and culture-dependent number ("N").
        yield return new object[] { (long)4567, "D", defaultFormat, "4567" };
        yield return new object[] { (long)4567, "D99", defaultFormat, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004567" };
        yield return new object[] { (long)4567, "D99\09", defaultFormat, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004567" };
        yield return new object[] { (long)-4567, "D99", defaultFormat, "-000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004567" };
        yield return new object[] { (long)0x2468, "x", defaultFormat, "2468" };
        yield return new object[] { (long)-0x2468, "x", defaultFormat, "ffffffffffffdb98" };
        yield return new object[] { (long)2468, "N", defaultFormat, string.Format("{0:N}", 2468.00) };
    }

    // Custom separators/signs to prove the provider is honored.
    var customFormat = new NumberFormatInfo()
    {
        NegativeSign = "#",
        NumberDecimalSeparator = "~",
        NumberGroupSeparator = "*",
        PositiveSign = "&",
        NumberDecimalDigits = 2,
        PercentSymbol = "@",
        PercentGroupSeparator = ",",
        PercentDecimalSeparator = ".",
        PercentDecimalDigits = 5
    };
    yield return new object[] { (long)-2468, "N", customFormat, "#2*468~00" };
    yield return new object[] { (long)2468, "N", customFormat, "2*468~00" };
    yield return new object[] { (long)123, "E", customFormat, "1~230000E&002" };
    yield return new object[] { (long)123, "F", customFormat, "123~00" };
    yield return new object[] { (long)123, "P", customFormat, "12,300.00000 @" };
}
[Theory]
[MemberData(nameof(ToString_TestData))]
public static void ToString(long i, string format, IFormatProvider provider, string expected)
{
    // Format specifiers are case-insensitive, so both casings are checked
    // against the correspondingly-cased expected string.
    string formatUpper = format.ToUpperInvariant();
    string formatLower = format.ToLowerInvariant();
    string expectedUpper = expected.ToUpperInvariant();
    string expectedLower = expected.ToLowerInvariant();

    bool usesDefaultProvider = provider == null || provider == NumberFormatInfo.CurrentInfo;
    bool isGeneralFormat = string.IsNullOrEmpty(format) || formatUpper == "G";

    // The parameterless / provider-only overloads are only equivalent for the
    // general format.
    if (isGeneralFormat)
    {
        if (usesDefaultProvider)
        {
            Assert.Equal(expectedUpper, i.ToString());
            Assert.Equal(expectedUpper, i.ToString((IFormatProvider)null));
        }
        Assert.Equal(expectedUpper, i.ToString(provider));
    }

    // Format-only overloads fall back to the current provider.
    if (usesDefaultProvider)
    {
        Assert.Equal(expectedUpper, i.ToString(formatUpper));
        Assert.Equal(expectedLower, i.ToString(formatLower));
        Assert.Equal(expectedUpper, i.ToString(formatUpper, null));
        Assert.Equal(expectedLower, i.ToString(formatLower, null));
    }

    // Full (format, provider) overload is exercised for every row.
    Assert.Equal(expectedUpper, i.ToString(formatUpper, provider));
    Assert.Equal(expectedLower, i.ToString(formatLower, provider));
}
[Fact]
public static void ToString_InvalidFormat_ThrowsFormatException()
{
    long i = 123;

    // "r"/"R" (round-trip) is only valid for floating-point types, and "Y"
    // is not a standard numeric format specifier at all — each must throw
    // from both the format-only and the (format, provider) overloads.
    foreach (string invalidFormat in new[] { "r", "R", "Y" })
    {
        Assert.Throws<FormatException>(() => i.ToString(invalidFormat));
        Assert.Throws<FormatException>(() => i.ToString(invalidFormat, null));
    }
}
// Produces { value, style, provider, expected } rows for the Parse theory.
public static IEnumerable<object[]> Parse_Valid_TestData()
{
    // Reuse all Int32 test data
    foreach (object[] objs in Int32Tests.Parse_Valid_TestData())
    {
        // Hex input is parsed as an unsigned bit pattern, so widen the Int32
        // expectation through uint to keep the same 32-bit pattern; otherwise
        // a plain sign-extending widen is correct.
        bool unsigned = (((NumberStyles)objs[1]) & NumberStyles.HexNumber) == NumberStyles.HexNumber;
        yield return new object[] { objs[0], objs[1], objs[2], unsigned ? (long)(uint)(int)objs[3] : (long)(int)objs[3] };
    }
    // All lengths decimal
    // Builds "1", "12", ..., "1234567890123456789" (digits wrap modulo 10)
    // and the matching numeric value, optionally negated.
    foreach (bool neg in new[] { false, true })
    {
        string s = neg ? "-" : "";
        long result = 0;
        for (int i = 1; i <= 19; i++)
        {
            result = (result * 10) + (i % 10);
            s += (i % 10).ToString();
            yield return new object[] { s, NumberStyles.Integer, null, neg ? result * -1 : result };
        }
    }
    // All lengths hexadecimal
    // Builds "1", "12", ..., 16 hex digits (wrapping modulo 16) plus the value.
    {
        string s = "";
        long result = 0;
        for (int i = 1; i <= 16; i++)
        {
            result = (result * 16) + (i % 16);
            s += (i % 16).ToString("X");
            yield return new object[] { s, NumberStyles.HexNumber, null, result };
        }
    }
    // And test boundary conditions for Int64
    // (min/max, surrounding whitespace, explicit '+', and hex patterns that
    // map onto MinValue / -1 via two's complement).
    yield return new object[] { "-9223372036854775808", NumberStyles.Integer, null, long.MinValue };
    yield return new object[] { "9223372036854775807", NumberStyles.Integer, null, long.MaxValue };
    yield return new object[] { "   -9223372036854775808   ", NumberStyles.Integer, null, long.MinValue };
    yield return new object[] { "   +9223372036854775807   ", NumberStyles.Integer, null, long.MaxValue };
    yield return new object[] { "7FFFFFFFFFFFFFFF", NumberStyles.HexNumber, null, long.MaxValue };
    yield return new object[] { "8000000000000000", NumberStyles.HexNumber, null, long.MinValue };
    yield return new object[] { "FFFFFFFFFFFFFFFF", NumberStyles.HexNumber, null, -1L };
    yield return new object[] { "   FFFFFFFFFFFFFFFF   ", NumberStyles.HexNumber, null, -1L };
}
[Theory]
[MemberData(nameof(Parse_Valid_TestData))]
public static void Parse_Valid(string value, NumberStyles style, IFormatProvider provider, long expected)
{
    long parsed;

    bool isDefaultStyle = style == NumberStyles.Integer;
    bool isDefaultProvider = provider == null;

    // Overloads taking neither style nor provider.
    if (isDefaultStyle && isDefaultProvider)
    {
        Assert.True(long.TryParse(value, out parsed));
        Assert.Equal(expected, parsed);

        Assert.Equal(expected, long.Parse(value));
    }

    // Overloads taking a style; a fresh NumberFormatInfo is substituted for
    // the null provider to hit the provider-taking overloads as well.
    if (isDefaultProvider)
    {
        Assert.Equal(expected, long.Parse(value, style));

        Assert.True(long.TryParse(value, style, new NumberFormatInfo(), out parsed));
        Assert.Equal(expected, parsed);
        Assert.Equal(expected, long.Parse(value, style, new NumberFormatInfo()));
    }

    // Overload taking only a provider.
    if (isDefaultStyle)
    {
        Assert.Equal(expected, long.Parse(value, provider));
    }

    // Overloads taking both style and provider are exercised for every row.
    Assert.True(long.TryParse(value, style, provider, out parsed));
    Assert.Equal(expected, parsed);
    Assert.Equal(expected, long.Parse(value, style, provider));
}
// Produces { value, style, provider, exceptionType } rows for the invalid-parse theory.
public static IEnumerable<object[]> Parse_Invalid_TestData()
{
    // Reuse the Int32 invalid-parse data, but drop the overflow rows whose
    // value (or non-numeric input) would not overflow an Int64 — those would
    // parse successfully (or fail differently) here.
    foreach (object[] objs in Int32Tests.Parse_Invalid_TestData())
    {
        bool expectsOverflow = (Type)objs[3] == typeof(OverflowException);
        bool wouldNotOverflowInt64 = !BigInteger.TryParse((string)objs[0], out BigInteger bi) ||
                                     (bi >= long.MinValue && bi <= long.MaxValue);

        if (!(expectsOverflow && wouldNotOverflowInt64))
        {
            yield return objs;
        }
    }
}
[Theory]
[MemberData(nameof(Parse_Invalid_TestData))]
public static void Parse_Invalid(string value, NumberStyles style, IFormatProvider provider, Type exceptionType)
{
    long parsed;

    bool isDefaultStyle = style == NumberStyles.Integer;
    bool isDefaultProvider = provider == null;

    // Overloads taking neither style nor provider: TryParse must fail and
    // zero the out value; Parse must throw the expected exception type.
    if (isDefaultStyle && isDefaultProvider)
    {
        Assert.False(long.TryParse(value, out parsed));
        Assert.Equal(default, parsed);

        Assert.Throws(exceptionType, () => long.Parse(value));
    }

    // Overloads taking a style; a fresh NumberFormatInfo is substituted for
    // the null provider to hit the provider-taking overloads as well.
    if (isDefaultProvider)
    {
        Assert.Throws(exceptionType, () => long.Parse(value, style));

        Assert.False(long.TryParse(value, style, new NumberFormatInfo(), out parsed));
        Assert.Equal(default, parsed);
        Assert.Throws(exceptionType, () => long.Parse(value, style, new NumberFormatInfo()));
    }

    // Overload taking only a provider.
    if (isDefaultStyle)
    {
        Assert.Throws(exceptionType, () => long.Parse(value, provider));
    }

    // Overloads taking both style and provider are exercised for every row.
    Assert.False(long.TryParse(value, style, provider, out parsed));
    Assert.Equal(default, parsed);
    Assert.Throws(exceptionType, () => long.Parse(value, style, provider));
}
[Theory]
[InlineData(NumberStyles.HexNumber | NumberStyles.AllowParentheses, null)]
[InlineData(unchecked((NumberStyles)0xFFFFFC00), "style")]
public static void TryParse_InvalidNumberStyle_ThrowsArgumentException(NumberStyles style, string paramName)
{
    long parsed = 0;

    // TryParse must reject the invalid style up front (before parsing) and
    // leave the out value at its default.
    AssertExtensions.Throws<ArgumentException>(paramName, () => long.TryParse("1", style, null, out parsed));
    Assert.Equal(default(long), parsed);

    // Both Parse overloads validate the style the same way.
    AssertExtensions.Throws<ArgumentException>(paramName, () => long.Parse("1", style));
    AssertExtensions.Throws<ArgumentException>(paramName, () => long.Parse("1", style, null));
}
}
}
| |
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using FluentAssertions;
using FluentAssertions.Extensions;
using JsonApiDotNetCore.Serialization.Objects;
using TestBuildingBlocks;
using Xunit;
namespace JsonApiDotNetCoreTests.IntegrationTests.QueryStrings.Sorting
{
/// <summary>
/// Integration tests for the JSON:API <c>sort</c> query string parameter, covering
/// primary/secondary endpoints, relationship scopes, count() expressions, error
/// responses for invalid usage, and the implicit sort-by-ID fallback.
/// </summary>
public sealed class SortTests : IClassFixture<IntegrationTestContext<TestableStartup<QueryStringDbContext>, QueryStringDbContext>>
{
    private readonly IntegrationTestContext<TestableStartup<QueryStringDbContext>, QueryStringDbContext> _testContext;
    private readonly QueryStringFakers _fakers = new();

    public SortTests(IntegrationTestContext<TestableStartup<QueryStringDbContext>, QueryStringDbContext> testContext)
    {
        _testContext = testContext;

        testContext.UseController<BlogPostsController>();
        testContext.UseController<BlogsController>();
        testContext.UseController<WebAccountsController>();
    }

    [Fact]
    public async Task Can_sort_in_primary_resources()
    {
        // Arrange
        List<BlogPost> posts = _fakers.BlogPost.Generate(3);
        posts[0].Caption = "B";
        posts[1].Caption = "A";
        posts[2].Caption = "C";

        await _testContext.RunOnDatabaseAsync(async dbContext =>
        {
            await dbContext.ClearTableAsync<BlogPost>();
            dbContext.Posts.AddRange(posts);
            await dbContext.SaveChangesAsync();
        });

        const string route = "/blogPosts?sort=caption";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);

        responseDocument.Data.ManyValue.Should().HaveCount(3);
        responseDocument.Data.ManyValue[0].Id.Should().Be(posts[1].StringId);
        responseDocument.Data.ManyValue[1].Id.Should().Be(posts[0].StringId);
        responseDocument.Data.ManyValue[2].Id.Should().Be(posts[2].StringId);
    }

    [Fact]
    public async Task Cannot_sort_in_single_primary_resource()
    {
        // Arrange
        BlogPost post = _fakers.BlogPost.Generate();

        await _testContext.RunOnDatabaseAsync(async dbContext =>
        {
            dbContext.Posts.Add(post);
            await dbContext.SaveChangesAsync();
        });

        string route = $"/blogPosts/{post.StringId}?sort=id";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.BadRequest);

        responseDocument.Errors.Should().HaveCount(1);

        ErrorObject error = responseDocument.Errors[0];
        error.StatusCode.Should().Be(HttpStatusCode.BadRequest);
        error.Title.Should().Be("The specified sort is invalid.");
        error.Detail.Should().Be("This query string parameter can only be used on a collection of resources (not on a single resource).");
        error.Source.Parameter.Should().Be("sort");
    }

    [Fact]
    public async Task Can_sort_in_secondary_resources()
    {
        // Arrange
        Blog blog = _fakers.Blog.Generate();
        blog.Posts = _fakers.BlogPost.Generate(3);
        blog.Posts[0].Caption = "B";
        blog.Posts[1].Caption = "A";
        blog.Posts[2].Caption = "C";

        await _testContext.RunOnDatabaseAsync(async dbContext =>
        {
            dbContext.Blogs.Add(blog);
            await dbContext.SaveChangesAsync();
        });

        string route = $"/blogs/{blog.StringId}/posts?sort=caption";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);

        responseDocument.Data.ManyValue.Should().HaveCount(3);
        responseDocument.Data.ManyValue[0].Id.Should().Be(blog.Posts[1].StringId);
        responseDocument.Data.ManyValue[1].Id.Should().Be(blog.Posts[0].StringId);
        responseDocument.Data.ManyValue[2].Id.Should().Be(blog.Posts[2].StringId);
    }

    [Fact]
    public async Task Cannot_sort_in_single_secondary_resource()
    {
        // Arrange
        BlogPost post = _fakers.BlogPost.Generate();

        await _testContext.RunOnDatabaseAsync(async dbContext =>
        {
            dbContext.Posts.Add(post);
            await dbContext.SaveChangesAsync();
        });

        string route = $"/blogPosts/{post.StringId}/author?sort=id";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.BadRequest);

        responseDocument.Errors.Should().HaveCount(1);

        ErrorObject error = responseDocument.Errors[0];
        error.StatusCode.Should().Be(HttpStatusCode.BadRequest);
        error.Title.Should().Be("The specified sort is invalid.");
        error.Detail.Should().Be("This query string parameter can only be used on a collection of resources (not on a single resource).");
        error.Source.Parameter.Should().Be("sort");
    }

    [Fact]
    public async Task Can_sort_on_OneToMany_relationship()
    {
        // Arrange
        List<Blog> blogs = _fakers.Blog.Generate(2);
        blogs[0].Posts = _fakers.BlogPost.Generate(2);
        blogs[1].Posts = _fakers.BlogPost.Generate(1);

        await _testContext.RunOnDatabaseAsync(async dbContext =>
        {
            await dbContext.ClearTableAsync<Blog>();
            dbContext.Blogs.AddRange(blogs);
            await dbContext.SaveChangesAsync();
        });

        const string route = "/blogs?sort=count(posts)";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);

        responseDocument.Data.ManyValue.Should().HaveCount(2);
        responseDocument.Data.ManyValue[0].Id.Should().Be(blogs[1].StringId);
        responseDocument.Data.ManyValue[1].Id.Should().Be(blogs[0].StringId);
    }

    [Fact]
    public async Task Can_sort_on_ManyToMany_relationship()
    {
        // Arrange
        List<BlogPost> posts = _fakers.BlogPost.Generate(2);
        posts[0].Labels = _fakers.Label.Generate(1).ToHashSet();
        posts[1].Labels = _fakers.Label.Generate(2).ToHashSet();

        await _testContext.RunOnDatabaseAsync(async dbContext =>
        {
            await dbContext.ClearTableAsync<BlogPost>();
            dbContext.Posts.AddRange(posts);
            await dbContext.SaveChangesAsync();
        });

        const string route = "/blogPosts?sort=-count(labels)";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);

        responseDocument.Data.ManyValue.Should().HaveCount(2);
        responseDocument.Data.ManyValue[0].Id.Should().Be(posts[1].StringId);
        responseDocument.Data.ManyValue[1].Id.Should().Be(posts[0].StringId);
    }

    [Fact]
    public async Task Can_sort_in_scope_of_OneToMany_relationship()
    {
        // Arrange
        WebAccount account = _fakers.WebAccount.Generate();
        account.Posts = _fakers.BlogPost.Generate(3);
        account.Posts[0].Caption = "B";
        account.Posts[1].Caption = "A";
        account.Posts[2].Caption = "C";

        await _testContext.RunOnDatabaseAsync(async dbContext =>
        {
            dbContext.Accounts.Add(account);
            await dbContext.SaveChangesAsync();
        });

        string route = $"/webAccounts/{account.StringId}?include=posts&sort[posts]=caption";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);

        responseDocument.Data.SingleValue.Should().NotBeNull();
        responseDocument.Data.SingleValue.Id.Should().Be(account.StringId);

        responseDocument.Included.Should().HaveCount(3);
        responseDocument.Included[0].Id.Should().Be(account.Posts[1].StringId);
        responseDocument.Included[1].Id.Should().Be(account.Posts[0].StringId);
        responseDocument.Included[2].Id.Should().Be(account.Posts[2].StringId);
    }

    [Fact]
    public async Task Can_sort_in_scope_of_OneToMany_relationship_on_secondary_endpoint()
    {
        // Arrange
        Blog blog = _fakers.Blog.Generate();
        blog.Owner = _fakers.WebAccount.Generate();
        blog.Owner.Posts = _fakers.BlogPost.Generate(3);
        blog.Owner.Posts[0].Caption = "B";
        blog.Owner.Posts[1].Caption = "A";
        blog.Owner.Posts[2].Caption = "C";

        await _testContext.RunOnDatabaseAsync(async dbContext =>
        {
            dbContext.Blogs.Add(blog);
            await dbContext.SaveChangesAsync();
        });

        string route = $"/blogs/{blog.StringId}/owner?include=posts&sort[posts]=caption";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);

        responseDocument.Data.SingleValue.Should().NotBeNull();
        responseDocument.Data.SingleValue.Id.Should().Be(blog.Owner.StringId);

        responseDocument.Included.Should().HaveCount(3);
        responseDocument.Included[0].Id.Should().Be(blog.Owner.Posts[1].StringId);
        responseDocument.Included[1].Id.Should().Be(blog.Owner.Posts[0].StringId);
        responseDocument.Included[2].Id.Should().Be(blog.Owner.Posts[2].StringId);
    }

    [Fact]
    public async Task Can_sort_in_scope_of_ManyToMany_relationship()
    {
        // Arrange
        BlogPost post = _fakers.BlogPost.Generate();
        post.Labels = _fakers.Label.Generate(3).ToHashSet();
        post.Labels.ElementAt(0).Name = "B";
        post.Labels.ElementAt(1).Name = "A";
        post.Labels.ElementAt(2).Name = "C";

        await _testContext.RunOnDatabaseAsync(async dbContext =>
        {
            dbContext.Posts.Add(post);
            await dbContext.SaveChangesAsync();
        });

        string route = $"/blogPosts/{post.StringId}?include=labels&sort[labels]=name";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);

        responseDocument.Data.SingleValue.Should().NotBeNull();
        responseDocument.Data.SingleValue.Id.Should().Be(post.StringId);

        responseDocument.Included.Should().HaveCount(3);
        responseDocument.Included[0].Id.Should().Be(post.Labels.ElementAt(1).StringId);
        responseDocument.Included[1].Id.Should().Be(post.Labels.ElementAt(0).StringId);
        responseDocument.Included[2].Id.Should().Be(post.Labels.ElementAt(2).StringId);
    }

    [Fact]
    public async Task Can_sort_on_multiple_fields_in_multiple_scopes()
    {
        // Arrange
        List<Blog> blogs = _fakers.Blog.Generate(2);
        blogs[0].Title = "Z";
        blogs[1].Title = "Y";

        blogs[0].Posts = _fakers.BlogPost.Generate(4);
        blogs[0].Posts[0].Caption = "B";
        blogs[0].Posts[1].Caption = "A";
        blogs[0].Posts[2].Caption = "A";
        blogs[0].Posts[3].Caption = "C";
        blogs[0].Posts[0].Url = "";
        blogs[0].Posts[1].Url = "www.some2.com";
        blogs[0].Posts[2].Url = "www.some1.com";
        blogs[0].Posts[3].Url = "";

        blogs[0].Posts[0].Comments = _fakers.Comment.Generate(3).ToHashSet();
        blogs[0].Posts[0].Comments.ElementAt(0).CreatedAt = 1.January(2015);
        blogs[0].Posts[0].Comments.ElementAt(1).CreatedAt = 1.January(2014);
        blogs[0].Posts[0].Comments.ElementAt(2).CreatedAt = 1.January(2016);

        await _testContext.RunOnDatabaseAsync(async dbContext =>
        {
            await dbContext.ClearTableAsync<Blog>();
            dbContext.Blogs.AddRange(blogs);
            await dbContext.SaveChangesAsync();
        });

        const string route = "/blogs?include=posts.comments&sort=title&sort[posts]=caption,url&sort[posts.comments]=-createdAt";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);

        responseDocument.Data.ManyValue.Should().HaveCount(2);
        responseDocument.Data.ManyValue[0].Id.Should().Be(blogs[1].StringId);
        responseDocument.Data.ManyValue[1].Id.Should().Be(blogs[0].StringId);

        responseDocument.Included.Should().HaveCount(7);

        responseDocument.Included[0].Type.Should().Be("blogPosts");
        responseDocument.Included[0].Id.Should().Be(blogs[0].Posts[2].StringId);

        responseDocument.Included[1].Type.Should().Be("blogPosts");
        responseDocument.Included[1].Id.Should().Be(blogs[0].Posts[1].StringId);

        responseDocument.Included[2].Type.Should().Be("blogPosts");
        responseDocument.Included[2].Id.Should().Be(blogs[0].Posts[0].StringId);

        responseDocument.Included[3].Type.Should().Be("comments");
        responseDocument.Included[3].Id.Should().Be(blogs[0].Posts[0].Comments.ElementAt(2).StringId);

        responseDocument.Included[4].Type.Should().Be("comments");
        responseDocument.Included[4].Id.Should().Be(blogs[0].Posts[0].Comments.ElementAt(0).StringId);

        responseDocument.Included[5].Type.Should().Be("comments");
        responseDocument.Included[5].Id.Should().Be(blogs[0].Posts[0].Comments.ElementAt(1).StringId);

        responseDocument.Included[6].Type.Should().Be("blogPosts");
        responseDocument.Included[6].Id.Should().Be(blogs[0].Posts[3].StringId);
    }

    [Fact]
    public async Task Can_sort_on_ManyToOne_relationship()
    {
        // Arrange
        List<BlogPost> posts = _fakers.BlogPost.Generate(2);
        posts[0].Author = _fakers.WebAccount.Generate();
        posts[1].Author = _fakers.WebAccount.Generate();
        posts[0].Author.DisplayName = "Conner";
        posts[1].Author.DisplayName = "Smith";

        await _testContext.RunOnDatabaseAsync(async dbContext =>
        {
            await dbContext.ClearTableAsync<BlogPost>();
            dbContext.Posts.AddRange(posts);
            await dbContext.SaveChangesAsync();
        });

        const string route = "/blogPosts?sort=-author.displayName";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);

        responseDocument.Data.ManyValue.Should().HaveCount(2);
        responseDocument.Data.ManyValue[0].Id.Should().Be(posts[1].StringId);
        responseDocument.Data.ManyValue[1].Id.Should().Be(posts[0].StringId);
    }

    [Fact]
    public async Task Can_sort_in_multiple_scopes()
    {
        // Arrange
        List<Blog> blogs = _fakers.Blog.Generate(2);
        blogs[0].Title = "Cooking";
        blogs[1].Title = "Technology";

        blogs[1].Owner = _fakers.WebAccount.Generate();
        blogs[1].Owner.Posts = _fakers.BlogPost.Generate(2);
        blogs[1].Owner.Posts[0].Caption = "One";
        blogs[1].Owner.Posts[1].Caption = "Two";

        blogs[1].Owner.Posts[1].Comments = _fakers.Comment.Generate(2).ToHashSet();
        blogs[1].Owner.Posts[1].Comments.ElementAt(0).CreatedAt = 1.January(2000);
        // Fixed: the second date was previously assigned to ElementAt(0) as well,
        // leaving ElementAt(1).CreatedAt at its faker-generated value and making
        // the descending-createdAt assertions below nondeterministic.
        blogs[1].Owner.Posts[1].Comments.ElementAt(1).CreatedAt = 10.January(2010);

        await _testContext.RunOnDatabaseAsync(async dbContext =>
        {
            await dbContext.ClearTableAsync<Blog>();
            dbContext.Blogs.AddRange(blogs);
            await dbContext.SaveChangesAsync();
        });

        const string route = "/blogs?include=owner.posts.comments&sort=-title&sort[owner.posts]=-caption&sort[owner.posts.comments]=-createdAt";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);

        responseDocument.Data.ManyValue.Should().HaveCount(2);
        responseDocument.Data.ManyValue[0].Id.Should().Be(blogs[1].StringId);
        responseDocument.Data.ManyValue[1].Id.Should().Be(blogs[0].StringId);

        responseDocument.Included.Should().HaveCount(5);
        responseDocument.Included[0].Id.Should().Be(blogs[1].Owner.StringId);
        responseDocument.Included[1].Id.Should().Be(blogs[1].Owner.Posts[1].StringId);
        responseDocument.Included[2].Id.Should().Be(blogs[1].Owner.Posts[1].Comments.ElementAt(1).StringId);
        responseDocument.Included[3].Id.Should().Be(blogs[1].Owner.Posts[1].Comments.ElementAt(0).StringId);
        responseDocument.Included[4].Id.Should().Be(blogs[1].Owner.Posts[0].StringId);
    }

    [Fact]
    public async Task Cannot_sort_in_unknown_scope()
    {
        // Arrange
        string route = $"/webAccounts?sort[{Unknown.Relationship}]=id";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.BadRequest);

        responseDocument.Errors.Should().HaveCount(1);

        ErrorObject error = responseDocument.Errors[0];
        error.StatusCode.Should().Be(HttpStatusCode.BadRequest);
        error.Title.Should().Be("The specified sort is invalid.");
        error.Detail.Should().Be($"Relationship '{Unknown.Relationship}' does not exist on resource 'webAccounts'.");
        error.Source.Parameter.Should().Be($"sort[{Unknown.Relationship}]");
    }

    [Fact]
    public async Task Cannot_sort_in_unknown_nested_scope()
    {
        // Arrange
        string route = $"/webAccounts?sort[posts.{Unknown.Relationship}]=id";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.BadRequest);

        responseDocument.Errors.Should().HaveCount(1);

        ErrorObject error = responseDocument.Errors[0];
        error.StatusCode.Should().Be(HttpStatusCode.BadRequest);
        error.Title.Should().Be("The specified sort is invalid.");
        error.Detail.Should().Be($"Relationship '{Unknown.Relationship}' in 'posts.{Unknown.Relationship}' does not exist on resource 'blogPosts'.");
        error.Source.Parameter.Should().Be($"sort[posts.{Unknown.Relationship}]");
    }

    [Fact]
    public async Task Cannot_sort_on_attribute_with_blocked_capability()
    {
        // Arrange
        const string route = "/webAccounts?sort=dateOfBirth";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.BadRequest);

        responseDocument.Errors.Should().HaveCount(1);

        ErrorObject error = responseDocument.Errors[0];
        error.StatusCode.Should().Be(HttpStatusCode.BadRequest);
        error.Title.Should().Be("Sorting on the requested attribute is not allowed.");
        error.Detail.Should().Be("Sorting on attribute 'dateOfBirth' is not allowed.");
        error.Source.Parameter.Should().Be("sort");
    }

    [Fact]
    public async Task Can_sort_descending_by_ID()
    {
        // Arrange
        List<WebAccount> accounts = _fakers.WebAccount.Generate(3);
        accounts[0].Id = 3000;
        accounts[1].Id = 2000;
        accounts[2].Id = 1000;

        accounts[0].DisplayName = "B";
        accounts[1].DisplayName = "A";
        accounts[2].DisplayName = "A";

        await _testContext.RunOnDatabaseAsync(async dbContext =>
        {
            await dbContext.ClearTableAsync<WebAccount>();
            dbContext.Accounts.AddRange(accounts);
            await dbContext.SaveChangesAsync();
        });

        const string route = "/webAccounts?sort=displayName,-id";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);

        responseDocument.Data.ManyValue.Should().HaveCount(3);
        responseDocument.Data.ManyValue[0].Id.Should().Be(accounts[1].StringId);
        responseDocument.Data.ManyValue[1].Id.Should().Be(accounts[2].StringId);
        responseDocument.Data.ManyValue[2].Id.Should().Be(accounts[0].StringId);
    }

    [Fact]
    public async Task Sorts_by_ID_if_none_specified()
    {
        // Arrange
        List<WebAccount> accounts = _fakers.WebAccount.Generate(4);
        accounts[0].Id = 300;
        accounts[1].Id = 200;
        accounts[2].Id = 100;
        accounts[3].Id = 400;

        await _testContext.RunOnDatabaseAsync(async dbContext =>
        {
            await dbContext.ClearTableAsync<WebAccount>();
            dbContext.Accounts.AddRange(accounts);
            await dbContext.SaveChangesAsync();
        });

        const string route = "/webAccounts";

        // Act
        (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecuteGetAsync<Document>(route);

        // Assert
        httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);

        responseDocument.Data.ManyValue.Should().HaveCount(4);
        responseDocument.Data.ManyValue[0].Id.Should().Be(accounts[2].StringId);
        responseDocument.Data.ManyValue[1].Id.Should().Be(accounts[1].StringId);
        responseDocument.Data.ManyValue[2].Id.Should().Be(accounts[0].StringId);
        responseDocument.Data.ManyValue[3].Id.Should().Be(accounts[3].StringId);
    }
}
}
| |
namespace ATABBI.TexE
{
partial class DocumentExplorer
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
    // Release managed state only when disposing deterministically and the
    // designer actually created a component container.
    if (disposing && (components != null))
    {
        components.Dispose();
    }
    base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
// Designer-generated: statement order (SuspendLayout/ResumeLayout pairing, control
// creation before property assignment) is significant — do not hand-restructure.
private void InitializeComponent()
{
    this.components = new System.ComponentModel.Container();
    System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(DocumentExplorer));
    this.treeView1 = new System.Windows.Forms.TreeView();
    this.imageList1 = new System.Windows.Forms.ImageList(this.components);
    this.toolStrip1 = new System.Windows.Forms.ToolStrip();
    this.toolStripButton1 = new System.Windows.Forms.ToolStripButton();
    this.toolStrip1.SuspendLayout();
    this.SuspendLayout();
    //
    // treeView1
    //
    this.treeView1.Dock = System.Windows.Forms.DockStyle.Fill;
    this.treeView1.HideSelection = false;
    this.treeView1.HotTracking = true;
    this.treeView1.ImageIndex = 0;
    this.treeView1.ImageList = this.imageList1;
    this.treeView1.Indent = 19;
    this.treeView1.Location = new System.Drawing.Point(10, 35);
    this.treeView1.Name = "treeView1";
    this.treeView1.SelectedImageIndex = 0;
    this.treeView1.ShowNodeToolTips = true;
    this.treeView1.Size = new System.Drawing.Size(225, 277);
    this.treeView1.TabIndex = 0;
    this.treeView1.AfterExpand += new System.Windows.Forms.TreeViewEventHandler(this.treeView1_AfterExpand);
    this.treeView1.AfterSelect += new System.Windows.Forms.TreeViewEventHandler(this.treeView1_AfterSelect);
    this.treeView1.MouseDoubleClick += new System.Windows.Forms.MouseEventHandler(this.treeView1_MouseDoubleClick);
    //
    // imageList1
    //
    // Icon images are loaded from the embedded resource stream; the SetKeyName calls
    // below map string keys onto indices 0-229 so tree nodes can reference icons by name.
    this.imageList1.ImageStream = ((System.Windows.Forms.ImageListStreamer)(resources.GetObject("imageList1.ImageStream")));
    this.imageList1.TransparentColor = System.Drawing.Color.Transparent;
    this.imageList1.Images.SetKeyName(0, "");
    this.imageList1.Images.SetKeyName(1, "BarSeries_12624.ico");
    this.imageList1.Images.SetKeyName(2, "Branch_13220.ico");
    this.imageList1.Images.SetKeyName(3, "Bullets_11690.ico");
    this.imageList1.Images.SetKeyName(4, "Clearallrequests_8816.ico");
    this.imageList1.Images.SetKeyName(5, "ColorSelectionTool_202.ico");
    this.imageList1.Images.SetKeyName(6, "DGSL_file_type.ico");
    this.imageList1.Images.SetKeyName(7, "Editdatasetwithdesigner_8449.ico");
    this.imageList1.Images.SetKeyName(8, "EllipseElement_10707.ico");
    this.imageList1.Images.SetKeyName(9, "Enumeration_8335.ico");
    this.imageList1.Images.SetKeyName(10, "ExtensionManager_vsix_OSReg.ico");
    this.imageList1.Images.SetKeyName(11, "ExtensionManager_vsixproject.ico");
    this.imageList1.Images.SetKeyName(12, "filedownload.ico");
    this.imageList1.Images.SetKeyName(13, "FillTool_204.ico");
    this.imageList1.Images.SetKeyName(14, "Find_5650.ico");
    this.imageList1.Images.SetKeyName(15, "Font_6007.ico");
    this.imageList1.Images.SetKeyName(16, "GotoNextRow_289.ico");
    this.imageList1.Images.SetKeyName(17, "GoToSourceCode_6546.ico");
    this.imageList1.Images.SetKeyName(18, "GroupBy_284.ico");
    this.imageList1.Images.SetKeyName(19, "IntelliTrace.ico");
    this.imageList1.Images.SetKeyName(20, "LaunchConditionsEditor_259.ico");
    this.imageList1.Images.SetKeyName(21, "LayerDiagramfile_layerdiagram_13450.ico");
    this.imageList1.Images.SetKeyName(22, "Measure.ico");
    this.imageList1.Images.SetKeyName(23, "MemoryWindow_6537.ico");
    this.imageList1.Images.SetKeyName(24, "MovePrevious_7195.ico");
    this.imageList1.Images.SetKeyName(25, "NavigateBackwards_6270.ico");
    this.imageList1.Images.SetKeyName(26, "NavigateForward_6271.ico");
    this.imageList1.Images.SetKeyName(27, "NewSolutionFolder_6289.ico");
    this.imageList1.Images.SetKeyName(28, "OneLevelUp_5834.ico");
    this.imageList1.Images.SetKeyName(29, "Open_6296.ico");
    this.imageList1.Images.SetKeyName(30, "ParametersInfo_2423.ico");
    this.imageList1.Images.SetKeyName(31, "PencilTool_206.ico");
    this.imageList1.Images.SetKeyName(32, "PrepareProcess.ico");
    this.imageList1.Images.SetKeyName(33, "Print_11009.ico");
    this.imageList1.Images.SetKeyName(34, "Relation_8467.ico");
    this.imageList1.Images.SetKeyName(35, "Rules.ico");
    this.imageList1.Images.SetKeyName(36, "Save_6530.ico");
    this.imageList1.Images.SetKeyName(37, "ShowDiagramPane_280.ico");
    this.imageList1.Images.SetKeyName(38, "ShowPerformanceSession_7015.ico");
    this.imageList1.Images.SetKeyName(39, "ShowResultsPane_282.ico");
    this.imageList1.Images.SetKeyName(40, "ShowStartPage_6283.ico");
    this.imageList1.Images.SetKeyName(41, "Strings_7959.ico");
    this.imageList1.Images.SetKeyName(42, "ToggleOfficeKeyboardScheme_7508.ico");
    this.imageList1.Images.SetKeyName(43, "Union_534.ico");
    this.imageList1.Images.SetKeyName(44, "XSLTTransformfile_829.ico");
    this.imageList1.Images.SetKeyName(45, "Actions_5847.ico");
    this.imageList1.Images.SetKeyName(46, "ActiveServerPage(asp)_11272.ico");
    this.imageList1.Images.SetKeyName(47, "AllLoadedTests_8546.ico");
    this.imageList1.Images.SetKeyName(48, "Assembly_6212.ico");
    this.imageList1.Images.SetKeyName(49, "Assembly-excluded_5817.ico");
    this.imageList1.Images.SetKeyName(50, "Association.ico");
    this.imageList1.Images.SetKeyName(51, "Association_12860.ico");
    this.imageList1.Images.SetKeyName(52, "AssociationEditor_5849.ico");
    this.imageList1.Images.SetKeyName(53, "Battery.ico");
    this.imageList1.Images.SetKeyName(54, "bios.ico");
    this.imageList1.Images.SetKeyName(55, "BorderElement_10699.ico");
    this.imageList1.Images.SetKeyName(56, "BrokenlinktoFile_431_24.ico");
    this.imageList1.Images.SetKeyName(57, "CABProject_5818.ico");
    this.imageList1.Images.SetKeyName(58, "cctprojectnode_ID09.ico");
    this.imageList1.Images.SetKeyName(59, "CheckBox_669.ico");
    this.imageList1.Images.SetKeyName(60, "ClassIcon.ico");
    this.imageList1.Images.SetKeyName(61, "CloudStorageContainer.ico");
    this.imageList1.Images.SetKeyName(62, "CloudStorageContainerCollection.ico");
    this.imageList1.Images.SetKeyName(63, "CompiledMSHelpFile_11556.ico");
    this.imageList1.Images.SetKeyName(64, "componentcategory.ico");
    this.imageList1.Images.SetKeyName(65, "ComponentDiagramFile_componentdiagram_13449.ico");
    this.imageList1.Images.SetKeyName(66, "computersystemproduct.ico");
    this.imageList1.Images.SetKeyName(67, "Conditions_5855.ico");
    this.imageList1.Images.SetKeyName(68, "Counter_5730.ico");
    this.imageList1.Images.SetKeyName(69, "CPPWMIEventProvider_7864.ico");
    this.imageList1.Images.SetKeyName(70, "CustomActions_6334.ico");
    this.imageList1.Images.SetKeyName(71, "CustomActionsEditor_5850.ico");
    this.imageList1.Images.SetKeyName(72, "DatabaseProject_7342.ico");
    this.imageList1.Images.SetKeyName(73, "DataGrid_674.ico");
    this.imageList1.Images.SetKeyName(74, "DataTable_8468.ico");
    this.imageList1.Images.SetKeyName(75, "DBSchema_12823.ico");
    this.imageList1.Images.SetKeyName(76, "DeadLetterMessages_5733.ico");
    this.imageList1.Images.SetKeyName(77, "Delegate_8339.ico");
    this.imageList1.Images.SetKeyName(78, "DeploymentDiagram(SubsystemMapper)_11298.ico");
    this.imageList1.Images.SetKeyName(79, "Diagram_8283.ico");
    this.imageList1.Images.SetKeyName(80, "DialogGroup_5846.ico");
    this.imageList1.Images.SetKeyName(81, "DisplayConfiguration.ico");
    this.imageList1.Images.SetKeyName(82, "dmaChannel.ico");
    this.imageList1.Images.SetKeyName(83, "DriverTestCancelledResult.ico");
    this.imageList1.Images.SetKeyName(84, "DriverTestPassingResult.ico");
    this.imageList1.Images.SetKeyName(85, "drivervxd.ico");
    this.imageList1.Images.SetKeyName(86, "Error_6206.ico");
    this.imageList1.Images.SetKeyName(87, "EventLog_5735.ico");
    this.imageList1.Images.SetKeyName(88, "EventLogFailureAudit_5737.ico");
    this.imageList1.Images.SetKeyName(89, "EventLogSuccessAudit_5739.ico");
    this.imageList1.Images.SetKeyName(90, "Extend_13492.ico");
    this.imageList1.Images.SetKeyName(91, "ExtendedStoredProcedure_8284.ico");
    this.imageList1.Images.SetKeyName(92, "FieldsHeader.ico");
    this.imageList1.Images.SetKeyName(93, "File-exclude_5820.ico");
    this.imageList1.Images.SetKeyName(94, "FileSystemEditor_5852.ico");
    this.imageList1.Images.SetKeyName(95, "FloppyDrive.ico");
    this.imageList1.Images.SetKeyName(96, "Folder(special)_5843.ico");
    this.imageList1.Images.SetKeyName(97, "Folder(special-open)_5844.ico");
    this.imageList1.Images.SetKeyName(98, "Folder_6221.ico");
    this.imageList1.Images.SetKeyName(99, "Folder_6222.ico");
    this.imageList1.Images.SetKeyName(100, "GenericVSEditor_9905.ico");
    this.imageList1.Images.SetKeyName(101, "GenericVSProject_9906.ico");
    this.imageList1.Images.SetKeyName(102, "GenericVSToolWindowRepresentation_9908.ico");
    this.imageList1.Images.SetKeyName(103, "GlobalApplicationClass(asax)_443.ico");
    this.imageList1.Images.SetKeyName(104, "HardDrive_9462.ico");
    this.imageList1.Images.SetKeyName(105, "HiddenFolder_427.ico");
    this.imageList1.Images.SetKeyName(106, "HiddenFolder_428.ico");
    this.imageList1.Images.SetKeyName(107, "HTMLPage(HTM)_825.ico");
    this.imageList1.Images.SetKeyName(108, "Index_8287.ico");
    this.imageList1.Images.SetKeyName(109, "infrareddevice.ico");
    this.imageList1.Images.SetKeyName(110, "InputParameter_8288.ico");
    this.imageList1.Images.SetKeyName(111, "Instance_5746.ico");
    this.imageList1.Images.SetKeyName(112, "Interface_612.ico");
    this.imageList1.Images.SetKeyName(113, "JournalMessages_5742.ico");
    this.imageList1.Images.SetKeyName(114, "KeyOutput_8167.ico");
    this.imageList1.Images.SetKeyName(115, "Library_6213.ico");
    this.imageList1.Images.SetKeyName(116, "LinkedServer_12789.ico");
    this.imageList1.Images.SetKeyName(117, "ListsofTests_8643.ico");
    this.imageList1.Images.SetKeyName(118, "ListView_687.ico");
    this.imageList1.Images.SetKeyName(119, "LOBSystemInstance.ico");
    this.imageList1.Images.SetKeyName(120, "LockControls_322.ico");
    this.imageList1.Images.SetKeyName(121, "logicalmemoryconfiguration.ico");
    this.imageList1.Images.SetKeyName(122, "Login_6031.ico");
    this.imageList1.Images.SetKeyName(123, "LoginScreen_7349.ico");
    this.imageList1.Images.SetKeyName(124, "MainMenuControl_688.ico");
    this.imageList1.Images.SetKeyName(125, "MasterPage_6478.ico");
    this.imageList1.Images.SetKeyName(126, "memoryarray.ico");
    this.imageList1.Images.SetKeyName(127, "memorydevice.ico");
    this.imageList1.Images.SetKeyName(128, "MergeModule-exclude_5824.ico");
    this.imageList1.Images.SetKeyName(129, "MergeModuleReference_6335.ico");
    this.imageList1.Images.SetKeyName(130, "MergeModuleReference-excluded_6336.ico");
    this.imageList1.Images.SetKeyName(131, "Method_636.ico");
    this.imageList1.Images.SetKeyName(132, "MethodInstance.ico");
    this.imageList1.Images.SetKeyName(133, "ModelingProject_13455.ico");
    this.imageList1.Images.SetKeyName(134, "motherboarddevice.ico");
    this.imageList1.Images.SetKeyName(135, "MSHelpCollectionDefinitionFile_11560.ico");
    this.imageList1.Images.SetKeyName(136, "MSNETFrameworkDependencies_9538.ico");
    this.imageList1.Images.SetKeyName(137, "MultipleOutput-exclude_5825.ico");
    this.imageList1.Images.SetKeyName(138, "NegativeAcknowledgementMessage_5745.ico");
    this.imageList1.Images.SetKeyName(139, "networkadapter.ico");
    this.imageList1.Images.SetKeyName(140, "networkadapterconfiguration.ico");
    this.imageList1.Images.SetKeyName(141, "NetworkMixnode_8709.ico");
    this.imageList1.Images.SetKeyName(142, "ODBCAttribute.ico");
    this.imageList1.Images.SetKeyName(143, "OutputParameter_8289.ico");
    this.imageList1.Images.SetKeyName(144, "PageFile.ico");
    this.imageList1.Images.SetKeyName(145, "ParallelPort.ico");
    this.imageList1.Images.SetKeyName(146, "patchpackage.ico");
    this.imageList1.Images.SetKeyName(147, "Permission_12796.ico");
    this.imageList1.Images.SetKeyName(148, "pnpentity.ico");
    this.imageList1.Images.SetKeyName(149, "PositiveAcknowledgementMessage_5748.ico");
    this.imageList1.Images.SetKeyName(150, "PotsModem.ico");
    this.imageList1.Images.SetKeyName(151, "PrivateQueue_5749.ico");
    this.imageList1.Images.SetKeyName(152, "Procedure_8937.ico");
    this.imageList1.Images.SetKeyName(153, "processes_5760.ico");
    this.imageList1.Images.SetKeyName(154, "Processor.ico");
    this.imageList1.Images.SetKeyName(155, "PropertyIcon.ico");
    this.imageList1.Images.SetKeyName(156, "PublicQueue_5750.ico");
    this.imageList1.Images.SetKeyName(157, "RegistryEditor_5838.ico");
    this.imageList1.Images.SetKeyName(158, "RegistryValueforBinaryType_5839.ico");
    this.imageList1.Images.SetKeyName(159, "RegistryValueforStringType_5840.ico");
    this.imageList1.Images.SetKeyName(160, "Reports-collapsed_12995.ico");
    this.imageList1.Images.SetKeyName(161, "ReturnValue_8291.ico");
    this.imageList1.Images.SetKeyName(162, "RolesNode_Valid_Closed.ico");
    this.imageList1.Images.SetKeyName(163, "ScriptFile_452.ico");
    this.imageList1.Images.SetKeyName(164, "scsicontroller.ico");
    this.imageList1.Images.SetKeyName(165, "SequenceDiagramFile_sequencediagram_13452.ico");
    this.imageList1.Images.SetKeyName(166, "Server_5720.ico");
    this.imageList1.Images.SetKeyName(167, "ServerProject.ico");
    this.imageList1.Images.SetKeyName(168, "ServicePause_5722.ico");
    this.imageList1.Images.SetKeyName(169, "Services_5724.ico");
    this.imageList1.Images.SetKeyName(170, "ServicesStop_5725.ico");
    this.imageList1.Images.SetKeyName(171, "ServiceStart_5723.ico");
    this.imageList1.Images.SetKeyName(172, "ServiceUnknown_5726.ico");
    this.imageList1.Images.SetKeyName(173, "Setup_6331.ico");
    this.imageList1.Images.SetKeyName(174, "SetupProjectWizard_5827.ico");
    this.imageList1.Images.SetKeyName(175, "setup-v.ico");
    this.imageList1.Images.SetKeyName(176, "Shortcut_8169.ico");
    this.imageList1.Images.SetKeyName(177, "SingleMessage_5727.ico");
    this.imageList1.Images.SetKeyName(178, "SingleOutput_8170.ico");
    this.imageList1.Images.SetKeyName(179, "SingleOutput-exclude_5830.ico");
    this.imageList1.Images.SetKeyName(180, "SoftwareDefinitionModel_11321.ico");
    this.imageList1.Images.SetKeyName(181, "Soundfile_461.ico");
    this.imageList1.Images.SetKeyName(182, "SQLServer_5728.ico");
    this.imageList1.Images.SetKeyName(183, "StyleSheet(css)_7483.ico");
    this.imageList1.Images.SetKeyName(184, "Table_748.ico");
    this.imageList1.Images.SetKeyName(185, "tapedrive.ico");
    this.imageList1.Images.SetKeyName(186, "TeamProjectCollection_12999.ico");
    this.imageList1.Images.SetKeyName(187, "TestsNotinaList_8548.ico");
    this.imageList1.Images.SetKeyName(188, "TFSServer_13310.ico");
    this.imageList1.Images.SetKeyName(189, "ToolBarControl_710.ico");
    this.imageList1.Images.SetKeyName(190, "Type_527.ico");
    this.imageList1.Images.SetKeyName(191, "TypeDefinition_521.ico");
    this.imageList1.Images.SetKeyName(192, "UMLModelFile_13454.ico");
    this.imageList1.Images.SetKeyName(193, "uninterruptablepowersupply.ico");
    this.imageList1.Images.SetKeyName(194, "UniqueKey_8270.ico");
    this.imageList1.Images.SetKeyName(195, "usbcontroller.ico");
    this.imageList1.Images.SetKeyName(196, "UseCaseDiagramFile_usecasediagram_13447.ico");
    this.imageList1.Images.SetKeyName(197, "UserDefinedDataType_8271.ico");
    this.imageList1.Images.SetKeyName(198, "UserInterfaceEditor_5845.ico");
    this.imageList1.Images.SetKeyName(199, "videocontroller.ico");
    this.imageList1.Images.SetKeyName(200, "View_8933.ico");
    this.imageList1.Images.SetKeyName(201, "VirtualMachine.ico");
    this.imageList1.Images.SetKeyName(202, "VirtualMachines.ico");
    this.imageList1.Images.SetKeyName(203, "VSNETWebServiceDynamicDiscovery_8215.ico");
    this.imageList1.Images.SetKeyName(204, "WCFDataServices.ico");
    this.imageList1.Images.SetKeyName(205, "WebCustomControl(ASCX)_816.ico");
    this.imageList1.Images.SetKeyName(206, "WebForm(ASPX)_815.ico");
    this.imageList1.Images.SetKeyName(207, "WebForm(ASPX)_815_color.ico");
    this.imageList1.Images.SetKeyName(208, "WebFormTemplate_11274.ico");
    this.imageList1.Images.SetKeyName(209, "WebUserControl(ascx)_11270.ico");
    this.imageList1.Images.SetKeyName(210, "WindowsForm_817.ico");
    this.imageList1.Images.SetKeyName(211, "WindowsGroups_7309.ico");
    this.imageList1.Images.SetKeyName(212, "wmi_task.ico");
    this.imageList1.Images.SetKeyName(213, "XMLFile_828.ico");
    this.imageList1.Images.SetKeyName(214, "XMLSchema_798.ico");
    this.imageList1.Images.SetKeyName(215, "Error_grey_677_16x16.ico");
    this.imageList1.Images.SetKeyName(216, "Error_red.ico");
    this.imageList1.Images.SetKeyName(217, "Error_red_16x16_cyan.ico");
    this.imageList1.Images.SetKeyName(218, "FeatureNotAvailable_5734.ico");
    this.imageList1.Images.SetKeyName(219, "FeatureNotAvailable_5734_12x12_16x.ico");
    this.imageList1.Images.SetKeyName(220, "FeatureNotAvailable_5734_12x12_16x_cyan.ico");
    this.imageList1.Images.SetKeyName(221, "FeatureNotAvailable_5734_cyan.ico");
    this.imageList1.Images.SetKeyName(222, "Flagthread_7317.ico");
    this.imageList1.Images.SetKeyName(223, "Information_blue_6227.ico");
    this.imageList1.Images.SetKeyName(224, "Information_blue_6227_16x16_cyan.ico");
    this.imageList1.Images.SetKeyName(225, "Warning_grey_7315_16x16.ico");
    this.imageList1.Images.SetKeyName(226, "Warning_yellow_7231.ico");
    this.imageList1.Images.SetKeyName(227, "Warning_yellow_7231_16x16_cyan.ico");
    this.imageList1.Images.SetKeyName(228, "Warning_yellow_7231_31x32.ico");
    this.imageList1.Images.SetKeyName(229, "warning_yellow_7231_31x32_cyan.ico");
    //
    // toolStrip1
    //
    this.toolStrip1.Items.AddRange(new System.Windows.Forms.ToolStripItem[] {
    this.toolStripButton1});
    this.toolStrip1.Location = new System.Drawing.Point(10, 10);
    this.toolStrip1.Name = "toolStrip1";
    this.toolStrip1.Size = new System.Drawing.Size(225, 25);
    this.toolStrip1.TabIndex = 1;
    this.toolStrip1.Text = "toolStrip1";
    //
    // toolStripButton1
    //
    // Single toolbar button: triggers a rebuild of the document tree (see ToolTipText below).
    this.toolStripButton1.DisplayStyle = System.Windows.Forms.ToolStripItemDisplayStyle.Image;
    this.toolStripButton1.Image = global::ATABBI.TexE.Properties.Resources.Restart_6322;
    this.toolStripButton1.ImageTransparentColor = System.Drawing.Color.Magenta;
    this.toolStripButton1.Name = "toolStripButton1";
    this.toolStripButton1.Size = new System.Drawing.Size(23, 22);
    this.toolStripButton1.Text = "toolStripButton1";
    this.toolStripButton1.ToolTipText = "Rebuild tree";
    this.toolStripButton1.Click += new System.EventHandler(this.toolStripButton1_Click);
    //
    // DocumentExplorer
    //
    // Dockable tool window (WeifenLuo DockPanel Suite): docks to any edge, defaults to the right.
    this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
    this.ClientSize = new System.Drawing.Size(245, 322);
    this.Controls.Add(this.treeView1);
    this.Controls.Add(this.toolStrip1);
    this.DockAreas = ((WeifenLuo.WinFormsUI.Docking.DockAreas)((((WeifenLuo.WinFormsUI.Docking.DockAreas.DockLeft | WeifenLuo.WinFormsUI.Docking.DockAreas.DockRight)
    | WeifenLuo.WinFormsUI.Docking.DockAreas.DockTop)
    | WeifenLuo.WinFormsUI.Docking.DockAreas.DockBottom)));
    this.HideOnClose = true;
    this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
    this.Name = "DocumentExplorer";
    this.Padding = new System.Windows.Forms.Padding(10);
    this.ShowHint = WeifenLuo.WinFormsUI.Docking.DockState.DockRight;
    this.TabText = "Document Explorer";
    this.Text = "Document Explorer";
    this.Load += new System.EventHandler(this.DocumentExplorer_Load);
    this.toolStrip1.ResumeLayout(false);
    this.toolStrip1.PerformLayout();
    this.ResumeLayout(false);
    this.PerformLayout();
}
#endregion
// Designer-managed controls: the document tree, its icon set, and the rebuild toolbar.
private System.Windows.Forms.TreeView treeView1;
private System.Windows.Forms.ImageList imageList1;
private System.Windows.Forms.ToolStrip toolStrip1;
private System.Windows.Forms.ToolStripButton toolStripButton1;
}
}
| |
namespace Fixtures.SwaggerBatBodyNumber
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Models;
public static partial class NumberExtensions
{
/// <summary>
/// Get null Number value
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
public static double? GetNull(this INumber operations)
{
return Task.Factory.StartNew(s => ((INumber)s).GetNullAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get null Number value
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
public static async Task<double?> GetNullAsync( this INumber operations, CancellationToken cancellationToken = default(CancellationToken))
{
HttpOperationResponse<double?> result = await operations.GetNullWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
return result.Body;
}
/// <summary>
/// Get invalid float Number value
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
public static double? GetInvalidFloat(this INumber operations)
{
return Task.Factory.StartNew(s => ((INumber)s).GetInvalidFloatAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get invalid float Number value
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
public static async Task<double?> GetInvalidFloatAsync( this INumber operations, CancellationToken cancellationToken = default(CancellationToken))
{
HttpOperationResponse<double?> result = await operations.GetInvalidFloatWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
return result.Body;
}
/// <summary>
/// Get invalid double Number value
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
public static double? GetInvalidDouble(this INumber operations)
{
return Task.Factory.StartNew(s => ((INumber)s).GetInvalidDoubleAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get invalid double Number value
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
public static async Task<double?> GetInvalidDoubleAsync( this INumber operations, CancellationToken cancellationToken = default(CancellationToken))
{
HttpOperationResponse<double?> result = await operations.GetInvalidDoubleWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
return result.Body;
}
/// <summary>
/// Put big float value 3.402823e+20
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='numberBody'>
/// </param>
public static void PutBigFloat(this INumber operations, double? numberBody)
{
Task.Factory.StartNew(s => ((INumber)s).PutBigFloatAsync(numberBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Put big float value 3.402823e+20
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='numberBody'>
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
public static async Task PutBigFloatAsync( this INumber operations, double? numberBody, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.PutBigFloatWithHttpMessagesAsync(numberBody, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Get big float value 3.402823e+20
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
public static double? GetBigFloat(this INumber operations)
{
return Task.Factory.StartNew(s => ((INumber)s).GetBigFloatAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get big float value 3.402823e+20
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
public static async Task<double?> GetBigFloatAsync( this INumber operations, CancellationToken cancellationToken = default(CancellationToken))
{
HttpOperationResponse<double?> result = await operations.GetBigFloatWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
return result.Body;
}
/// <summary>
/// Put big double value 2.5976931e+101
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='numberBody'>
/// </param>
public static void PutBigDouble(this INumber operations, double? numberBody)
{
Task.Factory.StartNew(s => ((INumber)s).PutBigDoubleAsync(numberBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Put big double value 2.5976931e+101
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='numberBody'>
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
public static async Task PutBigDoubleAsync( this INumber operations, double? numberBody, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.PutBigDoubleWithHttpMessagesAsync(numberBody, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Get big double value 2.5976931e+101
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
public static double? GetBigDouble(this INumber operations)
{
return Task.Factory.StartNew(s => ((INumber)s).GetBigDoubleAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get big double value 2.5976931e+101
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
public static async Task<double?> GetBigDoubleAsync( this INumber operations, CancellationToken cancellationToken = default(CancellationToken))
{
HttpOperationResponse<double?> result = await operations.GetBigDoubleWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
return result.Body;
}
/// <summary>
/// Put big double value 99999999.99
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='numberBody'>
/// </param>
public static void PutBigDoublePositiveDecimal(this INumber operations, double? numberBody)
{
Task.Factory.StartNew(s => ((INumber)s).PutBigDoublePositiveDecimalAsync(numberBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Put big double value 99999999.99
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='numberBody'>
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
public static async Task PutBigDoublePositiveDecimalAsync( this INumber operations, double? numberBody, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.PutBigDoublePositiveDecimalWithHttpMessagesAsync(numberBody, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Get big double value 99999999.99
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
public static double? GetBigDoublePositiveDecimal(this INumber operations)
{
return Task.Factory.StartNew(s => ((INumber)s).GetBigDoublePositiveDecimalAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get big double value 99999999.99
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
public static async Task<double?> GetBigDoublePositiveDecimalAsync( this INumber operations, CancellationToken cancellationToken = default(CancellationToken))
{
HttpOperationResponse<double?> result = await operations.GetBigDoublePositiveDecimalWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
return result.Body;
}
/// <summary>
/// Put big double value -99999999.99
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='numberBody'>
/// </param>
public static void PutBigDoubleNegativeDecimal(this INumber operations, double? numberBody)
{
Task.Factory.StartNew(s => ((INumber)s).PutBigDoubleNegativeDecimalAsync(numberBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Put big double value -99999999.99
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='numberBody'>
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
public static async Task PutBigDoubleNegativeDecimalAsync( this INumber operations, double? numberBody, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.PutBigDoubleNegativeDecimalWithHttpMessagesAsync(numberBody, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Get big double value -99999999.99
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
public static double? GetBigDoubleNegativeDecimal(this INumber operations)
{
return Task.Factory.StartNew(s => ((INumber)s).GetBigDoubleNegativeDecimalAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get big double value -99999999.99
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
public static async Task<double?> GetBigDoubleNegativeDecimalAsync( this INumber operations, CancellationToken cancellationToken = default(CancellationToken))
{
HttpOperationResponse<double?> result = await operations.GetBigDoubleNegativeDecimalWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
return result.Body;
}
/// <summary>
/// Put small float value 3.402823e-20
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='numberBody'>
/// </param>
public static void PutSmallFloat(this INumber operations, double? numberBody)
{
Task.Factory.StartNew(s => ((INumber)s).PutSmallFloatAsync(numberBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Put small float value 3.402823e-20
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='numberBody'>
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
public static async Task PutSmallFloatAsync( this INumber operations, double? numberBody, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.PutSmallFloatWithHttpMessagesAsync(numberBody, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Get big double value 3.402823e-20
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
public static double? GetSmallFloat(this INumber operations)
{
return Task.Factory.StartNew(s => ((INumber)s).GetSmallFloatAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get big double value 3.402823e-20
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
public static async Task<double?> GetSmallFloatAsync( this INumber operations, CancellationToken cancellationToken = default(CancellationToken))
{
HttpOperationResponse<double?> result = await operations.GetSmallFloatWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
return result.Body;
}
/// <summary>
/// Put small double value 2.5976931e-101
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='numberBody'>
/// </param>
public static void PutSmallDouble(this INumber operations, double? numberBody)
{
Task.Factory.StartNew(s => ((INumber)s).PutSmallDoubleAsync(numberBody), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Put small double value 2.5976931e-101
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
/// <param name='numberBody'>
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
public static async Task PutSmallDoubleAsync( this INumber operations, double? numberBody, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.PutSmallDoubleWithHttpMessagesAsync(numberBody, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Get big double value 2.5976931e-101
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method
/// </param>
public static double? GetSmallDouble(this INumber operations)
{
return Task.Factory.StartNew(s => ((INumber)s).GetSmallDoubleAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
        /// <summary>
        /// Get small double value 2.5976931e-101
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        public static async Task<double?> GetSmallDoubleAsync( this INumber operations, CancellationToken cancellationToken = default(CancellationToken))
        {
            // Delegate to the HTTP-message overload (no custom headers) and unwrap the deserialized body.
            HttpOperationResponse<double?> result = await operations.GetSmallDoubleWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false);
            return result.Body;
        }
}
}
| |
#region Header
//
// Copyright 2003-2019 by Autodesk, Inc.
//
// Permission to use, copy, modify, and distribute this software in
// object code form for any purpose and without fee is hereby granted,
// provided that the above copyright notice appears in all copies and
// that both that copyright notice and the limited warranty and
// restricted rights notice below appear in all supporting
// documentation.
//
// AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS.
// AUTODESK SPECIFICALLY DISCLAIMS ANY IMPLIED WARRANTY OF
// MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE. AUTODESK, INC.
// DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE
// UNINTERRUPTED OR ERROR FREE.
//
// Use, duplication, or disclosure by the U.S. Government is subject to
// restrictions set forth in FAR 52.227-19 (Commercial Computer
// Software - Restricted Rights) and DFAR 252.227-7013(c)(1)(ii)
// (Rights in Technical Data and Computer Software), as applicable.
//
#endregion // Header
using System;
using System.Collections.Generic;
using System.Text;
using System.Diagnostics;
using Autodesk.Revit.DB;
using Autodesk.Revit.UI;
namespace RevitLookup.Graphics {

    /// <summary>
    /// Abstract base class for streaming Revit geometry out as simple vector graphics.
    /// Derived classes only have to override StreamWcs(XYZ, XYZ); every higher-level
    /// entry point (elements, solids, faces, curves, meshes) decomposes its geometry
    /// into WCS line segments and funnels them through that single primitive.
    /// Maintains stacks for the current transform, geometry-extraction options and view.
    /// </summary>
    public abstract class GraphicsStream {

        protected Autodesk.Revit.UI.UIApplication m_app;   // hosting Revit application
        protected Stack<Transform> m_xformStack;           // accumulated transforms; top = full current transform
        protected Stack<Options> m_geomOptionsStack;       // options used when extracting element geometry
        protected Stack<View> m_viewStack;                 // view context for geometry extraction

        public GraphicsStream(UIApplication app)
        {
            m_app = app;
            m_xformStack = new Stack<Transform>();
            m_geomOptionsStack = new Stack<Options>();
            m_viewStack = new Stack<View>();
        }

        /// <summary>The Revit application this stream was created for.</summary>
        public UIApplication Application
        {
            get { return m_app; }
        }

        #region Transformation Stack

        /// <summary>
        /// Push a transform. It is pre-composed with the current top of the stack
        /// (if any), so the top always holds the full accumulated transform.
        /// </summary>
        public virtual void PushXform(Transform mat)
        {
            if (m_xformStack.Count > 0) {
                m_xformStack.Push(m_xformStack.Peek() * mat);
            }
            else {
                m_xformStack.Push(mat);
            }
        }

        /// <summary>Discard the most recently pushed transform.</summary>
        public virtual void PopXform()
        {
            m_xformStack.Pop();
        }

        /// <summary>Accumulated transform currently in effect (stack must be non-empty).</summary>
        public virtual Transform CurrentXform
        {
            get { return m_xformStack.Peek(); }
        }

        /// <summary>True when at least one transform has been pushed.</summary>
        public Boolean HasXform
        {
            get { return m_xformStack.Count > 0; }
        }

        #endregion

        #region Geometry Options Stack

        /// <summary>Push the Options to use for subsequent geometry extraction.</summary>
        public void PushGeometryOptions(Options opts)
        {
            m_geomOptionsStack.Push(opts);
        }

        public void PopGeometryOptions()
        {
            m_geomOptionsStack.Pop();
        }

        /// <summary>Geometry options currently in effect (stack must be non-empty).</summary>
        public Options CurrentGeometryOptions
        {
            get { return m_geomOptionsStack.Peek(); }
        }

        #endregion

        #region View Stack

        /// <summary>Push the View that supplies context for subsequent streaming.</summary>
        public void PushView(View view)
        {
            m_viewStack.Push(view);
        }

        public void PopView()
        {
            m_viewStack.Pop();
        }

        /// <summary>View currently in effect (stack must be non-empty).</summary>
        public View CurrentView
        {
            get { return m_viewStack.Peek(); }
        }

        #endregion

        #region Geometric Primitives

        /// <summary>Tessellation deviation used for curves; override to tune fidelity.</summary>
        public virtual Double DeviationForCurves()
        {
            return 0.5;
        }

        /// <summary>Tessellation deviation near a given sample point; override to tune fidelity.</summary>
        public virtual Double DeviationForCurves(XYZ samplePt)
        {
            return 0.5;
        }

        #endregion

        #region Low-Level geometric primitives

        public abstract void
        StreamWcs(XYZ pt1, XYZ pt2);    // only function you absolutely have to override (to get Vector graphics)

        /// <summary>
        /// Stream a polyline given as WCS points; optionally close it back to the first point.
        /// </summary>
        public virtual void StreamWcs(IList<XYZ> pts, bool closed)
        {
            if (pts.Count < 2) {
                Debug.Assert(false);    // have to have at least 2 points!
                return;
            }

            int len = pts.Count;
            for (int i = 0; i < (len - 1); i++) {
                StreamWcs(pts[i], pts[i + 1]);  // pts are already in WCS, no Xform
            }

            if (closed)
                StreamWcs(pts[len - 1], pts[0]);
        }

        /// <summary>Stream a bound line as a single WCS segment.</summary>
        public virtual void StreamWcs(Line line)
        {
            if (line.IsBound == false) {    // unbound lines have no endpoints to stream
                Debug.Assert(false);
                return;
            }

            StreamWcs(line.GetEndPoint(0), line.GetEndPoint(1));
        }

        public virtual void StreamWcs(Arc arc)
        {
            StreamCurveAsTesselatedPointsWcs(arc);
        }

        public virtual void StreamWcs(Ellipse ellipse)
        {
            StreamCurveAsTesselatedPointsWcs(ellipse);
        }

        public virtual void StreamWcs(NurbSpline spline)
        {
            StreamCurveAsTesselatedPointsWcs(spline);
        }

        /// <summary>
        /// Dispatch a generic curve to the most specific overload so derived classes
        /// can intercept individual curve types before tessellation.
        /// </summary>
        public virtual void StreamWcs(Curve curve)
        {
            if (curve is Line)
                StreamWcs(curve as Line);
            else if (curve is Arc)
                StreamWcs(curve as Arc);
            else if (curve is Ellipse)
                StreamWcs(curve as Ellipse);
            else if (curve is NurbSpline)
                StreamWcs(curve as NurbSpline);
            else
                StreamCurveAsTesselatedPointsWcs(curve);
        }

        #endregion  // Low-level geometric primitives

        /// <summary>Stream a segment, applying the current transform (if any) to reach WCS.</summary>
        public virtual void Stream(XYZ pt1, XYZ pt2)
        {
            if (HasXform)
                StreamWcs(CurrentXform.OfPoint(pt1), CurrentXform.OfPoint(pt2));
            else
                StreamWcs(pt1, pt2);
        }

        /// <summary>Stream a polyline, applying the current transform (if any) to each point.</summary>
        public virtual void Stream(IList<XYZ> pts, bool closed)
        {
            if (pts.Count < 2) {
                Debug.Assert(false);    // have to have at least 2 points!
                return;
            }

            int len = pts.Count;
            for (int i = 0; i < (len - 1); i++) {
                if (HasXform)
                    StreamWcs(CurrentXform.OfPoint(pts[i]), CurrentXform.OfPoint(pts[i + 1]));
                else
                    StreamWcs(pts[i], pts[i + 1]);
            }

            if (closed) {
                if (HasXform)
                    StreamWcs(CurrentXform.OfPoint(pts[len - 1]), CurrentXform.OfPoint(pts[0]));
                else
                    StreamWcs(pts[len - 1], pts[0]);
            }
        }

        /// <summary>Stream a bound line through the transforming segment primitive.</summary>
        public virtual void Stream(Line line)
        {
            if (line.IsBound == false) {
                Debug.Assert(false);
                return;
            }

            Stream(line.GetEndPoint(0), line.GetEndPoint(1));
        }

        public virtual void Stream(Arc arc)
        {
            if (HasXform)
                StreamWcs(arc.CreateTransformed(CurrentXform));
            else
                StreamWcs(arc);
        }

        public virtual void Stream(Ellipse ellipse)
        {
            if (HasXform)
                StreamWcs(ellipse.CreateTransformed(CurrentXform));
            else
                StreamWcs(ellipse);
        }

        public virtual void Stream(NurbSpline spline)
        {
            if (HasXform)
                StreamWcs(spline.CreateTransformed(CurrentXform));
            else
                StreamWcs(spline);
        }

        public virtual void Stream(Curve curve)
        {
            if (HasXform)
                StreamWcs(curve.CreateTransformed(CurrentXform));
            else
                StreamWcs(curve);
        }

        /// <summary>
        /// By default, everything goes out as tesselated vectors. This function allows all the
        /// base class functions to easily tesselate. But, if derived classes override individual
        /// curve types, they can intercept before they are tesselated.
        /// </summary>
        /// <param name="crv">Curve to tesselate into vectors</param>
        private void StreamCurveAsTesselatedPointsWcs(Curve crv)
        {
            StreamWcs(crv.Tessellate(), false); // stream out as array of points
        }

        // NOTE: currently unused counterpart that tessellates and then applies the
        // current transform; kept for symmetry with StreamCurveAsTesselatedPointsWcs.
        private void StreamCurveAsTesselatedPoints(Curve crv)
        {
            Stream(crv.Tessellate(), false);    // stream out as array of points
        }

        #region High-Level Object Stream functions

        /// <summary>
        /// Stream an element's geometry. Requires that a View and geometry Options
        /// have been pushed first.
        /// </summary>
        /// <exception cref="System.ArgumentException">View or Options stack is empty.</exception>
        public virtual void Stream(Element elem)
        {
            if ((m_viewStack.Count == 0) || (m_geomOptionsStack.Count == 0)) {
                throw new System.ArgumentException("View stack or Geometry Options stack is empty.");
            }

            GeometryElement geom = elem.get_Geometry(this.CurrentGeometryOptions);
            if (geom != null) {
                Stream(geom);
            }
        }

        /// <summary>
        /// Dispatch a generic GeometryObject to its most specific overload.
        /// Face subclasses are tested before Face so the most derived overload wins.
        /// </summary>
        public virtual void Stream(GeometryObject obj)
        {
            if (obj is Curve) {
                Stream(obj as Curve);
            }
            else if (obj is Edge) {
                Stream(obj as Edge);
            }
            else if (obj is GeometryElement) {
                Stream(obj as GeometryElement);
            }
            else if (obj is ConicalFace) {
                Stream(obj as ConicalFace);
            }
            else if (obj is CylindricalFace) {
                Stream(obj as CylindricalFace);
            }
            else if (obj is HermiteFace) {
                Stream(obj as HermiteFace);
            }
            else if (obj is PlanarFace) {
                Stream(obj as PlanarFace);
            }
            else if (obj is RevolvedFace) {
                Stream(obj as RevolvedFace);
            }
            else if (obj is RuledFace) {
                Stream(obj as RuledFace);
            }
            else if (obj is Face) {
                Stream(obj as Face);
            }
            else if (obj is GeometryInstance) {
                Stream(obj as GeometryInstance);
            }
            else if (obj is Mesh) {
                Stream(obj as Mesh);
            }
            else if (obj is Profile) {
                Stream(obj as Profile);
            }
            else if (obj is Solid) {
                Stream(obj as Solid);
            }
        }

        /// <summary>Stream an edge as its tessellated segments (current transform applied).</summary>
        public virtual void Stream(Edge edge)
        {
            IList<XYZ> ptArray = edge.Tessellate();

            int len = ptArray.Count;
            for (int i = 0; i < (len - 1); i++) {
                Stream(ptArray[i], ptArray[i + 1]);
            }
        }

        public virtual void Stream(EdgeArray edgeArray)
        {
            foreach (Edge edge in edgeArray) {
                Stream(edge);
            }
        }

        public virtual void Stream(GeometryElement elem)
        {
            foreach (GeometryObject geom in elem)
            {
                Stream(geom);
            }
        }

        // All of these types of Faces get their geometry from the base class Face. We
        // want a specific virtual for each type though so that derived streams can pick
        // up the geometry at the optimal level (if they want).

        /// <summary>
        /// Do the common work of streaming data out for all Face types
        /// </summary>
        /// <param name="face"></param>
        private void StreamFaceGeometry(Face face)
        {
            foreach (EdgeArray edgeArray in face.EdgeLoops)
                Stream(edgeArray);
        }

        public virtual void Stream(ConicalFace face)
        {
            StreamFaceGeometry(face);
        }

        public virtual void Stream(CylindricalFace face)
        {
            StreamFaceGeometry(face);
        }

        public virtual void Stream(HermiteFace face)
        {
            StreamFaceGeometry(face);
        }

        public virtual void Stream(PlanarFace face)
        {
            StreamFaceGeometry(face);
        }

        public virtual void Stream(RevolvedFace face)
        {
            StreamFaceGeometry(face);
        }

        public virtual void Stream(RuledFace face)
        {
            StreamFaceGeometry(face);
        }

        public virtual void Stream(Face face)
        {
            StreamFaceGeometry(face);
        }

        /// <summary>Stream an instance's symbol geometry under the instance transform.</summary>
        public virtual void Stream(Autodesk.Revit.DB.GeometryInstance inst)
        {
            PushXform(inst.Transform);
            Stream(inst.SymbolGeometry);
            PopXform();
        }

        /// <summary>Stream each mesh triangle as its three edges.</summary>
        public virtual void Stream(Mesh mesh)
        {
            for (int i = 0; i < mesh.NumTriangles; i++) {
                MeshTriangle mt = mesh.get_Triangle(i);
                Stream(mt.get_Vertex(0), mt.get_Vertex(1));
                Stream(mt.get_Vertex(1), mt.get_Vertex(2));
                Stream(mt.get_Vertex(2), mt.get_Vertex(0));
            }
        }

        public virtual void Stream(Profile prof)
        {
            foreach (Curve curve in prof.Curves) {
                Stream(curve);
            }
        }

        /// <summary>Stream a solid by streaming each of its faces.</summary>
        public virtual void Stream(Solid solid)
        {
            foreach (Face face in solid.Faces) {
                Stream(face);
            }

            // These edges will appear when streaming the faces
            //
            ////StreamEdgeArray(solid.Edges);
        }

        #endregion
    }
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.IO;
using System.Net.Http;
using System.Net.Test.Common;
using System.Text;
using System.Threading.Tasks;
using Xunit;
using Xunit.Abstractions;
namespace System.Net.Tests
{
    /// <summary>
    /// Synchronous, API-surface tests for FileWebRequest: default property values,
    /// property round-trips, argument validation, and state-machine errors.
    /// </summary>
    public class FileWebRequestTest
    {
        private readonly ITestOutputHelper _output;

        public FileWebRequestTest(ITestOutputHelper output)
        {
            _output = output;
        }

        [Fact]
        public void Ctor_VerifyDefaults_Success()
        {
            // A file:// URI makes WebRequest.Create hand back a FileWebRequest.
            Uri uri = new Uri("file://somefilepath");
            FileWebRequest request = (FileWebRequest)WebRequest.Create(uri);
            Assert.Null(request.ContentType);
            Assert.Null(request.Credentials);
            Assert.NotNull(request.Headers);
            Assert.Equal(0, request.Headers.Count);
            Assert.Equal("GET", request.Method);
            Assert.Null(request.Proxy);
            Assert.Equal(uri, request.RequestUri);
        }

        [Fact]
        public void FileWebRequest_Properties_Roundtrips()
        {
            // Setters should store and getters should return the exact values set.
            WebRequest request = WebRequest.Create("file://anything");
            request.ContentLength = 42;
            Assert.Equal(42, request.ContentLength);
            request.ContentType = "anything";
            Assert.Equal("anything", request.ContentType);
            request.Timeout = 42000;
            Assert.Equal(42000, request.Timeout);
        }

        [Fact]
        public void InvalidArguments_Throws()
        {
            WebRequest request = WebRequest.Create("file://anything");
            AssertExtensions.Throws<ArgumentException>("value", () => request.ContentLength = -1);
            AssertExtensions.Throws<ArgumentException>("value", () => request.Method = null);
            AssertExtensions.Throws<ArgumentException>("value", () => request.Method = "");
            AssertExtensions.Throws<ArgumentOutOfRangeException>("value", () => request.Timeout = -2);
        }

        [Fact]
        public void GetRequestStream_MethodGet_ThrowsProtocolViolation()
        {
            // Requesting an upload stream is invalid while the method is still GET.
            WebRequest request = WebRequest.Create("file://anything");
            Assert.Throws<ProtocolViolationException>(() => request.BeginGetRequestStream(null, null));
        }

        [Fact]
        public void GetRequestResponseAfterAbort_Throws()
        {
            WebRequest request = WebRequest.Create("file://anything");
            request.Abort();
            Assert.Throws<WebException>(() => request.BeginGetRequestStream(null, null));
            Assert.Throws<WebException>(() => request.BeginGetResponse(null, null));
        }

        [Fact]
        public void UseDefaultCredentials_GetOrSet_Throws()
        {
            // FileWebRequest does not support default credentials at all.
            WebRequest request = WebRequest.Create("file://anything");
            Assert.Throws<NotSupportedException>(() => request.UseDefaultCredentials);
            Assert.Throws<NotSupportedException>(() => request.UseDefaultCredentials = true);
        }
    }
public abstract class FileWebRequestTestBase
{
public abstract Task<WebResponse> GetResponseAsync(WebRequest request);
public abstract Task<Stream> GetRequestStreamAsync(WebRequest request);
[SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "dotnet/corefx #17842")]
[Fact]
public async Task ReadFile_ContainsExpectedContent()
{
string path = Path.GetTempFileName();
try
{
var data = new byte[1024 * 10];
var random = new Random(42);
random.NextBytes(data);
File.WriteAllBytes(path, data);
WebRequest request = WebRequest.Create("file://" + path);
using (WebResponse response = await GetResponseAsync(request))
{
Assert.Equal(data.Length, response.ContentLength);
Assert.Equal(data.Length.ToString(), response.Headers[HttpRequestHeader.ContentLength]);
Assert.Equal("application/octet-stream", response.ContentType);
Assert.Equal("application/octet-stream", response.Headers[HttpRequestHeader.ContentType]);
Assert.True(response.SupportsHeaders);
Assert.NotNull(response.Headers);
Assert.Equal(new Uri("file://" + path), response.ResponseUri);
using (Stream s = response.GetResponseStream())
{
var target = new MemoryStream();
await s.CopyToAsync(target);
Assert.Equal(data, target.ToArray());
}
}
}
finally
{
File.Delete(path);
}
}
[Fact]
public async Task WriteFile_ContainsExpectedContent()
{
string path = Path.GetTempFileName();
try
{
var data = new byte[1024 * 10];
var random = new Random(42);
random.NextBytes(data);
var request = WebRequest.Create("file://" + path);
request.Method = WebRequestMethods.File.UploadFile;
using (Stream s = await GetRequestStreamAsync(request))
{
await s.WriteAsync(data, 0, data.Length);
}
Assert.Equal(data, File.ReadAllBytes(path));
}
finally
{
File.Delete(path);
}
}
[Fact]
public async Task WriteThenReadFile_WriteAccessResultsInNullResponseStream()
{
string path = Path.GetTempFileName();
try
{
var data = new byte[1024 * 10];
var random = new Random(42);
random.NextBytes(data);
var request = WebRequest.Create("file://" + path);
request.Method = WebRequestMethods.File.UploadFile;
using (Stream s = await GetRequestStreamAsync(request))
{
await s.WriteAsync(data, 0, data.Length);
}
using (WebResponse response = await GetResponseAsync(request))
using (Stream s = response.GetResponseStream()) // will hand back a null stream
{
Assert.Equal(0, s.Length);
}
}
finally
{
File.Delete(path);
}
}
protected virtual bool EnableConcurrentReadWriteTests => true;
[Fact]
public async Task RequestAfterResponse_throws()
{
string path = Path.GetTempFileName();
try
{
var data = new byte[1024];
WebRequest request = WebRequest.Create("file://" + path);
request.Method = WebRequestMethods.File.UploadFile;
using (WebResponse response = await GetResponseAsync(request))
{
await Assert.ThrowsAsync<InvalidOperationException>(() => GetRequestStreamAsync(request));
}
}
finally
{
File.Delete(path);
}
}
[Theory]
[InlineData(null)]
[InlineData(false)]
[InlineData(true)]
public async Task BeginGetResponse_OnNonexistentFile_ShouldNotCrashApplication(bool? abortWithDelay)
{
FileWebRequest request = (FileWebRequest)WebRequest.Create("file://" + Path.GetRandomFileName());
Task<WebResponse> responseTask = GetResponseAsync(request);
if (abortWithDelay.HasValue)
{
if (abortWithDelay.Value)
{
await Task.Delay(1);
}
request.Abort();
}
await Assert.ThrowsAsync<WebException>(() => responseTask);
}
}
    /// <summary>
    /// Extra scenarios that only make sense for truly asynchronous API flavors
    /// (Begin/End and Task-based), where the response call can be issued while the
    /// request stream is still open.
    /// </summary>
    public abstract class AsyncFileWebRequestTestBase : FileWebRequestTestBase
    {
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "dotnet/corefx #17842")]
        [Fact]
        public async Task ConcurrentReadWrite_ResponseBlocksThenGetsNullStream()
        {
            string path = Path.GetTempFileName();
            try
            {
                // Seeded RNG keeps the fixture deterministic.
                var data = new byte[1024 * 10];
                var random = new Random(42);
                random.NextBytes(data);

                var request = WebRequest.Create("file://" + path);
                request.Method = WebRequestMethods.File.UploadFile;
                // Kick off the response before the upload stream is even obtained.
                Task<Stream> requestStreamTask = GetRequestStreamAsync(request);
                Task<WebResponse> responseTask = GetResponseAsync(request);
                using (Stream s = await requestStreamTask)
                {
                    await s.WriteAsync(data, 0, data.Length);
                }
                using (WebResponse response = await responseTask)
                using (Stream s = response.GetResponseStream()) // will hand back a null stream
                {
                    Assert.Equal(0, s.Length);
                }
            }
            finally
            {
                File.Delete(path);
            }
        }
    }
public sealed class SyncFileWebRequestTestBase : FileWebRequestTestBase
{
public override Task<WebResponse> GetResponseAsync(WebRequest request) => Task.Run(() => request.GetResponse());
public override Task<Stream> GetRequestStreamAsync(WebRequest request) => Task.Run(() => request.GetRequestStream());
}
public sealed class BeginEndFileWebRequestTestBase : AsyncFileWebRequestTestBase
{
public override Task<WebResponse> GetResponseAsync(WebRequest request) =>
Task.Factory.FromAsync(request.BeginGetResponse, request.EndGetResponse, null);
public override Task<Stream> GetRequestStreamAsync(WebRequest request) =>
Task.Factory.FromAsync(request.BeginGetRequestStream, request.EndGetRequestStream, null);
}
public sealed class TaskFileWebRequestTestBase : AsyncFileWebRequestTestBase
{
public override Task<WebResponse> GetResponseAsync(WebRequest request) => request.GetResponseAsync();
public override Task<Stream> GetRequestStreamAsync(WebRequest request) => request.GetRequestStreamAsync();
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Security.Cryptography.X509Certificates;
using Xunit;
using Test.Cryptography;
using System.Security.Cryptography.Pkcs.Tests;
namespace System.Security.Cryptography.Pkcs.EnvelopedCmsTests.Tests
{
    /// <summary>
    /// Verifies that EnvelopedCms.Decode reports the expected content-encryption
    /// AlgorithmIdentifier (OID and KeyLength) for each supported cipher. Each cipher
    /// gets two tests: a round-trip (encrypt locally, then decode) and a fixed,
    /// pre-encoded message so decoding is exercised independently of encryption.
    /// </summary>
    public static partial class ContentEncryptionAlgorithmTests
    {
        // RC4 encryption is only available on Windows in this test environment.
        public static bool SupportsRc4 => PlatformDetection.IsWindows;

        [Fact]
        public static void DecodeAlgorithmRc2_128_RoundTrip()
        {
            AlgorithmIdentifier algorithm = new AlgorithmIdentifier(new Oid(Oids.Rc2));
            ContentInfo contentInfo = new ContentInfo(new byte[] { 1, 2, 3 });
            EnvelopedCms ecms = new EnvelopedCms(contentInfo, algorithm);
            using (X509Certificate2 cert = Certificates.RSAKeyTransfer1.GetCertificate())
            {
                CmsRecipient cmsRecipient = new CmsRecipient(cert);
                ecms.Encrypt(cmsRecipient);
            }
            byte[] encodedMessage = ecms.Encode();

            VerifyAlgorithmRc2_128(encodedMessage);
        }

        [Fact]
        public static void DecodeAlgorithmRc2_128_FixedValue()
        {
            // Pre-encoded EnvelopedCms message captured from a known-good encryption.
            byte[] encodedMessage =
                ("3082011306092a864886f70d010703a0820104308201000201003181c83081c5020100302e301a311830160603550403130f"
                + "5253414b65795472616e7366657231021031d935fb63e8cfab48a0bf7b397b67c0300d06092a864886f70d01010105000481"
                + "802fb2f6a7ccc1c696177c1ddb5ac92ab7f556dce4d21e924c37a06fbdb7015fd35dee9726f6301ca86af50b14275bf34584"
                + "3571848bf6f55281c75fb67adc9c63fac5c4427b38f4fab1567f2f08063a786059f9850c79ff202d1b556e8c90e41f977090"
                + "3c2d84a9046a372a0619a29713179304355750c9f6c180d1cc92d9b22b303006092a864886f70d010701301906082a864886"
                + "f70d0302300d02013a04086bcd05b70546e632800810f6c8d0e0466ee6").HexToByteArray();

            VerifyAlgorithmRc2_128(encodedMessage);
        }

        // Shared assertions: RC2 OID with an effective key length of 128 bits.
        private static void VerifyAlgorithmRc2_128(byte[] encodedMessage)
        {
            EnvelopedCms ecms = new EnvelopedCms();
            ecms.Decode(encodedMessage);

            AlgorithmIdentifier algorithm = ecms.ContentEncryptionAlgorithm;
            Assert.NotNull(algorithm.Oid);
            Assert.Equal(Oids.Rc2, algorithm.Oid.Value);
            Assert.Equal(128, algorithm.KeyLength);
        }

        [Fact]
        public static void DecodeAlgorithmDes_RoundTrip()
        {
            AlgorithmIdentifier algorithm = new AlgorithmIdentifier(new Oid(Oids.Des));
            ContentInfo contentInfo = new ContentInfo(new byte[] { 1, 2, 3 });
            EnvelopedCms ecms = new EnvelopedCms(contentInfo, algorithm);
            using (X509Certificate2 cert = Certificates.RSAKeyTransfer1.GetCertificate())
            {
                CmsRecipient cmsRecipient = new CmsRecipient(cert);
                ecms.Encrypt(cmsRecipient);
            }
            byte[] encodedMessage = ecms.Encode();

            VerifyAlgorithmDes(encodedMessage);
        }

        [Fact]
        public static void DecodeAlgorithmDes_FixedValue()
        {
            byte[] encodedMessage =
                ("3082010906092a864886f70d010703a081fb3081f80201003181c83081c5020100302e301a311830160603550403130f5253"
                + "414b65795472616e7366657231021031d935fb63e8cfab48a0bf7b397b67c0300d06092a864886f70d01010105000481801f"
                + "0621d0d37f0e89c2eac528c2bf97eff89131aa55f08c286d6c41f403168e74bf49c39d4752830ff2b222b704dbec0a3bb109"
                + "0f6d39a2abb14819083f0a2c767958ebe19a2b73147306202da9ca483b911a0218ffb4ca3046de322cf3be6c1500af3d6b52"
                + "f02e3fa5a1a85e3fa035b3df65400fd29d8104d93481a6716c170c302806092a864886f70d010701301106052b0e03020704"
                + "0880052d38754b7f298008fc778a46c054e572").HexToByteArray();

            VerifyAlgorithmDes(encodedMessage);
        }

        // Shared assertions: DES OID; KeyLength reported as 64 (56 effective + parity bits).
        private static void VerifyAlgorithmDes(byte[] encodedMessage)
        {
            EnvelopedCms ecms = new EnvelopedCms();
            ecms.Decode(encodedMessage);

            AlgorithmIdentifier algorithm = ecms.ContentEncryptionAlgorithm;
            Assert.NotNull(algorithm.Oid);
            Assert.Equal(Oids.Des, algorithm.Oid.Value);
            Assert.Equal(64, algorithm.KeyLength);
        }

        [Fact]
        public static void DecodeAlgorithm3Des_RoundTrip()
        {
            AlgorithmIdentifier algorithm = new AlgorithmIdentifier(new Oid(Oids.TripleDesCbc));
            ContentInfo contentInfo = new ContentInfo(new byte[] { 1, 2, 3 });
            EnvelopedCms ecms = new EnvelopedCms(contentInfo, algorithm);
            using (X509Certificate2 cert = Certificates.RSAKeyTransfer1.GetCertificate())
            {
                CmsRecipient cmsRecipient = new CmsRecipient(cert);
                ecms.Encrypt(cmsRecipient);
            }
            byte[] encodedMessage = ecms.Encode();

            VerifyAlgorithm3Des(encodedMessage);
        }

        [Fact]
        public static void DecodeAlgorithm3Des_FixedValue()
        {
            byte[] encodedMessage =
                ("3082010c06092a864886f70d010703a081fe3081fb0201003181c83081c5020100302e301a311830160603550403130f5253"
                + "414b65795472616e7366657231021031d935fb63e8cfab48a0bf7b397b67c0300d06092a864886f70d010101050004818087"
                + "77495ce527339dc78b063477104d513eda6f8a7b2f5c642fddf81d86a4c139f836590a1f81efafb953f7c6d666021fe5c031"
                + "10064f21ce4b17f4737a9370298a8b540b1d597fbc39d21a537b45d9dc65c8d2cbafcc6c7208b5f0453f7ef206f4b1d99cc0"
                + "7186f7f5b31a0a9ec885296ae27183f51b83a64bb8bf46ece16305302b06092a864886f70d010701301406082a864886f70d"
                + "03070408d8ac6958c16ea6f58008beb49fa4214d1e3f").HexToByteArray(); VerifyAlgorithm3Des(encodedMessage);
        }

        // Shared assertions: Triple-DES CBC OID with a 192-bit key.
        private static void VerifyAlgorithm3Des(byte[] encodedMessage)
        {
            EnvelopedCms ecms = new EnvelopedCms();
            ecms.Decode(encodedMessage);

            AlgorithmIdentifier algorithm = ecms.ContentEncryptionAlgorithm;
            Assert.NotNull(algorithm.Oid);
            Assert.Equal(Oids.TripleDesCbc, algorithm.Oid.Value);
            Assert.Equal(192, algorithm.KeyLength);
        }

        // Round-trip requires RC4 support in the platform crypto stack (Windows only).
        [ConditionalFact(nameof(SupportsRc4))]
        public static void DecodeAlgorithmRc4_RoundTrip()
        {
            AlgorithmIdentifier algorithm = new AlgorithmIdentifier(new Oid(Oids.Rc4));
            ContentInfo contentInfo = new ContentInfo(new byte[] { 1, 2, 3 });
            EnvelopedCms ecms = new EnvelopedCms(contentInfo, algorithm);
            using (X509Certificate2 cert = Certificates.RSAKeyTransfer1.GetCertificate())
            {
                CmsRecipient cmsRecipient = new CmsRecipient(cert);
                ecms.Encrypt(cmsRecipient);
            }
            byte[] encodedMessage = ecms.Encode();

            VerifyAlgorithmRc4(encodedMessage);
        }

        [Fact]
        public static void DecodeAlgorithmRc4_FixedValue()
        {
            byte[] encodedMessage =
                ("3081ff06092a864886f70d010703a081f13081ee0201003181c83081c5020100302e301a311830160603550403130f525341"
                + "4b65795472616e7366657231021031d935fb63e8cfab48a0bf7b397b67c0300d06092a864886f70d010101050004818036ce"
                + "a9d8c17763ceffa0c4902da6e02a2168349eba10c66f827fb427c944d035c0cea65a7729131527b38b1c5e0b378205bb571a"
                + "c94ea26e2b4e8ab9b53d5ec7fec48a095d1145769878e6b2947adf41ad924004b185914bed859b2be7e84bbdb59b45663c2c"
                + "56392895c0534766e743b70db12cd08377c35d9cdf21ac7eb4a4301e06092a864886f70d010701300c06082a864886f70d03"
                + "04050080030039bd").HexToByteArray();

            VerifyAlgorithmRc4(encodedMessage);
        }

        // Shared assertions: RC4 OID with a 128-bit key.
        private static void VerifyAlgorithmRc4(byte[] encodedMessage)
        {
            EnvelopedCms ecms = new EnvelopedCms();
            ecms.Decode(encodedMessage);

            AlgorithmIdentifier algorithm = ecms.ContentEncryptionAlgorithm;
            Assert.NotNull(algorithm.Oid);
            Assert.Equal(Oids.Rc4, algorithm.Oid.Value);
            Assert.Equal(128, algorithm.KeyLength);
        }

        [Fact]
        public static void DecodeAlgorithmAes128_RoundTrip()
        {
            AlgorithmIdentifier algorithm = new AlgorithmIdentifier(new Oid(Oids.Aes128));
            ContentInfo contentInfo = new ContentInfo(new byte[] { 1, 2, 3 });
            EnvelopedCms ecms = new EnvelopedCms(contentInfo, algorithm);
            using (X509Certificate2 cert = Certificates.RSAKeyTransfer1.GetCertificate())
            {
                CmsRecipient cmsRecipient = new CmsRecipient(cert);
                ecms.Encrypt(cmsRecipient);
            }
            byte[] encodedMessage = ecms.Encode();

            VerifyAlgorithmAes128(encodedMessage);
        }

        [Fact]
        public static void DecodeAlgorithmAes128_FixedValue()
        {
            byte[] encodedMessage =
                ("3082011f06092a864886f70d010703a08201103082010c0201003181c83081c5020100302e301a311830160603550403130f"
                + "5253414b65795472616e7366657231021031d935fb63e8cfab48a0bf7b397b67c0300d06092a864886f70d01010105000481"
                + "8011e777f410a2c2ab5152303dc011da5bfc5982f2254040ed00bdcfaa028a14624fb63c817082d6e373b4cdbbcce3ab5dee"
                + "bf85c33cea8ffa40b31a784b61dde7eab3736261f0d912b829773201bdf6cb93a602127a30cad5fa1b3034ba10cd4fddcfe5"
                + "f30bb05ffc2171b18d3200ef21bda8631a4b82af603277db7ebb752999303c06092a864886f70d010701301d060960864801"
                + "65030401020410d38e15cc9b02555ae95a75e5a7af86e98010c000f2c29b88ec5e4e6ba51159abae55").HexToByteArray();

            VerifyAlgorithmAes128(encodedMessage);
        }

        // Shared assertions: AES-128 OID; AES algorithms report KeyLength as 0
        // (the key size is implied by the OID itself).
        private static void VerifyAlgorithmAes128(byte[] encodedMessage)
        {
            EnvelopedCms ecms = new EnvelopedCms();
            ecms.Decode(encodedMessage);

            AlgorithmIdentifier algorithm = ecms.ContentEncryptionAlgorithm;
            Assert.NotNull(algorithm.Oid);
            Assert.Equal(Oids.Aes128, algorithm.Oid.Value);
            Assert.Equal(0, algorithm.KeyLength);
        }

        [Fact]
        public static void DecodeAlgorithmAes192_RoundTrip()
        {
            AlgorithmIdentifier algorithm = new AlgorithmIdentifier(new Oid(Oids.Aes192));
            ContentInfo contentInfo = new ContentInfo(new byte[] { 1, 2, 3 });
            EnvelopedCms ecms = new EnvelopedCms(contentInfo, algorithm);
            using (X509Certificate2 cert = Certificates.RSAKeyTransfer1.GetCertificate())
            {
                CmsRecipient cmsRecipient = new CmsRecipient(cert);
                ecms.Encrypt(cmsRecipient);
            }
            byte[] encodedMessage = ecms.Encode();

            VerifyAlgorithmAes192(encodedMessage);
        }

        [Fact]
        public static void DecodeAlgorithmAes192_FixedValue()
        {
            byte[] encodedMessage =
                ("3082011f06092a864886f70d010703a08201103082010c0201003181c83081c5020100302e301a311830160603550403130f"
                + "5253414b65795472616e7366657231021031d935fb63e8cfab48a0bf7b397b67c0300d06092a864886f70d01010105000481"
                + "8095870ef593f7234a6a12fc23f6dacd75d6e5a5ee05077b7390632c0beb2689a3dd587757e976992ffd26f6dd374fb64f47"
                + "6eb4d920a55d735935716671bc12dc65b84c29c5a72aa78a4480e19a28ac09395e708e99e1e9e9704ee4d077541bfed1d06d"
                + "32f3a7e9441fde9133858a0e825af04a36b5943e0f39eade1463de7c12303c06092a864886f70d010701301d060960864801"
                + "65030401160410d217d9a8bb30516d54aab00a5e6089b68010149eec8997deedcbad000ae6c1a7fb9d").HexToByteArray();

            VerifyAlgorithmAes192(encodedMessage);
        }

        // Shared assertions: AES-192 OID; KeyLength reported as 0 (implied by OID).
        private static void VerifyAlgorithmAes192(byte[] encodedMessage)
        {
            EnvelopedCms ecms = new EnvelopedCms();
            ecms.Decode(encodedMessage);

            AlgorithmIdentifier algorithm = ecms.ContentEncryptionAlgorithm;
            Assert.NotNull(algorithm.Oid);
            Assert.Equal(Oids.Aes192, algorithm.Oid.Value);
            Assert.Equal(0, algorithm.KeyLength);
        }

        [Fact]
        public static void DecodeAlgorithmAes256_RoundTrip()
        {
            AlgorithmIdentifier algorithm = new AlgorithmIdentifier(new Oid(Oids.Aes256));
            ContentInfo contentInfo = new ContentInfo(new byte[] { 1, 2, 3 });
            EnvelopedCms ecms = new EnvelopedCms(contentInfo, algorithm);
            using (X509Certificate2 cert = Certificates.RSAKeyTransfer1.GetCertificate())
            {
                CmsRecipient cmsRecipient = new CmsRecipient(cert);
                ecms.Encrypt(cmsRecipient);
            }
            byte[] encodedMessage = ecms.Encode();

            VerifyAlgorithmAes256(encodedMessage);
        }

        [Fact]
        public static void DecodeAlgorithmAes256_FixedValue()
        {
            byte[] encodedMessage =
                ("3082011f06092a864886f70d010703a08201103082010c0201003181c83081c5020100302e301a311830160603550403130f"
                + "5253414b65795472616e7366657231021031d935fb63e8cfab48a0bf7b397b67c0300d06092a864886f70d01010105000481"
                + "809605c24f3bc99b3437f3e12e44c3a49c91ca0dc94a473fc21388a0f1c69486befa97eb7b9a96e2a9309f89612ad108d1c7"
                + "2db6cc66426253e639939b9be852df9212fba9bb52f857a39a26c04a20bae7b7620a1e53873a7ef03c4139edc7a50ee297ea"
                + "fdc1372596ef299e71b6d4db146cad48a8485e17b3604a56958afdbe83303c06092a864886f70d010701301d060960864801"
                + "650304012a04100b85a6899050456469102f41aaa685158010b3008bd0eb863574ecbe46a5cc91a99c").HexToByteArray();

            VerifyAlgorithmAes256(encodedMessage);
        }

        // Shared assertions: AES-256 OID; KeyLength reported as 0 (implied by OID).
        private static void VerifyAlgorithmAes256(byte[] encodedMessage)
        {
            EnvelopedCms ecms = new EnvelopedCms();
            ecms.Decode(encodedMessage);

            AlgorithmIdentifier algorithm = ecms.ContentEncryptionAlgorithm;
            Assert.NotNull(algorithm.Oid);
            Assert.Equal(Oids.Aes256, algorithm.Oid.Value);
            Assert.Equal(0, algorithm.KeyLength);
        }
    }
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using Terraria.ModLoader.Default;
using Terraria.ModLoader.Exceptions;
using Terraria.Social;
using Terraria.Utilities;
namespace Terraria.ModLoader.IO
{
internal static class PlayerIO
{
        //make Terraria.Player.ENCRYPTION_KEY internal
        //add to end of Terraria.Player.SavePlayer
        /// <summary>
        /// Writes all mod-related player state to the ".tplr" companion file next to
        /// the vanilla save, backing up any existing file first. Cloud saves are
        /// serialized to memory and handed to the social cloud API instead of disk.
        /// </summary>
        internal static void Save(Player player, string path, bool isCloudSave)
        {
            path = Path.ChangeExtension(path, ".tplr");
            // Keep a .bak copy of the previous save in case this write is interrupted.
            if (FileUtilities.Exists(path, isCloudSave))
                FileUtilities.Copy(path, path + ".bak", isCloudSave);
            var tag = new TagCompound {
                ["armor"] = SaveInventory(player.armor),
                ["dye"] = SaveInventory(player.dye),
                ["inventory"] = SaveInventory(player.inventory),
                ["miscEquips"] = SaveInventory(player.miscEquips),
                ["miscDyes"] = SaveInventory(player.miscDyes),
                ["bank"] = SaveInventory(player.bank.item),
                ["bank2"] = SaveInventory(player.bank2.item),
                ["bank3"] = SaveInventory(player.bank3.item),
                ["modData"] = SaveModData(player),
                ["modBuffs"] = SaveModBuffs(player)
            };
            using (Stream stream = isCloudSave ? (Stream)new MemoryStream() : (Stream)new FileStream(path, FileMode.Create))
            {
                TagIO.ToStream(tag, stream);
                if (isCloudSave && SocialAPI.Cloud != null)
                    SocialAPI.Cloud.Write(path, ((MemoryStream)stream).ToArray());
            }
        }
        //add near end of Terraria.Player.LoadPlayer before accessory check
        /// <summary>
        /// Restores mod-related player state from the ".tplr" companion file, if one
        /// exists. Falls back to the legacy binary format when the file does not start
        /// with the gzip magic bytes that TagIO-written saves carry.
        /// </summary>
        internal static void Load(Player player, string path, bool isCloudSave)
        {
            path = Path.ChangeExtension(path, ".tplr");
            if (!FileUtilities.Exists(path, isCloudSave))
                return;
            var buf = FileUtilities.ReadAllBytes(path, isCloudSave);
            // 0x1F 0x8B is the gzip header; anything else is the pre-TagIO legacy format.
            if (buf[0] != 0x1F || buf[1] != 0x8B)
            {
                LoadLegacy(player, buf);
                return;
            }
            var tag = TagIO.FromStream(new MemoryStream(buf));
            LoadInventory(player.armor, tag.GetList<TagCompound>("armor"));
            LoadInventory(player.dye, tag.GetList<TagCompound>("dye"));
            LoadInventory(player.inventory, tag.GetList<TagCompound>("inventory"));
            LoadInventory(player.miscEquips, tag.GetList<TagCompound>("miscEquips"));
            LoadInventory(player.miscDyes, tag.GetList<TagCompound>("miscDyes"));
            LoadInventory(player.bank.item, tag.GetList<TagCompound>("bank"));
            LoadInventory(player.bank2.item, tag.GetList<TagCompound>("bank2"));
            LoadInventory(player.bank3.item, tag.GetList<TagCompound>("bank3"));
            LoadModData(player, tag.GetList<TagCompound>("modData"));
            LoadModBuffs(player, tag.GetList<TagCompound>("modBuffs"));
        }
public static List<TagCompound> SaveInventory(Item[] inv)
{
var list = new List<TagCompound>();
for (int k = 0; k < inv.Length; k++)
{
if (ItemLoader.NeedsModSaving(inv[k]))
{
var tag = ItemIO.Save(inv[k]);
tag.Set("slot", (short) k);
list.Add(tag);
}
}
return list.Count > 0 ? list : null;
}
public static void LoadInventory(Item[] inv, IList<TagCompound> list)
{
foreach (var tag in list)
inv[tag.GetShort("slot")] = ItemIO.Load(tag);
}
/// <summary>
/// Collects the custom save payload of every ModPlayer attached to the
/// player. ModPlayers whose Save() returns null contribute nothing.
/// </summary>
internal static List<TagCompound> SaveModData(Player player)
{
    var saved = new List<TagCompound>();
    foreach (var modPlayer in player.modPlayers)
    {
        var payload = modPlayer.Save();
        if (payload != null)
        {
            // Record the owning mod and ModPlayer name so the data can be
            // routed back on load (or parked if the mod is missing).
            saved.Add(new TagCompound {
                ["mod"] = modPlayer.mod.Name,
                ["name"] = modPlayer.Name,
                ["data"] = payload
            });
        }
    }
    return saved;
}
// Routes saved per-mod player data back to the owning ModPlayer. Data whose
// mod (or ModPlayer) is no longer loaded is parked on the ModLoader's
// MysteryPlayer so it survives a save round trip and can be restored later.
internal static void LoadModData(Player player, IList<TagCompound> list)
{
    foreach (var tag in list)
    {
        var mod = ModLoader.GetMod(tag.GetString("mod"));
        var modPlayer = mod == null ? null : player.GetModPlayer(mod, tag.GetString("name"));
        if (modPlayer != null)
        {
            try
            {
                // "legacyData" carries a raw byte payload migrated from the
                // old binary format; current saves store a nested compound.
                if (tag.ContainsKey("legacyData"))
                    modPlayer.LoadLegacy(new BinaryReader(new MemoryStream(tag.GetByteArray("legacyData"))));
                else
                    modPlayer.Load(tag.GetCompound("data"));
            }
            catch (Exception e)
            {
                // Wrap so the failure is attributed to the mod that owns the data.
                throw new CustomModDataException(mod,
                    "Error in reading custom player data for " + mod.Name, e);
            }
        }
        else
        {
            // Owning mod unavailable: stash the raw tag for later restoration.
            player.GetModPlayer<MysteryPlayer>(ModLoader.GetMod("ModLoader")).data.Add(tag);
        }
    }
}
/// <summary>
/// Saves every active mod buff that is allowed to persist. Each entry records
/// "index": the position the buff would occupy if only vanilla buffs existed,
/// so it can be re-inserted at the right spot on load.
/// </summary>
internal static List<TagCompound> SaveModBuffs(Player player)
{
    var saved = new List<TagCompound>();
    byte vanillaIndex = 0;
    for (int slot = 0; slot < Player.maxBuffs; slot++)
    {
        int buff = player.buffType[slot];
        // Empty slots and buffs flagged as non-persistent are skipped entirely.
        if (buff == 0 || Main.buffNoSave[buff])
            continue;
        if (!BuffLoader.IsModBuff(buff))
        {
            // Vanilla buffs are saved by Terraria itself; just count them.
            vanillaIndex++;
            continue;
        }
        var modBuff = BuffLoader.GetBuff(buff);
        saved.Add(new TagCompound {
            ["index"] = vanillaIndex,
            ["mod"] = modBuff.mod.Name,
            ["name"] = modBuff.Name,
            ["time"] = player.buffTime[slot]
        });
    }
    return saved;
}
// Re-inserts saved mod buffs into the player's buff arrays at the positions
// they held relative to the (already loaded) vanilla buffs.
internal static void LoadModBuffs(Player player, IList<TagCompound> list)
{
    //buffs list is guaranteed to be compacted
    int buffCount = Player.maxBuffs;
    while (buffCount > 0 && player.buffType[buffCount - 1] == 0)
        buffCount--;
    //iterate the list in reverse, insert each buff at its index and push the buffs after it up a slot
    foreach (var tag in list.Reverse()) {
        var mod = ModLoader.GetMod(tag.GetString("mod"));
        int type = mod?.BuffType(tag.GetString("name")) ?? 0;
        // Skip buffs whose mod is missing or that the mod no longer defines.
        if (type == 0)
            continue;
        // Clamp to the end of the occupied region so no gap is created.
        int index = Math.Min(tag.GetByte("index"), buffCount);
        // Shift everything at and after the slot up by one, then drop the buff in.
        Array.Copy(player.buffType, index, player.buffType, index+1, Player.maxBuffs-index-1);
        Array.Copy(player.buffTime, index, player.buffTime, index+1, Player.maxBuffs-index-1);
        player.buffType[index] = type;
        player.buffTime[index] = tag.GetInt("time");
    }
}
// Decrypts and loads a legacy-format ".tplr" save (the pre-tag-compound
// binary format), using the same Rijndael key vanilla player files use.
private static void LoadLegacy(Player player, byte[] buffer)
{
    const int numFlagBytes = 2;
    // FIX: RijndaelManaged is IDisposable and was previously never disposed;
    // wrap it in a using so its key material and native resources are released.
    using (RijndaelManaged rijndaelManaged = new RijndaelManaged())
    {
        rijndaelManaged.Padding = PaddingMode.None;
        using (MemoryStream stream = new MemoryStream(buffer))
        {
            using (CryptoStream cryptoStream = new CryptoStream(stream, rijndaelManaged.CreateDecryptor(Player.ENCRYPTION_KEY, Player.ENCRYPTION_KEY), CryptoStreamMode.Read))
            {
                using (BinaryReader reader = new BinaryReader(cryptoStream))
                {
                    // First byte is the number of section-flag bytes; zero means
                    // there is nothing to load.
                    byte limit = reader.ReadByte();
                    if (limit == 0)
                    {
                        return;
                    }
                    byte[] flags = reader.ReadBytes(limit);
                    if (flags.Length < numFlagBytes)
                    {
                        // Pad with zeroes so older saves with fewer flag bytes
                        // can still be indexed safely below.
                        Array.Resize(ref flags, numFlagBytes);
                    }
                    LoadLegacyModPlayer(player, flags, reader);
                }
            }
        }
    }
}
// Reads the legacy save's sections in write order. Each flag bit records
// whether the corresponding section was present when the file was written.
private static void LoadLegacyModPlayer(Player player, byte[] flags, BinaryReader reader)
{
    if ((flags[0] & 1) != 0)
    {
        ItemIO.LoadLegacyInventory(player.armor, reader);
    }
    if ((flags[0] & 2) != 0)
    {
        ItemIO.LoadLegacyInventory(player.dye, reader);
    }
    if ((flags[0] & 4) != 0)
    {
        // Main inventory also persists stack counts and favorited state.
        ItemIO.LoadLegacyInventory(player.inventory, reader, true, true);
    }
    if ((flags[0] & 8) != 0)
    {
        ItemIO.LoadLegacyInventory(player.miscEquips, reader);
    }
    if ((flags[0] & 16) != 0)
    {
        ItemIO.LoadLegacyInventory(player.miscDyes, reader);
    }
    if ((flags[0] & 32) != 0)
    {
        ItemIO.LoadLegacyInventory(player.bank.item, reader, true);
    }
    if ((flags[0] & 64) != 0)
    {
        ItemIO.LoadLegacyInventory(player.bank2.item, reader, true);
    }
    if ((flags[0] & 128) != 0)
    {
        LoadLegacyModData(player, reader);
    }
    if ((flags[1] & 1) != 0)
    {
        LoadLegacyModBuffs(player, reader);
    }
}
// Reads the legacy per-mod data section: a count followed by
// (modName, modPlayerName, length-prefixed byte payload) records. Payloads
// for missing mods are parked on MysteryPlayer as "legacyData" tags.
private static void LoadLegacyModData(Player player, BinaryReader reader)
{
    int count = reader.ReadUInt16();
    for (int k = 0; k < count; k++)
    {
        string modName = reader.ReadString();
        string name = reader.ReadString();
        byte[] data = reader.ReadBytes(reader.ReadUInt16());
        Mod mod = ModLoader.GetMod(modName);
        ModPlayer modPlayer = mod == null ? null : player.GetModPlayer(mod, name);
        if (modPlayer != null)
        {
            // Hand the raw payload to the ModPlayer through its own reader so
            // it cannot over-read into the next record.
            using (MemoryStream stream = new MemoryStream(data))
            {
                using (BinaryReader customReader = new BinaryReader(stream))
                {
                    try
                    {
                        modPlayer.LoadLegacy(customReader);
                    }
                    catch (Exception e)
                    {
                        // Attribute the failure to the mod that owns the data.
                        throw new CustomModDataException(mod,
                            "Error in reading custom player data for " + mod.Name, e);
                    }
                }
            }
        }
        else
        {
            // Mod unavailable: preserve the payload for a later reload.
            var tag = new TagCompound {
                ["mod"] = modName,
                ["name"] = name,
                ["legacyData"] = data
            };
            player.GetModPlayer<MysteryPlayer>(ModLoader.GetMod("ModLoader")).data.Add(tag);
        }
    }
}
// Reads the legacy mod-buff section and splices each buff back into the
// player's buff arrays; afterwards compacts the arrays so any holes left by
// buffs whose mods are missing are closed up.
private static void LoadLegacyModBuffs(Player player, BinaryReader reader)
{
    int num = reader.ReadByte();
    // Each unresolvable buff shifts every later saved index down by one.
    int minusIndex = 0;
    for (int k = 0; k < num; k++)
    {
        int index = reader.ReadByte() - minusIndex;
        string modName = reader.ReadString();
        string name = reader.ReadString();
        int time = reader.ReadInt32();
        Mod mod = ModLoader.GetMod(modName);
        int type = mod == null ? 0 : mod.BuffType(name);
        if (type > 0)
        {
            // Shift later buffs up one slot, then insert this one at its index.
            for (int j = Player.maxBuffs - 1; j > index; j--)
            {
                player.buffType[j] = player.buffType[j - 1];
                player.buffTime[j] = player.buffTime[j - 1];
            }
            player.buffType[index] = type;
            player.buffTime[index] = time;
        }
        else
        {
            minusIndex++;
        }
    }
    // Compaction pass: slide every occupied slot left over any empty slots so
    // the active buffs form a contiguous prefix of the arrays.
    for (int k = 1; k < Player.maxBuffs; k++)
    {
        if (player.buffType[k] > 0)
        {
            int j = k - 1;
            while (player.buffType[j] == 0)
            {
                player.buffType[j] = player.buffType[j + 1];
                player.buffTime[j] = player.buffTime[j + 1];
                player.buffType[j + 1] = 0;
                player.buffTime[j + 1] = 0;
                j--;
            }
        }
    }
}
//add to end of Terraria.IO.PlayerFileData.MoveToCloud
// Moves the companion ".tplr" file to cloud storage alongside the vanilla save.
internal static void MoveToCloud(string localPath, string cloudPath)
{
    localPath = Path.ChangeExtension(localPath, ".tplr");
    cloudPath = Path.ChangeExtension(cloudPath, ".tplr");
    if (!File.Exists(localPath))
        return;
    FileUtilities.MoveToCloud(localPath, cloudPath);
}
//add to end of Terraria.IO.PlayerFileData.MoveToLocal
//in Terraria.IO.PlayerFileData.MoveToLocal before iterating through map files add
// matchPattern = Regex.Escape(Main.CloudPlayerPath) + "/" + Regex.Escape(fileName) + "/.+\\.tmap";
// files.AddRange(SocialAPI.Cloud.GetFiles(matchPattern));
// Moves the companion ".tplr" file from cloud storage back to local disk.
internal static void MoveToLocal(string cloudPath, string localPath)
{
    cloudPath = Path.ChangeExtension(cloudPath, ".tplr");
    localPath = Path.ChangeExtension(localPath, ".tplr");
    if (!FileUtilities.Exists(cloudPath, true))
        return;
    FileUtilities.MoveToLocal(cloudPath, localPath);
}
//add to Terraria.Player.GetFileData after moving vanilla .bak file
// Promotes the ".tplr.bak" backup over the main ".tplr" file, mirroring what
// vanilla does for its own ".plr.bak" file.
internal static void LoadBackup(string path, bool cloudSave)
{
    path = Path.ChangeExtension(path, ".tplr");
    string backupPath = path + ".bak";
    if (FileUtilities.Exists(backupPath, cloudSave))
    {
        FileUtilities.Move(backupPath, path, cloudSave, true);
    }
}
//in Terraria.Main.ErasePlayer between the two try catches add
// PlayerIO.ErasePlayer(Main.PlayerList[i].Path, Main.PlayerList[i].IsCloudSave);
// Deletes the mod-data ".tplr" file (and its backup) when the corresponding
// vanilla player is erased. Failures are intentionally swallowed to match
// Terraria's own erase behavior.
internal static void ErasePlayer(string path, bool cloudSave)
{
    path = Path.ChangeExtension(path, ".tplr");
    try
    {
        FileUtilities.Delete(path, cloudSave);
        FileUtilities.Delete(path + ".bak", cloudSave);
    }
    catch
    {
        //just copying the Terraria code which also has an empty catch
    }
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
// =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// Sorting.cs
//
// Support for sorting.
//
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
using System;
using System.Collections.Generic;
using System.Threading;
using System.Diagnostics.Contracts;
namespace System.Linq.Parallel
{
//---------------------------------------------------------------------------------------
// The sort helper abstraction hides the implementation of our parallel merge sort. See
// comments below for more details. In summary, there will be one sort helper per
// partition. Each will, in parallel read the whole key/value set from its input,
// perform a local sort on this data, and then cooperatively merge with other concurrent
// tasks to generate a single sorted output. The local sort step is done using a simple
// quick-sort algorithm. Then we use a log(p) reduction to perform merges in parallel;
// during each round of merges, half of the threads will stop doing work and may return.
// At the end, one thread will remain and it holds the final sorted output.
//
internal abstract class SortHelper<TInputOutput>
{
    // Runs this helper's share of the parallel merge sort. For the partition
    // that survives the final merge phase this returns the fully sorted
    // output; other partitions return their (intermediate) shared cell.
    internal abstract TInputOutput[] Sort();
}
internal class SortHelper<TInputOutput, TKey> : SortHelper<TInputOutput>, IDisposable
{
private QueryOperatorEnumerator<TInputOutput, TKey> _source; // The data source from which to pull data.
private int _partitionCount; // The partition count.
private int _partitionIndex; // This helper's index.
// This data is shared among all partitions.
private QueryTaskGroupState _groupState; // To communicate status, e.g. cancellation.
private int[][] _sharedIndices; // Shared set of indices used during sorting.
private GrowingArray<TKey>[] _sharedKeys; // Shared keys with which to compare elements.
private TInputOutput[][] _sharedValues; // The actual values used for comparisons.
private Barrier[][] _sharedBarriers; // A matrix of barriers used for synchronizing during merges.
private OrdinalIndexState _indexState; // State of the order index
private IComparer<TKey> _keyComparer; // Comparer for the order keys
//---------------------------------------------------------------------------------------
// Creates a single sort helper object. This is marked private to ensure the only
// snippet of code that creates one is the factory, since creating many implies some
// implementation detail in terms of dependencies which other places in the codebase
// shouldn't need to worry about.
//
private SortHelper(QueryOperatorEnumerator<TInputOutput, TKey> source, int partitionCount, int partitionIndex,
    QueryTaskGroupState groupState, int[][] sharedIndices,
    OrdinalIndexState indexState, IComparer<TKey> keyComparer,
    GrowingArray<TKey>[] sharedkeys, TInputOutput[][] sharedValues, Barrier[][] sharedBarriers)
{
    Contract.Assert(source != null);
    Contract.Assert(groupState != null);
    Contract.Assert(sharedIndices != null);
    Contract.Assert(sharedkeys != null);
    Contract.Assert(sharedValues != null);
    Contract.Assert(sharedBarriers != null);
    // FIX: this assert was previously duplicated (it appeared again after the
    // commented-out check below); one occurrence is sufficient.
    Contract.Assert(groupState.CancellationState.MergedCancellationToken != null);
    Contract.Assert(sharedIndices.Length <= sharedkeys.Length);
    Contract.Assert(sharedIndices.Length == sharedValues.Length);
    // Multi-dim arrays are simulated using jagged arrays.
    // Because of that, when phaseCount == 0, we end up with an empty sharedBarrier array.
    // Since there are no cases when phaseCount == 0 where we actually access the sharedBarriers, I am removing this check.
    // Contract.Assert(sharedIndices.Length == sharedBarriers[0].Length);
    _source = source;
    _partitionCount = partitionCount;
    _partitionIndex = partitionIndex;
    _groupState = groupState;
    _sharedIndices = sharedIndices;
    _indexState = indexState;
    _keyComparer = keyComparer;
    _sharedKeys = sharedkeys;
    _sharedValues = sharedValues;
    _sharedBarriers = sharedBarriers;
    Contract.Assert(_sharedKeys.Length >= _sharedValues.Length);
}
//---------------------------------------------------------------------------------------
// Factory method to create a bunch of sort helpers that are all related. Once created,
// these helpers must all run concurrently with one another.
//
// Arguments:
// partitions - the input data partitions to be sorted
// groupState - common state used for communication (e.g. cancellation)
//
// Return Value:
// An array of helpers, one for each partition.
//
internal static SortHelper<TInputOutput, TKey>[] GenerateSortHelpers(
    PartitionedStream<TInputOutput, TKey> partitions, QueryTaskGroupState groupState)
{
    int degreeOfParallelism = partitions.PartitionCount;
    SortHelper<TInputOutput, TKey>[] helpers = new SortHelper<TInputOutput, TKey>[degreeOfParallelism];
    // Calculate the next highest power of two greater than or equal to the DOP.
    // Also, calculate phaseCount = log2(degreeOfParallelismPow2)
    int degreeOfParallelismPow2 = 1, phaseCount = 0;
    while (degreeOfParallelismPow2 < degreeOfParallelism)
    {
        phaseCount++;
        degreeOfParallelismPow2 <<= 1;
    }
    // Initialize shared objects used during sorting. One cell per partition;
    // the helpers exchange data through these arrays during the merges.
    int[][] sharedIndices = new int[degreeOfParallelism][];
    GrowingArray<TKey>[] sharedKeys = new GrowingArray<TKey>[degreeOfParallelism];
    TInputOutput[][] sharedValues = new TInputOutput[degreeOfParallelism][];
    // Note that it is possible that phaseCount is 0.
    Barrier[][] sharedBarriers = JaggedArray<Barrier>.Allocate(phaseCount, degreeOfParallelism);
    if (degreeOfParallelism > 1)
    {
        // Initialize the barriers we need. Due to the logarithmic reduction, we don't
        // need to populate the whole matrix.
        int offset = 1;
        for (int i = 0; i < sharedBarriers.Length; i++)
        {
            // We have jagged arrays.
            for (int j = 0; j < sharedBarriers[i].Length; j++)
            {
                // As the phases increase, the barriers required become more and more sparse.
                // Each barrier synchronizes exactly one pair of partners, hence Barrier(2).
                if ((j % offset) == 0)
                {
                    sharedBarriers[i][j] = new Barrier(2);
                }
            }
            offset *= 2;
        }
    }
    // Lastly populate the array of sort helpers.
    for (int i = 0; i < degreeOfParallelism; i++)
    {
        helpers[i] = new SortHelper<TInputOutput, TKey>(
            partitions[i], degreeOfParallelism, i,
            groupState, sharedIndices,
            partitions.OrdinalIndexState, partitions.KeyComparer,
            sharedKeys, sharedValues, sharedBarriers);
    }
    return helpers;
}
//---------------------------------------------------------------------------------------
// Disposes of this sort helper's expensive state.
//
public void Dispose()
{
    // Only partition 0 tears the barriers down: the other partitions still
    // read the shared barrier matrix while they finish, so disposal must be
    // deferred to the one helper that is guaranteed to outlive the merges.
    if (_partitionIndex != 0)
    {
        return;
    }
    foreach (Barrier[] phaseBarriers in _sharedBarriers)
    {
        foreach (Barrier barrier in phaseBarriers)
        {
            // The matrix is sparsely populated (see GenerateSortHelpers).
            if (barrier != null)
            {
                barrier.Dispose();
            }
        }
    }
}
//---------------------------------------------------------------------------------------
// Sorts the data, possibly returning a result.
//
// Notes:
// This method makes some pretty fundamental assumptions about what concurrency
// exists in the system. Namely, it assumes all SortHelpers are running in
// parallel. If they aren't Sort will end up waiting for certain events that
// will never happen -- i.e. we will deadlock.
//
internal override TInputOutput[] Sort()
{
    // Step 1. Accumulate this partitions' worth of input.
    GrowingArray<TKey> sourceKeys = null;
    List<TInputOutput> sourceValues = null;
    BuildKeysFromSource(ref sourceKeys, ref sourceValues);
    Contract.Assert(sourceValues != null, "values weren't populated");
    Contract.Assert(sourceKeys != null, "keys weren't populated");
    // Step 2. Locally sort this partition's key indices in-place.
    QuickSortIndicesInPlace(sourceKeys, sourceValues, _indexState);
    // Step 3. Enter into the merging phases, each separated by several barriers.
    if (_partitionCount > 1)
    {
        // We only need to merge if there is more than 1 partition.
        MergeSortCooperatively();
    }
    // By now this partition's shared cell holds its (possibly final) output;
    // for partition 0 after the last phase this is the fully sorted result.
    return _sharedValues[_partitionIndex];
}
//-----------------------------------------------------------------------------------
// Generates a list of values and keys from the data source. After calling this,
// the keys and values lists will be populated; each key at index i corresponds to
// the value at index i in the other list.
//
// Notes:
// Should only be called once per sort helper.
//
private void BuildKeysFromSource(ref GrowingArray<TKey> keys, ref List<TInputOutput> values)
{
    values = new List<TInputOutput>();
    // Enumerate the whole input set, generating a key set in the process.
    CancellationToken cancelToken = _groupState.CancellationState.MergedCancellationToken;
    try
    {
        TInputOutput current = default(TInputOutput);
        TKey currentKey = default(TKey);
        bool hadNext = _source.MoveNext(ref current, ref currentKey);
        if (keys == null)
        {
            keys = new GrowingArray<TKey>();
        }
        if (hadNext)
        {
            int i = 0;
            do
            {
                // Poll for cancellation only every POLL_INTERVAL iterations to
                // keep the common path cheap.
                if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                    CancellationState.ThrowIfCanceled(cancelToken);
                // Accumulate the keys and values so that we can sort them in a moment.
                keys.Add(currentKey);
                values.Add(current);
            }
            while (_source.MoveNext(ref current, ref currentKey));
        }
    }
    finally
    {
        // Always release the enumerator, even on cancellation or exception.
        _source.Dispose();
    }
}
//-----------------------------------------------------------------------------------
// Produces a list of indices and sorts them in place using a local sort.
//
// Notes:
// Each element in the indices array is an index which refers to an element in
// the key/value array. After calling this routine, the indices will be ordered
// such that the keys they refer to are in ascending or descending order,
// according to the sort criteria used.
//
private void QuickSortIndicesInPlace(GrowingArray<TKey> keys, List<TInputOutput> values, OrdinalIndexState ordinalIndexState)
{
    Contract.Assert(keys != null);
    Contract.Assert(values != null);
    Contract.Assert(keys.Count == values.Count);
    // Generate a list of keys in forward order. We will sort them in a moment.
    int[] indices = new int[values.Count];
    for (int i = 0; i < indices.Length; i++)
    {
        indices[i] = i;
    }
    // Now sort the indices in place. If the index state is already at least
    // "Increasing", the identity permutation is already sorted and the
    // quick-sort can be skipped entirely.
    if (indices.Length > 1
        && ordinalIndexState.IsWorseThan(OrdinalIndexState.Increasing))
    {
        QuickSort(0, indices.Length - 1, keys.InternalArray, indices, _groupState.CancellationState.MergedCancellationToken);
    }
    if (_partitionCount == 1)
    {
        // If there is only one partition, we will produce the final value set now,
        // since there will be no merge afterward (which is where we usually do this).
        TInputOutput[] sortedValues = new TInputOutput[values.Count];
        for (int i = 0; i < indices.Length; i++)
        {
            sortedValues[i] = values[indices[i]];
        }
        _sharedValues[_partitionIndex] = sortedValues;
    }
    else
    {
        // Otherwise, a merge will happen. Generate the shared data structures.
        _sharedIndices[_partitionIndex] = indices;
        _sharedKeys[_partitionIndex] = keys;
        _sharedValues[_partitionIndex] = new TInputOutput[values.Count];
        // Copy local structures to shared space.
        values.CopyTo(_sharedValues[_partitionIndex]);
    }
}
//-----------------------------------------------------------------------------------
// Works cooperatively with other concurrent sort helpers to produce a final sorted
// output list of data. Here is an overview of the algorithm used.
//
// During each phase, we must communicate with a partner task. As a simple
// illustration, imagine we have 8 partitions (P=8), numbered 0-7. There will be
// Log2(O)+2 phases (separated by barriers), where O is the next power of two greater
// than or equal to P, in the sort operation:
//
// Pairs: (P = 8)
// phase=L: [0][1] [2][3] [4][5] [6][7]
// phase=0: [0,1] [2,3] [4,5] [6,7]
// phase=1: [0,2] [4,6]
// phase=2: [0,4]
// phase=M: [0]
//
// During phase L, each partition locally sorts its data. Then, at each subsequent
// phase in the logarithmic reduction, two partitions are paired together and cooperate
// to accomplish a portion of the merge. The left one then goes on to choose another
// partner, in the next phase, and the right one exits. And so on, until phase M, when
// there is just one partition left (the 0th), which is when it may publish the final
// output from the sort operation.
//
// Notice we mentioned rounding up to the next power of two when determining the number
// of phases. Values of P which aren't powers of 2 are slightly problematic, because
// they create a load imbalance in one of the partitions and heighten the depth of the
// logarithmic tree. As an illustration, imagine this case:
//
// Pairs: (P = 5)
// phase=L: [0][1] [2][3] [4]
// phase=0: [0,1] [2,3] [4,X] [X,X]
// phase=1: [0,2] [4,X]
// phase=2: [0,4]
// phase=M: [0]
//
// Partition #4 in this example performs its local sort during phase L, but then has nothing
// to do during phases 0 and 2. (I.e. it has nobody to merge with.) Only during phase 2
// does it then resume work and help phase 2 perform its merge. This is modeled a bit like
// there were actually 8 partitions, which is the next power of two greater than or equal to
// 5. This example was chosen as an extreme case of imbalance. We stall a processor (the 5th)
// for two complete phases. If P = 6 or 7, the problem would not be nearly so bad, but if
// P = 9, the last partition would stall for yet another phase (and so on for every power of
// two boundary). We handle these cases, but note that an overabundance of them will probably
// negatively impact speedups.
//
private void MergeSortCooperatively()
{
    CancellationToken cancelToken = _groupState.CancellationState.MergedCancellationToken;
    int phaseCount = _sharedBarriers.Length;
    // One iteration per merge phase of the logarithmic reduction; each phase
    // pairs this partition with a partner (see the overview comment above).
    for (int phase = 0; phase < phaseCount; phase++)
    {
        bool isLastPhase = (phase == (phaseCount - 1));
        // Calculate our partner for this phase and the next.
        int partnerIndex = ComputePartnerIndex(phase);
        // If we have a partner (see above for non power of 2 cases and why the index returned might
        // be out of bounds), we will coordinate with the partner to produce the merged output.
        if (partnerIndex < _partitionCount)
        {
            // Cache references to our local data.
            int[] myIndices = _sharedIndices[_partitionIndex];
            GrowingArray<TKey> myKeys = _sharedKeys[_partitionIndex];
            TKey[] myKeysArr = myKeys.InternalArray;
            TInputOutput[] myValues = _sharedValues[_partitionIndex];
            // First we must rendezvous with our merge partner so we know the previous sort
            // and merge phase has been completed. By convention, we always use the left-most
            // partner's barrier for this; all that matters is that both uses the same.
            _sharedBarriers[phase][Math.Min(_partitionIndex, partnerIndex)].SignalAndWait(cancelToken);
            // Grab the two sorted inputs and then merge them cooperatively into one list. One
            // worker merges from left-to-right until it's placed elements up to the half-way
            // point, and the other worker does the same, but only from right-to-left.
            if (_partitionIndex < partnerIndex)
            {
                // Before moving on to the actual merge, the left-most partition will allocate data
                // to hold the merged indices and key/value pairs.
                // First, remember a copy of all of the partner's lists.
                int[] rightIndices = _sharedIndices[partnerIndex];
                TKey[] rightKeys = _sharedKeys[partnerIndex].InternalArray;
                TInputOutput[] rightValues = _sharedValues[partnerIndex];
                // We copy the our own items into the right's (overwriting its values) so that it can
                // retrieve them after the barrier. This is an exchange operation.
                _sharedIndices[partnerIndex] = myIndices;
                _sharedKeys[partnerIndex] = myKeys;
                _sharedValues[partnerIndex] = myValues;
                int leftCount = myValues.Length;
                int rightCount = rightValues.Length;
                int totalCount = leftCount + rightCount;
                // Now allocate the lists into which the merged data will go. Share this
                // with the other thread so that it can place data into it as well.
                int[] mergedIndices = null;
                TInputOutput[] mergedValues = new TInputOutput[totalCount];
                // Only on the last phase do we need to remember indices and keys.
                if (!isLastPhase)
                {
                    mergedIndices = new int[totalCount];
                }
                // Publish our newly allocated merged data structures.
                _sharedIndices[_partitionIndex] = mergedIndices;
                _sharedKeys[_partitionIndex] = myKeys;
                _sharedValues[_partitionIndex] = mergedValues;
                Contract.Assert(myKeysArr != null);
                _sharedBarriers[phase][_partitionIndex].SignalAndWait(cancelToken);
                // Merge the left half into the shared merged space. This is a normal merge sort with
                // the caveat that we stop merging once we reach the half-way point (since our partner
                // is doing the same for the right half). Note that during the last phase we only
                // copy the values and not the indices or keys.
                int m = (totalCount + 1) / 2;
                int i = 0, j0 = 0, j1 = 0;
                while (i < m)
                {
                    if ((i & CancellationState.POLL_INTERVAL) == 0)
                        CancellationState.ThrowIfCanceled(cancelToken);
                    if (j0 < leftCount && (j1 >= rightCount ||
                        _keyComparer.Compare(myKeysArr[myIndices[j0]],
                        rightKeys[rightIndices[j1]]) <= 0))
                    {
                        if (isLastPhase)
                        {
                            mergedValues[i] = myValues[myIndices[j0]];
                        }
                        else
                        {
                            mergedIndices[i] = myIndices[j0];
                        }
                        j0++;
                    }
                    else
                    {
                        if (isLastPhase)
                        {
                            mergedValues[i] = rightValues[rightIndices[j1]];
                        }
                        else
                        {
                            // Right-side indices are rebased past the left's values.
                            mergedIndices[i] = leftCount + rightIndices[j1];
                        }
                        j1++;
                    }
                    i++;
                }
                // If it's not the last phase, we just bulk propagate the keys and values.
                if (!isLastPhase && leftCount > 0)
                {
                    Array.Copy(myValues, 0, mergedValues, 0, leftCount);
                }
                // And now just wait for the second half. We never reuse the same barrier across multiple
                // phases, so we can always dispose of it when we wake up.
                _sharedBarriers[phase][_partitionIndex].SignalAndWait(cancelToken);
            }
            else
            {
                // Wait for the other partition to allocate the shared data.
                _sharedBarriers[phase][partnerIndex].SignalAndWait(cancelToken);
                // After the barrier, the other partition will have made two things available to us:
                // (1) its own indices, keys, and values, stored in the cell that used to hold our data,
                // and (2) the arrays into which merged data will go, stored in its shared array cells.
                // We will snag references to all of these things.
                int[] leftIndices = _sharedIndices[_partitionIndex];
                TKey[] leftKeys = _sharedKeys[_partitionIndex].InternalArray;
                TInputOutput[] leftValues = _sharedValues[_partitionIndex];
                int[] mergedIndices = _sharedIndices[partnerIndex];
                GrowingArray<TKey> mergedKeys = _sharedKeys[partnerIndex];
                TInputOutput[] mergedValues = _sharedValues[partnerIndex];
                Contract.Assert(leftValues != null);
                Contract.Assert(leftKeys != null);
                int leftCount = leftValues.Length;
                int rightCount = myValues.Length;
                int totalCount = leftCount + rightCount;
                // Merge the right half into the shared merged space. This is a normal merge sort with
                // the caveat that we stop merging once we reach the half-way point (since our partner
                // is doing the same for the left half). Note that during the last phase we only
                // copy the values and not the indices or keys.
                int m = (totalCount + 1) / 2;
                int i = totalCount - 1, j0 = leftCount - 1, j1 = rightCount - 1;
                while (i >= m)
                {
                    if ((i & CancellationState.POLL_INTERVAL) == 0)
                        CancellationState.ThrowIfCanceled(cancelToken);
                    if (j0 >= 0 && (j1 < 0 ||
                        _keyComparer.Compare(leftKeys[leftIndices[j0]],
                        myKeysArr[myIndices[j1]]) > 0))
                    {
                        if (isLastPhase)
                        {
                            mergedValues[i] = leftValues[leftIndices[j0]];
                        }
                        else
                        {
                            mergedIndices[i] = leftIndices[j0];
                        }
                        j0--;
                    }
                    else
                    {
                        if (isLastPhase)
                        {
                            mergedValues[i] = myValues[myIndices[j1]];
                        }
                        else
                        {
                            mergedIndices[i] = leftCount + myIndices[j1];
                        }
                        j1--;
                    }
                    i--;
                }
                // If it's not the last phase, we just bulk propagate the keys and values.
                if (!isLastPhase && myValues.Length > 0)
                {
                    mergedKeys.CopyFrom(myKeysArr, myValues.Length);
                    Array.Copy(myValues, 0, mergedValues, leftCount, myValues.Length);
                }
                // Wait for our partner to finish copying too.
                _sharedBarriers[phase][partnerIndex].SignalAndWait(cancelToken);
                // Now the greater of the two partners can leave, it's done.
                break;
            }
        }
    }
}
//---------------------------------------------------------------------------------------
// Computes our partner index given the logarithmic reduction algorithm specified above.
//
private int ComputePartnerIndex(int phase)
{
    // At phase p partners sit 2^p apart. The "left" member of each pair --
    // the one whose index is a multiple of 2^(p+1) -- looks to the right;
    // every other partition looks to the left.
    int offset = 1 << phase;
    if ((_partitionIndex % (offset * 2)) == 0)
    {
        return _partitionIndex + offset;
    }
    return _partitionIndex - offset;
}
//---------------------------------------------------------------------------------------
// Sort algorithm used to sort key/value lists. After this has been called, the indices
// will have been placed in sorted order based on the keys provided.
//
private void QuickSort(int left, int right, TKey[] keys, int[] indices, CancellationToken cancelToken)
{
    Contract.Assert(keys != null, "need a non-null keyset");
    Contract.Assert(keys.Length >= indices.Length);
    Contract.Assert(left <= right);
    Contract.Assert(0 <= left && left < keys.Length);
    Contract.Assert(0 <= right && right < keys.Length);
    // cancellation check.
    // only test for intervals that are wider than so many items, else this test is
    // relatively expensive compared to the work being performed.
    if (right - left > CancellationState.POLL_INTERVAL)
        CancellationState.ThrowIfCanceled(cancelToken);
    do
    {
        int i = left;
        int j = right;
        // Median-position pivot; note the partitioning swaps indices only --
        // the key array itself is never mutated.
        int pivot = indices[i + ((j - i) >> 1)];
        TKey pivotKey = keys[pivot];
        do
        {
            while (_keyComparer.Compare(keys[indices[i]], pivotKey) < 0) i++;
            while (_keyComparer.Compare(keys[indices[j]], pivotKey) > 0) j--;
            Contract.Assert(i >= left && j <= right, "(i>=left && j<=right) sort failed - bogus IComparer?");
            if (i > j)
            {
                break;
            }
            if (i < j)
            {
                // Swap the indices.
                int tmp = indices[i];
                indices[i] = indices[j];
                indices[j] = tmp;
            }
            i++;
            j--;
        }
        while (i <= j);
        // Recurse into the smaller side and loop on the larger, bounding the
        // recursion depth to O(log n).
        if (j - left <= right - i)
        {
            if (left < j)
            {
                QuickSort(left, j, keys, indices, cancelToken);
            }
            left = i;
        }
        else
        {
            if (i < right)
            {
                QuickSort(i, right, keys, indices, cancelToken);
            }
            right = j;
        }
    }
    while (left < right);
}
}
}
| |
using System;
namespace Lucene.Net.Search
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
/// <summary>
/// <para>Expert: Collectors are primarily meant to be used to
/// gather raw results from a search, and implement sorting
/// or custom result filtering, collation, etc. </para>
///
/// <para>Lucene's core collectors are derived from Collector.
/// Likely your application can use one of these classes, or
/// subclass <see cref="TopDocsCollector{T}"/>, instead of
/// implementing <see cref="ICollector"/> directly:
///
/// <list type="bullet">
///
/// <item><description><see cref="TopDocsCollector{T}"/> is an abstract base class
/// that assumes you will retrieve the top N docs,
/// according to some criteria, after collection is
/// done. </description></item>
///
/// <item><description><see cref="TopScoreDocCollector"/> is a concrete subclass
/// <see cref="TopDocsCollector{T}"/> and sorts according to score +
/// docID. This is used internally by the
/// <see cref="IndexSearcher"/> search methods that do not take an
/// explicit <see cref="Sort"/>. It is likely the most frequently
/// used collector.</description></item>
///
/// <item><description><see cref="TopFieldCollector"/> subclasses
/// <see cref="TopDocsCollector{T}"/> and sorts according to a specified
/// <see cref="Sort"/> object (sort by field). This is used
/// internally by the <see cref="IndexSearcher"/> search methods
/// that take an explicit <see cref="Sort"/>.</description></item>
///
/// <item><description><see cref="TimeLimitingCollector"/>, which wraps any other
/// Collector and aborts the search if it's taken too much
/// time.</description></item>
///
/// <item><description><see cref="PositiveScoresOnlyCollector"/> wraps any other
/// <see cref="ICollector"/> and prevents collection of hits whose score
/// is &lt;= 0.0</description></item>
///
/// </list>
/// </para>
///
/// <para><see cref="ICollector"/> decouples the score from the collected doc:
/// the score computation is skipped entirely if it's not
/// needed. Collectors that do need the score should
/// implement the <see cref="SetScorer(Scorer)"/> method, to hold onto the
/// passed <see cref="Scorer"/> instance, and call
/// <see cref="Scorer.GetScore()"/> within the collect method to compute the
/// current hit's score. If your collector may request the
/// score for a single hit multiple times, you should use
/// <see cref="ScoreCachingWrappingScorer"/>. </para>
///
/// <para><b>NOTE:</b> The doc that is passed to the collect
/// method is relative to the current reader. If your
/// collector needs to resolve this to the docID space of the
/// Multi*Reader, you must re-base it by recording the
/// docBase from the most recent <see cref="SetNextReader(AtomicReaderContext)"/> call. Here's
/// a simple example showing how to collect docIDs into an
/// <see cref="Util.OpenBitSet"/>:</para>
///
/// <code>
/// private class MySearchCollector : ICollector
/// {
/// private readonly OpenBitSet bits;
/// private int docBase;
///
/// public MySearchCollector(OpenBitSet bits)
/// {
/// if (bits == null) throw new ArgumentNullException("bits");
/// this.bits = bits;
/// }
///
/// // ignore scorer
/// public void SetScorer(Scorer scorer)
/// {
/// }
///
/// // accept docs out of order (for a BitSet it doesn't matter)
/// public bool AcceptDocsOutOfOrder
/// {
/// get { return true; }
/// }
///
/// public void Collect(int doc)
/// {
/// bits.Set(doc + docBase);
/// }
///
/// public void SetNextReader(AtomicReaderContext context)
/// {
/// this.docBase = context.DocBase;
/// }
/// }
///
/// IndexSearcher searcher = new IndexSearcher(indexReader);
/// OpenBitSet bits = new OpenBitSet(indexReader.MaxDoc);
/// searcher.Search(query, new MySearchCollector(bits));
/// </code>
///
/// <para>Not all collectors will need to rebase the docID. For
/// example, a collector that simply counts the total number
/// of hits would skip it.</para>
///
/// <para><b>NOTE:</b> Prior to 2.9, Lucene silently filtered
/// out hits with score &lt;= 0. As of 2.9, the core <see cref="ICollector"/>s
/// no longer do that. It's very unusual to have such hits
/// (a negative query boost, or function query returning
/// negative custom scores, could cause it to happen). If
/// you need that behavior, use
/// <see cref="PositiveScoresOnlyCollector"/>.</para>
///
/// @lucene.experimental
/// <para/>
/// @since 2.9
/// </summary>
public interface ICollector // LUCENENET NOTE: This was an abstract class in Lucene, but made into an interface since we need one for Grouping's covariance
{
    /// <summary>
    /// Called before successive calls to <see cref="Collect(int)"/>. Implementations
    /// that need the score of the current document (passed-in to
    /// <see cref="Collect(int)"/>), should save the passed-in <see cref="Scorer"/> and call
    /// <c>scorer.GetScore()</c> when needed.
    /// </summary>
    void SetScorer(Scorer scorer);

    /// <summary>
    /// Called once for every document matching a query, with the unbased document
    /// number.
    /// <para/>Note: The collection of the current segment can be terminated by throwing
    /// a <see cref="CollectionTerminatedException"/>. In this case, the last docs of the
    /// current <see cref="AtomicReaderContext"/> will be skipped and <see cref="IndexSearcher"/>
    /// will swallow the exception and continue collection with the next leaf.
    /// <para/>
    /// Note: this is called in an inner search loop. For good search performance,
    /// implementations of this method should not call <see cref="IndexSearcher.Doc(int)"/> or
    /// <see cref="Lucene.Net.Index.IndexReader.Document(int)"/> on every hit.
    /// Doing so can slow searches by an order of magnitude or more.
    /// </summary>
    void Collect(int doc);

    /// <summary>
    /// Called before collecting from each <see cref="AtomicReaderContext"/>. All doc ids in
    /// <see cref="Collect(int)"/> will correspond to <see cref="Index.IndexReaderContext.Reader"/>.
    /// <para/>
    /// Add <see cref="AtomicReaderContext.DocBase"/> to the current <see cref="Index.IndexReaderContext.Reader"/>'s
    /// internal document id to re-base ids in <see cref="Collect(int)"/>.
    /// </summary>
    /// <param name="context">next atomic reader context </param>
    void SetNextReader(AtomicReaderContext context);

    /// <summary>
    /// Return <c>true</c> if this collector does not
    /// require the matching docIDs to be delivered in int sort
    /// order (smallest to largest) to <see cref="Collect(int)"/>.
    ///
    /// <para> Most Lucene Query implementations will visit
    /// matching docIDs in order. However, some queries
    /// (currently limited to certain cases of <see cref="BooleanQuery"/>)
    /// can achieve faster searching if the
    /// <see cref="ICollector"/> allows them to deliver the
    /// docIDs out of order.</para>
    ///
    /// <para> Many collectors don't mind getting docIDs out of
    /// order, so it's important to return <c>true</c>
    /// here.</para>
    /// </summary>
    bool AcceptsDocsOutOfOrder { get; }
}
/// <summary>
/// LUCENENET specific class used to hold the
/// <see cref="NewAnonymous(Action{Scorer}, Action{int}, Action{AtomicReaderContext}, Func{bool})"/> static method.
/// </summary>
public static class Collector
{
    /// <summary>
    /// Creates a new instance with the ability to specify the body of the <see cref="ICollector.SetScorer(Scorer)"/>
    /// method through the <paramref name="setScorer"/> parameter, the body of the <see cref="ICollector.Collect(int)"/>
    /// method through the <paramref name="collect"/> parameter, the body of the <see cref="ICollector.SetNextReader(AtomicReaderContext)"/>
    /// method through the <paramref name="setNextReader"/> parameter, and the body of the <see cref="ICollector.AcceptsDocsOutOfOrder"/>
    /// property through the <paramref name="acceptsDocsOutOfOrder"/> parameter.
    /// Simple example:
    /// <code>
    /// IndexSearcher searcher = new IndexSearcher(indexReader);
    /// OpenBitSet bits = new OpenBitSet(indexReader.MaxDoc);
    /// int docBase;
    /// searcher.Search(query,
    ///     Collector.NewAnonymous(setScorer: (scorer) =>
    ///     {
    ///         // ignore scorer
    ///     }, collect: (doc) =>
    ///     {
    ///         bits.Set(doc + docBase);
    ///     }, setNextReader: (context) =>
    ///     {
    ///         docBase = context.DocBase;
    ///     }, acceptsDocsOutOfOrder: () =>
    ///     {
    ///         return true;
    ///     })
    /// );
    /// </code>
    /// </summary>
    /// <param name="setScorer">
    /// A delegate method that represents (is called by) the <see cref="ICollector.SetScorer(Scorer)"/>
    /// method. It accepts a <see cref="Scorer"/> scorer and
    /// has no return value.
    /// </param>
    /// <param name="collect">
    /// A delegate method that represents (is called by) the <see cref="ICollector.Collect(int)"/>
    /// method. It accepts an <see cref="int"/> doc and
    /// has no return value.
    /// </param>
    /// <param name="setNextReader">
    /// A delegate method that represents (is called by) the <see cref="ICollector.SetNextReader(AtomicReaderContext)"/>
    /// method. It accepts a <see cref="AtomicReaderContext"/> context and
    /// has no return value.
    /// </param>
    /// <param name="acceptsDocsOutOfOrder">
    /// A delegate method that represents (is called by) the <see cref="ICollector.AcceptsDocsOutOfOrder"/>
    /// property. It returns a <see cref="bool"/> value.
    /// </param>
    /// <returns> A new <see cref="AnonymousCollector"/> instance. </returns>
    public static ICollector NewAnonymous(Action<Scorer> setScorer, Action<int> collect, Action<AtomicReaderContext> setNextReader, Func<bool> acceptsDocsOutOfOrder)
    {
        return new AnonymousCollector(setScorer, collect, setNextReader, acceptsDocsOutOfOrder);
    }

    // LUCENENET specific
    // Delegating ICollector implementation; each interface member simply
    // forwards to the corresponding delegate supplied at construction time.
    private class AnonymousCollector : ICollector
    {
        private readonly Action<Scorer> setScorer;
        private readonly Action<int> collect;
        private readonly Action<AtomicReaderContext> setNextReader;
        private readonly Func<bool> acceptsDocsOutOfOrder;

        public AnonymousCollector(Action<Scorer> setScorer, Action<int> collect, Action<AtomicReaderContext> setNextReader, Func<bool> acceptsDocsOutOfOrder)
        {
            // Use nameof so the parameter names in the exceptions cannot drift
            // out of sync with the signature during refactoring.
            if (setScorer == null)
                throw new ArgumentNullException(nameof(setScorer));
            if (collect == null)
                throw new ArgumentNullException(nameof(collect));
            if (setNextReader == null)
                throw new ArgumentNullException(nameof(setNextReader));
            if (acceptsDocsOutOfOrder == null)
                throw new ArgumentNullException(nameof(acceptsDocsOutOfOrder));

            this.setScorer = setScorer;
            this.collect = collect;
            this.setNextReader = setNextReader;
            this.acceptsDocsOutOfOrder = acceptsDocsOutOfOrder;
        }

        public bool AcceptsDocsOutOfOrder
        {
            get
            {
                return this.acceptsDocsOutOfOrder();
            }
        }

        public void Collect(int doc)
        {
            this.collect(doc);
        }

        public void SetNextReader(AtomicReaderContext context)
        {
            this.setNextReader(context);
        }

        public void SetScorer(Scorer scorer)
        {
            this.setScorer(scorer);
        }
    }
}
}
| |
#region License
// Copyright 2006 James Newton-King
// http://www.newtonsoft.com
//
// This work is licensed under the Creative Commons Attribution 2.5 License
// http://creativecommons.org/licenses/by/2.5/
//
// You are free:
// * to copy, distribute, display, and perform the work
// * to make derivative works
// * to make commercial use of the work
//
// Under the following conditions:
// * You must attribute the work in the manner specified by the author or licensor:
// - If you find this component useful a link to http://www.newtonsoft.com would be appreciated.
// * For any reuse or distribution, you must make clear to others the license terms of this work.
// * Any of these conditions can be waived if you get permission from the copyright holder.
#endregion
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.Xml;
using System.Globalization;
namespace Newtonsoft.Json
{
/// <summary>
/// Represents a reader that provides fast, non-cached, forward-only access to serialized Json data.
/// </summary>
public class JsonReader : IDisposable
{
// Internal parser states; drives the dispatch in Read().
private enum State
{
    Start,          // nothing read yet (initial state set in the constructor)
    Complete,
    Property,       // a property name token was just produced; its value is next
    ObjectStart,    // '{' consumed; first property (or '}') expected
    Object,
    ArrayStart,     // '[' consumed; first element (or ']') expected
    Array,
    Closed,         // Close() was called
    PostValue,      // a value was produced; ',' or a container close is expected
    Constructor,    // inside the argument list of a 'new Xxx(...)' constructor
    ConstructorEnd, // the ')' ending a constructor argument list was consumed
    Error,
    Finished        // the root container was closed (stack back to JsonType.None)
}
private TextReader _reader;     // underlying character source
private char _currentChar;      // one-character lookahead; '\0' means "consumed, read another"

// current Token data
private JsonToken _token;
private object _value;
private Type _valueType;
private char _quoteChar;        // quote character of the most recent string token
private StringBuffer _buffer;   // scratch buffer used to accumulate token text
//private StringBuilder _testBuffer;
private State _currentState;
private int _top;               // depth of _stack (kept equal to _stack.Count)
private List<JsonType> _stack;  // open containers; bottom element is the JsonType.None sentinel
/// <summary>
/// Gets the quotation mark character used to enclose the value of a string.
/// Only meaningful after a string token has been read (set by ParseString).
/// </summary>
public char QuoteChar
{
    get { return _quoteChar; }
}

/// <summary>
/// Gets the type of the current JSON token.
/// </summary>
public JsonToken TokenType
{
    get { return _token; }
}

/// <summary>
/// Gets the text value of the current JSON token.
/// </summary>
public object Value
{
    get { return _value; }
}

/// <summary>
/// Gets The Common Language Runtime (CLR) type for the current JSON token.
/// </summary>
public Type ValueType
{
    get { return _valueType; }
}
/// <summary>
/// Initializes a new instance of the <see cref="JsonReader"/> class with the specified <see cref="TextReader"/>.
/// </summary>
/// <param name="reader">The <c>TextReader</c> containing the JSON data to read.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="reader"/> is null.</exception>
public JsonReader(TextReader reader)
{
    if (reader == null)
        throw new ArgumentNullException("reader");

    _reader = reader;
    _buffer = new StringBuffer(4096);
    //_testBuffer = new StringBuilder();
    _currentState = State.Start;
    _stack = new List<JsonType>();
    _top = 0;
    // sentinel marking "no open container" at the bottom of the stack
    Push(JsonType.None);
}
// Pushes a container type onto the scope stack, keeping _top in sync.
private void Push(JsonType value)
{
    _stack.Add(value);
    _top++;
}

// Pops and returns the innermost open container type.
private JsonType Pop()
{
    JsonType value = Peek();
    _stack.RemoveAt(_stack.Count - 1);
    _top--;
    return value;
}

// Returns the innermost open container type without removing it.
private JsonType Peek()
{
    return _stack[_top - 1];
}
// Parses a string value. The opening quote has already been consumed;
// <paramref name="quote"/> tells us which quote character terminates it.
// On success, produces a JsonToken.String whose text is the unescaped content
// and records the quote character in _quoteChar.
private void ParseString(char quote)
{
    bool stringTerminated = false;

    while (!stringTerminated && MoveNext())
    {
        switch (_currentChar)
        {
            //case 0:
            //case 0x0A:
            //case 0x0D:
            //    throw new JsonReaderException("Unterminated string");
            case '\\':
                if (MoveNext())
                {
                    switch (_currentChar)
                    {
                        case 'b':
                            _buffer.Append('\b');
                            break;
                        case 't':
                            _buffer.Append('\t');
                            break;
                        case 'n':
                            _buffer.Append('\n');
                            break;
                        case 'f':
                            _buffer.Append('\f');
                            break;
                        case 'r':
                            _buffer.Append('\r');
                            break;
                        case 'u':
                            // NOTE(review): \uXXXX escapes are not implemented here --
                            // the escape is silently dropped (see commented-out code).
                            //_buffer.Append((char) Integer.parseInt(next(4), 16));
                            break;
                        case 'x':
                            // NOTE(review): \xXX escapes are likewise dropped.
                            //_buffer.Append((char) Integer.parseInt(next(2), 16));
                            break;
                        default:
                            // any other escaped character is taken literally (covers \\ \" \' \/)
                            _buffer.Append(_currentChar);
                            break;
                    }
                }
                else
                {
                    throw new JsonReaderException("Unterminated string. Expected delimiter: " + quote);
                }
                break;
            case '"':
            case '\'':
                // only the quote character that opened the string terminates it;
                // the other quote kind is literal content
                if (_currentChar == quote)
                    stringTerminated = true;
                else
                    goto default;
                break;
            default:
                _buffer.Append(_currentChar);
                break;
        }
    }

    if (!stringTerminated)
        throw new JsonReaderException("Unterminated string. Expected delimiter: " + quote);

    ClearCurrentChar();
    _currentState = State.PostValue;
    _token = JsonToken.String;
    _value = _buffer.ToString();
    _buffer.Position = 0;
    _valueType = typeof(string);
    _quoteChar = quote;
}
// Advances the reader by one character into _currentChar.
// Returns false (leaving _currentChar untouched) at end of input.
private bool MoveNext()
{
    int next = _reader.Read();
    if (next == -1)
        return false;

    _currentChar = (char) next;
    return true;
}
// True when at least one more character is available from the reader.
private bool HasNext()
{
    return (_reader.Peek() != -1);
}

// Returns the next character without consuming it.
private char PeekNext()
{
    return (char) _reader.Peek();
}

// Marks the lookahead as consumed; Read() will fetch a fresh character.
private void ClearCurrentChar()
{
    _currentChar = '\0';
}
// Consumes characters until <paramref name="value"/> is found.
// Returns false if the input ends first.
private bool MoveTo(char value)
{
    bool found = false;
    while (!found && MoveNext())
    {
        found = (_currentChar == value);
    }
    return found;
}
/// <summary>
/// Reads the next Json token from the stream.
/// </summary>
/// <returns><c>true</c> if a token was read; <c>false</c> if the end of the input was reached.</returns>
public bool Read()
{
    while (true)
    {
        // '\0' means the lookahead was consumed by the previous token
        if (_currentChar == '\0')
        {
            if (!MoveNext())
                return false;
        }

        switch (_currentState)
        {
            case State.Start:
            case State.Property:
            case State.Array:
            case State.ArrayStart:
                return ParseValue();
            case State.Complete:
                break;
            case State.Object:
            case State.ObjectStart:
                return ParseObject();
            case State.PostValue:
                // returns true if it hits
                // end of object or array
                if (ParsePostValue())
                    return true;
                // a ',' was consumed: loop around and parse the next value
                break;
            case State.Closed:
                break;
            case State.Error:
                break;
            default:
                throw new JsonReaderException("Unexpected state: " + _currentState);
        }
    }
}
// Handles what may legally follow a completed value: whitespace, a comment,
// a ',' separator, or the close of the enclosing object/array.
// Returns true when a token was produced (end of container, or a comment);
// returns false when a ',' was consumed, telling Read() to parse the next value.
private bool ParsePostValue()
{
    do
    {
        switch (_currentChar)
        {
            case '}':
                SetToken(JsonToken.EndObject);
                ClearCurrentChar();
                return true;
            case ']':
                SetToken(JsonToken.EndArray);
                ClearCurrentChar();
                return true;
            case '/':
                ParseComment();
                return true;
            case ',':
                // finished parsing the value; restore the state of the enclosing container
                SetStateBasedOnCurrent();
                ClearCurrentChar();
                return false;
            default:
                if (char.IsWhiteSpace(_currentChar))
                {
                    // eat
                }
                else
                {
                    // Fixed typo in message: "encoutered" -> "encountered"
                    throw new JsonReaderException("After parsing a value an unexpected character was encountered: " + _currentChar);
                }
                break;
        }
    } while (MoveNext());

    return false;
}
// Reads the next token while positioned inside an object: either the end of
// the object, a comment, or a property name. Returns false at end of input.
private bool ParseObject()
{
    do
    {
        switch (_currentChar)
        {
            case '}':
                SetToken(JsonToken.EndObject);
                return true;
            case '/':
                ParseComment();
                return true;
            case ',':
                // a ',' with no preceding value yields an Undefined token
                SetToken(JsonToken.Undefined);
                return true;
            default:
                if (char.IsWhiteSpace(_currentChar))
                {
                    // eat
                }
                else
                {
                    return ParseProperty();
                }
                break;
        }
    } while (MoveNext());

    return false;
}
// Parses a (quoted or unquoted) property name and positions the reader
// past the following ':'. Produces a JsonToken.PropertyName.
private bool ParseProperty()
{
    if (ValidIdentifierChar(_currentChar))
    {
        ParseUnquotedProperty();
    }
    else if (_currentChar == '"' || _currentChar == '\'')
    {
        ParseQuotedProperty(_currentChar);
    }
    else
    {
        throw new JsonReaderException("Invalid property identifier character: " + _currentChar);
    }

    // finished property. move to colon
    if (_currentChar != ':')
    {
        MoveTo(':');
    }

    SetToken(JsonToken.PropertyName, _buffer.ToString());
    _buffer.Position = 0;
    return true;
}
// Accumulates property-name characters into _buffer until the matching
// quote character is found; throws if the input ends first.
private void ParseQuotedProperty(char quoteChar)
{
    while (MoveNext())
    {
        if (_currentChar == quoteChar)
            return;

        _buffer.Append(_currentChar);
    }

    throw new JsonReaderException("Unclosed quoted property. Expected: " + quoteChar);
}
// Returns true when <paramref name="value"/> may appear in an unquoted
// JavaScript property identifier (letter, digit, '_' or '$').
// Fixed: the original ignored its parameter and tested the _currentChar
// field instead. All existing callers pass _currentChar, so observable
// behavior is unchanged, but the method now honors its argument.
private bool ValidIdentifierChar(char value)
{
    return (char.IsLetterOrDigit(value) || value == '_' || value == '$');
}
// Accumulates an unquoted property name into _buffer, starting with the
// current character, stopping at whitespace or ':' (which is left in
// _currentChar for the caller). Throws on any non-identifier character.
private void ParseUnquotedProperty()
{
    // parse unquoted property name until whitespace or colon
    _buffer.Append(_currentChar);

    while (MoveNext())
    {
        if (char.IsWhiteSpace(_currentChar) || _currentChar == ':')
        {
            break;
        }
        else if (ValidIdentifierChar(_currentChar))
        {
            _buffer.Append(_currentChar);
        }
        else
        {
            throw new JsonReaderException("Invalid JavaScript property identifier character: " + _currentChar);
        }
    }
}
// Records <paramref name="newToken"/> as the current token with no value.
private void SetToken(JsonToken newToken)
{
    SetToken(newToken, null);
}
// Records the current token and its value, and performs the state
// transition and container-stack bookkeeping that the token implies.
private void SetToken(JsonToken newToken, object value)
{
    _token = newToken;

    switch (newToken)
    {
        case JsonToken.StartObject:
            _currentState = State.ObjectStart;
            Push(JsonType.Object);
            ClearCurrentChar();
            break;
        case JsonToken.StartArray:
            _currentState = State.ArrayStart;
            Push(JsonType.Array);
            ClearCurrentChar();
            break;
        case JsonToken.EndObject:
            // pops the stack and verifies the close matches the open container
            ValidateEnd(JsonToken.EndObject);
            ClearCurrentChar();
            _currentState = State.PostValue;
            break;
        case JsonToken.EndArray:
            ValidateEnd(JsonToken.EndArray);
            ClearCurrentChar();
            _currentState = State.PostValue;
            break;
        case JsonToken.PropertyName:
            _currentState = State.Property;
            ClearCurrentChar();
            break;
        case JsonToken.Undefined:
        case JsonToken.Integer:
        case JsonToken.Float:
        case JsonToken.Boolean:
        case JsonToken.Null:
        case JsonToken.Constructor:
        case JsonToken.Date:
            _currentState = State.PostValue;
            break;
    }

    if (value != null)
    {
        _value = value;
        _valueType = value.GetType();
    }
    else
    {
        _value = null;
        _valueType = null;
    }
}
// Dispatches on the current character to parse any JSON/JavaScript value:
// strings, literals (true/false/null/undefined), numbers, comments,
// container open/close, and 'new ...' constructors. Returns false at end
// of input, or when the ')' closing a constructor argument list is hit.
private bool ParseValue()
{
    do
    {
        switch (_currentChar)
        {
            case '"':
            case '\'':
                ParseString(_currentChar);
                return true;
            case 't':
                ParseTrue();
                return true;
            case 'f':
                ParseFalse();
                return true;
            case 'n':
                // 'n' is ambiguous: "null" vs "new ..." -- peek one character to decide
                if (HasNext())
                {
                    char next = PeekNext();

                    if (next == 'u')
                        ParseNull();
                    else if (next == 'e')
                        ParseConstructor();
                    else
                        throw new JsonReaderException("Unexpected character encountered while parsing value: " + _currentChar);
                }
                else
                {
                    throw new JsonReaderException("Unexpected end");
                }
                return true;
            case '/':
                ParseComment();
                return true;
            case 'u':
                ParseUndefined();
                return true;
            case '{':
                SetToken(JsonToken.StartObject);
                return true;
            case '[':
                SetToken(JsonToken.StartArray);
                return true;
            case '}':
                SetToken(JsonToken.EndObject);
                return true;
            case ']':
                SetToken(JsonToken.EndArray);
                return true;
            case ',':
                // a ',' with no preceding value yields an Undefined token
                SetToken(JsonToken.Undefined);
                //ClearCurrentChar();
                return true;
            case ')':
                // only legal as the end of a constructor argument list
                if (_currentState == State.Constructor)
                {
                    _currentState = State.ConstructorEnd;
                    return false;
                }
                else
                {
                    throw new JsonReaderException("Unexpected character encountered while parsing value: " + _currentChar);
                }
            default:
                if (char.IsWhiteSpace(_currentChar))
                {
                    // eat
                }
                else if (char.IsNumber(_currentChar) || _currentChar == '-' || _currentChar == '.')
                {
                    ParseNumber();
                    return true;
                }
                else
                {
                    throw new JsonReaderException("Unexpected character encountered while parsing value: " + _currentChar);
                }
                break;
        }
    } while (MoveNext());

    return false;
}
// Consumes consecutive whitespace characters. When oneOrMore is true,
// returns true only if at least one whitespace character was consumed;
// otherwise always returns true.
private bool EatWhitespace(bool oneOrMore)
{
    bool sawAny = false;
    for (; char.IsWhiteSpace(_currentChar); MoveNext())
    {
        sawAny = true;
    }
    return sawAny || !oneOrMore;
}
// Parses a JavaScript constructor expression: new Name(arg, arg, ...).
// "new Date(ticks)" is special-cased into a JsonToken.Date; anything else
// becomes a JsonToken.Constructor carrying a JavaScriptConstructor value.
// NOTE(review): if "new" does not match or is not followed by whitespace,
// the method silently returns without producing a token -- confirm intended.
private void ParseConstructor()
{
    if (MatchValue("new", true))
    {
        if (EatWhitespace(true))
        {
            // accumulate the constructor name (letters only)
            while (char.IsLetter(_currentChar))
            {
                _buffer.Append(_currentChar);
                MoveNext();
            }

            string constructorName = _buffer.ToString();
            _buffer.Position = 0;
            List<object> parameters = new List<object>();

            EatWhitespace(false);

            if (_currentChar == '(' && MoveNext())
            {
                _currentState = State.Constructor;

                // ParseValue returns false when it hits the closing ')'
                while (ParseValue())
                {
                    parameters.Add(_value);
                    _currentState = State.Constructor;
                }

                if (string.CompareOrdinal(constructorName, "Date") == 0)
                {
                    // JavaScript Date ticks are milliseconds; convert to 100ns DateTime ticks
                    long javaScriptTicks = Convert.ToInt64(parameters[0]);
                    DateTime date = new DateTime((javaScriptTicks * 10000) + JavaScriptConvert.InitialJavaScriptDateTicks);

                    SetToken(JsonToken.Date, date);
                }
                else
                {
                    JavaScriptConstructor constructor = new JavaScriptConstructor(constructorName, new JavaScriptParameters(parameters));

                    if (_currentState == State.ConstructorEnd)
                    {
                        SetToken(JsonToken.Constructor, constructor);
                    }
                }

                // move past ')'
                MoveNext();
            }
        }
    }
}
// Parses a numeric value: characters are accumulated until a separator or
// end of input, then converted to Int64 (no '.') or Double (with '.').
// Improvement: the original converted _buffer.ToString() three times;
// the text is now materialized once and reused.
private void ParseNumber()
{
    // parse until separator character or end
    bool end = false;
    do
    {
        if (CurrentIsSeperator())
            end = true;
        else
            _buffer.Append(_currentChar);
    } while (!end && MoveNext());

    string number = _buffer.ToString();
    object numberValue;
    JsonToken numberType;

    if (number.IndexOf('.') == -1)
    {
        numberValue = Convert.ToInt64(number, CultureInfo.InvariantCulture);
        numberType = JsonToken.Integer;
    }
    else
    {
        numberValue = Convert.ToDouble(number, CultureInfo.InvariantCulture);
        numberType = JsonToken.Float;
    }

    _buffer.Position = 0;
    SetToken(numberType, numberValue);
}
// Pops the innermost open container and verifies that it matches the
// close token being emitted (e.g. ']' may only close an array).
private void ValidateEnd(JsonToken endToken)
{
    JsonType currentObject = Pop();

    if (GetTypeForCloseToken(endToken) != currentObject)
        throw new JsonReaderException(string.Format("JsonToken {0} is not valid for closing JsonType {1}.", endToken, currentObject));
}
// Restores _currentState from the innermost open container after a value
// has been fully consumed; JsonType.None (the sentinel) means the root
// container has been closed and the document is finished.
private void SetStateBasedOnCurrent()
{
    JsonType currentObject = Peek();

    switch (currentObject)
    {
        case JsonType.Object:
            _currentState = State.Object;
            break;
        case JsonType.Array:
            _currentState = State.Array;
            break;
        case JsonType.None:
            _currentState = State.Finished;
            break;
        default:
            throw new JsonReaderException("While setting the reader state back to current object an unexpected JsonType was encountered: " + currentObject);
    }
}
// Maps a container-close token to the container type it closes.
private JsonType GetTypeForCloseToken(JsonToken token)
{
    if (token == JsonToken.EndObject)
        return JsonType.Object;

    if (token == JsonToken.EndArray)
        return JsonType.Array;

    throw new JsonReaderException("Not a valid close JsonToken: " + token);
}
// Parses a /* ... */ comment into a JsonToken.Comment. The leading '/'
// has already been consumed by the caller.
// NOTE(review): if the input ends before "*/", the loop simply exits and
// the partial text is still emitted as a comment token -- confirm intended.
private void ParseComment()
{
    // should have already parsed / character before reaching this method
    MoveNext();

    if (_currentChar == '*')
    {
        while (MoveNext())
        {
            if (_currentChar == '*')
            {
                if (MoveNext())
                {
                    if (_currentChar == '/')
                    {
                        break;
                    }
                    else
                    {
                        // a lone '*' is comment content; keep it and the next char
                        _buffer.Append('*');
                        _buffer.Append(_currentChar);
                    }
                }
            }
            else
            {
                _buffer.Append(_currentChar);
            }
        }
    }
    else
    {
        throw new JsonReaderException("Error parsing comment. Expected: *");
    }

    SetToken(JsonToken.Comment, _buffer.ToString());

    _buffer.Position = 0;
    ClearCurrentChar();
}
// Returns true when the input, starting at _currentChar, matches
// <paramref name="value"/> exactly. Advances the reader as it matches;
// on success _currentChar is left on the last matched character.
private bool MatchValue(string value)
{
    int i = 0;
    do
    {
        if (_currentChar != value[i])
        {
            break;
        }
        i++;
    }
    while (i < value.Length && MoveNext());

    return (i == value.Length);
}
// Matches <paramref name="value"/>; when noTrailingNonSeperatorCharacters
// is set, additionally requires that the match is followed by a separator
// character or the end of the input.
private bool MatchValue(string value, bool noTrailingNonSeperatorCharacters)
{
    bool matched = MatchValue(value);

    if (!noTrailingNonSeperatorCharacters)
        return matched;

    return matched && (!MoveNext() || CurrentIsSeperator());
}
// True when _currentChar can legally terminate a value: a container close,
// ',', whitespace, the start of a comment, or -- inside a constructor
// argument list -- ')'. (Name spelling kept as-is; callers depend on it.)
private bool CurrentIsSeperator()
{
    switch (_currentChar)
    {
        case '}':
        case ']':
        case ',':
            return true;
        case '/':
            // check next character to see if start of a comment
            return (HasNext() && PeekNext() == '*');
        case ')':
            if (_currentState == State.Constructor)
                return true;
            break;
        default:
            if (char.IsWhiteSpace(_currentChar))
                return true;
            break;
    }

    return false;
}
// Parses the literal 'true' (which must be followed by a separator
// character or the end of the input) into a boolean token.
private void ParseTrue()
{
    if (!MatchValue(JavaScriptConvert.True, true))
        throw new JsonReaderException("Error parsing boolean value.");

    SetToken(JsonToken.Boolean, true);
}
// Parses the literal 'null' into a null token.
private void ParseNull()
{
    if (!MatchValue(JavaScriptConvert.Null, true))
        throw new JsonReaderException("Error parsing null value.");

    SetToken(JsonToken.Null);
}
// Parses the literal 'undefined' into an undefined token.
private void ParseUndefined()
{
    if (!MatchValue(JavaScriptConvert.Undefined, true))
        throw new JsonReaderException("Error parsing undefined value.");

    SetToken(JsonToken.Undefined);
}
// Parses the literal 'false' into a boolean token.
private void ParseFalse()
{
    if (!MatchValue(JavaScriptConvert.False, true))
        throw new JsonReaderException("Error parsing boolean value.");

    SetToken(JsonToken.Boolean, false);
}
// Explicit IDisposable implementation; delegates to Dispose(bool).
void IDisposable.Dispose()
{
    Dispose(true);
}
// Closes the reader on disposal unless it has already been closed.
private void Dispose(bool disposing)
{
    if (_currentState != State.Closed && disposing)
        Close();
}
/// <summary>
/// Changes the <see cref="State"/> to Closed, clears the current token
/// data, closes the underlying reader and releases the scratch buffer.
/// </summary>
public void Close()
{
    _currentState = State.Closed;
    _token = JsonToken.None;
    _value = null;
    _valueType = null;

    if (_reader != null)
        _reader.Close();

    if (_buffer != null)
        _buffer.Clear();
}
}
}
| |
using System;
using System.Diagnostics.CodeAnalysis;
using HealthCheck;
using HealthCheck.Framework;
using Moq;
using Quartz.Impl.Calendar;
using Xunit;
namespace UnitTests.Framework
{
[SuppressMessage(
    "StyleCop.CSharp.DocumentationRules",
    "SA1600:ElementsMustBeDocumented",
    Justification = "Test Suites do not need XML Documentation.")]
// Unit tests for HealthCheckJob: plugin execution, quiet periods,
// listener-threshold filtering and listener exception isolation.
// (Some test names contain historical typos -- "Listerner", "Threshhold" --
// left unchanged because renames would alter reported test identities.)
public class HealthCheckJobTests
{
    [Fact]
    public void Execute_Should_CallListernerAfterExecution()
    {
        // Arrange
        var executed = false;
        var mockPlugin = new Mock<IHealthCheckPlugin>();
        var mockStatus = new Mock<IHealthStatus>();
        mockStatus.SetupGet(s => s.Status).Returns(CheckResult.Success);
        mockPlugin.Setup(p => p.Execute()).Returns(mockStatus.Object);

        var job = new HealthCheckJob
        {
            Plugin = mockPlugin.Object
        };

        var mockListener = new Mock<IStatusListener>();
        mockListener.Setup(l => l.Process(It.IsAny<IHealthStatus>())).Callback(() =>
        {
            executed = true;
        });

        job.Listeners.Add(mockListener.Object);

        // Act
        job.Execute(null);

        // Assert
        Assert.True(executed);
    }

    [Fact]
    public void Execute_Should_ExecuteJob()
    {
        // Arrange
        var executed = false;
        var mock = new Mock<IHealthCheckPlugin>();
        mock.Setup(p => p.Execute()).Callback(() =>
        {
            executed = true;
        });

        var job = new HealthCheckJob
        {
            Plugin = mock.Object
        };

        // Act
        job.Execute(null);

        // Assert
        Assert.True(executed);
    }

    [Fact]
    public void Execute_Should_HandleExceptions()
    {
        // Arrange
        var mockPlugin = new Mock<IHealthCheckPlugin>();
        mockPlugin.Setup(p => p.Execute()).Throws(new Exception("BOOM!"));
        mockPlugin.SetupProperty(p => p.PluginStatus);

        var job = new HealthCheckJob
        {
            Plugin = mockPlugin.Object
        };

        // Act
        job.Execute(null);

        // Assert
        // the job must swallow the plugin exception and record the failure status
        Assert.Equal(PluginStatus.TaskExecutionFailure, job.Plugin.PluginStatus);
    }

    [Fact]
    public void Execute_Should_NotExecuteJob_When_InQuietPeriod()
    {
        // Arrange
        // calendar excludes the entire day, so the job is always in a quiet period
        var calendar = new DailyCalendar(0, 0, 0, 0, 23, 59, 59, 999);
        var executed = false;
        var mock = new Mock<IHealthCheckPlugin>();
        _ = mock.Setup(p => p.Execute()).Callback(() => executed = true);

        var job = new HealthCheckJob
        {
            Plugin = mock.Object
        };

        // Act
        job.QuietPeriods.AddCalendar(calendar);
        job.Execute(null);

        // Assert
        Assert.False(executed);
    }

    [Fact]
    public void NotifyListener_Should_CallListener_When_ThreshholdIsOverThreshold()
    {
        // Arrange
        var executed = false;
        var mockPlugin = new Mock<IHealthCheckPlugin>();
        var mockStatus = new Mock<IHealthStatus>();
        mockStatus.SetupGet(s => s.Status).Returns(CheckResult.Error);
        mockPlugin.Setup(p => p.Execute()).Returns(mockStatus.Object);

        var job = new HealthCheckJob
        {
            Plugin = mockPlugin.Object
        };

        var mockList = new Mock<IStatusListener>();
        mockList.SetupGet(l => l.Threshold).Returns(CheckResult.Warning);
        mockList.Setup(l => l.Process(It.IsAny<IHealthStatus>())).Callback(() =>
        {
            executed = true;
        });

        job.Listeners.Add(mockList.Object);

        // Act
        job.Execute(null);

        // Assert
        Assert.True(executed);
    }

    [Fact]
    public void NotifyListener_Should_CallListener_When_ThreshholdIsSameThreshold()
    {
        // Arrange
        var executed = false;
        var mockPlugin = new Mock<IHealthCheckPlugin>();
        var mockStatus = new Mock<IHealthStatus>();
        mockStatus.SetupGet(s => s.Status).Returns(CheckResult.Error);
        mockPlugin.Setup(p => p.Execute()).Returns(mockStatus.Object);

        var job = new HealthCheckJob
        {
            Plugin = mockPlugin.Object
        };

        var mockList = new Mock<IStatusListener>();
        mockList.SetupGet(l => l.Threshold).Returns(CheckResult.Error);
        mockList.Setup(l => l.Process(It.IsAny<IHealthStatus>())).Callback(() =>
        {
            executed = true;
        });

        job.Listeners.Add(mockList.Object);

        // Act
        job.Execute(null);

        // Assert
        Assert.True(executed);
    }

    [Fact]
    public void NotifyListener_Should_NotCallListener_When_ThreshholdIsUnderThreshold()
    {
        // Arrange
        var executed = false;
        var mockPlugin = new Mock<IHealthCheckPlugin>();
        var mockStatus = new Mock<IHealthStatus>();
        mockStatus.SetupGet(s => s.Status).Returns(CheckResult.Success);
        mockPlugin.Setup(p => p.Execute()).Returns(mockStatus.Object);

        var job = new HealthCheckJob
        {
            Plugin = mockPlugin.Object
        };

        var mockList = new Mock<IStatusListener>();
        mockList.SetupGet(l => l.Threshold).Returns(CheckResult.Error);
        mockList.Setup(l => l.Process(It.IsAny<IHealthStatus>())).Callback(() =>
        {
            executed = true;
        });

        job.Listeners.Add(mockList.Object);

        // Act
        job.Execute(null);

        // Assert
        Assert.False(executed);
    }

    [Fact]
    public void NotifyListeners_Should_ContinueToCallListeners_When_AnExceptionOccursInOneListener()
    {
        // Arrange
        var executed = false;
        var mockPlugin = new Mock<IHealthCheckPlugin>();
        var mockStatus = new Mock<IHealthStatus>();
        mockStatus.SetupGet(s => s.Status).Returns(CheckResult.Success);
        mockPlugin.Setup(p => p.Execute()).Returns(mockStatus.Object);

        var job = new HealthCheckJob
        {
            Plugin = mockPlugin.Object
        };

        // first listener throws; the second must still be notified
        var mockListener1 = new Mock<IStatusListener>();
        mockListener1.Setup(l => l.Process(It.IsAny<IHealthStatus>())).Throws(new Exception("BOOM!"));
        job.Listeners.Add(mockListener1.Object);

        var mockListener2 = new Mock<IStatusListener>();
        mockListener2.Setup(l => l.Process(It.IsAny<IHealthStatus>())).Callback(() => { executed = true; });
        job.Listeners.Add(mockListener2.Object);

        // Act
        job.Execute(null);

        // Assert
        Assert.True(executed);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
namespace System.Globalization
{
////////////////////////////////////////////////////////////////////////////
//
// Rules for the Hebrew calendar:
// - The Hebrew calendar is both a Lunar (months) and Solar (years)
// calendar, but allows for a week of seven days.
// - Days begin at sunset.
// - Leap Years occur in the 3, 6, 8, 11, 14, 17, & 19th years of a
// 19-year cycle. Year = leap iff ((7y+1) mod 19 < 7).
// - There are 12 months in a common year and 13 months in a leap year.
// - In a common year, the 6th month, Adar, has 29 days. In a leap
// year, the 6th month, Adar I, has 30 days and the leap month,
// Adar II, has 29 days.
// - Common years have 353-355 days. Leap years have 383-385 days.
// - The Hebrew new year (Rosh HaShanah) begins on the 1st of Tishri,
// the 7th month in the list below.
// - The new year may not begin on Sunday, Wednesday, or Friday.
// - If the new year would fall on a Tuesday and the conjunction of
// the following year were at midday or later, the new year is
// delayed until Thursday.
// - If the new year would fall on a Monday after a leap year, the
// new year is delayed until Tuesday.
// - The length of the 8th and 9th months vary from year to year,
// depending on the overall length of the year.
// - The length of a year is determined by the dates of the new
// years (Tishri 1) preceding and following the year in question.
// - The 2th month is long (30 days) if the year has 355 or 385 days.
// - The 3th month is short (29 days) if the year has 353 or 383 days.
// - The Hebrew months are:
// 1. Tishri (30 days)
// 2. Heshvan (29 or 30 days)
// 3. Kislev (29 or 30 days)
// 4. Teveth (29 days)
// 5. Shevat (30 days)
// 6. Adar I (30 days)
// 7. Adar {II} (29 days, this only exists if that year is a leap year)
// 8. Nisan (30 days)
// 9. Iyyar (29 days)
// 10. Sivan (30 days)
// 11. Tammuz (29 days)
// 12. Av (30 days)
// 13. Elul (29 days)
//
////////////////////////////////////////////////////////////////////////////
/*
** Calendar support range:
** Calendar Minimum Maximum
** ========== ========== ==========
** Gregorian 1583/01/01 2239/09/29
** Hebrew 5343/04/07 5999/13/29
*/
// Includes CHebrew implementation; i.e. all the code necessary for converting
// Gregorian to Hebrew Lunar from 1583 to 2239.
public class HebrewCalendar : Calendar
{
public static readonly int HebrewEra = 1;
internal const int DatePartYear = 0;
internal const int DatePartDayOfYear = 1;
internal const int DatePartMonth = 2;
internal const int DatePartDay = 3;
internal const int DatePartDayOfWeek = 4;
//
// Hebrew Translation Table.
//
// This table is used to get the following Hebrew calendar information for a
// given Gregorian year:
// 1. The day of the Hebrew month corresponding to Gregorian January 1st
// for a given Gregorian year.
// 2. The month of the Hebrew month corresponding to Gregorian January 1st
// for a given Gregorian year.
// The information is not directly in the table. Instead, the info is decoded
// by special values (numbers above 29 and below 1).
// 3. The type of the Hebrew year for a given Gregorian year.
//
/*
More notes:
This table includes 2 numbers for each year.
The offset into the table determines the year. (offset 0 is Gregorian year 1500)
1st number determines the day of the Hebrew month corresponding to January 1st.
2nd number determines the type of the Hebrew year. (the type determines how
many days are there in the year.)
normal years : 1 = 353 days 2 = 354 days 3 = 355 days.
Leap years : 4 = 383 days 5 = 384 days 6 = 385 days.
A 99 means the year is not supported for translation.
for convenience the table was defined for 750 year,
but only 640 years are supported. (from 1583 to 2239)
the years before 1582 (start of the Gregorian calendar)
and after 2239, are filled with 99.
Gregorian January 1st usually falls in Tevet (4th month). Tevet always has 29 days.
That's why there is no need to specify the lunar month in the table.
There are exceptions, these are coded by giving numbers above 29 and below 1.
Actual decoding takes place whenever information is fetched from the table.
The function for decoding is in GetLunarMonthDay().
Example:
The data for 2000 - 2005 A.D. is:
23,6,6,1,17,2,27,6,7,3, // 2000 - 2004
For year 2000, we know it has a Hebrew year type 6, which means it has 385 days.
And 1/1/2000 A.D. is Hebrew year 5760, 23rd day of 4th month.
*/
//
// Jewish Era in use today is dated from the supposed year of the
// Creation with its beginning in 3761 B.C.
//
// The Hebrew year of Gregorian 1st year AD.
// 0001/01/01 AD is Hebrew 3760/01/01
private const int HebrewYearOf1AD = 3760;
// The first Gregorian year in HebrewTable.
private const int FirstGregorianTableYear = 1583; // == Hebrew Year 5343
// The last Gregorian year in HebrewTable.
private const int LastGregorianTableYear = 2239; // == Hebrew Year 5999
private const int TABLESIZE = (LastGregorianTableYear - FirstGregorianTableYear);
private const int MinHebrewYear = HebrewYearOf1AD + FirstGregorianTableYear; // == 5343
private const int MaxHebrewYear = HebrewYearOf1AD + LastGregorianTableYear; // == 5999
private static readonly byte[] s_hebrewTable = {
7,3,17,3, // 1583-1584 (Hebrew year: 5343 - 5344)
0,4,11,2,21,6,1,3,13,2, // 1585-1589
25,4,5,3,16,2,27,6,9,1, // 1590-1594
20,2,0,6,11,3,23,4,4,2, // 1595-1599
14,3,27,4,8,2,18,3,28,6, // 1600
11,1,22,5,2,3,12,3,25,4, // 1605
6,2,16,3,26,6,8,2,20,1, // 1610
0,6,11,2,24,4,4,3,15,2, // 1615
25,6,8,1,19,2,29,6,9,3, // 1620
22,4,3,2,13,3,25,4,6,3, // 1625
17,2,27,6,7,3,19,2,31,4, // 1630
11,3,23,4,5,2,15,3,25,6, // 1635
6,2,19,1,29,6,10,2,22,4, // 1640
3,3,14,2,24,6,6,1,17,3, // 1645
28,5,8,3,20,1,32,5,12,3, // 1650
22,6,4,1,16,2,26,6,6,3, // 1655
17,2,0,4,10,3,22,4,3,2, // 1660
14,3,24,6,5,2,17,1,28,6, // 1665
9,2,19,3,31,4,13,2,23,6, // 1670
3,3,15,1,27,5,7,3,17,3, // 1675
29,4,11,2,21,6,3,1,14,2, // 1680
25,6,5,3,16,2,28,4,9,3, // 1685
20,2,0,6,12,1,23,6,4,2, // 1690
14,3,26,4,8,2,18,3,0,4, // 1695
10,3,21,5,1,3,13,1,24,5, // 1700
5,3,15,3,27,4,8,2,19,3, // 1705
29,6,10,2,22,4,3,3,14,2, // 1710
26,4,6,3,18,2,28,6,10,1, // 1715
20,6,2,2,12,3,24,4,5,2, // 1720
16,3,28,4,8,3,19,2,0,6, // 1725
12,1,23,5,3,3,14,3,26,4, // 1730
7,2,17,3,28,6,9,2,21,4, // 1735
1,3,13,2,25,4,5,3,16,2, // 1740
27,6,9,1,19,3,0,5,11,3, // 1745
23,4,4,2,14,3,25,6,7,1, // 1750
18,2,28,6,9,3,21,4,2,2, // 1755
12,3,25,4,6,2,16,3,26,6, // 1760
8,2,20,1,0,6,11,2,22,6, // 1765
4,1,15,2,25,6,6,3,18,1, // 1770
29,5,9,3,22,4,2,3,13,2, // 1775
23,6,4,3,15,2,27,4,7,3, // 1780
19,2,31,4,11,3,21,6,3,2, // 1785
15,1,25,6,6,2,17,3,29,4, // 1790
10,2,20,6,3,1,13,3,24,5, // 1795
4,3,16,1,27,5,7,3,17,3, // 1800
0,4,11,2,21,6,1,3,13,2, // 1805
25,4,5,3,16,2,29,4,9,3, // 1810
19,6,30,2,13,1,23,6,4,2, // 1815
14,3,27,4,8,2,18,3,0,4, // 1820
11,3,22,5,2,3,14,1,26,5, // 1825
6,3,16,3,28,4,10,2,20,6, // 1830
30,3,11,2,24,4,4,3,15,2, // 1835
25,6,8,1,19,2,29,6,9,3, // 1840
22,4,3,2,13,3,25,4,7,2, // 1845
17,3,27,6,9,1,21,5,1,3, // 1850
11,3,23,4,5,2,15,3,25,6, // 1855
6,2,19,1,29,6,10,2,22,4, // 1860
3,3,14,2,24,6,6,1,18,2, // 1865
28,6,8,3,20,4,2,2,12,3, // 1870
24,4,4,3,16,2,26,6,6,3, // 1875
17,2,0,4,10,3,22,4,3,2, // 1880
14,3,24,6,5,2,17,1,28,6, // 1885
9,2,21,4,1,3,13,2,23,6, // 1890
5,1,15,3,27,5,7,3,19,1, // 1895
0,5,10,3,22,4,2,3,13,2, // 1900
24,6,4,3,15,2,27,4,8,3, // 1905
20,4,1,2,11,3,22,6,3,2, // 1910
15,1,25,6,7,2,17,3,29,4, // 1915
10,2,21,6,1,3,13,1,24,5, // 1920
5,3,15,3,27,4,8,2,19,6, // 1925
1,1,12,2,22,6,3,3,14,2, // 1930
26,4,6,3,18,2,28,6,10,1, // 1935
20,6,2,2,12,3,24,4,5,2, // 1940
16,3,28,4,9,2,19,6,30,3, // 1945
12,1,23,5,3,3,14,3,26,4, // 1950
7,2,17,3,28,6,9,2,21,4, // 1955
1,3,13,2,25,4,5,3,16,2, // 1960
27,6,9,1,19,6,30,2,11,3, // 1965
23,4,4,2,14,3,27,4,7,3, // 1970
18,2,28,6,11,1,22,5,2,3, // 1975
12,3,25,4,6,2,16,3,26,6, // 1980
8,2,20,4,30,3,11,2,24,4, // 1985
4,3,15,2,25,6,8,1,18,3, // 1990
29,5,9,3,22,4,3,2,13,3, // 1995
23,6,6,1,17,2,27,6,7,3, // 2000 - 2004
20,4,1,2,11,3,23,4,5,2, // 2005 - 2009
15,3,25,6,6,2,19,1,29,6, // 2010
10,2,20,6,3,1,14,2,24,6, // 2015
4,3,17,1,28,5,8,3,20,4, // 2020
1,3,12,2,22,6,2,3,14,2, // 2025
26,4,6,3,17,2,0,4,10,3, // 2030
20,6,1,2,14,1,24,6,5,2, // 2035
15,3,28,4,9,2,19,6,1,1, // 2040
12,3,23,5,3,3,15,1,27,5, // 2045
7,3,17,3,29,4,11,2,21,6, // 2050
1,3,12,2,25,4,5,3,16,2, // 2055
28,4,9,3,19,6,30,2,12,1, // 2060
23,6,4,2,14,3,26,4,8,2, // 2065
18,3,0,4,10,3,22,5,2,3, // 2070
14,1,25,5,6,3,16,3,28,4, // 2075
9,2,20,6,30,3,11,2,23,4, // 2080
4,3,15,2,27,4,7,3,19,2, // 2085
29,6,11,1,21,6,3,2,13,3, // 2090
25,4,6,2,17,3,27,6,9,1, // 2095
20,5,30,3,10,3,22,4,3,2, // 2100
14,3,24,6,5,2,17,1,28,6, // 2105
9,2,21,4,1,3,13,2,23,6, // 2110
5,1,16,2,27,6,7,3,19,4, // 2115
30,2,11,3,23,4,3,3,14,2, // 2120
25,6,5,3,16,2,28,4,9,3, // 2125
21,4,2,2,12,3,23,6,4,2, // 2130
16,1,26,6,8,2,20,4,30,3, // 2135
11,2,22,6,4,1,14,3,25,5, // 2140
6,3,18,1,29,5,9,3,22,4, // 2145
2,3,13,2,23,6,4,3,15,2, // 2150
27,4,7,3,20,4,1,2,11,3, // 2155
21,6,3,2,15,1,25,6,6,2, // 2160
17,3,29,4,10,2,20,6,3,1, // 2165
13,3,24,5,4,3,17,1,28,5, // 2170
8,3,18,6,1,1,12,2,22,6, // 2175
2,3,14,2,26,4,6,3,17,2, // 2180
28,6,10,1,20,6,1,2,12,3, // 2185
24,4,5,2,15,3,28,4,9,2, // 2190
19,6,33,3,12,1,23,5,3,3, // 2195
13,3,25,4,6,2,16,3,26,6, // 2200
8,2,20,4,30,3,11,2,24,4, // 2205
4,3,15,2,25,6,8,1,18,6, // 2210
33,2,9,3,22,4,3,2,13,3, // 2215
25,4,6,3,17,2,27,6,9,1, // 2220
21,5,1,3,11,3,23,4,5,2, // 2225
15,3,25,6,6,2,19,4,33,3, // 2230
10,2,22,4,3,3,14,2,24,6, // 2235
6,1 // 2240 (Hebrew year: 6000)
};
private const int MaxMonthPlusOne = 14;
//
// The lunar calendar has 6 different variations of month lengths
// within a year.
//
private static readonly byte[] s_lunarMonthLen = {
0,00,00,00,00,00,00,00,00,00,00,00,00,0,
0,30,29,29,29,30,29,30,29,30,29,30,29,0, // 3 common year variations
0,30,29,30,29,30,29,30,29,30,29,30,29,0,
0,30,30,30,29,30,29,30,29,30,29,30,29,0,
0,30,29,29,29,30,30,29,30,29,30,29,30,29, // 3 leap year variations
0,30,29,30,29,30,30,29,30,29,30,29,30,29,
0,30,30,30,29,30,30,29,30,29,30,29,30,29
};
internal static readonly DateTime calendarMinValue = new DateTime(1583, 1, 1);
// Gregorian 2239/9/29 = Hebrew 5999/13/29 (last day in Hebrew year 5999).
// We can only format/parse Hebrew numbers up to 999, so we limit the max range to Hebrew year 5999.
internal static readonly DateTime calendarMaxValue = new DateTime((new DateTime(2239, 9, 29, 23, 59, 59, 999)).Ticks + 9999);
/// <summary>
/// Gets the earliest date supported by this calendar: Gregorian 1583/01/01.
/// </summary>
public override DateTime MinSupportedDateTime
{
    get { return calendarMinValue; }
}
/// <summary>
/// Gets the latest date supported by this calendar: the last instant of Gregorian 2239/09/29.
/// </summary>
public override DateTime MaxSupportedDateTime
{
    get { return calendarMaxValue; }
}
/// <summary>
/// Gets the algorithm classification; the Hebrew calendar is lunisolar.
/// </summary>
public override CalendarAlgorithmType AlgorithmType
{
    get { return CalendarAlgorithmType.LunisolarCalendar; }
}
// Default constructor; this calendar keeps no per-instance state of its own —
// all conversion data lives in the static tables above.
public HebrewCalendar()
{
}
// Internal calendar identifier used by the globalization infrastructure.
internal override CalendarId ID
{
    get { return CalendarId.HEBREW; }
}
/*=================================CheckHebrewYearValue==========================
**Action: Check if the Hebrew year value is supported in this class.
**Returns: None.
**Arguments: y Hebrew year value
** era Hebrew era value
**Exceptions: ArgumentOutOfRange_Range if the year value is not supported.
**Note:
** We use a table for the Hebrew calendar calculation, so the supported years are limited.
============================================================================*/
private static void CheckHebrewYearValue(int y, int era, string varName)
{
    CheckEraRange(era);
    // Years outside the translation table (5343 - 5999) cannot be converted.
    bool outOfRange = (y < MinHebrewYear) || (y > MaxHebrewYear);
    if (outOfRange)
    {
        string message = string.Format(
            CultureInfo.CurrentCulture,
            SR.ArgumentOutOfRange_Range,
            MinHebrewYear,
            MaxHebrewYear);
        throw new ArgumentOutOfRangeException(varName, message);
    }
}
/*=================================CheckHebrewMonthValue==========================
**Action: Check if the Hebrew month value is valid.
**Returns: None.
**Arguments: year Hebrew year value
** month Hebrew month value
**Exceptions: ArgumentOutOfRange_Range if the month value is not valid.
**Note:
** Call CheckHebrewYearValue() before calling this to verify the year value is supported.
============================================================================*/
private void CheckHebrewMonthValue(int year, int month, int era)
{
    // A Hebrew year has 12 months, or 13 in a leap year.
    int monthsInYear = GetMonthsInYear(year, era);
    bool invalid = (month < 1) || (month > monthsInYear);
    if (invalid)
    {
        string message = string.Format(
            CultureInfo.CurrentCulture,
            SR.ArgumentOutOfRange_Range,
            1,
            monthsInYear);
        throw new ArgumentOutOfRangeException(nameof(month), message);
    }
}
/*=================================CheckHebrewDayValue==========================
**Action: Check if the Hebrew day value is valid.
**Returns: None.
**Arguments: year Hebrew year value
** month Hebrew month value
** day Hebrew day value.
**Exceptions: ArgumentOutOfRange_Range if the day value is not valid.
**Note:
** Call CheckHebrewYearValue()/CheckHebrewMonthValue() before calling this to verify the year/month values are valid.
============================================================================*/
private void CheckHebrewDayValue(int year, int month, int day, int era)
{
    // Hebrew months have 29 or 30 days depending on the year type.
    int daysInMonth = GetDaysInMonth(year, month, era);
    bool invalid = (day < 1) || (day > daysInMonth);
    if (invalid)
    {
        string message = string.Format(
            CultureInfo.CurrentCulture,
            SR.ArgumentOutOfRange_Range,
            1,
            daysInMonth);
        throw new ArgumentOutOfRangeException(nameof(day), message);
    }
}
// Throws if era is neither CurrentEra nor HebrewEra (the only era this calendar defines).
internal static void CheckEraRange(int era)
{
    bool valid = (era == CurrentEra) || (era == HebrewEra);
    if (!valid)
    {
        throw new ArgumentOutOfRangeException(nameof(era), SR.ArgumentOutOfRange_InvalidEraValue);
    }
}
// Throws if ticks falls outside the supported Gregorian range 1583/01/01 - 2239/09/29.
private static void CheckTicksRange(long ticks)
{
    bool inRange = (ticks >= calendarMinValue.Ticks) && (ticks <= calendarMaxValue.Ticks);
    if (!inRange)
    {
        // Print out the date in Gregorian using InvariantCulture since the DateTime is based on GregorianCalendar.
        string message = string.Format(
            CultureInfo.InvariantCulture,
            SR.ArgumentOutOfRange_CalendarRange,
            calendarMinValue,
            calendarMaxValue);
        throw new ArgumentOutOfRangeException("time", message);
    }
}
// Extracts the requested date part (year, month, or day) from a converted Hebrew date.
internal static int GetResult(__DateBuffer result, int part)
{
    if (part == DatePartYear)
    {
        return result.year;
    }
    if (part == DatePartMonth)
    {
        return result.month;
    }
    if (part == DatePartDay)
    {
        return result.day;
    }
    // Any other part value is a programming error in this class.
    throw new InvalidOperationException(SR.InvalidOperation_DateTimeParsing);
}
/*=================================GetLunarMonthDay==========================
**Action: Using the Hebrew table (HebrewTable) to get the Hebrew month/day value for Gregorian January 1st
** in a given Gregorian year.
** Gregorian January 1st usually falls in Tevet (4th month). Tevet always has 29 days.
** That's why there is no need to specify the lunar month in the table. There are exceptions, and these
** are coded by giving numbers above 29 and below 1.
** Actual decoding takes place in the switch statement below.
**Returns:
** The Hebrew year type. The value is from 1 to 6.
** normal years : 1 = 353 days 2 = 354 days 3 = 355 days.
** Leap years : 4 = 383 days 5 = 384 days 6 = 385 days.
**Arguments:
** gregorianYear The year value in Gregorian calendar. The value should be between 1583 and 2239 (the table range).
** lunarDate Object to take the result of the Hebrew year/month/day.
**Exceptions: ArgumentOutOfRangeException if gregorianYear is outside the table.
============================================================================*/
internal static int GetLunarMonthDay(int gregorianYear, __DateBuffer lunarDate)
{
    //
    // Get the offset into the LunarMonthLen array and the lunar day
    // for January 1st.
    //
    int index = gregorianYear - FirstGregorianTableYear;
    if (index < 0 || index > TABLESIZE)
    {
        throw new ArgumentOutOfRangeException(nameof(gregorianYear));
    }
    // Two table entries per year: [day-of-month code, year type].
    index *= 2;
    lunarDate.day = s_hebrewTable[index];
    // Get the type of the year. The value is from 1 to 6
    int LunarYearType = s_hebrewTable[index + 1];
    //
    // Get the Lunar Month. Day codes outside 1..29 are sentinels (see table notes above).
    //
    switch (lunarDate.day)
    {
        case (0): // 1/1 is on Shvat 1
            lunarDate.month = 5;
            lunarDate.day = 1;
            break;
        case (30): // 1/1 is on Kislev 30
            lunarDate.month = 3;
            break;
        case (31): // 1/1 is on Shvat 2
            lunarDate.month = 5;
            lunarDate.day = 2;
            break;
        case (32): // 1/1 is on Shvat 3
            lunarDate.month = 5;
            lunarDate.day = 3;
            break;
        case (33): // 1/1 is on Kislev 29
            lunarDate.month = 3;
            lunarDate.day = 29;
            break;
        default: // 1/1 is on Tevet (This is the general case)
            lunarDate.month = 4;
            break;
    }
    return (LunarYearType);
}
// Returns a given date part of this DateTime. This method is used
// to compute the year, day-of-year, month, or day part.
// Strategy: find the Hebrew date of Gregorian Jan 1 from the table, then walk
// forward month by month over the day difference to the requested date.
internal virtual int GetDatePart(long ticks, int part)
{
    // The Gregorian year, month, day value for ticks.
    int gregorianYear, gregorianMonth, gregorianDay;
    int hebrewYearType; // lunar year type
    long AbsoluteDate; // absolute date - absolute date 1/1/1600
    //
    // Make sure we have a valid Gregorian date that will fit into our
    // Hebrew conversion limits.
    //
    CheckTicksRange(ticks);
    DateTime time = new DateTime(ticks);
    //
    // Save the Gregorian date values.
    //
    time.GetDatePart(out gregorianYear, out gregorianMonth, out gregorianDay);
    __DateBuffer lunarDate = new __DateBuffer(); // lunar month and day for Jan 1
    // From the table look-up value of HebrewTable[index] (stored in lunarDate.day), we get the
    // lunar month and lunar day where the Gregorian date 1/1 falls.
    lunarDate.year = gregorianYear + HebrewYearOf1AD;
    hebrewYearType = GetLunarMonthDay(gregorianYear, lunarDate);
    // This is the buffer used to store the result Hebrew date.
    __DateBuffer result = new __DateBuffer();
    //
    // Store the values for the start of the new year - 1/1.
    //
    result.year = lunarDate.year;
    result.month = lunarDate.month;
    result.day = lunarDate.day;
    //
    // Get the absolute date from 1/1/1600.
    //
    AbsoluteDate = GregorianCalendar.GetAbsoluteDate(gregorianYear, gregorianMonth, gregorianDay);
    //
    // If the requested date was 1/1, then we're done.
    //
    if ((gregorianMonth == 1) && (gregorianDay == 1))
    {
        return (GetResult(result, part));
    }
    //
    // Calculate the number of days between 1/1 and the requested date.
    //
    long NumDays; // number of days since 1/1
    NumDays = AbsoluteDate - GregorianCalendar.GetAbsoluteDate(gregorianYear, 1, 1);
    //
    // If the requested date is within the current lunar month, then
    // we're done.
    //
    if ((NumDays + (long)lunarDate.day) <= (long)(s_lunarMonthLen[hebrewYearType * MaxMonthPlusOne + lunarDate.month]))
    {
        result.day += (int)NumDays;
        return (GetResult(result, part));
    }
    //
    // Adjust for the current partial month.
    //
    result.month++;
    result.day = 1;
    //
    // Adjust the Lunar Month and Year (if necessary) based on the number
    // of days between 1/1 and the requested date.
    //
    // Assumes Jan 1 can never translate to the last Lunar month, which
    // is true.
    //
    NumDays -= (long)(s_lunarMonthLen[hebrewYearType * MaxMonthPlusOne + lunarDate.month] - lunarDate.day);
    Debug.Assert(NumDays >= 1, "NumDays >= 1");
    // If NumDays is 1, then we are done. Otherwise, find the correct Hebrew month
    // and day.
    if (NumDays > 1)
    {
        //
        // See if we're on the correct Lunar month.
        //
        while (NumDays > (long)(s_lunarMonthLen[hebrewYearType * MaxMonthPlusOne + result.month]))
        {
            //
            // Adjust the number of days and move to the next month.
            //
            NumDays -= (long)(s_lunarMonthLen[hebrewYearType * MaxMonthPlusOne + result.month++]);
            //
            // See if we need to adjust the Year.
            // Must handle both 12 and 13 month years.
            // (A zero month length marks a month that does not exist in this year type.)
            //
            if ((result.month > 13) || (s_lunarMonthLen[hebrewYearType * MaxMonthPlusOne + result.month] == 0))
            {
                //
                // Adjust the Year.
                //
                result.year++;
                hebrewYearType = s_hebrewTable[(gregorianYear + 1 - FirstGregorianTableYear) * 2 + 1];
                //
                // Adjust the Month.
                //
                result.month = 1;
            }
        }
        //
        // Found the right Lunar month.
        //
        result.day += (int)(NumDays - 1);
    }
    return (GetResult(result, part));
}
// Returns the DateTime resulting from adding the given number of
// months to the specified DateTime. The result is computed by incrementing
// (or decrementing) the year and month parts of the specified DateTime by
// value months, and, if required, adjusting the day part of the
// resulting date downwards to the last day of the resulting month in the
// resulting year. The time-of-day part of the result is the same as the
// time-of-day part of the specified DateTime.
//
// In more precise terms, considering the specified DateTime to be of the
// form y / m / d + t, where y is the
// year, m is the month, d is the day, and t is the
// time-of-day, the result is y1 / m1 / d1 + t,
// where y1 and m1 are computed by adding value months
// to y and m, and d1 is the largest value less than
// or equal to d that denotes a valid day in month m1 of year
// y1.
//
public override DateTime AddMonths(DateTime time, int months)
{
    try
    {
        int y = GetDatePart(time.Ticks, DatePartYear);
        int m = GetDatePart(time.Ticks, DatePartMonth);
        int d = GetDatePart(time.Ticks, DatePartDay);
        int monthsInYear;
        int i;
        if (months >= 0)
        {
            // Walk forward a year at a time while the month index overflows
            // the 12 or 13 months of the current Hebrew year.
            i = m + months;
            while (i > (monthsInYear = GetMonthsInYear(y, CurrentEra)))
            {
                y++;
                i -= monthsInYear;
            }
        }
        else
        {
            // Negative offset: if we stay within the current year (i > 0) nothing
            // more to do; otherwise borrow whole years until the deficit fits.
            if ((i = m + months) <= 0)
            {
                months = -months;
                months -= m;
                y--;
                while (months > (monthsInYear = GetMonthsInYear(y, CurrentEra)))
                {
                    y--;
                    months -= monthsInYear;
                }
                monthsInYear = GetMonthsInYear(y, CurrentEra);
                i = monthsInYear - months;
            }
        }
        // Clamp the day to the length of the target month (29 or 30 days).
        int days = GetDaysInMonth(y, i);
        if (d > days)
        {
            d = days;
        }
        return (new DateTime(ToDateTime(y, i, d, 0, 0, 0, 0).Ticks + (time.Ticks % TicksPerDay)));
    }
    // We expect ArgumentException and ArgumentOutOfRangeException (which is subclass of ArgumentException)
    // If exception is thrown in the calls above, we are out of the supported range of this calendar.
    catch (ArgumentException)
    {
        throw new ArgumentOutOfRangeException(
            nameof(months),
            string.Format(
                CultureInfo.CurrentCulture,
                SR.ArgumentOutOfRange_AddValue));
    }
}
// Returns the DateTime resulting from adding the given number of years to the
// specified DateTime. The month is clamped to the number of months in the
// target year (12 or 13) and the day is clamped to the length of the resulting
// month; the time-of-day part is preserved.
public override DateTime AddYears(DateTime time, int years)
{
    int year = GetDatePart(time.Ticks, DatePartYear) + years;
    int month = GetDatePart(time.Ticks, DatePartMonth);
    int day = GetDatePart(time.Ticks, DatePartDay);
    CheckHebrewYearValue(year, Calendar.CurrentEra, nameof(years));
    // Clamp month (for non-leap target years), then clamp day.
    month = Math.Min(month, GetMonthsInYear(year, CurrentEra));
    day = Math.Min(day, GetDaysInMonth(year, month));
    long ticks = ToDateTime(year, month, day, 0, 0, 0, 0).Ticks + (time.Ticks % TicksPerDay);
    Calendar.CheckAddResult(ticks, MinSupportedDateTime, MaxSupportedDateTime);
    return new DateTime(ticks);
}
// Returns the Hebrew day-of-month part of the specified DateTime (1 to 30).
public override int GetDayOfMonth(DateTime time)
{
    return GetDatePart(time.Ticks, DatePartDay);
}
// Returns the day-of-week for the specified DateTime: 0 = Sunday ... 6 = Saturday.
public override DayOfWeek GetDayOfWeek(DateTime time)
{
    // Gregorian 0001/01/01 is a Monday (1), hence the +1 offset on the day count.
    long dayNumber = time.Ticks / TicksPerDay;
    return (DayOfWeek)((int)(dayNumber + 1) % 7);
}
// Looks up the Hebrew year type (1..6) from the translation table.
internal static int GetHebrewYearType(int year, int era)
{
    CheckHebrewYearValue(year, era, nameof(year));
    // The table is indexed by Gregorian year starting at FirstGregorianTableYear,
    // so convert the Hebrew year to a Gregorian year first. Each year has two
    // table entries; the second holds the year type.
    int gregorianYear = year - HebrewYearOf1AD;
    int tableIndex = (gregorianYear - FirstGregorianTableYear) * 2;
    return s_hebrewTable[tableIndex + 1];
}
// Returns the day-of-year part of the specified DateTime. The returned value
// is an integer between 1 and 385 (a Hebrew year has 353 to 385 days).
//
public override int GetDayOfYear(DateTime time)
{
    // Get Hebrew year value of the specified time.
    int year = GetYear(time);
    DateTime beginOfYearDate;
    if (year == 5343)
    {
        // Gregorian 1583/01/01 corresponds to Hebrew 5343/04/07 (MinSupportedDateTime)
        // To figure out the Gregorian date associated with Hebrew 5343/01/01, we need to
        // count the days from 5343/01/01 to 5343/04/07 and subtract that from Gregorian
        // 1583/01/01.
        // 1. Tishri (30 days)
        // 2. Heshvan (30 days since 5343 has 355 days)
        // 3. Kislev (30 days since 5343 has 355 days)
        // 96 days to get from 5343/01/01 to 5343/04/07
        // Gregorian 1583/01/01 - 96 days = 1582/9/27
        // the beginning of Hebrew year 5343 corresponds to Gregorian September 27, 1582.
        beginOfYearDate = new DateTime(1582, 9, 27);
    }
    else
    {
        // ToDateTime would throw for year 5343 (first supported year) because the
        // start of that year is before MinSupportedDateTime, hence the special case above.
        beginOfYearDate = ToDateTime(year, 1, 1, 0, 0, 0, 0, CurrentEra);
    }
    return ((int)((time.Ticks - beginOfYearDate.Ticks) / TicksPerDay) + 1);
}
// Returns the number of days (29 or 30) in the given Hebrew month of the given year.
public override int GetDaysInMonth(int year, int month, int era)
{
    CheckEraRange(era);
    int hebrewYearType = GetHebrewYearType(year, era);
    CheckHebrewMonthValue(year, month, era);
    Debug.Assert(hebrewYearType >= 1 && hebrewYearType <= 6,
        "hebrewYearType should be from 1 to 6, but now hebrewYearType = " + hebrewYearType + " for hebrew year " + year);
    // A zero entry marks a month that does not exist in this year variation.
    int monthDays = s_lunarMonthLen[hebrewYearType * MaxMonthPlusOne + month];
    if (monthDays != 0)
    {
        return monthDays;
    }
    throw new ArgumentOutOfRangeException(nameof(month), SR.ArgumentOutOfRange_Month);
}
// Returns the number of days in the given Hebrew year for the given era:
// 353-355 for common years (types 1-3), 383-385 for leap years (types 4-6).
public override int GetDaysInYear(int year, int era)
{
    CheckEraRange(era);
    int lunarYearType = GetHebrewYearType(year, era);
    return (lunarYearType < 4)
        ? (352 + lunarYearType)
        : (382 + (lunarYearType - 3));
}
// The Hebrew calendar has a single era, so every DateTime maps to HebrewEra.
public override int GetEra(DateTime time)
{
    return HebrewEra;
}
// Gets the list of eras; only HebrewEra exists. A fresh array is returned on
// every access so callers cannot mutate shared state.
public override int[] Eras
{
    get { return new int[] { HebrewEra }; }
}
// Returns the Hebrew month part of the specified DateTime (1 to 13).
public override int GetMonth(DateTime time)
{
    return GetDatePart(time.Ticks, DatePartMonth);
}
// Returns 13 for leap years (which insert an extra month), otherwise 12.
public override int GetMonthsInYear(int year, int era)
{
    if (IsLeapYear(year, era))
    {
        return 13;
    }
    return 12;
}
// Returns the Hebrew year part of the specified DateTime
// (within the supported range 5343 to 5999).
public override int GetYear(DateTime time)
{
    return GetDatePart(time.Ticks, DatePartYear);
}
// Checks whether a given day in the specified era is a leap day. This method returns true if
// the date is a leap day, or false if not.
//
public override bool IsLeapDay(int year, int month, int day, int era)
{
    if (IsLeapMonth(year, month, era))
    {
        // Every day in a leap month is a leap day.
        CheckHebrewDayValue(year, month, day, era);
        return (true);
    }
    // NOTE(review): this branch passes Calendar.CurrentEra rather than the caller's
    // era; the year/era pair was already validated by IsLeapMonth above, so the
    // result is the same — confirm this is intentional.
    else if (IsLeapYear(year, Calendar.CurrentEra))
    {
        // There is an additional day in the 6th month in the leap year (the extra day is the 30th day in the 6th month),
        // so we should return true for 6/30 if that's in a leap year.
        if (month == 6 && day == 30)
        {
            return (true);
        }
    }
    CheckHebrewDayValue(year, month, day, era);
    return (false);
}
// Returns the leap month in a calendar year of the specified era: the 7th month
// for leap years, or 0 when the year is not a leap year.
public override int GetLeapMonth(int year, int era)
{
    // Year/era values are checked in IsLeapYear().
    return IsLeapYear(year, era) ? 7 : 0;
}
// Checks whether the given month is the leap month, i.e. the 7th month of a leap year.
public override bool IsLeapMonth(int year, int month, int era)
{
    // Year/era values are checked in IsLeapYear(); keep that call first so the
    // exception order matches the original contract.
    bool leapYear = IsLeapYear(year, era);
    CheckHebrewMonthValue(year, month, era);
    return leapYear && (month == 7);
}
// Checks whether a given year in the specified era is a leap year. A Hebrew
// year is a leap year in 7 out of every 19 years of the Metonic cycle.
public override bool IsLeapYear(int year, int era)
{
    CheckHebrewYearValue(year, era, nameof(year));
    long cyclePosition = (7 * (long)year + 1) % 19;
    return cyclePosition < 7;
}
// Returns the signed day count (month1, day1) - (month2, day2) within a year of
// the given lunar year type.
private static int GetDayDifference(int lunarYearType, int month1, int day1, int month2, int day2)
{
    if (month1 == month2)
    {
        return (day1 - day2);
    }
    // Make sure that (month1, day1) < (month2, day2)
    bool swap = (month1 > month2);
    if (swap)
    {
        // (month1, day1) > (month2, day2). Swap the values so that
        // (month1, day1) < (month2, day2); the result is negated on return.
        int tempMonth, tempDay;
        tempMonth = month1; tempDay = day1;
        month1 = month2; day1 = day2;
        month2 = tempMonth; day2 = tempDay;
    }
    // Get the number of days from (month1,day1) to (month1, end of month1)
    int days = s_lunarMonthLen[lunarYearType * MaxMonthPlusOne + month1] - day1;
    // Move to next month.
    month1++;
    // Add up the days.
    while (month1 < month2)
    {
        days += s_lunarMonthLen[lunarYearType * MaxMonthPlusOne + month1++];
    }
    days += day2;
    return (swap ? days : -days);
}
/*=================================HebrewToGregorian==========================
**Action: Convert Hebrew date to Gregorian date.
**Returns: The Gregorian DateTime for the given Hebrew date and time-of-day.
**Arguments: Hebrew year/month/day plus the time-of-day components.
**Exceptions:
** The algorithm is like this:
** The Hebrew year has an offset to the Gregorian year, so we can guess the Gregorian year for
** the specified Hebrew year. That is, GregorianYear = HebrewYear - HebrewYearOf1AD.
**
** From the Gregorian year and HebrewTable, we can get the Hebrew month/day value
** of the Gregorian date January 1st. Let's call this month/day value [hebrewDateForJan1]
**
** If the requested Hebrew month/day is less than [hebrewDateForJan1], we know the result
** Gregorian date falls in previous year. So we decrease the Gregorian year value, and
** retrieve the Hebrew month/day value of the Gregorian date January 1st again.
**
** Now, we get the answer of the Gregorian year.
**
** The next step is to get the number of days between the requested Hebrew month/day
** and [hebrewDateForJan1]. When we get that, we can create the DateTime by adding/subtracting
** the ticks value of the number of days.
**
============================================================================*/
private static DateTime HebrewToGregorian(int hebrewYear, int hebrewMonth, int hebrewDay, int hour, int minute, int second, int millisecond)
{
    // Get the rough Gregorian year for the specified hebrewYear.
    //
    int gregorianYear = hebrewYear - HebrewYearOf1AD;
    __DateBuffer hebrewDateOfJan1 = new __DateBuffer(); // year value is unused.
    int lunarYearType = GetLunarMonthDay(gregorianYear, hebrewDateOfJan1);
    if ((hebrewMonth == hebrewDateOfJan1.month) && (hebrewDay == hebrewDateOfJan1.day))
    {
        return (new DateTime(gregorianYear, 1, 1, hour, minute, second, millisecond));
    }
    // Signed day offset of the requested Hebrew date from Gregorian January 1st.
    int days = GetDayDifference(lunarYearType, hebrewMonth, hebrewDay, hebrewDateOfJan1.month, hebrewDateOfJan1.day);
    DateTime gregorianNewYear = new DateTime(gregorianYear, 1, 1);
    return (new DateTime(gregorianNewYear.Ticks + days * TicksPerDay
        + TimeToTicks(hour, minute, second, millisecond)));
}
// Converts the given Hebrew date and time-of-day components to a DateTime,
// validating every component and the resulting Gregorian range first. Throws
// ArgumentOutOfRangeException if the n-tuple is invalid.
public override DateTime ToDateTime(int year, int month, int day, int hour, int minute, int second, int millisecond, int era)
{
    CheckHebrewYearValue(year, era, nameof(year));
    CheckHebrewMonthValue(year, month, era);
    CheckHebrewDayValue(year, month, day, era);
    DateTime result = HebrewToGregorian(year, month, day, hour, minute, second, millisecond);
    CheckTicksRange(result.Ticks);
    return result;
}
// Default upper bound (Hebrew year 5790) for interpreting two-digit years.
private const int DEFAULT_TWO_DIGIT_YEAR_MAX = 5790;
public override int TwoDigitYearMax
{
    get
    {
        if (twoDigitYearMax == -1)
        {
            // Lazily load the configured value, falling back to the default above.
            twoDigitYearMax = GetSystemTwoDigitYearSetting(ID, DEFAULT_TWO_DIGIT_YEAR_MAX);
        }
        return (twoDigitYearMax);
    }
    set
    {
        VerifyWritable();
        if (value == 99)
        {
            // Do nothing here. Year 99 is allowed so that TwoDigitYearMax is disabled.
        }
        else
        {
            CheckHebrewYearValue(value, HebrewEra, nameof(value));
        }
        twoDigitYearMax = value;
    }
}
// Converts a 2-digit year to a 4-digit Hebrew year using TwoDigitYearMax;
// years of 3+ digits must already fall within the supported range (5343-5999).
// Throws ArgumentOutOfRangeException for negative or out-of-range years.
public override int ToFourDigitYear(int year)
{
    if (year < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(year),
            SR.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (year < 100)
    {
        return (base.ToFourDigitYear(year));
    }
    // Reuse the shared range validation instead of duplicating the min/max check
    // here; it throws the identical ArgumentOutOfRangeException (same parameter
    // name and SR.ArgumentOutOfRange_Range message) for out-of-range years.
    CheckHebrewYearValue(year, HebrewEra, nameof(year));
    return (year);
}
// Simple mutable buffer used to pass a (year, month, day) triple between the
// conversion helpers; fields hold Hebrew calendar values.
internal class __DateBuffer
{
    internal int year;  // Hebrew year
    internal int month; // Hebrew month (1-based)
    internal int day;   // Hebrew day of month
}
}
}
| |
// This code is part of the Fungus library (http://fungusgames.com) maintained by Chris Gregan (http://twitter.com/gofungus).
// It is released for free under the MIT open source license (https://github.com/snozbot/fungus/blob/master/LICENSE)
#define PRETTY //Comment out when you no longer need to read JSON to disable pretty Print system-wide
//Using doubles will cause errors in VectorTemplates.cs; Unity speaks floats
#define USEFLOAT //Use floats for numbers instead of doubles (enable if you're getting too many significant digits in string output)
//#define POOLING //Currently using a build setting for this one (also it's experimental)
#if UNITY_2 || UNITY_3 || UNITY_4 || UNITY_5
using UnityEngine;
using Debug = UnityEngine.Debug;
#endif
using System.Diagnostics;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using MoonSharp.Interpreter.Diagnostics.PerformanceCounters;
/*
* http://www.opensource.org/licenses/lgpl-2.1.php
* JSONObject class v.1.4.1
* for use with Unity
* Copyright Matt Schoen 2010 - 2013
*/
// Added to Fungus namespace to minimize conflicts with other assets
namespace Fungus
{
public class JSONObject {
#if POOLING
const int MAX_POOL_SIZE = 10000;
public static Queue<JSONObject> releaseQueue = new Queue<JSONObject>();
#endif
const int MAX_DEPTH = 100;
const string INFINITY = "\"INFINITY\"";
const string NEGINFINITY = "\"NEGINFINITY\"";
const string NaN = "\"NaN\"";
const string NEWLINE = "\r\n";
public static readonly char[] WHITESPACE = { ' ', '\r', '\n', '\t', '\uFEFF', '\u0009' };
// JSON value kinds. NOTE(review): BAKED appears to mark a node whose textual
// form was pre-rendered — confirm against the Print/Bake code elsewhere in this file.
public enum Type { NULL, STRING, NUMBER, OBJECT, ARRAY, BOOL, BAKED }
// True only for composite nodes (ARRAY/OBJECT) that can hold child elements.
public bool isContainer { get { return (type == Type.ARRAY || type == Type.OBJECT); } }
public Type type = Type.NULL;
// Number of child elements; -1 when this node has no backing list
// (i.e. it is not an ARRAY or OBJECT).
public int Count {
    get {
        return (list == null) ? -1 : list.Count;
    }
}
public List<JSONObject> list;
public List<string> keys;
public string str;
#if USEFLOAT
// Numeric payload stored as float in USEFLOAT builds (see file header defines).
public float n;
// Convenience accessor; identical to n in USEFLOAT builds.
public float f {
    get {
        return n;
    }
}
#else
// Numeric payload stored as double in default builds.
public double n;
// Float view of the double-backed number (may lose precision).
public float f {
    get {
        return (float)n;
    }
}
#endif
public bool useInt;
public long i;
public bool b;
public delegate void AddJSONContents(JSONObject self);
public static JSONObject nullJO { get { return Create(Type.NULL); } } //an empty, null object (a new instance on every access)
public static JSONObject obj { get { return Create(Type.OBJECT); } } //an empty object (a new instance on every access)
public static JSONObject arr { get { return Create(Type.ARRAY); } } //an empty array (a new instance on every access)
public JSONObject(Type t) {
type = t;
switch(t) {
case Type.ARRAY:
list = new List<JSONObject>();
break;
case Type.OBJECT:
list = new List<JSONObject>();
keys = new List<string>();
break;
}
}
public JSONObject(bool b) {
type = Type.BOOL;
this.b = b;
}
#if USEFLOAT
public JSONObject(float f) {
type = Type.NUMBER;
n = f;
}
#else
public JSONObject(double d) {
type = Type.NUMBER;
n = d;
}
#endif
public JSONObject(int i) {
type = Type.NUMBER;
this.i = i;
useInt = true;
n = i;
}
public JSONObject(long l) {
type = Type.NUMBER;
i = l;
useInt = true;
n = l;
}
public JSONObject(Dictionary<string, string> dic) {
type = Type.OBJECT;
keys = new List<string>();
list = new List<JSONObject>();
//Not sure if it's worth removing the foreach here
foreach(KeyValuePair<string, string> kvp in dic) {
keys.Add(kvp.Key);
list.Add(CreateStringObject(kvp.Value));
}
}
public JSONObject(Dictionary<string, JSONObject> dic) {
type = Type.OBJECT;
keys = new List<string>();
list = new List<JSONObject>();
//Not sure if it's worth removing the foreach here
foreach(KeyValuePair<string, JSONObject> kvp in dic) {
keys.Add(kvp.Key);
list.Add(kvp.Value);
}
}
public JSONObject(AddJSONContents content) {
content.Invoke(this);
}
public JSONObject(JSONObject[] objs) {
type = Type.ARRAY;
list = new List<JSONObject>(objs);
}
//Convenience function for creating a JSONObject containing a string. This is not part of the constructor so that malformed JSON data doesn't just turn into a string object
public static JSONObject StringObject(string val) { return CreateStringObject(val); }
public void Absorb(JSONObject obj) {
list.AddRange(obj.list);
keys.AddRange(obj.keys);
str = obj.str;
n = obj.n;
useInt = obj.useInt;
i = obj.i;
b = obj.b;
type = obj.type;
}
	/// <summary>
	/// Factory for an empty JSONObject. When POOLING is enabled, reuses an
	/// instance from the release queue (repopulated by the finalizer)
	/// instead of allocating; otherwise always allocates a fresh object.
	/// </summary>
	public static JSONObject Create() {
#if POOLING
		JSONObject result = null;
		// Dequeue until a usable pooled instance is found or the pool runs dry.
		while(result == null && releaseQueue.Count > 0) {
			result = releaseQueue.Dequeue();
#if DEV
			//The following cases should NEVER HAPPEN (but they do...)
			if(result == null)
				Debug.WriteLine("wtf " + releaseQueue.Count);
			else if(result.list != null)
				Debug.WriteLine("wtflist " + result.list.Count);
#endif
		}
		if(result != null)
			return result;
#endif
		return new JSONObject();
	}
public static JSONObject Create(Type t) {
JSONObject obj = Create();
obj.type = t;
switch(t) {
case Type.ARRAY:
obj.list = new List<JSONObject>();
break;
case Type.OBJECT:
obj.list = new List<JSONObject>();
obj.keys = new List<string>();
break;
}
return obj;
}
public static JSONObject Create(bool val) {
JSONObject obj = Create();
obj.type = Type.BOOL;
obj.b = val;
return obj;
}
public static JSONObject Create(float val) {
JSONObject obj = Create();
obj.type = Type.NUMBER;
obj.n = val;
return obj;
}
public static JSONObject Create(int val) {
JSONObject obj = Create();
obj.type = Type.NUMBER;
obj.n = val;
obj.useInt = true;
obj.i = val;
return obj;
}
public static JSONObject Create(long val) {
JSONObject obj = Create();
obj.type = Type.NUMBER;
obj.n = val;
obj.useInt = true;
obj.i = val;
return obj;
}
public static JSONObject CreateStringObject(string val) {
JSONObject obj = Create();
obj.type = Type.STRING;
obj.str = val;
return obj;
}
public static JSONObject CreateBakedObject(string val) {
JSONObject bakedObject = Create();
bakedObject.type = Type.BAKED;
bakedObject.str = val;
return bakedObject;
}
/// <summary>
/// Create a JSONObject by parsing string data
/// </summary>
/// <param name="val">The string to be parsed</param>
	/// <param name="maxDepth">The maximum depth for the parser to search. Set this to 1 for the first level,
/// 2 for the first 2 levels, etc. It defaults to -2 because -1 is the depth value that is parsed (see below)</param>
/// <param name="storeExcessLevels">Whether to store levels beyond maxDepth in baked JSONObjects</param>
/// <param name="strict">Whether to be strict in the parsing. For example, non-strict parsing will successfully
/// parse "a string" into a string-type </param>
/// <returns></returns>
public static JSONObject Create(string val, int maxDepth = -2, bool storeExcessLevels = false, bool strict = false) {
JSONObject obj = Create();
obj.Parse(val, maxDepth, storeExcessLevels, strict);
return obj;
}
public static JSONObject Create(AddJSONContents content) {
JSONObject obj = Create();
content.Invoke(obj);
return obj;
}
public static JSONObject Create(Dictionary<string, string> dic) {
JSONObject obj = Create();
obj.type = Type.OBJECT;
obj.keys = new List<string>();
obj.list = new List<JSONObject>();
//Not sure if it's worth removing the foreach here
foreach(KeyValuePair<string, string> kvp in dic) {
obj.keys.Add(kvp.Key);
obj.list.Add(CreateStringObject(kvp.Value));
}
return obj;
}
public JSONObject() { }
#region PARSE
public JSONObject(string str, int maxDepth = -2, bool storeExcessLevels = false, bool strict = false) { //create a new JSONObject from a string (this will also create any children, and parse the whole string)
Parse(str, maxDepth, storeExcessLevels, strict);
}
void Parse(string str, int maxDepth = -2, bool storeExcessLevels = false, bool strict = false) {
if(!string.IsNullOrEmpty(str)) {
str = str.Trim(WHITESPACE);
if(strict) {
if(str[0] != '[' && str[0] != '{') {
type = Type.NULL;
#if UNITY_2 || UNITY_3 || UNITY_4 || UNITY_5
Debug.LogWarning
#else
Debug.WriteLine
#endif
("Improper (strict) JSON formatting. First character must be [ or {");
return;
}
}
if(str.Length > 0) {
#if UNITY_WP8 || UNITY_WSA
if (str == "true") {
type = Type.BOOL;
b = true;
} else if (str == "false") {
type = Type.BOOL;
b = false;
} else if (str == "null") {
type = Type.NULL;
#else
if(string.Compare(str, "true", true) == 0) {
type = Type.BOOL;
b = true;
} else if(string.Compare(str, "false", true) == 0) {
type = Type.BOOL;
b = false;
} else if(string.Compare(str, "null", true) == 0) {
type = Type.NULL;
#endif
#if USEFLOAT
} else if(str == INFINITY) {
type = Type.NUMBER;
n = float.PositiveInfinity;
} else if(str == NEGINFINITY) {
type = Type.NUMBER;
n = float.NegativeInfinity;
} else if(str == NaN) {
type = Type.NUMBER;
n = float.NaN;
#else
} else if(str == INFINITY) {
type = Type.NUMBER;
n = double.PositiveInfinity;
} else if(str == NEGINFINITY) {
type = Type.NUMBER;
n = double.NegativeInfinity;
} else if(str == NaN) {
type = Type.NUMBER;
n = double.NaN;
#endif
} else if(str[0] == '"') {
type = Type.STRING;
this.str = str.Substring(1, str.Length - 2);
} else {
int tokenTmp = 1;
/*
* Checking for the following formatting (www.json.org)
* object - {"field1":value,"field2":value}
* array - [value,value,value]
* value - string - "string"
* - number - 0.0
* - bool - true -or- false
* - null - null
*/
int offset = 0;
switch(str[offset]) {
case '{':
type = Type.OBJECT;
keys = new List<string>();
list = new List<JSONObject>();
break;
case '[':
type = Type.ARRAY;
list = new List<JSONObject>();
break;
default:
try {
#if USEFLOAT
n = System.Convert.ToSingle(str);
#else
n = System.Convert.ToDouble(str);
#endif
if(!str.Contains(".")) {
i = System.Convert.ToInt64(str);
useInt = true;
}
type = Type.NUMBER;
} catch(System.FormatException) {
type = Type.NULL;
#if UNITY_2 || UNITY_3 || UNITY_4 || UNITY_5
Debug.LogWarning
#else
Debug.WriteLine
#endif
("improper JSON formatting:" + str);
}
return;
}
string propName = "";
bool openQuote = false;
bool inProp = false;
int depth = 0;
while(++offset < str.Length) {
if(System.Array.IndexOf(WHITESPACE, str[offset]) > -1)
continue;
if(str[offset] == '\\') {
offset += 1;
continue;
}
if(str[offset] == '"') {
if(openQuote) {
if(!inProp && depth == 0 && type == Type.OBJECT)
propName = str.Substring(tokenTmp + 1, offset - tokenTmp - 1);
openQuote = false;
} else {
if(depth == 0 && type == Type.OBJECT)
tokenTmp = offset;
openQuote = true;
}
}
if(openQuote)
continue;
if(type == Type.OBJECT && depth == 0) {
if(str[offset] == ':') {
tokenTmp = offset + 1;
inProp = true;
}
}
if(str[offset] == '[' || str[offset] == '{') {
depth++;
} else if(str[offset] == ']' || str[offset] == '}') {
depth--;
}
//if (encounter a ',' at top level) || a closing ]/}
if((str[offset] == ',' && depth == 0) || depth < 0) {
inProp = false;
string inner = str.Substring(tokenTmp, offset - tokenTmp).Trim(WHITESPACE);
if(inner.Length > 0) {
if(type == Type.OBJECT)
keys.Add(propName);
if(maxDepth != -1) //maxDepth of -1 is the end of the line
list.Add(Create(inner, (maxDepth < -1) ? -2 : maxDepth - 1));
else if(storeExcessLevels)
list.Add(CreateBakedObject(inner));
}
tokenTmp = offset + 1;
}
}
}
} else type = Type.NULL;
} else type = Type.NULL; //If the string is missing, this is a null
//Profiler.EndSample();
}
#endregion
public bool IsNumber { get { return type == Type.NUMBER; } }
public bool IsNull { get { return type == Type.NULL; } }
public bool IsString { get { return type == Type.STRING; } }
public bool IsBool { get { return type == Type.BOOL; } }
public bool IsArray { get { return type == Type.ARRAY; } }
public bool IsObject { get { return type == Type.OBJECT || type == Type.BAKED; } }
public void Add(bool val) {
Add(Create(val));
}
public void Add(float val) {
Add(Create(val));
}
public void Add(int val) {
Add(Create(val));
}
public void Add(string str) {
Add(CreateStringObject(str));
}
public void Add(AddJSONContents content) {
Add(Create(content));
}
public void Add(JSONObject obj) {
if(obj) { //Don't do anything if the object is null
if(type != Type.ARRAY) {
type = Type.ARRAY; //Congratulations, son, you're an ARRAY now
if(list == null)
list = new List<JSONObject>();
}
list.Add(obj);
}
}
public void AddField(string name, bool val) {
AddField(name, Create(val));
}
public void AddField(string name, float val) {
AddField(name, Create(val));
}
public void AddField(string name, int val) {
AddField(name, Create(val));
}
public void AddField(string name, long val) {
AddField(name, Create(val));
}
public void AddField(string name, AddJSONContents content) {
AddField(name, Create(content));
}
public void AddField(string name, string val) {
AddField(name, CreateStringObject(val));
}
	/// <summary>
	/// Append a named field, promoting this instance to an OBJECT first if
	/// necessary. A null value is silently ignored. If this was an ARRAY,
	/// the existing elements keep their positions and get their indices
	/// ("0", "1", ...) as keys.
	/// </summary>
	public void AddField(string name, JSONObject obj) {
		if(obj) {		//Don't do anything if the object is null
			if(type != Type.OBJECT) {
				if(keys == null)
					keys = new List<string>();
				if(type == Type.ARRAY) {
					// Synthesize keys for the pre-existing array elements so
					// keys and list stay index-aligned.
					for(int i = 0; i < list.Count; i++)
						keys.Add(i + "");
				} else
					if(list == null)
						list = new List<JSONObject>();
				type = Type.OBJECT;		//Congratulations, son, you're an OBJECT now
			}
			keys.Add(name);
			list.Add(obj);
		}
	}
public void SetField(string name, string val) { SetField(name, CreateStringObject(val)); }
public void SetField(string name, bool val) { SetField(name, Create(val)); }
public void SetField(string name, float val) { SetField(name, Create(val)); }
public void SetField(string name, int val) { SetField(name, Create(val)); }
public void SetField(string name, JSONObject obj) {
if(HasField(name)) {
list.Remove(this[name]);
keys.Remove(name);
}
AddField(name, obj);
}
public void RemoveField(string name) {
if(keys.IndexOf(name) > -1) {
list.RemoveAt(keys.IndexOf(name));
keys.Remove(name);
}
}
public delegate void FieldNotFound(string name);
public delegate void GetFieldResponse(JSONObject obj);
public bool GetField(out bool field, string name, bool fallback) {
field = fallback;
return GetField(ref field, name);
}
public bool GetField(ref bool field, string name, FieldNotFound fail = null) {
if(type == Type.OBJECT) {
int index = keys.IndexOf(name);
if(index >= 0) {
field = list[index].b;
return true;
}
}
if(fail != null) fail.Invoke(name);
return false;
}
#if USEFLOAT
public bool GetField(out float field, string name, float fallback) {
#else
public bool GetField(out double field, string name, double fallback) {
#endif
field = fallback;
return GetField(ref field, name);
}
#if USEFLOAT
public bool GetField(ref float field, string name, FieldNotFound fail = null) {
#else
public bool GetField(ref double field, string name, FieldNotFound fail = null) {
#endif
if(type == Type.OBJECT) {
int index = keys.IndexOf(name);
if(index >= 0) {
field = list[index].n;
return true;
}
}
if(fail != null) fail.Invoke(name);
return false;
}
public bool GetField(out int field, string name, int fallback) {
field = fallback;
return GetField(ref field, name);
}
public bool GetField(ref int field, string name, FieldNotFound fail = null) {
if(IsObject) {
int index = keys.IndexOf(name);
if(index >= 0) {
field = (int)list[index].n;
return true;
}
}
if(fail != null) fail.Invoke(name);
return false;
}
public bool GetField(out long field, string name, long fallback) {
field = fallback;
return GetField(ref field, name);
}
public bool GetField(ref long field, string name, FieldNotFound fail = null) {
if(IsObject) {
int index = keys.IndexOf(name);
if(index >= 0) {
field = (long)list[index].n;
return true;
}
}
if(fail != null) fail.Invoke(name);
return false;
}
public bool GetField(out uint field, string name, uint fallback) {
field = fallback;
return GetField(ref field, name);
}
public bool GetField(ref uint field, string name, FieldNotFound fail = null) {
if(IsObject) {
int index = keys.IndexOf(name);
if(index >= 0) {
field = (uint)list[index].n;
return true;
}
}
if(fail != null) fail.Invoke(name);
return false;
}
public bool GetField(out string field, string name, string fallback) {
field = fallback;
return GetField(ref field, name);
}
public bool GetField(ref string field, string name, FieldNotFound fail = null) {
if(IsObject) {
int index = keys.IndexOf(name);
if(index >= 0) {
field = list[index].str;
return true;
}
}
if(fail != null) fail.Invoke(name);
return false;
}
public void GetField(string name, GetFieldResponse response, FieldNotFound fail = null) {
if(response != null && IsObject) {
int index = keys.IndexOf(name);
if(index >= 0) {
response.Invoke(list[index]);
return;
}
}
if(fail != null) fail.Invoke(name);
}
public JSONObject GetField(string name) {
if(IsObject)
for(int i = 0; i < keys.Count; i++)
if(keys[i] == name)
return list[i];
return null;
}
public bool HasFields(string[] names) {
if(!IsObject)
return false;
for(int i = 0; i < names.Length; i++)
if(!keys.Contains(names[i]))
return false;
return true;
}
public bool HasField(string name) {
if(!IsObject)
return false;
for(int i = 0; i < keys.Count; i++)
if(keys[i] == name)
return true;
return false;
}
public void Clear() {
type = Type.NULL;
if(list != null)
list.Clear();
if(keys != null)
keys.Clear();
str = "";
n = 0;
b = false;
}
	/// <summary>
	/// Copy a JSONObject. This could probably work better
	/// </summary>
	/// <returns></returns>
	public JSONObject Copy() {
		// Deep copy by serializing to a string and re-parsing it. Simple but
		// relatively expensive, and numbers round-trip through their string
		// representation.
		return Create(Print());
	}
/*
* The Merge function is experimental. Use at your own risk.
*/
public void Merge(JSONObject obj) {
MergeRecur(this, obj);
}
	/// <summary>
	/// Merge object right into left recursively
	/// </summary>
	/// <param name="left">The left (base) object</param>
	/// <param name="right">The right (new) object</param>
	static void MergeRecur(JSONObject left, JSONObject right) {
		if(left.type == Type.NULL)
			// Target is still untyped: just copy everything across.
			left.Absorb(right);
		else if(left.type == Type.OBJECT && right.type == Type.OBJECT) {
			// Object into object: merge field by field, recursing into
			// container values and overwriting scalar ones.
			for(int i = 0; i < right.list.Count; i++) {
				string key = right.keys[i];
				if(right[i].isContainer) {
					if(left.HasField(key))
						MergeRecur(left[key], right[i]);
					else
						left.AddField(key, right[i]);
				} else {
					if(left.HasField(key))
						left.SetField(key, right[i]);
					else
						left.AddField(key, right[i]);
				}
			}
		} else if(left.type == Type.ARRAY && right.type == Type.ARRAY) {
			// Array into array: element-wise overwrite; the right array must
			// not have more elements than the left.
			if(right.Count > left.Count) {
#if UNITY_2 || UNITY_3 || UNITY_4 || UNITY_5
				Debug.LogError
#else
				Debug.WriteLine
#endif
				("Cannot merge arrays when right object has more elements");
				return;
			}
			for(int i = 0; i < right.list.Count; i++) {
				if(left[i].type == right[i].type) { //Only overwrite with the same type
					if(left[i].isContainer)
						MergeRecur(left[i], right[i]);
					else {
						left[i] = right[i];
					}
				}
			}
		}
	}
public void Bake() {
if(type != Type.BAKED) {
str = Print();
type = Type.BAKED;
}
}
public IEnumerable BakeAsync() {
if(type != Type.BAKED) {
foreach(string s in PrintAsync()) {
if(s == null)
yield return s;
else {
str = s;
}
}
type = Type.BAKED;
}
}
#pragma warning disable 219
public string Print(bool pretty = false) {
StringBuilder builder = new StringBuilder();
Stringify(0, builder, pretty);
return builder.ToString();
}
public IEnumerable<string> PrintAsync(bool pretty = false) {
StringBuilder builder = new StringBuilder();
#if !NETFX_CORE
printWatch.Reset();
printWatch.Start();
#endif
foreach(IEnumerable e in StringifyAsync(0, builder, pretty)) {
yield return null;
}
yield return builder.ToString();
}
#pragma warning restore 219
#region STRINGIFY
const float maxFrameTime = 0.008f;
static readonly Stopwatch printWatch = new Stopwatch();
IEnumerable StringifyAsync(int depth, StringBuilder builder, bool pretty = false) { //Convert the JSONObject into a string
//Profiler.BeginSample("JSONprint");
if(depth++ > MAX_DEPTH) {
#if UNITY_2 || UNITY_3 || UNITY_4 || UNITY_5
Debug.Log
#else
Debug.WriteLine
#endif
("reached max depth!");
yield break;
}
#if !NETFX_CORE
if(printWatch.Elapsed.TotalSeconds > maxFrameTime) {
printWatch.Reset();
yield return null;
printWatch.Start();
}
#endif
switch(type) {
case Type.BAKED:
builder.Append(str);
break;
case Type.STRING:
builder.AppendFormat("\"{0}\"", str);
break;
case Type.NUMBER:
if(useInt) {
builder.Append(i.ToString());
} else {
#if USEFLOAT
if(float.IsInfinity(n))
builder.Append(INFINITY);
else if(float.IsNegativeInfinity(n))
builder.Append(NEGINFINITY);
else if(float.IsNaN(n))
builder.Append(NaN);
#else
if(double.IsInfinity(n))
builder.Append(INFINITY);
else if(double.IsNegativeInfinity(n))
builder.Append(NEGINFINITY);
else if(double.IsNaN(n))
builder.Append(NaN);
#endif
else
builder.Append(n.ToString());
}
break;
case Type.OBJECT:
builder.Append("{");
if(list.Count > 0) {
#if (PRETTY) //for a bit more readability, comment the define above to disable system-wide
if(pretty)
builder.Append(NEWLINE);
#endif
for(int i = 0; i < list.Count; i++) {
string key = keys[i];
JSONObject obj = list[i];
if(obj) {
#if (PRETTY)
if(pretty)
for(int j = 0; j < depth; j++)
builder.Append("\t"); //for a bit more readability
#endif
builder.AppendFormat("\"{0}\":", key);
foreach(IEnumerable e in obj.StringifyAsync(depth, builder, pretty))
yield return e;
builder.Append(",");
#if (PRETTY)
if(pretty)
builder.Append(NEWLINE);
#endif
}
}
#if (PRETTY)
if(pretty)
builder.Length -= 2;
else
#endif
builder.Length--;
}
#if (PRETTY)
if(pretty && list.Count > 0) {
builder.Append(NEWLINE);
for(int j = 0; j < depth - 1; j++)
builder.Append("\t"); //for a bit more readability
}
#endif
builder.Append("}");
break;
case Type.ARRAY:
builder.Append("[");
if(list.Count > 0) {
#if (PRETTY)
if(pretty)
builder.Append(NEWLINE); //for a bit more readability
#endif
for(int i = 0; i < list.Count; i++) {
if(list[i]) {
#if (PRETTY)
if(pretty)
for(int j = 0; j < depth; j++)
builder.Append("\t"); //for a bit more readability
#endif
foreach(IEnumerable e in list[i].StringifyAsync(depth, builder, pretty))
yield return e;
builder.Append(",");
#if (PRETTY)
if(pretty)
builder.Append(NEWLINE); //for a bit more readability
#endif
}
}
#if (PRETTY)
if(pretty)
builder.Length -= 2;
else
#endif
builder.Length--;
}
#if (PRETTY)
if(pretty && list.Count > 0) {
builder.Append(NEWLINE);
for(int j = 0; j < depth - 1; j++)
builder.Append("\t"); //for a bit more readability
}
#endif
builder.Append("]");
break;
case Type.BOOL:
if(b)
builder.Append("true");
else
builder.Append("false");
break;
case Type.NULL:
builder.Append("null");
break;
}
//Profiler.EndSample();
}
//TODO: Refactor Stringify functions to share core logic
/*
* I know, I know, this is really bad form. It turns out that there is a
* significant amount of garbage created when calling as a coroutine, so this
* method is duplicated. Hopefully there won't be too many future changes, but
	 * I would still like a more elegant way to optionally yield
*/
void Stringify(int depth, StringBuilder builder, bool pretty = false) { //Convert the JSONObject into a string
//Profiler.BeginSample("JSONprint");
if(depth++ > MAX_DEPTH) {
#if UNITY_2 || UNITY_3 || UNITY_4 || UNITY_5
Debug.Log
#else
Debug.WriteLine
#endif
("reached max depth!");
return;
}
switch(type) {
case Type.BAKED:
builder.Append(str);
break;
case Type.STRING:
builder.AppendFormat("\"{0}\"", str);
break;
case Type.NUMBER:
if(useInt) {
builder.Append(i.ToString());
} else {
#if USEFLOAT
if(float.IsInfinity(n))
builder.Append(INFINITY);
else if(float.IsNegativeInfinity(n))
builder.Append(NEGINFINITY);
else if(float.IsNaN(n))
builder.Append(NaN);
#else
if(double.IsInfinity(n))
builder.Append(INFINITY);
else if(double.IsNegativeInfinity(n))
builder.Append(NEGINFINITY);
else if(double.IsNaN(n))
builder.Append(NaN);
#endif
else
builder.Append(n.ToString());
}
break;
case Type.OBJECT:
builder.Append("{");
if(list.Count > 0) {
#if (PRETTY) //for a bit more readability, comment the define above to disable system-wide
if(pretty)
builder.Append("\n");
#endif
for(int i = 0; i < list.Count; i++) {
string key = keys[i];
JSONObject obj = list[i];
if(obj) {
#if (PRETTY)
if(pretty)
for(int j = 0; j < depth; j++)
builder.Append("\t"); //for a bit more readability
#endif
builder.AppendFormat("\"{0}\":", key);
obj.Stringify(depth, builder, pretty);
builder.Append(",");
#if (PRETTY)
if(pretty)
builder.Append("\n");
#endif
}
}
#if (PRETTY)
if(pretty)
builder.Length -= 2;
else
#endif
builder.Length--;
}
#if (PRETTY)
if(pretty && list.Count > 0) {
builder.Append("\n");
for(int j = 0; j < depth - 1; j++)
builder.Append("\t"); //for a bit more readability
}
#endif
builder.Append("}");
break;
case Type.ARRAY:
builder.Append("[");
if(list.Count > 0) {
#if (PRETTY)
if(pretty)
builder.Append("\n"); //for a bit more readability
#endif
for(int i = 0; i < list.Count; i++) {
if(list[i]) {
#if (PRETTY)
if(pretty)
for(int j = 0; j < depth; j++)
builder.Append("\t"); //for a bit more readability
#endif
list[i].Stringify(depth, builder, pretty);
builder.Append(",");
#if (PRETTY)
if(pretty)
builder.Append("\n"); //for a bit more readability
#endif
}
}
#if (PRETTY)
if(pretty)
builder.Length -= 2;
else
#endif
builder.Length--;
}
#if (PRETTY)
if(pretty && list.Count > 0) {
builder.Append("\n");
for(int j = 0; j < depth - 1; j++)
builder.Append("\t"); //for a bit more readability
}
#endif
builder.Append("]");
break;
case Type.BOOL:
if(b)
builder.Append("true");
else
builder.Append("false");
break;
case Type.NULL:
builder.Append("null");
break;
}
//Profiler.EndSample();
}
#endregion
#if UNITY_2 || UNITY_3 || UNITY_4 || UNITY_5
public static implicit operator WWWForm(JSONObject obj) {
WWWForm form = new WWWForm();
for(int i = 0; i < obj.list.Count; i++) {
string key = i + "";
if(obj.type == Type.OBJECT)
key = obj.keys[i];
string val = obj.list[i].ToString();
if(obj.list[i].type == Type.STRING)
val = val.Replace("\"", "");
form.AddField(key, val);
}
return form;
}
#endif
public JSONObject this[int index] {
get {
if(list.Count > index) return list[index];
return null;
}
set {
if(list.Count > index)
list[index] = value;
}
}
public JSONObject this[string index] {
get {
return GetField(index);
}
set {
SetField(index, value);
}
}
public override string ToString() {
return Print();
}
public string ToString(bool pretty) {
return Print(pretty);
}
public Dictionary<string, string> ToDictionary() {
if(type == Type.OBJECT) {
Dictionary<string, string> result = new Dictionary<string, string>();
for(int i = 0; i < list.Count; i++) {
JSONObject val = list[i];
switch(val.type) {
case Type.STRING: result.Add(keys[i], val.str); break;
case Type.NUMBER: result.Add(keys[i], val.n + ""); break;
case Type.BOOL: result.Add(keys[i], val.b + ""); break;
default:
#if UNITY_2 || UNITY_3 || UNITY_4 || UNITY_5
Debug.LogWarning
#else
Debug.WriteLine
#endif
("Omitting object: " + keys[i] + " in dictionary conversion");
break;
}
}
return result;
}
#if UNITY_2 || UNITY_3 || UNITY_4 || UNITY_5
Debug.Log
#else
Debug.WriteLine
#endif
("Tried to turn non-Object JSONObject into a dictionary");
return null;
}
	// Enables the "if(obj)" null checks used throughout this class: a
	// JSONObject reference converts to true exactly when it is non-null.
	public static implicit operator bool(JSONObject o) {
		return o != null;
	}
#if POOLING
static bool pool = true;
public static void ClearPool() {
pool = false;
releaseQueue.Clear();
pool = true;
}
~JSONObject() {
if(pool && releaseQueue.Count < MAX_POOL_SIZE) {
type = Type.NULL;
list = null;
keys = null;
str = "";
n = 0;
b = false;
releaseQueue.Enqueue(this);
}
}
#endif
}
}
| |
using System;
using System.Messaging;
using System.Threading;
using Castle.MicroKernel.Registration;
using Castle.Windsor;
using Castle.Windsor.Configuration.Interpreters;
using Rhino.ServiceBus.Impl;
using Rhino.ServiceBus.Internal;
using Rhino.ServiceBus.LoadBalancer;
using Rhino.ServiceBus.Messages;
using Rhino.ServiceBus.Msmq;
using Rhino.ServiceBus.Transport;
using Xunit;
using System.Linq;
namespace Rhino.ServiceBus.Tests.LoadBalancer
{
public class With_load_balancing : LoadBalancingTestBase
{
private readonly IWindsorContainer container;
private readonly Endpoint loadBalancerEndpoint = new Endpoint { Uri = new Uri(loadBalancerQueue) };
        public With_load_balancing()
        {
            // Build the Windsor container from the load-balancer test config
            // and wire up the service-bus facility plus the handler under test.
            var interpreter = new XmlInterpreter(@"LoadBalancer\BusWithLoadBalancer.config");
            container = new WindsorContainer(interpreter);
            container.Kernel.AddFacility("rhino.esb", new RhinoServiceBusFacility());
            container.Register(Component.For<MyHandler>());
            // Register the load balancer itself: a single worker thread
            // listening on the shared load-balancer queue, with the
            // transactionality of the queue auto-detected.
            container.Register(
                Component.For<MsmqLoadBalancer>()
                    .DependsOn(new
                    {
                        threadCount = 1,
                        endpoint = new Uri(loadBalancerQueue),
                        transactional = TransactionalOptions.FigureItOut
                    })
                );
        }
[Fact]
public void Can_ReRoute_messages()
{
using (var bus = container.Resolve<IStartableServiceBus>())
{
bus.Start();
var endpointRouter = container.Resolve<IEndpointRouter>();
var original = new Uri("msmq://foo/original");
var routedEndpoint = endpointRouter.GetRoutedEndpoint(original);
Assert.Equal(original, routedEndpoint.Uri);
var wait = new ManualResetEvent(false);
bus.ReroutedEndpoint += x => wait.Set();
var newEndPoint = new Uri("msmq://new/endpoint");
bus.Send(bus.Endpoint,
new Reroute
{
OriginalEndPoint = original,
NewEndPoint = newEndPoint
});
wait.WaitOne(TimeSpan.FromSeconds(30), false);
routedEndpoint = endpointRouter.GetRoutedEndpoint(original);
Assert.Equal(newEndPoint, routedEndpoint.Uri);
}
}
[Fact]
public void Can_send_message_through_load_balancer()
{
MyHandler.ResetEvent = new ManualResetEvent(false);
using (var loadBalancer = container.Resolve<MsmqLoadBalancer>())
using (var bus = container.Resolve<IStartableServiceBus>())
{
loadBalancer.Start();
bus.Start();
bus.Send(loadBalancer.Endpoint, "abcdefg");
MyHandler.ResetEvent.WaitOne(TimeSpan.FromSeconds(30), false);
Assert.True(
MyHandler.Message.ResponseQueue.Path.Contains(@"private$\test_queue")
);
Assert.Equal("abcdefg", MyHandler.Value);
}
}
[Fact]
public void When_worker_tell_load_balancer_that_it_is_ready_the_worker_will_be_added_to_known_queues()
{
using (var loadBalancer = container.Resolve<MsmqLoadBalancer>())
using (var bus = container.Resolve<IStartableServiceBus>())
{
loadBalancer.Start();
bus.Start();
using (var workers = new MessageQueue(loadBalancerQueuePath + ";Workers", QueueAccessMode.SendAndReceive))
{
workers.Formatter = new XmlMessageFormatter(new[] { typeof(string) });
var knownWorker = workers.Peek(TimeSpan.FromSeconds(30));
Assert.Equal(bus.Endpoint.Uri.ToString(), knownWorker.Body.ToString());
}
Assert.True(loadBalancer.KnownWorkers.GetValues().Contains(TestQueueUri.Uri));
}
}
[Fact]
public void When_load_balancer_starts_will_read_known_workers_from_workers_sub_queue()
{
using (var workers = MsmqUtil.GetQueuePath(loadBalancerEndpoint)
.Open(QueueAccessMode.SendAndReceive,
new XmlMessageFormatter(new[]
{
typeof(string)
})))
{
workers.Send(new Message(TestQueueUri.Uri.ToString()));
var peek = workers.Peek(TimeSpan.FromSeconds(30));
string ignored;
new SubQueueStrategy().TryMoveMessage(workers, peek, SubQueue.Workers, out ignored);
}
using (var loadBalancer = container.Resolve<MsmqLoadBalancer>())
{
loadBalancer.Start();
Assert.True(loadBalancer.KnownWorkers.GetValues().Contains(TestQueueUri.Uri));
}
}
[Fact]
public void When_new_end_point_send_message_to_load_balancer_the_end_point_will_be_persisted()
{
using (var loadBalancer = container.Resolve<MsmqLoadBalancer>())
using (var bus = container.Resolve<IStartableServiceBus>())
{
loadBalancer.Start();
bus.Start();
bus.Send(bus.Endpoint, "test value");
using (var endpoints = new MessageQueue(loadBalancerQueuePath + ";EndPoints", QueueAccessMode.SendAndReceive))
{
endpoints.Formatter = new XmlMessageFormatter(new[] { typeof(string) });
var knownEndpoint = endpoints.Peek(TimeSpan.FromSeconds(30));
var busUri = bus.Endpoint.Uri.ToString().Replace("localhost", Environment.MachineName).ToLowerInvariant();
Assert.Equal(
busUri,
knownEndpoint.Body.ToString());
Assert.True(loadBalancer.KnownEndpoints.GetValues().Contains(new Uri(busUri)));
}
}
}
[Fact]
public void When_load_balancer_starts_will_read_known_endpoints_from_endpoints_sub_queue()
{
using (var endPointsQueue = MsmqUtil.GetQueuePath(loadBalancerEndpoint)
.Open(QueueAccessMode.SendAndReceive,
new XmlMessageFormatter(new[]
{
typeof(string)
})))
{
endPointsQueue.Send(new Message(TestQueueUri.Uri.ToString()));
var peek = endPointsQueue.Peek(TimeSpan.FromSeconds(30));
string ignored;
new SubQueueStrategy().TryMoveMessage(endPointsQueue, peek, SubQueue.Endpoints, out ignored);
}
using (var loadBalancer = container.Resolve<MsmqLoadBalancer>())
{
loadBalancer.Start();
Assert.True(loadBalancer.KnownEndpoints.GetValues().Contains(TestQueueUri.Uri));
}
}
[Fact]
public void Will_send_administrative_messages_to_all_nodes()
{
using (var loadBalancer = container.Resolve<MsmqLoadBalancer>())
using (var bus = container.Resolve<IStartableServiceBus>())
{
var wait = new ManualResetEvent(false);
loadBalancer.MessageBatchSentToAllWorkers += message => wait.Set();
loadBalancer.Start();
bus.Start();
bus.Send(loadBalancer.Endpoint, new ReadyToWork
{
Endpoint = TransactionalTestQueueUri.Uri
});
bus.Send(loadBalancer.Endpoint, new ReadyToWork
{
Endpoint = TestQueueUri2.Uri
});
bus.Send(loadBalancer.Endpoint, new AddSubscription
{
Endpoint = bus.Endpoint,
Type = "foobar"
});
wait.WaitOne(TimeSpan.FromSeconds(30), false);
}
using (var q = MsmqUtil.GetQueuePath(TransactionalTestQueueUri).Open())
{
var message = q.Receive();
Assert.Equal("Rhino.ServiceBus.Messages.AddSubscription", message.Label);
}
using (var q = MsmqUtil.GetQueuePath(TestQueueUri2).Open())
{
var message = q.Receive();
Assert.Equal("Rhino.ServiceBus.Messages.AddSubscription", message.Label);
}
}
[Fact]
public void Will_remove_all_ReadyToWorkMessages_from_the_queue()
{
using (var loadBalancer = container.Resolve<MsmqLoadBalancer>())
{
using (var bus = container.Resolve<IStartableServiceBus>())
{
bus.Start();
bus.Send(loadBalancer.Endpoint,
new ReadyToWork
{
Endpoint = TransactionalTestQueueUri.Uri
});
bus.Send(loadBalancer.Endpoint,
new ReadyToWork
{
Endpoint = TestQueueUri2.Uri
});
}
var wait = new ManualResetEvent(false);
loadBalancer.TransportMessageArrived += () => wait.Set();
loadBalancer.Start();
bool messagesWereProcess = wait.WaitOne(TimeSpan.FromMilliseconds(250), false);
Assert.False(messagesWereProcess);
}
}
[Fact]
public void Can_send_message_through_load_balancer_when_load_balcner_is_start_after_bus()
{
MyHandler.ResetEvent = new ManualResetEvent(false);
using (var loadBalancer = container.Resolve<MsmqLoadBalancer>())
using (var bus = container.Resolve<IStartableServiceBus>())
{
bus.Start();
bus.Send(loadBalancer.Endpoint, "abcdefg");
loadBalancer.Start();
MyHandler.ResetEvent.WaitOne(TimeSpan.FromSeconds(30), false);
Assert.True(
MyHandler.Message.ResponseQueue.Path.Contains(@"private$\test_queue")
);
Assert.Equal("abcdefg", MyHandler.Value);
}
}
[Fact]
public void Will_remove_all_ReadyToWorkMessages_from_the_queue_and_leave_other_Messages()
{
using (var loadBalancer = container.Resolve<MsmqLoadBalancer>())
{
using (var bus = container.Resolve<IStartableServiceBus>())
{
bus.Start();
bus.Send(loadBalancer.Endpoint,
new ReadyToWork
{
Endpoint = TransactionalTestQueueUri.Uri
});
bus.Send(loadBalancer.Endpoint,
new ReadyToWork
{
Endpoint = TestQueueUri2.Uri
});
}
}
}
        public class MyHandler : ConsumerOf<string>
        {
            // Static capture points so the test methods can observe what the
            // handler received; tests reset ResetEvent before sending.
            public static ManualResetEvent ResetEvent;
            public static string Value;
            public static Message Message;
            public void Consume(string message)
            {
                // Capture the raw MSMQ message (for response-queue asserts)
                // and the payload, then signal the waiting test.
                Message = MsmqTransport.MsmqCurrentMessageInformation.MsmqMessage;
                Value = message;
                ResetEvent.Set();
            }
        }
}
}
| |
/*
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the MIT License. See License.txt in the project root for license information.
*/
using System.Security.Principal;
using System.ServiceModel;
using System.Threading;
using System.Web;
using System.Web.UI.WebControls;
using System.Collections.Specialized;
using System.Web.UI;
using System.ComponentModel;
using System.Drawing.Design;
using System.Collections;
using System.Text.RegularExpressions;
using System;
using Microsoft.Xrm.Client;
using Microsoft.Xrm.Client.Diagnostics;
namespace Microsoft.Xrm.Portal.Web.UI.WebControls
{
/// <summary>
/// Represents settings for a cache key based cache dependency.
/// </summary>
public sealed class CacheKeyDependency : CacheKey, IStateManagedItem // MSBug #120086: Won't make internal.
{
    #region IStateManagedItem Members

    /// <summary>
    /// Marks the parameter collection as dirty so its entire state is
    /// serialized into view state rather than only the tracked changes.
    /// </summary>
    public void SetDirty()
    {
        Parameters.SetDirty();
    }

    #endregion
}
/// <summary>
/// Represents settings for describing a cache key. The key is produced by
/// substituting parameter values into <see cref="KeyFormat"/> and can be
/// varied by the authenticated user and by querystring parameters.
/// </summary>
public class CacheKey : IStateManager
{
    // Separator for the VaryByParam list, e.g. "id;page".
    private static readonly char[] _varySeparator = new char[] { ';' };

    private string _name;

    /// <summary>
    /// Gets or sets the name used for applying a property as a parameter.
    /// </summary>
    public string Name
    {
        get { return _name; }
        set { _name = value; }
    }

    private string _propertyName;

    /// <summary>
    /// Gets or sets the name of the property from which to obtain the value.
    /// </summary>
    public string PropertyName
    {
        get { return _propertyName; }
        set { _propertyName = value; }
    }

    private string _keyFormat;

    /// <summary>
    /// Gets or sets the format string describing the cache key. Placeholders are
    /// parameter names written as "@name" and are replaced case-insensitively.
    /// </summary>
    [DefaultValue((string)null)]
    public string KeyFormat
    {
        get { return _keyFormat; }
        set { _keyFormat = value; }
    }

    private bool _varyByUser;

    /// <summary>
    /// Gets or sets the flag for caching by user.
    /// </summary>
    public bool VaryByUser
    {
        get { return _varyByUser; }
        set { _varyByUser = value; }
    }

    private string _varyByParam;

    /// <summary>
    /// Gets or sets the set of querystring parameter names. The names are separated by a semi-colon character.
    /// </summary>
    public string VaryByParam
    {
        get { return _varyByParam; }
        set { _varyByParam = value; }
    }

    private ParameterCollection _parameters;

    /// <summary>
    /// Gets the parameters collection that contains the parameters that are used by the KeyFormat property.
    /// </summary>
    [Description(""), Category("Data"), PersistenceMode(PersistenceMode.InnerProperty), DefaultValue((string)null), Editor("System.Web.UI.Design.WebControls.ParameterCollectionEditor, System.Design, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a", typeof(UITypeEditor)), MergableProperty(false)]
    public ParameterCollection Parameters
    {
        get
        {
            if (_parameters == null)
            {
                _parameters = new ParameterCollection();

                // If view-state tracking has already begun, start tracking the
                // lazily-created collection immediately so changes are not lost.
                if (IsTrackingViewState)
                {
                    ((IStateManager)_parameters).TrackViewState();
                }
            }

            return _parameters;
        }
    }

    /// <summary>
    /// Retrieves the cache key based on the current property values.
    /// </summary>
    /// <param name="context">The current HTTP context used to evaluate parameters.</param>
    /// <param name="control">The control the key is generated for (used for the default key).</param>
    /// <param name="container">Optional data-binding container for CacheItemParameter lookups.</param>
    /// <returns>The resolved cache key.</returns>
    public string GetCacheKey(System.Web.HttpContext context, System.Web.UI.Control control, object container)
    {
        return GetCacheKey(context, control, container, null);
    }

    /// <summary>
    /// Retrieves the cache key based on the current property values.
    /// </summary>
    /// <param name="context">The current HTTP context used to evaluate parameters.</param>
    /// <param name="control">The control the key is generated for (used for the default key).</param>
    /// <param name="container">Optional data-binding container for CacheItemParameter lookups.</param>
    /// <param name="getDefaultKey">Optional hook that can override the generated default key.</param>
    /// <returns>The resolved cache key.</returns>
    public string GetCacheKey(System.Web.HttpContext context, System.Web.UI.Control control, object container, Converter<string, string> getDefaultKey)
    {
        string cacheKey = KeyFormat;

        if (Parameters != null && Parameters.Count > 0)
        {
            // use the parameters collection to build the dependency
            IOrderedDictionary values = Parameters.GetValues(context, control);

            if (container != null)
            {
                // process CacheItemParameter objects, lookup the value based on the container
                foreach (Parameter param in Parameters)
                {
                    if (param is CacheItemParameter)
                    {
                        string format = (param as CacheItemParameter).Format;
                        string propertyName = (param as CacheItemParameter).PropertyName;
                        string result = DataBinder.Eval(container, propertyName, format);

                        if (!string.IsNullOrEmpty(result))
                        {
                            values[param.Name] = result;
                        }
                    }
                }
            }

            if (!string.IsNullOrEmpty(KeyFormat))
            {
                foreach (DictionaryEntry entry in values)
                {
                    if (entry.Key != null)
                    {
                        string key = entry.Key.ToString().Trim();

                        if (!key.StartsWith("@"))
                        {
                            key = "@" + key;
                        }

                        // NOTE(review): the parameter name is used as a regex pattern and the
                        // value as a replacement string; names/values containing regex
                        // metacharacters (e.g. '$') would misbehave -- assumed not to occur.
                        cacheKey = Regex.Replace(cacheKey, key, "{0}".FormatWith(entry.Value), RegexOptions.IgnoreCase);
                    }
                }
            }
        }
        else if (!string.IsNullOrEmpty(Name) && !string.IsNullOrEmpty(PropertyName))
        {
            string result = null;

            if (container != null)
            {
                try
                {
                    result = DataBinder.Eval(container, PropertyName, "{0}");
                }
                catch (Exception e)
                {
                    // Best-effort: an invalid PropertyName is logged and the
                    // placeholder is replaced with an empty string below.
                    Tracing.FrameworkError("CacheKeyDependency", "GetCacheKey", "Invalid cache parameter settings.");
                    Tracing.FrameworkError("CacheKeyDependency", "GetCacheKey", e.ToString());
                }
            }

            // use this object to build the dependency
            string key = Name.Trim();

            if (!key.StartsWith("@"))
            {
                key = "@" + key;
            }

            cacheKey = Regex.Replace(cacheKey, key, result ?? string.Empty, RegexOptions.IgnoreCase);
        }

        if (string.IsNullOrEmpty(cacheKey))
        {
            // could not find a suitable cacheKey from the parameters, build a default key
            cacheKey = "Adxstudio:{0}:ID={1}".FormatWith(control.GetType().FullName, control.ID);

            // provide an opportunity to override this suggested default
            if (getDefaultKey != null)
            {
                cacheKey = getDefaultKey(cacheKey);
            }
        }

        if (VaryByUser)
        {
            IIdentity identity;

            if (TryGetCurrentIdentity(out identity) && identity.IsAuthenticated)
            {
                cacheKey += ":Identity={0}".FormatWith(identity.Name);
            }
        }

        if (!string.IsNullOrEmpty(VaryByParam))
        {
            foreach (string section in VaryByParam.Split(_varySeparator))
            {
                string param = section.Trim();
                cacheKey += ":{0}={1}".FormatWith(param, context.Request[param]);
            }
        }

        return cacheKey;
    }

    /// <summary>
    /// Resolves the current identity from the thread principal, the HTTP context,
    /// or the WCF security context, in that order.
    /// </summary>
    private static bool TryGetCurrentIdentity(out IIdentity identity)
    {
        if (Thread.CurrentPrincipal != null && Thread.CurrentPrincipal.Identity != null)
        {
            identity = Thread.CurrentPrincipal.Identity;
            return true;
        }

        // FIX: HttpContext.Current is null outside of an ASP.NET request (e.g. when
        // only the WCF ServiceSecurityContext is available); the original dereferenced
        // it unconditionally and could throw a NullReferenceException.
        if (HttpContext.Current != null && HttpContext.Current.User != null && HttpContext.Current.User.Identity != null)
        {
            identity = HttpContext.Current.User.Identity;
            return true;
        }

        if (ServiceSecurityContext.Current != null && ServiceSecurityContext.Current.PrimaryIdentity != null)
        {
            identity = ServiceSecurityContext.Current.PrimaryIdentity;
            return true;
        }

        identity = null;
        return false;
    }

    #region IStateManager Members

    private bool _tracking;

    bool IStateManager.IsTrackingViewState
    {
        get { return IsTrackingViewState; }
    }

    /// <summary>
    /// Gets a value indicating whether view-state tracking has started.
    /// </summary>
    public bool IsTrackingViewState
    {
        get { return _tracking; }
    }

    void IStateManager.LoadViewState(object savedState)
    {
        LoadViewState(savedState);
    }

    /// <summary>
    /// Restores KeyFormat and the parameter collection from saved view state.
    /// </summary>
    protected virtual void LoadViewState(object savedState)
    {
        object[] state = savedState as object[];

        if (state == null)
        {
            return;
        }

        KeyFormat = state[0] as string;
        ((IStateManager)Parameters).LoadViewState(state[1]);
    }

    object IStateManager.SaveViewState()
    {
        return SaveViewState();
    }

    /// <summary>
    /// Saves KeyFormat and the parameter collection to view state.
    /// </summary>
    protected virtual object SaveViewState()
    {
        // FIX: only two slots are ever written and read (KeyFormat and Parameters);
        // the original allocated object[5] and serialized three unused nulls.
        object[] state = new object[2];
        state[0] = KeyFormat;
        state[1] = ((IStateManager)Parameters).SaveViewState();
        return state;
    }

    void IStateManager.TrackViewState()
    {
        TrackViewState();
    }

    /// <summary>
    /// Begins view-state tracking for this object and its parameter collection.
    /// </summary>
    protected virtual void TrackViewState()
    {
        _tracking = true;
        ((IStateManager)Parameters).TrackViewState();
    }

    #endregion
}
}
| |
using System;
namespace NameAvatarGenerator.ColorHelper
{
/// <summary>
/// Structure to define CIE L*a*b* color values, with CIE94 and CIEDE2000
/// color-difference calculations.
/// </summary>
public struct CIELab
{
    /// <summary>
    /// Gets an empty CIELab structure (all components zero).
    /// </summary>
    public static readonly CIELab Empty = new CIELab();

    #region Fields
    private double l;
    private double a;
    private double b;
    #endregion

    #region Operators
    /// <summary>
    /// Component-wise equality (exact floating-point comparison).
    /// </summary>
    public static bool operator ==(CIELab item1, CIELab item2)
    {
        return (
            item1.L == item2.L
            && item1.A == item2.A
            && item1.B == item2.B
            );
    }

    /// <summary>
    /// Component-wise inequality (exact floating-point comparison).
    /// </summary>
    public static bool operator !=(CIELab item1, CIELab item2)
    {
        return (
            item1.L != item2.L
            || item1.A != item2.A
            || item1.B != item2.B
            );
    }
    #endregion

    #region Accessors
    /// <summary>
    /// Gets or sets the L (lightness) component.
    /// </summary>
    public double L
    {
        get { return l; }
        set { l = value; }
    }

    /// <summary>
    /// Gets or sets the a component.
    /// </summary>
    public double A
    {
        get { return a; }
        set { a = value; }
    }

    /// <summary>
    /// Gets or sets the b component.
    /// </summary>
    public double B
    {
        get { return b; }
        set { b = value; }
    }
    #endregion

    /// <summary>
    /// Initializes the structure from explicit L*, a*, b* components.
    /// </summary>
    public CIELab(double l, double a, double b)
    {
        this.l = l;
        this.a = a;
        this.b = b;
    }

    #region Methods
    public override bool Equals(Object obj)
    {
        if (obj == null || GetType() != obj.GetType()) return false;

        return (this == (CIELab)obj);
    }

    public override int GetHashCode()
    {
        return L.GetHashCode() ^ a.GetHashCode() ^ b.GetHashCode();
    }

    // ******************************************************************
    // EO: Based on: http://www.easyrgb.com/index.php?X=DELT&H=04#text4
    // Based on: Delta E 1994
    /// <summary>
    /// Returns the CIE94 color difference between a reference color (lab1)
    /// and a sample color (lab2), with unit weighting factors.
    /// </summary>
    public static double GetDistanceBetweenCie1994(CIELab lab1, CIELab lab2)
    {
        const double whtL = 1;
        const double whtC = 1;
        const double whtH = 1; //Weighting factors depending
        //on the application (1 = default)

        double xC1 = Math.Sqrt(Math.Pow(lab1.a, 2.0) + Math.Pow(lab1.b, 2.0));
        double xC2 = Math.Sqrt(Math.Pow(lab2.a, 2.0) + Math.Pow(lab2.b, 2.0));
        double xDL = lab2.l - lab1.l;
        double xDC = xC2 - xC1;
        double xDE = Math.Sqrt(
            ((lab1.l - lab2.l) * (lab1.l - lab2.l))
            + ((lab1.a - lab2.a) * (lab1.a - lab2.a))
            + ((lab1.b - lab2.b) * (lab1.b - lab2.b)));

        // NOTE: this guard (including the sqrt-of-sqrt comparison) follows the
        // easyrgb.com pseudocode verbatim; it protects the sqrt below from a
        // negative argument due to rounding.
        double xDH;
        if (Math.Sqrt(xDE) > (Math.Sqrt(Math.Abs(xDL)) + Math.Sqrt(Math.Abs(xDC))))
        {
            xDH = Math.Sqrt((xDE * xDE) - (xDL * xDL) - (xDC * xDC));
        }
        else
        {
            xDH = 0;
        }

        double xSC = 1 + (0.045 * xC1);
        double xSH = 1 + (0.015 * xC1);

        xDL /= whtL;
        xDC /= whtC * xSC;
        xDH /= whtH * xSH;

        double deltaE94 = Math.Sqrt(Math.Pow(xDL, 2.0) + Math.Pow(xDC, 2.0) + Math.Pow(xDH, 2.0));
        return deltaE94;
    }

    // ******************************************************************
    // Given by Rob2412 http://www.codeproject.com/KB/recipes/colorspace1.aspx?msg=4041550#xx4041550xx
    // and should be better then the previous one.
    /// <summary>
    /// Returns the color difference (distance) between a sample color CIELap(2) and a reference color CIELap(1)
    /// <para>in accordance with the CIE 2000 algorithm.</para>
    /// </summary>
    /// <param name="lab1">CIELap reference color.</param>
    /// <param name="lab2">CIELap sample color.</param>
    /// <returns>Color difference.</returns>
    public static double GetDistanceBetweenCie2000(CIELab lab1, CIELab lab2)
    {
        double p25 = Math.Pow(25, 7);

        double C1 = Math.Sqrt(lab1.A * lab1.A + lab1.B * lab1.B);
        double C2 = Math.Sqrt(lab2.A * lab2.A + lab2.B * lab2.B);
        double avgC = (C1 + C2) / 2F;

        double powAvgC = Math.Pow(avgC, 7);
        double G = (1 - Math.Sqrt(powAvgC / (powAvgC + p25))) / 2D;

        double a_1 = lab1.A * (1 + G);
        double a_2 = lab2.A * (1 + G);

        double C_1 = Math.Sqrt(a_1 * a_1 + lab1.B * lab1.B);
        double C_2 = Math.Sqrt(a_2 * a_2 + lab2.B * lab2.B);
        double avgC_ = (C_1 + C_2) / 2D;

        // Hue angles normalized to [0, 360) degrees.
        double h1 = (Atan(lab1.B, a_1) >= 0 ? Atan(lab1.B, a_1) : Atan(lab1.B, a_1) + 360F);
        double h2 = (Atan(lab2.B, a_2) >= 0 ? Atan(lab2.B, a_2) : Atan(lab2.B, a_2) + 360F);

        // FIX: the CIEDE2000 definition compares |h1 - h2| against 180 degrees;
        // the original tested only (h1 - h2) > 180 and missed the mirrored case.
        double H = (Math.Abs(h1 - h2) > 180D ? (h1 + h2 + 360F) / 2D : (h1 + h2) / 2D);

        double T = 1;
        T -= 0.17 * Cos(H - 30);
        T += 0.24 * Cos(2 * H);
        T += 0.32 * Cos(3 * H + 6);
        T -= 0.20 * Cos(4 * H - 63);

        // FIX: delta-h also uses |h2 - h1| <= 180 in the standard; the original's
        // signed comparison produced wrong results when h1 - h2 exceeded 180.
        double deltah = 0;
        if (Math.Abs(h2 - h1) <= 180)
            deltah = h2 - h1;
        else if (h2 <= h1)
            deltah = h2 - h1 + 360;
        else
            deltah = h2 - h1 - 360;

        double avgL = (lab1.L + lab2.L) / 2F;
        double deltaL_ = lab2.L - lab1.L;
        double deltaC_ = C_2 - C_1;
        double deltaH_ = 2 * Math.Sqrt(C_1 * C_2) * Sin(deltah / 2);

        double SL = 1 + (0.015 * Math.Pow(avgL - 50, 2)) / Math.Sqrt(20 + Math.Pow(avgL - 50, 2));
        double SC = 1 + 0.045 * avgC_;
        double SH = 1 + 0.015 * avgC_ * T;

        double exp = Math.Pow((H - 275) / 25, 2);
        // FIX: delta-theta is 30 * e^(-((H - 275) / 25)^2) per the CIEDE2000
        // definition; the original computed Math.Pow(30, -exp), i.e. 30^(-exp).
        double teta = 30 * Math.Exp(-exp);
        double RC = 2D * Math.Sqrt(Math.Pow(avgC_, 7) / (Math.Pow(avgC_, 7) + p25));
        double RT = -RC * Sin(2 * teta);

        double deltaE = 0;
        deltaE = Math.Pow(deltaL_ / SL, 2);
        deltaE += Math.Pow(deltaC_ / SC, 2);
        deltaE += Math.Pow(deltaH_ / SH, 2);
        deltaE += RT * (deltaC_ / SC) * (deltaH_ / SH);
        deltaE = Math.Sqrt(deltaE);

        return deltaE;
    }

    // ******************************************************************
    /// <summary>
    /// Returns the angle in degree whose tangent is the quotient of the two specified numbers.
    /// </summary>
    /// <param name="y">The y coordinate of a point.</param>
    /// <param name="x">The x coordinate of a point.</param>
    /// <returns>Angle in degree.</returns>
    private static double Atan(double y, double x)
    {
        return Math.Atan2(y, x) * 180D / Math.PI;
    }

    // ******************************************************************
    /// <summary>
    /// Returns the cosine of the specified angle in degree.
    /// </summary>
    /// <param name="d">Angle in degree</param>
    /// <returns>Cosine of the specified angle.</returns>
    private static double Cos(double d)
    {
        return Math.Cos(d * Math.PI / 180);
    }

    // ******************************************************************
    /// <summary>
    /// Returns the sine of the specified angle in degree.
    /// </summary>
    /// <param name="d">Angle in degree</param>
    /// <returns>Sine of the specified angle.</returns>
    private static double Sin(double d)
    {
        return Math.Sin(d * Math.PI / 180);
    }
    #endregion
}
}
| |
// Copyright (c) 2015, Outercurve Foundation.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// - Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// - Neither the name of the Outercurve Foundation nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
using System;
using System.Data;
using System.Configuration;
using System.Collections;
using System.Web;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.WebControls.WebParts;
using System.Web.UI.HtmlControls;
using System.Linq;
using WebsitePanel.EnterpriseServer;
using System.Collections.Generic;
using System.Text;
namespace WebsitePanel.Portal
{
// Code-behind for the WebsitePanel "Domains" module: lists a space's domains in
// a grid with role-dependent columns, quick search, DNS-record tooltips and an
// "add domain" entry point. Controls (gvDomains, chkRecursive, searchBox, ...)
// are declared in the designer partial.
public partial class Domains : WebsitePanelModuleBase
{
    // Per-request memo of domain id -> tooltip text so the enterprise service
    // is queried at most once per domain while the grid renders.
    public Dictionary<int, string> dnsRecords;

    /// <summary>
    /// Configures paging, role-based column visibility and (on first load)
    /// the search criteria and the "add domain" button state.
    /// </summary>
    protected void Page_Load(object sender, EventArgs e)
    {
        ClientScriptManager cs = Page.ClientScript;
        cs.RegisterClientScriptInclude("jquery", ResolveUrl("~/JavaScript/jquery-1.4.4.min.js"));
        dnsRecords = new Dictionary<int, string>();
        gvDomains.PageSize = UsersHelper.GetDisplayItemsPerPage();
        // visibility
        // Columns 5/6 are shown only to non-end-users browsing recursively;
        // column 7 requires the selected user to be an administrator, column 8
        // requires the effective (logged-on) user to be an administrator.
        chkRecursive.Visible = (PanelSecurity.SelectedUser.Role != UserRole.User);
        gvDomains.Columns[5].Visible = gvDomains.Columns[6].Visible =
            (PanelSecurity.SelectedUser.Role != UserRole.User) && chkRecursive.Checked;
        gvDomains.Columns[7].Visible = (PanelSecurity.SelectedUser.Role == UserRole.Administrator);
        gvDomains.Columns[8].Visible = (PanelSecurity.EffectiveUser.Role == UserRole.Administrator);
        if (!IsPostBack)
        {
            // toggle controls
            // Adding is allowed if ANY of the domain-related quotas is enabled.
            btnAddDomain.Enabled = PackagesHelper.CheckGroupQuotaEnabled(PanelSecurity.PackageId, ResourceGroups.Os, Quotas.OS_DOMAINS)
                || PackagesHelper.CheckGroupQuotaEnabled(PanelSecurity.PackageId, ResourceGroups.Os, Quotas.OS_SUBDOMAINS)
                || PackagesHelper.CheckGroupQuotaEnabled(PanelSecurity.PackageId, ResourceGroups.Os, Quotas.OS_DOMAINPOINTERS);
            searchBox.AddCriteria("DomainName", GetLocalizedString("SearchField.DomainName"));
            // User-related criteria only make sense for a recursive, non-end-user view.
            if ((PanelSecurity.SelectedUser.Role != UserRole.User) && chkRecursive.Checked)
            {
                searchBox.AddCriteria("Username", GetLocalizedString("SearchField.Username"));
                searchBox.AddCriteria("FullName", GetLocalizedString("SearchField.FullName"));
                searchBox.AddCriteria("Email", GetLocalizedString("SearchField.Email"));
            }
        }
        searchBox.AjaxData = this.GetSearchBoxAjaxData();
    }

    /// <summary>
    /// Builds the edit URL for a single domain item within a space.
    /// </summary>
    public string GetItemEditUrl(object packageId, object itemId)
    {
        return EditUrl("DomainID", itemId.ToString(), "edit_item",
            PortalUtils.SPACE_ID_PARAM + "=" + packageId.ToString());
    }

    /// <summary>
    /// Returns the portal home page URL for the given user.
    /// </summary>
    public string GetUserHomePageUrl(int userId)
    {
        return PortalUtils.GetUserHomePageUrl(userId);
    }

    /// <summary>
    /// Returns the home page URL for the given hosting space.
    /// </summary>
    public string GetSpaceHomePageUrl(int spaceId)
    {
        return NavigateURL(PortalUtils.SPACE_ID_PARAM, spaceId.ToString());
    }

    /// <summary>
    /// Returns the current space's items page URL with one extra query parameter.
    /// </summary>
    public string GetItemsPageUrl(string parameterName, string parameterValue)
    {
        return NavigateURL(PortalUtils.SPACE_ID_PARAM, PanelSecurity.PackageId.ToString(),
            parameterName + "=" + parameterValue);
    }

    /// <summary>
    /// Maps the domain kind flags to a localized display name.
    /// Pointer takes precedence over sub-domain.
    /// </summary>
    public string GetDomainTypeName(bool isSubDomain, bool isInstantAlias, bool isDomainPointer)
    {
        if (isDomainPointer)
            return GetLocalizedString("DomainType.DomainPointer");
        else if (isSubDomain)
            return GetLocalizedString("DomainType.SubDomain");
        else
            return GetLocalizedString("DomainType.Domain");
    }

    /// <summary>
    /// Formats the expiration cell: "Expired", the date, "NotChecked" (never
    /// looked up yet) or "NotExist" (looked up but no expiration found).
    /// </summary>
    public string GetDomainExpirationDate(object expirationDateObject, object LastUpdateDateObject)
    {
        var expirationDate = expirationDateObject as DateTime?;
        var lastUpdateDate = LastUpdateDateObject as DateTime?;
        if (expirationDate != null && expirationDate < DateTime.Now)
        {
            return GetLocalizedString("DomainExpirationDate.Expired");
        }
        else if (expirationDate != null)
        {
            return expirationDate.Value.ToShortDateString();
        }
        else if (lastUpdateDate == null)
        {
            return GetLocalizedString("DomainExpirationDate.NotChecked");
        }
        else
        {
            return GetLocalizedString("DomainExpirationDate.NotExist");
        }
    }

    /// <summary>
    /// DNS info is shown only for top-level domains with a known, non-expired
    /// expiration date. (The last two branches both return false; kept as-is to
    /// mirror GetDomainExpirationDate's case structure.)
    /// </summary>
    public bool ShowDomainDnsInfo(object expirationDateObject, object LastUpdateDateObject, bool isTopLevelDomain)
    {
        var expirationDate = expirationDateObject as DateTime?;
        var lastUpdateDate = LastUpdateDateObject as DateTime?;
        if (!isTopLevelDomain)
        {
            return false;
        }
        else if (expirationDate != null && expirationDate < DateTime.Now)
        {
            return false;
        }
        else if (expirationDate != null)
        {
            return true;
        }
        else if (lastUpdateDate == null)
        {
            return false;
        }
        else
        {
            return false;
        }
    }

    /// <summary>
    /// Returns a newline-separated "Type: Value" listing of the domain's DNS
    /// records, fetched once per request and memoized in <see cref="dnsRecords"/>.
    /// Returns an empty string when the domain has no records.
    /// </summary>
    public string GetDomainDnsRecords(int domainId)
    {
        if (dnsRecords.ContainsKey(domainId))
        {
            return dnsRecords[domainId];
        }
        var records = ES.Services.Servers.GetDomainDnsRecords(domainId);
        if (!records.Any())
        {
            dnsRecords.Add(domainId, string.Empty);
            return string.Empty;
        }
        var header = GetLocalizedString("DomainLookup.TooltipHeader");
        var tooltipLines = new List<string>();
        tooltipLines.Add(header);
        tooltipLines.Add(" ");
        tooltipLines.AddRange(records.Select(x => string.Format("{0}: {1}", x.RecordType, x.Value)));
        dnsRecords.Add(domainId, string.Join("\r\n", tooltipLines));
        return dnsRecords[domainId];
    }

    /// <summary>
    /// Builds the grid tooltip: optional registrar header followed by the
    /// domain's DNS record listing.
    /// </summary>
    public string GetDomainTooltip(int domainId, string registrar)
    {
        var dnsString = GetDomainDnsRecords(domainId);
        var tooltipLines = new List<string>();
        if (!string.IsNullOrEmpty(registrar))
        {
            var header = GetLocalizedString("DomainLookup.TooltipHeader.Registrar");
            tooltipLines.Add(header + " " + registrar);
            tooltipLines.Add("\r\n");
        }
        return string.Join("\r\n", tooltipLines) + dnsString;
    }

    /// <summary>
    /// Surfaces data-source errors to the page instead of letting them bubble up.
    /// </summary>
    protected void odsDomainsPaged_Selected(object sender, ObjectDataSourceStatusEventArgs e)
    {
        if (e.Exception != null)
        {
            ProcessException(e.Exception);
            //this.DisableControls = true;
            e.ExceptionHandled = true;
        }
    }

    /// <summary>
    /// Navigates to the "add domain" page for the current space.
    /// </summary>
    protected void btnAddDomain_Click(object sender, EventArgs e)
    {
        Response.Redirect(EditUrl(PortalUtils.SPACE_ID_PARAM, PanelSecurity.PackageId.ToString(), "add_domain"));
    }

    /// <summary>
    /// Handles the grid's "Detach" command: removes the domain record from the
    /// metabase. Errors are displayed but do not abort the postback.
    /// </summary>
    protected void gvDomains_RowCommand(object sender, GridViewCommandEventArgs e)
    {
        if (e.CommandName == "Detach")
        {
            // remove item from meta base
            int domainId = Utils.ParseInt(e.CommandArgument.ToString(), 0);

            int result = ES.Services.Servers.DetachDomain(domainId);
            if (result < 0)
            {
                ShowResultMessage(result);
                // return;
            }
            // refresh the list
            //gvDomains.DataBind();
        }
    }

    /// <summary>
    /// Builds the javascript object-literal fragment consumed by the search box
    /// AJAX helper (stored procedure name, redirect URL and filter parameters).
    /// </summary>
    public string GetSearchBoxAjaxData()
    {
        StringBuilder res = new StringBuilder();
        res.Append("PagedStored: 'Domains'");
        res.Append(", RedirectUrl: '" + GetItemEditUrl(Request["SpaceID"] ?? "-1", "{0}").Substring(2) + "'");
        res.Append(", PackageID: " + (String.IsNullOrEmpty(Request["SpaceID"]) ? "-1" : Request["SpaceID"]));
        res.Append(", ServerID: " + (String.IsNullOrEmpty(Request["ServerID"]) ? "0" : Request["ServerID"]));
        // NOTE(review): jQuery .val() on a checkbox returns its value attribute
        // regardless of checked state; this presumably relies on how the control
        // renders -- confirm against the consuming client script.
        res.Append(", Recursive: ($('#" + chkRecursive.ClientID + "').val() == 'on')");
        return res.ToString();
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apache.Ignite.Linq.Impl
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using Apache.Ignite.Core.Binary;
using Apache.Ignite.Core.Cache;
using Apache.Ignite.Core.Cache.Query;
using Apache.Ignite.Core.Impl.Cache;
using Apache.Ignite.Core.Impl.Common;
using Remotion.Linq;
/// <summary>
/// Fields query executor: translates LINQ query models into Ignite SQL fields
/// queries, executes them, and materializes results via compiled selectors.
/// </summary>
internal class CacheFieldsQueryExecutor : IQueryExecutor
{
    /** Target cache to run SQL fields queries against. */
    private readonly ICacheInternal _cache;

    /** Query options applied to every generated SqlFieldsQuery. */
    private readonly QueryOptions _options;

    /** Process-wide cache of compiled constructor delegates, keyed by ConstructorInfo. */
    private static readonly CopyOnWriteConcurrentDictionary<ConstructorInfo, object> CtorCache =
        new CopyOnWriteConcurrentDictionary<ConstructorInfo, object>();

    /// <summary>
    /// Initializes a new instance of the <see cref="CacheFieldsQueryExecutor" /> class.
    /// </summary>
    /// <param name="cache">The target cache.</param>
    /// <param name="options">Query options.</param>
    public CacheFieldsQueryExecutor(ICacheInternal cache, QueryOptions options)
    {
        Debug.Assert(cache != null);
        Debug.Assert(options != null);

        _cache = cache;
        _options = options;
    }

    /** <inheritdoc /> */
    public T ExecuteScalar<T>(QueryModel queryModel)
    {
        return ExecuteSingle<T>(queryModel, false);
    }

    /** <inheritdoc /> */
    public T ExecuteSingle<T>(QueryModel queryModel, bool returnDefaultWhenEmpty)
    {
        var col = ExecuteCollection<T>(queryModel);

        return returnDefaultWhenEmpty ? col.SingleOrDefault() : col.Single();
    }

    /** <inheritdoc /> */
    [SuppressMessage("Microsoft.Design", "CA1062:Validate arguments of public methods")]
    public IEnumerable<T> ExecuteCollection<T>(QueryModel queryModel)
    {
        Debug.Assert(queryModel != null);

        var qryData = GetQueryData(queryModel);

        Debug.WriteLine("\nFields Query: {0} | {1}", qryData.QueryText,
            string.Join(", ", qryData.Parameters.Select(x => x == null ? "null" : x.ToString())));

        var qry = GetFieldsQuery(qryData.QueryText, qryData.Parameters.ToArray());

        var selector = GetResultSelector<T>(queryModel.SelectClause.Selector);

        return _cache.QueryFields(qry, selector);
    }

    /// <summary>
    /// Compiles the query without regard to number or order of arguments.
    /// The returned delegate passes its argument array straight through as SQL parameters.
    /// </summary>
    public Func<object[], IQueryCursor<T>> CompileQuery<T>(QueryModel queryModel)
    {
        Debug.Assert(queryModel != null);

        var qryText = GetQueryData(queryModel).QueryText;

        var selector = GetResultSelector<T>(queryModel.SelectClause.Selector);

        return args => _cache.QueryFields(GetFieldsQuery(qryText, args), selector);
    }

    /// <summary>
    /// Compiles the query.
    /// </summary>
    /// <typeparam name="T">Result type.</typeparam>
    /// <param name="queryModel">The query model.</param>
    /// <param name="queryLambdaModel">The query model generated from lambda body.</param>
    /// <param name="queryLambda">The query lambda.</param>
    /// <returns>Compiled query func.</returns>
    public Func<object[], IQueryCursor<T>> CompileQuery<T>(QueryModel queryModel, QueryModel queryLambdaModel,
        LambdaExpression queryLambda)
    {
        Debug.Assert(queryModel != null);

        // Get model from lambda to map arguments properly.
        var qryData = GetQueryData(queryLambdaModel);

        var qryText = GetQueryData(queryModel).QueryText;
        var qryTextLambda = qryData.QueryText;

        // Both models must yield identical SQL; a mismatch means part of the
        // LINQ expression was evaluated outside the lambda passed to Compile.
        if (qryText != qryTextLambda)
        {
            Debug.WriteLine(qryText);
            Debug.WriteLine(qryTextLambda);

            throw new InvalidOperationException("Error compiling query: entire LINQ expression should be " +
                                                "specified within lambda passed to Compile method. " +
                                                "Part of the query can't be outside the Compile method call.");
        }

        var selector = GetResultSelector<T>(queryModel.SelectClause.Selector);

        var qryParams = qryData.Parameters.ToArray();

        // Compiled query is a delegate with query parameters
        // Delegate parameters order and query parameters order may differ

        // Simple case: lambda with no parameters. Only embedded parameters are used.
        if (!queryLambda.Parameters.Any())
        {
            return argsUnused => _cache.QueryFields(GetFieldsQuery(qryText, qryParams), selector);
        }

        // These are in order of usage in query
        var qryOrderArgs = qryParams.OfType<ParameterExpression>().Select(x => x.Name).ToArray();

        // These are in order they come from user
        var userOrderArgs = queryLambda.Parameters.Select(x => x.Name).ToList();

        // Simple case: all query args directly map to the lambda args in the same order
        if (qryOrderArgs.Length == qryParams.Length
            && qryOrderArgs.SequenceEqual(userOrderArgs))
        {
            return args => _cache.QueryFields(GetFieldsQuery(qryText, args), selector);
        }

        // General case: embedded args and lambda args are mixed; same args can be used multiple times.
        // Produce a mapping that defines where query arguments come from.
        // -1 marks an embedded (constant) argument; >= 0 is an index into the user's args.
        var mapping = qryParams.Select(x =>
        {
            var pe = x as ParameterExpression;

            if (pe != null)
                return userOrderArgs.IndexOf(pe.Name);

            return -1;
        }).ToArray();

        return args => _cache.QueryFields(
            GetFieldsQuery(qryText, MapQueryArgs(args, qryParams, mapping)), selector);
    }

    /// <summary>
    /// Maps the query arguments: each output slot takes either the embedded
    /// value (mapping entry &lt; 0) or the user-supplied value at the mapped index.
    /// </summary>
    private static object[] MapQueryArgs(object[] userArgs, object[] embeddedArgs, int[] mapping)
    {
        var mappedArgs = new object[embeddedArgs.Length];

        for (var i = 0; i < mappedArgs.Length; i++)
        {
            var map = mapping[i];

            mappedArgs[i] = map < 0 ? embeddedArgs[i] : userArgs[map];
        }

        return mappedArgs;
    }

    /// <summary>
    /// Gets the fields query with all configured <see cref="QueryOptions"/> applied.
    /// </summary>
    internal SqlFieldsQuery GetFieldsQuery(string text, object[] args)
    {
        return new SqlFieldsQuery(text)
        {
            EnableDistributedJoins = _options.EnableDistributedJoins,
            PageSize = _options.PageSize,
            EnforceJoinOrder = _options.EnforceJoinOrder,
            Timeout = _options.Timeout,
            ReplicatedOnly = _options.ReplicatedOnly,
            Colocated = _options.Colocated,
            Local = _options.Local,
            Arguments = args,
            Lazy = _options.Lazy
        };
    }

    /// <summary>
    /// Generates SQL query data (text and parameters) from a query model.
    /// </summary>
    public static QueryData GetQueryData(QueryModel queryModel)
    {
        Debug.Assert(queryModel != null);

        return new CacheQueryModelVisitor().GenerateQuery(queryModel);
    }

    /// <summary>
    /// Gets the result selector: a compiled constructor for anonymous/new-expression
    /// projections and ICacheEntry results, a bool coercion for bool results,
    /// and plain object deserialization otherwise.
    /// </summary>
    private static Func<IBinaryRawReader, int, T> GetResultSelector<T>(Expression selectorExpression)
    {
        var newExpr = selectorExpression as NewExpression;

        if (newExpr != null)
            return GetCompiledCtor<T>(newExpr.Constructor);

        var entryCtor = GetCacheEntryCtorInfo(typeof(T));

        if (entryCtor != null)
            return GetCompiledCtor<T>(entryCtor);

        if (typeof(T) == typeof(bool))
            return ReadBool<T>;

        return (reader, count) => reader.ReadObject<T>();
    }

    /// <summary>
    /// Reads the bool. Actual data may be bool or int/long (SQL engines often
    /// return numeric values for boolean expressions).
    /// </summary>
    private static T ReadBool<T>(IBinaryRawReader reader, int count)
    {
        var obj = reader.ReadObject<object>();

        if (obj is bool)
            return (T) obj;

        if (obj is long)
            return TypeCaster<T>.Cast((long) obj != 0);

        if (obj is int)
            return TypeCaster<T>.Cast((int) obj != 0);

        throw new InvalidOperationException("Expected bool, got: " + obj);
    }

    /// <summary>
    /// Gets the cache entry constructor: for ICacheEntry&lt;K, V&gt; results,
    /// resolves the concrete CacheEntry&lt;K, V&gt; constructor; null otherwise.
    /// </summary>
    private static ConstructorInfo GetCacheEntryCtorInfo(Type entryType)
    {
        if (!entryType.IsGenericType || entryType.GetGenericTypeDefinition() != typeof(ICacheEntry<,>))
            return null;

        var args = entryType.GetGenericArguments();

        var targetType = typeof (CacheEntry<,>).MakeGenericType(args);

        return targetType.GetConstructors().Single();
    }

    /// <summary>
    /// Gets the compiled constructor, memoized in <see cref="CtorCache"/> so each
    /// ConstructorInfo is compiled at most once.
    /// </summary>
    private static Func<IBinaryRawReader, int, T> GetCompiledCtor<T>(ConstructorInfo ctorInfo)
    {
        object result;

        if (CtorCache.TryGetValue(ctorInfo, out result))
            return (Func<IBinaryRawReader, int, T>) result;

        return (Func<IBinaryRawReader, int, T>) CtorCache.GetOrAdd(ctorInfo, x =>
        {
            var innerCtor1 = DelegateConverter.CompileCtor<T>(x, GetCacheEntryCtorInfo);

            return (Func<IBinaryRawReader, int, T>) ((r, c) => innerCtor1(r));
        });
    }
}
}
| |
namespace Microsoft.Protocols.TestSuites.MS_ASCMD
{
using System.Net;
using Microsoft.Protocols.TestSuites.Common;
using Microsoft.Protocols.TestTools;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Response = Microsoft.Protocols.TestSuites.Common.Response;
/// <summary>
/// This scenario is used to test the Provision command.
/// </summary>
[TestClass]
public class S12_Provision : TestSuiteBase
{
#region Class initialize and clean up
/// <summary>
/// Initialize the class.
/// </summary>
/// <param name="testContext">VSTS test context.</param>
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
    // Delegates to the PTF base class, which loads test-suite configuration
    // and initializes adapters for all tests in this class.
    TestClassBase.Initialize(testContext);
}
/// <summary>
/// Clear the class.
/// </summary>
[ClassCleanup]
public static void ClassCleanup()
{
    // Delegates to the PTF base class to release adapters and shared state
    // after all tests in this class have run.
    TestClassBase.Cleanup();
}
#endregion
#region Test cases
/// <summary>
/// This test case is used to verify when download policies from server, server should return provision policies and a template policy key, and then acknowledge the policies by using template policy.
/// </summary>
[TestCategory("MSASCMD"), TestMethod()]
public void MSASCMD_S12_TC01_Provision_DownloadPolicy()
{
#region User calls Provision command to download policies from server
// Calls Provision command to download policies
ProvisionRequest provisionRequest = TestSuiteBase.GenerateDefaultProvisionRequest();
ProvisionResponse provisionResponse = this.CMDAdapter.Provision(provisionRequest);
// Get policyKey, policyType and statusCode from server response
string policyKey = GetPolicyKeyFromResponse(provisionResponse);
string policyType = provisionResponse.ResponseData.Policies.Policy.PolicyType;
Response.ProvisionPoliciesPolicyData data = provisionResponse.ResponseData.Policies.Policy.Data;
byte statusCode = provisionResponse.ResponseData.Status;
#endregion
#region Verify Requirements MS-ASCMD_R5026, MS-ASCMD_R4990, MS-ASCMD_R4992
// If User calls Provision command to download policies successful, server will return policyKey, policyType, data and statusCode in response, then MS-ASCMD_R5026, MS-ASCMD_R4990, MS-ASCMD_R4992 are verified.
// The policy settings with the format specified in PolicyType element, are contained in Data element of Provision command response.
// Add the debug information
Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASCMD_R5026");
// Verify MS-ASCMD requirement: MS-ASCMD_R5026
Site.CaptureRequirementIfIsTrue(
policyKey != null && policyType != null && data != null && statusCode == 1,
5026,
@"[In Downloading Policy Settings] [Provision sequence for downloading policy settings, order 1:] The server responds with the policy type, policy key, data, and status code.");
// Add the debug information
Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASCMD_R4990");
// Verify MS-ASCMD requirement: MS-ASCMD_R4990
Site.CaptureRequirementIfIsTrue(
policyKey != null && policyType != null && data != null,
4990,
@"[In Downloading Policy Settings] The server then responds with the provision:PolicyType, provision:PolicyKey (as specified in [MS-ASPROV] section 2.2.2.41), and provision:Data ([MS-ASPROV] section 2.2.2.23) elements.");
// Add the debug information
Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASCMD_R4992");
// Verify MS-ASCMD requirement: MS-ASCMD_R4992
Site.CaptureRequirementIfIsTrue(
policyType != null && data != null,
4992,
@"[In Downloading Policy Settings] The policy settings, in the format specified in the provision:PolicyType element, are contained in the provision:Data element.");
#endregion
#region User calls Provision command to acknowledge policies.
// Set acknowledgeStatus value to 1, means accept the policy.
string acknowledgeStatus = "1";
ProvisionRequest provisionAcknowledgeRequest = TestSuiteBase.GenerateDefaultProvisionRequest();
provisionAcknowledgeRequest.RequestData.Policies.Policy.PolicyKey = policyKey;
provisionAcknowledgeRequest.RequestData.Policies.Policy.Status = acknowledgeStatus;
// Calls Provision command
ProvisionResponse provisionAcknowledgeResponse = this.CMDAdapter.Provision(provisionAcknowledgeRequest);
// Get policyKey, policyType and status code from server response
policyKey = GetPolicyKeyFromResponse(provisionAcknowledgeResponse);
policyType = provisionAcknowledgeResponse.ResponseData.Policies.Policy.PolicyType;
statusCode = provisionAcknowledgeResponse.ResponseData.Policies.Policy.Status;
// Add the debug information
Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASCMD_R5028");
// Verify MS-ASCMD requirement: MS-ASCMD_R5028
Site.CaptureRequirementIfIsTrue(
policyKey != null && policyType != null && statusCode == 1,
5028,
@"[In Downloading Policy Settings] [Provision sequence for downloading policy settings, order 2:] The server responds with the policy type, policy key, and status code to indicate that the server recorded the client's acknowledgement.");
#endregion
}
/// <summary>
/// This test case is used to verify if Provision command request does not include PolicyType element, the server returns status 2.
/// </summary>
[TestCategory("MSASCMD"), TestMethod()]
public void MSASCMD_S12_TC02_Provision_WithoutPolicyTypeElement()
{
#region User calls Provision command to download policies without policy type element in request.
ProvisionRequest provisionRequest = TestSuiteBase.GenerateDefaultProvisionRequest();
// Set the policy type Element value to null
provisionRequest.RequestData.Policies.Policy.PolicyType = null;
ProvisionResponse provisionResponse = this.CMDAdapter.Provision(provisionRequest);
// Add the debug information
Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASCMD_R4989");
// Verify MS-ASCMD requirement: MS-ASCMD_R4989
Site.CaptureRequirementIfAreEqual<int>(
2,
provisionResponse.ResponseData.Status,
4989,
@"[In Downloading Policy Settings] If the provision:PolicyType element is not included in the initial Provision command request, the server responds with a provision:Status element value of 2.");
#endregion
}
/// <summary>
/// This test case is used to verify if the client sends the Provision command with invalid policy key, server will return status value 144.
/// </summary>
[TestCategory("MSASCMD"), TestMethod()]
public void MSASCMD_S12_TC03_Provision_Status144()
{
#region User calls Provision command to download policies from server
// Calls Provision command to download policies
ProvisionRequest provisionRequest = TestSuiteBase.GenerateDefaultProvisionRequest();
ProvisionResponse provisionResponse = this.CMDAdapter.Provision(provisionRequest);
Site.Assert.AreEqual(1, provisionResponse.ResponseData.Status, "If Provision operation executes successfully, server should return status 1");
// Get policyKey
string policyKey = GetPolicyKeyFromResponse(provisionResponse);
#endregion
#region User calls Provision command to acknowledge policies.
// Set acknowledgeStatus value to 1, means accept the policy.
string acknowledgeStatus = "1";
ProvisionRequest provisionAcknowledgeRequest = TestSuiteBase.GenerateDefaultProvisionRequest();
provisionAcknowledgeRequest.RequestData.Policies.Policy.PolicyKey = policyKey;
provisionAcknowledgeRequest.RequestData.Policies.Policy.Status = acknowledgeStatus;
// Calls Provision command
ProvisionResponse provisionAcknowledgeResponse = this.CMDAdapter.Provision(provisionAcknowledgeRequest);
Site.Assert.AreEqual(1, provisionResponse.ResponseData.Status, "If Provision operation executes successfully, server should return status 1");
// Get policyKey
string finalPolicyKey = GetPolicyKeyFromResponse(provisionAcknowledgeResponse);
#endregion
#region Call FolderSync command with an invalid PolicyKey which is different from the one got from last step.
this.CMDAdapter.ChangePolicyKey(finalPolicyKey.Substring(0, 1));
this.RecordPolicyKeyChanged();
if ("12.1" == Common.GetConfigurationPropertyValue("ActiveSyncProtocolVersion", this.Site))
{
string httpErrorCode = null;
try
{
// Call FolderSync command
this.CMDAdapter.FolderSync(Common.CreateFolderSyncRequest("0"));
}
catch (WebException exception)
{
httpErrorCode = Common.GetErrorCodeFromException(exception);
}
Site.Assert.AreEqual("449", httpErrorCode, "[In MS-ASPROV Appendix A: Product Behavior] <2> Section 3.1.5.1: When the MS-ASProtocolVersion header is set to 12.1, the server sends an HTTP 449 response to request a Provision command from the client.");
}
else
{
// Call FolderSync command
FolderSyncResponse folderSyncResponse = this.CMDAdapter.FolderSync(Common.CreateFolderSyncRequest("0"));
// Add the debug information
Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASCMD_R4912");
// Verify MS-ASCMD requirement: MS-ASCMD_R4912
Site.CaptureRequirementIfAreEqual<int>(
144,
int.Parse(folderSyncResponse.ResponseData.Status),
4912,
@"[In Common Status Codes] [The meaning of the status value 144 is] The device's policy key is invalid.");
}
#endregion
}
#endregion
#region Private Methods
/// <summary>
/// Get PolicyKey from Provision Response
/// </summary>
/// <param name="response">Provision Response</param>
/// <returns>Policy Key, if the response doesn't contain the PolicyKey, returns null</returns>
private static string GetPolicyKeyFromResponse(ProvisionResponse response)
{
if (null != response.ResponseData.Policies)
{
Response.ProvisionPoliciesPolicy policyInResponse = response.ResponseData.Policies.Policy;
if (policyInResponse != null)
{
return policyInResponse.PolicyKey;
}
}
return null;
}
#endregion
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
namespace MigAz.Forms
{
partial class OptionsDialog
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used by the dialog.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
    // Release the designer component container only when disposing managed state;
    // the null-conditional call is a no-op when no components were created.
    if (disposing)
    {
        components?.Dispose();
    }

    base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(OptionsDialog));
this.btnOK = new System.Windows.Forms.Button();
this.btnCancel = new System.Windows.Forms.Button();
this.groupBox1 = new System.Windows.Forms.GroupBox();
this.txtPublicIPSuffix = new System.Windows.Forms.TextBox();
this.label9 = new System.Windows.Forms.Label();
this.txtLoadBalancerSuffix = new System.Windows.Forms.TextBox();
this.label8 = new System.Windows.Forms.Label();
this.lblSuffix = new System.Windows.Forms.Label();
this.txtStorageAccountSuffix = new System.Windows.Forms.TextBox();
this.txtVirtualNetworkSuffix = new System.Windows.Forms.TextBox();
this.label5 = new System.Windows.Forms.Label();
this.txtVirtualNetworkGatewaySuffix = new System.Windows.Forms.TextBox();
this.label6 = new System.Windows.Forms.Label();
this.txtNetworkSecurityGroupSuffix = new System.Windows.Forms.TextBox();
this.label7 = new System.Windows.Forms.Label();
this.txtResourceGroupSuffix = new System.Windows.Forms.TextBox();
this.label4 = new System.Windows.Forms.Label();
this.btnApplyDefaultNaming = new System.Windows.Forms.Button();
this.linkLabel1 = new System.Windows.Forms.LinkLabel();
this.txtAvailabilitySetSuffix = new System.Windows.Forms.TextBox();
this.label3 = new System.Windows.Forms.Label();
this.txtVirtualMachineSuffix = new System.Windows.Forms.TextBox();
this.label2 = new System.Windows.Forms.Label();
this.txtNetworkInterfaceCardSuffix = new System.Windows.Forms.TextBox();
this.label1 = new System.Windows.Forms.Label();
this.groupBox2 = new System.Windows.Forms.GroupBox();
this.label15 = new System.Windows.Forms.Label();
this.cmbLoginPromptBehavior = new System.Windows.Forms.ComboBox();
this.label14 = new System.Windows.Forms.Label();
this.cmbDefaultAzureEnvironment = new System.Windows.Forms.ComboBox();
this.label13 = new System.Windows.Forms.Label();
this.label12 = new System.Windows.Forms.Label();
this.label11 = new System.Windows.Forms.Label();
this.label10 = new System.Windows.Forms.Label();
this.upDownAccessSASMinutes = new System.Windows.Forms.NumericUpDown();
this.panel1 = new System.Windows.Forms.Panel();
this.rbManagedDisk = new System.Windows.Forms.RadioButton();
this.rbClassicDisk = new System.Windows.Forms.RadioButton();
this.chkSaveSelection = new System.Windows.Forms.CheckBox();
this.chkAutoSelectDependencies = new System.Windows.Forms.CheckBox();
this.chkBuildEmpty = new System.Windows.Forms.CheckBox();
this.chkAllowTelemetry = new System.Windows.Forms.CheckBox();
this.groupBox1.SuspendLayout();
this.groupBox2.SuspendLayout();
((System.ComponentModel.ISupportInitialize)(this.upDownAccessSASMinutes)).BeginInit();
this.panel1.SuspendLayout();
this.SuspendLayout();
//
// btnOK
//
this.btnOK.DialogResult = System.Windows.Forms.DialogResult.OK;
this.btnOK.Location = new System.Drawing.Point(843, 864);
this.btnOK.Margin = new System.Windows.Forms.Padding(4, 6, 4, 6);
this.btnOK.Name = "btnOK";
this.btnOK.Size = new System.Drawing.Size(148, 44);
this.btnOK.TabIndex = 15;
this.btnOK.Text = "OK";
this.btnOK.UseVisualStyleBackColor = true;
this.btnOK.Click += new System.EventHandler(this.btnOK_Click);
//
// btnCancel
//
this.btnCancel.DialogResult = System.Windows.Forms.DialogResult.Cancel;
this.btnCancel.Location = new System.Drawing.Point(1016, 864);
this.btnCancel.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4);
this.btnCancel.Name = "btnCancel";
this.btnCancel.Size = new System.Drawing.Size(148, 44);
this.btnCancel.TabIndex = 16;
this.btnCancel.Text = "Cancel";
this.btnCancel.UseVisualStyleBackColor = true;
this.btnCancel.Click += new System.EventHandler(this.btnCancel_Click);
//
// groupBox1
//
this.groupBox1.Controls.Add(this.txtPublicIPSuffix);
this.groupBox1.Controls.Add(this.label9);
this.groupBox1.Controls.Add(this.txtLoadBalancerSuffix);
this.groupBox1.Controls.Add(this.label8);
this.groupBox1.Controls.Add(this.lblSuffix);
this.groupBox1.Controls.Add(this.txtStorageAccountSuffix);
this.groupBox1.Controls.Add(this.txtVirtualNetworkSuffix);
this.groupBox1.Controls.Add(this.label5);
this.groupBox1.Controls.Add(this.txtVirtualNetworkGatewaySuffix);
this.groupBox1.Controls.Add(this.label6);
this.groupBox1.Controls.Add(this.txtNetworkSecurityGroupSuffix);
this.groupBox1.Controls.Add(this.label7);
this.groupBox1.Controls.Add(this.txtResourceGroupSuffix);
this.groupBox1.Controls.Add(this.label4);
this.groupBox1.Controls.Add(this.btnApplyDefaultNaming);
this.groupBox1.Controls.Add(this.linkLabel1);
this.groupBox1.Controls.Add(this.txtAvailabilitySetSuffix);
this.groupBox1.Controls.Add(this.label3);
this.groupBox1.Controls.Add(this.txtVirtualMachineSuffix);
this.groupBox1.Controls.Add(this.label2);
this.groupBox1.Controls.Add(this.txtNetworkInterfaceCardSuffix);
this.groupBox1.Controls.Add(this.label1);
this.groupBox1.Location = new System.Drawing.Point(27, 424);
this.groupBox1.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4);
this.groupBox1.Name = "groupBox1";
this.groupBox1.Padding = new System.Windows.Forms.Padding(4, 4, 4, 4);
this.groupBox1.Size = new System.Drawing.Size(1140, 415);
this.groupBox1.TabIndex = 10;
this.groupBox1.TabStop = false;
this.groupBox1.Text = "Azure Resource Manager (ARM) Object Naming Suffixes";
//
// txtPublicIPSuffix
//
this.txtPublicIPSuffix.Location = new System.Drawing.Point(324, 328);
this.txtPublicIPSuffix.Margin = new System.Windows.Forms.Padding(4, 6, 4, 6);
this.txtPublicIPSuffix.MaxLength = 10;
this.txtPublicIPSuffix.Name = "txtPublicIPSuffix";
this.txtPublicIPSuffix.Size = new System.Drawing.Size(76, 31);
this.txtPublicIPSuffix.TabIndex = 9;
this.txtPublicIPSuffix.Text = "-pip";
this.txtPublicIPSuffix.TextChanged += new System.EventHandler(this.migAzOption_TextChanged);
//
// label9
//
this.label9.AutoSize = true;
this.label9.Location = new System.Drawing.Point(61, 328);
this.label9.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0);
this.label9.Name = "label9";
this.label9.Size = new System.Drawing.Size(102, 25);
this.label9.TabIndex = 23;
this.label9.Text = "Public IP:";
//
// txtLoadBalancerSuffix
//
this.txtLoadBalancerSuffix.Location = new System.Drawing.Point(948, 128);
this.txtLoadBalancerSuffix.Margin = new System.Windows.Forms.Padding(4, 6, 4, 6);
this.txtLoadBalancerSuffix.MaxLength = 10;
this.txtLoadBalancerSuffix.Name = "txtLoadBalancerSuffix";
this.txtLoadBalancerSuffix.Size = new System.Drawing.Size(76, 31);
this.txtLoadBalancerSuffix.TabIndex = 10;
this.txtLoadBalancerSuffix.Text = "-lb";
this.txtLoadBalancerSuffix.TextChanged += new System.EventHandler(this.migAzOption_TextChanged);
//
// label8
//
this.label8.AutoSize = true;
this.label8.Location = new System.Drawing.Point(700, 128);
this.label8.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0);
this.label8.Name = "label8";
this.label8.Size = new System.Drawing.Size(157, 25);
this.label8.TabIndex = 21;
this.label8.Text = "Load Balancer:";
//
// lblSuffix
//
this.lblSuffix.AutoSize = true;
this.lblSuffix.Location = new System.Drawing.Point(61, 285);
this.lblSuffix.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0);
this.lblSuffix.Name = "lblSuffix";
this.lblSuffix.Size = new System.Drawing.Size(177, 25);
this.lblSuffix.TabIndex = 19;
this.lblSuffix.Text = "Storage Account:";
//
// txtStorageAccountSuffix
//
this.txtStorageAccountSuffix.CharacterCasing = System.Windows.Forms.CharacterCasing.Lower;
this.txtStorageAccountSuffix.Location = new System.Drawing.Point(324, 282);
this.txtStorageAccountSuffix.Margin = new System.Windows.Forms.Padding(4, 6, 4, 6);
this.txtStorageAccountSuffix.MaxLength = 10;
this.txtStorageAccountSuffix.Name = "txtStorageAccountSuffix";
this.txtStorageAccountSuffix.Size = new System.Drawing.Size(76, 31);
this.txtStorageAccountSuffix.TabIndex = 8;
this.txtStorageAccountSuffix.Text = "v2";
this.txtStorageAccountSuffix.TextChanged += new System.EventHandler(this.migAzOption_TextChanged);
//
// txtVirtualNetworkSuffix
//
this.txtVirtualNetworkSuffix.Location = new System.Drawing.Point(324, 165);
this.txtVirtualNetworkSuffix.Margin = new System.Windows.Forms.Padding(4, 6, 4, 6);
this.txtVirtualNetworkSuffix.MaxLength = 10;
this.txtVirtualNetworkSuffix.Name = "txtVirtualNetworkSuffix";
this.txtVirtualNetworkSuffix.Size = new System.Drawing.Size(76, 31);
this.txtVirtualNetworkSuffix.TabIndex = 5;
this.txtVirtualNetworkSuffix.Text = "-vnet";
this.txtVirtualNetworkSuffix.TextChanged += new System.EventHandler(this.migAzOption_TextChanged);
//
// label5
//
this.label5.AutoSize = true;
this.label5.Location = new System.Drawing.Point(61, 168);
this.label5.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0);
this.label5.Name = "label5";
this.label5.Size = new System.Drawing.Size(163, 25);
this.label5.TabIndex = 18;
this.label5.Text = "Virtual Network:";
//
// txtVirtualNetworkGatewaySuffix
//
this.txtVirtualNetworkGatewaySuffix.Location = new System.Drawing.Point(324, 206);
this.txtVirtualNetworkGatewaySuffix.Margin = new System.Windows.Forms.Padding(4, 6, 4, 6);
this.txtVirtualNetworkGatewaySuffix.MaxLength = 10;
this.txtVirtualNetworkGatewaySuffix.Name = "txtVirtualNetworkGatewaySuffix";
this.txtVirtualNetworkGatewaySuffix.Size = new System.Drawing.Size(76, 31);
this.txtVirtualNetworkGatewaySuffix.TabIndex = 6;
this.txtVirtualNetworkGatewaySuffix.Text = "-gw";
this.txtVirtualNetworkGatewaySuffix.TextChanged += new System.EventHandler(this.migAzOption_TextChanged);
//
// label6
//
this.label6.AutoSize = true;
this.label6.Location = new System.Drawing.Point(61, 206);
this.label6.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0);
this.label6.Name = "label6";
this.label6.Size = new System.Drawing.Size(253, 25);
this.label6.TabIndex = 16;
this.label6.Text = "Virtual Network Gateway:";
//
// txtNetworkSecurityGroupSuffix
//
this.txtNetworkSecurityGroupSuffix.Location = new System.Drawing.Point(324, 244);
this.txtNetworkSecurityGroupSuffix.Margin = new System.Windows.Forms.Padding(4, 6, 4, 6);
this.txtNetworkSecurityGroupSuffix.MaxLength = 10;
this.txtNetworkSecurityGroupSuffix.Name = "txtNetworkSecurityGroupSuffix";
this.txtNetworkSecurityGroupSuffix.Size = new System.Drawing.Size(76, 31);
this.txtNetworkSecurityGroupSuffix.TabIndex = 7;
this.txtNetworkSecurityGroupSuffix.Text = "-nsg";
this.txtNetworkSecurityGroupSuffix.TextChanged += new System.EventHandler(this.migAzOption_TextChanged);
//
// label7
//
this.label7.AutoSize = true;
this.label7.Location = new System.Drawing.Point(61, 244);
this.label7.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0);
this.label7.Name = "label7";
this.label7.Size = new System.Drawing.Size(245, 25);
this.label7.TabIndex = 14;
this.label7.Text = "Network Security Group:";
//
// txtResourceGroupSuffix
//
this.txtResourceGroupSuffix.Location = new System.Drawing.Point(324, 122);
this.txtResourceGroupSuffix.Margin = new System.Windows.Forms.Padding(4, 6, 4, 6);
this.txtResourceGroupSuffix.MaxLength = 10;
this.txtResourceGroupSuffix.Name = "txtResourceGroupSuffix";
this.txtResourceGroupSuffix.Size = new System.Drawing.Size(76, 31);
this.txtResourceGroupSuffix.TabIndex = 4;
this.txtResourceGroupSuffix.Text = "-rg";
this.txtResourceGroupSuffix.TextChanged += new System.EventHandler(this.migAzOption_TextChanged);
//
// label4
//
this.label4.AutoSize = true;
this.label4.Location = new System.Drawing.Point(60, 129);
this.label4.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0);
this.label4.Name = "label4";
this.label4.Size = new System.Drawing.Size(175, 25);
this.label4.TabIndex = 11;
this.label4.Text = "Resource Group:";
//
// btnApplyDefaultNaming
//
this.btnApplyDefaultNaming.Location = new System.Drawing.Point(707, 339);
this.btnApplyDefaultNaming.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4);
this.btnApplyDefaultNaming.Name = "btnApplyDefaultNaming";
this.btnApplyDefaultNaming.Size = new System.Drawing.Size(413, 50);
this.btnApplyDefaultNaming.TabIndex = 14;
this.btnApplyDefaultNaming.Text = "Apply Default Naming Conventions";
this.btnApplyDefaultNaming.UseVisualStyleBackColor = true;
this.btnApplyDefaultNaming.Click += new System.EventHandler(this.btnApplyDefaultNaming_Click);
//
// linkLabel1
//
this.linkLabel1.AutoSize = true;
this.linkLabel1.Location = new System.Drawing.Point(136, 52);
this.linkLabel1.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0);
this.linkLabel1.Name = "linkLabel1";
this.linkLabel1.Size = new System.Drawing.Size(770, 25);
this.linkLabel1.TabIndex = 4;
this.linkLabel1.TabStop = true;
this.linkLabel1.Text = "https://docs.microsoft.com/en-us/azure/guidance/guidance-naming-conventions";
//
// txtAvailabilitySetSuffix
//
this.txtAvailabilitySetSuffix.Location = new System.Drawing.Point(948, 164);
this.txtAvailabilitySetSuffix.Margin = new System.Windows.Forms.Padding(4, 6, 4, 6);
this.txtAvailabilitySetSuffix.MaxLength = 10;
this.txtAvailabilitySetSuffix.Name = "txtAvailabilitySetSuffix";
this.txtAvailabilitySetSuffix.Size = new System.Drawing.Size(76, 31);
this.txtAvailabilitySetSuffix.TabIndex = 11;
this.txtAvailabilitySetSuffix.Text = "-as";
this.txtAvailabilitySetSuffix.TextChanged += new System.EventHandler(this.migAzOption_TextChanged);
//
// label3
//
this.label3.AutoSize = true;
this.label3.Location = new System.Drawing.Point(700, 165);
this.label3.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0);
this.label3.Name = "label3";
this.label3.Size = new System.Drawing.Size(159, 25);
this.label3.TabIndex = 10;
this.label3.Text = "Availability Set:";
//
// txtVirtualMachineSuffix
//
this.txtVirtualMachineSuffix.Location = new System.Drawing.Point(948, 204);
this.txtVirtualMachineSuffix.Margin = new System.Windows.Forms.Padding(4, 6, 4, 6);
this.txtVirtualMachineSuffix.MaxLength = 10;
this.txtVirtualMachineSuffix.Name = "txtVirtualMachineSuffix";
this.txtVirtualMachineSuffix.Size = new System.Drawing.Size(76, 31);
this.txtVirtualMachineSuffix.TabIndex = 12;
this.txtVirtualMachineSuffix.Text = "-vm";
this.txtVirtualMachineSuffix.TextChanged += new System.EventHandler(this.migAzOption_TextChanged);
//
// label2
//
this.label2.AutoSize = true;
this.label2.Location = new System.Drawing.Point(700, 204);
this.label2.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0);
this.label2.Name = "label2";
this.label2.Size = new System.Drawing.Size(167, 25);
this.label2.TabIndex = 8;
this.label2.Text = "Virtual Machine:";
//
// txtNetworkInterfaceCardSuffix
//
this.txtNetworkInterfaceCardSuffix.Location = new System.Drawing.Point(948, 244);
this.txtNetworkInterfaceCardSuffix.Margin = new System.Windows.Forms.Padding(4, 6, 4, 6);
this.txtNetworkInterfaceCardSuffix.MaxLength = 10;
this.txtNetworkInterfaceCardSuffix.Name = "txtNetworkInterfaceCardSuffix";
this.txtNetworkInterfaceCardSuffix.Size = new System.Drawing.Size(76, 31);
this.txtNetworkInterfaceCardSuffix.TabIndex = 13;
this.txtNetworkInterfaceCardSuffix.Text = "-nic";
this.txtNetworkInterfaceCardSuffix.TextChanged += new System.EventHandler(this.migAzOption_TextChanged);
//
// label1
//
this.label1.AutoSize = true;
this.label1.Location = new System.Drawing.Point(700, 244);
this.label1.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0);
this.label1.Name = "label1";
this.label1.Size = new System.Drawing.Size(237, 25);
this.label1.TabIndex = 6;
this.label1.Text = "Network Interface Card:";
//
// groupBox2
//
this.groupBox2.Controls.Add(this.label15);
this.groupBox2.Controls.Add(this.cmbLoginPromptBehavior);
this.groupBox2.Controls.Add(this.label14);
this.groupBox2.Controls.Add(this.cmbDefaultAzureEnvironment);
this.groupBox2.Controls.Add(this.label13);
this.groupBox2.Controls.Add(this.label12);
this.groupBox2.Controls.Add(this.label11);
this.groupBox2.Controls.Add(this.label10);
this.groupBox2.Controls.Add(this.upDownAccessSASMinutes);
this.groupBox2.Controls.Add(this.panel1);
this.groupBox2.Controls.Add(this.chkSaveSelection);
this.groupBox2.Controls.Add(this.chkAutoSelectDependencies);
this.groupBox2.Controls.Add(this.chkBuildEmpty);
this.groupBox2.Controls.Add(this.chkAllowTelemetry);
this.groupBox2.Location = new System.Drawing.Point(27, 32);
this.groupBox2.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4);
this.groupBox2.Name = "groupBox2";
this.groupBox2.Padding = new System.Windows.Forms.Padding(4, 4, 4, 4);
this.groupBox2.Size = new System.Drawing.Size(1139, 365);
this.groupBox2.TabIndex = 11;
this.groupBox2.TabStop = false;
this.groupBox2.Text = "MigAz Options";
//
// label15
//
this.label15.AutoSize = true;
this.label15.Location = new System.Drawing.Point(91, 281);
this.label15.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0);
this.label15.Name = "label15";
this.label15.Size = new System.Drawing.Size(71, 25);
this.label15.TabIndex = 14;
this.label15.Text = "Login:";
//
// cmbLoginPromptBehavior
//
this.cmbLoginPromptBehavior.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
this.cmbLoginPromptBehavior.FormattingEnabled = true;
this.cmbLoginPromptBehavior.Items.AddRange(new object[] {
"Always",
"Auto",
"SelectAccount"});
this.cmbLoginPromptBehavior.Location = new System.Drawing.Point(172, 278);
this.cmbLoginPromptBehavior.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4);
this.cmbLoginPromptBehavior.Name = "cmbLoginPromptBehavior";
this.cmbLoginPromptBehavior.Size = new System.Drawing.Size(197, 33);
this.cmbLoginPromptBehavior.TabIndex = 13;
this.cmbLoginPromptBehavior.SelectedIndexChanged += new System.EventHandler(this.comboBox_SelectedIndexChanged);
//
// label14
//
this.label14.AutoSize = true;
this.label14.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
this.label14.Location = new System.Drawing.Point(47, 234);
this.label14.Margin = new System.Windows.Forms.Padding(5, 0, 5, 0);
this.label14.Name = "label14";
this.label14.Size = new System.Drawing.Size(382, 26);
this.label14.TabIndex = 12;
this.label14.Text = "Default Azure AD Prompt Behavior";
//
// cmbDefaultAzureEnvironment
//
this.cmbDefaultAzureEnvironment.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
this.cmbDefaultAzureEnvironment.FormattingEnabled = true;
this.cmbDefaultAzureEnvironment.Location = new System.Drawing.Point(707, 71);
this.cmbDefaultAzureEnvironment.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4);
this.cmbDefaultAzureEnvironment.Name = "cmbDefaultAzureEnvironment";
this.cmbDefaultAzureEnvironment.Size = new System.Drawing.Size(363, 33);
this.cmbDefaultAzureEnvironment.TabIndex = 11;
this.cmbDefaultAzureEnvironment.SelectedIndexChanged += new System.EventHandler(this.comboBox_SelectedIndexChanged);
//
// label13
//
this.label13.AutoSize = true;
this.label13.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
this.label13.Location = new System.Drawing.Point(643, 28);
this.label13.Margin = new System.Windows.Forms.Padding(5, 0, 5, 0);
this.label13.Name = "label13";
this.label13.Size = new System.Drawing.Size(298, 26);
this.label13.TabIndex = 10;
this.label13.Text = "Default Azure Environment";
//
// label12
//
this.label12.AutoSize = true;
this.label12.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
this.label12.Location = new System.Drawing.Point(643, 135);
this.label12.Margin = new System.Windows.Forms.Padding(5, 0, 5, 0);
this.label12.Name = "label12";
this.label12.Size = new System.Drawing.Size(274, 26);
this.label12.TabIndex = 9;
this.label12.Text = "Default Target Disk Type";
//
// label11
//
this.label11.AutoSize = true;
this.label11.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
this.label11.Location = new System.Drawing.Point(643, 271);
this.label11.Margin = new System.Windows.Forms.Padding(5, 0, 5, 0);
this.label11.Name = "label11";
this.label11.Size = new System.Drawing.Size(399, 26);
this.label11.TabIndex = 8;
this.label11.Text = "Managed Disk Access SAS Duration";
//
// label10
//
this.label10.AutoSize = true;
this.label10.Location = new System.Drawing.Point(824, 318);
this.label10.Margin = new System.Windows.Forms.Padding(5, 0, 5, 0);
this.label10.Name = "label10";
this.label10.Size = new System.Drawing.Size(88, 25);
this.label10.TabIndex = 7;
this.label10.Text = "Minutes";
//
// upDownAccessSASMinutes
//
this.upDownAccessSASMinutes.Location = new System.Drawing.Point(712, 310);
this.upDownAccessSASMinutes.Margin = new System.Windows.Forms.Padding(5, 6, 5, 6);
this.upDownAccessSASMinutes.Maximum = new decimal(new int[] {
1440,
0,
0,
0});
this.upDownAccessSASMinutes.Minimum = new decimal(new int[] {
15,
0,
0,
0});
this.upDownAccessSASMinutes.Name = "upDownAccessSASMinutes";
this.upDownAccessSASMinutes.Size = new System.Drawing.Size(100, 31);
this.upDownAccessSASMinutes.TabIndex = 6;
this.upDownAccessSASMinutes.Value = new decimal(new int[] {
720,
0,
0,
0});
this.upDownAccessSASMinutes.ValueChanged += new System.EventHandler(this.upDownAccessSASMinutes_ValueChanged);
//
// panel1
//
this.panel1.Controls.Add(this.rbManagedDisk);
this.panel1.Controls.Add(this.rbClassicDisk);
this.panel1.Location = new System.Drawing.Point(707, 178);
this.panel1.Margin = new System.Windows.Forms.Padding(5, 6, 5, 6);
this.panel1.Name = "panel1";
this.panel1.Size = new System.Drawing.Size(364, 81);
this.panel1.TabIndex = 5;
//
// rbManagedDisk
//
this.rbManagedDisk.AutoSize = true;
this.rbManagedDisk.Location = new System.Drawing.Point(5, 6);
this.rbManagedDisk.Margin = new System.Windows.Forms.Padding(5, 6, 5, 6);
this.rbManagedDisk.Name = "rbManagedDisk";
this.rbManagedDisk.Size = new System.Drawing.Size(345, 29);
this.rbManagedDisk.TabIndex = 3;
this.rbManagedDisk.TabStop = true;
this.rbManagedDisk.Text = "Managed Disk (Recommended)";
this.rbManagedDisk.UseVisualStyleBackColor = true;
this.rbManagedDisk.CheckedChanged += new System.EventHandler(this.migAzOption_CheckChanged);
//
// rbClassicDisk
//
this.rbClassicDisk.AutoSize = true;
this.rbClassicDisk.Location = new System.Drawing.Point(5, 42);
this.rbClassicDisk.Margin = new System.Windows.Forms.Padding(5, 6, 5, 6);
this.rbClassicDisk.Name = "rbClassicDisk";
this.rbClassicDisk.Size = new System.Drawing.Size(161, 29);
this.rbClassicDisk.TabIndex = 2;
this.rbClassicDisk.TabStop = true;
this.rbClassicDisk.Text = "Classic Disk";
this.rbClassicDisk.UseVisualStyleBackColor = true;
this.rbClassicDisk.CheckedChanged += new System.EventHandler(this.migAzOption_CheckChanged);
//
// chkSaveSelection
//
this.chkSaveSelection.AutoSize = true;
this.chkSaveSelection.Location = new System.Drawing.Point(52, 94);
this.chkSaveSelection.Margin = new System.Windows.Forms.Padding(4, 6, 4, 6);
this.chkSaveSelection.Name = "chkSaveSelection";
this.chkSaveSelection.Size = new System.Drawing.Size(185, 29);
this.chkSaveSelection.TabIndex = 2;
this.chkSaveSelection.Text = "Save selection";
this.chkSaveSelection.UseVisualStyleBackColor = true;
this.chkSaveSelection.CheckedChanged += new System.EventHandler(this.migAzOption_CheckedChanged);
//
// chkAutoSelectDependencies
//
this.chkAutoSelectDependencies.AutoSize = true;
this.chkAutoSelectDependencies.Location = new System.Drawing.Point(52, 54);
this.chkAutoSelectDependencies.Margin = new System.Windows.Forms.Padding(4, 6, 4, 6);
this.chkAutoSelectDependencies.Name = "chkAutoSelectDependencies";
this.chkAutoSelectDependencies.Size = new System.Drawing.Size(432, 29);
this.chkAutoSelectDependencies.TabIndex = 1;
this.chkAutoSelectDependencies.Text = "Auto select dependencies (for VMs only)";
this.chkAutoSelectDependencies.UseVisualStyleBackColor = true;
this.chkAutoSelectDependencies.CheckedChanged += new System.EventHandler(this.migAzOption_CheckedChanged);
//
// chkBuildEmpty
//
this.chkBuildEmpty.AutoSize = true;
this.chkBuildEmpty.Checked = true;
this.chkBuildEmpty.CheckState = System.Windows.Forms.CheckState.Checked;
this.chkBuildEmpty.Location = new System.Drawing.Point(52, 135);
this.chkBuildEmpty.Margin = new System.Windows.Forms.Padding(4, 6, 4, 6);
this.chkBuildEmpty.Name = "chkBuildEmpty";
this.chkBuildEmpty.Size = new System.Drawing.Size(280, 29);
this.chkBuildEmpty.TabIndex = 0;
this.chkBuildEmpty.Text = "Build empty environment";
this.chkBuildEmpty.UseVisualStyleBackColor = true;
this.chkBuildEmpty.CheckedChanged += new System.EventHandler(this.migAzOption_CheckedChanged);
//
// chkAllowTelemetry
//
this.chkAllowTelemetry.AutoSize = true;
this.chkAllowTelemetry.Checked = true;
this.chkAllowTelemetry.CheckState = System.Windows.Forms.CheckState.Checked;
this.chkAllowTelemetry.Location = new System.Drawing.Point(52, 172);
this.chkAllowTelemetry.Margin = new System.Windows.Forms.Padding(4, 6, 4, 6);
this.chkAllowTelemetry.Name = "chkAllowTelemetry";
this.chkAllowTelemetry.Size = new System.Drawing.Size(286, 29);
this.chkAllowTelemetry.TabIndex = 3;
this.chkAllowTelemetry.Text = "Allow telemetry collection";
this.chkAllowTelemetry.UseVisualStyleBackColor = true;
this.chkAllowTelemetry.CheckedChanged += new System.EventHandler(this.chkAllowTelemetry_CheckedChanged);
//
// OptionsDialog
//
this.AcceptButton = this.btnOK;
this.AutoScaleDimensions = new System.Drawing.SizeF(12F, 25F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.CancelButton = this.btnCancel;
this.ClientSize = new System.Drawing.Size(1191, 944);
this.Controls.Add(this.groupBox2);
this.Controls.Add(this.groupBox1);
this.Controls.Add(this.btnCancel);
this.Controls.Add(this.btnOK);
this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
this.Margin = new System.Windows.Forms.Padding(4, 6, 4, 6);
this.MaximizeBox = false;
this.MinimizeBox = false;
this.Name = "OptionsDialog";
this.ShowIcon = false;
this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
this.Text = "Options";
this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.OptionsDialog_FormClosing);
this.Load += new System.EventHandler(this.formOptions_Load);
this.groupBox1.ResumeLayout(false);
this.groupBox1.PerformLayout();
this.groupBox2.ResumeLayout(false);
this.groupBox2.PerformLayout();
((System.ComponentModel.ISupportInitialize)(this.upDownAccessSASMinutes)).EndInit();
this.panel1.ResumeLayout(false);
this.panel1.PerformLayout();
this.ResumeLayout(false);
}
#endregion
private System.Windows.Forms.Button btnOK;
private System.Windows.Forms.Button btnCancel;
private System.Windows.Forms.GroupBox groupBox1;
private System.Windows.Forms.GroupBox groupBox2;
private System.Windows.Forms.TextBox txtNetworkInterfaceCardSuffix;
private System.Windows.Forms.Label label1;
private System.Windows.Forms.CheckBox chkSaveSelection;
private System.Windows.Forms.CheckBox chkAutoSelectDependencies;
private System.Windows.Forms.CheckBox chkBuildEmpty;
private System.Windows.Forms.CheckBox chkAllowTelemetry;
private System.Windows.Forms.TextBox txtVirtualMachineSuffix;
private System.Windows.Forms.Label label2;
private System.Windows.Forms.TextBox txtAvailabilitySetSuffix;
private System.Windows.Forms.Label label3;
private System.Windows.Forms.Button btnApplyDefaultNaming;
private System.Windows.Forms.LinkLabel linkLabel1;
private System.Windows.Forms.TextBox txtPublicIPSuffix;
private System.Windows.Forms.Label label9;
private System.Windows.Forms.TextBox txtLoadBalancerSuffix;
private System.Windows.Forms.Label label8;
private System.Windows.Forms.Label lblSuffix;
private System.Windows.Forms.TextBox txtStorageAccountSuffix;
private System.Windows.Forms.TextBox txtVirtualNetworkSuffix;
private System.Windows.Forms.Label label5;
private System.Windows.Forms.TextBox txtVirtualNetworkGatewaySuffix;
private System.Windows.Forms.Label label6;
private System.Windows.Forms.TextBox txtNetworkSecurityGroupSuffix;
private System.Windows.Forms.Label label7;
private System.Windows.Forms.TextBox txtResourceGroupSuffix;
private System.Windows.Forms.Label label4;
private System.Windows.Forms.Label label10;
private System.Windows.Forms.NumericUpDown upDownAccessSASMinutes;
private System.Windows.Forms.Panel panel1;
private System.Windows.Forms.RadioButton rbManagedDisk;
private System.Windows.Forms.RadioButton rbClassicDisk;
private System.Windows.Forms.Label label12;
private System.Windows.Forms.Label label11;
private System.Windows.Forms.ComboBox cmbDefaultAzureEnvironment;
private System.Windows.Forms.Label label13;
private System.Windows.Forms.ComboBox cmbLoginPromptBehavior;
private System.Windows.Forms.Label label14;
private System.Windows.Forms.Label label15;
}
}
| |
using System;
using NUnit.Framework;
using OpenQA.Selenium.Environment;
using System.Drawing;
namespace OpenQA.Selenium
{
    // Verifies WebDriver's implicit scrolling behavior when clicking elements that are
    // outside the viewport, inside overflowed containers, or inside (nested) frames.
    // Relies on the shared `driver` instance and page URLs provided by DriverTestFixture.
    [TestFixture]
    public class ClickScrollingTest : DriverTestFixture
    {
        [Test]
        public void ClickingOnAnchorScrollsPage()
        {
            // Cross-browser vertical scroll offset: prefer window.pageYOffset,
            // fall back to document.documentElement.scrollTop (older IE).
            string scrollScript = "var pageY;";
            scrollScript += "if (typeof(window.pageYOffset) == 'number') {";
            scrollScript += "pageY = window.pageYOffset;";
            scrollScript += "} else {";
            scrollScript += "pageY = document.documentElement.scrollTop;";
            scrollScript += "}";
            scrollScript += "return pageY;";
            driver.Url = macbethPage;
            driver.FindElement(By.PartialLinkText("last speech")).Click();
            long yOffset = (long)((IJavaScriptExecutor)driver).ExecuteScript(scrollScript);
            // Focusing the link in order to click it (without actually following it)
            // scrolls it into view, which lands a few pixels further than 0.
            Assert.That(yOffset, Is.GreaterThan(300), "Did not scroll");
        }
        [Test]
        public void ShouldScrollToClickOnAnElementHiddenByOverflow()
        {
            string url = EnvironmentManager.Instance.UrlBuilder.WhereIs("click_out_of_bounds_overflow.html");
            driver.Url = url;
            IWebElement link = driver.FindElement(By.Id("link"));
            // Passes as long as no exception is thrown.
            link.Click();
        }
        [Test]
        public void ShouldBeAbleToClickOnAnElementHiddenByOverflow()
        {
            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("scroll.html");
            IWebElement link = driver.FindElement(By.Id("line8"));
            // This used to throw a MoveTargetOutOfBoundsException - we don't expect it to
            link.Click();
            Assert.AreEqual("line8", driver.FindElement(By.Id("clicked")).Text);
        }
        [Test]
        public void ShouldBeAbleToClickOnAnElementHiddenByDoubleOverflow()
        {
            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("scrolling_tests/page_with_double_overflow_auto.html");
            driver.FindElement(By.Id("link")).Click();
            WaitFor(TitleToBe("Clicked Successfully!"), "Browser title was not 'Clicked Successfully'");
        }
        [Test]
        public void ShouldBeAbleToClickOnAnElementHiddenByYOverflow()
        {
            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("scrolling_tests/page_with_y_overflow_auto.html");
            driver.FindElement(By.Id("link")).Click();
            WaitFor(TitleToBe("Clicked Successfully!"), "Browser title was not 'Clicked Successfully'");
        }
        [Test]
        [IgnoreBrowser(Browser.IE, "Issue #716")]
        [IgnoreBrowser(Browser.Firefox, "Issue #716")]
        public void ShouldBeAbleToClickOnAnElementPartiallyHiddenByOverflow()
        {
            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("scrolling_tests/page_with_partially_hidden_element.html");
            driver.FindElement(By.Id("btn")).Click();
            WaitFor(TitleToBe("Clicked Successfully!"), "Browser title was not 'Clicked Successfully'");
        }
        [Test]
        [IgnoreBrowser(Browser.Opera)]
        public void ShouldNotScrollOverflowElementsWhichAreVisible()
        {
            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("scroll2.html");
            IWebElement list = driver.FindElement(By.TagName("ul"));
            IWebElement item = list.FindElement(By.Id("desired"));
            item.Click();
            // The container's own scrollTop must stay at 0: the item was already visible.
            long yOffset = (long)((IJavaScriptExecutor)driver).ExecuteScript("return arguments[0].scrollTop;", list);
            Assert.AreEqual(0, yOffset, "Should not have scrolled");
        }
        [Test]
        [IgnoreBrowser(Browser.Chrome, "Webkit-based browsers apparently scroll anyway.")]
        public void ShouldNotScrollIfAlreadyScrolledAndElementIsInView()
        {
            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("scroll3.html");
            driver.FindElement(By.Id("button1")).Click();
            long scrollTop = GetScrollTop();
            driver.FindElement(By.Id("button2")).Click();
            // Second click targets an element already in view; scroll position must not change.
            Assert.AreEqual(scrollTop, GetScrollTop());
        }
        [Test]
        public void ShouldBeAbleToClickRadioButtonScrolledIntoView()
        {
            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("scroll4.html");
            driver.FindElement(By.Id("radio")).Click();
            // If we don't throw, we're good
        }
        [Test]
        [IgnoreBrowser(Browser.IE, "IE has special overflow handling")]
        public void ShouldScrollOverflowElementsIfClickPointIsOutOfViewButElementIsInView()
        {
            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("scroll5.html");
            driver.FindElement(By.Id("inner")).Click();
            Assert.AreEqual("clicked", driver.FindElement(By.Id("clicked")).Text);
        }
        [Test]
        [IgnoreBrowser(Browser.Opera, "Opera fails.")]
        public void ShouldBeAbleToClickElementInAFrameThatIsOutOfView()
        {
            try
            {
                driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("scrolling_tests/page_with_frame_out_of_view.html");
                driver.SwitchTo().Frame("frame");
                IWebElement element = driver.FindElement(By.Name("checkbox"));
                element.Click();
                Assert.That(element.Selected, "Element is not selected");
            }
            finally
            {
                // Always restore the top-level browsing context for subsequent tests.
                driver.SwitchTo().DefaultContent();
            }
        }
        [Test]
        [IgnoreBrowser(Browser.Opera, "Opera fails.")]
        public void ShouldBeAbleToClickElementThatIsOutOfViewInAFrame()
        {
            try
            {
                driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("scrolling_tests/page_with_scrolling_frame.html");
                driver.SwitchTo().Frame("scrolling_frame");
                IWebElement element = driver.FindElement(By.Name("scroll_checkbox"));
                element.Click();
                Assert.That(element.Selected, "Element is not selected");
            }
            finally
            {
                driver.SwitchTo().DefaultContent();
            }
        }
        [Test]
        [Ignore("All tested browses scroll non-scrollable frames")]
        public void ShouldNotBeAbleToClickElementThatIsOutOfViewInANonScrollableFrame()
        {
            try
            {
                driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("scrolling_tests/page_with_non_scrolling_frame.html");
                driver.SwitchTo().Frame("scrolling_frame");
                IWebElement element = driver.FindElement(By.Name("scroll_checkbox"));
                Assert.That(() => element.Click(), Throws.InstanceOf<WebDriverException>());
            }
            finally
            {
                driver.SwitchTo().DefaultContent();
            }
        }
        [Test]
        [IgnoreBrowser(Browser.Opera, "Opera fails.")]
        public void ShouldBeAbleToClickElementThatIsOutOfViewInAFrameThatIsOutOfView()
        {
            try
            {
                driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("scrolling_tests/page_with_scrolling_frame_out_of_view.html");
                driver.SwitchTo().Frame("scrolling_frame");
                IWebElement element = driver.FindElement(By.Name("scroll_checkbox"));
                element.Click();
                Assert.That(element.Selected, "Element is not selected");
            }
            finally
            {
                driver.SwitchTo().DefaultContent();
            }
        }
        [Test]
        [IgnoreBrowser(Browser.Opera, "Opera fails.")]
        public void ShouldBeAbleToClickElementThatIsOutOfViewInANestedFrame()
        {
            try
            {
                driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("scrolling_tests/page_with_nested_scrolling_frames.html");
                driver.SwitchTo().Frame("scrolling_frame");
                driver.SwitchTo().Frame("nested_scrolling_frame");
                IWebElement element = driver.FindElement(By.Name("scroll_checkbox"));
                element.Click();
                Assert.That(element.Selected, "Element is not selected");
            }
            finally
            {
                driver.SwitchTo().DefaultContent();
            }
        }
        [Test]
        [IgnoreBrowser(Browser.Opera, "Opera fails.")]
        public void ShouldBeAbleToClickElementThatIsOutOfViewInANestedFrameThatIsOutOfView()
        {
            try
            {
                driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("scrolling_tests/page_with_nested_scrolling_frames_out_of_view.html");
                driver.SwitchTo().Frame("scrolling_frame");
                driver.SwitchTo().Frame("nested_scrolling_frame");
                IWebElement element = driver.FindElement(By.Name("scroll_checkbox"));
                element.Click();
                Assert.That(element.Selected, "Element is not selected");
            }
            finally
            {
                driver.SwitchTo().DefaultContent();
            }
        }
        [Test]
        public void ShouldNotScrollWhenGettingElementSize()
        {
            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("scroll3.html");
            long scrollTop = GetScrollTop();
            // Reading Size must be a pure query; it must not move the viewport.
            Size ignoredSize = driver.FindElement(By.Id("button1")).Size;
            Assert.AreEqual(scrollTop, GetScrollTop());
        }
        [Test]
        public void ShouldBeAbleToClickElementInATallFrame()
        {
            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("scrolling_tests/page_with_tall_frame.html");
            driver.SwitchTo().Frame("tall_frame");
            IWebElement element = driver.FindElement(By.Name("checkbox"));
            element.Click();
            Assert.That(element.Selected, "Element is not selected");
        }
        //------------------------------------------------------------------
        // Tests below here are not included in the Java test suite
        //------------------------------------------------------------------
        [Test]
        public void ShouldBeAbleToClickInlineTextElementWithChildElementAfterScrolling()
        {
            driver.Url = EnvironmentManager.Instance.UrlBuilder.CreateInlinePage(new InlinePage()
                .WithBody(
                    "<div style='height: 2000px;'>Force scroll needed</div><label id='wrapper'>wraps a checkbox <input id='check' type='checkbox' checked='checked'/></label>"));
            IWebElement label = driver.FindElement(By.Id("wrapper"));
            label.Click();
            IWebElement checkbox = driver.FindElement(By.Id("check"));
            // Clicking the label toggles the initially-checked checkbox off.
            Assert.IsFalse(checkbox.Selected, "Checkbox should not be selected after click");
        }
        // Returns the current vertical scroll offset of the document body.
        private long GetScrollTop()
        {
            return (long)((IJavaScriptExecutor)driver).ExecuteScript("return document.body.scrollTop;");
        }
        // Builds a WaitFor predicate that is satisfied once the browser title
        // equals the desired title exactly.
        private Func<bool> TitleToBe(string desiredTitle)
        {
            return () =>
            {
                return driver.Title == desiredTitle;
            };
        }
    }
}
| |
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
namespace Entitas.Migration {
public class M0360_2 : IMigration {
public string version { get { return "0.36.0-2"; } }
public string workingDirectory { get { return "where systems are located"; } }
public string description { get { return "Migrates systems"; } }
public MigrationFile[] Migrate(string path) {
var files = MigrationUtils.GetFiles(path);
for (int i = 0; i < files.Length; i++) {
var file = files[i];
file.fileContent = migrateBase(file.fileContent);
file.fileContent = migrateTrigger(file.fileContent);
file.fileContent = migrateToFilter(file.fileContent);
file.fileContent = migrateSetPoolsSetPool(file.fileContent);
file.fileContent = migrateExecute(file.fileContent);
}
return files;
}
string migrateBase(string fileContent) {
fileContent = removeBase(fileContent, "ISetPools");
fileContent = removeBase(fileContent, "ISetPool");
fileContent = removeBase(fileContent, "IEnsureComponents");
fileContent = removeBase(fileContent, "IExcludeComponents");
fileContent = renameBase(fileContent, "IReactiveSystem", "ReactiveSystem");
return fileContent;
}
string removeBase(string fileContent, string name) {
fileContent = Regex.Replace(fileContent, @"(,\s*)" + name, string.Empty);
fileContent = Regex.Replace(fileContent, @"(:\s*)" + name + @"(,\s*)", ": ");
fileContent = Regex.Replace(fileContent, @"(:\s*)" + name, string.Empty);
return fileContent;
}
string renameBase(string fileContent, string name, string replacement) {
fileContent = Regex.Replace(fileContent, @"(,\s*)" + name, ", " + replacement);
fileContent = Regex.Replace(fileContent, @"(:\s*)" + name + @"(,\s*)", ": " + replacement);
fileContent = Regex.Replace(fileContent, @"(:\s*)" + name, ": " + replacement);
return fileContent;
}
string migrateTrigger(string fileContent) {
const string triggerPattern = @"public(\s|\n)*TriggerOnEvent(\s|\n)*trigger(\s|\n)*{(\s|\n)*get(\s|\n)*{(\s|\n)*return(\s|\n)*(?<trigger>(.|\s|\n)*?})(\s|\n)*}";
const string triggerEventReplacement = "__ctor_placeholder__\n\n protected override Collector GetTrigger(Context context) {{\n return context.CreateCollector({0}, GroupEvent.{1});\n }}\n\n__filter_placeholder__";
const string triggerReplacement = "__ctor_placeholder__\n\n protected override Collector GetTrigger(Context context) {{\n return context.CreateCollector({0});\n }}\n\n__filter_placeholder__";
var oldTrigger = Regex.Match(fileContent, triggerPattern).Groups["trigger"].Value;
var groupEvent = Regex.Match(oldTrigger, @".OnEntity(?<event>\w*)").Groups["event"].Value;
var oldMatcher = Regex.Match(oldTrigger, @".*(?=.OnEntity)");
if (groupEvent == "Added") {
fileContent = Regex.Replace(fileContent, triggerPattern, match => string.Format(triggerReplacement, oldMatcher));
} else {
fileContent = Regex.Replace(fileContent, triggerPattern, match => string.Format(triggerEventReplacement, oldMatcher, groupEvent));
}
return fileContent;
}
string migrateToFilter(string fileContent) {
const string ensurePattern = @"public(\s|\n)*IMatcher(\s|\n)*ensureComponents(\s|\n)*{(\s|\n)*get(\s|\n)*{(\s|\n)*return(\s|\n)*(?<matcher>(.|\s|\n)*?);(\s|\n)*}(\s|\n)*}";
var ensureMatcher = Regex.Match(fileContent, ensurePattern).Groups["matcher"].Value;
const string excludePattern = @"public(\s|\n)*IMatcher(\s|\n)*excludeComponents(\s|\n)*{(\s|\n)*get(\s|\n)*{(\s|\n)*return(\s|\n)*(?<matcher>(.|\s|\n)*?);(\s|\n)*}(\s|\n)*}";
var excludeMatcher = Regex.Match(fileContent, excludePattern).Groups["matcher"].Value;
var ensureFilter = getFilter(ensureMatcher);
if (!string.IsNullOrEmpty(ensureFilter)) {
ensureFilter = "(" + ensureFilter + ")";
}
var excludeFilter = getFilter(excludeMatcher);
if (!string.IsNullOrEmpty(excludeFilter)) {
excludeFilter = "!(" + excludeFilter + ")";
}
var filter = "true";
if (ensureFilter != null) {
filter = ensureFilter;
}
if (excludeFilter != null) {
if (ensureFilter == null) {
filter = excludeFilter;
} else {
filter += " && " + excludeFilter;
}
}
const string filterReplacement =
@" protected override bool Filter(Entity entity) {{
// TODO Entitas 0.36.0 Migration
// ensure was: {0}
// exclude was: {1}
return {2};
}}";
fileContent = Regex.Replace(fileContent, ensurePattern, string.Empty);
fileContent = Regex.Replace(fileContent, excludePattern, string.Empty);
fileContent = Regex.Replace(fileContent, @"__filter_placeholder__", string.Format(filterReplacement, ensureMatcher, excludeMatcher, filter));
return fileContent;
}
string getFilter(string matcher) {
const string allOfPattern = @"AllOf(\s|\n)*\((\s|\n)*(?<matchers>(.|\s|\n)*?)\)";
const string anyOfPattern = @"AnyOf(\s|\n)*\((\s|\n)*(?<matchers>(.|\s|\n)*?)\)";
const string noneOfPattern = @"NoneOf(\s|\n)*\((\s|\n)*(?<matchers>(.|\s|\n)*?)\)";
var ensureAllOf = string.Empty;
if (Regex.IsMatch(matcher, allOfPattern)) {
ensureAllOf = string.Join(" && ", Regex.Match(matcher, allOfPattern).Groups["matchers"].Value
.Split(',')
.Select(m => m.Trim())
.Select(m => m.Split('.')[1])
.Select(m => "entity.has" + m)
.ToArray());
}
var ensureAnyOf = string.Empty;
if (Regex.IsMatch(matcher, anyOfPattern)) {
ensureAnyOf = string.Join(" || ", Regex.Match(matcher, anyOfPattern).Groups["matchers"].Value
.Split(',')
.Select(m => m.Trim())
.Select(m => m.Split('.')[1])
.Select(m => "entity.has" + m)
.ToArray());
}
var ensureNoneOf = string.Empty;
if (Regex.IsMatch(matcher, noneOfPattern)) {
ensureNoneOf = string.Join(" && !", Regex.Match(matcher, noneOfPattern).Groups["matchers"].Value
.Split(',')
.Select(m => m.Trim())
.Select(m => m.Split('.')[1])
.Select(m => "entity.has" + m)
.ToArray());
}
var filters = new List<string>();
if (!string.IsNullOrEmpty(ensureAllOf)) {
ensureAllOf = "(" + ensureAllOf + ")";
filters.Add(ensureAllOf);
}
if (!string.IsNullOrEmpty(ensureAnyOf)) {
ensureAnyOf = "(" + ensureAnyOf + ")";
filters.Add(ensureAnyOf);
}
if (!string.IsNullOrEmpty(ensureNoneOf)) {
ensureNoneOf = "(!" + ensureNoneOf + ")";
filters.Add(ensureNoneOf);
}
if (filters.Count == 0) {
if (Regex.IsMatch(matcher, @"\w*Matcher.\w*")) {
filters.Add(matcher.Split('.')[1]);
}
}
return filters.Count == 0
? null
: string.Join(" && ", filters.ToArray());
}
string migrateSetPoolsSetPool(string fileContent) {
const string setPoolsPattern = @"public(\s|\n)*void(\s|\n)*SetPools(\s|\n)*\((\s|\n)*Contexts(\s|\n)*pools(\s|\n)*\)(\s|\n)*{(\s|\n)*(?<logic>(.|\s|\n)*?)(\s|\n)*}";
var setPoolsLogic = Regex.Match(fileContent, setPoolsPattern).Groups["logic"].Value;
Regex.Replace(fileContent, setPoolsPattern, string.Empty);
const string setPoolPattern = @"public(\s|\n)*void(\s|\n)*SetPool(\s|\n)*\((\s|\n)*Context(\s|\n)*pool(\s|\n)*\)(\s|\n)*{(\s|\n)*(?<logic>(.|\s|\n)*?)(\s|\n)*}";
var setPoolLogic = Regex.Match(fileContent, setPoolPattern).Groups["logic"].Value;
Regex.Replace(fileContent, setPoolPattern, string.Empty);
var classNamePattern = @"public(\w|\s|\n)*class(\s|\n)(?<className>\w*)";
var className = Regex.Match(fileContent, classNamePattern).Groups["className"].Value;
const string constructorFormat =
@"public {0}(Contexts contexts) : base(context) {{
{1}
}}";
var construtorLogic = new List<string>();
if (!string.IsNullOrEmpty(setPoolsLogic)) {
construtorLogic.Add(" " + setPoolsLogic);
}
if (!string.IsNullOrEmpty(setPoolLogic)) {
construtorLogic.Add(" " + setPoolLogic);
}
if (fileContent.Contains("__ctor_placeholder__")) {
fileContent = fileContent.Replace(
"__ctor_placeholder__", string.Format(constructorFormat, className, string.Join(" \n", construtorLogic.ToArray()))
);
fileContent = Regex.Replace(fileContent, setPoolsPattern, string.Empty);
fileContent = Regex.Replace(fileContent, setPoolPattern, string.Empty);
} else {
fileContent = Regex.Replace(fileContent, setPoolsPattern, match => "// TODO Entitas 0.36.0 Migration (constructor)\n " + match.Value);
fileContent = Regex.Replace(fileContent, setPoolPattern, match => "// TODO Entitas 0.36.0 Migration (constructor)\n " + match.Value);
}
return fileContent;
}
string migrateExecute(string fileContent) {
const string reactiveSystemExecute = @"public(\s|\n)*void(\s|\n)*Execute(\s|\n)*\((\s|\n)*List";
const string reactiveSystemExecuteUsing = @"public(\s|\n)*void(\s|\n)*Execute(\s|\n)*\((\s|\n)*System.Collections.Generic.List";
if (Regex.IsMatch(fileContent, reactiveSystemExecuteUsing)) {
fileContent = Regex.Replace(
fileContent,
reactiveSystemExecute,
"protected override void Execute(System.Collections.Generic.List"
);
} else if (Regex.IsMatch(fileContent, reactiveSystemExecute)) {
fileContent = Regex.Replace(
fileContent,
reactiveSystemExecute,
"protected override void Execute(List"
);
}
return fileContent;
}
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="JsonSchemaGenerator.cs" company="NJsonSchema">
// Copyright (c) Rico Suter. All rights reserved.
// </copyright>
// <license>https://github.com/RicoSuter/NJsonSchema/blob/master/LICENSE.md</license>
// <author>Rico Suter, [email protected]</author>
//-----------------------------------------------------------------------
using Namotion.Reflection;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using Newtonsoft.Json.Linq;
using Newtonsoft.Json.Serialization;
using NJsonSchema.Annotations;
using NJsonSchema.Converters;
using NJsonSchema.Generation.TypeMappers;
using NJsonSchema.Infrastructure;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Runtime.Serialization;
namespace NJsonSchema.Generation
{
/// <summary>Generates a <see cref="JsonSchema"/> object for a given type. </summary>
public class JsonSchemaGenerator
{
        // Maps System.ComponentModel.DataAnnotations DataType names (as used by
        // [DataType(...)] attributes) to their JSON Schema "format" strings.
        private static readonly Dictionary<string, string> DataTypeFormats = new Dictionary<string, string>
        {
            {"DateTime", JsonFormatStrings.DateTime},
            {"Date", JsonFormatStrings.Date},
            {"Time", JsonFormatStrings.Time},
            {"EmailAddress", JsonFormatStrings.Email},
            {"PhoneNumber", JsonFormatStrings.Phone},
            {"Url", JsonFormatStrings.Uri}
        };
/// <summary>Initializes a new instance of the <see cref="JsonSchemaGenerator"/> class.</summary>
/// <param name="settings">The settings.</param>
public JsonSchemaGenerator(JsonSchemaGeneratorSettings settings)
{
Settings = settings;
}
        /// <summary>Gets the generator settings supplied at construction time.</summary>
        public JsonSchemaGeneratorSettings Settings { get; }
        /// <summary>Generates a <see cref="JsonSchema" /> object for the given type,
        /// using a new, internally created <see cref="JsonSchemaResolver"/> rooted at the result.</summary>
        /// <param name="type">The type.</param>
        /// <returns>The schema.</returns>
        /// <exception cref="InvalidOperationException">Could not find value type of dictionary type.</exception>
        public JsonSchema Generate(Type type)
        {
            var schema = new JsonSchema();
            // The resolver is rooted at `schema` so referenced definitions land inside it.
            var schemaResolver = new JsonSchemaResolver(schema, Settings);
            Generate(schema, type.ToContextualType(), schemaResolver);
            return schema;
        }
/// <summary>Generates a <see cref="JsonSchema" /> object for the given type and adds the mapping to the given resolver.</summary>
/// <param name="type">The type.</param>
/// <param name="schemaResolver">The schema resolver.</param>
/// <returns>The schema.</returns>
/// <exception cref="InvalidOperationException">Could not find value type of dictionary type.</exception>
public JsonSchema Generate(Type type, JsonSchemaResolver schemaResolver)
{
return Generate<JsonSchema>(type, schemaResolver);
}
/// <summary>Generates a <see cref="JsonSchema" /> object for the given type and adds the mapping to the given resolver.</summary>
/// <param name="type">The type.</param>
/// <param name="schemaResolver">The schema resolver.</param>
/// <returns>The schema.</returns>
/// <exception cref="InvalidOperationException">Could not find value type of dictionary type.</exception>
public TSchemaType Generate<TSchemaType>(Type type, JsonSchemaResolver schemaResolver)
where TSchemaType : JsonSchema, new()
{
return Generate<TSchemaType>(type.ToContextualType(), schemaResolver);
}
/// <summary>Generates a <see cref="JsonSchema" /> object for the given type and adds the mapping to the given resolver.</summary>
/// <param name="contextualType">The type.</param>
/// <param name="schemaResolver">The schema resolver.</param>
/// <returns>The schema.</returns>
/// <exception cref="InvalidOperationException">Could not find value type of dictionary type.</exception>
public JsonSchema Generate(ContextualType contextualType, JsonSchemaResolver schemaResolver)
{
return Generate<JsonSchema>(contextualType, schemaResolver);
}
/// <summary>Generates a <see cref="JsonSchema" /> object for the given type and adds the mapping to the given resolver.</summary>
/// <param name="contextualType">The type.</param>
/// <param name="schemaResolver">The schema resolver.</param>
/// <returns>The schema.</returns>
/// <exception cref="InvalidOperationException">Could not find value type of dictionary type.</exception>
public TSchemaType Generate<TSchemaType>(ContextualType contextualType, JsonSchemaResolver schemaResolver)
where TSchemaType : JsonSchema, new()
{
var schema = new TSchemaType();
Generate(schema, contextualType, schemaResolver);
return schema;
}
/// <summary>Generates into the given <see cref="JsonSchema" /> object for the given type and adds the mapping to the given resolver.</summary>
/// <typeparam name="TSchemaType">The type of the schema.</typeparam>
/// <param name="schema">The schema.</param>
/// <param name="type">The type.</param>
/// <param name="schemaResolver">The schema resolver.</param>
/// <returns>The schema.</returns>
/// <exception cref="InvalidOperationException">Could not find value type of dictionary type.</exception>
public void Generate<TSchemaType>(TSchemaType schema, Type type, JsonSchemaResolver schemaResolver)
where TSchemaType : JsonSchema, new()
{
// This overload should not be used in this library directly
Generate(schema, type.ToContextualType(), schemaResolver);
}
        /// <summary>Generates into the given <see cref="JsonSchema" /> object for the given type and adds the mapping to the given resolver.</summary>
        /// <typeparam name="TSchemaType">The type of the schema.</typeparam>
        /// <param name="schema">The schema.</param>
        /// <param name="contextualType">The type.</param>
        /// <param name="schemaResolver">The schema resolver.</param>
        /// <returns>The schema.</returns>
        /// <exception cref="InvalidOperationException">Could not find value type of dictionary type.</exception>
        public virtual void Generate<TSchemaType>(TSchemaType schema, ContextualType contextualType, JsonSchemaResolver schemaResolver)
            where TSchemaType : JsonSchema, new()
        {
            var typeDescription = Settings.ReflectionService.GetDescription(contextualType, Settings);
            ApplyTypeExtensionDataAttributes(schema, contextualType);

            // Special types (type mappers etc.) short-circuit the normal generation path;
            // schema processors still run on the result.
            if (TryHandleSpecialTypes(schema, typeDescription.ContextualType, schemaResolver))
            {
                ApplySchemaProcessors(schema, typeDescription.ContextualType, schemaResolver);
                return;
            }

            // Only the root schema gets a title derived from the CLR type name.
            if (schemaResolver.RootObject == schema)
            {
                schema.Title = Settings.SchemaNameGenerator.Generate(typeDescription.ContextualType.OriginalType);
            }

            if (typeDescription.Type.IsObject())
            {
                if (typeDescription.IsDictionary)
                {
                    GenerateDictionary(schema, typeDescription, schemaResolver);
                }
                else
                {
                    if (schemaResolver.HasSchema(typeDescription.ContextualType.OriginalType, false))
                    {
                        // Already generated: emit a $ref to the existing definition.
                        schema.Reference = schemaResolver.GetSchema(typeDescription.ContextualType.OriginalType, false);
                    }
                    else if (schema.GetType() == typeof(JsonSchema))
                    {
                        // Plain JsonSchema target: generate the object schema in place.
                        GenerateObject(schema, typeDescription, schemaResolver);
                    }
                    else
                    {
                        // Derived schema type (e.g. a property): generate the object
                        // schema separately and reference it.
                        schema.Reference = Generate(typeDescription.ContextualType, schemaResolver);
                    }
                }
            }
            else if (typeDescription.IsEnum)
            {
                GenerateEnum(schema, typeDescription, schemaResolver);
            }
            else if (typeDescription.Type.IsArray()) // TODO: Add support for tuples?
            {
                GenerateArray(schema, typeDescription, schemaResolver);
            }
            else
            {
                // Primitive/simple type: just set type/format from the description.
                typeDescription.ApplyType(schema);
            }

            // Run processors for the resolved contextual type when the reflection
            // service substituted a different one, then for the requested type.
            if (contextualType != typeDescription.ContextualType)
            {
                ApplySchemaProcessors(schema, typeDescription.ContextualType, schemaResolver);
            }

            ApplySchemaProcessors(schema, contextualType, schemaResolver);
        }
/// <summary>Generetes a schema directly or referenced for the requested schema type;
/// does NOT change nullability.</summary>
/// <typeparam name="TSchemaType">The resulted schema type which may reference the actual schema.</typeparam>
/// <param name="contextualType">The type of the schema to generate.</param>
/// <param name="schemaResolver">The schema resolver.</param>
/// <param name="transformation">An action to transform the resulting schema (e.g. property or parameter) before the type of reference is determined (with $ref or allOf/oneOf).</param>
/// <returns>The requested schema object.</returns>
public TSchemaType GenerateWithReference<TSchemaType>(
ContextualType contextualType,
JsonSchemaResolver schemaResolver,
Action<TSchemaType, JsonSchema> transformation = null)
where TSchemaType : JsonSchema, new()
{
return GenerateWithReferenceAndNullability(contextualType, false, schemaResolver, transformation);
}
/// <summary>Generetes a schema directly or referenced for the requested schema type;
/// also adds nullability if required by looking at the type's <see cref="JsonTypeDescription" />.</summary>
/// <typeparam name="TSchemaType">The resulted schema type which may reference the actual schema.</typeparam>
/// <param name="contextualType">The type of the schema to generate.</param>
/// <param name="schemaResolver">The schema resolver.</param>
/// <param name="transformation">An action to transform the resulting schema (e.g. property or parameter) before the type of reference is determined (with $ref or allOf/oneOf).</param>
/// <returns>The requested schema object.</returns>
public TSchemaType GenerateWithReferenceAndNullability<TSchemaType>(
ContextualType contextualType, JsonSchemaResolver schemaResolver,
Action<TSchemaType, JsonSchema> transformation = null)
where TSchemaType : JsonSchema, new()
{
var typeDescription = Settings.ReflectionService.GetDescription(contextualType, Settings);
return GenerateWithReferenceAndNullability(contextualType, typeDescription.IsNullable, schemaResolver, transformation);
}
/// <summary>Generates a schema directly or referenced for the requested schema type; also adds nullability if required.</summary>
/// <typeparam name="TSchemaType">The resulted schema type which may reference the actual schema.</typeparam>
/// <param name="contextualType">The type of the schema to generate.</param>
/// <param name="isNullable">Specifies whether the property, parameter or requested schema type is nullable.</param>
/// <param name="schemaResolver">The schema resolver.</param>
/// <param name="transformation">An action to transform the resulting schema (e.g. property or parameter) before the type of reference is determined (with $ref or allOf/oneOf).</param>
/// <returns>The requested schema object.</returns>
public virtual TSchemaType GenerateWithReferenceAndNullability<TSchemaType>(
ContextualType contextualType, bool isNullable, JsonSchemaResolver schemaResolver,
Action<TSchemaType, JsonSchema> transformation = null)
where TSchemaType : JsonSchema, new()
{
var typeDescription = Settings.ReflectionService.GetDescription(contextualType, Settings);
var requiresSchemaReference = typeDescription.RequiresSchemaReference(Settings.TypeMappers);
JsonSchema referencedSchema;
if (!requiresSchemaReference)
{
var schema = Generate<TSchemaType>(typeDescription.ContextualType, schemaResolver);
if (!schema.HasReference)
{
transformation?.Invoke(schema, schema);
if (isNullable)
{
if (Settings.SchemaType == SchemaType.JsonSchema)
{
if (schema.Type == JsonObjectType.None)
{
schema._oneOf.Add(new JsonSchema { Type = JsonObjectType.None });
schema._oneOf.Add(new JsonSchema { Type = JsonObjectType.Null });
}
else
{
schema.Type = schema.Type | JsonObjectType.Null;
}
}
else if (Settings.SchemaType == SchemaType.OpenApi3 || Settings.GenerateCustomNullableProperties)
{
schema.IsNullableRaw = isNullable;
}
}
return schema;
}
else // TODO: Is this else needed?
{
referencedSchema = schema.ActualSchema;
}
}
else
{
referencedSchema = Generate<JsonSchema>(typeDescription.ContextualType, schemaResolver);
}
var referencingSchema = new TSchemaType();
transformation?.Invoke(referencingSchema, referencedSchema);
if (isNullable)
{
if (Settings.SchemaType == SchemaType.JsonSchema)
{
referencingSchema._oneOf.Add(new JsonSchema { Type = JsonObjectType.Null });
}
else if (Settings.SchemaType == SchemaType.OpenApi3 || Settings.GenerateCustomNullableProperties)
{
referencingSchema.IsNullableRaw = true;
}
}
// See https://github.com/RicoSuter/NJsonSchema/issues/531
var useDirectReference = Settings.AllowReferencesWithProperties ||
!JsonConvert.DeserializeObject<JObject>(JsonConvert.SerializeObject(referencingSchema)).Properties().Any(); // TODO: Improve performance
if (useDirectReference && referencingSchema._oneOf.Count == 0)
{
referencingSchema.Reference = referencedSchema.ActualSchema;
}
else if (Settings.SchemaType != SchemaType.Swagger2)
{
referencingSchema._oneOf.Add(new JsonSchema
{
Reference = referencedSchema.ActualSchema
});
}
else
{
referencingSchema._allOf.Add(new JsonSchema
{
Reference = referencedSchema.ActualSchema
});
}
return referencingSchema;
}
/// <summary>Gets the converted property name.</summary>
/// <param name="jsonProperty">The Newtonsoft JSON property (may carry an explicit name).</param>
/// <param name="accessorInfo">The accessor info used as fallback for name resolution.</param>
/// <returns>The property name.</returns>
/// <exception cref="InvalidOperationException">The property name could not be resolved.</exception>
public virtual string GetPropertyName(JsonProperty jsonProperty, ContextualAccessorInfo accessorInfo)
{
    // An explicitly resolved JSON property name always wins.
    var explicitName = jsonProperty?.PropertyName;
    if (explicitName != null)
    {
        return explicitName;
    }
    try
    {
        // Otherwise resolve via the contract resolver (e.g. camel-casing), if one is configured.
        var reflectedName = accessorInfo.GetName();
        return Settings.ActualContractResolver is DefaultContractResolver resolver
            ? resolver.GetResolvedPropertyName(reflectedName)
            : reflectedName;
    }
    catch (Exception e)
    {
        var memberName = accessorInfo != null ? accessorInfo.Name : "n/a";
        var declaringTypeName = accessorInfo?.MemberInfo?.DeclaringType != null
            ? accessorInfo.MemberInfo.DeclaringType.FullName
            : "n/a";
        throw new InvalidOperationException("Could not get JSON property name of property '" +
            memberName + "' and type '" + declaringTypeName + "'.", e);
    }
}
/// <summary>Applies the property annotations to the JSON property.</summary>
/// <param name="schema">The schema.</param>
/// <param name="typeDescription">The property type description.</param>
/// <remarks>Data annotation attributes are looked up by type name (via dynamic access)
/// so no compile-time dependency on System.ComponentModel.DataAnnotations is needed.</remarks>
public virtual void ApplyDataAnnotations(JsonSchema schema, JsonTypeDescription typeDescription)
{
var contextualType = typeDescription.ContextualType;
// [Display(Name = ...)] -> schema title.
dynamic displayAttribute = contextualType.ContextAttributes.FirstAssignableToTypeNameOrDefault("System.ComponentModel.DataAnnotations.DisplayAttribute");
if (displayAttribute != null)
{
// GetName returns null if the Name property on the attribute is not specified.
var name = displayAttribute.GetName();
if (name != null)
{
schema.Title = name;
}
}
// [DefaultValue(...)] -> schema default (string enums get the value's string form).
dynamic defaultValueAttribute = contextualType.ContextAttributes.FirstAssignableToTypeNameOrDefault("System.ComponentModel.DefaultValueAttribute");
if (defaultValueAttribute != null)
{
if (typeDescription.IsEnum &&
typeDescription.Type.IsString())
{
schema.Default = defaultValueAttribute.Value?.ToString();
}
else
{
schema.Default = defaultValueAttribute.Value;
}
}
// [RegularExpression(...)] -> pattern (on the value schema for dictionaries).
dynamic regexAttribute = contextualType.ContextAttributes.FirstAssignableToTypeNameOrDefault("System.ComponentModel.DataAnnotations.RegularExpressionAttribute");
if (regexAttribute != null)
{
if (typeDescription.IsDictionary)
{
schema.AdditionalPropertiesSchema.Pattern = regexAttribute.Pattern;
}
else
{
schema.Pattern = regexAttribute.Pattern;
}
}
// [Range]/[MultipleOf] only make sense for numeric types.
if (typeDescription.Type == JsonObjectType.Number ||
typeDescription.Type == JsonObjectType.Integer)
{
ApplyRangeAttribute(schema, contextualType.ContextAttributes);
var multipleOfAttribute = contextualType.ContextAttributes.OfType<MultipleOfAttribute>().SingleOrDefault();
if (multipleOfAttribute != null)
{
schema.MultipleOf = multipleOfAttribute.MultipleOf;
}
}
// [MinLength] -> minLength for strings, minItems for arrays.
dynamic minLengthAttribute = contextualType.ContextAttributes.FirstAssignableToTypeNameOrDefault("System.ComponentModel.DataAnnotations.MinLengthAttribute");
if (minLengthAttribute != null && minLengthAttribute.Length != null)
{
if (typeDescription.Type == JsonObjectType.String)
{
schema.MinLength = minLengthAttribute.Length;
}
else if (typeDescription.Type == JsonObjectType.Array)
{
schema.MinItems = minLengthAttribute.Length;
}
}
// [MaxLength] -> maxLength for strings, maxItems for arrays.
dynamic maxLengthAttribute = contextualType.ContextAttributes.FirstAssignableToTypeNameOrDefault("System.ComponentModel.DataAnnotations.MaxLengthAttribute");
if (maxLengthAttribute != null && maxLengthAttribute.Length != null)
{
if (typeDescription.Type == JsonObjectType.String)
{
schema.MaxLength = maxLengthAttribute.Length;
}
else if (typeDescription.Type == JsonObjectType.Array)
{
schema.MaxItems = maxLengthAttribute.Length;
}
}
// [StringLength] -> min/max length (strings only).
dynamic stringLengthAttribute = contextualType.ContextAttributes.FirstAssignableToTypeNameOrDefault("System.ComponentModel.DataAnnotations.StringLengthAttribute");
if (stringLengthAttribute != null)
{
if (typeDescription.Type == JsonObjectType.String)
{
schema.MinLength = stringLengthAttribute.MinimumLength;
schema.MaxLength = stringLengthAttribute.MaximumLength;
}
}
// [DataType(...)] -> schema format, via the DataTypeFormats lookup table.
dynamic dataTypeAttribute = contextualType.ContextAttributes.FirstAssignableToTypeNameOrDefault("System.ComponentModel.DataAnnotations.DataTypeAttribute");
if (dataTypeAttribute != null)
{
var dataType = dataTypeAttribute.DataType.ToString();
if (DataTypeFormats.ContainsKey(dataType))
{
schema.Format = DataTypeFormats[dataType];
}
}
}
/// <summary>Gets the actual default value for the given object (e.g. correctly converts enums).</summary>
/// <param name="type">The value type.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>The converted default value.</returns>
public virtual object ConvertDefaultValue(ContextualType type, object defaultValue)
{
    // Non-enum values (and null) pass through unchanged.
    if (defaultValue == null || !defaultValue.GetType().GetTypeInfo().IsEnum)
    {
        return defaultValue;
    }
    // Enums serialize as their name when a string enum converter applies, otherwise as their integer value.
    return Settings.ReflectionService.IsStringEnum(type, Settings.ActualSerializerSettings)
        ? (object)defaultValue.ToString()
        : (int)defaultValue;
}
/// <summary>Generates the example from the type's xml docs.</summary>
/// <param name="type">The type.</param>
/// <returns>The JToken or null.</returns>
public virtual object GenerateExample(ContextualType type)
{
    // Examples require both settings to be enabled.
    if (!Settings.GenerateExamples || !Settings.UseXmlDocumentation)
    {
        return null;
    }
    try
    {
        var exampleDocs = type.GetXmlDocsTag("example", Settings.ResolveExternalXmlDocumentation);
        return GenerateExample(exampleDocs);
    }
    catch
    {
        // XML docs lookup is best-effort; any failure yields no example.
        return null;
    }
}
/// <summary>Generates the example from the accessor's xml docs.</summary>
/// <param name="accessorInfo">The accessor info.</param>
/// <returns>The JToken or null.</returns>
public virtual object GenerateExample(ContextualAccessorInfo accessorInfo)
{
    // Examples require both settings to be enabled.
    if (!Settings.GenerateExamples || !Settings.UseXmlDocumentation)
    {
        return null;
    }
    try
    {
        var exampleDocs = accessorInfo.GetXmlDocsTag("example", Settings.ResolveExternalXmlDocumentation);
        return GenerateExample(exampleDocs);
    }
    catch
    {
        // XML docs lookup is best-effort; any failure yields no example.
        return null;
    }
}
// Parses the xml docs text as JSON; falls back to the raw string when it is not valid JSON.
private object GenerateExample(string xmlDocs)
{
    if (string.IsNullOrEmpty(xmlDocs))
    {
        return null;
    }
    try
    {
        return JsonConvert.DeserializeObject<JToken>(xmlDocs);
    }
    catch
    {
        // Not JSON: use the docs text verbatim as the example.
        return xmlDocs;
    }
}
/// <summary>Generates the properties for the given type and schema.</summary>
/// <param name="schema">The schema to fill.</param>
/// <param name="typeDescription">The type description.</param>
/// <param name="schemaResolver">The schema resolver.</param>
protected virtual void GenerateObject(JsonSchema schema, JsonTypeDescription typeDescription, JsonSchemaResolver schemaResolver)
{
var type = typeDescription.ContextualType.Type;
// Register the schema before generating members to support recursive/self-referencing types.
schemaResolver.AddSchema(type, false, schema);
var rootSchema = schema;
// GenerateInheritance returns a derived schema when allOf inheritance was applied, otherwise null.
var actualSchema = GenerateInheritance(typeDescription.ContextualType, schema, schemaResolver);
if (actualSchema != null)
{
schema = actualSchema;
}
else
{
// No allOf inheritance: generate the type's own properties directly into the schema.
GenerateProperties(type, schema, schemaResolver);
ApplyAdditionalProperties(schema, type, schemaResolver);
}
if (!schema.Type.IsArray())
{
typeDescription.ApplyType(schema);
}
schema.Description = type.ToCachedType().GetDescription(Settings);
schema.Example = GenerateExample(type.ToContextualType());
// [Obsolete] -> deprecation metadata on the schema.
dynamic obsoleteAttribute = type.GetTypeInfo().GetCustomAttributes(false).FirstAssignableToTypeNameOrDefault("System.ObsoleteAttribute");
if (obsoleteAttribute != null)
{
schema.IsDeprecated = true;
schema.DeprecatedMessage = obsoleteAttribute.Message;
}
if (Settings.GetActualGenerateAbstractSchema(type))
{
schema.IsAbstract = type.GetTypeInfo().IsAbstract;
}
GenerateInheritanceDiscriminator(type, rootSchema, schema);
GenerateKnownTypes(type, schemaResolver);
if (Settings.GenerateXmlObjects)
{
schema.GenerateXmlObjectForType(type);
}
}
/// <summary>Gets the properties of the given type or null to take all properties.</summary>
/// <param name="type">The type.</param>
/// <returns>The property names or null for all.</returns>
protected virtual string[] GetTypeProperties(Type type)
{
    // Exception gets a fixed whitelist; every other type uses all properties.
    return type == typeof(Exception)
        ? new[] { "InnerException", "Message", "Source", "StackTrace" }
        : null;
}
/// <summary>Generates an array in the given schema.</summary>
/// <typeparam name="TSchemaType">The schema type.</typeparam>
/// <param name="schema">The schema.</param>
/// <param name="typeDescription">The type description.</param>
/// <param name="schemaResolver">The schema resolver.</param>
protected virtual void GenerateArray<TSchemaType>(
TSchemaType schema, JsonTypeDescription typeDescription, JsonSchemaResolver schemaResolver)
where TSchemaType : JsonSchema, new()
{
var contextualType = typeDescription.ContextualType;
typeDescription.ApplyType(schema);
// Item type resolution order: [JsonSchema(ArrayItem = ...)] override, then the
// IEnumerable<T> item type, then the first generic argument.
var jsonSchemaAttribute = contextualType.GetInheritedAttribute<JsonSchemaAttribute>();
var itemType = jsonSchemaAttribute?.ArrayItem.ToContextualType() ??
contextualType.EnumerableItemType ??
contextualType.GenericArguments.FirstOrDefault();
if (itemType != null)
{
// Items are nullable when marked with [ItemsCanBeNull] or by nullable reference type annotation.
var itemIsNullable = contextualType.GetContextAttribute<ItemsCanBeNullAttribute>() != null ||
itemType.Nullability == Nullability.Nullable;
schema.Item = GenerateWithReferenceAndNullability<JsonSchema>(
itemType, itemIsNullable, schemaResolver, (itemSchema, typeSchema) =>
{
if (Settings.GenerateXmlObjects)
{
itemSchema.GenerateXmlObjectForItemType(itemType);
}
});
if (Settings.GenerateXmlObjects)
{
schema.GenerateXmlObjectForArrayType();
}
}
else
{
// Unknown item type (e.g. non-generic IEnumerable): allow any item.
schema.Item = JsonSchema.CreateAnySchema();
}
// [MinLength]/[MaxLength] map to minItems/maxItems; matched by short name and
// probed dynamically, so any attribute named *LengthAttribute with a Length property works.
dynamic minLengthAttribute = contextualType.ContextAttributes.FirstAssignableToTypeNameOrDefault("MinLengthAttribute", TypeNameStyle.Name);
if (minLengthAttribute != null && ObjectExtensions.HasProperty(minLengthAttribute, "Length"))
{
schema.MinItems = minLengthAttribute.Length;
}
dynamic maxLengthAttribute = contextualType.ContextAttributes.FirstAssignableToTypeNameOrDefault("MaxLengthAttribute", TypeNameStyle.Name);
if (maxLengthAttribute != null && ObjectExtensions.HasProperty(maxLengthAttribute, "Length"))
{
schema.MaxItems = maxLengthAttribute.Length;
}
}
/// <summary>Generates a dictionary in the given schema.</summary>
/// <typeparam name="TSchemaType">The schema type.</typeparam>
/// <param name="schema">The schema.</param>
/// <param name="typeDescription">The type description.</param>
/// <param name="schemaResolver">The schema resolver.</param>
/// <exception cref="InvalidOperationException">Could not find value type of dictionary type.</exception>
protected virtual void GenerateDictionary<TSchemaType>(TSchemaType schema, JsonTypeDescription typeDescription, JsonSchemaResolver schemaResolver)
where TSchemaType : JsonSchema, new()
{
var contextualType = typeDescription.ContextualType;
typeDescription.ApplyType(schema);
// Non-generic dictionaries default to string keys and object values.
var genericTypeArguments = contextualType.GenericArguments;
var keyType = genericTypeArguments.Length == 2 ? genericTypeArguments[0] : typeof(string).ToContextualType();
if (keyType.OriginalType.GetTypeInfo().IsEnum)
{
// Enum keys constrain the allowed property names via dictionaryKey.
schema.DictionaryKey = GenerateWithReference<JsonSchema>(keyType, schemaResolver);
}
var valueType = genericTypeArguments.Length == 2 ? genericTypeArguments[1] : typeof(object).ToContextualType();
var patternPropertiesAttributes = contextualType.ContextAttributes.OfType<JsonSchemaPatternPropertiesAttribute>();
if (patternPropertiesAttributes.Any())
{
// [JsonSchemaPatternProperties] replaces additionalProperties with patternProperties.
schema.AllowAdditionalProperties = false;
foreach (var patternPropertiesAttribute in patternPropertiesAttributes)
{
var property = GenerateDictionaryValueSchema<JsonSchemaProperty>(
schemaResolver, patternPropertiesAttribute.Type?.ToContextualType() ?? valueType);
schema.PatternProperties.Add(patternPropertiesAttribute.RegularExpression, property);
}
}
else
{
schema.AdditionalPropertiesSchema = GenerateDictionaryValueSchema<JsonSchema>(schemaResolver, valueType);
schema.AllowAdditionalProperties = true;
}
// [MinLength]/[MaxLength] map to minProperties/maxProperties; matched by short name.
dynamic minLengthAttribute = contextualType.ContextAttributes.FirstAssignableToTypeNameOrDefault("MinLengthAttribute", TypeNameStyle.Name);
if (minLengthAttribute != null && ObjectExtensions.HasProperty(minLengthAttribute, "Length"))
{
schema.MinProperties = minLengthAttribute.Length;
}
dynamic maxLengthAttribute = contextualType.ContextAttributes.FirstAssignableToTypeNameOrDefault("MaxLengthAttribute", TypeNameStyle.Name);
if (maxLengthAttribute != null && ObjectExtensions.HasProperty(maxLengthAttribute, "Length"))
{
schema.MaxProperties = maxLengthAttribute.Length;
}
}
/// <summary>Generates an enumeration in the given schema.</summary>
/// <param name="schema">The schema.</param>
/// <param name="typeDescription">The type description.</param>
protected virtual void GenerateEnum(JsonSchema schema, JsonTypeDescription typeDescription)
{
var contextualType = typeDescription.ContextualType;
schema.Type = typeDescription.Type;
schema.Enumeration.Clear();
schema.EnumerationNames.Clear();
schema.IsFlagEnumerable = contextualType.GetInheritedAttribute<FlagsAttribute>() != null;
var underlyingType = Enum.GetUnderlyingType(contextualType.Type);
// Ensure a StringEnumConverter is available so string serialization below always works.
var converters = Settings.ActualSerializerSettings.Converters.ToList();
if (!converters.OfType<StringEnumConverter>().Any())
{
converters.Add(new StringEnumConverter());
}
foreach (var enumName in Enum.GetNames(contextualType.Type))
{
if (typeDescription.Type == JsonObjectType.Integer)
{
// Integer enums: emit the underlying numeric value.
var value = Convert.ChangeType(Enum.Parse(contextualType.Type, enumName), underlyingType);
schema.Enumeration.Add(value);
}
else
{
// EnumMember only checked if StringEnumConverter is used
var attributes = contextualType.Type.GetRuntimeField(enumName).GetCustomAttributes();
dynamic enumMemberAttribute = attributes.FirstAssignableToTypeNameOrDefault("System.Runtime.Serialization.EnumMemberAttribute");
if (enumMemberAttribute != null && !string.IsNullOrEmpty(enumMemberAttribute.Value))
{
schema.Enumeration.Add((string)enumMemberAttribute.Value);
}
else
{
// Round-trip through the serializer so the emitted string matches runtime serialization.
var value = Enum.Parse(contextualType.Type, enumName);
var json = JsonConvert.SerializeObject(value, Formatting.None, converters.ToArray());
schema.Enumeration.Add(JsonConvert.DeserializeObject<string>(json));
}
}
schema.EnumerationNames.Add(enumName);
}
// Optionally document the value-to-name mapping for integer enums.
if (typeDescription.Type == JsonObjectType.Integer && Settings.GenerateEnumMappingDescription)
{
schema.Description = (schema.Description + "\n\n" +
string.Join("\n", schema.Enumeration.Select((e, i) => e + " = " + schema.EnumerationNames[i]))).Trim();
}
}
// Generates the schema for a dictionary's value type (used for additionalProperties/patternProperties).
private TSchema GenerateDictionaryValueSchema<TSchema>(JsonSchemaResolver schemaResolver, ContextualType valueType)
    where TSchema : JsonSchema, new()
{
    if (valueType.OriginalType != typeof(object))
    {
        // Typed values: generate a (possibly referenced and nullable) schema for the value type.
        var valueDescription = Settings.ReflectionService.GetDescription(
            valueType, Settings.DefaultDictionaryValueReferenceTypeNullHandling, Settings);
        var valueIsNullable = valueType.GetContextAttribute<ItemsCanBeNullAttribute>() != null ||
            valueDescription.IsNullable;
        return GenerateWithReferenceAndNullability<TSchema>(valueType, valueIsNullable, schemaResolver);
    }
    // Untyped (object) values: an unconstrained schema; Swagger 2 disallows nested additionalProperties.
    var anyValueSchema = new TSchema();
    if (Settings.SchemaType == SchemaType.Swagger2)
    {
        anyValueSchema.AllowAdditionalProperties = false;
    }
    return anyValueSchema;
}
// Configures additionalProperties for the schema based on a [JsonExtensionData] property, if any.
private void ApplyAdditionalProperties<TSchemaType>(TSchemaType schema, Type type, JsonSchemaResolver schemaResolver)
    where TSchemaType : JsonSchema, new()
{
    // Look for a property marked with a JsonExtensionData attribute (matched by short type name).
    var extensionDataProperty = type.GetContextualProperties()
        .FirstOrDefault(property => property.ContextAttributes.Any(attribute =>
            Namotion.Reflection.TypeExtensions.IsAssignableToTypeName(attribute.GetType(), "JsonExtensionDataAttribute", TypeNameStyle.Name)));
    if (extensionDataProperty == null)
    {
        schema.AllowAdditionalProperties = Settings.AlwaysAllowAdditionalObjectProperties;
        return;
    }
    // The extension data dictionary's value type defines the additionalProperties schema.
    var genericArguments = extensionDataProperty.AccessorType.GenericArguments;
    var extensionValueType = genericArguments.Length == 2 ? genericArguments[1] : typeof(object).ToContextualType();
    schema.AdditionalPropertiesSchema = GenerateWithReferenceAndNullability<JsonSchema>(
        extensionValueType, schemaResolver);
}
// Runs all configured and attribute-declared schema processors on the generated schema.
private void ApplySchemaProcessors(JsonSchema schema, ContextualType contextualType, JsonSchemaResolver schemaResolver)
{
    var context = new SchemaProcessorContext(contextualType.OriginalType, schema, schemaResolver, this, Settings);
    // Globally registered processors first...
    foreach (var registeredProcessor in Settings.SchemaProcessors)
    {
        registeredProcessor.Process(context);
    }
    // ...then processors declared via [JsonSchemaProcessor] attributes on the type.
    var processorAttributes = contextualType
        .InheritedAttributes
        .GetAssignableToTypeName(nameof(JsonSchemaProcessorAttribute), TypeNameStyle.Name);
    foreach (dynamic processorAttribute in processorAttributes)
    {
        dynamic attributeProcessor = Activator.CreateInstance(processorAttribute.Type, processorAttribute.Parameters);
        attributeProcessor.Process(context);
    }
}
// Handles types with custom type mappers and untyped/JToken-like types; returns true when handled.
private bool TryHandleSpecialTypes<TSchemaType>(TSchemaType schema, ContextualType contextualType, JsonSchemaResolver schemaResolver)
    where TSchemaType : JsonSchema, new()
{
    var originalType = contextualType.OriginalType;
    // A mapper registered for the exact type wins; otherwise try its generic type definition.
    var typeMapper = Settings.TypeMappers.FirstOrDefault(m => m.MappedType == originalType);
    if (typeMapper == null && originalType.GetTypeInfo().IsGenericType)
    {
        var genericTypeDefinition = originalType.GetGenericTypeDefinition();
        typeMapper = Settings.TypeMappers.FirstOrDefault(m => m.MappedType == genericTypeDefinition);
    }
    if (typeMapper != null)
    {
        var context = new TypeMapperContext(originalType, this, schemaResolver, contextualType.ContextAttributes);
        typeMapper.GenerateSchema(schema, context);
        return true;
    }
    // JToken-like types (but not JArray) and System.Object become unconstrained schemas.
    var isUntyped = !originalType.IsAssignableToTypeName(nameof(JArray), TypeNameStyle.Name) &&
        (originalType.IsAssignableToTypeName(nameof(JToken), TypeNameStyle.Name) ||
         originalType == typeof(object));
    if (isUntyped)
    {
        if (Settings.SchemaType == SchemaType.Swagger2)
        {
            schema.AllowAdditionalProperties = false;
        }
        return true;
    }
    return false;
}
// Generates an enum schema inline or as a reference, depending on the requested schema type
// and whether the enum was generated before.
private void GenerateEnum<TSchemaType>(
    TSchemaType schema, JsonTypeDescription typeDescription, JsonSchemaResolver schemaResolver)
    where TSchemaType : JsonSchema, new()
{
    var enumType = typeDescription.ContextualType.Type;
    var isIntegerEnumeration = typeDescription.Type == JsonObjectType.Integer;
    if (schemaResolver.HasSchema(enumType, isIntegerEnumeration))
    {
        // Already generated: reference the registered schema.
        schema.Reference = schemaResolver.GetSchema(enumType, isIntegerEnumeration);
        return;
    }
    if (schema.GetType() == typeof(JsonSchema))
    {
        // A plain JsonSchema was requested: generate the enumeration inline and register it.
        typeDescription.ApplyType(schema);
        if (Settings.UseXmlDocumentation)
        {
            schema.Description = enumType.GetXmlDocsSummary(Settings.ResolveExternalXmlDocumentation);
        }
        GenerateEnum(schema, typeDescription);
        schemaResolver.AddSchema(enumType, isIntegerEnumeration, schema);
    }
    else
    {
        // A derived schema type (e.g. a property schema) must reference a separately generated schema.
        schema.Reference = Generate(typeDescription.ContextualType, schemaResolver);
    }
}
/// <summary>Generates the schema properties for the declared fields and properties of the given type.</summary>
/// <param name="type">The type whose declared members are inspected.</param>
/// <param name="schema">The schema that receives the generated properties.</param>
/// <param name="schemaResolver">The schema resolver.</param>
/// <remarks>
/// Fixes: the original assigned <c>jsonProperty.TypeNameHandling</c> twice from the same attribute
/// value (redundant duplicate line removed), and re-enumerated the lazy accessor projection inside
/// the loop (now materialized once to avoid repeated reflection work).
/// </remarks>
private void GenerateProperties(Type type, JsonSchema schema, JsonSchemaResolver schemaResolver)
{
    // TODO(reflection): Here we should use ContextualAccessorInfo to avoid losing information
#if !LEGACY
    // Collect declared instance fields and properties; [DataMember] members are included even
    // when they would otherwise be filtered out.
    var members = type.GetTypeInfo()
        .DeclaredFields
        .Where(f => !f.IsPrivate && !f.IsStatic || f.IsDefined(typeof(DataMemberAttribute)))
        .OfType<MemberInfo>()
        .Concat(
            type.GetTypeInfo().DeclaredProperties
                .Where(p => (p.GetMethod?.IsPrivate != true && p.GetMethod?.IsStatic == false) ||
                            (p.SetMethod?.IsPrivate != true && p.SetMethod?.IsStatic == false) ||
                            p.IsDefined(typeof(DataMemberAttribute)))
        )
        .ToList();
#else
    var members = type.GetTypeInfo()
        .GetFields(BindingFlags.DeclaredOnly | BindingFlags.Public | BindingFlags.Instance)
        .Where(f => !f.IsPrivate && !f.IsStatic)
        .OfType<MemberInfo>()
        .Concat(
            type.GetTypeInfo()
                .GetProperties(BindingFlags.DeclaredOnly | BindingFlags.Public | BindingFlags.Instance)
                .Where(p => (p.GetGetMethod()?.IsPrivate != true && p.GetGetMethod()?.IsStatic == false) ||
                            (p.GetSetMethod()?.IsPrivate != true && p.GetSetMethod()?.IsStatic == false))
        )
        .ToList();
#endif
    // Materialized once: the list is probed repeatedly below. TODO(reflection): Do not use this method
    var contextualAccessors = members.Select(m => m.ToContextualAccessor()).ToList();
    var contract = Settings.ResolveContract(type);
    var allowedProperties = GetTypeProperties(type);
    var objectContract = contract as JsonObjectContract;
    if (objectContract != null && allowedProperties == null)
    {
        // Preferred path: drive generation from the Newtonsoft contract's resolved properties.
        foreach (var jsonProperty in objectContract.Properties.Where(p => p.DeclaringType == type))
        {
            bool shouldSerialize;
            try
            {
                // ShouldSerialize is invoked with a null instance; a throwing predicate counts as serializable.
                shouldSerialize = jsonProperty.ShouldSerialize?.Invoke(null) != false;
            }
            catch
            {
                shouldSerialize = true;
            }

            if (shouldSerialize)
            {
                var memberInfo = contextualAccessors.FirstOrDefault(p => p.Name == jsonProperty.UnderlyingName);
                if (memberInfo != null && (Settings.GenerateAbstractProperties || !IsAbstractProperty(memberInfo)))
                {
                    LoadPropertyOrField(jsonProperty, memberInfo, type, schema, schemaResolver);
                }
            }
        }
    }
    else
    {
        // TODO: Remove this hacky code (used to support serialization of exceptions and restore the old behavior [pre 9.x])
        foreach (var memberInfo in contextualAccessors.Where(m => allowedProperties == null || allowedProperties.Contains(m.Name)))
        {
            var attribute = memberInfo.GetContextAttribute<JsonPropertyAttribute>();
            var memberType = (memberInfo as ContextualPropertyInfo)?.PropertyInfo.PropertyType ??
                             (memberInfo as ContextualFieldInfo)?.FieldInfo.FieldType;

            var jsonProperty = new JsonProperty
            {
                AttributeProvider = new ReflectionAttributeProvider(memberInfo),
                PropertyType = memberType,
                Ignored = IsPropertyIgnored(memberInfo, type)
            };

            if (attribute != null)
            {
                // Copy the relevant [JsonProperty] settings onto the synthetic contract property.
                jsonProperty.PropertyName = attribute.PropertyName ?? memberInfo.Name;
                jsonProperty.Required = attribute.Required;
                jsonProperty.DefaultValueHandling = attribute.DefaultValueHandling;
                jsonProperty.TypeNameHandling = attribute.TypeNameHandling;
                jsonProperty.NullValueHandling = attribute.NullValueHandling;
            }
            else
            {
                jsonProperty.PropertyName = memberInfo.Name;
            }

            LoadPropertyOrField(jsonProperty, memberInfo, type, schema, schemaResolver);
        }
    }
}
// Returns true when the member is an abstract class property (interface properties are excluded,
// since interface members are always abstract and should not be filtered out on that basis).
private bool IsAbstractProperty(ContextualMemberInfo memberInfo)
{
return memberInfo is ContextualPropertyInfo propertyInfo &&
!propertyInfo.PropertyInfo.DeclaringType.GetTypeInfo().IsInterface &&
#if !LEGACY
(propertyInfo.PropertyInfo.GetMethod?.IsAbstract == true || propertyInfo.PropertyInfo.SetMethod?.IsAbstract == true);
#else
(propertyInfo.PropertyInfo.GetGetMethod()?.IsAbstract == true || propertyInfo.PropertyInfo.GetSetMethod()?.IsAbstract == true);
#endif
}
// Generates schemas for types announced via [KnownType] and JsonInheritance attributes so that
// derived types referenced only through attributes still end up in the schema document.
private void GenerateKnownTypes(Type type, JsonSchemaResolver schemaResolver)
{
var attributes = type.GetTypeInfo()
.GetCustomAttributes(Settings.GetActualFlattenInheritanceHierarchy(type));
if (Settings.GenerateKnownTypes)
{
var knownTypeAttributes = attributes
// Known types of inherited classes will be generated later (in GenerateInheritance)
.GetAssignableToTypeName("KnownTypeAttribute", TypeNameStyle.Name)
.OfType<Attribute>();
foreach (dynamic attribute in knownTypeAttributes)
{
// [KnownType] either names a type directly or a static method returning the types.
if (attribute.Type != null)
{
AddKnownType(attribute.Type, schemaResolver);
}
else if (attribute.MethodName != null)
{
var methodInfo = type.GetRuntimeMethod((string)attribute.MethodName, new Type[0]);
if (methodInfo != null)
{
// Static, parameterless method expected to return IEnumerable<Type>.
var knownTypes = methodInfo.Invoke(null, null) as IEnumerable<Type>;
if (knownTypes != null)
{
foreach (var knownType in knownTypes)
{
AddKnownType(knownType, schemaResolver);
}
}
}
}
else
{
throw new ArgumentException($"A KnownType attribute on {type.FullName} does not specify a type or a method name.", nameof(type));
}
}
}
// JsonInheritance attributes (matched by short name) also announce known derived types.
foreach (var jsonConverterAttribute in attributes
.GetAssignableToTypeName("JsonInheritanceAttribute", TypeNameStyle.Name))
{
var knownType = ObjectExtensions.TryGetPropertyValue<Type>(
jsonConverterAttribute, "Type", null);
if (knownType != null)
{
AddKnownType(knownType, schemaResolver);
}
}
}
// Generates a schema for the known type unless one is already registered with the resolver.
private void AddKnownType(Type type, JsonSchemaResolver schemaResolver)
{
    var description = Settings.ReflectionService.GetDescription(type.ToContextualType(), Settings);
    var isIntegerEnum = description.Type == JsonObjectType.Integer;
    if (schemaResolver.HasSchema(type, isIntegerEnum))
    {
        return;
    }
    Generate(type, schemaResolver);
}
// Applies base-type inheritance to the schema. Returns the schema that should receive the type's
// own members when allOf inheritance was used, or null when members go into the schema directly
// (flattened hierarchies, interface handling, or no usable base type).
private JsonSchema GenerateInheritance(ContextualType type, JsonSchema schema, JsonSchemaResolver schemaResolver)
{
var baseType = type.BaseType;
if (baseType != null && baseType.Type != typeof(object) && baseType.Type != typeof(ValueType))
{
// Skip base types excluded by attribute or by the ExcludedTypeNames setting.
if (baseType.Attributes.FirstAssignableToTypeNameOrDefault("JsonSchemaIgnoreAttribute", TypeNameStyle.Name) == null &&
baseType.Attributes.FirstAssignableToTypeNameOrDefault("SwaggerIgnoreAttribute", TypeNameStyle.Name) == null &&
Settings.ExcludedTypeNames?.Contains(baseType.Type.FullName) != true)
{
if (Settings.GetActualFlattenInheritanceHierarchy(type))
{
// Flattened: pull the base type's properties directly into this schema (recursively).
var typeDescription = Settings.ReflectionService.GetDescription(baseType, Settings);
if (!typeDescription.IsDictionary && !type.Type.IsArray)
{
GenerateProperties(baseType, schema, schemaResolver);
var actualSchema = GenerateInheritance(baseType, schema, schemaResolver);
GenerateInheritanceDiscriminator(baseType, schema, actualSchema ?? schema);
}
}
else
{
// Not flattened: put the type's own members into a separate schema and combine via allOf.
var actualSchema = new JsonSchema();
GenerateProperties(type, actualSchema, schemaResolver);
ApplyAdditionalProperties(actualSchema, type, schemaResolver);
var baseTypeInfo = Settings.ReflectionService.GetDescription(baseType, Settings);
var requiresSchemaReference = baseTypeInfo.RequiresSchemaReference(Settings.TypeMappers);
if (actualSchema.Properties.Any() || requiresSchemaReference)
{
// Use allOf inheritance only if the schema is an object with properties
// (not empty class which just inherits from array or dictionary)
var baseSchema = Generate(baseType, schemaResolver);
if (requiresSchemaReference)
{
// Register the base schema in the document unless it is the root itself.
if (schemaResolver.RootObject != baseSchema.ActualSchema)
{
schemaResolver.AppendSchema(baseSchema.ActualSchema, Settings.SchemaNameGenerator.Generate(baseType));
}
schema._allOf.Add(new JsonSchema
{
Reference = baseSchema.ActualSchema
});
}
else
{
schema._allOf.Add(baseSchema);
}
// First schema is the (referenced) base schema, second is the type schema itself
schema._allOf.Add(actualSchema);
return actualSchema;
}
else
{
// Array and dictionary inheritance are not expressed with allOf but inline
Generate(schema, baseType, schemaResolver);
return schema;
}
}
}
}
// Flattened hierarchies optionally also inline properties declared on implemented interfaces.
if (Settings.GetActualFlattenInheritanceHierarchy(type) && Settings.GenerateAbstractProperties)
{
#if !LEGACY
foreach (var i in type.Type.GetTypeInfo().ImplementedInterfaces)
#else
foreach (var i in type.Type.GetTypeInfo().GetInterfaces())
#endif
{
var typeDescription = Settings.ReflectionService.GetDescription(i.ToContextualType(), Settings);
if (!typeDescription.IsDictionary && !type.Type.IsArray &&
!typeof(IEnumerable).GetTypeInfo().IsAssignableFrom(i.GetTypeInfo()))
{
GenerateProperties(i, schema, schemaResolver);
var actualSchema = GenerateInheritance(i.ToContextualType(), schema, schemaResolver);
GenerateInheritanceDiscriminator(i, schema, actualSchema ?? schema);
}
}
}
return null;
}
// Adds the inheritance discriminator property/object to the type schema (when the type declares a
// JsonInheritanceConverter), or registers the type in the base type's existing discriminator mapping.
private void GenerateInheritanceDiscriminator(Type type, JsonSchema schema, JsonSchema typeSchema)
{
if (!Settings.GetActualFlattenInheritanceHierarchy(type))
{
var discriminatorConverter = TryGetInheritanceDiscriminatorConverter(type);
if (discriminatorConverter != null)
{
var discriminatorName = TryGetInheritanceDiscriminatorName(discriminatorConverter);
// Existing property can be discriminator only if it has String type
if (typeSchema.Properties.TryGetValue(discriminatorName, out var existingProperty))
{
if (!existingProperty.ActualTypeSchema.Type.IsInteger() &&
!existingProperty.ActualTypeSchema.Type.IsString())
{
throw new InvalidOperationException("The JSON discriminator property '" + discriminatorName + "' must be a string|int property on type '" + type.FullName + "' (it is recommended to not implement the discriminator property at all).");
}
// The discriminator must always be present on serialized instances.
existingProperty.IsRequired = true;
}
var discriminator = new OpenApiDiscriminator
{
JsonInheritanceConverter = discriminatorConverter,
PropertyName = discriminatorName
};
typeSchema.DiscriminatorObject = discriminator;
// Add a synthetic required string property when the type does not declare one itself.
if (!typeSchema.Properties.ContainsKey(discriminatorName))
{
typeSchema.Properties[discriminatorName] = new JsonSchemaProperty
{
Type = JsonObjectType.String,
IsRequired = true
};
}
}
else
{
// No converter on this type: register it in the responsible base discriminator's mapping.
var baseDiscriminator = schema.ResponsibleDiscriminatorObject ?? schema.ActualTypeSchema.ResponsibleDiscriminatorObject;
baseDiscriminator?.AddMapping(type, schema);
}
}
}
// Instantiates the type's JsonInheritanceConverter from its [JsonConverter] attribute, or returns
// null when no such converter is declared. Supports both the Newtonsoft converter and the
// generic System.Text.Json variant (matched by short type name).
private object TryGetInheritanceDiscriminatorConverter(Type type)
{
var typeAttributes = type.GetTypeInfo().GetCustomAttributes(false).OfType<Attribute>();
dynamic jsonConverterAttribute = typeAttributes.FirstAssignableToTypeNameOrDefault(nameof(JsonConverterAttribute), TypeNameStyle.Name);
if (jsonConverterAttribute != null)
{
var converterType = (Type)jsonConverterAttribute.ConverterType;
if (converterType != null && (
converterType.IsAssignableToTypeName(nameof(JsonInheritanceConverter), TypeNameStyle.Name) || // Newtonsoft's converter
converterType.IsAssignableToTypeName(nameof(JsonInheritanceConverter) + "`1", TypeNameStyle.Name) // System.Text.Json's converter
))
{
// Pass the attribute's converter parameters to the constructor when present.
return ObjectExtensions.HasProperty(jsonConverterAttribute, "ConverterParameters") &&
jsonConverterAttribute.ConverterParameters != null &&
jsonConverterAttribute.ConverterParameters.Length > 0 ?
Activator.CreateInstance(jsonConverterAttribute.ConverterType, jsonConverterAttribute.ConverterParameters) :
Activator.CreateInstance(jsonConverterAttribute.ConverterType);
}
}
return null;
}
// Reads the converter's DiscriminatorName property, falling back to the default discriminator name.
private string TryGetInheritanceDiscriminatorName(object jsonInheritanceConverter)
    => ObjectExtensions.TryGetPropertyValue(
        jsonInheritanceConverter,
        nameof(JsonInheritanceConverter.DiscriminatorName),
        JsonInheritanceConverter.DefaultDiscriminatorName);
/// <summary>Generates the schema property for a single property or field and adds it to the
/// parent schema, honoring required/nullability attributes and the configured settings.</summary>
/// <param name="jsonProperty">The Newtonsoft.Json property metadata.</param>
/// <param name="accessorInfo">The contextual property/field accessor.</param>
/// <param name="parentType">The declaring type.</param>
/// <param name="parentSchema">The schema that receives the property.</param>
/// <param name="schemaResolver">The schema resolver used for references.</param>
/// <exception cref="InvalidOperationException">The property is defined multiple times on the type
/// and the inheritance hierarchy is not flattened.</exception>
private void LoadPropertyOrField(JsonProperty jsonProperty, ContextualAccessorInfo accessorInfo, Type parentType, JsonSchema parentSchema, JsonSchemaResolver schemaResolver)
{
var propertyTypeDescription = Settings.ReflectionService.GetDescription(accessorInfo.AccessorType, Settings);
if (jsonProperty.Ignored == false && IsPropertyIgnoredBySettings(accessorInfo) == false)
{
var propertyName = GetPropertyName(jsonProperty, accessorInfo);
// A property of the same name may already exist when it was inherited from a base type.
var propertyAlreadyExists = parentSchema.Properties.ContainsKey(propertyName);
if (propertyAlreadyExists)
{
// When flattening, the derived declaration replaces the inherited one; otherwise duplicates are an error.
if (Settings.GetActualFlattenInheritanceHierarchy(parentType))
{
parentSchema.Properties.Remove(propertyName);
}
else
{
throw new InvalidOperationException("The JSON property '" + propertyName + "' is defined multiple times on type '" + parentType.FullName + "'.");
}
}
// "Required" can come from three sources: [Required], [DataMember(IsRequired = true)]
// or Json.NET's Required.Always / Required.AllowNull.
var requiredAttribute = accessorInfo.ContextAttributes.FirstAssignableToTypeNameOrDefault("System.ComponentModel.DataAnnotations.RequiredAttribute");
var hasJsonNetAttributeRequired = jsonProperty.Required == Required.Always || jsonProperty.Required == Required.AllowNull;
var isDataContractMemberRequired = GetDataMemberAttribute(accessorInfo, parentType)?.IsRequired == true;
var hasRequiredAttribute = requiredAttribute != null;
if (hasRequiredAttribute || isDataContractMemberRequired || hasJsonNetAttributeRequired)
{
parentSchema.RequiredProperties.Add(propertyName);
}
// Nullable only when the reflected type is nullable AND nothing forbids null
// (no [Required]; Json.NET requiredness is Default or AllowNull).
var isNullable = propertyTypeDescription.IsNullable &&
hasRequiredAttribute == false &&
(jsonProperty.Required == Required.Default || jsonProperty.Required == Required.AllowNull);
// Applied to the generated property schema after reference/nullability resolution.
Action<JsonSchemaProperty, JsonSchema> TransformSchema = (propertySchema, typeSchema) =>
{
if (Settings.GenerateXmlObjects)
{
propertySchema.GenerateXmlObjectForProperty(accessorInfo.AccessorType, propertyName);
}
// [Required(AllowEmptyStrings = false)] on a non-enum string implies a minimum length of 1.
if (hasRequiredAttribute &&
propertyTypeDescription.IsEnum == false &&
propertyTypeDescription.Type == JsonObjectType.String &&
requiredAttribute.TryGetPropertyValue("AllowEmptyStrings", false) == false)
{
propertySchema.MinLength = 1;
}
// Swagger 2.0 has no nullability keyword, so non-nullable is expressed via "required".
if (!isNullable && Settings.SchemaType == SchemaType.Swagger2)
{
if (!parentSchema.RequiredProperties.Contains(propertyName))
{
parentSchema.RequiredProperties.Add(propertyName);
}
}
dynamic readOnlyAttribute = accessorInfo.ContextAttributes.FirstAssignableToTypeNameOrDefault("System.ComponentModel.ReadOnlyAttribute");
if (readOnlyAttribute != null)
{
propertySchema.IsReadOnly = readOnlyAttribute.IsReadOnly;
}
// Description/example from the accessor only when not already set (e.g. by a reference transformation).
if (propertySchema.Description == null)
{
propertySchema.Description = accessorInfo.GetDescription(Settings);
}
if (propertySchema.Example == null)
{
propertySchema.Example = GenerateExample(accessorInfo);
}
// [Obsolete] maps to the schema's deprecation flag and message.
dynamic obsoleteAttribute = accessorInfo.ContextAttributes.FirstAssignableToTypeNameOrDefault("System.ObsoleteAttribute");
if (obsoleteAttribute != null)
{
propertySchema.IsDeprecated = true;
propertySchema.DeprecatedMessage = obsoleteAttribute.Message;
}
propertySchema.Default = ConvertDefaultValue(accessorInfo.AccessorType, jsonProperty.DefaultValue);
ApplyDataAnnotations(propertySchema, propertyTypeDescription);
ApplyPropertyExtensionDataAttributes(accessorInfo, propertySchema);
};
var referencingProperty = GenerateWithReferenceAndNullability(
accessorInfo.AccessorType, isNullable, schemaResolver, TransformSchema);
parentSchema.Properties.Add(propertyName, referencingProperty);
}
}
/// <summary>Checks whether a property is ignored.</summary>
/// <param name="accessorInfo">The accessor info.</param>
/// <param name="parentType">The properties parent type.</param>
/// <returns>The result.</returns>
protected virtual bool IsPropertyIgnored(ContextualAccessorInfo accessorInfo, Type parentType)
{
    var hasJsonIgnore = accessorInfo.GetContextAttribute<JsonIgnoreAttribute>() != null;
    if (hasJsonIgnore)
    {
        return true;
    }

    // On [DataContract] types members are opt-in: anything without [DataMember]
    // (and without an explicit [JsonProperty]) is ignored.
    var isNonDataMemberOnDataContract =
        accessorInfo.GetContextAttribute<JsonPropertyAttribute>() == null &&
        HasDataContractAttribute(parentType) &&
        GetDataMemberAttribute(accessorInfo, parentType) == null;

    return isNonDataMemberOnDataContract || IsPropertyIgnoredBySettings(accessorInfo);
}
/// <summary>Checks whether a property is excluded by the generator settings
/// (obsolete handling) or by a [JsonSchemaIgnore] attribute.</summary>
/// <param name="accessorInfo">The accessor info.</param>
/// <returns>True when the property must be skipped.</returns>
private bool IsPropertyIgnoredBySettings(ContextualAccessorInfo accessorInfo)
{
    // Obsolete members are only skipped when the settings ask for it.
    var ignoredAsObsolete =
        Settings.IgnoreObsoleteProperties &&
        accessorInfo.GetContextAttribute<ObsoleteAttribute>() != null;

    return ignoredAsObsolete ||
           accessorInfo.GetContextAttribute<JsonSchemaIgnoreAttribute>() != null;
}
/// <summary>Gets the accessor's [DataMember] attribute, but only when the parent type
/// is annotated with [DataContract]; otherwise the attribute is meaningless.</summary>
/// <param name="accessorInfo">The accessor info.</param>
/// <param name="parentType">The declaring type.</param>
/// <returns>The attribute instance, or null.</returns>
private dynamic GetDataMemberAttribute(ContextualAccessorInfo accessorInfo, Type parentType)
{
    return HasDataContractAttribute(parentType)
        ? accessorInfo.ContextAttributes.FirstAssignableToTypeNameOrDefault("DataMemberAttribute", TypeNameStyle.Name)
        : null;
}
/// <summary>Determines whether the given type carries a [DataContract] attribute
/// (looked up by name on the inherited attribute list).</summary>
/// <param name="parentType">The type to inspect.</param>
/// <returns>True when a DataContractAttribute is present.</returns>
private bool HasDataContractAttribute(Type parentType)
{
    var inheritedAttributes = parentType.ToCachedType().InheritedAttributes;
    return inheritedAttributes.FirstAssignableToTypeNameOrDefault("DataContractAttribute", TypeNameStyle.Name) != null;
}
/// <summary>Applies a [Range] data annotation (when present) as the schema's
/// Minimum/Maximum values.</summary>
/// <param name="schema">The schema to update.</param>
/// <param name="parentAttributes">The attributes of the annotated member.</param>
private void ApplyRangeAttribute(JsonSchema schema, IEnumerable<Attribute> parentAttributes)
{
dynamic rangeAttribute = parentAttributes.FirstAssignableToTypeNameOrDefault("System.ComponentModel.DataAnnotations.RangeAttribute");
if (rangeAttribute != null)
{
if (rangeAttribute.Minimum != null)
{
// Double-typed ranges are converted via double to avoid overflow when the
// value does not fit into decimal; otherwise decimal is used directly.
if (rangeAttribute.OperandType == typeof(double))
{
var minimum = (double)Convert.ChangeType(rangeAttribute.Minimum, typeof(double));
// double.MinValue acts as an "unbounded" sentinel and is not written to the schema.
if (minimum > double.MinValue)
{
schema.Minimum = (decimal)minimum;
}
}
else
{
var minimum = (decimal)Convert.ChangeType(rangeAttribute.Minimum, typeof(decimal));
// decimal.MinValue acts as an "unbounded" sentinel and is not written to the schema.
if (minimum > decimal.MinValue)
{
schema.Minimum = minimum;
}
}
}
if (rangeAttribute.Maximum != null)
{
// Mirror of the minimum handling above, with Max sentinels instead.
if (rangeAttribute.OperandType == typeof(double))
{
var maximum = (double)Convert.ChangeType(rangeAttribute.Maximum, typeof(double));
if (maximum < double.MaxValue)
{
schema.Maximum = (decimal)maximum;
}
}
else
{
var maximum = (decimal)Convert.ChangeType(rangeAttribute.Maximum, typeof(decimal));
if (maximum < decimal.MaxValue)
{
schema.Maximum = maximum;
}
}
}
}
}
/// <summary>Copies IJsonSchemaExtensionDataAttribute annotations declared on the type
/// into the schema's extension data dictionary.</summary>
/// <typeparam name="TSchemaType">The schema type.</typeparam>
/// <param name="schema">The schema to update.</param>
/// <param name="contextualType">The type whose custom attributes are inspected.</param>
private void ApplyTypeExtensionDataAttributes<TSchemaType>(TSchemaType schema, ContextualType contextualType) where TSchemaType : JsonSchema, new()
{
    var extensionAttributes = contextualType.OriginalType.GetTypeInfo().GetCustomAttributes()
        .OfType<IJsonSchemaExtensionDataAttribute>()
        .ToArray();

    if (extensionAttributes.Any())
    {
        // Consistent with ApplyPropertyExtensionDataAttributes: build the dictionary in one go.
        // Duplicate keys still throw, matching the previous Dictionary.Add behavior.
        schema.ExtensionData = extensionAttributes.ToDictionary(a => a.Key, a => (object)a.Value);
    }
}
/// <summary>Copies IJsonSchemaExtensionDataAttribute annotations declared on the
/// property/field accessor into the property schema's extension data dictionary.</summary>
/// <param name="accessorInfo">The accessor whose context attributes are inspected.</param>
/// <param name="propertySchema">The property schema to update.</param>
private void ApplyPropertyExtensionDataAttributes(ContextualAccessorInfo accessorInfo, JsonSchemaProperty propertySchema)
{
    var attributes = accessorInfo
        .GetContextAttributes<IJsonSchemaExtensionDataAttribute>()
        .ToArray();

    if (attributes.Length > 0)
    {
        propertySchema.ExtensionData = attributes.ToDictionary(a => a.Key, a => a.Value);
    }
}
}
}
| |
using System;
using System.Collections.Generic;
using UnityEngine;
using KSP.Localization;
namespace KERBALISM
{
/// <summary>
/// Part module that analyzes flagged science samples, converting them into transmissible
/// file data over time at the cost of ElectricCharge, optionally requiring a researcher crew
/// member and optionally able to clean (reset) inoperable experiments.
/// </summary>
public class Laboratory: PartModule, IModuleInfo, ISpecifics, IContractObjectiveModule
{
// config
[KSPField] public double ec_rate; // ec consumed per-second
[KSPField] public double analysis_rate; // analysis speed in Mb/s
[KSPField] public string researcher = string.Empty; // required crew for analysis
[KSPField] public bool cleaner = true; // can clean experiments
// persistence
[KSPField(isPersistant = true)] public bool running; // true if the lab is active
// status enum
private enum Status
{
DISABLED = 0,
NO_EC,
NO_STORAGE,
NO_SAMPLE,
NO_RESEARCHER,
RUNNING
}
// other data
private CrewSpecs researcher_cs; // crew specs for the researcher
private static CrewSpecs background_researcher_cs; // crew specs for the researcher in background simulation
private SubjectData current_sample = null; // sample currently being analyzed
private static SubjectData background_sample = null; // sample currently being analyzed in background simulation
private Status status = Status.DISABLED; // laboratory status
private string status_txt = string.Empty; // status string to show next to the ui button
private ResourceInfo ec = null; // resource info for EC
// localized strings
private static readonly string localized_title = Lib.BuildString("<size=1><color=#00000000>00</color></size>", Local.Laboratory_Title);
private static readonly string localized_toggle = Local.Laboratory_Toggle;
private static readonly string localized_enabled = Local.Generic_ENABLED;
private static readonly string localized_disabled = Local.Generic_DISABLED;
private static readonly string localized_noEC = Lib.Color(Local.Laboratory_NoEC, Lib.Kolor.Orange);
private static readonly string localized_noSample = Local.Laboratory_NoSample;
private static readonly string localized_cleaned = Local.Laboratory_Cleaned;
private static readonly string localized_results = Local.Laboratory_Results;
private static readonly string localized_noStorage = Local.Laboratory_Nostorage;//"No storage available"
/// <summary>Module initialization: sets UI captions and, in flight, parses the researcher crew specs.</summary>
public override void OnStart(StartState state)
{
// don't break tutorial scenarios
if (Lib.DisableScenario(this)) return;
// set UI text
Actions["Action"].guiName = Local.Laboratory_Action;
Events["CleanExperiments"].guiName = Local.Laboratory_Clean;
// do nothing in the editors and when compiling parts
if (!Lib.IsFlight()) return;
// parse crew specs
researcher_cs = new CrewSpecs(researcher);
}
/// <summary>Per-frame UI refresh: updates the toggle caption and the clean-experiments button availability.</summary>
public void Update()
{
if (Lib.IsFlight())
{
// get status text
SetStatusText();
Events["Toggle"].guiName = Lib.StatusToggle(localized_toggle, status_txt);
// if a cleaner and either a researcher is not required, or the researcher is present
if (cleaner && (!researcher_cs || researcher_cs.Check(part.protoModuleCrew))) Events["CleanExperiments"].active = true;
else Events["CleanExperiments"].active = false;
}
else Events["Toggle"].guiName = Lib.StatusToggle(localized_toggle, running ? localized_enabled : localized_disabled);
}
/// <summary>Physics-step simulation for the loaded vessel: picks the next sample,
/// consumes EC and advances the analysis, updating the status field.</summary>
public void FixedUpdate()
{
// do nothing in the editor
if (Lib.IsEditor()) return;
// if enabled
if (running)
{
// if a researcher is not required, or the researcher is present
if (!researcher_cs || researcher_cs.Check(part.protoModuleCrew))
{
// get next sample to analyze
current_sample = NextSample(vessel);
double rate = analysis_rate;
// crew experience scales the analysis rate, clamped to the configured maximum bonus
if(researcher_cs) {
int bonus = researcher_cs.Bonus(part.protoModuleCrew);
double crew_gain = 1 + bonus * Settings.LaboratoryCrewLevelBonus;
crew_gain = Lib.Clamp(crew_gain, 1, Settings.MaxLaborartoryBonus);
rate *= crew_gain;
}
// if there is a sample to analyze
if (current_sample != null)
{
// consume EC
ec = ResourceCache.GetResource(vessel, "ElectricCharge");
ec.Consume(ec_rate * Kerbalism.elapsed_s, ResourceBroker.Laboratory);
// if there was ec
// - comparing against amount in previous simulation step
if (ec.Amount > double.Epsilon)
{
// analyze the sample
status = Analyze(vessel, current_sample, rate * Kerbalism.elapsed_s);
running = status == Status.RUNNING;
}
// if there was no ec
else status = Status.NO_EC;
}
// if there is no sample to analyze
else status = Status.NO_SAMPLE;
}
// if a researcher is required, but missing
else status = Status.NO_RESEARCHER;
}
// if disabled
else status = Status.DISABLED;
}
/// <summary>Simulation step for unloaded vessels: mirrors FixedUpdate using the
/// proto part/module snapshots, persisting the 'running' flag when analysis stops.</summary>
public static void BackgroundUpdate(Vessel v, ProtoPartSnapshot p, ProtoPartModuleSnapshot m, Laboratory lab, ResourceInfo ec, double elapsed_s)
{
// if enabled
if (Lib.Proto.GetBool(m, "running"))
{
// if a researcher is not required, or the researcher is present
background_researcher_cs = new CrewSpecs(lab.researcher);
if (!background_researcher_cs || background_researcher_cs.Check(p.protoModuleCrew))
{
double rate = lab.analysis_rate;
if(background_researcher_cs) {
int bonus = background_researcher_cs.Bonus(p.protoModuleCrew);
double crew_gain = 1 + bonus * Settings.LaboratoryCrewLevelBonus;
crew_gain = Lib.Clamp(crew_gain, 1, Settings.MaxLaborartoryBonus);
rate *= crew_gain;
}
// get sample to analyze
background_sample = NextSample(v);
// if there is a sample to analyze
if (background_sample != null)
{
// consume EC
ec.Consume(lab.ec_rate * elapsed_s, ResourceBroker.Laboratory);
// if there was ec
// - comparing against amount in previous simulation step
if (ec.Amount > double.Epsilon)
{
// analyze the sample
var status = Analyze(v, background_sample, rate * elapsed_s);
if (status != Status.RUNNING)
Lib.Proto.Set(m, "running", false);
}
}
}
}
}
/// <summary>UI event: toggles the lab on/off.</summary>
[KSPEvent(guiActive = true, guiActiveEditor = true, guiName = "#KERBALISM_Laboratory_Toggle", active = true, groupName = "Science", groupDisplayName = "#KERBALISM_Group_Science")]//"Toggle Lab"Science
public void Toggle()
{
running = !running;
// refresh VAB/SPH ui
if (Lib.IsEditor()) GameEvents.onEditorShipModified.Fire(EditorLogic.fetch.ship);
}
/// <summary>UI event: resets all resettable inoperable stock experiments and
/// Kerbalism experiments on the vessel, notifying the user when anything was cleaned.</summary>
[KSPEvent(guiActive = true, guiActiveEditor = false, guiName = "#KERBALISM_Laboratory_Clean", active = true, groupName = "Science", groupDisplayName = "#KERBALISM_Group_Science")]//Clean Lab""Science
public void CleanExperiments()
{
bool message = false;
var stockExperiments = vessel.FindPartModulesImplementing<ModuleScienceExperiment>();
foreach (ModuleScienceExperiment m in stockExperiments)
{
if (m.resettable && m.Inoperable)
{
m.ResetExperiment();
message = true;
}
}
var kerbalismExperiments = vessel.FindPartModulesImplementing<Experiment>();
foreach (Experiment m in kerbalismExperiments)
{
message |= m.Reset(false);
}
// inform the user
if (message) Message.Post(localized_cleaned);
}
// action groups
[KSPAction("Action")] public void Action(KSPActionParam param) { Toggle(); }
/// <summary>Part tooltip info text built from the module specifics.</summary>
public override string GetInfo()
{
return Specs().Info(Local.Laboratory_Specs);
}
// specifics support
/// <summary>Builds the specifics list (researcher requirement, cleaning ability, EC and analysis rates).</summary>
public Specifics Specs()
{
Specifics specs = new Specifics();
specs.Add(Local.Laboratory_Researcher, new CrewSpecs(researcher).Info());
if (cleaner) specs.Add(Local.Laboratory_CanClean);
specs.Add(Local.Laboratory_ECrate, Lib.HumanReadableRate(ec_rate));
specs.Add(Local.Laboratory_rate, Lib.HumanReadableDataRate(analysis_rate));
return specs;
}
// contract objective support
public bool CheckContractObjectiveValidity() { return true; }
public string GetContractObjectiveType() { return "Laboratory"; }
// get next sample to analyze, return null if there isn't a sample
private static SubjectData NextSample(Vessel v)
{
foreach(var drive in Drive.GetDrives(v, true))
{
// for each sample
foreach (Sample sample in drive.samples.Values)
{
// if flagged for analysis
if (sample.analyze) return sample.subjectData;
}
}
// there was no sample to analyze
return null;
}
// analyze a sample
/// <summary>Converts up to 'amount' Mb of the given sample into file data on an available
/// drive, returning the resulting lab status (RUNNING or NO_STORAGE).</summary>
private static Status Analyze(Vessel v, SubjectData subject, double amount)
{
// locate the drive holding the flagged sample
Sample sample = null;
Drive sampleDrive = null;
foreach (var d in Drive.GetDrives(v, true))
{
if (d.samples.ContainsKey(subject) && d.samples[subject].analyze)
{
sample = d.samples[subject];
sampleDrive = d;
break;
}
}
// completed when this step would consume the rest of the sample; clamp to what remains
bool completed = false;
if(sample != null)
{
completed = amount > sample.size;
amount = Math.Min(amount, sample.size);
}
Drive fileDrive = Drive.FileDrive(v.KerbalismData(), amount);
if (fileDrive == null)
return Status.NO_STORAGE;
if(sample != null)
{
bool recorded = fileDrive.Record_file(subject, amount, false);
double massRemoved = 0.0;
if (recorded)
massRemoved = sampleDrive.Delete_sample(subject, amount);
else
{
Message.Post(
Lib.Color(Lib.BuildString(Local.Laboratory_Analysis, " ", Local.Laboratory_stopped), Lib.Kolor.Red),//"stopped"
Local.Laboratory_Notspace//"Not enough space on hard drive"
);
return Status.NO_STORAGE;
}
// return sample mass to experiment if needed
if (massRemoved > 0.0) RestoreSampleMass(v, subject, massRemoved);
}
// if the analysis is completed
if (completed)
{
if(!PreferencesScience.Instance.analyzeSamples)
{
// only inform the user if auto-analyze is turned off
// otherwise we could be spamming "Analysis complete" messages
Message.Post(Lib.BuildString(Lib.Color(Local.Laboratory_Analysis, Lib.Kolor.Science, true), "\n",
Local.Laboratory_Analyzed.Format(Lib.Bold(v.vesselName), Lib.Bold(subject.FullTitle))), localized_results);
}
if (PreferencesScience.Instance.transmitScience)
fileDrive.Send(subject.Id, true);
// record landmark event
if (!Lib.Landed(v)) DB.landmarks.space_analysis = true;
}
return Status.RUNNING;
}
/// <summary>Gives removed sample mass back to the originating experiment modules,
/// handling both loaded and unloaded vessels.</summary>
private static void RestoreSampleMass(Vessel v, SubjectData filename, double restoredAmount)
{
if(v.loaded) // loaded vessel
{
foreach (var experiment in v.FindPartModulesImplementing<Experiment>())
{
restoredAmount -= experiment.RestoreSampleMass(restoredAmount, filename.ExpInfo.ExperimentId);
}
}
else // unloaded vessel
{
foreach (ProtoPartModuleSnapshot m in Lib.FindModules(v.protoVessel, "Experiment"))
{
restoredAmount -= Experiment.RestoreSampleMass(restoredAmount, m, filename.ExpInfo.ExperimentId);
if (restoredAmount < double.Epsilon) return;
}
}
}
/// <summary>Maps the current Status value to the localized UI status string.</summary>
private void SetStatusText()
{
switch (status)
{
case Status.DISABLED:
status_txt = localized_disabled;
break;
case Status.NO_EC:
status_txt = localized_noEC;
break;
case Status.NO_STORAGE:
status_txt = localized_noStorage;
break;
case Status.NO_RESEARCHER:
status_txt = Lib.Color(researcher_cs.Warning(), Lib.Kolor.Orange);
break;
case Status.NO_SAMPLE:
status_txt = localized_noSample;
break;
case Status.RUNNING:
status_txt = Lib.Color(current_sample.FullTitle, Lib.Kolor.Green);
break;
}
}
// module info support
public string GetModuleTitle() { return localized_title; } // attempt to display at the top
public override string GetModuleDisplayName() { return localized_title; } // Attempt to display at top of tooltip
public string GetPrimaryField() { return String.Empty; }
public Callback<Rect> GetDrawModulePanelCallback() { return null; }
}
} // KERBALISM
| |
using System;
using System.Linq;
using System.Collections.Generic;
using System.Collections.Immutable;
using WampSharp.Core.Serialization;
using WampSharp.V2.Core.Contracts;
namespace WampSharp.V2.Rpc
{
/// <summary>
/// Represents all operations registered under a single procedure URI and dispatches
/// invocations to one of them according to the registration's invocation policy
/// (single/first/last/random/roundrobin).
/// </summary>
internal class ProcedureRegistration : IWampRpcOperation
{
private readonly string mProcedureUri;
private readonly RegisterOptions mRegisterOptions;
// Immutable list swapped atomically under mLock on register/unregister.
private IImmutableList<IWampRpcOperation> mOperations =
ImmutableList.Create<IWampRpcOperation>();
private readonly IWampRpcOperationSelector mSelector;
private readonly object mLock = new object();
public ProcedureRegistration(string procedureUri, RegisterOptions registerOptions)
{
mProcedureUri = procedureUri;
mSelector = GetOperationSelector(registerOptions.Invoke);
mRegisterOptions = registerOptions;
}
/// <summary>Maps the WAMP "invoke" option to the matching operation selector.</summary>
/// <exception cref="WampException">Thrown with "wamp.error.invalid_options" for an unknown policy.</exception>
private static IWampRpcOperationSelector GetOperationSelector(string invocationPolicy)
{
switch (invocationPolicy)
{
case "single":
case "first":
return new FirstOperationSelector();
case "last":
return new LastOperationSelector();
case "random":
return new RandomOperationSelector();
case "roundrobin":
return new RoundrobinOperationSelector();
default:
throw new WampException
("wamp.error.invalid_options",
string.Format("invoke = {0} isn't supported", invocationPolicy));
}
}
public long RegistrationId { get; set; }
// Raised (under mLock) when the last operation is removed.
public event EventHandler Empty;
/// <summary>Registers an operation under this procedure; with the "single" policy a second
/// registration raises a procedure_already_exists error.</summary>
/// <returns>A token whose disposal unregisters the operation.</returns>
/// <exception cref="WampException">Conflicting invocation policy, or duplicate "single" registration.</exception>
public IWampRpcOperationRegistrationToken Register(IWampRpcOperation operation, RegisterOptions registerOptions)
{
VerifyInvokePoliciesAreCompatible(registerOptions);
lock (mLock)
{
if (mRegisterOptions.Invoke != "single" || !mOperations.Any())
{
// Idempotent: re-registering the same operation instance returns a new token
// without duplicating the entry.
if (!mOperations.Contains(operation))
{
mOperations = mOperations.Add(operation);
}
return new WampRpcOperationRegistrationToken(operation, this);
}
else
{
string registerError =
string.Format("register for already registered procedure '{0}'", operation.Procedure);
throw new WampException(WampErrors.ProcedureAlreadyExists,
registerError);
}
}
}
/// <summary>Ensures the new registration uses the same invocation policy as the existing one.</summary>
/// <exception cref="WampException">Thrown on a policy mismatch.</exception>
private void VerifyInvokePoliciesAreCompatible(RegisterOptions registerOptions)
{
if (mRegisterOptions.Invoke != registerOptions.Invoke)
{
string messageDetails =
string.Format(
"register for already registered procedure '{0}' with conflicting invocation policy (has {1} and {2} was requested)",
this.Procedure,
this.mRegisterOptions.Invoke,
registerOptions.Invoke);
throw new WampException
(WampErrors.ProcedureExistsInvocationPolicyConflict,
messageDetails);
}
}
/// <summary>Removes an operation (called by token disposal) and raises Empty when none remain.</summary>
private void RemoveOperation(IWampRpcOperation operation)
{
lock (mLock)
{
mOperations = mOperations.Remove(operation);
if (!mOperations.Any())
{
// NOTE(review): Empty handlers run while mLock is held — confirm subscribers
// do not call back into Register/RemoveOperation.
RaiseEmpty();
}
}
}
protected virtual void RaiseEmpty()
{
EventHandler handler = Empty;
if (handler != null)
{
handler(this, EventArgs.Empty);
}
}
// The procedure URI this registration serves.
public string Procedure
{
get
{
return mProcedureUri;
}
}
// True while at least one operation is registered.
public bool HasOperations
{
get
{
return mOperations.Any();
}
}
public void Invoke<TMessage>(IWampRawRpcOperationRouterCallback caller, IWampFormatter<TMessage> formatter,
InvocationDetails details)
{
InvokePattern
(caller,
operation => operation.Invoke(caller, formatter, details));
}
public void Invoke<TMessage>(IWampRawRpcOperationRouterCallback caller, IWampFormatter<TMessage> formatter,
InvocationDetails details,
TMessage[] arguments)
{
InvokePattern
(caller,
operation => operation.Invoke(caller, formatter, details, arguments));
}
public void Invoke<TMessage>(IWampRawRpcOperationRouterCallback caller, IWampFormatter<TMessage> formatter,
InvocationDetails details,
TMessage[] arguments, IDictionary<string, TMessage> argumentsKeywords)
{
InvokePattern
(caller,
operation => operation.Invoke(caller, formatter, details, arguments, argumentsKeywords));
}
/// <summary>Selects an operation via the policy selector and invokes it.
/// The invocation runs under mLock, serializing calls for this procedure.</summary>
private void InvokePattern(IWampRawRpcOperationRouterCallback caller, Action<IWampRpcOperation> invokeAction)
{
lock (mLock)
{
IWampRpcOperation operation = GetOperation();
if (operation != null)
{
invokeAction(operation);
}
}
}
/// <summary>Returns the selected operation; reports "no procedure registered" to the caller when none exists.</summary>
private IWampRpcOperation GetOperation()
{
IWampRpcOperation result = mSelector.SelectOperation(mOperations);
if (result == null)
{
WampRpcThrowHelper.NoProcedureRegistered(Procedure);
}
return result;
}
/// <summary>Disposable handle that unregisters its operation from the owning registration.</summary>
private class WampRpcOperationRegistrationToken : IWampRpcOperationRegistrationToken
{
private readonly IWampRpcOperation mOperation;
private readonly ProcedureRegistration mRegistration;
public WampRpcOperationRegistrationToken(IWampRpcOperation operation, ProcedureRegistration registration)
{
mOperation = operation;
mRegistration = registration;
}
public void Dispose()
{
mRegistration.RemoveOperation(mOperation);
}
public long RegistrationId
{
get
{
return mRegistration.RegistrationId;
}
}
}
}
}
| |
using UnityEditor;
using UnityEngine;
using System.Linq;
using System.Collections.Generic;
[CustomEditor(typeof(tk2dStaticSpriteBatcher))]
class tk2dStaticSpriteBatcherEditor : Editor
{
tk2dStaticSpriteBatcher batcher { get { return (tk2dStaticSpriteBatcher)target; } }
// Like GetComponentsInChildren, but excludes components attached to the root itself.
T[] GetComponentsInChildrenExcludeSelf<T>(Transform root) where T : Component {
	return root.GetComponentsInChildren<T>()
		.Where(component => component.transform != root)
		.ToArray();
}
/// <summary>
/// Editor GUI for the un-committed batcher: the "Commit" button converts every child
/// object (sprites, text meshes, empty GameObjects) into batched-sprite records,
/// destroys the children and builds the combined mesh.
/// </summary>
void DrawEditorGUI()
{
if (GUILayout.Button("Commit"))
{
// Select all children, EXCLUDING self
Transform[] allTransforms = batcher.transform.GetComponentsInChildren<Transform>();
allTransforms = (from t in allTransforms where t != batcher.transform select t).ToArray();
// sort sprites, smaller to larger z
if (batcher.CheckFlag(tk2dStaticSpriteBatcher.Flags.SortToCamera)) {
tk2dCamera tk2dCam = tk2dCamera.CameraForLayer( batcher.gameObject.layer );
Camera cam = tk2dCam ? tk2dCam.camera : Camera.main;
// sort by distance along the camera's view (renderer bounds center when available)
allTransforms = (from t in allTransforms orderby cam.WorldToScreenPoint((t.renderer != null) ? t.renderer.bounds.center : t.position).z descending select t).ToArray();
}
else {
allTransforms = (from t in allTransforms orderby ((t.renderer != null) ? t.renderer.bounds.center : t.position).z descending select t).ToArray();
}
// and within the z sort by material
if (allTransforms.Length == 0)
{
EditorUtility.DisplayDialog("StaticSpriteBatcher", "Error: No child objects found", "Ok");
return;
}
#if !(UNITY_3_5 || UNITY_4_0 || UNITY_4_0_1 || UNITY_4_1 || UNITY_4_2)
// Unity 4.3+: mixing 3D (Mesh/Box) and 2D (Box2D/Edge2D/Polygon2D) colliders is rejected up front.
MeshCollider[] childMeshColliders = GetComponentsInChildrenExcludeSelf<MeshCollider>(batcher.transform);
BoxCollider[] childBoxColliders = GetComponentsInChildrenExcludeSelf<BoxCollider>(batcher.transform);
BoxCollider2D[] childBoxCollider2Ds = GetComponentsInChildrenExcludeSelf<BoxCollider2D>(batcher.transform);
EdgeCollider2D[] childEdgeCollider2Ds = GetComponentsInChildrenExcludeSelf<EdgeCollider2D>(batcher.transform);
PolygonCollider2D[] childPolygonCollider2Ds = GetComponentsInChildrenExcludeSelf<PolygonCollider2D>(batcher.transform);
if ((childMeshColliders.Length > 0 || childBoxColliders.Length > 0) && (childBoxCollider2Ds.Length > 0 || childEdgeCollider2Ds.Length > 0 || childPolygonCollider2Ds.Length > 0)) {
EditorUtility.DisplayDialog("StaticSpriteBatcher", "Error: Can't mix 2D and 3D colliders", "Ok");
return;
}
#endif
// Maps each child transform to its index in batchedSprites; -1 marks the batcher root (no parent record).
Dictionary<Transform, int> batchedSpriteLookup = new Dictionary<Transform, int>();
batchedSpriteLookup[batcher.transform] = -1;
Matrix4x4 batcherWorldToLocal = batcher.transform.worldToLocalMatrix;
batcher.spriteCollection = null;
batcher.batchedSprites = new tk2dBatchedSprite[allTransforms.Length];
List<tk2dTextMeshData> allTextMeshData = new List<tk2dTextMeshData>();
int currBatchedSprite = 0;
// First pass: capture each child's transform and type-specific data into a tk2dBatchedSprite.
foreach (var t in allTransforms)
{
tk2dBaseSprite baseSprite = t.GetComponent<tk2dBaseSprite>();
tk2dTextMesh textmesh = t.GetComponent<tk2dTextMesh>();
tk2dBatchedSprite bs = new tk2dBatchedSprite();
bs.name = t.gameObject.name;
bs.position = t.localPosition;
bs.rotation = t.localRotation;
bs.relativeMatrix = batcherWorldToLocal * t.localToWorldMatrix;
if (baseSprite)
{
bs.baseScale = Vector3.one;
bs.localScale = new Vector3(t.localScale.x * baseSprite.scale.x, t.localScale.y * baseSprite.scale.y, t.localScale.z * baseSprite.scale.z);
FillBatchedSprite(bs, t.gameObject);
// temp redundant - just incase batcher expects to point to a valid one, somewhere we've missed
batcher.spriteCollection = baseSprite.Collection;
}
else if (textmesh)
{
// Text meshes store their full configuration in a side table (allTextMeshData),
// referenced from the batched sprite via xRefId.
bs.spriteCollection = null;
bs.type = tk2dBatchedSprite.Type.TextMesh;
bs.color = textmesh.color;
bs.baseScale = textmesh.scale;
bs.renderLayer = textmesh.SortingOrder;
bs.localScale = new Vector3(t.localScale.x * textmesh.scale.x, t.localScale.y * textmesh.scale.y, t.localScale.z * textmesh.scale.z);
bs.FormattedText = textmesh.FormattedText;
tk2dTextMeshData tmd = new tk2dTextMeshData();
tmd.font = textmesh.font;
tmd.text = textmesh.text;
tmd.color = textmesh.color;
tmd.color2 = textmesh.color2;
tmd.useGradient = textmesh.useGradient;
tmd.textureGradient = textmesh.textureGradient;
tmd.anchor = textmesh.anchor;
tmd.kerning = textmesh.kerning;
tmd.maxChars = textmesh.maxChars;
tmd.inlineStyling = textmesh.inlineStyling;
tmd.formatting = textmesh.formatting;
tmd.wordWrapWidth = textmesh.wordWrapWidth;
tmd.spacing = textmesh.Spacing;
tmd.lineSpacing = textmesh.LineSpacing;
bs.xRefId = allTextMeshData.Count;
allTextMeshData.Add(tmd);
}
else
{
// Empty GameObject
bs.spriteId = -1;
bs.baseScale = Vector3.one;
bs.localScale = t.localScale;
bs.type = tk2dBatchedSprite.Type.EmptyGameObject;
}
batchedSpriteLookup[t] = currBatchedSprite;
batcher.batchedSprites[currBatchedSprite++] = bs;
}
batcher.allTextMeshData = allTextMeshData.ToArray();
// Second pass: record parent indices, then reparent every child to the batcher.
int idx = 0;
foreach (var t in allTransforms)
{
var bs = batcher.batchedSprites[idx];
bs.parentId = batchedSpriteLookup[t.parent];
t.parent = batcher.transform; // unparent
++idx;
}
// All children are now direct children of the batcher; destroy them.
Transform[] directChildren = (from t in allTransforms where t.parent == batcher.transform select t).ToArray();
foreach (var t in directChildren)
{
GameObject.DestroyImmediate(t.gameObject);
}
// Compensate the transform scale by the batcher scale before building the mesh.
Vector3 inverseScale = new Vector3(1.0f / batcher.scale.x, 1.0f / batcher.scale.y, 1.0f / batcher.scale.z);
batcher.transform.localScale = Vector3.Scale( batcher.transform.localScale, inverseScale );
batcher.Build();
EditorUtility.SetDirty(target);
}
}
// Restores the z center/extents of the BoxCollider on the given GameObject, if one exists.
static void RestoreBoxColliderSettings( GameObject go, float offset, float extents ) {
	BoxCollider boxCollider = go.GetComponent<BoxCollider>();
	if (boxCollider == null) {
		return;
	}

	// Vector3 is a value type, so read-modify-write the whole struct back.
	Vector3 center = boxCollider.center;
	center.z = offset;
	boxCollider.center = center;

	Vector3 size = boxCollider.size;
	size.z = extents * 2;
	boxCollider.size = size;
}
/// <summary>
/// Captures the sprite component on the given GameObject (plain, tiled, sliced or
/// clipped tk2d sprite) into the batched-sprite record, including box-collider z settings.
/// </summary>
/// <param name="bs">The batched-sprite record to fill.</param>
/// <param name="go">The GameObject carrying a tk2dBaseSprite-derived component.</param>
public static void FillBatchedSprite(tk2dBatchedSprite bs, GameObject go) {
// Probe for each concrete sprite type; only one is expected to be present.
tk2dSprite srcSprite = go.transform.GetComponent<tk2dSprite>();
tk2dTiledSprite srcTiledSprite = go.transform.GetComponent<tk2dTiledSprite>();
tk2dSlicedSprite srcSlicedSprite = go.transform.GetComponent<tk2dSlicedSprite>();
tk2dClippedSprite srcClippedSprite = go.transform.GetComponent<tk2dClippedSprite>();
tk2dBaseSprite baseSprite = go.GetComponent<tk2dBaseSprite>();
bs.spriteId = baseSprite.spriteId;
bs.spriteCollection = baseSprite.Collection;
bs.baseScale = baseSprite.scale;
bs.color = baseSprite.color;
bs.renderLayer = baseSprite.SortingOrder;
// Preserve box collider depth; extent is stored as half the collider's z size.
if (baseSprite.boxCollider != null)
{
bs.BoxColliderOffsetZ = baseSprite.boxCollider.center.z;
bs.BoxColliderExtentZ = baseSprite.boxCollider.size.z * 0.5f;
}
else {
bs.BoxColliderOffsetZ = 0.0f;
bs.BoxColliderExtentZ = 1.0f;
}
// Record the concrete sprite type plus its type-specific data and flags.
if (srcSprite) {
bs.type = tk2dBatchedSprite.Type.Sprite;
}
else if (srcTiledSprite) {
bs.type = tk2dBatchedSprite.Type.TiledSprite;
bs.Dimensions = srcTiledSprite.dimensions;
bs.anchor = srcTiledSprite.anchor;
bs.SetFlag(tk2dBatchedSprite.Flags.Sprite_CreateBoxCollider, srcTiledSprite.CreateBoxCollider);
}
else if (srcSlicedSprite) {
bs.type = tk2dBatchedSprite.Type.SlicedSprite;
bs.Dimensions = srcSlicedSprite.dimensions;
bs.anchor = srcSlicedSprite.anchor;
bs.SetFlag(tk2dBatchedSprite.Flags.Sprite_CreateBoxCollider, srcSlicedSprite.CreateBoxCollider);
bs.SetFlag(tk2dBatchedSprite.Flags.SlicedSprite_BorderOnly, srcSlicedSprite.BorderOnly);
bs.SlicedSpriteBorderBottomLeft = new Vector2(srcSlicedSprite.borderLeft, srcSlicedSprite.borderBottom);
bs.SlicedSpriteBorderTopRight = new Vector2(srcSlicedSprite.borderRight, srcSlicedSprite.borderTop);
}
else if (srcClippedSprite) {
bs.type = tk2dBatchedSprite.Type.ClippedSprite;
bs.ClippedSpriteRegionBottomLeft = srcClippedSprite.clipBottomLeft;
bs.ClippedSpriteRegionTopRight = srcClippedSprite.clipTopRight;
bs.SetFlag(tk2dBatchedSprite.Flags.Sprite_CreateBoxCollider, srcClippedSprite.CreateBoxCollider);
}
}
// This is used by other parts of code
/// <summary>
/// Inverse of FillBatchedSprite: re-creates the appropriate tk2d sprite component on the
/// given GameObject from a batched-sprite record and restores its type-specific settings.
/// </summary>
/// <param name="go">The GameObject to attach the restored sprite component to.</param>
/// <param name="bs">The batched-sprite record to restore from.</param>
public static void RestoreBatchedSprite(GameObject go, tk2dBatchedSprite bs) {
tk2dBaseSprite baseSprite = null;
switch (bs.type) {
case tk2dBatchedSprite.Type.EmptyGameObject:
{
// nothing to restore for empty GameObjects
break;
}
case tk2dBatchedSprite.Type.Sprite:
{
tk2dSprite s = tk2dBaseSprite.AddComponent<tk2dSprite>(go, bs.spriteCollection, bs.spriteId);
baseSprite = s;
break;
}
case tk2dBatchedSprite.Type.TiledSprite:
{
tk2dTiledSprite s = tk2dBaseSprite.AddComponent<tk2dTiledSprite>(go, bs.spriteCollection, bs.spriteId);
baseSprite = s;
s.dimensions = bs.Dimensions;
s.anchor = bs.anchor;
s.CreateBoxCollider = bs.CheckFlag(tk2dBatchedSprite.Flags.Sprite_CreateBoxCollider);
RestoreBoxColliderSettings(s.gameObject, bs.BoxColliderOffsetZ, bs.BoxColliderExtentZ);
break;
}
case tk2dBatchedSprite.Type.SlicedSprite:
{
tk2dSlicedSprite s = tk2dBaseSprite.AddComponent<tk2dSlicedSprite>(go, bs.spriteCollection, bs.spriteId);
baseSprite = s;
s.dimensions = bs.Dimensions;
s.anchor = bs.anchor;
s.BorderOnly = bs.CheckFlag(tk2dBatchedSprite.Flags.SlicedSprite_BorderOnly);
s.SetBorder(bs.SlicedSpriteBorderBottomLeft.x, bs.SlicedSpriteBorderBottomLeft.y, bs.SlicedSpriteBorderTopRight.x, bs.SlicedSpriteBorderTopRight.y);
s.CreateBoxCollider = bs.CheckFlag(tk2dBatchedSprite.Flags.Sprite_CreateBoxCollider);
RestoreBoxColliderSettings(s.gameObject, bs.BoxColliderOffsetZ, bs.BoxColliderExtentZ);
break;
}
case tk2dBatchedSprite.Type.ClippedSprite:
{
tk2dClippedSprite s = tk2dBaseSprite.AddComponent<tk2dClippedSprite>(go, bs.spriteCollection, bs.spriteId);
baseSprite = s;
s.clipBottomLeft = bs.ClippedSpriteRegionBottomLeft;
s.clipTopRight = bs.ClippedSpriteRegionTopRight;
s.CreateBoxCollider = bs.CheckFlag(tk2dBatchedSprite.Flags.Sprite_CreateBoxCollider);
RestoreBoxColliderSettings(s.gameObject, bs.BoxColliderOffsetZ, bs.BoxColliderExtentZ);
break;
}
}
// Common sprite settings shared by every non-empty type.
if (baseSprite != null) {
baseSprite.SortingOrder = bs.renderLayer;
baseSprite.scale = bs.baseScale;
baseSprite.color = bs.color;
}
}
// Inspector GUI shown while the batcher holds baked sprite data: an "Edit" button
// that expands the batch back into child GameObjects, plus batcher settings and
// mesh statistics.
void DrawInstanceGUI()
{
	if (GUILayout.Button("Edit"))
	{
		// Work in identity space while restoring children so their local
		// transforms come out unmodified; the original transform is put back
		// after Build().
		Vector3 batcherPos = batcher.transform.position;
		Quaternion batcherRotation = batcher.transform.rotation;
		batcher.transform.position = Vector3.zero;
		batcher.transform.rotation = Quaternion.identity;
		batcher.transform.localScale = Vector3.Scale(batcher.transform.localScale, batcher.scale);

		// Pass 1: create one GameObject per batched sprite. 'parents' maps the
		// sprite index to its transform so parent links can be resolved later.
		Dictionary<int, Transform> parents = new Dictionary<int, Transform>();
		List<Transform> children = new List<Transform>();
		List<GameObject> gos = new List<GameObject>();
		int id;
		id = 0;
		foreach (var bs in batcher.batchedSprites)
		{
			GameObject go = new GameObject(bs.name);
			go.layer = batcher.gameObject.layer;
			parents[id++] = go.transform;
			children.Add(go.transform);
			gos.Add (go);
		}

		// Pass 2: re-link the hierarchy. parentId == -1 means the sprite is a
		// direct child of the batcher itself.
		id = 0;
		foreach (var bs in batcher.batchedSprites)
		{
			Transform parent = batcher.transform;
			if (bs.parentId != -1)
				parents.TryGetValue(bs.parentId, out parent);
			children[id++].parent = parent;
		}

		// Pass 3: restore each child's local TRS and its sprite / text mesh.
		id = 0;
		foreach (var bs in batcher.batchedSprites)
		{
			GameObject go = gos[id];
			go.transform.localPosition = bs.position;
			go.transform.localRotation = bs.rotation;
			{
				// Divides baseScale back out of the baked local scale (the two
				// appear to have been combined when batching), guarding against
				// division by (near-)zero components.
				float sx = bs.localScale.x / ((Mathf.Abs (bs.baseScale.x) > Mathf.Epsilon) ? bs.baseScale.x : 1.0f);
				float sy = bs.localScale.y / ((Mathf.Abs (bs.baseScale.y) > Mathf.Epsilon) ? bs.baseScale.y : 1.0f);
				float sz = bs.localScale.z / ((Mathf.Abs (bs.baseScale.z) > Mathf.Epsilon) ? bs.baseScale.z : 1.0f);
				go.transform.localScale = new Vector3(sx, sy, sz);
			}
			if (bs.type == tk2dBatchedSprite.Type.TextMesh) {
				// Text meshes carry extra state in a side table (allTextMeshData)
				// referenced by xRefId; rebuild the component from it.
				tk2dTextMesh s = go.AddComponent<tk2dTextMesh>();
				if (batcher.allTextMeshData == null || bs.xRefId == -1) {
					Debug.LogError("Unable to find text mesh ref");
				}
				else {
					tk2dTextMeshData tmd = batcher.allTextMeshData[bs.xRefId];
					s.font = tmd.font;
					s.scale = bs.baseScale;
					s.SortingOrder = bs.renderLayer;
					s.text = tmd.text;
					s.color = bs.color;
					s.color2 = tmd.color2;
					s.useGradient = tmd.useGradient;
					s.textureGradient = tmd.textureGradient;
					s.anchor = tmd.anchor;
					s.scale = bs.baseScale;
					s.kerning = tmd.kerning;
					s.maxChars = tmd.maxChars;
					s.inlineStyling = tmd.inlineStyling;
					s.formatting = tmd.formatting;
					s.wordWrapWidth = tmd.wordWrapWidth;
					s.Spacing = tmd.spacing;
					s.LineSpacing = tmd.lineSpacing;
					s.Commit();
				}
			}
			else {
				RestoreBatchedSprite(go, bs);
			}
			++id;
		}

		// Drop the baked data, rebuild the batch mesh from the restored
		// children, then restore the batcher's saved world transform.
		batcher.batchedSprites = null;
		batcher.Build();
		EditorUtility.SetDirty(target);
		batcher.transform.position = batcherPos;
		batcher.transform.rotation = batcherRotation;
	}

	// Editable batcher settings.
	batcher.scale = EditorGUILayout.Vector3Field("Scale", batcher.scale);
	batcher.SetFlag(tk2dStaticSpriteBatcher.Flags.GenerateCollider, EditorGUILayout.Toggle("Generate Collider", batcher.CheckFlag(tk2dStaticSpriteBatcher.Flags.GenerateCollider)));
	batcher.SetFlag(tk2dStaticSpriteBatcher.Flags.FlattenDepth, EditorGUILayout.Toggle("Flatten Depth", batcher.CheckFlag(tk2dStaticSpriteBatcher.Flags.FlattenDepth)));
	batcher.SetFlag(tk2dStaticSpriteBatcher.Flags.SortToCamera, EditorGUILayout.Toggle("Sort to Camera", batcher.CheckFlag(tk2dStaticSpriteBatcher.Flags.SortToCamera)));

	// Read-only stats: triangle count (indices / 3) and material count of the
	// generated mesh, when one exists.
	MeshFilter meshFilter = batcher.GetComponent<MeshFilter>();
	MeshRenderer meshRenderer = batcher.GetComponent<MeshRenderer>();
	if (meshFilter != null && meshFilter.sharedMesh != null && meshRenderer != null) {
		GUILayout.Label("Stats", EditorStyles.boldLabel);
		int numIndices = 0;
		Mesh mesh = meshFilter.sharedMesh;
		for (int i = 0; i < mesh.subMeshCount; ++i) {
			numIndices += mesh.GetTriangles(i).Length;
		}
		GUILayout.Label(string.Format("Triangles: {0}\nMaterials: {1}", numIndices / 3, meshRenderer.sharedMaterials.Length ));
	}
}
// Dispatches to the baked-instance GUI when sprite data has been baked,
// otherwise to the editing GUI.
public override void OnInspectorGUI()
{
	bool hasBakedSprites = batcher.batchedSprites != null && batcher.batchedSprites.Length > 0;
	if (hasBakedSprites) {
		DrawInstanceGUI();
	}
	else {
		DrawEditorGUI();
	}
}
// Menu command: creates a new Static Sprite Batcher GameObject in the scene,
// selects it, and registers the creation with the Undo system.
[MenuItem("GameObject/Create Other/tk2d/Static Sprite Batcher", false, 13849)]
static void DoCreateSpriteObject()
{
	GameObject created = tk2dEditorUtility.CreateGameObjectInScene("Static Sprite Batcher");
	tk2dStaticSpriteBatcher spriteBatcher = created.AddComponent<tk2dStaticSpriteBatcher>();
	spriteBatcher.version = tk2dStaticSpriteBatcher.CURRENT_VERSION;
	Selection.activeGameObject = created;
	Undo.RegisterCreatedObjectUndo(created, "Create Static Sprite Batcher");
}
}
| |
using System;
using System.Collections.Generic;
using System.Configuration;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using DSL.POS.DTO.DTO;
using DSL.POS.DataAccessLayer.Interface;
using DSL.POS.DataAccessLayer.Common.Imp;
namespace DSL.POS.DataAccessLayer.Imp
{
/// <summary>
/// Data access layer for purchase returns (PurchaseReturnMain / PurchaseReturnSub).
/// All SQL is parameterized; connections and commands are disposed via using blocks.
/// </summary>
class PurchaseReturnInfoDALImp : CommonDALImp, IPurchaseReturnInfoDAL
{
    #region ALL_Method

    /// <summary>
    /// Auto-generates the next purchase return number: max existing
    /// PurchaseReturnNo + 1, zero-padded to seven digits (e.g. "0000001").
    /// </summary>
    /// <param name="Obj">The purchase return being saved (currently unused; kept for interface stability).</param>
    /// <returns>The next purchase return number.</returns>
    private string Get_PurchaseReturnNo(PurchaseReturnMainDTO Obj)
    {
        int nextNumber;
        // using blocks guarantee the connection/command are released even if the query throws.
        using (SqlConnection conn = new SqlConnection(ConfigurationManager.ConnectionStrings["DPOSConnectionString"].ToString()))
        using (SqlCommand cmd = conn.CreateCommand())
        {
            cmd.CommandText = "Select Isnull(Max(cast(PurchaseReturnNo as int)),0 )+1 from PurchaseReturnMain";
            conn.Open();
            nextNumber = (int)cmd.ExecuteScalar();
        }
        return nextNumber.ToString("0000000");
    }

    /// <summary>
    /// Saves a purchase return: one PurchaseReturnMain row plus one
    /// PurchaseReturnSub row per detail item, executed as a single batch.
    /// </summary>
    /// <param name="obj">A <see cref="PurchaseReturnMainDTO"/> instance.</param>
    /// <exception cref="ApplicationException">Thrown when any statement in the batch reports an error.</exception>
    public override void Save(object obj)
    {
        PurchaseReturnMainDTO main = (PurchaseReturnMainDTO)obj;
        main.PurchaseReturnNo = Get_PurchaseReturnNo(main);

        StringBuilder sql = new StringBuilder();
        // @ERR accumulates @@ERROR after every statement.
        // FIX: @ERR must be initialized — previously it stayed NULL, so
        // NULL + @@ERROR was always NULL and the error check below never fired.
        sql.Append("DECLARE @ERR int; SET @ERR = 0;");
        sql.Append("INSERT INTO PurchaseReturnMain (PRM_PK, PU_PK, Sp_PK, PurchaseReturnNo, ReferenceNo, GRN_No, GRNDate, TotalReturnAmount, PurchaseDeduction, PurchaseReturnAmount, Remarks, EntryBy, EntryDate) ");
        sql.Append("VALUES (@PRM_PK, @PU_PK, @Sp_PK, @PurchaseReturnNo, @ReferenceNo, @GRN_No, @GRNDate, @TotalReturnAmount, @PurchaseDeduction, @PurchaseReturnAmount, @Remarks, @EntryBy, GETDATE());");
        sql.Append("SELECT @ERR = @ERR + @@ERROR;");

        using (SqlConnection conn = new SqlConnection(ConfigurationManager.ConnectionStrings["DPOSConnectionString"].ToString()))
        using (SqlCommand cmd = conn.CreateCommand())
        {
            // Parameterized values replace the previous string concatenation,
            // which was open to SQL injection and broke on embedded quotes.
            cmd.Parameters.AddWithValue("@PRM_PK", main.PrimaryKey);
            cmd.Parameters.AddWithValue("@PU_PK", main.PU_PK);
            cmd.Parameters.AddWithValue("@Sp_PK", main.Sp_PK);
            cmd.Parameters.AddWithValue("@PurchaseReturnNo", main.PurchaseReturnNo);
            cmd.Parameters.AddWithValue("@ReferenceNo", main.ReferenceNo);
            cmd.Parameters.AddWithValue("@GRN_No", main.GRN_No);
            cmd.Parameters.AddWithValue("@GRNDate", main.GRNDate);
            cmd.Parameters.AddWithValue("@TotalReturnAmount", main.TotalReturnAmount);
            cmd.Parameters.AddWithValue("@PurchaseDeduction", main.PurchaseDeduction);
            cmd.Parameters.AddWithValue("@PurchaseReturnAmount", main.PurchaseReturnAmount);
            cmd.Parameters.AddWithValue("@Remarks", main.Remarks);
            cmd.Parameters.AddWithValue("@EntryBy", main.EntryBy);

            // One parameterized INSERT per detail row; a numeric suffix keeps
            // parameter names unique across rows.
            int row = 0;
            foreach (PurchaseReturnSubDTO sub in main.PurchaseReturnSubDTO)
            {
                sql.Append("INSERT INTO PurchaseReturnSub (PRM_PK, P_PK, ReceivedQuantity, ItemRate, Discount, Notes, EntryBy, EntryDate) ");
                sql.AppendFormat("VALUES (@SubPRM_PK{0}, @SubP_PK{0}, @ReceivedQuantity{0}, @ItemRate{0}, @SubDiscount{0}, @Notes{0}, @SubEntryBy{0}, GETDATE());", row);
                sql.Append("SELECT @ERR = @ERR + @@ERROR;");

                cmd.Parameters.AddWithValue("@SubPRM_PK" + row, sub.PRM_PK);
                cmd.Parameters.AddWithValue("@SubP_PK" + row, sub.P_PK);
                cmd.Parameters.AddWithValue("@ReceivedQuantity" + row, sub.ReceivedQuantity);
                cmd.Parameters.AddWithValue("@ItemRate" + row, sub.ItemRate);
                cmd.Parameters.AddWithValue("@SubDiscount" + row, sub.Discount);
                cmd.Parameters.AddWithValue("@Notes" + row, sub.Notes);
                cmd.Parameters.AddWithValue("@SubEntryBy" + row, sub.EntryBy);
                row++;
            }

            // Two columns keep the original reader shape: a dummy first column
            // (the old code selected an @ID that was never assigned) and the
            // accumulated error count.
            sql.Append("SELECT 0, ISNULL(@ERR, 0);");

            cmd.CommandType = CommandType.Text;
            cmd.CommandText = sql.ToString();
            conn.Open();
            using (SqlDataReader rdr = cmd.ExecuteReader())
            {
                // Read the returned error count; non-zero means at least one
                // statement in the batch failed.
                rdr.Read();
                if (rdr.GetInt32(1) != 0)
                    throw new ApplicationException("DATA INTEGRITY ERROR ON ORDER INSERT - ROLLBACK ISSUED");
            }
        }
    }

    /// <summary>
    /// Not implemented. Intentionally a no-op so existing callers keep working;
    /// the previous commented-out draft (copied from the sales DAL) was removed.
    /// </summary>
    /// <param name="oPurchaseReturnMainDTO">The purchase return to update (ignored).</param>
    public void Update(PurchaseReturnMainDTO oPurchaseReturnMainDTO)
    {
    }

    /// <summary>
    /// Loads the purchase-main and supplier information for a GRN number,
    /// wrapped in a <see cref="PurchaseReturnMainDTO"/>.
    /// </summary>
    /// <param name="strCode">The GRN number to look up.</param>
    /// <returns>A populated DTO, or an empty DTO when no row matches.</returns>
    public PurchaseReturnMainDTO GetPurchaseInfoBYGRNNo(string strCode)
    {
        string sqlSelectMain = "SELECT PurchaseMain.PU_PK, PurchaseMain.GRN_No, PurchaseMain.GRNDate, PurchaseMain.ReferenceNo, PurchaseMain.Sp_PK, PurchaseMain.TotalAmount, PurchaseMain.Discount, PurchaseMain.EntryBy, PurchaseMain.EntryDate, SupplierInfo.SupplierName, SupplierInfo.Sp_PK, SupplierInfo.SupplierCode, SupplierInfo.Address FROM SupplierInfo INNER JOIN PurchaseMain ON SupplierInfo.Sp_PK = PurchaseMain.Sp_PK WHERE PurchaseMain.GRN_No = @GRN_No";
        PurchaseReturnMainDTO result = new PurchaseReturnMainDTO();
        // using blocks replace the old manual Close/Dispose/Cancel sequence,
        // which disposed the command and then called Cancel() on it.
        using (SqlConnection conn = new SqlConnection(ConfigurationManager.ConnectionStrings["DPOSConnectionString"].ToString()))
        using (SqlCommand cmd = conn.CreateCommand())
        {
            cmd.CommandText = sqlSelectMain;
            cmd.Parameters.Add(new SqlParameter("@GRN_No", SqlDbType.VarChar, 20));
            cmd.Parameters["@GRN_No"].Value = strCode;
            conn.Open();
            using (SqlDataReader reader = cmd.ExecuteReader())
            {
                if (reader.Read())
                {
                    result = populate(reader);
                }
            }
        }
        return result;
    }

    /// <summary>
    /// Maps the current row of the GRN lookup (purchase main joined to supplier)
    /// into a <see cref="PurchaseReturnMainDTO"/>.
    /// </summary>
    /// <param name="reader">Reader positioned on a valid row.</param>
    /// <returns>A DTO carrying the purchase main and supplier sub-DTOs.</returns>
    public PurchaseReturnMainDTO populate(SqlDataReader reader)
    {
        // The old catch { throw ex; } block destroyed the stack trace and added
        // nothing; exceptions now propagate unchanged.
        PurchaseReturnMainDTO dto = new PurchaseReturnMainDTO();
        PurchaseMainDTO oPurchaseMainDTO = new PurchaseMainDTO();
        SupplierInfoDTO oSupplierInfoDTO = new SupplierInfoDTO();
        oPurchaseMainDTO.PrimaryKey = (Guid)reader["PU_PK"];
        oPurchaseMainDTO.GRN_No = (string)reader["GRN_No"];
        oPurchaseMainDTO.GRNDate = (DateTime)reader["GRNDate"];
        oPurchaseMainDTO.ReferenceNo = (string)reader["ReferenceNo"];
        oPurchaseMainDTO.Sp_PK = (Guid)reader["Sp_PK"];
        oPurchaseMainDTO.TotalAmount = (decimal)reader["TotalAmount"];
        oPurchaseMainDTO.Discount = (decimal)reader["Discount"];
        oPurchaseMainDTO.EntryBy = (string)reader["EntryBy"];
        oPurchaseMainDTO.EntryDate = (DateTime)reader["EntryDate"];
        dto.PurchaseMainDTO = oPurchaseMainDTO;
        oSupplierInfoDTO.PrimaryKey = (Guid)reader["Sp_PK"];
        oSupplierInfoDTO.SupplierCode = (string)reader["SupplierCode"];
        oSupplierInfoDTO.SupplierName = (string)reader["SupplierName"];
        oSupplierInfoDTO.Address = (string)reader["Address"];
        dto.SupplierInfoDTO = oSupplierInfoDTO;
        return dto;
    }

    /// <summary>
    /// Maps one PurchaseReturnSub row into a <see cref="PurchaseReturnSubDTO"/>.
    /// </summary>
    /// <param name="reader">Reader positioned on a valid row.</param>
    /// <returns>The populated detail DTO.</returns>
    public PurchaseReturnSubDTO populatesub(SqlDataReader reader)
    {
        PurchaseReturnSubDTO dto = new PurchaseReturnSubDTO();
        dto.PrimaryKey = (Guid)reader["PRS_PK"];
        // FIX: previously read "SALRM_PK" (copied from the sales DAL); the
        // foreign-key column written by Save is PRM_PK.
        dto.PRM_PK = (Guid)reader["PRM_PK"];
        dto.P_PK = (Guid)reader["P_PK"];
        dto.ReceivedQuantity = (decimal)reader["ReceivedQuantity"];
        dto.ItemRate = (decimal)reader["ItemRate"];
        dto.Discount = (decimal)reader["Discount"];
        // FIX: Notes previously read the "Discount" column — a copy/paste bug
        // that would also fail the cast from decimal to string.
        dto.Notes = (string)reader["Notes"];
        dto.EntryBy = (string)reader["EntryBy"];
        dto.EntryDate = (DateTime)reader["EntryDate"];
        return dto;
    }

    #endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Baseline;
using Marten.Linq;
using Marten.Linq.QueryHandlers;
using Marten.Schema;
using Marten.Services;
using Marten.Services.BatchQuerying;
using Npgsql;
using Remotion.Linq.Parsing.Structure;
namespace Marten
{
/// <summary>
/// Low-level loader abstraction: fetches the raw <see cref="FetchResult{T}"/>
/// (document plus fetch metadata) for a document id.
/// </summary>
public interface ILoader
{
    /// <summary>Loads the raw fetch result for the document with the given id.</summary>
    FetchResult<T> LoadDocument<T>(object id) where T : class;
    /// <summary>Asynchronous counterpart of <see cref="LoadDocument{T}"/>.</summary>
    Task<FetchResult<T>> LoadDocumentAsync<T>(object id, CancellationToken token) where T : class;
}
/// <summary>
/// Read-only Marten session: LINQ queries, user-supplied SQL, compiled queries,
/// batched queries, and loading documents by id. Loaded documents are resolved
/// through an identity map. Disposing the session disposes its managed
/// connection and returns the pooled writer.
/// </summary>
public class QuerySession : IQuerySession, ILoader
{
    private readonly IDocumentSchema _schema;
    private readonly ISerializer _serializer;
    // Wraps the underlying Npgsql connection; disposed with the session.
    private readonly IManagedConnection _connection;
    private readonly IQueryParser _parser;
    // Per-session cache mapping document ids to already-materialized instances.
    private readonly IIdentityMap _identityMap;
    protected readonly CharArrayTextWriter.Pool WriterPool;
    // Set by Dispose(); guards most public members via assertNotDisposed().
    private bool _disposed;

    public QuerySession(IDocumentStore store, IDocumentSchema schema, ISerializer serializer, IManagedConnection connection, IQueryParser parser, IIdentityMap identityMap, CharArrayTextWriter.Pool writerPool)
    {
        DocumentStore = store;
        _schema = schema;
        _serializer = serializer;
        _connection = connection;
        _parser = parser;
        _identityMap = identityMap;
        WriterPool = writerPool;
    }

    /// <summary>The store this session was created from.</summary>
    public IDocumentStore DocumentStore { get; }

    /// <summary>Raw-JSON loading facade; a fresh loader is created per access.</summary>
    public IJsonLoader Json => new JsonLoader(_connection, _schema);

    /// <summary>Throws if the session has already been disposed.</summary>
    protected void assertNotDisposed()
    {
        if (_disposed) throw new ObjectDisposedException("This session has been disposed");
    }

    /// <summary>Starts a LINQ query for documents of type T.</summary>
    public IMartenQueryable<T> Query<T>()
    {
        assertNotDisposed();
        var executor = new MartenQueryExecutor(_connection, _schema, _identityMap);
        var queryProvider = new MartenQueryProvider(typeof(MartenQueryable<>), _parser, executor);
        return new MartenQueryable<T>(queryProvider);
    }

    /// <summary>Runs a user-supplied SQL query and materializes the results as T.</summary>
    public IList<T> Query<T>(string sql, params object[] parameters)
    {
        assertNotDisposed();
        var handler = new UserSuppliedQueryHandler<T>(_schema, _serializer, sql, parameters);
        return _connection.Fetch(handler, _identityMap.ForQuery(), null);
    }

    /// <summary>Asynchronous counterpart of <see cref="Query{T}(string, object[])"/>.</summary>
    public Task<IList<T>> QueryAsync<T>(string sql, CancellationToken token, params object[] parameters)
    {
        assertNotDisposed();
        var handler = new UserSuppliedQueryHandler<T>(_schema, _serializer, sql, parameters);
        return _connection.FetchAsync(handler, _identityMap.ForQuery(), null, token);
    }

    /// <summary>Creates a batch that sends several queries in one round trip.</summary>
    public IBatchedQuery CreateBatchQuery()
    {
        assertNotDisposed();
        return new BatchedQuery(_connection, _schema, _identityMap.ForQuery(), this, _serializer);
    }

    // Shorthand for the storage strategy registered for document type T.
    private IDocumentStorage storage<T>()
    {
        return _schema.StorageFor(typeof(T));
    }

    /// <summary>Fetches the raw document + metadata for an id (ILoader implementation).</summary>
    public FetchResult<T> LoadDocument<T>(object id) where T : class
    {
        assertNotDisposed();
        var storage = storage<T>();
        var resolver = storage.As<IResolver<T>>();

        var cmd = storage.LoaderCommand(id);

        return _connection.Execute(cmd, c =>
        {
            using (var reader = cmd.ExecuteReader())
            {
                return resolver.Fetch(reader, _serializer);
            }
        });
    }

    /// <summary>Asynchronous counterpart of <see cref="LoadDocument{T}"/>.</summary>
    public Task<FetchResult<T>> LoadDocumentAsync<T>(object id, CancellationToken token) where T : class
    {
        assertNotDisposed();
        var storage = storage<T>();
        var resolver = storage.As<IResolver<T>>();

        var cmd = storage.LoaderCommand(id);

        return _connection.ExecuteAsync(cmd, async (c, tkn) =>
        {
            using (var reader = await cmd.ExecuteReaderAsync(tkn).ConfigureAwait(false))
            {
                return await resolver.FetchAsync(reader, _serializer, token).ConfigureAwait(false);
            }
        }, token);
    }

    /// <summary>Loads a document by string id, via the identity map.</summary>
    public T Load<T>(string id)
    {
        return load<T>(id);
    }

    public Task<T> LoadAsync<T>(string id, CancellationToken token)
    {
        return loadAsync<T>(id, token);
    }

    /// <summary>Loads a document by value-type id (int/long/Guid boxed).</summary>
    public T Load<T>(ValueType id)
    {
        return load<T>(id);
    }

    // Shared synchronous load path: validates the id type, then resolves through
    // the identity map (which may hit the database via this session's ILoader).
    private T load<T>(object id)
    {
        if (id == null) throw new ArgumentNullException(nameof(id));

        assertNotDisposed();

        assertCorrectIdType<T>(id);

        var resolver = storage<T>().As<IResolver<T>>();

        return resolver.Resolve(_identityMap, this, id);
    }

    // Rejects ids whose runtime type does not match the mapped id type for T.
    // int ids are accepted for documents mapped with long ids.
    private void assertCorrectIdType<T>(object id)
    {
        var mapping = _schema.MappingFor(typeof(T));
        if (id.GetType() != mapping.IdType)
        {
            if (id.GetType() == typeof(int) && mapping.IdType == typeof(long)) return;

            throw new InvalidOperationException(
                $"The id type for {typeof(T).FullName} is {mapping.IdType.Name}, but got {id.GetType().Name}");
        }
    }

    // Shared asynchronous load path; see load<T> for the synchronous equivalent.
    private Task<T> loadAsync<T>(object id, CancellationToken token)
    {
        assertNotDisposed();
        assertCorrectIdType<T>(id);
        return storage<T>().As<IResolver<T>>().ResolveAsync(_identityMap, this, token, id);
    }

    /// <summary>Fluent entry point for loading several documents by their ids.</summary>
    public ILoadByKeys<T> LoadMany<T>()
    {
        assertNotDisposed();
        return new LoadByKeys<T>(this);
    }

    // LoadMany / LoadManyAsync overloads below simply forward to LoadMany<T>()
    // for each supported id type.
    public IList<T> LoadMany<T>(params string[] ids)
    {
        assertNotDisposed();
        return LoadMany<T>().ById(ids);
    }

    public IList<T> LoadMany<T>(params Guid[] ids)
    {
        assertNotDisposed();
        return LoadMany<T>().ById(ids);
    }

    public IList<T> LoadMany<T>(params int[] ids)
    {
        assertNotDisposed();
        return LoadMany<T>().ById(ids);
    }

    public IList<T> LoadMany<T>(params long[] ids)
    {
        assertNotDisposed();
        return LoadMany<T>().ById(ids);
    }

    public Task<IList<T>> LoadManyAsync<T>(params string[] ids)
    {
        return LoadMany<T>().ByIdAsync(ids);
    }

    public Task<IList<T>> LoadManyAsync<T>(params Guid[] ids)
    {
        return LoadMany<T>().ByIdAsync(ids);
    }

    public Task<IList<T>> LoadManyAsync<T>(params int[] ids)
    {
        return LoadMany<T>().ByIdAsync(ids);
    }

    public Task<IList<T>> LoadManyAsync<T>(params long[] ids)
    {
        return LoadMany<T>().ByIdAsync(ids);
    }

    public Task<IList<T>> LoadManyAsync<T>(CancellationToken token, params string[] ids)
    {
        return LoadMany<T>().ByIdAsync(ids, token);
    }

    public Task<IList<T>> LoadManyAsync<T>(CancellationToken token, params Guid[] ids)
    {
        return LoadMany<T>().ByIdAsync(ids, token);
    }

    public Task<IList<T>> LoadManyAsync<T>(CancellationToken token, params int[] ids)
    {
        return LoadMany<T>().ByIdAsync(ids, token);
    }

    public Task<IList<T>> LoadManyAsync<T>(CancellationToken token, params long[] ids)
    {
        return LoadMany<T>().ByIdAsync(ids, token);
    }

    // Implements the multi-id load: ids already present in the identity map
    // ("hits") are served from memory, the rest ("misses") are fetched from the
    // database in a single array-based command.
    private class LoadByKeys<TDoc> : ILoadByKeys<TDoc>
    {
        private readonly QuerySession _parent;

        public LoadByKeys(QuerySession parent)
        {
            _parent = parent;
        }

        public IList<TDoc> ById<TKey>(params TKey[] keys)
        {
            assertCorrectIdType<TKey>();

            var hitsAndMisses = this.hitsAndMisses(keys);
            var hits = hitsAndMisses.Item1;
            var misses = hitsAndMisses.Item2;
            var documents = fetchDocuments(misses);

            return concatDocuments(hits, documents);
        }

        // Same id-type check as QuerySession.assertCorrectIdType, but on the
        // compile-time key type.
        private void assertCorrectIdType<TKey>()
        {
            var mapping = _parent._schema.MappingFor(typeof(TDoc));
            if (typeof(TKey) != mapping.IdType)
            {
                if (typeof(TKey) == typeof(int) && mapping.IdType == typeof(long)) return;

                throw new InvalidOperationException(
                    $"The id type for {typeof(TDoc).FullName} is {mapping.IdType.Name}, but got {typeof(TKey).Name}");
            }
        }

        public Task<IList<TDoc>> ByIdAsync<TKey>(params TKey[] keys)
        {
            return ByIdAsync(keys, CancellationToken.None);
        }

        public IList<TDoc> ById<TKey>(IEnumerable<TKey> keys)
        {
            return ById(keys.ToArray());
        }

        public async Task<IList<TDoc>> ByIdAsync<TKey>(IEnumerable<TKey> keys,
            CancellationToken token = default(CancellationToken))
        {
            assertCorrectIdType<TKey>();

            var hitsAndMisses = this.hitsAndMisses(keys.ToArray());
            var hits = hitsAndMisses.Item1;
            var misses = hitsAndMisses.Item2;
            var documents = await fetchDocumentsAsync(misses, token).ConfigureAwait(false);

            return concatDocuments(hits, documents);
        }

        // Identity-map hits first, then the freshly fetched documents.
        private IList<TDoc> concatDocuments<TKey>(TKey[] hits, IEnumerable<TDoc> documents)
        {
            return
                hits.Select(key => _parent._identityMap.Retrieve<TDoc>(key))
                    .Concat(documents)
                    .ToList();
        }

        // Splits keys into (already-in-identity-map, needs-fetching).
        private Tuple<TKey[], TKey[]> hitsAndMisses<TKey>(TKey[] keys)
        {
            var hits = keys.Where(key => _parent._identityMap.Has<TDoc>(key)).ToArray();
            var misses = keys.Where(x => !hits.Contains(x)).ToArray();
            return new Tuple<TKey[], TKey[]>(hits, misses);
        }

        private IEnumerable<TDoc> fetchDocuments<TKey>(TKey[] keys)
        {
            var storage = _parent._schema.StorageFor(typeof(TDoc));
            var resolver = storage.As<IResolver<TDoc>>();
            var cmd = storage.LoadByArrayCommand(keys);

            var list = new List<TDoc>();

            _parent._connection.Execute(cmd, c =>
            {
                using (var reader = cmd.ExecuteReader())
                {
                    while (reader.Read())
                    {
                        var doc = resolver.Resolve(0, reader, _parent._identityMap);
                        list.Add(doc);
                    }
                }
            });

            return list;
        }

        private async Task<IEnumerable<TDoc>> fetchDocumentsAsync<TKey>(TKey[] keys, CancellationToken token)
        {
            var storage = _parent._schema.StorageFor(typeof(TDoc));
            var resolver = storage.As<IResolver<TDoc>>();
            var cmd = storage.LoadByArrayCommand(keys);

            var list = new List<TDoc>();

            await _parent._connection.ExecuteAsync(cmd, async (conn, tkn) =>
            {
                using (var reader = await cmd.ExecuteReaderAsync(tkn).ConfigureAwait(false))
                {
                    while (await reader.ReadAsync(tkn).ConfigureAwait(false))
                    {
                        var doc = resolver.Resolve(0, reader, _parent._identityMap);
                        list.Add(doc);
                    }
                }
            }, token).ConfigureAwait(false);

            return list;
        }
    }

    /// <summary>Executes a compiled query, optionally collecting query statistics.</summary>
    public TOut Query<TDoc, TOut>(ICompiledQuery<TDoc, TOut> query)
    {
        assertNotDisposed();
        QueryStatistics stats;
        var handler = _schema.HandlerFactory.HandlerFor(query, out stats);
        return _connection.Fetch(handler, _identityMap.ForQuery(), stats);
    }

    /// <summary>Asynchronous counterpart of the compiled-query overload above.</summary>
    public Task<TOut> QueryAsync<TDoc, TOut>(ICompiledQuery<TDoc, TOut> query,
        CancellationToken token = new CancellationToken())
    {
        assertNotDisposed();
        QueryStatistics stats;
        var handler = _schema.HandlerFactory.HandlerFor(query, out stats);
        return _connection.FetchAsync(handler, _identityMap.ForQuery(), stats, token);
    }

    /// <summary>The underlying Npgsql connection; throws if the session is disposed.</summary>
    public NpgsqlConnection Connection
    {
        get
        {
            assertNotDisposed();
            return _connection.Connection;
        }
    }

    /// <summary>Per-session logger, stored on the managed connection.</summary>
    public IMartenSessionLogger Logger
    {
        get { return _connection.As<ManagedConnection>().Logger; }
        set { _connection.As<ManagedConnection>().Logger = value; }
    }

    /// <summary>Number of commands this session's connection has executed.</summary>
    public int RequestCount => _connection.RequestCount;

    // NOTE(review): not guarded against double disposal — a second call would
    // dispose the connection and writer pool again; confirm both tolerate it.
    public void Dispose()
    {
        _disposed = true;
        _connection.Dispose();
        WriterPool?.Dispose();
    }

    public T Load<T>(int id)
    {
        return load<T>(id);
    }

    public T Load<T>(long id)
    {
        return load<T>(id);
    }

    public T Load<T>(Guid id)
    {
        return load<T>(id);
    }

    public Task<T> LoadAsync<T>(int id, CancellationToken token = new CancellationToken())
    {
        return loadAsync<T>(id, token);
    }

    public Task<T> LoadAsync<T>(long id, CancellationToken token = new CancellationToken())
    {
        return loadAsync<T>(id, token);
    }

    public Task<T> LoadAsync<T>(Guid id, CancellationToken token = new CancellationToken())
    {
        return loadAsync<T>(id, token);
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using Content.Server.Atmos.Reactions;
using Content.Shared.Atmos;
using Robust.Shared.Maths;
using Robust.Shared.Prototypes;
using DependencyAttribute = Robust.Shared.IoC.DependencyAttribute;
namespace Content.Server.Atmos.EntitySystems
{
public sealed partial class AtmosphereSystem
{
[Dependency] private readonly IPrototypeManager _protoMan = default!;

// All gas reaction prototypes; sorted by descending priority in InitializeGases.
private GasReactionPrototype[] _gasReactions = Array.Empty<GasReactionPrototype>();

// Specific heat per gas id. Resized in InitializeGases to a multiple of 4
// (padding entries default to 0) for the SIMD helpers.
private float[] _gasSpecificHeats = new float[Atmospherics.TotalNumberOfGases];

/// <summary>
///     List of gas reactions ordered by priority.
/// </summary>
public IEnumerable<GasReactionPrototype> GasReactions => _gasReactions!;

/// <summary>
///     Cached array of gas specific heats.
/// </summary>
public float[] GasSpecificHeats => _gasSpecificHeats;

// Reagent id produced by each gas, or null when the gas has no reagent.
public string?[] GasReagents = new string[Atmospherics.TotalNumberOfGases];
// Builds the cached reaction list and per-gas lookup tables from prototypes.
private void InitializeGases()
{
    _gasReactions = _protoMan.EnumeratePrototypes<GasReactionPrototype>().ToArray();
    // Highest priority first.
    Array.Sort(_gasReactions, (x, y) => y.Priority.CompareTo(x.Priority));

    // Pad the specific-heat table up to a multiple of four entries.
    Array.Resize(ref _gasSpecificHeats, MathHelper.NextMultipleOf(Atmospherics.TotalNumberOfGases, 4));

    for (var gas = 0; gas < GasPrototypes.Length; gas++)
    {
        var prototype = GasPrototypes[gas];
        _gasSpecificHeats[gas] = prototype.SpecificHeat;
        GasReagents[gas] = prototype.Reagent;
    }
}
/// <summary>
///     Computes the total heat capacity of a gas mixture from its current moles.
/// </summary>
public float GetHeatCapacity(GasMixture mixture)
    => GetHeatCapacityCalculation(mixture.Moles, mixture.Immutable);
/// <summary>
///     Computes the heat capacity of a gas mixture from its archived moles.
/// </summary>
public float GetHeatCapacityArchived(GasMixture mixture)
    => GetHeatCapacityCalculation(mixture.MolesArchived, mixture.Immutable);
// Shared implementation: sum over gases of moles[i] * specificHeat[i],
// clamped to the minimum heat capacity.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private float GetHeatCapacityCalculation(float[] moles, bool immutable)
{
    // Little hack to make space gas mixtures have heat capacity, therefore allowing them to cool down rooms.
    if (immutable && MathHelper.CloseTo(NumericsHelpers.HorizontalAdd(moles), 0f))
    {
        return Atmospherics.SpaceHeatCapacity;
    }

    // NOTE(review): GasSpecificHeats is padded to a multiple of 4 in
    // InitializeGases while `moles` keeps its original length — assumes the
    // NumericsHelpers handle the length mismatch; confirm.
    Span<float> tmp = stackalloc float[moles.Length];
    NumericsHelpers.Multiply(moles, GasSpecificHeats, tmp);
    return MathF.Max(NumericsHelpers.HorizontalAdd(tmp), Atmospherics.MinimumHeatCapacity);
}
/// <summary>
///     Computes the thermal energy of a gas mixture (temperature times heat capacity).
/// </summary>
public float GetThermalEnergy(GasMixture mixture)
    => GetHeatCapacity(mixture) * mixture.Temperature;
/// <summary>
///     Computes the thermal energy of a gas mixture using a heat capacity the
///     caller has already computed, avoiding a recalculation.
/// </summary>
public float GetThermalEnergy(GasMixture mixture, float cachedHeatCapacity)
    => cachedHeatCapacity * mixture.Temperature;
/// <summary>
///     Merges the <see cref="giver"/> gas mixture into the <see cref="receiver"/> gas mixture.
///     The <see cref="giver"/> gas mixture is not modified by this method.
/// </summary>
public void Merge(GasMixture receiver, GasMixture giver)
{
    // Immutable receivers (e.g. space) are never modified.
    if (receiver.Immutable)
        return;

    var temperatureGap = MathF.Abs(receiver.Temperature - giver.Temperature);
    if (temperatureGap > Atmospherics.MinimumTemperatureDeltaToConsider)
    {
        var receiverCapacity = GetHeatCapacity(receiver);
        var giverCapacity = GetHeatCapacity(giver);
        var combinedCapacity = receiverCapacity + giverCapacity;
        if (combinedCapacity > 0f)
        {
            // Energy-conserving temperature blend of the two mixtures.
            var combinedEnergy = GetThermalEnergy(giver, giverCapacity) + GetThermalEnergy(receiver, receiverCapacity);
            receiver.Temperature = combinedEnergy / combinedCapacity;
        }
    }

    NumericsHelpers.Add(receiver.Moles, giver.Moles);
}
/// <summary>
///     Shares gas between two gas mixtures. Part of LINDA.
/// </summary>
/// <param name="receiver">The mixture on the receiving end of the exchange.</param>
/// <param name="sharer">The adjacent mixture to exchange gas and heat with.</param>
/// <param name="atmosAdjacentTurfs">Number of atmos-adjacent turfs; dampens the per-call delta.</param>
/// <returns>The pressure delta between the mixtures, or 0 when nothing significant moved.</returns>
public float Share(GasMixture receiver, GasMixture sharer, int atmosAdjacentTurfs)
{
    var temperatureDelta = receiver.TemperatureArchived - sharer.TemperatureArchived;
    var absTemperatureDelta = Math.Abs(temperatureDelta);
    var oldHeatCapacity = 0f;
    var oldSharerHeatCapacity = 0f;

    // Heat capacities only matter when the temperature gap is worth equalizing.
    if (absTemperatureDelta > Atmospherics.MinimumTemperatureDeltaToConsider)
    {
        oldHeatCapacity = GetHeatCapacity(receiver);
        oldSharerHeatCapacity = GetHeatCapacity(sharer);
    }

    var heatCapacityToSharer = 0f;
    var heatCapacitySharerToThis = 0f;
    var movedMoles = 0f;
    var absMovedMoles = 0f;

    for(var i = 0; i < Atmospherics.TotalNumberOfGases; i++)
    {
        var thisValue = receiver.Moles[i];
        var sharerValue = sharer.Moles[i];
        // Positive delta = gas flowing receiver -> sharer.
        var delta = (thisValue - sharerValue) / (atmosAdjacentTurfs + 1);
        if (!(MathF.Abs(delta) >= Atmospherics.GasMinMoles)) continue;

        if (absTemperatureDelta > Atmospherics.MinimumTemperatureDeltaToConsider)
        {
            // Track the heat capacity carried by the moved gas in each direction.
            var gasHeatCapacity = delta * GasSpecificHeats[i];
            if (delta > 0)
            {
                heatCapacityToSharer += gasHeatCapacity;
            }
            else
            {
                heatCapacitySharerToThis -= gasHeatCapacity;
            }
        }

        if (!receiver.Immutable) receiver.Moles[i] -= delta;
        if (!sharer.Immutable) sharer.Moles[i] += delta;
        movedMoles += delta;
        absMovedMoles += MathF.Abs(delta);
    }

    receiver.LastShare = absMovedMoles;

    if (absTemperatureDelta > Atmospherics.MinimumTemperatureDeltaToConsider)
    {
        var newHeatCapacity = oldHeatCapacity + heatCapacitySharerToThis - heatCapacityToSharer;
        var newSharerHeatCapacity = oldSharerHeatCapacity + heatCapacityToSharer - heatCapacitySharerToThis;

        // Transfer of thermal energy (via changed heat capacity) between self and sharer.
        if (!receiver.Immutable && newHeatCapacity > Atmospherics.MinimumHeatCapacity)
        {
            receiver.Temperature = ((oldHeatCapacity * receiver.Temperature) - (heatCapacityToSharer * receiver.TemperatureArchived) + (heatCapacitySharerToThis * sharer.TemperatureArchived)) / newHeatCapacity;
        }

        if (!sharer.Immutable && newSharerHeatCapacity > Atmospherics.MinimumHeatCapacity)
        {
            sharer.Temperature = ((oldSharerHeatCapacity * sharer.Temperature) - (heatCapacitySharerToThis * sharer.TemperatureArchived) + (heatCapacityToSharer*receiver.TemperatureArchived)) / newSharerHeatCapacity;
        }

        // Thermal energy of the system (self and sharer) is unchanged.
        if (MathF.Abs(oldSharerHeatCapacity) > Atmospherics.MinimumHeatCapacity)
        {
            if (MathF.Abs(newSharerHeatCapacity / oldSharerHeatCapacity - 1) < 0.1)
            {
                TemperatureShare(receiver, sharer, Atmospherics.OpenHeatTransferCoefficient);
            }
        }
    }

    if (!(temperatureDelta > Atmospherics.MinimumTemperatureToMove) &&
        !(MathF.Abs(movedMoles) > Atmospherics.MinimumMolesDeltaToMove)) return 0f;

    var moles = receiver.TotalMoles;
    var theirMoles = sharer.TotalMoles;

    // FIX: the entire mole-weighted temperature difference must be scaled by
    // R / Volume (ideal gas law: P = nRT/V). Previously, operator precedence
    // applied "* R / Volume" only to the sharer term, yielding a
    // dimensionally-inconsistent pressure delta.
    return (receiver.TemperatureArchived * (moles + movedMoles) - sharer.TemperatureArchived * (theirMoles - movedMoles)) * Atmospherics.R / receiver.Volume;
}
/// <summary>
/// Shares temperature between two mixtures, taking a conduction coefficient into account.
/// </summary>
public float TemperatureShare(GasMixture receiver, GasMixture sharer, float conductionCoefficient)
{
var temperatureDelta = receiver.TemperatureArchived - sharer.TemperatureArchived;
if (MathF.Abs(temperatureDelta) > Atmospherics.MinimumTemperatureDeltaToConsider)
{
var heatCapacity = GetHeatCapacityArchived(receiver);
var sharerHeatCapacity = GetHeatCapacityArchived(sharer);
if (sharerHeatCapacity > Atmospherics.MinimumHeatCapacity && heatCapacity > Atmospherics.MinimumHeatCapacity)
{
var heat = conductionCoefficient * temperatureDelta * (heatCapacity * sharerHeatCapacity / (heatCapacity + sharerHeatCapacity));
if (!receiver.Immutable)
receiver.Temperature = MathF.Abs(MathF.Max(receiver.Temperature - heat / heatCapacity, Atmospherics.TCMB));
if (!sharer.Immutable)
sharer.Temperature = MathF.Abs(MathF.Max(sharer.Temperature + heat / sharerHeatCapacity, Atmospherics.TCMB));
}
}
return sharer.Temperature;
}
/// <summary>
/// Shares temperature between a gas mixture and an abstract sharer, taking a conduction coefficient into account.
/// </summary>
public float TemperatureShare(GasMixture receiver, float conductionCoefficient, float sharerTemperature, float sharerHeatCapacity)
{
var temperatureDelta = receiver.TemperatureArchived - sharerTemperature;
if (MathF.Abs(temperatureDelta) > Atmospherics.MinimumTemperatureDeltaToConsider)
{
var heatCapacity = GetHeatCapacityArchived(receiver);
if (sharerHeatCapacity > Atmospherics.MinimumHeatCapacity && heatCapacity > Atmospherics.MinimumHeatCapacity)
{
var heat = conductionCoefficient * temperatureDelta * (heatCapacity * sharerHeatCapacity / (heatCapacity + sharerHeatCapacity));
if (!receiver.Immutable)
receiver.Temperature = MathF.Abs(MathF.Max(receiver.Temperature - heat / heatCapacity, Atmospherics.TCMB));
sharerTemperature = MathF.Abs(MathF.Max(sharerTemperature + heat / sharerHeatCapacity, Atmospherics.TCMB));
}
}
return sharerTemperature;
}
/// <summary>
/// Releases gas from this mixture to the output mixture.
/// If the output mixture is null, then this is being released into space.
/// It can't transfer air to a mixture with higher pressure.
/// </summary>
public bool ReleaseGasTo(GasMixture mixture, GasMixture? output, float targetPressure)
{
var outputStartingPressure = output?.Pressure ?? 0;
var inputStartingPressure = mixture.Pressure;
if (outputStartingPressure >= MathF.Min(targetPressure, inputStartingPressure - 10))
// No need to pump gas if the target is already reached or input pressure is too low.
// Need at least 10 kPa difference to overcome friction in the mechanism.
return false;
if (!(mixture.TotalMoles > 0) || !(mixture.Temperature > 0)) return false;
// We calculate the necessary moles to transfer with the ideal gas law.
var pressureDelta = MathF.Min(targetPressure - outputStartingPressure, (inputStartingPressure - outputStartingPressure) / 2f);
var transferMoles = pressureDelta * (output?.Volume ?? Atmospherics.CellVolume) / (mixture.Temperature * Atmospherics.R);
// And now we transfer the gas.
var removed = mixture.Remove(transferMoles);
if(output != null)
Merge(output, removed);
return true;
}
/// <summary>
/// Pump gas from this mixture to the output mixture.
/// Amount depends on target pressure.
/// </summary>
/// <param name="mixture">The mixture to pump the gas from</param>
/// <param name="output">The mixture to pump the gas to</param>
/// <param name="targetPressure">The target pressure to reach</param>
/// <returns>Whether we could pump air to the output or not</returns>
public bool PumpGasTo(GasMixture mixture, GasMixture output, float targetPressure)
{
var outputStartingPressure = output.Pressure;
var pressureDelta = targetPressure - outputStartingPressure;
if (pressureDelta < 0.01)
// No need to pump gas, we've reached the target.
return false;
if (!(mixture.TotalMoles > 0) || !(mixture.Temperature > 0)) return false;
// We calculate the necessary moles to transfer with the ideal gas law.
var transferMoles = pressureDelta * output.Volume / (mixture.Temperature * Atmospherics.R);
// And now we transfer the gas.
var removed = mixture.Remove(transferMoles);
Merge(output, removed);
return true;
}
/// <summary>
/// Scrubs specified gases from a gas mixture into a <see cref="destination"/> gas mixture.
/// </summary>
public void ScrubInto(GasMixture mixture, GasMixture destination, IReadOnlyCollection<Gas> filterGases)
{
var buffer = new GasMixture(mixture.Volume){Temperature = mixture.Temperature};
foreach (var gas in filterGases)
{
buffer.AdjustMoles(gas, mixture.GetMoles(gas));
mixture.SetMoles(gas, 0f);
}
Merge(destination, buffer);
}
/// <summary>
/// Performs reactions for a given gas mixture on an optional holder.
/// </summary>
public ReactionResult React(GasMixture mixture, IGasMixtureHolder? holder)
{
var reaction = ReactionResult.NoReaction;
var temperature = mixture.Temperature;
var energy = GetThermalEnergy(mixture);
foreach (var prototype in GasReactions)
{
if (energy < prototype.MinimumEnergyRequirement ||
temperature < prototype.MinimumTemperatureRequirement ||
temperature > prototype.MaximumTemperatureRequirement)
continue;
var doReaction = true;
for (var i = 0; i < prototype.MinimumRequirements.Length; i++)
{
if(i >= Atmospherics.TotalNumberOfGases)
throw new IndexOutOfRangeException("Reaction Gas Minimum Requirements Array Prototype exceeds total number of gases!");
var req = prototype.MinimumRequirements[i];
if (!(mixture.GetMoles(i) < req)) continue;
doReaction = false;
break;
}
if (!doReaction)
continue;
reaction = prototype.React(mixture, holder, this);
if(reaction.HasFlag(ReactionResult.StopReactions))
break;
}
return reaction;
}
}
}
| |
namespace System.Workflow.ComponentModel
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel.Design.Serialization;
using System.Workflow.ComponentModel.Serialization;
#region Class ActivityCollectionItemList
[DesignerSerializer(typeof(ActivityCollectionMarkupSerializer), typeof(WorkflowMarkupSerializer))]
[Obsolete("The System.Workflow.* types are deprecated. Instead, please use the new types from System.Activities.*")]
public sealed class ActivityCollection : List<Activity>, IList<Activity>, IList
{
private Activity owner = null;
internal event EventHandler<ActivityCollectionChangeEventArgs> ListChanging;
public event EventHandler<ActivityCollectionChangeEventArgs> ListChanged;
public ActivityCollection(Activity owner)
{
if (owner == null)
throw new ArgumentNullException("owner");
if (!(owner is Activity))
throw new ArgumentException(SR.GetString(SR.Error_UnexpectedArgumentType, typeof(Activity).FullName), "owner");
this.owner = owner;
}
private void FireListChanging(ActivityCollectionChangeEventArgs eventArgs)
{
if (this.ListChanging != null)
this.ListChanging(this, eventArgs);
}
private void FireListChanged(ActivityCollectionChangeEventArgs eventArgs)
{
if (this.ListChanged != null)
this.ListChanged(this, eventArgs);
}
internal Activity Owner
{
get
{
return this.owner;
}
}
internal void InnerAdd(Activity activity)
{
base.Add(activity);
}
#region IList<Activity> Members
void IList<Activity>.RemoveAt(int index)
{
if (index < 0 || index >= base.Count)
throw new ArgumentOutOfRangeException("Index");
Activity item = base[index];
ActivityCollectionChangeEventArgs args = new ActivityCollectionChangeEventArgs(index, item, null, this.owner, ActivityCollectionChangeAction.Remove);
FireListChanging(args);
base.RemoveAt(index);
FireListChanged(args);
}
void IList<Activity>.Insert(int index, Activity item)
{
if (index < 0 || index > base.Count)
throw new ArgumentOutOfRangeException("index");
if (item == null)
throw new ArgumentNullException("item");
ActivityCollectionChangeEventArgs args = new ActivityCollectionChangeEventArgs(index, null, item, this.owner, ActivityCollectionChangeAction.Add);
FireListChanging(args);
base.Insert(index, item);
FireListChanged(args);
}
Activity IList<Activity>.this[int index]
{
get
{
return base[index];
}
set
{
if (value == null)
throw new ArgumentNullException("item");
Activity oldItem = base[index];
ActivityCollectionChangeEventArgs args = new ActivityCollectionChangeEventArgs(index, oldItem, value, this.owner, ActivityCollectionChangeAction.Replace);
FireListChanging(args);
base[index] = value;
FireListChanged(args);
}
}
int IList<Activity>.IndexOf(Activity item)
{
return base.IndexOf(item);
}
#endregion
#region ICollection<Activity> Members
bool ICollection<Activity>.IsReadOnly
{
get
{
return false;
}
}
bool ICollection<Activity>.Contains(Activity item)
{
return base.Contains(item);
}
bool ICollection<Activity>.Remove(Activity item)
{
if (!base.Contains(item))
return false;
int index = base.IndexOf(item);
if (index >= 0)
{
ActivityCollectionChangeEventArgs args = new ActivityCollectionChangeEventArgs(index, item, null, this.owner, ActivityCollectionChangeAction.Remove);
FireListChanging(args);
base.Remove(item);
FireListChanged(args);
return true;
}
return false;
}
void ICollection<Activity>.Clear()
{
ICollection<Activity> children = base.GetRange(0, base.Count);
ActivityCollectionChangeEventArgs args = new ActivityCollectionChangeEventArgs(-1, children, null, this.owner, ActivityCollectionChangeAction.Remove);
FireListChanging(args);
base.Clear();
FireListChanged(args);
}
void ICollection<Activity>.Add(Activity item)
{
if (item == null)
throw new ArgumentNullException("item");
ActivityCollectionChangeEventArgs args = new ActivityCollectionChangeEventArgs(base.Count, null, item, this.owner, ActivityCollectionChangeAction.Add);
FireListChanging(args);
base.Add(item);
FireListChanged(args);
}
int ICollection<Activity>.Count
{
get
{
return base.Count;
}
}
void ICollection<Activity>.CopyTo(Activity[] array, int arrayIndex)
{
base.CopyTo(array, arrayIndex);
}
#endregion
#region IEnumerable<Activity> Members
IEnumerator<Activity> IEnumerable<Activity>.GetEnumerator()
{
return base.GetEnumerator();
}
#endregion
#region Member Implementations
public new int Count
{
get
{
return ((ICollection<Activity>)this).Count;
}
}
public new void Add(Activity item)
{
((IList<Activity>)this).Add(item);
}
public new void Clear()
{
((IList<Activity>)this).Clear();
}
public new void Insert(int index, Activity item)
{
((IList<Activity>)this).Insert(index, item);
}
public new bool Remove(Activity item)
{
return ((IList<Activity>)this).Remove(item);
}
public new void RemoveAt(int index)
{
((IList<Activity>)this).RemoveAt(index);
}
public new Activity this[int index]
{
get
{
return ((IList<Activity>)this)[index];
}
set
{
((IList<Activity>)this)[index] = value;
}
}
public Activity this[string key]
{
get
{
for (int index = 0; index < this.Count; index++)
if ((this[index].Name.Equals(key) || this[index].QualifiedName.Equals(key)))
return this[index];
return null;
}
}
public new int IndexOf(Activity item)
{
return ((IList<Activity>)this).IndexOf(item);
}
public new bool Contains(Activity item)
{
return ((IList<Activity>)this).Contains(item);
}
public new IEnumerator<Activity> GetEnumerator()
{
return ((IList<Activity>)this).GetEnumerator();
}
#endregion
#region IList Members
int IList.Add(object value)
{
if (!(value is Activity))
throw new Exception(SR.GetString(SR.Error_InvalidListItem, this.GetType().GetGenericArguments()[0].FullName));
((IList<Activity>)this).Add((Activity)value);
return this.Count - 1;
}
void IList.Clear()
{
((IList<Activity>)this).Clear();
}
bool IList.Contains(object value)
{
if (!(value is Activity))
throw new Exception(SR.GetString(SR.Error_InvalidListItem, this.GetType().GetGenericArguments()[0].FullName));
return (((IList<Activity>)this).Contains((Activity)value));
}
int IList.IndexOf(object value)
{
if (!(value is Activity))
throw new Exception(SR.GetString(SR.Error_InvalidListItem, this.GetType().GetGenericArguments()[0].FullName));
return ((IList<Activity>)this).IndexOf((Activity)value);
}
void IList.Insert(int index, object value)
{
if (!(value is Activity))
throw new Exception(SR.GetString(SR.Error_InvalidListItem, this.GetType().GetGenericArguments()[0].FullName));
((IList<Activity>)this).Insert(index, (Activity)value);
}
bool IList.IsFixedSize
{
get
{
return false;
}
}
bool IList.IsReadOnly
{
get
{
return ((IList<Activity>)this).IsReadOnly;
}
}
void IList.Remove(object value)
{
if (!(value is Activity))
throw new Exception(SR.GetString(SR.Error_InvalidListItem, this.GetType().GetGenericArguments()[0].FullName));
((IList<Activity>)this).Remove((Activity)value);
}
object IList.this[int index]
{
get
{
return ((IList<Activity>)this)[index];
}
set
{
if (!(value is Activity))
throw new Exception(SR.GetString(SR.Error_InvalidListItem, this.GetType().GetGenericArguments()[0].FullName));
((IList<Activity>)this)[index] = (Activity)value;
}
}
#endregion
#region ICollection Members
void ICollection.CopyTo(Array array, int index)
{
for (int loop = 0; loop < this.Count; loop++)
array.SetValue(this[loop], loop + index);
}
bool ICollection.IsSynchronized
{
get { return false; }
}
object ICollection.SyncRoot
{
get { return this; }
}
#endregion
#region IEnumerable Members
IEnumerator IEnumerable.GetEnumerator()
{
return (IEnumerator)((IList<Activity>)this).GetEnumerator();
}
#endregion
}
#endregion
    /// <summary>Describes the kind of change applied to an <see cref="ActivityCollection"/>.</summary>
    [Obsolete("The System.Workflow.* types are deprecated. Instead, please use the new types from System.Activities.*")]
    public enum ActivityCollectionChangeAction
    {
        /// <summary>One or more items were added to the collection.</summary>
        Add = 0x00,
        /// <summary>One or more items were removed from the collection.</summary>
        Remove = 0x01,
        /// <summary>An item was replaced in place.</summary>
        Replace = 0x02
    }
[Obsolete("The System.Workflow.* types are deprecated. Instead, please use the new types from System.Activities.*")]
public sealed class ActivityCollectionChangeEventArgs : EventArgs
{
private int index = 0;
private ICollection<Activity> addedItems = null;
private ICollection<Activity> removedItems = null;
private object owner = null;
private ActivityCollectionChangeAction action = ActivityCollectionChangeAction.Add;
public ActivityCollectionChangeEventArgs(int index, ICollection<Activity> removedItems, ICollection<Activity> addedItems, object owner, ActivityCollectionChangeAction action)
{
this.index = index;
this.removedItems = removedItems;
this.addedItems = addedItems;
this.action = action;
this.owner = owner;
}
public ActivityCollectionChangeEventArgs(int index, Activity removedActivity, Activity addedActivity, object owner, ActivityCollectionChangeAction action)
{
this.index = index;
if (removedActivity != null)
{
this.removedItems = new List<Activity>();
((List<Activity>)this.removedItems).Add(removedActivity);
}
if (addedActivity != null)
{
this.addedItems = new List<Activity>();
((List<Activity>)this.addedItems).Add(addedActivity);
}
this.action = action;
this.owner = owner;
}
public IList<Activity> RemovedItems
{
get
{
return (this.removedItems != null) ? new List<Activity>(this.removedItems).AsReadOnly() : new List<Activity>().AsReadOnly();
}
}
public IList<Activity> AddedItems
{
get
{
return (this.addedItems != null) ? new List<Activity>(this.addedItems).AsReadOnly() : new List<Activity>().AsReadOnly();
}
}
public object Owner
{
get
{
return this.owner;
}
}
public int Index
{
get
{
return this.index;
}
}
public ActivityCollectionChangeAction Action
{
get
{
return this.action;
}
}
}
}
| |
//! \file ImageSPL.cs
//! \date Thu Sep 10 00:17:47 2015
//! \brief Zyx tiled image.
//
// Copyright (C) 2015 by morkt
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
using System.ComponentModel.Composition;
using System.IO;
using System.Windows.Media;
using GameRes.Utility;
namespace GameRes.Formats.Zyx
{
    // Pixel-coordinate rectangle of one tile within the image. ReadMetaData validates
    // Left/Top >= 0, Right > Left, Bottom > Top, and Right/Bottom within the image bounds.
    internal class Tile
    {
        public int Left, Top;
        public int Right, Bottom;
    }
    // Image metadata for the SPL format, extended with the parsed tile table and
    // the stream position where the compressed pixel data begins (just past the header).
    internal class SplMetaData : ImageMetaData
    {
        public Tile[] Tiles;
        public long DataOffset;
    }
[Export(typeof(ImageFormat))]
public class SplFormat : ImageFormat
{
public override string Tag { get { return "SPL"; } }
public override string Description { get { return "Zyx tiled image format"; } }
public override uint Signature { get { return 0; } }
public override ImageMetaData ReadMetaData (IBinaryStream file)
{
int count = file.ReadInt16();
if (count <= 0)
return null;
var tiles = new Tile[count];
for (int i = 0; i < count; ++i)
{
var tile = new Tile();
tile.Left = file.ReadInt16();
tile.Top = file.ReadInt16();
if (tile.Left < 0 || tile.Top < 0)
return null;
tile.Right = file.ReadInt16();
tile.Bottom = file.ReadInt16();
if (tile.Right <= tile.Left || tile.Bottom <= tile.Top)
return null;
tiles[i] = tile;
}
int width = file.ReadInt16();
int height = file.ReadInt16();
if (width <= 0 || height <= 0)
return null;
foreach (var tile in tiles)
{
if (tile.Right > width || tile.Bottom > height)
return null;
}
return new SplMetaData
{
Width = (uint)width,
Height = (uint)height,
BPP = 24,
Tiles = tiles,
DataOffset = file.Position,
};
}
public override ImageData Read (IBinaryStream stream, ImageMetaData info)
{
var meta = (SplMetaData)info;
var reader = new SplReader (stream.AsStream, meta);
reader.Unpack ();
return ImageData.Create (info, PixelFormats.Bgr24, null, reader.Data);
}
public override void Write (Stream file, ImageData image)
{
throw new System.NotImplementedException ("SplFormat.Write not implemented");
}
}
internal class SplReader
{
Stream m_input;
byte[] m_output;
public byte[] Data { get { return m_output; } }
public SplReader (Stream input, SplMetaData info)
{
m_output = new byte[3 * info.Width * info.Height];
m_input = input;
m_input.Position = info.DataOffset;
}
public void Unpack ()
{
int dst = 0;
while (dst < m_output.Length)
{
int count;
int b = m_input.ReadByte();
if (-1 == b)
break;
switch (b)
{
case 0:
count = m_input.ReadByte();
if (count != -1)
{
byte p1 = m_output[dst - 3];
byte p2 = m_output[dst - 2];
byte p3 = m_output[dst - 1];
for (int i = 0; i < count; ++i)
{
m_output[dst++] = p1;
m_output[dst++] = p2;
m_output[dst++] = p3;
}
}
break;
case 1:
count = m_input.ReadByte();
b = m_input.ReadByte();
if (-1 != count && -1 != b)
{
int src = dst - 3 * b;
count *= 3;
Binary.CopyOverlapped (m_output, src, dst, count);
dst += count;
}
break;
case 2:
count = m_input.ReadByte();
b = ReadWord();
if (-1 != count && -1 != b)
{
int src = dst - 3 * b;
count *= 3;
Binary.CopyOverlapped (m_output, src, dst, count);
dst += count;
}
break;
case 3:
b = m_input.ReadByte();
if (b != -1)
{
int src = dst - 3 * b;
m_output[dst++] = m_output[src++];
m_output[dst++] = m_output[src++];
m_output[dst++] = m_output[src++];
}
break;
case 4:
b = ReadWord();
if (b != -1)
{
int src = dst - 3 * b;
m_output[dst++] = m_output[src++];
m_output[dst++] = m_output[src++];
m_output[dst++] = m_output[src++];
}
break;
default:
count = 3 * (b - 4);
m_input.Read (m_output, dst, count);
dst += count;
break;
}
}
}
private int ReadWord ()
{
int lo = m_input.ReadByte();
if (-1 == lo)
return -1;
int hi = m_input.ReadByte();
if (-1 == hi)
return -1;
return hi << 8 | lo;
}
}
}
| |
namespace BitDiffer.Client.Forms
{
partial class ProjectSetup
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            // Designer-generated: creates all controls, then configures them between
            // SuspendLayout/ResumeLayout pairs to avoid intermediate layout passes.
            this.toolStrip1 = new System.Windows.Forms.ToolStrip();
            this.tsbFiles = new System.Windows.Forms.ToolStripButton();
            this.tsbOptions = new System.Windows.Forms.ToolStripButton();
            this.tsbViewFilter = new System.Windows.Forms.ToolStripButton();
            this.tsbReferences = new System.Windows.Forms.ToolStripButton();
            this.splitContainer1 = new System.Windows.Forms.SplitContainer();
            this.splitContainer2 = new System.Windows.Forms.SplitContainer();
            this.panel1 = new System.Windows.Forms.Panel();
            this.compareFiles = new BitDiffer.Client.Controls.SelectAssemblies();
            this.compareOptions = new BitDiffer.Client.Controls.Configuration();
            this.viewFilter = new BitDiffer.Client.Controls.CompareViewFilter();
            this.btnCancel = new System.Windows.Forms.Button();
            this.btnHelp = new System.Windows.Forms.Button();
            this.btnOK = new System.Windows.Forms.Button();
            this.referencePaths = new BitDiffer.Client.Controls.ReferencePaths();
            this.toolStrip1.SuspendLayout();
            ((System.ComponentModel.ISupportInitialize)(this.splitContainer1)).BeginInit();
            this.splitContainer1.Panel1.SuspendLayout();
            this.splitContainer1.Panel2.SuspendLayout();
            this.splitContainer1.SuspendLayout();
            ((System.ComponentModel.ISupportInitialize)(this.splitContainer2)).BeginInit();
            this.splitContainer2.Panel1.SuspendLayout();
            this.splitContainer2.Panel2.SuspendLayout();
            this.splitContainer2.SuspendLayout();
            this.panel1.SuspendLayout();
            this.SuspendLayout();
            //
            // toolStrip1
            //
            this.toolStrip1.Dock = System.Windows.Forms.DockStyle.Left;
            this.toolStrip1.Font = new System.Drawing.Font("Tahoma", 9F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
            this.toolStrip1.GripStyle = System.Windows.Forms.ToolStripGripStyle.Hidden;
            this.toolStrip1.Items.AddRange(new System.Windows.Forms.ToolStripItem[] {
            this.tsbFiles,
            this.tsbOptions,
            this.tsbViewFilter,
            this.tsbReferences});
            this.toolStrip1.Location = new System.Drawing.Point(0, 0);
            this.toolStrip1.Name = "toolStrip1";
            this.toolStrip1.RenderMode = System.Windows.Forms.ToolStripRenderMode.Professional;
            this.toolStrip1.Size = new System.Drawing.Size(116, 480);
            this.toolStrip1.TabIndex = 0;
            this.toolStrip1.Text = "toolStrip1";
            //
            // tsbFiles
            //
            this.tsbFiles.Image = global::BitDiffer.Client.Properties.Resources.proj_files;
            this.tsbFiles.ImageScaling = System.Windows.Forms.ToolStripItemImageScaling.None;
            this.tsbFiles.ImageTransparentColor = System.Drawing.Color.Transparent;
            this.tsbFiles.Name = "tsbFiles";
            this.tsbFiles.Size = new System.Drawing.Size(113, 50);
            this.tsbFiles.Text = "Assembly Selection";
            this.tsbFiles.TextImageRelation = System.Windows.Forms.TextImageRelation.ImageAboveText;
            this.tsbFiles.Click += new System.EventHandler(this.tsbFiles_Click);
            //
            // tsbOptions
            //
            this.tsbOptions.Image = global::BitDiffer.Client.Properties.Resources.proj_config;
            this.tsbOptions.ImageScaling = System.Windows.Forms.ToolStripItemImageScaling.None;
            this.tsbOptions.ImageTransparentColor = System.Drawing.Color.Transparent;
            this.tsbOptions.Name = "tsbOptions";
            this.tsbOptions.Size = new System.Drawing.Size(113, 50);
            this.tsbOptions.Text = "Configuration";
            this.tsbOptions.TextImageRelation = System.Windows.Forms.TextImageRelation.ImageAboveText;
            this.tsbOptions.Click += new System.EventHandler(this.tsbOptions_Click);
            //
            // tsbViewFilter
            //
            this.tsbViewFilter.Image = global::BitDiffer.Client.Properties.Resources.proj_view_filter;
            this.tsbViewFilter.ImageAlign = System.Drawing.ContentAlignment.TopCenter;
            this.tsbViewFilter.ImageScaling = System.Windows.Forms.ToolStripItemImageScaling.None;
            this.tsbViewFilter.ImageTransparentColor = System.Drawing.Color.Transparent;
            this.tsbViewFilter.Name = "tsbViewFilter";
            this.tsbViewFilter.Size = new System.Drawing.Size(113, 50);
            this.tsbViewFilter.Text = "View Filter";
            this.tsbViewFilter.TextImageRelation = System.Windows.Forms.TextImageRelation.ImageAboveText;
            this.tsbViewFilter.Click += new System.EventHandler(this.tsbDirectories_Click);
            //
            // tsbReferences
            //
            this.tsbReferences.Image = global::BitDiffer.Client.Properties.Resources.proj_references;
            this.tsbReferences.ImageAlign = System.Drawing.ContentAlignment.TopCenter;
            this.tsbReferences.ImageScaling = System.Windows.Forms.ToolStripItemImageScaling.None;
            this.tsbReferences.ImageTransparentColor = System.Drawing.Color.Transparent;
            this.tsbReferences.Name = "tsbReferences";
            this.tsbReferences.Size = new System.Drawing.Size(113, 50);
            this.tsbReferences.Text = "Reference Paths";
            this.tsbReferences.TextImageRelation = System.Windows.Forms.TextImageRelation.ImageAboveText;
            this.tsbReferences.Click += new System.EventHandler(this.tsbReferences_Click);
            //
            // splitContainer1
            //
            this.splitContainer1.Dock = System.Windows.Forms.DockStyle.Fill;
            this.splitContainer1.FixedPanel = System.Windows.Forms.FixedPanel.Panel2;
            this.splitContainer1.IsSplitterFixed = true;
            this.splitContainer1.Location = new System.Drawing.Point(0, 0);
            this.splitContainer1.Name = "splitContainer1";
            this.splitContainer1.Orientation = System.Windows.Forms.Orientation.Horizontal;
            //
            // splitContainer1.Panel1
            //
            this.splitContainer1.Panel1.Controls.Add(this.splitContainer2);
            //
            // splitContainer1.Panel2
            //
            this.splitContainer1.Panel2.Controls.Add(this.btnCancel);
            this.splitContainer1.Panel2.Controls.Add(this.btnHelp);
            this.splitContainer1.Panel2.Controls.Add(this.btnOK);
            this.splitContainer1.Size = new System.Drawing.Size(809, 546);
            this.splitContainer1.SplitterDistance = 484;
            this.splitContainer1.TabIndex = 2;
            //
            // splitContainer2
            //
            this.splitContainer2.Dock = System.Windows.Forms.DockStyle.Fill;
            this.splitContainer2.FixedPanel = System.Windows.Forms.FixedPanel.Panel1;
            this.splitContainer2.IsSplitterFixed = true;
            this.splitContainer2.Location = new System.Drawing.Point(0, 0);
            this.splitContainer2.Name = "splitContainer2";
            //
            // splitContainer2.Panel1
            //
            this.splitContainer2.Panel1.Controls.Add(this.panel1);
            //
            // splitContainer2.Panel2
            //
            // NOTE(review): compareFiles, compareOptions, viewFilter and referencePaths are all
            // Fill-docked into the same panel; presumably the tsb* click handlers toggle which
            // one is visible — confirm in the non-designer partial class.
            this.splitContainer2.Panel2.Controls.Add(this.compareFiles);
            this.splitContainer2.Panel2.Controls.Add(this.compareOptions);
            this.splitContainer2.Panel2.Controls.Add(this.viewFilter);
            this.splitContainer2.Panel2.Controls.Add(this.referencePaths);
            this.splitContainer2.Size = new System.Drawing.Size(809, 484);
            this.splitContainer2.SplitterDistance = 117;
            this.splitContainer2.SplitterWidth = 5;
            this.splitContainer2.TabIndex = 0;
            //
            // panel1
            //
            this.panel1.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
            this.panel1.Controls.Add(this.toolStrip1);
            this.panel1.Dock = System.Windows.Forms.DockStyle.Fill;
            this.panel1.Location = new System.Drawing.Point(0, 0);
            this.panel1.Name = "panel1";
            this.panel1.Size = new System.Drawing.Size(117, 484);
            this.panel1.TabIndex = 3;
            //
            // compareFiles
            //
            this.compareFiles.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
            this.compareFiles.Dock = System.Windows.Forms.DockStyle.Fill;
            this.compareFiles.Font = new System.Drawing.Font("Tahoma", 9F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
            this.compareFiles.Location = new System.Drawing.Point(0, 0);
            this.compareFiles.Name = "compareFiles";
            this.compareFiles.Size = new System.Drawing.Size(687, 484);
            this.compareFiles.TabIndex = 0;
            //
            // compareOptions
            //
            this.compareOptions.AutoScroll = true;
            this.compareOptions.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
            this.compareOptions.Dock = System.Windows.Forms.DockStyle.Fill;
            this.compareOptions.Font = new System.Drawing.Font("Tahoma", 9F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
            this.compareOptions.Location = new System.Drawing.Point(0, 0);
            this.compareOptions.Name = "compareOptions";
            this.compareOptions.Size = new System.Drawing.Size(687, 484);
            this.compareOptions.TabIndex = 2;
            //
            // viewFilter
            //
            this.viewFilter.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
            this.viewFilter.Dock = System.Windows.Forms.DockStyle.Fill;
            this.viewFilter.Font = new System.Drawing.Font("Tahoma", 9F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
            this.viewFilter.Location = new System.Drawing.Point(0, 0);
            this.viewFilter.Name = "viewFilter";
            this.viewFilter.Size = new System.Drawing.Size(687, 484);
            this.viewFilter.TabIndex = 0;
            //
            // btnCancel
            //
            this.btnCancel.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
            this.btnCancel.DialogResult = System.Windows.Forms.DialogResult.Cancel;
            this.btnCancel.Location = new System.Drawing.Point(617, 17);
            this.btnCancel.Name = "btnCancel";
            this.btnCancel.Size = new System.Drawing.Size(87, 25);
            this.btnCancel.TabIndex = 1;
            this.btnCancel.Text = "Cancel";
            this.btnCancel.UseVisualStyleBackColor = true;
            //
            // btnHelp
            //
            this.btnHelp.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
            this.btnHelp.Location = new System.Drawing.Point(710, 17);
            this.btnHelp.Name = "btnHelp";
            this.btnHelp.Size = new System.Drawing.Size(87, 25);
            this.btnHelp.TabIndex = 2;
            this.btnHelp.Text = "Help";
            this.btnHelp.UseVisualStyleBackColor = true;
            this.btnHelp.Click += new System.EventHandler(this.btnHelp_Click);
            //
            // btnOK
            //
            this.btnOK.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
            this.btnOK.Location = new System.Drawing.Point(523, 17);
            this.btnOK.Name = "btnOK";
            this.btnOK.Size = new System.Drawing.Size(87, 25);
            this.btnOK.TabIndex = 0;
            this.btnOK.Text = "OK";
            this.btnOK.UseVisualStyleBackColor = true;
            this.btnOK.Click += new System.EventHandler(this.btnOK_Click);
            //
            // referencePaths
            //
            this.referencePaths.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
            this.referencePaths.Dock = System.Windows.Forms.DockStyle.Fill;
            this.referencePaths.Font = new System.Drawing.Font("Tahoma", 9F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
            this.referencePaths.Location = new System.Drawing.Point(0, 0);
            this.referencePaths.Name = "referencePaths";
            this.referencePaths.Size = new System.Drawing.Size(687, 484);
            this.referencePaths.TabIndex = 3;
            //
            // ProjectSetup
            //
            this.AcceptButton = this.btnOK;
            this.AutoScaleDimensions = new System.Drawing.SizeF(7F, 14F);
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
            this.CancelButton = this.btnCancel;
            this.ClientSize = new System.Drawing.Size(809, 546);
            this.Controls.Add(this.splitContainer1);
            this.Font = new System.Drawing.Font("Tahoma", 9F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
            this.Name = "ProjectSetup";
            this.ShowInTaskbar = false;
            this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
            this.Text = "Comparison Set: Setup and Configuration";
            this.toolStrip1.ResumeLayout(false);
            this.toolStrip1.PerformLayout();
            this.splitContainer1.Panel1.ResumeLayout(false);
            this.splitContainer1.Panel2.ResumeLayout(false);
            ((System.ComponentModel.ISupportInitialize)(this.splitContainer1)).EndInit();
            this.splitContainer1.ResumeLayout(false);
            this.splitContainer2.Panel1.ResumeLayout(false);
            this.splitContainer2.Panel2.ResumeLayout(false);
            ((System.ComponentModel.ISupportInitialize)(this.splitContainer2)).EndInit();
            this.splitContainer2.ResumeLayout(false);
            this.panel1.ResumeLayout(false);
            this.panel1.PerformLayout();
            this.ResumeLayout(false);
        }
#endregion
private System.Windows.Forms.ToolStrip toolStrip1;
private System.Windows.Forms.ToolStripButton tsbFiles;
private System.Windows.Forms.ToolStripButton tsbViewFilter;
private System.Windows.Forms.ToolStripButton tsbOptions;
private BitDiffer.Client.Controls.SelectAssemblies compareFiles;
private BitDiffer.Client.Controls.Configuration compareOptions;
private BitDiffer.Client.Controls.CompareViewFilter viewFilter;
private System.Windows.Forms.SplitContainer splitContainer1;
private System.Windows.Forms.SplitContainer splitContainer2;
private System.Windows.Forms.Button btnCancel;
private System.Windows.Forms.Button btnOK;
private System.Windows.Forms.Panel panel1;
private System.Windows.Forms.Button btnHelp;
private System.Windows.Forms.ToolStripButton tsbReferences;
private Controls.ReferencePaths referencePaths;
}
}
| |
#region CVS Log
/*
* Version:
* $Id: UFIDFrame.cs,v 1.10 2004/12/10 04:49:08 cwoodbury Exp $
*
* Revisions:
* $Log: UFIDFrame.cs,v $
* Revision 1.10 2004/12/10 04:49:08 cwoodbury
* Made changes to EncodedString and to how it is used to push it down to
* just being involved with frame I/O and not otherwise being used in frames.
*
* Revision 1.9 2004/11/20 23:51:04 cwoodbury
 * Made default owner-identifier string explicitly use ISO-8859-1, as per ID3 specs.
*
* Revision 1.8 2004/11/20 23:12:12 cwoodbury
* Removed TextEncodingType.ASCII type; replaced with ISO_8859_1 type
* or default type for EncodedString.
*
* Revision 1.7 2004/11/16 07:08:14 cwoodbury
* Changed accessibility modifiers for some methods to internal or
* protected internal where appropriate.
*
* Revision 1.6 2004/11/16 06:43:39 cwoodbury
* Fixed bug #1066848: EncodedStrings.CreateEncodedStrings() corrupted
* data in the optional leftover bytes.
*
* Revision 1.5 2004/11/10 07:32:29 cwoodbury
* Factored out ParseFrameData() into ID3v2Frame.
*
* Revision 1.4 2004/11/10 06:51:55 cwoodbury
* Hid CVS log messages away in #region
*
* Revision 1.3 2004/11/10 04:44:16 cwoodbury
* Updated documentation.
*
* Revision 1.2 2004/11/03 07:44:27 cwoodbury
* Added validity checks to parsing code.
*
* Revision 1.1 2004/11/03 01:18:50 cwoodbury
* Added to ID3Sharp
*
*/
#endregion
/*
* Author(s):
* Chris Woodbury
*
* Project Location:
* http://id3sharp.sourceforge.net
*
* License:
* Licensed under the Open Software License version 2.0
*/
using System;
using System.IO;
using System.Collections.Generic;
using ID3Sharp.Exceptions;
using ID3Sharp.IO;
using ID3Sharp.Models;
namespace ID3Sharp.Frames
{
/// <summary>
/// A Unique File Identifier frame.
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage( "Microsoft.Naming", "CA1705:LongAcronymsShouldBePascalCased" )]
public class UFIDFrame : ID3v2Frame
{
    #region Fields
    /// <summary>
    /// The contents of the Owner Identifier field. Written to a stream with
    /// an encoding-type byte prepended and a null terminator appended
    /// (see <see cref="SetEncodedStringSettings"/>).
    /// </summary>
    private EncodedString ownerIdentifier = new EncodedString();
    /// <summary>
    /// The contents of the Identifier field (opaque binary data). Never null.
    /// </summary>
    private byte[] identifier;
    #endregion

    #region Constructors
    /// <summary>
    /// Creates a new UFIDFrame with an empty identifier.
    /// </summary>
    protected internal UFIDFrame()
    {
        identifier = new byte[] { };
        SetEncodedStringSettings();
    }

    /// <summary>
    /// Copy constructor. Performs a deep copy of both the owner identifier
    /// and the identifier bytes.
    /// </summary>
    /// <param name="frame">Frame to copy.</param>
    protected internal UFIDFrame( UFIDFrame frame )
        : base( frame )
    {
        this.ownerIdentifier = frame.ownerIdentifier.Copy();
        this.identifier = new byte[ frame.identifier.Length ];
        frame.identifier.CopyTo( this.identifier, 0 );
    }
    #endregion

    #region Constructor/Initialize helpers
    /// <summary>
    /// Parses the raw frame data: one ISO-8859-1 terminated string (the
    /// owner identifier) followed by the raw identifier bytes, which are
    /// captured through the leftover-bytes callback.
    /// </summary>
    /// <param name="frameData">The raw frame data.</param>
    /// <param name="version">The ID3v2 version of the tag being parsed.</param>
    protected override void ParseFrameData( byte[] frameData, ID3Versions version )
    {
        IList<EncodedString> strings =
            EncodedString.CreateStrings( TextEncodingType.ISO_8859_1, frameData, 1,
            delegate( byte[] leftoverBytes ) { Identifier = leftoverBytes; } );

        if ( strings.Count == 1 )
        {
            ownerIdentifier = strings[ 0 ];
            SetEncodedStringSettings();
        }
        // If the data did not contain exactly one string, the default
        // (empty) owner identifier from construction is kept.
    }

    /// <summary>
    /// Configures how the owner identifier is serialized: encoding-type
    /// byte prepended, string null-terminated.
    /// </summary>
    private void SetEncodedStringSettings()
    {
        ownerIdentifier.HasEncodingTypePrepended = true;
        ownerIdentifier.IsTerminated = true;
    }
    #endregion

    #region Properties
    /// <summary>
    /// Gets the size (in bytes) of the frame (not including header).
    /// </summary>
    public override int Size
    {
        get
        {
            // owner identifier + identifier data
            return ownerIdentifier.Size + identifier.Length;
        }
    }

    /// <summary>
    /// Gets and sets the contents of the Owner Identifier field.
    /// </summary>
    /// <exception cref="ArgumentException">
    /// Thrown when the value is null or empty; the owner identifier must
    /// be non-empty.
    /// </exception>
    public string OwnerIdentifier
    {
        get
        {
            return ownerIdentifier.String;
        }
        set
        {
            if ( String.IsNullOrEmpty( value ) )
            {
                throw new ArgumentException( "Owner identifier must be non-empty", "value" );
            }
            ownerIdentifier.String = value;
        }
    }

    /// <summary>
    /// Gets and sets the method of encoding used when writing
    /// strings in the frame to a stream.
    /// </summary>
    public TextEncodingType EncodingType
    {
        get
        {
            return ownerIdentifier.TextEncodingType;
        }
        set
        {
            ownerIdentifier.TextEncodingType = value;
        }
    }

    /// <summary>
    /// Gets and sets the contents of the Identifier field.
    /// </summary>
    /// <exception cref="ArgumentNullException">
    /// Thrown when the value is null.
    /// </exception>
    public byte[] Identifier
    {
        get
        {
            return identifier;
        }
        set
        {
            // Fail fast on null rather than deferring a
            // NullReferenceException to Size or WriteToStream.
            // NOTE: an earlier (commented-out) check limited the identifier
            // to 64 bits; that limit is intentionally not enforced here.
            if ( value == null )
            {
                throw new ArgumentNullException( "value" );
            }
            identifier = value;
        }
    }
    #endregion

    #region Public Methods
    /// <summary>
    /// Returns a copy of this frame. Supports the prototype design pattern.
    /// </summary>
    /// <returns>A copy of this frame.</returns>
    public override ID3v2Frame Copy()
    {
        return new UFIDFrame( this );
    }

    /// <summary>
    /// Writes the frame (header, owner identifier, identifier bytes) to a
    /// stream, validating it first.
    /// </summary>
    /// <param name="stream">The stream to write to.</param>
    /// <param name="version">The ID3v2 version to use in writing the frame.</param>
    /// <exception cref="ArgumentNullException">Thrown when stream is null.</exception>
    public override void WriteToStream( Stream stream, ID3Versions version )
    {
        if ( stream == null )
        {
            throw new ArgumentNullException( "stream" );
        }

        Validate( version );

        WriteHeaderToStream( stream, version );
        ownerIdentifier.WriteToStream( stream );
        stream.Write( identifier, 0, identifier.Length );
        stream.Flush();
    }

    /// <summary>
    /// Validates the frame for the given ID3v2 version. UFID frames are
    /// only supported in ID3v2; the owner identifier is validated as well.
    /// </summary>
    /// <param name="version">The ID3v2 version to validate against.</param>
    /// <exception cref="FrameValidationException">
    /// Thrown when the version is unsupported or the owner identifier
    /// fails validation; the cause is wrapped as the inner exception.
    /// </exception>
    public override void Validate( ID3Versions version )
    {
        Exception innerException = null;

        if ( ( version & ID3Versions.V2 ) != ID3Versions.V2 )
        {
            innerException = new UnsupportedVersionException( version );
        }
        else
        {
            try
            {
                ownerIdentifier.Validate( version );
            }
            catch ( IOValidationException ex )
            {
                innerException = ex;
            }
        }

        if ( innerException != null )
        {
            throw new FrameValidationException( "Validation failed.", this, innerException );
        }
    }
    #endregion
}
}
| |
// Copyright 2004-2009 Castle Project - http://www.castleproject.org/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
namespace Castle.Rook.Compiler.Visitors
{
using System;
using System.Collections;
using Castle.Rook.Compiler.AST;
/// <summary>
/// Base visitor for the Rook AST. Traversal methods walk child nodes and
/// return a success flag; Enter/Leave pairs let subclasses intercept
/// composite nodes. All members are virtual no-op traversals by default.
/// </summary>
public abstract class AbstractVisitor : IASTVisitor
{
    public AbstractVisitor()
    {
    }

    /// <summary>
    /// Dispatches to the node's Accept method.
    /// A null node is treated as successfully visited.
    /// </summary>
    public virtual bool VisitNode(IVisitableNode node)
    {
        return node == null ? true : node.Accept(this);
    }

    /// <summary>
    /// Visits every node in the list; always reports success.
    /// </summary>
    public virtual bool VisitNodes(IList nodes)
    {
        foreach(IVisitableNode child in nodes)
        {
            VisitNode(child);
        }
        return true;
    }

    public virtual void VisitCompilationUnit(CompilationUnit compilationUnit)
    {
        VisitNode(compilationUnit.EntryPointSourceUnit);
        VisitNodes(compilationUnit.SourceUnits);
    }

    public virtual bool VisitSourceUnit(SourceUnit unit)
    {
        VisitNodes(unit.Namespaces);
        VisitNodes(unit.Statements);
        return true;
    }

    // A namespace that is skipped (VisitEnter returns false) still
    // reports success.
    public virtual bool VisitNamespace(NamespaceDescriptor ns)
    {
        if (!VisitEnter(ns)) return true;

        VisitNodes(ns.TypeDefinitions);
        return VisitLeave(ns);
    }

    public virtual bool VisitEnter(NamespaceDescriptor ns)
    {
        return true;
    }

    public virtual bool VisitLeave(NamespaceDescriptor ns)
    {
        return true;
    }

    // Note: unlike VisitNamespace, a skipped type definition reports failure.
    public virtual bool VisitTypeDefinitionStatement(TypeDefinitionStatement typeDef)
    {
        if (!VisitEnter(typeDef)) return false;

        VisitNodes(typeDef.Statements);
        return VisitLeave(typeDef);
    }

    public virtual bool VisitEnter(TypeDefinitionStatement typeDef)
    {
        return true;
    }

    public virtual bool VisitLeave(TypeDefinitionStatement typeDef)
    {
        return true;
    }

    // A skipped method definition reports failure, mirroring
    // VisitTypeDefinitionStatement.
    public virtual bool VisitMethodDefinitionStatement(MethodDefinitionStatement methodDef)
    {
        if (!VisitEnter(methodDef)) return false;

        VisitNodes(methodDef.Arguments);
        VisitNodes(methodDef.Statements);
        return VisitLeave(methodDef);
    }

    public virtual void VisitConstructorDefinitionStatement(ConstructorDefinitionStatement statement)
    {
        if (!VisitEnter(statement)) return;

        VisitNodes(statement.Arguments);
        VisitNodes(statement.Statements);
        VisitLeave(statement);
    }

    public virtual bool VisitEnter(MethodDefinitionStatement methodDef)
    {
        return true;
    }

    public virtual bool VisitLeave(MethodDefinitionStatement methodDef)
    {
        return true;
    }

    //
    // References
    //

    public virtual void VisitTypeReference(TypeReference reference)
    {
    }

    public virtual bool VisitIdentifier(Identifier identifier)
    {
        VisitNode(identifier.TypeReference);
        return true;
    }

    public virtual void VisitParameterVarIdentifier(ParameterVarIdentifier parameterIdentifier)
    {
        VisitNode(parameterIdentifier.TypeReference);
        VisitNode(parameterIdentifier.InitExpression);
    }

    public virtual void VisitOpaqueIdentifier(OpaqueIdentifier opaqueIdentifier)
    {
        VisitNode(opaqueIdentifier.TypeReference);
    }

    //
    // Statements
    //

    public virtual void VisitExpressionStatement(ExpressionStatement statement)
    {
        VisitNode(statement.InnerExpression);
    }

    //
    // Expressions
    //

    public virtual void VisitMethodInvocationExpression(MethodInvocationExpression invocationExpression)
    {
        VisitNode(invocationExpression.Designator);
        VisitNodes(invocationExpression.Arguments);
        TraverseSharedExpressionParts(invocationExpression);
    }

    public virtual void VisitVariableReferenceExpression(VariableReferenceExpression expression)
    {
        VisitNode(expression.Identifier);
        TraverseSharedExpressionParts(expression);
    }

    public virtual void VisitConstExpression(ConstExpression expression)
    {
        TraverseSharedExpressionParts(expression);
    }

    public virtual void VisitBlockExpression(BlockExpression expression)
    {
        VisitNodes(expression.Parameters);
        VisitNodes(expression.Statements);
        TraverseSharedExpressionParts(expression);
    }

    public virtual void VisitMemberAccessExpression(MemberAccessExpression accessExpression)
    {
        VisitNode(accessExpression.Inner);
        TraverseSharedExpressionParts(accessExpression);
    }

    //
    // Helpers
    //

    // Visits the parts every expression shares: the postfix condition
    // and the attached block, if any.
    private void TraverseSharedExpressionParts(IExpression expression)
    {
        VisitNode(expression.PostfixCondition);
        VisitNode(expression.Block);
    }
}
}
| |
#region Header
// --------------------------------------------------------------------------
// Tethys.Silverlight
// ==========================================================================
//
// This library contains common code for .NET Windows applications.
//
// ===========================================================================
//
// <copyright file="Struts.cs" company="Tethys">
// Copyright 1998-2015 by Thomas Graf
// All rights reserved.
// Licensed under the Apache License, Version 2.0.
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied.
// </copyright>
//
// System ... Microsoft .Net Framework 4
// Tools .... Microsoft Visual Studio 2013
//
// ---------------------------------------------------------------------------
#endregion
namespace Tethys.Win32
{
using System;
using System.Diagnostics.CodeAnalysis;
using System.Drawing;
using System.Runtime.InteropServices;
// Structures to interoperate with the Windows 32 API
#pragma warning disable 1591
#region
/// <summary>
/// The SIZE structure specifies the width and height of a rectangle.
/// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct SIZE
    {
        /// <summary>Width of the rectangle.</summary>
        public int cx;
        /// <summary>Height of the rectangle.</summary>
        public int cy;
    }
#endregion
#region
/// <summary>
/// The RECT structure defines the coordinates of the upper-left and
/// lower-right corners of a rectangle.
/// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct RECT
    {
        /// <summary>X coordinate of the upper-left corner.</summary>
        public int left;
        /// <summary>Y coordinate of the upper-left corner.</summary>
        public int top;
        /// <summary>X coordinate of the lower-right corner.</summary>
        public int right;
        /// <summary>Y coordinate of the lower-right corner.</summary>
        public int bottom;
    }
#endregion
#region
/// <summary>
/// Carries information used to load common control classes from the
/// dynamic-link library (DLL). This structure is used with the
/// InitCommonControlsEx function.
/// </summary>
    [StructLayout(LayoutKind.Sequential, Pack = 1)]
    public class INITCOMMONCONTROLSEX
    {
        /// <summary>Size of this structure, in bytes; must be set before use.</summary>
        public int dwSize;
        /// <summary>Bit flags selecting which control classes to load (ICC_* values).</summary>
        public int dwICC;
    }
#endregion
#region
/// <summary>
/// Contains information about a button in a toolbar.
/// </summary>
    [SuppressMessage("StyleCop.CSharp.DocumentationRules",
        "SA1600:ElementsMustBeDocumented",
        Justification = "Reviewed. Suppression is OK here.")]
    [StructLayout(LayoutKind.Sequential, Pack = 1)]
    public struct TBBUTTON
    {
        /// <summary>Zero-based index of the button image.</summary>
        public int iBitmap;
        /// <summary>Command identifier associated with the button.</summary>
        public int idCommand;
        /// <summary>Button state flags (TBSTATE_*).</summary>
        public byte fsState;
        /// <summary>Button style flags (TBSTYLE_*).</summary>
        public byte fsStyle;
        /// <summary>Reserved; padding to match the native layout.</summary>
        public byte bReserved0;
        /// <summary>Reserved; padding to match the native layout.</summary>
        public byte bReserved1;
        // NOTE(review): native dwData is DWORD_PTR; int truncates on 64-bit — confirm usage.
        /// <summary>Application-defined value.</summary>
        public int dwData;
        /// <summary>Zero-based index of the button string.</summary>
        public int iString;
    }
#endregion
#region
/// <summary>
/// Win32 API POINT structure.
/// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public class POINT
    {
        /// <summary>
        /// X coordinate of the point.
        /// </summary>
        public int x;
        /// <summary>
        /// Y coordinate of the point.
        /// </summary>
        public int y;
        /// <summary>
        /// Default constructor; creates the point (0, 0).
        /// </summary>
        public POINT()
        {
        } // POINT()
        /// <summary>
        /// Constructs a POINT structure with the given coordinates.
        /// </summary>
        /// <param name="x">The x coordinate.</param>
        /// <param name="y">The y coordinate.</param>
        public POINT(int x, int y)
        {
            this.x = x;
            this.y = y;
        } // POINT()
    } // POINT
#endregion
#region
/// <summary>
/// Contains information about a notification message.
/// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct NMHDR
    {
        /// <summary>Handle of the control sending the notification.</summary>
        public IntPtr hwndFrom;
        // NOTE(review): native idFrom is UINT_PTR; int is too narrow on 64-bit — confirm usage.
        /// <summary>Identifier of the control sending the notification.</summary>
        public int idFrom;
        /// <summary>Notification code (e.g. NM_* or control-specific values).</summary>
        public int code;
    }
#endregion
#region
/// <summary>
/// TOOLTIPTEXTA.
/// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Ansi)]
    public struct TOOLTIPTEXTA
    {
        /// <summary>Standard notification header.</summary>
        public NMHDR hdr;
        /// <summary>Pointer to the tooltip text, or a resource identifier.</summary>
        public IntPtr lpszText;
        /// <summary>Inline buffer receiving the tooltip text (ANSI).</summary>
        [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 80)]
        public string szText;
        /// <summary>Instance handle used to resolve string resources.</summary>
        public IntPtr hinst;
        /// <summary>Flags describing how the text is supplied (TTF_*).</summary>
        public int uFlags;
    }
#endregion
#region
/// <summary>
/// TOOLTIPTEXT.
/// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Auto)]
    public struct TOOLTIPTEXT
    {
        /// <summary>Standard notification header.</summary>
        public NMHDR hdr;
        /// <summary>Pointer to the tooltip text, or a resource identifier.</summary>
        public IntPtr lpszText;
        /// <summary>Inline buffer receiving the tooltip text.</summary>
        [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 80)]
        public string szText;
        /// <summary>Instance handle used to resolve string resources.</summary>
        public IntPtr hinst;
        /// <summary>Flags describing how the text is supplied (TTF_*).</summary>
        public int uFlags;
    }
#endregion
#region
/// <summary>
/// Contains information specific to an NM_CUSTOMDRAW notification message.
/// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct NMCUSTOMDRAW
    {
        /// <summary>Standard notification header.</summary>
        public NMHDR hdr;
        /// <summary>Current drawing stage (CDDS_* values).</summary>
        public int dwDrawStage;
        /// <summary>Device context to draw into.</summary>
        public IntPtr hdc;
        /// <summary>Bounding rectangle of the area being drawn.</summary>
        public RECT rc;
        /// <summary>Identifier of the item being drawn.</summary>
        public int dwItemSpec;
        /// <summary>State of the item being drawn (CDIS_* values).</summary>
        public int uItemState;
        /// <summary>Application-defined item data.</summary>
        public int lItemlParam;
    }
#endregion
#region
/// <summary>
/// Contains information specific to an NM_CUSTOMDRAW notification
/// message sent by a toolbar control.
/// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct NMTBCUSTOMDRAW
    {
        /// <summary>Generic custom-draw information.</summary>
        public NMCUSTOMDRAW nmcd;
        /// <summary>Brush used to dither the background of a marked item.</summary>
        public IntPtr hbrMonoDither;
        /// <summary>Brush used to draw lines on the button.</summary>
        public IntPtr hbrLines;
        /// <summary>Pen used to draw lines on the button.</summary>
        public IntPtr hpenLines;
        // COLORREF values for the various button drawing elements.
        public int clrText;
        public int clrMark;
        public int clrTextHighlight;
        public int clrBtnFace;
        public int clrBtnHighlight;
        public int clrHighlightHotTrack;
        /// <summary>Rectangle the button text is drawn in.</summary>
        public RECT rcText;
        /// <summary>Background mode used for normal button text.</summary>
        public int nStringBkMode;
        /// <summary>Background mode used for highlighted button text.</summary>
        public int nHLStringBkMode;
    }
#endregion
#region
/// <summary>
/// Contains information specific to an NM_CUSTOMDRAW (list view)
/// notification message sent by a list-view control.
/// </summary>
    [StructLayout(LayoutKind.Sequential)]
    [CLSCompliant(false)]
    public struct NMLVCUSTOMDRAW
    {
        /// <summary>Generic custom-draw information.</summary>
        public NMCUSTOMDRAW nmcd;
        /// <summary>Text color (COLORREF) for the item.</summary>
        public uint clrText;
        /// <summary>Text background color (COLORREF) for the item.</summary>
        public uint clrTextBk;
        /// <summary>Index of the subitem being drawn.</summary>
        public int iSubItem;
    }
#endregion
#region
/// <summary>
/// Contains or receives information for a specific button in a toolbar.
/// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Auto)]
    public struct TBBUTTONINFO
    {
        /// <summary>Size of this structure, in bytes; must be set before use.</summary>
        public int cbSize;
        /// <summary>Flags selecting which members are valid (TBIF_*).</summary>
        public int dwMask;
        /// <summary>Command identifier of the button.</summary>
        public int idCommand;
        /// <summary>Image index of the button.</summary>
        public int iImage;
        /// <summary>Button state flags (TBSTATE_*).</summary>
        public byte fsState;
        /// <summary>Button style flags (TBSTYLE_*).</summary>
        public byte fsStyle;
        /// <summary>Width of the button, in pixels.</summary>
        public short cx;
        /// <summary>Application-defined value.</summary>
        public IntPtr lParam;
        /// <summary>Pointer to the button text buffer.</summary>
        public IntPtr pszText;
        /// <summary>Size of the text buffer, in characters.</summary>
        public int cchText;
    }
#endregion
#region
/// <summary>
/// Contains information that defines a band in a rebar control.
/// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct REBARBANDINFO
    {
        /// <summary>Size of this structure, in bytes; must be set before use.</summary>
        public int cbSize;
        /// <summary>Flags selecting which members are valid (RBBIM_*).</summary>
        public int fMask;
        /// <summary>Band style flags (RBBS_*).</summary>
        public int fStyle;
        /// <summary>Foreground color (COLORREF) of the band.</summary>
        public int clrFore;
        /// <summary>Background color (COLORREF) of the band.</summary>
        public int clrBack;
        /// <summary>Pointer to the band label text.</summary>
        public IntPtr lpText;
        /// <summary>Size of the text buffer, in characters.</summary>
        public int cch;
        /// <summary>Image index for the band.</summary>
        public int iImage;
        /// <summary>Handle of the child window contained in the band.</summary>
        public IntPtr hwndChild;
        // Child window size constraints and current band width, in pixels.
        public int cxMinChild;
        public int cyMinChild;
        public int cx;
        /// <summary>Handle of the band's background bitmap.</summary>
        public IntPtr hbmBack;
        /// <summary>Application-defined band identifier.</summary>
        public int wID;
        // Vertical sizing fields for variable-height bands.
        public int cyChild;
        public int cyMaxChild;
        public int cyIntegral;
        /// <summary>Ideal band width, used when the band is maximized.</summary>
        public int cxIdeal;
        // NOTE(review): native lParam is LPARAM (pointer-sized); int truncates on 64-bit — confirm usage.
        public int lParam;
        /// <summary>Width of the band header, in pixels.</summary>
        public int cxHeader;
    }
#endregion
#region
/// <summary>
/// The MOUSEHOOKSTRUCT structure contains information about a mouse event
/// passed to a WH_MOUSE hook procedure, MouseProc.
/// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct MOUSEHOOKSTRUCT
    {
        /// <summary>Cursor position, in screen coordinates.</summary>
        public POINT pt;
        /// <summary>Handle of the window that will receive the mouse message.</summary>
        public IntPtr hwnd;
        /// <summary>Hit-test code (HT* values).</summary>
        public int wHitTestCode;
        /// <summary>Extra information associated with the message.</summary>
        public IntPtr dwExtraInfo;
    }
#endregion
#region
/// <summary>
/// Contains information used to process toolbar notification messages.
/// This structure supersedes the TBNOTIFY structure.
/// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct NMTOOLBAR
    {
        /// <summary>Standard notification header.</summary>
        public NMHDR hdr;
        /// <summary>Command identifier of the button involved in the notification.</summary>
        public int iItem;
        /// <summary>Information about the button involved in the notification.</summary>
        public TBBUTTON tbButton;
        /// <summary>Size of the text buffer, in characters.</summary>
        public int cchText;
        /// <summary>Pointer to the button text.</summary>
        public IntPtr pszText;
        /// <summary>Bounding rectangle of the button.</summary>
        public RECT rcButton;
    }
#endregion
#region
/// <summary>
/// Contains information used in handling the RBN_CHEVRONPUSHED
/// notification message.
/// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct NMREBARCHEVRON
    {
        /// <summary>Standard notification header.</summary>
        public NMHDR hdr;
        /// <summary>Index of the band the chevron was pushed on.</summary>
        public int uBand;
        /// <summary>Application-defined identifier of the band.</summary>
        public int wID;
        /// <summary>Application-defined value associated with the band.</summary>
        public int lParam;
        /// <summary>Bounding rectangle of the chevron.</summary>
        public RECT rc;
        /// <summary>Application-defined value for the notification itself.</summary>
        public int lParamNM;
    }
#endregion
#region BITMAP
/// <summary>
/// The Bitmap struct.
/// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct BITMAP
    {
        // NOTE(review): native BITMAP uses 32-bit LONG for these fields;
        // C# long is 64-bit, so this layout looks wrong for interop — confirm
        // against actual P/Invoke usage before relying on it.
        public long bmType;
        public long bmWidth;
        public long bmHeight;
        public long bmWidthBytes;
        /// <summary>Number of color planes.</summary>
        public short bmPlanes;
        /// <summary>Bits per pixel.</summary>
        public short bmBitsPixel;
        /// <summary>Pointer to the bitmap bits.</summary>
        public IntPtr bmBits;
    }
#endregion
#region BITMAPINFO_FLAT
/// <summary>
/// The BITMAPINFO_FLAT struct.
/// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct BITMAPINFO_FLAT
    {
        // BITMAPINFOHEADER fields flattened inline (bmiHeader_* prefix).
        public int bmiHeader_biSize;
        public int bmiHeader_biWidth;
        public int bmiHeader_biHeight;
        public short bmiHeader_biPlanes;
        public short bmiHeader_biBitCount;
        public int bmiHeader_biCompression;
        public int bmiHeader_biSizeImage;
        public int bmiHeader_biXPelsPerMeter;
        public int bmiHeader_biYPelsPerMeter;
        public int bmiHeader_biClrUsed;
        public int bmiHeader_biClrImportant;
        /// <summary>Inline color table (up to 256 RGBQUAD entries = 1024 bytes).</summary>
        [MarshalAs(UnmanagedType.ByValArray, SizeConst = 1024)]
        public byte[] bmiColors;
    }
#endregion
#region RGBQUAD
/// <summary>
/// The RGBQUAD struct.
/// </summary>
    // Field order (blue, green, red, reserved) matches the native RGBQUAD.
    public struct RGBQUAD
    {
        /// <summary>Blue intensity.</summary>
        public byte rgbBlue;
        /// <summary>Green intensity.</summary>
        public byte rgbGreen;
        /// <summary>Red intensity.</summary>
        public byte rgbRed;
        /// <summary>Reserved; must be zero.</summary>
        public byte rgbReserved;
    }
#endregion
#region
/// <summary>
/// The BITMAPINFOHEADER struct.
/// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public class BITMAPINFOHEADER
    {
        /// <summary>Size of this structure, in bytes; pre-initialized to the marshaled size.</summary>
        public int biSize = Marshal.SizeOf(typeof(BITMAPINFOHEADER));
        /// <summary>Bitmap width, in pixels.</summary>
        public int biWidth;
        /// <summary>Bitmap height, in pixels (negative for top-down DIBs).</summary>
        public int biHeight;
        /// <summary>Number of color planes; must be 1.</summary>
        public short biPlanes;
        /// <summary>Bits per pixel.</summary>
        public short biBitCount;
        /// <summary>Compression type (BI_* values).</summary>
        public int biCompression;
        /// <summary>Image size in bytes; may be zero for uncompressed bitmaps.</summary>
        public int biSizeImage;
        /// <summary>Horizontal resolution, pixels per meter.</summary>
        public int biXPelsPerMeter;
        /// <summary>Vertical resolution, pixels per meter.</summary>
        public int biYPelsPerMeter;
        /// <summary>Number of color table entries actually used.</summary>
        public int biClrUsed;
        /// <summary>Number of color table entries that are important.</summary>
        public int biClrImportant;
    }
#endregion
// BITMAPINFO
#region
    /// <summary>
    /// The BITMAPINFO struct: header plus inline color table.
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public class BITMAPINFO
    {
        /// <summary>Bitmap header information.</summary>
        public BITMAPINFOHEADER bmiHeader = new BITMAPINFOHEADER();
        /// <summary>Inline color table (up to 256 RGBQUAD entries = 1024 bytes).</summary>
        [MarshalAs(UnmanagedType.ByValArray, SizeConst = 1024)]
        public byte[] bmiColors;
    }
#endregion
// PALETTEENTRY
#region
    /// <summary>
    /// The PALETTEENTRY struct: one color entry in a logical palette.
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct PALETTEENTRY
    {
        /// <summary>Red intensity.</summary>
        public byte peRed;
        /// <summary>Green intensity.</summary>
        public byte peGreen;
        /// <summary>Blue intensity.</summary>
        public byte peBlue;
        /// <summary>Palette entry usage flags (PC_* values).</summary>
        public byte peFlags;
    }
#endregion
// MSG
#region
    /// <summary>
    /// The MSG struct: a message retrieved from a thread's message queue.
    /// </summary>
    [CLSCompliant(false)]
    [StructLayout(LayoutKind.Sequential)]
    public struct MSG
    {
        /// <summary>Target window handle.</summary>
        public IntPtr hwnd;
        /// <summary>Message identifier (WM_* values).</summary>
        public int message;
        /// <summary>Additional message information (meaning depends on message).</summary>
        public IntPtr wParam;
        /// <summary>Additional message information (meaning depends on message).</summary>
        public IntPtr lParam;
        /// <summary>Time the message was posted.</summary>
        public int time;
        /// <summary>Cursor X position, in screen coordinates, when posted.</summary>
        public int pt_x;
        /// <summary>Cursor Y position, in screen coordinates, when posted.</summary>
        public int pt_y;
    }
#endregion
// CALLBACK WINDOW RETURN
#region
    /// <summary>
    /// Message data passed to a WH_CALLWNDPROCRET hook procedure.
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    [SuppressMessage("StyleCop.CSharp.DocumentationRules",
        "SA1600:ElementsMustBeDocumented",
        Justification = "Reviewed. Suppression is OK here.")]
    [CLSCompliant(false)]
    public struct CWPRETSTRUCT
    {
        /// <summary>Return value of the window procedure.</summary>
        public IntPtr lResult;
        /// <summary>LPARAM of the processed message.</summary>
        public IntPtr lParam;
        /// <summary>WPARAM of the processed message.</summary>
        public IntPtr wParam;
        /// <summary>Message identifier.</summary>
        public uint message;
        /// <summary>Handle of the window that processed the message.</summary>
        public IntPtr hwnd;
    };
#endregion
// HD_HITTESTINFO
#region
    /// <summary>
    /// Hit-test information for a header control (HDM_HITTEST).
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    [CLSCompliant(false)]
    public struct HD_HITTESTINFO
    {
        /// <summary>Point to hit-test, in client coordinates.</summary>
        public POINT pt;
        /// <summary>Receives the hit-test result flags (HHT_* values).</summary>
        public uint flags;
        /// <summary>Receives the index of the item at the point, if any.</summary>
        public int iItem;
    }
#endregion
// DLLVERSIONINFO
#region
    /// <summary>
    /// Version information returned by a DLL's DllGetVersion export.
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct DLLVERSIONINFO
    {
        /// <summary>Size of this structure, in bytes; must be set before use.</summary>
        public int cbSize;
        /// <summary>Major version number.</summary>
        public int dwMajorVersion;
        /// <summary>Minor version number.</summary>
        public int dwMinorVersion;
        /// <summary>Build number.</summary>
        public int dwBuildNumber;
        /// <summary>Platform identifier (DLLVER_PLATFORM_* values).</summary>
        public int dwPlatformID;
    }
#endregion
// PAINTSTRUCT
#region
    /// <summary>
    /// Painting information for BeginPaint/EndPaint.
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct PAINTSTRUCT
    {
        /// <summary>Device context to paint into.</summary>
        public IntPtr hdc;
        /// <summary>Nonzero when the background must be erased.</summary>
        public int fErase;
        // NOTE(review): native rcPaint is a RECT (left, top, right, bottom);
        // System.Drawing.Rectangle marshals as (x, y, width, height), so the
        // last two fields will be misinterpreted — confirm against usage.
        public Rectangle rcPaint;
        // Reserved by the system; applications should not use these fields.
        public int fRestore;
        public int fIncUpdate;
        public int Reserved1;
        public int Reserved2;
        public int Reserved3;
        public int Reserved4;
        public int Reserved5;
        public int Reserved6;
        public int Reserved7;
        public int Reserved8;
    }
#endregion
// BLENDFUNCTION
#region
    /// <summary>
    /// Controls source/destination blending for AlphaBlend.
    /// </summary>
    [StructLayout(LayoutKind.Sequential, Pack = 1)]
    public struct BLENDFUNCTION
    {
        /// <summary>Blend operation; AC_SRC_OVER is the only documented value.</summary>
        public byte BlendOp;
        /// <summary>Blend flags; must be zero.</summary>
        public byte BlendFlags;
        /// <summary>Overall source alpha (0 = transparent, 255 = opaque).</summary>
        public byte SourceConstantAlpha;
        /// <summary>Source alpha format (e.g. AC_SRC_ALPHA for per-pixel alpha).</summary>
        public byte AlphaFormat;
    }
#endregion
// TRACKMOUSEEVENTS
#region
    /// <summary>
    /// Parameters for TrackMouseEvent (native TRACKMOUSEEVENT).
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    [CLSCompliant(false)]
    public struct TRACKMOUSEEVENTS
    {
        /// <summary>Size of this structure, in bytes; must be set before use.</summary>
        public uint cbSize;
        /// <summary>Services requested (TME_* values).</summary>
        public uint dwFlags;
        /// <summary>Window to track.</summary>
        public IntPtr hWnd;
        /// <summary>Hover timeout in milliseconds (or HOVER_DEFAULT).</summary>
        public uint dwHoverTime;
    }
#endregion
// STRINGBUFFER
#region
    /// <summary>
    /// Fixed-size string buffer for receiving text from native calls.
    /// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Auto)]
    public struct STRINGBUFFER
    {
        /// <summary>Inline text buffer (80 characters).</summary>
        [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 80)]
        public string szText;
    }
#endregion
// NMTVCUSTOMDRAW
#region
    /// <summary>
    /// Custom-draw information for a tree-view control.
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    [CLSCompliant(false)]
    public struct NMTVCUSTOMDRAW
    {
        /// <summary>Generic custom-draw information.</summary>
        public NMCUSTOMDRAW nmcd;
        /// <summary>Text color (COLORREF) for the item.</summary>
        public uint clrText;
        /// <summary>Text background color (COLORREF) for the item.</summary>
        public uint clrTextBk;
        /// <summary>Zero-based indentation level of the item.</summary>
        public int iLevel;
    }
#endregion
// TVITEM
#region
    /// <summary>
    /// Attributes of a tree-view item.
    /// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Auto)]
    [CLSCompliant(false)]
    public struct TVITEM
    {
        /// <summary>Flags selecting which members are valid (TVIF_*).</summary>
        public uint mask;
        /// <summary>Handle of the item.</summary>
        public IntPtr hItem;
        /// <summary>Item state flags (TVIS_*).</summary>
        public uint state;
        /// <summary>Bits of state that are valid.</summary>
        public uint stateMask;
        /// <summary>Pointer to the item text buffer.</summary>
        public IntPtr pszText;
        /// <summary>Size of the text buffer, in characters.</summary>
        public int cchTextMax;
        /// <summary>Image index for the unselected item.</summary>
        public int iImage;
        /// <summary>Image index for the selected item.</summary>
        public int iSelectedImage;
        /// <summary>Child indicator (or I_CHILDRENCALLBACK).</summary>
        public int cChildren;
        // NOTE(review): native lParam is LPARAM (pointer-sized); int truncates on 64-bit — confirm usage.
        public int lParam;
    }
#endregion
// LVITEM
#region
/// <summary>
/// Specifies or receives the attributes of a list-view item.
/// This structure has been updated to support a new mask value (LVIF_INDENT)
/// that enables item indenting. This structure supersedes the LV_ITEM
/// structure.
/// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Auto)]
    [CLSCompliant(false)]
    public struct LVITEM
    {
        /// <summary>Flags selecting which members are valid (LVIF_*).</summary>
        public uint mask;
        /// <summary>Zero-based index of the item.</summary>
        public int iItem;
        /// <summary>One-based index of the subitem, or zero for the item itself.</summary>
        public int iSubItem;
        /// <summary>Item state flags (LVIS_*).</summary>
        public uint state;
        /// <summary>Bits of state that are valid.</summary>
        public uint stateMask;
        /// <summary>Pointer to the item text buffer.</summary>
        public IntPtr pszText;
        /// <summary>Size of the text buffer, in characters.</summary>
        public int cchTextMax;
        /// <summary>Image index for the item.</summary>
        public int iImage;
        // NOTE(review): native lParam is LPARAM (pointer-sized); int truncates on 64-bit — confirm usage.
        public int lParam;
        /// <summary>Indentation of the item, in image widths (LVIF_INDENT).</summary>
        public int iIndent;
    }
#endregion
#region HDITEM
/// <summary>
/// Contains information about an item in a header control.
/// This structure supersedes the HD_ITEM structure.
/// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Auto)]
    [CLSCompliant(false)]
    public struct HDITEM
    {
        /// <summary>Flags selecting which members are valid (HDI_*).</summary>
        public uint mask;
        /// <summary>Width or height of the item.</summary>
        public int cxy;
        /// <summary>Pointer to the item text buffer.</summary>
        public IntPtr pszText;
        /// <summary>Handle of the item bitmap.</summary>
        public IntPtr hbm;
        /// <summary>Size of the text buffer, in characters.</summary>
        public int cchTextMax;
        /// <summary>Format flags (HDF_*).</summary>
        public int fmt;
        /// <summary>Application-defined value.</summary>
        public int lParam;
        /// <summary>Image index for the item.</summary>
        public int iImage;
        /// <summary>Left-to-right order of the item.</summary>
        public int iOrder;
    } // HDITEM
#endregion // HDITEM
#region SHFILEOPERATION STRUCTS
/// <summary>
/// Contains information that the SHFileOperation function uses
/// to perform file operations.
/// </summary>
    [StructLayout(LayoutKind.Sequential, Pack = 1, CharSet = CharSet.Auto)]
    public struct SHFILEOPSTRUCT
    {
        /// <summary>
        /// Window handle to the dialog box to display information about the status of the file operation.
        /// </summary>
        public IntPtr hWnd;
        /// <summary>
        /// Value that indicates which operation to perform. This member can be one of the following values.
        /// * FO_COPY - Copy the files specified in the pFrom member to the
        /// location specified in the pTo member.
        /// * FO_DELETE - Delete the files specified in pFrom.
        /// * FO_MOVE - Move the files specified in pFrom to the location
        /// specified in pTo.
        /// * FO_RENAME - Rename the file specified in pFrom. You cannot use this
        /// flag to rename multiple files with a single function call.
        /// Use FO_MOVE instead.
        /// </summary>
        public Int32 wFunc;
        /// <summary>
        /// Address of a buffer to specify one or more source file names. These
        /// names must be fully qualified paths. Standard Microsoft MS-DOS wild
        /// cards, such as "*", are permitted in the file-name position.
        /// Although this member is declared as a null-terminated string, it
        /// is used as a buffer to hold multiple file names. Each file name
        /// must be terminated by a single NULL character. An additional NULL
        /// character must be appended to the end of the final name to indicate
        /// the end of pFrom.
        /// </summary>
        [MarshalAs(UnmanagedType.LPTStr)]
        public string pFrom;
        /// <summary>
        /// Address of a buffer to contain the name of the destination file or
        /// directory. This parameter must be set to NULL if it is not used.
        /// Like pFrom, the pTo member is also a double-null terminated string
        /// and is handled in much the same way. However, pTo must meet the
        /// following specifications.
        /// <list type="bullet">
        /// <item>Wildcard characters are not supported.</item>
        /// <item>Copy and Move operations can specify destination directories that
        /// do not exist and the system will attempt to create them. The system
        /// normally displays a dialog box to ask the user if they want to
        /// create the new directory. To suppress this dialog box and have the
        /// directories created silently, set the FOF_NOCONFIRMMKDIR flag
        /// in fFlags.</item>
        /// <item>For Copy and Move operations, the buffer can contain multiple
        /// destination file names if the fFlags member specifies
        /// FOF_MULTIDESTFILES.</item>
        /// <item>Pack multiple names into the string in the same way as for pFrom.</item>
        /// <item>Use only fully-qualified paths. Using relative paths will have
        /// unpredictable results.</item>
        /// </list>
        /// </summary>
        [MarshalAs(UnmanagedType.LPTStr)]
        public string pTo;
        /// <summary>
        /// Flags that control the file operation (FOF_* values).
        /// </summary>
        public Int16 fFlags;
        /// <summary>
        /// Value that receives TRUE if the user aborted any file operations
        /// before they were completed, or FALSE otherwise.
        /// </summary>
        public Int32 fAnyOperationsAborted;
        /// <summary>
        /// A handle to a name mapping object containing the old and new names of
        /// the renamed files. This member is used only if the fFlags member
        /// includes the FOF_WANTMAPPINGHANDLE flag. See Remarks for more
        /// details.
        /// </summary>
        public IntPtr hNameMappings;
        /// <summary>
        /// Address of a string to use as the title of a progress dialog box.
        /// This member is used only if fFlags includes the FOF_SIMPLEPROGRESS
        /// flag.
        /// </summary>
        [MarshalAs(UnmanagedType.LPTStr)]
        public string lpszProgressTitle;
    } // SHFILEOPSTRUCT
#endregion // SHFILEOPERATION STRUCTS
#region LOGFONT
/// <summary>
/// Windows API Logical Font structure to represent information
/// about a font.
/// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Auto)]
    public struct LOGFONT
    {
        /// <summary>
        /// Height of the font, in logical units.
        /// </summary>
        public int lfHeight;
        /// <summary>Average character width, in logical units.</summary>
        public int lfWidth;
        /// <summary>Escapement angle, in tenths of degrees.</summary>
        public int lfEscapement;
        /// <summary>Character baseline orientation angle, in tenths of degrees.</summary>
        public int lfOrientation;
        /// <summary>Font weight (e.g. 400 = normal, 700 = bold).</summary>
        public int lfWeight;
        /// <summary>Nonzero for an italic font.</summary>
        public byte lfItalic;
        /// <summary>Nonzero for an underlined font.</summary>
        public byte lfUnderline;
        /// <summary>Nonzero for a struck-out font.</summary>
        public byte lfStrikeOut;
        /// <summary>Character set identifier.</summary>
        public byte lfCharSet;
        /// <summary>Output precision.</summary>
        public byte lfOutPrecision;
        /// <summary>Clipping precision.</summary>
        public byte lfClipPrecision;
        /// <summary>Output quality.</summary>
        public byte lfQuality;
        /// <summary>Pitch (low bits) and family (high bits) of the font.</summary>
        public byte lfPitchAndFamily;
        /// <summary>Typeface name (at most 32 characters including terminator).</summary>
        [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 32)]
        public string lfFaceName;
    } // LOGFONT
#endregion LOGFONT
#region TEXTMETRIC
/// <summary>
/// Summary description for FontUtility.
/// </summary>
    /// <summary>
    /// Basic metrics of a physical font (native TEXTMETRIC).
    /// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Auto)]
    public struct TEXTMETRIC
    {
        // Vertical metrics, in logical units.
        public int tmHeight;
        public int tmAscent;
        public int tmDescent;
        public int tmInternalLeading;
        public int tmExternalLeading;
        // Horizontal metrics, in logical units.
        public int tmAveCharWidth;
        public int tmMaxCharWidth;
        /// <summary>Font weight.</summary>
        public int tmWeight;
        /// <summary>Extra width added to some synthesized fonts.</summary>
        public int tmOverhang;
        // Device aspect ratio the font was designed for.
        public int tmDigitizedAspectX;
        public int tmDigitizedAspectY;
        // Character range markers, each marshaled as a single character.
        [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 1)]
        public String tmFirstChar;
        [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 1)]
        public String tmLastChar;
        [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 1)]
        public String tmDefaultChar;
        [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 1)]
        public String tmBreakChar;
        // Style and family attributes.
        public byte tmItalic;
        public byte tmUnderlined;
        public byte tmStruckOut;
        public byte tmPitchAndFamily;
        public byte tmCharSet;
    } // TEXTMETRIC
#endregion TEXTMETRIC
#region FONTFAMILYTYPES
/// <summary>
/// FontFamilyTypes
/// </summary>
    // Values occupy the family bits (bits 4-7) of LOGFONT.lfPitchAndFamily.
    public enum FontFamilyTypes
    {
        /// <summary>
        /// Don't care or don't know.
        /// </summary>
        FF_DONTCARE = (0 << 4),
        /// <summary>
        /// Variable stroke width, serifed.
        /// Times Roman, Century Schoolbook, etc.
        /// </summary>
        FF_ROMAN = (1 << 4),
        /// <summary>
        /// Variable stroke width, sans-serifed.
        /// Helvetica, Swiss, etc.
        /// </summary>
        FF_SWISS = (2 << 4),
        /// <summary>
        /// Constant stroke width, serifed or sans-serifed.
        /// Pica, Elite, Courier, etc.
        /// </summary>
        FF_MODERN = (3 << 4),
        /// <summary>
        /// Cursive, etc.
        /// </summary>
        FF_SCRIPT = (4 << 4),
        /// <summary>
        /// Old English, etc.
        /// </summary>
        FF_DECORATIVE = (5 << 4)
    } // FontFamilyTypes
/// <summary>
/// Contains information about the file that is found by the FindFirstFile,
/// FindFirstFileEx, or FindNextFile function.
/// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Auto)]
    [CLSCompliant(false)]
    public struct WIN32_FIND_DATAW
    {
        /// <summary>File attribute flags (FILE_ATTRIBUTE_*).</summary>
        public uint dwFileAttributes;
        // FILETIME values packed into 64-bit integers.
        public long ftCreationTime;
        public long ftLastAccessTime;
        public long ftLastWriteTime;
        /// <summary>High-order part of the file size.</summary>
        public uint nFileSizeHigh;
        /// <summary>Low-order part of the file size.</summary>
        public uint nFileSizeLow;
        /// <summary>Reserved.</summary>
        public uint dwReserved0;
        /// <summary>Reserved.</summary>
        public uint dwReserved1;
        /// <summary>File name (up to MAX_PATH characters).</summary>
        [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 260)]
        public string cFileName;
        /// <summary>Alternative (8.3) file name.</summary>
        [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 14)]
        public string cAlternateFileName;
    } // WIN32_FIND_DATAW
#endregion // FONTFAMILYTYPES
#pragma warning restore 1591
} // Tethys.Win32
// ========================
// Tethys: end of structs.cs
// ========================
| |
#pragma warning disable IDE1006 // Naming Styles
using System;
using System.Runtime.InteropServices;
using static Vanara.PInvoke.Gdi32;
using static Vanara.PInvoke.WinMm;
namespace Vanara.PInvoke
{
/// <summary>Items from the AviFil32.dll</summary>
public static partial class AviFil32
{
/// <summary>'avih' FOURCC: chunk ID of the AVI main header.</summary>
public static readonly uint ckidAVIMAINHDR = MAKEFOURCC('a', 'v', 'i', 'h');

/// <summary>'idx1' FOURCC: chunk ID of the AVI index chunk.</summary>
public static readonly uint ckidAVINEWINDEX = MAKEFOURCC('i', 'd', 'x', '1');

/// <summary>'JUNK' FOURCC: chunk ID used for padding/alignment data.</summary>
public static readonly uint ckidAVIPADDING = MAKEFOURCC('J', 'U', 'N', 'K');

/// <summary>'strf' FOURCC: chunk ID of a stream format chunk.</summary>
public static readonly uint ckidSTREAMFORMAT = MAKEFOURCC('s', 't', 'r', 'f');

/// <summary>'strd' FOURCC: chunk ID of a stream handler data chunk.</summary>
public static readonly uint ckidSTREAMHANDLERDATA = MAKEFOURCC('s', 't', 'r', 'd');

/// <summary>'strh' FOURCC: chunk ID of a stream header chunk.</summary>
public static readonly uint ckidSTREAMHEADER = MAKEFOURCC('s', 't', 'r', 'h');

/// <summary>'strn' FOURCC: chunk ID of a stream name chunk.</summary>
public static readonly uint ckidSTREAMNAME = MAKEFOURCC('s', 't', 'r', 'n');

/// <summary>'db' TWOCC: chunk type for uncompressed DIB bits.</summary>
public static readonly ushort cktypeDIBbits = aviTWOCC('d', 'b');

/// <summary>'dc' TWOCC: chunk type for compressed DIB bits.</summary>
public static readonly ushort cktypeDIBcompressed = aviTWOCC('d', 'c');

/// <summary>'pc' TWOCC: chunk type for a palette change.</summary>
public static readonly ushort cktypePALchange = aviTWOCC('p', 'c');

/// <summary>'wb' TWOCC: chunk type for waveform-audio bytes.</summary>
public static readonly ushort cktypeWAVEbytes = aviTWOCC('w', 'b');

/// <summary>'AVI ' FOURCC: RIFF form type of an AVI file.</summary>
public static readonly uint formtypeAVI = MAKEFOURCC('A', 'V', 'I', ' ');

/// <summary>'hdrl' FOURCC: LIST type of the AVI header list.</summary>
public static readonly uint listtypeAVIHEADER = MAKEFOURCC('h', 'd', 'r', 'l');

/// <summary>'movi' FOURCC: LIST type of the movie (stream data) list.</summary>
public static readonly uint listtypeAVIMOVIE = MAKEFOURCC('m', 'o', 'v', 'i');

/// <summary>'rec ' FOURCC: LIST type of an interleaved record list.</summary>
public static readonly uint listtypeAVIRECORD = MAKEFOURCC('r', 'e', 'c', ' ');

/// <summary>'strl' FOURCC: LIST type of a stream header list.</summary>
public static readonly uint listtypeSTREAMHEADER = MAKEFOURCC('s', 't', 'r', 'l');

/// <summary>'auds' FOURCC: stream type for an audio stream.</summary>
public static readonly uint streamtypeAUDIO = MAKEFOURCC('a', 'u', 'd', 's');

/// <summary>'mids' FOURCC: stream type for a MIDI stream.</summary>
public static readonly uint streamtypeMIDI = MAKEFOURCC('m', 'i', 'd', 's');

/// <summary>'txts' FOURCC: stream type for a text stream.</summary>
public static readonly uint streamtypeTEXT = MAKEFOURCC('t', 'x', 't', 's');

/// <summary>'vids' FOURCC: stream type for a video stream.</summary>
public static readonly uint streamtypeVIDEO = MAKEFOURCC('v', 'i', 'd', 's');
/// <summary>Flags for <c>VIDEOHDR</c></summary>
/// <remarks>Combined bitwise in the <c>dwFlags</c> member of <see cref="VIDEOHDR"/>.</remarks>
[PInvokeData("vfw.h", MSDNShortId = "NS:vfw.videohdr_tag")]
[Flags]
public enum VHDR : uint
{
    /// <summary>Done bit</summary>
    VHDR_DONE = 0x00000001,

    /// <summary>Set if this header has been prepared</summary>
    VHDR_PREPARED = 0x00000002,

    /// <summary>Reserved for driver</summary>
    VHDR_INQUEUE = 0x00000004,

    /// <summary>Key Frame</summary>
    VHDR_KEYFRAME = 0x00000008,
}
/// <summary>
/// <para>The <c>CAPDRIVERCAPS</c> structure defines the capabilities of the capture driver.</para>
/// <para>
/// An application should use the WM_CAP_DRIVER_GET_CAPS message or capDriverGetCaps macro to place a copy of the driver
/// capabilities in a <c>CAPDRIVERCAPS</c> structure whenever the application connects a capture window to a capture driver.
/// </para>
/// </summary>
/// <remarks>
/// The four trailing <c>HANDLE</c> members are documented as unused in Win32 applications;
/// presumably they are retained only for native layout compatibility — confirm before use.
/// </remarks>
// https://docs.microsoft.com/en-us/windows/win32/api/vfw/ns-vfw-capdrivercaps typedef struct tagCapDriverCaps { UINT wDeviceIndex;
// BOOL fHasOverlay; BOOL fHasDlgVideoSource; BOOL fHasDlgVideoFormat; BOOL fHasDlgVideoDisplay; BOOL fCaptureInitialized; BOOL
// fDriverSuppliesPalettes; HANDLE hVideoIn; HANDLE hVideoOut; HANDLE hVideoExtIn; HANDLE hVideoExtOut; } CAPDRIVERCAPS,
// *PCAPDRIVERCAPS, *LPCAPDRIVERCAPS;
[PInvokeData("vfw.h", MSDNShortId = "NS:vfw.tagCapDriverCaps")]
[StructLayout(LayoutKind.Sequential)]
public struct CAPDRIVERCAPS
{
    /// <summary>Index of the capture driver. An index value can range from 0 to 9.</summary>
    public uint wDeviceIndex;

    /// <summary>Video-overlay flag. The value of this member is <c>TRUE</c> if the device supports video overlay.</summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fHasOverlay;

    /// <summary>
    /// Video source dialog flag. The value of this member is <c>TRUE</c> if the device supports a dialog box for selecting and
    /// controlling the video source.
    /// </summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fHasDlgVideoSource;

    /// <summary>
    /// Video format dialog flag. The value of this member is <c>TRUE</c> if the device supports a dialog box for selecting the
    /// video format.
    /// </summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fHasDlgVideoFormat;

    /// <summary>
    /// Video display dialog flag. The value of this member is <c>TRUE</c> if the device supports a dialog box for controlling the
    /// redisplay of video from the capture frame buffer.
    /// </summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fHasDlgVideoDisplay;

    /// <summary>
    /// Capture initialization flag. The value of this member is <c>TRUE</c> if a capture device has been successfully connected.
    /// </summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fCaptureInitialized;

    /// <summary>Driver palette flag. The value of this member is <c>TRUE</c> if the driver can create palettes.</summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fDriverSuppliesPalettes;

    /// <summary>Not used in Win32 applications.</summary>
    public HANDLE hVideoIn;

    /// <summary>Not used in Win32 applications.</summary>
    public HANDLE hVideoOut;

    /// <summary>Not used in Win32 applications.</summary>
    public HANDLE hVideoExtIn;

    /// <summary>Not used in Win32 applications.</summary>
    public HANDLE hVideoExtOut;
}
/// <summary>
/// The <c>CAPINFOCHUNK</c> structure contains parameters that can be used to define an information chunk within an AVI capture
/// file. The WM_CAP_FILE_SET_INFOCHUNK message or <c>capSetInfoChunk</c> macro is used to send a <c>CAPINFOCHUNK</c> structure to a
/// capture window.
/// </summary>
/// <remarks>
/// Null/zero values act as deletion requests rather than data: see the individual member
/// descriptions for which combination deletes which chunks.
/// </remarks>
// https://docs.microsoft.com/en-us/windows/win32/api/vfw/ns-vfw-capinfochunk typedef struct tagCapInfoChunk { FOURCC fccInfoID;
// LPVOID lpData; LONG cbData; } CAPINFOCHUNK, *PCAPINFOCHUNK, *LPCAPINFOCHUNK;
[PInvokeData("vfw.h", MSDNShortId = "NS:vfw.tagCapInfoChunk")]
[StructLayout(LayoutKind.Sequential)]
public struct CAPINFOCHUNK
{
    /// <summary>
    /// Four-character code that identifies the representation of the chunk data. If this value is <c>NULL</c> and <c>lpData</c> is
    /// <c>NULL</c>, all accumulated information chunks are deleted.
    /// </summary>
    public uint fccInfoID;

    /// <summary>Pointer to the data. If this value is <c>NULL</c>, all <c>fccInfoID</c> information chunks are deleted.</summary>
    public IntPtr lpData;

    /// <summary>
    /// Size, in bytes, of the data pointed to by <c>lpData</c>. If <c>lpData</c> specifies a null-terminated string, use the string
    /// length incremented by one to save the <c>NULL</c> with the string.
    /// </summary>
    public int cbData;
}
/// <summary>The <c>CAPSTATUS</c> structure defines the current state of the capture window.</summary>
/// <remarks>
/// Because the state of a capture window changes in response to various messages, an application should update the information in
/// this structure whenever it needs to enable menu items, determine the actual state of the capture window, or call the video
/// format dialog box. If the application yields during streaming capture, this structure returns the progress of the capture in the
/// <c>dwCurrentVideoFrame</c>, <c>dwCurrentVideoFramesDropped</c>, <c>dwCurrentWaveSamples</c>, and <c>dwCurrentTimeElapsedMS</c>
/// members. Use the WM_CAP_GET_STATUS message or capGetStatus macro to update the contents of this structure.
/// </remarks>
// https://docs.microsoft.com/en-us/windows/win32/api/vfw/ns-vfw-capstatus typedef struct tagCapStatus { UINT uiImageWidth; UINT
// uiImageHeight; BOOL fLiveWindow; BOOL fOverlayWindow; BOOL fScale; POINT ptScroll; BOOL fUsingDefaultPalette; BOOL
// fAudioHardware; BOOL fCapFileExists; DWORD dwCurrentVideoFrame; DWORD dwCurrentVideoFramesDropped; DWORD dwCurrentWaveSamples;
// DWORD dwCurrentTimeElapsedMS; HPALETTE hPalCurrent; BOOL fCapturingNow; DWORD dwReturn; UINT wNumVideoAllocated; UINT
// wNumAudioAllocated; } CAPSTATUS, *PCAPSTATUS, *LPCAPSTATUS;
[PInvokeData("vfw.h", MSDNShortId = "NS:vfw.tagCapStatus")]
[StructLayout(LayoutKind.Sequential)]
public struct CAPSTATUS
{
    /// <summary>Image width, in pixels.</summary>
    public uint uiImageWidth;

    /// <summary>Image height, in pixels</summary>
    public uint uiImageHeight;

    /// <summary>
    /// Live window flag. The value of this member is <c>TRUE</c> if the window is displaying video using the preview method.
    /// </summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fLiveWindow;

    /// <summary>
    /// Overlay window flag. The value of this member is <c>TRUE</c> if the window is displaying video using hardware overlay.
    /// </summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fOverlayWindow;

    /// <summary>
    /// Input scaling flag. The value of this member is <c>TRUE</c> if the window is scaling the input video to the client area when
    /// displaying video using preview. This parameter has no effect when displaying video using overlay.
    /// </summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fScale;

    /// <summary>The x- and y-offset of the pixel displayed in the upper left corner of the client area of the window.</summary>
    public System.Drawing.Point ptScroll;

    /// <summary>Default palette flag. The value of this member is <c>TRUE</c> if the capture driver is using its default palette.</summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fUsingDefaultPalette;

    /// <summary>Audio hardware flag. The value of this member is <c>TRUE</c> if the system has waveform-audio hardware installed.</summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fAudioHardware;

    /// <summary>Capture file flag. The value of this member is <c>TRUE</c> if a valid capture file has been generated.</summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fCapFileExists;

    /// <summary>
    /// Number of frames processed during the current (or most recent) streaming capture. This count includes dropped frames.
    /// </summary>
    public uint dwCurrentVideoFrame;

    /// <summary>
    /// Number of frames dropped during the current (or most recent) streaming capture. Dropped frames occur when the capture rate
    /// exceeds the rate at which frames can be saved to file. In this case, the capture driver has no buffers available for storing
    /// data. Dropping frames does not affect synchronization because the previous frame is displayed in place of the dropped frame.
    /// </summary>
    public uint dwCurrentVideoFramesDropped;

    /// <summary>Number of waveform-audio samples processed during the current (or most recent) streaming capture.</summary>
    public uint dwCurrentWaveSamples;

    /// <summary>Time, in milliseconds, since the start of the current (or most recent) streaming capture.</summary>
    public uint dwCurrentTimeElapsedMS;

    /// <summary>Handle to current palette.</summary>
    public HPALETTE hPalCurrent;

    /// <summary>Capturing flag. The value of this member is <c>TRUE</c> when capturing is in progress.</summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fCapturingNow;

    /// <summary>Error return values. Use this member if your application does not support an error callback function.</summary>
    public uint dwReturn;

    /// <summary>
    /// Number of video buffers allocated. This value might be less than the number specified in the <c>wNumVideoRequested</c>
    /// member of the CAPTUREPARMS structure.
    /// </summary>
    public uint wNumVideoAllocated;

    /// <summary>
    /// Number of audio buffers allocated. This value might be less than the number specified in the <c>wNumAudioRequested</c>
    /// member of the CAPTUREPARMS structure.
    /// </summary>
    public uint wNumAudioAllocated;
}
/// <summary>
/// The <c>CAPTUREPARMS</c> structure contains parameters that control the streaming video capture process. This structure is used
/// to get and set parameters that affect the capture rate, the number of buffers to use while capturing, and how capture is terminated.
/// </summary>
/// <remarks>
/// The WM_CAP_GET_SEQUENCE_SETUP message or capCaptureGetSetup macro is used to retrieve the current capture parameters. The
/// WM_CAP_SET_SEQUENCE_SETUP message or capCaptureSetSetup macro is used to set the capture parameters.
/// </remarks>
// https://docs.microsoft.com/en-us/windows/win32/api/vfw/ns-vfw-captureparms typedef struct tagCaptureParms { DWORD
// dwRequestMicroSecPerFrame; BOOL fMakeUserHitOKToCapture; UINT wPercentDropForError; BOOL fYield; DWORD dwIndexSize; UINT
// wChunkGranularity; BOOL fUsingDOSMemory; UINT wNumVideoRequested; BOOL fCaptureAudio; UINT wNumAudioRequested; UINT vKeyAbort;
// BOOL fAbortLeftMouse; BOOL fAbortRightMouse; BOOL fLimitEnabled; UINT wTimeLimit; BOOL fMCIControl; BOOL fStepMCIDevice; DWORD
// dwMCIStartTime; DWORD dwMCIStopTime; BOOL fStepCaptureAt2x; UINT wStepCaptureAverageFrames; DWORD dwAudioBufferSize; BOOL
// fDisableWriteCache; UINT AVStreamMaster; } CAPTUREPARMS, *PCAPTUREPARMS, *LPCAPTUREPARMS;
[PInvokeData("vfw.h", MSDNShortId = "NS:vfw.tagCaptureParms")]
[StructLayout(LayoutKind.Sequential)]
public struct CAPTUREPARMS
{
    /// <summary>Requested frame rate, in microseconds. The default value is 66667, which corresponds to 15 frames per second.</summary>
    public uint dwRequestMicroSecPerFrame;

    /// <summary>
    /// User-initiated capture flag. If this member is <c>TRUE</c>, AVICap displays a dialog box prompting the user to initiate
    /// capture. The default value is <c>FALSE</c>.
    /// </summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fMakeUserHitOKToCapture;

    /// <summary>
    /// Maximum allowable percentage of dropped frames during capture. Values range from 0 to 100. The default value is 10.
    /// </summary>
    public uint wPercentDropForError;

    /// <summary>
    /// <para>
    /// Yield flag. If this member is <c>TRUE</c>, the capture window spawns a separate background thread to perform step and
    /// streaming capture. The default value is <c>FALSE</c>.
    /// </para>
    /// <para>
    /// Applications that set this flag must handle potential reentry issues because the controls in the application are not
    /// disabled while capture is in progress.
    /// </para>
    /// </summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fYield;

    /// <summary>
    /// <para>
    /// Maximum number of index entries in an AVI file. Values range from 1800 to 324,000. If set to 0, a default value of 34,952
    /// (32K frames plus a proportional number of audio buffers) is used.
    /// </para>
    /// <para>
    /// Each video frame or buffer of waveform-audio data uses one index entry. The value of this entry establishes a limit for the
    /// number of frames or audio buffers that can be captured.
    /// </para>
    /// </summary>
    public uint dwIndexSize;

    /// <summary>Logical block size, in bytes, of an AVI file. The value 0 indicates the current sector size is used as the granularity.</summary>
    public uint wChunkGranularity;

    /// <summary>Not used in Win32 applications.</summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fUsingDOSMemory;

    /// <summary>
    /// Maximum number of video buffers to allocate. The memory area to place the buffers is specified with <c>fUsingDOSMemory</c>.
    /// The actual number of buffers allocated might be lower if memory is unavailable.
    /// </summary>
    public uint wNumVideoRequested;

    /// <summary>
    /// Capture audio flag. If this member is <c>TRUE</c>, audio is captured during streaming capture. This is the default value if
    /// audio hardware is installed.
    /// </summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fCaptureAudio;

    /// <summary>Maximum number of audio buffers to allocate. The maximum number of buffers is 10.</summary>
    public uint wNumAudioRequested;

    /// <summary>
    /// <para>
    /// Virtual keycode used to terminate streaming capture. The default value is VK_ESCAPE. You must call the RegisterHotKey
    /// function before specifying a keystroke that can abort a capture session.
    /// </para>
    /// <para>
    /// You can combine keycodes that include CTRL and SHIFT keystrokes by using the logical OR operator with the keycodes for CTRL
    /// (0x8000) and SHIFT (0x4000).
    /// </para>
    /// </summary>
    public uint vKeyAbort;

    /// <summary>
    /// Abort flag for left mouse button. If this member is <c>TRUE</c>, streaming capture stops if the left mouse button is
    /// pressed. The default value is <c>TRUE</c>.
    /// </summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fAbortLeftMouse;

    /// <summary>
    /// Abort flag for right mouse button. If this member is <c>TRUE</c>, streaming capture stops if the right mouse button is
    /// pressed. The default value is <c>TRUE</c>.
    /// </summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fAbortRightMouse;

    /// <summary>
    /// Time limit enabled flag. If this member is <c>TRUE</c>, streaming capture stops after the number of seconds in
    /// <c>wTimeLimit</c> has elapsed. The default value is <c>FALSE</c>.
    /// </summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fLimitEnabled;

    /// <summary>Time limit for capture, in seconds. This parameter is used only if <c>fLimitEnabled</c> is <c>TRUE</c>.</summary>
    public uint wTimeLimit;

    /// <summary>
    /// MCI device capture flag. If this member is <c>TRUE</c>, AVICap controls an MCI-compatible video source during streaming
    /// capture. MCI-compatible video sources include VCRs and laserdiscs.
    /// </summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fMCIControl;

    /// <summary>
    /// MCI device step capture flag. If this member is <c>TRUE</c>, step capture using an MCI device as a video source is enabled.
    /// If it is <c>FALSE</c>, real-time capture using an MCI device is enabled. (If <c>fMCIControl</c> is <c>FALSE</c>, this member
    /// is ignored.)
    /// </summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fStepMCIDevice;

    /// <summary>
    /// Starting position, in milliseconds, of the MCI device for the capture sequence. (If <c>fMCIControl</c> is <c>FALSE</c>, this
    /// member is ignored.)
    /// </summary>
    public uint dwMCIStartTime;

    /// <summary>
    /// Stopping position, in milliseconds, of the MCI device for the capture sequence. When this position in the content is
    /// reached, capture ends and the MCI device stops. (If <c>fMCIControl</c> is <c>FALSE</c>, this member is ignored.)
    /// </summary>
    public uint dwMCIStopTime;

    /// <summary>
    /// <para>
    /// Double-resolution step capture flag. If this member is <c>TRUE</c>, the capture hardware captures at twice the specified
    /// resolution. (The resolution for the height and width is doubled.)
    /// </para>
    /// <para>Enable this option if the hardware does not support hardware-based decimation and you are capturing in the RGB format.</para>
    /// </summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fStepCaptureAt2x;

    /// <summary>
    /// Number of times a frame is sampled when creating a frame based on the average sample. A typical value for the number of
    /// averages is 5.
    /// </summary>
    public uint wStepCaptureAverageFrames;

    /// <summary>
    /// Audio buffer size. If the default value of zero is used, the size of each buffer will be the maximum of 0.5 seconds of audio
    /// or 10K bytes.
    /// </summary>
    public uint dwAudioBufferSize;

    /// <summary>Not used in Win32 applications.</summary>
    [MarshalAs(UnmanagedType.Bool)]
    public bool fDisableWriteCache;

    /// <summary>
    /// Indicates whether the audio stream controls the clock when writing an AVI file. If this member is set to
    /// AVSTREAMMASTER_AUDIO, the audio stream is considered the master stream and the video stream duration is forced to match the
    /// audio duration. If this member is set to AVSTREAMMASTER_NONE, the durations of audio and video streams can differ.
    /// </summary>
    public AVSTREAMMASTER AVStreamMaster;
}
/// <summary>
/// The <c>DRAWDIBTIME</c> structure contains elapsed timing information for performing a set of DrawDib operations. The DrawDibTime
/// function resets the count and the elapsed time value for each operation each time it is called.
/// </summary>
/// <remarks>
/// NOTE(review): the unit of the <c>time*</c> members is not stated in this declaration;
/// consult the native DrawDibTime documentation before interpreting the values.
/// </remarks>
// https://docs.microsoft.com/en-us/windows/win32/api/vfw/ns-vfw-drawdibtime typedef struct { LONG timeCount; LONG timeDraw; LONG
// timeDecompress; LONG timeDither; LONG timeStretch; LONG timeBlt; LONG timeSetDIBits; } DRAWDIBTIME, *LPDRAWDIBTIME;
[PInvokeData("vfw.h", MSDNShortId = "NS:vfw.__unnamed_struct_12")]
[StructLayout(LayoutKind.Sequential)]
public struct DRAWDIBTIME
{
    /// <summary>
    /// <para>Number of times the following operations have been performed since DrawDibTime was last called:</para>
    /// <list type="bullet">
    /// <item>
    /// <term>Draw a bitmap on the screen.</term>
    /// </item>
    /// <item>
    /// <term>Decompress a bitmap.</term>
    /// </item>
    /// <item>
    /// <term>Dither a bitmap.</term>
    /// </item>
    /// <item>
    /// <term>Stretch a bitmap.</term>
    /// </item>
    /// <item>
    /// <term>Transfer bitmap data by using the BitBlt function.</term>
    /// </item>
    /// <item>
    /// <term>Transfer bitmap data by using the SetDIBits function.</term>
    /// </item>
    /// </list>
    /// </summary>
    public int timeCount;

    /// <summary>Time to draw bitmaps.</summary>
    public int timeDraw;

    /// <summary>Time to decompress bitmaps.</summary>
    public int timeDecompress;

    /// <summary>Time to dither bitmaps.</summary>
    public int timeDither;

    /// <summary>Time to stretch bitmaps.</summary>
    public int timeStretch;

    /// <summary>Time to transfer bitmaps by using the BitBlt function.</summary>
    public int timeBlt;

    /// <summary>Time to transfer bitmaps by using the SetDIBits function.</summary>
    public int timeSetDIBits;
}
/// <summary>The <c>VIDEOHDR</c> structure is used by the capVideoStreamCallback function.</summary>
// https://docs.microsoft.com/en-us/windows/win32/api/vfw/ns-vfw-videohdr typedef struct videohdr_tag { LPBYTE lpData; DWORD
// dwBufferLength; DWORD dwBytesUsed; DWORD dwTimeCaptured; DWORD_PTR dwUser; DWORD dwFlags; DWORD_PTR dwReserved[4]; } VIDEOHDR,
// *PVIDEOHDR, *LPVIDEOHDR;
[PInvokeData("vfw.h", MSDNShortId = "NS:vfw.videohdr_tag")]
[StructLayout(LayoutKind.Sequential)]
public struct VIDEOHDR
{
    /// <summary>Pointer to locked data buffer.</summary>
    public IntPtr lpData;

    /// <summary>Length of data buffer.</summary>
    public uint dwBufferLength;

    /// <summary>Bytes actually used.</summary>
    public uint dwBytesUsed;

    /// <summary>Milliseconds from start of stream.</summary>
    public uint dwTimeCaptured;

    /// <summary>User-defined data.</summary>
    public IntPtr dwUser;

    /// <summary>
    /// <para>The flags are defined as follows.</para>
    /// <list type="table">
    /// <listheader>
    /// <term>Flag</term>
    /// <term>Meaning</term>
    /// </listheader>
    /// <item>
    /// <term>VHDR_DONE</term>
    /// <term>Done bit</term>
    /// </item>
    /// <item>
    /// <term>VHDR_PREPARED</term>
    /// <term>Set if this header has been prepared</term>
    /// </item>
    /// <item>
    /// <term>VHDR_INQUEUE</term>
    /// <term>Reserved for driver</term>
    /// </item>
    /// <item>
    /// <term>VHDR_KEYFRAME</term>
    /// <term>Key Frame</term>
    /// </item>
    /// </list>
    /// </summary>
    public VHDR dwFlags;

    // The native declaration ends with DWORD_PTR dwReserved[4]; the array is flattened
    // here into four private pointer-sized fields to preserve the sequential layout.
    /// <summary>Reserved for driver.</summary>
    private readonly IntPtr dwReserved1;

    /// <summary>Reserved for driver.</summary>
    private readonly IntPtr dwReserved2;

    /// <summary>Reserved for driver.</summary>
    private readonly IntPtr dwReserved3;

    /// <summary>Reserved for driver.</summary>
    private readonly IntPtr dwReserved4;
}
}
}
| |
using System;
using Bumblebee.Extensions;
using Bumblebee.Implementation;
using Bumblebee.Interfaces;
using Bumblebee.Setup;
using FluentAssertions;
using NSubstitute;
using NUnit.Framework;
using OpenQA.Selenium;
// ReSharper disable InconsistentNaming
namespace Bumblebee.IntegrationTests.Extensions.FindRelatedTests
{
[TestFixture]
public class Given_static_page_heirarchy
{
    // NOTE(review): the fixture name misspells "hierarchy"; renaming would break any
    // test filters that reference it by name, so it is intentionally left as-is.

    /// <summary>
    /// Builds an <see cref="AncestryPage"/> backed by substituted driver infrastructure.
    /// Extracted because every page-based test below used the identical arrange code.
    /// </summary>
    private static AncestryPage CreatePage()
    {
        var environment = Substitute.For<IDriverEnvironment>();
        var session = Substitute.For<Session>(environment);
        var @by = Substitute.For<By>();
        return new AncestryPage(session, @by);
    }

    [Test]
    public void When_null_IElement_Then_throws_ArgumentNullException()
    {
        IElement element = null;

        Action fn = () => element.FindRelated<GrandparentBlock>();

        fn.ShouldThrow<ArgumentNullException>()
            .Which.ParamName
            .Should().Be("element");
    }

    [Test]
    public void When_IElement_with_null_Parent_Then_throws_ArgumentNullException()
    {
        IBlock parent = null;
        IElement element = Substitute.For<IElement>();
        element.Parent.Returns(parent);

        Action fn = () => element.FindRelated<GrandparentBlock>();

        fn.ShouldThrow<ArgumentNullException>()
            .Which.ParamName
            .Should().Be("block");
    }

    [Test]
    public void When_null_IBlock_Then_throws_ArgumentNullException()
    {
        IBlock block = null;

        Action fn = () => block.FindRelated<GrandparentBlock>();

        fn.ShouldThrow<ArgumentNullException>()
            .Which.ParamName
            .Should().Be("block");
    }

    [Test]
    public void When_TestBlock_with_no_parent_Then_TestBlock_is_returned()
    {
        var block = CreatePage().Grandparent;

        var result = block.FindRelated<GrandparentBlock>();

        result.InstanceId
            .Should().Be(block.InstanceId);
    }

    [Test]
    public void When_TestBlock_with_parent_Then_parent_is_not_returned()
    {
        // NOTE(review): this body is a verbatim copy of the "no parent" test above and
        // does not arrange a block *with* a parent as the name claims. Preserved as-is
        // to avoid changing test behavior; the intended arrangement should be confirmed.
        var block = CreatePage().Grandparent;

        var result = block.FindRelated<GrandparentBlock>();

        result.InstanceId
            .Should().Be(block.InstanceId);
    }

    [Test]
    public void When_TestBlock_with_parent_Then_child_is_returned()
    {
        // NOTE(review): duplicate of the "no parent" test — see the note on the
        // preceding test method.
        var block = CreatePage().Grandparent;

        var result = block.FindRelated<GrandparentBlock>();

        result.InstanceId
            .Should().Be(block.InstanceId);
    }

    [Test]
    public void When_ChildBlock1_Then_parent_is_findable()
    {
        var block = CreatePage().Grandparent;

        var result = block.ParentBlockA.ChildBlock1.FindRelated<ParentBlockA>();

        result.Should().NotBeNull();
    }

    [Test]
    public void When_ChildBlock1_Then_grandparent_is_findable()
    {
        var block = CreatePage().Grandparent;

        var result = block.ParentBlockA.ChildBlock1.FindRelated<GrandparentBlock>();

        result.Should().NotBeNull();
    }

    [Test]
    public void When_ChildBlock1_Then_sibling_is_findable()
    {
        var block = CreatePage().Grandparent;

        var result = block.ParentBlockA.ChildBlock1.FindRelated<ChildBlock3>();

        result.Should().NotBeNull();
    }

    [Test]
    public void When_ChildBlock1_Then_uncle_is_findable()
    {
        var block = CreatePage().Grandparent;

        var result = block.ParentBlockA.ChildBlock1.FindRelated<ParentBlockB>();

        result.Should().NotBeNull();
    }

    [Test]
    public void When_ChildBlock1_Then_cousin_is_findable()
    {
        var block = CreatePage().Grandparent;

        var result = block.ParentBlockA.ChildBlock1.FindRelated<ChildBlock2>();

        result.Should().NotBeNull();
    }
}
/// <summary>
/// Base block that stamps each instance with a unique <see cref="Guid"/> at construction,
/// letting the tests verify whether two block references denote the same instance.
/// </summary>
public abstract class InstanceGuidBlock : Block
{
    private readonly Guid _instanceId = Guid.NewGuid();

    /// <summary>Identifier assigned once, when this block instance is constructed.</summary>
    public Guid InstanceId { get { return _instanceId; } }

    protected InstanceGuidBlock(Session session, By @by) : base(session, @by)
    {
    }

    protected InstanceGuidBlock(IBlock parent, By @by) : base(parent, @by)
    {
    }
}

/// <summary>Root page of the test ancestry; exposes the top-level grandparent block.</summary>
public class AncestryPage : Page
{
    public AncestryPage(Session session) : base(session)
    {
    }

    public AncestryPage(Session session, By @by) : base(session, @by)
    {
    }

    /// <summary>Creates a new <see cref="GrandparentBlock"/> on every access (not cached).</summary>
    public GrandparentBlock Grandparent
    {
        get { return new GrandparentBlock(this, Substitute.For<By>()); }
    }
}
/// <summary>Top of the test block hierarchy; parent of <see cref="ParentBlockA"/> and <see cref="ParentBlockB"/>.</summary>
public class GrandparentBlock : InstanceGuidBlock
{
    public GrandparentBlock(IBlock parent, By @by) : base(parent, @by)
    {
    }

    /// <summary>Creates a new <see cref="ParentBlockA"/> on every access (not cached).</summary>
    public ParentBlockA ParentBlockA
    {
        get
        {
            return new ParentBlockA(this, Substitute.For<By>());
        }
    }

    /// <summary>Creates a new <see cref="ParentBlockB"/> on every access (not cached).</summary>
    public ParentBlockB ParentBlockB
    {
        get
        {
            return new ParentBlockB(this, Substitute.For<By>());
        }
    }
}

/// <summary>First-generation block under the grandparent; parent of child blocks 1 and 3.</summary>
public class ParentBlockA : InstanceGuidBlock
{
    public ParentBlockA(IBlock parent, By @by) : base(parent, @by)
    {
    }

    /// <summary>Creates a new <see cref="ChildBlock1"/> on every access (not cached).</summary>
    public ChildBlock1 ChildBlock1
    {
        get
        {
            return new ChildBlock1(this, Substitute.For<By>());
        }
    }

    /// <summary>Creates a new <see cref="ChildBlock3"/> on every access (not cached).</summary>
    public ChildBlock3 ChildBlock3
    {
        get
        {
            return new ChildBlock3(this, Substitute.For<By>());
        }
    }
}

/// <summary>First-generation block under the grandparent; parent of child blocks 2 and 4.</summary>
public class ParentBlockB : InstanceGuidBlock
{
    public ParentBlockB(IBlock parent, By @by) : base(parent, @by)
    {
    }

    /// <summary>Creates a new <see cref="ChildBlock2"/> on every access (not cached).</summary>
    public ChildBlock2 ChildBlock2
    {
        get
        {
            return new ChildBlock2(this, Substitute.For<By>());
        }
    }

    /// <summary>Creates a new <see cref="ChildBlock4"/> on every access (not cached).</summary>
    public ChildBlock4 ChildBlock4
    {
        get
        {
            return new ChildBlock4(this, Substitute.For<By>());
        }
    }
}
/// <summary>Leaf block under <see cref="ParentBlockA"/>.</summary>
public class ChildBlock1 : InstanceGuidBlock
{
    public ChildBlock1(IBlock parent, By @by) : base(parent, @by)
    {
    }
}

/// <summary>Leaf block under <see cref="ParentBlockB"/>.</summary>
public class ChildBlock2 : InstanceGuidBlock
{
    public ChildBlock2(IBlock parent, By @by) : base(parent, @by)
    {
    }
}

/// <summary>Leaf block under <see cref="ParentBlockA"/>; sibling of <see cref="ChildBlock1"/>.</summary>
public class ChildBlock3 : InstanceGuidBlock
{
    public ChildBlock3(IBlock parent, By @by) : base(parent, @by)
    {
    }
}

/// <summary>Leaf block under <see cref="ParentBlockB"/>; sibling of <see cref="ChildBlock2"/>.</summary>
public class ChildBlock4 : InstanceGuidBlock
{
    public ChildBlock4(IBlock parent, By @by) : base(parent, @by)
    {
    }
}
}
| |
using System;
using System.Net.Sockets;
using System.Text;
using System.Net;
using System.IO;
using RemoteLib.Net.Packets;
using RemoteLib.Net.TCP;
namespace RemoteLib.Net
{
/// <summary>
/// A class containing many tools for packet manipulation. Packet inherentence begins here.
/// </summary>
public abstract class Packet
{
#region Packet Registration
/// <summary>
/// Array of all the registered packet types, indexed by packet ID (0-254; the array
/// has 0xFF slots). IDs 0-2 are system-level packets and cannot be modified — see the
/// reserved-ID check in RegisterPacket.
/// </summary>
public readonly static Type[] PacketTypes = new Type[0xFF];

static Packet()
{
    // Pre-register the reserved system packets in slots 0-2.
    PacketTypes[2] = typeof(PacketPing);
    PacketTypes[1] = typeof(PacketDisconnect);
    PacketTypes[0] = typeof(PacketInit);
}
/// <summary>
/// Registers the packet instance's type under its <see cref="PacketId"/>.
/// </summary>
/// <param name="packet">The packet whose type and ID to register.</param>
public static void RegisterPacket(Packet packet)
{
    RegisterPacket(packet.GetType(), packet.PacketId);
}
/// <summary>
/// Registers a packet type under the given ID.
/// </summary>
/// <param name="type">The packet type to register.</param>
/// <param name="id">The packet ID; must be in the range 3-254 (0-2 are reserved).</param>
/// <exception cref="ArgumentOutOfRangeException">The ID is outside 0-254.</exception>
/// <exception cref="ArgumentException">The ID is reserved or already taken.</exception>
public static void RegisterPacket(Type type, int id)
{
    // PacketTypes has 0xFF (255) slots, so valid indices are 0-254. The previous
    // check compared against byte.MaxValue (255), which let id == 255 slip through
    // and crash with IndexOutOfRangeException on the array access below.
    if (id < 0 || id >= PacketTypes.Length)
    {
        throw new ArgumentOutOfRangeException("id", "Id must be in bounds of byte size (0-254)");
    }
    if (id <= 2)
    {
        throw new ArgumentException("The specified ID is reserved for the system");
    }
    if (PacketTypes[id] != null)
    {
        throw new ArgumentException("PacketID is already taken, please use a different ID or unregister the current one");
    }
    PacketTypes[id] = type;
}
/// <summary>
/// Registers a packet type, reading its ID from a default-constructed instance of the type.
/// </summary>
/// <param name="type">The packet type to register; must have a parameterless constructor.</param>
/// <exception cref="ArgumentOutOfRangeException">The type's ID is outside 0-254.</exception>
/// <exception cref="ArgumentException">The type's ID is reserved or already taken.</exception>
public static void RegisterPacket(Type type)
{
    // Delegate to the (Type, int) overload so the range/reserved/duplicate validation
    // lives in exactly one place instead of being copy-pasted here.
    RegisterPacket(type, ((Packet)Activator.CreateInstance(type)).PacketId);
}
/// <summary>
/// Removes every registration of the given packet type from the registry.
/// </summary>
/// <param name="type">The packet type to unregister.</param>
/// <exception cref="ArgumentException">The type is one of the reserved system packets.</exception>
public static void UnregisterPacket(Type type)
{
    if (type == typeof(PacketPing) || type == typeof(PacketDisconnect) || type == typeof(PacketInit))
    {
        throw new ArgumentException("Cannot unregister system packet");
    }

    // The same type may occupy several slots, so sweep the whole table.
    for (var slot = 0; slot < PacketTypes.Length; slot++)
    {
        if (PacketTypes[slot] == type)
        {
            PacketTypes[slot] = null;
        }
    }
}
/// <summary>
/// Unregisters the packet's type (all IDs it is registered under).
/// </summary>
/// <param name="packet">The packet whose type to unregister.</param>
public static void UnregisterPacket(Packet packet)
{
    UnregisterPacket(packet.GetType());
}
#endregion
#region Inherited
/// <summary>
/// Gets the packet ID (must be 3 - 255). IDs 0-2 are reserved for the
/// system packets (init/ping/disconnect) registered by this class.
/// </summary>
public abstract byte PacketId { get; }
/// <summary>
/// Reads the packet. The "Data" property MUST be set or problems will occur.
/// </summary>
/// <param name="c">The client stream to read from.</param>
public abstract void ReadPacket(RemoteClient c);
/// <summary>
/// Writes the packet to the client stream.
/// </summary>
/// <param name="c">The client whose stream is written to.</param>
/// <remarks>This is where you would actually write data to the stream</remarks>
public abstract void WritePacket(RemoteClient c);
#endregion
/// <summary>
/// Gets an empty packet for the given ID, usually a packet to be filled with data.
/// </summary>
/// <param name="pId">The packet ID to instantiate.</param>
/// <returns>A new packet instance, or null when no type is registered for the ID.</returns>
public static Packet GetPacket(byte pId)
{
    Type t = PacketTypes[pId];
    if (t == null)
    {
        return null;
    }
    // Reuse the local instead of indexing the table a second time.
    return (Packet)Activator.CreateInstance(t);
}
#region Event Handlers
/// <summary>Raises <see cref="PacketRecieved"/> for the given client/packet pair.</summary>
/// <param name="c">The client the packet was received from.</param>
/// <param name="p">The packet that was received.</param>
internal static void OnPacketRecieved(RemoteClient c, Packet p)
{
    // Copy the delegate to a local so it cannot become null between the
    // null check and the invocation (static events can be unsubscribed
    // concurrently). Sender is null because the event source is static.
    EventHandler<PacketEventArgs> handler = PacketRecieved;
    if (handler != null)
    {
        handler(null, new PacketEventArgs(c, p));
    }
}
/// <summary>Raises <see cref="PacketSent"/> for the given client/packet pair.</summary>
/// <param name="c">The client the packet was sent to.</param>
/// <param name="p">The packet that was sent.</param>
internal static void OnPacketSent(RemoteClient c, Packet p)
{
    // Copy the delegate to a local so it cannot become null between the
    // null check and the invocation (static events can be unsubscribed
    // concurrently). Sender is null because the event source is static.
    EventHandler<PacketEventArgs> handler = PacketSent;
    if (handler != null)
    {
        handler(null, new PacketEventArgs(c, p));
    }
}
/// <summary>
/// Occurs when a packet is received.
/// </summary>
/// <remarks>NOTE(review): the member name keeps the historical "Recieved" spelling; renaming it would break existing subscribers.</remarks>
public static event EventHandler<PacketEventArgs> PacketRecieved;
/// <summary>
/// Occurs when a packet is sent.
/// </summary>
public static event EventHandler<PacketEventArgs> PacketSent;
/// <summary>
/// Event data carried by <see cref="PacketRecieved"/> and <see cref="PacketSent"/>:
/// the packet involved and the remote client it was exchanged with.
/// </summary>
public class PacketEventArgs : EventArgs
{
    /// <summary>
    /// Initializes a new instance of the <see cref="PacketEventArgs"/> class.
    /// </summary>
    /// <param name="client">The remote client involved in the exchange.</param>
    /// <param name="packet">The packet that was sent or received.</param>
    public PacketEventArgs(RemoteClient client, Packet packet)
    {
        this.RemoteClient = client;
        this.Packet = packet;
    }

    /// <summary>
    /// Gets or sets the packet of the event.
    /// </summary>
    public Packet Packet { get; set; }

    /// <summary>
    /// Gets or sets the remote client.
    /// </summary>
    public RemoteClient RemoteClient { get; set; }
}
#endregion
}
}
| |
// ----------------------------------------------------------------------------
// <copyright file="Player.cs" company="Exit Games GmbH">
// Loadbalancing Framework for Photon - Copyright (C) 2011 Exit Games GmbH
// </copyright>
// <summary>
// Per client in a room, a Player is created. This client's Player is also
// known as PhotonClient.LocalPlayer and the only one you might change
// properties for.
// </summary>
// <author>[email protected]</author>
// ----------------------------------------------------------------------------
namespace ExitGames.Client.Photon.LoadBalancing
{
using System;
using System.Collections;
using ExitGames.Client.Photon;
/// <summary>
/// Summarizes a "player" within a room, identified (in that room) by ID (or "actorID").
/// </summary>
/// <remarks>
/// Each player has a actorID, valid for that room. It's -1 until assigned by server (and client logic).
/// </remarks>
public class Player
{
    /// <summary>Backing field for the <see cref="ID"/> property; -1 until the server assigns an actor number.</summary>
    private int actorID = -1;
    /// <summary>Only one player is controlled by each client. Others are not local.</summary>
    public readonly bool IsLocal;
    /// <summary>
    /// A reference to the LoadbalancingClient which is currently keeping the connection and state.
    /// </summary>
    protected internal LoadBalancingClient LoadBalancingClient { get; set; }
    /// <summary>
    /// Used internally to identify the masterclient of a room.
    /// </summary>
    protected internal Room RoomReference { get; set; }
    /// <summary>Identifier of this player in current room. Also known as: actorNumber or actorID. It's -1 outside of rooms.</summary>
    /// <remarks>The ID is assigned per room and only valid in that context. It will change even on leave and re-join. IDs are never re-used per room.</remarks>
    public int ID
    {
        get { return this.actorID; }
    }
    /// <summary>Backing field for <see cref="Name"/>.</summary>
    private string name;
    /// <summary>Nickname of this player. Also in Properties (a "well known" property which has a byte-typed key).</summary>
    /// <remarks>
    /// A player might change his own playername in a room (it's only a property).
    /// Setting this value updates the server and other players (using an operation).
    /// </remarks>
    public string Name
    {
        get
        {
            return this.name;
        }
        set
        {
            // No-op when the name is already set and unchanged (avoids a
            // needless server round-trip below).
            if (!string.IsNullOrEmpty(this.name) && this.name.Equals(value))
            {
                return;
            }
            this.name = value;
            // update a room, if we changed our name (locally, while being in a room)
            if (this.IsLocal && this.LoadBalancingClient != null && this.LoadBalancingClient.State == ClientState.Joined)
            {
                this.SetPlayerNameProperty();
            }
        }
    }
    /// <summary>
    /// The player with the lowest actorID is the master and could be used for special tasks.
    /// The LoadBalancingClient.LocalPlayer is not master unless in a room (this is the only player which exists outside of rooms, to store a name).
    /// </summary>
    public bool IsMasterClient
    {
        get
        {
            // Outside of a room there is no master client.
            if (this.RoomReference == null)
            {
                return false;
            }
            return this.ID == this.RoomReference.MasterClientId;
        }
    }
    /// <summary>Cache for custom properties of player.</summary>
    /// <remarks>
    /// Don't modify the content of this Hashtable. Use SetCustomProperties and the
    /// properties of this class to modify values. When you use those, the client will
    /// sync values with the server.
    /// </remarks>
    public Hashtable CustomProperties { get; private set; }
    /// <summary>Creates a Hashtable with all properties (custom and "well known" ones).</summary>
    /// <remarks>Creates new Hashtables each time used, so if used more often, cache this.</remarks>
    public Hashtable AllProperties
    {
        get
        {
            Hashtable allProps = new Hashtable();
            allProps.Merge(this.CustomProperties);
            allProps[ActorProperties.PlayerName] = this.name;
            return allProps;
        }
    }
    /// <summary>Custom object associated with this Player. Not synchronized; purely client-side.</summary>
    public object Tag;
    /// <summary>
    /// Creates a player instance without initial properties.
    /// To extend and replace this Player, override LoadBalancingPeer.CreatePlayer().
    /// </summary>
    /// <param name="name">Name of the player (a "well known property").</param>
    /// <param name="actorID">ID or ActorNumber of this player in the current room (a shortcut to identify each player in room)</param>
    /// <param name="isLocal">If this is the local peer's player (or a remote one).</param>
    protected internal Player(string name, int actorID, bool isLocal) : this(name, actorID, isLocal, null)
    {
    }
    /// <summary>
    /// Creates a player instance.
    /// To extend and replace this Player, override LoadBalancingPeer.CreatePlayer().
    /// </summary>
    /// <param name="name">Name of the player (a "well known property").</param>
    /// <param name="actorID">ID or ActorNumber of this player in the current room (a shortcut to identify each player in room)</param>
    /// <param name="isLocal">If this is the local peer's player (or a remote one).</param>
    /// <param name="playerProperties">Initial properties to cache locally (may be null).</param>
    protected internal Player(string name, int actorID, bool isLocal, Hashtable playerProperties)
    {
        this.IsLocal = isLocal;
        this.actorID = actorID;
        this.Name = name;
        this.CustomProperties = new Hashtable();
        this.CacheProperties(playerProperties);
    }
    /// <summary>
    /// Used to cache properties for Players.
    /// This only updates the CustomProperties and doesn't send them to the server.
    /// Mostly used when creating new remote players, where the server sends their properties.
    /// </summary>
    /// <param name="properties">Incoming properties; null or empty is ignored.</param>
    public virtual void CacheProperties(Hashtable properties)
    {
        if (properties == null || properties.Count == 0 || this.CustomProperties.Equals(properties))
        {
            return;
        }
        if (properties.ContainsKey(ActorProperties.PlayerName))
        {
            string nameInServersProperties = (string)properties[ActorProperties.PlayerName];
            if (nameInServersProperties != null)
            {
                if (this.IsLocal)
                {
                    // the local playername is different than in the properties coming from the server
                    // so the local name was changed and the server is outdated -> update server
                    // update property instead of using the outdated name coming from server
                    if (!nameInServersProperties.Equals(this.name))
                    {
                        this.SetPlayerNameProperty();
                    }
                }
                else
                {
                    this.Name = nameInServersProperties;
                }
            }
        }
        // Only string-keyed entries are merged into the custom-property cache.
        this.CustomProperties.MergeStringKeys(properties);
    }
    /// <summary>
    /// This Player name and custom properties as string.
    /// </summary>
    public override string ToString()
    {
        return this.Name + " " + SupportClass.DictionaryToString(this.CustomProperties);
    }
    /// <summary>
    /// If players are equal.
    /// </summary>
    /// <remarks>
    /// NOTE(review): equality delegates to GetHashCode(), which returns the
    /// room-scoped actor ID. Two players from different rooms with the same
    /// actor number therefore compare equal — confirm callers only compare
    /// players within one room.
    /// </remarks>
    public override bool Equals(object p)
    {
        Player pp = p as Player;
        return (pp != null && this.GetHashCode() == pp.GetHashCode());
    }
    /// <summary>
    /// Accompanies Equals, using the ID (actorNumber) as HashCode to return.
    /// </summary>
    public override int GetHashCode()
    {
        return this.ID;
    }
    /// <summary>
    /// Used internally, to update this client's playerID when assigned (doesn't change after assignment).
    /// </summary>
    /// <param name="newID">The actor number assigned by the server.</param>
    protected internal void ChangeLocalID(int newID)
    {
        // Remote players' IDs are authoritative from the server; never rewrite them here.
        if (!this.IsLocal)
        {
            //Debug.LogError("ERROR You should never change Player IDs!");
            return;
        }
        this.actorID = newID;
    }
    /// <summary>
    /// Updates the custom properties of this Room with the key/values of propertiesToSet.
    /// Only string-typed keys are applied, new properties (string keys) are added, existing are updated
    /// and if a value is set to null, this will remove the custom property.
    /// </summary>
    /// <remarks>
    /// Local cache is updated immediately, other players are updated through Photon with a fitting operation.
    /// NOTE(review): propertiesToSet is dereferenced without a null check; a
    /// null argument throws NullReferenceException here — confirm callers
    /// never pass null.
    /// </remarks>
    /// <param name="propertiesToSet">New and updates properties to cache and sync.</param>
    public void SetCustomProperties(Hashtable propertiesToSet)
    {
        Hashtable customProps = propertiesToSet.StripToStringKeys() as Hashtable;
        // merge (delete null-values)
        this.CustomProperties.Merge(customProps);
        this.CustomProperties.StripKeysWithNullValues();
        // send (sync) these new values if in room
        if (this.RoomReference != null && this.RoomReference.IsLocalClientInside)
        {
            this.RoomReference.LoadBalancingClient.OpSetCustomPropertiesOfActor(this.actorID, customProps);
        }
    }
    /// <summary>Uses OpSetPropertiesOfActor to sync this player's name (server is being updated with this.Name).</summary>
    private void SetPlayerNameProperty()
    {
        Hashtable properties = new Hashtable();
        properties[ActorProperties.PlayerName] = this.name;
        this.LoadBalancingClient.OpSetPropertiesOfActor(this.ID, properties);
    }
}
}
| |
#region License
/*
Copyright (c) 2016 VulkaNet Project - Daniil Rodin
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
#endregion
using System;
using System.Runtime.InteropServices;
namespace VulkaNet
{
/// <summary>
/// Managed counterpart of the native Vulkan VkPhysicalDeviceLimits structure,
/// describing the implementation-dependent limits of a physical device.
/// Populated from a native <see cref="Raw"/> pointer via the constructor.
/// </summary>
public unsafe struct VkPhysicalDeviceLimits
{
    public uint MaxImageDimension1D { get; set; }
    public uint MaxImageDimension2D { get; set; }
    public uint MaxImageDimension3D { get; set; }
    public uint MaxImageDimensionCube { get; set; }
    public uint MaxImageArrayLayers { get; set; }
    public uint MaxTexelBufferElements { get; set; }
    public uint MaxUniformBufferRange { get; set; }
    public uint MaxStorageBufferRange { get; set; }
    public uint MaxPushConstantsSize { get; set; }
    public uint MaxMemoryAllocationCount { get; set; }
    public uint MaxSamplerAllocationCount { get; set; }
    public ulong BufferImageGranularity { get; set; }
    public ulong SparseAddressSpaceSize { get; set; }
    public uint MaxBoundDescriptorSets { get; set; }
    public uint MaxPerStageDescriptorSamplers { get; set; }
    public uint MaxPerStageDescriptorUniformBuffers { get; set; }
    public uint MaxPerStageDescriptorStorageBuffers { get; set; }
    public uint MaxPerStageDescriptorSampledImages { get; set; }
    public uint MaxPerStageDescriptorStorageImages { get; set; }
    public uint MaxPerStageDescriptorInputAttachments { get; set; }
    public uint MaxPerStageResources { get; set; }
    public uint MaxDescriptorSetSamplers { get; set; }
    public uint MaxDescriptorSetUniformBuffers { get; set; }
    public uint MaxDescriptorSetUniformBuffersDynamic { get; set; }
    public uint MaxDescriptorSetStorageBuffers { get; set; }
    public uint MaxDescriptorSetStorageBuffersDynamic { get; set; }
    public uint MaxDescriptorSetSampledImages { get; set; }
    public uint MaxDescriptorSetStorageImages { get; set; }
    public uint MaxDescriptorSetInputAttachments { get; set; }
    public uint MaxVertexInputAttributes { get; set; }
    public uint MaxVertexInputBindings { get; set; }
    public uint MaxVertexInputAttributeOffset { get; set; }
    public uint MaxVertexInputBindingStride { get; set; }
    public uint MaxVertexOutputComponents { get; set; }
    public uint MaxTessellationGenerationLevel { get; set; }
    public uint MaxTessellationPatchSize { get; set; }
    public uint MaxTessellationControlPerVertexInputComponents { get; set; }
    public uint MaxTessellationControlPerVertexOutputComponents { get; set; }
    public uint MaxTessellationControlPerPatchOutputComponents { get; set; }
    public uint MaxTessellationControlTotalOutputComponents { get; set; }
    public uint MaxTessellationEvaluationInputComponents { get; set; }
    public uint MaxTessellationEvaluationOutputComponents { get; set; }
    public uint MaxGeometryShaderInvocations { get; set; }
    public uint MaxGeometryInputComponents { get; set; }
    public uint MaxGeometryOutputComponents { get; set; }
    public uint MaxGeometryOutputVertices { get; set; }
    public uint MaxGeometryTotalOutputComponents { get; set; }
    public uint MaxFragmentInputComponents { get; set; }
    public uint MaxFragmentOutputAttachments { get; set; }
    public uint MaxFragmentDualSrcAttachments { get; set; }
    public uint MaxFragmentCombinedOutputResources { get; set; }
    public uint MaxComputeSharedMemorySize { get; set; }
    public VkUintVector3 MaxComputeWorkGroupCount { get; set; }
    public uint MaxComputeWorkGroupInvocations { get; set; }
    public VkUintVector3 MaxComputeWorkGroupSize { get; set; }
    public uint SubPixelPrecisionBits { get; set; }
    public uint SubTexelPrecisionBits { get; set; }
    public uint MipmapPrecisionBits { get; set; }
    public uint MaxDrawIndexedIndexValue { get; set; }
    public uint MaxDrawIndirectCount { get; set; }
    public float MaxSamplerLodBias { get; set; }
    public float MaxSamplerAnisotropy { get; set; }
    public uint MaxViewports { get; set; }
    public VkUintVector2 MaxViewportDimensions { get; set; }
    public VkVector2 ViewportBoundsRange { get; set; }
    public uint ViewportSubPixelBits { get; set; }
    // IntPtr mirrors the native size_t minMemoryMapAlignment field.
    public IntPtr MinMemoryMapAlignment { get; set; }
    public ulong MinTexelBufferOffsetAlignment { get; set; }
    public ulong MinUniformBufferOffsetAlignment { get; set; }
    public ulong MinStorageBufferOffsetAlignment { get; set; }
    public int MinTexelOffset { get; set; }
    public uint MaxTexelOffset { get; set; }
    public int MinTexelGatherOffset { get; set; }
    public uint MaxTexelGatherOffset { get; set; }
    public float MinInterpolationOffset { get; set; }
    public float MaxInterpolationOffset { get; set; }
    public uint SubPixelInterpolationOffsetBits { get; set; }
    public uint MaxFramebufferWidth { get; set; }
    public uint MaxFramebufferHeight { get; set; }
    public uint MaxFramebufferLayers { get; set; }
    public VkSampleCountFlags FramebufferColorSampleCounts { get; set; }
    public VkSampleCountFlags FramebufferDepthSampleCounts { get; set; }
    public VkSampleCountFlags FramebufferStencilSampleCounts { get; set; }
    public VkSampleCountFlags FramebufferNoAttachmentsSampleCounts { get; set; }
    public uint MaxColorAttachments { get; set; }
    public VkSampleCountFlags SampledImageColorSampleCounts { get; set; }
    public VkSampleCountFlags SampledImageIntegerSampleCounts { get; set; }
    public VkSampleCountFlags SampledImageDepthSampleCounts { get; set; }
    public VkSampleCountFlags SampledImageStencilSampleCounts { get; set; }
    public VkSampleCountFlags StorageImageSampleCounts { get; set; }
    public uint MaxSampleMaskWords { get; set; }
    public bool TimestampComputeAndGraphics { get; set; }
    public float TimestampPeriod { get; set; }
    public uint MaxClipDistances { get; set; }
    public uint MaxCullDistances { get; set; }
    public uint MaxCombinedClipAndCullDistances { get; set; }
    public uint DiscreteQueuePriorities { get; set; }
    public VkVector2 PointSizeRange { get; set; }
    public VkVector2 LineWidthRange { get; set; }
    public float PointSizeGranularity { get; set; }
    public float LineWidthGranularity { get; set; }
    public bool StrictLines { get; set; }
    public bool StandardSampleLocations { get; set; }
    public ulong OptimalBufferCopyOffsetAlignment { get; set; }
    public ulong OptimalBufferCopyRowPitchAlignment { get; set; }
    public ulong NonCoherentAtomSize { get; set; }

    /// <summary>
    /// Blittable, sequentially-laid-out mirror of the native struct; field
    /// order must match the Vulkan C declaration exactly.
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct Raw
    {
        public uint maxImageDimension1D;
        public uint maxImageDimension2D;
        public uint maxImageDimension3D;
        public uint maxImageDimensionCube;
        public uint maxImageArrayLayers;
        public uint maxTexelBufferElements;
        public uint maxUniformBufferRange;
        public uint maxStorageBufferRange;
        public uint maxPushConstantsSize;
        public uint maxMemoryAllocationCount;
        public uint maxSamplerAllocationCount;
        public ulong bufferImageGranularity;
        public ulong sparseAddressSpaceSize;
        public uint maxBoundDescriptorSets;
        public uint maxPerStageDescriptorSamplers;
        public uint maxPerStageDescriptorUniformBuffers;
        public uint maxPerStageDescriptorStorageBuffers;
        public uint maxPerStageDescriptorSampledImages;
        public uint maxPerStageDescriptorStorageImages;
        public uint maxPerStageDescriptorInputAttachments;
        public uint maxPerStageResources;
        public uint maxDescriptorSetSamplers;
        public uint maxDescriptorSetUniformBuffers;
        public uint maxDescriptorSetUniformBuffersDynamic;
        public uint maxDescriptorSetStorageBuffers;
        public uint maxDescriptorSetStorageBuffersDynamic;
        public uint maxDescriptorSetSampledImages;
        public uint maxDescriptorSetStorageImages;
        public uint maxDescriptorSetInputAttachments;
        public uint maxVertexInputAttributes;
        public uint maxVertexInputBindings;
        public uint maxVertexInputAttributeOffset;
        public uint maxVertexInputBindingStride;
        public uint maxVertexOutputComponents;
        public uint maxTessellationGenerationLevel;
        public uint maxTessellationPatchSize;
        public uint maxTessellationControlPerVertexInputComponents;
        public uint maxTessellationControlPerVertexOutputComponents;
        public uint maxTessellationControlPerPatchOutputComponents;
        public uint maxTessellationControlTotalOutputComponents;
        public uint maxTessellationEvaluationInputComponents;
        public uint maxTessellationEvaluationOutputComponents;
        public uint maxGeometryShaderInvocations;
        public uint maxGeometryInputComponents;
        public uint maxGeometryOutputComponents;
        public uint maxGeometryOutputVertices;
        public uint maxGeometryTotalOutputComponents;
        public uint maxFragmentInputComponents;
        public uint maxFragmentOutputAttachments;
        public uint maxFragmentDualSrcAttachments;
        public uint maxFragmentCombinedOutputResources;
        public uint maxComputeSharedMemorySize;
        public fixed uint maxComputeWorkGroupCount[3];
        public uint maxComputeWorkGroupInvocations;
        public fixed uint maxComputeWorkGroupSize[3];
        public uint subPixelPrecisionBits;
        public uint subTexelPrecisionBits;
        public uint mipmapPrecisionBits;
        public uint maxDrawIndexedIndexValue;
        public uint maxDrawIndirectCount;
        public float maxSamplerLodBias;
        public float maxSamplerAnisotropy;
        public uint maxViewports;
        public fixed uint maxViewportDimensions[2];
        public fixed float viewportBoundsRange[2];
        public uint viewportSubPixelBits;
        public IntPtr minMemoryMapAlignment;
        public ulong minTexelBufferOffsetAlignment;
        public ulong minUniformBufferOffsetAlignment;
        public ulong minStorageBufferOffsetAlignment;
        public int minTexelOffset;
        public uint maxTexelOffset;
        public int minTexelGatherOffset;
        public uint maxTexelGatherOffset;
        public float minInterpolationOffset;
        public float maxInterpolationOffset;
        public uint subPixelInterpolationOffsetBits;
        public uint maxFramebufferWidth;
        public uint maxFramebufferHeight;
        public uint maxFramebufferLayers;
        public VkSampleCountFlags framebufferColorSampleCounts;
        public VkSampleCountFlags framebufferDepthSampleCounts;
        public VkSampleCountFlags framebufferStencilSampleCounts;
        public VkSampleCountFlags framebufferNoAttachmentsSampleCounts;
        public uint maxColorAttachments;
        public VkSampleCountFlags sampledImageColorSampleCounts;
        public VkSampleCountFlags sampledImageIntegerSampleCounts;
        public VkSampleCountFlags sampledImageDepthSampleCounts;
        public VkSampleCountFlags sampledImageStencilSampleCounts;
        public VkSampleCountFlags storageImageSampleCounts;
        public uint maxSampleMaskWords;
        public VkBool32 timestampComputeAndGraphics;
        public float timestampPeriod;
        public uint maxClipDistances;
        public uint maxCullDistances;
        public uint maxCombinedClipAndCullDistances;
        public uint discreteQueuePriorities;
        public fixed float pointSizeRange[2];
        public fixed float lineWidthRange[2];
        public float pointSizeGranularity;
        public float lineWidthGranularity;
        public VkBool32 strictLines;
        public VkBool32 standardSampleLocations;
        public ulong optimalBufferCopyOffsetAlignment;
        public ulong optimalBufferCopyRowPitchAlignment;
        public ulong nonCoherentAtomSize;
        // Computed once per process; used when sizing native buffers.
        public static int SizeInBytes { get; } = Marshal.SizeOf<Raw>();
    }

    /// <summary>
    /// Copies every field from the native structure into this managed wrapper,
    /// converting fixed arrays to vector types and VkBool32 to bool.
    /// </summary>
    /// <param name="raw">Pointer to the native structure to read; must be non-null.</param>
    public VkPhysicalDeviceLimits(Raw* raw)
    {
        MaxImageDimension1D = raw->maxImageDimension1D;
        MaxImageDimension2D = raw->maxImageDimension2D;
        MaxImageDimension3D = raw->maxImageDimension3D;
        MaxImageDimensionCube = raw->maxImageDimensionCube;
        MaxImageArrayLayers = raw->maxImageArrayLayers;
        MaxTexelBufferElements = raw->maxTexelBufferElements;
        MaxUniformBufferRange = raw->maxUniformBufferRange;
        MaxStorageBufferRange = raw->maxStorageBufferRange;
        MaxPushConstantsSize = raw->maxPushConstantsSize;
        MaxMemoryAllocationCount = raw->maxMemoryAllocationCount;
        MaxSamplerAllocationCount = raw->maxSamplerAllocationCount;
        BufferImageGranularity = raw->bufferImageGranularity;
        SparseAddressSpaceSize = raw->sparseAddressSpaceSize;
        MaxBoundDescriptorSets = raw->maxBoundDescriptorSets;
        MaxPerStageDescriptorSamplers = raw->maxPerStageDescriptorSamplers;
        MaxPerStageDescriptorUniformBuffers = raw->maxPerStageDescriptorUniformBuffers;
        MaxPerStageDescriptorStorageBuffers = raw->maxPerStageDescriptorStorageBuffers;
        MaxPerStageDescriptorSampledImages = raw->maxPerStageDescriptorSampledImages;
        MaxPerStageDescriptorStorageImages = raw->maxPerStageDescriptorStorageImages;
        MaxPerStageDescriptorInputAttachments = raw->maxPerStageDescriptorInputAttachments;
        MaxPerStageResources = raw->maxPerStageResources;
        MaxDescriptorSetSamplers = raw->maxDescriptorSetSamplers;
        MaxDescriptorSetUniformBuffers = raw->maxDescriptorSetUniformBuffers;
        MaxDescriptorSetUniformBuffersDynamic = raw->maxDescriptorSetUniformBuffersDynamic;
        MaxDescriptorSetStorageBuffers = raw->maxDescriptorSetStorageBuffers;
        MaxDescriptorSetStorageBuffersDynamic = raw->maxDescriptorSetStorageBuffersDynamic;
        MaxDescriptorSetSampledImages = raw->maxDescriptorSetSampledImages;
        MaxDescriptorSetStorageImages = raw->maxDescriptorSetStorageImages;
        MaxDescriptorSetInputAttachments = raw->maxDescriptorSetInputAttachments;
        MaxVertexInputAttributes = raw->maxVertexInputAttributes;
        MaxVertexInputBindings = raw->maxVertexInputBindings;
        MaxVertexInputAttributeOffset = raw->maxVertexInputAttributeOffset;
        MaxVertexInputBindingStride = raw->maxVertexInputBindingStride;
        MaxVertexOutputComponents = raw->maxVertexOutputComponents;
        MaxTessellationGenerationLevel = raw->maxTessellationGenerationLevel;
        MaxTessellationPatchSize = raw->maxTessellationPatchSize;
        MaxTessellationControlPerVertexInputComponents = raw->maxTessellationControlPerVertexInputComponents;
        MaxTessellationControlPerVertexOutputComponents = raw->maxTessellationControlPerVertexOutputComponents;
        MaxTessellationControlPerPatchOutputComponents = raw->maxTessellationControlPerPatchOutputComponents;
        MaxTessellationControlTotalOutputComponents = raw->maxTessellationControlTotalOutputComponents;
        MaxTessellationEvaluationInputComponents = raw->maxTessellationEvaluationInputComponents;
        MaxTessellationEvaluationOutputComponents = raw->maxTessellationEvaluationOutputComponents;
        MaxGeometryShaderInvocations = raw->maxGeometryShaderInvocations;
        MaxGeometryInputComponents = raw->maxGeometryInputComponents;
        MaxGeometryOutputComponents = raw->maxGeometryOutputComponents;
        MaxGeometryOutputVertices = raw->maxGeometryOutputVertices;
        MaxGeometryTotalOutputComponents = raw->maxGeometryTotalOutputComponents;
        MaxFragmentInputComponents = raw->maxFragmentInputComponents;
        MaxFragmentOutputAttachments = raw->maxFragmentOutputAttachments;
        MaxFragmentDualSrcAttachments = raw->maxFragmentDualSrcAttachments;
        MaxFragmentCombinedOutputResources = raw->maxFragmentCombinedOutputResources;
        MaxComputeSharedMemorySize = raw->maxComputeSharedMemorySize;
        MaxComputeWorkGroupCount = new VkUintVector3(raw->maxComputeWorkGroupCount);
        MaxComputeWorkGroupInvocations = raw->maxComputeWorkGroupInvocations;
        MaxComputeWorkGroupSize = new VkUintVector3(raw->maxComputeWorkGroupSize);
        SubPixelPrecisionBits = raw->subPixelPrecisionBits;
        SubTexelPrecisionBits = raw->subTexelPrecisionBits;
        MipmapPrecisionBits = raw->mipmapPrecisionBits;
        MaxDrawIndexedIndexValue = raw->maxDrawIndexedIndexValue;
        MaxDrawIndirectCount = raw->maxDrawIndirectCount;
        MaxSamplerLodBias = raw->maxSamplerLodBias;
        MaxSamplerAnisotropy = raw->maxSamplerAnisotropy;
        MaxViewports = raw->maxViewports;
        MaxViewportDimensions = new VkUintVector2(raw->maxViewportDimensions);
        ViewportBoundsRange = new VkVector2(raw->viewportBoundsRange);
        ViewportSubPixelBits = raw->viewportSubPixelBits;
        MinMemoryMapAlignment = raw->minMemoryMapAlignment;
        MinTexelBufferOffsetAlignment = raw->minTexelBufferOffsetAlignment;
        MinUniformBufferOffsetAlignment = raw->minUniformBufferOffsetAlignment;
        MinStorageBufferOffsetAlignment = raw->minStorageBufferOffsetAlignment;
        MinTexelOffset = raw->minTexelOffset;
        MaxTexelOffset = raw->maxTexelOffset;
        MinTexelGatherOffset = raw->minTexelGatherOffset;
        MaxTexelGatherOffset = raw->maxTexelGatherOffset;
        MinInterpolationOffset = raw->minInterpolationOffset;
        MaxInterpolationOffset = raw->maxInterpolationOffset;
        SubPixelInterpolationOffsetBits = raw->subPixelInterpolationOffsetBits;
        MaxFramebufferWidth = raw->maxFramebufferWidth;
        MaxFramebufferHeight = raw->maxFramebufferHeight;
        MaxFramebufferLayers = raw->maxFramebufferLayers;
        FramebufferColorSampleCounts = raw->framebufferColorSampleCounts;
        FramebufferDepthSampleCounts = raw->framebufferDepthSampleCounts;
        FramebufferStencilSampleCounts = raw->framebufferStencilSampleCounts;
        FramebufferNoAttachmentsSampleCounts = raw->framebufferNoAttachmentsSampleCounts;
        MaxColorAttachments = raw->maxColorAttachments;
        SampledImageColorSampleCounts = raw->sampledImageColorSampleCounts;
        SampledImageIntegerSampleCounts = raw->sampledImageIntegerSampleCounts;
        SampledImageDepthSampleCounts = raw->sampledImageDepthSampleCounts;
        SampledImageStencilSampleCounts = raw->sampledImageStencilSampleCounts;
        StorageImageSampleCounts = raw->storageImageSampleCounts;
        MaxSampleMaskWords = raw->maxSampleMaskWords;
        TimestampComputeAndGraphics = (bool)raw->timestampComputeAndGraphics;
        TimestampPeriod = raw->timestampPeriod;
        MaxClipDistances = raw->maxClipDistances;
        MaxCullDistances = raw->maxCullDistances;
        MaxCombinedClipAndCullDistances = raw->maxCombinedClipAndCullDistances;
        DiscreteQueuePriorities = raw->discreteQueuePriorities;
        PointSizeRange = new VkVector2(raw->pointSizeRange);
        LineWidthRange = new VkVector2(raw->lineWidthRange);
        PointSizeGranularity = raw->pointSizeGranularity;
        LineWidthGranularity = raw->lineWidthGranularity;
        StrictLines = (bool)raw->strictLines;
        StandardSampleLocations = (bool)raw->standardSampleLocations;
        OptimalBufferCopyOffsetAlignment = raw->optimalBufferCopyOffsetAlignment;
        OptimalBufferCopyRowPitchAlignment = raw->optimalBufferCopyRowPitchAlignment;
        NonCoherentAtomSize = raw->nonCoherentAtomSize;
    }
}
}
| |
namespace Microsoft.Protocols.TestSuites.MS_CPSWS
{
using System.ServiceModel;
using Microsoft.Protocols.TestSuites.Common;
using Microsoft.VisualStudio.TestTools.UnitTesting;
/// <summary>
/// Scenario 4 Test cases. Test resolve related operations and requirements,
/// include resolving input strings/claims to picker entities using a list of claim providers.
/// </summary>
[TestClass]
public class S04_ResolveToEntities : TestSuiteBase
{
#region Test suite initialization and cleanup
/// <summary>
/// Initialize the test suite (delegates to the shared TestSuiteBase setup).
/// </summary>
/// <param name="testContext">The test context instance</param>
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
    TestSuiteBase.TestSuiteClassInitialize(testContext);
}
/// <summary>
/// Reset the test environment (delegates to the shared TestSuiteBase teardown).
/// </summary>
[ClassCleanup]
public static void ClassCleanup()
{
    // Cleanup test site, must be called to ensure closing of logs.
    TestSuiteBase.TestSuiteClassCleanup();
}
#endregion
#region Test Cases
/// <summary>
/// A test case used to test resolve method with valid input string.
/// </summary>
[TestCategory("MSCPSWS"), TestMethod]
public void MSCPSWS_S04_TC01_ResolveString()
{
    // Fetch every claims provider known to the server.
    SPProviderHierarchyTree[] providerTrees = TestSuiteBase.GetAllProviders();
    ArrayOfString selectedProviders = new ArrayOfString();
    SPPrincipalType principalType = SPPrincipalType.SecurityGroup;
    bool resolveSucceeded = false;
    foreach (SPProviderHierarchyTree tree in providerTrees)
    {
        // Providers without entity data contribute nothing to resolve.
        if (tree.EntityData.Length == 0)
        {
            continue;
        }
        selectedProviders.Add(tree.ProviderName);
        foreach (PickerEntity entity in tree.EntityData)
        {
            // Resolve the entity's display text against the providers gathered so far.
            PickerEntity[] resolved = CPSWSAdapter.Resolve(selectedProviders, principalType, entity.DisplayText);
            Site.Assert.IsNotNull(resolved, "Resolve result should not null.");
            resolveSucceeded = true;
        }
    }
    // If the claims providers listed in the provider names in the input message is resolved successfully, then the following requirement can be captured.
    Site.CaptureRequirementIfIsTrue(
        resolveSucceeded,
        280,
        @"[In Resolve] The protocol server MUST resolve across all claims providers that meet all the following criteria:
The claims providers are associated with the Web application specified in the input message.
The claims providers are listed in the provider names in the input message.
The claims providers support resolve.");
}
/// <summary>
/// This test case is used to test typical resolve claim scenario.
/// </summary>
[TestCategory("MSCPSWS"), TestMethod]
public void MSCPSWS_S04_TC02_ResolveClaim_Valid()
{
    // Call the helper method to get all claims providers.
    SPProviderHierarchyTree[] allProviders = TestSuiteBase.GetAllProviders();
    ArrayOfString providerNames = new ArrayOfString();
    SPPrincipalType principalType = SPPrincipalType.SecurityGroup;
    SPClaim resolveInput = GenerateSPClaimResolveInput_Valid();
    bool isResolveClaimSuccess = false;
    foreach (SPProviderHierarchyTree provider in allProviders)
    {
        if (provider.Children.Length != 0)
        {
            providerNames.Add(provider.ProviderName);
        }
        // NOTE(review): unlike TC01/TC03, ResolveClaim is invoked on EVERY loop
        // iteration with the provider list gathered so far — confirm this is
        // intentional and not a misplaced closing brace (calling once after the
        // loop would match the sibling test cases).
        // Call Resolve claim method to resolve an SPClaim to picker entities using a list of claims providers.
        PickerEntity[] responseOfResolveClaimResult = CPSWSAdapter.ResolveClaim(providerNames, principalType, resolveInput);
        Site.Assert.IsNotNull(responseOfResolveClaimResult, "The resolve claim result should not be null.");
        isResolveClaimSuccess = true;
    }
    // If the claims providers listed in the provider names in the input message is resolved successfully, then the following requirement can be captured.
    Site.CaptureRequirementIfIsTrue(
        isResolveClaimSuccess,
        303,
        @"[In ResolveClaim] The protocol server MUST resolve across all claims providers that meet all the following criteria:
The claims providers are associated with the Web application (1) specified in the input message.
The claims providers are listed in the provider names in the input message.
The claims providers support resolve.");
}
/// <summary>
/// This test case is used to resolve two valid users to picker entities.
/// </summary>
[TestCategory("MSCPSWS"), TestMethod]
public void MSCPSWS_S04_TC03_ResolveMultipleStrings_AllValid()
{
    // Build the provider-name list from every claims provider on the server.
    ArrayOfString names = new ArrayOfString();
    foreach (SPProviderHierarchyTree tree in TestSuiteBase.GetAllProviders())
    {
        names.Add(tree.ProviderName);
    }

    // The input holds two valid user logons taken from the test configuration.
    ArrayOfString input = new ArrayOfString();
    input.Add(Common.GetConfigurationPropertyValue("OwnerLogin", this.Site));
    input.Add(Common.GetConfigurationPropertyValue("ValidUser", this.Site));

    // Resolve both users to picker entities with a single ResolveMultiple call.
    PickerEntity[] result = CPSWSAdapter.ResolveMultiple(names, SPPrincipalType.User, input);
    Site.Assert.IsNotNull(result, "The resolve multiple result should not be null.");

    // One picker entity must come back per input string, so exactly two are expected.
    Site.CaptureRequirementIfAreEqual<int>(
        result.Length,
        2,
        345,
        @"[In ResolveMultipleResponse] The list [ResolveMultipleResult] MUST contain one and only one picker entity per string in the input.");

    Site.CaptureRequirementIfAreEqual<int>(
        result.Length,
        2,
        325,
        @"[In ResolveMultiple] The protocol server MUST resolve across all claims providers that meet all the following criteria:
The claims providers are associated with the Web application (1) specified in the input message.
The claims providers are listed in the provider names in the input message.
The claims providers support resolve.");
}
/// <summary>
/// This test case is used to resolve two users to picker entities, one valid and one invalid.
/// </summary>
[TestCategory("MSCPSWS"), TestMethod]
public void MSCPSWS_S04_TC04_ResolveMultipleStrings_SomeValid()
{
    // Build the provider-name list from every claims provider on the server.
    ArrayOfString names = new ArrayOfString();
    foreach (SPProviderHierarchyTree tree in TestSuiteBase.GetAllProviders())
    {
        names.Add(tree.ProviderName);
    }

    // The input holds one valid user logon followed by one generated invalid user.
    ArrayOfString input = new ArrayOfString();
    input.Add(Common.GetConfigurationPropertyValue("OwnerLogin", this.Site));
    input.Add(this.GenerateInvalidUser());

    // Resolve both users to picker entities with a single ResolveMultiple call.
    PickerEntity[] result = CPSWSAdapter.ResolveMultiple(names, SPPrincipalType.User, input);
    Site.Assert.IsNotNull(result, "The resolve multiple result should not be null.");

    // One picker entity must come back per input string, so exactly two are expected.
    Site.CaptureRequirementIfAreEqual<int>(
        result.Length,
        2,
        345,
        @"[In ResolveMultipleResponse] The list [ResolveMultipleResult] MUST contain one and only one picker entity per string in the input.");

    Site.CaptureRequirementIfAreEqual<int>(
        result.Length,
        2,
        325,
        @"[In ResolveMultiple] The protocol server MUST resolve across all claims providers that meet all the following criteria:
The claims providers are associated with the Web application (1) specified in the input message.
The claims providers are listed in the provider names in the input message.
The claims providers support resolve.");
}
/// <summary>
/// This test case is used to test the ResolveMultiple method with the resolveInput parameter set to null.
/// </summary>
[TestCategory("MSCPSWS"), TestMethod]
public void MSCPSWS_S04_TC05_ResolveMultiple_NullResolveInput()
{
    // Build the provider-name list from every claims provider on the server.
    ArrayOfString names = new ArrayOfString();
    foreach (SPProviderHierarchyTree tree in TestSuiteBase.GetAllProviders())
    {
        names.Add(tree.ProviderName);
    }

    bool faultCaught = false;
    try
    {
        // Invoke ResolveMultiple with a null resolveInput to trigger the server fault.
        CPSWSAdapter.ResolveMultiple(names, SPPrincipalType.User, null);
    }
    catch (FaultException faultException)
    {
        faultCaught = true;

        // An ArgumentNullException<"resolveInput"> fault captures the requirement below.
        Site.CaptureRequirementIfIsTrue(
            this.VerifyArgumentNullException(faultException, "resolveInput"),
            626,
            @"[In ResolveMultiple] If this [resolveInput] is NULL, the protocol server MUST return an ArgumentNullException<""resolveInput""> message.");
    }
    finally
    {
        this.Site.Assert.IsTrue(faultCaught, "If resolveInput is NULL, the protocol server should return an ArgumentNullException<resolveInput> message.");
    }
}
/// <summary>
/// This test case is used to resolve two claims to picker entities, one valid and one invalid.
/// </summary>
[TestCategory("MSCPSWS"), TestMethod]
public void MSCPSWS_S04_TC06_ResolveMultipleClaim_SomeValid()
{
    // Build the provider-name list from every claims provider that has children.
    ArrayOfString names = new ArrayOfString();
    foreach (SPProviderHierarchyTree tree in TestSuiteBase.GetAllProviders())
    {
        if (tree.Children.Length != 0)
        {
            names.Add(tree.ProviderName);
        }
    }

    // The input holds one valid claim followed by one invalid claim.
    SPClaim[] claims = new SPClaim[]
    {
        this.GenerateSPClaimResolveInput_Valid(),
        this.GenerateSPClaimResolveInput_Invalid()
    };

    // Resolve both claims to picker entities with a single ResolveMultipleClaim call.
    PickerEntity[] result = CPSWSAdapter.ResolveMultipleClaim(names, SPPrincipalType.SecurityGroup, claims);
    Site.Assert.IsNotNull(result, "The resolve multiple claim result should not be null.");

    // One picker entity must come back per claim in the input, so exactly two are expected.
    Site.CaptureRequirementIfAreEqual<int>(
        result.Length,
        2,
        369,
        @"[In ResolveMultipleClaimResponse] ResolveMultipleClaimResult: The list MUST contain one and only one picker entity per one claim in the input.");

    Site.CaptureRequirementIfAreEqual<int>(
        result.Length,
        2,
        350,
        @"[In ResolveMultipleClaim] The protocol server MUST resolve across all claims providers that meet all the following criteria:
The claims providers are associated with the Web application (1) specified in the input message.
The claims providers are listed in the provider names in the input message.
The claims providers support resolve.");
}
/// <summary>
/// This test case is used to test the ResolveMultipleClaim method with the resolveInput parameter set to null.
/// </summary>
[TestCategory("MSCPSWS"), TestMethod]
public void MSCPSWS_S04_TC07_ResolveMultipleClaim_NullResolveInput()
{
    // Build the provider-name list from every claims provider on the server.
    ArrayOfString names = new ArrayOfString();
    foreach (SPProviderHierarchyTree tree in TestSuiteBase.GetAllProviders())
    {
        names.Add(tree.ProviderName);
    }

    bool faultCaught = false;
    try
    {
        // Invoke ResolveMultipleClaim with a null resolveInput to trigger the server fault.
        CPSWSAdapter.ResolveMultipleClaim(names, SPPrincipalType.User, null);
    }
    catch (FaultException faultException)
    {
        faultCaught = true;

        // An ArgumentNullException<"resolveInput"> fault captures the requirement below.
        Site.CaptureRequirementIfIsTrue(
            this.VerifyArgumentNullException(faultException, "resolveInput"),
            632,
            @"[In ResolveMultipleClaim] If this [resolveInput] is NULL, the protocol server MUST return an ArgumentNullException<""resolveInput""> message.");
    }
    finally
    {
        this.Site.Assert.IsTrue(faultCaught, "If resolveInput is NULL, the protocol server should return an ArgumentNullException<resolveInput> message.");
    }
}
/// <summary>
/// This test case is used to test the Resolve method with the resolveInput parameter set to null.
/// (The previous summary incorrectly referred to ResolveMultipleClaim.)
/// </summary>
[TestCategory("MSCPSWS"), TestMethod]
public void MSCPSWS_S04_TC08_Resolve_NullResolveInput()
{
// Call the helper method to get all claims providers.
SPProviderHierarchyTree[] allProviders = TestSuiteBase.GetAllProviders();
ArrayOfString providerNames = new ArrayOfString();
SPPrincipalType principalType = SPPrincipalType.User;
foreach (SPProviderHierarchyTree provider in allProviders)
{
providerNames.Add(provider.ProviderName);
}
bool caughtException = false;
try
{
// Call the Resolve method with the resolveInput parameter set to null to trigger the server fault.
CPSWSAdapter.Resolve(providerNames, principalType, null);
}
catch (FaultException faultException)
{
caughtException = true;
// If the server returns an ArgumentNullException<""value""> message, then the following requirement can be captured.
Site.CaptureRequirementIfIsTrue(
this.VerifyArgumentNullException(faultException, "value"),
616,
@"[In Resolve] If this [resolveInput] is NULL, the protocol server MUST return an ArgumentNullException<""value""> message.");
}
finally
{
this.Site.Assert.IsTrue(caughtException, "If resolveInput is NULL, the protocol server should return an ArgumentNullException<value> message.");
}
}
#endregion
}
}
| |
using System;
using System.Runtime.InteropServices;
namespace FFmpeg.AutoGen
{
// Callback slots of the libavutil AVClass structure. Each unmanaged Cdecl delegate
// is paired with a *_func wrapper struct that stores the raw native function pointer
// and converts implicitly from the managed delegate via
// Marshal.GetFunctionPointerForDelegate. Auto-generated FFmpeg.AutoGen binding code.
// NOTE(review): GetFunctionPointerForDelegate does not root the delegate; callers
// must keep the delegate instance alive for as long as native code may invoke it.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
// NOTE(review): returning a managed string from an unmanaged delegate makes the
// default marshaler take ownership of (and free) the returned native buffer --
// confirm this matches how the native side allocates the item name.
public unsafe delegate string AVClass_item_name (void* @ctx);
public unsafe struct AVClass_item_name_func
{
public IntPtr Pointer;
public static implicit operator AVClass_item_name_func(AVClass_item_name func) => new AVClass_item_name_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate void* AVClass_child_next (void* @obj, void* @prev);
public unsafe struct AVClass_child_next_func
{
public IntPtr Pointer;
public static implicit operator AVClass_child_next_func(AVClass_child_next func) => new AVClass_child_next_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate AVClass* AVClass_child_class_next (AVClass* @prev);
public unsafe struct AVClass_child_class_next_func
{
public IntPtr Pointer;
public static implicit operator AVClass_child_class_next_func(AVClass_child_class_next func) => new AVClass_child_class_next_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate AVClassCategory AVClass_get_category (void* @ctx);
public unsafe struct AVClass_get_category_func
{
public IntPtr Pointer;
public static implicit operator AVClass_get_category_func(AVClass_get_category func) => new AVClass_get_category_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
// The @key argument is marshaled as an ANSI (LPStr) string per the attribute below.
public unsafe delegate int AVClass_query_ranges (AVOptionRanges** @p0, void* @obj, [MarshalAs(UnmanagedType.LPStr)] string @key, int @flags);
public unsafe struct AVClass_query_ranges_func
{
public IntPtr Pointer;
public static implicit operator AVClass_query_ranges_func(AVClass_query_ranges func) => new AVClass_query_ranges_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
// Element-transfer callbacks for the libavutil FIFO API (av_fifo_generic_*).
// All share the shape (void*, void*, int); only the write variant returns int.
// Auto-generated FFmpeg.AutoGen binding code -- do not hand-edit the signatures.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate void av_fifo_generic_peek_at_func (void* @p0, void* @p1, int @p2);
public unsafe struct av_fifo_generic_peek_at_func_func
{
public IntPtr Pointer;
public static implicit operator av_fifo_generic_peek_at_func_func(av_fifo_generic_peek_at_func func) => new av_fifo_generic_peek_at_func_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate void av_fifo_generic_peek_func (void* @p0, void* @p1, int @p2);
public unsafe struct av_fifo_generic_peek_func_func
{
public IntPtr Pointer;
public static implicit operator av_fifo_generic_peek_func_func(av_fifo_generic_peek_func func) => new av_fifo_generic_peek_func_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate void av_fifo_generic_read_func (void* @p0, void* @p1, int @p2);
public unsafe struct av_fifo_generic_read_func_func
{
public IntPtr Pointer;
public static implicit operator av_fifo_generic_read_func_func(av_fifo_generic_read_func func) => new av_fifo_generic_read_func_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int av_fifo_generic_write_func (void* @p0, void* @p1, int @p2);
public unsafe struct av_fifo_generic_write_func_func
{
public IntPtr Pointer;
public static implicit operator av_fifo_generic_write_func_func(av_fifo_generic_write_func func) => new av_fifo_generic_write_func_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
// Callback slots of the libavcodec AVCodec structure: codec lifecycle
// (init/close/flush), frame-threading hooks, the legacy encode/decode entry
// points, and the send/receive API. Each Cdecl delegate is paired with a
// *_func pointer-wrapper struct. Auto-generated FFmpeg.AutoGen binding code.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVCodec_init_thread_copy (AVCodecContext* @p0);
public unsafe struct AVCodec_init_thread_copy_func
{
public IntPtr Pointer;
public static implicit operator AVCodec_init_thread_copy_func(AVCodec_init_thread_copy func) => new AVCodec_init_thread_copy_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVCodec_update_thread_context (AVCodecContext* @dst, AVCodecContext* @src);
public unsafe struct AVCodec_update_thread_context_func
{
public IntPtr Pointer;
public static implicit operator AVCodec_update_thread_context_func(AVCodec_update_thread_context func) => new AVCodec_update_thread_context_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate void AVCodec_init_static_data (AVCodec* @codec);
public unsafe struct AVCodec_init_static_data_func
{
public IntPtr Pointer;
public static implicit operator AVCodec_init_static_data_func(AVCodec_init_static_data func) => new AVCodec_init_static_data_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVCodec_init (AVCodecContext* @p0);
public unsafe struct AVCodec_init_func
{
public IntPtr Pointer;
public static implicit operator AVCodec_init_func(AVCodec_init func) => new AVCodec_init_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVCodec_encode_sub (AVCodecContext* @p0, byte* @buf, int @buf_size, AVSubtitle* @sub);
public unsafe struct AVCodec_encode_sub_func
{
public IntPtr Pointer;
public static implicit operator AVCodec_encode_sub_func(AVCodec_encode_sub func) => new AVCodec_encode_sub_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVCodec_encode2 (AVCodecContext* @avctx, AVPacket* @avpkt, AVFrame* @frame, int* @got_packet_ptr);
public unsafe struct AVCodec_encode2_func
{
public IntPtr Pointer;
public static implicit operator AVCodec_encode2_func(AVCodec_encode2 func) => new AVCodec_encode2_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVCodec_decode (AVCodecContext* @p0, void* @outdata, int* @outdata_size, AVPacket* @avpkt);
public unsafe struct AVCodec_decode_func
{
public IntPtr Pointer;
public static implicit operator AVCodec_decode_func(AVCodec_decode func) => new AVCodec_decode_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVCodec_close (AVCodecContext* @p0);
public unsafe struct AVCodec_close_func
{
public IntPtr Pointer;
public static implicit operator AVCodec_close_func(AVCodec_close func) => new AVCodec_close_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
// Send/receive-style codec entry points (frame in / packet out and vice versa).
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVCodec_send_frame (AVCodecContext* @avctx, AVFrame* @frame);
public unsafe struct AVCodec_send_frame_func
{
public IntPtr Pointer;
public static implicit operator AVCodec_send_frame_func(AVCodec_send_frame func) => new AVCodec_send_frame_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVCodec_send_packet (AVCodecContext* @avctx, AVPacket* @avpkt);
public unsafe struct AVCodec_send_packet_func
{
public IntPtr Pointer;
public static implicit operator AVCodec_send_packet_func(AVCodec_send_packet func) => new AVCodec_send_packet_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVCodec_receive_frame (AVCodecContext* @avctx, AVFrame* @frame);
public unsafe struct AVCodec_receive_frame_func
{
public IntPtr Pointer;
public static implicit operator AVCodec_receive_frame_func(AVCodec_receive_frame func) => new AVCodec_receive_frame_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVCodec_receive_packet (AVCodecContext* @avctx, AVPacket* @avpkt);
public unsafe struct AVCodec_receive_packet_func
{
public IntPtr Pointer;
public static implicit operator AVCodec_receive_packet_func(AVCodec_receive_packet func) => new AVCodec_receive_packet_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate void AVCodec_flush (AVCodecContext* @p0);
public unsafe struct AVCodec_flush_func
{
public IntPtr Pointer;
public static implicit operator AVCodec_flush_func(AVCodec_flush func) => new AVCodec_flush_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
// User-settable callback slots of AVCodecContext (slice drawing, pixel-format
// negotiation, buffer allocation, RTP packetization). Auto-generated
// FFmpeg.AutoGen binding code -- do not hand-edit the signatures.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
// @offset is a fixed-size int[8] wrapper passed by reference.
public unsafe delegate void AVCodecContext_draw_horiz_band (AVCodecContext* @s, AVFrame* @src, ref int_array8 @offset, int @y, int @type, int @height);
public unsafe struct AVCodecContext_draw_horiz_band_func
{
public IntPtr Pointer;
public static implicit operator AVCodecContext_draw_horiz_band_func(AVCodecContext_draw_horiz_band func) => new AVCodecContext_draw_horiz_band_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate AVPixelFormat AVCodecContext_get_format (AVCodecContext* @s, AVPixelFormat* @fmt);
public unsafe struct AVCodecContext_get_format_func
{
public IntPtr Pointer;
public static implicit operator AVCodecContext_get_format_func(AVCodecContext_get_format func) => new AVCodecContext_get_format_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVCodecContext_get_buffer2 (AVCodecContext* @s, AVFrame* @frame, int @flags);
public unsafe struct AVCodecContext_get_buffer2_func
{
public IntPtr Pointer;
public static implicit operator AVCodecContext_get_buffer2_func(AVCodecContext_get_buffer2 func) => new AVCodecContext_get_buffer2_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate void AVCodecContext_rtp_callback (AVCodecContext* @avctx, void* @data, int @size, int @mb_nb);
public unsafe struct AVCodecContext_rtp_callback_func
{
public IntPtr Pointer;
public static implicit operator AVCodecContext_rtp_callback_func(AVCodecContext_rtp_callback func) => new AVCodecContext_rtp_callback_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
// Callback slots of the libavcodec AVHWAccel structure (hardware-accelerated
// decoding hooks: per-frame allocation, frame start/end, slice decoding, and
// init/uninit). Auto-generated FFmpeg.AutoGen binding code.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVHWAccel_alloc_frame (AVCodecContext* @avctx, AVFrame* @frame);
public unsafe struct AVHWAccel_alloc_frame_func
{
public IntPtr Pointer;
public static implicit operator AVHWAccel_alloc_frame_func(AVHWAccel_alloc_frame func) => new AVHWAccel_alloc_frame_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVHWAccel_start_frame (AVCodecContext* @avctx, byte* @buf, uint @buf_size);
public unsafe struct AVHWAccel_start_frame_func
{
public IntPtr Pointer;
public static implicit operator AVHWAccel_start_frame_func(AVHWAccel_start_frame func) => new AVHWAccel_start_frame_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVHWAccel_decode_slice (AVCodecContext* @avctx, byte* @buf, uint @buf_size);
public unsafe struct AVHWAccel_decode_slice_func
{
public IntPtr Pointer;
public static implicit operator AVHWAccel_decode_slice_func(AVHWAccel_decode_slice func) => new AVHWAccel_decode_slice_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVHWAccel_end_frame (AVCodecContext* @avctx);
public unsafe struct AVHWAccel_end_frame_func
{
public IntPtr Pointer;
public static implicit operator AVHWAccel_end_frame_func(AVHWAccel_end_frame func) => new AVHWAccel_end_frame_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
// Takes the internal MpegEncContext rather than AVCodecContext like its siblings.
public unsafe delegate void AVHWAccel_decode_mb (MpegEncContext* @s);
public unsafe struct AVHWAccel_decode_mb_func
{
public IntPtr Pointer;
public static implicit operator AVHWAccel_decode_mb_func(AVHWAccel_decode_mb func) => new AVHWAccel_decode_mb_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVHWAccel_init (AVCodecContext* @avctx);
public unsafe struct AVHWAccel_init_func
{
public IntPtr Pointer;
public static implicit operator AVHWAccel_init_func(AVHWAccel_init func) => new AVHWAccel_init_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVHWAccel_uninit (AVCodecContext* @avctx);
public unsafe struct AVHWAccel_uninit_func
{
public IntPtr Pointer;
public static implicit operator AVHWAccel_uninit_func(AVHWAccel_uninit func) => new AVHWAccel_uninit_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
// Thread-pool execution slots of AVCodecContext. Both receive a worker callback
// of the (separately declared) func_func wrapper type plus a shared argument.
// Auto-generated FFmpeg.AutoGen binding code.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVCodecContext_execute (AVCodecContext* @c, func_func @func, void* @arg2, int* @ret, int @count, int @size);
public unsafe struct AVCodecContext_execute_func
{
public IntPtr Pointer;
public static implicit operator AVCodecContext_execute_func(AVCodecContext_execute func) => new AVCodecContext_execute_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVCodecContext_execute2 (AVCodecContext* @c, func_func @func, void* @arg2, int* @ret, int @count);
public unsafe struct AVCodecContext_execute2_func
{
public IntPtr Pointer;
public static implicit operator AVCodecContext_execute2_func(AVCodecContext_execute2 func) => new AVCodecContext_execute2_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
// Callback slots of the libavcodec AVCodecParser structure (parser lifecycle,
// bitstream parsing, and extradata splitting). Auto-generated FFmpeg.AutoGen
// binding code -- do not hand-edit the signatures.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVCodecParser_parser_init (AVCodecParserContext* @s);
public unsafe struct AVCodecParser_parser_init_func
{
public IntPtr Pointer;
public static implicit operator AVCodecParser_parser_init_func(AVCodecParser_parser_init func) => new AVCodecParser_parser_init_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
// @poutbuf/@poutbuf_size receive the parsed output buffer and its size.
public unsafe delegate int AVCodecParser_parser_parse (AVCodecParserContext* @s, AVCodecContext* @avctx, byte** @poutbuf, int* @poutbuf_size, byte* @buf, int @buf_size);
public unsafe struct AVCodecParser_parser_parse_func
{
public IntPtr Pointer;
public static implicit operator AVCodecParser_parser_parse_func(AVCodecParser_parser_parse func) => new AVCodecParser_parser_parse_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate void AVCodecParser_parser_close (AVCodecParserContext* @s);
public unsafe struct AVCodecParser_parser_close_func
{
public IntPtr Pointer;
public static implicit operator AVCodecParser_parser_close_func(AVCodecParser_parser_close func) => new AVCodecParser_parser_close_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVCodecParser_split (AVCodecContext* @avctx, byte* @buf, int @buf_size);
public unsafe struct AVCodecParser_split_func
{
public IntPtr Pointer;
public static implicit operator AVCodecParser_split_func(AVCodecParser_split func) => new AVCodecParser_split_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
// Callback slots of the libavcodec AVBitStreamFilter structure (filter
// init/filter/close over an AVBSFContext). Auto-generated FFmpeg.AutoGen
// binding code -- do not hand-edit the signatures.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVBitStreamFilter_init (AVBSFContext* @ctx);
public unsafe struct AVBitStreamFilter_init_func
{
public IntPtr Pointer;
public static implicit operator AVBitStreamFilter_init_func(AVBitStreamFilter_init func) => new AVBitStreamFilter_init_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVBitStreamFilter_filter (AVBSFContext* @ctx, AVPacket* @pkt);
public unsafe struct AVBitStreamFilter_filter_func
{
public IntPtr Pointer;
public static implicit operator AVBitStreamFilter_filter_func(AVBitStreamFilter_filter func) => new AVBitStreamFilter_filter_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate void AVBitStreamFilter_close (AVBSFContext* @ctx);
public unsafe struct AVBitStreamFilter_close_func
{
public IntPtr Pointer;
public static implicit operator AVBitStreamFilter_close_func(AVBitStreamFilter_close func) => new AVBitStreamFilter_close_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
// Callback slots of the libavformat AVInputFormat structure (demuxer probing,
// header/packet reading, seeking, playback control, and avdevice capability
// queries). Auto-generated FFmpeg.AutoGen binding code.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVInputFormat_read_probe (AVProbeData* @p0);
public unsafe struct AVInputFormat_read_probe_func
{
public IntPtr Pointer;
public static implicit operator AVInputFormat_read_probe_func(AVInputFormat_read_probe func) => new AVInputFormat_read_probe_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVInputFormat_read_header (AVFormatContext* @p0);
public unsafe struct AVInputFormat_read_header_func
{
public IntPtr Pointer;
public static implicit operator AVInputFormat_read_header_func(AVInputFormat_read_header func) => new AVInputFormat_read_header_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVInputFormat_read_packet (AVFormatContext* @p0, AVPacket* @pkt);
public unsafe struct AVInputFormat_read_packet_func
{
public IntPtr Pointer;
public static implicit operator AVInputFormat_read_packet_func(AVInputFormat_read_packet func) => new AVInputFormat_read_packet_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVInputFormat_read_close (AVFormatContext* @p0);
public unsafe struct AVInputFormat_read_close_func
{
public IntPtr Pointer;
public static implicit operator AVInputFormat_read_close_func(AVInputFormat_read_close func) => new AVInputFormat_read_close_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVInputFormat_read_seek (AVFormatContext* @p0, int @stream_index, long @timestamp, int @flags);
public unsafe struct AVInputFormat_read_seek_func
{
public IntPtr Pointer;
public static implicit operator AVInputFormat_read_seek_func(AVInputFormat_read_seek func) => new AVInputFormat_read_seek_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate long AVInputFormat_read_timestamp (AVFormatContext* @s, int @stream_index, long* @pos, long @pos_limit);
public unsafe struct AVInputFormat_read_timestamp_func
{
public IntPtr Pointer;
public static implicit operator AVInputFormat_read_timestamp_func(AVInputFormat_read_timestamp func) => new AVInputFormat_read_timestamp_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVInputFormat_read_play (AVFormatContext* @p0);
public unsafe struct AVInputFormat_read_play_func
{
public IntPtr Pointer;
public static implicit operator AVInputFormat_read_play_func(AVInputFormat_read_play func) => new AVInputFormat_read_play_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVInputFormat_read_pause (AVFormatContext* @p0);
public unsafe struct AVInputFormat_read_pause_func
{
public IntPtr Pointer;
public static implicit operator AVInputFormat_read_pause_func(AVInputFormat_read_pause func) => new AVInputFormat_read_pause_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVInputFormat_read_seek2 (AVFormatContext* @s, int @stream_index, long @min_ts, long @ts, long @max_ts, int @flags);
public unsafe struct AVInputFormat_read_seek2_func
{
public IntPtr Pointer;
public static implicit operator AVInputFormat_read_seek2_func(AVInputFormat_read_seek2 func) => new AVInputFormat_read_seek2_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVInputFormat_get_device_list (AVFormatContext* @s, AVDeviceInfoList* @device_list);
public unsafe struct AVInputFormat_get_device_list_func
{
public IntPtr Pointer;
public static implicit operator AVInputFormat_get_device_list_func(AVInputFormat_get_device_list func) => new AVInputFormat_get_device_list_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVInputFormat_create_device_capabilities (AVFormatContext* @s, AVDeviceCapabilitiesQuery* @caps);
public unsafe struct AVInputFormat_create_device_capabilities_func
{
public IntPtr Pointer;
public static implicit operator AVInputFormat_create_device_capabilities_func(AVInputFormat_create_device_capabilities func) => new AVInputFormat_create_device_capabilities_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public unsafe delegate int AVInputFormat_free_device_capabilities (AVFormatContext* @s, AVDeviceCapabilitiesQuery* @caps);
public unsafe struct AVInputFormat_free_device_capabilities_func
{
public IntPtr Pointer;
public static implicit operator AVInputFormat_free_device_capabilities_func(AVInputFormat_free_device_capabilities func) => new AVInputFormat_free_device_capabilities_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
}
    // --- AVIOContext / AVIOInterruptCB callbacks (auto-generated interop bindings) ---
    // Custom I/O hooks for libavformat byte streams: read/write/seek over an opaque
    // user context, plus checksum, pause and timestamp-seek hooks. Same wrapper
    // pattern as above: delegate + *_func struct holding the native pointer.
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVIOContext_read_packet (void* @opaque, byte* @buf, int @buf_size);
    public unsafe struct AVIOContext_read_packet_func
    {
        public IntPtr Pointer;
        public static implicit operator AVIOContext_read_packet_func(AVIOContext_read_packet func) => new AVIOContext_read_packet_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVIOContext_write_packet (void* @opaque, byte* @buf, int @buf_size);
    public unsafe struct AVIOContext_write_packet_func
    {
        public IntPtr Pointer;
        public static implicit operator AVIOContext_write_packet_func(AVIOContext_write_packet func) => new AVIOContext_write_packet_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate long AVIOContext_seek (void* @opaque, long @offset, int @whence);
    public unsafe struct AVIOContext_seek_func
    {
        public IntPtr Pointer;
        public static implicit operator AVIOContext_seek_func(AVIOContext_seek func) => new AVIOContext_seek_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate ulong AVIOContext_update_checksum (ulong @checksum, byte* @buf, uint @size);
    public unsafe struct AVIOContext_update_checksum_func
    {
        public IntPtr Pointer;
        public static implicit operator AVIOContext_update_checksum_func(AVIOContext_update_checksum func) => new AVIOContext_update_checksum_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVIOContext_read_pause (void* @opaque, int @pause);
    public unsafe struct AVIOContext_read_pause_func
    {
        public IntPtr Pointer;
        public static implicit operator AVIOContext_read_pause_func(AVIOContext_read_pause func) => new AVIOContext_read_pause_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate long AVIOContext_read_seek (void* @opaque, int @stream_index, long @timestamp, int @flags);
    public unsafe struct AVIOContext_read_seek_func
    {
        public IntPtr Pointer;
        public static implicit operator AVIOContext_read_seek_func(AVIOContext_read_seek func) => new AVIOContext_read_seek_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVIOContext_write_data_type (void* @opaque, byte* @buf, int @buf_size, AVIODataMarkerType @type, long @time);
    public unsafe struct AVIOContext_write_data_type_func
    {
        public IntPtr Pointer;
        public static implicit operator AVIOContext_write_data_type_func(AVIOContext_write_data_type func) => new AVIOContext_write_data_type_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    // Interrupt callback polled by blocking libav* operations to allow cancellation.
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVIOInterruptCB_callback (void* @p0);
    public unsafe struct AVIOInterruptCB_callback_func
    {
        public IntPtr Pointer;
        public static implicit operator AVIOInterruptCB_callback_func(AVIOInterruptCB_callback func) => new AVIOInterruptCB_callback_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    // --- AVFormatContext callbacks (auto-generated interop bindings) ---
    // Control-message and custom open/close hooks set on a format context.
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVFormatContext_control_message_cb (AVFormatContext* @s, int @type, void* @data, ulong @data_size);
    public unsafe struct AVFormatContext_control_message_cb_func
    {
        public IntPtr Pointer;
        public static implicit operator AVFormatContext_control_message_cb_func(AVFormatContext_control_message_cb func) => new AVFormatContext_control_message_cb_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVFormatContext_open_cb (AVFormatContext* @s, AVIOContext** @p, [MarshalAs(UnmanagedType.LPStr)] string @url, int @flags, AVIOInterruptCB* @int_cb, AVDictionary** @options);
    public unsafe struct AVFormatContext_open_cb_func
    {
        public IntPtr Pointer;
        public static implicit operator AVFormatContext_open_cb_func(AVFormatContext_open_cb func) => new AVFormatContext_open_cb_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVFormatContext_io_open (AVFormatContext* @s, AVIOContext** @pb, [MarshalAs(UnmanagedType.LPStr)] string @url, int @flags, AVDictionary** @options);
    public unsafe struct AVFormatContext_io_open_func
    {
        public IntPtr Pointer;
        public static implicit operator AVFormatContext_io_open_func(AVFormatContext_io_open func) => new AVFormatContext_io_open_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate void AVFormatContext_io_close (AVFormatContext* @s, AVIOContext* @pb);
    public unsafe struct AVFormatContext_io_close_func
    {
        public IntPtr Pointer;
        public static implicit operator AVFormatContext_io_close_func(AVFormatContext_io_close func) => new AVFormatContext_io_close_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    // --- AVOutputFormat muxer callbacks (auto-generated interop bindings) ---
    // Function-pointer fields of libavformat's AVOutputFormat (muxer vtable):
    // header/packet/trailer writing, interleaving, capability queries and
    // init/deinit lifecycle hooks. Same delegate + *_func wrapper pattern.
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVOutputFormat_write_header (AVFormatContext* @p0);
    public unsafe struct AVOutputFormat_write_header_func
    {
        public IntPtr Pointer;
        public static implicit operator AVOutputFormat_write_header_func(AVOutputFormat_write_header func) => new AVOutputFormat_write_header_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVOutputFormat_write_packet (AVFormatContext* @p0, AVPacket* @pkt);
    public unsafe struct AVOutputFormat_write_packet_func
    {
        public IntPtr Pointer;
        public static implicit operator AVOutputFormat_write_packet_func(AVOutputFormat_write_packet func) => new AVOutputFormat_write_packet_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVOutputFormat_write_trailer (AVFormatContext* @p0);
    public unsafe struct AVOutputFormat_write_trailer_func
    {
        public IntPtr Pointer;
        public static implicit operator AVOutputFormat_write_trailer_func(AVOutputFormat_write_trailer func) => new AVOutputFormat_write_trailer_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    // @out / @in are verbatim C parameter names; '@' escapes the C# keywords.
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVOutputFormat_interleave_packet (AVFormatContext* @p0, AVPacket* @out, AVPacket* @in, int @flush);
    public unsafe struct AVOutputFormat_interleave_packet_func
    {
        public IntPtr Pointer;
        public static implicit operator AVOutputFormat_interleave_packet_func(AVOutputFormat_interleave_packet func) => new AVOutputFormat_interleave_packet_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVOutputFormat_query_codec (AVCodecID @id, int @std_compliance);
    public unsafe struct AVOutputFormat_query_codec_func
    {
        public IntPtr Pointer;
        public static implicit operator AVOutputFormat_query_codec_func(AVOutputFormat_query_codec func) => new AVOutputFormat_query_codec_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate void AVOutputFormat_get_output_timestamp (AVFormatContext* @s, int @stream, long* @dts, long* @wall);
    public unsafe struct AVOutputFormat_get_output_timestamp_func
    {
        public IntPtr Pointer;
        public static implicit operator AVOutputFormat_get_output_timestamp_func(AVOutputFormat_get_output_timestamp func) => new AVOutputFormat_get_output_timestamp_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVOutputFormat_control_message (AVFormatContext* @s, int @type, void* @data, ulong @data_size);
    public unsafe struct AVOutputFormat_control_message_func
    {
        public IntPtr Pointer;
        public static implicit operator AVOutputFormat_control_message_func(AVOutputFormat_control_message func) => new AVOutputFormat_control_message_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVOutputFormat_write_uncoded_frame (AVFormatContext* @p0, int @stream_index, AVFrame** @frame, uint @flags);
    public unsafe struct AVOutputFormat_write_uncoded_frame_func
    {
        public IntPtr Pointer;
        public static implicit operator AVOutputFormat_write_uncoded_frame_func(AVOutputFormat_write_uncoded_frame func) => new AVOutputFormat_write_uncoded_frame_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVOutputFormat_get_device_list (AVFormatContext* @s, AVDeviceInfoList* @device_list);
    public unsafe struct AVOutputFormat_get_device_list_func
    {
        public IntPtr Pointer;
        public static implicit operator AVOutputFormat_get_device_list_func(AVOutputFormat_get_device_list func) => new AVOutputFormat_get_device_list_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVOutputFormat_create_device_capabilities (AVFormatContext* @s, AVDeviceCapabilitiesQuery* @caps);
    public unsafe struct AVOutputFormat_create_device_capabilities_func
    {
        public IntPtr Pointer;
        public static implicit operator AVOutputFormat_create_device_capabilities_func(AVOutputFormat_create_device_capabilities func) => new AVOutputFormat_create_device_capabilities_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVOutputFormat_free_device_capabilities (AVFormatContext* @s, AVDeviceCapabilitiesQuery* @caps);
    public unsafe struct AVOutputFormat_free_device_capabilities_func
    {
        public IntPtr Pointer;
        public static implicit operator AVOutputFormat_free_device_capabilities_func(AVOutputFormat_free_device_capabilities func) => new AVOutputFormat_free_device_capabilities_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVOutputFormat_init (AVFormatContext* @p0);
    public unsafe struct AVOutputFormat_init_func
    {
        public IntPtr Pointer;
        public static implicit operator AVOutputFormat_init_func(AVOutputFormat_init func) => new AVOutputFormat_init_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate void AVOutputFormat_deinit (AVFormatContext* @p0);
    public unsafe struct AVOutputFormat_deinit_func
    {
        public IntPtr Pointer;
        public static implicit operator AVOutputFormat_deinit_func(AVOutputFormat_deinit func) => new AVOutputFormat_deinit_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVOutputFormat_check_bitstream (AVFormatContext* @p0, AVPacket* @pkt);
    public unsafe struct AVOutputFormat_check_bitstream_func
    {
        public IntPtr Pointer;
        public static implicit operator AVOutputFormat_check_bitstream_func(AVOutputFormat_check_bitstream func) => new AVOutputFormat_check_bitstream_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    // --- AVFilter / AVFilterGraph callbacks (auto-generated interop bindings) ---
    // Filter lifecycle (init/uninit), format negotiation, runtime commands, and
    // the filter-graph job-execution hook.
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVFilter_init (AVFilterContext* @ctx);
    public unsafe struct AVFilter_init_func
    {
        public IntPtr Pointer;
        public static implicit operator AVFilter_init_func(AVFilter_init func) => new AVFilter_init_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVFilter_init_dict (AVFilterContext* @ctx, AVDictionary** @options);
    public unsafe struct AVFilter_init_dict_func
    {
        public IntPtr Pointer;
        public static implicit operator AVFilter_init_dict_func(AVFilter_init_dict func) => new AVFilter_init_dict_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate void AVFilter_uninit (AVFilterContext* @ctx);
    public unsafe struct AVFilter_uninit_func
    {
        public IntPtr Pointer;
        public static implicit operator AVFilter_uninit_func(AVFilter_uninit func) => new AVFilter_uninit_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVFilter_query_formats (AVFilterContext* @p0);
    public unsafe struct AVFilter_query_formats_func
    {
        public IntPtr Pointer;
        public static implicit operator AVFilter_query_formats_func(AVFilter_query_formats func) => new AVFilter_query_formats_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVFilter_process_command (AVFilterContext* @p0, [MarshalAs(UnmanagedType.LPStr)] string @cmd, [MarshalAs(UnmanagedType.LPStr)] string @arg, byte* @res, int @res_len, int @flags);
    public unsafe struct AVFilter_process_command_func
    {
        public IntPtr Pointer;
        public static implicit operator AVFilter_process_command_func(AVFilter_process_command func) => new AVFilter_process_command_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVFilter_init_opaque (AVFilterContext* @ctx, void* @opaque);
    public unsafe struct AVFilter_init_opaque_func
    {
        public IntPtr Pointer;
        public static implicit operator AVFilter_init_opaque_func(AVFilter_init_opaque func) => new AVFilter_init_opaque_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    // 'func' is the generator's literal name for the anonymous job callback
    // parameter of AVFilterGraph.execute (hence the awkward 'func'/'func_func' pair).
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int func (AVFilterContext* @ctx, void* @arg, int @jobnr, int @nb_jobs);
    public unsafe struct func_func
    {
        public IntPtr Pointer;
        public static implicit operator func_func(func func) => new func_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int AVFilterGraph_execute (AVFilterContext* @ctx, func_func @func, void* @arg, int* @ret, int @nb_jobs);
    public unsafe struct AVFilterGraph_execute_func
    {
        public IntPtr Pointer;
        public static implicit operator AVFilterGraph_execute_func(AVFilterGraph_execute func) => new AVFilterGraph_execute_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    // --- Free-function callback parameters (auto-generated interop bindings) ---
    // Delegates named after the API function that accepts them, e.g.
    // av_log_set_callback_callback is the 'callback' parameter of av_log_set_callback.
    // Covers logging, AVBuffer/AVBufferPool allocation, codec execute hooks, the
    // (deprecated) lock manager, custom AVIO contexts and format open/control hooks.
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate void av_log_set_callback_callback (void* @p0, int @p1, [MarshalAs(UnmanagedType.LPStr)] string @p2, byte* @p3);
    public unsafe struct av_log_set_callback_callback_func
    {
        public IntPtr Pointer;
        public static implicit operator av_log_set_callback_callback_func(av_log_set_callback_callback func) => new av_log_set_callback_callback_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate void av_buffer_create_free (void* @opaque, byte* @data);
    public unsafe struct av_buffer_create_free_func
    {
        public IntPtr Pointer;
        public static implicit operator av_buffer_create_free_func(av_buffer_create_free func) => new av_buffer_create_free_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate AVBufferRef* av_buffer_pool_init_alloc (int @size);
    public unsafe struct av_buffer_pool_init_alloc_func
    {
        public IntPtr Pointer;
        public static implicit operator av_buffer_pool_init_alloc_func(av_buffer_pool_init_alloc func) => new av_buffer_pool_init_alloc_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate AVBufferRef* av_buffer_pool_init2_alloc (void* @opaque, int @size);
    public unsafe struct av_buffer_pool_init2_alloc_func
    {
        public IntPtr Pointer;
        public static implicit operator av_buffer_pool_init2_alloc_func(av_buffer_pool_init2_alloc func) => new av_buffer_pool_init2_alloc_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate void av_buffer_pool_init2_pool_free (void* @opaque);
    public unsafe struct av_buffer_pool_init2_pool_free_func
    {
        public IntPtr Pointer;
        public static implicit operator av_buffer_pool_init2_pool_free_func(av_buffer_pool_init2_pool_free func) => new av_buffer_pool_init2_pool_free_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int avcodec_default_execute_func (AVCodecContext* @c2, void* @arg2);
    public unsafe struct avcodec_default_execute_func_func
    {
        public IntPtr Pointer;
        public static implicit operator avcodec_default_execute_func_func(avcodec_default_execute_func func) => new avcodec_default_execute_func_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int avcodec_default_execute2_func (AVCodecContext* @c2, void* @arg2, int @p2, int @p3);
    public unsafe struct avcodec_default_execute2_func_func
    {
        public IntPtr Pointer;
        public static implicit operator avcodec_default_execute2_func_func(avcodec_default_execute2_func func) => new avcodec_default_execute2_func_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int av_lockmgr_register_cb (void** @mutex, AVLockOp @op);
    public unsafe struct av_lockmgr_register_cb_func
    {
        public IntPtr Pointer;
        public static implicit operator av_lockmgr_register_cb_func(av_lockmgr_register_cb func) => new av_lockmgr_register_cb_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int avio_alloc_context_read_packet (void* @opaque, byte* @buf, int @buf_size);
    public unsafe struct avio_alloc_context_read_packet_func
    {
        public IntPtr Pointer;
        public static implicit operator avio_alloc_context_read_packet_func(avio_alloc_context_read_packet func) => new avio_alloc_context_read_packet_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int avio_alloc_context_write_packet (void* @opaque, byte* @buf, int @buf_size);
    public unsafe struct avio_alloc_context_write_packet_func
    {
        public IntPtr Pointer;
        public static implicit operator avio_alloc_context_write_packet_func(avio_alloc_context_write_packet func) => new avio_alloc_context_write_packet_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate long avio_alloc_context_seek (void* @opaque, long @offset, int @whence);
    public unsafe struct avio_alloc_context_seek_func
    {
        public IntPtr Pointer;
        public static implicit operator avio_alloc_context_seek_func(avio_alloc_context_seek func) => new avio_alloc_context_seek_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int av_format_get_control_message_cb (AVFormatContext* @s, int @type, void* @data, ulong @data_size);
    public unsafe struct av_format_get_control_message_cb_func
    {
        public IntPtr Pointer;
        public static implicit operator av_format_get_control_message_cb_func(av_format_get_control_message_cb func) => new av_format_get_control_message_cb_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int av_format_set_control_message_cb_callback (AVFormatContext* @s, int @type, void* @data, ulong @data_size);
    public unsafe struct av_format_set_control_message_cb_callback_func
    {
        public IntPtr Pointer;
        public static implicit operator av_format_set_control_message_cb_callback_func(av_format_set_control_message_cb_callback func) => new av_format_set_control_message_cb_callback_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int av_format_get_open_cb (AVFormatContext* @s, AVIOContext** @pb, [MarshalAs(UnmanagedType.LPStr)] string @url, int @flags, AVIOInterruptCB* @int_cb, AVDictionary** @options);
    public unsafe struct av_format_get_open_cb_func
    {
        public IntPtr Pointer;
        public static implicit operator av_format_get_open_cb_func(av_format_get_open_cb func) => new av_format_get_open_cb_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    public unsafe delegate int av_format_set_open_cb_callback (AVFormatContext* @s, AVIOContext** @pb, [MarshalAs(UnmanagedType.LPStr)] string @url, int @flags, AVIOInterruptCB* @int_cb, AVDictionary** @options);
    public unsafe struct av_format_set_open_cb_callback_func
    {
        public IntPtr Pointer;
        public static implicit operator av_format_set_open_cb_callback_func(av_format_set_open_cb_callback func) => new av_format_set_open_cb_callback_func { Pointer = Marshal.GetFunctionPointerForDelegate(func) };
    }
}
| |
using System;
using System.Linq;
using System.Text;
using NUnit.Framework;
using TestConsole.OutputFormatting.Internal;
using TestConsole.Tests.OutputFormatting.UnitTestutilities;
using TestConsoleLib.Testing;
namespace TestConsole.Tests.OutputFormatting.Internal
{
[TestFixture]
public class TestColumnSizer
{
[Test]
public void ZeroLineBreaksReturnsWidestLength()
{
var sizer = new ColumnSizer(typeof(int));
sizer.ColumnValue("N");
for (var n = 0; n <= 100; ++n)
sizer.ColumnValue(n.ToString());
Assert.That(sizer.MinWidth(0), Is.EqualTo(3));
}
[Test]
public void IntDataDoesNotAllowLineBreaks()
{
var sizer = new ColumnSizer(typeof(int));
sizer.ColumnValue("N");
for (var n = 0; n <= 100; ++n)
sizer.ColumnValue(n.ToString());
Assert.That(sizer.MinWidth(10), Is.EqualTo(3));
}
[Test]
public void DecimalDataDoesNotAllowLineBreaks()
{
var sizer = new ColumnSizer(typeof(decimal));
sizer.ColumnValue("N");
for (decimal n = 0; n <= 100; ++n)
sizer.ColumnValue(n.ToString());
Assert.That(sizer.MinWidth(10), Is.EqualTo(3));
}
[Test]
public void DateTimeDataDoesNotAllowLineBreaks()
{
var sizer = new ColumnSizer(typeof(double));
sizer.ColumnValue("Date");
for (var n = 0; n <= 100; ++n)
sizer.ColumnValue((DateTime.Parse("2014-04-28") + new TimeSpan(n, 0, 0, 0)).ToString("yyyy-MM-dd"));
Assert.That(sizer.MinWidth(10), Is.EqualTo(10));
}
[Test]
public void StringValuesAllowLineBreaks()
{
var sizer = new ColumnSizer(typeof(string));
sizer.ColumnValue("X");
// ----+----|----+----|----+----|---\----+----|----+----|----+----|---\----+----|----+----|----+----|
const string testValue = "Several words so that line breaks can be added to fit a small column to a number of lines.";
sizer.ColumnValue(testValue);
var minWidth = sizer.MinWidth(2);
var formatted = ColumnWrapper.WrapValue(testValue, new ColumnFormat("X", typeof (string)), minWidth);
Console.WriteLine(RulerFormatter.MakeRuler(minWidth));
Console.WriteLine(string.Join(Environment.NewLine, formatted));
Assert.That(minWidth, Is.EqualTo(33));
}
[Test]
public void FittingToLineBreaksStopsIfTheMaximumPossibleNumberOfLineBreaksIsReached()
{
var sizer = new ColumnSizer(typeof(string));
sizer.ColumnValue("X");
// ----+----|----+----|----+----|---\----+----|----+----|----+----|---\----+----|----+----|----+----|
const string testValue = "A few words.";
sizer.ColumnValue(testValue);
var minWidth = sizer.MinWidth(200);
Assert.That(minWidth, Is.EqualTo(1));
}
[Test]
public void IdealMinWidthRespectsFormatterMaxWidth()
{
var sizer = new ColumnSizer(typeof(string), new ColumnFormat { MaxWidth = 3});
sizer.ColumnValue("XXXX XXXX");
sizer.ColumnValue("YYYYYY XXXXX");
Assert.That(sizer.GetIdealMinimumWidth(), Is.EqualTo(3));
}
[Test]
public void IdealMinWidthIsCalculated()
{
var sizer = new ColumnSizer(typeof(string));
sizer.ColumnValue("XXXX XXXX");
sizer.ColumnValue("YYYYYY XXXXX");
Assert.That(sizer.GetIdealMinimumWidth(), Is.EqualTo(6));
}
[Test]
public void IdealMinWidthIsReCalculated()
{
var sizer = new ColumnSizer(typeof(string));
sizer.ColumnValue("XXXX XXXX");
sizer.ColumnValue("YYYYYY XXXXX");
sizer.ColumnValue("YYYYYYY");
Assert.That(sizer.GetIdealMinimumWidth(), Is.EqualTo(7));
}
[Test]
public void FixedColumnIdealMinWidthIsAlwaysFixed()
{
var sizer = new ColumnSizer(typeof(string), new ColumnFormat(type: typeof(string)) { FixedWidth = 4});
sizer.ColumnValue("XXXX XXXX");
sizer.ColumnValue("YYYYYY XXXXX");
sizer.ColumnValue("YYYYYYY");
Assert.That(sizer.GetIdealMinimumWidth(), Is.EqualTo(4));
}
[Test]
public void FixedColumnMinWidthIsAlwaysFixed()
{
var sizer = new ColumnSizer(typeof(string), new ColumnFormat(type: typeof(string)) { FixedWidth = 4});
sizer.ColumnValue("XXXX XXXX");
sizer.ColumnValue("YYYYYY XXXXX");
var width = sizer.MinWidth(0);
Assert.That(width, Is.EqualTo(4));
}
[Test]
public void MinWidthColumnIdealMinWidthIsAlwaysMinimum()
{
var sizer = new ColumnSizer(typeof(string), new ColumnFormat(type: typeof(string)) { MinWidth = 6});
sizer.ColumnValue("XXXX XXXX");
sizer.ColumnValue("YYYYYY XXXXX");
sizer.ColumnValue("YYYYYYY");
Assert.That(sizer.GetIdealMinimumWidth(), Is.EqualTo(6));
}
[Test]
public void MinWidthColumnMinWidthIsAlwaysMinimum()
{
var sizer = new ColumnSizer(typeof(string), new ColumnFormat(type: typeof(string)) { MinWidth = 6 });
sizer.ColumnValue("XXXX XXXX");
sizer.ColumnValue("YYYYYY XXXXX");
var width = sizer.MinWidth(0);
Assert.That(width, Is.EqualTo(6));
}
[Test]
public void RenderableColumnValuesAreNotConvertedToText()
{
var sizer = new ColumnSizer(typeof(string));
sizer.ColumnValue("XXXX XXXX");
//add a renderable value
var renderable = new RecordingConsoleAdapter();
renderable.FormatTable(Enumerable.Range(0, 3).Select(i => new {String = "blah blah blah blah", Number = i}));
sizer.ColumnValue(renderable);
Assert.That(sizer.GetSizeValue(1).RenderableValue, Is.Not.Null);
}
[Test]
public void MaxLineBreaksIsCalculated()
{
var columnFormat = new ColumnFormat("", typeof(string));
var sizer = new ColumnSizer(typeof(string), columnFormat);
sizer.ColumnValue("XXXX XXXX XXXX XX XXX");
sizer.ColumnValue("YYYYYY YYYYY YY YYY YY YYYY YY Y");
var sb = new StringBuilder();
sb.AppendLine("Test values:");
sb.AppendLine(sizer.GetSizeValue(0).TextValue);
sb.AppendLine(sizer.GetSizeValue(1).TextValue);
sb.AppendLine("Max Linebreaks:");
for (var width = 15; width > 0; --width)
{
Console.WriteLine(width);
sb.AppendLine();
sb.AppendFormat("Width = {0}, line breaks = {1}", width, sizer.GetMaxLineBreaks(width));
sb.AppendLine();
sb.AppendLine(RulerFormatter.MakeRuler(width));
foreach (var line in ColumnWrapper.WrapValue(sizer.GetSizeValue(1), columnFormat, width))
{
sb.AppendLine(line);
}
}
Console.WriteLine(sb.ToString());
Approvals.Verify(sb.ToString());
}
}
}
| |
using EasyExecute.Common;
using EasyExecute.Messages;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Xunit;
namespace EasyExecute.Tests
{
public class when_any_api_method_is_used
{
        [Fact]
        public void ensure_all_api_works_has_id_no_command_has_result()
        {
            // Exercises every ExecuteAsync overload that takes an id and an
            // operation factory (no command argument) and yields a result; each
            // call drops one optional trailing parameter from the previous one.
            // NOTE(review): the exact overload chosen depends on the lambda shapes
            // below — do not reformat them without re-checking overload resolution.
            var workerId = Guid.NewGuid().ToString();
            var expectedResult = GetHappyPathExpectedResult(workerId);
            var testHappyPathRequest = GetHappyPathRequest<TestClass, string>(workerId);
            #region HAS ID NO COMMAND HAS RESULT
            // Full parameter list: failure predicate, timeout, explicit options.
            RunTest(workerId, "HAS ID - NO COMMAND - HAS RESULT", (service, id, command) =>
                service.ExecuteAsync(
                    id
                    , () => Task.FromResult(new TestClass(command))
                    , testHappyPathRequest.HasFailedHavingResult
                    , TimeSpan.FromSeconds(5)
                    , new ExecutionRequestOptions
                    {
                        DontCacheResultById = false
                    }).Result, expectedResult);
            // Without explicit options.
            RunTest(workerId, "HAS ID - NO COMMAND - HAS RESULT", (service, id, command) =>
                service.ExecuteAsync(
                    id
                    , () => Task.FromResult(new TestClass(command))
                    , testHappyPathRequest.HasFailedHavingResult
                    , TimeSpan.FromSeconds(5)
                    ).Result, expectedResult);
            // Without a timeout.
            RunTest(workerId, "HAS ID - NO COMMAND - HAS RESULT", (service, id, command) =>
                service.ExecuteAsync(
                    id
                    , () => Task.FromResult(new TestClass(command))
                    , testHappyPathRequest.HasFailedHavingResult
                    ).Result, expectedResult);
            // Minimal form: id and operation only.
            // NOTE(review): the last two calls are identical — presumably one was
            // meant to exercise a further overload; confirm against the API surface.
            RunTest(workerId, "HAS ID - NO COMMAND - HAS RESULT", (service, id, command) =>
                service.ExecuteAsync(
                    id
                    , () => Task.FromResult(new TestClass(command))
                    ).Result, expectedResult);
            RunTest(workerId, "HAS ID - NO COMMAND - HAS RESULT", (service, id, command) =>
                service.ExecuteAsync(
                    id
                    , () => Task.FromResult(new TestClass(command))
                    ).Result, expectedResult);
            #endregion HAS ID NO COMMAND HAS RESULT
        }
        [Fact]
        public void ensure_all_api_works_has_id_has_command_has_result()
        {
            // Exercises every ExecuteAsync overload that takes an id, a command and
            // a command-consuming operation, each call dropping one optional
            // trailing parameter from the previous one.
            // NOTE(review): overload resolution depends on the lambda shapes below;
            // do not reformat them without re-checking which overload binds.
            var workerId = Guid.NewGuid().ToString();
            var expectedResult = GetHappyPathExpectedResult(workerId);
            var testHappyPathRequest = GetHappyPathRequest<TestClass, string>(workerId);
            #region HAS ID HAS COMMAND HAS RESULT
            // Full parameter list: failure predicate, timeout, explicit options.
            RunTest(workerId, "HAS ID - HAS COMMAND - HAS RESULT", (service, id, command) =>
                service.ExecuteAsync(
                    id
                    , command
                    , com => Task.FromResult(new TestClass(com))
                    , testHappyPathRequest.HasFailedHavingResult
                    , TimeSpan.FromSeconds(5)
                    , new ExecutionRequestOptions
                    {
                        DontCacheResultById = false
                    }).Result, expectedResult);
            // Without explicit options.
            RunTest(workerId, "HAS ID - HAS COMMAND - HAS RESULT", (service, id, command) =>
                service.ExecuteAsync(
                    id
                    , command
                    , com => Task.FromResult(new TestClass(com))
                    , testHappyPathRequest.HasFailedHavingResult
                    , TimeSpan.FromSeconds(5)
                    ).Result, expectedResult);
            // Without a timeout.
            RunTest(workerId, "HAS ID - HAS COMMAND - HAS RESULT", (service, id, command) =>
                service.ExecuteAsync(
                    id
                    , command
                    , com => Task.FromResult(new TestClass(com))
                    , testHappyPathRequest.HasFailedHavingResult
                    ).Result, expectedResult);
            // Minimal form: id, command and operation only.
            // NOTE(review): the last two calls are identical — presumably one was
            // intended to cover a further overload; confirm against the API surface.
            RunTest(workerId, "HAS ID - HAS COMMAND - HAS RESULT", (service, id, command) =>
                service.ExecuteAsync(
                    id
                    , command
                    , com => Task.FromResult(new TestClass(com))
                    ).Result, expectedResult);
            RunTest(workerId, "HAS ID - HAS COMMAND - HAS RESULT", (service, id, command) =>
                service.ExecuteAsync(
                    id
                    , command
                    , com => Task.FromResult(new TestClass(com))
                    ).Result, expectedResult);
            #endregion HAS ID HAS COMMAND HAS RESULT
        }
        [Fact]
        public void ensure_all_api_works_has_id_no_command_no_result()
        {
            var workerId = Guid.NewGuid().ToString();
            var expectedResult = GetHappyPathExpectedResult(workerId);
            var testHappyPathRequest = GetHappyPathRequest<TestClass, string>(workerId);
            #region HAS ID NO COMMAND NO RESULT
            // NOTE(review): despite the "NO COMMAND" label, this call DOES pass a
            // command and an operation — looks like a copy/paste from the
            // has-command test; confirm which overload this was meant to cover.
            RunTest(workerId, "HAS ID - NO COMMAND - NO RESULT", (service, id, command) =>
            {
                // The execution result is deliberately discarded ('result' is unused);
                // a synthetic success result is returned so RunTest's comparison
                // against the happy-path expectation still applies.
                var result = service.ExecuteAsync(
                    id
                    , command
                    , com => Task.FromResult(new TestClass(com))
                    , testHappyPathRequest.HasFailed
                    , TimeSpan.FromSeconds(5)
                    , new ExecutionRequestOptions
                    {
                        DontCacheResultById = false
                    }).Result;
                return new ExecutionResult<TestClass>
                {
                    Succeeded = true,
                    Result = new TestClass(command),
                    Errors = new List<string>(),
                    WorkerId = id
                };
            }, expectedResult);
            #endregion HAS ID NO COMMAND NO RESULT
        }
[Fact]
public void ensure_all_api_works_no_id_no_command_no_result()
{
    // Simplest overload: no id, no command, no result. (An unused
    // testHappyPathRequest local was removed.)
    var workerId = Guid.NewGuid().ToString();
    var expectedResult = GetHappyPathExpectedResult(workerId);
    #region NO ID NO COMMAND NO RESULT
    RunTest(workerId, "NO ID - NO COMMAND - NO RESULT", (service, id, command) =>
    {
        // Execute for side effects only; the return value carries nothing
        // useful in this scenario, so it is discarded.
        _ = service.ExecuteAsync(() => { }).Result;
        // Fabricate the expected shape so RunTest's result assertions pass.
        return new ExecutionResult<TestClass>
        {
            Succeeded = true,
            Result = new TestClass(command),
            Errors = new List<string>(),
            WorkerId = id
        };
    }, expectedResult);
    #endregion NO ID NO COMMAND NO RESULT
}
/// <summary>
/// Builds a happy-path request template: failure predicates always report
/// success and every optional member is left unset.
/// </summary>
private static TestHappyPathRequest<TCommand, TResult> GetHappyPathRequest<TCommand, TResult>(string workerId)
    where TResult : class
{
    // Local renamed from "TestHappyPathRequest": a local sharing the type's
    // PascalCase name shadows the type and violates C# naming conventions.
    var request = new TestHappyPathRequest<TCommand, TResult>
    {
        Id = workerId,
        Command = default(TCommand),
        Operation = null,
        MaxExecutionTimePerAskCall = null,
        ExecutionOptions = null,
        TransformResult = null,
        HasFailedHavingResult = r => false,
        HasFailed = () => false
    };
    return request;
}
/// <summary>
/// Builds the expected outcome of a successful run: a succeeded execution
/// result for the worker plus a single history entry marked completed.
/// </summary>
private static ExepectedTestResult GetHappyPathExpectedResult(string workerId)
{
    var executionResult = new ExecutionResult<TestClass>
    {
        Succeeded = true,
        Errors = new List<string>(),
        Result = new TestClass(workerId),
        WorkerId = workerId
    };
    var completedWorker = new Worker(workerId, new WorkerStatus
    {
        IsCompleted = true
    }, null, null, false, DateTime.UtcNow, true, null);
    var historyResult = new ExecutionResult<GetWorkHistoryCompletedMessage>
    {
        Errors = new List<string>(),
        Result = new GetWorkHistoryCompletedMessage(new List<Worker>
        {
            completedWorker
        }, DateTime.UtcNow),
        WorkerId = null
    };
    return new ExepectedTestResult
    {
        ExecutionResult = executionResult,
        ExecutionResultHistory = historyResult
    };
}
/// <summary>
/// Runs a single scenario: executes <paramref name="operation"/> against a
/// fresh service instance, then verifies both the execution result and the
/// recorded work history against <paramref name="expectedResult"/>.
/// Removed: a commented-out foreach and the redundant brace scope it left behind.
/// </summary>
/// <param name="i">Worker id, also reused as the command value</param>
/// <param name="description">Scenario label included in failure messages</param>
/// <param name="operation">Delegate that invokes the overload under test</param>
/// <param name="expectedResult">Expected execution and history results</param>
private static void RunTest(string i,
    string description
    , Func<EasyExecuteLib.EasyExecute, string, string, ExecutionResult<TestClass>> operation
    , ExepectedTestResult expectedResult)
{
    try
    {
        var service = new EasyExecuteLib.EasyExecute();
        var result = operation(service, i, i);
        var history = service.GetWorkHistoryAsync().Result;
        Assert.True(result.Succeeded);
        Assert.Equal(expectedResult.ExecutionResult.Result.Data, result.Result.Data);
        Assert.Equal(expectedResult.ExecutionResult.WorkerId, result.WorkerId);
        Assert.Equal(expectedResult.ExecutionResult.Errors.Count, result.Errors.Count);
        Assert.True(history.Succeeded);
        Assert.Equal(expectedResult.ExecutionResultHistory.Errors.Count, history.Errors.Count);
        Assert.Equal(expectedResult.ExecutionResultHistory.Result.WorkHistory.Count,
            history.Result.WorkHistory.Count);
        Assert.True(history.Result.WorkHistory.First().WorkerStatus.IsCompleted);
    }
    catch (Exception e)
    {
        // Wrap so the failing scenario's description shows up in test output.
        throw new Exception("Error : " + description + " - " + e.Message, e);
    }
}
}
}
| |
using Microsoft.IdentityModel;
using Microsoft.IdentityModel.S2S.Protocols.OAuth2;
using Microsoft.IdentityModel.S2S.Tokens;
using Microsoft.SharePoint.Client;
using Microsoft.SharePoint.Client.EventReceivers;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IdentityModel.Selectors;
using System.IdentityModel.Tokens;
using System.IO;
using System.Linq;
using System.Net;
using System.Security.Cryptography.X509Certificates;
using System.Security.Principal;
using System.ServiceModel;
using System.Text;
using System.Web;
using System.Web.Configuration;
using System.Web.Script.Serialization;
using AudienceRestriction = Microsoft.IdentityModel.Tokens.AudienceRestriction;
using AudienceUriValidationFailedException = Microsoft.IdentityModel.Tokens.AudienceUriValidationFailedException;
using SecurityTokenHandlerConfiguration = Microsoft.IdentityModel.Tokens.SecurityTokenHandlerConfiguration;
using X509SigningCredentials = Microsoft.IdentityModel.SecurityTokenService.X509SigningCredentials;
namespace Contoso.Provisioning.Pages.AppWeb
{
public static class TokenHelper
{
#region public fields
/// <summary>
/// SharePoint principal. Well-known client id used as the audience/target
/// principal when formatting SharePoint principals and issuing S2S tokens.
/// </summary>
public const string SharePointPrincipal = "00000003-0000-0ff1-ce00-000000000000";
/// <summary>
/// Lifetime of HighTrust access token, 12 hours.
/// Used as the validity window when issuing self-signed S2S tokens.
/// </summary>
public static readonly TimeSpan HighTrustAccessTokenLifetime = TimeSpan.FromHours(12.0);
#endregion public fields
#region public methods
/// <summary>
/// Retrieves the context token string from the specified request by looking for well-known parameter names in the
/// POSTed form parameters and the querystring. Returns null if no context token is found.
/// </summary>
/// <param name="request">HttpRequest in which to look for a context token</param>
/// <returns>The context token string</returns>
public static string GetContextTokenFromRequest(HttpRequest request)
{
    // Wrap and delegate to the HttpRequestBase overload.
    var wrappedRequest = new HttpRequestWrapper(request);
    return GetContextTokenFromRequest(wrappedRequest);
}
/// <summary>
/// Retrieves the context token string from the specified request by looking for well-known parameter names in the
/// POSTed form parameters and the querystring. Returns null if no context token is found.
/// </summary>
/// <param name="request">HttpRequest in which to look for a context token</param>
/// <returns>The context token string</returns>
public static string GetContextTokenFromRequest(HttpRequestBase request)
{
    string[] paramNames = { "AppContext", "AppContextToken", "AccessToken", "SPAppToken" };
    // For each candidate name, prefer the POSTed form value over the query
    // string; return the first non-empty match (null when none is present).
    return paramNames
        .Select(name => !string.IsNullOrEmpty(request.Form[name])
            ? request.Form[name]
            : request.QueryString[name])
        .FirstOrDefault(value => !string.IsNullOrEmpty(value));
}
/// <summary>
/// Validate that a specified context token string is intended for this application based on the parameters
/// specified in web.config. Parameters used from web.config used for validation include ClientId,
/// HostedAppHostNameOverride, HostedAppHostName, ClientSecret, and Realm (if it is specified). If HostedAppHostNameOverride is present,
/// it will be used for validation. Otherwise, if the <paramref name="appHostName"/> is not
/// null, it is used for validation instead of the web.config's HostedAppHostName. If the token is invalid, an
/// exception is thrown. If the token is valid, TokenHelper's static STS metadata url is updated based on the token contents
/// and a JsonWebSecurityToken based on the context token is returned.
/// </summary>
/// <param name="contextTokenString">The context token to validate</param>
/// <param name="appHostName">The URL authority, consisting of Domain Name System (DNS) host name or IP address and the port number, to use for token audience validation.
/// If null, HostedAppHostName web.config setting is used instead. HostedAppHostNameOverride web.config setting, if present, will be used
/// for validation instead of <paramref name="appHostName"/> .</param>
/// <returns>A JsonWebSecurityToken based on the context token.</returns>
public static SharePointContextToken ReadAndValidateContextToken(string contextTokenString, string appHostName = null)
{
// Parse the raw JWT into a SharePoint context token (not yet validated).
JsonWebSecurityTokenHandler tokenHandler = CreateJsonWebSecurityTokenHandler();
SecurityToken securityToken = tokenHandler.ReadToken(contextTokenString);
JsonWebSecurityToken jsonToken = securityToken as JsonWebSecurityToken;
SharePointContextToken token = SharePointContextToken.Create(jsonToken);
// Update the static ACS endpoint parts from the token's STS URI so later ACS
// calls target the same environment. NOTE(review): assumes the authority
// contains at least one '.' (e.g. "accounts.accesscontrol.windows.net");
// IndexOf returning -1 would corrupt the split — confirm upstream guarantees.
string stsAuthority = (new Uri(token.SecurityTokenServiceUri)).Authority;
int firstDot = stsAuthority.IndexOf('.');
GlobalEndPointPrefix = stsAuthority.Substring(0, firstDot);
AcsHostUrl = stsAuthority.Substring(firstDot + 1);
// Cryptographic/issuer validation; audience is checked manually below
// because the handler is configured with AudienceUriMode.Never.
tokenHandler.ValidateToken(jsonToken);
string[] acceptableAudiences;
if (!String.IsNullOrEmpty(HostedAppHostNameOverride))
{
// The override may list several acceptable hosts separated by ';'.
acceptableAudiences = HostedAppHostNameOverride.Split(';');
}
else if (appHostName == null)
{
acceptableAudiences = new[] { HostedAppHostName };
}
else
{
acceptableAudiences = new[] { appHostName };
}
bool validationSuccessful = false;
// Configured realm wins; fall back to the realm carried in the token.
string realm = Realm ?? token.Realm;
foreach (var audience in acceptableAudiences)
{
// Expected audience has the form "<client id>/<host>@<realm>".
string principal = GetFormattedPrincipal(ClientId, audience, realm);
if (StringComparer.OrdinalIgnoreCase.Equals(token.Audience, principal))
{
validationSuccessful = true;
break;
}
}
if (!validationSuccessful)
{
throw new AudienceUriValidationFailedException(
String.Format(CultureInfo.CurrentCulture,
"\"{0}\" is not the intended audience \"{1}\"", String.Join(";", acceptableAudiences), token.Audience));
}
return token;
}
/// <summary>
/// Retrieves an access token from ACS to call the source of the specified context token at the specified
/// targetHost. The targetHost must be registered for the principal that sent the context token.
/// </summary>
/// <param name="contextToken">Context token issued by the intended access token audience</param>
/// <param name="targetHost">Url authority of the target principal</param>
/// <returns>An access token with an audience matching the context token's source, or null when the context token carries no refresh token</returns>
public static OAuth2AccessTokenResponse GetAccessToken(SharePointContextToken contextToken, string targetHost)
{
    // A context token without a refresh token cannot be exchanged at ACS.
    string refreshToken = contextToken.RefreshToken;
    if (String.IsNullOrEmpty(refreshToken))
    {
        return null;
    }
    // Prefer the configured realm; fall back to the realm baked into the token.
    string targetRealm = Realm ?? contextToken.Realm;
    return GetAccessToken(refreshToken, contextToken.TargetPrincipalName, targetHost, targetRealm);
}
/// <summary>
/// Uses the specified authorization code to retrieve an access token from ACS to call the specified principal
/// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is
/// null, the "Realm" setting in web.config will be used instead.
/// </summary>
/// <param name="authorizationCode">Authorization code to exchange for access token</param>
/// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param>
/// <param name="targetHost">Url authority of the target principal</param>
/// <param name="targetRealm">Realm to use for the access token's nameid and audience</param>
/// <param name="redirectUri">Redirect URI registerd for this app</param>
/// <returns>An access token with an audience of the target principal</returns>
public static OAuth2AccessTokenResponse GetAccessToken(
    string authorizationCode,
    string targetPrincipalName,
    string targetHost,
    string targetRealm,
    Uri redirectUri)
{
    if (targetRealm == null)
    {
        targetRealm = Realm;
    }
    string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm);
    string clientId = GetFormattedPrincipal(ClientId, null, targetRealm);
    // Create the token request. (Fixed stale comment: the redirect URI is
    // passed through here and must match the one registered for the app.)
    OAuth2AccessTokenRequest oauth2Request =
        OAuth2MessageFactory.CreateAccessTokenRequestWithAuthorizationCode(
            clientId,
            ClientSecret,
            authorizationCode,
            redirectUri,
            resource);
    // Get token
    OAuth2S2SClient client = new OAuth2S2SClient();
    OAuth2AccessTokenResponse oauth2Response;
    try
    {
        oauth2Response =
            client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse;
    }
    catch (WebException wex)
    {
        // wex.Response is null for transport-level failures (DNS, timeout);
        // rethrow the original exception instead of NRE-ing while formatting.
        if (wex.Response == null)
        {
            throw;
        }
        using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream()))
        {
            string responseText = sr.ReadToEnd();
            throw new WebException(wex.Message + " - " + responseText, wex);
        }
    }
    return oauth2Response;
}
/// <summary>
/// Uses the specified refresh token to retrieve an access token from ACS to call the specified principal
/// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is
/// null, the "Realm" setting in web.config will be used instead.
/// </summary>
/// <param name="refreshToken">Refresh token to exchange for access token</param>
/// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param>
/// <param name="targetHost">Url authority of the target principal</param>
/// <param name="targetRealm">Realm to use for the access token's nameid and audience</param>
/// <returns>An access token with an audience of the target principal</returns>
public static OAuth2AccessTokenResponse GetAccessToken(
    string refreshToken,
    string targetPrincipalName,
    string targetHost,
    string targetRealm)
{
    if (targetRealm == null)
    {
        targetRealm = Realm;
    }
    string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm);
    string clientId = GetFormattedPrincipal(ClientId, null, targetRealm);
    OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithRefreshToken(clientId, ClientSecret, refreshToken, resource);
    // Get token
    OAuth2S2SClient client = new OAuth2S2SClient();
    OAuth2AccessTokenResponse oauth2Response;
    try
    {
        oauth2Response =
            client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse;
    }
    catch (WebException wex)
    {
        // wex.Response is null for transport-level failures (DNS, timeout);
        // rethrow the original exception instead of NRE-ing while formatting.
        if (wex.Response == null)
        {
            throw;
        }
        using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream()))
        {
            string responseText = sr.ReadToEnd();
            throw new WebException(wex.Message + " - " + responseText, wex);
        }
    }
    return oauth2Response;
}
/// <summary>
/// Retrieves an app-only access token from ACS to call the specified principal
/// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is
/// null, the "Realm" setting in web.config will be used instead.
/// </summary>
/// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param>
/// <param name="targetHost">Url authority of the target principal</param>
/// <param name="targetRealm">Realm to use for the access token's nameid and audience</param>
/// <returns>An access token with an audience of the target principal</returns>
public static OAuth2AccessTokenResponse GetAppOnlyAccessToken(
    string targetPrincipalName,
    string targetHost,
    string targetRealm)
{
    if (targetRealm == null)
    {
        targetRealm = Realm;
    }
    string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm);
    string clientId = GetFormattedPrincipal(ClientId, HostedAppHostName, targetRealm);
    OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithClientCredentials(clientId, ClientSecret, resource);
    // Kept from the original: Resource is set explicitly after construction —
    // presumably the client-credentials factory overload does not populate it.
    // Do not remove without verifying against OAuth2MessageFactory.
    oauth2Request.Resource = resource;
    // Get token
    OAuth2S2SClient client = new OAuth2S2SClient();
    OAuth2AccessTokenResponse oauth2Response;
    try
    {
        oauth2Response =
            client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse;
    }
    catch (WebException wex)
    {
        // wex.Response is null for transport-level failures (DNS, timeout);
        // rethrow the original exception instead of NRE-ing while formatting.
        if (wex.Response == null)
        {
            throw;
        }
        using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream()))
        {
            string responseText = sr.ReadToEnd();
            throw new WebException(wex.Message + " - " + responseText, wex);
        }
    }
    return oauth2Response;
}
/// <summary>
/// Creates a client context based on the properties of a remote event receiver
/// </summary>
/// <param name="properties">Properties of a remote event receiver</param>
/// <returns>A ClientContext ready to call the web where the event originated, or null when the event carries no web url</returns>
public static ClientContext CreateRemoteEventReceiverClientContext(SPRemoteEventProperties properties)
{
    // Pick the originating web url from whichever event payload is present.
    string eventSourceUrl;
    if (properties.ListEventProperties != null)
    {
        eventSourceUrl = properties.ListEventProperties.WebUrl;
    }
    else if (properties.ItemEventProperties != null)
    {
        eventSourceUrl = properties.ItemEventProperties.WebUrl;
    }
    else if (properties.WebEventProperties != null)
    {
        eventSourceUrl = properties.WebEventProperties.FullUrl;
    }
    else
    {
        return null;
    }
    Uri sharepointUrl = new Uri(eventSourceUrl);
    // High trust apps sign their own tokens; everything else goes through ACS.
    return IsHighTrustApp()
        ? GetS2SClientContextWithWindowsIdentity(sharepointUrl, null)
        : CreateAcsClientContextForUrl(properties, sharepointUrl);
}
/// <summary>
/// Creates a client context based on the properties of an app event
/// </summary>
/// <param name="properties">Properties of an app event</param>
/// <param name="useAppWeb">True to target the app web, false to target the host web</param>
/// <returns>A ClientContext ready to call the app web or the parent web, or null when no app event properties are present</returns>
public static ClientContext CreateAppEventClientContext(SPRemoteEventProperties properties, bool useAppWeb)
{
    if (properties.AppEventProperties == null)
    {
        return null;
    }
    Uri sharepointUrl;
    if (useAppWeb)
    {
        sharepointUrl = properties.AppEventProperties.AppWebFullUrl;
    }
    else
    {
        sharepointUrl = properties.AppEventProperties.HostWebFullUrl;
    }
    // High trust apps sign their own tokens; everything else goes through ACS.
    return IsHighTrustApp()
        ? GetS2SClientContextWithWindowsIdentity(sharepointUrl, null)
        : CreateAcsClientContextForUrl(properties, sharepointUrl);
}
/// <summary>
/// Retrieves an access token from ACS using the specified authorization code, and uses that access token to
/// create a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="authorizationCode">Authorization code to use when retrieving the access token from ACS</param>
/// <param name="redirectUri">Redirect URI registerd for this app</param>
/// <returns>A ClientContext ready to call targetUrl with a valid access token</returns>
public static ClientContext GetClientContextWithAuthorizationCode(
    string targetUrl,
    string authorizationCode,
    Uri redirectUri)
{
    // Discover the realm from the target site, then delegate to the full overload
    // using the well-known SharePoint principal.
    string realm = GetRealmFromTargetUrl(new Uri(targetUrl));
    return GetClientContextWithAuthorizationCode(targetUrl, SharePointPrincipal, authorizationCode, realm, redirectUri);
}
/// <summary>
/// Retrieves an access token from ACS using the specified authorization code, and uses that access token to
/// create a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="targetPrincipalName">Name of the target SharePoint principal</param>
/// <param name="authorizationCode">Authorization code to use when retrieving the access token from ACS</param>
/// <param name="targetRealm">Realm to use for the access token's nameid and audience</param>
/// <param name="redirectUri">Redirect URI registerd for this app</param>
/// <returns>A ClientContext ready to call targetUrl with a valid access token</returns>
public static ClientContext GetClientContextWithAuthorizationCode(
    string targetUrl,
    string targetPrincipalName,
    string authorizationCode,
    string targetRealm,
    Uri redirectUri)
{
    Uri targetUri = new Uri(targetUrl);
    // Exchange the code for an access token, then bind it to a client context.
    OAuth2AccessTokenResponse tokenResponse =
        GetAccessToken(authorizationCode, targetPrincipalName, targetUri.Authority, targetRealm, redirectUri);
    return GetClientContextWithAccessToken(targetUrl, tokenResponse.AccessToken);
}
/// <summary>
/// Uses the specified access token to create a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="accessToken">Access token to be used when calling the specified targetUrl</param>
/// <returns>A ClientContext ready to call targetUrl with the specified access token</returns>
public static ClientContext GetClientContextWithAccessToken(string targetUrl, string accessToken)
{
    // Anonymous mode with form-digest handling off: authentication is carried
    // entirely by the bearer header attached below.
    ClientContext clientContext = new ClientContext(targetUrl)
    {
        AuthenticationMode = ClientAuthenticationMode.Anonymous,
        FormDigestHandlingEnabled = false
    };
    clientContext.ExecutingWebRequest += (sender, webRequestEventArgs) =>
    {
        webRequestEventArgs.WebRequestExecutor.RequestHeaders["Authorization"] =
            "Bearer " + accessToken;
    };
    return clientContext;
}
/// <summary>
/// Retrieves an access token from ACS using the specified context token, and uses that access token to create
/// a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="contextTokenString">Context token received from the target SharePoint site</param>
/// <param name="appHostUrl">Url authority of the hosted app. If this is null, the value in the HostedAppHostName
/// of web.config will be used instead</param>
/// <returns>A ClientContext ready to call targetUrl with a valid access token</returns>
public static ClientContext GetClientContextWithContextToken(
    string targetUrl,
    string contextTokenString,
    string appHostUrl)
{
    // Validate the incoming token, exchange it for an access token, then
    // bind the access token to a client context for the target site.
    SharePointContextToken contextToken = ReadAndValidateContextToken(contextTokenString, appHostUrl);
    string targetAuthority = new Uri(targetUrl).Authority;
    OAuth2AccessTokenResponse tokenResponse = GetAccessToken(contextToken, targetAuthority);
    return GetClientContextWithAccessToken(targetUrl, tokenResponse.AccessToken);
}
/// <summary>
/// Returns the SharePoint url to which the app should redirect the browser to request consent and get back
/// an authorization code.
/// </summary>
/// <param name="contextUrl">Absolute Url of the SharePoint site</param>
/// <param name="scope">Space-delimited permissions to request from the SharePoint site in "shorthand" format
/// (e.g. "Web.Read Site.Write")</param>
/// <returns>Url of the SharePoint site's OAuth authorization page</returns>
public static string GetAuthorizationUrl(string contextUrl, string scope)
{
    return EnsureTrailingSlash(contextUrl)
        + AuthorizationPage
        + "?IsDlg=1&client_id=" + ClientId
        + "&scope=" + scope
        + "&response_type=code";
}
/// <summary>
/// Returns the SharePoint url to which the app should redirect the browser to request consent and get back
/// an authorization code.
/// </summary>
/// <param name="contextUrl">Absolute Url of the SharePoint site</param>
/// <param name="scope">Space-delimited permissions to request from the SharePoint site in "shorthand" format
/// (e.g. "Web.Read Site.Write")</param>
/// <param name="redirectUri">Uri to which SharePoint should redirect the browser to after consent is
/// granted</param>
/// <returns>Url of the SharePoint site's OAuth authorization page</returns>
public static string GetAuthorizationUrl(string contextUrl, string scope, string redirectUri)
{
    return EnsureTrailingSlash(contextUrl)
        + AuthorizationPage
        + "?IsDlg=1&client_id=" + ClientId
        + "&scope=" + scope
        + "&response_type=code&redirect_uri=" + redirectUri;
}
/// <summary>
/// Returns the SharePoint url to which the app should redirect the browser to request a new context token.
/// </summary>
/// <param name="contextUrl">Absolute Url of the SharePoint site</param>
/// <param name="redirectUri">Uri to which SharePoint should redirect the browser to with a context token</param>
/// <returns>Url of the SharePoint site's context token redirect page</returns>
public static string GetAppContextTokenRequestUrl(string contextUrl, string redirectUri)
{
    return EnsureTrailingSlash(contextUrl)
        + RedirectPage
        + "?client_id=" + ClientId
        + "&redirect_uri=" + redirectUri;
}
/// <summary>
/// Retrieves an S2S access token signed by the application's private certificate on behalf of the specified
/// WindowsIdentity and intended for the SharePoint at the targetApplicationUri. If no Realm is specified in
/// web.config, an auth challenge will be issued to the targetApplicationUri to discover it.
/// </summary>
/// <param name="targetApplicationUri">Url of the target SharePoint site</param>
/// <param name="identity">Windows identity of the user on whose behalf to create the access token</param>
/// <returns>An access token with an audience of the target principal</returns>
public static string GetS2SAccessTokenWithWindowsIdentity(
    Uri targetApplicationUri,
    WindowsIdentity identity)
{
    // Use the configured realm when present; otherwise discover it from the site.
    string realm = string.IsNullOrEmpty(Realm)
        ? GetRealmFromTargetUrl(targetApplicationUri)
        : Realm;
    // No identity means an app-only token (null claims).
    JsonWebTokenClaim[] claims = (identity == null) ? null : GetClaimsWithWindowsIdentity(identity);
    return GetS2SAccessTokenWithClaims(targetApplicationUri.Authority, realm, claims);
}
/// <summary>
/// Retrieves an S2S client context with an access token signed by the application's private certificate on
/// behalf of the specified WindowsIdentity and intended for application at the targetApplicationUri using the
/// targetRealm. If no Realm is specified in web.config, an auth challenge will be issued to the
/// targetApplicationUri to discover it.
/// </summary>
/// <param name="targetApplicationUri">Url of the target SharePoint site</param>
/// <param name="identity">Windows identity of the user on whose behalf to create the access token</param>
/// <returns>A ClientContext using an access token with an audience of the target application</returns>
public static ClientContext GetS2SClientContextWithWindowsIdentity(
    Uri targetApplicationUri,
    WindowsIdentity identity)
{
    // Use the configured realm when present; otherwise discover it from the site.
    string realm = string.IsNullOrEmpty(Realm)
        ? GetRealmFromTargetUrl(targetApplicationUri)
        : Realm;
    // No identity means an app-only token (null claims).
    JsonWebTokenClaim[] claims = (identity == null) ? null : GetClaimsWithWindowsIdentity(identity);
    string accessToken = GetS2SAccessTokenWithClaims(targetApplicationUri.Authority, realm, claims);
    return GetClientContextWithAccessToken(targetApplicationUri.ToString(), accessToken);
}
/// <summary>
/// Get authentication realm from SharePoint
/// </summary>
/// <param name="targetApplicationUri">Url of the target SharePoint site</param>
/// <returns>String representation of the realm GUID</returns>
public static string GetRealmFromTargetUrl(Uri targetApplicationUri)
{
// Send an unauthenticated request with an empty Bearer header; the site
// rejects it and the realm is parsed out of the WWW-Authenticate challenge
// on the resulting WebException.
WebRequest request = WebRequest.Create(targetApplicationUri + "/_vti_bin/client.svc");
request.Headers.Add("Authorization: Bearer ");
try
{
using (request.GetResponse())
{
}
}
catch (WebException e)
{
// No server response (transport failure) — realm cannot be determined.
if (e.Response == null)
{
return null;
}
string bearerResponseHeader = e.Response.Headers["WWW-Authenticate"];
if (string.IsNullOrEmpty(bearerResponseHeader))
{
return null;
}
const string bearer = "Bearer realm=\"";
int bearerIndex = bearerResponseHeader.IndexOf(bearer, StringComparison.Ordinal);
if (bearerIndex < 0)
{
return null;
}
int realmIndex = bearerIndex + bearer.Length;
// The realm is a GUID in "D" format: exactly 36 characters.
if (bearerResponseHeader.Length >= realmIndex + 36)
{
string targetRealm = bearerResponseHeader.Substring(realmIndex, 36);
Guid realmGuid;
if (Guid.TryParse(targetRealm, out realmGuid))
{
return targetRealm;
}
}
}
// Either the request unexpectedly succeeded or the challenge was malformed.
return null;
}
/// <summary>
/// Determines if this is a high trust app.
/// </summary>
/// <returns>True if this is a high trust app.</returns>
public static bool IsHighTrustApp()
{
    // Signing credentials are only populated when a client signing
    // certificate is configured, which is the high trust setup.
    return null != SigningCredentials;
}
/// <summary>
/// Ensures that the specified URL ends with '/' if it is not null or empty.
/// </summary>
/// <param name="url">The url.</param>
/// <returns>The url ending with '/' if it is not null or empty.</returns>
public static string EnsureTrailingSlash(string url)
{
    // Null/empty inputs are returned unchanged.
    if (string.IsNullOrEmpty(url))
    {
        return url;
    }
    return url.EndsWith("/", StringComparison.Ordinal) ? url : url + "/";
}
#endregion
#region private fields
//
// Configuration Constants
//
private const string AuthorizationPage = "_layouts/15/OAuthAuthorize.aspx";
private const string RedirectPage = "_layouts/15/AppRedirect.aspx";
// Well-known principal id of Azure Access Control Service (ACS) itself.
private const string AcsPrincipalName = "00000001-0000-0000-c000-000000000000";
private const string AcsMetadataEndPointRelativeUrl = "metadata/json/1";
private const string S2SProtocol = "OAuth2";
private const string DelegationIssuance = "DelegationIssuance1.0";
private const string NameIdentifierClaimType = JsonWebTokenConstants.ReservedClaims.NameIdentifier;
private const string TrustedForImpersonationClaimType = "trustedfordelegation";
private const string ActorTokenClaimType = JsonWebTokenConstants.ReservedClaims.ActorToken;
//
// Environment Constants
//
// Default ACS endpoint parts; ReadAndValidateContextToken overwrites both
// with the values parsed from the incoming token's STS uri.
private static string GlobalEndPointPrefix = "accounts";
private static string AcsHostUrl = "accesscontrol.windows.net";
//
// Hosted app configuration
//
// ClientId/ClientSecret fall back to the legacy HostedAppName /
// HostedAppSigningKey settings when the newer keys are absent.
private static readonly string ClientId = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("ClientId")) ? WebConfigurationManager.AppSettings.Get("HostedAppName") : WebConfigurationManager.AppSettings.Get("ClientId");
private static readonly string IssuerId = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("IssuerId")) ? ClientId : WebConfigurationManager.AppSettings.Get("IssuerId");
private static readonly string HostedAppHostNameOverride = WebConfigurationManager.AppSettings.Get("HostedAppHostNameOverride");
private static readonly string HostedAppHostName = WebConfigurationManager.AppSettings.Get("HostedAppHostName");
private static readonly string ClientSecret = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("ClientSecret")) ? WebConfigurationManager.AppSettings.Get("HostedAppSigningKey") : WebConfigurationManager.AppSettings.Get("ClientSecret");
// Secondary secret allows token validation to keep working during rollover.
private static readonly string SecondaryClientSecret = WebConfigurationManager.AppSettings.Get("SecondaryClientSecret");
private static readonly string Realm = WebConfigurationManager.AppSettings.Get("Realm");
// NOTE(review): ServiceNamespace intentionally reads the "Realm" setting —
// presumably the ACS service namespace equals the realm; confirm before changing.
private static readonly string ServiceNamespace = WebConfigurationManager.AppSettings.Get("Realm");
// High trust (S2S) configuration: both settings must be present for
// ClientCertificate / SigningCredentials (and thus IsHighTrustApp) to be non-null.
private static readonly string ClientSigningCertificatePath = WebConfigurationManager.AppSettings.Get("ClientSigningCertificatePath");
private static readonly string ClientSigningCertificatePassword = WebConfigurationManager.AppSettings.Get("ClientSigningCertificatePassword");
private static readonly X509Certificate2 ClientCertificate = (string.IsNullOrEmpty(ClientSigningCertificatePath) || string.IsNullOrEmpty(ClientSigningCertificatePassword)) ? null : new X509Certificate2(ClientSigningCertificatePath, ClientSigningCertificatePassword);
private static readonly X509SigningCredentials SigningCredentials = (ClientCertificate == null) ? null : new X509SigningCredentials(ClientCertificate, SecurityAlgorithms.RsaSha256Signature, SecurityAlgorithms.Sha256Digest);
#endregion
#region private methods
// Builds a client context for an event's originating web by validating the
// event's context token and exchanging it at ACS for an access token.
// Returns null when the event carries no context token.
private static ClientContext CreateAcsClientContextForUrl(SPRemoteEventProperties properties, Uri sharepointUrl)
{
    string contextTokenString = properties.ContextToken;
    if (String.IsNullOrEmpty(contextTokenString))
    {
        return null;
    }
    // Validate the token against the host the event request was addressed to.
    string requestHost = OperationContext.Current.IncomingMessageHeaders.To.Host;
    SharePointContextToken contextToken = ReadAndValidateContextToken(contextTokenString, requestHost);
    OAuth2AccessTokenResponse tokenResponse = GetAccessToken(contextToken, sharepointUrl.Authority);
    return GetClientContextWithAccessToken(sharepointUrl.ToString(), tokenResponse.AccessToken);
}
// Builds the ACS metadata endpoint URL. GetAcsGlobalEndpointUrl() always ends
// with '/', so plain concatenation is correct. Replaced Path.Combine: it is a
// filesystem API and must not be used for URLs (separator semantics differ).
private static string GetAcsMetadataEndpointUrl()
{
    return GetAcsGlobalEndpointUrl() + AcsMetadataEndPointRelativeUrl;
}
// Formats an OAuth principal identifier: "name/host@realm" when a host is
// supplied, otherwise "name@realm".
private static string GetFormattedPrincipal(string principalName, string hostName, string realm)
{
    return String.IsNullOrEmpty(hostName)
        ? String.Format(CultureInfo.InvariantCulture, "{0}@{1}", principalName, realm)
        : String.Format(CultureInfo.InvariantCulture, "{0}/{1}@{2}", principalName, hostName, realm);
}
// Formats the ACS principal for the given realm: "<acs-app-id>/<acs-host>@<realm>".
private static string GetAcsPrincipalName(string realm)
{
    Uri acsEndpoint = new Uri(GetAcsGlobalEndpointUrl());
    return GetFormattedPrincipal(AcsPrincipalName, acsEndpoint.Host, realm);
}
// Builds the ACS global endpoint, e.g. "https://accounts.accesscontrol.windows.net/".
// Always ends with a trailing slash.
private static string GetAcsGlobalEndpointUrl()
{
    return "https://" + GlobalEndPointPrefix + "." + AcsHostUrl + "/";
}
// Builds the handler used to read and validate incoming context tokens.
private static JsonWebSecurityTokenHandler CreateJsonWebSecurityTokenHandler()
{
JsonWebSecurityTokenHandler handler = new JsonWebSecurityTokenHandler();
handler.Configuration = new SecurityTokenHandlerConfiguration();
// Audience is checked manually by the caller, so the handler's own
// audience restriction is disabled; certificate validation is skipped
// because context tokens are signed with symmetric client secrets.
handler.Configuration.AudienceRestriction = new AudienceRestriction(AudienceUriMode.Never);
handler.Configuration.CertificateValidator = X509CertificateValidator.None;
// Accept tokens signed with either the current or the secondary client
// secret, which keeps validation working during secret rollover.
List<byte[]> securityKeys = new List<byte[]>();
securityKeys.Add(Convert.FromBase64String(ClientSecret));
if (!string.IsNullOrEmpty(SecondaryClientSecret))
{
securityKeys.Add(Convert.FromBase64String(SecondaryClientSecret));
}
List<SecurityToken> securityTokens = new List<SecurityToken>();
securityTokens.Add(new MultipleSymmetricKeySecurityToken(securityKeys));
handler.Configuration.IssuerTokenResolver =
SecurityTokenResolver.CreateDefaultSecurityTokenResolver(
new ReadOnlyCollection<SecurityToken>(securityTokens),
false);
// Register the ACS principal of the configured service namespace as a
// trusted issuer for every accepted key.
SymmetricKeyIssuerNameRegistry issuerNameRegistry = new SymmetricKeyIssuerNameRegistry();
foreach (byte[] securitykey in securityKeys)
{
issuerNameRegistry.AddTrustedIssuer(securitykey, GetAcsPrincipalName(ServiceNamespace));
}
handler.Configuration.IssuerNameRegistry = issuerNameRegistry;
return handler;
}
/// <summary>
/// Issues a high-trust (S2S) access token for the target SharePoint application
/// carrying the supplied claims. When <paramref name="claims"/> is null, an
/// app-only token is issued instead of a delegated one.
/// </summary>
/// <param name="targetApplicationHostName">Host name of the target SharePoint site.</param>
/// <param name="targetRealm">Realm of the target; also passed as the source
/// realm, since in the high-trust case both applications share one realm.</param>
/// <param name="claims">Claims identifying the user, or null for app-only.</param>
private static string GetS2SAccessTokenWithClaims(
string targetApplicationHostName,
string targetRealm,
IEnumerable<JsonWebTokenClaim> claims)
{
return IssueToken(
ClientId,
IssuerId,
targetRealm,
SharePointPrincipal,
targetRealm,
targetApplicationHostName,
true,
claims,
claims == null);
}
/// <summary>
/// Builds the JWT claims that identify a Windows user: the user's SID string
/// as the name identifier, plus an "nii" claim marking Active Directory as
/// the identity provider.
/// </summary>
private static JsonWebTokenClaim[] GetClaimsWithWindowsIdentity(WindowsIdentity identity)
{
JsonWebTokenClaim[] claims = new JsonWebTokenClaim[]
{
// NOTE(review): ToLower() is culture-sensitive; SID strings are ASCII so this
// is effectively invariant, but ToLowerInvariant() would state the intent.
new JsonWebTokenClaim(NameIdentifierClaimType, identity.User.Value.ToLower()),
new JsonWebTokenClaim("nii", "urn:office:idp:activedirectory")
};
return claims;
}
/// <summary>
/// Issues a self-signed S2S access token. The token consists of an inner
/// "actor" token signed with this app's SigningCredentials and, for delegated
/// calls, an outer unsigned token carrying the user claims plus the embedded
/// actor token. For app-only calls the actor token itself is returned.
/// </summary>
/// <param name="sourceApplication">Client id of the calling application.</param>
/// <param name="issuerApplication">Id of the issuing principal.</param>
/// <param name="sourceRealm">Realm of the calling application; may be empty.</param>
/// <param name="targetApplication">Principal id of the target application.</param>
/// <param name="targetRealm">Realm of the target application.</param>
/// <param name="targetApplicationHostName">Host name of the target application.</param>
/// <param name="trustedForDelegation">Whether the app may impersonate users.</param>
/// <param name="claims">User claims for delegated calls; unused when app-only.</param>
/// <param name="appOnly">True to issue an app-only token (no user identity).</param>
/// <exception cref="InvalidOperationException">SigningCredentials not initialized.</exception>
private static string IssueToken(
string sourceApplication,
string issuerApplication,
string sourceRealm,
string targetApplication,
string targetRealm,
string targetApplicationHostName,
bool trustedForDelegation,
IEnumerable<JsonWebTokenClaim> claims,
bool appOnly = false)
{
if (null == SigningCredentials)
{
throw new InvalidOperationException("SigningCredentials was not initialized");
}
#region Actor token
// Issuer and nameid are qualified with the source realm when one is supplied.
string issuer = string.IsNullOrEmpty(sourceRealm) ? issuerApplication : string.Format("{0}@{1}", issuerApplication, sourceRealm);
string nameid = string.IsNullOrEmpty(sourceRealm) ? sourceApplication : string.Format("{0}@{1}", sourceApplication, sourceRealm);
string audience = string.Format("{0}/{1}@{2}", targetApplication, targetApplicationHostName, targetRealm);
List<JsonWebTokenClaim> actorClaims = new List<JsonWebTokenClaim>();
actorClaims.Add(new JsonWebTokenClaim(JsonWebTokenConstants.ReservedClaims.NameIdentifier, nameid));
// Delegated (user) tokens advertise that the app may impersonate the user.
if (trustedForDelegation && !appOnly)
{
actorClaims.Add(new JsonWebTokenClaim(TrustedForImpersonationClaimType, "true"));
}
// Create token
JsonWebSecurityToken actorToken = new JsonWebSecurityToken(
issuer: issuer,
audience: audience,
validFrom: DateTime.UtcNow,
validTo: DateTime.UtcNow.Add(HighTrustAccessTokenLifetime),
signingCredentials: SigningCredentials,
claims: actorClaims);
string actorTokenString = new JsonWebSecurityTokenHandler().WriteTokenAsString(actorToken);
if (appOnly)
{
// App-only token is the same as actor token for delegated case
return actorTokenString;
}
#endregion Actor token
#region Outer token
// The outer token is built without signing credentials; it wraps the user
// claims and embeds the signed actor token as a claim.
List<JsonWebTokenClaim> outerClaims = null == claims ? new List<JsonWebTokenClaim>() : new List<JsonWebTokenClaim>(claims);
outerClaims.Add(new JsonWebTokenClaim(ActorTokenClaimType, actorTokenString));
JsonWebSecurityToken jsonToken = new JsonWebSecurityToken(
nameid, // outer token issuer should match actor token nameid
audience,
DateTime.UtcNow,
DateTime.UtcNow.Add(HighTrustAccessTokenLifetime),
outerClaims);
string accessToken = new JsonWebSecurityTokenHandler().WriteTokenAsString(jsonToken);
#endregion Outer token
return accessToken;
}
#endregion
#region AcsMetadataParser
// This class is used to get the MetaData document from the global STS endpoint. It
// contains methods to parse the MetaData document and extract endpoint URLs and the
// STS signing certificate.
public static class AcsMetadataParser
{
/// <summary>
/// Downloads the ACS metadata document for <paramref name="realm"/> and returns
/// the ACS token-signing certificate (taken from the first key in the document).
/// </summary>
public static X509Certificate2 GetAcsSigningCert(string realm)
{
JsonMetadataDocument document = GetMetadataDocument(realm);
if (null != document.keys && document.keys.Count > 0)
{
JsonKey signingKey = document.keys[0];
if (null != signingKey && null != signingKey.keyValue)
{
return new X509Certificate2(Encoding.UTF8.GetBytes(signingKey.keyValue.value));
}
}
throw new Exception("Metadata document does not contain ACS signing certificate.");
}
/// <summary>
/// Returns the ACS delegation service endpoint URL for the given realm.
/// </summary>
public static string GetDelegationServiceUrl(string realm)
{
JsonMetadataDocument document = GetMetadataDocument(realm);
JsonEndpoint delegationEndpoint = document.endpoints.SingleOrDefault(e => e.protocol == DelegationIssuance);
if (null != delegationEndpoint)
{
return delegationEndpoint.location;
}
throw new Exception("Metadata document does not contain Delegation Service endpoint Url");
}
/// <summary>
/// Downloads and deserializes the JSON metadata document for the realm from
/// the global ACS metadata endpoint. Performs synchronous network I/O.
/// </summary>
private static JsonMetadataDocument GetMetadataDocument(string realm)
{
string acsMetadataEndpointUrlWithRealm = String.Format(CultureInfo.InvariantCulture, "{0}?realm={1}",
GetAcsMetadataEndpointUrl(),
realm);
byte[] acsMetadata;
using (WebClient webClient = new WebClient())
{
acsMetadata = webClient.DownloadData(acsMetadataEndpointUrlWithRealm);
}
string jsonResponseString = Encoding.UTF8.GetString(acsMetadata);
JavaScriptSerializer serializer = new JavaScriptSerializer();
JsonMetadataDocument document = serializer.Deserialize<JsonMetadataDocument>(jsonResponseString);
if (null == document)
{
throw new Exception("No metadata document found at the global endpoint " + acsMetadataEndpointUrlWithRealm);
}
return document;
}
/// <summary>
/// Returns the ACS security token service endpoint URL (S2S protocol) for the realm.
/// </summary>
public static string GetStsUrl(string realm)
{
JsonMetadataDocument document = GetMetadataDocument(realm);
JsonEndpoint s2sEndpoint = document.endpoints.SingleOrDefault(e => e.protocol == S2SProtocol);
if (null != s2sEndpoint)
{
return s2sEndpoint.location;
}
throw new Exception("Metadata document does not contain STS endpoint url");
}
// DTO types mirroring the JSON metadata document schema; property names are
// lower-case to match the wire format.
private class JsonMetadataDocument
{
public string serviceName { get; set; }
public List<JsonEndpoint> endpoints { get; set; }
public List<JsonKey> keys { get; set; }
}
private class JsonEndpoint
{
public string location { get; set; }
public string protocol { get; set; }
public string usage { get; set; }
}
private class JsonKeyValue
{
public string type { get; set; }
public string value { get; set; }
}
private class JsonKey
{
public string usage { get; set; }
public JsonKeyValue keyValue { get; set; }
}
}
#endregion
}
/// <summary>
/// A JsonWebSecurityToken generated by SharePoint to authenticate to a 3rd party
/// application and allow callbacks using a refresh token.
/// </summary>
public class SharePointContextToken : JsonWebSecurityToken
{
    /// <summary>
    /// Copies an existing JsonWebSecurityToken into a SharePointContextToken.
    /// </summary>
    public static SharePointContextToken Create(JsonWebSecurityToken contextToken)
    {
        return new SharePointContextToken(contextToken.Issuer, contextToken.Audience, contextToken.ValidFrom, contextToken.ValidTo, contextToken.Claims);
    }

    public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims)
        : base(issuer, audience, validFrom, validTo, claims)
    {
    }

    public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims, SecurityToken issuerToken, JsonWebSecurityToken actorToken)
        : base(issuer, audience, validFrom, validTo, claims, issuerToken, actorToken)
    {
    }

    public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims, SigningCredentials signingCredentials)
        : base(issuer, audience, validFrom, validTo, claims, signingCredentials)
    {
    }

    /// <summary>
    /// The context token's "nameid" claim, or null if absent.
    /// </summary>
    public string NameId
    {
        get
        {
            return GetClaimValue(this, "nameid");
        }
    }

    /// <summary>
    /// The principal name portion of the context token's "appctxsender" claim
    /// </summary>
    public string TargetPrincipalName
    {
        get
        {
            string appctxsender = GetClaimValue(this, "appctxsender");
            if (appctxsender == null)
            {
                return null;
            }
            // "appctxsender" has the form "principal@realm"; keep the principal part.
            return appctxsender.Split('@')[0];
        }
    }

    /// <summary>
    /// The context token's "refreshtoken" claim
    /// </summary>
    public string RefreshToken
    {
        get
        {
            return GetClaimValue(this, "refreshtoken");
        }
    }

    /// <summary>
    /// The context token's "CacheKey" claim
    /// </summary>
    public string CacheKey
    {
        get
        {
            return GetAppContextClaimProperty("CacheKey");
        }
    }

    /// <summary>
    /// The context token's "SecurityTokenServiceUri" claim
    /// </summary>
    public string SecurityTokenServiceUri
    {
        get
        {
            return GetAppContextClaimProperty("SecurityTokenServiceUri");
        }
    }

    /// <summary>
    /// The realm portion of the context token's "audience" claim
    /// </summary>
    public string Realm
    {
        get
        {
            string aud = Audience;
            if (aud == null)
            {
                return null;
            }
            // Audience has the form "principal/host@realm"; take everything after '@'.
            string tokenRealm = aud.Substring(aud.IndexOf('@') + 1);
            return tokenRealm;
        }
    }

    /// <summary>
    /// Parses the JSON "appctx" claim and returns the named property from it,
    /// or null when the claim is absent. Shared by CacheKey and
    /// SecurityTokenServiceUri, which previously duplicated this logic.
    /// </summary>
    private string GetAppContextClaimProperty(string propertyName)
    {
        string appctx = GetClaimValue(this, "appctx");
        if (appctx == null)
        {
            return null;
        }
        // ClientContext is used only for its JSON parsing helper; the URL is a placeholder.
        ClientContext ctx = new ClientContext("http://tempuri.org");
        Dictionary<string, object> dict = (Dictionary<string, object>)ctx.ParseObjectFromJsonString(appctx);
        return (string)dict[propertyName];
    }

    /// <summary>
    /// Returns the value of the first claim of the given type, or null when no
    /// such claim exists.
    /// </summary>
    /// <exception cref="ArgumentNullException">token is null.</exception>
    private static string GetClaimValue(JsonWebSecurityToken token, string claimType)
    {
        if (token == null)
        {
            throw new ArgumentNullException("token");
        }
        foreach (JsonWebTokenClaim claim in token.Claims)
        {
            if (StringComparer.Ordinal.Equals(claim.ClaimType, claimType))
            {
                return claim.Value;
            }
        }
        return null;
    }
}
/// <summary>
/// Represents a security token which contains multiple security keys that are generated using symmetric algorithms.
/// </summary>
public class MultipleSymmetricKeySecurityToken : SecurityToken
{
    /// <summary>
    /// Initializes a new instance of the MultipleSymmetricKeySecurityToken class.
    /// </summary>
    /// <param name="keys">An enumeration of Byte arrays that contain the symmetric keys.</param>
    public MultipleSymmetricKeySecurityToken(IEnumerable<byte[]> keys)
        : this(UniqueId.CreateUniqueId(), keys)
    {
    }

    /// <summary>
    /// Initializes a new instance of the MultipleSymmetricKeySecurityToken class.
    /// </summary>
    /// <param name="tokenId">The unique identifier of the security token.</param>
    /// <param name="keys">An enumeration of Byte arrays that contain the symmetric keys.</param>
    /// <exception cref="ArgumentNullException">keys is null.</exception>
    /// <exception cref="ArgumentException">tokenId is null/empty, or a key is null or empty.</exception>
    public MultipleSymmetricKeySecurityToken(string tokenId, IEnumerable<byte[]> keys)
    {
        if (keys == null)
        {
            throw new ArgumentNullException("keys");
        }
        if (String.IsNullOrEmpty(tokenId))
        {
            throw new ArgumentException("Value cannot be a null or empty string.", "tokenId");
        }
        // Snapshot the sequence so it is enumerated exactly once; the original
        // enumerated "keys" twice (validation + key creation), which breaks for
        // one-shot enumerables.
        List<byte[]> keyList = new List<byte[]>(keys);
        foreach (byte[] key in keyList)
        {
            // A null element previously surfaced as a NullReferenceException;
            // report it as an argument error instead.
            if (key == null || key.Length <= 0)
            {
                throw new ArgumentException("The key length must be greater than zero.", "keys");
            }
        }
        id = tokenId;
        effectiveTime = DateTime.UtcNow;
        securityKeys = CreateSymmetricSecurityKeys(keyList);
    }

    /// <summary>
    /// Gets the unique identifier of the security token.
    /// </summary>
    public override string Id
    {
        get
        {
            return id;
        }
    }

    /// <summary>
    /// Gets the cryptographic keys associated with the security token.
    /// </summary>
    public override ReadOnlyCollection<SecurityKey> SecurityKeys
    {
        get
        {
            return securityKeys.AsReadOnly();
        }
    }

    /// <summary>
    /// Gets the first instant in time at which this security token is valid.
    /// </summary>
    public override DateTime ValidFrom
    {
        get
        {
            return effectiveTime;
        }
    }

    /// <summary>
    /// Gets the last instant in time at which this security token is valid.
    /// </summary>
    public override DateTime ValidTo
    {
        get
        {
            // Never expire
            return DateTime.MaxValue;
        }
    }

    /// <summary>
    /// Returns a value that indicates whether the key identifier for this instance can be resolved to the specified key identifier.
    /// </summary>
    /// <param name="keyIdentifierClause">A SecurityKeyIdentifierClause to compare to this instance</param>
    /// <returns>true if keyIdentifierClause is a SecurityKeyIdentifierClause and it has the same unique identifier as the Id property; otherwise, false.</returns>
    public override bool MatchesKeyIdentifierClause(SecurityKeyIdentifierClause keyIdentifierClause)
    {
        if (keyIdentifierClause == null)
        {
            throw new ArgumentNullException("keyIdentifierClause");
        }
        // Since this is a symmetric token and we do not have IDs to distinguish tokens, we just check for the
        // presence of a SymmetricIssuerKeyIdentifier. The actual mapping to the issuer takes place later
        // when the key is matched to the issuer.
        if (keyIdentifierClause is SymmetricIssuerKeyIdentifierClause)
        {
            return true;
        }
        return base.MatchesKeyIdentifierClause(keyIdentifierClause);
    }

    #region private members
    // Wraps each raw key in an in-memory symmetric SecurityKey.
    private List<SecurityKey> CreateSymmetricSecurityKeys(IEnumerable<byte[]> keys)
    {
        List<SecurityKey> symmetricKeys = new List<SecurityKey>();
        foreach (byte[] key in keys)
        {
            symmetricKeys.Add(new InMemorySymmetricSecurityKey(key));
        }
        return symmetricKeys;
    }

    // All three fields are assigned only in the constructor.
    private readonly string id;
    private readonly DateTime effectiveTime;
    private readonly List<SecurityKey> securityKeys;
    #endregion
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Text;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Symbols;
using Microsoft.CodeAnalysis.Scripting.Hosting;
namespace Microsoft.CodeAnalysis.CSharp.Scripting.Hosting
{
// C#-specific implementation of the scripting ObjectFormatter: supplies C#
// literal formatting, primitive type keywords, and array/generated-type names.
public sealed class CSharpObjectFormatter : ObjectFormatter
{
// Singleton; the formatter holds no per-instance state.
public static CSharpObjectFormatter Instance { get; } = new CSharpObjectFormatter();
private CSharpObjectFormatter()
{
}
internal override object VoidDisplayString => "<void>";
internal override string NullLiteral => ObjectDisplay.NullLiteral;
internal override string GenericParameterOpening => "<";
internal override string GenericParameterClosing => ">";
// The FormatLiteral overloads below translate the bool flags into
// ObjectDisplayOptions and delegate to Roslyn's ObjectDisplay helper.
internal override string FormatLiteral(bool value)
{
return ObjectDisplay.FormatLiteral(value);
}
internal override string FormatLiteral(string value, bool quote, bool useHexadecimalNumbers = false)
{
var options = ObjectDisplayOptions.None;
if (quote)
{
options |= ObjectDisplayOptions.UseQuotes;
}
if (useHexadecimalNumbers)
{
options |= ObjectDisplayOptions.UseHexadecimalNumbers;
}
return ObjectDisplay.FormatLiteral(value, options);
}
internal override string FormatLiteral(char c, bool quote, bool includeCodePoints = false, bool useHexadecimalNumbers = false)
{
var options = ObjectDisplayOptions.None;
if (quote)
{
options |= ObjectDisplayOptions.UseQuotes;
}
if (includeCodePoints)
{
options |= ObjectDisplayOptions.IncludeCodePoints;
}
if (useHexadecimalNumbers)
{
options |= ObjectDisplayOptions.UseHexadecimalNumbers;
}
return ObjectDisplay.FormatLiteral(c, options);
}
internal override string FormatLiteral(sbyte value, bool useHexadecimalNumbers = false)
{
return ObjectDisplay.FormatLiteral(value, GetObjectDisplayOptions(useHexadecimalNumbers));
}
internal override string FormatLiteral(byte value, bool useHexadecimalNumbers = false)
{
return ObjectDisplay.FormatLiteral(value, GetObjectDisplayOptions(useHexadecimalNumbers));
}
internal override string FormatLiteral(short value, bool useHexadecimalNumbers = false)
{
return ObjectDisplay.FormatLiteral(value, GetObjectDisplayOptions(useHexadecimalNumbers));
}
internal override string FormatLiteral(ushort value, bool useHexadecimalNumbers = false)
{
return ObjectDisplay.FormatLiteral(value, GetObjectDisplayOptions(useHexadecimalNumbers));
}
internal override string FormatLiteral(int value, bool useHexadecimalNumbers = false)
{
return ObjectDisplay.FormatLiteral(value, GetObjectDisplayOptions(useHexadecimalNumbers));
}
internal override string FormatLiteral(uint value, bool useHexadecimalNumbers = false)
{
return ObjectDisplay.FormatLiteral(value, GetObjectDisplayOptions(useHexadecimalNumbers));
}
internal override string FormatLiteral(long value, bool useHexadecimalNumbers = false)
{
return ObjectDisplay.FormatLiteral(value, GetObjectDisplayOptions(useHexadecimalNumbers));
}
internal override string FormatLiteral(ulong value, bool useHexadecimalNumbers = false)
{
return ObjectDisplay.FormatLiteral(value, GetObjectDisplayOptions(useHexadecimalNumbers));
}
internal override string FormatLiteral(double value)
{
return ObjectDisplay.FormatLiteral(value, ObjectDisplayOptions.None);
}
internal override string FormatLiteral(float value)
{
return ObjectDisplay.FormatLiteral(value, ObjectDisplayOptions.None);
}
internal override string FormatLiteral(decimal value)
{
return ObjectDisplay.FormatLiteral(value, ObjectDisplayOptions.None);
}
internal override string FormatLiteral(DateTime value)
{
// DateTime is not primitive in C#
return null;
}
// Maps special types to their C# keywords; returns null for types that have
// no keyword form.
internal override string GetPrimitiveTypeName(SpecialType type)
{
switch (type)
{
case SpecialType.System_Boolean: return "bool";
case SpecialType.System_Byte: return "byte";
case SpecialType.System_Char: return "char";
case SpecialType.System_Decimal: return "decimal";
case SpecialType.System_Double: return "double";
case SpecialType.System_Int16: return "short";
case SpecialType.System_Int32: return "int";
case SpecialType.System_Int64: return "long";
case SpecialType.System_SByte: return "sbyte";
case SpecialType.System_Single: return "float";
case SpecialType.System_String: return "string";
case SpecialType.System_UInt16: return "ushort";
case SpecialType.System_UInt32: return "uint";
case SpecialType.System_UInt64: return "ulong";
case SpecialType.System_Object: return "object";
default:
return null;
}
}
// Recovers the original source method name from a compiler-generated state
// machine type name; returns null when the name is not a state machine name.
internal override string FormatGeneratedTypeName(Type type)
{
string stateMachineName;
if (GeneratedNames.TryParseSourceMethodNameFromGeneratedName(type.Name, GeneratedNameKind.StateMachineType, out stateMachineName))
{
return stateMachineName;
}
return null;
}
// Formats an array type as C# source would. When arrayOpt is supplied, the
// outermost dimension shows the instance's actual lengths (or "lo..hi" bounds
// if any dimension has a non-zero lower bound); inner dimensions of a jagged
// array are printed as bare ranks.
internal override string FormatArrayTypeName(Type arrayType, Array arrayOpt, ObjectFormattingOptions options)
{
StringBuilder sb = new StringBuilder();
// print the inner-most element type first:
Type elementType = arrayType.GetElementType();
while (elementType.IsArray)
{
elementType = elementType.GetElementType();
}
sb.Append(FormatTypeName(elementType, options));
// print all components of a jagged array:
Type type = arrayType;
do
{
if (arrayOpt != null)
{
sb.Append('[');
int rank = type.GetArrayRank();
// Only use the "lower..upper" form if some dimension starts above zero.
bool anyNonzeroLowerBound = false;
for (int i = 0; i < rank; i++)
{
if (arrayOpt.GetLowerBound(i) > 0)
{
anyNonzeroLowerBound = true;
break;
}
}
for (int i = 0; i < rank; i++)
{
int lowerBound = arrayOpt.GetLowerBound(i);
int length = arrayOpt.GetLength(i);
if (i > 0)
{
sb.Append(", ");
}
if (anyNonzeroLowerBound)
{
AppendArrayBound(sb, lowerBound, options.UseHexadecimalNumbers);
sb.Append("..");
AppendArrayBound(sb, length + lowerBound, options.UseHexadecimalNumbers);
}
else
{
AppendArrayBound(sb, length, options.UseHexadecimalNumbers);
}
}
sb.Append(']');
// Concrete bounds apply only to the outermost dimension of a jagged array.
arrayOpt = null;
}
else
{
AppendArrayRank(sb, type);
}
type = type.GetElementType();
}
while (type.IsArray);
return sb.ToString();
}
// Formats a single bound, preferring the int overload when it fits.
private void AppendArrayBound(StringBuilder sb, long bound, bool useHexadecimalNumbers)
{
if (bound <= int.MaxValue)
{
sb.Append(FormatLiteral((int)bound, useHexadecimalNumbers));
}
else
{
sb.Append(FormatLiteral(bound, useHexadecimalNumbers));
}
}
// Appends "[,,...]" with one comma per extra dimension.
private static void AppendArrayRank(StringBuilder sb, Type arrayType)
{
sb.Append('[');
int rank = arrayType.GetArrayRank();
if (rank > 1)
{
sb.Append(',', rank - 1);
}
sb.Append(']');
}
internal override string FormatMemberName(System.Reflection.MemberInfo member)
{
return member.Name;
}
internal override bool IsHiddenMember(System.Reflection.MemberInfo member)
{
// Generated fields, e.g. "<property_name>k__BackingField"
return GeneratedNames.IsGeneratedMemberName(member.Name);
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using Microsoft.Win32.SafeHandles;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
using Windows.Storage;
namespace System.IO
{
// FileStream implementation backed by a WinRT StorageFile stream. Mostly a
// thin wrapper over the adapted WinRT stream, plus emulation of features WinRT
// lacks (DeleteOnClose, length validation in SetLength).
internal sealed class WinRTFileStream : FileStreamBase
{
    private readonly FileAccess _access;
    private bool _disposed;
    private StorageFile _file;
    private readonly Stream _innerStream;
    private readonly FileOptions _options;
    // WinRT streams expose no OS handle; hand out a shared invalid handle instead.
    private static readonly SafeFileHandle s_invalidHandle = new SafeFileHandle(IntPtr.Zero, false);

    internal WinRTFileStream(Stream innerStream, StorageFile file, FileAccess access, FileOptions options, FileStream parent)
        : base(parent)
    {
        Debug.Assert(innerStream != null);
        Debug.Assert(file != null);
        this._access = access;
        this._disposed = false;
        this._file = file;
        this._innerStream = innerStream;
        this._options = options;
    }

    // Finalizer ensures Dispose(false) runs so DeleteOnClose is still honored
    // when the stream is never explicitly disposed.
    ~WinRTFileStream()
    {
        Dispose(false);
    }

    #region FileStream members
    public override bool IsAsync { get { return true; } }
    public override string Name { get { return _file.Name; } }
    public override Microsoft.Win32.SafeHandles.SafeFileHandle SafeFileHandle { get { return s_invalidHandle; } }

    public override void Flush(bool flushToDisk)
    {
        // WinRT streams are not buffered, however the WinRT stream will be wrapped in a BufferedStream
        // Flush & FlushAsync will flush the internal managed buffer of the BufferedStream wrapper
        // The WinRT stream only exposes FlushAsync which flushes to disk.
        // The managed Stream adapter does nothing for Flush() and forwards to WinRT for FlushAsync (flushing to disk).
        if (flushToDisk)
        {
            // FlushAsync() will do the write to disk when it hits the WinRT->NetFx adapter
            Task flushTask = _innerStream.FlushAsync();
            flushTask.Wait();
        }
        else
        {
            // Flush doesn't write to disk
            _innerStream.Flush();
        }
    }
    #endregion

    #region Stream members
    #region Properties
    public override bool CanRead
    {
        // WinRT doesn't support write-only streams, override what the stream tells us
        // with what was passed in when creating it.
        get { return _innerStream.CanRead && (_access & FileAccess.Read) != 0; }
    }

    public override bool CanSeek
    {
        get { return _innerStream.CanSeek; }
    }

    public override bool CanWrite
    {
        get { return _innerStream.CanWrite; }
    }

    public override long Length
    {
        get { return _innerStream.Length; }
    }

    public override long Position
    {
        get { return _innerStream.Position; }
        set { _innerStream.Position = value; }
    }

    public override int ReadTimeout
    {
        get { return _innerStream.ReadTimeout; }
        set { _innerStream.ReadTimeout = value; }
    }

    public override bool CanTimeout
    {
        get { return _innerStream.CanTimeout; }
    }

    public override int WriteTimeout
    {
        get { return _innerStream.WriteTimeout; }
        set { _innerStream.WriteTimeout = value; }
    }
    #endregion Properties

    #region Methods
    public override Task CopyToAsync(Stream destination, int bufferSize, CancellationToken cancellationToken)
    {
        return _innerStream.CopyToAsync(destination, bufferSize, cancellationToken);
    }

    protected override void Dispose(bool disposing)
    {
        try
        {
            if (disposing)
                _innerStream.Dispose();
            if ((_options & FileOptions.DeleteOnClose) != 0 && _file != null)
            {
                // WinRT doesn't directly support DeleteOnClose but we can mimick it
                // There are a few reasons that this will fail
                // 1) the file may not allow delete permissions for the current user
                // 2) the storage file RCW may have already been disconnected
                try
                {
                    _file.DeleteAsync().AsTask().Wait();
                }
                catch { }  // deliberate best-effort: deletion failure must not mask disposal
            }
            _disposed = true;
            _file = null;
        }
        finally
        {
            base.Dispose(disposing);
        }
    }

    public override void Flush()
    {
        _parent.Flush(false);
    }

    public override Task FlushAsync(CancellationToken cancellationToken)
    {
        return _innerStream.FlushAsync(cancellationToken);
    }

    public override int Read(byte[] buffer, int offset, int count)
    {
        // When disposed, defer to the inner stream so the standard
        // ObjectDisposedException surfaces instead of NotSupported.
        if (!_disposed && !CanRead)
            throw __Error.GetReadNotSupported();
        return _innerStream.Read(buffer, offset, count);
    }

    public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
    {
        if (!_disposed && !CanRead)
            throw __Error.GetReadNotSupported();
        return _innerStream.ReadAsync(buffer, offset, count, cancellationToken);
    }

    public override int ReadByte()
    {
        if (!_disposed && !CanRead)
            throw __Error.GetReadNotSupported();
        return _innerStream.ReadByte();
    }

    public override long Seek(long offset, SeekOrigin origin)
    {
        if (origin == SeekOrigin.Begin && offset < 0)
            throw Win32Marshal.GetExceptionForWin32Error(Interop.mincore.Errors.ERROR_NEGATIVE_SEEK);
        return _innerStream.Seek(offset, origin);
    }

    public override void SetLength(long value)
    {
        _innerStream.SetLength(value);
        // WinRT ignores all errors when setting length, check after setting
        if (_innerStream.Length < value)
        {
            throw new ArgumentOutOfRangeException("value", SR.ArgumentOutOfRange_FileLengthTooBig);
        }
        else if (_innerStream.Length != value)
        {
            // Bug fix: the original threw new ArgumentException("value"), which used
            // the parameter name as the exception MESSAGE. Provide a real message
            // and pass "value" as the parameter name.
            throw new ArgumentException("The stream length could not be set to the requested value.", "value");
        }
        // WinRT doesn't update the position when truncating a file
        if (value < _innerStream.Position)
            _innerStream.Position = value;
    }

    public override string ToString()
    {
        return _innerStream.ToString();
    }

    public override void Write(byte[] buffer, int offset, int count)
    {
        _innerStream.Write(buffer, offset, count);
    }

    public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
    {
        return _innerStream.WriteAsync(buffer, offset, count, cancellationToken);
    }

    public override void WriteByte(byte value)
    {
        _innerStream.WriteByte(value);
    }
    #endregion Methods
    #endregion Stream members
}
}
| |
/*
* Infoplus API
*
* Infoplus API.
*
* OpenAPI spec version: v1.0
* Contact: [email protected]
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
namespace Infoplus.Model
{
/// <summary>
/// Replenishment
/// </summary>
[DataContract]
public partial class Replenishment : IEquatable<Replenishment>
{
    /// <summary>
    /// Initializes a new instance of the <see cref="Replenishment" /> class.
    /// Reserved for the JSON deserializer.
    /// </summary>
    [JsonConstructorAttribute]
    protected Replenishment() { }

    /// <summary>
    /// Initializes a new instance of the <see cref="Replenishment" /> class.
    /// </summary>
    /// <param name="LocationId">LocationId (required).</param>
    /// <param name="Sku">Sku.</param>
    public Replenishment(int? LocationId = null, string Sku = null)
    {
        // "LocationId" is a required property, so a null value is rejected outright.
        if (LocationId == null)
        {
            throw new InvalidDataException("LocationId is a required property for Replenishment and cannot be null");
        }
        this.LocationId = LocationId;
        this.Sku = Sku;
    }

    /// <summary>
    /// Gets or Sets Id
    /// </summary>
    [DataMember(Name="id", EmitDefaultValue=false)]
    public int? Id { get; private set; }

    /// <summary>
    /// Gets or Sets CreateDate
    /// </summary>
    [DataMember(Name="createDate", EmitDefaultValue=false)]
    public DateTime? CreateDate { get; private set; }

    /// <summary>
    /// Gets or Sets ModifyDate
    /// </summary>
    [DataMember(Name="modifyDate", EmitDefaultValue=false)]
    public DateTime? ModifyDate { get; private set; }

    /// <summary>
    /// Gets or Sets ReplenishmentProcess
    /// </summary>
    [DataMember(Name="replenishmentProcess", EmitDefaultValue=false)]
    public int? ReplenishmentProcess { get; private set; }

    /// <summary>
    /// Gets or Sets PickFaceAssignment
    /// </summary>
    [DataMember(Name="pickFaceAssignment", EmitDefaultValue=false)]
    public int? PickFaceAssignment { get; private set; }

    /// <summary>
    /// Gets or Sets LocationId
    /// </summary>
    [DataMember(Name="locationId", EmitDefaultValue=false)]
    public int? LocationId { get; set; }

    /// <summary>
    /// Gets or Sets Quantity
    /// </summary>
    [DataMember(Name="quantity", EmitDefaultValue=false)]
    public int? Quantity { get; private set; }

    /// <summary>
    /// Gets or Sets Sku
    /// </summary>
    [DataMember(Name="sku", EmitDefaultValue=false)]
    public string Sku { get; set; }

    /// <summary>
    /// Returns the string presentation of the object
    /// </summary>
    /// <returns>String presentation of the object</returns>
    public override string ToString()
    {
        var text = new StringBuilder();
        text.Append("class Replenishment {\n")
            .Append(" Id: ").Append(Id).Append("\n")
            .Append(" CreateDate: ").Append(CreateDate).Append("\n")
            .Append(" ModifyDate: ").Append(ModifyDate).Append("\n")
            .Append(" ReplenishmentProcess: ").Append(ReplenishmentProcess).Append("\n")
            .Append(" PickFaceAssignment: ").Append(PickFaceAssignment).Append("\n")
            .Append(" LocationId: ").Append(LocationId).Append("\n")
            .Append(" Quantity: ").Append(Quantity).Append("\n")
            .Append(" Sku: ").Append(Sku).Append("\n")
            .Append("}\n");
        return text.ToString();
    }

    /// <summary>
    /// Returns the JSON string presentation of the object
    /// </summary>
    /// <returns>JSON string presentation of the object</returns>
    public string ToJson()
    {
        return JsonConvert.SerializeObject(this, Formatting.Indented);
    }

    /// <summary>
    /// Returns true if objects are equal
    /// </summary>
    /// <param name="obj">Object to be compared</param>
    /// <returns>Boolean</returns>
    public override bool Equals(object obj)
    {
        return this.Equals(obj as Replenishment);
    }

    // Null-tolerant field comparison: true when both are null or when the
    // non-null left side reports equality with the right side.
    private static bool AreEqual(object left, object right)
    {
        return left == right || (left != null && left.Equals(right));
    }

    /// <summary>
    /// Returns true if Replenishment instances are equal
    /// </summary>
    /// <param name="other">Instance of Replenishment to be compared</param>
    /// <returns>Boolean</returns>
    public bool Equals(Replenishment other)
    {
        if (other == null)
        {
            return false;
        }
        return AreEqual(this.Id, other.Id)
            && AreEqual(this.CreateDate, other.CreateDate)
            && AreEqual(this.ModifyDate, other.ModifyDate)
            && AreEqual(this.ReplenishmentProcess, other.ReplenishmentProcess)
            && AreEqual(this.PickFaceAssignment, other.PickFaceAssignment)
            && AreEqual(this.LocationId, other.LocationId)
            && AreEqual(this.Quantity, other.Quantity)
            && AreEqual(this.Sku, other.Sku);
    }

    /// <summary>
    /// Gets the hash code
    /// </summary>
    /// <returns>Hash code</returns>
    public override int GetHashCode()
    {
        unchecked // Overflow is fine, just wrap
        {
            // Fold each non-null field into the hash in declaration order;
            // boxed nullables hash like their underlying values, so the result
            // matches a field-by-field computation.
            int hash = 41;
            object[] fields =
            {
                Id, CreateDate, ModifyDate, ReplenishmentProcess,
                PickFaceAssignment, LocationId, Quantity, Sku
            };
            foreach (object field in fields)
            {
                if (field != null)
                {
                    hash = hash * 59 + field.GetHashCode();
                }
            }
            return hash;
        }
    }
}
}
| |
using System;
using NDatabase.Api.Query;
using NDatabase.Core.Query.Criteria.Evaluations;
using NDatabase.Meta;
using NDatabase.Tool.Wrappers;
namespace NDatabase.Core.Query.Criteria
{
internal sealed class QueryConstraint : IInternalConstraint
{
/// <summary>
/// The name of the attribute involved by this criterion
/// </summary>
private readonly string _attributeName;
/// <summary>
/// The query containing the criterion
/// </summary>
private readonly IQuery _query;
// The constraint value the attribute is compared against (may be null).
private readonly object _theObject;
// The active evaluation strategy; replaced whenever one of the comparison
// methods (Equal, Like, Greater, SizeEq, ...) is called on this constraint.
private IEvaluation _evaluation;
/// <summary>
/// Creates a constraint on <paramref name="fieldName"/>, registers it with the
/// owning query, and defaults to an equality evaluation until a comparison
/// method overrides it.
/// </summary>
public QueryConstraint(IQuery query, string fieldName, object theObject)
{
// NOTE(review): an empty fieldName arguably warrants ArgumentException rather
// than ArgumentNullException; kept as-is since callers may depend on the type.
if (query == null)
throw new ArgumentNullException("query");
if (string.IsNullOrEmpty(fieldName))
throw new ArgumentNullException("fieldName");
_query = query;
_attributeName = fieldName;
_theObject = theObject;
// Register this criterion with the query it belongs to.
((IInternalQuery) _query).Add(this);
_evaluation = new EqualsEvaluation(_theObject, _attributeName, _query);
}
#region IInternalConstraint Members
/// <summary>
/// Evaluates the supplied value against the current evaluation strategy.
/// A constraint without an evaluation matches everything.
/// </summary>
public bool Match(object valueToMatch)
{
    if (_evaluation == null)
    {
        return true;
    }
    return _evaluation.Evaluate(valueToMatch);
}
/// <summary>
/// Switches this constraint to an equality evaluation. If a Greater/Smaller
/// comparison was requested beforehand, equality is folded into it, producing
/// a greater-or-equal / smaller-or-equal comparison instead.
/// </summary>
public IConstraint Equal()
{
// NOTE(review): IsPreEvaluationTheGreater/Smaller are defined elsewhere in this
// class; presumably they test whether the current evaluation is a Gt/Lt
// comparison - confirm against the full class before relying on this summary.
if (IsPreEvaluationTheGreater())
{
_evaluation = new ComparisonEvaluation(_theObject, _attributeName,
ComparisonConstraint.ComparisonTypeGe);
return this;
}
if (IsPreEvaluationTheSmaller())
{
_evaluation = new ComparisonEvaluation(_theObject, _attributeName,
ComparisonConstraint.ComparisonTypeLe);
return this;
}
_evaluation = new EqualsEvaluation(_theObject, _attributeName, _query);
return this;
}
public IConstraint Identity()
{
_evaluation = new IdentityEvaluation(_theObject, _attributeName, _query);
return this;
}
public IConstraint Like()
{
_evaluation = new LikeEvaluation(_theObject, _attributeName);
return this;
}
public IConstraint InvariantLike()
{
_evaluation = new LikeEvaluation(_theObject, _attributeName, false);
return this;
}
public IConstraint Contains()
{
_evaluation = new ContainsEvaluation(_theObject, _attributeName, _query);
return this;
}
public IConstraint Greater()
{
_evaluation = new ComparisonEvaluation(_theObject, _attributeName,
ComparisonConstraint.ComparisonTypeGt);
return this;
}
public IConstraint Smaller()
{
_evaluation = new ComparisonEvaluation(_theObject, _attributeName,
ComparisonConstraint.ComparisonTypeLt);
return this;
}
public IConstraint SizeEq()
{
_evaluation = new CollectionSizeEvaluation(_theObject, _attributeName, _query, CollectionSizeEvaluation.SizeEq);
return this;
}
public IConstraint SizeNe()
{
_evaluation = new CollectionSizeEvaluation(_theObject, _attributeName, _query, CollectionSizeEvaluation.SizeNe);
return this;
}
public IConstraint SizeGt()
{
_evaluation = new CollectionSizeEvaluation(_theObject, _attributeName, _query, CollectionSizeEvaluation.SizeGt);
return this;
}
public IConstraint SizeGe()
{
_evaluation = new CollectionSizeEvaluation(_theObject, _attributeName, _query, CollectionSizeEvaluation.SizeGe);
return this;
}
public IConstraint SizeLt()
{
_evaluation = new CollectionSizeEvaluation(_theObject, _attributeName, _query, CollectionSizeEvaluation.SizeLt);
return this;
}
public object GetObject()
{
return _theObject;
}
public IConstraint EndsWith(bool isCaseSensitive)
{
_evaluation = new EndsWithEvaluation(_theObject, _attributeName, isCaseSensitive);
return this;
}
public IConstraint StartsWith(bool isCaseSensitive)
{
_evaluation = new StartsWithEvaluation(_theObject, _attributeName, isCaseSensitive);
return this;
}
public IConstraint SizeLe()
{
_evaluation = new CollectionSizeEvaluation(_theObject, _attributeName, _query, CollectionSizeEvaluation.SizeLe);
return this;
}
public bool CanUseIndex()
{
return _evaluation is EqualsEvaluation || _evaluation is IdentityEvaluation;
}
public AttributeValuesMap GetValues()
{
var equalsEvaluation = _evaluation as EqualsEvaluation;
var identityEvaluation = _evaluation as IdentityEvaluation;
return equalsEvaluation != null
? equalsEvaluation.GetValues()
: identityEvaluation != null
? identityEvaluation.GetValues()
: new AttributeValuesMap();
}
/// <summary>
/// An abstract criterion only restrict one field => it returns a list of one field!
/// </summary>
/// <returns> The list of involved field of the criteria </returns>
public IOdbList<string> GetAllInvolvedFields()
{
return new OdbList<string>(1) {_attributeName};
}
public IConstraint And(IConstraint with)
{
return new And(_query).Add(this).Add(with);
}
public IConstraint Or(IConstraint with)
{
return new Or(_query).Add(this).Add(with);
}
public IConstraint Not()
{
return new Not(_query, this);
}
#endregion
public override string ToString()
{
return _evaluation == null ? base.ToString() : _evaluation.ToString();
}
private bool IsPreEvaluationTheGreater()
{
var evaluation = _evaluation as ComparisonEvaluation;
return evaluation != null && evaluation.ComparisonType == ComparisonConstraint.ComparisonTypeGt;
}
private bool IsPreEvaluationTheSmaller()
{
var evaluation = _evaluation as ComparisonEvaluation;
return evaluation != null && evaluation.ComparisonType == ComparisonConstraint.ComparisonTypeLt;
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
namespace Microsoft.DocAsCode.Glob
{
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.RegularExpressions;
using System.Linq;
/// <summary>
/// Matches file paths against a single glob pattern (e.g. <c>src/**/*.cs</c>).
/// The pattern is compiled once in the constructor into per-path-segment items
/// (plain text, regex, or globstar markers); matching is then a dynamic program
/// over path segments (see <see cref="MatchOne"/>).
/// </summary>
[Serializable]
public class GlobMatcher
{
    #region Private fields
    private static readonly string[] EmptyString = new string[0];
    private const char NegateChar = '!';
    private const string GlobStar = "**";
    private const string ReplacerGroupName = "replacer";
    // Characters that must be backslash-escaped when emitted into a .NET regex.
    private static readonly HashSet<char> NeedEscapeCharactersInRegex = new HashSet<char>(@"'().*{}+?[]^$\!".ToCharArray());
    // Captures the character following a backslash so escaped globs can be unescaped.
    private static readonly Regex UnescapeGlobRegex = new Regex(@"\\(?<replacer>.)", RegexOptions.Compiled);
    /// <summary>
    /// start with * and has more than one * and followed by anything except * or /
    /// </summary>
    private static readonly Regex ExpandGlobStarRegex = new Regex(@"^\*{2,}(?=[^/*])", RegexOptions.Compiled);
    // Never match .abc file unless AllowDotMatch option is set
    private const string PatternStartWithDotAllowed = @"(?!(?:^|\/)\.{1,2}(?:$|\/))";
    private const string PatternStartWithoutDotAllowed = @"(?!\.)";
    // First characters of a generated regex fragment after which the dot-guard
    // prefix may still need to be prepended (see ConvertSingleGlobPart).
    private static readonly HashSet<char> RegexCharactersWithDotPossible = new HashSet<char>(new char[] { '.', '[', '(' });
    /// <summary>
    /// Any character other than /
    /// </summary>
    private const string QuestionMarkToRegex = "[^/]";
    /// <summary>
    /// Any number of character other than /, non-greedy mode
    /// </summary>
    private const string SingleStarToRegex = "[^/]*?";
    // A segment consisting solely of two-or-more stars, optionally ending with /.
    private static readonly Regex GlobStarRegex = new Regex(@"^\*{2,}/?$", RegexOptions.Compiled);
    // Compiled pattern: outer array = expanded {a,b} alternatives,
    // inner array = one item per path segment of that alternative.
    private GlobRegexItem[][] _items;
    private bool _negate = false;
    private bool _ignoreCase = false;
    #endregion
    public const GlobMatcherOptions DefaultOptions = GlobMatcherOptions.AllowNegate | GlobMatcherOptions.IgnoreCase | GlobMatcherOptions.AllowGlobStar | GlobMatcherOptions.AllowExpand | GlobMatcherOptions.AllowEscape;
    public GlobMatcherOptions Options { get; }
    // The pattern text exactly as supplied, before negation stripping.
    public string Raw { get; }
    public GlobMatcher(string pattern, GlobMatcherOptions options = DefaultOptions)
    {
        if (pattern == null) throw new ArgumentNullException(nameof(pattern));
        Options = options;
        Raw = pattern;
        _ignoreCase = Options.HasFlag(GlobMatcherOptions.IgnoreCase);
        // ParseNegate strips leading '!' characters from the pattern in place.
        _negate = ParseNegate(ref pattern, Options);
        _items = Compile(pattern).ToArray();
    }
    /// <summary>
    /// Builds a single .NET <see cref="Regex"/> equivalent to this glob.
    /// Currently not used
    /// TODO: add test case
    /// </summary>
    /// <param name="glob"></param>
    /// <returns></returns>
    public Regex GetRegex()
    {
        var regexParts = _items.Select(s => ConvertSingleGlob(s));
        var content = string.Join("|", regexParts);
        // Matches the entire pattern
        content = $"^(?:{content})$";
        if (_negate)
        {
            // Matches whatever not current pattern
            content = $"^(?!{content}).*$";
        }
        if (_ignoreCase)
        {
            return new Regex(content, RegexOptions.IgnoreCase);
        }
        else
        {
            return new Regex(content);
        }
    }
    /// <summary>
    /// Tests whether <paramref name="file"/> matches this glob.
    /// </summary>
    /// <param name="file">Path to test; both / and \ are accepted as separators.</param>
    /// <param name="partial">When true, a prefix match of the glob is enough.</param>
    public bool Match(string file, bool partial = false)
    {
        if (file == null) throw new ArgumentNullException(nameof(file));
        var fileParts = Split(file, '/', '\\').ToArray();
        bool isMatch = false;
        // The file matches when any expanded alternative matches.
        foreach(var glob in _items)
        {
            if (MatchOne(fileParts, glob, partial))
            {
                isMatch = true;
                break;
            }
        }
        // XOR flips the result for negated ('!') patterns.
        return _negate ^ isMatch;
    }
    #region Private methods
    // Expands {a,b} groups (when allowed) and converts each alternative into
    // an array of per-segment GlobRegexItems.
    private IEnumerable<GlobRegexItem[]> Compile(string pattern)
    {
        string[] globs;
        if (Options.HasFlag(GlobMatcherOptions.AllowExpand))
        {
            globs = ExpandGroup(pattern, Options);
            if (globs.Length == 0) return Enumerable.Empty<GlobRegexItem[]>();
        }
        else
        {
            globs = new string[] { pattern };
        }
        // **.cs is a shortcut for **/*.cs
        var items = globs
            .Select(glob => ExpandGlobStarShortcut(Split(glob, '/')).Select(s => ConvertSingleGlobPart(s)).ToArray());
        return items;
    }
    // Splits a path into segments; every segment except possibly the last keeps
    // a trailing "/" so folder segments are distinguishable from the file name.
    private IEnumerable<string> Split(string path, params char[] splitter)
    {
        var parts = path.Split(splitter, StringSplitOptions.RemoveEmptyEntries);
        if (parts.Length == 0) yield break;
        for (int i = 0; i < parts.Length - 1; i++)
        {
            yield return parts[i] + "/";
        }
        yield return path.EndsWith("/") ? parts[parts.Length - 1] + "/" : parts[parts.Length - 1];
    }
    // Joins the per-segment regexes of one alternative with escaped slashes.
    private string ConvertSingleGlob(IEnumerable<GlobRegexItem> regexItems)
    {
        var items = regexItems.Select(s => GlobRegexItemToRegex(s));
        return string.Join(@"\/", items);
    }
    private bool IsFolderPath(string path)
    {
        return path.EndsWith("/");
    }
    /// <summary>
    /// Convert each part to Regex
    /// </summary>
    /// <param name="globPart">Part of glob that does not contain '/'</param>
    /// <returns></returns>
    private GlobRegexItem ConvertSingleGlobPart(string globPart)
    {
        // Return GlobStar for **
        if (Options.HasFlag(GlobMatcherOptions.AllowGlobStar) && GlobStarRegex.IsMatch(globPart))
        {
            return IsFolderPath(globPart) ? GlobRegexItem.GlobStar : GlobRegexItem.GlobStarForFileOnly;
        }
        StringBuilder builder = new StringBuilder();
        bool escaping = false;
        bool disableEscape = !Options.HasFlag(GlobMatcherOptions.AllowEscape);
        // Becomes true once any glob metacharacter (?, *, [...]) is seen;
        // otherwise the segment is kept as plain text for cheap comparison.
        bool hasMagic = false;
        CharClass currentCharClass = null;
        string patternStart = string.Empty;
        // .abc will not be matched unless . is explictly specified
        if (globPart.Length > 0 && globPart[0] != '.')
        {
            patternStart = Options.HasFlag(GlobMatcherOptions.AllowDotMatch) ? PatternStartWithDotAllowed : PatternStartWithoutDotAllowed;
        }
        for (int i = 0; i < globPart.Length; i++)
        {
            var c = globPart[i];
            switch (c)
            {
                case '\\':
                    if (!disableEscape)
                    {
                        i++;
                        if (i == globPart.Length)
                        {
                            // \ at the end of path part, invalid, not possible for file path, invalid
                            return GlobRegexItem.Empty;
                        }
                        else
                        {
                            // Emit the escaped character literally (re-escaped for regex if needed).
                            c = globPart[i];
                            if (NeedEscapeCharactersInRegex.Contains(c))
                            {
                                builder.Append('\\');
                            }
                            builder.Append(c);
                        }
                    }
                    else
                    {
                        builder.Append("\\\\");
                    }
                    break;
                case '?':
                    builder.Append(QuestionMarkToRegex);
                    hasMagic = true;
                    break;
                case '*':
                    builder.Append(SingleStarToRegex);
                    hasMagic = true;
                    break;
                case '[':
                    escaping = false;
                    currentCharClass = new CharClass();
                    int cur = i + 1;
                    while (cur < globPart.Length)
                    {
                        c = globPart[cur];
                        if (c == '\\') escaping = true;
                        else if (c == ']' && !escaping)
                        {
                            // current char class ends when meeting the first non-escaping ]
                            builder.Append(currentCharClass.ToString());
                            currentCharClass = null;
                            break;
                        }
                        // simply keeps what it is inside char class
                        currentCharClass.Add(c);
                        if (c != '\\') escaping = false;
                        cur++;
                    }
                    if (currentCharClass != null)
                    {
                        // no closing ] is found, fallback to no char class
                        builder.Append("\\[");
                    }
                    else
                    {
                        i = cur;
                        hasMagic = true;
                    }
                    break;
                default:
                    if (NeedEscapeCharactersInRegex.Contains(c))
                    {
                        builder.Append('\\');
                    }
                    builder.Append(c);
                    break;
            }
        }
        if (hasMagic)
        {
            var regexContent = builder.ToString();
            if (!string.IsNullOrEmpty(regexContent))
            {
                // when regex is not empty, make sure it does not match against empty path, e.g. a/* should not match a/
                // regex: if followed by anything
                regexContent = "(?=.)" + regexContent;
            }
            else
            {
                return GlobRegexItem.Empty;
            }
            // NOTE(review): after the "(?=.)" prepend, regexContent[0] is always
            // '(' which is in the set, so the dot-guard is applied to every
            // magic segment — presumably intended; confirm before changing.
            if (RegexCharactersWithDotPossible.Contains(regexContent[0]))
            {
                regexContent = patternStart + regexContent;
            }
            return new GlobRegexItem(regexContent, null, GlobRegexItemType.Regex, _ignoreCase);
        }
        else
        {
            // If does not contain any regex character, use the original string for regex
            // use escaped string as the string to be matched
            string plainText = UnescapeGlob(globPart);
            return new GlobRegexItem(globPart, plainText, GlobRegexItemType.PlainText, _ignoreCase);
        }
    }
    // Renders one compiled item back into regex source text (used by GetRegex).
    private string GlobRegexItemToRegex(GlobRegexItem item)
    {
        switch (item.ItemType)
        {
            case GlobRegexItemType.GlobStar:
            case GlobRegexItemType.GlobStarForFileOnly:
                // If globstar is disabled
                if (!Options.HasFlag(GlobMatcherOptions.AllowGlobStar))
                {
                    return SingleStarToRegex;
                }
                if (Options.HasFlag(GlobMatcherOptions.AllowDotMatch))
                {
                    // ** when dots are allowed, allows anything except .. and .
                    // not (^ or / followed by one or two dots followed by $ or /)
                    return @"(?:(?!(?:\/|^)(?:\.{1,2})($|\/)).)*?";
                }
                else
                {
                    // not (^ or / followed by a dot)
                    return @"(?:(?!(?:\/|^)\.).)*?";
                }
            case GlobRegexItemType.PlainText:
            case GlobRegexItemType.Regex:
                return item.RegexContent;
            default:
                throw new NotSupportedException($"{item.ItemType} is not current supported.");
        }
    }
    /// <summary>
    /// ** matches everything including "/" only when ** is after / or is the start of the pattern
    /// ** between characters has the same meaning as *
    /// **.cs equals to **/*.cs
    /// a**.cs equals to a*.cs
    /// </summary>
    /// <param name="globParts"></param>
    /// <returns></returns>
    private IEnumerable<string> ExpandGlobStarShortcut(IEnumerable<string> globParts)
    {
        foreach(var part in globParts)
        {
            if (ExpandGlobStarRegex.IsMatch(part))
            {
                // e.g. "**.cs" becomes the two segments "**/" and "*.cs".
                yield return GlobStar + "/";
                yield return ExpandGlobStarRegex.Replace(part, "*");
            }
            else
            {
                yield return part;
            }
        }
    }
    // Dynamic program over path segments vs. glob segments, iterated from the
    // END of both arrays (note the Length - i - 1 indexing). status is a
    // two-row rolling table: status[row, j+1] == "the last j glob segments can
    // match the segments consumed so far". GlobStar rows may absorb zero or
    // more folder segments.
    private bool MatchOne(string[] fileParts, GlobRegexItem[] globParts, bool matchPartialGlob)
    {
        bool[,] status = new bool[2, globParts.Length + 1];
        int prev = 0;
        int cur = 1;
        status[0, 0] = true;
        // Base row: with no file segments consumed, only globstars (or any
        // suffix, for partial matching) may match "nothing".
        for (int j = 0; j < globParts.Length; j++)
        {
            if (matchPartialGlob)
            {
                status[0, j + 1] = true;
            }
            else
            {
                var globPart = globParts[globParts.Length - j - 1];
                if (globPart.ItemType == GlobRegexItemType.GlobStar) status[0, j + 1] = status[0, j];
                else status[0, j + 1] = false;
            }
        }
        for(int i = 0; i < fileParts.Length; i++)
        {
            status[cur, 0] = false;
            for (int j = 0; j < globParts.Length; j++)
            {
                var filePart = fileParts[fileParts.Length - i - 1];
                var globPart = globParts[globParts.Length - j - 1];
                switch (globPart.ItemType)
                {
                    case GlobRegexItemType.GlobStar:
                        if (DisallowedMatchExists(filePart)) status[cur, j + 1] = false;
                        else
                        {
                            // A globstar may consume this folder segment and stay
                            // (prev, j+1), consume it and advance (prev, j), or
                            // match empty and advance (cur, j).
                            var isFolderPath = IsFolderPath(filePart);
                            status[cur, j + 1] = (status[prev, j + 1] && isFolderPath) || (status[prev, j] && isFolderPath || status[cur, j]);
                        }
                        break;
                    case GlobRegexItemType.GlobStarForFileOnly:
                        if (DisallowedMatchExists(filePart)) status[cur, j + 1] = false;
                        else
                        {
                            // Trailing ** (no slash) must end on a file segment.
                            var isFolderPath = IsFolderPath(filePart);
                            status[cur, j + 1] = status[prev, j + 1] || (status[prev, j] && !isFolderPath);
                        }
                        break;
                    case GlobRegexItemType.PlainText:
                        StringComparison comparison = StringComparison.Ordinal;
                        if (Options.HasFlag(GlobMatcherOptions.IgnoreCase))
                        {
                            comparison = StringComparison.OrdinalIgnoreCase;
                        }
                        status[cur, j + 1] = string.Equals(filePart, globPart.PlainText, comparison) && status[prev, j];
                        break;
                    case GlobRegexItemType.Regex:
                        status[cur, j + 1] = globPart.Regex.IsMatch(filePart) && status[prev, j];
                        break;
                }
            }
            // Roll the two DP rows.
            prev ^= 1;
            cur ^= 1;
        }
        return status[prev, globParts.Length];
    }
    // "." and ".." never match a globstar; dot-files only match when
    // AllowDotMatch is enabled.
    private bool DisallowedMatchExists(string filePart)
    {
        if (filePart == "."
            || filePart == ".."
            || (!Options.HasFlag(GlobMatcherOptions.AllowDotMatch) && filePart.StartsWith(".")))
        {
            return true;
        }
        return false;
    }
    // Replaces each "\x" escape with the bare character "x".
    private static string UnescapeGlob(string s)
    {
        return UnescapeGlobRegex.Replace(s, new MatchEvaluator(ReplaceReplacerGroup));
    }
    private static string ReplaceReplacerGroup(Match m)
    {
        if (m.Success)
        {
            return m.Groups[ReplacerGroupName].Value;
        }
        return m.Value;
    }
    #endregion
    // Strips leading '!' characters from the pattern (in place) and returns
    // whether an odd number of them was present (i.e. the pattern is negated).
    internal static bool ParseNegate(ref string pattern, GlobMatcherOptions options = DefaultOptions)
    {
        if (!options.HasFlag(GlobMatcherOptions.AllowNegate))
        {
            return false;
        }
        bool negate = false;
        int i = 0;
        while (i < pattern.Length && pattern[i] == NegateChar)
        {
            // Each '!' toggles negation, so "!!" cancels out.
            negate = !negate;
            i++;
        }
        if (i <= pattern.Length)
        {
            pattern = pattern.Substring(i);
        }
        return negate;
    }
    /// <summary>
    /// {a,b}c => [ac, bc]
    /// </summary>
    /// <param name="pattern"></param>
    /// <returns></returns>
    internal static string[] ExpandGroup(string pattern, GlobMatcherOptions options = DefaultOptions)
    {
        GlobUngrouper ungrouper = new GlobUngrouper();
        bool escaping = false;
        bool disableEscape = !options.HasFlag(GlobMatcherOptions.AllowEscape);
        foreach (char c in pattern)
        {
            if (escaping)
            {
                // Escaped group characters lose the backslash; anything else
                // keeps it so later glob compilation still sees the escape.
                if (c != ',' && c != '{' && c != '}')
                {
                    ungrouper.AddChar('\\');
                }
                ungrouper.AddChar(c);
                escaping = false;
                continue;
            }
            else if (c == '\\' && !disableEscape)
            {
                escaping = true;
                continue;
            }
            switch (c)
            {
                case '{':
                    ungrouper.StartLevel();
                    break;
                case ',':
                    if (ungrouper.Level < 1)
                    {
                        // A comma outside any group is a literal character.
                        ungrouper.AddChar(c);
                    }
                    else
                    {
                        ungrouper.AddGroup();
                    }
                    break;
                case '}':
                    if (ungrouper.Level < 1)
                    {
                        // Unbalanced closing bracket matches nothing
                        return EmptyString;
                    }
                    ungrouper.FinishLevel();
                    break;
                default:
                    ungrouper.AddChar(c);
                    break;
            }
        }
        return ungrouper.Flatten();
    }
    #region Private classes
    /// <summary>
    /// Builds a tree of text/choice/sequence nodes while scanning a pattern and
    /// flattens it into the cartesian expansion of all {a,b} alternatives.
    /// </summary>
    private sealed class GlobUngrouper
    {
        public abstract class GlobNode
        {
            public readonly GlobNode _parent;
            protected GlobNode(GlobNode parentNode)
            {
                // The root node is its own parent.
                _parent = parentNode ?? this;
            }
            abstract public GlobNode AddChar(char c);
            abstract public GlobNode StartLevel();
            abstract public GlobNode AddGroup();
            abstract public GlobNode FinishLevel();
            abstract public List<StringBuilder> Flatten();
        }
        // A run of literal characters.
        public class TextNode : GlobNode
        {
            private readonly StringBuilder _builder;
            public TextNode(GlobNode parentNode)
                : base(parentNode)
            {
                _builder = new StringBuilder();
            }
            public override GlobNode AddChar(char c)
            {
                // '\0' is used internally as a group separator marker; skip it.
                if (c != 0)
                {
                    _builder.Append(c);
                }
                return this;
            }
            public override GlobNode StartLevel()
            {
                return _parent.StartLevel();
            }
            public override GlobNode AddGroup()
            {
                return _parent.AddGroup();
            }
            public override GlobNode FinishLevel()
            {
                return _parent.FinishLevel();
            }
            public override List<StringBuilder> Flatten()
            {
                List<StringBuilder> result = new List<StringBuilder>(1);
                result.Add(_builder);
                return result;
            }
        }
        // A {a,b,...} group: flattening yields the union of its alternatives.
        public class ChoiceNode : GlobNode
        {
            private readonly List<SequenceNode> _nodes;
            public ChoiceNode(GlobNode parentNode)
                : base(parentNode)
            {
                _nodes = new List<SequenceNode>();
            }
            public override GlobNode AddChar(char c)
            {
                SequenceNode node = new SequenceNode(this);
                _nodes.Add(node);
                return node.AddChar(c);
            }
            public override GlobNode StartLevel()
            {
                SequenceNode node = new SequenceNode(this);
                _nodes.Add(node);
                return node.StartLevel();
            }
            public override GlobNode AddGroup()
            {
                return AddChar('\0');
            }
            public override GlobNode FinishLevel()
            {
                AddChar('\0');
                return _parent;
            }
            public override List<StringBuilder> Flatten()
            {
                List<StringBuilder> result = new List<StringBuilder>();
                foreach (GlobNode node in _nodes)
                {
                    foreach (StringBuilder builder in node.Flatten())
                    {
                        result.Add(builder);
                    }
                }
                return result;
            }
        }
        // A concatenation of children: flattening yields the cartesian product.
        public class SequenceNode : GlobNode
        {
            private readonly List<GlobNode> _nodes;
            public SequenceNode(GlobNode parentNode)
                : base(parentNode)
            {
                _nodes = new List<GlobNode>();
            }
            public override GlobNode AddChar(char c)
            {
                TextNode node = new TextNode(this);
                _nodes.Add(node);
                return node.AddChar(c);
            }
            public override GlobNode StartLevel()
            {
                ChoiceNode node = new ChoiceNode(this);
                _nodes.Add(node);
                return node;
            }
            public override GlobNode AddGroup()
            {
                return _parent;
            }
            public override GlobNode FinishLevel()
            {
                return _parent._parent;
            }
            public override List<StringBuilder> Flatten()
            {
                List<StringBuilder> result = new List<StringBuilder>();
                result.Add(new StringBuilder());
                foreach (GlobNode node in _nodes)
                {
                    // Cross every partial result with every alternative of the child.
                    List<StringBuilder> tmp = new List<StringBuilder>();
                    foreach (StringBuilder builder in node.Flatten())
                    {
                        foreach (StringBuilder sb in result)
                        {
                            StringBuilder newsb = new StringBuilder(sb.ToString());
                            newsb.Append(builder.ToString());
                            tmp.Add(newsb);
                        }
                    }
                    result = tmp;
                }
                return result;
            }
        }
        private readonly SequenceNode _rootNode;
        private GlobNode _currentNode;
        private int _level;
        public GlobUngrouper()
        {
            _rootNode = new SequenceNode(null);
            _currentNode = _rootNode;
            _level = 0;
        }
        public void AddChar(char c)
        {
            _currentNode = _currentNode.AddChar(c);
        }
        public void StartLevel()
        {
            _currentNode = _currentNode.StartLevel();
            _level++;
        }
        public void AddGroup()
        {
            _currentNode = _currentNode.AddGroup();
        }
        public void FinishLevel()
        {
            _currentNode = _currentNode.FinishLevel();
            _level--;
        }
        // Current {…} nesting depth.
        public int Level
        {
            get { return _level; }
        }
        public string[] Flatten()
        {
            // Unbalanced braces expand to nothing.
            if (_level != 0)
            {
                return EmptyString;
            }
            List<StringBuilder> list = _rootNode.Flatten();
            string[] result = new string[list.Count];
            for (int i = 0; i < list.Count; i++)
            {
                result[i] = list[i].ToString();
            }
            return result;
        }
    }
    /// <summary>
    /// Represents [] class
    /// </summary>
    private sealed class CharClass
    {
        private readonly StringBuilder _chars = new StringBuilder();
        public void Add(char c)
        {
            _chars.Append(c);
        }
        public override string ToString()
        {
            if (_chars.Length == 0)
            {
                return string.Empty;
            }
            // A lone '^' would negate the class in regex; escape it.
            if (_chars.Length == 1 && _chars[0] == '^')
            {
                _chars.Insert(0, "\\");
            }
            return $"[{_chars.ToString()}]";
        }
    }
    /// <summary>
    /// One compiled path segment: a globstar marker, plain text, or a regex.
    /// </summary>
    [Serializable]
    private sealed class GlobRegexItem
    {
        public static readonly GlobRegexItem GlobStar = new GlobRegexItem(GlobRegexItemType.GlobStar);
        public static readonly GlobRegexItem GlobStarForFileOnly = new GlobRegexItem(GlobRegexItemType.GlobStarForFileOnly);
        public static readonly GlobRegexItem Empty = new GlobRegexItem(string.Empty, string.Empty, GlobRegexItemType.PlainText);
        public GlobRegexItemType ItemType { get; }
        public string RegexContent { get; }
        // Unescaped text used for cheap ordinal comparison (PlainText items only).
        public string PlainText { get; }
        public Regex Regex { get; }
        public GlobRegexItem(string content, string plainText, GlobRegexItemType type, bool ignoreCase = true)
        {
            RegexContent = content;
            ItemType = type;
            PlainText = plainText;
            if (type == GlobRegexItemType.Regex)
            {
                // Anchor so the regex must cover the whole segment.
                var regexSegment = $"^{RegexContent}$";
                Regex = ignoreCase ? new Regex(regexSegment, RegexOptions.IgnoreCase) : new Regex(regexSegment);
            }
        }
        private GlobRegexItem(GlobRegexItemType itemType)
        {
            ItemType = itemType;
        }
    }
    private enum GlobRegexItemType
    {
        GlobStarForFileOnly, // ** to match files only
        GlobStar, // **/ to match files or folders
        PlainText,
        Regex,
    }
    #endregion
}
[Flags]
public enum GlobMatcherOptions
{
    None = 0x0,
    // Case-insensitive matching.
    IgnoreCase = 0x1,
    // A leading '!' negates the pattern.
    AllowNegate = 0x2,
    // {a,b} groups expand into multiple alternatives.
    AllowExpand = 0x4,
    // Backslash escapes the following character.
    AllowEscape = 0x8,
    // "**" matches across directory separators.
    AllowGlobStar = 0x10,
    /// <summary>
    /// Allow patterns to match filenames starting with a period even if the pattern does not explicitly have a period.
    /// By default disabled: a/**/b will **not** match a/.c/d, unless `AllowDotMatch` is set
    /// </summary>
    AllowDotMatch = 0x20,
}
}
| |
// GzipInputStream.cs
//
// Copyright (C) 2001 Mike Krueger
//
// This file was translated from java, it was part of the GNU Classpath
// Copyright (C) 2001 Free Software Foundation, Inc.
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; either version 2
// of the License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//
// Linking this library statically or dynamically with other modules is
// making a combined work based on this library. Thus, the terms and
// conditions of the GNU General Public License cover the whole
// combination.
//
// As a special exception, the copyright holders of this library give you
// permission to link this library with independent modules to produce an
// executable, regardless of the license terms of these independent
// modules, and to copy and distribute the resulting executable under
// terms of your choice, provided that you also meet, for each linked
// independent module, the terms and conditions of the license of that
// module. An independent module is a module which is not derived from
// or based on this library. If you modify this library, you may extend
// this exception to your version of the library, but you are not
// obligated to do so. If you do not wish to do so, delete this
// exception statement from your version.
// HISTORY
// 2009-08-11 T9121 Geoff Hart Added Multi-member gzip support
// 2012-06-03 Z-1802 Incorrect endianness and subfield in FEXTRA handling.
#if ZIPLIB
using System;
using System.IO;
using ICSharpCode.SharpZipLib.Checksums;
using ICSharpCode.SharpZipLib.Zip.Compression;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
namespace ICSharpCode.SharpZipLib.GZip
{
/// <summary>
/// This filter stream is used to decompress a "GZIP" format stream.
/// The "GZIP" format is described baseInputStream RFC 1952.
///
/// author of the original java version : John Leuner
/// </summary>
/// <example> This sample shows how to unzip a gzipped file
/// <code>
/// using System;
/// using System.IO;
///
/// using ICSharpCode.SharpZipLib.Core;
/// using ICSharpCode.SharpZipLib.GZip;
///
/// class MainClass
/// {
/// public static void Main(string[] args)
/// {
/// using (Stream inStream = new GZipInputStream(File.OpenRead(args[0])))
/// using (FileStream outStream = File.Create(Path.GetFileNameWithoutExtension(args[0]))) {
/// byte[] buffer = new byte[4096];
/// StreamUtils.Copy(inStream, outStream, buffer);
/// }
/// }
/// }
/// </code>
/// </example>
internal class GZipInputStream : InflaterInputStream
{
    #region Instance Fields
    /// <summary>
    /// CRC-32 value for uncompressed data
    /// </summary>
    protected Crc32 crc;
    /// <summary>
    /// Flag to indicate if we've read the GZIP header yet for the current member (block of compressed data).
    /// This is tracked per-block as the file is parsed.
    /// </summary>
    bool readGZIPHeader;
    #endregion
    #region Constructors
    /// <summary>
    /// Creates a GZipInputStream with the default buffer size
    /// </summary>
    /// <param name="baseInputStream">
    /// The stream to read compressed data from (baseInputStream GZIP format)
    /// </param>
    public GZipInputStream(Stream baseInputStream)
        : this(baseInputStream, 4096)
    {
    }
    /// <summary>
    /// Creates a GZIPInputStream with the specified buffer size
    /// </summary>
    /// <param name="baseInputStream">
    /// The stream to read compressed data from (baseInputStream GZIP format)
    /// </param>
    /// <param name="size">
    /// Size of the buffer to use
    /// </param>
    public GZipInputStream(Stream baseInputStream, int size)
        : base(baseInputStream, new Inflater(true), size)
    {
    }
    #endregion
    #region Stream overrides
    /// <summary>
    /// Reads uncompressed data into an array of bytes
    /// </summary>
    /// <param name="buffer">
    /// The buffer to read uncompressed data into
    /// </param>
    /// <param name="offset">
    /// The offset indicating where the data should be placed
    /// </param>
    /// <param name="count">
    /// The number of uncompressed bytes to be read
    /// </param>
    /// <returns>Returns the number of bytes actually read; 0 at end of stream.</returns>
    public override int Read(byte[] buffer, int offset, int count)
    {
        // A GZIP file can contain multiple blocks of compressed data, although this is quite rare.
        // A compressed block could potentially be empty, so we need to loop until we reach EOF or
        // we find data.
        while (true) {
            // If we haven't read the header for this block, read it
            if (! readGZIPHeader) {
                // Try to read header. If there is no header (0 bytes available), this is EOF. If there is
                // an incomplete header, this will throw an exception.
                if (! ReadHeader()) {
                    return 0;
                }
            }
            // Try to read compressed data
            int bytesRead = base.Read(buffer, offset, count);
            if (bytesRead > 0) {
                crc.Update(buffer, offset, bytesRead);
            }
            // If this is the end of stream, read the footer
            if (inf.IsFinished) {
                ReadFooter();
            }
            if (bytesRead > 0) {
                return bytesRead;
            }
        }
    }
    #endregion
    #region Support routines
    /// <summary>
    /// Reads and validates a GZIP member header (RFC 1952).
    /// </summary>
    /// <returns>true when a header was read; false at clean end of stream.</returns>
    /// <exception cref="EndOfStreamException">Header is truncated.</exception>
    /// <exception cref="GZipException">Header is malformed or its CRC does not match.</exception>
    bool ReadHeader()
    {
        // Initialize CRC for this block
        crc = new Crc32();
        // Make sure there is data in file. We can't rely on ReadLeByte() to fill the buffer, as this could be EOF,
        // which is fine, but ReadLeByte() throws an exception if it doesn't find data, so we do this part ourselves.
        if (inputBuffer.Available <= 0) {
            inputBuffer.Fill();
            if (inputBuffer.Available <= 0) {
                // No header, EOF.
                return false;
            }
        }
        // 1. Check the two magic bytes
        Crc32 headCRC = new Crc32();
        int magic = inputBuffer.ReadLeByte();
        if (magic < 0) {
            throw new EndOfStreamException("EOS reading GZIP header");
        }
        headCRC.Update(magic);
        if (magic != (GZipConstants.GZIP_MAGIC >> 8)) {
            throw new GZipException("Error GZIP header, first magic byte doesn't match");
        }
        magic = inputBuffer.ReadLeByte();
        if (magic < 0) {
            throw new EndOfStreamException("EOS reading GZIP header");
        }
        if (magic != (GZipConstants.GZIP_MAGIC & 0xFF)) {
            throw new GZipException("Error GZIP header, second magic byte doesn't match");
        }
        headCRC.Update(magic);
        // 2. Check the compression type (must be 8)
        int compressionType = inputBuffer.ReadLeByte();
        if ( compressionType < 0 ) {
            throw new EndOfStreamException("EOS reading GZIP header");
        }
        if ( compressionType != 8 ) {
            throw new GZipException("Error GZIP header, data not in deflate format");
        }
        headCRC.Update(compressionType);
        // 3. Check the flags
        int flags = inputBuffer.ReadLeByte();
        if (flags < 0) {
            throw new EndOfStreamException("EOS reading GZIP header");
        }
        headCRC.Update(flags);
        /* This flag byte is divided into individual bits as follows:
           bit 0   FTEXT
           bit 1   FHCRC
           bit 2   FEXTRA
           bit 3   FNAME
           bit 4   FCOMMENT
           bit 5   reserved
           bit 6   reserved
           bit 7   reserved
        */
        // 3.1 Check the reserved bits are zero
        if ((flags & 0xE0) != 0) {
            throw new GZipException("Reserved flag bits in GZIP header != 0");
        }
        // 4.-6. Skip the modification time, extra flags, and OS type
        for (int i=0; i< 6; i++) {
            int readByte = inputBuffer.ReadLeByte();
            if (readByte < 0) {
                throw new EndOfStreamException("EOS reading GZIP header");
            }
            headCRC.Update(readByte);
        }
        // 7. Read extra field
        if ((flags & GZipConstants.FEXTRA) != 0) {
            // XLEN is total length of extra subfields, we will skip them all
            int len1, len2;
            len1 = inputBuffer.ReadLeByte();
            len2 = inputBuffer.ReadLeByte();
            if ((len1 < 0) || (len2 < 0)) {
                throw new EndOfStreamException("EOS reading GZIP header");
            }
            headCRC.Update(len1);
            headCRC.Update(len2);
            int extraLen = (len2 << 8) | len1; // gzip is LSB first
            for (int i = 0; i < extraLen;i++) {
                int readByte = inputBuffer.ReadLeByte();
                if (readByte < 0)
                {
                    throw new EndOfStreamException("EOS reading GZIP header");
                }
                headCRC.Update(readByte);
            }
        }
        // 8. Read file name (zero-terminated; the terminator is part of the header CRC)
        if ((flags & GZipConstants.FNAME) != 0) {
            int readByte;
            while ( (readByte = inputBuffer.ReadLeByte()) > 0) {
                headCRC.Update(readByte);
            }
            if (readByte < 0) {
                throw new EndOfStreamException("EOS reading GZIP header");
            }
            headCRC.Update(readByte);
        }
        // 9. Read comment (zero-terminated, like FNAME)
        if ((flags & GZipConstants.FCOMMENT) != 0) {
            int readByte;
            while ( (readByte = inputBuffer.ReadLeByte()) > 0) {
                headCRC.Update(readByte);
            }
            if (readByte < 0) {
                throw new EndOfStreamException("EOS reading GZIP header");
            }
            headCRC.Update(readByte);
        }
        // 10. Read header CRC
        if ((flags & GZipConstants.FHCRC) != 0) {
            int tempByte;
            int crcval = inputBuffer.ReadLeByte();
            if (crcval < 0) {
                throw new EndOfStreamException("EOS reading GZIP header");
            }
            tempByte = inputBuffer.ReadLeByte();
            if (tempByte < 0) {
                throw new EndOfStreamException("EOS reading GZIP header");
            }
            // FIX: CRC16 is stored LSB first (RFC 1952), i.e. the first byte is
            // the LOW byte. The previous code assembled it big-endian
            // ((crcval << 8) | tempByte), rejecting valid files with FHCRC set.
            crcval = crcval | (tempByte << 8);
            if (crcval != ((int) headCRC.Value & 0xffff)) {
                throw new GZipException("Header CRC value mismatch");
            }
        }
        readGZIPHeader = true;
        return true;
    }
    /// <summary>
    /// Reads and validates the 8-byte member trailer: CRC32 of the
    /// uncompressed data, then its length modulo 2^32, both LSB first.
    /// </summary>
    void ReadFooter()
    {
        byte[] footer = new byte[8];
        // End of stream; reclaim all bytes from inf, read the final byte count, and reset the inflator
        long bytesRead = inf.TotalOut & 0xffffffff;
        inputBuffer.Available += inf.RemainingInput;
        inf.Reset();
        // Read footer from inputBuffer
        int needed = 8;
        while (needed > 0) {
            int count = inputBuffer.ReadClearTextBuffer(footer, 8 - needed, needed);
            if (count <= 0) {
                throw new EndOfStreamException("EOS reading GZIP footer");
            }
            needed -= count; // Jewel Jan 16
        }
        // Calculate CRC
        int crcval = (footer[0] & 0xff) | ((footer[1] & 0xff) << 8) | ((footer[2] & 0xff) << 16) | (footer[3] << 24);
        if (crcval != (int) crc.Value) {
            throw new GZipException("GZIP crc sum mismatch, theirs \"" + crcval + "\" and ours \"" + (int) crc.Value);
        }
        // NOTE The total here is the original total modulo 2 ^ 32.
        uint total =
            (uint)((uint)footer[4] & 0xff) |
            (uint)(((uint)footer[5] & 0xff) << 8) |
            (uint)(((uint)footer[6] & 0xff) << 16) |
            (uint)((uint)footer[7] << 24);
        if (bytesRead != total) {
            throw new GZipException("Number of bytes mismatch in footer");
        }
        // Mark header read as false so if another header exists, we'll continue reading through the file
        readGZIPHeader = false;
    }
    #endregion
}
}
#endif
| |
// ============================================================
// RRDSharp: Managed implementation of RRDTool for .NET/Mono
// ============================================================
//
// Project Info: http://sourceforge.net/projects/rrdsharp/
// Project Lead: Julio David Quintana ([email protected])
//
// Distributed under terms of the LGPL:
//
// This library is free software; you can redistribute it and/or modify it under the terms
// of the GNU Lesser General Public License as published by the Free Software Foundation;
// either version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
// without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
// See the GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License along with this
// library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,
// Boston, MA 02111-1307, USA.
using System;
using System.Text;
namespace stRrd.Core
{
/// <summary>
/// Class to represent archive values for a single datasource.
/// </summary>
/// <remarks>
/// Robin class is the heart of the so-called "round robin database" concept. Basically,
/// each Robin object is a fixed length array of double values. Each double value represents
/// a consolidated archive value for the specific timestamp. When the underlying array of double
/// values gets completely filled, new values will replace the oldest entries.
///
/// A Robin object does not hold values in memory - such object could be quite large. Instead of it,
/// Robin stores all values on the disk and reads them only when necessary.
///
/// </remarks>
public class Robin : IRrdUpdatable
{
    private Archive parentArc;      // archive that owns this robin
    private RrdInt pointer;         // on-disk index of the slot the next Store() will write
    private RrdDoubleArray values;  // on-disk circular array of length 'rows'
    private int rows;               // fixed capacity of the circular array

    internal Robin(Archive parentArc, int rows)
    {
        this.parentArc = parentArc;
        this.rows = rows;
        if (RrdFile.RrdMode == RrdFile.MODE_CREATE)
        {
            // Creating a new file: start at slot 0 with every value unknown.
            pointer = new RrdInt(0, this);
            values = new RrdDoubleArray(this, rows, Double.NaN);
        }
        else
        {
            // Opening an existing file: attach to the pointer and values stored on disk.
            pointer = new RrdInt(this);
            values = new RrdDoubleArray(this, rows);
        }
    }

    /// <summary>
    /// All archive values, ordered from the oldest to the newest entry.
    /// </summary>
    public double[] Values
    {
        get
        {
            return GetValues(0, rows);
        }
    }

    /// <summary>
    /// Stores one value in the slot the pointer currently designates and
    /// advances the pointer, wrapping around at <c>rows</c>.
    /// </summary>
    internal void Store(double newValue)
    {
        int position = pointer.Get();
        values.Set(position, newValue);
        pointer.Set((position + 1) % rows);
    }

    /// <summary>
    /// Stores the same value <paramref name="bulkCount"/> times in consecutive
    /// slots, wrapping around the end of the underlying array when needed.
    /// </summary>
    internal void BulkStore(double newValue, int bulkCount)
    {
        int position = pointer.Get();
        // update tail
        int tailUpdateCount = Math.Min(rows - position, bulkCount);
        values.Set(position, newValue, tailUpdateCount);
        pointer.Set((position + tailUpdateCount) % rows);
        // do we need to update from the start?
        int headUpdateCount = bulkCount - tailUpdateCount;
        if (headUpdateCount > 0)
        {
            values.Set(0, newValue, headUpdateCount);
            pointer.Set(headUpdateCount);
        }
    }

    /// <summary>
    /// Underlying RRD file, obtained from the parent archive.
    /// </summary>
    public RrdFile RrdFile
    {
        get
        {
            return parentArc.RrdFile;
        }
    }

    /// <summary>
    /// Human-readable dump of the pointer position and all values, oldest first.
    /// </summary>
    internal string Dump()
    {
        StringBuilder buffer = new StringBuilder("Robin " + pointer.Get() + "/" + rows + ": ");
        int startPos = pointer.Get();
        for (int i = startPos; i < startPos + rows; i++)
        {
            buffer.Append(Util.FormatDouble(values.Get(i % rows), true) + " ");
        }
        buffer.Append("\n");
        return buffer.ToString();
    }

    /// <summary>
    /// Returns the value at the given logical index.
    /// </summary>
    /// <param name="index">Logical index, 0 being the oldest stored value.</param>
    /// <returns>The stored value; may be Double.NaN for unknown slots.</returns>
    public double GetValue(int index)
    {
        int arrayIndex = (pointer.Get() + index) % rows;
        return values.Get(arrayIndex);
    }

    /// <summary>
    /// Reads <paramref name="count"/> consecutive logical values starting at
    /// <paramref name="index"/>, stitching the tail and head of the circular
    /// array together when the requested range wraps around.
    /// </summary>
    /// <param name="index">Logical start index (0 = oldest entry).</param>
    /// <param name="count">Number of values to read.</param>
    /// <returns>Array of <paramref name="count"/> values in logical order.</returns>
    internal double[] GetValues(int index, int count)
    {
        int startIndex = (pointer.Get() + index) % rows;
        int tailReadCount = Math.Min(rows - startIndex, count);
        double[] tailValues = values.Get(startIndex, tailReadCount);
        if (tailReadCount < count)
        {
            // The range wraps: append the head of the array after the tail.
            int headReadCount = count - tailReadCount;
            double[] headValues = values.Get(0, headReadCount);
            double[] myValues = new double[count];
            Array.Copy(tailValues, 0, myValues, 0, tailValues.Length);
            Array.Copy(headValues, 0, myValues, tailValues.Length, headValues.Length);
            return myValues;
        }
        else
        {
            return tailValues;
        }
    }

    /// <summary>
    /// Archive this robin belongs to.
    /// </summary>
    public Archive Parent
    {
        get
        {
            return parentArc;
        }
    }

    /// <summary>
    /// Number of rows (slots) in this robin.
    /// </summary>
    public int Size
    {
        get
        {
            return rows;
        }
    }

    /// <summary>
    /// Copies the state of this robin into another robin. With identical sizes
    /// a raw byte copy is used; otherwise only the newest values that fit are
    /// transferred and missing slots are filled with Double.NaN.
    /// </summary>
    /// <param name="other">Destination object; must be a Robin instance.</param>
    public void CopyStateTo(IRrdUpdatable other)
    {
        if (!(other is Robin))
        {
            throw new RrdException("Cannot copy Robin object to " + other.ToString());
        }
        Robin robin = (Robin)other;
        int rowsDiff = rows - robin.rows;
        if (rowsDiff == 0)
        {
            // Identical dimensions. Do copy in BULK to speed things up
            robin.pointer.Set(pointer.Get());
            robin.values.WriteBytes(values.ReadBytes());
        }
        else
        {
            // different sizes
            for (int i = 0; i < robin.rows; i++)
            {
                int j = i + rowsDiff;
                robin.Store(j >= 0 ? GetValue(j) : Double.NaN);
            }
        }
    }

    /// <summary>
    /// Replaces every stored value lying outside [minValue, maxValue] with
    /// Double.NaN. Passing Double.NaN for a bound disables that check.
    /// </summary>
    internal void FilterValues(double minValue, double maxValue)
    {
        for (int i = 0; i < rows; i++)
        {
            double val = values.Get(i);
            if (!Double.IsNaN(minValue) && !Double.IsNaN(val) && minValue > val)
            {
                values.Set(i, Double.NaN);
            }
            if (!Double.IsNaN(maxValue) && !Double.IsNaN(val) && maxValue < val)
            {
                values.Set(i, Double.NaN);
            }
        }
    }
}
}
| |
#define SQLITE_ASCII
#define SQLITE_DISABLE_LFS
#define SQLITE_ENABLE_OVERSIZE_CELL_CHECK
#define SQLITE_MUTEX_OMIT
#define SQLITE_OMIT_AUTHORIZATION
#define SQLITE_OMIT_DEPRECATED
#define SQLITE_OMIT_GET_TABLE
#define SQLITE_OMIT_INCRBLOB
#define SQLITE_OMIT_LOOKASIDE
#define SQLITE_OMIT_SHARED_CACHE
#define SQLITE_OMIT_UTF16
#define SQLITE_OMIT_WAL
#define SQLITE_OS_WIN
#define SQLITE_SYSTEM_MALLOC
#define VDBE_PROFILE_OFF
#define WINDOWS_MOBILE
#define NDEBUG
#define _MSC_VER
#define YYFALLBACK
using System;
using System.Diagnostics;
using System.Text;
namespace Community.CsharpSqlite
{
using sqlite3_value = Sqlite3.Mem;
public partial class Sqlite3
{
    /*
    ** 2008 June 18
    **
    ** The author disclaims copyright to this source code.  In place of
    ** a legal notice, here is a blessing:
    **
    **    May you do good and not evil.
    **    May you find forgiveness for yourself and forgive others.
    **    May you share freely, never taking more than you give.
    **
    *************************************************************************
    **
    ** This module implements the sqlite3_status() interface and related
    ** functionality.
    *************************************************************************
    **  Included in SQLite3 port to C#-SQLite;  2008 Noah B Hart
    **  C#-SQLite is an independent reimplementation of the SQLite software library
    **
    **  SQLITE_SOURCE_ID: 2011-05-19 13:26:54 ed1da510a239ea767a01dc332b667119fa3c908e
    **
    *************************************************************************
    */
    //#include "sqliteInt.h"
    //#include "vdbeInt.h"

    /*
    ** Variables in which to record status information.
    ** Each SQLITE_STATUS_* verb indexes one slot of the two arrays.
    */
    //typedef struct sqlite3StatType sqlite3StatType;
    public class sqlite3StatType
    {
        public int[] nowValue = new int[10]; /* Current value */
        public int[] mxValue = new int[10];  /* Maximum value */
    }
    public static sqlite3StatType sqlite3Stat = new sqlite3StatType();

    /* The "wsdStat" macro will resolve to the status information
    ** state vector.  If writable static data is unsupported on the target,
    ** we have to locate the state vector at run-time.  In the more common
    ** case where writable static data is supported, wsdStat can refer directly
    ** to the "sqlite3Stat" state vector declared above.
    */
#if SQLITE_OMIT_WSD
    //# define wsdStatInit  sqlite3StatType *x = &GLOBAL(sqlite3StatType,sqlite3Stat)
    //# define wsdStat x[0]
#else
    //# define wsdStatInit
    // No-op in this port: writable static data is available, so wsdStat simply
    // aliases the static sqlite3Stat instance below.
    static void wsdStatInit()
    {
    }
    //# define wsdStat sqlite3Stat
    static sqlite3StatType wsdStat = sqlite3Stat;
#endif

    /*
    ** Return the current value of a status parameter.
    */
    static int sqlite3StatusValue( int op )
    {
        wsdStatInit();
        Debug.Assert( op >= 0 && op < ArraySize( wsdStat.nowValue ) );
        return wsdStat.nowValue[op];
    }

    /*
    ** Add N to the value of a status record.  It is assumed that the
    ** caller holds appropriate locks.  The high-water mark is raised
    ** whenever the current value exceeds it.
    */
    static void sqlite3StatusAdd( int op, int N )
    {
        wsdStatInit();
        Debug.Assert( op >= 0 && op < ArraySize( wsdStat.nowValue ) );
        wsdStat.nowValue[op] += N;
        if ( wsdStat.nowValue[op] > wsdStat.mxValue[op] )
        {
            wsdStat.mxValue[op] = wsdStat.nowValue[op];
        }
    }

    /*
    ** Set the value of a status to X.  The high-water mark is raised
    ** whenever the new value exceeds it.
    */
    static void sqlite3StatusSet( int op, int X )
    {
        wsdStatInit();
        Debug.Assert( op >= 0 && op < ArraySize( wsdStat.nowValue ) );
        wsdStat.nowValue[op] = X;
        if ( wsdStat.nowValue[op] > wsdStat.mxValue[op] )
        {
            wsdStat.mxValue[op] = wsdStat.nowValue[op];
        }
    }

    /*
    ** Query status information.
    **
    ** This implementation assumes that reading or writing an aligned
    ** 32-bit integer is an atomic operation.  If that assumption is not true,
    ** then this routine is not threadsafe.
    **
    ** On success the current value and high-water mark for verb 'op' are
    ** written to pCurrent/pHighwater; a nonzero resetFlag resets the
    ** high-water mark to the current value.
    */
    static int sqlite3_status( int op, ref int pCurrent, ref int pHighwater, int resetFlag )
    {
        wsdStatInit();
        if ( op < 0 || op >= ArraySize( wsdStat.nowValue ) )
        {
            return SQLITE_MISUSE_BKPT();
        }
        pCurrent = wsdStat.nowValue[op];
        pHighwater = wsdStat.mxValue[op];
        if ( resetFlag != 0 )
        {
            wsdStat.mxValue[op] = wsdStat.nowValue[op];
        }
        return SQLITE_OK;
    }

    /*
    ** Query status information for a single database connection.
    ** Runs with db.mutex held for the duration of the switch.
    */
    static int sqlite3_db_status(
    sqlite3 db,          /* The database connection whose status is desired */
    int op,              /* Status verb */
    ref int pCurrent,    /* Write current value here */
    ref int pHighwater,  /* Write high-water mark here */
    int resetFlag        /* Reset high-water mark if true */
    )
    {
        int rc = SQLITE_OK;   /* Return code */
        sqlite3_mutex_enter( db.mutex );
        switch ( op )
        {
            case SQLITE_DBSTATUS_LOOKASIDE_USED:
                {
                    pCurrent = db.lookaside.nOut;
                    pHighwater = db.lookaside.mxOut;
                    if ( resetFlag != 0 )
                    {
                        db.lookaside.mxOut = db.lookaside.nOut;
                    }
                    break;
                }

            case SQLITE_DBSTATUS_LOOKASIDE_HIT:
            case SQLITE_DBSTATUS_LOOKASIDE_MISS_SIZE:
            case SQLITE_DBSTATUS_LOOKASIDE_MISS_FULL:
                {
                    // These three verbs are contiguous; anStat[] is indexed by the
                    // offset from SQLITE_DBSTATUS_LOOKASIDE_HIT.  Current value is
                    // meaningless for counters, so it is reported as 0.
                    testcase( op == SQLITE_DBSTATUS_LOOKASIDE_HIT );
                    testcase( op == SQLITE_DBSTATUS_LOOKASIDE_MISS_SIZE );
                    testcase( op == SQLITE_DBSTATUS_LOOKASIDE_MISS_FULL );
                    Debug.Assert( ( op - SQLITE_DBSTATUS_LOOKASIDE_HIT ) >= 0 );
                    Debug.Assert( ( op - SQLITE_DBSTATUS_LOOKASIDE_HIT ) < 3 );
                    pCurrent = 0;
                    pHighwater = db.lookaside.anStat[op - SQLITE_DBSTATUS_LOOKASIDE_HIT];
                    if ( resetFlag != 0 )
                    {
                        db.lookaside.anStat[op - SQLITE_DBSTATUS_LOOKASIDE_HIT] = 0;
                    }
                    break;
                }

            /*
            ** Return an approximation for the amount of memory currently used
            ** by all pagers associated with the given database connection.  The
            ** highwater mark is meaningless and is returned as zero.
            */
            case SQLITE_DBSTATUS_CACHE_USED:
                {
                    int totalUsed = 0;
                    int i;
                    sqlite3BtreeEnterAll( db );
                    for ( i = 0; i < db.nDb; i++ )
                    {
                        Btree pBt = db.aDb[i].pBt;
                        if ( pBt != null )
                        {
                            Pager pPager = sqlite3BtreePager( pBt );
                            totalUsed += sqlite3PagerMemUsed( pPager );
                        }
                    }
                    sqlite3BtreeLeaveAll( db );
                    pCurrent = totalUsed;
                    pHighwater = 0;
                    break;
                }

            /*
            ** *pCurrent gets an accurate estimate of the amount of memory used
            ** to store the schema for all databases (main, temp, and any ATTACHed
            ** databases.  *pHighwater is set to zero.
            */
            case SQLITE_DBSTATUS_SCHEMA_USED:
                {
                    // NOTE(review): in this C# port the malloc-size accounting is
                    // commented out and nByte is never incremented, so pCurrent is
                    // always reported as 0.  The loops below invoke
                    // sqlite3DeleteTrigger/sqlite3DeleteTable on the live schema; in
                    // the C original db.pnBytesFreed redirects those "frees" into a
                    // byte counter instead of destroying objects — confirm the port
                    // preserves that behavior before relying on this verb.
                    int i;                      /* Used to iterate through schemas */
                    int nByte = 0;              /* Used to accumulate return value */
                    sqlite3BtreeEnterAll( db );
                    //db.pnBytesFreed = nByte;
                    for ( i = 0; i < db.nDb; i++ )
                    {
                        Schema pSchema = db.aDb[i].pSchema;
                        if ( ALWAYS( pSchema != null ) )
                        {
                            HashElem p;
                            //nByte += (int)(sqlite3GlobalConfig.m.xRoundup(sizeof(HashElem)) * (
                            //    pSchema.tblHash.count
                            //  + pSchema.trigHash.count
                            //  + pSchema.idxHash.count
                            //  + pSchema.fkeyHash.count
                            //));
                            //nByte += (int)sqlite3MallocSize( pSchema.tblHash.ht );
                            //nByte += (int)sqlite3MallocSize( pSchema.trigHash.ht );
                            //nByte += (int)sqlite3MallocSize( pSchema.idxHash.ht );
                            //nByte += (int)sqlite3MallocSize( pSchema.fkeyHash.ht );
                            for ( p = sqliteHashFirst( pSchema.trigHash ); p != null; p = sqliteHashNext( p ) )
                            {
                                Trigger t = (Trigger)sqliteHashData( p );
                                sqlite3DeleteTrigger( db, ref t );
                            }
                            for ( p = sqliteHashFirst( pSchema.tblHash ); p != null; p = sqliteHashNext( p ) )
                            {
                                Table t = (Table)sqliteHashData( p );
                                sqlite3DeleteTable( db, ref t );
                            }
                        }
                    }
                    db.pnBytesFreed = 0;
                    sqlite3BtreeLeaveAll( db );
                    pHighwater = 0;
                    pCurrent = nByte;
                    break;
                }

            /*
            ** *pCurrent gets an accurate estimate of the amount of memory used
            ** to store all prepared statements.
            ** *pHighwater is set to zero.
            */
            case SQLITE_DBSTATUS_STMT_USED:
                {
                    // NOTE(review): same caveat as SCHEMA_USED above — nByte stays 0
                    // in this port and sqlite3VdbeDeleteObject is called on every VM
                    // in the list; verify the pnBytesFreed counting mechanism was
                    // ported before trusting this verb.
                    Vdbe pVdbe;                 /* Used to iterate through VMs */
                    int nByte = 0;              /* Used to accumulate return value */
                    //db.pnBytesFreed = nByte;
                    for ( pVdbe = db.pVdbe; pVdbe != null; pVdbe = pVdbe.pNext )
                    {
                        sqlite3VdbeDeleteObject( db, ref pVdbe );
                    }
                    db.pnBytesFreed = 0;
                    pHighwater = 0;
                    pCurrent = nByte;
                    break;
                }

            default:
                {
                    // Unknown verb
                    rc = SQLITE_ERROR;
                    break;
                }
        }
        sqlite3_mutex_leave( db.mutex );
        return rc;
    }
}
}
| |
using System;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace NetCoreServer
{
/// <summary>
/// TCP session is used to read and write data from the connected TCP client
/// </summary>
/// <remarks>Thread-safe</remarks>
public class TcpSession : IDisposable
{
    /// <summary>
    /// Initialize the session with a given server
    /// </summary>
    /// <param name="server">TCP server</param>
    public TcpSession(TcpServer server)
    {
        Id = Guid.NewGuid();
        Server = server;
        // Buffer sizes are inherited from the owning server's options
        OptionReceiveBufferSize = server.OptionReceiveBufferSize;
        OptionSendBufferSize = server.OptionSendBufferSize;
    }

    /// <summary>
    /// Session Id
    /// </summary>
    public Guid Id { get; }

    /// <summary>
    /// Server
    /// </summary>
    public TcpServer Server { get; }

    /// <summary>
    /// Socket
    /// </summary>
    public Socket Socket { get; private set; }

    /// <summary>
    /// Number of bytes pending sent by the session
    /// </summary>
    public long BytesPending { get; private set; }

    /// <summary>
    /// Number of bytes sending by the session
    /// </summary>
    public long BytesSending { get; private set; }

    /// <summary>
    /// Number of bytes sent by the session
    /// </summary>
    public long BytesSent { get; private set; }

    /// <summary>
    /// Number of bytes received by the session
    /// </summary>
    public long BytesReceived { get; private set; }

    /// <summary>
    /// Option: receive buffer limit (0 disables the limit check)
    /// </summary>
    public int OptionReceiveBufferLimit { get; set; } = 0;

    /// <summary>
    /// Option: receive buffer size
    /// </summary>
    public int OptionReceiveBufferSize { get; set; } = 8192;

    /// <summary>
    /// Option: send buffer limit (0 disables the limit check)
    /// </summary>
    public int OptionSendBufferLimit { get; set; } = 0;

    /// <summary>
    /// Option: send buffer size
    /// </summary>
    public int OptionSendBufferSize { get; set; } = 8192;

    #region Connect/Disconnect session

    /// <summary>
    /// Is the session connected?
    /// </summary>
    public bool IsConnected { get; private set; }

    /// <summary>
    /// Connect the session
    /// </summary>
    /// <param name="socket">Session socket</param>
    internal void Connect(Socket socket)
    {
        Socket = socket;

        // Update the session socket disposed flag
        IsSocketDisposed = false;

        // Setup buffers
        _receiveBuffer = new Buffer();
        _sendBufferMain = new Buffer();
        _sendBufferFlush = new Buffer();

        // Setup event args (reused for every async receive/send on this session)
        _receiveEventArg = new SocketAsyncEventArgs();
        _receiveEventArg.Completed += OnAsyncCompleted;
        _sendEventArg = new SocketAsyncEventArgs();
        _sendEventArg.Completed += OnAsyncCompleted;

        // Apply the option: keep alive
        if (Server.OptionKeepAlive)
            Socket.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.KeepAlive, true);
        // Apply the option: no delay
        if (Server.OptionNoDelay)
            Socket.SetSocketOption(SocketOptionLevel.Tcp, SocketOptionName.NoDelay, true);

        // Prepare receive & send buffers
        _receiveBuffer.Reserve(OptionReceiveBufferSize);
        _sendBufferMain.Reserve(OptionSendBufferSize);
        _sendBufferFlush.Reserve(OptionSendBufferSize);

        // Reset statistic
        BytesPending = 0;
        BytesSending = 0;
        BytesSent = 0;
        BytesReceived = 0;

        // Call the session connecting handler
        OnConnecting();

        // Call the session connecting handler in the server
        Server.OnConnectingInternal(this);

        // Update the connected flag
        IsConnected = true;

        // Try to receive something from the client
        TryReceive();

        // Check the socket disposed state: in some rare cases it might be disconnected while receiving!
        if (IsSocketDisposed)
            return;

        // Call the session connected handler
        OnConnected();

        // Call the session connected handler in the server
        Server.OnConnectedInternal(this);

        // Call the empty send buffer handler
        if (_sendBufferMain.IsEmpty)
            OnEmpty();
    }

    /// <summary>
    /// Disconnect the session
    /// </summary>
    /// <returns>'true' if the section was successfully disconnected, 'false' if the section is already disconnected</returns>
    public virtual bool Disconnect()
    {
        if (!IsConnected)
            return false;

        // Reset event args so completions no longer reach this session
        _receiveEventArg.Completed -= OnAsyncCompleted;
        _sendEventArg.Completed -= OnAsyncCompleted;

        // Call the session disconnecting handler
        OnDisconnecting();

        // Call the session disconnecting handler in the server
        Server.OnDisconnectingInternal(this);

        try
        {
            try
            {
                // Shutdown the socket associated with the client
                Socket.Shutdown(SocketShutdown.Both);
            }
            catch (SocketException) {}

            // Close the session socket
            Socket.Close();

            // Dispose the session socket
            Socket.Dispose();

            // Dispose event arguments
            _receiveEventArg.Dispose();
            _sendEventArg.Dispose();

            // Update the session socket disposed flag
            IsSocketDisposed = true;
        }
        catch (ObjectDisposedException) {}

        // Update the connected flag
        IsConnected = false;

        // Update sending/receiving flags
        _receiving = false;
        _sending = false;

        // Clear send/receive buffers
        ClearBuffers();

        // Call the session disconnected handler
        OnDisconnected();

        // Call the session disconnected handler in the server
        Server.OnDisconnectedInternal(this);

        // Unregister session
        Server.UnregisterSession(Id);

        return true;
    }

    #endregion

    #region Send/Recieve data

    // Receive buffer
    private bool _receiving;
    private Buffer _receiveBuffer;
    private SocketAsyncEventArgs _receiveEventArg;
    // Send buffer
    // _sendLock guards the main/flush buffer pair and the _sending flag
    private readonly object _sendLock = new object();
    private bool _sending;
    private Buffer _sendBufferMain;   // callers append here
    private Buffer _sendBufferFlush;  // currently being written to the socket
    private SocketAsyncEventArgs _sendEventArg;
    private long _sendBufferFlushOffset;

    /// <summary>
    /// Send data to the client (synchronous)
    /// </summary>
    /// <param name="buffer">Buffer to send</param>
    /// <returns>Size of sent data</returns>
    public virtual long Send(byte[] buffer) { return Send(buffer, 0, buffer.Length); }

    /// <summary>
    /// Send data to the client (synchronous)
    /// </summary>
    /// <param name="buffer">Buffer to send</param>
    /// <param name="offset">Buffer offset</param>
    /// <param name="size">Buffer size</param>
    /// <returns>Size of sent data</returns>
    public virtual long Send(byte[] buffer, long offset, long size)
    {
        if (!IsConnected)
            return 0;

        if (size == 0)
            return 0;

        // Sent data to the client (non-throwing overload: the error comes back in 'ec')
        long sent = Socket.Send(buffer, (int)offset, (int)size, SocketFlags.None, out SocketError ec);
        if (sent > 0)
        {
            // Update statistic
            BytesSent += sent;
            Interlocked.Add(ref Server._bytesSent, size);

            // Call the buffer sent handler
            OnSent(sent, BytesPending + BytesSending);
        }

        // Check for socket error
        if (ec != SocketError.Success)
        {
            SendError(ec);
            Disconnect();
        }

        return sent;
    }

    /// <summary>
    /// Send text to the client (synchronous)
    /// </summary>
    /// <param name="text">Text string to send</param>
    /// <returns>Size of sent data</returns>
    public virtual long Send(string text) { return Send(Encoding.UTF8.GetBytes(text)); }

    /// <summary>
    /// Send data to the client (asynchronous)
    /// </summary>
    /// <param name="buffer">Buffer to send</param>
    /// <returns>'true' if the data was successfully sent, 'false' if the session is not connected</returns>
    public virtual bool SendAsync(byte[] buffer) { return SendAsync(buffer, 0, buffer.Length); }

    /// <summary>
    /// Send data to the client (asynchronous)
    /// </summary>
    /// <param name="buffer">Buffer to send</param>
    /// <param name="offset">Buffer offset</param>
    /// <param name="size">Buffer size</param>
    /// <returns>'true' if the data was successfully sent, 'false' if the session is not connected</returns>
    public virtual bool SendAsync(byte[] buffer, long offset, long size)
    {
        if (!IsConnected)
            return false;

        if (size == 0)
            return true;

        lock (_sendLock)
        {
            // Check the send buffer limit (a limit of 0 means unlimited)
            if (((_sendBufferMain.Size + size) > OptionSendBufferLimit) && (OptionSendBufferLimit > 0))
            {
                SendError(SocketError.NoBufferSpaceAvailable);
                return false;
            }

            // Fill the main send buffer
            _sendBufferMain.Append(buffer, offset, size);

            // Update statistic
            BytesPending = _sendBufferMain.Size;

            // Avoid multiple send handlers
            if (_sending)
                return true;
            else
                _sending = true;

            // Try to send the main buffer
            TrySend();
        }

        return true;
    }

    /// <summary>
    /// Send text to the client (asynchronous)
    /// </summary>
    /// <param name="text">Text string to send</param>
    /// <returns>'true' if the text was successfully sent, 'false' if the session is not connected</returns>
    public virtual bool SendAsync(string text) { return SendAsync(Encoding.UTF8.GetBytes(text)); }

    /// <summary>
    /// Receive data from the client (synchronous)
    /// </summary>
    /// <param name="buffer">Buffer to receive</param>
    /// <returns>Size of received data</returns>
    public virtual long Receive(byte[] buffer) { return Receive(buffer, 0, buffer.Length); }

    /// <summary>
    /// Receive data from the client (synchronous)
    /// </summary>
    /// <param name="buffer">Buffer to receive</param>
    /// <param name="offset">Buffer offset</param>
    /// <param name="size">Buffer size</param>
    /// <returns>Size of received data</returns>
    public virtual long Receive(byte[] buffer, long offset, long size)
    {
        if (!IsConnected)
            return 0;

        if (size == 0)
            return 0;

        // Receive data from the client
        long received = Socket.Receive(buffer, (int)offset, (int)size, SocketFlags.None, out SocketError ec);
        if (received > 0)
        {
            // Update statistic
            BytesReceived += received;
            Interlocked.Add(ref Server._bytesReceived, received);

            // Call the buffer received handler
            // NOTE(review): the data was placed at 'offset' but the handler is
            // told offset 0 — confirm handlers only rely on the size argument.
            OnReceived(buffer, 0, received);
        }

        // Check for socket error
        if (ec != SocketError.Success)
        {
            SendError(ec);
            Disconnect();
        }

        return received;
    }

    /// <summary>
    /// Receive text from the client (synchronous)
    /// </summary>
    /// <param name="size">Text size to receive</param>
    /// <returns>Received text</returns>
    public virtual string Receive(long size)
    {
        var buffer = new byte[size];
        var length = Receive(buffer);
        return Encoding.UTF8.GetString(buffer, 0, (int)length);
    }

    /// <summary>
    /// Receive data from the client (asynchronous)
    /// </summary>
    public virtual void ReceiveAsync()
    {
        // Try to receive data from the client
        TryReceive();
    }

    /// <summary>
    /// Try to receive new data
    /// </summary>
    private void TryReceive()
    {
        if (_receiving)
            return;

        if (!IsConnected)
            return;

        // Loop while ReceiveAsync completes synchronously (returns false)
        bool process = true;

        while (process)
        {
            process = false;

            try
            {
                // Async receive with the receive handler
                _receiving = true;
                _receiveEventArg.SetBuffer(_receiveBuffer.Data, 0, (int)_receiveBuffer.Capacity);
                if (!Socket.ReceiveAsync(_receiveEventArg))
                    process = ProcessReceive(_receiveEventArg);
            }
            catch (ObjectDisposedException) {}
        }
    }

    /// <summary>
    /// Try to send pending data
    /// </summary>
    private void TrySend()
    {
        if (!IsConnected)
            return;

        bool empty = false;
        bool process = true;

        while (process)
        {
            process = false;

            lock (_sendLock)
            {
                // Is previous socket send in progress?
                if (_sendBufferFlush.IsEmpty)
                {
                    // Swap flush and main buffers
                    _sendBufferFlush = Interlocked.Exchange(ref _sendBufferMain, _sendBufferFlush);
                    _sendBufferFlushOffset = 0;

                    // Update statistic
                    BytesPending = 0;
                    BytesSending += _sendBufferFlush.Size;

                    // Check if the flush buffer is empty
                    if (_sendBufferFlush.IsEmpty)
                    {
                        // Need to call empty send buffer handler
                        empty = true;

                        // End sending process
                        _sending = false;
                    }
                }
                else
                    return;
            }

            // Call the empty send buffer handler
            if (empty)
            {
                OnEmpty();
                return;
            }

            try
            {
                // Async write with the write handler
                _sendEventArg.SetBuffer(_sendBufferFlush.Data, (int)_sendBufferFlushOffset, (int)(_sendBufferFlush.Size - _sendBufferFlushOffset));
                if (!Socket.SendAsync(_sendEventArg))
                    process = ProcessSend(_sendEventArg);
            }
            catch (ObjectDisposedException) {}
        }
    }

    /// <summary>
    /// Clear send/receive buffers
    /// </summary>
    private void ClearBuffers()
    {
        lock (_sendLock)
        {
            // Clear send buffers
            _sendBufferMain.Clear();
            _sendBufferFlush.Clear();
            _sendBufferFlushOffset= 0;

            // Update statistic
            BytesPending = 0;
            BytesSending = 0;
        }
    }

    #endregion

    #region IO processing

    /// <summary>
    /// This method is called whenever a receive or send operation is completed on a socket
    /// </summary>
    private void OnAsyncCompleted(object sender, SocketAsyncEventArgs e)
    {
        if (IsSocketDisposed)
            return;

        // Determine which type of operation just completed and call the associated handler
        switch (e.LastOperation)
        {
            case SocketAsyncOperation.Receive:
                if (ProcessReceive(e))
                    TryReceive();
                break;
            case SocketAsyncOperation.Send:
                if (ProcessSend(e))
                    TrySend();
                break;
            default:
                throw new ArgumentException("The last operation completed on the socket was not a receive or send");
        }
    }

    /// <summary>
    /// This method is invoked when an asynchronous receive operation completes
    /// </summary>
    private bool ProcessReceive(SocketAsyncEventArgs e)
    {
        if (!IsConnected)
            return false;

        long size = e.BytesTransferred;

        // Received some data from the client
        if (size > 0)
        {
            // Update statistic
            BytesReceived += size;
            Interlocked.Add(ref Server._bytesReceived, size);

            // Call the buffer received handler
            OnReceived(_receiveBuffer.Data, 0, size);

            // If the receive buffer is full increase its size (doubling strategy)
            if (_receiveBuffer.Capacity == size)
            {
                // Check the receive buffer limit (a limit of 0 means unlimited)
                if (((2 * size) > OptionReceiveBufferLimit) && (OptionReceiveBufferLimit > 0))
                {
                    SendError(SocketError.NoBufferSpaceAvailable);
                    Disconnect();
                    return false;
                }

                _receiveBuffer.Reserve(2 * size);
            }
        }

        _receiving = false;

        // Try to receive again if the session is valid
        if (e.SocketError == SocketError.Success)
        {
            // If zero is returned from a read operation, the remote end has closed the connection
            if (size > 0)
                return true;
            else
                Disconnect();
        }
        else
        {
            SendError(e.SocketError);
            Disconnect();
        }

        return false;
    }

    /// <summary>
    /// This method is invoked when an asynchronous send operation completes
    /// </summary>
    private bool ProcessSend(SocketAsyncEventArgs e)
    {
        if (!IsConnected)
            return false;

        long size = e.BytesTransferred;

        // Send some data to the client
        if (size > 0)
        {
            // Update statistic
            BytesSending -= size;
            BytesSent += size;
            Interlocked.Add(ref Server._bytesSent, size);

            // Increase the flush buffer offset
            _sendBufferFlushOffset += size;

            // Successfully send the whole flush buffer
            if (_sendBufferFlushOffset == _sendBufferFlush.Size)
            {
                // Clear the flush buffer
                _sendBufferFlush.Clear();
                _sendBufferFlushOffset = 0;
            }

            // Call the buffer sent handler
            OnSent(size, BytesPending + BytesSending);
        }

        // Try to send again if the session is valid
        if (e.SocketError == SocketError.Success)
            return true;
        else
        {
            SendError(e.SocketError);
            Disconnect();
            return false;
        }
    }

    #endregion

    #region Session handlers

    /// <summary>
    /// Handle client connecting notification
    /// </summary>
    protected virtual void OnConnecting() {}
    /// <summary>
    /// Handle client connected notification
    /// </summary>
    protected virtual void OnConnected() {}
    /// <summary>
    /// Handle client disconnecting notification
    /// </summary>
    protected virtual void OnDisconnecting() {}
    /// <summary>
    /// Handle client disconnected notification
    /// </summary>
    protected virtual void OnDisconnected() {}

    /// <summary>
    /// Handle buffer received notification
    /// </summary>
    /// <param name="buffer">Received buffer</param>
    /// <param name="offset">Received buffer offset</param>
    /// <param name="size">Received buffer size</param>
    /// <remarks>
    /// Notification is called when another chunk of buffer was received from the client
    /// </remarks>
    protected virtual void OnReceived(byte[] buffer, long offset, long size) {}

    /// <summary>
    /// Handle buffer sent notification
    /// </summary>
    /// <param name="sent">Size of sent buffer</param>
    /// <param name="pending">Size of pending buffer</param>
    /// <remarks>
    /// Notification is called when another chunk of buffer was sent to the client.
    /// This handler could be used to send another buffer to the client for instance when the pending size is zero.
    /// </remarks>
    protected virtual void OnSent(long sent, long pending) {}

    /// <summary>
    /// Handle empty send buffer notification
    /// </summary>
    /// <remarks>
    /// Notification is called when the send buffer is empty and ready for a new data to send.
    /// This handler could be used to send another buffer to the client.
    /// </remarks>
    protected virtual void OnEmpty() {}

    /// <summary>
    /// Handle error notification
    /// </summary>
    /// <param name="error">Socket error code</param>
    protected virtual void OnError(SocketError error) {}

    #endregion

    #region Error handling

    /// <summary>
    /// Send error notification
    /// </summary>
    /// <param name="error">Socket error code</param>
    private void SendError(SocketError error)
    {
        // Skip disconnect errors
        if ((error == SocketError.ConnectionAborted) ||
            (error == SocketError.ConnectionRefused) ||
            (error == SocketError.ConnectionReset) ||
            (error == SocketError.OperationAborted) ||
            (error == SocketError.Shutdown))
            return;

        OnError(error);
    }

    #endregion

    #region IDisposable implementation

    /// <summary>
    /// Disposed flag
    /// </summary>
    public bool IsDisposed { get; private set; }

    /// <summary>
    /// Session socket disposed flag
    /// </summary>
    public bool IsSocketDisposed { get; private set; } = true;

    // Implement IDisposable.
    public void Dispose()
    {
        Dispose(true);
        GC.SuppressFinalize(this);
    }

    protected virtual void Dispose(bool disposingManagedResources)
    {
        // The idea here is that Dispose(Boolean) knows whether it is
        // being called to do explicit cleanup (the Boolean is true)
        // versus being called due to a garbage collection (the Boolean
        // is false). This distinction is useful because, when being
        // disposed explicitly, the Dispose(Boolean) method can safely
        // execute code using reference type fields that refer to other
        // objects knowing for sure that these other objects have not been
        // finalized or disposed of yet. When the Boolean is false,
        // the Dispose(Boolean) method should not execute code that
        // refer to reference type fields because those objects may
        // have already been finalized."
        if (!IsDisposed)
        {
            if (disposingManagedResources)
            {
                // Dispose managed resources here...
                Disconnect();
            }

            // Dispose unmanaged resources here...

            // Set large fields to null here...

            // Mark as disposed.
            IsDisposed = true;
        }
    }

    // Use C# destructor syntax for finalization code.
    ~TcpSession()
    {
        // Simply call Dispose(false).
        Dispose(false);
    }

    #endregion
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Http;
using System.Web.Http.Description;
using System.Xml.Serialization;
using Newtonsoft.Json;
namespace MemoryGame.Areas.HelpPage.ModelDescriptions
{
/// <summary>
/// Generates model descriptions for given types.
/// </summary>
public class ModelDescriptionGenerator
{
// Modify this to support more data annotation attributes.
// Maps a data-annotation attribute type to a function that renders that
// attribute instance as human-readable help-page text.
private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>>
{
{ typeof(RequiredAttribute), a => "Required" },
{ typeof(RangeAttribute), a =>
{
RangeAttribute range = (RangeAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum);
}
},
{ typeof(MaxLengthAttribute), a =>
{
MaxLengthAttribute maxLength = (MaxLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length);
}
},
{ typeof(MinLengthAttribute), a =>
{
MinLengthAttribute minLength = (MinLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length);
}
},
{ typeof(StringLengthAttribute), a =>
{
StringLengthAttribute strLength = (StringLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength);
}
},
{ typeof(DataTypeAttribute), a =>
{
DataTypeAttribute dataType = (DataTypeAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString());
}
},
{ typeof(RegularExpressionAttribute), a =>
{
RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern);
}
},
};
// Modify this to add more default documentations.
// Fallback documentation strings for well-known simple CLR types, used
// when no documentation provider supplies anything better.
private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string>
{
{ typeof(Int16), "integer" },
{ typeof(Int32), "integer" },
{ typeof(Int64), "integer" },
{ typeof(UInt16), "unsigned integer" },
{ typeof(UInt32), "unsigned integer" },
{ typeof(UInt64), "unsigned integer" },
{ typeof(Byte), "byte" },
{ typeof(Char), "character" },
{ typeof(SByte), "signed byte" },
{ typeof(Uri), "URI" },
{ typeof(Single), "decimal number" },
{ typeof(Double), "decimal number" },
{ typeof(Decimal), "decimal number" },
{ typeof(String), "string" },
{ typeof(Guid), "globally unique identifier" },
{ typeof(TimeSpan), "time interval" },
{ typeof(DateTime), "date" },
{ typeof(DateTimeOffset), "date" },
{ typeof(Boolean), "boolean" },
};
// Lazily resolved so the provider lookup happens only on first use.
private Lazy<IModelDocumentationProvider> _documentationProvider;
/// <summary>
/// Creates a generator whose documentation provider is resolved lazily from
/// <paramref name="config"/>'s service container.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="config"/> is null.</exception>
public ModelDescriptionGenerator(HttpConfiguration config)
{
if (config == null)
{
throw new ArgumentNullException("config");
}
_documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider);
// Model names are compared case-insensitively when detecting duplicates.
GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase);
}
// Cache of every model description generated so far, keyed by model name.
public Dictionary<string, ModelDescription> GeneratedModels { get; private set; }
// May be null when the configuration has no IModelDocumentationProvider.
private IModelDocumentationProvider DocumentationProvider
{
get
{
return _documentationProvider.Value;
}
}
/// <summary>
/// Returns the cached description for <paramref name="modelType"/> or builds
/// and caches a new one, dispatching on the kind of type: simple, enum,
/// collection, dictionary, key/value pair, or complex.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="modelType"/> is null.</exception>
/// <exception cref="InvalidOperationException">Thrown when two different types map to the same model name.</exception>
public ModelDescription GetOrCreateModelDescription(Type modelType)
{
if (modelType == null)
{
throw new ArgumentNullException("modelType");
}
// Describe Nullable<T> exactly like its underlying T.
Type underlyingType = Nullable.GetUnderlyingType(modelType);
if (underlyingType != null)
{
modelType = underlyingType;
}
ModelDescription modelDescription;
string modelName = ModelNameHelper.GetModelName(modelType);
if (GeneratedModels.TryGetValue(modelName, out modelDescription))
{
if (modelType != modelDescription.ModelType)
{
throw new InvalidOperationException(
String.Format(
CultureInfo.CurrentCulture,
"A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. " +
"Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.",
modelName,
modelDescription.ModelType.FullName,
modelType.FullName));
}
return modelDescription;
}
if (DefaultTypeDocumentation.ContainsKey(modelType))
{
return GenerateSimpleTypeModelDescription(modelType);
}
if (modelType.IsEnum)
{
return GenerateEnumTypeModelDescription(modelType);
}
if (modelType.IsGenericType)
{
Type[] genericArguments = modelType.GetGenericArguments();
if (genericArguments.Length == 1)
{
// A single-argument generic implementing IEnumerable<T> is a collection of T.
Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments);
if (enumerableType.IsAssignableFrom(modelType))
{
return GenerateCollectionModelDescription(modelType, genericArguments[0]);
}
}
if (genericArguments.Length == 2)
{
Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments);
if (dictionaryType.IsAssignableFrom(modelType))
{
return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]);
}
Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments);
if (keyValuePairType.IsAssignableFrom(modelType))
{
return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]);
}
}
}
if (modelType.IsArray)
{
Type elementType = modelType.GetElementType();
return GenerateCollectionModelDescription(modelType, elementType);
}
if (modelType == typeof(NameValueCollection))
{
return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string));
}
// Non-generic collection interfaces fall back to object keys/elements.
if (typeof(IDictionary).IsAssignableFrom(modelType))
{
return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object));
}
if (typeof(IEnumerable).IsAssignableFrom(modelType))
{
return GenerateCollectionModelDescription(modelType, typeof(object));
}
return GenerateComplexTypeModelDescription(modelType);
}
// Change this to provide different name for the member.
// Precedence: [JsonProperty] name, then [DataMember] name (only when the
// declaring type is a data contract), then the CLR member name.
private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute)
{
JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>();
if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName))
{
return jsonProperty.PropertyName;
}
if (hasDataContractAttribute)
{
DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>();
if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name))
{
return dataMember.Name;
}
}
return member.Name;
}
// Decides whether a property/field (or enum member) appears on the help page.
private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute)
{
JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>();
XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>();
IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>();
NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>();
ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>();
bool hasMemberAttribute = member.DeclaringType.IsEnum ?
member.GetCustomAttribute<EnumMemberAttribute>() != null :
member.GetCustomAttribute<DataMemberAttribute>() != null;
// Display member only if all the followings are true:
// no JsonIgnoreAttribute
// no XmlIgnoreAttribute
// no IgnoreDataMemberAttribute
// no NonSerializedAttribute
// no ApiExplorerSettingsAttribute with IgnoreApi set to true
// no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute
return jsonIgnore == null &&
xmlIgnore == null &&
ignoreDataMember == null &&
nonSerialized == null &&
(apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) &&
(!hasDataContractAttribute || hasMemberAttribute);
}
// Documentation lookup: the DefaultTypeDocumentation table first, then the
// configured documentation provider. May return null.
private string CreateDefaultDocumentation(Type type)
{
string documentation;
if (DefaultTypeDocumentation.TryGetValue(type, out documentation))
{
return documentation;
}
if (DocumentationProvider != null)
{
documentation = DocumentationProvider.GetDocumentation(type);
}
return documentation;
}
// Renders every recognized data-annotation attribute on the member and
// appends the results (RequiredAttribute first) to the parameter model.
private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel)
{
List<ParameterAnnotation> annotations = new List<ParameterAnnotation>();
IEnumerable<Attribute> attributes = property.GetCustomAttributes();
foreach (Attribute attribute in attributes)
{
Func<object, string> textGenerator;
if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator))
{
annotations.Add(
new ParameterAnnotation
{
AnnotationAttribute = attribute,
Documentation = textGenerator(attribute)
});
}
}
// Rearrange the annotations
annotations.Sort((x, y) =>
{
// Special-case RequiredAttribute so that it shows up on top
if (x.AnnotationAttribute is RequiredAttribute)
{
return -1;
}
if (y.AnnotationAttribute is RequiredAttribute)
{
return 1;
}
// Sort the rest based on alphabetic order of the documentation
return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase);
});
foreach (ParameterAnnotation annotation in annotations)
{
propertyModel.Annotations.Add(annotation);
}
}
// Describes a collection type via its element type; returns null when the
// element type itself yields no description.
private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType)
{
ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType);
if (collectionModelDescription != null)
{
return new CollectionModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
ElementDescription = collectionModelDescription
};
}
return null;
}
// Describes a POCO. The description is registered in GeneratedModels BEFORE
// members are walked so that self-referencing types terminate.
private ModelDescription GenerateComplexTypeModelDescription(Type modelType)
{
ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
GeneratedModels.Add(complexModelDescription.Name, complexModelDescription);
bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
foreach (PropertyInfo property in properties)
{
if (ShouldDisplayMember(property, hasDataContractAttribute))
{
ParameterDescription propertyModel = new ParameterDescription
{
Name = GetMemberName(property, hasDataContractAttribute)
};
if (DocumentationProvider != null)
{
propertyModel.Documentation = DocumentationProvider.GetDocumentation(property);
}
GenerateAnnotations(property, propertyModel);
complexModelDescription.Properties.Add(propertyModel);
propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType);
}
}
FieldInfo[] fields = modelType.GetFields(BindingFlags.Public | BindingFlags.Instance);
foreach (FieldInfo field in fields)
{
if (ShouldDisplayMember(field, hasDataContractAttribute))
{
ParameterDescription propertyModel = new ParameterDescription
{
Name = GetMemberName(field, hasDataContractAttribute)
};
if (DocumentationProvider != null)
{
propertyModel.Documentation = DocumentationProvider.GetDocumentation(field);
}
complexModelDescription.Properties.Add(propertyModel);
propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType);
}
}
return complexModelDescription;
}
// Describes a dictionary-like type via its key and value type descriptions.
private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType)
{
ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
return new DictionaryModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
KeyModelDescription = keyModelDescription,
ValueModelDescription = valueModelDescription
};
}
// Describes an enum: one EnumValueDescription per displayable constant.
private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType)
{
EnumTypeModelDescription enumDescription = new EnumTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static))
{
if (ShouldDisplayMember(field, hasDataContractAttribute))
{
EnumValueDescription enumValue = new EnumValueDescription
{
Name = field.Name,
Value = field.GetRawConstantValue().ToString()
};
if (DocumentationProvider != null)
{
enumValue.Documentation = DocumentationProvider.GetDocumentation(field);
}
enumDescription.Values.Add(enumValue);
}
}
GeneratedModels.Add(enumDescription.Name, enumDescription);
return enumDescription;
}
// Describes KeyValuePair<K,V> via its key and value type descriptions.
private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType)
{
ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
return new KeyValuePairModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
KeyModelDescription = keyModelDescription,
ValueModelDescription = valueModelDescription
};
}
// Describes a simple (primitive-like) type and caches it.
private ModelDescription GenerateSimpleTypeModelDescription(Type modelType)
{
SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription);
return simpleModelDescription;
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using SL = SortedList_SortedListUtils;
using SortedList_ICollection;
using TestSupport.Common_TestSupport;
namespace SortedList_ICollection
{
/// <summary>
/// Exercises the non-generic ICollection contract (Count, IsSynchronized,
/// SyncRoot, CopyTo) against a collection under test, reporting every
/// failure through the supplied Test object rather than throwing.
/// </summary>
public class ICollectionTester<T>
{
// Collection under test and the result sink; set by RunTest.
private ICollection m_pCollectionToTest;
private Test m_test;
// Asserts that Count matches the expected value.
private void CheckCount(int expectedCount)
{
m_test.Eval(m_pCollectionToTest.Count == expectedCount, "Expected Count on ICollection to be " + expectedCount + ", but found " + m_pCollectionToTest.Count);
}
// Asserts that IsSynchronized matches the expected value.
private void CheckIsSynchronized(bool expectedIsSynchronized)
{
m_test.Eval(m_pCollectionToTest.IsSynchronized == expectedIsSynchronized, "Expected IsSynchronized on ICollection to be " + expectedIsSynchronized + ", but found " + m_pCollectionToTest.IsSynchronized);
}
// Asserts that SyncRoot is non-null, reference-equal to the expected object,
// and usable as a lock target.
private void CheckSyncRoot(object expectedSyncRoot)
{
m_test.Eval(m_pCollectionToTest.SyncRoot != null, "Expected SyncRoot on ICollection to be non null actual " + m_pCollectionToTest.SyncRoot);
m_test.Eval(m_pCollectionToTest.SyncRoot == expectedSyncRoot, "Expected SyncRoot on ICollection to be " + expectedSyncRoot + ", but found " + m_pCollectionToTest.SyncRoot);
try
{
lock (m_pCollectionToTest.SyncRoot)
{
}
}
catch (Exception e)
{
m_test.Eval(false, "The following exception was thrown while trying to lock on SyncRoot\n" + e.ToString());
}
}
private void CheckCopyTo(Array expectedCopyTo)
{
CheckCopyTo(expectedCopyTo, false, false);
}
private void CheckCopyTo(Array expectedCopyTo, bool expectCopyToItemsOutOfOrder)
{
CheckCopyTo(expectedCopyTo, expectCopyToItemsOutOfOrder, false);
}
// Full CopyTo contract check: every invalid-argument probe below expects a
// specific exception; a successful copy is then compared against
// expectedCopyTo element-by-element (in order or as a multiset).
private void CheckCopyTo(Array expectedCopyTo, bool expectCopyToItemsOutOfOrder, bool copyToOnlySupportsZeroLowerBounds)
{
Object[] arrayToCopyTo = null;
T[] tArrayToCopyTo = new T[expectedCopyTo.Length];
Object[,] arrayToCopyToMulti = new Object[1, 1];
// Arrays of unrelated element types, used to provoke cast failures.
MyFooType[] badCastReferenceTypeArray = new MyFooType[expectedCopyTo.Length];
MyValueType[] badCastValueTypeArray = new MyValueType[expectedCopyTo.Length];
// null destination array
try
{
m_pCollectionToTest.CopyTo(arrayToCopyTo, 0);
m_test.Eval(false, "Expected ArgumentNullException when attempting to copy to null Array from ICollection.");
}
catch (ArgumentNullException)
{
}
catch (Exception E)
{
m_test.Eval(false, "Unknown Exception when attempting to copy to null Array from ICollection: " + E);
}
// multidimensional destination array
try
{
m_pCollectionToTest.CopyTo(arrayToCopyToMulti, 0);
m_test.Eval(false, "Expected ArgumentException when attempting to copy to Multidimensional Array from ICollection.");
}
catch (ArgumentException)
{
}
catch (Exception E)
{
m_test.Eval(false, "Unknown Exception when attempting to copy to Multidimensional Array from ICollection: " + E);
}
// destination array too small
arrayToCopyTo = new Object[expectedCopyTo.Length - 1];
try
{
m_pCollectionToTest.CopyTo(arrayToCopyTo, 0);
m_test.Eval(false, "Expected ArgumentException when attempting to copy to smaller Array from ICollection.");
}
catch (ArgumentException)
{
}
catch (Exception E)
{
m_test.Eval(false, "Unknown Exception when attempting to copy to smaller Array from ICollection: " + E);
}
// start index at or past the end of the destination
arrayToCopyTo = new Object[expectedCopyTo.Length];
try
{
m_pCollectionToTest.CopyTo(arrayToCopyTo, expectedCopyTo.Length);
m_test.Eval(false, "Expected ArgumentException when attempting to copy to array at index >= Length of Array from ICollection.");
}
catch (ArgumentException)
{
}
catch (Exception E)
{
m_test.Eval(false, "Unknown Exception when attempting to copy to array at index >= Length of Array from ICollection: " + E);
}
// negative start index
try
{
m_pCollectionToTest.CopyTo(arrayToCopyTo, -1);
m_test.Eval(false, "Expected ArgumentOutOfRangeException when attempting to copy to array at index < 0 from ICollection.");
}
catch (ArgumentOutOfRangeException)
{
}
catch (Exception E)
{
m_test.Eval(false, "Unknown Exception when attempting to copy to array at index < 0 from ICollection: " + E);
}
// not enough room between index and the end of the destination
try
{
m_pCollectionToTest.CopyTo(arrayToCopyTo, 1);
m_test.Eval(false, "Expected ArgumentException when attempting to copy to array without enough room between index and end of array from ICollection.");
}
catch (ArgumentException)
{
}
catch (Exception E)
{
m_test.Eval(false, "Unknown Exception when attempting to copy to array without enough room between index and end of array from ICollection: " + E);
}
// incompatible reference-type destination
try
{
m_pCollectionToTest.CopyTo(badCastReferenceTypeArray, 0);
m_test.Eval(false, "Expected ArrayTypeMismatchException when attempting to copy to array that cannot be cast to from ICollection.");
}
catch (ArgumentException)
{
}
catch (Exception E)
{
m_test.Eval(false, "Unknown Exception when attempting to copy to reference type array that cannot be cast to from ICollection: " + E);
}
// incompatible value-type destination
try
{
m_pCollectionToTest.CopyTo(badCastValueTypeArray, 0);
m_test.Eval(false, "Err_292haied Expected ArrayTypeMismatchException when attempting to copy to value type array that cannot be cast to from ICollection.");
}
#if WINCORESYS
catch ( System.InvalidCastException)
{
}
#endif
catch (ArgumentException)
{
}
catch (Exception E)
{
m_test.Eval(false, "Unknown Exception when attempting to copy to array that cannot be cast to from ICollection: " + E);
}
// valid copy into Object[]
try
{
m_pCollectionToTest.CopyTo(arrayToCopyTo, 0);
if (arrayToCopyTo.Length == expectedCopyTo.Length)
{
if (expectCopyToItemsOutOfOrder)
{
m_test.Eval(VerifyItemsOutOfOrder(expectedCopyTo, arrayToCopyTo, 0), "Err_70928ahpg Expected items and actual item differ");
}
else
{
m_test.Eval(VerifyItemsInOrder(expectedCopyTo, arrayToCopyTo, 0), "Err_5688pqygb Expected items and actual item differ");
}
}
else
{
m_test.Eval(false, "Expected copied array length of " + expectedCopyTo.Length + ", but found a length of " + arrayToCopyTo.Length);
}
}
catch (Exception E)
{
m_test.Eval(false, "Err_550578oqpg Unknown Exception when attempting to copy to array without enough room between index and end of array from ICollection: " + E);
}
// valid copy into a strongly-typed T[]
try
{
m_pCollectionToTest.CopyTo(tArrayToCopyTo, 0);
if (tArrayToCopyTo.Length == expectedCopyTo.Length)
{
if (expectCopyToItemsOutOfOrder)
{
m_test.Eval(VerifyItemsOutOfOrderT(expectedCopyTo, tArrayToCopyTo, 0), "Err_336879pqicbx Expected items and actual item differ");
}
else
{
m_test.Eval(VerifyItemsInOrderT(expectedCopyTo, tArrayToCopyTo, 0), "Err_35488qpag Expected items and actual item differ");
}
}
else
{
m_test.Eval(false, "Expected copied array length of " + expectedCopyTo.Length + ", but found a length of " + tArrayToCopyTo.Length);
}
}
catch (Exception E)
{
m_test.Eval(false, "Err_7839hpqg Unknown Exception when attempting to copy to T[] using CopyTo from ICollection: " + E);
}
//[] Non zero index
try
{
tArrayToCopyTo = new T[expectedCopyTo.Length + 1];
m_pCollectionToTest.CopyTo(tArrayToCopyTo, 1);
if (expectCopyToItemsOutOfOrder)
{
m_test.Eval(VerifyItemsOutOfOrderT(expectedCopyTo, tArrayToCopyTo, 1), "Err_56888apahpg Expected items and actual item differ");
}
else
{
m_test.Eval(VerifyItemsInOrderT(expectedCopyTo, tArrayToCopyTo, 1), "Err_00289aogs Expected items and actual item differ");
}
}
catch (Exception E)
{
m_test.Eval(false, "Err_78928pqyb Unknown Exception when attempting to copy to T[] using CopyTo from ICollection: " + E);
}
if (copyToOnlySupportsZeroLowerBounds)
{
//[] Non zero lower bounds
try
{
Array tempArray = Array.CreateInstance(typeof(Object), new int[] { expectedCopyTo.Length + 8 }, new int[] { -4 });
m_pCollectionToTest.CopyTo(tempArray, 0);
m_test.Eval(false, "Expected Argument when attempting to copy to array that has a non zero lower bound");
}
catch (ArgumentException)
{
}
catch (PlatformNotSupportedException)
{
}
catch (Exception E)
{
m_test.Eval(false, "Unknown Exception when attempting to copy to array that has a non zero lower bound: " + E);
}
}
}
// Element-by-element equality of expectedItems against a slice of actualItems.
private bool VerifyItemsInOrder(Array expectedItems, Object[] actualItems, int actualItemsIndex)
{
for (int i = 0; i < expectedItems.Length; ++i)
{
if (!m_test.Eval(expectedItems.GetValue(i).Equals(actualItems[i + actualItemsIndex]),
String.Format("Err_787892opyes Items differ expected={0} actual={1} at {2}",
expectedItems.GetValue(i), actualItems[i + actualItemsIndex], i)))
{
return false;
}
}
return true;
}
#if !DESKTOP
// Copies an Array into a List<Object> (ArrayList substitute off-desktop).
private List<Object> cloneArray(Array alst)
{
List<Object> result = new List<Object>();
foreach (Object obj in alst)
{
result.Add(obj);
}
return result;
}
#endif
// Multiset comparison: every actual item must match some expected item,
// consuming each expected item at most once.
private bool VerifyItemsOutOfOrder(Array expectedItems, Object[] actualItems, int actualItemsIndex)
{
#if DESKTOP
ArrayList expectedItemsArrayList = new ArrayList(expectedItems);
#else
List<Object> expectedItemsArrayList = cloneArray(expectedItems);
#endif
int itemIndex;
for (int i = 0; i < expectedItems.Length; ++i)
{
if (!m_test.Eval(-1 != (itemIndex = expectedItemsArrayList.IndexOf(actualItems[i + actualItemsIndex])),
"Err_07092apqcv Unexpected item in actualItems " + actualItems[i + actualItemsIndex]))
{
return false;
}
expectedItemsArrayList.RemoveAt(itemIndex);
}
if (!m_test.Eval(expectedItemsArrayList.Count == 0, "Err_87092pqytb Unexpected items in expectedItems that were not actualItems"))
{
for (int i = 0; i < expectedItemsArrayList.Count; ++i)
{
Console.WriteLine("\t" + expectedItemsArrayList[i]);
}
return false;
}
return true;
}
// T[] counterpart of VerifyItemsInOrder.
private bool VerifyItemsInOrderT(Array expectedItems, T[] actualItems, int actualItemsIndex)
{
for (int i = 0; i < expectedItems.Length; ++i)
{
if (!m_test.Eval(expectedItems.GetValue(i).Equals(actualItems[i + actualItemsIndex]),
String.Format("Err_787892opyes Items differ expected={0} actual={1} at {2}",
expectedItems.GetValue(i), actualItems[i + actualItemsIndex], i)))
{
return false;
}
}
return true;
}
// T[] counterpart of VerifyItemsOutOfOrder.
private bool VerifyItemsOutOfOrderT(Array expectedItems, T[] actualItems, int actualItemsIndex)
{
#if DESKTOP
ArrayList expectedItemsArrayList = new ArrayList(expectedItems);
#else
List<Object> expectedItemsArrayList = cloneArray(expectedItems);
#endif
int itemIndex;
for (int i = 0; i < expectedItems.Length; ++i)
{
if (!m_test.Eval(-1 != (itemIndex = expectedItemsArrayList.IndexOf(actualItems[i + actualItemsIndex])),
"Err_07092apqcv Unexpected item in actualItems " + actualItems[i + actualItemsIndex]))
{
return false;
}
expectedItemsArrayList.RemoveAt(itemIndex);
}
if (!m_test.Eval(expectedItemsArrayList.Count == 0, "Err_87092pqytb Unexpected items in expectedItems that were not actualItems"))
{
for (int i = 0; i < expectedItemsArrayList.Count; ++i)
{
Console.WriteLine("\t" + expectedItemsArrayList[i]);
}
return false;
}
return true;
}
// Entry points: run all checks against collectionToTest, reporting into test.
public void RunTest(Test test, ICollection collectionToTest, int expectedCount, bool expectedIsSynchronized, object expectedSyncRoot,
Array expectedCopyTo)
{
RunTest(test, collectionToTest, expectedCount, expectedIsSynchronized, expectedSyncRoot, expectedCopyTo, false);
}
public void RunTest(Test test, ICollection collectionToTest, int expectedCount, bool expectedIsSynchronized, object expectedSyncRoot,
Array expectedCopyTo, bool expectCopyToItemsOutOfOrder)
{
m_test = test;
m_pCollectionToTest = collectionToTest;
CheckCount(expectedCount);
CheckIsSynchronized(expectedIsSynchronized);
CheckSyncRoot(expectedSyncRoot);
CheckCopyTo(expectedCopyTo, expectCopyToItemsOutOfOrder);
}
public void RunTest(Test test, ICollection collectionToTest, int expectedCount, bool expectedIsSynchronized, object expectedSyncRoot,
Array expectedCopyTo, bool expectCopyToItemsOutOfOrder, bool copyToOnlySupportsZeroLowerBounds)
{
m_test = test;
m_pCollectionToTest = collectionToTest;
CheckCount(expectedCount);
CheckIsSynchronized(expectedIsSynchronized);
CheckSyncRoot(expectedSyncRoot);
CheckCopyTo(expectedCopyTo, expectCopyToItemsOutOfOrder, copyToOnlySupportsZeroLowerBounds);
}
// Deliberately unrelated element types used by the bad-cast CopyTo probes.
public class MyFooType
{
}
public struct MyValueType
{
}
}
}
| |
//
// SCSharp.UI.GameMenuDialog
//
// Authors:
// Chris Toshok ([email protected])
//
// Copyright 2006-2010 Chris Toshok
//
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.IO;
using System.Text;
using System.Threading;
using SdlDotNet.Core;
using SdlDotNet.Graphics;
using System.Drawing;
namespace SCSharp.UI
{
// In-game menu dialog: routes each menu element to its sub-dialog or event.
public class GameMenuDialog : UIDialog
{
	// Indices of the widgets inside rez_GameMenuBin.
	const int RETURNTOGAME_ELEMENT_INDEX = 1;
	const int SAVEGAME_ELEMENT_INDEX = 2;
	const int LOADGAME_ELEMENT_INDEX = 3;
	const int PAUSEGAME_ELEMENT_INDEX = 4;
	const int RESUMEGAME_ELEMENT_INDEX = 5;
	const int OPTIONS_ELEMENT_INDEX = 6;
	const int HELP_ELEMENT_INDEX = 7;
	const int MISSIONOBJECTIVES_ELEMENT_INDEX = 8;
	const int ENDMISSION_ELEMENT_INDEX = 9;

	// Raised when the player picks "Return to Game".
	public event DialogEvent ReturnToGame;

	public GameMenuDialog (UIScreen parent, Mpq mpq)
		: base (parent, mpq, "glue\\Palmm", Builtins.rez_GameMenuBin)
	{
		background_path = null;
	}

	protected override void ResourceLoader ()
	{
		base.ResourceLoader ();

		// Debug dump of the dialog's element list.
		for (int idx = 0; idx < Elements.Count; idx ++)
			Console.WriteLine ("{0}: {1} '{2}'", idx, Elements[idx].Type, Elements[idx].Text);

		Elements[RETURNTOGAME_ELEMENT_INDEX].Activate +=
			delegate () {
				if (ReturnToGame != null)
					ReturnToGame ();
			};

		Elements[OPTIONS_ELEMENT_INDEX].Activate +=
			delegate () {
				OptionsDialog dlg = new OptionsDialog (this, mpq);
				dlg.Previous += delegate () { DismissDialog (); };
				ShowDialog (dlg);
			};

		Elements[HELP_ELEMENT_INDEX].Activate +=
			delegate () {
				HelpDialog dlg = new HelpDialog (this, mpq);
				dlg.Previous += delegate () { DismissDialog (); };
				ShowDialog (dlg);
			};

		Elements[MISSIONOBJECTIVES_ELEMENT_INDEX].Activate +=
			delegate () {
				ObjectivesDialog dlg = new ObjectivesDialog (this, mpq);
				dlg.Previous += delegate () { DismissDialog (); };
				ShowDialog (dlg);
			};

		Elements[ENDMISSION_ELEMENT_INDEX].Activate +=
			delegate () {
				EndMissionDialog dlg = new EndMissionDialog (this, mpq);
				dlg.Previous += delegate () { DismissDialog (); };
#if false
				dlg.Quit += delegate () { DismissDialog ();
					/* XXX hack just to get things looking right */
					parent.DismissDialog ();
					Game.Instance.SwitchToScreen (UIScreenType.MainMenu); };
#endif
				ShowDialog (dlg);
			};
	}
}
// Mission-objectives dialog; only exposes a "previous" navigation event.
public class ObjectivesDialog : UIDialog
{
	const int PREVIOUS_ELEMENT_INDEX = 1;

	// Raised when the player dismisses the objectives screen.
	public event DialogEvent Previous;

	public ObjectivesDialog (UIScreen parent, Mpq mpq)
		: base (parent, mpq, "glue\\Palmm", Builtins.rez_ObjctDlgBin)
	{
		background_path = null;
	}

	protected override void ResourceLoader ()
	{
		base.ResourceLoader ();

		// Debug dump of the dialog's element list.
		for (int idx = 0; idx < Elements.Count; idx ++)
			Console.WriteLine ("{0}: {1} '{2}'", idx, Elements[idx].Type, Elements[idx].Text);

		Elements[PREVIOUS_ELEMENT_INDEX].Activate +=
			delegate () {
				if (Previous != null)
					Previous ();
			};
	}
}
// End-mission menu: restart, quit, exit-program, or go back.
public class EndMissionDialog : UIDialog
{
	const int PREVIOUS_ELEMENT_INDEX = 1;
	const int RESTARTMISSION_ELEMENT_INDEX = 2;
	const int QUITMISSION_ELEMENT_INDEX = 3;
	const int EXITPROGRAM_ELEMENT_INDEX = 4;

	// Raised when the player backs out of the end-mission menu.
	public event DialogEvent Previous;

	public EndMissionDialog (UIScreen parent, Mpq mpq)
		: base (parent, mpq, "glue\\Palmm", Builtins.rez_AbrtMenuBin)
	{
		background_path = null;
	}

	protected override void ResourceLoader ()
	{
		base.ResourceLoader ();

		// Debug dump of the dialog's element list.
		for (int idx = 0; idx < Elements.Count; idx ++)
			Console.WriteLine ("{0}: {1} '{2}'", idx, Elements[idx].Type, Elements[idx].Text);

		Elements[PREVIOUS_ELEMENT_INDEX].Activate +=
			delegate () {
				if (Previous != null)
					Previous ();
			};

		Elements[RESTARTMISSION_ELEMENT_INDEX].Activate +=
			delegate () {
				RestartConfirmationDialog dlg = new RestartConfirmationDialog (this, mpq);
				dlg.Cancel += delegate () { DismissDialog (); };
				ShowDialog (dlg);
			};

		Elements[QUITMISSION_ELEMENT_INDEX].Activate +=
			delegate () {
				QuitMissionDialog dlg = new QuitMissionDialog (this, mpq);
				dlg.Cancel += delegate () { DismissDialog (); };
				ShowDialog (dlg);
			};

		Elements[EXITPROGRAM_ELEMENT_INDEX].Activate +=
			delegate () {
				ExitConfirmationDialog dlg = new ExitConfirmationDialog (this, mpq);
				dlg.Cancel += delegate () { DismissDialog (); };
				ShowDialog (dlg);
			};
	}
}
// Confirmation dialog shown before restarting the mission.
public class RestartConfirmationDialog : UIDialog
{
	const int RESTART_ELEMENT_INDEX = 1;
	const int CANCEL_ELEMENT_INDEX = 2;

	// Raised when the player cancels the restart confirmation.
	public event DialogEvent Cancel;

	public RestartConfirmationDialog (UIScreen parent, Mpq mpq)
		: base (parent, mpq, "glue\\Palmm", Builtins.rez_RestartBin)
	{
		background_path = null;
	}

	protected override void ResourceLoader ()
	{
		base.ResourceLoader ();

		// Debug dump of the dialog's element list.
		for (int idx = 0; idx < Elements.Count; idx ++)
			Console.WriteLine ("{0}: {1} '{2}'", idx, Elements[idx].Type, Elements[idx].Text);

		// NOTE(review): only Cancel is wired; RESTART_ELEMENT_INDEX has no
		// handler here — presumably unimplemented, confirm against callers.
		Elements[CANCEL_ELEMENT_INDEX].Activate +=
			delegate () {
				if (Cancel != null)
					Cancel ();
			};
	}
}
// Confirmation dialog shown before quitting the current mission.
public class QuitMissionDialog : UIDialog
{
	const int QUIT_ELEMENT_INDEX = 1;
	const int OBSERVE_ELEMENT_INDEX = 2;
	const int CANCEL_ELEMENT_INDEX = 3;

	// Raised when the player cancels quitting.
	public event DialogEvent Cancel;

	public QuitMissionDialog (UIScreen parent, Mpq mpq)
		: base (parent, mpq, "glue\\Palmm", Builtins.rez_Quit2MnuBin)
	{
		background_path = null;
	}

	protected override void ResourceLoader ()
	{
		base.ResourceLoader ();

		// Debug dump of the dialog's element list.
		for (int idx = 0; idx < Elements.Count; idx ++)
			Console.WriteLine ("{0}: {1} '{2}'", idx, Elements[idx].Type, Elements[idx].Text);

		Elements[CANCEL_ELEMENT_INDEX].Activate +=
			delegate () {
				if (Cancel != null)
					Cancel ();
			};

		// Quitting tears down both this dialog and its parent, then jumps
		// straight to the score screen.
		Elements[QUIT_ELEMENT_INDEX].Activate +=
			delegate () {
				DismissDialog ();
				/* XXX hack just to get things looking right */
				parent.DismissDialog ();
				ScoreScreen score = new ScoreScreen (mpq, false /* we always lose when we quit */);
				Game.Instance.SwitchToScreen (score);
			};
	}
}
// Confirmation dialog shown before exiting the program.
public class ExitConfirmationDialog : UIDialog
{
	const int EXIT_ELEMENT_INDEX = 1;
	const int CANCEL_ELEMENT_INDEX = 2;

	// Raised when the player confirms exiting.
	public event DialogEvent Exit;
	// Raised when the player cancels the exit confirmation.
	public event DialogEvent Cancel;

	public ExitConfirmationDialog (UIScreen parent, Mpq mpq)
		: base (parent, mpq, "glue\\Palmm", Builtins.rez_QuitBin)
	{
		background_path = null;
	}

	protected override void ResourceLoader ()
	{
		base.ResourceLoader ();

		// Debug dump of the dialog's element list.
		for (int idx = 0; idx < Elements.Count; idx ++)
			Console.WriteLine ("{0}: {1} '{2}'", idx, Elements[idx].Type, Elements[idx].Text);

		Elements[CANCEL_ELEMENT_INDEX].Activate +=
			delegate () {
				if (Cancel != null)
					Cancel ();
			};

		Elements[EXIT_ELEMENT_INDEX].Activate +=
			delegate () {
				if (Exit != null)
					Exit ();
			};
	}
}
// Help menu: back out, or open the keystroke-help sub-dialog.
public class HelpDialog : UIDialog
{
	const int PREVIOUS_ELEMENT_INDEX = 1;
	const int KEYSTROKE_ELEMENT_INDEX = 2;
	const int TIPS_INDEX = 3;

	// Raised when the player leaves the help menu.
	public event DialogEvent Previous;

	public HelpDialog (UIScreen parent, Mpq mpq)
		: base (parent, mpq, "glue\\Palmm", Builtins.rez_HelpMenuBin)
	{
		background_path = null;
	}

	protected override void ResourceLoader ()
	{
		base.ResourceLoader ();

		// Debug dump of the dialog's element list.
		for (int idx = 0; idx < Elements.Count; idx ++)
			Console.WriteLine ("{0}: {1} '{2}'", idx, Elements[idx].Type, Elements[idx].Text);

		Elements[PREVIOUS_ELEMENT_INDEX].Activate +=
			delegate () {
				if (Previous != null)
					Previous ();
			};

		Elements[KEYSTROKE_ELEMENT_INDEX].Activate +=
			delegate () {
				KeystrokeDialog dlg = new KeystrokeDialog (this, mpq);
				dlg.Ok += delegate () { DismissDialog (); };
				ShowDialog (dlg);
			};
	}
}
// Keystroke help: an OK button plus a list box filled from the help-text Tbl.
public class KeystrokeDialog : UIDialog
{
	const int OK_ELEMENT_INDEX = 1;
	const int HELPLIST_ELEMENT_INDEX = 2;

	// Raised when the player acknowledges the keystroke help screen.
	public event DialogEvent Ok;

	public KeystrokeDialog (UIScreen parent, Mpq mpq)
		: base (parent, mpq, "glue\\Palmm", Builtins.rez_HelpBin)
	{
		background_path = null;
	}

	protected override void ResourceLoader ()
	{
		base.ResourceLoader ();

		// Debug dump of the dialog's element list.
		for (int idx = 0; idx < Elements.Count; idx ++)
			Console.WriteLine ("{0}: {1} '{2}'", idx, Elements[idx].Type, Elements[idx].Text);

		Elements[OK_ELEMENT_INDEX].Activate +=
			delegate () {
				if (Ok != null)
					Ok ();
			};

		// Populate the list box with every entry from the help-text resource.
		ListBoxElement helpList = (ListBoxElement)Elements[HELPLIST_ELEMENT_INDEX];
		Tbl helpText = (Tbl)mpq.GetResource (Builtins.rez_HelpTxtTbl);
		for (int n = 0; n < helpText.Strings.Length; n++)
			helpList.AddItem (helpText.Strings[n]);
	}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Reflection.Metadata;
using System.Reflection.Metadata.Ecma335;
using System.Runtime.CompilerServices;
namespace System.Reflection
{
// This file defines an internal class used to throw exceptions. The main purpose is to reduce code size.
// Also it improves the likelihood that callers will be inlined.
// Centralized throw helpers. Keeping `throw` statements out of line shrinks
// the code at every call site and improves the odds that callers themselves
// get inlined; each helper is explicitly marked NoInlining so the JIT never
// folds the throw back into the caller.
internal static class Throw
{
    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void InvalidCast() => throw new InvalidCastException();

    // NOTE: "Litte" (sic) matches the existing SR resource name and callers;
    // renaming would be a breaking change, so the typo is preserved.
    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void LitteEndianArchitectureRequired() => throw new PlatformNotSupportedException(SR.LitteEndianArchitectureRequired);

    // -- ArgumentException helpers --------------------------------------

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void InvalidArgument(string message, string parameterName) => throw new ArgumentException(message, parameterName);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void InvalidArgument_OffsetForVirtualHeapHandle() => throw new ArgumentException(SR.CantGetOffsetForVirtualHeapHandle, "handle");

    // Declared to return Exception (never actually returns) so callers can
    // write `throw Throw.X(...)` and keep the compiler's flow analysis happy.
    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static Exception InvalidArgument_UnexpectedHandleKind(HandleKind kind) => throw new ArgumentException(SR.Format(SR.UnexpectedHandleKind, kind));

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static Exception InvalidArgument_Handle(string parameterName) => throw new ArgumentException(SR.Format(SR.InvalidHandle), parameterName);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void LabelDoesntBelongToBuilder(string parameterName) => throw new ArgumentException(SR.LabelDoesntBelongToBuilder, parameterName);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void HeapHandleRequired() => throw new ArgumentException(SR.NotMetadataHeapHandle, "handle");

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void EntityOrUserStringHandleRequired() => throw new ArgumentException(SR.NotMetadataTableOrUserStringHandle, "handle");

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void InvalidToken() => throw new ArgumentException(SR.InvalidToken, "token");

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void ArgumentEmptyString(string parameterName) => throw new ArgumentException(SR.ExpectedNonEmptyString, parameterName);

    // -- InvalidOperationException helpers ------------------------------

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void SignatureNotVarArg() => throw new InvalidOperationException(SR.SignatureNotVarArg);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void ControlFlowBuilderNotAvailable() => throw new InvalidOperationException(SR.ControlFlowBuilderNotAvailable);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void InvalidOperationBuilderAlreadyLinked() => throw new InvalidOperationException(SR.BuilderAlreadyLinked);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void InvalidOperation(string message) => throw new InvalidOperationException(message);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void InvalidOperation_LabelNotMarked(int id) => throw new InvalidOperationException(SR.Format(SR.LabelNotMarked, id));

    // -- ArgumentNullException helpers ----------------------------------

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void ArgumentNull(string parameterName) => throw new ArgumentNullException(parameterName);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void ValueArgumentNull() => throw new ArgumentNullException("value");

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void BuilderArgumentNull() => throw new ArgumentNullException("builder");

    // -- ArgumentOutOfRangeException helpers ----------------------------

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void ArgumentOutOfRange(string parameterName) => throw new ArgumentOutOfRangeException(parameterName);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void ArgumentOutOfRange(string parameterName, string message) => throw new ArgumentOutOfRangeException(parameterName, message);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void BlobTooLarge(string parameterName) => throw new ArgumentOutOfRangeException(parameterName, SR.BlobTooLarge);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void IndexOutOfRange() => throw new ArgumentOutOfRangeException("index");

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void TableIndexOutOfRange() => throw new ArgumentOutOfRangeException("tableIndex");

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void ValueArgumentOutOfRange() => throw new ArgumentOutOfRangeException("value");

    // -- BadImageFormatException helpers (malformed metadata) -----------

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void OutOfBounds() => throw new BadImageFormatException(SR.OutOfBoundsRead);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void WriteOutOfBounds() => throw new InvalidOperationException(SR.OutOfBoundsWrite);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void InvalidCodedIndex() => throw new BadImageFormatException(SR.InvalidCodedIndex);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void InvalidHandle() => throw new BadImageFormatException(SR.InvalidHandle);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void InvalidCompressedInteger() => throw new BadImageFormatException(SR.InvalidCompressedInteger);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void InvalidSerializedString() => throw new BadImageFormatException(SR.InvalidSerializedString);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void ImageTooSmall() => throw new BadImageFormatException(SR.ImageTooSmall);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void ImageTooSmallOrContainsInvalidOffsetOrCount() => throw new BadImageFormatException(SR.ImageTooSmallOrContainsInvalidOffsetOrCount);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void ReferenceOverflow() => throw new BadImageFormatException(SR.RowIdOrHeapOffsetTooLarge);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void TableNotSorted(TableIndex tableIndex) => throw new BadImageFormatException(SR.Format(SR.MetadataTableNotSorted, tableIndex));

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void InvalidOperation_TableNotSorted(TableIndex tableIndex) => throw new InvalidOperationException(SR.Format(SR.MetadataTableNotSorted, tableIndex));

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void TooManySubnamespaces() => throw new BadImageFormatException(SR.TooManySubnamespaces);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void ValueOverflow() => throw new BadImageFormatException(SR.ValueTooLarge);

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void SequencePointValueOutOfRange() => throw new BadImageFormatException(SR.SequencePointValueOutOfRange);

    // -- Writer size-limit helper ---------------------------------------

    [MethodImpl(MethodImplOptions.NoInlining)]
    internal static void HeapSizeLimitExceeded(HeapIndex heap) => throw new ImageFormatLimitationException(SR.Format(SR.HeapSizeLimitExceeded, heap));
}
}
| |
using FluentMigrator.Builders;
using FluentMigrator.Expressions;
using FluentMigrator.Infrastructure;
using FluentMigrator.Model;
using Moq;
using NUnit.Framework;
using System;
using System.Collections.Generic;
using System.Linq;
using JetBrains.Annotations;
namespace FluentMigrator.Tests.Unit.Builders
{
[TestFixture]
public class ColumnExpressionBuilderHelperTests
{
[Test]
public void SetNotNullable_SetsColumnIfNotSettingExistingRowValues()
{
var builderMock = new Mock<IColumnExpressionBuilder>();
var contextMock = new Mock<IMigrationContext>();
var columnMock = new Mock<ColumnDefinition>();
builderMock.SetupGet(n => n.Column).Returns(columnMock.Object);
columnMock.SetupGet(n => n.ModificationType).Returns(ColumnModificationType.Create);
var helper = new ColumnExpressionBuilderHelper(builderMock.Object, contextMock.Object);
helper.SetNullable(false);
columnMock.VerifySet(n => n.IsNullable = false);
}
[Test]
public void SetNotNullable_DoesntSetIfSettingExistingRowValues()
{
var builderMock = new Mock<IColumnExpressionBuilder>();
var contextMock = new Mock<IMigrationContext>();
var columnMock = new Mock<ColumnDefinition>();
var columnCloneMock = new Mock<ColumnDefinition>();
builderMock.SetupGet(n => n.Column).Returns(columnMock.Object);
columnMock.SetupGet(n => n.ModificationType).Returns(ColumnModificationType.Create);
columnMock.Setup(n => n.Clone()).Returns(columnCloneMock.Object);
contextMock.Setup(n => n.Expressions.Add(It.IsAny<IMigrationExpression>()));
var helper = new ColumnExpressionBuilderHelper(builderMock.Object, contextMock.Object);
helper.SetExistingRowsTo("test");
helper.SetNullable(false);
//The column IsNullable should NOT be set to false, instead an internal clone
//should have been created, which will have IsNullable set to false.
columnMock.VerifySet(n => n.IsNullable = false, Times.Never());
columnCloneMock.VerifySet(n => n.IsNullable = false, Times.Once());
}
[Test]
public void SetExistingRows_AddsAllRowsExpression()
{
var builderMock = new Mock<IColumnExpressionBuilder>();
var contextMock = new Mock<IMigrationContext>();
IMigrationExpression addedExpression = null;
contextMock
.Setup(n => n.Expressions.Add(It.IsAny<IMigrationExpression>()))
.Callback((IMigrationExpression ex) => addedExpression = ex);
builderMock.SetupGet(n => n.SchemaName).Returns("Fred");
builderMock.SetupGet(n => n.TableName).Returns("Flinstone");
builderMock.SetupGet(n => n.Column.Name).Returns("ColName");
builderMock.SetupGet(n => n.Column.ModificationType).Returns(ColumnModificationType.Create);
var helper = new ColumnExpressionBuilderHelper(builderMock.Object, contextMock.Object);
helper.SetExistingRowsTo(5);
contextMock.Verify(n => n.Expressions.Add(It.IsAny<IMigrationExpression>()), Times.Once());
//Check that the update data expression was added as expected. Maybe there's a cleaner way to do this?
Assert.IsInstanceOf<UpdateDataExpression>(addedExpression);
UpdateDataExpression updateDataExpr = (UpdateDataExpression)addedExpression;
Assert.IsNotNull(updateDataExpr);
Assert.AreEqual("Fred", updateDataExpr.SchemaName);
Assert.AreEqual("Flinstone", updateDataExpr.TableName);
Assert.AreEqual(true, updateDataExpr.IsAllRows);
Assert.AreEqual(1, updateDataExpr.Set.Count);
Assert.AreEqual("ColName", updateDataExpr.Set[0].Key);
Assert.AreEqual(5, updateDataExpr.Set[0].Value);
}
[Test]
public void SetExistingRows_IgnoredIfAlterColumn()
{
var builderMock = new Mock<IColumnExpressionBuilder>();
var contextMock = new Mock<IMigrationContext>();
builderMock.SetupGet(n => n.Column.ModificationType).Returns(ColumnModificationType.Alter);
contextMock.Setup(n => n.Expressions.Add(It.IsAny<IMigrationExpression>()));
var helper = new ColumnExpressionBuilderHelper(builderMock.Object, contextMock.Object);
helper.SetExistingRowsTo("test");
contextMock.Verify(n => n.Expressions.Add(It.IsAny<IMigrationExpression>()), Times.Never());
}
[Test]
public void SetExistingRows_AfterNotNullableAddsAlterColumnExpression()
{
var builderMock = new Mock<IColumnExpressionBuilder>();
var contextMock = new Mock<IMigrationContext>();
List<IMigrationExpression> addedExpressions = new List<IMigrationExpression>();
contextMock.SetupGet(n => n.Expressions).Returns(addedExpressions);
builderMock.SetupGet(n => n.SchemaName).Returns("Fred");
builderMock.SetupGet(n => n.TableName).Returns("Flinstone");
var createColColumn = new ColumnDefinition
{
ModificationType = ColumnModificationType.Create,
Name = "ColName",
Type = System.Data.DbType.String,
CustomType = "CustomType",
Size = 12,
Precision = 2
};
builderMock.SetupGet(n => n.Column).Returns(createColColumn);
var helper = new ColumnExpressionBuilderHelper(builderMock.Object, contextMock.Object);
helper.SetNullable(false);
helper.SetExistingRowsTo(5);
Assert.AreEqual(2, addedExpressions.Count);
Assert.IsInstanceOf<UpdateDataExpression>(addedExpressions[0]);
Assert.IsInstanceOf<AlterColumnExpression>(addedExpressions[1]);
//TODO: refactor to use same method of checking as "CallingUniqueAddsIndexExpressionToContext" test does.
AlterColumnExpression alterColExpr = (AlterColumnExpression)addedExpressions[1];
Assert.AreNotSame(builderMock.Object.Column, alterColExpr.Column);
Assert.AreEqual("Fred", alterColExpr.SchemaName);
Assert.AreEqual("Flinstone", alterColExpr.TableName);
//Check that the the 'alter' expression column definition is not the same instance as the
//create column definition.
Assert.IsNotNull(alterColExpr.Column);
var alterColColumn = alterColExpr.Column;
Assert.AreNotSame(createColColumn, alterColColumn);
//Check that all properties on the alter expression column have been cloned.
//Could also test this by mocking .clone method to return another mock etc, just doing
//it here tho by comparing values.
Assert.AreEqual(ColumnModificationType.Alter, alterColColumn.ModificationType);
Assert.AreEqual("ColName", alterColColumn.Name);
Assert.AreEqual(System.Data.DbType.String, alterColColumn.Type);
Assert.AreEqual("CustomType", alterColColumn.CustomType);
Assert.AreEqual(false, alterColColumn.IsNullable);
Assert.AreEqual(12, alterColColumn.Size);
Assert.AreEqual(2, alterColColumn.Precision);
}
[Test]
public void SetExistingRows_AfterNotNullableSetsOriginalColumnNullable()
{
var builderMock = new Mock<IColumnExpressionBuilder>();
var contextMock = new Mock<IMigrationContext>();
List<IMigrationExpression> addedExpressions = new List<IMigrationExpression>();
contextMock.SetupGet(n => n.Expressions).Returns(addedExpressions);
builderMock.SetupGet(n => n.Column.ModificationType).Returns(ColumnModificationType.Create);
builderMock.Setup(n => n.Column.Clone()).Returns(new ColumnDefinition());
var helper = new ColumnExpressionBuilderHelper(builderMock.Object, contextMock.Object);
helper.SetNullable(false);
helper.SetExistingRowsTo(5);
//Check that column is nullable. This is because a later alter column statement will mark it non nullable.
builderMock.VerifySet(n => n.Column.IsNullable = true);
}
/*
//Will this ever happen? It should handle it, but need to test that if users goes
// .Nullable().SetExistingRowsTo(5).NotNullable() it will be handled.
public void SetExistingRows_SettingNullableRemovesAlterColumn()
{
throw new NotImplementedException();
}
*/
[Test]
public void SetNullable_ToTrue()
{
VerifyColumnModification(h => h.SetNullable(true), c => c.IsNullable = true);
}
[Test]
public void SetNullable_ToFalse()
{
VerifyColumnModification(h => h.SetNullable(false), c => c.IsNullable = false);
}
[Test]
public void CallingUniqueSetsIsUniqueToTrue()
{
VerifyColumnModification(h => h.Unique(null), c => c.IsUnique = true);
}
[Test]
public void CallingUniqueAddsIndexExpressionToContext()
{
var collectionMock = new Mock<ICollection<IMigrationExpression>>();
var builderMock = new Mock<IColumnExpressionBuilder>();
var contextMock = new Mock<IMigrationContext>();
builderMock.SetupGet(n => n.Column.ModificationType).Returns(ColumnModificationType.Create);
builderMock.SetupGet(n => n.Column.Name).Returns("BaconId");
builderMock.SetupGet(n => n.SchemaName).Returns("Eggs");
builderMock.SetupGet(n => n.TableName).Returns("Bacon");
contextMock.Setup(x => x.Expressions).Returns(collectionMock.Object);
var helper = new ColumnExpressionBuilderHelper(builderMock.Object, contextMock.Object);
helper.Unique("IX_Bacon_BaconId");
collectionMock.Verify(x => x.Add(It.Is<CreateIndexExpression>(
ix => ix.Index.Name == "IX_Bacon_BaconId"
&& ix.Index.TableName == "Bacon"
&& ix.Index.SchemaName == "Eggs"
&& ix.Index.IsUnique
&& !ix.Index.IsClustered
&& ix.Index.Columns.All(c => c.Name == "BaconId")
)));
contextMock.VerifyGet(x => x.Expressions);
}
[Test]
public void CallingIndexedNamedAddsIndexExpressionToContext()
{
var collectionMock = new Mock<ICollection<IMigrationExpression>>();
var builderMock = new Mock<IColumnExpressionBuilder>();
var contextMock = new Mock<IMigrationContext>();
builderMock.SetupGet(n => n.Column.ModificationType).Returns(ColumnModificationType.Create);
builderMock.SetupGet(n => n.Column.Name).Returns("BaconId");
builderMock.SetupGet(n => n.SchemaName).Returns("Eggs");
builderMock.SetupGet(n => n.TableName).Returns("Bacon");
contextMock.Setup(x => x.Expressions).Returns(collectionMock.Object);
var helper = new ColumnExpressionBuilderHelper(builderMock.Object, contextMock.Object);
helper.Indexed("IX_Bacon_BaconId");
collectionMock.Verify(x => x.Add(It.Is<CreateIndexExpression>(
ix => ix.Index.Name == "IX_Bacon_BaconId"
&& ix.Index.TableName == "Bacon"
&& ix.Index.SchemaName == "Eggs"
&& !ix.Index.IsUnique
&& !ix.Index.IsClustered
&& ix.Index.Columns.All(c => c.Name == "BaconId")
)));
contextMock.VerifyGet(x => x.Expressions);
}
[Test]
public void CallingIndexedSetsIsIndexedToTrue()
{
VerifyColumnModification(h => h.Indexed(null), c => c.IsIndexed = true);
}
private void VerifyColumnModification(
[NotNull] Action<ColumnExpressionBuilderHelper> helperCall,
[NotNull] Action<ColumnDefinition> expectedAction)
{
var builderMock = new Mock<IColumnExpressionBuilder>();
var contextMock = new Mock<IMigrationContext>();
builderMock.SetupGet(n => n.Column.ModificationType).Returns(ColumnModificationType.Create);
contextMock.SetupProperty(c => c.Expressions, new List<IMigrationExpression>());
var helper = new ColumnExpressionBuilderHelper(builderMock.Object, contextMock.Object);
helperCall.Invoke(helper);
builderMock.VerifySet(n => expectedAction(n.Column));
}
}
}
| |
// LzBinTree.cs
using System;
using System.IO;
namespace osuElements.Helpers.LZMA
{
// Binary-tree match finder for the LZMA encoder (port of 7-Zip's LzBinTree).
// Builds hash-indexed binary search trees over the sliding window provided by
// the InWindow base class, so the encoder can locate earlier occurrences of
// the byte sequence at the current position.
internal class BinTree : InWindow, IMatchFinder
{
    private uint _cyclicBufferPos;   // current slot in the cyclic node buffer
    private uint _cyclicBufferSize;  // historySize + 1 (set in Create)
    private uint _matchMaxLen;       // longest match length we will report
    private uint[] _son;             // tree links: two entries (two children) per position
    private uint[] _hash;            // hash value -> most recent position (tree root)
    private uint _cutValue = 0xFF;   // cap on tree-walk iterations per position (tuned in Create)
    private uint _hashMask;
    private uint _hashSizeSum;
    private bool HASH_ARRAY = true;  // true = hash 4 bytes (BT4-style), false = hash 2 bytes (BT2-style)
    private const uint kHash2Size = 1 << 10;
    private const uint kHash3Size = 1 << 16;
    private const uint kBT2HashSize = 1 << 16;
    private const uint kStartMaxLen = 1;
    private const uint kHash3Offset = kHash2Size;
    private const uint kEmptyHashValue = 0;  // sentinel meaning "no position stored"
    private const uint kMaxValForNormalize = ((uint)1 << 31) - 1;
    // These three depend on the hashing mode; see SetType.
    private uint kNumHashDirectBytes;
    private uint kMinMatchCheck = 4;
    private uint kFixHashSize = kHash2Size + kHash3Size;

    // Selects the hashing mode: numHashBytes > 2 enables the 4-byte hash with
    // auxiliary 2- and 3-byte sub-tables; otherwise a direct 2-byte hash is used.
    public void SetType(int numHashBytes)
    {
        HASH_ARRAY = numHashBytes > 2;
        if (HASH_ARRAY)
        {
            kNumHashDirectBytes = 0;
            kMinMatchCheck = 4;
            kFixHashSize = kHash2Size + kHash3Size;
        }
        else
        {
            kNumHashDirectBytes = 2;
            kMinMatchCheck = 2 + 1;
            kFixHashSize = 0;
        }
    }

    public new void SetStream(Stream stream) { base.SetStream(stream); }
    public new void ReleaseStream() { base.ReleaseStream(); }

    // Resets the window and clears the whole hash table.
    public new void Init()
    {
        base.Init();
        for (uint i = 0; i < _hashSizeSum; i++)
            _hash[i] = kEmptyHashValue;
        _cyclicBufferPos = 0;
        ReduceOffsets(-1);
    }

    // Advances one position, wrapping the cyclic buffer and renormalizing the
    // stored positions before _pos can reach kMaxValForNormalize.
    public new void MovePos()
    {
        if (++_cyclicBufferPos >= _cyclicBufferSize)
            _cyclicBufferPos = 0;
        base.MovePos();
        if (_pos == kMaxValForNormalize)
            Normalize();
    }

    public new byte GetIndexByte(int index) { return base.GetIndexByte(index); }
    public new uint GetMatchLen(int index, uint distance, uint limit)
    { return base.GetMatchLen(index, distance, limit); }
    public new uint GetNumAvailableBytes() { return base.GetNumAvailableBytes(); }

    // Allocates the window, tree and hash structures for the given history size.
    public void Create(uint historySize, uint keepAddBufferBefore,
            uint matchMaxLen, uint keepAddBufferAfter)
    {
        if (historySize > kMaxValForNormalize - 256)
            throw new Exception();  // TODO(review): a more specific exception type would be clearer
        _cutValue = 16 + (matchMaxLen >> 1);
        var windowReservSize = (historySize + keepAddBufferBefore +
                matchMaxLen + keepAddBufferAfter) / 2 + 256;
        base.Create(historySize + keepAddBufferBefore, matchMaxLen + keepAddBufferAfter, windowReservSize);
        _matchMaxLen = matchMaxLen;
        var cyclicBufferSize = historySize + 1;
        if (_cyclicBufferSize != cyclicBufferSize)
            _son = new uint[(_cyclicBufferSize = cyclicBufferSize) * 2];
        var hs = kBT2HashSize;
        if (HASH_ARRAY)
        {
            // Derive the main hash table size from the history size: smear the
            // high bit downward, clamp to at least 0x10000 entries and halve if
            // it would exceed 1 << 24, then add room for the fixed sub-tables.
            hs = historySize - 1;
            hs |= hs >> 1;
            hs |= hs >> 2;
            hs |= hs >> 4;
            hs |= hs >> 8;
            hs >>= 1;
            hs |= 0xFFFF;
            if (hs > 1 << 24)
                hs >>= 1;
            _hashMask = hs;
            hs++;
            hs += kFixHashSize;
        }
        if (hs != _hashSizeSum)
            _hash = new uint[_hashSizeSum = hs];
    }

    // Finds matches for the bytes at the current position. Results are written
    // to `distances` as (length, distance - 1) pairs of strictly increasing
    // length; returns the number of uints written. Always advances one position.
    public uint GetMatches(uint[] distances)
    {
        uint lenLimit;
        if (_pos + _matchMaxLen <= _streamPos)
            lenLimit = _matchMaxLen;
        else
        {
            // Near the end of the stream fewer bytes are available; if too few
            // remain to form a minimal match, just advance.
            lenLimit = _streamPos - _pos;
            if (lenLimit < kMinMatchCheck)
            {
                MovePos();
                return 0;
            }
        }
        uint offset = 0;
        var matchMinPos = _pos > _cyclicBufferSize ? _pos - _cyclicBufferSize : 0;
        var cur = _bufferOffset + _pos;
        var maxLen = kStartMaxLen; // to avoid items for len < hashSize;
        uint hashValue, hash2Value = 0, hash3Value = 0;
        if (HASH_ARRAY)
        {
            // CRC-mix the first bytes into 2-, 3- and 4-byte hash values.
            var temp = CRC.Table[_bufferBase[cur]] ^ _bufferBase[cur + 1];
            hash2Value = temp & (kHash2Size - 1);
            temp ^= (uint)_bufferBase[cur + 2] << 8;
            hash3Value = temp & (kHash3Size - 1);
            hashValue = (temp ^ (CRC.Table[_bufferBase[cur + 3]] << 5)) & _hashMask;
        }
        else
            hashValue = _bufferBase[cur] ^ ((uint)_bufferBase[cur + 1] << 8);
        var curMatch = _hash[kFixHashSize + hashValue];
        if (HASH_ARRAY)
        {
            // Probe the short-match sub-tables for length-2 and length-3 matches
            // before walking the tree (which only yields length >= 4).
            var curMatch2 = _hash[hash2Value];
            var curMatch3 = _hash[kHash3Offset + hash3Value];
            _hash[hash2Value] = _pos;
            _hash[kHash3Offset + hash3Value] = _pos;
            if (curMatch2 > matchMinPos)
                if (_bufferBase[_bufferOffset + curMatch2] == _bufferBase[cur])
                {
                    distances[offset++] = maxLen = 2;
                    distances[offset++] = _pos - curMatch2 - 1;
                }
            if (curMatch3 > matchMinPos)
                if (_bufferBase[_bufferOffset + curMatch3] == _bufferBase[cur])
                {
                    // Same position found via both sub-tables: keep only the longer entry.
                    if (curMatch3 == curMatch2)
                        offset -= 2;
                    distances[offset++] = maxLen = 3;
                    distances[offset++] = _pos - curMatch3 - 1;
                    curMatch2 = curMatch3;
                }
            if (offset != 0 && curMatch2 == curMatch)
            {
                // The tree walk will rediscover this position; drop the duplicate.
                offset -= 2;
                maxLen = kStartMaxLen;
            }
        }
        _hash[kFixHashSize + hashValue] = _pos;
        // ptr0/ptr1 track where the right/left subtree links of the current
        // position will be written as the tree is re-rooted at _pos.
        var ptr0 = (_cyclicBufferPos << 1) + 1;
        var ptr1 = _cyclicBufferPos << 1;
        uint len1;
        var len0 = len1 = kNumHashDirectBytes;
        if (kNumHashDirectBytes != 0)
        {
            if (curMatch > matchMinPos)
            {
                // In 2-byte-hash mode the hash guarantees the first two bytes
                // match; if the next byte differs the match is exactly
                // kNumHashDirectBytes long, so record it here.
                if (_bufferBase[_bufferOffset + curMatch + kNumHashDirectBytes] !=
                        _bufferBase[cur + kNumHashDirectBytes])
                {
                    distances[offset++] = maxLen = kNumHashDirectBytes;
                    distances[offset++] = _pos - curMatch - 1;
                }
            }
        }
        var count = _cutValue;
        while (true)
        {
            if (curMatch <= matchMinPos || count-- == 0)
            {
                // Ran off the window or exhausted the step budget: terminate the tree here.
                _son[ptr0] = _son[ptr1] = kEmptyHashValue;
                break;
            }
            var delta = _pos - curMatch;
            var cyclicPos = (delta <= _cyclicBufferPos ?
                    _cyclicBufferPos - delta :
                    _cyclicBufferPos - delta + _cyclicBufferSize) << 1;
            var pby1 = _bufferOffset + curMatch;
            // len0/len1 are lengths already known to match along each branch;
            // comparison can resume at their minimum.
            var len = Math.Min(len0, len1);
            if (_bufferBase[pby1 + len] == _bufferBase[cur + len])
            {
                while (++len != lenLimit)
                    if (_bufferBase[pby1 + len] != _bufferBase[cur + len])
                        break;
                if (maxLen < len)
                {
                    distances[offset++] = maxLen = len;
                    distances[offset++] = delta - 1;
                    if (len == lenLimit)
                    {
                        // Full-length match: splice the node's children in and stop.
                        _son[ptr1] = _son[cyclicPos];
                        _son[ptr0] = _son[cyclicPos + 1];
                        break;
                    }
                }
            }
            // Descend left or right depending on the first differing byte.
            if (_bufferBase[pby1 + len] < _bufferBase[cur + len])
            {
                _son[ptr1] = curMatch;
                ptr1 = cyclicPos + 1;
                curMatch = _son[ptr1];
                len1 = len;
            }
            else
            {
                _son[ptr0] = curMatch;
                ptr0 = cyclicPos;
                curMatch = _son[ptr0];
                len0 = len;
            }
        }
        MovePos();
        return offset;
    }

    // Advances `num` positions while keeping the hash table and trees up to
    // date, without reporting any matches (same tree maintenance as GetMatches).
    public void Skip(uint num)
    {
        do
        {
            uint lenLimit;
            if (_pos + _matchMaxLen <= _streamPos)
                lenLimit = _matchMaxLen;
            else
            {
                lenLimit = _streamPos - _pos;
                if (lenLimit < kMinMatchCheck)
                {
                    MovePos();
                    continue;
                }
            }
            var matchMinPos = _pos > _cyclicBufferSize ? _pos - _cyclicBufferSize : 0;
            var cur = _bufferOffset + _pos;
            uint hashValue;
            if (HASH_ARRAY)
            {
                // Update the 2- and 3-byte sub-tables and compute the main hash.
                var temp = CRC.Table[_bufferBase[cur]] ^ _bufferBase[cur + 1];
                var hash2Value = temp & (kHash2Size - 1);
                _hash[hash2Value] = _pos;
                temp ^= (uint)_bufferBase[cur + 2] << 8;
                var hash3Value = temp & (kHash3Size - 1);
                _hash[kHash3Offset + hash3Value] = _pos;
                hashValue = (temp ^ (CRC.Table[_bufferBase[cur + 3]] << 5)) & _hashMask;
            }
            else
                hashValue = _bufferBase[cur] ^ ((uint)_bufferBase[cur + 1] << 8);
            var curMatch = _hash[kFixHashSize + hashValue];
            _hash[kFixHashSize + hashValue] = _pos;
            var ptr0 = (_cyclicBufferPos << 1) + 1;
            var ptr1 = _cyclicBufferPos << 1;
            uint len1;
            var len0 = len1 = kNumHashDirectBytes;
            var count = _cutValue;
            while (true)
            {
                if (curMatch <= matchMinPos || count-- == 0)
                {
                    _son[ptr0] = _son[ptr1] = kEmptyHashValue;
                    break;
                }
                var delta = _pos - curMatch;
                var cyclicPos = (delta <= _cyclicBufferPos ?
                        _cyclicBufferPos - delta :
                        _cyclicBufferPos - delta + _cyclicBufferSize) << 1;
                var pby1 = _bufferOffset + curMatch;
                var len = Math.Min(len0, len1);
                if (_bufferBase[pby1 + len] == _bufferBase[cur + len])
                {
                    while (++len != lenLimit)
                        if (_bufferBase[pby1 + len] != _bufferBase[cur + len])
                            break;
                    if (len == lenLimit)
                    {
                        _son[ptr1] = _son[cyclicPos];
                        _son[ptr0] = _son[cyclicPos + 1];
                        break;
                    }
                }
                if (_bufferBase[pby1 + len] < _bufferBase[cur + len])
                {
                    _son[ptr1] = curMatch;
                    ptr1 = cyclicPos + 1;
                    curMatch = _son[ptr1];
                    len1 = len;
                }
                else
                {
                    _son[ptr0] = curMatch;
                    ptr0 = cyclicPos;
                    curMatch = _son[ptr0];
                    len0 = len;
                }
            }
            MovePos();
        }
        while (--num != 0);
    }

    // Subtracts subValue from every stored position, mapping anything that
    // fell out of the window to the empty sentinel.
    private void NormalizeLinks(uint[] items, uint numItems, uint subValue)
    {
        for (uint i = 0; i < numItems; i++)
        {
            var value = items[i];
            if (value <= subValue)
                value = kEmptyHashValue;
            else
                value -= subValue;
            items[i] = value;
        }
    }

    // Rebases all positions (tree links, hash entries and window offsets) so
    // that _pos arithmetic cannot overflow; triggered from MovePos.
    private void Normalize()
    {
        var subValue = _pos - _cyclicBufferSize;
        NormalizeLinks(_son, _cyclicBufferSize * 2, subValue);
        NormalizeLinks(_hash, _hashSizeSum, subValue);
        ReduceOffsets((int)subValue);
    }

    // Overrides the default tree-walk budget chosen by Create.
    public void SetCutValue(uint cutValue) { _cutValue = cutValue; }
}
}
| |
// Copyright (c) .NET Foundation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using FluentAssertions;
using Microsoft.DotNet.Cli.Sln.Internal;
using Microsoft.DotNet.Tools.Test.Utilities;
using System;
using System.IO;
using System.Linq;
using Xunit;
namespace Microsoft.DotNet.Cli.Sln.Remove.Tests
{
public class GivenDotnetSlnRemove : TestBase
{
private const string HelpText = @".NET Remove project(s) from a solution file Command
Usage: dotnet sln <SLN_FILE> remove [options] <args>
Arguments:
<SLN_FILE> Solution file to operate on. If not specified, the command will search the current directory for one.
<args> Remove the specified project(s) from the solution. The project is not impacted.
Options:
-h, --help Show help information
";
private const string ExpectedSlnContentsAfterRemove = @"
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
VisualStudioVersion = 15.0.26006.2
MinimumVisualStudioVersion = 10.0.40219.1
Project(""{9A19103F-16F7-4668-BE54-9A1E7A4F7556}"") = ""App"", ""App\App.csproj"", ""{7072A694-548F-4CAE-A58F-12D257D5F486}""
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Debug|x64 = Debug|x64
Debug|x86 = Debug|x86
Release|Any CPU = Release|Any CPU
Release|x64 = Release|x64
Release|x86 = Release|x86
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|Any CPU.Build.0 = Debug|Any CPU
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|x64.ActiveCfg = Debug|x64
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|x64.Build.0 = Debug|x64
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|x86.ActiveCfg = Debug|x86
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|x86.Build.0 = Debug|x86
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|Any CPU.ActiveCfg = Release|Any CPU
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|Any CPU.Build.0 = Release|Any CPU
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|x64.ActiveCfg = Release|x64
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|x64.Build.0 = Release|x64
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|x86.ActiveCfg = Release|x86
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|x86.Build.0 = Release|x86
EndGlobalSection
EndGlobal
";
private const string ExpectedSlnContentsAfterRemoveAllProjects = @"
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
VisualStudioVersion = 15.0.26006.2
MinimumVisualStudioVersion = 10.0.40219.1
Global
EndGlobal
";
private const string ExpectedSlnContentsAfterRemoveNestedProj = @"
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
VisualStudioVersion = 15.0.26006.2
MinimumVisualStudioVersion = 10.0.40219.1
Project(""{9A19103F-16F7-4668-BE54-9A1E7A4F7556}"") = ""App"", ""App.csproj"", ""{7072A694-548F-4CAE-A58F-12D257D5F486}""
EndProject
Project(""{2150E333-8FDC-42A3-9474-1A3956D46DE8}"") = ""src"", ""src"", ""{7B86CE74-F620-4B32-99FE-82D40F8D6BF2}""
EndProject
Project(""{2150E333-8FDC-42A3-9474-1A3956D46DE8}"") = ""Lib"", ""Lib"", ""{EAB71280-AF32-4531-8703-43CDBA261AA3}""
EndProject
Project(""{9A19103F-16F7-4668-BE54-9A1E7A4F7556}"") = ""Lib"", ""src\Lib\Lib.csproj"", ""{84A45D44-B677-492D-A6DA-B3A71135AB8E}""
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Debug|x64 = Debug|x64
Debug|x86 = Debug|x86
Release|Any CPU = Release|Any CPU
Release|x64 = Release|x64
Release|x86 = Release|x86
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|Any CPU.Build.0 = Debug|Any CPU
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|x64.ActiveCfg = Debug|x64
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|x64.Build.0 = Debug|x64
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|x86.ActiveCfg = Debug|x86
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|x86.Build.0 = Debug|x86
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|Any CPU.ActiveCfg = Release|Any CPU
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|Any CPU.Build.0 = Release|Any CPU
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|x64.ActiveCfg = Release|x64
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|x64.Build.0 = Release|x64
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|x86.ActiveCfg = Release|x86
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|x86.Build.0 = Release|x86
{84A45D44-B677-492D-A6DA-B3A71135AB8E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{84A45D44-B677-492D-A6DA-B3A71135AB8E}.Debug|Any CPU.Build.0 = Debug|Any CPU
{84A45D44-B677-492D-A6DA-B3A71135AB8E}.Debug|x64.ActiveCfg = Debug|x64
{84A45D44-B677-492D-A6DA-B3A71135AB8E}.Debug|x64.Build.0 = Debug|x64
{84A45D44-B677-492D-A6DA-B3A71135AB8E}.Debug|x86.ActiveCfg = Debug|x86
{84A45D44-B677-492D-A6DA-B3A71135AB8E}.Debug|x86.Build.0 = Debug|x86
{84A45D44-B677-492D-A6DA-B3A71135AB8E}.Release|Any CPU.ActiveCfg = Release|Any CPU
{84A45D44-B677-492D-A6DA-B3A71135AB8E}.Release|Any CPU.Build.0 = Release|Any CPU
{84A45D44-B677-492D-A6DA-B3A71135AB8E}.Release|x64.ActiveCfg = Release|x64
{84A45D44-B677-492D-A6DA-B3A71135AB8E}.Release|x64.Build.0 = Release|x64
{84A45D44-B677-492D-A6DA-B3A71135AB8E}.Release|x86.ActiveCfg = Release|x86
{84A45D44-B677-492D-A6DA-B3A71135AB8E}.Release|x86.Build.0 = Release|x86
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
{EAB71280-AF32-4531-8703-43CDBA261AA3} = {7B86CE74-F620-4B32-99FE-82D40F8D6BF2}
{84A45D44-B677-492D-A6DA-B3A71135AB8E} = {EAB71280-AF32-4531-8703-43CDBA261AA3}
EndGlobalSection
EndGlobal
";
private const string ExpectedSlnContentsAfterRemoveLastNestedProj = @"
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
VisualStudioVersion = 15.0.26006.2
MinimumVisualStudioVersion = 10.0.40219.1
Project(""{9A19103F-16F7-4668-BE54-9A1E7A4F7556}"") = ""App"", ""App.csproj"", ""{7072A694-548F-4CAE-A58F-12D257D5F486}""
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Debug|x64 = Debug|x64
Debug|x86 = Debug|x86
Release|Any CPU = Release|Any CPU
Release|x64 = Release|x64
Release|x86 = Release|x86
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|Any CPU.Build.0 = Debug|Any CPU
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|x64.ActiveCfg = Debug|x64
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|x64.Build.0 = Debug|x64
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|x86.ActiveCfg = Debug|x86
{7072A694-548F-4CAE-A58F-12D257D5F486}.Debug|x86.Build.0 = Debug|x86
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|Any CPU.ActiveCfg = Release|Any CPU
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|Any CPU.Build.0 = Release|Any CPU
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|x64.ActiveCfg = Release|x64
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|x64.Build.0 = Release|x64
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|x86.ActiveCfg = Release|x86
{7072A694-548F-4CAE-A58F-12D257D5F486}.Release|x86.Build.0 = Release|x86
EndGlobalSection
EndGlobal
";
[Theory]
[InlineData("--help")]
[InlineData("-h")]
public void WhenHelpOptionIsPassedItPrintsUsage(string helpArg)
{
    // Both help flags should succeed and print the command usage text.
    var result = new DotnetCommand()
        .ExecuteWithCapturedOutput($"sln remove {helpArg}");

    result.Should().Pass();
    result.StdOut.Should().BeVisuallyEquivalentTo(HelpText);
}
[Fact]
public void WhenTooManyArgumentsArePassedItPrintsError()
{
    // Extra solution-file arguments are rejected before any project is processed.
    var result = new DotnetCommand()
        .ExecuteWithCapturedOutput("sln one.sln two.sln three.sln remove");

    result.Should().Fail();
    result.StdErr.Should().BeVisuallyEquivalentTo("Unrecognized command or argument 'two.sln'\r\nUnrecognized command or argument 'three.sln'\r\nYou must specify at least one project to remove.");
}
[Theory]
[InlineData("")]
[InlineData("unknownCommandName")]
public void WhenNoCommandIsPassedItPrintsError(string commandName)
{
    // A missing or unknown sub-command after "sln" must fail with the parser error.
    var result = new DotnetCommand()
        .ExecuteWithCapturedOutput($"sln {commandName}");

    result.Should().Fail();
    result.StdErr.Should().Be("Required command was not provided.");
}
[Theory]
[InlineData("idontexist.sln")]
[InlineData("ihave?invalidcharacters")]
[InlineData("ihaveinv@lidcharacters")]
[InlineData("ihaveinvalid/characters")]
[InlineData("ihaveinvalidchar\\acters")]
public void WhenNonExistingSolutionIsPassedItPrintsErrorAndUsage(string solutionName)
{
    // Missing files and names with path-invalid characters get the same lookup error.
    var result = new DotnetCommand()
        .ExecuteWithCapturedOutput($"sln {solutionName} remove p.csproj");

    result.Should().Fail();
    result.StdErr.Should().Be($"Could not find solution or directory `{solutionName}`.");
    result.StdOut.Should().BeVisuallyEquivalentTo(HelpText);
}
[Fact]
public void WhenInvalidSolutionIsPassedItPrintsErrorAndUsage()
{
    // An explicitly named solution file with a broken header should be rejected.
    var testRoot = TestAssets
        .Get("InvalidSolution")
        .CreateInstance()
        .WithSourceFiles()
        .Root
        .FullName;
    var targetProject = Path.Combine("Lib", "Lib.csproj");

    var result = new DotnetCommand()
        .WithWorkingDirectory(testRoot)
        .ExecuteWithCapturedOutput($"sln InvalidSolution.sln remove {targetProject}");

    result.Should().Fail();
    result.StdErr.Should().Be("Invalid solution `InvalidSolution.sln`. Invalid format in line 1: File header is missing");
    result.StdOut.Should().BeVisuallyEquivalentTo(HelpText);
}
[Fact]
public void WhenInvalidSolutionIsFoundItPrintsErrorAndUsage()
{
    // Same as the explicit-path case, but the broken solution is auto-discovered,
    // so the error message carries the full resolved path.
    var testRoot = TestAssets
        .Get("InvalidSolution")
        .CreateInstance()
        .WithSourceFiles()
        .Root
        .FullName;
    var slnPath = Path.Combine(testRoot, "InvalidSolution.sln");
    var targetProject = Path.Combine("Lib", "Lib.csproj");

    var result = new DotnetCommand()
        .WithWorkingDirectory(testRoot)
        .ExecuteWithCapturedOutput($"sln remove {targetProject}");

    result.Should().Fail();
    result.StdErr.Should().Be($"Invalid solution `{slnPath}`. Invalid format in line 1: File header is missing");
    result.StdOut.Should().BeVisuallyEquivalentTo(HelpText);
}
[Fact]
public void WhenNoProjectIsPassedItPrintsErrorAndUsage()
{
    // "sln <file> remove" with no project argument must fail with a clear message.
    var testRoot = TestAssets
        .Get("TestAppWithSlnAndCsprojFiles")
        .CreateInstance()
        .WithSourceFiles()
        .Root
        .FullName;

    var result = new DotnetCommand()
        .WithWorkingDirectory(testRoot)
        .ExecuteWithCapturedOutput(@"sln App.sln remove");

    result.Should().Fail();
    result.StdErr.Should().Be("You must specify at least one project to remove.");
    result.StdOut.Should().BeVisuallyEquivalentTo(HelpText);
}
[Fact]
public void WhenNoSolutionExistsInTheDirectoryItPrintsErrorAndUsage()
{
    // Running from a subdirectory that contains no .sln must report the directory searched.
    var testRoot = TestAssets
        .Get("TestAppWithSlnAndCsprojFiles")
        .CreateInstance()
        .WithSourceFiles()
        .Root
        .FullName;
    var workingDir = Path.Combine(testRoot, "App");

    var result = new DotnetCommand()
        .WithWorkingDirectory(workingDir)
        .ExecuteWithCapturedOutput(@"sln remove App.csproj");

    result.Should().Fail();
    result.StdErr.Should().Be($"Specified solution file {workingDir + Path.DirectorySeparatorChar} does not exist, or there is no solution file in the directory.");
    result.StdOut.Should().BeVisuallyEquivalentTo(HelpText);
}
[Fact]
public void WhenMoreThanOneSolutionExistsInTheDirectoryItPrintsErrorAndUsage()
{
    // Ambiguous auto-discovery (two .sln files) must ask the user to disambiguate.
    var testRoot = TestAssets
        .Get("TestAppWithMultipleSlnFiles")
        .CreateInstance()
        .WithSourceFiles()
        .Root
        .FullName;
    var targetProject = Path.Combine("Lib", "Lib.csproj");

    var result = new DotnetCommand()
        .WithWorkingDirectory(testRoot)
        .ExecuteWithCapturedOutput($"sln remove {targetProject}");

    result.Should().Fail();
    result.StdErr.Should().Be($"Found more than one solution file in {testRoot + Path.DirectorySeparatorChar}. Please specify which one to use.");
    result.StdOut.Should().BeVisuallyEquivalentTo(HelpText);
}
[Fact]
public void WhenPassedAReferenceNotInSlnItPrintsStatus()
{
    // Removing a project that is not in the solution succeeds, prints a notice,
    // and leaves the solution file untouched.
    var testRoot = TestAssets
        .Get("TestAppWithSlnAndExistingCsprojReferences")
        .CreateInstance()
        .WithSourceFiles()
        .Root
        .FullName;
    var slnPath = Path.Combine(testRoot, "App.sln");
    var originalContent = File.ReadAllText(slnPath);

    var result = new DotnetCommand()
        .WithWorkingDirectory(testRoot)
        .ExecuteWithCapturedOutput("sln remove referenceDoesNotExistInSln.csproj");

    result.Should().Pass();
    result.StdOut.Should().Be("Project reference `referenceDoesNotExistInSln.csproj` could not be found.");
    File.ReadAllText(slnPath)
        .Should().BeVisuallyEquivalentTo(originalContent);
}
[Fact]
public void WhenPassedAReferenceItRemovesTheReferenceButNotOtherReferences()
{
    // Only the named project leaves the solution; the other reference survives.
    var testRoot = TestAssets
        .Get("TestAppWithSlnAndExistingCsprojReferences")
        .CreateInstance()
        .WithSourceFiles()
        .Root
        .FullName;
    var slnPath = Path.Combine(testRoot, "App.sln");
    SlnFile.Read(slnPath).Projects.Count.Should().Be(2);
    var targetProject = Path.Combine("Lib", "Lib.csproj");

    var result = new DotnetCommand()
        .WithWorkingDirectory(testRoot)
        .ExecuteWithCapturedOutput($"sln remove {targetProject}");

    result.Should().Pass();
    result.StdOut.Should().Be($"Project reference `{targetProject}` removed.");

    var slnAfter = SlnFile.Read(slnPath);
    slnAfter.Projects.Count.Should().Be(1);
    slnAfter.Projects[0].FilePath.Should().Be(Path.Combine("App", "App.csproj"));
}
[Fact]
public void WhenDuplicateReferencesArePresentItRemovesThemAll()
{
    // A single remove request deletes every duplicate entry for the same project.
    var testRoot = TestAssets
        .Get("TestAppWithSlnAndDuplicateProjectReferences")
        .CreateInstance()
        .WithSourceFiles()
        .Root
        .FullName;
    var slnPath = Path.Combine(testRoot, "App.sln");
    SlnFile.Read(slnPath).Projects.Count.Should().Be(3);
    var targetProject = Path.Combine("Lib", "Lib.csproj");

    var result = new DotnetCommand()
        .WithWorkingDirectory(testRoot)
        .ExecuteWithCapturedOutput($"sln remove {targetProject}");

    result.Should().Pass();
    // One "removed" line is printed per duplicate entry that existed.
    string expectedOutput = $@"Project reference `{targetProject}` removed.
Project reference `{targetProject}` removed.";
    result.StdOut.Should().BeVisuallyEquivalentTo(expectedOutput);

    var slnAfter = SlnFile.Read(slnPath);
    slnAfter.Projects.Count.Should().Be(1);
    slnAfter.Projects[0].FilePath.Should().Be(Path.Combine("App", "App.csproj"));
}
[Fact]
public void WhenPassedMultipleReferencesAndOneOfThemDoesNotExistItRemovesTheOneThatExists()
{
    // Unknown projects are reported but do not abort removal of the valid one.
    var testRoot = TestAssets
        .Get("TestAppWithSlnAndExistingCsprojReferences")
        .CreateInstance()
        .WithSourceFiles()
        .Root
        .FullName;
    var slnPath = Path.Combine(testRoot, "App.sln");
    SlnFile.Read(slnPath).Projects.Count.Should().Be(2);
    var targetProject = Path.Combine("Lib", "Lib.csproj");

    var result = new DotnetCommand()
        .WithWorkingDirectory(testRoot)
        .ExecuteWithCapturedOutput($"sln remove idontexist.csproj {targetProject} idontexisteither.csproj");

    result.Should().Pass();
    // Status lines appear in the order the arguments were given.
    string expectedOutput = $@"Project reference `idontexist.csproj` could not be found.
Project reference `{targetProject}` removed.
Project reference `idontexisteither.csproj` could not be found.";
    result.StdOut.Should().BeVisuallyEquivalentTo(expectedOutput);

    var slnAfter = SlnFile.Read(slnPath);
    slnAfter.Projects.Count.Should().Be(1);
    slnAfter.Projects[0].FilePath.Should().Be(Path.Combine("App", "App.csproj"));
}
[Fact]
public void WhenReferenceIsRemovedBuildConfigsAreAlsoRemoved()
{
    // The ProjectConfigurationPlatforms entries of the removed project must go too;
    // compare against the full expected solution text.
    var testRoot = TestAssets
        .Get("TestAppWithSlnAndCsprojToRemove")
        .CreateInstance()
        .WithSourceFiles()
        .Root
        .FullName;
    var slnPath = Path.Combine(testRoot, "App.sln");
    SlnFile.Read(slnPath).Projects.Count.Should().Be(2);
    var targetProject = Path.Combine("Lib", "Lib.csproj");

    var result = new DotnetCommand()
        .WithWorkingDirectory(testRoot)
        .ExecuteWithCapturedOutput($"sln remove {targetProject}");

    result.Should().Pass();
    File.ReadAllText(slnPath)
        .Should().BeVisuallyEquivalentTo(ExpectedSlnContentsAfterRemove);
}
[Fact]
public void WhenReferenceIsRemovedSlnBuilds()
{
    // End-to-end sanity check: after removing Lib, the solution must still
    // restore and build in Release, proving the remaining project's build
    // configurations were not damaged by the edit.
    var projectDirectory = TestAssets
        .Get("TestAppWithSlnAndCsprojToRemove")
        .CreateInstance()
        .WithSourceFiles()
        .Root
        .FullName;
    var solutionPath = Path.Combine(projectDirectory, "App.sln");
    SlnFile slnFile = SlnFile.Read(solutionPath);
    slnFile.Projects.Count.Should().Be(2);
    var projectToRemove = Path.Combine("Lib", "Lib.csproj");

    var cmd = new DotnetCommand()
        .WithWorkingDirectory(projectDirectory)
        .ExecuteWithCapturedOutput($"sln remove {projectToRemove}");
    cmd.Should().Pass();

    // Fix: the restore argument was a pointless interpolated string ($"...")
    // with no placeholders; use a plain literal.
    new DotnetCommand()
        .WithWorkingDirectory(projectDirectory)
        .Execute("restore App.sln")
        .Should().Pass();

    new DotnetCommand()
        .WithWorkingDirectory(projectDirectory)
        .Execute("build App.sln --configuration Release")
        .Should().Pass();

    var reasonString = "should be built in release mode, otherwise it means build configurations are missing from the sln file";
    var releaseDirectory = Directory.EnumerateDirectories(
        Path.Combine(projectDirectory, "App", "bin"),
        "Release",
        SearchOption.AllDirectories);
    releaseDirectory.Count().Should().Be(1, $"App {reasonString}");
    Directory.EnumerateFiles(releaseDirectory.Single(), "App.dll", SearchOption.AllDirectories)
        .Count().Should().Be(1, $"App {reasonString}");
}
[Fact]
public void WhenFinalReferenceIsRemovedEmptySectionsAreRemoved()
{
    // Removing every project must also drop the now-empty configuration sections.
    var testRoot = TestAssets
        .Get("TestAppWithSlnAndCsprojToRemove")
        .CreateInstance()
        .WithSourceFiles()
        .Root
        .FullName;
    var slnPath = Path.Combine(testRoot, "App.sln");
    SlnFile.Read(slnPath).Projects.Count.Should().Be(2);

    var appProject = Path.Combine("App", "App.csproj");
    var libProject = Path.Combine("Lib", "Lib.csproj");

    var result = new DotnetCommand()
        .WithWorkingDirectory(testRoot)
        .ExecuteWithCapturedOutput($"sln remove {libProject} {appProject}");

    result.Should().Pass();
    File.ReadAllText(slnPath)
        .Should().BeVisuallyEquivalentTo(ExpectedSlnContentsAfterRemoveAllProjects);
}
[Fact]
public void WhenNestedProjectIsRemovedItsSolutionFoldersAreRemoved()
{
    // Removing one nested project prunes its solution-folder chain while keeping
    // folders still used by other projects.
    var testRoot = TestAssets
        .Get("TestAppWithSlnAndCsprojInSubDirToRemove")
        .CreateInstance()
        .WithSourceFiles()
        .Root
        .FullName;
    var slnPath = Path.Combine(testRoot, "App.sln");
    var targetProject = Path.Combine("src", "NotLastProjInSrc", "NotLastProjInSrc.csproj");

    var result = new DotnetCommand()
        .WithWorkingDirectory(testRoot)
        .ExecuteWithCapturedOutput($"sln remove {targetProject}");

    result.Should().Pass();
    File.ReadAllText(slnPath)
        .Should().BeVisuallyEquivalentTo(ExpectedSlnContentsAfterRemoveNestedProj);
}
[Fact]
public void WhenFinalNestedProjectIsRemovedSolutionFoldersAreRemoved()
{
    // When the only project under a solution folder is removed, the folder
    // hierarchy and the NestedProjects section must be deleted as well.
    var testRoot = TestAssets
        .Get("TestAppWithSlnAndLastCsprojInSubDirToRemove")
        .CreateInstance()
        .WithSourceFiles()
        .Root
        .FullName;
    var slnPath = Path.Combine(testRoot, "App.sln");
    var targetProject = Path.Combine("src", "Lib", "Lib.csproj");

    var result = new DotnetCommand()
        .WithWorkingDirectory(testRoot)
        .ExecuteWithCapturedOutput($"sln remove {targetProject}");

    result.Should().Pass();
    File.ReadAllText(slnPath)
        .Should().BeVisuallyEquivalentTo(ExpectedSlnContentsAfterRemoveLastNestedProj);
}
}
}
| |
//#define USE_HASH_TABLE
using System;
using System.Collections;
namespace DifferenceEngine
{
/// <summary>
/// Abstraction over an indexable sequence of comparable items that the
/// difference engine walks when computing a diff.
/// </summary>
public interface IDiffList
{
    /// <summary>Number of items in the list.</summary>
    int Count();
    /// <summary>Item at <paramref name="index"/>; compared against items of the other list.</summary>
    IComparable GetByIndex(int index);
}
// Match status for a DiffState. The numeric values matter: they double as
// sentinel values stored directly in DiffState._length (see DiffState.Status).
internal enum DiffStatus
{
    Matched = 1,   // a match of positive length was recorded
    NoMatch = -1,  // comparison done, no match found
    Unknown = -2   // not yet evaluated
}
// Tracks the best match found so far for one source position. The _length
// field is overloaded: positive values are a real match length, while the
// negative DiffStatus sentinel values (-1/-2) encode "no match"/"unknown".
internal class DiffState
{
    private const int BAD_INDEX = -1;
    private int _startIndex;
    private int _length;

    public int StartIndex { get { return _startIndex; } }
    public int EndIndex { get { return _startIndex + _length - 1; } }

    // Effective match length: the recorded length, 1 when a zero-length match
    // was stored, and 0 for the sentinel (negative) states.
    public int Length
    {
        get
        {
            if (_length > 0)
            {
                return _length;
            }
            return _length == 0 ? 1 : 0;
        }
    }

    // Decode the sentinel stored in _length back into a DiffStatus.
    public DiffStatus Status
    {
        get
        {
            if (_length > 0)
            {
                return DiffStatus.Matched;
            }
            if (_length == -1)
            {
                return DiffStatus.NoMatch;
            }
            System.Diagnostics.Debug.Assert(_length == -2,"Invalid status: _length < -2");
            return DiffStatus.Unknown;
        }
    }

    public DiffState()
    {
        SetToUnkown();
    }

    // (Name kept as-is, including the historical misspelling: it is part of the
    // protected surface of this class.)
    protected void SetToUnkown()
    {
        _startIndex = BAD_INDEX;
        _length = (int)DiffStatus.Unknown;
    }

    public void SetMatch(int start, int length)
    {
        System.Diagnostics.Debug.Assert(length > 0,"Length must be greater than zero");
        System.Diagnostics.Debug.Assert(start >= 0,"Start must be greater than or equal to zero");
        _startIndex = start;
        _length = length;
    }

    public void SetNoMatch()
    {
        _startIndex = BAD_INDEX;
        _length = (int)DiffStatus.NoMatch;
    }

    // Invalidate a stored match that no longer fits the narrowed window; returns
    // false when the state must be (re)computed.
    public bool HasValidLength(int newStart, int newEnd, int maxPossibleDestLength)
    {
        bool hasMatch = _length > 0;
        if (hasMatch &&
            (maxPossibleDestLength < _length || _startIndex < newStart || EndIndex > newEnd))
        {
            SetToUnkown();
        }
        return _length != (int)DiffStatus.Unknown;
    }
}
// Lazily-populated table of DiffState objects, one per destination index.
// The USE_HASH_TABLE build flavor trades memory for sparse storage.
internal class DiffStateList
{
#if USE_HASH_TABLE
    private Hashtable _table;
#else
    private DiffState[] _array;
#endif

    public DiffStateList(int destCount)
    {
#if USE_HASH_TABLE
        _table = new Hashtable(Math.Max(9,destCount/10));
#else
        _array = new DiffState[destCount];
#endif
    }

    // Return the state for an index, creating and caching it on first access.
    public DiffState GetByIndex(int index)
    {
#if USE_HASH_TABLE
        DiffState state = (DiffState)_table[index];
        if (state == null)
        {
            state = new DiffState();
            _table.Add(index,state);
        }
#else
        DiffState state = _array[index];
        if (state == null)
        {
            state = new DiffState();
            _array[index] = state;
        }
#endif
        return state;
    }
}
/// <summary>Kind of edit a result span represents.</summary>
public enum DiffResultSpanStatus
{
    NoChange,
    Replace,
    DeleteSource,
    AddDestination
}

/// <summary>
/// One contiguous span of the diff result: an edit kind plus the affected
/// index ranges in source and destination (BAD_INDEX when a side is absent).
/// Spans order themselves by destination index.
/// </summary>
public class DiffResultSpan : IComparable
{
    private const int BAD_INDEX = -1;

    private readonly int _destIndex;
    private readonly int _sourceIndex;
    private int _length;
    private readonly DiffResultSpanStatus _status;

    public int DestIndex { get { return _destIndex; } }
    public int SourceIndex { get { return _sourceIndex; } }
    public int Length { get { return _length; } }
    public DiffResultSpanStatus Status { get { return _status; } }

    // Construction goes through the Create* factories below.
    protected DiffResultSpan(DiffResultSpanStatus status, int destIndex, int sourceIndex, int length)
    {
        _status = status;
        _destIndex = destIndex;
        _sourceIndex = sourceIndex;
        _length = length;
    }

    public static DiffResultSpan CreateNoChange(int destIndex, int sourceIndex, int length)
        => new DiffResultSpan(DiffResultSpanStatus.NoChange, destIndex, sourceIndex, length);

    public static DiffResultSpan CreateReplace(int destIndex, int sourceIndex, int length)
        => new DiffResultSpan(DiffResultSpanStatus.Replace, destIndex, sourceIndex, length);

    public static DiffResultSpan CreateDeleteSource(int sourceIndex, int length)
        => new DiffResultSpan(DiffResultSpanStatus.DeleteSource, BAD_INDEX, sourceIndex, length);

    public static DiffResultSpan CreateAddDestination(int destIndex, int length)
        => new DiffResultSpan(DiffResultSpanStatus.AddDestination, destIndex, BAD_INDEX, length);

    /// <summary>Extend this span by <paramref name="i"/> items (used when merging adjacent spans).</summary>
    public void AddLength(int i)
    {
        _length += i;
    }

    public override string ToString()
    {
        return string.Format("{0} (Dest: {1},Source: {2}) {3}",
                             _status.ToString(),
                             _destIndex.ToString(),
                             _sourceIndex.ToString(),
                             _length.ToString());
    }

    /// <summary>Orders spans by destination index.</summary>
    public int CompareTo(object obj)
    {
        return _destIndex.CompareTo(((DiffResultSpan)obj)._destIndex);
    }
}
}
| |
/*
* Farseer Physics Engine based on Box2D.XNA port:
* Copyright (c) 2010 Ian Qvist
*
* Box2D.XNA port of Box2D:
* Copyright (c) 2009 Brandon Furtwangler, Nathan Furtwangler
*
* Original source Box2D:
* Copyright (c) 2006-2009 Erin Catto http://www.gphysics.com
*
* This software is provided 'as-is', without any express or implied
* warranty. In no event will the authors be held liable for any damages
* arising from the use of this software.
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software
* in a product, an acknowledgment in the product documentation would be
* appreciated but is not required.
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
* 3. This notice may not be removed or altered from any source distribution.
*/
using System;
using System.Diagnostics;
using FarseerPhysics.Common;
using Microsoft.Xna.Framework;
namespace FarseerPhysics.Dynamics.Joints
{
// Linear constraint (point-to-line)
// d = p2 - p1 = x2 + r2 - x1 - r1
// C = dot(perp, d)
// Cdot = dot(d, cross(w1, perp)) + dot(perp, v2 + cross(w2, r2) - v1 - cross(w1, r1))
// = -dot(perp, v1) - dot(cross(d + r1, perp), w1) + dot(perp, v2) + dot(cross(r2, perp), v2)
// J = [-perp, -cross(d + r1, perp), perp, cross(r2,perp)]
//
// Angular constraint
// C = a2 - a1 + a_initial
// Cdot = w2 - w1
// J = [0 0 -1 0 0 1]
//
// K = J * invM * JT
//
// J = [-a -s1 a s2]
// [0 -1 0 1]
// a = perp
// s1 = cross(d + r1, a) = cross(p2 - x1, a)
// s2 = cross(r2, a) = cross(p2 - x2, a)
// Motor/Limit linear constraint
// C = dot(ax1, d)
// Cdot = = -dot(ax1, v1) - dot(cross(d + r1, ax1), w1) + dot(ax1, v2) + dot(cross(r2, ax1), v2)
// J = [-ax1 -cross(d+r1,ax1) ax1 cross(r2,ax1)]
// Block Solver
// We develop a block solver that includes the joint limit. This makes the limit stiff (inelastic) even
// when the mass has poor distribution (leading to large torques about the joint anchor points).
//
// The Jacobian has 3 rows:
// J = [-uT -s1 uT s2] // linear
// [0 -1 0 1] // angular
// [-vT -a1 vT a2] // limit
//
// u = perp
// v = axis
// s1 = cross(d + r1, u), s2 = cross(r2, u)
// a1 = cross(d + r1, v), a2 = cross(r2, v)
// M * (v2 - v1) = JT * df
// J * v2 = bias
//
// v2 = v1 + invM * JT * df
// J * (v1 + invM * JT * df) = bias
// K * df = bias - J * v1 = -Cdot
// K = J * invM * JT
// Cdot = J * v1 - bias
//
// Now solve for f2.
// df = f2 - f1
// K * (f2 - f1) = -Cdot
// f2 = invK * (-Cdot) + f1
//
// Clamp accumulated limit impulse.
// lower: f2(3) = max(f2(3), 0)
// upper: f2(3) = min(f2(3), 0)
//
// Solve for correct f2(1:2)
// K(1:2, 1:2) * f2(1:2) = -Cdot(1:2) - K(1:2,3) * f2(3) + K(1:2,1:3) * f1
// = -Cdot(1:2) - K(1:2,3) * f2(3) + K(1:2,1:2) * f1(1:2) + K(1:2,3) * f1(3)
// K(1:2, 1:2) * f2(1:2) = -Cdot(1:2) - K(1:2,3) * (f2(3) - f1(3)) + K(1:2,1:2) * f1(1:2)
// f2(1:2) = invK(1:2,1:2) * (-Cdot(1:2) - K(1:2,3) * (f2(3) - f1(3))) + f1(1:2)
//
// Now compute impulse to be applied:
// df = f2 - f1
/// <summary>
/// A prismatic joint. This joint provides one degree of freedom: translation
/// along an axis fixed in body1. Relative rotation is prevented. You can
/// use a joint limit to restrict the range of motion and a joint motor to
/// drive the motion or to model joint friction.
/// </summary>
public class PrismaticJoint : Joint
{
public Vector2 LocalAnchorA;
public Vector2 LocalAnchorB;
private Mat33 _K;
private float _a1, _a2;
private Vector2 _axis;
private bool _enableLimit;
private bool _enableMotor;
private Vector3 _impulse;
private LimitState _limitState;
private Vector2 _localXAxis1;
private Vector2 _localYAxis1;
private float _lowerTranslation;
private float _maxMotorForce;
private float _motorImpulse;
private float _motorMass; // effective mass for motor/limit translational constraint.
private float _motorSpeed;
private Vector2 _perp;
private float _refAngle;
private float _s1, _s2;
private float _upperTranslation;
// Parameterless constructor for internal/deserialization use; anchors, axis
// and reference angle are expected to be populated afterwards.
internal PrismaticJoint()
{
    JointType = JointType.Prismatic;
}
/// <summary>
/// This requires defining a line of
/// motion using an axis and an anchor point. The definition uses local
/// anchor points and a local axis so that the initial configuration
/// can violate the constraint slightly. The joint translation is zero
/// when the local anchor points coincide in world space. Using local
/// anchors and a local axis helps when saving and loading a game.
/// </summary>
/// <param name="bodyA">The first body.</param>
/// <param name="bodyB">The second body.</param>
/// <param name="localAnchorA">The first body anchor.</param>
/// <param name="localAnchorB">The second body anchor.</param>
/// <param name="axis">The axis, given in world coordinates and converted to bodyA's local frame.</param>
public PrismaticJoint(Body bodyA, Body bodyB, Vector2 localAnchorA, Vector2 localAnchorB, Vector2 axis)
    : base(bodyA, bodyB)
{
    JointType = JointType.Prismatic;
    LocalAnchorA = localAnchorA;
    LocalAnchorB = localAnchorB;
    // Store the axis in bodyA's local frame plus its perpendicular; both are
    // re-rotated into world space every solver step.
    _localXAxis1 = BodyA.GetLocalVector(axis);
    _localYAxis1 = MathUtils.Cross(1.0f, _localXAxis1);
    // Initial relative rotation; the angular constraint keeps this difference fixed.
    _refAngle = BodyB.Rotation - BodyA.Rotation;
    _limitState = LimitState.Inactive;
}
public override Vector2 WorldAnchorA
{
get { return BodyA.GetWorldPoint(LocalAnchorA); }
}
public override Vector2 WorldAnchorB
{
get { return BodyB.GetWorldPoint(LocalAnchorB); }
set { Debug.Assert(false, "You can't set the world anchor on this joint type."); }
}
/// <summary>
/// Get the current joint translation, usually in meters.
/// </summary>
/// <value></value>
public float JointTranslation
{
get
{
Vector2 d = BodyB.GetWorldPoint(LocalAnchorB) - BodyA.GetWorldPoint(LocalAnchorA);
Vector2 axis = BodyA.GetWorldVector(ref _localXAxis1);
return Vector2.Dot(d, axis);
}
}
/// <summary>
/// Get the current joint translation speed, usually in meters per second.
/// </summary>
/// <value></value>
public float JointSpeed
{
    get
    {
        Transform xf1, xf2;
        BodyA.GetTransform(out xf1);
        BodyB.GetTransform(out xf2);

        // World-space anchor offsets from each body's center of mass.
        Vector2 r1 = MathUtils.Multiply(ref xf1.R, LocalAnchorA - BodyA.LocalCenter);
        Vector2 r2 = MathUtils.Multiply(ref xf2.R, LocalAnchorB - BodyB.LocalCenter);
        Vector2 p1 = BodyA.Sweep.C + r1;
        Vector2 p2 = BodyB.Sweep.C + r2;
        // Separation between anchors and the world-space joint axis.
        Vector2 d = p2 - p1;
        Vector2 axis = BodyA.GetWorldVector(ref _localXAxis1);

        Vector2 v1 = BodyA.LinearVelocityInternal;
        Vector2 v2 = BodyB.LinearVelocityInternal;
        float w1 = BodyA.AngularVelocityInternal;
        float w2 = BodyB.AngularVelocityInternal;

        // Cdot of the translation constraint: relative anchor velocity
        // projected onto the axis (see derivation comments at the top of the file).
        float speed = Vector2.Dot(d, MathUtils.Cross(w1, axis)) +
                      Vector2.Dot(axis, v2 + MathUtils.Cross(w2, r2) - v1 - MathUtils.Cross(w1, r1));
        return speed;
    }
}
/// <summary>
/// Is the joint limit enabled?
/// </summary>
/// <value><c>true</c> if [limit enabled]; otherwise, <c>false</c>.</value>
public bool LimitEnabled
{
get { return _enableLimit; }
set
{
Debug.Assert(BodyA.FixedRotation == false || BodyB.FixedRotation == false,
"Warning: limits does currently not work with fixed rotation");
WakeBodies();
_enableLimit = value;
}
}
/// <summary>
/// Get the lower joint limit, usually in meters.
/// </summary>
/// <value></value>
public float LowerLimit
{
get { return _lowerTranslation; }
set
{
WakeBodies();
_lowerTranslation = value;
}
}
/// <summary>
/// Get the upper joint limit, usually in meters.
/// </summary>
/// <value></value>
public float UpperLimit
{
get { return _upperTranslation; }
set
{
WakeBodies();
_upperTranslation = value;
}
}
/// <summary>
/// Is the joint motor enabled?
/// </summary>
/// <value><c>true</c> if [motor enabled]; otherwise, <c>false</c>.</value>
public bool MotorEnabled
{
get { return _enableMotor; }
set
{
WakeBodies();
_enableMotor = value;
}
}
/// <summary>
/// Set the motor speed, usually in meters per second.
/// </summary>
/// <value>The speed.</value>
public float MotorSpeed
{
set
{
WakeBodies();
_motorSpeed = value;
}
get { return _motorSpeed; }
}
/// <summary>
/// Set the maximum motor force, usually in N.
/// </summary>
/// <value>The force.</value>
public float MaxMotorForce
{
get { return _maxMotorForce; }
set
{
WakeBodies();
_maxMotorForce = value;
}
}
/// <summary>
/// Get the current motor force, usually in N.
/// </summary>
/// <value></value>
public float MotorForce
{
get { return _motorImpulse; }
set { _motorImpulse = value; }
}
public Vector2 LocalXAxis1
{
get { return _localXAxis1; }
set
{
_localXAxis1 = BodyA.GetLocalVector(value);
_localYAxis1 = MathUtils.Cross(1.0f, _localXAxis1);
}
}
public float ReferenceAngle
{
get { return _refAngle; }
set { _refAngle = value; }
}
// Constraint reaction force: the perpendicular impulse plus the combined
// motor/limit impulse along the axis, converted from impulse to force units.
public override Vector2 GetReactionForce(float inv_dt)
{
    return inv_dt * (_impulse.X * _perp + (_motorImpulse + _impulse.Z) * _axis);
}
// Constraint reaction torque from the angular (no-relative-rotation) row.
public override float GetReactionTorque(float inv_dt)
{
    return inv_dt * _impulse.Y;
}
// Build the constraint data for this step: world-space axis/perpendicular,
// the Jacobian coefficients (s1, s2, a1, a2), the effective-mass matrix _K,
// the motor mass, and the limit state; then warm-start by re-applying the
// impulses accumulated in the previous step (scaled by dtRatio).
internal override void InitVelocityConstraints(ref TimeStep step)
{
    Body b1 = BodyA;
    Body b2 = BodyB;

    LocalCenterA = b1.LocalCenter;
    LocalCenterB = b2.LocalCenter;

    Transform xf1, xf2;
    b1.GetTransform(out xf1);
    b2.GetTransform(out xf2);

    // Compute the effective masses.
    Vector2 r1 = MathUtils.Multiply(ref xf1.R, LocalAnchorA - LocalCenterA);
    Vector2 r2 = MathUtils.Multiply(ref xf2.R, LocalAnchorB - LocalCenterB);
    Vector2 d = b2.Sweep.C + r2 - b1.Sweep.C - r1;

    InvMassA = b1.InvMass;
    InvIA = b1.InvI;
    InvMassB = b2.InvMass;
    InvIB = b2.InvI;

    // Compute motor Jacobian and effective mass.
    {
        _axis = MathUtils.Multiply(ref xf1.R, _localXAxis1);
        _a1 = MathUtils.Cross(d + r1, _axis);
        _a2 = MathUtils.Cross(r2, _axis);

        _motorMass = InvMassA + InvMassB + InvIA * _a1 * _a1 + InvIB * _a2 * _a2;
        // Invert only when non-degenerate; otherwise _motorMass stays ~0 and
        // the motor produces no impulse.
        if (_motorMass > Settings.Epsilon)
        {
            _motorMass = 1.0f / _motorMass;
        }
    }

    // Prismatic constraint: assemble the 3x3 effective-mass matrix
    // K = J * invM * JT for the (perp, angular, axis) rows.
    {
        _perp = MathUtils.Multiply(ref xf1.R, _localYAxis1);

        _s1 = MathUtils.Cross(d + r1, _perp);
        _s2 = MathUtils.Cross(r2, _perp);

        float m1 = InvMassA, m2 = InvMassB;
        float i1 = InvIA, i2 = InvIB;

        float k11 = m1 + m2 + i1 * _s1 * _s1 + i2 * _s2 * _s2;
        float k12 = i1 * _s1 + i2 * _s2;
        float k13 = i1 * _s1 * _a1 + i2 * _s2 * _a2;
        float k22 = i1 + i2;
        float k23 = i1 * _a1 + i2 * _a2;
        float k33 = m1 + m2 + i1 * _a1 * _a1 + i2 * _a2 * _a2;

        _K.Col1 = new Vector3(k11, k12, k13);
        _K.Col2 = new Vector3(k12, k22, k23);
        _K.Col3 = new Vector3(k13, k23, k33);
    }

    // Compute motor and limit terms. The accumulated limit impulse (Z) is
    // reset whenever the limit state changes so stale impulses don't leak in.
    if (_enableLimit)
    {
        float jointTranslation = Vector2.Dot(_axis, d);
        if (Math.Abs(_upperTranslation - _lowerTranslation) < 2.0f * Settings.LinearSlop)
        {
            // Limits are effectively coincident: treat as a rigid (equal) limit.
            _limitState = LimitState.Equal;
        }
        else if (jointTranslation <= _lowerTranslation)
        {
            if (_limitState != LimitState.AtLower)
            {
                _limitState = LimitState.AtLower;
                _impulse.Z = 0.0f;
            }
        }
        else if (jointTranslation >= _upperTranslation)
        {
            if (_limitState != LimitState.AtUpper)
            {
                _limitState = LimitState.AtUpper;
                _impulse.Z = 0.0f;
            }
        }
        else
        {
            _limitState = LimitState.Inactive;
            _impulse.Z = 0.0f;
        }
    }
    else
    {
        _limitState = LimitState.Inactive;
    }

    if (_enableMotor == false)
    {
        _motorImpulse = 0.0f;
    }

    if (Settings.EnableWarmstarting)
    {
        // Account for variable time step.
        _impulse *= step.dtRatio;
        _motorImpulse *= step.dtRatio;

        // Re-apply last step's impulses to linear and angular velocities.
        Vector2 P = _impulse.X * _perp + (_motorImpulse + _impulse.Z) * _axis;
        float L1 = _impulse.X * _s1 + _impulse.Y + (_motorImpulse + _impulse.Z) * _a1;
        float L2 = _impulse.X * _s2 + _impulse.Y + (_motorImpulse + _impulse.Z) * _a2;

        b1.LinearVelocityInternal -= InvMassA * P;
        b1.AngularVelocityInternal -= InvIA * L1;

        b2.LinearVelocityInternal += InvMassB * P;
        b2.AngularVelocityInternal += InvIB * L2;
    }
    else
    {
        _impulse = Vector3.Zero;
        _motorImpulse = 0.0f;
    }
}
// One velocity-solver iteration: apply the motor impulse (clamped by the max
// motor force), then solve the prismatic rows — jointly with the limit row in
// 3x3 block form when a limit is active, otherwise just the 2x2 block.
internal override void SolveVelocityConstraints(ref TimeStep step)
{
    Body b1 = BodyA;
    Body b2 = BodyB;

    Vector2 v1 = b1.LinearVelocityInternal;
    float w1 = b1.AngularVelocityInternal;
    Vector2 v2 = b2.LinearVelocityInternal;
    float w2 = b2.AngularVelocityInternal;

    // Solve linear motor constraint (skipped when the limit is Equal — the
    // joint cannot translate, so the motor has nothing to drive).
    if (_enableMotor && _limitState != LimitState.Equal)
    {
        float Cdot = Vector2.Dot(_axis, v2 - v1) + _a2 * w2 - _a1 * w1;
        float impulse = _motorMass * (_motorSpeed - Cdot);
        float oldImpulse = _motorImpulse;
        // Clamp the *accumulated* impulse so total motor force stays bounded.
        float maxImpulse = step.dt * _maxMotorForce;
        _motorImpulse = MathUtils.Clamp(_motorImpulse + impulse, -maxImpulse, maxImpulse);
        impulse = _motorImpulse - oldImpulse;

        Vector2 P = impulse * _axis;
        float L1 = impulse * _a1;
        float L2 = impulse * _a2;

        v1 -= InvMassA * P;
        w1 -= InvIA * L1;

        v2 += InvMassB * P;
        w2 += InvIB * L2;
    }

    // Cdot of the (perpendicular, angular) rows.
    Vector2 Cdot1 = new Vector2(Vector2.Dot(_perp, v2 - v1) + _s2 * w2 - _s1 * w1, w2 - w1);

    if (_enableLimit && _limitState != LimitState.Inactive)
    {
        // Solve prismatic and limit constraint in block form.
        float Cdot2 = Vector2.Dot(_axis, v2 - v1) + _a2 * w2 - _a1 * w1;
        Vector3 Cdot = new Vector3(Cdot1.X, Cdot1.Y, Cdot2);

        Vector3 f1 = _impulse;
        Vector3 df = _K.Solve33(-Cdot);
        _impulse += df;

        // Clamp the accumulated limit impulse to be one-sided per limit.
        if (_limitState == LimitState.AtLower)
        {
            _impulse.Z = Math.Max(_impulse.Z, 0.0f);
        }
        else if (_limitState == LimitState.AtUpper)
        {
            _impulse.Z = Math.Min(_impulse.Z, 0.0f);
        }

        // Re-solve the first two rows for the clamped Z (see derivation above):
        // f2(1:2) = invK(1:2,1:2) * (-Cdot(1:2) - K(1:2,3) * (f2(3) - f1(3))) + f1(1:2)
        Vector2 b = -Cdot1 - (_impulse.Z - f1.Z) * new Vector2(_K.Col3.X, _K.Col3.Y);
        Vector2 f2r = _K.Solve22(b) + new Vector2(f1.X, f1.Y);
        _impulse.X = f2r.X;
        _impulse.Y = f2r.Y;

        // Apply only the incremental impulse.
        df = _impulse - f1;

        Vector2 P = df.X * _perp + df.Z * _axis;
        float L1 = df.X * _s1 + df.Y + df.Z * _a1;
        float L2 = df.X * _s2 + df.Y + df.Z * _a2;

        v1 -= InvMassA * P;
        w1 -= InvIA * L1;

        v2 += InvMassB * P;
        w2 += InvIB * L2;
    }
    else
    {
        // Limit is inactive, just solve the prismatic constraint in block form.
        Vector2 df = _K.Solve22(-Cdot1);
        _impulse.X += df.X;
        _impulse.Y += df.Y;

        Vector2 P = df.X * _perp;
        float L1 = df.X * _s1 + df.Y;
        float L2 = df.X * _s2 + df.Y;

        v1 -= InvMassA * P;
        w1 -= InvIA * L1;

        v2 += InvMassB * P;
        w2 += InvIB * L2;
    }

    b1.LinearVelocityInternal = v1;
    b1.AngularVelocityInternal = w1;
    b2.LinearVelocityInternal = v2;
    b2.AngularVelocityInternal = w2;
}
// Position correction pass: recompute the Jacobian from current positions,
// measure the positional error of the limit / perpendicular / angular rows,
// solve for a corrective impulse and push the body sweeps directly.
// Returns true when both linear and angular error are within slop tolerance.
internal override bool SolvePositionConstraints()
{
    Body b1 = BodyA;
    Body b2 = BodyB;

    Vector2 c1 = b1.Sweep.C;
    float a1 = b1.Sweep.A;

    Vector2 c2 = b2.Sweep.C;
    float a2 = b2.Sweep.A;

    // Solve linear limit constraint.
    float linearError = 0.0f;
    bool active = false;
    float C2 = 0.0f;

    Mat22 R1 = new Mat22(a1);
    Mat22 R2 = new Mat22(a2);

    Vector2 r1 = MathUtils.Multiply(ref R1, LocalAnchorA - LocalCenterA);
    Vector2 r2 = MathUtils.Multiply(ref R2, LocalAnchorB - LocalCenterB);
    Vector2 d = c2 + r2 - c1 - r1;

    if (_enableLimit)
    {
        _axis = MathUtils.Multiply(ref R1, _localXAxis1);

        _a1 = MathUtils.Cross(d + r1, _axis);
        _a2 = MathUtils.Cross(r2, _axis);

        float translation = Vector2.Dot(_axis, d);
        if (Math.Abs(_upperTranslation - _lowerTranslation) < 2.0f * Settings.LinearSlop)
        {
            // Prevent large angular corrections
            C2 = MathUtils.Clamp(translation, -Settings.MaxLinearCorrection, Settings.MaxLinearCorrection);
            linearError = Math.Abs(translation);
            active = true;
        }
        else if (translation <= _lowerTranslation)
        {
            // Prevent large linear corrections and allow some slop.
            C2 = MathUtils.Clamp(translation - _lowerTranslation + Settings.LinearSlop,
                                 -Settings.MaxLinearCorrection, 0.0f);
            linearError = _lowerTranslation - translation;
            active = true;
        }
        else if (translation >= _upperTranslation)
        {
            // Prevent large linear corrections and allow some slop.
            C2 = MathUtils.Clamp(translation - _upperTranslation - Settings.LinearSlop, 0.0f,
                                 Settings.MaxLinearCorrection);
            linearError = translation - _upperTranslation;
            active = true;
        }
    }

    _perp = MathUtils.Multiply(ref R1, _localYAxis1);

    _s1 = MathUtils.Cross(d + r1, _perp);
    _s2 = MathUtils.Cross(r2, _perp);

    Vector3 impulse;
    // C1 holds the positional error of the perpendicular and angular rows.
    Vector2 C1 = new Vector2(Vector2.Dot(_perp, d), a2 - a1 - ReferenceAngle);

    linearError = Math.Max(linearError, Math.Abs(C1.X));
    float angularError = Math.Abs(C1.Y);

    if (active)
    {
        // Limit row participates: solve the full 3x3 system.
        float m1 = InvMassA, m2 = InvMassB;
        float i1 = InvIA, i2 = InvIB;

        float k11 = m1 + m2 + i1 * _s1 * _s1 + i2 * _s2 * _s2;
        float k12 = i1 * _s1 + i2 * _s2;
        float k13 = i1 * _s1 * _a1 + i2 * _s2 * _a2;
        float k22 = i1 + i2;
        float k23 = i1 * _a1 + i2 * _a2;
        float k33 = m1 + m2 + i1 * _a1 * _a1 + i2 * _a2 * _a2;

        _K.Col1 = new Vector3(k11, k12, k13);
        _K.Col2 = new Vector3(k12, k22, k23);
        _K.Col3 = new Vector3(k13, k23, k33);

        Vector3 C = new Vector3(-C1.X, -C1.Y, -C2);
        impulse = _K.Solve33(C); // negated above
    }
    else
    {
        // No limit: only the 2x2 (perpendicular, angular) system is needed.
        float m1 = InvMassA, m2 = InvMassB;
        float i1 = InvIA, i2 = InvIB;

        float k11 = m1 + m2 + i1 * _s1 * _s1 + i2 * _s2 * _s2;
        float k12 = i1 * _s1 + i2 * _s2;
        float k22 = i1 + i2;

        _K.Col1 = new Vector3(k11, k12, 0.0f);
        _K.Col2 = new Vector3(k12, k22, 0.0f);

        Vector2 impulse1 = _K.Solve22(-C1);
        impulse.X = impulse1.X;
        impulse.Y = impulse1.Y;
        impulse.Z = 0.0f;
    }

    Vector2 P = impulse.X * _perp + impulse.Z * _axis;
    float L1 = impulse.X * _s1 + impulse.Y + impulse.Z * _a1;
    float L2 = impulse.X * _s2 + impulse.Y + impulse.Z * _a2;

    c1 -= InvMassA * P;
    a1 -= InvIA * L1;
    c2 += InvMassB * P;
    a2 += InvIB * L2;

    // TODO_ERIN remove need for this.
    b1.Sweep.C = c1;
    b1.Sweep.A = a1;
    b2.Sweep.C = c2;
    b2.Sweep.A = a2;
    b1.SynchronizeTransform();
    b2.SynchronizeTransform();

    return linearError <= Settings.LinearSlop && angularError <= Settings.AngularSlop;
}
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.TestModels.NullSemanticsModel;
using Xunit.Abstractions;
namespace Microsoft.EntityFrameworkCore.Query
{
public class NullSemanticsQueryMySqlTest : NullSemanticsQueryTestBase<NullSemanticsQueryMySqlFixture>
{
// Wires the shared fixture into the base null-semantics test suite and clears the SQL log
// so each test's AssertSql sees only the statements it generated.
// NOTE(review): despite the "MySql" naming, every asserted query below uses SQL Server
// syntax ([bracket] quoting, N'...' literals, CAST(1 AS BIT), CHARINDEX) — looks copied
// from the SqlServer test suite; confirm the provider actually emits this dialect.
// ReSharper disable once UnusedParameter.Local
public NullSemanticsQueryMySqlTest(NullSemanticsQueryMySqlFixture fixture, ITestOutputHelper testOutputHelper)
: base(fixture)
{
// Drop SQL captured by earlier tests sharing this fixture.
Fixture.TestSqlLoggerFactory.Clear();
// Uncomment to echo generated SQL to the xunit test output.
//Fixture.TestSqlLoggerFactory.SetTestOutputHelper(testOutputHelper);
}
// Runs the base test, then pins the SQL for `a == b` over each nullable/non-nullable
// bool column pairing; only the nullable/nullable case needs IS NULL compensation.
public override void Compare_bool_with_bool_equal()
{
base.Compare_bool_with_bool_equal();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] = [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] = [e].[NullableBoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableBoolA] = [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] = [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL AND [e].[NullableBoolB] IS NULL)");
}
// Runs the base test, then pins the SQL for `!a == b`: translated to `<>` with
// null-compensation terms on whichever side is nullable.
public override void Compare_negated_bool_with_bool_equal()
{
base.Compare_negated_bool_with_bool_equal();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] <> [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[BoolA] <> [e].[NullableBoolB]) AND [e].[NullableBoolB] IS NOT NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] <> [e].[BoolB]) AND [e].[NullableBoolA] IS NOT NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableBoolA] <> [e].[NullableBoolB]) AND ([e].[NullableBoolA] IS NOT NULL AND [e].[NullableBoolB] IS NOT NULL)) OR ([e].[NullableBoolA] IS NULL AND [e].[NullableBoolB] IS NULL)");
}
// Runs the base test, then pins the SQL for `a == !b` — same `<>` expansion as the
// negated-left variant, confirming negation is symmetric.
public override void Compare_bool_with_negated_bool_equal()
{
base.Compare_bool_with_negated_bool_equal();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] <> [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[BoolA] <> [e].[NullableBoolB]) AND [e].[NullableBoolB] IS NOT NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] <> [e].[BoolB]) AND [e].[NullableBoolA] IS NOT NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableBoolA] <> [e].[NullableBoolB]) AND ([e].[NullableBoolA] IS NOT NULL AND [e].[NullableBoolB] IS NOT NULL)) OR ([e].[NullableBoolA] IS NULL AND [e].[NullableBoolB] IS NULL)");
}
// Runs the base test, then pins the SQL for `!a == !b`: double negation collapses back
// to `=`, still with null-compensation on nullable sides.
public override void Compare_negated_bool_with_negated_bool_equal()
{
base.Compare_negated_bool_with_negated_bool_equal();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] = [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[BoolA] = [e].[NullableBoolB]) AND [e].[NullableBoolB] IS NOT NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] = [e].[BoolB]) AND [e].[NullableBoolA] IS NOT NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableBoolA] = [e].[NullableBoolB]) AND ([e].[NullableBoolA] IS NOT NULL AND [e].[NullableBoolB] IS NOT NULL)) OR ([e].[NullableBoolA] IS NULL AND [e].[NullableBoolB] IS NULL)");
}
// Runs the base test, then pins the SQL for `!(a == b)`: becomes `<>` with OR'd
// IS NULL terms so NULL operands satisfy the negated equality.
public override void Compare_bool_with_bool_equal_negated()
{
base.Compare_bool_with_bool_equal_negated();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] <> [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[BoolA] <> [e].[NullableBoolB]) OR [e].[NullableBoolB] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] <> [e].[BoolB]) OR [e].[NullableBoolA] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableBoolA] <> [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL OR [e].[NullableBoolB] IS NULL)) AND ([e].[NullableBoolA] IS NOT NULL OR [e].[NullableBoolB] IS NOT NULL)");
}
// Runs the base test, then pins the SQL for `!(!a == b)`: the negations cancel to `=`
// with OR'd IS NULL compensation.
public override void Compare_negated_bool_with_bool_equal_negated()
{
base.Compare_negated_bool_with_bool_equal_negated();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] = [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[BoolA] = [e].[NullableBoolB]) OR [e].[NullableBoolB] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] = [e].[BoolB]) OR [e].[NullableBoolA] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableBoolA] = [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL OR [e].[NullableBoolB] IS NULL)) AND ([e].[NullableBoolA] IS NOT NULL OR [e].[NullableBoolB] IS NOT NULL)");
}
// Runs the base test, then pins the SQL for `!(a == !b)` — identical expansion to the
// negated-left mirror case.
public override void Compare_bool_with_negated_bool_equal_negated()
{
base.Compare_bool_with_negated_bool_equal_negated();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] = [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[BoolA] = [e].[NullableBoolB]) OR [e].[NullableBoolB] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] = [e].[BoolB]) OR [e].[NullableBoolA] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableBoolA] = [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL OR [e].[NullableBoolB] IS NULL)) AND ([e].[NullableBoolA] IS NOT NULL OR [e].[NullableBoolB] IS NOT NULL)");
}
// Runs the base test, then pins the SQL for `!(!a == !b)`: net single negation,
// so `<>` with OR'd IS NULL compensation.
public override void Compare_negated_bool_with_negated_bool_equal_negated()
{
base.Compare_negated_bool_with_negated_bool_equal_negated();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] <> [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[BoolA] <> [e].[NullableBoolB]) OR [e].[NullableBoolB] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] <> [e].[BoolB]) OR [e].[NullableBoolA] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableBoolA] <> [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL OR [e].[NullableBoolB] IS NULL)) AND ([e].[NullableBoolA] IS NOT NULL OR [e].[NullableBoolB] IS NOT NULL)");
}
// Runs the base test, then pins the SQL for `a != b` over each column-nullability
// pairing; C# inequality treats NULL as a distinct value, hence the OR'd IS NULL terms.
public override void Compare_bool_with_bool_not_equal()
{
base.Compare_bool_with_bool_not_equal();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] <> [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[BoolA] <> [e].[NullableBoolB]) OR [e].[NullableBoolB] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] <> [e].[BoolB]) OR [e].[NullableBoolA] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableBoolA] <> [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL OR [e].[NullableBoolB] IS NULL)) AND ([e].[NullableBoolA] IS NOT NULL OR [e].[NullableBoolB] IS NOT NULL)");
}
// Runs the base test, then pins the SQL for `!a != b`: flips to `=` with the same
// OR'd IS NULL compensation as plain inequality.
public override void Compare_negated_bool_with_bool_not_equal()
{
base.Compare_negated_bool_with_bool_not_equal();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] = [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[BoolA] = [e].[NullableBoolB]) OR [e].[NullableBoolB] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] = [e].[BoolB]) OR [e].[NullableBoolA] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableBoolA] = [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL OR [e].[NullableBoolB] IS NULL)) AND ([e].[NullableBoolA] IS NOT NULL OR [e].[NullableBoolB] IS NOT NULL)");
}
// Runs the base test, then pins the SQL for `a != !b` — mirror of the negated-left
// inequality case.
public override void Compare_bool_with_negated_bool_not_equal()
{
base.Compare_bool_with_negated_bool_not_equal();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] = [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[BoolA] = [e].[NullableBoolB]) OR [e].[NullableBoolB] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] = [e].[BoolB]) OR [e].[NullableBoolA] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableBoolA] = [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL OR [e].[NullableBoolB] IS NULL)) AND ([e].[NullableBoolA] IS NOT NULL OR [e].[NullableBoolB] IS NOT NULL)");
}
// Runs the base test, then pins the SQL for `!a != !b`: negations cancel, leaving the
// plain `<>` inequality expansion.
public override void Compare_negated_bool_with_negated_bool_not_equal()
{
base.Compare_negated_bool_with_negated_bool_not_equal();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] <> [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[BoolA] <> [e].[NullableBoolB]) OR [e].[NullableBoolB] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] <> [e].[BoolB]) OR [e].[NullableBoolA] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableBoolA] <> [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL OR [e].[NullableBoolB] IS NULL)) AND ([e].[NullableBoolA] IS NOT NULL OR [e].[NullableBoolB] IS NOT NULL)");
}
// Runs the base test, then pins the SQL for `!(a != b)`: reduces to `=`, with AND'd
// IS NULL compensation only in the nullable/nullable case.
public override void Compare_bool_with_bool_not_equal_negated()
{
base.Compare_bool_with_bool_not_equal_negated();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] = [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] = [e].[NullableBoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableBoolA] = [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] = [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL AND [e].[NullableBoolB] IS NULL)");
}
// Runs the base test, then pins the SQL for `!(!a != b)`: net single negation of
// equality, so `<>` with IS NOT NULL guards.
public override void Compare_negated_bool_with_bool_not_equal_negated()
{
base.Compare_negated_bool_with_bool_not_equal_negated();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] <> [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[BoolA] <> [e].[NullableBoolB]) AND [e].[NullableBoolB] IS NOT NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] <> [e].[BoolB]) AND [e].[NullableBoolA] IS NOT NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableBoolA] <> [e].[NullableBoolB]) AND ([e].[NullableBoolA] IS NOT NULL AND [e].[NullableBoolB] IS NOT NULL)) OR ([e].[NullableBoolA] IS NULL AND [e].[NullableBoolB] IS NULL)");
}
// Runs the base test, then pins the SQL for `!(a != !b)` — mirror of the negated-left
// variant above.
public override void Compare_bool_with_negated_bool_not_equal_negated()
{
base.Compare_bool_with_negated_bool_not_equal_negated();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] <> [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[BoolA] <> [e].[NullableBoolB]) AND [e].[NullableBoolB] IS NOT NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] <> [e].[BoolB]) AND [e].[NullableBoolA] IS NOT NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableBoolA] <> [e].[NullableBoolB]) AND ([e].[NullableBoolA] IS NOT NULL AND [e].[NullableBoolB] IS NOT NULL)) OR ([e].[NullableBoolA] IS NULL AND [e].[NullableBoolB] IS NULL)");
}
// Runs the base test, then pins the SQL for `!(!a != !b)`: three negations net to one,
// yielding `=` with IS NOT NULL guards.
public override void Compare_negated_bool_with_negated_bool_not_equal_negated()
{
base.Compare_negated_bool_with_negated_bool_not_equal_negated();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] = [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[BoolA] = [e].[NullableBoolB]) AND [e].[NullableBoolB] IS NOT NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] = [e].[BoolB]) AND [e].[NullableBoolA] IS NOT NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableBoolA] = [e].[NullableBoolB]) AND ([e].[NullableBoolA] IS NOT NULL AND [e].[NullableBoolB] IS NOT NULL)) OR ([e].[NullableBoolA] IS NULL AND [e].[NullableBoolB] IS NULL)");
}
// Runs the base test, then pins the SQL for instance `a.Equals(b)` — expected to
// translate identically to `==`.
public override void Compare_equals_method()
{
base.Compare_equals_method();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] = [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] = [e].[NullableBoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableBoolA] = [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] = [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL AND [e].[NullableBoolB] IS NULL)");
}
// Runs the base test, then pins the SQL for static `Equals(a, b)` — same translation
// as the instance Equals and `==` cases.
public override void Compare_equals_method_static()
{
base.Compare_equals_method_static();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] = [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] = [e].[NullableBoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableBoolA] = [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] = [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL AND [e].[NullableBoolB] IS NULL)");
}
// Runs the base test, then pins the SQL for `!a.Equals(b)` — matches the `!(a == b)`
// expansion.
public override void Compare_equals_method_negated()
{
base.Compare_equals_method_negated();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] <> [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[BoolA] <> [e].[NullableBoolB]) OR [e].[NullableBoolB] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] <> [e].[BoolB]) OR [e].[NullableBoolA] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableBoolA] <> [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL OR [e].[NullableBoolB] IS NULL)) AND ([e].[NullableBoolA] IS NOT NULL OR [e].[NullableBoolB] IS NOT NULL)");
}
// Runs the base test, then pins the SQL for `!Equals(a, b)` — same expansion as the
// negated instance Equals case.
public override void Compare_equals_method_negated_static()
{
base.Compare_equals_method_negated_static();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[BoolA] <> [e].[BoolB]",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[BoolA] <> [e].[NullableBoolB]) OR [e].[NullableBoolB] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] <> [e].[BoolB]) OR [e].[NullableBoolA] IS NULL",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableBoolA] <> [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL OR [e].[NullableBoolB] IS NULL)) AND ([e].[NullableBoolA] IS NOT NULL OR [e].[NullableBoolB] IS NOT NULL)");
}
// Runs the base test, then pins the SQL for `(a == b) == (c == d)`: each inner
// comparison becomes a CASE producing a BIT so the outer `=` compares the results.
public override void Compare_complex_equal_equal_equal()
{
base.Compare_complex_equal_equal_equal();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN [e].[BoolA] = [e].[BoolB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END = CASE
WHEN [e].[IntA] = [e].[IntB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN [e].[NullableBoolA] = [e].[BoolB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END = CASE
WHEN [e].[IntA] = [e].[NullableIntB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN ([e].[NullableBoolA] = [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL AND [e].[NullableBoolB] IS NULL)
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END = CASE
WHEN ([e].[NullableIntA] = [e].[NullableIntB]) OR ([e].[NullableIntA] IS NULL AND [e].[NullableIntB] IS NULL)
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END");
}
// Runs the base test, then pins the SQL for `(a == b) != (c == d)`: inner CASE/BIT
// projections compared with `<>`.
public override void Compare_complex_equal_not_equal_equal()
{
base.Compare_complex_equal_not_equal_equal();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN [e].[BoolA] = [e].[BoolB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END <> CASE
WHEN [e].[IntA] = [e].[IntB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN [e].[NullableBoolA] = [e].[BoolB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END <> CASE
WHEN [e].[IntA] = [e].[NullableIntB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN ([e].[NullableBoolA] = [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL AND [e].[NullableBoolB] IS NULL)
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END <> CASE
WHEN ([e].[NullableIntA] = [e].[NullableIntB]) OR ([e].[NullableIntA] IS NULL AND [e].[NullableIntB] IS NULL)
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END");
}
// Runs the base test, then pins the SQL for `(a != b) == (c == d)`: the `!=` branch
// carries full null-compensation inside its CASE predicate.
public override void Compare_complex_not_equal_equal_equal()
{
base.Compare_complex_not_equal_equal_equal();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN [e].[BoolA] <> [e].[BoolB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END = CASE
WHEN [e].[IntA] = [e].[IntB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN ([e].[NullableBoolA] <> [e].[BoolB]) OR [e].[NullableBoolA] IS NULL
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END = CASE
WHEN [e].[IntA] = [e].[NullableIntB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN (([e].[NullableBoolA] <> [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL OR [e].[NullableBoolB] IS NULL)) AND ([e].[NullableBoolA] IS NOT NULL OR [e].[NullableBoolB] IS NOT NULL)
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END = CASE
WHEN (([e].[NullableIntA] = [e].[NullableIntB]) AND ([e].[NullableIntA] IS NOT NULL AND [e].[NullableIntB] IS NOT NULL)) OR ([e].[NullableIntA] IS NULL AND [e].[NullableIntB] IS NULL)
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END");
}
// Runs the base test, then pins the SQL for `(a != b) != (c == d)`: CASE/BIT
// projections joined with `<>`.
public override void Compare_complex_not_equal_not_equal_equal()
{
base.Compare_complex_not_equal_not_equal_equal();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN [e].[BoolA] <> [e].[BoolB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END <> CASE
WHEN [e].[IntA] = [e].[IntB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN ([e].[NullableBoolA] <> [e].[BoolB]) OR [e].[NullableBoolA] IS NULL
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END <> CASE
WHEN [e].[IntA] = [e].[NullableIntB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN (([e].[NullableBoolA] <> [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL OR [e].[NullableBoolB] IS NULL)) AND ([e].[NullableBoolA] IS NOT NULL OR [e].[NullableBoolB] IS NOT NULL)
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END <> CASE
WHEN (([e].[NullableIntA] = [e].[NullableIntB]) AND ([e].[NullableIntA] IS NOT NULL AND [e].[NullableIntB] IS NOT NULL)) OR ([e].[NullableIntA] IS NULL AND [e].[NullableIntB] IS NULL)
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END");
}
// Runs the base test, then pins the SQL for `(a != b) == (c != d)`: both inner
// inequality CASEs carry null-compensation, compared with `=`.
public override void Compare_complex_not_equal_equal_not_equal()
{
base.Compare_complex_not_equal_equal_not_equal();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN [e].[BoolA] <> [e].[BoolB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END = CASE
WHEN [e].[IntA] <> [e].[IntB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN ([e].[NullableBoolA] <> [e].[BoolB]) OR [e].[NullableBoolA] IS NULL
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END = CASE
WHEN ([e].[IntA] <> [e].[NullableIntB]) OR [e].[NullableIntB] IS NULL
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN (([e].[NullableBoolA] <> [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL OR [e].[NullableBoolB] IS NULL)) AND ([e].[NullableBoolA] IS NOT NULL OR [e].[NullableBoolB] IS NOT NULL)
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END = CASE
WHEN (([e].[NullableIntA] <> [e].[NullableIntB]) OR ([e].[NullableIntA] IS NULL OR [e].[NullableIntB] IS NULL)) AND ([e].[NullableIntA] IS NOT NULL OR [e].[NullableIntB] IS NOT NULL)
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END");
}
// Runs the base test, then pins the SQL for `(a != b) != (c != d)`: null-compensated
// inequality CASEs compared with `<>`.
public override void Compare_complex_not_equal_not_equal_not_equal()
{
base.Compare_complex_not_equal_not_equal_not_equal();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN [e].[BoolA] <> [e].[BoolB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END <> CASE
WHEN [e].[IntA] <> [e].[IntB]
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN ([e].[NullableBoolA] <> [e].[BoolB]) OR [e].[NullableBoolA] IS NULL
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END <> CASE
WHEN ([e].[IntA] <> [e].[NullableIntB]) OR [e].[NullableIntB] IS NULL
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN (([e].[NullableBoolA] <> [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL OR [e].[NullableBoolB] IS NULL)) AND ([e].[NullableBoolA] IS NOT NULL OR [e].[NullableBoolB] IS NOT NULL)
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END <> CASE
WHEN (([e].[NullableIntA] <> [e].[NullableIntB]) OR ([e].[NullableIntA] IS NULL OR [e].[NullableIntB] IS NULL)) AND ([e].[NullableIntA] IS NOT NULL OR [e].[NullableIntB] IS NOT NULL)
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END");
}
// Runs the base test, then pins the SQL: comparing a nullable column to a null
// parameter collapses to a plain IS NULL check (no parameter sent).
public override void Compare_nullable_with_null_parameter_equal()
{
base.Compare_nullable_with_null_parameter_equal();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableStringA] IS NULL");
}
// Runs the base test, then pins the SQL: a non-null parameter is compared directly,
// with the parameter value recorded in the log header line.
public override void Compare_nullable_with_non_null_parameter_not_equal()
{
base.Compare_nullable_with_non_null_parameter_not_equal();
AssertSql(
@"@__prm_0='Foo' (Size = 4000)
SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableStringA] = @__prm_0");
}
// Runs the base test, then pins the SQL: join keys keep database null semantics
// (no IS NULL compensation on the INNER JOIN condition).
public override void Join_uses_database_semantics()
{
base.Join_uses_database_semantics();
AssertSql(
@"SELECT [e1].[Id] AS [Id1], [e2].[Id] AS [Id2], [e1].[NullableIntA], [e2].[NullableIntB]
FROM [Entities1] AS [e1]
INNER JOIN [Entities2] AS [e2] ON [e1].[NullableIntA] = [e2].[NullableIntB]");
}
// Runs the base test, then pins the SQL: a Contains over a local array holding null
// becomes IN over the non-null values plus an OR'd IS NULL check.
public override void Contains_with_local_array_closure_with_null()
{
base.Contains_with_local_array_closure_with_null();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableStringA] IN (N'Foo') OR [e].[NullableStringA] IS NULL");
}
// Runs the base test, then pins the SQL: negated Contains with a null in the array
// becomes NOT IN plus an AND'd IS NOT NULL check.
public override void Contains_with_local_array_closure_false_with_null()
{
base.Contains_with_local_array_closure_false_with_null();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableStringA] NOT IN (N'Foo') AND [e].[NullableStringA] IS NOT NULL");
}
// Runs the base test, then pins the SQL: duplicate nulls in the source array are
// deduplicated into a single IS NULL term.
public override void Contains_with_local_array_closure_with_multiple_nulls()
{
base.Contains_with_local_array_closure_with_multiple_nulls();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableStringA] IN (N'Foo') OR [e].[NullableStringA] IS NULL");
}
// Runs the base test, then pins the SQL: chained equality ORs collapse into one IN
// list, with a null comparand extracted as IS NULL.
public override void Where_multiple_ors_with_null()
{
base.Where_multiple_ors_with_null();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableStringA] IN (N'Foo', N'Blah') OR [e].[NullableStringA] IS NULL");
}
// Runs the base test, then pins the SQL: chained inequality ANDs collapse into NOT IN
// with an IS NOT NULL guard.
public override void Where_multiple_ands_with_null()
{
base.Where_multiple_ands_with_null();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableStringA] NOT IN (N'Foo', N'Blah') AND [e].[NullableStringA] IS NOT NULL");
}
// Runs the base test, then pins the SQL: a null-valued parameter in an OR chain is
// folded into the IS NULL term rather than sent as a parameter.
public override void Where_multiple_ors_with_nullable_parameter()
{
base.Where_multiple_ors_with_nullable_parameter();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableStringA] IN (N'Foo') OR [e].[NullableStringA] IS NULL");
}
// Runs the base test, then pins the SQL: mixed constant + non-null parameter collapse
// into one NOT IN list; the null-valued parameter folds into IS NOT NULL.
public override void Where_multiple_ands_with_nullable_parameter_and_constant()
{
base.Where_multiple_ands_with_nullable_parameter_and_constant();
AssertSql(
@"@__prm3_2='Blah' (Size = 4000)
SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableStringA] NOT IN (N'Foo', @__prm3_2) AND [e].[NullableStringA] IS NOT NULL");
}
// Runs the base test, then pins the SQL for the variant the IN-collapsing optimization
// cannot apply to, leaving the fully expanded AND chain.
public override void Where_multiple_ands_with_nullable_parameter_and_constant_not_optimized()
{
base.Where_multiple_ands_with_nullable_parameter_and_constant_not_optimized();
AssertSql(
@"@__prm3_2='Blah' (Size = 4000)
SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ((([e].[NullableStringB] IS NOT NULL AND (([e].[NullableStringA] <> N'Foo') OR [e].[NullableStringA] IS NULL)) AND [e].[NullableStringA] IS NOT NULL) AND [e].[NullableStringA] IS NOT NULL) AND (([e].[NullableStringA] <> @__prm3_2) OR [e].[NullableStringA] IS NULL)");
}
// Runs the base test, then pins the SQL: `??` translates to COALESCE with the C#
// `true` fallback rendered as 1.
public override void Where_coalesce()
{
base.Where_coalesce();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE COALESCE([e].[NullableBoolA], 1) = 1");
}
// Runs the base test, then pins the SQL: equality against a parameter whose value is
// null is rewritten to IS NULL.
public override void Where_equal_nullable_with_null_value_parameter()
{
base.Where_equal_nullable_with_null_value_parameter();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableStringA] IS NULL");
}
// Runs the base test, then pins the SQL: inequality against a null-valued parameter
// is rewritten to IS NOT NULL.
public override void Where_not_equal_nullable_with_null_value_parameter()
{
base.Where_not_equal_nullable_with_null_value_parameter();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableStringA] IS NOT NULL");
}
// Runs the base test, then pins the SQL: equality on a coalesced operand compensates
// for the case where both coalesce inputs AND the comparand are null.
public override void Where_equal_with_coalesce()
{
base.Where_equal_with_coalesce();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (COALESCE([e].[NullableStringA], [e].[NullableStringB]) = [e].[NullableStringC]) OR (([e].[NullableStringA] IS NULL AND [e].[NullableStringB] IS NULL) AND [e].[NullableStringC] IS NULL)");
}
// Runs the base test, then pins the SQL: inequality on a coalesced operand expands to
// the full De Morgan form of the equality compensation above.
public override void Where_not_equal_with_coalesce()
{
base.Where_not_equal_with_coalesce();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ((COALESCE([e].[NullableStringA], [e].[NullableStringB]) <> [e].[NullableStringC]) OR (([e].[NullableStringA] IS NULL AND [e].[NullableStringB] IS NULL) OR [e].[NullableStringC] IS NULL)) AND (([e].[NullableStringA] IS NOT NULL OR [e].[NullableStringB] IS NOT NULL) OR [e].[NullableStringC] IS NOT NULL)");
}
// Runs the base test, then pins the SQL: when the right-hand coalesce ends in
// non-nullable columns, no compensation terms are needed.
public override void Where_equal_with_coalesce_both_sides()
{
base.Where_equal_with_coalesce_both_sides();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE COALESCE([e].[NullableStringA], [e].[NullableStringB]) = COALESCE([e].[StringA], [e].[StringB])");
}
// Runs the base test, then pins the SQL: both sides nullable-coalesced, so inequality
// carries compensation terms for every constituent column.
public override void Where_not_equal_with_coalesce_both_sides()
{
base.Where_not_equal_with_coalesce_both_sides();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ((COALESCE([e].[NullableIntA], [e].[NullableIntB]) <> COALESCE([e].[NullableIntC], [e].[NullableIntB])) OR (([e].[NullableIntA] IS NULL AND [e].[NullableIntB] IS NULL) OR ([e].[NullableIntC] IS NULL AND [e].[NullableIntB] IS NULL))) AND (([e].[NullableIntA] IS NOT NULL OR [e].[NullableIntB] IS NOT NULL) OR ([e].[NullableIntC] IS NOT NULL OR [e].[NullableIntB] IS NOT NULL))");
}
// Runs the base test, then pins the SQL: a ternary operand becomes a CASE expression,
// and the outer equality compensates for the CASE result itself being NULL.
public override void Where_equal_with_conditional()
{
base.Where_equal_with_conditional();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (CASE
WHEN ([e].[NullableStringA] = [e].[NullableStringB]) OR ([e].[NullableStringA] IS NULL AND [e].[NullableStringB] IS NULL)
THEN [e].[NullableStringA] ELSE [e].[NullableStringB]
END = [e].[NullableStringC]) OR (CASE
WHEN ([e].[NullableStringA] = [e].[NullableStringB]) OR ([e].[NullableStringA] IS NULL AND [e].[NullableStringB] IS NULL)
THEN [e].[NullableStringA] ELSE [e].[NullableStringB]
END IS NULL AND [e].[NullableStringC] IS NULL)");
}
// Runs the base test, then pins the SQL: inequality against a CASE-translated ternary;
// note the CASE is duplicated in each compensation branch.
public override void Where_not_equal_with_conditional()
{
base.Where_not_equal_with_conditional();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE (([e].[NullableStringC] <> CASE
WHEN (([e].[NullableStringA] = [e].[NullableStringB]) AND ([e].[NullableStringA] IS NOT NULL AND [e].[NullableStringB] IS NOT NULL)) OR ([e].[NullableStringA] IS NULL AND [e].[NullableStringB] IS NULL)
THEN [e].[NullableStringA] ELSE [e].[NullableStringB]
END) OR ([e].[NullableStringC] IS NULL OR CASE
WHEN (([e].[NullableStringA] = [e].[NullableStringB]) AND ([e].[NullableStringA] IS NOT NULL AND [e].[NullableStringB] IS NOT NULL)) OR ([e].[NullableStringA] IS NULL AND [e].[NullableStringB] IS NULL)
THEN [e].[NullableStringA] ELSE [e].[NullableStringB]
END IS NULL)) AND ([e].[NullableStringC] IS NOT NULL OR CASE
WHEN (([e].[NullableStringA] = [e].[NullableStringB]) AND ([e].[NullableStringA] IS NOT NULL AND [e].[NullableStringB] IS NOT NULL)) OR ([e].[NullableStringA] IS NULL AND [e].[NullableStringB] IS NULL)
THEN [e].[NullableStringA] ELSE [e].[NullableStringB]
END IS NOT NULL)");
}
// Runs the base test, then pins the SQL: the CASE yields non-nullable columns, so only
// the nullable left operand needs an IS NULL compensation term.
public override void Where_equal_with_conditional_non_nullable()
{
base.Where_equal_with_conditional_non_nullable();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableStringC] <> CASE
WHEN (([e].[NullableStringA] = [e].[NullableStringB]) AND ([e].[NullableStringA] IS NOT NULL AND [e].[NullableStringB] IS NOT NULL)) OR ([e].[NullableStringA] IS NULL AND [e].[NullableStringB] IS NULL)
THEN [e].[StringA] ELSE [e].[StringB]
END) OR [e].[NullableStringC] IS NULL");
}
// Runs the base test, then pins the SQL: String.Contains becomes CHARINDEX > 0 (with
// the empty-string special case) ANDed with a bool equality.
public override void Where_equal_with_and_and_contains()
{
base.Where_equal_with_and_and_contains();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ((CHARINDEX([e].[NullableStringB], [e].[NullableStringA]) > 0) OR ([e].[NullableStringB] = N'')) AND ([e].[BoolA] = 1)");
}
// Runs the base test, then pins the SQL: a search condition used as a ternary result
// is wrapped in CASE/BIT and compared to 1 at the top of the predicate.
public override void Where_conditional_search_condition_in_result()
{
base.Where_conditional_search_condition_in_result();
AssertSql(
@"@__prm_0='True'
SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN @__prm_0 = 1
THEN CASE
WHEN [e].[StringA] IN (N'Foo', N'Bar')
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END ELSE CAST(0 AS BIT)
END = 1",
//
@"@__prm_0='True'
SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN @__prm_0 = 0
THEN CAST(1 AS BIT) ELSE CASE
WHEN [e].[StringA] LIKE N'A' + N'%' AND (LEFT([e].[StringA], LEN(N'A')) = N'A')
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END
END = 1");
}
// Runs the base test, then pins the SQL for nested ternaries of search conditions:
// each level becomes a nested CASE/BIT, with the whole tree compared to 1.
public override void Where_nested_conditional_search_condition_in_result()
{
base.Where_nested_conditional_search_condition_in_result();
AssertSql(
@"@__prm1_0='True'
@__prm2_1='False'
SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE CASE
WHEN @__prm1_0 = 1
THEN CASE
WHEN @__prm2_1 = 1
THEN CASE
WHEN [e].[BoolA] = 1
THEN CASE
WHEN [e].[StringA] LIKE N'A' + N'%' AND (LEFT([e].[StringA], LEN(N'A')) = N'A')
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END ELSE CAST(0 AS BIT)
END ELSE CAST(1 AS BIT)
END ELSE CASE
WHEN [e].[BoolB] = 1
THEN CASE
WHEN [e].[StringA] IN (N'Foo', N'Bar')
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END ELSE CASE
WHEN [e].[StringB] IN (N'Foo', N'Bar')
THEN CAST(1 AS BIT) ELSE CAST(0 AS BIT)
END
END
END = 1");
}
// Runs the base test, then pins the SQL: with relational null semantics opted in,
// equality is emitted raw — no IS NULL compensation.
public override void Where_equal_using_relational_null_semantics()
{
base.Where_equal_using_relational_null_semantics();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableBoolA] = [e].[NullableBoolB]");
}
// Runs the base test, then pins the SQL: a bare nullable-bool predicate is compared
// to 1.
public override void Where_nullable_bool()
{
base.Where_nullable_bool();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableBoolA] = 1");
}
// Runs the base test, then pins the SQL: comparing a nullable bool to the constant
// true produces the same `= 1` predicate as the bare form.
public override void Where_nullable_bool_equal_with_constant()
{
base.Where_nullable_bool_equal_with_constant();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableBoolA] = 1");
}
// Runs the base test, then pins the SQL: an explicit HasValue check in the query
// surfaces as IS NOT NULL ANDed with the value comparison.
public override void Where_nullable_bool_with_null_check()
{
base.Where_nullable_bool_with_null_check();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableBoolA] IS NOT NULL AND ([e].[NullableBoolA] = 1)");
}
// Runs the base test, then pins the SQL: under relational null semantics even a
// null-valued parameter is sent as a parameter and compared with `=`.
public override void Where_equal_using_relational_null_semantics_with_parameter()
{
base.Where_equal_using_relational_null_semantics_with_parameter();
AssertSql(
@"@__prm_0=''
SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableBoolA] = @__prm_0");
}
// Runs the base test, then pins the SQL: the complex predicate simplifies to a raw
// column equality when relational null semantics are on.
public override void Where_equal_using_relational_null_semantics_complex_with_parameter()
{
base.Where_equal_using_relational_null_semantics_complex_with_parameter();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableBoolA] = [e].[NullableBoolB]");
}
// Runs the base test, then pins the SQL: raw `<>` with no null compensation under
// relational null semantics.
public override void Where_not_equal_using_relational_null_semantics()
{
base.Where_not_equal_using_relational_null_semantics();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableBoolA] <> [e].[NullableBoolB]");
}
/// <summary>Verifies the SQL baseline for parameterized inequality under relational null semantics.</summary>
public override void Where_not_equal_using_relational_null_semantics_with_parameter()
{
base.Where_not_equal_using_relational_null_semantics_with_parameter();
AssertSql(
@"@__prm_0=''
SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableBoolA] <> @__prm_0");
}
/// <summary>Verifies the SQL baseline for a complex parameterized inequality under relational null semantics.</summary>
public override void Where_not_equal_using_relational_null_semantics_complex_with_parameter()
{
base.Where_not_equal_using_relational_null_semantics_complex_with_parameter();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableBoolA] <> [e].[NullableBoolB]");
}
/// <summary>Verifies the SQL baselines (two queries) when comparing a null constant to a null parameter.</summary>
public override void Where_comparison_null_constant_and_null_parameter()
{
base.Where_comparison_null_constant_and_null_parameter();
AssertSql(
@"@__prm_0='' (Size = 4000)
SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE @__prm_0 IS NULL",
//
@"@__prm_0='' (Size = 4000)
SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE @__prm_0 IS NOT NULL");
}
/// <summary>Verifies the SQL baselines (two queries) when comparing a null constant to a non-null parameter.</summary>
public override void Where_comparison_null_constant_and_nonnull_parameter()
{
base.Where_comparison_null_constant_and_nonnull_parameter();
AssertSql(
@"@__prm_0='Foo' (Size = 4000)
SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE @__prm_0 IS NULL",
//
@"@__prm_0='Foo' (Size = 4000)
SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE @__prm_0 IS NOT NULL");
}
/// <summary>Verifies the SQL baselines when comparing a non-null constant to a null parameter; the predicate folds to a constant.</summary>
public override void Where_comparison_nonnull_constant_and_null_parameter()
{
base.Where_comparison_nonnull_constant_and_null_parameter();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE 0 = 1",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]");
}
/// <summary>Verifies that the null-semantics optimizer reduces a complex predicate to a simple IS NULL check.</summary>
public override void Where_comparison_null_semantics_optimization_works_with_complex_predicates()
{
base.Where_comparison_null_semantics_optimization_works_with_complex_predicates();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableStringA] IS NULL");
}
/// <summary>Verifies that C#-style and relational null semantics produce distinct query-cache entries (different SQL).</summary>
public override void Switching_null_semantics_produces_different_cache_entry()
{
base.Switching_null_semantics_produces_different_cache_entry();
AssertSql(
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE ([e].[NullableBoolA] = [e].[NullableBoolB]) OR ([e].[NullableBoolA] IS NULL AND [e].[NullableBoolB] IS NULL)",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE [e].[NullableBoolA] = [e].[NullableBoolB]");
}
/// <summary>Verifies that switching a parameter value to null yields a different cache entry (constant-folded SQL).</summary>
public override void Switching_parameter_value_to_null_produces_different_cache_entry()
{
base.Switching_parameter_value_to_null_produces_different_cache_entry();
AssertSql(
@"@__prm_0='Foo' (Size = 4000)
SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE @__prm_0 = N'Foo'",
//
@"SELECT [e].[Id]
FROM [Entities1] AS [e]
WHERE 0 = 1");
}
/// <summary>Verifies the SQL baseline when composing LINQ over a raw FromSql query under relational null semantics.</summary>
public override void From_sql_composed_with_relational_null_comparison()
{
base.From_sql_composed_with_relational_null_comparison();
AssertSql(
@"SELECT [c].[Id], [c].[BoolA], [c].[BoolB], [c].[BoolC], [c].[IntA], [c].[IntB], [c].[IntC], [c].[NullableBoolA], [c].[NullableBoolB], [c].[NullableBoolC], [c].[NullableIntA], [c].[NullableIntB], [c].[NullableIntC], [c].[NullableStringA], [c].[NullableStringB], [c].[NullableStringC], [c].[StringA], [c].[StringB], [c].[StringC]
FROM (
    SELECT * FROM ""Entities1""
) AS [c]
WHERE [c].[StringA] = [c].[StringB]");
}
/// <summary>Verifies the SQL baseline for projecting a nullable bool coalesced to false.</summary>
public override void Projecting_nullable_bool_with_coalesce()
{
base.Projecting_nullable_bool_with_coalesce();
AssertSql(
@"SELECT [e].[Id], CAST(COALESCE([e].[NullableBoolA], 0) AS bit) AS [Coalesce]
FROM [Entities1] AS [e]");
}
/// <summary>Verifies the SQL baseline for projecting nested coalesce expressions over nullable bools.</summary>
public override void Projecting_nullable_bool_with_coalesce_nested()
{
base.Projecting_nullable_bool_with_coalesce_nested();
AssertSql(
@"SELECT [e].[Id], CAST(COALESCE([e].[NullableBoolA], COALESCE([e].[NullableBoolB], 0)) AS bit) AS [Coalesce]
FROM [Entities1] AS [e]");
}
// Compares the SQL captured by the test logger against the expected baseline(s), in order.
private void AssertSql(params string[] expected)
=> Fixture.TestSqlLoggerFactory.AssertBaseline(expected);
// Clears the captured SQL log between test runs.
protected override void ClearLog()
=> Fixture.TestSqlLoggerFactory.Clear();
/// <summary>Creates a NullSemanticsContext, optionally opting in to relational null semantics; tracking is disabled.</summary>
protected override NullSemanticsContext CreateContext(bool useRelationalNulls = false)
{
var options = new DbContextOptionsBuilder(Fixture.CreateOptions());
if (useRelationalNulls)
{
// Use database-style (relational) null comparison semantics instead of C# semantics.
new MySqlDbContextOptionsBuilder(options).UseRelationalNulls();
}
var context = new NullSemanticsContext(options.Options);
// These tests only assert generated SQL, so change tracking is unnecessary.
context.ChangeTracker.QueryTrackingBehavior = QueryTrackingBehavior.NoTracking;
return context;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using Signum.Engine.Maps;
using Signum.Entities.Authorization;
using Signum.Entities.Basics;
using Signum.Engine.Basics;
using Signum.Utilities;
using Signum.Entities;
using System.Reflection;
using Signum.Entities.DynamicQuery;
namespace Signum.Engine.Authorization
{
/// <summary>
/// Authorization logic controlling, per role, which queries may be executed and
/// whether they are allowed fully or only embedded (<c>QueryAllowed</c>).
/// </summary>
public static class QueryAuthLogic
{
// Per-role rule cache; assigned in Start (null-forgiven until then).
static AuthCache<RuleQueryEntity, QueryAllowedRule, QueryEntity, object, QueryAllowed> cache = null!;
// Query names exempted from coercion by entity-type permissions.
public static HashSet<object> AvoidCoerce = new HashSet<object>();
// Manual rule access for administrative overrides.
public static IManualAuth<object, QueryAllowed> Manual { get { return cache; } }
// True once Start has initialized the cache.
public static bool IsStarted { get { return cache != null; } }
// Caps the automatic upgrade a query may receive during rule merging (see QueryMerger).
public readonly static Dictionary<object, QueryAllowed> MaxAutomaticUpgrade = new Dictionary<object, QueryAllowed>();
/// <summary>Wires the rule table, rule cache, query filter and XML import/export. Idempotent via NotDefined.</summary>
public static void Start(SchemaBuilder sb)
{
if (sb.NotDefined(MethodInfo.GetCurrentMethod()))
{
AuthLogic.AssertStarted(sb);
QueryLogic.Start(sb);
QueryLogic.Queries.AllowQuery += new Func<object, bool, bool>(dqm_AllowQuery);
sb.Include<RuleQueryEntity>()
.WithUniqueIndex(rt => new { rt.Resource, rt.Role });
cache = new AuthCache<RuleQueryEntity, QueryAllowedRule, QueryEntity, object, QueryAllowed>(sb,
toKey: qn => QueryLogic.ToQueryName(qn.Key),
toEntity: QueryLogic.GetQueryEntity,
isEquals: (q1, q2) => q1 == q2,
merger: new QueryMerger(),
invalidateWithTypes: true,
coercer: QueryCoercer.Instance);
// Remove a role's query rules when the role itself is deleted.
sb.Schema.EntityEvents<RoleEntity>().PreUnsafeDelete += query =>
{
Database.Query<RuleQueryEntity>().Where(r => query.Contains(r.Role.Entity)).UnsafeDelete();
return null;
};
AuthLogic.ExportToXml += exportAll => cache.ExportXml("Queries", "Query", QueryUtils.GetKey, b => b.ToString(),
exportAll ? QueryLogic.QueryNames.Values.ToList(): null);
AuthLogic.ImportFromXml += (x, roles, replacements) =>
{
string replacementKey = "AuthRules:" + typeof(QueryEntity).Name;
// Let the user map exported query keys to the ones known in this database.
replacements.AskForReplacements(
x.Element("Queries").Elements("Role").SelectMany(r => r.Elements("Query")).Select(p => p.Attribute("Resource").Value).ToHashSet(),
QueryLogic.QueryNames.Keys.ToHashSet(),
replacementKey);
return cache.ImportXml(x, "Queries", "Query", roles, s =>
{
var qn = QueryLogic.TryToQueryName(replacements.Apply(replacementKey, s));
if (qn == null)
return null;
return QueryLogic.GetQueryEntity(qn);
}, str =>
{
if (Enum.TryParse<QueryAllowed>(str, out var result))
return result;
var bResult = bool.Parse(str); //For backwards compatibility: older exports stored booleans.
return bResult ? QueryAllowed.Allow : QueryAllowed.None;
});
};
sb.Schema.Table<QueryEntity>().PreDeleteSqlSync += new Func<Entity, SqlPreCommand>(AuthCache_PreDeleteSqlSync);
}
}
// Generates the SQL that deletes rules referencing a query about to be removed during schema sync.
static SqlPreCommand AuthCache_PreDeleteSqlSync(Entity arg)
{
return Administrator.DeleteWhereScript((RuleQueryEntity rt) => rt.Resource, (QueryEntity)arg);
}
/// <summary>Caps the automatic upgrade for a query name (Dictionary.Add throws if already registered).</summary>
public static void SetMaxAutomaticUpgrade(object queryName, QueryAllowed allowed)
{
MaxAutomaticUpgrade.Add(queryName, allowed);
}
// Query filter: embedded-only queries are rejected when requested full screen.
static bool dqm_AllowQuery(object queryName, bool fullScreen)
{
var allowed = GetQueryAllowed(queryName);
return allowed == QueryAllowed.Allow || allowed == QueryAllowed.EmbeddedOnly && !fullScreen;
}
/// <summary>Returns the effective (merged) rules dictionary for the current role.</summary>
public static DefaultDictionary<object, QueryAllowed> QueryRules()
{
return cache.GetDefaultDictionary();
}
/// <summary>Builds the editable rule pack for a role and entity type, marking values that coercion would downgrade.</summary>
public static QueryRulePack GetQueryRules(Lite<RoleEntity> role, TypeEntity typeEntity)
{
var result = new QueryRulePack { Role = role, Type = typeEntity };
cache.GetRules(result, QueryLogic.GetTypeQueries(typeEntity));
var coercer = QueryCoercer.Instance.GetCoerceValue(role);
// CoercedValues = allowed values that would not survive coercion for this query.
result.Rules.ForEach(r => r.CoercedValues = EnumExtensions.GetValues<QueryAllowed>()
.Where(a => !coercer(QueryLogic.ToQueryName(r.Resource.Key), a).Equals(a))
.ToArray());
return result;
}
/// <summary>Persists the rules of the pack, restricted to the queries of the pack's entity type.</summary>
public static void SetQueryRules(QueryRulePack rules)
{
string[] queryKeys = QueryLogic.Queries.GetTypeQueries(TypeLogic.EntityToType[rules.Type]).Keys.Select(qn => QueryUtils.GetKey(qn)).ToArray();
cache.SetRules(rules, r => queryKeys.Contains(r.Key));
}
/// <summary>Effective permission for the current role; Allow when auth is disabled or running in global mode.</summary>
public static QueryAllowed GetQueryAllowed(object queryName)
{
if (!AuthLogic.IsEnabled || ExecutionMode.InGlobal)
return QueryAllowed.Allow;
return cache.GetAllowed(RoleEntity.Current, queryName);
}
/// <summary>Effective permission for an explicit role.</summary>
public static QueryAllowed GetQueryAllowed(Lite<RoleEntity> role, object queryName)
{
return cache.GetAllowed(role, queryName);
}
/// <summary>Summarizes the permissions of all queries of an entity type for a role.</summary>
public static AuthThumbnail? GetAllowedThumbnail(Lite<RoleEntity> role, Type entityType)
{
return QueryLogic.Queries.GetTypeQueries(entityType).Keys.Select(qn => cache.GetAllowed(role, qn)).Collapse();
}
// True when every implementation type is at least readable; ByAll implementations are always readable.
internal static bool AllCanRead(this Implementations implementations, Func<Type, TypeAllowedAndConditions> getAllowed)
{
if (implementations.IsByAll)
return true;
return implementations.Types.All(t => getAllowed(t).MaxUI() != TypeAllowedBasic.None);
}
/// <summary>Exempts a query from coercion by entity-type permissions.</summary>
public static void SetAvoidCoerce(object queryName)
{
AvoidCoerce.Add(queryName);
}
}
/// <summary>
/// Merges the QueryAllowed values inherited from base roles, applying the role's
/// merge strategy (Union => max, Intersection => min) and an optional automatic
/// upgrade capped by <see cref="QueryAuthLogic.MaxAutomaticUpgrade"/>.
/// </summary>
class QueryMerger : IMerger<object, QueryAllowed>
{
    /// <summary>Computes the effective allowed value for <paramref name="role"/> from its base roles' values.</summary>
    public QueryAllowed Merge(object key, Lite<RoleEntity> role, IEnumerable<KeyValuePair<Lite<RoleEntity>, QueryAllowed>> baseValues)
    {
        // Fix: materialize once. baseValues may be a lazy sequence and was previously
        // enumerated twice (by Max/Min and again by the Where/All below).
        var values = baseValues.ToList();
        QueryAllowed best = AuthLogic.GetMergeStrategy(role) == MergeStrategy.Union ?
            Max(values.Select(a => a.Value)) :
            Min(values.Select(a => a.Value));
        var maxUp = QueryAuthLogic.MaxAutomaticUpgrade.TryGetS(key);
        if (maxUp.HasValue && maxUp <= best)
            return best;
        if (!BasicPermission.AutomaticUpgradeOfQueries.IsAuthorized(role))
            return best;
        // Upgrade only when every base role holding the merged value got it from the default.
        if (values.Where(a => a.Value.Equals(best)).All(a => GetDefault(key, a.Key).Equals(a.Value)))
        {
            var def = GetDefault(key, role);
            return maxUp.HasValue && maxUp <= def ? maxUp.Value : def;
        }
        return best;
    }
    // Maximum of the sequence, short-circuiting once Allow (the top value) is reached.
    static QueryAllowed Max(IEnumerable<QueryAllowed> baseValues)
    {
        QueryAllowed result = QueryAllowed.None;
        foreach (var item in baseValues)
        {
            if (item > result)
                result = item;
            if (result == QueryAllowed.Allow)
                return result;
        }
        return result;
    }
    // Minimum of the sequence, short-circuiting once None (the bottom value) is reached.
    static QueryAllowed Min(IEnumerable<QueryAllowed> baseValues)
    {
        QueryAllowed result = QueryAllowed.Allow;
        foreach (var item in baseValues)
        {
            if (item < result)
                result = item;
            if (result == QueryAllowed.None)
                return result;
        }
        return result;
    }
    /// <summary>Default value for roles with no base values, honoring the automatic-upgrade cap.</summary>
    public Func<object, QueryAllowed> MergeDefault(Lite<RoleEntity> role)
    {
        return key =>
        {
            if (AuthLogic.GetDefaultAllowed(role))
                return QueryAllowed.Allow;
            if (!BasicPermission.AutomaticUpgradeOfQueries.IsAuthorized(role))
                return QueryAllowed.None;
            var maxUp = QueryAuthLogic.MaxAutomaticUpgrade.TryGetS(key);
            var def = GetDefault(key, role);
            return maxUp.HasValue && maxUp <= def ? maxUp.Value : def;
        };
    }
    // A query defaults to Allow when every entity implementation behind it is readable by the role.
    QueryAllowed GetDefault(object key, Lite<RoleEntity> role)
    {
        return QueryLogic.Queries.GetEntityImplementations(key).AllCanRead(t => TypeAuthLogic.GetAllowed(role, t)) ? QueryAllowed.Allow : QueryAllowed.None;
    }
}
/// <summary>
/// Coerces a QueryAllowed value down to None when the role cannot read the entities
/// behind the query, unless the query opted out via QueryAuthLogic.AvoidCoerce.
/// </summary>
class QueryCoercer : Coercer<QueryAllowed, object>
{
    public static readonly QueryCoercer Instance = new QueryCoercer();

    private QueryCoercer()
    {
    }

    /// <summary>Coercion keyed by role: (queryName, allowed) -> effective allowed.</summary>
    public override Func<object, QueryAllowed, QueryAllowed> GetCoerceValue(Lite<RoleEntity> role)
    {
        return (queryName, allowed) =>
        {
            // Nothing to coerce for exempted queries or for the bottom value.
            if (allowed == QueryAllowed.None || QueryAuthLogic.AvoidCoerce.Contains(queryName))
                return allowed;

            bool readable = QueryLogic.Queries.GetEntityImplementations(queryName)
                .AllCanRead(t => TypeAuthLogic.GetAllowed(role, t));

            return readable ? allowed : QueryAllowed.None;
        };
    }

    /// <summary>Coercion keyed by query: (role, allowed) -> effective allowed, using manual type rules.</summary>
    public override Func<Lite<RoleEntity>, QueryAllowed, QueryAllowed> GetCoerceValueManual(object queryName)
    {
        return (role, allowed) =>
        {
            // Nothing to coerce for exempted queries or for the bottom value.
            if (allowed == QueryAllowed.None || QueryAuthLogic.AvoidCoerce.Contains(queryName))
                return allowed;

            bool readable = QueryLogic.Queries.GetEntityImplementations(queryName)
                .AllCanRead(t => TypeAuthLogic.Manual.GetAllowed(role, t));

            return readable ? allowed : QueryAllowed.None;
        };
    }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.IO;
using System.Xml;
using Microsoft.Build.Evaluation;
using Microsoft.Build.Execution;
using Microsoft.Build.Framework;
using Microsoft.Build.UnitTests.BackEnd;
using Shouldly;
using Xunit;
namespace Microsoft.Build.UnitTests.OM.Instance
{
/// <summary>
/// Tests for the HostServices object.
/// </summary>
public class HostServices_Tests
{
/// <summary>
/// Setup: ensures a clean global project collection before each test.
/// </summary>
public HostServices_Tests()
{
ProjectCollection.GlobalProjectCollection.UnloadAllProjects();
}
/// <summary>
/// Test allowed host object registrations
/// </summary>
[Fact]
public void TestValidHostObjectRegistration()
{
HostServices hostServices = new HostServices();
TestHostObject hostObject = new TestHostObject();
TestHostObject hostObject2 = new TestHostObject();
TestHostObject hostObject3 = new TestHostObject();
hostServices.RegisterHostObject("foo.proj", "target", "task", hostObject);
hostServices.RegisterHostObject("foo.proj", "target2", "task", hostObject2);
hostServices.RegisterHostObject("foo.proj", "target", "task2", hostObject3);
Assert.Same(hostObject, hostServices.GetHostObject("foo.proj", "target", "task"));
Assert.Same(hostObject2, hostServices.GetHostObject("foo.proj", "target2", "task"));
Assert.Same(hostObject3, hostServices.GetHostObject("foo.proj", "target", "task2"));
}
/// <summary>
/// Test ensuring a null project for host object registration throws.
/// </summary>
[Fact]
public void TestInvalidHostObjectRegistration_NullProject()
{
Assert.Throws<ArgumentNullException>(() =>
{
HostServices hostServices = new HostServices();
TestHostObject hostObject = new TestHostObject();
hostServices.RegisterHostObject(null, "target", "task", hostObject);
}
);
}
/// <summary>
/// Test ensuring a null target for host object registration throws.
/// </summary>
[Fact]
public void TestInvalidHostObjectRegistration_NullTarget()
{
Assert.Throws<ArgumentNullException>(() =>
{
HostServices hostServices = new HostServices();
TestHostObject hostObject = new TestHostObject();
hostServices.RegisterHostObject("project", null, "task", hostObject);
}
);
}
/// <summary>
/// Test ensuring a null task for host object registration throws.
/// </summary>
[Fact]
public void TestInvalidHostObjectRegistration_NullTask()
{
Assert.Throws<ArgumentNullException>(() =>
{
HostServices hostServices = new HostServices();
TestHostObject hostObject = new TestHostObject();
hostServices.RegisterHostObject("project", "target", null, hostObject);
}
);
}
/// <summary>
/// Test which verifies host object unregistration (registering null clears the entry).
/// </summary>
[Fact]
public void TestUnregisterHostObject()
{
HostServices hostServices = new HostServices();
TestHostObject hostObject = new TestHostObject();
hostServices.RegisterHostObject("project", "target", "task", hostObject);
Assert.Same(hostObject, hostServices.GetHostObject("project", "target", "task"));
hostServices.RegisterHostObject("project", "target", "task", hostObject: null);
Assert.Null(hostServices.GetHostObject("project", "target", "task"));
}
/// <summary>
/// Test which shows that affinity defaults to Any.
/// </summary>
[Fact]
public void TestAffinityDefaultsToAny()
{
HostServices hostServices = new HostServices();
Assert.Equal(NodeAffinity.Any, hostServices.GetNodeAffinity("project"));
}
/// <summary>
/// Test which shows that setting a host object causes the affinity to become InProc.
/// </summary>
[Fact]
public void TestHostObjectCausesInProcAffinity()
{
HostServices hostServices = new HostServices();
TestHostObject hostObject = new TestHostObject();
hostServices.RegisterHostObject("project", "target", "task", hostObject);
Assert.Equal(NodeAffinity.InProc, hostServices.GetNodeAffinity("project"));
}
/// <summary>
/// Test of the ability to set and change specific project affinities.
/// </summary>
[Fact]
public void TestSpecificAffinityRegistration()
{
HostServices hostServices = new HostServices();
hostServices.SetNodeAffinity("project", NodeAffinity.InProc);
Assert.Equal(NodeAffinity.InProc, hostServices.GetNodeAffinity("project"));
hostServices.SetNodeAffinity("project", NodeAffinity.OutOfProc);
Assert.Equal(NodeAffinity.OutOfProc, hostServices.GetNodeAffinity("project"));
hostServices.SetNodeAffinity("project", NodeAffinity.Any);
Assert.Equal(NodeAffinity.Any, hostServices.GetNodeAffinity("project"));
}
/// <summary>
/// Make sure we get the default affinity when the affinity map exists, but the specific
/// project we're requesting is not set.
/// </summary>
[Fact]
public void TestDefaultAffinityWhenProjectNotRegistered()
{
HostServices hostServices = new HostServices();
hostServices.SetNodeAffinity("project1", NodeAffinity.InProc);
Assert.Equal(NodeAffinity.Any, hostServices.GetNodeAffinity("project2"));
}
/// <summary>
/// Test of setting the default affinity (empty string applies to all projects).
/// </summary>
[Fact]
public void TestGeneralAffinityRegistration()
{
HostServices hostServices = new HostServices();
hostServices.SetNodeAffinity(String.Empty, NodeAffinity.InProc);
Assert.Equal(NodeAffinity.InProc, hostServices.GetNodeAffinity("project"));
Assert.Equal(NodeAffinity.InProc, hostServices.GetNodeAffinity("project2"));
hostServices.SetNodeAffinity(String.Empty, NodeAffinity.OutOfProc);
Assert.Equal(NodeAffinity.OutOfProc, hostServices.GetNodeAffinity("project"));
Assert.Equal(NodeAffinity.OutOfProc, hostServices.GetNodeAffinity("project2"));
hostServices.SetNodeAffinity(String.Empty, NodeAffinity.Any);
Assert.Equal(NodeAffinity.Any, hostServices.GetNodeAffinity("project"));
Assert.Equal(NodeAffinity.Any, hostServices.GetNodeAffinity("project2"));
}
/// <summary>
/// Test which ensures specific project affinities override general affinity.
/// </summary>
[Fact]
public void TestOverrideGeneralAffinityRegistration()
{
HostServices hostServices = new HostServices();
hostServices.SetNodeAffinity(String.Empty, NodeAffinity.InProc);
hostServices.SetNodeAffinity("project", NodeAffinity.OutOfProc);
Assert.Equal(NodeAffinity.OutOfProc, hostServices.GetNodeAffinity("project"));
Assert.Equal(NodeAffinity.InProc, hostServices.GetNodeAffinity("project2"));
}
/// <summary>
/// Test of clearing the affinity settings for all projects (null project name clears the map).
/// </summary>
[Fact]
public void TestClearingAffinities()
{
HostServices hostServices = new HostServices();
hostServices.SetNodeAffinity("project", NodeAffinity.OutOfProc);
Assert.Equal(NodeAffinity.OutOfProc, hostServices.GetNodeAffinity("project"));
hostServices.SetNodeAffinity(null, NodeAffinity.OutOfProc);
Assert.Equal(NodeAffinity.Any, hostServices.GetNodeAffinity("project"));
hostServices.SetNodeAffinity(String.Empty, NodeAffinity.OutOfProc);
Assert.Equal(NodeAffinity.OutOfProc, hostServices.GetNodeAffinity("project"));
hostServices.SetNodeAffinity(null, NodeAffinity.OutOfProc);
Assert.Equal(NodeAffinity.Any, hostServices.GetNodeAffinity("project"));
}
/// <summary>
/// Test which ensures that setting an OutOfProc affinity for a project with a host object throws.
/// </summary>
[Fact]
public void TestContradictoryAffinityCausesException_OutOfProc()
{
Assert.Throws<InvalidOperationException>(() =>
{
HostServices hostServices = new HostServices();
TestHostObject hostObject = new TestHostObject();
hostServices.RegisterHostObject("project", "target", "task", hostObject);
Assert.Equal(NodeAffinity.InProc, hostServices.GetNodeAffinity("project"));
hostServices.SetNodeAffinity("project", NodeAffinity.OutOfProc);
}
);
}
/// <summary>
/// Test which ensures that setting an Any affinity for a project with a host object throws.
/// </summary>
[Fact]
public void TestContradictoryAffinityCausesException_Any()
{
Assert.Throws<InvalidOperationException>(() =>
{
HostServices hostServices = new HostServices();
TestHostObject hostObject = new TestHostObject();
hostServices.RegisterHostObject("project", "target", "task", hostObject);
Assert.Equal(NodeAffinity.InProc, hostServices.GetNodeAffinity("project"));
hostServices.SetNodeAffinity("project", NodeAffinity.Any);
}
);
}
#if FEATURE_COM_INTEROP
/// <summary>
/// Test which ensures that setting an Any affinity for a project with a remote host object does not throw.
/// </summary>
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Mono, "disable com tests on mono")]
public void TestNoContradictoryRemoteHostObjectAffinity()
{
HostServices hostServices = new HostServices();
hostServices.RegisterHostObject("project", "target", "task", "moniker");
hostServices.SetNodeAffinity("project", NodeAffinity.Any);
}
#endif
/// <summary>
/// Test which ensures that setting the InProc affinity for a project with a host object is allowed.
/// </summary>
[Fact]
public void TestNonContradictoryAffinityAllowed()
{
HostServices hostServices = new HostServices();
TestHostObject hostObject = new TestHostObject();
hostServices.RegisterHostObject("project", "target", "task", hostObject);
Assert.Equal(NodeAffinity.InProc, hostServices.GetNodeAffinity("project"));
hostServices.SetNodeAffinity("project", NodeAffinity.InProc);
Assert.Equal(NodeAffinity.InProc, hostServices.GetNodeAffinity("project"));
}
/// <summary>
/// Test which ensures that setting a host object for a project with an out-of-proc affinity throws.
/// </summary>
[Fact]
public void TestContraditcoryHostObjectCausesException_OutOfProc()
{
Assert.Throws<InvalidOperationException>(() =>
{
HostServices hostServices = new HostServices();
TestHostObject hostObject = new TestHostObject();
hostServices.SetNodeAffinity("project", NodeAffinity.OutOfProc);
hostServices.RegisterHostObject("project", "target", "task", hostObject);
}
);
}
/// <summary>
/// Test which ensures the host object can be set for a project which has the Any affinity specifically set.
/// </summary>
[Fact]
public void TestNonContraditcoryHostObjectAllowed_Any()
{
HostServices hostServices = new HostServices();
TestHostObject hostObject = new TestHostObject();
hostServices.SetNodeAffinity("project", NodeAffinity.Any);
hostServices.RegisterHostObject("project", "target", "task", hostObject);
Assert.Equal(NodeAffinity.InProc, hostServices.GetNodeAffinity("project"));
}
#if FEATURE_COM_INTEROP
/// <summary>
/// Test which ensures the remote host object cannot affect a project which has the Any affinity specifically set.
/// </summary>
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Mono, "disable com tests on mono")]
public void TestRegisterRemoteHostObjectNoAffect_Any2()
{
HostServices hostServices = new HostServices();
hostServices.SetNodeAffinity("project", NodeAffinity.Any);
hostServices.RegisterHostObject("project", "target", "task", "moniker");
hostServices.GetNodeAffinity("project").ShouldBe(NodeAffinity.Any);
}
#endif
/// <summary>
/// Test which ensures the host object can be set for a project which has an out-of-proc affinity only because that affinity
/// is implied by being set generally for all project, not for that specific project.
/// </summary>
[Fact]
public void TestNonContraditcoryHostObjectAllowed_ImplicitOutOfProc()
{
HostServices hostServices = new HostServices();
TestHostObject hostObject = new TestHostObject();
hostServices.SetNodeAffinity(String.Empty, NodeAffinity.InProc);
hostServices.RegisterHostObject("project", "target", "task", hostObject);
}
/// <summary>
/// Test which ensures the host object can be set for a project which has the InProc affinity specifically set.
/// </summary>
[Fact]
public void TestNonContraditcoryHostObjectAllowed_InProc()
{
HostServices hostServices = new HostServices();
TestHostObject hostObject = new TestHostObject();
hostServices.SetNodeAffinity("project", NodeAffinity.InProc);
hostServices.RegisterHostObject("project", "target", "task", hostObject);
}
#if FEATURE_COM_INTEROP
/// <summary>
/// Test which ensures the affinity for a project can be changed once the in process host object is registered
/// </summary>
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Mono, "disable com tests on mono")]
public void TestAffinityChangeAfterRegisterInprocessHostObject()
{
HostServices hostServices = new HostServices();
hostServices.RegisterHostObject("project", "target", "task", "moniker");
hostServices.GetNodeAffinity("project").ShouldBe(NodeAffinity.Any);
TestHostObject hostObject = new TestHostObject();
hostServices.RegisterHostObject("project", "target", "task", hostObject);
hostServices.GetNodeAffinity("project").ShouldBe(NodeAffinity.InProc);
}
#endif
/// <summary>
/// Test which ensures the affinity for a project can be changed once the host object is cleared.
/// </summary>
[Fact]
public void TestAffinityChangeAfterClearingHostObject()
{
HostServices hostServices = new HostServices();
TestHostObject hostObject = new TestHostObject();
hostServices.RegisterHostObject("project", "target", "task", hostObject);
Assert.Equal(NodeAffinity.InProc, hostServices.GetNodeAffinity("project"));
hostServices.RegisterHostObject("project", "target", "task", hostObject: null);
Assert.Equal(NodeAffinity.Any, hostServices.GetNodeAffinity("project"));
hostServices.SetNodeAffinity("project", NodeAffinity.OutOfProc);
Assert.Equal(NodeAffinity.OutOfProc, hostServices.GetNodeAffinity("project"));
}
/// <summary>
/// Test which ensures that setting then clearing the host object restores a previously specifically set non-conflicting affinity.
/// </summary>
[Fact]
public void TestUnregisteringNonConflictingHostObjectRestoresOriginalAffinity()
{
HostServices hostServices = new HostServices();
TestHostObject hostObject = new TestHostObject();
hostServices.SetNodeAffinity(String.Empty, NodeAffinity.OutOfProc);
hostServices.SetNodeAffinity("project", NodeAffinity.Any);
Assert.Equal(NodeAffinity.OutOfProc, hostServices.GetNodeAffinity("project2"));
Assert.Equal(NodeAffinity.Any, hostServices.GetNodeAffinity("project"));
hostServices.RegisterHostObject("project", "target", "task", hostObject);
Assert.Equal(NodeAffinity.InProc, hostServices.GetNodeAffinity("project"));
hostServices.RegisterHostObject("project", "target", "task", hostObject: null);
Assert.Equal(NodeAffinity.Any, hostServices.GetNodeAffinity("project"));
Assert.Equal(NodeAffinity.OutOfProc, hostServices.GetNodeAffinity("project2"));
}
/// <summary>
/// Tests that creating a BuildRequestData with a non-conflicting HostServices and ProjectInstance works.
/// </summary>
[Fact]
public void TestProjectInstanceWithNonConflictingHostServices()
{
HostServices hostServices = new HostServices();
ProjectInstance project = CreateDummyProject("foo.proj");
BuildRequestData data = new BuildRequestData(project, new string[] { }, hostServices);
hostServices.SetNodeAffinity(project.FullPath, NodeAffinity.InProc);
BuildRequestData data2 = new BuildRequestData(project, new string[] { }, hostServices);
}
/// <summary>
/// Tests that unloading all projects from the project collection
/// discards the host services
/// </summary>
[Fact]
public void UnloadedProjectDiscardsHostServicesAllProjects()
{
HostServices hostServices = new HostServices();
TestHostObject th = new TestHostObject();
ProjectCollection.GlobalProjectCollection.HostServices = hostServices;
Project project = LoadDummyProject("foo.proj");
hostServices.RegisterHostObject(project.FullPath, "test", "Message", th);
ProjectCollection.GlobalProjectCollection.UnloadAllProjects();
Assert.False(hostServices.HasInProcessHostObject(project.FullPath));
}
/// <summary>
/// Tests that unloading the last project from the project collection
/// discards the host services for that project
/// </summary>
[Fact]
public void UnloadedProjectDiscardsHostServices()
{
HostServices hostServices = new HostServices();
TestHostObject th = new TestHostObject();
ProjectCollection.GlobalProjectCollection.HostServices = hostServices;
Project project1 = LoadDummyProject("foo.proj");
Project project2 = LoadDummyProject("foo.proj");
hostServices.RegisterHostObject(project1.FullPath, "test", "Message", th);
ProjectCollection.GlobalProjectCollection.UnloadProject(project1);
Assert.True(hostServices.HasInProcessHostObject(project2.FullPath));
ProjectCollection.GlobalProjectCollection.UnloadProject(project2);
Assert.False(hostServices.HasInProcessHostObject(project2.FullPath));
}
#if FEATURE_COM_INTEROP
/// <summary>
/// Tests that registering overrides an existing registered remote host object.
/// </summary>
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Mono, "disable com tests on mono")]
public void TestRegisterOverrideExistingRegisted()
{
var hostServices = new HostServices();
var rot = new MockRunningObjectTable();
hostServices.SetTestRunningObjectTable(rot);
var moniker = Guid.NewGuid().ToString();
var remoteHost = new MockRemoteHostObject(1);
rot.Register(moniker, remoteHost);
var newMoniker = Guid.NewGuid().ToString();
var newRemoteHost = new MockRemoteHostObject(2);
rot.Register(newMoniker, newRemoteHost);
hostServices.RegisterHostObject(
"WithOutOfProc.targets",
"DisplayMessages",
"ATask",
remoteHost);
hostServices.RegisterHostObject("project", "test", "Message", moniker);
hostServices.RegisterHostObject("project", "test", "Message", newMoniker);
var resultObject = (ITestRemoteHostObject)hostServices.GetHostObject("project", "test", "Message");
resultObject.GetState().ShouldBe(2);
}
#endif
/// <summary>
/// Creates a dummy project instance.
/// </summary>
public ProjectInstance CreateDummyProject(string fileName)
{
string contents = ObjectModelHelpers.CleanupFileContents(@"
<Project xmlns='msbuildnamespace' ToolsVersion='msbuilddefaulttoolsversion'>
<Target Name='test'>
</Target>
</Project>
");
Project project = new Project(new XmlTextReader(new StringReader(contents)), new Dictionary<string, string>(), ObjectModelHelpers.MSBuildDefaultToolsVersion);
project.FullPath = fileName;
ProjectInstance instance = project.CreateProjectInstance();
return instance;
}
/// <summary>
/// Loads a dummy project instance into the global project collection.
/// </summary>
public Project LoadDummyProject(string fileName)
{
string contents = ObjectModelHelpers.CleanupFileContents(@"
<Project xmlns='msbuildnamespace' ToolsVersion='msbuilddefaulttoolsversion'>
<Target Name='test'>
<Message text='hello' />
</Target>
</Project>
");
// Unique global property so repeated loads of the same path create distinct Project instances.
Dictionary<string, string> globals = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
globals["UniqueDummy"] = Guid.NewGuid().ToString();
Project project =
ProjectCollection.GlobalProjectCollection.LoadProject(
new XmlTextReader(new StringReader(contents)),
globals,
ObjectModelHelpers.MSBuildDefaultToolsVersion);
project.FullPath = fileName;
return project;
}
/// <summary>
/// A dummy host object class.
/// </summary>
private class TestHostObject : ITaskHost
{
/// <summary>
/// Constructor.
/// </summary>
public TestHostObject()
{
}
}
}
}
| |
#if !UNITY_WINRT || UNITY_EDITOR || (UNITY_WP8 && !UNITY_WP_8_1)
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using PlayFab.Json.Utilities;
using System.Globalization;
namespace PlayFab.Json.Linq
{
/// <summary>
/// Represents a reader that provides fast, non-cached, forward-only access to serialized Json data.
/// </summary>
public class JTokenReader : JsonReader, IJsonLineInfo
{
private readonly JToken _root;
private JToken _parent;
private JToken _current;
/// <summary>
/// Initializes a new instance of the <see cref="JTokenReader"/> class.
/// </summary>
/// <param name="token">The token to read from; must not be null.</param>
public JTokenReader(JToken token)
{
ValidationUtils.ArgumentNotNull(token, "token");
_root = token;
_current = token;
}
/// <summary>
/// Reads the next JSON token from the stream as a <see cref="T:Byte[]"/>.
/// </summary>
/// <returns>
/// A <see cref="T:Byte[]"/> or a null reference if the next JSON token is null.
/// </returns>
public override byte[] ReadAsBytes()
{
Read();
// attempt to convert possible base 64 string to bytes
if (TokenType == JsonToken.String)
{
string s = (string) Value;
// Empty string maps to an empty array; otherwise the string must be valid base64.
byte[] data = (s.Length == 0) ? new byte[0] : Convert.FromBase64String(s);
// Re-tag the current token as Bytes carrying the decoded data.
SetToken(JsonToken.Bytes, data);
}
if (TokenType == JsonToken.Null)
return null;
if (TokenType == JsonToken.Bytes)
return (byte[])Value;
throw new JsonReaderException("Error reading bytes. Expected bytes but got {0}.".FormatWith(CultureInfo.InvariantCulture, TokenType));
}
/// <summary>
/// Reads the next JSON token from the stream as a <see cref="Nullable{Decimal}"/>.
/// </summary>
/// <returns>A <see cref="Nullable{Decimal}"/>.</returns>
public override decimal? ReadAsDecimal()
{
Read();
if (TokenType == JsonToken.Null)
return null;
if (TokenType == JsonToken.Integer || TokenType == JsonToken.Float)
{
SetToken(JsonToken.Float, Convert.ToDecimal(Value, CultureInfo.InvariantCulture));
return (decimal) Value;
}
throw new JsonReaderException("Error reading decimal. Expected a number but got {0}.".FormatWith(CultureInfo.InvariantCulture, TokenType));
}
/// <summary>
/// Reads the next JSON token from the stream as a <see cref="Nullable{DateTimeOffset}"/>.
/// </summary>
/// <returns>A <see cref="Nullable{DateTimeOffset}"/>.</returns>
public override DateTimeOffset? ReadAsDateTimeOffset()
{
Read();
if (TokenType == JsonToken.Null)
return null;
if (TokenType == JsonToken.Date)
{
SetToken(JsonToken.Date, new DateTimeOffset((DateTime)Value));
return (DateTimeOffset)Value;
}
throw new JsonReaderException("Error reading date. Expected bytes but got {0}.".FormatWith(CultureInfo.InvariantCulture, TokenType));
}
/// <summary>
/// Reads the next JSON token from the stream.
/// </summary>
/// <returns>
/// true if the next token was read successfully; false if there are no more tokens to read.
/// </returns>
public override bool Read()
{
if (CurrentState != State.Start)
{
JContainer container = _current as JContainer;
if (container != null && _parent != container)
return ReadInto(container);
else
return ReadOver(_current);
}
SetToken(_current);
return true;
}
private bool ReadOver(JToken t)
{
if (t == _root)
return ReadToEnd();
JToken next = t.Next;
if ((next == null || next == t) || t == t.Parent.Last)
{
if (t.Parent == null)
return ReadToEnd();
return SetEnd(t.Parent);
}
else
{
_current = next;
SetToken(_current);
return true;
}
}
private bool ReadToEnd()
{
//CurrentState = State.Finished;
return false;
}
private bool IsEndElement
{
get { return (_current == _parent); }
}
private JsonToken? GetEndToken(JContainer c)
{
switch (c.Type)
{
case JTokenType.Object:
return JsonToken.EndObject;
case JTokenType.Array:
return JsonToken.EndArray;
case JTokenType.Constructor:
return JsonToken.EndConstructor;
case JTokenType.Property:
return null;
default:
throw MiscellaneousUtils.CreateArgumentOutOfRangeException("Type", c.Type, "Unexpected JContainer type.");
}
}
private bool ReadInto(JContainer c)
{
JToken firstChild = c.First;
if (firstChild == null)
{
return SetEnd(c);
}
else
{
SetToken(firstChild);
_current = firstChild;
_parent = c;
return true;
}
}
private bool SetEnd(JContainer c)
{
JsonToken? endToken = GetEndToken(c);
if (endToken != null)
{
SetToken(endToken.Value);
_current = c;
_parent = c;
return true;
}
else
{
return ReadOver(c);
}
}
private void SetToken(JToken token)
{
switch (token.Type)
{
case JTokenType.Object:
SetToken(JsonToken.StartObject);
break;
case JTokenType.Array:
SetToken(JsonToken.StartArray);
break;
case JTokenType.Constructor:
SetToken(JsonToken.StartConstructor);
break;
case JTokenType.Property:
SetToken(JsonToken.PropertyName, ((JProperty)token).Name);
break;
case JTokenType.Comment:
SetToken(JsonToken.Comment, ((JValue)token).Value);
break;
case JTokenType.Integer:
SetToken(JsonToken.Integer, ((JValue)token).Value);
break;
case JTokenType.Float:
SetToken(JsonToken.Float, ((JValue)token).Value);
break;
case JTokenType.String:
SetToken(JsonToken.String, ((JValue)token).Value);
break;
case JTokenType.Boolean:
SetToken(JsonToken.Boolean, ((JValue)token).Value);
break;
case JTokenType.Null:
SetToken(JsonToken.Null, ((JValue)token).Value);
break;
case JTokenType.Undefined:
SetToken(JsonToken.Undefined, ((JValue)token).Value);
break;
case JTokenType.Date:
SetToken(JsonToken.Date, ((JValue)token).Value);
break;
case JTokenType.Raw:
SetToken(JsonToken.Raw, ((JValue)token).Value);
break;
case JTokenType.Bytes:
SetToken(JsonToken.Bytes, ((JValue)token).Value);
break;
case JTokenType.Guid:
SetToken(JsonToken.String, SafeToString(((JValue)token).Value));
break;
case JTokenType.Uri:
SetToken(JsonToken.String, SafeToString(((JValue)token).Value));
break;
case JTokenType.TimeSpan:
SetToken(JsonToken.String, SafeToString(((JValue)token).Value));
break;
default:
throw MiscellaneousUtils.CreateArgumentOutOfRangeException("Type", token.Type, "Unexpected JTokenType.");
}
}
private string SafeToString(object value)
{
return (value != null) ? value.ToString() : null;
}
bool IJsonLineInfo.HasLineInfo()
{
if (CurrentState == State.Start)
return false;
IJsonLineInfo info = IsEndElement ? null : _current;
return (info != null && info.HasLineInfo());
}
int IJsonLineInfo.LineNumber
{
get
{
if (CurrentState == State.Start)
return 0;
IJsonLineInfo info = IsEndElement ? null : _current;
if (info != null)
return info.LineNumber;
return 0;
}
}
int IJsonLineInfo.LinePosition
{
get
{
if (CurrentState == State.Start)
return 0;
IJsonLineInfo info = IsEndElement ? null : _current;
if (info != null)
return info.LinePosition;
return 0;
}
}
}
}
#endif
| |
// Python Tools for Visual Studio
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.ComponentModel.Design;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Windows;
using Microsoft.PythonTools.Infrastructure;
using Microsoft.PythonTools.Interpreter;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.ComponentModelHost;
using Microsoft.VisualStudio.Shell;
using Microsoft.VisualStudio.Shell.Interop;
using Microsoft.VisualStudioTools;
using Microsoft.VisualStudioTools.Project;
namespace Microsoft.PythonTools.Profiling {
/// <summary>
/// This is the class that implements the package exposed by this assembly.
///
/// The minimum requirement for a class to be considered a valid package for Visual Studio
/// is to implement the IVsPackage interface and register itself with the shell.
/// This package uses the helper classes defined inside the Managed Package Framework (MPF)
/// to do it: it derives from the Package class that provides the implementation of the
/// IVsPackage interface and uses the registration attributes defined in the framework to
/// register itself and its components with the shell.
/// </summary>
// This attribute tells the PkgDef creation utility (CreatePkgDef.exe) that this class is
// a package.
[PackageRegistration(UseManagedResourcesOnly = true)]
[Description("Python Tools Profiling Package")]
// This attribute is used to register the informations needed to show the this package
// in the Help/About dialog of Visual Studio.
[InstalledProductRegistration("#110", "#112", AssemblyVersionInfo.Version, IconResourceID = 400)]
// This attribute is needed to let the shell know that this package exposes some menus.
[ProvideMenuResource("Menus.ctmenu", 1)]
[Guid(GuidList.guidPythonProfilingPkgString)]
// set the window to dock where Toolbox/Performance Explorer dock by default
[ProvideToolWindow(typeof(PerfToolWindow), Orientation = ToolWindowOrientation.Left, Style = VsDockStyle.Tabbed, Window = EnvDTE.Constants.vsWindowKindToolbox)]
[ProvideFileFilterAttribute("{81da0100-e6db-4783-91ea-c38c3fa1b81e}", "/1", "Python Performance Session (*.pyperf);*.pyperf", 100)]
[ProvideEditorExtension(typeof(ProfilingSessionEditorFactory), ".pyperf", 50,
ProjectGuid = "{81da0100-e6db-4783-91ea-c38c3fa1b81e}",
NameResourceID = 105,
DefaultName = "PythonPerfSession")]
[ProvideAutomationObject("PythonProfiling")]
sealed class PythonProfilingPackage : Package {
    // Singleton set in the constructor; the shell creates exactly one package instance.
    internal static PythonProfilingPackage Instance;

    // Process currently being profiled; static because at most one profiling session
    // runs at a time. Null when no session is in progress.
    private static ProfiledProcess _profilingProcess;

    // Project type GUID used to recognise Python projects in the loaded solution.
    internal static string PythonProjectGuid = "{888888a0-9f3d-457c-b088-3a5042f75d52}";

    // File-dialog filter for performance report files.
    internal const string PerformanceFileFilter = "Performance Report Files|*.vsp;*.vsps";

    // Lazily created automation object returned from GetAutomationObject("PythonProfiling").
    private AutomationProfiling _profilingAutomation;

    // Start/Stop menu commands; their Enabled state is toggled as profiling starts and stops.
    private static OleMenuCommand _stopCommand, _startCommand;

    /// <summary>
    /// Default constructor of the package.
    /// Inside this method you can place any initialization code that does not require
    /// any Visual Studio service because at this point the package object is created but
    /// not sited yet inside Visual Studio environment. The place to do all the other
    /// initialization is the Initialize method.
    /// </summary>
    public PythonProfilingPackage() {
        Instance = this;
    }

    /// <summary>
    /// Disposes the currently profiled process (if any) along with the package.
    /// </summary>
    protected override void Dispose(bool disposing) {
        if (disposing) {
            // Swap the static field to null first so other code sees no active session.
            var process = _profilingProcess;
            _profilingProcess = null;
            if (process != null) {
                process.Dispose();
            }
        }

        base.Dispose(disposing);
    }

    /// <summary>
    /// Initialization of the package; this method is called right after the package is sited, so this is the place
    /// where you can put all the initilaization code that rely on services provided by VisualStudio.
    /// Registers all menu command handlers and the .pyperf editor factory.
    /// </summary>
    protected override void Initialize() {
        Trace.WriteLine(string.Format(CultureInfo.CurrentCulture, "Entering Initialize() of: {0}", this.ToString()));
        base.Initialize();

        // Add our command handlers for menu (commands must exist in the .vsct file)
        OleMenuCommandService mcs = GetService(typeof(IMenuCommandService)) as OleMenuCommandService;
        if (null != mcs) {
            // Create the command for the menu item.
            CommandID menuCommandID = new CommandID(GuidList.guidPythonProfilingCmdSet, (int)PkgCmdIDList.cmdidStartPythonProfiling);
            MenuCommand menuItem = new MenuCommand(StartProfilingWizard, menuCommandID);
            mcs.AddCommand(menuItem);

            // Create the command for the menu item.
            menuCommandID = new CommandID(GuidList.guidPythonProfilingCmdSet, (int)PkgCmdIDList.cmdidPerfExplorer);
            var oleMenuItem = new OleMenuCommand(ShowPeformanceExplorer, menuCommandID);
            // Only show the explorer command when the profiling package is installed.
            oleMenuItem.BeforeQueryStatus += ShowPerfQueryStatus;
            mcs.AddCommand(oleMenuItem);

            menuCommandID = new CommandID(GuidList.guidPythonProfilingCmdSet, (int)PkgCmdIDList.cmdidAddPerfSession);
            menuItem = new MenuCommand(AddPerformanceSession, menuCommandID);
            mcs.AddCommand(menuItem);

            menuCommandID = new CommandID(GuidList.guidPythonProfilingCmdSet, (int)PkgCmdIDList.cmdidStartProfiling);
            oleMenuItem = _startCommand = new OleMenuCommand(StartProfiling, menuCommandID);
            oleMenuItem.BeforeQueryStatus += IsProfilingActive;
            mcs.AddCommand(oleMenuItem);

            menuCommandID = new CommandID(GuidList.guidPythonProfilingCmdSet, (int)PkgCmdIDList.cmdidStopProfiling);
            _stopCommand = oleMenuItem = new OleMenuCommand(StopProfiling, menuCommandID);
            oleMenuItem.BeforeQueryStatus += IsProfilingInactive;
            mcs.AddCommand(oleMenuItem);
        }

        //Create Editor Factory. Note that the base Package class will call Dispose on it.
        base.RegisterEditorFactory(new ProfilingSessionEditorFactory(this));
    }

    /// <summary>
    /// Exposes the "PythonProfiling" automation object, creating it (and the
    /// performance tool window whose sessions it wraps) on first request.
    /// </summary>
    protected override object GetAutomationObject(string name) {
        if (name == "PythonProfiling") {
            if (_profilingAutomation == null) {
                var pane = (PerfToolWindow)this.FindToolWindow(typeof(PerfToolWindow), 0, true);
                _profilingAutomation = new AutomationProfiling(pane.Sessions);
            }
            return _profilingAutomation;
        }

        return base.GetAutomationObject(name);
    }

    /// <summary>
    /// Returns the project ID GUID of the solution's startup project,
    /// or Guid.Empty when there is no startup project or the lookup fails.
    /// </summary>
    internal static Guid GetStartupProjectGuid(IServiceProvider serviceProvider) {
        var buildMgr = (IVsSolutionBuildManager)serviceProvider.GetService(typeof(IVsSolutionBuildManager));
        IVsHierarchy hierarchy;
        if (buildMgr != null && ErrorHandler.Succeeded(buildMgr.get_StartupProject(out hierarchy)) && hierarchy != null) {
            Guid guid;
            if (ErrorHandler.Succeeded(hierarchy.GetGuidProperty(
                (uint)VSConstants.VSITEMID.Root,
                (int)__VSHPROPID.VSHPROPID_ProjectIDGuid,
                out guid
            ))) {
                return guid;
            }
        }

        return Guid.Empty;
    }

    // The shell's solution service.
    internal IVsSolution Solution {
        get {
            return GetService(typeof(SVsSolution)) as IVsSolution;
        }
    }

    /// <summary>
    /// This function is the callback used to execute a command when the a menu item is clicked.
    /// See the Initialize method to see how the menu item is associated to this function using
    /// the OleMenuCommandService service and the MenuCommand class.
    /// Shows the launch-profiling wizard and profiles the chosen target.
    /// </summary>
    private void StartProfilingWizard(object sender, EventArgs e) {
        var targetView = new ProfilingTargetView(this);
        var dialog = new LaunchProfiling(this, targetView);
        var res = dialog.ShowModal() ?? false;
        if (res && targetView.IsValid) {
            var target = targetView.GetTarget();
            if (target != null) {
                ProfileTarget(target);
            }
        }
    }

    /// <summary>
    /// Adds the target as a session in the performance explorer and starts profiling it.
    /// </summary>
    internal SessionNode ProfileTarget(ProfilingTarget target, bool openReport = true) {
        bool save;
        string name = target.GetProfilingName(this, out save);
        var session = ShowPerformanceExplorer().Sessions.AddTarget(target, name, save);

        StartProfiling(target, session, openReport);
        return session;
    }

    /// <summary>
    /// Starts profiling the given target. An unconfigured target prompts the user to
    /// configure it first and then recurses with the configured target.
    /// </summary>
    internal void StartProfiling(ProfilingTarget target, SessionNode session, bool openReport = true) {
        if (!Utilities.SaveDirtyFiles()) {
            // Abort
            return;
        }

        if (target.ProjectTarget != null) {
            ProfileProjectTarget(session, target.ProjectTarget, openReport);
        } else if (target.StandaloneTarget != null) {
            ProfileStandaloneTarget(session, target.StandaloneTarget, openReport);
        } else {
            if (MessageBox.Show("Profiling session is not configured - would you like to configure now and then launch?", "No Profiling Target", MessageBoxButton.YesNo) == MessageBoxResult.Yes) {
                var newTarget = session.OpenTargetProperties();
                if (newTarget != null && (newTarget.ProjectTarget != null || newTarget.StandaloneTarget != null)) {
                    StartProfiling(newTarget, session, openReport);
                }
            }
        }
    }

    /// <summary>
    /// Finds the Python project in the open solution matching the target's project GUID
    /// and profiles it; shows an error message if the project cannot be found.
    /// </summary>
    private void ProfileProjectTarget(SessionNode session, ProjectTarget projectTarget, bool openReport) {
        var targetGuid = projectTarget.TargetProject;

        var dte = (EnvDTE.DTE)GetService(typeof(EnvDTE.DTE));
        EnvDTE.Project projectToProfile = null;
        foreach (EnvDTE.Project project in dte.Solution.Projects) {
            var kind = project.Kind;

            if (String.Equals(kind, PythonProfilingPackage.PythonProjectGuid, StringComparison.OrdinalIgnoreCase)) {
                var guid = project.Properties.Item("Guid").Value as string;

                Guid guidVal;
                if (Guid.TryParse(guid, out guidVal) && guidVal == projectTarget.TargetProject) {
                    projectToProfile = project;
                    break;
                }
            }
        }

        if (projectToProfile != null) {
            ProfileProject(session, projectToProfile, openReport);
        } else {
            MessageBox.Show("Project could not be found in current solution.", "Python Tools for Visual Studio");
        }
    }

    /// <summary>
    /// Gathers the project's interpreter, startup file, working directory, search path
    /// and environment settings, then launches the profiler against them. Shows an
    /// error message and returns if the project has no interpreter or startup file.
    /// </summary>
    internal static void ProfileProject(SessionNode session, EnvDTE.Project projectToProfile, bool openReport) {
        var model = (IComponentModel)(session._serviceProvider.GetService(typeof(SComponentModel)));

        var projectHome = PathUtils.GetAbsoluteDirectoryPath(
            Path.GetDirectoryName(projectToProfile.FullName),
            (string)projectToProfile.Properties.Item("ProjectHome").Value
        );

        var args = (string)projectToProfile.Properties.Item("CommandLineArguments").Value;

        var interpreterPath = (string)projectToProfile.Properties.Item("InterpreterPath").Value;
        var searchPath = (string)projectToProfile.Properties.Item("SearchPath").Value;

        var project = projectToProfile.AsPythonProject();

        var interpreter = project != null ? project.GetInterpreterFactory() : null;
        if (interpreter == null /*|| interpreter == interpreterService.NoInterpretersValue*/) {
            MessageBox.Show(String.Format("Could not find interpreter for project {0}", projectToProfile.Name), "Python Tools for Visual Studio");
            return;
        }

        var arch = interpreter.Configuration.Architecture;
        var pathEnvVarName = interpreter.Configuration.PathEnvironmentVariable;

        // Project-level interpreter path overrides the factory's path when set.
        if (String.IsNullOrWhiteSpace(interpreterPath)) {
            interpreterPath = interpreter.Configuration.InterpreterPath;
        }

        string startupFile = (string)projectToProfile.Properties.Item("StartupFile").Value;

        if (String.IsNullOrEmpty(startupFile)) {
            MessageBox.Show("Project has no configured startup file, cannot start profiling.", "Python Tools for Visual Studio");
            return;
        }

        // Fall back from WorkingDirectory to ProjectHome to the project file's directory.
        string workingDir = projectToProfile.Properties.Item("WorkingDirectory").Value as string;
        if (String.IsNullOrEmpty(workingDir) || workingDir == ".") {
            workingDir = projectToProfile.Properties.Item("ProjectHome").Value as string;
            if (String.IsNullOrEmpty(workingDir)) {
                workingDir = Path.GetDirectoryName(projectToProfile.FullName);
            }
        }

        var env = new Dictionary<string, string>();
        if (!String.IsNullOrWhiteSpace(pathEnvVarName) && !String.IsNullOrEmpty(searchPath)) {
            var searchPaths = searchPath.Split(';').ToList();
            var pyService = (PythonToolsService)session._serviceProvider.GetService(typeof(PythonToolsService));
            // Optionally append the machine-level PYTHONPATH (or equivalent) entries.
            if (!pyService.GeneralOptions.ClearGlobalPythonPath) {
                searchPaths.AddRange(Environment.GetEnvironmentVariable(pathEnvVarName).Split(';'));
            }

            // Keep only valid, existing, de-duplicated absolute directories.
            env[pathEnvVarName] = string.Join(";", searchPaths
                .Where(PathUtils.IsValidPath)
                .Select(p => PathUtils.GetAbsoluteDirectoryPath(projectHome, p))
                .Where(Directory.Exists)
                .Distinct(StringComparer.OrdinalIgnoreCase)
            );
        }

        // Merge user-specified NAME=VALUE pairs (one per line) into the environment.
        var userEnv = projectToProfile.Properties.Item("Environment").Value as string;
        if (userEnv != null) {
            foreach (var envVar in userEnv.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries)) {
                var nameValue = envVar.Split(new[] { '=' }, 2);
                if (nameValue.Length == 2) {
                    env[nameValue[0]] = nameValue[1];
                }
            }
        }

        RunProfiler(session, interpreterPath, startupFile, args, workingDir, env, openReport, arch);
    }

    /// <summary>
    /// Launches the profiler for a standalone script target. When the target names a
    /// registered interpreter, that interpreter's path and architecture are used;
    /// silently returns if the interpreter cannot be found in the registry.
    /// </summary>
    private static void ProfileStandaloneTarget(SessionNode session, StandaloneTarget runTarget, bool openReport) {
        var model = (IComponentModel)(session._serviceProvider.GetService(typeof(SComponentModel)));
        var registry = model.DefaultExportProvider.GetExportedValue<IInterpreterRegistryService>();

        var interpreterPath = runTarget.InterpreterPath;
        var arch = ProcessorArchitecture.X86;
        if (runTarget.PythonInterpreter != null) {
            var interpreter = registry.FindInterpreter(runTarget.PythonInterpreter.Id);
            if (interpreter == null) {
                return;
            }
            interpreterPath = interpreter.Configuration.InterpreterPath;
            arch = interpreter.Configuration.Architecture;
        }

        RunProfiler(session, interpreterPath, runTarget.Script, runTarget.Arguments, runTarget.WorkingDirectory, null, openReport, arch);
    }

    /// <summary>
    /// Starts the profiled process, writing the report to a unique date-stamped .vsp
    /// file in the temp directory, and toggles the start/stop commands. When the
    /// process exits, the commands are reset and the report is opened if requested.
    /// NOTE(review): the ProcessExited handler updates command state and opens files;
    /// it is assumed to fire on a thread where that is safe — confirm with ProfiledProcess.
    /// </summary>
    private static void RunProfiler(SessionNode session, string interpreter, string script, string arguments, string workingDir, Dictionary<string, string> env, bool openReport, ProcessorArchitecture arch) {
        var process = new ProfiledProcess(
            (PythonToolsService)session._serviceProvider.GetService(typeof(PythonToolsService)),
            interpreter,
            String.Format("\"{0}\" {1}", script, arguments ?? string.Empty),
            workingDir,
            env,
            arch
        );

        string baseName = Path.GetFileNameWithoutExtension(session.Filename);
        string date = DateTime.Now.ToString("yyyyMMdd");
        string outPath = Path.Combine(Path.GetTempPath(), baseName + "_" + date + ".vsp");

        // Append "(n)" until the output filename is unique.
        int count = 1;
        while (File.Exists(outPath)) {
            outPath = Path.Combine(Path.GetTempPath(), baseName + "_" + date + "(" + count + ").vsp");
            count++;
        }

        process.ProcessExited += (sender, args) => {
            var dte = (EnvDTE.DTE)session._serviceProvider.GetService(typeof(EnvDTE.DTE));
            _profilingProcess = null;
            _stopCommand.Enabled = false;
            _startCommand.Enabled = true;
            if (openReport && File.Exists(outPath)) {
                dte.ItemOperations.OpenFile(outPath);
            }
        };

        session.AddProfile(outPath);

        process.StartProfiling(outPath);
        _profilingProcess = process;
        _stopCommand.Enabled = true;
        _startCommand.Enabled = false;
    }

    /// <summary>
    /// This function is the callback used to execute a command when the a menu item is clicked.
    /// See the Initialize method to see how the menu item is associated to this function using
    /// the OleMenuCommandService service and the MenuCommand class.
    /// (Note: "Peformance" is a historical typo kept for the wired-up handler name.)
    /// </summary>
    private void ShowPeformanceExplorer(object sender, EventArgs e) {
        ShowPerformanceExplorer();
    }

    /// <summary>
    /// Finds (creating if necessary) and shows the performance explorer tool window.
    /// </summary>
    internal PerfToolWindow ShowPerformanceExplorer() {
        var pane = this.FindToolWindow(typeof(PerfToolWindow), 0, true);
        if (pane == null) {
            throw new InvalidOperationException();
        }
        IVsWindowFrame frame = pane.Frame as IVsWindowFrame;
        if (frame == null) {
            throw new InvalidOperationException();
        }

        ErrorHandler.ThrowOnFailure(frame.Show());
        return pane as PerfToolWindow;
    }

    /// <summary>
    /// Adds a new, empty performance session. The session file is placed next to the
    /// solution (and saved) when a solution is open; otherwise it is kept in memory.
    /// </summary>
    private void AddPerformanceSession(object sender, EventArgs e) {
        var dte = (EnvDTE.DTE)GetService(typeof(EnvDTE.DTE));
        string filename = "Performance.pyperf";
        bool save = false;
        if (dte.Solution.IsOpen && !String.IsNullOrEmpty(dte.Solution.FullName)) {
            filename = Path.Combine(Path.GetDirectoryName(dte.Solution.FullName), filename);
            save = true;
        }
        ShowPerformanceExplorer().Sessions.AddTarget(new ProfilingTarget(), filename, save);
    }

    // Menu handler: starts profiling the currently selected session.
    private void StartProfiling(object sender, EventArgs e) {
        ShowPerformanceExplorer().Sessions.StartProfiling();
    }

    // Menu handler: stops the currently running profiled process, if any.
    private void StopProfiling(object sender, EventArgs e) {
        var process = _profilingProcess;
        if (process != null) {
            process.StopProfiling();
        }
    }

    // Query-status handler for the Start command: enabled only while no profiling is
    // running. NOTE(review): the name reads inverted relative to its effect (it
    // *disables* Start when profiling is active) — behavior is correct, name is confusing.
    private void IsProfilingActive(object sender, EventArgs args) {
        var oleMenu = sender as OleMenuCommand;

        if (_profilingProcess != null) {
            oleMenu.Enabled = false;
        } else {
            oleMenu.Enabled = true;
        }
    }

    // Query-status handler for the Stop command: enabled only while profiling is running.
    // NOTE(review): same inverted naming as IsProfilingActive above.
    private void IsProfilingInactive(object sender, EventArgs args) {
        var oleMenu = sender as OleMenuCommand;

        if (_profilingProcess != null) {
            oleMenu.Enabled = true;
        } else {
            oleMenu.Enabled = false;
        }
    }

    // Query-status handler: the performance explorer command is shown only when the
    // VS profiling package is installed.
    private void ShowPerfQueryStatus(object sender, EventArgs args) {
        var oleMenu = sender as OleMenuCommand;

        if (IsProfilingInstalled()) {
            oleMenu.Enabled = true;
            oleMenu.Visible = true;
        } else {
            oleMenu.Enabled = false;
            oleMenu.Visible = false;
        }
    }

    /// <summary>
    /// True when the Visual Studio profiling package is installed in this shell.
    /// </summary>
    internal bool IsProfilingInstalled() {
        IVsShell shell = (IVsShell)GetService(typeof(IVsShell));
        Guid perfGuid = GuidList.GuidPerfPkg;
        int installed;
        ErrorHandler.ThrowOnFailure(
            shell.IsPackageInstalled(ref perfGuid, out installed)
        );
        return installed != 0;
    }

    // True while a profiled process is running.
    public bool IsProfiling {
        get {
            return _profilingProcess != null;
        }
    }
}
}
| |
//-----------------------------------------------------------------------------
// Filename: SIPUDPChannel.cs
//
// Description: SIP transport for UDP.
//
// History:
// 17 Oct 2005 Aaron Clauson Created.
//
// License:
// This software is licensed under the BSD License http://www.opensource.org/licenses/bsd-license.php
//
// Copyright (c) 2006 Aaron Clauson ([email protected]), SIP Sorcery PTY LTD, Hobart, Australia (www.sipsorcery.com)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that
// the following conditions are met:
//
// Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
// Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of SIP Sorcery PTY LTD.
// nor the names of its contributors may be used to endorse or promote products derived from this software without specific
// prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
// BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
// OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//-----------------------------------------------------------------------------
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using SIPSorcery.Sys;
using log4net;
#if UNITTEST
using NUnit.Framework;
#endif
namespace SIPSorcery.SIP
{
public class SIPUDPChannel : SIPChannel
{
    private const string THREAD_NAME = "sipchanneludp-";

    //private ILog logger = AssemblyState.logger;

    // Channel sockets.
    private Guid m_socketId = Guid.NewGuid();   // NOTE(review): not referenced within this class; possibly kept for diagnostics elsewhere.
    private UdpClient m_sipConn = null;
    //private bool m_closed = false;

    /// <summary>
    /// Creates a SIP UDP channel bound to the given local end point and starts
    /// its background listener thread.
    /// </summary>
    public SIPUDPChannel(IPEndPoint endPoint) {
        m_localSIPEndPoint = new SIPEndPoint(SIPProtocolsEnum.udp, endPoint);
        Initialise();
    }

    /// <summary>
    /// Binds the UDP socket and starts the listener thread. Any failure is logged
    /// and rethrown to the caller.
    /// </summary>
    private void Initialise() {
        try {
            m_sipConn = new UdpClient(m_localSIPEndPoint.GetIPEndPoint());

            Thread listenThread = new Thread(new ThreadStart(Listen));
            listenThread.Name = THREAD_NAME + Crypto.GetRandomString(4);
            listenThread.Start();

            logger.Debug("SIPUDPChannel listener created " + m_localSIPEndPoint.GetIPEndPoint() + ".");
        }
        catch (Exception excp) {
            logger.Error("Exception SIPUDPChannel Initialise. " + excp.Message);
            // Rethrow with "throw;" (not "throw excp;") to preserve the original stack trace.
            throw;
        }
    }

    // NOTE(review): private and not called from within this class; retained for
    // compatibility in case it is invoked via reflection or a future IDisposable pattern.
    private void Dispose(bool disposing) {
        try {
            this.Close();
        }
        catch (Exception excp) {
            logger.Error("Exception Disposing SIPUDPChannel. " + excp.Message);
        }
    }

    /// <summary>
    /// Blocking receive loop run on the listener thread. Received datagrams are passed
    /// to the SIPMessageReceived handler; socket errors are swallowed and the loop
    /// continues until the channel is closed.
    /// </summary>
    private void Listen()
    {
        try
        {
            byte[] buffer = null;

            logger.Debug("SIPUDPChannel socket on " + m_localSIPEndPoint.ToString() + " listening started.");

            while (!Closed)
            {
                IPEndPoint inEndPoint = new IPEndPoint(IPAddress.Any, 0);

                try
                {
                    buffer = m_sipConn.Receive(ref inEndPoint);
                }
                catch (SocketException)
                {
                    // ToDo. Pretty sure these exceptions get thrown when an ICMP message comes back indicating there is no listening
                    // socket on the other end. It would be nice to be able to relate that back to the socket that the data was sent to
                    // so that we know to stop sending.
                    //logger.Warn("SocketException SIPUDPChannel Receive (" + sockExcp.ErrorCode + "). " + sockExcp.Message);
                    //inEndPoint = new SIPEndPoint(new IPEndPoint(IPAddress.Any, 0));
                    continue;
                }
                catch (Exception listenExcp)
                {
                    // There is no point logging this as without processing the ICMP message it's not possible to know which socket the rejection came from.
                    logger.Error("Exception listening on SIPUDPChannel. " + listenExcp.Message);

                    inEndPoint = new IPEndPoint(IPAddress.Any, 0);
                    continue;
                }

                if (buffer == null || buffer.Length == 0)
                {
                    // No need to care about zero byte packets.
                    //string remoteEndPoint = (inEndPoint != null) ? inEndPoint.ToString() : "could not determine";
                    //logger.Error("Zero bytes received on SIPUDPChannel " + m_localSIPEndPoint.ToString() + ".");
                }
                else
                {
                    if (SIPMessageReceived != null)
                    {
                        SIPMessageReceived(this, new SIPEndPoint(SIPProtocolsEnum.udp, inEndPoint), buffer);
                    }
                }
            }

            logger.Debug("SIPUDPChannel socket on " + m_localSIPEndPoint + " listening halted.");
        }
        catch (Exception excp)
        {
            logger.Error("Exception SIPUDPChannel Listen. " + excp.Message);
            //throw excp;
        }
    }

    /// <summary>
    /// Sends a UTF-8 encoded string to the destination end point.
    /// </summary>
    public override void Send(IPEndPoint destinationEndPoint, string message)
    {
        byte[] messageBuffer = Encoding.UTF8.GetBytes(message);
        Send(destinationEndPoint, messageBuffer);
    }

    /// <summary>
    /// Sends a raw buffer to the destination end point. Failures are logged and
    /// rethrown to the caller.
    /// </summary>
    /// <exception cref="ApplicationException">The destination end point is null.</exception>
    public override void Send(IPEndPoint destinationEndPoint, byte[] buffer)
    {
        try
        {
            if (destinationEndPoint == null)
            {
                throw new ApplicationException("An empty destination was specified to Send in SIPUDPChannel.");
            }
            else
            {
                m_sipConn.Send(buffer, buffer.Length, destinationEndPoint);
            }
        }
        catch (Exception excp)
        {
            logger.Error("Exception (" + excp.GetType().ToString() + ") SIPUDPChannel Send (sendto=>" + IPSocket.GetSocketString(destinationEndPoint) + "). " + excp.Message);
            // Rethrow with "throw;" (not "throw excp;") to preserve the original stack trace.
            throw;
        }
    }

    /// <summary>
    /// Not supported: TLS-style sends do not apply to a UDP channel.
    /// </summary>
    public override void Send(IPEndPoint dstEndPoint, byte[] buffer, string serverCertificateName)
    {
        throw new ApplicationException("This Send method is not available in the SIP UDP channel, please use an alternative overload.");
    }

    /// <summary>
    /// Not supported: UDP is connectionless.
    /// </summary>
    public override bool IsConnectionEstablished(IPEndPoint remoteEndPoint)
    {
        throw new NotSupportedException("The SIP UDP channel does not support connections.");
    }

    /// <summary>
    /// Not supported: UDP is connectionless.
    /// </summary>
    protected override Dictionary<string, SIPConnection> GetConnectionsList()
    {
        throw new NotSupportedException("The SIP UDP channel does not support connections.");
    }

    /// <summary>
    /// Marks the channel closed (which terminates the listener loop) and closes the
    /// underlying socket. Exceptions are logged, never thrown.
    /// </summary>
    public override void Close()
    {
        try
        {
            logger.Debug("Closing SIP UDP Channel " + SIPChannelEndPoint + ".");

            Closed = true;
            m_sipConn.Close();
        }
        catch (Exception excp)
        {
            logger.Warn("Exception SIPUDPChannel Close. " + excp.Message);
        }
    }
}
}
| |
/*
* Copyright (c) Contributors, http://aurora-sim.org/, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the Aurora-Sim Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.IO;
using System.Text;
using System.Xml;
using OpenMetaverse;
namespace Aurora.Framework.Serialization.External
{
/// <summary>
/// Serialize and deserialize user inventory items as an external format.
/// </summary>
/// XXX: Please do not use yet.
public class UserInventoryItemSerializer
{
/// <summary>
///   Deserialize a user inventory item from its serialized XML form.
/// </summary>
/// <param name="serialization">ASCII-encoded bytes of an <c>InventoryItem</c> XML document.</param>
/// <returns>The deserialized item.</returns>
/// <exception cref="System.Xml.XmlException">The XML is malformed or a required element is missing/out of order.</exception>
public static InventoryItemBase Deserialize(byte[] serialization)
{
    InventoryItemBase item = new InventoryItemBase();

    // Readers are wrapped in using blocks so they are disposed even when parsing throws.
    using (StringReader reader = new StringReader(Encoding.ASCII.GetString(serialization, 0, serialization.Length)))
    using (XmlReader xtr = XmlReader.Create(reader))
    {
        //Uses byte[] directly... should be used once issues with it are fixed
        //MemoryStream mr = new MemoryStream (serialization);
        //StreamReader sr = new StreamReader (mr, Encoding.ASCII);
        //XmlReader xtr = XmlTextReader.Create (sr);

        // Elements must appear in exactly this order.
        xtr.ReadStartElement("InventoryItem");

        item.Name = xtr.ReadElementString("Name");
        item.ID = UUID.Parse(xtr.ReadElementString("ID"));
        item.InvType = Convert.ToInt32(xtr.ReadElementString("InvType"));
        item.CreatorId = xtr.ReadElementString("CreatorUUID");
        try
        {
            // CreatorData is optional: older archives do not contain it, so a failed
            // read is deliberately ignored (best effort).
            item.CreatorData = xtr.ReadElementString("CreatorData");
        }
        catch
        {
        }
        item.CreationDate = Convert.ToInt32(xtr.ReadElementString("CreationDate"));
        item.Owner = UUID.Parse(xtr.ReadElementString("Owner"));
        item.Description = xtr.ReadElementString("Description");
        item.AssetType = Convert.ToInt32(xtr.ReadElementString("AssetType"));
        item.AssetID = UUID.Parse(xtr.ReadElementString("AssetID"));
        item.SaleType = Convert.ToByte(xtr.ReadElementString("SaleType"));
        item.SalePrice = Convert.ToInt32(xtr.ReadElementString("SalePrice"));
        item.BasePermissions = Convert.ToUInt32(xtr.ReadElementString("BasePermissions"));
        item.CurrentPermissions = Convert.ToUInt32(xtr.ReadElementString("CurrentPermissions"));
        item.EveryOnePermissions = Convert.ToUInt32(xtr.ReadElementString("EveryOnePermissions"));
        item.NextPermissions = Convert.ToUInt32(xtr.ReadElementString("NextPermissions"));
        item.Flags = Convert.ToUInt32(xtr.ReadElementString("Flags"));
        item.GroupID = UUID.Parse(xtr.ReadElementString("GroupID"));
        item.GroupOwned = Convert.ToBoolean(xtr.ReadElementString("GroupOwned"));

        xtr.ReadEndElement();
    }

    return item;
}
/// <summary>
/// Deserialize an inventory item from its XML string representation.
/// </summary>
/// <param name = "serialization">XML string produced by <see cref = "Serialize"/></param>
/// <returns>The reconstructed <see cref = "InventoryItemBase"/></returns>
/// <exception cref = "System.Xml.XmlException">Thrown when the XML is malformed or a required element is missing</exception>
public static InventoryItemBase Deserialize(string serialization)
{
    InventoryItemBase item = new InventoryItemBase();
    // using guarantees both readers are disposed even when parsing throws.
    using (StringReader sr = new StringReader(serialization))
    using (XmlTextReader xtr = new XmlTextReader(sr))
    {
        xtr.ReadStartElement("InventoryItem");
        item.Name = xtr.ReadElementString("Name");
        item.ID = UUID.Parse(xtr.ReadElementString("ID"));
        item.InvType = Convert.ToInt32(xtr.ReadElementString("InvType"));
        item.CreatorId = xtr.ReadElementString("CreatorUUID");
        try
        {
            // CreatorData is optional: tolerate serializations written before
            // the element existed (matches the byte[] overload's behavior).
            item.CreatorData = xtr.ReadElementString("CreatorData");
        }
        catch
        {
        }
        item.CreationDate = Convert.ToInt32(xtr.ReadElementString("CreationDate"));
        item.Owner = UUID.Parse(xtr.ReadElementString("Owner"));
        item.Description = xtr.ReadElementString("Description");
        item.AssetType = Convert.ToInt32(xtr.ReadElementString("AssetType"));
        item.AssetID = UUID.Parse(xtr.ReadElementString("AssetID"));
        item.SaleType = Convert.ToByte(xtr.ReadElementString("SaleType"));
        item.SalePrice = Convert.ToInt32(xtr.ReadElementString("SalePrice"));
        item.BasePermissions = Convert.ToUInt32(xtr.ReadElementString("BasePermissions"));
        item.CurrentPermissions = Convert.ToUInt32(xtr.ReadElementString("CurrentPermissions"));
        item.EveryOnePermissions = Convert.ToUInt32(xtr.ReadElementString("EveryOnePermissions"));
        item.NextPermissions = Convert.ToUInt32(xtr.ReadElementString("NextPermissions"));
        item.Flags = Convert.ToUInt32(xtr.ReadElementString("Flags"));
        item.GroupID = UUID.Parse(xtr.ReadElementString("GroupID"));
        item.GroupOwned = Convert.ToBoolean(xtr.ReadElementString("GroupOwned"));
        xtr.ReadEndElement();
    }
    return item;
}
/// <summary>
/// Serialize an inventory item to an indented XML string. The element order
/// must match what both <c>Deserialize</c> overloads expect.
/// </summary>
/// <param name = "inventoryItem">The item to serialize</param>
/// <returns>The XML document as a string</returns>
public static string Serialize(InventoryItemBase inventoryItem)
{
    StringWriter sw = new StringWriter();
    using (XmlTextWriter writer = new XmlTextWriter(sw) {Formatting = Formatting.Indented})
    {
        writer.WriteStartDocument();
        writer.WriteStartElement("InventoryItem");
        WriteElement(writer, "Name", inventoryItem.Name);
        WriteElement(writer, "ID", inventoryItem.ID.ToString());
        WriteElement(writer, "InvType", inventoryItem.InvType.ToString());
        WriteElement(writer, "CreatorUUID", inventoryItem.CreatorId);
        WriteElement(writer, "CreatorData", inventoryItem.CreatorData);
        WriteElement(writer, "CreationDate", inventoryItem.CreationDate.ToString());
        WriteElement(writer, "Owner", inventoryItem.Owner.ToString());
        WriteElement(writer, "Description", inventoryItem.Description);
        WriteElement(writer, "AssetType", inventoryItem.AssetType.ToString());
        WriteElement(writer, "AssetID", inventoryItem.AssetID.ToString());
        WriteElement(writer, "SaleType", inventoryItem.SaleType.ToString());
        WriteElement(writer, "SalePrice", inventoryItem.SalePrice.ToString());
        WriteElement(writer, "BasePermissions", inventoryItem.BasePermissions.ToString());
        WriteElement(writer, "CurrentPermissions", inventoryItem.CurrentPermissions.ToString());
        WriteElement(writer, "EveryOnePermissions", inventoryItem.EveryOnePermissions.ToString());
        WriteElement(writer, "NextPermissions", inventoryItem.NextPermissions.ToString());
        WriteElement(writer, "Flags", inventoryItem.Flags.ToString());
        WriteElement(writer, "GroupID", inventoryItem.GroupID.ToString());
        WriteElement(writer, "GroupOwned", inventoryItem.GroupOwned.ToString());
        writer.WriteEndElement();
    }
    // StringWriter.ToString is valid after the writer has been closed.
    return sw.ToString();
}

/// <summary>
/// Writes a single &lt;name&gt;value&lt;/name&gt; child element, preserving the
/// original Start/String/End sequence (so empty values serialize identically).
/// </summary>
private static void WriteElement(XmlTextWriter writer, string name, string value)
{
    writer.WriteStartElement(name);
    writer.WriteString(value);
    writer.WriteEndElement();
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections;
using Xunit;
namespace System.Collections.StackTests
{
/// <summary>
/// Basic behavioral tests for the non-generic <see cref="System.Collections.Stack"/>:
/// construction, push/pop/peek, copying, enumeration, and the Synchronized wrapper.
/// xUnit convention: Assert.Equal takes (expected, actual) in that order.
/// </summary>
public class StackBasicTests
{
    [Fact]
    public static void EmptyStackSizeIsZero()
    {
        Stack stack = new Stack();
        Assert.Equal(0, stack.Count);
    }
    [Fact]
    public static void DefaultStackIsNotSynchronized()
    {
        Stack stack = new Stack();
        Assert.False(stack.IsSynchronized);
    }
    [Fact]
    public static void NumberOfElementsAddedIsEqualToStackSize()
    {
        int iNumElementsAdded = 1975;
        Stack stack = new Stack();
        for (int i = 0; i < iNumElementsAdded; i++)
        {
            stack.Push(new Object());
        }
        // expected first, actual second
        Assert.Equal(iNumElementsAdded, stack.Count);
    }
    [Fact]
    public static void ClearResetsNumberOfElementsToZero()
    {
        int iNumElementsAdded = 2;
        Stack stack = new Stack();
        for (int i = 0; i < iNumElementsAdded; i++)
        {
            stack.Push(new Object());
        }
        stack.Clear();
        Assert.Equal(0, stack.Count);
    }
    [Fact]
    public static void PopDecrementsStackSize()
    {
        int iNumElementsAdded = 25;
        Stack stack = new Stack();
        for (int i = 0; i < iNumElementsAdded; i++)
        {
            stack.Push(i);
        }
        for (int i = 0; i < iNumElementsAdded; i++)
        {
            Assert.Equal(iNumElementsAdded - i, stack.Count);
            Object objTop = stack.Pop();
            Assert.Equal(iNumElementsAdded - i - 1, stack.Count);
        }
    }
    [Fact]
    public static void PeekingEmptyStackThrows()
    {
        Stack stack = new Stack();
        Assert.Throws<InvalidOperationException>(() => { var x = stack.Peek(); });
    }
    [Fact]
    public static void PeekingEmptyStackAfterRemovingElementsThrows()
    {
        object objRet;
        Stack stack = new Stack();
        for (int i = 0; i < 1000; i++)
        {
            stack.Push(i);
        }
        for (int i = 0; i < 1000; i++)
        {
            objRet = stack.Pop();
        }
        Assert.Throws<InvalidOperationException>(() => { objRet = stack.Peek(); });
    }
    [Fact]
    public static void ICollectionCanBeGivenToStack()
    {
        int iNumElements = 10000;
        var objArr = new Object[iNumElements];
        for (int i = 0; i < iNumElements; i++)
        {
            objArr[i] = i;
        }
        Stack stack = new Stack(objArr);
        // The last element of the source collection is the top of the stack.
        for (int i = 0; i < iNumElements; i++)
        {
            var objRet = stack.Pop();
            Assert.True(objRet.Equals(iNumElements - i - 1));
        }
    }
    [Fact]
    public static void PeekingAtElementTwiceGivesSameResults()
    {
        Stack stack = new Stack();
        stack.Push(1);
        Assert.True(stack.Peek().Equals(stack.Peek()));
    }
    [Fact]
    public static void PushAndPopWorkOnNullElements()
    {
        Stack stack = new Stack();
        stack.Push(null);
        stack.Push(-1);
        stack.Push(null);
        Assert.Null(stack.Pop());
        Assert.True((-1).Equals(stack.Pop()));
        Assert.Null(stack.Pop());
    }
    [Fact]
    public static void CopyingToNullArrayThrows()
    {
        Stack stack = new Stack();
        stack.Push("hey");
        Assert.Throws<ArgumentNullException>(() => stack.CopyTo(null, 0));
    }
    [Fact]
    public static void CopyingToMultiDimArrayThrows()
    {
        Stack stack = new Stack();
        stack.Push("hey");
        Assert.Throws<ArgumentException>(() => stack.CopyTo(new Object[8, 8], 0));
    }
    [Fact]
    public static void CopyingOutOfRangeThrows_1()
    {
        Stack stack = new Stack();
        var objArr = new Object[0];
        Assert.Throws<ArgumentException>(() => stack.CopyTo(objArr, 1));
        stack = new Stack();
        Assert.Throws<ArgumentException>(() => stack.CopyTo(objArr, Int32.MaxValue));
        stack = new Stack();
        Assert.Throws<ArgumentOutOfRangeException>(() => stack.CopyTo(objArr, Int32.MinValue));
        stack = new Stack();
        Assert.Throws<ArgumentOutOfRangeException>(() => stack.CopyTo(objArr, -1));
    }
    [Fact]
    public static void CopyingOutOfRangeThrows_2()
    {
        Stack stack = new Stack();
        stack.Push("MyString");
        var objArr = new Object[0];
        Assert.Throws<ArgumentException>(() => stack.CopyTo(objArr, 0));
    }
    [Fact]
    public static void GettingEnumeratorAndLoopingThroughWorks()
    {
        Stack stack = new Stack();
        stack.Push("hey");
        stack.Push("hello");
        IEnumerator ienum = stack.GetEnumerator();
        int iCounter = 0;
        while (ienum.MoveNext())
        {
            iCounter++;
        }
        Assert.Equal(stack.Count, iCounter);
    }
    [Fact]
    public static void GetBeforeStartingEnumerator()
    {
        // NOTE: The docs say this behaviour is undefined so if test fails it might be ok
        Stack stack = new Stack();
        stack.Push("a");
        stack.Push("b");
        IEnumerator ienum = stack.GetEnumerator();
        Assert.Throws<InvalidOperationException>(() => { Object obj = ienum.Current; });
    }
    [Fact]
    public static void EnumeratingBeyondEndOfListThenGetObject()
    {
        Stack stack = new Stack();
        stack.Push(new Object());
        stack.Push(stack);
        IEnumerator ienum = stack.GetEnumerator();
        // Current is stable until MoveNext is called again.
        Assert.True(ienum.MoveNext());
        for (int i = 0; i < 100; i++)
        {
            Object objTemp1 = ienum.Current;
            Assert.True(objTemp1.Equals(stack));
        }
        Assert.True(ienum.MoveNext());
        // Once exhausted, MoveNext keeps returning false and Current throws.
        for (int i = 0; i < 100; i++)
        {
            Assert.False(ienum.MoveNext());
        }
        Assert.Throws<InvalidOperationException>(() => { var o = ienum.Current; });
    }
    [Fact]
    public static void PassingNegativeCapacityThrows()
    {
        Assert.Throws<ArgumentOutOfRangeException>(() => { Stack stack = new Stack(Int32.MinValue); });
    }
    [Fact]
    public static void CreatingStackWithZeroCapacityDoesntThrow()
    {
        Stack stack = new Stack(0);
        Assert.Equal(0, stack.Count);
    }
    [Fact]
    public static void PassingValidCapacityCreatesZeroElementsStack()
    {
        Stack stack = new Stack(1);
        Assert.Equal(0, stack.Count);
    }
    [Fact]
    public static void SynchronizedStacksIsSynchronizedPropertyReturnsTrue()
    {
        Stack stack = Stack.Synchronized(new Stack());
        Assert.True(stack.IsSynchronized);
    }
    [Fact]
    public static void SynchronizingNullStackThrows()
    {
        Assert.Throws<ArgumentNullException>(() => { Stack stack = Stack.Synchronized(null); });
    }
    [Fact]
    public static void TestingAllMethodsOfSynchronizedStack()
    {
        Stack q1 = new Stack();
        for (int i = 0; i < 10; i++)
        {
            q1.Push("String_" + i);
        }
        // The synchronized wrapper shares the underlying stack with q1.
        Stack q2 = Stack.Synchronized(q1);
        Assert.Equal(q1.Count, q2.Count);
        q2.Clear();
        Assert.Equal(0, q2.Count);
        for (int i = 0; i < 10; i++)
        {
            q2.Push("String_" + i);
        }
        for (int i = 0, j = 9; i < 10; i++, j--)
        {
            Assert.True(((String)q2.Peek()).Equals("String_" + j));
            Assert.True(((String)q2.Pop()).Equals("String_" + j));
        }
        Assert.Equal(0, q2.Count);
        Assert.True(q2.IsSynchronized);
        for (int i = 0; i < 10; i++)
            q2.Push("String_" + i);
        Stack q3 = Stack.Synchronized(q2);
        Assert.True(q3.IsSynchronized);
        Assert.Equal(q2.Count, q3.Count);
        var strArr = new String[10];
        q2.CopyTo(strArr, 0);
        for (int i = 0, j = 9; i < 10; i++, j--)
        {
            Assert.True(strArr[i].Equals("String_" + j));
        }
        strArr = new String[10 + 10];
        q2.CopyTo(strArr, 10);
        for (int i = 0, j = 9; i < 10; i++, j--)
        {
            Assert.True(strArr[i + 10].Equals("String_" + j));
        }
        Assert.Throws<ArgumentNullException>(() => q2.CopyTo(null, 0));
        var oArr = q2.ToArray();
        for (int i = 0, j = 9; i < 10; i++, j--)
        {
            Assert.True(((String)oArr[i]).Equals("String_" + j));
        }
        var ienm1 = q2.GetEnumerator();
        Assert.Throws<InvalidOperationException>(() => { var oValue = ienm1.Current; });
        var iCount = 9;
        while (ienm1.MoveNext())
        {
            Assert.True(((String)ienm1.Current).Equals("String_" + iCount));
            iCount--;
        }
        ienm1.Reset();
        iCount = 9;
        while (ienm1.MoveNext())
        {
            Assert.True(((String)ienm1.Current).Equals("String_" + iCount));
            iCount--;
        }
        ienm1.Reset();
        // Mutating the stack invalidates an outstanding enumerator.
        q2.Pop();
        Assert.Throws<InvalidOperationException>(() => { var oValue = ienm1.Current; });
        Assert.Throws<InvalidOperationException>(() => ienm1.MoveNext());
        Assert.Throws<InvalidOperationException>(() => ienm1.Reset());
    }
    [Fact]
    public static void PassingNullCollectionToConstructorThrows()
    {
        Assert.Throws<ArgumentNullException>(() => { Stack stack = new Stack(null); });
    }
}
}
| |
// Copyright 2020 The Tilt Brush Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System.Collections.Generic;
using UnityEngine;
namespace TiltBrush {
// TODO: Could be slightly more vtx-efficient with a non-textured tube
// (no need to duplicate verts along seam)
// TODO: remove use of nRight, nSurface
class TubeBrush : GeometryBrush {
const float TWOPI = 2 * Mathf.PI;
// Moves shorter than this (in pointer-space meters) break the strip; see
// OnChanged_FrameKnots.
const float kMinimumMoveMeters_PS = 5e-4f;
// Hard cap on verts any single knot may contribute; asserted in InitBrush.
const ushort kUpperBoundVertsPerKnot = 12;
// NOTE(review): not referenced in the visible portion of this class — confirm
// whether it is still used before removing.
const float kBreakAngleScalar = 3.0f;
const float kSolidAspectRatio = 0.2f;
// How far an end cap's tip extends, as a fraction of the tube radius.
[SerializeField] float m_CapAspect = .8f;
// Number of distinct points around each ring; vert count per ring differs
// (see InitBrush) depending on m_HardEdges.
[SerializeField] ushort m_PointsInClosedCircle = 8;
[SerializeField] bool m_EndCaps = true;
// Hard edges duplicate verts along every seam; soft edges share them.
[SerializeField] bool m_HardEdges = false;
[SerializeField] protected UVStyle m_uvStyle = UVStyle.Distance;
[SerializeField] protected ShapeModifier m_ShapeModifier = ShapeModifier.None;
/// Specific to Taper shape modifier.
[SerializeField] float m_TaperScalar = 1.0f;
/// Specific to Petal shape modifier.
[SerializeField] float m_PetalDisplacementAmt = 0.5f;
[SerializeField] float m_PetalDisplacementExp = 3.0f;
/// XXX - in my experience a higher multiplier actually makes
/// the break LESS sensitive. Not more.
///
/// Positive multiplier; 1.0 is standard, higher is more sensitive.
[SerializeField] float m_BreakAngleMultiplier = 2;
// Derived in InitBrush from m_PointsInClosedCircle and m_HardEdges/m_EndCaps.
int m_VertsInClosedCircle;
int m_VertsInCap;
// Tube brush tracks per vert displacement directions for use with the vert post processing modifier
List<Vector3> m_Displacements;
// How u coordinates are assigned along the stroke.
protected enum UVStyle {
  Distance,
  Stretch
};
// Silhouette post-processing applied per segment in OnChanged_ModifySilhouette.
protected enum ShapeModifier {
  None,
  DoubleSidedTaper,
  Sin,
  Comet,
  Taper,
  Petal,
};
// Default construction allows batching.
public TubeBrush() : this(true) { }
// The per-knot vert bound is doubled relative to kUpperBoundVertsPerKnot —
// presumably to leave room for cap verts in addition to the ring; confirm
// against GeometryBrush's allocation logic.
public TubeBrush(bool bCanBatch)
  : base(bCanBatch: bCanBatch,
         upperBoundVertsPerKnot: kUpperBoundVertsPerKnot * 2,
         bDoubleSided: false) {
}
//
// GeometryBrush API
//
// Initializes derived vert counts and (when a shape modifier is active) the
// per-vert displacement cache.
protected override void InitBrush(BrushDescriptor desc, TrTransform localPointerXf) {
  base.InitBrush(desc, localPointerXf);
  m_geometry.Layout = GetVertexLayout(desc);
  if (m_ShapeModifier != ShapeModifier.None) {
    m_Displacements = new List<Vector3>();
  }
  // Hard edges break verts along every seam; soft edges break only a single
  // vert so the UVs can unwrap properly around the ring.
  m_VertsInClosedCircle = m_HardEdges
      ? m_PointsInClosedCircle * 2
      : m_PointsInClosedCircle + 1;
  // Caps reuse one vert per circle point; no caps means no cap verts.
  m_VertsInCap = m_EndCaps ? m_PointsInClosedCircle : 0;
  // Start and end of circle are coincident, and need at least one more point.
  Debug.Assert(m_PointsInClosedCircle > 2);
  // Make sure the number of verts per knot are less than the upper bound
  Debug.Assert(m_VertsInClosedCircle <= kUpperBoundVertsPerKnot);
}
// Clears preview state; the displacement cache only exists when a shape
// modifier is active.
public override void ResetBrushForPreview(TrTransform unused) {
  base.ResetBrushForPreview(unused);
  if (m_Displacements == null) {
    return;
  }
  m_Displacements.Clear();
}
// Distance the pointer must travel before the next knot is emitted:
// a fixed minimum solid length plus a pressure-scaled term.
override public float GetSpawnInterval(float pressure01) {
  float minSolidLength =
      m_Desc.m_SolidMinLengthMeters_PS * POINTER_TO_LOCAL * App.METERS_TO_UNITS;
  float pressuredLength = PressuredSize(pressure01) * kSolidAspectRatio;
  return minSolidLength + pressuredLength;
}
// Re-frames and regenerates geometry from the affected knot onward, then
// applies any UV stretching / silhouette post-processing.
override protected void ControlPointsChanged(int iKnot0) {
  // A control-point change also affects geometry generated off the previous
  // knot (when it has any). The HasGeometry check is not a micro-optimization:
  // it also keeps us from backing up past knot 0.
  int start = m_knots[iKnot0 - 1].HasGeometry ? iKnot0 - 1 : iKnot0;
  // Frame the knots and decide how much geometry each should get.
  bool initialKnotBroke = OnChanged_FrameKnots(start);
  if (initialKnotBroke) {
    // The first knot turned into a strip break; geometry generation needs the
    // knot before it (but never earlier than knot 1).
    start = Mathf.Max(1, start - 1);
  }
  OnChanged_MakeGeometry(start);
  ResizeGeometry();
  if (m_uvStyle == UVStyle.Stretch) {
    OnChanged_StretchUVs(start);
  }
  if (m_ShapeModifier != ShapeModifier.None) {
    OnChanged_ModifySilhouette(start);
  }
}
// Fills in any knot data needed for geometry generation.
// Returns true if a strip break is detected on the initial knot.
// - fill in length, nRight, nSurface, iVert, iTri
// - calculate strip-break points
bool OnChanged_FrameKnots(int iKnot0) {
  bool initialKnotContainsBreak = false;
  Knot prev = m_knots[iKnot0 - 1];
  for (int iKnot = iKnot0; iKnot < m_knots.Count; ++iKnot) {
    Knot cur = m_knots[iKnot];
    bool shouldBreak = false;
    Vector3 vMove = cur.smoothedPos - prev.smoothedPos;
    cur.length = vMove.magnitude;
    // A move too short to define a stable tangent forces a strip break.
    if (cur.length < kMinimumMoveMeters_PS * App.METERS_TO_UNITS * POINTER_TO_LOCAL) {
      shouldBreak = true;
    } else {
      Vector3 nTangent = vMove / cur.length;
      cur.qFrame = MathUtils.ComputeMinimalRotationFrame(
          nTangent, prev.Frame, cur.point.m_Orient);
      // More break checking; replicates previous logic
      // TODO: decompose into twist and swing; use different constraints
      // http://www.euclideanspace.com/maths/geometry/rotations/for/decomposition/
      if (prev.HasGeometry && !m_PreviewMode) {
        // Break when the frame rotated more than an angle threshold derived
        // from the segment's width/height ratio: short fat segments tolerate
        // less bend than long thin ones.
        float fWidthHeightRatio = cur.length / PressuredSize(cur.smoothedPressure);
        float fBreakAngle = Mathf.Atan(fWidthHeightRatio) * Mathf.Rad2Deg
            * m_BreakAngleMultiplier;
        float angle = Quaternion.Angle(prev.qFrame, cur.qFrame);
        if (angle > fBreakAngle) {
          shouldBreak = true;
        }
      }
    }
    if (shouldBreak) {
      // A zeroed (non-unit) quaternion marks the knot as broken.
      cur.qFrame = new Quaternion(0, 0, 0, 0);
      cur.nRight = cur.nSurface = Vector3.zero;
      if (iKnot == iKnot0) {
        initialKnotContainsBreak = true;
      }
    } else {
      cur.nRight = cur.qFrame * Vector3.right;
      cur.nSurface = cur.qFrame * Vector3.up;
    }
    // Just mark whether or not the strip is broken
    // tri/vert allocation will happen next pass
    cur.nTri = cur.nVert = (ushort)(shouldBreak ? 0 : 1);
    m_knots[iKnot] = cur;
    prev = cur;
  }
  return initialKnotContainsBreak;
}
// Textures are laid out so u goes along the strip,
// and v goes across the strip (from left to right)
void OnChanged_MakeGeometry(int iKnot0) {
  // Invariant: there is a previous knot.
  Knot prev = m_knots[iKnot0 - 1];
  for (int iKnot = iKnot0; iKnot < m_knots.Count; ++iKnot) {
    // Invariant: all of prev's geometry (if any) is correct and up-to-date.
    // Thus, there is no need to modify anything shared with prev.
    Knot cur = m_knots[iKnot];
    cur.iTri = prev.iTri + prev.nTri;
    cur.iVert = (ushort)(prev.iVert + prev.nVert);
    // Verts are: back cap, back circle, front circle, front cap
    // Back circle is shared with previous knot
    // Diagram:
    //
    //   START KNOT          KNOT              KNOT
    //   <start cap> <closed circle>   <closed circle>   <closed circle> <end cap>
    //
    if (cur.HasGeometry) {
      cur.nVert = cur.nTri = 0;
      Vector3 rt = cur.qFrame * Vector3.right;
      Vector3 up = cur.qFrame * Vector3.up;
      Vector3 fwd = cur.qFrame * Vector3.forward;
      bool isStart = !prev.HasGeometry;
      bool isEnd = IsPenultimate(iKnot);
      // Verts, back half
      float u0, v0, v1;
      if (isStart) {
        // Pick a random atlas row for this strip; u starts at a random phase.
        float random01 = m_rng.In01(cur.iVert - 1);
        u0 = random01;
        int numV = m_Desc.m_TextureAtlasV;
        int iAtlas = (int)(random01 * 3331) % numV;
        v0 = (iAtlas) / (float)numV;
        v1 = (iAtlas + 1) / (float)numV;
        float prevSize = PressuredSize(prev.smoothedPressure);
        float prevRadius = prevSize / 2;
        float prevCircumference = TWOPI * prevRadius;
        // u advances at m_TileRate tiles per unit of circumference.
        float prevURate = m_Desc.m_TileRate / prevCircumference;
        if (m_EndCaps) {
          MakeCapVerts(
              ref cur, m_PointsInClosedCircle,
              prev.smoothedPos - fwd * prevRadius * m_CapAspect,
              prev.smoothedPos, prevRadius,
              u0, v0, v1, -prevURate,
              up, rt, fwd);
        }
        MakeClosedCircle(ref cur, prev.smoothedPos, prevRadius,
                         m_PointsInClosedCircle, up, rt, fwd, u0, v0, v1);
      } else {
        // Share some verts
        cur.iVert -= (ushort)(m_VertsInClosedCircle);
        cur.nVert += (ushort)(m_VertsInClosedCircle);
        // Start and end verts wrap differently between soft / hard edge geometry due to our topology
        // TO DO: Refactor this so that things... are sane. Not sure exactly how to do that elegantly though.
        int iEdgeLoopStart = cur.iVert;
        int iEdgeLoopEnd = cur.iVert + m_VertsInClosedCircle - 1;
        if (m_HardEdges) {
          iEdgeLoopStart = cur.iVert + 1;
          iEdgeLoopEnd = cur.iVert;
        }
        // Recover u0/v0/v1 from the shared back-circle verts; the texcoord
        // layout depends on whether radius is packed into texcoord0.z.
        if (m_Desc.m_TubeStoreRadiusInTexcoord0Z) {
          Vector3 u0v0 = m_geometry.m_Texcoord0.v3[iEdgeLoopStart];
          u0 = u0v0.x;
          v0 = u0v0.y;
          v1 = m_geometry.m_Texcoord0.v3[iEdgeLoopEnd].y;
        } else {
          Vector2 u0v0 = m_geometry.m_Texcoord0.v2[iEdgeLoopStart];
          u0 = u0v0.x;
          v0 = u0v0.y;
          v1 = m_geometry.m_Texcoord0.v2[iEdgeLoopEnd].y;
        }
      }
      // Verts, front half
      {
        float size = PressuredSize(cur.smoothedPressure);
        float radius = size / 2;
        float circumference = TWOPI * radius;
        float uRate = m_Desc.m_TileRate / circumference;
        float u1;
        u1 = u0 + cur.length * uRate;
        MakeClosedCircle(ref cur, cur.smoothedPos, radius,
                         m_PointsInClosedCircle, up, rt, fwd, u1, v0, v1);
        if (isEnd && m_EndCaps) {
          MakeCapVerts(
              ref cur, m_PointsInClosedCircle,
              cur.smoothedPos + fwd * radius * m_CapAspect,
              cur.smoothedPos, radius,
              u1, v0, v1, uRate,
              up, rt, fwd);
        }
      }
      // Tris
      // vert index of back circle.
      // If it is the start, then we will need to apply an additional vert offset
      // because we will be building triangles on the start cap prior to building
      // triangles on the end cap
      int BC = (isStart ? (int)m_VertsInCap : 0);
      //
      // vert index of front circle
      int FC = BC + m_VertsInClosedCircle;
      // Backcap
      if (isStart) {
        int CAP = 0;
        if (m_HardEdges) {
          for (int i = 0; i < m_VertsInCap; ++i) {
            // CAP + i is the start vert on the start cap
            // j is the first vert on the back circle
            // ii is the second vert on the back circle
            int j = i * 2 + 1;
            int ii = (j + 1) % (m_VertsInClosedCircle);
            AppendTri(ref cur, CAP + i, BC + j, BC + ii);
          }
        } else {
          for (int i = 0; i < m_VertsInCap; ++i) {
            // CAP + i is the start vert on the start cap
            // i is the first vert on the back circle
            // ii is the second vert on the back circle
            int ii = (i + 1);
            AppendTri(ref cur, CAP + i, BC + i, BC + ii);
          }
        }
      }
      // Cylinder
      if (m_HardEdges) {
        // Two tris per quad; hard edges index every other vert (pairs share a
        // seam).
        for (int i = 0; i < m_PointsInClosedCircle; i += 1) {
          int j = i * 2 + 1;
          int ii = (j + 1) % (m_VertsInClosedCircle);
          AppendTri(ref cur, BC + j, FC + j, BC + ii);
          AppendTri(ref cur, BC + ii, FC + j, FC + ii);
        }
      } else {
        for (int i = 0; i < m_VertsInClosedCircle - 1; ++i) {
          int ii = (i + 1);
          AppendTri(ref cur, BC + i, FC + i, BC + ii);
          AppendTri(ref cur, BC + ii, FC + i, FC + ii);
        }
      }
      // Front cap
      if (isEnd) {
        int CAP = FC + m_VertsInClosedCircle;
        if (m_HardEdges) {
          for (int i = 0; i < m_VertsInCap; ++i) {
            // CAP + i is the start vert on the end cap
            // ii is the first vert on the front circle
            // j is the second vert on the front circle
            int j = i * 2 + 1;
            int ii = (j + 1) % (m_VertsInClosedCircle);
            AppendTri(ref cur, CAP + i, FC + ii, FC + j);
          }
        } else {
          for (int i = 0; i < m_VertsInCap; ++i) {
            // CAP + i is the start vert on the end cap
            // i is the first vert on the front circle
            // ii is the second vert on the front circle
            int ii = (i + 1);
            AppendTri(ref cur, CAP + i, FC + ii, FC + i);
          }
        }
      }
    }
    m_knots[iKnot] = cur;
    prev = cur;
  }
}
// TODO: Set correct UVs on end caps. Right now this runs over all knots in the brush,
// but the start knot and end knot have additional verts in the case of the end caps.
// Those need to be special cased.
void OnChanged_StretchUVs(int iChangedKnot) {
  // Rewind to the first knot of the segment containing the change.
  // Invariant: knot 0 never has geometry, so this scan cannot underflow.
  int segmentStart = iChangedKnot;
  while (m_knots[segmentStart - 1].HasGeometry) {
    --segmentStart;
  }
  // Restretch every segment from here through the end of the stroke.
  while (segmentStart < m_knots.Count) {
    segmentStart = ModifyStretchUVsOfSegment(segmentStart);
  }
}
// Spreads u linearly across the knots of one segment.
// Returns the knot index just past this segment's break knot.
int ModifyStretchUVsOfSegment(int initialSegmentKnot) {
  // Scan forward to the segment's end; every knot passed has geometry.
  int endSegmentKnot = initialSegmentKnot;
  while (endSegmentKnot < m_knots.Count && m_knots[endSegmentKnot].HasGeometry) {
    ++endSegmentKnot;
  }
  int totalNumKnots = endSegmentKnot - initialSegmentKnot;
  // Assign u = knotIndexInSegment / totalNumKnots to every vert of each knot.
  for (int iKnot = initialSegmentKnot; iKnot < endSegmentKnot; ++iKnot) {
    Knot cur = m_knots[iKnot];
    float u = (float)(iKnot - initialSegmentKnot) / (float)totalNumKnots;
    for (int i = 0; i < cur.nVert; i++) {
      int vert = cur.iVert + i;
      // Texcoord layout depends on whether radius is packed into texcoord0.z.
      if (m_Desc.m_TubeStoreRadiusInTexcoord0Z) {
        var uvw = m_geometry.m_Texcoord0.v3[vert];
        uvw.x = u;
        m_geometry.m_Texcoord0.v3[vert] = uvw;
      } else {
        var uv = m_geometry.m_Texcoord0.v2[vert];
        uv.x = u;
        m_geometry.m_Texcoord0.v2[vert] = uv;
      }
    }
  }
  return endSegmentKnot + 1;
}
// Helper for the DoubleSidedTaper shape modifier: computes a 0..1 taper curve
// that ramps up over the first few knots and back down over the last few,
// interpolated smoothly as new knots are emitted.
internal class LoftedProfile {
  // The number of knots at the start and end of the stroke.
  const int kNumEndKnots = 5;
  // The minimum number of knots required to draw anything.
  const int kMinKnotCount = 3;
  // Fraction [0,1] of progress toward emitting the next knot.
  float partialProgress;
  // Number of knots in the segment being lofted.
  int knotCount;
  public LoftedProfile(GeometryBrush brush,
                       int firstKnotIndex, int lastKnotIndex,
                       float totalLength, float lastLength,
                       List<Knot> knots) {
    // Compute the partial progress to emitting the next knot, this is used to shape the verts
    // by knot continuously (knots are intrinsicly discrete) without popping as new knots are
    // added.
    partialProgress =
        Mathf.Clamp01(lastLength / brush.GetSpawnInterval(knots[lastKnotIndex].smoothedPressure));
    knotCount = lastKnotIndex - firstKnotIndex + 1;
  }
  // Returns the taper amplitude in [0,1] for knot iKnot of the segment.
  // NOTE(review): parameters t and tPrev are currently unused here — confirm
  // whether they are kept for interface symmetry with the other modifiers.
  public float ComputeCurve(int iKnot,
                            int firstKnotIndex,
                            int lastKnotIndex,
                            float t,
                            float tPrev) {
    // Not enough knots to make a meaningful shape.
    if (knotCount < kMinKnotCount) {
      return 0;
    }
    // The leading and trailing knot count.
    int halfCount = Mathf.CeilToInt(Mathf.Min(kNumEndKnots, knotCount / 2.0f));
    int nextHalfCount = Mathf.CeilToInt(Mathf.Min(kNumEndKnots, (knotCount + 1) / 2.0f));
    // This is the segment-relative index into the curve, where iKnot is the absolute offset in
    // all knots for all segments.
    int localIndex = iKnot - firstKnotIndex;
    // The index starting from the tail, i.e. the last knot has reverseIndex = 0.
    int reverseIndex = knotCount - localIndex - 1;
    int nextReverseIndex = (knotCount + 1) - localIndex - 1;
    float curValue = 1;
    float nextValue = 1;
    // Note that the current and next knots must be computed separately because when the cuve is
    // extremely small, knots will transition from head to tail and the half count will change
    // as knots are added until there are kNumEndKnots * 2 total knots.
    // Compute the knot position at given the current curve.
    if (localIndex < halfCount) {
      // The head of the curve.
      curValue = localIndex / (halfCount - 1f);
    } else if (reverseIndex < halfCount) {
      // The tail of the curve.
      curValue = Mathf.Max(0f, reverseIndex - 1f) / Mathf.Max(1f, halfCount - 1f);
    }
    // Compute the knot position for the next curve, immediately after we add a new knot.
    if (localIndex < nextHalfCount) {
      // The head of the curve.
      nextValue = localIndex / (nextHalfCount - 1f);
    } else if (nextReverseIndex < nextHalfCount) {
      // The tail of the curve.
      nextValue = Mathf.Max(0f, nextReverseIndex - 1f) / Mathf.Max(1f, nextHalfCount - 1f);
    }
    // TODO: this is a gross hack to account for the fact that curValue is too small,
    // this ultra magical scaling factor fixes some undulation at the head of the curve. The
    // correct fix requires refactoring the curValue above when reverseIndex < halfCount.
    curValue = Mathf.Lerp(curValue, nextValue, 0.185f);
    // Smoothly interpolate between the previous curve and the next curve as the next knot comes
    // into existence. This is required to eliminate pops as each knot is created.
    curValue = Mathf.Lerp(curValue, nextValue, partialProgress);
    // Finally attenuate the curve amplitude when there are a small number of knots, this hides
    // the ugly transtion from "not enough knots" to the steady state.
    float atten = Mathf.Clamp01(
        (knotCount - kMinKnotCount + partialProgress) / (kNumEndKnots * 2f - kMinKnotCount));
    curValue *= atten;
    return Mathf.Clamp01(curValue);
  }
}
// Post process vert geometry per segment.
// Can be leveraged for more interesting shapes
void OnChanged_ModifySilhouette(int iChangedKnot) {
  // Rewind to the first knot of the segment containing the change.
  // Invariant: knot 0 never has geometry, so this scan cannot underflow.
  int segmentStart = iChangedKnot;
  while (m_knots[segmentStart - 1].HasGeometry) {
    --segmentStart;
  }
  // Reshape every segment from here through the end of the stroke; each call
  // returns the start of the next segment.
  while (segmentStart < m_knots.Count) {
    segmentStart = ModifySilhouetteOfSegment(segmentStart);
  }
}
// Returns the next knot after the end of segment.
// Applies the configured ShapeModifier to every vert of one segment,
// repositioning verts along their cached displacement directions.
int ModifySilhouetteOfSegment(int initialSegmentKnot) {
  // Find the last knot and how long this segment is.
  float totalLength = 0;
  int endSegmentKnot = initialSegmentKnot;
  for (; endSegmentKnot < m_knots.Count; ++endSegmentKnot) {
    Knot cur = m_knots[endSegmentKnot];
    if (!cur.HasGeometry) {
      break;
    }
    totalLength += cur.length;
  }
  // Specific to petal shape.
  float petalAmtCacheValue = m_PetalDisplacementAmt * POINTER_TO_LOCAL * m_BaseSize_PS;
  // Specific to the double taper shape.
  LoftedProfile lofted = null;
  if (m_ShapeModifier == ShapeModifier.DoubleSidedTaper) {
    float lastLength = DistanceFromKnot(
        Mathf.Max(0, endSegmentKnot - 2), m_knots[endSegmentKnot - 1].point.m_Pos);
    // "endSegmentKnot - 1" because the last knot does not generate geometry.
    lofted = new LoftedProfile(this, initialSegmentKnot, endSegmentKnot - 1, totalLength,
                               lastLength, m_knots);
  }
  // Iterate over the knots in this segment
  float distance = 0;
  for (int iKnot = initialSegmentKnot; iKnot < endSegmentKnot; ++iKnot) {
    Knot cur = m_knots[iKnot];
    Knot prev = m_knots[iKnot - 1];
    bool isStart = !prev.HasGeometry;
    bool isEnd = IsPenultimate(iKnot);
    // The curve parameter t never goes to zero, because geometry knots always have non-zero
    // length, so tPrev represents a prameter that goes from [0, 1), rather than (0, 1].
    float tPrev = distance / totalLength;
    distance += cur.length;
    float t = distance / totalLength;
    int numVerts = cur.nVert;
    for (int i = 0; i < numVerts; i++) {
      int vert = (cur.iVert + i);
      float radius = PressuredSize(cur.smoothedPressure) / 2.0f;
      // Displacement direction cached per-vert when the geometry was built.
      Vector3 dir = m_Displacements[vert];
      // skip start/end cap verts
      if (m_EndCaps) {
        if (isStart && (i < m_VertsInCap)) {
          continue;
        }
        // XXX: This needs more attention. Modifiers don't always play nicely with
        // geo that only has start/end caps.
        bool bEndCapGeometryIsComplete = (m_VertsInClosedCircle * 2 + m_VertsInCap) == numVerts;
        if (isEnd && bEndCapGeometryIsComplete && (i >= m_VertsInClosedCircle * 2)) {
          continue;
        }
      }
      float curve = 0;
      Vector3 offset = Vector3.zero;
      switch (m_ShapeModifier) {
        // Double Sided Taper (i.e. Jeremy's Lofted Brush)
        case ShapeModifier.DoubleSidedTaper:
          // "endSegmentKnot - 1" because the last knot does not generate geometry.
          curve = lofted.ComputeCurve(iKnot, initialSegmentKnot, endSegmentKnot - 1, t, tPrev);
          break;
        // Sin curve
        case ShapeModifier.Sin:
          curve = Mathf.Abs(Mathf.Sin(t * Mathf.PI));
          break;
        // Taper for fire
        case ShapeModifier.Comet:
          curve = Mathf.Sin(t * 1.5f + 1.55f);
          break;
        // Tapers to a point
        case ShapeModifier.Taper:
          curve = m_TaperScalar * (1 - t);
          break;
        case ShapeModifier.Petal:
          curve = Mathf.Abs(Mathf.Sin(t * Mathf.PI));
          // Petal additionally pushes verts outward along their normals,
          // increasingly so toward the end of the stroke.
          float displacement = Mathf.Pow(t, m_PetalDisplacementExp);
          offset = m_geometry.m_Normals[vert] * displacement * petalAmtCacheValue *
              cur.smoothedPressure;
          break;
      }
      m_geometry.m_Vertices[vert] = offset + cur.smoothedPos + radius * dir * curve;
    }
  }
  return endSegmentKnot + 1;
}
// Cap verts always have the same # of points & verts
// Appends the verts for one end cap: every cap vert sits at the tip, with
// tangents/UVs distributed around the ring it caps.
void MakeCapVerts(
    ref Knot k, int numPoints,
    Vector3 tip, Vector3 circleCenter, float radius,
    float u0, float v0, float v1, float uRate,
    Vector3 up, Vector3 rt, Vector3 fwd) {
  // Length of diagonal between circle and tip
  float diagonal = ((circleCenter + up * radius) - tip).magnitude;
  // Advance u by the diagonal distance so the cap texture continues the tube's.
  float u = u0 + uRate * diagonal;
  // Normal points toward the tip regardless of which end this cap is on.
  Vector3 fwdNormal = Mathf.Sign(Vector3.Dot(tip - circleCenter, fwd)) * fwd;
  for (int i = 0; i < numPoints; ++i) {
    // Endcap vert n tangent points halfway between circle verts n and (n+1)
    float t = (i + .5f) / numPoints;
    float theta = TWOPI * t;
    Vector3 tan = -Mathf.Cos(theta) * up + -Mathf.Sin(theta) * rt;
    Vector2 uv = new Vector2(u, Mathf.Lerp(v0, v1, t));
    Vector3 normal = fwdNormal;
    if (m_HardEdges) {
      // For hard edges, use the same normal calculations as the other closed circles.
      normal = -Mathf.Cos(theta) * up + -Mathf.Sin(theta) * rt;
    }
    // Note that for the purpose of displacement in the shader,
    // radius is zero on the end point verts.
    //
    // Additionally, be aware that here "radius" is packed into a vertex channel but
    // does not actually have anything to do with the radius of the created geometry.
    //
    AppendVert(ref k, tip, normal.normalized, m_Color, tan, uv, /*radius*/ 0);
    AppendDisplacement(ref k, fwdNormal);
  }
}
/// Builds one ring of the tube at "center", dispatching to the hard- or
/// soft-edged variant depending on the brush's edge mode.
void MakeClosedCircle(
    ref Knot k, Vector3 center, float radius, int numPoints,
    Vector3 up, Vector3 rt, Vector3 fwd,
    float u, float v0, float v1) {
  if (!m_HardEdges) {
    MakeClosedCircleSoftEdges(ref k, center, radius, numPoints, up, rt, fwd, u, v0, v1);
    return;
  }
  MakeClosedCircleHardEdges(ref k, center, radius, numPoints, up, rt, fwd, u, v0, v1);
}
// Soft edge circle rings have one more vertex than the number of points:
// verts = points + 1. The extra vertex duplicates the first position so the
// UVs can wrap cleanly around the ring's seam (which sits at the bottom).
// Parameters "up" and "rt" are assumed to be normalized.
// When facing down the tangent, circle verts go clockwise.
void MakeClosedCircleSoftEdges(
    ref Knot k, Vector3 center, float radius, int numPoints,
    Vector3 up, Vector3 rt, Vector3 fwd,
    float u, float v0, float v1) {
  int vertCount = numPoints + 1;
  int lastIndex = vertCount - 1;
  for (int iVert = 0; iVert < vertCount; ++iVert) {
    float frac = (float)iVert / lastIndex;
    // Snap the wrap vertex back to angle 0 so the first and last verts are
    // exactly coincident.
    float angle = (frac == 1) ? 0 : TWOPI * frac;
    Vector2 texcoord = new Vector2(u, Mathf.Lerp(v0, v1, frac));
    Vector3 radial = -Mathf.Cos(angle) * up + -Mathf.Sin(angle) * rt;
    AppendVert(ref k, center + radius * radial, radial, m_Color, fwd, texcoord, radius);
    AppendDisplacement(ref k, radial.normalized);
  }
}
// Hard edge circle rings have double the vertices than the number of points:
// verts = points * 2. Each point emits two coincident verts carrying the face
// normals of the two adjacent faces, producing faceted shading.
// Parameters "up" and "rt" are assumed to be normalized.
// When facing down the tangent, circle verts go clockwise; the seam is at the
// bottom.
void MakeClosedCircleHardEdges(
    ref Knot k, Vector3 center, float radius, int numPoints,
    Vector3 up, Vector3 rt, Vector3 fwd,
    float u, float v0, float v1) {
  // Half the angular step between points; face normals point halfway between
  // a vert's angle and its neighbors'. Loop-invariant, so hoisted.
  float dTheta = TWOPI / numPoints * .5f;
  for (int i = 0; i < numPoints; ++i) {
    float t = (float)i / (numPoints);
    float theta = TWOPI * t;
    // Radial direction at this point. The original computed this expression
    // twice (as "tan" and "off1"); the values were identical, so it is
    // computed once and used for the position offset, the tangent, and the
    // displacement. (A dead "lastTheta" local was also removed.)
    Vector3 radial = -Mathf.Cos(theta) * up + -Mathf.Sin(theta) * rt;
    // Face normals for the two coincident verts: the face ending at this
    // point and the face starting at it.
    Vector3 nCur = -Mathf.Cos(theta + dTheta) * up + -Mathf.Sin(theta + dTheta) * rt;
    Vector3 nPrev = -Mathf.Cos(theta - dTheta) * up + -Mathf.Sin(theta - dTheta) * rt;
    // Calculate V's for hard edges: each face gets its own strip of V range.
    int prevFace = (i + (numPoints - 1)) % numPoints;
    float v = Mathf.Lerp(v0, v1, (float)prevFace / (float)(numPoints) + (1.0f / numPoints));
    Vector2 uv = new Vector2(u, v);
    AppendVert(ref k, center + radius * radial, nPrev, m_Color, radial, uv, radius);
    AppendDisplacement(ref k, radial);
    int currentFace = i;
    v = Mathf.Lerp(v0, v1, (float)currentFace / (float)(numPoints));
    uv = new Vector2(u, v);
    AppendVert(ref k, center + radius * radial, nCur, m_Color, radial, uv, radius);
    AppendDisplacement(ref k, radial);
  }
}
/// Describes the vertex channels this brush writes. Colors, normals and
/// tangents are always present; texcoord0 is either plain UV (2 components)
/// or UV plus radius packed into Z (3 components), per the descriptor.
override public GeometryPool.VertexLayout GetVertexLayout(BrushDescriptor desc) {
  if (desc.m_TubeStoreRadiusInTexcoord0Z) {
    return new GeometryPool.VertexLayout {
      bUseColors = true,
      bUseNormals = true,
      bUseTangents = true,
      uv0Size = 3,
      uv0Semantic = GeometryPool.Semantic.XyIsUvZIsDistance,
      uv1Size = 0
    };
  }
  return new GeometryPool.VertexLayout {
    bUseColors = true,
    bUseNormals = true,
    bUseTangents = true,
    uv0Size = 2,
    uv0Semantic = GeometryPool.Semantic.XyIsUv,
    uv1Size = 0
  };
}
/// Records the displacement direction for the most recently appended vertex.
/// Displacements are only tracked when a shape modifier will consume them.
/// Like AppendVert, this either grows the list or overwrites in place.
void AppendDisplacement(ref Knot k, Vector3 v) {
  if (m_ShapeModifier != ShapeModifier.None) {
    int idx = k.iVert + k.nVert - 1;
    if (idx < m_Displacements.Count) {
      m_Displacements[idx] = v;
    } else {
      m_Displacements.Add(v);
    }
  }
}
/// Resizes arrays if necessary, appends data, mutates knot's vtx count. The
/// incoming normal n should be normalized.
/// All vertex channel lists are kept in lockstep: either every channel gets
/// a new element appended, or every channel is overwritten at index i.
void AppendVert(ref Knot k, Vector3 v, Vector3 n, Color32 c,
    Vector3 tan, Vector2 uv, float radius) {
  // Post-increment: i indexes the new vert, and the knot's count grows by 1.
  int i = k.iVert + k.nVert++;
  // Radius rides along in texcoord0.z when the descriptor asks for it.
  Vector3 uv3 = new Vector3(uv.x, uv.y, radius);
  Vector4 tan4 = tan;
  // Making sure Tangent w is 1.0
  tan4.w = 1.0f;
  if (i == m_geometry.m_Vertices.Count) {
    // Appending at the end: grow every channel together.
    m_geometry.m_Vertices .Add(v);
    m_geometry.m_Normals  .Add(n);
    m_geometry.m_Colors   .Add(c);
    if (m_Desc.m_TubeStoreRadiusInTexcoord0Z) {
      m_geometry.m_Texcoord0.v3.Add(uv3);
    } else {
      m_geometry.m_Texcoord0.v2.Add(uv);
    }
    m_geometry.m_Tangents .Add(tan4);
  } else {
    // Rewriting existing geometry (e.g. during stroke update): overwrite
    // every channel at the same index.
    m_geometry.m_Vertices[i] = v;
    m_geometry.m_Normals[i] = n;
    m_geometry.m_Colors[i] = c;
    if (m_Desc.m_TubeStoreRadiusInTexcoord0Z) {
      m_geometry.m_Texcoord0.v3[i] = uv3;
    } else {
      m_geometry.m_Texcoord0.v2[i] = uv;
    }
    m_geometry.m_Tangents[i] = tan4;
  }
}
/// Appends (or overwrites in place) one triangle whose indices t0..t2 are
/// relative to the knot's base vertex. Mutates the knot's triangle count.
void AppendTri(ref Knot k, int t0, int t1, int t2) {
  int baseIdx = (k.iTri + k.nTri++) * 3;
  int i0 = k.iVert + t0;
  int i1 = k.iVert + t1;
  int i2 = k.iVert + t2;
  if (baseIdx == m_geometry.m_Tris.Count) {
    // Appending at the end of the index buffer.
    m_geometry.m_Tris.Add(i0);
    m_geometry.m_Tris.Add(i1);
    m_geometry.m_Tris.Add(i2);
  } else {
    // Rewriting an existing triangle in place.
    m_geometry.m_Tris[baseIdx] = i0;
    m_geometry.m_Tris[baseIdx + 1] = i1;
    m_geometry.m_Tris[baseIdx + 2] = i2;
  }
}
/// True when iKnot is the last knot, or when the following knot produced no
/// geometry.
bool IsPenultimate(int iKnot) {
  int next = iKnot + 1;
  return next == m_knots.Count || !m_knots[next].HasGeometry;
}
}
} // namespace TiltBrush
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace WebService.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
public class ObjectGenerator
{
    // Number of elements generated for arrays, collections, dictionaries and
    // queryables.
    internal const int DefaultCollectionSize = 2;

    // Handles "simple" types (primitives, string, Guid, DateTime, Uri, ...).
    private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();

    /// <summary>
    /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
    /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
    /// Complex types: POCO types.
    /// Nullables: <see cref="Nullable{T}"/>.
    /// Arrays: arrays of simple types or complex types.
    /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
    /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
    /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
    /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
    /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
    /// </summary>
    /// <param name="type">The type.</param>
    /// <returns>An object of the given type.</returns>
    public object GenerateObject(Type type)
    {
        // A fresh reference map per top-level call; it breaks cycles in
        // recursive complex types (see GenerateComplexObject).
        return GenerateObject(type, new Dictionary<Type, object>());
    }

    /// <summary>
    /// Core dispatcher. Tries generators in priority order (simple types,
    /// arrays, generics, dictionaries, collections, queryables, enums,
    /// complex objects) and returns null when nothing applies or anything
    /// throws.
    /// </summary>
    [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
    private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
    {
        try
        {
            if (SimpleTypeObjectGenerator.CanGenerateObject(type))
            {
                return SimpleObjectGenerator.GenerateObject(type);
            }

            if (type.IsArray)
            {
                return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
            }

            if (type.IsGenericType)
            {
                return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
            }

            // Non-generic dictionary interfaces/types.
            if (type == typeof(IDictionary))
            {
                return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
            }

            if (typeof(IDictionary).IsAssignableFrom(type))
            {
                return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
            }

            // Non-generic collection interfaces get an ArrayList instance.
            if (type == typeof(IList) ||
                type == typeof(IEnumerable) ||
                type == typeof(ICollection))
            {
                return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
            }

            if (typeof(IList).IsAssignableFrom(type))
            {
                return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
            }

            if (type == typeof(IQueryable))
            {
                return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
            }

            if (type.IsEnum)
            {
                return GenerateEnum(type);
            }

            // Last resort: treat it as a POCO (must be public to be useful in
            // help-page samples).
            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, createdObjectReferences);
            }
        }
        catch
        {
            // Returns null if anything fails
            return null;
        }

        return null;
    }

    /// <summary>
    /// Handles closed generic types: Nullable, KeyValuePair, Tuple, the
    /// generic collection/dictionary/queryable interfaces, and finally
    /// arbitrary public generic POCOs.
    /// </summary>
    private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
    {
        Type genericTypeDefinition = type.GetGenericTypeDefinition();
        if (genericTypeDefinition == typeof(Nullable<>))
        {
            return GenerateNullable(type, createdObjectReferences);
        }

        if (genericTypeDefinition == typeof(KeyValuePair<,>))
        {
            return GenerateKeyValuePair(type, createdObjectReferences);
        }

        if (IsTuple(genericTypeDefinition))
        {
            return GenerateTuple(type, createdObjectReferences);
        }

        Type[] genericArguments = type.GetGenericArguments();
        if (genericArguments.Length == 1)
        {
            // The common single-argument collection interfaces get a List<T>.
            if (genericTypeDefinition == typeof(IList<>) ||
                genericTypeDefinition == typeof(IEnumerable<>) ||
                genericTypeDefinition == typeof(ICollection<>))
            {
                Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
                return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
            }

            if (genericTypeDefinition == typeof(IQueryable<>))
            {
                return GenerateQueryable(type, collectionSize, createdObjectReferences);
            }

            // Any concrete type implementing ICollection<T> is filled via Add.
            Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
            if (closedCollectionType.IsAssignableFrom(type))
            {
                return GenerateCollection(type, collectionSize, createdObjectReferences);
            }
        }

        if (genericArguments.Length == 2)
        {
            if (genericTypeDefinition == typeof(IDictionary<,>))
            {
                Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
                return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
            }

            // Any concrete type implementing IDictionary<K,V>.
            Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
            if (closedDictionaryType.IsAssignableFrom(type))
            {
                return GenerateDictionary(type, collectionSize, createdObjectReferences);
            }
        }

        if (type.IsPublic || type.IsNestedPublic)
        {
            return GenerateComplexObject(type, createdObjectReferences);
        }

        return null;
    }

    /// <summary>
    /// Creates a Tuple of the given closed type. Returns null only when
    /// every component failed to generate (all null).
    /// </summary>
    private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
    {
        Type[] genericArgs = type.GetGenericArguments();
        object[] parameterValues = new object[genericArgs.Length];
        bool failedToCreateTuple = true;
        ObjectGenerator objectGenerator = new ObjectGenerator();
        for (int i = 0; i < genericArgs.Length; i++)
        {
            parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
            failedToCreateTuple &= parameterValues[i] == null;
        }
        if (failedToCreateTuple)
        {
            return null;
        }
        object result = Activator.CreateInstance(type, parameterValues);
        return result;
    }

    // True for any of the Tuple<...> generic definitions (arity 1 through 8).
    private static bool IsTuple(Type genericTypeDefinition)
    {
        return genericTypeDefinition == typeof(Tuple<>) ||
            genericTypeDefinition == typeof(Tuple<,>) ||
            genericTypeDefinition == typeof(Tuple<,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,,,,,>);
    }

    /// <summary>
    /// Creates a KeyValuePair of the given closed type. Returns null only
    /// when both the key and the value failed to generate.
    /// </summary>
    private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
    {
        Type[] genericArgs = keyValuePairType.GetGenericArguments();
        Type typeK = genericArgs[0];
        Type typeV = genericArgs[1];
        ObjectGenerator objectGenerator = new ObjectGenerator();
        object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
        object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
        if (keyObject == null && valueObject == null)
        {
            // Failed to create key and values
            return null;
        }
        object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
        return result;
    }

    /// <summary>
    /// Creates an array of the element type with "size" generated elements.
    /// Returns null when every element came back null.
    /// </summary>
    private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
    {
        Type type = arrayType.GetElementType();
        Array result = Array.CreateInstance(type, size);
        bool areAllElementsNull = true;
        ObjectGenerator objectGenerator = new ObjectGenerator();
        for (int i = 0; i < size; i++)
        {
            object element = objectGenerator.GenerateObject(type, createdObjectReferences);
            result.SetValue(element, i);
            areAllElementsNull &= element == null;
        }

        if (areAllElementsNull)
        {
            return null;
        }

        return result;
    }

    /// <summary>
    /// Instantiates the dictionary type and fills it via its Add method,
    /// skipping keys it already contains. Works for both generic
    /// (ContainsKey) and non-generic (Contains) dictionaries via reflection
    /// fallback.
    /// </summary>
    private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
    {
        // Non-generic dictionaries hold object keys/values.
        Type typeK = typeof(object);
        Type typeV = typeof(object);
        if (dictionaryType.IsGenericType)
        {
            Type[] genericArgs = dictionaryType.GetGenericArguments();
            typeK = genericArgs[0];
            typeV = genericArgs[1];
        }

        object result = Activator.CreateInstance(dictionaryType);
        MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
        MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
        ObjectGenerator objectGenerator = new ObjectGenerator();
        for (int i = 0; i < size; i++)
        {
            object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
            if (newKey == null)
            {
                // Cannot generate a valid key
                return null;
            }

            bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
            if (!containsKey)
            {
                object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
                addMethod.Invoke(result, new object[] { newKey, newValue });
            }
        }

        return result;
    }

    // Returns the enum's first declared value, or null for an empty enum.
    private static object GenerateEnum(Type enumType)
    {
        Array possibleValues = Enum.GetValues(enumType);
        if (possibleValues.Length > 0)
        {
            return possibleValues.GetValue(0);
        }
        return null;
    }

    /// <summary>
    /// Builds a backing List&lt;T&gt; (or object[] for the non-generic case)
    /// and wraps it with Queryable.AsQueryable.
    /// </summary>
    private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
    {
        bool isGeneric = queryableType.IsGenericType;
        object list;
        if (isGeneric)
        {
            Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
            list = GenerateCollection(listType, size, createdObjectReferences);
        }
        else
        {
            list = GenerateArray(typeof(object[]), size, createdObjectReferences);
        }
        if (list == null)
        {
            return null;
        }
        if (isGeneric)
        {
            // Invoke the generic AsQueryable<T>(IEnumerable<T>) overload by
            // reflection since T is only known at runtime.
            Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
            MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
            return asQueryableMethod.Invoke(null, new[] { list });
        }

        return Queryable.AsQueryable((IEnumerable)list);
    }

    /// <summary>
    /// Instantiates the collection type and fills it via its Add method.
    /// Returns null when every generated element was null.
    /// </summary>
    private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
    {
        Type type = collectionType.IsGenericType ?
            collectionType.GetGenericArguments()[0] :
            typeof(object);
        object result = Activator.CreateInstance(collectionType);
        MethodInfo addMethod = collectionType.GetMethod("Add");
        bool areAllElementsNull = true;
        ObjectGenerator objectGenerator = new ObjectGenerator();
        for (int i = 0; i < size; i++)
        {
            object element = objectGenerator.GenerateObject(type, createdObjectReferences);
            addMethod.Invoke(result, new object[] { element });
            areAllElementsNull &= element == null;
        }

        if (areAllElementsNull)
        {
            return null;
        }

        return result;
    }

    // Nullable<T> samples are just samples of the underlying T (boxing takes
    // care of the wrapping).
    private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
    {
        Type type = nullableType.GetGenericArguments()[0];
        ObjectGenerator objectGenerator = new ObjectGenerator();
        return objectGenerator.GenerateObject(type, createdObjectReferences);
    }

    /// <summary>
    /// Creates a POCO via its default constructor and populates its public
    /// properties and fields. The createdObjectReferences map is consulted
    /// first so circular object graphs terminate.
    /// </summary>
    private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
    {
        object result = null;

        if (createdObjectReferences.TryGetValue(type, out result))
        {
            // The object has been created already, just return it. This will handle the circular reference case.
            return result;
        }

        if (type.IsValueType)
        {
            result = Activator.CreateInstance(type);
        }
        else
        {
            ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
            if (defaultCtor == null)
            {
                // Cannot instantiate the type because it doesn't have a default constructor
                return null;
            }

            result = defaultCtor.Invoke(new object[0]);
        }

        // Register before populating members so recursive references resolve
        // to this same instance.
        createdObjectReferences.Add(type, result);
        SetPublicProperties(type, result, createdObjectReferences);
        SetPublicFields(type, result, createdObjectReferences);
        return result;
    }

    // Populates all writable public instance properties with sample values.
    private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
    {
        PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
        ObjectGenerator objectGenerator = new ObjectGenerator();
        foreach (PropertyInfo property in properties)
        {
            if (property.CanWrite)
            {
                object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
                property.SetValue(obj, propertyValue, null);
            }
        }
    }

    // Populates all public instance fields with sample values.
    private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
    {
        FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
        ObjectGenerator objectGenerator = new ObjectGenerator();
        foreach (FieldInfo field in fields)
        {
            object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
            field.SetValue(obj, fieldValue);
        }
    }

    /// <summary>
    /// Table-driven generator for simple types. Each call bumps an index so
    /// successive samples of the same type differ (e.g. "sample string 1",
    /// "sample string 2").
    /// </summary>
    private class SimpleTypeObjectGenerator
    {
        private long _index = 0;
        private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

        [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
        private static Dictionary<Type, Func<long, object>> InitializeGenerators()
        {
            return new Dictionary<Type, Func<long, object>>
            {
                { typeof(Boolean), index => true },
                { typeof(Byte), index => (Byte)64 },
                { typeof(Char), index => (Char)65 },
                { typeof(DateTime), index => DateTime.Now },
                { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                { typeof(DBNull), index => DBNull.Value },
                { typeof(Decimal), index => (Decimal)index },
                { typeof(Double), index => (Double)(index + 0.1) },
                { typeof(Guid), index => Guid.NewGuid() },
                { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                { typeof(Int64), index => (Int64)index },
                { typeof(Object), index => new object() },
                { typeof(SByte), index => (SByte)64 },
                { typeof(Single), index => (Single)(index + 0.1) },
                {
                    typeof(String), index =>
                    {
                        return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
                    }
                },
                {
                    typeof(TimeSpan), index =>
                    {
                        return TimeSpan.FromTicks(1234567);
                    }
                },
                { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                { typeof(UInt64), index => (UInt64)index },
                {
                    typeof(Uri), index =>
                    {
                        return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
                    }
                },
            };
        }

        // True when the type has an entry in the generator table.
        public static bool CanGenerateObject(Type type)
        {
            return DefaultGenerators.ContainsKey(type);
        }

        public object GenerateObject(Type type)
        {
            return DefaultGenerators[type](++_index);
        }
    }
}
}
| |
// GtkSharp.Generation.SymbolTable.cs - The Symbol Table Class.
//
// Author: Mike Kestner <[email protected]>
//
// Copyright (c) 2001-2003 Mike Kestner
// Copyright (c) 2004-2005 Novell, Inc.
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of version 2 of the GNU General Public
// License as published by the Free Software Foundation.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// General Public License for more details.
//
// You should have received a copy of the GNU General Public
// License along with this program; if not, write to the
// Free Software Foundation, Inc., 59 Temple Place - Suite 330,
// Boston, MA 02111-1307, USA.
namespace GtkSharp.Generation {
using System;
using System.Collections;
/// Singleton registry mapping C type names (as they appear in the API
/// metadata) to IGeneratable instances that know how to emit C# bindings
/// and marshaling code for them. The constructor pre-loads all the
/// fundamental GLib/C types; generated types are added later via AddType(s).
public class SymbolTable {

    static SymbolTable table = null;

    // Keyed by C type name (IGeneratable.CName) -> IGeneratable.
    Hashtable types = new Hashtable ();

    public static SymbolTable Table {
        get {
            // Lazily created; generation is single-threaded so no locking.
            // NOTE(review): assumed single-threaded use — confirm before
            // using from multiple threads.
            if (table == null)
                table = new SymbolTable ();

            return table;
        }
    }

    public SymbolTable ()
    {
        // Simple easily mapped types
        AddType (new SimpleGen ("void", "void", String.Empty));
        AddType (new SimpleGen ("gpointer", "IntPtr", "IntPtr.Zero"));
        AddType (new SimpleGen ("gboolean", "bool", "false"));
        AddType (new SimpleGen ("gint", "int", "0"));
        AddType (new SimpleGen ("guint", "uint", "0"));
        AddType (new SimpleGen ("int", "int", "0"));
        AddType (new SimpleGen ("unsigned", "uint", "0"));
        AddType (new SimpleGen ("unsigned int", "uint", "0"));
        AddType (new SimpleGen ("unsigned-int", "uint", "0"));
        AddType (new SimpleGen ("gshort", "short", "0"));
        AddType (new SimpleGen ("gushort", "ushort", "0"));
        AddType (new SimpleGen ("short", "short", "0"));
        AddType (new SimpleGen ("guchar", "byte", "0"));
        AddType (new SimpleGen ("unsigned char", "byte", "0"));
        AddType (new SimpleGen ("unsigned-char", "byte", "0"));
        AddType (new SimpleGen ("guint1", "bool", "false"));
        AddType (new SimpleGen ("uint1", "bool", "false"));
        AddType (new SimpleGen ("gint8", "sbyte", "0"));
        AddType (new SimpleGen ("guint8", "byte", "0"));
        AddType (new SimpleGen ("gint16", "short", "0"));
        AddType (new SimpleGen ("guint16", "ushort", "0"));
        AddType (new SimpleGen ("gint32", "int", "0"));
        AddType (new SimpleGen ("guint32", "uint", "0"));
        AddType (new SimpleGen ("gint64", "long", "0"));
        AddType (new SimpleGen ("guint64", "ulong", "0"));
        AddType (new SimpleGen ("long long", "long", "0"));
        AddType (new SimpleGen ("gfloat", "float", "0.0"));
        AddType (new SimpleGen ("float", "float", "0.0"));
        AddType (new SimpleGen ("gdouble", "double", "0.0"));
        AddType (new SimpleGen ("double", "double", "0.0"));
        AddType (new SimpleGen ("goffset", "long", "0"));
        AddType (new SimpleGen ("GQuark", "int", "0"));

        // platform specific integer types.
        // C "long" is 32-bit on 64-bit Windows but pointer-sized elsewhere,
        // hence the two compile-time variants.
#if WIN64LONGS
        AddType (new SimpleGen ("long", "int", "0"));
        AddType (new SimpleGen ("glong", "int", "0"));
        AddType (new SimpleGen ("ulong", "uint", "0"));
        AddType (new SimpleGen ("gulong", "uint", "0"));
        AddType (new SimpleGen ("unsigned long", "uint", "0"));
#else
        AddType (new LPGen ("long"));
        AddType (new LPGen ("glong"));
        AddType (new LPUGen ("ulong"));
        AddType (new LPUGen ("gulong"));
        AddType (new LPUGen ("unsigned long"));
#endif

        AddType (new LPGen ("ssize_t"));
        AddType (new LPGen ("gssize"));
        AddType (new LPUGen ("size_t"));
        AddType (new LPUGen ("gsize"));

#if OFF_T_8
        AddType (new AliasGen ("off_t", "long"));
#else
        AddType (new LPGen ("off_t"));
#endif

        // string types
        AddType (new ConstStringGen ("const-gchar"));
        AddType (new ConstStringGen ("const-xmlChar"));
        AddType (new ConstStringGen ("const-char"));
        AddType (new ConstFilenameGen ("const-gfilename"));
        AddType (new MarshalGen ("gfilename", "string", "IntPtr", "GLib.Marshaller.StringToFilenamePtr({0})", "GLib.Marshaller.FilenamePtrToStringGFree({0})"));
        AddType (new MarshalGen ("gchar", "string", "IntPtr", "GLib.Marshaller.StringToPtrGStrdup({0})", "GLib.Marshaller.PtrToStringGFree({0})"));
        AddType (new MarshalGen ("char", "string", "IntPtr", "GLib.Marshaller.StringToPtrGStrdup({0})", "GLib.Marshaller.PtrToStringGFree({0})"));
        AddType (new SimpleGen ("GStrv", "string[]", "null"));

        // manually wrapped types requiring more complex marshaling
        AddType (new ManualGen ("GInitiallyUnowned", "GLib.InitiallyUnowned", "GLib.Object.GetObject ({0})"));
        AddType (new ManualGen ("GObject", "GLib.Object", "GLib.Object.GetObject ({0})"));
        AddType (new ManualGen ("GList", "GLib.List"));
        AddType (new ManualGen ("GPtrArray", "GLib.PtrArray"));
        AddType (new ManualGen ("GSList", "GLib.SList"));
        AddType (new MarshalGen ("gunichar", "char", "uint", "GLib.Marshaller.CharToGUnichar ({0})", "GLib.Marshaller.GUnicharToChar ({0})"));
        AddType (new MarshalGen ("time_t", "System.DateTime", "IntPtr", "GLib.Marshaller.DateTimeTotime_t ({0})", "GLib.Marshaller.time_tToDateTime ({0})"));
        AddType (new MarshalGen ("GString", "string", "IntPtr", "new GLib.GString ({0}).Handle", "GLib.GString.PtrToString ({0})"));
        AddType (new MarshalGen ("GType", "GLib.GType", "IntPtr", "{0}.Val", "new GLib.GType({0})"));
        AddType (new ByRefGen ("GValue", "GLib.Value"));
        AddType (new SimpleGen ("GDestroyNotify", "GLib.DestroyNotify", "null"));

        // FIXME: These ought to be handled properly.
        AddType (new SimpleGen ("GC", "IntPtr", "IntPtr.Zero"));
        AddType (new SimpleGen ("GError", "IntPtr", "IntPtr.Zero"));
        AddType (new SimpleGen ("GMemChunk", "IntPtr", "IntPtr.Zero"));
        AddType (new SimpleGen ("GTimeVal", "IntPtr", "IntPtr.Zero"));
        AddType (new SimpleGen ("GClosure", "IntPtr", "IntPtr.Zero"));
        AddType (new SimpleGen ("GArray", "IntPtr", "IntPtr.Zero"));
        AddType (new SimpleGen ("GByteArray", "IntPtr", "IntPtr.Zero"));
        AddType (new SimpleGen ("GData", "IntPtr", "IntPtr.Zero"));
        AddType (new SimpleGen ("GIOChannel", "IntPtr", "IntPtr.Zero"));
        AddType (new SimpleGen ("GTypeModule", "GLib.Object", "null"));
        AddType (new SimpleGen ("GHashTable", "System.IntPtr", "IntPtr.Zero"));
        AddType (new SimpleGen ("va_list", "IntPtr", "IntPtr.Zero"));
        AddType (new SimpleGen ("GParamSpec", "IntPtr", "IntPtr.Zero"));
        AddType (new SimpleGen ("gconstpointer", "IntPtr", "IntPtr.Zero"));
    }

    // Registers (or replaces) the generatable under its C name.
    public void AddType (IGeneratable gen)
    {
        types [gen.CName] = gen;
    }

    public void AddTypes (IGeneratable[] gens)
    {
        foreach (IGeneratable gen in gens)
            types [gen.CName] = gen;
    }

    public int Count {
        get
        {
            return types.Count;
        }
    }

    public IEnumerable Generatables {
        get {
            return types.Values;
        }
    }

    // Main lookup: trims pointer/const decorations and resolves aliases.
    // Returns null when the C type is unknown.
    public IGeneratable this [string ctype] {
        get {
            return DeAlias (ctype) as IGeneratable;
        }
    }

    // Const string variants keep their "const-" prefix (they marshal
    // differently from mutable strings), so Trim must not strip it.
    private bool IsConstString (string type)
    {
        switch (type) {
        case "const-gchar":
        case "const-char":
        case "const-xmlChar":
        case "const-gfilename":
            return true;
        default:
            return false;
        }
    }

    // Normalizes a raw C type name: drops trailing '*'s and a leading
    // "const-" (except for the const string types above).
    private string Trim(string type)
    {
        // HACK: If we don't detect this here, there is no
        // way of indicating it in the symbol table
        if (type == "void*" || type == "const-void*") return "gpointer";

        string trim_type = type.TrimEnd('*');

        if (IsConstString (trim_type))
            return trim_type;

        if (trim_type.StartsWith("const-")) return trim_type.Substring(6);
        return trim_type;
    }

    // Follows AliasGen chains to the underlying generatable, collapsing the
    // chain in the table as it goes so later lookups are direct.
    private object DeAlias (string type)
    {
        type = Trim (type);
        while (types [type] is AliasGen) {
            IGeneratable igen = types [type] as AliasGen;
            types [type] = types [igen.Name];
            type = igen.Name;
        }

        return types [type];
    }

    // The helpers below all return "" when the C type is unknown, so callers
    // can emit nothing rather than crash on unbound types.

    public string FromNativeReturn(string c_type, string val)
    {
        IGeneratable gen = this[c_type];
        if (gen == null)
            return "";

        return gen.FromNativeReturn (val);
    }

    public string ToNativeReturn(string c_type, string val)
    {
        IGeneratable gen = this[c_type];
        if (gen == null)
            return "";

        return gen.ToNativeReturn (val);
    }

    public string FromNative(string c_type, string val)
    {
        IGeneratable gen = this[c_type];
        if (gen == null)
            return "";

        return gen.FromNative (val);
    }

    public string GetCSType(string c_type)
    {
        IGeneratable gen = this[c_type];
        if (gen == null)
            return "";

        return gen.QualifiedName;
    }

    public string GetName(string c_type)
    {
        IGeneratable gen = this[c_type];
        if (gen == null)
            return "";

        return gen.Name;
    }

    public string GetMarshalReturnType(string c_type)
    {
        IGeneratable gen = this[c_type];
        if (gen == null)
            return "";

        return gen.MarshalReturnType;
    }

    public string GetToNativeReturnType(string c_type)
    {
        IGeneratable gen = this[c_type];
        if (gen == null)
            return "";

        return gen.ToNativeReturnType;
    }

    public string GetMarshalType(string c_type)
    {
        IGeneratable gen = this[c_type];
        if (gen == null)
            return "";

        return gen.MarshalType;
    }

    public string CallByName(string c_type, string var_name)
    {
        IGeneratable gen = this[c_type];
        if (gen == null)
            return "";

        return gen.CallByName(var_name);
    }

    // The Is* predicates classify a C type by the concrete generatable kind
    // registered for it; all return false for unknown types.

    public bool IsOpaque(string c_type)
    {
        if (this[c_type] is OpaqueGen)
            return true;

        return false;
    }

    public bool IsBoxed(string c_type)
    {
        if (this[c_type] is BoxedGen)
            return true;

        return false;
    }

    public bool IsStruct(string c_type)
    {
        if (this[c_type] is StructGen)
            return true;

        return false;
    }

    public bool IsEnum(string c_type)
    {
        if (this[c_type] is EnumGen)
            return true;

        return false;
    }

    public bool IsEnumFlags(string c_type)
    {
        EnumGen gen = this [c_type] as EnumGen;
        return (gen != null && gen.Elem.GetAttribute ("type") == "flags");
    }

    public bool IsInterface(string c_type)
    {
        if (this[c_type] is InterfaceGen)
            return true;

        return false;
    }

    public ClassBase GetClassGen(string c_type)
    {
        return this[c_type] as ClassBase;
    }

    public bool IsObject(string c_type)
    {
        if (this[c_type] is ObjectGen)
            return true;

        return false;
    }

    public bool IsCallback(string c_type)
    {
        if (this[c_type] is CallbackGen)
            return true;

        return false;
    }

    public bool IsManuallyWrapped(string c_type)
    {
        if (this[c_type] is ManualGen)
            return true;

        return false;
    }

    // Rewrites identifiers that collide with C# keywords (or glib-sharp
    // conventions) into safe parameter/member names.
    public string MangleName(string name)
    {
        switch (name) {
        case "string":
            return "str1ng";
        case "event":
            return "evnt";
        case "null":
            return "is_null";
        case "object":
            return "objekt";
        case "params":
            return "parms";
        case "ref":
            return "reference";
        case "in":
            return "in_param";
        case "out":
            return "out_param";
        case "fixed":
            return "mfixed";
        case "byte":
            return "_byte";
        case "new":
            return "_new";
        case "base":
            return "_base";
        case "lock":
            return "_lock";
        case "callback":
            return "cb";
        case "readonly":
            return "read_only";
        case "interface":
            return "iface";
        case "internal":
            return "_internal";
        case "where":
            return "wh3r3";
        case "foreach":
            return "for_each";
        case "remove":
            return "_remove";
        default:
            break;
        }

        return name;
    }
}
}
| |
using Signum.Utilities;
using Signum.Utilities.ExpressionTrees;
using Signum.Utilities.Reflection;
using System;
using System.CodeDom.Compiler;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
namespace Signum.Entities.Reflection
{
/* Fields
* Value
* Modifiables
* MList
* EmbeddedEntities
* IdentifiableEntities
*
* An identifiable can be accesed thought:
* Normal Reference
* Interface
* Lite
*/
public static class Reflector
{
const BindingFlags flags = BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic;
static Reflector()
{
    // Wire Signum's naming/localization conventions into DescriptionManager.
    DescriptionManager.CleanTypeName = CleanTypeName; //To allow MyEntityEntity
    DescriptionManager.CleanType = t => EnumEntity.Extract(t) ?? t.CleanType(); //To allow Lite<T>

    // Register the default description-option providers. NOTE(review):
    // registration order is assumed to be the consultation order of the
    // multicast delegate — confirm before reordering.
    DescriptionManager.DefaultDescriptionOptions += DescriptionManager_IsEnumsInEntities;
    DescriptionManager.DefaultDescriptionOptions += DescriptionManager_IsQuery;
    DescriptionManager.DefaultDescriptionOptions += DescriptionManager_IsSymbolContainer;
    DescriptionManager.DefaultDescriptionOptions += DescriptionManager_IsIEntity;

    // (Name keeps its historical "Memeber" typo; it is part of the
    // DescriptionManager API surface.)
    DescriptionManager.ShouldLocalizeMemeber += DescriptionManager_ShouldLocalizeMemeber;
    DescriptionManager.Invalidate();
}
// Members marked [HiddenProperty] are not localized unless they carry an
// explicit [Description].
static bool DescriptionManager_ShouldLocalizeMemeber(MemberInfo arg)
{
    if (!arg.HasAttribute<HiddenPropertyAttribute>())
        return true;

    return arg.HasAttribute<DescriptionAttribute>();
}
// Lazily computed set of enum types reachable from entity members in
// assemblies marked with [DefaultAssemblyCulture]; these enums are then
// localized like entity members (see DescriptionManager_IsEnumsInEntities).
// ResetLazy so the scan can be invalidated and re-run.
static ResetLazy<HashSet<Type>> EnumsInEntities = new ResetLazy<HashSet<Type>>(() =>
{
    return new HashSet<Type>(
        // Only scan localizable assemblies; the analyzer assembly is skipped.
        from a in AppDomain.CurrentDomain.GetAssemblies()
        where a.GetName().Name != "Signum.Analyzer" && a.HasAttribute<DefaultAssemblyCultureAttribute>()
        from t in a.GetTypes()
        where typeof(IEntity).IsAssignableFrom(t) || typeof(ModifiableEntity).IsAssignableFrom(t)
        // Types can opt out of member localization via [DescriptionOptions].
        let da = t.GetCustomAttribute<DescriptionOptionsAttribute>(true)
        where da == null || da.Options.IsSet(DescriptionOptions.Members)
        from p in t.GetProperties(BindingFlags.Instance | BindingFlags.Public)
        where DescriptionManager.OnShouldLocalizeMember(p)
        // Unwrap the property type (element type if collection-shaped,
        // then Nullable) to reach a possible enum. Presumably ElementType()
        // returns null for non-collections — confirm in Signum.Utilities.
        let et = (p.PropertyType.ElementType() ?? p.PropertyType).UnNullify()
        where et.IsEnum && et.Assembly.HasAttribute<DefaultAssemblyCultureAttribute>()
        select et
        );
});
static DescriptionOptions? DescriptionManager_IsEnumsInEntities(Type t)
{
return EnumsInEntities.Value.Contains(t) ? DescriptionOptions.Members | DescriptionOptions.Description : (DescriptionOptions?)null;
}
static DescriptionOptions? DescriptionManager_IsIEntity(Type t)
{
return t.IsInterface && typeof(IEntity).IsAssignableFrom(t) ? DescriptionOptions.Members : (DescriptionOptions?)null;
}
static DescriptionOptions? DescriptionManager_IsQuery(Type t)
{
return t.IsEnum && t.Name.EndsWith("Query") ? DescriptionOptions.Members : (DescriptionOptions?)null;
}
static DescriptionOptions? DescriptionManager_IsSymbolContainer(Type t)
{
return t.IsAbstract && t.IsSealed &&
t.GetFields(BindingFlags.Static | BindingFlags.Public)
.Any(a => typeof(Symbol).IsAssignableFrom(a.FieldType) || typeof(IOperationSymbolContainer).IsAssignableFrom(a.FieldType)) ? DescriptionOptions.Members : (DescriptionOptions?)null;
}
public static string CleanTypeName(Type t)
{
if (t.Name.EndsWith("Entity"))
return t.Name.RemoveSuffix("Entity");
if (t.Name.EndsWith("Embedded"))
return t.Name.RemoveSuffix("Embedded");
if (t.Name.EndsWith("Model"))
return t.Name.RemoveSuffix("Model");
if (t.Name.EndsWith("Symbol"))
return t.Name.RemoveSuffix("Symbol");
return t.Name;
}
public static string RemoveSuffix(this string text, string postfix)
{
if (text.EndsWith(postfix) && text != postfix)
return text.Substring(0, text.Length - postfix.Length);
return text;
}
public static string RemovePrefix(this string text, string prefix)
{
if (text.StartsWith(prefix) && text != prefix)
return text.Substring(prefix.Length);
return text;
}
public static bool IsMList(this Type ft)
{
return ft.ElementType() != null && IsModifiable(ft);
}
public static bool IsModifiable(this Type t)
{
return typeof(Modifiable).IsAssignableFrom(t);
}
public static bool IsIEntity(this Type type)
{
return typeof(IEntity).IsAssignableFrom(type);
}
public static bool IsIRootEntity(this Type type)
{
return typeof(IRootEntity).IsAssignableFrom(type);
}
public static bool IsModifiableEntity(this Type type)
{
return typeof(ModifiableEntity).IsAssignableFrom(type);
}
public static bool IsModifiableIdentifiableOrLite(this Type t)
{
return t.IsModifiable() || t.IsIEntity() || t.IsLite();
}
public static bool IsEntity(this Type ft)
{
return typeof(Entity).IsAssignableFrom(ft);
}
public static bool IsEmbeddedEntity(this Type t)
{
return typeof(EmbeddedEntity).IsAssignableFrom(t);
}
public static bool IsModelEntity(this Type t)
{
return typeof(ModelEntity).IsAssignableFrom(t);
}
public static FieldInfo[] InstanceFieldsInOrder(Type type)
{
using (HeavyProfiler.LogNoStackTrace("Reflector", () => type.Name))
{
var result = type.For(t => t != typeof(object), t => t.BaseType!)
.Reverse()
.SelectMany(t => t.GetFields(flags | BindingFlags.DeclaredOnly).OrderBy(f => f.MetadataToken)).ToArray();
return result;
}
}
public static PropertyInfo[] PublicInstanceDeclaredPropertiesInOrder(Type type)
{
return type.GetProperties(BindingFlags.Public | BindingFlags.Instance | BindingFlags.DeclaredOnly)
.Where(p => !p.HasAttribute<HiddenPropertyAttribute>())
.OrderBy(f => f.MetadataToken).ToArray();
}
public static PropertyInfo[] PublicInstancePropertiesInOrder(Type type)
{
Dictionary<string, PropertyInfo> properties = new Dictionary<string, PropertyInfo>();
foreach (var t in type.Follow(t => t.BaseType).Reverse())
{
foreach (var pi in PublicInstanceDeclaredPropertiesInOrder(t))
{
properties[pi.Name] = pi;
}
}
return properties.Values.ToArray();
}
public static MemberInfo[] GetMemberList<T, S>(Expression<Func<T, S>> lambdaToField) => GetMemberListUntyped(lambdaToField);
public static MemberInfo[] GetMemberListUntyped(LambdaExpression lambdaToField)
{
Expression e = lambdaToField.Body;
if (e is UnaryExpression ue && ue.NodeType == ExpressionType.Convert && ue.Type == typeof(object))
e = ue.Operand;
MemberInfo[] result = GetMemberListBase(e);
return result;
}
public static MemberInfo[] GetMemberListBase(Expression e)
{
return e.Follow(NextExpression).Select(GetMember).NotNull().Reverse().ToArray();
}
static Expression? NextExpression(Expression e)
{
switch (e.NodeType)
{
case ExpressionType.MemberAccess: return ((MemberExpression)e).Expression;
case ExpressionType.Call:
{
MethodCallExpression mce = (MethodCallExpression)e;
var parent = mce.Method.IsExtensionMethod() ? mce.Arguments.FirstEx() : mce.Object;
if (parent != null)
return parent;
break;
}
case ExpressionType.Convert: return ((UnaryExpression)e).Operand;
case ExpressionType.Parameter: return null;
}
throw new InvalidCastException("Not supported {0}".FormatWith(e.NodeType));
}
static readonly string[] collectionMethods = new[] { "Element" };
static MemberInfo? GetMember(Expression e)
{
switch (e.NodeType)
{
case ExpressionType.MemberAccess:
{
MemberExpression me = (MemberExpression)e;
if (me.Member.DeclaringType!.IsLite() && !me.Member.Name.StartsWith("Entity"))
throw new InvalidOperationException("Members of Lite not supported");
return me.Member;
}
case ExpressionType.Call:
{
MethodCallExpression mce = (MethodCallExpression)e;
var parent = mce.Method.IsExtensionMethod() ? mce.Arguments.FirstEx() : mce.Object;
if (parent != null && parent.Type.ElementType() == e.Type)
return parent.Type.GetProperty("Item");
return mce.Method;
}
case ExpressionType.Convert: return ((UnaryExpression)e).Type;
case ExpressionType.Parameter: return null;
default: throw new InvalidCastException("Not supported {0}".FormatWith(e.NodeType));
}
}
internal static FieldInfo FindFieldInfo(Type type, PropertyInfo value)
{
var fi = TryFindFieldInfo(type, value);
if (fi == null)
throw new InvalidOperationException("No FieldInfo for '{0}' found on '{1}'".FormatWith(value.Name, type.Name));
return fi;
}
static readonly BindingFlags privateFlags = BindingFlags.IgnoreCase | BindingFlags.Instance | BindingFlags.NonPublic;
public static FieldInfo? TryFindFieldInfo(Type type, PropertyInfo pi)
{
string? prefix = pi.DeclaringType != type && pi.DeclaringType!.IsInterface ? pi.DeclaringType.FullName + "." : null;
FieldInfo? fi = null;
for (Type? tempType = type; tempType != null && fi == null; tempType = tempType.BaseType)
{
fi = tempType.GetField("<" + pi.Name + ">k__BackingField", privateFlags) ??
(prefix != null ? tempType.GetField("<" + prefix + pi.Name + ">k__BackingField", privateFlags) : null);
if (fi != null)
CheckSignumProcessed(fi);
else
fi = tempType.GetField(pi.Name, privateFlags);
}
return fi;
}
public static ConcurrentDictionary<Assembly, bool> processedAssemblies = new ConcurrentDictionary<Assembly, bool>();
private static void CheckSignumProcessed(FieldInfo fieldInfo)
{
var isProcessed = processedAssemblies.GetOrAdd(fieldInfo.DeclaringType!.Assembly,
a => a.GetCustomAttributes<GeneratedCodeAttribute>().Any(gc => gc.Tool == "SignumTask"));
if (!isProcessed)
throw new InvalidOperationException("Entity {0} has auto-property {1}, but you can not use auto-propertes if the assembly is not processed by 'SignumTask'".FormatWith(fieldInfo.DeclaringType.Name, fieldInfo.FieldType.Name));
}
public static PropertyInfo FindPropertyInfo(FieldInfo fi)
{
var pi = TryFindPropertyInfo(fi);
if (pi == null)
throw new InvalidOperationException("No PropertyInfo for '{0}' found".FormatWith(fi.Name));
return pi;
}
public static PropertyInfo? TryFindPropertyInfo(FieldInfo fi)
{
try
{
using (HeavyProfiler.LogNoStackTrace("TryFindPropertyInfo", () => fi.Name))
{
const BindingFlags flags = BindingFlags.IgnoreCase | BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic;
string? propertyName = null;
if (fi.Name.StartsWith("<"))
{
CheckSignumProcessed(fi);
propertyName = fi.Name.After('<').Before('>');
}
else
propertyName = fi.Name.FirstUpper();
var result = fi.DeclaringType!.GetProperty(propertyName, flags, null, null, new Type[0], null);
if (result != null)
return result;
foreach (Type i in fi.DeclaringType.GetInterfaces())
{
result = fi.DeclaringType.GetProperty(i.FullName + "." + propertyName, flags);
if (result != null)
return result;
}
return null;
}
}
catch (Exception e)
{
throw new InvalidOperationException(e.Message + $" (FieldInfo: {fi.FieldName()} DeclaringType: {fi.DeclaringType!.TypeName()})", e);
}
}
public static bool QueryableProperty(Type type, PropertyInfo pi)
{
QueryablePropertyAttribute? spa = pi.GetCustomAttribute<QueryablePropertyAttribute>();
if (spa != null)
return spa.AvailableForQueries;
FieldInfo? fi = TryFindFieldInfo(type, pi);
if (fi != null && !fi.HasAttribute<IgnoreAttribute>() && !pi.HasAttribute<IgnoreAttribute>())
return true;
if (ExpressionCleaner.HasExpansions(type, pi))
return true;
return false;
}
public static Func<IFormattable?, string?> GetPropertyFormatter(string format, string unitName)
{
if (format != null)
{
if (unitName != null)
return a => a == null ? null : a.ToString(format, CultureInfo.CurrentCulture) + " " + unitName;
else
return a => a?.ToString(format, CultureInfo.CurrentCulture);
}
else
{
if (unitName != null)
return a => a == null ? null : a.ToString() + " " + unitName;
else
return a => a?.ToString();
}
}
public static string? FormatString(PropertyRoute route)
{
PropertyRoute simpleRoute = route.SimplifyToProperty();
FormatAttribute? format = simpleRoute.PropertyInfo!.GetCustomAttribute<FormatAttribute>();
if (format != null)
return format.Format;
var pp = Validator.TryGetPropertyValidator(simpleRoute);
if (pp != null)
{
DateTimePrecisionValidatorAttribute? datetimePrecision = pp.Validators.OfType<DateTimePrecisionValidatorAttribute>().SingleOrDefaultEx();
if (datetimePrecision != null)
return datetimePrecision.FormatString;
TimeSpanPrecisionValidatorAttribute? timeSpanPrecision = pp.Validators.OfType<TimeSpanPrecisionValidatorAttribute>().SingleOrDefaultEx();
if (timeSpanPrecision != null)
return timeSpanPrecision.FormatString;
DecimalsValidatorAttribute? decimals = pp.Validators.OfType<DecimalsValidatorAttribute>().SingleOrDefaultEx();
if (decimals != null)
return "N" + decimals.DecimalPlaces;
StringCaseValidatorAttribute? stringCase = pp.Validators.OfType<StringCaseValidatorAttribute>().SingleOrDefaultEx();
if (stringCase != null)
return stringCase.TextCase == StringCase.Lowercase ? "L" : "U";
}
if (route.IsId() && ReflectionTools.IsNumber(PrimaryKey.Type(route.RootType)))
return "D";
return FormatString(route.Type);
}
public static string? FormatString(Type type)
{
type = type.UnNullify();
if (type.IsEnum)
return null;
if (type == typeof(Date))
return "d";
switch (Type.GetTypeCode(type))
{
case TypeCode.DateTime:
return "g";
case TypeCode.Byte:
case TypeCode.Int16:
case TypeCode.Int32:
case TypeCode.Int64:
case TypeCode.SByte:
case TypeCode.UInt16:
case TypeCode.UInt32:
case TypeCode.UInt64:
return "D";
case TypeCode.Decimal:
case TypeCode.Double:
case TypeCode.Single:
return "N2";
}
return null;
}
public static PropertyInfo PropertyInfo<T>(this T entity, Expression<Func<T, object?>> property) where T : ModifiableEntity
{
return ReflectionTools.GetPropertyInfo(property);
}
public static string NicePropertyName<T>(this T entity, Expression<Func<T, object?>> property) where T : ModifiableEntity
{
return ReflectionTools.GetPropertyInfo(property).NiceName();
}
public static int NumDecimals(string format)
{
var str = (0.0).ToString(format, CultureInfo.InvariantCulture).TryAfter('.');
if (str == null)
return 0;
return str.Length;
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using log4net;
using System;
using System.Collections.Generic;
using System.Reflection;
using Nini.Config;
using OpenSim.Framework;
using OpenSim.Framework.Statistics;
using OpenSim.Services.Connectors;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Services.Interfaces;
using OpenMetaverse;
namespace OpenSim.Region.CoreModules.ServiceConnectorsOut.Inventory
{
public class RemoteXInventoryServicesConnector : ISharedRegionModule, IInventoryService
{
private static readonly ILog m_log =
LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
private bool m_Enabled = false;
private bool m_Initialized = false;
private Scene m_Scene;
private XInventoryServicesConnector m_RemoteConnector;
public Type ReplaceableInterface
{
get { return null; }
}
public string Name
{
get { return "RemoteXInventoryServicesConnector"; }
}
public RemoteXInventoryServicesConnector()
{
}
public RemoteXInventoryServicesConnector(string url)
{
m_RemoteConnector = new XInventoryServicesConnector(url);
}
public RemoteXInventoryServicesConnector(IConfigSource source)
{
Init(source);
}
protected void Init(IConfigSource source)
{
m_RemoteConnector = new XInventoryServicesConnector(source);
}
#region ISharedRegionModule
public void Initialise(IConfigSource source)
{
IConfig moduleConfig = source.Configs["Modules"];
if (moduleConfig != null)
{
string name = moduleConfig.GetString("InventoryServices", "");
if (name == Name)
{
Init(source);
m_Enabled = true;
m_log.Info("[XINVENTORY CONNECTOR]: Remote XInventory enabled");
}
}
}
public void PostInitialise()
{
}
public void Close()
{
}
public void AddRegion(Scene scene)
{
m_Scene = scene;
//m_log.Debug("[XXXX] Adding scene " + m_Scene.RegionInfo.RegionName);
if (!m_Enabled)
return;
if (!m_Initialized)
{
m_Initialized = true;
}
scene.RegisterModuleInterface<IInventoryService>(this);
}
public void RemoveRegion(Scene scene)
{
if (!m_Enabled)
return;
}
public void RegionLoaded(Scene scene)
{
if (!m_Enabled)
return;
m_log.InfoFormat("[XINVENTORY CONNECTOR]: Enabled remote XInventory for region {0}", scene.RegionInfo.RegionName);
}
#endregion ISharedRegionModule
#region IInventoryService
public bool CreateUserInventory(UUID user)
{
return false;
}
public List<InventoryFolderBase> GetInventorySkeleton(UUID userId)
{
return new List<InventoryFolderBase>();
}
public InventoryCollection GetUserInventory(UUID userID)
{
return null;
}
public void GetUserInventory(UUID userID, InventoryReceiptCallback callback)
{
}
public InventoryFolderBase GetRootFolder(UUID userID)
{
return m_RemoteConnector.GetRootFolder(userID);
}
public InventoryFolderBase GetFolderForType(UUID userID, AssetType type)
{
return m_RemoteConnector.GetFolderForType(userID, type);
}
public InventoryCollection GetFolderContent(UUID userID, UUID folderID)
{
return m_RemoteConnector.GetFolderContent(userID, folderID);
}
public List<InventoryItemBase> GetFolderItems(UUID userID, UUID folderID)
{
return m_RemoteConnector.GetFolderItems(userID, folderID);
}
public bool AddFolder(InventoryFolderBase folder)
{
if (folder == null)
return false;
return m_RemoteConnector.AddFolder(folder);
}
public bool UpdateFolder(InventoryFolderBase folder)
{
if (folder == null)
return false;
return m_RemoteConnector.UpdateFolder(folder);
}
public bool MoveFolder(InventoryFolderBase folder)
{
if (folder == null)
return false;
return m_RemoteConnector.MoveFolder(folder);
}
public bool DeleteFolders(UUID ownerID, List<UUID> folderIDs)
{
if (folderIDs == null)
return false;
if (folderIDs.Count == 0)
return false;
return m_RemoteConnector.DeleteFolders(ownerID, folderIDs);
}
public bool PurgeFolder(InventoryFolderBase folder)
{
if (folder == null)
return false;
return m_RemoteConnector.PurgeFolder(folder);
}
public bool AddItem(InventoryItemBase item)
{
if (item == null)
return false;
if (UUID.Zero == item.Folder)
{
InventoryFolderBase f = m_RemoteConnector.GetFolderForType(item.Owner, (AssetType)item.AssetType);
if (f != null)
{
item.Folder = f.ID;
}
else
{
f = m_RemoteConnector.GetRootFolder(item.Owner);
if (f != null)
item.Folder = f.ID;
else
return false;
}
}
return m_RemoteConnector.AddItem(item);
}
public bool UpdateItem(InventoryItemBase item)
{
if (item == null)
return false;
return m_RemoteConnector.UpdateItem(item);
}
public bool MoveItems(UUID ownerID, List<InventoryItemBase> items)
{
if (items == null)
return false;
return m_RemoteConnector.MoveItems(ownerID, items);
}
public bool DeleteItems(UUID ownerID, List<UUID> itemIDs)
{
if (itemIDs == null)
return false;
if (itemIDs.Count == 0)
return true;
return m_RemoteConnector.DeleteItems(ownerID, itemIDs);
}
public InventoryItemBase GetItem(InventoryItemBase item)
{
m_log.DebugFormat("[XINVENTORY CONNECTOR]: GetItem {0}", item.ID);
if (item == null)
return null;
if (m_RemoteConnector == null)
m_log.DebugFormat("[XINVENTORY CONNECTOR]: connector stub is null!!!");
return m_RemoteConnector.GetItem(item);
}
public InventoryFolderBase GetFolder(InventoryFolderBase folder)
{
m_log.DebugFormat("[XINVENTORY CONNECTOR]: GetFolder {0}", folder.ID);
if (folder == null)
return null;
return m_RemoteConnector.GetFolder(folder);
}
public bool HasInventoryForUser(UUID userID)
{
return false;
}
public List<InventoryItemBase> GetActiveGestures(UUID userId)
{
return new List<InventoryItemBase>();
}
public int GetAssetPermissions(UUID userID, UUID assetID)
{
return m_RemoteConnector.GetAssetPermissions(userID, assetID);
}
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using GitVersion.Common;
using GitVersion.Configuration;
using GitVersion.Extensions;
using GitVersion.Logging;
using LibGit2Sharp;
namespace GitVersion.VersionCalculation
{
    // Computes semantic versions in GitVersion's "mainline development" mode:
    // every commit merged to (or made directly on) the mainline branch bumps the
    // version, with the increment inferred from commit/merge messages or branch
    // configuration.
    internal class MainlineVersionCalculator : IMainlineVersionCalculator
    {
        private readonly ILog log;
        private readonly IRepositoryMetadataProvider repositoryMetadataProvider;
        private readonly Lazy<GitVersionContext> versionContext;
        // Context is lazy so construction can happen before repository analysis.
        private GitVersionContext context => versionContext.Value;
        public MainlineVersionCalculator(ILog log, IRepositoryMetadataProvider repositoryMetadataProvider, Lazy<GitVersionContext> versionContext)
        {
            this.log = log ?? throw new ArgumentNullException(nameof(log));
            this.repositoryMetadataProvider = repositoryMetadataProvider ?? throw new ArgumentNullException(nameof(repositoryMetadataProvider));
            this.versionContext = versionContext ?? throw new ArgumentNullException(nameof(versionContext));
        }
        // Walks the mainline commit log from the base version source, incrementing
        // the version per merge (aggregated) and per direct commit; non-mainline
        // branches additionally get one increment for the act of branching.
        // Throws NotSupportedException if the base version carries a pre-release tag.
        public SemanticVersion FindMainlineModeVersion(BaseVersion baseVersion)
        {
            if (baseVersion.SemanticVersion.PreReleaseTag.HasTag())
            {
                throw new NotSupportedException("Mainline development mode doesn't yet support pre-release tags on master");
            }
            using (log.IndentLog("Using mainline development mode to calculate current version"))
            {
                var mainlineVersion = baseVersion.SemanticVersion;
                // Forward merge / PR
                //          *  feature/foo
                //         / |
                // master *  *
                //
                var mergeBase = baseVersion.BaseVersionSource;
                var mainline = GetMainline(baseVersion.BaseVersionSource);
                var mainlineTip = mainline.Tip;
                // when the current branch is not mainline, find the effective mainline tip for versioning the branch
                if (!context.CurrentBranch.IsSameBranch(mainline))
                {
                    mergeBase = FindMergeBaseBeforeForwardMerge(baseVersion.BaseVersionSource, mainline, out mainlineTip);
                    log.Info($"Current branch ({context.CurrentBranch.FriendlyName}) was branch from {mergeBase}");
                }
                var mainlineCommitLog = repositoryMetadataProvider.GetMainlineCommitLog(baseVersion.BaseVersionSource, mainlineTip);
                var directCommits = new List<Commit>(mainlineCommitLog.Count);
                // An explicit next-version in config suppresses commit-driven increments.
                if (string.IsNullOrEmpty(context.Configuration.NextVersion))
                {
                    // Scans commit log in reverse, aggregating merge commits
                    foreach (var commit in mainlineCommitLog)
                    {
                        directCommits.Add(commit);
                        if (commit.Parents.Count() > 1)
                        {
                            mainlineVersion = AggregateMergeCommitIncrement(commit, directCommits, mainlineVersion, mainline);
                        }
                    }
                    // This will increment for any direct commits on mainline
                    mainlineVersion = IncrementForEachCommit(directCommits, mainlineVersion, mainline);
                }
                mainlineVersion.BuildMetaData = CreateVersionBuildMetaData(mergeBase);
                // branches other than master always get a bump for the act of branching
                if ((!context.CurrentBranch.IsSameBranch(mainline)) && (string.IsNullOrEmpty(context.Configuration.NextVersion)))
                {
                    var branchIncrement = FindMessageIncrement(null, context.CurrentCommit, mergeBase, mainlineCommitLog);
                    log.Info($"Performing {branchIncrement} increment for current branch ");
                    mainlineVersion = mainlineVersion.IncrementVersion(branchIncrement);
                }
                return mainlineVersion;
            }
        }
        // Builds the +metadata part: commit count since the base version source,
        // branch name, full and short SHA, commit timestamp, and dirty-tree count.
        public SemanticVersionBuildMetaData CreateVersionBuildMetaData(Commit baseVersionSource)
        {
            var commitLog = repositoryMetadataProvider.GetCommitLog(baseVersionSource, context.CurrentCommit);
            var commitsSinceTag = commitLog.Count();
            log.Info($"{commitsSinceTag} commits found between {baseVersionSource.Sha} and {context.CurrentCommit.Sha}");
            var shortSha = repositoryMetadataProvider.ShortenObjectId(context.CurrentCommit);
            return new SemanticVersionBuildMetaData(
                baseVersionSource.Sha,
                commitsSinceTag,
                context.CurrentBranch.FriendlyName,
                context.CurrentCommit.Sha,
                shortSha,
                context.CurrentCommit.When(),
                context.NumberOfUncommittedChanges);
        }
        // Applies the increments implied by one merge commit: first releases any
        // accumulated direct commits individually, then bumps once for the merged
        // branch. Clears directCommits as a side effect.
        private SemanticVersion AggregateMergeCommitIncrement(Commit commit, List<Commit> directCommits, SemanticVersion mainlineVersion, Branch mainline)
        {
            // Merge commit, process all merged commits as a batch
            var mergeCommit = commit;
            var mergedHead = GetMergedHead(mergeCommit);
            var findMergeBase = repositoryMetadataProvider.FindMergeBase(mergeCommit.Parents.First(), mergedHead);
            var findMessageIncrement = FindMessageIncrement(mergeCommit, mergedHead, findMergeBase, directCommits);
            // If this collection is not empty there has been some direct commits against master
            // Treat each commit as it's own 'release', we need to do this before we increment the branch
            mainlineVersion = IncrementForEachCommit(directCommits, mainlineVersion, mainline);
            directCommits.Clear();
            // Finally increment for the branch
            mainlineVersion = mainlineVersion.IncrementVersion(findMessageIncrement);
            log.Info($"Merge commit {mergeCommit.Sha} incremented base versions {findMessageIncrement}, now {mainlineVersion}");
            return mainlineVersion;
        }
        // Chooses the mainline branch for the current commit. Preference order:
        // sole candidate, the current branch itself, the branch where the merge
        // base was a direct commit, then simply the first candidate found.
        private Branch GetMainline(Commit baseVersionSource)
        {
            var mainlineBranchConfigs = context.FullConfiguration.Branches.Where(b => b.Value.IsMainline == true).ToList();
            var mainlineBranches = repositoryMetadataProvider.GetMainlineBranches(context.CurrentCommit, mainlineBranchConfigs);
            var allMainlines = mainlineBranches.Values.SelectMany(branches => branches.Select(b => b.FriendlyName));
            log.Info("Found possible mainline branches: " + string.Join(", ", allMainlines));
            // Find closest mainline branch
            var firstMatchingCommit = context.CurrentBranch.Commits.First(c => mainlineBranches.ContainsKey(c.Sha));
            var possibleMainlineBranches = mainlineBranches[firstMatchingCommit.Sha];
            if (possibleMainlineBranches.Count == 1)
            {
                var mainlineBranch = possibleMainlineBranches[0];
                log.Info("Mainline for current branch is " + mainlineBranch.FriendlyName);
                return mainlineBranch;
            }
            // prefer current branch, if it is a mainline branch
            if (possibleMainlineBranches.Any(context.CurrentBranch.IsSameBranch))
            {
                log.Info($"Choosing {context.CurrentBranch.FriendlyName} as mainline because it is the current branch");
                return context.CurrentBranch;
            }
            // prefer a branch on which the merge base was a direct commit, if there is such a branch
            var firstMatchingCommitBranch = possibleMainlineBranches.FirstOrDefault(b => repositoryMetadataProvider.GetMatchingCommitBranch(baseVersionSource, b, firstMatchingCommit));
            if (firstMatchingCommitBranch != null)
            {
                var message = string.Format(
                    "Choosing {0} as mainline because {1}'s merge base was a direct commit to {0}",
                    firstMatchingCommitBranch.FriendlyName,
                    context.CurrentBranch.FriendlyName);
                log.Info(message);
                return firstMatchingCommitBranch;
            }
            var chosenMainline = possibleMainlineBranches[0];
            log.Info($"Multiple mainlines ({string.Join(", ", possibleMainlineBranches.Select(b => b.FriendlyName))}) have the same merge base for the current branch, choosing {chosenMainline.FriendlyName} because we found that branch first...");
            return chosenMainline;
        }
        /// <summary>
        /// Gets the commit on mainline at which <paramref name="mergeBase"/> was fully integrated.
        /// </summary>
        /// <param name="mainlineCommitLog">The collection of commits made directly to mainline, in reverse order.</param>
        /// <param name="mergeBase">The best possible merge base between <paramref name="mainlineTip"/> and the current commit.</param>
        /// <param name="mainlineTip">The tip of the mainline branch.</param>
        /// <returns>The commit on mainline at which <paramref name="mergeBase"/> was merged, if such a commit exists; otherwise, <paramref name="mainlineTip"/>.</returns>
        /// <remarks>
        /// This method gets the most recent commit on mainline that should be considered for versioning the current branch.
        /// </remarks>
        private Commit GetEffectiveMainlineTip(IEnumerable<Commit> mainlineCommitLog, Commit mergeBase, Commit mainlineTip)
        {
            // find the commit that merged mergeBase into mainline
            foreach (var commit in mainlineCommitLog)
            {
                if (commit == mergeBase || commit.Parents.Contains(mergeBase))
                {
                    log.Info($"Found branch merge point; choosing {commit} as effective mainline tip");
                    return commit;
                }
            }
            return mainlineTip;
        }
        /// <summary>
        /// Gets the best possible merge base between the current commit and <paramref name="mainline"/> that is not the child of a forward merge.
        /// </summary>
        /// <param name="baseVersionSource">The commit that establishes the contextual base version.</param>
        /// <param name="mainline">The mainline branch.</param>
        /// <param name="mainlineTip">The commit on mainline at which the returned merge base was fully integrated.</param>
        /// <returns>The best possible merge base between the current commit and <paramref name="mainline"/> that is not the child of a forward merge.</returns>
        private Commit FindMergeBaseBeforeForwardMerge(Commit baseVersionSource, Branch mainline, out Commit mainlineTip)
        {
            var mergeBase = repositoryMetadataProvider.FindMergeBase(context.CurrentCommit, mainline.Tip);
            var mainlineCommitLog = repositoryMetadataProvider.GetMainlineCommitLog(baseVersionSource, mainline.Tip);
            // find the mainline commit effective for versioning the current branch
            mainlineTip = GetEffectiveMainlineTip(mainlineCommitLog, mergeBase, mainline.Tip);
            // detect forward merge and rewind mainlineTip to before it
            if (mergeBase == context.CurrentCommit && !mainlineCommitLog.Contains(mergeBase))
            {
                var mainlineTipPrevious = mainlineTip.Parents.FirstOrDefault();
                if (mainlineTipPrevious != null)
                {
                    var message = $"Detected forward merge at {mainlineTip}; rewinding mainline to previous commit {mainlineTipPrevious}";
                    log.Info(message);
                    // re-do mergeBase detection before the forward merge
                    mergeBase = repositoryMetadataProvider.FindMergeBase(context.CurrentCommit, mainlineTipPrevious);
                    mainlineTip = GetEffectiveMainlineTip(mainlineCommitLog, mergeBase, mainlineTipPrevious);
                }
            }
            return mergeBase;
        }
        // One version increment per direct mainline commit; the increment comes from
        // the commit message, falling back to the mainline branch's default.
        private SemanticVersion IncrementForEachCommit(IEnumerable<Commit> directCommits, SemanticVersion mainlineVersion, Branch mainline)
        {
            foreach (var directCommit in directCommits)
            {
                var directCommitIncrement = IncrementStrategyFinder.GetIncrementForCommits(context, new[]
                {
                    directCommit
                }) ?? IncrementStrategyFinder.FindDefaultIncrementForBranch(context, mainline.FriendlyName);
                mainlineVersion = mainlineVersion.IncrementVersion(directCommitIncrement);
                log.Info($"Direct commit on master {directCommit.Sha} incremented base versions {directCommitIncrement}, now {mainlineVersion}");
            }
            return mainlineVersion;
        }
        // Increment implied by the commits between mergedHead and the merge base;
        // also removes those commits from commitLog so they are not double-counted.
        // mergeCommit may be null (branch-increment case); falls back to merge-message parsing.
        private VersionField FindMessageIncrement(Commit mergeCommit, Commit mergedHead, Commit findMergeBase, List<Commit> commitLog)
        {
            var commits = repositoryMetadataProvider.GetMergeBaseCommits(mergeCommit, mergedHead, findMergeBase);
            commitLog.RemoveAll(c => commits.Any(c1 => c1.Sha == c.Sha));
            return IncrementStrategyFinder.GetIncrementForCommits(context, commits)
                   ?? TryFindIncrementFromMergeMessage(mergeCommit);
        }
        // Parses the merge message for the merged branch name and uses that branch's
        // configured increment, unless it is Inherit; otherwise the config default.
        private VersionField TryFindIncrementFromMergeMessage(Commit mergeCommit)
        {
            if (mergeCommit != null)
            {
                var mergeMessage = new MergeMessage(mergeCommit.Message, context.FullConfiguration);
                if (mergeMessage.MergedBranch != null)
                {
                    var config = context.FullConfiguration.GetConfigForBranch(mergeMessage.MergedBranch);
                    if (config?.Increment != null && config.Increment != IncrementStrategy.Inherit)
                    {
                        return config.Increment.Value.ToVersionField();
                    }
                }
            }
            // Fallback to config increment value
            return IncrementStrategyFinder.FindDefaultIncrementForBranch(context);
        }
        // The single non-first parent of a merge commit (the merged branch's head).
        // Octopus merges (more than one merge source) are rejected.
        private static Commit GetMergedHead(Commit mergeCommit)
        {
            var parents = mergeCommit.Parents.Skip(1).ToList();
            if (parents.Count > 1)
                throw new NotSupportedException("Mainline development does not support more than one merge source in a single commit yet");
            return parents.Single();
        }
    }
}
| |
using System;
using System.Collections.Specialized;
using System.Linq;
using Microsoft.Practices.ServiceLocation;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Moq;
using Prism.Regions;
using Prism.Wpf.Tests.Mocks;
namespace Prism.Wpf.Tests.Regions
{
    /// <summary>
    /// Unit tests for <see cref="Region"/>: view add/remove/lookup, scoped
    /// region managers, activation, collection-changed notifications,
    /// navigation delegation, and sort-hint ordering.
    /// </summary>
    [TestClass]
    public class RegionFixture
    {
        [TestMethod]
        public void WhenRegionConstructed_SortComparisonIsDefault()
        {
            IRegion region = new Region();

            Assert.IsNotNull(region.SortComparison);
            Assert.AreEqual(region.SortComparison, Region.DefaultSortComparison);
        }

        [TestMethod]
        public void CanAddContentToRegion()
        {
            IRegion region = new Region();

            Assert.AreEqual(0, region.Views.Cast<object>().Count());

            region.Add(new object());

            Assert.AreEqual(1, region.Views.Cast<object>().Count());
        }

        [TestMethod]
        public void CanRemoveContentFromRegion()
        {
            IRegion region = new Region();
            object view = new object();

            region.Add(view);
            region.Remove(view);

            Assert.AreEqual(0, region.Views.Cast<object>().Count());
        }

        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void RemoveInexistentViewThrows()
        {
            IRegion region = new Region();
            object view = new object();

            // Remove is expected to throw ArgumentException for a view that
            // was never added; any code after this line would be unreachable.
            region.Remove(view);
        }

        [TestMethod]
        public void RegionExposesCollectionOfContainedViews()
        {
            IRegion region = new Region();
            object view = new object();

            region.Add(view);

            var views = region.Views;

            Assert.IsNotNull(views);
            Assert.AreEqual(1, views.Cast<object>().Count());
            Assert.AreSame(view, views.Cast<object>().ElementAt(0));
        }

        [TestMethod]
        public void CanAddAndRetrieveNamedViewInstance()
        {
            IRegion region = new Region();
            object myView = new object();

            region.Add(myView, "MyView");
            object returnedView = region.GetView("MyView");

            Assert.IsNotNull(returnedView);
            Assert.AreSame(returnedView, myView);
        }

        [TestMethod]
        [ExpectedException(typeof(InvalidOperationException))]
        public void AddingDuplicateNamedViewThrows()
        {
            IRegion region = new Region();

            region.Add(new object(), "MyView");
            region.Add(new object(), "MyView");
        }

        [TestMethod]
        public void AddNamedViewIsAlsoListedInViewsCollection()
        {
            IRegion region = new Region();
            object myView = new object();

            region.Add(myView, "MyView");

            Assert.AreEqual(1, region.Views.Cast<object>().Count());
            Assert.AreSame(myView, region.Views.Cast<object>().ElementAt(0));
        }

        [TestMethod]
        public void GetViewReturnsNullWhenViewDoesNotExistInRegion()
        {
            IRegion region = new Region();

            Assert.IsNull(region.GetView("InexistentView"));
        }

        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void GetViewWithNullOrEmptyStringThrows()
        {
            IRegion region = new Region();

            region.GetView(string.Empty);
        }

        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void AddNamedViewWithNullOrEmptyStringNameThrows()
        {
            IRegion region = new Region();

            region.Add(new object(), string.Empty);
        }

        [TestMethod]
        public void GetViewReturnsNullAfterRemovingViewFromRegion()
        {
            IRegion region = new Region();
            object myView = new object();

            region.Add(myView, "MyView");
            region.Remove(myView);

            Assert.IsNull(region.GetView("MyView"));
        }

        [TestMethod]
        public void AddViewPassesSameScopeByDefaultToView()
        {
            var regionManager = new MockRegionManager();
            IRegion region = new Region();
            region.RegionManager = regionManager;
            var myView = new MockDependencyObject();

            region.Add(myView);

            // Without an explicit scope the region's own manager is attached.
            Assert.AreSame(regionManager, myView.GetValue(RegionManager.RegionManagerProperty));
        }

        [TestMethod]
        public void AddViewPassesSameScopeByDefaultToNamedView()
        {
            var regionManager = new MockRegionManager();
            IRegion region = new Region();
            region.RegionManager = regionManager;
            var myView = new MockDependencyObject();

            region.Add(myView, "MyView");

            Assert.AreSame(regionManager, myView.GetValue(RegionManager.RegionManagerProperty));
        }

        [TestMethod]
        public void AddViewPassesDiferentScopeWhenAdding()
        {
            var regionManager = new MockRegionManager();
            IRegion region = new Region();
            region.RegionManager = regionManager;
            var myView = new MockDependencyObject();

            // createRegionManagerScope: true must attach a *new* manager.
            region.Add(myView, "MyView", true);

            Assert.AreNotSame(regionManager, myView.GetValue(RegionManager.RegionManagerProperty));
        }

        [TestMethod]
        public void CreatingNewScopesAsksTheRegionManagerForNewInstance()
        {
            var regionManager = new MockRegionManager();
            IRegion region = new Region();
            region.RegionManager = regionManager;
            var myView = new object();

            region.Add(myView, "MyView", true);

            Assert.IsTrue(regionManager.CreateRegionManagerCalled);
        }

        [TestMethod]
        public void AddViewReturnsExistingRegionManager()
        {
            var regionManager = new MockRegionManager();
            IRegion region = new Region();
            region.RegionManager = regionManager;
            var myView = new object();

            var returnedRegionManager = region.Add(myView, "MyView", false);

            Assert.AreSame(regionManager, returnedRegionManager);
        }

        [TestMethod]
        public void AddViewReturnsNewRegionManager()
        {
            var regionManager = new MockRegionManager();
            IRegion region = new Region();
            region.RegionManager = regionManager;
            var myView = new object();

            var returnedRegionManager = region.Add(myView, "MyView", true);

            Assert.AreNotSame(regionManager, returnedRegionManager);
        }

        [TestMethod]
        public void AddingNonDependencyObjectToRegionDoesNotThrow()
        {
            IRegion region = new Region();
            object model = new object();

            region.Add(model);

            Assert.AreEqual(1, region.Views.Cast<object>().Count());
        }

        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void ActivateNonAddedViewThrows()
        {
            IRegion region = new Region();
            object nonAddedView = new object();

            region.Activate(nonAddedView);
        }

        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void DeactivateNonAddedViewThrows()
        {
            IRegion region = new Region();
            object nonAddedView = new object();

            region.Deactivate(nonAddedView);
        }

        [TestMethod]
        [ExpectedException(typeof(ArgumentNullException))]
        public void ActivateNullViewThrows()
        {
            IRegion region = new Region();

            region.Activate(null);
        }

        [TestMethod]
        public void AddViewRaisesCollectionViewEvent()
        {
            bool viewAddedCalled = false;

            IRegion region = new Region();
            region.Views.CollectionChanged += (sender, e) =>
            {
                if (e.Action == NotifyCollectionChangedAction.Add)
                    viewAddedCalled = true;
            };

            object model = new object();
            Assert.IsFalse(viewAddedCalled);
            region.Add(model);

            Assert.IsTrue(viewAddedCalled);
        }

        [TestMethod]
        public void ViewAddedEventPassesTheViewAddedInTheEventArgs()
        {
            object viewAdded = null;

            IRegion region = new Region();
            region.Views.CollectionChanged += (sender, e) =>
            {
                if (e.Action == NotifyCollectionChangedAction.Add)
                {
                    viewAdded = e.NewItems[0];
                }
            };

            object model = new object();
            Assert.IsNull(viewAdded);
            region.Add(model);

            Assert.IsNotNull(viewAdded);
            Assert.AreSame(model, viewAdded);
        }

        [TestMethod]
        public void RemoveViewFiresViewRemovedEvent()
        {
            bool viewRemoved = false;

            IRegion region = new Region();
            object model = new object();
            region.Views.CollectionChanged += (sender, e) =>
            {
                if (e.Action == NotifyCollectionChangedAction.Remove)
                    viewRemoved = true;
            };

            region.Add(model);
            Assert.IsFalse(viewRemoved);
            region.Remove(model);

            Assert.IsTrue(viewRemoved);
        }

        [TestMethod]
        public void ViewRemovedEventPassesTheViewRemovedInTheEventArgs()
        {
            object removedView = null;

            IRegion region = new Region();
            region.Views.CollectionChanged += (sender, e) =>
            {
                if (e.Action == NotifyCollectionChangedAction.Remove)
                    removedView = e.OldItems[0];
            };

            object model = new object();
            region.Add(model);
            Assert.IsNull(removedView);
            region.Remove(model);

            Assert.AreSame(model, removedView);
        }

        [TestMethod]
        public void ShowViewFiresViewShowedEvent()
        {
            bool viewActivated = false;

            IRegion region = new Region();
            object model = new object();
            region.ActiveViews.CollectionChanged += (o, e) =>
            {
                if (e.Action == NotifyCollectionChangedAction.Add && e.NewItems.Contains(model))
                    viewActivated = true;
            };

            region.Add(model);
            Assert.IsFalse(viewActivated);
            region.Activate(model);

            Assert.IsTrue(viewActivated);
        }

        [TestMethod]
        public void AddingSameViewTwiceThrows()
        {
            object view = new object();
            IRegion region = new Region();
            region.Add(view);

            // try/catch (rather than ExpectedException) so the exception
            // message can be asserted as well as the exception type.
            try
            {
                region.Add(view);
                Assert.Fail();
            }
            catch (InvalidOperationException ex)
            {
                Assert.AreEqual("View already exists in region.", ex.Message);
            }
            catch
            {
                Assert.Fail();
            }
        }

        [TestMethod]
        public void RemovingViewAlsoRemovesItFromActiveViews()
        {
            IRegion region = new Region();
            object model = new object();
            region.Add(model);
            region.Activate(model);

            Assert.IsTrue(region.ActiveViews.Contains(model));

            region.Remove(model);

            Assert.IsFalse(region.ActiveViews.Contains(model));
        }

        [TestMethod]
        public void ShouldGetNotificationWhenContextChanges()
        {
            IRegion region = new Region();
            bool contextChanged = false;
            region.PropertyChanged += (s, args) => { if (args.PropertyName == "Context") contextChanged = true; };

            region.Context = "MyNewContext";

            Assert.IsTrue(contextChanged);
        }

        [TestMethod]
        [ExpectedException(typeof(InvalidOperationException))]
        public void ChangingNameOnceItIsSetThrows()
        {
            var region = new Region();
            region.Name = "MyRegion";

            region.Name = "ChangedRegionName";
        }

        /// <summary>
        /// Minimal <see cref="IRegionManager"/> stub that records whether
        /// <see cref="CreateRegionManager"/> was called; all other members throw.
        /// </summary>
        private class MockRegionManager : IRegionManager
        {
            public bool CreateRegionManagerCalled;

            public IRegionManager CreateRegionManager()
            {
                CreateRegionManagerCalled = true;
                return new MockRegionManager();
            }

            public IRegionManager AddToRegion(string regionName, object view)
            {
                throw new NotImplementedException();
            }

            public IRegionManager RegisterViewWithRegion(string regionName, Type viewType)
            {
                throw new NotImplementedException();
            }

            public IRegionManager RegisterViewWithRegion(string regionName, Func<object> getContentDelegate)
            {
                throw new NotImplementedException();
            }

            public void RequestNavigate(string regionName, Uri source, Action<NavigationResult> navigationCallback)
            {
                throw new NotImplementedException();
            }

            public void RequestNavigate(string regionName, Uri source)
            {
                throw new NotImplementedException();
            }

            public void RequestNavigate(string regionName, string source, Action<NavigationResult> navigationCallback)
            {
                throw new NotImplementedException();
            }

            public void RequestNavigate(string regionName, string source)
            {
                throw new NotImplementedException();
            }

            public void RequestNavigate(string regionName, Uri target, Action<NavigationResult> navigationCallback, NavigationParameters navigationParameters)
            {
                throw new NotImplementedException();
            }

            public void RequestNavigate(string regionName, string target, Action<NavigationResult> navigationCallback, NavigationParameters navigationParameters)
            {
                throw new NotImplementedException();
            }

            public void RequestNavigate(string regionName, Uri target, NavigationParameters navigationParameters)
            {
                throw new NotImplementedException();
            }

            public void RequestNavigate(string regionName, string target, NavigationParameters navigationParameters)
            {
                throw new NotImplementedException();
            }

            public IRegionCollection Regions
            {
                get { throw new NotImplementedException(); }
            }

            public IRegion AttachNewRegion(object regionTarget, string regionName)
            {
                throw new NotImplementedException();
            }

            public bool Navigate(Uri source)
            {
                throw new NotImplementedException();
            }
        }

        [TestMethod]
        public void NavigateDelegatesToIRegionNavigationService()
        {
            try
            {
                // Prepare
                IRegion region = new Region();

                object view = new object();
                region.Add(view);

                Uri uri = new Uri(view.GetType().Name, UriKind.Relative);
                Action<NavigationResult> navigationCallback = nr => { };
                NavigationParameters navigationParameters = new NavigationParameters();

                Mock<IRegionNavigationService> mockRegionNavigationService = new Mock<IRegionNavigationService>();
                mockRegionNavigationService.Setup(x => x.RequestNavigate(uri, navigationCallback, navigationParameters)).Verifiable();

                Mock<IServiceLocator> mockServiceLocator = new Mock<IServiceLocator>();
                mockServiceLocator.Setup(x => x.GetInstance<IRegionNavigationService>()).Returns(mockRegionNavigationService.Object);
                ServiceLocator.SetLocatorProvider(() => mockServiceLocator.Object);

                // Act
                region.RequestNavigate(uri, navigationCallback, navigationParameters);

                // Verify
                mockRegionNavigationService.VerifyAll();
            }
            finally
            {
                // Reset the global locator so other tests are not affected.
                ServiceLocator.SetLocatorProvider(() => null);
            }
        }

        [TestMethod]
        public void WhenViewsWithSortHintsAdded_RegionSortsViews()
        {
            IRegion region = new Region();

            // Sort hints are "C", "A", "B" respectively, so the expected
            // order is view2 ("A"), view3 ("B"), view1 ("C").
            object view1 = new ViewOrder1();
            object view2 = new ViewOrder2();
            object view3 = new ViewOrder3();

            region.Add(view1);
            region.Add(view2);
            region.Add(view3);

            Assert.AreEqual(3, region.Views.Count());
            Assert.AreSame(view2, region.Views.ElementAt(0));
            Assert.AreSame(view3, region.Views.ElementAt(1));
            Assert.AreSame(view1, region.Views.ElementAt(2));
        }

        [TestMethod]
        public void WhenViewHasBeenRemovedAndRegionManagerPropertyCleared_ThenItCanBeAddedAgainToARegion()
        {
            IRegion region = new Region { RegionManager = new MockRegionManager() };

            var view = new MockFrameworkElement();

            var scopedRegionManager = region.Add(view, null, true);

            Assert.AreEqual(view, region.Views.First());

            region.Remove(view);
            view.ClearValue(RegionManager.RegionManagerProperty);

            Assert.AreEqual(0, region.Views.Cast<object>().Count());

            var newScopedRegion = region.Add(view, null, true);

            Assert.AreEqual(view, region.Views.First());
            Assert.AreSame(newScopedRegion, view.GetValue(RegionManager.RegionManagerProperty));
        }

        [ViewSortHint("C")]
        private class ViewOrder1 { };
        [ViewSortHint("A")]
        private class ViewOrder2 { };
        [ViewSortHint("B")]
        private class ViewOrder3 { };
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.