// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
public struct ValX0 {}
public struct ValY0 {}
public struct ValX1<T> {}
public struct ValY1<T> {}
public struct ValX2<T,U> {}
public struct ValY2<T,U>{}
public struct ValX3<T,U,V>{}
public struct ValY3<T,U,V>{}
public class RefX0 {}
public class RefY0 {}
public class RefX1<T> {}
public class RefY1<T> {}
public class RefX2<T,U> {}
public class RefY2<T,U>{}
public class RefX3<T,U,V>{}
public class RefY3<T,U,V>{}
public class GenOuter<U>
{
public interface IGen<T>
{
void _Init(T fld1);
bool InstVerify(System.Type t1);
}
}
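// Note: IGen<T> is nested inside the generic outer class GenOuter<U>, so every implementation
// below must bind both type arguments, e.g. "GenInt : GenOuter<int>.IGen<int>" binds U = int and
// T = int, while "GenDouble : GenOuter<int>.IGen<double>" keeps U = int but binds T = double.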
public class GenInt : GenOuter<int>.IGen<int>
{
int Fld1;
public void _Init(int fld1)
{
Fld1 = fld1;
}
public bool InstVerify(System.Type t1)
{
bool result = true;
if (!(Fld1.GetType().Equals(t1)))
{
result = false;
Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(GenOuter<int>.IGen<int>) );
}
return result;
}
}
public class GenDouble : GenOuter<int>.IGen<double>
{
double Fld1;
public void _Init(double fld1)
{
Fld1 = fld1;
}
public bool InstVerify(System.Type t1)
{
bool result = true;
if (!(Fld1.GetType().Equals(t1)))
{
result = false;
Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(GenOuter<int>.IGen<double>) );
}
return result;
}
}
public class GenString : GenOuter<int>.IGen<String>
{
string Fld1;
public void _Init(string fld1)
{
Fld1 = fld1;
}
public bool InstVerify(System.Type t1)
{
bool result = true;
if (!(Fld1.GetType().Equals(t1)))
{
result = false;
Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(GenOuter<int>.IGen<string>) );
}
return result;
}
}
public class GenObject : GenOuter<int>.IGen<object>
{
object Fld1;
public void _Init(object fld1)
{
Fld1 = fld1;
}
public bool InstVerify(System.Type t1)
{
bool result = true;
if (!(Fld1.GetType().Equals(t1)))
{
result = false;
Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(GenOuter<int>.IGen<object>) );
}
return result;
}
}
public class GenGuid : GenOuter<int>.IGen<Guid>
{
Guid Fld1;
public void _Init(Guid fld1)
{
Fld1 = fld1;
}
public bool InstVerify(System.Type t1)
{
bool result = true;
if (!(Fld1.GetType().Equals(t1)))
{
result = false;
Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(GenOuter<int>.IGen<Guid>) );
}
return result;
}
}
public class GenConstructedReference : GenOuter<int>.IGen<RefX1<int>>
{
RefX1<int> Fld1;
public void _Init(RefX1<int> fld1)
{
Fld1 = fld1;
}
public bool InstVerify(System.Type t1)
{
bool result = true;
if (!(Fld1.GetType().Equals(t1)))
{
result = false;
Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(GenOuter<int>.IGen<RefX1<int>>) );
}
return result;
}
}
public class GenConstructedValue : GenOuter<int>.IGen<ValX1<string>>
{
ValX1<string> Fld1;
public void _Init(ValX1<string> fld1)
{
Fld1 = fld1;
}
public bool InstVerify(System.Type t1)
{
bool result = true;
if (!(Fld1.GetType().Equals(t1)))
{
result = false;
Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(GenOuter<int>.IGen<ValX1<string>>) );
}
return result;
}
}
public class Gen1DIntArray : GenOuter<int>.IGen<int[]>
{
int[] Fld1;
public void _Init(int[] fld1)
{
Fld1 = fld1;
}
public bool InstVerify(System.Type t1)
{
bool result = true;
if (!(Fld1.GetType().Equals(t1)))
{
result = false;
Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(GenOuter<int>.IGen<int[]>) );
}
return result;
}
}
public class Gen2DStringArray : GenOuter<int>.IGen<string[,]>
{
string[,] Fld1;
public void _Init(string[,] fld1)
{
Fld1 = fld1;
}
public bool InstVerify(System.Type t1)
{
bool result = true;
if (!(Fld1.GetType().Equals(t1)))
{
result = false;
Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(GenOuter<int>.IGen<string[,]>) );
}
return result;
}
}
public class GenJaggedObjectArray : GenOuter<int>.IGen<object[][]>
{
object[][] Fld1;
public void _Init(object[][] fld1)
{
Fld1 = fld1;
}
public bool InstVerify(System.Type t1)
{
bool result = true;
if (!(Fld1.GetType().Equals(t1)))
{
result = false;
Console.WriteLine("Failed to verify type of Fld1 in: " + typeof(GenOuter<int>.IGen<object[][]>) );
}
return result;
}
}
public class Test
{
public static int counter = 0;
public static bool result = true;
public static void Eval(bool exp)
{
counter++;
if (!exp)
{
result = exp;
Console.WriteLine("Test Failed at location: " + counter);
}
}
public static int Main()
{
GenOuter<int>.IGen<int> IGenInt = new GenInt();
IGenInt._Init(new int());
Eval(IGenInt.InstVerify(typeof(int)));
GenOuter<int>.IGen<double> IGenDouble = new GenDouble();
IGenDouble._Init(new double());
Eval(IGenDouble.InstVerify(typeof(double)));
GenOuter<int>.IGen<string> IGenString = new GenString();
IGenString._Init("string");
Eval(IGenString.InstVerify(typeof(string)));
GenOuter<int>.IGen<object> IGenObject = new GenObject();
IGenObject._Init(new object());
Eval(IGenObject.InstVerify(typeof(object)));
GenOuter<int>.IGen<Guid> IGenGuid = new GenGuid();
IGenGuid._Init(new Guid());
Eval(IGenGuid.InstVerify(typeof(Guid)));
GenOuter<int>.IGen<RefX1<int>> IGenConstructedReference = new GenConstructedReference();
IGenConstructedReference._Init(new RefX1<int>());
Eval(IGenConstructedReference.InstVerify(typeof(RefX1<int>)));
GenOuter<int>.IGen<ValX1<string>> IGenConstructedValue = new GenConstructedValue();
IGenConstructedValue._Init(new ValX1<string>());
Eval(IGenConstructedValue.InstVerify(typeof(ValX1<string>)));
GenOuter<int>.IGen<int[]> IGen1DIntArray = new Gen1DIntArray();
IGen1DIntArray._Init(new int[1]);
Eval(IGen1DIntArray.InstVerify(typeof(int[])));
GenOuter<int>.IGen<string[,]> IGen2DStringArray = new Gen2DStringArray();
IGen2DStringArray._Init(new string[1,1]);
Eval(IGen2DStringArray.InstVerify(typeof(string[,])));
GenOuter<int>.IGen<object[][]> IGenJaggedObjectArray = new GenJaggedObjectArray();
IGenJaggedObjectArray._Init(new object[1][]);
Eval(IGenJaggedObjectArray.InstVerify(typeof(object[][])));
if (result)
{
Console.WriteLine("Test Passed");
return 100;
}
else
{
Console.WriteLine("Test Failed");
return 1;
}
}
}
| |
using System;
using System.ComponentModel;
using System.Drawing;
using System.Reflection;
using System.Windows.Forms;
using ScrollEventArgs = GuruComponents.CodeEditor.Forms.IntelliMouse.ScrollEventArgs;
using ScrollEventHandler = GuruComponents.CodeEditor.Forms.IntelliMouse.ScrollEventHandler;
using GuruComponents.CodeEditor.Library.Timers;
using GuruComponents.CodeEditor.Library.Win32;
namespace GuruComponents.CodeEditor.Forms
{
/// <summary>
/// Provides IntelliMouse middle-button auto-scroll feedback: the control activates on a middle
/// click in its parent, tracks the cursor offset and raises BeginScroll, Scroll and EndScroll events.
/// </summary>
public class IntelliMouseControl : Control
{
protected const int WM_MBUTTONDOWN = 0x0207;
protected const int WM_MBUTTONUP = 0x0208;
protected const int WM_LBUTTONDOWN = 0x0201;
protected const int WM_RBUTTONDOWN = 0x0204;
protected const int WM_CAPTURECHANGED = 0x0215;
protected const int WM_MOUSELEAVE = 0x02A3;
protected IContainer components;
protected bool Active = false;
#region GENERAL DECLARATIONS
protected Point ActivationPoint = new Point(0, 0);
protected Point CurrentDelta = new Point(0, 0);
protected WeakReference _CurrentParent = null;
protected Control CurrentParent
{
get
{
if (_CurrentParent != null)
return (Control) _CurrentParent.Target;
else
return null;
}
set { _CurrentParent = new WeakReference(value); }
}
#endregion
#region EVENTS
public event EventHandler BeginScroll = null;
public event EventHandler EndScroll = null;
public event ScrollEventHandler Scroll = null;
#endregion
#region PUBLIC PROPERTY IMAGE
protected Bitmap _Image;
protected WeakTimer tmrFeedback;
protected PictureBox picImage;
protected RegionHandler regionHandler1;
public Bitmap Image
{
get { return _Image; }
set
{
_Image = value;
IsDirty = true;
}
}
#endregion
#region PUBLIC PROPERTY TRANSPARENCYKEY
protected Color _TransparencyKey = Color.FromArgb(255, 0, 255);
public Color TransparencyKey
{
get { return _TransparencyKey; }
set
{
_TransparencyKey = value;
IsDirty = true;
}
}
#endregion
#region CONSTRUCTOR
public IntelliMouseControl()
{
InitializeComponent();
// SetStyle(ControlStyles.Selectable,false);
// this.Image = (Bitmap)this.picImage.Image;
// this.Visible =false;
}
#endregion
#region DISPOSE
protected override void Dispose(bool disposing)
{
#if DEBUG
try
{
Console.WriteLine("disposing intellimouse");
}
catch
{
}
#endif
if (disposing)
{
if (components != null)
{
components.Dispose();
}
}
base.Dispose(disposing);
}
#endregion
#region FINALIZE
~IntelliMouseControl()
{
#if DEBUG
try
{
Console.WriteLine("finalizing intellimouse");
}
catch
{
}
#endif
}
#endregion
#region Component Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
protected void InitializeComponent()
{
this.components = new System.ComponentModel.Container();
System.Resources.ResourceManager resources = new System.Resources.ResourceManager(typeof (IntelliMouseControl));
this.tmrFeedback = new GuruComponents.CodeEditor.Library.Timers.WeakTimer(this.components);
this.picImage = new System.Windows.Forms.PictureBox();
this.regionHandler1 = new GuruComponents.CodeEditor.Forms.RegionHandler(this.components);
//
// tmrFeedback
//
this.tmrFeedback.Enabled = true;
this.tmrFeedback.Interval = 10;
this.tmrFeedback.Tick += new System.EventHandler(this.tmrFeedback_Tick);
//
// picImage
//
this.picImage.Image = ((System.Drawing.Bitmap) (resources.GetObject("picImage.Image")));
this.picImage.Location = new System.Drawing.Point(17, 17);
this.picImage.Name = "picImage";
this.picImage.TabIndex = 0;
this.picImage.TabStop = false;
//
// regionHandler1
//
this.regionHandler1.Control = null;
this.regionHandler1.MaskImage = null;
this.regionHandler1.TransparencyKey = System.Drawing.Color.FromArgb(((System.Byte) (255)), ((System.Byte) (0)), ((System.Byte) (255)));
//
// IntelliMouseControl
//
this.ParentChanged += new System.EventHandler(this.IntelliMouseControl_ParentChanged);
}
#endregion
private bool IsDirty = false;
protected void CreateRegion()
{
this.regionHandler1.ApplyRegion(this, this.Image, this.TransparencyKey);
IsDirty = false;
}
public void Activate(int x, int y)
{
if (IsDirty)
CreateRegion();
this.Size = new Size(Image.Width, Image.Height);
this.Location = new Point(x - Image.Width/2, y - Image.Height/2);
this.ActivationPoint.X = x;
this.ActivationPoint.Y = y;
this.BringToFront();
this.Visible = true;
this.Focus();
Active = false;
Application.DoEvents();
SetCursor(0, 0);
tmrFeedback.Enabled = true;
onBeginScroll(new EventArgs());
NativeMethods.SendMessage(this.Handle, WM_MBUTTONDOWN, 0, 0);
Active = true;
}
protected void SetCursor(int x, int y)
{
Assembly assembly = GetType().Assembly;
int dY = y;
int dX = x;
CurrentDelta.X = dX;
CurrentDelta.Y = dY;
if (dY > 16)
{
this.Cursor = new Cursor(assembly.GetManifestResourceStream("MoveDown.cur"));
CurrentDelta.Y -= 16;
}
else if (dY < -16)
{
this.Cursor = new Cursor(assembly.GetManifestResourceStream("MoveUp.cur"));
CurrentDelta.Y += 16;
}
else
{
this.Cursor = new Cursor(assembly.GetManifestResourceStream("MoveUpDown.cur"));
CurrentDelta = new Point(0, 0);
}
}
protected override void OnMouseDown(MouseEventArgs e)
{
base.OnMouseDown(e);
if (Active)
{
if (e.Button != MouseButtons.None && (e.Button != MouseButtons.Middle && e.X != 0 && e.Y != 0))
{
Deactivate();
Point p = new Point(e.X + this.Left, e.Y + this.Top);
NativeMethods.SendMessage(this.Parent.Handle, WM_LBUTTONDOWN, 0, p.Y*0x10000 + p.X);
}
}
}
protected override void OnMouseMove(MouseEventArgs e)
{
if (Active)
{
if (e.Button != MouseButtons.Middle && e.Button != MouseButtons.None)
{
Deactivate();
}
else
{
int x = e.X;
int y = e.Y;
x -= Image.Width/2;
y -= Image.Height/2;
SetCursor(x, y);
NativeMethods.SendMessage(this.Handle, WM_MBUTTONDOWN, 0, 0);
}
}
else
{
base.OnMouseMove(e);
}
}
protected override void OnMouseUp(MouseEventArgs e)
{
base.OnMouseUp(e);
}
protected void Deactivate()
{
NativeMethods.SendMessage(this.Handle, WM_MBUTTONUP, 0, 0);
Active = false;
tmrFeedback.Enabled = false;
this.Hide();
onEndScroll(new EventArgs());
this.Parent.Focus();
}
protected override void OnResize(EventArgs e)
{
if (this.Image != null)
this.Size = new Size(Image.Width, Image.Height);
else
this.Size = new Size(32, 32);
}
protected void Parent_MouseDown(object s, MouseEventArgs e)
{
if (e.Button == MouseButtons.Middle)
{
this.Activate(e.X, e.Y);
}
}
protected void tmrFeedback_Tick(object sender, EventArgs e)
{
ScrollEventArgs a = new ScrollEventArgs();
a.DeltaX = CurrentDelta.X;
a.DeltaY = CurrentDelta.Y;
onScroll(a);
}
protected virtual void onBeginScroll(EventArgs e)
{
if (BeginScroll != null)
BeginScroll(this, e);
}
protected virtual void onEndScroll(EventArgs e)
{
if (EndScroll != null)
EndScroll(this, e);
}
protected virtual void onScroll(ScrollEventArgs e)
{
if (Scroll != null)
Scroll(this, e);
}
protected void IntelliMouseControl_ParentChanged(object sender, EventArgs e)
{
if (CurrentParent != null)
{
CurrentParent.MouseDown -= new MouseEventHandler(this.Parent_MouseDown);
}
if (this.Parent != null)
{
this.Parent.MouseDown += new MouseEventHandler(this.Parent_MouseDown);
this.Deactivate();
}
}
protected override void OnKeyDown(KeyEventArgs e)
{
Deactivate();
}
protected override void OnLeave(EventArgs e)
{
base.OnLeave(e);
Deactivate();
}
protected override void OnLostFocus(EventArgs e)
{
base.OnLostFocus(e);
Deactivate();
}
protected override void WndProc(ref Message m)
{
if (m.Msg == WM_MOUSELEAVE)
{
base.WndProc(ref m);
this.Deactivate();
return;
}
base.WndProc(ref m);
}
}
}
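// Minimal usage sketch (not part of the original library): hosting the control inside a scrollable
// parent. It assumes the IntelliMouse ScrollEventHandler delegate has the (object sender,
// ScrollEventArgs e) shape implied by onScroll above and that DeltaX/DeltaY are readable ints;
// the host class and bitmap below are placeholders.
namespace GuruComponents.CodeEditor.Forms.Samples
{
    using System.Drawing;
    using System.Windows.Forms;
    using IntelliScrollEventArgs = GuruComponents.CodeEditor.Forms.IntelliMouse.ScrollEventArgs;

    public class IntelliMouseHost : UserControl
    {
        private readonly IntelliMouseControl intelliMouse = new IntelliMouseControl();

        public IntelliMouseHost()
        {
            this.AutoScroll = true;
            this.intelliMouse.Image = new Bitmap(32, 32); // placeholder feedback bitmap
            this.intelliMouse.Visible = false;            // Activate() shows it on a middle-button click
            this.Controls.Add(this.intelliMouse);         // ParentChanged hooks this control's MouseDown
            this.intelliMouse.Scroll += this.OnIntelliScroll;
        }

        private void OnIntelliScroll(object sender, IntelliScrollEventArgs e)
        {
            // DeltaX/DeltaY are the cursor offsets from the activation point (zero inside the
            // dead zone), raised on every feedback timer tick; translate them into scrolling here.
            this.AutoScrollPosition = new Point(
                -this.AutoScrollPosition.X + e.DeltaX,
                -this.AutoScrollPosition.Y + e.DeltaY);
        }
    }
}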
| |
// Copyright (c) Umbraco.
// See LICENSE for more details.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Threading;
using NUnit.Framework;
using Umbraco.Cms.Core;
using Umbraco.Cms.Core.Events;
using Umbraco.Cms.Core.Models;
using Umbraco.Cms.Core.Scoping;
using Umbraco.Cms.Core.Services;
using Umbraco.Cms.Core.Services.Implement;
using Umbraco.Cms.Tests.Common.Builders;
using Umbraco.Cms.Tests.Common.Testing;
using Umbraco.Cms.Tests.Integration.Testing;
namespace Umbraco.Cms.Tests.Integration.Umbraco.Infrastructure.Services
{
[TestFixture]
[UmbracoTest(Database = UmbracoTestOptions.Database.NewSchemaPerTest, PublishedRepositoryEvents = true)]
public class MediaServiceTests : UmbracoIntegrationTest
{
private IMediaService MediaService => GetRequiredService<IMediaService>();
private IMediaTypeService MediaTypeService => GetRequiredService<IMediaTypeService>();
[Test]
public void Can_Update_Media_Property_Values()
{
IMediaType mediaType = MediaTypeBuilder.CreateSimpleMediaType("test", "Test");
MediaTypeService.Save(mediaType);
IMedia media = MediaBuilder.CreateSimpleMedia(mediaType, "hello", -1);
media.SetValue("title", "title of mine");
media.SetValue("bodyText", "hello world");
MediaService.Save(media);
// re-get
media = MediaService.GetById(media.Id);
media.SetValue("title", "another title of mine"); // Change a value
media.SetValue("bodyText", null); // Clear a value
media.SetValue("author", "new author"); // Add a value
MediaService.Save(media);
// re-get
media = MediaService.GetById(media.Id);
Assert.AreEqual("another title of mine", media.GetValue("title"));
Assert.IsNull(media.GetValue("bodyText"));
Assert.AreEqual("new author", media.GetValue("author"));
}
/// <summary>
/// Used to list out all ambiguous events that will require dispatching with a name
/// </summary>
[Test]
[Explicit]
public void List_Ambiguous_Events()
{
EventInfo[] events = MediaService.GetType().GetEvents(BindingFlags.Static | BindingFlags.Public);
Type typedEventHandler = typeof(TypedEventHandler<,>);
foreach (EventInfo e in events)
{
// only continue if this is a TypedEventHandler
if (!e.EventHandlerType.IsGenericType)
{
continue;
}
Type typeDef = e.EventHandlerType.GetGenericTypeDefinition();
if (typedEventHandler != typeDef)
{
continue;
}
// get the event arg type
Type eventArgType = e.EventHandlerType.GenericTypeArguments[1];
Attempt<EventNameExtractorResult> found = EventNameExtractor.FindEvent(typeof(MediaService), eventArgType, EventNameExtractor.MatchIngNames);
if (!found.Success && found.Result.Error == EventNameExtractorError.Ambiguous)
{
Console.WriteLine($"Ambiguous event, source: {typeof(MediaService)}, args: {eventArgType}");
}
}
}
[Test]
public void Get_Paged_Children_With_Media_Type_Filter()
{
MediaType mediaType1 = MediaTypeBuilder.CreateImageMediaType("Image2");
MediaTypeService.Save(mediaType1);
MediaType mediaType2 = MediaTypeBuilder.CreateImageMediaType("Image3");
MediaTypeService.Save(mediaType2);
for (int i = 0; i < 10; i++)
{
Media m1 = MediaBuilder.CreateMediaImage(mediaType1, -1);
MediaService.Save(m1);
Media m2 = MediaBuilder.CreateMediaImage(mediaType2, -1);
MediaService.Save(m2);
}
IScopeProvider provider = ScopeProvider;
using (provider.CreateScope())
{
IEnumerable<IMedia> result = MediaService.GetPagedChildren(
-1,
0,
11,
out long total,
provider.SqlContext.Query<IMedia>()
.Where(x => new[] { mediaType1.Id, mediaType2.Id }.Contains(x.ContentTypeId)),
Ordering.By("SortOrder", Direction.Ascending));
Assert.AreEqual(11, result.Count());
Assert.AreEqual(20, total);
result = MediaService.GetPagedChildren(
-1,
1,
11,
out total,
provider.SqlContext.Query<IMedia>()
.Where(x => new[] { mediaType1.Id, mediaType2.Id }.Contains(x.ContentTypeId)),
Ordering.By("SortOrder", Direction.Ascending));
Assert.AreEqual(9, result.Count());
Assert.AreEqual(20, total);
}
}
[Test]
public void Can_Move_Media()
{
// Arrange
Tuple<IMedia, IMedia, IMedia, IMedia, IMedia> mediaItems = CreateTrashedTestMedia();
IMedia media = MediaService.GetById(mediaItems.Item3.Id);
// Act
MediaService.Move(media, mediaItems.Item2.Id);
// Assert
Assert.That(media.ParentId, Is.EqualTo(mediaItems.Item2.Id));
Assert.That(media.Trashed, Is.False);
}
[Test]
public void Can_Move_Media_To_RecycleBin()
{
// Arrange
Tuple<IMedia, IMedia, IMedia, IMedia, IMedia> mediaItems = CreateTrashedTestMedia();
IMedia media = MediaService.GetById(mediaItems.Item1.Id);
// Act
MediaService.MoveToRecycleBin(media);
// Assert
Assert.That(media.ParentId, Is.EqualTo(-21));
Assert.That(media.Trashed, Is.True);
}
[Test]
public void Can_Move_Media_From_RecycleBin()
{
// Arrange
Tuple<IMedia, IMedia, IMedia, IMedia, IMedia> mediaItems = CreateTrashedTestMedia();
IMedia media = MediaService.GetById(mediaItems.Item4.Id);
// Act - moving out of recycle bin
MediaService.Move(media, mediaItems.Item1.Id);
IMedia mediaChild = MediaService.GetById(mediaItems.Item5.Id);
// Assert
Assert.That(media.ParentId, Is.EqualTo(mediaItems.Item1.Id));
Assert.That(media.Trashed, Is.False);
Assert.That(mediaChild.ParentId, Is.EqualTo(mediaItems.Item4.Id));
Assert.That(mediaChild.Trashed, Is.False);
}
[Test]
public void Cannot_Save_Media_With_Empty_Name()
{
// Arrange
MediaType mediaType = MediaTypeBuilder.CreateNewMediaType();
MediaTypeService.Save(mediaType);
IMedia media = MediaService.CreateMedia(string.Empty, -1, Constants.Conventions.MediaTypes.VideoAlias);
// Act & Assert
Assert.Throws<ArgumentException>(() => MediaService.Save(media));
}
// [Test]
// public void Ensure_Content_Xml_Created()
// {
// var mediaType = MediaTypeBuilder.CreateVideoMediaType();
// MediaTypeService.Save(mediaType);
// var media = MediaService.CreateMedia("Test", -1, Constants.Conventions.MediaTypes.VideoAlias);
//
// MediaService.Save(media);
//
// using (var scope = ScopeProvider.CreateScope())
// {
// Assert.IsTrue(scope.Database.Exists<ContentXmlDto>(media.Id));
// }
// }
[Test]
public void Can_Get_Media_By_Path()
{
MediaType mediaType = MediaTypeBuilder.CreateImageMediaType("Image2");
MediaTypeService.Save(mediaType);
Media media = MediaBuilder.CreateMediaImage(mediaType, -1);
MediaService.Save(media);
string mediaPath = "/media/test-image.png";
IMedia resolvedMedia = MediaService.GetMediaByPath(mediaPath);
Assert.IsNotNull(resolvedMedia);
Assert.That(resolvedMedia.GetValue(Constants.Conventions.Media.File).ToString() == mediaPath);
}
[Test]
public void Can_Get_Media_With_Crop_By_Path()
{
MediaType mediaType = MediaTypeBuilder.CreateImageMediaTypeWithCrop("Image2");
MediaTypeService.Save(mediaType);
Media media = MediaBuilder.CreateMediaImageWithCrop(mediaType, -1);
MediaService.Save(media);
string mediaPath = "/media/test-image.png";
IMedia resolvedMedia = MediaService.GetMediaByPath(mediaPath);
Assert.IsNotNull(resolvedMedia);
Assert.That(resolvedMedia.GetValue(Constants.Conventions.Media.File).ToString().Contains(mediaPath));
}
[Test]
public void Can_Get_Paged_Children()
{
MediaType mediaType = MediaTypeBuilder.CreateImageMediaType("Image2");
MediaTypeService.Save(mediaType);
for (int i = 0; i < 10; i++)
{
Media c1 = MediaBuilder.CreateMediaImage(mediaType, -1);
MediaService.Save(c1);
}
IMediaService service = MediaService;
IMedia[] entities = service.GetPagedChildren(-1, 0, 6, out long total).ToArray();
Assert.That(entities.Length, Is.EqualTo(6));
Assert.That(total, Is.EqualTo(10));
entities = service.GetPagedChildren(-1, 1, 6, out total).ToArray();
Assert.That(entities.Length, Is.EqualTo(4));
Assert.That(total, Is.EqualTo(10));
}
[Test]
public void Can_Get_Paged_Children_Dont_Get_Descendants()
{
MediaType mediaType = MediaTypeBuilder.CreateImageMediaType("Image2");
MediaTypeService.Save(mediaType);
// Only add 9 as we also add a folder with children.
for (int i = 0; i < 9; i++)
{
Media m1 = MediaBuilder.CreateMediaImage(mediaType, -1);
MediaService.Save(m1);
}
MediaType mediaTypeForFolder = MediaTypeBuilder.CreateImageMediaType("Folder2");
MediaTypeService.Save(mediaTypeForFolder);
Media mediaFolder = MediaBuilder.CreateMediaFolder(mediaTypeForFolder, -1);
MediaService.Save(mediaFolder);
for (int i = 0; i < 10; i++)
{
Media m1 = MediaBuilder.CreateMediaImage(mediaType, mediaFolder.Id);
MediaService.Save(m1);
}
IMediaService service = MediaService;
// Children in root including the folder - not the descendants in the folder.
IMedia[] entities = service.GetPagedChildren(-1, 0, 6, out long total).ToArray();
Assert.That(entities.Length, Is.EqualTo(6));
Assert.That(total, Is.EqualTo(10));
entities = service.GetPagedChildren(-1, 1, 6, out total).ToArray();
Assert.That(entities.Length, Is.EqualTo(4));
Assert.That(total, Is.EqualTo(10));
// Children in folder.
entities = service.GetPagedChildren(mediaFolder.Id, 0, 6, out total).ToArray();
Assert.That(entities.Length, Is.EqualTo(6));
Assert.That(total, Is.EqualTo(10));
entities = service.GetPagedChildren(mediaFolder.Id, 1, 6, out total).ToArray();
Assert.That(entities.Length, Is.EqualTo(4));
Assert.That(total, Is.EqualTo(10));
}
private Tuple<IMedia, IMedia, IMedia, IMedia, IMedia> CreateTrashedTestMedia()
{
// Create and Save folder-Media -> 1050
IMediaType folderMediaType = MediaTypeService.Get(1031);
Media folder = MediaBuilder.CreateMediaFolder(folderMediaType, -1);
MediaService.Save(folder);
// Create and Save folder-Media -> 1051
Media folder2 = MediaBuilder.CreateMediaFolder(folderMediaType, -1);
MediaService.Save(folder2);
// Create and Save image-Media -> 1052
IMediaType imageMediaType = MediaTypeService.Get(1032);
Media image = MediaBuilder.CreateMediaImage(imageMediaType, 1050);
MediaService.Save(image);
// Create and Save folder-Media that is trashed -> 1053
Media folderTrashed = MediaBuilder.CreateMediaFolder(folderMediaType, -21);
folderTrashed.Trashed = true;
MediaService.Save(folderTrashed);
// Create and Save image-Media child of folderTrashed -> 1054
Media imageTrashed = MediaBuilder.CreateMediaImage(imageMediaType, folderTrashed.Id);
imageTrashed.Trashed = true;
MediaService.Save(imageTrashed);
return new Tuple<IMedia, IMedia, IMedia, IMedia, IMedia>(folder, folder2, image, folderTrashed, imageTrashed);
}
}
}
| |
#region Copyright & License
//
// Copyright 2001-2005 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System;
using System.Reflection;
using log4net.Core;
using log4net.Repository;
namespace log4net
{
/// <summary>
/// This class is used by client applications to request logger instances.
/// </summary>
/// <remarks>
/// <para>
/// This class has static methods that are used by a client to request
/// a logger instance. The <see cref="GetLogger(string)"/> method is
/// used to retrieve a logger.
/// </para>
/// <para>
/// See the <see cref="ILog"/> interface for more details.
/// </para>
/// </remarks>
/// <example>Simple example of logging messages
/// <code lang="C#">
/// ILog log = LogManager.GetLogger("application-log");
///
/// log.Info("Application Start");
/// log.Debug("This is a debug message");
///
/// if (log.IsDebugEnabled)
/// {
/// log.Debug("This is another debug message");
/// }
/// </code>
/// </example>
/// <threadsafety static="true" instance="true" />
/// <seealso cref="ILog"/>
/// <author>Nicko Cadell</author>
/// <author>Gert Driesen</author>
public sealed class LogManager
{
#region Private Instance Constructors
/// <summary>
/// Initializes a new instance of the <see cref="LogManager" /> class.
/// </summary>
/// <remarks>
/// Uses a private access modifier to prevent instantiation of this class.
/// </remarks>
private LogManager()
{
}
#endregion Private Instance Constructors
#region Type Specific Manager Methods
/// <overloads>Returns the named logger if it exists.</overloads>
/// <summary>
/// Returns the named logger if it exists.
/// </summary>
/// <remarks>
/// <para>
/// If the named logger exists (in the default repository) then it
/// returns a reference to the logger, otherwise it returns <c>null</c>.
/// </para>
/// </remarks>
/// <param name="name">The fully qualified logger name to look for.</param>
/// <returns>The logger found, or <c>null</c> if no logger could be found.</returns>
public static ILog Exists(string name)
{
return Exists(Assembly.GetCallingAssembly(), name);
}
/// <summary>
/// Returns the named logger if it exists.
/// </summary>
/// <remarks>
/// <para>
/// If the named logger exists (in the specified repository) then it
/// returns a reference to the logger, otherwise it returns
/// <c>null</c>.
/// </para>
/// </remarks>
/// <param name="repository">The repository to lookup in.</param>
/// <param name="name">The fully qualified logger name to look for.</param>
/// <returns>
/// The logger found, or <c>null</c> if the logger doesn't exist in the specified
/// repository.
/// </returns>
public static ILog Exists(string repository, string name)
{
return WrapLogger(LoggerManager.Exists(repository, name));
}
/// <summary>
/// Returns the named logger if it exists.
/// </summary>
/// <remarks>
/// <para>
/// If the named logger exists (in the repository for the specified assembly) then it
/// returns a reference to the logger, otherwise it returns
/// <c>null</c>.
/// </para>
/// </remarks>
/// <param name="repositoryAssembly">The assembly to use to lookup the repository.</param>
/// <param name="name">The fully qualified logger name to look for.</param>
/// <returns>
/// The logger, or <c>null</c> if the logger doesn't exist in the specified
/// assembly's repository.
/// </returns>
public static ILog Exists(Assembly repositoryAssembly, string name)
{
return WrapLogger(LoggerManager.Exists(repositoryAssembly, name));
}
/// <overloads>Get the currently defined loggers.</overloads>
/// <summary>
/// Returns all the currently defined loggers in the default repository.
/// </summary>
/// <remarks>
/// <para>The root logger is <b>not</b> included in the returned array.</para>
/// </remarks>
/// <returns>All the defined loggers.</returns>
public static ILog[] GetCurrentLoggers()
{
return GetCurrentLoggers(Assembly.GetCallingAssembly());
}
/// <summary>
/// Returns all the currently defined loggers in the specified repository.
/// </summary>
/// <param name="repository">The repository to lookup in.</param>
/// <remarks>
/// The root logger is <b>not</b> included in the returned array.
/// </remarks>
/// <returns>All the defined loggers.</returns>
public static ILog[] GetCurrentLoggers(string repository)
{
return WrapLoggers(LoggerManager.GetCurrentLoggers(repository));
}
/// <summary>
/// Returns all the currently defined loggers in the specified assembly's repository.
/// </summary>
/// <param name="repositoryAssembly">The assembly to use to lookup the repository.</param>
/// <remarks>
/// The root logger is <b>not</b> included in the returned array.
/// </remarks>
/// <returns>All the defined loggers.</returns>
public static ILog[] GetCurrentLoggers(Assembly repositoryAssembly)
{
return WrapLoggers(LoggerManager.GetCurrentLoggers(repositoryAssembly));
}
/// <overloads>Get or create a logger.</overloads>
/// <summary>
/// Retrieves or creates a named logger.
/// </summary>
/// <remarks>
/// <para>
/// Retrieves a logger named as the <paramref name="name"/>
/// parameter. If the named logger already exists, then the
/// existing instance will be returned. Otherwise, a new instance is
/// created.
/// </para>
/// <para>By default, loggers do not have a set level but inherit
/// it from the hierarchy. This is one of the central features of
/// log4net.
/// </para>
/// </remarks>
/// <param name="name">The name of the logger to retrieve.</param>
/// <returns>The logger with the name specified.</returns>
public static ILog GetLogger(string name)
{
return GetLogger(Assembly.GetCallingAssembly(), name);
}
/// <summary>
/// Retrieves or creates a named logger.
/// </summary>
/// <remarks>
/// <para>
/// Retrieve a logger named as the <paramref name="name"/>
/// parameter. If the named logger already exists, then the
/// existing instance will be returned. Otherwise, a new instance is
/// created.
/// </para>
/// <para>
/// By default, loggers do not have a set level but inherit
/// it from the hierarchy. This is one of the central features of
/// log4net.
/// </para>
/// </remarks>
/// <param name="repository">The repository to lookup in.</param>
/// <param name="name">The name of the logger to retrieve.</param>
/// <returns>The logger with the name specified.</returns>
public static ILog GetLogger(string repository, string name)
{
return WrapLogger(LoggerManager.GetLogger(repository, name));
}
/// <summary>
/// Retrieves or creates a named logger.
/// </summary>
/// <remarks>
/// <para>
/// Retrieve a logger named as the <paramref name="name"/>
/// parameter. If the named logger already exists, then the
/// existing instance will be returned. Otherwise, a new instance is
/// created.
/// </para>
/// <para>
/// By default, loggers do not have a set level but inherit
/// it from the hierarchy. This is one of the central features of
/// log4net.
/// </para>
/// </remarks>
/// <param name="repositoryAssembly">The assembly to use to lookup the repository.</param>
/// <param name="name">The name of the logger to retrieve.</param>
/// <returns>The logger with the name specified.</returns>
public static ILog GetLogger(Assembly repositoryAssembly, string name)
{
return WrapLogger(LoggerManager.GetLogger(repositoryAssembly, name));
}
/// <summary>
/// Shorthand for <see cref="LogManager.GetLogger(string)"/>.
/// </summary>
/// <remarks>
/// Get the logger for the fully qualified name of the type specified.
/// </remarks>
/// <param name="type">The full name of <paramref name="type"/> will be used as the name of the logger to retrieve.</param>
/// <returns>The logger with the name specified.</returns>
public static ILog GetLogger(Type type)
{
return GetLogger(Assembly.GetCallingAssembly(), type.FullName);
}
/// <summary>
/// Shorthand for <see cref="LogManager.GetLogger(string)"/>.
/// </summary>
/// <remarks>
/// Gets the logger for the fully qualified name of the type specified.
/// </remarks>
/// <param name="repository">The repository to lookup in.</param>
/// <param name="type">The full name of <paramref name="type"/> will be used as the name of the logger to retrieve.</param>
/// <returns>The logger with the name specified.</returns>
public static ILog GetLogger(string repository, Type type)
{
return WrapLogger(LoggerManager.GetLogger(repository, type));
}
/// <summary>
/// Shorthand for <see cref="LogManager.GetLogger(string)"/>.
/// </summary>
/// <remarks>
/// Gets the logger for the fully qualified name of the type specified.
/// </remarks>
/// <param name="repositoryAssembly">The assembly to use to lookup the repository.</param>
/// <param name="type">The full name of <paramref name="type"/> will be used as the name of the logger to retrieve.</param>
/// <returns>The logger with the name specified.</returns>
public static ILog GetLogger(Assembly repositoryAssembly, Type type)
{
return WrapLogger(LoggerManager.GetLogger(repositoryAssembly, type));
}
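// Illustrative usage (not part of this class): the conventional per-class logger, keyed by the
// declaring type's full name; "MyClass" is a placeholder type.
//
//     private static readonly ILog log = LogManager.GetLogger(typeof(MyClass));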
#endregion Type Specific Manager Methods
#region Domain & Repository Manager Methods
/// <summary>
/// Shuts down the log4net system.
/// </summary>
/// <remarks>
/// <para>
/// Calling this method will <b>safely</b> close and remove all
/// appenders in all the loggers including root contained in all the
/// default repositories.
/// </para>
/// <para>
/// Some appenders need to be closed before the application exits.
/// Otherwise, pending logging events might be lost.
/// </para>
/// <para>The <c>shutdown</c> method is careful to close nested
/// appenders before closing regular appenders. This allows
/// configurations where a regular appender is attached to a logger
/// and again to a nested appender.
/// </para>
/// </remarks>
public static void Shutdown()
{
LoggerManager.Shutdown();
}
/// <overloads>Shutdown a logger repository.</overloads>
/// <summary>
/// Shuts down the default repository.
/// </summary>
/// <remarks>
/// <para>
/// Calling this method will <b>safely</b> close and remove all
/// appenders in all the loggers including root contained in the
/// default repository.
/// </para>
/// <para>Some appenders need to be closed before the application exits.
/// Otherwise, pending logging events might be lost.
/// </para>
/// <para>The <c>shutdown</c> method is careful to close nested
/// appenders before closing regular appenders. This allows
/// configurations where a regular appender is attached to a logger
/// and again to a nested appender.
/// </para>
/// </remarks>
public static void ShutdownRepository()
{
ShutdownRepository(Assembly.GetCallingAssembly());
}
/// <summary>
/// Shuts down the repository for the repository specified.
/// </summary>
/// <remarks>
/// <para>
/// Calling this method will <b>safely</b> close and remove all
/// appenders in all the loggers including root contained in the
/// <paramref name="repository"/> specified.
/// </para>
/// <para>
/// Some appenders need to be closed before the application exits.
/// Otherwise, pending logging events might be lost.
/// </para>
/// <para>The <c>shutdown</c> method is careful to close nested
/// appenders before closing regular appenders. This allows
/// configurations where a regular appender is attached to a logger
/// and again to a nested appender.
/// </para>
/// </remarks>
/// <param name="repository">The repository to shutdown.</param>
public static void ShutdownRepository(string repository)
{
LoggerManager.ShutdownRepository(repository);
}
/// <summary>
/// Shuts down the repository specified.
/// </summary>
/// <remarks>
/// <para>
/// Calling this method will <b>safely</b> close and remove all
/// appenders in all the loggers including root contained in the
/// repository. The repository is looked up using
/// the <paramref name="repositoryAssembly"/> specified.
/// </para>
/// <para>
/// Some appenders need to be closed before the application exits.
/// Otherwise, pending logging events might be lost.
/// </para>
/// <para>
/// The <c>shutdown</c> method is careful to close nested
/// appenders before closing regular appenders. This allows
/// configurations where a regular appender is attached to a logger
/// and again to a nested appender.
/// </para>
/// </remarks>
/// <param name="repositoryAssembly">The assembly to use to lookup the repository.</param>
public static void ShutdownRepository(Assembly repositoryAssembly)
{
LoggerManager.ShutdownRepository(repositoryAssembly);
}
/// <overloads>Reset the configuration of a repository</overloads>
/// <summary>
/// Resets all values contained in this repository instance to their defaults.
/// </summary>
/// <remarks>
/// <para>
/// Resets all values contained in the repository instance to their
/// defaults. This removes all appenders from all loggers, sets
/// the level of all non-root loggers to <c>null</c>,
/// sets their additivity flag to <c>true</c> and sets the level
/// of the root logger to <see cref="Level.Debug"/>. Moreover,
/// message disabling is set to its default "off" value.
/// </para>
/// </remarks>
public static void ResetConfiguration()
{
ResetConfiguration(Assembly.GetCallingAssembly());
}
/// <summary>
/// Resets all values contained in this repository instance to their defaults.
/// </summary>
/// <remarks>
/// <para>
/// Reset all values contained in the repository instance to their
/// defaults. This removes all appenders from all loggers, sets
/// the level of all non-root loggers to <c>null</c>,
/// sets their additivity flag to <c>true</c> and sets the level
/// of the root logger to <see cref="Level.Debug"/>. Moreover,
/// message disabling is set to its default "off" value.
/// </para>
/// </remarks>
/// <param name="repository">The repository to reset.</param>
public static void ResetConfiguration(string repository)
{
LoggerManager.ResetConfiguration(repository);
}
/// <summary>
/// Resets all values contained in this repository instance to their defaults.
/// </summary>
/// <remarks>
/// <para>
/// Reset all values contained in the repository instance to their
/// defaults. This removes all appenders from all loggers, sets
/// the level of all non-root loggers to <c>null</c>,
/// sets their additivity flag to <c>true</c> and sets the level
/// of the root logger to <see cref="Level.Debug"/>. Moreover,
/// message disabling is set to its default "off" value.
/// </para>
/// </remarks>
/// <param name="repositoryAssembly">The assembly to use to lookup the repository to reset.</param>
public static void ResetConfiguration(Assembly repositoryAssembly)
{
LoggerManager.ResetConfiguration(repositoryAssembly);
}
/// <overloads>Get the logger repository.</overloads>
/// <summary>
/// Returns the default <see cref="ILoggerRepository"/> instance.
/// </summary>
/// <remarks>
/// <para>
/// Gets the <see cref="ILoggerRepository"/> for the repository specified
/// by the caller's assembly (<see cref="Assembly.GetCallingAssembly()"/>).
/// </para>
/// </remarks>
/// <returns>The <see cref="ILoggerRepository"/> instance for the default repository.</returns>
[Obsolete("Use GetRepository instead of GetLoggerRepository")]
public static ILoggerRepository GetLoggerRepository()
{
return GetRepository(Assembly.GetCallingAssembly());
}
/// <summary>
/// Returns the default <see cref="ILoggerRepository"/> instance.
/// </summary>
/// <returns>The default <see cref="ILoggerRepository"/> instance.</returns>
/// <remarks>
/// <para>
/// Gets the <see cref="ILoggerRepository"/> for the repository specified
/// by the <paramref name="repository"/> argument.
/// </para>
/// </remarks>
/// <param name="repository">The repository to lookup in.</param>
[Obsolete("Use GetRepository instead of GetLoggerRepository")]
public static ILoggerRepository GetLoggerRepository(string repository)
{
return GetRepository(repository);
}
/// <summary>
/// Returns the default <see cref="ILoggerRepository"/> instance.
/// </summary>
/// <returns>The default <see cref="ILoggerRepository"/> instance.</returns>
/// <remarks>
/// <para>
/// Gets the <see cref="ILoggerRepository"/> for the repository specified
/// by the <paramref name="repositoryAssembly"/> argument.
/// </para>
/// </remarks>
/// <param name="repositoryAssembly">The assembly to use to lookup the repository.</param>
[Obsolete("Use GetRepository instead of GetLoggerRepository")]
public static ILoggerRepository GetLoggerRepository(Assembly repositoryAssembly)
{
return GetRepository(repositoryAssembly);
}
/// <overloads>Get a logger repository.</overloads>
/// <summary>
/// Returns the default <see cref="ILoggerRepository"/> instance.
/// </summary>
/// <remarks>
/// <para>
/// Gets the <see cref="ILoggerRepository"/> for the repository specified
/// by the caller's assembly (<see cref="Assembly.GetCallingAssembly()"/>).
/// </para>
/// </remarks>
/// <returns>The <see cref="ILoggerRepository"/> instance for the default repository.</returns>
public static ILoggerRepository GetRepository()
{
return GetRepository(Assembly.GetCallingAssembly());
}
/// <summary>
/// Returns the default <see cref="ILoggerRepository"/> instance.
/// </summary>
/// <returns>The default <see cref="ILoggerRepository"/> instance.</returns>
/// <remarks>
/// <para>
/// Gets the <see cref="ILoggerRepository"/> for the repository specified
/// by the <paramref name="repository"/> argument.
/// </para>
/// </remarks>
/// <param name="repository">The repository to lookup in.</param>
public static ILoggerRepository GetRepository(string repository)
{
return LoggerManager.GetRepository(repository);
}
/// <summary>
/// Returns the default <see cref="ILoggerRepository"/> instance.
/// </summary>
/// <returns>The default <see cref="ILoggerRepository"/> instance.</returns>
/// <remarks>
/// <para>
/// Gets the <see cref="ILoggerRepository"/> for the repository specified
/// by the <paramref name="repositoryAssembly"/> argument.
/// </para>
/// </remarks>
/// <param name="repositoryAssembly">The assembly to use to lookup the repository.</param>
public static ILoggerRepository GetRepository(Assembly repositoryAssembly)
{
return LoggerManager.GetRepository(repositoryAssembly);
}
/// <overloads>Create a domain</overloads>
/// <summary>
/// Creates a repository with the specified repository type.
/// </summary>
/// <remarks>
/// <para>
/// <b>CreateDomain is obsolete. Use CreateRepository instead of CreateDomain.</b>
/// </para>
/// <para>
/// The <see cref="ILoggerRepository"/> created will be associated with the repository
/// specified such that a call to <see cref="GetRepository()"/> will return
/// the same repository instance.
/// </para>
/// </remarks>
/// <param name="repositoryType">A <see cref="Type"/> that implements <see cref="ILoggerRepository"/>
/// and has a no arg constructor. An instance of this type will be created to act
/// as the <see cref="ILoggerRepository"/> for the repository specified.</param>
/// <returns>The <see cref="ILoggerRepository"/> created for the repository.</returns>
[Obsolete("Use CreateRepository instead of CreateDomain")]
public static ILoggerRepository CreateDomain(Type repositoryType)
{
return CreateRepository(Assembly.GetCallingAssembly(), repositoryType);
}
/// <overloads>Create a logger repository.</overloads>
/// <summary>
/// Creates a repository with the specified repository type.
/// </summary>
/// <param name="repositoryType">A <see cref="Type"/> that implements <see cref="ILoggerRepository"/>
/// and has a no arg constructor. An instance of this type will be created to act
/// as the <see cref="ILoggerRepository"/> for the repository specified.</param>
/// <returns>The <see cref="ILoggerRepository"/> created for the repository.</returns>
/// <remarks>
/// <para>
/// The <see cref="ILoggerRepository"/> created will be associated with the repository
/// specified such that a call to <see cref="GetRepository()"/> will return
/// the same repository instance.
/// </para>
/// </remarks>
public static ILoggerRepository CreateRepository(Type repositoryType)
{
return CreateRepository(Assembly.GetCallingAssembly(), repositoryType);
}
/// <summary>
/// Creates a repository with the specified name.
/// </summary>
/// <remarks>
/// <para>
/// <b>CreateDomain is obsolete. Use CreateRepository instead of CreateDomain.</b>
/// </para>
/// <para>
/// Creates the default type of <see cref="ILoggerRepository"/> which is a
/// <see cref="log4net.Repository.Hierarchy.Hierarchy"/> object.
/// </para>
/// <para>
/// The <paramref name="repository"/> name must be unique. Repositories cannot be redefined.
/// An <see cref="Exception"/> will be thrown if the repository already exists.
/// </para>
/// </remarks>
/// <param name="repository">The name of the repository, this must be unique amongst repositories.</param>
/// <returns>The <see cref="ILoggerRepository"/> created for the repository.</returns>
/// <exception cref="LogException">The specified repository already exists.</exception>
[Obsolete("Use CreateRepository instead of CreateDomain")]
public static ILoggerRepository CreateDomain(string repository)
{
return LoggerManager.CreateRepository(repository);
}
/// <summary>
/// Creates a repository with the specified name.
/// </summary>
/// <remarks>
/// <para>
/// Creates the default type of <see cref="ILoggerRepository"/> which is a
/// <see cref="log4net.Repository.Hierarchy.Hierarchy"/> object.
/// </para>
/// <para>
/// The <paramref name="repository"/> name must be unique. Repositories cannot be redefined.
/// An <see cref="Exception"/> will be thrown if the repository already exists.
/// </para>
/// </remarks>
/// <param name="repository">The name of the repository, this must be unique amongst repositories.</param>
/// <returns>The <see cref="ILoggerRepository"/> created for the repository.</returns>
/// <exception cref="LogException">The specified repository already exists.</exception>
public static ILoggerRepository CreateRepository(string repository)
{
return LoggerManager.CreateRepository(repository);
}
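// Illustrative usage (not part of this class): create a named repository and retrieve a logger
// bound to it; "AuditRepository" and Worker are placeholder names.
//
//     ILoggerRepository auditRepo = LogManager.CreateRepository("AuditRepository");
//     ILog auditLog = LogManager.GetLogger("AuditRepository", typeof(Worker));
//     auditLog.Info("logged into the audit repository");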
/// <summary>
/// Creates a repository with the specified name and repository type.
/// </summary>
/// <remarks>
/// <para>
/// <b>CreateDomain is obsolete. Use CreateRepository instead of CreateDomain.</b>
/// </para>
/// <para>
/// The <paramref name="repository"/> name must be unique. Repositories cannot be redefined.
/// An <see cref="Exception"/> will be thrown if the repository already exists.
/// </para>
/// </remarks>
/// <param name="repository">The name of the repository, this must be unique to the repository.</param>
/// <param name="repositoryType">A <see cref="Type"/> that implements <see cref="ILoggerRepository"/>
/// and has a no arg constructor. An instance of this type will be created to act
/// as the <see cref="ILoggerRepository"/> for the repository specified.</param>
/// <returns>The <see cref="ILoggerRepository"/> created for the repository.</returns>
/// <exception cref="LogException">The specified repository already exists.</exception>
[Obsolete("Use CreateRepository instead of CreateDomain")]
public static ILoggerRepository CreateDomain(string repository, Type repositoryType)
{
return LoggerManager.CreateRepository(repository, repositoryType);
}
/// <summary>
/// Creates a repository with the specified name and repository type.
/// </summary>
/// <remarks>
/// <para>
/// The <paramref name="repository"/> name must be unique. Repositories cannot be redefined.
/// An <see cref="Exception"/> will be thrown if the repository already exists.
/// </para>
/// </remarks>
/// <param name="repository">The name of the repository, this must be unique to the repository.</param>
/// <param name="repositoryType">A <see cref="Type"/> that implements <see cref="ILoggerRepository"/>
/// and has a no arg constructor. An instance of this type will be created to act
/// as the <see cref="ILoggerRepository"/> for the repository specified.</param>
/// <returns>The <see cref="ILoggerRepository"/> created for the repository.</returns>
/// <exception cref="LogException">The specified repository already exists.</exception>
public static ILoggerRepository CreateRepository(string repository, Type repositoryType)
{
return LoggerManager.CreateRepository(repository, repositoryType);
}
/// <summary>
/// Creates a repository for the specified assembly and repository type.
/// </summary>
/// <remarks>
/// <para>
/// <b>CreateDomain is obsolete. Use CreateRepository instead of CreateDomain.</b>
/// </para>
/// <para>
/// The <see cref="ILoggerRepository"/> created will be associated with the repository
/// specified such that a call to <see cref="GetRepository(Assembly)"/> with the
/// same assembly specified will return the same repository instance.
/// </para>
/// </remarks>
/// <param name="repositoryAssembly">The assembly to use to get the name of the repository.</param>
/// <param name="repositoryType">A <see cref="Type"/> that implements <see cref="ILoggerRepository"/>
/// and has a no arg constructor. An instance of this type will be created to act
/// as the <see cref="ILoggerRepository"/> for the repository specified.</param>
/// <returns>The <see cref="ILoggerRepository"/> created for the repository.</returns>
[Obsolete("Use CreateRepository instead of CreateDomain")]
public static ILoggerRepository CreateDomain(Assembly repositoryAssembly, Type repositoryType)
{
return LoggerManager.CreateRepository(repositoryAssembly, repositoryType);
}
/// <summary>
/// Creates a repository for the specified assembly and repository type.
/// </summary>
/// <remarks>
/// <para>
/// The <see cref="ILoggerRepository"/> created will be associated with the repository
/// specified such that a call to <see cref="GetRepository(Assembly)"/> with the
/// same assembly specified will return the same repository instance.
/// </para>
/// </remarks>
/// <param name="repositoryAssembly">The assembly to use to get the name of the repository.</param>
/// <param name="repositoryType">A <see cref="Type"/> that implements <see cref="ILoggerRepository"/>
/// and has a no arg constructor. An instance of this type will be created to act
/// as the <see cref="ILoggerRepository"/> for the repository specified.</param>
/// <returns>The <see cref="ILoggerRepository"/> created for the repository.</returns>
public static ILoggerRepository CreateRepository(Assembly repositoryAssembly, Type repositoryType)
{
return LoggerManager.CreateRepository(repositoryAssembly, repositoryType);
}
/// <summary>
/// Gets the list of currently defined repositories.
/// </summary>
/// <remarks>
/// <para>
/// Get an array of all the <see cref="ILoggerRepository"/> objects that have been created.
/// </para>
/// </remarks>
/// <returns>An array of all the known <see cref="ILoggerRepository"/> objects.</returns>
public static ILoggerRepository[] GetAllRepositories()
{
return LoggerManager.GetAllRepositories();
}
#endregion Domain & Repository Manager Methods
#region Extension Handlers
/// <summary>
/// Looks up the wrapper object for the logger specified.
/// </summary>
/// <param name="logger">The logger to get the wrapper for.</param>
/// <returns>The wrapper for the logger specified.</returns>
private static ILog WrapLogger(ILogger logger)
{
return (ILog)s_wrapperMap.GetWrapper(logger);
}
/// <summary>
/// Looks up the wrapper objects for the loggers specified.
/// </summary>
/// <param name="loggers">The loggers to get the wrappers for.</param>
/// <returns>The wrapper objects for the loggers specified.</returns>
private static ILog[] WrapLoggers(ILogger[] loggers)
{
ILog[] results = new ILog[loggers.Length];
for(int i=0; i<loggers.Length; i++)
{
results[i] = WrapLogger(loggers[i]);
}
return results;
}
/// <summary>
/// Create the <see cref="ILoggerWrapper"/> objects used by
/// this manager.
/// </summary>
/// <param name="logger">The logger to wrap.</param>
/// <returns>The wrapper for the logger specified.</returns>
private static ILoggerWrapper WrapperCreationHandler(ILogger logger)
{
return new LogImpl(logger);
}
#endregion
#region Private Static Fields
/// <summary>
/// The wrapper map to use to hold the <see cref="LogImpl"/> objects.
/// </summary>
private static readonly WrapperMap s_wrapperMap = new WrapperMap(new WrapperCreationHandler(WrapperCreationHandler));
#endregion Private Static Fields
}
}
| |
using System;
using System.IO;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using LumiSoft.Net.IO;
namespace LumiSoft.Net.Mime
{
/// <summary>
/// Mime entity header fields collection.
/// </summary>
[Obsolete("See LumiSoft.Net.MIME or LumiSoft.Net.Mail namepaces for replacement.")]
public class HeaderFieldCollection : IEnumerable
{
private List<HeaderField> m_pHeaderFields = null;
/// <summary>
/// Default constructor.
/// </summary>
public HeaderFieldCollection()
{
m_pHeaderFields = new List<HeaderField>();
}
#region method Add
/// <summary>
/// Adds a new header field with specified name and value to the end of the collection.
/// </summary>
/// <param name="fieldName">Header field name.</param>
/// <param name="value">Header field value.</param>
public void Add(string fieldName,string value)
{
m_pHeaderFields.Add(new HeaderField(fieldName,value));
}
/// <summary>
/// Adds specified header field to the end of the collection.
/// </summary>
/// <param name="headerField">Header field.</param>
public void Add(HeaderField headerField)
{
m_pHeaderFields.Add(headerField);
}
#endregion
#region method Insert
/// <summary>
/// Inserts a new header field into the collection at the specified location.
/// </summary>
/// <param name="index">The location in the collection where you want to add the header field.</param>
/// <param name="fieldName">Header field name.</param>
/// <param name="value">Header field value.</param>
public void Insert(int index,string fieldName,string value)
{
m_pHeaderFields.Insert(index,new HeaderField(fieldName,value));
}
#endregion
#region method Remove
/// <summary>
/// Removes header field at the specified index from the collection.
/// </summary>
/// <param name="index">The index of the header field to remove.</param>
public void Remove(int index)
{
m_pHeaderFields.RemoveAt(index);
}
/// <summary>
/// Removes specified header field from the collection.
/// </summary>
/// <param name="field">Header field to remove.</param>
public void Remove(HeaderField field)
{
m_pHeaderFields.Remove(field);
}
#endregion
#region method RemoveAll
/// <summary>
/// Removes all header fields with specified name from the collection.
/// </summary>
/// <param name="fieldName">Header field name.</param>
public void RemoveAll(string fieldName)
{
for(int i=0;i<m_pHeaderFields.Count;i++){
HeaderField h = (HeaderField)m_pHeaderFields[i];
if(h.Name.ToLower() == fieldName.ToLower()){
m_pHeaderFields.Remove(h);
i--;
}
}
}
#endregion
#region method Clear
/// <summary>
/// Clears the collection of all header fields.
/// </summary>
public void Clear()
{
m_pHeaderFields.Clear();
}
#endregion
#region method Contains
/// <summary>
/// Gets if collection contains specified header field.
/// </summary>
/// <param name="fieldName">Header field name.</param>
/// <returns></returns>
public bool Contains(string fieldName)
{
foreach(HeaderField h in m_pHeaderFields){
if(h.Name.ToLower() == fieldName.ToLower()){
return true;
}
}
return false;
}
/// <summary>
/// Gets if collection contains specified header field.
/// </summary>
/// <param name="headerField">Header field.</param>
/// <returns></returns>
public bool Contains(HeaderField headerField)
{
return m_pHeaderFields.Contains(headerField);
}
#endregion
#region method GetFirst
/// <summary>
/// Gets the first header field with the specified name; returns null if no such field exists.
/// </summary>
/// <param name="fieldName">Header field name.</param>
/// <returns></returns>
public HeaderField GetFirst(string fieldName)
{
foreach(HeaderField h in m_pHeaderFields){
if(h.Name.ToLower() == fieldName.ToLower()){
return h;
}
}
return null;
}
#endregion
#region method Get
/// <summary>
/// Gets all header fields with the specified name; returns null if no such field exists.
/// </summary>
/// <param name="fieldName">Header field name.</param>
/// <returns></returns>
public HeaderField[] Get(string fieldName)
{
ArrayList fields = new ArrayList();
foreach(HeaderField h in m_pHeaderFields){
if(h.Name.ToLower() == fieldName.ToLower()){
fields.Add(h);
}
}
if(fields.Count > 0){
HeaderField[] retVal = new HeaderField[fields.Count];
fields.CopyTo(retVal);
return retVal;
}
else{
return null;
}
}
#endregion
#region method Parse
/// <summary>
/// Parses header fields from string.
/// </summary>
/// <param name="headerString">Header string.</param>
public void Parse(string headerString)
{
Parse(new MemoryStream(Encoding.Default.GetBytes(headerString)));
}
/// <summary>
/// Parses header fields from stream. Stream position stays where header reading ends.
/// </summary>
/// <param name="stream">Stream from where to parse.</param>
public void Parse(Stream stream)
{
Parse(new SmartStream(stream,false));
}
/// <summary>
/// Parses header fields from stream. Stream position stays where header reading ends.
/// </summary>
/// <param name="stream">Stream from where to parse.</param>
public void Parse(SmartStream stream)
{
/* Rfc 2822 2.2 Header Fields
Header fields are lines composed of a field name, followed by a colon
(":"), followed by a field body, and terminated by CRLF. A field
name MUST be composed of printable US-ASCII characters (i.e.,
characters that have values between 33 and 126, inclusive), except
colon. A field body may be composed of any US-ASCII characters,
except for CR and LF. However, a field body may contain CRLF when
used in header "folding" and "unfolding" as described in section
2.2.3. All field bodies MUST conform to the syntax described in
sections 3 and 4 of this standard.
Rfc 2822 2.2.3 Long Header Fields
The process of moving from this folded multiple-line representation
of a header field to its single line representation is called
"unfolding". Unfolding is accomplished by simply removing any CRLF
that is immediately followed by WSP. Each header field should be
treated in its unfolded form for further syntactic and semantic
evaluation.
Example:
Subject: aaaaa<CRLF>
<TAB or SP>aaaaa<CRLF>
*/
m_pHeaderFields.Clear();
SmartStream.ReadLineAsyncOP args = new SmartStream.ReadLineAsyncOP(new byte[32000],SizeExceededAction.JunkAndThrowException);
stream.ReadLine(args,false);
if(args.Error != null){
throw args.Error;
}
string line = args.LineUtf8;
while(line != null){
// End of header reached
if(line == ""){
break;
}
// Store the current header line and read the next one. We need to read one header line ahead
// because of multiline (folded) header fields.
string headerField = line;
stream.ReadLine(args,false);
if(args.Error != null){
throw args.Error;
}
line = args.LineUtf8;
// See if header field is multiline. See comment above.
while(line != null && (line.StartsWith("\t") || line.StartsWith(" "))){
headerField += line;
stream.ReadLine(args,false);
if(args.Error != null){
throw args.Error;
}
line = args.LineUtf8;
}
string[] name_value = headerField.Split(new char[]{':'},2);
// There must be both a header field name and a value, otherwise the header field is invalid.
if(name_value.Length == 2){
Add(name_value[0] + ":",name_value[1].Trim());
}
}
}
#endregion
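// Illustrative sketch, not part of the original class: how Parse unfolds a folded header.
// The enclosing collection type is assumed to be named HeaderFieldCollection here; adjust the
// name to whatever this class is actually called.
//
// HeaderFieldCollection headers = new HeaderFieldCollection();
// headers.Parse("Subject: first part\r\n\tsecond part\r\nX-Test: value\r\n\r\n");
// // The continuation line starts with a TAB, so it is joined to the "Subject" line during
// // parsing; the collection then contains two fields named "Subject:" and "X-Test:".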
#region method ToHeaderString
/// <summary>
/// Converts header fields to an RFC 2822 message header string.
/// </summary>
/// <param name="encodingCharSet">Character set to use for non-ASCII header field values. UTF-8 is the recommended value unless you explicitly need another.</param>
/// <returns></returns>
public string ToHeaderString(string encodingCharSet)
{
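// NOTE: encodingCharSet is not applied by this implementation; each field is written as its
// name, a space, and its EncodedValue, terminated by CRLF.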
StringBuilder headerString = new StringBuilder();
foreach(HeaderField f in this){
headerString.Append(f.Name + " " + f.EncodedValue + "\r\n");
}
return headerString.ToString();
}
#endregion
#region interface IEnumerator
/// <summary>
/// Gets enumerator.
/// </summary>
/// <returns></returns>
public IEnumerator GetEnumerator()
{
return m_pHeaderFields.GetEnumerator();
}
#endregion
#region Properties Implementation
/// <summary>
/// Gets header field from specified index.
/// </summary>
public HeaderField this[int index]
{
get{ return (HeaderField)m_pHeaderFields[index]; }
}
/// <summary>
/// Gets header fields count in the collection.
/// </summary>
public int Count
{
get{ return m_pHeaderFields.Count; }
}
#endregion
}
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CodeGeneration;
using Microsoft.CodeAnalysis.Editing;
using Microsoft.CodeAnalysis.Formatting;
using Microsoft.CodeAnalysis.Host;
using Microsoft.CodeAnalysis.Rename;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Simplification;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
using System.Globalization;
namespace Microsoft.CodeAnalysis.EncapsulateField
{
internal abstract class AbstractEncapsulateFieldService : ILanguageService
{
public async Task<EncapsulateFieldResult> EncapsulateFieldAsync(Document document, TextSpan span, bool useDefaultBehavior, CancellationToken cancellationToken)
{
var fields = await GetFieldsAsync(document, span, cancellationToken).ConfigureAwait(false);
if (fields == null || !fields.Any())
{
return null;
}
return new EncapsulateFieldResult(c => EncapsulateFieldResultAsync(document, span, useDefaultBehavior, c));
}
public async Task<IEnumerable<EncapsulateFieldCodeAction>> GetEncapsulateFieldCodeActionsAsync(Document document, TextSpan span, CancellationToken cancellationToken)
{
var fields = (await GetFieldsAsync(document, span, cancellationToken).ConfigureAwait(false)).ToImmutableArrayOrEmpty();
if (fields.Length == 0)
{
return SpecializedCollections.EmptyEnumerable<EncapsulateFieldCodeAction>();
}
if (fields.Length == 1)
{
// there is only one field
return EncapsulateOneField(document, span, fields[0], index: 0);
}
else
{
// there are multiple fields.
var current = SpecializedCollections.EmptyEnumerable<EncapsulateFieldCodeAction>();
if (span.IsEmpty)
{
// if there is no selection, get action for each field + all of them.
for (var i = 0; i < fields.Length; i++)
{
current = current.Concat(EncapsulateOneField(document, span, fields[i], i));
}
}
return current.Concat(EncapsulateAllFields(document, span));
}
}
private IEnumerable<EncapsulateFieldCodeAction> EncapsulateAllFields(Document document, TextSpan span)
{
var action1Text = FeaturesResources.Encapsulate_fields_and_use_property;
var action2Text = FeaturesResources.Encapsulate_fields_but_still_use_field;
return new[]
{
new EncapsulateFieldCodeAction(new EncapsulateFieldResult(c => EncapsulateFieldResultAsync(document, span, true, c)), action1Text),
new EncapsulateFieldCodeAction(new EncapsulateFieldResult(c => EncapsulateFieldResultAsync(document, span, false, c)), action2Text)
};
}
private IEnumerable<EncapsulateFieldCodeAction> EncapsulateOneField(Document document, TextSpan span, IFieldSymbol field, int index)
{
var action1Text = string.Format(FeaturesResources.Encapsulate_field_colon_0_and_use_property, field.Name);
var action2Text = string.Format(FeaturesResources.Encapsulate_field_colon_0_but_still_use_field, field.Name);
return new[]
{
new EncapsulateFieldCodeAction(new EncapsulateFieldResult(c => SingleEncapsulateFieldResultAsync(document, span, index, true, c)), action1Text),
new EncapsulateFieldCodeAction(new EncapsulateFieldResult(c => SingleEncapsulateFieldResultAsync(document, span, index, false, c)), action2Text)
};
}
private async Task<Result> SingleEncapsulateFieldResultAsync(Document document, TextSpan span, int index, bool updateReferences, CancellationToken cancellationToken)
{
var fields = (await GetFieldsAsync(document, span, cancellationToken).ConfigureAwait(false)).ToImmutableArrayOrEmpty();
Contract.Requires(fields.Length > index);
var field = fields[index];
var result = await EncapsulateFieldAsync(field, document, updateReferences, cancellationToken).ConfigureAwait(false);
if (result == null)
{
return new Result(document.Project.Solution, field);
}
return result;
}
private async Task<Result> EncapsulateFieldResultAsync(Document document, TextSpan span, bool updateReferences, CancellationToken cancellationToken)
{
// Later we may also want to record each field and the reason why it failed.
var failedFieldSymbols = new List<IFieldSymbol>();
var fields = await GetFieldsAsync(document, span, cancellationToken).ConfigureAwait(false);
Contract.Requires(fields.Any());
// For now, build up the multiple field case by encapsulating one at a time.
Result result = null;
foreach (var field in fields)
{
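// Each successful encapsulation produces a new solution, so the field symbol has to be
// re-resolved against the current compilation before it is processed.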
var semanticModel = await document.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
var compilation = semanticModel.Compilation;
var currentField = field.GetSymbolKey().Resolve(compilation, cancellationToken: cancellationToken).Symbol as IFieldSymbol;
// We couldn't resolve this field; skip it.
if (currentField == null)
{
failedFieldSymbols.Add(field);
continue;
}
result = await EncapsulateFieldAsync(currentField, document, updateReferences, cancellationToken).ConfigureAwait(false);
if (result == null)
{
failedFieldSymbols.Add(field);
continue;
}
document = result.Solution.GetDocument(document.Id);
}
if (result == null)
{
return new Result(document.Project.Solution, fields.ToArray());
}
// add failed field symbol info
return result.WithFailedFields(failedFieldSymbols);
}
private async Task<Result> EncapsulateFieldAsync(IFieldSymbol field, Document document, bool updateReferences, CancellationToken cancellationToken)
{
var originalField = field;
var finalNames = GeneratePropertyAndFieldNames(field);
var finalFieldName = finalNames.Item1;
var generatedPropertyName = finalNames.Item2;
// Annotate the field declaration so we can find it after the rename.
var fieldDeclaration = field.DeclaringSyntaxReferences.First();
var declarationAnnotation = new SyntaxAnnotation();
document = document.WithSyntaxRoot(fieldDeclaration.SyntaxTree.GetRoot(cancellationToken).ReplaceNode(fieldDeclaration.GetSyntax(cancellationToken),
fieldDeclaration.GetSyntax(cancellationToken).WithAdditionalAnnotations(declarationAnnotation)));
var solution = document.Project.Solution;
foreach (var linkedDocumentId in document.GetLinkedDocumentIds())
{
var linkedDocument = solution.GetDocument(linkedDocumentId);
var linkedRoot = await linkedDocument.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
var linkedFieldNode = linkedRoot.FindNode(fieldDeclaration.Span);
if (linkedFieldNode.Span != fieldDeclaration.Span)
{
continue;
}
var updatedRoot = linkedRoot.ReplaceNode(linkedFieldNode, linkedFieldNode.WithAdditionalAnnotations(declarationAnnotation));
solution = solution.WithDocumentSyntaxRoot(linkedDocumentId, updatedRoot);
}
document = solution.GetDocument(document.Id);
// Resolve the annotated symbol and prepare for rename.
var semanticModel = await document.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
var compilation = semanticModel.Compilation;
field = field.GetSymbolKey().Resolve(compilation, cancellationToken: cancellationToken).Symbol as IFieldSymbol;
var solutionNeedingProperty = solution;
// We couldn't resolve the field after annotating its declaration. Bail out.
if (field == null)
{
return null;
}
solutionNeedingProperty = await UpdateReferencesAsync(
updateReferences, solution, document, field, finalFieldName, generatedPropertyName, cancellationToken).ConfigureAwait(false);
document = solutionNeedingProperty.GetDocument(document.Id);
var markFieldPrivate = field.DeclaredAccessibility != Accessibility.Private;
var rewrittenFieldDeclaration = await RewriteFieldNameAndAccessibility(finalFieldName, markFieldPrivate, document, declarationAnnotation, cancellationToken).ConfigureAwait(false);
document = await Formatter.FormatAsync(document.WithSyntaxRoot(rewrittenFieldDeclaration), Formatter.Annotation, cancellationToken: cancellationToken).ConfigureAwait(false);
solution = document.Project.Solution;
foreach (var linkedDocumentId in document.GetLinkedDocumentIds())
{
var linkedDocument = solution.GetDocument(linkedDocumentId);
var updatedLinkedRoot = await RewriteFieldNameAndAccessibility(finalFieldName, markFieldPrivate, linkedDocument, declarationAnnotation, cancellationToken).ConfigureAwait(false);
var updatedLinkedDocument = await Formatter.FormatAsync(linkedDocument.WithSyntaxRoot(updatedLinkedRoot), Formatter.Annotation, cancellationToken: cancellationToken).ConfigureAwait(false);
solution = updatedLinkedDocument.Project.Solution;
}
document = solution.GetDocument(document.Id);
semanticModel = await document.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
compilation = semanticModel.Compilation;
var newRoot = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
var newDeclaration = newRoot.GetAnnotatedNodes<SyntaxNode>(declarationAnnotation).First();
field = semanticModel.GetDeclaredSymbol(newDeclaration, cancellationToken) as IFieldSymbol;
var generatedProperty = GenerateProperty(generatedPropertyName, finalFieldName, originalField.DeclaredAccessibility, originalField, field.ContainingType, new SyntaxAnnotation(), document, cancellationToken);
var codeGenerationService = document.GetLanguageService<ICodeGenerationService>();
var solutionWithProperty = await AddPropertyAsync(document, document.Project.Solution, field, generatedProperty, cancellationToken).ConfigureAwait(false);
return new Result(solutionWithProperty, originalField.ToDisplayString(), originalField.GetGlyph());
}
private async Task<Solution> UpdateReferencesAsync(
bool updateReferences, Solution solution, Document document, IFieldSymbol field, string finalFieldName, string generatedPropertyName, CancellationToken cancellationToken)
{
if (!updateReferences)
{
return solution;
}
if (field.IsReadOnly)
{
// Inside the constructors we want to rename references to the field to the final field name.
var constructorSyntaxes = GetConstructorNodes(field.ContainingType).ToSet();
if (finalFieldName != field.Name && constructorSyntaxes.Count > 0)
{
solution = await Renamer.RenameSymbolAsync(solution, field, finalFieldName, solution.Options,
location => constructorSyntaxes.Any(c => c.Span.IntersectsWith(location.SourceSpan)), cancellationToken: cancellationToken).ConfigureAwait(false);
document = solution.GetDocument(document.Id);
var compilation = await document.Project.GetCompilationAsync(cancellationToken).ConfigureAwait(false);
field = field.GetSymbolKey().Resolve(compilation, cancellationToken: cancellationToken).Symbol as IFieldSymbol;
}
// Outside the constructors we want to rename references to the field to the final property name.
return await Renamer.RenameSymbolAsync(solution, field, generatedPropertyName, solution.Options,
location => !constructorSyntaxes.Any(c => c.Span.IntersectsWith(location.SourceSpan)), cancellationToken: cancellationToken).ConfigureAwait(false);
}
else
{
// Just rename everything.
return await Renamer.RenameSymbolAsync(solution, field, generatedPropertyName, solution.Options, cancellationToken).ConfigureAwait(false);
}
}
internal abstract IEnumerable<SyntaxNode> GetConstructorNodes(INamedTypeSymbol containingType);
protected async Task<Solution> AddPropertyAsync(Document document, Solution destinationSolution, IFieldSymbol field, IPropertySymbol property, CancellationToken cancellationToken)
{
var codeGenerationService = document.GetLanguageService<ICodeGenerationService>();
var fieldDeclaration = field.DeclaringSyntaxReferences.First();
var options = new CodeGenerationOptions(contextLocation: fieldDeclaration.SyntaxTree.GetLocation(fieldDeclaration.Span));
var destination = field.ContainingType;
var updatedDocument = await codeGenerationService.AddPropertyAsync(destinationSolution, destination, property, options, cancellationToken)
.ConfigureAwait(false);
updatedDocument = await Formatter.FormatAsync(updatedDocument, Formatter.Annotation, cancellationToken: cancellationToken).ConfigureAwait(false);
updatedDocument = await Simplifier.ReduceAsync(updatedDocument, cancellationToken: cancellationToken).ConfigureAwait(false);
return updatedDocument.Project.Solution;
}
protected IPropertySymbol GenerateProperty(string propertyName, string fieldName, Accessibility accessibility, IFieldSymbol field, INamedTypeSymbol containingSymbol, SyntaxAnnotation annotation, Document document, CancellationToken cancellationToken)
{
var factory = document.GetLanguageService<SyntaxGenerator>();
var propertySymbol = annotation.AddAnnotationToSymbol(CodeGenerationSymbolFactory.CreatePropertySymbol(containingType: containingSymbol,
attributes: SpecializedCollections.EmptyList<AttributeData>(),
accessibility: ComputeAccessibility(accessibility, field.Type),
modifiers: new DeclarationModifiers(isStatic: field.IsStatic, isReadOnly: field.IsReadOnly, isUnsafe: field.IsUnsafe()),
type: field.Type,
explicitInterfaceSymbol: null,
name: propertyName,
parameters: SpecializedCollections.EmptyList<IParameterSymbol>(),
getMethod: CreateGet(fieldName, field, factory),
setMethod: field.IsReadOnly || field.IsConst ? null : CreateSet(fieldName, field, factory)));
return Simplifier.Annotation.AddAnnotationToSymbol(
Formatter.Annotation.AddAnnotationToSymbol(propertySymbol));
}
protected abstract Tuple<string, string> GeneratePropertyAndFieldNames(IFieldSymbol field);
protected Accessibility ComputeAccessibility(Accessibility accessibility, ITypeSymbol type)
{
var computedAccessibility = accessibility;
if (accessibility == Accessibility.NotApplicable || accessibility == Accessibility.Private)
{
computedAccessibility = Accessibility.Public;
}
var returnTypeAccessibility = type.DetermineMinimalAccessibility();
return AccessibilityUtilities.Minimum(computedAccessibility, returnTypeAccessibility);
}
protected IMethodSymbol CreateSet(string originalFieldName, IFieldSymbol field, SyntaxGenerator factory)
{
var assigned = !field.IsStatic
? factory.MemberAccessExpression(
factory.ThisExpression(),
factory.IdentifierName(originalFieldName))
: factory.IdentifierName(originalFieldName);
var body = factory.ExpressionStatement(
factory.AssignmentStatement(
assigned.WithAdditionalAnnotations(Simplifier.Annotation),
factory.IdentifierName("value")));
return CodeGenerationSymbolFactory.CreateAccessorSymbol(SpecializedCollections.EmptyList<AttributeData>(),
Accessibility.NotApplicable,
new[] { body }.ToList());
}
protected IMethodSymbol CreateGet(string originalFieldName, IFieldSymbol field, SyntaxGenerator factory)
{
var value = !field.IsStatic
? factory.MemberAccessExpression(
factory.ThisExpression(),
factory.IdentifierName(originalFieldName))
: factory.IdentifierName(originalFieldName);
var body = factory.ReturnStatement(
value.WithAdditionalAnnotations(Simplifier.Annotation));
return CodeGenerationSymbolFactory.CreateAccessorSymbol(SpecializedCollections.EmptyList<AttributeData>(),
Accessibility.NotApplicable,
new[] { body }.ToList());
}
private static readonly char[] s_underscoreCharArray = new[] { '_' };
protected string GeneratePropertyName(string fieldName)
{
// Trim leading underscores
var baseName = fieldName.TrimStart(s_underscoreCharArray);
// Trim leading "m_"
if (baseName.Length >= 2 && baseName[0] == 'm' && baseName[1] == '_')
{
baseName = baseName.Substring(2);
}
// Take original name if no characters left
if (baseName.Length == 0)
{
baseName = fieldName;
}
// Make the first character upper case using the "en-US" culture. See discussion at
// https://github.com/dotnet/roslyn/issues/5524.
var firstCharacter = EnUSCultureInfo.TextInfo.ToUpper(baseName[0]);
return firstCharacter.ToString() + baseName.Substring(1);
}
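// Examples of the rules above (derived directly from this method's logic):
// "_count" -> "Count", "m_value" -> "Value", "x" -> "X", "___" -> "___" (falls back to the original name).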
private static readonly CultureInfo EnUSCultureInfo = new CultureInfo("en-US");
protected abstract Task<SyntaxNode> RewriteFieldNameAndAccessibility(string originalFieldName, bool makePrivate, Document document, SyntaxAnnotation declarationAnnotation, CancellationToken cancellationToken);
protected abstract Task<IEnumerable<IFieldSymbol>> GetFieldsAsync(Document document, TextSpan span, CancellationToken cancellationToken);
internal class Result
{
public Result(Solution solutionWithProperty, string name, Glyph glyph)
{
this.Solution = solutionWithProperty;
this.Name = name;
this.Glyph = glyph;
}
public Result(Solution solutionWithProperty, string name, Glyph glyph, List<IFieldSymbol> failedFieldSymbols) :
this(solutionWithProperty, name, glyph)
{
this.FailedFields = failedFieldSymbols.ToImmutableArrayOrEmpty();
}
public Result(Solution originalSolution, params IFieldSymbol[] fields) :
this(originalSolution, string.Empty, Glyph.Error)
{
this.FailedFields = fields.ToImmutableArrayOrEmpty();
}
public Solution Solution { get; }
public string Name { get; }
public Glyph Glyph { get; }
public ImmutableArray<IFieldSymbol> FailedFields { get; }
public Result WithFailedFields(List<IFieldSymbol> failedFieldSymbols)
{
if (failedFieldSymbols.Count == 0)
{
return this;
}
return new Result(Solution, Name, Glyph, failedFieldSymbols);
}
}
}
}
/**
* Copyright 2016 Dartmouth-Hitchcock
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Data.Linq;
using System.Linq;
using System.Text;
using System.Xml;
using Legion.Core.Services;
using Mjolnir.Encryption;
using Mjolnir.Extensions;
namespace Caesar {
public class Methods {
internal struct VerificationGroups {
public static ParameterVerificationGroup ExpireResultCache = new ParameterVerificationGroup(new string[] { "Service", "Method" });
}
#region Settings
public static void GetAssemblyDirectory(Request request, ReplyNode result, ErrorNode error) {
string assemblyDirectory = null;
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
db.xspGetSettingByKey("AssemblyDirectory", ref assemblyDirectory);
result.AddElement("assemblydirectory", assemblyDirectory);
}
}
#endregion
#region Applications
public static void GetRateLimitTypes(Request request, ReplyNode result, ErrorNode error) {
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
XmlElement xRateType, xRateTypes = result.AddElement("ratelimittypes");
ISingleResult<xspGetRateLimitTypesResult> rates = db.xspGetRateLimitTypes();
foreach (xspGetRateLimitTypesResult rate in rates) {
xRateType = result.AddElement(xRateTypes, "ratelimittype");
xRateType.SetAttribute("id", rate.Id.ToString());
xRateType.InnerText = rate.Name;
}
}
}
public static void RevokeApplicationKey(Request request, ReplyNode result, ErrorNode error){
int? applicationid = null;
string key = null;
string resultcode = null;
if (request.ParameterSet.Verify(new ParameterVerificationGroup("id", ParameterType.Int)))
applicationid = request.ParameterSet.GetInt("id");
else if (request.ParameterSet.ContainsKey("key"))
key = request.ParameterSet["key"];
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
db.xspRevokeKey(applicationid, key, ref resultcode);
if (resultcode == "SUCCESS")
result.AddElement("result", resultcode);
else
error.Text = resultcode;
}
}
public static void GetNewApplicationKey(Request request, ReplyNode result, ErrorNode error) {
string appKey = null;
bool? collision = true;
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
while (collision != false) {
appKey = Guid.NewGuid().ToString().ToUpper();
db.xspCheckApplicationKeyForCollision(appKey, ref collision);
}
}
result.AddElement("applicationkey", appKey);
}
public static void GetApplicationList(Request request, ReplyNode result, ErrorNode error) {
XmlElement applications = result.AddElement("applications");
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
ISingleResult<xspGetApplicationListResult> results = db.xspGetApplicationList();
XmlElement application;
foreach (xspGetApplicationListResult r in results) {
application = result.AddElement(applications, "application");
result.AddElement(application, "id", r.ApplicationId.ToString());
result.AddElement(application, "name", r.ApplicationName);
}
}
}
public static void GetApplicationDetailList(Request request, ReplyNode result, ErrorNode error) {
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
ISingleResult<xspGetApplicationListResult> applications = db.xspGetApplicationList();
XmlElement xApplication, xApplications = result.AddElement("applications");
foreach (xspGetApplicationListResult application in applications) {
xApplication = result.AddElement(xApplications, "application");
result.AddElement(xApplication, "id", application.ApplicationId.ToString());
result.AddElement(xApplication, "key", application.ApplicationKey);
result.AddElement(xApplication, "name", application.ApplicationName);
result.AddElement(xApplication, "consumeriprange", application.ConsumerIPRange);
result.AddElement(xApplication, "description", application.Description);
result.AddElement(xApplication, "ratelimittypeid", application.RateLimitTypeId.ToString());
result.AddElement(xApplication, "ratelimittype", application.RateLimitType);
result.AddElement(xApplication, "ratelimit", application.RateLimit.ToString());
result.AddElement(xApplication, "ratelimitinterval", application.RateLimitInterval.ToString());
result.AddElement(xApplication, "public", (application.IsPublic ? "true" : "false"));
result.AddElement(xApplication, "logged", (application.IsLogged ? "true" : "false"));
}
}
}
public static void UpdateApplication(Request request, ReplyNode result, ErrorNode error) {
string sId = request.ParameterSet["id"],
name = request.ParameterSet["name"],
key = request.ParameterSet["appkey"],
range = (request.ParameterSet["range"].Length == 0 ? null : request.ParameterSet["range"]),
description = request.ParameterSet["description"],
resultcode = null;
bool? ispublic = (request.ParameterSet["public"] == "true" ? true : false);
bool? islogged = (request.ParameterSet["logged"] == "true" ? true : false);
int? ratelimitid = (request.ParameterSet["ratelimittypeid"].Length > 0 ? (int?)int.Parse(request.ParameterSet["ratelimittypeid"]) : null);
int? ratelimit = (request.ParameterSet["ratelimittypeid"].Length > 0 ? (int?)int.Parse(request.ParameterSet["ratelimit"]) : null);
int? ratelimitinterval = (request.ParameterSet["ratelimittypeid"].Length > 0 ? (int?)int.Parse(request.ParameterSet["ratelimitinterval"]) : null);
int id;
int? nId;
if (Int32.TryParse(sId, out id)) {
nId = id;
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
db.xspUpdateApplication(ref nId, name, key, range, description, ratelimitid, ratelimit, ratelimitinterval, ispublic, islogged, ref resultcode);
}
XmlElement r = result.AddElement("result");
result.AddElement(r, "resultcode", resultcode);
if (sId == "-1")
result.AddElement(r, "id", nId.ToString());
}
else
error.Text = "not a valid id";
}
public static void DeleteApplication(Request request, ReplyNode result, ErrorNode error) {
string sId = request.ParameterSet["id"],
resultcode = null;
int id;
int? nId;
if (Int32.TryParse(sId, out id)) {
nId = id;
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
db.xspDeleteApplication(nId, ref resultcode);
}
XmlElement r = result.AddElement("result");
result.AddElement(r, "resultcode", resultcode);
}
else
error.Text = "not a valid id";
}
public static void GetMethodList(Request request, ReplyNode result, ErrorNode error) {
XmlElement methods = result.AddElement("methods");
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
ISingleResult<xspGetMethodsResult> results = db.xspGetMethods();
XmlElement method;
foreach (xspGetMethodsResult r in results) {
method = result.AddElement(methods, "method");
result.AddElement(method, "id", r.MethodId.ToString());
result.AddElement(method, "key", r.MethodKey);
result.AddElement(method, "name", r.MethodName);
result.AddElement(method, "public", (r.IsPublic ? "true" : "false"));
result.AddElement(method, "restricted", (r.IsRestricted == true ? "true" : "false"));
}
}
}
public static void UpdateApplicationPermissions(Request request, ReplyNode result, ErrorNode error) {
string[] permissions = (request.ParameterSet["permissions"] == null ? null : request.ParameterSet["permissions"].Split(';'));
int? applicationId, methodId;
int temp;
string resultcode = "SUCCESS";
if (Int32.TryParse(request.ParameterSet["applicationId"], out temp)){
if (permissions == null)
result.AddElement("resultcode", "SUCCESS");
else {
applicationId = temp;
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
db.xspDeleteAllApplciationPermissions(applicationId);
foreach (string p in permissions) {
// Skip empty entries produced by a trailing separator in the permissions list.
if (p.Length > 0) {
if (Int32.TryParse(p, out temp)) {
methodId = temp;
db.xspAddApplicationPermission(applicationId, methodId, ref resultcode);
if (resultcode != "SUCCESS")
break;
}
else {
error.Text = string.Format("'{0}' not a valid id", p);
break;
}
}
}
result.AddElement("resultcode", resultcode);
}
}
}
}
public static void GetApplicationPermissions(Request request, ReplyNode result, ErrorNode error) {
int? applicationId;
int temp;
if (Int32.TryParse(request.ParameterSet["applicationId"], out temp)) {
applicationId = temp;
XmlElement permissions = result.AddElement("permissions");
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
ISingleResult<xspGetApplicationPermissionsResult> results = db.xspGetApplicationPermissions(applicationId);
XmlElement permission;
foreach (xspGetApplicationPermissionsResult r in results) {
permission = result.AddElement(permissions, "permission");
result.AddElement(permission, "id", r.MethodId.ToString());
}
}
}
}
#endregion
#region Services
public static void GetService(Request request, ReplyNode result, ErrorNode error) {
if (request.ParameterSet.ContainsKey("serviceid")) {
int serviceid = request.ParameterSet.GetInt("serviceid");
string resultcode = null;
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
ISingleResult<xspGetServiceResult> results = db.xspGetService(serviceid, ref resultcode);
foreach (xspGetServiceResult service in results) {
result.AddElement("assemblyname", service.AssemblyName);
result.AddElement("classname", service.ClassName);
result.AddElement("consumeriprange", service.ConsumerIPRange);
result.AddElement("description", service.Description);
result.AddElement("servicekey", service.ServiceKey);
result.AddElement("logged", (service.IsLogged ? "true" : "false"));
result.AddElement("public", (service.IsPublic ? "true" : "false"));
result.AddElement("restricted", (service.IsRestricted ? "true" : "false"));
}
result.AddElement("resultcode", resultcode);
}
}
else
error.Text = "No service specified.";
}
public static void GetServiceList(Request request, ReplyNode result, ErrorNode error) {
XmlElement services = result.AddElement("services");
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
ISingleResult<xspGetServicesResult> results = db.xspGetServices();
XmlElement service;
foreach (xspGetServicesResult r in results) {
service = result.AddElement(services, "service");
result.AddElement(service, "id", r.ServiceId.ToString());
result.AddElement(service, "key", r.ServiceKey);
result.AddElement(service, "assembly", r.AssemblyName);
result.AddElement(service, "class", r.ClassName);
result.AddElement(service, "restricted", (r.IsRestricted ? "true" : "false"));
result.AddElement(service, "public", (r.IsPublic ? "true" : "false"));
result.AddElement(service, "logged", (r.IsLogged ? "true" : "false"));
result.AddElement(service, "description", r.Description);
result.AddElement(service, "consumeriprange", r.ConsumerIPRange);
}
}
}
public static void GetServiceMethods(Request request, ReplyNode result, ErrorNode error) {
int serviceId;
if (Int32.TryParse(request.ParameterSet["serviceId"], out serviceId)) {
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
ISingleResult<xspGetServiceMethodsResult> results = db.xspGetServiceMethods(serviceId);
XmlElement methods = result.AddElement("methods");
XmlElement method;
foreach (xspGetServiceMethodsResult r in results) {
method = result.AddElement(methods, "method");
result.AddElement(method, "id", r.MethodId.ToString());
result.AddElement(method, "serviceid", r.ServiceId.ToString());
result.AddElement(method, "key", r.MethodKey);
result.AddElement(method, "name", r.MethodName);
result.AddElement(method, "cachedresultlifetime", r.CachedResultLifetime.ToString());
result.AddElement(method, "cacheresult", (r.IsResultCacheable ? "true" : "false"));
result.AddElement(method, "restricted", (r.IsRestricted ? "true" : "false"));
result.AddElement(method, "public", (r.IsPublic ? "true" : "false"));
result.AddElement(method, "logged", (r.IsLogged ? "true" : "false"));
}
}
}
}
public static void UpdateService(Request request, ReplyNode result, ErrorNode error) {
string sId = request.ParameterSet["id"],
key = request.ParameterSet["servicekey"],
description = request.ParameterSet["description"],
consumeriprange = request.ParameterSet["consumeriprange"],
assembly = request.ParameterSet["assembly"],
serviceclass = request.ParameterSet["class"],
resultcode = null;
bool? isrestricted = (request.ParameterSet["restricted"] == "true" ? true : false);
bool? ispublic = (request.ParameterSet["public"] == "true" ? true : false);
bool? islogged = (request.ParameterSet["logged"] == "true" ? true : false);
int id;
int? nId;
if (Int32.TryParse(sId, out id)) {
nId = id;
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
db.xspUpdateService(ref nId, key, description, consumeriprange, assembly, serviceclass, isrestricted, ispublic, islogged, ref resultcode);
}
XmlElement r = result.AddElement("result");
result.AddElement(r, "resultcode", resultcode);
if (sId == "-1")
result.AddElement(r, "id", nId.ToString());
}
else
error.Text = "not a valid id";
}
public static void UpdateServiceMethod(Request request, ReplyNode result, ErrorNode error) {
string sId = request.ParameterSet["id"],
sServiceId = request.ParameterSet["serviceid"],
methodKey = request.ParameterSet["methodkey"],
methodName = request.ParameterSet["methodname"],
resultcode = null;
bool? isresultcached = (request.ParameterSet["cacheresult"] == "true" ? true : false);
bool? isrestricted = (request.ParameterSet["restricted"] == "true" ? true : false);
bool? ispublic = (request.ParameterSet["public"] == "true" ? true : false);
bool? islogged = (request.ParameterSet["logged"] == "true" ? true : false);
//bool? islogreplaydetailsonexception = (request.ParameterSet["logreplaydetailsonexception"] == "true" ? true : false);
int id, serviceId, cachedResultLifetime;
int? nId, nServiceId, nCachedResultLifetime = null;
if (Int32.TryParse(sId, out id) && Int32.TryParse(sServiceId, out serviceId)) {
nId = id; nServiceId = serviceId;
if (Int32.TryParse(request.ParameterSet["cachedresultlifetime"], out cachedResultLifetime))
nCachedResultLifetime = cachedResultLifetime;
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
db.xspUpdateServiceMethod(ref nId, nServiceId, methodKey, methodName, nCachedResultLifetime, isresultcached, isrestricted, ispublic, islogged, ref resultcode);
}
XmlElement r = result.AddElement("result");
result.AddElement(r, "resultcode", resultcode);
if (sId == "-1")
result.AddElement(r, "id", nId.ToString());
}
else
error.Text = "not a valid id";
}
public static void DeleteService(Request request, ReplyNode result, ErrorNode error) {
string sId = request.ParameterSet["id"],
resultcode = null;
int id;
int? nId;
if (Int32.TryParse(sId, out id)) {
nId = id;
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
db.xspDeleteService(nId, ref resultcode);
}
XmlElement r = result.AddElement("result");
result.AddElement(r, "resultcode", resultcode);
}
else
error.Text = "not a valid id";
}
public static void DeleteServiceMethod(Request request, ReplyNode result, ErrorNode error) {
string sId = request.ParameterSet["id"],
resultcode = null;
int id;
int? nId;
if (Int32.TryParse(sId, out id)) {
nId = id;
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
db.xspDeleteServiceMethod(nId, ref resultcode);
}
XmlElement r = result.AddElement("result");
result.AddElement(r, "resultcode", resultcode);
}
else
error.Text = "not a valid id";
}
public static void GetServiceSettings(Request request, ReplyNode result, ErrorNode error) {
int serviceid;
string resultcode = null;
XmlElement element, parent;
SimpleAES aes;
string aesKey = null, aesVector = null;
if (request.ParameterSet["id"] != null && Int32.TryParse(request.ParameterSet["id"], out serviceid)) {
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
db.xspGetSettingByKey("AesKey", ref aesKey);
db.xspGetSettingByKey("AesVector", ref aesVector);
ISingleResult<xspGetServiceSettingsResult> settings = db.xspGetServiceSettings(null, serviceid, ref resultcode);
parent = result.AddElement("settings");
foreach (xspGetServiceSettingsResult s in settings) {
element = result.AddElement(parent, "setting");
result.AddElement(element, "id", s.Id.ToString());
result.AddElement(element, "name", s.Name);
if (s.IsEncrypted) {
aes = new SimpleAES(aesKey, string.Format("{0}//{1}//{2}", s.Id, aesVector, s.Name));
result.AddElement(element, "value", aes.DecryptString(s.Value));
result.AddElement(element, "encrypted", "true");
}
else {
result.AddElement(element, "value", s.Value);
result.AddElement(element, "encrypted", "false");
}
}
result.AddElement(parent, "resultcode", resultcode);
}
}
}
public static void UpdateServiceSetting(Request request, ReplyNode result, ErrorNode error) {
int? id = Int32.Parse(request.ParameterSet["id"]);
int? serviceid = (request.ParameterSet["serviceid"] == null ? null : (int?)Int32.Parse(request.ParameterSet["serviceid"]));
string name = request.ParameterSet["name"];
string value = request.ParameterSet["value"];
bool? encrypted = (request.ParameterSet["encrypted"] == "true" ? true : false);
string resultcode = null;
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
if (id == -1) {
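// New setting (id == -1): insert first so the database generates an id, then, if encryption is
// requested, re-encrypt the value with a vector string that includes that new id and the setting
// name, and update the row again so the stored value matches what GetServiceSettings can decrypt.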
db.xspUpdateServiceSetting(ref id, serviceid, name, value, encrypted, ref resultcode);
if (encrypted == true) {
string aesKey = null, aesVector = null;
db.xspGetSettingByKey("AesKey", ref aesKey);
db.xspGetSettingByKey("AesVector", ref aesVector);
SimpleAES aes = new SimpleAES(aesKey, string.Format("{0}//{1}//{2}", id, aesVector, name));
value = aes.EncryptToString(value);
db.xspUpdateServiceSetting(ref id, serviceid, name, value, encrypted, ref resultcode);
}
result.AddElement("id", id.ToString());
}
else {
if (encrypted == true) {
string aesKey = null, aesVector = null;
db.xspGetSettingByKey("AesKey", ref aesKey);
db.xspGetSettingByKey("AesVector", ref aesVector);
SimpleAES aes = new SimpleAES(aesKey, string.Format("{0}//{1}//{2}", id, aesVector, name));
value = aes.EncryptToString(value);
}
db.xspUpdateServiceSetting(ref id, serviceid, name, value, encrypted, ref resultcode);
}
result.AddElement("resultcode", resultcode);
}
}
public static void DeleteServiceSetting(Request request, ReplyNode result, ErrorNode error) {
int? id = Int32.Parse(request.ParameterSet["id"]);
int? serviceid = Int32.Parse(request.ParameterSet["serviceid"]);
string resultcode = null;
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
db.xspDeleteServiceSetting(id, serviceid, ref resultcode);
result.AddElement("resultcode", resultcode);
}
}
#endregion
#region result caching
public static void ExpireResultCache(Request request, ReplyNode result, ErrorNode error) {
if (request.ParameterSet.Verify(VerificationGroups.ExpireResultCache)) {
if (request.Application.HasPermissionTo(request.ParameterSet.GetString("Service"), request.ParameterSet.GetString("Method"))) {
int? itemcount = null;
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
db.xspExpireResultsCache(
request.ParameterSet.GetString("Service"),
request.ParameterSet.GetString("Method"),
ref itemcount
);
result.AddElement("count", (itemcount == null ? "0" : itemcount.ToString()));
}
}
else {
throw new Exception(request.Service.Settings["ApplicationPermissionErrorMessage"].Build(new Dictionary<string, string>() {
{"applicationname", request.Application.Name},
{"service", request.ParameterSet.GetString("Service")},
{"method", request.ParameterSet.GetString("Method")}
}));
}
}
else
error.Text = VerificationGroups.ExpireResultCache.GetDefinition();
}
#endregion
#region Servers
public static void GetServerStatus(Request request, ReplyNode result, ErrorNode error) {
using (LegionLinqDataContext db = new LegionLinqDataContext(request.Service.Settings["LegionConnectionString"])) {
ISingleResult<xspGetServerStatusResult> results = db.xspGetServerStatus();
XmlElement host, hosts = result.AddElement("servers");
foreach (xspGetServerStatusResult r in results) {
host = result.AddElement(hosts, "server");
host.SetAttribute("id", r.Id.ToString());
result.AddElement(host, "ipaddress", r.IPAddress);
result.AddElement(host, "hostname", r.HostName);
result.AddElement(host, "cacherefreshrequired", (r.IsCacheRefreshRequired ? "true" : "false"));
result.AddElement(host, "assemblyrefreshrequired", (r.IsAssemblyRefreshRequired ? "true" : "false"));
}
}
}
#endregion
}
}
using System;
using System.Collections.Generic;
using OpenTK;
namespace Examples.Shapes
{
public sealed class ChamferCube: DrawableShape
{
public enum SubDivs: byte
{
Zero,
One,
Two,
Three,
Four,
}
public ChamferCube( double Width, double Height, double Length, SubDivs subdivs, double radius, bool useDL )
: base( useDL )
{
SlicedSphere.eSubdivisions sphereSubDivs = SlicedSphere.eSubdivisions.Zero;
uint hoseSubDivs = 0;
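// Map the requested subdivision level to the matching sphere subdivision enum and hose segment
// count (0, 1, 3, 7, 15).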
switch ( subdivs )
{
case SubDivs.Zero:
sphereSubDivs = SlicedSphere.eSubdivisions.Zero;
hoseSubDivs = 0;
break;
case SubDivs.One:
sphereSubDivs = SlicedSphere.eSubdivisions.One;
hoseSubDivs = 1;
break;
case SubDivs.Two:
sphereSubDivs = SlicedSphere.eSubdivisions.Two;
hoseSubDivs = 3;
break;
case SubDivs.Three:
sphereSubDivs = SlicedSphere.eSubdivisions.Three;
hoseSubDivs = 7;
break;
case SubDivs.Four:
sphereSubDivs = SlicedSphere.eSubdivisions.Four;
hoseSubDivs = 15;
break;
}
#region Temporary Storage
List<Chunk> AllChunks = new List<Chunk>();
OpenTK.Graphics.OpenGL.BeginMode TemporaryMode;
VertexT2dN3dV3d[] TemporaryVBO;
uint[] TemporaryIBO;
#endregion Temporary Storage
Vector3d FrontTopRightEdge = new Vector3d( +Width - radius, +Height - radius, +Length - radius );
Vector3d FrontTopLeftEdge = new Vector3d( +Width - radius, +Height - radius, -Length + radius );
Vector3d FrontBottomRightEdge = new Vector3d( +Width - radius, -Height + radius, +Length - radius );
Vector3d FrontBottomLeftEdge = new Vector3d( +Width - radius, -Height + radius, -Length + radius );
Vector3d BackTopRightEdge = new Vector3d( -Width + radius, +Height - radius, +Length - radius );
Vector3d BackTopLeftEdge = new Vector3d( -Width + radius, +Height - radius, -Length + radius );
Vector3d BackBottomRightEdge = new Vector3d( -Width + radius, -Height + radius, +Length - radius );
Vector3d BackBottomLeftEdge = new Vector3d( -Width + radius, -Height + radius, -Length + radius );
#region 8 sliced Spheres
SlicedSphere tempSphere;
Vector3d tempVector = Vector3d.Zero;
SlicedSphere.eDir[] tempEdge = new SlicedSphere.eDir[1];
for ( int i = 0; i < 8; i++ )
{
switch ( i )
{
case 0:
tempVector = FrontTopRightEdge;
tempEdge = new SlicedSphere.eDir[] { SlicedSphere.eDir.FrontTopRight };
break;
case 1:
tempVector = FrontTopLeftEdge;
tempEdge = new SlicedSphere.eDir[] { SlicedSphere.eDir.FrontTopLeft };
break;
case 2:
tempVector = FrontBottomRightEdge;
tempEdge = new SlicedSphere.eDir[] { SlicedSphere.eDir.FrontBottomRight };
break;
case 3:
tempVector = FrontBottomLeftEdge;
tempEdge = new SlicedSphere.eDir[] { SlicedSphere.eDir.FrontBottomLeft };
break;
case 4:
tempVector = BackBottomRightEdge;
tempEdge = new SlicedSphere.eDir[] { SlicedSphere.eDir.BackBottomRight };
break;
case 5:
tempVector = BackBottomLeftEdge;
tempEdge = new SlicedSphere.eDir[] { SlicedSphere.eDir.BackBottomLeft };
break;
case 6:
tempVector = BackTopRightEdge;
tempEdge = new SlicedSphere.eDir[] { SlicedSphere.eDir.BackTopRight };
break;
case 7:
tempVector = BackTopLeftEdge;
tempEdge = new SlicedSphere.eDir[] { SlicedSphere.eDir.BackTopLeft };
break;
}
tempSphere = new SlicedSphere( radius,
tempVector,
sphereSubDivs,
tempEdge,
false );
tempSphere.GetArraysforVBO( out TemporaryMode, out TemporaryVBO, out TemporaryIBO );
tempSphere.Dispose();
AllChunks.Add( new Chunk( ref TemporaryVBO, ref TemporaryIBO ) );
}
#endregion 8 sliced Spheres
#region 12 sliced Hoses
SlicedHose tempHose;
SlicedHose.eSide tempSide = SlicedHose.eSide.BackBottom;
Vector3d tempHoseStart = Vector3d.Zero;
Vector3d tempHoseEnd = Vector3d.Zero;
for ( int i = 0; i < 12; i++ )
{
switch ( i )
{
#region Around X Axis
case 0:
tempSide = SlicedHose.eSide.BottomRight;
tempHoseStart = BackBottomRightEdge;
tempHoseEnd = FrontBottomRightEdge;
break;
case 1:
tempSide = SlicedHose.eSide.TopRight;
tempHoseStart = BackTopRightEdge;
tempHoseEnd = FrontTopRightEdge;
break;
case 2:
tempSide = SlicedHose.eSide.TopLeft;
tempHoseStart = BackTopLeftEdge;
tempHoseEnd = FrontTopLeftEdge;
break;
case 3:
tempSide = SlicedHose.eSide.BottomLeft;
tempHoseStart = BackBottomLeftEdge;
tempHoseEnd = FrontBottomLeftEdge;
break;
#endregion Around X Axis
#region Around Y Axis
case 4:
tempSide = SlicedHose.eSide.FrontRight;
tempHoseStart = FrontBottomRightEdge;
tempHoseEnd = FrontTopRightEdge;
break;
case 5:
tempSide = SlicedHose.eSide.BackRight;
tempHoseStart = BackBottomRightEdge;
tempHoseEnd = BackTopRightEdge;
break;
case 6:
tempSide = SlicedHose.eSide.BackLeft;
tempHoseStart = BackBottomLeftEdge;
tempHoseEnd = BackTopLeftEdge;
break;
case 7:
tempSide = SlicedHose.eSide.FrontLeft;
tempHoseStart = FrontBottomLeftEdge;
tempHoseEnd = FrontTopLeftEdge;
break;
#endregion Around Y Axis
#region Around Z Axis
case 8:
tempSide = SlicedHose.eSide.FrontTop;
tempHoseStart = FrontTopRightEdge;
tempHoseEnd = FrontTopLeftEdge;
break;
case 9:
tempSide = SlicedHose.eSide.BackTop;
tempHoseStart = BackTopRightEdge;
tempHoseEnd = BackTopLeftEdge;
break;
case 10:
tempSide = SlicedHose.eSide.BackBottom;
tempHoseStart = BackBottomRightEdge;
tempHoseEnd = BackBottomLeftEdge;
break;
case 11:
tempSide = SlicedHose.eSide.FrontBottom;
tempHoseStart = FrontBottomRightEdge;
tempHoseEnd = FrontBottomLeftEdge;
break;
#endregion Around Z Axis
}
tempHose = new SlicedHose( tempSide,
hoseSubDivs,
radius,
tempHoseStart,
tempHoseEnd,
false );
tempHose.GetArraysforVBO( out TemporaryMode, out TemporaryVBO, out TemporaryIBO );
tempHose.Dispose();
AllChunks.Add( new Chunk( ref TemporaryVBO, ref TemporaryIBO ) );
}
#endregion 12 sliced Hoses
#region 6 quads for the sides
VertexT2dN3dV3d[] tempVBO = new VertexT2dN3dV3d[4];
uint[] tempIBO = new uint[6] { 0, 1, 2, 0, 2, 3 }; // all quads share this IBO
// all quads use the same texcoords
tempVBO[0].TexCoord = new Vector2d( 0.0, 1.0 );
tempVBO[1].TexCoord = new Vector2d( 0.0, 0.0 );
tempVBO[2].TexCoord = new Vector2d( 1.0, 0.0 );
tempVBO[3].TexCoord = new Vector2d( 1.0, 1.0 );
// front face
tempVBO[0].Normal = tempVBO[1].Normal = tempVBO[2].Normal = tempVBO[3].Normal = Vector3d.UnitX;
tempVBO[0].Position = FrontTopRightEdge + new Vector3d( radius, 0.0, 0.0 );
tempVBO[1].Position = FrontBottomRightEdge + new Vector3d( radius, 0.0, 0.0 );
tempVBO[2].Position = FrontBottomLeftEdge + new Vector3d( radius, 0.0, 0.0 );
tempVBO[3].Position = FrontTopLeftEdge + new Vector3d( radius, 0.0, 0.0 );
AllChunks.Add( new Chunk( ref tempVBO, ref tempIBO ) );
// back face
tempVBO[0].Normal = tempVBO[1].Normal = tempVBO[2].Normal = tempVBO[3].Normal = -Vector3d.UnitX;
tempVBO[0].Position = BackTopLeftEdge - new Vector3d( radius, 0.0, 0.0 );
tempVBO[1].Position = BackBottomLeftEdge - new Vector3d( radius, 0.0, 0.0 );
tempVBO[2].Position = BackBottomRightEdge - new Vector3d( radius, 0.0, 0.0 );
tempVBO[3].Position = BackTopRightEdge - new Vector3d( radius, 0.0, 0.0 );
AllChunks.Add( new Chunk( ref tempVBO, ref tempIBO ) );
// top face
tempVBO[0].Normal = tempVBO[1].Normal = tempVBO[2].Normal = tempVBO[3].Normal = Vector3d.UnitY;
tempVBO[0].Position = BackTopRightEdge + new Vector3d( 0.0, radius, 0.0 );
tempVBO[1].Position = FrontTopRightEdge + new Vector3d( 0.0, radius, 0.0 );
tempVBO[2].Position = FrontTopLeftEdge + new Vector3d( 0.0, radius, 0.0 );
tempVBO[3].Position = BackTopLeftEdge + new Vector3d( 0.0, radius, 0.0 );
AllChunks.Add( new Chunk( ref tempVBO, ref tempIBO ) );
// bottom face
tempVBO[0].Normal = tempVBO[1].Normal = tempVBO[2].Normal = tempVBO[3].Normal = -Vector3d.UnitY;
tempVBO[0].Position = BackBottomLeftEdge - new Vector3d( 0.0, radius, 0.0 );
tempVBO[1].Position = FrontBottomLeftEdge - new Vector3d( 0.0, radius, 0.0 );
tempVBO[2].Position = FrontBottomRightEdge - new Vector3d( 0.0, radius, 0.0 );
tempVBO[3].Position = BackBottomRightEdge - new Vector3d( 0.0, radius, 0.0 );
AllChunks.Add( new Chunk( ref tempVBO, ref tempIBO ) );
// right face
tempVBO[0].Normal = tempVBO[1].Normal = tempVBO[2].Normal = tempVBO[3].Normal = Vector3d.UnitZ;
tempVBO[0].Position = BackTopRightEdge + new Vector3d( 0.0, 0.0, radius );
tempVBO[1].Position = BackBottomRightEdge + new Vector3d( 0.0, 0.0, radius );
tempVBO[2].Position = FrontBottomRightEdge + new Vector3d( 0.0, 0.0, radius );
tempVBO[3].Position = FrontTopRightEdge + new Vector3d( 0.0, 0.0, radius );
AllChunks.Add( new Chunk( ref tempVBO, ref tempIBO ) );
// left face
tempVBO[0].Normal = tempVBO[1].Normal = tempVBO[2].Normal = tempVBO[3].Normal = -Vector3d.UnitZ;
tempVBO[0].Position = FrontTopLeftEdge - new Vector3d( 0.0, 0.0, radius );
tempVBO[1].Position = FrontBottomLeftEdge - new Vector3d( 0.0, 0.0, radius );
tempVBO[2].Position = BackBottomLeftEdge - new Vector3d( 0.0, 0.0, radius );
tempVBO[3].Position = BackTopLeftEdge - new Vector3d( 0.0, 0.0, radius );
AllChunks.Add( new Chunk( ref tempVBO, ref tempIBO ) );
#endregion 6 quads for the sides
#region Final Assembly of Chunks
PrimitiveMode = OpenTK.Graphics.OpenGL.BeginMode.Triangles;
Chunk.GetArray( ref AllChunks, out VertexArray, out IndexArray );
AllChunks.Clear();
#endregion Final Assembly of Chunks
}
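// Example usage (illustrative sketch; the rendering loop and any draw/dispose calls are assumed
// to come from DrawableShape and the surrounding Examples project, not shown here):
// var chamferCube = new ChamferCube(1.0, 1.0, 1.0, ChamferCube.SubDivs.Two, 0.1, false);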
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
private static void ExtractVector128Single1()
{
var test = new ImmUnaryOpTest__ExtractVector128Single1();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
// Validates basic functionality works, using LoadAligned
test.RunBasicScenario_LoadAligned();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
// Validates calling via reflection works, using LoadAligned
test.RunReflectionScenario_LoadAligned();
}
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
// Validates passing a local works, using LoadAligned
test.RunLclVarScenario_LoadAligned();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
// Validates passing an instance member of a class works
test.RunClassFldScenario();
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class ImmUnaryOpTest__ExtractVector128Single1
{
private struct TestStruct
{
public Vector256<Single> _fld;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref testStruct._fld), ref Unsafe.As<Single, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector256<Single>>());
return testStruct;
}
public void RunStructFldScenario(ImmUnaryOpTest__ExtractVector128Single1 testClass)
{
var result = Avx.ExtractVector128(_fld, 1);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld, testClass._dataTable.outArrayPtr);
}
}
private static readonly int LargestVectorSize = 32;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector256<Single>>() / sizeof(Single);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<Single>>() / sizeof(Single);
private static Single[] _data = new Single[Op1ElementCount];
private static Vector256<Single> _clsVar;
private Vector256<Single> _fld;
private SimpleUnaryOpTest__DataTable<Single, Single> _dataTable;
static ImmUnaryOpTest__ExtractVector128Single1()
{
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _clsVar), ref Unsafe.As<Single, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector256<Single>>());
}
public ImmUnaryOpTest__ExtractVector128Single1()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _fld), ref Unsafe.As<Single, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector256<Single>>());
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetSingle(); }
_dataTable = new SimpleUnaryOpTest__DataTable<Single, Single>(_data, new Single[RetElementCount], LargestVectorSize);
}
public bool IsSupported => Avx.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
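// ExtractVector128 with an immediate of 1 selects the upper 128-bit half of the 256-bit source
// vector (VEXTRACTF128 on hardware).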
var result = Avx.ExtractVector128(
Unsafe.Read<Vector256<Single>>(_dataTable.inArrayPtr),
1
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = Avx.ExtractVector128(
Avx.LoadVector256((Single*)(_dataTable.inArrayPtr)),
1
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));
var result = Avx.ExtractVector128(
Avx.LoadAlignedVector256((Single*)(_dataTable.inArrayPtr)),
1
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(Avx).GetMethod(nameof(Avx.ExtractVector128), new Type[] { typeof(Vector256<Single>), typeof(byte) })
.Invoke(null, new object[] {
Unsafe.Read<Vector256<Single>>(_dataTable.inArrayPtr),
(byte)1
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(Avx).GetMethod(nameof(Avx.ExtractVector128), new Type[] { typeof(Vector256<Single>), typeof(byte) })
.Invoke(null, new object[] {
Avx.LoadVector256((Single*)(_dataTable.inArrayPtr)),
(byte)1
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));
var result = typeof(Avx).GetMethod(nameof(Avx.ExtractVector128), new Type[] { typeof(Vector256<Single>), typeof(byte) })
.Invoke(null, new object[] {
Avx.LoadAlignedVector256((Single*)(_dataTable.inArrayPtr)),
(byte)1
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Avx.ExtractVector128(
_clsVar,
1
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var firstOp = Unsafe.Read<Vector256<Single>>(_dataTable.inArrayPtr);
var result = Avx.ExtractVector128(firstOp, 1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(firstOp, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var firstOp = Avx.LoadVector256((Single*)(_dataTable.inArrayPtr));
var result = Avx.ExtractVector128(firstOp, 1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(firstOp, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));
var firstOp = Avx.LoadAlignedVector256((Single*)(_dataTable.inArrayPtr));
var result = Avx.ExtractVector128(firstOp, 1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(firstOp, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new ImmUnaryOpTest__ExtractVector128Single1();
var result = Avx.ExtractVector128(test._fld, 1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld, _dataTable.outArrayPtr);
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Avx.ExtractVector128(_fld, 1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Avx.ExtractVector128(test._fld, 1);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector256<Single> firstOp, void* result, [CallerMemberName] string method = "")
{
Single[] inArray = new Single[Op1ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray[0]), firstOp);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Single>>());
ValidateResult(inArray, outArray, method);
}
private void ValidateResult(void* firstOp, void* result, [CallerMemberName] string method = "")
{
Single[] inArray = new Single[Op1ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray[0]), ref Unsafe.AsRef<byte>(firstOp), (uint)Unsafe.SizeOf<Vector256<Single>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Single>>());
ValidateResult(inArray, outArray, method);
}
private void ValidateResult(Single[] firstOp, Single[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
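// ExtractVector128 with an index of 1 returns the upper 128 bits, i.e. elements 4..7 of the 8-element Vector256<Single>.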
if (BitConverter.SingleToInt32Bits(result[0]) != BitConverter.SingleToInt32Bits(firstOp[4]))
{
succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if (BitConverter.SingleToInt32Bits(result[i]) != BitConverter.SingleToInt32Bits(firstOp[i+4]))
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Avx)}.{nameof(Avx.ExtractVector128)}<Single>(Vector256<Single><9>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" firstOp: ({string.Join(", ", firstOp)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| |
// Copyright (c) ppy Pty Ltd <[email protected]>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using System.IO;
using System.Linq;
using JetBrains.Annotations;
using osu.Framework.Audio;
using osu.Framework.Audio.Track;
using osu.Framework.Graphics.Textures;
using osu.Framework.IO.Stores;
using osu.Framework.Lists;
using osu.Framework.Logging;
using osu.Framework.Platform;
using osu.Framework.Statistics;
using osu.Framework.Testing;
using osu.Game.Beatmaps.Formats;
using osu.Game.IO;
using osu.Game.Skinning;
using osu.Game.Storyboards;
namespace osu.Game.Beatmaps
{
public class WorkingBeatmapCache : IBeatmapResourceProvider, IWorkingBeatmapCache
{
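// Weak references let cached working beatmaps be collected once nothing else holds them.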
private readonly WeakList<BeatmapManagerWorkingBeatmap> workingCache = new WeakList<BeatmapManagerWorkingBeatmap>();
/// <summary>
/// A default representation of a WorkingBeatmap to use when no beatmap is available.
/// </summary>
public readonly WorkingBeatmap DefaultBeatmap;
public BeatmapModelManager BeatmapManager { private get; set; }
private readonly AudioManager audioManager;
private readonly IResourceStore<byte[]> resources;
private readonly LargeTextureStore largeTextureStore;
private readonly ITrackStore trackStore;
private readonly IResourceStore<byte[]> files;
[CanBeNull]
private readonly GameHost host;
public WorkingBeatmapCache([NotNull] AudioManager audioManager, IResourceStore<byte[]> resources, IResourceStore<byte[]> files, WorkingBeatmap defaultBeatmap = null, GameHost host = null)
{
DefaultBeatmap = defaultBeatmap;
this.audioManager = audioManager;
this.resources = resources;
this.host = host;
this.files = files;
largeTextureStore = new LargeTextureStore(host?.CreateTextureLoaderStore(files));
trackStore = audioManager.GetTrackStore(files);
}
public void Invalidate(BeatmapSetInfo info)
{
if (info.Beatmaps == null) return;
foreach (var b in info.Beatmaps)
Invalidate(b);
}
public void Invalidate(BeatmapInfo info)
{
lock (workingCache)
{
var working = workingCache.FirstOrDefault(w => w.BeatmapInfo?.ID == info.ID);
if (working != null)
{
Logger.Log($"Invalidating working beatmap cache for {info}");
workingCache.Remove(working);
}
}
}
public virtual WorkingBeatmap GetWorkingBeatmap(BeatmapInfo beatmapInfo)
{
// if there are no files, presume the full beatmap info has not yet been fetched from the database.
if (beatmapInfo?.BeatmapSet?.Files.Count == 0)
{
int lookupId = beatmapInfo.ID;
beatmapInfo = BeatmapManager.QueryBeatmap(b => b.ID == lookupId);
}
if (beatmapInfo?.BeatmapSet == null)
return DefaultBeatmap;
lock (workingCache)
{
var working = workingCache.FirstOrDefault(w => w.BeatmapInfo?.ID == beatmapInfo.ID);
if (working != null)
return working;
beatmapInfo.Metadata ??= beatmapInfo.BeatmapSet.Metadata;
workingCache.Add(working = new BeatmapManagerWorkingBeatmap(beatmapInfo, this));
// best effort; may be higher than expected.
GlobalStatistics.Get<int>(nameof(Beatmaps), $"Cached {nameof(WorkingBeatmap)}s").Value = workingCache.Count();
return working;
}
}
#region IResourceStorageProvider
TextureStore IBeatmapResourceProvider.LargeTextureStore => largeTextureStore;
ITrackStore IBeatmapResourceProvider.Tracks => trackStore;
AudioManager IStorageResourceProvider.AudioManager => audioManager;
IResourceStore<byte[]> IStorageResourceProvider.Files => files;
IResourceStore<byte[]> IStorageResourceProvider.Resources => resources;
IResourceStore<TextureUpload> IStorageResourceProvider.CreateTextureLoaderStore(IResourceStore<byte[]> underlyingStore) => host?.CreateTextureLoaderStore(underlyingStore);
#endregion
[ExcludeFromDynamicCompile]
private class BeatmapManagerWorkingBeatmap : WorkingBeatmap
{
[NotNull]
private readonly IBeatmapResourceProvider resources;
public BeatmapManagerWorkingBeatmap(BeatmapInfo beatmapInfo, [NotNull] IBeatmapResourceProvider resources)
: base(beatmapInfo, resources.AudioManager)
{
this.resources = resources;
}
protected override IBeatmap GetBeatmap()
{
if (BeatmapInfo.Path == null)
return new Beatmap { BeatmapInfo = BeatmapInfo };
try
{
using (var stream = new LineBufferedReader(GetStream(BeatmapSetInfo.GetPathForFile(BeatmapInfo.Path))))
return Decoder.GetDecoder<Beatmap>(stream).Decode(stream);
}
catch (Exception e)
{
Logger.Error(e, "Beatmap failed to load");
return null;
}
}
protected override bool BackgroundStillValid(Texture b) => false; // bypass lazy logic. we want to return a new background each time for refcounting purposes.
protected override Texture GetBackground()
{
if (Metadata?.BackgroundFile == null)
return null;
try
{
return resources.LargeTextureStore.Get(BeatmapSetInfo.GetPathForFile(Metadata.BackgroundFile));
}
catch (Exception e)
{
Logger.Error(e, "Background failed to load");
return null;
}
}
protected override Track GetBeatmapTrack()
{
if (Metadata?.AudioFile == null)
return null;
try
{
return resources.Tracks.Get(BeatmapSetInfo.GetPathForFile(Metadata.AudioFile));
}
catch (Exception e)
{
Logger.Error(e, "Track failed to load");
return null;
}
}
protected override Waveform GetWaveform()
{
if (Metadata?.AudioFile == null)
return null;
try
{
var trackData = GetStream(BeatmapSetInfo.GetPathForFile(Metadata.AudioFile));
return trackData == null ? null : new Waveform(trackData);
}
catch (Exception e)
{
Logger.Error(e, "Waveform failed to load");
return null;
}
}
protected override Storyboard GetStoryboard()
{
Storyboard storyboard;
try
{
using (var stream = new LineBufferedReader(GetStream(BeatmapSetInfo.GetPathForFile(BeatmapInfo.Path))))
{
var decoder = Decoder.GetDecoder<Storyboard>(stream);
var storyboardFilename = BeatmapSetInfo?.Files.FirstOrDefault(f => f.Filename.EndsWith(".osb", StringComparison.OrdinalIgnoreCase))?.Filename;
// todo: support loading from both set-wide storyboard *and* beatmap specific.
if (string.IsNullOrEmpty(storyboardFilename))
storyboard = decoder.Decode(stream);
else
{
using (var secondaryStream = new LineBufferedReader(GetStream(BeatmapSetInfo.GetPathForFile(storyboardFilename))))
storyboard = decoder.Decode(stream, secondaryStream);
}
}
}
catch (Exception e)
{
Logger.Error(e, "Storyboard failed to load");
storyboard = new Storyboard();
}
storyboard.BeatmapInfo = BeatmapInfo;
return storyboard;
}
protected internal override ISkin GetSkin()
{
try
{
return new LegacyBeatmapSkin(BeatmapInfo, resources.Files, resources);
}
catch (Exception e)
{
Logger.Error(e, "Skin failed to load");
return null;
}
}
public override Stream GetStream(string storagePath) => resources.Files.GetStream(storagePath);
}
}
}
| |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="RectFileReader.cs" company="Microsoft">
// (c) Microsoft Corporation. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.RegularExpressions;
using Microsoft.Msagl.Core.Geometry.Curves;
using Microsoft.Msagl.Core.Layout;
using Microsoft.Msagl.Routing;
using Microsoft.Msagl.Routing.Visibility;
namespace Microsoft.Msagl.UnitTests.Rectilinear
{
using System.Linq;
using System.Text;
using Core.DataStructures;
using Core.Geometry;
using Routing.Rectilinear;
internal class RectFileReader : IDisposable
{
internal List<Shape> UnpaddedObstacles { get; private set; }
internal List<Polyline> PaddedObstacles { get; private set; }
internal List<EdgeGeometry> RoutingSpecs { get; private set; }
internal List<LineSegment> HorizontalScanLineSegments { get; private set; }
internal List<LineSegment> VerticalScanLineSegments { get; private set; }
internal VisibilityGraph VisibilityGraph { get; private set; }
internal int InitialSeed { get; private set; }
internal string RandomObstacleArg { get; private set; }
internal int RandomObstacleCount { get; private set; }
internal string RandomObstacleDensity { get; private set; }
internal bool RandomObstacleRotate { get; private set; }
internal double Padding { get; private set; }
internal double EdgeSeparation { get; private set; }
internal bool RouteToCenter { get; private set; }
internal double ArrowheadLength { get; private set; }
private int fileVertexCount;
private int fileEdgeCount;
internal bool IsRandom { get; set; }
// For reading the relationship between free relative ports and their shapes.
internal Dictionary<Port, Shape> FreeRelativePortToShapeMap { get; private set; }
internal bool UseFreePortsForObstaclePorts { get; private set; }
internal bool UseSparseVisibilityGraph { get; private set; }
internal bool UseObstacleRectangles { get; private set; }
internal double BendPenalty { get; private set; }
internal bool LimitPortVisibilitySpliceToEndpointBoundingBox { get; private set; }
// Verification stuff.
internal bool WantPaths { get; private set; }
internal bool WantNudger { get; private set; }
internal bool WantVerify { get; private set; }
internal double StraightTolerance { get; private set; }
internal double CornerTolerance { get; private set; }
// Used by subroutines.
private string currentLine;
private int currentLineNumber;
private StreamReader inputFileReader;
private readonly Dictionary<int, Port> idToPortMap = new Dictionary<int, Port>();
private readonly Dictionary<int, Shape> idToShapeMap = new Dictionary<int, Shape>();
private readonly Dictionary<Shape, int> shapeToIdMap = new Dictionary<Shape, int>();
private readonly Dictionary<int, Polyline> idToPaddedPolylineMap = new Dictionary<int, Polyline>();
private readonly Dictionary<Tuple<Port, Port>, Curve> portsToPathMap = new Dictionary<Tuple<Port, Port>, Curve>();
// For clumps and convex hulls.
private class ObstacleAccretion
{
internal readonly int Id;
internal readonly List<int> SiblingIds = new List<int>();
internal Polyline Polyline;
// Filled in by clump or convex hull verification.
internal object RouterAccretion;
internal ObstacleAccretion(int id)
{
this.Id = id;
}
}
private readonly Dictionary<int, ObstacleAccretion> convexHullIdToAccretionMap = new Dictionary<int, ObstacleAccretion>();
private readonly Dictionary<int, ObstacleAccretion> shapeIdToConvexHullMap = new Dictionary<int, ObstacleAccretion>();
private readonly Dictionary<int, ObstacleAccretion> clumpIdToAccretionMap = new Dictionary<int, ObstacleAccretion>();
private readonly Dictionary<int, ObstacleAccretion> shapeIdToClumpMap = new Dictionary<int, ObstacleAccretion>();
// This is not "provably invalid" as any shape Id might have it but we currently don't
// assign an id < 0 except for -1 for shapeId for FreePorts (which don't have a shape).
private const int InvalidId = -42;
private readonly TestPointComparer comparer;
internal RectFileReader(string fileName, int fileRoundingDigits)
{
comparer = new TestPointComparer(fileRoundingDigits);
UnpaddedObstacles = new List<Shape>();
PaddedObstacles = new List<Polyline>();
RoutingSpecs = new List<EdgeGeometry>();
HorizontalScanLineSegments = new List<LineSegment>();
VerticalScanLineSegments = new List<LineSegment>();
VisibilityGraph = new VisibilityGraph();
this.FreeRelativePortToShapeMap = new Dictionary<Port, Shape>();
this.inputFileReader = new StreamReader(fileName);
this.RandomObstacleArg = RectFileStrings.NullStr;
this.RandomObstacleDensity = RectFileStrings.NullStr;
// Input parameters
ReadHeader();
// Input detail
ReadInputObstacles();
ReadPorts();
ReadRoutingSpecs();
// Output detail.
ReadPaddedObstacles();
ReadConvexHulls();
ReadScanSegments();
ReadVisibilityGraph();
ReadPaths();
this.inputFileReader.Close();
if (0 == this.UnpaddedObstacles.Count)
{
Validate.Fail("No obstacles found in file");
}
}
private void ReadHeader()
{
// Input parameters and output summary
Match m = ParseNext(RectFileStrings.ParseSeed);
if (m.Success)
{
string strSeed = m.Groups["seed"].ToString();
System.Globalization.NumberStyles style = System.Globalization.NumberStyles.Integer;
if (strSeed.StartsWith("0x", StringComparison.OrdinalIgnoreCase))
{
// For some reason the 0x prefix is not allowed for hex strings.
strSeed = strSeed.Substring(2);
style = System.Globalization.NumberStyles.HexNumber;
}
this.InitialSeed = int.Parse(strSeed, style);
}
m = ParseNext(RectFileStrings.ParseRandomArgs);
if (!IsString(RectFileStrings.NullStr, m.Groups["arg"].ToString()))
{
this.IsRandom = true;
this.RandomObstacleArg = m.Groups["arg"].ToString();
this.RandomObstacleCount = int.Parse(m.Groups["count"].ToString());
this.RandomObstacleDensity = m.Groups["density"].ToString();
this.RandomObstacleRotate = bool.Parse(m.Groups["rotate"].ToString());
}
// This sequencing assumes the header members appear in the expected order.
m = ParseNext(RectFileStrings.ParsePadding);
this.Padding = double.Parse(m.Groups["padding"].ToString());
m = ParseNext(RectFileStrings.ParseEdgeSeparation);
this.EdgeSeparation = double.Parse(m.Groups["sep"].ToString());
m = ParseNext(RectFileStrings.ParseRouteToCenter);
this.RouteToCenter = bool.Parse(m.Groups["toCenter"].ToString());
m = ParseNext(RectFileStrings.ParseArrowheadLength);
this.ArrowheadLength = double.Parse(m.Groups["length"].ToString());
m = this.ParseNext(RectFileStrings.ParseUseFreePortsForObstaclePorts);
this.UseFreePortsForObstaclePorts = bool.Parse(m.Groups["value"].ToString());
m = this.ParseNext(RectFileStrings.ParseUseSparseVisibilityGraph);
this.UseSparseVisibilityGraph = bool.Parse(m.Groups["value"].ToString());
m = this.ParseNext(RectFileStrings.ParseUseObstacleRectangles);
this.UseObstacleRectangles = bool.Parse(m.Groups["value"].ToString());
m = ParseNext(RectFileStrings.ParseBendPenalty);
this.BendPenalty = double.Parse(m.Groups["value"].ToString());
m = this.ParseNext(RectFileStrings.ParseLimitPortVisibilitySpliceToEndpointBoundingBox);
this.LimitPortVisibilitySpliceToEndpointBoundingBox = bool.Parse(m.Groups["value"].ToString());
m = ParseNext(RectFileStrings.ParseWantPaths);
this.WantPaths = bool.Parse(m.Groups["value"].ToString());
m = ParseNext(RectFileStrings.ParseWantNudger);
this.WantNudger = bool.Parse(m.Groups["value"].ToString());
m = ParseNext(RectFileStrings.ParseWantVerify);
this.WantVerify = bool.Parse(m.Groups["value"].ToString());
m = ParseNext(RectFileStrings.ParseStraightTolerance);
this.StraightTolerance = double.Parse(m.Groups["value"].ToString());
m = ParseNext(RectFileStrings.ParseCornerTolerance);
this.CornerTolerance = double.Parse(m.Groups["value"].ToString());
// Output summary
m = ParseNext(RectFileStrings.ParseVisibilityGraphSummary);
this.fileVertexCount = int.Parse(m.Groups["vcount"].ToString());
this.fileEdgeCount = int.Parse(m.Groups["ecount"].ToString());
}
private void ReadInputObstacles()
{
this.ReadUnpaddedObstacles();
// Sort the sibling id lists in place; OrderBy alone would discard its result and leave them unsorted,
// and later verification relies on the SiblingIds already being ordered.
foreach (var sibList in this.shapeIdToClumpMap.Values.Select(acc => acc.SiblingIds)) { sibList.Sort(); }
foreach (var sibList in this.shapeIdToConvexHullMap.Values.Select(acc => acc.SiblingIds)) { sibList.Sort(); }
}
private void ReadUnpaddedObstacles()
{
this.VerifyIsNextLine(RectFileStrings.BeginUnpaddedObstacles);
// Get to the first line for consistency with the lookahead for children which will end up
// reading the following line.
this.NextLine();
for (;;)
{
int id;
Shape shape;
if (this.IsLine(RectFileStrings.BeginCurve))
{
id = ParseNextId();
shape = new Shape(this.ReadCurve()) { UserData = id };
}
else if (this.IsLine(RectFileStrings.BeginEllipse))
{
id = ParseNextId();
shape = new Shape(this.ReadEllipse()) { UserData = id };
}
else if (this.IsLine(RectFileStrings.BeginRoundedRect))
{
id = ParseNextId();
shape = new Shape(this.ReadRoundedRect()) { UserData = id };
}
else if (this.IsLine(RectFileStrings.BeginPolyline))
{
id = ParseNextId();
shape = new Shape(this.ReadPolyline()) { UserData = id };
}
else
{
this.VerifyIsLine(RectFileStrings.EndUnpaddedObstacles);
return;
}
this.UnpaddedObstacles.Add(shape);
idToShapeMap.Add(id, shape);
shapeToIdMap.Add(shape, id);
this.NextLine();
TryParseChildren(shape);
this.TryParseClumpId(id);
this.TryParseConvexHullId(id);
}
}
private void TryParseChildren(Shape shape)
{
if (!IsLine(RectFileStrings.Children))
{
return;
}
int id;
while (TryParseNextId(out id))
{
shape.AddChild(idToShapeMap[id]);
}
}
private void TryParseClumpId(int shapeId)
{
Match m = this.TryParse(RectFileStrings.ParseClumpId);
if (!m.Success)
{
return;
}
int clumpId = int.Parse(m.Groups["id"].ToString());
this.shapeIdToClumpMap[shapeId] = AddShapeToAccretion(shapeId, clumpId, this.clumpIdToAccretionMap);
this.NextLine();
}
private void TryParseConvexHullId(int shapeId)
{
Match m = this.TryParse(RectFileStrings.ParseConvexHullId);
if (!m.Success)
{
return;
}
int convexHullId = int.Parse(m.Groups["id"].ToString());
this.shapeIdToConvexHullMap[shapeId] = AddShapeToAccretion(shapeId, convexHullId, this.convexHullIdToAccretionMap);
this.NextLine();
}
private static ObstacleAccretion AddShapeToAccretion(int shapeId, int accretionId, Dictionary<int, ObstacleAccretion> accretionMap)
{
ObstacleAccretion accretion;
if (!accretionMap.TryGetValue(accretionId, out accretion))
{
accretion = new ObstacleAccretion(accretionId);
accretionMap[accretionId] = accretion;
}
accretion.SiblingIds.Add(shapeId);
return accretion;
}
private void ReadPorts()
{
Match m;
this.VerifyIsNextLine(RectFileStrings.BeginPorts);
// Get to the first line for consistency with the lookahead for multiPort offsets and/or any
// PortEntries, which will end up reading the following line.
this.NextLine();
for (;;)
{
if (!(m = ParseOrDone(RectFileStrings.ParsePort, RectFileStrings.EndPorts)).Success)
{
break;
}
bool isMultiPort = IsString(m.Groups["type"].ToString(), RectFileStrings.Multi);
bool isRelative = IsString(m.Groups["type"].ToString(), RectFileStrings.Relative);
var x = double.Parse(m.Groups["X"].ToString());
var y = double.Parse(m.Groups["Y"].ToString());
var portId = int.Parse(m.Groups["portId"].ToString());
var shapeId = int.Parse(m.Groups["shapeId"].ToString());
Validate.IsFalse(idToPortMap.ContainsKey(portId), "PortId already exists");
var location = new Point(x, y);
Shape shape = GetShapeFromId(shapeId, isMultiPort || isRelative);
Port port;
if (isMultiPort)
{
// 'location' was actually the active offset of the multiPort. Recreate it and reset the
// closest-location and verify the active offset index is the same. This may fail if there
// are two identical offsets in the offset list, in which case fix the test setup.
int activeOffsetIndex;
var offsets = ReadMultiPortOffsets(out activeOffsetIndex);
var multiPort = new MultiLocationFloatingPort(() => shape.BoundaryCurve, () => shape.BoundingBox.Center, offsets);
multiPort.SetClosestLocation(multiPort.CenterDelegate() + location);
Validate.AreEqual(multiPort.ActiveOffsetIndex, activeOffsetIndex, CurrentLineError("ActiveOffsetIndex is not as expected"));
port = multiPort;
}
else
{
if (isRelative)
{
// The location in the ParsePort line is the offset for the relative port.
port = new RelativeFloatingPort(() => shape.BoundaryCurve, () => shape.BoundingBox.Center, location);
}
else
{
Validate.IsTrue(IsString(m.Groups["type"].ToString(), RectFileStrings.Floating), CurrentLineError("Unknown port type"));
port = new FloatingPort((null == shape) ? null : shape.BoundaryCurve, location);
}
this.NextLine(); // Since we didn't read multiPort offsets
}
idToPortMap.Add(portId, port);
if (null != shape)
{
if (!this.UseFreePortsForObstaclePorts)
{
shape.Ports.Insert(port);
}
else
{
FreeRelativePortToShapeMap[port] = shape;
}
}
ReadPortEntries(port);
}
}
private Shape GetShapeFromId(int shapeId, bool isMultiOrRelative)
{
Shape shape;
if (!idToShapeMap.TryGetValue(shapeId, out shape))
{
Validate.IsFalse(isMultiOrRelative, CurrentLineError("Shape not found for MultiOrRelativePort"));
}
return shape;
}
private List<Point> ReadMultiPortOffsets(out int activeOffsetIndex)
{
var offsets = new List<Point>();
Match m = ParseNext(RectFileStrings.ParseMultiPortOffsets);
Validate.IsTrue(m.Success, CurrentLineError("Did not find expected MultiPortOffsets"));
activeOffsetIndex = int.Parse(m.Groups["index"].ToString());
for (;;)
{
m = TryParseNext(RectFileStrings.ParsePoint);
if (!m.Success)
{
break;
}
var x = double.Parse(m.Groups["X"].ToString());
var y = double.Parse(m.Groups["Y"].ToString());
offsets.Add(new Point(x, y));
}
Validate.IsTrue(offsets.Count > 0, CurrentLineError("No offsets found for multiPort"));
return offsets;
}
private void ReadPortEntries(Port port)
{
// We've already positioned ourselves to the next line in ReadPorts.
if (!IsLine(RectFileStrings.PortEntryOnCurve))
{
return;
}
var spans = new List<Tuple<double, double>>();
Match m;
while ((m = TryParseNext(RectFileStrings.ParsePoint)).Success)
{
// We reuse ParsePoint because a span is just two doubles as well.
var first = double.Parse(m.Groups["X"].ToString());
var second = double.Parse(m.Groups["Y"].ToString());
spans.Add(new Tuple<double, double>(first, second));
}
Validate.IsTrue(spans.Count > 0, CurrentLineError("Empty span list"));
port.PortEntry = new PortEntryOnCurve(port.Curve, spans);
}
private void ReadRoutingSpecs()
{
Match m;
this.VerifyIsNextLine(RectFileStrings.BeginRoutingSpecs);
// Get to the first line for consistency with the lookahead for waypoints, which will
// end up reading the following line.
this.NextLine();
for (;;)
{
if (!(m = ParseOrDone(RectFileStrings.ParseEdgeGeometry, RectFileStrings.EndRoutingSpecs)).Success)
{
break;
}
var sourceId = int.Parse(m.Groups["sourceId"].ToString());
var targetId = int.Parse(m.Groups["targetId"].ToString());
var arrowheadAtSource = bool.Parse(m.Groups["arrowheadAtSource"].ToString());
var arrowheadAtTarget = bool.Parse(m.Groups["arrowheadAtTarget"].ToString());
var arrowheadLength = double.Parse(m.Groups["arrowheadLength"].ToString());
var arrowheadWidth = double.Parse(m.Groups["arrowheadWidth"].ToString());
var lineWidth = double.Parse(m.Groups["lineWidth"].ToString());
Port sourcePort, targetPort;
Validate.IsTrue(idToPortMap.TryGetValue(sourceId, out sourcePort), CurrentLineError("Can't find source port"));
Validate.IsTrue(idToPortMap.TryGetValue(targetId, out targetPort), CurrentLineError("Can't find target port"));
var edgeGeom = new EdgeGeometry(sourcePort, targetPort) { LineWidth = lineWidth };
if (arrowheadAtSource)
{
edgeGeom.SourceArrowhead = new Arrowhead { Length = arrowheadLength, Width = arrowheadWidth };
}
if (arrowheadAtTarget)
{
edgeGeom.TargetArrowhead = new Arrowhead { Length = arrowheadLength, Width = arrowheadWidth };
}
this.RoutingSpecs.Add(edgeGeom);
this.ReadWaypoints(edgeGeom);
}
}
private void ReadWaypoints(EdgeGeometry edgeGeom)
{
if (!IsNextLine(RectFileStrings.Waypoints))
{
return;
}
var waypoints = new List<Point>();
Match m;
while ((m = TryParseNext(RectFileStrings.ParsePoint)).Success)
{
// Waypoints are plain points, so ParsePoint applies directly here.
var x = double.Parse(m.Groups["X"].ToString());
var y = double.Parse(m.Groups["Y"].ToString());
waypoints.Add(new Point(x, y));
}
Validate.IsTrue(waypoints.Count > 0, CurrentLineError("Empty waypoint list"));
edgeGeom.Waypoints = waypoints;
}
private void ReadPaddedObstacles()
{
this.VerifyIsNextLine(RectFileStrings.BeginPaddedObstacles);
for (;;)
{
if (!this.IsNextLine(RectFileStrings.BeginPolyline))
{
this.VerifyIsLine(RectFileStrings.EndPaddedObstacles);
break;
}
int id = ParseNextId();
Validate.IsFalse(idToPaddedPolylineMap.ContainsKey(id), CurrentLineError("Duplicate padded-obstacle id"));
var polyline = this.ReadPolyline();
this.PaddedObstacles.Add(polyline);
idToPaddedPolylineMap.Add(id, polyline);
}
}
private void ReadConvexHulls()
{
this.VerifyIsNextLine(RectFileStrings.BeginConvexHulls);
for (; ; )
{
if (!this.IsNextLine(RectFileStrings.BeginPolyline))
{
this.VerifyIsLine(RectFileStrings.EndConvexHulls);
break;
}
int id = ParseNextId();
var hullAccretion = this.convexHullIdToAccretionMap[id];
Validate.AreEqual(id, hullAccretion.Id, "This should always be true; accretion.Id just makes debugging easier");
hullAccretion.Polyline = this.ReadPolyline();
}
}
public void VerifyObstaclePaddedPolylines(IEnumerable<Obstacle> routerObstacles)
{
if (0 == idToPaddedPolylineMap.Count)
{
return;
}
foreach (var routerObstacle in routerObstacles)
{
var filePolyline = idToPaddedPolylineMap[shapeToIdMap[routerObstacle.InputShape]];
var routerPolyline = routerObstacle.PaddedPolyline;
VerifyPolylinesAreSame(filePolyline, routerPolyline);
}
}
private void VerifyPolylinesAreSame(Polyline filePolyline, Polyline routerPolyline)
{
Validate.AreEqual(filePolyline.PolylinePoints.Count(), routerPolyline.PolylinePoints.Count(), "Different number of points in polyline");
Validate.IsTrue(comparer.Equals(filePolyline.StartPoint.Point, routerPolyline.StartPoint.Point), "Polyline StartPoints are not equal");
Validate.IsTrue(comparer.Equals(filePolyline.EndPoint.Point, routerPolyline.EndPoint.Point), "Polyline EndPoints are not equal");
var filePoint = filePolyline.StartPoint.NextOnPolyline;
var obstaclePoint = routerPolyline.StartPoint.NextOnPolyline;
while (!comparer.Equals(filePoint.Point, filePolyline.EndPoint.Point))
{
Validate.IsTrue(comparer.Equals(filePoint.Point, obstaclePoint.Point), "Polyline Intermediate Points are not equal");
filePoint = filePoint.NextOnPolyline;
obstaclePoint = obstaclePoint.NextOnPolyline;
}
}
private void ReadScanSegments()
{
Match m;
this.VerifyIsNextLine(RectFileStrings.BeginHScanSegments);
for (;;)
{
if (!(m = ParseNextOrDone(RectFileStrings.ParseSegment, RectFileStrings.EndHScanSegments)).Success)
{
break;
}
this.HorizontalScanLineSegments.Add(LoadLineSegment(m));
}
this.VerifyIsNextLine(RectFileStrings.BeginVScanSegments);
for (;;)
{
if (!(m = ParseNextOrDone(RectFileStrings.ParseSegment, RectFileStrings.EndVScanSegments)).Success)
{
break;
}
this.VerticalScanLineSegments.Add(LoadLineSegment(m));
}
}
internal void VerifyScanSegments(RectilinearEdgeRouterWrapper router)
{
this.VerifyAxisScanSegments(this.HorizontalScanLineSegments, router.HorizontalScanLineSegments, "Horizontal ScanSegment");
this.VerifyAxisScanSegments(this.VerticalScanLineSegments, router.VerticalScanLineSegments, "Vertical ScanSegment");
}
private void VerifyAxisScanSegments(List<LineSegment> readerSegmentList, IEnumerable<LineSegment> routerSegs, string name)
{
// These are already ordered as desired in the tree.
if (0 != readerSegmentList.Count)
{
IEnumerator<LineSegment> readerSegs = readerSegmentList.GetEnumerator();
foreach (var routerSeg in routerSegs)
{
readerSegs.MoveNext();
var readerSeg = readerSegs.Current;
Validate.IsTrue(comparer.IsClose(routerSeg, readerSeg),
string.Format(System.Globalization.CultureInfo.InvariantCulture, "Router {0} does not match Reader", name));
}
}
}
internal void VerifyClumps(RectilinearEdgeRouterWrapper router) {
if (this.clumpIdToAccretionMap.Count == 0)
{
return;
}
VerifyThatAllRouterClumpsAreInFile(router);
VerifyThatAllFileClumpsAreInRouter(router);
}
private void VerifyThatAllRouterClumpsAreInFile(RectilinearEdgeRouterWrapper router) {
foreach (var routerClump in router.ObstacleTree.GetAllPrimaryObstacles().Where(obs => obs.IsOverlapped).Select(obs => obs.Clump).Distinct())
{
var routerSiblings = routerClump.Select(obs => this.shapeToIdMap[obs.InputShape]).OrderBy(id => id).ToArray();
var fileClump = this.shapeIdToClumpMap[routerSiblings.First()];
fileClump.RouterAccretion = routerClump;
VerifyOrderedSiblingLists(fileClump.SiblingIds, routerSiblings); // SiblingIds are already ordered
}
}
private void VerifyThatAllFileClumpsAreInRouter(RectilinearEdgeRouterWrapper router)
{
foreach (var fileClump in this.clumpIdToAccretionMap.Values)
{
var firstSibling = this.idToShapeMap[fileClump.SiblingIds.First()];
var firstObstacle = router.ShapeToObstacleMap[firstSibling];
// We've already verified all the router clumps, so we now just need to know that we do have a router clump.
Validate.AreSame(fileClump.RouterAccretion, firstObstacle.Clump, "Clump from file was not found in router");
}
}
internal void VerifyConvexHulls(RectilinearEdgeRouterWrapper router)
{
if (this.convexHullIdToAccretionMap.Count == 0)
{
return;
}
VerifyThatAllRouterHullsAreInFile(router);
VerifyThatAllFileHullsAreInRouter(router);
}
private void VerifyThatAllRouterHullsAreInFile(RectilinearEdgeRouterWrapper router) {
foreach (var routerHull in router.ObstacleTree.GetAllPrimaryObstacles().Where(obs => obs.ConvexHull != null).Select(obs => obs.ConvexHull))
{
var routerSiblings = routerHull.Obstacles.Select(obs => this.shapeToIdMap[obs.InputShape]).OrderBy(id => id).ToArray();
var fileHull = this.shapeIdToConvexHullMap[routerSiblings.First()];
fileHull.RouterAccretion = routerHull;
VerifyOrderedSiblingLists(fileHull.SiblingIds, routerSiblings); // SiblingIds are already ordered
// This may be null if -nowriteConvexHulls
if (fileHull.Polyline != null)
{
this.VerifyPolylinesAreSame(fileHull.Polyline, routerHull.Polyline);
}
// Convex Hulls can exist for both groups and non-groups. For groups, there should only be one obstacle,
// the group, in the hull.
var firstSibling = this.idToShapeMap[routerSiblings.First()];
if (firstSibling.IsGroup)
{
Validate.AreEqual(1, routerSiblings.Count(), "only one item should be in a convex hull for a group");
}
else
{
Validate.IsFalse(routerSiblings.Any(sib => this.idToShapeMap[sib].IsGroup), "group found with non-groups in a convex hull");
}
}
}
private void VerifyThatAllFileHullsAreInRouter(RectilinearEdgeRouterWrapper router)
{
foreach (var fileHull in this.convexHullIdToAccretionMap.Values)
{
var firstSibling = this.idToShapeMap[fileHull.SiblingIds.First()];
var firstObstacle = router.ShapeToObstacleMap[firstSibling];
// We've already verified all the router hulls, so we now just need to know that we do have a router hull.
Validate.AreSame(fileHull.RouterAccretion, firstObstacle.ConvexHull, "Convex hull from file was not found in router");
}
}
private static void VerifyOrderedSiblingLists(List<int> fileSiblings, int[] routerSiblings)
{
Validate.AreEqual(fileSiblings.Count, routerSiblings.Length, "Unequal number of file and router siblings");
for (int ii = 0; ii < routerSiblings.Length; ++ii)
{
Validate.AreEqual(fileSiblings[ii], routerSiblings[ii], "File and router siblings differ");
}
}
private void ReadVisibilityGraph()
{
Match m;
this.VerifyIsNextLine(RectFileStrings.BeginVisibilityVertices);
for (;;)
{
if (!(m = ParseNextOrDone(RectFileStrings.ParsePoint, RectFileStrings.EndVisibilityVertices)).Success)
{
break;
}
var x = double.Parse(m.Groups["X"].ToString());
var y = double.Parse(m.Groups["Y"].ToString());
this.VisibilityGraph.AddVertex(ApproximateComparer.Round(new Point(x, y)));
}
this.VerifyIsNextLine(RectFileStrings.BeginVisibilityEdges);
for (;;)
{
if (!(m = ParseNextOrDone(RectFileStrings.ParseSegment, RectFileStrings.EndVisibilityEdges)).Success)
{
break;
}
var startX = double.Parse(m.Groups["startX"].ToString());
var startY = double.Parse(m.Groups["startY"].ToString());
var endX = double.Parse(m.Groups["endX"].ToString());
var endY = double.Parse(m.Groups["endY"].ToString());
this.VisibilityGraph.AddEdge(ApproximateComparer.Round(new Point(startX, startY)),
ApproximateComparer.Round(new Point(endX, endY)));
}
}
internal void VerifyVisibilityGraph(RectilinearEdgeRouterWrapper router)
{
Validate.AreEqual(this.fileVertexCount, router.VisibilityGraph.VertexCount, "Graph vertex count difference");
// If the vertices and edges were stored to the file, verify them.
if (0 != this.VisibilityGraph.VertexCount)
{
foreach (var fileVertex in this.VisibilityGraph.Vertices())
{
Validate.IsNotNull(router.VisibilityGraph.FindVertex(fileVertex.Point), "Cannot find file vertex in router graph");
}
foreach (var routerVertex in router.VisibilityGraph.Vertices())
{
Validate.IsNotNull(this.VisibilityGraph.FindVertex(routerVertex.Point), "Cannot find router vertex in file graph");
}
foreach (var fileEdge in this.VisibilityGraph.Edges)
{
Validate.IsNotNull(VisibilityGraph.FindEdge(fileEdge), "Cannot find file edge in router graph");
}
foreach (var routerEdge in router.VisibilityGraph.Edges)
{
Validate.IsNotNull(VisibilityGraph.FindEdge(routerEdge), "Cannot find router edge in file graph");
}
}
}
private void ReadPaths()
{
Match m;
this.VerifyIsNextLine(RectFileStrings.BeginPaths);
for (;;)
{
if (!(m = ParseNextOrDone(RectFileStrings.ParsePathEndpoints, RectFileStrings.EndPaths)).Success)
{
break;
}
var sourceId = int.Parse(m.Groups["sourceId"].ToString());
var targetId = int.Parse(m.Groups["targetId"].ToString());
Port sourcePort, targetPort;
Validate.IsTrue(idToPortMap.TryGetValue(sourceId, out sourcePort), CurrentLineError("Can't find source port"));
Validate.IsTrue(idToPortMap.TryGetValue(targetId, out targetPort), CurrentLineError("Can't find target port"));
this.VerifyIsNextLine(RectFileStrings.BeginCurve);
var curve = this.ReadCurve();
portsToPathMap.Add(new Tuple<Port, Port>(sourcePort, targetPort), curve);
}
}
private void VerifyPaths(RectilinearEdgeRouterWrapper router)
{
IEnumerable<EdgeGeometry> routerEdgeGeometries = router.EdgeGeometriesToRoute;
foreach (EdgeGeometry routerEdgeGeom in routerEdgeGeometries.Where(edgeGeom => null != edgeGeom.Curve))
{
Curve fileCurve;
if (this.portsToPathMap.TryGetValue(new Tuple<Port, Port>(routerEdgeGeom.SourcePort, routerEdgeGeom.TargetPort), out fileCurve))
{
var routerCurve = (Curve)routerEdgeGeom.Curve; // This is currently always a Curve
this.VerifyCurvesAreSame(fileCurve, routerCurve);
}
}
}
private void VerifyCurvesAreSame(Curve fileCurve, Curve routerCurve)
{
var fileSegments = fileCurve.Segments;
var routerSegments = routerCurve.Segments;
Validate.AreEqual(fileSegments.Count, routerSegments.Count, "Unequal Curve segment counts");
for (int ii = 0; ii < fileSegments.Count; ++ii)
{
Validate.IsTrue(comparer.IsClose(fileSegments[ii].Start, routerSegments[ii].Start), "Unequal Curve segment Start");
Validate.IsTrue(comparer.IsClose(fileSegments[ii].End, routerSegments[ii].End), "Unequal Curve segment End");
}
}
public void VerifyRouter(RectilinearEdgeRouterWrapper router)
{
if (router.WantVerify)
{
VerifyVisibilityGraph(router);
VerifyPaths(router);
}
}
private Curve ReadCurve()
{
var c = new Curve();
Match m;
while ((m = ParseNextOrDone(RectFileStrings.ParseSegment, RectFileStrings.EndCurve)).Success)
{
c.AddSegment(LoadLineSegment(m));
}
return c;
}
private static LineSegment LoadLineSegment(Match m)
{
var startX = double.Parse(m.Groups["startX"].ToString());
var startY = double.Parse(m.Groups["startY"].ToString());
var endX = double.Parse(m.Groups["endX"].ToString());
var endY = double.Parse(m.Groups["endY"].ToString());
return new LineSegment(new Point(startX, startY), new Point(endX, endY));
}
private Ellipse ReadEllipse()
{
Match m = ParseNext(RectFileStrings.ParseEllipse);
var axisAx = double.Parse(m.Groups["axisAx"].ToString());
var axisAy = double.Parse(m.Groups["axisAy"].ToString());
var axisBx = double.Parse(m.Groups["axisBx"].ToString());
var axisBy = double.Parse(m.Groups["axisBy"].ToString());
var centerX = double.Parse(m.Groups["centerX"].ToString());
var centerY = double.Parse(m.Groups["centerY"].ToString());
VerifyIsNextLine(RectFileStrings.EndEllipse);
return new Ellipse(new Point(axisAx, axisAy),
new Point(axisBx, axisBy), new Point(centerX, centerY));
}
private RoundedRect ReadRoundedRect()
{
Match m = ParseNext(RectFileStrings.ParseRoundedRect);
var x = double.Parse(m.Groups["X"].ToString());
var y = double.Parse(m.Groups["Y"].ToString());
var width = double.Parse(m.Groups["width"].ToString());
var height = double.Parse(m.Groups["height"].ToString());
var radiusX = double.Parse(m.Groups["radiusX"].ToString());
var radiusY = double.Parse(m.Groups["radiusY"].ToString());
VerifyIsNextLine(RectFileStrings.EndRoundedRect);
return new RoundedRect(new Rectangle(x, y, x + width, y + height), radiusX, radiusY);
}
private Polyline ReadPolyline()
{
var p = new Polyline();
Match m;
while ((m = ParseNextOrDone(RectFileStrings.ParsePoint, RectFileStrings.EndPolyline)).Success)
{
var x = double.Parse(m.Groups["X"].ToString());
var y = double.Parse(m.Groups["Y"].ToString());
p.AddPoint(new Point(x, y));
}
p.Closed = true;
return p;
}
private void NextLine()
{
while ((this.currentLine = this.inputFileReader.ReadLine()) != null)
{
++this.currentLineNumber;
if (this.currentLine.StartsWith("//", StringComparison.CurrentCultureIgnoreCase))
{
continue;
}
this.currentLine = this.currentLine.Trim();
if (string.IsNullOrEmpty(this.currentLine))
{
continue;
}
return;
}
// We only call this when we expect a line.
Validate.Fail("Unexpected EOF in source file");
}
private Match ParseNext(Regex rgx)
{
NextLine();
return Parse(rgx);
}
private Match Parse(Regex rgx)
{
Match m = rgx.Match(this.currentLine);
if (!m.Success)
{
Validate.Fail(CurrentLineError("Unexpected Parse mismatch"));
}
return m;
}
private Match TryParseNext(Regex rgx)
{
NextLine();
return TryParse(rgx);
}
private Match TryParse(Regex rgx)
{
Match m = rgx.Match(this.currentLine);
return m;
}
private Match ParseNextOrDone(Regex rgx, string strDone)
{
NextLine();
return ParseOrDone(rgx, strDone);
}
private Match ParseOrDone(Regex rgx, string strDone)
{
Match m = rgx.Match(this.currentLine);
if (m.Success)
{
return m;
}
if (!this.IsLine(strDone))
{
Validate.Fail(CurrentLineError("Unexpected ParseOrDone mismatch"));
}
return m;
}
private int ParseNextId()
{
NextLine();
return ParseId();
}
private int ParseId()
{
int id;
if (!TryParseId(out id))
{
Validate.Fail(CurrentLineError("Unexpected ParseNextId mismatch"));
}
return id;
}
private bool TryParseNextId(out int id)
{
this.NextLine();
return TryParseId(out id);
}
private bool TryParseId(out int id)
{
Match m = TryParse(RectFileStrings.ParseId);
id = InvalidId;
if (!m.Success)
{
return false;
}
id = int.Parse(m.Groups["id"].ToString());
return true;
}
private void VerifyIsNextLine(string strTest)
{
NextLine();
VerifyIsLine(strTest);
}
private void VerifyIsLine(string strTest)
{
if (!IsLine(strTest))
{
Validate.Fail(CurrentLineError("Unexpected mismatch: expected {0}", strTest));
}
}
private bool IsNextLine(string strTest)
{
NextLine();
return IsLine(strTest);
}
private bool IsLine(string strTest)
{
return IsString(strTest, this.currentLine);
}
private static bool IsString(string strWant, string strTest)
{
return 0 == string.Compare(strWant, strTest, StringComparison.CurrentCultureIgnoreCase);
}
private string CurrentLineError(string format, params object[] args)
{
var details = string.Format(format, args);
return string.Format("{0} on line {1}: {2}", details, this.currentLineNumber, this.currentLine);
}
internal static string[] GetCommandLineArgs(string fileName, out string commandLine)
{
// First line is the commented command line.
using (var reader = new StreamReader(fileName))
{
// This line starts with "// ".
commandLine = reader.ReadLine();
if (string.IsNullOrEmpty(commandLine))
{
Validate.Fail("Cannot find command line in file");
return null; // Validate.Fail isn't recognized as preventing possible nullref on following line
}
commandLine = commandLine.Substring(3).Trim();
return SplitCommandLineArgs(commandLine);
}
}
internal static string[] SplitCommandLineArgs(string cmdLine)
{
// Simple parser for -updatefile, since we don't do much complicated arg stuff.
var args = new List<string>();
var builder = new StringBuilder();
bool inQuotes = false;
foreach (char c in cmdLine)
{
if ('"' == c)
{
inQuotes = !inQuotes;
continue;
}
if ((' ' == c) && !inQuotes)
{
args.Add(builder.ToString());
builder.Clear();
continue;
}
builder.Append(c);
}
if (0 != builder.Length)
{
args.Add(builder.ToString());
}
return args.ToArray();
}
#region IDisposable Members
public void Dispose()
{
if (null != this.inputFileReader)
{
this.inputFileReader.Dispose();
this.inputFileReader = null;
}
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
private static void UnpackHighInt32()
{
var test = new SimpleBinaryOpTest__UnpackHighInt32();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
// Validates basic functionality works, using LoadAligned
test.RunBasicScenario_LoadAligned();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
// Validates calling via reflection works, using LoadAligned
test.RunReflectionScenario_LoadAligned();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (Sse2.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
// Validates passing a local works, using LoadAligned
test.RunLclVarScenario_LoadAligned();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (Sse2.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (Sse2.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (Sse2.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (Sse2.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__UnpackHighInt32
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(Int32[] inArray1, Int32[] inArray2, Int32[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Int32>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Int32>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Int32>();
if ((alignment != 32 && alignment != 16) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
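// Allocate twice the requested alignment so that, after rounding the pinned address up, at least 'alignment' bytes remain usable.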
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Int32, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Int32, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
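// Rounds the pinned buffer address up to the requested alignment (which must be a power of two).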
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector128<Int32> _fld1;
public Vector128<Int32> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref testStruct._fld1), ref Unsafe.As<Int32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int32>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref testStruct._fld2), ref Unsafe.As<Int32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Int32>>());
return testStruct;
}
public void RunStructFldScenario(SimpleBinaryOpTest__UnpackHighInt32 testClass)
{
var result = Sse2.UnpackHigh(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(SimpleBinaryOpTest__UnpackHighInt32 testClass)
{
fixed (Vector128<Int32>* pFld1 = &_fld1)
fixed (Vector128<Int32>* pFld2 = &_fld2)
{
var result = Sse2.UnpackHigh(
Sse2.LoadVector128((Int32*)(pFld1)),
Sse2.LoadVector128((Int32*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Int32>>() / sizeof(Int32);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<Int32>>() / sizeof(Int32);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<Int32>>() / sizeof(Int32);
private static Int32[] _data1 = new Int32[Op1ElementCount];
private static Int32[] _data2 = new Int32[Op2ElementCount];
private static Vector128<Int32> _clsVar1;
private static Vector128<Int32> _clsVar2;
private Vector128<Int32> _fld1;
private Vector128<Int32> _fld2;
private DataTable _dataTable;
static SimpleBinaryOpTest__UnpackHighInt32()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref _clsVar1), ref Unsafe.As<Int32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int32>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref _clsVar2), ref Unsafe.As<Int32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Int32>>());
}
public SimpleBinaryOpTest__UnpackHighInt32()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref _fld1), ref Unsafe.As<Int32, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Int32>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref _fld2), ref Unsafe.As<Int32, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Int32>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetInt32(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetInt32(); }
_dataTable = new DataTable(_data1, _data2, new Int32[RetElementCount], LargestVectorSize);
}
public bool IsSupported => Sse2.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Sse2.UnpackHigh(
Unsafe.Read<Vector128<Int32>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<Int32>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = Sse2.UnpackHigh(
Sse2.LoadVector128((Int32*)(_dataTable.inArray1Ptr)),
Sse2.LoadVector128((Int32*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));
var result = Sse2.UnpackHigh(
Sse2.LoadAlignedVector128((Int32*)(_dataTable.inArray1Ptr)),
Sse2.LoadAlignedVector128((Int32*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(Sse2).GetMethod(nameof(Sse2.UnpackHigh), new Type[] { typeof(Vector128<Int32>), typeof(Vector128<Int32>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<Int32>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<Int32>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int32>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(Sse2).GetMethod(nameof(Sse2.UnpackHigh), new Type[] { typeof(Vector128<Int32>), typeof(Vector128<Int32>) })
.Invoke(null, new object[] {
Sse2.LoadVector128((Int32*)(_dataTable.inArray1Ptr)),
Sse2.LoadVector128((Int32*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int32>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));
var result = typeof(Sse2).GetMethod(nameof(Sse2.UnpackHigh), new Type[] { typeof(Vector128<Int32>), typeof(Vector128<Int32>) })
.Invoke(null, new object[] {
Sse2.LoadAlignedVector128((Int32*)(_dataTable.inArray1Ptr)),
Sse2.LoadAlignedVector128((Int32*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int32>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Sse2.UnpackHigh(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector128<Int32>* pClsVar1 = &_clsVar1)
fixed (Vector128<Int32>* pClsVar2 = &_clsVar2)
{
var result = Sse2.UnpackHigh(
Sse2.LoadVector128((Int32*)(pClsVar1)),
Sse2.LoadVector128((Int32*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector128<Int32>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector128<Int32>>(_dataTable.inArray2Ptr);
var result = Sse2.UnpackHigh(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = Sse2.LoadVector128((Int32*)(_dataTable.inArray1Ptr));
var op2 = Sse2.LoadVector128((Int32*)(_dataTable.inArray2Ptr));
var result = Sse2.UnpackHigh(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));
var op1 = Sse2.LoadAlignedVector128((Int32*)(_dataTable.inArray1Ptr));
var op2 = Sse2.LoadAlignedVector128((Int32*)(_dataTable.inArray2Ptr));
var result = Sse2.UnpackHigh(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__UnpackHighInt32();
var result = Sse2.UnpackHigh(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__UnpackHighInt32();
fixed (Vector128<Int32>* pFld1 = &test._fld1)
fixed (Vector128<Int32>* pFld2 = &test._fld2)
{
var result = Sse2.UnpackHigh(
Sse2.LoadVector128((Int32*)(pFld1)),
Sse2.LoadVector128((Int32*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Sse2.UnpackHigh(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector128<Int32>* pFld1 = &_fld1)
fixed (Vector128<Int32>* pFld2 = &_fld2)
{
var result = Sse2.UnpackHigh(
Sse2.LoadVector128((Int32*)(pFld1)),
Sse2.LoadVector128((Int32*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Sse2.UnpackHigh(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = Sse2.UnpackHigh(
Sse2.LoadVector128((Int32*)(&test._fld1)),
Sse2.LoadVector128((Int32*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
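// When Sse2.IsSupported is false, the intrinsic call below is expected to throw
// PlatformNotSupportedException; catching that exception is what marks this
// unsupported scenario as having succeeded.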
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector128<Int32> op1, Vector128<Int32> op2, void* result, [CallerMemberName] string method = "")
{
Int32[] inArray1 = new Int32[Op1ElementCount];
Int32[] inArray2 = new Int32[Op2ElementCount];
Int32[] outArray = new Int32[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Int32, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Int32, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Int32>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
Int32[] inArray1 = new Int32[Op1ElementCount];
Int32[] inArray2 = new Int32[Op2ElementCount];
Int32[] outArray = new Int32[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<Int32>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<Int32>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Int32>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(Int32[] left, Int32[] right, Int32[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
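// Expected UnpackHigh semantics for Int32 lanes (informal sketch): the upper halves
// of the two vectors are interleaved, i.e.
// result == { left[2], right[2], left[3], right[3] },
// which is what the index arithmetic below encodes.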
if (result[0] != left[2])
{
succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if ((i % 2 == 0) ? result[i] != left[i/2 + 2] : result[i] != right[(i - 1)/2 + 2])
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Sse2)}.{nameof(Sse2.UnpackHigh)}<Int32>(Vector128<Int32>, Vector128<Int32>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Orleans;
using Orleans.Configuration;
using Orleans.Runtime;
using Orleans.Runtime.Configuration;
using Orleans.Runtime.ReminderService;
using Orleans.Runtime.Services;
using Orleans.Timers;
using UnitTests.GrainInterfaces;
#pragma warning disable 612,618
namespace UnitTests.Grains
{
// NOTE: if you make any changes here, copy them to ReminderTestCopyGrain
public class ReminderTestGrain2 : Grain, IReminderTestGrain2, IRemindable
{
private readonly IReminderTable reminderTable;
private readonly IReminderRegistry unvalidatedReminderRegistry;
Dictionary<string, IGrainReminder> allReminders;
Dictionary<string, long> sequence;
private TimeSpan period;
private static long ACCURACY = 50 * TimeSpan.TicksPerMillisecond; // sequence numbers are computed from ticks, but reminder timeouts do not fire with tick precision, so we allow this much leeway
private IOptions<ReminderOptions> reminderOptions;
private ILogger logger;
private string myId; // used to distinguish during debugging between multiple activations of the same grain
private string filePrefix;
public ReminderTestGrain2(IServiceProvider services, IReminderTable reminderTable, ILoggerFactory loggerFactory)
{
this.reminderTable = reminderTable;
this.unvalidatedReminderRegistry = new UnvalidatedReminderRegistry(services);
this.logger = loggerFactory.CreateLogger($"{this.GetType().Name}-{this.IdentityString}");
this.reminderOptions = services.GetService<IOptions<ReminderOptions>>();
}
public override Task OnActivateAsync()
{
this.myId = this.Data.ActivationId.ToString();// new Random().Next();
this.allReminders = new Dictionary<string, IGrainReminder>();
this.sequence = new Dictionary<string, long>();
this.period = GetDefaultPeriod(this.logger);
this.logger.Info("OnActivateAsync.");
this.filePrefix = "g" + this.GrainId.ToString().Replace('/', '_') + "_";
return GetMissingReminders();
}
public override Task OnDeactivateAsync()
{
this.logger.Info("OnDeactivateAsync");
return Task.CompletedTask;
}
public async Task<IGrainReminder> StartReminder(string reminderName, TimeSpan? p = null, bool validate = false)
{
TimeSpan usePeriod = p ?? this.period;
this.logger.Info("Starting reminder {0}.", reminderName);
TimeSpan dueTime;
if (reminderOptions.Value.MinimumReminderPeriod < TimeSpan.FromSeconds(2))
dueTime = TimeSpan.FromSeconds(2) - reminderOptions.Value.MinimumReminderPeriod;
else dueTime = usePeriod - TimeSpan.FromSeconds(2);
IGrainReminder r;
if (validate)
r = await RegisterOrUpdateReminder(reminderName, dueTime, usePeriod);
else
r = await this.unvalidatedReminderRegistry.RegisterOrUpdateReminder(reminderName, dueTime, usePeriod);
this.allReminders[reminderName] = r;
this.sequence[reminderName] = 0;
string fileName = GetFileName(reminderName);
File.Delete(fileName); // if successfully started, then remove any old data
this.logger.Info("Started reminder {0}", r);
return r;
}
public Task ReceiveReminder(string reminderName, TickStatus status)
{
// After a failure, a newly created activation does not know which reminders
// were registered against this grain, so it may receive a reminder that was
// registered by a previous activation (an earlier incarnation of the grain).
// Handle unknown reminder names defensively.
if (!this.sequence.ContainsKey(reminderName))
{
// allReminders.Add(reminderName, r); // not using allReminders at the moment
//counters.Add(reminderName, 0);
this.sequence.Add(reminderName, 0); // we'll catch up to the latest sequence number while processing this tick
}
// Calculating the tick sequence number.
// All arithmetic is done on ticks (long) because DateTime has no division operator.
// Raw dateTime.Ticks is not reliable here: the gap between two ReceiveReminder()
// invocations may be slightly shorter than period.Ticks. A longer gap is harmless,
// but a shorter one would skew the sequence number, so we coarsen the resolution
// by dividing everything by ACCURACY before computing it.
long now = status.CurrentTickTime.Ticks / ACCURACY;
long first = status.FirstTickTime.Ticks / ACCURACY;
long per = status.Period.Ticks / ACCURACY;
long sequenceNumber = 1 + ((now - first) / per);
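// Illustrative example (assumed numbers): with ACCURACY = 50ms and the default
// 12s period, a tick firing ~36s after FirstTickTime gives
// (now - first) / per = 720 / 240 = 3, i.e. sequenceNumber == 4 (the fourth tick).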
// end of calculating tick sequence number
// ignore ticks whose sequence number is older than one we have already processed
if (sequenceNumber < this.sequence[reminderName])
{
this.logger.Info("ReceiveReminder: {0} Incorrect tick {1} vs. {2} with status {3}.", reminderName, this.sequence[reminderName], sequenceNumber, status);
return Task.CompletedTask;
}
this.sequence[reminderName] = sequenceNumber;
this.logger.Info("ReceiveReminder: {0} Sequence # {1} with status {2}.", reminderName, this.sequence[reminderName], status);
string fileName = GetFileName(reminderName);
string counterValue = this.sequence[reminderName].ToString(CultureInfo.InvariantCulture);
File.WriteAllText(fileName, counterValue);
return Task.CompletedTask;
}
public async Task StopReminder(string reminderName)
{
this.logger.Info("Stopping reminder {0}.", reminderName);
// we don't reset the counter so that test methods can still read it after the reminder is stopped
//return UnregisterReminder(allReminders[reminderName]);
IGrainReminder reminder;
if (this.allReminders.TryGetValue(reminderName, out reminder))
{
await UnregisterReminder(reminder);
}
else
{
// after a failure there may be reminders registered by an earlier activation that are not in our local cache,
// so refresh the cache before giving up
await GetMissingReminders();
if (this.allReminders.TryGetValue(reminderName, out reminder))
{
await UnregisterReminder(reminder);
}
else
{
//var reminders = await this.GetRemindersList();
throw new OrleansException(string.Format(
"Could not find reminder {0} in grain {1}", reminderName, this.IdentityString));
}
}
}
private async Task GetMissingReminders()
{
List<IGrainReminder> reminders = await base.GetReminders();
this.logger.Info("Got missing reminders {0}", Utils.EnumerableToString(reminders));
foreach (IGrainReminder l in reminders)
{
if (!this.allReminders.ContainsKey(l.ReminderName))
{
this.allReminders.Add(l.ReminderName, l);
}
}
}
public async Task StopReminder(IGrainReminder reminder)
{
this.logger.Info("Stopping reminder (using ref) {0}.", reminder);
// we don't reset the counter so that test methods can still read it after the reminder is stopped
await UnregisterReminder(reminder);
}
public Task<TimeSpan> GetReminderPeriod(string reminderName)
{
return Task.FromResult(this.period);
}
public Task<long> GetCounter(string name)
{
string fileName = GetFileName(name);
string data = File.ReadAllText(fileName);
long counterValue = long.Parse(data);
return Task.FromResult(counterValue);
}
public Task<IGrainReminder> GetReminderObject(string reminderName)
{
return base.GetReminder(reminderName);
}
public async Task<List<IGrainReminder>> GetRemindersList()
{
return await base.GetReminders();
}
private string GetFileName(string reminderName)
{
return string.Format("{0}{1}", this.filePrefix, reminderName);
}
public static TimeSpan GetDefaultPeriod(ILogger log)
{
int period = 12; // Seconds
var reminderPeriod = TimeSpan.FromSeconds(period);
log.Info("Using reminder period of {0} in ReminderTestGrain", reminderPeriod);
return reminderPeriod;
}
public async Task EraseReminderTable()
{
await this.reminderTable.TestOnlyClearTable();
}
}
// NOTE: do not make changes here ... this is a copy of ReminderTestGrain
// changes to make when copying:
// 1. rename logger to ReminderCopyGrain
// 2. filePrefix should start with "gc", instead of "g"
public class ReminderTestCopyGrain : Grain, IReminderTestCopyGrain, IRemindable
{
private readonly IReminderRegistry unvalidatedReminderRegistry;
Dictionary<string, IGrainReminder> allReminders;
Dictionary<string, long> sequence;
private TimeSpan period;
private static long ACCURACY = 50 * TimeSpan.TicksPerMillisecond; // sequence numbers are computed from ticks, but reminder timeouts do not fire with tick precision, so we allow this much leeway
private ILogger logger;
private long myId; // used to distinguish during debugging between multiple activations of the same grain
private string filePrefix;
public ReminderTestCopyGrain(IServiceProvider services, ILoggerFactory loggerFactory)
{
this.unvalidatedReminderRegistry = new UnvalidatedReminderRegistry(services);
this.logger = loggerFactory.CreateLogger($"{this.GetType().Name}-{this.IdentityString}");
}
public override async Task OnActivateAsync()
{
this.myId = new Random().Next();
this.allReminders = new Dictionary<string, IGrainReminder>();
this.sequence = new Dictionary<string, long>();
this.period = ReminderTestGrain2.GetDefaultPeriod(this.logger);
this.logger.Info("OnActivateAsync.");
this.filePrefix = "gc" + this.GrainId.Key + "_";
await GetMissingReminders();
}
public override Task OnDeactivateAsync()
{
this.logger.Info("OnDeactivateAsync.");
return Task.CompletedTask;
}
public async Task<IGrainReminder> StartReminder(string reminderName, TimeSpan? p = null, bool validate = false)
{
TimeSpan usePeriod = p ?? this.period;
this.logger.Info("Starting reminder {0} for {1}", reminderName, this.GrainId);
IGrainReminder r;
if (validate)
r = await RegisterOrUpdateReminder(reminderName, /*TimeSpan.FromSeconds(3)*/usePeriod - TimeSpan.FromSeconds(2), usePeriod);
else
r = await this.unvalidatedReminderRegistry.RegisterOrUpdateReminder(
reminderName,
usePeriod - TimeSpan.FromSeconds(2),
usePeriod);
if (this.allReminders.ContainsKey(reminderName))
{
this.allReminders[reminderName] = r;
this.sequence[reminderName] = 0;
}
else
{
this.allReminders.Add(reminderName, r);
this.sequence.Add(reminderName, 0);
}
File.Delete(GetFileName(reminderName)); // if successfully started, then remove any old data
this.logger.Info("Started reminder {0}.", r);
return r;
}
public Task ReceiveReminder(string reminderName, TickStatus status)
{
// After a failure, a newly created activation does not know which reminders
// were registered against this grain, so it may receive a reminder that was
// registered by a previous activation (an earlier incarnation of the grain).
// Handle unknown reminder names defensively.
if (!this.sequence.ContainsKey(reminderName))
{
// allReminders.Add(reminderName, r); // not using allReminders at the moment
//counters.Add(reminderName, 0);
this.sequence.Add(reminderName, 0); // we'll catch up to the latest sequence number while processing this tick
}
// Calculating the tick sequence number.
// All arithmetic is done on ticks (long) because DateTime has no division operator.
// Raw dateTime.Ticks is not reliable here: the gap between two ReceiveReminder()
// invocations may be slightly shorter than period.Ticks. A longer gap is harmless,
// but a shorter one would skew the sequence number, so we coarsen the resolution
// by dividing everything by ACCURACY before computing it.
long now = status.CurrentTickTime.Ticks / ACCURACY;
long first = status.FirstTickTime.Ticks / ACCURACY;
long per = status.Period.Ticks / ACCURACY;
long sequenceNumber = 1 + ((now - first) / per);
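// Illustrative example (assumed numbers): with ACCURACY = 50ms and the default
// 12s period, a tick firing ~36s after FirstTickTime gives
// (now - first) / per = 720 / 240 = 3, i.e. sequenceNumber == 4 (the fourth tick).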
// end of calculating tick sequence number
// ignore ticks whose sequence number is older than one we have already processed
if (sequenceNumber < this.sequence[reminderName])
{
this.logger.Info("{0} Incorrect tick {1} vs. {2} with status {3}.", reminderName, this.sequence[reminderName], sequenceNumber, status);
return Task.CompletedTask;
}
this.sequence[reminderName] = sequenceNumber;
this.logger.Info("{0} Sequence # {1} with status {2}.", reminderName, this.sequence[reminderName], status);
File.WriteAllText(GetFileName(reminderName), this.sequence[reminderName].ToString());
return Task.CompletedTask;
}
public async Task StopReminder(string reminderName)
{
this.logger.Info("Stopping reminder {0}.", reminderName);
// we don't reset the counter so that test methods can still read it after the reminder is stopped
//return UnregisterReminder(allReminders[reminderName]);
IGrainReminder reminder;
if (this.allReminders.TryGetValue(reminderName, out reminder))
{
await UnregisterReminder(reminder);
}
else
{
// after a failure there may be reminders registered by an earlier activation that are not in our local cache,
// so refresh the cache before giving up
await GetMissingReminders();
await UnregisterReminder(this.allReminders[reminderName]);
}
}
private async Task GetMissingReminders()
{
List<IGrainReminder> reminders = await base.GetReminders();
foreach (IGrainReminder l in reminders)
{
if (!this.allReminders.ContainsKey(l.ReminderName))
{
this.allReminders.Add(l.ReminderName, l);
}
}
}
public async Task StopReminder(IGrainReminder reminder)
{
this.logger.Info("Stopping reminder (using ref) {0}.", reminder);
// we don't reset the counter so that test methods can still read it after the reminder is stopped
await UnregisterReminder(reminder);
}
public Task<TimeSpan> GetReminderPeriod(string reminderName)
{
return Task.FromResult(this.period);
}
public Task<long> GetCounter(string name)
{
return Task.FromResult(long.Parse(File.ReadAllText(GetFileName(name))));
}
public async Task<IGrainReminder> GetReminderObject(string reminderName)
{
return await base.GetReminder(reminderName);
}
public async Task<List<IGrainReminder>> GetRemindersList()
{
return await base.GetReminders();
}
private string GetFileName(string reminderName)
{
return string.Format("{0}{1}", this.filePrefix, reminderName);
}
}
public class WrongReminderGrain : Grain, IReminderGrainWrong
{
private ILogger logger;
public WrongReminderGrain(ILoggerFactory loggerFactory)
{
this.logger = loggerFactory.CreateLogger($"{this.GetType().Name}-{this.IdentityString}");
}
public override Task OnActivateAsync()
{
this.logger.Info("OnActivateAsync.");
return Task.CompletedTask;
}
public async Task<bool> StartReminder(string reminderName)
{
this.logger.Info("Starting reminder {0}.", reminderName);
IGrainReminder r = await RegisterOrUpdateReminder(reminderName, TimeSpan.FromSeconds(2), TimeSpan.FromSeconds(3));
this.logger.Info("Started reminder {0}. It shouldn't have succeeded!", r);
return true;
}
}
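// Test-only registry that calls the IReminderService grain service directly and
// therefore skips the validation the standard IReminderRegistry performs
// (presumably the minimum-period checks; that detail is an assumption here).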
internal class UnvalidatedReminderRegistry : GrainServiceClient<IReminderService>, IReminderRegistry
{
public UnvalidatedReminderRegistry(IServiceProvider serviceProvider) : base(serviceProvider)
{
}
public Task<IGrainReminder> RegisterOrUpdateReminder(string reminderName, TimeSpan dueTime, TimeSpan period)
{
return this.GrainService.RegisterOrUpdateReminder(this.CallingGrainReference, reminderName, dueTime, period);
}
public Task UnregisterReminder(IGrainReminder reminder)
{
return this.GrainService.UnregisterReminder(reminder);
}
public Task<IGrainReminder> GetReminder(string reminderName)
{
return this.GrainService.GetReminder(this.CallingGrainReference, reminderName);
}
public Task<List<IGrainReminder>> GetReminders()
{
return this.GrainService.GetReminders(this.CallingGrainReference);
}
}
}
#pragma warning restore 612,618
| |
using System;
using System.Collections.Generic;
using System.Text;
using System.Runtime.InteropServices;
using System.ComponentModel;
using System.IO;
namespace Vestris.ResourceLib
{
/// <summary>
/// An abstract binary resource embedded in an executable module.
/// </summary>
internal abstract class Resource
{
/// <summary>
/// Resource type.
/// </summary>
protected ResourceId _type;
/// <summary>
/// Resource name.
/// </summary>
protected ResourceId _name;
/// <summary>
/// Resource language.
/// </summary>
protected UInt16 _language;
/// <summary>
/// Loaded binary module.
/// </summary>
protected IntPtr _hModule = IntPtr.Zero;
/// <summary>
/// Pointer to the resource.
/// </summary>
protected IntPtr _hResource = IntPtr.Zero;
/// <summary>
/// Resource size.
/// </summary>
protected int _size = 0;
/// <summary>
/// Resource size in bytes.
/// </summary>
internal int Size
{
get
{
return _size;
}
}
/// <summary>
/// Language ID.
/// </summary>
internal UInt16 Language
{
get
{
return _language;
}
set
{
_language = value;
}
}
/// <summary>
/// Resource type.
/// </summary>
internal ResourceId Type
{
get
{
return _type;
}
}
/// <summary>
/// String representation of the resource type.
/// </summary>
internal string TypeName
{
get
{
return _type.IsIntResource()
? _type.ResourceType.ToString()
: _type.Name;
}
}
/// <summary>
/// Resource name.
/// </summary>
internal ResourceId Name
{
get
{
return _name;
}
set
{
_name = value;
}
}
/// <summary>
/// A new resource.
/// </summary>
internal Resource()
{
}
/// <summary>
/// A structured resource embedded in an executable module.
/// </summary>
/// <param name="hModule">Module handle.</param>
/// <param name="hResource">Resource handle.</param>
/// <param name="type">Resource type.</param>
/// <param name="name">Resource name.</param>
/// <param name="language">Language ID.</param>
/// <param name="size">Resource size.</param>
internal Resource(IntPtr hModule, IntPtr hResource, ResourceId type, ResourceId name, UInt16 language, int size)
{
_hModule = hModule;
_type = type;
_name = name;
_language = language;
_hResource = hResource;
_size = size;
LockAndReadResource(hModule, hResource);
}
/// <summary>
/// Lock and read the resource.
/// </summary>
/// <param name="hModule">Module handle.</param>
/// <param name="hResource">Resource handle.</param>
internal void LockAndReadResource(IntPtr hModule, IntPtr hResource)
{
if (hResource == IntPtr.Zero)
return;
IntPtr lpRes = Kernel32.LockResource(hResource);
if (lpRes == IntPtr.Zero)
throw new Win32Exception(Marshal.GetLastWin32Error());
Read(hModule, lpRes);
}
/// <summary>
/// Load a resource from an executable (.exe or .dll) file.
/// </summary>
/// <param name="filename">An executable (.exe or .dll) file.</param>
internal virtual void LoadFrom(string filename)
{
LoadFrom(filename, _type, _name, _language);
}
/// <summary>
/// Load a resource from an executable (.exe or .dll) file.
/// </summary>
/// <param name="filename">An executable (.exe or .dll) file.</param>
/// <param name="name">Resource name.</param>
/// <param name="type">Resource type.</param>
/// <param name="lang">Resource language.</param>
internal void LoadFrom(string filename, ResourceId type, ResourceId name, UInt16 lang)
{
IntPtr hModule = IntPtr.Zero;
try
{
hModule = Kernel32.LoadLibraryEx(filename, IntPtr.Zero,
Kernel32.DONT_RESOLVE_DLL_REFERENCES | Kernel32.LOAD_LIBRARY_AS_DATAFILE);
LoadFrom(hModule, type, name, lang);
}
finally
{
if (hModule != IntPtr.Zero)
Kernel32.FreeLibrary(hModule);
}
}
/// <summary>
/// Load a resource from an executable (.exe or .dll) module.
/// </summary>
/// <param name="hModule">An executable (.exe or .dll) module.</param>
/// <param name="type">Resource type.</param>
/// <param name="name">Resource name.</param>
/// <param name="lang">Resource language.</param>
internal void LoadFrom(IntPtr hModule, ResourceId type, ResourceId name, UInt16 lang)
{
if (IntPtr.Zero == hModule)
throw new Win32Exception(Marshal.GetLastWin32Error());
IntPtr hRes = Kernel32.FindResourceEx(hModule, type.Id, name.Id, lang);
if (IntPtr.Zero == hRes)
throw new Win32Exception(Marshal.GetLastWin32Error());
IntPtr hGlobal = Kernel32.LoadResource(hModule, hRes);
if (IntPtr.Zero == hGlobal)
throw new Win32Exception(Marshal.GetLastWin32Error());
IntPtr lpRes = Kernel32.LockResource(hGlobal);
if (lpRes == IntPtr.Zero)
throw new Win32Exception(Marshal.GetLastWin32Error());
_size = Kernel32.SizeofResource(hModule, hRes);
if (_size <= 0)
throw new Win32Exception(Marshal.GetLastWin32Error());
_type = type;
_name = name;
_language = lang;
Read(hModule, lpRes);
}
/// <summary>
/// Read a resource from a previously loaded module.
/// </summary>
/// <param name="hModule">Module handle.</param>
/// <param name="lpRes">Pointer to the beginning of the resource.</param>
/// <returns>Pointer to the end of the resource.</returns>
internal abstract IntPtr Read(IntPtr hModule, IntPtr lpRes);
/// <summary>
/// Write the resource to a memory stream.
/// </summary>
/// <param name="w">Binary stream.</param>
internal abstract void Write(BinaryWriter w);
/// <summary>
/// Return resource data.
/// </summary>
/// <returns>Resource data.</returns>
internal byte[] WriteAndGetBytes()
{
MemoryStream ms = new MemoryStream();
BinaryWriter w = new BinaryWriter(ms, Encoding.Default);
Write(w);
w.Close();
return ms.ToArray();
}
/// <summary>
/// Save a resource.
/// </summary>
/// <param name="filename">Name of an executable file (.exe or .dll).</param>
internal virtual void SaveTo(string filename)
{
SaveTo(filename, _type, _name, _language);
}
/// <summary>
/// Save a resource to an executable (.exe or .dll) file.
/// </summary>
/// <param name="filename">Path to an executable file.</param>
/// <param name="name">Resource name.</param>
/// <param name="type">Resource type.</param>
/// <param name="langid">Language id.</param>
internal void SaveTo(string filename, ResourceId type, ResourceId name, UInt16 langid)
{
byte[] data = WriteAndGetBytes();
SaveTo(filename, type, name, langid, data);
}
/// <summary>
/// Delete a resource from an executable (.exe or .dll) file.
/// </summary>
/// <param name="filename">Path to an executable file.</param>
internal virtual void DeleteFrom(string filename)
{
Delete(filename, _type, _name, _language);
}
/// <summary>
/// Delete a resource from an executable (.exe or .dll) file.
/// </summary>
/// <param name="filename">Path to an executable file.</param>
/// <param name="name">Resource name.</param>
/// <param name="type">Resource type.</param>
/// <param name="lang">Resource language.</param>
internal static void Delete(string filename, ResourceId type, ResourceId name, UInt16 lang)
{
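// Passing null data with a zero length to UpdateResource removes the resource
// of the given type/name/language from the target file.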
SaveTo(filename, type, name, lang, null);
}
/// <summary>
/// Save a resource to an executable (.exe or .dll) file.
/// </summary>
/// <param name="filename">Path to an executable file.</param>
/// <param name="name">Resource name.</param>
/// <param name="type">Resource type.</param>
/// <param name="lang">Resource language.</param>
/// <param name="data">Resource data.</param>
internal static void SaveTo(string filename, ResourceId type, ResourceId name, UInt16 lang, byte[] data)
{
IntPtr h = Kernel32.BeginUpdateResource(filename, false);
if (h == IntPtr.Zero)
throw new Win32Exception(Marshal.GetLastWin32Error());
if (!Kernel32.UpdateResource(h, type.Id, name.Id,
lang, data, (data == null ? 0 : (uint)data.Length)))
{
throw new Win32Exception(Marshal.GetLastWin32Error());
}
if (!Kernel32.EndUpdateResource(h, false))
throw new Win32Exception(Marshal.GetLastWin32Error());
}
}
}
| |
//
// Copyright (c) 2004-2016 Jaroslaw Kowalski <[email protected]>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
#if !SILVERLIGHT
#define DEBUG
namespace NLog.UnitTests
{
using System;
using System.Globalization;
using System.Threading;
using System.Diagnostics;
using Xunit;
public class NLogTraceListenerTests : NLogTestBase, IDisposable
{
private readonly CultureInfo previousCultureInfo;
public NLogTraceListenerTests()
{
this.previousCultureInfo = Thread.CurrentThread.CurrentCulture;
// set the culture info with the decimal separator (comma) different from InvariantCulture separator (point)
Thread.CurrentThread.CurrentCulture = new CultureInfo("fr-FR");
}
public void Dispose()
{
// restore previous culture info
Thread.CurrentThread.CurrentCulture = this.previousCultureInfo;
}
[Fact]
public void TraceWriteTest()
{
LogManager.Configuration = CreateConfigurationFromString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${logger} ${level} ${message}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
Debug.Listeners.Clear();
Debug.Listeners.Add(new NLogTraceListener { Name = "Logger1" });
Debug.Write("Hello");
AssertDebugLastMessage("debug", "Logger1 Debug Hello");
Debug.Write("Hello", "Cat1");
AssertDebugLastMessage("debug", "Logger1 Debug Cat1: Hello");
Debug.Write(3.1415);
AssertDebugLastMessage("debug", string.Format("Logger1 Debug {0}", 3.1415));
Debug.Write(3.1415, "Cat2");
AssertDebugLastMessage("debug", string.Format("Logger1 Debug Cat2: {0}", 3.1415));
}
[Fact]
public void TraceWriteLineTest()
{
LogManager.Configuration = CreateConfigurationFromString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${logger} ${level} ${message}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
Debug.Listeners.Clear();
Debug.Listeners.Add(new NLogTraceListener { Name = "Logger1" });
Debug.WriteLine("Hello");
AssertDebugLastMessage("debug", "Logger1 Debug Hello");
Debug.WriteLine("Hello", "Cat1");
AssertDebugLastMessage("debug", "Logger1 Debug Cat1: Hello");
Debug.WriteLine(3.1415);
AssertDebugLastMessage("debug", string.Format("Logger1 Debug {0}", 3.1415));
Debug.WriteLine(3.1415, "Cat2");
AssertDebugLastMessage("debug", string.Format("Logger1 Debug Cat2: {0}", 3.1415));
}
[Fact]
public void TraceWriteNonDefaultLevelTest()
{
LogManager.Configuration = CreateConfigurationFromString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${logger} ${level} ${message}' /></targets>
<rules>
<logger name='*' minlevel='Trace' writeTo='debug' />
</rules>
</nlog>");
Debug.Listeners.Clear();
Debug.Listeners.Add(new NLogTraceListener { Name = "Logger1", DefaultLogLevel = LogLevel.Trace });
Debug.Write("Hello");
AssertDebugLastMessage("debug", "Logger1 Trace Hello");
}
[Fact]
public void TraceConfiguration()
{
var listener = new NLogTraceListener();
listener.Attributes.Add("defaultLogLevel", "Warn");
listener.Attributes.Add("forceLogLevel", "Error");
listener.Attributes.Add("autoLoggerName", "1");
listener.Attributes.Add("DISABLEFLUSH", "true");
Assert.Equal(LogLevel.Warn, listener.DefaultLogLevel);
Assert.Equal(LogLevel.Error, listener.ForceLogLevel);
Assert.True(listener.AutoLoggerName);
Assert.True(listener.DisableFlush);
}
[Fact]
public void TraceFailTest()
{
LogManager.Configuration = CreateConfigurationFromString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${logger} ${level} ${message}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
Debug.Listeners.Clear();
Debug.Listeners.Add(new NLogTraceListener { Name = "Logger1" });
Debug.Fail("Message");
AssertDebugLastMessage("debug", "Logger1 Error Message");
Debug.Fail("Message", "Detailed Message");
AssertDebugLastMessage("debug", "Logger1 Error Message Detailed Message");
}
[Fact]
public void AutoLoggerNameTest()
{
LogManager.Configuration = CreateConfigurationFromString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${logger} ${level} ${message}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
Debug.Listeners.Clear();
Debug.Listeners.Add(new NLogTraceListener { Name = "Logger1", AutoLoggerName = true });
Debug.Write("Hello");
AssertDebugLastMessage("debug", this.GetType().FullName + " Debug Hello");
}
[Fact]
public void TraceDataTests()
{
LogManager.Configuration = CreateConfigurationFromString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${logger} ${level} ${message} ${event-context:EventID}' /></targets>
<rules>
<logger name='*' minlevel='Trace' writeTo='debug' />
</rules>
</nlog>");
TraceSource ts = CreateTraceSource();
ts.Listeners.Add(new NLogTraceListener { Name = "Logger1", DefaultLogLevel = LogLevel.Trace });
ts.TraceData(TraceEventType.Critical, 123, 42);
AssertDebugLastMessage("debug", "MySource1 Fatal 42 123");
ts.TraceData(TraceEventType.Critical, 145, 42, 3.14, "foo");
AssertDebugLastMessage("debug", string.Format("MySource1 Fatal 42, {0}, foo 145", 3.14.ToString(System.Globalization.CultureInfo.CurrentCulture)));
}
#if MONO
[Fact(Skip="Not working under MONO - not sure if unit test is wrong, or the code")]
#else
[Fact]
#endif
public void LogInformationTest()
{
LogManager.Configuration = CreateConfigurationFromString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${logger} ${level} ${message} ${event-context:EventID}' /></targets>
<rules>
<logger name='*' minlevel='Trace' writeTo='debug' />
</rules>
</nlog>");
TraceSource ts = CreateTraceSource();
ts.Listeners.Add(new NLogTraceListener { Name = "Logger1", DefaultLogLevel = LogLevel.Trace });
ts.TraceInformation("Quick brown fox");
AssertDebugLastMessage("debug", "MySource1 Info Quick brown fox 0");
ts.TraceInformation("Mary had {0} lamb", "a little");
AssertDebugLastMessage("debug", "MySource1 Info Mary had a little lamb 0");
}
[Fact]
public void TraceEventTests()
{
LogManager.Configuration = CreateConfigurationFromString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${logger} ${level} ${message} ${event-context:EventID}' /></targets>
<rules>
<logger name='*' minlevel='Trace' writeTo='debug' />
</rules>
</nlog>");
TraceSource ts = CreateTraceSource();
ts.Listeners.Add(new NLogTraceListener { Name = "Logger1", DefaultLogLevel = LogLevel.Trace });
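// The assertions below exercise the TraceEventType -> NLog level mapping:
// Critical -> Fatal, Error -> Error, Warning -> Warn, Information -> Info,
// Verbose -> Trace, Suspend/Resume -> Debug.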
ts.TraceEvent(TraceEventType.Information, 123, "Quick brown {0} jumps over the lazy {1}.", "fox", "dog");
AssertDebugLastMessage("debug", "MySource1 Info Quick brown fox jumps over the lazy dog. 123");
ts.TraceEvent(TraceEventType.Information, 123);
AssertDebugLastMessage("debug", "MySource1 Info 123");
ts.TraceEvent(TraceEventType.Verbose, 145, "Bar");
AssertDebugLastMessage("debug", "MySource1 Trace Bar 145");
ts.TraceEvent(TraceEventType.Error, 145, "Foo");
AssertDebugLastMessage("debug", "MySource1 Error Foo 145");
ts.TraceEvent(TraceEventType.Suspend, 145, "Bar");
AssertDebugLastMessage("debug", "MySource1 Debug Bar 145");
ts.TraceEvent(TraceEventType.Resume, 145, "Foo");
AssertDebugLastMessage("debug", "MySource1 Debug Foo 145");
ts.TraceEvent(TraceEventType.Warning, 145, "Bar");
AssertDebugLastMessage("debug", "MySource1 Warn Bar 145");
ts.TraceEvent(TraceEventType.Critical, 145, "Foo");
AssertDebugLastMessage("debug", "MySource1 Fatal Foo 145");
}
#if MONO
[Fact(Skip="Not working under MONO - not sure if unit test is wrong, or the code")]
#else
[Fact]
#endif
public void ForceLogLevelTest()
{
LogManager.Configuration = CreateConfigurationFromString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${logger} ${level} ${message} ${event-context:EventID}' /></targets>
<rules>
<logger name='*' minlevel='Trace' writeTo='debug' />
</rules>
</nlog>");
TraceSource ts = CreateTraceSource();
ts.Listeners.Add(new NLogTraceListener { Name = "Logger1", DefaultLogLevel = LogLevel.Trace, ForceLogLevel = LogLevel.Warn });
// force all logs to be Warn, DefaultLogLevel has no effect on TraceSource
ts.TraceInformation("Quick brown fox");
AssertDebugLastMessage("debug", "MySource1 Warn Quick brown fox 0");
ts.TraceInformation("Mary had {0} lamb", "a little");
AssertDebugLastMessage("debug", "MySource1 Warn Mary had a little lamb 0");
}
private static TraceSource CreateTraceSource()
{
var ts = new TraceSource("MySource1", SourceLevels.All);
#if MONO
// for some reason needed on Mono
ts.Switch = new SourceSwitch("MySource1", "Verbose");
ts.Switch.Level = SourceLevels.All;
#endif
return ts;
}
}
}
#endif
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using FluentAssertions;
using NUnit.Framework;
using SharpFlame.Core.Domain;
using SharpFlame.Core.Domain.Colors;
using SharpFlame.Core.Parsers;
using SharpFlame.Core.Parsers.Ini;
using Sprache;
namespace SharpFlame.Tests.Parser
{
[TestFixture]
public class IniParserTests
{
[Test]
public void CanReadHealthPercent() {
var data = @"100%";
var tmpInt = IniReader.ReadHealthPercent (data);
tmpInt.Should ().Be (100);
}
[Test]
public void CanParseSettingsIni()
{
var path = Path.Combine ("Data",
Path.Combine ("Inis",
Path.Combine ("settings.ini")));
Console.WriteLine( "Parsing: {0}", path );
var iniFile = IniReader.ReadFile (path);
iniFile.Count.Should ().Be (1);
foreach (var sec in iniFile) {
sec.Name.Should ().Be ("Global");
sec.Data.Count.Should ().Be (25);
Console.WriteLine ("Section {0} Token count is: {1}", sec.Name, sec.Data.Count);
foreach (var d in sec.Data) {
switch (d.Name) {
case "AutoSave":
Convert.ToBoolean (d.Data).Should ().Be (true);
break;
case "AutoSaveCompress":
Convert.ToBoolean (d.Data).Should ().Be (false);
break;
case "AutoSaveMinInterval":
Convert.ToInt32 (d.Data).Should ().Be (180);
break;
case "AutoSaveMinChanges":
Convert.ToInt32 (d.Data).Should ().Be (20);
break;
case "MinimapCliffColour":
var color = Rgba.FromString (d.Data);
color.Red.Should ().Be (1F);
color.Green.Should ().Be (0.25F);
color.Blue.Should ().Be (0.25F);
color.Alpha.Should ().Be (0.5F);
break;
case "FOVDefault":
Convert.ToDouble (d.Data, CultureInfo.InvariantCulture).Should ().Be (0.000666666666666667D);
break;
case "FontFamily":
d.Data.Should ().Be ("DejaVu Serif");
break;
case "TilesetsPath":
d.Data.Should ().Be ("/home/pcdummy/Projekte/wzlobby/SharpFlame/source/Data/tilesets");
break;
case "PickOrientation":
Convert.ToBoolean (d.Data).Should ().Be (true);
break;
}
}
}
}
[Test]
public void CanParseDroidIni()
{
var path = Path.Combine ("Data",
Path.Combine ("Inis",
Path.Combine ("droid.ini")));
Console.WriteLine ("Parsing: {0}", path);
var iniFile = IniReader.ReadFile (path);
iniFile.Count.Should().Be (40);
// Parse:
// id = 3693
// startpos = 3
// template = ConstructionDroid
// position = 9792, 26048, 0
// rotation = 0, 0, 0
foreach (var d in iniFile[4].Data) {
switch (d.Name) {
case "id":
int.Parse (d.Data).Should ().Be (3693);
break;
case "startpos":
int.Parse (d.Data).Should ().Be (3);
break;
case "template":
d.Data.Should ().Be ("ConstructionDroid");
break;
case "position":
var tmpPosition = XYZInt.FromString (d.Data);
tmpPosition.X.Should ().Be (9792);
tmpPosition.Y.Should ().Be (26048);
tmpPosition.Z.Should ().Be (0);
break;
case "rotation":
var tmpRotation = Rotation.FromString (d.Data);
tmpRotation.Direction.Should ().Be (0);
tmpRotation.Pitch.Should ().Be (0);
tmpRotation.Roll.Should ().Be (0);
break;
case "player":
break;
case "name":
break;
case "health":
break;
case "droidtype":
break;
case "weapons":
break;
case "parts\\body":
break;
case "parts\\propulsion":
break;
case "parts\\brain":
break;
case "parts\\repair":
break;
case "parts\\ecm":
break;
case "parts\\sensor":
break;
case "parts\\construct":
break;
case "parts\\weapon\\1":
break;
case "parts\\weapon\\2":
break;
case "parts\\weapon\\3":
break;
}
}
}
[Test]
public void CanParseFeatureIni()
{
var path = Path.Combine ("Data",
Path.Combine ("Inis",
Path.Combine ("feature.ini")));
Console.WriteLine ("Parsing: {0}", path);
var iniFile = IniReader.ReadFile (path);
iniFile [3].Name.Should ().Be ("feature_1493");
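// Parse (values asserted below):
// id = 1493
// position = 2496, 26688, 0
// rotation = 0, 0, 0
// name = AirTrafficControl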
foreach (var d in iniFile[3].Data) {
switch (d.Name) {
case "id":
int.Parse (d.Data).Should ().Be (1493);
break;
case "position":
var tmpPosition = XYZInt.FromString (d.Data);
tmpPosition.X.Should ().Be (2496);
tmpPosition.Y.Should ().Be (26688);
tmpPosition.Z.Should ().Be (0);
break;
case "rotation":
var tmpRotation = Rotation.FromString (d.Data);
tmpRotation.Direction.Should ().Be (0);
tmpRotation.Pitch.Should ().Be (0);
tmpRotation.Roll.Should ().Be (0);
break;
case "name":
d.Data.Should ().Be ("AirTrafficControl");
break;
default:
throw new Exception (string.Format ("Invalid ID \"{0}\" value \"{1}\"", d.Name, d.Data));
}
}
}
[Test]
public void CanParseStructIni()
{
var path = Path.Combine ("Data",
Path.Combine ("Inis",
Path.Combine ("struct.ini")));
Console.WriteLine ("Parsing: {0}", path);
var iniFile = IniReader.ReadFile (path);
iniFile.Count.Should ().Be (2006);
iniFile [2].Name.Should ().Be ("structure_1248");
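// Parse (values asserted below):
// id = 1248
// startpos = 5
// wall/type = 0
// position = 6208, 5440, 0
// rotation = 16384, 0, 0
// modules = 0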
foreach (var d in iniFile[2].Data) {
switch (d.Name) {
case "id":
int.Parse (d.Data).Should ().Be (1248);
break;
case "startpos":
int.Parse (d.Data).Should ().Be (5);
break;
case "name":
break;
case "wall/type":
int.Parse (d.Data).Should ().Be (0);
break;
case "position":
var tmpPosition = XYZInt.FromString (d.Data);
tmpPosition.X.Should ().Be (6208);
tmpPosition.Y.Should ().Be (5440);
tmpPosition.Z.Should ().Be (0);
break;
case "rotation":
var tmpRotation = Rotation.FromString (d.Data);
tmpRotation.Direction.Should ().Be (16384);
tmpRotation.Pitch.Should ().Be (0);
tmpRotation.Roll.Should ().Be (0);
break;
case "modules":
int.Parse (d.Data).Should ().Be (0);
break;
default:
throw new Exception (string.Format ("Invalid ID \"{0}\" value \"{1}\"", d.Name, d.Data));
}
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Threading;
namespace Apache.Geode.Client.UnitTests
{
using NUnit.Framework;
using Apache.Geode.DUnitFramework;
using Apache.Geode.Client;
[TestFixture]
[Category("group1")]
[Category("unicast_only")]
[Category("generics")]
public class ThinClientRegionInterestFailoverInterest2Tests : ThinClientRegionSteps
{
#region Private members and methods
private UnitProcess m_client1, m_client2, m_client3, m_feeder;
private static string[] m_regexes = { "Key-*1", "Key-*2",
"Key-*3", "Key-*4" };
private const string m_regex23 = "Key-[23]";
private const string m_regexWildcard = "Key-.*";
private const int m_numUnicodeStrings = 5;
private static string[] m_keysNonRegex = { "key-1", "key-2", "key-3" };
private static string[] m_keysForRegex = {"key-regex-1",
"key-regex-2", "key-regex-3" };
private static string[] RegionNamesForInterestNotify =
{ "RegionTrue", "RegionFalse", "RegionOther" };
string GetUnicodeString(int index)
{
return new string('\x0905', 40) + index.ToString("D10");
}
#endregion
protected override ClientBase[] GetClients()
{
m_client1 = new UnitProcess();
m_client2 = new UnitProcess();
m_client3 = new UnitProcess();
m_feeder = new UnitProcess();
return new ClientBase[] { m_client1, m_client2, m_client3, m_feeder };
}
[TestFixtureTearDown]
public override void EndTests()
{
CacheHelper.StopJavaServers();
base.EndTests();
}
[TearDown]
public override void EndTest()
{
try
{
m_client1.Call(DestroyRegions);
m_client2.Call(DestroyRegions);
CacheHelper.ClearEndpoints();
}
finally
{
CacheHelper.StopJavaServers();
}
base.EndTest();
}
#region Steps for Thin Client IRegion<object, object> with Interest
public void StepFourIL()
{
VerifyCreated(m_regionNames[0], m_keys[0]);
VerifyCreated(m_regionNames[1], m_keys[2]);
VerifyEntry(m_regionNames[0], m_keys[0], m_vals[0]);
VerifyEntry(m_regionNames[1], m_keys[2], m_vals[2]);
}
public void StepFourRegex3()
{
IRegion<object, object> region0 = CacheHelper.GetVerifyRegion<object, object>(m_regionNames[0]);
IRegion<object, object> region1 = CacheHelper.GetVerifyRegion<object, object>(m_regionNames[1]);
try
{
Util.Log("Registering empty regular expression.");
region0.GetSubscriptionService().RegisterRegex(string.Empty);
Assert.Fail("Did not get expected exception!");
}
catch (Exception ex)
{
Util.Log("Got expected exception {0}: {1}", ex.GetType(), ex.Message);
}
try
{
Util.Log("Registering null regular expression.");
region1.GetSubscriptionService().RegisterRegex(null);
Assert.Fail("Did not get expected exception!");
}
catch (Exception ex)
{
Util.Log("Got expected exception {0}: {1}", ex.GetType(), ex.Message);
}
try
{
Util.Log("Registering non-existent regular expression.");
region1.GetSubscriptionService().UnregisterRegex("Non*Existent*Regex*");
Assert.Fail("Did not get expected exception!");
}
catch (Exception ex)
{
Util.Log("Got expected exception {0}: {1}", ex.GetType(), ex.Message);
}
}
public void StepFourFailoverRegex()
{
VerifyCreated(m_regionNames[0], m_keys[0]);
VerifyCreated(m_regionNames[1], m_keys[2]);
VerifyEntry(m_regionNames[0], m_keys[0], m_vals[0]);
VerifyEntry(m_regionNames[1], m_keys[2], m_vals[2]);
UpdateEntry(m_regionNames[1], m_keys[1], m_vals[1], true);
UnregisterRegexes(null, m_regexes[2]);
}
public void StepFiveIL()
{
VerifyCreated(m_regionNames[0], m_keys[1]);
VerifyCreated(m_regionNames[1], m_keys[3]);
VerifyEntry(m_regionNames[0], m_keys[1], m_vals[1]);
VerifyEntry(m_regionNames[1], m_keys[3], m_vals[3]);
UpdateEntry(m_regionNames[0], m_keys[0], m_nvals[0], false);
UpdateEntry(m_regionNames[1], m_keys[2], m_nvals[2], false);
}
public void StepFiveRegex()
{
CreateEntry(m_regionNames[0], m_keys[2], m_vals[2]);
CreateEntry(m_regionNames[1], m_keys[3], m_vals[3]);
}
public void CreateAllEntries(string regionName)
{
CreateEntry(regionName, m_keys[0], m_vals[0]);
CreateEntry(regionName, m_keys[1], m_vals[1]);
CreateEntry(regionName, m_keys[2], m_vals[2]);
CreateEntry(regionName, m_keys[3], m_vals[3]);
}
public void VerifyAllEntries(string regionName, bool newVal, bool checkVal)
{
string[] vals = newVal ? m_nvals : m_vals;
VerifyEntry(regionName, m_keys[0], vals[0], checkVal);
VerifyEntry(regionName, m_keys[1], vals[1], checkVal);
VerifyEntry(regionName, m_keys[2], vals[2], checkVal);
VerifyEntry(regionName, m_keys[3], vals[3], checkVal);
}
public void VerifyInvalidAll(string regionName, params string[] keys)
{
if (keys != null)
{
foreach (string key in keys)
{
VerifyInvalid(regionName, key);
}
}
}
public void UpdateAllEntries(string regionName, bool checkVal)
{
UpdateEntry(regionName, m_keys[0], m_nvals[0], checkVal);
UpdateEntry(regionName, m_keys[1], m_nvals[1], checkVal);
UpdateEntry(regionName, m_keys[2], m_nvals[2], checkVal);
UpdateEntry(regionName, m_keys[3], m_nvals[3], checkVal);
}
public void DoNetsearchAllEntries(string regionName, bool newVal,
bool checkNoKey)
{
string[] vals;
if (newVal)
{
vals = m_nvals;
}
else
{
vals = m_vals;
}
DoNetsearch(regionName, m_keys[0], vals[0], checkNoKey);
DoNetsearch(regionName, m_keys[1], vals[1], checkNoKey);
DoNetsearch(regionName, m_keys[2], vals[2], checkNoKey);
DoNetsearch(regionName, m_keys[3], vals[3], checkNoKey);
}
public void StepFiveFailoverRegex()
{
UpdateEntry(m_regionNames[0], m_keys[0], m_nvals[0], false);
UpdateEntry(m_regionNames[1], m_keys[2], m_nvals[2], false);
VerifyEntry(m_regionNames[1], m_keys[1], m_vals[1], false);
}
public void StepSixIL()
{
VerifyEntry(m_regionNames[0], m_keys[0], m_nvals[0]);
VerifyEntry(m_regionNames[1], m_keys[2], m_vals[2]);
IRegion<object, object> region0 = CacheHelper.GetRegion<object, object>(m_regionNames[0]);
IRegion<object, object> region1 = CacheHelper.GetRegion<object, object>(m_regionNames[1]);
region0.Remove(m_keys[1]);
region1.Remove(m_keys[3]);
}
public void StepSixRegex()
{
CreateEntry(m_regionNames[0], m_keys[0], m_vals[0]);
CreateEntry(m_regionNames[1], m_keys[1], m_vals[1]);
VerifyEntry(m_regionNames[0], m_keys[2], m_vals[2]);
VerifyEntry(m_regionNames[1], m_keys[3], m_vals[3]);
UnregisterRegexes(null, m_regexes[3]);
}
public void StepSixFailoverRegex()
{
VerifyEntry(m_regionNames[0], m_keys[0], m_nvals[0], false);
VerifyEntry(m_regionNames[1], m_keys[2], m_vals[2], false);
UpdateEntry(m_regionNames[1], m_keys[1], m_nvals[1], false);
}
public void StepSevenIL()
{
VerifyDestroyed(m_regionNames[0], m_keys[1]);
VerifyEntry(m_regionNames[1], m_keys[3], m_vals[3]);
}
public void StepSevenRegex()
{
VerifyEntry(m_regionNames[0], m_keys[0], m_vals[0]);
VerifyEntry(m_regionNames[1], m_keys[1], m_vals[1]);
UpdateEntry(m_regionNames[0], m_keys[2], m_nvals[2], true);
UpdateEntry(m_regionNames[1], m_keys[3], m_nvals[3], true);
UnregisterRegexes(null, m_regexes[1]);
}
public void StepSevenRegex2()
{
VerifyEntry(m_regionNames[0], m_keys[1], m_vals[1]);
VerifyEntry(m_regionNames[0], m_keys[2], m_vals[2]);
DoNetsearch(m_regionNames[0], m_keys[0], m_vals[0], true);
DoNetsearch(m_regionNames[0], m_keys[3], m_vals[3], true);
UpdateAllEntries(m_regionNames[1], true);
}
public void StepSevenInterestResultPolicyInv()
{
IRegion<object, object> region = CacheHelper.GetVerifyRegion<object, object>(m_regionNames[0]);
region.GetSubscriptionService().RegisterRegex(m_regex23);
VerifyInvalidAll(m_regionNames[0], m_keys[1], m_keys[2]);
VerifyEntry(m_regionNames[0], m_keys[0], m_vals[0], true);
VerifyEntry(m_regionNames[0], m_keys[3], m_vals[3], true);
}
public void StepSevenFailoverRegex()
{
UpdateEntry(m_regionNames[0], m_keys[0], m_vals[0], true);
UpdateEntry(m_regionNames[1], m_keys[2], m_vals[2], true);
VerifyEntry(m_regionNames[1], m_keys[1], m_nvals[1]);
}
public void StepEightIL()
{
VerifyEntry(m_regionNames[0], m_keys[0], m_nvals[0]);
VerifyEntry(m_regionNames[1], m_keys[2], m_nvals[2]);
}
public void StepEightRegex()
{
VerifyEntry(m_regionNames[0], m_keys[2], m_nvals[2]);
VerifyEntry(m_regionNames[1], m_keys[3], m_vals[3]);
UpdateEntry(m_regionNames[0], m_keys[0], m_nvals[0], true);
UpdateEntry(m_regionNames[1], m_keys[1], m_nvals[1], true);
}
public void StepEightInterestResultPolicyInv()
{
IRegion<object, object> region = CacheHelper.GetVerifyRegion<object, object>(m_regionNames[1]);
region.GetSubscriptionService().RegisterAllKeys();
VerifyInvalidAll(m_regionNames[1], m_keys[0], m_keys[1],
m_keys[2], m_keys[3]);
UpdateAllEntries(m_regionNames[0], true);
}
public void StepEightFailoverRegex()
{
VerifyEntry(m_regionNames[0], m_keys[0], m_vals[0]);
VerifyEntry(m_regionNames[1], m_keys[2], m_vals[2]);
}
public void StepNineRegex()
{
VerifyEntry(m_regionNames[0], m_keys[0], m_nvals[0]);
VerifyEntry(m_regionNames[1], m_keys[1], m_vals[1]);
}
public void StepNineRegex2()
{
VerifyEntry(m_regionNames[0], m_keys[0], m_vals[0]);
VerifyEntry(m_regionNames[0], m_keys[1], m_nvals[1]);
VerifyEntry(m_regionNames[0], m_keys[2], m_nvals[2]);
VerifyEntry(m_regionNames[0], m_keys[3], m_vals[3]);
}
public void StepNineInterestResultPolicyInv()
{
IRegion<object, object> region = CacheHelper.GetVerifyRegion<object, object>(m_regionNames[0]);
region.GetSubscriptionService().UnregisterRegex(m_regex23);
List<Object> keys = new List<Object>();
keys.Add(m_keys[0]);
keys.Add(m_keys[1]);
keys.Add(m_keys[2]);
region.GetSubscriptionService().RegisterKeys(keys);
VerifyInvalidAll(m_regionNames[0], m_keys[0], m_keys[1], m_keys[2]);
}
public void PutUnicodeKeys(string regionName, bool updates)
{
IRegion<object, object> region = CacheHelper.GetVerifyRegion<object, object>(regionName);
string key;
object val;
for (int index = 0; index < m_numUnicodeStrings; ++index)
{
key = GetUnicodeString(index);
if (updates)
{
val = index + 100;
}
else
{
val = (float)index + 20.0F;
}
region[key] = val;
}
}
public void RegisterUnicodeKeys(string regionName)
{
IRegion<object, object> region = CacheHelper.GetVerifyRegion<object, object>(regionName);
string[] keys = new string[m_numUnicodeStrings];
for (int index = 0; index < m_numUnicodeStrings; ++index)
{
keys[m_numUnicodeStrings - index - 1] = GetUnicodeString(index);
}
region.GetSubscriptionService().RegisterKeys(keys);
}
public void VerifyUnicodeKeys(string regionName, bool updates)
{
IRegion<object, object> region = CacheHelper.GetVerifyRegion<object, object>(regionName);
string key;
object expectedVal;
for (int index = 0; index < m_numUnicodeStrings; ++index)
{
key = GetUnicodeString(index);
if (updates)
{
expectedVal = index + 100;
Assert.AreEqual(expectedVal, region.GetEntry(key).Value,
"Got unexpected value");
}
else
{
expectedVal = (float)index + 20.0F;
Assert.AreEqual(expectedVal, region[key],
"Got unexpected value");
}
}
}
public void CreateRegionsInterestNotify_Pool(string[] regionNames,
string locators, string poolName, bool notify, string nbs)
{
Properties<string, string> props = Properties<string, string>.Create<string, string>();
//props.Insert("notify-by-subscription-override", nbs);
CacheHelper.InitConfig(props);
CacheHelper.CreateTCRegion_Pool(regionNames[0], true, true,
new TallyListener<object, object>(), locators, poolName, notify);
CacheHelper.CreateTCRegion_Pool(regionNames[1], true, true,
new TallyListener<object, object>(), locators, poolName, notify);
CacheHelper.CreateTCRegion_Pool(regionNames[2], true, true,
new TallyListener<object, object>(), locators, poolName, notify);
}
/*
public void CreateRegionsInterestNotify(string[] regionNames,
string endpoints, bool notify, string nbs)
{
Properties props = Properties.Create();
//props.Insert("notify-by-subscription-override", nbs);
CacheHelper.InitConfig(props);
CacheHelper.CreateTCRegion(regionNames[0], true, false,
new TallyListener(), endpoints, notify);
CacheHelper.CreateTCRegion(regionNames[1], true, false,
new TallyListener(), endpoints, notify);
CacheHelper.CreateTCRegion(regionNames[2], true, false,
new TallyListener(), endpoints, notify);
}
* */
public void DoFeed()
{
foreach (string regionName in RegionNamesForInterestNotify)
{
IRegion<object, object> region = CacheHelper.GetRegion<object, object>(regionName);
foreach (string key in m_keysNonRegex)
{
region[key] = "00";
}
foreach (string key in m_keysForRegex)
{
region[key] = "00";
}
}
}
public void DoFeederOps()
{
foreach (string regionName in RegionNamesForInterestNotify)
{
IRegion<object, object> region = CacheHelper.GetRegion<object, object>(regionName);
foreach (string key in m_keysNonRegex)
{
region[key] = "11";
region[key] = "22";
region[key] = "33";
region.GetLocalView().Invalidate(key);
region.Remove(key);
}
foreach (string key in m_keysForRegex)
{
region[key] = "11";
region[key] = "22";
region[key] = "33";
region.GetLocalView().Invalidate(key);
region.Remove(key);
}
}
}
public void DoRegister()
{
DoRegisterInterests(RegionNamesForInterestNotify[0], true);
DoRegisterInterests(RegionNamesForInterestNotify[1], false);
// We intentionally do not register interest in Region3
//DoRegisterInterestsBlah(RegionNamesForInterestNotifyBlah[2]);
}
public void DoRegisterInterests(string regionName, bool receiveValues)
{
IRegion<object, object> region = CacheHelper.GetRegion<object, object>(regionName);
List<string> keys = new List<string>();
foreach (string key in m_keysNonRegex)
{
keys.Add(key);
}
region.GetSubscriptionService().RegisterKeys(keys.ToArray(), false, false, receiveValues);
region.GetSubscriptionService().RegisterRegex("key-regex.*", false, false, receiveValues);
}
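// Note (assumed client behavior): the trailing boolean passed to RegisterKeys/RegisterRegex
// above is the receive-values flag; registering with receiveValues=false asks the server to
// deliver invalidations instead of full values. DoRegister exercises both modes (true for
// region 0, false for region 1), which is what DoValidation later tallies through the
// TallyListener create/update/invalidate counters.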
public void DoUnregister()
{
DoUnregisterInterests(RegionNamesForInterestNotify[0]);
DoUnregisterInterests(RegionNamesForInterestNotify[1]);
}
public void DoUnregisterInterests(string regionName)
{
List<string> keys = new List<string>();
foreach (string key in m_keysNonRegex)
{
keys.Add(key);
}
IRegion<object, object> region = CacheHelper.GetRegion<object, object>(regionName);
region.GetSubscriptionService().UnregisterKeys(keys.ToArray());
region.GetSubscriptionService().UnregisterRegex("key-regex.*");
}
public void DoValidation(string clientName, string regionName,
int creates, int updates, int invalidates, int destroys)
{
IRegion<object, object> region = CacheHelper.GetRegion<object, object>(regionName);
TallyListener<object, object> listener = region.Attributes.CacheListener as TallyListener<object, object>;
Util.Log(clientName + ": " + regionName + ": creates expected=" + creates +
", actual=" + listener.Creates);
Util.Log(clientName + ": " + regionName + ": updates expected=" + updates +
", actual=" + listener.Updates);
Util.Log(clientName + ": " + regionName + ": invalidates expected=" + invalidates +
", actual=" + listener.Invalidates);
Util.Log(clientName + ": " + regionName + ": destroys expected=" + destroys +
", actual=" + listener.Destroys);
Assert.AreEqual(creates, listener.Creates, clientName + ": " + regionName);
Assert.AreEqual(updates, listener.Updates, clientName + ": " + regionName);
Assert.AreEqual(invalidates, listener.Invalidates, clientName + ": " + regionName);
Assert.AreEqual(destroys, listener.Destroys, clientName + ": " + regionName);
}
#endregion
[Test]
public void FailoverInterest2()
{
CacheHelper.SetupJavaServers(true,
"cacheserver_notify_subscription.xml",
"cacheserver_notify_subscription2.xml");
CacheHelper.StartJavaLocator(1, "GFELOC");
Util.Log("Locator started");
CacheHelper.StartJavaServerWithLocators(1, "GFECS1", 1);
Util.Log("Cacheserver 1 started.");
m_client1.Call(CreateTCRegions_Pool, RegionNames,
CacheHelper.Locators, "__TESTPOOL1_", true);
Util.Log("StepOne complete.");
m_client2.Call(CreateTCRegions_Pool, RegionNames,
CacheHelper.Locators, "__TESTPOOL1_", true);
Util.Log("StepTwo complete.");
m_client2.Call(RegisterAllKeys, RegionNames);
Util.Log("StepTwo complete.");
m_client1.Call(StepThree);
Util.Log("StepThree complete.");
m_client2.Call(StepFourIL);
Util.Log("StepFour complete.");
CacheHelper.StartJavaServerWithLocators(2, "GFECS2", 1);
Util.Log("Cacheserver 2 started.");
CacheHelper.StopJavaServer(1);
Util.Log("Cacheserver 1 stopped.");
m_client1.Call(StepFiveFailover);
Util.Log("StepFive complete.");
m_client2.Call(StepSixFailover);
Util.Log("StepSix complete.");
// Client2, unregister all keys
m_client2.Call(UnregisterAllKeys, RegionNames);
Util.Log("UnregisterAllKeys complete.");
m_client1.Call(StepSevenFailover);
Util.Log("StepSeven complete.");
m_client2.Call(StepEightIL);
Util.Log("StepEight complete.");
m_client1.Call(Close);
m_client2.Call(Close);
CacheHelper.StopJavaServer(2);
Util.Log("Cacheserver 2 stopped.");
CacheHelper.StopJavaLocator(1);
Util.Log("Locator stopped");
CacheHelper.ClearEndpoints();
CacheHelper.ClearLocators();
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.ServiceModel;
using System.ServiceModel.Channels;
using System.ServiceModel.Description;
using System.ServiceModel.Security;
using System.ServiceModel.Security.Tokens;
namespace Microsoft.Tools.ServiceModel.Svcutil
{
internal class EndpointSelector : MetadataFixup
{
private static List<string> s_bindingValidationErrors = new List<string>();
public EndpointSelector(WsdlImporter importer, Collection<ServiceEndpoint> endpoints, Collection<Binding> bindings, Collection<ContractDescription> contracts)
: base(importer, endpoints, bindings, contracts)
{ }
public override void Fixup()
{
CollectionHelpers.MapList<ServiceEndpoint>(this.endpoints, EndpointSelector.SelectEndpoint, this.AddWarning);
bindings.Clear();
foreach (ServiceEndpoint endpoint in endpoints)
{
bindings.Add(endpoint.Binding);
}
}
private static bool SelectEndpoint(ServiceEndpoint endpoint)
{
return IsBindingSupported(endpoint.Binding);
}
private static bool IsBindingSupported(Binding binding)
{
s_bindingValidationErrors.Clear();
if (!(binding is BasicHttpBinding || binding is NetHttpBinding || binding is WSHttpBinding || binding is NetTcpBinding || binding is CustomBinding))
{
s_bindingValidationErrors.Add(string.Format(SR.BindingTypeNotSupportedFormat, binding.GetType().FullName,
typeof(BasicHttpBinding).FullName, typeof(NetHttpBinding).FullName, typeof(WSHttpBinding).FullName, typeof(NetTcpBinding).FullName, typeof(CustomBinding).FullName));
}
else
{
WSHttpBinding wsHttpBinding = binding as WSHttpBinding;
if (wsHttpBinding != null)
{
if (wsHttpBinding.TransactionFlow)
{
s_bindingValidationErrors.Add(SR.BindingTransactionFlowNotSupported);
}
if (wsHttpBinding.Security.Mode == SecurityMode.Message)
{
s_bindingValidationErrors.Add(string.Format(SRServiceModel.UnsupportedSecuritySetting, "Mode", wsHttpBinding.Security.Mode));
}
}
else
{
NetTcpBinding netTcpBinding = binding as NetTcpBinding;
if (netTcpBinding != null)
{
if (netTcpBinding.TransactionFlow)
{
s_bindingValidationErrors.Add(SR.BindingTransactionFlowNotSupported);
}
if (netTcpBinding.Security.Mode == SecurityMode.Message)
{
s_bindingValidationErrors.Add(string.Format(SRServiceModel.UnsupportedSecuritySetting, "Mode", netTcpBinding.Security.Mode));
}
}
else
{
NetHttpBinding netHttpBinding = binding as NetHttpBinding;
if (netHttpBinding != null)
{
if (netHttpBinding.Security.Mode == BasicHttpSecurityMode.Message)
{
s_bindingValidationErrors.Add(string.Format(SRServiceModel.UnsupportedSecuritySetting, "Mode", netHttpBinding.Security.Mode));
}
}
else
{
BasicHttpBinding basicHttpBinding = binding as BasicHttpBinding;
if (basicHttpBinding != null && basicHttpBinding.Security.Mode == BasicHttpSecurityMode.Message)
{
s_bindingValidationErrors.Add(string.Format(SRServiceModel.UnsupportedSecuritySetting, "Mode", basicHttpBinding.Security.Mode));
}
}
}
}
ValidateBindingElements(binding);
}
return s_bindingValidationErrors.Count == 0;
}
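// Example (illustrative, based on the checks above): a BasicHttpBinding or NetTcpBinding with
// Transport security passes this filter, while the same binding with Security.Mode = Message
// adds an UnsupportedSecuritySetting entry to s_bindingValidationErrors, so SelectEndpoint
// returns false and Fixup() drops that endpoint with a warning.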
private static void ValidateBindingElements(Binding binding)
{
BindingElementCollection bindingElements = binding.CreateBindingElements();
foreach (BindingElement bindingElement in bindingElements)
{
if (bindingElement is TransportBindingElement)
{
if (!(bindingElement is HttpTransportBindingElement || bindingElement is HttpsTransportBindingElement || bindingElement is TcpTransportBindingElement))
{
s_bindingValidationErrors.Add(string.Format(SR.BindingTransportTypeNotSupportedFormat, bindingElement.GetType().FullName,
typeof(HttpTransportBindingElement).FullName, typeof(HttpsTransportBindingElement).FullName, typeof(TcpTransportBindingElement).FullName));
}
}
else if (bindingElement is MessageEncodingBindingElement)
{
if (!(bindingElement is BinaryMessageEncodingBindingElement || bindingElement is TextMessageEncodingBindingElement))
{
s_bindingValidationErrors.Add(string.Format(SR.BindingMessageEncodingElementNotSupportedFormat, bindingElement.GetType().FullName,
typeof(BinaryMessageEncodingBindingElement).FullName, typeof(TextMessageEncodingBindingElement).FullName));
}
else
{
var binMsgEncodingElement = bindingElement as BinaryMessageEncodingBindingElement;
if (binMsgEncodingElement != null)
{
if (binMsgEncodingElement.MessageVersion != MessageVersion.Soap12WSAddressing10)
{
s_bindingValidationErrors.Add(string.Format(SR.BindingBinaryMessageEncodingVersionNotSupportedFormat,
binMsgEncodingElement.MessageVersion, MessageVersion.Soap12WSAddressing10));
}
}
else
{
var txtMsgEncodingElement = bindingElement as TextMessageEncodingBindingElement;
if (txtMsgEncodingElement != null &&
txtMsgEncodingElement.MessageVersion != MessageVersion.None &&
txtMsgEncodingElement.MessageVersion != MessageVersion.Soap11 &&
txtMsgEncodingElement.MessageVersion != MessageVersion.Soap12 &&
txtMsgEncodingElement.MessageVersion != MessageVersion.Soap11WSAddressing10 &&
txtMsgEncodingElement.MessageVersion != MessageVersion.Soap12WSAddressing10)
{
s_bindingValidationErrors.Add(string.Format(SR.BindingTextMessageEncodingVersionNotSupportedFormat,
txtMsgEncodingElement.MessageVersion,
MessageVersion.None,
MessageVersion.Soap11,
MessageVersion.Soap12,
MessageVersion.Soap11WSAddressing10,
MessageVersion.Soap12WSAddressing10));
}
}
}
}
else if (bindingElement is SslStreamSecurityBindingElement)
{
// do nothing
}
else if (bindingElement is WindowsStreamSecurityBindingElement)
{
// do nothing
}
else if (bindingElement is ReliableSessionBindingElement)
{
// do nothing
}
else if (bindingElement is TransactionFlowBindingElement)
{
if (binding is WSHttpBinding && ((WSHttpBinding)binding).TransactionFlow)
{
s_bindingValidationErrors.Add(SR.BindingTransactionFlowNotSupported);
}
if (binding is NetTcpBinding && ((NetTcpBinding)binding).TransactionFlow)
{
s_bindingValidationErrors.Add(SR.BindingTransactionFlowNotSupported);
}
}
else if (bindingElement is TransportSecurityBindingElement)
{
ValidateTransportSecurityBindingElement(bindingElement as TransportSecurityBindingElement);
}
else
{
s_bindingValidationErrors.Add(string.Format(SR.BindingElementTypeNotSupportedFormat, bindingElement.GetType().FullName));
}
}
}
private static void ValidateTransportSecurityBindingElement(TransportSecurityBindingElement transportSecurityBindingElement)
{
if (transportSecurityBindingElement.EndpointSupportingTokenParameters.Signed.Count != 0 ||
transportSecurityBindingElement.EndpointSupportingTokenParameters.SignedEndorsing.Count != 0)
{
s_bindingValidationErrors.Add(SR.BindingTransportSecurityTokenSignedOrSignedEndorsingNotSupported);
}
else if (transportSecurityBindingElement.EndpointSupportingTokenParameters.SignedEncrypted.Count == 1)
{
ValidateUserNamePasswordSecurityBindingElement(transportSecurityBindingElement);
}
else if (transportSecurityBindingElement.EndpointSupportingTokenParameters.Endorsing.Count == 1)
{
SecureConversationSecurityTokenParameters endorsingTokenParams = transportSecurityBindingElement.EndpointSupportingTokenParameters.Endorsing[0] as SecureConversationSecurityTokenParameters;
if (endorsingTokenParams != null)
{
if (endorsingTokenParams.RequireDerivedKeys)
{
s_bindingValidationErrors.Add(SR.BindingTransportSecurityTokenParamsRequiringDerivedKeysNotSupported);
}
TransportSecurityBindingElement bootstrapElement = endorsingTokenParams.BootstrapSecurityBindingElement as TransportSecurityBindingElement;
if (bootstrapElement == null)
{
s_bindingValidationErrors.Add(string.Format(SR.BindingTransportSecurityElementTypeNotSupportedFormat,
endorsingTokenParams.BootstrapSecurityBindingElement.GetType().FullName, typeof(TransportSecurityBindingElement).FullName));
}
else
{
ValidateTransportSecurityBindingElement(bootstrapElement);
}
}
}
if (!transportSecurityBindingElement.IncludeTimestamp)
{
s_bindingValidationErrors.Add(SR.BindingTransportSecurityMustIncludeTimestamp);
}
if (transportSecurityBindingElement.DefaultAlgorithmSuite != SecurityAlgorithmSuite.Default)
{
s_bindingValidationErrors.Add(string.Format(SR.BindingTransportSecurityDefaultAlgorithmSuiteNotSupportedFormat,
transportSecurityBindingElement.DefaultAlgorithmSuite.GetType().FullName, SecurityAlgorithmSuite.Default.GetType().FullName));
}
}
private static void ValidateUserNamePasswordSecurityBindingElement(TransportSecurityBindingElement transportSecurityBindingElement)
{
bool singleSignedEncryptedParam = transportSecurityBindingElement.EndpointSupportingTokenParameters.SignedEncrypted.Count == 1;
System.Diagnostics.Debug.Assert(singleSignedEncryptedParam, "Unexpected number of SignedEncrypted token parameters in transport security binding!");
if (!singleSignedEncryptedParam)
{
return;
}
if (transportSecurityBindingElement.MessageSecurityVersion != MessageSecurityVersion.WSSecurity10WSTrustFebruary2005WSSecureConversationFebruary2005WSSecurityPolicy11BasicSecurityProfile10 &&
transportSecurityBindingElement.MessageSecurityVersion != MessageSecurityVersion.WSSecurity11WSTrustFebruary2005WSSecureConversationFebruary2005WSSecurityPolicy11BasicSecurityProfile10 &&
transportSecurityBindingElement.MessageSecurityVersion != MessageSecurityVersion.WSSecurity11WSTrustFebruary2005WSSecureConversationFebruary2005WSSecurityPolicy11 &&
transportSecurityBindingElement.MessageSecurityVersion != MessageSecurityVersion.WSSecurity11WSTrust13WSSecureConversation13WSSecurityPolicy12BasicSecurityProfile10)
{
string values = string.Format(CultureInfo.InvariantCulture, "'{0}', '{1}', '{2}'",
MessageSecurityVersion.WSSecurity10WSTrustFebruary2005WSSecureConversationFebruary2005WSSecurityPolicy11BasicSecurityProfile10,
MessageSecurityVersion.WSSecurity11WSTrustFebruary2005WSSecureConversationFebruary2005WSSecurityPolicy11BasicSecurityProfile10,
MessageSecurityVersion.WSSecurity11WSTrustFebruary2005WSSecureConversationFebruary2005WSSecurityPolicy11);
s_bindingValidationErrors.Add(string.Format(SR.BindingTransportMessageSecurityVersionNotSupportedFormat, transportSecurityBindingElement.MessageSecurityVersion, values));
}
if (transportSecurityBindingElement.DefaultAlgorithmSuite != SecurityAlgorithmSuite.Default)
{
s_bindingValidationErrors.Add(string.Format(SR.BindingTransportSecurityDefaultAlgorithmSuiteNotSupportedFormat,
transportSecurityBindingElement.DefaultAlgorithmSuite.GetType().FullName, SecurityAlgorithmSuite.Default.GetType().FullName));
}
var userNameParams = transportSecurityBindingElement.EndpointSupportingTokenParameters.SignedEncrypted[0] as UserNameSecurityTokenParameters;
if (userNameParams != null)
{
if (userNameParams.InclusionMode != SecurityTokenInclusionMode.AlwaysToRecipient)
{
s_bindingValidationErrors.Add(string.Format(SR.BindingTransportSecurityTokenParamsInclusionModeValueNotSupportedFormat,
userNameParams.InclusionMode, SecurityTokenInclusionMode.AlwaysToRecipient));
}
if (userNameParams.ReferenceStyle != SecurityTokenReferenceStyle.Internal)
{
s_bindingValidationErrors.Add(string.Format(SR.BindingTransportSecurityTokenParamsReferenceStyleNotSupportedFormat,
userNameParams.ReferenceStyle, SecurityTokenReferenceStyle.Internal));
}
if (userNameParams.RequireDerivedKeys != false)
{
s_bindingValidationErrors.Add(SR.BindingTransportSecurityTokenParamsRequiringDerivedKeysNotSupported);
}
}
else
{
s_bindingValidationErrors.Add(string.Format(SR.BindingTransportSecurityTokenParamsTypeNotSupportedFormat,
transportSecurityBindingElement.EndpointSupportingTokenParameters.SignedEncrypted[0].GetType().FullName, typeof(UserNameSecurityTokenParameters).FullName));
}
}
private void AddWarning(ServiceEndpoint endpoint, int i)
{
MetadataConversionError warning;
foreach (var validationErrorMsg in s_bindingValidationErrors)
{
warning = new MetadataConversionError(validationErrorMsg, isWarning: true);
if (!importer.Errors.Contains(warning))
{
importer.Errors.Add(warning);
}
}
string incompatEndpointMsg = string.Format(CultureInfo.InvariantCulture, SR.WrnIncompatibleEndpointFormat, endpoint.Name, endpoint.Address);
warning = new MetadataConversionError(incompatEndpointMsg, isWarning: true);
if (!importer.Errors.Contains(warning))
{
importer.Errors.Add(warning);
}
}
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Diagnostics.CodeAnalysis;
using System.Threading.Tasks;
using Microsoft.AspNet.Hosting;
using Microsoft.AspNet.Http;
using Microsoft.AspNet.SignalR.Infrastructure;
using Microsoft.AspNet.SignalR.Json;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;
namespace Microsoft.AspNet.SignalR.Transports
{
[SuppressMessage("Microsoft.Design", "CA1001:TypesThatOwnDisposableFieldsShouldBeDisposable", Justification = "The disposer is an optimization")]
public abstract class ForeverTransport : TransportDisconnectBase, ITransport
{
private static readonly ProtocolResolver _protocolResolver = new ProtocolResolver();
private readonly IPerformanceCounterManager _counters;
private readonly JsonSerializer _jsonSerializer;
private IDisposable _busRegistration;
internal RequestLifetime _transportLifetime;
protected ForeverTransport(HttpContext context,
JsonSerializer jsonSerializer,
ITransportHeartbeat heartbeat,
IPerformanceCounterManager performanceCounterManager,
IApplicationLifetime applicationLifetime,
ILoggerFactory loggerFactory,
IMemoryPool pool)
: base(context, heartbeat, performanceCounterManager, applicationLifetime, loggerFactory, pool)
{
_jsonSerializer = jsonSerializer;
_counters = performanceCounterManager;
}
protected virtual int MaxMessages
{
get
{
return 10;
}
}
protected JsonSerializer JsonSerializer
{
get { return _jsonSerializer; }
}
protected virtual void OnSending(string payload)
{
Heartbeat.MarkConnection(this);
}
protected virtual void OnSendingResponse(PersistentResponse response)
{
Heartbeat.MarkConnection(this);
}
public Func<string, Task> Received { get; set; }
public Func<Task> Connected { get; set; }
public Func<Task> Reconnected { get; set; }
// Unit testing hooks
internal Action AfterReceive;
internal Action BeforeCancellationTokenCallbackRegistered;
internal Action BeforeReceive;
internal Action<Exception> AfterRequestEnd;
protected override async Task InitializePersistentState()
{
await base.InitializePersistentState().PreserveCulture();
// The _transportLifetime must be initialized after calling base.InitializePersistentState since
// _transportLifetime depends on _requestLifetime.
_transportLifetime = new RequestLifetime(this, _requestLifeTime);
}
protected async Task ProcessRequestCore(ITransportConnection connection)
{
Connection = connection;
if (IsSendRequest)
{
await ProcessSendRequest().PreserveCulture();
}
else if (IsAbortRequest)
{
await Connection.Abort(ConnectionId).PreserveCulture();
}
else
{
await InitializePersistentState().PreserveCulture();
await ProcessReceiveRequest(connection).PreserveCulture();
}
}
public virtual Task ProcessRequest(ITransportConnection connection)
{
return ProcessRequestCore(connection);
}
public abstract Task Send(PersistentResponse response);
public virtual Task Send(object value)
{
var context = new ForeverTransportContext(this, value);
return EnqueueOperation(state => PerformSend(state), context);
}
protected internal virtual Task InitializeResponse(ITransportConnection connection)
{
return TaskAsyncHelper.Empty;
}
protected void OnError(Exception ex)
{
IncrementErrors();
// Complete the http request
_transportLifetime.Complete(ex);
}
protected virtual async Task ProcessSendRequest()
{
IReadableStringCollection form = await Context.Request.ReadFormAsync().PreserveCulture();
string data = form["data"];
if (Received != null)
{
await Received(data).PreserveCulture();
}
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Exceptions are flowed to the caller.")]
private Task ProcessReceiveRequest(ITransportConnection connection)
{
Func<Task> initialize = null;
// If this transport isn't replacing an existing transport, oldConnection will be null.
ITrackingConnection oldConnection = Heartbeat.AddOrUpdateConnection(this);
bool newConnection = oldConnection == null;
if (IsConnectRequest)
{
if (_protocolResolver.SupportsDelayedStart(Context.Request))
{
// TODO: Ensure delegate continues to use the C# Compiler static delegate caching optimization.
initialize = () => connection.Initialize(ConnectionId);
}
else
{
Func<Task> connected;
if (newConnection)
{
connected = Connected ?? _emptyTaskFunc;
_counters.ConnectionsConnected.Increment();
}
else
{
// Wait until the previous call to Connected completes.
// We don't want to call Connected twice
connected = () => oldConnection.ConnectTask;
}
initialize = () =>
{
return connected().Then((conn, id) => conn.Initialize(id), connection, ConnectionId);
};
}
}
else if (!SuppressReconnect)
{
initialize = Reconnected;
_counters.ConnectionsReconnected.Increment();
}
initialize = initialize ?? _emptyTaskFunc;
Func<Task> fullInit = () => initialize().ContinueWith(_connectTcs);
return ProcessMessages(connection, fullInit);
}
[SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope", Justification = "The object is disposed otherwise")]
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Exceptions are flowed to the caller.")]
private Task ProcessMessages(ITransportConnection connection, Func<Task> initialize)
{
var disposer = new Disposer();
if (BeforeCancellationTokenCallbackRegistered != null)
{
BeforeCancellationTokenCallbackRegistered();
}
var cancelContext = new ForeverTransportContext(this, disposer);
// Ensure delegate continues to use the C# Compiler static delegate caching optimization.
_busRegistration = ConnectionEndToken.SafeRegister(state => Cancel(state), cancelContext);
if (BeforeReceive != null)
{
BeforeReceive();
}
try
{
// Ensure we enqueue the response initialization before any messages are received
EnqueueOperation(state => InitializeResponse((ITransportConnection)state), connection)
.Catch((ex, state) => ((ForeverTransport)state).OnError(ex), this, Logger);
// Ensure delegate continues to use the C# Compiler static delegate caching optimization.
IDisposable subscription = connection.Receive(LastMessageId,
(response, state) => ((ForeverTransport)state).OnMessageReceived(response),
MaxMessages,
this);
if (AfterReceive != null)
{
AfterReceive();
}
// Ensure delegate continues to use the C# Compiler static delegate caching optimization.
initialize().Catch((ex, state) => ((ForeverTransport)state).OnError(ex), this, Logger)
.Finally(state => ((SubscriptionDisposerContext)state).Set(),
new SubscriptionDisposerContext(disposer, subscription));
}
catch (Exception ex)
{
_transportLifetime.Complete(ex);
}
return _requestLifeTime.Task;
}
private static void Cancel(object state)
{
var context = (ForeverTransportContext)state;
context.Transport.Logger.LogVerbose("Cancel(" + context.Transport.ConnectionId + ")");
((IDisposable)context.State).Dispose();
}
protected virtual Task<bool> OnMessageReceived(PersistentResponse response)
{
if (response == null)
{
throw new ArgumentNullException("response");
}
response.Reconnect = HostShutdownToken.IsCancellationRequested;
if (IsTimedOut || response.Aborted)
{
_busRegistration.Dispose();
if (response.Aborted)
{
// If this was a clean disconnect raise the event.
return Abort().Then(() => TaskAsyncHelper.False);
}
}
if (response.Terminal)
{
// End the request on the terminal response
_transportLifetime.Complete();
return TaskAsyncHelper.False;
}
// Ensure delegate continues to use the C# Compiler static delegate caching optimization.
return Send(response).Then(() => TaskAsyncHelper.True);
}
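// Note (inferred from usage): the Task<bool> result tells the message-bus subscription
// whether to keep delivering messages -- aborted or terminal responses complete the request
// and yield false, while normal responses are forwarded through Send and yield true.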
private static Task PerformSend(object state)
{
var context = (ForeverTransportContext)state;
if (!context.Transport.IsAlive)
{
return TaskAsyncHelper.Empty;
}
context.Transport.Context.Response.ContentType = JsonUtility.JsonMimeType;
using (var writer = new BinaryMemoryPoolTextWriter(context.Transport.Pool))
{
context.Transport.JsonSerializer.Serialize(context.State, writer);
writer.Flush();
context.Transport.Context.Response.Write(writer.Buffer);
}
return TaskAsyncHelper.Empty;
}
private class ForeverTransportContext
{
public object State;
public ForeverTransport Transport;
public ForeverTransportContext(ForeverTransport foreverTransport, object state)
{
State = state;
Transport = foreverTransport;
}
}
private class SubscriptionDisposerContext
{
private readonly Disposer _disposer;
private readonly IDisposable _subscription;
public SubscriptionDisposerContext(Disposer disposer, IDisposable subscription)
{
_disposer = disposer;
_subscription = subscription;
}
public void Set()
{
_disposer.Set(_subscription);
}
}
internal class RequestLifetime
{
private readonly HttpRequestLifeTime _lifetime;
private readonly ForeverTransport _transport;
public RequestLifetime(ForeverTransport transport, HttpRequestLifeTime lifetime)
{
_lifetime = lifetime;
_transport = transport;
}
public void Complete()
{
Complete(error: null);
}
public void Complete(Exception error)
{
_lifetime.Complete(error);
_transport.Dispose();
if (_transport.AfterRequestEnd != null)
{
_transport.AfterRequestEnd(error);
}
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Collections;
using Xunit;
using SortedList_SortedListUtils;
public class Driver<K, V, R, S> where K : IPublicValue<R> where V : IPublicValue<S>
{
private Test m_test;
public Driver(Test test)
{
m_test = test;
}
public void BasicRemoveAt(K[] keys, V[] values)
{
SortedList<K, V> tbl = new SortedList<K, V>();
try
{
tbl.RemoveAt(0);
m_test.Eval(false, "BasicRemoveAt0: Expected trying to reference an Index with no value to generate an ArgumentOutOfRangeException, but it did not.");
}
catch (ArgumentOutOfRangeException)
{
}
for (int i = 0; i < keys.Length; i++)
{
tbl.Add(keys[i], values[i]);
try
{
tbl.RemoveAt(i + 1);
m_test.Eval(false, "BasicRemoveAt1: Expected trying to reference an Index with no value to generate an ArgumentOutOfRangeException");
}
catch (ArgumentOutOfRangeException)
{
}
}
m_test.Eval(tbl.Count == keys.Length);
for (int i = keys.Length - 1; i >= 0; i--)
{
tbl.RemoveAt(i);
m_test.Eval(!tbl.ContainsKey(keys[i]), "BasicRemoveAt3: Expected RemoveAt to remove items but found one item still in existence: " + keys[i].publicVal);
}
m_test.Eval(tbl.Count == 0, "BasicRemoveAt2: Expected RemoveAt to clear the sorted Dictionary, but it did not and count is still " + tbl.Count);
}
public void RemoveAtNegative(K[] keys, V[] values, V[] missingvalues)
{
SortedList<K, V> tbl = new SortedList<K, V>();
for (int i = 0; i < keys.Length; i++)
{
tbl.Add(keys[i], values[i]);
}
m_test.Eval(tbl.Count == keys.Length);
try
{
tbl.RemoveAt(keys.Length);
m_test.Eval(false, "RemoveAtNegative0: Expected trying to reference an Index with no value to generate an ArgumentOutOfRangeException, but it did not.");
}
catch (ArgumentOutOfRangeException)
{
}
try
{
tbl.RemoveAt(-1);
m_test.Eval(false, "RemoveAtNegative1: Expected trying to reference an Index with no value to generate an ArgumentOutOfRangeException, but it did not.");
}
catch (ArgumentOutOfRangeException)
{
}
for (int i = 0; i < missingvalues.Length; i++)
{
m_test.Eval(tbl.IndexOfValue(missingvalues[i]) == -1, "RemoveAtNegative2: Expected IndexOfValue to return -1, but it returned " + tbl.IndexOfValue(missingvalues[i]) + " for index of " + missingvalues[i].publicVal);
}
}
public void AddRemoveKeyRemoveAt(K[] keys, V[] values, int index)
{
SortedList<K, V> tbl = new SortedList<K, V>();
for (int i = 0; i < keys.Length; i++)
{
tbl.Add(keys[i], values[i]);
}
tbl.Remove(keys[index]);
try
{
tbl.RemoveAt(keys.Length - 1);
m_test.Eval(false, "AddRemoveKeyRemoveAt: Expected trying to reference an Index that has been removed to throw an exception, but it did not.");
}
catch (ArgumentOutOfRangeException)
{
}
}
}
public class RemoveAt
{
[Fact]
public static void RemoveAtMain()
{
Test test = new Test();
Driver<RefX1<int>, ValX1<string>, int, string> IntDriver = new Driver<RefX1<int>, ValX1<string>, int, string>(test);
RefX1<int>[] intArr1 = new RefX1<int>[100];
for (int i = 0; i < 100; i++)
{
intArr1[i] = new RefX1<int>(i + 100);
}
int[] intArr2 = new int[15];
for (int i = 0; i < 10; i++)
{
intArr2[i] = i + 200;
}
for (int i = 10; i < 15; i++)
{
intArr2[i] = i + 195;
}
Driver<ValX1<string>, RefX1<int>, string, int> StringDriver = new Driver<ValX1<string>, RefX1<int>, string, int>(test);
ValX1<string>[] stringArr1 = new ValX1<string>[100];
for (int i = 0; i < 100; i++)
{
stringArr1[i] = new ValX1<string>("SomeTestString" + ((i + 100).ToString()));
}
ValX1<string>[] stringArr2 = new ValX1<string>[15];
for (int i = 0; i < 10; i++)
{
stringArr2[i] = new ValX1<string>("SomeTestString" + (i + 200).ToString());
}
//Ref<val>,Val<Ref>
IntDriver.BasicRemoveAt(intArr1, stringArr1);
IntDriver.RemoveAtNegative(intArr1, stringArr1, stringArr2);
IntDriver.RemoveAtNegative(new RefX1<int>[0], new ValX1<string>[0], stringArr2);
IntDriver.AddRemoveKeyRemoveAt(intArr1, stringArr1, 0);
IntDriver.AddRemoveKeyRemoveAt(intArr1, stringArr1, 50);
IntDriver.AddRemoveKeyRemoveAt(intArr1, stringArr1, 99);
//Val<Ref>,Ref<Val>
StringDriver.BasicRemoveAt(stringArr1, intArr1);
StringDriver.AddRemoveKeyRemoveAt(stringArr1, intArr1, 0);
StringDriver.AddRemoveKeyRemoveAt(stringArr1, intArr1, 50);
StringDriver.AddRemoveKeyRemoveAt(stringArr1, intArr1, 99);
intArr1 = new RefX1<int>[105];
for (int i = 0; i < 105; i++)
{
intArr1[i] = new RefX1<int>(i);
}
RefX1<int>[] intArr3 = new RefX1<int>[15];
for (int i = 0; i < 10; i++)
{
intArr3[i] = new RefX1<int>(i + 100);
}
for (int i = 10; i < 15; i++)
{
intArr3[i] = new RefX1<int>(101);
}
stringArr1 = new ValX1<string>[105];
for (int i = 0; i < 100; i++)
{
stringArr1[i] = new ValX1<string>("SomeTestString" + i.ToString());
}
for (int i = 100; i < 105; i++)
{
stringArr1[i] = new ValX1<string>("SomeTestString11");
}
stringArr2 = new ValX1<string>[15];
for (int i = 0; i < 15; i++)
{
stringArr2[i] = new ValX1<string>("SomeTestString" + (i + 100).ToString());
}
IntDriver.BasicRemoveAt(intArr1, stringArr1);
StringDriver.BasicRemoveAt(stringArr2, intArr3);
stringArr1 = new ValX1<string>[100];
for (int i = 0; i < 100; i++)
{
stringArr1[i] = new ValX1<string>("SomeTestString" + ((i + 100).ToString()));
}
StringDriver.RemoveAtNegative(stringArr1, intArr1, intArr3);
Assert.True(test.result);
}
}
| |
#region License
//
// MethodContact.cs April 2007
//
// Copyright (C) 2007, Niall Gallagher <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.
//
#endregion
#region Using directives
using System;
#endregion
namespace SimpleFramework.Xml.Core {
/// <summary>
/// The <c>MethodContact</c> object acts as a contact that
/// can set and get data to and from an object using methods. This
/// requires a get method and a set method that share the same class
/// type for the return and parameter respectively.
/// </summary>
/// <seealso>
/// SimpleFramework.Xml.Core.MethodScanner
/// </seealso>
class MethodContact : Contact {
/// <summary>
/// This is the label that marks both the set and get methods.
/// </summary>
private Annotation label;
/// <summary>
/// This is the set method which is used to set the value.
/// </summary>
private MethodPart set;
/// <summary>
/// This is the dependent types as taken from the get method.
/// </summary>
private Class[] items;
/// <summary>
/// This is the dependent type as taken from the get method.
/// </summary>
private Class item;
/// <summary>
/// This is the type associated with this point of contact.
/// </summary>
private Class type;
/// <summary>
/// This is the get method which is used to get the value.
/// </summary>
private Method get;
/// <summary>
/// This represents the name of the method for this contact.
/// </summary>
private String name;
/// <summary>
/// Constructor for the <c>MethodContact</c> object. This is
/// used to compose a point of contact that makes use of a get and
/// set method on a class. The specified methods will be invoked
/// during the serialization process to get and set values.
/// </summary>
/// <param name="get">
/// this forms the get method for the object
/// </param>
public MethodContact(MethodPart get) : this(get, null) {
}
/// <summary>
/// Constructor for the <c>MethodContact</c> object. This is
/// used to compose a point of contact that makes use of a get and
/// set method on a class. The specified methods will be invoked
/// during the serialization process to get and set values.
/// </summary>
/// <param name="get">
/// this forms the get method for the object
/// </param>
/// <param name="set">
/// this forms the set method for the object
/// </param>
public MethodContact(MethodPart get, MethodPart set) {
this.label = get.Annotation;
this.items = get.Dependents;
this.item = get.Dependent;
this.get = get.Method;
this.type = get.Type;
this.name = get.Name;
this.set = set;
}
/// <summary>
/// This is used to determine if the annotated contact is for a
/// read only variable. A read only variable is a field that
/// can be set from within the constructor such as a blank readonly
/// variable. It can also be a method with no set counterpart.
/// </summary>
/// <returns>
/// this returns true if the contact is a constant one
/// </returns>
public bool IsReadOnly() {
return set == null;
}
/// <summary>
/// This is the annotation associated with the point of contact.
/// This will be an XML annotation that describes how the contact
/// should be serialized and deserialized from the object.
/// </summary>
/// <returns>
/// this provides the annotation associated with this
/// </returns>
public Annotation Annotation {
get {
return label;
}
}
//public Annotation GetAnnotation() {
// return label;
//}
/// <summary>
/// This is the annotation associated with the point of contact.
/// This will be an XML annotation that describes how the contact
/// should be serialized and deserialized from the object.
/// </summary>
/// <param name="type">
/// this is the type of the annotation to acquire
/// </param>
/// <returns>
/// this provides the annotation associated with this
/// </returns>
public T getAnnotation<T>(Class<T> type) where T : Annotation {
T result = get.getAnnotation(type);
if(type == label.annotationType()) {
return (T) label;
}
if(result == null && set != null) {
return set.getAnnotation(type);
}
return result;
}
/// <summary>
/// This will provide the contact type. The contact type is the
/// class that is to be set and get on the object. This represents
/// the return type for the get and the parameter for the set.
/// </summary>
/// <returns>
/// this returns the type that this contact represents
/// </returns>
public Class Type {
get {
return type;
}
}
//public Class GetType() {
// return type;
//}
/// <summary>
/// This provides the dependent class for the contact. This will
/// actually represent a generic type for the actual type. For
/// contacts that use a <c>Collection</c> type this will
/// be the generic type parameter for that collection.
/// </summary>
/// <returns>
/// this returns the dependent type for the contact
/// </returns>
public Class Dependent {
get {
return item;
}
}
//public Class GetDependent() {
// return item;
//}
/// <summary>
/// This provides the dependent classes for the contact. This will
/// typically represent a generic types for the actual type. For
/// contacts that use a <c>Map</c> type this will be the
/// generic type parameter for that map type declaration.
/// </summary>
/// <returns>
/// this returns the dependent type for the contact
/// </returns>
public Class[] Dependents {
get {
return items;
}
}
//public Class[] GetDependents() {
// return items;
//}
/// <summary>
/// This is used to acquire the name of the method. This returns
/// the name of the method without the get, set or is prefix that
/// represents the Java Bean method type. Also this decapitalizes
/// the resulting name. The result is used to represent the XML
/// attribute or element within the class schema represented.
/// </summary>
/// <returns>
/// this returns the name of the method represented
/// </returns>
public String Name {
get {
return name;
}
}
//public String GetName() {
// return name;
//}
/// <summary>
/// This is used to set the specified value on the provided object.
/// The value provided must be an instance of the contact class so
/// that it can be set without a runtime class compatibility error.
/// </summary>
/// <param name="source">
/// this is the object to set the value on
/// </param>
/// <param name="value">
/// this is the value that is to be set on the object
/// </param>
public void Set(Object source, Object value) {
Class type = Type;
if(set == null) {
throw new MethodException("Method '%s' of '%s' is read only", name, type);
}
set.Method.invoke(source, value);
}
/// <summary>
/// This is used to get the specified value on the provided object.
/// The value returned from this method will be an instance of the
/// contact class type. If the returned object is of a different
/// type then the serialization process will fail.
/// </summary>
/// <param name="source">
/// this is the object to acquire the value from
/// </param>
/// <returns>
/// this is the value that is acquired from the object
/// </returns>
public Object Get(Object source) {
return get.invoke(source);
}
/// <summary>
/// This is used to describe the contact as it exists within the
/// owning class. It is used to provide error messages that can
/// be used to debug issues that occur when processing a contact.
/// The string provided contains both the set and get methods.
/// </summary>
/// <returns>
/// this returns a string representation of the contact
/// </returns>
public override String ToString() {
return String.Format("method '{0}'", name);
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Text;
using System.Reflection;
using System.Globalization;
using System.Diagnostics;
namespace Microsoft.Build.BuildEngine.Shared
{
/// <summary>
/// A replacement for AssemblyName that optimizes calls to FullName which is expensive.
/// The assembly name is represented internally by an AssemblyName and a string; conversion
/// between the two is done lazily on demand.
/// </summary>
[Serializable]
sealed internal class AssemblyNameExtension
{
private AssemblyName asAssemblyName = null;
private string asString = null;
static private AssemblyNameExtension unnamedAssembly = new AssemblyNameExtension();
/// <summary>
/// Construct an unnamed assembly.
/// Private because we want only one of these.
/// </summary>
private AssemblyNameExtension()
{
}
/// <summary>
/// Construct with AssemblyName.
/// </summary>
/// <param name="assemblyName"></param>
internal AssemblyNameExtension(AssemblyName assemblyName)
{
asAssemblyName = assemblyName;
}
/// <summary>
/// Construct with string.
/// </summary>
/// <param name="assemblyName"></param>
internal AssemblyNameExtension(string assemblyName)
{
asString = assemblyName;
}
/// <summary>
/// Construct from a string, but immediately construct a real AssemblyName.
/// This will cause an exception to be thrown up front if the assembly name
/// isn't well formed.
/// </summary>
/// <param name="assemblyName">
/// The string version of the assembly name.
/// </param>
/// <param name="validate">
/// Used when the assembly name comes from a user-controlled source like a project file or config file.
/// Does extra checking on the assembly name and will throw exceptions if something is invalid.
/// </param>
internal AssemblyNameExtension(string assemblyName, bool validate)
{
asString = assemblyName;
if (validate)
{
// This will throw...
CreateAssemblyName();
}
}
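// Illustrative note: with validate=true the display name is parsed eagerly, so a malformed
// name (for example one with an unparsable Version attribute) fails here in the constructor
// rather than later, when Name/Version/FullName is first accessed.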
/// <summary>
/// To be used as a delegate. Gets the AssemblyName of the given file.
/// </summary>
/// <param name="path"></param>
/// <returns></returns>
internal static AssemblyNameExtension GetAssemblyNameEx(string path)
{
AssemblyName assemblyName = AssemblyName.GetAssemblyName(path);
if (assemblyName == null)
{
return null;
}
return new AssemblyNameExtension(assemblyName);
}
/// <summary>
/// Assume there is a string version, create the AssemblyName version.
/// </summary>
private void CreateAssemblyName()
{
if (asAssemblyName == null)
{
asAssemblyName = GetAssemblyNameFromDisplayName(asString);
}
}
/// <summary>
/// Assume there is an AssemblyName version, create the string version.
/// </summary>
private void CreateFullName()
{
if (asString == null)
{
asString = asAssemblyName.FullName;
}
}
/// <summary>
/// The base name of the assembly.
/// </summary>
/// <value></value>
internal string Name
{
get
{
// Is there a string?
CreateAssemblyName();
return asAssemblyName.Name;
}
}
/// <summary>
/// The assembly's version number.
/// </summary>
/// <value></value>
internal Version Version
{
get
{
// Is there a string?
CreateAssemblyName();
return asAssemblyName.Version;
}
}
/// <summary>
/// Replace the current version with a new version.
/// </summary>
/// <param name="version"></param>
internal void ReplaceVersion(Version version)
{
CreateAssemblyName();
if (asAssemblyName.Version != version)
{
asAssemblyName.Version = version;
// String would now be invalid.
asString = null;
}
}
/// <summary>
/// The assembly's culture.
/// </summary>
/// <value></value>
internal CultureInfo CultureInfo
{
get
{
// Is there a string?
CreateAssemblyName();
return asAssemblyName.CultureInfo;
}
}
/// <summary>
/// As an AssemblyName
/// </summary>
/// <value></value>
internal AssemblyName AssemblyName
{
get
{
// Is there a string?
CreateAssemblyName();
return asAssemblyName;
}
}
/// <summary>
/// The assembly's full name.
/// </summary>
/// <value></value>
internal string FullName
{
get
{
// Is there a string?
CreateFullName();
return asString;
}
}
/// <summary>
/// Get the assembly's public key token.
/// </summary>
/// <returns></returns>
internal byte[] GetPublicKeyToken()
{
// Is there a string?
CreateAssemblyName();
return asAssemblyName.GetPublicKeyToken();
}
/// <summary>
/// A special "unnamed" instance of AssemblyNameExtension.
/// </summary>
/// <value></value>
internal static AssemblyNameExtension UnnamedAssembly
{
get
{
return unnamedAssembly;
}
}
/// <summary>
/// Compare one assembly name to another.
/// </summary>
/// <param name="that"></param>
/// <returns></returns>
internal int CompareTo(AssemblyNameExtension that)
{
// Are they identical?
if (this.Equals(that))
{
return 0;
}
// Are the base names not identical?
int result = CompareBaseNameTo(that);
if (result != 0)
{
return result;
}
// We need some collating order for these, alphabetical by FullName seems as good as any.
return String.Compare(this.FullName, that.FullName, StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Get a hash code for this assembly name.
/// </summary>
/// <returns></returns>
new internal int GetHashCode()
{
// Ok, so this isn't a great hashing algorithm. However, basenames with different
// versions or PKTs are relatively uncommon and so collisions should be low.
// Hashing on FullName is wrong because the ordering of the name's attributes within the string is undefined.
int hash = StringComparer.OrdinalIgnoreCase.GetHashCode(this.Name);
return hash;
}
/// <summary>
/// Compare two base names as quickly as possible.
/// </summary>
/// <param name="that"></param>
/// <returns></returns>
internal int CompareBaseNameTo(AssemblyNameExtension that)
{
int result = CompareBaseNameToImpl(that);
#if DEBUG
// Now, compare to the real value to make sure the result was accurate.
AssemblyName a1 = this.asAssemblyName;
AssemblyName a2 = that.asAssemblyName;
if (a1 == null)
{
a1 = new AssemblyName(this.asString);
}
if (a2 == null)
{
a2 = new AssemblyName(that.asString);
}
int baselineResult = String.Compare(a1.Name, a2.Name, StringComparison.OrdinalIgnoreCase);
Debug.Assert(result == baselineResult, "Optimized version of CompareBaseNameTo didn't return the same result as the baseline.");
#endif
return result;
}
/// <summary>
/// An implementation of compare that compares two base
/// names as quickly as possible.
/// </summary>
/// <param name="that"></param>
/// <returns></returns>
private int CompareBaseNameToImpl(AssemblyNameExtension that)
{
// Pointer compare, if identical then base names are
// equal.
if (this == that)
{
return 0;
}
// Do both have assembly names?
if (this.asAssemblyName != null && that.asAssemblyName != null)
{
// Pointer compare.
if (this.asAssemblyName == that.asAssemblyName)
{
return 0;
}
// Base name compare.
return String.Compare(this.asAssemblyName.Name, that.asAssemblyName.Name, StringComparison.OrdinalIgnoreCase);
}
// Do both have strings?
if (this.asString != null && that.asString != null)
{
// If both are strings, compare their base-name portions directly (ignoring case).
return CompareBaseNamesStringWise(this.asString, that.asString);
}
// Fall back to comparing by name. This is the slow path.
return String.Compare(this.Name, that.Name, StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Compare two basenames.
/// </summary>
/// <param name="asString1"></param>
/// <param name="asString2"></param>
/// <returns></returns>
private static int CompareBaseNamesStringWise(string asString1, string asString2)
{
// Identical strings just match.
if (asString1 == asString2)
{
return 0;
}
// Get the lengths of base names to compare.
int baseLenThis = asString1.IndexOf(',');
int baseLenThat = asString2.IndexOf(',');
if (baseLenThis == -1)
{
baseLenThis = asString1.Length;
}
if (baseLenThat == -1)
{
baseLenThat = asString2.Length;
}
// If the lengths are the same then we can compare without copying.
if (baseLenThis == baseLenThat)
{
return String.Compare(asString1, 0, asString2, 0, baseLenThis, StringComparison.OrdinalIgnoreCase);
}
// Lengths are different, so string copy is required.
string nameThis = asString1.Substring(0, baseLenThis);
string nameThat = asString2.Substring(0, baseLenThat);
return String.Compare(nameThis, nameThat, StringComparison.OrdinalIgnoreCase);
}
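// Example (illustrative): for "MyAsm, Version=1.0.0.0" and "myasm" the base-name portions
// "MyAsm" and "myasm" have equal length and compare equal under OrdinalIgnoreCase, so the
// method returns 0 without allocating substrings.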
/// <summary>
/// Compare two assembly names for equality.
/// </summary>
/// <param name="that"></param>
/// <returns></returns>
internal bool Equals(AssemblyNameExtension that)
{
// Pointer compare.
if (this == that)
{
return true;
}
// Do both have assembly names?
if (this.asAssemblyName != null && that.asAssemblyName != null)
{
// Pointer compare.
if (this.asAssemblyName == that.asAssemblyName)
{
return true;
}
}
// Do both have strings that equal each-other?
if (this.asString != null && that.asString != null)
{
if (this.asString == that.asString)
{
return true;
}
// If they weren't identical then they might still differ only by
// case. So we can't assume that they don't match. So fall through...
}
// Do the names match?
if (!String.Equals(Name, that.Name, StringComparison.OrdinalIgnoreCase))
{
return false;
}
// Do the versions match?
if (Version != that.Version)
{
return false;
}
// Do the Cultures match?
CultureInfo aCulture = CultureInfo;
CultureInfo bCulture = that.CultureInfo;
if (aCulture == null)
{
aCulture = CultureInfo.InvariantCulture;
}
if (bCulture == null)
{
bCulture = CultureInfo.InvariantCulture;
}
if (aCulture.LCID != bCulture.LCID)
{
return false;
}
// Do the PKTs match?
byte[] aPKT = GetPublicKeyToken();
byte[] bPKT = that.GetPublicKeyToken();
// Some assemblies (real case was interop assembly) may have null PKTs.
if (aPKT == null)
{
aPKT = new byte[0];
}
if (bPKT == null)
{
bPKT = new byte[0];
}
if (aPKT.Length != bPKT.Length)
{
return false;
}
for (int i = 0; i < aPKT.Length; ++i)
{
if (aPKT[i] != bPKT[i])
{
return false;
}
}
return true;
}
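// Example (illustrative): two extensions built from "MyAsm, Version=1.0.0.0" and
// "MYASM, Version=1.0.0.0" are Equals -- the raw strings differ, but the name comparison is
// case-insensitive and the version, culture and public key token attributes all match.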
/// <summary>
/// Only the unnamed assembly has both null assemblyname and null string.
/// </summary>
/// <returns></returns>
internal bool IsUnnamedAssembly
{
get
{
return asAssemblyName == null && asString == null;
}
}
/// <summary>
/// Given a display name, construct an assembly name.
/// </summary>
/// <param name="displayName">The display name.</param>
/// <returns>The assembly name.</returns>
private static AssemblyName GetAssemblyNameFromDisplayName(string displayName)
{
AssemblyName assemblyName = new AssemblyName(displayName);
return assemblyName;
}
/// <summary>
/// Return a string that has AssemblyName special characters escaped.
/// Those characters are Equals(=), Comma(,), Quote("), Apostrophe('), Backslash(\).
/// </summary>
/// <remarks>
/// WARNING! This method is not meant as a general purpose escaping method for assembly names.
/// Use only if you really know that this does what you need.
/// </remarks>
/// <param name="displayName"></param>
/// <returns></returns>
internal static string EscapeDisplayNameCharacters(string displayName)
{
StringBuilder sb = new StringBuilder(displayName);
sb = sb.Replace("\\", "\\\\");
sb = sb.Replace("=", "\\=");
sb = sb.Replace(",", "\\,");
sb = sb.Replace("\"", "\\\"");
sb = sb.Replace("'", "\\'");
return sb.ToString();
}
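// Example (illustrative): EscapeDisplayNameCharacters(@"My\Asm, Version=1.0") returns
// @"My\\Asm\, Version\=1.0" -- each of the characters \ = , " ' is prefixed with a backslash.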
/// <summary>
/// Convert to a string for display.
/// </summary>
/// <returns></returns>
override public string ToString()
{
CreateFullName();
return this.asString;
}
}
}
| |
#region License
/*
* All content copyright Terracotta, Inc., unless otherwise indicated. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
*/
#endregion
using System;
using System.Collections.Generic;
using Quartz.Core;
using Quartz.Impl.Matchers;
namespace Quartz.Spi
{
/// <summary>
/// The interface to be implemented by classes that want to provide a <see cref="IJob" />
/// and <see cref="ITrigger" /> storage mechanism for the
/// <see cref="QuartzScheduler" />'s use.
/// </summary>
/// <remarks>
/// Storage of <see cref="IJob" /> s and <see cref="ITrigger" /> s should be keyed
/// on the combination of their name and group for uniqueness.
/// </remarks>
/// <seealso cref="QuartzScheduler" />
/// <seealso cref="ITrigger" />
/// <seealso cref="IJob" />
/// <seealso cref="IJobDetail" />
/// <seealso cref="JobDataMap" />
/// <seealso cref="ICalendar" />
/// <author>James House</author>
/// <author>Marko Lahma (.NET)</author>
public interface IJobStore
{
/// <summary>
/// Called by the QuartzScheduler before the <see cref="IJobStore" /> is
/// used, in order to give it a chance to initialize.
/// </summary>
void Initialize(ITypeLoadHelper loadHelper, ISchedulerSignaler signaler);
/// <summary>
/// Called by the QuartzScheduler to inform the <see cref="IJobStore" /> that
/// the scheduler has started.
/// </summary>
void SchedulerStarted();
/// <summary>
/// Called by the QuartzScheduler to inform the JobStore that
/// the scheduler has been paused.
/// </summary>
void SchedulerPaused();
/// <summary>
/// Called by the QuartzScheduler to inform the JobStore that
/// the scheduler has resumed after being paused.
/// </summary>
void SchedulerResumed();
/// <summary>
/// Called by the QuartzScheduler to inform the <see cref="IJobStore" /> that
/// it should free up all of its resources because the scheduler is
/// shutting down.
/// </summary>
void Shutdown();
/// <summary>
/// Indicates whether job store supports persistence.
/// </summary>
/// <returns></returns>
bool SupportsPersistence { get; }
/// <summary>
/// How long (in milliseconds) the <see cref="IJobStore" /> implementation
/// estimates that it will take to release a trigger and acquire a new one.
/// </summary>
long EstimatedTimeToReleaseAndAcquireTrigger { get; }
/// <summary>
/// Whether or not the <see cref="IJobStore" /> implementation is clustered.
/// </summary>
/// <returns></returns>
bool Clustered { get; }
/// <summary>
/// Store the given <see cref="IJobDetail" /> and <see cref="ITrigger" />.
/// </summary>
/// <param name="newJob">The <see cref="IJobDetail" /> to be stored.</param>
/// <param name="newTrigger">The <see cref="ITrigger" /> to be stored.</param>
/// <throws> ObjectAlreadyExistsException </throws>
void StoreJobAndTrigger(IJobDetail newJob, IOperableTrigger newTrigger);
/// <summary>
/// Returns true if the given job group is paused.
/// </summary>
/// <param name="groupName"></param>
/// <returns></returns>
bool IsJobGroupPaused(string groupName);
/// <summary>
/// Returns true if the given trigger group is paused.
/// </summary>
/// <param name="groupName"></param>
/// <returns></returns>
bool IsTriggerGroupPaused(string groupName);
/// <summary>
/// Store the given <see cref="IJobDetail" />.
/// </summary>
/// <param name="newJob">The <see cref="IJobDetail" /> to be stored.</param>
/// <param name="replaceExisting">
/// If <see langword="true" />, any <see cref="IJob" /> existing in the
/// <see cref="IJobStore" /> with the same name and group should be
/// over-written.
/// </param>
void StoreJob(IJobDetail newJob, bool replaceExisting);
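/// <summary>
/// Bulk counterpart to <see cref="StoreJob" /> and <see cref="StoreTrigger" />: stores the
/// given jobs together with their associated triggers, over-writing existing entries when
/// <paramref name="replace" /> is <see langword="true" />.
/// </summary>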
void StoreJobsAndTriggers(IDictionary<IJobDetail, Collection.ISet<ITrigger>> triggersAndJobs, bool replace);
/// <summary>
/// Remove (delete) the <see cref="IJob" /> with the given
/// key, and any <see cref="ITrigger" /> s that reference
/// it.
/// </summary>
/// <remarks>
/// If removal of the <see cref="IJob" /> results in an empty group, the
/// group should be removed from the <see cref="IJobStore" />'s list of
/// known group names.
/// </remarks>
/// <returns>
/// <see langword="true" /> if a <see cref="IJob" /> with the given name and
/// group was found and removed from the store.
/// </returns>
bool RemoveJob(JobKey jobKey);
bool RemoveJobs(IList<JobKey> jobKeys);
/// <summary>
/// Retrieve the <see cref="IJobDetail" /> for the given
/// <see cref="IJob" />.
/// </summary>
/// <returns>
/// The desired <see cref="IJob" />, or null if there is no match.
/// </returns>
IJobDetail RetrieveJob(JobKey jobKey);
/// <summary>
/// Store the given <see cref="ITrigger" />.
/// </summary>
/// <param name="newTrigger">The <see cref="ITrigger" /> to be stored.</param>
/// <param name="replaceExisting">If <see langword="true" />, any <see cref="ITrigger" /> existing in
/// the <see cref="IJobStore" /> with the same name and group should
/// be over-written.</param>
/// <throws> ObjectAlreadyExistsException </throws>
void StoreTrigger(IOperableTrigger newTrigger, bool replaceExisting);
/// <summary>
/// Remove (delete) the <see cref="ITrigger" /> with the given key.
/// </summary>
/// <remarks>
/// <para>
/// If removal of the <see cref="ITrigger" /> results in an empty group, the
/// group should be removed from the <see cref="IJobStore" />'s list of
/// known group names.
/// </para>
/// <para>
/// If removal of the <see cref="ITrigger" /> results in an 'orphaned' <see cref="IJob" />
/// that is not 'durable', then the <see cref="IJob" /> should be deleted
/// also.
/// </para>
/// </remarks>
/// <returns>
/// <see langword="true" /> if a <see cref="ITrigger" /> with the given
/// name and group was found and removed from the store.
/// </returns>
bool RemoveTrigger(TriggerKey triggerKey);
bool RemoveTriggers(IList<TriggerKey> triggerKeys);
/// <summary>
/// Remove (delete) the <see cref="ITrigger" /> with the
/// given name, and store the new given one - which must be associated
/// with the same job.
/// </summary>
/// <param name="triggerKey">The <see cref="ITrigger"/> to be replaced.</param>
/// <param name="newTrigger">The new <see cref="ITrigger" /> to be stored.</param>
/// <returns>
/// <see langword="true" /> if a <see cref="ITrigger" /> with the given
/// name and group was found and removed from the store.
/// </returns>
bool ReplaceTrigger(TriggerKey triggerKey, IOperableTrigger newTrigger);
/// <summary>
/// Retrieve the given <see cref="ITrigger" />.
/// </summary>
/// <returns>
/// The desired <see cref="ITrigger" />, or null if there is no
/// match.
/// </returns>
IOperableTrigger RetrieveTrigger(TriggerKey triggerKey);
/// <summary>
/// Determine whether a <see cref="ICalendar" /> with the given identifier already
/// exists within the scheduler.
/// </summary>
/// <remarks>
/// </remarks>
/// <param name="calName">the identifier to check for</param>
/// <returns>true if a calendar exists with the given identifier</returns>
bool CalendarExists(string calName);
/// <summary>
/// Determine whether a <see cref="IJob" /> with the given identifier already
/// exists within the scheduler.
/// </summary>
/// <remarks>
/// </remarks>
/// <param name="jobKey">the identifier to check for</param>
/// <returns>true if a job exists with the given identifier</returns>
bool CheckExists(JobKey jobKey);
/// <summary>
/// Determine whether a <see cref="ITrigger" /> with the given identifier already
/// exists within the scheduler.
/// </summary>
/// <remarks>
/// </remarks>
/// <param name="triggerKey">the identifier to check for</param>
/// <returns>true if a trigger exists with the given identifier</returns>
bool CheckExists(TriggerKey triggerKey);
/// <summary>
/// Clear (delete!) all scheduling data - all <see cref="IJob"/>s, <see cref="ITrigger" />s
/// and <see cref="ICalendar" />s.
/// </summary>
/// <remarks>
/// </remarks>
void ClearAllSchedulingData();
/// <summary>
/// Store the given <see cref="ICalendar" />.
/// </summary>
/// <param name="name">The name.</param>
/// <param name="calendar">The <see cref="ICalendar" /> to be stored.</param>
/// <param name="replaceExisting">If <see langword="true" />, any <see cref="ICalendar" /> existing
/// in the <see cref="IJobStore" /> with the same name and group
/// should be over-written.</param>
/// <param name="updateTriggers">If <see langword="true" />, any <see cref="ITrigger" />s existing
/// in the <see cref="IJobStore" /> that reference an existing
/// Calendar with the same name will have their next fire time
/// re-computed with the new <see cref="ICalendar" />.</param>
/// <throws> ObjectAlreadyExistsException </throws>
void StoreCalendar(string name, ICalendar calendar, bool replaceExisting, bool updateTriggers);
/// <summary>
/// Remove (delete) the <see cref="ICalendar" /> with the
/// given name.
/// </summary>
/// <remarks>
/// If removal of the <see cref="ICalendar" /> would result in
/// <see cref="ITrigger" />s pointing to non-existent calendars, then a
/// <see cref="JobPersistenceException" /> will be thrown.
/// </remarks>
/// <param name="calName">The name of the <see cref="ICalendar" /> to be removed.</param>
/// <returns>
/// <see langword="true" /> if a <see cref="ICalendar" /> with the given name
/// was found and removed from the store.
/// </returns>
bool RemoveCalendar(string calName);
/// <summary>
/// Retrieve the given <see cref="ITrigger" />.
/// </summary>
/// <param name="calName">The name of the <see cref="ICalendar" /> to be retrieved.</param>
/// <returns>
/// The desired <see cref="ICalendar" />, or null if there is no
/// match.
/// </returns>
ICalendar RetrieveCalendar(string calName);
/// <summary>
/// Get the number of <see cref="IJob" />s that are
/// stored in the <see cref="IJobStore" />.
/// </summary>
/// <returns></returns>
int GetNumberOfJobs();
/// <summary>
/// Get the number of <see cref="ITrigger" />s that are
/// stored in the <see cref="IJobStore" />.
/// </summary>
/// <returns></returns>
int GetNumberOfTriggers();
/// <summary>
/// Get the number of <see cref="ICalendar" /> s that are
/// stored in the <see cref="IJobStore" />.
/// </summary>
/// <returns></returns>
int GetNumberOfCalendars();
/// <summary>
/// Get the names of all of the <see cref="IJob" /> s that
/// have the given group name.
/// <para>
/// If there are no jobs in the given group name, the result should be a
/// zero-length array (not <see langword="null" />).
/// </para>
/// </summary>
/// <param name="matcher"></param>
/// <returns></returns>
Collection.ISet<JobKey> GetJobKeys(GroupMatcher<JobKey> matcher);
/// <summary>
/// Get the names of all of the <see cref="ITrigger" />s
/// that have the given group name.
/// <para>
/// If there are no triggers in the given group name, the result should be a
/// zero-length array (not <see langword="null" />).
/// </para>
/// </summary>
Collection.ISet<TriggerKey> GetTriggerKeys(GroupMatcher<TriggerKey> matcher);
/// <summary>
/// Get the names of all of the <see cref="IJob" />
/// groups.
/// <para>
/// If there are no known group names, the result should be a zero-length
/// array (not <see langword="null" />).
/// </para>
/// </summary>
IList<string> GetJobGroupNames();
/// <summary>
/// Get the names of all of the <see cref="ITrigger" />
/// groups.
/// <para>
/// If there are no known group names, the result should be a zero-length
/// array (not <see langword="null" />).
/// </para>
/// </summary>
IList<string> GetTriggerGroupNames();
/// <summary>
/// Get the names of all of the <see cref="ICalendar" /> s
/// in the <see cref="IJobStore" />.
///
/// <para>
/// If there are no Calendars in the given group name, the result should be
/// a zero-length array (not <see langword="null" />).
/// </para>
/// </summary>
IList<string> GetCalendarNames();
/// <summary>
/// Get all of the Triggers that are associated to the given Job.
/// </summary>
/// <remarks>
/// If there are no matches, a zero-length array should be returned.
/// </remarks>
IList<IOperableTrigger> GetTriggersForJob(JobKey jobKey);
/// <summary>
/// Get the current state of the identified <see cref="ITrigger" />.
/// </summary>
/// <seealso cref="TriggerState" />
TriggerState GetTriggerState(TriggerKey triggerKey);
/////////////////////////////////////////////////////////////////////////////
//
// Trigger State manipulation methods
//
/////////////////////////////////////////////////////////////////////////////
/// <summary>
/// Pause the <see cref="ITrigger" /> with the given key.
/// </summary>
void PauseTrigger(TriggerKey triggerKey);
/// <summary>
/// Pause all of the <see cref="ITrigger" />s in the
/// given group.
/// </summary>
/// <remarks>
/// The JobStore should "remember" that the group is paused, and impose the
/// pause on any new triggers that are added to the group while the group is
/// paused.
/// </remarks>
Collection.ISet<string> PauseTriggers(GroupMatcher<TriggerKey> matcher);
/// <summary>
/// Pause the <see cref="IJob" /> with the given key - by
/// pausing all of its current <see cref="ITrigger" />s.
/// </summary>
void PauseJob(JobKey jobKey);
/// <summary>
/// Pause all of the <see cref="IJob" />s in the given
/// group - by pausing all of their <see cref="ITrigger" />s.
/// <para>
/// The JobStore should "remember" that the group is paused, and impose the
/// pause on any new jobs that are added to the group while the group is
/// paused.
/// </para>
/// </summary>
/// <seealso cref="string">
/// </seealso>
IList<string> PauseJobs(GroupMatcher<JobKey> matcher);
/// <summary>
/// Resume (un-pause) the <see cref="ITrigger" /> with the
/// given key.
///
/// <para>
/// If the <see cref="ITrigger" /> missed one or more fire-times, then the
/// <see cref="ITrigger" />'s misfire instruction will be applied.
/// </para>
/// </summary>
/// <seealso cref="string">
/// </seealso>
void ResumeTrigger(TriggerKey triggerKey);
/// <summary>
/// Resume (un-pause) all of the <see cref="ITrigger" />s
/// in the given group.
/// <para>
/// If any <see cref="ITrigger" /> missed one or more fire-times, then the
/// <see cref="ITrigger" />'s misfire instruction will be applied.
/// </para>
/// </summary>
IList<string> ResumeTriggers(GroupMatcher<TriggerKey> matcher);
/// <summary>
/// Gets the paused trigger groups.
/// </summary>
/// <returns></returns>
Collection.ISet<string> GetPausedTriggerGroups();
/// <summary>
/// Resume (un-pause) the <see cref="IJob" /> with the
/// given key.
/// <para>
/// If any of the <see cref="IJob" />'s<see cref="ITrigger" /> s missed one
/// or more fire-times, then the <see cref="ITrigger" />'s misfire
/// instruction will be applied.
/// </para>
/// </summary>
void ResumeJob(JobKey jobKey);
/// <summary>
/// Resume (un-pause) all of the <see cref="IJob" />s in
/// the given group.
/// <para>
/// If any of the <see cref="IJob" /> s had <see cref="ITrigger" /> s that
/// missed one or more fire-times, then the <see cref="ITrigger" />'s
/// misfire instruction will be applied.
/// </para>
/// </summary>
Collection.ISet<string> ResumeJobs(GroupMatcher<JobKey> matcher);
/// <summary>
/// Pause all triggers - equivalent of calling <see cref="PauseTriggers" />
/// on every group.
/// <para>
/// When <see cref="ResumeAll" /> is called (to un-pause), trigger misfire
/// instructions WILL be applied.
/// </para>
/// </summary>
/// <seealso cref="ResumeAll" />
void PauseAll();
/// <summary>
/// Resume (un-pause) all triggers - equivalent of calling <see cref="ResumeTriggers" />
/// on every group.
/// <para>
/// If any <see cref="ITrigger" /> missed one or more fire-times, then the
/// <see cref="ITrigger" />'s misfire instruction will be applied.
/// </para>
///
/// </summary>
/// <seealso cref="PauseAll" />
void ResumeAll();
/// <summary>
/// Get a handle to the next trigger to be fired, and mark it as 'reserved'
/// by the calling scheduler.
/// </summary>
/// <param name="noLaterThan">If > 0, the JobStore should only return a Trigger
/// that will fire no later than the time represented in this value as
/// milliseconds.</param>
/// <param name="maxCount"></param>
/// <param name="timeWindow"></param>
/// <returns></returns>
/// <seealso cref="ITrigger">
/// </seealso>
IList<IOperableTrigger> AcquireNextTriggers(DateTimeOffset noLaterThan, int maxCount, TimeSpan timeWindow);
/// <summary>
/// Inform the <see cref="IJobStore" /> that the scheduler no longer plans to
/// fire the given <see cref="ITrigger" />, that it had previously acquired
/// (reserved).
/// </summary>
void ReleaseAcquiredTrigger(IOperableTrigger trigger);
/// <summary>
/// Inform the <see cref="IJobStore" /> that the scheduler is now firing the
/// given <see cref="ITrigger" /> (executing its associated <see cref="IJob" />),
/// that it had previously acquired (reserved).
/// </summary>
/// <returns>
/// May return null if all the triggers or their calendars no longer exist, or
/// if the trigger was not successfully put into the 'executing'
/// state. Preference is to return an empty list if none of the triggers
/// could be fired.
/// </returns>
IList<TriggerFiredResult> TriggersFired(IList<IOperableTrigger> triggers);
/// <summary>
/// Inform the <see cref="IJobStore" /> that the scheduler has completed the
/// firing of the given <see cref="ITrigger" /> (and the execution of its
/// associated <see cref="IJob" />), and that the <see cref="JobDataMap" />
/// in the given <see cref="IJobDetail" /> should be updated if the <see cref="IJob" />
/// is stateful.
/// </summary>
void TriggeredJobComplete(IOperableTrigger trigger, IJobDetail jobDetail, SchedulerInstruction triggerInstCode);
/// <summary>
/// Inform the <see cref="IJobStore" /> of the Scheduler instance's Id,
/// prior to initialize being invoked.
/// </summary>
string InstanceId { set; }
/// <summary>
/// Inform the <see cref="IJobStore" /> of the Scheduler instance's name,
/// prior to initialize being invoked.
/// </summary>
string InstanceName { set; }
/// <summary>
/// Tells the JobStore the pool size used to execute jobs.
/// </summary>
int ThreadPoolSize { set; }
}
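// Hedged usage sketch (not part of Quartz.NET): illustrates the acquire / fire / complete
// cycle a scheduler thread drives through IJobStore, as described by AcquireNextTriggers,
// TriggersFired and TriggeredJobComplete above. The class and method names are hypothetical;
// error handling, vetoes and misfire handling are omitted, and the ITrigger.JobKey and
// SchedulerInstruction.NoInstruction members are assumed from the standard Quartz.NET API.
internal static class JobStoreUsageSketch
{
internal static void RunOneCycle(IJobStore store)
{
// Ask the store for up to five triggers due within the next 30 seconds.
IList<IOperableTrigger> acquired =
store.AcquireNextTriggers(DateTimeOffset.UtcNow.AddSeconds(30), 5, TimeSpan.FromSeconds(5));
if (acquired == null || acquired.Count == 0)
{
return;
}
// Tell the store the triggers are now firing; a real scheduler would inspect
// 'fired' for triggers that could not be put into the executing state.
IList<TriggerFiredResult> fired = store.TriggersFired(acquired);
foreach (IOperableTrigger trigger in acquired)
{
IJobDetail jobDetail = store.RetrieveJob(trigger.JobKey);
// ... the scheduler would execute the job here ...
// Report completion so the store can update trigger state and job data.
store.TriggeredJobComplete(trigger, jobDetail, SchedulerInstruction.NoInstruction);
}
}
}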
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Microsoft.Win32.SafeHandles;
using System.Diagnostics;
using System.Threading;
namespace System.Net.Sockets
{
// This class implements a safe socket handle.
// It uses an inner and outer SafeHandle to do so. The inner
// SafeHandle holds the actual socket, but only ever has one
// reference to it. The outer SafeHandle guards the inner
// SafeHandle with real ref counting. When the outer SafeHandle
// is cleaned up, it releases the inner SafeHandle - since
// its ref is the only ref to the inner SafeHandle, it deterministically
// gets closed at that point - no races with concurrent IO calls.
// This allows Close() on the outer SafeHandle to deterministically
// close the inner SafeHandle, in turn allowing the inner SafeHandle
// to block the user thread in case a graceful close has been
// requested. (It's not legal to block any other thread - such closes
// are always abortive.)
internal partial class SafeCloseSocket :
#if DEBUG
DebugSafeHandleMinusOneIsInvalid
#else
SafeHandleMinusOneIsInvalid
#endif
{
protected SafeCloseSocket() : base(true) { }
private InnerSafeCloseSocket _innerSocket;
private volatile bool _released;
#if DEBUG
private InnerSafeCloseSocket _innerSocketCopy;
#endif
public override bool IsInvalid
{
get
{
return IsClosed || base.IsInvalid;
}
}
#if DEBUG
public void AddRef()
{
try
{
// The inner socket can be closed by CloseAsIs and when SafeHandle runs ReleaseHandle.
InnerSafeCloseSocket innerSocket = Volatile.Read(ref _innerSocket);
if (innerSocket != null)
{
innerSocket.AddRef();
}
}
catch (Exception e)
{
Debug.Fail("SafeCloseSocket.AddRef after inner socket disposed." + e);
}
}
public void Release()
{
try
{
// The inner socket can be closed by CloseAsIs and when SafeHandle runs ReleaseHandle.
InnerSafeCloseSocket innerSocket = Volatile.Read(ref _innerSocket);
if (innerSocket != null)
{
innerSocket.Release();
}
}
catch (Exception e)
{
Debug.Fail("SafeCloseSocket.Release after inner socket disposed." + e);
}
}
#endif
private void SetInnerSocket(InnerSafeCloseSocket socket)
{
_innerSocket = socket;
SetHandle(socket.DangerousGetHandle());
#if DEBUG
_innerSocketCopy = socket;
#endif
}
private static SafeCloseSocket CreateSocket(InnerSafeCloseSocket socket)
{
SafeCloseSocket ret = new SafeCloseSocket();
CreateSocket(socket, ret);
if (NetEventSource.IsEnabled) NetEventSource.Info(null, ret);
return ret;
}
protected static void CreateSocket(InnerSafeCloseSocket socket, SafeCloseSocket target)
{
if (socket != null && socket.IsInvalid)
{
target.SetHandleAsInvalid();
return;
}
bool b = false;
try
{
socket.DangerousAddRef(ref b);
}
catch
{
if (b)
{
socket.DangerousRelease();
b = false;
}
}
finally
{
if (b)
{
target.SetInnerSocket(socket);
socket.Dispose();
}
else
{
target.SetHandleAsInvalid();
}
}
}
protected override bool ReleaseHandle()
{
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"_innerSocket={_innerSocket}");
_released = true;
InnerSafeCloseSocket innerSocket = _innerSocket == null ? null : Interlocked.Exchange<InnerSafeCloseSocket>(ref _innerSocket, null);
#if DEBUG
// On AppDomain unload we may still have pending Overlapped operations.
// ThreadPoolBoundHandle should handle this scenario by canceling them.
innerSocket?.LogRemainingOperations();
#endif
InnerReleaseHandle();
innerSocket?.DangerousRelease();
return true;
}
internal void CloseAsIs()
{
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"_innerSocket={_innerSocket}");
#if DEBUG
// If this throws it could be very bad.
try
{
#endif
InnerSafeCloseSocket innerSocket = _innerSocket == null ? null : Interlocked.Exchange<InnerSafeCloseSocket>(ref _innerSocket, null);
Dispose();
if (innerSocket != null)
{
// Wait until it's safe.
SpinWait sw = new SpinWait();
while (!_released)
{
sw.SpinOnce();
}
// Now free it with blocking.
innerSocket.BlockingRelease();
}
InnerReleaseHandle();
#if DEBUG
}
catch (Exception exception) when (!ExceptionCheck.IsFatal(exception))
{
NetEventSource.Fail(this, $"handle:{handle}, error:{exception}");
throw;
}
#endif
}
internal sealed partial class InnerSafeCloseSocket : SafeHandleMinusOneIsInvalid
{
private InnerSafeCloseSocket() : base(true) { }
private bool _blockable;
public override bool IsInvalid
{
get
{
return IsClosed || base.IsInvalid;
}
}
// This method is implicitly reliable and called from a CER.
protected override bool ReleaseHandle()
{
bool ret = false;
#if DEBUG
try
{
#endif
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"handle:{handle}");
SocketError errorCode = InnerReleaseHandle();
return ret = errorCode == SocketError.Success;
#if DEBUG
}
catch (Exception exception)
{
if (!ExceptionCheck.IsFatal(exception))
{
NetEventSource.Fail(this, $"handle:{handle}, error:{exception}");
}
ret = true; // Avoid a second assert.
throw;
}
finally
{
_closeSocketThread = Environment.CurrentManagedThreadId;
_closeSocketTick = Environment.TickCount;
if (!ret)
{
NetEventSource.Fail(this, $"ReleaseHandle failed. handle:{handle}");
}
}
#endif
}
#if DEBUG
private IntPtr _closeSocketHandle;
private SocketError _closeSocketResult = unchecked((SocketError)0xdeadbeef);
private SocketError _closeSocketLinger = unchecked((SocketError)0xdeadbeef);
private int _closeSocketThread;
private int _closeSocketTick;
private int _refCount = 0;
public void AddRef()
{
Interlocked.Increment(ref _refCount);
}
public void Release()
{
Interlocked.MemoryBarrier();
Debug.Assert(_refCount > 0, "InnerSafeCloseSocket: Release() called more times than AddRef");
Interlocked.Decrement(ref _refCount);
}
public void LogRemainingOperations()
{
Interlocked.MemoryBarrier();
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"Releasing with pending operations: {_refCount}");
}
#endif
// Use this method to close the socket handle using the linger options specified on the socket.
// Guaranteed to only be called once, under a CER, and not if regular DangerousRelease is called.
internal void BlockingRelease()
{
#if DEBUG
// Expected to have outstanding operations such as Accept.
LogRemainingOperations();
#endif
_blockable = true;
DangerousRelease();
}
}
}
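// Hedged sketch (not part of the runtime): a generic helper illustrating the
// DangerousAddRef / DangerousRelease discipline that CreateSocket above relies on -
// the handle is only released if the ref count was actually taken, even when the
// callback throws. The helper name is hypothetical.
internal static class SafeHandleRefSketch
{
internal static void WithAddRef(System.Runtime.InteropServices.SafeHandle handle, Action<IntPtr> action)
{
bool refAdded = false;
try
{
// DangerousAddRef sets refAdded to true only if the ref count was incremented.
handle.DangerousAddRef(ref refAdded);
action(handle.DangerousGetHandle());
}
finally
{
if (refAdded)
{
handle.DangerousRelease();
}
}
}
}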
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
namespace Lucene.Net.Documents
{
/// <summary> Provides support for converting dates to strings and vice-versa.
/// The strings are structured so that lexicographic sorting orders
/// them by date, which makes them suitable for use as field values
/// and search terms.
///
/// <P>This class also helps you to limit the resolution of your dates. Do not
/// save dates with a finer resolution than you really need, as then
/// RangeQuery and PrefixQuery will require more memory and become slower.
///
/// <P>Compared to {@link DateField} the strings generated by the methods
/// in this class take slightly more space, unless your selected resolution
/// is set to <code>Resolution.DAY</code> or lower.
/// </summary>
public class DateTools
{
// private static readonly System.TimeZone GMT = TimeZone.getTimeZone("GMT"); // {{Aroush-2.1}}
private static readonly System.String YEAR_FORMAT = "yyyy";
private static readonly System.String MONTH_FORMAT = "yyyyMM";
private static readonly System.String DAY_FORMAT = "yyyyMMdd";
private static readonly System.String HOUR_FORMAT = "yyyyMMddHH";
private static readonly System.String MINUTE_FORMAT = "yyyyMMddHHmm";
private static readonly System.String SECOND_FORMAT = "yyyyMMddHHmmss";
private static readonly System.String MILLISECOND_FORMAT = "yyyyMMddHHmmssfff";
// cannot create, the class has static methods only
private DateTools()
{
}
/// <summary> Converts a Date to a string suitable for indexing.
///
/// </summary>
/// <param name="date">the date to be converted
/// </param>
/// <param name="resolution">the desired resolution, see
/// {@link #Round(Date, DateTools.Resolution)}
/// </param>
/// <returns> a string in format <code>yyyyMMddHHmmssfff</code> or shorter,
/// depending on <code>resolution</code>; using GMT as timezone
/// </returns>
public static System.String DateToString(System.DateTime date, Resolution resolution)
{
return TimeToString(date.Ticks, resolution);
}
/// <summary> Converts a millisecond time to a string suitable for indexing.
///
/// </summary>
/// <param name="time">the date expressed as milliseconds since January 1, 1970, 00:00:00 GMT
/// </param>
/// <param name="resolution">the desired resolution, see
/// {@link #Round(long, DateTools.Resolution)}
/// </param>
/// <returns> a string in format <code>yyyyMMddHHmmssfff</code> or shorter,
/// depending on <code>resolution</code>; using GMT as timezone
/// </returns>
public static System.String TimeToString(long time, Resolution resolution)
{
System.Globalization.Calendar cal = new System.Globalization.GregorianCalendar();
//protected in JDK's prior to 1.4
//cal.setTimeInMillis(round(time, resolution));
System.DateTime dt = new System.DateTime(Round(time, resolution));
System.String result;
if (resolution == Resolution.YEAR)
{
lock (YEAR_FORMAT)
{
result = dt.ToString(YEAR_FORMAT);
}
}
else if (resolution == Resolution.MONTH)
{
lock (MONTH_FORMAT)
{
result = dt.ToString(MONTH_FORMAT);
}
}
else if (resolution == Resolution.DAY)
{
lock (DAY_FORMAT)
{
result = dt.ToString(DAY_FORMAT);
}
}
else if (resolution == Resolution.HOUR)
{
lock (HOUR_FORMAT)
{
result = dt.ToString(HOUR_FORMAT);
}
}
else if (resolution == Resolution.MINUTE)
{
lock (MINUTE_FORMAT)
{
result = dt.ToString(MINUTE_FORMAT);
}
}
else if (resolution == Resolution.SECOND)
{
lock (SECOND_FORMAT)
{
result = dt.ToString(SECOND_FORMAT);
}
}
else if (resolution == Resolution.MILLISECOND)
{
lock (MILLISECOND_FORMAT)
{
result = dt.ToString(MILLISECOND_FORMAT);
}
}
else
{
throw new System.ArgumentException("unknown resolution " + resolution);
}
return result;
}
/// <summary> Converts a string produced by <code>timeToString</code> or
/// <code>dateToString</code> back to a time, represented as the
/// number of ticks used by <code>System.DateTime</code>.
///
/// </summary>
/// <param name="dateString">the date string to be converted
/// </param>
/// <returns> the time expressed in ticks
/// </returns>
/// <throws> FormatException if <code>dateString</code> is not in the expected format </throws>
public static long StringToTime(System.String dateString)
{
return StringToDate(dateString).Ticks;
}
/// <summary> Converts a string produced by <code>timeToString</code> or
/// <code>dateToString</code> back to a time, represented as a
/// Date object.
///
/// </summary>
/// <param name="dateString">the date string to be converted
/// </param>
/// <returns> the parsed time as a Date object
/// </returns>
/// <throws> FormatException if <code>dateString</code> is not in the expected format </throws>
public static System.DateTime StringToDate(System.String dateString)
{
System.DateTime date;
if (dateString.Length == 4)
{
lock (YEAR_FORMAT)
{
date = new System.DateTime(Convert.ToInt16(dateString.Substring(0, 4)),
1, 1, 0, 0, 0, 0);
}
}
else if (dateString.Length == 6)
{
lock (MONTH_FORMAT)
{
date = new System.DateTime(Convert.ToInt16(dateString.Substring(0, 4)),
Convert.ToInt16(dateString.Substring(4, 2)),
1, 0, 0, 0, 0);
}
}
else if (dateString.Length == 8)
{
lock (DAY_FORMAT)
{
date = new System.DateTime(Convert.ToInt16(dateString.Substring(0, 4)),
Convert.ToInt16(dateString.Substring(4, 2)),
Convert.ToInt16(dateString.Substring(6, 2)),
0, 0, 0, 0);
}
}
else if (dateString.Length == 10)
{
lock (HOUR_FORMAT)
{
date = new System.DateTime(Convert.ToInt16(dateString.Substring(0, 4)),
Convert.ToInt16(dateString.Substring(4, 2)),
Convert.ToInt16(dateString.Substring(6, 2)),
Convert.ToInt16(dateString.Substring(8, 2)),
0, 0, 0);
}
}
else if (dateString.Length == 12)
{
lock (MINUTE_FORMAT)
{
date = new System.DateTime(Convert.ToInt16(dateString.Substring(0, 4)),
Convert.ToInt16(dateString.Substring(4, 2)),
Convert.ToInt16(dateString.Substring(6, 2)),
Convert.ToInt16(dateString.Substring(8, 2)),
Convert.ToInt16(dateString.Substring(10, 2)),
0, 0);
}
}
else if (dateString.Length == 14)
{
lock (SECOND_FORMAT)
{
date = new System.DateTime(Convert.ToInt16(dateString.Substring(0, 4)),
Convert.ToInt16(dateString.Substring(4, 2)),
Convert.ToInt16(dateString.Substring(6, 2)),
Convert.ToInt16(dateString.Substring(8, 2)),
Convert.ToInt16(dateString.Substring(10, 2)),
Convert.ToInt16(dateString.Substring(12, 2)),
0);
}
}
else if (dateString.Length == 17)
{
lock (MILLISECOND_FORMAT)
{
date = new System.DateTime(Convert.ToInt16(dateString.Substring(0, 4)),
Convert.ToInt16(dateString.Substring(4, 2)),
Convert.ToInt16(dateString.Substring(6, 2)),
Convert.ToInt16(dateString.Substring(8, 2)),
Convert.ToInt16(dateString.Substring(10, 2)),
Convert.ToInt16(dateString.Substring(12, 2)),
Convert.ToInt16(dateString.Substring(14, 3)));
}
}
else
{
throw new System.FormatException("Input is not valid date string: " + dateString);
}
return date;
}
/// <summary> Limit a date's resolution. For example, the date <code>2004-09-21 13:50:11</code>
/// will be changed to <code>2004-09-01 00:00:00</code> when using
/// <code>Resolution.MONTH</code>.
///
/// </summary>
/// <param name="resolution">The desired resolution of the date to be returned
/// </param>
/// <returns> the date with all values more precise than <code>resolution</code>
/// set to 0 or 1
/// </returns>
public static System.DateTime Round(System.DateTime date, Resolution resolution)
{
return new System.DateTime(Round(date.Ticks, resolution));
}
/// <summary> Limit a time value's resolution. For example, a tick value representing
/// <code>2004-09-21 13:50:11</code> will be changed to one representing
/// <code>2004-09-01 00:00:00</code> when using
/// <code>Resolution.MONTH</code>.
///
/// </summary>
/// <param name="resolution">The desired resolution of the date to be returned
/// </param>
/// <returns> the date with all values more precise than <code>resolution</code>
/// set to 0 or 1, expressed in ticks
/// </returns>
public static long Round(long time, Resolution resolution)
{
System.Globalization.Calendar cal = new System.Globalization.GregorianCalendar(); // {{Aroush}} do we care about 'cal'
// protected in JDK's prior to 1.4
//cal.setTimeInMillis(time);
System.DateTime dt = new System.DateTime(time);
if (resolution == Resolution.YEAR)
{
dt = dt.AddMonths(1 - dt.Month);
dt = dt.AddDays(1 - dt.Day);
dt = dt.AddHours(0 - dt.Hour);
dt = dt.AddMinutes(0 - dt.Minute);
dt = dt.AddSeconds(0 - dt.Second);
dt = dt.AddMilliseconds(0 - dt.Millisecond);
}
else if (resolution == Resolution.MONTH)
{
dt = dt.AddDays(1 - dt.Day);
dt = dt.AddHours(0 - dt.Hour);
dt = dt.AddMinutes(0 - dt.Minute);
dt = dt.AddSeconds(0 - dt.Second);
dt = dt.AddMilliseconds(0 - dt.Millisecond);
}
else if (resolution == Resolution.DAY)
{
dt = dt.AddHours(0 - dt.Hour);
dt = dt.AddMinutes(0 - dt.Minute);
dt = dt.AddSeconds(0 - dt.Second);
dt = dt.AddMilliseconds(0 - dt.Millisecond);
}
else if (resolution == Resolution.HOUR)
{
dt = dt.AddMinutes(0 - dt.Minute);
dt = dt.AddSeconds(0 - dt.Second);
dt = dt.AddMilliseconds(0 - dt.Millisecond);
}
else if (resolution == Resolution.MINUTE)
{
dt = dt.AddSeconds(0 - dt.Second);
dt = dt.AddMilliseconds(0 - dt.Millisecond);
}
else if (resolution == Resolution.SECOND)
{
dt = dt.AddMilliseconds(0 - dt.Millisecond);
}
else if (resolution == Resolution.MILLISECOND)
{
// don't cut off anything
}
else
{
throw new System.ArgumentException("unknown resolution " + resolution);
}
return dt.Ticks;
}
/// <summary>Specifies the time granularity. </summary>
public class Resolution
{
public static readonly Resolution YEAR = new Resolution("year");
public static readonly Resolution MONTH = new Resolution("month");
public static readonly Resolution DAY = new Resolution("day");
public static readonly Resolution HOUR = new Resolution("hour");
public static readonly Resolution MINUTE = new Resolution("minute");
public static readonly Resolution SECOND = new Resolution("second");
public static readonly Resolution MILLISECOND = new Resolution("millisecond");
private System.String resolution;
internal Resolution()
{
}
internal Resolution(System.String resolution)
{
this.resolution = resolution;
}
public override System.String ToString()
{
return resolution;
}
}
static DateTools()
{
{
// times need to be normalized so the value doesn't depend on the
// location the index is created/used:
// {{Aroush-2.1}}
/*
YEAR_FORMAT.setTimeZone(GMT);
MONTH_FORMAT.setTimeZone(GMT);
DAY_FORMAT.setTimeZone(GMT);
HOUR_FORMAT.setTimeZone(GMT);
MINUTE_FORMAT.setTimeZone(GMT);
SECOND_FORMAT.setTimeZone(GMT);
MILLISECOND_FORMAT.setTimeZone(GMT);
*/
}
}
}
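/// <summary> Hedged usage sketch (not part of Lucene.Net): demonstrates the round-trip
/// described above - a date is truncated to the chosen resolution, rendered as a
/// lexicographically sortable string, and parsed back. The class name is hypothetical.
/// </summary>
public class DateToolsUsageSketch
{
public static void Demo()
{
System.DateTime now = System.DateTime.UtcNow;
// DAY resolution produces an 8 character "yyyyMMdd" string.
System.String indexed = DateTools.DateToString(now, DateTools.Resolution.DAY);
// Parsing restores midnight of that day; finer-grained fields were discarded.
System.DateTime roundTripped = DateTools.StringToDate(indexed);
System.Console.WriteLine(indexed + " -> " + roundTripped);
}
}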
}
| |
using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.IO.Packaging;
using System.Linq;
using System.Reflection;
using System.Text.RegularExpressions;
using System.Windows;
using EnvDTE;
using Microsoft.VisualStudio.ComponentModelHost;
using Microsoft.VisualStudio.Language.Intellisense;
using Microsoft.VisualStudio.OLE.Interop;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Classification;
using Microsoft.VisualStudio.Text.Editor;
using Microsoft.VisualStudio.Text.Operations;
using Microsoft.VisualStudio.Text.Tagging;
using Microsoft.VisualStudio.TextManager.Interop;
using VSLangProj;
using IServiceProvider = System.IServiceProvider;
namespace Ammy.VisualStudio
{
public static class RuntimeLoader
{
private static readonly ConcurrentDictionary<string, Project> ProjectCache = new ConcurrentDictionary<string, Project>();
private static Assembly _serviceAssembly;
private static DateTime _lastError;
public static ITagger<T> CreateErrorTagger<T>(string filePath, IServiceProvider serviceProvider, ITextBuffer buffer) where T : ITag
{
return GetObject<ITagger<T>>(filePath, t => typeof(ITagger<T>).IsAssignableFrom(t), serviceProvider, buffer, serviceProvider);
}
public static IWpfTextViewMargin CreateTextViewMargin(string filePath, IServiceProvider serviceProvider, IWpfTextViewHost wpfTextViewHost, IWpfTextViewMargin marginContainer)
{
return GetObject<IWpfTextViewMargin>(filePath, t => typeof(IWpfTextViewMargin).IsAssignableFrom(t), serviceProvider, wpfTextViewHost, marginContainer, serviceProvider);
}
public static ICompletionSource CreateCompletionSource(string filePath, IServiceProvider serviceProvider, ITextBuffer textBuffer)
{
return GetObject<ICompletionSource>(filePath, t => typeof(ICompletionSource).IsAssignableFrom(t), serviceProvider, textBuffer, serviceProvider);
}
public static IClassifier GetClassifier(string filePath, IServiceProvider serviceProvider, ITextBuffer buffer, ITextDocument textDocument, IClassificationTypeRegistryService classificationRegistry, IClassificationFormatMapService classificationFormatMapService)
{
return GetObject<IClassifier>(filePath, t => typeof(IClassifier).IsAssignableFrom(t), serviceProvider, buffer, textDocument, classificationRegistry, classificationFormatMapService, serviceProvider);
}
public static IOleCommandTarget CreateCommandHandler(string filePath, IServiceProvider serviceProvider, IVsTextView textViewAdapter, ITextView textView, ITextStructureNavigatorSelectorService navigatorService, ISignatureHelpBroker signatureHelpBroker, ICompletionBroker completionBroker)
{
return GetObject<IOleCommandTarget>(filePath, t => typeof(IOleCommandTarget).IsAssignableFrom(t), serviceProvider, textViewAdapter, textView, serviceProvider, navigatorService, signatureHelpBroker, completionBroker);
}
public static T GetObjectByName<T>(string filePath, string name, IServiceProvider serviceProvider, params object[] parameters) where T : class
{
return GetObject<T>(filePath, t => t.Name == name, serviceProvider, parameters);
}
public static T GetObject<T>(string filePath, Func<Type, bool> predicate, IServiceProvider serviceProvider, params object[] parameters) where T : class
{
try {
var assembly = GetServiceAssembly(filePath, serviceProvider);
if (assembly == null)
return null;
var type = assembly.GetTypes()
.FirstOrDefault(predicate);
if (type == null)
return null;
var obj = (T) Activator.CreateInstance(type, parameters);
var componentModel = (IComponentModel) serviceProvider.GetService(typeof (SComponentModel));
componentModel.DefaultCompositionService.SatisfyImportsOnce(obj);
return obj;
} catch (Exception e) {
if ((DateTime.Now - _lastError) > TimeSpan.FromMinutes(1))
MessageBox.Show(e.Message);
_lastError = DateTime.Now;
return null;
}
}
private static Assembly GetServiceAssembly(string filePath, IServiceProvider serviceProvider)
{
if (_serviceAssembly != null)
return _serviceAssembly;
var dte = (DTE)serviceProvider.GetService(typeof(DTE));
var project = GetProjectByFilename(dte, filePath, serviceProvider);
if (project == null)
return null;
var assemblyDirectory = GetAssemblyDirectory(project, dte);
var serviceAssemblyPath = Path.Combine(assemblyDirectory, "Ammy.VisualStudio.Service.dll");
return _serviceAssembly = Assembly.LoadFrom(serviceAssemblyPath);
}
private static string GetAssemblyDirectory(Project project, DTE dte)
{
var nl = Environment.NewLine;
var proj = (VSProject)project.Object;
var sidekickReference = proj.References.Cast<Reference>().FirstOrDefault(r => r.Path.EndsWith("AmmySidekick.dll", StringComparison.InvariantCultureIgnoreCase));
string packagesPath = null;
if (sidekickReference != null) {
var match = Regex.Match(sidekickReference.Path, @"(.+packages\\Ammy)\.\w+((?:\.\d+){3})");
if (match.Success && match.Groups.Count >= 3) {
var version = match.Groups[2].Value;
packagesPath = match.Groups[1].Value + version + "\\build";
EnsureVersion(version);
} else {
match = Regex.Match(sidekickReference.Path, @"(.+packages\\Ammy)\.\w+\\(\d+\.\d+\.\d+)");
if (match.Success && match.Groups.Count >= 3)
packagesPath = match.Groups[1].Value + "\\" + match.Groups[2].Value + "\\build";
}
}
if (packagesPath == null) {
var solutionDirectoryName = Path.GetDirectoryName(dte.Solution.FullName);
if (solutionDirectoryName == null)
throw new DirectoryNotFoundException("Ammy extension couldn't find solution directory");
var packagesFolder = Path.Combine(solutionDirectoryName, "packages");
var comparer = new AmmyVersionComparer();
var ammyPackagePath = Directory.GetDirectories(packagesFolder)
.Where(path => Regex.IsMatch(path, @"Ammy\.\d+\.\d+\.\d+"))
.OrderByDescending(path => path, comparer)
.FirstOrDefault();
if (ammyPackagePath != null)
packagesPath = Path.Combine(ammyPackagePath, "build");
}
if (packagesPath == null)
throw new DirectoryNotFoundException("Ammy extension couldn't find packages directory." + nl + nl +
"Run `install-package Ammy` in Package Manager Console and restart Visual Studio");
return packagesPath;
// Can't copy files from here, because they are being loaded into VS by msbuild first
// Need a Task that would copy contents of `Ammy.x.x.x\build` into temp directory before compilation
//try {
// var tempPath = Path.GetTempPath();
// var packageDirectoryName = new DirectoryInfo(ammyPackagePath).Name;
// var runtimeDirectoryPath = Path.Combine(tempPath, packageDirectoryName);
// if (!Directory.Exists(runtimeDirectoryPath))
// Directory.CreateDirectory(runtimeDirectoryPath);
// var files = Directory.GetFiles(ammyPackageBuildPath);
// foreach (var file in files) {
// try {
// var dest = Path.GetFileName(file);
// File.Copy(file, Path.Combine(runtimeDirectoryPath, dest));
// } catch {
// }
// }
// return runtimeDirectoryPath;
//} catch {
// return ammyPackageBuildPath;
//}
}
private static void EnsureVersion(string version)
{
var comparer = new AmmyVersionComparer();
// If the referenced Ammy package is older than 1.2.20 (excluding the 1.0.0 development placeholder), ask the user to update
if (comparer.Compare("Ammy" + version, "Ammy.1.2.20") < 0 && version != ".1.0.0")
throw new DirectoryNotFoundException("Please update Ammy NuGet package. " + Environment.NewLine + Environment.NewLine +
"Current extension is not compatible with packages older than 1.2.21");
}
private static Project GetProjectFromCache(string key, Func<string, Project> resolver)
{
Project project;
if (ProjectCache.TryGetValue(key, out project)) {
try {
Debug.WriteLine(project.Name); // Check that project is available
return project;
} catch {
// Update cache if not available
return ProjectCache[key] = resolver(key);
}
} else {
return ProjectCache[key] = resolver(key);
}
}
public static Project GetProjectByFilename(DTE dte, string filename, IServiceProvider serviceProvider)
{
return GetProjectFromCache(filename, _ => {
if (dte != null) {
var projectItem = dte.Solution.FindProjectItem(filename);
return projectItem?.ContainingProject;
}
return null;
});
}
}
class AmmyVersionComparer : IComparer<string>
{
public int Compare(string left, string right)
{
try {
var leftVersion = GetVersion(left);
var rightVersion = GetVersion(right);
var major = leftVersion.Item1.CompareTo(rightVersion.Item1);
if (major != 0)
return major;
var minor = leftVersion.Item2.CompareTo(rightVersion.Item2);
if (minor != 0)
return minor;
var patch = leftVersion.Item3.CompareTo(rightVersion.Item3);
if (patch != 0)
return patch;
return 0;
} catch {
return 0;
}
}
private Tuple<int, int, int> GetVersion(string str)
{
var match = Regex.Match(str, @"Ammy\.(\d+)\.(\d+)\.(\d+)");
var major = int.Parse(match.Groups[1].Value, CultureInfo.InvariantCulture);
var minor = int.Parse(match.Groups[2].Value, CultureInfo.InvariantCulture);
var patch = int.Parse(match.Groups[3].Value, CultureInfo.InvariantCulture);
return Tuple.Create(major, minor, patch);
}
}
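// Hedged sketch (not part of the extension): shows how AmmyVersionComparer orders
// "Ammy.x.y.z" package directory names so that the newest build folder wins, mirroring
// the lookup performed in RuntimeLoader.GetAssemblyDirectory. The class name is hypothetical.
static class AmmyVersionComparerSketch
{
public static string PickNewest(IEnumerable<string> packageDirs)
{
var comparer = new AmmyVersionComparer();
// Paths that don't match "Ammy.x.y.z" compare as equal (the comparer returns 0 on parse failure).
return packageDirs.OrderByDescending(path => path, comparer).FirstOrDefault();
}
}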
}
| |
/*
* SETTE - Symbolic Execution based Test Tool Evaluator
*
* SETTE is a tool to help the evaluation and comparison of symbolic execution
* based test input generator tools.
*
* Budapest University of Technology and Economics (BME)
*
* Authors: Lajos Cseppento <[email protected]>, Zoltan Micskei
* <[email protected]>
*
* Copyright 2014
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
using System;
namespace BME.MIT.SETTE.Basic.B3
{
public static class B3c_DoWhile
{
/**
* Calculates the sum of numbers from 1 to min(10, x)
*
* @param x
* @return
*/
public static int withLimit(int x)
{
int i = 1;
int sum = 0;
do
{
sum += i;
i++;
} while (i <= x && i <= 10);
return sum;
}
/**
* Calculates the sum of even numbers from 1 to min(10, x)
*
* @param x
* @return
*/
public static int withConditionAndLimit(int x)
{
int i = 1;
int sum = 0;
do
{
if (i % 2 == 0)
{
sum += i;
}
i++;
} while (i <= x && i <= 10);
return sum;
}
/**
* Calculates the sum of even numbers from 1 to x
*
* @param x
* @return
*/
public static int withConditionNoLimit(int x)
{
int i = 1;
int sum = 0;
do
{
if (x % 2 == 0)
{
sum += i;
}
i++;
} while (i <= x);
return sum;
}
/**
* Calculates the sum of even numbers from 1 to min(100, x)
*
* @param x
* @return
*/
public static int withContinueBreak(int x)
{
int i = 1;
int sum = 0;
do
{
if (i % 2 != 0)
{
i++;
continue;
}
if (i > 100)
{
break;
}
sum += i;
i++;
} while (i <= x);
return sum;
}
/**
* Calculates the sum of even numbers from 1 to min(limit, x), excluding
* skip
*
* @param x
* @param limit
* When this number is reached the loop breaks
* @param skip
* When this number is reached the loop continues
* @param stop
* When this number is reached the loop immediately stops
* @return
*/
//@SetteRequiredStatementCoverage(value = 78)
public static int complex(int x, int limit, int skip, int stop)
{
int i = 1;
int sum = 0;
do
{
sum += i;
if (i == limit)
{
break;
}
else if (i == skip)
{
i++;
continue;
}
else if (i == stop)
{
return -1;
}
else if (sum < 0)
{
// only if overflow
return -2;
}
else if (i < 0)
{
// unreachable code
return -3;
}
i++;
} while (i <= x);
return sum;
}
/**
* Please note that the infinite loop may be removed by the compiler or JIT. <br/>
* See https://www.securecoding.cert.org/confluence/display/java/MSC01-J.+Do+not+use+an+empty+infinite+loop.
*
* @param x
* @return
*/
//@SetteRequiredStatementCoverage(value = 0)
public static int infinite(int x)
{
do
{
} while (true);
}
/**
* Please note that it is highly unlikely that the infinite loop will be
* removed by the compiler or JIT.
*
* @param x
* @return
*/
//@SetteRequiredStatementCoverage(value = 0)
public static int infiniteNotOptimalizable(int x)
{
int i = 1;
do
{
if (i < 0)
{
// unreachable code
return i;
}
i += x;
if (i < 0)
{
// only if overflow
i = 1;
}
} while (true);
}
public static int nestedLoop(int x, int y)
{
int i = 0, j = 0;
int sum = 0;
do
{
if (x % 2 == 0)
{
do
{
if (j % 2 == 0)
{
sum += x * y;
}
j++;
} while (j < y && i < 10);
i++;
}
} while (i < x && i < 10);
return sum;
}
public static int nestedLoopWithLabel(int x, int y)
{
// in C#.NET it is not allowed to break to a label, use goto instead
int i = 0, j;
int sum = 0;
do
{
if (x % 2 == 0)
{
j = 0;
do
{
if (y == 6)
{
goto breakInnerLoop;
}
else if (x == 8 && y == 8)
{
goto breakOuterLoop;
}
if (j % 2 == 0)
{
sum += x * y;
}
j++;
} while (j < y && i < 10);
breakInnerLoop: i++;
}
} while (i < x && i < 10);
breakOuterLoop: return sum;
}
}
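/**
* Hedged worked example (not part of SETTE): sample calls illustrating the sums
* described in the comments above. The class name is hypothetical.
*/
public static class B3c_DoWhile_Examples
{
public static void Demo()
{
// withLimit(5): 1 + 2 + 3 + 4 + 5 = 15 (the limit of 10 is not reached).
int a = B3c_DoWhile.withLimit(5);
// withConditionAndLimit(5): even terms only, 2 + 4 = 6.
int b = B3c_DoWhile.withConditionAndLimit(5);
System.Console.WriteLine(a + " " + b);
}
}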
}
| |
using Strados.Obd.Extensions;
namespace Strados.Obd.Specification
{
public enum ObdPid : int
{
[StringValue("atspa{0}")]
Elm327AutoProtocol = -11,
[StringValue("atz")]
Elm327Initialize = -10,
[StringValue("ati")]
Elm327Info = -9,
[StringValue("ate{0}")]
Elm327Echo = -8,
[StringValue("ath{0}")]
Elm327Headers = -7,
[StringValue("atl{0}")]
Elm327LineFeed = -6,
[StringValue("atsp{0}")]
Elm327Protocol = -5,
[StringValue("atst{0}")]
Elm327Timeout = -4,
[StringValue("atws")]
Elm327WarmReset = -3,
[StringValue("atrv")]
Elm327ReadVoltage = -2,
[StringValue("atign")]
Elm327IgnMonInputLevel = -1,
[StringValue("0100")]
PidSupport_01_20 = 0,
[StringValue("0101")]
MonitorStatus = 1,
[StringValue("0102")]
FreezeDTC = 2,
[StringValue("0103")]
FuelSystemStatus = 3,
[StringValue("0104")]
CalcEngineLoad = 4,
[StringValue("0105")]
EngineCoolantTemp = 5,
[StringValue("0106")]
ShortTermFuelPercentTrimBankOne = 6,
[StringValue("0107")]
LongTermFuelPercentTrimBankOne = 7,
[StringValue("0108")]
ShortTermFuelPercentTrimBankTwo = 8,
[StringValue("0109")]
LongTermFuelPercentTrimBankTwo = 9,
[StringValue("010A")]
FuelPressure = 10,
[StringValue("010B")]
IntakeManifoldAbsolutePressure = 11,
[StringValue("010C")]
EngineRPM = 12,
[StringValue("010D")]
VehicleSpeed = 13,
[StringValue("010E")]
TimingAdvance = 14,
[StringValue("010F")]
IntakeAirTemperature = 15,
[StringValue("0110")]
MAFRate = 16,
[StringValue("0111")]
ThrottlePosition = 17,
[StringValue("0112")]
CommandedSecondaryAirStatus = 18,
[StringValue("0113")]
OxygenSensorsPresent = 19,
[StringValue("0114")]
Bank1_Sensor1 = 20,
[StringValue("0115")]
Bank1_Sensor2 = 21,
[StringValue("0116")]
Bank1_Sensor3 = 22,
[StringValue("0117")]
Bank1_Sensor4 = 23,
[StringValue("0118")]
Bank2_Sensor1 = 24,
[StringValue("0119")]
Bank2_Sensor2 = 25,
[StringValue("011A")]
Bank2_Sensor3 = 26,
[StringValue("011B")]
Bank2_Sensor4 = 27,
[StringValue("011C")]
OBDStandard = 28,
[StringValue("011D")]
OxygenSensorsPresent_1 = 29,
[StringValue("011E")]
AuxilaryInputStatus = 30,
[StringValue("011F")]
RunTimeSinceEngineStart = 31,
[StringValue("0120")]
PidSupport_21_40 = 32,
[StringValue("0121")]
DistanceTraveledWithMILOn = 33,
[StringValue("0122")]
FuelRailPressure_RelativeToManifoldVacuum = 34,
[StringValue("0123")]
FuelRailPressure_Diesel_GasolineDirectInject = 35,
[StringValue("0124")]
O2S1_WR_Lambda_EquivRatio = 36,
[StringValue("0125")]
O2S2_WR_Lambda_EquivRatio = 37,
[StringValue("0126")]
O2S3_WR_Lambda_EquivRatio = 38,
[StringValue("0127")]
O2S4_WR_Lambda_EquivRatio = 39,
[StringValue("0128")]
O2S5_WR_Lambda_EquivRatio = 40,
[StringValue("0129")]
O2S6_WR_Lambda_EquivRatio = 41,
[StringValue("012A")]
O2S7_WR_Lambda_EquivRatio = 42,
[StringValue("012B")]
O2S8_WR_Lambda_EquivRatio = 43,
[StringValue("012C")]
CommandedEGR = 44,
[StringValue("012D")]
EGRError = 45,
[StringValue("012E")]
CommandedEvapPurge = 46,
[StringValue("012F")]
FuelLevelInput = 47,
[StringValue("0130")]
NumberWarmUpsSinceCodeCleared = 48,
[StringValue("0131")]
FuelLevelInput_2 = 49,
[StringValue("0132")]
EvaporationSystemVaporPressure = 50,
[StringValue("0133")]
BarometricPressure = 51,
[StringValue("0134")]
O2S1_WR_Lambda_EquivRatio_1 = 52,
[StringValue("0135")]
O2S2_WR_Lambda_EquivRatio_1 = 53,
[StringValue("0136")]
O2S3_WR_Lambda_EquivRatio_1 = 54,
[StringValue("0137")]
O2S4_WR_Lambda_EquivRatio_1 = 55,
[StringValue("0138")]
O2S5_WR_Lambda_EquivRatio_1 = 56,
[StringValue("0139")]
O2S6_WR_Lambda_EquivRatio_1 = 57,
[StringValue("013A")]
O2S7_WR_Lambda_EquivRatio_1 = 58,
[StringValue("013B")]
O2S8_WR_Lambda_EquivRatio_1 = 59,
[StringValue("013C")]
CatalystTemperature_Bank1_Sensor1 = 60,
[StringValue("013D")]
CatalystTemperature_Bank2_Sensor1 = 61,
[StringValue("013E")]
CatalystTemperature_Bank1_Sensor2 = 62,
[StringValue("013F")]
CatalystTemperature_Bank2_Sensor2 = 63,
[StringValue("0140")]
PidSupport_41_60 = 64,
[StringValue("0141")]
MonitorStatusThisDriveCycle = 65,
[StringValue("0142")]
ControlModuleVoltage = 66,
[StringValue("0143")]
AbsoluteLoadValue = 67,
[StringValue("0144")]
FuelAirCommandedEquivalenceRatio = 68,
[StringValue("0145")]
RelativeThrottlePosition = 69,
[StringValue("0146")]
AmbientAirTemperature = 70,
[StringValue("0147")]
AbsoluteThrottlePosB = 71,
[StringValue("0148")]
AbsoluteThrottlePosC = 72,
[StringValue("0149")]
AbsolutePedalPosD = 73,
[StringValue("014A")]
AbsolutePedalPosE = 74,
[StringValue("014B")]
AbsolutePedalPosF = 75,
[StringValue("014C")]
CommandedThrottleActuator = 76,
[StringValue("014D")]
TimeRunWithMILOn = 77,
[StringValue("014E")]
TimeSinceTroubleCodesCleared = 78,
[StringValue("014F")]
MaxEquivRatioO2SensorIMAP = 79,
[StringValue("0150")]
MaxMAF = 80,
[StringValue("0151")]
FuelRate = 81,
[StringValue("0152")]
EthanolFuelPercent = 82,
[StringValue("0153")]
AbsoluteEvapVaporPressure = 83,
[StringValue("0154")]
EvapVaporPressure = 84,
[StringValue("0155")]
STSecondaryO2SensorTrim_Bank_1_3 = 85,
[StringValue("0156")]
LTSecondaryO2SensorTrim_Bank_1_3 = 86,
[StringValue("0157")]
STSecondaryO2SensorTrim_Bank_2_4 = 87,
[StringValue("0158")]
LTSecondaryO2SensorTrim_Bank_2_4 = 88,
[StringValue("0159")]
FuelRailPressure = 89,
[StringValue("015A")]
RelativeAcceleratorPedalPosition = 90,
[StringValue("015B")]
HybridBatteryPackRemainingLife = 91,
[StringValue("015C")]
EngineOilTemperature = 92,
[StringValue("015D")]
FuelInjectionTiming = 93,
[StringValue("015E")]
EngineFuelRate = 94,
[StringValue("015F")]
EmissionRequirements = 95,
[StringValue("0160")]
PidSupport_61_80 = 96,
[StringValue("0161")]
DriverDemandEnginePercentTorque = 97,
[StringValue("0162")]
ActualEnginePercentTorque = 98,
[StringValue("0163")]
EngineReferenceTorque = 99,
[StringValue("0164")]
EnginePercentTorqueData = 100,
[StringValue("0165")]
AuxilaryIOSupported = 101,
[StringValue("0166")]
MassAirFlowSensor = 102,
[StringValue("0167")]
EngineCoolantTemperature = 103,
[StringValue("0168")]
IntakeAirTemperatureSensor = 104,
[StringValue("0169")]
CommandedEGRAndError = 105,
[StringValue("016A")]
CommandedDieselIntakeAirFlowControl = 106,
[StringValue("016B")]
FuelPressureControlSystem = 107,
[StringValue("016E")]
InjectionPressureControlSystem = 108,
[StringValue("016F")]
TurbochargerCompressorInletPressure = 109,
[StringValue("0170")]
BoostPressureControl = 110,
[StringValue("0171")]
VariableGeometryTurboControl = 111,
[StringValue("0172")]
WastegateControl = 112,
[StringValue("0173")]
TurbochargerRPM = 113,
[StringValue("0174")]
TurbochargerTemperature = 114,
[StringValue("0175")]
TurbochargerTemperature_2 = 115,
[StringValue("0176")]
ChargeAirCoolerTemperature = 116,
[StringValue("0177")]
ExhaustGasTemperatureBank_1 = 117,
[StringValue("0178")]
ExhaustGasTemperatureBank_2 = 118,
[StringValue("017F")]
EngineRunTime = 119,
[StringValue("0180")]
PidSupport_81_A0 = 120,
[StringValue("0181")]
EngineRunTimeAECD = 121,
[StringValue("0182")]
EngineRunTimeAECD_2 = 122,
[StringValue("0183")]
NOxSensor = 123,
[StringValue("0184")]
ManifoldSurfaceTemperature = 124,
[StringValue("0185")]
NOxReagentSystem = 125,
[StringValue("0186")]
ParticulateMatterSensor = 126,
[StringValue("0187")]
IntakeManifoldAbsolutePressure_2 = 127,
[StringValue("0202")]
FreezeFrameTroubleCodes = 202,
[StringValue("03")]
RequestTroubleCodes = 300,
[StringValue("04")]
ClearTroubleCodes = 400,
[StringValue("07")]
PendingTroubleCodes = 700,
[StringValue("0A")]
PermanentTroubleCodes = 1000,
[StringValue("0901")]
VinMessageCount = 901,
[StringValue("0902")]
VINNumber = 902,
[StringValue("0903")]
CalibrationID = 903,
[StringValue("0904")]
Calibration = 904,
}
}
| |
/// <summary>**************************************************************************
///
/// $Id: ColorSpace.java,v 1.2 2002/07/25 16:31:11 grosbois Exp $
///
/// Copyright Eastman Kodak Company, 343 State Street, Rochester, NY 14650
/// $Date $
/// ***************************************************************************
/// </summary>
using System;
using FileFormatBoxes = CSJ2K.j2k.fileformat.FileFormatBoxes;
using ParameterList = CSJ2K.j2k.util.ParameterList;
using HeaderDecoder = CSJ2K.j2k.codestream.reader.HeaderDecoder;
using RandomAccessIO = CSJ2K.j2k.io.RandomAccessIO;
using ICCProfile = CSJ2K.Icc.ICCProfile;
using PaletteBox = CSJ2K.Color.Boxes.PaletteBox;
using ComponentMappingBox = CSJ2K.Color.Boxes.ComponentMappingBox;
using ColorSpecificationBox = CSJ2K.Color.Boxes.ColorSpecificationBox;
using ChannelDefinitionBox = CSJ2K.Color.Boxes.ChannelDefinitionBox;
using ImageHeaderBox = CSJ2K.Color.Boxes.ImageHeaderBox;
using JP2Box = CSJ2K.Color.Boxes.JP2Box;
namespace CSJ2K.Color
{
/// <summary> This class analyzes the image to provide colorspace
/// information for the decoding chain. It does this by
/// examining the box structure of the JP2 image.
/// It also provides access to the parameter list information,
/// which is stored as a public field.
///
/// </summary>
/// <seealso cref="jj2000.j2k.icc.ICCProfile">
/// </seealso>
/// <version> 1.0
/// </version>
/// <author> Bruce A. Kern
/// </author>
public class ColorSpace
{
/// <summary> Retrieve the ICC profile from the images as
/// a byte array.
/// </summary>
/// <returns> the ICC Profile as a byte [].
/// </returns>
virtual public byte[] ICCProfile
{
get
{
return csbox.ICCProfile;
}
}
/// <summary>Return the colorspace method (ICC-profiled or enumerated). </summary>
virtual public MethodEnum Method
{
get
{
return csbox.Method;
}
}
/// <summary>Return the image's palette box, or null if the image is not palettized. </summary>
virtual public PaletteBox PaletteBox
{
get
{
return pbox;
}
}
/// <summary>Return number of channels in the palette. </summary>
virtual public int PaletteChannels
{
get
{
return pbox == null?0:pbox.NumColumns;
}
}
/// <summary>Is palettized predicate. </summary>
virtual public bool Palettized
{
get
{
return pbox != null;
}
}
public static readonly System.String eol = System.Environment.NewLine;
// Renamed for convenience:
internal const int GRAY = 0;
internal const int RED = 1;
internal const int GREEN = 2;
internal const int BLUE = 3;
/// <summary>Parameter Specs </summary>
public ParameterList pl;
/// <summary>Header decoder </summary>
public HeaderDecoder hd;
/* Image box structure as pertains to colorspaces. */
private PaletteBox pbox = null;
private ComponentMappingBox cmbox = null;
private ColorSpecificationBox csbox = null;
private ChannelDefinitionBox cdbox = null;
private ImageHeaderBox ihbox = null;
/// <summary>Input image </summary>
private RandomAccessIO in_Renamed = null;
/// <summary>Indent a String that contains newlines. </summary>
public static System.String indent(System.String ident, System.Text.StringBuilder instr)
{
return indent(ident, instr.ToString());
}
/// <summary>Indent a String that contains newlines. </summary>
public static System.String indent(System.String ident, System.String instr)
{
System.Text.StringBuilder tgt = new System.Text.StringBuilder(instr);
char eolChar = eol[0];
int i = tgt.Length;
while (--i > 0)
{
if (tgt[i] == eolChar)
tgt.Insert(i + 1, ident);
}
return ident + tgt.ToString();
}
/// <summary> public constructor which takes in the image, parameterlist and the
/// image header decoder as args.
/// </summary>
/// <param name="in">input RandomAccess image file.
/// </param>
/// <param name="hd">provides information about the image header.
/// </param>
/// <param name="pl">provides parameters from the default and commandline lists.
/// </param>
/// <exception cref="IOException,">ColorSpaceException
/// </exception>
public ColorSpace(RandomAccessIO in_Renamed, HeaderDecoder hd, ParameterList pl)
{
this.pl = pl;
this.in_Renamed = in_Renamed;
this.hd = hd;
getBoxes();
}
/// <summary> Retrieve the various boxes from the JP2 file.</summary>
/// <exception cref="ColorSpaceException,">IOException
/// </exception>
protected internal void getBoxes()
{
//byte[] data;
int type;
long len = 0;
int boxStart = 0;
byte[] boxHeader = new byte[16];
int i = 0;
// Search the toplevel boxes for the header box
while (true)
{
in_Renamed.seek(boxStart);
in_Renamed.readFully(boxHeader, 0, 16);
// CONVERSION PROBLEM?
len = (long)CSJ2K.Icc.ICCProfile.getInt(boxHeader, 0);
if (len == 1)
len = CSJ2K.Icc.ICCProfile.getLong(boxHeader, 8); // Extended
// length
type = CSJ2K.Icc.ICCProfile.getInt(boxHeader, 4);
// Verify the contents of the file so far.
if (i == 0 && type != CSJ2K.j2k.fileformat.FileFormatBoxes.JP2_SIGNATURE_BOX)
{
throw new ColorSpaceException("first box in image not " + "signature");
}
else if (i == 1 && type != CSJ2K.j2k.fileformat.FileFormatBoxes.FILE_TYPE_BOX)
{
throw new ColorSpaceException("second box in image not file");
}
else if (type == CSJ2K.j2k.fileformat.FileFormatBoxes.CONTIGUOUS_CODESTREAM_BOX)
{
throw new ColorSpaceException("header box not found in image");
}
else if (type == CSJ2K.j2k.fileformat.FileFormatBoxes.JP2_HEADER_BOX)
{
break;
}
// Progress to the next box.
++i;
boxStart = (int) (boxStart + len);
}
// boxStart indexes the start of the JP2_HEADER_BOX,
// make headerBoxEnd index the end of the box.
long headerBoxEnd = boxStart + len;
if (len == 1)
boxStart += 8; // Extended length header
for (boxStart += 8; boxStart < headerBoxEnd; boxStart = (int) (boxStart + len))
{
in_Renamed.seek(boxStart);
in_Renamed.readFully(boxHeader, 0, 16);
len = (long)CSJ2K.Icc.ICCProfile.getInt(boxHeader, 0);
if (len == 1)
throw new ColorSpaceException("Extended length boxes " + "not supported");
type = (int)CSJ2K.Icc.ICCProfile.getInt(boxHeader, 4);
switch (type)
{
case CSJ2K.j2k.fileformat.FileFormatBoxes.IMAGE_HEADER_BOX:
ihbox = new ImageHeaderBox(in_Renamed, boxStart);
break;
case CSJ2K.j2k.fileformat.FileFormatBoxes.COLOUR_SPECIFICATION_BOX:
csbox = new ColorSpecificationBox(in_Renamed, boxStart);
break;
case CSJ2K.j2k.fileformat.FileFormatBoxes.CHANNEL_DEFINITION_BOX:
cdbox = new ChannelDefinitionBox(in_Renamed, boxStart);
break;
case CSJ2K.j2k.fileformat.FileFormatBoxes.COMPONENT_MAPPING_BOX:
cmbox = new ComponentMappingBox(in_Renamed, boxStart);
break;
case CSJ2K.j2k.fileformat.FileFormatBoxes.PALETTE_BOX:
pbox = new PaletteBox(in_Renamed, boxStart);
break;
default:
break;
}
}
if (ihbox == null)
throw new ColorSpaceException("image header box not found");
if ((pbox == null && cmbox != null) || (pbox != null && cmbox == null))
throw new ColorSpaceException("palette box and component " + "mapping box inconsistency");
}
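// Sketch of the box header layout getBoxes() relies on (per the JP2 file format):
//   bytes 0-3  : box length (a value of 1 means a 64-bit extended length follows)
//   bytes 4-7  : box type
//   bytes 8-15 : extended length, read only when the 32-bit length equals 1
// Top-level boxes are scanned in order (signature, file type, ...) until the
// JP2 header box is found, and its child boxes are then read in the loop above.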
/// <summary>Return the channel definition of the input component. </summary>
public virtual int getChannelDefinition(int c)
{
if (cdbox == null)
return c;
else
return cdbox.getCn(c + 1);
}
/// <summary>Return the colorspace (sYCC, sRGB, sGreyScale). </summary>
public virtual CSEnum getColorSpace()
{
return csbox.ColorSpace;
}
/// <summary>Return bitdepth of the palette entries. </summary>
public virtual int getPaletteChannelBits(int c)
{
return pbox == null ? 0 : (int)pbox.getBitDepth(c);
}
/// <summary> Return a palettized sample</summary>
/// <param name="channel">requested
/// </param>
/// <param name="index">of entry
/// </param>
/// <returns> palettized sample
/// </returns>
public virtual int getPalettizedSample(int channel, int index)
{
return pbox == null ? 0 : pbox.getEntry(channel, index);
}
/// <summary>Signed output predicate. </summary>
public virtual bool isOutputSigned(int channel)
{
return (pbox != null) ? pbox.isSigned(channel) : hd.isOriginalSigned(channel);
}
/// <summary>Return a suitable String representation of the class instance. </summary>
public override System.String ToString()
{
System.Text.StringBuilder rep = new System.Text.StringBuilder("[ColorSpace is ")
    .Append(csbox.MethodString)
    .Append(Palettized ? " and palettized " : " ")
    .Append(Method == MethodEnum.ENUMERATED ? csbox.ColorSpaceString : "");
if (ihbox != null)
{
rep.Append(eol).Append(indent(" ", ihbox.ToString()));
}
if (cdbox != null)
{
rep.Append(eol).Append(indent(" ", cdbox.ToString()));
}
if (csbox != null)
{
rep.Append(eol).Append(indent(" ", csbox.ToString()));
}
if (pbox != null)
{
rep.Append(eol).Append(indent(" ", pbox.ToString()));
}
if (cmbox != null)
{
rep.Append(eol).Append(indent(" ", cmbox.ToString()));
}
return rep.Append("]").ToString();
}
/// <summary> Are profiling diagnostics turned on</summary>
/// <returns> yes or no
/// </returns>
public virtual bool debugging()
{
return pl.Get("colorspace_debug") != null && pl.Get("colorspace_debug").ToUpper().Equals("on".ToUpper());
}
public enum MethodEnum
{
ICC_PROFILED,
ENUMERATED
}
public enum CSEnum
{
sRGB,
GreyScale,
sYCC,
esRGB,
Illegal,
Unknown
}
/* Enumeration Class */
/*
/// <summary>method enumeration </summary>
//UPGRADE_NOTE: Final was removed from the declaration of 'ICC_PROFILED '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
public const MethodEnum ICC_PROFILED = new MethodEnum("profiled");
/// <summary>method enumeration </summary>
//UPGRADE_NOTE: Final was removed from the declaration of 'ENUMERATED '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
public const MethodEnum ENUMERATED = new MethodEnum("enumerated");
/// <summary>colorspace enumeration </summary>
//UPGRADE_NOTE: Final was removed from the declaration of 'sRGB '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
public const CSEnum sRGB = new CSEnum("sRGB");
/// <summary>colorspace enumeration </summary>
//UPGRADE_NOTE: Final was removed from the declaration of 'GreyScale '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
public const CSEnum GreyScale = new CSEnum("GreyScale");
/// <summary>colorspace enumeration </summary>
//UPGRADE_NOTE: Final was removed from the declaration of 'sYCC '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
public const CSEnum sYCC = new CSEnum("sYCC");
/// <summary>colorspace enumeration </summary>
//UPGRADE_NOTE: Final was removed from the declaration of 'Illegal '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
public const CSEnum Illegal = new CSEnum("Illegal");
/// <summary>colorspace enumeration </summary>
//UPGRADE_NOTE: Final was removed from the declaration of 'Unknown '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
public const CSEnum Unknown = new CSEnum("Unknown");
/// <summary> Typesafe enumeration class</summary>
/// <version> 1.0
/// </version>
/// <author> Bruce A Kern
/// </author>
public class Enumeration
{
//UPGRADE_NOTE: Final was removed from the declaration of 'value '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
public System.String value_Renamed;
public Enumeration(System.String value_Renamed)
{
this.value_Renamed = value_Renamed;
}
public override System.String ToString()
{
return value_Renamed;
}
}
/// <summary> Method enumeration class</summary>
/// <version> 1.0
/// </version>
/// <author> Bruce A Kern
/// </author>
public class MethodEnum:Enumeration
{
public MethodEnum(System.String value_Renamed):base(value_Renamed)
{
}
}
/// <summary> Colorspace enumeration class</summary>
/// <version> 1.0
/// </version>
/// <author> Bruce A Kern
/// </author>
public class CSEnum:Enumeration
{
public CSEnum(System.String value_Renamed):base(value_Renamed)
{
}
}
*/
/* end class ColorSpace */
}
}
| |
using Discord.Commands;
using Discord.Modules;
using NadekoBot.Classes;
using NadekoBot.Classes.JSONModels;
using NadekoBot.DataModels;
using NadekoBot.Extensions;
using NadekoBot.Modules.Permissions.Classes;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
namespace NadekoBot.Modules.Pokemon
{
class PokemonModule : DiscordModule
{
public override string Prefix { get; } = NadekoBot.Config.CommandPrefixes.Pokemon;
private ConcurrentDictionary<ulong, PokeStats> Stats = new ConcurrentDictionary<ulong, PokeStats>();
public PokemonModule()
{
}
private int GetDamage(PokemonType usertype, PokemonType targetType)
{
var rng = new Random();
int damage = rng.Next(40, 60);
foreach (PokemonMultiplier Multiplier in usertype.Multipliers)
{
if (Multiplier.Type == targetType.Name)
{
var multiplier = Multiplier.Multiplication;
damage = (int)(damage * multiplier);
}
}
return damage;
}
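// Illustrative sketch of the damage roll (the actual multipliers come from
// NadekoBot.Config.PokemonTypes): a base roll in [40, 60) is scaled by every
// multiplier the attacker's type defines against the target's type, so a roll
// of 50 becomes 100 with a 2.0 multiplier or 25 with a 0.5 multiplier.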
private PokemonType GetPokeType(ulong id)
{
var db = DbHandler.Instance.GetAllRows<UserPokeTypes>();
Dictionary<long, string> setTypes = db.ToDictionary(x => x.UserId, y => y.type);
if (setTypes.ContainsKey((long)id))
{
return stringToPokemonType(setTypes[(long)id]);
}
int count = NadekoBot.Config.PokemonTypes.Count;
int remainder = Math.Abs((int)(id % (ulong)count));
return NadekoBot.Config.PokemonTypes[remainder];
}
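// Sketch of the fallback mapping when no type has been stored for the user:
// with, say, 8 configured types, user id 1234567890123 maps to index
// 1234567890123 % 8 == 3, so each user gets a stable default type derived
// from their id.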
private PokemonType stringToPokemonType(string v)
{
var str = v.ToUpperInvariant();
var list = NadekoBot.Config.PokemonTypes;
foreach (PokemonType p in list)
{
if (str == p.Name)
{
return p;
}
}
return null;
}
public override void Install(ModuleManager manager)
{
manager.CreateCommands("", cgb =>
{
cgb.AddCheck(PermissionChecker.Instance);
commands.ForEach(cmd => cmd.Init(cgb));
cgb.CreateCommand(Prefix + "attack")
.Description($"Attacks a target with the given move. Use `{Prefix}movelist` to see a list of moves your type can use. | `{Prefix}attack \"vine whip\" @someguy`")
.Parameter("move", ParameterType.Required)
.Parameter("target", ParameterType.Unparsed)
.Do(async e =>
{
var move = e.GetArg("move");
var targetStr = e.GetArg("target")?.Trim();
if (string.IsNullOrWhiteSpace(targetStr))
return;
var target = e.Server.FindUsers(targetStr).FirstOrDefault();
if (target == null)
{
await e.Channel.SendMessage("No such person.").ConfigureAwait(false);
return;
}
else if (target == e.User)
{
await e.Channel.SendMessage("You can't attack yourself.").ConfigureAwait(false);
return;
}
// Checking stats first, then move
//Set up the userstats
PokeStats userStats;
userStats = Stats.GetOrAdd(e.User.Id, new PokeStats());
//Check if able to move
//The user cannot move if their HP is below 0 or if they have made more than 4 attacks in a row
if (userStats.Hp < 0)
{
await e.Channel.SendMessage($"{e.User.Mention} has fainted and was not able to move!").ConfigureAwait(false);
return;
}
if (userStats.MovesMade >= 5)
{
await e.Channel.SendMessage($"{e.User.Mention} has used too many moves in a row and was not able to move!").ConfigureAwait(false);
return;
}
if (userStats.LastAttacked.Contains(target.Id))
{
await e.Channel.SendMessage($"{e.User.Mention} can't attack again without retaliation!").ConfigureAwait(false);
return;
}
//get target stats
PokeStats targetStats;
targetStats = Stats.GetOrAdd(target.Id, new PokeStats());
//If target's HP is below 0, no use attacking
if (targetStats.Hp <= 0)
{
await e.Channel.SendMessage($"{target.Mention} has already fainted!").ConfigureAwait(false);
return;
}
//Check whether move can be used
PokemonType userType = GetPokeType(e.User.Id);
var enabledMoves = userType.Moves;
if (!enabledMoves.Contains(move.ToLowerInvariant()))
{
await e.Channel.SendMessage($"{e.User.Mention} was not able to use **{move}**, use `{Prefix}ml` to see moves you can use").ConfigureAwait(false);
return;
}
//get target type
PokemonType targetType = GetPokeType(target.Id);
//generate damage
int damage = GetDamage(userType, targetType);
//apply damage to target
targetStats.Hp -= damage;
var response = $"{e.User.Mention} used **{move}**{userType.Icon} on {target.Mention}{targetType.Icon} for **{damage}** damage";
//Damage type
if (damage < 40)
{
response += "\nIt's not effective..";
}
else if (damage > 60)
{
response += "\nIt's super effective!";
}
else
{
response += "\nIt's somewhat effective";
}
//check fainted
if (targetStats.Hp <= 0)
{
response += $"\n**{target.Name}** has fainted!";
}
else
{
response += $"\n**{target.Name}** has {targetStats.Hp} HP remaining";
}
//update other stats
userStats.LastAttacked.Add(target.Id);
userStats.MovesMade++;
targetStats.MovesMade = 0;
if (targetStats.LastAttacked.Contains(e.User.Id))
{
targetStats.LastAttacked.Remove(e.User.Id);
}
//update dictionary
//This can stay the same right?
Stats[e.User.Id] = userStats;
Stats[target.Id] = targetStats;
await e.Channel.SendMessage(response).ConfigureAwait(false);
});
cgb.CreateCommand(Prefix + "movelist")
.Alias(Prefix + "ml")
.Description($"Lists the moves you are able to use | `{Prefix}ml`")
.Do(async e =>
{
var userType = GetPokeType(e.User.Id);
var movesList = userType.Moves;
var str = $"**Moves for `{userType.Name}` type.**";
foreach (string m in movesList)
{
str += $"\n{userType.Icon}{m}";
}
await e.Channel.SendMessage(str).ConfigureAwait(false);
});
cgb.CreateCommand(Prefix + "heal")
.Description($"Heals someone. Revives those who fainted. Costs a {NadekoBot.Config.CurrencyName} | `{Prefix}heal @someone`")
.Parameter("target", ParameterType.Unparsed)
.Do(async e =>
{
var targetStr = e.GetArg("target")?.Trim();
if (string.IsNullOrWhiteSpace(targetStr))
return;
var usr = e.Server.FindUsers(targetStr).FirstOrDefault();
if (usr == null)
{
await e.Channel.SendMessage("No such person.").ConfigureAwait(false);
return;
}
if (Stats.ContainsKey(usr.Id))
{
var targetStats = Stats[usr.Id];
int HP = targetStats.Hp;
if (targetStats.Hp == targetStats.MaxHp)
{
await e.Channel.SendMessage($"{usr.Name} already has full HP!").ConfigureAwait(false);
return;
}
//Payment~
var amount = 1;
var pts = Classes.DbHandler.Instance.GetStateByUserId((long)e.User.Id)?.Value ?? 0;
if (pts < amount)
{
await e.Channel.SendMessage($"{e.User.Mention} you don't have enough {NadekoBot.Config.CurrencyName}s! \nYou still need {amount - pts} {NadekoBot.Config.CurrencySign} to be able to do this!").ConfigureAwait(false);
return;
}
var target = (usr.Id == e.User.Id) ? "yourself" : usr.Name;
await FlowersHandler.RemoveFlowers(e.User, $"Poke-Heal {target}", amount).ConfigureAwait(false);
//healing
targetStats.Hp = targetStats.MaxHp;
if (HP < 0)
{
//Could heal only for half HP?
Stats[usr.Id].Hp = (targetStats.MaxHp / 2);
await e.Channel.SendMessage($"{e.User.Name} revived {usr.Name} with one {NadekoBot.Config.CurrencySign}").ConfigureAwait(false);
return;
}
var vowelFirst = new[] { 'a', 'e', 'i', 'o', 'u' }.Contains(NadekoBot.Config.CurrencyName[0]);
await e.Channel.SendMessage($"{e.User.Name} healed {usr.Name} for {targetStats.MaxHp - HP} HP with {(vowelFirst ? "an" : "a")} {NadekoBot.Config.CurrencySign}").ConfigureAwait(false);
return;
}
else
{
await e.Channel.SendMessage($"{usr.Name} already has full HP!").ConfigureAwait(false);
}
});
cgb.CreateCommand(Prefix + "type")
.Description($"Get the poketype of the target. | `{Prefix}type @someone`")
.Parameter("target", ParameterType.Unparsed)
.Do(async e =>
{
var usrStr = e.GetArg("target")?.Trim();
if (string.IsNullOrWhiteSpace(usrStr))
return;
var usr = e.Server.FindUsers(usrStr).FirstOrDefault();
if (usr == null)
{
await e.Channel.SendMessage("No such person.").ConfigureAwait(false);
return;
}
var pType = GetPokeType(usr.Id);
await e.Channel.SendMessage($"Type of {usr.Name} is **{pType.Name.ToLowerInvariant()}**{pType.Icon}").ConfigureAwait(false);
});
cgb.CreateCommand(Prefix + "settype")
.Description($"Set your poketype. Costs a {NadekoBot.Config.CurrencyName}. | `{Prefix}settype fire`")
.Parameter("targetType", ParameterType.Unparsed)
.Do(async e =>
{
var targetTypeStr = e.GetArg("targetType")?.ToUpperInvariant();
if (string.IsNullOrWhiteSpace(targetTypeStr))
return;
var targetType = stringToPokemonType(targetTypeStr);
if (targetType == null)
{
await e.Channel.SendMessage("Invalid type specified. Type must be one of:\n" + string.Join(", ", NadekoBot.Config.PokemonTypes.Select(t => t.Name.ToUpperInvariant()))).ConfigureAwait(false);
return;
}
if (targetType == GetPokeType(e.User.Id))
{
await e.Channel.SendMessage($"Your type is already {targetType.Name.ToLowerInvariant()}{targetType.Icon}").ConfigureAwait(false);
return;
}
//Payment~
var amount = 1;
var pts = DbHandler.Instance.GetStateByUserId((long)e.User.Id)?.Value ?? 0;
if (pts < amount)
{
await e.Channel.SendMessage($"{e.User.Mention} you don't have enough {NadekoBot.Config.CurrencyName}s! \nYou still need {amount - pts} {NadekoBot.Config.CurrencySign} to be able to do this!").ConfigureAwait(false);
return;
}
await FlowersHandler.RemoveFlowers(e.User, $"set usertype to {targetTypeStr}", amount).ConfigureAwait(false);
//Actually changing the type here
var preTypes = DbHandler.Instance.GetAllRows<UserPokeTypes>();
Dictionary<long, int> Dict = preTypes.ToDictionary(x => x.UserId, y => y.Id.Value);
if (Dict.ContainsKey((long)e.User.Id))
{
//delete previous type
DbHandler.Instance.Delete<UserPokeTypes>(Dict[(long)e.User.Id]);
}
DbHandler.Instance.Connection.Insert(new UserPokeTypes
{
UserId = (long)e.User.Id,
type = targetType.Name
}, typeof(UserPokeTypes));
//Now for the response
await e.Channel.SendMessage($"Set type of {e.User.Mention} to {targetTypeStr}{targetType.Icon} for a {NadekoBot.Config.CurrencySign}").ConfigureAwait(false);
});
});
}
}
}
| |
//* Bobo Browse Engine - High performance faceted/parametric search implementation
//* that handles various types of semi-structured data. Originally written in Java.
//*
//* Ported and adapted for C# by Shad Storhaug, Alexey Shcherbachev, and zhengchun.
//*
//* Copyright (C) 2005-2015 John Wang
//*
//* Licensed under the Apache License, Version 2.0 (the "License");
//* you may not use this file except in compliance with the License.
//* You may obtain a copy of the License at
//*
//* http://www.apache.org/licenses/LICENSE-2.0
//*
//* Unless required by applicable law or agreed to in writing, software
//* distributed under the License is distributed on an "AS IS" BASIS,
//* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//* See the License for the specific language governing permissions and
//* limitations under the License.
// Version compatibility level: 4.0.2
namespace BoboBrowse.Net.Facets.Impl
{
using BoboBrowse.Net;
using BoboBrowse.Net.Facets.Filter;
using BoboBrowse.Net.Sort;
using BoboBrowse.Net.Support;
using BoboBrowse.Net.Util;
using Lucene.Net.Index;
using Lucene.Net.Search;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
public class SimpleGroupbyFacetHandler : FacetHandler<FacetDataNone>
{
private readonly IList<string> m_fieldsSet;
private IList<SimpleFacetHandler> m_facetHandlers;
private IDictionary<string, SimpleFacetHandler> m_facetHandlerMap;
private const string SEP = ",";
private readonly string m_sep;
/// <summary>
/// Initializes a new instance of <see cref="T:SimpleFacetHandler"/> with the specified name,
/// dependent facet handler names, and separator.
/// </summary>
/// <param name="name">The facet handler name.</param>
/// <param name="dependsOn">List of facet handler names that will be included in the group.</param>
/// <param name="separator">The separator string that will be used to delineate each value in the group.</param>
public SimpleGroupbyFacetHandler(string name, IList<string> dependsOn, string separator)
: base(name, dependsOn)
{
m_fieldsSet = dependsOn;
m_facetHandlers = null;
m_facetHandlerMap = null;
m_sep = separator;
}
/// <summary>
/// Initializes a new instance of <see cref="T:SimpleFacetHandler"/> with the specified name and
/// dependent facet handler names. The separator is assumed to be ",".
/// </summary>
/// <param name="name">The facet handler name.</param>
/// <param name="dependsOn">List of facet handler names that will be included in the group.</param>
public SimpleGroupbyFacetHandler(string name, IList<string> dependsOn)
: this(name, dependsOn, SEP)
{
}
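// A minimal usage sketch (handler and field names are hypothetical):
//   var groupBy = new SimpleGroupbyFacetHandler("colorSize",
//       new List<string> { "color", "size" }, ",");
// Provided "color" and "size" are registered SimpleFacetHandlers, the group-by
// handler then exposes combined facet values such as "red,large".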
public override RandomAccessFilter BuildRandomAccessFilter(string value, IDictionary<string, string> selectionProperty)
{
List<RandomAccessFilter> filterList = new List<RandomAccessFilter>();
string[] vals = value.Split(new string[] { m_sep }, StringSplitOptions.RemoveEmptyEntries);
for (int i = 0; i < vals.Length; ++i)
{
SimpleFacetHandler handler = m_facetHandlers[i];
BrowseSelection sel = new BrowseSelection(handler.Name);
sel.AddValue(vals[i]);
filterList.Add(handler.BuildFilter(sel));
}
return new RandomAccessAndFilter(filterList);
}
public override FacetCountCollectorSource GetFacetCountCollectorSource(BrowseSelection sel, FacetSpec fspec)
{
return new GroupbyFacetCountCollectorSource(m_facetHandlers, m_name, m_sep, sel, fspec);
}
private class GroupbyFacetCountCollectorSource : FacetCountCollectorSource
{
private readonly IList<SimpleFacetHandler> m_facetHandlers;
private readonly string m_name;
private readonly string m_sep;
private readonly BrowseSelection m_sel;
private readonly FacetSpec m_fspec;
public GroupbyFacetCountCollectorSource(IList<SimpleFacetHandler> facetHandlers, string name, string sep, BrowseSelection sel, FacetSpec fspec)
{
m_facetHandlers = facetHandlers;
m_name = name;
m_sep = sep;
m_sel = sel;
m_fspec = fspec;
}
public override IFacetCountCollector GetFacetCountCollector(BoboSegmentReader reader, int docBase)
{
var collectorList = new List<DefaultFacetCountCollector>(m_facetHandlers.Count);
foreach (var facetHandler in m_facetHandlers)
{
collectorList.Add((DefaultFacetCountCollector)facetHandler.GetFacetCountCollectorSource(m_sel, m_fspec).GetFacetCountCollector(reader, docBase));
}
return new GroupbyFacetCountCollector(m_name, m_fspec, collectorList.ToArray(), reader.MaxDoc, m_sep);
}
}
public override string[] GetFieldValues(BoboSegmentReader reader, int id)
{
List<string> valList = new List<string>();
foreach (IFacetHandler handler in m_facetHandlers)
{
StringBuilder buf = new StringBuilder();
bool firsttime = true;
string[] vals = handler.GetFieldValues(reader, id);
if (vals != null && vals.Length > 0)
{
if (!firsttime)
{
buf.Append(",");
}
else
{
firsttime = false;
}
foreach (string val in vals)
{
buf.Append(val);
}
}
valList.Add(buf.ToString());
}
return valList.ToArray();
}
public override object[] GetRawFieldValues(BoboSegmentReader reader, int id)
{
return GetFieldValues(reader, id);
}
public override DocComparerSource GetDocComparerSource()
{
return new GroupbyDocComparerSource(m_fieldsSet, m_facetHandlers);
}
private class GroupbyDocComparerSource : DocComparerSource
{
private readonly IList<string> m_fieldsSet;
private readonly IList<SimpleFacetHandler> m_facetHandlers;
public GroupbyDocComparerSource(IList<string> fieldsSet, IList<SimpleFacetHandler> facetHandlers)
{
m_fieldsSet = fieldsSet;
m_facetHandlers = facetHandlers;
}
public override DocComparer GetComparer(AtomicReader reader, int docbase)
{
var comparerList = new List<DocComparer>(m_fieldsSet.Count);
foreach (var handler in m_facetHandlers)
{
comparerList.Add(handler.GetDocComparerSource().GetComparer(reader, docbase));
}
return new GroupbyDocComparer(comparerList.ToArray());
}
}
public override FacetDataNone Load(BoboSegmentReader reader)
{
m_facetHandlers = new List<SimpleFacetHandler>(m_fieldsSet.Count);
m_facetHandlerMap = new Dictionary<string, SimpleFacetHandler>(m_fieldsSet.Count);
foreach (string name in m_fieldsSet)
{
IFacetHandler handler = reader.GetFacetHandler(name);
if (handler == null || !(handler is SimpleFacetHandler))
{
throw new InvalidOperationException("only simple facet handlers supported");
}
SimpleFacetHandler sfh = (SimpleFacetHandler)handler;
m_facetHandlers.Add(sfh);
m_facetHandlerMap.Add(name, sfh);
}
return FacetDataNone.Instance;
}
private class GroupbyDocComparer : DocComparer
{
private readonly DocComparer[] m_comparers;
public GroupbyDocComparer(DocComparer[] comparers)
{
m_comparers = comparers;
}
public override sealed int Compare(ScoreDoc d1, ScoreDoc d2)
{
int retval = 0;
foreach (DocComparer comparer in m_comparers)
{
retval = comparer.Compare(d1, d2);
if (retval != 0) break;
}
return retval;
}
public override sealed IComparable Value(ScoreDoc doc)
{
return new GroupbyComparable(m_comparers, doc);
}
}
private class GroupbyComparable : IComparable
{
private readonly DocComparer[] m_comparers;
private readonly ScoreDoc m_doc;
public GroupbyComparable(DocComparer[] comparers, ScoreDoc doc)
{
m_comparers = comparers;
m_doc = doc;
}
public virtual int CompareTo(object o)
{
int retval = 0;
foreach (DocComparer comparer in m_comparers)
{
retval = comparer.Value(m_doc).CompareTo(o);
if (retval != 0) break;
}
return retval;
}
}
private class GroupbyFacetCountCollector : IFacetCountCollector
{
private readonly DefaultFacetCountCollector[] m_subcollectors;
private readonly string m_name;
private readonly FacetSpec m_fspec;
private readonly BigSegmentedArray m_count;
private readonly int m_countlength;
private readonly int[] m_lens;
private readonly int m_maxdoc;
private readonly string m_sep;
public GroupbyFacetCountCollector(string name, FacetSpec fspec, DefaultFacetCountCollector[] subcollectors, int maxdoc, string sep)
{
m_name = name;
m_fspec = fspec;
m_subcollectors = subcollectors;
m_sep = sep;
int totalLen = 1;
m_lens = new int[m_subcollectors.Length];
for (int i = 0; i < m_subcollectors.Length; ++i)
{
m_lens[i] = m_subcollectors[i].CountLength;
totalLen *= m_lens[i];
}
m_countlength = totalLen;
m_count = new LazyBigInt32Array(m_countlength);
m_maxdoc = maxdoc;
}
public void Collect(int docid)
{
int idx = 0;
int i = 0;
int segsize = m_countlength;
foreach (DefaultFacetCountCollector subcollector in m_subcollectors)
{
segsize = segsize / m_lens[i++];
idx += (subcollector.DataCache.OrderArray.Get(docid) * segsize);
}
m_count.Add(idx, m_count.Get(idx) + 1);
}
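// Worked example (illustrative): with two sub-facets whose value arrays have
// lengths {3, 4}, m_countlength is 12. A document whose sub-facet ordinals are
// (2, 1) is counted at idx = 2 * 4 + 1 * 1 = 9, i.e. the ordinals are packed
// into a single mixed-radix index over the combined count array.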
public virtual void CollectAll()
{
for (int i = 0; i < m_maxdoc; ++i)
{
Collect(i);
}
}
public virtual BigSegmentedArray GetCountDistribution()
{
return m_count;
}
public virtual string Name
{
get { return m_name; }
}
public virtual BrowseFacet GetFacet(string value)
{
string[] vals = value.Split(new string[] { m_sep }, StringSplitOptions.RemoveEmptyEntries);
if (vals.Length == 0)
return null;
StringBuilder buf = new StringBuilder();
int startIdx = 0;
int segLen = m_countlength;
for (int i = 0; i < vals.Length; ++i)
{
if (i > 0)
{
buf.Append(m_sep);
}
int index = m_subcollectors[i].DataCache.ValArray.IndexOf(vals[i]);
string facetName = m_subcollectors[i].DataCache.ValArray.Get(index);
buf.Append(facetName);
segLen /= m_subcollectors[i].CountLength;
startIdx += index * segLen;
}
int count = 0;
for (int i = startIdx; i < startIdx + segLen; ++i)
{
count += m_count.Get(i);
}
BrowseFacet f = new BrowseFacet(buf.ToString(), count);
return f;
}
public virtual int GetFacetHitsCount(object value)
{
string[] vals = ((string)value).Split(new string[] { m_sep }, StringSplitOptions.RemoveEmptyEntries);
if (vals.Length == 0) return 0;
int startIdx = 0;
int segLen = m_countlength;
for (int i = 0; i < vals.Length; ++i)
{
int index = m_subcollectors[i].DataCache.ValArray.IndexOf(vals[i]);
segLen /= m_subcollectors[i].CountLength;
startIdx += index * segLen;
}
int count = 0;
for (int i = startIdx; i < startIdx + segLen; ++i)
count += m_count.Get(i);
return count;
}
private string GetFacetString(int idx)
{
StringBuilder buf = new StringBuilder();
int i = 0;
foreach (int len in m_lens)
{
if (i > 0)
{
buf.Append(m_sep);
}
int adjusted = idx * len;
int bucket = adjusted / m_countlength;
buf.Append(m_subcollectors[i].DataCache.ValArray.Get(bucket));
idx = adjusted % m_countlength;
i++;
}
return buf.ToString();
}
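// Worked example (illustrative): continuing the {3, 4} case above, idx = 9
// decodes as bucket 27 / 12 = 2 for the first sub-facet (remainder 3), then
// bucket 12 / 12 = 1 for the second, recovering the ordinals (2, 1) that
// Collect() packed together.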
private object[] GetRawFaceValue(int idx)
{
object[] retVal = new object[m_lens.Length];
int i = 0;
foreach (int len in m_lens)
{
int adjusted = idx * len;
int bucket = adjusted / m_countlength;
// Read from the current sub-collector before advancing the index; combining
// the increment with the array indexer would make the right-hand side use
// the already-incremented value and skip the first sub-collector.
retVal[i] = m_subcollectors[i].DataCache.ValArray.GetRawValue(bucket);
idx = adjusted % m_countlength;
i++;
}
return retVal;
}
public virtual ICollection<BrowseFacet> GetFacets()
{
if (m_fspec != null)
{
int minCount = m_fspec.MinHitCount;
int max = m_fspec.MaxCount;
if (max <= 0)
max = m_countlength;
FacetSpec.FacetSortSpec sortspec = m_fspec.OrderBy;
List<BrowseFacet> facetColl;
if (sortspec == FacetSpec.FacetSortSpec.OrderValueAsc)
{
facetColl = new List<BrowseFacet>(max);
for (int i = 1; i < m_countlength; ++i) // exclude zero
{
int hits = m_count.Get(i);
if (hits >= minCount)
{
BrowseFacet facet = new BrowseFacet(GetFacetString(i), hits);
facetColl.Add(facet);
}
if (facetColl.Count >= max)
break;
}
}
else
{
IComparerFactory comparerFactory;
if (sortspec == FacetSpec.FacetSortSpec.OrderHitsDesc)
{
comparerFactory = new FacetHitcountComparerFactory();
}
else
{
comparerFactory = m_fspec.CustomComparerFactory;
}
if (comparerFactory == null)
{
throw new System.ArgumentException("facet comparer factory not specified");
}
IComparer<int> comparer = comparerFactory.NewComparer(new GroupbyFieldValueAccessor(this.GetFacetString, this.GetRawFaceValue), m_count);
facetColl = new List<BrowseFacet>();
int forbidden = -1;
Int32BoundedPriorityQueue pq = new Int32BoundedPriorityQueue(comparer, max, forbidden);
for (int i = 1; i < m_countlength; ++i) // exclude zero
{
int hits = m_count.Get(i);
if (hits >= minCount)
{
if (!pq.Offer(i))
{
// pq is full. We can safely ignore any facet whose count is <= hits.
minCount = hits + 1;
}
}
}
int val;
while ((val = pq.Poll()) != forbidden)
{
BrowseFacet facet = new BrowseFacet(GetFacetString(val), m_count.Get(val));
facetColl.Insert(0, facet);
}
}
return facetColl;
}
else
{
return FacetCountCollector.EMPTY_FACET_LIST;
}
}
private class GroupbyFieldValueAccessor : IFieldValueAccessor
{
private readonly Func<int, string> getFacetString;
private readonly Func<int, object> getRawFaceValue;
public GroupbyFieldValueAccessor(Func<int, string> getFacetString, Func<int, object> getRawFaceValue)
{
this.getFacetString = getFacetString;
this.getRawFaceValue = getRawFaceValue;
}
public string GetFormatedValue(int index)
{
return getFacetString(index);
}
public object GetRawValue(int index)
{
return getRawFaceValue(index);
}
}
public virtual void Dispose()
{
}
public virtual FacetIterator GetIterator()
{
return new GroupByFacetIterator(this);
}
public class GroupByFacetIterator : FacetIterator
{
private readonly GroupbyFacetCountCollector m_parent;
private int m_index;
public GroupByFacetIterator(GroupbyFacetCountCollector parent)
{
m_parent = parent;
m_index = 0;
m_facet = null;
m_count = 0;
}
/// <summary>
/// (non-Javadoc)
/// see com.browseengine.bobo.api.FacetIterator#next()
/// </summary>
/// <returns></returns>
public override string Next()
{
if ((m_index >= 0) && !HasNext())
throw new IndexOutOfRangeException("No more facets in this iteration");
m_index++;
m_facet = m_parent.GetFacetString(m_index);
m_count = m_parent.m_count.Get(m_index);
return m_facet;
}
/// <summary>
/// (non-Javadoc)
/// see java.util.Iterator#hasNext()
/// </summary>
/// <returns></returns>
public override bool HasNext()
{
return (m_index < (m_parent.m_countlength - 1));
}
// BoboBrowse.Net: Not supported in .NET anyway
///// <summary>
///// (non-Javadoc)
///// see java.util.Iterator#remove()
///// </summary>
//public override void Remove()
//{
// throw new NotSupportedException("remove() method not supported for Facet Iterators");
//}
/// <summary>
/// (non-Javadoc)
/// see com.browseengine.bobo.api.FacetIterator#next(int)
/// </summary>
/// <param name="minHits"></param>
/// <returns></returns>
public override string Next(int minHits)
{
if ((m_index >= 0) && !HasNext())
{
m_count = 0;
m_facet = null;
return null;
}
do
{
m_index++;
} while ((m_index < (m_parent.m_countlength - 1)) && (m_parent.m_count.Get(m_index) < minHits));
if (m_parent.m_count.Get(m_index) >= minHits)
{
m_facet = m_parent.GetFacetString(m_index);
m_count = m_parent.m_count.Get(m_index);
}
else
{
m_count = 0;
m_facet = null;
}
return m_facet;
}
/// <summary>
/// The string from here should be already formatted. No need to reformat.
/// see com.browseengine.bobo.api.FacetIterator#format(java.lang.Object)
/// </summary>
/// <param name="val"></param>
/// <returns></returns>
public override string Format(object val)
{
return (string)val;
}
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Globalization;
using System.Linq;
using Mercurial.Attributes;
namespace Mercurial
{
/// <summary>
/// This class implements the "hg diff" command (<see href="http://www.selenic.com/mercurial/hg.1.html#diff"/>):
/// diff repository (or selected files).
/// </summary>
public sealed class DiffCommand : IncludeExcludeCommandBase<DiffCommand>, IMercurialCommand<string>
{
/// <summary>
/// This is the backing field for the <see cref="Names"/> property.
/// </summary>
private readonly ListFile _Names = new ListFile();
/// <summary>
/// This is the backing field for the <see cref="Revisions"/> property.
/// </summary>
private readonly List<RevSpec> _Revisions = new List<RevSpec>();
/// <summary>
/// Initializes a new instance of the <see cref="DiffCommand"/> class.
/// </summary>
public DiffCommand()
: base("diff")
{
// Do nothing here
}
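// A minimal usage sketch of the fluent interface (the file name is hypothetical):
//   var cmd = new DiffCommand()
//       .WithNames("Program.cs")
//       .WithUseGitDiffFormat()
//       .WithContextLineCount(5);
// Once the command has been executed against a repository, cmd.Result holds
// the raw "hg diff" output.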
/// <summary>
/// Gets the collection <see cref="RevSpec"/> that identifies the revision(s) or the
/// revision range(s) to view a diff of.
/// </summary>
[RepeatableArgument(Option = "--rev")]
public Collection<RevSpec> Revisions
{
get
{
return new Collection<RevSpec>(_Revisions);
}
}
/// <summary>
/// Gets or sets the <see cref="RevSpec"/> identifying a revision, where all changes
/// introduced by that changeset will be returned.
/// </summary>
[NullableArgument(NonNullOption = "--change")]
[DefaultValue(null)]
public RevSpec ChangeIntroducedByRevision
{
get;
set;
}
/// <summary>
/// Gets or sets a value indicating whether to omit dates from diff headers.
/// Default value is <c>false</c>.
/// </summary>
[BooleanArgument(TrueOption = "--nodates")]
[DefaultValue(false)]
public bool OmitDatesFromHeaders
{
get;
set;
}
/// <summary>
/// Adds the specified value to the <see cref="Revisions"/> property and
/// returns this <see cref="DiffCommand"/> instance.
/// </summary>
/// <param name="value">
/// The value to add to the <see cref="Revisions"/> property.
/// </param>
/// <returns>
/// This <see cref="DiffCommand"/> instance.
/// </returns>
/// <remarks>
/// This method is part of the fluent interface.
/// </remarks>
public DiffCommand WithRevisions(RevSpec value)
{
Revisions.Add(value);
return this;
}
/// <summary>
/// Gets the collection of names (of files or directories) to include in the diff.
/// </summary>
public Collection<string> Names
{
get
{
return _Names.Collection;
}
}
/// <summary>
/// Adds the values to the <see cref="Names"/> collection property and
/// returns this <see cref="DiffCommand"/> instance.
/// </summary>
/// <param name="values">
/// The values to add to the <see cref="Names"/> collection property.
/// </param>
/// <returns>
/// This <see cref="DiffCommand"/> instance.
/// </returns>
/// <remarks>
/// This method is part of the fluent interface.
/// </remarks>
public DiffCommand WithNames(params string[] values)
{
if (values != null)
Names.AddRange(values);
return this;
}
/// <summary>
/// Gets or sets a value indicating whether to use the git extended diff format.
/// Default is <c>false</c>.
/// </summary>
[BooleanArgument(TrueOption = "--git")]
[DefaultValue(false)]
public bool UseGitDiffFormat
{
get;
set;
}
/// <summary>
/// Gets or sets a value indicating whether to treat all files as text.
/// Default value is <c>false</c>.
/// </summary>
[BooleanArgument(TrueOption = "--text")]
[DefaultValue(false)]
public bool TreatAllFilesAsText
{
get;
set;
}
/// <summary>
/// Gets or sets a value indicating whether to show which function each change is in.
/// Default value is <c>false</c>.
/// </summary>
[BooleanArgument(TrueOption = "--show-function")]
[DefaultValue(false)]
public bool ShowFunctions
{
get;
set;
}
/// <summary>
/// Gets or sets a value indicating whether to produce a reversal diff, one that would undo
/// the change introduced by the changeset(s).
/// Default value is <c>false</c>.
/// </summary>
[BooleanArgument(TrueOption = "--reverse")]
[DefaultValue(false)]
public bool ProduceReverseDiff
{
get;
set;
}
/// <summary>
/// Gets or sets a value indicating whether to recurse into subrepositories.
/// Default is <c>false</c>.
/// </summary>
[BooleanArgument(TrueOption = "--subrepos")]
[DefaultValue(false)]
public bool RecurseSubRepositories
{
get;
set;
}
/// <summary>
/// Gets or sets the types of changes to ignore.
/// Default value is <see cref="DiffIgnores.None"/>.
/// </summary>
[DefaultValue(DiffIgnores.None)]
public DiffIgnores Ignore
{
get;
set;
}
/// <summary>
/// Gets or sets the number of lines of context to show for each diff. Use 0 to leave at default.
/// Default value is <c>0</c>.
/// </summary>
[DefaultValue(0)]
public int ContextLineCount
{
get;
set;
}
/// <summary>
/// Gets or sets a value indicating whether to output diffstat-style of summary of changes instead
/// of the full diff.
/// Default value is <c>false</c>.
/// </summary>
[BooleanArgument(TrueOption = "--stat")]
[DefaultValue(false)]
public bool SummaryOnly
{
get;
set;
}
/// <summary>
/// Sets the <see cref="RecurseSubRepositories"/> property to the specified value and
/// returns this <see cref="DiffCommand"/> instance.
/// </summary>
/// <param name="value">
/// The new value for the <see cref="RecurseSubRepositories"/> property,
/// defaults to <c>true</c>.
/// </param>
/// <returns>
/// This <see cref="DiffCommand"/> instance.
/// </returns>
/// <remarks>
/// This method is part of the fluent interface.
/// </remarks>
public DiffCommand WithRecurseSubRepositories(bool value)
{
RecurseSubRepositories = value;
return this;
}
/// <summary>
/// Sets the <see cref="UseGitDiffFormat"/> property to the specified value and
/// returns this <see cref="DiffCommand"/> instance.
/// </summary>
/// <param name="value">
/// The new value for the <see cref="UseGitDiffFormat"/> property,
/// defaults to <c>true</c>.
/// </param>
/// <returns>
/// This <see cref="DiffCommand"/> instance.
/// </returns>
/// <remarks>
/// This method is part of the fluent interface.
/// </remarks>
[Obsolete("Use WithUseGitDiffFormat instead")]
public DiffCommand WithGitDiffFormat(bool value = true)
{
UseGitDiffFormat = value;
return this;
}
/// <summary>
/// Sets the <see cref="ChangeIntroducedByRevision"/> property to the specified value and
/// returns this <see cref="DiffCommand"/> instance.
/// </summary>
/// <param name="value">
/// The new value for the <see cref="ChangeIntroducedByRevision"/> property,
/// defaults to <c>true</c>.
/// </param>
/// <returns>
/// This <see cref="DiffCommand"/> instance.
/// </returns>
/// <remarks>
/// This method is part of the fluent interface.
/// </remarks>
public DiffCommand WithChangeIntroducedByRevision(RevSpec value)
{
ChangeIntroducedByRevision = value;
return this;
}
/// <summary>
/// Sets the <see cref="Ignore"/> property to the specified value and
/// returns this <see cref="DiffCommand"/> instance.
/// </summary>
/// <param name="value">
/// The new value for the <see cref="Ignore"/> property,
/// defaults to <c>true</c>.
/// </param>
/// <returns>
/// This <see cref="DiffCommand"/> instance.
/// </returns>
/// <remarks>
/// This method is part of the fluent interface.
/// </remarks>
public DiffCommand WithIgnore(DiffIgnores value)
{
Ignore = value;
return this;
}
/// <summary>
/// Sets the <see cref="UseGitDiffFormat"/> property to the specified value and
/// returns this <see cref="DiffCommand"/> instance.
/// </summary>
/// <param name="value">
/// The new value for the <see cref="UseGitDiffFormat"/> property,
/// defaults to <c>true</c>.
/// </param>
/// <returns>
/// This <see cref="DiffCommand"/> instance.
/// </returns>
/// <remarks>
/// This method is part of the fluent interface.
/// </remarks>
public DiffCommand WithUseGitDiffFormat(bool value = true)
{
UseGitDiffFormat = value;
return this;
}
/// <summary>
/// Sets the <see cref="SummaryOnly"/> property to the specified value and
/// returns this <see cref="DiffCommand"/> instance.
/// </summary>
/// <param name="value">
/// The new value for the <see cref="SummaryOnly"/> property,
/// defaults to <c>true</c>.
/// </param>
/// <returns>
/// This <see cref="DiffCommand"/> instance.
/// </returns>
/// <remarks>
/// This method is part of the fluent interface.
/// </remarks>
public DiffCommand WithSummaryOnly(bool value = true)
{
SummaryOnly = value;
return this;
}
/// <summary>
/// Sets the <see cref="ContextLineCount"/> property to the specified value and
/// returns this <see cref="DiffCommand"/> instance.
/// </summary>
/// <param name="value">
/// The new value for the <see cref="ContextLineCount"/> property,
/// defaults to <c>true</c>.
/// </param>
/// <returns>
/// This <see cref="DiffCommand"/> instance.
/// </returns>
/// <remarks>
/// This method is part of the fluent interface.
/// </remarks>
public DiffCommand WithContextLineCount(int value)
{
ContextLineCount = value;
return this;
}
/// <summary>
/// Sets the <see cref="ProduceReverseDiff"/> property to the specified value and
/// returns this <see cref="DiffCommand"/> instance.
/// </summary>
/// <param name="value">
/// The new value for the <see cref="ProduceReverseDiff"/> property,
/// defaults to <c>true</c>.
/// </param>
/// <returns>
/// This <see cref="DiffCommand"/> instance.
/// </returns>
/// <remarks>
/// This method is part of the fluent interface.
/// </remarks>
public DiffCommand WithProduceReverseDiff(bool value = true)
{
ProduceReverseDiff = value;
return this;
}
/// <summary>
/// Sets the <see cref="ShowFunctions"/> property to the specified value and
/// returns this <see cref="DiffCommand"/> instance.
/// </summary>
/// <param name="value">
/// The new value for the <see cref="ShowFunctions"/> property,
/// defaults to <c>true</c>.
/// </param>
/// <returns>
/// This <see cref="DiffCommand"/> instance.
/// </returns>
/// <remarks>
/// This method is part of the fluent interface.
/// </remarks>
public DiffCommand WithShowFunctions(bool value = true)
{
ShowFunctions = value;
return this;
}
/// <summary>
/// Sets the <see cref="OmitDatesFromHeaders"/> property to the specified value and
/// returns this <see cref="DiffCommand"/> instance.
/// </summary>
/// <param name="value">
/// The new value for the <see cref="OmitDatesFromHeaders"/> property,
/// defaults to <c>true</c>.
/// </param>
/// <returns>
/// This <see cref="DiffCommand"/> instance.
/// </returns>
/// <remarks>
/// This method is part of the fluent interface.
/// </remarks>
public DiffCommand WithOmitDatesFromHeaders(bool value = true)
{
OmitDatesFromHeaders = value;
return this;
}
/// <summary>
/// Sets the <see cref="TreatAllFilesAsText"/> property to the specified value and
/// returns this <see cref="DiffCommand"/> instance.
/// </summary>
/// <param name="value">
/// The new value for the <see cref="TreatAllFilesAsText"/> property,
/// defaults to <c>true</c>.
/// </param>
/// <returns>
/// This <see cref="DiffCommand"/> instance.
/// </returns>
/// <remarks>
/// This method is part of the fluent interface.
/// </remarks>
public DiffCommand WithTreatAllFilesAsText(bool value = true)
{
TreatAllFilesAsText = value;
return this;
}
/// <summary>
/// This method should parse and store the appropriate execution result output
/// according to the type of data the command line client would return for
/// the command.
/// </summary>
/// <param name="exitCode">
/// The exit code from executing the command line client.
/// </param>
/// <param name="standardOutput">
/// The standard output from executing the command line client.
/// </param>
/// <remarks>
/// Note that as long as you descend from <see cref="MercurialCommandBase{T}"/> you're not required to call
/// the base method at all.
/// </remarks>
protected override void ParseStandardOutputForResults(int exitCode, string standardOutput)
{
base.ParseStandardOutputForResults(exitCode, standardOutput);
if (exitCode == 0)
Result = standardOutput;
}
/// <summary>
/// Gets the result from the command line execution, as an appropriately typed value.
/// </summary>
public string Result
{
get;
private set;
}
/// <summary>
/// Gets all the arguments to the <see cref="CommandBase{T}.Command"/>, or an
/// empty array if there are none.
/// </summary>
public override IEnumerable<string> Arguments
{
get
{
List<string> arguments = base.Arguments.Concat(_Names.GetArguments()).ToList();
if ((Ignore & DiffIgnores.WhiteSpace) != 0)
arguments.Add("--ignore-all-space");
if ((Ignore & DiffIgnores.ChangedWhiteSpace) != 0)
arguments.Add("--ignore-space-change");
if ((Ignore & DiffIgnores.BlankLines) != 0)
arguments.Add("--ignore-blank-lines");
if (ContextLineCount > 0)
{
arguments.Add("--unified");
arguments.Add(ContextLineCount.ToString(CultureInfo.InvariantCulture));
}
return arguments;
}
}
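// For example, setting Ignore to DiffIgnores.WhiteSpace | DiffIgnores.BlankLines
// and ContextLineCount to 3 appends "--ignore-all-space", "--ignore-blank-lines",
// "--unified" and "3" to the arguments built above.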
/// <summary>
/// Override this method to implement code that will execute after command
/// line execution.
/// </summary>
protected override void Cleanup()
{
base.Cleanup();
_Names.Cleanup();
}
/// <summary>
/// Validates the command configuration. This method should throw the necessary
/// exceptions to signal missing or incorrect configuration (like attempting to
/// add files to the repository without specifying which files to add.)
/// </summary>
/// <remarks>
/// Note that as long as you descend from <see cref="MercurialCommandBase{T}"/> you're not required to call
/// the base method at all.
/// </remarks>
/// <exception cref="InvalidOperationException">
/// The <see cref="ContextLineCount"/> property of the <see cref="DiffCommand"/> has to be 0 or higher.
/// </exception>
public override void Validate()
{
base.Validate();
if (ContextLineCount < 0)
throw new InvalidOperationException("The ContextLineCount property of the DiffCommand has to be 0 or higher");
}
}
}
| |
/*
* Created by: Leslie Sanford
*
* Last modified: 02/23/2005
*
* Contact: [email protected]
*/
using System;
using System.Collections;
using System.ComponentModel;
using System.Diagnostics;
namespace Sanford.Collections.Immutable
{
/// <summary>
/// Represents a collection of key-and-value pairs that are sorted by the
/// keys and are accessible by key.
/// </summary>
[ImmutableObject(true)]
public class SortedList : IEnumerable
{
#region SortedList Members
#region Class Fields
/// <summary>
/// An empty SortedList.
/// </summary>
public static readonly SortedList Empty = new SortedList();
#endregion
#region Instance Fields
// The comparer object used for making comparisons.
private IComparer comparer = null;
// The root of the AVL tree.
private IAvlNode root = AvlNode.NullNode;
// Represents the method responsible for comparing keys.
private delegate int CompareHandler(object x, object y);
// The actual delegate to use for comparing keys.
private CompareHandler compareHandler;
#endregion
#region Construction
/// <summary>
/// Initializes a new instance of the SortedList class that is empty
/// and is sorted according to the IComparable interface implemented by
/// each key added to the SortedList.
/// </summary>
public SortedList()
{
InitializeCompareHandler();
}
/// <summary>
/// Initializes a new instance of the SortedList class that is empty
/// and is sorted according to the specified IComparer interface.
/// </summary>
/// <param name="comparer">
/// The IComparer implementation to use when comparing keys, or a null
/// reference to use the IComparable implementation of each key.
/// </param>
public SortedList(IComparer comparer)
{
this.comparer = comparer;
InitializeCompareHandler();
}
/// <summary>
/// Initializes a new instance of the SortedList class with the
/// specified root node and the IComparer interface to use for sorting
/// keys.
/// </summary>
/// <param name="root">
/// The root of the AVL tree.
/// </param>
/// <param name="comparer">
/// The IComparer implementation to use when comparing keys, or a null
/// reference to use the IComparable implementation of each key.
/// </param>
private SortedList(IAvlNode root, IComparer comparer)
{
this.root = root;
this.comparer = comparer;
InitializeCompareHandler();
}
#endregion
#region Methods
/// <summary>
/// Adds an element with the specified key and value to the SortedList.
/// </summary>
/// <param name="key">
/// The key of the element to add.
/// </param>
/// <param name="value">
/// The value of the element to add. The value can be a null reference.
/// </param>
/// <returns>
/// A new SortedList with the specified key and value added to the
/// previous SortedList.
/// </returns>
/// <exception cref="ArgumentNullException">
/// <i>key</i> is a null reference.
/// </exception>
/// <exception cref="ArgumentException">
/// An element with the specified key already exists in the SortedList,
/// or The SortedList is set to use the IComparable interface, and key
/// does not implement the IComparable interface.
/// </exception>
public SortedList Add(object key, object value)
{
// Preconditions.
if(key == null)
{
throw new ArgumentNullException("key",
"Key cannot be null.");
}
else if(comparer == null && !(key is IComparable))
{
throw new ArgumentException(
"Key does not implement IComparable interface.");
}
return new SortedList(
Add(key, value, root),
comparer);
}
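// A minimal usage sketch illustrating the immutability of the collection:
//   SortedList empty = SortedList.Empty;
//   SortedList one = empty.Add("b", 2);
//   SortedList two = one.Add("a", 1);
// empty.Count stays 0 and one.Count stays 1; each Add returns a new list and
// leaves the instance it was called on unchanged.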
/// <summary>
/// Determines whether the SortedList contains a specific key.
/// </summary>
/// <param name="key">
/// The key to locate in the SortedList.
/// </param>
/// <returns>
/// <b>true</b> if the SortedList contains an element with the
/// specified <i>key</i>; otherwise, <b>false</b>.
/// </returns>
public bool Contains(object key)
{
return this[key] != null;
}
/// <summary>
/// Returns an IDictionaryEnumerator that can iterate through the
/// SortedList.
/// </summary>
/// <returns>
/// An IDictionaryEnumerator for the SortedList.
/// </returns>
public IDictionaryEnumerator GetEnumerator()
{
return new SortedListEnumerator(root);
}
/// <summary>
/// Removes the element with the specified key from SortedList.
/// </summary>
/// <param name="key">
/// </param>
/// <returns>
/// The <i>key</i> of the element to remove.
/// </returns>
/// <exception cref="ArgumentNullException">
/// <i>key</i> is a null reference.
/// </exception>
/// <exception cref="ArgumentException">
/// The SortedList is set to use the IComparable interface, and key
/// does not implement the IComparable interface.
/// </exception>
public SortedList Remove(object key)
{
// Preconditions.
if(key == null)
{
throw new ArgumentNullException("key",
"Key cannot be null.");
}
else if(comparer == null && !(key is IComparable))
{
throw new ArgumentException(
"Key does not implement IComparable interface.");
}
return new SortedList(Remove(key, root), comparer);
}
// Initializes the delegate to use for making key comparisons.
private void InitializeCompareHandler()
{
if(comparer == null)
{
compareHandler = new CompareHandler(CompareWithoutComparer);
}
else
{
compareHandler = new CompareHandler(CompareWithComparer);
}
}
// Method for comparing keys using the IComparable interface.
private int CompareWithoutComparer(object x, object y)
{
return ((IComparable)x).CompareTo(y);
}
// Method for comparing keys using the provided comparer.
private int CompareWithComparer(object x, object y)
{
return comparer.Compare(x, y);
}
// Adds key/value pair to the internal AVL tree.
private IAvlNode Add(object key, object value, IAvlNode node)
{
IAvlNode result;
// If the bottom of the tree has been reached.
if(node == AvlNode.NullNode)
{
// Create new node representing the new key/value pair.
result = new AvlNode(
new DictionaryEntry(key, value),
AvlNode.NullNode,
AvlNode.NullNode);
}
// Else the bottom of the tree has not been reached.
else
{
DictionaryEntry entry = (DictionaryEntry)node.Data;
int compareResult = compareHandler(key, entry.Key);
// If the specified key is less than the current key.
if(compareResult < 0)
{
// Create new node and continue searching to the left.
result = new AvlNode(
node.Data,
Add(key, value, node.LeftChild),
node.RightChild);
}
// Else the specified key is greater than the current key.
else if(compareResult > 0)
{
// Create new node and continue searching to the right.
result = new AvlNode(
node.Data,
node.LeftChild,
Add(key, value, node.RightChild));
}
// Else the specified key is equal to the current key.
else
{
// Throw exception. Duplicate keys are not allowed.
throw new ArgumentException(
"Item is already in the collection.");
}
}
// If the current node is not balanced.
if(!result.IsBalanced())
{
// Balance node.
result = result.Balance();
}
return result;
}
// Search for the node with the specified key.
private object Search(object key, IAvlNode node)
{
object result;
// If the key is not in the SortedList.
if(node == AvlNode.NullNode)
{
// Result is null.
result = null;
}
// Else the key has not yet been found.
else
{
DictionaryEntry entry = (DictionaryEntry)node.Data;
int compareResult = compareHandler(key, entry.Key);
// If the specified key is less than the current key.
if(compareResult < 0)
{
// Search to the left.
result = Search(key, node.LeftChild);
}
// Else if the specified key is greater than the current key.
else if(compareResult > 0)
{
// Search to the right.
result = Search(key, node.RightChild);
}
// Else the key has been found.
else
{
// Get value.
result = entry.Value;
}
}
return result;
}
// Remove the node with the specified key.
private IAvlNode Remove(object key, IAvlNode node)
{
IAvlNode result;
// If the key does not exist in the SortedList.
if(node == AvlNode.NullNode)
{
// Result is null.
result = node;
}
// Else the key has not yet been found.
else
{
DictionaryEntry entry = (DictionaryEntry)node.Data;
int compareResult = compareHandler(key, entry.Key);
// If the specified key is less than the current key.
if(compareResult < 0)
{
// Create node and continue searching to the left.
result = new AvlNode(
node.Data,
Remove(key, node.LeftChild),
node.RightChild);
}
// Else if the specified key is greater than the current key.
else if(compareResult > 0)
{
// Create node and continue searching to the right.
result = new AvlNode(
node.Data,
node.LeftChild,
Remove(key, node.RightChild));
}
// Else the node to remove has been found.
else
{
// Remove node.
result = node.Remove();
}
}
// If the node is out of balance.
if(!result.IsBalanced())
{
// Rebalance node.
result = result.Balance();
}
// Postconditions.
Debug.Assert(result.IsBalanced());
return result;
}
#endregion
#region Properties
/// <summary>
/// Gets the value associated with the specified key.
/// </summary>
public object this[object key]
{
get
{
return Search(key, root);
}
}
/// <summary>
/// Gets the number of elements contained in the SortedList.
/// </summary>
public int Count
{
get
{
return root.Count;
}
}
#endregion
#region SortedListEnumerator Class
/// <summary>
/// Provides functionality for iterating through a SortedList.
/// </summary>
private class SortedListEnumerator : IDictionaryEnumerator
{
#region SortedListEnumerator Members
#region Instance Fields
private AvlEnumerator enumerator;
#endregion
#region Construction
/// <summary>
/// Initializes a new instance of the SortedListEnumerator class
/// with the specified root of the AVL tree to iterate over.
/// </summary>
/// <param name="root">
/// The root of the AVL tree the SortedList uses internally.
/// </param>
public SortedListEnumerator(IAvlNode root)
{
enumerator = new AvlEnumerator(root);
}
#endregion
#endregion
#region IDictionaryEnumerator Members
public object Key
{
get
{
DictionaryEntry entry = (DictionaryEntry)enumerator.Current;
return entry.Key;
}
}
public object Value
{
get
{
DictionaryEntry entry = (DictionaryEntry)enumerator.Current;
return entry.Value;
}
}
public DictionaryEntry Entry
{
get
{
DictionaryEntry entry = (DictionaryEntry)enumerator.Current;
return entry;
}
}
#endregion
#region IEnumerator Members
public void Reset()
{
enumerator.Reset();
}
public object Current
{
get
{
return enumerator.Current;
}
}
public bool MoveNext()
{
return enumerator.MoveNext();
}
#endregion
}
#endregion
#endregion
#region IEnumerable Members
IEnumerator System.Collections.IEnumerable.GetEnumerator()
{
return new AvlEnumerator(root);
}
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reactive;
using System.Reactive.Concurrency;
using System.Reactive.Linq;
using System.Reactive.Subjects;
using System.Reactive.Threading.Tasks;
using System.Threading.Tasks;
using System.Windows.Input;
using ReactiveUIMicro;
namespace ReactiveUIMicro.Xaml
{
/// <summary>
/// IReactiveCommand is an Rx-enabled version of ICommand that is also an
/// Observable. Its Observable fires once for each invocation of
/// ICommand.Execute and its value is the CommandParameter that was
/// provided.
/// </summary>
public class ReactiveCommand : ICommand, IObservable<object>, IDisposable, IEnableLogger
{
/// <summary>
/// Creates a new ReactiveCommand object.
/// </summary>
/// <param name="canExecute">An Observable, often obtained via
/// ObservableFromProperty, that defines when the Command can
/// execute.</param>
/// <param name="scheduler">The scheduler to publish events on - default
/// is RxApp.DeferredScheduler.</param>
/// <param name="initialCondition">Initial CanExecute state</param>
public ReactiveCommand(IObservable<bool> canExecute = null, IScheduler scheduler = null, bool initialCondition = true)
{
canExecute = canExecute ?? Observable.Return(true).Concat(Observable.Never<bool>());
canExecute = canExecute.ObserveOn(scheduler ?? RxApp.DeferredScheduler);
commonCtor(scheduler, initialCondition);
_inner = canExecute.Subscribe(
_canExecuteSubject.OnNext,
_exSubject.OnNext);
ThrownExceptions = _exSubject;
}
protected ReactiveCommand(Func<object, Task<bool>> canExecuteFunc, IScheduler scheduler = null)
{
var canExecute = _canExecuteProbed.SelectMany(x => canExecuteFunc(x).ToObservable());
commonCtor(scheduler);
_inner = canExecute.Subscribe(
_canExecuteSubject.OnNext,
_exSubject.OnNext);
}
protected ReactiveCommand(Func<object, bool> canExecute, IScheduler scheduler = null)
{
_canExecuteExplicitFunc = canExecute;
commonCtor(scheduler);
}
/// <summary>
/// Creates a new ReactiveCommand object in an imperative, non-Rx way,
/// similar to RelayCommand.
/// </summary>
/// <param name="canExecute">A function that determines when the Command
/// can execute.</param>
/// <param name="executed">A method that will be invoked when the
/// Execute method is invoked.</param>
/// <param name="scheduler">The scheduler to publish events on - default
/// is RxApp.DeferredScheduler.</param>
/// <returns>A new ReactiveCommand object.</returns>
public static ReactiveCommand Create(
Func<object, bool> canExecute,
Action<object> executed = null,
IScheduler scheduler = null)
{
var ret = new ReactiveCommand(canExecute, scheduler);
if (executed != null) {
ret.Subscribe(executed);
}
return ret;
}
/// <summary>
/// Creates a new ReactiveCommand object in an imperative, non-Rx way,
/// similar to RelayCommand, only via a TPL Async method
/// </summary>
/// <param name="canExecute">A function that determines when the Command
/// can execute.</param>
/// <param name="executed">A method that will be invoked when the
/// Execute method is invoked.</param>
/// <param name="scheduler">The scheduler to publish events on - default
/// is RxApp.DeferredScheduler.</param>
/// <returns>A new ReactiveCommand object.</returns>
public static ReactiveCommand Create(
Func<object, Task<bool>> canExecute,
Action<object> executed = null,
IScheduler scheduler = null)
{
var ret = new ReactiveCommand(canExecute, scheduler);
if (executed != null) {
ret.Subscribe(executed);
}
return ret;
}
public IObservable<Exception> ThrownExceptions { get; protected set; }
void commonCtor(IScheduler scheduler, bool initialCondition = true)
{
this.scheduler = scheduler ?? RxApp.DeferredScheduler;
_canExecuteSubject = new ScheduledSubject<bool>(RxApp.DeferredScheduler);
canExecuteLatest = new ObservableAsPropertyHelper<bool>(_canExecuteSubject,
b => { raiseCanExecuteChanged(EventArgs.Empty); },
initialCondition, scheduler);
_canExecuteProbed = new Subject<object>();
executeSubject = new Subject<object>();
_exSubject = new ScheduledSubject<Exception>(RxApp.DeferredScheduler, RxApp.DefaultExceptionHandler);
ThrownExceptions = _exSubject;
}
Func<object, bool> _canExecuteExplicitFunc;
protected ISubject<bool> _canExecuteSubject;
protected Subject<object> _canExecuteProbed;
IDisposable _inner = null;
ScheduledSubject<Exception> _exSubject;
/// <summary>
/// Fires whenever the CanExecute of the ICommand changes.
/// </summary>
public IObservable<bool> CanExecuteObservable {
get { return _canExecuteSubject.DistinctUntilChanged(); }
}
ObservableAsPropertyHelper<bool> canExecuteLatest;
public virtual bool CanExecute(object parameter)
{
_canExecuteProbed.OnNext(parameter);
if (_canExecuteExplicitFunc != null) {
bool ret = _canExecuteExplicitFunc(parameter);
_canExecuteSubject.OnNext(ret);
return ret;
}
return canExecuteLatest.Value;
}
public event EventHandler CanExecuteChanged;
IScheduler scheduler;
Subject<object> executeSubject;
public void Execute(object parameter)
{
this.Log().Debug("{0:X}: Executed", this.GetHashCode());
executeSubject.OnNext(parameter);
}
public IDisposable Subscribe(IObserver<object> observer)
{
return executeSubject.ObserveOn(scheduler).Subscribe(
Observer.Create<object>(
x => marshalFailures(observer.OnNext, x),
ex => marshalFailures(observer.OnError, ex),
() => marshalFailures(observer.OnCompleted)));
}
public void Dispose()
{
if (_inner != null) {
_inner.Dispose();
}
}
void marshalFailures<T>(Action<T> block, T param)
{
try {
block(param);
} catch (Exception ex) {
_exSubject.OnNext(ex);
}
}
void marshalFailures(Action block)
{
marshalFailures(_ => block(), Unit.Default);
}
protected virtual void raiseCanExecuteChanged(EventArgs e)
{
EventHandler handler = this.CanExecuteChanged;
if (handler != null) {
handler(this, e);
}
}
}
public static class ReactiveCommandMixins
{
/// <summary>
/// ToCommand is a convenience method for returning a new
/// ReactiveCommand based on an existing Observable chain.
/// </summary>
/// <param name="scheduler">The scheduler to publish events on - default
/// is RxApp.DeferredScheduler.</param>
/// <returns>A new ReactiveCommand whose CanExecute Observable is the
/// current object.</returns>
public static ReactiveCommand ToCommand(this IObservable<bool> This, IScheduler scheduler = null)
{
return new ReactiveCommand(This, scheduler);
}
/// <summary>
/// A utility method that will pipe an Observable to an ICommand (i.e.
/// it will first call its CanExecute with the provided value, then if
/// the command can be executed, Execute() will be called)
/// </summary>
/// <param name="command">The command to be executed.</param>
/// <returns>An object that, when disposed, disconnects the Observable
/// from the command.</returns>
public static IDisposable InvokeCommand<T>(this IObservable<T> This, ICommand command)
{
return This.ObserveOn(RxApp.DeferredScheduler).Subscribe(x => {
if (!command.CanExecute(x)) {
return;
}
command.Execute(x);
});
}
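// Usage sketch: pipe an observable of parameters into a command, respecting
// CanExecute. "searchTerms" and "searchCommand" are illustrative, not defined here.
//
// IDisposable hook = searchTerms.InvokeCommand(searchCommand);
// // dispose "hook" to disconnect the observable from the command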
}
}
// vim: tw=120 ts=4 sw=4 et :
| |
/********************************************************************
The Multiverse Platform is made available under the MIT License.
Copyright (c) 2012 The Multiverse Foundation
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.
*********************************************************************/
namespace Multiverse.Tools.WorldEditor
{
partial class AddMobDialog
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.spawnRadiusLabel = new System.Windows.Forms.Label();
this.spawnRadiusTextbox = new System.Windows.Forms.TextBox();
this.templateName = new System.Windows.Forms.Label();
this.templateNameTextbox = new System.Windows.Forms.TextBox();
this.label1 = new System.Windows.Forms.Label();
this.numSpawnsLabel = new System.Windows.Forms.Label();
this.respawnTimeTextbox = new System.Windows.Forms.TextBox();
this.numberOfSpawnsTextbox = new System.Windows.Forms.TextBox();
this.okButton = new System.Windows.Forms.Button();
this.cancelButton = new System.Windows.Forms.Button();
this.helpButton = new System.Windows.Forms.Button();
this.SuspendLayout();
//
// spawnRadiusLabel
//
this.spawnRadiusLabel.AutoSize = true;
this.spawnRadiusLabel.Location = new System.Drawing.Point(13, 93);
this.spawnRadiusLabel.Name = "spawnRadiusLabel";
this.spawnRadiusLabel.Size = new System.Drawing.Size(74, 13);
this.spawnRadiusLabel.TabIndex = 0;
this.spawnRadiusLabel.Text = "Spawn radius:";
this.spawnRadiusLabel.Visible = false;
//
// spawnRadiusTextbox
//
this.spawnRadiusTextbox.Location = new System.Drawing.Point(114, 90);
this.spawnRadiusTextbox.Name = "spawnRadiusTextbox";
this.spawnRadiusTextbox.Size = new System.Drawing.Size(166, 20);
this.spawnRadiusTextbox.TabIndex = 3;
this.spawnRadiusTextbox.Visible = false;
this.spawnRadiusTextbox.Validating += new System.ComponentModel.CancelEventHandler(this.floatVerifyevent);
//
// templateName
//
this.templateName.AutoSize = true;
this.templateName.Location = new System.Drawing.Point(13, 12);
this.templateName.Name = "templateName";
this.templateName.Size = new System.Drawing.Size(83, 13);
this.templateName.TabIndex = 2;
this.templateName.Text = "Template name:";
//
// templateNameTextbox
//
this.templateNameTextbox.Location = new System.Drawing.Point(114, 9);
this.templateNameTextbox.Name = "templateNameTextbox";
this.templateNameTextbox.Size = new System.Drawing.Size(166, 20);
this.templateNameTextbox.TabIndex = 0;
//
// label1
//
this.label1.AutoSize = true;
this.label1.Location = new System.Drawing.Point(13, 40);
this.label1.Name = "label1";
this.label1.Size = new System.Drawing.Size(77, 13);
this.label1.TabIndex = 4;
this.label1.Text = "Respawn time:";
//
// numSpawnsLabel
//
this.numSpawnsLabel.AutoSize = true;
this.numSpawnsLabel.Location = new System.Drawing.Point(13, 65);
this.numSpawnsLabel.Name = "numSpawnsLabel";
this.numSpawnsLabel.Size = new System.Drawing.Size(98, 13);
this.numSpawnsLabel.TabIndex = 5;
this.numSpawnsLabel.Text = "Number of spawns:";
//
// respawnTimeTextbox
//
this.respawnTimeTextbox.Location = new System.Drawing.Point(114, 37);
this.respawnTimeTextbox.Name = "respawnTimeTextbox";
this.respawnTimeTextbox.Size = new System.Drawing.Size(166, 20);
this.respawnTimeTextbox.TabIndex = 1;
//
// numberOfSpawnsTextbox
//
this.numberOfSpawnsTextbox.Location = new System.Drawing.Point(114, 62);
this.numberOfSpawnsTextbox.Name = "numberOfSpawnsTextbox";
this.numberOfSpawnsTextbox.Size = new System.Drawing.Size(166, 20);
this.numberOfSpawnsTextbox.TabIndex = 2;
//
// okButton
//
this.okButton.DialogResult = System.Windows.Forms.DialogResult.OK;
this.okButton.Location = new System.Drawing.Point(16, 116);
this.okButton.Name = "okButton";
this.okButton.Size = new System.Drawing.Size(75, 23);
this.okButton.TabIndex = 4;
this.okButton.Text = "Add";
this.okButton.UseVisualStyleBackColor = true;
//
// cancelButton
//
this.cancelButton.DialogResult = System.Windows.Forms.DialogResult.Cancel;
this.cancelButton.Location = new System.Drawing.Point(205, 116);
this.cancelButton.Name = "cancelButton";
this.cancelButton.Size = new System.Drawing.Size(75, 23);
this.cancelButton.TabIndex = 6;
this.cancelButton.Text = "&Cancel";
this.cancelButton.UseVisualStyleBackColor = true;
//
// helpButton
//
this.helpButton.Location = new System.Drawing.Point(124, 116);
this.helpButton.Name = "helpButton";
this.helpButton.Size = new System.Drawing.Size(75, 23);
this.helpButton.TabIndex = 5;
this.helpButton.Tag = "Spawn_Generator";
this.helpButton.Text = "Help";
this.helpButton.UseVisualStyleBackColor = true;
this.helpButton.Click += new System.EventHandler(this.helpButton_clicked);
//
// AddMobDialog
//
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.ClientSize = new System.Drawing.Size(297, 149);
this.Controls.Add(this.helpButton);
this.Controls.Add(this.cancelButton);
this.Controls.Add(this.okButton);
this.Controls.Add(this.numberOfSpawnsTextbox);
this.Controls.Add(this.respawnTimeTextbox);
this.Controls.Add(this.numSpawnsLabel);
this.Controls.Add(this.label1);
this.Controls.Add(this.templateNameTextbox);
this.Controls.Add(this.templateName);
this.Controls.Add(this.spawnRadiusTextbox);
this.Controls.Add(this.spawnRadiusLabel);
this.Name = "AddMobDialog";
this.ShowInTaskbar = false;
this.Text = "Add Spawn Generator";
this.ResumeLayout(false);
this.PerformLayout();
}
#endregion
private System.Windows.Forms.Label spawnRadiusLabel;
private System.Windows.Forms.TextBox spawnRadiusTextbox;
private System.Windows.Forms.Label templateName;
private System.Windows.Forms.TextBox templateNameTextbox;
private System.Windows.Forms.Label label1;
private System.Windows.Forms.Label numSpawnsLabel;
private System.Windows.Forms.TextBox respawnTimeTextbox;
private System.Windows.Forms.TextBox numberOfSpawnsTextbox;
private System.Windows.Forms.Button okButton;
private System.Windows.Forms.Button cancelButton;
private System.Windows.Forms.Button helpButton;
}
}
| |
/*
* Copyright (c) 2009, Stefan Simek
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
using System;
using System.Diagnostics;
using System.Reflection;
using System.Reflection.Emit;
namespace TriAxis.RunSharp
{
public sealed class DynamicMethodGen : RoutineGen<DynamicMethodGen>, ICodeGenContext
{
Attributes attrs;
DynamicMethod dm;
public static Attributes Static<TOwner>()
{
return Static(typeof(TOwner));
}
public static Attributes Static(Type owner)
{
return new Attributes(owner, false);
}
public static Attributes Static(Module owner)
{
return new Attributes(owner);
}
public static Attributes Instance(Type owner)
{
// Instance methods need asInstance = true so the constructor below adds the
// implicit "this" parameter; passing false would make this identical to Static.
return new Attributes(owner, true);
}
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1034:NestedTypesShouldNotBeVisible", Justification = "The type has to be public, but would be useless outside of this class")]
public sealed class Attributes
{
internal string name = "";
internal bool skipVisibility;
internal Type ownerType;
internal Module ownerModule;
internal bool asInstance;
internal Attributes(Type owner, bool asInstance)
{
this.ownerType = owner;
this.asInstance = asInstance;
}
internal Attributes(Module owner)
{
this.ownerModule = owner;
}
[DebuggerBrowsable(DebuggerBrowsableState.Never)]
public Attributes NoVisibilityChecks { get { skipVisibility = true; return this; } }
public Attributes WithName(string name)
{
this.name = name;
return this;
}
public DynamicMethodGen Void()
{
return Method(typeof(void));
}
public DynamicMethodGen Method<T>()
{
return Method(typeof(T));
}
public DynamicMethodGen Method(Type returnType)
{
return new DynamicMethodGen(this, returnType);
}
}
private DynamicMethodGen(Attributes attrs, Type returnType)
: base(attrs.ownerType, returnType)
{
this.attrs = attrs;
if (attrs.asInstance)
Parameter(attrs.ownerType, "this");
}
protected override void CreateMember()
{
if (attrs.ownerType != null)
this.dm = new DynamicMethod(attrs.name, ReturnType, ParameterTypes, attrs.ownerType, attrs.skipVisibility);
else
this.dm = new DynamicMethod(attrs.name, ReturnType, ParameterTypes, attrs.ownerModule, attrs.skipVisibility);
}
protected override void RegisterMember()
{
// nothing to register
}
public bool IsCompleted { get { return SignatureComplete && GetCode().IsCompleted; } }
public void Complete() { GetCode().Complete(); }
public DynamicMethod GetCompletedDynamicMethod()
{
return GetCompletedDynamicMethod(false);
}
public DynamicMethod GetCompletedDynamicMethod(bool completeIfNeeded)
{
if (completeIfNeeded)
Complete();
else if (!IsCompleted)
throw new InvalidOperationException(Properties.Messages.ErrDynamicMethodNotCompleted);
return dm;
}
#region RoutineGen concrete implementation
protected override bool HasCode
{
get { return true; }
}
protected override ILGenerator GetILGenerator()
{
return dm.GetILGenerator();
}
protected override ParameterBuilder DefineParameter(int position, ParameterAttributes attributes, string parameterName)
{
return dm.DefineParameter(position, attributes, parameterName);
}
protected override MemberInfo Member
{
get { return dm; }
}
public override string Name
{
get { return attrs.name; }
}
protected internal override bool IsStatic
{
get { return !attrs.asInstance; }
}
protected internal override bool IsOverride
{
get { return false; }
}
protected override AttributeTargets AttributeTarget
{
get { throw new InvalidOperationException(Properties.Messages.ErrDynamicMethodNoCustomAttrs); }
}
protected override void SetCustomAttribute(CustomAttributeBuilder cab)
{
throw new InvalidOperationException(Properties.Messages.ErrDynamicMethodNoCustomAttrs);
}
#endregion
#region ICodeGenContext Members
bool ICodeGenContext.SupportsScopes
{
get { return false; }
}
#endregion
}
public static class DynamicMethodExtensions
{
public static T CreateDelegate<T>(this DynamicMethod dm)
{
return (T)(object)dm.CreateDelegate(typeof(T));
}
}
}
| |
namespace RealArtists.ChargeBee.Models {
using System;
using System.ComponentModel;
using System.Net.Http;
using Newtonsoft.Json.Linq;
using RealArtists.ChargeBee.Api;
using RealArtists.ChargeBee.Internal;
using RealArtists.ChargeBee.Models.Enums;
public class HostedPageActions : ApiResourceActions {
public HostedPageActions(ChargeBeeApi api) : base(api) { }
public HostedPage.CheckoutNewRequest CheckoutNew() {
string url = BuildUrl("hosted_pages", "checkout_new");
return new HostedPage.CheckoutNewRequest(Api, url, HttpMethod.Post);
}
public HostedPage.CheckoutExistingRequest CheckoutExisting() {
string url = BuildUrl("hosted_pages", "checkout_existing");
return new HostedPage.CheckoutExistingRequest(Api, url, HttpMethod.Post);
}
public HostedPage.UpdatePaymentMethodRequest UpdatePaymentMethod() {
string url = BuildUrl("hosted_pages", "update_payment_method");
return new HostedPage.UpdatePaymentMethodRequest(Api, url, HttpMethod.Post);
}
public EntityRequest<Type> Retrieve(string id) {
string url = BuildUrl("hosted_pages", id);
return new EntityRequest<Type>(Api, url, HttpMethod.Get);
}
public EntityRequest<Type> Acknowledge(string id) {
string url = BuildUrl("hosted_pages", id, "acknowledge");
return new EntityRequest<Type>(Api, url, HttpMethod.Post);
}
public HostedPage.HostedPageListRequest List() {
string url = BuildUrl("hosted_pages");
return new HostedPage.HostedPageListRequest(Api, url);
}
}
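// Usage sketch: the action methods return fluent request builders defined on
// HostedPage below. How a request is ultimately sent (the EntityRequest send API)
// is outside this file, so it is elided; "api" is an illustrative ChargeBeeApi
// instance.
//
// var actions = new HostedPageActions(api);
// var checkout = actions.CheckoutNew()
// .SubscriptionPlanId("basic-monthly")
// .CustomerEmail("jane@example.com")
// .RedirectUrl("https://example.com/thanks");
// // ... send "checkout" via the EntityRequest API ...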
public class HostedPage : Resource {
public string Id {
get { return GetValue<string>("id", false); }
}
public TypeEnum? HostedPageType {
get { return GetEnum<TypeEnum>("type", false); }
}
public string Url {
get { return GetValue<string>("url", false); }
}
public StateEnum? State {
get { return GetEnum<StateEnum>("state", false); }
}
public string PassThruContent {
get { return GetValue<string>("pass_thru_content", false); }
}
public bool Embed {
get { return GetValue<bool>("embed", true); }
}
public DateTime? CreatedAt {
get { return GetDateTime("created_at", false); }
}
public DateTime? ExpiresAt {
get { return GetDateTime("expires_at", false); }
}
public DateTime? UpdatedAt {
get { return GetDateTime("updated_at", false); }
}
public long? ResourceVersion {
get { return GetValue<long?>("resource_version", false); }
}
public JToken CheckoutInfo {
get { return GetJToken("checkout_info", false); }
}
public HostedPageContent Content {
get {
if (GetValue<JToken>("content", false) == null) {
return null;
}
return new HostedPageContent(GetValue<JToken>("content"));
}
}
public class CheckoutNewRequest : EntityRequest<CheckoutNewRequest> {
public CheckoutNewRequest(ChargeBeeApi api, string url, HttpMethod method)
: base(api, url, method) {
}
public CheckoutNewRequest BillingCycles(int billingCycles) {
_params.AddOpt("billing_cycles", billingCycles);
return this;
}
public CheckoutNewRequest TermsToCharge(int termsToCharge) {
_params.AddOpt("terms_to_charge", termsToCharge);
return this;
}
public CheckoutNewRequest BillingAlignmentMode(ChargeBee.Models.Enums.BillingAlignmentModeEnum billingAlignmentMode) {
_params.AddOpt("billing_alignment_mode", billingAlignmentMode);
return this;
}
public CheckoutNewRequest RedirectUrl(string redirectUrl) {
_params.AddOpt("redirect_url", redirectUrl);
return this;
}
public CheckoutNewRequest CancelUrl(string cancelUrl) {
_params.AddOpt("cancel_url", cancelUrl);
return this;
}
public CheckoutNewRequest PassThruContent(string passThruContent) {
_params.AddOpt("pass_thru_content", passThruContent);
return this;
}
public CheckoutNewRequest Embed(bool embed) {
_params.AddOpt("embed", embed);
return this;
}
public CheckoutNewRequest IframeMessaging(bool iframeMessaging) {
_params.AddOpt("iframe_messaging", iframeMessaging);
return this;
}
public CheckoutNewRequest SubscriptionId(string subscriptionId) {
_params.AddOpt("subscription[id]", subscriptionId);
return this;
}
public CheckoutNewRequest CustomerId(string customerId) {
_params.AddOpt("customer[id]", customerId);
return this;
}
public CheckoutNewRequest CustomerEmail(string customerEmail) {
_params.AddOpt("customer[email]", customerEmail);
return this;
}
public CheckoutNewRequest CustomerFirstName(string customerFirstName) {
_params.AddOpt("customer[first_name]", customerFirstName);
return this;
}
public CheckoutNewRequest CustomerLastName(string customerLastName) {
_params.AddOpt("customer[last_name]", customerLastName);
return this;
}
public CheckoutNewRequest CustomerCompany(string customerCompany) {
_params.AddOpt("customer[company]", customerCompany);
return this;
}
public CheckoutNewRequest CustomerTaxability(TaxabilityEnum customerTaxability) {
_params.AddOpt("customer[taxability]", customerTaxability);
return this;
}
public CheckoutNewRequest CustomerLocale(string customerLocale) {
_params.AddOpt("customer[locale]", customerLocale);
return this;
}
public CheckoutNewRequest CustomerPhone(string customerPhone) {
_params.AddOpt("customer[phone]", customerPhone);
return this;
}
public CheckoutNewRequest SubscriptionPlanId(string subscriptionPlanId) {
_params.Add("subscription[plan_id]", subscriptionPlanId);
return this;
}
public CheckoutNewRequest SubscriptionPlanQuantity(int subscriptionPlanQuantity) {
_params.AddOpt("subscription[plan_quantity]", subscriptionPlanQuantity);
return this;
}
public CheckoutNewRequest SubscriptionPlanUnitPrice(int subscriptionPlanUnitPrice) {
_params.AddOpt("subscription[plan_unit_price]", subscriptionPlanUnitPrice);
return this;
}
public CheckoutNewRequest SubscriptionSetupFee(int subscriptionSetupFee) {
_params.AddOpt("subscription[setup_fee]", subscriptionSetupFee);
return this;
}
public CheckoutNewRequest SubscriptionStartDate(long subscriptionStartDate) {
_params.AddOpt("subscription[start_date]", subscriptionStartDate);
return this;
}
public CheckoutNewRequest SubscriptionTrialEnd(long subscriptionTrialEnd) {
_params.AddOpt("subscription[trial_end]", subscriptionTrialEnd);
return this;
}
public CheckoutNewRequest SubscriptionCoupon(string subscriptionCoupon) {
_params.AddOpt("subscription[coupon]", subscriptionCoupon);
return this;
}
public CheckoutNewRequest SubscriptionAutoCollection(AutoCollectionEnum subscriptionAutoCollection) {
_params.AddOpt("subscription[auto_collection]", subscriptionAutoCollection);
return this;
}
public CheckoutNewRequest SubscriptionInvoiceNotes(string subscriptionInvoiceNotes) {
_params.AddOpt("subscription[invoice_notes]", subscriptionInvoiceNotes);
return this;
}
public CheckoutNewRequest CardGatewayAccountId(string cardGatewayAccountId) {
_params.AddOpt("card[gateway_account_id]", cardGatewayAccountId);
return this;
}
public CheckoutNewRequest CustomerConsolidatedInvoicing(bool customerConsolidatedInvoicing) {
_params.AddOpt("customer[consolidated_invoicing]", customerConsolidatedInvoicing);
return this;
}
public CheckoutNewRequest BillingAddressFirstName(string billingAddressFirstName) {
_params.AddOpt("billing_address[first_name]", billingAddressFirstName);
return this;
}
public CheckoutNewRequest BillingAddressLastName(string billingAddressLastName) {
_params.AddOpt("billing_address[last_name]", billingAddressLastName);
return this;
}
public CheckoutNewRequest BillingAddressEmail(string billingAddressEmail) {
_params.AddOpt("billing_address[email]", billingAddressEmail);
return this;
}
public CheckoutNewRequest BillingAddressCompany(string billingAddressCompany) {
_params.AddOpt("billing_address[company]", billingAddressCompany);
return this;
}
public CheckoutNewRequest BillingAddressPhone(string billingAddressPhone) {
_params.AddOpt("billing_address[phone]", billingAddressPhone);
return this;
}
public CheckoutNewRequest BillingAddressLine1(string billingAddressLine1) {
_params.AddOpt("billing_address[line1]", billingAddressLine1);
return this;
}
public CheckoutNewRequest BillingAddressLine2(string billingAddressLine2) {
_params.AddOpt("billing_address[line2]", billingAddressLine2);
return this;
}
public CheckoutNewRequest BillingAddressLine3(string billingAddressLine3) {
_params.AddOpt("billing_address[line3]", billingAddressLine3);
return this;
}
public CheckoutNewRequest BillingAddressCity(string billingAddressCity) {
_params.AddOpt("billing_address[city]", billingAddressCity);
return this;
}
public CheckoutNewRequest BillingAddressStateCode(string billingAddressStateCode) {
_params.AddOpt("billing_address[state_code]", billingAddressStateCode);
return this;
}
public CheckoutNewRequest BillingAddressState(string billingAddressState) {
_params.AddOpt("billing_address[state]", billingAddressState);
return this;
}
public CheckoutNewRequest BillingAddressZip(string billingAddressZip) {
_params.AddOpt("billing_address[zip]", billingAddressZip);
return this;
}
public CheckoutNewRequest BillingAddressCountry(string billingAddressCountry) {
_params.AddOpt("billing_address[country]", billingAddressCountry);
return this;
}
public CheckoutNewRequest BillingAddressValidationStatus(ValidationStatusEnum billingAddressValidationStatus) {
_params.AddOpt("billing_address[validation_status]", billingAddressValidationStatus);
return this;
}
public CheckoutNewRequest ShippingAddressFirstName(string shippingAddressFirstName) {
_params.AddOpt("shipping_address[first_name]", shippingAddressFirstName);
return this;
}
public CheckoutNewRequest ShippingAddressLastName(string shippingAddressLastName) {
_params.AddOpt("shipping_address[last_name]", shippingAddressLastName);
return this;
}
public CheckoutNewRequest ShippingAddressEmail(string shippingAddressEmail) {
_params.AddOpt("shipping_address[email]", shippingAddressEmail);
return this;
}
public CheckoutNewRequest ShippingAddressCompany(string shippingAddressCompany) {
_params.AddOpt("shipping_address[company]", shippingAddressCompany);
return this;
}
public CheckoutNewRequest ShippingAddressPhone(string shippingAddressPhone) {
_params.AddOpt("shipping_address[phone]", shippingAddressPhone);
return this;
}
public CheckoutNewRequest ShippingAddressLine1(string shippingAddressLine1) {
_params.AddOpt("shipping_address[line1]", shippingAddressLine1);
return this;
}
public CheckoutNewRequest ShippingAddressLine2(string shippingAddressLine2) {
_params.AddOpt("shipping_address[line2]", shippingAddressLine2);
return this;
}
public CheckoutNewRequest ShippingAddressLine3(string shippingAddressLine3) {
_params.AddOpt("shipping_address[line3]", shippingAddressLine3);
return this;
}
public CheckoutNewRequest ShippingAddressCity(string shippingAddressCity) {
_params.AddOpt("shipping_address[city]", shippingAddressCity);
return this;
}
public CheckoutNewRequest ShippingAddressStateCode(string shippingAddressStateCode) {
_params.AddOpt("shipping_address[state_code]", shippingAddressStateCode);
return this;
}
public CheckoutNewRequest ShippingAddressState(string shippingAddressState) {
_params.AddOpt("shipping_address[state]", shippingAddressState);
return this;
}
public CheckoutNewRequest ShippingAddressZip(string shippingAddressZip) {
_params.AddOpt("shipping_address[zip]", shippingAddressZip);
return this;
}
public CheckoutNewRequest ShippingAddressCountry(string shippingAddressCountry) {
_params.AddOpt("shipping_address[country]", shippingAddressCountry);
return this;
}
public CheckoutNewRequest ShippingAddressValidationStatus(ValidationStatusEnum shippingAddressValidationStatus) {
_params.AddOpt("shipping_address[validation_status]", shippingAddressValidationStatus);
return this;
}
public CheckoutNewRequest AddonId(int index, string addonId) {
_params.AddOpt("addons[id][" + index + "]", addonId);
return this;
}
public CheckoutNewRequest AddonQuantity(int index, int addonQuantity) {
_params.AddOpt("addons[quantity][" + index + "]", addonQuantity);
return this;
}
public CheckoutNewRequest AddonUnitPrice(int index, int addonUnitPrice) {
_params.AddOpt("addons[unit_price][" + index + "]", addonUnitPrice);
return this;
}
}
public class CheckoutExistingRequest : EntityRequest<CheckoutExistingRequest> {
public CheckoutExistingRequest(ChargeBeeApi api, string url, HttpMethod method)
: base(api, url, method) {
}
public CheckoutExistingRequest BillingCycles(int billingCycles) {
_params.AddOpt("billing_cycles", billingCycles);
return this;
}
public CheckoutExistingRequest ReplaceAddonList(bool replaceAddonList) {
_params.AddOpt("replace_addon_list", replaceAddonList);
return this;
}
public CheckoutExistingRequest TermsToCharge(int termsToCharge) {
_params.AddOpt("terms_to_charge", termsToCharge);
return this;
}
public CheckoutExistingRequest ReactivateFrom(long reactivateFrom) {
_params.AddOpt("reactivate_from", reactivateFrom);
return this;
}
public CheckoutExistingRequest BillingAlignmentMode(ChargeBee.Models.Enums.BillingAlignmentModeEnum billingAlignmentMode) {
_params.AddOpt("billing_alignment_mode", billingAlignmentMode);
return this;
}
public CheckoutExistingRequest Reactivate(bool reactivate) {
_params.AddOpt("reactivate", reactivate);
return this;
}
public CheckoutExistingRequest ForceTermReset(bool forceTermReset) {
_params.AddOpt("force_term_reset", forceTermReset);
return this;
}
public CheckoutExistingRequest RedirectUrl(string redirectUrl) {
_params.AddOpt("redirect_url", redirectUrl);
return this;
}
public CheckoutExistingRequest CancelUrl(string cancelUrl) {
_params.AddOpt("cancel_url", cancelUrl);
return this;
}
public CheckoutExistingRequest PassThruContent(string passThruContent) {
_params.AddOpt("pass_thru_content", passThruContent);
return this;
}
public CheckoutExistingRequest Embed(bool embed) {
_params.AddOpt("embed", embed);
return this;
}
public CheckoutExistingRequest IframeMessaging(bool iframeMessaging) {
_params.AddOpt("iframe_messaging", iframeMessaging);
return this;
}
public CheckoutExistingRequest SubscriptionId(string subscriptionId) {
_params.Add("subscription[id]", subscriptionId);
return this;
}
public CheckoutExistingRequest SubscriptionPlanId(string subscriptionPlanId) {
_params.AddOpt("subscription[plan_id]", subscriptionPlanId);
return this;
}
public CheckoutExistingRequest SubscriptionPlanQuantity(int subscriptionPlanQuantity) {
_params.AddOpt("subscription[plan_quantity]", subscriptionPlanQuantity);
return this;
}
public CheckoutExistingRequest SubscriptionPlanUnitPrice(int subscriptionPlanUnitPrice) {
_params.AddOpt("subscription[plan_unit_price]", subscriptionPlanUnitPrice);
return this;
}
public CheckoutExistingRequest SubscriptionSetupFee(int subscriptionSetupFee) {
_params.AddOpt("subscription[setup_fee]", subscriptionSetupFee);
return this;
}
public CheckoutExistingRequest SubscriptionStartDate(long subscriptionStartDate) {
_params.AddOpt("subscription[start_date]", subscriptionStartDate);
return this;
}
public CheckoutExistingRequest SubscriptionTrialEnd(long subscriptionTrialEnd) {
_params.AddOpt("subscription[trial_end]", subscriptionTrialEnd);
return this;
}
public CheckoutExistingRequest SubscriptionCoupon(string subscriptionCoupon) {
_params.AddOpt("subscription[coupon]", subscriptionCoupon);
return this;
}
public CheckoutExistingRequest SubscriptionInvoiceNotes(string subscriptionInvoiceNotes) {
_params.AddOpt("subscription[invoice_notes]", subscriptionInvoiceNotes);
return this;
}
public CheckoutExistingRequest CardGatewayAccountId(string cardGatewayAccountId) {
_params.AddOpt("card[gateway_account_id]", cardGatewayAccountId);
return this;
}
public CheckoutExistingRequest AddonId(int index, string addonId) {
_params.AddOpt("addons[id][" + index + "]", addonId);
return this;
}
public CheckoutExistingRequest AddonQuantity(int index, int addonQuantity) {
_params.AddOpt("addons[quantity][" + index + "]", addonQuantity);
return this;
}
public CheckoutExistingRequest AddonUnitPrice(int index, int addonUnitPrice) {
_params.AddOpt("addons[unit_price][" + index + "]", addonUnitPrice);
return this;
}
}
public class UpdateCardRequest : EntityRequest<UpdateCardRequest> {
public UpdateCardRequest(ChargeBeeApi api, string url, HttpMethod method)
: base(api, url, method) {
}
public UpdateCardRequest RedirectUrl(string redirectUrl) {
_params.AddOpt("redirect_url", redirectUrl);
return this;
}
public UpdateCardRequest CancelUrl(string cancelUrl) {
_params.AddOpt("cancel_url", cancelUrl);
return this;
}
public UpdateCardRequest PassThruContent(string passThruContent) {
_params.AddOpt("pass_thru_content", passThruContent);
return this;
}
public UpdateCardRequest Embed(bool embed) {
_params.AddOpt("embed", embed);
return this;
}
public UpdateCardRequest IframeMessaging(bool iframeMessaging) {
_params.AddOpt("iframe_messaging", iframeMessaging);
return this;
}
public UpdateCardRequest CustomerId(string customerId) {
_params.Add("customer[id]", customerId);
return this;
}
public UpdateCardRequest CardGatewayAccountId(string cardGatewayAccountId) {
_params.AddOpt("card[gateway_account_id]", cardGatewayAccountId);
return this;
}
}
public class UpdatePaymentMethodRequest : EntityRequest<UpdatePaymentMethodRequest> {
public UpdatePaymentMethodRequest(ChargeBeeApi api, string url, HttpMethod method)
: base(api, url, method) {
}
public UpdatePaymentMethodRequest RedirectUrl(string redirectUrl) {
_params.AddOpt("redirect_url", redirectUrl);
return this;
}
public UpdatePaymentMethodRequest CancelUrl(string cancelUrl) {
_params.AddOpt("cancel_url", cancelUrl);
return this;
}
public UpdatePaymentMethodRequest PassThruContent(string passThruContent) {
_params.AddOpt("pass_thru_content", passThruContent);
return this;
}
public UpdatePaymentMethodRequest Embed(bool embed) {
_params.AddOpt("embed", embed);
return this;
}
public UpdatePaymentMethodRequest IframeMessaging(bool iframeMessaging) {
_params.AddOpt("iframe_messaging", iframeMessaging);
return this;
}
public UpdatePaymentMethodRequest CustomerId(string customerId) {
_params.Add("customer[id]", customerId);
return this;
}
public UpdatePaymentMethodRequest CardGatewayAccountId(string cardGatewayAccountId) {
_params.AddOpt("card[gateway_account_id]", cardGatewayAccountId);
return this;
}
}
public class HostedPageListRequest : ListRequestBase<HostedPageListRequest> {
public HostedPageListRequest(ChargeBeeApi api, string url)
: base(api, url) {
}
public StringFilter<HostedPageListRequest> Id() {
return new StringFilter<HostedPageListRequest>("id", this).SupportsMultiOperators(true);
}
public EnumFilter<HostedPage.TypeEnum, HostedPageListRequest> Type() {
return new EnumFilter<HostedPage.TypeEnum, HostedPageListRequest>("type", this);
}
public EnumFilter<HostedPage.StateEnum, HostedPageListRequest> State() {
return new EnumFilter<HostedPage.StateEnum, HostedPageListRequest>("state", this);
}
public TimestampFilter<HostedPageListRequest> UpdatedAt() {
return new TimestampFilter<HostedPageListRequest>("updated_at", this);
}
}
public enum TypeEnum {
Unknown,
[Description("checkout_new")]
CheckoutNew,
[Description("checkout_existing")]
CheckoutExisting,
[Description("update_payment_method")]
UpdatePaymentMethod,
}
public enum StateEnum {
Unknown,
[Description("created")]
Created,
[Description("requested")]
Requested,
[Description("succeeded")]
Succeeded,
[Description("cancelled")]
Cancelled,
[Description("acknowledged")]
Acknowledged,
}
public class HostedPageContent : ResultBase {
public HostedPageContent() { }
internal HostedPageContent(JToken jobj) {
_jobj = jobj;
}
}
}
}
| |
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
// <auto-generated />
namespace Northwind{
/// <summary>
/// Strongly-typed collection for the SummaryOfSalesByQuarter class.
/// </summary>
[Serializable]
public partial class SummaryOfSalesByQuarterCollection : ReadOnlyList<SummaryOfSalesByQuarter, SummaryOfSalesByQuarterCollection>
{
public SummaryOfSalesByQuarterCollection() {}
}
/// <summary>
/// This is a read-only wrapper class for the Summary of Sales by Quarter view.
/// </summary>
[Serializable]
public partial class SummaryOfSalesByQuarter : ReadOnlyRecord<SummaryOfSalesByQuarter>, IReadOnlyRecord
{
#region Default Settings
protected static void SetSQLProps()
{
GetTableSchema();
}
#endregion
#region Schema Accessor
public static TableSchema.Table Schema
{
get
{
if (BaseSchema == null)
{
SetSQLProps();
}
return BaseSchema;
}
}
private static void GetTableSchema()
{
if(!IsSchemaInitialized)
{
//Schema declaration
TableSchema.Table schema = new TableSchema.Table("Summary of Sales by Quarter", TableType.View, DataService.GetInstance("Northwind"));
schema.Columns = new TableSchema.TableColumnCollection();
schema.SchemaName = @"dbo";
//columns
TableSchema.TableColumn colvarShippedDate = new TableSchema.TableColumn(schema);
colvarShippedDate.ColumnName = "ShippedDate";
colvarShippedDate.DataType = DbType.DateTime;
colvarShippedDate.MaxLength = 0;
colvarShippedDate.AutoIncrement = false;
colvarShippedDate.IsNullable = true;
colvarShippedDate.IsPrimaryKey = false;
colvarShippedDate.IsForeignKey = false;
colvarShippedDate.IsReadOnly = false;
schema.Columns.Add(colvarShippedDate);
TableSchema.TableColumn colvarOrderID = new TableSchema.TableColumn(schema);
colvarOrderID.ColumnName = "OrderID";
colvarOrderID.DataType = DbType.Int32;
colvarOrderID.MaxLength = 0;
colvarOrderID.AutoIncrement = false;
colvarOrderID.IsNullable = false;
colvarOrderID.IsPrimaryKey = false;
colvarOrderID.IsForeignKey = false;
colvarOrderID.IsReadOnly = false;
schema.Columns.Add(colvarOrderID);
TableSchema.TableColumn colvarSubtotal = new TableSchema.TableColumn(schema);
colvarSubtotal.ColumnName = "Subtotal";
colvarSubtotal.DataType = DbType.Currency;
colvarSubtotal.MaxLength = 0;
colvarSubtotal.AutoIncrement = false;
colvarSubtotal.IsNullable = true;
colvarSubtotal.IsPrimaryKey = false;
colvarSubtotal.IsForeignKey = false;
colvarSubtotal.IsReadOnly = false;
schema.Columns.Add(colvarSubtotal);
BaseSchema = schema;
//add this schema to the provider
//so we can query it later
DataService.Providers["Northwind"].AddSchema("Summary of Sales by Quarter",schema);
}
}
#endregion
#region Query Accessor
public static Query CreateQuery()
{
return new Query(Schema);
}
#endregion
#region .ctors
public SummaryOfSalesByQuarter()
{
SetSQLProps();
SetDefaults();
MarkNew();
}
public SummaryOfSalesByQuarter(bool useDatabaseDefaults)
{
SetSQLProps();
if(useDatabaseDefaults)
{
ForceDefaults();
}
MarkNew();
}
public SummaryOfSalesByQuarter(object keyID)
{
SetSQLProps();
LoadByKey(keyID);
}
public SummaryOfSalesByQuarter(string columnName, object columnValue)
{
SetSQLProps();
LoadByParam(columnName,columnValue);
}
#endregion
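// Usage sketch using only constructors and properties declared in this class;
// the key value shown is illustrative.
//
// var row = new SummaryOfSalesByQuarter(SummaryOfSalesByQuarter.Columns.OrderID, 10248);
// decimal? subtotal = row.Subtotal;
// DateTime? shipped = row.ShippedDate;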
#region Props
[XmlAttribute("ShippedDate")]
[Bindable(true)]
public DateTime? ShippedDate
{
get
{
return GetColumnValue<DateTime?>("ShippedDate");
}
set
{
SetColumnValue("ShippedDate", value);
}
}
[XmlAttribute("OrderID")]
[Bindable(true)]
public int OrderID
{
get
{
return GetColumnValue<int>("OrderID");
}
set
{
SetColumnValue("OrderID", value);
}
}
[XmlAttribute("Subtotal")]
[Bindable(true)]
public decimal? Subtotal
{
get
{
return GetColumnValue<decimal?>("Subtotal");
}
set
{
SetColumnValue("Subtotal", value);
}
}
#endregion
#region Columns Struct
public struct Columns
{
public static string ShippedDate = @"ShippedDate";
public static string OrderID = @"OrderID";
public static string Subtotal = @"Subtotal";
}
#endregion
#region IAbstractRecord Members
public new CT GetColumnValue<CT>(string columnName) {
return base.GetColumnValue<CT>(columnName);
}
public object GetColumnValue(string columnName) {
return base.GetColumnValue<object>(columnName);
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// See the LICENSE file in the project root for more information.
// (C) Ameya Gargesh
// Copyright (C) 2004 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
using Xunit;
using System.Data.Common;
namespace System.Data.Tests.Common
{
public class DataColumnMappingCollectionTest : IDisposable
{
//DataTableMapping tableMap;
private DataColumnMappingCollection _columnMapCollection;
private DataColumnMapping[] _cols;
public DataColumnMappingCollectionTest()
{
_cols = new DataColumnMapping[5];
_cols[0] = new DataColumnMapping("sourceName", "dataSetName");
_cols[1] = new DataColumnMapping("sourceID", "dataSetID");
_cols[2] = new DataColumnMapping("sourceAddress", "dataSetAddress");
_cols[3] = new DataColumnMapping("sourcePhone", "dataSetPhone");
_cols[4] = new DataColumnMapping("sourcePIN", "dataSetPIN");
_columnMapCollection = new DataColumnMappingCollection();
}
public void Dispose()
{
_columnMapCollection.Clear();
}
[Fact]
public void Add()
{
DataColumnMapping col1 = new DataColumnMapping("sourceName", "dataSetName");
int t = _columnMapCollection.Add(col1);
Assert.Equal(0, t);
bool eq1 = col1.Equals(_columnMapCollection[0]);
Assert.Equal(true, eq1);
Assert.Equal(1, _columnMapCollection.Count);
DataColumnMapping col2;
col2 = _columnMapCollection.Add("sourceID", "dataSetID");
bool eq2 = col2.Equals(_columnMapCollection[1]);
Assert.Equal(true, eq2);
Assert.Equal(2, _columnMapCollection.Count);
}
[Fact]
public void AddException1()
{
Assert.Throws<InvalidCastException>(() =>
{
DataColumnMappingCollection c = new DataColumnMappingCollection();
_columnMapCollection.Add(c);
});
}
[Fact]
public void AddRange()
{
_columnMapCollection.Add(new DataColumnMapping("sourceAge", "dataSetAge"));
Assert.Equal(1, _columnMapCollection.Count);
_columnMapCollection.AddRange(_cols);
Assert.Equal(6, _columnMapCollection.Count);
bool eq;
eq = _cols[0].Equals(_columnMapCollection[1]);
Assert.Equal(true, eq);
eq = _cols[1].Equals(_columnMapCollection[2]);
Assert.Equal(true, eq);
eq = _cols[0].Equals(_columnMapCollection[0]);
Assert.Equal(false, eq);
eq = _cols[1].Equals(_columnMapCollection[0]);
Assert.Equal(false, eq);
}
[Fact]
public void Clear()
{
DataColumnMapping col1 = new DataColumnMapping("sourceName", "dataSetName");
_columnMapCollection.Add(col1);
Assert.Equal(1, _columnMapCollection.Count);
_columnMapCollection.Clear();
Assert.Equal(0, _columnMapCollection.Count);
_columnMapCollection.AddRange(_cols);
Assert.Equal(5, _columnMapCollection.Count);
_columnMapCollection.Clear();
Assert.Equal(0, _columnMapCollection.Count);
}
[Fact]
public void Contains()
{
DataColumnMapping col1 = new DataColumnMapping("sourceName", "dataSetName");
_columnMapCollection.AddRange(_cols);
bool eq;
eq = _columnMapCollection.Contains(_cols[0]);
Assert.Equal(true, eq);
eq = _columnMapCollection.Contains(_cols[1]);
Assert.Equal(true, eq);
eq = _columnMapCollection.Contains(col1);
Assert.Equal(false, eq);
eq = _columnMapCollection.Contains(_cols[0].SourceColumn);
Assert.Equal(true, eq);
eq = _columnMapCollection.Contains(_cols[1].SourceColumn);
Assert.Equal(true, eq);
eq = _columnMapCollection.Contains(col1.SourceColumn);
Assert.Equal(true, eq);
eq = _columnMapCollection.Contains(_cols[0].DataSetColumn);
Assert.Equal(false, eq);
eq = _columnMapCollection.Contains(_cols[1].DataSetColumn);
Assert.Equal(false, eq);
eq = _columnMapCollection.Contains(col1.DataSetColumn);
Assert.Equal(false, eq);
}
[Fact]
public void ContainsException1()
{
Assert.Throws<InvalidCastException>(() =>
{
object o = new object();
bool a = _columnMapCollection.Contains(o);
});
}
[Fact]
public void CopyTo()
{
DataColumnMapping[] colcops = new DataColumnMapping[5];
_columnMapCollection.AddRange(_cols);
_columnMapCollection.CopyTo(colcops, 0);
bool eq;
for (int i = 0; i < 5; i++)
{
eq = _columnMapCollection[i].Equals(colcops[i]);
Assert.Equal(true, eq);
}
colcops = null;
colcops = new DataColumnMapping[7];
_columnMapCollection.CopyTo(colcops, 2);
for (int i = 0; i < 5; i++)
{
eq = _columnMapCollection[i].Equals(colcops[i + 2]);
Assert.Equal(true, eq);
}
eq = _columnMapCollection[0].Equals(colcops[0]);
Assert.Equal(false, eq);
eq = _columnMapCollection[0].Equals(colcops[1]);
Assert.Equal(false, eq);
}
[Fact]
public void Equals()
{
// DataColumnMappingCollection collect2=new DataColumnMappingCollection();
_columnMapCollection.AddRange(_cols);
// collect2.AddRange(cols);
DataColumnMappingCollection copy1;
copy1 = _columnMapCollection;
// Assert.Equal (false, columnMapCollection.Equals(collect2));
Assert.Equal(true, _columnMapCollection.Equals(copy1));
// Assert.Equal (false, collect2.Equals(columnMapCollection));
Assert.Equal(true, copy1.Equals(_columnMapCollection));
// Assert.Equal (false, collect2.Equals(copy1));
Assert.Equal(true, copy1.Equals(_columnMapCollection));
Assert.Equal(true, _columnMapCollection.Equals(_columnMapCollection));
// Assert.Equal (true, collect2.Equals(collect2));
Assert.Equal(true, copy1.Equals(copy1));
// Assert.Equal (false, Object.Equals(collect2, columnMapCollection));
Assert.Equal(true, object.Equals(copy1, _columnMapCollection));
// Assert.Equal (false, Object.Equals(columnMapCollection, collect2));
Assert.Equal(true, object.Equals(_columnMapCollection, copy1));
// Assert.Equal (false, Object.Equals(copy1, collect2));
Assert.Equal(true, object.Equals(_columnMapCollection, copy1));
Assert.Equal(true, object.Equals(_columnMapCollection, _columnMapCollection));
// Assert.Equal (true, Object.Equals(collect2, collect2));
Assert.Equal(true, object.Equals(copy1, copy1));
// Assert.Equal (false, Object.Equals(columnMapCollection, collect2));
Assert.Equal(true, object.Equals(_columnMapCollection, copy1));
// Assert.Equal (false, Object.Equals(collect2, columnMapCollection));
Assert.Equal(true, object.Equals(copy1, _columnMapCollection));
// Assert.Equal (false, Object.Equals(collect2, copy1));
Assert.Equal(true, object.Equals(copy1, _columnMapCollection));
}
[Fact]
public void GetByDataSetColumn()
{
_columnMapCollection.AddRange(_cols);
bool eq;
DataColumnMapping col1;
col1 = _columnMapCollection.GetByDataSetColumn("dataSetName");
eq = (col1.DataSetColumn.Equals("dataSetName") && col1.SourceColumn.Equals("sourceName"));
Assert.Equal(true, eq);
col1 = _columnMapCollection.GetByDataSetColumn("dataSetID");
eq = (col1.DataSetColumn.Equals("dataSetID") && col1.SourceColumn.Equals("sourceID"));
Assert.Equal(true, eq);
col1 = _columnMapCollection.GetByDataSetColumn("datasetname");
eq = (col1.DataSetColumn.Equals("dataSetName") && col1.SourceColumn.Equals("sourceName"));
Assert.Equal(true, eq);
col1 = _columnMapCollection.GetByDataSetColumn("datasetid");
eq = (col1.DataSetColumn.Equals("dataSetID") && col1.SourceColumn.Equals("sourceID"));
Assert.Equal(true, eq);
}
[Fact]
public void GetByDataSetColumn_String_InvalidArguments()
{
DataColumnMappingCollection dataColumnMappingCollection = new DataColumnMappingCollection();
Assert.Throws<IndexOutOfRangeException>(() => dataColumnMappingCollection.GetByDataSetColumn((string)null));
}
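// The next two tests exercise the MissingMappingAction behaviors for a source
// column that is not in the collection: Passthrough fabricates a mapping whose
// DataSetColumn equals the SourceColumn, Ignore returns null, and Error throws
// InvalidOperationException.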
[Fact]
public void GetColumnMappingBySchemaAction()
{
_columnMapCollection.AddRange(_cols);
bool eq;
DataColumnMapping col1;
col1 = DataColumnMappingCollection.GetColumnMappingBySchemaAction(_columnMapCollection, "sourceName", MissingMappingAction.Passthrough);
eq = (col1.DataSetColumn.Equals("dataSetName") && col1.SourceColumn.Equals("sourceName"));
Assert.Equal(true, eq);
col1 = DataColumnMappingCollection.GetColumnMappingBySchemaAction(_columnMapCollection, "sourceID", MissingMappingAction.Passthrough);
eq = (col1.DataSetColumn.Equals("dataSetID") && col1.SourceColumn.Equals("sourceID"));
Assert.Equal(true, eq);
col1 = DataColumnMappingCollection.GetColumnMappingBySchemaAction(_columnMapCollection, "sourceData", MissingMappingAction.Passthrough);
eq = (col1.DataSetColumn.Equals("sourceData") && col1.SourceColumn.Equals("sourceData"));
Assert.Equal(true, eq);
eq = _columnMapCollection.Contains(col1);
Assert.Equal(false, eq);
col1 = DataColumnMappingCollection.GetColumnMappingBySchemaAction(_columnMapCollection, "sourceData", MissingMappingAction.Ignore);
Assert.Equal(null, col1);
}
[Fact]
public void GetColumnMappingBySchemaActionException1()
{
Assert.Throws<InvalidOperationException>(() =>
{
DataColumnMappingCollection.GetColumnMappingBySchemaAction(_columnMapCollection, "sourceName", MissingMappingAction.Error);
});
}
[Fact]
public void IndexOf()
{
_columnMapCollection.AddRange(_cols);
int ind;
ind = _columnMapCollection.IndexOf(_cols[0]);
Assert.Equal(0, ind);
ind = _columnMapCollection.IndexOf(_cols[1]);
Assert.Equal(1, ind);
ind = _columnMapCollection.IndexOf(_cols[0].SourceColumn);
Assert.Equal(0, ind);
ind = _columnMapCollection.IndexOf(_cols[1].SourceColumn);
Assert.Equal(1, ind);
}
[Fact]
public void IndexOf_Object_IsNull()
{
DataColumnMappingCollection dataColumnMappingCollection = new DataColumnMappingCollection();
Assert.Equal(-1, dataColumnMappingCollection.IndexOf((object)null));
}
[Fact]
public void IndexOf_String_IsNull()
{
DataColumnMappingCollection dataColumnMappingCollection = new DataColumnMappingCollection();
Assert.Equal(-1, dataColumnMappingCollection.IndexOf((string)null));
}
[Fact]
public void IndexOfDataSetColumn()
{
_columnMapCollection.AddRange(_cols);
int ind;
ind = _columnMapCollection.IndexOfDataSetColumn(_cols[0].DataSetColumn);
Assert.Equal(0, ind);
ind = _columnMapCollection.IndexOfDataSetColumn(_cols[1].DataSetColumn);
Assert.Equal(1, ind);
ind = _columnMapCollection.IndexOfDataSetColumn("datasetname");
Assert.Equal(0, ind);
ind = _columnMapCollection.IndexOfDataSetColumn("datasetid");
Assert.Equal(1, ind);
ind = _columnMapCollection.IndexOfDataSetColumn("sourcedeter");
Assert.Equal(-1, ind);
}
[Fact]
public void Insert()
{
_columnMapCollection.AddRange(_cols);
DataColumnMapping mymap = new DataColumnMapping("sourceAge", "dataSetAge");
_columnMapCollection.Insert(3, mymap);
int ind = _columnMapCollection.IndexOfDataSetColumn("dataSetAge");
Assert.Equal(3, ind);
}
[Fact]
public void Remove_DataColumnMapping_InvalidArguments()
{
DataColumnMappingCollection dataColumnMappingCollection = new DataColumnMappingCollection();
Assert.Throws<ArgumentNullException>(() => dataColumnMappingCollection.Remove((DataColumnMapping)null));
}
[Fact]
public void RemoveException1()
{
Assert.Throws<InvalidCastException>(() =>
{
string te = "testingdata";
_columnMapCollection.AddRange(_cols);
_columnMapCollection.Remove(te);
});
}
[Fact]
public void RemoveException2()
{
Assert.Throws<ArgumentException>(() =>
{
_columnMapCollection.AddRange(_cols);
DataColumnMapping mymap = new DataColumnMapping("sourceAge", "dataSetAge");
_columnMapCollection.Remove(mymap);
});
}
[Fact]
public void RemoveAt()
{
_columnMapCollection.AddRange(_cols);
bool eq;
_columnMapCollection.RemoveAt(0);
eq = _columnMapCollection.Contains(_cols[0]);
Assert.Equal(false, eq);
eq = _columnMapCollection.Contains(_cols[1]);
Assert.Equal(true, eq);
_columnMapCollection.RemoveAt("sourceID");
eq = _columnMapCollection.Contains(_cols[1]);
Assert.Equal(false, eq);
eq = _columnMapCollection.Contains(_cols[2]);
Assert.Equal(true, eq);
}
[Fact]
public void RemoveAtException1()
{
Assert.Throws<IndexOutOfRangeException>(() =>
{
_columnMapCollection.RemoveAt(3);
});
}
[Fact]
public void RemoveAtException2()
{
Assert.Throws<IndexOutOfRangeException>(() =>
{
_columnMapCollection.RemoveAt("sourceAge");
});
}
[Fact]
public void ToStringTest()
{
Assert.Equal("System.Data.Common.DataColumnMappingCollection", _columnMapCollection.ToString());
}
[Fact]
public void Insert_Int_DataColumnMapping_InvalidArguments()
{
DataColumnMappingCollection dataColumnMappingCollection = new DataColumnMappingCollection();
Assert.Throws<ArgumentNullException>(() => dataColumnMappingCollection.Insert(123, (DataColumnMapping)null));
}
}
}
| |
/*
* Copyright (c) InWorldz Halcyon Developers
* Copyright (c) Contributors, http://opensimulator.org/
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSim Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using vector = OpenSim.Region.ScriptEngine.Shared.LSL_Types.Vector3;
using rotation = OpenSim.Region.ScriptEngine.Shared.LSL_Types.Quaternion;
using LSLInteger = OpenSim.Region.ScriptEngine.Shared.LSL_Types.LSLInteger;
namespace OpenSim.Region.ScriptEngine.Shared.ScriptBase
{
public partial class ScriptBaseClass : MarshalByRefObject
{
// LSL CONSTANTS
public static readonly LSLInteger TRUE = new LSLInteger(1);
public static readonly LSLInteger FALSE = new LSLInteger(0);
public const int STATUS_PHYSICS = 1;
public const int STATUS_ROTATE_X = 2;
public const int STATUS_ROTATE_Y = 4;
public const int STATUS_ROTATE_Z = 8;
public const int STATUS_PHANTOM = 16;
public const int STATUS_SANDBOX = 32;
public const int STATUS_BLOCK_GRAB = 64; // Note this will be treated as STATUS_BLOCK_GRAB_OBJECT
public const int STATUS_DIE_AT_EDGE = 128;
public const int STATUS_RETURN_AT_EDGE = 256;
public const int STATUS_CAST_SHADOWS = 512;
public const int STATUS_BLOCK_GRAB_OBJECT = 1024;
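// These STATUS_* values are bit flags, so scripts combine them with bitwise OR,
// e.g. (STATUS_PHYSICS | STATUS_PHANTOM) == 17 and
// (STATUS_ROTATE_X | STATUS_ROTATE_Y | STATUS_ROTATE_Z) == 14.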
public const int AGENT = 1;
public const int ACTIVE = 2;
public const int PASSIVE = 4;
public const int SCRIPTED = 8;
public const int CONTROL_FWD = 1;
public const int CONTROL_BACK = 2;
public const int CONTROL_LEFT = 4;
public const int CONTROL_RIGHT = 8;
public const int CONTROL_UP = 16;
public const int CONTROL_DOWN = 32;
public const int CONTROL_ROT_LEFT = 256;
public const int CONTROL_ROT_RIGHT = 512;
public const int CONTROL_LBUTTON = 268435456;
public const int CONTROL_ML_LBUTTON = 1073741824;
//Permissions
public const int PERMISSION_DEBIT = 2;
public const int PERMISSION_TAKE_CONTROLS = 4;
public const int PERMISSION_REMAP_CONTROLS = 8;
public const int PERMISSION_TRIGGER_ANIMATION = 16;
public const int PERMISSION_ATTACH = 32;
public const int PERMISSION_RELEASE_OWNERSHIP = 64;
public const int PERMISSION_CHANGE_LINKS = 128;
public const int PERMISSION_CHANGE_JOINTS = 256;
public const int PERMISSION_CHANGE_PERMISSIONS = 512;
public const int PERMISSION_TRACK_CAMERA = 1024;
public const int PERMISSION_CONTROL_CAMERA = 2048;
public const int PERMISSION_TELEPORT = 4096; // 0x1000
public const int PERMISSION_SILENT_ESTATE_MANAGEMENT = 16384; // 0x4000
public const int PERMISSION_OVERRIDE_ANIMATIONS = 32768; // 0x8000
public const int AGENT_FLYING = 1;
public const int AGENT_ATTACHMENTS = 2;
public const int AGENT_SCRIPTED = 4;
public const int AGENT_MOUSELOOK = 8;
public const int AGENT_SITTING = 16;
public const int AGENT_ON_OBJECT = 32;
public const int AGENT_AWAY = 64;
public const int AGENT_WALKING = 128;
public const int AGENT_IN_AIR = 256;
public const int AGENT_TYPING = 512;
public const int AGENT_CROUCHING = 1024;
public const int AGENT_BUSY = 2048;
public const int AGENT_ALWAYS_RUN = 4096;
//Particle Systems
public const int PSYS_PART_INTERP_COLOR_MASK = 1;
public const int PSYS_PART_INTERP_SCALE_MASK = 2;
public const int PSYS_PART_BOUNCE_MASK = 4;
public const int PSYS_PART_WIND_MASK = 8;
public const int PSYS_PART_FOLLOW_SRC_MASK = 16;
public const int PSYS_PART_FOLLOW_VELOCITY_MASK = 32;
public const int PSYS_PART_TARGET_POS_MASK = 64;
public const int PSYS_PART_TARGET_LINEAR_MASK = 128;
public const int PSYS_PART_EMISSIVE_MASK = 256;
public const int PSYS_PART_RIBBON_MASK = 1024;
public const int PSYS_PART_FLAGS = 0;
public const int PSYS_PART_START_COLOR = 1;
public const int PSYS_PART_START_ALPHA = 2;
public const int PSYS_PART_END_COLOR = 3;
public const int PSYS_PART_END_ALPHA = 4;
public const int PSYS_PART_START_SCALE = 5;
public const int PSYS_PART_END_SCALE = 6;
public const int PSYS_PART_MAX_AGE = 7;
public const int PSYS_SRC_ACCEL = 8;
public const int PSYS_SRC_PATTERN = 9;
public const int PSYS_SRC_INNERANGLE = 10;
public const int PSYS_SRC_OUTERANGLE = 11;
public const int PSYS_SRC_TEXTURE = 12;
public const int PSYS_SRC_BURST_RATE = 13;
public const int PSYS_SRC_BURST_PART_COUNT = 15;
public const int PSYS_SRC_BURST_RADIUS = 16;
public const int PSYS_SRC_BURST_SPEED_MIN = 17;
public const int PSYS_SRC_BURST_SPEED_MAX = 18;
public const int PSYS_SRC_MAX_AGE = 19;
public const int PSYS_SRC_TARGET_KEY = 20;
public const int PSYS_SRC_OMEGA = 21;
public const int PSYS_SRC_ANGLE_BEGIN = 22;
public const int PSYS_SRC_ANGLE_END = 23;
public const int PSYS_SRC_PATTERN_DROP = 1;
public const int PSYS_SRC_PATTERN_EXPLODE = 2;
public const int PSYS_SRC_PATTERN_ANGLE = 4;
public const int PSYS_SRC_PATTERN_ANGLE_CONE = 8;
public const int PSYS_SRC_PATTERN_ANGLE_CONE_EMPTY = 16;
public const int PSYS_PART_BLEND_FUNC_SOURCE = 24;
public const int PSYS_PART_BLEND_FUNC_DEST = 25;
public const int PSYS_PART_BF_ONE = 0;
public const int PSYS_PART_BF_ZERO = 1;
public const int PSYS_PART_BF_DEST_COLOR = 2;
public const int PSYS_PART_BF_SOURCE_COLOR = 3;
public const int PSYS_PART_BF_ONE_MINUS_DEST_COLOR = 4;
public const int PSYS_PART_BF_ONE_MINUS_SOURCE_COLOR = 5;
public const int PSYS_PART_BF_SOURCE_ALPHA = 7;
public const int PSYS_PART_BF_ONE_MINUS_SOURCE_ALPHA = 9;
public const int PSYS_PART_START_GLOW = 26;
public const int PSYS_PART_END_GLOW = 27;
public const int VEHICLE_TYPE_NONE = 0;
public const int VEHICLE_TYPE_SLED = 1;
public const int VEHICLE_TYPE_CAR = 2;
public const int VEHICLE_TYPE_BOAT = 3;
public const int VEHICLE_TYPE_AIRPLANE = 4;
public const int VEHICLE_TYPE_BALLOON = 5;
public const int VEHICLE_TYPE_SAILBOAT = 10001;
public const int VEHICLE_TYPE_MOTORCYCLE = 10002;
public const int VEHICLE_LINEAR_FRICTION_TIMESCALE = 16;
public const int VEHICLE_ANGULAR_FRICTION_TIMESCALE = 17;
public const int VEHICLE_LINEAR_MOTOR_DIRECTION = 18;
public const int VEHICLE_LINEAR_MOTOR_OFFSET = 20;
public const int VEHICLE_ANGULAR_MOTOR_DIRECTION = 19;
public const int VEHICLE_HOVER_HEIGHT = 24;
public const int VEHICLE_HOVER_EFFICIENCY = 25;
public const int VEHICLE_HOVER_TIMESCALE = 26;
public const int VEHICLE_BUOYANCY = 27;
public const int VEHICLE_LINEAR_DEFLECTION_EFFICIENCY = 28;
public const int VEHICLE_LINEAR_DEFLECTION_TIMESCALE = 29;
public const int VEHICLE_LINEAR_MOTOR_TIMESCALE = 30;
public const int VEHICLE_LINEAR_MOTOR_DECAY_TIMESCALE = 31;
public const int VEHICLE_ANGULAR_DEFLECTION_EFFICIENCY = 32;
public const int VEHICLE_ANGULAR_DEFLECTION_TIMESCALE = 33;
public const int VEHICLE_ANGULAR_MOTOR_TIMESCALE = 34;
public const int VEHICLE_ANGULAR_MOTOR_DECAY_TIMESCALE = 35;
public const int VEHICLE_VERTICAL_ATTRACTION_EFFICIENCY = 36;
public const int VEHICLE_VERTICAL_ATTRACTION_TIMESCALE = 37;
public const int VEHICLE_BANKING_EFFICIENCY = 38;
public const int VEHICLE_BANKING_MIX = 39;
public const int VEHICLE_BANKING_TIMESCALE = 40;
public const int VEHICLE_REFERENCE_FRAME = 44;
public const int VEHICLE_FLAG_NO_DEFLECTION_UP = 1;
public const int VEHICLE_FLAG_LIMIT_ROLL_ONLY = 2;
public const int VEHICLE_FLAG_HOVER_WATER_ONLY = 4;
public const int VEHICLE_FLAG_HOVER_TERRAIN_ONLY = 8;
public const int VEHICLE_FLAG_HOVER_GLOBAL_HEIGHT = 16;
public const int VEHICLE_FLAG_HOVER_UP_ONLY = 32;
public const int VEHICLE_FLAG_LIMIT_MOTOR_UP = 64;
public const int VEHICLE_FLAG_MOUSELOOK_STEER = 128;
public const int VEHICLE_FLAG_MOUSELOOK_BANK = 256;
public const int VEHICLE_FLAG_CAMERA_DECOUPLED = 512;
public const int INVENTORY_ALL = -1;
public const int INVENTORY_NONE = -1;
public const int INVENTORY_TEXTURE = 0;
public const int INVENTORY_SOUND = 1;
public const int INVENTORY_LANDMARK = 3;
public const int INVENTORY_CLOTHING = 5;
public const int INVENTORY_OBJECT = 6;
public const int INVENTORY_NOTECARD = 7;
public const int INVENTORY_SCRIPT = 10;
public const int INVENTORY_BODYPART = 13;
public const int INVENTORY_ANIMATION = 20;
public const int INVENTORY_GESTURE = 21;
public const int ATTACH_CHEST = 1;
public const int ATTACH_HEAD = 2;
public const int ATTACH_LSHOULDER = 3;
public const int ATTACH_RSHOULDER = 4;
public const int ATTACH_LHAND = 5;
public const int ATTACH_RHAND = 6;
public const int ATTACH_LFOOT = 7;
public const int ATTACH_RFOOT = 8;
public const int ATTACH_BACK = 9;
public const int ATTACH_PELVIS = 10;
public const int ATTACH_MOUTH = 11;
public const int ATTACH_CHIN = 12;
public const int ATTACH_LEAR = 13;
public const int ATTACH_REAR = 14;
public const int ATTACH_LEYE = 15;
public const int ATTACH_REYE = 16;
public const int ATTACH_NOSE = 17;
public const int ATTACH_RUARM = 18;
public const int ATTACH_RLARM = 19;
public const int ATTACH_LUARM = 20;
public const int ATTACH_LLARM = 21;
public const int ATTACH_RHIP = 22;
public const int ATTACH_RULEG = 23;
public const int ATTACH_RLLEG = 24;
public const int ATTACH_LHIP = 25;
public const int ATTACH_LULEG = 26;
public const int ATTACH_LLLEG = 27;
public const int ATTACH_BELLY = 28;
public const int ATTACH_LEFT_PEC = 29;
public const int ATTACH_RIGHT_PEC = 30;
public const int ATTACH_HUD_CENTER_2 = 31;
public const int ATTACH_HUD_TOP_RIGHT = 32;
public const int ATTACH_HUD_TOP_CENTER = 33;
public const int ATTACH_HUD_TOP_LEFT = 34;
public const int ATTACH_HUD_CENTER_1 = 35;
public const int ATTACH_HUD_BOTTOM_LEFT = 36;
public const int ATTACH_HUD_BOTTOM = 37;
public const int ATTACH_HUD_BOTTOM_RIGHT = 38;
public const int ATTACH_NECK = 39;
public const int ATTACH_AVATAR_CENTER = 40;
public const int LAND_LEVEL = 0;
public const int LAND_RAISE = 1;
public const int LAND_LOWER = 2;
public const int LAND_SMOOTH = 3;
public const int LAND_NOISE = 4;
public const int LAND_REVERT = 5;
public const int LAND_SMALL_BRUSH = 1;
public const int LAND_MEDIUM_BRUSH = 2;
public const int LAND_LARGE_BRUSH = 3;
//Agent Dataserver
public const int DATA_ONLINE = 1;
public const int DATA_NAME = 2;
public const int DATA_BORN = 3;
public const int DATA_RATING = 4;
public const int DATA_SIM_POS = 5;
public const int DATA_SIM_STATUS = 6;
public const int DATA_SIM_RATING = 7;
public const int DATA_PAYINFO = 8;
public const int DATA_SIM_RELEASE = 128;
public const int DATA_ACCOUNT_TYPE = 11001;
public const int ANIM_ON = 1;
public const int LOOP = 2;
public const int REVERSE = 4;
public const int PING_PONG = 8;
public const int SMOOTH = 16;
public const int ROTATE = 32;
public const int SCALE = 64;
public const int ALL_SIDES = -1;
public const int LINK_SET = -1;
public const int LINK_ROOT = 1;
public const int LINK_ALL_OTHERS = -2;
public const int LINK_ALL_CHILDREN = -3;
public const int LINK_THIS = -4;
public const int CHANGED_INVENTORY = 1;
public const int CHANGED_COLOR = 2;
public const int CHANGED_SHAPE = 4;
public const int CHANGED_SCALE = 8;
public const int CHANGED_TEXTURE = 16;
public const int CHANGED_LINK = 32;
public const int CHANGED_ALLOWED_DROP = 64;
public const int CHANGED_OWNER = 128;
public const int CHANGED_REGION = 256;
public const int CHANGED_TELEPORT = 512;
public const int CHANGED_REGION_START = 1024;
public const int CHANGED_REGION_RESTART = 1024;
public const int CHANGED_MEDIA = 2048;
public const int CHANGED_ANIMATION = 16384;
public const int TYPE_INVALID = 0;
public const int TYPE_INTEGER = 1;
public const int TYPE_FLOAT = 2;
public const int TYPE_STRING = 3;
public const int TYPE_KEY = 4;
public const int TYPE_VECTOR = 5;
public const int TYPE_ROTATION = 6;
//XML RPC Remote Data Channel
public const int REMOTE_DATA_CHANNEL = 1;
public const int REMOTE_DATA_REQUEST = 2;
public const int REMOTE_DATA_REPLY = 3;
//llHTTPRequest
public const int HTTP_METHOD = 0;
public const int HTTP_MIMETYPE = 1;
public const int HTTP_BODY_MAXLENGTH = 2;
public const int HTTP_VERIFY_CERT = 3;
public const int PRIM_MATERIAL = 2;
public const int PRIM_PHYSICS = 3;
public const int PRIM_TEMP_ON_REZ = 4;
public const int PRIM_PHANTOM = 5;
public const int PRIM_POSITION = 6;
public const int PRIM_SIZE = 7;
public const int PRIM_ROTATION = 8;
public const int PRIM_TYPE = 9;
public const int PRIM_TEXTURE = 17;
public const int PRIM_COLOR = 18;
public const int PRIM_BUMP_SHINY = 19;
public const int PRIM_FULLBRIGHT = 20;
public const int PRIM_FLEXIBLE = 21;
public const int PRIM_TEXGEN = 22;
public const int PRIM_POINT_LIGHT = 23; // Huh?
public const int PRIM_CAST_SHADOWS = 24; // Not implemented; included for completeness' sake
public const int PRIM_GLOW = 25;
public const int PRIM_TEXT = 26; // added by LL in server 1.38
public const int PRIM_NAME = 27; // added by LL in server 1.40
public const int PRIM_DESC = 28; // added by LL in server 1.40
public const int PRIM_ROT_LOCAL = 29; // added by LL in Oct 2010 thru Feb 2011 (JIRA SVC-93)
public const int PRIM_PHYSICS_SHAPE_TYPE = 30;
public const int PRIM_OMEGA = 32;
public const int PRIM_POS_LOCAL = 33;
public const int PRIM_LINK_TARGET = 34;
public const int PRIM_SLICE = 35;
// Large out-of-range value, unlikely to conflict with future LL constant values
public const int IW_PRIM_ALPHA = 11001;
public const int PRIM_TEXGEN_DEFAULT = 0;
public const int PRIM_TEXGEN_PLANAR = 1;
public const int PRIM_TYPE_BOX = 0;
public const int PRIM_TYPE_CYLINDER = 1;
public const int PRIM_TYPE_PRISM = 2;
public const int PRIM_TYPE_SPHERE = 3;
public const int PRIM_TYPE_TORUS = 4;
public const int PRIM_TYPE_TUBE = 5;
public const int PRIM_TYPE_RING = 6;
public const int PRIM_TYPE_SCULPT = 7;
public const int PRIM_HOLE_DEFAULT = 0;
public const int PRIM_HOLE_CIRCLE = 16;
public const int PRIM_HOLE_SQUARE = 32;
public const int PRIM_HOLE_TRIANGLE = 48;
public const int PRIM_MATERIAL_STONE = 0;
public const int PRIM_MATERIAL_METAL = 1;
public const int PRIM_MATERIAL_GLASS = 2;
public const int PRIM_MATERIAL_WOOD = 3;
public const int PRIM_MATERIAL_FLESH = 4;
public const int PRIM_MATERIAL_PLASTIC = 5;
public const int PRIM_MATERIAL_RUBBER = 6;
public const int PRIM_MATERIAL_LIGHT = 7;
public const int PRIM_SHINY_NONE = 0;
public const int PRIM_SHINY_LOW = 1;
public const int PRIM_SHINY_MEDIUM = 2;
public const int PRIM_SHINY_HIGH = 3;
public const int PRIM_BUMP_NONE = 0;
public const int PRIM_BUMP_BRIGHT = 1;
public const int PRIM_BUMP_DARK = 2;
public const int PRIM_BUMP_WOOD = 3;
public const int PRIM_BUMP_BARK = 4;
public const int PRIM_BUMP_BRICKS = 5;
public const int PRIM_BUMP_CHECKER = 6;
public const int PRIM_BUMP_CONCRETE = 7;
public const int PRIM_BUMP_TILE = 8;
public const int PRIM_BUMP_STONE = 9;
public const int PRIM_BUMP_DISKS = 10;
public const int PRIM_BUMP_GRAVEL = 11;
public const int PRIM_BUMP_BLOBS = 12;
public const int PRIM_BUMP_SIDING = 13;
public const int PRIM_BUMP_LARGETILE = 14;
public const int PRIM_BUMP_STUCCO = 15;
public const int PRIM_BUMP_SUCTION = 16;
public const int PRIM_BUMP_WEAVE = 17;
public const int PRIM_SCULPT_TYPE_SPHERE = 1;
public const int PRIM_SCULPT_TYPE_TORUS = 2;
public const int PRIM_SCULPT_TYPE_PLANE = 3;
public const int PRIM_SCULPT_TYPE_CYLINDER = 4;
public const int PRIM_SCULPT_FLAG_INVERT = 64;
public const int PRIM_SCULPT_FLAG_MIRROR = 128;
public const int PRIM_PHYSICS_SHAPE_PRIM = 0;
public const int PRIM_PHYSICS_SHAPE_NONE = 1;
public const int PRIM_PHYSICS_SHAPE_CONVEX = 2;
public const int MASK_BASE = 0;
public const int MASK_OWNER = 1;
public const int MASK_GROUP = 2;
public const int MASK_EVERYONE = 3;
public const int MASK_NEXT = 4;
public const int PERM_TRANSFER = 8192;
public const int PERM_MODIFY = 16384;
public const int PERM_COPY = 32768;
public const int PERM_MOVE = 524288;
public const int PERM_ALL = 2147483647;
public const int PARCEL_MEDIA_COMMAND_STOP = 0;
public const int PARCEL_MEDIA_COMMAND_PAUSE = 1;
public const int PARCEL_MEDIA_COMMAND_PLAY = 2;
public const int PARCEL_MEDIA_COMMAND_LOOP = 3;
public const int PARCEL_MEDIA_COMMAND_TEXTURE = 4;
public const int PARCEL_MEDIA_COMMAND_URL = 5;
public const int PARCEL_MEDIA_COMMAND_TIME = 6;
public const int PARCEL_MEDIA_COMMAND_AGENT = 7;
public const int PARCEL_MEDIA_COMMAND_UNLOAD = 8;
public const int PARCEL_MEDIA_COMMAND_AUTO_ALIGN = 9;
public const int PARCEL_MEDIA_COMMAND_TYPE = 10;
public const int PARCEL_MEDIA_COMMAND_SIZE = 11;
public const int PARCEL_MEDIA_COMMAND_DESC = 12;
public const int PARCEL_FLAG_ALLOW_FLY = 0x1; // parcel allows flying
public const int PARCEL_FLAG_ALLOW_SCRIPTS = 0x2; // parcel allows outside scripts
public const int PARCEL_FLAG_ALLOW_LANDMARK = 0x8; // parcel allows landmarks to be created
public const int PARCEL_FLAG_ALLOW_TERRAFORM = 0x10; // parcel allows anyone to terraform the land
public const int PARCEL_FLAG_ALLOW_DAMAGE = 0x20; // parcel allows damage
public const int PARCEL_FLAG_ALLOW_CREATE_OBJECTS = 0x40; // parcel allows anyone to create objects
public const int PARCEL_FLAG_USE_ACCESS_GROUP = 0x100; // parcel limits access to a group
public const int PARCEL_FLAG_USE_ACCESS_LIST = 0x200; // parcel limits access to a list of residents
public const int PARCEL_FLAG_USE_BAN_LIST = 0x400; // parcel uses a ban list, including restricting access based on payment info
public const int PARCEL_FLAG_USE_LAND_PASS_LIST = 0x800; // parcel allows passes to be purchased
public const int PARCEL_FLAG_LOCAL_SOUND_ONLY = 0x8000; // parcel restricts spatialized sound to the parcel
public const int PARCEL_FLAG_RESTRICT_PUSHOBJECT = 0x200000; // parcel restricts llPushObject
public const int PARCEL_FLAG_ALLOW_GROUP_SCRIPTS = 0x2000000; // parcel allows scripts owned by group
public const int PARCEL_FLAG_ALLOW_CREATE_GROUP_OBJECTS = 0x4000000; // parcel allows group object creation
public const int PARCEL_FLAG_ALLOW_ALL_OBJECT_ENTRY = 0x8000000; // parcel allows objects owned by any user to enter
public const int PARCEL_FLAG_ALLOW_GROUP_OBJECT_ENTRY = 0x10000000; // parcel allows objects with the same group to enter
public const int REGION_FLAG_ALLOW_DAMAGE = 0x1; // region is entirely damage enabled
public const int REGION_FLAG_FIXED_SUN = 0x10; // region has a fixed sun position
public const int REGION_FLAG_BLOCK_TERRAFORM = 0x40; // region terraforming disabled
public const int REGION_FLAG_SANDBOX = 0x100; // region is a sandbox
public const int REGION_FLAG_DISABLE_COLLISIONS = 0x1000; // region has disabled collisions
public const int REGION_FLAG_DISABLE_PHYSICS = 0x4000; // region has disabled physics
public const int REGION_FLAG_BLOCK_FLY = 0x80000; // region blocks flying
public const int REGION_FLAG_ALLOW_DIRECT_TELEPORT = 0x100000; // region allows direct teleports
public const int REGION_FLAG_RESTRICT_PUSHOBJECT = 0x400000; // region restricts llPushObject
public static readonly LSLInteger PAY_HIDE = new LSLInteger(-1);
public static readonly LSLInteger PAY_DEFAULT = new LSLInteger(-2);
public const string NULL_KEY = "00000000-0000-0000-0000-000000000000";
public const string EOF = "\n\n\n";
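// Note: the math constants below are typed double but initialized from single-precision
// literals (the trailing 'f'), presumably to mirror LSL's single-precision float values.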
public const double PI = 3.14159274f;
public const double TWO_PI = 6.28318548f;
public const double PI_BY_TWO = 1.57079637f;
public const double DEG_TO_RAD = 0.01745329238f;
public const double RAD_TO_DEG = 57.29578f;
public const double SQRT2 = 1.414213538f;
public const int STRING_TRIM_HEAD = 1;
public const int STRING_TRIM_TAIL = 2;
public const int STRING_TRIM = 3;
public const int LIST_STAT_RANGE = 0;
public const int LIST_STAT_MIN = 1;
public const int LIST_STAT_MAX = 2;
public const int LIST_STAT_MEAN = 3;
public const int LIST_STAT_MEDIAN = 4;
public const int LIST_STAT_STD_DEV = 5;
public const int LIST_STAT_SUM = 6;
public const int LIST_STAT_SUM_SQUARES = 7;
public const int LIST_STAT_NUM_COUNT = 8;
public const int LIST_STAT_GEOMETRIC_MEAN = 9;
public const int LIST_STAT_HARMONIC_MEAN = 100;
//ParcelPrim Categories
public const int PARCEL_COUNT_TOTAL = 0;
public const int PARCEL_COUNT_OWNER = 1;
public const int PARCEL_COUNT_GROUP = 2;
public const int PARCEL_COUNT_OTHER = 3;
public const int PARCEL_COUNT_SELECTED = 4;
public const int PARCEL_COUNT_TEMP = 5;
public const int DEBUG_CHANNEL = 0x7FFFFFFF;
public const int PUBLIC_CHANNEL = 0x00000000;
// http://wiki.secondlife.com/wiki/LlGetObjectDetails
public const int OBJECT_NAME = 1;
public const int OBJECT_DESC = 2;
public const int OBJECT_POS = 3;
public const int OBJECT_ROT = 4;
public const int OBJECT_VELOCITY = 5;
public const int OBJECT_OWNER = 6;
public const int OBJECT_GROUP = 7;
public const int OBJECT_CREATOR = 8;
public const int OBJECT_RUNNING_SCRIPT_COUNT = 9;
public const int OBJECT_TOTAL_SCRIPT_COUNT = 10;
public const int OBJECT_SCRIPT_MEMORY = 11; // http://wiki.secondlife.com/wiki/LSL_Script_Memory
public const int OBJECT_SCRIPT_TIME = 12;
public const int OBJECT_PRIM_EQUIVALENCE = 13;
public const int OBJECT_SERVER_COST = 14; // http://wiki.secondlife.com/wiki/Mesh/Mesh_Server_Weight
public const int OBJECT_STREAMING_COST = 15; // http://wiki.secondlife.com/wiki/Mesh/Mesh_Streaming_Cost
public const int OBJECT_PHYSICS_COST = 16; // http://wiki.secondlife.com/wiki/Mesh/Mesh_physics
public const int OBJECT_CHARACTER_TIME = 17;
public const int OBJECT_ROOT = 18;
public const int OBJECT_ATTACHED_POINT = 19;
public const int OBJECT_PATHFINDING_TYPE = 20;
public const int OBJECT_PHYSICS = 21;
public const int OBJECT_PHANTOM = 22;
public const int OBJECT_TEMP_ON_REZ = 23;
// Values for llGetObjectDetails(OBJECT_PATHFINDING_TYPE) above
public const int OPT_OTHER = -1; // Attachments, Linden trees & grass
public const int OPT_LEGACY_LINKSET = 0; // Movable obstacles, movable phantoms, physical, and volumedetect objects
public const int OPT_AVATAR = 1; // Avatars
public const int OPT_CHARACTER = 2; // Pathfinding characters
public const int OPT_WALKABLE = 3; // Walkable objects
public const int OPT_STATIC_OBSTACLE = 4; // Static obstacles
public const int OPT_MATERIAL_VOLUME = 5; // Material volumes
public const int OPT_EXCLUSION_VOLUME = 6;
// These cannot be const: C# allows const only for compile-time primitive and string values,
// so the vector and rotation constants are declared static readonly instead.
public static readonly vector ZERO_VECTOR = new vector(0.0, 0.0, 0.0);
public static readonly rotation ZERO_ROTATION = new rotation(0.0, 0.0, 0.0, 1.0);
// constants for llSetCameraParams
public const int CAMERA_PITCH = 0;
public const int CAMERA_FOCUS_OFFSET = 1;
public const int CAMERA_FOCUS_OFFSET_X = 2;
public const int CAMERA_FOCUS_OFFSET_Y = 3;
public const int CAMERA_FOCUS_OFFSET_Z = 4;
public const int CAMERA_POSITION_LAG = 5;
public const int CAMERA_FOCUS_LAG = 6;
public const int CAMERA_DISTANCE = 7;
public const int CAMERA_BEHINDNESS_ANGLE = 8;
public const int CAMERA_BEHINDNESS_LAG = 9;
public const int CAMERA_POSITION_THRESHOLD = 10;
public const int CAMERA_FOCUS_THRESHOLD = 11;
public const int CAMERA_ACTIVE = 12;
public const int CAMERA_POSITION = 13;
public const int CAMERA_POSITION_X = 14;
public const int CAMERA_POSITION_Y = 15;
public const int CAMERA_POSITION_Z = 16;
public const int CAMERA_FOCUS = 17;
public const int CAMERA_FOCUS_X = 18;
public const int CAMERA_FOCUS_Y = 19;
public const int CAMERA_FOCUS_Z = 20;
public const int CAMERA_POSITION_LOCKED = 21;
public const int CAMERA_FOCUS_LOCKED = 22;
// constants for llGetParcelDetails
public const int PARCEL_DETAILS_NAME = 0;
public const int PARCEL_DETAILS_DESC = 1;
public const int PARCEL_DETAILS_OWNER = 2;
public const int PARCEL_DETAILS_GROUP = 3;
public const int PARCEL_DETAILS_AREA = 4;
public const int PARCEL_DETAILS_ID = 5;
public const int PARCEL_DETAILS_SEE_AVATARS = 6;
// constants for llSetClickAction
public const int CLICK_ACTION_NONE = 0;
public const int CLICK_ACTION_TOUCH = 0;
public const int CLICK_ACTION_SIT = 1;
public const int CLICK_ACTION_BUY = 2;
public const int CLICK_ACTION_PAY = 3;
public const int CLICK_ACTION_OPEN = 4;
public const int CLICK_ACTION_PLAY = 5;
public const int CLICK_ACTION_OPEN_MEDIA = 6;
public const int CLICK_ACTION_ZOOM = 7;
// constants for the llDetectedTouch* functions
public const int TOUCH_INVALID_FACE = -1;
public static readonly vector TOUCH_INVALID_TEXCOORD = new vector(-1.0, -1.0, 0.0);
public static readonly vector TOUCH_INVALID_VECTOR = ZERO_VECTOR;
// constants for llGetPrimMediaParams/llSetPrimMediaParams
public const int PRIM_MEDIA_ALT_IMAGE_ENABLE = 0;
public const int PRIM_MEDIA_CONTROLS = 1;
public const int PRIM_MEDIA_CURRENT_URL = 2;
public const int PRIM_MEDIA_HOME_URL = 3;
public const int PRIM_MEDIA_AUTO_LOOP = 4;
public const int PRIM_MEDIA_AUTO_PLAY = 5;
public const int PRIM_MEDIA_AUTO_SCALE = 6;
public const int PRIM_MEDIA_AUTO_ZOOM = 7;
public const int PRIM_MEDIA_FIRST_CLICK_INTERACT = 8;
public const int PRIM_MEDIA_WIDTH_PIXELS = 9;
public const int PRIM_MEDIA_HEIGHT_PIXELS = 10;
public const int PRIM_MEDIA_WHITELIST_ENABLE = 11;
public const int PRIM_MEDIA_WHITELIST = 12;
public const int PRIM_MEDIA_PERMS_INTERACT = 13;
public const int PRIM_MEDIA_PERMS_CONTROL = 14;
public const int PRIM_MEDIA_CONTROLS_STANDARD = 0;
public const int PRIM_MEDIA_CONTROLS_MINI = 1;
public const int PRIM_MEDIA_PERM_NONE = 0;
public const int PRIM_MEDIA_PERM_OWNER = 1;
public const int PRIM_MEDIA_PERM_GROUP = 2;
public const int PRIM_MEDIA_PERM_ANYONE = 4;
// extra constants for llSetPrimMediaParams
public static readonly LSLInteger LSL_STATUS_OK = new LSLInteger(0);
public static readonly LSLInteger LSL_STATUS_MALFORMED_PARAMS = new LSLInteger(1000);
public static readonly LSLInteger LSL_STATUS_TYPE_MISMATCH = new LSLInteger(1001);
public static readonly LSLInteger LSL_STATUS_BOUNDS_ERROR = new LSLInteger(1002);
public static readonly LSLInteger LSL_STATUS_NOT_FOUND = new LSLInteger(1003);
public static readonly LSLInteger LSL_STATUS_NOT_SUPPORTED = new LSLInteger(1004);
public static readonly LSLInteger LSL_STATUS_INTERNAL_ERROR = new LSLInteger(1999);
public static readonly LSLInteger LSL_STATUS_WHITELIST_FAILED = new LSLInteger(2001);
// Constants for default textures
public const string TEXTURE_BLANK = "5748decc-f629-461c-9a36-a35a221fe21f";
public const string TEXTURE_DEFAULT = "89556747-24cb-43ed-920b-47caed15465f";
public const string TEXTURE_PLYWOOD = "89556747-24cb-43ed-920b-47caed15465f";
public const string TEXTURE_TRANSPARENT = "8dcd4a48-2d37-4909-9f78-f7a9eb4ef903";
public const string TEXTURE_MEDIA = "8b5fec65-8d8d-9dc5-cda8-8fdf2716e361";
// llGetAgentList and iwGetAgentList scopes
public const int AGENT_LIST_PARCEL = 1;
public const int AGENT_LIST_PARCEL_OWNER = 2;
public const int AGENT_LIST_REGION = 4;
// For llManageEstateAccess. Warning: these constant values do not match SL; they should have been
// bit masks to permit simultaneous add/remove operations, but it is too late to change that now.
public const int ESTATE_ACCESS_ALLOWED_AGENT_ADD = 0;
public const int ESTATE_ACCESS_ALLOWED_AGENT_REMOVE = 1;
public const int ESTATE_ACCESS_ALLOWED_GROUP_ADD = 2;
public const int ESTATE_ACCESS_ALLOWED_GROUP_REMOVE = 3;
public const int ESTATE_ACCESS_BANNED_AGENT_ADD = 4;
public const int ESTATE_ACCESS_BANNED_AGENT_REMOVE = 5;
public const int ESTATE_ACCESS_QUERY_CAN_MANAGE = 11000;
public const int ESTATE_ACCESS_QUERY_ALLOWED_AGENT = 11001;
public const int ESTATE_ACCESS_QUERY_ALLOWED_GROUP = 11002;
public const int ESTATE_ACCESS_QUERY_BANNED_AGENT = 11003;
// llJsonXXX
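// The JSON_* type markers below are Unicode noncharacter code points (U+FDD0..U+FDD8), which
// cannot appear in well-formed interchange text and therefore make unambiguous sentinel values.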
public const string JSON_INVALID = "\uFDD0";
public const string JSON_OBJECT = "\uFDD1";
public const string JSON_ARRAY = "\uFDD2";
public const string JSON_NUMBER = "\uFDD3";
public const string JSON_STRING = "\uFDD4";
public const string JSON_NULL = "\uFDD5";
public const string JSON_TRUE = "\uFDD6";
public const string JSON_FALSE = "\uFDD7";
public const string JSON_DELETE = "\uFDD8";
public const int JSON_APPEND = -1;
// llSetContentType content types
public const int CONTENT_TYPE_TEXT = 0;
public const int CONTENT_TYPE_HTML = 1;
public const int CONTENT_TYPE_XML = 2;
public const int CONTENT_TYPE_XHTML = 3;
public const int CONTENT_TYPE_ATOM = 4;
public const int CONTENT_TYPE_JSON = 5;
public const int CONTENT_TYPE_LLSD = 6;
public const int CONTENT_TYPE_FORM = 7;
public const int CONTENT_TYPE_RSS = 8;
// iwSetWind types
public const int WIND_SPEED_DEFAULT = 0;
public const int WIND_SPEED_FIXED = 1;
public const int RC_REJECT_TYPES = 0;
public const int RC_DETECT_PHANTOM = 1;
public const int RC_DATA_FLAGS = 2;
public const int RC_MAX_HITS = 3;
public const int RC_REJECT_AGENTS = 1;
public const int RC_REJECT_PHYSICAL = 2;
public const int RC_REJECT_NONPHYSICAL = 4;
public const int RC_REJECT_LAND = 8;
public const int RC_GET_NORMAL = 1;
public const int RC_GET_ROOT_KEY = 2;
public const int RC_GET_LINK_NUM = 4;
public const int RCERR_UNKNOWN = -1;
public const int RCERR_SIM_PERF_LOW = -2;
public const int RCERR_CAST_TIME_EXCEEDED = -3;
public const int KFM_ROTATION = 1;
public const int KFM_TRANSLATION = 2;
public const int KFM_COMMAND = 0;
public const int KFM_MODE = 1;
public const int KFM_DATA = 2;
public const int KFM_FORWARD = 0;
public const int KFM_LOOP = 1;
public const int KFM_PING_PONG = 2;
public const int KFM_REVERSE = 3;
public const int KFM_CMD_PLAY = 0;
public const int KFM_CMD_STOP = 1;
public const int KFM_CMD_PAUSE = 2;
public const int BOT_ERROR = -3;
public const int BOT_USER_NOT_FOUND = -2;
public const int BOT_NOT_FOUND = -1;
public const int BOT_SUCCESS = 0;
public const int BOT_ALLOW_RUNNING = 1;
public const int BOT_ALLOW_FLYING = 2;
public const int BOT_ALLOW_JUMPING = 3;
public const int BOT_FOLLOW_OFFSET = 4;
public const int BOT_REQUIRES_LINE_OF_SIGHT = 5;
public const int BOT_START_FOLLOWING_DISTANCE = 6;
public const int BOT_STOP_FOLLOWING_DISTANCE = 7;
public const int BOT_LOST_AVATAR_DISTANCE = 8;
public const int BOT_TRAVELMODE_WALK = 1;
public const int BOT_TRAVELMODE_RUN = 2;
public const int BOT_TRAVELMODE_FLY = 3;
public const int BOT_TRAVELMODE_TELEPORT = 4;
public const int BOT_TRAVELMODE_WAIT = 5;
public const int BOT_MOVEMENT_TYPE = 0;
public const int BOT_MOVEMENT_TELEPORT_AFTER = 1;
public const int BOT_MOVEMENT_FLAG_NONE = 0;
public const int BOT_MOVEMENT_FLAG_FOLLOW_INDEFINITELY = 1;
public const int BOT_CREATE_DEFAULT = 0;
public const int BOT_CREATE_NO_OWNER = 1;
public const int BOT_MOVE_COMPLETE = 1;
public const int BOT_MOVE_UPDATE = 2;
public const int BOT_MOVE_FAILED = 3;
public const int BOT_MOVE_AVATAR_LOST = 4;
public const int BOT_WANDER_MOVEMENT_TYPE = 1;
public const int BOT_WANDER_TIME_BETWEEN_NODES = 2;
public const int BOT_ABOUT_TEXT = 1;
public const int BOT_EMAIL = 2;
public const int BOT_IMAGE_UUID = 3;
public const int BOT_PROFILE_URL = 4;
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.ComponentModel;
using System.Diagnostics;
using System.IO;
using System.Net.Sockets;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace System.Net.NetworkInformation
{
public partial class Ping
{
private const int IcmpHeaderLengthInBytes = 8;
private const int IpHeaderLengthInBytes = 20;
private async Task<PingReply> SendPingAsyncCore(IPAddress address, byte[] buffer, int timeout, PingOptions options)
{
try
{
Task<PingReply> t = RawSocketPermissions.CanUseRawSockets() ?
SendIcmpEchoRequestOverRawSocket(address, buffer, timeout, options) :
SendWithPingUtility(address, buffer, timeout, options);
return await t.ConfigureAwait(false);
}
finally
{
Finish();
}
}
private async Task<PingReply> SendIcmpEchoRequestOverRawSocket(IPAddress address, byte[] buffer, int timeout, PingOptions options)
{
EndPoint endPoint = new IPEndPoint(address, 0);
bool isIpv4 = address.AddressFamily == AddressFamily.InterNetwork;
ProtocolType protocolType = isIpv4 ? ProtocolType.Icmp : ProtocolType.IcmpV6;
// Use the current thread's ID as the identifier.
ushort identifier = (ushort)Environment.CurrentManagedThreadId;
IcmpHeader header = new IcmpHeader()
{
Type = isIpv4 ? (byte)IcmpV4MessageType.EchoRequest : (byte)IcmpV6MessageType.EchoRequest,
Code = 0,
HeaderChecksum = 0,
Identifier = identifier,
SequenceNumber = 0,
};
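// HeaderChecksum is deliberately left at 0 here; CreateSendMessageBuffer computes the ICMP
// checksum over the serialized header plus payload and patches it into the outgoing buffer.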
byte[] sendBuffer = CreateSendMessageBuffer(header, buffer);
using (Socket socket = new Socket(address.AddressFamily, SocketType.Raw, protocolType))
{
socket.ReceiveTimeout = timeout;
socket.SendTimeout = timeout;
// Setting Socket.DontFragment and .Ttl is not supported on Unix, so ignore the PingOptions parameter.
int ipHeaderLength = isIpv4 ? IpHeaderLengthInBytes : 0;
await socket.SendToAsync(new ArraySegment<byte>(sendBuffer), SocketFlags.None, endPoint).ConfigureAwait(false);
byte[] receiveBuffer = new byte[ipHeaderLength + IcmpHeaderLengthInBytes + buffer.Length];
long elapsed;
Stopwatch sw = Stopwatch.StartNew();
// Read from the socket in a loop. We may receive messages that are not echo replies, or that are not in response
// to the echo request we just sent. We need to filter such messages out, and continue reading until our timeout.
// For example, when pinging the local host, we need to filter out our own echo requests that the socket reads.
while ((elapsed = sw.ElapsedMilliseconds) < timeout)
{
Task<SocketReceiveFromResult> receiveTask = socket.ReceiveFromAsync(
new ArraySegment<byte>(receiveBuffer),
SocketFlags.None,
endPoint);
var cts = new CancellationTokenSource();
Task finished = await Task.WhenAny(receiveTask, Task.Delay(timeout - (int)elapsed, cts.Token)).ConfigureAwait(false);
cts.Cancel();
if (finished != receiveTask)
{
sw.Stop();
return CreateTimedOutPingReply();
}
SocketReceiveFromResult receiveResult = receiveTask.GetAwaiter().GetResult();
int bytesReceived = receiveResult.ReceivedBytes;
if (bytesReceived - ipHeaderLength < IcmpHeaderLengthInBytes)
{
continue; // Not enough bytes to reconstruct IP header + ICMP header.
}
byte type, code;
unsafe
{
fixed (byte* bytesPtr = receiveBuffer)
{
int icmpHeaderOffset = ipHeaderLength;
IcmpHeader receivedHeader = *((IcmpHeader*)(bytesPtr + icmpHeaderOffset)); // Skip IP header.
type = receivedHeader.Type;
code = receivedHeader.Code;
if (identifier != receivedHeader.Identifier
|| type == (byte)IcmpV4MessageType.EchoRequest
|| type == (byte)IcmpV6MessageType.EchoRequest) // Echo Request, ignore
{
continue;
}
}
}
sw.Stop();
long roundTripTime = sw.ElapsedMilliseconds;
int dataOffset = ipHeaderLength + IcmpHeaderLengthInBytes;
// We want to return a buffer with the actual data we sent out, not including the header data.
byte[] dataBuffer = new byte[bytesReceived - dataOffset];
Array.Copy(receiveBuffer, dataOffset, dataBuffer, 0, dataBuffer.Length);
IPStatus status = isIpv4
? IcmpV4MessageConstants.MapV4TypeToIPStatus(type, code)
: IcmpV6MessageConstants.MapV6TypeToIPStatus(type, code);
return new PingReply(address, options, status, roundTripTime, dataBuffer);
}
// We have exceeded our timeout duration, and no reply has been received.
sw.Stop();
return CreateTimedOutPingReply();
}
}
private async Task<PingReply> SendWithPingUtility(IPAddress address, byte[] buffer, int timeout, PingOptions options)
{
bool isIpv4 = address.AddressFamily == AddressFamily.InterNetwork;
string pingExecutable = isIpv4 ? UnixCommandLinePing.Ping4UtilityPath : UnixCommandLinePing.Ping6UtilityPath;
if (pingExecutable == null)
{
throw new PlatformNotSupportedException(SR.net_ping_utility_not_found);
}
string processArgs = UnixCommandLinePing.ConstructCommandLine(buffer.Length, address.ToString(), isIpv4);
ProcessStartInfo psi = new ProcessStartInfo(pingExecutable, processArgs);
psi.RedirectStandardOutput = true;
psi.RedirectStandardError = true;
Process p = new Process() { StartInfo = psi };
var processCompletion = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
p.EnableRaisingEvents = true;
p.Exited += (s, e) => processCompletion.SetResult(true);
p.Start();
var cts = new CancellationTokenSource();
Task timeoutTask = Task.Delay(timeout, cts.Token);
Task finished = await Task.WhenAny(processCompletion.Task, timeoutTask).ConfigureAwait(false);
if (finished == timeoutTask && !p.HasExited)
{
// Try to kill the ping process if it didn't return. If it is already in the process of exiting, a Win32Exception will be thrown.
try
{
p.Kill();
}
catch (Win32Exception) { }
return CreateTimedOutPingReply();
}
else
{
cts.Cancel();
if (p.ExitCode != 0)
{
// This means no reply was received, although transmission may have been successful.
return CreateTimedOutPingReply();
}
try
{
string output = await p.StandardOutput.ReadToEndAsync().ConfigureAwait(false);
long rtt = UnixCommandLinePing.ParseRoundTripTime(output);
return new PingReply(
address,
null, // The ping utility cannot accommodate these options; return null to indicate they were ignored.
IPStatus.Success,
rtt,
Array.Empty<byte>()); // Ping utility doesn't deliver this info.
}
catch (Exception)
{
// If the standard output cannot be successfully parsed, throw a generic PingException.
throw new PingException(SR.net_ping);
}
}
}
private PingReply CreateTimedOutPingReply()
{
// Documentation indicates that callers should only rely on the IPStatus value when it is not
// "Success"; the remaining values are populated to match the Windows implementation.
return new PingReply(new IPAddress(0), null, IPStatus.TimedOut, 0, Array.Empty<byte>());
}
#if DEBUG
static Ping()
{
Debug.Assert(Marshal.SizeOf<IcmpHeader>() == 8, "The size of an ICMP Header must be 8 bytes.");
}
#endif
// Must be 8 bytes total.
[StructLayout(LayoutKind.Sequential)]
internal struct IcmpHeader
{
public byte Type;
public byte Code;
public ushort HeaderChecksum;
public ushort Identifier;
public ushort SequenceNumber;
}
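// Illustrative sketch of the echo-request bytes produced by CreateSendMessageBuffer
// (for reference only, not used by the code):
// byte 0 Type (8 = ICMPv4 echo request, 128 = ICMPv6 echo request)
// byte 1 Code (0)
// bytes 2-3 Checksum (big-endian, patched in after serialization)
// bytes 4-5 Identifier (current managed thread id)
// bytes 6-7 SequenceNumber (0)
// bytes 8.. caller-supplied payload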
private static unsafe byte[] CreateSendMessageBuffer(IcmpHeader header, byte[] payload)
{
int headerSize = sizeof(IcmpHeader);
byte[] result = new byte[headerSize + payload.Length];
Marshal.Copy(new IntPtr(&header), result, 0, headerSize);
payload.CopyTo(result, headerSize);
ushort checksum = ComputeBufferChecksum(result);
// Write the checksum into the ICMP header's checksum field (bytes 2-3, big-endian).
result[2] = (byte)(checksum >> 8);
result[3] = (byte)(checksum & (0xFF));
return result;
}
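// Worked example of the RFC 1071 checksum computed below (illustrative):
// buffer = { 0x45, 0x00, 0x00, 0x1C } -> 16-bit words 0x4500 + 0x001C = 0x451C;
// no carries above bit 16 remain, so the checksum is ~0x451C = 0xBAE3.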
private static ushort ComputeBufferChecksum(byte[] buffer)
{
// This is using the "deferred carries" approach outlined in RFC 1071.
uint sum = 0;
for (int i = 0; i < buffer.Length; i += 2)
{
// Combine each pair of bytes into a 16-bit number and add it to the sum
ushort element0 = (ushort)((buffer[i] << 8) & 0xFF00);
ushort element1 = (i + 1 < buffer.Length)
? (ushort)(buffer[i + 1] & 0x00FF)
: (ushort)0; // If there's an odd number of bytes, pad by one octet of zeros.
ushort combined = (ushort)(element0 | element1);
sum += (uint)combined;
}
// Add back the "carry bits" which have risen to the upper 16 bits of the sum.
while ((sum >> 16) != 0)
{
var partialSum = sum & 0xFFFF;
var carries = sum >> 16;
sum = partialSum + carries;
}
return (ushort)~sum;
}
}
}
| |
using NUnit.Framework;
using RefactoringEssentials.CSharp.Diagnostics;
namespace RefactoringEssentials.Tests.CSharp.Diagnostics
{
[TestFixture]
[Ignore("TODO: Issue not ported yet")]
public class RedundantAssignmentTests : CSharpDiagnosticTestBase
{
[Test]
public void TestVariableInitializerNotUsed()
{
var input = @"
class TestClass
{
void TestMethod ()
{
int i = 1;
}
}";
var output = @"
class TestClass
{
void TestMethod ()
{
}
}";
Test<RedundantAssignmentAnalyzer>(input, 1, output);
}
[Test]
public void TestVariableInitializerNotUsedVar()
{
var input = @"
class TestClass
{
void TestMethod ()
{
var i = 1;
}
}";
var output = @"
class TestClass
{
void TestMethod ()
{
}
}";
Test<RedundantAssignmentAnalyzer>(input, 1, output);
}
[Test]
public void TestVariableAssignmentNotUsed()
{
var input = @"
class TestClass
{
int TestMethod ()
{
int i = 1;
int j = i;
i = 2;
return j;
}
}";
var output = @"
class TestClass
{
int TestMethod ()
{
int i = 1;
int j = i;
return j;
}
}";
Test<RedundantAssignmentAnalyzer>(input, 1, output);
}
[Test]
public void TestParameterAssignmentNotUsed()
{
var input = @"
class TestClass
{
int TestMethod (int i)
{
int j = i;
i = 2;
return j;
}
}";
var output = @"
class TestClass
{
int TestMethod (int i)
{
int j = i;
return j;
}
}";
Test<RedundantAssignmentAnalyzer>(input, 1, output);
}
[Test]
public void TestAssignmentInExpression()
{
var input = @"
class TestClass
{
int TestMethod (int i)
{
int j = i = 2;
return j;
}
}";
var output = @"
class TestClass
{
int TestMethod (int i)
{
int j = 2;
return j;
}
}";
Test<RedundantAssignmentAnalyzer>(input, 1, output);
}
[Test]
public void TestOutArgument()
{
var input = @"
class TestClass
{
void Test (out int i)
{
i = 0;
}
int TestMethod ()
{
int i = 2;
Test (out i);
return i;
}
}";
var output = @"
class TestClass
{
void Test (out int i)
{
i = 0;
}
int TestMethod ()
{
int i;
Test (out i);
return i;
}
}";
Test<RedundantAssignmentAnalyzer>(input, 1, output);
}
[Test]
public void TestOutArgument2()
{
var input = @"
class TestClass
{
void Test (out int i)
{
i = 0;
}
int TestMethod ()
{
int i;
Test (out i);
i = 2;
return i;
}
}";
Test<RedundantAssignmentAnalyzer>(input, 0);
}
[Test]
public void TestRefArgument()
{
var input = @"
class TestClass
{
void Test (ref int i)
{
i = 0;
}
}";
Test<RedundantAssignmentAnalyzer>(input, 0);
}
[Test]
public void TestAssignmentOperator()
{
var input = @"
class TestClass
{
int TestMethod ()
{
int i = 1;
i += 2;
return i;
}
}";
Test<RedundantAssignmentAnalyzer>(input, 0);
}
[Test]
public void TestIf()
{
var input = @"
class TestClass
{
int TestMethod (int j)
{
int i = 1;
if (j > 0) {
i += 2;
} else {
}
return i;
}
}";
Test<RedundantAssignmentAnalyzer>(input, 0);
}
[Test]
public void TestConditionalExpression()
{
var input = @"
class TestClass
{
int TestMethod (int j)
{
int i = 1;
return j > 0 ? i : 0;
}
}";
Test<RedundantAssignmentAnalyzer>(input, 0);
}
[Test]
public void TestLoop()
{
var input = @"
class TestClass
{
void TestMethod ()
{
var x = 0;
for (int i = 0; i < 10; i++) {
if (i > 5) {
x++;
} else {
x = 2;
}
}
if (x > 1) ;
}
}";
Test<RedundantAssignmentAnalyzer>(input, 0);
}
[Test]
public void TestForeach()
{
var input = @"
class TestClass
{
void TestMethod (int[] array)
{
foreach (int j in array) {
bool x = false;
foreach (int k in array)
foreach (int i in array)
if (i > 5) x = true;
if (x) break;
}
}
}";
Test<RedundantAssignmentAnalyzer>(input, 0);
}
[Test]
public void TestAssignmentInTryCatch()
{
var input = @"using System;
class TestClass
{
void TestMethod ()
{
var a = new TestClass ();
try {
a = null;
} catch (Exception) {
if (a != null) {
a.TestMethod ();
}
}
}
}";
Test<RedundantAssignmentAnalyzer>(input, 0);
}
[Test]
public void TestAssignmentInTryCatchFinally()
{
var input = @"
class TestClass
{
void TestMethod ()
{
var a = new TestClass ();
try {
a = null;
} finally {
if (a != null) {
a.TestMethod ();
}
}
}
}";
Test<RedundantAssignmentAnalyzer>(input, 0);
}
[Test]
public void TestAssignmentInCatch()
{
var input = @"using System;
class TestClass
{
void Test(TestClass a) { }
void TestMethod ()
{
var a = new TestClass ();
try {
} catch (Exception) {
a = null;
}
Test (a);
}
}";
Test<RedundantAssignmentAnalyzer>(input, 0);
}
[Test]
public void TestAssignmentBeforeTry()
{
var input = @"using System;
class TestClass
{
void Test(TestClass a) { }
void TestMethod ()
{
TestClass a = null;
try {
a = new TestClass ();
} catch (Exception) {
}
Test (a);
}
}";
Test<RedundantAssignmentAnalyzer>(input, 0);
}
[Test]
public void TestAssignmentInUsing()
{
var input = @"using System;
class TestClass
{
void TestMethod ()
{
using (var tc = new TestClass ()) {
// nothing
}
}
}";
Test<RedundantAssignmentAnalyzer>(input, 0);
}
[Test]
public void TestAssignmentWithFunction()
{
var input = @"using System;
class TestClass
{
TestClass Func () { return null; }
void TestMethod ()
{
var a = Func ();
}
}";
var output = @"using System;
class TestClass
{
TestClass Func () { return null; }
void TestMethod ()
{
Func ();
}
}";
Test<RedundantAssignmentAnalyzer>(input, output);
}
[Test]
public void TestAssignmentWithFunctionUsedLater()
{
var input = @"using System;
class TestClass
{
TestClass Func () { return null; }
void TestMethod ()
{
var a = Func ();
a = 2;
}
}";
var output = @"using System;
class TestClass
{
TestClass Func () { return null; }
void TestMethod ()
{
TestClass a;
Func ();
a = 2;
}
}";
Test<RedundantAssignmentAnalyzer>(input, 2, output, 0);
}
/// <summary>
/// Bug 11795 - Use of regex in linq statement not being recognized.
/// </summary>
[Test]
public void TestBug11795()
{
Analyze<RedundantAssignmentAnalyzer>(@"
using System;
using System.Text.RegularExpressions;
using System.IO;
using System.Linq;
public class Test
{
public void Demo ()
{
Regex pattern = new Regex (@""^.*\.(jpg|png)$"", RegexOptions.IgnoreCase);
string path = Path.Combine (""/"", ""speakers"");
Console.WriteLine (
from file in Directory.GetFiles (path)
where pattern.IsMatch (file)
select file
);
}
}");
}
/// <summary>
/// Bug 14929 - Assignment greyed out (meaning "redundant") when it should not be
/// </summary>
[Test]
public void TestBug14929()
{
Analyze<RedundantAssignmentAnalyzer>(@"
using System;
public class Test
{
public void Demo ()
{
bool save = true;
try {
throw new Exception ();
} catch (Exception) {
save = false;
throw;
} finally {
System.Console.WriteLine (save);
}
}
}");
}
[Test]
public void TestMultipleVariableInitializers()
{
Test<RedundantAssignmentAnalyzer>(@"using System;
public class MyClass
{
public static void Main ()
{
string outputFile = null, inputFile = null;
Console.WriteLine (outputFile);
}
}
", 1, @"using System;
public class MyClass
{
public static void Main ()
{
string outputFile = null, inputFile;
Console.WriteLine (outputFile);
}
}
", 0);
}
}
}
| |
namespace Serenity.Data
{
using Newtonsoft.Json;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Text;
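/// <summary>
/// Base class of the criteria object model. The operator overloads below allow criteria to be
/// composed fluently; for example (illustrative sketch, assuming the Criteria class defined
/// elsewhere in this assembly wraps a field or expression name):
/// <code>
/// var where = new Criteria("Age") >= 18 &amp;
///     ~(new Criteria("Name").StartsWith("A") | new Criteria("Name").StartsWith("B"));
/// </code>
/// </summary>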
[DebuggerDisplay("{ToStringIgnoreParams()}")]
[JsonConverter(typeof(JsonCriteriaConverter))]
public abstract class BaseCriteria : ICriteria
{
private static NoParamsChecker noParamsChecker = new NoParamsChecker();
private static IgnoreParams ignoreParams = new IgnoreParams();
public virtual bool IsEmpty
{
get { return false; }
}
public BaseCriteria IsNull()
{
return new UnaryCriteria(CriteriaOperator.IsNull, this);
}
public BaseCriteria IsNotNull()
{
return new UnaryCriteria(CriteriaOperator.IsNotNull, this);
}
public BaseCriteria Like(string mask)
{
return new BinaryCriteria(this, CriteriaOperator.Like, new ValueCriteria(mask));
}
public BaseCriteria NotLike(string mask)
{
return new BinaryCriteria(this, CriteriaOperator.NotLike, new ValueCriteria(mask));
}
public BaseCriteria StartsWith(string mask)
{
if (mask == null)
throw new ArgumentNullException("mask");
return Like(mask + "%");
}
public BaseCriteria EndsWith(string mask)
{
if (mask == null)
throw new ArgumentNullException("mask");
return Like("%" + mask);
}
public BaseCriteria Contains(string mask)
{
return Like("%" + mask + "%");
}
public BaseCriteria NotContains(string mask)
{
return NotLike("%" + mask + "%");
}
public BaseCriteria In<T>(params T[] values)
{
if (values == null || values.Length == 0)
throw new ArgumentNullException("values");
if (values.Length == 1 &&
values[0] is BaseCriteria)
{
return In((BaseCriteria)(object)values[0]);
}
if (values.Length == 1 &&
!(values[0] is string) &&
values[0] is IEnumerable)
{
return new BinaryCriteria(this, CriteriaOperator.In, new ValueCriteria(values[0]));
}
if (values.Length == 1 &&
values[0] is ISqlQuery)
{
return In((ISqlQuery)(object)values[0]);
}
return new BinaryCriteria(this, CriteriaOperator.In, new ValueCriteria(values));
}
public BaseCriteria In(BaseCriteria statement)
{
if (Object.ReferenceEquals(null, statement) || statement.IsEmpty)
throw new ArgumentNullException("statement");
return new BinaryCriteria(this, CriteriaOperator.In, statement);
}
public BaseCriteria InStatement(BaseCriteria statement)
{
return In(statement);
}
public BaseCriteria In(ISqlQuery statement)
{
if (Object.ReferenceEquals(null, statement))
throw new ArgumentNullException("statement");
return new BinaryCriteria(this, CriteriaOperator.In, new Criteria(statement));
}
public BaseCriteria NotIn<T>(params T[] values)
{
if (values == null || values.Length == 0)
throw new ArgumentNullException("values");
if (values.Length == 1 &&
values[0] is BaseCriteria)
{
return NotIn((BaseCriteria)(object)values[0]);
}
if (values.Length == 1 &&
!(values[0] is string) &&
values[0] is IEnumerable)
{
return new BinaryCriteria(this, CriteriaOperator.NotIn, new ValueCriteria(values[0]));
}
if (values.Length == 1 &&
values[0] is ISqlQuery)
{
return NotIn((ISqlQuery)(object)values[0]);
}
return new BinaryCriteria(this, CriteriaOperator.NotIn, new ValueCriteria(values));
}
public BaseCriteria NotIn(BaseCriteria statement)
{
if (Object.ReferenceEquals(null, statement) || statement.IsEmpty)
throw new ArgumentNullException("statement");
return new BinaryCriteria(this, CriteriaOperator.NotIn, statement);
}
public BaseCriteria NotIn(ISqlQuery statement)
{
if (Object.ReferenceEquals(null, statement))
throw new ArgumentNullException("statement");
return new BinaryCriteria(this, CriteriaOperator.NotIn, new Criteria(statement));
}
public static BaseCriteria operator !(BaseCriteria criteria)
{
return new UnaryCriteria(CriteriaOperator.Not, criteria);
}
public static BaseCriteria operator ==(BaseCriteria criteria1, BaseCriteria criteria2)
{
return new BinaryCriteria(criteria1, CriteriaOperator.EQ, criteria2);
}
public static BaseCriteria operator ==(BaseCriteria criteria1, Parameter param)
{
return new BinaryCriteria(criteria1, CriteriaOperator.EQ, new ParamCriteria(param.Name));
}
public static BaseCriteria operator ==(BaseCriteria criteria1, int value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.EQ, new ValueCriteria(value));
}
public static BaseCriteria operator ==(BaseCriteria criteria1, Int64 value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.EQ, new ValueCriteria(value));
}
public static BaseCriteria operator ==(BaseCriteria criteria1, string value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.EQ, new ValueCriteria(value));
}
public static BaseCriteria operator ==(BaseCriteria criteria1, Double value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.EQ, new ValueCriteria(value));
}
public static BaseCriteria operator ==(BaseCriteria criteria1, Decimal value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.EQ, new ValueCriteria(value));
}
public static BaseCriteria operator ==(BaseCriteria criteria1, DateTime value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.EQ, new ValueCriteria(value));
}
public static BaseCriteria operator ==(BaseCriteria criteria1, Guid value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.EQ, new ValueCriteria(value));
}
public static BaseCriteria operator !=(BaseCriteria criteria1, BaseCriteria criteria2)
{
return new BinaryCriteria(criteria1, CriteriaOperator.NE, criteria2);
}
public static BaseCriteria operator !=(BaseCriteria criteria1, Parameter param)
{
return new BinaryCriteria(criteria1, CriteriaOperator.NE, new ParamCriteria(param.Name));
}
public static BaseCriteria operator !=(BaseCriteria criteria1, int value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.NE, new ValueCriteria(value));
}
public static BaseCriteria operator !=(BaseCriteria criteria1, Int64 value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.NE, new ValueCriteria(value));
}
public static BaseCriteria operator !=(BaseCriteria criteria1, string value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.NE, new ValueCriteria(value));
}
public static BaseCriteria operator !=(BaseCriteria criteria1, Double value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.NE, new ValueCriteria(value));
}
public static BaseCriteria operator !=(BaseCriteria criteria1, Decimal value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.NE, new ValueCriteria(value));
}
public static BaseCriteria operator !=(BaseCriteria criteria1, DateTime value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.NE, new ValueCriteria(value));
}
public static BaseCriteria operator !=(BaseCriteria criteria1, Guid value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.NE, new ValueCriteria(value));
}
public static BaseCriteria operator >(BaseCriteria criteria1, BaseCriteria criteria2)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GT, criteria2);
}
public static BaseCriteria operator >(BaseCriteria criteria1, Parameter param)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GT, new ParamCriteria(param.Name));
}
public static BaseCriteria operator >(BaseCriteria criteria1, Int32 value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GT, new ValueCriteria(value));
}
public static BaseCriteria operator >(BaseCriteria criteria1, Int64 value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GT, new ValueCriteria(value));
}
public static BaseCriteria operator >(BaseCriteria criteria1, string value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GT, new ValueCriteria(value));
}
public static BaseCriteria operator >(BaseCriteria criteria1, Double value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GT, new ValueCriteria(value));
}
public static BaseCriteria operator >(BaseCriteria criteria1, Decimal value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GT, new ValueCriteria(value));
}
public static BaseCriteria operator >(BaseCriteria criteria1, DateTime value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GT, new ValueCriteria(value));
}
public static BaseCriteria operator >(BaseCriteria criteria1, Guid value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GT, new ValueCriteria(value));
}
public static BaseCriteria operator >=(BaseCriteria criteria1, BaseCriteria criteria2)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GE, criteria2);
}
public static BaseCriteria operator >=(BaseCriteria criteria1, Parameter param)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GE, new ParamCriteria(param.Name));
}
public static BaseCriteria operator >=(BaseCriteria criteria1, Int32 value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GE, new ValueCriteria(value));
}
public static BaseCriteria operator >=(BaseCriteria criteria1, Int64 value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GE, new ValueCriteria(value));
}
public static BaseCriteria operator >=(BaseCriteria criteria1, string value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GE, new ValueCriteria(value));
}
public static BaseCriteria operator >=(BaseCriteria criteria1, Double value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GE, new ValueCriteria(value));
}
public static BaseCriteria operator >=(BaseCriteria criteria1, Decimal value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GE, new ValueCriteria(value));
}
public static BaseCriteria operator >=(BaseCriteria criteria1, DateTime value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GE, new ValueCriteria(value));
}
public static BaseCriteria operator >=(BaseCriteria criteria1, Guid value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.GE, new ValueCriteria(value));
}
public static BaseCriteria operator <(BaseCriteria criteria1, BaseCriteria criteria2)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LT, criteria2);
}
public static BaseCriteria operator <(BaseCriteria criteria1, Parameter param)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LT, new ParamCriteria(param.Name));
}
public static BaseCriteria operator <(BaseCriteria criteria1, Int32 value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LT, new ValueCriteria(value));
}
public static BaseCriteria operator <(BaseCriteria criteria1, Int64 value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LT, new ValueCriteria(value));
}
public static BaseCriteria operator <(BaseCriteria criteria1, string value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LT, new ValueCriteria(value));
}
public static BaseCriteria operator <(BaseCriteria criteria1, Double value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LT, new ValueCriteria(value));
}
public static BaseCriteria operator <(BaseCriteria criteria1, Decimal value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LT, new ValueCriteria(value));
}
public static BaseCriteria operator <(BaseCriteria criteria1, DateTime value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LT, new ValueCriteria(value));
}
public static BaseCriteria operator <(BaseCriteria criteria1, Guid value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LT, new ValueCriteria(value));
}
public static BaseCriteria operator <=(BaseCriteria criteria1, BaseCriteria criteria2)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LE, criteria2);
}
public static BaseCriteria operator <=(BaseCriteria criteria1, Parameter param)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LE, new ParamCriteria(param.Name));
}
public static BaseCriteria operator <=(BaseCriteria criteria1, Int32 value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LE, new ValueCriteria(value));
}
public static BaseCriteria operator <=(BaseCriteria criteria1, Int64 value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LE, new ValueCriteria(value));
}
public static BaseCriteria operator <=(BaseCriteria criteria1, string value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LE, new ValueCriteria(value));
}
public static BaseCriteria operator <=(BaseCriteria criteria1, Double value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LE, new ValueCriteria(value));
}
public static BaseCriteria operator <=(BaseCriteria criteria1, Decimal value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LE, new ValueCriteria(value));
}
public static BaseCriteria operator <=(BaseCriteria criteria1, DateTime value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LE, new ValueCriteria(value));
}
public static BaseCriteria operator <=(BaseCriteria criteria1, Guid value)
{
return new BinaryCriteria(criteria1, CriteriaOperator.LE, new ValueCriteria(value));
}
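// JoinIf backs the &, | and ^ operators below: if either side IsEmpty it is dropped and the
// other side is returned unchanged, so chaining conditions onto an initially empty criteria
// never produces a dangling AND/OR node.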
private static BaseCriteria JoinIf(BaseCriteria criteria1, BaseCriteria criteria2, CriteriaOperator op)
{
if (ReferenceEquals(null, criteria1))
throw new ArgumentNullException("criteria1");
if (ReferenceEquals(null, criteria2))
throw new ArgumentNullException("criteria2");
if (criteria1.IsEmpty)
return criteria2;
if (criteria2.IsEmpty)
return criteria1;
return new BinaryCriteria(criteria1, op, criteria2);
}
public static BaseCriteria operator &(BaseCriteria criteria1, BaseCriteria criteria2)
{
return JoinIf(criteria1, criteria2, CriteriaOperator.AND);
}
public static BaseCriteria operator |(BaseCriteria criteria1, BaseCriteria criteria2)
{
return JoinIf(criteria1, criteria2, CriteriaOperator.OR);
}
public static BaseCriteria operator ^(BaseCriteria criteria1, BaseCriteria criteria2)
{
return JoinIf(criteria1, criteria2, CriteriaOperator.XOR);
}
public static BaseCriteria operator ~(BaseCriteria criteria)
{
if (!criteria.IsEmpty)
return new UnaryCriteria(CriteriaOperator.Paren, criteria);
return criteria;
}
/// <summary>
/// Must be overridden; otherwise the compiler warns that the equality operators are overloaded without overriding GetHashCode.
/// </summary>
public override int GetHashCode()
{
return base.GetHashCode();
}
/// <summary>
/// Must be overridden; otherwise the compiler warns that the equality operators are overloaded without overriding Equals.
/// </summary>
/// <param name="obj">object</param>
/// <returns>True if equals to object</returns>
public override bool Equals(object obj)
{
return base.Equals(obj);
}
public string ToStringIgnoreParams()
{
return ToString(ignoreParams);
}
public string ToString(IQueryWithParams query)
{
var sb = new StringBuilder(256);
ToString(sb, query);
return sb.ToString();
}
public override string ToString()
{
return ToString(noParamsChecker);
}
public virtual void ToString(StringBuilder sb, IQueryWithParams query)
{
throw new NotImplementedException();
}
private class NoParamsChecker : IQueryWithParams
{
public void AddParam(string name, object value)
{
throw new InvalidOperationException("Criteria should not have parameters!");
}
public void SetParam(string name, object value)
{
throw new InvalidOperationException("Criteria should not have parameters!");
}
public Parameter AutoParam()
{
throw new InvalidOperationException("Criteria should not have parameters!");
}
public IDictionary<string, object> Params
{
get { return null; }
}
public SqlDialect Dialect
{
get { return SqlSettings.CurrentDialect; }
}
}
private class IgnoreParams : IQueryWithParams
{
private static int next;
public void AddParam(string name, object value)
{
}
public void SetParam(string name, object value)
{
}
public Parameter AutoParam()
{
return new Parameter((next++).IndexParam());
}
public IDictionary<string, object> Params
{
get { return null; }
}
public SqlDialect Dialect
{
get { return SqlSettings.CurrentDialect; }
}
}
}
}
| |
#region License
/* **********************************************************************************
* Copyright (c) Roman Ivantsov
* This source code is subject to terms and conditions of the MIT License
* for Irony. A copy of the license can be found in the License.txt file
* at the root of this distribution.
* By using this source code in any fashion, you are agreeing to be bound by the terms of the
* MIT License.
* You must not remove this notice from this software.
* **********************************************************************************/
#endregion
using System;
using System.Collections.Generic;
using System.Text;
using System.Runtime.InteropServices;
using System.Diagnostics;
using System.Globalization;
namespace Irony.Parsing {
[Flags]
public enum ParseOptions {
GrammarDebugging = 0x01,
TraceParser = 0x02,
AnalyzeCode = 0x10, //run code analysis; effective only in Module mode
}
public enum ParseMode {
File, //default, continuous input file
VsLineScan, // line-by-line scanning in VS integration for syntax highlighting
CommandLine, //line-by-line from console
}
public enum ParserStatus {
Init, //initial state
Parsing,
Previewing, //previewing tokens
Recovering, //recovering from error
Accepted,
AcceptedPartial,
Error,
}
// The purpose of this class is to provide a container for information shared
// between parser, scanner and token filters.
public class ParsingContext {
public readonly Parser Parser;
public readonly LanguageData Language;
//Parser settings
public ParseOptions Options;
public ParseMode Mode = ParseMode.File;
public int MaxErrors = 20; //maximum error count to report
public int TabWidth = 8;
public CultureInfo Culture; //defaults to Grammar.DefaultCulture, might be changed by app code
#region properties and fields
//Parser fields
public ParserState CurrentParserState { get; internal set; }
public ParseTreeNode CurrentParserInput { get; internal set; }
internal readonly ParserStack ParserStack = new ParserStack();
internal readonly ParserStack ParserInputStack = new ParserStack();
public ParseTree CurrentParseTree { get; internal set; }
public readonly TokenStack OpenBraces = new TokenStack();
public ParserTrace ParserTrace = new ParserTrace();
public ISourceStream Source { get { return SourceStream; } }
//list of terminals for the current parser state and current input char
public TerminalList CurrentTerminals = new TerminalList();
public Token CurrentToken; //The token just scanned by Scanner
public Token PreviousToken;
public SourceLocation PreviousLineStart; //Location of last line start
//Internal fields
internal SourceStream SourceStream;
internal TokenFilterList TokenFilters = new TokenFilterList();
internal TokenStack BufferedTokens = new TokenStack();
internal IEnumerator<Token> FilteredTokens; //stream of tokens after filter
internal TokenStack PreviewTokens = new TokenStack();
internal ParsingEventArgs SharedParsingEventArgs;
public VsScannerStateMap VsLineScanState; //State variable used in line scanning mode for VS integration
public ParserStatus Status {get; internal set;}
public bool HasErrors; //error flag, once set remains set
//values dictionary for use by custom language implementations to store temporary values during the parse process
public readonly Dictionary<string, object> Values = new Dictionary<string, object>();
#endregion
#region constructors
public ParsingContext(Parser parser) {
this.Parser = parser;
Language = Parser.Language;
Culture = Language.Grammar.DefaultCulture;
//This might be a problem for multi-threading - if there are several contexts on parallel threads with different cultures.
//Resources.Culture is static property (this is not Irony's fault, this is auto-generated file).
Resources.Culture = Culture;
//We assume that if Irony is compiled in Debug mode, the developer is debugging their grammar/language implementation
#if DEBUG
Options |= ParseOptions.GrammarDebugging;
#endif
SharedParsingEventArgs = new ParsingEventArgs(this);
}
#endregion
#region Events: TokenCreated
public event EventHandler<ParsingEventArgs> TokenCreated;
internal void OnTokenCreated() {
if (TokenCreated != null)
TokenCreated(this, SharedParsingEventArgs);
}
#endregion
#region Options helper methods
public bool OptionIsSet(ParseOptions option) {
return (Options & option) != 0;
}
public void SetOption(ParseOptions option, bool value) {
if (value)
Options |= option;
else
Options &= ~option;
}
#endregion
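// Illustrative note, not part of the original source: ParseOptions is a [Flags] enum, so
// options combine with bitwise OR and are tested with a mask, e.g. (assuming 'context' is
// a ParsingContext obtained elsewhere):
//   context.SetOption(ParseOptions.TraceParser, true);   // Options |= ParseOptions.TraceParser
//   bool tracing = context.OptionIsSet(ParseOptions.TraceParser);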
#region Error handling and tracing
public void AddParserError(string message, params object[] args) {
var location = CurrentParserInput == null? Source.Location : CurrentParserInput.Span.Location;
HasErrors = true;
AddParserMessage(ParserErrorLevel.Error, location, message, args);
}
public void AddParserMessage(ParserErrorLevel level, SourceLocation location, string message, params object[] args) {
if (CurrentParseTree == null) return;
if (CurrentParseTree.ParserMessages.Count >= MaxErrors) return;
if (args != null && args.Length > 0)
message = string.Format(message, args);
CurrentParseTree.ParserMessages.Add(new ParserMessage(level, location, message, CurrentParserState));
if (OptionIsSet(ParseOptions.TraceParser))
ParserTrace.Add( new ParserTraceEntry(CurrentParserState, ParserStack.Top, CurrentParserInput, message, true));
}
public void AddTrace(string message, params object[] args) {
if (!OptionIsSet(ParseOptions.TraceParser)) return;
if (args != null && args.Length > 0)
message = string.Format(message, args);
ParserTrace.Add(new ParserTraceEntry(CurrentParserState, ParserStack.Top, CurrentParserInput, message, false));
}
internal string FormatUnexpectedInputErrorMessage() {
string msg;
var expectedSet = GetExpectedTermSet();
msg = Language.Grammar.ConstructParserErrorMessage(this, expectedSet);
if (string.IsNullOrEmpty(msg))
msg = Resources.ErrSyntaxErrorNoInfo;
return msg;
}
#endregion
internal void Reset() {
CurrentParserState = Parser.InitialState;
CurrentParserInput = null;
ParserStack.Clear();
HasErrors = false;
ParserStack.Push(new ParseTreeNode(CurrentParserState));
ParserInputStack.Clear();
CurrentParseTree = null;
OpenBraces.Clear();
ParserTrace.Clear();
CurrentTerminals.Clear();
CurrentToken = null;
PreviousToken = null;
PreviousLineStart = new SourceLocation(0, -1, 0);
BufferedTokens.Clear();
PreviewTokens.Clear();
Values.Clear();
foreach (var filter in TokenFilters)
filter.Reset();
}
public void SetSourceLocation(SourceLocation location) {
foreach (var filter in TokenFilters)
filter.OnSetSourceLocation(location);
SourceStream.Location = location;
}
#region Expected term set computations
public StringSet GetExpectedTermSet() {
if (CurrentParserState == null)
return new StringSet();
//See note about multi-threading issues in ComputeReportedExpectedSet comments.
if (CurrentParserState.ReportedExpectedSet == null)
CurrentParserState.ReportedExpectedSet = CoreParser.ComputeGroupedExpectedSetForState(Language.Grammar, CurrentParserState);
//Filter out closing braces which are not expected based on previous input.
// While the closing parenthesis ")" might be an expected term in a state in general,
// if there was no opening parenthesis in the preceding input then we would not
// expect a closing one.
var expectedSet = FilterBracesInExpectedSet(CurrentParserState.ReportedExpectedSet);
return expectedSet;
}
private StringSet FilterBracesInExpectedSet(StringSet stateExpectedSet) {
var result = new StringSet();
result.UnionWith(stateExpectedSet);
//Find what brace we expect
var nextClosingBrace = string.Empty;
if (OpenBraces.Count > 0) {
var lastOpenBraceTerm = OpenBraces.Peek().KeyTerm;
var nextClosingBraceTerm = lastOpenBraceTerm.IsPairFor as KeyTerm;
if (nextClosingBraceTerm != null)
nextClosingBrace = nextClosingBraceTerm.Text;
}
//Now check all closing braces in result set, and leave only nextClosingBrace
foreach(var closingBrace in Language.GrammarData.ClosingBraces) {
if (result.Contains(closingBrace) && closingBrace != nextClosingBrace)
result.Remove(closingBrace);
}
return result;
}
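// Worked example, not part of the original source: if the state's reported expected set is
// { "+", ")", "]" }, the top of OpenBraces is "(" and its IsPairFor term is ")", then
// FilterBracesInExpectedSet keeps ")" but removes "]" (assuming both ")" and "]" are listed
// in GrammarData.ClosingBraces), yielding { "+", ")" }.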
#endregion
}//class
// A struct used for packing/unpacking the ScannerState int value; used for VS integration.
// When a Terminal produces an incomplete token, it sets
// this state to a non-zero value; the value identifies the terminal that will continue scanning when
// scanning resumes, plus the terminal's internal state for terminals that can produce several types of multi-line tokens.
// For example, there may be several types of string literal, as in Python.
[StructLayout(LayoutKind.Explicit)]
public struct VsScannerStateMap {
[FieldOffset(0)]
public int Value;
[FieldOffset(0)]
public byte TerminalIndex; //1-based index of active multiline term in MultilineTerminals
[FieldOffset(1)]
public byte TokenSubType; //terminal subtype (used in StringLiteral to identify string kind)
[FieldOffset(2)]
public short TerminalFlags; //Terminal flags
}//struct
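// Illustrative sketch, not part of the original source: because the struct uses explicit
// layout with overlapping field offsets, the byte/short fields are views over the same
// 32 bits as Value, so packing is plain field assignment. The helper below is assumed,
// not an original Irony API; the hex value in the comment holds on little-endian machines.
internal static class VsScannerStateMapExample {
internal static int Pack(byte terminalIndex, byte tokenSubType, short terminalFlags) {
var map = new VsScannerStateMap();
map.TerminalIndex = terminalIndex; // byte 0 of Value
map.TokenSubType = tokenSubType;   // byte 1 of Value
map.TerminalFlags = terminalFlags; // bytes 2 and 3 of Value
return map.Value;                  // e.g. Pack(2, 1, 0) == 0x0102
}
}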
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using System.Xml.Serialization;
using Rynchodon.AntennaRelay;
using Rynchodon.Autopilot.Data;
using Rynchodon.Autopilot.Instruction;
using Rynchodon.Autopilot.Movement;
using Rynchodon.Autopilot.Navigator;
using Rynchodon.Autopilot.Pathfinding;
using Rynchodon.Settings;
using Rynchodon.Threading;
using Rynchodon.Utility;
using Sandbox.Common.ObjectBuilders;
using Sandbox.Game.Entities;
using Sandbox.ModAPI;
using VRage.Game.Components;
using VRage.Game.ModAPI;
using VRageMath;
namespace Rynchodon.Autopilot
{
/// <summary>
/// Contains components of the autopilot block.
/// </summary>
public class ShipControllerBlock
{
public readonly IMyCubeBlock CubeBlock;
public readonly PseudoBlock Pseudo;
public readonly RelayNode NetworkNode;
public readonly AutopilotTerminal AutopilotTerminal;
public MyShipController Controller { get { return (MyShipController)CubeBlock; } }
public IMyTerminalBlock Terminal { get { return (IMyTerminalBlock)CubeBlock; } }
public RelayStorage NetworkStorage { get { return NetworkNode.Storage; } }
public IMyCubeGrid CubeGrid { get { return Controller.CubeGrid; } }
public MyPhysicsComponentBase Physics { get { return Controller.CubeGrid.Physics; } }
public bool AutopilotControl
{
get { return AutopilotTerminal.AutopilotControlSwitch; }
set { AutopilotTerminal.AutopilotControlSwitch = value; }
}
public ShipControllerBlock(IMyCubeBlock block, Action<Message> messageHandler)
{
CubeBlock = block;
Pseudo = new PseudoBlock(block);
NetworkNode = new RelayNode(block) { MessageHandler = messageHandler };
AutopilotTerminal = new AutopilotTerminal(block);
}
}
/// <summary>
/// Core class for all Autopilot functionality.
/// </summary>
public class ShipAutopilot
{
[Serializable]
public class Builder_Autopilot
{
[XmlAttribute]
public long AutopilotBlock;
public string Commands;
public int CurrentCommand;
public Vector3D EngagerOriginalPosition = Vector3D.PositiveInfinity;
public long EngagerOriginalEntity;
}
public const uint UpdateFrequency = 3u;
private const string subtype_autopilotBlock = "Autopilot-Block";
public static ThreadManager AutopilotThread = new ThreadManager(threadName: "Autopilot");
private static HashSet<IMyCubeGrid> GridBeingControlled = new HashSet<IMyCubeGrid>();
/// <summary>
/// Determines if the given block is an autopilot block. Does not check ServerSettings.
/// </summary>
/// <param name="block">The block to check</param>
/// <returns>True iff the given block is an autopilot block.</returns>
public static bool IsAutopilotBlock(VRage.Game.ModAPI.Ingame.IMyCubeBlock block)
{
if (block is MyCockpit)
return block.BlockDefinition.SubtypeId.Contains(subtype_autopilotBlock);
return block is MyRemoteControl;
}
/// <summary>
/// Determines if the given grid has an autopilot block. Does check ServerSettings.
/// </summary>
/// <param name="grid">The grid to search</param>
/// <returns>True iff the given grid contains one or more autopilot blocks.</returns>
public static bool HasAutopilotBlock(IMyCubeGrid grid)
{
if (!ServerSettings.GetSetting<bool>(ServerSettings.SettingName.bAllowAutopilot))
return false;
var cache = CubeGridCache.GetFor(grid);
foreach (IMyCubeBlock cockpit in cache.BlocksOfType(typeof(MyObjectBuilder_Cockpit)))
if (IsAutopilotBlock(cockpit))
return true;
if (ServerSettings.GetSetting<bool>(ServerSettings.SettingName.bUseRemoteControl))
{
foreach (IMyCubeBlock remote in cache.BlocksOfType(typeof(MyObjectBuilder_RemoteControl)))
if (IsAutopilotBlock(remote))
return true;
}
return false;
}
public enum State : byte { Disabled, Player, Enabled, Halted, Closed }
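// Descriptive note, not part of the original source: per the state setter and UpdateThread below,
// Disabled becomes Enabled once CheckControl succeeds; Enabled becomes Player while a player
// controls the grid and Disabled when control is lost; Halted is entered on an unhandled
// exception and expires after m_endOfHalt (five minutes); Closed is terminal.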
public readonly ShipControllerBlock m_block;
private readonly FastResourceLock lock_execution = new FastResourceLock();
private Pathfinder m_pathfinder;
private AutopilotCommands m_commands;
private IMyCubeGrid m_controlledGrid;
private State value_state = State.Disabled;
private TimeSpan m_previousInstructions = TimeSpan.MinValue;
private TimeSpan m_endOfHalt;
private ulong m_nextCustomInfo;
private AutopilotActionList m_autopilotActions;
private Logable Log { get { return new Logable(m_block?.CubeBlock); } }
private Mover m_mover { get { return m_pathfinder.Mover; } }
private AllNavigationSettings m_navSet { get { return m_mover.NavSet; } }
private State m_state
{
get { return value_state; }
set
{
if (value_state == value || value_state == State.Closed)
return;
Log.DebugLog("state change from " + value_state + " to " + value, Logger.severity.DEBUG);
value_state = value;
m_pathfinder.Halt();
switch (value_state)
{
case State.Enabled:
case State.Player:
m_mover.MoveAndRotateStop(false);
return;
case State.Disabled:
m_navSet.OnStartOfCommands(); // here so that navigators are disposed of
m_autopilotActions = null;
m_mover.MoveAndRotateStop(false);
return;
case State.Halted:
m_endOfHalt = Globals.ElapsedTime.Add(new TimeSpan(0, 5, 0));
m_mover.MoveAndRotateStop(true);
return;
case State.Closed:
ReleaseControlledGrid();
m_pathfinder = null;
m_commands = null;
return;
default:
Log.AlwaysLog("State not implemented: " + value, Logger.severity.FATAL);
return;
}
}
}
/// <summary>
/// Creates an Autopilot for the given ship controller.
/// </summary>
/// <param name="block">The ship controller to use</param>
public ShipAutopilot(IMyCubeBlock block)
{
this.m_block = new ShipControllerBlock(block, HandleMessage);
this.m_pathfinder = new Pathfinder(m_block);
this.m_commands = AutopilotCommands.GetOrCreate(m_block.Terminal);
this.m_block.CubeBlock.OnClosing += CubeBlock_OnClosing;
int start = block.DisplayNameText.IndexOf('[') + 1, end = block.DisplayNameText.IndexOf(']');
if (start > 0 && end > start)
{
m_block.AutopilotTerminal.AutopilotCommandsText = new StringBuilder(block.DisplayNameText.Substring(start, end - start).Trim());
int lengthBefore = start - 1;
string nameBefore = lengthBefore > 0 ? m_block.Terminal.DisplayNameText.Substring(0, lengthBefore) : string.Empty;
end++;
int lengthAfter = m_block.Terminal.DisplayNameText.Length - end;
string nameAfter = lengthAfter > 0 ? m_block.Terminal.DisplayNameText.Substring(end, lengthAfter) : string.Empty;
m_block.Terminal.CustomName = (nameBefore + nameAfter).Trim();
}
Log.DebugLog("Created autopilot for: " + block.DisplayNameText);
Registrar.Add(block, this);
}
private void CubeBlock_OnClosing(VRage.ModAPI.IMyEntity obj)
{
m_block.CubeBlock.OnClosing -= CubeBlock_OnClosing;
m_state = State.Closed;
}
public void Update()
{
AutopilotThread.EnqueueAction(UpdateThread);
}
/// <summary>
/// Run the autopilot
/// </summary>
private void UpdateThread()
{
if (!lock_execution.TryAcquireExclusive())
return;
try
{
if (Globals.UpdateCount > m_nextCustomInfo)
{
m_nextCustomInfo = Globals.UpdateCount + 10ul;
UpdateCustomInfo();
}
switch (m_state)
{
case State.Disabled:
if (CheckControl())
m_state = State.Enabled;
return;
case State.Enabled:
if (CheckControl())
break;
m_state = State.Disabled;
return;
case State.Player:
// wait for player to give back control, do not reset
if (MyAPIGateway.Players.GetPlayerControllingEntity(m_controlledGrid) == null)
m_state = State.Enabled;
return;
case State.Halted:
if (!m_block.AutopilotControl || Globals.ElapsedTime > m_endOfHalt)
m_state = State.Disabled;
return;
case State.Closed:
return;
default:
throw new Exception("Case not implemented: " + m_state);
}
if (MyAPIGateway.Players.GetPlayerControllingEntity(m_controlledGrid) != null)
{
m_state = State.Player;
return;
}
EnemyFinder ef = m_navSet.Settings_Current.EnemyFinder;
if (ef != null)
ef.Update();
if (m_navSet.Settings_Current.WaitUntil > Globals.ElapsedTime)
return;
if (MoveAndRotate())
return;
if (m_autopilotActions != null)
while (true)
{
if (!m_autopilotActions.MoveNext())
{
Log.DebugLog("finder: " + m_navSet.Settings_Current.EnemyFinder);
m_autopilotActions = null;
return;
}
m_autopilotActions.Current.Invoke(m_pathfinder);
if (m_navSet.Settings_Current.WaitUntil > Globals.ElapsedTime)
{
Log.DebugLog("now waiting until " + m_navSet.Settings_Current.WaitUntil);
return;
}
if (m_navSet.Settings_Current.NavigatorMover != null)
{
Log.DebugLog("now have a navigator mover: " + m_navSet.Settings_Current.NavigatorMover);
return;
}
}
if (RotateOnly())
return;
TimeSpan nextInstructions = m_previousInstructions + TimeSpan.FromSeconds(m_navSet.Settings_Current.Complaint != InfoString.StringId.None || ef != null ? 60d : 1d);
if (nextInstructions > Globals.ElapsedTime)
{
Log.DebugLog("Delaying instructions until " + nextInstructions, Logger.severity.INFO);
m_navSet.Settings_Task_NavWay.WaitUntil = nextInstructions;
return;
}
Log.DebugLog("enqueuing instructions", Logger.severity.DEBUG);
m_previousInstructions = Globals.ElapsedTime;
m_autopilotActions = m_commands.GetActions();
if (m_autopilotActions == null || m_autopilotActions.IsEmpty)
ReleaseControlledGrid();
m_navSet.OnStartOfCommands();
m_mover.MoveAndRotateStop(false);
if (m_commands.HasSyntaxErrors)
m_navSet.Settings_Task_NavWay.WaitUntil = Globals.ElapsedTime + TimeSpan.FromMinutes(1d);
}
catch (Exception ex)
{
Log.AlwaysLog("Commands: " + m_commands.Commands, Logger.severity.DEBUG);
Log.AlwaysLog("Exception: " + ex, Logger.severity.ERROR);
m_state = State.Halted;
}
finally
{ lock_execution.ReleaseExclusive(); }
}
private bool MoveAndRotate()
{
INavigatorMover navM = m_navSet.Settings_Current.NavigatorMover;
if (navM != null)
{
Profiler.Profile(navM.Move);
INavigatorRotator navR = m_navSet.Settings_Current.NavigatorRotator; // fetched here because mover might remove it
if (navR != null)
Profiler.Profile(navR.Rotate);
else
{
navR = m_navSet.Settings_Current.NavigatorMover as INavigatorRotator; // fetch again in case it was removed
if (navR != null)
Profiler.Profile(navR.Rotate);
}
Profiler.Profile(m_mover.MoveAndRotate);
return true;
}
return false;
}
/// <summary>
/// Run the rotator by itself until the direction is matched.
/// </summary>
private bool RotateOnly()
{
INavigatorRotator navR = m_navSet.Settings_Current.NavigatorRotator;
if (navR != null)
{
// direction might have been matched by another rotator, so run it first
Profiler.Profile(navR.Rotate);
Profiler.Profile(m_mover.MoveAndRotate);
if (m_navSet.DirectionMatched())
{
m_mover.StopRotate();
m_mover.MoveAndRotate();
}
else
return true;
}
return false;
}
#region Control
/// <summary>
/// Checks if the Autopilot has permission to run.
/// </summary>
/// <returns>True iff the Autopilot has permission to run.</returns>
private bool CheckControl()
{
// cache current grid in case it changes
IMyCubeGrid myGrid = m_block.CubeGrid;
if (m_controlledGrid != null)
{
if (m_controlledGrid != myGrid)
{
// a (de)merge happened
ReleaseControlledGrid();
}
else if (CanControlBlockGrid(m_controlledGrid))
{
// OK to continue controlling
return true;
}
else
{
// cannot continue to control
ReleaseControlledGrid();
return false;
}
}
if (!CanControlBlockGrid(myGrid) || !GridBeingControlled.Add(myGrid))
return false;
m_controlledGrid = myGrid;
// toggle thrusters off and on to make sure thrusters are actually online
MyAPIGateway.Utilities.InvokeOnGameThread(() => {
if (this.m_block.Controller.ControlThrusters)
this.m_block.CubeBlock.ApplyAction("ControlThrusters");
this.m_block.CubeBlock.ApplyAction("ControlThrusters");
});
return true;
}
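// Descriptive note, not part of the original source: the static GridBeingControlled set acts as
// a claim table. HashSet<T>.Add returns false when the grid is already present, so only one
// autopilot can hold a given grid at a time; ReleaseControlledGrid removes the claim so another
// autopilot block may take over.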
/// <summary>
/// Checks if block and grid can be controlled.
/// </summary>
/// <returns>True iff block and grid can be controlled.</returns>
private bool CanControlBlockGrid(IMyCubeGrid grid)
{
// is grid ready
if (grid.IsStatic)
return false;
// is block ready
if (!m_block.Controller.IsWorking
|| !m_block.AutopilotControl)
return false;
MyCubeGrid mcg = grid as MyCubeGrid;
if (mcg.HasMainCockpit() && !m_block.Controller.IsMainCockpit)
return false;
return true;
}
/// <summary>
/// Release the grid so another Autopilot can control it.
/// </summary>
private void ReleaseControlledGrid()
{
if (m_controlledGrid == null)
return;
if (!GridBeingControlled.Remove(m_controlledGrid))
{
Log.AlwaysLog("Failed to remove " + m_controlledGrid.DisplayName + " from GridBeingControlled", Logger.severity.FATAL);
throw new InvalidOperationException("Failed to remove " + m_controlledGrid.DisplayName + " from GridBeingControlled");
}
//Log.DebugLog("Released control of " + ControlledGrid.DisplayName, "ReleaseControlledGrid()", Logger.severity.DEBUG);
m_controlledGrid = null;
}
#endregion
#region Custom Info
private void UpdateCustomInfo()
{
AutopilotTerminal ApTerm = m_block.AutopilotTerminal;
AllNavigationSettings.SettingsLevel Settings_Current = m_navSet.Settings_Current;
ApTerm.m_autopilotStatus = m_state;
if (m_state == State.Halted)
return;
AutopilotTerminal.AutopilotFlags flags = AutopilotTerminal.AutopilotFlags.None;
if (m_controlledGrid != null)
flags |= AutopilotTerminal.AutopilotFlags.HasControl;
if (m_pathfinder.ReportedObstruction != null)
{
ApTerm.m_blockedBy = m_pathfinder.ReportedObstruction.EntityId;
if (m_pathfinder.RotateCheck.ObstructingEntity != null)
flags |= AutopilotTerminal.AutopilotFlags.RotationBlocked;
}
else if (m_pathfinder.RotateCheck.ObstructingEntity != null)
{
flags |= AutopilotTerminal.AutopilotFlags.RotationBlocked;
ApTerm.m_blockedBy = m_pathfinder.RotateCheck.ObstructingEntity.EntityId;
}
EnemyFinder ef = Settings_Current.EnemyFinder;
if (ef != null && ef.Grid == null)
{
flags |= AutopilotTerminal.AutopilotFlags.EnemyFinderIssue;
ApTerm.m_reasonCannotTarget = ef.m_reason;
if (ef.m_bestGrid != null)
ApTerm.m_enemyFinderBestTarget = ef.m_bestGrid.Entity.EntityId;
}
INavigatorMover navM = Settings_Current.NavigatorMover;
if (navM != null)
{
flags |= AutopilotTerminal.AutopilotFlags.HasNavigatorMover;
ApTerm.m_prevNavMover = navM.GetType().Name;
AutopilotTerminal.Static.prevNavMoverInfo.Update((IMyTerminalBlock)m_block.CubeBlock, navM.AppendCustomInfo);
}
INavigatorRotator navR = Settings_Current.NavigatorRotator;
if (navR != null && navR != navM)
{
flags |= AutopilotTerminal.AutopilotFlags.HasNavigatorRotator;
ApTerm.m_prevNavRotator = navR.GetType().Name;
AutopilotTerminal.Static.prevNavRotatorInfo.Update((IMyTerminalBlock)m_block.CubeBlock, navR.AppendCustomInfo);
}
ApTerm.m_autopilotFlags = flags;
ApTerm.m_pathfinderState = m_pathfinder.CurrentState;
ApTerm.SetWaitUntil(Settings_Current.WaitUntil);
ApTerm.SetDistance(Settings_Current.Distance, Settings_Current.DistanceAngle);
ApTerm.m_welderUnfinishedBlocks = m_navSet.WelderUnfinishedBlocks;
ApTerm.m_complaint = Settings_Current.Complaint;
ApTerm.m_jumpComplaint = m_pathfinder.JumpComplaint;
}
#endregion Custom Info
private void HandleMessage(Message msg)
{
using (lock_execution.AcquireExclusiveUsing())
{
m_autopilotActions = m_commands.GetActions(msg.Content);
m_navSet.OnStartOfCommands();
m_mover.MoveAndRotateStop(false);
m_mover.SetControl(true);
}
}
public Builder_Autopilot GetBuilder()
{
if (!m_block.AutopilotControl)
return null;
Builder_Autopilot result = new Builder_Autopilot() { AutopilotBlock = m_block.CubeBlock.EntityId };
if (m_autopilotActions == null || m_autopilotActions.CurrentIndex <= 0 || m_autopilotActions.Current == null)
return null;
result.CurrentCommand = m_autopilotActions.CurrentIndex;
Log.DebugLog("current command: " + result.CurrentCommand);
result.Commands = m_commands.Commands;
Log.DebugLog("commands: " + result.Commands);
EnemyFinder finder = m_navSet.Settings_Current.EnemyFinder;
if (finder != null)
{
result.EngagerOriginalEntity = finder.m_originalDestEntity == null ? 0L : finder.m_originalDestEntity.EntityId;
result.EngagerOriginalPosition = finder.m_originalPosition;
Log.DebugLog("added EngagerOriginalEntity: " + result.EngagerOriginalEntity + ", and EngagerOriginalPosition: " + result.EngagerOriginalPosition);
}
return result;
}
public void ResumeFromSave(Builder_Autopilot builder)
{
using (lock_execution.AcquireExclusiveUsing())
{
m_navSet.OnStartOfCommands();
Log.DebugLog("resume: " + builder.Commands + ", current: " + builder.CurrentCommand, Logger.severity.DEBUG);
m_autopilotActions = m_commands.GetActions(builder.Commands);
while (m_autopilotActions.CurrentIndex < builder.CurrentCommand - 1 && m_autopilotActions.MoveNext())
{
m_autopilotActions.Current.Invoke(m_pathfinder);
Log.DebugLog("fast forward: " + m_autopilotActions.CurrentIndex);
// clear navigators' levels
for (AllNavigationSettings.SettingsLevelName levelName = AllNavigationSettings.SettingsLevelName.NavRot; levelName < AllNavigationSettings.SettingsLevelName.NavWay; levelName++)
{
AllNavigationSettings.SettingsLevel settingsAtLevel = m_navSet.GetSettingsLevel(levelName);
if (settingsAtLevel.NavigatorMover != null || settingsAtLevel.NavigatorRotator != null)
{
Log.DebugLog("clear " + levelName);
m_navSet.OnTaskComplete(levelName);
break;
}
}
}
if (m_autopilotActions.MoveNext())
m_autopilotActions.Current.Invoke(m_pathfinder);
// clear wait
m_navSet.OnTaskComplete(AllNavigationSettings.SettingsLevelName.NavWay);
EnemyFinder finder = m_navSet.Settings_Current.EnemyFinder;
if (finder != null)
{
if (builder.EngagerOriginalEntity != 0L)
{
if (!MyAPIGateway.Entities.TryGetEntityById(builder.EngagerOriginalEntity, out finder.m_originalDestEntity))
{
Log.AlwaysLog("Failed to restore original destination entity for enemy finder: " + builder.EngagerOriginalEntity, Logger.severity.WARNING);
finder.m_originalDestEntity = null;
}
else
Log.DebugLog("Restored original destination entity for enemy finder: " + finder.m_originalDestEntity.getBestName());
}
if (builder.EngagerOriginalPosition.IsValid())
{
finder.m_originalPosition = builder.EngagerOriginalPosition;
Log.DebugLog("Restored original position for enemy finder: " + builder.EngagerOriginalPosition);
}
}
}
}
}
}
| |
/*
* SubSonic - http://subsonicproject.com
*
* The contents of this file are subject to the Mozilla Public
* License Version 1.1 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an
* "AS IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*/
using System;
using System.Collections;
using System.Data;
using System.Globalization;
using System.Web.UI.WebControls;
using MbUnit.Framework;
using NorthwindAccess;
namespace SubSonic.Tests.MsAccess
{
/// <summary>
/// Summary for the QueryTest class
/// </summary>
[TestFixture]
public class QueryTest
{
/// <summary>
/// Tests a simple Query select.
/// </summary>
[Test]
public void Acc_Query_Select()
{
Query qry = new Query(Product.Schema);
qry.AddWhere("productID", 1);
int pk = (int)qry.ExecuteScalar();
Assert.IsTrue(pk == 1, "Bad Select");
}
/// <summary>
/// Tests Query with a TOP clause.
/// </summary>
[Test]
public void Acc_Query_SelectTop()
{
Where LikeUnitTestTerritory = new Where();
LikeUnitTestTerritory.TableName = Territory.Schema.TableName;
LikeUnitTestTerritory.ColumnName = Territory.Columns.TerritoryDescription;
LikeUnitTestTerritory.Comparison = Comparison.Like;
LikeUnitTestTerritory.ParameterValue = "%ville%";
Query qry = new Query(Territory.Schema);
qry.Top = "3";
qry.AddWhere(LikeUnitTestTerritory);
int counter = 0;
using(IDataReader rdr = qry.ExecuteReader())
{
while(rdr.Read())
counter++;
rdr.Close();
}
Assert.AreEqual(3, counter, "Count is " + counter);
}
/// <summary>
/// Tests Query update settings.
/// </summary>
[Test]
public void Acc_Query_Updates()
{
Query qry = new Query(Product.Schema);
qry.AddUpdateSetting("Discontinued", true);
qry.AddWhere(Product.Columns.ProductName, "Unit Test Product 3");
qry.Execute();
//verify
qry = new Query(Product.Schema);
qry.AddWhere(Product.Columns.ProductName, "Unit Test Product 3");
ProductCollection coll = new ProductCollection();
using(IDataReader rdr = qry.ExecuteReader())
{
coll.Load(rdr);
rdr.Close();
}
foreach(Product prod in coll)
Assert.IsTrue(prod.Discontinued);
}
/// <summary>
/// Tests Query with AddBetweenAnd.
/// </summary>
[Test]
public void Acc_Query_BetweenAnd()
{
int counter = 0;
using(
IDataReader rdr =
new Query(DataService.GetTableSchema("Orders", "NorthwindAccess")).AddBetweenAnd("OrderDate", new DateTime(1996, 7, 4), new DateTime(1996, 7, 16)).
ExecuteReader())
{
while(rdr.Read())
counter++;
rdr.Close();
}
Assert.IsTrue(counter == 10, "Nope - it's " + counter);
}
/// <summary>
/// Tests Query against a view.
/// </summary>
[Test]
public void Acc_Query_View()
{
int counter = 0;
using(
IDataReader rdr = new Query(DataService.GetTableSchema("Invoices", "NorthwindAccess")).AddWhere("ShipPostalCode", "51100").ExecuteReader())
{
while(rdr.Read())
counter++;
rdr.Close();
}
Assert.IsTrue(counter == 10, "Nope - it's " + counter);
}
/// <summary>
/// Tests Query with AddBetweenValues on numbers.
/// </summary>
[Test]
public void Acc_Query_BetweenNumbers()
{
int counter = 0;
using(IDataReader rdr = new Query(Product.Schema).AddBetweenValues("productID", 1, 7).ExecuteReader())
{
while(rdr.Read())
counter++;
rdr.Close();
}
Assert.IsTrue(counter == 7, "Nope - it's " + counter);
}
/// <summary>
/// Tests Query IN with an object array.
/// </summary>
[Test]
public void Acc_Query_IN_ObjectArray()
{
int counter = 0;
using (IDataReader rdr = new Query("products", "NorthwindAccess").IN("ProductID", new object[] { 1, 2, 3, 4, 5 }).ExecuteReader())
{
while(rdr.Read())
counter++;
rdr.Close();
}
Assert.IsTrue(counter == 5, "Nope - it's " + counter);
}
/// <summary>
/// Tests Query IN with a ListItemCollection.
/// </summary>
[Test]
public void Acc_Query_IN_ListCollection()
{
ListItemCollection coll = new ListItemCollection();
for(int i = 1; i <= 5; i++)
{
ListItem item = new ListItem(i.ToString(), i.ToString());
item.Selected = true;
coll.Add(item);
}
int counter = 0;
using (IDataReader rdr = new Query("products", "NorthwindAccess").IN("ProductID", coll).ExecuteReader())
{
while(rdr.Read())
counter++;
rdr.Close();
}
Assert.IsTrue(counter == 5, "Nope - it's " + counter);
}
/// <summary>
/// Tests Query IN with an ArrayList.
/// </summary>
[Test]
public void Acc_Query_IN_ArrayList()
{
ArrayList list = new ArrayList();
for(int i = 1; i <= 5; i++)
list.Add(i);
int counter = 0;
using (IDataReader rdr = new Query("products", "NorthwindAccess").IN("ProductID", list).ExecuteReader())
{
while(rdr.Read())
counter++;
rdr.Close();
}
Assert.IsTrue(counter == 5, "Nope - it's " + counter);
}
/// <summary>
/// Tests Query NOT_IN with an object array.
/// </summary>
[Test]
public void Acc_Query_NOT_IN_ObjectArray()
{
int counter = 0;
int productCount = new Query(Product.Schema).GetRecordCount();
using (IDataReader rdr = new Query("products", "NorthwindAccess").NOT_IN("ProductID", new object[] { 1, 2, 3, 4, 5 }).ExecuteReader())
{
while(rdr.Read())
counter++;
rdr.Close();
}
Assert.IsTrue(counter == productCount-5, "Nope - it's " + counter);
}
/// <summary>
/// Tests Query NOT_IN with a ListItemCollection.
/// </summary>
[Test]
public void Acc_Query_NOT_IN_ListCollection()
{
int productCount = new Query(Product.Schema).GetRecordCount();
ListItemCollection coll = new ListItemCollection();
for(int i = 1; i <= 5; i++)
{
ListItem item = new ListItem(i.ToString(), i.ToString());
item.Selected = true;
coll.Add(item);
}
int counter = 0;
using (IDataReader rdr = new Query("products", "NorthwindAccess").NOT_IN("ProductID", coll).ExecuteReader())
{
while(rdr.Read())
counter++;
rdr.Close();
}
Assert.IsTrue(counter == productCount - 5, "Nope - it's " + counter);
}
/// <summary>
/// Tests Query NOT_IN with an ArrayList.
/// </summary>
[Test]
public void Acc_Query_NOT_IN_ArrayList()
{
int productCount = new Query(Product.Schema).GetRecordCount();
ArrayList list = new ArrayList();
for(int i = 1; i <= 5; i++)
list.Add(i);
int counter = 0;
using (IDataReader rdr = new Query("products", "NorthwindAccess").NOT_IN("ProductID", list).ExecuteReader())
{
while(rdr.Read())
counter++;
rdr.Close();
}
Assert.IsTrue(counter == productCount - 5, "Nope - it's " + counter);
}
/// <summary>
/// Tests Query with an is-not-null comparison.
/// </summary>
[Test]
public void Acc_Query_IsNotNull()
{
int counter = 0;
using(IDataReader rdr = new Query("Products", "NorthwindAccess").AddWhere("ProductID", Comparison.IsNot, null).ExecuteReader())
{
while(rdr.Read())
counter++;
rdr.Close();
}
//should bring back all records
Assert.AreEqual(new Query(Product.Schema).GetCount(Product.Columns.ProductID), counter, "Nope - it's " + counter);
}
/// <summary>
/// Tests a simple Query OR.
/// </summary>
[Test]
public void Acc_Query_OR_Simple()
{
int counter = 0;
using (IDataReader rdr = new Query("Categories", "NorthwindAccess").WHERE("CategoryID", 5).OR("CategoryID", 1).ExecuteReader())
{
while(rdr.Read())
counter++;
rdr.Close();
}
//should bring back exactly 2 records (CategoryID 5 or 1)
Assert.IsTrue(counter == 2, "Nope - it's " + counter);
}
/// <summary>
/// Tests a moderately complex Query OR.
/// </summary>
[Test]
public void Acc_Query_OR_Moderate()
{
int counter = 0;
Query q = new Query("Products", "NorthwindAccess").WHERE("CategoryID", 5).AND("UnitPrice", Comparison.GreaterThan, 50)
.OR("CategoryID", 1).AND("UnitPrice", Comparison.GreaterThan, 50);
using(IDataReader rdr = q.ExecuteReader()) {
while(rdr.Read())
counter++;
}
Assert.AreEqual(1, counter, "Nope - it's " + counter);
}
/// <summary>
/// Tests a moderately complex Query OR built from expressions.
/// </summary>
[Test]
public void Acc_Query_OR_ModerateWithExpressions()
{
int counter = 0;
using(
IDataReader rdr =
new Query("Products", "NorthwindAccess").WHERE("CategoryID = 5").AND("UnitPrice > 50").OR("CategoryID = 1").AND(
"UnitPrice > 50").ExecuteReader())
{
while(rdr.Read())
counter++;
}
Assert.AreEqual(1, counter, "Nope - it's " + counter);
}
/// <summary>
/// Tests Query OR_BETWEEN_AND.
/// </summary>
[Test]
public void Acc_Query_OR_Between()
{
int counter = 0;
string[] sFormat = {"M/d/yyyy"};
using(IDataReader rdr =
new Query("Orders", "NorthwindAccess").BETWEEN_AND("OrderDate",
DateTime.ParseExact("7/4/1996", sFormat, CultureInfo.CurrentCulture, DateTimeStyles.None),
DateTime.ParseExact("7/10/1996", sFormat, CultureInfo.CurrentCulture, DateTimeStyles.None)).OR_BETWEEN_AND("OrderDate",
DateTime.ParseExact("7/14/1996", sFormat, CultureInfo.CurrentCulture, DateTimeStyles.None),
DateTime.ParseExact("7/20/1996", sFormat, CultureInfo.CurrentCulture, DateTimeStyles.None)).ExecuteReader())
{
while(rdr.Read())
counter++;
rdr.Close();
}
//should bring back 12 records across the two date ranges
Assert.IsTrue(counter == 12, "Nope - it's " + counter);
}
/// <summary>
/// Tests Query with a WHERE expression.
/// </summary>
[Test]
public void Acc_Query_Expression()
{
int counter = 0;
using(IDataReader rdr = new Query("Products", "NorthwindAccess").WHERE("ProductID < 5").ExecuteReader())
{
while(rdr.Read())
counter++;
rdr.Close();
}
//should bring back the 4 records with ProductID < 5
Assert.IsTrue(counter == 4, "Nope - it's " + counter);
}
/// <summary>
/// Tests Query paging against a table.
/// </summary>
[Test]
public void Acc_Query_PagingTest_Table()
{
Query q = new Query("Products", "NorthwindAccess");
q.PageSize = 10;
q.PageIndex = 1;
int counter = 0;
using(IDataReader rdr = q.ExecuteReader())
{
while(rdr.Read())
counter++;
rdr.Close();
}
//should bring back a single page of 10 records
Assert.IsTrue(counter == 10, "Nope - it's " + counter);
}
/// <summary>
/// Tests Query paging against a view.
/// </summary>
[Test]
public void Acc_Query_PagingTest_View()
{
Query q = new Query("Sales By Category", "NorthwindAccess");
q.PageSize = 10;
q.PageIndex = 1;
q.ORDER_BY("CategoryID", "ASC");
int counter = 0;
using(IDataReader rdr = q.ExecuteReader())
{
while(rdr.Read())
counter++;
rdr.Close();
}
//should bring back a single page of 10 records
Assert.IsTrue(counter == 10, "Nope - it's " + counter);
}
/// <summary>
/// Tests Query.ExecuteJoinedDataSet.
/// </summary>
[Test]
public void Acc_Query_JoinedDataSet()
{
Query q = new Query("Products", "NorthwindAccess");
DataSet ds = q.ExecuteJoinedDataSet();
//SupplierID should come back as a string in the joined result
Assert.IsTrue(ds.Tables[0].Columns["SupplierID"].DataType == typeof(string));
}
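// Note, not part of the original source: ExecuteJoinedDataSet replaces foreign-key columns
// with the related table's display column (later tests note that SupplierID is replaced by
// CompanyName), which is why the test above asserts the SupplierID column type is string.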
[Test]
public void Acc_ExecuteJoinedDataSet_Should_Accept_Parameters() {
int someProduct = 1;
SubSonic.SqlQuery sq = new Select()
.From(Product.Schema)
.Where(Product.ProductIDColumn).IsEqualTo(someProduct);
DataSet ds = sq.ExecuteJoinedDataSet();
Assert.AreEqual(1, ds.Tables[0].Rows.Count);
}
/// <summary>
/// Tests Query with a constraint expression.
/// </summary>
[Test]
public void Acc_Query_ConstraintExpression()
{
int counter = 0;
using(IDataReader rdr = new Query(Product.Schema.TableName, "NorthwindAccess").WHERE(Product.Columns.ProductID, Is.LessThan(5)).ExecuteReader())
{
while(rdr.Read())
counter++;
}
Assert.AreEqual(4, counter, "Nope - it's " + counter);
}
/// <summary>
/// Test Order by FK on Joined Tables.
/// </summary>
[Test]
public void Acc_Query_JoinedDataSet_OrderByFK()
{
Query q = new Query("Products", "NorthwindAccess");
TableSchema.Table ts = DataService.GetTableSchema("Products", "NorthwindAccess");
//q.OrderBy = OrderBy.Desc(ts.GetColumn("CategoryID"));
q.ORDER_BY(ts.GetColumn("CategoryID"), "DESC");
q.ORDER_BY(ts.GetColumn("ProductID"), "ASC");
DataSet ds = q.ExecuteJoinedDataSet();
//should bring 10 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(10));
}
/// <summary>
/// Tests AddBetweenAnd over ExecuteJoinedDataSet.
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_AddBetweenAnd()
{
DataSet ds = new Query(DataService.GetTableSchema("Orders", "NorthwindAccess"))
.AddBetweenAnd("OrderDate", new DateTime(1996, 7, 4), new DateTime(1996, 7, 16)).
ExecuteJoinedDataSet();
int counter = ds.Tables[0].Rows.Count;
Assert.IsTrue(counter == 10, "Nope - it's " + counter);
}
/// <summary>
/// Tests AddBetweenAnd over ExecuteJoinedDataSet with more than one where condition.
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_AddBetweenAnd_WithExtraWhereCondition()
{
DataSet ds = new Query(DataService.GetTableSchema("Orders", "NorthwindAccess")).
AddWhere("OrderID", Comparison.GreaterThan, 0).
AddBetweenAnd("OrderDate", new DateTime(1996, 7, 4), new DateTime(1996, 7, 16)).
ExecuteJoinedDataSet();
int counter = ds.Tables[0].Rows.Count;
Assert.IsTrue(counter == 10, "Nope - it's " + counter);
}
/// <summary>
/// Tests a BETWEEN_AND clause over ExecuteJoinedDataSet.
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_BETWEEN_AND()
{
DataSet ds = new Query(DataService.GetTableSchema("Orders", "NorthwindAccess"))
.BETWEEN_AND("OrderDate", new DateTime(1996, 7, 4), new DateTime(1996, 7, 16)).
ExecuteJoinedDataSet();
int counter = ds.Tables[0].Rows.Count;
Assert.IsTrue(counter == 10, "Nope - it's " + counter);
}
/// <summary>
/// Tests BETWEEN_AND over ExecuteJoinedDataSet with an extra where condition.
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_BETWEEN_AND_WithExtraWhereCondition()
{
DataSet ds =
new Query(DataService.GetTableSchema("Orders", "NorthwindAccess")).
AddWhere("OrderID", Comparison.GreaterThan, 0).
BETWEEN_AND("OrderDate", new DateTime(1996, 7, 4), new DateTime(1996, 7, 16)).
ExecuteJoinedDataSet();
int counter = ds.Tables[0].Rows.Count;
Assert.IsTrue(counter == 10, "Nope - it's " + counter);
}
/// <summary>
/// Tests ExecuteJoinedDataSet IN with an object array.
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_IN_ObjectArray()
{
DataSet ds = new Query("products", "NorthwindAccess").IN("ProductID", new object[] { 1, 2, 3, 4, 5 }).ExecuteJoinedDataSet();
int counter = ds.Tables[0].Rows.Count;
Assert.IsTrue(counter == 5, "Nope - it's " + counter);
}
/// <summary>
/// Tests ExecuteJoinedDataSet IN with a ListItemCollection.
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_IN_ListCollection()
{
ListItemCollection coll = new ListItemCollection();
for(int i = 1; i <= 5; i++)
{
ListItem item = new ListItem(i.ToString(), i.ToString());
item.Selected = true;
coll.Add(item);
}
DataSet ds = new Query("products", "NorthwindAccess").IN("ProductID", coll).ExecuteJoinedDataSet();
int counter = ds.Tables[0].Rows.Count;
Assert.IsTrue(counter == 5, "Nope - it's " + counter);
}
/// <summary>
/// Tests ExecuteJoinedDataSet IN with an ArrayList.
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_IN_ArrayList()
{
ArrayList list = new ArrayList();
for(int i = 1; i <= 5; i++)
list.Add(i);
DataSet ds = new Query("products", "NorthwindAccess").IN("ProductID", list).ExecuteJoinedDataSet();
int counter = ds.Tables[0].Rows.Count;
Assert.IsTrue(counter == 5, "Nope - it's " + counter);
}
/// <summary>
/// Tests ExecuteJoinedDataSet NOT_IN with an object array.
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_NOT_IN_ObjectArray()
{
DataSet ds = new Query("products", "NorthwindAccess").ExecuteJoinedDataSet();
int counter1 = ds.Tables[0].Rows.Count;
ds = new Query("products", "NorthwindAccess").NOT_IN("ProductID", new object[] { 1, 2, 3, 4, 5 }).ExecuteJoinedDataSet();
int counter2 = ds.Tables[0].Rows.Count;
Assert.IsTrue(counter1 - counter2 == 5, "Nope - difference is " + (counter1 - counter2).ToString());
}
/// <summary>
/// Tests ExecuteJoinedDataSet NOT_IN with a ListItemCollection.
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_NOT_IN_ListCollection()
{
ListItemCollection coll = new ListItemCollection();
for(int i = 1; i <= 5; i++)
{
ListItem item = new ListItem(i.ToString(), i.ToString());
item.Selected = true;
coll.Add(item);
}
DataSet ds = new Query("products", "NorthwindAccess").ExecuteJoinedDataSet();
int counter1 = ds.Tables[0].Rows.Count;
ds = new Query("products", "NorthwindAccess").NOT_IN("ProductID", coll).ExecuteJoinedDataSet();
int counter2 = ds.Tables[0].Rows.Count;
Assert.IsTrue(counter1 - counter2 == 5, "Nope - difference is " + (counter1 - counter2).ToString());
}
/// <summary>
/// Tests ExecuteJoinedDataSet NOT_IN with an ArrayList.
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_NOT_IN_ArrayList()
{
ArrayList list = new ArrayList();
for(int i = 1; i <= 5; i++)
list.Add(i);
DataSet ds = new Query("products", "NorthwindAccess").ExecuteJoinedDataSet();
int counter1 = ds.Tables[0].Rows.Count;
ds = new Query("products", "NorthwindAccess").NOT_IN("ProductID", list).ExecuteJoinedDataSet();
int counter2 = ds.Tables[0].Rows.Count;
Assert.IsTrue(counter1 - counter2 == 5, "Nope - difference is " + (counter1 - counter2).ToString());
}
/// <summary>
/// Test JoinedDataSet Order by Collection
/// </summary>
[Test]
public void Acc_Query_JoinedDataSet_OrderByCollection()
{
Query q = new Query("Products", "NorthwindAccess");
TableSchema.Table ts = DataService.GetTableSchema("Products", "NorthwindAccess");
q.OrderByCollection.Add(OrderBy.Desc(ts.GetColumn("CategoryID")));
q.OrderByCollection.Add(OrderBy.Desc(ts.GetColumn("ProductID")));
DataSet ds = q.ExecuteJoinedDataSet();
//should bring 73 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(73));
}
/// <summary>
/// Test Order by Collection
/// </summary>
[Test]
public void Acc_Query_OrderByCollection()
{
Query q = new Query("Products", "NorthwindAccess");
TableSchema.Table ts = DataService.GetTableSchema("Products", "NorthwindAccess");
q.OrderByCollection.Add(OrderBy.Desc(ts.GetColumn("CategoryID")));
q.OrderByCollection.Add(OrderBy.Desc(ts.GetColumn("ProductID")));
DataSet ds = q.ExecuteDataSet();
//should bring 73 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(73));
}
/// <summary>
/// Test Order by
/// </summary>
[Test]
public void Acc_Query_OrderBy()
{
Query q = new Query("Products", "NorthwindAccess");
TableSchema.Table ts = DataService.GetTableSchema("Products", "NorthwindAccess");
q.OrderBy = OrderBy.Desc(ts.GetColumn("ProductID"));
DataSet ds = q.ExecuteDataSet();
//should bring 77 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(77));
}
/// <summary>
/// Test Query_ORDER_BY
/// </summary>
[Test]
public void Acc_Query_ORDER_BY()
{
Query q = new Query("Products", "NorthwindAccess");
TableSchema.Table ts = DataService.GetTableSchema("Products", "NorthwindAccess");
DataSet ds = q.ORDER_BY(ts.GetColumn("ProductID")).ExecuteDataSet();
//should bring 1 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(1));
}
/// <summary>
/// Test Query_ORDER_BY_DESC
/// </summary>
[Test]
public void Acc_Query_ORDER_BY_DESC()
{
Query q = new Query("Products", "NorthwindAccess");
TableSchema.Table ts = DataService.GetTableSchema("Products", "NorthwindAccess");
DataSet ds = q.ORDER_BY(ts.GetColumn("ProductID"), SqlFragment.DESC).ExecuteDataSet();
//should bring 77 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(77));
}
/// <summary>
/// Test Query_ORDER_BY_Expression
/// </summary>
[Test]
public void Acc_Query_ORDER_BY_Expression()
{
Query q = new Query("Products", "NorthwindAccess");
DataSet ds = q.ORDER_BY("ProductID DESC").ExecuteDataSet();
//should bring 77 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(77));
}
/// <summary>
/// Test Query_ORDER_BY_ExpressionDESC
/// </summary>
[Test]
public void Acc_Query_ORDER_BY_ExpressionDESC()
{
Query q = new Query("Products", "NorthwindAccess");
DataSet ds = q.ORDER_BY("ProductID", SqlFragment.DESC).ExecuteDataSet();
//should bring 77 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(77));
}
/// <summary>
/// Test QueryExecuteJoinedDataSet_ORDER_BY
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_ORDER_BY()
{
Query q = new Query("Products", "NorthwindAccess");
TableSchema.Table ts = DataService.GetTableSchema("Products", "NorthwindAccess");
DataSet ds = q.ORDER_BY(ts.GetColumn("ProductID")).ExecuteJoinedDataSet();
//should bring 1 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(1));
}
/// <summary>
/// Test QueryExecuteJoinedDataSet_ORDER_BY_DESC
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_ORDER_BY_DESC()
{
Query q = new Query("Products", "NorthwindAccess");
TableSchema.Table ts = DataService.GetTableSchema("Products", "NorthwindAccess");
DataSet ds = q.ORDER_BY(ts.GetColumn("ProductID"), SqlFragment.DESC).ExecuteJoinedDataSet();
//should bring 77 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(77));
}
/// <summary>
/// Test QueryExecuteJoinedDataSet_ORDER_BY_Expression
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_ORDER_BY_Expression()
{
Query q = new Query("Products", "NorthwindAccess");
DataSet ds = q.ORDER_BY("ProductID DESC").ExecuteJoinedDataSet();
//should bring 77 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(77));
}
/// <summary>
/// Test QueryExecuteJoinedDataSet_ORDER_BY_ExpressionDESC
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_ORDER_BY_ExpressionDESC()
{
Query q = new Query("Products", "NorthwindAccess");
DataSet ds = q.ORDER_BY("ProductID", SqlFragment.DESC).ExecuteJoinedDataSet();
//should bring 77 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(77));
}
/// <summary>
/// Test Query_ORDER_BY_Collection
/// </summary>
[Test]
public void Acc_Query_ORDER_BY_Collection()
{
Query q = new Query("Products", "NorthwindAccess");
TableSchema.Table ts = DataService.GetTableSchema("Products", "NorthwindAccess");
DataSet ds = q.ORDER_BY(ts.GetColumn("SupplierID")).
ORDER_BY(ts.GetColumn("ProductID")).
ExecuteDataSet();
//should bring 1 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(1));
}
/// <summary>
/// Test Query_ORDER_BY_DESC_Collection
/// </summary>
[Test]
public void Acc_Query_ORDER_BY_DESC_Collection()
{
Query q = new Query("Products", "NorthwindAccess");
TableSchema.Table ts = DataService.GetTableSchema("Products", "NorthwindAccess");
DataSet ds = q.ORDER_BY(ts.GetColumn("CategoryID"), SqlFragment.DESC).
ORDER_BY(ts.GetColumn("ProductID"), SqlFragment.DESC).
ExecuteDataSet();
//should bring 73 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(73));
}
/// <summary>
/// Test Query_ORDER_BY_Expression_Collection
/// </summary>
[Test]
public void Acc_Query_ORDER_BY_Expression_Collection()
{
Query q = new Query("Products", "NorthwindAccess");
DataSet ds = q.ORDER_BY("CategoryID DESC").
ORDER_BY("ProductID DESC").
ExecuteDataSet();
//should bring 73 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(73));
}
/// <summary>
/// Test Query_ORDER_BY_ExpressionDESC_Collection
/// </summary>
[Test]
public void Acc_Query_ORDER_BY_ExpressionDESC_Collection()
{
Query q = new Query("Products", "NorthwindAccess");
DataSet ds = q.ORDER_BY("CategoryID", SqlFragment.DESC).
ORDER_BY("ProductID", SqlFragment.DESC).
ExecuteDataSet();
//should bring 73 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(73));
}
/// <summary>
/// Test QueryExecuteJoinedDataSet_ORDER_BY_Collection
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_ORDER_BY_Collection()
{
Query q = new Query("Products", "NorthwindAccess");
TableSchema.Table ts = DataService.GetTableSchema("Products", "NorthwindAccess");
DataSet ds = q.ORDER_BY(ts.GetColumn("SupplierID")).
ORDER_BY(ts.GetColumn("ProductID")).
ExecuteJoinedDataSet();
//should bring 38 as first (SupplierID is replaced by CompanyName)
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(38));
}
/// <summary>
/// Test QueryExecuteJoinedDataSet_ORDER_BY_DESC_Collection
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_ORDER_BY_DESC_Collection()
{
Query q = new Query("Products", "NorthwindAccess");
TableSchema.Table ts = DataService.GetTableSchema("Products", "NorthwindAccess");
DataSet ds = q.ORDER_BY(ts.GetColumn("SupplierID"), SqlFragment.DESC).
ORDER_BY(ts.GetColumn("ProductID"), SqlFragment.DESC).
ExecuteJoinedDataSet();
//should bring 48 as first (SupplierID is replaced by CompanyName)
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(48));
}
/// <summary>
/// Test QueryExecuteJoinedDataSet_ORDER_BY_Expression_Collection
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_ORDER_BY_Expression_Collection()
{
Query q = new Query("Products", "NorthwindAccess");
DataSet ds = q.ORDER_BY("SupplierID DESC").
ORDER_BY("ProductID DESC").
ExecuteJoinedDataSet();
//should bring 48 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(48));
}
/// <summary>
/// Test QueryExecuteJoinedDataSet_ORDER_BY_ExpressionDESC_Collection
/// </summary>
[Test]
public void Acc_QueryExecuteJoinedDataSet_ORDER_BY_ExpressionDESC_Collection()
{
Query q = new Query("Products", "NorthwindAccess");
DataSet ds = q.ORDER_BY("SupplierID", SqlFragment.DESC).
ORDER_BY("ProductID", SqlFragment.DESC).
ExecuteJoinedDataSet();
//should bring 48 as first
Assert.IsTrue(ds.Tables[0].Rows[0]["ProductID"].Equals(48));
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// ------------------------------------------------------------------------------
// Changes to this file must follow the http://aka.ms/api-review process.
// ------------------------------------------------------------------------------
[assembly: System.Runtime.CompilerServices.TypeForwardedTo(typeof(System.Security.Cryptography.CryptographicException))]
namespace System.Security.Cryptography
{
public abstract partial class AsymmetricAlgorithm : System.IDisposable
{
protected int KeySizeValue;
protected System.Security.Cryptography.KeySizes[] LegalKeySizesValue;
protected AsymmetricAlgorithm() { }
public virtual string KeyExchangeAlgorithm { get { throw null; } }
public virtual int KeySize { get { throw null; } set { } }
public virtual System.Security.Cryptography.KeySizes[] LegalKeySizes { get { throw null; } }
public virtual string SignatureAlgorithm { get { throw null; } }
public void Clear() { }
public static System.Security.Cryptography.AsymmetricAlgorithm Create() { throw null; }
public static System.Security.Cryptography.AsymmetricAlgorithm Create(string algName) { throw null; }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
public virtual void FromXmlString(string xmlString) { }
public virtual string ToXmlString(bool includePrivateParameters) { throw null; }
}
public enum CipherMode
{
CBC = 1,
[System.ComponentModel.EditorBrowsableAttribute((System.ComponentModel.EditorBrowsableState)(1))]
CFB = 4,
CTS = 5,
ECB = 2,
[System.ComponentModel.EditorBrowsableAttribute((System.ComponentModel.EditorBrowsableState)(1))]
OFB = 3,
}
public partial class CryptographicUnexpectedOperationException : System.Security.Cryptography.CryptographicException
{
public CryptographicUnexpectedOperationException() { }
protected CryptographicUnexpectedOperationException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { }
public CryptographicUnexpectedOperationException(string message) { }
public CryptographicUnexpectedOperationException(string message, System.Exception inner) { }
public CryptographicUnexpectedOperationException(string format, string insert) { }
}
public partial class CryptoStream : System.IO.Stream, System.IDisposable
{
public CryptoStream(System.IO.Stream stream, System.Security.Cryptography.ICryptoTransform transform, System.Security.Cryptography.CryptoStreamMode mode) { }
public CryptoStream(System.IO.Stream stream, System.Security.Cryptography.ICryptoTransform transform, System.Security.Cryptography.CryptoStreamMode mode, bool leaveOpen) { }
public override bool CanRead { get { throw null; } }
public override bool CanSeek { get { throw null; } }
public override bool CanWrite { get { throw null; } }
public bool HasFlushedFinalBlock { get { throw null; } }
public override long Length { get { throw null; } }
public override long Position { get { throw null; } set { } }
public override System.IAsyncResult BeginRead(byte[] buffer, int offset, int count, System.AsyncCallback callback, object state) { throw null; }
public override System.IAsyncResult BeginWrite(byte[] buffer, int offset, int count, System.AsyncCallback callback, object state) { throw null; }
public void Clear() { }
protected override void Dispose(bool disposing) { }
public override int EndRead(System.IAsyncResult asyncResult) { throw null; }
public override void EndWrite(System.IAsyncResult asyncResult) { }
public override void Flush() { }
public override System.Threading.Tasks.Task FlushAsync(System.Threading.CancellationToken cancellationToken) { throw null; }
public void FlushFinalBlock() { }
public override int Read(byte[] buffer, int offset, int count) { throw null; }
public override System.Threading.Tasks.Task<int> ReadAsync(byte[] buffer, int offset, int count, System.Threading.CancellationToken cancellationToken) { throw null; }
public override int ReadByte() { throw null; }
public override long Seek(long offset, System.IO.SeekOrigin origin) { throw null; }
public override void SetLength(long value) { }
public override void Write(byte[] buffer, int offset, int count) { }
public override System.Threading.Tasks.Task WriteAsync(byte[] buffer, int offset, int count, System.Threading.CancellationToken cancellationToken) { throw null; }
public override void WriteByte(byte value) { }
}
public enum CryptoStreamMode
{
Read = 0,
Write = 1,
}
public abstract partial class HashAlgorithm : System.IDisposable, System.Security.Cryptography.ICryptoTransform
{
protected int HashSizeValue;
protected internal byte[] HashValue;
protected int State;
protected HashAlgorithm() { }
public virtual bool CanReuseTransform { get { throw null; } }
public virtual bool CanTransformMultipleBlocks { get { throw null; } }
public virtual byte[] Hash { get { throw null; } }
public virtual int HashSize { get { throw null; } }
public virtual int InputBlockSize { get { throw null; } }
public virtual int OutputBlockSize { get { throw null; } }
public void Clear() { }
public byte[] ComputeHash(byte[] buffer) { throw null; }
public byte[] ComputeHash(byte[] buffer, int offset, int count) { throw null; }
public byte[] ComputeHash(System.IO.Stream inputStream) { throw null; }
public static System.Security.Cryptography.HashAlgorithm Create() { throw null; }
public static System.Security.Cryptography.HashAlgorithm Create(string hashName) { throw null; }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
protected abstract void HashCore(byte[] array, int ibStart, int cbSize);
protected virtual void HashCore(System.ReadOnlySpan<byte> source) { }
protected abstract byte[] HashFinal();
public abstract void Initialize();
public int TransformBlock(byte[] inputBuffer, int inputOffset, int inputCount, byte[] outputBuffer, int outputOffset) { throw null; }
public byte[] TransformFinalBlock(byte[] inputBuffer, int inputOffset, int inputCount) { throw null; }
public bool TryComputeHash(System.ReadOnlySpan<byte> source, System.Span<byte> destination, out int bytesWritten) { throw null; }
protected virtual bool TryHashFinal(System.Span<byte> destination, out int bytesWritten) { throw null; }
}
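// Illustrative usage sketch (assumes a concrete implementation such as SHA256.Create() from
// the wider framework; only HashAlgorithm members listed above are used).
//
//   using (System.Security.Cryptography.HashAlgorithm sha = System.Security.Cryptography.SHA256.Create())
//   {
//       byte[] data = System.Text.Encoding.UTF8.GetBytes("hello");
//       System.Span<byte> digest = stackalloc byte[sha.HashSize / 8]; // HashSize is in bits
//       if (sha.TryComputeHash(data, digest, out int bytesWritten))
//       {
//           // bytesWritten is 32 for SHA-256
//       }
//   }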
public readonly partial struct HashAlgorithmName : System.IEquatable<System.Security.Cryptography.HashAlgorithmName>
{
private readonly object _dummy;
public HashAlgorithmName(string name) { throw null; }
public static System.Security.Cryptography.HashAlgorithmName MD5 { get { throw null; } }
public string Name { get { throw null; } }
public static System.Security.Cryptography.HashAlgorithmName SHA1 { get { throw null; } }
public static System.Security.Cryptography.HashAlgorithmName SHA256 { get { throw null; } }
public static System.Security.Cryptography.HashAlgorithmName SHA384 { get { throw null; } }
public static System.Security.Cryptography.HashAlgorithmName SHA512 { get { throw null; } }
public override bool Equals(object obj) { throw null; }
public bool Equals(System.Security.Cryptography.HashAlgorithmName other) { throw null; }
public override int GetHashCode() { throw null; }
public static bool operator ==(System.Security.Cryptography.HashAlgorithmName left, System.Security.Cryptography.HashAlgorithmName right) { throw null; }
public static bool operator !=(System.Security.Cryptography.HashAlgorithmName left, System.Security.Cryptography.HashAlgorithmName right) { throw null; }
public override string ToString() { throw null; }
}
public abstract partial class HMAC : System.Security.Cryptography.KeyedHashAlgorithm
{
protected HMAC() { }
protected int BlockSizeValue { get { throw null; } set { } }
public string HashName { get { throw null; } set { } }
public override byte[] Key { get { throw null; } set { } }
public static new System.Security.Cryptography.HMAC Create() { throw null; }
public static new System.Security.Cryptography.HMAC Create(string algorithmName) { throw null; }
protected override void Dispose(bool disposing) { }
protected override void HashCore(byte[] rgb, int ib, int cb) { }
protected override void HashCore(System.ReadOnlySpan<byte> source) { }
protected override byte[] HashFinal() { throw null; }
public override void Initialize() { }
protected override bool TryHashFinal(System.Span<byte> destination, out int bytesWritten) { throw null; }
}
public partial interface ICryptoTransform : System.IDisposable
{
bool CanReuseTransform { get; }
bool CanTransformMultipleBlocks { get; }
int InputBlockSize { get; }
int OutputBlockSize { get; }
int TransformBlock(byte[] inputBuffer, int inputOffset, int inputCount, byte[] outputBuffer, int outputOffset);
byte[] TransformFinalBlock(byte[] inputBuffer, int inputOffset, int inputCount);
}
public abstract partial class KeyedHashAlgorithm : System.Security.Cryptography.HashAlgorithm
{
protected byte[] KeyValue;
protected KeyedHashAlgorithm() { }
public virtual byte[] Key { get { throw null; } set { } }
public static new System.Security.Cryptography.KeyedHashAlgorithm Create() { throw null; }
public static new System.Security.Cryptography.KeyedHashAlgorithm Create(string algName) { throw null; }
protected override void Dispose(bool disposing) { }
}
public sealed partial class KeySizes
{
public KeySizes(int minSize, int maxSize, int skipSize) { }
public int MaxSize { get { throw null; } }
public int MinSize { get { throw null; } }
public int SkipSize { get { throw null; } }
}
public enum PaddingMode
{
ANSIX923 = 4,
ISO10126 = 5,
None = 1,
PKCS7 = 2,
Zeros = 3,
}
public abstract partial class SymmetricAlgorithm : System.IDisposable
{
protected int BlockSizeValue;
protected int FeedbackSizeValue;
protected byte[] IVValue;
protected int KeySizeValue;
protected byte[] KeyValue;
protected System.Security.Cryptography.KeySizes[] LegalBlockSizesValue;
protected System.Security.Cryptography.KeySizes[] LegalKeySizesValue;
protected System.Security.Cryptography.CipherMode ModeValue;
protected System.Security.Cryptography.PaddingMode PaddingValue;
protected SymmetricAlgorithm() { }
public virtual int BlockSize { get { throw null; } set { } }
public virtual int FeedbackSize { get { throw null; } set { } }
public virtual byte[] IV { get { throw null; } set { } }
public virtual byte[] Key { get { throw null; } set { } }
public virtual int KeySize { get { throw null; } set { } }
public virtual System.Security.Cryptography.KeySizes[] LegalBlockSizes { get { throw null; } }
public virtual System.Security.Cryptography.KeySizes[] LegalKeySizes { get { throw null; } }
public virtual System.Security.Cryptography.CipherMode Mode { get { throw null; } set { } }
public virtual System.Security.Cryptography.PaddingMode Padding { get { throw null; } set { } }
public void Clear() { }
public static System.Security.Cryptography.SymmetricAlgorithm Create() { throw null; }
public static System.Security.Cryptography.SymmetricAlgorithm Create(string algName) { throw null; }
public virtual System.Security.Cryptography.ICryptoTransform CreateDecryptor() { throw null; }
public abstract System.Security.Cryptography.ICryptoTransform CreateDecryptor(byte[] rgbKey, byte[] rgbIV);
public virtual System.Security.Cryptography.ICryptoTransform CreateEncryptor() { throw null; }
public abstract System.Security.Cryptography.ICryptoTransform CreateEncryptor(byte[] rgbKey, byte[] rgbIV);
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
public abstract void GenerateIV();
public abstract void GenerateKey();
public bool ValidKeySize(int bitLength) { throw null; }
}
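// Illustrative usage sketch (assumes Aes.Create() from the wider framework supplies the
// concrete SymmetricAlgorithm; only members listed above are used).
//
//   using (System.Security.Cryptography.SymmetricAlgorithm alg = System.Security.Cryptography.Aes.Create())
//   {
//       alg.GenerateKey();
//       alg.GenerateIV();
//       using (System.Security.Cryptography.ICryptoTransform encryptor = alg.CreateEncryptor(alg.Key, alg.IV))
//       {
//           byte[] ciphertext = encryptor.TransformFinalBlock(new byte[] { 1, 2, 3 }, 0, 3);
//       }
//   }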
}
| |
/********************************************************************++
Copyright (c) Microsoft Corporation. All rights reserved.
--********************************************************************/
using System.Collections;
using System.Collections.Generic;
using System.Management.Automation.Language;
using System.Management.Automation.Runspaces;
using Dbg = System.Management.Automation.Diagnostics;
namespace System.Management.Automation
{
/// <summary>
/// Holds the state of a Monad Shell session
/// </summary>
internal sealed partial class SessionStateInternal
{
#region Functions
/// <summary>
/// Add a new SessionState function entry to this session state object...
/// </summary>
/// <param name="entry">The entry to add</param>
internal void AddSessionStateEntry(SessionStateFunctionEntry entry)
{
ScriptBlock sb = entry.ScriptBlock.Clone();
FunctionInfo fn = this.SetFunction(entry.Name, sb, null, entry.Options, false, CommandOrigin.Internal, this.ExecutionContext, entry.HelpFile, true);
fn.Visibility = entry.Visibility;
fn.Module = entry.Module;
fn.ScriptBlock.LanguageMode = PSLanguageMode.FullLanguage;
}
#if !CORECLR // Workflow Not Supported On CSS
internal void AddSessionStateEntry(InitialSessionState initialSessionState, SessionStateWorkflowEntry entry)
{
var converterInstance = Utils.GetAstToWorkflowConverterAndEnsureWorkflowModuleLoaded(null);
var workflowInfo = entry.WorkflowInfo ??
converterInstance.CompileWorkflow(entry.Name, entry.Definition, initialSessionState);
WorkflowInfo wf = new WorkflowInfo(workflowInfo);
wf = this.SetWorkflowRaw(wf, CommandOrigin.Internal);
wf.Visibility = entry.Visibility;
wf.Module = entry.Module;
}
#endif
/// <summary>
/// Gets a flattened view of the functions that are visible using
/// the current scope as a reference and filtering the functions in
/// the other scopes based on the scoping rules.
/// </summary>
///
/// <returns>
/// An IDictionary representing the visible functions.
/// </returns>
///
internal IDictionary GetFunctionTable()
{
SessionStateScopeEnumerator scopeEnumerator =
new SessionStateScopeEnumerator(_currentScope);
Dictionary<string, FunctionInfo> result =
new Dictionary<string, FunctionInfo>(StringComparer.OrdinalIgnoreCase);
foreach (SessionStateScope scope in scopeEnumerator)
{
foreach (FunctionInfo entry in scope.FunctionTable.Values)
{
if (!result.ContainsKey(entry.Name))
{
result.Add(entry.Name, entry);
}
}
}
return result;
} // GetFunctionTable
/// <summary>
/// Gets an IEnumerable for the function table for a given scope
/// </summary>
///
/// <param name="scopeID">
/// A scope identifier that is either one of the "special" scopes like
/// "global", "script", "local", or "private, or a numeric ID of a relative scope
/// to the current scope.
/// </param>
///
/// <exception cref="ArgumentException">
/// If <paramref name="scopeID"/> is less than zero, or not
/// a number and not "script", "global", "local", or "private"
/// </exception>
///
/// <exception cref="ArgumentOutOfRangeException">
/// If <paramref name="scopeID"/> is less than zero or greater than the number of currently
/// active scopes.
/// </exception>
///
internal IDictionary<string, FunctionInfo> GetFunctionTableAtScope(string scopeID)
{
Dictionary<string, FunctionInfo> result =
new Dictionary<string, FunctionInfo>(StringComparer.OrdinalIgnoreCase);
SessionStateScope scope = GetScopeByID(scopeID);
foreach (FunctionInfo entry in scope.FunctionTable.Values)
{
// Make sure the function/filter isn't private or, if it is, that the current
// scope is the same scope the function was retrieved from.
if ((entry.Options & ScopedItemOptions.Private) == 0 ||
scope == _currentScope)
{
result.Add(entry.Name, entry);
}
}
return result;
} // GetFunctionTableAtScope
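// Illustrative sketch (hypothetical internal caller; SessionStateInternal is internal, so
// real callers live inside this assembly):
//
//   IDictionary<string, FunctionInfo> globalFunctions =
//       sessionState.GetFunctionTableAtScope("global");
//   foreach (KeyValuePair<string, FunctionInfo> pair in globalFunctions)
//   {
//       // Private functions defined in other scopes have already been filtered out above.
//   }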
/// <summary>
/// List of functions/filters to export from this session state object...
/// </summary>
internal List<FunctionInfo> ExportedFunctions { get; } = new List<FunctionInfo>();
/// <summary>
/// List of workflows to export from this session state object...
/// </summary>
internal List<WorkflowInfo> ExportedWorkflows { get; } = new List<WorkflowInfo>();
internal bool UseExportList { get; set; } = false;
/// <summary>
/// Get a function out of session state.
/// </summary>
///
/// <param name="name">
/// name of function to look up
/// </param>
///
/// <param name="origin">
/// Origin of the command that called this API...
/// </param>
///
/// <returns>
/// The value of the specified function.
/// </returns>
///
/// <exception cref="ArgumentException">
/// If <paramref name="name"/> is null or empty.
/// </exception>
///
internal FunctionInfo GetFunction(string name, CommandOrigin origin)
{
if (String.IsNullOrEmpty(name))
{
throw PSTraceSource.NewArgumentException("name");
}
FunctionInfo result = null;
FunctionLookupPath lookupPath = new FunctionLookupPath(name);
FunctionScopeItemSearcher searcher =
new FunctionScopeItemSearcher(this, lookupPath, origin);
if (searcher.MoveNext())
{
result = ((IEnumerator<FunctionInfo>)searcher).Current;
}
return result;
} // GetFunction
/// <summary>
/// Get a function out of session state.
/// </summary>
///
/// <param name="name">
/// name of function to look up
/// </param>
///
/// <returns>
/// The value of the specified function.
/// </returns>
///
/// <exception cref="ArgumentException">
/// If <paramref name="name"/> is null or empty.
/// </exception>
///
internal FunctionInfo GetFunction(string name)
{
return GetFunction(name, CommandOrigin.Internal);
} // GetFunction
private IEnumerable<string> GetFunctionAliases(IParameterMetadataProvider ipmp)
{
if (ipmp == null || ipmp.Body.ParamBlock == null)
yield break;
var attributes = ipmp.Body.ParamBlock.Attributes;
foreach (var attributeAst in attributes)
{
var attributeType = attributeAst.TypeName.GetReflectionAttributeType();
if (attributeType == typeof(AliasAttribute))
{
var cvv = new ConstantValueVisitor { AttributeArgument = true };
for (int i = 0; i < attributeAst.PositionalArguments.Count; i++)
{
yield return Compiler._attrArgToStringConverter.Target(Compiler._attrArgToStringConverter,
attributeAst.PositionalArguments[i].Accept(cvv));
}
}
}
}
/// <summary>
/// Set a function in the current scope of session state.
/// </summary>
///
/// <param name="name">
/// The name of the function to set.
/// </param>
///
/// <param name="function">
/// The new value of the function being set.
/// </param>
///
/// <param name="origin">
/// Origin of the caller of this API
/// </param>
///
/// <exception cref="ArgumentException">
/// If <paramref name="name"/> is null or empty.
/// </exception>
///
/// <exception cref="ArgumentNullException">
/// If <paramref name="function"/> is null.
/// </exception>
///
/// <exception cref="SessionStateUnauthorizedAccessException">
/// If the function is read-only or constant.
/// </exception>
///
/// <exception cref="SessionStateOverflowException">
/// If the maximum number of functions have been reached for this scope.
/// </exception>
///
internal FunctionInfo SetFunctionRaw(
string name,
ScriptBlock function,
CommandOrigin origin)
{
if (String.IsNullOrEmpty(name))
{
throw PSTraceSource.NewArgumentException("name");
}
if (function == null)
{
throw PSTraceSource.NewArgumentNullException("function");
}
string originalName = name;
FunctionLookupPath path = new FunctionLookupPath(name);
name = path.UnqualifiedPath;
if (String.IsNullOrEmpty(name))
{
SessionStateException exception =
new SessionStateException(
originalName,
SessionStateCategory.Function,
"ScopedFunctionMustHaveName",
SessionStateStrings.ScopedFunctionMustHaveName,
ErrorCategory.InvalidArgument);
throw exception;
}
ScopedItemOptions options = ScopedItemOptions.None;
if (path.IsPrivate)
{
options |= ScopedItemOptions.Private;
}
FunctionScopeItemSearcher searcher =
new FunctionScopeItemSearcher(
this,
path,
origin);
var functionInfo = searcher.InitialScope.SetFunction(name, function, null, options, false, origin, ExecutionContext);
foreach (var aliasName in GetFunctionAliases(function.Ast as IParameterMetadataProvider))
{
searcher.InitialScope.SetAliasValue(aliasName, name, ExecutionContext, false, origin);
}
return functionInfo;
} // SetFunctionRaw
internal WorkflowInfo SetWorkflowRaw(
WorkflowInfo workflowInfo,
CommandOrigin origin)
{
string originalName = workflowInfo.Name;
string name = originalName;
FunctionLookupPath path = new FunctionLookupPath(name);
name = path.UnqualifiedPath;
if (String.IsNullOrEmpty(name))
{
SessionStateException exception =
new SessionStateException(
originalName,
SessionStateCategory.Function,
"ScopedFunctionMustHaveName",
SessionStateStrings.ScopedFunctionMustHaveName,
ErrorCategory.InvalidArgument);
throw exception;
}
ScopedItemOptions options = ScopedItemOptions.None;
if (path.IsPrivate)
{
options |= ScopedItemOptions.Private;
}
FunctionScopeItemSearcher searcher =
new FunctionScopeItemSearcher(
this,
path,
origin);
// The script that defines a workflowInfo wrapper is fully trusted
workflowInfo.ScriptBlock.LanguageMode = PSLanguageMode.FullLanguage;
if (workflowInfo.Module == null && this.Module != null)
{
workflowInfo.Module = this.Module;
}
var wfInfo = (WorkflowInfo)
searcher.InitialScope.SetFunction(name, workflowInfo.ScriptBlock, null, options, false, origin, ExecutionContext, null,
(arg1, arg2, arg3, arg4, arg5, arg6) => workflowInfo);
foreach (var aliasName in GetFunctionAliases(workflowInfo.ScriptBlock.Ast as IParameterMetadataProvider))
{
searcher.InitialScope.SetAliasValue(aliasName, name, ExecutionContext, false, origin);
}
return wfInfo;
} // SetWorkflowRaw
/// <summary>
/// Set a function in the current scope of session state.
/// </summary>
///
/// <param name="name">
/// The name of the function to set.
/// </param>
///
/// <param name="function">
/// The new value of the function being set.
/// </param>
///
/// <param name="originalFunction">
/// The original function (if any) from which the ScriptBlock is derived.
/// </param>
///
/// <param name="options">
/// The options to set on the function.
/// </param>
///
/// <param name="force">
/// If true, the function will be set even if it is ReadOnly.
/// </param>
///
/// <param name="origin">
/// Origin of the caller of this API
/// </param>
/// <exception cref="ArgumentException">
/// If <paramref name="name"/> is null or empty.
/// </exception>
///
/// <exception cref="ArgumentNullException">
/// If <paramref name="function"/> is null.
/// </exception>
///
/// <exception cref="SessionStateUnauthorizedAccessException">
/// If the function is read-only or constant.
/// </exception>
///
/// <exception cref="SessionStateOverflowException">
/// If the maximum number of functions have been reached for this scope.
/// </exception>
///
internal FunctionInfo SetFunction(
string name,
ScriptBlock function,
FunctionInfo originalFunction,
ScopedItemOptions options,
bool force,
CommandOrigin origin)
{
return SetFunction(name, function, originalFunction, options, force, origin, ExecutionContext, null);
} // SetFunction
/// <summary>
/// Set a function in the current scope of session state.
/// </summary>
///
/// <param name="name">
/// The name of the function to set.
/// </param>
///
/// <param name="function">
/// The new value of the function being set.
/// </param>
///
/// <param name="originalFunction">
/// The original function (if any) from which the ScriptBlock is derived.
/// </param>
///
/// <param name="options">
/// The options to set on the function.
/// </param>
///
/// <param name="force">
/// If true, the function will be set even if it is ReadOnly.
/// </param>
///
/// <param name="origin">
/// Origin of the caller of this API
/// </param>
///
/// <param name="helpFile">
/// The name of the help file associated with the function.
/// </param>
///
/// <exception cref="ArgumentException">
/// If <paramref name="name"/> is null or empty.
/// </exception>
///
/// <exception cref="ArgumentNullException">
/// If <paramref name="function"/> is null.
/// </exception>
///
/// <exception cref="SessionStateUnauthorizedAccessException">
/// If the function is read-only or constant.
/// </exception>
///
/// <exception cref="SessionStateOverflowException">
/// If the maximum number of functions have been reached for this scope.
/// </exception>
///
internal FunctionInfo SetFunction(
string name,
ScriptBlock function,
FunctionInfo originalFunction,
ScopedItemOptions options,
bool force,
CommandOrigin origin,
string helpFile)
{
return SetFunction(name, function, originalFunction, options, force, origin, ExecutionContext, helpFile, false);
} // SetFunction
/// <summary>
/// Set a function in the current scope of session state.
/// </summary>
///
/// <param name="name">
/// The name of the function to set.
/// </param>
///
/// <param name="function">
/// The new value of the function being set.
/// </param>
///
/// <param name="originalFunction">
/// The original function (if any) from which the ScriptBlock is derived.
/// </param>
///
/// <param name="options">
/// The options to set on the function.
/// </param>
///
/// <param name="force">
/// If true, the function will be set even if it is ReadOnly.
/// </param>
///
/// <param name="origin">
/// Origin of the caller of this API
/// </param>
///
/// <param name="context">
/// The execution context for the function.
/// </param>
///
/// <param name="helpFile">
/// The name of the help file associated with the function.
/// </param>
/// <exception cref="ArgumentException">
/// If <paramref name="name"/> is null or empty.
/// </exception>
///
/// <exception cref="ArgumentNullException">
/// If <paramref name="function"/> is null.
/// </exception>
///
/// <exception cref="SessionStateUnauthorizedAccessException">
/// If the function is read-only or constant.
/// </exception>
///
/// <exception cref="SessionStateOverflowException">
/// If the maximum number of functions have been reached for this scope.
/// </exception>
///
internal FunctionInfo SetFunction(
string name,
ScriptBlock function,
FunctionInfo originalFunction,
ScopedItemOptions options,
bool force,
CommandOrigin origin,
ExecutionContext context,
string helpFile)
{
return SetFunction(name, function, originalFunction, options, force, origin, context, helpFile, false);
} // SetFunction
/// <summary>
/// Set a function in the current scope of session state.
/// </summary>
///
/// <param name="name">
/// The name of the function to set.
/// </param>
///
/// <param name="function">
/// The new value of the function being set.
/// </param>
///
/// <param name="originalFunction">
/// The original function (if any) from which the ScriptBlock is derived.
/// </param>
///
/// <param name="options">
/// The options to set on the function.
/// </param>
///
/// <param name="force">
/// If true, the function will be set even if it is ReadOnly.
/// </param>
///
/// <param name="origin">
/// Origin of the caller of this API
/// </param>
///
/// <param name="context">
/// The execution context for the function.
/// </param>
///
/// <param name="helpFile">
/// The name of the help file associated with the function.
/// </param>
///
/// <param name="isPreValidated">
/// Set to true if this is a regular function (meaning we do not need to check whether it is a workflow or whether the script contains a JobDefinition attribute and then process it)
/// </param>
///
/// <exception cref="ArgumentException">
/// If <paramref name="name"/> is null or empty.
/// </exception>
///
/// <exception cref="ArgumentNullException">
/// If <paramref name="function"/> is null.
/// </exception>
///
/// <exception cref="SessionStateUnauthorizedAccessException">
/// If the function is read-only or constant.
/// </exception>
///
/// <exception cref="SessionStateOverflowException">
/// If the maximum number of functions have been reached for this scope.
/// </exception>
///
internal FunctionInfo SetFunction(
string name,
ScriptBlock function,
FunctionInfo originalFunction,
ScopedItemOptions options,
bool force,
CommandOrigin origin,
ExecutionContext context,
string helpFile,
bool isPreValidated)
{
if (String.IsNullOrEmpty(name))
{
throw PSTraceSource.NewArgumentException("name");
}
if (function == null)
{
throw PSTraceSource.NewArgumentNullException("function");
}
string originalName = name;
FunctionLookupPath path = new FunctionLookupPath(name);
name = path.UnqualifiedPath;
if (String.IsNullOrEmpty(name))
{
SessionStateException exception =
new SessionStateException(
originalName,
SessionStateCategory.Function,
"ScopedFunctionMustHaveName",
SessionStateStrings.ScopedFunctionMustHaveName,
ErrorCategory.InvalidArgument);
throw exception;
}
if (path.IsPrivate)
{
options |= ScopedItemOptions.Private;
}
FunctionScopeItemSearcher searcher =
new FunctionScopeItemSearcher(
this,
path,
origin);
return searcher.InitialScope.SetFunction(name, function, originalFunction, options, force, origin, context, helpFile);
} // SetFunction
/// <summary>
/// Set a function in the current scope of session state.
/// </summary>
///
/// <param name="name">
/// The name of the function to set.
/// </param>
///
/// <param name="function">
/// The new value of the function being set.
/// </param>
///
/// <param name="originalFunction">
/// The original function (if any) from which the ScriptBlock is derived.
/// </param>
///
/// <param name="force">
/// If true, the function will be set even if it is ReadOnly.
/// </param>
///
/// <param name="origin">
/// The origin of the caller
/// </param>
///
/// <exception cref="ArgumentException">
/// If <paramref name="name"/> is null or empty.
/// or
/// If <paramref name="function"/> is not a <see cref="FilterInfo">FilterInfo</see>
/// or <see cref="FunctionInfo">FunctionInfo</see>
/// </exception>
///
/// <exception cref="ArgumentNullException">
/// If <paramref name="function"/> is null.
/// </exception>
///
/// <exception cref="SessionStateUnauthorizedAccessException">
/// If the function is read-only or constant.
/// </exception>
///
/// <exception cref="SessionStateOverflowException">
/// If the maximum number of functions have been reached for this scope.
/// </exception>
///
internal FunctionInfo SetFunction(
string name,
ScriptBlock function,
FunctionInfo originalFunction,
bool force,
CommandOrigin origin)
{
if (String.IsNullOrEmpty(name))
{
throw PSTraceSource.NewArgumentException("name");
}
if (function == null)
{
throw PSTraceSource.NewArgumentNullException("function");
}
string originalName = name;
FunctionLookupPath path = new FunctionLookupPath(name);
name = path.UnqualifiedPath;
if (String.IsNullOrEmpty(name))
{
SessionStateException exception =
new SessionStateException(
originalName,
SessionStateCategory.Function,
"ScopedFunctionMustHaveName",
SessionStateStrings.ScopedFunctionMustHaveName,
ErrorCategory.InvalidArgument);
throw exception;
}
ScopedItemOptions options = ScopedItemOptions.None;
if (path.IsPrivate)
{
options |= ScopedItemOptions.Private;
}
FunctionScopeItemSearcher searcher =
new FunctionScopeItemSearcher(
this,
path,
origin);
FunctionInfo result = null;
SessionStateScope scope = searcher.InitialScope;
if (searcher.MoveNext())
{
scope = searcher.CurrentLookupScope;
name = searcher.Name;
if (path.IsPrivate)
{
// Need to add the Private flag
FunctionInfo existingFunction = scope.GetFunction(name);
options |= existingFunction.Options;
result = scope.SetFunction(name, function, originalFunction, options, force, origin, ExecutionContext);
}
else
{
result = scope.SetFunction(name, function, force, origin, ExecutionContext);
}
}
else
{
if (path.IsPrivate)
{
result = scope.SetFunction(name, function, originalFunction, options, force, origin, ExecutionContext);
}
else
{
result = scope.SetFunction(name, function, force, origin, ExecutionContext);
}
}
return result;
}
/// <summary>
/// Set a function in the current scope of session state.
///
/// BUGBUG: this overload is preserved because a lot of tests use reflection to
/// call it. The tests should be fixed and this API eventually removed.
/// </summary>
///
/// <param name="name">
/// The name of the function to set.
/// </param>
///
/// <param name="function">
/// The new value of the function being set.
/// </param>
///
/// <param name="force">
/// If true, the function will be set even if it is ReadOnly.
/// </param>
///
/// <exception cref="ArgumentException">
/// If <paramref name="name"/> is null or empty.
/// or
/// If <paramref name="function"/> is not a <see cref="FilterInfo">FilterInfo</see>
/// or <see cref="FunctionInfo">FunctionInfo</see>
/// </exception>
///
/// <exception cref="ArgumentNullException">
/// If <paramref name="function"/> is null.
/// </exception>
///
/// <exception cref="SessionStateUnauthorizedAccessException">
/// If the function is read-only or constant.
/// </exception>
///
/// <exception cref="SessionStateOverflowException">
/// If the maximum number of functions have been reached for this scope.
/// </exception>
///
internal FunctionInfo SetFunction(string name, ScriptBlock function, bool force)
{
return SetFunction(name, function, null, force, CommandOrigin.Internal);
}
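// Illustrative sketch (hypothetical internal caller; ScriptBlock.Create is the public factory
// from System.Management.Automation and is an assumption here, not part of this file):
//
//   ScriptBlock body = ScriptBlock.Create("param($x) $x * 2");
//   FunctionInfo info = sessionState.SetFunction("Double-Value", body, force: false);
//   // "Double-Value" is now defined in the current scope unless an existing ReadOnly function
//   // blocks it, in which case SessionStateUnauthorizedAccessException is thrown per the docs above.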
/// <summary>
/// Removes a function from the function table.
/// </summary>
///
/// <param name="name">
/// The name of the function to remove.
/// </param>
///
/// <param name="origin">
/// The origin of the caller of this API
/// </param>
///
/// <param name="force">
/// If true, the function is removed even if it is ReadOnly.
/// </param>
///
/// <exception cref="ArgumentException">
/// If <paramref name="name"/> is null or empty.
/// </exception>
///
/// <exception cref="SessionStateUnauthorizedAccessException">
/// If the function is constant.
/// </exception>
///
internal void RemoveFunction(string name, bool force, CommandOrigin origin)
{
if (String.IsNullOrEmpty(name))
{
throw PSTraceSource.NewArgumentException("name");
}
// Use the scope enumerator to find an existing function
SessionStateScope scope = _currentScope;
FunctionLookupPath path = new FunctionLookupPath(name);
FunctionScopeItemSearcher searcher =
new FunctionScopeItemSearcher(
this,
path,
origin);
if (searcher.MoveNext())
{
scope = searcher.CurrentLookupScope;
}
scope.RemoveFunction(name, force);
} // RemoveFunction
/// <summary>
/// Removes a function from the function table.
/// </summary>
///
/// <param name="name">
/// The name of the function to remove.
/// </param>
///
/// <param name="force">
/// If true, the function is removed even if it is ReadOnly.
/// </param>
///
/// <exception cref="ArgumentException">
/// If <paramref name="name"/> is null or empty.
/// </exception>
///
/// <exception cref="SessionStateUnauthorizedAccessException">
/// If the function is constant.
/// </exception>
///
internal void RemoveFunction(string name, bool force)
{
RemoveFunction(name, force, CommandOrigin.Internal);
}
/// <summary>
/// Removes a function from the function table
/// if the function was imported from the given module.
///
/// BUGBUG: This is only used by the implicit remoting functions...
/// </summary>
///
/// <param name="name">
/// The name of the function to remove.
/// </param>
///
/// <param name="module">
/// Module the function might be imported from.
/// </param>
///
/// <exception cref="SessionStateUnauthorizedAccessException">
/// If the function is constant.
/// </exception>
///
internal void RemoveFunction(string name, PSModuleInfo module)
{
Dbg.Assert(module != null, "Caller should verify that module parameter is not null");
FunctionInfo func = GetFunction(name) as FunctionInfo;
if (func != null && func.ScriptBlock != null
&& func.ScriptBlock.File != null
&& func.ScriptBlock.File.Equals(module.Path, StringComparison.OrdinalIgnoreCase))
{
RemoveFunction(name, true);
}
}
#endregion Functions
} // SessionStateInternal class
}
| |
using Xunit;
namespace Jint.Tests.Ecma
{
public class Test_11_8_3 : EcmaTest
{
[Fact]
[Trait("Category", "11.8.3")]
public void LessThanOrEqualOperatorPartialLeftToRightOrderEnforcedWhenUsingLessThanOrEqualOperatorValueofValueof()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/11.8.3-1.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void LessThanOrEqualOperatorPartialLeftToRightOrderEnforcedWhenUsingLessThanOrEqualOperatorValueofTostring()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/11.8.3-2.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void LessThanOrEqualOperatorPartialLeftToRightOrderEnforcedWhenUsingLessThanOrEqualOperatorTostringValueof()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/11.8.3-3.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void LessThanOrEqualOperatorPartialLeftToRightOrderEnforcedWhenUsingLessThanOrEqualOperatorTostringTostring()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/11.8.3-4.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void LessThanOrEqualOperatorPartialLeftToRightOrderEnforcedWhenUsingLessThanOrEqualOperatorValueofValueof2()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/11.8.3-5.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void WhiteSpaceAndLineTerminatorBetweenRelationalexpressionAndOrBetweenAndShiftexpressionAreAllowed()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A1.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYUsesGetvalue()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A2.1_T1.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYUsesGetvalue2()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A2.1_T2.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYUsesGetvalue3()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A2.1_T3.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYUsesDefaultValue()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A2.2_T1.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void InEs5FirstExpressionShouldBeEvaluatedFirst()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A2.3_T1.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void FirstExpressionIsEvaluatedFirstAndThenSecondExpression()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A2.4_T1.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void FirstExpressionIsEvaluatedFirstAndThenSecondExpression2()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A2.4_T2.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void FirstExpressionIsEvaluatedFirstAndThenSecondExpression3()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A2.4_T3.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYReturnsTonumberXTonumberYIfTypePrimitiveXIsNotStringOrTypePrimitiveYIsNotString()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A3.1_T1.1.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYReturnsTonumberXTonumberYIfTypePrimitiveXIsNotStringOrTypePrimitiveYIsNotString2()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A3.1_T1.2.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYReturnsTonumberXTonumberYIfTypePrimitiveXIsNotStringOrTypePrimitiveYIsNotString3()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A3.1_T1.3.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYReturnsTonumberXTonumberYIfTypePrimitiveXIsNotStringOrTypePrimitiveYIsNotString4()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A3.1_T2.1.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYReturnsTonumberXTonumberYIfTypePrimitiveXIsNotStringOrTypePrimitiveYIsNotString5()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A3.1_T2.2.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYReturnsTonumberXTonumberYIfTypePrimitiveXIsNotStringOrTypePrimitiveYIsNotString6()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A3.1_T2.3.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYReturnsTonumberXTonumberYIfTypePrimitiveXIsNotStringOrTypePrimitiveYIsNotString7()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A3.1_T2.4.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYReturnsTonumberXTonumberYIfTypePrimitiveXIsNotStringOrTypePrimitiveYIsNotString8()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A3.1_T2.5.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYReturnsTonumberXTonumberYIfTypePrimitiveXIsNotStringOrTypePrimitiveYIsNotString9()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A3.1_T2.6.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYReturnsTonumberXTonumberYIfTypePrimitiveXIsNotStringOrTypePrimitiveYIsNotString10()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A3.1_T2.7.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYReturnsTonumberXTonumberYIfTypePrimitiveXIsNotStringOrTypePrimitiveYIsNotString11()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A3.1_T2.8.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYReturnsTonumberXTonumberYIfTypePrimitiveXIsNotStringOrTypePrimitiveYIsNotString12()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A3.1_T2.9.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYReturnsTostringXTostringYIfTypePrimitiveXIsStringAndTypePrimitiveYIsString()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A3.2_T1.1.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void OperatorXYReturnsTostringXTostringYIfTypePrimitiveXIsStringAndTypePrimitiveYIsString2()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A3.2_T1.2.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void IfXIsNanReturnFalseIfResultIn1185IsUndefinedReturnFalse()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A4.1.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void IfYIsAPrefixOfXAndXYReturnFalse()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A4.10.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void IfXIsAPrefixOfYReturnTrue()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A4.11.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void IfNeitherXNorYIsAPrefixOfEachOtherReturnedResultOfStringsComparisonAppliesASimpleLexicographicOrderingToTheSequencesOfCodePointValueValues()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A4.12_T1.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void IfNeitherXNorYIsAPrefixOfEachOtherReturnedResultOfStringsComparisonAppliesASimpleLexicographicOrderingToTheSequencesOfCodePointValueValues2()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A4.12_T2.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void IfYIsNanReturnFalseIfResultIn1185IsUndefinedReturnFalse()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A4.2.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void IfXAndYAreTheSameNumberValueReturnTrue()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A4.3.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void IfEitherXOrYIs0AndTheOtherIs0ReturnTrue()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A4.4.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void IfXIsInfinityAndXYReturnFalse()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A4.5.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void IfYIsInfinityAndXYReturnTrue()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A4.6.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void IfXIsInfinityReturnTrue()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A4.7.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void IfYIsInfinityAndXYReturnFalse()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A4.8.js", false);
}
[Fact]
[Trait("Category", "11.8.3")]
public void IfXIsLessOrEqualThanYAndTheseValuesAreBothFiniteNonZeroReturnTrueOtherwiseReturnFalse()
{
RunTest(@"TestCases/ch11/11.8/11.8.3/S11.8.3_A4.9.js", false);
}
}
}
| |
//------------------------------------------------------------------------------
// Symbooglix
//
//
// Copyright 2014-2017 Daniel Liew
//
// This file is licensed under the MIT license.
// See LICENSE.txt for details.
//------------------------------------------------------------------------------
using CommandLine;
using CommandLine.Text;
using System;
using System.IO;
using Microsoft;
using System.Linq;
using Microsoft.Boogie;
using Symbooglix;
using Solver = Symbooglix.Solver;
using Transform = Symbooglix.Transform;
using Util = Symbooglix.Util;
using System.Diagnostics;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace SymbooglixDriver
{
public class Driver
{
public class CmdLineOpts
{
[Option("append-query-log-file", DefaultValue = 0, HelpText = "When logging queries (see --log-queries) append to file rather than overwriting")]
public int appendLoggedQueries { get; set; }
[Option("catch-exceptions", DefaultValue = 1, HelpText="Catch Exceptions")]
public int CatchExceptions { get; set; }
[Option("constant-caching", DefaultValue=1, HelpText="Cache constants when building expressions")]
public int ConstantCaching { get; set; }
[Option("concurrent-logging", DefaultValue = 1, HelpText = "Log files concurrently, otherwise do in serial")]
public int ConcurrentLogging { get ; set; }
[Option("check-entry-requires", DefaultValue = 1, HelpText="Check entry point requires")]
public int CheckEntryRequires { get; set;}
[Option("check-entry-axioms", DefaultValue = 1, HelpText="Check axioms")]
public int CheckEntryAxioms { get; set;}
[Option("check-unique-vars", DefaultValue = 1, HelpText="Check unique variables")]
public int CheckUniqueVariableDecls { get; set;}
[Option("emit-before", DefaultValue = false, HelpText = "Emit Boogie program to stdout before running each pass")]
public bool emitProgramBefore { get; set; }
[Option("emit-after", DefaultValue = false, HelpText = "Emit Boogie program to stdout before running each pass")]
public bool emitProgramAfter { get; set; }
[Option("emit-triggers", DefaultValue = 1, HelpText = "Emit quantifier triggers during execution (experimental). Default 1")]
public int emitTriggers { get; set; }
[Option("esi-show-constraints", DefaultValue = 0, HelpText = "If logging ExecutionState info as YAML then show constraints (Default: 0)")]
public int ExecutionStateInfoShowConstraints { get; set; }
[Option("esi-show-vars", DefaultValue = 0, HelpText = "If logging ExecutionState info as YAML then show variables (Default: 0)")]
public int ExecutionStateInfoShowVariables { get; set; }
[OptionList('D', "defines",Separator = ',', HelpText="Add defines to the Boogie parser. Each define should be seperated by a comma.")]
public List<string> Defines { get; set; }
// FIXME: Urgh... how do you set the default value of the list?
[OptionList('e', "entry-points",
Separator = ',',
DefaultValue = null,
HelpText = "Comma seperated list of implementations to use as entry points for execution.")]
public List<string> entryPoints { get; set; }
[Option("stop-at-failure", DefaultValue=0, HelpText="Stop executor once N failures have been found. 0 means unlimited (Default 0)")]
public int FailureLimit { get; set; }
[Option("file-logging", DefaultValue=1, HelpText="Log information about execution to files (default=1)")]
public int FileLogging { get; set ; }
[Option("force-qfaufbv", DefaultValue= false, HelpText="HACK: Force solver to use qf_aufbv logic")]
public bool ForceQFAUFBV { get; set; }
[Option("fork-at-predicated-assign", DefaultValue = false, HelpText="Fork at predicated assign commands (v := if x then <expr> else v)")]
public bool ForkAtPredicatedAssign { get; set; }
[Option("goto-assume-look-ahead", DefaultValue= 1, HelpText="Prevent needless state creation and destruction by looking ahead at gotos")]
public int gotoAssumeLookAhead { get; set; }
[Option("gpuverify-entry-points", DefaultValue=false, HelpText = "Use GPUVerify kernels as entry points")]
public bool gpuverifyEntryPoints { get; set; }
[Option("gpuverify-ignore-invariants", DefaultValue=false, HelpText = "Ignore GPUVerify specific invariants")]
public bool GPUverifyIgnoreInvariants { get; set; }
[Option("globaldde", DefaultValue=1, HelpText="Run Global Dead Declaration eliminiation(default 1)")]
public int GlobalDDE { get; set; }
// FIXME: Booleans can't be disabled in the CommandLine library so use ints instead
[Option("fold-constants", DefaultValue = 1, HelpText = "Use Constant folding during execution")]
public int useConstantFolding { get; set; }
[Option("human-readable-smtlib", DefaultValue = 1, HelpText = "When writing SMTLIBv2 queries make them more readable by using indentation and comments")]
public int humanReadable { get; set ;}
[Option("log-queries", DefaultValue = "", HelpText= "Path to file to log queries to. Blank means logging is disabled.")]
public string queryLogPath { get; set; }
[Option("log-terminated-state-info", DefaultValue=1, HelpText="Log information about a termination state to a YAML file (a value of 0 disables)")]
public int LogTerminatedStateInfo { get; set;}
[Option("log-non-terminated-state-info", DefaultValue=1, HelpText="Log information about a termination state to a YAML file (a value of 0 disables)")]
public int LogNonTerminatedStateInfo { get; set;}
[Option("caching-solver", DefaultValue=-1, HelpText="-1 do not use, 0 unlimited query cache, other query cache limited by specified number")]
public int CachingSolver { get; set; }
[Option("ci-solver", DefaultValue = 1, HelpText = "Use Constraint independence solver")]
public int ConstraintIndepenceSolver { get; set; }
[Option("max-depth", DefaultValue=-1, HelpText="Max ExplicitBranchDepth to explore. Default is -1 which means no limit")]
public int MaxDepth { get; set; }
[Option("max-loop-depth", DefaultValue = -1, HelpText = "Max loop depth to explore. Default is -1 which means no limit")]
public int MaxLoopDepth { get; set; }
[Option("print-instr", DefaultValue = false, HelpText = "Print instructions during execution")]
public bool useInstructionPrinter { get; set; }
[Option("prefer-loop-escaping-paths", DefaultValue =1, HelpText= "Prefer paths that escape loops (Default 1)")]
public int PreferLoopEscapingPaths { get; set; }
[Option("print-call-seq", DefaultValue = false, HelpText = "Print call sequence during execution")]
public bool useCallSequencePrinter { get; set; }
[Option("remove-trivial-assumes", DefaultValue= false, HelpText="Remove trivial assumes")]
public bool RemoveTrivialAssumes { get; set; }
[Option("skip-log-success-states", HelpText="Don't log information about states that terminate with success")]
public bool SkipLogTerminatedWithSuccess { get; set; }
[Option("skip-log-unsat-assume-states", HelpText="Don't log information about states that terminate with unsatisfiable assume")]
public bool SkipLogTerminatedWithUnsatAssume { get; set; }
[Option("timeout", DefaultValue=0, HelpText="Number of seconds to wait before killing executor for the current entry point")]
public int timeout { get; set;}
public enum Solver
{
CVC4,
DUMMY,
Z3
}
[Option("output-dir", DefaultValue="", HelpText="Directory to place Executor log files. By default a symbooglix-<N> directory is used")]
public string outputDir { get; set; }
public enum Scheduler
{
DFS,
BFS,
UntilEndBFS,
AltBFS
}
[Option("persistent-solver", DefaultValue=1, HelpText="Try to make solver process persistent")]
public int PersistentSolver { get; set; }
[Option("scheduler", DefaultValue = Scheduler.DFS, HelpText="State scheduler to use")]
public Scheduler scheduler { get; set; }
// FIXME: The command line library should tell the user what the valid values are
[Option("solver", DefaultValue = Solver.Z3, HelpText = "Solver to use (valid values CVC4, DUMMY, Z3)")]
public Solver solver { get; set; }
[Option("solver-path", DefaultValue = "", HelpText = "Path to the SMTLIBv2 solver")]
public string pathToSolver { get; set; }
[Option("solver-timeout", DefaultValue=120, HelpText="Maximum time allowed for a single query")]
public int solverTimeout {get; set;}
[Option("solver-use-named-attr", DefaultValue=1, HelpText="Use named attributes with SMTLIB based solvers")]
public int UseNamedAttributes { get; set; }
[Option("use-modset-transform", DefaultValue = 1, HelpText = "Run the modset analysis to fix incorrect modsets before type checking")]
public int useModSetTransform { get; set; }
[Option("symbolic-pool-cache", DefaultValue = 0, HelpText = "Use Symbolic pool cache (0 uses naive symbolic pool")]
public int useSymbolicPoolCache { get ; set ; }
[Option("write-smt2", DefaultValue = 1, HelpText="Write constraints for each ExecutionState as SMTLIBv2 (Default 1)")]
public int WriteConstraints { get ; set; }
// Positional args
[ValueOption(0)]
public string boogieProgramPath { get; set; }
// For printing parser error messages
[ParserState]
public IParserState LastParserState { get; set; }
[HelpOption]
public string GetUsage()
{
var help = new HelpText {
Heading = new HeadingInfo("Symbooglix", "The symbolic execution engine for Boogie programs"),
Copyright = new CopyrightInfo("Dan Liew", 2014),
AdditionalNewLineAfterOption = true,
AddDashesToOption = true
};
// FIXME: Printing parser errors is totally broken.
if (LastParserState == null)
Console.WriteLine("FIXME: CommandLine parser did not give state");
if (LastParserState != null && LastParserState.Errors.Any())
{
var errors = help.RenderParsingErrorsText(this, 2);
help.AddPostOptionsLine("Error: Failed to parse command line options");
help.AddPostOptionsLine(errors);
}
else
{
help.AddPreOptionsLine("Usage: symbooglix [options] <boogie program>");
help.AddOptions(this);
}
return help;
}
}
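// Illustrative invocation sketch (hypothetical file paths and values; the executable name
// comes from the usage line above and the option names from the attributes in CmdLineOpts):
//
//   symbooglix program.bpl --entry-points=main,foo --timeout=900 --solver=Z3 --solver-path=/usr/bin/z3 --output-dir=out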
public enum ExitCode : int
{
// For Executor
NO_ERRORS_NO_TIMEOUT = 0, // Essentially means path exploration was exhaustive
// Mono exits with exitcode 1 if there are uncaught exceptions so
// we should use the same exit code when we catch them
EXCEPTION_RAISED = 1,
// For Executor
ERRORS_NO_TIMEOUT = 2,
NO_ERRORS_TIMEOUT,
ERRORS_TIMEOUT,
OUT_OF_MEMORY,
NOT_IMPLEMENTED_EXCEPTION,
NOT_SUPPORTED_EXCEPTION,
INITIAL_STATE_TERMINATED,
NO_ERRORS_NO_TIMEOUT_BUT_FOUND_SPECULATIVE_PATHS,
NO_ERRORS_NO_TIMEOUT_BUT_HIT_BOUND,
// Other stuff
COMMAND_LINE_ERROR = 128,
PARSE_ERROR,
RESOLVE_ERROR,
TYPECHECK_ERROR,
RECURSIVE_FUNCTIONS_FOUND_ERROR,
SOLVER_NOT_FOUND,
ENTRY_POINT_NOT_FOUND_ERROR,
CTRL_C_FORCED_EXIT,
}
private static bool TimeoutHit = false;
private static void ExitWith(ExitCode exitCode)
{
Console.WriteLine("Exiting with {0}", exitCode.ToString());
System.Environment.Exit( (int) exitCode);
throw new InvalidOperationException("Unreachable");
}
public static int Main(String[] args)
{
// This is for debugging
bool catchExceptions = true;
foreach (var arg in args)
{
// Look for --catch-exceptions=0
if (arg == "--catch-exceptions=0")
{
catchExceptions = false;
break;
}
}
if (!catchExceptions)
{
Console.WriteLine("Not catching exceptions in the driver");
return RealMain(args);
}
// We use this to capture if an unhandled exception was
// raised and exit with the appropriate exit code if this happens.
try
{
return RealMain(args);
}
catch (Exception e)
{
Console.Error.WriteLine("Exception raised");
Console.Error.WriteLine(e.ToString());
ExitWith(ExitCode.EXCEPTION_RAISED);
return (int)ExitCode.EXCEPTION_RAISED; // Keep compiler happy
}
}
private static void SetupTerminationCatchers(Executor executor)
{
// Catch CTRL+C
bool hitCancelOnce = false;
Console.CancelKeyPress += delegate(object sender, ConsoleCancelEventArgs eventArgs)
{
if (hitCancelOnce)
{
Console.WriteLine("CTRL+C pressed again. Giving up and just exiting");
eventArgs.Cancel = false; // Force exit
ExitWith(ExitCode.CTRL_C_FORCED_EXIT);
}
else
{
hitCancelOnce = true;
Console.WriteLine("Received CTRL+C. Attempting to terminated Executor");
executor.Terminate(/*block=*/ false);
eventArgs.Cancel = true; // Don't exit yet
}
};
// Sending SIGINT to the driver when stdout/stderr is not attached to a TTY does not seem to
// trigger the Console.CancelKeyPress event. So here we use a HACK to catch the signals we
// care about and ask the Executor to terminate
var signals = new Mono.Unix.UnixSignal[] {
new Mono.Unix.UnixSignal(Mono.Unix.Native.Signum.SIGTERM), // boogie-runner sends this
};
bool signalCaught = false;
Task.Factory.StartNew(() =>
{
while (true)
{
Console.WriteLine("Waiting for UNIX signals");
Mono.Unix.UnixSignal.WaitAny(signals);
Console.WriteLine("Caught UNIX signal");
if (signalCaught)
{
Console.WriteLine("Signal received again. Just exiting");
ExitWith(ExitCode.CTRL_C_FORCED_EXIT);
}
else
{
executor.Terminate(false);
signalCaught = true;
}
}
});
}
public static int RealMain(String[] args)
{
// Debug log output goes to standard error.
Debug.Listeners.Add(new ExceptionThrowingTextWritierTraceListener(Console.Error));
// FIXME: Urgh... we are forced to use Boogie's command line
// parser becaue the Boogie program resolver/type checker
// is dependent on the parser being used...EURGH!
CommandLineOptions.Install(new Microsoft.Boogie.CommandLineOptions());
var options = new CmdLineOpts();
if (! CommandLine.Parser.Default.ParseArguments(args, options))
{
Console.WriteLine("Failed to parse args");
ExitWith(ExitCode.COMMAND_LINE_ERROR);
}
if (options.boogieProgramPath == null)
{
Console.WriteLine("A boogie program must be specified. See --help");
ExitWith(ExitCode.COMMAND_LINE_ERROR);
}
if (!File.Exists(options.boogieProgramPath))
{
Console.WriteLine("Boogie program \"" + options.boogieProgramPath + "\" does not exist");
ExitWith(ExitCode.COMMAND_LINE_ERROR);
}
Program program = null;
if (options.Defines != null)
{
foreach (var define in options.Defines)
Console.WriteLine("Adding define \"" + define + "\" to Boogie parser");
}
int errors = Microsoft.Boogie.Parser.Parse(options.boogieProgramPath, options.Defines, out program);
if (errors != 0)
{
Console.WriteLine("Failed to parse");
ExitWith(ExitCode.PARSE_ERROR);
}
errors = program.Resolve();
if (errors != 0)
{
Console.WriteLine("Failed to resolve.");
ExitWith(ExitCode.RESOLVE_ERROR);
}
if (options.useModSetTransform > 0)
{
// This is useful for Boogie Programs produced by the GPUVerify tool that
// have had instrumentation added that invalidates the modset attached to
// procedures. By running the analysis we may modify the modsets attached to
// procedures in the program to be correct so that Boogie's Type checker doesn't
// produce an error.
var modsetAnalyser = new ModSetCollector();
modsetAnalyser.DoModSetAnalysis(program);
}
errors = program.Typecheck();
if (errors != 0)
{
Console.WriteLine("Failed to Typecheck.");
ExitWith(ExitCode.TYPECHECK_ERROR);
}
IStateScheduler scheduler = GetScheduler(options);
// Limit Depth if necessary
if (options.MaxDepth >= 0)
{
scheduler = new LimitExplicitDepthScheduler(scheduler, options.MaxDepth);
Console.WriteLine("Using Depth limit:{0}", options.MaxDepth);
}
if (options.FailureLimit < 0)
{
Console.Error.WriteLine("FailureLimit must be >= 0");
ExitWith(ExitCode.COMMAND_LINE_ERROR);
}
Console.WriteLine("Using Scheduler: {0}", scheduler.ToString());
var nonSpeculativeterminationCounter = new TerminationCounter(TerminationCounter.CountType.ONLY_NON_SPECULATIVE);
var speculativeTerminationCounter = new TerminationCounter(TerminationCounter.CountType.ONLY_SPECULATIVE);
IExprBuilder builder = new SimpleExprBuilder(/*immutable=*/ true);
ISymbolicPool symbolicPool = null;
if (options.useSymbolicPoolCache > 0)
{
throw new Exception("DON'T USE THIS. IT'S BROKEN");
symbolicPool = new CachingSymbolicPool();
}
else
symbolicPool = new SimpleSymbolicPool();
Console.WriteLine("Using Symbolic Pool: {0}", symbolicPool.ToString());
if (options.useConstantFolding > 0)
{
if (options.ConstantCaching > 0)
{
Console.WriteLine("Using ConstantCachingExprBuilder");
builder = new ConstantCachingExprBuilder(builder);
}
builder = new ConstantFoldingExprBuilder(builder);
}
// Destroy the solver when we stop using it
using (var solver = BuildSolverChain(options))
{
Executor executor = new Executor(program, scheduler, solver, builder, symbolicPool);
executor.ExecutorTimeoutReached += delegate(object sender, Executor.ExecutorTimeoutReachedArgs eventArgs)
{
TimeoutHit = true; // Record so we can set the exitcode appropriately later
Console.Error.WriteLine("Timeout hit. Trying to kill Executor (may wait for solver)");
};
// Check all implementations exist and build list of entry points to execute
var entryPoints = new List<Implementation>();
// This is specific to GPUVerify
if (options.gpuverifyEntryPoints)
{
var kernels = program.TopLevelDeclarations.OfType<Implementation>().Where(impl => QKeyValue.FindBoolAttribute(impl.Attributes,"kernel"));
foreach (var kernel in kernels)
{
entryPoints.Add(kernel);
}
if (entryPoints.Count() == 0)
{
Console.WriteLine("Could not find any kernel entry points");
ExitWith(ExitCode.ENTRY_POINT_NOT_FOUND_ERROR);
}
}
else
{
// Set main as default.
if (options.entryPoints == null)
options.entryPoints = new List<string>() { "main" };
foreach (var implString in options.entryPoints)
{
Implementation entry = program.TopLevelDeclarations.OfType<Implementation>().Where(i => i.Name == implString).FirstOrDefault();
if (entry == null)
{
Console.WriteLine("Could not find implementation \"" + implString + "\" to use as entry point");
ExitWith(ExitCode.ENTRY_POINT_NOT_FOUND_ERROR);
}
entryPoints.Add(entry);
}
}
if (options.useInstructionPrinter)
{
Console.WriteLine("Installing instruction printer");
var instrPrinter = new InstructionPrinter(Console.Out);
instrPrinter.Connect(executor);
}
if (options.useCallSequencePrinter)
{
Console.WriteLine("Installing call sequence printer");
var callPrinter = new CallPrinter(Console.Out);
callPrinter.Connect(executor);
}
if (options.gotoAssumeLookAhead > 0)
{
executor.UseGotoLookAhead = true;
}
else
{
executor.UseGotoLookAhead = false;
}
if (options.ForkAtPredicatedAssign)
executor.UseForkAtPredicatedAssign = true;
if (options.CheckEntryRequires > 0)
executor.CheckEntryRequires = true;
else
{
Console.WriteLine("Warning: Requires at the entry point are not being checked");
executor.CheckEntryRequires = false;
}
if (options.CheckEntryAxioms > 0)
executor.CheckEntryAxioms = true;
else
{
Console.WriteLine("Warning: Axioms are not being checked");
executor.CheckEntryAxioms = false;
}
if (options.CheckUniqueVariableDecls > 0)
executor.CheckUniqueVariableDecls = true;
else
{
Console.WriteLine("Warning: Unique variables are not being checked");
executor.CheckUniqueVariableDecls = false;
}
if (options.GlobalDDE > 0)
{
executor.UseGlobalDDE = true;
Console.WriteLine("WARNING: Using GlobalDDE. This may remove unsatisfiable axioms");
}
else
executor.UseGlobalDDE = false;
// Just print a message about break points for now.
executor.BreakPointReached += BreakPointPrinter.handleBreakPoint;
// Write to the console about context changes
var contextChangeReporter = new ContextChangedReporter();
contextChangeReporter.Connect(executor);
var stateHandler = new TerminationConsoleReporter();
stateHandler.Connect(executor);
nonSpeculativeTerminationCounter.Connect(executor);
speculativeTerminationCounter.Connect(executor);
if (options.FileLogging > 0)
SetupFileLoggers(options, executor, solver);
SetupTerminationCatchers(executor);
ApplyFilters(executor, options);
if (options.FailureLimit > 0)
{
var failureLimiter = new FailureLimiter(options.FailureLimit);
failureLimiter.Connect(executor);
Console.WriteLine("Using failure limit of {0}", options.FailureLimit);
}
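// Run the executor over each requested entry point. The catch blocks below map known failures to
// exit codes; exceptions are only rethrown when options.CatchExceptions is zero.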
try
{
// Supply our own PassManager for preparation so we can hook into its events
executor.PreparationPassManager = GetPassManager(options);
foreach (var entryPoint in entryPoints)
{
Console.ForegroundColor = ConsoleColor.Cyan;
Console.WriteLine("Entering Implementation " + entryPoint.Name + " as entry point");
Console.ResetColor();
executor.Run(entryPoint, options.timeout);
}
}
catch (InitialStateTerminated)
{
if (options.CatchExceptions == 0)
{
throw;
}
Console.ForegroundColor = ConsoleColor.Red;
Console.Error.WriteLine("The initial state terminated. Execution cannot continue");
Console.ResetColor();
ExitWith(ExitCode.INITIAL_STATE_TERMINATED);
}
catch (RecursiveFunctionDetectedException rfdException)
{
if (options.CatchExceptions == 0)
{
throw;
}
Console.ForegroundColor = ConsoleColor.Red;
Console.Error.WriteLine("Detected the following recursive functions");
foreach (var function in rfdException.Functions)
{
Console.Error.Write(function.Name + ": ");
if (function.Body != null)
Console.Error.WriteLine(function.Body.ToString());
if (function.DefinitionAxiom != null)
Console.Error.WriteLine(function.DefinitionAxiom.Expr.ToString());
}
Console.ResetColor();
ExitWith(ExitCode.RECURSIVE_FUNCTIONS_FOUND_ERROR);
}
catch (OutOfMemoryException e)
{
if (options.CatchExceptions == 0)
{
throw;
}
Console.Error.WriteLine("Ran out of memory!");
Console.Error.WriteLine(e.ToString());
ExitWith(ExitCode.OUT_OF_MEMORY);
}
catch (NotImplementedException e)
{
if (options.CatchExceptions == 0)
{
throw;
}
Console.Error.WriteLine("Feature not implemented!");
Console.Error.WriteLine(e.ToString());
ExitWith(ExitCode.NOT_IMPLEMENTED_EXCEPTION);
}
catch (NotSupportedException e)
{
if (options.CatchExceptions == 0)
{
throw;
}
Console.Error.WriteLine("Feature not supported!");
Console.Error.WriteLine(e.ToString());
ExitWith(ExitCode.NOT_SUPPORTED_EXCEPTION);
}
Console.WriteLine("Finished executing");
DumpStats(executor, solver, nonSpeculativeTerminationCounter, speculativeTerminationCounter);
}
if (TimeoutHit)
{
ExitWith(nonSpeculativeTerminationCounter.NumberOfFailures > 0 ? ExitCode.ERRORS_TIMEOUT : ExitCode.NO_ERRORS_TIMEOUT);
throw new InvalidOperationException("Unreachable");
}
var exitCode = nonSpeculativeTerminationCounter.NumberOfFailures > 0 ? ExitCode.ERRORS_NO_TIMEOUT : ExitCode.NO_ERRORS_NO_TIMEOUT;
if (exitCode == ExitCode.NO_ERRORS_NO_TIMEOUT)
{
// If no errors were found we may need to pick a different exit code
// because path exploration may not have been exhaustive due to speculative paths
// or hitting a bound. This isn't perfect because we may hit a bound and have speculative
// paths so we could use either exit code in this case.
if (nonSpeculativeTerminationCounter.DisallowedSpeculativePaths > 0 || speculativeTerminationCounter.NumberOfTerminatedStates > 0) {
exitCode = ExitCode.NO_ERRORS_NO_TIMEOUT_BUT_FOUND_SPECULATIVE_PATHS;
Console.WriteLine("NOTE: Bugs may have been missed!");
}
else if (nonSpeculativeTerminationCounter.DisallowedPathDepths > 0) {
exitCode = ExitCode.NO_ERRORS_NO_TIMEOUT_BUT_HIT_BOUND;
Console.WriteLine("NOTE: Bugs may have been missed!");
}
}
ExitWith(exitCode);
return (int) exitCode; // This is required to keep the compiler happy.
}
public static void DumpStats(Executor executor, Solver.ISolver solver, TerminationCounter nonSpeculativeTerminationCounter,
TerminationCounter speculativeTerminationCounter)
{
using (var ITW = new System.CodeDom.Compiler.IndentedTextWriter(Console.Out))
{
executor.Statistics.WriteAsYAML(ITW);
solver.Statistics.WriteAsYAML(ITW);
solver.SolverImpl.Statistics.WriteAsYAML(ITW);
nonSpeculativeTerminationCounter.WriteAsYAML(ITW);
speculativeTerminationCounter.WriteAsYAML(ITW);
}
}
public static void SetupFileLoggers(CmdLineOpts options, Executor executor, Solver.ISolver solver)
{
ExecutorFileLoggerHandler executorLogger = null;
if (options.outputDir.Length == 0)
executorLogger = new ExecutorFileLoggerHandler(executor, Directory.GetCurrentDirectory(), /*makeDirectoryInPath=*/ true);
else
executorLogger = new ExecutorFileLoggerHandler(executor, options.outputDir, /*makeDirectoryInPath=*/ false);
// Add our loggers
executorLogger.AddRootDirLogger(new CallGrindFileLogger());
//executorLogger.AddRootDirLogger(new MemoryUsageLogger()); // FIXME: Disable for experiments it is buggy
executorLogger.AddRootDirLogger(new TerminationCounterLogger(TerminationCounter.CountType.ONLY_NON_SPECULATIVE));
executorLogger.AddRootDirLogger(new TerminationCounterLogger(TerminationCounter.CountType.ONLY_SPECULATIVE));
//executorLogger.AddRootDirLogger(new ExecutionTreeLogger(true));
executorLogger.AddRootDirLogger(new ExecutorInfoLogger());
Predicate<ExecutionState> statesToIgnoreFilter = delegate(ExecutionState state)
{
if (options.SkipLogTerminatedWithSuccess)
{
if (state.TerminationType is TerminatedWithoutError)
return true; // Ignore
}
if (options.SkipLogTerminatedWithUnsatAssume)
{
if (state.TerminationType is TerminatedAtUnsatisfiableAssume)
return true; // Ignore
}
return false;
};
bool concurrentLogging = options.ConcurrentLogging > 0;
if (options.WriteConstraints > 0)
{
executorLogger.AddTerminatedStateDirLogger(new ExecutionStateConstraintLogger(ExecutionStateLogger.ExecutorEventType.TERMINATED_STATE,
statesToIgnoreFilter, concurrentLogging));
executorLogger.AddTerminatedStateDirLogger(new ExecutionStateUnSatCoreLogger(ExecutionStateLogger.ExecutorEventType.TERMINATED_STATE,
statesToIgnoreFilter, concurrentLogging));
executorLogger.AddNonTerminatedStateDirLogger(new ExecutionStateConstraintLogger(ExecutionStateLogger.ExecutorEventType.NON_TERMINATED_STATE_REMOVED,
statesToIgnoreFilter, concurrentLogging));
}
bool showConstraints = options.ExecutionStateInfoShowConstraints > 0;
bool showVariables = options.ExecutionStateInfoShowVariables > 0;
if (options.LogTerminatedStateInfo > 0)
{
executorLogger.AddTerminatedStateDirLogger(new ExecutionStateInfoLogger(ExecutionStateLogger.ExecutorEventType.TERMINATED_STATE,
showConstraints,
showVariables,
statesToIgnoreFilter,
concurrentLogging));
}
if (options.LogNonTerminatedStateInfo > 0)
{
executorLogger.AddNonTerminatedStateDirLogger(new ExecutionStateInfoLogger(ExecutionStateLogger.ExecutorEventType.NON_TERMINATED_STATE_REMOVED,
showConstraints,
showVariables,
statesToIgnoreFilter,
concurrentLogging));
}
executorLogger.Connect();
Console.WriteLine("Logging to directory: " + executorLogger.RootDir.FullName);
}
public static Transform.PassManager GetPassManager(CmdLineOpts options)
{
// Supply our own PassManager for preparation so we can hook into its events
var PM = new Transform.PassManager();
if (options.RemoveTrivialAssumes)
PM.Add(new Transform.TrivialAssumeElimination());
// Use anonymous methods so we can use closure to read command line options
Transform.PassManager.PassRunEvent beforePassHandler = delegate(Object passManager, Transform.PassManager.PassManagerEventArgs eventArgs)
{
Console.ForegroundColor = ConsoleColor.Red;
Console.WriteLine("Running pass " + eventArgs.ThePass.GetName());
Console.ResetColor();
if (options.emitProgramBefore)
{
Console.WriteLine("**** Program before pass:");
Util.ProgramPrinter.Print(eventArgs.TheProgram, Console.Out, /*pretty=*/true, Symbooglix.Util.ProgramPrinter.PrintType.UNSTRUCTURED_ONLY);
Console.WriteLine("**** END Program before pass");
}
};
Transform.PassManager.PassRunEvent afterPassHandler = delegate(Object passManager, Transform.PassManager.PassManagerEventArgs eventArgs)
{
Console.ForegroundColor = ConsoleColor.Green;
Console.WriteLine("Finished running pass " + eventArgs.ThePass.GetName());
Console.ResetColor();
if (options.emitProgramAfter)
{
Console.WriteLine("**** Program after pass:");
Util.ProgramPrinter.Print(eventArgs.TheProgram, Console.Out, /*pretty=*/true, Symbooglix.Util.ProgramPrinter.PrintType.UNSTRUCTURED_ONLY);
Console.WriteLine("**** END Program after pass:");
}
};
PM.BeforePassRun += beforePassHandler;
PM.AfterPassRun += afterPassHandler;
return PM;
}
public static IStateScheduler GetScheduler(CmdLineOpts options)
{
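// Pick the base scheduler, then optionally wrap it to prefer loop-escaping paths and to bound loop depth.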
IStateScheduler scheduler = null;
switch (options.scheduler)
{
case CmdLineOpts.Scheduler.DFS:
scheduler = new DFSStateScheduler();
break;
case CmdLineOpts.Scheduler.BFS:
scheduler = new BFSStateScheduler();
break;
case CmdLineOpts.Scheduler.UntilEndBFS:
scheduler = new UntilTerminationBFSStateScheduler();
break;
case CmdLineOpts.Scheduler.AltBFS:
scheduler = new AlternativeBFSStateScheduler();
break;
default:
throw new ArgumentException("Unsupported scheduler");
}
if (options.PreferLoopEscapingPaths > 0)
scheduler = new LoopEscapingScheduler(scheduler);
if (options.MaxLoopDepth > 0)
scheduler = new LimitLoopBoundScheduler(scheduler, options.MaxLoopDepth);
return scheduler;
}
public static Solver.ISolver BuildSolverChain(CmdLineOpts options)
{
Solver.ISolverImpl solverImpl = null;
// Try to guess the location of executable. This is just for convenience
if (options.pathToSolver.Length == 0 && options.solver != CmdLineOpts.Solver.DUMMY)
{
Console.WriteLine("Path to SMT solver not specified. Guessing location");
// Look in the directory of the currently running executable for other solvers
var pathToSolver = Path.Combine(Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location),
options.solver.ToString().ToLower());
if (File.Exists(pathToSolver))
{
Console.WriteLine("Found \"{0}\"", pathToSolver);
options.pathToSolver = pathToSolver;
}
else
{
// Try with ".exe" appended
pathToSolver = pathToSolver + ".exe";
if (File.Exists(pathToSolver))
{
Console.WriteLine("Found \"{0}\"", pathToSolver);
options.pathToSolver = pathToSolver;
}
else
{
Console.Error.WriteLine("Could not find \"{0}\" (also without .exe)", pathToSolver);
ExitWith(ExitCode.SOLVER_NOT_FOUND);
}
}
}
// HACK: THIS IS GROSS! REMOVE THIS ASAP AND FIND A CLEAN WAY OF DOING THIS!!!!!!!!!!!!
var logicToUse = options.ForceQFAUFBV ? SMTLIBQueryPrinter.Logic.QF_AUFBV : SMTLIBQueryPrinter.Logic.DO_NOT_SET;
switch (options.solver)
{
case CmdLineOpts.Solver.CVC4:
solverImpl = new Solver.CVC4SMTLIBSolver(options.UseNamedAttributes > 0,
options.pathToSolver,
options.PersistentSolver > 0,
options.emitTriggers > 0,
logicToUse);
break;
case CmdLineOpts.Solver.Z3:
solverImpl = new Solver.Z3SMTLIBSolver(options.UseNamedAttributes > 0,
options.pathToSolver,
options.PersistentSolver > 0,
options.emitTriggers > 0,
logicToUse);
break;
case CmdLineOpts.Solver.DUMMY:
solverImpl = new Solver.DummySolver(Symbooglix.Solver.Result.UNKNOWN);
break;
default:
throw new NotSupportedException("Unhandled solver type");
}
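// Wrap the chosen solver implementation as requested: query logging first, then the caching solver,
// then the constraint independence solver, before handing it to the SimpleSolver that applies the timeout.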
if (options.queryLogPath.Length > 0)
{
// FIXME: How are we going to ensure this file gets closed properly?
StreamWriter QueryLogFile = new StreamWriter(options.queryLogPath, /*append=*/ options.appendLoggedQueries > 0);
solverImpl = new Solver.SMTLIBQueryLoggingSolverImpl(solverImpl, QueryLogFile, /*useNamedAttributeBindings=*/true, options.humanReadable > 0);
}
if (options.CachingSolver >= 0)
{
solverImpl = new Solver.SimpleSolverCache(solverImpl, options.CachingSolver);
}
if (options.ConstraintIndepenceSolver > 0)
{
solverImpl = new Solver.ConstraintIndependenceSolver(solverImpl);
}
// Only support this for now.
Solver.ISolver solver = new Solver.SimpleSolver(solverImpl);
solver.SetTimeout(options.solverTimeout);
return solver;
}
public static void ApplyFilters(Executor executor, CmdLineOpts options)
{
if (!options.GPUverifyIgnoreInvariants)
return;
Console.ForegroundColor = ConsoleColor.DarkMagenta;
Console.Error.WriteLine("WARNING: GPUVerify invariants will be ignored!");
Console.ResetColor();
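// Returning false from the filter tells the executor to skip the assertion; only asserts that
// GPUVerify marked as originating from a loop invariant are skipped.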
executor.AssertFilter = (AssertCmd c) =>
{
if (QKeyValue.FindBoolAttribute(c.Attributes, "originated_from_invariant"))
{
Console.ForegroundColor = ConsoleColor.DarkMagenta;
Console.Error.WriteLine("WARNING: Ignoring invariant {0}", c.ToString());
Console.ResetColor();
return false;
}
return true;
};
}
}
}
| |
using System;
using JetBrains.Application.Threading;
using JetBrains.Collections.Viewable;
using JetBrains.Diagnostics;
using JetBrains.IDE;
using JetBrains.Lifetimes;
using JetBrains.ProjectModel;
using JetBrains.Rd.Base;
using JetBrains.ReSharper.Plugins.Unity.Core.ProjectModel;
using JetBrains.ReSharper.Plugins.Unity.UnityEditorIntegration;
using JetBrains.ReSharper.Resources.Shell;
using JetBrains.Rider.Model.Unity;
using JetBrains.Rider.Model.Unity.BackendUnity;
using JetBrains.Rider.Model.Unity.FrontendBackend;
using JetBrains.Util;
using FrontendOpenArgs = JetBrains.Rider.Model.Unity.FrontendBackend.RdOpenFileArgs;
namespace JetBrains.ReSharper.Plugins.Unity.Rider.Integration.Protocol
{
[SolutionComponent]
public class PassthroughHost
{
private readonly ISolution mySolution;
private readonly IThreading myThreading;
private readonly IEditorManager myEditorManager;
private readonly BackendUnityHost myBackendUnityHost;
private readonly FrontendBackendHost myFrontendBackendHost;
private readonly ILogger myLogger;
public PassthroughHost(Lifetime lifetime,
ISolution solution,
IThreading threading,
IEditorManager editorManager,
UnitySolutionTracker unitySolutionTracker,
BackendUnityHost backendUnityHost,
FrontendBackendHost frontendBackendHost,
ILogger logger)
{
mySolution = solution;
myThreading = threading;
myEditorManager = editorManager;
myBackendUnityHost = backendUnityHost;
myFrontendBackendHost = frontendBackendHost;
myLogger = logger;
if (!frontendBackendHost.IsAvailable)
return;
unitySolutionTracker.IsUnityProject.View(lifetime, (unityProjectLifetime, args) =>
{
var model = frontendBackendHost.Model;
if (args && model != null)
{
AdviseFrontendToUnityModel(unityProjectLifetime, model);
// Advise the backend/Unity model as high priority so we get called back before other subscribers.
// This allows us to populate the protocol on reconnection before other subscribers start to advise
using (Signal.PriorityAdviseCookie.Create())
{
backendUnityHost.BackendUnityModel.ViewNotNull(unityProjectLifetime,
AdviseUnityToFrontendModel);
}
backendUnityHost.BackendUnityModel.Advise(lifetime, backendUnityModel =>
{
// https://github.com/JetBrains/resharper-unity/pull/2023
if (backendUnityModel == null) frontendBackendHost.Model?.PlayControlsInitialized.SetValue(false);
});
}
});
}
private void AdviseFrontendToUnityModel(Lifetime lifetime, FrontendBackendModel frontendBackendModel)
{
// BackendUnityModel is recreated frequently (e.g. on each AppDomain reload when changing play/edit mode).
// So subscribe to the frontendBackendModel once and flow in changes only if backendUnityModel is available.
// Note that we only flow changes, not the current value. Even though these properties are stateful,
// frontendBackendModel is not the source of truth - values need to flow from backendUnityModel. Also, due
// to model reload, we go through a few values before we stabilise. E.g.:
// * User clicks play, fb.Play is true, flows into bu.Play which switches to play mode and causes an
// AppDomain reload.
// * bu.Play becomes false due to AppDomain teardown, flows into fb.Play
// * BackendUnityModel is torn down and recreated (<- WARNING!)
// * bu.Play becomes true as Unity enters play mode, flows into fb.Play
// If we flowed the current value of fb.Play into backendUnityModel when it is recreated, we'd set it to
// false, triggering play mode to end.
// Step is simpler since it's a non-stateful ISource<T>; we just forward each fire
var backendUnityModelProperty = myBackendUnityHost.BackendUnityModel;
frontendBackendModel.PlayControls.Play.FlowChangesIntoRdDeferred(lifetime,
() => backendUnityModelProperty.Maybe.ValueOrDefault?.PlayControls.Play);
frontendBackendModel.PlayControls.Pause.FlowChangesIntoRdDeferred(lifetime,
() => backendUnityModelProperty.Maybe.ValueOrDefault?.PlayControls.Pause);
frontendBackendModel.PlayControls.Step.Advise(lifetime, () => backendUnityModelProperty.Maybe.ValueOrDefault?.PlayControls.Step.Fire());
// Called from frontend to generate the UIElements schema files
frontendBackendModel.GenerateUIElementsSchema.Set((l, u) =>
backendUnityModelProperty.Maybe.ValueOrDefault?.GenerateUIElementsSchema.Start(l, u).ToRdTask(l));
// Signalled from frontend to select and ping the object in the Project view
frontendBackendModel.ShowFileInUnity.Advise(lifetime, file =>
backendUnityModelProperty.Maybe.ValueOrDefault?.ShowFileInUnity.Fire(file));
// Signalled from frontend to open the preferences window
frontendBackendModel.ShowPreferences.Advise(lifetime, _ =>
backendUnityModelProperty.Maybe.ValueOrDefault?.ShowPreferences.Fire());
// Called from frontend to run a method in unity
frontendBackendModel.RunMethodInUnity.Set((l, data) =>
{
var backendUnityModel = backendUnityModelProperty.Maybe.ValueOrDefault;
return backendUnityModel == null
? Rd.Tasks.RdTask<RunMethodResult>.Cancelled()
: backendUnityModel.RunMethodInUnity.Start(l, data).ToRdTask(l);
});
frontendBackendModel.HasUnsavedScenes.Set((l, u) =>
backendUnityModelProperty.Maybe.ValueOrDefault?.HasUnsavedScenes.Start(l, u).ToRdTask(l));
}
private void AdviseUnityToFrontendModel(Lifetime lifetime, BackendUnityModel backendUnityModel)
{
// *********************************************************************************************************
//
// WARNING
//
// Be very careful with stateful properties!
//
// When the backend/Unity protocol is closed, the existing properties maintain their current values. This
// doesn't affect BackendUnityModel because we clear the model when the connection is lost. However, it does
// affect any properties that have had values flowed in from BackendUnityModel - these values are not reset.
//
// When the backend/Unity protocol is (re)created and advertised, we *should* have initial values from the
// Unity end (the model is advertised asynchronously after being created, and the dispatcher *should* have
// processed messages). However, we cannot guarantee this - during testing, it usually works as expected,
// but occasionally wouldn't be fully initialised. This means we need to be careful when assuming that
// initial values are available in the properties. Advise and RdExtensions.FlowIntoRdSafe will correctly set
// the target value if the source value exists. Avoid BeUtilExtensions.FlowIntoRd, as that will throw an
// exception if the source value does not yet exist.
// Note that creating and advertising the model, as well as all callbacks, happen on the main thread.
//
// We must ensure that the Unity end (re)initialises properties when the protocol is created, or we could
// have stale or empty properties here and in the frontend.
//
// *********************************************************************************************************
var frontendBackendModel = myFrontendBackendHost.Model.NotNull("frontendBackendModel != null");
AdviseApplicationData(lifetime, backendUnityModel, frontendBackendModel);
AdviseApplicationSettings(lifetime, backendUnityModel, frontendBackendModel);
AdviseProjectSettings(lifetime, backendUnityModel, frontendBackendModel);
AdvisePlayControls(lifetime, backendUnityModel, frontendBackendModel);
AdviseConsoleEvents(lifetime, backendUnityModel, frontendBackendModel);
AdviseOpenFile(backendUnityModel, frontendBackendModel);
}
private static void AdviseApplicationData(in Lifetime lifetime, BackendUnityModel backendUnityModel,
FrontendBackendModel frontendBackendModel)
{
backendUnityModel.UnityApplicationData.FlowIntoRdSafe(lifetime, frontendBackendModel.UnityApplicationData);
backendUnityModel.UnityApplicationData.FlowIntoRdSafe(lifetime, data =>
{
var version = UnityVersion.Parse(data.ApplicationVersion);
return UnityVersion.RequiresRiderPackage(version);
}, frontendBackendModel.RequiresRiderPackage);
}
private static void AdviseApplicationSettings(in Lifetime lifetime, BackendUnityModel backendUnityModel,
FrontendBackendModel frontendBackendModel)
{
backendUnityModel.UnityApplicationSettings.ScriptCompilationDuringPlay.FlowIntoRdSafe(lifetime,
frontendBackendModel.UnityApplicationSettings.ScriptCompilationDuringPlay);
}
private static void AdviseProjectSettings(in Lifetime lifetime, BackendUnityModel backendUnityModel,
FrontendBackendModel frontendBackendModel)
{
backendUnityModel.UnityProjectSettings.BuildLocation.FlowIntoRdSafe(lifetime,
frontendBackendModel.UnityProjectSettings.BuildLocation);
}
private static void AdvisePlayControls(in Lifetime lifetime, BackendUnityModel backendUnityModel,
FrontendBackendModel frontendBackendModel)
{
backendUnityModel.PlayControls.Play.FlowIntoRdSafe(lifetime, frontendBackendModel.PlayControls.Play);
backendUnityModel.PlayControls.Pause.FlowIntoRdSafe(lifetime, frontendBackendModel.PlayControls.Pause);
// https://github.com/JetBrains/resharper-unity/pull/2023
backendUnityModel.PlayControls.Play.Advise(lifetime, _ => frontendBackendModel.PlayControlsInitialized.SetValue(true));
}
private static void AdviseConsoleEvents(in Lifetime lifetime, BackendUnityModel backendUnityModel,
FrontendBackendModel frontendBackendModel)
{
backendUnityModel.ConsoleLogging.OnConsoleLogEvent.Advise(lifetime, frontendBackendModel.ConsoleLogging.OnConsoleLogEvent.Fire);
backendUnityModel.ConsoleLogging.LastInitTime.FlowIntoRdSafe(lifetime, frontendBackendModel.ConsoleLogging.LastInitTime);
backendUnityModel.ConsoleLogging.LastPlayTime.FlowIntoRdSafe(lifetime, frontendBackendModel.ConsoleLogging.LastPlayTime);
}
private void AdviseOpenFile(BackendUnityModel backendUnityModel, FrontendBackendModel frontendBackendModel)
{
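// Unity asks the backend to open a file at a given line/column; forward the request to the
// frontend under a read lock and report any failure as a 'false' result.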
backendUnityModel.OpenFileLineCol.Set((lf, args) =>
{
Rd.Tasks.RdTask<bool> result = new Rd.Tasks.RdTask<bool>();
using (ReadLockCookie.Create())
{
try
{
return frontendBackendModel.OpenFileLineCol.Start(lf, new FrontendOpenArgs(args.Path, args.Line, args.Col)).ToRdTask(lf);
}
catch (Exception e)
{
myLogger.Error(e);
result.Set(false);
}
}
return result;
});
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Text;
namespace fastJSON
{
/// <summary>
/// This class encodes and decodes JSON strings.
/// Spec. details, see http://www.json.org/
/// </summary>
internal sealed class JsonParser
{
enum Token
{
None = -1, // Used to denote no Lookahead available
Curly_Open,
Curly_Close,
Squared_Open,
Squared_Close,
Colon,
Comma,
String,
Number,
True,
False,
Null
}
readonly char[] json;
readonly StringBuilder s = new StringBuilder();
Token lookAheadToken = Token.None;
int index;
bool _ignorecase = false;
internal JsonParser(string json, bool ignorecase)
{
this.json = json.ToCharArray();
_ignorecase = ignorecase;
}
public object Decode()
{
return ParseValue();
}
private Dictionary<string, object> ParseObject()
{
Dictionary<string, object> table = new Dictionary<string, object>();
ConsumeToken(); // {
while (true)
{
switch (LookAhead())
{
case Token.Comma:
ConsumeToken();
break;
case Token.Curly_Close:
ConsumeToken();
return table;
default:
{
// name
string name = ParseString();
if (_ignorecase)
name = name.ToLower();
// :
if (NextToken() != Token.Colon)
{
throw new Exception("Expected colon at index " + index);
}
// value
object value = ParseValue();
table[name] = value;
}
break;
}
}
}
private List<object> ParseArray()
{
List<object> array = new List<object>();
ConsumeToken(); // [
while (true)
{
switch (LookAhead())
{
case Token.Comma:
ConsumeToken();
break;
case Token.Squared_Close:
ConsumeToken();
return array;
default:
array.Add(ParseValue());
break;
}
}
}
private object ParseValue()
{
switch (LookAhead())
{
case Token.Number:
return ParseNumber();
case Token.String:
return ParseString();
case Token.Curly_Open:
return ParseObject();
case Token.Squared_Open:
return ParseArray();
case Token.True:
ConsumeToken();
return true;
case Token.False:
ConsumeToken();
return false;
case Token.Null:
ConsumeToken();
return null;
}
throw new Exception("Unrecognized token at index" + index);
}
private string ParseString()
{
ConsumeToken(); // "
s.Length = 0;
int runIndex = -1;
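// runIndex marks the start of a run of plain (unescaped) characters so the run can be appended
// to the builder in one call instead of character by character.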
while (index < json.Length)
{
var c = json[index++];
if (c == '"')
{
if (runIndex != -1)
{
if (s.Length == 0)
return new string(json, runIndex, index - runIndex - 1);
s.Append(json, runIndex, index - runIndex - 1);
}
return s.ToString();
}
if (c != '\\')
{
if (runIndex == -1)
runIndex = index - 1;
continue;
}
if (index == json.Length) break;
if (runIndex != -1)
{
s.Append(json, runIndex, index - runIndex - 1);
runIndex = -1;
}
switch (json[index++])
{
case '"':
s.Append('"');
break;
case '\\':
s.Append('\\');
break;
case '/':
s.Append('/');
break;
case 'b':
s.Append('\b');
break;
case 'f':
s.Append('\f');
break;
case 'n':
s.Append('\n');
break;
case 'r':
s.Append('\r');
break;
case 't':
s.Append('\t');
break;
case 'u':
{
int remainingLength = json.Length - index;
if (remainingLength < 4) break;
// parse the four hex digits into a 16-bit code unit
uint codePoint = ParseUnicode(json[index], json[index + 1], json[index + 2], json[index + 3]);
s.Append((char)codePoint);
// skip 4 chars
index += 4;
}
break;
}
}
throw new Exception("Unexpectedly reached end of string");
}
private uint ParseSingleChar(char c1, uint multiplier)
{
uint p1 = 0;
if (c1 >= '0' && c1 <= '9')
p1 = (uint)(c1 - '0') * multiplier;
else if (c1 >= 'A' && c1 <= 'F')
p1 = (uint)((c1 - 'A') + 10) * multiplier;
else if (c1 >= 'a' && c1 <= 'f')
p1 = (uint)((c1 - 'a') + 10) * multiplier;
return p1;
}
private uint ParseUnicode(char c1, char c2, char c3, char c4)
{
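// Combine four hex digits (most significant first) into a single UTF-16 code unit.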
uint p1 = ParseSingleChar(c1, 0x1000);
uint p2 = ParseSingleChar(c2, 0x100);
uint p3 = ParseSingleChar(c3, 0x10);
uint p4 = ParseSingleChar(c4, 1);
return p1 + p2 + p3 + p4;
}
private long CreateLong(string s)
{
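// Manual parse of an integer literal; assumes the caller has already isolated a run of digits
// with an optional leading sign.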
long num = 0;
bool neg = false;
foreach (char cc in s)
{
if (cc == '-')
neg = true;
else if (cc == '+')
neg = false;
else
{
num *= 10;
num += (int)(cc - '0');
}
}
return neg ? -num : num;
}
private object ParseNumber()
{
ConsumeToken();
// Need to start back one place because the first digit is also a token and would have been consumed
var startIndex = index - 1;
bool dec = false;
do
{
if (index == json.Length)
break;
var c = json[index];
if ((c >= '0' && c <= '9') || c == '.' || c == '-' || c == '+' || c == 'e' || c == 'E')
{
if (c == '.' || c == 'e' || c == 'E')
dec = true;
if (++index == json.Length)
break; //throw new Exception("Unexpected end of string whilst parsing number");
continue;
}
break;
} while (true);
if (dec)
{
string s = new string(json, startIndex, index - startIndex);
return double.Parse(s, NumberFormatInfo.InvariantInfo);
}
long num;
return JSON.CreateLong(out num, json, startIndex, index - startIndex);
}
private Token LookAhead()
{
if (lookAheadToken != Token.None) return lookAheadToken;
return lookAheadToken = NextTokenCore();
}
private void ConsumeToken()
{
lookAheadToken = Token.None;
}
private Token NextToken()
{
var result = lookAheadToken != Token.None ? lookAheadToken : NextTokenCore();
lookAheadToken = Token.None;
return result;
}
private Token NextTokenCore()
{
char c;
// Skip past whitespace
do
{
c = json[index];
if (c > ' ') break;
if (c != ' ' && c != '\t' && c != '\n' && c != '\r') break;
} while (++index < json.Length);
if (index == json.Length)
{
throw new Exception("Reached end of string unexpectedly");
}
c = json[index];
index++;
//if (c >= '0' && c <= '9')
// return Token.Number;
switch (c)
{
case '{':
return Token.Curly_Open;
case '}':
return Token.Curly_Close;
case '[':
return Token.Squared_Open;
case ']':
return Token.Squared_Close;
case ',':
return Token.Comma;
case '"':
return Token.String;
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case '-':
case '+':
case '.':
return Token.Number;
case ':':
return Token.Colon;
case 'f':
if (json.Length - index >= 4 &&
json[index + 0] == 'a' &&
json[index + 1] == 'l' &&
json[index + 2] == 's' &&
json[index + 3] == 'e')
{
index += 4;
return Token.False;
}
break;
case 't':
if (json.Length - index >= 3 &&
json[index + 0] == 'r' &&
json[index + 1] == 'u' &&
json[index + 2] == 'e')
{
index += 3;
return Token.True;
}
break;
case 'n':
if (json.Length - index >= 3 &&
json[index + 0] == 'u' &&
json[index + 1] == 'l' &&
json[index + 2] == 'l')
{
index += 3;
return Token.Null;
}
break;
}
throw new Exception("Could not find token at index " + --index);
}
}
}
| |
#region S# License
/******************************************************************************************
NOTICE!!! This program and source code is owned and licensed by
StockSharp, LLC, www.stocksharp.com
Viewing or use of this code requires your acceptance of the license
agreement found at https://github.com/StockSharp/StockSharp/blob/master/LICENSE
Removal of this comment is a violation of the license agreement.
Project: SampleCQG.SampleCQGPublic
File: MainWindow.xaml.cs
Created: 2015, 11, 11, 2:32 PM
Copyright 2010 by StockSharp, LLC
*******************************************************************************************/
#endregion S# License
namespace SampleCQG
{
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Windows;
using Ecng.Common;
using Ecng.Xaml;
using MoreLinq;
using StockSharp.Messages;
using StockSharp.BusinessEntities;
using StockSharp.CQG;
using StockSharp.Logging;
using StockSharp.Xaml;
using StockSharp.Localization;
public partial class MainWindow
{
public static MainWindow Instance { get; private set; }
public static readonly DependencyProperty IsConnectedProperty =
DependencyProperty.Register("IsConnected", typeof(bool), typeof(MainWindow), new PropertyMetadata(default(bool)));
public bool IsConnected
{
get { return (bool)GetValue(IsConnectedProperty); }
set { SetValue(IsConnectedProperty, value); }
}
public CQGTrader Trader { get; private set; }
private readonly SecuritiesWindow _securitiesWindow = new SecuritiesWindow();
private readonly OrdersWindow _ordersWindow = new OrdersWindow();
private readonly PortfoliosWindow _portfoliosWindow = new PortfoliosWindow();
private readonly StopOrdersWindow _stopOrdersWindow = new StopOrdersWindow();
private readonly MyTradesWindow _myTradesWindow = new MyTradesWindow();
private readonly LogManager _logManager = new LogManager();
private static string Username => Properties.Settings.Default.Username;
public MainWindow()
{
Instance = this;
InitializeComponent();
Title = Title.Put("CQG");
Closing += OnClosing;
_ordersWindow.MakeHideable();
_securitiesWindow.MakeHideable();
_stopOrdersWindow.MakeHideable();
_portfoliosWindow.MakeHideable();
var guiListener = new GuiLogListener(LogControl);
//guiListener.Filters.Add(msg => msg.Level > LogLevels.Debug);
_logManager.Listeners.Add(guiListener);
_logManager.Listeners.Add(new FileLogListener("sterling") { LogDirectory = "Logs" });
Application.Current.MainWindow = this;
}
private void OnClosing(object sender, CancelEventArgs cancelEventArgs)
{
Properties.Settings.Default.Save();
_ordersWindow.DeleteHideable();
_securitiesWindow.DeleteHideable();
_stopOrdersWindow.DeleteHideable();
_portfoliosWindow.DeleteHideable();
_securitiesWindow.Close();
_stopOrdersWindow.Close();
_ordersWindow.Close();
_portfoliosWindow.Close();
if (Trader != null)
Trader.Dispose();
}
private void ConnectClick(object sender, RoutedEventArgs e)
{
var pwd = PwdBox.Password;
if (!IsConnected)
{
if (Username.IsEmpty())
{
MessageBox.Show(this, LocalizedStrings.Str3751);
return;
}
if (pwd.IsEmpty())
{
MessageBox.Show(this, LocalizedStrings.Str2975);
return;
}
if (Trader == null)
{
// create connector
Trader = new CQGTrader { LogLevel = LogLevels.Debug };
_logManager.Sources.Add(Trader);
// subscribe on connection successfully event
Trader.Connected += () =>
{
this.GuiAsync(() => OnConnectionChanged(true));
};
// subscribe on connection error event
Trader.ConnectionError += error => this.GuiAsync(() =>
{
OnConnectionChanged(Trader.ConnectionState == ConnectionStates.Connected);
MessageBox.Show(this, error.ToString(), LocalizedStrings.Str2959);
});
Trader.Disconnected += () => this.GuiAsync(() => OnConnectionChanged(false));
// subscribe on error event
Trader.Error += error =>
this.GuiAsync(() => MessageBox.Show(this, error.ToString(), LocalizedStrings.Str2955));
// subscribe on error of market data subscription event
Trader.MarketDataSubscriptionFailed += (security, msg, error) =>
this.GuiAsync(() => MessageBox.Show(this, error.ToString(), LocalizedStrings.Str2956Params.Put(msg.DataType, security)));
Trader.NewSecurities += securities => _securitiesWindow.SecurityPicker.Securities.AddRange(securities);
Trader.NewMyTrades += trades => _myTradesWindow.TradeGrid.Trades.AddRange(trades);
Trader.NewOrders += orders => _ordersWindow.OrderGrid.Orders.AddRange(orders);
Trader.NewStopOrders += orders => _stopOrdersWindow.OrderGrid.Orders.AddRange(orders);
Trader.NewPortfolios += portfolios =>
{
// subscribe on portfolio updates
portfolios.ForEach(Trader.RegisterPortfolio);
_portfoliosWindow.PortfolioGrid.Portfolios.AddRange(portfolios);
};
Trader.NewPositions += positions => _portfoliosWindow.PortfolioGrid.Positions.AddRange(positions);
// subscribe on error of order registration event
Trader.OrdersRegisterFailed += OrdersFailed;
// subscribe on error of order cancelling event
Trader.OrdersCancelFailed += OrdersFailed;
// subscribe on error of stop-order registration event
Trader.StopOrdersRegisterFailed += OrdersFailed;
// subscribe on error of stop-order cancelling event
Trader.StopOrdersCancelFailed += OrdersFailed;
Trader.MassOrderCancelFailed += (transId, error) =>
this.GuiAsync(() => MessageBox.Show(this, error.ToString(), LocalizedStrings.Str716));
// set market data provider
_securitiesWindow.SecurityPicker.MarketDataProvider = Trader;
}
Trader.Connect();
}
else
{
Trader.Disconnect();
}
}
private void OnConnectionChanged(bool isConnected)
{
IsConnected = isConnected;
ConnectBtn.Content = isConnected ? LocalizedStrings.Disconnect : LocalizedStrings.Connect;
}
private void OrdersFailed(IEnumerable<OrderFail> fails)
{
this.GuiAsync(() =>
{
foreach (var fail in fails)
{
var msg = fail.Error.ToString();
MessageBox.Show(this, msg, LocalizedStrings.Str153);
}
});
}
private static void ShowOrHide(Window window)
{
if (window == null)
throw new ArgumentNullException(nameof(window));
if (window.Visibility == Visibility.Visible)
window.Hide();
else
window.Show();
}
private void ShowMyTradesClick(object sender, RoutedEventArgs e)
{
ShowOrHide(_myTradesWindow);
}
private void ShowSecuritiesClick(object sender, RoutedEventArgs e)
{
ShowOrHide(_securitiesWindow);
}
private void ShowPortfoliosClick(object sender, RoutedEventArgs e)
{
ShowOrHide(_portfoliosWindow);
}
private void ShowOrdersClick(object sender, RoutedEventArgs e)
{
ShowOrHide(_ordersWindow);
}
private void ShowStopOrdersClick(object sender, RoutedEventArgs e)
{
ShowOrHide(_stopOrdersWindow);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Description;
using DataService.Areas.HelpPage.Models;
namespace DataService.Areas.HelpPage
{
public static class HelpPageConfigurationExtensions
{
private const string ApiModelPrefix = "MS_HelpPageApiModel_";
/// <summary>
/// Sets the documentation provider for help page.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="documentationProvider">The documentation provider.</param>
public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
{
config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
}
/// <summary>
/// Sets the objects that will be used by the formatters to produce sample requests/responses.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleObjects">The sample objects.</param>
public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
{
config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects;
}
/// <summary>
/// Sets the sample request directly for the specified media type and action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample request.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample);
}
/// <summary>
/// Sets the sample request directly for the specified media type and action with parameters.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample request.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample);
}
/// <summary>
/// Sets the sample response directly for the specified media type of the action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample response.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample);
}
/// <summary>
/// Sets the sample response directly for the specified media type of the action with specific parameters.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample response.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample);
}
/// <summary>
/// Sets the sample directly for all actions with the specified type and media type.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="type">The parameter type or return type of an action.</param>
public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type);
}
/// <summary>
/// Gets the help page sample generator.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <returns>The help page sample generator.</returns>
public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
{
return (HelpPageSampleGenerator)config.Properties.GetOrAdd(
typeof(HelpPageSampleGenerator),
k => new HelpPageSampleGenerator());
}
/// <summary>
/// Sets the help page sample generator.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleGenerator">The help page sample generator.</param>
public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
{
config.Properties.AddOrUpdate(
typeof(HelpPageSampleGenerator),
k => sampleGenerator,
(k, o) => sampleGenerator);
}
/// <summary>
/// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
/// <returns>
/// An <see cref="HelpPageApiModel"/>
/// </returns>
public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
{
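// Generated models are cached in HttpConfiguration.Properties under a prefixed key so each
// ApiDescription is only processed once.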
object model;
string modelId = ApiModelPrefix + apiDescriptionId;
if (!config.Properties.TryGetValue(modelId, out model))
{
Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
if (apiDescription != null)
{
HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
model = GenerateApiModel(apiDescription, sampleGenerator);
config.Properties.TryAdd(modelId, model);
}
}
return (HelpPageApiModel)model;
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HelpPageSampleGenerator sampleGenerator)
{
HelpPageApiModel apiModel = new HelpPageApiModel();
apiModel.ApiDescription = apiDescription;
try
{
foreach (var item in sampleGenerator.GetSampleRequests(apiDescription))
{
apiModel.SampleRequests.Add(item.Key, item.Value);
LogInvalidSampleAsError(apiModel, item.Value);
}
foreach (var item in sampleGenerator.GetSampleResponses(apiDescription))
{
apiModel.SampleResponses.Add(item.Key, item.Value);
LogInvalidSampleAsError(apiModel, item.Value);
}
}
catch (Exception e)
{
apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture, "An exception has occurred while generating the sample. Exception Message: {0}", e.Message));
}
return apiModel;
}
private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
{
InvalidSample invalidSample = sample as InvalidSample;
if (invalidSample != null)
{
apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
}
}
}
}
| |
/********************************************************************
The Multiverse Platform is made available under the MIT License.
Copyright (c) 2012 The Multiverse Foundation
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.
*********************************************************************/
/***************************************************************************
Copyright (c) Microsoft Corporation. All rights reserved.
This code is licensed under the Visual Studio SDK license terms.
THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
***************************************************************************/
using System;
using System.IO;
using System.Collections.Generic;
using System.Globalization;
using System.Reflection;
using Microsoft.Build.Utilities;
using Microsoft.Build.Framework;
using Microsoft.Samples.VisualStudio.IronPythonTasks.Properties;
namespace Microsoft.Samples.VisualStudio.IronPythonTasks
{
/////////////////////////////////////////////////////////////////////////////
// My MSBuild Task
public class IronPythonCompilerTask : Task
{
private ICompiler compiler = null;
#region Constructors
/// <summary>
/// Constructor. This is the constructor that will be used
/// when the task run.
/// </summary>
public IronPythonCompilerTask()
{
}
/// <summary>
/// Constructor. The goal of this constructor is to make
/// it easy to test the task.
/// </summary>
public IronPythonCompilerTask(ICompiler compilerToUse)
{
compiler = compilerToUse;
}
#endregion
#region Public Properties and related Fields
private string[] sourceFiles;
/// <summary>
/// List of Python source files that should be compiled into the assembly
/// </summary>
[Required()]
public string[] SourceFiles
{
get { return sourceFiles; }
set { sourceFiles = value; }
}
private string outputAssembly;
/// <summary>
/// Output Assembly (including extension)
/// </summary>
[Required()]
public string OutputAssembly
{
get { return outputAssembly; }
set { outputAssembly = value; }
}
private ITaskItem[] referencedAssemblies = new ITaskItem[0];
/// <summary>
/// List of dependent assemblies
/// </summary>
public ITaskItem[] ReferencedAssemblies
{
get { return referencedAssemblies; }
set
{
if (value != null)
{
referencedAssemblies = value;
}
else
{
referencedAssemblies = new ITaskItem[0];
}
}
}
private ITaskItem[] resourceFiles = new ITaskItem[0];
/// <summary>
/// List of resource files
/// </summary>
public ITaskItem[] ResourceFiles
{
get { return resourceFiles; }
set
{
if (value != null)
{
resourceFiles = value;
}
else
{
resourceFiles = new ITaskItem[0];
}
}
}
private string mainFile;
/// <summary>
/// For applications, which file is the entry point
/// </summary>
[Required()]
public string MainFile
{
get { return mainFile; }
set { mainFile = value; }
}
private string targetKind;
/// <summary>
/// Target type (exe, winexe, library)
/// These will be mapped to System.Reflection.Emit.PEFileKinds
/// </summary>
public string TargetKind
{
get { return targetKind; }
set { targetKind = value.ToLower(CultureInfo.InvariantCulture); }
}
private bool debugSymbols = true;
/// <summary>
/// Generate debug information
/// </summary>
public bool DebugSymbols
{
get { return debugSymbols; }
set { debugSymbols = value; }
}
private string projectPath = null;
/// <summary>
/// This should be set to $(MSBuildProjectDirectory)
/// </summary>
public string ProjectPath
{
get { return projectPath; }
set { projectPath = value; }
}
private bool useExperimentalCompiler;
/// <summary>
/// This property is only needed because Iron Python does not officially support building real .Net assemblies.
/// For WAP scenarios, we need to support real assemblies and as such we use an alternate approach to build those assemblies.
/// </summary>
public bool UseExperimentalCompiler
{
get { return useExperimentalCompiler; }
set { useExperimentalCompiler = value; }
}
#endregion
/// <summary>
/// Main entry point for the task
/// </summary>
/// <returns></returns>
public override bool Execute()
{
Log.LogMessage(MessageImportance.Normal, "Iron Python Compilation Task");
// Create the compiler if it does not already exist
CompilerErrorSink errorSink = new CompilerErrorSink(this.Log);
errorSink.ProjectDirectory = ProjectPath;
if (compiler == null)
{
if (UseExperimentalCompiler)
compiler = new ExperimentalCompiler(new List<string>(this.SourceFiles), this.OutputAssembly, errorSink);
else
compiler = new Compiler(new List<string>(this.SourceFiles), this.OutputAssembly, errorSink);
}
if (!InitializeCompiler())
return false;
// Call the compiler and report errors and warnings
compiler.Compile();
return errorSink.BuildSucceeded;
}
/// <summary>
/// Initialize compiler options based on task parameters
/// </summary>
/// <returns>false if failed</returns>
private bool InitializeCompiler()
{
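// Map the MSBuild TargetKind string onto System.Reflection.Emit.PEFileKinds.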
switch (TargetKind)
{
case "exe":
{
compiler.TargetKind = System.Reflection.Emit.PEFileKinds.ConsoleApplication;
break;
}
case "winexe":
{
compiler.TargetKind = System.Reflection.Emit.PEFileKinds.WindowApplication;
break;
}
case "library":
{
compiler.TargetKind = System.Reflection.Emit.PEFileKinds.Dll;
break;
}
default:
{
this.Log.LogError(Resources.InvalidTargetType, TargetKind);
return false;
}
}
compiler.IncludeDebugInformation = this.DebugSymbols;
compiler.MainFile = this.MainFile;
compiler.SourceFiles = new List<string>(this.SourceFiles);
// References require a bit more work since our compiler expect us to pass the Assemblies (and not just paths)
compiler.ReferencedAssemblies = new List<string>();
foreach (ITaskItem assemblyReference in this.ReferencedAssemblies)
{
compiler.ReferencedAssemblies.Add(assemblyReference.ItemSpec);
}
// Add each resource
List<IronPython.Hosting.ResourceFile> resourcesList = new List<IronPython.Hosting.ResourceFile>();
foreach (ITaskItem resource in this.ResourceFiles)
{
bool publicVisibility = true;
string access = resource.GetMetadata("Access");
if (String.CompareOrdinal("Private", access) == 0)
publicVisibility = false;
string filename = resource.ItemSpec;
string logicalName = resource.GetMetadata("LogicalName");
if (String.IsNullOrEmpty(logicalName))
logicalName = Path.GetFileName(resource.ItemSpec);
IronPython.Hosting.ResourceFile resourceFile = new IronPython.Hosting.ResourceFile(logicalName, filename, publicVisibility);
resourcesList.Add(resourceFile);
}
compiler.ResourceFiles = resourcesList;
return true;
}
}
}
| |
//
// DatabaseMethods.cs
//
// Author:
// Jim Borden <[email protected]>
//
// Copyright (c) 2015 Couchbase, Inc All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using Couchbase.Lite.Internal;
using Couchbase.Lite.Replicator;
using Couchbase.Lite.Util;
using Sharpen;
namespace Couchbase.Lite.Listener
{
/// <summary>
/// Methods that operate at the database level
/// </summary>
/// <remarks>
/// http://docs.couchdb.org/en/latest/api/database/index.html
/// </remarks>
internal static class DatabaseMethods
{
#region Constants
private const string TAG = "DatabaseMethods";
private const int MIN_HEARTBEAT = 5000; //NOTE: iOS uses seconds but .NET uses milliseconds
#endregion
#region Public Methods
/// <summary>
/// Gets information about the specified database.
/// </summary>
/// <returns>The response state for further HTTP processing</returns>
/// <param name="context">The context of the Couchbase Lite HTTP request</param>
/// <remarks>
/// http://docs.couchdb.org/en/latest/api/database/common.html#get--db
/// </remarks>
public static ICouchbaseResponseState GetConfiguration(ICouchbaseListenerContext context)
{
return PerformLogicWithDatabase(context, true, db =>
{
int numDocs = db.DocumentCount;
long updateSequence = db.LastSequenceNumber;
if (numDocs < 0 || updateSequence < 0) {
return context.CreateResponse(StatusCode.DbError);
}
var response = context.CreateResponse();
response.JsonBody = new Body(new Dictionary<string, object> {
{ "db_name", db.Name },
{ "doc_count", numDocs },
{ "update_seq", updateSequence },
{ "committed_update_seq", updateSequence },
{ "purge_seq", 0 }, //TODO: Implement
{ "disk_size", db.TotalDataSize },
{ "start_time", db.StartTime * 1000 }
});
return response;
}).AsDefaultState();
}
/// <summary>
/// Deletes the specified database, and all the documents and attachments contained within it.
/// </summary>
/// <returns>The response state for further HTTP processing</returns>
/// <param name="context">The context of the Couchbase Lite HTTP request</param>
/// <remarks>
/// http://docs.couchdb.org/en/latest/api/database/common.html#delete--db
        /// </remarks>
public static ICouchbaseResponseState DeleteConfiguration(ICouchbaseListenerContext context)
{
return PerformLogicWithDatabase(context, false, db =>
{
if(context.GetQueryParam("rev") != null) {
// CouchDB checks for this; probably meant to be a document deletion
return context.CreateResponse(StatusCode.BadId);
}
try {
db.Delete();
} catch (CouchbaseLiteException) {
return context.CreateResponse(StatusCode.InternalServerError);
}
return context.CreateResponse();
}).AsDefaultState();
}
/// <summary>
/// Creates a new database.
/// </summary>
/// <returns>The response state for further HTTP processing</returns>
/// <param name="context">The context of the Couchbase Lite HTTP request</param>
/// <remarks>
/// http://docs.couchdb.org/en/latest/api/database/common.html#put--db
        /// </remarks>
public static ICouchbaseResponseState UpdateConfiguration(ICouchbaseListenerContext context)
{
string dbName = context.DatabaseName;
Database db = context.DbManager.GetDatabaseWithoutOpening(dbName, false);
if (db != null && db.Exists()) {
return context.CreateResponse(StatusCode.PreconditionFailed).AsDefaultState();
}
try {
db.Open();
} catch(CouchbaseLiteException) {
return context.CreateResponse(StatusCode.Exception).AsDefaultState();
}
return context.CreateResponse(StatusCode.Created).AsDefaultState();
}
/// <summary>
/// Returns a JSON structure of all of the documents in a given database.
/// </summary>
/// <returns>The response state for further HTTP processing</returns>
/// <param name="context">The context of the Couchbase Lite HTTP request</param>
/// <remarks>
/// http://docs.couchdb.org/en/latest/api/database/bulk-api.html#get--db-_all_docs
        /// </remarks>
public static ICouchbaseResponseState GetAllDocuments(ICouchbaseListenerContext context)
{
return PerformLogicWithDatabase(context, true, db =>
{
if(context.CacheWithEtag(db.LastSequenceNumber.ToString())) {
return context.CreateResponse(StatusCode.NotModified);
}
var options = context.QueryOptions;
if(options == null) {
return context.CreateResponse(StatusCode.BadParam);
}
return DoAllDocs(context, db, options);
}).AsDefaultState();
}
/// <summary>
        /// The POST to _all_docs allows you to specify multiple keys to be selected from the database.
/// This enables you to request multiple documents in a single request, in place of multiple GET /{db}/{docid} requests.
/// </summary>
/// <returns>The response state for further HTTP processing</returns>
/// <param name="context">The context of the Couchbase Lite HTTP request</param>
/// <remarks>
/// http://docs.couchdb.org/en/latest/api/database/bulk-api.html#post--db-_all_docs
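        /// An illustrative request body (the document IDs are placeholders); the handler below
        /// reads the "keys" array and passes it to the view query options:
        /// { "keys": ["docid1", "docid2"] }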
        /// </remarks>
public static ICouchbaseResponseState GetAllSpecifiedDocuments(ICouchbaseListenerContext context)
{
return PerformLogicWithDatabase(context, true, db =>
{
var options = context.QueryOptions;
if(options == null) {
return context.CreateResponse(StatusCode.BadParam);
}
var body = context.BodyAs<Dictionary<string, object>>();
if(body == null) {
return context.CreateResponse(StatusCode.BadJson);
}
if(!body.ContainsKey("keys")) {
return context.CreateResponse(StatusCode.BadParam);
}
var keys = body["keys"].AsList<object>();
options.Keys = keys;
return DoAllDocs(context, db, options);
}).AsDefaultState();
}
/// <summary>
/// Create and update multiple documents at the same time within a single request.
/// </summary>
/// <returns>The response state for further HTTP processing</returns>
/// <param name="context">The context of the Couchbase Lite HTTP request</param>
/// <remarks>
/// http://docs.couchdb.org/en/latest/api/database/bulk-api.html#post--db-_bulk_docs
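        /// An illustrative request body (IDs, revisions and values are placeholders); "docs" is
        /// required, while "new_edits" and "all_or_nothing" are optional booleans read below:
        /// { "docs": [ { "_id": "doc1", "value": 1 }, { "_id": "doc2", "_rev": "1-abc", "_deleted": true } ], "new_edits": true }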
        /// </remarks>
public static ICouchbaseResponseState ProcessDocumentChangeOperations(ICouchbaseListenerContext context)
{
return PerformLogicWithDatabase(context, true, db =>
{
var postBody = context.BodyAs<Dictionary<string, object>>();
if(postBody == null) {
return context.CreateResponse(StatusCode.BadJson);
}
if(!postBody.ContainsKey("docs")) {
return context.CreateResponse(StatusCode.BadParam);
}
var docs = postBody["docs"].AsList<IDictionary<string, object>>();
bool allOrNothing;
postBody.TryGetValue<bool>("all_or_nothing", out allOrNothing);
bool newEdits;
postBody.TryGetValue<bool>("new_edits", out newEdits);
var response = context.CreateResponse();
StatusCode status = StatusCode.Ok;
bool success = db.RunInTransaction(() => {
List<IDictionary<string, object>> results = new List<IDictionary<string, object>>(docs.Count);
foreach(var doc in docs) {
string docId = doc.GetCast<string>("_id");
RevisionInternal rev = null;
Body body = new Body(doc);
if(!newEdits) {
if(!RevisionInternal.IsValid(body)) {
status = StatusCode.BadParam;
} else {
rev = new RevisionInternal(body);
var history = Database.ParseCouchDBRevisionHistory(doc);
try {
db.ForceInsert(rev, history, null);
} catch(CouchbaseLiteException e) {
status = e.Code;
}
}
} else {
status = DocumentMethods.UpdateDocument(context, db, docId, body, false, allOrNothing, out rev);
}
IDictionary<string, object> result = null;
if((int)status < 300) {
Debug.Assert(rev != null && rev.GetRevId() != null);
if(newEdits) {
result = new Dictionary<string, object>
{
{ "id", rev.GetDocId() },
{ "rev", rev.GetRevId() },
{ "status", (int)status }
};
}
} else if((int)status >= 500) {
return false; // abort the whole thing if something goes badly wrong
} else if(allOrNothing) {
return false; // all_or_nothing backs out if there's any error
} else {
var info = Status.ToHttpStatus(status);
result = new Dictionary<string, object>
{
{ "id", docId },
{ "error", info.Item2 },
{ "status", info.Item1 }
};
}
if(result != null) {
results.Add(result);
}
}
response.JsonBody = new Body(results.Cast<object>().ToList());
return true;
});
if(!success) {
response.InternalStatus = status;
}
return response;
}).AsDefaultState();
}
/// <summary>
/// Returns a sorted list of changes made to documents in the database, in time order of application.
/// </summary>
/// <returns>The response state for further HTTP processing</returns>
/// <param name="context">The context of the Couchbase Lite HTTP request</param>
/// <remarks>
/// http://docs.couchdb.org/en/latest/api/database/changes.html#get--db-_changes
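        /// An illustrative request (values are placeholders):
        /// GET /{db}/_changes?feed=longpoll&amp;since=27&amp;limit=100&amp;include_docs=true&amp;heartbeat=30000
        /// The feed mode is resolved through ChangesFeedMode on the context; since, limit, include_docs,
        /// style, filter and heartbeat are read from the query string in the handler below.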
        /// </remarks>
public static ICouchbaseResponseState GetChanges(ICouchbaseListenerContext context)
{
DBMonitorCouchbaseResponseState responseState = new DBMonitorCouchbaseResponseState();
var responseObject = PerformLogicWithDatabase(context, true, db =>
{
var response = context.CreateResponse();
responseState.Response = response;
if (context.ChangesFeedMode < ChangesFeedMode.Continuous) {
if(context.CacheWithEtag(db.LastSequenceNumber.ToString())) {
response.InternalStatus = StatusCode.NotModified;
return response;
}
}
var options = new ChangesOptions();
responseState.Db = db;
responseState.ContentOptions = context.ContentOptions;
responseState.ChangesFeedMode = context.ChangesFeedMode;
responseState.ChangesIncludeDocs = context.GetQueryParam<bool>("include_docs", bool.TryParse, false);
options.SetIncludeDocs(responseState.ChangesIncludeDocs);
responseState.ChangesIncludeConflicts = context.GetQueryParam("style") == "all_docs";
options.SetIncludeConflicts(responseState.ChangesIncludeConflicts);
options.SetContentOptions(context.ContentOptions);
options.SetSortBySequence(!options.IsIncludeConflicts());
options.SetLimit(context.GetQueryParam<int>("limit", int.TryParse, options.GetLimit()));
int since = context.GetQueryParam<int>("since", int.TryParse, 0);
string filterName = context.GetQueryParam("filter");
if(filterName != null) {
Status status = new Status();
responseState.ChangesFilter = db.GetFilter(filterName, status);
if(responseState.ChangesFilter == null) {
return context.CreateResponse(status.Code);
}
responseState.FilterParams = context.GetQueryParams();
}
RevisionList changes = db.ChangesSince(since, options, responseState.ChangesFilter, responseState.FilterParams);
if((context.ChangesFeedMode >= ChangesFeedMode.Continuous) ||
(context.ChangesFeedMode == ChangesFeedMode.LongPoll && changes.Count == 0)) {
// Response is going to stay open (continuous, or hanging GET):
response.Chunked = true;
if(context.ChangesFeedMode == ChangesFeedMode.EventSource) {
response["Content-Type"] = "text/event-stream; charset=utf-8";
}
if(context.ChangesFeedMode >= ChangesFeedMode.Continuous) {
response.WriteHeaders();
foreach(var rev in changes) {
response.SendContinuousLine(ChangesDictForRev(rev, responseState), context.ChangesFeedMode);
}
}
responseState.SubscribeToDatabase(db);
string heartbeatParam = context.GetQueryParam("heartbeat");
if(heartbeatParam != null) {
int heartbeat;
if(!int.TryParse(heartbeatParam, out heartbeat) || heartbeat <= 0) {
responseState.IsAsync = false;
return context.CreateResponse(StatusCode.BadParam);
}
                        heartbeat = Math.Max(heartbeat, MIN_HEARTBEAT); // never send heartbeats more often than the minimum interval
string heartbeatResponse = context.ChangesFeedMode == ChangesFeedMode.EventSource ? "\n\n" : "\r\n";
responseState.StartHeartbeat(heartbeatResponse, heartbeat);
}
return context.CreateResponse();
} else {
if(responseState.ChangesIncludeConflicts) {
response.JsonBody = new Body(ResponseBodyForChanges(changes, since, options.GetLimit(), responseState));
} else {
response.JsonBody = new Body(ResponseBodyForChanges(changes, since, responseState));
}
return response;
}
});
responseState.Response = responseObject;
return responseState;
}
/// <summary>
/// Request compaction of the specified database. Compaction compresses the disk database file.
/// </summary>
/// <returns>The response state for further HTTP processing</returns>
/// <param name="context">The context of the Couchbase Lite HTTP request</param>
/// <remarks>
/// http://docs.couchdb.org/en/latest/api/database/compact.html#post--db-_compact
        /// </remarks>
public static ICouchbaseResponseState Compact(ICouchbaseListenerContext context)
{
return PerformLogicWithDatabase(context, true, db =>
{
try {
db.Compact();
return context.CreateResponse(StatusCode.Accepted);
} catch (CouchbaseLiteException) {
return context.CreateResponse(StatusCode.DbError);
}
}).AsDefaultState();
}
/// <summary>
/// A database purge permanently removes the references to deleted documents from the database.
/// </summary>
/// <returns>The response state for further HTTP processing</returns>
/// <param name="context">The context of the Couchbase Lite HTTP request</param>
/// <remarks>
/// http://docs.couchdb.org/en/latest/api/database/misc.html#post--db-_purge
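        /// An illustrative request body (IDs and revisions are placeholders), mapping each document
        /// ID to the revisions to purge: { "docid1": ["2-aaaa", "3-bbbb"], "docid2": ["1-cccc"] }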
        /// </remarks>
public static ICouchbaseResponseState Purge(ICouchbaseListenerContext context)
{
return PerformLogicWithDatabase(context, true, db =>
{
var body = context.BodyAs<Dictionary<string, IList<string>>>();
if(body == null) {
return context.CreateResponse(StatusCode.BadJson);
}
var purgedRevisions = db.Storage.PurgeRevisions(body);
if(purgedRevisions == null) {
return context.CreateResponse(StatusCode.DbError);
}
var responseBody = new Body(new Dictionary<string, object>
{
{ "purged", purgedRevisions }
});
var retVal = context.CreateResponse();
retVal.JsonBody = responseBody;
return retVal;
}).AsDefaultState();
}
/// <summary>
/// Creates (and executes) a temporary view based on the view function supplied in the JSON request.
/// </summary>
/// <returns>The response state for further HTTP processing</returns>
/// <param name="context">The context of the Couchbase Lite HTTP request</param>
/// <remarks>
/// http://docs.couchdb.org/en/latest/api/database/temp-views.html#post--db-_temp_view
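        /// An illustrative request body (the view source is a placeholder; it is compiled as
        /// "javascript" by the handler below): { "map": "function(doc) { emit(doc._id, null); }" }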
        /// </remarks>
public static ICouchbaseResponseState ExecuteTemporaryViewFunction(ICouchbaseListenerContext context)
{
var response = context.CreateResponse();
if (context.RequestHeaders["Content-Type"] == null ||
!context.RequestHeaders["Content-Type"].StartsWith("application/json")) {
response.InternalStatus = StatusCode.UnsupportedType;
return response.AsDefaultState();
}
IEnumerable<byte> json = context.BodyStream.ReadAllBytes();
var requestBody = new Body(json);
if (!requestBody.IsValidJSON()) {
response.InternalStatus = StatusCode.BadJson;
return response.AsDefaultState();
}
var props = requestBody.GetProperties();
if (props == null) {
response.InternalStatus = StatusCode.BadJson;
return response.AsDefaultState();
}
var options = context.QueryOptions;
if (options == null) {
response.InternalStatus = StatusCode.BadRequest;
return response.AsDefaultState();
}
return PerformLogicWithDatabase(context, true, db =>
{
if (context.CacheWithEtag(db.LastSequenceNumber.ToString())) {
response.InternalStatus = StatusCode.NotModified;
return response;
}
var view = db.GetView("@@TEMPVIEW@@");
var status = view.Compile(props, "javascript");
if(status.IsError) {
response.InternalStatus = status.Code;
return response;
}
try {
view.UpdateIndex();
return QueryView(context, null, view, options);
} catch(CouchbaseLiteException e) {
response.InternalStatus = e.CBLStatus.Code;
}
return response;
}).AsDefaultState();
}
/// <summary>
/// Performs the given logic with the specified database
/// </summary>
/// <returns>The result (in terms of response to the client) of the database operation</returns>
/// <param name="context">The Couchbase Lite HTTP context</param>
/// <param name="open">Whether or not to open the database, or just find it</param>
/// <param name="action">The logic to perform on the database</param>
public static CouchbaseLiteResponse PerformLogicWithDatabase(ICouchbaseListenerContext context, bool open,
Func<Database, CouchbaseLiteResponse> action)
{
string dbName = context.DatabaseName;
Database db = context.DbManager.GetDatabaseWithoutOpening(dbName, false);
if (db == null || !db.Exists()) {
return context.CreateResponse(StatusCode.NotFound);
}
if (open) {
try {
db.Open();
} catch(Exception) {
return context.CreateResponse(StatusCode.DbError);
}
}
return action(db);
}
/// <summary>
/// Create a response body for an HTTP response from a given list of DB changes (no conflicts)
/// </summary>
/// <returns>The response body</returns>
/// <param name="changes">The list of changes to be processed</param>
/// <param name="since">The first change ID to be processed</param>
/// <param name="responseState">The current response state</param>
public static IDictionary<string, object> ResponseBodyForChanges(RevisionList changes, long since, DBMonitorCouchbaseResponseState responseState)
{
List<IDictionary<string, object>> results = new List<IDictionary<string, object>>();
foreach (var change in changes) {
results.Add(DatabaseMethods.ChangesDictForRev(change, responseState));
}
if (changes.Count > 0) {
since = changes.Last().GetSequence();
}
return new Dictionary<string, object> {
{ "results", results },
{ "last_seq", since }
};
}
/// <summary>
/// Creates a dictionary of metadata for one specific revision
/// </summary>
/// <returns>The metadata dictionary</returns>
/// <param name="rev">The revision to examine</param>
/// <param name="responseState">The current response state</param>
public static IDictionary<string, object> ChangesDictForRev(RevisionInternal rev, DBMonitorCouchbaseResponseState responseState)
{
if (responseState.ChangesIncludeDocs) {
var status = new Status();
var rev2 = DocumentMethods.ApplyOptions(responseState.ContentOptions, rev, responseState.Context, responseState.Db, status);
if (rev2 != null) {
rev2.SetSequence(rev.GetSequence());
rev = rev2;
}
}
return new NonNullDictionary<string, object> {
{ "seq", rev.GetSequence() },
{ "id", rev.GetDocId() },
{ "changes", new List<object> {
new Dictionary<string, object> {
{ "rev", rev.GetRevId() }
}
}
},
{ "deleted", rev.IsDeleted() ? (object)true : null },
{ "doc", responseState.ChangesIncludeDocs ? rev.GetProperties() : null }
};
}
/// <summary>
/// Queries the specified view using the specified options
/// </summary>
/// <returns>The HTTP response containing the results of the query</returns>
/// <param name="context">The request context</param>
/// <param name="view">The view to query</param>
/// <param name="options">The options to apply to the query</param>
public static CouchbaseLiteResponse QueryView(ICouchbaseListenerContext context, Database db, View view, QueryOptions options)
{
var result = view.QueryWithOptions(options);
object updateSeq = options.UpdateSeq ? (object)view.LastSequenceIndexed : null;
var mappedResult = new List<object>();
foreach (var row in result) {
row.Database = db;
var dict = row.AsJSONDictionary();
if (context.ContentOptions != DocumentContentOptions.None) {
var doc = dict.Get("doc").AsDictionary<string, object>();
if (doc != null) {
// Add content options:
RevisionInternal rev = new RevisionInternal(doc);
var status = new Status();
rev = DocumentMethods.ApplyOptions(context.ContentOptions, rev, context, db, status);
if (rev != null) {
dict["doc"] = rev.GetProperties();
}
}
}
mappedResult.Add(dict);
}
var body = new Body(new NonNullDictionary<string, object> {
{ "rows", mappedResult },
{ "total_rows", view.TotalRows },
{ "offset", options.Skip },
{ "update_seq", updateSeq }
});
var retVal = context.CreateResponse();
retVal.JsonBody = body;
return retVal;
}
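        /// <summary>
        /// Given a set of document and revision IDs, returns the subset of revisions that this database
        /// does not have, along with possible ancestor revisions that could be used to fetch them.
        /// </summary>
        /// <returns>The response state for further HTTP processing</returns>
        /// <param name="context">The context of the Couchbase Lite HTTP request</param>
        /// <remarks>
        /// An illustrative exchange (IDs and revisions are placeholders):
        /// request:  { "docid1": ["2-aaaa", "3-bbbb"] }
        /// response: { "docid1": { "missing": ["3-bbbb"], "possible_ancestors": ["2-aaaa"] } }
        /// </remarks>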
public static ICouchbaseResponseState RevsDiff(ICouchbaseListenerContext context)
{
// Collect all of the input doc/revision IDs as CBL_Revisions:
var revs = new RevisionList();
var body = context.BodyAs<Dictionary<string, object>>();
if (body == null) {
return context.CreateResponse(StatusCode.BadJson).AsDefaultState();
}
foreach (var docPair in body) {
var revIDs = docPair.Value.AsList<string>();
if (revIDs == null) {
return context.CreateResponse(StatusCode.BadParam).AsDefaultState();
}
foreach (var revID in revIDs) {
var rev = new RevisionInternal(docPair.Key, revID, false);
revs.Add(rev);
}
}
return PerformLogicWithDatabase(context, true, db =>
{
var response = context.CreateResponse();
// Look them up, removing the existing ones from revs:
db.Storage.FindMissingRevisions(revs);
// Return the missing revs in a somewhat different format:
IDictionary<string, object> diffs = new Dictionary<string, object>();
foreach(var rev in revs) {
var docId = rev.GetDocId();
IList<string> missingRevs = null;
if(!diffs.ContainsKey(docId)) {
missingRevs = new List<string>();
diffs[docId] = new Dictionary<string, IList<string>> { { "missing", missingRevs } };
} else {
missingRevs = ((Dictionary<string, IList<string>>)diffs[docId])["missing"];
}
missingRevs.Add(rev.GetRevId());
}
// Add the possible ancestors for each missing revision:
foreach(var docPair in diffs) {
IDictionary<string, IList<string>> docInfo = (IDictionary<string, IList<string>>)docPair.Value;
int maxGen = 0;
string maxRevID = null;
foreach(var revId in docInfo["missing"]) {
var parsed = RevisionInternal.ParseRevId(revId);
if(parsed.Item1 > maxGen) {
maxGen = parsed.Item1;
maxRevID = revId;
}
}
var rev = new RevisionInternal(docPair.Key, maxRevID, false);
var ancestors = db.Storage.GetPossibleAncestors(rev, 0, false);
var ancestorList = ancestors == null ? null : ancestors.ToList();
if(ancestorList != null && ancestorList.Count > 0) {
docInfo["possible_ancestors"] = ancestorList;
}
}
response.JsonBody = new Body(diffs);
return response;
}).AsDefaultState();
}
#endregion
#region Private Methods
        // Do an all-documents request on the database (i.e. fetch all docs given some options)
private static CouchbaseLiteResponse DoAllDocs(ICouchbaseListenerContext context, Database db, QueryOptions options)
{
var iterator = db.GetAllDocs(options);
if (iterator == null) {
return context.CreateResponse(StatusCode.BadJson);
}
var response = context.CreateResponse();
var result = (from row in iterator
select row.AsJSONDictionary()).ToList();
response.JsonBody = new Body(new NonNullDictionary<string, object> {
{ "rows", result },
{ "total_rows", result.Count },
{ "offset", options.Skip },
{ "update_seq", options.UpdateSeq ? (object)db.LastSequenceNumber : null }
});
return response;
}
        // Create a response body for an HTTP response from a given list of DB changes, including all conflicts
private static IDictionary<string, object> ResponseBodyForChanges(RevisionList changes, long since, int limit, DBMonitorCouchbaseResponseState state)
{
string lastDocId = null;
IDictionary<string, object> lastEntry = null;
var entries = new List<IDictionary<string, object>>();
foreach (var rev in changes) {
string docId = rev.GetDocId();
if (docId.Equals(lastDocId)) {
((IList)lastEntry["changes"]).Add(new Dictionary<string, object> { { "rev", rev.GetRevId() } });
} else {
lastEntry = ChangesDictForRev(rev, state);
entries.Add(lastEntry);
lastDocId = docId;
}
}
entries.Sort((x, y) => (int)((long)x["seq"] - (long)y["seq"]));
if (entries.Count > limit) {
entries.RemoveRange(limit, entries.Count - limit);
}
long lastSequence = entries.Any() ? (long)entries.Last()["seq"] : since;
return new Dictionary<string, object> {
{ "results", entries },
{ "last_seq", lastSequence }
};
}
#endregion
}
}
| |
//
// (C) Copyright 2003-2011 by Autodesk, Inc.
//
// Permission to use, copy, modify, and distribute this software in
// object code form for any purpose and without fee is hereby granted,
// provided that the above copyright notice appears in all copies and
// that both that copyright notice and the limited warranty and
// restricted rights notice below appear in all supporting
// documentation.
//
// AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS.
// AUTODESK SPECIFICALLY DISCLAIMS ANY IMPLIED WARRANTY OF
// MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE. AUTODESK, INC.
// DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE
// UNINTERRUPTED OR ERROR FREE.
//
// Use, duplication, or disclosure by the U.S. Government is subject to
// restrictions set forth in FAR 52.227-19 (Commercial Computer
// Software - Restricted Rights) and DFAR 252.227-7013(c)(1)(ii)
// (Rights in Technical Data and Computer Software), as applicable.
//
using System;
using System.Collections.Generic;
using System.Text;
using System.Collections;
using System.Windows.Forms;
using Autodesk.Revit;
using Autodesk.Revit.DB;
using Autodesk.Revit.UI;
using Application = Autodesk.Revit.ApplicationServices.Application;
namespace Revit.SDK.Samples.GridCreation.CS
{
/// <summary>
/// Data class which stores information for creating orthogonal grids
/// </summary>
public class CreateOrthogonalGridsData : CreateGridsData
{
#region Fields
// X coordinate of origin
private double m_xOrigin;
// Y coordinate of origin
private double m_yOrigin;
// Spacing between horizontal grids
private double m_xSpacing;
// Spacing between vertical grids
private double m_ySpacing;
// Number of horizontal grids
private uint m_xNumber;
// Number of vertical grids
private uint m_yNumber;
// Bubble location of horizontal grids
private BubbleLocation m_xBubbleLoc;
// Bubble location of vertical grids
private BubbleLocation m_yBubbleLoc;
// Label of first horizontal grid
private String m_xFirstLabel;
// Label of first vertical grid
private String m_yFirstLabel;
#endregion
#region Properties
/// <summary>
/// X coordinate of origin
/// </summary>
public double XOrigin
{
get
{
return m_xOrigin;
}
set
{
m_xOrigin = value;
}
}
/// <summary>
/// Y coordinate of origin
/// </summary>
public double YOrigin
{
get
{
return m_yOrigin;
}
set
{
m_yOrigin = value;
}
}
/// <summary>
/// Spacing between horizontal grids
/// </summary>
public double XSpacing
{
get
{
return m_xSpacing;
}
set
{
m_xSpacing = value;
}
}
/// <summary>
/// Spacing between vertical grids
/// </summary>
public double YSpacing
{
get
{
return m_ySpacing;
}
set
{
m_ySpacing = value;
}
}
/// <summary>
/// Number of horizontal grids
/// </summary>
public uint XNumber
{
get
{
return m_xNumber;
}
set
{
m_xNumber = value;
}
}
/// <summary>
/// Number of vertical grids
/// </summary>
public uint YNumber
{
get
{
return m_yNumber;
}
set
{
m_yNumber = value;
}
}
/// <summary>
/// Bubble location of horizontal grids
/// </summary>
public BubbleLocation XBubbleLoc
{
get
{
return m_xBubbleLoc;
}
set
{
m_xBubbleLoc = value;
}
}
/// <summary>
/// Bubble location of vertical grids
/// </summary>
public BubbleLocation YBubbleLoc
{
get
{
return m_yBubbleLoc;
}
set
{
m_yBubbleLoc = value;
}
}
/// <summary>
/// Label of first horizontal grid
/// </summary>
public String XFirstLabel
{
get
{
return m_xFirstLabel;
}
set
{
m_xFirstLabel = value;
}
}
/// <summary>
/// Label of first vertical grid
/// </summary>
public String YFirstLabel
{
get
{
return m_yFirstLabel;
}
set
{
m_yFirstLabel = value;
}
}
#endregion
#region Methods
/// <summary>
/// Constructor
/// </summary>
/// <param name="application">Application object</param>
/// <param name="dut">Current length display unit type</param>
/// <param name="labels">All existing labels in Revit's document</param>
public CreateOrthogonalGridsData(UIApplication application, DisplayUnitType dut, ArrayList labels)
: base(application, labels, dut)
{
}
/// <summary>
/// Create grids
/// </summary>
public void CreateGrids()
{
ArrayList failureReasons = new ArrayList();
if (CreateXGrids(ref failureReasons) + CreateYGrids(ref failureReasons) != 0)
{
String failureReason = resManager.GetString("FailedToCreateGrids");
if (failureReasons.Count != 0)
{
failureReason += resManager.GetString("Reasons") + "\r";
failureReason += "\r";
foreach (String reason in failureReasons)
{
failureReason += reason + "\r";
}
}
failureReason += "\r" + resManager.GetString("AjustValues");
ShowMessage(failureReason, resManager.GetString("FailureCaptionCreateGrids"));
}
}
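        /// <summary>
        /// Illustrative only (not invoked by the sample): a minimal sketch showing how this data
        /// class is typically populated before CreateGrids is called. All values below are
        /// placeholder assumptions.
        /// </summary>
        private static CreateOrthogonalGridsData BuildExampleData(UIApplication application, DisplayUnitType dut, ArrayList labels)
        {
            CreateOrthogonalGridsData data = new CreateOrthogonalGridsData(application, dut, labels);
            data.XOrigin = 0;                            // origin of the grid network
            data.YOrigin = 0;
            data.XSpacing = 10;                          // spacing between horizontal grids
            data.YSpacing = 12;                          // spacing between vertical grids
            data.XNumber = 4;                            // four horizontal grids
            data.YNumber = 3;                            // three vertical grids
            data.XBubbleLoc = BubbleLocation.StartPoint; // bubbles at the start of each grid line
            data.YBubbleLoc = BubbleLocation.StartPoint;
            data.XFirstLabel = "1";                      // horizontal grids labeled 1, 2, 3...
            data.YFirstLabel = "A";                      // vertical grids labeled A, B, C...
            return data;                                 // the caller would then invoke data.CreateGrids()
        }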
/// <summary>
/// Create horizontal grids
/// </summary>
/// <param name="failureReasons">ArrayList contains failure reasons</param>
/// <returns>Number of grids failed to create</returns>
private int CreateXGrids(ref ArrayList failureReasons)
{
int errorCount = 0;
// Curve array which stores all curves for batch creation
CurveArray curves = new CurveArray();
for (int i = 0; i < m_xNumber; ++i)
{
Autodesk.Revit.DB.XYZ startPoint;
Autodesk.Revit.DB.XYZ endPoint;
Line line;
try
{
if (m_yNumber != 0)
{
// Grids will have an extension distance of m_ySpacing / 2
startPoint = new Autodesk.Revit.DB.XYZ (m_xOrigin - m_ySpacing / 2, m_yOrigin + i * m_xSpacing, 0);
endPoint = new Autodesk.Revit.DB.XYZ (m_xOrigin + (m_yNumber - 1) * m_ySpacing + m_ySpacing / 2, m_yOrigin + i * m_xSpacing, 0);
}
else
{
startPoint = new Autodesk.Revit.DB.XYZ (m_xOrigin, m_yOrigin + i * m_xSpacing, 0);
endPoint = new Autodesk.Revit.DB.XYZ (m_xOrigin + m_xSpacing / 2, m_yOrigin + i * m_xSpacing, 0);
}
try
{
// Create a line according to the bubble location
if (m_xBubbleLoc == BubbleLocation.StartPoint)
{
line = NewLine(startPoint, endPoint);
}
else
{
line = NewLine(endPoint, startPoint);
}
}
catch (System.ArgumentException)
{
String failureReason = resManager.GetString("SpacingsTooSmall");
if (!failureReasons.Contains(failureReason))
{
failureReasons.Add(failureReason);
}
errorCount++;
continue;
}
if (i == 0)
{
Grid grid;
// Create grid with line
grid = NewGrid(line);
try
{
// Set the label of first horizontal grid
grid.Name = m_xFirstLabel;
}
catch (System.ArgumentException)
{
ShowMessage(resManager.GetString("FailedToSetLabel") + m_xFirstLabel + "!",
resManager.GetString("FailureCaptionSetLabel"));
}
}
else
{
// Add the line to curve array
curves.Append(line);
}
}
catch (Exception)
{
++errorCount;
continue;
}
}
// Create grids with curve array
CreateGrids(curves);
return errorCount;
}
/// <summary>
/// Create vertical grids
/// </summary>
/// <param name="failureReasons">ArrayList contains failure reasons</param>
/// <returns>Number of grids failed to create</returns>
private int CreateYGrids(ref ArrayList failureReasons)
{
int errorCount = 0;
// Curve array which stores all curves for batch creation
CurveArray curves = new CurveArray();
for (int j = 0; j < m_yNumber; ++j)
{
Autodesk.Revit.DB.XYZ startPoint;
Autodesk.Revit.DB.XYZ endPoint;
Line line;
try
{
if (m_xNumber != 0)
{
startPoint = new Autodesk.Revit.DB.XYZ (m_xOrigin + j * m_ySpacing, m_yOrigin - m_xSpacing / 2, 0);
endPoint = new Autodesk.Revit.DB.XYZ (m_xOrigin + j * m_ySpacing, m_yOrigin + (m_xNumber - 1) * m_xSpacing + m_xSpacing / 2, 0);
}
else
{
startPoint = new Autodesk.Revit.DB.XYZ (m_xOrigin + j * m_ySpacing, m_yOrigin, 0);
endPoint = new Autodesk.Revit.DB.XYZ (m_xOrigin + j * m_ySpacing, m_yOrigin + m_ySpacing / 2, 0);
}
try
{
// Create a line according to the bubble location
if (m_yBubbleLoc == BubbleLocation.StartPoint)
{
line = NewLine(startPoint, endPoint);
}
else
{
line = NewLine(endPoint, startPoint);
}
}
catch (System.ArgumentException)
{
String failureReason = resManager.GetString("SpacingsTooSmall");
if (!failureReasons.Contains(failureReason))
{
failureReasons.Add(failureReason);
}
errorCount++;
continue;
}
if (j == 0)
{
Grid grid;
// Create grid with line
grid = NewGrid(line);
try
{
// Set label of first vertical grid
grid.Name = m_yFirstLabel;
}
catch (System.ArgumentException)
{
ShowMessage(resManager.GetString("FailedToSetLabel") + m_yFirstLabel + "!",
resManager.GetString("FailureCaptionSetLabel"));
}
}
else
{
// Add the line to curve array
curves.Append(line);
}
}
catch (Exception)
{
++errorCount;
continue;
}
}
// Create grids with curves
CreateGrids(curves);
return errorCount;
}
#endregion
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 1.0.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Storage
{
using Azure;
using Management;
using Rest;
using Rest.Azure;
using Models;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// StorageAccountsOperations operations.
/// </summary>
public partial interface IStorageAccountsOperations
{
/// <summary>
/// Checks that the storage account name is valid and is not already in
/// use.
/// </summary>
/// <param name='name'>
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<CheckNameAvailabilityResult>> CheckNameAvailabilityWithHttpMessagesAsync(string name, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Asynchronously creates a new storage account with the specified
/// parameters. If an account is already created and a subsequent
/// create request is issued with different properties, the account
/// properties will be updated. If an account is already created and a
/// subsequent create or update request is issued with the exact same
/// set of properties, the request will succeed.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group within the user's subscription. The
/// name is case insensitive.
/// </param>
/// <param name='accountName'>
/// The name of the storage account within the specified resource
/// group. Storage account names must be between 3 and 24 characters in
/// length and use numbers and lower-case letters only.
/// </param>
/// <param name='parameters'>
/// The parameters to provide for the created account.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<StorageAccount>> CreateWithHttpMessagesAsync(string resourceGroupName, string accountName, StorageAccountCreateParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Deletes a storage account in Microsoft Azure.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group within the user's subscription. The
/// name is case insensitive.
/// </param>
/// <param name='accountName'>
/// The name of the storage account within the specified resource
/// group. Storage account names must be between 3 and 24 characters in
/// length and use numbers and lower-case letters only.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string accountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Returns the properties for the specified storage account including
/// but not limited to name, SKU name, location, and account status.
/// The ListKeys operation should be used to retrieve storage keys.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group within the user's subscription. The
/// name is case insensitive.
/// </param>
/// <param name='accountName'>
/// The name of the storage account within the specified resource
/// group. Storage account names must be between 3 and 24 characters in
/// length and use numbers and lower-case letters only.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<StorageAccount>> GetPropertiesWithHttpMessagesAsync(string resourceGroupName, string accountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// The update operation can be used to update the SKU, encryption,
/// access tier, or tags for a storage account. It can also be used to
/// map the account to a custom domain. Only one custom domain is
/// supported per storage account; the replacement/change of custom
/// domain is not supported. In order to replace an old custom domain,
/// the old value must be cleared/unregistered before a new value can
/// be set. The update of multiple properties is supported. This call
/// does not change the storage keys for the account. If you want to
/// change the storage account keys, use the regenerate keys operation.
/// The location and name of the storage account cannot be changed
/// after creation.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group within the user's subscription. The
/// name is case insensitive.
/// </param>
/// <param name='accountName'>
/// The name of the storage account within the specified resource
/// group. Storage account names must be between 3 and 24 characters in
/// length and use numbers and lower-case letters only.
/// </param>
/// <param name='parameters'>
/// The parameters to provide for the updated account.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<StorageAccount>> UpdateWithHttpMessagesAsync(string resourceGroupName, string accountName, StorageAccountUpdateParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Lists all the storage accounts available under the subscription.
/// Note that storage keys are not returned; use the ListKeys operation
/// for this.
/// </summary>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<IEnumerable<StorageAccount>>> ListWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Lists all the storage accounts available under the given resource
/// group. Note that storage keys are not returned; use the ListKeys
/// operation for this.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group within the user's subscription. The
/// name is case insensitive.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<IEnumerable<StorageAccount>>> ListByResourceGroupWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Lists the access keys for the specified storage account.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group within the user's subscription. The
/// name is case insensitive.
/// </param>
/// <param name='accountName'>
/// The name of the storage account within the specified resource
/// group. Storage account names must be between 3 and 24 characters in
/// length and use numbers and lower-case letters only.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<StorageAccountListKeysResult>> ListKeysWithHttpMessagesAsync(string resourceGroupName, string accountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Regenerates one of the access keys for the specified storage
/// account.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group within the user's subscription. The
/// name is case insensitive.
/// </param>
/// <param name='accountName'>
/// The name of the storage account within the specified resource
/// group. Storage account names must be between 3 and 24 characters in
/// length and use numbers and lower-case letters only.
/// </param>
/// <param name='keyName'>
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<StorageAccountListKeysResult>> RegenerateKeyWithHttpMessagesAsync(string resourceGroupName, string accountName, string keyName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// List SAS credentials of a storage account.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group within the user's subscription. The
/// name is case insensitive.
/// </param>
/// <param name='accountName'>
/// The name of the storage account within the specified resource
/// group. Storage account names must be between 3 and 24 characters in
/// length and use numbers and lower-case letters only.
/// </param>
/// <param name='parameters'>
/// The parameters to provide to list SAS credentials for the storage
/// account.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<ListAccountSasResponse>> ListAccountSASWithHttpMessagesAsync(string resourceGroupName, string accountName, AccountSasParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// List service SAS credentials of a specific resource.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group within the user's subscription. The
/// name is case insensitive.
/// </param>
/// <param name='accountName'>
/// The name of the storage account within the specified resource
/// group. Storage account names must be between 3 and 24 characters in
/// length and use numbers and lower-case letters only.
/// </param>
/// <param name='parameters'>
/// The parameters to provide to list service SAS credentials.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<ListServiceSasResponse>> ListServiceSASWithHttpMessagesAsync(string resourceGroupName, string accountName, ServiceSasParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Asynchronously creates a new storage account with the specified
/// parameters. If an account is already created and a subsequent
/// create request is issued with different properties, the account
/// properties will be updated. If an account is already created and a
/// subsequent create or update request is issued with the exact same
/// set of properties, the request will succeed.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group within the user's subscription. The
/// name is case insensitive.
/// </param>
/// <param name='accountName'>
/// The name of the storage account within the specified resource
/// group. Storage account names must be between 3 and 24 characters in
/// length and use numbers and lower-case letters only.
/// </param>
/// <param name='parameters'>
/// The parameters to provide for the created account.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<StorageAccount>> BeginCreateWithHttpMessagesAsync(string resourceGroupName, string accountName, StorageAccountCreateParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
}
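    /// <summary>
    /// Illustrative only: a minimal sketch (not part of the generated client) showing how these
    /// operations are typically awaited. It assumes the generated CheckNameAvailabilityResult
    /// model exposes a nullable NameAvailable flag, as in the accompanying Models namespace.
    /// </summary>
    internal static class StorageAccountsOperationsExample
    {
        /// <summary>
        /// Returns true when the given storage account name is free to use.
        /// </summary>
        internal static async Task<bool> IsNameAvailableAsync(IStorageAccountsOperations operations, string name, CancellationToken cancellationToken = default(CancellationToken))
        {
            // CheckNameAvailabilityWithHttpMessagesAsync wraps the REST call and returns the
            // deserialized body together with the raw HTTP response; dispose it when done.
            using (var response = await operations.CheckNameAvailabilityWithHttpMessagesAsync(name, null, cancellationToken))
            {
                return response.Body.NameAvailable ?? false;
            }
        }
    }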
}
| |
// Inflater.cs
//
// Copyright (C) 2001 Mike Krueger
// Copyright (C) 2004 John Reilly
//
// This file was translated from java, it was part of the GNU Classpath
// Copyright (C) 2001 Free Software Foundation, Inc.
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; either version 2
// of the License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//
// Linking this library statically or dynamically with other modules is
// making a combined work based on this library. Thus, the terms and
// conditions of the GNU General Public License cover the whole
// combination.
//
// As a special exception, the copyright holders of this library give you
// permission to link this library with independent modules to produce an
// executable, regardless of the license terms of these independent
// modules, and to copy and distribute the resulting executable under
// terms of your choice, provided that you also meet, for each linked
// independent module, the terms and conditions of the license of that
// module. An independent module is a module which is not derived from
// or based on this library. If you modify this library, you may extend
// this exception to your version of the library, but you are not
// obligated to do so. If you do not wish to do so, delete this
// exception statement from your version.
#if ZIPLIB
using System;
using ICSharpCode.SharpZipLib.Checksums;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// Inflater is used to decompress data that has been compressed according
/// to the "deflate" standard described in rfc1951.
///
/// By default Zlib (rfc1950) headers and footers are expected in the input.
	/// You can use the constructor <code>public Inflater(bool noHeader)</code>, passing true
	/// if there is no Zlib header information.
	///
	/// The usage is as follows. First you have to set some input with
	/// <code>SetInput()</code>, then Inflate() it. If Inflate doesn't
	/// inflate any bytes there may be three reasons:
/// <ul>
/// <li>IsNeedingInput() returns true because the input buffer is empty.
/// You have to provide more input with <code>SetInput()</code>.
	/// NOTE: IsNeedingInput() also returns true when the stream is finished.
	/// </li>
	/// <li>IsNeedingDictionary() returns true; you have to provide a preset
	/// dictionary with <code>SetDictionary()</code>.</li>
	/// <li>IsFinished returns true; the inflater has finished.</li>
/// </ul>
/// Once the first output byte is produced, a dictionary will not be
/// needed at a later stage.
///
/// author of the original java version : John Leuner, Jochen Hoenicke
/// </summary>
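	/// <example>
	/// A minimal decompression loop, as a hedged sketch (the buffer size and the surrounding
	/// stream handling are assumptions; member names follow the summary above):
	/// <code>
	/// Inflater inflater = new Inflater();
	/// inflater.SetInput(compressedData);
	/// byte[] buffer = new byte[4096];
	/// while (!inflater.IsFinished) {
	///     int count = inflater.Inflate(buffer, 0, buffer.Length);
	///     if (count > 0) {
	///         outputStream.Write(buffer, 0, count);
	///     } else {
	///         break; // needs more input (SetInput) or a preset dictionary (SetDictionary)
	///     }
	/// }
	/// </code>
	/// </example>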
internal class Inflater
{
#region Constants/Readonly
/// <summary>
/// Copy lengths for literal codes 257..285
/// </summary>
static readonly int[] CPLENS = {
3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258
};
/// <summary>
/// Extra bits for literal codes 257..285
/// </summary>
static readonly int[] CPLEXT = {
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0
};
/// <summary>
/// Copy offsets for distance codes 0..29
/// </summary>
static readonly int[] CPDIST = {
1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
8193, 12289, 16385, 24577
};
/// <summary>
/// Extra bits for distance codes
/// </summary>
static readonly int[] CPDEXT = {
0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
12, 12, 13, 13
};
/// <summary>
/// These are the possible states for an inflater
/// </summary>
const int DECODE_HEADER = 0;
const int DECODE_DICT = 1;
const int DECODE_BLOCKS = 2;
const int DECODE_STORED_LEN1 = 3;
const int DECODE_STORED_LEN2 = 4;
const int DECODE_STORED = 5;
const int DECODE_DYN_HEADER = 6;
const int DECODE_HUFFMAN = 7;
const int DECODE_HUFFMAN_LENBITS = 8;
const int DECODE_HUFFMAN_DIST = 9;
const int DECODE_HUFFMAN_DISTBITS = 10;
const int DECODE_CHKSUM = 11;
const int FINISHED = 12;
#endregion
#region Instance Fields
/// <summary>
/// This variable contains the current state.
/// </summary>
int mode;
/// <summary>
		/// The Adler checksum of the dictionary or of the decompressed
		/// stream, as it is written in the header or footer, respectively, of the
/// compressed stream.
/// Only valid if mode is DECODE_DICT or DECODE_CHKSUM.
/// </summary>
int readAdler;
/// <summary>
/// The number of bits needed to complete the current state. This
/// is valid, if mode is DECODE_DICT, DECODE_CHKSUM,
/// DECODE_HUFFMAN_LENBITS or DECODE_HUFFMAN_DISTBITS.
/// </summary>
int neededBits;
int repLength;
int repDist;
int uncomprLen;
/// <summary>
/// True, if the last block flag was set in the last block of the
/// inflated stream. This means that the stream ends after the
/// current block.
/// </summary>
bool isLastBlock;
/// <summary>
/// The total number of inflated bytes.
/// </summary>
long totalOut;
/// <summary>
/// The total number of bytes set with setInput(). This is not the
/// value returned by the TotalIn property, since this also includes the
/// unprocessed input.
/// </summary>
long totalIn;
/// <summary>
/// This variable stores the noHeader flag that was given to the constructor.
		/// True means that the inflated stream doesn't contain a Zlib header or
/// footer.
/// </summary>
bool noHeader;
StreamManipulator input;
OutputWindow outputWindow;
InflaterDynHeader dynHeader;
InflaterHuffmanTree litlenTree, distTree;
Adler32 adler;
#endregion
#region Constructors
/// <summary>
/// Creates a new inflater or RFC1951 decompressor
/// RFC1950/Zlib headers and footers will be expected in the input data
/// </summary>
public Inflater() : this(false)
{
}
/// <summary>
/// Creates a new inflater.
/// </summary>
/// <param name="noHeader">
/// True if no RFC1950/Zlib header and footer fields are expected in the input data
///
/// This is used for GZIPed/Zipped input.
///
/// For compatibility with
/// Sun JDK you should provide one byte of input more than needed in
/// this case.
/// </param>
public Inflater(bool noHeader)
{
this.noHeader = noHeader;
this.adler = new Adler32();
input = new StreamManipulator();
outputWindow = new OutputWindow();
mode = noHeader ? DECODE_BLOCKS : DECODE_HEADER;
}
#endregion
/// <summary>
/// Resets the inflater so that a new stream can be decompressed. All
/// pending input and output will be discarded.
/// </summary>
public void Reset()
{
mode = noHeader ? DECODE_BLOCKS : DECODE_HEADER;
totalIn = 0;
totalOut = 0;
input.Reset();
outputWindow.Reset();
dynHeader = null;
litlenTree = null;
distTree = null;
isLastBlock = false;
adler.Reset();
}
/// <summary>
/// Decodes a zlib/RFC1950 header.
/// </summary>
/// <returns>
/// False if more input is needed.
/// </returns>
/// <exception cref="SharpZipBaseException">
/// The header is invalid.
/// </exception>
private bool DecodeHeader()
{
int header = input.PeekBits(16);
if (header < 0) {
return false;
}
input.DropBits(16);
// The header is written in "wrong" byte order
header = ((header << 8) | (header >> 8)) & 0xffff;
if (header % 31 != 0) {
throw new SharpZipBaseException("Header checksum illegal");
}
if ((header & 0x0f00) != (Deflater.DEFLATED << 8)) {
throw new SharpZipBaseException("Compression Method unknown");
}
/* Maximum size of the backwards window in bits.
* We currently ignore this, but we could use it to make the
* inflater window more space efficient. On the other hand the
* full window (15 bits) is needed most times, anyway.
int max_wbits = ((header & 0x7000) >> 12) + 8;
*/
if ((header & 0x0020) == 0) { // Dictionary flag?
mode = DECODE_BLOCKS;
} else {
mode = DECODE_DICT;
neededBits = 32;
}
return true;
}
/// <summary>
/// Decodes the dictionary checksum after the deflate header.
/// </summary>
/// <returns>
/// False if more input is needed.
/// </returns>
private bool DecodeDict()
{
while (neededBits > 0) {
int dictByte = input.PeekBits(8);
if (dictByte < 0) {
return false;
}
input.DropBits(8);
readAdler = (readAdler << 8) | dictByte;
neededBits -= 8;
}
return false;
}
/// <summary>
/// Decodes the huffman encoded symbols in the input stream.
/// </summary>
/// <returns>
/// false if more input is needed, true if output window is
/// full or the current block ends.
/// </returns>
/// <exception cref="SharpZipBaseException">
/// if deflated stream is invalid.
/// </exception>
private bool DecodeHuffman()
{
int free = outputWindow.GetFreeSpace();
while (free >= 258)
{
int symbol;
switch (mode)
{
case DECODE_HUFFMAN:
// This is the inner loop so it is optimized a bit
while (((symbol = litlenTree.GetSymbol(input)) & ~0xff) == 0)
{
outputWindow.Write(symbol);
if (--free < 258)
{
return true;
}
}
if (symbol < 257)
{
if (symbol < 0)
{
return false;
}
else
{
// symbol == 256: end of block
distTree = null;
litlenTree = null;
mode = DECODE_BLOCKS;
return true;
}
}
try
{
repLength = CPLENS[symbol - 257];
neededBits = CPLEXT[symbol - 257];
}
catch (Exception)
{
throw new SharpZipBaseException("Illegal rep length code");
}
goto case DECODE_HUFFMAN_LENBITS; // fall through
case DECODE_HUFFMAN_LENBITS:
if (neededBits > 0)
{
mode = DECODE_HUFFMAN_LENBITS;
int i = input.PeekBits(neededBits);
if (i < 0)
{
return false;
}
input.DropBits(neededBits);
repLength += i;
}
mode = DECODE_HUFFMAN_DIST;
goto case DECODE_HUFFMAN_DIST; // fall through
case DECODE_HUFFMAN_DIST:
symbol = distTree.GetSymbol(input);
if (symbol < 0)
{
return false;
}
try
{
repDist = CPDIST[symbol];
neededBits = CPDEXT[symbol];
}
catch (Exception)
{
throw new SharpZipBaseException("Illegal rep dist code");
}
goto case DECODE_HUFFMAN_DISTBITS; // fall through
case DECODE_HUFFMAN_DISTBITS:
if (neededBits > 0)
{
mode = DECODE_HUFFMAN_DISTBITS;
int i = input.PeekBits(neededBits);
if (i < 0)
{
return false;
}
input.DropBits(neededBits);
repDist += i;
}
outputWindow.Repeat(repLength, repDist);
free -= repLength;
mode = DECODE_HUFFMAN;
break;
default:
throw new SharpZipBaseException("Inflater unknown mode");
}
}
return true;
}
/// <summary>
/// Decodes the adler checksum after the deflate stream.
/// </summary>
/// <returns>
/// false if more input is needed.
/// </returns>
/// <exception cref="SharpZipBaseException">
/// If checksum doesn't match.
/// </exception>
private bool DecodeChksum()
{
while (neededBits > 0) {
int chkByte = input.PeekBits(8);
if (chkByte < 0) {
return false;
}
input.DropBits(8);
readAdler = (readAdler << 8) | chkByte;
neededBits -= 8;
}
if ((int) adler.Value != readAdler) {
throw new SharpZipBaseException("Adler chksum doesn't match: " + (int)adler.Value + " vs. " + readAdler);
}
mode = FINISHED;
return false;
}
/// <summary>
/// Decodes the deflated stream.
/// </summary>
/// <returns>
/// false if more input is needed, or if finished.
/// </returns>
/// <exception cref="SharpZipBaseException">
/// if deflated stream is invalid.
/// </exception>
private bool Decode()
{
switch (mode) {
case DECODE_HEADER:
return DecodeHeader();
case DECODE_DICT:
return DecodeDict();
case DECODE_CHKSUM:
return DecodeChksum();
case DECODE_BLOCKS:
if (isLastBlock) {
if (noHeader) {
mode = FINISHED;
return false;
} else {
input.SkipToByteBoundary();
neededBits = 32;
mode = DECODE_CHKSUM;
return true;
}
}
int type = input.PeekBits(3);
if (type < 0) {
return false;
}
input.DropBits(3);
if ((type & 1) != 0) {
isLastBlock = true;
}
switch (type >> 1){
case DeflaterConstants.STORED_BLOCK:
input.SkipToByteBoundary();
mode = DECODE_STORED_LEN1;
break;
case DeflaterConstants.STATIC_TREES:
litlenTree = InflaterHuffmanTree.defLitLenTree;
distTree = InflaterHuffmanTree.defDistTree;
mode = DECODE_HUFFMAN;
break;
case DeflaterConstants.DYN_TREES:
dynHeader = new InflaterDynHeader();
mode = DECODE_DYN_HEADER;
break;
default:
throw new SharpZipBaseException("Unknown block type " + type);
}
return true;
case DECODE_STORED_LEN1:
{
if ((uncomprLen = input.PeekBits(16)) < 0) {
return false;
}
input.DropBits(16);
mode = DECODE_STORED_LEN2;
}
goto case DECODE_STORED_LEN2; // fall through
case DECODE_STORED_LEN2:
{
int nlen = input.PeekBits(16);
if (nlen < 0) {
return false;
}
input.DropBits(16);
if (nlen != (uncomprLen ^ 0xffff)) {
throw new SharpZipBaseException("broken uncompressed block");
}
mode = DECODE_STORED;
}
goto case DECODE_STORED; // fall through
case DECODE_STORED:
{
int more = outputWindow.CopyStored(input, uncomprLen);
uncomprLen -= more;
if (uncomprLen == 0) {
mode = DECODE_BLOCKS;
return true;
}
return !input.IsNeedingInput;
}
case DECODE_DYN_HEADER:
if (!dynHeader.Decode(input)) {
return false;
}
litlenTree = dynHeader.BuildLitLenTree();
distTree = dynHeader.BuildDistTree();
mode = DECODE_HUFFMAN;
goto case DECODE_HUFFMAN; // fall through
case DECODE_HUFFMAN:
case DECODE_HUFFMAN_LENBITS:
case DECODE_HUFFMAN_DIST:
case DECODE_HUFFMAN_DISTBITS:
return DecodeHuffman();
case FINISHED:
return false;
default:
throw new SharpZipBaseException("Inflater.Decode unknown mode");
}
}
/// <summary>
/// Sets the preset dictionary. This should only be called if
/// IsNeedingDictionary returns true, and it should set the same
/// dictionary that was used for deflating. The Adler property
/// returns the checksum of the dictionary that is needed.
/// </summary>
/// <param name="buffer">
/// The dictionary.
/// </param>
public void SetDictionary(byte[] buffer)
{
SetDictionary(buffer, 0, buffer.Length);
}
/// <summary>
/// Sets the preset dictionary. This should only be called if
/// IsNeedingDictionary returns true, and it should set the same
/// dictionary that was used for deflating. The Adler property
/// returns the checksum of the dictionary that is needed.
/// </summary>
/// <param name="buffer">
/// The dictionary.
/// </param>
/// <param name="index">
/// The index into buffer where the dictionary starts.
/// </param>
/// <param name="count">
/// The number of bytes in the dictionary.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// No dictionary is needed.
/// </exception>
/// <exception cref="SharpZipBaseException">
/// The adler checksum for the buffer is invalid
/// </exception>
public void SetDictionary(byte[] buffer, int index, int count)
{
if ( buffer == null ) {
throw new ArgumentNullException("buffer");
}
if ( index < 0 ) {
throw new ArgumentOutOfRangeException("index");
}
if ( count < 0 ) {
throw new ArgumentOutOfRangeException("count");
}
if (!IsNeedingDictionary) {
throw new InvalidOperationException("Dictionary is not needed");
}
adler.Update(buffer, index, count);
if ((int)adler.Value != readAdler) {
throw new SharpZipBaseException("Wrong adler checksum");
}
adler.Reset();
outputWindow.CopyDict(buffer, index, count);
mode = DECODE_BLOCKS;
}
/// <summary>
/// Sets the input. This should only be called if IsNeedingInput
/// returns true.
/// </summary>
/// <param name="buffer">
/// the input.
/// </param>
public void SetInput(byte[] buffer)
{
SetInput(buffer, 0, buffer.Length);
}
/// <summary>
/// Sets the input. This should only be called if IsNeedingInput
/// returns true.
/// </summary>
/// <param name="buffer">
/// The source of input data
/// </param>
/// <param name="index">
/// The index into buffer where the input starts.
/// </param>
/// <param name="count">
/// The number of bytes of input to use.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// No input is needed.
/// </exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// The index and/or count are wrong.
/// </exception>
public void SetInput(byte[] buffer, int index, int count)
{
input.SetInput(buffer, index, count);
totalIn += (long)count;
}
/// <summary>
/// Inflates the compressed stream to the output buffer. If this
/// returns 0, you should check whether IsNeedingDictionary,
/// IsNeedingInput or IsFinished returns true to determine why no
/// further output is produced.
/// </summary>
/// <param name="buffer">
/// the output buffer.
/// </param>
/// <returns>
/// The number of bytes written to the buffer, 0 if no further
/// output can be produced.
/// </returns>
/// <exception cref="System.ArgumentOutOfRangeException">
/// if buffer has length 0.
/// </exception>
/// <exception cref="System.FormatException">
/// if deflated stream is invalid.
/// </exception>
public int Inflate(byte[] buffer)
{
if ( buffer == null )
{
throw new ArgumentNullException("buffer");
}
return Inflate(buffer, 0, buffer.Length);
}
/// <summary>
/// Inflates the compressed stream to the output buffer. If this
/// returns 0, you should check whether IsNeedingDictionary,
/// IsNeedingInput or IsFinished returns true to determine why no
/// further output is produced.
/// </summary>
/// <param name="buffer">
/// the output buffer.
/// </param>
/// <param name="offset">
/// the offset in buffer where storing starts.
/// </param>
/// <param name="count">
/// the maximum number of bytes to output.
/// </param>
/// <returns>
/// the number of bytes written to the buffer, 0 if no further output can be produced.
/// </returns>
/// <exception cref="System.ArgumentOutOfRangeException">
/// if count is less than 0.
/// </exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// if the index and / or count are wrong.
/// </exception>
/// <exception cref="System.FormatException">
/// if deflated stream is invalid.
/// </exception>
public int Inflate(byte[] buffer, int offset, int count)
{
if ( buffer == null )
{
throw new ArgumentNullException("buffer");
}
if ( count < 0 ) {
#if NETCF_1_0
throw new ArgumentOutOfRangeException("count");
#else
throw new ArgumentOutOfRangeException("count", "count cannot be negative");
#endif
}
if ( offset < 0 ) {
#if NETCF_1_0
throw new ArgumentOutOfRangeException("offset");
#else
throw new ArgumentOutOfRangeException("offset", "offset cannot be negative");
#endif
}
if ( offset + count > buffer.Length ) {
throw new ArgumentException("count exceeds buffer bounds");
}
// Special case: count may be zero
if (count == 0)
{
if (!IsFinished) { // -jr- 08-Nov-2003 INFLATE_BUG fix..
Decode();
}
return 0;
}
int bytesCopied = 0;
do {
if (mode != DECODE_CHKSUM) {
/* Don't give away any output, if we are waiting for the
* checksum in the input stream.
*
* With this trick we have always:
* IsNeedingInput() and not IsFinished()
* implies more output can be produced.
*/
int more = outputWindow.CopyOutput(buffer, offset, count);
if ( more > 0 ) {
adler.Update(buffer, offset, more);
offset += more;
bytesCopied += more;
totalOut += (long)more;
count -= more;
if (count == 0) {
return bytesCopied;
}
}
}
} while (Decode() || ((outputWindow.GetAvailable() > 0) && (mode != DECODE_CHKSUM)));
return bytesCopied;
}
/// <summary>
/// Returns true if the input buffer is empty.
/// You should then call SetInput.
/// NOTE: This property also returns true when the stream is finished.
/// </summary>
public bool IsNeedingInput {
get {
return input.IsNeedingInput;
}
}
/// <summary>
/// Returns true if a preset dictionary is needed to inflate the input.
/// </summary>
public bool IsNeedingDictionary {
get {
return mode == DECODE_DICT && neededBits == 0;
}
}
/// <summary>
/// Returns true if the inflater has finished. This means that no
/// input is needed and no output can be produced.
/// </summary>
public bool IsFinished {
get {
return mode == FINISHED && outputWindow.GetAvailable() == 0;
}
}
/// <summary>
/// Gets the Adler checksum. This is either the checksum of all
/// uncompressed bytes returned by Inflate(), or, if IsNeedingDictionary
/// returns true (and thus no output has been produced yet), the
/// Adler checksum of the expected dictionary.
/// </summary>
/// <returns>
/// the adler checksum.
/// </returns>
public int Adler {
get {
return IsNeedingDictionary ? readAdler : (int) adler.Value;
}
}
/// <summary>
/// Gets the total number of output bytes returned by Inflate().
/// </summary>
/// <returns>
/// the total number of output bytes.
/// </returns>
public long TotalOut {
get {
return totalOut;
}
}
/// <summary>
/// Gets the total number of processed compressed input bytes.
/// </summary>
/// <returns>
/// The total number of compressed input bytes that have been processed.
/// </returns>
public long TotalIn {
get {
return totalIn - (long)RemainingInput;
}
}
/// <summary>
/// Gets the number of unprocessed input bytes. Useful if the end of the
/// stream is reached and you want to further process the bytes after
/// the deflate stream.
/// </summary>
/// <returns>
/// The number of bytes of the input which have not been processed.
/// </returns>
public int RemainingInput {
// TODO: This should be a long?
get {
return input.AvailableBytes;
}
}
}
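// ---------------------------------------------------------------------------
// A minimal usage sketch added for illustration (not part of the original
// library). It shows the SetInput/Inflate loop described in the documentation
// above: feed compressed bytes, drain decompressed bytes, and consult
// IsNeedingDictionary / IsNeedingInput / IsFinished when Inflate returns 0.
// The 4 KB buffer size and the exception messages are arbitrary choices.
internal static class InflaterUsageExample
{
	public static byte[] Decompress(byte[] zlibData)
	{
		Inflater inflater = new Inflater(); // expects the RFC1950/zlib header and Adler32 footer
		inflater.SetInput(zlibData);
		using (System.IO.MemoryStream result = new System.IO.MemoryStream())
		{
			byte[] buffer = new byte[4096];
			while (!inflater.IsFinished)
			{
				int produced = inflater.Inflate(buffer);
				if (produced > 0)
				{
					result.Write(buffer, 0, produced);
				}
				else if (inflater.IsNeedingDictionary)
				{
					// inflater.Adler identifies the dictionary; SetDictionary would supply it here.
					throw new SharpZipBaseException("Preset dictionary required");
				}
				else if (inflater.IsNeedingInput)
				{
					// All input consumed but the stream has not finished: the data is truncated.
					throw new SharpZipBaseException("Unexpected end of input");
				}
				else
				{
					// Defensive: no progress and no recognised cause; avoid looping forever.
					break;
				}
			}
			return result.ToArray();
		}
	}
}
// ---------------------------------------------------------------------------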
}
#endif
| |
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
#pragma warning disable 1634, 1691
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Management.Automation.Internal;
using System.Management.Automation.Provider;
using Dbg = System.Management.Automation;
namespace System.Management.Automation
{
/// <summary>
/// Holds the state of a Monad Shell session.
/// </summary>
internal sealed partial class SessionStateInternal
{
#region Current working directory/drive
/// <summary>
/// Gets the current monad namespace specific working location. To change
/// the current working directory, use the SetLocation method.
/// </summary>
/// <exception cref="InvalidOperationException">
/// If a location has not been set yet.
/// </exception>
internal PathInfo CurrentLocation
{
get
{
if (CurrentDrive == null)
{
// We need the error handling, and moving to a method would be
// a breaking change
#pragma warning suppress 56503
throw PSTraceSource.NewInvalidOperationException();
}
PathInfo result =
new PathInfo(
CurrentDrive,
CurrentDrive.Provider,
CurrentDrive.CurrentLocation,
new SessionState(this));
return result;
}
}
/// <summary>
/// Gets the namespace specific path of the current working directory
/// for the specified namespace.
/// </summary>
/// <param name="namespaceID">
/// An identifier that uniquely identifies the namespace to get the
/// current working directory for.
/// </param>
/// <returns>
/// The namespace specific path of the current working directory for
/// the specified namespace.
/// </returns>
/// <exception cref="ArgumentNullException">
/// If <paramref name="namespaceID"/> is null.
/// </exception>
/// <exception cref="ProviderNotFoundException">
/// If <paramref name="namespacesID"/> refers to a provider that does not exist.
/// </exception>
/// <exception cref="DriveNotFoundException">
/// If a current drive cannot be found for the provider <paramref name="namespaceID"/>
/// </exception>
internal PathInfo GetNamespaceCurrentLocation(string namespaceID)
{
if (namespaceID == null)
{
throw PSTraceSource.NewArgumentNullException(nameof(namespaceID));
}
// If namespace ID is empty, we will use the current working drive
PSDriveInfo drive = null;
if (namespaceID.Length == 0)
{
ProvidersCurrentWorkingDrive.TryGetValue(CurrentDrive.Provider, out drive);
}
else
{
// First check to see if the provider exists
ProvidersCurrentWorkingDrive.TryGetValue(GetSingleProvider(namespaceID), out drive);
}
if (drive == null)
{
DriveNotFoundException e =
new DriveNotFoundException(
namespaceID,
"DriveNotFound",
SessionStateStrings.DriveNotFound);
throw e;
}
CmdletProviderContext context = new CmdletProviderContext(this.ExecutionContext);
context.Drive = drive;
// Now make the namespace specific path
string path = null;
if (drive.Hidden)
{
if (LocationGlobber.IsProviderDirectPath(drive.CurrentLocation))
{
path = drive.CurrentLocation;
}
else
{
path = LocationGlobber.GetProviderQualifiedPath(drive.CurrentLocation, drive.Provider);
}
}
else
{
path = LocationGlobber.GetDriveQualifiedPath(drive.CurrentLocation, drive);
}
return new PathInfo(drive, drive.Provider, path, new SessionState(this));
}
/// <summary>
/// Changes the current working directory to the path specified.
/// </summary>
/// <param name="path">
/// The path of the new current working directory.
/// </param>
/// <returns>
/// The PathInfo object representing the path of the location
/// that was set.
/// </returns>
/// <exception cref="ArgumentNullException">
/// If <paramref name="path"/> is null.
/// </exception>
/// <exception cref="ArgumentException">
/// If <paramref name="path"/> does not exist, is not a container, or
/// resolved to multiple containers.
/// </exception>
/// <exception cref="ProviderNotFoundException">
/// If <paramref name="path"/> refers to a provider that does not exist.
/// </exception>
/// <exception cref="DriveNotFoundException">
/// If <paramref name="path"/> refers to a drive that does not exist.
/// </exception>
/// <exception cref="ProviderInvocationException">
/// If the provider associated with <paramref name="path"/> threw an
/// exception.
/// </exception>
internal PathInfo SetLocation(string path)
{
return SetLocation(path, null);
}
/// <summary>
/// Changes the current working directory to the path specified.
/// </summary>
/// <param name="path">
/// The path of the new current working directory
/// </param>
/// <param name="context">
/// The context the provider uses when performing the operation.
/// </param>
/// <returns>
/// The PathInfo object representing the path of the location
/// that was set.
/// </returns>
/// <exception cref="ArgumentNullException">
/// If <paramref name="path"/> is null.
/// </exception>
/// <exception cref="ArgumentException">
/// If <paramref name="path"/> does not exist, is not a container, or
/// resolved to multiple containers.
/// </exception>
/// <exception cref="ProviderNotFoundException">
/// If <paramref name="path"/> refers to a provider that does not exist.
/// </exception>
/// <exception cref="DriveNotFoundException">
/// If <paramref name="path"/> refers to a drive that does not exist.
/// </exception>
/// <exception cref="ProviderInvocationException">
/// If the provider associated with <paramref name="path"/> threw an
/// exception.
/// </exception>
/// <exception cref="ItemNotFoundException">
/// If the <paramref name="path"/> could not be resolved.
/// </exception>
internal PathInfo SetLocation(string path, CmdletProviderContext context)
{
return SetLocation(path, context, literalPath: false);
}
/// <summary>
/// Changes the current working directory to the path specified.
/// </summary>
/// <param name="path">
/// The path of the new current working directory.
/// </param>
/// <param name="context">
/// The context the provider uses when performing the operation.
/// </param>
/// <param name="literalPath">
/// Indicate if the path is a literal path.
/// </param>
/// <returns>
/// The PathInfo object representing the path of the location
/// that was set.
/// </returns>
/// <exception cref="ArgumentNullException">
/// If <paramref name="path"/> is null.
/// </exception>
/// <exception cref="ArgumentException">
/// If <paramref name="path"/> does not exist, is not a container, or
/// resolved to multiple containers.
/// </exception>
/// <exception cref="ProviderNotFoundException">
/// If <paramref name="path"/> refers to a provider that does not exist.
/// </exception>
/// <exception cref="DriveNotFoundException">
/// If <paramref name="path"/> refers to a drive that does not exist.
/// </exception>
/// <exception cref="ProviderInvocationException">
/// If the provider associated with <paramref name="path"/> threw an
/// exception.
/// </exception>
/// <exception cref="ItemNotFoundException">
/// If the <paramref name="path"/> could not be resolved.
/// </exception>
internal PathInfo SetLocation(string path, CmdletProviderContext context, bool literalPath)
{
if (path == null)
{
throw PSTraceSource.NewArgumentNullException(nameof(path));
}
PathInfo current = CurrentLocation;
string originalPath = path;
string driveName = null;
ProviderInfo provider = null;
string providerId = null;
switch (originalPath)
{
case string originalPathSwitch when !literalPath && originalPathSwitch.Equals("-", StringComparison.Ordinal):
if (_setLocationHistory.UndoCount <= 0)
{
throw new InvalidOperationException(SessionStateStrings.LocationUndoStackIsEmpty);
}
path = _setLocationHistory.Undo(this.CurrentLocation).Path;
break;
case string originalPathSwitch when !literalPath && originalPathSwitch.Equals("+", StringComparison.Ordinal):
if (_setLocationHistory.RedoCount <= 0)
{
throw new InvalidOperationException(SessionStateStrings.LocationRedoStackIsEmpty);
}
path = _setLocationHistory.Redo(this.CurrentLocation).Path;
break;
default:
var pushPathInfo = GetNewPushPathInfo();
_setLocationHistory.Push(pushPathInfo);
break;
}
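// Note: the "-" and "+" cases above back the `Set-Location -` / `Set-Location +`
// syntax, undoing or redoing entries in the bounded location history; any other
// path first pushes the current location onto that history.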
PSDriveInfo previousWorkingDrive = CurrentDrive;
// First check to see if the path is a home path
if (LocationGlobber.IsHomePath(path))
{
path = Globber.GetHomeRelativePath(path);
}
if (LocationGlobber.IsProviderDirectPath(path))
{
// The path is a provider-direct path so use the current
// provider and its hidden drive but don't modify the path
// at all.
provider = CurrentLocation.Provider;
CurrentDrive = provider.HiddenDrive;
}
else if (LocationGlobber.IsProviderQualifiedPath(path, out providerId))
{
provider = GetSingleProvider(providerId);
CurrentDrive = provider.HiddenDrive;
}
else
{
// See if the path is a relative or absolute
// path.
if (Globber.IsAbsolutePath(path, out driveName))
{
// Since the path is an absolute path
// we need to change the current working
// drive
PSDriveInfo newWorkingDrive = GetDrive(driveName);
CurrentDrive = newWorkingDrive;
// If the path is simply a colon-terminated drive,
// not a slash-terminated path to the root of a drive,
// set the path to the current working directory of that drive.
string colonTerminatedVolume = CurrentDrive.Name + ':';
if (CurrentDrive.VolumeSeparatedByColon && (path.Length == colonTerminatedVolume.Length))
{
path = Path.Combine(colonTerminatedVolume + Path.DirectorySeparatorChar, CurrentDrive.CurrentLocation);
}
// Now that the current working drive is set,
// process the rest of the path as a relative path.
}
}
if (context == null)
{
context = new CmdletProviderContext(this.ExecutionContext);
}
if (CurrentDrive != null)
{
context.Drive = CurrentDrive;
}
CmdletProvider providerInstance = null;
Collection<PathInfo> workingPath = null;
try
{
workingPath =
Globber.GetGlobbedMonadPathsFromMonadPath(
path,
false,
context,
out providerInstance);
}
catch (LoopFlowException)
{
throw;
}
catch (PipelineStoppedException)
{
throw;
}
catch (ActionPreferenceStopException)
{
throw;
}
catch (Exception)
{
// Reset the drive to the previous drive and
// then rethrow the error
CurrentDrive = previousWorkingDrive;
throw;
}
if (workingPath.Count == 0)
{
// Set the current working drive back to the previous
// one in case it was changed.
CurrentDrive = previousWorkingDrive;
throw
new ItemNotFoundException(
path,
"PathNotFound",
SessionStateStrings.PathNotFound);
}
// We allow globbing the location as long as it only resolves a single container.
bool foundContainer = false;
bool pathIsContainer = false;
bool pathIsProviderQualifiedPath = false;
bool currentPathisProviderQualifiedPath = false;
for (int index = 0; index < workingPath.Count; ++index)
{
CmdletProviderContext normalizePathContext =
new CmdletProviderContext(context);
PathInfo resolvedPath = workingPath[index];
string currentPath = path;
try
{
string providerName = null;
currentPathisProviderQualifiedPath = LocationGlobber.IsProviderQualifiedPath(resolvedPath.Path, out providerName);
if (currentPathisProviderQualifiedPath)
{
// The path should be the provider-qualified path without the provider ID
// or ::
string providerInternalPath = LocationGlobber.RemoveProviderQualifier(resolvedPath.Path);
try
{
currentPath = NormalizeRelativePath(GetSingleProvider(providerName), providerInternalPath, string.Empty, normalizePathContext);
}
catch (NotSupportedException)
{
// Since the provider does not support normalizing the path, just
// use the path we currently have.
}
catch (LoopFlowException)
{
throw;
}
catch (PipelineStoppedException)
{
throw;
}
catch (ActionPreferenceStopException)
{
throw;
}
catch (Exception)
{
// Reset the drive to the previous drive and
// then rethrow the error
CurrentDrive = previousWorkingDrive;
throw;
}
}
else
{
try
{
currentPath = NormalizeRelativePath(resolvedPath.Path, CurrentDrive.Root, normalizePathContext);
}
catch (NotSupportedException)
{
// Since the provider does not support normalizing the path, just
// use the path we currently have.
}
catch (LoopFlowException)
{
throw;
}
catch (PipelineStoppedException)
{
throw;
}
catch (ActionPreferenceStopException)
{
throw;
}
catch (Exception)
{
// Reset the drive to the previous drive and
// then rethrow the error
CurrentDrive = previousWorkingDrive;
throw;
}
}
// Now see if there were errors while normalizing the path
if (normalizePathContext.HasErrors())
{
// Set the current working drive back to the previous
// one in case it was changed.
CurrentDrive = previousWorkingDrive;
normalizePathContext.ThrowFirstErrorOrDoNothing();
}
}
finally
{
normalizePathContext.RemoveStopReferral();
}
bool isContainer = false;
CmdletProviderContext itemContainerContext =
new CmdletProviderContext(context);
itemContainerContext.SuppressWildcardExpansion = true;
try
{
isContainer =
IsItemContainer(
resolvedPath.Path,
itemContainerContext);
if (itemContainerContext.HasErrors())
{
// Set the current working drive back to the previous
// one in case it was changed.
CurrentDrive = previousWorkingDrive;
itemContainerContext.ThrowFirstErrorOrDoNothing();
}
}
catch (NotSupportedException)
{
if (currentPath.Length == 0)
{
// Treat this as a container because providers that only
// support the ContainerCmdletProvider interface are really
// containers at their root.
isContainer = true;
}
}
finally
{
itemContainerContext.RemoveStopReferral();
}
if (isContainer)
{
if (foundContainer)
{
// The path resolved to more than one container
// Set the current working drive back to the previous
// one in case it was changed.
CurrentDrive = previousWorkingDrive;
throw
PSTraceSource.NewArgumentException(
nameof(path),
SessionStateStrings.PathResolvedToMultiple,
originalPath);
}
else
{
// Set the path to use
path = currentPath;
// Mark it as a container
pathIsContainer = true;
// Mark whether or not it was provider-qualified
pathIsProviderQualifiedPath = currentPathisProviderQualifiedPath;
// Mark that we have already found one container. Finding additional
// containers should be an error.
foundContainer = true;
}
}
}
if (pathIsContainer)
{
// Remove the root slash since it is implied that the
// current working directory is relative to the root.
if (!LocationGlobber.IsProviderDirectPath(path) &&
path.StartsWith(StringLiterals.DefaultPathSeparator) &&
!pathIsProviderQualifiedPath)
{
path = path.Substring(1);
}
s_tracer.WriteLine(
"New working path = {0}",
path);
CurrentDrive.CurrentLocation = path;
}
else
{
// Set the current working drive back to the previous
// one in case it was changed.
CurrentDrive = previousWorkingDrive;
throw
new ItemNotFoundException(
originalPath,
"PathNotFound",
SessionStateStrings.PathNotFound);
}
// Now make sure the current drive is set in the provider's
// current working drive hashtable
ProvidersCurrentWorkingDrive[CurrentDrive.Provider] =
CurrentDrive;
// Set the $PWD variable to the new location
this.SetVariable(SpecialVariables.PWDVarPath, this.CurrentLocation, false, true, CommandOrigin.Internal);
// If an action has been defined for location changes, invoke it now.
if (PublicSessionState.InvokeCommand.LocationChangedAction != null)
{
var eventArgs = new LocationChangedEventArgs(PublicSessionState, current, CurrentLocation);
PublicSessionState.InvokeCommand.LocationChangedAction.Invoke(ExecutionContext.CurrentRunspace, eventArgs);
s_tracer.WriteLine("Invoked LocationChangedAction");
}
return this.CurrentLocation;
}
/// <summary>
/// Determines if the specified path is the current working directory
/// or a parent of the current working directory.
/// </summary>
/// <param name="path">
/// A monad namespace absolute or relative path.
/// </param>
/// <param name="context">
/// The context the provider uses when performing the operation.
/// </param>
/// <returns>
/// true, if the path is the current working directory or a parent of the current
/// working directory. false, otherwise.
/// </returns>
/// <exception cref="ArgumentNullException">
/// If <paramref name="path"/> is null.
/// </exception>
/// <exception cref="ProviderNotFoundException">
/// If the path is a provider-qualified path for a provider that is
/// not loaded into the system.
/// </exception>
/// <exception cref="DriveNotFoundException">
/// If the <paramref name="path"/> refers to a drive that could not be found.
/// </exception>
/// <exception cref="ProviderInvocationException">
/// If the provider used to build the path threw an exception.
/// </exception>
/// <exception cref="NotSupportedException">
/// If the provider that the <paramref name="path"/> represents is not a NavigationCmdletProvider
/// or ContainerCmdletProvider.
/// </exception>
/// <exception cref="InvalidOperationException">
/// If the <paramref name="path"/> starts with "~" and the home location is not set for
/// the provider.
/// </exception>
/// <exception cref="ProviderInvocationException">
/// If the provider specified by <paramref name="providerId"/> threw an
/// exception when its GetParentPath or MakePath was called while
/// processing the <paramref name="path"/>.
/// </exception>
internal bool IsCurrentLocationOrAncestor(string path, CmdletProviderContext context)
{
bool result = false;
if (path == null)
{
throw PSTraceSource.NewArgumentNullException(nameof(path));
}
PSDriveInfo drive = null;
ProviderInfo provider = null;
string providerSpecificPath =
Globber.GetProviderPath(
path,
context,
out provider,
out drive);
if (drive != null)
{
s_tracer.WriteLine("Tracing drive");
drive.Trace();
}
Dbg.Diagnostics.Assert(
providerSpecificPath != null,
"There should always be a way to generate a provider path for a " +
"given path");
if (drive != null)
{
context.Drive = drive;
}
// Check to see if the path that was specified is within the current
// working drive
if (drive == CurrentDrive)
{
// The path needs to be normalized to get rid of relative path tokens
// so they don't interfere with our path comparisons below
CmdletProviderContext normalizePathContext
= new CmdletProviderContext(context);
try
{
providerSpecificPath = NormalizeRelativePath(path, null, normalizePathContext);
}
catch (NotSupportedException)
{
// Since the provider does not support normalizing the path, just
// use the path we currently have.
}
catch (LoopFlowException)
{
throw;
}
catch (PipelineStoppedException)
{
throw;
}
catch (ActionPreferenceStopException)
{
throw;
}
finally
{
normalizePathContext.RemoveStopReferral();
}
if (normalizePathContext.HasErrors())
{
normalizePathContext.ThrowFirstErrorOrDoNothing();
}
s_tracer.WriteLine("Provider path = {0}", providerSpecificPath);
// Get the current working directory provider specific path
PSDriveInfo currentWorkingDrive = null;
ProviderInfo currentDriveProvider = null;
string currentWorkingPath =
Globber.GetProviderPath(
".",
context,
out currentDriveProvider,
out currentWorkingDrive);
Dbg.Diagnostics.Assert(
currentWorkingDrive == CurrentDrive,
"The current working drive should be the CurrentDrive.");
s_tracer.WriteLine(
"Current working path = {0}",
currentWorkingPath);
// See if the path is the current working directory or a parent
// of the current working directory
s_tracer.WriteLine(
"Comparing {0} to {1}",
providerSpecificPath,
currentWorkingPath);
if (string.Equals(providerSpecificPath, currentWorkingPath, StringComparison.OrdinalIgnoreCase))
{
// The path is the current working directory so
// return true
s_tracer.WriteLine("The path is the current working directory");
result = true;
}
else
{
// Check to see if the specified path is a parent
// of the current working directory
string lockedDirectory = currentWorkingPath;
while (lockedDirectory.Length > 0)
{
// We need to allow the provider to go as far up the tree
// as it can even if that means it has to traverse higher
// than the mount point for this drive. That is
// why we are passing the empty string as the root here.
lockedDirectory =
GetParentPath(
drive.Provider,
lockedDirectory,
string.Empty,
context);
s_tracer.WriteLine(
"Comparing {0} to {1}",
lockedDirectory,
providerSpecificPath);
if (string.Equals(lockedDirectory, providerSpecificPath, StringComparison.OrdinalIgnoreCase))
{
// The path is a parent of the current working
// directory
s_tracer.WriteLine(
"The path is a parent of the current working directory: {0}",
lockedDirectory);
result = true;
break;
}
}
}
}
else
{
s_tracer.WriteLine("Drives are not the same");
}
return result;
}
#endregion Current working directory/drive
#region push-Pop current working directory
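// These members implement the named location-stack support surfaced by
// cmdlets such as Push-Location, Pop-Location and Get-Location -Stack.
// Stacks live in _workingLocationStack keyed by stack name, with "default"
// as the initial default stack.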
/// <summary>
/// Location history for Set-Location that supports Undo/Redo using bounded stacks.
/// </summary>
private readonly HistoryStack<PathInfo> _setLocationHistory;
/// <summary>
/// A stack of the most recently pushed locations.
/// </summary>
private readonly Dictionary<string, Stack<PathInfo>> _workingLocationStack;
private const string startingDefaultStackName = "default";
/// <summary>
/// The name of the default location stack.
/// </summary>
private string _defaultStackName = startingDefaultStackName;
/// <summary>
/// Pushes the current location onto the working
/// location stack so that it can be retrieved later.
/// </summary>
/// <param name="stackName">
/// The ID of the stack to push the location on. If
/// it is null or empty the default stack is used.
/// </param>
internal void PushCurrentLocation(string stackName)
{
if (string.IsNullOrEmpty(stackName))
{
stackName = _defaultStackName;
}
// Get the location stack from the hashtable
Stack<PathInfo> locationStack = null;
if (!_workingLocationStack.TryGetValue(stackName, out locationStack))
{
locationStack = new Stack<PathInfo>();
_workingLocationStack[stackName] = locationStack;
}
// Push the directory/drive pair onto the stack
var pushPathInfo = GetNewPushPathInfo();
locationStack.Push(pushPathInfo);
}
private PathInfo GetNewPushPathInfo()
{
// Create a new instance of the directory/drive pair
ProviderInfo provider = CurrentDrive.Provider;
string mshQualifiedPath =
LocationGlobber.GetMshQualifiedPath(CurrentDrive.CurrentLocation, CurrentDrive);
PathInfo newPushLocation =
new PathInfo(
CurrentDrive,
provider,
mshQualifiedPath,
new SessionState(this));
s_tracer.WriteLine(
"Pushing drive: {0} directory: {1}",
CurrentDrive.Name,
mshQualifiedPath);
return newPushLocation;
}
/// <summary>
/// Resets the current working drive and directory to the first
/// entry on the working directory stack and removes that entry
/// from the stack.
/// </summary>
/// <param name="stackName">
/// The ID of the stack to pop the location from. If it is null or
/// empty the default stack is used.
/// </param>
/// <returns>
/// A PathInfo object representing the location that was popped
/// from the location stack and set as the new location.
/// </returns>
/// <exception cref="ArgumentException">
/// If the path on the stack does not exist, is not a container, or
/// resolved to multiple containers.
/// or
/// If <paramref name="stackName"/> contains wildcard characters and resolves
/// to multiple location stacks.
/// or
/// A stack was not found with the specified name.
/// </exception>
/// <exception cref="ProviderNotFoundException">
/// If the path on the stack refers to a provider that does not exist.
/// </exception>
/// <exception cref="DriveNotFoundException">
/// If the path on the stack refers to a drive that does not exist.
/// </exception>
/// <exception cref="ProviderInvocationException">
/// If the provider associated with the path on the stack threw an
/// exception.
/// </exception>
internal PathInfo PopLocation(string stackName)
{
if (string.IsNullOrEmpty(stackName))
{
stackName = _defaultStackName;
}
if (WildcardPattern.ContainsWildcardCharacters(stackName))
{
// Need to glob the stack name, but it can only resolve to a single stack.
bool haveMatch = false;
WildcardPattern stackNamePattern =
WildcardPattern.Get(stackName, WildcardOptions.IgnoreCase);
foreach (string key in _workingLocationStack.Keys)
{
if (stackNamePattern.IsMatch(key))
{
if (haveMatch)
{
throw
PSTraceSource.NewArgumentException(
nameof(stackName),
SessionStateStrings.StackNameResolvedToMultiple,
stackName);
}
haveMatch = true;
stackName = key;
}
}
}
PathInfo result = CurrentLocation;
try
{
Stack<PathInfo> locationStack = null;
if (!_workingLocationStack.TryGetValue(stackName, out locationStack))
{
if (!string.Equals(stackName, startingDefaultStackName, StringComparison.OrdinalIgnoreCase))
{
throw
PSTraceSource.NewArgumentException(
nameof(stackName),
SessionStateStrings.StackNotFound,
stackName);
}
return null;
}
PathInfo poppedWorkingDirectory = locationStack.Pop();
Dbg.Diagnostics.Assert(
poppedWorkingDirectory != null,
"All items in the workingLocationStack should be " +
"of type PathInfo");
string newPath =
LocationGlobber.GetMshQualifiedPath(
WildcardPattern.Escape(poppedWorkingDirectory.Path),
poppedWorkingDirectory.GetDrive());
result = SetLocation(newPath);
if (locationStack.Count == 0 &&
!string.Equals(stackName, startingDefaultStackName, StringComparison.OrdinalIgnoreCase))
{
// Remove the stack from the stack list if it
// no longer contains any paths.
_workingLocationStack.Remove(stackName);
}
}
catch (InvalidOperationException)
{
// This is a no-op. We stay with the current working
// directory.
}
return result;
}
/// <summary>
/// Gets the monad namespace paths for all the directories that are
/// pushed on the working directory stack.
/// </summary>
/// <param name="stackName">
/// The ID of the location stack to retrieve. If it is
/// null or empty, the default stack is used.
/// </param>
/// <returns>
/// The PathInfoStack representing the location stack for the specified
/// stack ID.
/// </returns>
/// <exception cref="ArgumentException">
/// If no location stack named <paramref name="stackName"/> exists, unless
/// the default stack is requested.
/// </exception>
internal PathInfoStack LocationStack(string stackName)
{
if (string.IsNullOrEmpty(stackName))
{
stackName = _defaultStackName;
}
Stack<PathInfo> locationStack = null;
if (!_workingLocationStack.TryGetValue(stackName, out locationStack))
{
// If the request was for the default stack, but it doesn't
// yet exist, create a dummy stack and return it.
if (string.Equals(
stackName,
startingDefaultStackName,
StringComparison.OrdinalIgnoreCase))
{
locationStack = new Stack<PathInfo>();
}
else
{
throw PSTraceSource.NewArgumentException(nameof(stackName));
}
}
PathInfoStack result = new PathInfoStack(stackName, locationStack);
return result;
}
/// <summary>
/// Sets the default stack ID to the specified stack ID.
/// </summary>
/// <param name="stackName">
/// The stack ID to be used as the default.
/// </param>
/// <returns>
/// The PathInfoStack for the new default stack or null if the
/// stack does not exist yet.
/// </returns>
/// <exception cref="ItemNotFoundException">
/// If <paramref name="stackName"/> does not exist as a location stack.
/// </exception>
internal PathInfoStack SetDefaultLocationStack(string stackName)
{
if (string.IsNullOrEmpty(stackName))
{
stackName = startingDefaultStackName;
}
if (!_workingLocationStack.ContainsKey(stackName))
{
if (string.Equals(stackName, startingDefaultStackName, StringComparison.OrdinalIgnoreCase))
{
// Since the "default" stack must always exist, create it here
return new PathInfoStack(startingDefaultStackName, new Stack<PathInfo>());
}
ItemNotFoundException itemNotFound =
new ItemNotFoundException(
stackName,
"StackNotFound",
SessionStateStrings.PathNotFound);
throw itemNotFound;
}
_defaultStackName = stackName;
Stack<PathInfo> locationStack = _workingLocationStack[_defaultStackName];
if (locationStack != null)
{
return new PathInfoStack(_defaultStackName, locationStack);
}
return null;
}
#endregion push-Pop current working directory
}
/// <summary>
/// Event argument for the LocationChangedAction containing
/// information about the old location we were in and the new
/// location we changed to.
/// </summary>
public class LocationChangedEventArgs : EventArgs
{
/// <summary>
/// Initializes a new instance of the LocationChangedEventArgs class.
/// </summary>
/// <param name="sessionState">
/// The public session state instance associated with this runspace.
/// </param>
/// <param name="oldPath">
/// The path we changed locations from.
/// </param>
/// <param name="newPath">
/// The path we changed locations to.
/// </param>
internal LocationChangedEventArgs(SessionState sessionState, PathInfo oldPath, PathInfo newPath)
{
SessionState = sessionState;
OldPath = oldPath;
NewPath = newPath;
}
/// <summary>
/// Gets the path we changed location from.
/// </summary>
public PathInfo OldPath { get; internal set; }
/// <summary>
/// Gets the path we changed location to.
/// </summary>
public PathInfo NewPath { get; internal set; }
/// <summary>
/// Gets the session state instance for the current runspace.
/// </summary>
public SessionState SessionState { get; internal set; }
}
}
| |
/*
* Qa full api
*
* No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
*
* OpenAPI spec version: all
*
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
namespace HostMe.Sdk.Model
{
/// <summary>
/// TableUserInfo
/// </summary>
[DataContract]
public partial class TableUserInfo : IEquatable<TableUserInfo>, IValidatableObject
{
/// <summary>
/// Initializes a new instance of the <see cref="TableUserInfo" /> class.
/// </summary>
/// <param name="Id">Id.</param>
/// <param name="GroupSize">GroupSize.</param>
/// <param name="TableNumber">TableNumber.</param>
/// <param name="CustomerName">CustomerName.</param>
/// <param name="Email">Email.</param>
/// <param name="Phone">Phone.</param>
/// <param name="SpecialRequests">SpecialRequests.</param>
/// <param name="Areas">Areas.</param>
/// <param name="ReservationTime">ReservationTime.</param>
/// <param name="HighChair">HighChair.</param>
/// <param name="Stroller">Stroller.</param>
/// <param name="Booth">Booth.</param>
/// <param name="HighTop">HighTop.</param>
/// <param name="Table">Table.</param>
/// <param name="Party">Party.</param>
/// <param name="PartyTypes">PartyTypes.</param>
/// <param name="CustomerProfile">CustomerProfile.</param>
/// <param name="EstimatedTurnOverTime">EstimatedTurnOverTime.</param>
public TableUserInfo(string Id = null, int? GroupSize = null, string TableNumber = null, string CustomerName = null, string Email = null, string Phone = null, string SpecialRequests = null, string Areas = null, DateTimeOffset? ReservationTime = null, bool? HighChair = null, bool? Stroller = null, bool? Booth = null, bool? HighTop = null, bool? Table = null, bool? Party = null, List<string> PartyTypes = null, ProfileData CustomerProfile = null, double? EstimatedTurnOverTime = null)
{
this.Id = Id;
this.GroupSize = GroupSize;
this.TableNumber = TableNumber;
this.CustomerName = CustomerName;
this.Email = Email;
this.Phone = Phone;
this.SpecialRequests = SpecialRequests;
this.Areas = Areas;
this.ReservationTime = ReservationTime;
this.HighChair = HighChair;
this.Stroller = Stroller;
this.Booth = Booth;
this.HighTop = HighTop;
this.Table = Table;
this.Party = Party;
this.PartyTypes = PartyTypes;
this.CustomerProfile = CustomerProfile;
this.EstimatedTurnOverTime = EstimatedTurnOverTime;
}
/// <summary>
/// Gets or Sets Id
/// </summary>
[DataMember(Name="id", EmitDefaultValue=true)]
public string Id { get; set; }
/// <summary>
/// Gets or Sets GroupSize
/// </summary>
[DataMember(Name="groupSize", EmitDefaultValue=true)]
public int? GroupSize { get; set; }
/// <summary>
/// Gets or Sets TableNumber
/// </summary>
[DataMember(Name="tableNumber", EmitDefaultValue=true)]
public string TableNumber { get; set; }
/// <summary>
/// Gets or Sets CustomerName
/// </summary>
[DataMember(Name="customerName", EmitDefaultValue=true)]
public string CustomerName { get; set; }
/// <summary>
/// Gets or Sets Email
/// </summary>
[DataMember(Name="email", EmitDefaultValue=true)]
public string Email { get; set; }
/// <summary>
/// Gets or Sets Phone
/// </summary>
[DataMember(Name="phone", EmitDefaultValue=true)]
public string Phone { get; set; }
/// <summary>
/// Gets or Sets SpecialRequests
/// </summary>
[DataMember(Name="specialRequests", EmitDefaultValue=true)]
public string SpecialRequests { get; set; }
/// <summary>
/// Gets or Sets Areas
/// </summary>
[DataMember(Name="areas", EmitDefaultValue=true)]
public string Areas { get; set; }
/// <summary>
/// Gets or Sets ReservationTime
/// </summary>
[DataMember(Name="reservationTime", EmitDefaultValue=true)]
public DateTimeOffset? ReservationTime { get; set; }
/// <summary>
/// Gets or Sets HighChair
/// </summary>
[DataMember(Name="highChair", EmitDefaultValue=true)]
public bool? HighChair { get; set; }
/// <summary>
/// Gets or Sets Stroller
/// </summary>
[DataMember(Name="stroller", EmitDefaultValue=true)]
public bool? Stroller { get; set; }
/// <summary>
/// Gets or Sets Booth
/// </summary>
[DataMember(Name="booth", EmitDefaultValue=true)]
public bool? Booth { get; set; }
/// <summary>
/// Gets or Sets HighTop
/// </summary>
[DataMember(Name="highTop", EmitDefaultValue=true)]
public bool? HighTop { get; set; }
/// <summary>
/// Gets or Sets Table
/// </summary>
[DataMember(Name="table", EmitDefaultValue=true)]
public bool? Table { get; set; }
/// <summary>
/// Gets or Sets Party
/// </summary>
[DataMember(Name="party", EmitDefaultValue=true)]
public bool? Party { get; set; }
/// <summary>
/// Gets or Sets PartyTypes
/// </summary>
[DataMember(Name="partyTypes", EmitDefaultValue=true)]
public List<string> PartyTypes { get; set; }
/// <summary>
/// Gets or Sets CustomerProfile
/// </summary>
[DataMember(Name="customerProfile", EmitDefaultValue=true)]
public ProfileData CustomerProfile { get; set; }
/// <summary>
/// Gets or Sets EstimatedTurnOverTime
/// </summary>
[DataMember(Name="estimatedTurnOverTime", EmitDefaultValue=true)]
public double? EstimatedTurnOverTime { get; set; }
/// <summary>
/// Returns the string representation of the object
/// </summary>
/// <returns>String representation of the object</returns>
public override string ToString()
{
var sb = new StringBuilder();
sb.Append("class TableUserInfo {\n");
sb.Append(" Id: ").Append(Id).Append("\n");
sb.Append(" GroupSize: ").Append(GroupSize).Append("\n");
sb.Append(" TableNumber: ").Append(TableNumber).Append("\n");
sb.Append(" CustomerName: ").Append(CustomerName).Append("\n");
sb.Append(" Email: ").Append(Email).Append("\n");
sb.Append(" Phone: ").Append(Phone).Append("\n");
sb.Append(" SpecialRequests: ").Append(SpecialRequests).Append("\n");
sb.Append(" Areas: ").Append(Areas).Append("\n");
sb.Append(" ReservationTime: ").Append(ReservationTime).Append("\n");
sb.Append(" HighChair: ").Append(HighChair).Append("\n");
sb.Append(" Stroller: ").Append(Stroller).Append("\n");
sb.Append(" Booth: ").Append(Booth).Append("\n");
sb.Append(" HighTop: ").Append(HighTop).Append("\n");
sb.Append(" Table: ").Append(Table).Append("\n");
sb.Append(" Party: ").Append(Party).Append("\n");
sb.Append(" PartyTypes: ").Append(PartyTypes).Append("\n");
sb.Append(" CustomerProfile: ").Append(CustomerProfile).Append("\n");
sb.Append(" EstimatedTurnOverTime: ").Append(EstimatedTurnOverTime).Append("\n");
sb.Append("}\n");
return sb.ToString();
}
/// <summary>
/// Returns the JSON string representation of the object
/// </summary>
/// <returns>JSON string representation of the object</returns>
public string ToJson()
{
return JsonConvert.SerializeObject(this, Formatting.Indented);
}
/// <summary>
/// Returns true if objects are equal
/// </summary>
/// <param name="obj">Object to be compared</param>
/// <returns>Boolean</returns>
public override bool Equals(object obj)
{
// credit: http://stackoverflow.com/a/10454552/677735
return this.Equals(obj as TableUserInfo);
}
/// <summary>
/// Returns true if TableUserInfo instances are equal
/// </summary>
/// <param name="other">Instance of TableUserInfo to be compared</param>
/// <returns>Boolean</returns>
public bool Equals(TableUserInfo other)
{
// credit: http://stackoverflow.com/a/10454552/677735
if (other == null)
return false;
return
(
this.Id == other.Id ||
this.Id != null &&
this.Id.Equals(other.Id)
) &&
(
this.GroupSize == other.GroupSize ||
this.GroupSize != null &&
this.GroupSize.Equals(other.GroupSize)
) &&
(
this.TableNumber == other.TableNumber ||
this.TableNumber != null &&
this.TableNumber.Equals(other.TableNumber)
) &&
(
this.CustomerName == other.CustomerName ||
this.CustomerName != null &&
this.CustomerName.Equals(other.CustomerName)
) &&
(
this.Email == other.Email ||
this.Email != null &&
this.Email.Equals(other.Email)
) &&
(
this.Phone == other.Phone ||
this.Phone != null &&
this.Phone.Equals(other.Phone)
) &&
(
this.SpecialRequests == other.SpecialRequests ||
this.SpecialRequests != null &&
this.SpecialRequests.Equals(other.SpecialRequests)
) &&
(
this.Areas == other.Areas ||
this.Areas != null &&
this.Areas.Equals(other.Areas)
) &&
(
this.ReservationTime == other.ReservationTime ||
this.ReservationTime != null &&
this.ReservationTime.Equals(other.ReservationTime)
) &&
(
this.HighChair == other.HighChair ||
this.HighChair != null &&
this.HighChair.Equals(other.HighChair)
) &&
(
this.Stroller == other.Stroller ||
this.Stroller != null &&
this.Stroller.Equals(other.Stroller)
) &&
(
this.Booth == other.Booth ||
this.Booth != null &&
this.Booth.Equals(other.Booth)
) &&
(
this.HighTop == other.HighTop ||
this.HighTop != null &&
this.HighTop.Equals(other.HighTop)
) &&
(
this.Table == other.Table ||
this.Table != null &&
this.Table.Equals(other.Table)
) &&
(
this.Party == other.Party ||
this.Party != null &&
this.Party.Equals(other.Party)
) &&
(
this.PartyTypes == other.PartyTypes ||
this.PartyTypes != null &&
this.PartyTypes.SequenceEqual(other.PartyTypes)
) &&
(
this.CustomerProfile == other.CustomerProfile ||
this.CustomerProfile != null &&
this.CustomerProfile.Equals(other.CustomerProfile)
) &&
(
this.EstimatedTurnOverTime == other.EstimatedTurnOverTime ||
this.EstimatedTurnOverTime != null &&
this.EstimatedTurnOverTime.Equals(other.EstimatedTurnOverTime)
);
}
/// <summary>
/// Gets the hash code
/// </summary>
/// <returns>Hash code</returns>
public override int GetHashCode()
{
// credit: http://stackoverflow.com/a/263416/677735
unchecked // Overflow is fine, just wrap
{
int hash = 41;
// Suitable nullity checks etc, of course :)
if (this.Id != null)
hash = hash * 59 + this.Id.GetHashCode();
if (this.GroupSize != null)
hash = hash * 59 + this.GroupSize.GetHashCode();
if (this.TableNumber != null)
hash = hash * 59 + this.TableNumber.GetHashCode();
if (this.CustomerName != null)
hash = hash * 59 + this.CustomerName.GetHashCode();
if (this.Email != null)
hash = hash * 59 + this.Email.GetHashCode();
if (this.Phone != null)
hash = hash * 59 + this.Phone.GetHashCode();
if (this.SpecialRequests != null)
hash = hash * 59 + this.SpecialRequests.GetHashCode();
if (this.Areas != null)
hash = hash * 59 + this.Areas.GetHashCode();
if (this.ReservationTime != null)
hash = hash * 59 + this.ReservationTime.GetHashCode();
if (this.HighChair != null)
hash = hash * 59 + this.HighChair.GetHashCode();
if (this.Stroller != null)
hash = hash * 59 + this.Stroller.GetHashCode();
if (this.Booth != null)
hash = hash * 59 + this.Booth.GetHashCode();
if (this.HighTop != null)
hash = hash * 59 + this.HighTop.GetHashCode();
if (this.Table != null)
hash = hash * 59 + this.Table.GetHashCode();
if (this.Party != null)
hash = hash * 59 + this.Party.GetHashCode();
if (this.PartyTypes != null)
hash = hash * 59 + this.PartyTypes.GetHashCode();
if (this.CustomerProfile != null)
hash = hash * 59 + this.CustomerProfile.GetHashCode();
if (this.EstimatedTurnOverTime != null)
hash = hash * 59 + this.EstimatedTurnOverTime.GetHashCode();
return hash;
}
}
public IEnumerable<ValidationResult> Validate(ValidationContext validationContext)
{
yield break;
}
}
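// ---------------------------------------------------------------------------
// A minimal usage sketch added for illustration (not part of the generated
// SDK). It shows how the optional constructor parameters, ToJson() and the
// value-based Equals() defined above behave. All property values are invented.
internal static class TableUserInfoExample
{
    public static void Run()
    {
        var walkIn = new TableUserInfo(
            Id: "w-42",
            GroupSize: 4,
            CustomerName: "Sample Customer",
            HighChair: true,
            PartyTypes: new List<string> { "birthday" });
        // Serialized (indented) using the [DataMember] names: id, groupSize, ...
        Console.WriteLine(walkIn.ToJson());
        // Equality is value based, so an identical copy compares equal.
        var copy = new TableUserInfo(
            Id: "w-42",
            GroupSize: 4,
            CustomerName: "Sample Customer",
            HighChair: true,
            PartyTypes: new List<string> { "birthday" });
        Console.WriteLine(walkIn.Equals(copy)); // True
    }
}
// ---------------------------------------------------------------------------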
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.Threading;
using System.Diagnostics;
using Xunit;
namespace System.Data.SqlClient.ManualTesting.Tests
{
public class RandomStressTest
{
private static readonly TimeSpan TimeLimitDefault = new TimeSpan(0, 0, 10);
private const int ThreadCountDefault = 4;
private const int IterationsPerTableDefault = 50;
private const int MaxColumns = 5000;
private const int MaxRows = 100;
private const int MaxTotal = MaxColumns * 10;
private string[] _connectionStrings;
private string _operationCanceledErrorMessage;
private string _severeErrorMessage;
private SqlRandomTypeInfoCollection _katmaiTypes;
private ManualResetEvent _endEvent;
private int _runningThreads;
private long _totalValues;
private long _totalTables;
private long _totalIterations;
private long _totalTicks;
private RandomizerPool _randPool;
[ConditionalFact(typeof(DataTestUtility),nameof(DataTestUtility.AreConnStringsSetup))]
public void TestMain()
{
_operationCanceledErrorMessage = SystemDataResourceManager.Instance.SQL_OperationCancelled;
_severeErrorMessage = SystemDataResourceManager.Instance.SQL_SevereError;
// pure random
_randPool = new RandomizerPool();
SqlConnectionStringBuilder regularConnectionString = new SqlConnectionStringBuilder(DataTestUtility.TcpConnStr);
regularConnectionString.MultipleActiveResultSets = false;
List<string> connStrings = new List<string>();
connStrings.Add(regularConnectionString.ToString());
connStrings.Add(regularConnectionString.ToString());
regularConnectionString.MultipleActiveResultSets = true;
connStrings.Add(regularConnectionString.ToString());
_connectionStrings = connStrings.ToArray();
_katmaiTypes = SqlRandomTypeInfoCollection.CreateSql2008Collection();
_endEvent = new ManualResetEvent(false);
if (_randPool.ReproMode)
{
_runningThreads = 1;
TestThread();
}
else
{
for (int tcount = 0; tcount < ThreadCountDefault; tcount++)
{
Thread t = new Thread(TestThread);
t.Start();
}
}
}
private void NextConnection(ref SqlConnection con, Randomizer rand)
{
if (con != null)
{
con.Close();
}
string connString = _connectionStrings[rand.Next(_connectionStrings.Length)];
con = new SqlConnection(connString);
con.Open();
}
private void TestThread()
{
try
{
using (var rootScope = _randPool.RootScope<SqlRandomizer>())
{
Stopwatch watch = new Stopwatch();
SqlConnection con = null;
try
{
NextConnection(ref con, rootScope.Current);
if (_randPool.ReproMode)
{
using (var testScope = rootScope.NewScope<SqlRandomizer>())
{
// run only once if repro file is provided
RunTest(con, testScope, _katmaiTypes, watch);
}
}
else
{
while (watch.Elapsed < TimeLimitDefault)
{
using (var testScope = rootScope.NewScope<SqlRandomizer>())
{
RunTest(con, testScope, _katmaiTypes, watch);
}
if (rootScope.Current.Next(100) == 0)
{
// replace the connection
NextConnection(ref con, rootScope.Current);
}
}
}
}
finally
{
if (con != null)
{
con.Close();
}
}
}
}
catch (Exception e)
{
Console.WriteLine(e);
}
finally
{
if (Interlocked.Decrement(ref _runningThreads) == 0)
_endEvent.Set();
}
}
private void RunTest(SqlConnection con, RandomizerPool.Scope<SqlRandomizer> testScope, SqlRandomTypeInfoCollection types, Stopwatch watch)
{
Exception pendingException = null;
string tempTableName = null;
try
{
// select number of columns to use and null bitmap to test
int columnsCount, rowsCount;
testScope.Current.NextTableDimentions(MaxRows, MaxColumns, MaxTotal, out rowsCount, out columnsCount);
SqlRandomTable table = SqlRandomTable.Create(testScope.Current, types, columnsCount, rowsCount, createPrimaryKeyColumn: true);
long total = (long)rowsCount * columnsCount;
Interlocked.Add(ref _totalValues, total);
Interlocked.Increment(ref _totalTables);
tempTableName = SqlRandomizer.GenerateUniqueTempTableNameForSqlServer();
table.GenerateTableOnServer(con, tempTableName);
long prevTicks = watch.ElapsedTicks;
watch.Start();
if (_randPool.ReproMode)
{
// perform one iteration only
using (var iterationScope = testScope.NewScope<SqlRandomizer>())
{
RunTestIteration(con, iterationScope.Current, table, tempTableName);
Interlocked.Increment(ref _totalIterations);
}
}
else
{
// continue with normal loop
for (int i = 0; i < IterationsPerTableDefault && watch.Elapsed < TimeLimitDefault; i++)
{
using (var iterationScope = testScope.NewScope<SqlRandomizer>())
{
RunTestIteration(con, iterationScope.Current, table, tempTableName);
Interlocked.Increment(ref _totalIterations);
}
}
}
watch.Stop();
Interlocked.Add(ref _totalTicks, watch.ElapsedTicks - prevTicks);
}
catch (Exception e)
{
pendingException = e;
throw;
}
finally
{
// if an exception is pending, keep the temp table for troubleshooting;
// the thread is going down anyway and the connection will be closed
if (pendingException == null && tempTableName != null)
{
// destroy the temp table to free resources on the server
SqlCommand cmd = con.CreateCommand();
cmd.CommandType = CommandType.Text;
cmd.CommandText = "DROP TABLE " + tempTableName;
try
{
cmd.ExecuteNonQuery();
}
catch
{
}
}
}
}
private void RunTestIteration(SqlConnection con, SqlRandomizer rand, SqlRandomTable table, string tableName)
{
// random list of columns
int columnCount = table.Columns.Count;
int[] columnIndices = rand.NextIndices(columnCount);
int selectedCount = rand.NextIntInclusive(1, maxValueInclusive: columnCount);
StringBuilder selectBuilder = new StringBuilder();
table.GenerateSelectFromTableTSql(tableName, selectBuilder, columnIndices, 0, selectedCount);
SqlCommand cmd = con.CreateCommand();
cmd.CommandType = CommandType.Text;
cmd.CommandText = selectBuilder.ToString();
bool cancel = rand.Next(100) == 0; // in 1% of the cases, call Cancel
if (cancel)
{
int cancelAfterMilliseconds = rand.Next(5);
int cancelAfterSpinCount = rand.Next(1000);
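// queue a background work item that waits a short random time and then cancels the command;
// the reader loop below clears 'cancel' once it completes, which also tells this work item
// to stand down if it has not fired yet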
ThreadPool.QueueUserWorkItem((object state) =>
{
for (int i = 0; cancel && i < cancelAfterMilliseconds; i++)
{
Thread.Sleep(1);
}
if (cancel && cancelAfterSpinCount > 0)
{
SpinWait.SpinUntil(() => false, new TimeSpan(cancelAfterSpinCount));
}
if (cancel)
{
cmd.Cancel();
}
});
}
int readerRand = rand.NextIntInclusive(0, maxValueInclusive: 256);
CommandBehavior readerBehavior = CommandBehavior.Default;
if (readerRand % 10 == 0)
readerBehavior = CommandBehavior.SequentialAccess;
try
{
using (SqlDataReader reader = cmd.ExecuteReader(readerBehavior))
{
int row = 0;
while (reader.Read())
{
int rowRand = rand.NextIntInclusive();
if (rowRand % 1000 == 0)
{
// abandon this reader
break;
}
else if (rowRand % 25 == 0)
{
// skip the row
row++;
continue;
}
IList<object> expectedRow = table[row];
for (int c = 0; c < reader.FieldCount; c++)
{
if (rand.NextIntInclusive(0, maxValueInclusive: 10) == 0)
{
// skip the column
continue;
}
int expectedTableColumn = columnIndices[c];
object expectedValue = expectedRow[expectedTableColumn];
if (table.Columns[expectedTableColumn].CanCompareValues)
{
Assert.True(expectedValue != null, "FAILED: Null is expected with CanCompareValues");
// read the value same way it was written
object actualValue = table.Columns[expectedTableColumn].Read(reader, c, expectedValue.GetType());
Assert.True(table.Columns[expectedTableColumn].CompareValues(expectedValue, actualValue),
string.Format("FAILED: Data Comparison Failure:\n{0}", table.Columns[expectedTableColumn].BuildErrorMessage(expectedValue, actualValue)));
}
}
row++;
}
}
// keep last - this will stop the cancel task, if it is still active
cancel = false;
}
catch (SqlException e)
{
if (!cancel)
throw;
bool expected = false;
foreach (SqlError error in e.Errors)
{
if (error.Message == _operationCanceledErrorMessage)
{
// ignore this one - expected if canceled
expected = true;
break;
}
else if (error.Message == _severeErrorMessage)
{
// A severe error occurred on the current command. The results, if any, should be discarded.
expected = true;
break;
}
}
if (!expected)
{
// rethrow to the user
foreach (SqlError error in e.Errors)
{
Console.WriteLine("{0} {1}", error.Number, error.Message);
}
throw;
}
}
catch (InvalidOperationException e)
{
bool expected = false;
if (e.Message == _operationCanceledErrorMessage)
{
// "Operation canceled" exception is raised as a SqlException (as one of SqlError objects) and as InvalidOperationException
expected = true;
}
if (!expected)
{
throw;
}
}
}
}
}
| |
// ****************************************************************
// Copyright 2007, Charlie Poole
// This is free software licensed under the NUnit license. You may
// obtain a copy of the license at http://nunit.org/?p=license&r=2.4
// ****************************************************************
using System;
namespace NUnit.Core
{
/// <summary>
/// TestName encapsulates all info needed to identify and
/// locate a test that has been loaded by a runner. It consists
of three components: the simple name of the test, an int
/// id that is unique to a given tree of tests and an int
/// runner id that identifies the particular runner that
/// holds the test instance.
/// </summary>
[Serializable]
public class TestName : ICloneable
{
#region Fields
/// <summary>
/// ID that uniquely identifies the test
/// </summary>
private TestID testID;
private int runnerID;
/// <summary>
/// The simple name of the test, without qualification
/// </summary>
private string name;
/// <summary>
/// The fully qualified name of the test
/// </summary>
private string fullName;
#endregion
#region Properties
/// <summary>
/// Gets or sets the TestID that uniquely identifies this test
/// </summary>
public TestID TestID
{
get { return testID; }
set { testID = value; }
}
/// <summary>
/// Gets or sets the ID of the runner that holds the test instance
/// </summary>
public int RunnerID
{
get { return runnerID; }
set { runnerID = value; }
}
/// <summary>
/// Gets or sets the simple name of the test
/// </summary>
public string Name
{
get { return name; }
set { name = value; }
}
/// <summary>
/// Gets or sets the full (qualified) name of the test
/// </summary>
public string FullName
{
get { return fullName; }
set { fullName = value; }
}
/// <summary>
/// Get the string representation of this test name, incorporating all
/// the components of the name.
/// </summary>
public string UniqueName
{
get
{
if ( this.testID == null )
return string.Format( "[{0}]{1}", this.runnerID, this.fullName );
else
return string.Format( "[{0}-{1}]{2}", this.RunnerID, this.testID, this.fullName );
}
}
#endregion
#region Static Methods
/// <summary>
/// Parse a string representation of a TestName,
/// returning a TestName.
/// </summary>
/// <param name="s">The string to parse</param>
/// <returns>A TestName</returns>
public static TestName Parse( string s )
{
if ( s == null ) throw new ArgumentNullException( "s", "Cannot parse a null string" );
TestName testName = new TestName();
testName.FullName = testName.Name = s;
if ( s.StartsWith( "[" ) )
{
int rbrack = s.IndexOf( "]" );
if ( rbrack < 0 || rbrack == s.Length - 1 )
throw new FormatException( "Invalid TestName format: " + s );
testName.FullName = testName.Name = s.Substring( rbrack + 1 );
int dash = s.IndexOf( "-" );
if ( dash < 0 || dash > rbrack )
testName.RunnerID = Int32.Parse( s.Substring( 1, rbrack - 1 ) );
else
{
testName.RunnerID = Int32.Parse( s.Substring( 1, dash - 1 ) );
testName.TestID = TestID.Parse( s.Substring( dash + 1, rbrack - dash - 1 ) );
}
}
return testName;
}
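// A minimal usage sketch (hypothetical values, not taken from the NUnit sources): Parse accepts
// "name", "[runnerID]name" or "[runnerID-testID]name" - the same forms UniqueName produces - so,
// assuming TestID.Parse round-trips its numeric form, the following holds:
//
//   TestName parsed = TestName.Parse( "[2-15]NUnit.Tests.MyFixture.MyTest" );
//   // parsed.RunnerID == 2, parsed.FullName == "NUnit.Tests.MyFixture.MyTest"
//   // parsed.UniqueName evaluates to "[2-15]NUnit.Tests.MyFixture.MyTest" again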
#endregion
#region Object Overrides
/// <summary>
/// Compares two TestNames for equality
/// </summary>
/// <param name="obj">the other TestID</param>
/// <returns>True if the two TestIDs are equal</returns>
public override bool Equals(object obj)
{
TestName other = obj as TestName;
if ( other == null )
return base.Equals (obj);
return this.TestID == other.testID
&& this.runnerID == other.runnerID
&& this.fullName == other.fullName;
}
/// <summary>
/// Calculates a hash code for this TestName
/// </summary>
/// <returns>The hash code.</returns>
public override int GetHashCode()
{
return this.testID == null ? this.fullName.GetHashCode() : unchecked( this.testID.GetHashCode() + this.fullName.GetHashCode() );
}
/// <summary>
/// Override ToString() to display the UniqueName
/// </summary>
/// <returns>The UniqueName of the test</returns>
public override string ToString()
{
return this.UniqueName;
}
#endregion
#region Operator Overrides
/// <summary>
/// Override the == operator
/// </summary>
/// <param name="name1"></param>
/// <param name="name2"></param>
/// <returns></returns>
public static bool operator ==( TestName name1, TestName name2 )
{
if ( Object.Equals( name1, null ) )
return Object.Equals( name2, null );
return name1.Equals( name2 );
}
/// <summary>
/// Override the != operator
/// </summary>
/// <param name="name1"></param>
/// <param name="name2"></param>
/// <returns></returns>
public static bool operator !=( TestName name1, TestName name2 )
{
return !( name1 == name2 );
}
#endregion
#region ICloneable Implementation
/// <summary>
/// Returns a duplicate of this TestName
/// </summary>
/// <returns>A copy of this TestName</returns>
public object Clone()
{
return this.MemberwiseClone();
}
#endregion
}
}
| |
// ===========================================================
// Copyright (C) 2014-2015 Kendar.org
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
// modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
// is furnished to do so, subject to the following conditions:
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
// BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
// OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ===========================================================
using System.Linq;
using System.Web;
using System;
using System.Web.WebSockets;
using System.Threading.Tasks;
using System.Globalization;
using System.Web.SessionState;
using System.Web.Configuration;
using System.Web.Caching;
using System.Collections;
using System.Web.Instrumentation;
using System.Web.Profile;
using System.Security.Principal;
using System.Collections.Generic;
using Http.Shared.Contexts;
namespace Http.Contexts
{
public class SimpleHttpContext : HttpContextBase, IHttpContext
{
public SimpleHttpContext()
{
RouteParams = new Dictionary<string, object>(StringComparer.OrdinalIgnoreCase);
}
public IHttpContext RootContext
{
get
{
if (Parent == null) return this;
return Parent.RootContext;
}
}
public IHttpContext Parent
{
get { return null; }
}
public void ForceRootDir(string rootDir)
{
RootDir = rootDir;
}
public string RootDir { get; private set; }
public void ForceHeader(string key, string value)
{
var nameValueCollection = _request.Headers;
if (!nameValueCollection.AllKeys.Contains(key))
{
nameValueCollection.Add(key, value);
}
else
{
nameValueCollection.Set(key, value);
}
}
public Dictionary<string, object> RouteParams { get; set; }
public override ISubscriptionToken AddOnRequestCompleted(Action<HttpContextBase> callback)
{
return null;
}
public override void AcceptWebSocketRequest(Func<AspNetWebSocketContext, Task> userFunc)
{
}
public override void AcceptWebSocketRequest(Func<AspNetWebSocketContext, Task> userFunc, AspNetWebSocketOptions options)
{
}
public override void AddError(Exception errorInfo)
{
}
public override void ClearError()
{
}
public override ISubscriptionToken DisposeOnPipelineCompleted(IDisposable target)
{
return null;
}
public override Object GetGlobalResourceObject(String classKey, String resourceKey)
{
return null;
}
public override Object GetGlobalResourceObject(String classKey, String resourceKey, CultureInfo culture)
{
return null;
}
public override Object GetLocalResourceObject(String virtualPath, String resourceKey)
{
return null;
}
public override Object GetLocalResourceObject(String virtualPath, String resourceKey, CultureInfo culture)
{
return null;
}
public override Object GetSection(String sectionName)
{
return null;
}
public override void RemapHandler(IHttpHandler handler)
{
}
public override void RewritePath(String path)
{
}
public override void RewritePath(String path, Boolean rebaseClientPath)
{
}
public override void RewritePath(String filePath, String pathInfo, String queryString)
{
}
public override void RewritePath(String filePath, String pathInfo, String queryString, Boolean setClientFilePath)
{
}
public override void SetSessionStateBehavior(SessionStateBehavior sessionStateBehavior)
{
}
public override Object GetService(Type serviceType)
{
return null;
}
private Exception[] _allErrors = { };
public override Exception[] AllErrors { get { return _allErrors; } }
public void SetAllErrors(Exception[] val)
{
_allErrors = val;
}
public override Boolean AllowAsyncDuringSyncStages { get; set; }
private HttpApplicationStateBase _application;
public override HttpApplicationStateBase Application { get { return _application; } }
public void SetApplication(HttpApplicationStateBase val)
{
_application = val;
}
public override HttpApplication ApplicationInstance { get; set; }
public override AsyncPreloadModeFlags AsyncPreloadMode { get; set; }
private Cache _cache = new Cache();
public override Cache Cache { get { return _cache; } }
public void SetCache(Cache val)
{
_cache = val;
}
private IHttpHandler _currentHandler;
public override IHttpHandler CurrentHandler { get { return _currentHandler; } }
public void SetCurrentHandler(IHttpHandler val)
{
_currentHandler = val;
}
private RequestNotification _currentNotification;
public override RequestNotification CurrentNotification { get { return _currentNotification; } }
public void SetCurrentNotification(RequestNotification val)
{
_currentNotification = val;
}
private Exception _error = new Exception();
public override Exception Error { get { return _error; } }
public void SetError(Exception val)
{
_error = val;
}
public override IHttpHandler Handler { get; set; }
private Boolean _isCustomErrorEnabled;
public override Boolean IsCustomErrorEnabled { get { return _isCustomErrorEnabled; } }
public void SetIsCustomErrorEnabled(Boolean val)
{
_isCustomErrorEnabled = val;
}
private Boolean _isDebuggingEnabled;
public override Boolean IsDebuggingEnabled { get { return _isDebuggingEnabled; } }
public void SetIsDebuggingEnabled(Boolean val)
{
_isDebuggingEnabled = val;
}
private Boolean _isPostNotification;
public override Boolean IsPostNotification { get { return _isPostNotification; } }
public void SetIsPostNotification(Boolean val)
{
_isPostNotification = val;
}
private Boolean _isWebSocketRequest;
public override Boolean IsWebSocketRequest { get { return _isWebSocketRequest; } }
public void SetIsWebSocketRequest(Boolean val)
{
_isWebSocketRequest = val;
}
private Boolean _isWebSocketRequestUpgrading;
public override Boolean IsWebSocketRequestUpgrading { get { return _isWebSocketRequestUpgrading; } }
public void SetIsWebSocketRequestUpgrading(Boolean val)
{
_isWebSocketRequestUpgrading = val;
}
private IDictionary _items = new Dictionary<string, object>();
public override IDictionary Items { get { return _items; } }
public void SetItems(IDictionary val)
{
_items = val;
}
private PageInstrumentationService _pageInstrumentation = new PageInstrumentationService();
public override PageInstrumentationService PageInstrumentation { get { return _pageInstrumentation; } }
public void SetPageInstrumentation(PageInstrumentationService val)
{
_pageInstrumentation = val;
}
private IHttpHandler _previousHandler;
public override IHttpHandler PreviousHandler { get { return _previousHandler; } }
public void SetPreviousHandler(IHttpHandler val)
{
_previousHandler = val;
}
private ProfileBase _profile = new ProfileBase();
public override ProfileBase Profile { get { return _profile; } }
public void SetProfile(ProfileBase val)
{
_profile = val;
}
private HttpRequestBase _request;
public override HttpRequestBase Request { get { return _request; } }
public void SetRequest(HttpRequestBase val)
{
_request = val;
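// mirror the request encoding onto the response so both agree, regardless of whether the
// request or the response is assigned first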
if (_response != null)
{
_response.ContentEncoding = _request.ContentEncoding;
}
}
private HttpResponseBase _response;
public override HttpResponseBase Response { get { return _response; } }
public void SetResponse(HttpResponseBase val)
{
_response = val;
if (_request != null)
{
_response.ContentEncoding = _request.ContentEncoding;
}
}
private HttpServerUtilityBase _server;
public override HttpServerUtilityBase Server { get { return _server; } }
public void SetServer(HttpServerUtilityBase val)
{
_server = val;
}
private HttpSessionStateBase _session = new SimpleHttpSessionState();
public override HttpSessionStateBase Session { get { return _session; } }
public void SetSession(HttpSessionStateBase val)
{
_session = val;
}
public override Boolean SkipAuthorization { get; set; }
private DateTime _timestamp;
public override DateTime Timestamp { get { return _timestamp; } }
public void SetTimestamp(DateTime val)
{
_timestamp = val;
}
public override Boolean ThreadAbortOnTimeout { get; set; }
private TraceContext _trace;
public override TraceContext Trace { get { return _trace; } }
public void SetTrace(TraceContext val)
{
_trace = val;
}
public override IPrincipal User { get; set; }
private String _webSocketNegotiatedProtocol = "";
public override String WebSocketNegotiatedProtocol { get { return _webSocketNegotiatedProtocol; } }
public void SetWebSocketNegotiatedProtocol(String val)
{
_webSocketNegotiatedProtocol = val;
}
private IList<String> _webSocketRequestedProtocols = new List<string>();
public override IList<String> WebSocketRequestedProtocols { get { return _webSocketRequestedProtocols; } }
public void SetWebSocketRequestedProtocols(IList<String> val)
{
_webSocketRequestedProtocols = val;
}
public Task InitializeWebSocket()
{
throw new NotImplementedException();
}
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/map_unittest_proto3.proto
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace Google.Protobuf.TestProtos {
/// <summary>Holder for reflection information generated from google/protobuf/map_unittest_proto3.proto</summary>
public static partial class MapUnittestProto3Reflection {
#region Descriptor
/// <summary>File descriptor for google/protobuf/map_unittest_proto3.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static MapUnittestProto3Reflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"Cilnb29nbGUvcHJvdG9idWYvbWFwX3VuaXR0ZXN0X3Byb3RvMy5wcm90bxIR",
"cHJvdG9idWZfdW5pdHRlc3QaJWdvb2dsZS9wcm90b2J1Zi91bml0dGVzdF9w",
"cm90bzMucHJvdG8ilhIKB1Rlc3RNYXASRgoPbWFwX2ludDMyX2ludDMyGAEg",
"AygLMi0ucHJvdG9idWZfdW5pdHRlc3QuVGVzdE1hcC5NYXBJbnQzMkludDMy",
"RW50cnkSRgoPbWFwX2ludDY0X2ludDY0GAIgAygLMi0ucHJvdG9idWZfdW5p",
"dHRlc3QuVGVzdE1hcC5NYXBJbnQ2NEludDY0RW50cnkSSgoRbWFwX3VpbnQz",
"Ml91aW50MzIYAyADKAsyLy5wcm90b2J1Zl91bml0dGVzdC5UZXN0TWFwLk1h",
"cFVpbnQzMlVpbnQzMkVudHJ5EkoKEW1hcF91aW50NjRfdWludDY0GAQgAygL",
"Mi8ucHJvdG9idWZfdW5pdHRlc3QuVGVzdE1hcC5NYXBVaW50NjRVaW50NjRF",
"bnRyeRJKChFtYXBfc2ludDMyX3NpbnQzMhgFIAMoCzIvLnByb3RvYnVmX3Vu",
"aXR0ZXN0LlRlc3RNYXAuTWFwU2ludDMyU2ludDMyRW50cnkSSgoRbWFwX3Np",
"bnQ2NF9zaW50NjQYBiADKAsyLy5wcm90b2J1Zl91bml0dGVzdC5UZXN0TWFw",
"Lk1hcFNpbnQ2NFNpbnQ2NEVudHJ5Ek4KE21hcF9maXhlZDMyX2ZpeGVkMzIY",
"ByADKAsyMS5wcm90b2J1Zl91bml0dGVzdC5UZXN0TWFwLk1hcEZpeGVkMzJG",
"aXhlZDMyRW50cnkSTgoTbWFwX2ZpeGVkNjRfZml4ZWQ2NBgIIAMoCzIxLnBy",
"b3RvYnVmX3VuaXR0ZXN0LlRlc3RNYXAuTWFwRml4ZWQ2NEZpeGVkNjRFbnRy",
"eRJSChVtYXBfc2ZpeGVkMzJfc2ZpeGVkMzIYCSADKAsyMy5wcm90b2J1Zl91",
"bml0dGVzdC5UZXN0TWFwLk1hcFNmaXhlZDMyU2ZpeGVkMzJFbnRyeRJSChVt",
"YXBfc2ZpeGVkNjRfc2ZpeGVkNjQYCiADKAsyMy5wcm90b2J1Zl91bml0dGVz",
"dC5UZXN0TWFwLk1hcFNmaXhlZDY0U2ZpeGVkNjRFbnRyeRJGCg9tYXBfaW50",
"MzJfZmxvYXQYCyADKAsyLS5wcm90b2J1Zl91bml0dGVzdC5UZXN0TWFwLk1h",
"cEludDMyRmxvYXRFbnRyeRJIChBtYXBfaW50MzJfZG91YmxlGAwgAygLMi4u",
"cHJvdG9idWZfdW5pdHRlc3QuVGVzdE1hcC5NYXBJbnQzMkRvdWJsZUVudHJ5",
"EkIKDW1hcF9ib29sX2Jvb2wYDSADKAsyKy5wcm90b2J1Zl91bml0dGVzdC5U",
"ZXN0TWFwLk1hcEJvb2xCb29sRW50cnkSSgoRbWFwX3N0cmluZ19zdHJpbmcY",
"DiADKAsyLy5wcm90b2J1Zl91bml0dGVzdC5UZXN0TWFwLk1hcFN0cmluZ1N0",
"cmluZ0VudHJ5EkYKD21hcF9pbnQzMl9ieXRlcxgPIAMoCzItLnByb3RvYnVm",
"X3VuaXR0ZXN0LlRlc3RNYXAuTWFwSW50MzJCeXRlc0VudHJ5EkQKDm1hcF9p",
"bnQzMl9lbnVtGBAgAygLMiwucHJvdG9idWZfdW5pdHRlc3QuVGVzdE1hcC5N",
"YXBJbnQzMkVudW1FbnRyeRJZChltYXBfaW50MzJfZm9yZWlnbl9tZXNzYWdl",
"GBEgAygLMjYucHJvdG9idWZfdW5pdHRlc3QuVGVzdE1hcC5NYXBJbnQzMkZv",
"cmVpZ25NZXNzYWdlRW50cnkaNAoSTWFwSW50MzJJbnQzMkVudHJ5EgsKA2tl",
"eRgBIAEoBRINCgV2YWx1ZRgCIAEoBToCOAEaNAoSTWFwSW50NjRJbnQ2NEVu",
"dHJ5EgsKA2tleRgBIAEoAxINCgV2YWx1ZRgCIAEoAzoCOAEaNgoUTWFwVWlu",
"dDMyVWludDMyRW50cnkSCwoDa2V5GAEgASgNEg0KBXZhbHVlGAIgASgNOgI4",
"ARo2ChRNYXBVaW50NjRVaW50NjRFbnRyeRILCgNrZXkYASABKAQSDQoFdmFs",
"dWUYAiABKAQ6AjgBGjYKFE1hcFNpbnQzMlNpbnQzMkVudHJ5EgsKA2tleRgB",
"IAEoERINCgV2YWx1ZRgCIAEoEToCOAEaNgoUTWFwU2ludDY0U2ludDY0RW50",
"cnkSCwoDa2V5GAEgASgSEg0KBXZhbHVlGAIgASgSOgI4ARo4ChZNYXBGaXhl",
"ZDMyRml4ZWQzMkVudHJ5EgsKA2tleRgBIAEoBxINCgV2YWx1ZRgCIAEoBzoC",
"OAEaOAoWTWFwRml4ZWQ2NEZpeGVkNjRFbnRyeRILCgNrZXkYASABKAYSDQoF",
"dmFsdWUYAiABKAY6AjgBGjoKGE1hcFNmaXhlZDMyU2ZpeGVkMzJFbnRyeRIL",
"CgNrZXkYASABKA8SDQoFdmFsdWUYAiABKA86AjgBGjoKGE1hcFNmaXhlZDY0",
"U2ZpeGVkNjRFbnRyeRILCgNrZXkYASABKBASDQoFdmFsdWUYAiABKBA6AjgB",
"GjQKEk1hcEludDMyRmxvYXRFbnRyeRILCgNrZXkYASABKAUSDQoFdmFsdWUY",
"AiABKAI6AjgBGjUKE01hcEludDMyRG91YmxlRW50cnkSCwoDa2V5GAEgASgF",
"Eg0KBXZhbHVlGAIgASgBOgI4ARoyChBNYXBCb29sQm9vbEVudHJ5EgsKA2tl",
"eRgBIAEoCBINCgV2YWx1ZRgCIAEoCDoCOAEaNgoUTWFwU3RyaW5nU3RyaW5n",
"RW50cnkSCwoDa2V5GAEgASgJEg0KBXZhbHVlGAIgASgJOgI4ARo0ChJNYXBJ",
"bnQzMkJ5dGVzRW50cnkSCwoDa2V5GAEgASgFEg0KBXZhbHVlGAIgASgMOgI4",
"ARpPChFNYXBJbnQzMkVudW1FbnRyeRILCgNrZXkYASABKAUSKQoFdmFsdWUY",
"AiABKA4yGi5wcm90b2J1Zl91bml0dGVzdC5NYXBFbnVtOgI4ARpgChtNYXBJ",
"bnQzMkZvcmVpZ25NZXNzYWdlRW50cnkSCwoDa2V5GAEgASgFEjAKBXZhbHVl",
"GAIgASgLMiEucHJvdG9idWZfdW5pdHRlc3QuRm9yZWlnbk1lc3NhZ2U6AjgB",
"IkEKEVRlc3RNYXBTdWJtZXNzYWdlEiwKCHRlc3RfbWFwGAEgASgLMhoucHJv",
"dG9idWZfdW5pdHRlc3QuVGVzdE1hcCK8AQoOVGVzdE1lc3NhZ2VNYXASUQoR",
"bWFwX2ludDMyX21lc3NhZ2UYASADKAsyNi5wcm90b2J1Zl91bml0dGVzdC5U",
"ZXN0TWVzc2FnZU1hcC5NYXBJbnQzMk1lc3NhZ2VFbnRyeRpXChRNYXBJbnQz",
"Mk1lc3NhZ2VFbnRyeRILCgNrZXkYASABKAUSLgoFdmFsdWUYAiABKAsyHy5w",
"cm90b2J1Zl91bml0dGVzdC5UZXN0QWxsVHlwZXM6AjgBIuMBCg9UZXN0U2Ft",
"ZVR5cGVNYXASOgoEbWFwMRgBIAMoCzIsLnByb3RvYnVmX3VuaXR0ZXN0LlRl",
"c3RTYW1lVHlwZU1hcC5NYXAxRW50cnkSOgoEbWFwMhgCIAMoCzIsLnByb3Rv",
"YnVmX3VuaXR0ZXN0LlRlc3RTYW1lVHlwZU1hcC5NYXAyRW50cnkaKwoJTWFw",
"MUVudHJ5EgsKA2tleRgBIAEoBRINCgV2YWx1ZRgCIAEoBToCOAEaKwoJTWFw",
"MkVudHJ5EgsKA2tleRgBIAEoBRINCgV2YWx1ZRgCIAEoBToCOAEi5BAKDFRl",
"c3RBcmVuYU1hcBJLCg9tYXBfaW50MzJfaW50MzIYASADKAsyMi5wcm90b2J1",
"Zl91bml0dGVzdC5UZXN0QXJlbmFNYXAuTWFwSW50MzJJbnQzMkVudHJ5EksK",
"D21hcF9pbnQ2NF9pbnQ2NBgCIAMoCzIyLnByb3RvYnVmX3VuaXR0ZXN0LlRl",
"c3RBcmVuYU1hcC5NYXBJbnQ2NEludDY0RW50cnkSTwoRbWFwX3VpbnQzMl91",
"aW50MzIYAyADKAsyNC5wcm90b2J1Zl91bml0dGVzdC5UZXN0QXJlbmFNYXAu",
"TWFwVWludDMyVWludDMyRW50cnkSTwoRbWFwX3VpbnQ2NF91aW50NjQYBCAD",
"KAsyNC5wcm90b2J1Zl91bml0dGVzdC5UZXN0QXJlbmFNYXAuTWFwVWludDY0",
"VWludDY0RW50cnkSTwoRbWFwX3NpbnQzMl9zaW50MzIYBSADKAsyNC5wcm90",
"b2J1Zl91bml0dGVzdC5UZXN0QXJlbmFNYXAuTWFwU2ludDMyU2ludDMyRW50",
"cnkSTwoRbWFwX3NpbnQ2NF9zaW50NjQYBiADKAsyNC5wcm90b2J1Zl91bml0",
"dGVzdC5UZXN0QXJlbmFNYXAuTWFwU2ludDY0U2ludDY0RW50cnkSUwoTbWFw",
"X2ZpeGVkMzJfZml4ZWQzMhgHIAMoCzI2LnByb3RvYnVmX3VuaXR0ZXN0LlRl",
"c3RBcmVuYU1hcC5NYXBGaXhlZDMyRml4ZWQzMkVudHJ5ElMKE21hcF9maXhl",
"ZDY0X2ZpeGVkNjQYCCADKAsyNi5wcm90b2J1Zl91bml0dGVzdC5UZXN0QXJl",
"bmFNYXAuTWFwRml4ZWQ2NEZpeGVkNjRFbnRyeRJXChVtYXBfc2ZpeGVkMzJf",
"c2ZpeGVkMzIYCSADKAsyOC5wcm90b2J1Zl91bml0dGVzdC5UZXN0QXJlbmFN",
"YXAuTWFwU2ZpeGVkMzJTZml4ZWQzMkVudHJ5ElcKFW1hcF9zZml4ZWQ2NF9z",
"Zml4ZWQ2NBgKIAMoCzI4LnByb3RvYnVmX3VuaXR0ZXN0LlRlc3RBcmVuYU1h",
"cC5NYXBTZml4ZWQ2NFNmaXhlZDY0RW50cnkSSwoPbWFwX2ludDMyX2Zsb2F0",
"GAsgAygLMjIucHJvdG9idWZfdW5pdHRlc3QuVGVzdEFyZW5hTWFwLk1hcElu",
"dDMyRmxvYXRFbnRyeRJNChBtYXBfaW50MzJfZG91YmxlGAwgAygLMjMucHJv",
"dG9idWZfdW5pdHRlc3QuVGVzdEFyZW5hTWFwLk1hcEludDMyRG91YmxlRW50",
"cnkSRwoNbWFwX2Jvb2xfYm9vbBgNIAMoCzIwLnByb3RvYnVmX3VuaXR0ZXN0",
"LlRlc3RBcmVuYU1hcC5NYXBCb29sQm9vbEVudHJ5EkkKDm1hcF9pbnQzMl9l",
"bnVtGA4gAygLMjEucHJvdG9idWZfdW5pdHRlc3QuVGVzdEFyZW5hTWFwLk1h",
"cEludDMyRW51bUVudHJ5El4KGW1hcF9pbnQzMl9mb3JlaWduX21lc3NhZ2UY",
"DyADKAsyOy5wcm90b2J1Zl91bml0dGVzdC5UZXN0QXJlbmFNYXAuTWFwSW50",
"MzJGb3JlaWduTWVzc2FnZUVudHJ5GjQKEk1hcEludDMySW50MzJFbnRyeRIL",
"CgNrZXkYASABKAUSDQoFdmFsdWUYAiABKAU6AjgBGjQKEk1hcEludDY0SW50",
"NjRFbnRyeRILCgNrZXkYASABKAMSDQoFdmFsdWUYAiABKAM6AjgBGjYKFE1h",
"cFVpbnQzMlVpbnQzMkVudHJ5EgsKA2tleRgBIAEoDRINCgV2YWx1ZRgCIAEo",
"DToCOAEaNgoUTWFwVWludDY0VWludDY0RW50cnkSCwoDa2V5GAEgASgEEg0K",
"BXZhbHVlGAIgASgEOgI4ARo2ChRNYXBTaW50MzJTaW50MzJFbnRyeRILCgNr",
"ZXkYASABKBESDQoFdmFsdWUYAiABKBE6AjgBGjYKFE1hcFNpbnQ2NFNpbnQ2",
"NEVudHJ5EgsKA2tleRgBIAEoEhINCgV2YWx1ZRgCIAEoEjoCOAEaOAoWTWFw",
"Rml4ZWQzMkZpeGVkMzJFbnRyeRILCgNrZXkYASABKAcSDQoFdmFsdWUYAiAB",
"KAc6AjgBGjgKFk1hcEZpeGVkNjRGaXhlZDY0RW50cnkSCwoDa2V5GAEgASgG",
"Eg0KBXZhbHVlGAIgASgGOgI4ARo6ChhNYXBTZml4ZWQzMlNmaXhlZDMyRW50",
"cnkSCwoDa2V5GAEgASgPEg0KBXZhbHVlGAIgASgPOgI4ARo6ChhNYXBTZml4",
"ZWQ2NFNmaXhlZDY0RW50cnkSCwoDa2V5GAEgASgQEg0KBXZhbHVlGAIgASgQ",
"OgI4ARo0ChJNYXBJbnQzMkZsb2F0RW50cnkSCwoDa2V5GAEgASgFEg0KBXZh",
"bHVlGAIgASgCOgI4ARo1ChNNYXBJbnQzMkRvdWJsZUVudHJ5EgsKA2tleRgB",
"IAEoBRINCgV2YWx1ZRgCIAEoAToCOAEaMgoQTWFwQm9vbEJvb2xFbnRyeRIL",
"CgNrZXkYASABKAgSDQoFdmFsdWUYAiABKAg6AjgBGk8KEU1hcEludDMyRW51",
"bUVudHJ5EgsKA2tleRgBIAEoBRIpCgV2YWx1ZRgCIAEoDjIaLnByb3RvYnVm",
"X3VuaXR0ZXN0Lk1hcEVudW06AjgBGmAKG01hcEludDMyRm9yZWlnbk1lc3Nh",
"Z2VFbnRyeRILCgNrZXkYASABKAUSMAoFdmFsdWUYAiABKAsyIS5wcm90b2J1",
"Zl91bml0dGVzdC5Gb3JlaWduTWVzc2FnZToCOAEi5AEKH01lc3NhZ2VDb250",
"YWluaW5nRW51bUNhbGxlZFR5cGUSSgoEdHlwZRgBIAMoCzI8LnByb3RvYnVm",
"X3VuaXR0ZXN0Lk1lc3NhZ2VDb250YWluaW5nRW51bUNhbGxlZFR5cGUuVHlw",
"ZUVudHJ5Gl8KCVR5cGVFbnRyeRILCgNrZXkYASABKAUSQQoFdmFsdWUYAiAB",
"KAsyMi5wcm90b2J1Zl91bml0dGVzdC5NZXNzYWdlQ29udGFpbmluZ0VudW1D",
"YWxsZWRUeXBlOgI4ASIUCgRUeXBlEgwKCFRZUEVfRk9PEAAinQEKH01lc3Nh",
"Z2VDb250YWluaW5nTWFwQ2FsbGVkRW50cnkSTAoFZW50cnkYASADKAsyPS5w",
"cm90b2J1Zl91bml0dGVzdC5NZXNzYWdlQ29udGFpbmluZ01hcENhbGxlZEVu",
"dHJ5LkVudHJ5RW50cnkaLAoKRW50cnlFbnRyeRILCgNrZXkYASABKAUSDQoF",
"dmFsdWUYAiABKAU6AjgBKj8KB01hcEVudW0SEAoMTUFQX0VOVU1fRk9PEAAS",
"EAoMTUFQX0VOVU1fQkFSEAESEAoMTUFQX0VOVU1fQkFaEAJCIPgBAaoCGkdv",
"b2dsZS5Qcm90b2J1Zi5UZXN0UHJvdG9zYgZwcm90bzM="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(new[] {typeof(global::Google.Protobuf.TestProtos.MapEnum), }, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestMap), global::Google.Protobuf.TestProtos.TestMap.Parser, new[]{ "MapInt32Int32", "MapInt64Int64", "MapUint32Uint32", "MapUint64Uint64", "MapSint32Sint32", "MapSint64Sint64", "MapFixed32Fixed32", "MapFixed64Fixed64", "MapSfixed32Sfixed32", "MapSfixed64Sfixed64", "MapInt32Float", "MapInt32Double", "MapBoolBool", "MapStringString", "MapInt32Bytes", "MapInt32Enum", "MapInt32ForeignMessage" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, }),
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestMapSubmessage), global::Google.Protobuf.TestProtos.TestMapSubmessage.Parser, new[]{ "TestMap" }, null, null, null),
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestMessageMap), global::Google.Protobuf.TestProtos.TestMessageMap.Parser, new[]{ "MapInt32Message" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, }),
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestSameTypeMap), global::Google.Protobuf.TestProtos.TestSameTypeMap.Parser, new[]{ "Map1", "Map2" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, null, }),
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestArenaMap), global::Google.Protobuf.TestProtos.TestArenaMap.Parser, new[]{ "MapInt32Int32", "MapInt64Int64", "MapUint32Uint32", "MapUint64Uint64", "MapSint32Sint32", "MapSint64Sint64", "MapFixed32Fixed32", "MapFixed64Fixed64", "MapSfixed32Sfixed32", "MapSfixed64Sfixed64", "MapInt32Float", "MapInt32Double", "MapBoolBool", "MapInt32Enum", "MapInt32ForeignMessage" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, }),
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType), global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType.Parser, new[]{ "Type" }, null, new[]{ typeof(global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType.Types.Type) }, new pbr::GeneratedClrTypeInfo[] { null, }),
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.MessageContainingMapCalledEntry), global::Google.Protobuf.TestProtos.MessageContainingMapCalledEntry.Parser, new[]{ "Entry" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, })
}));
}
#endregion
}
#region Enums
public enum MapEnum {
[pbr::OriginalName("MAP_ENUM_FOO")] Foo = 0,
[pbr::OriginalName("MAP_ENUM_BAR")] Bar = 1,
[pbr::OriginalName("MAP_ENUM_BAZ")] Baz = 2,
}
#endregion
#region Messages
/// <summary>
/// Tests maps.
/// </summary>
public sealed partial class TestMap : pb::IMessage<TestMap> {
private static readonly pb::MessageParser<TestMap> _parser = new pb::MessageParser<TestMap>(() => new TestMap());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<TestMap> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public TestMap() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public TestMap(TestMap other) : this() {
mapInt32Int32_ = other.mapInt32Int32_.Clone();
mapInt64Int64_ = other.mapInt64Int64_.Clone();
mapUint32Uint32_ = other.mapUint32Uint32_.Clone();
mapUint64Uint64_ = other.mapUint64Uint64_.Clone();
mapSint32Sint32_ = other.mapSint32Sint32_.Clone();
mapSint64Sint64_ = other.mapSint64Sint64_.Clone();
mapFixed32Fixed32_ = other.mapFixed32Fixed32_.Clone();
mapFixed64Fixed64_ = other.mapFixed64Fixed64_.Clone();
mapSfixed32Sfixed32_ = other.mapSfixed32Sfixed32_.Clone();
mapSfixed64Sfixed64_ = other.mapSfixed64Sfixed64_.Clone();
mapInt32Float_ = other.mapInt32Float_.Clone();
mapInt32Double_ = other.mapInt32Double_.Clone();
mapBoolBool_ = other.mapBoolBool_.Clone();
mapStringString_ = other.mapStringString_.Clone();
mapInt32Bytes_ = other.mapInt32Bytes_.Clone();
mapInt32Enum_ = other.mapInt32Enum_.Clone();
mapInt32ForeignMessage_ = other.mapInt32ForeignMessage_.Clone();
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public TestMap Clone() {
return new TestMap(this);
}
/// <summary>Field number for the "map_int32_int32" field.</summary>
public const int MapInt32Int32FieldNumber = 1;
private static readonly pbc::MapField<int, int>.Codec _map_mapInt32Int32_codec
= new pbc::MapField<int, int>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForInt32(16), 10);
private readonly pbc::MapField<int, int> mapInt32Int32_ = new pbc::MapField<int, int>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, int> MapInt32Int32 {
get { return mapInt32Int32_; }
}
/// <summary>Field number for the "map_int64_int64" field.</summary>
public const int MapInt64Int64FieldNumber = 2;
private static readonly pbc::MapField<long, long>.Codec _map_mapInt64Int64_codec
= new pbc::MapField<long, long>.Codec(pb::FieldCodec.ForInt64(8), pb::FieldCodec.ForInt64(16), 18);
private readonly pbc::MapField<long, long> mapInt64Int64_ = new pbc::MapField<long, long>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<long, long> MapInt64Int64 {
get { return mapInt64Int64_; }
}
/// <summary>Field number for the "map_uint32_uint32" field.</summary>
public const int MapUint32Uint32FieldNumber = 3;
private static readonly pbc::MapField<uint, uint>.Codec _map_mapUint32Uint32_codec
= new pbc::MapField<uint, uint>.Codec(pb::FieldCodec.ForUInt32(8), pb::FieldCodec.ForUInt32(16), 26);
private readonly pbc::MapField<uint, uint> mapUint32Uint32_ = new pbc::MapField<uint, uint>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<uint, uint> MapUint32Uint32 {
get { return mapUint32Uint32_; }
}
/// <summary>Field number for the "map_uint64_uint64" field.</summary>
public const int MapUint64Uint64FieldNumber = 4;
private static readonly pbc::MapField<ulong, ulong>.Codec _map_mapUint64Uint64_codec
= new pbc::MapField<ulong, ulong>.Codec(pb::FieldCodec.ForUInt64(8), pb::FieldCodec.ForUInt64(16), 34);
private readonly pbc::MapField<ulong, ulong> mapUint64Uint64_ = new pbc::MapField<ulong, ulong>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<ulong, ulong> MapUint64Uint64 {
get { return mapUint64Uint64_; }
}
/// <summary>Field number for the "map_sint32_sint32" field.</summary>
public const int MapSint32Sint32FieldNumber = 5;
private static readonly pbc::MapField<int, int>.Codec _map_mapSint32Sint32_codec
= new pbc::MapField<int, int>.Codec(pb::FieldCodec.ForSInt32(8), pb::FieldCodec.ForSInt32(16), 42);
private readonly pbc::MapField<int, int> mapSint32Sint32_ = new pbc::MapField<int, int>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, int> MapSint32Sint32 {
get { return mapSint32Sint32_; }
}
/// <summary>Field number for the "map_sint64_sint64" field.</summary>
public const int MapSint64Sint64FieldNumber = 6;
private static readonly pbc::MapField<long, long>.Codec _map_mapSint64Sint64_codec
= new pbc::MapField<long, long>.Codec(pb::FieldCodec.ForSInt64(8), pb::FieldCodec.ForSInt64(16), 50);
private readonly pbc::MapField<long, long> mapSint64Sint64_ = new pbc::MapField<long, long>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<long, long> MapSint64Sint64 {
get { return mapSint64Sint64_; }
}
/// <summary>Field number for the "map_fixed32_fixed32" field.</summary>
public const int MapFixed32Fixed32FieldNumber = 7;
private static readonly pbc::MapField<uint, uint>.Codec _map_mapFixed32Fixed32_codec
= new pbc::MapField<uint, uint>.Codec(pb::FieldCodec.ForFixed32(13), pb::FieldCodec.ForFixed32(21), 58);
private readonly pbc::MapField<uint, uint> mapFixed32Fixed32_ = new pbc::MapField<uint, uint>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<uint, uint> MapFixed32Fixed32 {
get { return mapFixed32Fixed32_; }
}
/// <summary>Field number for the "map_fixed64_fixed64" field.</summary>
public const int MapFixed64Fixed64FieldNumber = 8;
private static readonly pbc::MapField<ulong, ulong>.Codec _map_mapFixed64Fixed64_codec
= new pbc::MapField<ulong, ulong>.Codec(pb::FieldCodec.ForFixed64(9), pb::FieldCodec.ForFixed64(17), 66);
private readonly pbc::MapField<ulong, ulong> mapFixed64Fixed64_ = new pbc::MapField<ulong, ulong>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<ulong, ulong> MapFixed64Fixed64 {
get { return mapFixed64Fixed64_; }
}
/// <summary>Field number for the "map_sfixed32_sfixed32" field.</summary>
public const int MapSfixed32Sfixed32FieldNumber = 9;
private static readonly pbc::MapField<int, int>.Codec _map_mapSfixed32Sfixed32_codec
= new pbc::MapField<int, int>.Codec(pb::FieldCodec.ForSFixed32(13), pb::FieldCodec.ForSFixed32(21), 74);
private readonly pbc::MapField<int, int> mapSfixed32Sfixed32_ = new pbc::MapField<int, int>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, int> MapSfixed32Sfixed32 {
get { return mapSfixed32Sfixed32_; }
}
/// <summary>Field number for the "map_sfixed64_sfixed64" field.</summary>
public const int MapSfixed64Sfixed64FieldNumber = 10;
private static readonly pbc::MapField<long, long>.Codec _map_mapSfixed64Sfixed64_codec
= new pbc::MapField<long, long>.Codec(pb::FieldCodec.ForSFixed64(9), pb::FieldCodec.ForSFixed64(17), 82);
private readonly pbc::MapField<long, long> mapSfixed64Sfixed64_ = new pbc::MapField<long, long>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<long, long> MapSfixed64Sfixed64 {
get { return mapSfixed64Sfixed64_; }
}
/// <summary>Field number for the "map_int32_float" field.</summary>
public const int MapInt32FloatFieldNumber = 11;
private static readonly pbc::MapField<int, float>.Codec _map_mapInt32Float_codec
= new pbc::MapField<int, float>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForFloat(21), 90);
private readonly pbc::MapField<int, float> mapInt32Float_ = new pbc::MapField<int, float>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, float> MapInt32Float {
get { return mapInt32Float_; }
}
/// <summary>Field number for the "map_int32_double" field.</summary>
public const int MapInt32DoubleFieldNumber = 12;
private static readonly pbc::MapField<int, double>.Codec _map_mapInt32Double_codec
= new pbc::MapField<int, double>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForDouble(17), 98);
private readonly pbc::MapField<int, double> mapInt32Double_ = new pbc::MapField<int, double>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, double> MapInt32Double {
get { return mapInt32Double_; }
}
/// <summary>Field number for the "map_bool_bool" field.</summary>
public const int MapBoolBoolFieldNumber = 13;
private static readonly pbc::MapField<bool, bool>.Codec _map_mapBoolBool_codec
= new pbc::MapField<bool, bool>.Codec(pb::FieldCodec.ForBool(8), pb::FieldCodec.ForBool(16), 106);
private readonly pbc::MapField<bool, bool> mapBoolBool_ = new pbc::MapField<bool, bool>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<bool, bool> MapBoolBool {
get { return mapBoolBool_; }
}
/// <summary>Field number for the "map_string_string" field.</summary>
public const int MapStringStringFieldNumber = 14;
private static readonly pbc::MapField<string, string>.Codec _map_mapStringString_codec
= new pbc::MapField<string, string>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForString(18), 114);
private readonly pbc::MapField<string, string> mapStringString_ = new pbc::MapField<string, string>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<string, string> MapStringString {
get { return mapStringString_; }
}
/// <summary>Field number for the "map_int32_bytes" field.</summary>
public const int MapInt32BytesFieldNumber = 15;
private static readonly pbc::MapField<int, pb::ByteString>.Codec _map_mapInt32Bytes_codec
= new pbc::MapField<int, pb::ByteString>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForBytes(18), 122);
private readonly pbc::MapField<int, pb::ByteString> mapInt32Bytes_ = new pbc::MapField<int, pb::ByteString>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, pb::ByteString> MapInt32Bytes {
get { return mapInt32Bytes_; }
}
/// <summary>Field number for the "map_int32_enum" field.</summary>
public const int MapInt32EnumFieldNumber = 16;
private static readonly pbc::MapField<int, global::Google.Protobuf.TestProtos.MapEnum>.Codec _map_mapInt32Enum_codec
= new pbc::MapField<int, global::Google.Protobuf.TestProtos.MapEnum>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForEnum(16, x => (int) x, x => (global::Google.Protobuf.TestProtos.MapEnum) x), 130);
private readonly pbc::MapField<int, global::Google.Protobuf.TestProtos.MapEnum> mapInt32Enum_ = new pbc::MapField<int, global::Google.Protobuf.TestProtos.MapEnum>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, global::Google.Protobuf.TestProtos.MapEnum> MapInt32Enum {
get { return mapInt32Enum_; }
}
/// <summary>Field number for the "map_int32_foreign_message" field.</summary>
public const int MapInt32ForeignMessageFieldNumber = 17;
private static readonly pbc::MapField<int, global::Google.Protobuf.TestProtos.ForeignMessage>.Codec _map_mapInt32ForeignMessage_codec
= new pbc::MapField<int, global::Google.Protobuf.TestProtos.ForeignMessage>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.TestProtos.ForeignMessage.Parser), 138);
private readonly pbc::MapField<int, global::Google.Protobuf.TestProtos.ForeignMessage> mapInt32ForeignMessage_ = new pbc::MapField<int, global::Google.Protobuf.TestProtos.ForeignMessage>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, global::Google.Protobuf.TestProtos.ForeignMessage> MapInt32ForeignMessage {
get { return mapInt32ForeignMessage_; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as TestMap);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(TestMap other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!MapInt32Int32.Equals(other.MapInt32Int32)) return false;
if (!MapInt64Int64.Equals(other.MapInt64Int64)) return false;
if (!MapUint32Uint32.Equals(other.MapUint32Uint32)) return false;
if (!MapUint64Uint64.Equals(other.MapUint64Uint64)) return false;
if (!MapSint32Sint32.Equals(other.MapSint32Sint32)) return false;
if (!MapSint64Sint64.Equals(other.MapSint64Sint64)) return false;
if (!MapFixed32Fixed32.Equals(other.MapFixed32Fixed32)) return false;
if (!MapFixed64Fixed64.Equals(other.MapFixed64Fixed64)) return false;
if (!MapSfixed32Sfixed32.Equals(other.MapSfixed32Sfixed32)) return false;
if (!MapSfixed64Sfixed64.Equals(other.MapSfixed64Sfixed64)) return false;
if (!MapInt32Float.Equals(other.MapInt32Float)) return false;
if (!MapInt32Double.Equals(other.MapInt32Double)) return false;
if (!MapBoolBool.Equals(other.MapBoolBool)) return false;
if (!MapStringString.Equals(other.MapStringString)) return false;
if (!MapInt32Bytes.Equals(other.MapInt32Bytes)) return false;
if (!MapInt32Enum.Equals(other.MapInt32Enum)) return false;
if (!MapInt32ForeignMessage.Equals(other.MapInt32ForeignMessage)) return false;
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
hash ^= MapInt32Int32.GetHashCode();
hash ^= MapInt64Int64.GetHashCode();
hash ^= MapUint32Uint32.GetHashCode();
hash ^= MapUint64Uint64.GetHashCode();
hash ^= MapSint32Sint32.GetHashCode();
hash ^= MapSint64Sint64.GetHashCode();
hash ^= MapFixed32Fixed32.GetHashCode();
hash ^= MapFixed64Fixed64.GetHashCode();
hash ^= MapSfixed32Sfixed32.GetHashCode();
hash ^= MapSfixed64Sfixed64.GetHashCode();
hash ^= MapInt32Float.GetHashCode();
hash ^= MapInt32Double.GetHashCode();
hash ^= MapBoolBool.GetHashCode();
hash ^= MapStringString.GetHashCode();
hash ^= MapInt32Bytes.GetHashCode();
hash ^= MapInt32Enum.GetHashCode();
hash ^= MapInt32ForeignMessage.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
mapInt32Int32_.WriteTo(output, _map_mapInt32Int32_codec);
mapInt64Int64_.WriteTo(output, _map_mapInt64Int64_codec);
mapUint32Uint32_.WriteTo(output, _map_mapUint32Uint32_codec);
mapUint64Uint64_.WriteTo(output, _map_mapUint64Uint64_codec);
mapSint32Sint32_.WriteTo(output, _map_mapSint32Sint32_codec);
mapSint64Sint64_.WriteTo(output, _map_mapSint64Sint64_codec);
mapFixed32Fixed32_.WriteTo(output, _map_mapFixed32Fixed32_codec);
mapFixed64Fixed64_.WriteTo(output, _map_mapFixed64Fixed64_codec);
mapSfixed32Sfixed32_.WriteTo(output, _map_mapSfixed32Sfixed32_codec);
mapSfixed64Sfixed64_.WriteTo(output, _map_mapSfixed64Sfixed64_codec);
mapInt32Float_.WriteTo(output, _map_mapInt32Float_codec);
mapInt32Double_.WriteTo(output, _map_mapInt32Double_codec);
mapBoolBool_.WriteTo(output, _map_mapBoolBool_codec);
mapStringString_.WriteTo(output, _map_mapStringString_codec);
mapInt32Bytes_.WriteTo(output, _map_mapInt32Bytes_codec);
mapInt32Enum_.WriteTo(output, _map_mapInt32Enum_codec);
mapInt32ForeignMessage_.WriteTo(output, _map_mapInt32ForeignMessage_codec);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
size += mapInt32Int32_.CalculateSize(_map_mapInt32Int32_codec);
size += mapInt64Int64_.CalculateSize(_map_mapInt64Int64_codec);
size += mapUint32Uint32_.CalculateSize(_map_mapUint32Uint32_codec);
size += mapUint64Uint64_.CalculateSize(_map_mapUint64Uint64_codec);
size += mapSint32Sint32_.CalculateSize(_map_mapSint32Sint32_codec);
size += mapSint64Sint64_.CalculateSize(_map_mapSint64Sint64_codec);
size += mapFixed32Fixed32_.CalculateSize(_map_mapFixed32Fixed32_codec);
size += mapFixed64Fixed64_.CalculateSize(_map_mapFixed64Fixed64_codec);
size += mapSfixed32Sfixed32_.CalculateSize(_map_mapSfixed32Sfixed32_codec);
size += mapSfixed64Sfixed64_.CalculateSize(_map_mapSfixed64Sfixed64_codec);
size += mapInt32Float_.CalculateSize(_map_mapInt32Float_codec);
size += mapInt32Double_.CalculateSize(_map_mapInt32Double_codec);
size += mapBoolBool_.CalculateSize(_map_mapBoolBool_codec);
size += mapStringString_.CalculateSize(_map_mapStringString_codec);
size += mapInt32Bytes_.CalculateSize(_map_mapInt32Bytes_codec);
size += mapInt32Enum_.CalculateSize(_map_mapInt32Enum_codec);
size += mapInt32ForeignMessage_.CalculateSize(_map_mapInt32ForeignMessage_codec);
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(TestMap other) {
if (other == null) {
return;
}
mapInt32Int32_.Add(other.mapInt32Int32_);
mapInt64Int64_.Add(other.mapInt64Int64_);
mapUint32Uint32_.Add(other.mapUint32Uint32_);
mapUint64Uint64_.Add(other.mapUint64Uint64_);
mapSint32Sint32_.Add(other.mapSint32Sint32_);
mapSint64Sint64_.Add(other.mapSint64Sint64_);
mapFixed32Fixed32_.Add(other.mapFixed32Fixed32_);
mapFixed64Fixed64_.Add(other.mapFixed64Fixed64_);
mapSfixed32Sfixed32_.Add(other.mapSfixed32Sfixed32_);
mapSfixed64Sfixed64_.Add(other.mapSfixed64Sfixed64_);
mapInt32Float_.Add(other.mapInt32Float_);
mapInt32Double_.Add(other.mapInt32Double_);
mapBoolBool_.Add(other.mapBoolBool_);
mapStringString_.Add(other.mapStringString_);
mapInt32Bytes_.Add(other.mapInt32Bytes_);
mapInt32Enum_.Add(other.mapInt32Enum_);
mapInt32ForeignMessage_.Add(other.mapInt32ForeignMessage_);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
mapInt32Int32_.AddEntriesFrom(input, _map_mapInt32Int32_codec);
break;
}
case 18: {
mapInt64Int64_.AddEntriesFrom(input, _map_mapInt64Int64_codec);
break;
}
case 26: {
mapUint32Uint32_.AddEntriesFrom(input, _map_mapUint32Uint32_codec);
break;
}
case 34: {
mapUint64Uint64_.AddEntriesFrom(input, _map_mapUint64Uint64_codec);
break;
}
case 42: {
mapSint32Sint32_.AddEntriesFrom(input, _map_mapSint32Sint32_codec);
break;
}
case 50: {
mapSint64Sint64_.AddEntriesFrom(input, _map_mapSint64Sint64_codec);
break;
}
case 58: {
mapFixed32Fixed32_.AddEntriesFrom(input, _map_mapFixed32Fixed32_codec);
break;
}
case 66: {
mapFixed64Fixed64_.AddEntriesFrom(input, _map_mapFixed64Fixed64_codec);
break;
}
case 74: {
mapSfixed32Sfixed32_.AddEntriesFrom(input, _map_mapSfixed32Sfixed32_codec);
break;
}
case 82: {
mapSfixed64Sfixed64_.AddEntriesFrom(input, _map_mapSfixed64Sfixed64_codec);
break;
}
case 90: {
mapInt32Float_.AddEntriesFrom(input, _map_mapInt32Float_codec);
break;
}
case 98: {
mapInt32Double_.AddEntriesFrom(input, _map_mapInt32Double_codec);
break;
}
case 106: {
mapBoolBool_.AddEntriesFrom(input, _map_mapBoolBool_codec);
break;
}
case 114: {
mapStringString_.AddEntriesFrom(input, _map_mapStringString_codec);
break;
}
case 122: {
mapInt32Bytes_.AddEntriesFrom(input, _map_mapInt32Bytes_codec);
break;
}
case 130: {
mapInt32Enum_.AddEntriesFrom(input, _map_mapInt32Enum_codec);
break;
}
case 138: {
mapInt32ForeignMessage_.AddEntriesFrom(input, _map_mapInt32ForeignMessage_codec);
break;
}
}
}
}
}
public sealed partial class TestMapSubmessage : pb::IMessage<TestMapSubmessage> {
private static readonly pb::MessageParser<TestMapSubmessage> _parser = new pb::MessageParser<TestMapSubmessage>(() => new TestMapSubmessage());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<TestMapSubmessage> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[1]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public TestMapSubmessage() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public TestMapSubmessage(TestMapSubmessage other) : this() {
TestMap = other.testMap_ != null ? other.TestMap.Clone() : null;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public TestMapSubmessage Clone() {
return new TestMapSubmessage(this);
}
/// <summary>Field number for the "test_map" field.</summary>
public const int TestMapFieldNumber = 1;
private global::Google.Protobuf.TestProtos.TestMap testMap_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Google.Protobuf.TestProtos.TestMap TestMap {
get { return testMap_; }
set {
testMap_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as TestMapSubmessage);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(TestMapSubmessage other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!object.Equals(TestMap, other.TestMap)) return false;
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (testMap_ != null) hash ^= TestMap.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (testMap_ != null) {
output.WriteRawTag(10);
output.WriteMessage(TestMap);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (testMap_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(TestMap);
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(TestMapSubmessage other) {
if (other == null) {
return;
}
if (other.testMap_ != null) {
if (testMap_ == null) {
testMap_ = new global::Google.Protobuf.TestProtos.TestMap();
}
TestMap.MergeFrom(other.TestMap);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
if (testMap_ == null) {
testMap_ = new global::Google.Protobuf.TestProtos.TestMap();
}
input.ReadMessage(testMap_);
break;
}
}
}
}
}
public sealed partial class TestMessageMap : pb::IMessage<TestMessageMap> {
private static readonly pb::MessageParser<TestMessageMap> _parser = new pb::MessageParser<TestMessageMap>(() => new TestMessageMap());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<TestMessageMap> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[2]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public TestMessageMap() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public TestMessageMap(TestMessageMap other) : this() {
mapInt32Message_ = other.mapInt32Message_.Clone();
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public TestMessageMap Clone() {
return new TestMessageMap(this);
}
/// <summary>Field number for the "map_int32_message" field.</summary>
public const int MapInt32MessageFieldNumber = 1;
private static readonly pbc::MapField<int, global::Google.Protobuf.TestProtos.TestAllTypes>.Codec _map_mapInt32Message_codec
= new pbc::MapField<int, global::Google.Protobuf.TestProtos.TestAllTypes>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.TestProtos.TestAllTypes.Parser), 10);
private readonly pbc::MapField<int, global::Google.Protobuf.TestProtos.TestAllTypes> mapInt32Message_ = new pbc::MapField<int, global::Google.Protobuf.TestProtos.TestAllTypes>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, global::Google.Protobuf.TestProtos.TestAllTypes> MapInt32Message {
get { return mapInt32Message_; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as TestMessageMap);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(TestMessageMap other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!MapInt32Message.Equals(other.MapInt32Message)) return false;
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
hash ^= MapInt32Message.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
mapInt32Message_.WriteTo(output, _map_mapInt32Message_codec);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
size += mapInt32Message_.CalculateSize(_map_mapInt32Message_codec);
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(TestMessageMap other) {
if (other == null) {
return;
}
mapInt32Message_.Add(other.mapInt32Message_);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
mapInt32Message_.AddEntriesFrom(input, _map_mapInt32Message_codec);
break;
}
}
}
}
}
/// <summary>
/// Two map fields share the same entry default instance.
/// </summary>
public sealed partial class TestSameTypeMap : pb::IMessage<TestSameTypeMap> {
private static readonly pb::MessageParser<TestSameTypeMap> _parser = new pb::MessageParser<TestSameTypeMap>(() => new TestSameTypeMap());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<TestSameTypeMap> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[3]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public TestSameTypeMap() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public TestSameTypeMap(TestSameTypeMap other) : this() {
map1_ = other.map1_.Clone();
map2_ = other.map2_.Clone();
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public TestSameTypeMap Clone() {
return new TestSameTypeMap(this);
}
/// <summary>Field number for the "map1" field.</summary>
public const int Map1FieldNumber = 1;
private static readonly pbc::MapField<int, int>.Codec _map_map1_codec
= new pbc::MapField<int, int>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForInt32(16), 10);
private readonly pbc::MapField<int, int> map1_ = new pbc::MapField<int, int>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, int> Map1 {
get { return map1_; }
}
/// <summary>Field number for the "map2" field.</summary>
public const int Map2FieldNumber = 2;
private static readonly pbc::MapField<int, int>.Codec _map_map2_codec
= new pbc::MapField<int, int>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForInt32(16), 18);
private readonly pbc::MapField<int, int> map2_ = new pbc::MapField<int, int>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, int> Map2 {
get { return map2_; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as TestSameTypeMap);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(TestSameTypeMap other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!Map1.Equals(other.Map1)) return false;
if (!Map2.Equals(other.Map2)) return false;
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
hash ^= Map1.GetHashCode();
hash ^= Map2.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
map1_.WriteTo(output, _map_map1_codec);
map2_.WriteTo(output, _map_map2_codec);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
size += map1_.CalculateSize(_map_map1_codec);
size += map2_.CalculateSize(_map_map2_codec);
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(TestSameTypeMap other) {
if (other == null) {
return;
}
map1_.Add(other.map1_);
map2_.Add(other.map2_);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
map1_.AddEntriesFrom(input, _map_map1_codec);
break;
}
case 18: {
map2_.AddEntriesFrom(input, _map_map2_codec);
break;
}
}
}
}
}
public sealed partial class TestArenaMap : pb::IMessage<TestArenaMap> {
private static readonly pb::MessageParser<TestArenaMap> _parser = new pb::MessageParser<TestArenaMap>(() => new TestArenaMap());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<TestArenaMap> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[4]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public TestArenaMap() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public TestArenaMap(TestArenaMap other) : this() {
mapInt32Int32_ = other.mapInt32Int32_.Clone();
mapInt64Int64_ = other.mapInt64Int64_.Clone();
mapUint32Uint32_ = other.mapUint32Uint32_.Clone();
mapUint64Uint64_ = other.mapUint64Uint64_.Clone();
mapSint32Sint32_ = other.mapSint32Sint32_.Clone();
mapSint64Sint64_ = other.mapSint64Sint64_.Clone();
mapFixed32Fixed32_ = other.mapFixed32Fixed32_.Clone();
mapFixed64Fixed64_ = other.mapFixed64Fixed64_.Clone();
mapSfixed32Sfixed32_ = other.mapSfixed32Sfixed32_.Clone();
mapSfixed64Sfixed64_ = other.mapSfixed64Sfixed64_.Clone();
mapInt32Float_ = other.mapInt32Float_.Clone();
mapInt32Double_ = other.mapInt32Double_.Clone();
mapBoolBool_ = other.mapBoolBool_.Clone();
mapInt32Enum_ = other.mapInt32Enum_.Clone();
mapInt32ForeignMessage_ = other.mapInt32ForeignMessage_.Clone();
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public TestArenaMap Clone() {
return new TestArenaMap(this);
}
/// <summary>Field number for the "map_int32_int32" field.</summary>
public const int MapInt32Int32FieldNumber = 1;
private static readonly pbc::MapField<int, int>.Codec _map_mapInt32Int32_codec
= new pbc::MapField<int, int>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForInt32(16), 10);
private readonly pbc::MapField<int, int> mapInt32Int32_ = new pbc::MapField<int, int>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, int> MapInt32Int32 {
get { return mapInt32Int32_; }
}
/// <summary>Field number for the "map_int64_int64" field.</summary>
public const int MapInt64Int64FieldNumber = 2;
private static readonly pbc::MapField<long, long>.Codec _map_mapInt64Int64_codec
= new pbc::MapField<long, long>.Codec(pb::FieldCodec.ForInt64(8), pb::FieldCodec.ForInt64(16), 18);
private readonly pbc::MapField<long, long> mapInt64Int64_ = new pbc::MapField<long, long>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<long, long> MapInt64Int64 {
get { return mapInt64Int64_; }
}
/// <summary>Field number for the "map_uint32_uint32" field.</summary>
public const int MapUint32Uint32FieldNumber = 3;
private static readonly pbc::MapField<uint, uint>.Codec _map_mapUint32Uint32_codec
= new pbc::MapField<uint, uint>.Codec(pb::FieldCodec.ForUInt32(8), pb::FieldCodec.ForUInt32(16), 26);
private readonly pbc::MapField<uint, uint> mapUint32Uint32_ = new pbc::MapField<uint, uint>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<uint, uint> MapUint32Uint32 {
get { return mapUint32Uint32_; }
}
/// <summary>Field number for the "map_uint64_uint64" field.</summary>
public const int MapUint64Uint64FieldNumber = 4;
private static readonly pbc::MapField<ulong, ulong>.Codec _map_mapUint64Uint64_codec
= new pbc::MapField<ulong, ulong>.Codec(pb::FieldCodec.ForUInt64(8), pb::FieldCodec.ForUInt64(16), 34);
private readonly pbc::MapField<ulong, ulong> mapUint64Uint64_ = new pbc::MapField<ulong, ulong>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<ulong, ulong> MapUint64Uint64 {
get { return mapUint64Uint64_; }
}
/// <summary>Field number for the "map_sint32_sint32" field.</summary>
public const int MapSint32Sint32FieldNumber = 5;
private static readonly pbc::MapField<int, int>.Codec _map_mapSint32Sint32_codec
= new pbc::MapField<int, int>.Codec(pb::FieldCodec.ForSInt32(8), pb::FieldCodec.ForSInt32(16), 42);
private readonly pbc::MapField<int, int> mapSint32Sint32_ = new pbc::MapField<int, int>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, int> MapSint32Sint32 {
get { return mapSint32Sint32_; }
}
/// <summary>Field number for the "map_sint64_sint64" field.</summary>
public const int MapSint64Sint64FieldNumber = 6;
private static readonly pbc::MapField<long, long>.Codec _map_mapSint64Sint64_codec
= new pbc::MapField<long, long>.Codec(pb::FieldCodec.ForSInt64(8), pb::FieldCodec.ForSInt64(16), 50);
private readonly pbc::MapField<long, long> mapSint64Sint64_ = new pbc::MapField<long, long>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<long, long> MapSint64Sint64 {
get { return mapSint64Sint64_; }
}
/// <summary>Field number for the "map_fixed32_fixed32" field.</summary>
public const int MapFixed32Fixed32FieldNumber = 7;
private static readonly pbc::MapField<uint, uint>.Codec _map_mapFixed32Fixed32_codec
= new pbc::MapField<uint, uint>.Codec(pb::FieldCodec.ForFixed32(13), pb::FieldCodec.ForFixed32(21), 58);
private readonly pbc::MapField<uint, uint> mapFixed32Fixed32_ = new pbc::MapField<uint, uint>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<uint, uint> MapFixed32Fixed32 {
get { return mapFixed32Fixed32_; }
}
/// <summary>Field number for the "map_fixed64_fixed64" field.</summary>
public const int MapFixed64Fixed64FieldNumber = 8;
private static readonly pbc::MapField<ulong, ulong>.Codec _map_mapFixed64Fixed64_codec
= new pbc::MapField<ulong, ulong>.Codec(pb::FieldCodec.ForFixed64(9), pb::FieldCodec.ForFixed64(17), 66);
private readonly pbc::MapField<ulong, ulong> mapFixed64Fixed64_ = new pbc::MapField<ulong, ulong>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<ulong, ulong> MapFixed64Fixed64 {
get { return mapFixed64Fixed64_; }
}
/// <summary>Field number for the "map_sfixed32_sfixed32" field.</summary>
public const int MapSfixed32Sfixed32FieldNumber = 9;
private static readonly pbc::MapField<int, int>.Codec _map_mapSfixed32Sfixed32_codec
= new pbc::MapField<int, int>.Codec(pb::FieldCodec.ForSFixed32(13), pb::FieldCodec.ForSFixed32(21), 74);
private readonly pbc::MapField<int, int> mapSfixed32Sfixed32_ = new pbc::MapField<int, int>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, int> MapSfixed32Sfixed32 {
get { return mapSfixed32Sfixed32_; }
}
/// <summary>Field number for the "map_sfixed64_sfixed64" field.</summary>
public const int MapSfixed64Sfixed64FieldNumber = 10;
private static readonly pbc::MapField<long, long>.Codec _map_mapSfixed64Sfixed64_codec
= new pbc::MapField<long, long>.Codec(pb::FieldCodec.ForSFixed64(9), pb::FieldCodec.ForSFixed64(17), 82);
private readonly pbc::MapField<long, long> mapSfixed64Sfixed64_ = new pbc::MapField<long, long>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<long, long> MapSfixed64Sfixed64 {
get { return mapSfixed64Sfixed64_; }
}
/// <summary>Field number for the "map_int32_float" field.</summary>
public const int MapInt32FloatFieldNumber = 11;
private static readonly pbc::MapField<int, float>.Codec _map_mapInt32Float_codec
= new pbc::MapField<int, float>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForFloat(21), 90);
private readonly pbc::MapField<int, float> mapInt32Float_ = new pbc::MapField<int, float>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, float> MapInt32Float {
get { return mapInt32Float_; }
}
/// <summary>Field number for the "map_int32_double" field.</summary>
public const int MapInt32DoubleFieldNumber = 12;
private static readonly pbc::MapField<int, double>.Codec _map_mapInt32Double_codec
= new pbc::MapField<int, double>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForDouble(17), 98);
private readonly pbc::MapField<int, double> mapInt32Double_ = new pbc::MapField<int, double>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, double> MapInt32Double {
get { return mapInt32Double_; }
}
/// <summary>Field number for the "map_bool_bool" field.</summary>
public const int MapBoolBoolFieldNumber = 13;
private static readonly pbc::MapField<bool, bool>.Codec _map_mapBoolBool_codec
= new pbc::MapField<bool, bool>.Codec(pb::FieldCodec.ForBool(8), pb::FieldCodec.ForBool(16), 106);
private readonly pbc::MapField<bool, bool> mapBoolBool_ = new pbc::MapField<bool, bool>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<bool, bool> MapBoolBool {
get { return mapBoolBool_; }
}
/// <summary>Field number for the "map_int32_enum" field.</summary>
public const int MapInt32EnumFieldNumber = 14;
private static readonly pbc::MapField<int, global::Google.Protobuf.TestProtos.MapEnum>.Codec _map_mapInt32Enum_codec
= new pbc::MapField<int, global::Google.Protobuf.TestProtos.MapEnum>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForEnum(16, x => (int) x, x => (global::Google.Protobuf.TestProtos.MapEnum) x), 114);
private readonly pbc::MapField<int, global::Google.Protobuf.TestProtos.MapEnum> mapInt32Enum_ = new pbc::MapField<int, global::Google.Protobuf.TestProtos.MapEnum>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, global::Google.Protobuf.TestProtos.MapEnum> MapInt32Enum {
get { return mapInt32Enum_; }
}
/// <summary>Field number for the "map_int32_foreign_message" field.</summary>
public const int MapInt32ForeignMessageFieldNumber = 15;
private static readonly pbc::MapField<int, global::Google.Protobuf.TestProtos.ForeignMessage>.Codec _map_mapInt32ForeignMessage_codec
= new pbc::MapField<int, global::Google.Protobuf.TestProtos.ForeignMessage>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.TestProtos.ForeignMessage.Parser), 122);
private readonly pbc::MapField<int, global::Google.Protobuf.TestProtos.ForeignMessage> mapInt32ForeignMessage_ = new pbc::MapField<int, global::Google.Protobuf.TestProtos.ForeignMessage>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, global::Google.Protobuf.TestProtos.ForeignMessage> MapInt32ForeignMessage {
get { return mapInt32ForeignMessage_; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as TestArenaMap);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(TestArenaMap other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!MapInt32Int32.Equals(other.MapInt32Int32)) return false;
if (!MapInt64Int64.Equals(other.MapInt64Int64)) return false;
if (!MapUint32Uint32.Equals(other.MapUint32Uint32)) return false;
if (!MapUint64Uint64.Equals(other.MapUint64Uint64)) return false;
if (!MapSint32Sint32.Equals(other.MapSint32Sint32)) return false;
if (!MapSint64Sint64.Equals(other.MapSint64Sint64)) return false;
if (!MapFixed32Fixed32.Equals(other.MapFixed32Fixed32)) return false;
if (!MapFixed64Fixed64.Equals(other.MapFixed64Fixed64)) return false;
if (!MapSfixed32Sfixed32.Equals(other.MapSfixed32Sfixed32)) return false;
if (!MapSfixed64Sfixed64.Equals(other.MapSfixed64Sfixed64)) return false;
if (!MapInt32Float.Equals(other.MapInt32Float)) return false;
if (!MapInt32Double.Equals(other.MapInt32Double)) return false;
if (!MapBoolBool.Equals(other.MapBoolBool)) return false;
if (!MapInt32Enum.Equals(other.MapInt32Enum)) return false;
if (!MapInt32ForeignMessage.Equals(other.MapInt32ForeignMessage)) return false;
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
hash ^= MapInt32Int32.GetHashCode();
hash ^= MapInt64Int64.GetHashCode();
hash ^= MapUint32Uint32.GetHashCode();
hash ^= MapUint64Uint64.GetHashCode();
hash ^= MapSint32Sint32.GetHashCode();
hash ^= MapSint64Sint64.GetHashCode();
hash ^= MapFixed32Fixed32.GetHashCode();
hash ^= MapFixed64Fixed64.GetHashCode();
hash ^= MapSfixed32Sfixed32.GetHashCode();
hash ^= MapSfixed64Sfixed64.GetHashCode();
hash ^= MapInt32Float.GetHashCode();
hash ^= MapInt32Double.GetHashCode();
hash ^= MapBoolBool.GetHashCode();
hash ^= MapInt32Enum.GetHashCode();
hash ^= MapInt32ForeignMessage.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
mapInt32Int32_.WriteTo(output, _map_mapInt32Int32_codec);
mapInt64Int64_.WriteTo(output, _map_mapInt64Int64_codec);
mapUint32Uint32_.WriteTo(output, _map_mapUint32Uint32_codec);
mapUint64Uint64_.WriteTo(output, _map_mapUint64Uint64_codec);
mapSint32Sint32_.WriteTo(output, _map_mapSint32Sint32_codec);
mapSint64Sint64_.WriteTo(output, _map_mapSint64Sint64_codec);
mapFixed32Fixed32_.WriteTo(output, _map_mapFixed32Fixed32_codec);
mapFixed64Fixed64_.WriteTo(output, _map_mapFixed64Fixed64_codec);
mapSfixed32Sfixed32_.WriteTo(output, _map_mapSfixed32Sfixed32_codec);
mapSfixed64Sfixed64_.WriteTo(output, _map_mapSfixed64Sfixed64_codec);
mapInt32Float_.WriteTo(output, _map_mapInt32Float_codec);
mapInt32Double_.WriteTo(output, _map_mapInt32Double_codec);
mapBoolBool_.WriteTo(output, _map_mapBoolBool_codec);
mapInt32Enum_.WriteTo(output, _map_mapInt32Enum_codec);
mapInt32ForeignMessage_.WriteTo(output, _map_mapInt32ForeignMessage_codec);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
size += mapInt32Int32_.CalculateSize(_map_mapInt32Int32_codec);
size += mapInt64Int64_.CalculateSize(_map_mapInt64Int64_codec);
size += mapUint32Uint32_.CalculateSize(_map_mapUint32Uint32_codec);
size += mapUint64Uint64_.CalculateSize(_map_mapUint64Uint64_codec);
size += mapSint32Sint32_.CalculateSize(_map_mapSint32Sint32_codec);
size += mapSint64Sint64_.CalculateSize(_map_mapSint64Sint64_codec);
size += mapFixed32Fixed32_.CalculateSize(_map_mapFixed32Fixed32_codec);
size += mapFixed64Fixed64_.CalculateSize(_map_mapFixed64Fixed64_codec);
size += mapSfixed32Sfixed32_.CalculateSize(_map_mapSfixed32Sfixed32_codec);
size += mapSfixed64Sfixed64_.CalculateSize(_map_mapSfixed64Sfixed64_codec);
size += mapInt32Float_.CalculateSize(_map_mapInt32Float_codec);
size += mapInt32Double_.CalculateSize(_map_mapInt32Double_codec);
size += mapBoolBool_.CalculateSize(_map_mapBoolBool_codec);
size += mapInt32Enum_.CalculateSize(_map_mapInt32Enum_codec);
size += mapInt32ForeignMessage_.CalculateSize(_map_mapInt32ForeignMessage_codec);
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(TestArenaMap other) {
if (other == null) {
return;
}
mapInt32Int32_.Add(other.mapInt32Int32_);
mapInt64Int64_.Add(other.mapInt64Int64_);
mapUint32Uint32_.Add(other.mapUint32Uint32_);
mapUint64Uint64_.Add(other.mapUint64Uint64_);
mapSint32Sint32_.Add(other.mapSint32Sint32_);
mapSint64Sint64_.Add(other.mapSint64Sint64_);
mapFixed32Fixed32_.Add(other.mapFixed32Fixed32_);
mapFixed64Fixed64_.Add(other.mapFixed64Fixed64_);
mapSfixed32Sfixed32_.Add(other.mapSfixed32Sfixed32_);
mapSfixed64Sfixed64_.Add(other.mapSfixed64Sfixed64_);
mapInt32Float_.Add(other.mapInt32Float_);
mapInt32Double_.Add(other.mapInt32Double_);
mapBoolBool_.Add(other.mapBoolBool_);
mapInt32Enum_.Add(other.mapInt32Enum_);
mapInt32ForeignMessage_.Add(other.mapInt32ForeignMessage_);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
mapInt32Int32_.AddEntriesFrom(input, _map_mapInt32Int32_codec);
break;
}
case 18: {
mapInt64Int64_.AddEntriesFrom(input, _map_mapInt64Int64_codec);
break;
}
case 26: {
mapUint32Uint32_.AddEntriesFrom(input, _map_mapUint32Uint32_codec);
break;
}
case 34: {
mapUint64Uint64_.AddEntriesFrom(input, _map_mapUint64Uint64_codec);
break;
}
case 42: {
mapSint32Sint32_.AddEntriesFrom(input, _map_mapSint32Sint32_codec);
break;
}
case 50: {
mapSint64Sint64_.AddEntriesFrom(input, _map_mapSint64Sint64_codec);
break;
}
case 58: {
mapFixed32Fixed32_.AddEntriesFrom(input, _map_mapFixed32Fixed32_codec);
break;
}
case 66: {
mapFixed64Fixed64_.AddEntriesFrom(input, _map_mapFixed64Fixed64_codec);
break;
}
case 74: {
mapSfixed32Sfixed32_.AddEntriesFrom(input, _map_mapSfixed32Sfixed32_codec);
break;
}
case 82: {
mapSfixed64Sfixed64_.AddEntriesFrom(input, _map_mapSfixed64Sfixed64_codec);
break;
}
case 90: {
mapInt32Float_.AddEntriesFrom(input, _map_mapInt32Float_codec);
break;
}
case 98: {
mapInt32Double_.AddEntriesFrom(input, _map_mapInt32Double_codec);
break;
}
case 106: {
mapBoolBool_.AddEntriesFrom(input, _map_mapBoolBool_codec);
break;
}
case 114: {
mapInt32Enum_.AddEntriesFrom(input, _map_mapInt32Enum_codec);
break;
}
case 122: {
mapInt32ForeignMessage_.AddEntriesFrom(input, _map_mapInt32ForeignMessage_codec);
break;
}
}
}
}
}
/// <summary>
/// Previously, a message containing an enum called Type could not be used as the
/// value of a map field.
/// </summary>
public sealed partial class MessageContainingEnumCalledType : pb::IMessage<MessageContainingEnumCalledType> {
private static readonly pb::MessageParser<MessageContainingEnumCalledType> _parser = new pb::MessageParser<MessageContainingEnumCalledType>(() => new MessageContainingEnumCalledType());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<MessageContainingEnumCalledType> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[5]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public MessageContainingEnumCalledType() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public MessageContainingEnumCalledType(MessageContainingEnumCalledType other) : this() {
type_ = other.type_.Clone();
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public MessageContainingEnumCalledType Clone() {
return new MessageContainingEnumCalledType(this);
}
/// <summary>Field number for the "type" field.</summary>
public const int TypeFieldNumber = 1;
private static readonly pbc::MapField<int, global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType>.Codec _map_type_codec
= new pbc::MapField<int, global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType.Parser), 10);
private readonly pbc::MapField<int, global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType> type_ = new pbc::MapField<int, global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType> Type {
get { return type_; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as MessageContainingEnumCalledType);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(MessageContainingEnumCalledType other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!Type.Equals(other.Type)) return false;
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
hash ^= Type.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
type_.WriteTo(output, _map_type_codec);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
size += type_.CalculateSize(_map_type_codec);
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(MessageContainingEnumCalledType other) {
if (other == null) {
return;
}
type_.Add(other.type_);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
type_.AddEntriesFrom(input, _map_type_codec);
break;
}
}
}
}
#region Nested types
/// <summary>Container for nested types declared in the MessageContainingEnumCalledType message type.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static partial class Types {
public enum Type {
[pbr::OriginalName("TYPE_FOO")] Foo = 0,
}
}
#endregion
}
/// <summary>
/// Previously, a message could not contain a map field called "entry".
/// </summary>
public sealed partial class MessageContainingMapCalledEntry : pb::IMessage<MessageContainingMapCalledEntry> {
private static readonly pb::MessageParser<MessageContainingMapCalledEntry> _parser = new pb::MessageParser<MessageContainingMapCalledEntry>(() => new MessageContainingMapCalledEntry());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<MessageContainingMapCalledEntry> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[6]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public MessageContainingMapCalledEntry() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public MessageContainingMapCalledEntry(MessageContainingMapCalledEntry other) : this() {
entry_ = other.entry_.Clone();
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public MessageContainingMapCalledEntry Clone() {
return new MessageContainingMapCalledEntry(this);
}
/// <summary>Field number for the "entry" field.</summary>
public const int EntryFieldNumber = 1;
private static readonly pbc::MapField<int, int>.Codec _map_entry_codec
= new pbc::MapField<int, int>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForInt32(16), 10);
private readonly pbc::MapField<int, int> entry_ = new pbc::MapField<int, int>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<int, int> Entry {
get { return entry_; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as MessageContainingMapCalledEntry);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(MessageContainingMapCalledEntry other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!Entry.Equals(other.Entry)) return false;
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
hash ^= Entry.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
entry_.WriteTo(output, _map_entry_codec);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
size += entry_.CalculateSize(_map_entry_codec);
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(MessageContainingMapCalledEntry other) {
if (other == null) {
return;
}
entry_.Add(other.entry_);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
entry_.AddEntriesFrom(input, _map_entry_codec);
break;
}
}
}
}
}
#endregion
}
#endregion Designer generated code
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
////////////////////////////////////////////////////////////////////////////
//
//
//
// Purpose: This class implements a set of methods for comparing
// strings.
//
//
////////////////////////////////////////////////////////////////////////////
using System.Reflection;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Runtime.Serialization;
namespace System.Globalization
{
[Flags]
public enum CompareOptions
{
None = 0x00000000,
IgnoreCase = 0x00000001,
IgnoreNonSpace = 0x00000002,
IgnoreSymbols = 0x00000004,
IgnoreKanaType = 0x00000008, // ignore kanatype
IgnoreWidth = 0x00000010, // ignore width
OrdinalIgnoreCase = 0x10000000, // This flag can not be used with other flags.
StringSort = 0x20000000, // use string sort method
Ordinal = 0x40000000, // This flag can not be used with other flags.
}
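// Illustrative example (not part of the original source): most of these flags can be combined,
// but Ordinal and OrdinalIgnoreCase must be used on their own, as the Compare overloads below enforce.
//
//     CompareOptions ok  = CompareOptions.IgnoreCase | CompareOptions.IgnoreKanaType; // accepted
//     CompareOptions bad = CompareOptions.Ordinal | CompareOptions.IgnoreCase;        // Compare() throws ArgumentException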
[Serializable]
[System.Runtime.CompilerServices.TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
public partial class CompareInfo : IDeserializationCallback
{
// Mask used to check if IndexOf()/LastIndexOf()/IsPrefix()/IsPostfix() has the right flags.
private const CompareOptions ValidIndexMaskOffFlags =
~(CompareOptions.IgnoreCase | CompareOptions.IgnoreSymbols | CompareOptions.IgnoreNonSpace |
CompareOptions.IgnoreWidth | CompareOptions.IgnoreKanaType);
// Mask used to check if Compare() has the right flags.
private const CompareOptions ValidCompareMaskOffFlags =
~(CompareOptions.IgnoreCase | CompareOptions.IgnoreSymbols | CompareOptions.IgnoreNonSpace |
CompareOptions.IgnoreWidth | CompareOptions.IgnoreKanaType | CompareOptions.StringSort);
// Mask used to check if GetHashCodeOfString() has the right flags.
private const CompareOptions ValidHashCodeOfStringMaskOffFlags =
~(CompareOptions.IgnoreCase | CompareOptions.IgnoreSymbols | CompareOptions.IgnoreNonSpace |
CompareOptions.IgnoreWidth | CompareOptions.IgnoreKanaType);
// Mask used to check if we have the right flags.
private const CompareOptions ValidSortkeyCtorMaskOffFlags =
~(CompareOptions.IgnoreCase | CompareOptions.IgnoreSymbols | CompareOptions.IgnoreNonSpace |
CompareOptions.IgnoreWidth | CompareOptions.IgnoreKanaType | CompareOptions.StringSort);
//
// CompareInfos have an interesting identity. They are attached to the locale that created them,
// ie: en-US would have an en-US sort. For haw-US (a custom culture), we serialize it as haw-US.
// The interesting part is that since haw-US doesn't have its own sort, it has to point at another
// locale, which is what SCOMPAREINFO does.
[OptionalField(VersionAdded = 2)]
private string m_name; // The name used to construct this CompareInfo. Do not rename (binary serialization)
[NonSerialized]
private string _sortName; // The name that defines our behavior.
[OptionalField(VersionAdded = 3)]
private SortVersion m_SortVersion; // Do not rename (binary serialization)
// _invariantMode is defined for performance reasons, as accessing the instance field is faster than accessing the static property GlobalizationMode.Invariant
[NonSerialized]
private readonly bool _invariantMode = GlobalizationMode.Invariant;
private int culture; // Do not rename (binary serialization). The field's sole purpose is to support Desktop serialization.
internal CompareInfo(CultureInfo culture)
{
m_name = culture._name;
InitSort(culture);
}
/*=================================GetCompareInfo==========================
**Action: Get the CompareInfo constructed from the data table in the specified assembly for the specified culture.
** Warning: The assembly versioning mechanism is dead!
**Returns: The CompareInfo for the specified culture.
**Arguments:
** culture the ID of the culture
** assembly the assembly which contains the sorting table.
**Exceptions:
** ArgumentNullException when the assembly is null
** ArgumentException if culture is invalid.
============================================================================*/
// Assembly constructor should be deprecated, we don't act on the assembly information any more
public static CompareInfo GetCompareInfo(int culture, Assembly assembly)
{
// Parameter checking.
if (assembly == null)
{
throw new ArgumentNullException(nameof(assembly));
}
if (assembly != typeof(Object).Module.Assembly)
{
throw new ArgumentException(SR.Argument_OnlyMscorlib);
}
Contract.EndContractBlock();
return GetCompareInfo(culture);
}
/*=================================GetCompareInfo==========================
**Action: Get the CompareInfo constructed from the data table in the specified assembly for the specified culture.
** The purpose of this method is to provide versioning for CompareInfo tables.
**Returns: The CompareInfo for the specified culture.
**Arguments:
** name the name of the culture
** assembly the assembly which contains the sorting table.
**Exceptions:
** ArgumentNullException when the assembly is null
** ArgumentException if name is invalid.
============================================================================*/
// Assembly constructor should be deprecated, we don't act on the assembly information any more
public static CompareInfo GetCompareInfo(string name, Assembly assembly)
{
if (name == null || assembly == null)
{
throw new ArgumentNullException(name == null ? nameof(name) : nameof(assembly));
}
Contract.EndContractBlock();
if (assembly != typeof(Object).Module.Assembly)
{
throw new ArgumentException(SR.Argument_OnlyMscorlib);
}
return GetCompareInfo(name);
}
/*=================================GetCompareInfo==========================
**Action: Get the CompareInfo for the specified culture.
** This method is provided for ease of integration with NLS-based software.
**Returns: The CompareInfo for the specified culture.
**Arguments:
** culture the ID of the culture.
**Exceptions:
** ArgumentException if culture is invalid.
============================================================================*/
// People really shouldn't be calling LCID versions, no custom support
public static CompareInfo GetCompareInfo(int culture)
{
if (CultureData.IsCustomCultureId(culture))
{
// Customized culture cannot be created by the LCID.
throw new ArgumentException(SR.Argument_CustomCultureCannotBePassedByNumber, nameof(culture));
}
return CultureInfo.GetCultureInfo(culture).CompareInfo;
}
/*=================================GetCompareInfo==========================
**Action: Get the CompareInfo for the specified culture.
**Returns: The CompareInfo for the specified culture.
**Arguments:
** name the name of the culture.
**Exceptions:
** ArgumentException if name is invalid.
============================================================================*/
public static CompareInfo GetCompareInfo(string name)
{
if (name == null)
{
throw new ArgumentNullException(nameof(name));
}
Contract.EndContractBlock();
return CultureInfo.GetCultureInfo(name).CompareInfo;
}
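// Illustrative usage sketch (the culture name "en-US" is an assumption for the example):
//
//     CompareInfo ci = CompareInfo.GetCompareInfo("en-US");
//     // Equivalent to CultureInfo.GetCultureInfo("en-US").CompareInfo, as the code above shows.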
public static unsafe bool IsSortable(char ch)
{
if (GlobalizationMode.Invariant)
{
return true;
}
char *pChar = &ch;
return IsSortable(pChar, 1);
}
public static unsafe bool IsSortable(string text)
{
if (text == null)
{
// A null param is invalid here.
throw new ArgumentNullException(nameof(text));
}
if (text.Length == 0)
{
// A zero length string is not invalid, but it is also not sortable.
return (false);
}
if (GlobalizationMode.Invariant)
{
return true;
}
fixed (char *pChar = text)
{
return IsSortable(pChar, text.Length);
}
}
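// Illustrative examples based on the checks above (the non-invariant result is an assumption,
// since sortability ultimately comes from the underlying sort data):
//
//     CompareInfo.IsSortable("");     // false - zero-length strings are never sortable
//     CompareInfo.IsSortable('a');    // true in invariant mode; typically true otherwise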
[OnDeserializing]
private void OnDeserializing(StreamingContext ctx)
{
m_name = null;
}
void IDeserializationCallback.OnDeserialization(Object sender)
{
OnDeserialized();
}
[OnDeserialized]
private void OnDeserialized(StreamingContext ctx)
{
OnDeserialized();
}
private void OnDeserialized()
{
// If we didn't have a name, use the LCID
if (m_name == null)
{
// From whidbey, didn't have a name
CultureInfo ci = CultureInfo.GetCultureInfo(this.culture);
m_name = ci._name;
}
else
{
InitSort(CultureInfo.GetCultureInfo(m_name));
}
}
[OnSerializing]
private void OnSerializing(StreamingContext ctx)
{
// This is merely for serialization compatibility with Whidbey/Orcas; it can go away when we no longer need that compatibility.
culture = CultureInfo.GetCultureInfo(this.Name).LCID; // This is the lcid of the constructing culture (still have to dereference to get target sort)
Contract.Assert(m_name != null, "CompareInfo.OnSerializing - expected m_name to be set already");
}
///////////////////////////----- Name -----/////////////////////////////////
//
// Returns the name of the culture (well actually, of the sort).
// Very important for providing a non-LCID way of identifying
// what the sort is.
//
// Note that this name isn't dereferenced in case the CompareInfo is a different locale
// which is consistent with the behaviors of earlier versions. (so if you ask for a sort
// and the locale's changed behavior, then you'll get changed behavior, which is like
// what happens for a version update)
//
////////////////////////////////////////////////////////////////////////
public virtual string Name
{
get
{
Debug.Assert(m_name != null, "CompareInfo.Name Expected _name to be set");
if (m_name == "zh-CHT" || m_name == "zh-CHS")
{
return m_name;
}
return _sortName;
}
}
////////////////////////////////////////////////////////////////////////
//
// Compare
//
// Compares the two strings with the given options. Returns 0 if the
// two strings are equal, a number less than 0 if string1 is less
// than string2, and a number greater than 0 if string1 is greater
// than string2.
//
////////////////////////////////////////////////////////////////////////
public virtual int Compare(string string1, string string2)
{
return (Compare(string1, string2, CompareOptions.None));
}
public unsafe virtual int Compare(string string1, string string2, CompareOptions options)
{
if (options == CompareOptions.OrdinalIgnoreCase)
{
return String.Compare(string1, string2, StringComparison.OrdinalIgnoreCase);
}
// Verify the options before we do any real comparison.
if ((options & CompareOptions.Ordinal) != 0)
{
if (options != CompareOptions.Ordinal)
{
throw new ArgumentException(SR.Argument_CompareOptionOrdinal, nameof(options));
}
return String.CompareOrdinal(string1, string2);
}
if ((options & ValidCompareMaskOffFlags) != 0)
{
throw new ArgumentException(SR.Argument_InvalidFlag, nameof(options));
}
//Our paradigm is that null sorts less than any other string and
//that two nulls sort as equal.
if (string1 == null)
{
if (string2 == null)
{
return (0); // Equal
}
return (-1); // null < non-null
}
if (string2 == null)
{
return (1); // non-null > null
}
if (_invariantMode)
{
if ((options & CompareOptions.IgnoreCase) != 0)
return CompareOrdinalIgnoreCase(string1, 0, string1.Length, string2, 0, string2.Length);
return String.CompareOrdinal(string1, string2);
}
return CompareString(string1, 0, string1.Length, string2, 0, string2.Length, options);
}
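// Illustrative usage sketch (the culture name "en-US" is an assumption for the example):
//
//     CompareInfo ci = CompareInfo.GetCompareInfo("en-US");
//     ci.Compare(null, "a");                                  // -1: null sorts before any non-null string
//     ci.Compare(null, null);                                 //  0: two nulls compare equal
//     ci.Compare("abc", "ABC", CompareOptions.IgnoreCase);    //  0 under a case-insensitive comparison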
////////////////////////////////////////////////////////////////////////
//
// Compare
//
// Compares the specified regions of the two strings with the given
// options.
// Returns 0 if the two strings are equal, a number less than 0 if
// string1 is less than string2, and a number greater than 0 if
// string1 is greater than string2.
//
////////////////////////////////////////////////////////////////////////
public unsafe virtual int Compare(string string1, int offset1, int length1, string string2, int offset2, int length2)
{
return Compare(string1, offset1, length1, string2, offset2, length2, 0);
}
public virtual int Compare(string string1, int offset1, string string2, int offset2, CompareOptions options)
{
return Compare(string1, offset1, string1 == null ? 0 : string1.Length - offset1,
string2, offset2, string2 == null ? 0 : string2.Length - offset2, options);
}
public virtual int Compare(string string1, int offset1, string string2, int offset2)
{
return Compare(string1, offset1, string2, offset2, 0);
}
public virtual int Compare(string string1, int offset1, int length1, string string2, int offset2, int length2, CompareOptions options)
{
if (options == CompareOptions.OrdinalIgnoreCase)
{
int result = String.Compare(string1, offset1, string2, offset2, length1 < length2 ? length1 : length2, StringComparison.OrdinalIgnoreCase);
if ((length1 != length2) && result == 0)
return (length1 > length2 ? 1 : -1);
return (result);
}
// Verify inputs
if (length1 < 0 || length2 < 0)
{
throw new ArgumentOutOfRangeException((length1 < 0) ? nameof(length1) : nameof(length2), SR.ArgumentOutOfRange_NeedPosNum);
}
if (offset1 < 0 || offset2 < 0)
{
throw new ArgumentOutOfRangeException((offset1 < 0) ? nameof(offset1) : nameof(offset2), SR.ArgumentOutOfRange_NeedPosNum);
}
if (offset1 > (string1 == null ? 0 : string1.Length) - length1)
{
throw new ArgumentOutOfRangeException(nameof(string1), SR.ArgumentOutOfRange_OffsetLength);
}
if (offset2 > (string2 == null ? 0 : string2.Length) - length2)
{
throw new ArgumentOutOfRangeException(nameof(string2), SR.ArgumentOutOfRange_OffsetLength);
}
if ((options & CompareOptions.Ordinal) != 0)
{
if (options != CompareOptions.Ordinal)
{
throw new ArgumentException(SR.Argument_CompareOptionOrdinal,
nameof(options));
}
}
else if ((options & ValidCompareMaskOffFlags) != 0)
{
throw new ArgumentException(SR.Argument_InvalidFlag, nameof(options));
}
//
// Check for the null case.
//
if (string1 == null)
{
if (string2 == null)
{
return (0);
}
return (-1);
}
if (string2 == null)
{
return (1);
}
if (options == CompareOptions.Ordinal)
{
return CompareOrdinal(string1, offset1, length1,
string2, offset2, length2);
}
if (_invariantMode)
{
if ((options & CompareOptions.IgnoreCase) != 0)
return CompareOrdinalIgnoreCase(string1, offset1, length1, string2, offset2, length2);
return CompareOrdinal(string1, offset1, length1, string2, offset2, length2);
}
return CompareString(string1, offset1, length1,
string2, offset2, length2,
options);
}
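// Illustrative sketch of the region overload above (expected values follow from the argument checks):
//
//     CompareInfo ci = CultureInfo.InvariantCulture.CompareInfo;
//     ci.Compare("xxHello", 2, 5, "Hello", 0, 5);   // compares the "Hello" region with "Hello" => 0
//     // offset/length pairs that run past either string throw ArgumentOutOfRangeException.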
private static int CompareOrdinal(string string1, int offset1, int length1, string string2, int offset2, int length2)
{
int result = String.CompareOrdinal(string1, offset1, string2, offset2,
(length1 < length2 ? length1 : length2));
if ((length1 != length2) && result == 0)
{
return (length1 > length2 ? 1 : -1);
}
return (result);
}
//
// CompareOrdinalIgnoreCase compares two strings ordinally, ignoring case.
// It assumes the strings are ASCII until we hit a non-ASCII character in strA or strB, and then we
// continue the comparison by calling the OS.
//
internal static unsafe int CompareOrdinalIgnoreCase(string strA, int indexA, int lengthA, string strB, int indexB, int lengthB)
{
Debug.Assert(indexA + lengthA <= strA.Length);
Debug.Assert(indexB + lengthB <= strB.Length);
int length = Math.Min(lengthA, lengthB);
int range = length;
fixed (char* ap = strA) fixed (char* bp = strB)
{
char* a = ap + indexA;
char* b = bp + indexB;
// In invariant mode we support the full character range, not only the ASCII characters.
char maxChar = (char) (GlobalizationMode.Invariant ? 0xFFFF : 0x80);
while (length != 0 && (*a <= maxChar) && (*b <= maxChar))
{
int charA = *a;
int charB = *b;
if (charA == charB)
{
a++; b++;
length--;
continue;
}
// uppercase both chars - notice that we need just one compare per char
if ((uint)(charA - 'a') <= (uint)('z' - 'a')) charA -= 0x20;
if ((uint)(charB - 'a') <= (uint)('z' - 'a')) charB -= 0x20;
// Return the (case-insensitive) difference between them.
if (charA != charB)
return charA - charB;
// Next char
a++; b++;
length--;
}
if (length == 0)
return lengthA - lengthB;
Debug.Assert(!GlobalizationMode.Invariant);
range -= length;
return CompareStringOrdinalIgnoreCase(a, lengthA - range, b, lengthB - range);
}
}
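// Illustrative note on the ASCII fast path above: subtracting 0x20 maps 'a'..'z' (0x61..0x7A)
// onto 'A'..'Z' (0x41..0x5A), so for example 'a' (0x61) and 'A' (0x41) compare equal once both
// sides have been uppercased; any character at or above the maxChar cutoff falls through to the OS call.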
////////////////////////////////////////////////////////////////////////
//
// IsPrefix
//
// Determines whether prefix is a prefix of string. If prefix equals
// String.Empty, true is returned.
//
////////////////////////////////////////////////////////////////////////
public virtual bool IsPrefix(string source, string prefix, CompareOptions options)
{
if (source == null || prefix == null)
{
throw new ArgumentNullException((source == null ? nameof(source) : nameof(prefix)),
SR.ArgumentNull_String);
}
Contract.EndContractBlock();
if (prefix.Length == 0)
{
return (true);
}
if (source.Length == 0)
{
return false;
}
if (options == CompareOptions.OrdinalIgnoreCase)
{
return source.StartsWith(prefix, StringComparison.OrdinalIgnoreCase);
}
if (options == CompareOptions.Ordinal)
{
return source.StartsWith(prefix, StringComparison.Ordinal);
}
if ((options & ValidIndexMaskOffFlags) != 0)
{
throw new ArgumentException(SR.Argument_InvalidFlag, nameof(options));
}
if (_invariantMode)
{
return source.StartsWith(prefix, (options & CompareOptions.IgnoreCase) != 0 ? StringComparison.OrdinalIgnoreCase : StringComparison.Ordinal);
}
return StartsWith(source, prefix, options);
}
public virtual bool IsPrefix(string source, string prefix)
{
return (IsPrefix(source, prefix, 0));
}
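// Illustrative examples (the case-insensitive result assumes a typical Latin-script culture):
//
//     CompareInfo ci = CultureInfo.InvariantCulture.CompareInfo;
//     ci.IsPrefix("information", "INFO", CompareOptions.IgnoreCase);  // true
//     ci.IsPrefix("information", "");                                 // true - an empty prefix always matches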
////////////////////////////////////////////////////////////////////////
//
// IsSuffix
//
// Determines whether suffix is a suffix of string. If suffix equals
// String.Empty, true is returned.
//
////////////////////////////////////////////////////////////////////////
public virtual bool IsSuffix(string source, string suffix, CompareOptions options)
{
if (source == null || suffix == null)
{
throw new ArgumentNullException((source == null ? nameof(source) : nameof(suffix)),
SR.ArgumentNull_String);
}
Contract.EndContractBlock();
if (suffix.Length == 0)
{
return (true);
}
if (source.Length == 0)
{
return false;
}
if (options == CompareOptions.OrdinalIgnoreCase)
{
return source.EndsWith(suffix, StringComparison.OrdinalIgnoreCase);
}
if (options == CompareOptions.Ordinal)
{
return source.EndsWith(suffix, StringComparison.Ordinal);
}
if ((options & ValidIndexMaskOffFlags) != 0)
{
throw new ArgumentException(SR.Argument_InvalidFlag, nameof(options));
}
if (_invariantMode)
{
return source.EndsWith(suffix, (options & CompareOptions.IgnoreCase) != 0 ? StringComparison.OrdinalIgnoreCase : StringComparison.Ordinal);
}
return EndsWith(source, suffix, options);
}
public virtual bool IsSuffix(string source, string suffix)
{
return (IsSuffix(source, suffix, 0));
}
////////////////////////////////////////////////////////////////////////
//
// IndexOf
//
// Returns the first index where value is found in source. The
// search starts at startIndex and covers count characters. Returns -1 if
// the specified value is not found. If value equals String.Empty,
// startIndex is returned. Throws ArgumentOutOfRangeException if startIndex
// or count is out of range, and ArgumentNullException if source or value
// is null.
//
////////////////////////////////////////////////////////////////////////
public virtual int IndexOf(string source, char value)
{
if (source == null)
throw new ArgumentNullException(nameof(source));
Contract.EndContractBlock();
return IndexOf(source, value, 0, source.Length, CompareOptions.None);
}
public virtual int IndexOf(string source, string value)
{
if (source == null)
throw new ArgumentNullException(nameof(source));
Contract.EndContractBlock();
return IndexOf(source, value, 0, source.Length, CompareOptions.None);
}
public virtual int IndexOf(string source, char value, CompareOptions options)
{
if (source == null)
throw new ArgumentNullException(nameof(source));
Contract.EndContractBlock();
return IndexOf(source, value, 0, source.Length, options);
}
public virtual int IndexOf(string source, string value, CompareOptions options)
{
if (source == null)
throw new ArgumentNullException(nameof(source));
Contract.EndContractBlock();
return IndexOf(source, value, 0, source.Length, options);
}
public virtual int IndexOf(string source, char value, int startIndex)
{
if (source == null)
throw new ArgumentNullException(nameof(source));
Contract.EndContractBlock();
return IndexOf(source, value, startIndex, source.Length - startIndex, CompareOptions.None);
}
public virtual int IndexOf(string source, string value, int startIndex)
{
if (source == null)
throw new ArgumentNullException(nameof(source));
Contract.EndContractBlock();
return IndexOf(source, value, startIndex, source.Length - startIndex, CompareOptions.None);
}
public virtual int IndexOf(string source, char value, int startIndex, CompareOptions options)
{
if (source == null)
throw new ArgumentNullException(nameof(source));
Contract.EndContractBlock();
return IndexOf(source, value, startIndex, source.Length - startIndex, options);
}
public virtual int IndexOf(string source, string value, int startIndex, CompareOptions options)
{
if (source == null)
throw new ArgumentNullException(nameof(source));
Contract.EndContractBlock();
return IndexOf(source, value, startIndex, source.Length - startIndex, options);
}
public virtual int IndexOf(string source, char value, int startIndex, int count)
{
return IndexOf(source, value, startIndex, count, CompareOptions.None);
}
public virtual int IndexOf(string source, string value, int startIndex, int count)
{
return IndexOf(source, value, startIndex, count, CompareOptions.None);
}
public unsafe virtual int IndexOf(string source, char value, int startIndex, int count, CompareOptions options)
{
// Validate inputs
if (source == null)
throw new ArgumentNullException(nameof(source));
if (startIndex < 0 || startIndex > source.Length)
throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index);
if (count < 0 || startIndex > source.Length - count)
throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count);
Contract.EndContractBlock();
if (options == CompareOptions.OrdinalIgnoreCase)
{
return source.IndexOf(value.ToString(), startIndex, count, StringComparison.OrdinalIgnoreCase);
}
// Validate CompareOptions
// Ordinal can't be selected with other flags
if ((options & ValidIndexMaskOffFlags) != 0 && (options != CompareOptions.Ordinal))
throw new ArgumentException(SR.Argument_InvalidFlag, nameof(options));
if (_invariantMode)
return IndexOfOrdinal(source, new string(value, 1), startIndex, count, ignoreCase: (options & (CompareOptions.IgnoreCase | CompareOptions.OrdinalIgnoreCase)) != 0);
return IndexOfCore(source, new string(value, 1), startIndex, count, options, null);
}
public unsafe virtual int IndexOf(string source, string value, int startIndex, int count, CompareOptions options)
{
// Validate inputs
if (source == null)
throw new ArgumentNullException(nameof(source));
if (value == null)
throw new ArgumentNullException(nameof(value));
if (startIndex > source.Length)
{
throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index);
}
Contract.EndContractBlock();
// In Everett we used to return -1 for an empty string even if startIndex is a negative number, so we keep the same behavior here.
// We return 0 if both source and value are empty strings, for Everett compatibility too.
if (source.Length == 0)
{
if (value.Length == 0)
{
return 0;
}
return -1;
}
if (startIndex < 0)
{
throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index);
}
if (count < 0 || startIndex > source.Length - count)
throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count);
if (options == CompareOptions.OrdinalIgnoreCase)
{
return IndexOfOrdinal(source, value, startIndex, count, ignoreCase: true);
}
// Validate CompareOptions
// Ordinal can't be selected with other flags
if ((options & ValidIndexMaskOffFlags) != 0 && (options != CompareOptions.Ordinal))
throw new ArgumentException(SR.Argument_InvalidFlag, nameof(options));
if (_invariantMode)
return IndexOfOrdinal(source, value, startIndex, count, ignoreCase: (options & (CompareOptions.IgnoreCase | CompareOptions.OrdinalIgnoreCase)) != 0);
return IndexOfCore(source, value, startIndex, count, options, null);
}
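// Illustrative usage sketch of the overload above (expected results shown as comments):
//
//     CompareInfo ci = CultureInfo.InvariantCulture.CompareInfo;
//     ci.IndexOf("Hello world", "WORLD", 0, 11, CompareOptions.OrdinalIgnoreCase);  // 6
//     ci.IndexOf("", "");                                                           // 0 (both empty)
//     ci.IndexOf("", "x");                                                          // -1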
// The following IndexOf overload is mainly used by String.Replace. This overload assumes the parameters are already validated
// and the caller is passing a valid matchLengthPtr pointer.
internal unsafe int IndexOf(string source, string value, int startIndex, int count, CompareOptions options, int* matchLengthPtr)
{
Debug.Assert(source != null);
Debug.Assert(value != null);
Debug.Assert(startIndex >= 0);
Debug.Assert(matchLengthPtr != null);
*matchLengthPtr = 0;
if (source.Length == 0)
{
if (value.Length == 0)
{
return 0;
}
return -1;
}
if (startIndex >= source.Length)
{
return -1;
}
if (options == CompareOptions.OrdinalIgnoreCase)
{
int res = IndexOfOrdinal(source, value, startIndex, count, ignoreCase: true);
if (res >= 0)
{
*matchLengthPtr = value.Length;
}
return res;
}
if (_invariantMode)
{
int res = IndexOfOrdinal(source, value, startIndex, count, ignoreCase: (options & (CompareOptions.IgnoreCase | CompareOptions.OrdinalIgnoreCase)) != 0);
if (res >= 0)
{
*matchLengthPtr = value.Length;
}
return res;
}
return IndexOfCore(source, value, startIndex, count, options, matchLengthPtr);
}
internal int IndexOfOrdinal(string source, string value, int startIndex, int count, bool ignoreCase)
{
if (_invariantMode)
{
return InvariantIndexOf(source, value, startIndex, count, ignoreCase);
}
return IndexOfOrdinalCore(source, value, startIndex, count, ignoreCase);
}
////////////////////////////////////////////////////////////////////////
//
// LastIndexOf
//
// Returns the last index where value is found in source. The
// search starts at startIndex and proceeds backwards over count
// characters. Returns -1 if the specified value is not found. If value
// equals String.Empty, startIndex is returned. Throws
// ArgumentOutOfRangeException if startIndex or count is out of range,
// and ArgumentNullException if source or value is null.
//
////////////////////////////////////////////////////////////////////////
public virtual int LastIndexOf(String source, char value)
{
if (source == null)
throw new ArgumentNullException(nameof(source));
Contract.EndContractBlock();
// Can't start at negative index, so make sure we check for the length == 0 case.
return LastIndexOf(source, value, source.Length - 1, source.Length, CompareOptions.None);
}
public virtual int LastIndexOf(string source, string value)
{
if (source == null)
throw new ArgumentNullException(nameof(source));
Contract.EndContractBlock();
// Can't start at negative index, so make sure we check for the length == 0 case.
return LastIndexOf(source, value, source.Length - 1,
source.Length, CompareOptions.None);
}
public virtual int LastIndexOf(string source, char value, CompareOptions options)
{
if (source == null)
throw new ArgumentNullException(nameof(source));
Contract.EndContractBlock();
// Can't start at negative index, so make sure we check for the length == 0 case.
return LastIndexOf(source, value, source.Length - 1,
source.Length, options);
}
public virtual int LastIndexOf(string source, string value, CompareOptions options)
{
if (source == null)
throw new ArgumentNullException(nameof(source));
Contract.EndContractBlock();
// Can't start at negative index, so make sure we check for the length == 0 case.
return LastIndexOf(source, value, source.Length - 1, source.Length, options);
}
public virtual int LastIndexOf(string source, char value, int startIndex)
{
return LastIndexOf(source, value, startIndex, startIndex + 1, CompareOptions.None);
}
public virtual int LastIndexOf(string source, string value, int startIndex)
{
return LastIndexOf(source, value, startIndex, startIndex + 1, CompareOptions.None);
}
public virtual int LastIndexOf(string source, char value, int startIndex, CompareOptions options)
{
return LastIndexOf(source, value, startIndex, startIndex + 1, options);
}
public virtual int LastIndexOf(string source, string value, int startIndex, CompareOptions options)
{
return LastIndexOf(source, value, startIndex, startIndex + 1, options);
}
public virtual int LastIndexOf(string source, char value, int startIndex, int count)
{
return LastIndexOf(source, value, startIndex, count, CompareOptions.None);
}
public virtual int LastIndexOf(string source, string value, int startIndex, int count)
{
return LastIndexOf(source, value, startIndex, count, CompareOptions.None);
}
public virtual int LastIndexOf(string source, char value, int startIndex, int count, CompareOptions options)
{
// Verify Arguments
if (source == null)
throw new ArgumentNullException(nameof(source));
Contract.EndContractBlock();
// Validate CompareOptions
// Ordinal can't be selected with other flags
if ((options & ValidIndexMaskOffFlags) != 0 &&
(options != CompareOptions.Ordinal) &&
(options != CompareOptions.OrdinalIgnoreCase))
throw new ArgumentException(SR.Argument_InvalidFlag, nameof(options));
// Special case for 0 length input strings
if (source.Length == 0 && (startIndex == -1 || startIndex == 0))
return -1;
// Make sure we're not out of range
if (startIndex < 0 || startIndex > source.Length)
throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index);
// Make sure that we allow startIndex == source.Length
if (startIndex == source.Length)
{
startIndex--;
if (count > 0)
count--;
}
// 2nd half of this also catches when startIndex == MAXINT, so MAXINT - 0 + 1 == -1, which is < 0.
if (count < 0 || startIndex - count + 1 < 0)
throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count);
if (options == CompareOptions.OrdinalIgnoreCase)
{
return source.LastIndexOf(value.ToString(), startIndex, count, StringComparison.OrdinalIgnoreCase);
}
if (_invariantMode)
return InvariantLastIndexOf(source, new string(value, 1), startIndex, count, (options & (CompareOptions.IgnoreCase | CompareOptions.OrdinalIgnoreCase)) != 0);
return LastIndexOfCore(source, value.ToString(), startIndex, count, options);
}
public virtual int LastIndexOf(string source, string value, int startIndex, int count, CompareOptions options)
{
// Verify Arguments
if (source == null)
throw new ArgumentNullException(nameof(source));
if (value == null)
throw new ArgumentNullException(nameof(value));
Contract.EndContractBlock();
// Validate CompareOptions
// Ordinal can't be selected with other flags
if ((options & ValidIndexMaskOffFlags) != 0 &&
(options != CompareOptions.Ordinal) &&
(options != CompareOptions.OrdinalIgnoreCase))
throw new ArgumentException(SR.Argument_InvalidFlag, nameof(options));
// Special case for 0 length input strings
if (source.Length == 0 && (startIndex == -1 || startIndex == 0))
return (value.Length == 0) ? 0 : -1;
// Make sure we're not out of range
if (startIndex < 0 || startIndex > source.Length)
throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index);
// Make sure that we allow startIndex == source.Length
if (startIndex == source.Length)
{
startIndex--;
if (count > 0)
count--;
// If we are looking for nothing, just return the adjusted startIndex
if (value.Length == 0 && count >= 0 && startIndex - count + 1 >= 0)
return startIndex;
}
// The 2nd half of this also catches when startIndex == MAXINT, since MAXINT - 0 + 1 overflows to a negative value, which is < 0.
if (count < 0 || startIndex - count + 1 < 0)
throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count);
if (options == CompareOptions.OrdinalIgnoreCase)
{
return LastIndexOfOrdinal(source, value, startIndex, count, ignoreCase: true);
}
if (_invariantMode)
return InvariantLastIndexOf(source, value, startIndex, count, (options & (CompareOptions.IgnoreCase | CompareOptions.OrdinalIgnoreCase)) != 0);
return LastIndexOfCore(source, value, startIndex, count, options);
}
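// Illustrative examples of the edge-case handling above (values follow directly from the checks in this method):
//   LastIndexOf("", "", 0, 0, CompareOptions.None)     -> 0   (empty value in an empty source)
//   LastIndexOf("abc", "", 3, 4, CompareOptions.None)  -> 2   (startIndex/count are pulled back to the last character, where the empty value is found)
//   LastIndexOf("abc", "d", 2, 3, CompareOptions.None) -> -1  (value not present)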
internal int LastIndexOfOrdinal(string source, string value, int startIndex, int count, bool ignoreCase)
{
if (_invariantMode)
{
return InvariantLastIndexOf(source, value, startIndex, count, ignoreCase);
}
return LastIndexOfOrdinalCore(source, value, startIndex, count, ignoreCase);
}
////////////////////////////////////////////////////////////////////////
//
// GetSortKey
//
// Gets the SortKey for the given string with the given options.
//
////////////////////////////////////////////////////////////////////////
public virtual SortKey GetSortKey(string source, CompareOptions options)
{
if (_invariantMode)
return InvariantCreateSortKey(source, options);
return CreateSortKey(source, options);
}
public virtual SortKey GetSortKey(string source)
{
if (_invariantMode)
return InvariantCreateSortKey(source, CompareOptions.None);
return CreateSortKey(source, CompareOptions.None);
}
////////////////////////////////////////////////////////////////////////
//
// Equals
//
// Implements Object.Equals(). Returns a boolean indicating whether
// or not object refers to the same CompareInfo as the current
// instance.
//
////////////////////////////////////////////////////////////////////////
public override bool Equals(Object value)
{
CompareInfo that = value as CompareInfo;
if (that != null)
{
return this.Name == that.Name;
}
return (false);
}
////////////////////////////////////////////////////////////////////////
//
// GetHashCode
//
// Implements Object.GetHashCode(). Returns the hash code for the
// CompareInfo. The hash code is guaranteed to be the same for
// CompareInfo A and B where A.Equals(B) is true.
//
////////////////////////////////////////////////////////////////////////
public override int GetHashCode()
{
return (this.Name.GetHashCode());
}
////////////////////////////////////////////////////////////////////////
//
// GetHashCodeOfString
//
// This internal method provides the equivalent of creating a SortKey for a string from this
// CompareInfo and then generating a hash code from that key, without requiring callers to
// allocate a SortKey object that would immediately be garbage collected.
//
// The hash code is guaranteed to be the same for strings A and B where A.Equals(B) is true and
// both the CompareInfo and the CompareOptions are the same. If two different CompareInfo objects
// treat a string the same way, this implementation will still treat them differently (the same
// way that SortKey does at the moment).
//
// This method will never be made public itself, but public consumers of it could be created, e.g.:
//
// string.GetHashCode(CultureInfo)
// string.GetHashCode(CompareInfo)
// string.GetHashCode(CultureInfo, CompareOptions)
// string.GetHashCode(CompareInfo, CompareOptions)
// etc.
//
// (the methods above that take a CultureInfo would use CultureInfo.CompareInfo)
//
////////////////////////////////////////////////////////////////////////
internal int GetHashCodeOfString(string source, CompareOptions options)
{
//
// Parameter validation
//
if (null == source)
{
throw new ArgumentNullException(nameof(source));
}
if ((options & ValidHashCodeOfStringMaskOffFlags) != 0)
{
throw new ArgumentException(SR.Argument_InvalidFlag, nameof(options));
}
Contract.EndContractBlock();
return GetHashCodeOfStringCore(source, options);
}
public virtual int GetHashCode(string source, CompareOptions options)
{
if (source == null)
{
throw new ArgumentNullException(nameof(source));
}
if (options == CompareOptions.Ordinal)
{
return source.GetHashCode();
}
if (options == CompareOptions.OrdinalIgnoreCase)
{
return TextInfo.GetHashCodeOrdinalIgnoreCase(source);
}
//
// GetHashCodeOfString performs additional parameter validation; it basically throws when
// Ordinal, OrdinalIgnoreCase or StringSort is specified.
//
return GetHashCodeOfString(source, options);
}
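// Illustrative sketch of the hash contract above (the 'ci' instance below is a hypothetical example):
//
//   CompareInfo ci = CultureInfo.GetCultureInfo("en-US").CompareInfo;
//   int h1 = ci.GetHashCode("hello", CompareOptions.IgnoreCase);
//   int h2 = ci.GetHashCode("HELLO", CompareOptions.IgnoreCase);
//   // h1 == h2 whenever ci.Compare("hello", "HELLO", CompareOptions.IgnoreCase) == 0,
//   // while CompareOptions.Ordinal falls back to the plain string.GetHashCode().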
////////////////////////////////////////////////////////////////////////
//
// ToString
//
// Implements Object.ToString(). Returns a string describing the
// CompareInfo.
//
////////////////////////////////////////////////////////////////////////
public override string ToString()
{
return ("CompareInfo - " + this.Name);
}
public SortVersion Version
{
get
{
if (m_SortVersion == null)
{
if (_invariantMode)
{
m_SortVersion = new SortVersion(0, CultureInfo.LOCALE_INVARIANT, new Guid(0, 0, 0, 0, 0, 0, 0,
(byte) (CultureInfo.LOCALE_INVARIANT >> 24),
(byte) ((CultureInfo.LOCALE_INVARIANT & 0x00FF0000) >> 16),
(byte) ((CultureInfo.LOCALE_INVARIANT & 0x0000FF00) >> 8),
(byte) (CultureInfo.LOCALE_INVARIANT & 0xFF)));
}
else
{
m_SortVersion = GetSortVersion();
}
}
return m_SortVersion;
}
}
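// Illustrative note: LOCALE_INVARIANT is 0x007F, so in invariant mode the sort version GUID built
// above ends in the bytes 0x00, 0x00, 0x00, 0x7F (the LCID split big-endian across the last four
// bytes), i.e. the equivalent of new Guid(0, 0, 0, 0, 0, 0, 0, 0x00, 0x00, 0x00, 0x7F).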
public int LCID
{
get
{
return CultureInfo.GetCultureInfo(Name).LCID;
}
}
}
}
| |
using System;
using System.Collections;
namespace QuickGraph.Algorithms.AllShortestPath
{
using QuickGraph.Concepts;
using QuickGraph.Concepts.Traversals;
using QuickGraph.Concepts.Algorithms;
using QuickGraph.Concepts.Collections;
using QuickGraph.Collections;
using QuickGraph.Algorithms.AllShortestPath.Testers;
using QuickGraph.Algorithms.AllShortestPath.Reducers;
using QuickGraph.Exceptions;
/// <summary>
/// Floyd Warshall All Shortest Path Algorithm
/// </summary>
/// <remarks>
/// </remarks>
public class FloydWarshallAllShortestPathAlgorithm : IAlgorithm
{
private IVertexAndEdgeListGraph visitedGraph;
private IFloydWarshallTester tester;
private Hashtable definedPaths;
/// <summary>
/// Builds the algorithm for the given graph using the specified path tester
/// </summary>
public FloydWarshallAllShortestPathAlgorithm(
IVertexAndEdgeListGraph visitedGraph,
IFloydWarshallTester tester
)
{
if (visitedGraph==null)
throw new ArgumentNullException("visitedGraph");
if (tester==null)
throw new ArgumentNullException("test");
this.visitedGraph = visitedGraph;
this.tester = tester;
this.definedPaths = null;
}
/// <summary>
/// Gets the visited graph
/// </summary>
/// <value>
/// Visited Graph
/// </value>
public IVertexAndEdgeListGraph VisitedGraph
{
get
{
return this.visitedGraph;
}
}
/// <summary>
///
/// </summary>
Object IAlgorithm.VisitedGraph
{
get
{
return this.VisitedGraph;
}
}
/// <summary>
/// Gets the <see cref="IFloydWarshallTester"/> instance
/// </summary>
public IFloydWarshallTester Tester
{
get
{
return this.tester;
}
}
/// <summary>
/// Internal use
/// </summary>
private Hashtable DefinedPaths
{
get
{
return this.definedPaths;
}
}
/// <summary>
/// Raised when initializing a new path
/// </summary>
/// <remarks>
/// </remarks>
public event FloydWarshallEventHandler InitiliazePath;
/// <summary>
/// Raises the <see cref="InitializePath"/> event.
/// </summary>
/// <param name="source">source vertex</param>
/// <param name="target">target vertex</param>
protected virtual void OnInitiliazePath(IVertex source, IVertex target)
{
if (InitiliazePath!=null)
InitiliazePath(this, new FloydWarshallEventArgs(source,target));
}
/// <summary>
///
/// </summary>
public event FloydWarshallEventHandler ProcessPath;
/// <summary>
/// Raises the <see cref="ProcessPath"/> event.
/// </summary>
/// <param name="source">source vertex</param>
/// <param name="target">target vertex</param>
/// <param name="intermediate"></param>
protected virtual void OnProcessPath(IVertex source, IVertex target, IVertex intermediate)
{
if (ProcessPath != null)
ProcessPath(this, new FloydWarshallEventArgs(source, target,intermediate));
}
/// <summary>
/// Raised when a path is reduced
/// </summary>
public event FloydWarshallEventHandler ReducePath;
/// <summary>
/// Raises the <see cref="ReducePath"/> event.
/// </summary>
/// <param name="source"></param>
/// <param name="target"></param>
/// <param name="intermediate"></param>
protected virtual void OnReducePath(IVertex source, IVertex target, IVertex intermediate)
{
if (ReducePath != null)
ReducePath(this, new FloydWarshallEventArgs(source, target,intermediate));
}
/// <summary>
/// Raised when a path is not reduced
/// </summary>
public event FloydWarshallEventHandler NotReducePath;
/// <summary>
/// Raises the <see cref="NotReducePath"/> event.
/// </summary>
/// <param name="source"></param>
/// <param name="target"></param>
/// <param name="intermediate"></param>
protected virtual void OnNotReducePath(IVertex source, IVertex target, IVertex intermediate)
{
if (NotReducePath != null)
NotReducePath(this, new FloydWarshallEventArgs(source, target,intermediate));
}
/// <summary>
/// Compute the All shortest path problem.
/// </summary>
public void Compute()
{
this.definedPaths = new Hashtable();
// initialize distance map
foreach(IVertex i in VisitedGraph.Vertices)
foreach(IVertex j in VisitedGraph.Vertices)
{
if (VisitedGraph.ContainsEdge(i,j))
DefinedPaths.Add( new VertexPair(i,j), null );
OnInitiliazePath(i,j);
}
// iterate
foreach(IVertex k in VisitedGraph.Vertices)
{
foreach(IVertex i in VisitedGraph.Vertices)
{
if (DefinedPaths.Contains(new VertexPair(i,k)))
{
foreach(IVertex j in VisitedGraph.Vertices)
{
OnProcessPath(i,j,k);
bool defkj = DefinedPaths.Contains(new VertexPair(k,j));
bool defij = DefinedPaths.Contains(new VertexPair(i,j));
if (defkj && (defij || Tester.Test(i,j,k)))
{
DefinedPaths[new VertexPair(i,j)]=null;
OnReducePath(i,j,k);
}
else
OnNotReducePath(i,j,k);
}
}
}
}
}
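// Hypothetical usage sketch ('g' and 'tester' are assumed IVertexAndEdgeListGraph and
// IFloydWarshallTester implementations; the handler name is illustrative):
//
//   FloydWarshallAllShortestPathAlgorithm fw = new FloydWarshallAllShortestPathAlgorithm(g, tester);
//   fw.ReducePath += new FloydWarshallEventHandler(OnReduced);  // observe each reduced (source, target) pair
//   fw.Compute();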
/// <summary>
/// Checks the graph for connectivity and negative cycles
/// </summary>
/// <param name="costs">cost distionary</param>
/// <exception cref="NegativeCycleException">graph has negatice cycle.</exception>
/// <exception cref="GraphNotStronglyConnectedException">graph is not strongly connected</exception>
public void CheckConnectivityAndNegativeCycles(IVertexDistanceMatrix costs)
{
foreach(IVertex u in VisitedGraph.Vertices)
{
if( costs!=null && costs.Distance(u,u) < 0 )
throw new NegativeCycleException("Graph has negative cycle");
foreach(IVertex v in VisitedGraph.Vertices)
if(!DefinedPaths.Contains(new VertexPair(u,v)))
throw new Exception("Graph is not strongly connected");
}
}
}
}
| |
//
// System.Net.HttpListenerRequest
//
// Authors:
// Gonzalo Paniagua Javier ([email protected])
// Marek Safar ([email protected])
//
// Copyright (c) 2005 Novell, Inc. (http://www.novell.com)
// Copyright (c) 2011-2012 Xamarin, Inc. (http://xamarin.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections.Specialized;
using System.Globalization;
using System.IO;
using System.Net;
using System.Security.Cryptography.X509Certificates;
using System.Text;
namespace Reactor.Net
{
public sealed class HttpListenerRequest
{
private string[] accept_types;
private Encoding content_encoding;
private long content_length;
private bool cl_set;
private CookieCollection cookies;
private WebHeaderCollection headers;
private string method;
private Stream input_stream;
private Version version;
private NameValueCollection query_string; // check if null is ok, check if read-only, check case sensitivity
private string raw_url;
private Uri url;
private Uri referrer;
private string[] user_languages;
private HttpListenerContext context;
private bool is_chunked;
private bool ka_set;
private bool keep_alive;
private delegate X509Certificate2 GCCDelegate();
private GCCDelegate gcc_delegate;
static byte[] _100continue = Encoding.ASCII.GetBytes("HTTP/1.1 100 Continue\r\n\r\n");
internal HttpListenerRequest(HttpListenerContext context)
{
this.context = context;
headers = new WebHeaderCollection();
version = HttpVersion.Version10;
}
static char[] separators = new char[] { ' ' };
internal void SetRequestLine(string req)
{
string[] parts = req.Split(separators, 3);
if (parts.Length != 3)
{
context.ErrorMessage = "Invalid request line (parts).";
return;
}
method = parts[0];
foreach (char c in method)
{
int ic = (int)c;
if ((ic >= 'A' && ic <= 'Z') ||
(ic > 32 && c < 127 && c != '(' && c != ')' && c != '<' &&
c != '>' && c != '@' && c != ',' && c != ';' &&
c != ':' && c != '\\' && c != '"' && c != '/' && c != '[' &&
c != ']' && c != '?' && c != '=' && c != '{' && c != '}'))
{
continue;
}
context.ErrorMessage = "(Invalid verb)";
return;
}
raw_url = parts[1];
if (parts[2].Length != 8 || !parts[2].StartsWith("HTTP/"))
{
context.ErrorMessage = "Invalid request line (version).";
return;
}
try
{
version = new Version(parts[2].Substring(5));
if (version.Major < 1)
{
throw new Exception();
}
}
catch
{
context.ErrorMessage = "Invalid request line (version).";
return;
}
}
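// Illustrative example of the parsing above:
//   SetRequestLine("GET /index.html HTTP/1.1") sets method = "GET", raw_url = "/index.html"
//   and version = 1.1; a malformed verb or version string records an error on the context instead.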
void CreateQueryString(string query)
{
if (query == null || query.Length == 0)
{
query_string = new NameValueCollection(1);
return;
}
query_string = new NameValueCollection();
if (query[0] == '?')
{
query = query.Substring(1);
}
string[] components = query.Split('&');
foreach (string kv in components)
{
int pos = kv.IndexOf('=');
if (pos == -1)
{
query_string.Add(null, HttpUtility.UrlDecode(kv));
}
else
{
string key = HttpUtility.UrlDecode(kv.Substring(0, pos));
string val = HttpUtility.UrlDecode(kv.Substring(pos + 1));
query_string.Add(key, val);
}
}
}
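// Illustrative example of the parsing above:
//   CreateQueryString("?a=1&b=two%20words&flag") produces
//   { "a" = "1", "b" = "two words", (null) = "flag" } -- a component without '=' is stored under a null key.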
#region Uri
private bool IsPredefinedScheme(string scheme)
{
if (scheme == null || scheme.Length < 3)
{
return false;
}
char c = scheme[0];
if (c == 'h')
{
return (scheme == "http" || scheme == "https");
}
if (c == 'f')
{
return (scheme == "file" || scheme == "ftp");
}
if (c == 'n')
{
c = scheme[1];
if (c == 'e')
{
return (scheme == "news" || scheme == "net.pipe" || scheme == "net.tcp");
}
if (scheme == "nntp")
{
return true;
}
return false;
}
if ((c == 'g' && scheme == "gopher") || (c == 'm' && scheme == "mailto"))
{
return true;
}
return false;
}
internal bool MaybeUri(string s)
{
int p = s.IndexOf(':');
if (p == -1)
{
return false;
}
if (p >= 10)
{
return false;
}
return IsPredefinedScheme(s.Substring(0, p));
}
#endregion
internal void FinishInitialization()
{
string host = UserHostName;
if (version > HttpVersion.Version10 && (host == null || host.Length == 0))
{
context.ErrorMessage = "Invalid host name";
return;
}
string path;
Uri raw_uri = null;
if (MaybeUri(raw_url) && Uri.TryCreate(raw_url, UriKind.Absolute, out raw_uri))
{
path = raw_uri.PathAndQuery;
}
else
{
path = raw_url;
}
if ((host == null || host.Length == 0))
{
host = UserHostAddress;
}
if (raw_uri != null)
{
host = raw_uri.Host;
}
int colon = host.IndexOf(':');
if (colon >= 0)
{
host = host.Substring(0, colon);
}
string base_uri = String.Format("{0}://{1}:{2}", (IsSecureConnection) ? "https" : "http", host, LocalEndPoint.Port);
if (!Uri.TryCreate(base_uri + path, UriKind.Absolute, out url))
{
context.ErrorMessage = "Invalid url: " + base_uri + path;
return;
}
CreateQueryString(url.Query);
if (version >= HttpVersion.Version11)
{
string t_encoding = Headers["Transfer-Encoding"];
is_chunked = (t_encoding != null && String.Compare(t_encoding, "chunked", StringComparison.OrdinalIgnoreCase) == 0);
// 'identity' is not valid!
if (t_encoding != null && !is_chunked)
{
context.Connection.SendError(null, 501);
return;
}
}
if (!is_chunked && !cl_set)
{
if (String.Compare(method, "POST", StringComparison.OrdinalIgnoreCase) == 0 ||
String.Compare(method, "PUT", StringComparison.OrdinalIgnoreCase) == 0)
{
context.Connection.SendError(null, 411);
return;
}
}
if (String.Compare(Headers["Expect"], "100-continue", StringComparison.OrdinalIgnoreCase) == 0)
{
ResponseStream output = context.Connection.GetResponseStream();
output.InternalWrite(_100continue, 0, _100continue.Length);
}
}
internal static string Unquote(String str)
{
int start = str.IndexOf('\"');
int end = str.LastIndexOf('\"');
if (start >= 0 && end > start)
{
str = str.Substring(start + 1, end - start - 1);
}
return str.Trim();
}
internal void AddHeader(string header)
{
int colon = header.IndexOf(':');
if (colon == -1 || colon == 0)
{
context.ErrorMessage = "Bad Request";
context.ErrorStatus = 400;
return;
}
string name = header.Substring(0, colon).Trim();
string val = header.Substring(colon + 1).Trim();
string lower = name.ToLower(CultureInfo.InvariantCulture);
headers.SetInternal(name, val);
switch (lower)
{
case "accept-language":
user_languages = val.Split(','); // yes, only split with a ','
break;
case "accept":
accept_types = val.Split(','); // yes, only split with a ','
break;
case "content-length":
try
{
//TODO: max. content_length?
content_length = Int64.Parse(val.Trim());
if (content_length < 0)
{
context.ErrorMessage = "Invalid Content-Length.";
}
cl_set = true;
}
catch
{
context.ErrorMessage = "Invalid Content-Length.";
}
break;
case "referer":
try
{
referrer = new Uri(val);
}
catch
{
referrer = new Uri("http://someone.is.screwing.with.the.headers.com/");
}
break;
case "cookie":
if (cookies == null)
{
cookies = new CookieCollection();
}
string[] cookieStrings = val.Split(new char[] { ',', ';' });
Cookie current = null;
int version = 0;
foreach (string cookieString in cookieStrings)
{
string str = cookieString.Trim();
if (str.Length == 0)
{
continue;
}
if (str.StartsWith("$Version"))
{
version = Int32.Parse(Unquote(str.Substring(str.IndexOf('=') + 1)));
}
else if (str.StartsWith("$Path"))
{
if (current != null)
{
current.Path = str.Substring(str.IndexOf('=') + 1).Trim();
}
}
else if (str.StartsWith("$Domain"))
{
if (current != null)
{
current.Domain = str.Substring(str.IndexOf('=') + 1).Trim();
}
}
else if (str.StartsWith("$Port"))
{
if (current != null)
{
current.Port = str.Substring(str.IndexOf('=') + 1).Trim();
}
}
else
{
if (current != null)
{
cookies.Add(current);
}
current = new Cookie();
int idx = str.IndexOf('=');
if (idx > 0)
{
current.Name = str.Substring(0, idx).Trim();
current.Value = str.Substring(idx + 1).Trim();
}
else
{
current.Name = str.Trim();
current.Value = String.Empty;
}
current.Version = version;
}
}
if (current != null)
{
cookies.Add(current);
}
break;
}
}
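// Illustrative example of the cookie parsing above: the header value
//   "$Version=1; name=value; $Path=/app; other=1"
// yields two cookies: { Name = "name", Value = "value", Path = "/app", Version = 1 }
// and { Name = "other", Value = "1", Version = 1 }.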
// Returns true if the stream could be reused.
internal bool FlushInput()
{
if (!HasEntityBody)
{
return true;
}
int length = 2048;
if (content_length > 0)
{
length = (int)Math.Min(content_length, (long)length);
}
byte[] bytes = new byte[length];
while (true)
{
// TODO: test if MS has a timeout when doing this
try
{
IAsyncResult ares = InputStream.BeginRead(bytes, 0, length, null, null);
if (!ares.IsCompleted && !ares.AsyncWaitHandle.WaitOne(1000))
{
return false;
}
if (InputStream.EndRead(ares) <= 0)
{
return true;
}
}
catch
{
return false;
}
}
}
public string[] AcceptTypes
{
get { return accept_types; }
}
public int ClientCertificateError
{
get
{
HttpConnection cnc = context.Connection;
if (cnc.ClientCertificate == null)
{
throw new InvalidOperationException("No client certificate");
}
int[] errors = cnc.ClientCertificateErrors;
if (errors != null && errors.Length > 0)
{
return errors[0];
}
return 0;
}
}
public Encoding ContentEncoding
{
get
{
if (content_encoding == null)
{
content_encoding = Encoding.Default;
}
return content_encoding;
}
}
public long ContentLength64
{
get { return content_length; }
}
public string ContentType
{
get { return headers["content-type"]; }
}
public CookieCollection Cookies
{
get
{
// TODO: check if the collection is read-only
if (cookies == null)
{
cookies = new CookieCollection();
}
return cookies;
}
}
public bool HasEntityBody
{
get { return (content_length > 0 || is_chunked); }
}
public NameValueCollection Headers
{
get { return headers; }
}
public string HttpMethod
{
get { return method; }
}
public Stream InputStream
{
get
{
if (input_stream == null)
{
if (is_chunked || content_length > 0)
{
input_stream = context.Connection.GetRequestStream(is_chunked, content_length);
}
else
{
input_stream = Stream.Null;
}
}
return input_stream;
}
}
public bool IsAuthenticated
{
get { return false; }
}
public bool IsLocal
{
get { return IPAddress.IsLoopback(RemoteEndPoint.Address); }
}
public bool IsSecureConnection
{
get { return context.Connection.IsSecure; }
}
public bool KeepAlive
{
get
{
if (ka_set)
{
return keep_alive;
}
ka_set = true;
// 1. Connection header
// 2. Protocol (1.1 == keep-alive by default)
// 3. Keep-Alive header
string cnc = headers["Connection"];
if (!String.IsNullOrEmpty(cnc))
{
keep_alive = (0 == String.Compare(cnc, "keep-alive", StringComparison.OrdinalIgnoreCase));
}
else if (version == HttpVersion.Version11)
{
keep_alive = true;
}
else
{
cnc = headers["keep-alive"];
if (!String.IsNullOrEmpty(cnc))
{
keep_alive = (0 != String.Compare(cnc, "closed", StringComparison.OrdinalIgnoreCase));
}
}
return keep_alive;
}
}
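// Illustrative examples of the precedence above:
//   "Connection: keep-alive"               -> KeepAlive == true   (explicit Connection header wins)
//   HTTP/1.1 with no Connection header     -> KeepAlive == true   (1.1 defaults to keep-alive)
//   HTTP/1.0 with "Keep-Alive: timeout=5"  -> KeepAlive == true   (any Keep-Alive value other than "closed")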
public IPEndPoint LocalEndPoint
{
get
{
return context.Connection.LocalEndPoint;
}
}
public Version ProtocolVersion
{
get
{
return version;
}
}
public NameValueCollection QueryString
{
get
{
return query_string;
}
}
public string RawUrl
{
get
{
return raw_url;
}
}
public IPEndPoint RemoteEndPoint
{
get
{
return context.Connection.RemoteEndPoint;
}
}
public Guid RequestTraceIdentifier
{
get
{
return Guid.Empty;
}
}
public Uri Url
{
get
{
return url;
}
}
public Uri UrlReferrer
{
get
{
return referrer;
}
}
public string UserAgent
{
get
{
return headers["user-agent"];
}
}
public string UserHostAddress
{
get
{
return LocalEndPoint.ToString();
}
}
public string UserHostName
{
get
{
return headers["host"];
}
}
public string[] UserLanguages
{
get
{
return user_languages;
}
}
public IAsyncResult BeginGetClientCertificate(AsyncCallback requestCallback, object state)
{
if (gcc_delegate == null)
{
gcc_delegate = new GCCDelegate(GetClientCertificate);
}
return gcc_delegate.BeginInvoke(requestCallback, state);
}
public X509Certificate2 EndGetClientCertificate(IAsyncResult asyncResult)
{
if (asyncResult == null)
{
throw new ArgumentNullException("asyncResult");
}
if (gcc_delegate == null)
{
throw new InvalidOperationException();
}
return gcc_delegate.EndInvoke(asyncResult);
}
public X509Certificate2 GetClientCertificate()
{
return context.Connection.ClientCertificate;
}
}
}
| |
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
#pragma warning disable 1634, 1691
namespace System.ServiceModel.Dispatcher
{
using System;
using System.Collections;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Reflection;
using System.Runtime;
using System.Runtime.InteropServices;
using System.ServiceModel;
using System.Xml;
// Thread Safety: This class is thread safe
public class QueryStringConverter
{
Hashtable defaultSupportedQueryStringTypes;
// the cache does not have a quota since it is per endpoint and is
// bounded by the number of types in the contract at the endpoint
Hashtable typeConverterCache;
public QueryStringConverter()
{
this.defaultSupportedQueryStringTypes = new Hashtable();
this.defaultSupportedQueryStringTypes.Add(typeof(Byte), null);
this.defaultSupportedQueryStringTypes.Add(typeof(SByte), null);
this.defaultSupportedQueryStringTypes.Add(typeof(Int16), null);
this.defaultSupportedQueryStringTypes.Add(typeof(Int32), null);
this.defaultSupportedQueryStringTypes.Add(typeof(Int64), null);
this.defaultSupportedQueryStringTypes.Add(typeof(UInt16), null);
this.defaultSupportedQueryStringTypes.Add(typeof(UInt32), null);
this.defaultSupportedQueryStringTypes.Add(typeof(UInt64), null);
this.defaultSupportedQueryStringTypes.Add(typeof(Single), null);
this.defaultSupportedQueryStringTypes.Add(typeof(Double), null);
this.defaultSupportedQueryStringTypes.Add(typeof(Boolean), null);
this.defaultSupportedQueryStringTypes.Add(typeof(Char), null);
this.defaultSupportedQueryStringTypes.Add(typeof(Decimal), null);
this.defaultSupportedQueryStringTypes.Add(typeof(String), null);
this.defaultSupportedQueryStringTypes.Add(typeof(Object), null);
this.defaultSupportedQueryStringTypes.Add(typeof(DateTime), null);
this.defaultSupportedQueryStringTypes.Add(typeof(TimeSpan), null);
this.defaultSupportedQueryStringTypes.Add(typeof(byte[]), null);
this.defaultSupportedQueryStringTypes.Add(typeof(Guid), null);
this.defaultSupportedQueryStringTypes.Add(typeof(Uri), null);
this.defaultSupportedQueryStringTypes.Add(typeof(DateTimeOffset), null);
this.typeConverterCache = new Hashtable();
}
public virtual bool CanConvert(Type type)
{
if (this.defaultSupportedQueryStringTypes.ContainsKey(type))
{
return true;
}
// otherwise check if it's an enum
if (typeof(Enum).IsAssignableFrom(type))
{
return true;
}
// check if there's a typeconverter defined on the type
return (GetStringConverter(type) != null);
}
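// Hypothetical usage sketch (the values shown follow the conversions implemented below):
//
//   QueryStringConverter converter = new QueryStringConverter();
//   converter.CanConvert(typeof(Guid));                                    // true
//   object id = converter.ConvertStringToValue(
//       "936da01f-9abd-4d9d-80c7-02af85c822a8", typeof(Guid));             // a Guid value
//   string s = converter.ConvertValueToString(
//       TimeSpan.FromMinutes(1), typeof(TimeSpan));                        // "PT1M" (XML duration form)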
public virtual object ConvertStringToValue(string parameter, Type parameterType)
{
if (parameterType == null)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("parameterType");
}
switch (Type.GetTypeCode(parameterType))
{
case TypeCode.Byte:
return parameter == null ? default(Byte) : XmlConvert.ToByte(parameter);
case TypeCode.SByte:
return parameter == null ? default(SByte) : XmlConvert.ToSByte(parameter);
case TypeCode.Int16:
return parameter == null ? default(Int16) : XmlConvert.ToInt16(parameter);
case TypeCode.Int32:
{
if (typeof(Enum).IsAssignableFrom(parameterType))
{
return Enum.Parse(parameterType, parameter, true);
}
else
{
return parameter == null ? default(Int32) : XmlConvert.ToInt32(parameter);
}
}
case TypeCode.Int64:
return parameter == null ? default(Int64) : XmlConvert.ToInt64(parameter);
case TypeCode.UInt16:
return parameter == null ? default(UInt16) : XmlConvert.ToUInt16(parameter);
case TypeCode.UInt32:
return parameter == null ? default(UInt32) : XmlConvert.ToUInt32(parameter);
case TypeCode.UInt64:
return parameter == null ? default(UInt64) : XmlConvert.ToUInt64(parameter);
case TypeCode.Single:
return parameter == null ? default(Single) : XmlConvert.ToSingle(parameter);
case TypeCode.Double:
return parameter == null ? default(Double) : XmlConvert.ToDouble(parameter);
case TypeCode.Char:
return parameter == null ? default(Char) : XmlConvert.ToChar(parameter);
case TypeCode.Decimal:
return parameter == null ? default(Decimal) : XmlConvert.ToDecimal(parameter);
case TypeCode.Boolean:
return parameter == null ? default(Boolean) : Convert.ToBoolean(parameter, CultureInfo.InvariantCulture);
case TypeCode.String:
return parameter;
case TypeCode.DateTime:
return parameter == null ? default(DateTime) : DateTime.Parse(parameter, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind);
default:
{
if (parameterType == typeof(TimeSpan))
{
// support the XML as well as default way of representing timespans
TimeSpan result;
if (!TimeSpan.TryParse(parameter, out result))
{
result = parameter == null ? default(TimeSpan) : XmlConvert.ToTimeSpan(parameter);
}
return result;
}
else if (parameterType == typeof(Guid))
{
return parameter == null ? default(Guid) : XmlConvert.ToGuid(parameter);
}
else if (parameterType == typeof(DateTimeOffset))
{
return (parameter == null) ? default(DateTimeOffset) : DateTimeOffset.Parse(parameter, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind | DateTimeStyles.AllowWhiteSpaces);
}
else if (parameterType == typeof(byte[]))
{
return (!string.IsNullOrEmpty(parameter)) ? Convert.FromBase64String(parameter) : new byte[] { };
}
else if (parameterType == typeof(Uri))
{
return (!string.IsNullOrEmpty(parameter)) ? new Uri(parameter, UriKind.RelativeOrAbsolute) : null;
}
else if (parameterType == typeof(object))
{
return parameter;
}
else
{
TypeConverter stringConverter = GetStringConverter(parameterType);
if (stringConverter == null)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new NotSupportedException(
SR2.GetString(
SR2.TypeNotSupportedByQueryStringConverter,
parameterType.ToString(), this.GetType().Name)));
}
return stringConverter.ConvertFromInvariantString(parameter);
}
}
}
}
public virtual string ConvertValueToString(object parameter, Type parameterType)
{
if (parameterType == null)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("parameterType");
}
if (parameterType.IsValueType && parameter == null)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("parameter");
}
switch (Type.GetTypeCode(parameterType))
{
case TypeCode.Byte:
return XmlConvert.ToString((Byte) parameter);
case TypeCode.SByte:
return XmlConvert.ToString((SByte) parameter);
case TypeCode.Int16:
return XmlConvert.ToString((Int16) parameter);
case TypeCode.Int32:
{
if (typeof(Enum).IsAssignableFrom(parameterType))
{
return Enum.Format(parameterType, parameter, "G");
}
else
{
return XmlConvert.ToString((int) parameter);
}
}
case TypeCode.Int64:
return XmlConvert.ToString((Int64) parameter);
case TypeCode.UInt16:
return XmlConvert.ToString((UInt16) parameter);
case TypeCode.UInt32:
return XmlConvert.ToString((uint) parameter);
case TypeCode.UInt64:
return XmlConvert.ToString((UInt64) parameter);
case TypeCode.Single:
return XmlConvert.ToString((Single) parameter);
case TypeCode.Double:
return XmlConvert.ToString((double) parameter);
case TypeCode.Char:
return XmlConvert.ToString((char) parameter);
case TypeCode.Decimal:
return XmlConvert.ToString((decimal) parameter);
case TypeCode.Boolean:
return XmlConvert.ToString((bool) parameter);
case TypeCode.String:
return (string) parameter;
case TypeCode.DateTime:
return XmlConvert.ToString((DateTime) parameter, XmlDateTimeSerializationMode.RoundtripKind);
default:
{
if (parameterType == typeof(TimeSpan))
{
return XmlConvert.ToString((TimeSpan) parameter);
}
else if (parameterType == typeof(Guid))
{
return XmlConvert.ToString((Guid) parameter);
}
else if (parameterType == typeof(DateTimeOffset))
{
return XmlConvert.ToString((DateTimeOffset) parameter);
}
else if (parameterType == typeof(byte[]))
{
return (parameter != null) ? Convert.ToBase64String((byte[]) parameter, Base64FormattingOptions.None) : null;
}
else if (parameterType == typeof(Uri) || parameterType == typeof(object))
{
// URI or object
return (parameter != null) ? Convert.ToString(parameter, CultureInfo.InvariantCulture) : null;
}
else
{
TypeConverter stringConverter = GetStringConverter(parameterType);
if (stringConverter == null)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new NotSupportedException(
SR2.GetString(
SR2.TypeNotSupportedByQueryStringConverter,
parameterType.ToString(), this.GetType().Name)));
}
else
{
return stringConverter.ConvertToInvariantString(parameter);
}
}
}
}
}
// Hashtable is safe for multiple readers with a single writer
[SuppressMessage("Reliability", "Reliability104:CaughtAndHandledExceptionsRule", Justification = "The exception is traced in the finally clause")]
TypeConverter GetStringConverter(Type parameterType)
{
if (this.typeConverterCache.ContainsKey(parameterType))
{
return (TypeConverter) this.typeConverterCache[parameterType];
}
TypeConverterAttribute[] typeConverterAttrs = parameterType.GetCustomAttributes(typeof(TypeConverterAttribute), true) as TypeConverterAttribute[];
if (typeConverterAttrs != null)
{
foreach (TypeConverterAttribute converterAttr in typeConverterAttrs)
{
Type converterType = Type.GetType(converterAttr.ConverterTypeName, false, true);
if (converterType != null)
{
TypeConverter converter = null;
Exception handledException = null;
try
{
converter = (TypeConverter) Activator.CreateInstance(converterType);
}
catch (TargetInvocationException e)
{
handledException = e;
}
catch (MemberAccessException e)
{
handledException = e;
}
catch (TypeLoadException e)
{
handledException = e;
}
catch (COMException e)
{
handledException = e;
}
catch (InvalidComObjectException e)
{
handledException = e;
}
finally
{
if (handledException != null)
{
if (Fx.IsFatal(handledException))
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(handledException);
}
DiagnosticUtility.TraceHandledException(handledException, TraceEventType.Warning);
}
}
if (converter == null)
{
continue;
}
if (converter.CanConvertTo(typeof(string)) && converter.CanConvertFrom(typeof(string)))
{
this.typeConverterCache.Add(parameterType, converter);
return converter;
}
}
}
}
return null;
}
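// Hypothetical sketch of how a user type opts in via TypeConverterAttribute
// (the Point/PointConverter names below are illustrative, not real types):
//
//   [TypeConverter(typeof(PointConverter))]
//   public struct Point { public int X; public int Y; }
//
// If PointConverter can convert both to and from string, GetStringConverter caches it and
// CanConvert(typeof(Point)) returns true; conversions then go through
// ConvertFromInvariantString / ConvertToInvariantString.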
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="ToolStripLabel.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
namespace System.Windows.Forms {
using System;
using System.ComponentModel;
using System.Drawing;
using System.Drawing.Design;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Windows.Forms.ButtonInternal;
using System.Security.Permissions;
using System.Security;
using System.Windows.Forms.Design;
/// <include file='doc\ToolStripLabel.uex' path='docs/doc[@for="ToolStripLabel"]/*' />
/// <devdoc>
/// A non-selectable ToolStrip item (a label).
/// </devdoc>
[ToolStripItemDesignerAvailability(ToolStripItemDesignerAvailability.ToolStrip)]
public class ToolStripLabel : ToolStripItem {
private LinkBehavior linkBehavior = LinkBehavior.SystemDefault;
private bool isLink = false, linkVisited = false;
private Color linkColor = Color.Empty;
private Color activeLinkColor = Color.Empty;
private Color visitedLinkColor = Color.Empty;
private Font hoverLinkFont, linkFont;
private Cursor lastCursor;
/// <include file='doc\ToolStripLabel.uex' path='docs/doc[@for="ToolStripLabel.ToolStripLabel"]/*' />
/// <devdoc>
/// A non-selectable ToolStrip item (a label).
/// </devdoc>
public ToolStripLabel() {
}
public ToolStripLabel(string text):base(text,null,null) {
}
public ToolStripLabel(Image image):base(null,image,null) {
}
public ToolStripLabel(string text, Image image):base(text,image,null) {
}
public ToolStripLabel(string text, Image image, bool isLink):this(text,image,isLink, null) {
}
[SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")]
public ToolStripLabel(string text, Image image, bool isLink, EventHandler onClick):this(text,image,isLink,onClick,null) {
}
[SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")]
public ToolStripLabel(string text, Image image, bool isLink, EventHandler onClick, string name) : base(text,image,onClick,name) {
IsLink = isLink;
}
/// <include file='doc\ToolStripLabel.uex' path='docs/doc[@for="ToolStripLabel.CanSelect"]/*' />
public override bool CanSelect {
get { return (IsLink || DesignMode); }
}
[
DefaultValue(false),
SRCategory(SR.CatBehavior),
SRDescription(SR.ToolStripLabelIsLinkDescr)
]
public bool IsLink {
get {
return isLink;
}
set {
if (isLink != value) {
isLink = value;
Invalidate();
}
}
}
[
SRCategory(SR.CatAppearance),
SRDescription(SR.ToolStripLabelActiveLinkColorDescr)
]
public Color ActiveLinkColor {
get {
if (activeLinkColor.IsEmpty) {
return IEActiveLinkColor;
}
else {
return activeLinkColor;
}
}
set {
if (activeLinkColor != value) {
activeLinkColor = value;
Invalidate();
}
}
}
private Color IELinkColor {
get {
return LinkUtilities.IELinkColor;
}
}
private Color IEActiveLinkColor {
get {
return LinkUtilities.IEActiveLinkColor;
}
}
private Color IEVisitedLinkColor {
get {
return LinkUtilities.IEVisitedLinkColor;
}
}
[
DefaultValue(LinkBehavior.SystemDefault),
SRCategory(SR.CatBehavior),
SRDescription(SR.ToolStripLabelLinkBehaviorDescr)
]
public LinkBehavior LinkBehavior {
get {
return linkBehavior;
}
set {
//valid values are 0x0 to 0x3
if (!ClientUtils.IsEnumValid(value, (int)value, (int)LinkBehavior.SystemDefault, (int)LinkBehavior.NeverUnderline))
{
throw new InvalidEnumArgumentException("LinkBehavior", (int)value, typeof(LinkBehavior));
}
if (linkBehavior != value) {
linkBehavior = value;
InvalidateLinkFonts();
Invalidate();
}
}
}
[
SRCategory(SR.CatAppearance),
SRDescription(SR.ToolStripLabelLinkColorDescr)
]
public Color LinkColor {
get {
if (linkColor.IsEmpty) {
return IELinkColor;
}
else {
return linkColor;
}
}
set {
if (linkColor != value) {
linkColor = value;
Invalidate();
}
}
}
[
DefaultValue(false),
SRCategory(SR.CatAppearance),
SRDescription(SR.ToolStripLabelLinkVisitedDescr)
]
public bool LinkVisited {
get {
return linkVisited;
}
set {
if (linkVisited != value) {
linkVisited = value;
Invalidate();
}
}
}
[
SRCategory(SR.CatAppearance),
SRDescription(SR.ToolStripLabelVisitedLinkColorDescr)
]
public Color VisitedLinkColor {
get {
if (visitedLinkColor.IsEmpty) {
return IEVisitedLinkColor;
}
else {
return visitedLinkColor;
}
}
set {
if (visitedLinkColor != value) {
visitedLinkColor = value;
Invalidate();
}
}
}
/// <include file='doc\LinkLabel.uex' path='docs/doc[@for="LinkLabel.InvalidateLinkFonts"]/*' />
/// <devdoc>
/// Invalidates the current set of fonts we use when painting
/// links. The fonts will be recreated when needed.
/// </devdoc>
private void InvalidateLinkFonts() {
if (linkFont != null) {
linkFont.Dispose();
}
if (hoverLinkFont != null && hoverLinkFont != linkFont) {
hoverLinkFont.Dispose();
}
linkFont = null;
hoverLinkFont = null;
}
protected override void OnFontChanged(EventArgs e) {
InvalidateLinkFonts();
base.OnFontChanged(e);
}
protected override void OnMouseEnter(EventArgs e) {
if (IsLink) {
ToolStrip parent = this.Parent;
if (parent != null) {
lastCursor = parent.Cursor;
parent.Cursor = Cursors.Hand;
}
}
base.OnMouseEnter(e);
}
protected override void OnMouseLeave(EventArgs e) {
if (IsLink) {
ToolStrip parent = this.Parent;
if (parent != null) {
parent.Cursor = lastCursor;
}
}
base.OnMouseLeave(e);
}
private void ResetActiveLinkColor()
{
ActiveLinkColor = IEActiveLinkColor;
}
private void ResetLinkColor()
{
LinkColor = IELinkColor;
}
private void ResetVisitedLinkColor()
{
VisitedLinkColor = IEVisitedLinkColor;
}
[EditorBrowsable(EditorBrowsableState.Never)]
private bool ShouldSerializeActiveLinkColor() {
return !activeLinkColor.IsEmpty;
}
[EditorBrowsable(EditorBrowsableState.Never)]
private bool ShouldSerializeLinkColor() {
return !linkColor.IsEmpty;
}
[EditorBrowsable(EditorBrowsableState.Never)]
private bool ShouldSerializeVisitedLinkColor() {
return !visitedLinkColor.IsEmpty;
}
/// <devdoc>
/// Creates an instance of the object that defines how image and text
/// gets laid out in the ToolStripItem
/// </devdoc>
internal override ToolStripItemInternalLayout CreateInternalLayout() {
return new ToolStripLabelLayout(this);
}
[EditorBrowsable(EditorBrowsableState.Advanced)]
protected override AccessibleObject CreateAccessibilityInstance() {
return new ToolStripLabelAccessibleObject(this);
}
/// <include file='doc\ToolStripLabel.uex' path='docs/doc[@for="ToolStripLabel.OnPaint"]/*' />
/// <devdoc>
/// Inheriting classes should override this method to handle this event.
/// </devdoc>
protected override void OnPaint(System.Windows.Forms.PaintEventArgs e) {
if (this.Owner != null) {
ToolStripRenderer renderer = this.Renderer;
renderer.DrawLabelBackground(new ToolStripItemRenderEventArgs(e.Graphics, this));
if ((DisplayStyle & ToolStripItemDisplayStyle.Image) == ToolStripItemDisplayStyle.Image) {
renderer.DrawItemImage(new ToolStripItemImageRenderEventArgs(e.Graphics, this, InternalLayout.ImageRectangle));
}
PaintText(e.Graphics);
}
}
internal void PaintText(Graphics g) {
ToolStripRenderer renderer = this.Renderer;
if ((DisplayStyle & ToolStripItemDisplayStyle.Text) == ToolStripItemDisplayStyle.Text) {
Font font = this.Font;
Color textColor = this.ForeColor;
if (IsLink) {
LinkUtilities.EnsureLinkFonts(font, this.LinkBehavior, ref this.linkFont, ref this.hoverLinkFont);
if (this.Pressed) {
font = hoverLinkFont;
textColor = this.ActiveLinkColor;
}
else if (this.Selected) {
font = hoverLinkFont;
textColor = (this.LinkVisited) ? this.VisitedLinkColor : this.LinkColor;
}
else {
font = linkFont;
textColor = (this.LinkVisited) ? this.VisitedLinkColor : this.LinkColor;
}
}
Rectangle textRect = InternalLayout.TextRectangle;
renderer.DrawItemText(new ToolStripItemTextRenderEventArgs(g, this, this.Text, textRect, textColor, font, InternalLayout.TextFormat));
}
}
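// Illustrative state-to-rendering mapping for links (derived from the branches above):
//   Pressed            -> hoverLinkFont, ActiveLinkColor
//   Selected (hot)     -> hoverLinkFont, VisitedLinkColor if LinkVisited else LinkColor
//   otherwise          -> linkFont,      VisitedLinkColor if LinkVisited else LinkColor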
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1720:AvoidTypeNamesInParameters")] // 'charCode' matches control.cs
[UIPermission(SecurityAction.LinkDemand, Window=UIPermissionWindow.AllWindows)]
protected internal override bool ProcessMnemonic(char charCode) {
// checking IsMnemonic is not necessary - toolstrip does this for us.
if (ParentInternal != null) {
if (!CanSelect) {
ParentInternal.SetFocusUnsafe();
ParentInternal.SelectNextToolStripItem(this, /*forward=*/true);
}
else {
FireEvent(ToolStripItemEventType.Click);
}
return true;
}
return false;
}
[System.Runtime.InteropServices.ComVisible(true)]
[SuppressMessage("Microsoft.Performance", "CA1812:AvoidUninstantiatedInternalClasses")]
internal class ToolStripLabelAccessibleObject : ToolStripItemAccessibleObject {
private ToolStripLabel ownerItem = null;
public ToolStripLabelAccessibleObject(ToolStripLabel ownerItem) : base(ownerItem) {
this.ownerItem = ownerItem;
}
public override string DefaultAction {
get {
if (ownerItem.IsLink) {
return SR.GetString(SR.AccessibleActionClick);
}
else {
return string.Empty;
}
}
}
public override void DoDefaultAction() {
if (ownerItem.IsLink) {
base.DoDefaultAction();
}
}
public override AccessibleRole Role {
get {
AccessibleRole role = Owner.AccessibleRole;
if (role != AccessibleRole.Default) {
return role;
}
return (ownerItem.IsLink) ? AccessibleRole.Link : AccessibleRole.StaticText;
}
}
public override AccessibleStates State {
get {
return base.State | AccessibleStates.ReadOnly;
}
}
}
/// <devdoc>
/// This class performs internal layout for a ToolStripLabel.
/// Its main job is to reuse the common ToolStripItem layout while zeroing out the
/// border size, so the label renders without a button-style border.
/// </devdoc>
private class ToolStripLabelLayout : ToolStripItemInternalLayout {
ToolStripLabel owner;
public ToolStripLabelLayout(ToolStripLabel owner) : base(owner) {
this.owner = owner;
}
protected override ToolStripItemLayoutOptions CommonLayoutOptions() {
ToolStripItemLayoutOptions layoutOptions = base.CommonLayoutOptions();
layoutOptions.borderSize = 0;
return layoutOptions;
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections;
using Xunit;
namespace System.ComponentModel.Tests
{
public class MyTypeDescriptorContext : ITypeDescriptorContext
{
public IContainer Container { get { return null; } }
public object Instance { get { return null; } }
public PropertyDescriptor PropertyDescriptor { get { return null; } }
public bool OnComponentChanging() { return true; }
public void OnComponentChanged() { }
public object GetService(Type serviceType) { return null; }
}
public struct SomeValueType
{
public int a;
}
public enum SomeEnum
{
Add,
Sub,
Mul
}
[Flags]
public enum SomeFlagsEnum
{
Option1 = 1,
Option2 = 2,
Option3 = 4
}
public class FormattableClass : IFormattable
{
public string ToString(string format, IFormatProvider formatProvider)
{
return FormattableClass.Token;
}
public const string Token = "Formatted class.";
}
public class Collection1 : ICollection
{
public void CopyTo(Array array, int index)
{
throw new NotImplementedException();
}
public int Count
{
get { throw new NotImplementedException(); }
}
public bool IsSynchronized
{
get { throw new NotImplementedException(); }
}
public object SyncRoot
{
get { throw new NotImplementedException(); }
}
public IEnumerator GetEnumerator()
{
throw new NotImplementedException();
}
}
public class MyTypeListConverter : TypeListConverter
{
public MyTypeListConverter(Type[] types)
: base(types)
{
}
}
#if FUNCTIONAL_TESTS
[TypeConverter("System.ComponentModel.Tests.BaseClassConverter, System.ComponentModel.TypeConverter.Tests, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null")]
#elif PERFORMANCE_TESTS
[TypeConverter("System.ComponentModel.Tests.BaseClassConverter, System.ComponentModel.TypeConverter.Performance.Tests, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null")]
#else
#error Define FUNCTIONAL_TESTS or PERFORMANCE_TESTS
#endif
public class BaseClass
{
public BaseClass()
{
BaseProperty = 1;
}
public override bool Equals(object other)
{
BaseClass otherBaseClass = other as BaseClass;
if (otherBaseClass == null)
{
return false;
}
if (otherBaseClass.BaseProperty == this.BaseProperty)
{
return true;
}
return base.Equals(other);
}
public override int GetHashCode()
{
return base.GetHashCode();
}
public int BaseProperty;
}
public class BaseClassConverter : TypeConverter
{
public BaseClassConverter(string someString) { throw new InvalidOperationException("This constructor should not be invoked by TypeDescriptor.GetConverter."); }
public BaseClassConverter() { }
public override bool CanConvertFrom(ITypeDescriptorContext context, Type sourceType)
{
if (sourceType == typeof(int))
{
return true;
}
return base.CanConvertFrom(context, sourceType);
}
public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType)
{
if (destinationType == typeof(int))
{
return true;
}
return base.CanConvertTo(context, destinationType);
}
public override object ConvertFrom(ITypeDescriptorContext context, System.Globalization.CultureInfo culture, object value)
{
if (value is int)
{
BaseClass baseClass = new BaseClass();
baseClass.BaseProperty = (int)value;
return baseClass;
}
return base.ConvertFrom(context, culture, value);
}
public override object ConvertTo(ITypeDescriptorContext context, System.Globalization.CultureInfo culture, object value, Type destinationType)
{
if (destinationType == typeof(int))
{
BaseClass baseClass = value as BaseClass;
if (baseClass != null)
{
return baseClass.BaseProperty;
}
}
return base.ConvertTo(context, culture, value, destinationType);
}
}
[TypeConverter("System.ComponentModel.Tests.DerivedClassConverter")]
internal class DerivedClass : BaseClass
{
public DerivedClass()
: base()
{
this.DerivedProperty = 2;
}
public DerivedClass(int i)
: base()
{
this.DerivedProperty = i;
}
public override bool Equals(object other)
{
DerivedClass otherDerivedClass = other as DerivedClass;
if (otherDerivedClass == null)
{
return false;
}
if (otherDerivedClass.DerivedProperty != this.DerivedProperty)
{
return false;
}
return base.Equals(other);
}
public override int GetHashCode()
{
return base.GetHashCode();
}
public int DerivedProperty;
}
internal class DerivedClassConverter : TypeConverter
{
public DerivedClassConverter() { }
public override bool CanConvertFrom(ITypeDescriptorContext context, Type sourceType)
{
if (sourceType == typeof(int))
{
return true;
}
return base.CanConvertFrom(context, sourceType);
}
public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType)
{
if (destinationType == typeof(int))
{
return true;
}
return base.CanConvertTo(context, destinationType);
}
public override object ConvertFrom(ITypeDescriptorContext context, System.Globalization.CultureInfo culture, object value)
{
if (value is int)
{
DerivedClass derived = new DerivedClass();
derived.BaseProperty = (int)value;
derived.DerivedProperty = (int)value;
return derived;
}
return base.ConvertFrom(context, culture, value);
}
public override object ConvertTo(ITypeDescriptorContext context, System.Globalization.CultureInfo culture, object value, Type destinationType)
{
if (destinationType == typeof(int))
{
DerivedClass derived = value as DerivedClass;
if (derived != null)
{
return derived.BaseProperty + derived.DerivedProperty;
}
}
return base.ConvertTo(context, culture, value, destinationType);
}
}
[TypeConverter(typeof(IBaseConverter))]
public interface IBase
{
int InterfaceProperty { get; set; }
}
public interface IDerived : IBase
{
int DerivedInterfaceProperty { get; set; }
}
public class ClassIBase : IBase
{
public ClassIBase()
{
this.InterfaceProperty = 10;
}
public int InterfaceProperty { get; set; }
}
public class ClassIDerived : IDerived
{
public ClassIDerived()
{
this.InterfaceProperty = 20;
this.DerivedInterfaceProperty = this.InterfaceProperty / 2;
}
public int InterfaceProperty { get; set; }
public int DerivedInterfaceProperty { get; set; }
}
public class IBaseConverter : TypeConverter
{
public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType)
{
if (destinationType == typeof(string) || destinationType == typeof(int))
{
return true;
}
return base.CanConvertTo(context, destinationType);
}
public override object ConvertTo(ITypeDescriptorContext context, System.Globalization.CultureInfo culture, object value, Type destinationType)
{
if (destinationType == typeof(string))
{
IBase baseInterface = (IBase)value;
return "InterfaceProperty = " + baseInterface.InterfaceProperty.ToString();
}
if (destinationType == typeof(int))
{
IBase baseInterface = (IBase)value;
return baseInterface.InterfaceProperty;
}
return base.ConvertTo(context, culture, value, destinationType);
}
}
[TypeConverter("System.ComponentModel.Tests.InvalidConverter")]
internal class ClassWithInvalidConverter : BaseClass
{
}
public class InvalidConverter : TypeConverter
{
public InvalidConverter(string someString)
{
throw new InvalidOperationException("This constructor should not be invoked by TypeDescriptor.GetConverter.");
}
// Default constructor is missing, we expect the following exception when getting a converter:
// System.MissingMethodException: No parameterless constructor defined for this object.
}
// TypeDescriptor should default to the TypeConverter in this case.
public class ClassWithNoConverter
{
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.Scripting.Test;
using Roslyn.Test.Utilities;
using Xunit;
#pragma warning disable RS0003 // Do not directly await a Task
namespace Microsoft.CodeAnalysis.Scripting.CSharp.UnitTests
{
public class HostModel
{
public readonly int Foo;
}
public class InteractiveSessionTests : TestBase
{
private static Assembly s_lazySystemRuntimeAssembly;
private static readonly Assembly SystemRuntimeAssembly = s_lazySystemRuntimeAssembly ?? (s_lazySystemRuntimeAssembly = Assembly.Load(new AssemblyName("System.Runtime, Version=4.0.20.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")));
private static readonly Assembly HostAssembly = typeof(InteractiveSessionTests).GetTypeInfo().Assembly;
// TODO: shouldn't be needed
private static readonly ScriptOptions OptionsWithFacades = ScriptOptions.Default.AddReferences(SystemRuntimeAssembly);
#region Namespaces, Types
[Fact]
public void CompilationChain_NestedTypesClass()
{
var script = CSharpScript.Create(@"
static string outerStr = null;
public static void Foo(string str) { outerStr = str; }
class InnerClass
{
public string innerStr = null;
public void Goo() { Foo(""test""); innerStr = outerStr; }
}
").ContinueWith(@"
InnerClass iC = new InnerClass();
iC.Goo();
").ContinueWith(@"
System.Console.WriteLine(iC.innerStr);
");
using (var redirect = new OutputRedirect(CultureInfo.InvariantCulture))
{
script.RunAsync().Wait();
Assert.Equal("test", redirect.Output.Trim());
}
}
[Fact]
public void CompilationChain_NestedTypesStruct()
{
var script = CSharpScript.Create(@"
static string outerStr = null;
public static void Foo(string str) { outerStr = str; }
struct InnerStruct
{
public string innerStr;
public void Goo() { Foo(""test""); innerStr = outerStr; }
}
").ContinueWith(@"
InnerStruct iS = new InnerStruct();
iS.Goo();
").ContinueWith(@"
System.Console.WriteLine(iS.innerStr);
");
using (var redirect = new OutputRedirect(CultureInfo.InvariantCulture))
{
script.RunAsync().Wait();
Assert.Equal("test", redirect.Output.Trim());
}
}
[Fact]
public void CompilationChain_InterfaceTypes()
{
var script = CSharpScript.Create(@"
interface I1 { int Goo();}
class InnerClass : I1
{
public int Goo() { return 1; }
}").ContinueWith(@"
I1 iC = new InnerClass();
").ContinueWith(@"
iC.Goo()
");
Assert.Equal(1, script.EvaluateAsync().Result);
}
[Fact]
public void ScriptMemberAccessFromNestedClass()
{
var script = CSharpScript.Create(@"
object field;
object Property { get; set; }
void Method() { }
").ContinueWith(@"
class C
{
public void Foo()
{
object f = field;
object p = Property;
Method();
}
}
");
ScriptingTestHelpers.AssertCompilationError(script,
// (6,20): error CS0120: An object reference is required for the non-static field, method, or property 'field'
Diagnostic(ErrorCode.ERR_ObjectRequired, "field").WithArguments("field"),
// (7,20): error CS0120: An object reference is required for the non-static field, method, or property 'Property'
Diagnostic(ErrorCode.ERR_ObjectRequired, "Property").WithArguments("Property"),
// (8,9): error CS0120: An object reference is required for the non-static field, method, or property 'Method()'
Diagnostic(ErrorCode.ERR_ObjectRequired, "Method").WithArguments("Method()"));
}
#region Anonymous Types
[Fact]
public void AnonymousTypes_TopLevel_MultipleSubmissions()
{
var script = CSharpScript.Create(@"
var a = new { f = 1 };
").ContinueWith(@"
var b = new { g = 1 };
").ContinueWith<Array>(@"
var c = new { f = 1 };
var d = new { g = 1 };
new object[] { new[] { a, c }, new[] { b, d } }
");
var result = script.EvaluateAsync().Result;
Assert.Equal(2, result.Length);
Assert.Equal(2, ((Array)result.GetValue(0)).Length);
Assert.Equal(2, ((Array)result.GetValue(1)).Length);
}
[Fact]
public void AnonymousTypes_TopLevel_MultipleSubmissions2()
{
var script = CSharpScript.Create(@"
var a = new { f = 1 };
").ContinueWith(@"
var b = new { g = 1 };
").ContinueWith(@"
var c = new { f = 1 };
var d = new { g = 1 };
object.ReferenceEquals(a.GetType(), c.GetType()).ToString() + "" "" +
object.ReferenceEquals(a.GetType(), b.GetType()).ToString() + "" "" +
object.ReferenceEquals(b.GetType(), d.GetType()).ToString()
");
Assert.Equal("True False True", script.EvaluateAsync().Result.ToString());
}
[WorkItem(543863)]
[Fact]
public void AnonymousTypes_Redefinition()
{
var script = CSharpScript.Create(@"
var x = new { Foo = ""foo"" };
").ContinueWith(@"
var x = new { Foo = ""foo"" };
").ContinueWith(@"
x.Foo
");
var result = script.EvaluateAsync().Result;
Assert.Equal("foo", result);
}
[Fact]
public void AnonymousTypes_TopLevel_Empty()
{
var script = CSharpScript.Create(@"
var a = new { };
").ContinueWith(@"
var b = new { };
").ContinueWith<Array>(@"
var c = new { };
var d = new { };
new object[] { new[] { a, c }, new[] { b, d } }
");
var result = script.EvaluateAsync().Result;
Assert.Equal(2, result.Length);
Assert.Equal(2, ((Array)result.GetValue(0)).Length);
Assert.Equal(2, ((Array)result.GetValue(1)).Length);
}
#endregion
#region Dynamic
[Fact]
public void Dynamic_Expando()
{
var options = OptionsWithFacades.
AddReferences(
typeof(Microsoft.CSharp.RuntimeBinder.RuntimeBinderException).GetTypeInfo().Assembly,
typeof(System.Dynamic.ExpandoObject).GetTypeInfo().Assembly).
AddNamespaces(
"System.Dynamic");
var script = CSharpScript.Create(@"
dynamic expando = new ExpandoObject();
", options).ContinueWith(@"
expando.foo = 1;
").ContinueWith(@"
expando.foo
");
Assert.Equal(1, script.EvaluateAsync().Result);
}
#endregion
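/// <summary>
/// An enum declared in a script without an explicit underlying type defaults to int.
/// </summary>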
[Fact]
public void Enums()
{
var script = CSharpScript.Create(@"
public enum Enum1
{
A, B, C
}
Enum1 E = Enum1.C;
E
");
var e = script.EvaluateAsync().Result;
Assert.True(e.GetType().GetTypeInfo().IsEnum, "Expected enum");
Assert.Equal(typeof(int), Enum.GetUnderlyingType(e.GetType()));
}
#endregion
#region Attributes
[Fact]
public void PInvoke()
{
var source = @"
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
[DllImport(""foo"",
EntryPoint = ""bar"",
CallingConvention = CallingConvention.Cdecl,
CharSet = CharSet.Unicode,
ExactSpelling = true,
PreserveSig = true,
SetLastError = true,
BestFitMapping = true,
ThrowOnUnmappableChar = true)]
public static extern void M();
class C { }
typeof(C)
";
Type c = CSharpScript.EvaluateAsync<Type>(source).Result;
var m = c.DeclaringType.GetTypeInfo().GetDeclaredMethod("M");
Assert.Equal(MethodImplAttributes.PreserveSig, m.MethodImplementationFlags);
// Reflection synthesizes DllImportAttribute
var dllImport = (DllImportAttribute)m.GetCustomAttributes(typeof(DllImportAttribute), inherit: false).Single();
Assert.True(dllImport.BestFitMapping);
Assert.Equal(CallingConvention.Cdecl, dllImport.CallingConvention);
Assert.Equal(CharSet.Unicode, dllImport.CharSet);
Assert.True(dllImport.ExactSpelling);
Assert.True(dllImport.SetLastError);
Assert.True(dllImport.PreserveSig);
Assert.True(dllImport.ThrowOnUnmappableChar);
Assert.Equal("bar", dllImport.EntryPoint);
Assert.Equal("foo", dllImport.Value);
}
#endregion
// extension methods - must be private, can be top level
#region Modifiers and Visibility
[Fact]
public void PrivateTopLevel()
{
var script = CSharpScript.Create<int>(@"
private int foo() { return 1; }
private static int bar() { return 10; }
private static int f = 100;
foo() + bar() + f
");
Assert.Equal(111, script.EvaluateAsync().Result);
script = script.ContinueWith<int>(@"
foo() + bar() + f
");
Assert.Equal(111, script.EvaluateAsync().Result);
script = script.ContinueWith<int>(@"
class C { public static int baz() { return bar() + f; } }
C.baz()
");
Assert.Equal(110, script.EvaluateAsync().Result);
}
[Fact]
public void NestedVisibility()
{
var script = CSharpScript.Create(@"
private class C
{
internal class D
{
internal static int foo() { return 1; }
}
private class E
{
internal static int foo() { return 1; }
}
public class F
{
internal protected static int foo() { return 1; }
}
internal protected class G
{
internal static int foo() { return 1; }
}
}
");
Assert.Equal(1, script.ContinueWith<int>("C.D.foo()").EvaluateAsync().Result);
Assert.Equal(1, script.ContinueWith<int>("C.F.foo()").EvaluateAsync().Result);
Assert.Equal(1, script.ContinueWith<int>("C.G.foo()").EvaluateAsync().Result);
ScriptingTestHelpers.AssertCompilationError(script.ContinueWith<int>(@"C.E.foo()"),
// error CS0122: 'C.E' is inaccessible due to its protection level
Diagnostic(ErrorCode.ERR_BadAccess, "E").WithArguments("C.E"));
}
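/// <summary>
/// Accessibility modifiers are allowed on top-level script fields; the fields remain
/// readable and writable from later submissions regardless of the declared visibility.
/// </summary>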
[Fact]
public void Fields_Visibility()
{
var script = CSharpScript.Create(@"
private int i = 2; // test comment;
public int j = 2;
protected int k = 2;
internal protected int l = 2;
internal int pi = 2;
").ContinueWith(@"
i = i + i;
j = j + j;
k = k + k;
l = l + l;
").ContinueWith(@"
pi = i + j + k + l;
");
Assert.Equal(4, script.ContinueWith<int>("i").EvaluateAsync().Result);
Assert.Equal(4, script.ContinueWith<int>("j").EvaluateAsync().Result);
Assert.Equal(4, script.ContinueWith<int>("k").EvaluateAsync().Result);
Assert.Equal(4, script.ContinueWith<int>("l").EvaluateAsync().Result);
Assert.Equal(16, script.ContinueWith<int>("pi").EvaluateAsync().Result);
}
#endregion
#region Chaining
[Fact]
public void CompilationChain_BasicFields()
{
var script = CSharpScript.Create("var x = 1;").ContinueWith("x");
Assert.Equal(1, script.EvaluateAsync().Result);
}
[Fact]
public void CompilationChain_GlobalNamespaceAndUsings()
{
var result =
CSharpScript.Create("using InteractiveFixtures.C;", OptionsWithFacades.AddReferences(HostAssembly)).
ContinueWith("using InteractiveFixtures.C;").
ContinueWith("System.Environment.ProcessorCount").
EvaluateAsync().Result;
Assert.Equal(Environment.ProcessorCount, result);
}
[Fact]
public void CompilationChain_CurrentSubmissionUsings()
{
var s0 = CSharpScript.RunAsync("", OptionsWithFacades.AddReferences(HostAssembly));
var state = s0.
ContinueWith("class X { public int foo() { return 1; } }").
ContinueWith("class X { public int foo() { return 1; } }").
ContinueWith("using InteractiveFixtures.A;").
ContinueWith("new X().foo()");
Assert.Equal(1, state.Result.ReturnValue);
state =
s0.
ContinueWith("class X { public int foo() { return 1; } }").
ContinueWith(@"
using InteractiveFixtures.A;
new X().foo()
");
Assert.Equal(1, state.Result.ReturnValue);
}
[Fact]
public void CompilationChain_UsingDuplicates()
{
var script = CSharpScript.Create(@"
using System;
using System;
").ContinueWith(@"
using System;
using System;
").ContinueWith(@"
Environment.ProcessorCount
");
Assert.Equal(Environment.ProcessorCount, script.EvaluateAsync().Result);
}
[Fact]
public void CompilationChain_GlobalImports()
{
var options = ScriptOptions.Default.AddNamespaces("System");
var state = CSharpScript.RunAsync("Environment.ProcessorCount", options);
Assert.Equal(Environment.ProcessorCount, state.Result.ReturnValue);
state = state.ContinueWith("Environment.ProcessorCount");
Assert.Equal(Environment.ProcessorCount, state.Result.ReturnValue);
}
[Fact]
public void CompilationChain_SubmissionSlotResize()
{
var state = CSharpScript.RunAsync("");
for (int i = 0; i < 17; i++)
{
state = state.ContinueWith(@"public int i = 1;");
}
using (var redirect = new OutputRedirect(CultureInfo.InvariantCulture))
{
state.ContinueWith(@"System.Console.WriteLine(i);").Wait();
Assert.Equal(1, int.Parse(redirect.Output));
}
}
[Fact]
public void CompilationChain_UsingNotHidingPreviousSubmission()
{
int result1 =
CSharpScript.Create("using System;").
ContinueWith("int Environment = 1;").
ContinueWith<int>("Environment").
EvaluateAsync().Result;
Assert.Equal(1, result1);
int result2 =
CSharpScript.Create("int Environment = 1;").
ContinueWith("using System;").
ContinueWith<int>("Environment").
EvaluateAsync().Result;
Assert.Equal(1, result2);
}
[Fact]
public void CompilationChain_DefinitionHidesGlobal()
{
var result =
CSharpScript.Create("int System = 1;").
ContinueWith("System").
EvaluateAsync().Result;
Assert.Equal(1, result);
}
public class C1
{
public readonly int System = 1;
public readonly int Environment = 2;
}
/// <summary>
/// Symbol declaration in host object model hides global definition.
/// </summary>
[Fact]
public void CompilationChain_HostObjectMembersHidesGlobal()
{
var result =
CSharpScript.RunAsync("System", globals: new C1()).
Result.ReturnValue;
Assert.Equal(1, result);
}
[Fact]
public void CompilationChain_UsingNotHidingHostObjectMembers()
{
var result =
CSharpScript.RunAsync("using System;", globals: new C1()).
ContinueWith("Environment").
Result.ReturnValue;
Assert.Equal(2, result);
}
[Fact]
public void CompilationChain_DefinitionHidesHostObjectMembers()
{
var result =
CSharpScript.RunAsync("int System = 2;", globals: new C1()).
ContinueWith("System").
Result.ReturnValue;
Assert.Equal(2, result);
}
[Fact]
public void Submissions_ExecutionOrder1()
{
var s0 = CSharpScript.Create("int x = 1;");
var s1 = s0.ContinueWith("int y = 2;");
var s2 = s1.ContinueWith<int>("x + y");
Assert.Equal(3, s2.EvaluateAsync().Result);
Assert.Null(s1.EvaluateAsync().Result);
Assert.Null(s0.EvaluateAsync().Result);
Assert.Equal(3, s2.EvaluateAsync().Result);
Assert.Null(s1.EvaluateAsync().Result);
Assert.Null(s0.EvaluateAsync().Result);
Assert.Equal(3, s2.EvaluateAsync().Result);
Assert.Equal(3, s2.EvaluateAsync().Result);
}
[Fact]
public async void Submissions_ExecutionOrder2()
{
var s0 = await CSharpScript.RunAsync("int x = 1;");
Assert.Throws<CompilationErrorException>(() => s0.ContinueWithAsync("invalid$syntax").Result);
var s1 = await s0.ContinueWithAsync("x = 2; x = 10");
Assert.Throws<CompilationErrorException>(() => s1.ContinueWithAsync("invalid$syntax").Result);
Assert.Throws<CompilationErrorException>(() => s1.ContinueWithAsync("x = undefined_symbol").Result);
var s2 = await s1.ContinueWithAsync("int y = 2;");
Assert.Null(s2.ReturnValue);
var s3 = await s2.ContinueWithAsync("x + y");
Assert.Equal(12, s3.ReturnValue);
}
public class HostObjectWithOverrides
{
public override bool Equals(object obj) => true;
public override int GetHashCode() => 1234567;
public override string ToString() => "HostObjectToString impl";
}
[Fact]
public async void ObjectOverrides1()
{
var state0 = await CSharpScript.RunAsync("", OptionsWithFacades, new HostObjectWithOverrides());
var state1 = await state0.ContinueWithAsync<bool>("Equals(null)");
Assert.True(state1.ReturnValue);
var state2 = await state1.ContinueWithAsync<int>("GetHashCode()");
Assert.Equal(1234567, state2.ReturnValue);
var state3 = await state2.ContinueWithAsync<string>("ToString()");
Assert.Equal("HostObjectToString impl", state3.ReturnValue);
}
[Fact]
public async void ObjectOverrides2()
{
var state0 = await CSharpScript.RunAsync("", OptionsWithFacades, new object());
var state1 = await state0.ContinueWithAsync<bool>(@"
object x = 1;
object y = x;
ReferenceEquals(x, y)");
Assert.True(state1.ReturnValue);
var state2 = await state1.ContinueWithAsync<string>("ToString()");
Assert.Equal("System.Object", state2.ReturnValue);
var state3 = await state2.ContinueWithAsync<bool>("Equals(null)");
Assert.False(state3.ReturnValue);
}
[Fact]
public void ObjectOverrides3()
{
var state0 = CSharpScript.RunAsync("", OptionsWithFacades);
var src1 = @"
Equals(null);
GetHashCode();
ToString();
ReferenceEquals(null, null);";
ScriptingTestHelpers.AssertCompilationError(state0, src1,
// (2,1): error CS0103: The name 'Equals' does not exist in the current context
Diagnostic(ErrorCode.ERR_NameNotInContext, "Equals").WithArguments("Equals"),
// (3,1): error CS0103: The name 'GetHashCode' does not exist in the current context
Diagnostic(ErrorCode.ERR_NameNotInContext, "GetHashCode").WithArguments("GetHashCode"),
// (4,1): error CS0103: The name 'ToString' does not exist in the current context
Diagnostic(ErrorCode.ERR_NameNotInContext, "ToString").WithArguments("ToString"),
// (5,1): error CS0103: The name 'ReferenceEquals' does not exist in the current context
Diagnostic(ErrorCode.ERR_NameNotInContext, "ReferenceEquals").WithArguments("ReferenceEquals"));
var src2 = @"
public override string ToString() { return null; }
";
ScriptingTestHelpers.AssertCompilationError(state0, src2,
// (1,24): error CS0115: 'ToString()': no suitable method found to override
Diagnostic(ErrorCode.ERR_OverrideNotExpected, "ToString").WithArguments("ToString()"));
}
#endregion
#region Generics
[Fact, WorkItem(201759)]
public void CompilationChain_GenericTypes()
{
var script = CSharpScript.Create(@"
class InnerClass<T>
{
public int method(int value) { return value + 1; }
public int field = 2;
}").ContinueWith(@"
InnerClass<int> iC = new InnerClass<int>();
").ContinueWith(@"
iC.method(iC.field)
");
Assert.Equal(3, script.EvaluateAsync().Result);
}
[WorkItem(529243)]
[Fact]
public void RecursiveBaseType()
{
CSharpScript.EvaluateAsync(@"
class A<T> { }
class B<T> : A<B<B<T>>> { }
");
}
[WorkItem(5378, "DevDiv_Projects/Roslyn")]
[Fact]
public void CompilationChain_GenericMethods()
{
var s0 = CSharpScript.Create(@"
public int foo<T, R>(T arg) { return 1; }
public static T bar<T>(T i)
{
return i;
}
");
Assert.Equal(1, s0.ContinueWith(@"foo<int, int>(1)").EvaluateAsync().Result);
Assert.Equal(5, s0.ContinueWith(@"bar(5)").EvaluateAsync().Result);
}
/// <summary>
/// Tests that we emit ldftn and ldvirtftn instructions correctly.
/// </summary>
[Fact]
public void CompilationChain_Ldftn()
{
var state = CSharpScript.RunAsync(@"
public class C
{
public static int f() { return 1; }
public int g() { return 10; }
public virtual int h() { return 100; }
public static int gf<T>() { return 2; }
public int gg<T>() { return 20; }
public virtual int gh<T>() { return 200; }
}
");
state = state.ContinueWith(@"
new System.Func<int>(C.f)() +
new System.Func<int>(new C().g)() +
new System.Func<int>(new C().h)()"
);
Assert.Equal(111, state.Result.ReturnValue);
state = state.ContinueWith(@"
new System.Func<int>(C.gf<int>)() +
new System.Func<int>(new C().gg<object>)() +
new System.Func<int>(new C().gh<bool>)()
");
Assert.Equal(222, state.Result.ReturnValue);
}
/// <summary>
/// Tests that we emit ldftn and ldvirtftn instructions correctly.
/// </summary>
[Fact]
public void CompilationChain_Ldftn_GenericType()
{
var state = CSharpScript.RunAsync(@"
public class C<S>
{
public static int f() { return 1; }
public int g() { return 10; }
public virtual int h() { return 100; }
public static int gf<T>() { return 2; }
public int gg<T>() { return 20; }
public virtual int gh<T>() { return 200; }
}
");
state = state.ContinueWith(@"
new System.Func<int>(C<byte>.f)() +
new System.Func<int>(new C<byte>().g)() +
new System.Func<int>(new C<byte>().h)()
");
Assert.Equal(111, state.Result.ReturnValue);
state = state.ContinueWith(@"
new System.Func<int>(C<byte>.gf<int>)() +
new System.Func<int>(new C<byte>().gg<object>)() +
new System.Func<int>(new C<byte>().gh<bool>)()
");
Assert.Equal(222, state.Result.ReturnValue);
}
#endregion
#region Statements and Expressions
[Fact]
public void IfStatement()
{
var result = CSharpScript.EvaluateAsync<int>(@"
using static System.Console;
int x;
if (true)
{
x = 5;
}
else
{
x = 6;
}
x
").Result;
Assert.Equal(5, result);
}
[Fact]
public void ExprStmtParenthesesUsedToOverrideDefaultEval()
{
Assert.Equal(18, CSharpScript.EvaluateAsync<int>("(4 + 5) * 2").Result);
Assert.Equal(1, CSharpScript.EvaluateAsync<long>("6 / (2 * 3)").Result);
}
[WorkItem(5397, "DevDiv_Projects/Roslyn")]
[Fact]
public void TopLevelLambda()
{
var s = CSharpScript.RunAsync(@"
using System;
delegate void TestDelegate(string s);
");
s = s.ContinueWith(@"
TestDelegate testDelB = delegate (string s) { Console.WriteLine(s); };
");
using (var redirect = new OutputRedirect(CultureInfo.InvariantCulture))
{
s.ContinueWith(@"testDelB(""hello"");").Wait();
Assert.Equal("hello", redirect.Output.Trim());
}
}
[Fact]
public void Closure()
{
var f = CSharpScript.EvaluateAsync<Func<int, int>>(@"
int Foo(int arg) { return arg + 1; }
System.Func<int, int> f = (arg) =>
{
return Foo(arg);
};
f
").Result;
Assert.Equal(3, f(2));
}
[Fact]
public void Closure2()
{
var result = CSharpScript.EvaluateAsync<List<string>>(@"
#r ""System.Core""
using System;
using System.Linq;
using System.Collections.Generic;
List<string> result = new List<string>();
string s = ""hello"";
Enumerable.ToList(Enumerable.Range(1, 2)).ForEach(x => result.Add(s));
result
").Result;
AssertEx.Equal(new[] { "hello", "hello" }, result);
}
[Fact]
public void UseDelegateMixStaticAndDynamic()
{
var f = CSharpScript.RunAsync("using System;").
ContinueWith("int Sqr(int x) {return x*x;}").
ContinueWith<Func<int, int>>("new Func<int,int>(Sqr)").Result.ReturnValue;
Assert.Equal(4, f(2));
}
[WorkItem(9229, "DevDiv_Projects/Roslyn")]
[Fact]
public void Arrays()
{
var s = CSharpScript.RunAsync(@"
int[] arr_1 = { 1, 2, 3 };
int[] arr_2 = new int[] { 1, 2, 3 };
int[] arr_3 = new int[5];
").ContinueWith(@"
arr_2[0] = 5;
");
Assert.Equal(3, s.ContinueWith(@"arr_1[2]").Result.ReturnValue);
Assert.Equal(5, s.ContinueWith(@"arr_2[0]").Result.ReturnValue);
Assert.Equal(0, s.ContinueWith(@"arr_3[0]").Result.ReturnValue);
}
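/// <summary>
/// Field initializers and statements execute in declaration order:
/// a = x + y = 1 + 2 = 3, f = g + 1 = 2, z = 4 + f = 6, and a * z = 18.
/// </summary>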
[Fact]
public void FieldInitializers()
{
var result = CSharpScript.EvaluateAsync<List<int>>(@"
using System.Collections.Generic;
static List<int> result = new List<int>();
int b = 2;
int a;
int x = 1, y = b;
static int g = 1;
static int f = g + 1;
a = x + y;
result.Add(a);
int z = 4 + f;
result.Add(z);
result.Add(a * z);
result
").Result;
Assert.Equal(3, result.Count);
Assert.Equal(3, result[0]);
Assert.Equal(6, result[1]);
Assert.Equal(18, result[2]);
}
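/// <summary>
/// Statement blocks may be interleaved with field initializers; each block observes
/// the values of the fields initialized before it (constant = 1, field = 2).
/// </summary>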
[Fact]
public void FieldInitializersWithBlocks()
{
var result = CSharpScript.EvaluateAsync<List<int>>(@"
using System.Collections.Generic;
static List<int> result = new List<int>();
const int constant = 1;
{
int x = constant;
result.Add(x);
}
int field = 2;
{
int x = field;
result.Add(x);
}
result.Add(constant);
result.Add(field);
result
").Result;
Assert.Equal(4, result.Count);
Assert.Equal(1, result[0]);
Assert.Equal(2, result[1]);
Assert.Equal(1, result[2]);
Assert.Equal(2, result[3]);
}
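/// <summary>
/// A lambda created in one submission captures a top-level variable declared in an earlier
/// submission; invoking it later reads and updates that variable (f() returns 1, x becomes 2).
/// </summary>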
[Fact]
public void TestInteractiveClosures()
{
var result = CSharpScript.RunAsync(@"
using System.Collections.Generic;
static List<int> result = new List<int>();").
ContinueWith("int x = 1;").
ContinueWith("System.Func<int> f = () => x++;").
ContinueWith("result.Add(f());").
ContinueWith("result.Add(x);").
ContinueWith<List<int>>("result").Result.ReturnValue;
Assert.Equal(2, result.Count);
Assert.Equal(1, result[0]);
Assert.Equal(2, result[1]);
}
[Fact]
public void ExtensionMethods()
{
var options = ScriptOptions.Default.AddReferences(
typeof(Enumerable).GetTypeInfo().Assembly);
var result = CSharpScript.EvaluateAsync<int>(@"
using System.Linq;
string[] fruit = { ""banana"", ""orange"", ""lime"", ""apple"", ""kiwi"" };
fruit.Skip(1).Where(s => s.Length > 4).Count()", options).Result;
Assert.Equal(2, result);
}
[Fact]
public void ImplicitlyTypedFields()
{
var result = CSharpScript.EvaluateAsync<object[]>(@"
var x = 1;
var y = x;
var z = foo(x);
string foo(int a) { return null; }
int foo(string a) { return 0; }
new object[] { x, y, z }
").Result;
AssertEx.Equal(new object[] { 1, 1, null }, result);
}
/// <summary>
/// Name of PrivateImplementationDetails type needs to be unique across submissions.
/// The compiler should suffix it with the MVID of the current submission module, so we should be fine.
/// </summary>
[WorkItem(949559)]
[WorkItem(540237)]
[WorkItem(9229, "DevDiv_Projects/Roslyn")]
[WorkItem(2721, "https://github.com/dotnet/roslyn/issues/2721")]
[Fact]
public async void PrivateImplementationDetailsType()
{
var result1 = await CSharpScript.EvaluateAsync<int[]>("new int[] { 1,2,3,4 }");
AssertEx.Equal(new[] { 1, 2, 3, 4 }, result1);
var result2 = await CSharpScript.EvaluateAsync<int[]>("new int[] { 1,2,3,4,5 }");
AssertEx.Equal(new[] { 1, 2, 3, 4, 5 }, result2);
var s1 = await CSharpScript.RunAsync<int[]>("new int[] { 1,2,3,4,5,6 }");
AssertEx.Equal(new[] { 1, 2, 3, 4, 5, 6 }, s1.ReturnValue);
var s2 = await s1.ContinueWithAsync<int[]>("new int[] { 1,2,3,4,5,6,7 }");
AssertEx.Equal(new[] { 1, 2, 3, 4, 5, 6, 7 }, s2.ReturnValue);
var s3 = await s2.ContinueWithAsync<int[]>("new int[] { 1,2,3,4,5,6,7,8 }");
AssertEx.Equal(new[] { 1, 2, 3, 4, 5, 6, 7, 8 }, s3.ReturnValue);
}
[Fact]
public void NoAwait()
{
// No await. The return value is Task<int> rather than int.
var result = CSharpScript.EvaluateAsync("System.Threading.Tasks.Task.FromResult(1)").Result;
Assert.Equal(1, ((Task<int>)result).Result);
}
/// <summary>
/// 'await' expression at top-level.
/// </summary>
[Fact]
public void Await()
{
Assert.Equal(2, CSharpScript.EvaluateAsync("await System.Threading.Tasks.Task.FromResult(2)").Result);
}
/// <summary>
/// 'await' in sub-expression.
/// </summary>
[Fact]
public void AwaitSubExpression()
{
Assert.Equal(3, CSharpScript.EvaluateAsync<int>("0 + await System.Threading.Tasks.Task.FromResult(3)").Result);
}
[Fact]
public void AwaitVoid()
{
var task = CSharpScript.EvaluateAsync<object>("await System.Threading.Tasks.Task.Run(() => { })");
Assert.Equal(null, task.Result);
Assert.Equal(TaskStatus.RanToCompletion, task.Status);
}
/// <summary>
/// 'await' in lambda should be ignored.
/// </summary>
[Fact]
public async void AwaitInLambda()
{
var s0 = await CSharpScript.RunAsync(@"
using System;
using System.Threading.Tasks;
static T F<T>(Func<Task<T>> f)
{
return f().Result;
}
static T G<T>(T t, Func<T, Task<T>> f)
{
return f(t).Result;
}");
var s1 = await s0.ContinueWithAsync("F(async () => await Task.FromResult(4))");
Assert.Equal(4, s1.ReturnValue);
var s2 = await s1.ContinueWithAsync("G(5, async x => await Task.FromResult(x))");
Assert.Equal(5, s2.ReturnValue);
}
[Fact]
public void AwaitChain1()
{
var options = ScriptOptions.Default.
AddReferences(typeof(Task).GetTypeInfo().Assembly).
AddNamespaces("System.Threading.Tasks");
var state =
CSharpScript.RunAsync("int i = 0;", options).
ContinueWith("await Task.Delay(1); i++;").
ContinueWith("await Task.Delay(1); i++;").
ContinueWith("await Task.Delay(1); i++;").
ContinueWith("i").
Result;
Assert.Equal(3, state.ReturnValue);
}
[Fact]
public void AwaitChain2()
{
var options = ScriptOptions.Default.
AddReferences(typeof(Task).GetTypeInfo().Assembly).
AddNamespaces("System.Threading.Tasks");
var state =
CSharpScript.Create("int i = 0;", options).
ContinueWith("await Task.Delay(1); i++;").
ContinueWith("await Task.Delay(1); i++;").
RunAsync().
ContinueWith("await Task.Delay(1); i++;").
ContinueWith("i").
Result;
Assert.Equal(3, state.ReturnValue);
}
#endregion
#region References
[Fact]
public void ReferenceDirective_FileWithDependencies()
{
string file1 = Temp.CreateFile().WriteAllBytes(TestResources.MetadataTests.InterfaceAndClass.CSClasses01).Path;
string file2 = Temp.CreateFile().WriteAllBytes(TestResources.MetadataTests.InterfaceAndClass.CSInterfaces01).Path;
// ICSPropImpl in CSClasses01.dll implements ICSProp in CSInterfaces01.dll.
object result = CSharpScript.EvaluateAsync(@"
#r """ + file1 + @"""
#r """ + file2 + @"""
new Metadata.ICSPropImpl()
").Result;
Assert.NotNull(result);
}
#endregion
#region UsingDeclarations
[Fact]
public void UsingAlias()
{
object result = CSharpScript.EvaluateAsync(@"
using D = System.Collections.Generic.Dictionary<string, int>;
D d = new D();
d
").Result;
Assert.True(result is Dictionary<string, int>, "Expected Dictionary<string, int>");
}
[WorkItem(9229, "DevDiv_Projects/Roslyn")]
[Fact]
public void Usings1()
{
var options = ScriptOptions.Default.
AddNamespaces("System", "System.Linq").
AddReferences(typeof(Enumerable).GetTypeInfo().Assembly);
object result = CSharpScript.EvaluateAsync("new int[] { 1, 2, 3 }.First()", options).Result;
Assert.Equal(1, result);
}
[WorkItem(9229, "DevDiv_Projects/Roslyn")]
[Fact]
public void Usings2()
{
var options = ScriptOptions.Default.
AddNamespaces("System", "System.Linq").
AddReferences(typeof(Enumerable).GetTypeInfo().Assembly);
var s1 = CSharpScript.RunAsync("new int[] { 1, 2, 3 }.First()", options);
Assert.Equal(1, s1.Result.ReturnValue);
var s2 = s1.ContinueWith("new List<int>()", options.AddNamespaces("System.Collections.Generic"));
Assert.IsType<List<int>>(s2.Result.ReturnValue);
}
[Fact]
public void AddNamespaces_Errors()
{
// no immediate error, error is reported if the namespace can't be found when compiling:
var options = ScriptOptions.Default.AddNamespaces("?1", "?2");
ScriptingTestHelpers.AssertCompilationError(() => CSharpScript.EvaluateAsync("1", options),
// error CS0246: The type or namespace name '?1' could not be found (are you missing a using directive or an assembly reference?)
Diagnostic(ErrorCode.ERR_SingleTypeNameNotFound).WithArguments("?1"),
// error CS0246: The type or namespace name '?2' could not be found (are you missing a using directive or an assembly reference?)
Diagnostic(ErrorCode.ERR_SingleTypeNameNotFound).WithArguments("?2"));
options = ScriptOptions.Default.AddNamespaces("");
ScriptingTestHelpers.AssertCompilationError(() => CSharpScript.EvaluateAsync("1", options),
// error CS7088: Invalid 'Usings' value: ''.
Diagnostic(ErrorCode.ERR_BadCompilationOptionValue).WithArguments("Usings", ""));
options = ScriptOptions.Default.AddNamespaces(".abc");
ScriptingTestHelpers.AssertCompilationError(() => CSharpScript.EvaluateAsync("1", options),
// error CS7088: Invalid 'Usings' value: '.abc'.
Diagnostic(ErrorCode.ERR_BadCompilationOptionValue).WithArguments("Usings", ".abc"));
options = ScriptOptions.Default.AddNamespaces("a\0bc");
ScriptingTestHelpers.AssertCompilationError(() => CSharpScript.EvaluateAsync("1", options),
// error CS7088: Invalid 'Usings' value: 'a\0bc'.
Diagnostic(ErrorCode.ERR_BadCompilationOptionValue).WithArguments("Usings", "a\0bc"));
}
#endregion
#region Host Object Binding and Conversions
public class C<T>
{
}
[Fact]
public void Submission_HostConversions()
{
Assert.Equal(2, CSharpScript.EvaluateAsync<int>("1+1").Result);
Assert.Equal(null, CSharpScript.EvaluateAsync<string>("null").Result);
try
{
CSharpScript.RunAsync<C<int>>("null");
Assert.True(false, "Expected an exception");
}
catch (CompilationErrorException e)
{
// error CS0400: The type or namespace name 'Microsoft.CodeAnalysis.CSharp.UnitTests.Symbols.Source.InteractiveSessionTests+C`1[[System.Int32, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089]], Roslyn.Compilers.CSharp.Emit.UnitTests, Version=42.42.42.42, Culture=neutral, PublicKeyToken=fc793a00266884fb' could not be found in the global namespace (are you missing an assembly reference?)
Assert.Equal(ErrorCode.ERR_GlobalSingleTypeNameNotFound, (ErrorCode)e.Diagnostics.Single().Code);
// Can't use Verify() because the version number of the test dll is different in the build lab.
}
var options = OptionsWithFacades.AddReferences(HostAssembly);
var cint = CSharpScript.EvaluateAsync<C<int>>("null", options).Result;
Assert.Equal(null, cint);
Assert.Equal(null, CSharpScript.EvaluateAsync<int?>("null", options).Result);
try
{
CSharpScript.RunAsync<int>("null");
Assert.True(false, "Expected an exception");
}
catch (CompilationErrorException e)
{
e.Diagnostics.Verify(
// (1,1): error CS0037: Cannot convert null to 'int' because it is a non-nullable value type
// null
Diagnostic(ErrorCode.ERR_ValueCantBeNull, "null").WithArguments("int"));
}
try
{
CSharpScript.RunAsync<string>("1+1");
Assert.True(false, "Expected an exception");
}
catch (CompilationErrorException e)
{
e.Diagnostics.Verify(
// (1,1): error CS0029: Cannot implicitly convert type 'int' to 'string'
// 1+1
Diagnostic(ErrorCode.ERR_NoImplicitConv, "1+1").WithArguments("int", "string"));
}
}
[Fact]
public void Submission_HostVarianceConversions()
{
var value = CSharpScript.EvaluateAsync<IEnumerable<Exception>>(@"
using System;
using System.Collections.Generic;
new List<ArgumentException>()
").Result;
Assert.Equal(null, value.FirstOrDefault());
}
public class B
{
public int x = 1, w = 4;
}
public class C : B, I
{
public static readonly int StaticField = 123;
public int Y => 2;
public string N { get; set; } = "2";
public int Z() => 3;
public override int GetHashCode() => 123;
}
public interface I
{
string N { get; set; }
int Z();
}
private class PrivateClass : I
{
public string N { get; set; } = null;
public int Z() => 3;
}
public class M<T>
{
private int F() => 3;
public T G() => default(T);
}
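/// <summary>
/// Public members of the host object (globals) bind as top-level names in script code,
/// and a later submission can shadow them with its own declaration (x becomes 20).
/// </summary>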
[Fact]
public void HostObjectBinding_PublicClassMembers()
{
var c = new C();
var s0 = CSharpScript.RunAsync<int>("x + Y + Z()", OptionsWithFacades, globals: c);
Assert.Equal(6, s0.Result.ReturnValue);
var s1 = s0.ContinueWith<int>("x");
Assert.Equal(1, s1.Result.ReturnValue);
var s2 = s1.ContinueWith<int>("int x = 20;");
var s3 = s2.ContinueWith<int>("x");
Assert.Equal(20, s3.Result.ReturnValue);
}
[Fact]
public void HostObjectBinding_PublicGenericClassMembers()
{
var m = new M<string>();
var result = CSharpScript.EvaluateAsync<string>("G()", OptionsWithFacades, globals: m);
Assert.Equal(null, result.Result);
}
[Fact]
public async void HostObjectBinding_Interface()
{
var c = new C();
var s0 = await CSharpScript.RunAsync<int>("Z()", OptionsWithFacades, c, typeof(I));
Assert.Equal(3, s0.ReturnValue);
ScriptingTestHelpers.AssertCompilationError(s0, @"x + Y",
// The name '{0}' does not exist in the current context
Diagnostic(ErrorCode.ERR_NameNotInContext, "x").WithArguments("x"),
Diagnostic(ErrorCode.ERR_NameNotInContext, "Y").WithArguments("Y"));
var s1 = await s0.ContinueWithAsync<string>("N");
Assert.Equal("2", s1.ReturnValue);
}
[Fact]
public void HostObjectBinding_PrivateClass()
{
var c = new PrivateClass();
ScriptingTestHelpers.AssertCompilationError(() => CSharpScript.EvaluateAsync("Z()", OptionsWithFacades, c),
// (1,1): error CS0122: '<Fully Qualified Name of PrivateClass>.Z()' is inaccessible due to its protection level
Diagnostic(ErrorCode.ERR_BadAccess, "Z").WithArguments(typeof(PrivateClass).FullName.Replace("+", ".") + ".Z()"));
}
[Fact]
public void HostObjectBinding_PrivateMembers()
{
object c = new M<int>();
ScriptingTestHelpers.AssertCompilationError(() => CSharpScript.EvaluateAsync("Z()", OptionsWithFacades, c),
// (1,1): error CS0103: The name 'z' does not exist in the current context
Diagnostic(ErrorCode.ERR_NameNotInContext, "Z").WithArguments("Z"));
}
[Fact]
public void HostObjectBinding_PrivateClassImplementingPublicInterface()
{
var c = new PrivateClass();
var result = CSharpScript.EvaluateAsync<int>("Z()", globals: c, globalsType: typeof(I));
Assert.Equal(3, result.Result);
}
[Fact]
public void HostObjectBinding_StaticMembers()
{
var s0 = CSharpScript.RunAsync("static int foo = StaticField;", globals: new C());
var s1 = s0.ContinueWith("static int bar { get { return foo; } }");
var s2 = s1.ContinueWith("class C { public static int baz() { return bar; } }");
var s3 = s2.ContinueWith("C.baz()");
Assert.Equal(123, s3.Result.ReturnValue);
}
public class D
{
public int foo(int a) { return 0; }
}
/// <summary>
/// Host object members don't form a method group with submission members.
/// </summary>
[Fact]
public void HostObjectBinding_Overloads()
{
var s0 = CSharpScript.RunAsync("int foo(double a) { return 2; }", globals: new D());
var s1 = s0.ContinueWith("foo(1)");
Assert.Equal(2, s1.Result.ReturnValue);
var s2 = s1.ContinueWith("foo(1.0)");
Assert.Equal(2, s2.Result.ReturnValue);
}
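/// <summary>
/// Members of a host object whose type lives in the root namespace are still exposed
/// to the script as top-level names (X + Y + Z = 6).
/// </summary>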
[Fact]
public void HostObjectInRootNamespace()
{
var obj = new InteractiveFixtures_TopLevelHostObject { X = 1, Y = 2, Z = 3 };
var r0 = CSharpScript.EvaluateAsync<int>("X + Y + Z", globals: obj);
Assert.Equal(6, r0.Result);
obj = new InteractiveFixtures_TopLevelHostObject { X = 1, Y = 2, Z = 3 };
var r1 = CSharpScript.EvaluateAsync<int>("X", globals: obj);
Assert.Equal(1, r1.Result);
}
#endregion
}
}
| |
using Lucene.Net.Documents;
using Lucene.Net.Store;
using System;
namespace Lucene.Net.Index
{
using NUnit.Framework;
using System.IO;
using CompoundFileDirectory = Lucene.Net.Store.CompoundFileDirectory;
using Directory = Lucene.Net.Store.Directory;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using Document = Documents.Document;
using Field = Field;
using IndexInput = Lucene.Net.Store.IndexInput;
using IndexOutput = Lucene.Net.Store.IndexOutput;
using IOContext = Lucene.Net.Store.IOContext;
using IOUtils = Lucene.Net.Util.IOUtils;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
using SimpleFSDirectory = Lucene.Net.Store.SimpleFSDirectory;
using TestUtil = Lucene.Net.Util.TestUtil;
[TestFixture]
public class TestCompoundFile : LuceneTestCase
{
private Directory Dir;
[SetUp]
public override void SetUp()
{
base.SetUp();
DirectoryInfo file = CreateTempDir("testIndex");
// use a simple FSDir here, to be sure to have SimpleFSInputs
Dir = new SimpleFSDirectory(file, null);
}
[TearDown]
public override void TearDown()
{
Dir.Dispose();
base.TearDown();
}
/// <summary>
/// Creates a file of the specified size with random data. </summary>
private void CreateRandomFile(Directory dir, string name, int size)
{
IndexOutput os = dir.CreateOutput(name, NewIOContext(Random()));
// Create the Random once, outside the loop; constructing new Random(1) per
// iteration would write the same byte at every position instead of random data.
var rnd = new Random(1);
for (int i = 0; i < size; i++)
{
var b = unchecked((sbyte)(rnd.NextDouble() * 256));
os.WriteByte((byte)b);
}
os.Dispose();
}
/// <summary>
/// Creates a file of the specified size with sequential data. The first
/// byte is written as the start byte provided. All subsequent bytes are
/// computed as start + offset where offset is the number of the byte.
/// </summary>
private void CreateSequenceFile(Directory dir, string name, sbyte start, int size)
{
IndexOutput os = dir.CreateOutput(name, NewIOContext(Random()));
for (int i = 0; i < size; i++)
{
os.WriteByte((byte)start);
start++;
}
os.Dispose();
}
private void AssertSameStreams(string msg, IndexInput expected, IndexInput test)
{
Assert.IsNotNull(expected, msg + " null expected");
Assert.IsNotNull(test, msg + " null test");
Assert.AreEqual(expected.Length(), test.Length(), msg + " length");
Assert.AreEqual(expected.FilePointer, test.FilePointer, msg + " position");
var expectedBuffer = new byte[512];
var testBuffer = new byte[expectedBuffer.Length];
long remainder = expected.Length() - expected.FilePointer;
while (remainder > 0)
{
int readLen = (int)Math.Min(remainder, expectedBuffer.Length);
expected.ReadBytes(expectedBuffer, 0, readLen);
test.ReadBytes(testBuffer, 0, readLen);
AssertEqualArrays(msg + ", remainder " + remainder, expectedBuffer, testBuffer, 0, readLen);
remainder -= readLen;
}
}
private void AssertSameStreams(string msg, IndexInput expected, IndexInput actual, long seekTo)
{
if (seekTo >= 0 && seekTo < expected.Length())
{
expected.Seek(seekTo);
actual.Seek(seekTo);
AssertSameStreams(msg + ", seek(mid)", expected, actual);
}
}
private void AssertSameSeekBehavior(string msg, IndexInput expected, IndexInput actual)
{
// seek to 0
long point = 0;
AssertSameStreams(msg + ", seek(0)", expected, actual, point);
// seek to middle
point = expected.Length() / 2L;
AssertSameStreams(msg + ", seek(mid)", expected, actual, point);
// seek to end - 2
point = expected.Length() - 2;
AssertSameStreams(msg + ", seek(end-2)", expected, actual, point);
// seek to end - 1
point = expected.Length() - 1;
AssertSameStreams(msg + ", seek(end-1)", expected, actual, point);
// seek to the end
point = expected.Length();
AssertSameStreams(msg + ", seek(end)", expected, actual, point);
// seek past end
point = expected.Length() + 1;
AssertSameStreams(msg + ", seek(end+1)", expected, actual, point);
}
private void AssertEqualArrays(string msg, byte[] expected, byte[] test, int start, int len)
{
Assert.IsNotNull(expected, msg + " null expected");
Assert.IsNotNull(test, msg + " null test");
for (int i = start; i < len; i++)
{
Assert.AreEqual(expected[i], test[i], msg + " " + i);
}
}
// ===========================================================
// Tests of the basic CompoundFile functionality
// ===========================================================
/// <summary>
/// This test creates a compound file based on a single file.
/// Files of different sizes are tested: 0, 1, 10, 100 bytes.
/// </summary>
[Test]
public virtual void TestSingleFile()
{
int[] data = new int[] { 0, 1, 10, 100 };
for (int i = 0; i < data.Length; i++)
{
string name = "t" + data[i];
CreateSequenceFile(Dir, name, (sbyte)0, data[i]);
CompoundFileDirectory csw = new CompoundFileDirectory(Dir, name + ".cfs", NewIOContext(Random()), true);
Dir.Copy(csw, name, name, NewIOContext(Random()));
csw.Dispose();
CompoundFileDirectory csr = new CompoundFileDirectory(Dir, name + ".cfs", NewIOContext(Random()), false);
IndexInput expected = Dir.OpenInput(name, NewIOContext(Random()));
IndexInput actual = csr.OpenInput(name, NewIOContext(Random()));
AssertSameStreams(name, expected, actual);
AssertSameSeekBehavior(name, expected, actual);
expected.Dispose();
actual.Dispose();
csr.Dispose();
}
}
/// <summary>
/// This test creates a compound file based on two files.
/// </summary>
[Test]
public virtual void TestTwoFiles()
{
CreateSequenceFile(Dir, "d1", (sbyte)0, 15);
CreateSequenceFile(Dir, "d2", (sbyte)0, 114);
CompoundFileDirectory csw = new CompoundFileDirectory(Dir, "d.cfs", NewIOContext(Random()), true);
Dir.Copy(csw, "d1", "d1", NewIOContext(Random()));
Dir.Copy(csw, "d2", "d2", NewIOContext(Random()));
csw.Dispose();
CompoundFileDirectory csr = new CompoundFileDirectory(Dir, "d.cfs", NewIOContext(Random()), false);
IndexInput expected = Dir.OpenInput("d1", NewIOContext(Random()));
IndexInput actual = csr.OpenInput("d1", NewIOContext(Random()));
AssertSameStreams("d1", expected, actual);
AssertSameSeekBehavior("d1", expected, actual);
expected.Dispose();
actual.Dispose();
expected = Dir.OpenInput("d2", NewIOContext(Random()));
actual = csr.OpenInput("d2", NewIOContext(Random()));
AssertSameStreams("d2", expected, actual);
AssertSameSeekBehavior("d2", expected, actual);
expected.Dispose();
actual.Dispose();
csr.Dispose();
}
/// <summary>
/// This test creates a compound file based on a large number of files of
/// various lengths. The file content is generated randomly. The sizes range
/// from 0 bytes to 1 MB. Some of the sizes are selected to test the buffering
/// logic in the file reading code. For this the chunk variable is set to
/// the length of the buffer used internally by the compound file logic.
/// </summary>
[Test]
public virtual void TestRandomFiles()
{
// Setup the test segment
string segment = "test";
int chunk = 1024; // internal buffer size used by the stream
CreateRandomFile(Dir, segment + ".zero", 0);
CreateRandomFile(Dir, segment + ".one", 1);
CreateRandomFile(Dir, segment + ".ten", 10);
CreateRandomFile(Dir, segment + ".hundred", 100);
CreateRandomFile(Dir, segment + ".big1", chunk);
CreateRandomFile(Dir, segment + ".big2", chunk - 1);
CreateRandomFile(Dir, segment + ".big3", chunk + 1);
CreateRandomFile(Dir, segment + ".big4", 3 * chunk);
CreateRandomFile(Dir, segment + ".big5", 3 * chunk - 1);
CreateRandomFile(Dir, segment + ".big6", 3 * chunk + 1);
CreateRandomFile(Dir, segment + ".big7", 1000 * chunk);
// Setup extraneous files
CreateRandomFile(Dir, "onetwothree", 100);
CreateRandomFile(Dir, segment + ".notIn", 50);
CreateRandomFile(Dir, segment + ".notIn2", 51);
// Now test
CompoundFileDirectory csw = new CompoundFileDirectory(Dir, "test.cfs", NewIOContext(Random()), true);
string[] data = new string[] { ".zero", ".one", ".ten", ".hundred", ".big1", ".big2", ".big3", ".big4", ".big5", ".big6", ".big7" };
for (int i = 0; i < data.Length; i++)
{
string fileName = segment + data[i];
Dir.Copy(csw, fileName, fileName, NewIOContext(Random()));
}
csw.Dispose();
CompoundFileDirectory csr = new CompoundFileDirectory(Dir, "test.cfs", NewIOContext(Random()), false);
for (int i = 0; i < data.Length; i++)
{
IndexInput check = Dir.OpenInput(segment + data[i], NewIOContext(Random()));
IndexInput test = csr.OpenInput(segment + data[i], NewIOContext(Random()));
AssertSameStreams(data[i], check, test);
AssertSameSeekBehavior(data[i], check, test);
test.Dispose();
check.Dispose();
}
csr.Dispose();
}
/// <summary>
/// Setup a larger compound file with a number of components, each of
/// which is a sequential file (so that we can easily tell that we are
/// reading in the right byte). The method sets up 20 files - f0 to f19;
/// the size of each file is 2000 bytes.
/// </summary>
private void SetUp_2()
{
CompoundFileDirectory cw = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random()), true);
for (int i = 0; i < 20; i++)
{
CreateSequenceFile(Dir, "f" + i, (sbyte)0, 2000);
string fileName = "f" + i;
Dir.Copy(cw, fileName, fileName, NewIOContext(Random()));
}
cw.Dispose();
}
[Test]
public virtual void TestReadAfterClose()
{
try
{
Demo_FSIndexInputBug(Dir, "test");
}
catch (ObjectDisposedException)
{
// expected
}
}
private void Demo_FSIndexInputBug(Directory fsdir, string file)
{
// Setup the test file - we need more than 1024 bytes
IndexOutput os = fsdir.CreateOutput(file, IOContext.DEFAULT);
for (int i = 0; i < 2000; i++)
{
os.WriteByte((byte)(sbyte)i);
}
os.Dispose();
IndexInput @in = fsdir.OpenInput(file, IOContext.DEFAULT);
// this read primes the buffer in IndexInput
@in.ReadByte();
// Close the file
@in.Dispose();
// ERROR: this call should fail, but succeeds because the buffer
// is still filled
@in.ReadByte();
// ERROR: this call should fail, but succeeds for some reason as well
@in.Seek(1099);
try
{
// OK: this call correctly fails. We are now past the 1024 internal
// buffer, so an actual IO is attempted, which fails
@in.ReadByte();
Assert.Fail("expected readByte() to throw exception");
}
catch (IOException e)
{
// expected exception
}
}
[Test]
public virtual void TestClonedStreamsClosing()
{
SetUp_2();
CompoundFileDirectory cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random()), false);
// basic clone
IndexInput expected = Dir.OpenInput("f11", NewIOContext(Random()));
// this test only works for FSIndexInput
Assert.IsTrue(TestHelper.IsSimpleFSIndexInput(expected));
Assert.IsTrue(TestHelper.IsSimpleFSIndexInputOpen(expected));
IndexInput one = cr.OpenInput("f11", NewIOContext(Random()));
IndexInput two = (IndexInput)one.Clone();
AssertSameStreams("basic clone one", expected, one);
expected.Seek(0);
AssertSameStreams("basic clone two", expected, two);
// Now close the first stream
one.Dispose();
// The following should really fail since we couldn't expect to
// access a file once close has been called on it (regardless of
// buffering and/or clone magic)
expected.Seek(0);
two.Seek(0);
AssertSameStreams("basic clone two/2", expected, two);
// Now close the compound reader
cr.Dispose();
// The following may also fail since the compound stream is closed
expected.Seek(0);
two.Seek(0);
//assertSameStreams("basic clone two/3", expected, two);
// Now close the second clone
two.Dispose();
expected.Seek(0);
two.Seek(0);
//assertSameStreams("basic clone two/4", expected, two);
expected.Dispose();
}
/// <summary>
/// this test opens two files from a compound stream and verifies that
/// their file positions are independent of each other.
/// </summary>
[Test]
public virtual void TestRandomAccess()
{
SetUp_2();
CompoundFileDirectory cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random()), false);
// Open two files
IndexInput e1 = Dir.OpenInput("f11", NewIOContext(Random()));
IndexInput e2 = Dir.OpenInput("f3", NewIOContext(Random()));
IndexInput a1 = cr.OpenInput("f11", NewIOContext(Random()));
IndexInput a2 = Dir.OpenInput("f3", NewIOContext(Random()));
// Seek the first pair
e1.Seek(100);
a1.Seek(100);
Assert.AreEqual(100, e1.FilePointer);
Assert.AreEqual(100, a1.FilePointer);
byte be1 = e1.ReadByte();
byte ba1 = a1.ReadByte();
Assert.AreEqual(be1, ba1);
// Now seek the second pair
e2.Seek(1027);
a2.Seek(1027);
Assert.AreEqual(1027, e2.FilePointer);
Assert.AreEqual(1027, a2.FilePointer);
byte be2 = e2.ReadByte();
byte ba2 = a2.ReadByte();
Assert.AreEqual(be2, ba2);
// Now make sure the first one didn't move
Assert.AreEqual(101, e1.FilePointer);
Assert.AreEqual(101, a1.FilePointer);
be1 = e1.ReadByte();
ba1 = a1.ReadByte();
Assert.AreEqual(be1, ba1);
// Now move the first one again, past the buffer length
e1.Seek(1910);
a1.Seek(1910);
Assert.AreEqual(1910, e1.FilePointer);
Assert.AreEqual(1910, a1.FilePointer);
be1 = e1.ReadByte();
ba1 = a1.ReadByte();
Assert.AreEqual(be1, ba1);
// Now make sure the second set didn't move
Assert.AreEqual(1028, e2.FilePointer);
Assert.AreEqual(1028, a2.FilePointer);
be2 = e2.ReadByte();
ba2 = a2.ReadByte();
Assert.AreEqual(be2, ba2);
// Move the second set back, again cross the buffer size
e2.Seek(17);
a2.Seek(17);
Assert.AreEqual(17, e2.FilePointer);
Assert.AreEqual(17, a2.FilePointer);
be2 = e2.ReadByte();
ba2 = a2.ReadByte();
Assert.AreEqual(be2, ba2);
// Finally, make sure the first set didn't move
Assert.AreEqual(1911, e1.FilePointer);
Assert.AreEqual(1911, a1.FilePointer);
be1 = e1.ReadByte();
ba1 = a1.ReadByte();
Assert.AreEqual(be1, ba1);
e1.Dispose();
e2.Dispose();
a1.Dispose();
a2.Dispose();
cr.Dispose();
}
/// <summary>
/// this test opens two files from a compound stream and verifies that
/// their file positions are independent of each other.
/// </summary>
[Test]
public virtual void TestRandomAccessClones()
{
SetUp_2();
CompoundFileDirectory cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random()), false);
// Open two files
IndexInput e1 = cr.OpenInput("f11", NewIOContext(Random()));
IndexInput e2 = cr.OpenInput("f3", NewIOContext(Random()));
IndexInput a1 = (IndexInput)e1.Clone();
IndexInput a2 = (IndexInput)e2.Clone();
// Seek the first pair
e1.Seek(100);
a1.Seek(100);
Assert.AreEqual(100, e1.FilePointer);
Assert.AreEqual(100, a1.FilePointer);
byte be1 = e1.ReadByte();
byte ba1 = a1.ReadByte();
Assert.AreEqual(be1, ba1);
// Now seek the second pair
e2.Seek(1027);
a2.Seek(1027);
Assert.AreEqual(1027, e2.FilePointer);
Assert.AreEqual(1027, a2.FilePointer);
byte be2 = e2.ReadByte();
byte ba2 = a2.ReadByte();
Assert.AreEqual(be2, ba2);
// Now make sure the first one didn't move
Assert.AreEqual(101, e1.FilePointer);
Assert.AreEqual(101, a1.FilePointer);
be1 = e1.ReadByte();
ba1 = a1.ReadByte();
Assert.AreEqual(be1, ba1);
// Now move the first one again, past the buffer length
e1.Seek(1910);
a1.Seek(1910);
Assert.AreEqual(1910, e1.FilePointer);
Assert.AreEqual(1910, a1.FilePointer);
be1 = e1.ReadByte();
ba1 = a1.ReadByte();
Assert.AreEqual(be1, ba1);
// Now make sure the second set didn't move
Assert.AreEqual(1028, e2.FilePointer);
Assert.AreEqual(1028, a2.FilePointer);
be2 = e2.ReadByte();
ba2 = a2.ReadByte();
Assert.AreEqual(be2, ba2);
// Move the second set back, again cross the buffer size
e2.Seek(17);
a2.Seek(17);
Assert.AreEqual(17, e2.FilePointer);
Assert.AreEqual(17, a2.FilePointer);
be2 = e2.ReadByte();
ba2 = a2.ReadByte();
Assert.AreEqual(be2, ba2);
// Finally, make sure the first set didn't move
Assert.AreEqual(1911, e1.FilePointer);
Assert.AreEqual(1911, a1.FilePointer);
be1 = e1.ReadByte();
ba1 = a1.ReadByte();
Assert.AreEqual(be1, ba1);
e1.Dispose();
e2.Dispose();
a1.Dispose();
a2.Dispose();
cr.Dispose();
}
[Test]
public virtual void TestFileNotFound()
{
SetUp_2();
CompoundFileDirectory cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random()), false);
// Open two files
try
{
cr.OpenInput("bogus", NewIOContext(Random()));
Assert.Fail("File not found");
}
catch (Exception e)
{
/* success */
//System.out.println("SUCCESS: File Not Found: " + e);
}
cr.Dispose();
}
[Test]
public virtual void TestReadPastEOF()
{
SetUp_2();
var cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random()), false);
IndexInput @is = cr.OpenInput("f2", NewIOContext(Random()));
@is.Seek(@is.Length() - 10);
var b = new byte[100];
@is.ReadBytes(b, 0, 10);
try
{
@is.ReadByte();
Assert.Fail("Single byte read past end of file");
}
catch (IOException e)
{
/* success */
//System.out.println("SUCCESS: single byte read past end of file: " + e);
}
@is.Seek(@is.Length() - 10);
try
{
@is.ReadBytes(b, 0, 50);
Assert.Fail("Block read past end of file");
}
catch (IOException e)
{
/* success */
//System.out.println("SUCCESS: block read past end of file: " + e);
}
@is.Dispose();
cr.Dispose();
}
/// <summary>
/// This test verifies that writes larger than the output buffer size
/// correctly advance the file pointer.
/// </summary>
[Test]
public virtual void TestLargeWrites()
{
IndexOutput os = Dir.CreateOutput("testBufferStart.txt", NewIOContext(Random()));
var largeBuf = new byte[2048];
// Create the Random once; a new Random(1) per iteration would fill the
// buffer with a single repeated value rather than varied data.
var rnd = new Random(1);
for (int i = 0; i < largeBuf.Length; i++)
{
largeBuf[i] = (byte)unchecked((sbyte)(rnd.NextDouble() * 256));
}
long currentPos = os.FilePointer;
os.WriteBytes(largeBuf, largeBuf.Length);
try
{
Assert.AreEqual(currentPos + largeBuf.Length, os.FilePointer);
}
finally
{
os.Dispose();
}
}
[Test]
public virtual void TestAddExternalFile()
{
CreateSequenceFile(Dir, "d1", (sbyte)0, 15);
Directory newDir = NewDirectory();
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
Dir.Copy(csw, "d1", "d1", NewIOContext(Random()));
csw.Dispose();
CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
IndexInput expected = Dir.OpenInput("d1", NewIOContext(Random()));
IndexInput actual = csr.OpenInput("d1", NewIOContext(Random()));
AssertSameStreams("d1", expected, actual);
AssertSameSeekBehavior("d1", expected, actual);
expected.Dispose();
actual.Dispose();
csr.Dispose();
newDir.Dispose();
}
[Test]
public virtual void TestAppend()
{
Directory newDir = NewDirectory();
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
int size = 5 + Random().Next(128);
for (int j = 0; j < 2; j++)
{
IndexOutput os = csw.CreateOutput("seg_" + j + "_foo.txt", NewIOContext(Random()));
for (int i = 0; i < size; i++)
{
os.WriteInt(i * j);
}
os.Dispose();
string[] listAll = newDir.ListAll();
Assert.AreEqual(1, listAll.Length);
Assert.AreEqual("d.cfs", listAll[0]);
}
CreateSequenceFile(Dir, "d1", (sbyte)0, 15);
Dir.Copy(csw, "d1", "d1", NewIOContext(Random()));
string[] listAll_ = newDir.ListAll();
Assert.AreEqual(1, listAll_.Length);
Assert.AreEqual("d.cfs", listAll_[0]);
csw.Dispose();
CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
for (int j = 0; j < 2; j++)
{
IndexInput openInput = csr.OpenInput("seg_" + j + "_foo.txt", NewIOContext(Random()));
Assert.AreEqual(size * 4, openInput.Length());
for (int i = 0; i < size; i++)
{
Assert.AreEqual(i * j, openInput.ReadInt());
}
openInput.Dispose();
}
IndexInput expected = Dir.OpenInput("d1", NewIOContext(Random()));
IndexInput actual = csr.OpenInput("d1", NewIOContext(Random()));
AssertSameStreams("d1", expected, actual);
AssertSameSeekBehavior("d1", expected, actual);
expected.Dispose();
actual.Dispose();
csr.Dispose();
newDir.Dispose();
}
[Test]
public virtual void TestAppendTwice()
{
Directory newDir = NewDirectory();
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
CreateSequenceFile(newDir, "d1", (sbyte)0, 15);
IndexOutput @out = csw.CreateOutput("d.xyz", NewIOContext(Random()));
@out.WriteInt(0);
@out.Dispose();
Assert.AreEqual(1, csw.ListAll().Length);
Assert.AreEqual("d.xyz", csw.ListAll()[0]);
csw.Dispose();
CompoundFileDirectory cfr = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
Assert.AreEqual(1, cfr.ListAll().Length);
Assert.AreEqual("d.xyz", cfr.ListAll()[0]);
cfr.Dispose();
newDir.Dispose();
}
[Test]
public virtual void TestEmptyCFS()
{
Directory newDir = NewDirectory();
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
csw.Dispose();
CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
Assert.AreEqual(0, csr.ListAll().Length);
csr.Dispose();
newDir.Dispose();
}
[Test]
public virtual void TestReadNestedCFP()
{
Directory newDir = NewDirectory();
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
CompoundFileDirectory nested = new CompoundFileDirectory(newDir, "b.cfs", NewIOContext(Random()), true);
IndexOutput @out = nested.CreateOutput("b.xyz", NewIOContext(Random()));
IndexOutput out1 = nested.CreateOutput("b_1.xyz", NewIOContext(Random()));
@out.WriteInt(0);
out1.WriteInt(1);
@out.Dispose();
out1.Dispose();
nested.Dispose();
newDir.Copy(csw, "b.cfs", "b.cfs", NewIOContext(Random()));
newDir.Copy(csw, "b.cfe", "b.cfe", NewIOContext(Random()));
newDir.DeleteFile("b.cfs");
newDir.DeleteFile("b.cfe");
csw.Dispose();
Assert.AreEqual(2, newDir.ListAll().Length);
csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
Assert.AreEqual(2, csw.ListAll().Length);
nested = new CompoundFileDirectory(csw, "b.cfs", NewIOContext(Random()), false);
Assert.AreEqual(2, nested.ListAll().Length);
IndexInput openInput = nested.OpenInput("b.xyz", NewIOContext(Random()));
Assert.AreEqual(0, openInput.ReadInt());
openInput.Dispose();
openInput = nested.OpenInput("b_1.xyz", NewIOContext(Random()));
Assert.AreEqual(1, openInput.ReadInt());
openInput.Dispose();
nested.Dispose();
csw.Dispose();
newDir.Dispose();
}
[Test]
public virtual void TestDoubleClose()
{
Directory newDir = NewDirectory();
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
IndexOutput @out = csw.CreateOutput("d.xyz", NewIOContext(Random()));
@out.WriteInt(0);
@out.Dispose();
csw.Dispose();
// close a second time - must have no effect according to IDisposable
csw.Dispose();
csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
IndexInput openInput = csw.OpenInput("d.xyz", NewIOContext(Random()));
Assert.AreEqual(0, openInput.ReadInt());
openInput.Dispose();
csw.Dispose();
// close a second time - must have no effect according to IDisposable
csw.Dispose();
newDir.Dispose();
}
// Make sure we don't somehow use more than 1 descriptor
// when reading a CFS with many subs:
[Test]
public virtual void TestManySubFiles()
{
Directory d = NewFSDirectory(CreateTempDir("CFSManySubFiles"));
int FILE_COUNT = AtLeast(500);
for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
{
IndexOutput @out = d.CreateOutput("file." + fileIdx, NewIOContext(Random()));
@out.WriteByte((byte)(sbyte)fileIdx);
@out.Dispose();
}
CompoundFileDirectory cfd = new CompoundFileDirectory(d, "c.cfs", NewIOContext(Random()), true);
for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
{
string fileName = "file." + fileIdx;
d.Copy(cfd, fileName, fileName, NewIOContext(Random()));
}
cfd.Dispose();
IndexInput[] ins = new IndexInput[FILE_COUNT];
CompoundFileDirectory cfr = new CompoundFileDirectory(d, "c.cfs", NewIOContext(Random()), false);
for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
{
ins[fileIdx] = cfr.OpenInput("file." + fileIdx, NewIOContext(Random()));
}
for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
{
Assert.AreEqual((byte)fileIdx, ins[fileIdx].ReadByte());
}
for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
{
ins[fileIdx].Dispose();
}
cfr.Dispose();
d.Dispose();
}
[Test]
public virtual void TestListAll()
{
Directory dir = NewDirectory();
// riw should sometimes create docvalues fields, etc
RandomIndexWriter riw = new RandomIndexWriter(Random(), dir);
Document doc = new Document();
// these fields should sometimes get term vectors, etc
Field idField = NewStringField("id", "", Field.Store.NO);
Field bodyField = NewTextField("body", "", Field.Store.NO);
doc.Add(idField);
doc.Add(bodyField);
for (int i = 0; i < 100; i++)
{
idField.StringValue = Convert.ToString(i);
bodyField.StringValue = TestUtil.RandomUnicodeString(Random());
riw.AddDocument(doc);
if (Random().Next(7) == 0)
{
riw.Commit();
}
}
riw.Dispose();
CheckFiles(dir);
dir.Dispose();
}
// checks that we can open all files returned by listAll!
private void CheckFiles(Directory dir)
{
foreach (string file in dir.ListAll())
{
if (file.EndsWith(IndexFileNames.COMPOUND_FILE_EXTENSION))
{
CompoundFileDirectory cfsDir = new CompoundFileDirectory(dir, file, NewIOContext(Random()), false);
CheckFiles(cfsDir); // recurse into cfs
cfsDir.Dispose();
}
IndexInput @in = null;
bool success = false;
try
{
@in = dir.OpenInput(file, NewIOContext(Random()));
success = true;
}
finally
{
if (success)
{
IOUtils.Close(@in);
}
else
{
IOUtils.CloseWhileHandlingException(@in);
}
}
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Net.Http.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace System.Net.Http
{
[SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix",
Justification = "Represents a multipart/* content. Even if a collection of HttpContent is stored, " +
"suffix Collection is not appropriate.")]
public class MultipartContent : HttpContent, IEnumerable<HttpContent>
{
#region Fields
private const string CrLf = "\r\n";
private static readonly int s_crlfLength = GetEncodedLength(CrLf);
private static readonly int s_dashDashLength = GetEncodedLength("--");
private static readonly int s_colonSpaceLength = GetEncodedLength(": ");
private static readonly int s_commaSpaceLength = GetEncodedLength(", ");
private readonly List<HttpContent> _nestedContent;
private readonly string _boundary;
#endregion Fields
#region Construction
public MultipartContent()
: this("mixed", GetDefaultBoundary())
{ }
public MultipartContent(string subtype)
: this(subtype, GetDefaultBoundary())
{ }
public MultipartContent(string subtype, string boundary)
{
if (string.IsNullOrWhiteSpace(subtype))
{
throw new ArgumentException(SR.net_http_argument_empty_string, nameof(subtype));
}
ValidateBoundary(boundary);
_boundary = boundary;
string quotedBoundary = boundary;
if (!quotedBoundary.StartsWith("\"", StringComparison.Ordinal))
{
quotedBoundary = "\"" + quotedBoundary + "\"";
}
MediaTypeHeaderValue contentType = new MediaTypeHeaderValue("multipart/" + subtype);
contentType.Parameters.Add(new NameValueHeaderValue(nameof(boundary), quotedBoundary));
Headers.ContentType = contentType;
_nestedContent = new List<HttpContent>();
}
private static void ValidateBoundary(string boundary)
{
// NameValueHeaderValue is too restrictive for boundary.
// Instead validate it ourselves and then quote it.
if (string.IsNullOrWhiteSpace(boundary))
{
throw new ArgumentException(SR.net_http_argument_empty_string, nameof(boundary));
}
// RFC 2046 Section 5.1.1
// boundary := 0*69<bchars> bcharsnospace
// bchars := bcharsnospace / " "
// bcharsnospace := DIGIT / ALPHA / "'" / "(" / ")" / "+" / "_" / "," / "-" / "." / "/" / ":" / "=" / "?"
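            // Illustrative examples: "gc0pJq0M2Yt08jU534c0p" and "simple boundary" are accepted;
            // a value ending in a space or longer than 70 characters is rejected below.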
if (boundary.Length > 70)
{
throw new ArgumentOutOfRangeException(nameof(boundary), boundary,
SR.Format(System.Globalization.CultureInfo.InvariantCulture, SR.net_http_content_field_too_long, 70));
}
// Cannot end with space.
if (boundary.EndsWith(" ", StringComparison.Ordinal))
{
throw new ArgumentException(SR.Format(System.Globalization.CultureInfo.InvariantCulture, SR.net_http_headers_invalid_value, boundary), nameof(boundary));
}
const string AllowedMarks = @"'()+_,-./:=? ";
foreach (char ch in boundary)
{
if (('0' <= ch && ch <= '9') || // Digit.
('a' <= ch && ch <= 'z') || // alpha.
('A' <= ch && ch <= 'Z') || // ALPHA.
(AllowedMarks.Contains(ch))) // Marks.
{
// Valid.
}
else
{
throw new ArgumentException(SR.Format(System.Globalization.CultureInfo.InvariantCulture, SR.net_http_headers_invalid_value, boundary), nameof(boundary));
}
}
}
private static string GetDefaultBoundary()
{
return Guid.NewGuid().ToString();
}
public virtual void Add(HttpContent content)
{
if (content == null)
{
throw new ArgumentNullException(nameof(content));
}
_nestedContent.Add(content);
}
#endregion Construction
#region Dispose
protected override void Dispose(bool disposing)
{
if (disposing)
{
foreach (HttpContent content in _nestedContent)
{
content.Dispose();
}
_nestedContent.Clear();
}
base.Dispose(disposing);
}
#endregion Dispose
#region IEnumerable<HttpContent> Members
public IEnumerator<HttpContent> GetEnumerator()
{
return _nestedContent.GetEnumerator();
}
#endregion
#region IEnumerable Members
Collections.IEnumerator Collections.IEnumerable.GetEnumerator()
{
return _nestedContent.GetEnumerator();
}
#endregion
#region Serialization
// for-each content
// write "--" + boundary
// for-each content header
// write header: header-value
// write content.CopyTo[Async]
// write "--" + boundary + "--"
        // Can't be canceled directly by the user. If the overall request is canceled,
        // then the stream will be closed and an exception thrown.
protected override Task SerializeToStreamAsync(Stream stream, TransportContext context) =>
SerializeToStreamAsyncCore(stream, context, default);
internal override Task SerializeToStreamAsync(Stream stream, TransportContext context, CancellationToken cancellationToken) =>
// Only skip the original protected virtual SerializeToStreamAsync if this
// isn't a derived type that may have overridden the behavior.
GetType() == typeof(MultipartContent) ? SerializeToStreamAsyncCore(stream, context, cancellationToken) :
base.SerializeToStreamAsync(stream, context, cancellationToken);
private protected async Task SerializeToStreamAsyncCore(Stream stream, TransportContext context, CancellationToken cancellationToken)
{
Debug.Assert(stream != null);
try
{
// Write start boundary.
await EncodeStringToStreamAsync(stream, "--" + _boundary + CrLf, cancellationToken).ConfigureAwait(false);
// Write each nested content.
var output = new StringBuilder();
for (int contentIndex = 0; contentIndex < _nestedContent.Count; contentIndex++)
{
// Write divider, headers, and content.
HttpContent content = _nestedContent[contentIndex];
await EncodeStringToStreamAsync(stream, SerializeHeadersToString(output, contentIndex, content), cancellationToken).ConfigureAwait(false);
await content.CopyToAsync(stream, context, cancellationToken).ConfigureAwait(false);
}
// Write footer boundary.
await EncodeStringToStreamAsync(stream, CrLf + "--" + _boundary + "--" + CrLf, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
if (NetEventSource.IsEnabled) NetEventSource.Error(this, ex);
throw;
}
}
protected override async Task<Stream> CreateContentReadStreamAsync()
{
try
{
var streams = new Stream[2 + (_nestedContent.Count * 2)];
var scratch = new StringBuilder();
int streamIndex = 0;
// Start boundary.
streams[streamIndex++] = EncodeStringToNewStream("--" + _boundary + CrLf);
// Each nested content.
for (int contentIndex = 0; contentIndex < _nestedContent.Count; contentIndex++)
{
HttpContent nestedContent = _nestedContent[contentIndex];
streams[streamIndex++] = EncodeStringToNewStream(SerializeHeadersToString(scratch, contentIndex, nestedContent));
Stream readStream = (await nestedContent.ReadAsStreamAsync().ConfigureAwait(false)) ?? new MemoryStream();
if (!readStream.CanSeek)
{
// Seekability impacts whether HttpClientHandlers are able to rewind. To maintain compat
// and to allow such use cases when a nested stream isn't seekable (which should be rare),
// we fall back to the base behavior. We don't dispose of the streams already obtained
// as we don't necessarily own them yet.
return await base.CreateContentReadStreamAsync().ConfigureAwait(false);
}
streams[streamIndex++] = readStream;
}
// Footer boundary.
streams[streamIndex] = EncodeStringToNewStream(CrLf + "--" + _boundary + "--" + CrLf);
return new ContentReadStream(streams);
}
catch (Exception ex)
{
if (NetEventSource.IsEnabled) NetEventSource.Error(this, ex);
throw;
}
}
private string SerializeHeadersToString(StringBuilder scratch, int contentIndex, HttpContent content)
{
scratch.Clear();
// Add divider.
if (contentIndex != 0) // Write divider for all but the first content.
{
scratch.Append(CrLf + "--"); // const strings
scratch.Append(_boundary);
scratch.Append(CrLf);
}
// Add headers.
foreach (KeyValuePair<string, IEnumerable<string>> headerPair in content.Headers)
{
scratch.Append(headerPair.Key);
scratch.Append(": ");
string delim = string.Empty;
foreach (string value in headerPair.Value)
{
scratch.Append(delim);
scratch.Append(value);
delim = ", ";
}
scratch.Append(CrLf);
}
// Extra CRLF to end headers (even if there are no headers).
scratch.Append(CrLf);
return scratch.ToString();
}
private static ValueTask EncodeStringToStreamAsync(Stream stream, string input, CancellationToken cancellationToken)
{
byte[] buffer = HttpRuleParser.DefaultHttpEncoding.GetBytes(input);
return stream.WriteAsync(new ReadOnlyMemory<byte>(buffer), cancellationToken);
}
private static Stream EncodeStringToNewStream(string input)
{
return new MemoryStream(HttpRuleParser.DefaultHttpEncoding.GetBytes(input), writable: false);
}
internal override bool AllowDuplex => false;
protected internal override bool TryComputeLength(out long length)
{
int boundaryLength = GetEncodedLength(_boundary);
long currentLength = 0;
long internalBoundaryLength = s_crlfLength + s_dashDashLength + boundaryLength + s_crlfLength;
// Start Boundary.
currentLength += s_dashDashLength + boundaryLength + s_crlfLength;
bool first = true;
foreach (HttpContent content in _nestedContent)
{
if (first)
{
first = false; // First boundary already written.
}
else
{
// Internal Boundary.
currentLength += internalBoundaryLength;
}
// Headers.
foreach (KeyValuePair<string, IEnumerable<string>> headerPair in content.Headers)
{
currentLength += GetEncodedLength(headerPair.Key) + s_colonSpaceLength;
int valueCount = 0;
foreach (string value in headerPair.Value)
{
currentLength += GetEncodedLength(value);
valueCount++;
}
if (valueCount > 1)
{
currentLength += (valueCount - 1) * s_commaSpaceLength;
}
currentLength += s_crlfLength;
}
currentLength += s_crlfLength;
// Content.
long tempContentLength = 0;
if (!content.TryComputeLength(out tempContentLength))
{
length = 0;
return false;
}
currentLength += tempContentLength;
}
// Terminating boundary.
currentLength += s_crlfLength + s_dashDashLength + boundaryLength + s_dashDashLength + s_crlfLength;
length = currentLength;
return true;
}
private static int GetEncodedLength(string input)
{
return HttpRuleParser.DefaultHttpEncoding.GetByteCount(input);
}
private sealed class ContentReadStream : Stream
{
private readonly Stream[] _streams;
private readonly long _length;
private int _next;
private Stream _current;
private long _position;
internal ContentReadStream(Stream[] streams)
{
Debug.Assert(streams != null);
_streams = streams;
foreach (Stream stream in streams)
{
_length += stream.Length;
}
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
foreach (Stream s in _streams)
{
s.Dispose();
}
}
}
public override async ValueTask DisposeAsync()
{
foreach (Stream s in _streams)
{
await s.DisposeAsync().ConfigureAwait(false);
}
}
public override bool CanRead => true;
public override bool CanSeek => true;
public override bool CanWrite => false;
public override int Read(byte[] buffer, int offset, int count)
{
ValidateReadArgs(buffer, offset, count);
if (count == 0)
{
return 0;
}
while (true)
{
if (_current != null)
{
int bytesRead = _current.Read(buffer, offset, count);
if (bytesRead != 0)
{
_position += bytesRead;
return bytesRead;
}
_current = null;
}
if (_next >= _streams.Length)
{
return 0;
}
_current = _streams[_next++];
}
}
public override int Read(Span<byte> buffer)
{
if (buffer.Length == 0)
{
return 0;
}
while (true)
{
if (_current != null)
{
int bytesRead = _current.Read(buffer);
if (bytesRead != 0)
{
_position += bytesRead;
return bytesRead;
}
_current = null;
}
if (_next >= _streams.Length)
{
return 0;
}
_current = _streams[_next++];
}
}
public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
ValidateReadArgs(buffer, offset, count);
return ReadAsyncPrivate(new Memory<byte>(buffer, offset, count), cancellationToken).AsTask();
}
public override ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default) =>
ReadAsyncPrivate(buffer, cancellationToken);
public override IAsyncResult BeginRead(byte[] array, int offset, int count, AsyncCallback asyncCallback, object asyncState) =>
TaskToApm.Begin(ReadAsync(array, offset, count, CancellationToken.None), asyncCallback, asyncState);
public override int EndRead(IAsyncResult asyncResult) =>
TaskToApm.End<int>(asyncResult);
public async ValueTask<int> ReadAsyncPrivate(Memory<byte> buffer, CancellationToken cancellationToken)
{
if (buffer.Length == 0)
{
return 0;
}
while (true)
{
if (_current != null)
{
int bytesRead = await _current.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (bytesRead != 0)
{
_position += bytesRead;
return bytesRead;
}
_current = null;
}
if (_next >= _streams.Length)
{
return 0;
}
_current = _streams[_next++];
}
}
public override long Position
{
get { return _position; }
set
{
if (value < 0)
{
throw new ArgumentOutOfRangeException(nameof(value));
}
long previousStreamsLength = 0;
for (int i = 0; i < _streams.Length; i++)
{
Stream curStream = _streams[i];
long curLength = curStream.Length;
if (value < previousStreamsLength + curLength)
{
_current = curStream;
i++;
_next = i;
curStream.Position = value - previousStreamsLength;
for (; i < _streams.Length; i++)
{
_streams[i].Position = 0;
}
_position = value;
return;
}
previousStreamsLength += curLength;
}
_current = null;
_next = _streams.Length;
_position = value;
}
}
public override long Seek(long offset, SeekOrigin origin)
{
switch (origin)
{
case SeekOrigin.Begin:
Position = offset;
break;
case SeekOrigin.Current:
Position += offset;
break;
case SeekOrigin.End:
Position = _length + offset;
break;
default:
throw new ArgumentOutOfRangeException(nameof(origin));
}
return Position;
}
public override long Length => _length;
private static void ValidateReadArgs(byte[] buffer, int offset, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (offset < 0)
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (offset > buffer.Length - count)
{
throw new ArgumentException(SR.net_http_buffer_insufficient_length, nameof(buffer));
}
}
public override void Flush() { }
public override void SetLength(long value) { throw new NotSupportedException(); }
public override void Write(byte[] buffer, int offset, int count) { throw new NotSupportedException(); }
public override void Write(ReadOnlySpan<byte> buffer) { throw new NotSupportedException(); }
public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) { throw new NotSupportedException(); }
public override ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = default) { throw new NotSupportedException(); }
}
#endregion Serialization
}
}
| |
/*
Copyright (c) Microsoft Corporation
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
compliance with the License. You may obtain a copy of the License
at http://www.apache.org/licenses/LICENSE-2.0
THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER
EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED WARRANTIES OR CONDITIONS OF
TITLE, FITNESS FOR A PARTICULAR PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT.
See the Apache Version 2.0 License for specific language governing permissions and
limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading.Tasks;
using Microsoft.Research.Peloponnese.NotHttpClient;
namespace Microsoft.Research.Dryad.ClusterInterface
{
/// <summary>
/// this is the connection to the application's logging interface, supplied
/// by the external application
/// </summary>
public interface ILogger
{
void Log(
string entry,
[CallerFilePath] string file = "(nofile)",
[CallerMemberName] string function = "(nofunction)",
[CallerLineNumber] int line = -1);
}
/// <summary>
/// this is the information available to the external application about a
/// "computer" in the cluster. This is really a slot in a cluster's resource manager;
/// there may be multiple computers on the same host.
/// </summary>
public interface IComputer
{
/// <summary>
/// the unique name of this resource
/// </summary>
string Name { get; }
/// <summary>
/// the URI for communicating with processes
/// </summary>
string ProcessServer { get; }
/// <summary>
/// the URI for fetching remote files
/// </summary>
string FileServer { get; }
/// <summary>
/// the directory for fetching local files
/// </summary>
string Directory { get; }
/// <summary>
/// the hostname this is running on, for data locality purposes
/// </summary>
string Host { get; }
/// <summary>
/// the cluster rack this is running on, for data locality purposes
/// </summary>
string RackName { get; }
}
/// <summary>
/// this is the information available to the external application about a Dryad
/// process that has been started on an IComputer. Basically just a UID to use in
/// logging, and an interface for constructing references to files that it wrote
/// </summary>
public interface IProcess
{
/// <summary>
/// a UID for this process
/// </summary>
string Id { get; }
/// <summary>
/// the directory for files written by this process
/// </summary>
string Directory { get; }
}
/// <summary>
    /// affinities can be described at different levels. We only use Host and Rack
    /// for now
/// </summary>
public enum AffinityResourceLevel
{
Core = 0,
Socket,
Host,
Rack,
Cluster
}
/// <summary>
/// the external application supplies affinity resources when making scheduling requests
/// </summary>
public class AffinityResource
{
/// <summary>
        /// the granularity of this affinity; for now we only use Host and Rack
/// </summary>
public AffinityResourceLevel level;
/// <summary>
/// the identifying string for the affinity. It is a UID with respect to the level,
/// so identifies a unique host or rack
/// </summary>
public string locality;
/// <summary>
/// create a new affinity object
/// </summary>
/// <param name="l">the granularity of the affinity</param>
/// <param name="place">the locality of the affinity</param>
public AffinityResource(AffinityResourceLevel l, string place)
{
level = l;
locality = place;
}
}
/// <summary>
/// a particular affinity that a process has to be scheduled somewhere
/// </summary>
public class Affinity
{
/// <summary>
/// if this is true, the process may not run anywhere else
/// </summary>
public bool isHardContraint;
/// <summary>
/// this is a list of equally good places to run, e.g. the locations of
/// all the replicas of a file
/// </summary>
public List<AffinityResource> affinities;
/// <summary>
/// this is a weight, corresponding to the amount of data the process
/// would like to read from the locality in question
/// </summary>
public UInt64 weight;
/// <summary>
/// make a new affinity descriptor
/// </summary>
/// <param name="hc">true if this is a hard constraint and the process may not run anywhere else</param>
/// <param name="w">weight corresponding to the amount of data the process would like to read from here</param>
public Affinity(bool hc, UInt64 w)
{
isHardContraint = hc;
affinities = new List<AffinityResource>();
weight = w;
}
}
/// <summary>
    /// state returned when the status of a key at a process is queried after the process has started running
/// </summary>
public enum ProcessState
{
/// <summary>
/// the process is still running
/// </summary>
Running,
/// <summary>
/// the process has reported successful completion
/// </summary>
Completed,
/// <summary>
/// the process has exited without reporting successful completion
/// </summary>
Failed
}
/// <summary>
/// state returned when a process has exited
/// </summary>
public enum ProcessExitState
{
/// <summary>
/// the process was never started due to an error
/// </summary>
ScheduleFailed,
/// <summary>
/// contact was lost with the process
/// </summary>
StatusFailed,
/// <summary>
/// the process was canceled while scheduling
/// </summary>
ScheduleCanceled,
/// <summary>
/// the process had started, and has now exited
/// </summary>
ProcessExited
}
/// <summary>
/// When the higher level of the software stack wants to schedule a process, it
/// passes in an object that implements IProcessWatcher to receive updates in the
/// form of callbacks as the process is queued, matched, scheduled, run, etc.
/// </summary>
public interface IProcessWatcher
{
/// <summary>
/// OnQueued is called when the process has been placed in the scheduling queues.
/// </summary>
void OnQueued();
/// <summary>
        /// OnMatched is called when the process has been matched to <paramref name="computer"/>
/// and is about to be scheduled there.
/// </summary>
/// <param name="computer">The computer the process has been scheduled on</param>
/// <param name="timestamp">The UTC time on the local computer that the process was scheduled</param>
void OnMatched(IComputer computer, long timestamp);
/// <summary>
/// OnCreated is called when the process has been created on the remote computer.
/// </summary>
/// <param name="timestamp">The UTC time on the local computer that the remote daemon responded to the process create request</param>
void OnCreated(long timestamp);
/// <summary>
/// OnStarted is called when the process has started running on the remote computer.
/// </summary>
/// <param name="timestamp">The UTC time on the remote computer that the process started running</param>
void OnStarted(long timestamp);
/// <summary>
/// OnExited is called when the process has finished, either because it could not be
/// created (state=ScheduleFailed), because contact was lost with its daemon
/// (state=StatusFailed) or because it has finished (state=ProcessExited). exitCode is
/// the process exit code
/// </summary>
/// <param name="state">How far through scheduling the process got</param>
/// <param name="timestamp">The UTC time on the remote computer that the process stopped running, or on the local computer
/// if we lost contact with the remote daemon</param>
/// <param name="exitCode">The exit code of the process if it was started, or 1 otherwise</param>
/// <param name="errorText">A description of the error if the process didn't exit cleanly</param>
void OnExited(ProcessExitState state, long timestamp, int exitCode, string errorText);
}
/// <summary>
/// when the application wants to learn the status of a key on a running process, it passes in an
    /// IProcessKeyStatus object identifying the key being queried, and including a callback method
/// that is called when the status is known
/// </summary>
public interface IProcessKeyStatus
{
/// <summary>
/// the key to query at the process, filled in by the application
/// </summary>
/// <returns>the key the application wants to query</returns>
string GetKey();
/// <summary>
/// the heartbeat timeout, filled in by the application
/// </summary>
/// <returns>how long to block waiting for the key to change before returning its status.
/// OnCompleted will be called either when the version increases, or the timeout expires.</returns>
int GetTimeout();
/// <summary>
/// the last known version of the key, filled in by the application
/// </summary>
/// <returns>the last version of the key seen by the application. OnCompleted will be called either
/// when the version increases, or the timeout expires.</returns>
UInt64 GetVersion();
/// <summary>
/// called by the cluster interface when the status query completes
/// </summary>
/// <param name="newVersion">the new version of the key at the remote process</param>
/// <param name="statusData">the value of the key at the remote process</param>
/// <param name="processExitCode">the exit code if the process has finished, or 259 (STILL_ACTIVE)</param>
/// <param name="errorMessage">a descriptive message if something went wrong</param>
void OnCompleted(UInt64 newVersion, byte[] statusData, int processExitCode, string errorMessage);
}
/// <summary>
/// when the application wants to set a command key on a running process, it passes in an
/// IProcessCommand object identifying the key being set, and including a callback method
/// that is called when the RPC completes
/// </summary>
public interface IProcessCommand
{
/// <summary>
/// the key to set at the process, filled in by the application
/// </summary>
/// <returns>the key the application wants to set</returns>
string GetKey();
/// <summary>
/// a human-friendly summary of the value being set, filled in by the application
/// </summary>
/// <returns>summary of the value being set</returns>
string GetShortStatus();
/// <summary>
/// the value to set, filled in by the application
/// </summary>
/// <returns>the value being set</returns>
byte[] GetPayload();
/// <summary>
/// called by the cluster interface when the command set completes
/// </summary>
/// <param name="reason">null on success, or a descriptive error if there was a problem</param>
void OnCompleted(string reason);
}
public interface ICluster
{
/// <summary>
/// starts up the cluster. Blocks until the application is ready to proceed
/// </summary>
/// <returns>false if the cluster startup fails</returns>
bool Start();
/// <summary>
/// shuts down the connection to the cluster during application exit
/// </summary>
void Stop();
/// <summary>
/// retrieve a list of computers currently available in the cluster. The list
/// may change as failures occur, or the cluster elastically changes the resource
/// allocation of the application
/// </summary>
/// <returns>the computers currently available in the cluster</returns>
List<IComputer> GetComputers();
/// <summary>
/// get a Uri to read a file from a computer running on the same host
/// </summary>
/// <param name="computer">the computer that wrote the file</param>
/// <param name="fileName">the leafname of the file</param>
/// <param name="compressionMode">an integer compression mode to put in the query part of the Uri</param>
/// <returns>a uri that identifies the file locally</returns>
string GetLocalFilePath(IComputer computer, string directory, string fileName, int compressionMode);
/// <summary>
/// get a Uri to read a file from a remote computer
/// </summary>
/// <param name="computer">the computer that wrote the file</param>
/// <param name="fileName"></param>
/// <param name="compressionMode">an integer compression mode to put in the query part of the Uri</param>
/// <returns>a uri that identifies the file remotely</returns>
string GetRemoteFilePath(IComputer computer, string directory, string fileName, int compressionMode);
/// <summary>
/// generate a new Process object that will be used to schedule a process on a cluster
/// computer
/// </summary>
/// <param name="watcher">the callback handler that the cluster will use to update
/// the application on the process' lifecycle</param>
/// <param name="commandLine">the command line to use to start the process on the remote
/// computer</param>
/// <param name="commandLineArguments">arguments to provide to the remote process</param>
/// <returns>a handle to the new process</returns>
IProcess NewProcess(IProcessWatcher watcher, string commandLine, string commandLineArguments);
/// <summary>
        /// request that a Process object, returned from NewProcess, be scheduled according
/// to the supplied affinity hints and constraints
/// </summary>
/// <param name="process">the handle to the previously-created process</param>
/// <param name="affinities">the hints and constraints about where the process should be run</param>
void ScheduleProcess(IProcess process, List<Affinity> affinities);
/// <summary>
/// request that a process, previously created using NewProcess, be canceled, either before it is
/// scheduled or after it starts running.
/// </summary>
/// <param name="process">the handle to the process</param>
void CancelProcess(IProcess process);
/// <summary>
/// query the status of a key at a running process
/// </summary>
/// <param name="process">the handle to the process</param>
/// <param name="status">a description of the key, and a callback when the query completes</param>
void GetProcessStatus(IProcess process, IProcessKeyStatus status);
/// <summary>
/// set a command key at a running process
/// </summary>
/// <param name="process">the handle to the process</param>
/// <param name="command">a description of the command, and callback when the RPC completes</param>
void SetProcessCommand(IProcess process, IProcessCommand command);
}
public class HttpClient
{
private class PLogger : Microsoft.Research.Peloponnese.ILogger
{
private readonly ILogger logger;
public PLogger(ILogger l)
{
logger = l;
}
public void Log(
string entry,
[CallerFilePath] string file = "(nofile)",
[CallerMemberName] string function = "(nofunction)",
[CallerLineNumber] int line = -1)
{
logger.Log(entry, file, function, line);
}
public void Stop()
{
}
}
private static readonly string dummy;
private static NotHttpClient client;
static HttpClient()
{
dummy = "string to lock";
}
public static void Initialize(ILogger logger)
{
lock (dummy)
{
if (client == null)
{
client = new NotHttpClient(true, 1, 30000, new PLogger(logger));
}
}
}
public static IHttpRequest Create(string uri)
{
return client.CreateRequest(uri);
}
public static IHttpRequest Create(Uri uri)
{
return client.CreateRequest(uri);
}
}
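    // Typical use (sketch; the URI is hypothetical): initialize once with the application's
    // logger, then create requests as needed.
    //   HttpClient.Initialize(logger);
    //   IHttpRequest request = HttpClient.Create("http://node1:8471/files/part0");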
// -----------------------
// below are interfaces implemented by schedulers
// -----------------------
/// <summary>
/// the handle for a process used internally by a scheduler
/// </summary>
public interface ISchedulerProcess
{
/// <summary>
/// a unique ID assigned by the scheduler to the process
/// </summary>
string Id { get; }
}
/// <summary>
/// the method called by the scheduler when a process is ready to be run on the cluster. When the returned
/// Task completes the scheduler reclaims the resource that the process was using.
/// </summary>
/// <param name="computer">The location where the process has been scheduled or null if there was a scheduling error</param>
/// <param name="processId">A unique integer ID for the process at the computer or -1 if there was a scheduling error</param>
/// <param name="blocker">This Task completes if the computer is being shut down in which case
/// RunProcess should return early, or null if there was a scheduling error</param>
/// <param name="errorReason">null if the process was scheduled, otherwise an error explaining the reason it wasn't</param>
/// <returns>a Task that completes when the process finishes. This should be immediately returned complete if there was a
/// scheduling error</returns>
public delegate Task RunProcess(IComputer computer, int processId, Task interrupt, string errorReason);
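    // A minimal sketch of a RunProcess callback (names below are hypothetical): it returns
    // immediately on a scheduling error and otherwise waits until either the process exits
    // or the computer is being shut down.
    //
    //   RunProcess onScheduled = async (computer, processId, interrupt, errorReason) =>
    //   {
    //       if (errorReason != null) return;            // scheduling failed; the Task completes at once
    //       await Task.WhenAny(processTask, interrupt); // processTask completes when the process exits
    //   };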
/// <summary>
/// the interface implemented by a Dryad scheduler
/// </summary>
public interface IScheduler
{
/// <summary>
/// Start the scheduler.
/// </summary>
/// <returns>true if the scheduler started successfully</returns>
bool Start();
/// <summary>
/// get a snapshot of the available computers in the cluster. This may change later
/// due to failures or elastic resource allocation.
/// </summary>
        /// <returns>a snapshot of the computers currently available in the cluster</returns>
List<IComputer> GetComputers();
/// <summary>
/// get a computer that is currently running on a given host, or null if there isn't one
/// </summary>
/// <param name="host">the hostname to look up a computer on</param>
/// <returns>computer at the host, or null if there isn't one</returns>
IComputer GetComputerAtHost(string host);
/// <summary>
/// get a handle to a new process that can be scheduled later
/// </summary>
/// <returns>the handle</returns>
ISchedulerProcess NewProcess();
/// <summary>
/// add a process to the scheduling queues, along with affinity information about where it would
/// prefer to run and a callback that is triggered when the process has been scheduled
/// </summary>
/// <param name="process">a handle for the process, created earlier using NewProcess</param>
/// <param name="affinities">a description of the hints/constraints about where the process should run</param>
/// <param name="onScheduled">a callback that is invoked when the process has been scheduled, or if a
/// scheduling error occurs</param>
void ScheduleProcess(ISchedulerProcess process, List<Affinity> affinities, RunProcess onScheduled);
/// <summary>
/// cancel the scheduling of a process. This will trigger the onScheduled callback if it has not already
/// been sent, otherwise it does nothing.
/// </summary>
/// <param name="process">a handle to the process to be canceled</param>
void CancelProcess(ISchedulerProcess process);
/// <summary>
/// shut down the scheduler
/// </summary>
void Stop();
}
/// <summary>
/// a factory managing available schedulers
/// </summary>
public class Factory
{
/// <summary>
/// delegate to create a scheduler
/// </summary>
/// <param name="logger">handle to the application logging interface</param>
/// <returns>a new scheduler</returns>
public delegate IScheduler MakeFunction(ILogger logger);
/// <summary>
/// table of registered schedulers each associated with a string
/// </summary>
private static Dictionary<string, MakeFunction> registrations;
/// <summary>
/// static initializer
/// </summary>
static Factory()
{
registrations = new Dictionary<string, MakeFunction>();
}
/// <summary>
/// called by a scheduler dll to register itself with the factory
/// </summary>
/// <param name="type">string used to identify the scheduler</param>
/// <param name="factory">factory function to make a concrete instance of the scheduler</param>
public static void Register(string type, MakeFunction factory)
{
registrations.Add(type, factory);
}
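        // Typical registration from a scheduler assembly (sketch; "yarn" and YarnScheduler
        // are hypothetical):
        //   Factory.Register("yarn", logger => new YarnScheduler(logger));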
/// <summary>
/// used internally to create a scheduler
/// </summary>
/// <param name="type">string that the scheduler registered with</param>
/// <param name="logger">handle to the application logging interface</param>
/// <returns>a concrete scheduler</returns>
internal static IScheduler CreateScheduler(string type, ILogger logger)
{
MakeFunction factory;
if (registrations.TryGetValue(type, out factory))
{
return factory(logger);
}
else
{
throw new ApplicationException("Unknown scheduler type " + type);
}
}
}
}
| |
//#define DEBUG
using System;
using System.Collections.Generic;
namespace ICSimulator
{
public class Controller_Rate : Controller_ClassicBLESS
{
double[] m_isThrottled = new double[Config.N];
IPrioPktPool[] m_injPools = new IPrioPktPool[Config.N];
bool[] m_starved = new bool[Config.N];
        // This represents the round-robin turn assigned to each node
int[] throttleTable = new int[Config.N];
bool isThrottling;
        // This tells which group is allowed to run in a given epoch
int currentAllow = 0;
int injLimit = 150;
public Controller_Rate()
{
isThrottling = false;
Console.WriteLine("init: Global_RR");
for (int i = 0; i < Config.N; i++)
{
MPKI[i]=0.0;
numInject[i]=0;
num_ins_last_epoch[i]=0;
m_isThrottled[i]=0.0;
L1misses[i]=0;
}
}
public override void resetStat()
{
#if DEBUG
Console.WriteLine("Reset MPKIs and num_ins after throttling");
#endif
for (int i = 0; i < Config.N; i++)
{
MPKI[i]=0.0;
num_ins_last_epoch[i] = Simulator.stats.insns_persrc[i].Count;
numInject[i]=0;
L1misses[i]=0;
}
}
void setThrottleRate(int node, double cond)
{
m_isThrottled[node] = cond;
}
// true to allow injection, false to block (throttle)
// RouterFlit uses this function to determine whether it can inject or not
// TODO: put this in a node?
public override bool tryInject(int node)
{
if(Simulator.rand.NextDouble()>m_isThrottled[node])
{
Simulator.stats.throttled_counts_persrc[node].Add();
return true;
}
else
return false;
}
public override void setInjPool(int node, IPrioPktPool pool)
{
m_injPools[node] = pool;
}
public override void reportStarve(int node)
{
m_starved[node] = true;
}
void doThrottling()
{
for(int i=0;i<Config.N;i++)
{
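                // The throttle rate scales with the node's injection count and is capped at 0.6:
                // rate = min(numInject[i] / injLimit, 0.6). Nodes that injected 30 or fewer
                // packets in the last sample stay unthrottled.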
//if we want to enable interval based throttling
//if(throttleTable[i]>0)
//if we always throttle
if(numInject[i]>30)
{
//maximum throttling rate
if(((double)numInject[i]/(double)injLimit)>0.6)
setThrottleRate(i,0.6);
else
setThrottleRate(i,((double)numInject[i]/(double)injLimit));
}
else
setThrottleRate(i,0);
//if((throttleTable[i]==currentAllow)||(throttleTable[i]==0))
//{
// setThrottleRate(i,false);
//}
//else
//{
// setThrottleRate(i, true);
//TODO: hack to test the common ground
//setThrottleRate(i,false);
//}
}
currentAllow++;
//interval based
if(currentAllow > Config.num_epoch)
{
//wrap around here
currentAllow=1;
}
}
public override void doStep()
{
// for (int i = 0; i < Config.N; i++)
// {
            // This is not needed; it can be uncommented if we want to take starvation into account.
// avg_MPKI[i].accumulate(m_starved[i]);
// avg_qlen[i].accumulate(m_injPools[i].Count);
// m_starved[i] = false;
// }
if (Simulator.CurrentRound > 20000 &&
(Simulator.CurrentRound % (ulong)Config.throttle_sampling_period) == 0)
{
setThrottling();
resetStat();
}
if (isThrottling && Simulator.CurrentRound > 20000 &&
(Simulator.CurrentRound % (ulong)Config.interval_length) == 0)
{
doThrottling();
}
}
void setThrottling()
{
#if DEBUG
Console.Write("\n:: cycle {0} ::",
Simulator.CurrentRound);
#endif
//get the MPKI value
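            // MPKI is approximated here as (packets injected * 1000) / instructions retired
            // by the node since the last stat reset.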
for (int i = 0; i < Config.N; i++)
{
if(num_ins_last_epoch[i]==0)
//NumInject gets incremented in RouterFlit.cs
MPKI[i]=((double)(numInject[i]*1000))/(Simulator.stats.insns_persrc[i].Count);
else
{
if(Simulator.stats.insns_persrc[i].Count-num_ins_last_epoch[i]!=0)
MPKI[i]=((double)(numInject[i]*1000))/(Simulator.stats.insns_persrc[i].Count-num_ins_last_epoch[i]);
else
MPKI[i]=0;
}
}
if(isThrottling)
{
            // see if we can un-throttle the network
double avg = 0.0;
//check if we can go back to FFA
for (int i = 0; i < Config.N; i++)
{
#if DEBUG
Console.Write("[{1}] {0} |",(int)MPKI[i],i);
#endif
avg = avg + MPKI[i];
}
avg = avg/Config.N;
#if DEBUG
Console.WriteLine("Estimating MPKI, min_thresh {1}: avg MPKI {0}",avg,Config.MPKI_max_thresh);
#endif
if(avg < Config.MPKI_min_thresh)
{
#if DEBUG
Console.WriteLine("\n****OFF****Transition from Throttle mode to FFA! with avg MPKI {0}\n",avg);
#endif
isThrottling = false;
//un-throttle the network
for(int i=0;i<Config.N;i++)
setThrottleRate(i,0.0);
}
}
else
{
double avg = 0.0;
// determine whether any node is congested
int total_high = 0;
for (int i = 0; i < Config.N; i++)
avg = avg + MPKI[i];
avg = avg/Config.N;
#if DEBUG
Console.WriteLine("Estimating MPKI, max_thresh {1}: avg MPKI {0}",avg,Config.MPKI_max_thresh);
#endif
//greater than the max threshold
if (avg > Config.MPKI_max_thresh) // TODO: Change this to a dynamic scheme
{
#if DEBUG
Console.Write("Throttle mode turned on: cycle {0} (",
Simulator.CurrentRound);
#endif
for (int i = 0; i < Config.N; i++)
if (MPKI[i] > Config.MPKI_high_node)
{
total_high++;
//right now we randomly pick one epoch to run
//TODO: make this more intelligent
throttleTable[i] = Simulator.rand.Next(Config.num_epoch);
//TODO: why set it here?
setThrottleRate(i, 0.6);
//TODO: hack to test the common ground
//setThrottleRate(i,false);
#if DEBUG
Console.Write("#ON#:Node {0} with MPKI {1} ",i,MPKI[i]);
#endif
}
else
{
throttleTable[i]=0;
setThrottleRate(i, 0.0);
#if DEBUG
Console.Write("@OFF@:Node {0} with MPKI {1} ",i,MPKI[i]);
#endif
}
#if DEBUG
Console.WriteLine(")");
#endif
isThrottling = true;
currentAllow = 1;
}
}
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.Generic;
using System.Diagnostics;
using System.Reflection.Internal;
namespace System.Reflection.Metadata.Ecma335
{
/// <summary>
/// Provides extension methods for working with certain raw elements of the Ecma 335 metadata tables and heaps.
/// </summary>
public static class MetadataReaderExtensions
{
/// <summary>
/// Returns the number of rows in the specified table.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="tableIndex"/> is not a valid table index.</exception>
public static int GetTableRowCount(this MetadataReader reader, TableIndex tableIndex)
{
if (reader == null)
{
throw new ArgumentNullException("reader");
}
if ((int)tableIndex >= TableIndexExtensions.Count)
{
throw new ArgumentOutOfRangeException("tableIndex");
}
return (int)reader.TableRowCounts[(int)tableIndex];
}
/// <summary>
/// Returns the size of a row in the specified table.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="tableIndex"/> is not a valid table index.</exception>
public static int GetTableRowSize(this MetadataReader reader, TableIndex tableIndex)
{
if (reader == null)
{
throw new ArgumentNullException("reader");
}
switch (tableIndex)
{
case TableIndex.Module: return reader.ModuleTable.RowSize;
case TableIndex.TypeRef: return reader.TypeRefTable.RowSize;
case TableIndex.TypeDef: return reader.TypeDefTable.RowSize;
case TableIndex.FieldPtr: return reader.FieldPtrTable.RowSize;
case TableIndex.Field: return reader.FieldTable.RowSize;
case TableIndex.MethodPtr: return reader.MethodPtrTable.RowSize;
case TableIndex.MethodDef: return reader.MethodDefTable.RowSize;
case TableIndex.ParamPtr: return reader.ParamPtrTable.RowSize;
case TableIndex.Param: return reader.ParamTable.RowSize;
case TableIndex.InterfaceImpl: return reader.InterfaceImplTable.RowSize;
case TableIndex.MemberRef: return reader.MemberRefTable.RowSize;
case TableIndex.Constant: return reader.ConstantTable.RowSize;
case TableIndex.CustomAttribute: return reader.CustomAttributeTable.RowSize;
case TableIndex.FieldMarshal: return reader.FieldMarshalTable.RowSize;
case TableIndex.DeclSecurity: return reader.DeclSecurityTable.RowSize;
case TableIndex.ClassLayout: return reader.ClassLayoutTable.RowSize;
case TableIndex.FieldLayout: return reader.FieldLayoutTable.RowSize;
case TableIndex.StandAloneSig: return reader.StandAloneSigTable.RowSize;
case TableIndex.EventMap: return reader.EventMapTable.RowSize;
case TableIndex.EventPtr: return reader.EventPtrTable.RowSize;
case TableIndex.Event: return reader.EventTable.RowSize;
case TableIndex.PropertyMap: return reader.PropertyMapTable.RowSize;
case TableIndex.PropertyPtr: return reader.PropertyPtrTable.RowSize;
case TableIndex.Property: return reader.PropertyTable.RowSize;
case TableIndex.MethodSemantics: return reader.MethodSemanticsTable.RowSize;
case TableIndex.MethodImpl: return reader.MethodImplTable.RowSize;
case TableIndex.ModuleRef: return reader.ModuleRefTable.RowSize;
case TableIndex.TypeSpec: return reader.TypeSpecTable.RowSize;
case TableIndex.ImplMap: return reader.ImplMapTable.RowSize;
case TableIndex.FieldRva: return reader.FieldRvaTable.RowSize;
case TableIndex.EncLog: return reader.EncLogTable.RowSize;
case TableIndex.EncMap: return reader.EncMapTable.RowSize;
case TableIndex.Assembly: return reader.AssemblyTable.RowSize;
case TableIndex.AssemblyProcessor: return reader.AssemblyProcessorTable.RowSize;
case TableIndex.AssemblyOS: return reader.AssemblyOSTable.RowSize;
case TableIndex.AssemblyRef: return reader.AssemblyRefTable.RowSize;
case TableIndex.AssemblyRefProcessor: return reader.AssemblyRefProcessorTable.RowSize;
case TableIndex.AssemblyRefOS: return reader.AssemblyRefOSTable.RowSize;
case TableIndex.File: return reader.FileTable.RowSize;
case TableIndex.ExportedType: return reader.ExportedTypeTable.RowSize;
case TableIndex.ManifestResource: return reader.ManifestResourceTable.RowSize;
case TableIndex.NestedClass: return reader.NestedClassTable.RowSize;
case TableIndex.GenericParam: return reader.GenericParamTable.RowSize;
case TableIndex.MethodSpec: return reader.MethodSpecTable.RowSize;
case TableIndex.GenericParamConstraint: return reader.GenericParamConstraintTable.RowSize;
default:
throw new ArgumentOutOfRangeException("tableIndex");
}
}
/// <summary>
/// Returns the offset from the start of metadata to the specified table.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="tableIndex"/> is not a valid table index.</exception>
public static unsafe int GetTableMetadataOffset(this MetadataReader reader, TableIndex tableIndex)
{
if (reader == null)
{
throw new ArgumentNullException("reader");
}
return (int)(reader.GetTableMetadataBlock(tableIndex).Pointer - reader.Block.Pointer);
}
private static MemoryBlock GetTableMetadataBlock(this MetadataReader reader, TableIndex tableIndex)
{
Debug.Assert(reader != null);
switch (tableIndex)
{
case TableIndex.Module: return reader.ModuleTable.Block;
case TableIndex.TypeRef: return reader.TypeRefTable.Block;
case TableIndex.TypeDef: return reader.TypeDefTable.Block;
case TableIndex.FieldPtr: return reader.FieldPtrTable.Block;
case TableIndex.Field: return reader.FieldTable.Block;
case TableIndex.MethodPtr: return reader.MethodPtrTable.Block;
case TableIndex.MethodDef: return reader.MethodDefTable.Block;
case TableIndex.ParamPtr: return reader.ParamPtrTable.Block;
case TableIndex.Param: return reader.ParamTable.Block;
case TableIndex.InterfaceImpl: return reader.InterfaceImplTable.Block;
case TableIndex.MemberRef: return reader.MemberRefTable.Block;
case TableIndex.Constant: return reader.ConstantTable.Block;
case TableIndex.CustomAttribute: return reader.CustomAttributeTable.Block;
case TableIndex.FieldMarshal: return reader.FieldMarshalTable.Block;
case TableIndex.DeclSecurity: return reader.DeclSecurityTable.Block;
case TableIndex.ClassLayout: return reader.ClassLayoutTable.Block;
case TableIndex.FieldLayout: return reader.FieldLayoutTable.Block;
case TableIndex.StandAloneSig: return reader.StandAloneSigTable.Block;
case TableIndex.EventMap: return reader.EventMapTable.Block;
case TableIndex.EventPtr: return reader.EventPtrTable.Block;
case TableIndex.Event: return reader.EventTable.Block;
case TableIndex.PropertyMap: return reader.PropertyMapTable.Block;
case TableIndex.PropertyPtr: return reader.PropertyPtrTable.Block;
case TableIndex.Property: return reader.PropertyTable.Block;
case TableIndex.MethodSemantics: return reader.MethodSemanticsTable.Block;
case TableIndex.MethodImpl: return reader.MethodImplTable.Block;
case TableIndex.ModuleRef: return reader.ModuleRefTable.Block;
case TableIndex.TypeSpec: return reader.TypeSpecTable.Block;
case TableIndex.ImplMap: return reader.ImplMapTable.Block;
case TableIndex.FieldRva: return reader.FieldRvaTable.Block;
case TableIndex.EncLog: return reader.EncLogTable.Block;
case TableIndex.EncMap: return reader.EncMapTable.Block;
case TableIndex.Assembly: return reader.AssemblyTable.Block;
case TableIndex.AssemblyProcessor: return reader.AssemblyProcessorTable.Block;
case TableIndex.AssemblyOS: return reader.AssemblyOSTable.Block;
case TableIndex.AssemblyRef: return reader.AssemblyRefTable.Block;
case TableIndex.AssemblyRefProcessor: return reader.AssemblyRefProcessorTable.Block;
case TableIndex.AssemblyRefOS: return reader.AssemblyRefOSTable.Block;
case TableIndex.File: return reader.FileTable.Block;
case TableIndex.ExportedType: return reader.ExportedTypeTable.Block;
case TableIndex.ManifestResource: return reader.ManifestResourceTable.Block;
case TableIndex.NestedClass: return reader.NestedClassTable.Block;
case TableIndex.GenericParam: return reader.GenericParamTable.Block;
case TableIndex.MethodSpec: return reader.MethodSpecTable.Block;
case TableIndex.GenericParamConstraint: return reader.GenericParamConstraintTable.Block;
default:
throw new ArgumentOutOfRangeException("tableIndex");
}
}
/// <summary>
/// Returns the size of the specified heap.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="heapIndex"/> is not a valid heap index.</exception>
public static int GetHeapSize(this MetadataReader reader, HeapIndex heapIndex)
{
if (reader == null)
{
throw new ArgumentNullException("reader");
}
return reader.GetMetadataBlock(heapIndex).Length;
}
/// <summary>
/// Returns the offset from the start of metadata to the specified heap.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="heapIndex"/> is not a valid heap index.</exception>
public static unsafe int GetHeapMetadataOffset(this MetadataReader reader, HeapIndex heapIndex)
{
if (reader == null)
{
throw new ArgumentNullException("reader");
}
return (int)(reader.GetMetadataBlock(heapIndex).Pointer - reader.Block.Pointer);
}
/// <summary>
        /// Returns the memory block of the specified heap.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="heapIndex"/> is not a valid heap index.</exception>
private static MemoryBlock GetMetadataBlock(this MetadataReader reader, HeapIndex heapIndex)
{
Debug.Assert(reader != null);
switch (heapIndex)
{
case HeapIndex.UserString:
return reader.UserStringStream.Block;
case HeapIndex.String:
return reader.StringStream.Block;
case HeapIndex.Blob:
return reader.BlobStream.Block;
case HeapIndex.Guid:
return reader.GuidStream.Block;
default:
throw new ArgumentOutOfRangeException("heapIndex");
}
}
/// <summary>
        /// Returns a handle to the UserString that follows the given one in the UserString heap or a nil handle if it is the last one.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
public static UserStringHandle GetNextHandle(this MetadataReader reader, UserStringHandle handle)
{
if (reader == null)
{
throw new ArgumentNullException("reader");
}
return reader.UserStringStream.GetNextHandle(handle);
}
/// <summary>
        /// Returns a handle to the Blob that follows the given one in the Blob heap or a nil handle if it is the last one.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
public static BlobHandle GetNextHandle(this MetadataReader reader, BlobHandle handle)
{
if (reader == null)
{
throw new ArgumentNullException("reader");
}
return reader.BlobStream.GetNextHandle(handle);
}
/// <summary>
        /// Returns a handle to the String that follows the given one in the String heap or a nil handle if it is the last one.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
public static StringHandle GetNextHandle(this MetadataReader reader, StringHandle handle)
{
if (reader == null)
{
throw new ArgumentNullException("reader");
}
return reader.StringStream.GetNextHandle(handle);
}
/// <summary>
/// Enumerates entries of EnC log.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
public static IEnumerable<EditAndContinueLogEntry> GetEditAndContinueLogEntries(this MetadataReader reader)
{
if (reader == null)
{
throw new ArgumentNullException("reader");
}
for (int rid = 1; rid <= reader.EncLogTable.NumberOfRows; rid++)
{
yield return new EditAndContinueLogEntry(
new EntityHandle(reader.EncLogTable.GetToken(rid)),
reader.EncLogTable.GetFuncCode(rid));
}
}
/// <summary>
/// Enumerates entries of EnC map.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
public static IEnumerable<EntityHandle> GetEditAndContinueMapEntries(this MetadataReader reader)
{
if (reader == null)
{
throw new ArgumentNullException("reader");
}
for (int rid = 1; rid <= reader.EncMapTable.NumberOfRows; rid++)
{
yield return new EntityHandle(reader.EncMapTable.GetToken(rid));
}
}
/// <summary>
/// Enumerate types that define one or more properties.
/// </summary>
/// <returns>
/// The resulting sequence corresponds exactly to entries in PropertyMap table,
/// i.e. n-th returned <see cref="TypeDefinitionHandle"/> is stored in n-th row of PropertyMap.
/// </returns>
public static IEnumerable<TypeDefinitionHandle> GetTypesWithProperties(this MetadataReader reader)
{
if (reader == null)
{
throw new ArgumentNullException("reader");
}
for (int rid = 1; rid <= reader.PropertyMapTable.NumberOfRows; rid++)
{
yield return reader.PropertyMapTable.GetParentType(rid);
}
}
/// <summary>
/// Enumerate types that define one or more events.
/// </summary>
/// <returns>
/// The resulting sequence corresponds exactly to entries in EventMap table,
/// i.e. n-th returned <see cref="TypeDefinitionHandle"/> is stored in n-th row of EventMap.
/// </returns>
public static IEnumerable<TypeDefinitionHandle> GetTypesWithEvents(this MetadataReader reader)
{
if (reader == null)
{
throw new ArgumentNullException("reader");
}
for (int rid = 1; rid <= reader.EventMapTable.NumberOfRows; rid++)
{
yield return reader.EventMapTable.GetParentType(rid);
}
}
}
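    // A minimal usage sketch (illustrative only, not part of the original API surface):
    // formats the row count, row size, and metadata offset of a table from an
    // already-constructed MetadataReader.
    internal static class MetadataReaderExtensionsSample
    {
        internal static string DescribeTable(MetadataReader reader, TableIndex table)
        {
            return string.Format(
                "{0}: {1} rows x {2} bytes @ metadata offset 0x{3:X}",
                table,
                reader.GetTableRowCount(table),
                reader.GetTableRowSize(table),
                reader.GetTableMetadataOffset(table));
        }
    }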
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections;
using System.Security.Cryptography.X509Certificates;
using Xunit;
namespace System.Security.Cryptography.Pkcs.Tests
{
public static class CounterSigningDerOrder
{
[Fact]
public static void CounterSigningReindexes()
{
ContentInfo content = new ContentInfo(new byte[] { 7 });
SignedCms cms = new SignedCms(content);
using (X509Certificate2 cert1 = Certificates.RSA2048SignatureOnly.TryGetCertificateWithPrivateKey())
using (X509Certificate2 cert2 = Certificates.RSAKeyTransferCapi1.TryGetCertificateWithPrivateKey())
{
CmsSigner signer = new CmsSigner(SubjectIdentifierType.SubjectKeyIdentifier, cert1);
cms.ComputeSignature(signer);
SignerProfile yellow = new SignerProfile(
cert1,
SubjectIdentifierType.SubjectKeyIdentifier,
hasSignedAttrs: false,
hasUnsignedAttrs: false,
hasCounterSigners: false);
AssertSignerTraits(cms.SignerInfos[0], yellow);
signer.SignedAttributes.Add(new Pkcs9SigningTime());
signer.SignerIdentifierType = SubjectIdentifierType.IssuerAndSerialNumber;
cms.ComputeSignature(signer);
SignerProfile green = new SignerProfile(
cert1,
SubjectIdentifierType.IssuerAndSerialNumber,
hasSignedAttrs: true,
hasUnsignedAttrs: false,
hasCounterSigners: false);
// No reordering. 0 stayed 0, new entry becomes 1.
AssertSignerTraits(cms.SignerInfos[0], yellow);
AssertSignerTraits(cms.SignerInfos[1], green);
signer = new CmsSigner(SubjectIdentifierType.IssuerAndSerialNumber, cert2);
cms.ComputeSignature(signer);
SignerProfile grey = new SignerProfile(
cert2,
SubjectIdentifierType.IssuerAndSerialNumber,
hasSignedAttrs: false,
hasUnsignedAttrs: false,
hasCounterSigners: false);
// No reordering. 0 stayed 0, 1 stays 1, new entry is 2.
AssertSignerTraits(cms.SignerInfos[0], yellow);
AssertSignerTraits(cms.SignerInfos[1], green);
AssertSignerTraits(cms.SignerInfos[2], grey);
CmsSigner counterSigner = new CmsSigner(SubjectIdentifierType.IssuerAndSerialNumber, cert1);
SignerInfo toCounterSign = cms.SignerInfos[0];
toCounterSign.ComputeCounterSignature(counterSigner);
// Reordering just happened.
                    // We counter-signed the first element, so it grows by roughly one SignerInfo, more than doubling in size.
// The sizes of the three were
// yellow: 311 bytes
// green: 455 bytes (IssuerAndSerialNumber takes more bytes, and it has attributes)
// grey: 212 bytes (1024-bit RSA signature instead of 2048-bit)
//
                    // Because yellow now also contains cyan (444 bytes, plus some overhead), it has grown
// to 763 bytes. So the size-sorted order (DER SET-OF sorting) is { grey, green, yellow }.
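                    // (DER requires the elements of a SET OF to be sorted by their encoded octets,
                    // X.690 11.6, which for these SignerInfos effectively orders them by size.)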
// Record that yellow gained a countersigner (and thus an unsigned attribute)
yellow.HasUnsignedAttrs = true;
yellow.HasCounterSigners = true;
SignerProfile cyan = new SignerProfile(
cert1,
SubjectIdentifierType.IssuerAndSerialNumber,
hasSignedAttrs: true,
hasUnsignedAttrs: false,
hasCounterSigners: false);
AssertSignerTraits(cms.SignerInfos[0], grey);
AssertSignerTraits(cms.SignerInfos[1], green);
AssertSignerTraits(cms.SignerInfos[2], yellow);
AssertSignerTraits(cms.SignerInfos[2].CounterSignerInfos[0], cyan);
counterSigner.UnsignedAttributes.Add(new Pkcs9SigningTime());
toCounterSign.ComputeCounterSignature(counterSigner);
SignerProfile red = new SignerProfile(
cert1,
SubjectIdentifierType.IssuerAndSerialNumber,
hasSignedAttrs: true,
hasUnsignedAttrs: true,
hasCounterSigners: false);
// Since "red" has one more attribute than "cyan", but they're otherwise the same
// it will sort later.
AssertSignerTraits(cms.SignerInfos[0], grey);
AssertSignerTraits(cms.SignerInfos[1], green);
AssertSignerTraits(cms.SignerInfos[2], yellow);
AssertSignerTraits(cms.SignerInfos[2].CounterSignerInfos[0], cyan);
AssertSignerTraits(cms.SignerInfos[2].CounterSignerInfos[1], red);
counterSigner.SignerIdentifierType = SubjectIdentifierType.SubjectKeyIdentifier;
toCounterSign.ComputeCounterSignature(counterSigner);
SignerProfile clear = new SignerProfile(
cert1,
SubjectIdentifierType.SubjectKeyIdentifier,
hasSignedAttrs: true,
hasUnsignedAttrs: true,
hasCounterSigners: false);
// By changing from IssuerAndSerialNumber to SubjectKeyIdentifier, this copy will
// sort higher. It saves so many bytes, in this specific case, that it goes first.
AssertSignerTraits(cms.SignerInfos[0], grey);
AssertSignerTraits(cms.SignerInfos[1], green);
AssertSignerTraits(cms.SignerInfos[2], yellow);
AssertSignerTraits(cms.SignerInfos[2].CounterSignerInfos[0], clear);
AssertSignerTraits(cms.SignerInfos[2].CounterSignerInfos[1], cyan);
AssertSignerTraits(cms.SignerInfos[2].CounterSignerInfos[2], red);
// Now start removing things.
cms.SignerInfos[2].RemoveCounterSignature(1);
// Fairly predictable.
AssertSignerTraits(cms.SignerInfos[0], grey);
AssertSignerTraits(cms.SignerInfos[1], green);
AssertSignerTraits(cms.SignerInfos[2], yellow);
AssertSignerTraits(cms.SignerInfos[2].CounterSignerInfos[0], clear);
AssertSignerTraits(cms.SignerInfos[2].CounterSignerInfos[1], red);
cms.SignerInfos[2].RemoveCounterSignature(1);
// Fairly predictable.
AssertSignerTraits(cms.SignerInfos[0], grey);
AssertSignerTraits(cms.SignerInfos[1], green);
AssertSignerTraits(cms.SignerInfos[2], yellow);
AssertSignerTraits(cms.SignerInfos[2].CounterSignerInfos[0], clear);
cms.SignerInfos[2].RemoveCounterSignature(0);
// We have removed the last counter-signer.
// yellow is now smaller than grey.
// But the document only re-normalizes (re-sorts) when a signature is added.
yellow.HasCounterSigners = false;
yellow.HasUnsignedAttrs = false;
AssertSignerTraits(cms.SignerInfos[0], grey);
AssertSignerTraits(cms.SignerInfos[1], green);
AssertSignerTraits(cms.SignerInfos[2], yellow);
// Export/import to normalize. Everyone is back to their original size,
// but the in-memory order had gone stale; the round-trip restores the size-sorted order: { grey, yellow, green }.
cms.Decode(cms.Encode());
AssertSignerTraits(cms.SignerInfos[0], grey);
AssertSignerTraits(cms.SignerInfos[1], yellow);
AssertSignerTraits(cms.SignerInfos[2], green);
cms.SignerInfos[0].ComputeCounterSignature(counterSigner);
// Move to the end of the line, Mr. Grey
grey.HasUnsignedAttrs = true;
grey.HasCounterSigners = true;
AssertSignerTraits(cms.SignerInfos[0], yellow);
AssertSignerTraits(cms.SignerInfos[1], green);
AssertSignerTraits(cms.SignerInfos[2], grey);
AssertSignerTraits(cms.SignerInfos[2].CounterSignerInfos[0], clear);
}
}
private static void AssertSignerTraits(SignerInfo signerInfo, SignerProfile profile)
{
Assert.Equal(profile.Type, signerInfo.SignerIdentifier.Type);
Assert.Equal(profile.Cert, signerInfo.Certificate);
AssertMaybeEmpty(profile.HasSignedAttrs, signerInfo.SignedAttributes);
AssertMaybeEmpty(profile.HasUnsignedAttrs, signerInfo.UnsignedAttributes);
AssertMaybeEmpty(profile.HasCounterSigners, signerInfo.CounterSignerInfos);
}
private static void AssertMaybeEmpty(bool shouldHaveData, IEnumerable collection)
{
if (shouldHaveData)
Assert.NotEmpty(collection);
else
Assert.Empty(collection);
}
private class SignerProfile
{
public X509Certificate2 Cert { get; }
public SubjectIdentifierType Type { get; }
public bool HasSignedAttrs { get; }
public bool HasUnsignedAttrs { get; set; }
public bool HasCounterSigners { get; set; }
internal SignerProfile(
X509Certificate2 cert,
SubjectIdentifierType type,
bool hasSignedAttrs,
bool hasUnsignedAttrs,
bool hasCounterSigners)
{
Cert = cert;
Type = type;
HasSignedAttrs = hasSignedAttrs;
HasUnsignedAttrs = hasUnsignedAttrs;
HasCounterSigners = hasCounterSigners;
}
}
}
}
| |
/*
Copyright 2019 Esri
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Text;
using System.IO;
using System.Runtime.InteropServices;
using System.Windows.Forms;
using ESRI.ArcGIS.Carto;
using ESRI.ArcGIS.ADF.BaseClasses;
using ESRI.ArcGIS.ADF.CATIDs;
using ESRI.ArcGIS.GlobeCore;
using ESRI.ArcGIS.Animation;
using ESRI.ArcGIS.Analyst3D;
using ESRI.ArcGIS.Geometry;
using ESRI.ArcGIS.Display;
using ESRI.ArcGIS.Geodatabase;
using ESRI.ArcGIS.Controls;
using ESRI.ArcGIS.DataSourcesGDB;
namespace VisualizeCameraPath
{
public class VisualizeCameraPath : ESRI.ArcGIS.Desktop.AddIns.Button
{
#region Member Variables
private ESRI.ArcGIS.GlobeCore.IGlobe globe;
private ESRI.ArcGIS.GlobeCore.IGlobeDisplay globeDisplay;
private ESRI.ArcGIS.Carto.IGraphicsContainer graphicsLayer;
private ESRI.ArcGIS.GlobeCore.IGlobeCamera globeCamera;
private ESRI.ArcGIS.Animation.IAGAnimationTracks animationTracks;
private ESRI.ArcGIS.Animation.IAGAnimationTrack animationTrack;
private VisualizeCameraPathForm theCamForm;
private ESRI.ArcGIS.Animation.IAGAnimationUtils animUtils;
private ESRI.ArcGIS.Animation.IAnimationEvents_Event animEvent;
private ESRI.ArcGIS.Animation.IAGAnimationEnvironment animEnv;
private ESRI.ArcGIS.Animation.IAGAnimationPlayer animPlayer;
private ESRI.ArcGIS.GlobeCore.IGlobeDisplayEvents_Event globeDispEvent;
private bool toolIsInitialized = false;
private double animationDuration = 0;
#endregion
#region DLLImportFunction
[DllImport("gdi32.dll")]
static extern bool DeleteObject(IntPtr hObject);
[DllImport("user32.dll")]
static extern int ShowWindow(int hwnd, int nCmdShow);
#endregion
public VisualizeCameraPath()
{
globe = ArcGlobe.Globe;
globeDisplay = globe.GlobeDisplay;
globeCamera = globeDisplay.ActiveViewer.Camera as IGlobeCamera;
}
~VisualizeCameraPath()
{
if (theCamForm != null)
{
theCamForm.Dispose();
}
}
protected override void OnClick()
{
//The first time the button is clicked
if(toolIsInitialized == false)
{
theCamForm = new VisualizeCameraPathForm();
//Add event handlers for form's button click events
theCamForm.playButton.Click+= new System.EventHandler(formPlayButtonClickEventHandler);
theCamForm.stopButton.Click+= new System.EventHandler(formStopButtonClickEventHandler);
theCamForm.generatePathButton.Click+= new System.EventHandler(formGeneratePathButtonClickEventHandler);
theCamForm.generateCamPathCheckBox.CheckedChanged += new EventHandler(formCheckbox1CheckedChanged);
theCamForm.Closing += new CancelEventHandler(theCamForm_Closing);
animEnv = new AGAnimationEnvironmentClass();
//true = the button has already been clicked
toolIsInitialized = true;
}
//If the main form is already open - do not open another one
else if (toolIsInitialized == true)
{
//Clear the list of animation tracks
theCamForm.animTracksListBox.Items.Clear();
}
//Get the list of animation tracks
this.getCameraAnimationTracksFromGlobe();
theCamForm.Show();
}
protected override void OnUpdate()
{
Enabled = ArcGlobe.Application != null;
}
#region Custom Functions and Event Handlers
//function for getting camera animation tracks
public void getCameraAnimationTracksFromGlobe()
{
ESRI.ArcGIS.Animation.IAGAnimationType animationType = new AnimationTypeGlobeCameraClass();
animationTracks = (ESRI.ArcGIS.Animation.IAGAnimationTracks)globe;
int animCounter = 0;
while (animCounter < animationTracks.AGTracks.Count)
{
animationTrack = (ESRI.ArcGIS.Animation.IAGAnimationTrack)animationTracks.AGTracks.get_Element(animCounter);
if (animationTrack.AnimationType == animationType)
{
theCamForm.animTracksListBox.Items.Add(animationTrack.Name);
}
animCounter = animCounter + 1;
}
}
//function for enabling selected animation track
public void enableSelectedTrack()
{
if (theCamForm.animTracksListBox.SelectedItem != null)
{
string selectedTrack = theCamForm.animTracksListBox.SelectedItem.ToString();
int animCounter = 0;
while (animCounter < animationTracks.AGTracks.Count)
{
animationTrack = (IAGAnimationTrack)animationTracks.AGTracks.get_Element(animCounter);
if (animationTrack.Name != selectedTrack)
{
IAGAnimationTrack trackToDisable;
animationTracks.FindTrack(animationTrack.Name, out trackToDisable);
trackToDisable.IsEnabled = false;
}
else if (animationTrack.Name == selectedTrack)
{
animationTrack.IsEnabled = true;
}
animCounter = animCounter + 1;
}
}
else if (theCamForm.animTracksListBox.SelectedItem == null)
{
MessageBox.Show("No Track Selected - All enabled tracks will be played");
}
}
//function for playing animation
public void playAnimation()
{
animUtils = new AGAnimationUtilsClass();
//register/unregister events for tracing camera path based on selection
animEvent = (IAnimationEvents_Event)animUtils;
//set animation duration
if (theCamForm.animDurationTextBox.Text != "" & theCamForm.animDurationTextBox.Text != "Optional")
{
animEnv.AnimationDuration = Convert.ToDouble(theCamForm.animDurationTextBox.Text);
}
else
{
MessageBox.Show("Please enter animation duration", "Error");
return;
}
//register animation event handler
animEvent.StateChanged += new IAnimationEvents_StateChangedEventHandler(myAnimationEventHandler);
//enable/disable other buttons
theCamForm.stopButton.Enabled = true;
theCamForm.generatePathButton.Enabled = false;
theCamForm.playButton.Enabled = false;
animPlayer = (IAGAnimationPlayer)animUtils;
animationDuration = animEnv.AnimationDuration;
animPlayer.PlayAnimation(animationTracks, animEnv, null);
}
//function for creating specified number of graphics per second
public void generatePathPerSecond()
{
//set animation duration
if (theCamForm.animDurationTextBox.Text != "" & theCamForm.animDurationTextBox.Text != "Optional")
{
animEnv.AnimationDuration = Convert.ToDouble(theCamForm.animDurationTextBox.Text);
}
else
{
MessageBox.Show("Please enter animation duration", "Error");
return;
}
animationDuration = animEnv.AnimationDuration;
int numPtsPerSecond = 0;
if (theCamForm.numPtsPerSecTextBox.Text != "")
{
numPtsPerSecond = Convert.ToInt32(theCamForm.numPtsPerSecTextBox.Text);
}
addGraphicLayer();
string selectedTrack = theCamForm.animTracksListBox.SelectedItem.ToString();
animationTracks.FindTrack(selectedTrack, out animationTrack);
IAGAnimationTrackKeyframes kFrames = (IAGAnimationTrackKeyframes)animationTrack;
int kFrameCount = kFrames.KeyframeCount;
//total number of points to be created
int totalPts = (int)(numPtsPerSecond * animationDuration);
//this is the from point for the lines connecting the interpolated point graphics
IPoint previousPt = new PointClass();
IZAware prevPtZAware = (IZAware)previousPt;
prevPtZAware.ZAware = true;
previousPt.PutCoords(0, 0);
//this is the line connecting the interpolated camera positions
IPolyline connectingLine = new PolylineClass();
IZAware lineZAware = (IZAware)connectingLine;
lineZAware.ZAware = true;
//disable all buttons
theCamForm.playButton.Enabled = false;
theCamForm.stopButton.Enabled = false;
theCamForm.generatePathButton.Enabled = false;
//loop over the keyframes in the selected camera track
for (int i = 0; i < kFrameCount; i++)
{
IAGKeyframe currentKeyframe = kFrames.get_Keyframe(i);
IAGKeyframe prevKeyframe;
IAGKeyframe nextKeyframe;
IAGKeyframe afterNextKeyframe;
//if/else statements to determine the keyframe arguments to the Interpolate method.
//This is needed because the first, second-to-last, and last keyframes must be handled
//differently from the middle keyframes
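//In concrete terms (derived from the branches below), the neighbours passed to
//Interpolate are chosen as:
// i == 0 -> prevKeyframe = currentKeyframe
// i == kFrameCount - 1 -> nextKeyframe = currentKeyframe
// i >= kFrameCount - 2 -> afterNextKeyframe = nextKeyframe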
if (i > 0)
{
prevKeyframe = kFrames.get_Keyframe(i - 1);
}
else
{
prevKeyframe = kFrames.get_Keyframe(i);
}
if (i < kFrameCount - 1)
{
nextKeyframe = kFrames.get_Keyframe(i + 1);
}
else
{
nextKeyframe = kFrames.get_Keyframe(i);
}
if (i < kFrameCount - 2)
{
afterNextKeyframe = kFrames.get_Keyframe(i + 2);
}
else
{
//this should be equal to the nextKeyFrame for the last keyframe
afterNextKeyframe = nextKeyframe;//kFrames.get_Keyframe(i);
}
double origCamLat, origCamLong, origCamAlt;
double interLat, interLong, interAlt;
double tarLat, tarLong, tarAlt;
double interTarLat, interTarLong, interTarAlt;
globeCamera.GetObserverLatLonAlt(out origCamLat, out origCamLong, out origCamAlt);
globeCamera.GetTargetLatLonAlt(out tarLat, out tarLong, out tarAlt);
IAGAnimationContainer pAnimContainer = animationTracks.AnimationObjectContainer;
object objToInterpolate = (object)globeCamera;
double timeDiff = nextKeyframe.TimeStamp - currentKeyframe.TimeStamp;
int numPtsToInterpolateNow;
numPtsToInterpolateNow = Convert.ToInt32((timeDiff * totalPts));
//interpolate positions between keyframes and draw the graphics
//for 0 to n-1 keyframes
if (i < kFrameCount - 1)
{
for (int j = 0; j < numPtsToInterpolateNow; j++)
{
double timeToInterpolate;
timeToInterpolate = currentKeyframe.TimeStamp + j * (timeDiff / (numPtsToInterpolateNow));
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 1, timeToInterpolate, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 2, timeToInterpolate, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 3, timeToInterpolate, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 4, timeToInterpolate, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 5, timeToInterpolate, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 6, timeToInterpolate, nextKeyframe, prevKeyframe, afterNextKeyframe);
//get observer and target lat, long, alt after interpolation
globeCamera.GetObserverLatLonAlt(out interLat, out interLong, out interAlt);
globeCamera.GetTargetLatLonAlt(out interTarLat, out interTarLong, out interTarAlt);
//set observer and target lat, long, alt to original values before interpolation
globeCamera.SetObserverLatLonAlt(origCamLat, origCamLong, origCamAlt);
globeCamera.SetTargetLatLonAlt(tarLat, tarLong, tarAlt);
IPoint pObs = new PointClass();
IZAware obsZAware = (IZAware)pObs;
obsZAware.ZAware = true;
pObs.X = interLong;
pObs.Y = interLat;
pObs.Z = interAlt * 1000;
double symbolSize = 10000;
//change the symbol size based on distance to ground
if (pObs.Z >= 10000) symbolSize = 10000 + pObs.Z / 10;
else symbolSize = pObs.Z;
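//In other words (derived from the branch above): below 10,000 Z units the marker size
//simply equals the altitude, so markers near the ground stay small; above that it grows
//slowly as 10,000 + Z/10 so distant camera positions remain visible.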
//add graphics - keyframes (j=0) are colored differently
if (j == 0) addPointGraphicElements(pObs, 2552550, symbolSize);
else addPointGraphicElements(pObs, 16732415, symbolSize);
connectingLine.FromPoint = previousPt;
connectingLine.ToPoint = pObs;
//except for the very first interpolated point (i == 0, j == 0), create the line connecting the interpolated points
if (i == 0 & j == 0) { }
else
{
addLineGraphicElements(connectingLine, 150150150);
}
//update the previous point
previousPt.PutCoords(pObs.X, pObs.Y);
previousPt.Z = pObs.Z;
//add camera to target direction
if (theCamForm.camToTargetDirectionCheckBox.Checked == true)
{
cameraToTargetDirection(interLat, interLong, interAlt, interTarLat, interTarLong, interTarAlt);
}
globeDisplay.RefreshViewers();
}
}
//for last keyframe
if (i == kFrameCount - 1)
{
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 4, 1, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 5, 1, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 6, 1, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 1, 1, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 2, 1, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 3, 1, nextKeyframe, prevKeyframe, afterNextKeyframe);
globeCamera.GetObserverLatLonAlt(out interLat, out interLong, out interAlt);
globeCamera.GetTargetLatLonAlt(out interTarLat, out interTarLong, out interTarAlt);
globeCamera.SetObserverLatLonAlt(origCamLat, origCamLong, origCamAlt);
globeCamera.SetTargetLatLonAlt(tarLat, tarLong, tarAlt);
IPoint pObs = new PointClass();
IZAware obsZAware = (IZAware)pObs;
obsZAware.ZAware = true;
pObs.X = interLong;
pObs.Y = interLat;
pObs.Z = interAlt * 1000;
double symbolSize = 10000;
if (pObs.Z >= 10000) symbolSize = 10000 + pObs.Z / 10;
else symbolSize = pObs.Z;
connectingLine.FromPoint = previousPt;
connectingLine.ToPoint = pObs;
addPointGraphicElements(pObs, 2552550, symbolSize);
addLineGraphicElements(connectingLine, 150150150);
//add camera to target orientation
if (theCamForm.camToTargetDirectionCheckBox.Checked == true)
{
cameraToTargetDirection(interLat, interLong, interAlt, interTarLat, interTarLong, interTarAlt);
}
globeDisplay.RefreshViewers();
}
}
//enable buttons
theCamForm.playButton.Enabled = true;
theCamForm.generatePathButton.Enabled = true;
}
//function for creating specified number of graphics between keyframe positions
public void generatePathBtwnKFrames()
{
int numPtsBtwnKFrames = 0;
//this is the from point for the lines connecting the interpolated point graphics
IPoint previousPt = new PointClass();
IZAware prevPtZAware = (IZAware)previousPt;
prevPtZAware.ZAware = true;
previousPt.PutCoords(0, 0);
//this is the line connecting the interpolated camera positions
IPolyline connectingLine = new PolylineClass();
IZAware lineZAware = (IZAware)connectingLine;
lineZAware.ZAware = true;
if (theCamForm.ptsBtwnKframeTextBox.Text != "")
{
numPtsBtwnKFrames = Convert.ToInt32(theCamForm.ptsBtwnKframeTextBox.Text);
}
else
{
MessageBox.Show("Please enter the number of points to be created");
return;
}
theCamForm.playButton.Enabled = false;
theCamForm.stopButton.Enabled = false;
theCamForm.generatePathButton.Enabled = false;
addGraphicLayer();
string selectedTrack = theCamForm.animTracksListBox.SelectedItem.ToString();
animationTracks.FindTrack(selectedTrack, out animationTrack);
IAGAnimationTrackKeyframes kFrames = (IAGAnimationTrackKeyframes)animationTrack;
int kFrameCount = kFrames.KeyframeCount;
//loop over the keyframes in the selected camera track
for (int i = 0; i < kFrameCount; i++)
{
IAGKeyframe currentKeyframe = kFrames.get_Keyframe(i);
IAGKeyframe prevKeyframe;
IAGKeyframe nextKeyframe;
IAGKeyframe afterNextKeyframe;
//if/else statements to determine the keyframe arguments to the Interpolate method.
//This is needed because the first and last keyframes must be handled differently
//from the middle keyframes
if (i > 0)
{
prevKeyframe = kFrames.get_Keyframe(i - 1);
}
else
{
prevKeyframe = kFrames.get_Keyframe(i);
}
if (i < kFrameCount - 1)
{
nextKeyframe = kFrames.get_Keyframe(i + 1);
}
else
{
nextKeyframe = kFrames.get_Keyframe(i);
}
if (i < kFrameCount - 2)
{
afterNextKeyframe = kFrames.get_Keyframe(i + 2);
}
else
{
//this should be equal to the nextKeyFrame for the last keyframe
afterNextKeyframe = nextKeyframe;//kFrames.get_Keyframe(i);
}
double origCamLat, origCamLong, origCamAlt;
double interLat, interLong, interAlt;
double tarLat, tarLong, tarAlt;
double interTarLat, interTarLong, interTarAlt;
globeCamera.GetObserverLatLonAlt(out origCamLat, out origCamLong, out origCamAlt);
globeCamera.GetTargetLatLonAlt(out tarLat, out tarLong, out tarAlt);
IAGAnimationContainer pAnimContainer = animationTracks.AnimationObjectContainer;
object objToInterpolate = (object)globeCamera;
double timeDiff = nextKeyframe.TimeStamp - currentKeyframe.TimeStamp;
//interpolate positions between keyframes and draw the graphics
for (int j = 0; j < numPtsBtwnKFrames + 1; j++)
{
double timeToInterpolate = currentKeyframe.TimeStamp + j * (timeDiff / (numPtsBtwnKFrames + 1));
//for 0 to n-1 keyframes
if (i >= 0 & i < kFrameCount - 1)
{
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 4, timeToInterpolate, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 5, timeToInterpolate, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 6, timeToInterpolate, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 1, timeToInterpolate, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 2, timeToInterpolate, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 3, timeToInterpolate, nextKeyframe, prevKeyframe, afterNextKeyframe);
globeCamera.GetObserverLatLonAlt(out interLat, out interLong, out interAlt);
globeCamera.GetTargetLatLonAlt(out interTarLat, out interTarLong, out interTarAlt);
globeCamera.SetObserverLatLonAlt(origCamLat, origCamLong, origCamAlt);
globeCamera.SetTargetLatLonAlt(tarLat, tarLong, tarAlt);
IPoint pObs = new PointClass();
IZAware obsZAware = (IZAware)pObs;
obsZAware.ZAware = true;
pObs.X = interLong;
pObs.Y = interLat;
pObs.Z = interAlt * 1000;
double symbolSize = 10000;
if (pObs.Z >= 10000) symbolSize = 10000 + pObs.Z / 10;
else symbolSize = pObs.Z;
if (j == 0) addPointGraphicElements(pObs, 2552550, symbolSize);
else addPointGraphicElements(pObs, 16732415, symbolSize);
connectingLine.FromPoint = previousPt;
connectingLine.ToPoint = pObs;
if (i == 0 & j == 0) { }
else
{
addLineGraphicElements(connectingLine, 150150150);
}
previousPt.PutCoords(pObs.X, pObs.Y);
previousPt.Z = pObs.Z;
//add camera to target orientation
if (theCamForm.camToTargetDirectionCheckBox.Checked == true)
{
cameraToTargetDirection(interLat, interLong, interAlt, interTarLat, interTarLong, interTarAlt);
}
globeDisplay.RefreshViewers();
}
//for last keyframe
else if (i == kFrameCount - 1)
{
if (j == 0)
{
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 4, 1, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 5, 1, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 6, 1, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 1, 1, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 2, 1, nextKeyframe, prevKeyframe, afterNextKeyframe);
currentKeyframe.Interpolate(animationTrack, pAnimContainer, objToInterpolate, 3, 1, nextKeyframe, prevKeyframe, afterNextKeyframe);
globeCamera.GetObserverLatLonAlt(out interLat, out interLong, out interAlt);
globeCamera.GetTargetLatLonAlt(out interTarLat, out interTarLong, out interTarAlt);
globeCamera.SetObserverLatLonAlt(origCamLat, origCamLong, origCamAlt);
globeCamera.SetTargetLatLonAlt(tarLat, tarLong, tarAlt);
IPoint pObs = new PointClass();
IZAware obsZAware = (IZAware)pObs;
obsZAware.ZAware = true;
pObs.X = interLong;
pObs.Y = interLat;
pObs.Z = interAlt * 1000;
double symbolSize = 10000;
if (pObs.Z >= 10000) symbolSize = 10000 + pObs.Z / 10;
else symbolSize = pObs.Z;
connectingLine.FromPoint = previousPt;
connectingLine.ToPoint = pObs;
addPointGraphicElements(pObs, 2552550, symbolSize);
addLineGraphicElements(connectingLine, 150150150);
//add camera to target orientation
if (theCamForm.camToTargetDirectionCheckBox.Checked == true)
{
cameraToTargetDirection(interLat, interLong, interAlt, interTarLat, interTarLong, interTarAlt);
}
globeDisplay.RefreshViewers();
}
}
}
}
//enable buttons
theCamForm.playButton.Enabled = true;
theCamForm.generatePathButton.Enabled = true;
}
//function for generating camera to target direction
public void cameraToTargetDirection(double camLat, double camLong, double camAlt, double tarLat, double tarLong, double tarAlt)
{
IPoint camPosition = new PointClass();
IPoint targetPosition = new PointClass();
ICamera pCamera = (ICamera)globeCamera;
IZAware obsZAware = (IZAware)camPosition;
obsZAware.ZAware = true;
camPosition.PutCoords(camLong, camLat);
camPosition.Z = camAlt * 1000;
IZAware targetZAware = (IZAware)targetPosition;
targetZAware.ZAware = true;
targetPosition.PutCoords(tarLong, tarLat);
targetPosition.Z = tarAlt;
IPolyline directionLine = new PolylineClass();
IZAware zAwareLine = (IZAware)directionLine;
zAwareLine.ZAware = true;
directionLine.FromPoint = camPosition;
directionLine.ToPoint = targetPosition;
addLineGraphicElements(directionLine, 255);
}
//function for adding a graphics layer
public void addGraphicLayer()
{
graphicsLayer = new GlobeGraphicsLayerClass();
ILayer pLayer;
pLayer = (ILayer)graphicsLayer;
pLayer.Name = "CameraPathGraphicsLayer";
globe.AddLayerType(pLayer, esriGlobeLayerType.esriGlobeLayerTypeDraped, true);
}
//function for adding point markers
public void addPointGraphicElements(ESRI.ArcGIS.Geometry.IPoint inPoint, int symbolColor, double symbolSize)
{
IElement pElement = new MarkerElementClass();
ISimpleMarker3DSymbol symbol3d = new SimpleMarker3DSymbolClass();
string markerStyle = "";
if (theCamForm.symbolTypeListBox.SelectedItem != null)
{
markerStyle = theCamForm.symbolTypeListBox.SelectedItem.ToString();
}
if (markerStyle == "Cone") symbol3d.Style = esriSimple3DMarkerStyle.esriS3DMSCone;
else if (markerStyle == "Sphere") symbol3d.Style = esriSimple3DMarkerStyle.esriS3DMSSphere;
else if (markerStyle == "Cylinder") symbol3d.Style = esriSimple3DMarkerStyle.esriS3DMSCylinder;
else if (markerStyle == "Cube") symbol3d.Style = esriSimple3DMarkerStyle.esriS3DMSCube;
else if (markerStyle == "Diamond") symbol3d.Style = esriSimple3DMarkerStyle.esriS3DMSDiamond;
else if (markerStyle == "Tetrahedron") symbol3d.Style = esriSimple3DMarkerStyle.esriS3DMSTetra;
else symbol3d.Style = esriSimple3DMarkerStyle.esriS3DMSCone;
symbol3d.ResolutionQuality = 1;
IColor pColor = new RgbColorClass();
pColor.RGB = symbolColor; //16732415;
IMarkerSymbol pMarkerSymbol;
pMarkerSymbol = (IMarkerSymbol)symbol3d;
pMarkerSymbol.Color = pColor;
if (symbolSize < 0) symbolSize = Math.Abs(symbolSize);
if (symbolSize == 0) symbolSize = 5000;
pMarkerSymbol.Size = symbolSize;
pElement.Geometry = inPoint;
IMarkerElement pMarkerElement;
pMarkerElement = (IMarkerElement)pElement;
pMarkerElement.Symbol = pMarkerSymbol;
graphicsLayer.AddElement(pElement, 1);
}
//function for adding line graphics elements
public void addLineGraphicElements(ESRI.ArcGIS.Geometry.IPolyline inLine, int symbolColor)
{
IElement pElement = new LineElementClass();// MarkerElementClass();
ISimpleLine3DSymbol symbol3d = new SimpleLine3DSymbolClass();
string markerStyle = "";
if (theCamForm.symbolTypeListBox.SelectedItem != null)
{
markerStyle = theCamForm.symbolTypeListBox.SelectedItem.ToString();
}
if (markerStyle == "Strip") symbol3d.Style = esriSimple3DLineStyle.esriS3DLSStrip;
else if (markerStyle == "Wall") symbol3d.Style = esriSimple3DLineStyle.esriS3DLSWall;
else symbol3d.Style = esriSimple3DLineStyle.esriS3DLSTube;
symbol3d.ResolutionQuality = 1;
IColor pColor = new RgbColorClass();
pColor.RGB = symbolColor;
ILineSymbol pLineSymbol;
pLineSymbol = (ILineSymbol)symbol3d;
pLineSymbol.Color = pColor;
pLineSymbol.Width = 1;
pElement.Geometry = inLine;
ILineElement pLineElement;
pLineElement = (ILineElement)pElement;
pLineElement.Symbol = pLineSymbol;
graphicsLayer.AddElement(pElement, 1);
}
//event handlers
public void formPlayButtonClickEventHandler(object sender, System.EventArgs e)
{
if (theCamForm.animTracksListBox.SelectedItem != null)
{
enableSelectedTrack();
//play the animation
this.playAnimation();
}
else
{
MessageBox.Show("Please select a camera track", "Error");
}
}
public void formStopButtonClickEventHandler(object sender, System.EventArgs e)
{
animPlayer.StopAnimation();
theCamForm.stopButton.Enabled = false;
if (theCamForm.generateCamPathCheckBox.Checked == true) theCamForm.generatePathButton.Enabled = true;
}
public void formGeneratePathButtonClickEventHandler(object sender, System.EventArgs e)
{
if (theCamForm.animTracksListBox.SelectedItem != null)
{
if (theCamForm.ptsPerSecRadioButton.Checked == true)
{
if (theCamForm.numPtsPerSecTextBox.Text == "")
{
MessageBox.Show("Please enter number of points to be created per second", "Error");
return;
}
generatePathPerSecond();
}
else if (theCamForm.ptsBtwnKframeRadioButton.Checked == true)
{
if (theCamForm.ptsBtwnKframeTextBox.Text == "")
{
MessageBox.Show("Please enter number of points to be created between keyframes", "Error");
return;
}
generatePathBtwnKFrames();
}
}
else
{
MessageBox.Show("Please select a camera track");
}
}
public void formCheckbox1CheckedChanged(object sender, System.EventArgs e)
{
if (theCamForm.generateCamPathCheckBox.Checked == true) theCamForm.generatePathButton.Enabled = true;
else theCamForm.generatePathButton.Enabled = false;
}
private void theCamForm_Closing(object sender, CancelEventArgs e)
{
theCamForm.animTracksListBox.Items.Clear();
toolIsInitialized = false;
}
public void myAnimationEventHandler(esriAnimationState animState)
{
globeDispEvent = (IGlobeDisplayEvents_Event)globeDisplay;
if (animState == esriAnimationState.esriAnimationStopped)
{
theCamForm.playButton.Enabled = true;
theCamForm.generatePathButton.Enabled = true;
theCamForm.stopButton.Enabled = false;
}
}
#endregion
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.ContainerRegistry
{
using System.Linq;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
public partial class ContainerRegistryManagementClient : Microsoft.Rest.ServiceClient<ContainerRegistryManagementClient>, IContainerRegistryManagementClient, IAzureClient
{
/// <summary>
/// The base URI of the service.
/// </summary>
public System.Uri BaseUri { get; set; }
/// <summary>
/// Gets or sets json serialization settings.
/// </summary>
public Newtonsoft.Json.JsonSerializerSettings SerializationSettings { get; private set; }
/// <summary>
/// Gets or sets json deserialization settings.
/// </summary>
public Newtonsoft.Json.JsonSerializerSettings DeserializationSettings { get; private set; }
/// <summary>
/// Credentials needed for the client to connect to Azure.
/// </summary>
public Microsoft.Rest.ServiceClientCredentials Credentials { get; private set; }
/// <summary>
/// The Microsoft Azure subscription ID.
/// </summary>
public string SubscriptionId { get; set; }
/// <summary>
/// The client API version.
/// </summary>
public string ApiVersion { get; private set; }
/// <summary>
/// Gets or sets the preferred language for the response.
/// </summary>
public string AcceptLanguage { get; set; }
/// <summary>
/// Gets or sets the retry timeout in seconds for Long Running Operations.
/// Default value is 30.
/// </summary>
public int? LongRunningOperationRetryTimeout { get; set; }
/// <summary>
/// When set to true a unique x-ms-client-request-id value is generated and
/// included in each request. Default is true.
/// </summary>
public bool? GenerateClientRequestId { get; set; }
/// <summary>
/// Gets the IRegistriesOperations.
/// </summary>
public virtual IRegistriesOperations Registries { get; private set; }
/// <summary>
/// Initializes a new instance of the ContainerRegistryManagementClient class.
/// </summary>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
protected ContainerRegistryManagementClient(params System.Net.Http.DelegatingHandler[] handlers) : base(handlers)
{
this.Initialize();
}
/// <summary>
/// Initializes a new instance of the ContainerRegistryManagementClient class.
/// </summary>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
protected ContainerRegistryManagementClient(System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : base(rootHandler, handlers)
{
this.Initialize();
}
/// <summary>
/// Initializes a new instance of the ContainerRegistryManagementClient class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
protected ContainerRegistryManagementClient(System.Uri baseUri, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
{
if (baseUri == null)
{
throw new System.ArgumentNullException("baseUri");
}
this.BaseUri = baseUri;
}
/// <summary>
/// Initializes a new instance of the ContainerRegistryManagementClient class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
protected ContainerRegistryManagementClient(System.Uri baseUri, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (baseUri == null)
{
throw new System.ArgumentNullException("baseUri");
}
this.BaseUri = baseUri;
}
/// <summary>
/// Initializes a new instance of the ContainerRegistryManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public ContainerRegistryManagementClient(Microsoft.Rest.ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
{
if (credentials == null)
{
throw new System.ArgumentNullException("credentials");
}
this.Credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the ContainerRegistryManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public ContainerRegistryManagementClient(Microsoft.Rest.ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (credentials == null)
{
throw new System.ArgumentNullException("credentials");
}
this.Credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the ContainerRegistryManagementClient class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public ContainerRegistryManagementClient(System.Uri baseUri, Microsoft.Rest.ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
{
if (baseUri == null)
{
throw new System.ArgumentNullException("baseUri");
}
if (credentials == null)
{
throw new System.ArgumentNullException("credentials");
}
this.BaseUri = baseUri;
this.Credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the ContainerRegistryManagementClient class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public ContainerRegistryManagementClient(System.Uri baseUri, Microsoft.Rest.ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (baseUri == null)
{
throw new System.ArgumentNullException("baseUri");
}
if (credentials == null)
{
throw new System.ArgumentNullException("credentials");
}
this.BaseUri = baseUri;
this.Credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// An optional partial-method to perform custom initialization.
/// </summary>
partial void CustomInitialize();
/// <summary>
/// Initializes client properties.
/// </summary>
private void Initialize()
{
this.Registries = new RegistriesOperations(this);
this.BaseUri = new System.Uri("https://management.azure.com");
this.ApiVersion = "2016-06-27-preview";
this.AcceptLanguage = "en-US";
this.LongRunningOperationRetryTimeout = 30;
this.GenerateClientRequestId = true;
SerializationSettings = new Newtonsoft.Json.JsonSerializerSettings
{
Formatting = Newtonsoft.Json.Formatting.Indented,
DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,
NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,
ContractResolver = new Microsoft.Rest.Serialization.ReadOnlyJsonContractResolver(),
Converters = new System.Collections.Generic.List<Newtonsoft.Json.JsonConverter>
{
new Microsoft.Rest.Serialization.Iso8601TimeSpanConverter()
}
};
SerializationSettings.Converters.Add(new Microsoft.Rest.Serialization.TransformationJsonConverter());
DeserializationSettings = new Newtonsoft.Json.JsonSerializerSettings
{
DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,
NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,
ContractResolver = new Microsoft.Rest.Serialization.ReadOnlyJsonContractResolver(),
Converters = new System.Collections.Generic.List<Newtonsoft.Json.JsonConverter>
{
new Microsoft.Rest.Serialization.Iso8601TimeSpanConverter()
}
};
CustomInitialize();
DeserializationSettings.Converters.Add(new Microsoft.Rest.Serialization.TransformationJsonConverter());
DeserializationSettings.Converters.Add(new Microsoft.Rest.Azure.CloudErrorJsonConverter());
}
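// Illustrative usage (a sketch, not part of the generated client; the subscription id
// below is a placeholder and "credentials" stands for any ServiceClientCredentials instance):
// var client = new ContainerRegistryManagementClient(credentials)
// {
//     SubscriptionId = "00000000-0000-0000-0000-000000000000"
// };
// // Registry operations are then exposed through client.Registries.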
}
}
| |
using Lucene.Net.Diagnostics;
using Lucene.Net.Store;
using Lucene.Net.Util;
using System;
using System.Diagnostics;
using System.IO;
namespace Lucene.Net.Codecs.Bloom
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// A class used to represent a set of many, potentially large, values (e.g. many
/// long strings such as URLs), using a significantly smaller amount of memory.
/// <para/>
/// The set is "lossy" in that it cannot definitively state that is does contain
/// a value but it <em>can</em> definitively say if a value is <em>not</em> in
/// the set. It can therefore be used as a Bloom Filter.
/// <para/>
/// Another application of the set is that it can be used to perform fuzzy counting because
/// it can estimate reasonably accurately how many unique values are contained in the set.
/// <para/>
/// This class is NOT threadsafe.
/// <para/>
/// Internally a Bitset is used to record values and once a client has finished recording
/// a stream of values the <see cref="Downsize(float)"/> method can be used to create a suitably smaller set that
/// is sized appropriately for the number of values recorded and desired saturation levels.
/// <para/>
/// @lucene.experimental
/// </summary>
public class FuzzySet
{
public static readonly int VERSION_SPI = 1; // HashFunction used to be loaded through a SPI
public static readonly int VERSION_START = VERSION_SPI;
public static readonly int VERSION_CURRENT = 2;
public static HashFunction HashFunctionForVersion(int version)
{
if (version < VERSION_START)
throw new ArgumentException("Version " + version + " is too old, expected at least " +
VERSION_START);
if (version > VERSION_CURRENT)
throw new ArgumentException("Version " + version + " is too new, expected at most " +
VERSION_CURRENT);
return MurmurHash2.INSTANCE;
}
/// <remarks>
/// Result from <see cref="FuzzySet.Contains(BytesRef)"/>:
/// can never return definitively YES (always MAYBE),
/// but can sometimes definitely return NO.
/// </remarks>
public enum ContainsResult
{
MAYBE,
NO
};
private readonly HashFunction _hashFunction;
private readonly FixedBitSet _filter;
private readonly int _bloomSize;
//The sizes of BitSet used are all numbers that, when expressed in binary form,
//are all ones. This is to enable fast downsizing from one bitset to another
//by simply ANDing each set index in one bitset with the size of the target bitset
// - this provides a fast modulo of the number. Values previously accumulated in
// a large bitset and then mapped to a smaller set can be looked up using a single
// AND operation of the query term's hash rather than needing to perform a 2-step
// translation of the query term that mirrors the stored content's reprojections.
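// Worked example (illustrative): with a size/mask of 127 (0b111_1111), a hash of
// 1000 maps to 1000 & 127 == 104, the same result as 1000 % 128 -- a single AND
// replaces the modulo because the size is one less than a power of two.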
private static int[] _usableBitSetSizes = LoadUsableBitSetSizes();
private static int[] LoadUsableBitSetSizes() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
{
var usableBitSetSizes = new int[30];
const int mask = 1;
var size = mask;
for (var i = 0; i < usableBitSetSizes.Length; i++)
{
size = (size << 1) | mask;
usableBitSetSizes[i] = size;
}
return usableBitSetSizes;
}
/// <summary>
/// Rounds down required <paramref name="maxNumberOfBits"/> to the nearest number that is made up
/// of all ones as a binary number.
/// Use this method where controlling memory use is paramount.
/// </summary>
public static int GetNearestSetSize(int maxNumberOfBits)
{
int result = _usableBitSetSizes[0];
for (int i = 0; i < _usableBitSetSizes.Length; i++)
{
if (_usableBitSetSizes[i] <= maxNumberOfBits)
{
result = _usableBitSetSizes[i];
}
}
return result;
}
/// <summary>
/// Use this method to choose a set size where accuracy (low content saturation) is more important
/// than deciding how much memory to throw at the problem.
/// </summary>
/// <param name="maxNumberOfValuesExpected"></param>
/// <param name="desiredSaturation">A number between 0 and 1 expressing the % of bits set once all values have been recorded.</param>
/// <returns>The size of the set nearest to the required size.</returns>
public static int GetNearestSetSize(int maxNumberOfValuesExpected,
float desiredSaturation)
{
// Iterate around the various scales of bitset from smallest to largest looking for the first that
// satisfies value volumes at the chosen saturation level
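// For example (illustrative): with desiredSaturation = 0.5, the first candidate size whose
// estimated unique-value capacity at 50% fill exceeds maxNumberOfValuesExpected is returned;
// -1 signals that even the largest candidate size is too small.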
for (int i = 0; i < _usableBitSetSizes.Length; i++)
{
int numSetBitsAtDesiredSaturation = (int)(_usableBitSetSizes[i] * desiredSaturation);
int estimatedNumUniqueValues = GetEstimatedNumberUniqueValuesAllowingForCollisions(
_usableBitSetSizes[i], numSetBitsAtDesiredSaturation);
if (estimatedNumUniqueValues > maxNumberOfValuesExpected)
{
return _usableBitSetSizes[i];
}
}
return -1;
}
public static FuzzySet CreateSetBasedOnMaxMemory(int maxNumBytes)
{
var setSize = GetNearestSetSize(maxNumBytes);
return new FuzzySet(new FixedBitSet(setSize + 1), setSize, HashFunctionForVersion(VERSION_CURRENT));
}
public static FuzzySet CreateSetBasedOnQuality(int maxNumUniqueValues, float desiredMaxSaturation)
{
var setSize = GetNearestSetSize(maxNumUniqueValues, desiredMaxSaturation);
return new FuzzySet(new FixedBitSet(setSize + 1), setSize, HashFunctionForVersion(VERSION_CURRENT));
}
private FuzzySet(FixedBitSet filter, int bloomSize, HashFunction hashFunction)
{
_filter = filter;
_bloomSize = bloomSize;
_hashFunction = hashFunction;
}
/// <summary>
/// The main method required for a Bloom filter which, given a value, determines set membership.
/// Unlike a conventional set, the fuzzy set returns <see cref="ContainsResult.NO"/> or
/// <see cref="ContainsResult.MAYBE"/> rather than <c>true</c> or <c>false</c>.
/// </summary>
/// <returns><see cref="ContainsResult.NO"/> or <see cref="ContainsResult.MAYBE"/></returns>
public virtual ContainsResult Contains(BytesRef value)
{
var hash = _hashFunction.Hash(value);
if (hash < 0)
{
hash = hash*-1;
}
return MayContainValue(hash);
}
/// <summary>
/// Serializes the data set to file using the following format:
/// <list type="bullet">
/// <item><description>FuzzySet -->FuzzySetVersion,HashFunctionName,BloomSize,
/// NumBitSetWords,BitSetWord<sup>NumBitSetWords</sup></description></item>
/// <item><description>HashFunctionName --> String (<see cref="DataOutput.WriteString(string)"/>) The
/// name of a ServiceProvider registered <see cref="HashFunction"/></description></item>
/// <item><description>FuzzySetVersion --> Uint32 (<see cref="DataOutput.WriteInt32(int)"/>) The version number of the <see cref="FuzzySet"/> class</description></item>
/// <item><description>BloomSize --> Uint32 (<see cref="DataOutput.WriteInt32(int)"/>) The modulo value used
/// to project hashes into the field's Bitset</description></item>
/// <item><description>NumBitSetWords --> Uint32 (<see cref="DataOutput.WriteInt32(int)"/>) The number of
/// longs (as returned from <see cref="FixedBitSet.GetBits()"/>)</description></item>
/// <item><description>BitSetWord --> Long (<see cref="DataOutput.WriteInt64(long)"/>) A long from the array
/// returned by <see cref="FixedBitSet.GetBits()"/></description></item>
/// </list>
/// </summary>
/// <param name="output">Data output stream.</param>
/// <exception cref="IOException">If there is a low-level I/O error.</exception>
public virtual void Serialize(DataOutput output)
{
output.WriteInt32(VERSION_CURRENT);
output.WriteInt32(_bloomSize);
var bits = _filter.GetBits();
output.WriteInt32(bits.Length);
foreach (var t in bits)
{
// Can't use VLong encoding because it can't cope with the negative numbers
// output by FixedBitSet
output.WriteInt64(t);
}
}
public static FuzzySet Deserialize(DataInput input)
{
var version = input.ReadInt32();
if (version == VERSION_SPI)
input.ReadString();
var hashFunction = HashFunctionForVersion(version);
var bloomSize = input.ReadInt32();
var numLongs = input.ReadInt32();
var longs = new long[numLongs];
for (var i = 0; i < numLongs; i++)
{
longs[i] = input.ReadInt64();
}
var bits = new FixedBitSet(longs, bloomSize + 1);
return new FuzzySet(bits, bloomSize, hashFunction);
}
private ContainsResult MayContainValue(int positiveHash)
{
if (Debugging.AssertsEnabled) Debugging.Assert((positiveHash >= 0));
// Bloom sizes are always base 2 and so can be ANDed for a fast modulo
var pos = positiveHash & _bloomSize;
return _filter.Get(pos) ? ContainsResult.MAYBE : ContainsResult.NO;
}
/// <summary>
/// Records a value in the set. The referenced bytes are hashed and then modulo n'd where n is the
/// chosen size of the internal bitset.
/// </summary>
/// <param name="value">The Key value to be hashed.</param>
/// <exception cref="IOException">If there is a low-level I/O error.</exception>
public virtual void AddValue(BytesRef value)
{
var hash = _hashFunction.Hash(value);
if (hash < 0)
{
hash = hash*-1;
}
// Bitmasking using bloomSize is effectively a modulo operation.
var bloomPos = hash & _bloomSize;
_filter.Set(bloomPos);
}
/// <param name="targetMaxSaturation">
/// A number between 0 and 1 describing the % of bits that would ideally be set in the result.
/// Lower values have better accuracy but require more space.
/// </param>
/// <returns>A smaller <see cref="FuzzySet"/> or <c>null</c> if the current set is already over-saturated.</returns>
public virtual FuzzySet Downsize(float targetMaxSaturation)
{
var numBitsSet = _filter.Cardinality();
FixedBitSet rightSizedBitSet;
var rightSizedBitSetSize = _bloomSize;
//Hopefully find a smaller size bitset into which we can project accumulated values while maintaining desired saturation level
for (int i = 0; i < _usableBitSetSizes.Length; i++)
{
int candidateBitsetSize = _usableBitSetSizes[i];
float candidateSaturation = (float)numBitsSet
/ (float)candidateBitsetSize;
if (candidateSaturation <= targetMaxSaturation)
{
rightSizedBitSetSize = candidateBitsetSize;
break;
}
}
// Re-project the numbers to a smaller space if necessary
if (rightSizedBitSetSize < _bloomSize)
{
// Reset the choice of bitset to the smaller version
rightSizedBitSet = new FixedBitSet(rightSizedBitSetSize + 1);
// Map across the bits from the large set to the smaller one
var bitIndex = 0;
do
{
bitIndex = _filter.NextSetBit(bitIndex);
if (bitIndex < 0) continue;
// Project the larger number into a smaller one effectively
// modulo-ing by using the target bitset size as a mask
var downSizedBitIndex = bitIndex & rightSizedBitSetSize;
rightSizedBitSet.Set(downSizedBitIndex);
bitIndex++;
} while ((bitIndex >= 0) && (bitIndex <= _bloomSize));
}
else
{
return null;
}
return new FuzzySet(rightSizedBitSet, rightSizedBitSetSize, _hashFunction);
}
public virtual int GetEstimatedUniqueValues()
{
return GetEstimatedNumberUniqueValuesAllowingForCollisions(_bloomSize, _filter.Cardinality());
}
/// <summary>
/// Given a <paramref name="setSize"/> and the number of set bits, produces an estimate of the number of unique values recorded.
/// </summary>
public static int GetEstimatedNumberUniqueValuesAllowingForCollisions(
int setSize, int numRecordedBits)
{
double setSizeAsDouble = setSize;
double numRecordedBitsAsDouble = numRecordedBits;
var saturation = numRecordedBitsAsDouble/setSizeAsDouble;
var logInverseSaturation = Math.Log(1 - saturation)*-1;
return (int) (setSizeAsDouble*logInverseSaturation);
}
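// (Added note: this is the standard Bloom-filter fill estimate n ~= -m * ln(1 - k/m),
// where m is the bitset size and k the number of set bits. Worked example, purely
// illustrative: m = 1024, k = 256 -> saturation 0.25, ln(0.75) ~= -0.2877,
// so roughly 294 unique values are estimated to have been recorded.)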
public virtual float GetSaturation()
{
var numBitsSet = _filter.Cardinality();
return numBitsSet/(float) _bloomSize;
}
public virtual long RamBytesUsed()
{
return RamUsageEstimator.SizeOf(_filter.GetBits());
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
#if !IGNITOR
using Microsoft.AspNetCore.Components.RenderTree;
#endif
#if IGNITOR
namespace Ignitor
#elif BLAZOR_WEBVIEW
namespace Microsoft.AspNetCore.Components.WebView
#else
namespace Microsoft.AspNetCore.Components.Server.Circuits
#endif
{
// TODO: We should consider *not* having this type of infrastructure in the .Server
// project, but instead in some new project called .Remote or similar, since it
// would also be used in Electron and possibly WebWorker cases.
/// <summary>
/// Provides a custom binary serializer for <see cref="RenderBatch"/> instances.
/// This is designed with both server-side and client-side perf in mind:
///
/// * Array-like regions always have a fixed size per entry (even if some entry types
/// don't require as much space as others) so the recipient can index directly.
/// * The indices describing where field data starts, where each string value starts,
/// etc., are written *after* that data, so when writing the data we don't have to
/// compute the locations up front or seek back to an earlier point in the stream.
/// The recipient can only process the data after reading it all into a buffer,
/// so it's no disadvantage for the location info to be at the end.
/// * We only serialize the data that the JS side will need. For example, we don't
/// emit frame sequence numbers, or any representation of nonstring attribute
/// values, or component instances, etc.
///
/// We don't have or need a .NET reader for this format. We only read it from JS code.
/// </summary>
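/// <remarks>
/// Sketch of the overall layout produced by the Write(RenderBatch) method, derived from
/// the implementation below (offsets are byte positions from the start of the batch):
/// [updatedComponents data][referenceFrames data][disposedComponentIds data]
/// [disposedEventHandlerIds data][string table][five trailing Int32 offsets].
/// The five trailing Int32s let the JS reader jump straight to each region.
/// </remarks>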
internal class RenderBatchWriter : IDisposable
{
private readonly ArrayBuilder<string> _strings;
private readonly Dictionary<string, int> _deduplicatedStringIndices;
private readonly BinaryWriter _binaryWriter;
public RenderBatchWriter(Stream output, bool leaveOpen)
{
_strings = new ArrayBuilder<string>();
_deduplicatedStringIndices = new Dictionary<string, int>();
_binaryWriter = new BinaryWriter(output, Encoding.UTF8, leaveOpen);
}
public void Write(in RenderBatch renderBatch)
{
var updatedComponentsOffset = Write(renderBatch.UpdatedComponents);
var referenceFramesOffset = Write(renderBatch.ReferenceFrames);
var disposedComponentIdsOffset = Write(renderBatch.DisposedComponentIDs);
var disposedEventHandlerIdsOffset = Write(renderBatch.DisposedEventHandlerIDs);
var stringTableOffset = WriteStringTable();
_binaryWriter.Write(updatedComponentsOffset);
_binaryWriter.Write(referenceFramesOffset);
_binaryWriter.Write(disposedComponentIdsOffset);
_binaryWriter.Write(disposedEventHandlerIdsOffset);
_binaryWriter.Write(stringTableOffset);
}
int Write(in ArrayRange<RenderTreeDiff> diffs)
{
var count = diffs.Count;
var diffsIndexes = new int[count];
var array = diffs.Array;
var baseStream = _binaryWriter.BaseStream;
for (var i = 0; i < count; i++)
{
diffsIndexes[i] = (int)baseStream.Position;
Write(array[i]);
}
// Now write out the table of locations
var tableStartPos = (int)baseStream.Position;
_binaryWriter.Write(count);
for (var i = 0; i < count; i++)
{
_binaryWriter.Write(diffsIndexes[i]);
}
return tableStartPos;
}
void Write(in RenderTreeDiff diff)
{
_binaryWriter.Write(diff.ComponentId);
var edits = diff.Edits;
_binaryWriter.Write(edits.Count);
var editsArray = edits.Array;
var editsEndIndexExcl = edits.Offset + edits.Count;
for (var i = edits.Offset; i < editsEndIndexExcl; i++)
{
Write(editsArray[i]);
}
}
void Write(in RenderTreeEdit edit)
{
// We want all RenderTreeEdit outputs to be of the same length, so that
// the recipient can index into the array directly without walking it.
// So we output some value for all properties, even when not applicable
// for this specific RenderTreeEditType.
_binaryWriter.Write((int)edit.Type);
_binaryWriter.Write(edit.SiblingIndex);
// ReferenceFrameIndex and MoveToSiblingIndex share a slot, so this writes
// whichever one applies to the edit type
_binaryWriter.Write(edit.ReferenceFrameIndex);
WriteString(edit.RemovedAttributeName, allowDeduplication: true);
}
int Write(in ArrayRange<RenderTreeFrame> frames)
{
var startPos = (int)_binaryWriter.BaseStream.Position;
var array = frames.Array;
var count = frames.Count;
_binaryWriter.Write(count);
for (var i = 0; i < count; i++)
{
Write(array[i]);
}
return startPos;
}
void Write(in RenderTreeFrame frame)
{
// TODO: Change this to write as a short, saving 2 bytes per frame
_binaryWriter.Write((int)frame.FrameType);
// We want each frame to take up the same number of bytes, so that the
// recipient can index into the array directly instead of having to
// walk through it.
// Since we can fit every frame type into 16 bytes, use that as the
// common size. For smaller frames, we add padding to expand it to
// 16 bytes.
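// Concretely, each case below writes 16 bytes of payload after the 4-byte frame
// type, e.g. Attribute = 4 (name index) + 4 (value index) + 8 (event handler id),
// and Element = 4 (subtree length) + 4 (name index) + 8 bytes of padding.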
switch (frame.FrameType)
{
case RenderTreeFrameType.Attribute:
WriteString(frame.AttributeName, allowDeduplication: true);
if (frame.AttributeValue is bool boolValue)
{
// Encoding the bool as either "" or null is pretty odd, but avoids
// having to pack any "what type of thing is this" info into the same
// 4 bytes as the string table index. If, later, we need a way of
// distinguishing whether an attribute value is really a bool or a string
// or something else, we'll need a different encoding mechanism. Since there
// would never be more than (say) 2^28 (268 million) distinct string table
// entries, we could use the first 4 bits to encode the value type.
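// Net effect, given WriteString below: true -> the string-table index of "",
// false -> -1 (the null marker), so bools reuse the same 4-byte slot as string values.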
WriteString(boolValue ? string.Empty : null, allowDeduplication: true);
}
else
{
var attributeValueString = frame.AttributeValue as string;
WriteString(attributeValueString, allowDeduplication: string.IsNullOrEmpty(attributeValueString));
}
_binaryWriter.Write(frame.AttributeEventHandlerId); // 8 bytes
break;
case RenderTreeFrameType.Component:
_binaryWriter.Write(frame.ComponentSubtreeLength);
_binaryWriter.Write(frame.ComponentId);
WritePadding(_binaryWriter, 8);
break;
case RenderTreeFrameType.ComponentReferenceCapture:
// The client doesn't need to know about these. But we still have
// to include them in the array otherwise the ReferenceFrameIndex
// values in the edits data would be wrong.
WritePadding(_binaryWriter, 16);
break;
case RenderTreeFrameType.Element:
_binaryWriter.Write(frame.ElementSubtreeLength);
WriteString(frame.ElementName, allowDeduplication: true);
WritePadding(_binaryWriter, 8);
break;
case RenderTreeFrameType.ElementReferenceCapture:
WriteString(frame.ElementReferenceCaptureId, allowDeduplication: false);
WritePadding(_binaryWriter, 12);
break;
case RenderTreeFrameType.Region:
_binaryWriter.Write(frame.RegionSubtreeLength);
WritePadding(_binaryWriter, 12);
break;
case RenderTreeFrameType.Text:
WriteString(
frame.TextContent,
allowDeduplication: string.IsNullOrWhiteSpace(frame.TextContent));
WritePadding(_binaryWriter, 12);
break;
case RenderTreeFrameType.Markup:
WriteString(frame.MarkupContent, allowDeduplication: false);
WritePadding(_binaryWriter, 12);
break;
default:
throw new ArgumentException($"Unsupported frame type: {frame.FrameType}");
}
}
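// Sketch (assumption, for illustration only): since each frame is written as a 4-byte
// frame type followed by a fixed 16-byte payload, a reader that knows the section
// offset returned by Write(ArrayRange<RenderTreeFrame>) could index frame i without
// walking the array:
//
//   var frameStart = framesSectionOffset + 4 /* count */ + (i * 20);
//   var frameType = (RenderTreeFrameType)ReadInt32(data, frameStart);
//
// framesSectionOffset/ReadInt32 are assumed names; the real client-side reader may differ.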
int Write(in ArrayRange<int> numbers)
{
var startPos = (int)_binaryWriter.BaseStream.Position;
_binaryWriter.Write(numbers.Count);
var array = numbers.Array;
var count = numbers.Count;
for (var index = 0; index < count; index++)
{
_binaryWriter.Write(array[index]);
}
return startPos;
}
int Write(in ArrayRange<ulong> numbers)
{
var startPos = (int)_binaryWriter.BaseStream.Position;
_binaryWriter.Write(numbers.Count);
var array = numbers.Array;
var count = numbers.Count;
for (var index = 0; index < count; index++)
{
_binaryWriter.Write(array[index]);
}
return startPos;
}
void WriteString(string value, bool allowDeduplication)
{
if (value == null)
{
_binaryWriter.Write(-1);
}
else
{
int stringIndex;
if (!allowDeduplication || !_deduplicatedStringIndices.TryGetValue(value, out stringIndex))
{
stringIndex = _strings.Count;
_strings.Append(value);
if (allowDeduplication)
{
_deduplicatedStringIndices.Add(value, stringIndex);
}
}
_binaryWriter.Write(stringIndex);
}
}
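// Illustration (not part of the original code, assumes an initially empty string table):
// with deduplication enabled, repeated values map to a single string-table entry, e.g.
//
//   WriteString("class", allowDeduplication: true);  // writes index 0, adds "class" to the table
//   WriteString("class", allowDeduplication: true);  // writes index 0 again, no new entry
//   WriteString(null,    allowDeduplication: true);  // writes -1, no table entry
//
// which keeps attribute-heavy batches compact.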
int WriteStringTable()
{
// Capture the locations of each string
var stringsCount = _strings.Count;
var locations = new int[stringsCount];
for (var i = 0; i < stringsCount; i++)
{
var stringValue = _strings.Buffer[i];
locations[i] = (int)_binaryWriter.BaseStream.Position;
_binaryWriter.Write(stringValue);
}
// Now write the locations
var locationsStartPos = (int)_binaryWriter.BaseStream.Position;
for (var i = 0; i < stringsCount; i++)
{
_binaryWriter.Write(locations[i]);
}
return locationsStartPos;
}
static void WritePadding(BinaryWriter writer, int numBytes)
{
while (numBytes >= 4)
{
writer.Write(0);
numBytes -= 4;
}
while (numBytes > 0)
{
writer.Write((byte)0);
numBytes--;
}
}
public void Dispose()
{
_strings.Dispose();
_binaryWriter.Dispose();
}
}
}
| |
//
// Copyright (c) 2004-2021 Jaroslaw Kowalski <[email protected]>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
namespace NLog.UnitTests.Targets.Wrappers
{
using System;
using System.Collections.Generic;
using System.Threading;
using NLog.Common;
using NLog.Targets;
using NLog.Targets.Wrappers;
using Xunit;
public class BufferingTargetWrapperTests : NLogTestBase
{
[Fact]
public void BufferingTargetWrapperSyncTest1()
{
var myTarget = new MyTarget();
var targetWrapper = new BufferingTargetWrapper
{
WrappedTarget = myTarget,
BufferSize = 10,
};
InitializeTargets(myTarget, targetWrapper);
const int totalEvents = 100;
var continuationHit = new bool[totalEvents];
var lastException = new Exception[totalEvents];
var continuationThread = new Thread[totalEvents];
var hitCount = 0;
CreateContinuationFunc createAsyncContinuation =
eventNumber =>
ex =>
{
lastException[eventNumber] = ex;
continuationThread[eventNumber] = Thread.CurrentThread;
continuationHit[eventNumber] = true;
Interlocked.Increment(ref hitCount);
};
// write 9 events - they will all be buffered and no final continuation will be reached
var eventCounter = 0;
for (var i = 0; i < 9; ++i)
{
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
}
Assert.Equal(0, hitCount);
Assert.Equal(0, myTarget.WriteCount);
// write one more event - everything will be flushed
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
Assert.Equal(10, hitCount);
Assert.Equal(1, myTarget.BufferedWriteCount);
Assert.Equal(10, myTarget.BufferedTotalEvents);
Assert.Equal(10, myTarget.WriteCount);
for (var i = 0; i < hitCount; ++i)
{
Assert.Same(Thread.CurrentThread, continuationThread[i]);
Assert.Null(lastException[i]);
}
// write 9 more events - they will all be buffered and no final continuation will be reached
for (var i = 0; i < 9; ++i)
{
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
}
// no change
Assert.Equal(10, hitCount);
Assert.Equal(1, myTarget.BufferedWriteCount);
Assert.Equal(10, myTarget.BufferedTotalEvents);
Assert.Equal(10, myTarget.WriteCount);
Exception flushException = null;
var flushHit = new ManualResetEvent(false);
targetWrapper.Flush(
ex =>
{
flushException = ex;
flushHit.Set();
});
Assert.True(flushHit.WaitOne(5000), "Wait Flush Timeout");
Assert.Null(flushException);
// make sure remaining events were written
Assert.Equal(19, hitCount);
Assert.Equal(2, myTarget.BufferedWriteCount);
Assert.Equal(19, myTarget.BufferedTotalEvents);
Assert.Equal(19, myTarget.WriteCount);
Assert.Equal(1, myTarget.FlushCount);
// flushes happen on the same thread
for (var i = 10; i < hitCount; ++i)
{
Assert.NotNull(continuationThread[i]);
Assert.Same(Thread.CurrentThread, continuationThread[i]);
Assert.Null(lastException[i]);
}
// flush again - should just invoke Flush() on the wrapped target
flushHit.Reset();
targetWrapper.Flush(
ex =>
{
flushException = ex;
flushHit.Set();
});
Assert.True(flushHit.WaitOne(5000), "Wait Again Flush Timeout");
Assert.Equal(19, hitCount);
Assert.Equal(2, myTarget.BufferedWriteCount);
Assert.Equal(19, myTarget.BufferedTotalEvents);
Assert.Equal(19, myTarget.WriteCount);
Assert.Equal(2, myTarget.FlushCount);
targetWrapper.Close();
myTarget.Close();
}
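// For reference, the wrapper exercised above is typically configured in XML roughly
// like this (illustrative fragment; the target names and the file target are placeholders):
//
//   <target xsi:type="BufferingWrapper" name="buffered" bufferSize="10">
//     <target xsi:type="File" name="file" fileName="app.log" />
//   </target>
//
// The programmatic setup in this test exercises the same BufferSize-triggered flush.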
[Theory]
[InlineData(false)]
[InlineData(true)]
public void BufferingTargetWithFallbackGroupAndFirstTargetFails_Write_SecondTargetWritesEvents(bool enableBatchWrite)
{
const int totalEvents = 10;
var myTarget = new MyTarget { FailCounter = totalEvents / 2 };
var myTarget2 = new MyTarget();
var fallbackGroup = new FallbackGroupTarget(myTarget, myTarget2) { EnableBatchWrite = enableBatchWrite };
var targetWrapper = new BufferingTargetWrapper
{
WrappedTarget = fallbackGroup,
BufferSize = totalEvents,
};
InitializeTargets(myTarget, targetWrapper, myTarget2, fallbackGroup);
var continuationHit = new bool[totalEvents];
var lastException = new Exception[totalEvents];
var continuationThread = new Thread[totalEvents];
CreateContinuationFunc createAsyncContinuation =
eventNumber =>
ex =>
{
lastException[eventNumber] = ex;
continuationThread[eventNumber] = Thread.CurrentThread;
continuationHit[eventNumber] = true;
};
using (new NoThrowNLogExceptions())
{
// write 9 events - they will all be buffered and no final continuation will be reached
var eventCounter = 0;
for (var i = 0; i < totalEvents - 1; ++i)
{
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
}
Assert.Equal(0, myTarget.WriteCount);
// write one more event - everything will be flushed
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
if (enableBatchWrite)
{
Assert.Equal(1, myTarget.BufferedWriteCount);
Assert.Equal(totalEvents, myTarget.BufferedTotalEvents);
Assert.Equal(totalEvents, myTarget.WriteCount);
Assert.Equal(totalEvents / 2, myTarget2.WriteCount);
}
else
{
Assert.Equal(0, myTarget.BufferedTotalEvents);
Assert.Equal(0, myTarget.BufferedWriteCount);
Assert.Equal(1, myTarget.WriteCount);
Assert.Equal(totalEvents, myTarget2.WriteCount);
}
targetWrapper.Close();
myTarget.Close();
}
}
[Fact]
public void BufferingTargetWrapperSyncWithTimedFlushTest()
{
var myTarget = new MyTarget();
var targetWrapper = new BufferingTargetWrapper
{
WrappedTarget = myTarget,
BufferSize = 10,
FlushTimeout = 50,
};
var writeHit = new ManualResetEvent(false);
InitializeTargets(myTarget, targetWrapper);
const int totalEvents = 100;
var continuationHit = new bool[totalEvents];
var lastException = new Exception[totalEvents];
var continuationThread = new Thread[totalEvents];
var hitCount = 0;
CreateContinuationFunc createAsyncContinuation =
eventNumber =>
ex =>
{
lastException[eventNumber] = ex;
continuationThread[eventNumber] = Thread.CurrentThread;
continuationHit[eventNumber] = true;
Interlocked.Increment(ref hitCount);
writeHit.Set();
};
// write 9 events - they will all be buffered and no final continuation will be reached
var eventCounter = 0;
for (var i = 0; i < 9; ++i)
{
targetWrapper.WriteAsyncLogEvent(
new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
}
Assert.Equal(0, hitCount);
Assert.Equal(0, myTarget.WriteCount);
// wait for the timer-based flush to trigger and flush all events
Assert.True(writeHit.WaitOne(5000), "Wait Write Timeout");
WaitAndAssertExpectedValue(ref hitCount, 9);
Assert.Equal(1, myTarget.BufferedWriteCount);
Assert.Equal(9, myTarget.BufferedTotalEvents);
Assert.Equal(9, myTarget.WriteCount);
for (var i = 0; i < hitCount; ++i)
{
Assert.NotSame(Thread.CurrentThread, continuationThread[i]);
Assert.Null(lastException[i]);
}
// write 11 more events, 10 will be hit immediately because the buffer will fill up
// 1 will be pending
for (var i = 0; i < 11; ++i)
{
targetWrapper.WriteAsyncLogEvent(
new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
}
Assert.Equal(19, hitCount);
Assert.Equal(2, myTarget.BufferedWriteCount);
Assert.Equal(19, myTarget.BufferedTotalEvents);
Assert.Equal(19, myTarget.WriteCount);
// sleep and wait for the remaining one to be flushed
WaitAndAssertExpectedValue(ref hitCount, 20);
Assert.Equal(3, myTarget.BufferedWriteCount);
Assert.Equal(20, myTarget.BufferedTotalEvents);
Assert.Equal(20, myTarget.WriteCount);
}
[Fact]
public void BufferingTargetWrapperAsyncTest1()
{
RetryingIntegrationTest(3, () =>
{
var myTarget = new MyAsyncTarget();
var targetWrapper = new BufferingTargetWrapper
{
WrappedTarget = myTarget,
BufferSize = 10,
};
var writeHit = new ManualResetEvent(false);
InitializeTargets(myTarget, targetWrapper);
const int totalEvents = 100;
var continuationHit = new bool[totalEvents];
var lastException = new Exception[totalEvents];
var continuationThread = new Thread[totalEvents];
var hitCount = 0;
CreateContinuationFunc createAsyncContinuation =
eventNumber =>
ex =>
{
lastException[eventNumber] = ex;
continuationThread[eventNumber] = Thread.CurrentThread;
continuationHit[eventNumber] = true;
Interlocked.Increment(ref hitCount);
writeHit.Set();
};
// write 9 events - they will all be buffered and no final continuation will be reached
var eventCounter = 0;
for (var i = 0; i < 9; ++i)
{
targetWrapper.WriteAsyncLogEvent(
new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
}
Assert.Equal(0, hitCount);
// write one more event - everything will be flushed
targetWrapper.WriteAsyncLogEvent(
new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
Assert.True(writeHit.WaitOne(5000), "Wait Write Timeout");
WaitAndAssertExpectedValue(ref hitCount, 10);
Assert.Equal(1, myTarget.BufferedWriteCount);
Assert.Equal(10, myTarget.BufferedTotalEvents);
for (var i = 0; i < hitCount; ++i)
{
Assert.NotSame(Thread.CurrentThread, continuationThread[i]);
Assert.Null(lastException[i]);
}
// write 9 more events - they will all be buffered and no final continuation will be reached
for (var i = 0; i < 9; ++i)
{
targetWrapper.WriteAsyncLogEvent(
new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
}
// no change
Assert.Equal(10, hitCount);
Assert.Equal(1, myTarget.BufferedWriteCount);
Assert.Equal(10, myTarget.BufferedTotalEvents);
Exception flushException = null;
var flushHit = new ManualResetEvent(false);
targetWrapper.Flush(
ex =>
{
flushException = ex;
flushHit.Set();
});
Assert.True(flushHit.WaitOne(5000), "Wait Flush Timeout");
Assert.Null(flushException);
// make sure remaining events were written
Assert.Equal(19, hitCount);
Assert.Equal(2, myTarget.BufferedWriteCount);
Assert.Equal(19, myTarget.BufferedTotalEvents);
// flushes happen on another thread
for (var i = 10; i < hitCount; ++i)
{
Assert.NotNull(continuationThread[i]);
Assert.NotSame(Thread.CurrentThread, continuationThread[i]);
Assert.Null(lastException[i]);
}
// flush again - should not do anything
flushHit.Reset();
targetWrapper.Flush(
ex =>
{
flushException = ex;
flushHit.Set();
});
Assert.True(flushHit.WaitOne(5000), "Wait Again Flush Timeout");
Assert.Equal(19, hitCount);
Assert.Equal(2, myTarget.BufferedWriteCount);
Assert.Equal(19, myTarget.BufferedTotalEvents);
targetWrapper.Close();
myTarget.Close();
});
}
[Fact]
public void BufferingTargetWrapperSyncWithTimedFlushNonSlidingTest()
{
RetryingIntegrationTest(3, () =>
{
var myTarget = new MyTarget();
var targetWrapper = new BufferingTargetWrapper
{
WrappedTarget = myTarget,
BufferSize = 10,
FlushTimeout = 400,
SlidingTimeout = false,
};
InitializeTargets(myTarget, targetWrapper);
const int totalEvents = 100;
var continuationHit = new bool[totalEvents];
var lastException = new Exception[totalEvents];
var continuationThread = new Thread[totalEvents];
var hitCount = 0;
var resetEvent = new ManualResetEvent(false);
CreateContinuationFunc createAsyncContinuation =
eventNumber =>
ex =>
{
lastException[eventNumber] = ex;
continuationThread[eventNumber] = Thread.CurrentThread;
continuationHit[eventNumber] = true;
Interlocked.Increment(ref hitCount);
if (eventNumber > 0)
{
resetEvent.Set();
}
};
var eventCounter = 0;
targetWrapper.WriteAsyncLogEvent(
new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
Assert.Equal(0, hitCount);
Assert.Equal(0, myTarget.WriteCount);
targetWrapper.WriteAsyncLogEvent(
new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
Assert.True(resetEvent.WaitOne(5000), "Wait Write Timeout");
Assert.Equal(2, hitCount);
Assert.Equal(2, myTarget.WriteCount);
});
}
[Fact]
public void BufferingTargetWrapperSyncWithTimedFlushSlidingTest()
{
var myTarget = new MyTarget();
var targetWrapper = new BufferingTargetWrapper
{
WrappedTarget = myTarget,
BufferSize = 10,
FlushTimeout = 400,
};
var writeEvent = new ManualResetEvent(false);
InitializeTargets(myTarget, targetWrapper);
const int totalEvents = 100;
var continuationHit = new bool[totalEvents];
var lastException = new Exception[totalEvents];
var continuationThread = new Thread[totalEvents];
var hitCount = 0;
CreateContinuationFunc createAsyncContinuation =
eventNumber =>
ex =>
{
lastException[eventNumber] = ex;
continuationThread[eventNumber] = Thread.CurrentThread;
continuationHit[eventNumber] = true;
Interlocked.Increment(ref hitCount);
writeEvent.Set();
};
var eventCounter = 0;
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
Thread.Sleep(100);
Assert.Equal(0, hitCount);
Assert.Equal(0, myTarget.WriteCount);
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
Thread.Sleep(100);
Assert.Equal(0, hitCount);
Assert.Equal(0, myTarget.WriteCount);
Assert.True(writeEvent.WaitOne(5000), "Wait Write Timeout");
WaitAndAssertExpectedValue(ref hitCount, 2);
Assert.Equal(2, myTarget.WriteCount);
}
[Fact]
public void WhenWrappedTargetThrowsExceptionThisIsHandled()
{
using (new NoThrowNLogExceptions())
{
var myTarget = new MyTarget { ThrowException = true };
var bufferingTargetWrapper = new BufferingTargetWrapper
{
WrappedTarget = myTarget,
FlushTimeout = -1
};
InitializeTargets(myTarget, bufferingTargetWrapper);
bufferingTargetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(_ => { }));
var flushHit = new ManualResetEvent(false);
bufferingTargetWrapper.Flush(ex => flushHit.Set());
Assert.True(flushHit.WaitOne(5000), "Wait Flush Timeout");
Assert.Equal(1, myTarget.FlushCount);
}
}
[Fact]
public void BufferingTargetWrapperSyncWithOverflowDiscardTest()
{
const int totalEvents = 15;
const int bufferSize = 10;
var myTarget = new MyTarget();
var targetWrapper = new BufferingTargetWrapper
{
WrappedTarget = myTarget,
BufferSize = bufferSize,
OverflowAction = BufferingTargetWrapperOverflowAction.Discard
};
InitializeTargets(myTarget, targetWrapper);
var continuationHit = new bool[totalEvents];
var hitCount = 0;
CreateContinuationFunc createAsyncContinuation =
eventNumber =>
ex =>
{
continuationHit[eventNumber] = true;
Interlocked.Increment(ref hitCount);
};
Assert.Equal(0, myTarget.WriteCount);
for (int i = 0; i < totalEvents; i++)
{
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(i)));
}
// No events should be written to the wrapped target unless flushing manually.
Assert.Equal(0, myTarget.WriteCount);
Assert.Equal(0, myTarget.BufferedWriteCount);
Assert.Equal(0, myTarget.BufferedTotalEvents);
targetWrapper.Flush(e => { });
Assert.Equal(bufferSize, hitCount);
Assert.Equal(bufferSize, myTarget.WriteCount);
Assert.Equal(1, myTarget.BufferedWriteCount);
Assert.Equal(bufferSize, myTarget.BufferedTotalEvents);
// Validate that we dropped the oldest events.
Assert.False(continuationHit[totalEvents - bufferSize - 1]);
Assert.True(continuationHit[totalEvents - bufferSize]);
// Make sure the events do not stay in the buffer.
targetWrapper.Flush(e => { });
Assert.Equal(bufferSize, hitCount);
Assert.Equal(bufferSize, myTarget.WriteCount);
Assert.Equal(1, myTarget.BufferedWriteCount);
Assert.Equal(bufferSize, myTarget.BufferedTotalEvents);
// Make sure that events are discarded when closing target (config-reload + shutdown)
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(totalEvents)));
targetWrapper.Close();
Assert.Equal(bufferSize, hitCount);
Assert.Equal(bufferSize, myTarget.WriteCount);
Assert.Equal(1, myTarget.BufferedWriteCount);
Assert.Equal(bufferSize, myTarget.BufferedTotalEvents);
}
private static void InitializeTargets(params Target[] targets)
{
foreach (var target in targets)
{
target.Initialize(null);
}
}
private class MyAsyncTarget : Target
{
private readonly NLog.Internal.AsyncOperationCounter _pendingWriteCounter = new NLog.Internal.AsyncOperationCounter();
public int BufferedWriteCount { get; private set; }
public int BufferedTotalEvents { get; private set; }
protected override void Write(LogEventInfo logEvent)
{
throw new NotSupportedException();
}
protected override void Write(IList<AsyncLogEventInfo> logEvents)
{
_pendingWriteCounter.BeginOperation();
BufferedWriteCount++;
BufferedTotalEvents += logEvents.Count;
for (int i = 0; i < logEvents.Count; ++i)
{
var @event = logEvents[i];
ThreadPool.QueueUserWorkItem(
s =>
{
try
{
if (ThrowExceptions)
{
@event.Continuation(new ApplicationException("Some problem!"));
}
else
{
@event.Continuation(null);
}
}
finally
{
_pendingWriteCounter.CompleteOperation(null);
}
});
}
}
protected override void FlushAsync(AsyncContinuation asyncContinuation)
{
var wrappedContinuation = _pendingWriteCounter.RegisterCompletionNotification(asyncContinuation);
ThreadPool.QueueUserWorkItem(
s =>
{
wrappedContinuation(null);
});
}
public bool ThrowExceptions { get; set; }
}
private class MyTarget : Target
{
public int FlushCount { get; private set; }
public int WriteCount { get; private set; }
public int BufferedWriteCount { get; private set; }
public int BufferedTotalEvents { get; private set; }
public bool ThrowException { get; set; }
public int FailCounter { get; set; }
protected override void Write(IList<AsyncLogEventInfo> logEvents)
{
BufferedWriteCount++;
BufferedTotalEvents += logEvents.Count;
base.Write(logEvents);
}
protected override void Write(LogEventInfo logEvent)
{
Assert.True(FlushCount <= WriteCount);
WriteCount++;
if (ThrowException)
{
throw new Exception("Target exception");
}
if (FailCounter > 0)
{
FailCounter--;
throw new ApplicationException("Some failure.");
}
}
protected override void FlushAsync(AsyncContinuation asyncContinuation)
{
FlushCount++;
asyncContinuation(null);
}
}
private delegate AsyncContinuation CreateContinuationFunc(int eventNumber);
private static void WaitAndAssertExpectedValue(ref int hitCount, int expectedValue)
{
for (int i = 0; i < 100; ++i)
{
if (Thread.VolatileRead(ref hitCount) >= expectedValue)
break; // Ready to assert
Thread.Sleep(50);
}
Assert.Equal(expectedValue, hitCount);
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using OLEDB.Test.ModuleCore;
namespace System.Xml.Tests
{
public partial class TCReadElementContentAsBinHex : TCXMLReaderBaseGeneral
{
// Type is System.Xml.Tests.TCReadElementContentAsBinHex
// Test Case
public override void AddChildren()
{
// for function TestReadBinHex_1
{
this.AddChild(new CVariation(TestReadBinHex_1) { Attribute = new Variation("ReadBinHex Element with all valid value") });
}
// for function TestReadBinHex_2
{
this.AddChild(new CVariation(TestReadBinHex_2) { Attribute = new Variation("ReadBinHex Element with all valid Num value") { Pri = 0 } });
}
// for function TestReadBinHex_3
{
this.AddChild(new CVariation(TestReadBinHex_3) { Attribute = new Variation("ReadBinHex Element with all valid Text value") });
}
// for function TestReadBinHex_4
{
this.AddChild(new CVariation(TestReadBinHex_4) { Attribute = new Variation("ReadBinHex Element with Comments and PIs") { Pri = 0 } });
}
// for function TestReadBinHex_5
{
this.AddChild(new CVariation(TestReadBinHex_5) { Attribute = new Variation("ReadBinHex Element with all valid value (from concatenation), Pri=0") });
}
// for function TestReadBinHex_6
{
this.AddChild(new CVariation(TestReadBinHex_6) { Attribute = new Variation("ReadBinHex Element with all long valid value (from concatenation)") });
}
// for function TestReadBinHex_7
{
this.AddChild(new CVariation(TestReadBinHex_7) { Attribute = new Variation("ReadBinHex with count > buffer size") });
}
// for function TestReadBinHex_8
{
this.AddChild(new CVariation(TestReadBinHex_8) { Attribute = new Variation("ReadBinHex with count < 0") });
}
// for function vReadBinHex_9
{
this.AddChild(new CVariation(vReadBinHex_9) { Attribute = new Variation("ReadBinHex with index > buffer size") });
}
// for function TestReadBinHex_10
{
this.AddChild(new CVariation(TestReadBinHex_10) { Attribute = new Variation("ReadBinHex with index < 0") });
}
// for function TestReadBinHex_11
{
this.AddChild(new CVariation(TestReadBinHex_11) { Attribute = new Variation("ReadBinHex with index + count exceeds buffer") });
}
// for function TestReadBinHex_12
{
this.AddChild(new CVariation(TestReadBinHex_12) { Attribute = new Variation("ReadBinHex index & count =0") });
}
// for function TestReadBinHex_13
{
this.AddChild(new CVariation(TestReadBinHex_13) { Attribute = new Variation("ReadBinHex Element multiple into same buffer (using offset), Pri=0") });
}
// for function TestReadBinHex_14
{
this.AddChild(new CVariation(TestReadBinHex_14) { Attribute = new Variation("ReadBinHex with buffer == null") });
}
// for function TestReadBinHex_16
{
this.AddChild(new CVariation(TestReadBinHex_16) { Attribute = new Variation("Read after partial ReadBinHex") });
}
// for function TestReadBinHex_18
{
this.AddChild(new CVariation(TestReadBinHex_18) { Attribute = new Variation("No op node types") });
}
// for function TestTextReadBinHex_21
{
this.AddChild(new CVariation(TestTextReadBinHex_21) { Attribute = new Variation("ReadBinHex with whitespaces") });
}
// for function TestTextReadBinHex_22
{
this.AddChild(new CVariation(TestTextReadBinHex_22) { Attribute = new Variation("ReadBinHex with odd number of chars") });
}
// for function TestTextReadBinHex_23
{
this.AddChild(new CVariation(TestTextReadBinHex_23) { Attribute = new Variation("ReadBinHex when end tag doesn't exist") });
}
// for function TestTextReadBinHex_24
{
this.AddChild(new CVariation(TestTextReadBinHex_24) { Attribute = new Variation("WS:WireCompat:hex binary fails to send/return data after 1787 bytes") });
}
// for function TestReadBinHex_430329
{
this.AddChild(new CVariation(TestReadBinHex_430329) { Attribute = new Variation("SubtreeReader inserted attributes don't work with ReadContentAsBinHex") });
}
// for function TestReadBinHex_27
{
this.AddChild(new CVariation(TestReadBinHex_27) { Attribute = new Variation("ReadBinHex with = in the middle") });
}
// for function TestReadBinHex_105376
{
this.AddChild(new CVariation(TestReadBinHex_105376) { Attribute = new Variation("ReadBinHex runs into an Overflow") { Params = new object[] { "1000000" } } });
this.AddChild(new CVariation(TestReadBinHex_105376) { Attribute = new Variation("ReadBinHex runs into an Overflow") { Params = new object[] { "10000000" } } });
}
// for function TestReadBinHex_28
{
this.AddChild(new CVariation(TestReadBinHex_28) { Attribute = new Variation("call ReadContentAsBinHex on two or more nodes") });
}
// for function TestReadBinHex_29
{
this.AddChild(new CVariation(TestReadBinHex_29) { Attribute = new Variation("read BinHex over invalid text node") });
}
// for function TestReadBinHex_30
{
this.AddChild(new CVariation(TestReadBinHex_30) { Attribute = new Variation("goto to text node, ask got.Value, readcontentasBinHex") });
}
// for function TestReadBinHex_31
{
this.AddChild(new CVariation(TestReadBinHex_31) { Attribute = new Variation("goto to text node, readcontentasBinHex, ask got.Value") });
}
// for function TestReadBinHex_32
{
this.AddChild(new CVariation(TestReadBinHex_32) { Attribute = new Variation("goto to huge text node, read several chars with ReadContentAsBinHex and Move forward with .Read()") });
}
// for function TestReadBinHex_33
{
this.AddChild(new CVariation(TestReadBinHex_33) { Attribute = new Variation("goto to huge text node with invalid chars, read several chars with ReadContentAsBinHex and Move forward with .Read()") });
}
// for function TestBinHex_34
{
this.AddChild(new CVariation(TestBinHex_34) { Attribute = new Variation("ReadContentAsBinHex on an xmlns attribute") { Param = "<foo xmlns='default'> <bar > id='1'/> </foo>" } });
this.AddChild(new CVariation(TestBinHex_34) { Attribute = new Variation("ReadContentAsBinHex on an xmlns:k attribute") { Param = "<k:foo xmlns:k='default'> <k:bar id='1'/> </k:foo>" } });
this.AddChild(new CVariation(TestBinHex_34) { Attribute = new Variation("ReadContentAsBinHex on an xml:space attribute") { Param = "<foo xml:space='default'> <bar > id='1'/> </foo>" } });
this.AddChild(new CVariation(TestBinHex_34) { Attribute = new Variation("ReadContentAsBinHex on an xml:lang attribute") { Param = "<foo xml:lang='default'> <bar > id='1'/> </foo>" } });
}
// for function TestReadBinHex_35
{
this.AddChild(new CVariation(TestReadBinHex_35) { Attribute = new Variation("call ReadContentAsBinHex on two or more nodes and whitespace") });
}
// for function TestReadBinHex_36
{
this.AddChild(new CVariation(TestReadBinHex_36) { Attribute = new Variation("call ReadContentAsBinHex on two or more nodes and whitespace after call Value") });
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections;
using System.Collections.Generic;
namespace System.Xml.Serialization
{
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
public class XmlAnyElementAttributes : IList
{
private List<XmlAnyElementAttribute> _list = new List<XmlAnyElementAttribute>();
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
public XmlAnyElementAttribute this[int index]
{
get { return _list[index]; }
set
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
_list[index] = value;
}
}
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
public int Add(XmlAnyElementAttribute value)
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
int index = _list.Count;
_list.Add(value);
return index;
}
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
public void Insert(int index, XmlAnyElementAttribute value)
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
_list.Insert(index, value);
}
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
public int IndexOf(XmlAnyElementAttribute value)
{
return _list.IndexOf(value);
}
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
public bool Contains(XmlAnyElementAttribute value)
{
return _list.Contains(value);
}
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
public void Remove(XmlAnyElementAttribute value)
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
if (!_list.Remove(value))
{
throw new ArgumentException(SR.Arg_RemoveArgNotFound);
}
}
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
public void CopyTo(XmlAnyElementAttribute[] array, int index)
{
_list.CopyTo(array, index);
}
private IList List
{
get { return _list; }
}
public int Count
{
get
{
return _list == null ? 0 : _list.Count;
}
}
public void Clear()
{
_list.Clear();
}
public void RemoveAt(int index)
{
_list.RemoveAt(index);
}
bool IList.IsReadOnly
{
get { return List.IsReadOnly; }
}
bool IList.IsFixedSize
{
get { return List.IsFixedSize; }
}
bool ICollection.IsSynchronized
{
get { return List.IsSynchronized; }
}
Object ICollection.SyncRoot
{
get { return List.SyncRoot; }
}
void ICollection.CopyTo(Array array, int index)
{
List.CopyTo(array, index);
}
Object IList.this[int index]
{
get
{
return List[index];
}
set
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
List[index] = value;
}
}
bool IList.Contains(Object value)
{
return List.Contains(value);
}
int IList.Add(Object value)
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
return List.Add(value);
}
void IList.Remove(Object value)
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
var attribute = value as XmlAnyElementAttribute;
if (attribute == null)
{
throw new ArgumentException(SR.Arg_RemoveArgNotFound);
}
Remove(attribute);
}
int IList.IndexOf(Object value)
{
return List.IndexOf(value);
}
void IList.Insert(int index, Object value)
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
List.Insert(index, value);
}
public IEnumerator GetEnumerator()
{
return List.GetEnumerator();
}
}
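// Usage sketch (not part of this file): this collection is normally reached through
// XmlAttributes when building serialization overrides; MyType and "Unknown" below are
// placeholders.
//
//   var attrs = new XmlAttributes();
//   attrs.XmlAnyElements.Add(new XmlAnyElementAttribute("extra"));
//   var overrides = new XmlAttributeOverrides();
//   overrides.Add(typeof(MyType), "Unknown", attrs);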
}
| |
//
// SHA512Test.cs - NUnit Test Cases for SHA512
//
// Author:
// Sebastien Pouliot <[email protected]>
//
// (C) 2002 Motus Technologies Inc. (http://www.motus.com)
// (C) 2004 Novell http://www.novell.com
//
using NUnit.Framework;
using System;
using System.IO;
using System.Security.Cryptography;
using System.Text;
namespace MonoTests.System.Security.Cryptography {
// References:
// a. FIPS PUB 180-2: Secure Hash Standard
// http://csrc.nist.gov/publications/fips/fips180-2/fip180-2.txt
// SHA512 is an abstract class - so most of the tests included here won't be tested
// on the abstract class itself but should be tested in ALL its descendants.
[TestFixture]
public class SHA512Test : HashAlgorithmTest {
[SetUp]
protected override void SetUp ()
{
hash = SHA512.Create ();
}
// test vectors from NIST FIPS 180-2
private string input1 = "abc";
private string input2 = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu";
public void FIPS186_Test1 (SHA512 hash)
{
string className = hash.ToString ();
byte[] result = { 0xdd, 0xaf, 0x35, 0xa1, 0x93, 0x61, 0x7a, 0xba,
0xcc, 0x41, 0x73, 0x49, 0xae, 0x20, 0x41, 0x31,
0x12, 0xe6, 0xfa, 0x4e, 0x89, 0xa9, 0x7e, 0xa2,
0x0a, 0x9e, 0xee, 0xe6, 0x4b, 0x55, 0xd3, 0x9a,
0x21, 0x92, 0x99, 0x2a, 0x27, 0x4f, 0xc1, 0xa8,
0x36, 0xba, 0x3c, 0x23, 0xa3, 0xfe, 0xeb, 0xbd,
0x45, 0x4d, 0x44, 0x23, 0x64, 0x3c, 0xe8, 0x0e,
0x2a, 0x9a, 0xc9, 0x4f, 0xa5, 0x4c, 0xa4, 0x9f };
byte[] input = Encoding.Default.GetBytes (input1);
string testName = className + " 1";
FIPS186_a (testName, hash, input, result);
FIPS186_b (testName, hash, input, result);
FIPS186_c (testName, hash, input, result);
FIPS186_d (testName, hash, input, result);
FIPS186_e (testName, hash, input, result);
}
public void FIPS186_Test2 (SHA512 hash)
{
string className = hash.ToString ();
byte[] result = { 0x8e, 0x95, 0x9b, 0x75, 0xda, 0xe3, 0x13, 0xda,
0x8c, 0xf4, 0xf7, 0x28, 0x14, 0xfc, 0x14, 0x3f,
0x8f, 0x77, 0x79, 0xc6, 0xeb, 0x9f, 0x7f, 0xa1,
0x72, 0x99, 0xae, 0xad, 0xb6, 0x88, 0x90, 0x18,
0x50, 0x1d, 0x28, 0x9e, 0x49, 0x00, 0xf7, 0xe4,
0x33, 0x1b, 0x99, 0xde, 0xc4, 0xb5, 0x43, 0x3a,
0xc7, 0xd3, 0x29, 0xee, 0xb6, 0xdd, 0x26, 0x54,
0x5e, 0x96, 0xe5, 0x5b, 0x87, 0x4b, 0xe9, 0x09 };
byte[] input = Encoding.Default.GetBytes (input2);
string testName = className + " 2";
FIPS186_a (testName, hash, input, result);
FIPS186_b (testName, hash, input, result);
FIPS186_c (testName, hash, input, result);
FIPS186_d (testName, hash, input, result);
FIPS186_e (testName, hash, input, result);
}
public void FIPS186_Test3 (SHA512 hash)
{
string className = hash.ToString ();
byte[] result = { 0xe7, 0x18, 0x48, 0x3d, 0x0c, 0xe7, 0x69, 0x64,
0x4e, 0x2e, 0x42, 0xc7, 0xbc, 0x15, 0xb4, 0x63,
0x8e, 0x1f, 0x98, 0xb1, 0x3b, 0x20, 0x44, 0x28,
0x56, 0x32, 0xa8, 0x03, 0xaf, 0xa9, 0x73, 0xeb,
0xde, 0x0f, 0xf2, 0x44, 0x87, 0x7e, 0xa6, 0x0a,
0x4c, 0xb0, 0x43, 0x2c, 0xe5, 0x77, 0xc3, 0x1b,
0xeb, 0x00, 0x9c, 0x5c, 0x2c, 0x49, 0xaa, 0x2e,
0x4e, 0xad, 0xb2, 0x17, 0xad, 0x8c, 0xc0, 0x9b };
byte[] input = new byte [1000000];
for (int i = 0; i < 1000000; i++)
input[i] = 0x61; // a
string testName = className + " 3";
FIPS186_a (testName, hash, input, result);
FIPS186_b (testName, hash, input, result);
FIPS186_c (testName, hash, input, result);
FIPS186_d (testName, hash, input, result);
FIPS186_e (testName, hash, input, result);
}
public void FIPS186_a (string testName, SHA512 hash, byte[] input, byte[] result)
{
byte[] output = hash.ComputeHash (input);
AssertEquals (testName + ".a.1", result, output);
AssertEquals (testName + ".a.2", result, hash.Hash);
// required or next operation will still return old hash
hash.Initialize ();
}
public void FIPS186_b (string testName, SHA512 hash, byte[] input, byte[] result)
{
byte[] output = hash.ComputeHash (input, 0, input.Length);
AssertEquals (testName + ".b.1", result, output);
AssertEquals (testName + ".b.2", result, hash.Hash);
// required or next operation will still return old hash
hash.Initialize ();
}
public void FIPS186_c (string testName, SHA512 hash, byte[] input, byte[] result)
{
MemoryStream ms = new MemoryStream (input);
byte[] output = hash.ComputeHash (ms);
AssertEquals (testName + ".c.1", result, output);
AssertEquals (testName + ".c.2", result, hash.Hash);
// required or next operation will still return old hash
hash.Initialize ();
}
public void FIPS186_d (string testName, SHA512 hash, byte[] input, byte[] result)
{
byte[] output = hash.TransformFinalBlock (input, 0, input.Length);
// LAMESPEC or FIXME: TransformFinalBlock doesn't return HashValue !
// AssertEquals( testName + ".d.1", result, output );
AssertEquals (testName + ".d", result, hash.Hash);
// required or next operation will still return old hash
hash.Initialize ();
}
public void FIPS186_e (string testName, SHA512 hash, byte[] input, byte[] result)
{
byte[] copy = new byte [input.Length];
for (int i=0; i < input.Length - 1; i++)
hash.TransformBlock (input, i, 1, copy, i);
byte[] output = hash.TransformFinalBlock (input, input.Length - 1, 1);
// LAMESPEC or FIXME: TransformFinalBlock doesn't return HashValue !
// AssertEquals (testName + ".e.1", result, output);
AssertEquals (testName + ".e", result, hash.Hash);
// required or next operation will still return old hash
hash.Initialize ();
}
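// The pattern exercised in FIPS186_e above is the streaming form of the API. A minimal
// sketch of the same idea outside the test harness (the two-part split of "abc" is
// arbitrary):
//
//   byte[] part1 = Encoding.ASCII.GetBytes ("ab");
//   byte[] part2 = Encoding.ASCII.GetBytes ("c");
//   using (SHA512 sha = SHA512.Create ()) {
//       sha.TransformBlock (part1, 0, part1.Length, null, 0);
//       sha.TransformFinalBlock (part2, 0, part2.Length);
//       byte[] digest = sha.Hash;   // same value as ComputeHash ("abc")
//   }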
[Test]
public override void Create ()
{
// Note: These tests will only be valid without a "machine.config" file
// or with a "machine.config" file that does not modify the default algorithm
// configuration.
const string defaultSHA512 = "System.Security.Cryptography.SHA512Managed";
// try to build the default implementation
SHA512 hash = SHA512.Create ();
AssertEquals ("SHA512.Create()", hash.ToString (), defaultSHA512);
// try to build, in every way, a SHA512 implementation
hash = SHA512.Create ("SHA512");
AssertEquals ("SHA512.Create('SHA512')", hash.ToString (), defaultSHA512);
hash = SHA512.Create ("SHA-512");
AssertEquals ("SHA512.Create('SHA-512')", hash.ToString (), defaultSHA512);
}
[Test]
[ExpectedException (typeof (InvalidCastException))]
public void CreateIncorrect ()
{
// try to build an incorrect hash algorithm
hash = SHA512.Create ("MD5");
}
[Test]
public void CreateInvalid ()
{
// try to build invalid implementation
hash = SHA512.Create ("InvalidHash");
AssertNull ("SHA512.Create('InvalidHash')", hash);
}
[Test]
[ExpectedException (typeof (ArgumentNullException))]
public override void CreateNull ()
{
// try to build null implementation
hash = SHA512.Create (null);
}
// none of those values changes for any implementation of defaultSHA512
[Test]
public virtual void StaticInfo ()
{
string className = hash.ToString ();
AssertEquals (className + ".HashSize", 512, hash.HashSize);
AssertEquals (className + ".InputBlockSize", 1, hash.InputBlockSize);
AssertEquals (className + ".OutputBlockSize", 1, hash.OutputBlockSize);
}
}
}
| |
// UrlRewriter - A .NET URL Rewriter module
// Version 2.0
//
// Copyright 2011 Intelligencia
// Copyright 2011 Seth Yates
//
using System;
using System.Xml;
using System.Configuration;
using System.Collections.Specialized;
using System.Collections.Generic;
using Intelligencia.UrlRewriter.Utilities;
using Intelligencia.UrlRewriter.Errors;
using Intelligencia.UrlRewriter.Transforms;
using Intelligencia.UrlRewriter.Logging;
namespace Intelligencia.UrlRewriter.Configuration
{
/// <summary>
/// Reads configuration from an XML Node.
/// </summary>
public static class RewriterConfigurationReader
{
/// <summary>
/// Reads configuration information from the given XML Node.
/// </summary>
/// <param name="config">The rewriter configuration object to populate.</param>
/// <param name="section">The XML node to read configuration from.</param>
/// <returns>The configuration information.</returns>
public static void Read(IRewriterConfiguration config, XmlNode section)
{
if (section == null)
{
throw new ArgumentNullException("section");
}
foreach (XmlNode node in section.ChildNodes)
{
if (node.NodeType == XmlNodeType.Element)
{
if (node.LocalName == Constants.ElementErrorHandler)
{
ReadErrorHandler(node, config);
}
else if (node.LocalName == Constants.ElementDefaultDocuments)
{
ReadDefaultDocuments(node, config);
}
else if (node.LocalName == Constants.ElementRegister)
{
if (node.Attributes[Constants.AttrParser] != null)
{
ReadRegisterParser(node, config);
}
else if (node.Attributes[Constants.AttrTransform] != null)
{
ReadRegisterTransform(node, config);
}
else if (node.Attributes[Constants.AttrLogger] != null)
{
ReadRegisterLogger(node, config);
}
}
else if (node.LocalName == Constants.ElementMapping)
{
ReadMapping(node, config);
}
else
{
ReadRule(node, config);
}
}
}
}
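// For orientation, the section handled by Read looks roughly like the fragment below.
// The element and attribute spellings are inferred from the Constants names used in
// this file and from typical UrlRewriter configurations, so treat them as illustrative;
// rule elements such as <rewrite> come from the registered action parsers, not this class.
//
//   <rewriter>
//     <register transform="MyApp.MyTransform, MyApp" />
//     <error-handler code="404" url="/notfound.aspx" />
//     <default-documents>
//       <document>index.aspx</document>
//     </default-documents>
//     <mapping name="months">
//       <map from="jan" to="1" />
//     </mapping>
//     <rewrite url="^/old/(.+)$" to="/new/$1" />
//   </rewriter>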
private static void ReadRegisterTransform(XmlNode node, IRewriterConfiguration config)
{
if (node.ChildNodes.Count > 0)
{
throw new ConfigurationErrorsException(MessageProvider.FormatString(Message.ElementNoElements, Constants.ElementRegister), node);
}
string type = node.GetRequiredAttribute(Constants.AttrTransform);
// Transform type specified.
// Create an instance and add it to the transform factory.
IRewriteTransform transform = TypeHelper.Activate(type, null) as IRewriteTransform;
if (transform == null)
{
throw new ConfigurationErrorsException(MessageProvider.FormatString(Message.InvalidTypeSpecified, type, typeof(IRewriteTransform)), node);
}
config.TransformFactory.Add(transform);
}
private static void ReadRegisterLogger(XmlNode node, IRewriterConfiguration config)
{
if (node.ChildNodes.Count > 0)
{
throw new ConfigurationErrorsException(MessageProvider.FormatString(Message.ElementNoElements, Constants.ElementRegister), node);
}
string type = node.GetRequiredAttribute(Constants.AttrLogger);
// Logger type specified. Create an instance and use it
// as the rewriter's logger.
IRewriteLogger logger = TypeHelper.Activate(type, null) as IRewriteLogger;
if (logger != null)
{
config.Logger = logger;
}
}
private static void ReadRegisterParser(XmlNode node, IRewriterConfiguration config)
{
if (node.ChildNodes.Count > 0)
{
throw new ConfigurationErrorsException(MessageProvider.FormatString(Message.ElementNoElements, Constants.ElementRegister), node);
}
string type = node.GetRequiredAttribute(Constants.AttrParser);
object parser = TypeHelper.Activate(type, null);
IRewriteActionParser actionParser = parser as IRewriteActionParser;
if (actionParser != null)
{
config.ActionParserFactory.Add(actionParser);
}
IRewriteConditionParser conditionParser = parser as IRewriteConditionParser;
if (conditionParser != null)
{
config.ConditionParserPipeline.Add(conditionParser);
}
}
private static void ReadDefaultDocuments(XmlNode node, IRewriterConfiguration config)
{
foreach (XmlNode childNode in node.ChildNodes)
{
if (childNode.NodeType == XmlNodeType.Element && childNode.LocalName == Constants.ElementDocument)
{
config.DefaultDocuments.Add(childNode.InnerText);
}
}
}
private static void ReadErrorHandler(XmlNode node, IRewriterConfiguration config)
{
string code = node.GetRequiredAttribute(Constants.AttrCode);
XmlNode typeNode = node.Attributes[Constants.AttrType];
XmlNode urlNode = node.Attributes[Constants.AttrUrl];
if (typeNode == null && urlNode == null)
{
throw new ConfigurationErrorsException(MessageProvider.FormatString(Message.AttributeRequired, Constants.AttrUrl), node);
}
IRewriteErrorHandler handler = null;
if (typeNode != null)
{
// Handler type specified, e.g. <error-handler code="500" type="..." />
handler = TypeHelper.Activate(typeNode.Value, null) as IRewriteErrorHandler;
if (handler == null)
{
throw new ConfigurationErrorsException(MessageProvider.FormatString(Message.InvalidTypeSpecified, typeNode.Value, typeof(IRewriteErrorHandler)), node);
}
}
else
{
handler = new DefaultErrorHandler(urlNode.Value);
}
int statusCode;
if (!Int32.TryParse(code, out statusCode))
{
throw new ConfigurationErrorsException(MessageProvider.FormatString(Message.InvalidHttpStatusCode, code), node);
}
config.ErrorHandlers.Add(statusCode, handler);
}
private static void ReadMapping(XmlNode node, IRewriterConfiguration config)
{
// Name attribute.
string mappingName = node.GetRequiredAttribute(Constants.AttrName);
// Load the mapping entries into a string dictionary.
StringDictionary map = new StringDictionary();
foreach (XmlNode mapNode in node.ChildNodes)
{
if (mapNode.NodeType == XmlNodeType.Element)
{
if (mapNode.LocalName == Constants.ElementMap)
{
string fromValue = mapNode.GetRequiredAttribute(Constants.AttrFrom, true);
string toValue = mapNode.GetRequiredAttribute(Constants.AttrTo, true);
map.Add(fromValue, toValue);
}
else
{
throw new ConfigurationErrorsException(MessageProvider.FormatString(Message.ElementNotAllowed, mapNode.LocalName), node);
}
}
}
IRewriteTransform mapping = new StaticMappingTransform(mappingName, map);
config.TransformFactory.Add(mapping);
}
private static void ReadRule(XmlNode node, IRewriterConfiguration config)
{
bool parsed = false;
IList<IRewriteActionParser> parsers = config.ActionParserFactory.GetParsers(node.LocalName);
if (parsers != null)
{
foreach (IRewriteActionParser parser in parsers)
{
if (!parser.AllowsNestedActions && node.ChildNodes.Count > 0)
{
throw new ConfigurationErrorsException(MessageProvider.FormatString(Message.ElementNoElements, parser.Name), node);
}
if (!parser.AllowsAttributes && node.Attributes.Count > 0)
{
throw new ConfigurationErrorsException(MessageProvider.FormatString(Message.ElementNoAttributes, parser.Name), node);
}
IRewriteAction rule = parser.Parse(node, config);
if (rule != null)
{
config.Rules.Add(rule);
parsed = true;
break;
}
}
}
if (!parsed)
{
// No registered parser recognised this node.
throw new ConfigurationErrorsException(MessageProvider.FormatString(Message.ElementNotAllowed, node.LocalName), node);
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
namespace Microsoft.CSharp.RuntimeBinder.Semantics
{
/*
// ===========================================================================
Defines structs that package an aggregate member together with
generic type argument information.
// ===========================================================================*/
/******************************************************************************
SymWithType and its cousins. These package an aggregate member (field,
prop, event, or meth) together with the particular instantiation of the
aggregate (the AggregateType).
The default constructor does nothing so these are not safe to use
uninitialized. Note that when they are used as member of an EXPR they
are automatically zero filled by newExpr.
******************************************************************************/
internal class SymWithType
{
private AggregateType _ats;
private Symbol _sym;
public SymWithType()
{
}
public SymWithType(Symbol sym, AggregateType ats)
{
Set(sym, ats);
}
public virtual void Clear()
{
_sym = null;
_ats = null;
}
public AggregateType Ats
{
get { return _ats; }
}
public Symbol Sym
{
get { return _sym; }
}
public new AggregateType GetType()
{
// This conflicts with object.GetType. Turn every usage of this
// into a get on Ats.
return Ats;
}
public static bool operator ==(SymWithType swt1, SymWithType swt2)
{
if (ReferenceEquals(swt1, swt2))
{
return true;
}
else if (ReferenceEquals(swt1, null))
{
return swt2._sym == null;
}
else if (ReferenceEquals(swt2, null))
{
return swt1._sym == null;
}
return swt1.Sym == swt2.Sym && swt1.Ats == swt2.Ats;
}
public static bool operator !=(SymWithType swt1, SymWithType swt2)
{
if (ReferenceEquals(swt1, swt2))
{
return false;
}
else if (ReferenceEquals(swt1, null))
{
return swt2._sym != null;
}
else if (ReferenceEquals(swt2, null))
{
return swt1._sym != null;
}
return swt1.Sym != swt2.Sym || swt1.Ats != swt2.Ats;
}
public override bool Equals(object obj)
{
SymWithType other = obj as SymWithType;
if (other == null) return false;
return Sym == other.Sym && Ats == other.Ats;
}
public override int GetHashCode()
{
return (Sym?.GetHashCode() ?? 0) + (Ats?.GetHashCode() ?? 0);
}
// The SymWithType is considered NULL iff the Symbol is NULL.
public static implicit operator bool (SymWithType swt)
{
return swt != null;
}
// These assert that the Symbol is of the correct type.
public MethodOrPropertySymbol MethProp()
{
return Sym as MethodOrPropertySymbol;
}
public MethodSymbol Meth()
{
return Sym as MethodSymbol;
}
public PropertySymbol Prop()
{
return Sym as PropertySymbol;
}
public FieldSymbol Field()
{
return Sym as FieldSymbol;
}
public EventSymbol Event()
{
return Sym as EventSymbol;
}
public void Set(Symbol sym, AggregateType ats)
{
if (sym == null)
ats = null;
Debug.Assert(ats == null || sym.parent == ats.getAggregate());
_sym = sym;
_ats = ats;
}
}
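// Illustration (not part of the original source): because the == / != overloads treat
// a SymWithType whose Sym is null as equal to a null reference, and the implicit bool
// conversion is defined as "swt != null", the following holds (someSymbol/someAts are
// placeholders):
//
//   var swt = new SymWithType();    // default-constructed, Sym == null
//   bool isSet = swt;               // false - behaves as "not set"
//   swt.Set(someSymbol, someAts);
//   isSet = swt;                    // true once a Symbol is assigned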
internal class MethPropWithType : SymWithType
{
public MethPropWithType()
{
}
public MethPropWithType(MethodOrPropertySymbol mps, AggregateType ats)
{
Set(mps, ats);
}
}
internal sealed class MethWithType : MethPropWithType
{
public MethWithType()
{
}
public MethWithType(MethodSymbol meth, AggregateType ats)
{
Set(meth, ats);
}
}
internal sealed class PropWithType : MethPropWithType
{
public PropWithType()
{ }
public PropWithType(PropertySymbol prop, AggregateType ats)
{
Set(prop, ats);
}
public PropWithType(SymWithType swt)
{
Set(swt.Sym as PropertySymbol, swt.Ats);
}
}
internal sealed class EventWithType : SymWithType
{
public EventWithType()
{
}
public EventWithType(EventSymbol @event, AggregateType ats)
{
Set(@event, ats);
}
}
internal sealed class FieldWithType : SymWithType
{
public FieldWithType()
{
}
public FieldWithType(FieldSymbol field, AggregateType ats)
{
Set(field, ats);
}
}
/******************************************************************************
MethPropWithInst and MethWithInst. These extend MethPropWithType with
the method type arguments. Properties will never have type args, but
methods and properties share a lot of code so it's convenient to allow
both here.
The default constructor does nothing so these are not safe to use
uninitialized. Note that when they are used as member of an EXPR they
are automatically zero filled by newExpr.
******************************************************************************/
internal class MethPropWithInst : MethPropWithType
{
public TypeArray TypeArgs { get; private set; }
public MethPropWithInst()
{
Set(null, null, null);
}
public MethPropWithInst(MethodOrPropertySymbol mps, AggregateType ats)
: this(mps, ats, null)
{
}
public MethPropWithInst(MethodOrPropertySymbol mps, AggregateType ats, TypeArray typeArgs)
{
Set(mps, ats, typeArgs);
}
public override void Clear()
{
base.Clear();
TypeArgs = null;
}
public void Set(MethodOrPropertySymbol mps, AggregateType ats, TypeArray typeArgs)
{
if (mps == null)
{
ats = null;
typeArgs = null;
}
Debug.Assert(ats == null || mps != null && mps.getClass() == ats.getAggregate());
base.Set(mps, ats);
TypeArgs = typeArgs;
}
}
internal sealed class MethWithInst : MethPropWithInst
{
public MethWithInst()
{
}
public MethWithInst(MethodSymbol meth, AggregateType ats)
: this(meth, ats, null)
{
}
public MethWithInst(MethodSymbol meth, AggregateType ats, TypeArray typeArgs)
{
Set(meth, ats, typeArgs);
}
public MethWithInst(MethPropWithInst mpwi)
{
Set(mpwi.Sym.AsMethodSymbol(), mpwi.Ats, mpwi.TypeArgs);
}
}
}
| |
/*
* Copyright (c) .NET Foundation and Contributors
*
* This software may be modified and distributed under the terms
* of the MIT license. See the LICENSE file for details.
*
* https://github.com/piranhacms/piranha.core
*
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Dynamic;
using System.Linq;
using System.Threading.Tasks;
using Piranha.Extend;
using Piranha.Extend.Fields;
using Piranha.Models;
using Piranha.Manager.Models;
using Piranha.Manager.Models.Content;
using Piranha.Services;
namespace Piranha.Manager.Services
{
public class SiteService
{
private readonly IApi _api;
private readonly IContentFactory _factory;
/// <summary>
/// Default constructor.
/// </summary>
/// <param name="api">The current api</param>
/// <param name="factory">The content factory</param>
public SiteService(IApi api, IContentFactory factory)
{
_api = api;
_factory = factory;
}
/// <summary>
/// Gets the edit model for the site with the given id.
/// </summary>
/// <param name="id">The unique id</param>
/// <returns>The edit model</returns>
public async Task<SiteEditModel> GetById(Guid id)
{
var site = await _api.Sites.GetByIdAsync(id);
if (site != null)
{
var model = Transform(site);
model.Languages = await _api.Languages.GetAllAsync();
return model;
}
return null;
}
/// <summary>
/// Gets the content edit model for the site with the given id.
/// </summary>
/// <param name="id">The unique id</param>
/// <returns>The edit model</returns>
public async Task<SiteContentEditModel> GetContentById(Guid id)
{
var site = await _api.Sites.GetContentByIdAsync(id);
if (site != null)
{
// Perform manager init
await _factory.InitDynamicManagerAsync(site,
App.SiteTypes.GetById(site.TypeId));
return Transform(site);
}
return null;
}
/// <summary>
/// Creates a new site edit model.
/// </summary>
/// <returns>The edit model</returns>
public async Task<SiteEditModel> Create()
{
return new SiteEditModel
{
Id = Guid.NewGuid(),
LanguageId = (await _api.Languages.GetDefaultAsync()).Id,
Languages = await _api.Languages.GetAllAsync()
};
}
/// <summary>
/// Saves the given site.
/// </summary>
/// <param name="model">The site edit model</param>
public async Task Save(SiteEditModel model)
{
var site = await _api.Sites.GetByIdAsync(model.Id);
if (site == null)
{
site = new Site
{
Id = model.Id
};
}
site.SiteTypeId = model.TypeId;
site.LanguageId = model.LanguageId;
site.Title = model.Title;
site.InternalId = model.InternalId;
site.Culture = model.Culture;
site.Hostnames = model.Hostnames;
site.Description = model.Description;
site.Logo = model.Logo;
site.IsDefault = model.IsDefault;
await _api.Sites.SaveAsync(site);
}
public async Task SaveContent(SiteContentEditModel model)
{
var siteType = App.SiteTypes.GetById(model.TypeId);
if (siteType != null)
{
if (model.Id == Guid.Empty)
{
model.Id = Guid.NewGuid();
}
var site = await _api.Sites.GetContentByIdAsync(model.Id);
if (site == null)
{
site = await _factory.CreateAsync<DynamicSiteContent>(siteType);
site.Id = model.Id;
}
site.TypeId = model.TypeId;
site.Title = model.Title;
// Save regions
foreach (var region in siteType.Regions)
{
var modelRegion = model.Regions
.FirstOrDefault(r => r.Meta.Id == region.Id);
if (region.Collection)
{
var listRegion = (IRegionList)((IDictionary<string, object>)site.Regions)[region.Id];
listRegion.Clear();
foreach (var item in modelRegion.Items)
{
if (region.Fields.Count == 1)
{
listRegion.Add(item.Fields[0].Model);
}
else
{
var postRegion = new ExpandoObject();
foreach (var field in region.Fields)
{
var modelField = item.Fields
.FirstOrDefault(f => f.Meta.Id == field.Id);
((IDictionary<string, object>)postRegion)[field.Id] = modelField.Model;
}
listRegion.Add(postRegion);
}
}
}
else
{
var postRegion = ((IDictionary<string, object>)site.Regions)[region.Id];
if (region.Fields.Count == 1)
{
((IDictionary<string, object>)site.Regions)[region.Id] =
modelRegion.Items[0].Fields[0].Model;
}
else
{
foreach (var field in region.Fields)
{
var modelField = modelRegion.Items[0].Fields
.FirstOrDefault(f => f.Meta.Id == field.Id);
((IDictionary<string, object>)postRegion)[field.Id] = modelField.Model;
}
}
}
}
// Save site
await _api.Sites.SaveContentAsync(model.Id, site);
}
else
{
throw new ValidationException("Invalid Site Type.");
}
}
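// Note (derived from the logic above, placeholder region ids): a single-field region
// is stored directly under its region id in site.Regions, while a multi-field region
// is stored as an ExpandoObject with one entry per field id; collection regions hold
// an IRegionList of such values.
//
//   ((IDictionary<string, object>)site.Regions)["Tagline"]   // a single IField value
//   ((IDictionary<string, object>)site.Regions)["Header"]    // an ExpandoObject keyed by field id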
/// <summary>
/// Deletes the site with the given id.
/// </summary>
/// <param name="id">The unique id</param>
public Task Delete(Guid id)
{
return _api.Sites.DeleteAsync(id);
}
private SiteEditModel Transform(Site site)
{
return new SiteEditModel
{
Id = site.Id,
TypeId = site.SiteTypeId,
LanguageId = site.LanguageId,
Title = site.Title,
InternalId = site.InternalId,
Culture = site.Culture,
Description = site.Description,
Logo = site.Logo,
Hostnames = site.Hostnames,
IsDefault = site.IsDefault,
SiteTypes = App.SiteTypes.Select(t => new ContentTypeModel
{
Id = t.Id,
Title = t.Title
}).ToList()
};
}
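/// <summary>
/// Transforms the given dynamic site content into a content edit model.
/// </summary>
/// <param name="site">The site content</param>
/// <returns>The content edit model</returns>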
private SiteContentEditModel Transform(DynamicSiteContent site)
{
var type = App.SiteTypes.GetById(site.TypeId);
var model = new SiteContentEditModel
{
Id = site.Id,
TypeId = site.TypeId,
Title = site.Title,
UseBlocks = false
};
foreach (var regionType in type.Regions)
{
var region = new RegionModel
{
Meta = new RegionMeta
{
Id = regionType.Id,
Name = regionType.Title,
Description = regionType.Description,
Placeholder = regionType.ListTitlePlaceholder,
IsCollection = regionType.Collection,
Expanded = regionType.ListExpand,
Icon = regionType.Icon,
Display = regionType.Display.ToString().ToLower(),
Width = regionType.Width.ToString().ToLower()
}
};
var regionListModel = ((IDictionary<string, object>)site.Regions)[regionType.Id];
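// Single regions are wrapped in a one-item RegionList so that collection and
// non-collection regions can be enumerated uniformly below.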
if (!regionType.Collection)
{
var regionModel = (IRegionList)Activator.CreateInstance(typeof(RegionList<>).MakeGenericType(regionListModel.GetType()));
regionModel.Add(regionListModel);
regionListModel = regionModel;
}
foreach (var regionModel in (IEnumerable)regionListModel)
{
var regionItem = new RegionItemModel();
foreach (var fieldType in regionType.Fields)
{
var appFieldType = App.Fields.GetByType(fieldType.Type);
var field = new FieldModel
{
Meta = new FieldMeta
{
Id = fieldType.Id,
Name = fieldType.Title,
Component = appFieldType.Component,
Placeholder = fieldType.Placeholder,
IsHalfWidth = fieldType.Options.HasFlag(FieldOption.HalfWidth),
Description = fieldType.Description
}
};
if (typeof(SelectFieldBase).IsAssignableFrom(appFieldType.Type))
{
foreach(var item in ((SelectFieldBase)Activator.CreateInstance(appFieldType.Type)).Items)
{
field.Meta.Options.Add(Convert.ToInt32(item.Value), item.Title);
}
}
if (regionType.Fields.Count > 1)
{
field.Model = (IField)((IDictionary<string, object>)regionModel)[fieldType.Id];
if (regionType.ListTitleField == fieldType.Id)
{
regionItem.Title = field.Model.GetTitle();
field.Meta.NotifyChange = true;
}
}
else
{
field.Model = (IField)regionModel;
field.Meta.NotifyChange = true;
regionItem.Title = field.Model.GetTitle();
}
regionItem.Fields.Add(field);
}
if (string.IsNullOrWhiteSpace(regionItem.Title))
{
regionItem.Title = "...";
}
region.Items.Add(regionItem);
}
model.Regions.Add(region);
}
return model;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Data.SqlTypes;
using System.Linq;
using System.Linq.Expressions;
using System.Numerics;
using System.Reflection;
using Alluvial.PartitionBuilders;
namespace Alluvial
{
/// <summary>
/// Methods for creating and evaluating query partitions.
/// </summary>
public static class Partition
{
/// <summary>
/// Creates a partition containing the full range of guids.
/// </summary>
public static IStreamQueryRangePartition<Guid> AllGuids() =>
ByRange(Guid.Empty,
Guid.Parse("ffffffff-ffff-ffff-ffff-ffffffffffff"));
/// <summary>
/// Splits a query partition into several smaller, non-overlapping, gapless partitions.
/// </summary>
public static IEnumerable<IStreamQueryRangePartition<TPartition>> Among<TPartition>(
this IStreamQueryRangePartition<TPartition> partition,
int numberOfPartitions)
{
if (typeof (TPartition) == typeof (Guid))
{
dynamic partitions = SqlGuidPartitionBuilder.ByRange(
(dynamic) partition.LowerBoundExclusive,
(dynamic) partition.UpperBoundInclusive,
numberOfPartitions);
return partitions;
}
if (typeof (TPartition) == typeof (int))
{
dynamic partitions = Int32PartitionBuilder.ByRange(
(dynamic) partition.LowerBoundExclusive,
(dynamic) partition.UpperBoundInclusive,
numberOfPartitions);
return partitions;
}
if (typeof (TPartition) == typeof (long))
{
dynamic partitions = Int64PartitionBuilder.ByRange(
(dynamic) partition.LowerBoundExclusive,
(dynamic) partition.UpperBoundInclusive,
numberOfPartitions);
return partitions;
}
if (typeof (TPartition) == typeof (BigInteger))
{
dynamic partitions = BigIntegerPartitionBuilder.ByRange(
(dynamic) partition.LowerBoundExclusive,
(dynamic) partition.UpperBoundInclusive,
numberOfPartitions);
return partitions;
}
throw new ArgumentException($"Partitions of type {typeof (TPartition)} cannot be generated dynamically.");
}
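// Usage sketch (illustrative, not part of the original API documentation): split the
// full Guid range into 16 non-overlapping partitions and inspect their bounds, using
// the LowerBoundExclusive/UpperBoundInclusive members referenced above.
//
//     var partitions = Partition.AllGuids().Among(16);
//     foreach (var p in partitions)
//     {
//         Console.WriteLine($"{p.LowerBoundExclusive} .. {p.UpperBoundInclusive}");
//     }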
/// <summary>
/// Creates a stream query partition having the specified boundaries.
/// </summary>
/// <typeparam name="TPartition">The type of the partition boundaries.</typeparam>
/// <param name="lowerBoundExclusive">The lower, exclusive boundary.</param>
/// <param name="upperBoundInclusive">The upper, inclusive boundary.</param>
/// <returns>The range partition.</returns>
public static IStreamQueryRangePartition<TPartition> ByRange<TPartition>(
TPartition lowerBoundExclusive,
TPartition upperBoundInclusive) where TPartition : IComparable<TPartition>
{
if (typeof (TPartition) == typeof (Guid))
{
return (IStreamQueryRangePartition<TPartition>) new SqlGuidRangePartition(
(dynamic) lowerBoundExclusive,
(dynamic) upperBoundInclusive);
}
return new StreamQueryRangePartition<TPartition>(
lowerBoundExclusive,
upperBoundInclusive);
}
/// <summary>
/// Creates a partition specified by a single value.
/// </summary>
/// <typeparam name="TPartition">The type of the partition.</typeparam>
/// <param name="value">The value.</param>
public static IStreamQueryPartition<TPartition> ByValue<TPartition>(TPartition value) =>
new StreamQueryValuePartition<TPartition>(value);
/// <summary>
/// Distributes values into a set of partitions.
/// </summary>
/// <typeparam name="TPartition">The type of the partition.</typeparam>
/// <typeparam name="T">The types of the values to partition.</typeparam>
/// <param name="values">The values.</param>
/// <param name="partitions">The partitions.</param>
/// <returns>A sequence of groupings, by partition.</returns>
public static IEnumerable<IGrouping<TPartition, T>> DistributeInto<TPartition, T>(
this IEnumerable<T> values,
IEnumerable<TPartition> partitions)
where TPartition : IStreamQueryPartition<T>
{
return partitions
.Select(partition => Grouping.Create(partition,
values.Where(partition.Contains)));
}
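// Usage sketch (hedged; assumes IStreamQueryRangePartition<T> implements
// IStreamQueryPartition<T>, as the Contains usage above implies): distribute integers
// into two adjacent range partitions and count the values that land in each.
//
//     var partitions = new[]
//     {
//         Partition.ByRange(0, 50),
//         Partition.ByRange(50, 100)
//     };
//     var groups = Enumerable.Range(1, 100).DistributeInto(partitions);
//     foreach (var g in groups)
//     {
//         Console.WriteLine($"{g.Key}: {g.Count()} values");
//     }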
/// <summary>
/// Determines whether a value is within the specified partition.
/// </summary>
/// <typeparam name="TPartition">The type of the partition.</typeparam>
/// <param name="value">The value.</param>
/// <param name="partition">The partition.</param>
public static bool IsWithinPartition<TPartition>(
this TPartition value,
IStreamQueryPartition<TPartition> partition) =>
partition.Contains(value);
/// <summary>
/// Creates a partition based on a predicate.
/// </summary>
/// <typeparam name="TPartition">The type of the partition.</typeparam>
/// <param name="where">A predicate that evaluates whether a given value falls within the partition.</param>
/// <param name="named">The name of the partition.</param>
public static IStreamQueryPartition<TPartition> Where<TPartition>(
Func<TPartition, bool> where,
string named) =>
new StreamQueryPartition<TPartition>(@where, named);
/// <summary>
/// Filters a queryable to the data within a specified range partition.
/// </summary>
/// <typeparam name="TData">The type of the data.</typeparam>
/// <typeparam name="TPartition">The type of the partition value.</typeparam>
/// <param name="source">The source queryable.</param>
/// <param name="key">A selector for the partitioned value.</param>
/// <param name="partition">The partition.</param>
public static IQueryable<TData> WithinPartition<TData, TPartition>(
this IQueryable<TData> source,
Expression<Func<TData, TPartition>> key,
IStreamQueryPartition<TPartition> partition)
{
var rangePartition = partition as IStreamQueryRangePartition<TPartition>;
if (rangePartition == null)
{
throw new NotSupportedException("Only range partitions are currently supported.");
}
Expression selectKey = key.Body;
Expression lower = Expression.Constant(rangePartition.LowerBoundExclusive);
Expression upper = Expression.Constant(rangePartition.UpperBoundInclusive);
MethodInfo compareTo;
if (typeof(TPartition) == typeof(Guid) &&
source is EnumerableQuery)
{
compareTo = MethodInfoFor<SqlGuid>.CompareTo;
lower = Expression.Convert(lower, typeof (SqlGuid));
upper = Expression.Convert(upper, typeof (SqlGuid));
selectKey = Expression.Convert(selectKey, typeof (SqlGuid));
}
else
{
compareTo = MethodInfoFor<TPartition>.CompareTo;
}
var selectLeft = Expression.GreaterThan(
Expression.Call(selectKey,
compareTo,
lower), Expression.Constant(0));
var selectRight = Expression.LessThanOrEqual(
Expression.Call(selectKey,
compareTo,
upper), Expression.Constant(0));
var filterExpression = Expression.AndAlso(selectLeft, selectRight);
return source.Where(
Expression.Lambda<Func<TData, bool>>(filterExpression, key.Parameters.Single()));
}
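// Usage sketch (hedged; the Order type, its Guid Id property and GetOrders() are
// hypothetical, used only for illustration): filter a queryable down to one of the
// range partitions produced above.
//
//     IQueryable<Order> orders = GetOrders();
//     var partition = Partition.AllGuids().Among(4).First();
//     var slice = orders.WithinPartition(o => o.Id, partition);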
private static class MethodInfoFor<T>
{
public static readonly MethodInfo CompareTo = typeof (T).GetMethod("CompareTo",
new[] { typeof (T) });
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections;
using System.Collections.Generic;
using Tests.HashSet_HashSetTestSupport;
using Tests.HashSet_SetCollectionComparerTests;
namespace Tests
{
namespace HashSet_SetCollectionComparerTests
{ //Test framework for testing the use of comparers when
//determining the relationship between a collection and a set
public class SetCollectionComparerTests
{
#region Set/Item Comparer Tests where Item is in collection. Test 1-5
//Test 1 - Set/Item Comparer Test 1
public static void SetupTest1(out HashSet<ValueItem> set, out IEnumerable<ValueItem> other)
{
List<ValueItem> list = new List<ValueItem>();
ValueItem item1 = new ValueItem(134, -15);
ValueItem item2 = new ValueItem(14, 14);
ValueItem item3 = new ValueItem(19999, -12);
ValueItem innerItem;
SetItemComparerTests.SetupTest1(out set, out innerItem);
list.Add(item1);
list.Add(item2);
list.Add(item3);
list.Add(innerItem);
other = list;
}
//Test 2 - Set/Item Comparer Test 2
public static void SetupTest2(out HashSet<ValueItem> set, out IEnumerable<ValueItem> other)
{
List<ValueItem> list = new List<ValueItem>();
ValueItem item1 = new ValueItem(134, -15);
ValueItem item2 = new ValueItem(14, 14);
ValueItem item3 = new ValueItem(19999, -12);
ValueItem innerItem;
SetItemComparerTests.SetupTest2(out set, out innerItem);
list.Add(item1);
list.Add(item2);
list.Add(item3);
list.Add(innerItem);
other = list;
}
//Test 3 - Set/Item Comparer Test 3
public static void SetupTest3(out HashSet<ValueItem> set, out IEnumerable<ValueItem> other)
{
List<ValueItem> list = new List<ValueItem>();
ValueItem item1 = new ValueItem(134, -15);
ValueItem item2 = new ValueItem(14, 14);
ValueItem item3 = new ValueItem(19999, -12);
ValueItem innerItem;
SetItemComparerTests.SetupTest3(out set, out innerItem);
list.Add(item1);
list.Add(item2);
list.Add(item3);
list.Add(innerItem);
other = list;
}
//Test 4 - Set/Item Comparer Test 4
public static void SetupTest4(out HashSet<ValueItem> set, out IEnumerable<ValueItem> other)
{
List<ValueItem> list = new List<ValueItem>();
ValueItem item1 = new ValueItem(134, -15);
ValueItem item2 = new ValueItem(14, 14);
ValueItem item3 = new ValueItem(19999, -12);
ValueItem innerItem;
SetItemComparerTests.SetupTest4(out set, out innerItem);
list.Add(item1);
list.Add(item2);
list.Add(item3);
list.Add(innerItem);
other = list;
}
//Test 5 - Set/Item Comparer Test 5
public static void SetupTest5(out HashSet<HashSet<IEnumerable>> set, out IEnumerable<HashSet<IEnumerable>> other)
{
List<HashSet<IEnumerable>> other2 = new List<HashSet<IEnumerable>>();
HashSet<IEnumerable> hs1 = new HashSet<IEnumerable>(new ValueItem[] { new ValueItem(10, 10), new ValueItem(20, 20), new ValueItem(30, 30), new ValueItem(40, 40) });
HashSet<IEnumerable> hs2 = new HashSet<IEnumerable>(new ValueItem[] { new ValueItem(1, 1), new ValueItem(2, 2), new ValueItem(3, 3), new ValueItem(4, 4) });
HashSet<IEnumerable> hs3 = new HashSet<IEnumerable>(new ValueItem[] { new ValueItem(-1, -1), new ValueItem(-2, -2), new ValueItem(-3, -3), new ValueItem(-4, -4) });
HashSet<IEnumerable> innerItem;
SetItemComparerTests.SetupTest5(out set, out innerItem);
other2.Add(hs1);
other2.Add(hs2);
other2.Add(hs3);
other2.Add(innerItem);
other = other2;
}
#endregion
#region Set/Item Comparer Tests where Item is the only item in collection. Test 6-10
//Test 6 - Set/Item Comparer Test 1
public static void SetupTest6(out HashSet<ValueItem> set, out IEnumerable<ValueItem> other)
{
List<ValueItem> list = new List<ValueItem>();
ValueItem innerItem;
SetItemComparerTests.SetupTest1(out set, out innerItem);
list.Add(innerItem);
other = list;
}
//Test 7 - Set/Item Comparer Test 2
public static void SetupTest7(out HashSet<ValueItem> set, out IEnumerable<ValueItem> other)
{
List<ValueItem> list = new List<ValueItem>();
ValueItem innerItem;
SetItemComparerTests.SetupTest2(out set, out innerItem);
list.Add(innerItem);
other = list;
}
//Test 8 - Set/Item Comparer Test 3
public static void SetupTest8(out HashSet<ValueItem> set, out IEnumerable<ValueItem> other)
{
List<ValueItem> list = new List<ValueItem>();
ValueItem innerItem;
SetItemComparerTests.SetupTest3(out set, out innerItem);
list.Add(innerItem);
other = list;
}
//Test 9 - Set/Item Comparer Test 4
public static void SetupTest9(out HashSet<ValueItem> set, out IEnumerable<ValueItem> other)
{
List<ValueItem> list = new List<ValueItem>();
ValueItem innerItem;
SetItemComparerTests.SetupTest4(out set, out innerItem);
list.Add(innerItem);
other = list;
}
//Test 10 - Set/Item Comparer Test 5
public static void SetupTest10(out HashSet<HashSet<IEnumerable>> set, out IEnumerable<HashSet<IEnumerable>> other)
{
List<HashSet<IEnumerable>> other2 = new List<HashSet<IEnumerable>>();
HashSet<IEnumerable> innerItem;
SetItemComparerTests.SetupTest5(out set, out innerItem);
other2.Add(innerItem);
other = other2;
}
#endregion
#region Set/Item Comparer Tests where collection is the item. Test 11-15
//Test 11 - Set/Item Comparer Test 1
public static void SetupTest11(out HashSet<ValueItem> set, out IEnumerable<ValueItem> other)
{
ValueItem innerItem;
SetItemComparerTests.SetupTest1(out set, out innerItem);
other = innerItem;
}
//Test 12 - Set/Item Comparer Test 2
public static void SetupTest12(out HashSet<ValueItem> set, out IEnumerable<ValueItem> other)
{
ValueItem innerItem;
SetItemComparerTests.SetupTest2(out set, out innerItem);
other = innerItem;
}
//Test 13 - Set/Item Comparer Test 3
public static void SetupTest13(out HashSet<ValueItem> set, out IEnumerable<ValueItem> other)
{
ValueItem innerItem;
SetItemComparerTests.SetupTest3(out set, out innerItem);
other = innerItem;
}
//Test 14 - Set/Item Comparer Test 4
public static void SetupTest14(out HashSet<ValueItem> set, out IEnumerable<ValueItem> other)
{
ValueItem innerItem;
SetItemComparerTests.SetupTest4(out set, out innerItem);
other = innerItem;
}
//Test 15 - Set/Item Comparer Test 5
public static void SetupTest15(out HashSet<HashSet<IEnumerable>> set, out IEnumerable<HashSet<IEnumerable>> other)
{
HashSet<IEnumerable> innerItem;
SetItemComparerTests.SetupTest5(out set, out innerItem);
other = set;
}
#endregion
}
//Test framework for testing the use of comparers when
//determining the relationship between an item and a set
public class SetItemComparerTests
{
//Test 1
// item same as element in set by the default comparer, different by the set's comparer - set contains an item that is equal by the set's comparer
public static void SetupTest1(out HashSet<ValueItem> set, out ValueItem item)
{
set = new HashSet<ValueItem>(new ValueItemYEqualityComparer());
set.Add(new ValueItem(34, -5));
set.Add(new ValueItem(4, 4));
set.Add(new ValueItem(9999, -2));
item = new ValueItem(34, -2);
}
//Test 2
// item same as element in set by the default comparer, different by the set's comparer - set does not contain an item that is equal by the set's comparer
public static void SetupTest2(out HashSet<ValueItem> set, out ValueItem item)
{
set = new HashSet<ValueItem>(new ValueItemYEqualityComparer());
set.Add(new ValueItem(34, -5));
set.Add(new ValueItem(4, 4));
set.Add(new ValueItem(9999, -20));
item = new ValueItem(34, -2);
}
//Test 3
// item same as element in set by the set's comparer, different by the default comparer - set contains an item that is equal by the default comparer
public static void SetupTest3(out HashSet<ValueItem> set, out ValueItem item)
{
set = new HashSet<ValueItem>(new ValueItemYEqualityComparer());
set.Add(new ValueItem(34, -5));
set.Add(new ValueItem(4, 4));
set.Add(new ValueItem(9999, -2));
item = new ValueItem(34, -2);
}
//Test 4
// item same as element in set by the set's comparer, different by the default comparer - set does not contain an item that is equal by the default comparer
public static void SetupTest4(out HashSet<ValueItem> set, out ValueItem item)
{
ValueItem item1 = new ValueItem(340, -5);
ValueItem item2 = new ValueItem(4, 4);
ValueItem item3 = new ValueItem(9999, -2);
set = new HashSet<ValueItem>(new ValueItemYEqualityComparer());
set.Add(item1);
set.Add(item2);
set.Add(item3);
item = new ValueItem(34, -2);
}
//Test 5
// item contains set and item in set with GetSetComparer<T> as comparer
public static void SetupTest5(out HashSet<HashSet<IEnumerable>> outerSet, out HashSet<IEnumerable> item)
{
ValueItem itemn4 = new ValueItem(-4, -4);
ValueItem itemn3 = new ValueItem(-3, -3);
ValueItem itemn2 = new ValueItem(-2, -2);
ValueItem itemn1 = new ValueItem(-1, -1);
ValueItem item1 = new ValueItem(1, 1);
ValueItem item2 = new ValueItem(2, 2);
ValueItem item3 = new ValueItem(3, 3);
ValueItem item4 = new ValueItem(4, 4);
HashSet<IEnumerable> itemhs1 = new HashSet<IEnumerable>(new ValueItem[] { item1, item2, item3, item4 });
HashSet<IEnumerable> itemhs2 = new HashSet<IEnumerable>(new ValueItem[] { itemn1, itemn2, itemn3, itemn4 });
HashSet<IEnumerable> set = new HashSet<IEnumerable>();
outerSet = new HashSet<HashSet<IEnumerable>>(new SetEqualityComparer<IEnumerable>());
set.Add(outerSet);
outerSet.Add(itemhs1);
outerSet.Add(itemhs2);
outerSet.Add(set);
item = set;
}
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Http;
using System.Web.Http.Description;
using System.Xml.Serialization;
using Newtonsoft.Json;
namespace ngLabels.Areas.HelpPage.ModelDescriptions
{
/// <summary>
/// Generates model descriptions for given types.
/// </summary>
public class ModelDescriptionGenerator
{
// Modify this to support more data annotation attributes.
private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>>
{
{ typeof(RequiredAttribute), a => "Required" },
{ typeof(RangeAttribute), a =>
{
RangeAttribute range = (RangeAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum);
}
},
{ typeof(MaxLengthAttribute), a =>
{
MaxLengthAttribute maxLength = (MaxLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length);
}
},
{ typeof(MinLengthAttribute), a =>
{
MinLengthAttribute minLength = (MinLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length);
}
},
{ typeof(StringLengthAttribute), a =>
{
StringLengthAttribute strLength = (StringLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength);
}
},
{ typeof(DataTypeAttribute), a =>
{
DataTypeAttribute dataType = (DataTypeAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString());
}
},
{ typeof(RegularExpressionAttribute), a =>
{
RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern);
}
},
};
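// For example, a property annotated with [Range(1, 10)] is documented as
// "Range: inclusive between 1 and 10", and [StringLength(20, MinimumLength = 5)]
// as "String length: inclusive between 5 and 20".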
// Modify this to add more default documentation.
private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string>
{
{ typeof(Int16), "integer" },
{ typeof(Int32), "integer" },
{ typeof(Int64), "integer" },
{ typeof(UInt16), "unsigned integer" },
{ typeof(UInt32), "unsigned integer" },
{ typeof(UInt64), "unsigned integer" },
{ typeof(Byte), "byte" },
{ typeof(Char), "character" },
{ typeof(SByte), "signed byte" },
{ typeof(Uri), "URI" },
{ typeof(Single), "decimal number" },
{ typeof(Double), "decimal number" },
{ typeof(Decimal), "decimal number" },
{ typeof(String), "string" },
{ typeof(Guid), "globally unique identifier" },
{ typeof(TimeSpan), "time interval" },
{ typeof(DateTime), "date" },
{ typeof(DateTimeOffset), "date" },
{ typeof(Boolean), "boolean" },
};
private Lazy<IModelDocumentationProvider> _documentationProvider;
public ModelDescriptionGenerator(HttpConfiguration config)
{
if (config == null)
{
throw new ArgumentNullException("config");
}
_documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider);
GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase);
}
public Dictionary<string, ModelDescription> GeneratedModels { get; private set; }
private IModelDocumentationProvider DocumentationProvider
{
get
{
return _documentationProvider.Value;
}
}
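// Dispatch examples: typeof(int) yields a simple type description ("integer"),
// typeof(List<int>) a CollectionModelDescription wrapping that description,
// typeof(Dictionary<string, int>) a DictionaryModelDescription, and most other
// classes fall through to GenerateComplexTypeModelDescription.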
public ModelDescription GetOrCreateModelDescription(Type modelType)
{
if (modelType == null)
{
throw new ArgumentNullException("modelType");
}
Type underlyingType = Nullable.GetUnderlyingType(modelType);
if (underlyingType != null)
{
modelType = underlyingType;
}
ModelDescription modelDescription;
string modelName = ModelNameHelper.GetModelName(modelType);
if (GeneratedModels.TryGetValue(modelName, out modelDescription))
{
if (modelType != modelDescription.ModelType)
{
throw new InvalidOperationException(
String.Format(
CultureInfo.CurrentCulture,
"A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. " +
"Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.",
modelName,
modelDescription.ModelType.FullName,
modelType.FullName));
}
return modelDescription;
}
if (DefaultTypeDocumentation.ContainsKey(modelType))
{
return GenerateSimpleTypeModelDescription(modelType);
}
if (modelType.IsEnum)
{
return GenerateEnumTypeModelDescription(modelType);
}
if (modelType.IsGenericType)
{
Type[] genericArguments = modelType.GetGenericArguments();
if (genericArguments.Length == 1)
{
Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments);
if (enumerableType.IsAssignableFrom(modelType))
{
return GenerateCollectionModelDescription(modelType, genericArguments[0]);
}
}
if (genericArguments.Length == 2)
{
Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments);
if (dictionaryType.IsAssignableFrom(modelType))
{
return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]);
}
Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments);
if (keyValuePairType.IsAssignableFrom(modelType))
{
return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]);
}
}
}
if (modelType.IsArray)
{
Type elementType = modelType.GetElementType();
return GenerateCollectionModelDescription(modelType, elementType);
}
if (modelType == typeof(NameValueCollection))
{
return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string));
}
if (typeof(IDictionary).IsAssignableFrom(modelType))
{
return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object));
}
if (typeof(IEnumerable).IsAssignableFrom(modelType))
{
return GenerateCollectionModelDescription(modelType, typeof(object));
}
return GenerateComplexTypeModelDescription(modelType);
}
// Change this to provide different name for the member.
private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute)
{
JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>();
if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName))
{
return jsonProperty.PropertyName;
}
if (hasDataContractAttribute)
{
DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>();
if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name))
{
return dataMember.Name;
}
}
return member.Name;
}
private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute)
{
JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>();
XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>();
IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>();
NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>();
ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>();
bool hasMemberAttribute = member.DeclaringType.IsEnum ?
member.GetCustomAttribute<EnumMemberAttribute>() != null :
member.GetCustomAttribute<DataMemberAttribute>() != null;
// Display the member only if all of the following are true:
// no JsonIgnoreAttribute
// no XmlIgnoreAttribute
// no IgnoreDataMemberAttribute
// no NonSerializedAttribute
// no ApiExplorerSettingsAttribute with IgnoreApi set to true
// no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute
return jsonIgnore == null &&
xmlIgnore == null &&
ignoreDataMember == null &&
nonSerialized == null &&
(apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) &&
(!hasDataContractAttribute || hasMemberAttribute);
}
private string CreateDefaultDocumentation(Type type)
{
string documentation;
if (DefaultTypeDocumentation.TryGetValue(type, out documentation))
{
return documentation;
}
if (DocumentationProvider != null)
{
documentation = DocumentationProvider.GetDocumentation(type);
}
return documentation;
}
private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel)
{
List<ParameterAnnotation> annotations = new List<ParameterAnnotation>();
IEnumerable<Attribute> attributes = property.GetCustomAttributes();
foreach (Attribute attribute in attributes)
{
Func<object, string> textGenerator;
if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator))
{
annotations.Add(
new ParameterAnnotation
{
AnnotationAttribute = attribute,
Documentation = textGenerator(attribute)
});
}
}
// Rearrange the annotations
annotations.Sort((x, y) =>
{
// Special-case RequiredAttribute so that it shows up on top
if (x.AnnotationAttribute is RequiredAttribute)
{
return -1;
}
if (y.AnnotationAttribute is RequiredAttribute)
{
return 1;
}
// Sort the rest based on alphabetic order of the documentation
return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase);
});
foreach (ParameterAnnotation annotation in annotations)
{
propertyModel.Annotations.Add(annotation);
}
}
private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType)
{
ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType);
if (collectionModelDescription != null)
{
return new CollectionModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
ElementDescription = collectionModelDescription
};
}
return null;
}
private ModelDescription GenerateComplexTypeModelDescription(Type modelType)
{
ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
GeneratedModels.Add(complexModelDescription.Name, complexModelDescription);
bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
foreach (PropertyInfo property in properties)
{
if (ShouldDisplayMember(property, hasDataContractAttribute))
{
ParameterDescription propertyModel = new ParameterDescription
{
Name = GetMemberName(property, hasDataContractAttribute)
};
if (DocumentationProvider != null)
{
propertyModel.Documentation = DocumentationProvider.GetDocumentation(property);
}
GenerateAnnotations(property, propertyModel);
complexModelDescription.Properties.Add(propertyModel);
propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType);
}
}
FieldInfo[] fields = modelType.GetFields(BindingFlags.Public | BindingFlags.Instance);
foreach (FieldInfo field in fields)
{
if (ShouldDisplayMember(field, hasDataContractAttribute))
{
ParameterDescription propertyModel = new ParameterDescription
{
Name = GetMemberName(field, hasDataContractAttribute)
};
if (DocumentationProvider != null)
{
propertyModel.Documentation = DocumentationProvider.GetDocumentation(field);
}
complexModelDescription.Properties.Add(propertyModel);
propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType);
}
}
return complexModelDescription;
}
private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType)
{
ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
return new DictionaryModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
KeyModelDescription = keyModelDescription,
ValueModelDescription = valueModelDescription
};
}
private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType)
{
EnumTypeModelDescription enumDescription = new EnumTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static))
{
if (ShouldDisplayMember(field, hasDataContractAttribute))
{
EnumValueDescription enumValue = new EnumValueDescription
{
Name = field.Name,
Value = field.GetRawConstantValue().ToString()
};
if (DocumentationProvider != null)
{
enumValue.Documentation = DocumentationProvider.GetDocumentation(field);
}
enumDescription.Values.Add(enumValue);
}
}
GeneratedModels.Add(enumDescription.Name, enumDescription);
return enumDescription;
}
private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType)
{
ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
return new KeyValuePairModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
KeyModelDescription = keyModelDescription,
ValueModelDescription = valueModelDescription
};
}
private ModelDescription GenerateSimpleTypeModelDescription(Type modelType)
{
SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription);
return simpleModelDescription;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
/*
* A builder for a test connection to a server.
*/
class SSLTestBuilder {
/*
* Maximum supported protocol version advertised by the client.
*/
internal int MaxVersion {
get {
return maxVersion;
}
set {
maxVersion = value;
}
}
/*
* Version to use on outgoing records.
*/
internal int RecordVersion {
get {
return recordVersion;
}
set {
recordVersion = value;
}
}
/*
* Session ID to use in ClientHello. "null" is equivalent to
* an empty session ID.
*/
internal byte[] SessionID {
get {
return sessionID;
}
set {
if (value != null && value.Length > 32) {
throw new ArgumentException(
"Invalid session ID length");
}
sessionID = value;
}
}
/*
* Cipher suites to send in the ClientHello.
*/
internal int[] CipherSuites {
get {
return cipherSuites;
}
set {
if (value != null && value.Length > 32767) {
throw new ArgumentException(
"Invalid list of cipher suites");
}
cipherSuites = value;
}
}
/*
* If true, add the special fallback cipher suite.
*/
internal bool FallbackSCSV {
get {
return fallbackSCSV;
}
set {
fallbackSCSV = value;
}
}
/*
* If true, add the special "secure renegotiation" cipher suite.
*/
internal bool RenegotiationSCSV {
get {
return renegotiationSCSV;
}
set {
renegotiationSCSV = value;
}
}
/*
* If true, add the "secure renegotiation" extension.
*/
internal bool RenegotiationExtension {
get {
return renegotiationExtension;
}
set {
renegotiationExtension = value;
}
}
/*
* If not null and not empty, add the "supported elliptic curves"
* extension with the provided named curves.
*/
internal int[] SupportedCurves {
get {
return supportedCurves;
}
set {
supportedCurves = value;
}
}
/*
* If true, advertise support for Deflate compression.
*/
internal bool DeflateCompress {
get {
return deflateCompress;
}
set {
deflateCompress = value;
}
}
/*
* Set server name to send as SNI extension (null to not send SNI).
*/
internal string ServerName {
get {
return serverName;
}
set {
if (value == null) {
serverName = null;
return;
}
if (value.Length > 0xFFFF) {
throw new ArgumentException(
"Invalid server name (too long)");
}
foreach (char c in value) {
if (c <= 0x20 || c >= 0x7F) {
throw new ArgumentException("Invalid"
+ " server name (not ASCII)");
}
}
serverName = value;
}
}
int maxVersion;
int recordVersion;
byte[] sessionID;
int[] cipherSuites;
bool fallbackSCSV;
bool renegotiationSCSV;
bool renegotiationExtension;
int[] supportedCurves;
bool deflateCompress;
string serverName;
/*
* Create a new instance with default values.
*/
internal SSLTestBuilder()
{
Reset();
}
/*
* Set default values:
* -- maximum version is TLS 1.2
* -- outgoing record version is SSL 3.0
* -- no fallback SCSV
* -- no secure renegotiation SCSV
* -- no session ID
* -- deflate compression is supported
* -- secure renegotiation extension is sent
* -- no defined server name (for SNI)
*/
internal void Reset()
{
maxVersion = M.TLSv12;
recordVersion = M.SSLv30;
cipherSuites = null;
fallbackSCSV = false;
deflateCompress = true;
sessionID = null;
renegotiationSCSV = false;
renegotiationExtension = true;
supportedCurves = null;
}
/*
* This method computes the maximum number of cipher suites
* that can be configured, for the specified maximum record
* length. This is used to support old servers that have
* low tolerance to perfectly legal but relatively large
* ClientHello messages.
*
* If this method returns -1, then the provided maximum record
* length will necessarily be exceeded, with the current
* ClientHello configuration.
*/
internal int ComputeMaxCipherSuites(int maxRecordLen)
{
int a = 1;
int len = MakeClientHello(new int[a]).Length;
if ((len + 5) > maxRecordLen) {
return -1;
}
/*
* Bare minimum overhead in a ClientHello:
* 5 bytes for the record header
* 4 bytes for the handshake message header
* 37 bytes for version, random, session ID and compression
* 2 bytes for length of list of cipher suites
*/
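/*
 * Worked example: with maxRecordLen = 256 the initial upper bound is
 * b = 1 + ((256 - 48) >> 1) = 105 candidate suites; the binary search
 * below then keeps the largest count whose ClientHello still fits.
 */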
int b = 1 + ((maxRecordLen - 48) >> 1);
if (b > 32767) {
b = 32767;
}
while ((b - a) > 1) {
int c = (a + b) >> 1;
len = MakeClientHello(new int[c]).Length;
if ((len + 5) <= maxRecordLen) {
a = c;
} else {
b = c;
}
}
return a;
}
/*
* Deactivate all extensions (including SNI). This is meant to
* support flawed servers that are allergic to extensions.
*/
internal void DisableExtensions()
{
ServerName = null;
RenegotiationExtension = false;
SupportedCurves = null;
}
/*
* Begin a new handshake, and return the server data. If the
* server refused to complete the handshake with an explicit
* alert, then an SSLAlertException is thrown; for all other
 * error conditions, another kind of exception is thrown.
*/
internal SSLTestResult RunTest(SSLRecord rec)
{
/*
* Send ClientHello.
*/
byte[] ch = MakeClientHello(cipherSuites);
rec.SetOutType(M.HANDSHAKE);
rec.SetOutVersion(recordVersion);
rec.Write(ch);
rec.Flush();
/*
* Read handshake messages from server, up to the
* ServerHelloDone.
*/
SSLTestResult tr = new SSLTestResult();
tr.Parse(rec);
tr.CipherSuiteInClientList = false;
foreach (int s in cipherSuites) {
if (s == tr.SelectedCipherSuite) {
tr.CipherSuiteInClientList = true;
}
}
return tr;
}
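/*
 * Usage sketch (illustrative only; how the SSLRecord "rec" is opened and the
 * numeric cipher suite identifiers are assumptions, not taken from this class):
 *
 *     SSLTestBuilder tb = new SSLTestBuilder();
 *     tb.MaxVersion = M.TLSv12;
 *     tb.CipherSuites = new int[] { 0x002F, 0x0035 };
 *     SSLTestResult tr = tb.RunTest(rec);
 */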
/*
* Build a ClientHello using the provided cipher suites.
* Returned array contains the complete message with its
* 4-byte header (but not the record header).
*/
byte[] MakeClientHello(int[] ccs)
{
/*
* Assemble ClientHello.
*/
HList chs = new HList(0xFFFFFF);
/*
* Maximum protocol version.
*/
M.Write2(chs, maxVersion);
/*
* Client random. The first four bytes encode the
* current time.
*/
byte[] clientRandom = new byte[32];
M.Enc32be((int)(M.CurrentTimeMillis() / 1000), clientRandom, 0);
M.Rand(clientRandom, 4, clientRandom.Length - 4);
chs.Write(clientRandom, 0, clientRandom.Length);
/*
* Session ID, for session resumption.
*/
if (sessionID == null) {
M.Write1(chs, 0);
} else {
M.Write1(chs, sessionID.Length);
chs.Write(sessionID, 0, sessionID.Length);
}
/*
* Cipher suites.
*/
List<int> lcs = new List<int>();
if (ccs != null) {
foreach (int s in ccs) {
lcs.Add(s);
}
if (renegotiationSCSV) {
lcs.Add(M.TLS_EMPTY_RENEGOTIATION_INFO_SCSV);
}
if (fallbackSCSV) {
lcs.Add(M.TLS_FALLBACK_SCSV);
}
}
M.Write2(chs, lcs.Count << 1);
foreach (int s in lcs) {
M.Write2(chs, s);
}
/*
* Compression support: the NULL compression must
* always be specified; optionally, Deflate compression
* can be supported.
*/
if (deflateCompress) {
M.Write1(chs, 2);
M.Write1(chs, 1);
M.Write1(chs, 0);
} else {
M.Write1(chs, 1);
M.Write1(chs, 0);
}
/*
* Extensions.
*/
HList exs = new HList(0xFFFF);
if (serverName != null) {
M.Write2(exs, M.EXT_SERVER_NAME);
HList sndata = new HList(0xFFFF);
HList snles = new HList(0xFFFF);
snles.WriteByte(0);
HList snes = new HList(0xFFFF);
snes.Write(Encoding.UTF8.GetBytes(serverName));
snles.Write(snes.ToArray());
sndata.Write(snles.ToArray());
exs.Write(sndata.ToArray());
}
if (renegotiationExtension) {
M.Write2(exs, M.EXT_RENEGOTIATION_INFO);
M.Write2(exs, 1);
M.Write1(exs, 0);
}
if (supportedCurves != null && supportedCurves.Length > 0) {
M.Write2(exs, M.EXT_SUPPORTED_CURVES);
HList ecdata = new HList(0xFFFF);
HList ecl = new HList(0xFFFF);
foreach (int ec in supportedCurves) {
M.Write2(ecl, ec);
}
ecdata.Write(ecl.ToArray());
exs.Write(ecdata.ToArray());
}
if (exs.Length != 0) {
chs.Write(exs.ToArray());
}
byte[] msg = chs.ToArray();
MemoryStream ms = new MemoryStream();
ms.WriteByte(M.CLIENT_HELLO);
ms.Write(msg, 0, msg.Length);
return ms.ToArray();
}
}
| |
/******************************************************************************\
* Copyright (C) 2012-2016 Leap Motion, Inc. All rights reserved. *
* Leap Motion proprietary and confidential. Not for distribution. *
* Use subject to the terms of the Leap Motion SDK Agreement available at *
* https://developer.leapmotion.com/sdk_agreement, or another agreement *
* between Leap Motion and you, your company or other organization. *
\******************************************************************************/
namespace LeapInternal
{
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
using System.Runtime.InteropServices;
using Leap;
public class Connection
{
private static Dictionary<int, Connection> connectionDictionary = new Dictionary<int, Connection>();
static Connection()
{
}
public static Connection GetConnection(int connectionKey = 0)
{
if (Connection.connectionDictionary.ContainsKey(connectionKey))
{
Connection conn;
Connection.connectionDictionary.TryGetValue(connectionKey, out conn);
return conn;
}
else {
Connection newConn = new Connection(connectionKey);
connectionDictionary.Add(connectionKey, newConn);
return newConn;
}
}
public int ConnectionKey { get; private set; }
public CircularObjectBuffer<Frame> Frames { get; set; }
private ServiceFrameFactory frameFactory = new ServiceFrameFactory();
private DeviceList _devices = new DeviceList();
private FailedDeviceList _failedDevices;
private Dictionary<UInt32, ImageReference> _pendingImageRequests = new Dictionary<UInt32, ImageReference>();
private ObjectPool<ImageData> _imageDataCache;
private ObjectPool<ImageData> _imageRawDataCache;
private CircularObjectBuffer<TrackedQuad> _quads;
private int _frameBufferLength = 60;
private int _imageBufferLength = 20 * 4;
private int _quadBufferLength = 60;
private ulong _standardImageBufferSize = 640 * 240 * 2; //width * height * 2
private ulong _standardRawBufferSize = 640 * 240 * 2 * 8; //width * height * 2 images * 8 bpp
private DistortionData _currentDistortionData = new DistortionData();
// private bool _growImageMemory = false;
private IntPtr _leapConnection;
private Thread _polster;
private bool _isRunning = false;
//Policy and enabled features
private UInt64 _requestedPolicies = 0;
private UInt64 _activePolicies = 0;
private bool _trackedQuadsAreEnabled = false;
//Config change status
private Dictionary<uint, string> _configRequests = new Dictionary<uint, string>();
//Connection events
public EventHandler<LeapEventArgs> LeapInit;
public EventHandler<ConnectionEventArgs> LeapConnection;
public EventHandler<ConnectionLostEventArgs> LeapConnectionLost;
public EventHandler<DeviceEventArgs> LeapDevice;
public EventHandler<DeviceEventArgs> LeapDeviceLost;
public EventHandler<DeviceFailureEventArgs> LeapDeviceFailure;
public EventHandler<PolicyEventArgs> LeapPolicyChange;
public EventHandler<FrameEventArgs> LeapFrame;
public EventHandler<ImageEventArgs> LeapImageReady;
public EventHandler<ImageRequestFailedEventArgs> LeapImageRequestFailed;
public EventHandler<TrackedQuadEventArgs> LeapTrackedQuad;
public EventHandler<LogEventArgs> LeapLogEvent;
public EventHandler<SetConfigResponseEventArgs> LeapConfigResponse;
public EventHandler<ConfigChangeEventArgs> LeapConfigChange;
public EventHandler<DistortionEventArgs> LeapDistortionChange;
private bool _disposed = false;
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
// Protected implementation of Dispose pattern.
protected virtual void Dispose(bool disposing)
{
if (_disposed)
return;
if (disposing)
{
Stop();
}
_disposed = true;
}
private Connection(int connectionKey)
{
ConnectionKey = connectionKey;
_leapConnection = IntPtr.Zero;
Frames = new CircularObjectBuffer<Frame>(_frameBufferLength);
_quads = new CircularObjectBuffer<TrackedQuad>(_quadBufferLength);
_imageDataCache = new ObjectPool<ImageData>(_imageBufferLength, false);
_imageRawDataCache = new ObjectPool<ImageData>(_imageBufferLength, false);
}
public void Start()
{
if (!_isRunning)
{
if (_leapConnection == IntPtr.Zero)
{
eLeapRS result = LeapC.CreateConnection(out _leapConnection);
reportAbnormalResults("LeapC CreateConnection call was ", result);
result = LeapC.OpenConnection(_leapConnection);
reportAbnormalResults("LeapC OpenConnection call was ", result);
}
_isRunning = true;
_polster = new Thread(new ThreadStart(this.processMessages));
_polster.IsBackground = true;
_polster.Start();
}
}
public void Stop()
{
_isRunning = false;
}
//Runs on the _polster thread; fills the object queues
private void processMessages()
{
try
{
eLeapRS result;
LeapInit.Dispatch<LeapEventArgs>(this, new LeapEventArgs(LeapEvent.EVENT_INIT));
while (_isRunning)
{
if (_leapConnection != IntPtr.Zero)
{
LEAP_CONNECTION_MESSAGE _msg = new LEAP_CONNECTION_MESSAGE();
uint timeout = 1000;
result = LeapC.PollConnection(_leapConnection, timeout, ref _msg);
reportAbnormalResults("LeapC PollConnection call was ", result);
if (result == eLeapRS.eLeapRS_Success)
{
switch (_msg.type)
{
case eLeapEventType.eLeapEventType_Connection:
LEAP_CONNECTION_EVENT connection_evt = LeapC.PtrToStruct<LEAP_CONNECTION_EVENT>(_msg.eventStructPtr);
handleConnection(ref connection_evt);
break;
case eLeapEventType.eLeapEventType_ConnectionLost:
LEAP_CONNECTION_LOST_EVENT connection_lost_evt = LeapC.PtrToStruct<LEAP_CONNECTION_LOST_EVENT>(_msg.eventStructPtr);
handleConnectionLost(ref connection_lost_evt);
break;
case eLeapEventType.eLeapEventType_Device:
LEAP_DEVICE_EVENT device_evt = LeapC.PtrToStruct<LEAP_DEVICE_EVENT>(_msg.eventStructPtr);
handleDevice(ref device_evt);
break;
case eLeapEventType.eLeapEventType_DeviceLost:
LEAP_DEVICE_EVENT device_lost_evt = LeapC.PtrToStruct<LEAP_DEVICE_EVENT>(_msg.eventStructPtr);
handleLostDevice(ref device_lost_evt);
break;
case eLeapEventType.eLeapEventType_DeviceFailure:
LEAP_DEVICE_FAILURE_EVENT device_failure_evt = LeapC.PtrToStruct<LEAP_DEVICE_FAILURE_EVENT>(_msg.eventStructPtr);
handleFailedDevice(ref device_failure_evt);
break;
case eLeapEventType.eLeapEventType_Tracking:
LEAP_TRACKING_EVENT tracking_evt = LeapC.PtrToStruct<LEAP_TRACKING_EVENT>(_msg.eventStructPtr);
handleTrackingMessage(ref tracking_evt);
break;
case eLeapEventType.eLeapEventType_ImageComplete:
completeCount++;
LEAP_IMAGE_COMPLETE_EVENT image_complete_evt = LeapC.PtrToStruct<LEAP_IMAGE_COMPLETE_EVENT>(_msg.eventStructPtr);
handleImageCompletion(ref image_complete_evt);
break;
case eLeapEventType.eLeapEventType_ImageRequestError:
failedCount++;
LEAP_IMAGE_FRAME_REQUEST_ERROR_EVENT failed_image_evt = LeapC.PtrToStruct<LEAP_IMAGE_FRAME_REQUEST_ERROR_EVENT>(_msg.eventStructPtr);
handleFailedImageRequest(ref failed_image_evt);
break;
case eLeapEventType.eLeapEventType_TrackedQuad:
LEAP_TRACKED_QUAD_EVENT quad_evt = LeapC.PtrToStruct<LEAP_TRACKED_QUAD_EVENT>(_msg.eventStructPtr);
handleQuadMessage(ref quad_evt);
break;
case eLeapEventType.eLeapEventType_LogEvent:
LEAP_LOG_EVENT log_evt = LeapC.PtrToStruct<LEAP_LOG_EVENT>(_msg.eventStructPtr);
reportLogMessage(ref log_evt);
break;
case eLeapEventType.eLeapEventType_PolicyChange:
LEAP_POLICY_EVENT policy_evt = LeapC.PtrToStruct<LEAP_POLICY_EVENT>(_msg.eventStructPtr);
handlePolicyChange(ref policy_evt);
break;
case eLeapEventType.eLeapEventType_ConfigChange:
LEAP_CONFIG_CHANGE_EVENT config_change_evt = LeapC.PtrToStruct<LEAP_CONFIG_CHANGE_EVENT>(_msg.eventStructPtr);
handleConfigChange(ref config_change_evt);
break;
case eLeapEventType.eLeapEventType_ConfigResponse:
handleConfigResponse(ref _msg);
break;
default:
//discard unknown message types
Logger.Log("Unhandled message type " + Enum.GetName(typeof(eLeapEventType), _msg.type));
break;
} //switch on _msg.type
} // if valid _msg.type
else if (result == eLeapRS.eLeapRS_NotConnected)
{
this.LeapConnectionLost.Dispatch<ConnectionLostEventArgs>(this, new ConnectionLostEventArgs());
result = LeapC.CreateConnection(out _leapConnection);
reportAbnormalResults("LeapC CreateConnection call was ", result);
result = LeapC.OpenConnection(_leapConnection);
reportAbnormalResults("LeapC OpenConnection call was ", result);
}
} // if have connection handle
} //while running
}
catch (Exception e)
{
Logger.Log("Exception: " + e);
}
}
private void handleTrackingMessage(ref LEAP_TRACKING_EVENT trackingMsg)
{
Frame newFrame = frameFactory.makeFrame(ref trackingMsg);
if (_trackedQuadsAreEnabled)
newFrame.TrackedQuad = this.findTrackQuadForFrame(newFrame.Id);
Frames.Put(newFrame);
this.LeapFrame.Dispatch<FrameEventArgs>(this, new FrameEventArgs(newFrame));
}
int requestCount = 0;
int completeCount = 0;
int failedCount = 0;
public Image RequestImages(Int64 frameId, Image.ImageType imageType)
{
ImageData imageData;
int bufferSize = 0;
if (imageType == Image.ImageType.DEFAULT)
{
imageData = _imageDataCache.CheckOut();
imageData.type = eLeapImageType.eLeapImageType_Default;
bufferSize = (int)_standardImageBufferSize;
}
else {
imageData = _imageRawDataCache.CheckOut();
imageData.type = eLeapImageType.eLeapImageType_Raw;
bufferSize = (int)_standardRawBufferSize;
}
if (imageData.pixelBuffer == null || imageData.pixelBuffer.Length != bufferSize)
{
imageData.pixelBuffer = new byte[bufferSize];
}
imageData.frame_id = frameId;
return RequestImages(imageData);
}
public Image RequestImages(Int64 frameId, Image.ImageType imageType, byte[] buffer)
{
ImageData imageData = new ImageData();
if (imageType == Image.ImageType.DEFAULT)
imageData.type = eLeapImageType.eLeapImageType_Default;
else
imageData.type = eLeapImageType.eLeapImageType_Raw;
imageData.frame_id = frameId;
imageData.pixelBuffer = buffer;
return RequestImages(imageData);
}
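// Usage sketch (hedged; "connection" and "newFrame" are illustrative names): request
// the default image pair for a frame that was just received. The returned Image is
// completed asynchronously and LeapImageReady fires once the pixel data is filled in.
//
//     Image img = connection.RequestImages(newFrame.Id, Image.ImageType.DEFAULT);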
private Image RequestImages(ImageData imageData)
{
requestCount++;
LEAP_IMAGE_FRAME_DESCRIPTION imageSpecifier = new LEAP_IMAGE_FRAME_DESCRIPTION();
imageSpecifier.frame_id = imageData.frame_id;
imageSpecifier.type = imageData.type;
imageSpecifier.pBuffer = imageData.getPinnedHandle();
imageSpecifier.buffer_len = (ulong)imageData.pixelBuffer.LongLength;
LEAP_IMAGE_FRAME_REQUEST_TOKEN token;
eLeapRS result = LeapC.RequestImages(_leapConnection, ref imageSpecifier, out token);
if (result == eLeapRS.eLeapRS_Success)
{
imageData.isComplete = false;
imageData.index = token.requestID;
Image futureImage = new Image(imageData);
lock (lockPendingImageList)
{
_pendingImageRequests[token.requestID] = new ImageReference(futureImage, imageData, LeapC.GetNow());
}
return futureImage;
}
else {
imageData.unPinHandle();
reportAbnormalResults("LeapC Image Request call was ", result);
return Image.Invalid;
}
}
private object lockPendingImageList = new object();
private void handleImageCompletion(ref LEAP_IMAGE_COMPLETE_EVENT imageMsg)
{
LEAP_IMAGE_PROPERTIES props = LeapC.PtrToStruct<LEAP_IMAGE_PROPERTIES>(imageMsg.properties);
ImageReference pendingImage = null;
lock (lockPendingImageList)
{
_pendingImageRequests.TryGetValue(imageMsg.token.requestID, out pendingImage);
}
if (pendingImage != null)
{
//Update distortion data, if changed
if ((_currentDistortionData.Version != imageMsg.matrix_version) || !_currentDistortionData.IsValid)
{
_currentDistortionData = new DistortionData();
_currentDistortionData.Version = imageMsg.matrix_version;
_currentDistortionData.Width = LeapC.DistortionSize; //fixed value for now
_currentDistortionData.Height = LeapC.DistortionSize; //fixed value for now
if (_currentDistortionData.Data == null || _currentDistortionData.Data.Length != (2 * _currentDistortionData.Width * _currentDistortionData.Height * 2))
_currentDistortionData.Data = new float[(int)(2 * _currentDistortionData.Width * _currentDistortionData.Height * 2)]; //2 float values per map point
LEAP_DISTORTION_MATRIX matrix = LeapC.PtrToStruct<LEAP_DISTORTION_MATRIX>(imageMsg.distortionMatrix);
Array.Copy(matrix.matrix_data, _currentDistortionData.Data, matrix.matrix_data.Length);
this.LeapDistortionChange.Dispatch<DistortionEventArgs>(this, new DistortionEventArgs(_currentDistortionData));
}
pendingImage.imageData.CompleteImageData(props.type,
props.format,
props.bpp,
props.width,
props.height,
imageMsg.info.timestamp,
imageMsg.info.frame_id,
props.x_offset,
props.y_offset,
props.x_scale,
props.y_scale,
_currentDistortionData,
LeapC.DistortionSize,
imageMsg.matrix_version);
Image completedImage = pendingImage.imageObject;
lock (lockPendingImageList)
{
_pendingImageRequests.Remove(imageMsg.token.requestID);
}
this.LeapImageReady.Dispatch<ImageEventArgs>(this, new ImageEventArgs(completedImage));
}
}
private void handleFailedImageRequest(ref LEAP_IMAGE_FRAME_REQUEST_ERROR_EVENT failed_image_evt)
{
ImageReference pendingImage = null;
lock (lockPendingImageList)
{
_pendingImageRequests.TryGetValue(failed_image_evt.token.requestID, out pendingImage);
}
if (pendingImage != null)
{
pendingImage.imageData.CheckIn();
lock (lockPendingImageList) // use the same lock object as the other _pendingImageRequests accesses
{
_pendingImageRequests.Remove(failed_image_evt.token.requestID);
}
ImageRequestFailedEventArgs errorEventArgs = new ImageRequestFailedEventArgs(failed_image_evt.description.frame_id, pendingImage.imageObject.Type);
switch (failed_image_evt.error)
{
case eLeapImageRequestError.eLeapImageRequestError_InsufficientBuffer:
errorEventArgs.message = "The buffer specified for the request was too small.";
errorEventArgs.reason = Image.RequestFailureReason.Insufficient_Buffer;
if (failed_image_evt.description.type == eLeapImageType.eLeapImageType_Default && _standardImageBufferSize < failed_image_evt.required_buffer_len)
_standardImageBufferSize = failed_image_evt.required_buffer_len;
else if (failed_image_evt.description.type == eLeapImageType.eLeapImageType_Raw && _standardRawBufferSize < failed_image_evt.required_buffer_len)
_standardRawBufferSize = failed_image_evt.required_buffer_len;
break;
case eLeapImageRequestError.eLeapImageRequestError_Unavailable:
errorEventArgs.message = "The image was request too late and is no longer available.";
errorEventArgs.reason = Image.RequestFailureReason.Image_Unavailable;
break;
case eLeapImageRequestError.eLeapImageRequestError_ImagesDisabled:
errorEventArgs.message = "Images are disabled by the current configuration settings.";
errorEventArgs.reason = Image.RequestFailureReason.Images_Disabled;
break;
default:
errorEventArgs.message = "The image request failed for an undetermined reason.";
errorEventArgs.reason = Image.RequestFailureReason.Unknown_Error;
break;
}
errorEventArgs.requiredBufferSize = (long)failed_image_evt.required_buffer_len;
this.LeapImageRequestFailed.Dispatch<ImageRequestFailedEventArgs>(this, errorEventArgs);
}
//Purge old requests
List<UInt32> keys = new List<UInt32>(_pendingImageRequests.Keys);
ImageReference request;
long now = LeapC.GetNow();
for (int k = 0; k < keys.Count; k++)
{
request = _pendingImageRequests[keys[k]];
if ((now - request.Timestamp) > 90000)
{
lock (lockPendingImageList) // use the same lock object as the other _pendingImageRequests accesses
{
_pendingImageRequests.Remove(keys[k]);
}
request.imageData.CheckIn();
}
}
}
private void handleQuadMessage(ref LEAP_TRACKED_QUAD_EVENT quad_evt)
{
TrackedQuad quad = frameFactory.makeQuad(ref quad_evt);
_quads.Put(quad);
this.LeapTrackedQuad.Dispatch<TrackedQuadEventArgs>(this, new TrackedQuadEventArgs(quad));
}
private void handleConnection(ref LEAP_CONNECTION_EVENT connectionMsg)
{
//TODO update connection on CONNECTION_EVENT
this.LeapConnection.Dispatch<ConnectionEventArgs>(this, new ConnectionEventArgs()); //TODO Meaningful Connection event args
}
private void handleConnectionLost(ref LEAP_CONNECTION_LOST_EVENT connectionMsg)
{
//TODO update connection on CONNECTION_LOST_EVENT
this.LeapConnectionLost.Dispatch<ConnectionLostEventArgs>(this, new ConnectionLostEventArgs()); //TODO Meaningful ConnectionLost event args
this.Stop();
}
private void handleDevice(ref LEAP_DEVICE_EVENT deviceMsg)
{
IntPtr deviceHandle = deviceMsg.device.handle;
if (deviceHandle != IntPtr.Zero)
{
IntPtr device;
eLeapRS result = LeapC.OpenDevice(deviceMsg.device, out device);
LEAP_DEVICE_INFO deviceInfo = new LEAP_DEVICE_INFO();
uint defaultLength = 14;
deviceInfo.serial_length = defaultLength;
deviceInfo.serial = Marshal.AllocCoTaskMem((int)defaultLength);
deviceInfo.size = (uint)Marshal.SizeOf(deviceInfo);
result = LeapC.GetDeviceInfo(device, out deviceInfo);
if (result == eLeapRS.eLeapRS_InsufficientBuffer)
{
Marshal.FreeCoTaskMem(deviceInfo.serial);
deviceInfo.serial = Marshal.AllocCoTaskMem((int)deviceInfo.serial_length);
deviceInfo.size = (uint)Marshal.SizeOf(deviceInfo);
result = LeapC.GetDeviceInfo(deviceHandle, out deviceInfo);
}
if (result == eLeapRS.eLeapRS_Success)
{
Device apiDevice = new Device(deviceHandle,
deviceInfo.h_fov, //radians
deviceInfo.v_fov, //radians
deviceInfo.range / 1000, //to mm
deviceInfo.baseline / 1000, //to mm
(deviceInfo.caps == (UInt32)eLeapDeviceCaps.eLeapDeviceCaps_Embedded),
(deviceInfo.status == (UInt32)eLeapDeviceStatus.eLeapDeviceStatus_Streaming),
Marshal.PtrToStringAnsi(deviceInfo.serial));
Marshal.FreeCoTaskMem(deviceInfo.serial);
_devices.AddOrUpdate(apiDevice);
this.LeapDevice.Dispatch(this, new DeviceEventArgs(apiDevice));
}
}
}
private void handleLostDevice(ref LEAP_DEVICE_EVENT deviceMsg)
{
Device lost = _devices.FindDeviceByHandle(deviceMsg.device.handle);
if (lost != null)
{
_devices.Remove(lost);
this.LeapDeviceLost.Dispatch(this, new DeviceEventArgs(lost));
}
}
private void handleFailedDevice(ref LEAP_DEVICE_FAILURE_EVENT deviceMsg)
{
string failureMessage;
string failedSerialNumber = "Unavailable";
switch (deviceMsg.status)
{
case eLeapDeviceStatus.eLeapDeviceStatus_BadCalibration:
failureMessage = "Bad Calibration. Device failed because of a bad calibration record.";
break;
case eLeapDeviceStatus.eLeapDeviceStatus_BadControl:
failureMessage = "Bad Control Interface. Device failed because of a USB control interface error.";
break;
case eLeapDeviceStatus.eLeapDeviceStatus_BadFirmware:
failureMessage = "Bad Firmware. Device failed because of a firmware error.";
break;
case eLeapDeviceStatus.eLeapDeviceStatus_BadTransport:
failureMessage = "Bad Transport. Device failed because of a USB communication error.";
break;
default:
failureMessage = "Device failed for an unknown reason";
break;
}
Device failed = _devices.FindDeviceByHandle(deviceMsg.hDevice);
if (failed != null)
{
_devices.Remove(failed);
}
this.LeapDeviceFailure.Dispatch<DeviceFailureEventArgs>(this,
new DeviceFailureEventArgs((uint)deviceMsg.status, failureMessage, failedSerialNumber));
}
private void handleConfigChange(ref LEAP_CONFIG_CHANGE_EVENT configEvent)
{
string config_key = "";
_configRequests.TryGetValue(configEvent.requestId, out config_key);
if (config_key != null)
_configRequests.Remove(configEvent.requestId);
this.LeapConfigChange.Dispatch<ConfigChangeEventArgs>(this,
new ConfigChangeEventArgs(config_key, configEvent.status != 0, configEvent.requestId));
}
private void handleConfigResponse(ref LEAP_CONNECTION_MESSAGE configMsg)
{
LEAP_CONFIG_RESPONSE_EVENT config_response_evt = LeapC.PtrToStruct<LEAP_CONFIG_RESPONSE_EVENT>(configMsg.eventStructPtr);
string config_key = "";
_configRequests.TryGetValue(config_response_evt.requestId, out config_key);
if (config_key != null)
_configRequests.Remove (config_response_evt.requestId);
Config.ValueType dataType;
object value;
uint requestId = config_response_evt.requestId;
if (config_response_evt.value.type != eLeapValueType.eLeapValueType_String)
{
switch (config_response_evt.value.type)
{
case eLeapValueType.eLeapValueType_Boolean:
dataType = Config.ValueType.TYPE_BOOLEAN;
value = config_response_evt.value.boolValue;
break;
case eLeapValueType.eLeapValueType_Int32:
dataType = Config.ValueType.TYPE_INT32;
value = config_response_evt.value.intValue;
break;
case eLeapValueType.eLeapValueType_Float:
dataType = Config.ValueType.TYPE_FLOAT;
value = config_response_evt.value.floatValue;
break;
default:
dataType = Config.ValueType.TYPE_UNKNOWN;
value = new object();
break;
}
}
else {
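//String values are not returned inline; re-read the event pointer using the
//reference-type layout so the string payload can be marshaled.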
LEAP_CONFIG_RESPONSE_EVENT_WITH_REF_TYPE config_ref_value =
LeapC.PtrToStruct<LEAP_CONFIG_RESPONSE_EVENT_WITH_REF_TYPE>(configMsg.eventStructPtr);
dataType = Config.ValueType.TYPE_STRING;
value = config_ref_value.value.stringValue;
}
SetConfigResponseEventArgs args = new SetConfigResponseEventArgs(config_key, dataType, value, requestId);
this.LeapConfigResponse.Dispatch<SetConfigResponseEventArgs>(this, args);
}
private void reportLogMessage(ref LEAP_LOG_EVENT logMsg)
{
this.LeapLogEvent.Dispatch<LogEventArgs>(this, new LogEventArgs(publicSeverity(logMsg.severity), logMsg.timestamp, logMsg.message));
}
private MessageSeverity publicSeverity(eLeapLogSeverity leapCSeverity)
{
switch (leapCSeverity)
{
case eLeapLogSeverity.eLeapLogSeverity_Unknown:
return MessageSeverity.MESSAGE_UNKNOWN;
case eLeapLogSeverity.eLeapLogSeverity_Information:
return MessageSeverity.MESSAGE_INFORMATION;
case eLeapLogSeverity.eLeapLogSeverity_Warning:
return MessageSeverity.MESSAGE_WARNING;
case eLeapLogSeverity.eLeapLogSeverity_Critical:
return MessageSeverity.MESSAGE_CRITICAL;
default:
return MessageSeverity.MESSAGE_UNKNOWN;
}
}
private void handlePolicyChange(ref LEAP_POLICY_EVENT policyMsg)
{
this.LeapPolicyChange.Dispatch<PolicyEventArgs>(this, new PolicyEventArgs(policyMsg.current_policy, _activePolicies));
_activePolicies = policyMsg.current_policy;
if (_activePolicies != _requestedPolicies)
{
// This can happen when the relevant config setting is turned off, or when this policy
// change event belongs to an earlier SetPolicy call that has since been superseded by
// another SetPolicy request.
//TODO handle failure to set desired policy -- maybe a PolicyDenied event
}
}
public void SetPolicy(Controller.PolicyFlag policy)
{
UInt64 setFlags = (ulong)flagForPolicy(policy);
_requestedPolicies = _requestedPolicies | setFlags;
setFlags = _requestedPolicies;
UInt64 clearFlags = ~_requestedPolicies; //inverse of desired policies
eLeapRS result = LeapC.SetPolicyFlags(_leapConnection, setFlags, clearFlags);
reportAbnormalResults("LeapC SetPolicyFlags call was ", result);
}
public void ClearPolicy(Controller.PolicyFlag policy)
{
UInt64 clearFlags = (ulong)flagForPolicy(policy);
_requestedPolicies = _requestedPolicies & ~clearFlags;
eLeapRS result = LeapC.SetPolicyFlags(_leapConnection, 0, clearFlags);
reportAbnormalResults("LeapC SetPolicyFlags call was ", result);
}
private eLeapPolicyFlag flagForPolicy(Controller.PolicyFlag singlePolicy)
{
switch (singlePolicy)
{
case Controller.PolicyFlag.POLICY_BACKGROUND_FRAMES:
return eLeapPolicyFlag.eLeapPolicyFlag_BackgroundFrames;
case Controller.PolicyFlag.POLICY_OPTIMIZE_HMD:
return eLeapPolicyFlag.eLeapPolicyFlag_OptimizeHMD;
case Controller.PolicyFlag.POLICY_DEFAULT:
return 0;
default:
return 0;
}
}
/**
* Gets the active setting for a specific policy.
*
* Keep in mind that setting a policy flag is asynchronous, so changes are
* not effective immediately after calling SetPolicy(). In addition, a
* policy request can be declined by the user. You should always set the
* policy flags required by your application at startup and check that the
* policy change request was successful after an appropriate interval.
*
* If the controller object is not connected to the Leap Motion software, then the default
* state for the selected policy is returned.
*
* \include Controller_isPolicySet.txt
*
* @param policy A PolicyFlag value indicating the policy to query.
* @returns A boolean indicating whether the specified policy has been set.
* @since 2.1.6
*/
public bool IsPolicySet(Controller.PolicyFlag policy)
{
UInt64 policyToCheck = (ulong)flagForPolicy(policy);
return (_activePolicies & policyToCheck) == policyToCheck;
}
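// Illustrative usage only (not part of the original source): because policy changes are
// asynchronous and can be declined, request the policy at startup and check IsPolicySet
// later. The `connection` variable below is an assumed instance of this class.
//
//   connection.SetPolicy(Controller.PolicyFlag.POLICY_BACKGROUND_FRAMES);
//   // ...after giving the service time to respond:
//   if (!connection.IsPolicySet(Controller.PolicyFlag.POLICY_BACKGROUND_FRAMES))
//   {
//       // the request was declined or has not been applied yet
//   }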
/**
* Returns a timestamp value as close as possible to the current time.
* Values are in microseconds, as with all the other timestamp values.
*
* @since 2.2.7
*
*/
public long Now()
{
return LeapC.GetNow();
}
public uint GetConfigValue(string config_key)
{
uint requestId = 0;
if (_leapConnection != IntPtr.Zero) { //Test IsServiceConnected (when it works)
eLeapRS result = LeapC.RequestConfigValue (_leapConnection, config_key, out requestId);
reportAbnormalResults ("LeapC RequestConfigValue call was ", result);
_configRequests [requestId] = config_key;
}
return requestId;
}
public uint SetConfigValue<T>(string config_key, T value) where T : IConvertible
{
uint requestId = 0;
eLeapRS result;
Type dataType = value.GetType();
if (dataType == typeof(bool))
{
result = LeapC.SaveConfigValue(_leapConnection, config_key, Convert.ToBoolean(value), out requestId);
}
else if (dataType == typeof(Int32))
{
result = LeapC.SaveConfigValue(_leapConnection, config_key, Convert.ToInt32(value), out requestId);
}
else if (dataType == typeof(float))
{
result = LeapC.SaveConfigValue(_leapConnection, config_key, Convert.ToSingle(value), out requestId);
}
else if (dataType == typeof(string))
{
result = LeapC.SaveConfigValue(_leapConnection, config_key, Convert.ToString(value), out requestId);
}
else {
throw new ArgumentException("Only boolean, Int32, float, and string types are supported.");
}
reportAbnormalResults("LeapC SaveConfigValue call was ", result);
_configRequests [requestId] = config_key;
return requestId;
}
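// Illustrative usage only (assumed, not from the original source): both calls return a
// request id; the result arrives later through the LeapConfigResponse/LeapConfigChange
// dispatches. The `connection` variable and the config key are hypothetical.
//
//   uint setId = connection.SetConfigValue("some_config_key", true);
//   uint getId = connection.GetConfigValue("some_config_key");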
/**
* Reports whether your application has a connection to the Leap Motion
* daemon/service. Can be true even if the Leap Motion hardware is not available.
* @since 1.2
*/
public bool IsServiceConnected
{
get
{
if (_leapConnection == IntPtr.Zero)
return false;
LEAP_CONNECTION_INFO pInfo = new LEAP_CONNECTION_INFO();
pInfo.size = (uint)Marshal.SizeOf (pInfo);
eLeapRS result = LeapC.GetConnectionInfo(_leapConnection, out pInfo);
reportAbnormalResults("LeapC GetConnectionInfo call was ", result);
if (pInfo.status == eLeapConnectionStatus.eLeapConnectionStatus_Connected)
return true;
return false;
}
}
/**
* Reports whether this Controller is connected to the Leap Motion service and
* the Leap Motion hardware is plugged in.
*
* When you first create a Controller object, isConnected() returns false.
* After the controller finishes initializing and connects to the Leap Motion
* software and if the Leap Motion hardware is plugged in, isConnected() returns true.
*
* You can either handle the onConnect event using a Listener instance or
* poll the isConnected() function if you need to wait for your
* application to be connected to the Leap Motion software before performing some other
* operation.
*
* \include Controller_isConnected.txt
* @returns True, if connected; false otherwise.
* @since 1.0
*/
public bool IsConnected
{
get
{
return IsServiceConnected && Devices.Count > 0;
}
}
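// Illustrative usage only (assumed, not from the original source): poll IsConnected when
// a connected device is required before continuing.
//
//   while (!connection.IsConnected)
//   {
//       System.Threading.Thread.Sleep(100);
//   }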
private TrackedQuad findTrackQuadForFrame(long frameId)
{
TrackedQuad quad = null;
for (int q = 0; q < _quads.Count; q++)
{
quad = _quads.Get(q);
if (quad.Id == frameId)
return quad;
if (quad.Id < frameId)
break;
}
return quad; //null
}
public TrackedQuad GetLatestQuad()
{
return _quads.Get(0);
}
/**
* The list of currently attached and recognized Leap Motion controller devices.
*
* The Device objects in the list describe information such as the range and
* tracking volume.
*
* \include Controller_devices.txt
*
* Currently, the Leap Motion Controller only allows a single active device at a time;
* however, multiple devices may be physically attached and listed here. Any active
* devices are guaranteed to be listed first, but the order is not defined beyond that.
*
* @returns The list of Leap Motion controllers.
* @since 1.0
*/
public DeviceList Devices
{
get
{
if (_devices == null)
{
_devices = new DeviceList();
}
return _devices;
}
}
public FailedDeviceList FailedDevices
{
get
{
if (_failedDevices == null)
{
_failedDevices = new FailedDeviceList();
}
return _failedDevices;
}
}
private eLeapRS _lastResult; //Used to avoid repeating the same log message, i.e. for recurring events like timeouts
private void reportAbnormalResults(string context, eLeapRS result)
{
if (result != eLeapRS.eLeapRS_Success &&
result != _lastResult)
{
string msg = context + " " + result;
this.LeapLogEvent.Dispatch<LogEventArgs>(this,
new LogEventArgs(MessageSeverity.MESSAGE_CRITICAL,
LeapC.GetNow(),
msg)
);
}
_lastResult = result;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.IO.PortsTests;
using System.Threading;
using System.Threading.Tasks;
using Legacy.Support;
using Xunit;
namespace System.IO.Ports.Tests
{
public class WriteTimeout_Property : PortsTest
{
//The default number of chars to write with when testing timeout with Write(char[], int, int)
private static readonly int s_DEFAULT_WRITE_CHAR_ARRAY_SIZE = TCSupport.MinimumBlockingByteCount;
//The default number of bytes to write with when testing timeout with Write(byte[], int, int)
private static readonly int s_DEFAULT_WRITE_BYTE_ARRAY_SIZE = TCSupport.MinimumBlockingByteCount;
//The amount of time to wait when expecting an infinite timeout
private const int DEFAULT_WAIT_INFINITE_TIMEOUT = 250;
//The maximum acceptable time allowed when a write method should time out immediately
private const int MAX_ACCEPTABLE_ZERO_TIMEOUT = 100;
//The maximum acceptable time allowed when a write method should time out immediately on its first call
private const int MAX_ACCEPTABLE_WARMUP_ZERO_TIMEOUT = 5000;
//The default string to write with when testing timeout with Write(str)
private static readonly string s_DEFAULT_STRING_TO_WRITE = new string('H', TCSupport.MinimumBlockingByteCount);
private const int NUM_TRYS = 5;
private delegate void WriteMethodDelegate(SerialPort com);
private enum ThrowAt { Set, Open };
#region Test Cases
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void WriteTimeout_Default_Write_byte_int_int()
{
Debug.WriteLine("Verifying default WriteTimeout with Write(byte[] buffer, int offset, int count)");
VerifyInfiniteTimeout(Write_byte_int_int, false);
}
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void WriteTimeout_Default_Write_char_int_int()
{
Debug.WriteLine("Verifying default WriteTimeout with Write(char[] buffer, int offset, int count)");
VerifyInfiniteTimeout(Write_char_int_int, false);
}
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void WriteTimeout_Default_Write_str()
{
Debug.WriteLine("Verifying default WriteTimeout with Write(string)");
VerifyInfiniteTimeout(Write_str, false);
}
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void WriteTimeout_Default_WriteLine()
{
Debug.WriteLine("Verifying default WriteTimeout with WriteLine()");
VerifyInfiniteTimeout(WriteLine, false);
}
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void WriteTimeout_Infinite_Write_byte_int_int()
{
Debug.WriteLine("Verifying infinite WriteTimeout with Write(byte[] buffer, int offset, int count)");
VerifyInfiniteTimeout(Write_byte_int_int, true);
}
[ActiveIssue(15961)]
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void WriteTimeout_Infinite_Write_char_int_int()
{
Debug.WriteLine("Verifying infinite WriteTimeout with Write(char[] buffer, int offset, int count)");
VerifyInfiniteTimeout(Write_char_int_int, true);
}
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void WriteTimeout_Infinite_Write_str()
{
Debug.WriteLine("Verifying infinite WriteTimeout with Write(string)");
VerifyInfiniteTimeout(Write_str, true);
}
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void WriteTimeout_Infinite_WriteLine()
{
Debug.WriteLine("Verifying infinite WriteTimeout with WriteLine()");
VerifyInfiniteTimeout(WriteLine, true);
}
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void WriteTimeout_1_Write_byte_int_int_BeforeOpen()
{
Debug.WriteLine("Verifying setting WriteTimeout=1 before Open() with Write(byte[] buffer, int offset, int count)");
Verify1TimeoutBeforeOpen(Write_byte_int_int);
}
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void WriteTimeout_1_Write_char_int_int_BeforeOpen()
{
Debug.WriteLine("Verifying setting WriteTimeout=1 before Open() with Write(char[] buffer, int offset, int count)");
Verify1TimeoutBeforeOpen(Write_char_int_int);
}
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void WriteTimeout_1_Write_str_BeforeOpen()
{
Debug.WriteLine("Verifying 1 WriteTimeout before Open with Write(string)");
Verify1TimeoutBeforeOpen(Write_str);
}
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void WriteTimeout_1_WriteLine_BeforeOpen()
{
Debug.WriteLine("Verifying 1 WriteTimeout before Open with WriteLine()");
Verify1TimeoutBeforeOpen(WriteLine);
}
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void WriteTimeout_1_Write_byte_int_int_AfterOpen()
{
Debug.WriteLine("Verifying setting WriteTimeout=1 after Open() with Write(byte[] buffer, int offset, int count)");
Verify1TimeoutAfterOpen(Write_byte_int_int);
}
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void WriteTimeout_1_Write_char_int_int_AfterOpen()
{
Debug.WriteLine("Verifying setting WriteTimeout=1 after Open() with Write(char[] buffer, int offset, int count)");
Verify1TimeoutAfterOpen(Write_char_int_int);
}
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void WriteTimeout_1_Write_str_AfterOpen()
{
Debug.WriteLine("Verifying 1 WriteTimeout after Open with Write(string)");
Verify1TimeoutAfterOpen(Write_str);
}
[ConditionalFact(nameof(HasOneSerialPort), nameof(HasHardwareFlowControl))]
public void WriteTimeout_1_WriteLine_AfterOpen()
{
Debug.WriteLine("Verifying 1 WriteTimeout after Open with WriteLine()");
Verify1TimeoutAfterOpen(WriteLine);
}
[ConditionalFact(nameof(HasOneSerialPort))]
public void WriteTimeout_Int32MinValue()
{
Debug.WriteLine("Verifying Int32.MinValue WriteTimeout");
VerifyException(int.MinValue, ThrowAt.Set, typeof(ArgumentOutOfRangeException));
}
[ConditionalFact(nameof(HasOneSerialPort))]
public void WriteTimeout_NEG2()
{
Debug.WriteLine("Verifying -2 WriteTimeout");
VerifyException(-2, ThrowAt.Set, typeof(ArgumentOutOfRangeException));
}
#endregion
#region Verification for Test Cases
private void VerifyInfiniteTimeout(WriteMethodDelegate writeMethod, bool setInfiniteTimeout)
{
using (SerialPort com1 = TCSupport.InitFirstSerialPort())
{
SerialPortProperties serPortProp = new SerialPortProperties();
serPortProp.SetAllPropertiesToOpenDefaults();
serPortProp.SetProperty("PortName", TCSupport.LocalMachineSerialInfo.FirstAvailablePortName);
com1.Handshake = Handshake.RequestToSend;
serPortProp.SetProperty("ReadTimeout", 10);
com1.ReadTimeout = 10;
com1.Open();
if (setInfiniteTimeout)
{
com1.WriteTimeout = 500;
com1.WriteTimeout = SerialPort.InfiniteTimeout;
}
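// With RequestToSend handshaking enabled and no CTS asserted by the remote end, the write
// blocks indefinitely; the task should therefore still be running after the wait below.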
Task task = Task.Run(() => writeMethod(com1));
Thread.Sleep(DEFAULT_WAIT_INFINITE_TIMEOUT);
Assert.False(task.IsCompleted, "Task should not have completed while tx is blocked by flow-control");
com1.Handshake = Handshake.None;
TCSupport.WaitForTaskCompletion(task);
com1.DiscardOutBuffer();
// If we're looped-back, then there will be data queued on the receive side which we need to discard
com1.DiscardInBuffer();
serPortProp.VerifyPropertiesAndPrint(com1);
}
}
private void Verify1TimeoutBeforeOpen(WriteMethodDelegate writeMethod)
{
using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
{
com.WriteTimeout = 1;
com.Open();
Verify1Timeout(com, writeMethod);
}
}
private void Verify1TimeoutAfterOpen(WriteMethodDelegate writeMethod)
{
using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
{
com.Open();
com.WriteTimeout = 1;
Verify1Timeout(com, writeMethod);
}
}
private void Verify1Timeout(SerialPort com, WriteMethodDelegate writeMethod)
{
SerialPortProperties serPortProp = new SerialPortProperties();
Stopwatch sw = new Stopwatch();
int actualTime = 0;
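// The first timed write is treated as a warm-up and only has to finish within
// MAX_ACCEPTABLE_WARMUP_ZERO_TIMEOUT; the following NUM_TRYS writes are averaged and must
// stay under MAX_ACCEPTABLE_ZERO_TIMEOUT.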
serPortProp.SetAllPropertiesToOpenDefaults();
serPortProp.SetProperty("PortName", TCSupport.LocalMachineSerialInfo.FirstAvailablePortName);
serPortProp.SetProperty("WriteTimeout", 1);
serPortProp.SetProperty("Handshake", Handshake.RequestToSend);
com.Handshake = Handshake.RequestToSend;
serPortProp.SetProperty("ReadTimeout", 1000);
com.ReadTimeout = 1000;
Thread.CurrentThread.Priority = ThreadPriority.Highest;
sw.Start();
writeMethod(com);
sw.Stop();
if (MAX_ACCEPTABLE_WARMUP_ZERO_TIMEOUT < sw.ElapsedMilliseconds)
{
Fail("Err_2570ajdlkj!!! Write Method {0} timed out in {1}ms expected something less then {2}ms", writeMethod.Method.Name, sw.ElapsedMilliseconds, MAX_ACCEPTABLE_WARMUP_ZERO_TIMEOUT);
}
sw.Reset();
for (int i = 0; i < NUM_TRYS; i++)
{
sw.Start();
writeMethod(com);
sw.Stop();
actualTime += (int)sw.ElapsedMilliseconds;
sw.Reset();
}
Thread.CurrentThread.Priority = ThreadPriority.Normal;
actualTime /= NUM_TRYS;
if (MAX_ACCEPTABLE_ZERO_TIMEOUT < actualTime)
{
Fail("ERROR!!! Write Method {0} timed out in {1}ms expected something less then {2}ms", writeMethod.Method.Name, actualTime, MAX_ACCEPTABLE_ZERO_TIMEOUT);
}
serPortProp.VerifyPropertiesAndPrint(com);
}
private void VerifyException(int writeTimeout, ThrowAt throwAt, Type expectedException)
{
using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
{
VerifyExceptionAtOpen(com, writeTimeout, throwAt, expectedException);
if (com.IsOpen)
com.Close();
VerifyExceptionAfterOpen(com, writeTimeout, expectedException);
}
}
private void VerifyExceptionAtOpen(SerialPort com, int writeTimeout, ThrowAt throwAt, Type expectedException)
{
int origWriteTimeout = com.WriteTimeout;
SerialPortProperties serPortProp = new SerialPortProperties();
serPortProp.SetAllPropertiesToDefaults();
serPortProp.SetProperty("PortName", TCSupport.LocalMachineSerialInfo.FirstAvailablePortName);
if (ThrowAt.Open == throwAt)
serPortProp.SetProperty("WriteTimeout", writeTimeout);
try
{
com.WriteTimeout = writeTimeout;
if (ThrowAt.Open == throwAt)
com.Open();
if (null != expectedException)
{
Fail("ERROR!!! Expected Open() to throw {0} and nothing was thrown", expectedException);
}
}
catch (Exception e)
{
if (null == expectedException)
{
Fail("ERROR!!! Expected Open() NOT to throw an exception and {0} was thrown", e.GetType());
}
else if (e.GetType() != expectedException)
{
Fail("ERROR!!! Expected Open() throw {0} and {1} was thrown", expectedException, e.GetType());
}
}
serPortProp.VerifyPropertiesAndPrint(com);
com.WriteTimeout = origWriteTimeout;
}
private void VerifyExceptionAfterOpen(SerialPort com, int writeTimeout, Type expectedException)
{
SerialPortProperties serPortProp = new SerialPortProperties();
com.Open();
serPortProp.SetAllPropertiesToOpenDefaults();
serPortProp.SetProperty("PortName", TCSupport.LocalMachineSerialInfo.FirstAvailablePortName);
try
{
com.WriteTimeout = writeTimeout;
if (null != expectedException)
{
Fail("ERROR!!! Expected setting the WriteTimeout after Open() to throw {0} and nothing was thrown", expectedException);
}
}
catch (Exception e)
{
if (null == expectedException)
{
Fail("ERROR!!! Expected setting the WriteTimeout after Open() NOT to throw an exception and {0} was thrown", e.GetType());
}
else if (e.GetType() != expectedException)
{
Fail("ERROR!!! Expected setting the WriteTimeout after Open() throw {0} and {1} was thrown", expectedException, e.GetType());
}
}
serPortProp.VerifyPropertiesAndPrint(com);
}
private void Write_byte_int_int(SerialPort com)
{
try
{
com.Write(new byte[s_DEFAULT_WRITE_BYTE_ARRAY_SIZE], 0, s_DEFAULT_WRITE_BYTE_ARRAY_SIZE);
}
catch (TimeoutException)
{
}
}
private void Write_char_int_int(SerialPort com)
{
try
{
com.Write(new char[s_DEFAULT_WRITE_CHAR_ARRAY_SIZE], 0, s_DEFAULT_WRITE_CHAR_ARRAY_SIZE);
}
catch (TimeoutException)
{
}
}
private void Write_str(SerialPort com)
{
try
{
com.Write(s_DEFAULT_STRING_TO_WRITE);
}
catch (TimeoutException)
{
}
}
private void WriteLine(SerialPort com)
{
try
{
com.WriteLine(s_DEFAULT_STRING_TO_WRITE);
}
catch (TimeoutException)
{
}
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
private static void ShiftRightLogicalInt3232()
{
var test = new ImmUnaryOpTest__ShiftRightLogicalInt3232();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
// Validates basic functionality works, using LoadAligned
test.RunBasicScenario_LoadAligned();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
// Validates calling via reflection works, using LoadAligned
test.RunReflectionScenario_LoadAligned();
}
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (Sse2.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
// Validates passing a local works, using LoadAligned
test.RunLclVarScenario_LoadAligned();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
// Validates passing an instance member of a class works
test.RunClassFldScenario();
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class ImmUnaryOpTest__ShiftRightLogicalInt3232
{
private struct TestStruct
{
public Vector128<Int32> _fld;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref testStruct._fld), ref Unsafe.As<Int32, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector128<Int32>>());
return testStruct;
}
public void RunStructFldScenario(ImmUnaryOpTest__ShiftRightLogicalInt3232 testClass)
{
var result = Sse2.ShiftRightLogical(_fld, 32);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld, testClass._dataTable.outArrayPtr);
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Int32>>() / sizeof(Int32);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<Int32>>() / sizeof(Int32);
private static Int32[] _data = new Int32[Op1ElementCount];
private static Vector128<Int32> _clsVar;
private Vector128<Int32> _fld;
private SimpleUnaryOpTest__DataTable<Int32, Int32> _dataTable;
static ImmUnaryOpTest__ShiftRightLogicalInt3232()
{
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref _clsVar), ref Unsafe.As<Int32, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector128<Int32>>());
}
public ImmUnaryOpTest__ShiftRightLogicalInt3232()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetInt32(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int32>, byte>(ref _fld), ref Unsafe.As<Int32, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector128<Int32>>());
for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetInt32(); }
_dataTable = new SimpleUnaryOpTest__DataTable<Int32, Int32>(_data, new Int32[RetElementCount], LargestVectorSize);
}
public bool IsSupported => Sse2.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Sse2.ShiftRightLogical(
Unsafe.Read<Vector128<Int32>>(_dataTable.inArrayPtr),
32
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = Sse2.ShiftRightLogical(
Sse2.LoadVector128((Int32*)(_dataTable.inArrayPtr)),
32
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));
var result = Sse2.ShiftRightLogical(
Sse2.LoadAlignedVector128((Int32*)(_dataTable.inArrayPtr)),
32
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(Sse2).GetMethod(nameof(Sse2.ShiftRightLogical), new Type[] { typeof(Vector128<Int32>), typeof(byte) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<Int32>>(_dataTable.inArrayPtr),
(byte)32
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int32>)(result));
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(Sse2).GetMethod(nameof(Sse2.ShiftRightLogical), new Type[] { typeof(Vector128<Int32>), typeof(byte) })
.Invoke(null, new object[] {
Sse2.LoadVector128((Int32*)(_dataTable.inArrayPtr)),
(byte)32
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int32>)(result));
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));
var result = typeof(Sse2).GetMethod(nameof(Sse2.ShiftRightLogical), new Type[] { typeof(Vector128<Int32>), typeof(byte) })
.Invoke(null, new object[] {
Sse2.LoadAlignedVector128((Int32*)(_dataTable.inArrayPtr)),
(byte)32
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Int32>)(result));
ValidateResult(_dataTable.inArrayPtr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Sse2.ShiftRightLogical(
_clsVar,
32
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var firstOp = Unsafe.Read<Vector128<Int32>>(_dataTable.inArrayPtr);
var result = Sse2.ShiftRightLogical(firstOp, 32);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(firstOp, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var firstOp = Sse2.LoadVector128((Int32*)(_dataTable.inArrayPtr));
var result = Sse2.ShiftRightLogical(firstOp, 32);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(firstOp, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));
var firstOp = Sse2.LoadAlignedVector128((Int32*)(_dataTable.inArrayPtr));
var result = Sse2.ShiftRightLogical(firstOp, 32);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(firstOp, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new ImmUnaryOpTest__ShiftRightLogicalInt3232();
var result = Sse2.ShiftRightLogical(test._fld, 32);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld, _dataTable.outArrayPtr);
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Sse2.ShiftRightLogical(_fld, 32);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Sse2.ShiftRightLogical(test._fld, 32);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector128<Int32> firstOp, void* result, [CallerMemberName] string method = "")
{
Int32[] inArray = new Int32[Op1ElementCount];
Int32[] outArray = new Int32[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Int32, byte>(ref inArray[0]), firstOp);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Int32>>());
ValidateResult(inArray, outArray, method);
}
private void ValidateResult(void* firstOp, void* result, [CallerMemberName] string method = "")
{
Int32[] inArray = new Int32[Op1ElementCount];
Int32[] outArray = new Int32[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref inArray[0]), ref Unsafe.AsRef<byte>(firstOp), (uint)Unsafe.SizeOf<Vector128<Int32>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int32, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Int32>>());
ValidateResult(inArray, outArray, method);
}
private void ValidateResult(Int32[] firstOp, Int32[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
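// A logical right shift of 32-bit lanes by an immediate of 32 (the full element width)
// clears every lane, so the expected result is all zeros.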
if (0 != result[0])
{
succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if (0 != result[i])
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Sse2)}.{nameof(Sse2.ShiftRightLogical)}<Int32>(Vector128<Int32><9>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" firstOp: ({string.Join(", ", firstOp)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| |
using System;
using Newtonsoft.Json;
namespace PayStack.Net
{
public class Charge
{
public class Authorization
{
[JsonProperty("authorization_code")]
public string AuthorizationCode { get; set; }
[JsonProperty("bin")]
public string Bin { get; set; }
[JsonProperty("last4")]
public string Last4 { get; set; }
[JsonProperty("exp_month")]
public string ExpMonth { get; set; }
[JsonProperty("exp_year")]
public string ExpYear { get; set; }
[JsonProperty("channel")]
public string Channel { get; set; }
[JsonProperty("card_type")]
public string CardType { get; set; }
[JsonProperty("bank")]
public string Bank { get; set; }
[JsonProperty("country_code")]
public string CountryCode { get; set; }
[JsonProperty("brand")]
public string Brand { get; set; }
[JsonProperty("reusable")]
public bool Reusable { get; set; }
[JsonProperty("signature")]
public string Signature { get; set; }
}
public class Customer
{
[JsonProperty("id")]
public int Id { get; set; }
[JsonProperty("first_name")]
public object FirstName { get; set; }
[JsonProperty("last_name")]
public object LastName { get; set; }
[JsonProperty("email")]
public string Email { get; set; }
[JsonProperty("customer_code")]
public string CustomerCode { get; set; }
[JsonProperty("phone")]
public object Phone { get; set; }
[JsonProperty("metadata")]
public object Metadata { get; set; }
[JsonProperty("risk_action")]
public string RiskAction { get; set; }
}
public class Data
{
[JsonProperty("amount")]
public int Amount { get; set; }
[JsonProperty("currency")]
public string Currency { get; set; }
[JsonProperty("transaction_date")]
public DateTime TransactionDate { get; set; }
[JsonProperty("status")]
public string Status { get; set; }
[JsonProperty("url")]
public string Url { get; set; }
[JsonProperty("reference")]
public string Reference { get; set; }
[JsonProperty("domain")]
public string Domain { get; set; }
[JsonProperty("metadata")]
public Metadata Metadata { get; set; }
[JsonProperty("gateway_response")]
public string GatewayResponse { get; set; }
[JsonProperty("message")]
public string Message { get; set; }
[JsonProperty("display_text")]
public string DisplayText { get; set; }
[JsonProperty("channel")]
public string Channel { get; set; }
[JsonProperty("ip_address")]
public string IpAddress { get; set; }
[JsonProperty("log")]
public object Log { get; set; }
[JsonProperty("fees")]
public int Fees { get; set; }
[JsonProperty("authorization")]
public Authorization Authorization { get; set; }
[JsonProperty("customer")]
public Customer Customer { get; set; }
[JsonProperty("plan")]
public dynamic Plan { get; set; }
}
}
public class Bank
{
[JsonProperty("code")]
public string Code { get; set; }
[JsonProperty("account_number")]
public string AccountNumber { get; set; }
}
public class ChargeRequest : RequestMetadataExtender
{
public string Reference { get; set; }
[JsonProperty("device_id")]
public string DeviceId { get; set; }
}
public class BankChargeRequest : ChargeRequest
{
[JsonProperty("email")]
public string Email { get; set; }
[JsonProperty("amount")]
public string Amount { get; set; }
[JsonProperty("bank")]
public Bank Bank { get; set; }
[JsonProperty("birthday")]
public string Birthday { get; set; }
}
public class Card
{
[JsonProperty("cvv")]
public string Cvv { get; set; }
[JsonProperty("number")]
public string Number { get; set; }
[JsonProperty("expiry_month")]
public string ExpiryMonth { get; set; }
[JsonProperty("expiry_year")]
public string ExpiryYear { get; set; }
}
public class CardChargeRequest : ChargeRequest
{
[JsonProperty("email")]
public string Email { get; set; }
[JsonProperty("amount")]
public string Amount { get; set; }
[JsonProperty("card")]
public Card Card { get; set; }
[JsonProperty("pin")]
public string Pin { get; set; }
}
public class AuthorizationCodeChargeRequest : ChargeRequest
{
[JsonProperty("email")]
public string Email { get; set; }
[JsonProperty("amount")]
public string Amount { get; set; }
[JsonProperty("authorization_code")]
public string AuthorizationCode { get; set; }
[JsonProperty("pin")]
public string Pin { get; set; }
}
public class ChargeResponse : HasRawResponse
{
[JsonProperty("status")]
public bool Status { get; set; }
[JsonProperty("message")]
public string Message { get; set; }
[JsonProperty("data")]
public Charge.Data Data { get; set; }
}
}
}
| |
using System;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading.Tasks;
using NUnit.Framework;
using static PeanutButter.RandomGenerators.RandomValueGen;
using NExpect;
using PeanutButter.SimpleHTTPServer;
using static NExpect.Expectations;
// ReSharper disable ExpressionIsAlwaysNull
// ReSharper disable RedundantArgumentDefaultValue
// ReSharper disable MemberCanBePrivate.Global
// ReSharper disable UnusedAutoPropertyAccessor.Global
namespace PeanutButter.Utils.Tests
{
[TestFixture]
public class TestStreamExtensions
{
[TestFixture]
public class ReadingAllBytes
{
[Test]
public void OperatingOnNullStream_ShouldReturnNull()
{
//---------------Set up test pack-------------------
Stream src = null;
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
var result = src.ReadAllBytes();
//---------------Test Result -----------------------
Expect(result).To.Be.Null();
}
[Test]
public void OperatingOnStreamWithNoData_ShouldReturnEmptyArray()
{
//---------------Set up test pack-------------------
using var memStream = new MemoryStream();
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
var result = memStream.ReadAllBytes();
//---------------Test Result -----------------------
Expect(result).To.Be.Empty();
}
[Test]
public void OperatingOnStreamWithData_ShouldReturnAllData()
{
//---------------Set up test pack-------------------
var expected = GetRandomBytes();
using var memStream = new MemoryStream(expected);
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
var result = memStream.ReadAllBytes();
//---------------Test Result -----------------------
Expect(result).To.Equal(expected);
}
[Test]
public void OperatingOnStreamWithData_WhenStreamIsNotAtBeginningAndCanSeek_ShouldReturnAllData()
{
//---------------Set up test pack-------------------
var expected = GetRandomBytes(20, 50);
using var memStream = new MemoryStream(expected);
memStream.Seek(GetRandomInt(1, 10), SeekOrigin.Begin);
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
var result = memStream.ReadAllBytes();
//---------------Test Result -----------------------
Expect(result).To.Equal(expected);
}
[Test]
public void OperatingOnStreamWithData_WhenCannotRewind_ShouldReadRemainingBytes()
{
// Arrange
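// The web response stream cannot seek, so ReadAllBytes must consume from the current
// position onward instead of rewinding to the start.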
var data = GetRandomBytes(100, 1000);
var part1 = new byte[1];
using var server = new HttpServer();
server.ServeFile("/bin.dat", data);
// Act
var req = WebRequest.Create(server.GetFullUrlFor("/bin.dat"));
using var res = req.GetResponse();
using var stream = res.GetResponseStream();
var firstRead = stream.Read(part1, 0, 1);
Expect(firstRead).To.Equal(1);
var remainder = stream.ReadAllBytes();
Expect(remainder.Length).To.Equal(data.Length - 1);
Expect(remainder).To.Equal(
data.Skip(1).ToArray());
// Assert
}
[Test]
public void ShouldBeAbleToReadAllBytesFromWebResponseStream()
{
// Arrange
var expected = GetRandomBytes(1024);
using var server = new HttpServer();
var path = $"/{GetRandomString(2)}";
server.ServeFile(path, expected);
// Act
var req = WebRequest.Create(server.GetFullUrlFor(path));
using var res = req.GetResponse();
using var stream = res.GetResponseStream();
var result = stream.ReadAllBytes();
// Assert
Expect(result).To.Equal(expected);
}
}
[TestFixture]
public class SavingStreamsToFiles
{
[Test]
public void ShouldCreateTheFile()
{
// Arrange
using var tempFile = new AutoTempFile();
var data = GetRandomBytes(1024);
using var src = new MemoryStream(data);
// Act
src.Save(tempFile.Path);
// Assert
var persisted = File.ReadAllBytes(tempFile.Path);
Expect(persisted)
.To.Equal(data);
}
[Test]
public void ShouldOverwriteAnExistingFile()
{
// Arrange
using var tempFile = new AutoTempFile();
var initialData = GetRandomBytes(1024);
var data = GetRandomBytes(1024);
using var src = new MemoryStream(data);
using var initialSource = new MemoryStream(initialData);
// Act
initialSource.Save(tempFile.Path);
src.Save(tempFile.Path);
// Assert
var persisted = File.ReadAllBytes(tempFile.Path);
Expect(persisted)
.To.Equal(data);
}
[Test]
public void ShouldCreateAnyRequiredContainingFolders()
{
// Arrange
using var tempFolder = new AutoTempFolder();
var target = Path.Combine(tempFolder.Path, "level1", "level2", "output.bin");
var data = GetRandomBytes();
using var src = new MemoryStream(data);
// Act
src.Save(target);
// Assert
var persisted = File.ReadAllBytes(target);
Expect(persisted)
.To.Equal(data);
}
}
[TestFixture]
public class SavingStreamsToFilesAsync
{
[Test]
public async Task ShouldCreateTheFile()
{
// Arrange
using var tempFile = new AutoTempFile();
var data = GetRandomBytes(1024);
using var src = new MemoryStream(data);
// Act
await src.SaveAsync(tempFile.Path);
// Assert
var persisted = File.ReadAllBytes(tempFile.Path);
Expect(persisted)
.To.Equal(data);
}
[Test]
public async Task ShouldOverwriteAnExistingFile()
{
// Arrange
using var tempFile = new AutoTempFile();
var initialData = GetRandomBytes(1024);
var data = GetRandomBytes(1024);
using var src = new MemoryStream(data);
using var initialSource = new MemoryStream(initialData);
// Act
await initialSource.SaveAsync(tempFile.Path);
await src.SaveAsync(tempFile.Path);
// Assert
var persisted = File.ReadAllBytes(tempFile.Path);
Expect(persisted)
.To.Equal(data);
}
[Test]
public async Task ShouldCreateAnyRequiredContainingFolders()
{
// Arrange
using var tempFolder = new AutoTempFolder();
var target = Path.Combine(tempFolder.Path, "level1", "level2", "output.bin");
var data = GetRandomBytes();
using var src = new MemoryStream(data);
// Act
await src.SaveAsync(target);
// Assert
var persisted = File.ReadAllBytes(target);
Expect(persisted)
.To.Equal(data);
}
}
[TestFixture]
public class ReadingAllBytesAsync
{
[Test]
public async Task OperatingOnNullStream_ShouldReturnNull()
{
//---------------Set up test pack-------------------
Stream src = null;
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
var result = await src.ReadAllBytesAsync();
//---------------Test Result -----------------------
Expect(result).To.Be.Null();
}
[Test]
public async Task OperatingOnStreamWithNoData_ShouldReturnEmptyArray()
{
//---------------Set up test pack-------------------
using var memStream = new MemoryStream();
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
var result = await memStream.ReadAllBytesAsync();
//---------------Test Result -----------------------
Expect(result).To.Be.Empty();
}
[Test]
public async Task OperatingOnStreamWithData_ShouldReturnAllData()
{
//---------------Set up test pack-------------------
var expected = GetRandomBytes();
using var memStream = new MemoryStream(expected);
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
var result = await memStream.ReadAllBytesAsync();
//---------------Test Result -----------------------
Expect(result).To.Equal(expected);
}
[Test]
public async Task OperatingOnStreamWithData_WhenStreamIsNotAtBeginningAndCanSeek_ShouldReturnAllData()
{
//---------------Set up test pack-------------------
var expected = GetRandomBytes(20, 50);
using var memStream = new MemoryStream(expected);
memStream.Seek(GetRandomInt(1, 10), SeekOrigin.Begin);
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
var result = await memStream.ReadAllBytesAsync();
//---------------Test Result -----------------------
Expect(result).To.Equal(expected);
}
[Test]
public async Task OperatingOnStreamWithData_WhenCannotRewind_ShouldReadRemainingBytes()
{
// Arrange
var data = GetRandomBytes(100, 1000);
var part1 = new byte[1];
using var server = new HttpServer();
server.ServeFile("/bin.dat", data);
// Act
var req = WebRequest.Create(server.GetFullUrlFor("/bin.dat"));
using var res = await req.GetResponseAsync();
using var stream = res.GetResponseStream();
var firstRead = await stream.ReadAsync(part1, 0, 1);
Expect(firstRead).To.Equal(1);
var remainder = await stream.ReadAllBytesAsync();
Expect(remainder.Length).To.Equal(data.Length - 1);
Expect(remainder).To.Equal(
data.Skip(1).ToArray());
// Assert
}
[Test]
public async Task ShouldBeAbleToReadAllBytesAsyncFromWebResponseStream()
{
// Arrange
var expected = GetRandomBytes(1024);
using var server = new HttpServer();
var path = $"/{GetRandomString(2)}";
server.ServeFile(path, expected);
// Act
var req = WebRequest.Create(server.GetFullUrlFor(path));
using var res = await req.GetResponseAsync();
using var stream = res.GetResponseStream();
var result = await stream.ReadAllBytesAsync();
// Assert
Expect(result).To.Equal(expected);
}
}
[TestFixture]
public class WritingBytes
{
[Test]
public void OperatingOnNullStream_ShouldThrowIOException()
{
//---------------Set up test pack-------------------
Stream dst = null;
var expected = GetRandomBytes();
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
Expect(
() => dst.WriteAllBytes(expected)
)
.To.Throw<IOException>();
//---------------Test Result -----------------------
}
[Test]
public void OperatingOnNonNullStream_ShouldWriteAllBytes()
{
//---------------Set up test pack-------------------
using var memStream = new MemoryStream();
var expected = GetRandomBytes();
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
memStream.WriteAllBytes(expected);
//---------------Test Result -----------------------
memStream.Seek(0, SeekOrigin.Begin);
var result = memStream.ReadAllBytes();
Expect(result).To.Equal(expected);
}
[Test]
public void OperatingOnNonNullStream_GivenNulldata_ShouldNotThrow()
{
//---------------Set up test pack-------------------
using var memStream = new MemoryStream();
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
memStream.WriteAllBytes(null);
//---------------Test Result -----------------------
memStream.Seek(0, SeekOrigin.Begin);
var result = memStream.ReadAllBytes();
Expect(result).To.Be.Empty();
}
[Test]
public void OperatingOnStreamWithExistingData_ShouldOverwrite()
{
//---------------Set up test pack-------------------
var initial = GetRandomBytes();
using var folder = new AutoTempFolder();
var file = CreateRandomFileIn(folder.Path);
using var fileStream = File.Open(
Path.Combine(folder.Path, file),
FileMode.Open,
FileAccess.ReadWrite);
fileStream.Write(initial, 0, initial.Length);
var expected = GetRandomBytes();
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
fileStream.WriteAllBytes(expected);
//---------------Test Result -----------------------
fileStream.Rewind();
var result = fileStream.ReadAllBytes();
Expect(result).To.Equal(expected);
}
[Test]
public void OperatingOnNullStream_ShouldThrow()
{
// Arrange
// Pre-assert
// Act
Expect(
() => (null as MemoryStream).WriteAllBytes(GetRandomBytes())
).To.Throw<IOException>()
.With.Message.Containing("stream is null");
// Assert
}
[Test]
public void GivenNull_ShouldNotWriteOrThrow()
{
// Arrange
var buffer = GetRandomBytes(128);
var copy = buffer.DeepClone();
var stream = new MemoryStream(buffer);
// Pre-assert
// Act
Expect(() => stream.WriteAllBytes(null))
.Not.To.Throw();
// Assert
Expect(buffer).To.Equal(copy);
}
[Test]
public void GivenEmptyData_ShouldNotWriteOrThrow()
{
// Arrange
var buffer = GetRandomBytes(128);
var copy = buffer.DeepClone();
var stream = new MemoryStream(buffer);
// Pre-assert
// Act
Expect(() => stream.WriteAllBytes(new byte[0]))
.Not.To.Throw();
// Assert
Expect(buffer).To.Equal(copy);
}
}
[TestFixture]
public class WritingBytesAsync
{
[Test]
public void OperatingOnNullStream_ShouldThrowIOException()
{
//---------------Set up test pack-------------------
Stream dst = null;
var expected = GetRandomBytes(128);
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
Expect(
async () => await dst.WriteAllBytesAsync(expected)
)
.To.Throw<IOException>();
//---------------Test Result -----------------------
}
[Test]
public async Task OperatingOnNonNullStream_ShouldWriteAllBytesAsync()
{
//---------------Set up test pack-------------------
using var memStream = new MemoryStream();
var expected = GetRandomBytes(128);
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
await memStream.WriteAllBytesAsync(expected);
//---------------Test Result -----------------------
memStream.Seek(0, SeekOrigin.Begin);
var result = await memStream.ReadAllBytesAsync();
Expect(result).To.Equal(expected);
}
[Test]
public async Task OperatingOnNonNullStream_GivenNulldata_ShouldNotThrow()
{
//---------------Set up test pack-------------------
using var memStream = new MemoryStream();
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
await memStream.WriteAllBytesAsync(null);
//---------------Test Result -----------------------
memStream.Seek(0, SeekOrigin.Begin);
var result = await memStream.ReadAllBytesAsync();
Expect(result).To.Be.Empty();
}
[Test]
public async Task OperatingOnStreamWithExistingData_ShouldOverwrite()
{
//---------------Set up test pack-------------------
var initial = GetRandomBytes();
using var folder = new AutoTempFolder();
var file = CreateRandomFileIn(folder.Path);
using var fileStream = File.Open(
Path.Combine(folder.Path, file),
FileMode.Open,
FileAccess.ReadWrite);
fileStream.Write(initial, 0, initial.Length);
var expected = GetRandomBytes(128);
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
await fileStream.WriteAllBytesAsync(expected);
//---------------Test Result -----------------------
fileStream.Rewind();
var result = await fileStream.ReadAllBytesAsync();
Expect(result).To.Equal(expected);
}
[Test]
public void OperatingOnNullStream_ShouldThrow()
{
// Arrange
// Pre-assert
// Act
Expect(
async () => await (null as MemoryStream).WriteAllBytesAsync(GetRandomBytes())
).To.Throw<IOException>()
.With.Message.Containing("stream is null");
// Assert
}
[Test]
public void GivenNull_ShouldNotWriteOrThrow()
{
// Arrange
var buffer = GetRandomBytes(128);
var copy = buffer.DeepClone();
var stream = new MemoryStream(buffer);
// Pre-assert
// Act
Expect(async () => await stream.WriteAllBytesAsync(null))
.Not.To.Throw();
// Assert
Expect(buffer).To.Equal(copy);
}
[Test]
public void GivenEmptyData_ShouldNotWriteOrThrow()
{
// Arrange
var buffer = GetRandomBytes(128);
var copy = buffer.DeepClone();
var stream = new MemoryStream(buffer);
// Pre-assert
// Act
Expect(async () => await stream.WriteAllBytesAsync(new byte[0]))
.Not.To.Throw();
// Assert
Expect(buffer).To.Equal(copy);
}
}
[TestFixture]
public class AppendingData
{
[Test]
public void OperatingOnNonNullStream_ShouldAppendData()
{
//---------------Set up test pack-------------------
using var folder = new AutoTempFolder();
var fileName = CreateRandomFileIn(folder.Path);
var initial = File.ReadAllBytes(Path.Combine(folder.Path, fileName));
using var fileStream =
File.Open(Path.Combine(folder.Path, fileName), FileMode.Open, FileAccess.ReadWrite);
var toAdd = GetRandomBytes(1, 1);
var expected = initial.Concat(toAdd).ToArray();
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
fileStream.Append(toAdd);
//---------------Test Result -----------------------
var result = fileStream.ReadAllBytes();
Expect(result).To.Equal(expected);
}
[Test]
public void OperatingOnNullStream_ShouldThrowIOException()
{
//---------------Set up test pack-------------------
Stream dst = null;
var toAppend = GetRandomBytes();
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
Expect(() => dst.Append(toAppend)).To.Throw<IOException>();
//---------------Test Result -----------------------
}
}
[TestFixture]
public class AppendingDataAsync
{
[Test]
public async Task OperatingOnNonNullStream_ShouldAppendData()
{
//---------------Set up test pack-------------------
using var folder = new AutoTempFolder();
var fileName = CreateRandomFileIn(folder.Path);
var initial = File.ReadAllBytes(Path.Combine(folder.Path, fileName));
using var fileStream =
File.Open(Path.Combine(folder.Path, fileName), FileMode.Open, FileAccess.ReadWrite);
var toAdd = GetRandomBytes(1, 1);
var expected = initial.Concat(toAdd).ToArray();
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
await fileStream.AppendAsync(toAdd);
//---------------Test Result -----------------------
var result = await fileStream.ReadAllBytesAsync();
Expect(result).To.Equal(expected);
}
[Test]
public void OperatingOnNullStream_ShouldThrowIOException()
{
//---------------Set up test pack-------------------
Stream dst = null;
var toAppend = GetRandomBytes();
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
Expect(
async () => await dst.AppendAsync(toAppend)
).To.Throw<IOException>();
//---------------Test Result -----------------------
}
}
[Test]
public void AsString_GivenStreamWithStringAndNullPadding_ShouldReturnString()
{
//---------------Set up test pack-------------------
var expected = GetRandomString();
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
using var memStream = new MemoryStream(new byte[1024], true);
//---------------Test Result -----------------------
memStream.WriteAllBytes(Encoding.UTF8.GetBytes(expected));
memStream.Rewind();
var result = memStream.AsString();
Expect(result).To.Equal(expected);
}
[Test]
public async Task AsStringAsync_GivenStreamWithStringAndNullPadding_ShouldReturnString()
{
//---------------Set up test pack-------------------
var expected = GetRandomString();
//---------------Assert Precondition----------------
//---------------Execute Test ----------------------
using var memStream = new MemoryStream(new byte[1024], true);
//---------------Test Result -----------------------
await memStream.WriteAllBytesAsync(Encoding.UTF8.GetBytes(expected));
memStream.Rewind();
var result = await memStream.AsStringAsync();
Expect(result).To.Equal(expected);
}
[TestFixture]
public class WritingStrings
{
[Test]
public void OperatingOnStream_GivenDataAndNoEncoding_ShouldWriteToStream_WithUtf8Encoding()
{
//--------------- Arrange -------------------
var buffer = new byte[128];
var toWrite = GetRandomString(5, 10);
var expected = toWrite.AsBytes();
using var stream = new MemoryStream(buffer);
//--------------- Assume ----------------
//--------------- Act ----------------------
stream.WriteString(toWrite, Encoding.UTF8);
//--------------- Assert -----------------------
var copy = new byte[toWrite.Length];
Buffer.BlockCopy(buffer, 0, copy, 0, toWrite.Length);
Expect(copy).To.Equal(expected);
}
[Test]
public void GivenNoEncoding_ShouldDefaultTo_UTF8()
{
// Arrange
var buffer = new byte[128];
var toWrite = GetRandomString(10);
var expected = toWrite.AsBytes(Encoding.UTF8);
// Pre-assert
// Act
using (var stream = new MemoryStream(buffer))
{
stream.WriteString(toWrite);
}
// Assert
var copy = new byte[toWrite.Length];
Buffer.BlockCopy(buffer, 0, copy, 0, toWrite.Length);
Expect(copy).To.Equal(expected);
}
[Test]
public void OperatingOnStream_GivenNullData_ShouldWriteNothing()
{
//--------------- Arrange -------------------
var size = 128;
var buffer = new byte[size];
var expectedValue = (byte) GetRandomInt(2, 10);
for (var i = 0; i < buffer.Length; i++)
buffer[i] = expectedValue;
using var stream = new MemoryStream(buffer);
//--------------- Assume ----------------
//--------------- Act ----------------------
stream.WriteString(null, Encoding.UTF8);
//--------------- Assert -----------------------
Expect(buffer).To.Contain.Only(size).Equal.To(expectedValue);
}
public static Encoding[] Encodings { get; } =
{
Encoding.UTF8,
Encoding.ASCII,
Encoding.UTF7
};
[TestCaseSource(nameof(Encodings))]
public void OperatingOnStream_GivenDataAndEncoding_ShouldWriteToStream(Encoding encoding)
{
//--------------- Arrange -------------------
var buffer = new byte[128];
var toWrite = GetRandomString(5, 10);
var expected = toWrite.AsBytes(encoding);
using var stream = new MemoryStream(buffer);
//--------------- Assume ----------------
//--------------- Act ----------------------
stream.WriteString(toWrite, encoding);
//--------------- Assert -----------------------
var copy = new byte[toWrite.Length];
Buffer.BlockCopy(buffer, 0, copy, 0, toWrite.Length);
Expect(copy).To.Equal(expected);
}
}
[TestFixture]
public class WritingStringsAsync
{
[Test]
public async Task OperatingOnStream_GivenDataAndNoEncoding_ShouldWriteToStream_WithUtf8Encoding()
{
//--------------- Arrange -------------------
var buffer = new byte[128];
var toWrite = GetRandomString(5, 10);
var expected = toWrite.AsBytes();
using var stream = new MemoryStream(buffer);
//--------------- Assume ----------------
//--------------- Act ----------------------
await stream.WriteStringAsync(toWrite, Encoding.UTF8);
//--------------- Assert -----------------------
var copy = new byte[toWrite.Length];
Buffer.BlockCopy(buffer, 0, copy, 0, toWrite.Length);
Expect(copy).To.Equal(expected);
}
[Test]
public async Task GivenNoEncoding_ShouldDefaultTo_UTF8()
{
// Arrange
var buffer = new byte[128];
var toWrite = GetRandomString(10);
var expected = toWrite.AsBytes(Encoding.UTF8);
// Pre-assert
// Act
using (var stream = new MemoryStream(buffer))
{
await stream.WriteStringAsync(toWrite);
}
// Assert
var copy = new byte[toWrite.Length];
Buffer.BlockCopy(buffer, 0, copy, 0, toWrite.Length);
Expect(copy).To.Equal(expected);
}
[Test]
public async Task OperatingOnStream_GivenNullData_ShouldWriteNothing()
{
//--------------- Arrange -------------------
var size = 128;
var buffer = new byte[size];
var expectedValue = (byte) GetRandomInt(2, 10);
for (var i = 0; i < buffer.Length; i++)
buffer[i] = expectedValue;
using var stream = new MemoryStream(buffer);
//--------------- Assume ----------------
//--------------- Act ----------------------
await stream.WriteStringAsync(null, Encoding.UTF8);
//--------------- Assert -----------------------
Expect(buffer).To.Contain.Only(size).Equal.To(expectedValue);
}
public static Encoding[] Encodings { get; } =
{
Encoding.UTF8,
Encoding.ASCII,
Encoding.UTF7
};
[TestCaseSource(nameof(Encodings))]
public async Task OperatingOnStream_GivenDataAndEncoding_ShouldWriteToStream(Encoding encoding)
{
//--------------- Arrange -------------------
var buffer = new byte[128];
var toWrite = GetRandomString(5, 10);
var expected = toWrite.AsBytes(encoding);
using var stream = new MemoryStream(buffer);
//--------------- Assume ----------------
//--------------- Act ----------------------
await stream.WriteStringAsync(toWrite, encoding);
//--------------- Assert -----------------------
var copy = new byte[toWrite.Length];
Buffer.BlockCopy(buffer, 0, copy, 0, toWrite.Length);
Expect(copy).To.Equal(expected);
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
#if NET_4_0
using System.Configuration;
using System.Web.Configuration;
#endif
namespace Rhino.Queues.Utils
{
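/// <summary>
/// Port of Mono's HttpEncoder providing HTML, URL and HTTP header
/// encoding/decoding helpers for Rhino.Queues.
/// </summary>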
public class MonoHttpEncoder
{
static readonly char[] hexChars = "0123456789abcdef".ToCharArray();
static readonly object entitiesLock = new object();
static SortedDictionary<string, char> entities;
#if NET_4_0
static Lazy <MonoHttpEncoder> defaultEncoder;
static Lazy <MonoHttpEncoder> currentEncoderLazy;
#else
static MonoHttpEncoder defaultEncoder;
#endif
static MonoHttpEncoder currentEncoder;
static IDictionary<string, char> Entities
{
get
{
lock (entitiesLock)
{
if (entities == null)
InitEntities();
return entities;
}
}
}
public static MonoHttpEncoder Current
{
get
{
#if NET_4_0
if (currentEncoder == null)
currentEncoder = currentEncoderLazy.Value;
#endif
return currentEncoder;
}
#if NET_4_0
set {
if (value == null)
throw new ArgumentNullException ("value");
currentEncoder = value;
}
#endif
}
public static MonoHttpEncoder Default
{
get
{
#if NET_4_0
return defaultEncoder.Value;
#else
return defaultEncoder;
#endif
}
}
static MonoHttpEncoder()
{
#if NET_4_0
defaultEncoder = new Lazy <MonoHttpEncoder> (() => new MonoHttpEncoder ());
currentEncoderLazy = new Lazy <MonoHttpEncoder> (new Func <MonoHttpEncoder> (GetCustomEncoderFromConfig));
#else
defaultEncoder = new MonoHttpEncoder();
currentEncoder = defaultEncoder;
#endif
}
public MonoHttpEncoder()
{
}
#if NET_4_0
protected internal virtual
#else
internal static
#endif
void HeaderNameValueEncode(string headerName, string headerValue, out string encodedHeaderName, out string encodedHeaderValue)
{
if (String.IsNullOrEmpty(headerName))
encodedHeaderName = headerName;
else
encodedHeaderName = EncodeHeaderString(headerName);
if (String.IsNullOrEmpty(headerValue))
encodedHeaderValue = headerValue;
else
encodedHeaderValue = EncodeHeaderString(headerValue);
}
static void StringBuilderAppend(string s, ref StringBuilder sb)
{
if (sb == null)
sb = new StringBuilder(s);
else
sb.Append(s);
}
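// Scans a header name/value for ASCII control characters (other than TAB) and DEL.
// When any are found, only their percent-encoded forms (e.g. "%0a") are returned;
// otherwise the input is returned unchanged.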
static string EncodeHeaderString(string input)
{
StringBuilder sb = null;
char ch;
for (int i = 0; i < input.Length; i++)
{
ch = input[i];
if ((ch < 32 && ch != 9) || ch == 127)
StringBuilderAppend(String.Format("%{0:x2}", (int)ch), ref sb);
}
if (sb != null)
return sb.ToString();
return input;
}
#if NET_4_0
protected internal virtual void HtmlAttributeEncode (string value, TextWriter output)
{
if (output == null)
throw new ArgumentNullException ("output");
if (String.IsNullOrEmpty (value))
return;
output.Write (HtmlAttributeEncode (value));
}
protected internal virtual void HtmlDecode (string value, TextWriter output)
{
if (output == null)
throw new ArgumentNullException ("output");
output.Write (HtmlDecode (value));
}
protected internal virtual void HtmlEncode (string value, TextWriter output)
{
if (output == null)
throw new ArgumentNullException ("output");
output.Write (HtmlEncode (value));
}
protected internal virtual byte[] UrlEncode (byte[] bytes, int offset, int count)
{
return UrlEncodeToBytes (bytes, offset, count);
}
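// Resolves the encoderType configured under system.web/httpRuntime: returns the
// built-in encoder when the default type name is configured, otherwise loads,
// validates and instantiates the configured MonoHttpEncoder subclass.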
static MonoHttpEncoder GetCustomEncoderFromConfig ()
{
var cfg = WebConfigurationManager.GetSection ("system.web/httpRuntime") as HttpRuntimeSection;
string typeName = cfg.EncoderType;
if (String.Compare (typeName, "Rhino.Queues.Utils.MonoHttpEncoder", StringComparison.OrdinalIgnoreCase) == 0)
return Default;
Type t = Type.GetType (typeName, false);
if (t == null)
throw new ConfigurationErrorsException (String.Format ("Could not load type '{0}'.", typeName));
if (!typeof (MonoHttpEncoder).IsAssignableFrom (t))
throw new ConfigurationErrorsException (
String.Format ("'{0}' is not allowed here because it does not extend class 'System.Web.Util.MonoHttpEncoder'.", typeName)
);
return Activator.CreateInstance (t, false) as MonoHttpEncoder;
}
#endif
#if NET_4_0
protected internal virtual
#else
internal static
#endif
string UrlPathEncode(string value)
{
if (String.IsNullOrEmpty(value))
return value;
MemoryStream result = new MemoryStream();
int length = value.Length;
for (int i = 0; i < length; i++)
UrlPathEncodeChar(value[i], result);
return Encoding.ASCII.GetString(result.ToArray());
}
internal static byte[] UrlEncodeToBytes(byte[] bytes, int offset, int count)
{
if (bytes == null)
throw new ArgumentNullException("bytes");
int blen = bytes.Length;
if (blen == 0)
return new byte[0];
if (offset < 0 || offset >= blen)
throw new ArgumentOutOfRangeException("offset");
if (count < 0 || count > blen - offset)
throw new ArgumentOutOfRangeException("count");
MemoryStream result = new MemoryStream(count);
int end = offset + count;
for (int i = offset; i < end; i++)
UrlEncodeChar((char)bytes[i], result, false);
return result.ToArray();
}
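// HTML-encodes a string in two passes: a scan returns the input untouched when no
// markup-significant characters are present; otherwise a second pass escapes
// &, <, >, " (and ' on NET_4_0) as well as the fullwidth angle brackets, and
// numerically escapes characters in the 160..255 range.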
internal static string HtmlEncode(string s)
{
if (s == null)
return null;
if (s.Length == 0)
return String.Empty;
bool needEncode = false;
for (int i = 0; i < s.Length; i++)
{
char c = s[i];
if (c == '&' || c == '"' || c == '<' || c == '>' || c > 159
#if NET_4_0
|| c == '\''
#endif
)
{
needEncode = true;
break;
}
}
if (!needEncode)
return s;
StringBuilder output = new StringBuilder();
char ch;
int len = s.Length;
for (int i = 0; i < len; i++)
{
switch (s[i])
{
case '&':
output.Append("&");
break;
case '>':
output.Append(">");
break;
case '<':
output.Append("<");
break;
case '"':
output.Append(""");
break;
#if NET_4_0
case '\'':
output.Append ("'");
break;
#endif
case '\uff1c':
output.Append("<");
break;
case '\uff1e':
output.Append(">");
break;
default:
ch = s[i];
if (ch > 159 && ch < 256)
{
output.Append("&#");
output.Append(((int)ch).ToString(CultureInfo.InvariantCulture));
output.Append(";");
}
else
output.Append(ch);
break;
}
}
return output.ToString();
}
internal static string HtmlAttributeEncode(string s)
{
#if NET_4_0
if (String.IsNullOrEmpty (s))
return String.Empty;
#else
if (s == null)
return null;
if (s.Length == 0)
return String.Empty;
#endif
bool needEncode = false;
for (int i = 0; i < s.Length; i++)
{
char c = s[i];
if (c == '&' || c == '"' || c == '<'
#if NET_4_0
|| c == '\''
#endif
)
{
needEncode = true;
break;
}
}
if (!needEncode)
return s;
StringBuilder output = new StringBuilder();
int len = s.Length;
for (int i = 0; i < len; i++)
switch (s[i])
{
case '&':
output.Append("&");
break;
case '"':
output.Append(""");
break;
case '<':
output.Append("<");
break;
#if NET_4_0
case '\'':
output.Append ("'");
break;
#endif
default:
output.Append(s[i]);
break;
}
return output.ToString();
}
internal static string HtmlDecode(string s)
{
if (s == null)
return null;
if (s.Length == 0)
return String.Empty;
if (s.IndexOf('&') == -1)
return s;
#if NET_4_0
StringBuilder rawEntity = new StringBuilder ();
#endif
StringBuilder entity = new StringBuilder();
StringBuilder output = new StringBuilder();
int len = s.Length;
// 0 -> nothing,
// 1 -> right after '&'
// 2 -> between '&' and ';' but no '#'
// 3 -> '#' found after '&' and getting numbers
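// Example walk-through (illustrative): decoding "&#x41;" moves 0 -> 1 on '&',
// 1 -> 3 on '#', flags hex mode on 'x', accumulates 0x41 from "41" and emits
// 'A' when ';' is reached; a named entity such as "&amp;" goes 0 -> 1 -> 2
// and is resolved through the Entities table.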
int state = 0;
int number = 0;
bool is_hex_value = false;
bool have_trailing_digits = false;
for (int i = 0; i < len; i++)
{
char c = s[i];
if (state == 0)
{
if (c == '&')
{
entity.Append(c);
#if NET_4_0
rawEntity.Append (c);
#endif
state = 1;
}
else
{
output.Append(c);
}
continue;
}
if (c == '&')
{
state = 1;
if (have_trailing_digits)
{
entity.Append(number.ToString(CultureInfo.InvariantCulture));
have_trailing_digits = false;
}
output.Append(entity.ToString());
entity.Length = 0;
entity.Append('&');
continue;
}
if (state == 1)
{
if (c == ';')
{
state = 0;
output.Append(entity.ToString());
output.Append(c);
entity.Length = 0;
}
else
{
number = 0;
is_hex_value = false;
if (c != '#')
{
state = 2;
}
else
{
state = 3;
}
entity.Append(c);
#if NET_4_0
rawEntity.Append (c);
#endif
}
}
else if (state == 2)
{
entity.Append(c);
if (c == ';')
{
string key = entity.ToString();
if (key.Length > 1 && Entities.ContainsKey(key.Substring(1, key.Length - 2)))
key = Entities[key.Substring(1, key.Length - 2)].ToString();
output.Append(key);
state = 0;
entity.Length = 0;
#if NET_4_0
rawEntity.Length = 0;
#endif
}
}
else if (state == 3)
{
if (c == ';')
{
#if NET_4_0
if (number == 0)
output.Append (rawEntity.ToString () + ";");
else
#endif
if (number > 65535)
{
output.Append("&#");
output.Append(number.ToString(CultureInfo.InvariantCulture));
output.Append(";");
}
else
{
output.Append((char)number);
}
state = 0;
entity.Length = 0;
#if NET_4_0
rawEntity.Length = 0;
#endif
have_trailing_digits = false;
}
else if (is_hex_value && Uri.IsHexDigit(c))
{
number = number * 16 + Uri.FromHex(c);
have_trailing_digits = true;
#if NET_4_0
rawEntity.Append (c);
#endif
}
else if (Char.IsDigit(c))
{
number = number * 10 + ((int)c - '0');
have_trailing_digits = true;
#if NET_4_0
rawEntity.Append (c);
#endif
}
else if (number == 0 && (c == 'x' || c == 'X'))
{
is_hex_value = true;
#if NET_4_0
rawEntity.Append (c);
#endif
}
else
{
state = 2;
if (have_trailing_digits)
{
entity.Append(number.ToString(CultureInfo.InvariantCulture));
have_trailing_digits = false;
}
entity.Append(c);
}
}
}
if (entity.Length > 0)
{
output.Append(entity.ToString());
}
else if (have_trailing_digits)
{
output.Append(number.ToString(CultureInfo.InvariantCulture));
}
return output.ToString();
}
internal static bool NotEncoded(char c)
{
return (c == '!' || c == '(' || c == ')' || c == '*' || c == '-' || c == '.' || c == '_'
#if !NET_4_0
|| c == '\''
#endif
);
}
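// Writes the URL-encoded form of a single character to the result stream:
// characters above 0xFF become "%uXXXX", space becomes '+', unreserved and
// alphanumeric characters are written verbatim, and everything else becomes
// "%XX" (or "%u00XX" when isUnicode is set and the character is above 127).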
internal static void UrlEncodeChar(char c, Stream result, bool isUnicode)
{
if (c > 255)
{
//FIXME: what happens when there is an internal error?
//if (!isUnicode)
// throw new ArgumentOutOfRangeException ("c", c, "c must be less than 256");
int idx;
int i = (int)c;
result.WriteByte((byte)'%');
result.WriteByte((byte)'u');
idx = i >> 12;
result.WriteByte((byte)hexChars[idx]);
idx = (i >> 8) & 0x0F;
result.WriteByte((byte)hexChars[idx]);
idx = (i >> 4) & 0x0F;
result.WriteByte((byte)hexChars[idx]);
idx = i & 0x0F;
result.WriteByte((byte)hexChars[idx]);
return;
}
if (c > ' ' && NotEncoded(c))
{
result.WriteByte((byte)c);
return;
}
if (c == ' ')
{
result.WriteByte((byte)'+');
return;
}
if ((c < '0') ||
(c < 'A' && c > '9') ||
(c > 'Z' && c < 'a') ||
(c > 'z'))
{
if (isUnicode && c > 127)
{
result.WriteByte((byte)'%');
result.WriteByte((byte)'u');
result.WriteByte((byte)'0');
result.WriteByte((byte)'0');
}
else
result.WriteByte((byte)'%');
int idx = ((int)c) >> 4;
result.WriteByte((byte)hexChars[idx]);
idx = ((int)c) & 0x0F;
result.WriteByte((byte)hexChars[idx]);
}
else
result.WriteByte((byte)c);
}
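// Path encoding keeps printable ASCII (33..126) verbatim, emits space as "%20"
// and UTF-8 encodes then percent-escapes all other characters.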
internal static void UrlPathEncodeChar(char c, Stream result)
{
if (c < 33 || c > 126)
{
byte[] bIn = Encoding.UTF8.GetBytes(c.ToString());
for (int i = 0; i < bIn.Length; i++)
{
result.WriteByte((byte)'%');
int idx = ((int)bIn[i]) >> 4;
result.WriteByte((byte)hexChars[idx]);
idx = ((int)bIn[i]) & 0x0F;
result.WriteByte((byte)hexChars[idx]);
}
}
else if (c == ' ')
{
result.WriteByte((byte)'%');
result.WriteByte((byte)'2');
result.WriteByte((byte)'0');
}
else
result.WriteByte((byte)c);
}
static void InitEntities()
{
// Build the hash table of HTML entity references. This list comes
// from the HTML 4.01 W3C recommendation.
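// Once populated, lookups resolve entity names to characters,
// e.g. Entities["amp"] == '&' and Entities["euro"] == '\u20AC'.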
entities = new SortedDictionary<string, char>(StringComparer.Ordinal);
entities.Add("nbsp", '\u00A0');
entities.Add("iexcl", '\u00A1');
entities.Add("cent", '\u00A2');
entities.Add("pound", '\u00A3');
entities.Add("curren", '\u00A4');
entities.Add("yen", '\u00A5');
entities.Add("brvbar", '\u00A6');
entities.Add("sect", '\u00A7');
entities.Add("uml", '\u00A8');
entities.Add("copy", '\u00A9');
entities.Add("ordf", '\u00AA');
entities.Add("laquo", '\u00AB');
entities.Add("not", '\u00AC');
entities.Add("shy", '\u00AD');
entities.Add("reg", '\u00AE');
entities.Add("macr", '\u00AF');
entities.Add("deg", '\u00B0');
entities.Add("plusmn", '\u00B1');
entities.Add("sup2", '\u00B2');
entities.Add("sup3", '\u00B3');
entities.Add("acute", '\u00B4');
entities.Add("micro", '\u00B5');
entities.Add("para", '\u00B6');
entities.Add("middot", '\u00B7');
entities.Add("cedil", '\u00B8');
entities.Add("sup1", '\u00B9');
entities.Add("ordm", '\u00BA');
entities.Add("raquo", '\u00BB');
entities.Add("frac14", '\u00BC');
entities.Add("frac12", '\u00BD');
entities.Add("frac34", '\u00BE');
entities.Add("iquest", '\u00BF');
entities.Add("Agrave", '\u00C0');
entities.Add("Aacute", '\u00C1');
entities.Add("Acirc", '\u00C2');
entities.Add("Atilde", '\u00C3');
entities.Add("Auml", '\u00C4');
entities.Add("Aring", '\u00C5');
entities.Add("AElig", '\u00C6');
entities.Add("Ccedil", '\u00C7');
entities.Add("Egrave", '\u00C8');
entities.Add("Eacute", '\u00C9');
entities.Add("Ecirc", '\u00CA');
entities.Add("Euml", '\u00CB');
entities.Add("Igrave", '\u00CC');
entities.Add("Iacute", '\u00CD');
entities.Add("Icirc", '\u00CE');
entities.Add("Iuml", '\u00CF');
entities.Add("ETH", '\u00D0');
entities.Add("Ntilde", '\u00D1');
entities.Add("Ograve", '\u00D2');
entities.Add("Oacute", '\u00D3');
entities.Add("Ocirc", '\u00D4');
entities.Add("Otilde", '\u00D5');
entities.Add("Ouml", '\u00D6');
entities.Add("times", '\u00D7');
entities.Add("Oslash", '\u00D8');
entities.Add("Ugrave", '\u00D9');
entities.Add("Uacute", '\u00DA');
entities.Add("Ucirc", '\u00DB');
entities.Add("Uuml", '\u00DC');
entities.Add("Yacute", '\u00DD');
entities.Add("THORN", '\u00DE');
entities.Add("szlig", '\u00DF');
entities.Add("agrave", '\u00E0');
entities.Add("aacute", '\u00E1');
entities.Add("acirc", '\u00E2');
entities.Add("atilde", '\u00E3');
entities.Add("auml", '\u00E4');
entities.Add("aring", '\u00E5');
entities.Add("aelig", '\u00E6');
entities.Add("ccedil", '\u00E7');
entities.Add("egrave", '\u00E8');
entities.Add("eacute", '\u00E9');
entities.Add("ecirc", '\u00EA');
entities.Add("euml", '\u00EB');
entities.Add("igrave", '\u00EC');
entities.Add("iacute", '\u00ED');
entities.Add("icirc", '\u00EE');
entities.Add("iuml", '\u00EF');
entities.Add("eth", '\u00F0');
entities.Add("ntilde", '\u00F1');
entities.Add("ograve", '\u00F2');
entities.Add("oacute", '\u00F3');
entities.Add("ocirc", '\u00F4');
entities.Add("otilde", '\u00F5');
entities.Add("ouml", '\u00F6');
entities.Add("divide", '\u00F7');
entities.Add("oslash", '\u00F8');
entities.Add("ugrave", '\u00F9');
entities.Add("uacute", '\u00FA');
entities.Add("ucirc", '\u00FB');
entities.Add("uuml", '\u00FC');
entities.Add("yacute", '\u00FD');
entities.Add("thorn", '\u00FE');
entities.Add("yuml", '\u00FF');
entities.Add("fnof", '\u0192');
entities.Add("Alpha", '\u0391');
entities.Add("Beta", '\u0392');
entities.Add("Gamma", '\u0393');
entities.Add("Delta", '\u0394');
entities.Add("Epsilon", '\u0395');
entities.Add("Zeta", '\u0396');
entities.Add("Eta", '\u0397');
entities.Add("Theta", '\u0398');
entities.Add("Iota", '\u0399');
entities.Add("Kappa", '\u039A');
entities.Add("Lambda", '\u039B');
entities.Add("Mu", '\u039C');
entities.Add("Nu", '\u039D');
entities.Add("Xi", '\u039E');
entities.Add("Omicron", '\u039F');
entities.Add("Pi", '\u03A0');
entities.Add("Rho", '\u03A1');
entities.Add("Sigma", '\u03A3');
entities.Add("Tau", '\u03A4');
entities.Add("Upsilon", '\u03A5');
entities.Add("Phi", '\u03A6');
entities.Add("Chi", '\u03A7');
entities.Add("Psi", '\u03A8');
entities.Add("Omega", '\u03A9');
entities.Add("alpha", '\u03B1');
entities.Add("beta", '\u03B2');
entities.Add("gamma", '\u03B3');
entities.Add("delta", '\u03B4');
entities.Add("epsilon", '\u03B5');
entities.Add("zeta", '\u03B6');
entities.Add("eta", '\u03B7');
entities.Add("theta", '\u03B8');
entities.Add("iota", '\u03B9');
entities.Add("kappa", '\u03BA');
entities.Add("lambda", '\u03BB');
entities.Add("mu", '\u03BC');
entities.Add("nu", '\u03BD');
entities.Add("xi", '\u03BE');
entities.Add("omicron", '\u03BF');
entities.Add("pi", '\u03C0');
entities.Add("rho", '\u03C1');
entities.Add("sigmaf", '\u03C2');
entities.Add("sigma", '\u03C3');
entities.Add("tau", '\u03C4');
entities.Add("upsilon", '\u03C5');
entities.Add("phi", '\u03C6');
entities.Add("chi", '\u03C7');
entities.Add("psi", '\u03C8');
entities.Add("omega", '\u03C9');
entities.Add("thetasym", '\u03D1');
entities.Add("upsih", '\u03D2');
entities.Add("piv", '\u03D6');
entities.Add("bull", '\u2022');
entities.Add("hellip", '\u2026');
entities.Add("prime", '\u2032');
entities.Add("Prime", '\u2033');
entities.Add("oline", '\u203E');
entities.Add("frasl", '\u2044');
entities.Add("weierp", '\u2118');
entities.Add("image", '\u2111');
entities.Add("real", '\u211C');
entities.Add("trade", '\u2122');
entities.Add("alefsym", '\u2135');
entities.Add("larr", '\u2190');
entities.Add("uarr", '\u2191');
entities.Add("rarr", '\u2192');
entities.Add("darr", '\u2193');
entities.Add("harr", '\u2194');
entities.Add("crarr", '\u21B5');
entities.Add("lArr", '\u21D0');
entities.Add("uArr", '\u21D1');
entities.Add("rArr", '\u21D2');
entities.Add("dArr", '\u21D3');
entities.Add("hArr", '\u21D4');
entities.Add("forall", '\u2200');
entities.Add("part", '\u2202');
entities.Add("exist", '\u2203');
entities.Add("empty", '\u2205');
entities.Add("nabla", '\u2207');
entities.Add("isin", '\u2208');
entities.Add("notin", '\u2209');
entities.Add("ni", '\u220B');
entities.Add("prod", '\u220F');
entities.Add("sum", '\u2211');
entities.Add("minus", '\u2212');
entities.Add("lowast", '\u2217');
entities.Add("radic", '\u221A');
entities.Add("prop", '\u221D');
entities.Add("infin", '\u221E');
entities.Add("ang", '\u2220');
entities.Add("and", '\u2227');
entities.Add("or", '\u2228');
entities.Add("cap", '\u2229');
entities.Add("cup", '\u222A');
entities.Add("int", '\u222B');
entities.Add("there4", '\u2234');
entities.Add("sim", '\u223C');
entities.Add("cong", '\u2245');
entities.Add("asymp", '\u2248');
entities.Add("ne", '\u2260');
entities.Add("equiv", '\u2261');
entities.Add("le", '\u2264');
entities.Add("ge", '\u2265');
entities.Add("sub", '\u2282');
entities.Add("sup", '\u2283');
entities.Add("nsub", '\u2284');
entities.Add("sube", '\u2286');
entities.Add("supe", '\u2287');
entities.Add("oplus", '\u2295');
entities.Add("otimes", '\u2297');
entities.Add("perp", '\u22A5');
entities.Add("sdot", '\u22C5');
entities.Add("lceil", '\u2308');
entities.Add("rceil", '\u2309');
entities.Add("lfloor", '\u230A');
entities.Add("rfloor", '\u230B');
entities.Add("lang", '\u2329');
entities.Add("rang", '\u232A');
entities.Add("loz", '\u25CA');
entities.Add("spades", '\u2660');
entities.Add("clubs", '\u2663');
entities.Add("hearts", '\u2665');
entities.Add("diams", '\u2666');
entities.Add("quot", '\u0022');
entities.Add("amp", '\u0026');
entities.Add("lt", '\u003C');
entities.Add("gt", '\u003E');
entities.Add("OElig", '\u0152');
entities.Add("oelig", '\u0153');
entities.Add("Scaron", '\u0160');
entities.Add("scaron", '\u0161');
entities.Add("Yuml", '\u0178');
entities.Add("circ", '\u02C6');
entities.Add("tilde", '\u02DC');
entities.Add("ensp", '\u2002');
entities.Add("emsp", '\u2003');
entities.Add("thinsp", '\u2009');
entities.Add("zwnj", '\u200C');
entities.Add("zwj", '\u200D');
entities.Add("lrm", '\u200E');
entities.Add("rlm", '\u200F');
entities.Add("ndash", '\u2013');
entities.Add("mdash", '\u2014');
entities.Add("lsquo", '\u2018');
entities.Add("rsquo", '\u2019');
entities.Add("sbquo", '\u201A');
entities.Add("ldquo", '\u201C');
entities.Add("rdquo", '\u201D');
entities.Add("bdquo", '\u201E');
entities.Add("dagger", '\u2020');
entities.Add("Dagger", '\u2021');
entities.Add("permil", '\u2030');
entities.Add("lsaquo", '\u2039');
entities.Add("rsaquo", '\u203A');
entities.Add("euro", '\u20AC');
}
}
}
| |
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
// <auto-generated />
namespace SAEON.Observations.Data
{
/// <summary>
/// Strongly-typed collection for the OrganisationRole class.
/// </summary>
[Serializable]
public partial class OrganisationRoleCollection : ActiveList<OrganisationRole, OrganisationRoleCollection>
{
public OrganisationRoleCollection() {}
/// <summary>
/// Filters an existing collection based on the set criteria. This is an in-memory filter
/// Thanks to developingchris for this!
/// </summary>
/// <returns>OrganisationRoleCollection</returns>
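/// <example>
/// Usage sketch (the column and value below are illustrative placeholders):
/// <code>
/// var roles = new OrganisationRoleCollection()
///     .Where(OrganisationRole.Columns.Code, "Admin")
///     .Load();
/// roles.Filter(); // drops loaded items whose Code does not equal "Admin"
/// </code>
/// </example>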
public OrganisationRoleCollection Filter()
{
for (int i = this.Count - 1; i > -1; i--)
{
OrganisationRole o = this[i];
foreach (SubSonic.Where w in this.wheres)
{
bool remove = false;
System.Reflection.PropertyInfo pi = o.GetType().GetProperty(w.ColumnName);
if (pi.CanRead)
{
object val = pi.GetValue(o, null);
switch (w.Comparison)
{
case SubSonic.Comparison.Equals:
if (!val.Equals(w.ParameterValue))
{
remove = true;
}
break;
}
}
if (remove)
{
this.Remove(o);
break;
}
}
}
return this;
}
}
/// <summary>
/// This is an ActiveRecord class which wraps the OrganisationRole table.
/// </summary>
[Serializable]
public partial class OrganisationRole : ActiveRecord<OrganisationRole>, IActiveRecord
{
#region .ctors and Default Settings
public OrganisationRole()
{
SetSQLProps();
InitSetDefaults();
MarkNew();
}
private void InitSetDefaults() { SetDefaults(); }
public OrganisationRole(bool useDatabaseDefaults)
{
SetSQLProps();
if(useDatabaseDefaults)
ForceDefaults();
MarkNew();
}
public OrganisationRole(object keyID)
{
SetSQLProps();
InitSetDefaults();
LoadByKey(keyID);
}
public OrganisationRole(string columnName, object columnValue)
{
SetSQLProps();
InitSetDefaults();
LoadByParam(columnName,columnValue);
}
protected static void SetSQLProps() { GetTableSchema(); }
#endregion
#region Schema and Query Accessor
public static Query CreateQuery() { return new Query(Schema); }
public static TableSchema.Table Schema
{
get
{
if (BaseSchema == null)
SetSQLProps();
return BaseSchema;
}
}
private static void GetTableSchema()
{
if(!IsSchemaInitialized)
{
//Schema declaration
TableSchema.Table schema = new TableSchema.Table("OrganisationRole", TableType.Table, DataService.GetInstance("ObservationsDB"));
schema.Columns = new TableSchema.TableColumnCollection();
schema.SchemaName = @"dbo";
//columns
TableSchema.TableColumn colvarId = new TableSchema.TableColumn(schema);
colvarId.ColumnName = "ID";
colvarId.DataType = DbType.Guid;
colvarId.MaxLength = 0;
colvarId.AutoIncrement = false;
colvarId.IsNullable = false;
colvarId.IsPrimaryKey = true;
colvarId.IsForeignKey = false;
colvarId.IsReadOnly = false;
colvarId.DefaultSetting = @"(newid())";
colvarId.ForeignKeyTableName = "";
schema.Columns.Add(colvarId);
TableSchema.TableColumn colvarCode = new TableSchema.TableColumn(schema);
colvarCode.ColumnName = "Code";
colvarCode.DataType = DbType.AnsiString;
colvarCode.MaxLength = 50;
colvarCode.AutoIncrement = false;
colvarCode.IsNullable = false;
colvarCode.IsPrimaryKey = false;
colvarCode.IsForeignKey = false;
colvarCode.IsReadOnly = false;
colvarCode.DefaultSetting = @"";
colvarCode.ForeignKeyTableName = "";
schema.Columns.Add(colvarCode);
TableSchema.TableColumn colvarName = new TableSchema.TableColumn(schema);
colvarName.ColumnName = "Name";
colvarName.DataType = DbType.AnsiString;
colvarName.MaxLength = 150;
colvarName.AutoIncrement = false;
colvarName.IsNullable = false;
colvarName.IsPrimaryKey = false;
colvarName.IsForeignKey = false;
colvarName.IsReadOnly = false;
colvarName.DefaultSetting = @"";
colvarName.ForeignKeyTableName = "";
schema.Columns.Add(colvarName);
TableSchema.TableColumn colvarDescription = new TableSchema.TableColumn(schema);
colvarDescription.ColumnName = "Description";
colvarDescription.DataType = DbType.AnsiString;
colvarDescription.MaxLength = 5000;
colvarDescription.AutoIncrement = false;
colvarDescription.IsNullable = true;
colvarDescription.IsPrimaryKey = false;
colvarDescription.IsForeignKey = false;
colvarDescription.IsReadOnly = false;
colvarDescription.DefaultSetting = @"";
colvarDescription.ForeignKeyTableName = "";
schema.Columns.Add(colvarDescription);
TableSchema.TableColumn colvarUserId = new TableSchema.TableColumn(schema);
colvarUserId.ColumnName = "UserId";
colvarUserId.DataType = DbType.Guid;
colvarUserId.MaxLength = 0;
colvarUserId.AutoIncrement = false;
colvarUserId.IsNullable = false;
colvarUserId.IsPrimaryKey = false;
colvarUserId.IsForeignKey = true;
colvarUserId.IsReadOnly = false;
colvarUserId.DefaultSetting = @"";
colvarUserId.ForeignKeyTableName = "aspnet_Users";
schema.Columns.Add(colvarUserId);
TableSchema.TableColumn colvarAddedAt = new TableSchema.TableColumn(schema);
colvarAddedAt.ColumnName = "AddedAt";
colvarAddedAt.DataType = DbType.DateTime;
colvarAddedAt.MaxLength = 0;
colvarAddedAt.AutoIncrement = false;
colvarAddedAt.IsNullable = true;
colvarAddedAt.IsPrimaryKey = false;
colvarAddedAt.IsForeignKey = false;
colvarAddedAt.IsReadOnly = false;
colvarAddedAt.DefaultSetting = @"(getdate())";
colvarAddedAt.ForeignKeyTableName = "";
schema.Columns.Add(colvarAddedAt);
TableSchema.TableColumn colvarUpdatedAt = new TableSchema.TableColumn(schema);
colvarUpdatedAt.ColumnName = "UpdatedAt";
colvarUpdatedAt.DataType = DbType.DateTime;
colvarUpdatedAt.MaxLength = 0;
colvarUpdatedAt.AutoIncrement = false;
colvarUpdatedAt.IsNullable = true;
colvarUpdatedAt.IsPrimaryKey = false;
colvarUpdatedAt.IsForeignKey = false;
colvarUpdatedAt.IsReadOnly = false;
colvarUpdatedAt.DefaultSetting = @"(getdate())";
colvarUpdatedAt.ForeignKeyTableName = "";
schema.Columns.Add(colvarUpdatedAt);
TableSchema.TableColumn colvarRowVersion = new TableSchema.TableColumn(schema);
colvarRowVersion.ColumnName = "RowVersion";
colvarRowVersion.DataType = DbType.Binary;
colvarRowVersion.MaxLength = 0;
colvarRowVersion.AutoIncrement = false;
colvarRowVersion.IsNullable = false;
colvarRowVersion.IsPrimaryKey = false;
colvarRowVersion.IsForeignKey = false;
colvarRowVersion.IsReadOnly = true;
colvarRowVersion.DefaultSetting = @"";
colvarRowVersion.ForeignKeyTableName = "";
schema.Columns.Add(colvarRowVersion);
BaseSchema = schema;
//add this schema to the provider
//so we can query it later
DataService.Providers["ObservationsDB"].AddSchema("OrganisationRole",schema);
}
}
#endregion
#region Props
[XmlAttribute("Id")]
[Bindable(true)]
public Guid Id
{
get { return GetColumnValue<Guid>(Columns.Id); }
set { SetColumnValue(Columns.Id, value); }
}
[XmlAttribute("Code")]
[Bindable(true)]
public string Code
{
get { return GetColumnValue<string>(Columns.Code); }
set { SetColumnValue(Columns.Code, value); }
}
[XmlAttribute("Name")]
[Bindable(true)]
public string Name
{
get { return GetColumnValue<string>(Columns.Name); }
set { SetColumnValue(Columns.Name, value); }
}
[XmlAttribute("Description")]
[Bindable(true)]
public string Description
{
get { return GetColumnValue<string>(Columns.Description); }
set { SetColumnValue(Columns.Description, value); }
}
[XmlAttribute("UserId")]
[Bindable(true)]
public Guid UserId
{
get { return GetColumnValue<Guid>(Columns.UserId); }
set { SetColumnValue(Columns.UserId, value); }
}
[XmlAttribute("AddedAt")]
[Bindable(true)]
public DateTime? AddedAt
{
get { return GetColumnValue<DateTime?>(Columns.AddedAt); }
set { SetColumnValue(Columns.AddedAt, value); }
}
[XmlAttribute("UpdatedAt")]
[Bindable(true)]
public DateTime? UpdatedAt
{
get { return GetColumnValue<DateTime?>(Columns.UpdatedAt); }
set { SetColumnValue(Columns.UpdatedAt, value); }
}
[XmlAttribute("RowVersion")]
[Bindable(true)]
public byte[] RowVersion
{
get { return GetColumnValue<byte[]>(Columns.RowVersion); }
set { SetColumnValue(Columns.RowVersion, value); }
}
#endregion
#region PrimaryKey Methods
protected override void SetPrimaryKey(object oValue)
{
base.SetPrimaryKey(oValue);
SetPKValues();
}
public SAEON.Observations.Data.OrganisationInstrumentCollection OrganisationInstrumentRecords()
{
return new SAEON.Observations.Data.OrganisationInstrumentCollection().Where(OrganisationInstrument.Columns.OrganisationRoleID, Id).Load();
}
public SAEON.Observations.Data.OrganisationSiteCollection OrganisationSiteRecords()
{
return new SAEON.Observations.Data.OrganisationSiteCollection().Where(OrganisationSite.Columns.OrganisationRoleID, Id).Load();
}
public SAEON.Observations.Data.OrganisationStationCollection OrganisationStationRecords()
{
return new SAEON.Observations.Data.OrganisationStationCollection().Where(OrganisationStation.Columns.OrganisationRoleID, Id).Load();
}
#endregion
#region ForeignKey Properties
private SAEON.Observations.Data.AspnetUser _AspnetUser = null;
/// <summary>
/// Returns a AspnetUser ActiveRecord object related to this OrganisationRole
///
/// </summary>
public SAEON.Observations.Data.AspnetUser AspnetUser
{
// get { return SAEON.Observations.Data.AspnetUser.FetchByID(this.UserId); }
get { return _AspnetUser ?? (_AspnetUser = SAEON.Observations.Data.AspnetUser.FetchByID(this.UserId)); }
set { SetColumnValue("UserId", value.UserId); }
}
#endregion
//no ManyToMany tables defined (0)
#region ObjectDataSource support
/// <summary>
/// Inserts a record, can be used with the Object Data Source
/// </summary>
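/// <example>
/// Hypothetical call; every value below is a placeholder:
/// <code>
/// OrganisationRole.Insert(Guid.NewGuid(), "CUR", "Curator", null,
///     currentUserId, DateTime.Now, DateTime.Now, null);
/// </code>
/// </example>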
public static void Insert(Guid varId,string varCode,string varName,string varDescription,Guid varUserId,DateTime? varAddedAt,DateTime? varUpdatedAt,byte[] varRowVersion)
{
OrganisationRole item = new OrganisationRole();
item.Id = varId;
item.Code = varCode;
item.Name = varName;
item.Description = varDescription;
item.UserId = varUserId;
item.AddedAt = varAddedAt;
item.UpdatedAt = varUpdatedAt;
item.RowVersion = varRowVersion;
if (System.Web.HttpContext.Current != null)
item.Save(System.Web.HttpContext.Current.User.Identity.Name);
else
item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
}
/// <summary>
/// Updates a record, can be used with the Object Data Source
/// </summary>
public static void Update(Guid varId,string varCode,string varName,string varDescription,Guid varUserId,DateTime? varAddedAt,DateTime? varUpdatedAt,byte[] varRowVersion)
{
OrganisationRole item = new OrganisationRole();
item.Id = varId;
item.Code = varCode;
item.Name = varName;
item.Description = varDescription;
item.UserId = varUserId;
item.AddedAt = varAddedAt;
item.UpdatedAt = varUpdatedAt;
item.RowVersion = varRowVersion;
item.IsNew = false;
if (System.Web.HttpContext.Current != null)
item.Save(System.Web.HttpContext.Current.User.Identity.Name);
else
item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
}
#endregion
#region Typed Columns
public static TableSchema.TableColumn IdColumn
{
get { return Schema.Columns[0]; }
}
public static TableSchema.TableColumn CodeColumn
{
get { return Schema.Columns[1]; }
}
public static TableSchema.TableColumn NameColumn
{
get { return Schema.Columns[2]; }
}
public static TableSchema.TableColumn DescriptionColumn
{
get { return Schema.Columns[3]; }
}
public static TableSchema.TableColumn UserIdColumn
{
get { return Schema.Columns[4]; }
}
public static TableSchema.TableColumn AddedAtColumn
{
get { return Schema.Columns[5]; }
}
public static TableSchema.TableColumn UpdatedAtColumn
{
get { return Schema.Columns[6]; }
}
public static TableSchema.TableColumn RowVersionColumn
{
get { return Schema.Columns[7]; }
}
#endregion
#region Columns Struct
public struct Columns
{
public static string Id = @"ID";
public static string Code = @"Code";
public static string Name = @"Name";
public static string Description = @"Description";
public static string UserId = @"UserId";
public static string AddedAt = @"AddedAt";
public static string UpdatedAt = @"UpdatedAt";
public static string RowVersion = @"RowVersion";
}
#endregion
#region Update PK Collections
public void SetPKValues()
{
}
#endregion
#region Deep Save
public void DeepSave()
{
Save();
}
#endregion
}
}
|