/// <summary>
/// Initializes the strategy with the specified nodes and cluster configuration.
/// </summary>
/// <param name="nodes">The ring of cluster nodes to draw connections from.</param>
/// <param name="config">The cluster configuration.</param>
public ExclusiveConnectionStrategy(Ring nodes, ClusterConfig config) { _nodes = nodes; _config = config; _connections = new ConcurrentStack<Connection>(); _rndGen = new Random((int)DateTime.Now.Ticks); }
static void Main(string[] args)
{
    // Stack (LIFO)
    ConcurrentStack<int> stack = new ConcurrentStack<int>();
    // Push an item onto the stack
    stack.Push(42);
    int result;
    // TryPop returns the most recently pushed item; if the stack is empty it
    // returns false instead of throwing, because it only "tries" to take an item.
    // A successful TryPop also removes the item from the collection.
    if (stack.TryPop(out result)) { Console.WriteLine("Popped: {0}", result); }
    if (stack.TryPop(out result)) { Console.WriteLine("Popped: {0}", result); }
    stack.PushRange(new int[] { 1, 2, 3 });
    int[] values = new int[2];
    // TryPopRange pops multiple items from the stack into the supplied array
    stack.TryPopRange(values);
    foreach (var item in values) { Console.WriteLine(item); }
    Console.ReadLine();
}
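Worth noting alongside the sample above: TryPopRange reports how many items it actually removed, which matters whenever the buffer is larger than the stack. A minimal sketch of that behavior (names here are illustrative, not from the sample):

using System;
using System.Collections.Concurrent;
using System.Linq;

class TryPopRangeDemo
{
    static void Main()
    {
        var stack = new ConcurrentStack<int>();
        stack.PushRange(new[] { 1, 2, 3 }); // pushed atomically; 3 ends up on top

        var buffer = new int[5]; // deliberately larger than the stack contents
        int popped = stack.TryPopRange(buffer); // returns 3; fills buffer[0..2]

        // Only the first 'popped' slots are meaningful; the rest stay untouched.
        Console.WriteLine($"Popped {popped} items: {string.Join(", ", buffer.Take(popped))}");
        // Output: Popped 3 items: 3, 2, 1 (LIFO order)
    }
}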
public FileHistoryService() { _storage = "recent.dat"; _basePath = Path.GetDirectoryName(typeof(FileHistoryService).Assembly.Location); _container = new ConcurrentStack<string>(); //InitializeFromFile(); }
public async Task Should_Succeed_With_Multiple_Rpc_Calls_At_The_Same_Time() { /* Setup */ var payloads = new List<Guid> { Guid.NewGuid(), Guid.NewGuid(), Guid.NewGuid() }; var uniqueResponse = new ConcurrentStack<Guid>(payloads); var requester = BusClientFactory.CreateDefault(); var responder = BusClientFactory.CreateDefault(); responder.RespondAsync<BasicRequest, BasicResponse>((req, i) => { Guid payload; if (!uniqueResponse.TryPop(out payload)) { Assert.True(false, "No entities in stack. Try purging the response queue."); } return Task.FromResult(new BasicResponse { Payload = payload }); }); /* Test */ var first = requester.RequestAsync<BasicRequest, BasicResponse>(new BasicRequest { Number = 1 }); var second = requester.RequestAsync<BasicRequest, BasicResponse>(new BasicRequest { Number = 2 }); var third = requester.RequestAsync<BasicRequest, BasicResponse>(new BasicRequest { Number = 3 }); Task.WaitAll(first, second, third); /* Assert */ Assert.Contains(first.Result.Payload, payloads); Assert.Contains(second.Result.Payload, payloads); Assert.Contains(third.Result.Payload, payloads); Assert.NotEqual(first.Result.Payload, second.Result.Payload); Assert.NotEqual(second.Result.Payload, third.Result.Payload); Assert.NotEqual(first.Result.Payload, third.Result.Payload); }
static void TestMultiplex() { counter = 0; ThreadMultiplex multiplex = new ThreadMultiplex(100); Random exec = new Random(); Thread[] threadArray = new Thread[1000]; ConcurrentStack<int> answer = new ConcurrentStack<int>(); for (int i = 0; i < 1000; i++) { int temp = -1; threadArray[i] = new Thread( ()=>{ multiplex.Enter(); Thread.Sleep(exec.Next(576)); temp = ++counter; multiplex.Release(); Thread.Sleep(exec.Next(146)); answer.Push(temp); } ); threadArray[i].Start(); //Console.WriteLine(temp); } foreach (var t in threadArray) { t.Join(); } foreach(var t in answer) { Console.WriteLine(t); } }
static void Main(string[] args) { ConcurrentStack<int> stack = new ConcurrentStack<int>(); stack.Push(42); int result; if (stack.TryPop(out result)) { Console.WriteLine(result); } stack.PushRange(new int[] { 1, 2, 3 }); int[] values = new int[2]; stack.TryPopRange(values); foreach (var i in values) { Console.WriteLine(i); } Console.Write("Press a key to exit"); Console.ReadKey(); }
public ConnectionWorkersPool( uint numbersOfBuffers, uint buffersSize, Action<object, SocketAsyncEventArgs> ioCompleted, IRequestProcessorFactory requestProcessorFactory) { _buffersSize = buffersSize; _numbersOfBuffers = numbersOfBuffers; _connectionWorkers = new ConcurrentStack<ConnectionWorker>(); for (var i = 0; i < numbersOfBuffers; i++) { var buffer = new byte[buffersSize]; for (var j = 0; j < buffer.Length; j++) { buffer[j] = (byte)j; } var connectionWorker = new ConnectionWorker { RequestProcessor = requestProcessorFactory.GetRequestProcessor() }; var readWriteAsync = new SocketAsyncEventArgs {UserToken = connectionWorker}; connectionWorker.SocketAsyncEventArgs = readWriteAsync; readWriteAsync.Completed += new EventHandler<SocketAsyncEventArgs>(ioCompleted); _connectionWorkers.Push(connectionWorker); } }
public SocketSniffer(NetworkInterfaceInfo nic, Filters<IPPacket> filters, IOutput output) { this.outputQueue = new BlockingCollection<TimestampedData>(); this.filters = filters; this.output = output; this.bufferManager = new BufferManager(BUFFER_SIZE, MAX_RECEIVE); this.receivePool = new ConcurrentStack<SocketAsyncEventArgs>(); var endPoint = new IPEndPoint(nic.IPAddress, 0); // IPv4 this.socket = new Socket(AddressFamily.InterNetwork, SocketType.Raw, ProtocolType.IP); this.socket.Bind(endPoint); this.socket.SetSocketOption(SocketOptionLevel.IP, SocketOptionName.HeaderIncluded, true); // Enter promiscuous mode try { this.socket.IOControl(IOControlCode.ReceiveAll, BitConverter.GetBytes(1), new byte[4]); } catch (Exception ex) { Console.WriteLine("Unable to enter promiscuous mode: {0}", ex); throw; } }
private static void DemoConcurrentStack()
{
    Console.WriteLine("Demo Concurrent Stack ----------------------");
    var shirts = new ConcurrentStack<string>();
    shirts.Push("Pluralsight");
    shirts.Push("WordPress");
    shirts.Push("Code School");
    Console.WriteLine("After pushing, count = " + shirts.Count);
    string item1; //= shirts.Pop();
    bool success = shirts.TryPop(out item1);
    if (success) Console.WriteLine("\r\nRemoving " + item1);
    else Console.WriteLine("stack was empty");
    string item2; //= shirts.Peek();
    success = shirts.TryPeek(out item2);
    if (success) Console.WriteLine("Peeking " + item2);
    else Console.WriteLine("stack was empty");
    Console.WriteLine("\r\nEnumerating:");
    foreach (string item in shirts) Console.WriteLine(item);
    Console.WriteLine("\r\nAfter enumerating, count = " + shirts.Count);
}
public static void TestBasicScenarios()
{
    ConcurrentStack<int> cs = new ConcurrentStack<int>();
    cs.Push(1);
    Task[] tks = new Task[2];
    tks[0] = Task.Run(() => { cs.Push(2); cs.Push(3); cs.Push(4); });
    tks[1] = Task.Run(() =>
    {
        int item1, item2;
        var ret1 = cs.TryPop(out item1); // at least one item must be present
        Assert.True(ret1);
        var ret2 = cs.TryPop(out item2); // two items were popped
        if (ret2)
        {
            Assert.True(item1 > item2, String.Format("{0} should be greater than {1}", item1, item2));
        }
        else // only one item was present
        {
            Assert.Equal(1, item1);
        }
    });
    Task.WaitAll(tks);
}
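The test above leans on the fact that pushes from a single thread are observed in order. A related, documented guarantee: PushRange inserts its whole batch atomically, so a concurrent observer never sees another thread's items interleaved inside the range. A small sketch demonstrating that (illustrative only):

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

class PushRangeAtomicityDemo
{
    static void Main()
    {
        var stack = new ConcurrentStack<int>();
        Task.WaitAll(
            Task.Run(() => stack.PushRange(new[] { 1, 2, 3 })),
            Task.Run(() => stack.PushRange(new[] { 4, 5, 6 })));

        // Snapshot in pop (LIFO) order: either 6,5,4,3,2,1 or 3,2,1,6,5,4 —
        // each range stays contiguous because PushRange is atomic.
        Console.WriteLine(string.Join(", ", stack.ToArray()));
    }
}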
//simulate sim;
//simulateHist simHist;
public ConcurrentBag<double> generatePaths(double initialPrice, int numberOfPaths, double timeToExpiry)
{
    ConcurrentBag<double> toReturn = new ConcurrentBag<double>();
    //var indices = Enumerable.Range(0, numberOfPaths);
    var rnd = new Random(42);
    ConcurrentStack<int> seeds = new ConcurrentStack<int>();
    for (int i = 0; i < numberOfPaths; ++i)
        seeds.Push(rnd.Next(1, numberOfPaths - 1));
    int steps = Convert.ToInt32(Math.Floor(timeToExpiry / bm.deltaT));
    Parallel.ForEach(seeds,
        //new ParallelOptions { MaxDegreeOfParallelism = 2 },
        seed =>
        {
            Thread.Sleep(1);
            simulate mySim = new simulate(simulator.simulate);
            double res = mySim(steps, initialPrice, seed, bm);
            toReturn.Add(res);
        });
    return toReturn;
}
public BufferManager(int totalBytes, int totalBufferBytesInEachSaeaObject) { _totalBytesInBufferBlock = totalBytes; _currentIndex = 0; _bufferBytesAllocatedForEachSaea = totalBufferBytesInEachSaeaObject; _freeIndexPool = new ConcurrentStack<int>(); }
public void PushTryPop(int producerThreads, int consumerThreads) { var stack = new ConcurrentStack<int>(); var startEvent = new ManualResetEventSlim(false); var finished = 0; var stop = false; var producerTasks = Enumerable.Range(0, producerThreads).Select(i => Task.Factory.StartNew(() => { var count = iterations / producerThreads; startEvent.Wait(); for (var j = 0; j < count; j++) stack.Push(0); Interlocked.Increment(ref finished); if (finished >= producerThreads) stop = true; }, TaskCreationOptions.LongRunning)).ToArray(); var consumerTasks = Enumerable.Range(0, consumerThreads).Select(i => Task.Factory.StartNew(() => { int num; startEvent.Wait(); while (!stop) stack.TryPop(out num); }, TaskCreationOptions.LongRunning)).ToArray(); var stopwatch = Stopwatch.StartNew(); startEvent.Set(); Task.WaitAll(producerTasks); Task.WaitAll(consumerTasks); stopwatch.StopAndLog(iterations); }
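One caveat with the harness above: stop is a plain captured bool, so in principle a consumer loop could keep reading a stale value. A hedged sketch of the same flag read and written through Volatile (an alternative, not the benchmark's actual code):

using System.Collections.Concurrent;
using System.Threading;

class ConsumerWithVolatileStop
{
    private bool _stop;

    public void Stop() => Volatile.Write(ref _stop, true);

    public void ConsumeLoop(ConcurrentStack<int> stack)
    {
        // Volatile.Read keeps the JIT from hoisting the flag check out of the loop.
        while (!Volatile.Read(ref _stop))
            stack.TryPop(out _);
    }
}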
public void verify_behaviour_for_concurrent_access_under_identical_keys()
{
    var keys = new[] {"a", "a"};
    var counter = new ConcurrentStack<int>();
    var storage = new ConcurrentStack<TestItem>();

    // first run
    var threads = MakeThreads(keys);
    threads.ForEach(t => t.Start(new object[] {storage, counter}));
    threads.ForEach(t => t.Join());
    Assert.Equal(1, counter.Count);
    Assert.Equal(2, storage.Count);
    var a = storage.First();
    Assert.Same(storage.First(), storage.Last());

    // cleanups and second run
    storage.Clear();
    counter.Clear();
    threads = MakeThreads(keys);
    threads.ForEach(t => t.Start(new object[] {storage, counter}));
    threads.ForEach(t => t.Join());
    Assert.Equal(0, counter.Count);
    Assert.Equal(2, storage.Count);
    var aa = storage.First();
    Assert.Same(storage.First(), storage.Last());
    Assert.Same(a, aa);
}
public void verify_behaviour_for_concurrent_access_under_different_keys()
{
    var keys = new[] {"a", "b"};
    var counter = new ConcurrentStack<int>(); // value factory threads
    var storage = new ConcurrentStack<TestItem>(); // cached items

    // first run
    var threads = MakeThreads(keys);
    threads.ForEach(t => t.Start(new object[] {storage, counter}));
    threads.ForEach(t => t.Join());
    Assert.Equal(2, counter.Count);
    Assert.Equal(2, storage.Count);
    Assert.NotSame(storage.First(), storage.Last());
    var a = storage.FirstOrDefault(x => x.Id == "a");
    var b = storage.FirstOrDefault(x => x.Id == "b");

    // cleanups and second run
    storage.Clear();
    counter.Clear();
    threads = MakeThreads(keys);
    threads.ForEach(t => t.Start(new object[] {storage, counter}));
    threads.ForEach(t => t.Join());
    Assert.Equal(0, counter.Count);
    Assert.Equal(2, storage.Count);
    Assert.NotSame(storage.First(), storage.Last());
    var aa = storage.FirstOrDefault(x => x.Id == "a");
    var bb = storage.FirstOrDefault(x => x.Id == "b");
    Assert.Same(a, aa);
    Assert.Same(b, bb);
}
public void Setup() { stack = new ConcurrentStack<int>(); for (int i = 0; i < 10; i++) { stack.Push(i); } }
public SocketAsyncEventArgsPool(int count) { _pool = new ConcurrentStack<SocketAsyncEventArgs>(); for (var i = 0; i < count; i++) { _pool.Push(new SocketAsyncEventArgs()); } }
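A pool like the one above is typically consumed with TryPop/Push: pop to rent, push to return, and allocate a fresh instance when the pool runs dry. A minimal sketch of that rent/return pattern (the Rent/Return names are assumptions, not part of the sample):

using System.Collections.Concurrent;
using System.Net.Sockets;

class SaeaPoolSketch
{
    private readonly ConcurrentStack<SocketAsyncEventArgs> _pool = new ConcurrentStack<SocketAsyncEventArgs>();

    // Rent: reuse a pooled instance if one is available, otherwise allocate.
    public SocketAsyncEventArgs Rent() => _pool.TryPop(out var args) ? args : new SocketAsyncEventArgs();

    // Return: push the instance back for the next caller.
    public void Return(SocketAsyncEventArgs args) => _pool.Push(args);
}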
/// <summary>
/// Creates a buffer manager that carves one large byte block into fixed-size chunks and tracks the free chunk offsets.
/// </summary>
/// <param name="bufferSize">Size in bytes of each chunk.</param>
/// <param name="chunkCount">Number of chunks in the block.</param>
public BufferManager(int bufferSize, int chunkCount) { m_bufferSize = bufferSize; m_freeOffset = new ConcurrentStack<int>(); m_bufferBlock = new byte[bufferSize * chunkCount]; for (int i = 0; i < chunkCount; i++) m_freeOffset.Push(bufferSize * i); }
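The stack of offsets makes checkout and release trivially thread-safe: each offset is held by at most one caller between a pop and the matching push. A hedged sketch of the rent/release side this constructor sets up (TryRent/Release are assumed names for illustration):

using System.Collections.Concurrent;

class ChunkedBufferSketch
{
    private readonly byte[] _block;
    private readonly ConcurrentStack<int> _freeOffsets = new ConcurrentStack<int>();

    public ChunkedBufferSketch(int bufferSize, int chunkCount)
    {
        _block = new byte[bufferSize * chunkCount];
        for (int i = 0; i < chunkCount; i++)
            _freeOffsets.Push(bufferSize * i);
    }

    // Pop an offset to claim a chunk; false means the pool is exhausted.
    public bool TryRent(out int offset) => _freeOffsets.TryPop(out offset);

    // Push the offset back so the chunk can be reused.
    public void Release(int offset) => _freeOffsets.Push(offset);
}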
static ProductsController() { _products = new ConcurrentStack<Product>(new List<Product> { new Product { Name = "Milk", Price = 2.33m }, new Product { Name = "Cheese", Price = 55.33m }, new Product { Name = "Tesla", Price = 8989.33m } }); }
private SocketAsyncEventArgsFactory(int chunkSize) { this.UpdateLock = new object(); this.chunkSize = chunkSize; this.factories = new ConcurrentStack<BufferFactory>(); this.knownEventArgs = new ConcurrentDictionary<SocketAsyncEventArgs, Tuple<BufferFactory, ArraySegment<byte>>>(); }
public int ConcurrentStackExampleRange() { var stack = new ConcurrentStack<int>(); stack.PushRange(new int[] { 1, 2, 3 }); return stack.TryPopRange(new int[2]); }
public void Setup() { trueVaultClient = new TrueVaultClient(TestConfig.Instance.TrueVaultApiKey); documentSuccessResponses = new ConcurrentStack<DocumentSaveSuccessResponse>(); schemaSuccessResponses = new ConcurrentStack<SchemaSaveSuccessResponse>(); testVaultId = Guid.Parse(TestConfig.Instance.TrueVaultTestVault); Mapper.AssertConfigurationIsValid(); }
static ProductsController() { _products = new ConcurrentStack<Product>(new List<Product> { new Product { Name = "Peanut Butter", Price = 3.99m }, new Product { Name = "Jelly", Price = 4.99m } }); }
/// <summary>
/// Slow path: does not use pointers and LockBits (reads pixels via GetPixel)
/// </summary>
/// <param name="targetBitmapImage">The image to filter</param>
/// <param name="matrix">The convolution kernel</param>
/// <param name="offset">Value added to each colour channel after scaling</param>
/// <param name="factor">Divisor used to normalize the kernel sum</param>
/// <returns>The filtered image</returns>
public static BitmapImage ApplyFilterFromMatrix(BitmapImage targetBitmapImage, int[,] matrix, int offset, float factor)
{
    Bitmap internalBitmap = ConvertBitmapImageToBitmap(targetBitmapImage);
    int width = internalBitmap.Width;
    int height = internalBitmap.Height;
    var slices = SplitBitmap(internalBitmap);
    var threadStack = new ConcurrentStack<Pixel>();
    Parallel.ForEach(slices, slice =>
    {
        var sliceWidth = slice.Bitmap.Width;
        var sliceHeight = slice.Bitmap.Height;
        //Skip columns that are in the offset
        for (var w = 0 + slice.OffsetLeft; w < sliceWidth - slice.OffsetRight; w++)
        {
            for (var h = 0; h < sliceHeight; h++)
            {
                int red = 0;
                int green = 0;
                int blue = 0;
                for (int r = w - (matrix.GetLength(0) / 2), matrixRow = 0; r <= w + (matrix.GetLength(0) / 2); r++, matrixRow++)
                {
                    if (r < 0 || r >= sliceWidth) continue;
                    for (int c = h - (matrix.GetLength(1) / 2), matrixCol = 0; c <= h + (matrix.GetLength(1) / 2); c++, matrixCol++)
                    {
                        if (c < 0 || c >= sliceHeight) continue;
                        if (matrix[matrixRow, matrixCol] != 0)
                        {
                            var currentColor = slice.Bitmap.GetPixel(r, c);
                            red += (currentColor.R) * (matrix[matrixRow, matrixCol]);
                            blue += (currentColor.B) * (matrix[matrixRow, matrixCol]);
                            green += (currentColor.G) * (matrix[matrixRow, matrixCol]);
                        }
                    }
                }
                red = Math.Min(Math.Max((int)(red / factor + offset), 0), 255);
                green = Math.Min(Math.Max((int)(green / factor + offset), 0), 255);
                blue = Math.Min(Math.Max((int)(blue / factor + offset), 0), 255);
                var pixel = new Pixel { X = w + slice.SliceXStartInOriginal, Y = h, Color = Color.FromArgb(red, green, blue) };
                threadStack.Push(pixel);
            }
        }
    });
    internalBitmap = ConstructBitmap(threadStack, internalBitmap);
    BitmapImage outputBitmapImage = ConvertBitmapToBitmapImage(internalBitmap);
    return outputBitmapImage;
}
/// <summary>
/// Specific constructor to provide Maximum Stack Size.
/// </summary>
/// <param name="maximumCommandStackSize">The default stack size is 50, unless a positive value is provided.</param>
public CommandManager(int maximumCommandStackSize) { MaximumCommandStackSize = maximumCommandStackSize > 0 ? maximumCommandStackSize : 50; Commands = new ConcurrentStack<ICommand>(); UndoneCommands = new ConcurrentStack<ICommand>(); RedoneCommands = new ConcurrentStack<ICommand>(); }
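LIFO order is exactly what undo/redo needs: the most recently executed command must be undone first. A hedged sketch of how such stacks are usually driven (this ICommand interface and the method names are assumptions for illustration, not the CommandManager's actual API):

using System.Collections.Concurrent;

interface ICommand { void Execute(); void Undo(); }

class UndoRedoSketch
{
    private readonly ConcurrentStack<ICommand> _done = new ConcurrentStack<ICommand>();
    private readonly ConcurrentStack<ICommand> _undone = new ConcurrentStack<ICommand>();

    public void Run(ICommand command) { command.Execute(); _done.Push(command); }

    // Undo the most recent command and park it on the redo stack.
    public bool TryUndo()
    {
        if (!_done.TryPop(out var command)) return false;
        command.Undo();
        _undone.Push(command);
        return true;
    }

    // Redo the most recently undone command.
    public bool TryRedo()
    {
        if (!_undone.TryPop(out var command)) return false;
        command.Execute();
        _done.Push(command);
        return true;
    }
}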
public AsyncBarrier1(int participantCount) { if (participantCount <= 0) { throw new ArgumentOutOfRangeException(nameof(participantCount)); } _remainingParticipants = _participantCount = participantCount; _waiters = new ConcurrentStack<TaskCompletionSource<bool>>(); }
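For context, this constructor follows Stephen Toub's async barrier pattern: the companion method pushes one TaskCompletionSource per arriving participant and releases the whole stack when the last one arrives. A sketch of that companion method under those assumptions (field names match the constructor above; the body is illustrative, not the class's actual code):

public Task SignalAndWait()
{
    var tcs = new TaskCompletionSource<bool>();
    _waiters.Push(tcs);
    if (Interlocked.Decrement(ref _remainingParticipants) == 0)
    {
        // Last participant: reset the count and release everyone waiting.
        _remainingParticipants = _participantCount;
        var waiters = _waiters.ToArray();
        _waiters.Clear();
        foreach (var waiter in waiters)
            Task.Run(() => waiter.TrySetResult(true));
    }
    return tcs.Task;
}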
public void Setup() { var pages = new ConcurrentStack<IGooglePageModel>(); Parallel.Invoke(() => pages.Push(new GooglePageModel<InternetExplorerGrid>()), () => pages.Push(new GooglePageModel<FirefoxGrid>())); var parallelPage = new ParallelPageModel<IGooglePageModel>(pages.ToArray()); _page = parallelPage.Cast(); _page.Search("SQL For .NET Programmers"); }
static MoviesController() { _movies = new ConcurrentStack<Movie>(new List<Movie> { new Movie { Title = "Fight Club", Director = "Fincher" }, new Movie { Title = "Inception", Director = "Nolan" }, new Movie { Title = "Fargo", Director = "Coen Brothers"} }); }
public BatchManager(BatchSettings settings) { processed = 0; isRunning = false; files = new ConcurrentStack<string>(); batchSettings = settings; ParameterizedThreadStart threadStart = new ParameterizedThreadStart(operationExecute); operationThread = new Thread(threadStart); regEx = new Regex(":"); }
public BufferManager(int maximumAllocations, int allocatedBufferSize) { _buffers = new byte[maximumAllocations*allocatedBufferSize]; _maximumAllocations = maximumAllocations; _allocatedBufferSize = allocatedBufferSize; _allocationIndex = 0; _allocationPool = new ConcurrentStack<BufferAllocation>(); }
public static T[] ToArray<T>(ConcurrentStack<T> concurrentStack) { ConcurrentCollectionHelper.Interleave(); return concurrentStack.ToArray(); }
public static int TryPopRange<T>(ConcurrentStack<T> concurrentStack, T[] items, int startIndex, int count) { ConcurrentCollectionHelper.Interleave(); return concurrentStack.TryPopRange(items, startIndex, count); }
public static int TryPopRange<T>(ConcurrentStack<T> concurrentStack, T[] items) { ConcurrentCollectionHelper.Interleave(); return concurrentStack.TryPopRange(items); }
static SasManagerHub() { Connections = new ConcurrentStack<string>(); }
public static void Clear<T>(ConcurrentStack<T> concurrentStack) { ConcurrentCollectionHelper.Interleave(); concurrentStack.Clear(); }
public static void PushRange<T>(ConcurrentStack<T> concurrentStack, T[] items, int startIndex, int count) { ConcurrentCollectionHelper.Interleave(); concurrentStack.PushRange(items, startIndex, count); }
public static void CopyTo<T>(ConcurrentStack<T> concurrentStack, T[] array, int index) { ConcurrentCollectionHelper.Interleave(); concurrentStack.CopyTo(array, index); }
public MemoryMappedFileManager() { MemoryMappedFileCollection = new ConcurrentStack<MemoryMappedFile>(); }
private void AddCustomActionsToResult(UserCustomActionCollection coll, ref ConcurrentStack <CustomActionsResult> customActions, ref ConcurrentDictionary <string, CustomizationResult> customizationResults, ref ConcurrentStack <UIExperienceScanError> UIExpScanErrors, string listUrl = "", string listTitle = "") { var baseUri = new Uri(this.url); var webAppUrl = baseUri.Scheme + "://" + baseUri.Host; foreach (UserCustomAction uca in coll) { try { bool add = false; CustomActionsResult result = new CustomActionsResult() { SiteUrl = this.url, Url = !String.IsNullOrEmpty(listUrl) ? $"{webAppUrl}{listUrl}" : this.url, SiteColUrl = this.siteColUrl, ListTitle = listUrl, Title = uca.Title, Name = uca.Name, Location = uca.Location, RegistrationType = uca.RegistrationType, RegistrationId = uca.RegistrationId, CommandActions = "", //ImageMaps = "", ScriptBlock = "", ScriptSrc = "", }; if (!(uca.Location.Equals("EditControlBlock", StringComparison.InvariantCultureIgnoreCase) || uca.Location.Equals("CommandUI.Ribbon", StringComparison.InvariantCultureIgnoreCase))) { add = true; result.ScriptBlock = uca.ScriptBlock != null ? uca.ScriptBlock : ""; result.ScriptSrc = uca.ScriptSrc != null ? uca.ScriptSrc : ""; result.Problem = "Invalid location"; } Guid registrationIDGuid; if (Guid.TryParse(uca.RegistrationId, out registrationIDGuid)) { result.Problem = !String.IsNullOrEmpty(result.Problem) ? $"{result.Problem}, Specific list registration" : "Specific list registration"; add = true; } if (!string.IsNullOrEmpty(uca.CommandUIExtension)) { XmlDocument doc = new XmlDocument(); string xmlString = uca.CommandUIExtension; xmlString = xmlString.Replace("http://schemas.microsoft.com/sharepoint/", ""); doc.LoadXml(xmlString); XmlNodeList handlers = doc.SelectNodes("/CommandUIExtension/CommandUIHandlers/CommandUIHandler"); foreach (XmlNode handler in handlers) { if (handler.Attributes["CommandAction"] != null && handler.Attributes["CommandAction"].Value.ToLower().Contains("javascript")) { result.CommandActions = "JS Found"; result.Problem = !String.IsNullOrEmpty(result.Problem) ? $"{result.Problem}, JavaScript embedded" : "JavaScript embedded"; add = true; break; } } // Skipping image maps as these UCA do show, but without image //XmlNodeList imageButtons = doc.SelectNodes("//Button"); //foreach (XmlNode btn in imageButtons) //{ // //Image16by16Left, Image16by16Top, Image32by32Left, Image32by32Top // if (btn.Attributes["Image16by16"] != null || btn.Attributes["Image32by32"] != null) // { // result.ImageMaps = "Found"; // result.Problem = !String.IsNullOrEmpty(result.Problem) ? 
$"{result.Problem}, ImageMap used" : "ImageMap used"; // add = true; // break; // } //} } if (add) { customActions.Push(result); if (customizationResults.ContainsKey(result.Url)) { var customizationResult = customizationResults[result.Url]; customizationResult.IgnoredCustomAction = true; if (!customizationResults.TryUpdate(result.Url, customizationResult, customizationResult)) { UIExperienceScanError error = new UIExperienceScanError() { Error = $"Could not update custom action scan result for {customizationResult.Url}", SiteURL = this.url, SiteColUrl = this.siteColUrl }; UIExpScanErrors.Push(error); Console.WriteLine($"Could not update custom action scan result for {customizationResult.Url}"); } } else { var customizationResult = new CustomizationResult() { SiteUrl = result.SiteUrl, Url = result.Url, SiteColUrl = this.siteColUrl, IgnoredCustomAction = true }; if (!customizationResults.TryAdd(customizationResult.Url, customizationResult)) { UIExperienceScanError error = new UIExperienceScanError() { Error = $"Could not add custom action scan result for {customizationResult.Url}", SiteURL = url, SiteColUrl = siteColUrl }; UIExpScanErrors.Push(error); Console.WriteLine($"Could not add custom action scan result for {customizationResult.Url}"); } } } } catch (Exception ex) { UIExperienceScanError error = new UIExperienceScanError() { Error = ex.Message, SiteURL = this.url, SiteColUrl = this.siteColUrl }; UIExpScanErrors.Push(error); Console.WriteLine("Error for site {1}: {0}", ex.Message, this.url); } } }
public static IEnumerator<T> GetEnumerator<T>(ConcurrentStack<T> concurrentStack) { ConcurrentCollectionHelper.Interleave(); return concurrentStack.GetEnumerator(); }
public static bool TryPop<T>(ConcurrentStack<T> concurrentStack, out T result) { ConcurrentCollectionHelper.Interleave(); return concurrentStack.TryPop(out result); }
public static void Push<T>(ConcurrentStack<T> concurrentStack, T item) { ConcurrentCollectionHelper.Interleave(); concurrentStack.Push(item); }
public static void PushRange<T>(ConcurrentStack<T> concurrentStack, T[] items) { ConcurrentCollectionHelper.Interleave(); concurrentStack.PushRange(items); }
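All of these wrappers call ConcurrentCollectionHelper.Interleave() before forwarding to the real ConcurrentStack method, presumably to perturb thread scheduling so concurrency tests explore more interleavings. The helper itself is not shown in the samples; a minimal sketch of what such a helper might look like (purely an assumption about its intent):

using System;
using System.Threading;

static class ConcurrentCollectionHelper
{
    [ThreadStatic] private static Random t_random;

    // Randomly yield, sleep, or proceed so concurrent callers hit the
    // collection in varied orders. Illustrative only.
    public static void Interleave()
    {
        var random = t_random ?? (t_random = new Random(Thread.CurrentThread.ManagedThreadId));
        switch (random.Next(3))
        {
            case 0: Thread.Yield(); break;
            case 1: Thread.Sleep(0); break;
            default: break; // proceed immediately
        }
    }
}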
public DisposeScope(ConcurrentStack<object> scope) => _scope = scope;
private static T Pop<T>(ConcurrentStack<T> stack) => stack.TryPop(out var t) ? t : default;
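Note the trade-off in the helper above: it returns default(T) when the stack is empty, which is null for reference types but 0 for int, indistinguishable from a genuinely popped zero. If that distinction matters, a nullable variant for value types keeps it (illustrative alternative, not from the sample):

// Returns null, rather than default(T), when the stack is empty.
private static T? PopOrNull<T>(ConcurrentStack<T> stack) where T : struct => stack.TryPop(out var t) ? t : (T?)null;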
/// <summary> /// Learns a model that can map the given inputs to the desired outputs. /// </summary> /// <param name="x">The model inputs.</param> /// <param name="weights">The weight of importance for each input sample.</param> /// <returns>A model that has learned how to produce suitable outputs /// given the input data <paramref name="x" />.</returns> public MeanShiftClusterCollection Learn(double[][] x, int[] weights = null) { if (x.Length != weights.Length) { throw new DimensionMismatchException("weights", "The weights and points vector must have the same dimension."); } // First of all, construct map of the original points. We will // be saving the weight of every point in the node of the tree. KDTree <int> tree = KDTree.FromData(x, weights, Distance); // Let's sample some points in the problem surface double[][] seeds = createSeeds(x, 2 * Bandwidth); // Now, we will duplicate those points and make them "move" // into this surface in the direction of the surface modes. double[][] current = seeds.MemberwiseClone(); // We will store any modes that we find here var maxima = new ConcurrentStack <double[]>(); // Optimization for uniform kernel Action <ICollection <NodeDistance <KDTreeNode <int> > >, double[]> func; if (kernel is UniformKernel) { func = uniform; } else { func = general; } // For each seed if (UseParallelProcessing) { Parallel.For(0, current.Length, i => move(tree, current, i, maxima, func)); for (int i = 0; i < current.Length; i++) { supress(current, i, maxima); } } else { for (int i = 0; i < current.Length; i++) { move(tree, current, i, maxima, func); } } var modes = maxima.ToArray(); // At this point, the current points have moved into // the location of the modes of the surface. Now we // have to backtrack and check, for each mode, from // where those points departed from. int[] labels = classify(modes: modes, points: current); // Now we create a decision map using the original seed positions tree = KDTree.FromData(seeds, labels, Distance, inPlace: true); clusters = new MeanShiftClusterCollection(this, modes.Length, tree, modes); if (ComputeLabels || ComputeProportions) { int sum = 0; int[] counts = new int[modes.Length]; labels = new int[x.Length]; for (int i = 0; i < labels.Length; i++) { int j = tree.Nearest(x[i]).Value; labels[i] = j; counts[j] += weights[i]; sum += weights[i]; } for (int i = 0; i < counts.Length; i++) { clusters.Proportions[i] = counts[i] / (double)sum; } } return(clusters); }
public ObjPool() { freeObjects = new ConcurrentStack<T>(); }
void FillHashBuff(ParallelOptions po) { int TotalHashGenCount = 0; int HashGenCnt = 0; int LoadedCnt = 0; HashRec[] hashX; Stopwatch sw = Stopwatch.StartNew(); do { Extract next = null; #region Partition // prescan enough entries to not overspill the specified hash buffer count long CountForMaxBuff = 0; ConcurrentStack <Extract> ReadyList = new ConcurrentStack <Extract>(); while (!DoneDirScan || !LoadList.IsEmpty) { LoadList.TryPop(out next); if (next == null && !DoneDirScan) { if (po.CancellationToken.IsCancellationRequested) { return; } Thread.Yield(); continue; } foreach (var ms in next.Sections) { if (!ms.IsCode && !ms.IsExec) { continue; } var BufferSize = (uint)((ms.RawFileSize + 0xfff) & ~0xfff); CountForMaxBuff += FractHashTree.TotalHashesForSize(BufferSize, MinHashSize); } if (CountForMaxBuff < BufferCount) { ReadyList.Push(next); } // add it back for reprocessing else { LoadList.Push(next); if (po.CancellationToken.IsCancellationRequested) { return; } po.CancellationToken.ThrowIfCancellationRequested(); break; } } #endregion try { hashX = new HashRec[BufferCount]; } catch (Exception ex) { WriteColor(ConsoleColor.Red, $"BuferCount {BufferCount} too large, try something a bit smaller (however keep it as large as you can :)"); WriteColor(ConsoleColor.Yellow, $"{ex.ToString()}"); source.Cancel(); return; } //WriteColor(ConsoleColor.White, $"Parallel partition from {StartingAvailable} to {CurrAvailableMax} starting."); Parallel.ForEach(ReadyList, (hashFile) => //for (int i = StartingAvailable; i < CurrAvailableMax; i++) { if (po.CancellationToken.IsCancellationRequested) { return; } Interlocked.Increment(ref LoadedCnt); foreach (var ms in hashFile.Sections) { // ONLY hash CODE/EXEC file sections & PEHeader if (!ms.IsCode && !ms.IsExec) { continue; } if (ms.RawFileSize <= 0) { LogEx(0, $"Compressed/malishous PE {hashFile.FileName} is too small. Consider manual review of section [{ms.Name}] (e.g. 
UPX will overlap sections so we will hash it on next pass, TODO: UPX decoder)."); continue; } //var tot = (int)FractHashTree.TotalHashesForSize(ms.RawFileSize, MinHashSize); //var myCnt = Interlocked.Add(ref HashGenCnt, tot); //var fht = new FractHashTree(hashFile.FileName, ms, MinHashSize, GetHP); //var dht = fht.DumpRecTree(); //var len = dht.Count(); //var myLim = Interlocked.Add(ref HashGenCnt, len); //dht.CopyTo(0, hashX, myLim - len, len); var ReadSize = ms.VirtualSize; var BufferSize = (int)((ReadSize + 0xfff) & ~0xfff); var memBuff = new byte[BufferSize]; using (var fread = new FileStream(hashFile.FileName, FileMode.Open, FileAccess.Read, FileShare.Read, PAGE_SIZE)) { fread.Seek(ms.RawFilePointer, SeekOrigin.Begin); fread.Read(memBuff, 0, (int)ReadSize); } var recs = FractHashTree.CreateRecsFromMemory(memBuff, MinHashSize, GetHP, hashFile.rID, 0, 64, false); if (HashGenCnt + recs.Length > hashX.Length) { LoadList.Push(hashFile); break; } var myLim = Interlocked.Add(ref HashGenCnt, recs.Length); recs.CopyTo(hashX, myLim - recs.Length); //FractHashTree.CreateRecsFromFile(hashFile.FileName, ms, MinHashSize, tot, hashX, myCnt - tot, GetHP); if ((LoadedCnt % 100) == 0 && sw.Elapsed.TotalSeconds > 0) { WriteColor(ConsoleColor.Green, $"HashGen entries: {HashGenCnt:N0} - per second { ((TotalHashGenCount + HashGenCnt) / sw.Elapsed.TotalSeconds):N0}"); } //} } }); if (po.CancellationToken.IsCancellationRequested) { return; } TotalHashGenCount += HashGenCnt; WriteColor(ConsoleColor.Green, $"Filled queue {HashGenCnt:N0}, signaling readyqueue."); WriteColor(ConsoleColor.Green, $"Loaded-Files/Generated-Hash-Values {LoadedCnt:N0}/{TotalHashGenCount:N0}. HashGen: {(TotalHashGenCount / sw.Elapsed.TotalSeconds):N0} per second."); sw.Stop(); ReadyQueue.Add(Tuple.Create <int, HashRec[]>(HashGenCnt, hashX)); HashGenCnt = 0; sw.Start(); } while (!DoneDirScan || !LoadList.IsEmpty); sw.Stop(); WriteColor(ConsoleColor.Green, $"Finished Files/Hashes {LoadedCnt:N0}/{TotalHashGenCount:N0}. HashGen: {(TotalHashGenCount / sw.Elapsed.TotalSeconds):N0} per second."); return; }
public DisposableBag(string objectName, string message = null) { _objectName = objectName; _message = message; _disposables = new ConcurrentStack<Action>(); }
private static void ImportDataFromCSV(string exportPath, string exportFileName, string headerText, ConcurrentStack<Person> csvFileList) { try { var targetPath = Path.Combine(Environment.CurrentDirectory, exportPath); var fullFilePath = Path.Combine(targetPath, exportFileName) + ".csv"; if (File.Exists(fullFilePath)) { if (new FileInfo(fullFilePath).Length == 0) { Console.WriteLine(string.Format("Target file: {0} has no content (is empty)", fullFilePath)); return; } headerText = headerText.Replace(',', '|'); Console.WriteLine($"|{headerText}|"); var sortList = csvFileList.OrderBy(p => p.ID); var allLines = string.Join(Environment.NewLine, sortList.Select(s => s.ToOutputString())); Console.WriteLine(allLines); } else { Console.WriteLine(string.Format("Target file: {0} does not exist", fullFilePath)); } } catch (Exception ex) { Console.WriteLine("Error message: {0}, StackTrace: {1}", ex.Message, ex.StackTrace); } }
private BuilderManager() { _syncRoot = new object(); _stack = new ConcurrentStack<BuildToken>(); }
static void Main(string[] args)
{
    var appSettings = ConfigurationManager.AppSettings;
    try
    {
        var exportPath = appSettings["ExportPath"] ?? string.Empty;
        var exportFileName = appSettings["ExportName"] ?? string.Empty;
        var token = appSettings["InputToken"] ?? ",";
        var headerString = appSettings["CSVHeader"] ?? string.Empty;
        var sequenceFormat = appSettings["InputSequence"] ?? string.Empty;
        IList<Person> exportList = new List<Person>();
        ConcurrentStack<Person> csvFileList = new ConcurrentStack<Person>();
        Debug.Assert(!string.IsNullOrWhiteSpace(exportPath));
        Debug.Assert(!string.IsNullOrWhiteSpace(exportFileName));
        Debug.Assert(!string.IsNullOrWhiteSpace(token));
        Debug.Assert(!string.IsNullOrWhiteSpace(headerString));
        var curStatus = Status.Exit;
        var preStatus = Status.Exit;
        Console.WriteLine("Load CSV file to memory...");
        // load CSV file
        Task loadTask = new Task(() => LoadDataFromCSVFile(exportPath, exportFileName, csvFileList));
        loadTask.Start();
        loadTask.Wait();
        do
        {
            if (curStatus != Status.Contiune)
            {
                Console.Write("Please enter the mode you want to proceed with (0: ExportToCSVFile, 1: ImportFromCSV, Others: Exit): ");
                var mode = Console.ReadLine();
                curStatus = GetProceedMode(mode);
            }
            else
            {
                curStatus = preStatus;
            }
            switch (curStatus)
            {
                case Status.ExportCSV:
                    Console.WriteLine($"Please enter the format in sequence, {{{sequenceFormat}}}.");
                    Console.WriteLine($"Note you should separate each column with the token '{token}'");
                    var inString = Console.ReadLine();
                    string[] data = inString.Split(Convert.ToChar(token));
                    string[] headers = headerString.Split(Convert.ToChar(token));
                    var person = ValidateCSVInput(data, headers, exportList, csvFileList);
                    if (person != null)
                    {
                        // add to the export list
                        exportList.Add(person);
                    }
                    Console.Write("Want to export next record (Y/N): ");
                    var ans = Console.ReadLine();
                    var goNext = string.Compare(ans, "Y", true) == 0;
                    if (goNext)
                    {
                        curStatus = Status.Contiune;
                        preStatus = Status.ExportCSV;
                    }
                    else
                    {
                        // write data to CSV file
                        Task exportTask = new Task(() => ExportDataToCSV(exportList, exportPath, exportFileName, headerString));
                        exportTask.Start();
                        exportTask.Wait();
                        exportList.Clear(); // reset the list
                        Console.WriteLine("Load CSV file to memory...");
                        loadTask = new Task(() => LoadDataFromCSVFile(exportPath, exportFileName, csvFileList));
                        loadTask.Start();
                        loadTask.Wait();
                    }
                    break;
                case Status.ImportCSV:
                    Console.WriteLine("Trying to import file to console, please wait...");
                    Task importTask = new Task(() => ImportDataFromCSV(exportPath, exportFileName, headerString, csvFileList));
                    importTask.Start();
                    importTask.Wait();
                    break;
                case Status.Exit:
                    break;
            }
        } while (curStatus != Status.Exit);
    }
    catch (Exception ex)
    {
        Console.WriteLine("Error message: {0}, StackTrace: {1}", ex.Message, ex.StackTrace);
    }
    finally
    {
        Console.WriteLine("-------------");
        Console.WriteLine("Please press any key to exit.");
        Console.ReadKey();
    }
}
/// <summary> /// Analyze passed site for ignored user custom actions /// </summary> /// <param name="cc">ClientContext object of the site to scan</param> /// <param name="customActions">Custom action details</param> /// <param name="customizationResults">Customization details</param> public void Analyze(ClientContext cc, ref ConcurrentStack <CustomActionsResult> customActions, ref ConcurrentDictionary <string, CustomizationResult> customizationResults, ref ConcurrentStack <UIExperienceScanError> UIExpScanErrors) { Console.WriteLine("Custom actions... " + url); // List scoped user custom actions var lists = cc.Web.GetListsToScan(); foreach (var list in lists) { AddCustomActionsToResult(list.UserCustomActions, ref customActions, ref customizationResults, ref UIExpScanErrors, list.RootFolder.ServerRelativeUrl, list.Title); } Web web = cc.Web; if (!web.IsSubSite()) { // Site scoped user custom actions Site site = cc.Site; site.EnsureProperty(p => p.UserCustomActions); AddCustomActionsToResult(site.UserCustomActions, ref customActions, ref customizationResults, ref UIExpScanErrors); } // Web scoped user custom actions web.EnsureProperty(p => p.UserCustomActions); AddCustomActionsToResult(web.UserCustomActions, ref customActions, ref customizationResults, ref UIExpScanErrors); }
private static void LoadDataFromCSVFile(string exportPath, string exportFileName, ConcurrentStack<Person> csvFileList)
{
    try
    {
        csvFileList.Clear();
        var targetPath = Path.Combine(Environment.CurrentDirectory, exportPath);
        var fullFilePath = Path.Combine(targetPath, exportFileName) + ".csv";
        if (!File.Exists(fullFilePath))
        {
            return; // do nothing
        }
        string[] allLines = File.ReadAllLines(fullFilePath);
        Parallel.For(0, allLines.Length, index =>
        {
            if (index == 0 || string.IsNullOrWhiteSpace(allLines[index]))
            {
                return;
            }
            string[] data = allLines[index].Split(',');
            var person = new Person()
            {
                ID = int.Parse(data[0]),
                Name = data[1],
                Sex = (Sex)Enum.Parse(typeof(Sex), Extension.SexDic[data[2]], true),
                Birthday = DateTime.Parse(data[3])
            };
            csvFileList.Push(person);
        });
    }
    catch (Exception ex)
    {
        Console.WriteLine("Error message: {0}, StackTrace: {1}", ex.Message, ex.StackTrace);
    }
}
/// <summary> /// Initializes the connection. /// </summary> /// <exception cref="SocketException">Throws a SocketException when the connection could not be established with the host</exception> /// <exception cref="AuthenticationException" /> /// <exception cref="UnsupportedProtocolVersionException"></exception> public Task <Response> Open() { _freeOperations = new ConcurrentStack <short>(Enumerable.Range(0, MaxConcurrentRequests).Select(s => (short)s).Reverse()); _pendingOperations = new ConcurrentDictionary <short, OperationState>(); _writeQueue = new ConcurrentQueue <OperationState>(); if (Options.CustomCompressor != null) { Compressor = Options.CustomCompressor; } else if (Options.Compression == CompressionType.LZ4) { Compressor = new LZ4Compressor(); } else if (Options.Compression == CompressionType.Snappy) { Compressor = new SnappyCompressor(); } //Init TcpSocket _tcpSocket.Init(); _tcpSocket.Error += CancelPending; _tcpSocket.Closing += () => CancelPending(null, null); //Read and write event handlers are going to be invoked using IO Threads _tcpSocket.Read += ReadHandler; _tcpSocket.WriteCompleted += WriteCompletedHandler; return(_tcpSocket .Connect() .Then(_ => Startup()) .ContinueWith(t => { if (t.IsFaulted && t.Exception != null) { //Adapt the inner exception and rethrow var ex = t.Exception.InnerException; var protocolVersion = _serializer.ProtocolVersion; if (ex is ProtocolErrorException) { //As we are starting up, check for protocol version errors //There is no other way than checking the error message from Cassandra if (ex.Message.Contains("Invalid or unsupported protocol version")) { throw new UnsupportedProtocolVersionException(protocolVersion, ex); } } if (ex is ServerErrorException && protocolVersion >= 3 && ex.Message.Contains("ProtocolException: Invalid or unsupported protocol version")) { //For some versions of Cassandra, the error is wrapped into a server error //See CASSANDRA-9451 throw new UnsupportedProtocolVersionException(protocolVersion, ex); } throw ex; } return t.Result; }, TaskContinuationOptions.ExecuteSynchronously) .Then(response => { if (response is AuthenticateResponse) { return StartAuthenticationFlow(((AuthenticateResponse)response).Authenticator); } if (response is ReadyResponse) { return TaskHelper.ToTask(response); } throw new DriverInternalError("Expected READY or AUTHENTICATE, obtained " + response.GetType().Name); })); }
private static Person ValidateCSVInput(string[] data, string[] headers, IList<Person> exportList, ConcurrentStack<Person> csvFileList)
{
    if (data.Length != headers.Length)
    {
        Console.WriteLine("The input data format was unexpected, please follow the input rule");
        return null;
    }
    // check id
    int id = -1;
    if (!int.TryParse(data[0], out id))
    {
        Console.WriteLine("Invalid [ID] input, please try again");
        return null;
    }
    if (exportList.Any(p => p.ID == id) || csvFileList.Any(p => p.ID == id))
    {
        Console.WriteLine("Duplicated [ID] input, please try again");
        return null;
    }
    // check name
    var name = data[1];
    if (string.IsNullOrWhiteSpace(name))
    {
        Console.WriteLine("Invalid [Name] input, please try again");
        return null;
    }
    // check sex
    Sex sex;
    if (!Enum.TryParse(data[2], out sex) || (sex != Sex.Female && sex != Sex.Male))
    {
        Console.WriteLine("Invalid [Sex] input, please try again");
        return null;
    }
    // check birthday
    if (!DateTime.TryParse(data[3], out DateTime birthday))
    {
        Console.WriteLine("Invalid [Birthday] input, please try again");
        return null;
    }
    return new Person() { ID = id, Name = name, Sex = sex, Birthday = birthday };
}
/// <summary> /// Builds the specified targets. /// </summary> /// <param name="loggingContext">The logging context for the project.</param> /// <param name="entry">The BuildRequestEntry for which we are building targets.</param> /// <param name="callback">The callback to be used to handle new project build requests.</param> /// <param name="targetNames">The names of the targets to build.</param> /// <param name="baseLookup">The Lookup containing all current items and properties for this target.</param> /// <param name="cancellationToken">The <see cref="CancellationToken"/> to use when building the targets.</param> /// <returns>The target's outputs and result codes</returns> public async Task <BuildResult> BuildTargets(ProjectLoggingContext loggingContext, BuildRequestEntry entry, IRequestBuilderCallback callback, string[] targetNames, Lookup baseLookup, CancellationToken cancellationToken) { ErrorUtilities.VerifyThrowArgumentNull(loggingContext, "projectLoggingContext"); ErrorUtilities.VerifyThrowArgumentNull(entry, nameof(entry)); ErrorUtilities.VerifyThrowArgumentNull(callback, "requestBuilderCallback"); ErrorUtilities.VerifyThrowArgumentNull(targetNames, nameof(targetNames)); ErrorUtilities.VerifyThrowArgumentNull(baseLookup, nameof(baseLookup)); ErrorUtilities.VerifyThrow(targetNames.Length > 0, "List of targets must be non-empty"); ErrorUtilities.VerifyThrow(_componentHost != null, "InitializeComponent must be called before building targets."); _requestEntry = entry; _requestBuilderCallback = callback; _projectLoggingContext = loggingContext; _cancellationToken = cancellationToken; // Clone the base lookup so that if we are re-entered by another request while this one in blocked, we don't have visibility to // their state, and they have no visibility into ours. _baseLookup = baseLookup.Clone(); _targetsToBuild = new ConcurrentStack <TargetEntry>(); // Get the actual target objects from the names BuildRequestConfiguration configuration = _requestEntry.RequestConfiguration; bool previousCacheableStatus = configuration.IsCacheable; configuration.IsCacheable = false; configuration.RetrieveFromCache(); _projectInstance = configuration.Project; // Now get the current results cache entry. IResultsCache resultsCache = (IResultsCache)_componentHost.GetComponent(BuildComponentType.ResultsCache); BuildResult existingBuildResult = resultsCache.GetResultsForConfiguration(_requestEntry.Request.ConfigurationId); _buildResult = new BuildResult(entry.Request, existingBuildResult, null); if (existingBuildResult == null) { // Add this result so that if our project gets re-entered we won't rebuild any targets we have already built. resultsCache.AddResult(_buildResult); } List <TargetSpecification> targets = new List <TargetSpecification>(targetNames.Length); foreach (string targetName in targetNames) { var targetExists = _projectInstance.Targets.ContainsKey(targetName); if (!targetExists && entry.Request.BuildRequestDataFlags.HasFlag(BuildRequestDataFlags.SkipNonexistentTargets)) { _projectLoggingContext.LogComment(Framework.MessageImportance.Low, "TargetSkippedWhenSkipNonexistentTargets", targetName); continue; } targets.Add(new TargetSpecification(targetName, targetExists ? _projectInstance.Targets[targetName].Location : _projectInstance.ProjectFileLocation)); } // Push targets onto the stack. This method will reverse their push order so that they // get built in the same order specified in the array. 
await PushTargets(targets, null, baseLookup, false, false, TargetBuiltReason.None); // Now process the targets ITaskBuilder taskBuilder = _componentHost.GetComponent(BuildComponentType.TaskBuilder) as ITaskBuilder; try { await ProcessTargetStack(taskBuilder); } finally { // If there are still targets left on the stack, they need to be removed from the 'active targets' list foreach (TargetEntry target in _targetsToBuild) { configuration.ActivelyBuildingTargets.Remove(target.Name); } ((IBuildComponent)taskBuilder).ShutdownComponent(); } if (_cancellationToken.IsCancellationRequested) { throw new BuildAbortedException(); } // Gather up outputs for the requested targets and return those. All of our information should be in the base lookup now. ComputeAfterTargetFailures(targetNames); BuildResult resultsToReport = new BuildResult(_buildResult, targetNames); // Return after-build project state if requested. if (_requestEntry.Request.BuildRequestDataFlags.HasFlag(BuildRequestDataFlags.ProvideProjectStateAfterBuild)) { resultsToReport.ProjectStateAfterBuild = _projectInstance; } if (_requestEntry.Request.RequestedProjectState != null) { resultsToReport.ProjectStateAfterBuild = _projectInstance.FilteredCopy(_requestEntry.Request.RequestedProjectState); } configuration.IsCacheable = previousCacheableStatus; return(resultsToReport); }
private double[] move(KDTree <int> tree, double[][] points, int index, ConcurrentStack <double[]> modes, Action <ICollection <NodeDistance <KDTreeNode <int> > >, double[]> computeMean) { double[] current = points[index]; double[] mean = new double[current.Length]; double[] shift = new double[current.Length]; // we will keep moving it in the // direction of the density modes int iterations = 0; // until convergence or max iterations reached while (iterations < MaxIterations) { iterations++; // Get points near the current point var neighbors = tree.Nearest(current, Bandwidth * 3, maximum); // compute the mean on the region computeMean(neighbors, mean); // extract the mean shift vector for (int j = 0; j < mean.Length; j++) { shift[j] = current[j] - mean[j]; } // move the point towards a mode for (int j = 0; j < mean.Length; j++) { current[j] = mean[j]; } // Check if we are near to a maximum point if (cut) { // Check if we are near a known mode foreach (double[] mode in modes) { // compute the distance between points // if they are near, they are duplicates if (Distance.Distance(points[index], mode) < Bandwidth) { // Yes, we are close to a known mode. Let's substitute // this point with a reference to this nearest mode return(points[index] = mode); // and stop moving this point } } } // check for convergence: magnitude of the mean shift // vector converges to zero (Comaniciu 2002, page 606) if (Norm.Euclidean(shift) < Tolerance * Bandwidth) { break; } } return(supress(points, index, modes)); }
/// <summary> /// Scans a list for "modern" compatibility /// </summary> /// <param name="file">List form page to start the scan from</param> /// <param name="list">List linked to the form page</param> /// <returns>Object describing modern compatiblity</returns> public static ListScanResult ModernCompatability(this File file, List list, ref ConcurrentStack <ScanError> scanErrors) { if (file == null) { throw new ArgumentNullException("file"); } if (list == null) { throw new ArgumentNullException("list"); } ClientContext cc = file.Context as ClientContext; ListScanResult result = new ListScanResult(); // Load properties file.EnsureProperty(p => p.PageRenderType); // If it works in modern, we're good if (file.PageRenderType == ListPageRenderType.Modern) { // let's return since we know it will work return(result); } else { result.PageRenderType = file.PageRenderType; } // Hmmm...it's not working, so let's list *all* reasons why it's not working in modern // Step 1: load the tenant / site / web / list level blocking options // Tenant // Currently we've no API to detect tenant setting...but since we anyhow should scan all lists this does not matter that much // Site Site site = cc.Site; site.EnsureProperties(p => p.Features, p => p.Url); result.BlockedAtSiteLevel = site.Features.Where(f => f.DefinitionId == FeatureId_Site_Modern).Count() > 0; // Web cc.Web.EnsureProperties(p => p.Features, p => p.Url); result.BlockedAtWebLevel = cc.Web.Features.Where(f => f.DefinitionId == FeatureId_Web_Modern).Count() > 0; // List list.EnsureProperties(p => p.ListExperienceOptions, p => p.UserCustomActions, p => p.BaseTemplate); result.ListExperience = list.ListExperienceOptions; result.XsltViewWebPartCompatibility.ListBaseTemplate = list.BaseTemplate; if (list.ListExperienceOptions == ListExperience.ClassicExperience) { result.BlockedAtListLevel = true; } // Step 2: verify we can load a web part manager and ensure there's only one web part of the page LimitedWebPartManager wpm; try { wpm = file.GetLimitedWebPartManager(PersonalizationScope.Shared); file.Context.Load(wpm.WebParts, wps => wps.Include(wp => wp.WebPart.Title, wp => wp.WebPart.Properties)); file.Context.ExecuteQueryRetry(); } catch (Exception ex) { result.BlockedByNotBeingAbleToLoadPage = true; result.BlockedByNotBeingAbleToLoadPageException = ex.ToString(); return(result); } if (wpm.WebParts.Count != 1) { result.BlockedByZeroOrMultipleWebParts = true; return(result); } var webPart = wpm.WebParts[0].WebPart; // Step 3: Inspect the web part used to render the list // Step 3.1: JSLink web part check if (webPart.Properties.FieldValues.Keys.Contains("JSLink")) { if (webPart.Properties["JSLink"] != null && !String.IsNullOrEmpty(webPart.Properties["JSLink"].ToString()) && webPart.Properties["JSLink"].ToString().ToLower() != "clienttemplates.js") { result.XsltViewWebPartCompatibility.BlockedByJSLink = true; result.XsltViewWebPartCompatibility.JSLink = webPart.Properties["JSLink"].ToString(); } } // Step 3.2: XslLink web part check if (webPart.Properties.FieldValues.Keys.Contains("XslLink")) { if (webPart.Properties["XslLink"] != null && !String.IsNullOrEmpty(webPart.Properties["XslLink"].ToString()) && webPart.Properties["XslLink"].ToString().ToLower() != "main.xsl") { result.XsltViewWebPartCompatibility.BlockedByXslLink = true; result.XsltViewWebPartCompatibility.XslLink = webPart.Properties["XslLink"].ToString(); } } // Step 3.3: Xsl web part check if (webPart.Properties.FieldValues.Keys.Contains("Xsl")) { if (webPart.Properties["Xsl"] != null && 
!String.IsNullOrEmpty(webPart.Properties["Xsl"].ToString())) { result.XsltViewWebPartCompatibility.BlockedByXsl = true; } } // Step 3.4: Process fields in view if (webPart.Properties.FieldValues.Keys.Contains("XmlDefinition")) { if (webPart.Properties["XmlDefinition"] != null && !String.IsNullOrEmpty(webPart.Properties["XmlDefinition"].ToString())) { try { // Get the fields in this view var viewFields = GetViewFields(webPart.Properties["XmlDefinition"].ToString()); // Load fields in one go List <Field> fieldsToProcess = new List <Field>(viewFields.Count); try { foreach (var viewField in viewFields) { Field field = list.Fields.GetByInternalNameOrTitle(viewField); cc.Load(field, p => p.JSLink, p => p.TypeAsString, p => p.FieldTypeKind, p => p.InternalName); fieldsToProcess.Add(field); } cc.ExecuteQueryRetry(); } catch { // try to load the fields again, but now individually so we can collect the needed errors + evaulate the fields that do load fieldsToProcess.Clear(); foreach (var viewField in viewFields) { try { Field field = list.Fields.GetByInternalNameOrTitle(viewField); cc.Load(field, p => p.JSLink, p => p.TypeAsString, p => p.FieldTypeKind); cc.ExecuteQueryRetry(); fieldsToProcess.Add(field); } catch (Exception ex) { ScanError error = new ScanError() { Error = ex.Message, SiteURL = cc.Web.Url, SiteColUrl = site.Url }; scanErrors.Push(error); Console.WriteLine("Error for site {1}: {0}", ex.Message, cc.Web.Url); } } } // Verify the fields foreach (var field in fieldsToProcess) { try { // JSLink on field if (!string.IsNullOrEmpty(field.JSLink) && field.JSLink != "clienttemplates.js" && field.JSLink != "sp.ui.reputation.js" && !field.IsTaxField()) { result.XsltViewWebPartCompatibility.BlockedByJSLinkField = true; result.XsltViewWebPartCompatibility.JSLinkFields = string.IsNullOrEmpty(result.XsltViewWebPartCompatibility.JSLinkFields) ? $"{field.InternalName}" : $"{result.XsltViewWebPartCompatibility.JSLinkFields},{field.InternalName}"; } //Business data field if (field.IsBusinessDataField()) { result.XsltViewWebPartCompatibility.BlockedByBusinessDataField = true; result.XsltViewWebPartCompatibility.BusinessDataFields = string.IsNullOrEmpty(result.XsltViewWebPartCompatibility.BusinessDataFields) ? $"{field.InternalName}" : $"{result.XsltViewWebPartCompatibility.BusinessDataFields},{field.InternalName}"; } // Geolocation field if (field.FieldTypeKind == FieldType.Geolocation) { result.XsltViewWebPartCompatibility.BlockedByGeoLocationField = true; result.XsltViewWebPartCompatibility.GeoLocationFields = string.IsNullOrEmpty(result.XsltViewWebPartCompatibility.GeoLocationFields) ? $"{field.InternalName}" : $"{result.XsltViewWebPartCompatibility.GeoLocationFields},{field.InternalName}"; } // TaskOutcome field if (field.IsTaskOutcomeField()) { result.XsltViewWebPartCompatibility.BlockedByTaskOutcomeField = true; result.XsltViewWebPartCompatibility.TaskOutcomeFields = string.IsNullOrEmpty(result.XsltViewWebPartCompatibility.TaskOutcomeFields) ? $"{field.InternalName}" : $"{result.XsltViewWebPartCompatibility.TaskOutcomeFields},{field.InternalName}"; } // Publishing field if (field.IsPublishingField()) { result.XsltViewWebPartCompatibility.BlockedByPublishingField = true; result.XsltViewWebPartCompatibility.PublishingFields = string.IsNullOrEmpty(result.XsltViewWebPartCompatibility.PublishingFields) ? 
$"{field.InternalName}" : $"{result.XsltViewWebPartCompatibility.PublishingFields},{field.InternalName}"; } } catch (Exception ex) { ScanError error = new ScanError() { Error = ex.Message, SiteURL = cc.Web.Url, SiteColUrl = site.Url, }; scanErrors.Push(error); Console.WriteLine("Error for site {1}: {0}", ex.Message, cc.Web.Url); } } } catch (Exception ex) { ScanError error = new ScanError() { Error = ex.Message, SiteURL = cc.Web.Url, SiteColUrl = site.Url }; scanErrors.Push(error); Console.WriteLine("Error for site {1}: {0}", ex.Message, cc.Web.Url); } } } // Step 3.5: Process list custom actions if (list.UserCustomActions.Count > 0) { foreach (var customAction in list.UserCustomActions) { if (!string.IsNullOrEmpty(customAction.Location) && customAction.Location.Equals("scriptlink", StringComparison.InvariantCultureIgnoreCase)) { if (!string.IsNullOrEmpty(customAction.ScriptSrc)) { result.XsltViewWebPartCompatibility.BlockedByListCustomAction = true; result.XsltViewWebPartCompatibility.ListCustomActions = string.IsNullOrEmpty(result.XsltViewWebPartCompatibility.ListCustomActions) ? $"{customAction.Name}" : $"{result.XsltViewWebPartCompatibility.ListCustomActions},{customAction.Name}"; } } } } // Step 3.6: managed metadata navigation is not an issue anymore result.XsltViewWebPartCompatibility.BlockedByManagedMetadataNavFeature = false; // Step 4: check the view if (webPart.Properties.FieldValues.Keys.Contains("ViewFlags") && webPart.Properties["ViewFlags"] != null && !String.IsNullOrEmpty(webPart.Properties["ViewFlags"].ToString())) { uint flags; if (uint.TryParse(webPart.Properties["ViewFlags"].ToString(), out flags)) { if ((flags & ViewFlag_Gantt) != 0 || (flags & ViewFlag_Calendar) != 0 || (flags & ViewFlag_Grid) != 0) { result.XsltViewWebPartCompatibility.BlockedByViewType = true; if ((flags & ViewFlag_Gantt) != 0) { result.XsltViewWebPartCompatibility.ViewType = "Gantt"; } else if ((flags & ViewFlag_Calendar) != 0) { result.XsltViewWebPartCompatibility.ViewType = "Calendar"; } else if ((flags & ViewFlag_Grid) != 0) { if (list.BaseTemplate == (int)ListTemplateType.GenericList || list.BaseTemplate == (int)ListTemplateType.DocumentLibrary) { // unblock...we've added support for datasheet rendering for custom lists in July 2018 (see https://techcommunity.microsoft.com/t5/Microsoft-SharePoint-Blog/Updates-to-metadata-handling-and-list-templates/ba-p/202113) result.XsltViewWebPartCompatibility.BlockedByViewType = false; } else { result.XsltViewWebPartCompatibility.ViewType = "Grid"; } } } } } // Step 5: check the list // Step 5.1: check the base template if (!list.CanRenderNewExperience()) { result.XsltViewWebPartCompatibility.BlockedByListBaseTemplate = true; result.XsltViewWebPartCompatibility.ListBaseTemplate = list.BaseTemplate; } return(result); }
/// <summary>
/// Creates a new <see cref="ServerSender"/> instance.
/// </summary>
public ServerSender() { this.WritePool = new ConcurrentStack<SocketAsyncEventArgs>(); }