/// <summary>
/// Validates GetHashCode() for IndexedItem by checking that equal instances
/// report equal hash codes.
/// The previous version pinned the literal value 372029391, which broke every
/// time the .NET runtime changed its hashing — hash codes are only stable
/// within a single process, never across runtime versions.
/// </summary>
public void ValidateIndexedItemGetHashCode()
{
    IndexedItem<byte> indexedObj = new IndexedItem<byte>(0, Encoding.ASCII.GetBytes("A")[0]);
    IndexedItem<byte> indexedObj2 = new IndexedItem<byte>(0, Encoding.ASCII.GetBytes("A")[0]);

    // Equal items must produce equal hash codes on every runtime.
    Assert.AreEqual(indexedObj2.GetHashCode(), indexedObj.GetHashCode());

    ApplicationLog.WriteLine("IndexedItem BVT: Successfully validated the Hashcode.");
}
/// <summary>
/// Verifies both the typed and the boxed (object) Equals overloads of
/// IndexedItem for instances built from identical index/value pairs.
/// </summary>
public void ValidateIndexedItemEquals()
{
    byte payload = Encoding.ASCII.GetBytes("A")[0];

    var first = new IndexedItem<byte>(0, payload);
    var second = new IndexedItem<byte>(0, payload);
    var third = new IndexedItem<byte>(0, payload);

    // Typed overload and object overload must both report equality.
    Assert.IsTrue(first.Equals(second));
    Assert.IsTrue(first.Equals((object)third));

    ApplicationLog.WriteLine(
        "IndexedItem BVT: Successfully validated the Equals() method.");
}
/// <summary>
/// The constructor must store the supplied index and item unchanged.
/// </summary>
public void CtorTest()
{
    var expectedIndex = RandomValueEx.GetRandomInt32();
    var expectedItem = RandomValueEx.GetRandomString();

    var sut = new IndexedItem<String>(expectedIndex, expectedItem);

    sut.Index.Should().Be(expectedIndex);
    sut.Item.Should().Be(expectedItem);
}
/// <summary>
/// Verifies CompareTo for equal items (typed and boxed overloads) and for
/// null, which must sort after any live instance.
/// </summary>
public void ValidateIndexedItemCompareTo()
{
    byte payload = Encoding.ASCII.GetBytes("A")[0];

    var first = new IndexedItem<byte>(0, payload);
    var second = new IndexedItem<byte>(0, payload);
    var third = new IndexedItem<byte>(0, payload);

    Assert.AreEqual(0, first.CompareTo(second));
    Assert.AreEqual(0, first.CompareTo((object)third));
    // Any instance compares greater than null.
    Assert.AreEqual(1, first.CompareTo(null));

    ApplicationLog.WriteLine(
        "IndexedItem BVT: Successfully validated the CompareTo() method.");
}
/// <summary>
/// Adds <paramref name="item"/> to the priority queue, doubling the backing
/// array when it is full, then restores the heap invariant for the new slot.
/// </summary>
/// <param name="item">The item to enqueue.</param>
public void Enqueue(T item)
{
    if (_size >= _items.Length)
    {
        // Double the capacity and carry the existing entries over.
        var grown = new IndexedItem[_items.Length * 2];
        Array.Copy(_items, grown, _items.Length);
        _items = grown;
    }

    var slot = _size++;
    // Id comes from an atomic increment, so concurrent enqueuers never share one.
    _items[slot] = new IndexedItem { Value = item, Id = Interlocked.Increment(ref _count) };
    Percolate(slot);
}
/// <summary>
/// Inserts <paramref name="item"/> with the given <paramref name="priority"/>,
/// doubling the backing array on demand, then re-establishes heap order.
/// </summary>
public void Enqueue(TK priority, TV item)
{
    if (_size >= _items.Length)
    {
        var grown = new IndexedItem[_items.Length * 2];
        Array.Copy(_items, grown, _items.Length);
        _items = grown;
    }

    var slot = _size++;
    // Id is a plain sequential counter (no interlocking — single-threaded use).
    _items[slot] = new IndexedItem { Value = item, Pro = priority, Id = ++_count };
    Percolate(slot);
}
/// <summary>
/// Adds <paramref name="item"/> to the queue, growing the storage (doubling)
/// when full, then percolating the new entry into heap position.
/// </summary>
public void Enqueue(T item)
{
    if (mSize >= mItems.Length)
    {
        var grown = new IndexedItem[mItems.Length * 2];
        Array.Copy(mItems, grown, mItems.Length);
        mItems = grown;
    }

    var slot = mSize++;
    // Atomic increment keeps ids unique under concurrent enqueues.
    mItems[slot] = new IndexedItem { Value = item, Id = Interlocked.Increment(ref mCount) };
    Percolate(slot);
}
/// <summary>
/// Enqueues <paramref name="item"/>, doubling the backing array when full; the
/// stored entry also carries the queue's comparison delegate for ordering.
/// </summary>
public void Enqueue(T item)
{
    if (_size >= _items.Length)
    {
        var grown = new IndexedItem[_items.Length * 2];
        Array.Copy(_items, grown, _items.Length);
        _items = grown;
    }

    var slot = _size++;
    _items[slot] = new IndexedItem { Value = item, Id = ++_count, comparison = comparison };
    Percolate(slot);
}
/// <summary>
/// Projects the heights flagged as local maxima into IndexedItem records that
/// remember their original position.
/// </summary>
/// <param name="height">Height values, parallel to <paramref name="localMaxes"/>.</param>
/// <param name="localMaxes">True at every index that is a local maximum.</param>
/// <returns>One IndexedItem per flagged index, in ascending index order.</returns>
private IndexedItem[] GetIndexedItems(int[] height, bool[] localMaxes)
{
    // Count(predicate) avoids the intermediate Where() enumerator.
    IndexedItem[] result = new IndexedItem[localMaxes.Count(lm => lm)];
    int j = 0;
    for (int i = 0; i < height.Length; i++)
    {
        if (localMaxes[i])
        {
            result[j++] = new IndexedItem { Index = i, Value = height[i] };
        }
    }
    return result;
}
/// <summary>
/// Adds <paramref name="t"/> to the collection, doubling the backing array
/// when it is full, then sifts the new entry into position via Prewave.
/// </summary>
public void Add(T t)
{
    if (_size >= _items.Length)
    {
        var grown = new IndexedItem[_items.Length * 2];
        Array.Copy(_items, grown, _items.Length);
        _items = grown;
    }

    var slot = _size++;
    _items[slot] = new IndexedItem() { Value = t, Id = Interlocked.Increment(ref _count) };
    Prewave(slot);
}
/// <summary>
/// Exercises the ==, !=, &lt;, &lt;=, &gt; and &gt;= operators for IndexedItem
/// using items that differ only in their payload byte ('A' vs 'G').
/// </summary>
public void ValidateIndexedItemOperators()
{
    var equalA = new IndexedItem<byte>(0, Encoding.ASCII.GetBytes("A")[0]);
    var equalB = new IndexedItem<byte>(0, Encoding.ASCII.GetBytes("A")[0]);
    var different = new IndexedItem<byte>(0, Encoding.ASCII.GetBytes("G")[0]);

    Assert.IsTrue(equalA == equalB);
    Assert.IsFalse(equalA == different);
    // With equal indices the strict comparisons are false, inclusive ones true.
    Assert.IsFalse(equalA < different);
    Assert.IsTrue(equalA <= different);
    Assert.IsFalse(different > equalA);
    Assert.IsTrue(different >= equalA);
    Assert.IsTrue(equalB != different);

    ApplicationLog.WriteLine(
        "IndexedItem BVT: Successfully validated all the properties.");
}
/// <summary>
/// Enqueues <paramref name="item"/>, doubling the backing array when it is
/// full, then percolates the new entry into heap order.
/// </summary>
public void Enqueue(T item)
{
    if (_size >= _items.Length)
    {
        var grown = new IndexedItem[_items.Length * 2];
        Array.Copy(_items, grown, _items.Length);
        _items = grown;
    }

    var slot = _size++;
    // Atomic increment keeps ids unique under concurrent enqueues.
    _items[slot] = new IndexedItem { Value = item, Id = Interlocked.Increment(ref _count) };
    Percolate(slot);
}
/// <summary>
/// Validates every comparison operator of IndexedItem: two items built from
/// 'A' compare equal; an item built from 'G' at the same index is unequal but
/// not strictly ordered against them.
/// </summary>
public void ValidateIndexedItemOperators()
{
    byte a = Encoding.ASCII.GetBytes("A")[0];
    byte g = Encoding.ASCII.GetBytes("G")[0];

    var indexedObj = new IndexedItem<byte>(0, a);
    var indexedObj1 = new IndexedItem<byte>(0, a);
    var indexedObj2 = new IndexedItem<byte>(0, g);

    Assert.IsTrue(indexedObj == indexedObj1);
    Assert.IsFalse(indexedObj == indexedObj2);
    Assert.IsFalse(indexedObj < indexedObj2);
    Assert.IsTrue(indexedObj <= indexedObj2);
    Assert.IsFalse(indexedObj2 > indexedObj);
    Assert.IsTrue(indexedObj2 >= indexedObj);
    Assert.IsTrue(indexedObj1 != indexedObj2);

    ApplicationLog.WriteLine(
        "IndexedItem BVT: Successfully validated all the properties.");
}
/// <summary>
/// Appends <paramref name="value"/> tagged with the next sequential index,
/// reusing a slot in the backing list when one is available.
/// </summary>
public void Append(T value)
{
    var entry = new IndexedItem { Index = _nextIndex, Value = value };

    // _count can lag behind _list.Count (presumably after logical trims
    // elsewhere — TODO confirm); overwrite the stale slot instead of growing.
    if (_count == _list.Count)
    {
        _list.Add(entry);
    }
    else
    {
        _list[_count] = entry;
    }

    _nextIndex++;
    _count++;
}
/// <summary>
/// Builds a catalogProductCreateEntity whose multi-valued additional
/// attributes aggregate, per property, the values of every indexed item that
/// shares this item's source id.
/// </summary>
/// <returns>The populated create entity (multi_data attributes only).</returns>
private catalogProductCreateEntity LoadData()
{
    using (var entityRepository = ResolverFactory.Resolve<EntityRepository>())
    {
        // All sibling items indexed under the same source id as this item.
        var relatedIndexedItems = entityRepository.GetIndexedItemsBySourceId(GetIndexModel(), IndexedItem.GetSourceId());

        // Per-item dictionaries of dependency-normalized values, keyed by item id.
        var normalizedValues = relatedIndexedItems
            .Select(i => new { k = i.GetId(), v = GetNormalizedValuesByDependencies(i) })
            .ToDictionary(x => x.k, x => x.v);

        // One multi-entity per non-Hexa property, collecting the value each
        // related item contributes for that property.
        var attributes = IndexedItem.Properties().Where(p => !HexaFields.Contains(p.Name))
            .Select(a => new associativeMultiEntity
            {
                key = a.Name,
                value = normalizedValues.Select(v =>
                {
                    var relatedIndexedItem = relatedIndexedItems.FirstOrDefault(r => r.GetId() == v.Key);
                    // Removed items contribute nothing for this property.
                    if (relatedIndexedItem.HasState(ItemState.Removed))
                    {
                        return(null);
                    }
                    var normalizedValue = v.Value;
                    // Fall back to the raw indexed value when no normalized one exists.
                    if (!normalizedValue.ContainsKey(a.Name))
                    {
                        return(relatedIndexedItem.Value<string>(a.Name));
                    }
                    return(normalizedValue[a.Name]);
                })
                .Where(x => !string.IsNullOrWhiteSpace(x))
                .ToArray()
            })
            .ToArray();

        var result = new catalogProductCreateEntity
        {
            additional_attributes = new catalogProductAdditionalAttributesEntity
            {
                multi_data = attributes
            }
        };
        return(result);
    }
}
/// <summary>
/// Enqueues the specified item.
/// </summary>
/// <param name="item">The item.</param>
public void Enqueue(T item)
{
    if (_size >= _items.Length)
    {
        // Exponential (doubling) growth keeps the amortized cost O(1).
        var grown = new IndexedItem[_items.Length * 2];
        Array.Copy(_items, grown, _items.Length);
        _items = grown;
    }

    var slot = _size++;
    _items[slot] = new IndexedItem { Value = item, Id = _count++, Comparer = _comparer };
    Percolate(slot);
    // Debug aid: verifies the heap property still holds after insertion.
    CheckHeapInvariant();
}
/// <summary>
/// Writes an indexed key and its value to the console, optionally followed by
/// the owning file's name plus the item's file-local and global indices.
/// </summary>
/// <param name="p_objItem">The indexed item to print.</param>
/// <param name="p_blnDetail">True to also print the detail lines.</param>
private static void FormatKey(IndexedItem p_objItem, bool p_blnDetail)
{
    // The original wrapped this in try { ... } catch (Exception) { throw; },
    // which is a no-op: a bare rethrow with no handling adds nothing.
    Console.WriteLine(p_objItem.Flat + " = " + p_objItem.Value);
    if (p_blnDetail)
    {
        Console.WriteLine("    File: " + p_objItem.ParentFile.Name);
        Console.WriteLine("    File index: " + p_objItem.ParentFile.Items.IndexOf(p_objItem));
        Console.WriteLine("    Global index: " + SirtisIndexerBO.Instance.IndexedFolder.Itens.IndexOf(p_objItem));
    }
    Console.WriteLine("");
}
/// <summary>
/// Looks up the remote product id for this item's SKU via the SOAP API.
/// </summary>
/// <returns>The product_id of the product matching the item's "sku" value.</returns>
public override string GetDestinationId()
{
    soap.SetOptions(Adapter.Options);
    try
    {
        soap.Begin();
        var client = soap.GetClient();
        var product = client.catalogProductInfo(
            soap.GetSession(),
            IndexedItem.Value<string>("sku"),
            "0" /* Store 0 always contains all products */,
            null,
            "sku");
        return(product.product_id);
    }
    finally
    {
        // Always close the SOAP session, even when the lookup throws.
        soap.End();
    }
}
/// <summary>
/// Builds a stock-update entity from the item's "stock" value plus the
/// adapter options controlling manage-stock and decimal-quantity flags.
/// </summary>
private catalogProductCreateEntity Load()
{
    var qty = IndexedItem.Value<double>("stock");
    var inStock = qty > 0 ? 1 : 0;

    // The API expects these boolean flags encoded as 0/1 integers.
    var manageStockValue = Options.FirstOrDefault(o => o.Name == "manage_stock").Value;
    var decimalValue = Options.FirstOrDefault(o => o.Name == "decimal").Value;
    var manageStock = manageStockValue == bool.TrueString ? 1 : 0;
    var useQtyDecimal = decimalValue == bool.TrueString ? 1 : 0;

    return new catalogProductCreateEntity
    {
        stock_data = new catalogInventoryStockItemUpdateEntity()
        {
            qty = IndexedItem.Value<string>("stock"),
            is_in_stock = inStock,
            manage_stock = manageStock,
            is_in_stockSpecified = true,
            is_qty_decimal = useQtyDecimal,
            is_qty_decimalSpecified = true,
        }
    };
}
/// <summary>
/// Returns the queued values as a new array sorted by priority, using a
/// bubble sort over a copy; the heap's own storage is not modified.
/// </summary>
public T[] CopySortedArray()
{
    IndexedItem[] itemsNew = new IndexedItem[_size];
    Array.Copy(_items, itemsNew, _size);
    // NOTE(review): IsHigherPriority(j, j + 1) takes indices — it appears to
    // compare entries of the underlying heap storage, yet the swaps below are
    // applied to itemsNew only, so later comparisons may not reflect earlier
    // swaps. Confirm against the IsHigherPriority implementation.
    for (int i = 0; i < _size - 1; i++)
    {
        for (int j = 0; j < _size - 1 - i; j++)
        {
            if (IsHigherPriority(j, j + 1))
            {
                var temp = itemsNew[j];
                itemsNew[j] = itemsNew[j + 1];
                itemsNew[j + 1] = temp;
            }
        }
    }
    return(itemsNew.Select(x => x.Value).ToArray());
}
/// <summary>
/// Maps the configured display fields of <paramref name="row"/> to labelled
/// values; returns null when no display fields are configured.
/// </summary>
/// <param name="searchConfiguration">Supplies DisplayFields and label lookups.</param>
/// <param name="row">The indexed row whose Fields dictionary holds raw values.</param>
/// <returns>Label-to-value pairs with null values removed, or null.</returns>
public Dictionary<string, object> BuildAttributes(SearchConfiguration searchConfiguration, IndexedItem row)
{
    var fields = searchConfiguration?.DisplayFields;
    if (fields?.Any() ?? false)
    {
        // The first ToDictionary resolves labels and values; the trailing
        // Where + second ToDictionary drops entries whose value resolved null.
        return(fields.ToDictionary(f => Label(f, searchConfiguration, f), f =>
        {
            // Missing fields fall back to null rather than throwing.
            var value = row.Fields.ContainsKey(f) ? row.Fields[f] : null;
            // Value label is looked up under the "field.value" composite key.
            return Label($"{f}.{value}", searchConfiguration, value) as object;
        }).Where(f => f.Value != null).ToDictionary(f => f.Key, f => f.Value));
    }
    return(null);
}
/// <summary>
/// Builds a product entity whose single-valued additional attributes carry
/// every indexed property except the internal Hexa fields, preferring
/// normalized values over the raw indexed ones.
/// </summary>
private catalogProductCreateEntity LoadEntity()
{
    var normalizedValues = GetNormalizedValuesByDependencies();

    // One associative entry per property; normalized values win when present.
    var attributes = IndexedItem.Properties()
        .Where(p => !HexaFields.Contains(p.Name))
        .Select(p => new associativeEntity
        {
            key = p.Name,
            value = normalizedValues.ContainsKey(p.Name)
                ? normalizedValues[p.Name]
                : IndexedItem.Value<string>(p.Name)
        })
        .ToArray();

    return new catalogProductCreateEntity
    {
        additional_attributes = new catalogProductAdditionalAttributesEntity
        {
            single_data = attributes
        },
    };
}
/// <summary>
/// Sifts the element at <paramref name="index"/> up toward the root until its
/// parent has equal or higher priority, restoring the heap invariant.
/// Out-of-range indices (and the root itself) are ignored.
/// </summary>
private void Percolate(int index)
{
    // Iterative form of the original recursion: at index 0 the original
    // computed parent == index and returned, so the loop excludes it too.
    while (index > 0 && index < _size)
    {
        int parent = (index - 1) / 2;
        if (!IsHigherPriority(index, parent))
        {
            return;
        }
        IndexedItem swapped = _items[index];
        _items[index] = _items[parent];
        _items[parent] = swapped;
        index = parent;
    }
}
/// <summary>
/// Adds <paramref name="item"/> to the queue, doubling capacity when needed,
/// then restores heap order starting from the inserted slot.
/// </summary>
public void Enqueue(T item)
{
    if (size >= items.Length)
    {
        var grown = new IndexedItem[items.Length * 2];
        Array.Copy(items, grown, items.Length);
        items = grown;
    }

    var slot = size++;
    items[slot] = new IndexedItem { Id = ++count, Value = item };
    Percolate(slot);
}
/// <summary>
/// Enqueues <paramref name="item"/>, growing the backing array (doubling)
/// when full; the entry's Id is the next sequence number masked by ID_MASK.
/// </summary>
public void Enqueue(T item)
{
    if (_size >= _items.Length)
    {
        var temp = _items;
        _items = new IndexedItem[_items.Length * 2];
        Array.Copy(temp, _items, temp.Length);
    }
    var index = _size++;
    // If the queue ever held more tasks than a long can count, something is
    // wrong to begin with.. (translated from the original Korean comment)
    _items[index] = new IndexedItem { Value = item, Id = (++_count) & ID_MASK };
#if UNITY_EDITOR
    // Editor-only mirror of the queue, kept sorted — presumably for
    // inspection/debugging in the Unity editor; TODO confirm.
    indexedItemList.Add(_items[index]);
    indexedItemList.Sort((lhs, rhs) => lhs.CompareTo(rhs));
#endif
    Percolate(index);
}
/// <summary>
/// Sifts the element at <paramref name="index"/> down, swapping it with its
/// highest-priority child until the heap invariant holds for the subtree.
/// </summary>
private void Heapify(int index)
{
    if (index < 0 || index >= _size)
    {
        return;
    }

    int left = 2 * index + 1;
    int right = 2 * index + 2;
    int best = index;

    if (left < _size && IsHigherPriority(left, best))
    {
        best = left;
    }
    if (right < _size && IsHigherPriority(right, best))
    {
        best = right;
    }

    if (best != index)
    {
        IndexedItem swapped = _items[index];
        _items[index] = _items[best];
        _items[best] = swapped;
        Heapify(best);
    }
}
/// <summary>
/// Pushes the loaded product entity to every configured store via the SOAP
/// API, addressing the product by its previously stored destination id.
/// </summary>
/// <returns>PushState.Success; failures surface as exceptions.</returns>
private PushState Update()
{
    var result = PushState.Success;
    var destinationId = IndexedItem.Value<string>("DestinationId");
    soap.SetOptions(Adapter.Options);
    try
    {
        var data = LoadEntity();
        // store_ids may be separated by comma, semicolon or pipe.
        var storeIds = Regex.Split(Options.FirstOrDefault(o => o.Name == "store_ids").Value,
            "[,;|]", RegexOptions.Multiline | RegexOptions.IgnoreCase);
        soap.Begin();
        var client = soap.GetClient();
        foreach (var storeId in storeIds)
        {
            // The original bound this call's result to an unused local
            // ("success"); the return value is intentionally ignored.
            client.catalogProductUpdate(soap.GetSession(), destinationId, data, storeId, "id");
        }
        return result;
    }
    finally
    {
        // Always close the SOAP session.
        soap.End();
    }
}
/// <summary>
/// Enables the product (status "1") on every configured store, addressing it
/// by the supplied destination id or, when blank, the item's stored one.
/// </summary>
/// <param name="destinationId">Optional remote product id override.</param>
/// <returns>Always PushState.Success; failures surface as exceptions.</returns>
public override PushState Update(string destinationId = null)
{
    var destId = !string.IsNullOrWhiteSpace(destinationId) ? destinationId : IndexedItem.GetDestinationId();
    var pushState = PushState.Success;
    soap.SetOptions(Adapter.Options);
    try
    {
        // Both option lists accept comma, semicolon or pipe separators.
        var websiteIds = Regex.Split(Options.FirstOrDefault(o => o.Name == "website_ids").Value, "[,;|]", RegexOptions.Multiline | RegexOptions.IgnoreCase);
        var storeIds = Regex.Split(Options.FirstOrDefault(o => o.Name == "store_ids").Value, "[,;|]", RegexOptions.Multiline | RegexOptions.IgnoreCase);
        soap.Begin();
        var client = soap.GetClient();
        foreach (var storeId in storeIds)
        {
            // NOTE(review): the Async variant is invoked without awaiting or
            // observing its result (fire-and-forget); soap.End() in the
            // finally block may run before these calls finish — confirm that
            // this is intended.
            client.catalogProductMultiUpdateAsync(soap.GetSession(), new[] { destId }, new[] { new catalogProductCreateEntity { website_ids = websiteIds, status = "1" } }, storeId, "id");
        }
    }
    finally
    {
        soap.End();
    }
    return(pushState);
}
/// <summary>
/// Delegates to the wrapped distance measure, comparing the payload values of
/// the two indexed items (their indices play no part).
/// </summary>
public override double Get(IndexedItem<T> a, IndexedItem<T> b)
{
    var left = a.Value;
    var right = b.Value;
    return dist.Get(left, right);
}
/// <summary>
/// Pairs an indexed item with the plain item that preceded it.
/// </summary>
/// <param name="previousItem">The item that came before.</param>
/// <param name="currentItem">The current indexed item.</param>
public IndexedItemAndPreviousItem(T previousItem, IndexedItem<T> currentItem)
{
    // Independent assignments; order is immaterial.
    CurrentItem = currentItem;
    PreviousItem = previousItem;
}
/// <summary>
/// Exhaustively exercises IndexedItem&lt;ISequenceItem&gt;: construction,
/// ordering (CompareTo and every comparison operator, including null
/// operands), value equality vs reference identity, and hash codes.
/// Ordering is exercised via items at indices 0, 1 and 2.
/// </summary>
public void TestIndexedItemWithISequenceItem()
{
    // item1/item2 are equal by value; item3/item4 sit at lower indices;
    // item5 shares item1's index but carries a different symbol.
    IndexedItem<ISequenceItem> item1 = new IndexedItem<ISequenceItem>(2, Alphabets.DNA.A);
    IndexedItem<ISequenceItem> item2 = new IndexedItem<ISequenceItem>(2, Alphabets.DNA.A);
    IndexedItem<ISequenceItem> item3 = new IndexedItem<ISequenceItem>(0, Alphabets.DNA.G);
    IndexedItem<ISequenceItem> item4 = new IndexedItem<ISequenceItem>(1, Alphabets.DNA.A);
    IndexedItem<ISequenceItem> item5 = new IndexedItem<ISequenceItem>(2, Alphabets.DNA.T);
    IndexedItem<ISequenceItem> nullitem1 = null;
    IndexedItem<ISequenceItem> nullitem2 = null;

    // Constructor stores index and item unchanged.
    Assert.AreEqual(item1.Index, 2);
    Assert.AreSame(item1.Item, Alphabets.DNA.A);
    Assert.AreEqual(item1, item2);
    Assert.IsTrue(item3 <= item1);
    Assert.IsTrue(item2 <= item1);
    Assert.IsTrue(item3 < item1);
    Assert.IsTrue(item1 > item3);
    Assert.IsTrue(item1 >= item3);
    Assert.IsTrue(item1 >= item2);
    Assert.AreNotEqual(item1, item3);

    #region Test - CompareTo
    Assert.IsTrue(item1.CompareTo(item2) == 0);
    Assert.IsTrue(item3.CompareTo(item1) < 0);
    Assert.IsTrue(item1.CompareTo(item3) > 0);
    // Null compares less than any instance.
    Assert.IsTrue(item1.CompareTo(null) > 0);
    // Comparing against an unrelated type must throw.
    try
    {
        Assert.IsFalse(item1.CompareTo("ABCD") == 0);
        Assert.Fail();
    }
    catch { }

    // Sort() must order by index: item3 (0), item4 (1), item1 (2).
    List<IndexedItem<ISequenceItem>> seqItemList = new List<IndexedItem<ISequenceItem>>();
    seqItemList.Add(item1);
    seqItemList.Add(item3);
    seqItemList.Add(item4);
    seqItemList.Sort();
    Assert.AreEqual(seqItemList[0].Index, 0);
    Assert.AreEqual(seqItemList[1].Index, 1);
    Assert.AreEqual(seqItemList[2].Index, 2);
    Assert.AreSame(seqItemList[0], item3);
    Assert.AreSame(seqItemList[1], item4);
    Assert.AreSame(seqItemList[2], item1);
    #endregion Test - CompareTo

    #region Test - Equals
    Assert.IsTrue(item1.Equals(item2));
    Assert.IsFalse(item1.Equals(item3));
    Assert.IsFalse(item1.Equals(null));
    Assert.IsFalse(item1.Equals(nullitem1));
    Assert.IsFalse(item5.Equals(item1));
    Assert.IsFalse(item1.Equals("ABCD"));
    // Equal by value, yet distinct references.
    Assert.IsFalse(object.ReferenceEquals(item1, item2));
    IndexedItem<ISequenceItem> refItem = item1;
    Assert.IsTrue(object.ReferenceEquals(item1, refItem));
    #endregion Test - Equals

    // NOTE: per the assertions below, == is reference-based (equal-by-value
    // item1/item2 are NOT ==), while the ordering operators treat null as
    // the smallest value.
    #region Test - "==" operator
    Assert.IsTrue(nullitem1 == nullitem2);
    Assert.IsFalse(item1 == item2);
    Assert.IsFalse(item1 == item3);
    Assert.IsFalse(item1 == nullitem1);
    Assert.IsFalse(nullitem1 == item1);
    #endregion Test - "==" operator

    #region Test - "!=" operator
    Assert.IsFalse(nullitem1 != nullitem2);
    Assert.IsTrue(item1 != item3);
    Assert.IsTrue(item1 != item2);
    Assert.IsTrue(item1 != nullitem1);
    Assert.IsTrue(nullitem1 != item1);
    #endregion Test - "!=" operator

    #region Test - "<" operator
    Assert.IsFalse(nullitem1 < nullitem2);
    Assert.IsTrue(item4 < item1);
    Assert.IsFalse(item1 < item4);
    Assert.IsTrue(nullitem1 < item1);
    Assert.IsFalse(item1 < nullitem1);
    #endregion Test - "<" operator

    #region Test - "<=" operator
    Assert.IsTrue(nullitem1 <= nullitem2);
    Assert.IsTrue(item4 <= item1);
    Assert.IsFalse(item1 <= item4);
    Assert.IsTrue(nullitem1 <= item1);
    Assert.IsFalse(item1 <= nullitem1);
    Assert.IsTrue(item1 <= item2);
    #endregion Test - "<=" operator

    #region Test - ">" operator
    Assert.IsFalse(nullitem1 > nullitem2);
    Assert.IsFalse(item4 > item1);
    Assert.IsTrue(item1 > item4);
    Assert.IsFalse(nullitem1 > item1);
    Assert.IsTrue(item1 > nullitem1);
    #endregion Test - ">" operator

    #region Test - ">=" operator
    Assert.IsTrue(nullitem1 >= nullitem2);
    Assert.IsFalse(item4 >= item1);
    Assert.IsTrue(item1 >= item4);
    Assert.IsFalse(nullitem1 >= item1);
    Assert.IsTrue(item1 >= nullitem1);
    Assert.IsTrue(item1 >= item2);
    #endregion Test - ">=" operator

    #region Test - GetHashCode
    // Equal items hash alike; differing payloads should differ.
    Assert.AreEqual(item1.GetHashCode(), item2.GetHashCode());
    Assert.AreNotEqual(item1.GetHashCode(), item5.GetHashCode());
    #endregion Test - GetHashCode
}
/// <summary>
/// XsvSparse formatter generic method called by all the test cases
/// to validate the test case based on the parameters passed: parses a sparse
/// sequence from the configured file, formats it to a temp file through the
/// overload selected by <paramref name="additionalParam"/>, re-parses the
/// result and verifies the known-item indices survived the round trip.
/// </summary>
/// <param name="nodename">Xml node Name.</param>
/// <param name="additionalParam">Additional parameter
/// based on which the validation of test case is done.</param>
static void XsvSparseFormatterGeneralTestCases(string nodename, AdditionalParameters additionalParam)
{
    // Gets the expected sequence from the Xml
    string filePathObj = Utility._xmlUtil.GetTextValue(nodename, Constants.FilePathNode);
    Assert.IsTrue(File.Exists(filePathObj));

    // Logs information to the log file
    ApplicationLog.WriteLine(string.Format(null, "XsvSparse Formatter BVT: File Exists in the Path '{0}'.", filePathObj));

    IList<ISequence> seqList = null;
    SparseSequence sparseSeq = null;
    XsvContigParser parserObj = new XsvContigParser(Encodings.IupacNA, Alphabets.DNA, Constants.CharSeperator, Constants.SequenceIDPrefix);
    seqList = parserObj.Parse(filePathObj);
    sparseSeq = (SparseSequence)seqList[0];
    IList<IndexedItem<ISequenceItem>> sparseSeqItems = sparseSeq.GetKnownSequenceItems();

    XsvSparseFormatter formatterObj = new XsvSparseFormatter(Constants.CharSeperator, Constants.SequenceIDPrefix);

    // Select the Format() overload under test.
    switch (additionalParam)
    {
        case AdditionalParameters.FormatFilePath:
            formatterObj.Format(sparseSeq, Constants.XsvTempFileName);
            break;
        default:
            break;
        case AdditionalParameters.ForamtListWithFilePath:
            formatterObj.Format(seqList, Constants.XsvTempFileName);
            break;
        case AdditionalParameters.FormatTextWriter:
            using (TextWriter writer = new StreamWriter(Constants.XsvTempFileName))
            {
                formatterObj.Format(sparseSeq, writer);
            }
            break;
        case AdditionalParameters.FormatTextWriterWithOffset:
            using (TextWriter writer = new StreamWriter(Constants.XsvTempFileName))
            {
                formatterObj.Format(sparseSeq, 0, writer);
            }
            break;
        case AdditionalParameters.FormatListTextWriter:
            using (TextWriter writer = new StreamWriter(Constants.XsvTempFileName))
            {
                formatterObj.Format(seqList, writer);
            }
            break;
    }

    // Parse a formatted Xsv file and validate.
    SparseSequence expectedSeq;
    seqList = parserObj.Parse(Constants.XsvTempFileName);
    expectedSeq = (SparseSequence)seqList[0];
    IList<IndexedItem<ISequenceItem>> expectedSparseSeqItems = expectedSeq.GetKnownSequenceItems();

    // Round-trip check: each known item's index must survive format + parse.
    for (int i = 0; i < sparseSeqItems.Count; i++)
    {
        IndexedItem<ISequenceItem> seqItem = sparseSeqItems[i];
        IndexedItem<ISequenceItem> expectedSeqItem = expectedSparseSeqItems[i];
        Assert.AreEqual(seqItem.Index, expectedSeqItem.Index);
    }

    // Log to Nunit GUI.
    Console.WriteLine("Successfully validated the format Xsv file");
    ApplicationLog.WriteLine("Successfully validated the format Xsv file");

    // Delete the temporary file.
    if (File.Exists(Constants.XsvTempFileName))
    {
        File.Delete(Constants.XsvTempFileName);
    }
}
/// <summary>
/// XsvSparse formatter generic method called by all the test cases
/// to validate the test case based on the parameters passed: parses a sparse
/// sequence, writes it via the overload selected by
/// <paramref name="additionalParam"/>, re-parses and compares known-item
/// indices.
/// </summary>
/// <param name="nodename">Xml node Name.</param>
/// <param name="additionalParam">Additional parameter
/// based on which the validation of test case is done.</param>
void XsvSparseFormatterGeneralTestCases(string nodename, AdditionalParameters additionalParam)
{
    // Gets the expected sequence from the Xml
    string filePathObj = utilityObj.xmlUtil.GetTextValue(nodename, Constants.FilePathNode);
    Assert.IsTrue(File.Exists(filePathObj));

    // Logs information to the log file
    ApplicationLog.WriteLine(string.Format((IFormatProvider)null, "XsvSparse Formatter BVT: File Exists in the Path '{0}'.", filePathObj));

    IEnumerable<ISequence> seqList = null;
    SparseSequence sparseSeq = null;
    XsvContigParser parserObj = new XsvContigParser(filePathObj, Alphabets.DNA, Constants.CharSeperator, Constants.SequenceIDPrefix);
    seqList = parserObj.Parse();
    sparseSeq = (SparseSequence)seqList.ElementAt(0);
    IList<IndexedItem<byte>> sparseSeqItems = sparseSeq.GetKnownSequenceItems();

    // Write through the overload selected by the test case.
    string tempFile = Path.GetTempFileName();
    XsvSparseFormatter formatterObj = new XsvSparseFormatter(tempFile, Constants.CharSeperator, Constants.SequenceIDPrefix);
    switch (additionalParam)
    {
        case AdditionalParameters.FormatFilePath:
            formatterObj.Write(sparseSeq);
            break;
        default:
            break;
        case AdditionalParameters.ForamtListWithFilePath:
            formatterObj.Write(seqList.ToList());
            break;
    }

    // NOTE(review): this second parser re-reads filePathObj (the original
    // input), not tempFile — the comparison below therefore checks the source
    // against itself. Confirm whether tempFile was intended here.
    XsvContigParser newParserObj = new XsvContigParser(filePathObj, Alphabets.DNA, Constants.CharSeperator, Constants.SequenceIDPrefix);

    // Parse a formatted Xsv file and validate.
    SparseSequence expectedSeq;
    seqList = newParserObj.Parse();
    expectedSeq = (SparseSequence)seqList.ElementAt(0);
    IList<IndexedItem<byte>> expectedSparseSeqItems = expectedSeq.GetKnownSequenceItems();
    for (int i = 0; i < sparseSeqItems.Count; i++)
    {
        IndexedItem<byte> seqItem = sparseSeqItems[i];
        IndexedItem<byte> expectedSeqItem = expectedSparseSeqItems[i];
        Assert.AreEqual(seqItem.Index, expectedSeqItem.Index);
    }

    // Log to GUI.
    Console.WriteLine("Successfully validated the format Xsv file");
    ApplicationLog.WriteLine("Successfully validated the format Xsv file");
}
/// <summary>
/// Runs Dijkstra's single-source shortest-path algorithm from node
/// <paramref name="v"/> over <paramref name="g"/>, filling fromList with each
/// node's shortest-path predecessors, numberOfShortestPaths with the count of
/// distinct shortest paths, and S with nodes in settled order.
/// </summary>
/// <param name="g">The graph; NodeWeight is used as the working distance.</param>
/// <param name="v">Index of the source node.</param>
public DikstraProvider2(LightWeightGraph g, int v) : base(g.Nodes.Length)
{
    int numNodes = g.Nodes.Length;
    numberOfShortestPaths[v] = 1;
    fromList = new List<int>[numNodes]; //List of nodes (we will use this to
    //countPostcessors = new int[numNodes]; //This will hold a count of the number of shortestpaths stemming from
    //we must set each node to infinite distance
    for (int i = 0; i < numNodes; i++)
    {
        g.Nodes[i].NodeWeight = float.MaxValue;
        fromList[i] = new List<int>(5);
    }
    //now we need to set our node to 0
    g.Nodes[v].NodeWeight = 0.0f;
    //now we need to setup our heap ADT.
    IndexedItem[] items = new IndexedItem[numNodes];
    for (int i = 0; i < numNodes; i++)
    {
        var n = g.Nodes[i];
        items[i] = new IndexedItem(n.Id, n.NodeWeight);
    }
    // Heap starts with only the source; others are added on first encounter.
    MinHeapDikstra minHeap = new MinHeapDikstra(numNodes, items[v]);
    //dikstra main
    while (!minHeap.isEmpty())
    {
        var h = minHeap.extractMin();
        int uIndex = h.NodeIndex;
        // Record settled order.
        this.S.Push(uIndex);
        //check all edges
        var u = g.Nodes[uIndex];
        int uEdgeCount = g.Nodes[uIndex].Count;
        for (int i = 0; i < uEdgeCount; i++)
        {
            float newWeight = h.NodeWeight + u.EdgeWeights[i];
            int toIndex = u.Edge[i];
            var to = items[toIndex];
            float toNodeWeight = to.NodeWeight;
            if (newWeight < toNodeWeight)
            {
                // Strictly shorter path found: reset predecessors and counts.
                to.NodeWeight = newWeight;
                fromList[toIndex].Clear();
                fromList[toIndex].Add(uIndex);
                numberOfShortestPaths[toIndex] = numberOfShortestPaths[uIndex];
                if (to.HeapIndex == -1) //first encounter
                {
                    minHeap.addItem(to);
                }
                else
                {
                    minHeap.decreaseKey(to.HeapIndex);
                }
            }
            // NOTE(review): exact float equality — ties reached via different
            // roundings may be missed; confirm acceptable for these graphs.
            else if (newWeight == toNodeWeight)
            {
                // Equal-length path: accumulate predecessors and path counts.
                fromList[toIndex].Add(uIndex);//Add the node
                numberOfShortestPaths[toIndex] += numberOfShortestPaths[uIndex];
            }
        }
    }
}
/// <summary>
/// Creates the product remotely (disabled: status "2", visibility "4", zero
/// stock) with its additional attributes, then pushes website/status data to
/// every configured store.
/// </summary>
/// <param name="destinationId">Receives the newly created remote product id.</param>
/// <returns>PushState.Success; failures surface as exceptions.</returns>
public override PushState Create(out string destinationId)
{
    var pushState = PushState.Success;
    soap.SetOptions(Adapter.Options);
    // NOTE(review): indexedModel appears unused in this method.
    var indexedModel = GetIndexModel();
    // Anything that is neither an internal Hexa field nor a core product
    // field travels as an "additional attribute".
    var additionalFields = IndexedItem.Properties().Where(p => !HexaFields.Contains(p.Name) && !ProductFields.Contains(p.Name)).Select(p => p.Name);
    // Both option lists accept comma, semicolon or pipe separators.
    var websiteIds = Regex.Split(Options.FirstOrDefault(o => o.Name == "website_ids").Value, "[,;|]", RegexOptions.Multiline | RegexOptions.IgnoreCase);
    var storeIds = Regex.Split(Options.FirstOrDefault(o => o.Name == "store_ids").Value, "[,;|]", RegexOptions.Multiline | RegexOptions.IgnoreCase);
    var normalizedValues = GetNormalizedValuesByDependencies();
    try
    {
        // Normalized values win over the raw indexed value per attribute.
        var additionalAttrs = additionalFields.Select(f => new associativeEntity { key = f, value = normalizedValues.ContainsKey(f) ? normalizedValues[f] : IndexedItem.Value<string>(f) });
        var attributes = new catalogProductAdditionalAttributesEntity { single_data = additionalAttrs.ToArray() };
        var data = new catalogProductCreateEntity
        {
            website_ids = websiteIds,
            additional_attributes = attributes,
            status = "2",
            visibility = "4",
            stock_data = new catalogInventoryStockItemUpdateEntity()
            {
                qty = "0",
                is_in_stock = 1,
            }
        };
        soap.Begin();
        var client = soap.GetClient();
        // Create against store "0"; type and attribute set also prefer
        // normalized values.
        destinationId = client
            .catalogProductCreate(soap.GetSession(),
                normalizedValues.ContainsKey("type") ? normalizedValues["type"] : IndexedItem.Value<string>("type"),
                normalizedValues.ContainsKey("attribute_set_id") ? normalizedValues["attribute_set_id"] : IndexedItem.Value<string>("attribute_set_id"),
                IndexedItem.Value<string>("sku"),
                data,
                "0").ToString();
        foreach (var storeId in storeIds)
        {
            // NOTE(review): fire-and-forget Async call; soap.End() in the
            // finally block may run before these complete — confirm intended.
            client.catalogProductMultiUpdateAsync(
                soap.GetSession(),
                new string[] { destinationId },
                new catalogProductCreateEntity[]
                {
                    new catalogProductCreateEntity
                    {
                        website_ids = websiteIds,
                        status = "2" // Creating product always set status to false. itemModel.removed == DIndex.ConstYes ? "2" : "1"
                    }
                },
                storeId,
                "id");
        }
        return(pushState);
    }
    finally
    {
        soap.End();
    }
}