public bool Authorize(HttpRequestAuthParameters ap) //IHeaders headers, IPEndPoint endPoint, string mac
{
  //if (headers == null) {
  //  throw new ArgumentNullException("headers");
  //}
  //string ua;
  //if (!headers.TryGetValue("User-Agent", out ua)) {
  //  return false;
  //}
  if (string.IsNullOrEmpty(ap.UserAgent)) {
    return false;
  }
  var rv = userAgents.ContainsKey(ap.UserAgent);
  if (!rv) {
    _logger.DebugFormat("Rejecting {0}. Not in User-Agent whitelist", ap.UserAgent);
  }
  else {
    _logger.DebugFormat("Accepted {0} via User-Agent whitelist", ap.UserAgent);
  }
  return rv;
}
internal void NotifyDevice(UpnpDevice dev, string type, bool sticky)
{
  _logger.Debug("NotifyDevice");
  var headers = new RawHeaders();
  headers.Add("HOST", "239.255.255.250:1900");
  headers.Add("CACHE-CONTROL", "max-age = 600");
  headers.Add("LOCATION", dev.Descriptor.ToString());
  headers.Add("SERVER", HttpServer.Signature);
  headers.Add("NTS", "ssdp:" + type);
  headers.Add("NT", dev.Type);
  headers.Add("USN", dev.USN);

  SendDatagram(
    SSDP_ENDP,
    dev.Address,
    String.Format("NOTIFY * HTTP/1.1\r\n{0}\r\n", headers.HeaderBlock),
    sticky
  );
  // Some buggy network equipment will swallow multicast packets, so let's
  // cheat and increase the odds by sending to broadcast as well.
  SendDatagram(
    BROAD_ENDP,
    dev.Address,
    String.Format("NOTIFY * HTTP/1.1\r\n{0}\r\n", headers.HeaderBlock),
    sticky
  );
  _logger.DebugFormat("{0} said {1}", dev.USN, type);
}
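// For illustration only (not part of the original source): with made-up placeholder values for
// the device-specific fields, the datagram assembled above would look roughly like this.
// The LOCATION, NT and USN values here are examples, not taken from the project.
//
//   NOTIFY * HTTP/1.1
//   HOST: 239.255.255.250:1900
//   CACHE-CONTROL: max-age = 600
//   LOCATION: http://192.168.1.10:8080/description.xml
//   SERVER: <HttpServer.Signature>
//   NTS: ssdp:alive
//   NT: urn:schemas-upnp-org:device:MediaServer:1
//   USN: uuid:00000000-0000-0000-0000-000000000000::urn:schemas-upnp-org:device:MediaServer:1
//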
public FileReadStream(FileInfo info)
  : base(info.FullName, FileMode.Open, FileAccess.Read,
         FileShare.ReadWrite | FileShare.Delete, 1,
         FileOptions.Asynchronous | FileOptions.SequentialScan) {
  this.info = info;
  _logger.DebugFormat("Opened file {0}", this.info.FullName);
}
public void FilterProductGroupByParent(List<MasterGroupMapping> listOfProductGroups)
{
    listOfProductGroups.ForEach(productGroup =>
    {
        List<Product> productsInProductGroup =
            productRepo.GetListOfMappedProductsByMasterGroupMapping(productGroup.MasterGroupMappingID);
        List<Product> productsInParentProductGroup =
            productRepo.GetListOfMappedProductsByMasterGroupMapping(productGroup.ParentMasterGroupMappingID.Value);

        List<Product> productsToDelete = (
            from p in productsInProductGroup
            join pp in productsInParentProductGroup on p.ProductID equals pp.ProductID into notExistProducts
            from nep in notExistProducts.DefaultIfEmpty()
            where nep == null
            select p
        ).ToList();

        productsToDelete.ForEach(product =>
        {
            MasterGroupMappingProduct newMasterGroupMappingProduct = new MasterGroupMappingProduct()
            {
                MasterGroupMappingID = productGroup.MasterGroupMappingID,
                ProductID = product.ProductID
            };
            masterGroupMappingRepo.DeleteMasterGroupMappingProduct(newMasterGroupMappingProduct);
        });

        log.DebugFormat("{0} Products Deleted From Product Group {1}",
            productsToDelete.Count, productGroup.MasterGroupMappingID);
    });
}
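// The query above (group join + DefaultIfEmpty + null check) is a LINQ "left anti join":
// it selects the products mapped to this group that have no counterpart in the parent group.
// Below is a minimal, self-contained sketch of that pattern; the helper name (NotIn) and its
// generic signature are illustrative assumptions, not part of the original code.

using System;
using System.Collections.Generic;
using System.Linq;

static class AntiJoinSketch
{
    // Returns every element of `left` whose key has no match in `right`.
    public static List<T> NotIn<T, TKey>(IEnumerable<T> left, IEnumerable<T> right, Func<T, TKey> key)
        where T : class
    {
        return (from l in left
                join r in right on key(l) equals key(r) into matches
                from m in matches.DefaultIfEmpty()
                where m == null
                select l).ToList();
    }
}

// Hypothetical usage, equivalent in shape to the query above:
// var productsToDelete = AntiJoinSketch.NotIn(productsInProductGroup, productsInParentProductGroup, p => p.ProductID);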
public bool Authorize(HttpRequestAuthParameters ap) //IHeaders headers, IPEndPoint endPoint, string mac
{
  if (string.IsNullOrEmpty(ap.Mac)) {
    return false;
  }
  var rv = macs.ContainsKey(ap.Mac);
  if (!rv) {
    _logger.DebugFormat("Rejecting {0}. Not in MAC whitelist", ap.Mac ?? "<UNKNOWN>");
  }
  else {
    _logger.DebugFormat("Accepted {0} via MAC whitelist", ap.Mac);
  }
  return rv;
}
private static void Cleanup(object o)
{
  lock (streams) {
    var keys = new List<string>(streams.Keys);
    foreach (var key in keys) {
      CacheItem item;
      if (streams.TryGetValue(key, out item)) {
        var diff = (DateTime.UtcNow - item.insertionPoint);
        if (diff.TotalMilliseconds > 2500) {
          _logger.DebugFormat("Removed file stream {0} from cache", key);
          item.stream.Kill();
          streams.Remove(key);
        }
      }
    }
  }
}
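// The Cleanup callback above sweeps a shared dictionary of cached file streams and evicts
// entries older than 2.5 seconds; its (object) parameter suggests it is driven by a timer.
// A minimal self-contained sketch of that eviction pattern follows. All names here
// (TtlCache, Put, Sweep) are illustrative assumptions, not the original API.

using System;
using System.Collections.Generic;
using System.Threading;

sealed class TtlCache<TKey, TValue> : IDisposable
{
    private readonly Dictionary<TKey, KeyValuePair<TValue, DateTime>> items =
        new Dictionary<TKey, KeyValuePair<TValue, DateTime>>();
    private readonly TimeSpan ttl;
    private readonly Timer timer;

    public TtlCache(TimeSpan ttl)
    {
        this.ttl = ttl;
        // Sweep roughly as often as entries are allowed to live.
        timer = new Timer(Sweep, null, ttl, ttl);
    }

    public void Put(TKey key, TValue value)
    {
        lock (items) {
            items[key] = new KeyValuePair<TValue, DateTime>(value, DateTime.UtcNow);
        }
    }

    private void Sweep(object state)
    {
        lock (items) {
            foreach (var key in new List<TKey>(items.Keys)) {
                if (DateTime.UtcNow - items[key].Value > ttl) {
                    items.Remove(key);
                }
            }
        }
    }

    public void Dispose()
    {
        timer.Dispose();
    }
}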
public bool Authorize(HttpRequestAuthParameters ap) //IHeaders headers, IPEndPoint endPoint, string mac
{
  //if (endPoint == null) {
  //  return false;
  //}
  //var addr = endPoint.Address;
  if (ap.Address == null) {
    return false;
  }
  var rv = ips.ContainsKey(ap.Address);
  if (!rv) {
    _logger.DebugFormat("Rejecting {0}. Not in IP whitelist", ap.Address);
  }
  else {
    _logger.DebugFormat("Accepted {0} via IP whitelist", ap.Address);
  }
  return rv;
}
public List<MasterGroupMapping> GetListOfHighestProductGroupWithFlattenHierachy(List<MasterGroupMapping> listOfFlattenHierachyProductGroups)
{
    List<MasterGroupMapping> listOfHighestFlattenHierachyProductGroups = new List<MasterGroupMapping>();
    listOfHighestFlattenHierachyProductGroups.AddRange(listOfFlattenHierachyProductGroups);

    listOfFlattenHierachyProductGroups.ForEach(productGroup =>
    {
        List<MasterGroupMapping> productGroupChildren =
            masterGroupMappingRepo.GetListOfMasterGroupMappingChildren(productGroup.MasterGroupMappingID);
        log.DebugFormat("Flatten Product Group {0}. Product Group {0} has {1} Sub Product Groups",
            productGroup.MasterGroupMappingID, productGroupChildren.Count);

        productGroupChildren.ForEach(productGroupChild =>
        {
            listOfHighestFlattenHierachyProductGroups.RemoveAll(x => x.MasterGroupMappingID == productGroupChild.MasterGroupMappingID);
        });
    });

    return listOfHighestFlattenHierachyProductGroups;
}
public void Cleanup()
{
  GC.Collect();
  var pc = paths.Count;
  var ic = ids.Count;
  var npaths = new Dictionary<string, string>();
  foreach (var p in paths) {
    if (ids[p.Value].Target == null) {
      ids.Remove(p.Value);
    }
    else {
      npaths.Add(p.Key, p.Value);
    }
  }
  paths = npaths;
  _logger.DebugFormat(
    "Cleanup complete: ids (evicted) {0} ({1}), paths {2} ({3})",
    ids.Count, ic - ids.Count, paths.Count, pc - paths.Count);
}
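// The Cleanup above relies on the values of `ids` being weak references: after the forced
// GC.Collect(), entries whose Target has been collected are dropped, and `paths` is rebuilt
// to keep only keys that still point at a live id. A minimal sketch of that pruning pattern,
// assuming a plain Dictionary<string, WeakReference> (the names here are illustrative):

using System;
using System.Collections.Generic;

static class WeakMapSketch
{
    // Removes entries whose weakly referenced target has been garbage-collected
    // and returns the number of evicted entries.
    public static int Prune(Dictionary<string, WeakReference> map)
    {
        var dead = new List<string>();
        foreach (var pair in map) {
            if (pair.Value.Target == null) {
                dead.Add(pair.Key);
            }
        }
        foreach (var key in dead) {
            map.Remove(key);
        }
        return dead.Count;
    }
}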
public void ImportProductGroups()
{
    log.DebugFormat("");
    log.DebugFormat("-------> Start Importing ProductGroups");

    log.DebugFormat("Create Parent Master Group Mapping 'Old Product Groups'");
    int masterGroupMappingID = CreateNewMasterGroupMapping("Old Product Groups", null);
    int productGroupID = CreateNewMasterGroupMapping("Product Groups", masterGroupMappingID);
    int brandProductGroupID = CreateNewMasterGroupMapping("Brand", masterGroupMappingID);
    log.DebugFormat("MasterGroupMappingID: {0}", masterGroupMappingID);

    log.DebugFormat("Set Product GroupID to -1 for all current Master Group Mappings");
    UpdateCurrentMasterGroupMappings();

    log.DebugFormat("Copy Product Groups To Master Group Mapping");
    CopyProductGroupsToMasterGroupMapping(productGroupID, brandProductGroupID);

    log.DebugFormat("-------> End Importing ProductGroups");
}
private void ReadCallback(IAsyncResult result)
{
  if (state == HttpStates.CLOSED) {
    return;
  }
  State = HttpStates.READING;

  var read = 0;
  try {
    read = stream.EndRead(result);
    if (read < 0) {
      throw new HttpException("Client did not send anything");
    }
    _logger.DebugFormat("{0} - Read {1} bytes", this, read);
    readStream.Write(buffer, 0, read);
    lastActivity = DateTime.Now;
  }
  catch (Exception) {
    if (!IsATimeout) {
      _logger.WarnFormat("{0} - Failed to read data", this);
      Close();
    }
    return;
  }

  try {
    if (!hasHeaders) {
      readStream.Seek(0, SeekOrigin.Begin);
      var reader = new StreamReader(readStream);
      for (var line = reader.ReadLine(); line != null; line = reader.ReadLine()) {
        line = line.Trim();
        if (string.IsNullOrEmpty(line)) {
          hasHeaders = true;
          readStream = new MemoryStream();
          if (headers.ContainsKey("content-length") &&
              uint.TryParse(headers["content-length"], out bodyBytes)) {
            if (bodyBytes > (1 << 20)) {
              throw new IOException("Body too long");
            }
            var bytes = Encoding.ASCII.GetBytes(reader.ReadToEnd());
            readStream.Write(bytes, 0, bytes.Length);
            _logger.DebugFormat("Must read body bytes {0}", bodyBytes);
          }
          else {
            readStream = new MemoryStream();
          }
          break;
        }
        if (method == null) {
          var parts = line.Split(new char[] { ' ' }, 3);
          method = parts[0].Trim().ToUpperInvariant();
          path = parts[1].Trim();
          _logger.DebugFormat("{0} - {1} request for {2}", this, method, path);
        }
        else {
          var parts = line.Split(new char[] { ':' }, 2);
          headers[parts[0]] = Uri.UnescapeDataString(parts[1]).Trim();
        }
      }
    }
    if (bodyBytes != 0 && bodyBytes > readStream.Length) {
      _logger.DebugFormat(
        "{0} - Bytes to go {1}", this, bodyBytes - readStream.Length);
      Read();
      return;
    }
    using (readStream) {
      body = Encoding.UTF8.GetString(readStream.ToArray());
      _logger.Debug(body);
      _logger.Debug(headers);
    }
    SetupResponse();
  }
  catch (Exception ex) {
    _logger.Warn(String.Format("{0} - Failed to process request", this), ex);
    response = Error500.HandleRequest(this);
    SendResponse();
  }
}
public ItemResponse(string prefix, IRequest request, IMediaResource item, string transferMode = "Streaming")
{
  this.item = item;
  headers = new ResponseHeaders(noCache: !(item is IMediaCoverResource));
  var meta = item as IMetaInfo;
  if (meta != null) {
    headers.Add("Content-Length", meta.InfoSize.ToString());
    headers.Add("Last-Modified", meta.InfoDate.ToString("R"));
  }
  headers.Add("Accept-Ranges", "bytes");
  headers.Add("Content-Type", DlnaMaps.Mime[item.Type]);
  if (request.Headers.ContainsKey("getcontentFeatures.dlna.org")) {
    try {
      if (item.MediaType == DlnaMediaTypes.Image) {
        headers.Add(
          "contentFeatures.dlna.org",
          String.Format(
            "DLNA.ORG_PN={0};DLNA.ORG_OP=00;DLNA.ORG_CI=0;DLNA.ORG_FLAGS={1}",
            item.PN, DlnaMaps.DefaultInteractive
          )
        );
      }
      else {
        headers.Add(
          "contentFeatures.dlna.org",
          String.Format(
            "DLNA.ORG_PN={0};DLNA.ORG_OP=01;DLNA.ORG_CI=0;DLNA.ORG_FLAGS={1}",
            item.PN, DlnaMaps.DefaultStreaming
          )
        );
      }
    }
    catch (NotSupportedException) {
    }
    catch (NotImplementedException) {
    }
  }
  if (request.Headers.ContainsKey("getCaptionInfo.sec")) {
    var mvi = item as IMetaVideoItem;
    if (mvi != null && mvi.Subtitle.HasSubtitle) {
      var surl = String.Format(
        "http://{0}:{1}{2}subtitle/{3}/st.srt",
        request.LocalEndPoint.Address, request.LocalEndPoint.Port,
        prefix, item.Id
      );
      _logger.DebugFormat("Sending subtitles {0}", surl);
      headers.Add("CaptionInfo.sec", surl);
    }
  }
  if (request.Headers.ContainsKey("getMediaInfo.sec")) {
    var md = item as IMetaDuration;
    if (md != null && md.MetaDuration.HasValue) {
      headers.Add(
        "MediaInfo.sec",
        string.Format(
          "SEC_Duration={0};",
          md.MetaDuration.Value.TotalMilliseconds
        )
      );
    }
  }
  headers.Add("transferMode.dlna.org", transferMode);
  _logger.Debug(headers);
}
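// For illustration only (values are placeholders, not taken from the original project):
// for a streamable video item the constructor above would emit a DLNA content-features
// header of roughly this shape, where <PN> and <FLAGS> stand for item.PN and
// DlnaMaps.DefaultStreaming respectively:
//
//   contentFeatures.dlna.org: DLNA.ORG_PN=<PN>;DLNA.ORG_OP=01;DLNA.ORG_CI=0;DLNA.ORG_FLAGS=<FLAGS>
//   transferMode.dlna.org: Streaming
//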
private void syncProductGroupMapping(List<MasterGroupMapping> listOfSourceProductGroupMappings, int? sourceParentProductGroupMappingID, int? destParentProductGroupMappingID, Connector destConnector)
{
    List<MasterGroupMapping> listOfSourceProductGroupMappingsByParentID = new List<MasterGroupMapping>();
    List<MasterGroupMapping> listOfDestProductGroupMappingsByParentID = new List<MasterGroupMapping>();

    if (sourceParentProductGroupMappingID.HasValue && sourceParentProductGroupMappingID.Value > 0)
    {
        listOfSourceProductGroupMappingsByParentID = listOfSourceProductGroupMappings
            .Where(x => x.ParentMasterGroupMappingID == sourceParentProductGroupMappingID.Value)
            .ToList();
        listOfDestProductGroupMappingsByParentID = masterGroupMappingRepo
            .GetListOfProductGroupsByConnector(destConnector.ConnectorID)
            .Where(x => x.SourceProductGroupMappingID != null && x.ParentMasterGroupMappingID == destParentProductGroupMappingID)
            .ToList();
    }
    else
    {
        listOfSourceProductGroupMappingsByParentID = listOfSourceProductGroupMappings
            .Where(x => x.ParentMasterGroupMappingID == null)
            .ToList();
        listOfDestProductGroupMappingsByParentID = masterGroupMappingRepo
            .GetListOfProductGroupsByConnector(destConnector.ConnectorID)
            .Where(x => x.ParentMasterGroupMappingID == null && x.SourceProductGroupMappingID != null)
            .ToList();
    }

    List<MasterGroupMapping> listOfProductGroupMappingsToInsert = (
        from parentPGM in listOfSourceProductGroupMappingsByParentID
        join childPGM in listOfDestProductGroupMappingsByParentID
            on parentPGM.MasterGroupMappingID equals childPGM.SourceProductGroupMappingID into mappedPGM
        from pgm in mappedPGM.DefaultIfEmpty()
        where pgm == null
        select parentPGM
    ).ToList();

    List<MasterGroupMapping> listOfProductGroupMappingsToUpdate = (
        from parentPGM in listOfSourceProductGroupMappingsByParentID
        join childPGM in listOfDestProductGroupMappingsByParentID
            on parentPGM.MasterGroupMappingID equals childPGM.SourceProductGroupMappingID
        where parentPGM.Score != childPGM.Score
            || parentPGM.FlattenHierarchy != childPGM.FlattenHierarchy
            || parentPGM.FilterByParentGroup != childPGM.FilterByParentGroup
            || parentPGM.ExportID != childPGM.ExportID
        select childPGM
    ).ToList();

    List<MasterGroupMapping> listOfProductGroupMappingsToDelete = (
        from childPGM in listOfDestProductGroupMappingsByParentID
        join parentPGM in listOfSourceProductGroupMappingsByParentID
            on childPGM.SourceProductGroupMappingID equals parentPGM.MasterGroupMappingID into mappedPGM
        from pgm in mappedPGM.DefaultIfEmpty()
        where pgm == null
        select childPGM
    ).ToList();

    if (listOfProductGroupMappingsToInsert.Count > 0)
    {
        InsertProductGroupMappingToDestConnector(listOfProductGroupMappingsToInsert, destConnector,
            (destParentProductGroupMappingID.HasValue && destParentProductGroupMappingID.Value > 0) ? destParentProductGroupMappingID : null);
    }
    if (listOfProductGroupMappingsToUpdate.Count > 0)
    {
        UpdateProductGroupMappingInDestConnector(listOfProductGroupMappingsToUpdate, listOfSourceProductGroupMappingsByParentID);
    }
    if (listOfProductGroupMappingsToDelete.Count > 0)
    {
        DeleteProductGroupMappingFromDestConnector(listOfProductGroupMappingsToDelete);
    }

    if (listOfProductGroupMappingsToInsert.Count + listOfProductGroupMappingsToDelete.Count > 0)
    {
        if (sourceParentProductGroupMappingID.HasValue && sourceParentProductGroupMappingID.Value > 0)
        {
            listOfDestProductGroupMappingsByParentID = masterGroupMappingRepo
                .GetListOfProductGroupsByConnector(destConnector.ConnectorID)
                .Where(x => x.SourceProductGroupMappingID != null && x.ParentMasterGroupMappingID == destParentProductGroupMappingID)
                .ToList();
        }
        else
        {
            listOfDestProductGroupMappingsByParentID = masterGroupMappingRepo
                .GetListOfProductGroupsByConnector(destConnector.ConnectorID)
                .Where(x => x.ParentMasterGroupMappingID == null && x.SourceProductGroupMappingID != null)
                .ToList();
        }
    }

    listOfSourceProductGroupMappingsByParentID.ForEach(productGroupMapping =>
    {
        if (!productGroupMapping.ParentMasterGroupMappingID.HasValue)
        {
            string productGroupMappingName = masterGroupMappingRepo
                .GetListOfMasterGroupMappingLanguagesByMasterGroupMappingID(productGroupMapping.MasterGroupMappingID)
                .Where(x => x.LanguageID == 2)
                .Select(x => x.Name)
                .FirstOrDefault();
            log.DebugFormat("Sync Product Group Mapping {0}({1})", productGroupMappingName, productGroupMapping.MasterGroupMappingID);
        }

        int countProductGroupMappingChildren = listOfSourceProductGroupMappings
            .Count(x => x.ParentMasterGroupMappingID == productGroupMapping.MasterGroupMappingID);

        if (countProductGroupMappingChildren > 0)
        {
            // SingleOrDefault instead of Single: the null check and the log message below
            // only make sense if a missing mapping is possible.
            MasterGroupMapping childProductGroupMapping = listOfDestProductGroupMappingsByParentID
                .SingleOrDefault(x => x.SourceProductGroupMappingID == productGroupMapping.MasterGroupMappingID);
            if (childProductGroupMapping != null)
            {
                syncProductGroupMapping(listOfSourceProductGroupMappings, productGroupMapping.MasterGroupMappingID, childProductGroupMapping.MasterGroupMappingID, destConnector);
            }
            else
            {
                log.DebugFormat("ProductGroupMapping with SourceProductGroupMappingID {0} in Connector {1} should exist, but it could not be found.",
                    productGroupMapping.MasterGroupMappingID, destConnector);
            }
        }
    });
}
/// <summary>
/// Synchronize Product Groups in Connector Mapping
/// </summary>
/// <param name="productsToSync">Dictionary of MasterGroupMappingID to list of MasterGroupMappingProduct</param>
public void SyncProductGroup(Dictionary<int, List<MasterGroupMappingProduct>> productsToSync)
{
    log.DebugFormat("");
    log.DebugFormat("------> Start Syncing Product Groups");
    int syncedProducts = 0;

    //var prods = (from p in productsToSync
    //             from c in p.Value
    //             select new MasterGroupMappingBulkModel
    //             {
    //                 ConnectorPublicationRuleID = c.ConnectorPublicationRuleID ?? 0,
    //                 IsApproved = c.IsApproved,
    //                 IsCustom = c.IsCustom,
    //                 IsProductMapped = c.IsProductMapped,
    //                 ProductID = c.ProductID,
    //                 MasterGroupMappingID = p.Key
    //             }).ToList();

    //string tableName = string.Format("Temp_Master_Group_Mapping_{0}", connectorID);

    //petaPoco.Execute(string.Format(@"IF (EXISTS (SELECT *
    //                 FROM INFORMATION_SCHEMA.TABLES
    //                 WHERE TABLE_SCHEMA = 'dbo'
    //                 AND TABLE_NAME = '{0}'))
    //BEGIN
    //    drop table [{0}]
    //END
    //", tableName));

    //petaPoco.Execute(string.Format(@"Create table [{0}](
    //    MasterGroupMappingID int not null,
    //    ProductID int not null,
    //    IsApproved bit not null,
    //    IsCustom bit not null,
    //    IsProductMapped bit not null,
    //    ConnectorPublicationRuleID int not null
    //)
    //", tableName));

    //using (var connection = new SqlConnection(connectionString))
    //{
    //    connection.Open();
    //    using (SqlBulkCopy copyBulk = new SqlBulkCopy(connection))
    //    {
    //        copyBulk.BatchSize = 100000;
    //        copyBulk.BulkCopyTimeout = 180;
    //        copyBulk.DestinationTableName = tableName;
    //        copyBulk.NotifyAfter = 100000;
    //        copyBulk.SqlRowsCopied += (s, e) => log.DebugFormat("{0} Records inserted ", e.RowsCopied);
    //        using (var collection = new GenericCollectionReader<MasterGroupMappingBulkModel>(prods))
    //        {
    //            copyBulk.WriteToServer(collection);
    //        }
    //    }
    //}

    //petaPoco.Execute(string.Format(@"merge mastergroupmappingproduct trg
    //using {0} src
    //on trg.mastergroupmappingid = src.mastergroupmappingid and trg.productid = src.productid
    //when not matched by target
    //    then insert (MasterGroupMappingID, ProductID, IsApproved, IsCustom, IsProductMapped, ConnectorPublicationRuleID)
    //    values (src.MasterGroupMappingID, src.ProductID, src.IsApproved, src.IsCustom, src.IsProductMapped, case when src.ConnectorPublicationRuleID = 0 then null else src.ConnectorPublicationRuleID end)
    //when matched
    //    then update
    //    set trg.ConnectorPublicationRuleID = case when src.ConnectorPublicationRuleID = 0 then null else src.ConnectorPublicationRuleID end
    //when not matched by source and trg.mastergroupmappingid in (select mastergroupmappingid from mastergroupmapping where connectorid = {1})
    //    then delete;", tableName, connectorID));

    //petaPoco.Execute(string.Format(@"drop table {0}", tableName));

    productsToSync.ForEach(productGroup =>
    {
        var currentProductsInProductGroup = masterGroupMappingRepo.GetListOfMappedProductsByMasterGroupMapping(productGroup.Key);

        List<MasterGroupMappingProduct> productsToCopy = (
            from p in productGroup.Value.Distinct()
            join c in currentProductsInProductGroup on p.ProductID equals c.ProductID into notExistProducts
            from nep in notExistProducts.DefaultIfEmpty()
            where nep == null
            select p
        ).ToList();

        List<MasterGroupMappingProduct> productsToUpdate = (
            from p in productGroup.Value.Distinct()
            join cp in currentProductsInProductGroup on p.ProductID equals cp.ProductID
            where p.ConnectorPublicationRuleID != cp.ConnectorPublicationRuleID
            select p
        ).ToList();

        List<MasterGroupMappingProduct> productsToDelete = (
            from c in currentProductsInProductGroup.Where(x => x.IsCustom == false)
            join p in productGroup.Value on c.ProductID equals p.ProductID into existProducts
            from ep in existProducts.DefaultIfEmpty()
            where ep == null
            select c
        ).ToList();

        if (productsToCopy.Count > 0)
        {
            CopyProducts(productGroup.Key, productsToCopy);
            syncedProducts += productsToCopy.Count;
        }
        if (productsToUpdate.Count > 0)
        {
            UpdateProducts(productGroup.Key, productsToUpdate);
            syncedProducts += productsToUpdate.Count; // was productsToDelete.Count, which counted the wrong list
        }
        if (productsToDelete.Count > 0)
        {
            DeleteProducts(productGroup.Key, productsToDelete);
            syncedProducts += productsToDelete.Count;
        }
    });

    log.DebugFormat("------> End Syncing Product Groups. {0} products were synchronized", syncedProducts);
}
public void Process()
{
    // At this point a master group mapping (MGM) already exists.
    // The goal of this plugin is to fill the content and content product tables.
    List<Connector> connectors = new List<Connector>();
    connectors = connectorRepo.GetListOfActiveConnectors();
#if DEBUG
    //connectors = connectorRepo.GetListOfActiveConnectors().Where(x => x.ConnectorID == 1 || x.ConnectorID == 5468 || x.ConnectorID == 5474).OrderByDescending(x => x.ConnectorID).ToList();
    connectors = connectorRepo.GetListOfActiveConnectors().Where(x => x.ConnectorID == 5480).ToList();
#else
    connectors = connectorRepo.GetListOfActiveConnectors();
#endif

    int countConnectors = 0;
    connectors.ForEach(connector =>
    {
        log.DebugFormat("");
        log.DebugFormat("------------------------- Syncing Connector: {0} ({1}). Connectors to sync: {2} -------------------------------------",
            connector.ConnectorID, connector.Name, connectors.Count - countConnectors);

        // If this connector has a parent, synchronize from the parent first.
        if (connector.ParentConnectorID.HasValue && connector.ParentConnectorID.Value > 0)
        {
            ProcessSyncChildConnector(connector);
        }

        // Sync Products
        // Determine which products must be copied from the MGM to the connector MGM
        // according to the connector publication rules.
        ProcessSyncProductGroups(connector);

        // Filter By Parent
        // If an MGM has the 'filter by parent' option, the plugin looks at the children of that MGM.
        // Products that exist in a child but not in the parent are deleted.
        ProcessFilterByParentProductGroups(connector);
        EmptyParentProductGroupOfFilterByParentProductGroups(connector);

        // Flatten Hierachy
        // If an MGM has 'flatten hierarchy', all products of that MGM's children are merged together.
        ProcessFlattenHierachyProductGroups(connector);

        // Sync Contents
        // Fill the content table from the connector MGM.
        ProcessSyncContent(connector);

        // Sync Content Product Groups
        // Fill the content product group table from the connector MGM.
        ProcessSyncContentProductGroup(connector);

        countConnectors++;
    });
}