/// <summary>
/// Creates a git wrapper rooted at <paramref name="outputDirectory"/>, recording the
/// encoding used for commit messages and whether tags are forced to be annotated.
/// </summary>
public GitWrapper(string outputDirectory, Logger logger, Encoding commitEncoding, bool forceAnnotatedTags)
    : base(outputDirectory, logger, gitExecutable, gitMetaDir)
{
    this.forceAnnotatedTags = forceAnnotatedTags;
    this.commitEncoding = commitEncoding;
}
/// <summary>
/// Indexer definition for FileList: registers site metadata and the
/// site-category → Torznab-category mappings.
/// </summary>
public FileList(IIndexerManagerService i, IWebClient wc, Logger l, IProtectionService ps)
    : base(name: "FileList",
           description: "The best Romanian site.",
           link: "http://filelist.ro/",
           caps: TorznabUtil.CreateDefaultTorznabTVCaps(),
           manager: i,
           client: wc,
           logger: l,
           p: ps,
           configData: new ConfigurationDataFileList())
{
    AddCategoryMapping(24, TorznabCatType.TVAnime);
    AddCategoryMapping(11, TorznabCatType.Audio);
    AddCategoryMapping(15, TorznabCatType.TV);
    // Site category 18 ("Other") has no Torznab equivalent and is unmapped.
    AddCategoryMapping(16, TorznabCatType.TVDocumentary);
    AddCategoryMapping(25, TorznabCatType.Movies3D);
    AddCategoryMapping(20, TorznabCatType.MoviesBluRay);
    AddCategoryMapping(2, TorznabCatType.MoviesSD);
    AddCategoryMapping(3, TorznabCatType.MoviesForeign); // Romanian releases
    AddCategoryMapping(4, TorznabCatType.MoviesHD);
    AddCategoryMapping(19, TorznabCatType.MoviesForeign); // Romanian releases
    AddCategoryMapping(1, TorznabCatType.MoviesSD);
    AddCategoryMapping(10, TorznabCatType.Console);
    AddCategoryMapping(9, TorznabCatType.PCGames);
    // Site category 17 ("Linux") has no Torznab equivalent and is unmapped.
    AddCategoryMapping(22, TorznabCatType.PCPhoneOther); // Apps/mobile
    AddCategoryMapping(8, TorznabCatType.PC);
    AddCategoryMapping(21, TorznabCatType.TVHD);
    AddCategoryMapping(23, TorznabCatType.TVSD);
    AddCategoryMapping(13, TorznabCatType.TVSport);
    AddCategoryMapping(14, TorznabCatType.TV);
    AddCategoryMapping(12, TorznabCatType.AudioVideo);
    AddCategoryMapping(7, TorznabCatType.XXX);
}
/// <summary>
/// Builds the plugin list control and wires up Visual Studio DTE solution/window
/// events. Bails out early if the DTE service or the current solution is unavailable.
/// </summary>
public PluginList()
{
    InitializeComponent();
    _logger = new Logger();

    _dte = Package.GetGlobalService(typeof(DTE)) as DTE;
    if (_dte == null)
        return;

    _solution = _dte.Solution;
    if (_solution == null)
        return;

    _events = _dte.Events;
    var windowEvents = _events.WindowEvents;
    windowEvents.WindowActivated += WindowEventsOnWindowActivated;

    _solutionEvents = _events.SolutionEvents;
    // NOTE(review): BeforeClosing gets two separate handlers here
    // (BeforeSolutionClosing and SolutionBeforeClosing) — confirm both are intended.
    _solutionEvents.BeforeClosing += BeforeSolutionClosing;
    _solutionEvents.BeforeClosing += SolutionBeforeClosing;
    _solutionEvents.ProjectAdded += SolutionProjectAdded;
    _solutionEvents.ProjectRemoved += SolutionProjectRemoved;
    _solutionEvents.ProjectRenamed += SolutionProjectRenamed;

    SelectedAssemblyItem.PropertyChanged += SelectedAssemblyItem_PropertyChanged;
}
/// <summary>
/// Grain activation hook: runs the base activation, then creates a per-grain
/// logger named "{TypeName}-{PrimaryKey}" and records the activation.
/// </summary>
public override async Task OnActivateAsync()
{
    await base.OnActivateAsync();
    string grainId = this.GetPrimaryKeyLong().ToString();
    logger = GetLogger(String.Format("{0}-{1}", GetType().Name, grainId));
    logger.Info("Activate.");
}
/// <summary>
/// Stores the injected indexer, logging, server and cache services for later use.
/// </summary>
public TorznabController(IIndexerManagerService i, Logger l, IServerService s, ICacheService c)
{
    cacheService = c;
    serverService = s;
    logger = l;
    indexerService = i;
}
/// <summary>
/// Captures the weaving configuration and helpers, and prepares a
/// case-insensitive cache of resolved assembly definitions.
/// </summary>
public AssemblyResolver(WeavingTask config, Logger logger, BuildEnginePropertyExtractor buildEnginePropertyExtractor)
{
    this.buildEnginePropertyExtractor = buildEnginePropertyExtractor;
    this.logger = logger;
    this.config = config;
    // Assembly names are not case sensitive, so cache lookups ignore case.
    assemblyDefinitionCache = new Dictionary<string, AssemblyDefinition>(StringComparer.OrdinalIgnoreCase);
}
/// <summary>
/// Default constructor. Wires up the UI components and the periodic clock, logs
/// whether a diary configuration file exists, and registers activation handlers.
/// </summary>
public SurfaceWindow1()
{
    log = new Logger("Rememo");
    log.Write("Starting application");

    // Both branches of the original code duplicated the component and clock
    // setup; the config check is still performed first, then setup runs once.
    bool configExists = DiaryManager.ConfigExists();
    InitializeComponent();
    clock.Elapsed += new System.Timers.ElapsedEventHandler(clock_Elapsed);
    clock.Enabled = true;

    if (configExists)
    {
        log.Write("Config found!");
        log.Write("Clock enabled");
        // NOTE(review): logged in the original but nothing sets welcome text on
        // this branch — confirm whether welcomeText(true) was intended here.
        log.Write("Welcome Text set");
    }
    else
    {
        log.Write("No config file!");
        log.Write("clock enabled");
        welcomeText(false);
    }

    // Add handlers for Application activation events
    AddActivationHandlers();
}
/// <summary>
/// Dispatches incoming plant events: blowing status changes, off-gas analysis
/// updates (CO/CO2) and PLC watchdog heartbeats. Other event types are ignored.
/// </summary>
/// <param name="evt">The event to dispatch.</param>
/// <param name="l">Logger parameter; not used by this handler but kept for signature compatibility.</param>
public static void EventsHandler(BaseEvent evt, Logger l)
{
    var blowing = evt as BlowingEvent;
    if (blowing != null)
    {
        Keeper.SetBlowingStatus(blowing.BlowingFlag == 1);
        return;
    }

    var flex = evt as FlexEvent;
    if (flex == null)
        return;

    if (flex.Operation.StartsWith("UDP.OffGasAnalysisEvent"))
    {
        var helper = new FlexHelper(flex);
        SetGasAnalysis(helper.GetDbl("CO"), helper.GetDbl("CO2"));
    }
    else if (flex.Operation.StartsWith("OPC.WatchdogsFromL1"))
    {
        var helper = new FlexHelper(flex);
        SetWatchdogReceive(
            helper.GetInt("WDPLC1"),
            helper.GetInt("WDPLC2"),
            helper.GetInt("WDPLC3"),
            helper.GetInt("WDPLC01"));
    }
}
/// <summary>
/// Indexer definition for PirateTheNet: site metadata, a user-facing notice, and
/// the release-tag → Torznab-category mappings.
/// </summary>
public PirateTheNet(IIndexerManagerService i, Logger l, IWebClient w, IProtectionService ps)
    : base(name: "PirateTheNet",
           description: "A movie tracker",
           link: "http://piratethe.net/",
           caps: new TorznabCapabilities(),
           manager: i,
           client: w,
           logger: l,
           p: ps,
           configData: new ConfigurationDataBasicLoginWithRSSAndDisplay())
{
    Encoding = Encoding.GetEncoding("UTF-8");
    Language = "en-us";

    this.configData.DisplayText.Value = "Only the results from the first search result page are shown, adjust your profile settings to show the maximum.";
    this.configData.DisplayText.Name = "Notice";

    AddCategoryMapping("1080P", TorznabCatType.MoviesHD);
    AddCategoryMapping("720P", TorznabCatType.MoviesHD);
    AddCategoryMapping("BDRip", TorznabCatType.MoviesSD);
    AddCategoryMapping("BluRay", TorznabCatType.MoviesBluRay);
    AddCategoryMapping("BRRip", TorznabCatType.MoviesSD);
    AddCategoryMapping("DVDR", TorznabCatType.MoviesDVD);
    AddCategoryMapping("DVDRip", TorznabCatType.MoviesSD);
    AddCategoryMapping("FLAC", TorznabCatType.AudioLossless);
    AddCategoryMapping("MP3", TorznabCatType.AudioMP3);
    AddCategoryMapping("MP4", TorznabCatType.AudioOther);
    AddCategoryMapping("Packs", TorznabCatType.Movies);
    AddCategoryMapping("R5", TorznabCatType.MoviesDVD);
    AddCategoryMapping("Remux", TorznabCatType.Movies);
    AddCategoryMapping("TVRip", TorznabCatType.MoviesOther);
    AddCategoryMapping("WebRip", TorznabCatType.MoviesWEBDL);
}
/// <summary>
/// Configures NLog for the example programs: a colored console target always, plus
/// a debugger target when a debugger is attached, both using the same layout and
/// logging rule pattern. Finishes by acquiring the class logger.
/// </summary>
public ExamplePrograms()
{
    var loggerNamePattern = "BitSharp.Examples.*";
    var logLevel = LogLevel.Info;

    // Shared layout: message plus full exception details including inner exceptions.
    var layout = "${message} ${exception:separator=\r\n:format=message,type,method,stackTrace:maxInnerExceptionLevel=10:innerExceptionSeparator=\r\n:innerFormat=message,type,method,stackTrace}";

    var config = new LoggingConfiguration();

    // Console target is always present.
    var consoleTarget = new ColoredConsoleTarget { Layout = layout };
    config.AddTarget("console", consoleTarget);
    config.LoggingRules.Add(new LoggingRule(loggerNamePattern, logLevel, consoleTarget));

    // Mirror output to the debugger when one is attached.
    if (Debugger.IsAttached)
    {
        var debuggerTarget = new DebuggerTarget { Layout = layout };
        config.AddTarget("debugger", debuggerTarget);
        config.LoggingRules.Add(new LoggingRule(loggerNamePattern, logLevel, debuggerTarget));
    }

    LogManager.Configuration = config;
    logger = LogManager.GetCurrentClassLogger();
}
/// <summary>
/// Server's main loop implementation: accepts TCP connections on the loopback
/// interface forever, handing each connection's stream to the dispatcher. The
/// listener is stopped when the loop exits (e.g. via an exception).
/// </summary>
/// <param name="log">The Logger instance to be used.</param>
public void Run( Logger log )
{
    TcpListener listener = null;
    try
    {
        listener = new TcpListener( IPAddress.Loopback, portNumber );
        listener.Start();
        while ( true )
        {
            log.LogMessage( "Listener - Waiting for connection requests." );
            TcpClient socket = listener.AcceptTcpClient();
            // Linger up to 10s on close so queued data is flushed.
            socket.LingerState = new LingerOption( true, 10 );
            log.LogMessage( String.Format( "Listener - Connection established with {0}.", socket.Client.RemoteEndPoint ) );
            // Hand the connection stream to the dispatcher; it owns processing from here.
            _dispatcher.ProcessConnection(socket.GetStream(), log);
            Program.ShowInfo( Store.Instance );
        }
    }
    finally
    {
        log.LogMessage( "Listener - Ending." );
        if ( listener != null )
        {
            listener.Stop();
        }
    }
}
/// <summary>
/// Indexer definition for ImmortalSeed: site metadata and the
/// site-category → Torznab-category mappings.
/// </summary>
public ImmortalSeed(IIndexerManagerService i, IWebClient wc, Logger l)
    : base(name: "ImmortalSeed",
           description: "ImmortalSeed",
           link: "http://immortalseed.me/",
           caps: TorznabCapsUtil.CreateDefaultTorznabTVCaps(),
           manager: i,
           client: wc,
           logger: l)
{
    AddCategoryMapping(32, TorznabCatType.Anime);
    AddCategoryMapping(47, TorznabCatType.TVSD);
    AddCategoryMapping(8, TorznabCatType.TVHD);
    AddCategoryMapping(48, TorznabCatType.TVHD);
    AddCategoryMapping(9, TorznabCatType.TVSD);
    AddCategoryMapping(4, TorznabCatType.TVHD);
    AddCategoryMapping(6, TorznabCatType.TVSD);
    AddCategoryMapping(22, TorznabCatType.Books);
    AddCategoryMapping(41, TorznabCatType.Comic);
    AddCategoryMapping(23, TorznabCatType.Apps);
    AddCategoryMapping(16, TorznabCatType.MoviesHD);
    AddCategoryMapping(17, TorznabCatType.MoviesSD);
    AddCategoryMapping(14, TorznabCatType.MoviesSD);
    // NOTE(review): category 34 is mapped twice (MoviesForeign here, Audio below);
    // which mapping wins depends on AddCategoryMapping's duplicate handling — confirm.
    AddCategoryMapping(34, TorznabCatType.MoviesForeign);
    AddCategoryMapping(18, TorznabCatType.MoviesForeign);
    AddCategoryMapping(33, TorznabCatType.MoviesForeign);
    AddCategoryMapping(34, TorznabCatType.Audio);
    AddCategoryMapping(37, TorznabCatType.AudioLossless);
    AddCategoryMapping(35, TorznabCatType.AudioBooks);
    AddCategoryMapping(36, TorznabCatType.AudioLossy);
}
/// <summary>
/// Stores the injected container, configuration, logging and cache services.
/// </summary>
public IndexerManagerService(IContainer c, IConfigurationService config, Logger l, ICacheService cache)
{
    cacheService = cache;
    logger = l;
    configService = config;
    container = c;
}
/// <summary>
/// Wraps the given NLog logger behind this adapter.
/// </summary>
/// <param name="logger">The underlying NLog logger; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="logger"/> is null.</exception>
public NLogLogger(Logger logger)
{
    if (logger == null)
    {
        throw new ArgumentNullException("logger");
    }
    this.logger = logger;
}
/// <summary>
/// Listener entry point: tracks heat changes (resetting the program state when the
/// heat number changes) and forwards FlexEvents to the carbon event handlers.
/// </summary>
public void OnEvent(BaseEvent evt)
{
    using (var listenerLog = new Logger("SublanceGenerator Listener"))
    {
        var heatChange = evt as HeatChangeEvent;
        if (heatChange != null)
        {
            if (HeatNumber != heatChange.HeatNumber)
            {
                listenerLog.msg("Heat Changed. New Heat ID: {0}\n", heatChange.HeatNumber);
                HeatNumber = heatChange.HeatNumber;
                Program.Reset();
            }
            else
            {
                listenerLog.msg("Heat No Changed. Heat ID: {0}\n", heatChange.HeatNumber);
            }
        }

        var flex = evt as FlexEvent;
        if (flex != null)
        {
            // Fan the event out to each carbon-model processor by index and name.
            CarbonEventHandler(flex, 0, "OffGasDecarbonater");
            CarbonEventHandler(flex, 1, "SMFCarbon");
            CarbonEventHandler(flex, 2, "CPlusProcessor");
            CarbonEventHandler(flex, 3, "NeuralProcessorC");
            CarbonEventHandler(flex, 4, "CorrectionCT");
        }
    }
}
/// <summary>
/// Test fixture setup: loads the source under test, parses it to a syntax root,
/// and prepares the comments retriever and logger.
/// </summary>
public CommentsAnalyzerTests()
{
    this.code = FileUtil.ReadAllText(path);
    this.root = ASTUtil.GetSyntaxTreeFromSource(code).GetRoot();
    this.logger = NLoggerUtil.GetNLogger(typeof (CommentsAnalyzerTests));
    this.retriever = RetrieverFactory.GetCommentsRetriever();
}
/// <summary>
/// Wraps a ship-controller block: sets up a grid-named logger, caches the in-game
/// controller interface, and registers this wrapper with the block registrar.
/// </summary>
public ShipController(IMyCubeBlock block)
    : base(block)
{
    myLogger = new Logger("ShipController", () => CubeBlock.CubeGrid.DisplayName);
    myController = CubeBlock as Ingame.IMyShipController;
    Registrar.Add(CubeBlock, this);
}
/// <summary>
/// Indexer definition for AlphaRatio: site metadata, the download URL base, and
/// the site-category → Torznab-category mappings.
/// </summary>
public AlphaRatio(IIndexerManagerService i, IWebClient w, Logger l, IProtectionService ps)
    : base(name: "AlphaRatio",
           description: "Legendary",
           link: "https://alpharatio.cc/",
           caps: new TorznabCapabilities(),
           manager: i,
           client: w,
           logger: l,
           p: ps,
           downloadBase: "https://alpharatio.cc/torrents.php?action=download&id=",
           configData: new ConfigurationDataBasicLogin())
{
    Encoding = Encoding.GetEncoding("UTF-8");
    Language = "en-us";

    AddCategoryMapping(1, TorznabCatType.TVSD);
    AddCategoryMapping(2, TorznabCatType.TVHD);
    AddCategoryMapping(6, TorznabCatType.MoviesSD);
    AddCategoryMapping(7, TorznabCatType.MoviesHD);
    AddCategoryMapping(10, TorznabCatType.XXX);
    AddCategoryMapping(20, TorznabCatType.XXX);
    AddCategoryMapping(12, TorznabCatType.PCGames);
    AddCategoryMapping(13, TorznabCatType.ConsoleXbox);
    AddCategoryMapping(14, TorznabCatType.ConsolePS3);
    AddCategoryMapping(15, TorznabCatType.ConsoleWii);
    AddCategoryMapping(16, TorznabCatType.PC);
    AddCategoryMapping(17, TorznabCatType.PCMac);
    AddCategoryMapping(19, TorznabCatType.PCPhoneOther);
    AddCategoryMapping(21, TorznabCatType.BooksEbook);
    AddCategoryMapping(22, TorznabCatType.AudioAudiobook);
    AddCategoryMapping(23, TorznabCatType.Audio);
}
/// <summary>
/// Initializes a new instance of the <see cref="LoggerViewModel"/> class monitoring a single logger.
/// </summary>
/// <param name="serviceProvider">A service provider that can provide a <see cref="IDispatcherService"/> to use for this view model.</param>
/// <param name="logger">The <see cref="Logger"/> to monitor; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="logger"/> is null.</exception>
public LoggerViewModel(IViewModelServiceProvider serviceProvider, Logger logger)
    : this(serviceProvider)
{
    if (logger == null)
    {
        throw new ArgumentNullException("logger");
    }

    // Track this logger with an initially-empty message list and subscribe to new messages.
    Loggers.Add(logger, new List<ILogMessage>());
    logger.MessageLogged += MessageLogged;
}
/// <summary>
/// Stores the persistence and tweet-message services, acquires the "log" logger,
/// and immediately authenticates against Twitter.
/// </summary>
public TwitterShareService(TwitterPersistenceService twitterPersistenceService, TweetMessageService tweetMessageService)
{
    _tweetMessageService = tweetMessageService;
    _twitterPersistenceService = twitterPersistenceService;
    _logger = LogManager.GetLogger("log");
    Authenticate();
}
/// <summary>
/// Page initialization: resolves the shared singleton services, then, when a
/// non-empty "UID" query-string parameter is present, normalizes it and loads
/// the comments for that UID.
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    links = Links.Instance;
    gui = GUIVariables.Instance;
    dbOps = DBOperations.Instance;
    categories = Categories.Instance;
    log = Logger.Instance;
    engine = ProcessingEngine.Instance;
    general = General.Instance;
    imageEngine = ImageEngine.Instance;
    seperator = gui.Seperator;

    // Read the query string once (the original fetched it twice) and invert the
    // conditions: the original used empty then-branches with logic in the else.
    var uid = Request.QueryString["UID"];
    if (!string.IsNullOrEmpty(uid))
    {
        queryStringUID = uid.Trim().ToLower();
    }

    if (!string.IsNullOrEmpty(queryStringUID))
    {
        LoadComments(queryStringUID);
    }
}
/// <summary>
/// Builds vertex buffers from an SKN mesh file and forwards them to the list-based
/// Create overload. Converts DirectX-style (left-handed) input to the handedness
/// OpenGL expects by negating the Z component of positions and normals (per the
/// original note, quaternion inputs would also negate Z and W, though none are
/// processed here).
/// </summary>
/// <param name="file">Parsed SKN mesh whose vertices and indices are read.</param>
/// <param name="logger">Logger forwarded to the buffer-creation overload.</param>
/// <returns>The result of the list-based Create overload.</returns>
public bool Create(SKNFile file, Logger logger)
{
    List<float> positions = new List<float>();
    List<float> normals = new List<float>();
    List<float> texCoords = new List<float>();

    for (int v = 0; v < file.numVertices; ++v)
    {
        var vertex = file.vertices[v];

        positions.Add(vertex.position[0]);
        positions.Add(vertex.position[1]);
        positions.Add(-vertex.position[2]); // flip handedness

        normals.Add(vertex.normal[0]);
        normals.Add(vertex.normal[1]);
        normals.Add(-vertex.normal[2]); // flip handedness

        texCoords.Add(vertex.texCoords[0]);
        texCoords.Add(vertex.texCoords[1]);
    }

    // NOTE(review): this loop bounds on the member `numIndices`, not
    // `file.numIndices` — confirm the two always agree.
    List<uint> indices = new List<uint>();
    for (int i = 0; i < numIndices; ++i)
    {
        indices.Add((uint)file.indices[i]);
    }

    return Create(positions, normals, texCoords, indices, logger);
}
/// <summary>
/// Stores disk-scanning dependencies and obtains the watch-folder item cache
/// keyed by this service's type.
/// </summary>
public ScanWatchFolder(ICacheManager cacheManager, IDiskScanService diskScanService, IDiskProvider diskProvider, Logger logger)
{
    _diskScanService = diskScanService;
    _diskProvider = diskProvider;
    _logger = logger;
    _watchFolderItemCache = cacheManager.GetCache<Dictionary<string, WatchFolderItem>>(GetType());
}
/// <summary>
/// Stores the configuration and logger, then builds the language parser on top of them.
/// </summary>
public Language(Konfiguration cfg, Logger log)
{
    this.logger = log;
    this.config = cfg;
    // The parser shares this instance's configuration and logger.
    this.parser = new LanguageParser(this.config, this.logger);
}
/// <summary>
/// Demo entry point: builds a file-backed logger and emits one message at each of
/// the critical, warning and info levels.
/// </summary>
static void Main()
{
    var appender = new FileAppender(new SimpleLayout(), "log.txt");
    var logger = new Logger(appender);

    logger.Critical("asdasd");
    logger.Warn("asdasd");
    logger.Info("asdasd");
}
/// <summary>
/// Builds the XMPP manager: stores the crypto key rings and collaborators, opens a
/// client connection for the account in <paramref name="authInfo"/>, and registers
/// login/presence handlers. Message callbacks are registered later, in the
/// onRosterItem callback.
/// </summary>
/// <param name="authInfo">Account credentials (JID in m_sId, password in m_sPassword).</param>
/// <param name="xmppServiceDescription">Service description; not read by this constructor.</param>
public XmppManager(AuthInfo authInfo, XmppServiceDescription xmppServiceDescription, OTRKeyRing _OTRKeyRing, OpenPGPRing _OpenPGPRing, ConversationManager _conversationManager, AddressBook _addressBook, Logger _logger)
{
    try
    {
        m_Logger = _logger;
        m_OTRKeyRing = _OTRKeyRing;
        m_OpenPGPRing = _OpenPGPRing;
        m_AuthInfo = authInfo;
        m_OwnJid = new Jid (m_AuthInfo.m_sId);
        m_ClientConnection = new XmppClientConnection(m_OwnJid.Server);
        m_Contacts = new Dictionary<string, string> ();
        m_Logger.log(ELogLevel.LVL_INFO, "Trying to log in xmpp user", m_sModuleName);
        m_ClientConnection.Open(m_OwnJid.User, m_AuthInfo.m_sPassword);
        m_ConversationManager = _conversationManager;
        m_AddressBook = _addressBook;
        m_PresenceManager = new PresenceManager(m_AddressBook);
        m_OtrConnections = new Dictionary<string, int>();

        // Register event handlers for login completion and presence updates.
        m_ClientConnection.OnLogin += new ObjectHandler(onLogin);
        m_ClientConnection.OnPresence += new PresenceHandler(OnPresence);
    }
    catch (Exception e)
    {
        // NOTE(review): any failure (bad JID, connection error) is swallowed here
        // and only written to the console — callers never learn the manager is
        // half-initialized. Consider logging via m_Logger and/or rethrowing.
        Console.Write (e.Message);
    }
}
/// <summary>
/// Indexer definition for BIT-HDTV: site metadata (ISO-8859-1 encoded site) and
/// the site-category → Torznab-category mappings.
/// </summary>
public BitHdtv(IIndexerManagerService i, Logger l, IWebClient w, IProtectionService ps)
    : base(name: "BIT-HDTV",
           description: "Home of high definition invites",
           link: "https://www.bit-hdtv.com/",
           caps: new TorznabCapabilities(),
           manager: i,
           client: w,
           logger: l,
           p: ps,
           configData: new ConfigurationDataBasicLogin())
{
    Encoding = Encoding.GetEncoding("iso-8859-1");
    Language = "en-us";

    AddCategoryMapping(1, TorznabCatType.TVAnime);       // Anime
    AddCategoryMapping(2, TorznabCatType.MoviesBluRay);  // Blu-ray
    AddCategoryMapping(4, TorznabCatType.TVDocumentary); // Documentaries
    AddCategoryMapping(6, TorznabCatType.AudioLossless); // HQ Audio
    AddCategoryMapping(7, TorznabCatType.Movies);        // Movies
    AddCategoryMapping(8, TorznabCatType.AudioVideo);    // Music Videos
    AddCategoryMapping(5, TorznabCatType.TVSport);       // Sports
    AddCategoryMapping(10, TorznabCatType.TV);           // TV
    AddCategoryMapping(12, TorznabCatType.TV);           // TV/Seasonpack
    AddCategoryMapping(11, TorznabCatType.XXX);          // XXX
}
/// <summary>
/// Stores the download-client provider, event aggregator and logger dependencies.
/// </summary>
public DownloadService(IProvideDownloadClient downloadClientProvider, IEventAggregator eventAggregator, Logger logger)
{
    _logger = logger;
    _eventAggregator = eventAggregator;
    _downloadClientProvider = downloadClientProvider;
}
/// <summary>
/// Builds the main window: wires tray/menu actions, initializes the model, fills
/// the folder list, and loads a torrent passed on the command line, flagging
/// errorOnLoad if that fails.
/// </summary>
public MainForm(string[] args)
{
    InitializeComponent();

    logger = LogManager.GetLogger("");
    logger.Log(LogLevel.Info, "Starting up...");

    // Window buttons and tray menu items share the same Exit/ShowHide actions.
    btnExit.Click += delegate { Exit(); };
    btnMinimize.Click += delegate { ShowHide(); };
    showHideToolStripMenuItem.Click += delegate { ShowHide(); };
    exitToolStripMenuItem.Click += delegate { Exit(); };

    notifyIcon.Text = Program.FullName;
    this.Text = Program.FullName;

    model = new Model(this.components, logger);
    HookModel();
    if (!model.Initialize())
    {
        // NOTE(review): closing from within the constructor; verify the form
        // actually terminates cleanly when initialization fails.
        this.Close();
    }

    PopulateFolderList();

    // A single command-line argument is treated as a torrent file to load.
    if (args.Length > 0)
    {
        if (!model.LoadTorrent(args[0]))
            errorOnLoad = true;
    }
}
/// <summary>
/// Opens a serial-port wrapper for the Arduino: stores the logger, creates the
/// underlying port, and records the name and baud rate.
/// </summary>
public ArduiPort(string name, int baud, Logger logger)
{
    _Logger = logger;
    _port = new SerialPort(name, baud);
    SetName(name);
    SetBaudRate(baud);
}
/// <remarks>
/// Please set the following connection strings in app.config for this WebJob to run:
/// AzureWebJobsDashboard and AzureWebJobsStorage
/// Better yet, add them to your Azure portal so they can be changed at runtime without re-deploying or re-compiling.
/// </remarks>
/// <summary>
/// WebJob entry point: registers this instance as the core host, runs the host
/// (StartHost blocks until the job stops), then logs shutdown.
/// </summary>
public virtual void Go() { CoreHost = this; StartHost(); Logger.LogInfo("Application Stopped."); }
/// <summary>
/// Stores the configuration, unit-of-work factory, logger, repositories and
/// business-transaction dependencies for the authentication workflow.
/// </summary>
public AuthenticationProcess(IAuthenticationConfiguration configuration, IUnitOfWorkScopeFactory scopeFactory, Logger logger, IAccountRepository accountRepository, ILogonHistoryRepository logonHistoryRepository, IBusinessTransactionObservable transaction)
{
    _configuration = configuration;
    _scopeFactory = scopeFactory;
    _logger = logger;
    _accountRepository = accountRepository;
    _logonHistoryRepository = logonHistoryRepository;
    _transaction = transaction;
}
/// <summary>
/// Parses and applies RestrainedLife/RestrainedLove (RLV) commands carried in a chat
/// message. Returns false unless the processor is enabled and the message starts with
/// '@'; "@clear" drops all rules from the sender. Otherwise each comma-separated
/// command is matched against rlv_regex into behaviour/option/param, restriction
/// rules (param "n"/"add") are stored and rules (param "y"/"rem") removed under the
/// `rules` lock, and the remaining behaviours (version queries, sit/unsit, teleport,
/// outfit/attachment/inventory queries and manipulation) are dispatched via the
/// switch. Responses go out on the integer channel parsed from the rule's param.
/// NOTE(review): left byte-identical — this switch's behavior depends on exact
/// fall-through of the shared `chan` local and per-case state; restyling it is not
/// safe without integration tests.
/// </summary>
public bool TryProcessCMD(ChatEventArgs e) { if (!Enabled || !e.Message.StartsWith("@")) return false; if (e.Message == "@clear") { Clear(e.SourceID); return true; } foreach (var cmd in e.Message.Substring(1).Split(',')) { var m = rlv_regex.Match(cmd); if (!m.Success) continue; var rule = new RLVRule(); rule.Behaviour = m.Groups["behaviour"].ToString().ToLower(); ; rule.Option = m.Groups["option"].ToString().ToLower(); rule.Param = m.Groups["param"].ToString().ToLower(); rule.Sender = e.SourceID; rule.SenderName = e.FromName; Logger.DebugLog(rule.ToString()); if (rule.Param == "rem") rule.Param = "y"; if (rule.Param == "add") rule.Param = "n"; if (rule.Param == "n") { lock (rules) { var existing = rules.Find(r => r.Behaviour == rule.Behaviour && r.Sender == rule.Sender && r.Option == rule.Option); if (existing != null) { rules.Remove(existing); } rules.Add(rule); OnRLVRuleChanged(new RLVEventArgs(rule)); } continue; } if (rule.Param == "y") { lock (rules) { if (rule.Option == "") { rules.RemoveAll((RLVRule r) => { return r.Behaviour == rule.Behaviour && r.Sender == rule.Sender; }); } else { rules.RemoveAll((RLVRule r) => { return r.Behaviour == rule.Behaviour && r.Sender == rule.Sender && r.Option == rule.Option; }); } } OnRLVRuleChanged(new RLVEventArgs(rule)); continue; } switch (rule.Behaviour) { case "version": int chan = 0; if (int.TryParse(rule.Param, out chan) && chan > 0) { Respond(chan, "RestrainedLife viewer v1.23 (" + Properties.Resources.RadegastTitle + "." + RadegastBuild.CurrentRev + ")"); } break; case "versionnew": chan = 0; if (int.TryParse(rule.Param, out chan) && chan > 0) { Respond(chan, "RestrainedLove viewer v1.23 (" + Properties.Resources.RadegastTitle + "." 
+ RadegastBuild.CurrentRev + ")"); } break; case "versionnum": if (int.TryParse(rule.Param, out chan) && chan > 0) { Respond(chan, "1230100"); } break; case "getgroup": if (int.TryParse(rule.Param, out chan) && chan > 0) { UUID gid = client.Self.ActiveGroup; if (instance.Groups.ContainsKey(gid)) { Respond(chan, instance.Groups[gid].Name); } } break; case "setgroup": { if (rule.Param == "force") { foreach (var g in instance.Groups.Values) { if (g.Name.ToLower() == rule.Option) { client.Groups.ActivateGroup(g.ID); } } } } break; case "getsitid": if (int.TryParse(rule.Param, out chan) && chan > 0) { Avatar me; if (client.Network.CurrentSim.ObjectsAvatars.TryGetValue(client.Self.LocalID, out me)) { if (me.ParentID != 0) { Primitive seat; if (client.Network.CurrentSim.ObjectsPrimitives.TryGetValue(me.ParentID, out seat)) { Respond(chan, seat.ID.ToString()); break; } } } Respond(chan, UUID.Zero.ToString()); } break; case "getstatusall": case "getstatus": if (int.TryParse(rule.Param, out chan) && chan > 0) { string sep = "/"; string filter = ""; if (!string.IsNullOrEmpty(rule.Option)) { var parts = rule.Option.Split(';'); if (parts.Length > 1 && parts[1].Length > 0) { sep = parts[1].Substring(0, 1); } if (parts.Length > 0 && parts[0].Length > 0) { filter = parts[0].ToLower(); } } lock (rules) { string res = ""; rules .FindAll(r => (rule.Behaviour == "getstatusall" || r.Sender == rule.Sender) && r.Behaviour.Contains(filter)) .ForEach(objRule => { res += sep + objRule.Behaviour; if (!string.IsNullOrEmpty(objRule.Option)) { res += ":" + objRule.Option; } }); Respond(chan, res); } } break; case "sit": UUID sitTarget = UUID.Zero; if (rule.Param == "force" && UUID.TryParse(rule.Option, out sitTarget) && sitTarget != UUID.Zero) { instance.State.SetSitting(true, sitTarget); } break; case "unsit": if (rule.Param == "force") { instance.State.SetSitting(false, UUID.Zero); } break; case "setrot": double rot = 0.0; if (rule.Param == "force" && double.TryParse(rule.Option, 
System.Globalization.NumberStyles.Float, Utils.EnUsCulture, out rot)) { client.Self.Movement.UpdateFromHeading(Math.PI / 2d - rot, true); } break; case "tpto": var coord = rule.Option.Split('/'); try { float gx = float.Parse(coord[0], Utils.EnUsCulture); float gy = float.Parse(coord[1], Utils.EnUsCulture); float z = float.Parse(coord[2], Utils.EnUsCulture); float x = 0, y = 0; instance.TabConsole.DisplayNotificationInChat("Starting teleport..."); ulong h = Helpers.GlobalPosToRegionHandle(gx, gy, out x, out y); client.Self.RequestTeleport(h, new Vector3(x, y, z)); } catch (Exception) { } break; #region #RLV folder and outfit manipulation case "getoutfit": if (int.TryParse(rule.Param, out chan) && chan > 0) { var wearables = client.Appearance.GetWearables(); string res = ""; // Do we have a specific wearable to check, ie @getoutfit:socks=99 if (!string.IsNullOrEmpty(rule.Option)) { if (wearables.ContainsKey(WearableFromString(rule.Option))) { res = "1"; } else { res = "0"; } } else { for (int i = 0; i < RLVWearables.Count; i++) { if (wearables.ContainsKey(RLVWearables[i].Type)) { res += "1"; } else { res += "0"; } } } Respond(chan, res); } break; case "getattach": if (int.TryParse(rule.Param, out chan) && chan > 0) { string res = ""; var attachments = client.Network.CurrentSim.ObjectsPrimitives.FindAll(p => p.ParentID == client.Self.LocalID); if (attachments.Count > 0) { var myPoints = new List<AttachmentPoint>(attachments.Count); for (int i = 0; i < attachments.Count; i++) { if (!myPoints.Contains(attachments[i].PrimData.AttachmentPoint)) { myPoints.Add(attachments[i].PrimData.AttachmentPoint); } } // Do we want to check one single attachment if (!string.IsNullOrEmpty(rule.Option)) { if (myPoints.Contains(AttachmentPointFromString(rule.Option))) { res = "1"; } else { res = "0"; } } else { for (int i = 0; i < RLVAttachments.Count; i++) { if (myPoints.Contains(RLVAttachments[i].Point)) { res += "1"; } else { res += "0"; } } } } Respond(chan, res); } break; case 
"remattach": case "detach": if (rule.Param == "force") { if (!string.IsNullOrEmpty(rule.Option)) { var point = RLVAttachments.Find(a => a.Name == rule.Option); if (point.Name == rule.Option) { var attachment = client.Network.CurrentSim.ObjectsPrimitives.Find(p => p.ParentID == client.Self.LocalID && p.PrimData.AttachmentPoint == point.Point); if (attachment != null && client.Inventory.Store.Items.ContainsKey(CurrentOutfitFolder.GetAttachmentItem(attachment))) { instance.COF.Detach((InventoryItem)client.Inventory.Store.Items[CurrentOutfitFolder.GetAttachmentItem(attachment)].Data); } } else { InventoryNode folder = FindFolder(rule.Option); if (folder != null) { var outfit = new List<InventoryItem>(); foreach (var item in folder.Nodes.Values) { if (CurrentOutfitFolder.CanBeWorn(item.Data)) { outfit.Add((InventoryItem)(item.Data)); } } instance.COF.RemoveFromOutfit(outfit); } } } else { client.Network.CurrentSim.ObjectsPrimitives.FindAll(p => p.ParentID == client.Self.LocalID).ForEach(attachment => { if (client.Inventory.Store.Items.ContainsKey(CurrentOutfitFolder.GetAttachmentItem(attachment))) { instance.COF.Detach((InventoryItem)client.Inventory.Store.Items[CurrentOutfitFolder.GetAttachmentItem(attachment)].Data); } }); } } break; case "remoutfit": if (rule.Param == "force") { if (!string.IsNullOrEmpty(rule.Option)) { var w = RLVWearables.Find(a => a.Name == rule.Option); if (w.Name == rule.Option) { var items = instance.COF.GetWornAt(w.Type); instance.COF.RemoveFromOutfit(items); } } } break; case "attach": case "attachoverorreplace": case "attachover": case "attachall": case "attachallover": if (rule.Param == "force") { if (!string.IsNullOrEmpty(rule.Option)) { InventoryNode folder = FindFolder(rule.Option); if (folder != null) { List<InventoryItem> outfit = new List<InventoryItem>(); if(rule.Behaviour == "attachall" || rule.Behaviour == "attachallover") { GetAllItems(folder, true, ref outfit); } else { GetAllItems(folder, false, ref outfit); } if (rule.Behaviour 
== "attachover" || rule.Behaviour == "attachallover") { instance.COF.AddToOutfit(outfit, false); } else { instance.COF.AddToOutfit(outfit, true); } } } } break; case "getinv": if (int.TryParse(rule.Param, out chan) && chan > 0) { string res = string.Empty; InventoryNode folder = FindFolder(rule.Option); if (folder != null) { foreach (var f in folder.Nodes.Values) { if (f.Data is InventoryFolder && !f.Data.Name.StartsWith(".")) { res += f.Data.Name + ","; } } } Respond(chan, res.TrimEnd(',')); } break; case "getinvworn": if (int.TryParse(rule.Param, out chan) && chan > 0) { string res = string.Empty; InventoryNode root = FindFolder(rule.Option); if (root != null) { res += "|" + GetWornIndicator(root) + ","; foreach (var n in root.Nodes.Values) { if (n.Data is InventoryFolder && !n.Data.Name.StartsWith(".")) { res += n.Data.Name + "|" + GetWornIndicator(n) + ","; } } } Respond(chan, res.TrimEnd(',')); } break; case "findfolder": case "findfolders": if (int.TryParse(rule.Param, out chan) && chan > 0) { StringBuilder response = new StringBuilder(); string[] keywordsArray = rule.Option.Split(new string[] {"&&"}, StringSplitOptions.None); if (keywordsArray.Any()) { List<InventoryNode> matching_nodes = FindFoldersKeyword(keywordsArray); if(matching_nodes.Any()) { if(rule.Behaviour == "findfolder") { InventoryNode bestCandidate = null; int bestCandidateSlashCount = -1; foreach (var match in matching_nodes) { string fullPath = FindFullInventoryPath(match, ""); int numSlashes = fullPath.Count(ch => ch == '/'); if(bestCandidate == null || numSlashes > bestCandidateSlashCount) { bestCandidateSlashCount = numSlashes; bestCandidate = match; } } string bestCandidatePath = bestCandidate.Data.Name; if (bestCandidatePath.Substring(0, 5).ToLower() == @"#rlv/") { bestCandidatePath = bestCandidatePath.Substring(5); } response.Append(bestCandidatePath); } else { StringBuilder sb = new StringBuilder(); foreach (var node in matching_nodes) { string fullPath = FindFullInventoryPath(node, 
""); if (fullPath.Length > 4 && fullPath.Substring(0, 5).ToLower() == @"#rlv/") { fullPath = fullPath.Substring(5); } response.Append(fullPath + ","); } } } } Respond(chan, response.ToString().TrimEnd(',')); } break; #endregion #RLV folder and outfit manipulation } } return true; }
/// <summary>
/// Acquires the shared "dynamic-compilation" logger and silences its listener output.
/// </summary>
public RoslynTypeReferenceBuilder()
{
    logger = Logger.GetLogger("dynamic-compilation");
    logger.OutputToListeners = false;
}
/// <summary>
/// Searches Discogs for an artist matching <paramref name="query"/>. The result is
/// cached under "uri:discogs:artistsearch:{query}"; on a cache miss the Discogs
/// database search API is called, the first hit's artist detail is fetched, and its
/// URLs, images (non-primary), primary/fallback thumbnail and name variations are
/// folded into an ArtistSearchResult. Any exception is logged and surfaced as
/// IsSuccess = false.
/// NOTE(review): left byte-identical — the cache/HTTP flow and the nested null
/// handling are too order-sensitive to restyle safely without tests. Also note
/// `resultsCount` is not used (the search request is built with page size 1) —
/// confirm that is intended.
/// </summary>
public async Task <OperationResult <IEnumerable <ArtistSearchResult> > > PerformArtistSearch(string query, int resultsCount) { ArtistSearchResult data = null; try { var cacheKey = $"uri:discogs:artistsearch:{ query.ToAlphanumericName() }"; data = await CacheManager.GetAsync <ArtistSearchResult>(cacheKey, async() => { Logger.LogTrace("DiscogsHelper:PerformArtistSearch:{0}", query); var request = BuildSearchRequest(query, 1, "artist"); var client = new RestClient("https://api.discogs.com/database") { UserAgent = WebHelper.UserAgent }; var response = await client.ExecuteAsync <DiscogsResult>(request); if (response.ResponseStatus == ResponseStatus.Error) { if (response.StatusCode == HttpStatusCode.Unauthorized) { throw new AuthenticationException("Unauthorized"); } throw new Exception(string.Format("Request Error Message: {0}. Content: {1}.", response.ErrorMessage, response.Content)); } var responseData = response.Data.results != null && response.Data.results.Any() ? response.Data.results.First() : null; if (responseData != null) { request = BuildArtistRequest(responseData.id); var c2 = new RestClient("https://api.discogs.com/") { UserAgent = WebHelper.UserAgent }; var artistResponse = await c2.ExecuteTaskAsync <DiscogArtistResponse>(request); var artist = artistResponse.Data; if (artist != null) { var urls = new List <string>(); var images = new List <string>(); var alternateNames = new List <string>(); string artistThumbnailUrl = null; urls.Add(artist.uri); if (artist.urls != null) { urls.AddRange(artist.urls); } if (artist.images != null) { images.AddRange(artist.images.Where(x => x.type != "primary").Select(x => x.uri)); var primaryImage = artist.images.FirstOrDefault(x => x.type == "primary"); if (primaryImage != null) { artistThumbnailUrl = primaryImage.uri; } if (string.IsNullOrEmpty(artistThumbnailUrl)) { artistThumbnailUrl = artist.images.First(x => !string.IsNullOrEmpty(x.uri)).uri; } } if (artist.namevariations != null) { 
alternateNames.AddRange(artist.namevariations.Distinct()); } return(new ArtistSearchResult { ArtistName = artist.name, DiscogsId = artist.id.ToString(), ArtistType = responseData.type, Profile = artist.profile, AlternateNames = alternateNames, ArtistThumbnailUrl = artistThumbnailUrl, Urls = urls, ImageUrls = images }); } } return(null); }, "uri:metadata"); } catch (Exception ex) { Logger.LogError(ex); } return(new OperationResult <IEnumerable <ArtistSearchResult> > { IsSuccess = data != null, Data = new[] { data } }); }
/// <summary>
/// Searches Discogs for a release by <paramref name="artistName"/> and <paramref name="query"/>,
/// then fetches the full release detail (genres, labels, track list, identifiers) for the
/// oldest matching result. The result is cached under the "uri:metadata" region.
/// </summary>
/// <param name="artistName">Artist whose release is being searched.</param>
/// <param name="query">Release title to search for.</param>
/// <param name="resultsCount">Requested result count. NOTE(review): unused — only the oldest
/// search hit is examined; confirm whether callers expect multiple results.</param>
/// <returns>
/// An OperationResult whose Data is always a single-element array; the element is null and
/// IsSuccess is false when nothing was found or an error occurred.
/// </returns>
public async Task <OperationResult <IEnumerable <ReleaseSearchResult> > > PerformReleaseSearch(string artistName, string query, int resultsCount)
{
    ReleaseSearchResult data = null;
    try
    {
        var cacheKey = $"uri:discogs:releasesearch:{ artistName.ToAlphanumericName() }:{ query.ToAlphanumericName() }";
        data = await CacheManager.GetAsync <ReleaseSearchResult>(cacheKey, async () =>
        {
            var request = BuildSearchRequest(query, 10, "release", artistName);
            var client = new RestClient("https://api.discogs.com/database")
            {
                UserAgent = WebHelper.UserAgent,
                ReadWriteTimeout = SafeParser.ToNumber <int>(Configuration.Integrations.DiscogsReadWriteTimeout),
                Timeout = SafeParser.ToNumber <int>(Configuration.Integrations.DiscogsTimeout)
            };
            var response = await client.ExecuteAsync <DiscogsReleaseSearchResult>(request);
            if (response?.ResponseStatus == null || response.ResponseStatus == ResponseStatus.Error)
            {
                // BUGFIX: response may be null on this path; guard StatusCode with "?.".
                if (response?.StatusCode == HttpStatusCode.Unauthorized)
                {
                    throw new AuthenticationException("Unauthorized");
                }
                throw new Exception($"Request Error Message: {response?.ErrorMessage}. Content: {response?.Content}.");
            }
            // Prefer the earliest (oldest) matching release.
            var responseData = response?.Data?.results?.OrderBy(x => x.year).FirstOrDefault();
            if (responseData?.id == null)
            {
                return null;
            }
            // Second request: full release detail for the matched id.
            request = BuildReleaseRequest(responseData.id);
            var detailClient = new RestClient("https://api.discogs.com/")
            {
                UserAgent = WebHelper.UserAgent
            };
            var releaseResult = await detailClient.ExecuteTaskAsync <DiscogReleaseDetail>(request);
            var release = releaseResult?.Data;
            if (release == null)
            {
                return null;
            }
            var urls = new List <string> { release.uri };
            var images = new List <string>();
            string releaseThumbnailUrl = null;
            if (release.images != null)
            {
                // Non-primary images form the gallery; the primary image (or, as a fallback,
                // the first image with a non-empty uri) becomes the thumbnail.
                images.AddRange(release.images.Where(x => x.type != "primary").Select(x => x.uri));
                var primaryImage = release.images.FirstOrDefault(x => x.type == "primary");
                if (primaryImage != null)
                {
                    releaseThumbnailUrl = primaryImage.uri;
                }
                if (string.IsNullOrEmpty(releaseThumbnailUrl))
                {
                    releaseThumbnailUrl = release.images.First(x => !string.IsNullOrEmpty(x.uri)).uri;
                }
            }
            var result = new ReleaseSearchResult
            {
                DiscogsId = release.id.ToString(),
                ReleaseType = responseData.type,
                ReleaseDate = SafeParser.ToDateTime(release.released),
                Profile = release.notes,
                ReleaseThumbnailUrl = releaseThumbnailUrl,
                Urls = urls,
                ImageUrls = images
            };
            if (release.genres != null)
            {
                result.ReleaseGenres = release.genres.ToList();
            }
            if (release.labels != null)
            {
                result.ReleaseLabel = release.labels.Select(x => new ReleaseLabelSearchResult
                {
                    CatalogNumber = x.catno,
                    Label = new LabelSearchResult
                    {
                        LabelName = x.name,
                        DiscogsId = x.id.ToString()
                    }
                }).ToList();
            }
            if (release.tracklist != null)
            {
                // Discogs detail is flattened onto a single medium here; the loop shape is
                // kept in case multiple media are ever derived from the payload.
                var releaseMediaCount = 1;
                var releaseMedias = new List <ReleaseMediaSearchResult>();
                for (short? i = 1; i <= releaseMediaCount; i++)
                {
                    var releaseTracks = new List <TrackSearchResult>();
                    short? looper = 0;
                    foreach (var dTrack in release.tracklist.OrderBy(x => x.position))
                    {
                        looper++;
                        releaseTracks.Add(new TrackSearchResult
                        {
                            TrackNumber = looper,
                            Title = dTrack.title,
                            Duration = dTrack.duration.ToTrackDuration(),
                            TrackType = dTrack.type_
                        });
                    }
                    releaseMedias.Add(new ReleaseMediaSearchResult
                    {
                        ReleaseMediaNumber = i,
                        TrackCount = (short)releaseTracks.Count,
                        Tracks = releaseTracks
                    });
                }
                result.ReleaseMedia = releaseMedias;
            }
            if (release.identifiers != null)
            {
                var barcode = release.identifiers.FirstOrDefault(x => (x.type ?? string.Empty) == "Barcode");
                if (barcode?.value != null)
                {
                    result.Tags = new[] { "barcode:" + barcode.value };
                }
            }
            // BUGFIX: the original assigned the populated object to the captured "data"
            // variable and then returned null from this factory, so the outer
            // "data = await CacheManager.GetAsync(...)" overwrote the closure assignment
            // with null — IsSuccess was always false and null was cached. Return the
            // result instead, matching the working pattern in PerformArtistSearch.
            return result;
        }, "uri:metadata");
    }
    catch (Exception ex)
    {
        Logger.LogError(ex, $"DiscogsHelper: Error in PerformReleaseSearch artistname [{ artistName }], query [{ query }], resultsCount [{ resultsCount }]");
    }
    return new OperationResult <IEnumerable <ReleaseSearchResult> >
    {
        IsSuccess = data != null,
        Data = new[] { data }
    };
}
/// <summary>
/// Searches the Discogs database for a label matching <paramref name="labelName"/> and, when a
/// hit is found, fetches the full label detail record. The result is cached under the
/// "uri:metadata" region.
/// </summary>
/// <param name="labelName">Label name to search for.</param>
/// <param name="resultsCount">Requested result count. NOTE(review): unused — only the first
/// search hit is ever examined.</param>
/// <returns>
/// An OperationResult whose Data is always a single-element array; the element is null and
/// IsSuccess is false when nothing was found or an error occurred.
/// </returns>
public async Task <OperationResult <IEnumerable <LabelSearchResult> > > PerformLabelSearch(string labelName, int resultsCount)
{
    LabelSearchResult data = null;
    try
    {
        var cacheKey = $"uri:discogs:labelsearch:{ labelName.ToAlphanumericName() }";
        data = await CacheManager.GetAsync <LabelSearchResult>(cacheKey, async () =>
        {
            var request = BuildSearchRequest(labelName, 1, "label");
            var client = new RestClient("https://api.discogs.com/database")
            {
                UserAgent = WebHelper.UserAgent
            };
            var response = await client.ExecuteAsync <DiscogsResult>(request);
            if (response.ResponseStatus == ResponseStatus.Error)
            {
                if (response.StatusCode == HttpStatusCode.Unauthorized)
                {
                    throw new AuthenticationException("Unauthorized");
                }
                throw new Exception(string.Format("Request Error Message: {0}. Content: {1}.", response.ErrorMessage, response.Content));
            }
            // Only the first search hit is considered.
            var responseData = response.Data.results != null && response.Data.results.Any() ? response.Data.results.First() : null;
            if (responseData == null)
            {
                return null;
            }
            // Second request: full label detail for the matched id.
            request = BuildLabelRequest(responseData.id);
            var detailClient = new RestClient("https://api.discogs.com/")
            {
                UserAgent = WebHelper.UserAgent
            };
            var labelResponse = await detailClient.ExecuteTaskAsync <DiscogsLabelResult>(request);
            var label = labelResponse.Data;
            if (label == null)
            {
                return null;
            }
            var urls = new List <string> { label.uri };
            var images = new List <string>();
            var alternateNames = new List <string>();
            string labelThumbnailUrl = null;
            if (label.urls != null)
            {
                urls.AddRange(label.urls);
            }
            if (label.images != null)
            {
                // Non-primary images form the gallery; the primary image (or, as a fallback,
                // the first image with a non-empty uri) becomes the thumbnail.
                images.AddRange(label.images.Where(x => x.type != "primary").Select(x => x.uri));
                var primaryImage = label.images.FirstOrDefault(x => x.type == "primary");
                if (primaryImage != null)
                {
                    labelThumbnailUrl = primaryImage.uri;
                }
                if (string.IsNullOrEmpty(labelThumbnailUrl))
                {
                    labelThumbnailUrl = label.images.First(x => !string.IsNullOrEmpty(x.uri)).uri;
                }
            }
            // BUGFIX: the original assigned the populated object to the captured "data"
            // variable and then returned null from this factory, so the outer
            // "data = await CacheManager.GetAsync(...)" overwrote the closure assignment
            // with null — IsSuccess was always false and null was cached. Return the
            // result instead, matching the working pattern in PerformArtistSearch.
            return new LabelSearchResult
            {
                LabelName = label.name,
                DiscogsId = label.id.ToString(),
                Profile = label.profile,
                AlternateNames = alternateNames,
                LabelImageUrl = labelThumbnailUrl,
                Urls = urls,
                ImageUrls = images
            };
        }, "uri:metadata");
    }
    catch (Exception ex)
    {
        Logger.LogError(ex);
    }
    return new OperationResult <IEnumerable <LabelSearchResult> >
    {
        IsSuccess = data != null,
        Data = new[] { data }
    };
}
public ScanWatchFolder(ICacheManager cacheManager, IDiskScanService diskScanService, IDiskProvider diskProvider, Logger logger) { _logger = logger; _diskProvider = diskProvider; _diskScanService = diskScanService; _watchFolderItemCache = cacheManager.GetCache <Dictionary <string, WatchFolderItem> >(GetType()); }
public static IEnumerator LoginByThirdCoroutine(string platform, string channel, string uid, string accessToken) { ThirdLoginAction = null; using (new BlockingLayerHelper(0)) { NetManager.Instance.Stop(); NetManager.Instance.ServerAddress = GameUtils.GetServerAddress(); var result = new AsyncResult<int>(); var connectToGate = NetManager.Instance.ConnectToGate(result); yield return connectToGate; if (0 == result.Result) { // 连接失败! UIManager.Instance.ShowMessage(MessageBoxType.Ok, 270111, "", PlatformHelper.UserLogout); yield break; } if (string.IsNullOrEmpty(uid) && string.IsNullOrEmpty(accessToken)) { UIManager.Instance.ShowMessage(MessageBoxType.Ok, "uid and accessToken empty!"); yield break; } var loginMsg = NetManager.Instance.PlayerLoginByThirdKey(platform, channel, uid, accessToken); yield return loginMsg.SendAndWaitUntilDone(); if (loginMsg.State == MessageState.Reply) { if ((int)ErrorCodes.OK == loginMsg.ErrorCode) { NetManager.Instance.NeedReconnet = true; PlayerDataManager.Instance.LastLoginServerId = loginMsg.Response.LastServerId == 0 ? 1 : loginMsg.Response.LastServerId; if (channel.Equals("BaiDu")) { PlayerDataManager.Instance.UidForPay = uid; } else { PlayerDataManager.Instance.UidForPay = loginMsg.Response.Uid; TypeSDKHelper.Instance.userIdforPay = loginMsg.Response.Uid; } NetManager.Instance.StartCoroutine(LoginSuccess()); } else if (loginMsg.ErrorCode == (int)ErrorCodes.Error_PLayerLoginWait) { NetManager.Instance.NeedReconnet = false; PlayerDataManager.Instance.AccountDataModel.LineUpShow = true; var e = new Show_UI_Event(UIConfig.ServerListUI, null); EventDispatcher.Instance.DispatchEvent(e); } else { NetManager.Instance.NeedReconnet = false; UIManager.Instance.ShowNetError(loginMsg.ErrorCode); } } else { NetManager.Instance.NeedReconnet = false; Logger.Error("LoginByThirdCoroutine MessageState:{0}", loginMsg.State); GameUtils.ShowLoginTimeOutTip(); } } }
/// <summary>
/// Coroutine that logs a player in with the stored username/password: connects to the gate
/// server, sends the credential login message, and dispatches to success/queue/bad-password/
/// error handling based on the reply.
/// </summary>
public static IEnumerator LoginCoroutine()
{
    // Block all UI input for the duration of the login flow.
    using (new BlockingLayerHelper(0))
    {
        //NetManager.Instance.Stop();
        var result = new AsyncResult<int>();
        var connectToGate = NetManager.Instance.ConnectToGate(result);
        yield return connectToGate;
        if (0 == result.Result)
        {
            // Connection failed!
            UIManager.Instance.ShowMessage(MessageBoxType.Ok, 270111);
            yield break;
        }
        if (string.IsNullOrEmpty(PlayerDataManager.Instance.UserName))
        {
            UIManager.Instance.ShowMessage(MessageBoxType.Ok, "user name empty!");
            yield break;
        }
        var loginMsg = NetManager.Instance.PlayerLoginByUserNamePassword(PlayerDataManager.Instance.UserName, PlayerDataManager.Instance.Password);
        yield return loginMsg.SendAndWaitUntilDone();
        if (loginMsg.State == MessageState.Reply)
        {
            if ((int)ErrorCodes.OK == loginMsg.ErrorCode)
            {
                NetManager.Instance.NeedReconnet = true;
                // A LastServerId of 0 means no prior server; fall back to server 1.
                PlayerDataManager.Instance.LastLoginServerId = loginMsg.Response.LastServerId == 0 ? 1 : loginMsg.Response.LastServerId;
                NetManager.Instance.StartCoroutine(LoginSuccess());
            }
            else if (loginMsg.ErrorCode == (int)ErrorCodes.Error_PLayerLoginWait)
            {
                // Server is queueing logins: show the server list with the queue indicator.
                NetManager.Instance.NeedReconnet = false;
                PlayerDataManager.Instance.AccountDataModel.LineUpShow = true;
                var e = new Show_UI_Event(UIConfig.ServerListUI, null);
                EventDispatcher.Instance.DispatchEvent(e);
            }
            else if ((int)ErrorCodes.PasswordIncorrect == loginMsg.ErrorCode)
            {
                // Wrong password: look up a localized message for the error code.
                // presumably dictionary ids are error code + 200000000 — TODO confirm table layout.
                var errorCode = loginMsg.ErrorCode;
                var dicId = errorCode + 200000000;
                var tbDic = Table.GetDictionary(dicId);
                var info = "";
                if (tbDic == null)
                {
                    // No dictionary entry for this code: show a generic text plus the raw code.
                    info = GameUtils.GetDictionaryText(200000001) + errorCode;
                    Logger.Error(GameUtils.GetDictionaryText(200098), errorCode);
                }
                else
                {
                    info = tbDic.Desc[GameUtils.LanguageIndex];
                }
                UIManager.Instance.ShowMessage(MessageBoxType.Ok, info);
            }
            else
            {
                NetManager.Instance.NeedReconnet = false;
                //Logger.Error("PlayerLoginByUserNamePassword ErrorCode" + loginMsg.ErrorCode);
                UIManager.Instance.ShowNetError(loginMsg.ErrorCode);
            }
        }
        else
        {
            // No reply (timeout or transport failure).
            NetManager.Instance.NeedReconnet = false;
            Logger.Error("PlayerLoginByUserNamePassword MessageState:{0}", loginMsg.State);
            GameUtils.ShowLoginTimeOutTip();
        }
    }
}
/// <summary>
/// Runs a full documentation build over one or more parameter sets: builds each group into a
/// manifest, merges the manifests, runs post-processing, dereferences or keeps file links,
/// saves the final manifest, and maintains the incremental-build cache.
/// </summary>
/// <param name="parameters">Per-group build parameters; must be non-null and non-empty.
/// parameters[0] supplies global settings (markdown engine, parallelism, output base).</param>
/// <param name="outputDirectory">Directory handed to the post-processor pipeline.</param>
/// <exception cref="ArgumentNullException">parameters is null.</exception>
/// <exception cref="ArgumentException">parameters is empty.</exception>
public void Build(IList <DocumentBuildParameters> parameters, string outputDirectory)
{
    if (parameters == null)
    {
        throw new ArgumentNullException(nameof(parameters));
    }
    if (parameters.Count == 0)
    {
        throw new ArgumentException("Parameters are empty.", nameof(parameters));
    }
    var markdownServiceProvider = GetMarkdownServiceProvider();
    // Collects warning/error codes emitted during the build so they can be attached
    // to the generated manifest files later.
    var logCodesLogListener = new LogCodesLogListener();
    Logger.RegisterListener(logCodesLogListener);
    // Load schema driven processor from template
    var sdps = LoadSchemaDrivenDocumentProcessors(parameters[0]).ToList();
    BuildInfo lastBuildInfo = null;
    var currentBuildInfo = new BuildInfo
    {
        BuildStartTime = DateTime.UtcNow,
        DocfxVersion = EnvironmentContext.Version,
    };
    try
    {
        using (new PerformanceScope("LoadLastBuildInfo"))
        {
            lastBuildInfo = BuildInfo.Load(_intermediateFolder, true);
        }
        EnrichCurrentBuildInfo(currentBuildInfo, lastBuildInfo);
        _postProcessorsManager.IncrementalInitialize(_intermediateFolder, currentBuildInfo, lastBuildInfo, parameters[0].ForcePostProcess, parameters[0].MaxParallelism);
        var manifests = new List <Manifest>();
        bool transformDocument = false;
        if (parameters.All(p => p.Files.Count == 0))
        {
            Logger.LogWarning("No file found, nothing will be generated. Please make sure docfx.json is correctly configured.", code: WarningCodes.Build.EmptyInputFiles);
        }
        var noContentFound = true;
        var emptyContentGroups = new List <string>();
        foreach (var parameter in parameters)
        {
            // Optional custom href generator plugged in per parameter group.
            if (parameter.CustomLinkResolver != null)
            {
                if (_container.TryGetExport(parameter.CustomLinkResolver, out ICustomHrefGenerator chg))
                {
                    parameter.ApplyTemplateSettings.HrefGenerator = chg;
                }
                else
                {
                    Logger.LogWarning($"Custom href generator({parameter.CustomLinkResolver}) is not found.");
                }
            }
            // Without an intermediate folder the FAL writes straight to the output;
            // with one, writes go through links into the incremental cache directory.
            FileAbstractLayerBuilder falBuilder;
            if (_intermediateFolder == null)
            {
                falBuilder = FileAbstractLayerBuilder.Default
                    .ReadFromRealFileSystem(EnvironmentContext.BaseDirectory)
                    .WriteToRealFileSystem(parameter.OutputBaseDir);
            }
            else
            {
                falBuilder = FileAbstractLayerBuilder.Default
                    .ReadFromRealFileSystem(EnvironmentContext.BaseDirectory)
                    .WriteToLink(Path.Combine(_intermediateFolder, currentBuildInfo.DirectoryName));
            }
            if (!string.IsNullOrEmpty(parameter.FALName))
            {
                if (_container.TryGetExport <IInputFileAbstractLayerBuilderProvider>(parameter.FALName, out var provider))
                {
                    falBuilder = provider.Create(falBuilder, parameter);
                }
                else
                {
                    Logger.LogWarning($"Input fal builder provider not found, name: {parameter.FALName}.");
                }
            }
            EnvironmentContext.FileAbstractLayerImpl = falBuilder.Create();
            if (parameter.ApplyTemplateSettings.TransformDocument)
            {
                transformDocument = true;
            }
            if (parameter.Files.Count == 0)
            {
                // Empty group still contributes a (file-less) manifest entry.
                manifests.Add(new Manifest() { SourceBasePath = StringExtension.ToNormalizedPath(EnvironmentContext.BaseDirectory) });
            }
            else
            {
                if (!parameter.Files.EnumerateFiles().Any(s => s.Type == DocumentType.Article))
                {
                    if (!string.IsNullOrEmpty(parameter.GroupInfo?.Name))
                    {
                        emptyContentGroups.Add(parameter.GroupInfo.Name);
                    }
                }
                else
                {
                    noContentFound = false;
                }
                parameter.Metadata = _postProcessorsManager.PrepareMetadata(parameter.Metadata);
                if (!string.IsNullOrEmpty(parameter.VersionName))
                {
                    Logger.LogInfo($"Start building for version: {parameter.VersionName}");
                }
                using (new LoggerPhaseScope("BuildCore"))
                {
                    manifests.Add(BuildCore(parameter, markdownServiceProvider, currentBuildInfo, lastBuildInfo));
                }
            }
        }
        if (noContentFound)
        {
            Logger.LogWarning("No content file found. Please make sure the content section of docfx.json is correctly configured.", code: WarningCodes.Build.EmptyInputContents);
        }
        else if (emptyContentGroups.Count > 0)
        {
            Logger.LogWarning($"No content file found in group: {string.Join(",", emptyContentGroups)}. Please make sure the content section of docfx.json is correctly configured.", code: WarningCodes.Build.EmptyInputContents);
        }
        using (new LoggerPhaseScope("Postprocess", LogLevel.Verbose))
        {
            var generatedManifest = ManifestUtility.MergeManifest(manifests);
            generatedManifest.SitemapOptions = parameters.FirstOrDefault()?.SitemapOptions;
            ManifestUtility.RemoveDuplicateOutputFiles(generatedManifest.Files);
            ManifestUtility.ApplyLogCodes(generatedManifest.Files, logCodesLogListener.Codes);
            // We can only globally shrink once to avoid invalid reference.
            // Shrink multiple times may remove files that are already linked in saved manifest.
            if (_intermediateFolder != null)
            {
                // TODO: shrink here is not safe as post processor may update it.
                // should shrink once at last to handle everything, or make FAL support copy on writes
                // NOTE(review): "SaveVersionsManifet" looks like a typo for "...Manifest",
                // but the name is declared elsewhere and must stay as-is here.
                generatedManifest.Files.Shrink(_intermediateFolder, parameters[0].MaxParallelism);
                currentBuildInfo.SaveVersionsManifet(_intermediateFolder);
            }
            EnvironmentContext.FileAbstractLayerImpl = FileAbstractLayerBuilder.Default
                .ReadFromManifest(generatedManifest, parameters[0].OutputBaseDir)
                .WriteToManifest(generatedManifest, parameters[0].OutputBaseDir)
                .Create();
            using (new PerformanceScope("Process"))
            {
                _postProcessorsManager.Process(generatedManifest, outputDirectory);
            }
            using (new PerformanceScope("Dereference"))
            {
                if (parameters[0].KeepFileLink)
                {
                    // Count files still pointing at linked paths, purely for the log message.
                    var count = (from f in generatedManifest.Files
                                 from o in f.OutputFiles
                                 select o.Value into v
                                 where v.LinkToPath != null
                                 select v).Count();
                    if (count > 0)
                    {
                        Logger.LogInfo($"Skip dereferencing {count} files.");
                    }
                }
                else
                {
                    generatedManifest.Dereference(parameters[0].OutputBaseDir, parameters[0].MaxParallelism);
                }
            }
            using (new PerformanceScope("SaveManifest"))
            {
                // Save to manifest.json
                EnvironmentContext.FileAbstractLayerImpl = FileAbstractLayerBuilder.Default
                    .ReadFromRealFileSystem(parameters[0].OutputBaseDir)
                    .WriteToRealFileSystem(parameters[0].OutputBaseDir)
                    .Create();
                SaveManifest(generatedManifest);
            }
            using (new PerformanceScope("Cleanup"))
            {
                EnvironmentContext.FileAbstractLayerImpl = null;
                // overwrite intermediate cache files
                if (_intermediateFolder != null && transformDocument)
                {
                    try
                    {
                        if (Logger.WarningCount >= Logger.WarningThrottling)
                        {
                            // Too many warnings: mark the cache invalid so the next build
                            // does not trust this run's incremental state.
                            currentBuildInfo.IsValid = false;
                            currentBuildInfo.Message = $"Warning count {Logger.WarningCount} exceeds throttling {Logger.WarningThrottling}";
                        }
                        currentBuildInfo.Save(_intermediateFolder);
                        if (_cleanupCacheHistory)
                        {
                            ClearCacheExcept(currentBuildInfo.DirectoryName);
                        }
                    }
                    catch (Exception ex)
                    {
                        // Cache save failure is non-fatal; the build output is already complete.
                        Logger.LogWarning($"Error happened while saving cache. Message: {ex.Message}.");
                    }
                }
            }
        }
    }
    catch
    {
        // Leave cache folder there as it contains historical data
        // exceptions happens in this build does not corrupt the cache theoretically
        // however the cache file created by this build will never be cleaned up with DisableIncrementalFolderCleanup option
        if (_intermediateFolder != null && _cleanupCacheHistory)
        {
            ClearCacheExcept(lastBuildInfo?.DirectoryName);
        }
        throw;
    }
    finally
    {
        Logger.UnregisterListener(logCodesLogListener);
    }
    // Resolves the markdown engine named in parameters[0]; fails the build if missing.
    IMarkdownServiceProvider GetMarkdownServiceProvider()
    {
        using (new PerformanceScope(nameof(GetMarkdownServiceProvider)))
        {
            var result = CompositionContainer.GetExport <IMarkdownServiceProvider>(_container, parameters[0].MarkdownEngineName);
            if (result == null)
            {
                Logger.LogError($"Unable to find markdown engine: {parameters[0].MarkdownEngineName}");
                throw new DocfxException($"Unable to find markdown engine: {parameters[0].MarkdownEngineName}");
            }
            Logger.LogInfo($"Markdown engine is {parameters[0].MarkdownEngineName}", code: InfoCodes.Build.MarkdownEngineName);
            return(result);
        }
    }
    // Stamps commit/plugin/template hashes onto the current build info and picks
    // (or reuses) the incremental cache directory.
    void EnrichCurrentBuildInfo(BuildInfo current, BuildInfo last)
    {
        current.CommitFromSHA = _commitFromSHA;
        current.CommitToSHA = _commitToSHA;
        if (_intermediateFolder != null)
        {
            current.PluginHash = ComputePluginHash(_assemblyList);
            current.TemplateHash = _templateHash;
            if (!_cleanupCacheHistory && last != null)
            {
                // Reuse the directory for last incremental if cleanup is disabled
                current.DirectoryName = last.DirectoryName;
            }
            else
            {
                current.DirectoryName = IncrementalUtility.CreateRandomDirectory(Environment.ExpandEnvironmentVariables(_intermediateFolder));
            }
        }
    }
}
/// <summary>
/// Player attack: hits a single monster, or — when the player is accelerated — every
/// monster in the target's pack. Dead monsters are removed from the pack and counted
/// toward KillPoint.
/// </summary>
/// <param name="foe">Target creature; must be a Monster.</param>
/// <exception cref="ArgumentException">foe is not a Monster.</exception>
public override void Attack(Creature foe)
{
    if (!(foe is Monster))
    {
        // player can only attack a monster
        throw new ArgumentException("Player can only attack a Monster.", nameof(foe));
    }
    Monster foe_ = foe as Monster;
    Pack tempPack = foe_.pack;          // monster's pack
    Node packLocation = tempPack.location; // same as player's location
    // gives information to the user
    Logger.log("Location is " + tempPack.location.id);
    Logger.log("All monsters in the pack: ");
    foreach (Monster m in tempPack.members)
    {
        Logger.log(m.id);
    }
    Logger.log("All packs in location ");
    foreach (Pack p in packLocation.packs)
    {
        Logger.log(p.id);
    }
    if (!accelerated)
    {
        // Not accelerated: only the targeted monster is attacked.
        foe.HP = (int)Math.Max(0, foe.HP - AttackRating); // HP can not be less than 0
        String killMsg = foe.HP == 0 ? ", KILLING it" : "";
        Logger.log("Creature " + id + " attacks " + foe.id + killMsg + ".");
        if (foe.HP == 0)
        {
            // Monster died: remove it from its pack and report if the pack is now empty.
            tempPack.members.Remove(foe_);
            if (tempPack.members.Count == 0)
            {
                Logger.log("Pack is now empty' pack id " + tempPack.id);
                Logger.log("Remaining packs in the location:");
                foreach (Pack pack in packLocation.packs)
                {
                    Logger.log("Pack " + pack.id + " in node " + packLocation);
                }
                // NOTE(review): the empty pack is intentionally NOT removed from
                // packLocation.packs here (removal was disabled during debugging) —
                // confirm whether the pack should be deleted from the node.
            }
            KillPoint++;
        }
    }
    else
    {
        // Accelerated: attack every monster in the pack.
        int packCount = foe_.pack.members.Count;
        // Clear out monsters that were already dead before this attack and credit them.
        foe_.pack.members.RemoveAll(target => target.HP <= 0);
        KillPoint += (uint)(packCount - foe_.pack.members.Count);
        // BUGFIX: iterate from the POST-removal count. The original started at the
        // pre-removal packCount - 1, so ElementAt threw ArgumentOutOfRangeException
        // whenever RemoveAll had shrunk the list. Iterating downward keeps removal
        // during the loop safe.
        for (int i = foe_.pack.members.Count - 1; i >= 0; i--)
        {
            Monster target = foe_.pack.members.ElementAt(i);
            target.HP = (int)Math.Max(0, target.HP - AttackRating);
            String killMsg = target.HP == 0 ? ", KILLING it" : "";
            Logger.log("Creature " + id + " attacks " + target.id + killMsg + ".");
            if (target.HP == 0)
            {
                foe_.pack.members.Remove(target);
                KillPoint++;
            }
        }
        accelerated = false; // acceleration is consumed by the pack attack
    }
}
/// <summary>
/// Matches the registry aircraft at currentIndex against available aircraft titles:
/// applies the UI filters, tries an exact match, then falls back to airline+family and
/// airline-only searches, and either auto-picks the best candidate or asks the user.
/// Always chains to SearchNext() to advance to the next registry entry.
/// </summary>
private void Search()
{
    RegistryAircraft reg_ac = mainForm.registryForm.registry_aircrafts[currentIndex];
    Logger.Log(reg_ac.icao + " " + reg_ac.airline);
    List<Aircraft> unsorted = new List<Aircraft>();
    List<Aircraft> result = new List<Aircraft>();
    // Filters: skip entries that already have a title (when "new only" is checked)
    // or that fail the airline / ICAO-family text filters.
    if (reg_ac.current_title != "" && checkNew.Checked)
    {
        SearchNext();
        return;
    }
    if (checkAirlineFilter.Checked && filterAirline.Text.Length > 0 && !reg_ac.airline.Contains(filterAirline.Text))
    {
        SearchNext();
        return;
    }
    if (checkFamilyFilter.Checked && filterIcao.Text.Length > 0 && !reg_ac.icao.Contains(filterIcao.Text))
    {
        SearchNext();
        return;
    }
    unsorted.AddRange(FindExact(reg_ac));
    bool exactMatch = unsorted.Count > 0;
    bool resolvedAirlineFamily = false;
    if (unsorted.Count == 0)
    {
        // Exact match was not found - try airline and family (first 3 letters from icao).
        unsorted.AddRange(FindByAirlineAndFamily(reg_ac, false));
        // BUGFIX: the original tested result.Count here, but "result" is not populated
        // until SortResult below, so the check was always true: the airline-only search
        // always ran on top of the family results and resolvedAirlineFamily could never
        // become true. Test the list that was actually filled.
        if (unsorted.Count == 0)
        {
            // Nothing was found by airline and family - search by airline only.
            unsorted.AddRange(FindByAirlineAndFamily(reg_ac, true));
        }
        else
        {
            resolvedAirlineFamily = true;
        }
    }
    result.AddRange(SortResult(unsorted, reg_ac.icao));
    if (result.Count > 1 || (resolvedAirlineFamily && radioButton2.Checked))
    {
        if (radioButton1.Checked || (exactMatch && radioButton2.Checked))
        {
            // Automatic mode: take the top-scored candidate.
            Aircraft res_ac = result.ElementAt(0);
            Logger.Log("Automatic choice for " + reg_ac.icao + " / " + reg_ac.airline + " (curr " + reg_ac.current_title + " ) --> " + res_ac.title + " (" + res_ac.match_count + "pts)");
            reg_ac.new_title = res_ac.title;
        }
        else
        {
            // Interactive mode: let the user pick; DecideForm sets decideResult.
            string message = result.Count.ToString() + " matches found for " + reg_ac.icao + " / " + reg_ac.airline + "\n";
            DecideForm decide = new DecideForm(this, message, result);
            decide.ShowDialog();
            Aircraft res_ac = result.ElementAt(decideResult);
            decide.Dispose();
            Logger.Log("User choice for " + reg_ac.icao + " / " + reg_ac.airline + " (curr " + reg_ac.current_title + " ) --> " + res_ac.title);
            reg_ac.new_title = res_ac.title;
        }
    }
    else if (result.Count == 1)
    {
        Aircraft res_ac = result.ElementAt(0);
        Logger.Log("One match found for " + reg_ac.icao + " / " + reg_ac.airline + " (curr " + reg_ac.current_title + " ) --> " + res_ac.title);
        reg_ac.new_title = res_ac.title;
    }
    else
    {
        Logger.Log("No match was found");
    }
    SearchNext();
}
private static void SaveManifest(Manifest manifest) { JsonUtility.Serialize(Constants.ManifestFileName, manifest, Formatting.Indented); Logger.LogInfo($"Manifest file saved to {Constants.ManifestFileName}."); }
/// <summary>
/// Runs the metadata provider pipeline for one item: pre-refresh custom providers, then
/// (depending on options) remote providers, local providers, a remote fallback, result
/// merging, and finally post-refresh custom providers.
/// </summary>
/// <param name="metadata">Accumulated metadata for the item; merged into in place.</param>
/// <param name="id">Lookup info passed to remote providers.</param>
/// <param name="options">Refresh options controlling replace-all and refresh mode.</param>
/// <param name="providers">All candidate providers; filtered by interface below.</param>
/// <param name="imageService">Used to merge images found by local providers.</param>
/// <param name="cancellationToken">Propagated to every provider call.</param>
/// <returns>Aggregated update type, failure count and last error message.</returns>
protected virtual async Task <RefreshResult> RefreshWithProviders(
    MetadataResult <TItemType> metadata,
    TIdType id,
    MetadataRefreshOptions options,
    List <IMetadataProvider> providers,
    ItemImageProvider imageService,
    CancellationToken cancellationToken)
{
    var refreshResult = new RefreshResult
    {
        UpdateType = ItemUpdateType.None
    };
    var item = metadata.Item;
    var customProviders = providers.OfType <ICustomMetadataProvider <TItemType> >().ToList();
    // Prefer Name for non-file items and Path for file-backed items when logging.
    var logName = !item.IsFileProtocol ? item.Name ?? item.Path : item.Path ?? item.Name;
    // Pre-refresh custom providers run before anything else.
    foreach (var provider in customProviders.Where(i => i is IPreRefreshProvider))
    {
        await RunCustomProvider(provider, item, logName, options, refreshResult, cancellationToken).ConfigureAwait(false);
    }
    // "temp" accumulates provider output and is merged into "metadata" at the end.
    var temp = new MetadataResult <TItemType>
    {
        Item = CreateNew()
    };
    temp.Item.Path = item.Path;
    var userDataList = new List <UserItemData>();
    // If replacing all metadata, run internet providers first
    if (options.ReplaceAllMetadata)
    {
        var remoteResult = await ExecuteRemoteProviders(temp, logName, id, providers.OfType <IRemoteMetadataProvider <TItemType, TIdType> >(), cancellationToken)
            .ConfigureAwait(false);
        refreshResult.UpdateType |= remoteResult.UpdateType;
        refreshResult.ErrorMessage = remoteResult.ErrorMessage;
        refreshResult.Failures += remoteResult.Failures;
    }
    var hasLocalMetadata = false;
    foreach (var provider in providers.OfType <ILocalMetadataProvider <TItemType> >().ToList())
    {
        var providerName = provider.GetType().Name;
        Logger.LogDebug("Running {0} for {1}", providerName, logName);
        var itemInfo = new ItemInfo(item);
        try
        {
            var localItem = await provider.GetMetadata(itemInfo, options.DirectoryService, cancellationToken).ConfigureAwait(false);
            if (localItem.HasMetadata)
            {
                // Persist any remote images the local provider referenced.
                foreach (var remoteImage in localItem.RemoteImages)
                {
                    await ProviderManager.SaveImage(item, remoteImage.url, remoteImage.type, null, cancellationToken).ConfigureAwait(false);
                    refreshResult.UpdateType |= ItemUpdateType.ImageUpdate;
                }
                if (imageService.MergeImages(item, localItem.Images))
                {
                    refreshResult.UpdateType |= ItemUpdateType.ImageUpdate;
                }
                if (localItem.UserDataList != null)
                {
                    userDataList.AddRange(localItem.UserDataList);
                }
                MergeData(localItem, temp, Array.Empty <MetadataField>(), !options.ReplaceAllMetadata, true);
                refreshResult.UpdateType |= ItemUpdateType.MetadataImport;
                // Only one local provider allowed per item
                if (item.IsLocked || localItem.Item.IsLocked || IsFullLocalMetadata(localItem.Item))
                {
                    hasLocalMetadata = true;
                }
                break;
            }
            Logger.LogDebug("{0} returned no metadata for {1}", providerName, logName);
        }
        catch (OperationCanceledException)
        {
            throw;
        }
        catch (Exception ex)
        {
            Logger.LogError(ex, "Error in {provider}", provider.Name);
            // If a local provider fails, consider that a failure
            refreshResult.ErrorMessage = ex.Message;
        }
    }
    // Local metadata is king - if any is found don't run remote providers
    if (!options.ReplaceAllMetadata && (!hasLocalMetadata || options.MetadataRefreshMode == MetadataRefreshMode.FullRefresh || !item.StopRefreshIfLocalMetadataFound))
    {
        var remoteResult = await ExecuteRemoteProviders(temp, logName, id, providers.OfType <IRemoteMetadataProvider <TItemType, TIdType> >(), cancellationToken)
            .ConfigureAwait(false);
        refreshResult.UpdateType |= remoteResult.UpdateType;
        refreshResult.ErrorMessage = remoteResult.ErrorMessage;
        refreshResult.Failures += remoteResult.Failures;
    }
    // Merge accumulated provider output back into the caller's metadata, honoring
    // the item's locked fields.
    if (providers.Any(i => !(i is ICustomMetadataProvider)))
    {
        if (refreshResult.UpdateType > ItemUpdateType.None)
        {
            if (hasLocalMetadata)
            {
                MergeData(temp, metadata, item.LockedFields, true, true);
            }
            else
            {
                // TODO: If the new metadata from above has some blank data, this can cause old data to get filled into those empty fields
                MergeData(metadata, temp, Array.Empty <MetadataField>(), false, false);
                MergeData(temp, metadata, item.LockedFields, true, false);
            }
        }
    }
    // var isUnidentified = failedProviderCount > 0 && successfulProviderCount == 0;
    // Post-refresh custom providers run last.
    foreach (var provider in customProviders.Where(i => !(i is IPreRefreshProvider)))
    {
        await RunCustomProvider(provider, item, logName, options, refreshResult, cancellationToken).ConfigureAwait(false);
    }
    // ImportUserData(item, userDataList, cancellationToken);
    return(refreshResult);
}
/// <summary>
/// Top-level refresh for one library item: decides whether a refresh is due, validates
/// local images, runs metadata providers, runs remote image providers, and saves the
/// item when anything changed (or on first refresh / forced save).
/// </summary>
/// <param name="item">Item to refresh; must actually be of TItemType (cast below).</param>
/// <param name="refreshOptions">Controls refresh mode, replace-all, and force-save.</param>
/// <param name="cancellationToken">Propagated to providers and the save.</param>
/// <returns>The combined ItemUpdateType describing what changed.</returns>
public async Task <ItemUpdateType> RefreshMetadata(BaseItem item, MetadataRefreshOptions refreshOptions, CancellationToken cancellationToken)
{
    var itemOfType = (TItemType)item;
    var updateType = ItemUpdateType.None;
    var requiresRefresh = false;
    var libraryOptions = LibraryManager.GetLibraryOptions(item);
    // Library-configured periodic refresh interval.
    if (!requiresRefresh && libraryOptions.AutomaticRefreshIntervalDays > 0 && (DateTime.UtcNow - item.DateLastRefreshed).TotalDays >= libraryOptions.AutomaticRefreshIntervalDays)
    {
        requiresRefresh = true;
    }
    if (!requiresRefresh && refreshOptions.MetadataRefreshMode != MetadataRefreshMode.None)
    {
        // TODO: If this returns true, should we instead just change metadata refresh mode to Full?
        requiresRefresh = item.RequiresRefresh();
        if (requiresRefresh)
        {
            Logger.LogDebug("Refreshing {0} {1} because item.RequiresRefresh() returned true", typeof(TItemType).Name, item.Path ?? item.Name);
        }
    }
    var localImagesFailed = false;
    var allImageProviders = ((ProviderManager)ProviderManager).GetImageProviders(item, refreshOptions).ToList();
    // Start by validating images
    try
    {
        // Always validate images and check for new locally stored ones.
        if (ImageProvider.ValidateImages(item, allImageProviders.OfType <ILocalImageProvider>(), refreshOptions.DirectoryService))
        {
            updateType |= ItemUpdateType.ImageUpdate;
        }
    }
    catch (Exception ex)
    {
        // Remember the failure: remote image providers are skipped below when local
        // image validation threw.
        localImagesFailed = true;
        Logger.LogError(ex, "Error validating images for {0}", item.Path ?? item.Name ?? "Unknown name");
    }
    var metadataResult = new MetadataResult <TItemType>
    {
        Item = itemOfType
    };
    bool hasRefreshedMetadata = true;
    bool hasRefreshedImages = true;
    var isFirstRefresh = item.DateLastRefreshed == default;
    // Next run metadata providers
    if (refreshOptions.MetadataRefreshMode != MetadataRefreshMode.None)
    {
        var providers = GetProviders(item, libraryOptions, refreshOptions, isFirstRefresh, requiresRefresh)
            .ToList();
        if (providers.Count > 0 || isFirstRefresh || requiresRefresh)
        {
            if (item.BeforeMetadataRefresh(refreshOptions.ReplaceAllMetadata))
            {
                updateType |= ItemUpdateType.MetadataImport;
            }
        }
        if (providers.Count > 0)
        {
            var id = itemOfType.GetLookupInfo();
            if (refreshOptions.SearchResult != null)
            {
                ApplySearchResult(id, refreshOptions.SearchResult);
            }
            // await FindIdentities(id, cancellationToken).ConfigureAwait(false);
            id.IsAutomated = refreshOptions.IsAutomated;
            var result = await RefreshWithProviders(metadataResult, id, refreshOptions, providers, ImageProvider, cancellationToken).ConfigureAwait(false);
            updateType |= result.UpdateType;
            if (result.Failures > 0)
            {
                hasRefreshedMetadata = false;
            }
        }
    }
    // Next run remote image providers, but only if local image providers didn't throw an exception
    if (!localImagesFailed && refreshOptions.ImageRefreshMode != MetadataRefreshMode.ValidationOnly)
    {
        var providers = GetNonLocalImageProviders(item, allImageProviders, refreshOptions).ToList();
        if (providers.Count > 0)
        {
            var result = await ImageProvider.RefreshImages(itemOfType, libraryOptions, providers, refreshOptions, cancellationToken).ConfigureAwait(false);
            updateType |= result.UpdateType;
            if (result.Failures > 0)
            {
                hasRefreshedImages = false;
            }
        }
    }
    var beforeSaveResult = BeforeSave(itemOfType, isFirstRefresh || refreshOptions.ReplaceAllMetadata || refreshOptions.MetadataRefreshMode == MetadataRefreshMode.FullRefresh || requiresRefresh || refreshOptions.ForceSave, updateType);
    updateType |= beforeSaveResult;
    // Save if changes were made, or it's never been saved before
    if (refreshOptions.ForceSave || updateType > ItemUpdateType.None || isFirstRefresh || refreshOptions.ReplaceAllMetadata || requiresRefresh)
    {
        if (item.IsFileProtocol)
        {
            var file = TryGetFile(item.Path, refreshOptions.DirectoryService);
            if (file != null)
            {
                item.DateModified = file.LastWriteTimeUtc;
            }
        }
        // If any of these properties are set then make sure the updateType is not None, just to force everything to save
        if (refreshOptions.ForceSave || refreshOptions.ReplaceAllMetadata)
        {
            updateType |= ItemUpdateType.MetadataDownload;
        }
        if (hasRefreshedMetadata && hasRefreshedImages)
        {
            item.DateLastRefreshed = DateTime.UtcNow;
        }
        else
        {
            // A partial failure resets the timestamp so the item is retried next time.
            item.DateLastRefreshed = default;
        }
        // Save to database
        await SaveItemAsync(metadataResult, updateType, cancellationToken).ConfigureAwait(false);
    }
    await AfterMetadataRefresh(itemOfType, refreshOptions, cancellationToken).ConfigureAwait(false);
    return(updateType);
}
/// <summary>
/// Inspects the import job record and manifest XML to determine whether a
/// solution import succeeded, populating a <see cref="SolutionImportResult"/>.
/// </summary>
/// <param name="importAsync">True when the import ran as an async operation.</param>
/// <param name="importJobId">Id of the import job to verify.</param>
/// <param name="asyncOperation">The completed async operation (async mode only; may be null).</param>
/// <param name="syncImportException">Exception thrown by a synchronous import, if any.</param>
/// <returns>The verification result, including error text and unprocessed-component count.</returns>
private SolutionImportResult VerifySolutionImport(
    bool importAsync,
    Guid importJobId,
    AsyncOperation asyncOperation,
    Exception syncImportException)
{
    SolutionImportResult result = new SolutionImportResult();

    Logger.LogVerbose("Verifying Solution Import");

    ImportJobManager jobManager = new ImportJobManager(Logger, OrganizationService);

    ImportJob importJob = jobManager.GetImportJob(
        importJobId,
        new ColumnSet("importjobid", "completedon", "progress", "data"));

    if (importJob == null)
    {
        // No import job record: the import never ran or died before creating
        // one. Surface whatever error message we have for the chosen mode.
        result.ImportJobAvailable = false;
        if (importAsync)
        {
            result.ErrorMessage = asyncOperation != null ? asyncOperation.Message : "";
        }
        else
        {
            result.ErrorMessage = syncImportException != null ? syncImportException.Message : "";
        }

        Logger.LogError("Can't verify as import job couldn't be found. Error Message: {0}", result.ErrorMessage);
        return result;
    }
    else
    {
        result.ImportJobAvailable = true;
    }

    // Anything below 100% indicates a partial or failed import, so log it as a warning.
    if (importJob.Progress == 100)
    {
        Logger.LogInformation("Completed Progress: {0}", importJob.Progress);
    }
    else
    {
        Logger.LogWarning("Completed Progress: {0}", importJob.Progress);
    }

    Logger.LogInformation("Completed On: {0}", importJob.CompletedOn);

    // The import job's Data column carries the solution manifest XML with the
    // per-component outcome of the import.
    XmlDocument doc = new XmlDocument();
    doc.LoadXml(importJob.Data);

    XmlNode resultNode = doc.SelectSingleNode("//solutionManifest/result/@result");
    string solutionImportResult = resultNode != null ? resultNode.Value : null;
    Logger.LogInformation("Import Result: {0}", solutionImportResult);

    XmlNode errorNode = doc.SelectSingleNode("//solutionManifest/result/@errortext");
    string solutionImportError = errorNode != null ? errorNode.Value : null;
    Logger.LogInformation("Import Error: {0}", solutionImportError);
    result.ErrorMessage = solutionImportError;

    // Any element flagged processed="false" is a component the import skipped.
    XmlNodeList unprocessedNodes = doc.SelectNodes("//*[@processed=\"false\"]");
    result.UnprocessedComponents = unprocessedNodes.Count;
    if (unprocessedNodes.Count > 0)
    {
        Logger.LogWarning("Total number of unprocessed components: {0}", unprocessedNodes.Count);
    }
    else
    {
        Logger.LogInformation("Total number of unprocessed components: {0}", unprocessedNodes.Count);
    }

    result.Success = solutionImportResult == ImportSuccess;
    if (importAsync)
    {
        // Fix: guard against a null asyncOperation. The null-check at the top of
        // this method shows it can legitimately be null in async mode, and the
        // original code would have thrown a NullReferenceException here.
        result.Success = result.Success
            && asyncOperation != null
            && asyncOperation.StatusCodeEnum == AsyncOperation_StatusCode.Succeeded;
    }

    return result;
}
public void Transform(IEnumerable <string> htmlFilePaths) { Guard.ArgumentNotNull(htmlFilePaths, nameof(htmlFilePaths)); Parallel.ForEach( htmlFilePaths, htmlFilePath => { if (!File.Exists(htmlFilePath)) { Logger.LogVerbose($"Can not find toc page file: {htmlFilePath}.", htmlFilePath); return; } try { var doc = new HtmlDocument(); doc.Load(htmlFilePath); var tags = doc.DocumentNode.SelectNodes("//a[@href]"); if (tags?.Count > 0) { bool isTransformed = false; foreach (var tag in tags) { var src = tag.Attributes["href"].Value; if (Uri.TryCreate(src, UriKind.Relative, out Uri uri)) { try { if (Path.IsPathRooted(src)) { if (string.IsNullOrEmpty(_pdfOptions.Host)) { Logger.LogVerbose($"No host passed, so just keep the url as origin: {src}.", htmlFilePath); continue; } if (Uri.TryCreate(_pdfOptions.Host, UriKind.Absolute, out Uri host)) { tag.Attributes["href"].Value = new Uri(host, uri.OriginalString).ToString(); isTransformed = true; } else { Logger.LogVerbose($"The host format:{_pdfOptions.Host} is invalid, so just keep the url as origin: {src}.", htmlFilePath); } } } catch (Exception ex) { Logger.LogWarning(ex.Message, htmlFilePath); } } } if (isTransformed) { doc.Save(htmlFilePath); } } } catch (Exception ex) { Logger.LogWarning($"Transfer absolute path in toc page file error, details: {ex.Message}", htmlFilePath); } }); }
public Solution CreatePatch(string uniqueName, string versionNumber, string displayName) { using (var context = new CIContext(OrganizationService)) { if (string.IsNullOrEmpty(versionNumber)) { Logger.LogVerbose("VersionNumber not supplied. Generating default VersionNumber"); var solution = (from sol in context.SolutionSet where sol.UniqueName == uniqueName || sol.UniqueName.StartsWith(uniqueName + "_Patch") orderby sol.Version descending select new Solution { Version = sol.Version, FriendlyName = sol.FriendlyName }).FirstOrDefault(); if (solution == null || string.IsNullOrEmpty(solution.Version)) { throw new Exception(string.Format("Parent solution with unique name {0} not found.", uniqueName)); } string[] versions = solution.Version.Split('.'); char dot = '.'; versionNumber = string.Concat(versions[0], dot, versions[1], dot, Convert.ToInt32(versions[2]) + 1, dot, 0); Logger.LogVerbose("New VersionNumber: {0}", versionNumber); } if (string.IsNullOrEmpty(displayName)) { Logger.LogVerbose("displayName not supplied. Generating default DisplayName"); var solution = (from sol in context.SolutionSet where sol.UniqueName == uniqueName select new Solution { FriendlyName = sol.FriendlyName }).FirstOrDefault(); if (solution == null || string.IsNullOrEmpty(solution.FriendlyName)) { throw new Exception(string.Format("Parent solution with unique name {0} not found.", uniqueName)); } displayName = solution.FriendlyName; } var cloneAsPatch = new CloneAsPatchRequest { DisplayName = displayName, ParentSolutionUniqueName = uniqueName, VersionNumber = versionNumber, }; CloneAsPatchResponse response = OrganizationService.Execute(cloneAsPatch) as CloneAsPatchResponse; Logger.LogInformation("Patch solution created with Id {0}", response.SolutionId); Solution patch = GetSolution(response.SolutionId, new ColumnSet(true)); Logger.LogInformation("Patch solution name: {0}", patch.UniqueName); return(patch); } }
/// <summary>
/// Imports a solution zip into the organization, either synchronously or as an
/// async job, with optional skip-if-same-version logic and optional download of
/// the formatted import log.
/// </summary>
/// <param name="solutionFilePath">Path to the solution zip file.</param>
/// <param name="publishWorkflows">Whether to activate workflows after import.</param>
/// <param name="convertToManaged">Whether to convert the solution to managed.</param>
/// <param name="overwriteUnmanagedCustomizations">Whether unmanaged customizations are overwritten.</param>
/// <param name="skipProductUpdateDependencies">Whether product-update dependency checks are skipped.</param>
/// <param name="holdingSolution">Import as a holding (upgrade staging) solution.</param>
/// <param name="overrideSameVersion">Import even when the same version is already installed.</param>
/// <param name="importAsync">Run the import as an async operation instead of synchronously.</param>
/// <param name="sleepInterval">Polling interval in seconds (0 → default 15).</param>
/// <param name="asyncWaitTimeout">Max wait in seconds (0 → default 900).</param>
/// <param name="importJobId">Import job id to use; empty/null → a new Guid.</param>
/// <param name="downloadFormattedLog">Save the formatted import log to disk on completion.</param>
/// <param name="logDirectory">Log output directory; empty → the solution file's directory.</param>
/// <param name="logFileName">Log file name; empty → generated from the solution file and timestamp.</param>
/// <returns>The import result, including name/version and any error text.</returns>
/// <exception cref="FileNotFoundException">When the solution file does not exist.</exception>
/// <exception cref="DirectoryNotFoundException">When the log directory does not exist.</exception>
public SolutionImportResult ImportSolution( string solutionFilePath, bool publishWorkflows, bool convertToManaged, bool overwriteUnmanagedCustomizations, bool skipProductUpdateDependencies, bool holdingSolution, bool overrideSameVersion, bool importAsync, int sleepInterval, int asyncWaitTimeout, Guid?importJobId, bool downloadFormattedLog, string logDirectory, string logFileName ) {
    Logger.LogInformation("Importing Solution: {0}", solutionFilePath);
    // Normalize optional arguments to their defaults.
    if (!importJobId.HasValue || importJobId.Value == Guid.Empty) { importJobId = Guid.NewGuid(); }
    Logger.LogVerbose("ImportJobId {0}", importJobId);
    if (asyncWaitTimeout == 0) { asyncWaitTimeout = 15 * 60; }
    Logger.LogVerbose("AsyncWaitTimeout: {0}", asyncWaitTimeout);
    if (sleepInterval == 0) { sleepInterval = 15; }
    Logger.LogVerbose("SleepInterval: {0}", sleepInterval);
    if (!File.Exists(solutionFilePath)) { Logger.LogError("Solution File does not exist: {0}", solutionFilePath); throw new FileNotFoundException("Solution File does not exist", solutionFilePath); }
    SolutionImportResult result = null;
    // Read name/version out of the zip's solution.xml up-front; bail on bad files.
    SolutionXml solutionXml = new SolutionXml(Logger);
    XrmSolutionInfo info = solutionXml.GetSolutionInfoFromZip(solutionFilePath);
    if (info == null) { result = new SolutionImportResult() { ErrorMessage = "Invalid Solution File" }; return(result); }
    else { Logger.LogInformation("Solution Unique Name: {0}, Version: {1}", info.UniqueName, info.Version); }
    // Skip entirely if this version is already installed (unless overridden).
    bool skipImport = SkipImport(info, holdingSolution, overrideSameVersion);
    if (skipImport) { Logger.LogInformation("Solution Import Skipped"); result = new SolutionImportResult() { Success = true, ImportSkipped = true }; result.SolutionName = info.UniqueName; result.VersionNumber = info.Version; return(result); }
    if (downloadFormattedLog) {
        // Default the log file name/directory and fail fast on a bad directory.
        if (string.IsNullOrEmpty(logFileName)) { logFileName = $"ImportLog_{Path.GetFileNameWithoutExtension(solutionFilePath)}_{DateTime.Now.ToString("yyyy_MM_dd__HH_mm")}.xml"; Logger.LogVerbose("Setting logFileName to {0}", logFileName); }
        if (string.IsNullOrEmpty(logDirectory)) { logDirectory = Path.GetDirectoryName(solutionFilePath); Logger.LogVerbose("Settings logDirectory to {0}", logDirectory); }
        if (!Directory.Exists(logDirectory)) { Logger.LogError("logDirectory not exist: {0}", logDirectory); throw new DirectoryNotFoundException("logDirectory does not exist"); }
    }
    byte[] solutionBytes = File.ReadAllBytes(solutionFilePath);
    var importSolutionRequest = new ImportSolutionRequest { CustomizationFile = solutionBytes, PublishWorkflows = publishWorkflows, ConvertToManaged = convertToManaged, OverwriteUnmanagedCustomizations = overwriteUnmanagedCustomizations, SkipProductUpdateDependencies = skipProductUpdateDependencies, ImportJobId = importJobId.Value, RequestId = importJobId, HoldingSolution = holdingSolution };
    if (importAsync) {
        // Async mode: queue the import and poll the async operation to completion.
        Logger.LogVerbose(string.Format("Importing solution in Async Mode"));
        var asyncRequest = new ExecuteAsyncRequest { Request = importSolutionRequest };
        var asyncResponse = OrganizationService.Execute(asyncRequest) as ExecuteAsyncResponse;
        Guid asyncJobId = asyncResponse.AsyncJobId;
        Logger.LogVerbose("Awaiting for Async Operation Completion");
        AsyncUpdateHandler updateHandler = new AsyncUpdateHandler( Logger, PollingOrganizationService, importJobId.Value);
        AsyncOperationManager operationManager = new AsyncOperationManager(Logger, PollingOrganizationService);
        AsyncOperation asyncOperation = operationManager.AwaitCompletion( asyncJobId, asyncWaitTimeout, sleepInterval, updateHandler);
        Logger.LogInformation("Async Operation completed with status: {0}", ((AsyncOperation_StatusCode)asyncOperation.StatusCode.Value).ToString());
        Logger.LogInformation("Async Operation completed with message: {0}", asyncOperation.Message);
        result = VerifySolutionImport(importAsync, importJobId.Value, asyncOperation, null);
    } else {
        // Sync mode: run the blocking import on a worker task while this thread
        // polls the import job so progress can be reported.
        Logger.LogVerbose("Importing solution in Sync Mode");
        SyncImportHandler importHandler = new SyncImportHandler( Logger, OrganizationService, importSolutionRequest);
        ImportJobHandler jobHandler = new ImportJobHandler( Logger, OrganizationService, importHandler);
        Logger.LogVerbose("Creating Import Task");
        Action importAction = () => importHandler.ImportSolution();
        Task importTask = new Task(importAction);
        Logger.LogVerbose("Starting Import Task");
        importTask.Start();
        Logger.LogVerbose("Thread Started. Starting to Query Import Status");
        ImportJobManager jobManager = new ImportJobManager(Logger, PollingOrganizationService);
        jobManager.AwaitImportJob(importJobId.Value, asyncWaitTimeout, sleepInterval, true, jobHandler);
        importTask.Wait();
        result = VerifySolutionImport(importAsync, importJobId.Value, null, importHandler.Error);
    }
    result.SolutionName = info.UniqueName;
    result.VersionNumber = info.Version;
    // Persist the formatted log only when an import job record actually exists.
    if (result.ImportJobAvailable && downloadFormattedLog) { ImportJobManager jobManager = new ImportJobManager(Logger, OrganizationService); jobManager.SaveFormattedLog(importJobId.Value, logDirectory, logFileName); }
    if (result.Success) { Logger.LogInformation("Solution Import Completed Successfully"); }
    else { Logger.LogInformation("Solution Import Failed"); }
    return(result);
}
private void btn_Home_Hibernate_Click(object sender, RoutedEventArgs e) { Logger.Instance().Log("FrontView+", "Hibernate", true); Process.Start("shutdown.exe", "-h -t 01"); }
public SolutionApplyResult ApplySolution( string solutionName, bool importAsync, int sleepInterval, int asyncWaitTimeout ) { Logger.LogVerbose("Upgrading Solution: {0}", solutionName); if (SkipUpgrade(solutionName)) { return(new SolutionApplyResult() { Success = true, ApplySkipped = true }); } Exception syncApplyException = null; AsyncOperation asyncOperation = null; var upgradeSolutionRequest = new DeleteAndPromoteRequest { UniqueName = solutionName }; if (importAsync) { var asyncRequest = new ExecuteAsyncRequest { Request = upgradeSolutionRequest }; Logger.LogVerbose("Applying using Async Mode"); var asyncResponse = OrganizationService.Execute(asyncRequest) as ExecuteAsyncResponse; Guid asyncJobId = asyncResponse.AsyncJobId; Logger.LogInformation(string.Format("Async JobId: {0}", asyncJobId)); Logger.LogVerbose("Awaiting for Async Operation Completion"); AsyncOperationManager asyncOperationManager = new AsyncOperationManager( Logger, PollingOrganizationService); asyncOperation = asyncOperationManager.AwaitCompletion( asyncJobId, asyncWaitTimeout, sleepInterval, null); Logger.LogInformation("Async Operation completed with status: {0}", ((AsyncOperation_StatusCode)asyncOperation.StatusCode.Value).ToString()); Logger.LogInformation("Async Operation completed with message: {0}", asyncOperation.Message); } else { try { OrganizationService.Execute(upgradeSolutionRequest); } catch (Exception ex) { syncApplyException = ex; } } SolutionApplyResult result = VerifyUpgrade( solutionName, asyncOperation, syncApplyException); if (result.Success) { Logger.LogInformation("Solution Apply Completed Successfully"); } else { Logger.LogInformation("Solution Apply Failed"); } return(result); }
/// <summary>
/// Runs one training epoch over <paramref name="trainCorpus"/>, distributing
/// mini-batches across all configured devices, updating weights with the Adam
/// optimizer, retrying with a doubled batch-split factor on out-of-memory, and
/// checkpointing roughly once per hour.
/// </summary>
/// <param name="ep">Zero-based epoch index (for reporting).</param>
/// <param name="trainCorpus">Training batches; consumed in groups of one batch per device.</param>
/// <param name="validCorpus">Validation batches used when creating checkpoints.</param>
/// <param name="learningRate">Learning-rate schedule queried before each update.</param>
/// <param name="solver">Adam optimizer that applies the accumulated gradients.</param>
/// <param name="metrics">Metrics evaluated during checkpointing.</param>
/// <param name="modelMetaData">Model metadata passed through to checkpointing.</param>
/// <param name="ForwardOnSingleDevice">Runs forward (and triggers backward) for one device;
/// returns the batch cost.</param>
internal void TrainOneEpoch(int ep, IEnumerable<SntPairBatch> trainCorpus, IEnumerable<SntPairBatch> validCorpus, ILearningRate learningRate, AdamOptimizer solver, List<IMetric> metrics, IModelMetaData modelMetaData, Func<IComputeGraph, List<List<string>>, List<List<string>>, int, bool, float> ForwardOnSingleDevice) {
    // Running totals across the whole epoch.
    int processedLineInTotal = 0;
    DateTime startDateTime = DateTime.Now;
    double costInTotal = 0.0;
    long srcWordCntsInTotal = 0;
    long tgtWordCntsInTotal = 0;
    double avgCostPerWordInTotal = 0.0;
    Logger.WriteLine($"Start to process training corpus.");
    List<SntPairBatch> sntPairBatchs = new List<SntPairBatch>();
    foreach (SntPairBatch sntPairBatch in trainCorpus) {
        sntPairBatchs.Add(sntPairBatch);
        // Process once we have one batch per device.
        // NOTE(review): a trailing group smaller than m_deviceIds.Length at the
        // end of the corpus is never processed — confirm this is intended.
        if (sntPairBatchs.Count == m_deviceIds.Length) {
            // Copy weights from weights kept in default device to all other devices
            CopyWeightsFromDefaultDeviceToAllOtherDevices();
            int batchSplitFactor = 1;
            bool runNetwordSuccssed = false;
            // Retry loop: on OOM the batch is split into smaller segments and re-run.
            while (runNetwordSuccssed == false) {
                try {
                    (float cost, int sWordCnt, int tWordCnt, int processedLine) = RunNetwork(ForwardOnSingleDevice, sntPairBatchs, batchSplitFactor);
                    processedLineInTotal += processedLine;
                    srcWordCntsInTotal += sWordCnt;
                    tgtWordCntsInTotal += tWordCnt;
                    //Sum up gradients in all devices, and kept it in default device for parameters optmization
                    SumGradientsToTensorsInDefaultDevice();
                    //Optmize parameters
                    float lr = learningRate.GetCurrentLearningRate();
                    List<IWeightTensor> models = GetParametersFromDefaultDevice();
                    solver.UpdateWeights(models, processedLine, lr, m_regc, m_weightsUpdateCount + 1);
                    costInTotal += cost;
                    avgCostPerWordInTotal = costInTotal / tgtWordCntsInTotal;
                    m_weightsUpdateCount++;
                    // Report progress every 100 weight updates.
                    if (IterationDone != null && m_weightsUpdateCount % 100 == 0) {
                        IterationDone(this, new CostEventArg() { LearningRate = lr, CostPerWord = cost / tWordCnt, AvgCostInTotal = avgCostPerWordInTotal, Epoch = ep, Update = m_weightsUpdateCount, ProcessedSentencesInTotal = processedLineInTotal, ProcessedWordsInTotal = srcWordCntsInTotal + tgtWordCntsInTotal, StartDateTime = startDateTime });
                    }
                    runNetwordSuccssed = true;
                } catch (AggregateException err) {
                    // Parallel.For wraps worker exceptions; unwrap to decide how to react.
                    if (err.InnerExceptions != null) {
                        string oomMessage = String.Empty;
                        bool isOutOfMemException = false;
                        bool isArithmeticException = false;
                        foreach (var excep in err.InnerExceptions) {
                            if (excep is OutOfMemoryException) { isOutOfMemException = true; oomMessage = excep.Message; break; }
                            else if (excep is ArithmeticException) { isArithmeticException = true; oomMessage = excep.Message; break; }
                        }
                        if (isOutOfMemException) {
                            // OOM: halve the effective per-run batch and retry; give up
                            // on this group once the split exceeds the batch size.
                            batchSplitFactor *= 2;
                            Logger.WriteLine($"Got an exception ('{oomMessage}'), so we increase batch split factor to {batchSplitFactor}, and retry it.");
                            if (batchSplitFactor >= sntPairBatchs[0].BatchSize) { Logger.WriteLine($"Batch split factor is larger than batch size, so ignore current mini-batch."); break; }
                        } else if (isArithmeticException) {
                            // Numeric failure (e.g. NaN/overflow): skip this group.
                            Logger.WriteLine($"Arithmetic exception: '{err.Message}'");
                            break;
                        } else {
                            // NOTE(review): "throw err" resets the stack trace; "throw;"
                            // would preserve it.
                            Logger.WriteLine(Logger.Level.err, ConsoleColor.Red, $"Exception: {err.Message}, Call stack: {err.StackTrace}");
                            throw err;
                        }
                    } else {
                        Logger.WriteLine(Logger.Level.err, ConsoleColor.Red, $"Exception: {err.Message}, Call stack: {err.StackTrace}");
                        throw err;
                    }
                } catch (OutOfMemoryException err) {
                    // Direct OOM (not wrapped): same split-and-retry policy as above.
                    batchSplitFactor *= 2;
                    Logger.WriteLine($"Got an exception ('{err.Message}'), so we increase batch split factor to {batchSplitFactor}, and retry it.");
                    if (batchSplitFactor >= sntPairBatchs[0].BatchSize) { Logger.WriteLine($"Batch split factor is larger than batch size, so ignore current mini-batch."); break; }
                } catch (ArithmeticException err) {
                    Logger.WriteLine($"Arithmetic exception: '{err.Message}'");
                    break;
                } catch (Exception err) {
                    Logger.WriteLine(Logger.Level.err, ConsoleColor.Red, $"Exception: {err.Message}, Call stack: {err.StackTrace}");
                    throw err;
                }
            }
            // Evaluate model every hour and save it if we could get a better one.
            TimeSpan ts = DateTime.Now - m_lastCheckPointDateTime;
            if (ts.TotalHours > 1.0) {
                CreateCheckPoint(validCorpus, metrics, modelMetaData, ForwardOnSingleDevice, avgCostPerWordInTotal);
                m_lastCheckPointDateTime = DateTime.Now;
            }
            sntPairBatchs.Clear();
        }
    }
    Logger.WriteLine(Logger.Level.info, ConsoleColor.Green, $"Epoch '{ep}' took '{DateTime.Now - startDateTime}' time to finish. AvgCost = {avgCostPerWordInTotal.ToString("F6")}, AvgCostInLastEpoch = {m_avgCostPerWordInTotalInLastEpoch.ToString("F6")}");
    // CreateCheckPoint(validCorpus, metrics, modelMetaData, ForwardOnSingleDevice, avgCostPerWordInTotal);
    m_avgCostPerWordInTotalInLastEpoch = avgCostPerWordInTotal;
}
public ActionResult ThanhToan() { HOCVIEN hocvien = (HOCVIEN)Session[SesHocVien]; List<KHOAHOC> giohang = (List<KHOAHOC>)Session["GioHang"]; DONDATHANG donhang = new DONDATHANG(); if (hocvien == null) { return RedirectToAction(actionName: "DangNhap"); } if(giohang != null) { donhang.MaHV = hocvien.MaHV; donhang.Ngaydat = DateTime.Now; donhang.Dathanhtoan = false; foreach(KHOAHOC khoahoc in giohang) { donhang.CHITIETDONTHANGs.Add(new CHITIETDONTHANG() { MaKH = khoahoc.MaKH }); } db.DONDATHANGs.InsertOnSubmit(donhang); db.SubmitChanges(); } else { return View(); } APIContext apiContext = Configuration.GetAPIContext(); try { string payerId = Request.Params["PayerID"]; if (string.IsNullOrEmpty(payerId)) { string baseURI = Request.Url.Scheme + "://" + Request.Url.Authority + "/HocVien/ThanhToan?"; var guid = Convert.ToString((new Random()).Next(100000)); var createdPayment = CreatePayment(apiContext, baseURI + "guid=" + guid); var links = createdPayment.links.GetEnumerator(); string paypalRedirectUrl = null; while (links.MoveNext()) { Links lnk = links.Current; if (lnk.rel.ToLower().Trim().Equals("approval_url")) { paypalRedirectUrl = lnk.href; } } Session.Add(guid, createdPayment.id); return Redirect(paypalRedirectUrl); } else { var guid = Request.Params["guid"]; var executedPayment = ExecutePayment(apiContext, payerId, Session[guid] as string); if (executedPayment.state.ToLower() != "approved") { return View("Faile"); } } } catch (Exception ex) { Logger.Log("Error " + ex.Message); return View("Faile"); } donhang.Dathanhtoan = true; db.SubmitChanges(); return View("Success"); }
private async void checkForUpdateAsync(bool useDeltaPatching = true, UpdateProgressNotification notification = null) { //should we schedule a retry on completion of this check? bool scheduleRetry = true; try { if (updateManager == null) updateManager = await UpdateManager.GitHubUpdateManager(@"https://github.com/ppy/osu", @"osulazer", null, null, true); var info = await updateManager.CheckForUpdate(!useDeltaPatching); if (info.ReleasesToApply.Count == 0) //no updates available. bail and retry later. return; if (notification == null) { notification = new UpdateProgressNotification(this) { State = ProgressNotificationState.Active }; Schedule(() => notificationOverlay.Post(notification)); } notification.Progress = 0; notification.Text = @"Downloading update..."; try { await updateManager.DownloadReleases(info.ReleasesToApply, p => notification.Progress = p / 100f); notification.Progress = 0; notification.Text = @"Installing update..."; await updateManager.ApplyReleases(info, p => notification.Progress = p / 100f); notification.State = ProgressNotificationState.Completed; } catch (Exception e) { if (useDeltaPatching) { Logger.Error(e, @"delta patching failed!"); //could fail if deltas are unavailable for full update path (https://github.com/Squirrel/Squirrel.Windows/issues/959) //try again without deltas. checkForUpdateAsync(false, notification); scheduleRetry = false; } else { Logger.Error(e, @"update failed!"); } } } catch (Exception) { // we'll ignore this and retry later. can be triggered by no internet connection or thread abortion. } finally { if (scheduleRetry) { if (notification != null) notification.State = ProgressNotificationState.Cancelled; //check again in 30 minutes. Scheduler.AddDelayed(() => checkForUpdateAsync(), 60000 * 30); } } }
/// <summary>
/// Signs the user in with cookie authentication: builds the authentication
/// ticket, optionally persists it to the session store (leaving only a session
/// id claim in the cookie), protects it, writes the response cookie, raises
/// the SigningIn/SignedIn events, and applies redirect headers when the
/// request targeted the login path.
/// </summary>
/// <param name="user">The principal to sign in; must not be null.</param>
/// <param name="properties">Authentication properties; null is replaced with defaults.</param>
/// <exception cref="ArgumentNullException">When <paramref name="user"/> is null.</exception>
public async virtual Task SignInAsync(ClaimsPrincipal user, AuthenticationProperties properties) {
    if (user == null) { throw new ArgumentNullException(nameof(user)); }
    properties = properties ?? new AuthenticationProperties();
    _signInCalled = true;
    // Process the request cookie to initialize members like _sessionKey.
    await EnsureCookieTicket();
    var cookieOptions = BuildCookieOptions();
    var signInContext = new CookieSigningInContext( Context, Scheme, Options, user, properties, cookieOptions);
    // Default IssuedUtc to now; ensure ExpiresUtc is always populated.
    DateTimeOffset issuedUtc;
    if (signInContext.Properties.IssuedUtc.HasValue) { issuedUtc = signInContext.Properties.IssuedUtc.Value; }
    else { issuedUtc = Clock.UtcNow; signInContext.Properties.IssuedUtc = issuedUtc; }
    if (!signInContext.Properties.ExpiresUtc.HasValue) { signInContext.Properties.ExpiresUtc = issuedUtc.Add(Options.ExpireTimeSpan); }
    // Give event handlers a chance to modify the context before the ticket is built.
    await Events.SigningIn(signInContext);
    // Persistent sign-ins get an explicit cookie expiry (otherwise session cookie).
    if (signInContext.Properties.IsPersistent) {
        var expiresUtc = signInContext.Properties.ExpiresUtc ?? issuedUtc.Add(Options.ExpireTimeSpan);
        signInContext.CookieOptions.Expires = expiresUtc.ToUniversalTime();
    }
    var ticket = new AuthenticationTicket(signInContext.Principal, signInContext.Properties, signInContext.Scheme.Name);
    if (Options.SessionStore != null) {
        // Server-side ticket storage: replace any prior session, store the full
        // ticket, and put only a session-id claim in the cookie itself.
        if (_sessionKey != null) { await Options.SessionStore.RemoveAsync(_sessionKey); }
        _sessionKey = await Options.SessionStore.StoreAsync(ticket);
        var principal = new ClaimsPrincipal( new ClaimsIdentity( new[] { new Claim(SessionIdClaim, _sessionKey, ClaimValueTypes.String, Options.ClaimsIssuer) }, Options.ClaimsIssuer));
        ticket = new AuthenticationTicket(principal, null, Scheme.Name);
    }
    // Protect (encrypt/sign) the ticket, binding it to the TLS token when available.
    var cookieValue = Options.TicketDataFormat.Protect(ticket, GetTlsTokenBinding());
    Options.CookieManager.AppendResponseCookie( Context, Options.Cookie.Name, cookieValue, signInContext.CookieOptions);
    var signedInContext = new CookieSignedInContext( Context, Scheme, signInContext.Principal, signInContext.Properties, Options);
    await Events.SignedIn(signedInContext);
    // Only redirect on the login path
    var shouldRedirect = Options.LoginPath.HasValue && OriginalPath == Options.LoginPath;
    await ApplyHeaders(shouldRedirect, signedInContext.Properties);
    Logger.SignedIn(Scheme.Name);
}
private static void OnClientConnected(IWebSocketConnection socket) { Logger.AddInformation("WebSocket: Client Connectedon port:" + socket.ConnectionInfo.ClientPort); XULRunner = socket; }
public OperationsLauncher(Main notify) { logger = LogManager.GetCurrentClassLogger(); this.notify = notify; }
private static void OnClientDisconnected() { Logger.AddInformation("WebSocket: Client disconnected"); XULRunner = null; }
private (float, int, int, int) RunNetwork(Func<IComputeGraph, List<List<string>>, List<List<string>>, int, bool, float> ForwardOnSingleDevice, List<SntPairBatch> sntPairBatchs, int batchSplitFactor) { float cost = 0.0f; int processedLine = 0; int srcWordCnts = 0; int tgtWordCnts = 0; //Clear gradient over all devices ZeroGradientOnAllDevices(); // Run forward and backward on all available processors Parallel.For(0, m_deviceIds.Length, i => { try { SntPairBatch sntPairBatch_i = sntPairBatchs[i]; int batchSegSize = sntPairBatch_i.BatchSize / batchSplitFactor; for (int k = 0; k < batchSplitFactor; k++) { // Construct sentences for encoding and decoding List<List<string>> srcTkns = new List<List<string>>(); List<List<string>> tgtTkns = new List<List<string>>(); int sLenInBatch = 0; int tLenInBatch = 0; for (int j = k * batchSegSize; j < (k + 1) * batchSegSize; j++) { srcTkns.Add(sntPairBatch_i.SntPairs[j].SrcSnt.ToList()); sLenInBatch += sntPairBatch_i.SntPairs[j].SrcSnt.Length; tgtTkns.Add(sntPairBatch_i.SntPairs[j].TgtSnt.ToList()); tLenInBatch += sntPairBatch_i.SntPairs[j].TgtSnt.Length; } float lcost = 0.0f; // Create a new computing graph instance using (IComputeGraph computeGraph_i = CreateComputGraph(i)) { // Run forward part lcost = ForwardOnSingleDevice(computeGraph_i, srcTkns, tgtTkns, i, true); // Run backward part and compute gradients computeGraph_i.Backward(); } lock (locker) { cost += lcost; srcWordCnts += sLenInBatch; tgtWordCnts += tLenInBatch; processedLine += batchSegSize; } } } catch (OutOfMemoryException err) { throw err; } catch (Exception err) { Logger.WriteLine(Logger.Level.err, ConsoleColor.Red, $"Exception: '{err.Message}'"); Logger.WriteLine(Logger.Level.err, ConsoleColor.Red, $"Call stack: '{err.StackTrace}'"); throw err; } }); return (cost, srcWordCnts, tgtWordCnts, processedLine); }