public BrainForm() {
	ms_singleton = this;

	InitializeComponent();

	this.TopMost = false;
	DEFAULT_OPACITY = this.Opacity;

	try {
		m_database = new FichesDB();
		m_database.FicheSuccessOccurred += database_FicheSuccessOccurred;
		m_database.FicheWarningOccurred += database_FicheWarningOccurred;
		m_database.FicheErrorOccurred += database_FicheErrorOccurred;
		m_database.Log += database_Log;

		// Setup the fiches' default dimensions to the primary monitor's dimensions
		Rectangle primaryScreenRect = Screen.PrimaryScreen.Bounds;
		Fiche.ChunkWebPageSnapshot.ms_defaultWebPageWidth = (uint) primaryScreenRect.Width;
		Fiche.ChunkWebPageSnapshot.ms_defaultWebPageHeight = (uint) primaryScreenRect.Height;
		Fiche.ChunkWebPageSnapshot.ms_maxWebPagePieces = (uint) Math.Ceiling( 20000.0 / primaryScreenRect.Height );	// Cap snapshots to ~20,000 pixels of total height (e.g. 19 pieces on a 1080-pixel-high monitor)

		Rectangle desktopBounds = Interop.GetDesktopBounds();

		// Create the modeless forms
		m_logForm = new LogForm();
		m_logForm.Location = new Point( desktopBounds.Right - m_logForm.Width, desktopBounds.Bottom - m_logForm.Height );	// Spawn in the bottom-right corner of the desktop to avoid being annoying...

		m_preferenceForm = new PreferencesForm( this );
		m_preferenceForm.RootDBFolderChanged += preferenceForm_RootDBFolderChanged;
		m_preferenceForm.Visible = false;

		m_ficheWebPageEditorForm = new FicheWebPageEditorForm( this );
		m_ficheWebPageEditorForm.Visible = false;
		m_ficheWebPageEditorForm.VisibleChanged += ficheWebPageEditorForm_VisibleChanged;

		m_ficheWebPageAnnotatorForm = new FicheWebPageAnnotatorForm( this );
		m_ficheWebPageAnnotatorForm.Visible = false;
		m_ficheWebPageAnnotatorForm.VisibleChanged += ficheWebPageAnnotatorForm_VisibleChanged;

		m_fastTaggerForm = new FastTaggerForm( this );
		m_fastTaggerForm.Visible = false;

		m_notificationForm = new NotificationForm( this );
		m_notificationForm.Visible = false;

		// Parse fiches and load the database
		DirectoryInfo rootDBFolder = new DirectoryInfo( m_preferenceForm.RootDBFolder );
		if ( !rootDBFolder.Exists ) {
			rootDBFolder.Create();
			rootDBFolder.Refresh();
			int waitCount = 0;
			while ( !rootDBFolder.Exists ) {
				System.Threading.Thread.Sleep( 100 );
				rootDBFolder.Refresh();	// DirectoryInfo.Exists is cached, refresh before testing again
				if ( waitCount++ > 10 )	// Wait for a full second
					throw new Exception( "Failed to create root DB folder \"" + rootDBFolder + "\"! Timed out..." );
			}
		}

		m_database.LoadFichesDescriptions( rootDBFolder );

//		SelectedFiche = URLHandler.CreateURLFiche( m_database, null, WebHelpers.CreateCanonicalURL( "https://twitter.com/HMaler/status/1217484876372480008" ) );	// OK!
//		SelectedFiche = URLHandler.CreateURLFiche( m_database, null, WebHelpers.CreateCanonicalURL( "https://twitter.com/SylvieGaillard/status/1211726353726394379" ) );	// OK!
//		SelectedFiche = URLHandler.CreateURLFiche( m_database, null, WebHelpers.CreateCanonicalURL( "https://twitter.com/MFrippon/status/1134377488233226245" ) );	// OK!
//		SelectedFiche = URLHandler.CreateURLFiche( m_database, null, WebHelpers.CreateCanonicalURL( "https://stackoverflow.com/questions/4964205/non-transparent-click-through-form-in-net" ) );	// OK!
//		SelectedFiche = URLHandler.CreateURLFiche( m_database, null, WebHelpers.CreateCanonicalURL( "http://www.patapom.com/" ) );	// OK!
//		SelectedFiche = URLHandler.CreateURLFiche( m_database, null, WebHelpers.CreateCanonicalURL( "https://www.monde-diplomatique.fr/2020/03/HOLLAR/61546" ) );	// OK!
//		SelectedFiche = URLHandler.CreateURLFiche( m_database, null, WebHelpers.CreateCanonicalURL( "https://docs.google.com/document/d/1_iJeEDcoDJS8EUyaprAL4Eu67Tbox_DnYnzQPFiTsa0/edit#heading=h.bktvm5f5g3wf" ) );	// OK!
//		SelectedFiche = URLHandler.CreateURLFiche( m_database, null, WebHelpers.CreateCanonicalURL( "https://www.breakin.se/mc-intro/" ) );	// OK!
//		SelectedFiche = URLHandler.CreateURLFiche( m_database, null, WebHelpers.CreateCanonicalURL( "https://www.frontiersin.org/articles/10.3389/fpsyg.2017.02124/full" ) );	// OK!

		// Here we occasionally get a crash when the fiche is saved and its images are disposed while the editor is still trying to read them to build bitmaps! Very, very annoying to reproduce!
//		SelectedFiche = URLHandler.CreateURLFiche( m_database, null, WebHelpers.CreateCanonicalURL( "https://en.wikipedia.org/wiki/Quantum_mind" ) );	// Crash on bitmap copy

		// Content rectangles still off
		// On the first link it's probably a dynamic banner that messes things up
		// Apparently on Mediapart too (it only shows in "incognito" mode, otherwise I'm logged in with my account and the article is complete)
//		SelectedFiche = URLHandler.CreateURLFiche( m_database, null, WebHelpers.CreateCanonicalURL( "http://variances.eu/?p=3221" ) );
		SelectedFiche = URLHandler.CreateURLFiche( m_database, null, WebHelpers.CreateCanonicalURL( "https://www.mediapart.fr/journal/economie/040320/la-banque-publique-d-investissement-va-eponger-les-pertes-du-cac-40" ) );

		/*
		 * Try this:
		 * https://www.republicain-lorrain.fr/edition-thionville-hayange/2020/03/05/ces-frontaliers-qui-laissent-tomber-le-train?preview=true&fbclid=IwAR3jgJaj0wjepYYTEHNiXbJzQ4B9giZ4htO6gge4q7BwXUGQrvSpql8Sh9M
		 *
		 * Example of a "perfect" article with a short summary at the beginning and "bio" links on the people and organizations involved:
		 * (could we try to create this kind of digest automatically?)
		 * https://www.ftm.nl/dutch-multinationals-funded-climate-sceptic
		 *
		 * Test the jean Chon blog "questions à charge" where the page loads progressively
		 *
		 * Test this one, with the widgets on the right that update strangely + URL cleanup
		 * https://www.huffingtonpost.fr/entry/agnes-buzyn-livre-des-confessions-accablantes-sur-le-coronavirus_fr_5e70b8cec5b6eab7793c6642?ncid=other_twitter_cooo9wqtham&utm_campaign=share_twitter
		 */

		m_logForm.Show();

	} catch ( FichesDB.DatabaseLoadException _e ) {
		// Log errors...
		foreach ( Exception e in _e.m_errors )
			LogError( e );
	} catch ( Exception _e ) {
		MessageBox( "Error when creating forms!\r\n\n", _e );
//		Close();
//		Application.Exit();
	}

	try {
		// Attempt to retrieve the containing monitor
		IntPtr hMonitor;
		Screen screen;
		Interop.GetMonitorFromPosition( Control.MousePosition, out screen, out hMonitor );

		// Rescale the window into a fullscreen overlay
		this.SetDesktopBounds( screen.Bounds.X, screen.Bounds.Y, screen.Bounds.Width, screen.Bounds.Height );

		// Create the fullscreen windowed device
		m_device.Init( this.Handle, false, true );

		m_CB_main = new ConstantBuffer<CB_Main>( m_device, 0 );
		m_CB_camera = new ConstantBuffer<CB_Camera>( m_device, 1 );
		m_CB_camera.m._camera2World = new float4x4();
		m_CB_camera.m._camera2Proj = new float4x4();

		// Create primitives and shaders
		m_shader_displayCube = new Shader( m_device, new System.IO.FileInfo( "./Shaders/DisplayCube.hlsl" ), VERTEX_FORMAT.P3N3, "VS", null, "PS", null );
		BuildCube();

		m_startTime = DateTime.Now;

		Application.Idle += Application_Idle;

		// Register global shortcuts
		m_preferenceForm.RegisterHotKeys();

		Focus();

	} catch ( Exception _e ) {
		MessageBox( "Error when creating D3D device!\r\n\n", _e );
		Close();
//		Application.Exit();
	}
}
public Bookmark( FichesDB _database, Bookmark _parent, WebServices.JSON.JSONObject _JSON, List<Bookmark> _bookmarks ) {
	m_database = _database;
	m_parent = _parent;

	if ( _JSON == null || !_JSON.IsDictionary )
		throw new Exception( "Invalid JSON object type!" );

	Dictionary<string, object> dictionary = _JSON.AsDictionary;
	foreach ( string key in dictionary.Keys ) {
		switch ( key ) {
			case "name":
				m_name = dictionary[key] as string;
				break;

			case "date_added":
				// From https://stackoverflow.com/questions/19074423/how-to-parse-the-date-added-field-in-chrome-bookmarks-file
				string strTicks = dictionary[key] as string;
				long microseconds;
				if ( long.TryParse( strTicks, out microseconds ) ) {
					long milliseconds = microseconds / 1000;
					long seconds = milliseconds / 1000;
					long minutes = seconds / 60;
					long hours = minutes / 60;
					long days = hours / 24;
					TimeSpan delay = new TimeSpan( (int) days, (int) (hours % 24), (int) (minutes % 60), (int) (seconds % 60), (int) (milliseconds % 1000) );
					m_dateAdded = new DateTime( 1601, 1, 1 ) + delay;
				}
				break;

			case "guid":
				string strGUID = dictionary[key] as string;
				Guid.TryParse( strGUID, out m_GUID );
				break;

			case "url":
				string strURL = dictionary[key] as string;
				m_URL = WebHelpers.CreateCanonicalURL( strURL );
				break;

			case "children":
				RecurseImportBookmarks( this, dictionary[key] as WebServices.JSON.JSONObject, _bookmarks );
				break;

			case "type":
				string strType = dictionary[key] as string;
				switch ( strType ) {
					case "url": m_type = TYPE.URL; break;
					case "folder": m_type = TYPE.FOLDER; break;
				}
				break;

			default:
				// Try to import children...
				RecurseImportBookmarks( this, dictionary[key] as WebServices.JSON.JSONObject, _bookmarks );
				break;
		}
	}
}
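// A minimal alternative sketch for the "date_added" conversion above (not part of the original class, the helper
// name is hypothetical): Chrome stores the timestamp as microseconds elapsed since 1601-01-01 UTC (the Windows
// FILETIME epoch). Since one .NET tick is 100 ns (i.e. 10 ticks per microsecond), the days/hours/minutes/seconds/
// milliseconds split could be replaced by a direct tick conversion:
//
//	static DateTime ChromeTimestampToDateTime( long _microseconds ) {
//		return new DateTime( 1601, 1, 1, 0, 0, 0, DateTimeKind.Utc ).AddTicks( _microseconds * 10 );
//	}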
/// <summary>
/// Reads the fiche's description and HTML content
/// </summary>
/// <param name="_reader"></param>
/// <remarks>Heavy chunks are NOT read and will only be accessible asynchronously</remarks>
public void Read( BinaryReader _reader ) {
	uint signature = _reader.ReadUInt32();
	if ( signature != SIGNATURE )
		throw new Exception( "Unexpected signature!" );

	uint versionMajor, versionMinor;
	versionMajor = (uint) _reader.ReadUInt16();
	versionMinor = (uint) _reader.ReadUInt16();
	uint version = (versionMajor << 16) | versionMinor;

	// Read hierarchy
	string strGUID = _reader.ReadString();
	if ( !Guid.TryParse( strGUID, out m_GUID ) )
		throw new Exception( "Failed to parse fiche GUID!" );

	string strCreationTime = _reader.ReadString();
	if ( !DateTime.TryParse( strCreationTime, out m_creationTime ) )
		throw new Exception( "Failed to parse fiche creation time!" );

	// We only read the GUIDs here, the actual fiches will be resolved later
	uint parentsCount = _reader.ReadUInt32();
	while ( m_tags.Count > 0 )
		RemoveTag( m_tags[0] );
	m_tagGUIDs = new Guid[parentsCount];
	for ( int parentIndex = 0; parentIndex < parentsCount; parentIndex++ ) {
		strGUID = _reader.ReadString();
		if ( !Guid.TryParse( strGUID, out m_tagGUIDs[parentIndex] ) )
			throw new Exception( "Failed to parse fiche's parent GUID!" );
	}

	// Read content
	string strType = _reader.ReadString();
	if ( !Enum.TryParse( strType, out m_type ) )
		throw new Exception( "Failed to parse fiche's type!" );

	m_title = _reader.ReadString();
	if ( _reader.ReadBoolean() ) {
		string strURL = _reader.ReadString();
		m_URL = WebHelpers.CreateCanonicalURL( strURL );
	}
	if ( _reader.ReadBoolean() ) {
		m_HTMLContent = _reader.ReadString();
	}
	if ( _reader.ReadBoolean() ) {
		m_rootElement = new Brain2.DOMElement( _reader );
	}

	// Read chunks
	while ( m_chunks.Count > 0 ) {
		m_chunks[0].Dispose();
		m_chunks.RemoveAt( 0 );
	}

	uint chunksCount = _reader.ReadUInt32();
	for ( uint chunkIndex = 0; chunkIndex < chunksCount; chunkIndex++ ) {
		string chunkType = _reader.ReadString();
		uint chunkLength = _reader.ReadUInt32();
		ulong chunkStartOffset = (ulong) _reader.BaseStream.Position;

		ChunkBase chunk = CreateChunkFromType( chunkType, chunkStartOffset, chunkLength );
		if ( chunk != null ) {
			chunk.Read( _reader );	// Only shallow data will be available, heavy data will be loaded asynchronously on demand
		}

		// Always jump to the chunk's end, whether the chunk read something or not...
		ulong chunkEndOffset = chunkStartOffset + chunkLength;
		_reader.BaseStream.Seek( (long) chunkEndOffset, SeekOrigin.Begin );
	}

	// The fiche is now ready!
	m_status = STATUS.READY;
}
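// Hypothetical usage sketch (the file path, fiche instance and surrounding code are assumptions, not part of this class):
// Read() consumes the fiche's description synchronously from the start of the stream; for each chunk only its type,
// length and start offset are kept, then the stream is skipped to the chunk's end so heavy payloads can be fetched
// asynchronously on demand later.
//
//	using ( BinaryReader reader = new BinaryReader( System.IO.File.OpenRead( ficheFileName ) ) ) {
//		fiche.Read( reader );	// Shallow data only: GUID, tag GUIDs, title, URL, HTML content and chunk headers
//	}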