/// <summary>
/// View model representing a busy/progress state.
/// When constructed on a worker thread, pass the UI thread's
/// <see cref="SynchronizationContext"/> via <paramref name="UIContext"/> so that
/// progress-control updates are marshalled back to the UI thread.
/// </summary>
internal BusyViewModel(string Message, string SubMessage = null, ulong? MaxProgressValue = null, SynchronizationContext UIContext = null, bool ShowAnimation = true, bool ShowRebootHelp = false)
{
    LogFile.Log(Message);

    this.Message = Message;
    this.SubMessage = SubMessage;
    this.ShowAnimation = ShowAnimation;
    this.ShowRebootHelp = ShowRebootHelp;
    // Fall back to the current thread's context when none was supplied.
    this.UIContext = UIContext ?? SynchronizationContext.Current;

    if (MaxProgressValue == null)
    {
        return; // Indeterminate busy state: no progress bar, no updater.
    }

    ProgressPercentage = 0;
    this.MaxProgressValue = MaxProgressValue.Value;
    ProgressUpdater = new ProgressUpdater(MaxProgressValue.Value, (percentage, timeRemaining) =>
    {
        bool alreadyOnUiThread = (this.UIContext == null) || (this.UIContext == SynchronizationContext.Current);
        if (alreadyOnUiThread)
        {
            ProgressPercentage = percentage;
            TimeRemaining = timeRemaining;
        }
        else
        {
            // Called from a worker thread: post the property updates to the UI thread.
            this.UIContext.Post(_ =>
            {
                ProgressPercentage = percentage;
                TimeRemaining = timeRemaining;
            }, null);
        }
    });
}
// Populates the configuration form from the persisted database connection
// settings and wires up the message / progress callback singletons.
void Init()
{
    // Database configuration info: initialization connection.
    if (null != DBConnectionRoot.InitDBConnection)
    {
        this.txtInitDbServer.Text = DBConnectionRoot.InitDBConnection.DataServer;
        this.txtInitDbName.Text = DBConnectionRoot.InitDBConnection.DataBase;
        this.txtInitDbAccount.Text = DBConnectionRoot.InitDBConnection.LoginAccount;
        this.txtInitDbPwd.Text = DBConnectionRoot.InitDBConnection.Password;
    }
    // Database configuration info: download-source connection.
    if (null != DBConnectionRoot.DownloadSourceDBConnection)
    {
        this.txtDownDbServer.Text = DBConnectionRoot.DownloadSourceDBConnection.DataServer;
        this.txtDownDbName.Text = DBConnectionRoot.DownloadSourceDBConnection.DataBase;
        this.txtDownDbAccount.Text = DBConnectionRoot.DownloadSourceDBConnection.LoginAccount;
        this.txtDownDbPwd.Text = DBConnectionRoot.DownloadSourceDBConnection.Password;
    }
    // Bind the data-initialization-type dropdown (combInitType).
    // A "please select" placeholder is inserted as the first item.
    var list = EnumExtensions.GetNameDescription(typeof(DataInitType)).ToList();
    list.Insert(0, new KeyValuePair<string, string>("0", "请选择"));
    // NOTE(review): KeyValuePair's properties are "Key"/"Value" (capitalized);
    // WinForms binding-member lookup is usually case-insensitive, so "key"/"value"
    // presumably resolves — confirm this binds correctly at runtime.
    this.combInitType.ValueMember = "key";
    this.combInitType.DisplayMember = "value";
    this.combInitType.DataSource = list;
    // Register this form as the sink for log messages and progress updates.
    writerDelegate = WriteMessage;
    MsgWriter.InitInstance(this, writerDelegate);
    progressDelegate = UpdateProgress;
    ProgressUpdater.InitInstance(this, progressDelegate);
}
/// <summary>
/// View model for a single file transfer. Subscribes to the underlying model's
/// property changes and owns the progress updater for this transfer.
/// </summary>
public OneFileTransferViewModel(FileTransfersViewModel fileTransfersViewModel, OneFileTransferModel oneFileTransferModel)
{
    _fileTransfersViewModel = fileTransfersViewModel;
    _oneFileTransferModel = oneFileTransferModel;
    // Create the updater before subscribing, so a change notification can never
    // observe a null updater.
    _progressUpdater = new ProgressUpdater(this);
    _oneFileTransferModel.PropertyChanged += ModelPropertyChangedHandler;
}
/// <summary>
/// Flashes a byte buffer to the device over the serial protocol in 0x400-byte
/// chunks. Each command frame is: opcode 0x07, 4-byte little-endian byte
/// position, then up to 0x400 payload bytes; the expected response echoes the
/// position after opcode 0x08.
/// </summary>
/// <param name="StartInBytes">Destination start position, in bytes.</param>
/// <param name="Data">Source buffer.</param>
/// <param name="ProgressUpdateCallback">Optional progress callback; used only when <paramref name="UpdaterPerSector"/> is null.</param>
/// <param name="UpdaterPerSector">Optional shared updater advanced once per flashed sector.</param>
/// <param name="OffsetInBytes">Offset into <paramref name="Data"/> to start from.</param>
/// <param name="LengthInBytes">Number of bytes to flash; UInt32.MaxValue means "rest of the buffer".</param>
public void Flash(UInt32 StartInBytes, byte[] Data, Action<int, TimeSpan?> ProgressUpdateCallback, ProgressUpdater UpdaterPerSector, UInt32 OffsetInBytes = 0, UInt32 LengthInBytes = UInt32.MaxValue)
{
    long RemainingBytes;
    if (OffsetInBytes > (Data.Length - 1))
    {
        throw new ArgumentException("Wrong offset");
    }
    // Clamp the requested length to what is actually available after the offset.
    RemainingBytes = (LengthInBytes == UInt32.MaxValue) || (LengthInBytes > (Data.Length - OffsetInBytes)) ? Data.Length - OffsetInBytes : LengthInBytes;
    UInt32 CurrentLength;
    UInt32 CurrentOffset = OffsetInBytes;
    byte[] Buffer = new byte[0x405]; // 1 opcode + 4 position + 0x400 payload
    byte[] ResponsePattern = new byte[5];
    byte[] FinalCommand;
    Buffer[0] = 0x07;          // flash-chunk command opcode
    ResponsePattern[0] = 0x08; // expected acknowledge opcode
    UInt32 CurrentPosition = StartInBytes;
    // Prefer the caller-supplied per-sector updater; otherwise build one around
    // the plain callback, sized by the total sector count.
    ProgressUpdater Progress = UpdaterPerSector;
    if ((Progress == null) && (ProgressUpdateCallback != null))
    {
        Progress = new ProgressUpdater(GetSectorCount((UInt64)RemainingBytes), ProgressUpdateCallback);
    }
    while (RemainingBytes > 0)
    {
        System.Buffer.BlockCopy(BitConverter.GetBytes(CurrentPosition), 0, Buffer, 1, 4); // Start position is in bytes and in Little Endian (on Samsung phones the start position is in Sectors!!)
        System.Buffer.BlockCopy(BitConverter.GetBytes(CurrentPosition), 0, ResponsePattern, 1, 4); // The response must echo the same position.
        CurrentLength = RemainingBytes >= 0x400 ? 0x400 : (UInt32)RemainingBytes;
        System.Buffer.BlockCopy(Data, (int)CurrentOffset, Buffer, 5, (int)CurrentLength);
        if (CurrentLength < 0x400)
        {
            // Final short chunk: send a frame trimmed to the actual payload size.
            FinalCommand = new byte[CurrentLength + 5];
            System.Buffer.BlockCopy(Buffer, 0, FinalCommand, 0, (int)CurrentLength + 5);
        }
        else
        {
            FinalCommand = Buffer;
        }
        Serial.SendCommand(FinalCommand, ResponsePattern);
        CurrentPosition += CurrentLength;
        CurrentOffset += CurrentLength;
        RemainingBytes -= CurrentLength;
        Progress?.IncreaseProgress(GetSectorCount(CurrentLength));
    }
}
/// <summary>
/// BackgroundWorker entry point: imports an EPUB into a project, synthesizes
/// audio for every sentence, saves the project, and publishes the result
/// through the progress updater.
/// </summary>
/// <param name="sender">The <see cref="BackgroundWorker"/> running this job.</param>
/// <param name="e">Worker event args, forwarded to the progress updater.</param>
private void bw_DoWork(object sender, DoWorkEventArgs e)
{
    ProjectInfo projInfo = null;
    try
    {
        _ProgressUpdater = new ProgressUpdater(sender as BackgroundWorker, e);
        projInfo = new ProjectInfo(epubPath, projPath);
        Epub epubFile = projInfo.EpubFile;

        // Phase 1: import every content page into the project.
        Console.WriteLine("Total Content Pages: " + epubFile.Content.Count);
        _ProgressUpdater.Initialize(epubFile.Content.Count);
        foreach (ContentData cData in epubFile.Content.Values)
        {
            int id = projInfo.Contents.Count;
            Content content = new Content(id, cData, projInfo);
            projInfo.AddContent(content);
            _ProgressUpdater.Increment();
        }

        // Phase 2: synthesize audio, one progress tick per sentence.
        int totalSentences = projInfo.TotalSentences;
        Console.WriteLine("Total Sentences: " + totalSentences);
        _ProgressUpdater.Initialize(totalSentences);
        foreach (Content content in projInfo.Contents)
        {
            foreach (Block block in content.Blocks)
            {
                foreach (Sentence sentence in block.Sentences)
                {
                    sentence.Synthesize();
                    _ProgressUpdater.Increment();
                }
            }
        }

        projInfo.Save();
        _ProgressUpdater.Result = new Tuple<String, ProjectInfo, int>(epubPath, projInfo, totalSentences);
    }
    // Fix: the original `catch (Exception ex) { throw ex; }` reset the stack
    // trace on rethrow. Letting the exception propagate naturally (try/finally)
    // preserves the original trace; the finally cleanup still runs either way.
    finally
    {
        // NOTE(review): projInfo is disposed even on success while Result still
        // references it — presumably Dispose only releases transient resources;
        // confirm against ProjectInfo's implementation.
        if (projInfo != null)
        {
            projInfo.Dispose();
        }
    }
}
/// <summary>
/// Clones <paramref name="src"/> into the dataset addressed by <paramref name="dstUri"/>.
/// </summary>
/// <param name="src">Original dataset to clone.</param>
/// <param name="dstUri">URI of the destination dataset.</param>
/// <param name="updater">Delegate accepting progress update notifications.</param>
/// <returns>New instance of the <see cref="DataSet"/> class.</returns>
/// <remarks>
/// The original dataset is copied in parts, so very large datasets that do not
/// fit into memory can still be cloned.
/// </remarks>
public static DataSet Clone(DataSet src, DataSetUri dstUri, ProgressUpdater updater)
{
    DataSet dst = null;
    try
    {
        dst = DataSet.Open(dstUri);
        return Clone(src, dst, updater);
    }
    catch
    {
        // Cloning (or opening) failed: release the partially created destination.
        dst?.Dispose();
        throw;
    }
}
/// <summary>
/// Writes the contents of a file to the volume starting at the given sector,
/// in chunks of up to 0x10000 bytes (0x80 sectors of 0x200 bytes).
/// Opens the volume for writing if it is not already open, and restores the
/// previous open-state afterwards.
/// </summary>
/// <param name="StartSector">First sector to write to.</param>
/// <param name="Path">Path of the input file.</param>
/// <param name="ProgressUpdateCallback">Optional progress callback; used only when <paramref name="UpdaterPerSector"/> is null.</param>
/// <param name="UpdaterPerSector">Optional shared updater advanced once per written sector.</param>
private void WriteSectors(UInt64 StartSector, string Path, Action<int, TimeSpan?> ProgressUpdateCallback, ProgressUpdater UpdaterPerSector)
{
    bool VolumeWasOpen = IsVolumeOpen();
    if (!VolumeWasOpen)
    {
        OpenVolume(true); // true: open for writing
    }
    SetSectorPosition(StartSector);
    byte[] Buffer;
    using (BinaryReader Reader = new BinaryReader(File.Open(Path, FileMode.Open)))
    {
        ProgressUpdater Progress = UpdaterPerSector;
        if ((Progress == null) && (ProgressUpdateCallback != null))
        {
            Progress = new ProgressUpdater((UInt64)(Reader.BaseStream.Length / 0x200), ProgressUpdateCallback);
        }
        // One 64 KiB buffer, or a smaller one for files below 64 KiB.
        if (Reader.BaseStream.Length >= 0x10000)
        {
            Buffer = new byte[0x10000];
        }
        else
        {
            Buffer = new byte[Reader.BaseStream.Length];
        }
        int Count;
        // Loop counts in sectors; each iteration consumes up to 0x80 sectors.
        // NOTE(review): a file length that is not a multiple of 0x200 leaves the
        // tail bytes implicit in the last (partial) sector write — confirm inputs
        // are always sector-aligned.
        for (UInt64 i = 0; i < (UInt64)(Reader.BaseStream.Length / 0x200); i += 0x80)
        {
            Count = Reader.Read(Buffer, 0, Buffer.Length);
            WriteSectors(Buffer, (uint)Count);
            if (Progress != null)
            {
                Progress.IncreaseProgress((ulong)Count / 0x200);
            }
        }
    }
    if (!VolumeWasOpen)
    {
        CloseVolume();
    }
}
/// <summary>
/// Flashes the remainder of a stream to the device over the serial protocol in
/// 0x400-byte chunks. Frame layout: opcode 0x07, 4-byte little-endian byte
/// position, then payload; the expected response echoes the position after
/// opcode 0x08.
/// </summary>
/// <param name="StartInBytes">Destination start position, in bytes.</param>
/// <param name="Data">Source stream; flashing starts at its current position.</param>
/// <param name="ProgressUpdateCallback">Optional progress callback; used only when <paramref name="UpdaterPerSector"/> is null.</param>
/// <param name="UpdaterPerSector">Optional shared updater advanced once per flashed sector.</param>
/// <param name="LengthInBytes">Number of bytes to flash; UInt32.MaxValue means "rest of the stream".</param>
public void Flash(UInt32 StartInBytes, Stream Data, Action<int, TimeSpan?> ProgressUpdateCallback, ProgressUpdater UpdaterPerSector, UInt32 LengthInBytes = UInt32.MaxValue)
{
    // Clamp the requested length to what remains in the stream.
    long Remaining = (LengthInBytes == UInt32.MaxValue) || (LengthInBytes > (Data.Length - Data.Position)) ? Data.Length - Data.Position : LengthInBytes;
    UInt32 CurrentLength;
    byte[] Buffer = new byte[0x405]; // 1 opcode + 4 position + 0x400 payload
    byte[] ResponsePattern = new byte[5];
    byte[] FinalCommand;
    Buffer[0] = 0x07;          // flash-chunk command opcode
    ResponsePattern[0] = 0x08; // expected acknowledge opcode
    UInt32 CurrentPosition = StartInBytes;
    ProgressUpdater Progress = UpdaterPerSector;
    if ((Progress == null) && (ProgressUpdateCallback != null))
    {
        Progress = new ProgressUpdater(GetSectorCount((UInt64)Remaining), ProgressUpdateCallback);
    }
    while (Remaining > 0)
    {
        System.Buffer.BlockCopy(BitConverter.GetBytes(CurrentPosition), 0, Buffer, 1, 4); // Start is in bytes and in Little Endian (on Samsung devices start is in sectors!)
        System.Buffer.BlockCopy(BitConverter.GetBytes(CurrentPosition), 0, ResponsePattern, 1, 4); // The response must echo the same position.
        CurrentLength = Remaining >= 0x400 ? 0x400 : (UInt32)Remaining;
        // Stream.Read may return fewer bytes than requested; the actual count is
        // what gets framed, sent, and counted against Remaining.
        CurrentLength = (uint)Data.Read(Buffer, 5, (int)CurrentLength);
        if (CurrentLength < 0x400)
        {
            // Short chunk (tail or partial read): trim the frame to the payload.
            FinalCommand = new byte[CurrentLength + 5];
            System.Buffer.BlockCopy(Buffer, 0, FinalCommand, 0, (int)CurrentLength + 5);
        }
        else
        {
            FinalCommand = Buffer;
        }
        Serial.SendCommand(FinalCommand, ResponsePattern);
        CurrentPosition += CurrentLength;
        Remaining -= CurrentLength;
        Progress?.IncreaseProgress(GetSectorCount(CurrentLength));
    }
}
/// <summary>
/// Clones <paramref name="src"/> into the dataset addressed by <paramref name="dstUri"/>.
/// </summary>
/// <param name="src">Original dataset to clone.</param>
/// <param name="dstUri">URI of the destination dataset.</param>
/// <param name="updater">Delegate accepting progress update notifications.</param>
/// <returns>New instance of the <see cref="DataSet"/> class.</returns>
/// <remarks>
/// The original dataset is copied in parts, so very large datasets that do not
/// fit into memory can still be cloned.
/// </remarks>
public static DataSet Clone(DataSet src, DataSetUri dstUri, ProgressUpdater updater)
{
    DataSet destination = DataSet.Open(dstUri);
    try
    {
        return Clone(src, destination, updater);
    }
    catch
    {
        // The destination was opened but cloning failed: dispose it before
        // letting the exception propagate.
        destination.Dispose();
        throw;
    }
}
/// <summary>
/// Dumps a sector range from the volume to a file or to a caller-supplied
/// stream, reading up to 0x80 sectors (64 KiB) per iteration.
/// Opens the volume if needed and restores the previous open-state afterwards.
/// </summary>
/// <param name="StartSector">First sector to read.</param>
/// <param name="SectorCount">Number of sectors to dump.</param>
/// <param name="Path">Output file path; when null, <paramref name="OutputStream"/> is used instead.</param>
/// <param name="OutputStream">Destination stream, used only when <paramref name="Path"/> is null.</param>
/// <param name="ProgressUpdateCallback">Optional progress callback; used only when <paramref name="UpdaterPerSector"/> is null.</param>
/// <param name="UpdaterPerSector">Optional shared updater advanced once per dumped sector.</param>
private void DumpSectors(UInt64 StartSector, UInt64 SectorCount, string Path, Stream OutputStream, Action<int, TimeSpan?> ProgressUpdateCallback, ProgressUpdater UpdaterPerSector)
{
    bool VolumeWasOpen = IsVolumeOpen();
    if (!VolumeWasOpen)
    {
        OpenVolume(false); // false: read access is sufficient for dumping
    }
    SetSectorPosition(StartSector);
    ProgressUpdater Progress = UpdaterPerSector;
    if ((Progress == null) && (ProgressUpdateCallback != null))
    {
        Progress = new ProgressUpdater(SectorCount, ProgressUpdateCallback);
    }
    // 64 KiB buffer, or exactly SectorCount sectors for small dumps.
    byte[] Buffer = SectorCount >= 0x80 ? (new byte[0x10000]) : (new byte[SectorCount * 0x200]);
    Stream Stream = Path == null ? OutputStream : File.Open(Path, FileMode.Create);
    // NOTE(review): BinaryWriter disposes its underlying stream, so when Path is
    // null the caller's OutputStream is closed here as well — confirm callers
    // expect that (BackupArchiveTask closes its entry stream again afterwards).
    using (BinaryWriter Writer = new(Stream))
    {
        for (UInt64 i = 0; i < SectorCount; i += 0x80)
        {
            // TODO: Reading sectors and writing to compressed stream should be on different threads.
            // Backup of 3 partitions without compression takes about 40 minutes.
            // Backup of same partitions with compression takes about 70 minutes.
            // Separation reading and compression could potentially speed up a lot.
            // BinaryWriter doesnt support async.
            // Calling async directly on the EntryStream of a Ziparchive blocks.
            ReadSectors(Buffer, out uint ActualSectorsRead, (SectorCount - i) >= 0x80 ? 0x80 : (uint)(SectorCount - i));
            Writer.Write(Buffer, 0, (int)ActualSectorsRead * 0x200);
            Progress?.IncreaseProgress(ActualSectorsRead);
        }
        Stream.Flush();
    }
    if (!VolumeWasOpen)
    {
        CloseVolume();
    }
}
/// <summary>
/// Demo handler: shows the progress dialog for a fixed duration while a
/// background task drives its updater through 100 steps.
/// </summary>
private async void MenuItem_Dialogs_Progress_Click(object sender, RoutedEventArgs e)
{
    const int msTime = 3000;
    _ProgressDialog.Show($"Showing for {msTime} milliseconds", null, 0);
    ProgressUpdater updater = _ProgressDialog.Updater;

    await Task.Run(async () =>
    {
        const int steps = 100;
        int msPerStep = (int)Math.Round(msTime / (double)steps);
        updater.StartNewLoop(steps);
        for (int step = 0; step < steps; ++step)
        {
            updater.SetForLoop(step, true);
            await Task.Delay(msPerStep);
        }
    });

    _ProgressDialog.Hide();
}
/// <summary>
/// Dumps the EFIESP, MainOS and/or Data partitions from an FFU image to files,
/// on a dedicated worker thread. A null path skips that partition. Sizes are
/// summed first so one shared progress bar covers all requested partitions.
/// </summary>
/// <param name="FFUPath">Path of the FFU image to read from.</param>
/// <param name="EFIESPPath">Destination for the EFIESP partition, or null to skip.</param>
/// <param name="CompressEFIESP">Whether to compress the EFIESP dump.</param>
/// <param name="MainOSPath">Destination for the MainOS partition, or null to skip.</param>
/// <param name="CompressMainOS">Whether to compress the MainOS dump.</param>
/// <param name="DataPath">Destination for the Data partition, or null to skip.</param>
/// <param name="CompressData">Whether to compress the Data dump.</param>
internal void DoDumpRom(string FFUPath, string EFIESPPath, bool CompressEFIESP, string MainOSPath, bool CompressMainOS, string DataPath, bool CompressData)
{
    new Thread(() =>
    {
        bool Result = true;
        ActivateSubContext(new BusyViewModel("Initializing ROM dump..."));

        // Pre-pass: total up the sectors of every requested partition so the
        // progress bar spans the whole dump.
        ulong TotalSizeSectors = 0;
        int PartitionCount = 0;
        Partition Partition;
        FFU FFU = null;
        try
        {
            FFU = new FFU(FFUPath);
            if (EFIESPPath != null)
            {
                Partition = FFU.GPT.Partitions.First(p => p.Name == "EFIESP");
                TotalSizeSectors += Partition.SizeInSectors;
                PartitionCount++;
            }
            if (MainOSPath != null)
            {
                Partition = FFU.GPT.Partitions.First(p => p.Name == "MainOS");
                TotalSizeSectors += Partition.SizeInSectors;
                PartitionCount++;
            }
            if (DataPath != null)
            {
                Partition = FFU.GPT.Partitions.First(p => p.Name == "Data");
                TotalSizeSectors += Partition.SizeInSectors;
                PartitionCount++;
            }
        }
        catch (Exception Ex)
        {
            LogFile.LogException(Ex);
            Result = false;
        }

        // We are on a worker thread!
        // So we must pass the SynchronizationContext of the UI thread
        BusyViewModel Busy = new("Dumping ROM...", MaxProgressValue: TotalSizeSectors, UIContext: UIContext);
        ProgressUpdater Updater = Busy.ProgressUpdater;
        ActivateSubContext(Busy);

        // Dump each requested partition in turn; any failure aborts the rest.
        int i = 0;
        if (Result)
        {
            try
            {
                if (EFIESPPath != null)
                {
                    i++;
                    Busy.Message = "Dumping partition EFIESP (" + i.ToString() + "/" + PartitionCount.ToString() + ")";
                    FFU.WritePartition("EFIESP", EFIESPPath, Updater, CompressEFIESP);
                }
            }
            catch (Exception Ex)
            {
                LogFile.LogException(Ex);
                Result = false;
            }
        }
        if (Result)
        {
            try
            {
                if (MainOSPath != null)
                {
                    i++;
                    Busy.Message = "Dumping partition MainOS (" + i.ToString() + "/" + PartitionCount.ToString() + ")";
                    FFU.WritePartition("MainOS", MainOSPath, Updater, CompressMainOS);
                }
            }
            catch (Exception Ex)
            {
                LogFile.LogException(Ex);
                Result = false;
            }
        }
        if (Result)
        {
            try
            {
                if (DataPath != null)
                {
                    i++;
                    Busy.Message = "Dumping partition Data (" + i.ToString() + "/" + PartitionCount.ToString() + ")";
                    FFU.WritePartition("Data", DataPath, Updater, CompressData);
                }
            }
            catch (Exception Ex)
            {
                LogFile.LogException(Ex);
                Result = false;
            }
        }

        if (!Result)
        {
            ActivateSubContext(new MessageViewModel("Failed to dump ROM partitions!", Restart));
            return;
        }
        ActivateSubContext(new MessageViewModel("Successfully dumped ROM partitions!", Restart));
    }).Start();
}
/// <summary>
/// Flashes raw partition images (EFIESP / MainOS / Data) to the phone on a
/// worker thread. A null path skips that partition. Before flashing, the
/// decompressed image sizes are validated against the GPT; MainOS and Data may
/// be re-aligned (resized) together if the combined images still fit in the
/// space from MainOS's first sector to the last usable sector.
/// </summary>
/// <param name="EFIESPPath">Path of the EFIESP image, or null to skip.</param>
/// <param name="MainOSPath">Path of the MainOS image, or null to skip.</param>
/// <param name="DataPath">Path of the Data image, or null to skip.</param>
internal void FlashPartitionsTask(string EFIESPPath, string MainOSPath, string DataPath)
{
    new Thread(() =>
    {
        bool Result = true;
        ActivateSubContext(new BusyViewModel("Initializing flash..."));
        NokiaFlashModel Phone = (NokiaFlashModel)PhoneNotifier.CurrentModel;
        GPT GPT = Phone.ReadGPT();

        // Pre-pass: measure each (decompressed) image and record old/new sector
        // counts for the MainOS/Data re-alignment decision below.
        ulong TotalSizeSectors = 0;
        int PartitionCount = 0;
        ulong MainOSOldSectorCount = 0;
        ulong MainOSNewSectorCount = 0;
        ulong DataOldSectorCount = 0;
        ulong DataNewSectorCount = 0;
        ulong FirstMainOSSector = 0;
        try
        {
            if (EFIESPPath != null)
            {
                using (Stream Stream = new DecompressedStream(File.Open(EFIESPPath, FileMode.Open)))
                {
                    ulong StreamLengthInSectors = (ulong)Stream.Length / 0x200;
                    TotalSizeSectors += StreamLengthInSectors;
                    PartitionCount++;
                    Partition Partition = GPT.Partitions.Where(p => string.Compare(p.Name, "EFIESP", true) == 0).FirstOrDefault();
                    // EFIESP cannot be resized: the image must fit as-is.
                    if (StreamLengthInSectors > Partition.SizeInSectors)
                    {
                        LogFile.Log("Flash failed! Size of partition 'EFIESP' is too big.");
                        ExitFailure("Flash failed!", "Size of partition 'EFIESP' is too big.");
                        return;
                    }
                }
            }
            if (MainOSPath != null)
            {
                using (Stream Stream = new DecompressedStream(File.Open(MainOSPath, FileMode.Open)))
                {
                    ulong StreamLengthInSectors = (ulong)Stream.Length / 0x200;
                    TotalSizeSectors += StreamLengthInSectors;
                    PartitionCount++;
                    Partition Partition = GPT.Partitions.Where(p => string.Compare(p.Name, "MainOS", true) == 0).FirstOrDefault();
                    MainOSOldSectorCount = Partition.SizeInSectors;
                    MainOSNewSectorCount = StreamLengthInSectors;
                    FirstMainOSSector = Partition.FirstSector;
                }
            }
            if (DataPath != null)
            {
                using (Stream Stream = new DecompressedStream(File.Open(DataPath, FileMode.Open)))
                {
                    ulong StreamLengthInSectors = (ulong)Stream.Length / 0x200;
                    TotalSizeSectors += StreamLengthInSectors;
                    PartitionCount++;
                    Partition Partition = GPT.Partitions.Where(p => string.Compare(p.Name, "Data", true) == 0).FirstOrDefault();
                    DataOldSectorCount = Partition.SizeInSectors;
                    DataNewSectorCount = StreamLengthInSectors;
                }
            }
        }
        catch (Exception Ex)
        {
            LogFile.LogException(Ex);
            Result = false;
        }

        // Size validation / re-alignment. When both MainOS and Data are flashed
        // they may be resized together, as long as their combined size fits the
        // space between MainOS's first sector and the last usable sector.
        if ((MainOSNewSectorCount > 0) && (DataNewSectorCount > 0))
        {
            if ((MainOSNewSectorCount > MainOSOldSectorCount) || (DataNewSectorCount > DataOldSectorCount))
            {
                UInt64 OSSpace = GPT.LastUsableSector - FirstMainOSSector + 1;
                if ((MainOSNewSectorCount + DataNewSectorCount) <= OSSpace)
                {
                    // MainOS and Data partitions need to be re-aligned!
                    Partition MainOSPartition = GPT.Partitions.Where(p => string.Compare(p.Name, "MainOS", true) == 0).Single();
                    Partition DataPartition = GPT.Partitions.Where(p => string.Compare(p.Name, "Data", true) == 0).Single();
                    MainOSPartition.LastSector = MainOSPartition.FirstSector + MainOSNewSectorCount - 1;
                    DataPartition.FirstSector = MainOSPartition.LastSector + 1;
                    DataPartition.LastSector = DataPartition.FirstSector + DataNewSectorCount - 1;
                    Phone.WriteGPT(GPT);
                }
                else
                {
                    LogFile.Log("Flash failed! Size of partitions 'MainOS' and 'Data' together are too big.");
                    ExitFailure("Flash failed!", "Sizes of partitions 'MainOS' and 'Data' together are too big.");
                    return;
                }
            }
        }
        else if ((MainOSNewSectorCount > 0) && (MainOSNewSectorCount > MainOSOldSectorCount))
        {
            LogFile.Log("Flash failed! Size of partition 'MainOS' is too big.");
            ExitFailure("Flash failed!", "Size of partition 'MainOS' is too big.");
            return;
        }
        else if ((DataNewSectorCount > 0) && (DataNewSectorCount > DataOldSectorCount))
        {
            LogFile.Log("Flash failed! Size of partition 'Data' is too big.");
            // NOTE(review): the word "together" in this user-facing message looks
            // like a copy/paste leftover from the combined-size message above.
            ExitFailure("Flash failed!", "Size of partition 'Data' together is too big.");
            return;
        }

        BusyViewModel Busy = new BusyViewModel("Flashing...", MaxProgressValue: TotalSizeSectors, UIContext: UIContext);
        ProgressUpdater Updater = Busy.ProgressUpdater;
        ActivateSubContext(Busy);

        // Flash each requested partition in turn; any failure aborts the rest.
        int i = 0;
        if (Result)
        {
            try
            {
                if (EFIESPPath != null)
                {
                    i++;
                    Busy.Message = "Flashing partition EFIESP (" + i.ToString() + @"/" + PartitionCount.ToString() + ")";
                    Phone.FlashRawPartition(EFIESPPath, "EFIESP", Updater);
                }
            }
            catch (Exception Ex)
            {
                LogFile.LogException(Ex);
                Result = false;
            }
        }
        if (Result)
        {
            try
            {
                if (MainOSPath != null)
                {
                    i++;
                    Busy.Message = "Flashing partition MainOS (" + i.ToString() + @"/" + PartitionCount.ToString() + ")";
                    Phone.FlashRawPartition(MainOSPath, "MainOS", Updater);
                }
            }
            catch (Exception Ex)
            {
                LogFile.LogException(Ex);
                Result = false;
            }
        }
        if (Result)
        {
            try
            {
                if (DataPath != null)
                {
                    i++;
                    Busy.Message = "Flashing partition Data (" + i.ToString() + @"/" + PartitionCount.ToString() + ")";
                    Phone.FlashRawPartition(DataPath, "Data", Updater);
                }
            }
            catch (Exception Ex)
            {
                LogFile.LogException(Ex);
                Result = false;
            }
        }

        if (!Result)
        {
            ExitFailure("Flash failed!", null);
            return;
        }
        ExitSuccess("Flash successful! Make sure you disable Windows Update on the phone!", null);
    }).Start();
}
/// <summary>
/// Flashes the partitions contained in a zip archive to the phone, on a worker
/// thread. Archive entries are matched to GPT partitions by file name (e.g.
/// "Data.bin.gz" -> "Data"); an optional "Partitions.xml" entry describes a
/// partition layout to merge into the GPT first. MainOS and Data may be
/// re-aligned together when their new sizes still fit the usable space.
/// </summary>
/// <param name="ArchivePath">Path of the zip archive with partition images.</param>
internal void FlashArchiveTask(string ArchivePath)
{
    new Thread(() =>
    {
        ActivateSubContext(new BusyViewModel("Initializing flash..."));
        NokiaFlashModel Phone = (NokiaFlashModel)PhoneNotifier.CurrentModel;
        ulong TotalSizeSectors = 0;
        int PartitionCount = 0;
        ulong MainOSOldSectorCount = 0;
        ulong MainOSNewSectorCount = 0;
        ulong DataOldSectorCount = 0;
        ulong DataNewSectorCount = 0;
        ulong FirstMainOSSector = 0;
        bool GPTChanged = false;
        try
        {
            GPT GPT = Phone.ReadGPT();
            using (FileStream FileStream = new FileStream(ArchivePath, FileMode.Open))
            {
                using (ZipArchive Archive = new ZipArchive(FileStream, ZipArchiveMode.Read))
                {
                    // Pre-pass over the archive: merge any partition layout and
                    // total up the (decompressed) image sizes.
                    foreach (ZipArchiveEntry Entry in Archive.Entries)
                    {
                        // Determine if there is a partition layout present
                        // NOTE(review): this merge executes once per archive entry;
                        // it looks like it was meant to run once before the loop —
                        // verify MergePartitions is idempotent.
                        ZipArchiveEntry PartitionEntry = Archive.GetEntry("Partitions.xml");
                        if (PartitionEntry == null)
                        {
                            GPT.MergePartitions(null, false, Archive);
                            GPTChanged |= GPT.HasChanged;
                        }
                        else
                        {
                            using (Stream ZipStream = PartitionEntry.Open())
                            {
                                using (StreamReader ZipReader = new StreamReader(ZipStream))
                                {
                                    string PartitionXml = ZipReader.ReadToEnd();
                                    GPT.MergePartitions(PartitionXml, false, Archive);
                                    GPTChanged |= GPT.HasChanged;
                                }
                            }
                        }
                        // First determine if we need a new GPT!
                        if (!Entry.FullName.Contains("/")) // No subfolders
                        {
                            string PartitionName = System.IO.Path.GetFileNameWithoutExtension(Entry.Name);
                            int P = PartitionName.IndexOf('.');
                            if (P >= 0)
                            {
                                PartitionName = PartitionName.Substring(0, P); // Example: Data.bin.gz -> Data
                            }
                            Partition Partition = GPT.Partitions.Where(p => string.Compare(p.Name, PartitionName, true) == 0).FirstOrDefault();
                            if (Partition != null)
                            {
                                DecompressedStream DecompressedStream = new DecompressedStream(Entry.Open());
                                // Fall back to the compressed entry length when the
                                // decompressed length cannot be determined.
                                ulong StreamLengthInSectors = (ulong)Entry.Length / 0x200;
                                try
                                {
                                    StreamLengthInSectors = (ulong)DecompressedStream.Length / 0x200;
                                }
                                catch { }
                                TotalSizeSectors += StreamLengthInSectors;
                                PartitionCount++;
                                if (string.Compare(PartitionName, "MainOS", true) == 0)
                                {
                                    MainOSOldSectorCount = Partition.SizeInSectors;
                                    MainOSNewSectorCount = StreamLengthInSectors;
                                    FirstMainOSSector = Partition.FirstSector;
                                }
                                else if (string.Compare(PartitionName, "Data", true) == 0)
                                {
                                    DataOldSectorCount = Partition.SizeInSectors;
                                    DataNewSectorCount = StreamLengthInSectors;
                                }
                                else if (StreamLengthInSectors > Partition.SizeInSectors)
                                {
                                    // Other partitions cannot be resized: fail fast.
                                    LogFile.Log("Flash failed! Size of partition '" + PartitionName + "' is too big.");
                                    ExitFailure("Flash failed!", "Size of partition '" + PartitionName + "' is too big.");
                                    return;
                                }
                            }
                        }
                    }

                    // Size validation / re-alignment of MainOS + Data (same rules
                    // as FlashPartitionsTask, but the GPT write is deferred).
                    if ((MainOSNewSectorCount > 0) && (DataNewSectorCount > 0))
                    {
                        if ((MainOSNewSectorCount > MainOSOldSectorCount) || (DataNewSectorCount > DataOldSectorCount))
                        {
                            UInt64 OSSpace = GPT.LastUsableSector - FirstMainOSSector + 1;
                            if ((MainOSNewSectorCount + DataNewSectorCount) <= OSSpace)
                            {
                                // MainOS and Data partitions need to be re-aligned!
                                Partition MainOSPartition = GPT.Partitions.Where(p => string.Compare(p.Name, "MainOS", true) == 0).Single();
                                Partition DataPartition = GPT.Partitions.Where(p => string.Compare(p.Name, "Data", true) == 0).Single();
                                MainOSPartition.LastSector = MainOSPartition.FirstSector + MainOSNewSectorCount - 1;
                                DataPartition.FirstSector = MainOSPartition.LastSector + 1;
                                DataPartition.LastSector = DataPartition.FirstSector + DataNewSectorCount - 1;
                                GPTChanged = true;
                            }
                            else
                            {
                                LogFile.Log("Flash failed! Size of partitions 'MainOS' and 'Data' together are too big.");
                                ExitFailure("Flash failed!", "Sizes of partitions 'MainOS' and 'Data' together are too big.");
                                return;
                            }
                        }
                    }
                    else if ((MainOSNewSectorCount > 0) && (MainOSNewSectorCount > MainOSOldSectorCount))
                    {
                        LogFile.Log("Flash failed! Size of partition 'MainOS' is too big.");
                        ExitFailure("Flash failed!", "Size of partition 'MainOS' is too big.");
                        return;
                    }
                    else if ((DataNewSectorCount > 0) && (DataNewSectorCount > DataOldSectorCount))
                    {
                        LogFile.Log("Flash failed! Size of partition 'Data' is too big.");
                        ExitFailure("Flash failed!", "Size of partition 'Data' is too big.");
                        return;
                    }

                    if (GPTChanged)
                    {
                        Phone.WriteGPT(GPT);
                    }

                    if (PartitionCount > 0)
                    {
                        BusyViewModel Busy = new BusyViewModel("Flashing...", MaxProgressValue: TotalSizeSectors, UIContext: UIContext);
                        ProgressUpdater Updater = Busy.ProgressUpdater;
                        ActivateSubContext(Busy);
                        // Second pass: flash every entry that matches a partition
                        // and (still) fits into it.
                        int i = 0;
                        foreach (ZipArchiveEntry Entry in Archive.Entries)
                        {
                            // "MainOS.bin.gz" => "MainOS"
                            string PartitionName = Entry.Name;
                            int Pos = PartitionName.IndexOf('.');
                            if (Pos >= 0)
                            {
                                PartitionName = PartitionName.Substring(0, Pos);
                            }
                            Partition Partition = GPT.Partitions.Where(p => string.Compare(p.Name, PartitionName, true) == 0).FirstOrDefault();
                            if (Partition != null)
                            {
                                Stream DecompressedStream = new DecompressedStream(Entry.Open());
                                ulong StreamLengthInSectors = (ulong)Entry.Length / 0x200;
                                try
                                {
                                    StreamLengthInSectors = (ulong)DecompressedStream.Length / 0x200;
                                }
                                catch { }
                                if (StreamLengthInSectors <= Partition.SizeInSectors)
                                {
                                    i++;
                                    Busy.Message = "Flashing partition " + Partition.Name + " (" + i.ToString() + @"/" + PartitionCount.ToString() + ")";
                                    Phone.FlashRawPartition(DecompressedStream, Partition.Name, Updater);
                                }
                                DecompressedStream.Close();
                            }
                        }
                    }
                    else
                    {
                        LogFile.Log("Flash failed! No valid partitions found in the archive.");
                        ExitFailure("Flash failed!", "No valid partitions found in the archive");
                        return;
                    }
                }
            }
        }
        catch (Exception Ex)
        {
            LogFile.LogException(Ex);
            if (Ex is WPinternalsException)
            {
                ExitFailure("Flash failed!", ((WPinternalsException)Ex).SubMessage);
            }
            else
            {
                ExitFailure("Flash failed!", null);
            }
            return;
        }
        ExitSuccess("Flash successful! Make sure you disable Windows Update on the phone!", null);
    }).Start();
}
/// <summary>
/// Copies given dataset into another dataset.
/// </summary>
/// <param name="src">Original dataset to copy.</param>
/// <param name="dst">Destination dataset.</param>
/// <param name="updater">Delegate accepting progress update notifications.</param>
/// <returns>New instance of <see cref="DataSet"/> class.</returns>
/// <remarks>
/// This method splits the original dataset into parts and therefore is able
/// to clone very large datasets not fitting to memory.
/// The copy proceeds in three stages: (1) recreate structure, metadata and
/// scalar variables; (2) shrink per-dimension block sizes ("deltas") by halving
/// the largest until one block fits the memory budget; (3) copy non-scalar
/// variables block by block, advancing a multi-dimensional origin cursor.
/// </remarks>
public static DataSet Clone(DataSet src, DataSet dst, ProgressUpdater updater)
{
    if (src == null)
    {
        throw new ArgumentNullException("src");
    }
    if (dst == null)
    {
        throw new ArgumentNullException("dst");
    }
    if (dst.IsReadOnly)
    {
        throw new NotSupportedException("Destination DataSet is read-only");
    }

    // Maximum memory capacity in bytes for a single copied block.
    ulong N = 200 * 1024 * 1024;
    // Estimated size of a single string in bytes (strings have no fixed size).
    int sizeofString = 100 * 1024;

    /***********************************************************************************
     * Preparing output
     ***********************************************************************************/
    // Autocommit is disabled during the copy and restored at the end.
    bool isAutoCommit = dst.IsAutocommitEnabled;
    try
    {
        dst.IsAutocommitEnabled = false;
        DataSetSchema srcSchema = src.GetSchema();
        // Maps source variable IDs to the IDs of their clones in dst.
        Dictionary<int, int> IDs = new Dictionary<int, int>();

        // Creating empty variables and copying global metadata and scalar variables
        if (updater != null)
        {
            updater(0, "Creating structure and copying global metadata and scalar variables...");
        }
        VariableSchema globalMetadataVar = null;
        foreach (VariableSchema v in srcSchema.Variables)
        {
            if (v.ID == DataSet.GlobalMetadataVariableID)
            {
                globalMetadataVar = v;
                continue;
            }
            Variable t = dst.AddVariable(v.TypeOfData, v.Name, null, v.Dimensions.AsNamesArray());
            IDs.Add(v.ID, t.ID);
            foreach (var attr in v.Metadata)
            {
                t.Metadata[attr.Key] = attr.Value;
            }
            if (t.Rank == 0) // scalar: copy its value right away
            {
                t.PutData(src.Variables.GetByID(v.ID).GetData());
            }
        }
        if (globalMetadataVar != null)
        {
            // Copying global metadata
            foreach (var attr in globalMetadataVar.Metadata)
            {
                dst.Metadata[attr.Key] = attr.Value;
            }
        }
        dst.Commit();
        // Console.Out.WriteLine("Done.\n");

        /***********************************************************************************
         * Adjusting dimensions deltas
         ***********************************************************************************/
        // Start with deltas equal to full dimension lengths, then halve the
        // largest delta until one block of every variable fits into N bytes.
        Dimension[] srcDims = srcSchema.GetDimensions();
        Dictionary<string, int> deltas = new Dictionary<string, int>(srcDims.Length);
        foreach (var d in srcDims)
        {
            deltas[d.Name] = d.Length;
        }
        // Console.Out.WriteLine("Total memory capacity: " + (N / 1024.0 / 1024.0).ToString("F2") + " Mb");
        ulong totalSize;
        do
        {
            // Estimate the memory needed for one block of every variable.
            totalSize = 0;
            foreach (var var in srcSchema.Variables)
            {
                if (var.Rank == 0)
                {
                    continue; // scalar
                }
                int typeSize = SizeOf(var.TypeOfData, sizeofString);
                ulong count = 0;
                foreach (var vdim in var.Dimensions)
                {
                    int dimDelta = deltas[vdim.Name];
                    if (count == 0)
                    {
                        count = (ulong)dimDelta;
                    }
                    else
                    {
                        count *= (ulong)dimDelta;
                    }
                }
                totalSize += (ulong)typeSize * count;
            }
            if (totalSize > N)
            {
                // Halve the largest delta and re-estimate.
                string maxDim = null;
                int max = int.MinValue;
                foreach (var dim in deltas)
                {
                    if (dim.Value > max)
                    {
                        max = dim.Value;
                        maxDim = dim.Key;
                    }
                }
                if (maxDim == null || max <= 1)
                {
                    throw new NotSupportedException("Cannot copy the DataSet: it is too large to be copied entirely by the utility for the provided memory capacity");
                }
                deltas[maxDim] = max >> 1;
            }
        } while (totalSize > N);
        // Printing deltas
        if (updater != null)
        {
            updater(0, String.Format("Deltas for the dimensions adjusted (max iteration capacity: " + (totalSize / 1024.0 / 1024.0).ToString("F2") + " Mb)"));
        }

        /***********************************************************************************
         * Copying data
         ***********************************************************************************/
        // Console.WriteLine();
        if (updater != null)
        {
            updater(0, "Copying data ...");
        }
        Dictionary<int, int[]> origins = new Dictionary<int, int[]>(srcSchema.Variables.Length);
        Dictionary<int, int[]> shapes = new Dictionary<int, int[]>(srcSchema.Variables.Length);
        // Only non-scalar, non-metadata variables still need their data copied.
        List<VariableSchema> copyVars = srcSchema.Variables.Where(vs => (vs.Rank > 0 && vs.ID != DataSet.GlobalMetadataVariableID)).ToList();
        // Current origin (block start) per dimension, advanced like an odometer.
        Dictionary<string, int> dimOrigin = new Dictionary<string, int>(srcDims.Length);
        foreach (var d in srcDims)
        {
            dimOrigin[d.Name] = 0;
        }
        // Shortest dimensions are advanced first; the longest drives the
        // progress percentage below.
        Array.Sort(srcDims, (d1, d2) => d1.Length - d2.Length);
        int totalDims = srcDims.Length;
        do
        {
            // for each variable:
            for (int varIndex = copyVars.Count; --varIndex >= 0;)
            {
                VariableSchema var = copyVars[varIndex];
                bool hasChanged = false;
                // Getting its origin
                int[] origin;
                if (!origins.TryGetValue(var.ID, out origin))
                {
                    origin = new int[var.Rank];
                    origins[var.ID] = origin;
                    hasChanged = true;
                }
                // Getting its shape
                int[] shape;
                if (!shapes.TryGetValue(var.ID, out shape))
                {
                    shape = new int[var.Rank];
                    for (int i = 0; i < var.Dimensions.Count; i++)
                    {
                        shape[i] = deltas[var.Dimensions[i].Name];
                    }
                    shapes.Add(var.ID, shape);
                }
                // Updating origin for the variable:
                if (!hasChanged)
                {
                    for (int i = 0; i < shape.Length; i++)
                    {
                        int o = dimOrigin[var.Dimensions[i].Name];
                        if (origin[i] != o)
                        {
                            hasChanged = true;
                            origin[i] = o;
                        }
                    }
                }
                if (!hasChanged) // this block is already copied
                {
                    continue;
                }
                // Trim the block shape at dimension boundaries; a clone is made
                // so the cached full-size shape stays intact.
                bool doCopy = false;
                bool shapeUpdated = false;
                for (int i = 0; i < shape.Length; i++)
                {
                    int s = origin[i] + shape[i];
                    int len = var.Dimensions[i].Length;
                    if (s > len)
                    {
                        if (!shapeUpdated)
                        {
                            shapeUpdated = true;
                            shape = (int[])shape.Clone();
                        }
                        shape[i] = len - origin[i];
                    }
                    if (shape[i] > 0)
                    {
                        doCopy = true;
                    }
                }
                if (doCopy)
                {
                    Array data = src.Variables.GetByID(var.ID).GetData(origin, shape);
                    // Compute real size here for strings
                    dst.Variables.GetByID(IDs[var.ID]).PutData(origin, data);
                }
                else // variable is copied
                {
                    copyVars.RemoveAt(varIndex);
                }
            }
            dst.Commit();

            // Updating dimensions origin (odometer increment over srcDims).
            bool isOver = true;
            for (int i = 0; i < totalDims; i++)
            {
                Dimension dim = srcDims[i];
                int origin = dimOrigin[dim.Name] + deltas[dim.Name];
                if (origin < dim.Length)
                {
                    dimOrigin[dim.Name] = origin;
                    isOver = false;
                    // Progress indicator
                    if (i == totalDims - 1)
                    {
                        double perc = (double)origin / dim.Length * 100.0;
                        if (updater != null)
                        {
                            updater(perc, "Copying data ...");
                        }
                    }
                    break;
                }
                dimOrigin[dim.Name] = 0; // carry into the next dimension
            }
            if (isOver)
            {
                break;
            }
        } while (copyVars.Count > 0);
        if (updater != null)
        {
            updater(100.0, "Done.");
        }
    }
    finally
    {
        dst.IsAutocommitEnabled = isAutoCommit;
    }
    return (dst);
}
/// <summary>
/// BackgroundWorker entry point: exports the EPUB project to the chosen
/// destination, reporting progress through the shared updater.
/// </summary>
private void bw_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker worker = sender as BackgroundWorker;
    _ProgressUpdater = new ProgressUpdater(worker, e);
    Project.Export(ProjectInfo.EpubProjectPath, _ExportPath, _ProgressUpdater);
}
/// <summary>
/// Convenience overload of <c>WritePartition</c>: forwards to the full
/// overload without a progress-percentage callback.
/// </summary>
internal void WritePartition(string Name, string FilePath, ProgressUpdater UpdaterPerSector, bool Compress = false)
{
    WritePartition(
        Name,
        FilePath,
        null, // no Action<int, TimeSpan?> callback; only the per-sector updater is used
        UpdaterPerSector,
        Compress);
}
/// <summary>
/// Convenience overload of <c>Flash</c>: forwards to the full overload without
/// a progress-percentage callback.
/// </summary>
public void Flash(UInt32 StartInBytes, Stream Data, ProgressUpdater UpdaterPerSector, UInt32 LengthInBytes = UInt32.MaxValue)
{
    Flash(
        StartInBytes,
        Data,
        null, // no Action<int, TimeSpan?> callback; only the per-sector updater is used
        UpdaterPerSector,
        LengthInBytes);
}
/// <summary>
/// Backs up the EFIESP, MainOS and/or Data partitions from the phone's mass
/// storage to files, on a worker thread. A null path skips that partition.
/// The GPT is read from sectors 1..33 to determine sizes, so one shared
/// progress bar covers all requested partitions.
/// </summary>
/// <param name="EFIESPPath">Destination for the EFIESP backup, or null to skip.</param>
/// <param name="MainOSPath">Destination for the MainOS backup, or null to skip.</param>
/// <param name="DataPath">Destination for the Data backup, or null to skip.</param>
internal void BackupTask(string EFIESPPath, string MainOSPath, string DataPath)
{
    IsSwitchingInterface = false;
    new Thread(() =>
    {
        bool Result = true;
        ActivateSubContext(new BusyViewModel("Initializing backup..."));

        // Pre-pass: read the GPT and total up the sectors of every requested
        // partition for the shared progress bar.
        ulong TotalSizeSectors = 0;
        int PartitionCount = 0;
        MassStorage Phone = (MassStorage)PhoneNotifier.CurrentModel;
        Phone.OpenVolume(false);
        byte[] GPTBuffer = Phone.ReadSectors(1, 33);
        GPT GPT = new(GPTBuffer);
        Partition Partition;
        try
        {
            if (EFIESPPath != null)
            {
                Partition = GPT.Partitions.First(p => p.Name == "EFIESP");
                TotalSizeSectors += Partition.SizeInSectors;
                PartitionCount++;
            }
            if (MainOSPath != null)
            {
                Partition = GPT.Partitions.First(p => p.Name == "MainOS");
                TotalSizeSectors += Partition.SizeInSectors;
                PartitionCount++;
            }
            if (DataPath != null)
            {
                Partition = GPT.Partitions.First(p => p.Name == "Data");
                TotalSizeSectors += Partition.SizeInSectors;
                PartitionCount++;
            }
        }
        catch (Exception Ex)
        {
            LogFile.LogException(Ex);
            Result = false;
        }

        BusyViewModel Busy = new("Create backup...", MaxProgressValue: TotalSizeSectors, UIContext: UIContext);
        ProgressUpdater Updater = Busy.ProgressUpdater;
        ActivateSubContext(Busy);

        // Back up each requested partition in turn; any failure aborts the rest.
        int i = 0;
        if (Result)
        {
            try
            {
                if (EFIESPPath != null)
                {
                    i++;
                    Busy.Message = "Create backup of partition EFIESP (" + i.ToString() + "/" + PartitionCount.ToString() + ")";
                    Phone.BackupPartition("EFIESP", EFIESPPath, Updater);
                }
            }
            catch (Exception Ex)
            {
                LogFile.LogException(Ex);
                Result = false;
            }
        }
        if (Result)
        {
            try
            {
                if (MainOSPath != null)
                {
                    i++;
                    Busy.Message = "Create backup of partition MainOS (" + i.ToString() + "/" + PartitionCount.ToString() + ")";
                    Phone.BackupPartition("MainOS", MainOSPath, Updater);
                }
            }
            catch (Exception Ex)
            {
                LogFile.LogException(Ex);
                Result = false;
            }
        }
        if (Result)
        {
            try
            {
                if (DataPath != null)
                {
                    i++;
                    Busy.Message = "Create backup of partition Data (" + i.ToString() + "/" + PartitionCount.ToString() + ")";
                    Phone.BackupPartition("Data", DataPath, Updater);
                }
            }
            catch (Exception Ex)
            {
                LogFile.LogException(Ex);
                Result = false;
            }
        }

        Phone.CloseVolume();
        if (!Result)
        {
            ActivateSubContext(new MessageViewModel("Failed to create backup!", Exit));
            return;
        }
        ActivateSubContext(new MessageViewModel("Successfully created a backup!", Exit));
    }).Start();
}
private void BackupPartition(string PartitionName, string Path, Stream OutputStream, Action <int, TimeSpan?> ProgressUpdateCallback, ProgressUpdater UpdaterPerSector) { bool VolumeWasOpen = IsVolumeOpen(); if (!VolumeWasOpen) { OpenVolume(false); } SetSectorPosition(1); byte[] GPTBuffer = ReadSectors(1, 33); GPT GPT = new GPT(GPTBuffer); Partition Partition = GPT.Partitions.Where((p) => p.Name == PartitionName).First(); DumpSectors(Partition.FirstSector, Partition.LastSector - Partition.FirstSector + 1, Path, OutputStream, ProgressUpdateCallback, UpdaterPerSector); if (!VolumeWasOpen) { CloseVolume(); } }
internal void WriteSectors(UInt64 StartSector, string Path, ProgressUpdater UpdaterPerSector) { WriteSectors(StartSector, Path, null, UpdaterPerSector); }
private async Task <List <string> > SearchDialogLoadingFunction(string searchText, CancellationToken ct, ProgressUpdater progressUpdater) { // simulate one second of loading await Task.Delay(1000, ct); // generate some random items int count = rand.Next(2, 30); var items = new List <string>(count); progressUpdater?.StartNewLoop(count); for (int i = count - 1; i >= 0; --i) { progressUpdater?.SetForLoop(i); items.Add($"{rand.Next(100)}-{searchText}"); ct.ThrowIfCancellationRequested(); } return(items); }
internal void RestorePartition(string Path, string PartitionName, ProgressUpdater UpdaterPerSector) { RestorePartition(Path, PartitionName, null, UpdaterPerSector); }
/// <summary>
/// Creates a backup of the EFIESP, MainOS and Data partitions into a single
/// zip archive at <paramref name="ArchivePath"/>, on a worker thread.
/// </summary>
internal void BackupArchiveTask(string ArchivePath)
{
    IsSwitchingInterface = false;
    new Thread(() =>
    {
        bool Result = true;

        ActivateSubContext(new BusyViewModel("Initializing backup..."));

        ulong TotalSizeSectors = 0;
        int PartitionCount = 3;
        MassStorage Phone = (MassStorage)PhoneNotifier.CurrentModel;
        try
        {
            Phone.OpenVolume(false);
            byte[] GPTBuffer = Phone.ReadSectors(1, 33);
            GPT GPT = new WPinternals.GPT(GPTBuffer);
            Partition Partition;

            // Sum the partition sizes so the busy-dialog can show progress.
            try
            {
                Partition = GPT.Partitions.Where(p => p.Name == "EFIESP").First();
                TotalSizeSectors += Partition.SizeInSectors;
                Partition = GPT.Partitions.Where(p => p.Name == "MainOS").First();
                TotalSizeSectors += Partition.SizeInSectors;
                Partition = GPT.Partitions.Where(p => p.Name == "Data").First();
                TotalSizeSectors += Partition.SizeInSectors;
            }
            catch (Exception Ex)
            {
                LogFile.LogException(Ex);
                Result = false;
            }

            BusyViewModel Busy = new BusyViewModel("Create backup...", MaxProgressValue: TotalSizeSectors, UIContext: UIContext);
            ProgressUpdater Updater = Busy.ProgressUpdater;
            ActivateSubContext(Busy);

            ZipArchiveEntry Entry;
            Stream EntryStream = null;
            using (FileStream FileStream = new FileStream(ArchivePath, FileMode.Create))
            {
                using (ZipArchive Archive = new ZipArchive(FileStream, ZipArchiveMode.Create))
                {
                    int i = 0;
                    if (Result)
                    {
                        try
                        {
                            Entry = Archive.CreateEntry("EFIESP.bin", CompressionLevel.Optimal);
                            EntryStream = Entry.Open();
                            i++;
                            Busy.Message = "Create backup of partition EFIESP (" + i.ToString() + @"/" + PartitionCount.ToString() + ")";
                            Phone.BackupPartition("EFIESP", EntryStream, Updater);
                        }
                        catch (Exception Ex)
                        {
                            LogFile.LogException(Ex);
                            Result = false;
                        }
                        finally
                        {
                            // The entry stream must be closed before the next entry is created.
                            EntryStream?.Close();
                            EntryStream = null;
                        }
                    }
                    if (Result)
                    {
                        try
                        {
                            Entry = Archive.CreateEntry("MainOS.bin", CompressionLevel.Optimal);
                            EntryStream = Entry.Open();
                            i++;
                            Busy.Message = "Create backup of partition MainOS (" + i.ToString() + @"/" + PartitionCount.ToString() + ")";
                            Phone.BackupPartition("MainOS", EntryStream, Updater);
                        }
                        catch (Exception Ex)
                        {
                            LogFile.LogException(Ex);
                            Result = false;
                        }
                        finally
                        {
                            EntryStream?.Close();
                            EntryStream = null;
                        }
                    }
                    if (Result)
                    {
                        try
                        {
                            Entry = Archive.CreateEntry("Data.bin", CompressionLevel.Optimal);
                            EntryStream = Entry.Open();
                            i++;
                            Busy.Message = "Create backup of partition Data (" + i.ToString() + @"/" + PartitionCount.ToString() + ")";
                            Phone.BackupPartition("Data", EntryStream, Updater);
                        }
                        catch (Exception Ex)
                        {
                            LogFile.LogException(Ex);
                            Result = false;
                        }
                        finally
                        {
                            EntryStream?.Close();
                            EntryStream = null;
                        }
                    }
                }
            }
        }
        catch (Exception Ex)
        {
            // BUGFIX: this catch used to be empty, so a failure outside the
            // per-partition handlers (e.g. opening the volume or creating the
            // archive file) was swallowed and the task still reported success.
            LogFile.LogException(Ex);
            Result = false;
        }
        finally
        {
            Phone.CloseVolume();
        }

        if (!Result)
        {
            ActivateSubContext(new MessageViewModel("Failed to create backup!", Exit));
            return;
        }
        ActivateSubContext(new MessageViewModel("Successfully created a backup!", Exit));
    }).Start();
}
public void Flash(UInt32 StartInBytes, byte[] Data, ProgressUpdater UpdaterPerSector, UInt32 OffsetInBytes = 0, UInt32 LengthInBytes = UInt32.MaxValue) { Flash(StartInBytes, Data, null, UpdaterPerSector, OffsetInBytes, LengthInBytes); }
/// <summary>
/// Creates a backup of the provisioning partitions into a zip archive at
/// <paramref name="ArchiveProvisioningPath"/>, on a worker thread. When a
/// BACKUP_BS_NV partition exists, it is backed up in place of UEFI_BS_NV.
/// </summary>
internal void BackupArchiveProvisioningTask(string ArchiveProvisioningPath)
{
    IsSwitchingInterface = false;
    new Thread(() =>
    {
        bool Result = true;

        ActivateSubContext(new BusyViewModel("Initializing backup..."));

        ulong TotalSizeSectors = 0;
        int PartitionCount = 0;
        MassStorage Phone = (MassStorage)PhoneNotifier.CurrentModel;
        try
        {
            Phone.OpenVolume(false);
            byte[] GPTBuffer = Phone.ReadSectors(1, 33);
            GPT GPT = new(GPTBuffer);
            Partition Partition;

            // Sum the sizes of the provisioning partitions that are present.
            try
            {
                foreach (string PartitionName in ProvisioningPartitions)
                {
                    if (GPT.Partitions.Any(p => p.Name == PartitionName))
                    {
                        Partition = GPT.Partitions.First(p => p.Name == PartitionName);
                        if (PartitionName == "UEFI_BS_NV" && GPT.Partitions.Any(p => p.Name == "BACKUP_BS_NV"))
                        {
                            Partition = GPT.Partitions.First(p => p.Name == "BACKUP_BS_NV");
                        }
                        TotalSizeSectors += Partition.SizeInSectors;
                        PartitionCount++;
                    }
                }
            }
            catch (Exception Ex)
            {
                LogFile.LogException(Ex);
                Result = false;
            }

            BusyViewModel Busy = new("Create backup...", MaxProgressValue: TotalSizeSectors, UIContext: UIContext);
            ProgressUpdater Updater = Busy.ProgressUpdater;
            ActivateSubContext(Busy);

            ZipArchiveEntry Entry;
            Stream EntryStream = null;
            using FileStream FileStream = new(ArchiveProvisioningPath, FileMode.Create);
            using ZipArchive Archive = new(FileStream, ZipArchiveMode.Create);
            int i = 0;
            foreach (string PartitionName in ProvisioningPartitions)
            {
                if (GPT.Partitions.Any(p => p.Name == PartitionName) && Result)
                {
                    try
                    {
                        Entry = Archive.CreateEntry(PartitionName + ".bin", CompressionLevel.Optimal);
                        EntryStream = Entry.Open();
                        i++;
                        Busy.Message = "Create backup of partition " + PartitionName + " (" + i.ToString() + "/" + PartitionCount.ToString() + ")";
                        if (PartitionName == "UEFI_BS_NV" && GPT.Partitions.Any(p => p.Name == "BACKUP_BS_NV"))
                        {
                            Phone.BackupPartition("BACKUP_BS_NV", EntryStream, Updater);
                        }
                        else
                        {
                            Phone.BackupPartition(PartitionName, EntryStream, Updater);
                        }
                    }
                    catch (Exception Ex)
                    {
                        LogFile.LogException(Ex);
                        Result = false;
                    }
                    finally
                    {
                        // The entry stream must be closed before the next entry is created.
                        EntryStream?.Close();
                        EntryStream = null;
                    }
                }
            }
        }
        catch (Exception Ex)
        {
            // BUGFIX: this catch used to be empty; a failure while opening the
            // volume or creating the archive was swallowed and the task still
            // reported "Successfully created a backup!".
            LogFile.LogException(Ex);
            Result = false;
        }
        finally
        {
            Phone.CloseVolume();
        }

        if (!Result)
        {
            ActivateSubContext(new MessageViewModel("Failed to create backup!", Exit));
            return;
        }
        ActivateSubContext(new MessageViewModel("Successfully created a backup!", Exit));
    }).Start();
}
internal void BackupPartition(string PartitionName, Stream OutputStream, ProgressUpdater UpdaterPerSector) { BackupPartition(PartitionName, null, OutputStream, null, UpdaterPerSector); }
/// <summary>
/// Switches the phone from Flash mode to Label (test) mode. On phones that
/// support MMOS over USB, the ENOSW test-mode package is downloaded and
/// flashed on a worker thread; otherwise the boot-mode flag is written and
/// the phone is rebooted into Label mode.
/// </summary>
/// <param name="Continuation">True when this call continues a previous mode switch (only changes the progress text).</param>
private void SwitchFromFlashToLabelMode(bool Continuation = false)
{
    string ProgressText = Continuation
        ? "And now preparing to boot the phone to Label mode..."
        : "Preparing to boot the phone to Label mode...";

    NokiaFlashModel FlashModel = (NokiaFlashModel)CurrentModel;
    if (CurrentMode == PhoneInterfaces.Lumia_Bootloader)
    {
        // Best effort; failure to switch context is tolerated, as before.
        try
        {
            FlashModel.SwitchToFlashAppContext();
        }
        catch { }
    }

    PhoneInfo Info = FlashModel.ReadPhoneInfo(ExtendedInfo: true);
    if (Info.MmosOverUsbSupported)
    {
        new Thread(() =>
        {
            LogFile.BeginAction("SwitchToLabelMode");
            try
            {
                ModeSwitchProgressWrapper(ProgressText, null);

                string TempFolder = Environment.GetEnvironmentVariable("TEMP") + @"\WPInternals";
                // RM-1152 uses the RM-1151 package.
                if (Info.Type == "RM-1152")
                {
                    Info.Type = "RM-1151";
                }

                string ENOSWPackage = LumiaDownloadModel.SearchENOSW(Info.Type, Info.Firmware);
                SetWorkingStatus("Downloading " + Info.Type + " Test Mode package...", MaxProgressValue: 100);
                DownloadEntry downloadEntry = new DownloadEntry(ENOSWPackage, TempFolder, null, null, null);
                downloadEntry.PropertyChanged += (object sender, System.ComponentModel.PropertyChangedEventArgs e) =>
                {
                    if (e.PropertyName == "Progress")
                    {
                        int progress = (sender as DownloadEntry).Progress;
                        // Progress is a 0-100 percentage (see the == 100 check below),
                        // so a direct cast replaces the old TryParse round-trip.
                        UpdateWorkingStatus(null, CurrentProgressValue: (ulong)progress);
                        if (progress == 100)
                        {
                            ModeSwitchProgressWrapper("Initializing Flash...", null);
                            string MMOSPath = TempFolder + "\\" + (sender as DownloadEntry).Name;
                            PhoneNotifier.NewDeviceArrived += NewDeviceArrived;

                            FileInfo info = new FileInfo(MMOSPath);
                            uint length = (uint)info.Length;
                            int maximumbuffersize = 0x00240000;
                            // Number of full buffers needed to transfer the package.
                            uint totalcounts = (uint)Math.Truncate((decimal)length / maximumbuffersize);

                            SetWorkingStatus("Flashing Test Mode package...", MaxProgressValue: 100);
                            ProgressUpdater progressUpdater = new ProgressUpdater(totalcounts + 1, (int i, TimeSpan? time) => UpdateWorkingStatus(null, CurrentProgressValue: (ulong)i));
                            FlashModel.FlashMMOS(MMOSPath, progressUpdater);

                            SetWorkingStatus("And now booting phone to MMOS...", "If the phone stays on the lightning cog screen for a while, you may need to unplug and replug the phone to continue the boot process.");
                        }
                    }
                };
            }
            catch (Exception Ex)
            {
                LogFile.LogException(Ex);
                ModeSwitchErrorWrapper(Ex.Message);
            }
            LogFile.EndAction("SwitchToLabelMode");
        }).Start();
    }
    else
    {
        byte[] BootModeFlagCommand = new byte[] { 0x4E, 0x4F, 0x4B, 0x58, 0x46, 0x57, 0x00, 0x55, 0x42, 0x46, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00 }; // NOKFW UBF
        byte[] RebootCommand = new byte[] { 0x4E, 0x4F, 0x4B, 0x52 }; // NOKR
        BootModeFlagCommand[0x0F] = 0x59; // 0x59 = ASCII 'Y'
        ((NokiaPhoneModel)CurrentModel).ExecuteRawMethod(BootModeFlagCommand);
        ((NokiaPhoneModel)CurrentModel).ExecuteRawVoidMethod(RebootCommand);
        PhoneNotifier.NewDeviceArrived += NewDeviceArrived;
        ModeSwitchProgressWrapper("Rebooting phone to Label mode", null);
        LogFile.Log("Rebooting phone to Label mode", LogType.FileAndConsole);
    }
}
/// <summary>
/// Copies given dataset into another dataset.
/// </summary>
/// <param name="src">Original dataset to copy.</param>
/// <param name="dst">Destination dataset.</param>
/// <param name="updater">Delegate accepting progress update notifications.</param>
/// <returns>New instance of <see cref="DataSet"/> class.</returns>
/// <remarks>
/// This method splits the original dataset into parts and therefore is able
/// to clone very large datasets not fitting to memory.
/// </remarks>
public static DataSet Clone(DataSet src, DataSet dst, ProgressUpdater updater)
{
    if (src == null) throw new ArgumentNullException("src");
    if (dst == null) throw new ArgumentNullException("dst");
    if (dst.IsReadOnly) throw new NotSupportedException("Destination DataSet is read-only");

    // Maximum memory capacity in bytes
    ulong N = 200 * 1024 * 1024;
    // Estimated size of a single string in bytes
    int sizeofString = 100 * 1024;

    /***********************************************************************************
     * Preparing output
    ***********************************************************************************/
    // Autocommit is suspended for the whole copy and restored in the finally block.
    bool isAutoCommit = dst.IsAutocommitEnabled;
    try
    {
        dst.IsAutocommitEnabled = false;
        DataSetSchema srcSchema = src.GetSchema();
        // Maps source variable IDs to the IDs assigned in the destination.
        Dictionary<int, int> IDs = new Dictionary<int, int>();

        // Creating empty variables and copying global metadata and scalar variables
        if (updater != null) updater(0, "Creating structure and copying global metadata and scalar variables...");
        VariableSchema globalMetadataVar = null;
        foreach (VariableSchema v in srcSchema.Variables)
        {
            if (v.ID == DataSet.GlobalMetadataVariableID)
            {
                globalMetadataVar = v;
                continue;
            }
            Variable t = dst.AddVariable(v.TypeOfData, v.Name, null, v.Dimensions.AsNamesArray());
            IDs.Add(v.ID, t.ID);
            foreach (var attr in v.Metadata)
                t.Metadata[attr.Key] = attr.Value;
            if (t.Rank == 0) // scalar
                t.PutData(src.Variables.GetByID(v.ID).GetData());
        }
        if (globalMetadataVar != null)
        {
            // Copying global metadata
            foreach (var attr in globalMetadataVar.Metadata)
                dst.Metadata[attr.Key] = attr.Value;
        }
        dst.Commit();
        // Console.Out.WriteLine("Done.\n");

        /***********************************************************************************
         * Adjusting dimensions deltas
        ***********************************************************************************/
        // "deltas" holds, per dimension, the slab length copied per iteration; it is
        // halved along the largest dimension until one slab fits into the N-byte budget.
        Dimension[] srcDims = srcSchema.GetDimensions();
        Dictionary<string, int> deltas = new Dictionary<string, int>(srcDims.Length);
        foreach (var d in srcDims)
            deltas[d.Name] = d.Length;
        // Console.Out.WriteLine("Total memory capacity: " + (N / 1024.0 / 1024.0).ToString("F2") + " Mb");
        ulong totalSize;
        do
        {
            totalSize = 0;
            foreach (var var in srcSchema.Variables)
            {
                if (var.Rank == 0) continue; // scalar
                int typeSize = SizeOf(var.TypeOfData, sizeofString);
                ulong count = 0;
                foreach (var vdim in var.Dimensions)
                {
                    int dimDelta = deltas[vdim.Name];
                    if (count == 0) count = (ulong)dimDelta;
                    else count *= (ulong)dimDelta;
                }
                totalSize += (ulong)typeSize * count;
            }
            if (totalSize > N)
            {
                // Halve the longest remaining dimension delta.
                string maxDim = null;
                int max = int.MinValue;
                foreach (var dim in deltas)
                    if (dim.Value > max)
                    {
                        max = dim.Value;
                        maxDim = dim.Key;
                    }
                if (maxDim == null || max <= 1)
                    throw new NotSupportedException("Cannot copy the DataSet: it is too large to be copied entirely by the utility for the provided memory capacity");
                deltas[maxDim] = max >> 1;
            }
        } while (totalSize > N);
        // Printing deltas
        if (updater != null) updater(0, String.Format("Deltas for the dimensions adjusted (max iteration capacity: " + (totalSize / 1024.0 / 1024.0).ToString("F2") + " Mb)"));

        /***********************************************************************************
         * Copying data
        ***********************************************************************************/
        // Console.WriteLine();
        if (updater != null) updater(0, "Copying data ...");
        // Per-variable current origin and slab shape, keyed by source variable ID.
        Dictionary<int, int[]> origins = new Dictionary<int, int[]>(srcSchema.Variables.Length);
        Dictionary<int, int[]> shapes = new Dictionary<int, int[]>(srcSchema.Variables.Length);
        List<VariableSchema> copyVars = srcSchema.Variables.Where(vs => (vs.Rank > 0 && vs.ID != DataSet.GlobalMetadataVariableID)).ToList();
        // Global multi-dimensional "odometer" over all dimensions; advanced after each pass.
        Dictionary<string, int> dimOrigin = new Dictionary<string, int>(srcDims.Length);
        foreach (var d in srcDims)
            dimOrigin[d.Name] = 0;
        Array.Sort(srcDims, (d1, d2) => d1.Length - d2.Length);
        int totalDims = srcDims.Length;
        do
        {
            // for each variable:
            for (int varIndex = copyVars.Count; --varIndex >= 0; )
            {
                VariableSchema var = copyVars[varIndex];
                bool hasChanged = false;
                // Getting its origin
                int[] origin;
                if (!origins.TryGetValue(var.ID, out origin))
                {
                    origin = new int[var.Rank];
                    origins[var.ID] = origin;
                    hasChanged = true;
                }
                // Getting its shape
                int[] shape;
                if (!shapes.TryGetValue(var.ID, out shape))
                {
                    shape = new int[var.Rank];
                    for (int i = 0; i < var.Dimensions.Count; i++)
                        shape[i] = deltas[var.Dimensions[i].Name];
                    shapes.Add(var.ID, shape);
                }
                // Updating origin for the variable:
                if (!hasChanged)
                    for (int i = 0; i < shape.Length; i++)
                    {
                        int o = dimOrigin[var.Dimensions[i].Name];
                        if (origin[i] != o)
                        {
                            hasChanged = true;
                            origin[i] = o;
                        }
                    }
                if (!hasChanged) // this block is already copied
                    continue;
                // Clip the slab shape at the end of each dimension; doCopy stays
                // false when the slab is empty (variable fully copied).
                bool doCopy = false;
                bool shapeUpdated = false;
                for (int i = 0; i < shape.Length; i++)
                {
                    int s = origin[i] + shape[i];
                    int len = var.Dimensions[i].Length;
                    if (s > len)
                    {
                        if (!shapeUpdated)
                        {
                            shapeUpdated = true;
                            // Clone before clipping: the cached shape is shared across iterations.
                            shape = (int[])shape.Clone();
                        }
                        shape[i] = len - origin[i];
                    }
                    if (shape[i] > 0) doCopy = true;
                }
                if (doCopy)
                {
                    Array data = src.Variables.GetByID(var.ID).GetData(origin, shape);
                    // Compute real size here for strings
                    dst.Variables.GetByID(IDs[var.ID]).PutData(origin, data);
                }
                else // variable is copied
                {
                    copyVars.RemoveAt(varIndex);
                }
            }
            dst.Commit();
            // Updating dimensions origin
            bool isOver = true;
            for (int i = 0; i < totalDims; i++)
            {
                Dimension dim = srcDims[i];
                int origin = dimOrigin[dim.Name] + deltas[dim.Name];
                if (origin < dim.Length)
                {
                    dimOrigin[dim.Name] = origin;
                    isOver = false;
                    // Progress indicator
                    if (i == totalDims - 1)
                    {
                        double perc = (double)origin / dim.Length * 100.0;
                        if (updater != null) updater(perc, "Copying data ...");
                    }
                    break;
                }
                dimOrigin[dim.Name] = 0;
            }
            if (isOver) break;
        } while (copyVars.Count > 0);
        if (updater != null) updater(100.0, "Done.");
    }
    finally
    {
        dst.IsAutocommitEnabled = isAutoCommit;
    }
    return dst;
}
/// <summary> /// Gibbs sampling iterations. /// </summary> public void GibbsSampling(ProgressUpdater updater) { #region Local variables // Count statistics and their sums int[,] nmk = new int[M, K]; int[] nm = new int[M]; int[,] nkv = new int[K, V]; int[] nk = new int[K]; // Memory for full conditional array int[][] zassign = new int[M][]; #endregion #region Initialization if (updater != null) updater.UpdateMessage("Initializing..."); for (int m = 0; m < M; m++) { int N = docs[m].Length; zassign[m] = new int[N]; for (int n = 0; n < N; n++) { int z = rand.Next(0, K); nmk[m, z]++; int v = docs[m][n]; nkv[z, v]++; nk[z]++; zassign[m][n] = z; } nm[m] = N; } #endregion #region Gibbs sampling if (updater != null) updater.UpdateMessage("Gibbs sampling..."); for (int itr = 0; itr < iterations; itr++) { for (int m = 0; m < M; m++) { int N = docs[m].Length; for (int n = 0; n < N; n++) { // For the current topic assignment z to word token docs[m][n] int z = zassign[m][n]; int v = docs[m][n]; nmk[m, z]--; nm[m]--; nkv[z, v]--; nk[z]--; // For the new topic assignment z to the word token docs[m][n] z = SimpleZ(nkv, nk, nmk, nm, m, n); nm[m]++; nmk[m, z]++; nk[z]++; nkv[z, v]++; zassign[m][n] = z; } } if (updater != null) updater.UpdateProgress((double)(itr + 1) / iterations); } #endregion #region Calculate result, theta and phi if (updater != null) updater.UpdateMessage("Calcaulating result..."); for (int k = 0; k < K; k++) { for (int v = 0; v < V; v++) { phi[k, v] = (nkv[k, v] + beta) / (nk[k] + V * beta); } for (int m = 0; m < M; m++) { theta[m, k] = (nmk[m, k] + alpha) / (nm[m] + K * alpha); } } #endregion }
/// <summary>
/// Dumps a single named partition from the eMMC image to a file, optionally
/// gzip-compressed.
/// </summary>
/// <param name="Name">Partition name (case-insensitive).</param>
/// <param name="FilePath">Destination file path (created/overwritten).</param>
/// <param name="ProgressUpdateCallback">Used to build a ProgressUpdater when <paramref name="UpdaterPerSector"/> is null.</param>
/// <param name="UpdaterPerSector">Existing per-sector progress updater, or null.</param>
/// <param name="Compress">True to wrap the output in a CompressedStream.</param>
/// <exception cref="ArgumentOutOfRangeException">The named partition does not exist.</exception>
private void WritePartition(string Name, string FilePath, Action<int, TimeSpan?> ProgressUpdateCallback, ProgressUpdater UpdaterPerSector, bool Compress = false)
{
    Partition Target = GPT.Partitions.Find(p => string.Equals(p.Name, Name, StringComparison.CurrentCultureIgnoreCase));
    if (Target == null)
    {
        throw new ArgumentOutOfRangeException(nameof(Name));
    }

    int FirstChunk = GetChunkIndexFromSectorIndex((int)Target.FirstSector);
    int LastChunk = GetChunkIndexFromSectorIndex((int)Target.LastSector);

    ProgressUpdater Updater = UpdaterPerSector;
    if ((Updater == null) && (ProgressUpdateCallback != null))
    {
        Updater = new ProgressUpdater(Target.LastSector - Target.FirstSector + 1, ProgressUpdateCallback);
    }

    byte[] Buffer = new byte[ChunkSize];

    // BUGFIX: the device file and output streams were leaked when the dump
    // loop threw; all cleanup now happens in finally blocks.
    OpenFile();
    try
    {
        using FileStream OutputFile = new(FilePath, FileMode.Create, FileAccess.Write);
        Stream OutStream = OutputFile;

        // We use gzip compression.
        //
        // LZMA is about 60 times slower (compression is twice as good, but compressed size
        // is already really small, so it doesnt matter much)
        // OutStream = new LZMACompressionStream(OutputFile, System.IO.Compression.CompressionMode.Compress, false);
        //
        // DeflateStream is a raw compression stream without recognizable header
        // Deflate has almost no performance penalty
        // OutStream = new DeflateStream(OutputFile, CompressionLevel.Optimal, false);
        //
        // GZip can be recognized. It always starts with 1F 8B 08 (1F 8B is the magic value,
        // 08 is the Deflate compression method). With GZip compression, dump time goes from
        // 1m to 1m37s. So that doesnt matter much.
        if (Compress)
        {
            OutStream = new CompressedStream(OutputFile, (Target.LastSector - Target.FirstSector + 1) * 0x200);
        }

        try
        {
            for (int j = FirstChunk; j <= LastChunk; j++)
            {
                GetChunk(Buffer, j);

                // Clip the sector range inside the first and last chunk to the
                // partition boundaries.
                int FirstSector = 0;
                int LastSector = (ChunkSize / 0x200) - 1;
                if (j == FirstChunk)
                {
                    FirstSector = GetSectorNumberInChunkFromSectorIndex((int)Target.FirstSector);
                }
                if (j == LastChunk)
                {
                    LastSector = GetSectorNumberInChunkFromSectorIndex((int)Target.LastSector);
                }

                int Offset = FirstSector * 0x200;
                int Size = (LastSector - FirstSector + 1) * 0x200;
                OutStream.Write(Buffer, Offset, Size);

                Updater?.IncreaseProgress((UInt64)(ChunkSize / 0x200));
            }
        }
        finally
        {
            // Closes the compression wrapper (flushing its footer) when present;
            // the FileStream itself is disposed by the using declaration.
            OutStream.Close();
        }
    }
    finally
    {
        CloseFile();
    }
}
internal void RestoreTask(string EFIESPPath, string MainOSPath, string DataPath) { new Thread(() => { bool Result = true; ActivateSubContext(new BusyViewModel("Initializing restore...")); ulong TotalSizeSectors = 0; int PartitionCount = 0; try { if (EFIESPPath != null) { TotalSizeSectors += (ulong)new FileInfo(EFIESPPath).Length / 0x200; PartitionCount++; } if (MainOSPath != null) { TotalSizeSectors += (ulong)new FileInfo(MainOSPath).Length / 0x200; PartitionCount++; } if (DataPath != null) { TotalSizeSectors += (ulong)new FileInfo(DataPath).Length / 0x200; PartitionCount++; } } catch (Exception Ex) { LogFile.LogException(Ex); Result = false; } NokiaFlashModel Phone = (NokiaFlashModel)PhoneNotifier.CurrentModel; BusyViewModel Busy = new BusyViewModel("Restoring...", MaxProgressValue: TotalSizeSectors, UIContext: UIContext); ProgressUpdater Updater = Busy.ProgressUpdater; ActivateSubContext(Busy); int i = 0; if (Result) { try { if (EFIESPPath != null) { i++; Busy.Message = "Restoring partition EFIESP (" + i.ToString() + @"/" + PartitionCount.ToString() + ")"; Phone.FlashRawPartition(EFIESPPath, "EFIESP", Updater); } } catch (Exception Ex) { LogFile.LogException(Ex); Result = false; } } if (Result) { try { if (MainOSPath != null) { i++; Busy.Message = "Restoring partition MainOS (" + i.ToString() + @"/" + PartitionCount.ToString() + ")"; Phone.FlashRawPartition(EFIESPPath, "MainOS", Updater); } } catch (Exception Ex) { LogFile.LogException(Ex); Result = false; } } if (Result) { try { if (DataPath != null) { i++; Busy.Message = "Restoring partition Data (" + i.ToString() + @"/" + PartitionCount.ToString() + ")"; Phone.FlashRawPartition(EFIESPPath, "Data", Updater); } } catch (Exception Ex) { LogFile.LogException(Ex); Result = false; } } if (!Result) { ActivateSubContext(new MessageViewModel("Failed to restore!", Exit)); return; } ActivateSubContext(new MessageViewModel("Successfully restored!", Exit)); }).Start(); }
private static int WaitForProc(Process proc, CancellationToken ct, ProgressUpdater progressUpdater = null) { int id = proc.Id; while (!proc.HasExited) { if (ct.IsCancellationRequested) { KillChildProcs(id); if (!proc.HasExited) { proc.Kill(); } } proc.WaitForExit(kPollInterval); if (!ct.IsCancellationRequested && (progressUpdater != null)) { progressUpdater(); } } return proc.ExitCode; }
private void RestorePartition(string Path, string PartitionName, Action <int, TimeSpan?> ProgressUpdateCallback, ProgressUpdater UpdaterPerSector) { bool VolumeWasOpen = IsVolumeOpen(); if (!VolumeWasOpen) { OpenVolume(true); } SetSectorPosition(1); byte[] GPTBuffer = ReadSectors(1, 33); GPT GPT = new GPT(GPTBuffer); Partition Partition = GPT.Partitions.Where((p) => p.Name == PartitionName).First(); ulong PartitionSize = (Partition.LastSector - Partition.FirstSector + 1) * 0x200; ulong FileSize = (ulong)new FileInfo(Path).Length; if (FileSize > PartitionSize) { throw new InvalidOperationException("Partition can not be restored, because its size is too big!"); } WriteSectors(Partition.FirstSector, Path, ProgressUpdateCallback); if (!VolumeWasOpen) { CloseVolume(); } }