public void Set(ArchiveReader archive, byte[] dataVector)
{
    Dispose();

    _archive = archive;
    _archive.AddByteStream(dataVector, 0, dataVector.Length);
    _needRemove = true;
    _active = true;
}
/// <summary>
/// Create all required <see cref="BeatmapInfo"/>s for the provided archive.
/// </summary>
private List<BeatmapInfo> createBeatmapDifficulties(ArchiveReader reader)
{
    var beatmapInfos = new List<BeatmapInfo>();

    foreach (var name in reader.Filenames.Where(f => f.EndsWith(".osu")))
    {
        using (var raw = reader.GetStream(name))
        using (var ms = new MemoryStream()) // we need a memory stream so we can seek
        using (var sr = new StreamReader(ms))
        {
            raw.CopyTo(ms);
            ms.Position = 0;

            var decoder = Decoder.GetDecoder<Beatmap>(sr);
            IBeatmap beatmap = decoder.Decode(sr);

            beatmap.BeatmapInfo.Path = name;
            beatmap.BeatmapInfo.Hash = ms.ComputeSHA2Hash();
            beatmap.BeatmapInfo.MD5Hash = ms.ComputeMD5Hash();

            // check that no existing beatmap exists that is imported with the same online beatmap ID. if so, give it precedence.
            if (beatmap.BeatmapInfo.OnlineBeatmapID.HasValue && QueryBeatmap(b => b.OnlineBeatmapID.Value == beatmap.BeatmapInfo.OnlineBeatmapID.Value) != null)
            {
                beatmap.BeatmapInfo.OnlineBeatmapID = null;
            }

            RulesetInfo ruleset = rulesets.GetRuleset(beatmap.BeatmapInfo.RulesetID);
            beatmap.BeatmapInfo.Ruleset = ruleset;

            if (ruleset != null)
            {
                // TODO: this should be done in a better place once we actually need to dynamically update it.
                beatmap.BeatmapInfo.StarDifficulty = ruleset.CreateInstance().CreateDifficultyCalculator(new DummyConversionBeatmap(beatmap)).Calculate().StarRating;
            }
            else
            {
                beatmap.BeatmapInfo.StarDifficulty = 0;
            }

            beatmapInfos.Add(beatmap.BeatmapInfo);
        }
    }

    return beatmapInfos;
}
protected override async Task Populate(SkinInfo model, ArchiveReader archive, CancellationToken cancellationToken = default)
{
    await base.Populate(model, archive, cancellationToken);

    Skin reference = getSkin(model);

    if (!string.IsNullOrEmpty(reference.Configuration.SkinInfo.Name))
    {
        model.Name = reference.Configuration.SkinInfo.Name;
        model.Creator = reference.Configuration.SkinInfo.Creator;
    }
    else
    {
        model.Name = model.Name.Replace(".osk", "");
        model.Creator = "Unknown";
    }
}
protected override void Populate(SkinInfo model, ArchiveReader archive)
{
    base.Populate(model, archive);

    Skin reference = getSkin(model);

    if (!string.IsNullOrEmpty(reference.Configuration.SkinInfo.Name))
    {
        model.Name = reference.Configuration.SkinInfo.Name;
        model.Creator = reference.Configuration.SkinInfo.Creator;
    }
    else
    {
        model.Name = model.Name.Replace(".osk", "");
        model.Creator = "Unknown";
    }
}
/// <summary>
/// Create all required <see cref="FileInfo"/>s for the provided archive, adding them to the global file store.
/// </summary>
private List<TFileModel> createFileInfos(ArchiveReader reader, FileStore files)
{
    var fileInfos = new List<TFileModel>();

    // import files to manager
    foreach (string file in reader.Filenames)
    {
        using (Stream s = reader.GetStream(file))
            fileInfos.Add(new TFileModel
            {
                Filename = FileSafety.PathStandardise(file),
                FileInfo = files.Add(s)
            });
    }

    return fileInfos;
}
/// <summary>
/// Updates the chart after changes have been made to the chart boundaries or the data displayed in the chart.
/// </summary>
public void UpdateChart()
{
    Cursor windowCursor = Cursor;
    int colorIndex = 0;
    DateTime startTime = m_xAxis.Minimum ?? TimeTag.MinValue.ToDateTime();
    DateTime endTime = m_xAxis.Maximum ?? TimeTag.MaxValue.ToDateTime();

    Cursor = Cursors.Wait;
    m_chart.Series.Clear();

    foreach (Tuple<ArchiveReader, MetadataRecord> visiblePoint in m_visiblePoints)
    {
        if (m_chartResolution > 0 && colorIndex < m_lineColors.Count)
        {
            ArchiveReader reader = visiblePoint.Item1;
            MetadataRecord record = visiblePoint.Item2;
            IEnumerable<IDataPoint> data = reader.ReadData(record.HistorianID, startTime, endTime, false);
            LineSeries series = new LineSeries();
            int interval = (data.Count() / m_chartResolution) + 1;
            int pointCount = 0;

            // Change how data points are displayed.
            series.DataPointStyle = new Style(typeof(LineDataPoint));
            series.DataPointStyle.Setters.Add(new Setter(BackgroundProperty, new SolidColorBrush(m_lineColors[colorIndex])));
            series.DataPointStyle.Setters.Add(new Setter(TemplateProperty, new ControlTemplate()));
            colorIndex++;

            // Set the title of the series as it will appear in the legend.
            series.Title = record.Name;

            // Decimate the data down to roughly the configured chart resolution.
            series.ItemsSource = data.Where(point => (pointCount++ % interval) == 0).Select(point => new DataPointWrapper(point));
            series.IndependentValuePath = "Time";
            series.DependentValuePath = "Value";

            // Add the series to the chart.
            m_chart.Series.Add(series);
        }
    }

    UpdateLayout();
    Cursor = windowCursor;
    OnChartUpdated();
}
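The sampling step above keeps the chart responsive by drawing only a subset of the points returned from the archive. As a standalone sketch of the same idea (the helper name and signature here are illustrative, not part of the chart code), the decimation reduces any sequence to roughly `resolution` evenly spaced elements:

// Illustrative decimation helper: keeps roughly `resolution` points by taking
// every Nth element, mirroring the interval/pointCount logic used above.
static IEnumerable<T> Decimate<T>(IReadOnlyList<T> source, int resolution)
{
    if (resolution <= 0)
        yield break;

    int interval = (source.Count / resolution) + 1;

    for (int i = 0; i < source.Count; i += interval)
        yield return source[i];
}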
protected override string ComputeHash(SkinInfo item, ArchiveReader reader = null)
{
    var instance = GetSkin(item);

    // in the case the skin has a skin.ini file, we are going to create a hash based on that.
    // we don't want to do this in the case we don't have a skin.ini, as it would match only on the filename portion,
    // causing potentially unique skin imports to be considered as a duplicate.
    if (!string.IsNullOrEmpty(instance.Configuration.SkinInfo.Name))
    {
        // we need to populate early to create a hash based off skin.ini contents
        populateMetadata(item, instance, reader?.Name);

        return item.ToString().ComputeSHA2Hash();
    }

    return base.ComputeHash(item, reader);
}
/// <summary>
/// Create all required <see cref="FileInfo"/>s for the provided archive, adding them to the global file store.
/// </summary>
private List<BeatmapSetFileInfo> createFileInfos(ArchiveReader reader, FileStore files)
{
    List<BeatmapSetFileInfo> fileInfos = new List<BeatmapSetFileInfo>();

    // import files to manager
    foreach (string file in reader.Filenames)
    {
        using (Stream s = reader.GetStream(file))
            fileInfos.Add(new BeatmapSetFileInfo
            {
                Filename = file,
                FileInfo = files.Add(s)
            });
    }

    return fileInfos;
}
/// <summary>
/// Populate all the archive entries from the game archive folder.
/// </summary>
private static void _Populate()
{
    _archiveFilesVersions = new Dictionary<string, ArchiveFileVersions>();

    if (Directory.Exists(Directories.ProjectGameArchivesFolder))
    {
        _archiveFiles = new ArchiveFiles();

        bool __mutexCreated = false;
        Mutex __mutex = new Mutex(true, "LeagueSharpArchives", out __mutexCreated);

        // Wait until it is safe to enter.
        __mutex.WaitOne();

        _archiveFiles.DeSerialize(_dbPath + ".list");
        _archiveFileEntries.DeSerialize(_dbPath + ".dat");

        if (__mutexCreated)
        {
            List<string> rafFilePaths = _GetArchiveFiles(Directories.ProjectGameArchivesFolder);
            ArchivesCount = rafFilePaths.Count;
            bool __modified = false;

            foreach (string path in rafFilePaths)
            {
                if (!_archiveFiles.Contains(path.ToLowerInvariant()))
                {
                    __modified = true;
                    ArchiveReader raf = new ArchiveReader(path);
                    _archiveFileEntries.AddRange(raf.FileDictFull.Values);
                    _archiveFiles.Add(path.ToLowerInvariant());
                }
            }

            if (__modified)
            {
                _archiveFiles.Serialize(_dbPath + ".list");
                _archiveFileEntries.Serialize(_dbPath + ".dat");
            }
        }

        __mutex.ReleaseMutex();

        for (int __i = 0; __i < _archiveFileEntries.Count; __i++)
        {
            string __filename = _archiveFileEntries[__i].FileName.ToLowerInvariant();

            if (!_archiveFilesVersions.ContainsKey(__filename))
            {
                _archiveFilesVersions.Add(__filename, new ArchiveFileVersions());
            }

            _archiveFilesVersions[__filename].Add(__i);
        }
    }
}
private string computeHashFast(ArchiveReader reader)
{
    MemoryStream hashable = new MemoryStream();

    foreach (string? file in reader.Filenames.Where(f => HashableFileTypes.Any(ext => f.EndsWith(ext, StringComparison.OrdinalIgnoreCase))).OrderBy(f => f))
    {
        using (Stream s = reader.GetStream(file))
            s.CopyTo(hashable);
    }

    if (hashable.Length > 0)
    {
        return hashable.ComputeSHA2Hash();
    }

    return reader.Name.ComputeSHA2Hash();
}
public int Run()
{
    using (var writer = new ArchiveWriter("test_archive.pak", false))
    {
        writer.Compress(Assembly.GetExecutingAssembly(), "examples.Resources.lorem_ipsum.txt", "lorem_ipsum.txt");
    }

    using (var reader = new ArchiveReader("test_archive.pak"))
    {
        var file = reader.GetFile("lorem_ipsum.txt");
        var data = reader.Decompress(file);
        string content = Encoding.ASCII.GetString(data);

        Console.WriteLine(content);
    }

    return 0;
}
private void CloseGSFHistorianArchive()
{
    if ((object)m_archiveReader != null)
    {
        m_archiveReader.RolloverStart -= m_archiveReader_RolloverStart;
        m_archiveReader.RolloverComplete -= m_archiveReader_RolloverComplete;
        m_archiveReader.HistoricFileListBuildStart -= m_archiveReader_HistoricFileListBuildStart;
        m_archiveReader.HistoricFileListBuildComplete -= m_archiveReader_HistoricFileListBuildComplete;
        m_archiveReader.HistoricFileListBuildException -= m_archiveReader_HistoricFileListBuildException;
        m_archiveReader.DataReadException -= m_archiveReader_DataReadException;

        m_archiveReader.Dispose();
        m_archiveReader = null;
    }

    m_enumerator = null;

    ShowUpdateMessage("[GSFHistorian] Archive reader closed.");
}
protected override string ComputeHash(SkinInfo item, ArchiveReader reader = null)
{
    // we need to populate early to create a hash based off skin.ini contents
    if (item.Name?.Contains(".osk") == true)
    {
        populateMetadata(item);
    }

    if (item.Creator != null && item.Creator != unknown_creator_string)
    {
        // this is the optimal way to hash legacy skins, but will need to be reconsidered when we move forward with skin implementation.
        // likely, the skin should expose a real version (ie. the version of the skin, not the skin.ini version it's targeting).
        return item.ToString().ComputeSHA2Hash();
    }

    // if there was no creator, the ToString above would give the filename, which alone isn't really enough to base any decisions on.
    return base.ComputeHash(item, reader);
}
public void WithFilter()
{
    var src = GetSource("SampleFilter.zip");
    var dest = Get(nameof(WithFilter));
    var files = new[] { ".DS_Store", "Thumbs.db", "__MACOSX", "desktop.ini" };
    var opts = new ArchiveOption { Filter = Filter.From(files) };

    using (var archive = new ArchiveReader(src, opts)) archive.Save(dest);

    Assert.That(Io.Exists(Io.Combine(dest, @"フィルタリング テスト用")), Is.True);
    Assert.That(Io.Exists(Io.Combine(dest, @"フィルタリング テスト用\.DS_Store")), Is.False);
    Assert.That(Io.Exists(Io.Combine(dest, @"フィルタリング テスト用\desktop.ini")), Is.False);
    Assert.That(Io.Exists(Io.Combine(dest, @"フィルタリング テスト用\DS_Store.txt")), Is.True);
    Assert.That(Io.Exists(Io.Combine(dest, @"フィルタリング テスト用\Thumbs.db")), Is.False);
    Assert.That(Io.Exists(Io.Combine(dest, @"フィルタリング テスト用\__MACOSX")), Is.False);
    Assert.That(Io.Exists(Io.Combine(dest, @"フィルタリング テスト用\フィルタリングされないファイル.txt")), Is.True);
}
public int Archive_Filter(bool filter)
{
    var names = new[] { "Filter.txt", "FilterDirectory" };
    var s = filter ? "True" : "False";
    var dest = GetResultsWith($"Filter{s}.zip");

    using (var writer = new ArchiveWriter(Format.Zip))
    {
        if (filter)
        {
            writer.Filters = names;
        }

        writer.Add(GetExamplesWith("Sample.txt"));
        writer.Add(GetExamplesWith("Sample 00..01"));
        writer.Save(dest);
    }

    using (var reader = new ArchiveReader(dest))
        return reader.Items.Count;
}
/// <summary>
/// Import a beatmap from an <see cref="ArchiveReader"/>.
/// </summary>
/// <param name="archive">The beatmap to be imported.</param>
public BeatmapSetInfo Import(ArchiveReader archive)
{
    // used to share a context for full import. keep in mind this will block all writes.
    using (contextFactory.GetForWrite())
    {
        // create a new set info (don't yet add to database)
        var beatmapSet = createBeatmapSetInfo(archive);

        // check if this beatmap has already been imported and exit early if so
        var existingHashMatch = beatmaps.BeatmapSets.FirstOrDefault(b => b.Hash == beatmapSet.Hash);

        if (existingHashMatch != null)
        {
            Undelete(existingHashMatch);
            return existingHashMatch;
        }

        // check if a set already exists with the same online id
        if (beatmapSet.OnlineBeatmapSetID != null)
        {
            var existingOnlineId = beatmaps.BeatmapSets.FirstOrDefault(b => b.OnlineBeatmapSetID == beatmapSet.OnlineBeatmapSetID);

            if (existingOnlineId != null)
            {
                Delete(existingOnlineId);
                beatmaps.Cleanup(s => s.ID == existingOnlineId.ID);
            }
        }

        beatmapSet.Files = createFileInfos(archive, files);
        beatmapSet.Beatmaps = createBeatmapDifficulties(archive);

        // remove metadata from difficulties where it matches the set
        foreach (BeatmapInfo b in beatmapSet.Beatmaps)
        {
            if (beatmapSet.Metadata.Equals(b.Metadata))
            {
                b.Metadata = null;
            }
        }

        // import to beatmap store
        Import(beatmapSet);

        return beatmapSet;
    }
}
public void Extract(string filename, string password) => IgnoreCultureError(() =>
{
    var src = GetExamplesWith(filename);
    var dest = GetResultsWith(nameof(Extract), filename);
    var report = CreateReport();

    using (var obj = new ArchiveReader(src, password)) obj.Extract(dest, Create(report));

    foreach (var cmp in Expect(filename))
    {
        var fi = IO.Get(IO.Combine(dest, cmp.Key));

        Assert.That(fi.Exists, Is.True, cmp.Key);
        Assert.That(fi.Length, Is.EqualTo(cmp.Value), cmp.Key);
        Assert.That(fi.CreationTime, Is.Not.EqualTo(DateTime.MinValue), cmp.Key);
        Assert.That(fi.LastWriteTime, Is.Not.EqualTo(DateTime.MinValue), cmp.Key);
        Assert.That(fi.LastAccessTime, Is.Not.EqualTo(DateTime.MinValue), cmp.Key);
    }
}, $"{filename}, {password}");
public void Extract_Filters()
{
    var src = GetExamplesWith("SampleFilter.zip");
    var dest = GetResultsWith(nameof(Extract_Filters));

    using (var archive = new ArchiveReader(src))
    {
        archive.Filters = new[] { ".DS_Store", "Thumbs.db", "__MACOSX", "desktop.ini" };
        archive.Extract(dest);
    }

    Assert.That(IO.Exists(IO.Combine(dest, @"フィルタリング テスト用")), Is.True);
    Assert.That(IO.Exists(IO.Combine(dest, @"フィルタリング テスト用\.DS_Store")), Is.False);
    Assert.That(IO.Exists(IO.Combine(dest, @"フィルタリング テスト用\desktop.ini")), Is.False);
    Assert.That(IO.Exists(IO.Combine(dest, @"フィルタリング テスト用\DS_Store.txt")), Is.True);
    Assert.That(IO.Exists(IO.Combine(dest, @"フィルタリング テスト用\Thumbs.db")), Is.False);
    Assert.That(IO.Exists(IO.Combine(dest, @"フィルタリング テスト用\__MACOSX")), Is.False);
    Assert.That(IO.Exists(IO.Combine(dest, @"フィルタリング テスト用\フィルタリングされないファイル.txt")), Is.True);
}
private void CloseStreams()
{
    if (!_leaveOpen)
    {
        ArchiveStream.Dispose();
        _backingStream?.Dispose();
        ArchiveReader?.Dispose();
    }
    else
    {
        // if _backingStream isn't null, that means we assigned the original stream they passed
        // us to _backingStream (which they requested we leave open), and _archiveStream was
        // the temporary copy that we needed
        if (_backingStream != null)
        {
            ArchiveStream.Dispose();
        }
    }
}
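For context, the comment above implies a setup step that this teardown mirrors: when the caller asks to leave their stream open but a seekable stream is required, the original is stashed as the backing stream and a seekable temporary copy becomes the working archive stream. A simplified, hypothetical sketch of that setup (not the original constructor; names beyond the three fields used above are assumptions) follows:

// Hypothetical setup counterpart to CloseStreams(); field and method names
// beyond ArchiveStream/_backingStream/_leaveOpen are illustrative only.
private void OpenStreams(Stream stream, bool leaveOpen)
{
    _leaveOpen = leaveOpen;

    if (stream.CanSeek)
    {
        ArchiveStream = stream;             // seekable already: use it directly
    }
    else
    {
        _backingStream = stream;            // caller's stream, possibly left open
        ArchiveStream = new MemoryStream(); // seekable temporary working copy
        stream.CopyTo(ArchiveStream);
        ArchiveStream.Position = 0;
    }
}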
private async Task<ScoreInfo> loadIntoOsu(OsuGameBase osu, ScoreInfo score, ArchiveReader archive = null)
{
    var beatmapManager = osu.Dependencies.Get<BeatmapManager>();

    if (score.Beatmap == null)
    {
        score.Beatmap = beatmapManager.GetAllUsableBeatmapSets().First().Beatmaps.First();
    }

    if (score.Ruleset == null)
    {
        score.Ruleset = new OsuRuleset().RulesetInfo;
    }

    var scoreManager = osu.Dependencies.Get<ScoreManager>();
    await scoreManager.Import(score, archive);

    return scoreManager.GetAllUsableScores().FirstOrDefault();
}
protected override ScoreInfo CreateModel(ArchiveReader archive)
{
    if (archive == null)
    {
        return null;
    }

    using (var stream = archive.GetStream(archive.Filenames.First(f => f.EndsWith(".osr", StringComparison.OrdinalIgnoreCase))))
    {
        try
        {
            return new DatabasedLegacyScoreDecoder(rulesets, beatmaps()).Parse(stream).ScoreInfo;
        }
        catch (LegacyScoreDecoder.BeatmapNotFoundException e)
        {
            Logger.Log(e.Message, LoggingTarget.Information, LogLevel.Error);
            return null;
        }
    }
}
public void ArchiveThrowTest()
{
    // Assert.NotThrows
    IConfiguration config = new ConfigurationBuilder().AddInMemoryCollection(new List<KeyValuePair<string, string>>
    {
        new KeyValuePair<string, string>("startYear", currentYear.ToString()),
    }).Build();

    ArchiveReader reader = new ArchiveReader(settings, YearMock(currentYear));
    reader.GetYears();

    // Assert.Throws
    IAppSettings failSettings = Mock.Of<IAppSettings>(mock => mock.StartYear == currentYear + 1);
    ArchiveReader failReader = new ArchiveReader(failSettings, YearMock(currentYear));

    Assert.Throws<ArgumentOutOfRangeException>(failReader.GetYears);
}
/// <summary>
/// Create all required <see cref="BeatmapInfo"/>s for the provided archive.
/// </summary>
private List<BeatmapInfo> createBeatmapDifficulties(ArchiveReader reader)
{
    var beatmapInfos = new List<BeatmapInfo>();

    foreach (var name in reader.Filenames.Where(f => f.EndsWith(".osu")))
    {
        using (var raw = reader.GetStream(name))
        using (var ms = new MemoryStream()) // we need a memory stream so we can seek
        using (var sr = new StreamReader(ms))
        {
            raw.CopyTo(ms);
            ms.Position = 0;

            var decoder = Decoder.GetDecoder<Beatmap>(sr);
            IBeatmap beatmap = decoder.Decode(sr);

            beatmap.BeatmapInfo.Path = name;
            beatmap.BeatmapInfo.Hash = ms.ComputeSHA2Hash();
            beatmap.BeatmapInfo.MD5Hash = ms.ComputeMD5Hash();

            RulesetInfo ruleset = rulesets.GetRuleset(beatmap.BeatmapInfo.RulesetID);
            beatmap.BeatmapInfo.Ruleset = ruleset;

            if (ruleset != null)
            {
                // TODO: this should be done in a better place once we actually need to dynamically update it.
                var converted = new DummyConversionBeatmap(beatmap).GetPlayableBeatmap(ruleset);
                beatmap.BeatmapInfo.StarDifficulty = ruleset.CreateInstance().CreateDifficultyCalculator(converted).Calculate();
            }
            else
            {
                beatmap.BeatmapInfo.StarDifficulty = 0;
            }

            beatmapInfos.Add(beatmap.BeatmapInfo);
        }
    }

    return beatmapInfos;
}
public override List<ExplorerItem> GetSchemaAndBuildAssembly(IConnectionInfo cxInfo, AssemblyName assemblyToBuild, ref string nameSpace, ref string typeName)
{
    string fileName = cxInfo.DriverData.Element("FileName")?.Value;

    if (!Boolean.TryParse(cxInfo.DriverData.Element("Capitalize").Value, out bool capitalize))
    {
        capitalize = false;
    }

    if (!int.TryParse(cxInfo.DriverData.Element("BufferSize").Value, out int bufferSize))
    {
        bufferSize = 65536;
    }

    if (!Enum.TryParse(cxInfo.DriverData.Element("RowStrategy").Value, out RowStrategy rowStrategy))
    {
        rowStrategy = RowStrategy.Lazy;
    }

    nameSpace = "DwCArchive";
    typeName = "ArchiveDb";

    var driverFolder = GetDriverFolder();

    using (var archive = new ArchiveReader(fileName))
    {
        var coreFileMetaData = archive.CoreFile.FileMetaData;
        var extensionFileMetaData = archive.Extensions
            .GetFileReaders()
            .Select(n => n.FileMetaData);

        var archiveDbSchemaBuilder = new ArchiveDbAssemblyBuilder(capitalize,
            new FileReaderConfiguration() { BufferSize = bufferSize },
            new RowFactoryConfiguration() { Strategy = rowStrategy });

        archiveDbSchemaBuilder.GenerateArchiveDbAssembly(coreFileMetaData, extensionFileMetaData, assemblyToBuild.CodeBase, driverFolder);

        var linQPadSchemaGenerator = new LINQPadSchemaGenerator(capitalize);

        return linQPadSchemaGenerator.GenerateSchema(fileName, coreFileMetaData, extensionFileMetaData);
    }
}
public MainForm()
{
    InitializeComponent();

    this.ArchiveSaved = true;
    this.TitleList = new AutoCompleteStringCollection();
    this.EntryList = new List<Entry>();
    this.Opacity = 0;
    this.textBoxSearch.AutoCompleteCustomSource = TitleList;
    this.panel1.BackColor = Settings.UI.ColorAccent;
    this.labelSearchIcon.Text = Settings.UI.SearchIconText;
    this.Size = Properties.Settings.Default.MainSize;
    this.listView.Columns[0].Width = 400 + (this.Width - 600);
    this.listView.HideSelection = true;

    if (File.Exists(Settings.Files.ArchivePath))
    {
        LoadEntryList(ArchiveReader.ReadArchive(Session.MasterKey));
    }

    SearchMan = new Engine.Search.SearchManager(ref EntryList);

    Animation.FadeIn(this);
}
/* ----------------------------------------------------------------- */
///
/// Invoke
///
/// <summary>
/// Extracts an archive item of the specified index.
/// </summary>
///
/* ----------------------------------------------------------------- */
private void Invoke(ArchiveReader src, int index, ExtractDirectory dir)
{
    GetType().LogDebug($"Format:{src.Format}", $"Source:{src.Source}");
    SetDestination(src, dir);

    var item = src.Items[index];
    Retry(() => src.Save(Temp, item, GetProgress()));

    var dest = Io.Combine(Temp, item.FullName);

    if (Formatter.FromFile(dest) != Format.Tar)
    {
        Move(item);
    }
    else
    {
        using var e = new ArchiveReader(dest, Password, src.Options);
        Invoke(e, dir);
    }
}
/// <summary>
/// Attempts to disconnect from this <see cref="LocalInputAdapter"/>.
/// </summary>
protected override void AttemptDisconnection()
{
    if ((object)m_readTimer != null)
    {
        m_readTimer.Enabled = false;

        lock (m_readTimer)
            m_dataReader = null;
    }

    if ((object)m_archiveReader != null)
    {
        m_archiveReader.HistoricFileListBuildStart -= m_archiveReader_HistoricFileListBuildStart;
        m_archiveReader.HistoricFileListBuildComplete -= m_archiveReader_HistoricFileListBuildComplete;
        m_archiveReader.HistoricFileListBuildException -= m_archiveReader_HistoricFileListBuildException;
        m_archiveReader.DataReadException -= m_archiveReader_DataReadException;
        m_archiveReader.Dispose();
    }

    m_archiveReader = null;
}
/// <summary>
/// Import an item from an <see cref="ArchiveReader"/>.
/// </summary>
/// <param name="archive">The archive to be imported.</param>
public TModel Import(ArchiveReader archive)
{
    try
    {
        var model = CreateModel(archive);

        if (model == null)
        {
            return null;
        }

        model.Hash = computeHash(archive);

        return Import(model, archive);
    }
    catch (Exception e)
    {
        Logger.Error(e, $"Model creation of {archive.Name} failed.", LoggingTarget.Database);
        return null;
    }
}
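A hypothetical call site for this overload might look like the following; `manager` (an ArchiveModelManager-style instance) and an already-constructed `archive` are assumptions here, since concrete ArchiveReader implementations vary between codebases and versions:

// Assumed usage sketch only: `manager` and `archive` are placeholders for an
// existing manager instance and a concrete ArchiveReader implementation.
var imported = manager.Import(archive);

if (imported == null)
    Console.WriteLine($"Import of {archive.Name} failed; see the database log for details.");
else
    Console.WriteLine($"Imported {imported} with hash {imported.Hash}.");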
protected override Task Populate(BeatmapSetInfo beatmapSet, ArchiveReader archive, CancellationToken cancellationToken = default)
{
    if (archive != null)
    {
        beatmapSet.Beatmaps = createBeatmapDifficulties(beatmapSet.Files);
    }

    foreach (BeatmapInfo b in beatmapSet.Beatmaps)
    {
        // remove metadata from difficulties where it matches the set
        if (beatmapSet.Metadata.Equals(b.Metadata))
        {
            b.Metadata = null;
        }

        b.BeatmapSet = beatmapSet;
    }

    validateOnlineIds(beatmapSet);

    return updateQueue.UpdateAsync(beatmapSet, cancellationToken);
}
public void Set(ArchiveReader archive, List<byte[]> dataVector)
{
    this.Dispose();
    this._active = true;

    if (archive.ReadByte() != 0)
    {
        int num2 = archive.ReadNum();

        if ((num2 < 0) || (num2 >= dataVector.Count))
        {
            throw new InvalidOperationException();
        }

        Log.WriteLine("[switch to stream {0}]", new object[] { num2 });
        this._archive = archive;
        this._archive.AddByteStream(dataVector[num2], 0, dataVector[num2].Length);
        this._needRemove = true;
        this._active = true;
    }
    else
    {
        Log.WriteLine("[inline data]");
    }
}
/// <summary>
/// Create a SHA-2 hash from the provided archive based on file content of all files matching <see cref="HashableFileTypes"/>.
/// </summary>
/// <remarks>
/// In the case of no matching files, a hash will be generated from the passed archive's <see cref="ArchiveReader.Name"/>.
/// </remarks>
private string computeHash(TModel item, ArchiveReader reader = null)
{
    // for now, concatenate all .osu files in the set to create a unique hash.
    MemoryStream hashable = new MemoryStream();

    foreach (TFileModel file in item.Files.Where(f => HashableFileTypes.Any(f.Filename.EndsWith)).OrderBy(f => f.Filename))
    {
        using (Stream s = Files.Store.GetStream(file.FileInfo.StoragePath))
            s.CopyTo(hashable);
    }

    if (hashable.Length > 0)
    {
        return hashable.ComputeSHA2Hash();
    }

    if (reader != null)
    {
        return reader.Name.ComputeSHA2Hash();
    }

    return item.Hash;
}
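The hashing methods above rely on a `ComputeSHA2Hash()` extension supplied by the host framework, whose implementation isn't shown here. As a rough, hypothetical stand-in (deliberately named differently so it isn't mistaken for the real helper), an equivalent stream extension could look like this:

// Hypothetical stand-in for the framework's stream hashing helper; assumes the
// stream should be hashed from its start and rendered as lowercase hex.
public static class StreamHashExtensions
{
    public static string ComputeSha256Hex(this System.IO.Stream stream)
    {
        stream.Position = 0;

        using (var sha = System.Security.Cryptography.SHA256.Create())
        {
            byte[] hash = sha.ComputeHash(stream);
            return System.BitConverter.ToString(hash).Replace("-", string.Empty).ToLowerInvariant();
        }
    }
}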
public void Set(ArchiveReader archive, List<byte[]> dataVector)
{
    Dispose();
    _active = true;

    byte external = archive.ReadByte();

    if (external != 0)
    {
        int dataIndex = archive.ReadNum();

        if (dataIndex < 0 || dataIndex >= dataVector.Count)
            throw new InvalidDataException();

        Log.WriteLine("[switch to stream {0}]", dataIndex);

        _archive = archive;
        _archive.AddByteStream(dataVector[dataIndex], 0, dataVector[dataIndex].Length);
        _needRemove = true;
        _active = true;
    }
    else
    {
        Log.WriteLine("[inline data]");
    }
}