/// <summary>
/// Updates an existing enum entity: replaces its index record, rewrites the
/// entity file inside the current package, refreshes the cache and publishes
/// an <see cref="EnumUpdatedEvent"/>.
/// </summary>
/// <param name="enumEntity">The enum entity to update; ignored when null.</param>
public void Update(EnumEntityDev enumEntity)
{
    Debug.Assert(enumEntity != null, "enumEntity 为 null");
    if (enumEntity == null)
    {
        return;
    }
    // Locate the existing index record for this enum.
    XElement indexElement = _indexXml.XPathSelectElement(String.Format(XPATH_Index_SelectDictionary, enumEntity.Id));
    Debug.Assert(indexElement != null, "更新枚举索引时未找到指定枚举的索引记录");
    if (indexElement == null)
    {
        return;
    }
    // Swap in a freshly generated index entry and persist the index file.
    indexElement.ReplaceWith(ArchiveHelper.GetEntityArchiveIndex(enumEntity));
    SaveIndexFile();
    string serialized = enumEntity.ToXml();
    XElement parsed = XElement.Parse(serialized);
    // Overwrite the entity file in the package and refresh the cache.
    _packageService.Current.AddFileContent(serialized, Path.Combine(Constant.PACKAGE_DICTIONARY_FOLDER, enumEntity.Id));
    _cachingService.Add(enumEntity.Id, parsed);
    // Notify subscribers that the enum changed.
    _eventAggregator.GetEvent<EnumUpdatedEvent>().Publish(new EnumEventArgs(enumEntity));
}
/// <summary>
/// Displays the content listing of the given archive, both as plain text and
/// as a tree grouped by backslash-separated path segments.
/// </summary>
/// <param name="zipFile">Path of the archive whose content is displayed.</param>
public ArchiveContentForm(string zipFile)
{
    InitializeComponent();
    _zipFile = zipFile;
    var entries = ArchiveHelper.GetArchiveContent(zipFile);
    txtContent.Text = Helpers.Join(Environment.NewLine, entries);
    tvContent.BeginUpdate();
    tvContent.Nodes.Clear();
    tvContent.Nodes.Add("Archive", "Archive");
    foreach (var entry in entries)
    {
        // Walk (and create as needed) one tree level per path segment.
        var current = tvContent.Nodes[0];
        foreach (var segment in entry.Split(new[] { "\\" }, StringSplitOptions.RemoveEmptyEntries))
        {
            if (!current.Nodes.ContainsKey(segment))
            {
                current.Nodes.Add(segment, segment);
            }
            current = current.Nodes[segment];
        }
    }
    tvContent.EndUpdate();
}
/// <summary>
/// Reads the named attachment from the archive and parses it as JSON.
/// GUIDs inside the parsed token are remapped via <c>guidMapper</c>.
/// </summary>
/// <param name="name">Attachment name; must not be null or empty.</param>
/// <returns>The parsed JSON token.</returns>
/// <exception cref="FileNotFoundException">The attachment does not exist.</exception>
public async Task<JToken> ReadJsonAttachmentAsync(string name)
{
    Guard.NotNullOrEmpty(name, nameof(name));
    var entry = archive.GetEntry(ArchiveHelper.GetAttachmentPath(name));
    if (entry == null)
    {
        throw new FileNotFoundException("Cannot find attachment.", name);
    }
    JToken json;
    using (var entryStream = entry.Open())
    using (var textReader = new StreamReader(entryStream))
    using (var jsonReader = new JsonTextReader(textReader))
    {
        json = await JToken.ReadFromAsync(jsonReader);
        guidMapper.NewGuids(json);
    }
    readAttachments++;
    return json;
}
/// <summary>
/// Adds a new enum entity: appends its index record, writes the entity file
/// into the current package, caches the parsed XML and publishes an
/// <see cref="EnumAddedEvent"/>.
/// </summary>
/// <param name="enumEntity">The enum entity to add; ignored when null.</param>
public void Add(EnumEntityDev enumEntity)
{
    Debug.Assert(enumEntity != null, "enumEntity 为 null");
    if (enumEntity == null)
    {
        return;
    }
    // Append a new index record and persist the index file.
    _indexXml.XPathSelectElement(XPATH_Index_Dictionary).Add(ArchiveHelper.GetEntityArchiveIndex(enumEntity));
    SaveIndexFile();
    string serialized = enumEntity.ToXml();
    XElement parsed = XElement.Parse(serialized);
    // Store the entity file in the package and cache the parsed element.
    _packageService.Current.AddFileContent(serialized, Path.Combine(Constant.PACKAGE_DICTIONARY_FOLDER, enumEntity.Id));
    _cachingService.Add(enumEntity.Id, parsed);
    // Notify subscribers that a new enum exists.
    _eventAggregator.GetEvent<EnumAddedEvent>().Publish(new EnumEventArgs(enumEntity));
}
/// <summary>
/// Imports a widget bundle: extracts the archive into <paramref name="importPath"/>,
/// registers its mapping (when "widget.info" contains one) and copies all files
/// that live in sub-folders into the widget base path.
/// </summary>
/// <param name="archiveFile">Archive containing the widget bundle.</param>
/// <param name="importPath">Folder the archive is extracted into.</param>
/// <returns>true when a "widget.info" file was found and the import ran; false otherwise.</returns>
public bool WidgetImport(string archiveFile, string importPath)
{
    const string widgetInfoFile = "widget.info";
    var extractedFiles = ArchiveHelper.Unarchive(archiveFile, importPath);
    var infoFilePath = Path.Combine(importPath, widgetInfoFile);
    if (!File.Exists(infoFilePath))
    {
        return false;
    }
    // Read "widget.info" and, if a mapping is present, add it to "html/pages/control/widgets/configuration.json"
    var mapping = File.ReadAllText(infoFilePath);
    if (mapping.StartsWith("["))
    {
        AddWidgetMapping(mapping);
    }
    foreach (var extracted in extractedFiles)
    {
        var relativeDir = Path.GetDirectoryName(extracted);
        // copy only files contained in sub-folders, avoid copying zip-root files
        if (relativeDir == "")
        {
            continue;
        }
        var destFolder = Path.Combine(_widgetBasePath, relativeDir);
        if (!Directory.Exists(destFolder))
        {
            Directory.CreateDirectory(destFolder);
        }
        // TODO HGBE-10 Move custom widgets to data folder and correct JS and C# code related to widgets
        File.Copy(Path.Combine(importPath, extracted), Path.Combine(_widgetBasePath, extracted), true);
    }
    return true;
}
/// <summary>
/// Updates a data entity. The index record is replaced wholesale (old node
/// removed, fresh one added) so the XML details never need patching here;
/// the entity file, the cache and subscribers are refreshed as well.
/// </summary>
/// <param name="dataEntity">The data entity to update; ignored when null.</param>
public void Update(DataEntityDev dataEntity)
{
    Debug.Assert(dataEntity != null, "dataEntity 为 null");
    if (dataEntity == null)
    {
        return;
    }
    // Locate the existing index record for this data entity.
    XElement indexElement = _indexXml.XPathSelectElement(String.Format(XPATH_Index_SelectDataEntity, dataEntity.Id));
    Debug.Assert(indexElement != null, "更新数据实体索引时未找到指定数据实体的索引记录");
    if (indexElement == null)
    {
        return;
    }
    // Replace the index entry and persist the index file.
    indexElement.ReplaceWith(ArchiveHelper.GetEntityArchiveIndex(dataEntity));
    SaveIndexFile();
    string serialized = dataEntity.ToXml();
    XElement parsed = XElement.Parse(serialized);
    // Overwrite the entity file in the package and refresh the cache.
    _packageService.Current.AddFileContent(serialized, Path.Combine(Constant.PACKAGE_DATAENTITY_FOLDER, dataEntity.Id));
    _cachingService.Add(dataEntity.Id, parsed);
    // Notify subscribers that the data entity changed.
    _eventAggregator.GetEvent<DataEntityUpdatedEvent>().Publish(new DataEntityEventArgs(dataEntity));
}
/// <summary>
/// Adds a new data entity: appends its index record, writes the entity file
/// into the current package, caches the parsed XML and publishes a
/// <see cref="DataEntityAddedEvent"/>.
/// </summary>
/// <param name="dataEntity">The data entity to add; ignored when null.</param>
public void Add(DataEntityDev dataEntity)
{
    Debug.Assert(dataEntity != null, "dataEntity 为 null");
    if (dataEntity == null)
    {
        return;
    }
    // Append a new index record and persist the index file.
    _indexXml.XPathSelectElement(XPATH_Index_DataEntity).Add(ArchiveHelper.GetEntityArchiveIndex(dataEntity));
    SaveIndexFile();
    string serialized = dataEntity.ToXml();
    XElement parsed = XElement.Parse(serialized);
    // Store the entity file in the package and cache the parsed element.
    _packageService.Current.AddFileContent(serialized, Path.Combine(Constant.PACKAGE_DATAENTITY_FOLDER, dataEntity.Id));
    _cachingService.Add(dataEntity.Id, parsed);
    // Notify subscribers that a new data entity exists.
    _eventAggregator.GetEvent<DataEntityAddedEvent>().Publish(new DataEntityEventArgs(dataEntity));
}
/// <summary>
/// Saves and exports this project.
/// </summary>
/// <param name="exportAssetZip">If the assets folder should be exported as an archive with the pack.</param>
/// <returns>true when saving and bundling succeeded; false otherwise.</returns>
public static bool Export(bool exportAssetZip)
{
    EditorSceneManager.SaveScene(SceneManager.GetActiveScene());
    var projectFilePath = Path.Combine(Project.Value.ProjectDirectory, Project.Value.ProjectFile);
    if (!(Save() && AssetPack.CreateAssetBundle()))
    {
        Debug.LogWarning(string.Format("Failed saving project {0}", projectFilePath));
        return false;
    }
    File.Copy(projectFilePath, Path.Combine(Project.Value.ModDirectory, Project.Value.ProjectFile), true);
    var assetZipPath = Path.Combine(Project.Value.ModDirectory, "assets.zip");
    // Always delete the old file. It will get recreated if the user checked the checkbox.
    if (File.Exists(assetZipPath))
    {
        File.Delete(assetZipPath);
    }
    if (exportAssetZip)
    {
        Debug.Log(string.Format("Archiving {0} to {1}", Project.Value.ProjectDirectory, assetZipPath));
        ArchiveHelper.CreateZip(assetZipPath, Project.Value.ProjectDirectory);
    }
    var previewImagePath = Path.Combine(Project.Value.ProjectDirectory, "Resources/preview.png");
    File.Copy(previewImagePath, Path.Combine(Project.Value.ModDirectory, "preview.png"), true);
    return true;
}
/// <summary>
/// Decompresses the BLOB passed as the first argument and returns the text.
/// An optional second argument names the text encoding to use.
/// Returns null when no usable first argument is supplied.
/// </summary>
/// <param name="args">args[0]: byte[] BLOB; args[1] (optional): encoding name.</param>
public override object Invoke(object[] args)
{
    if (args == null || args.Length < 1 || args[0] == null)
    {
        return null;
    }
    if (args[0].GetType() != typeof(byte[]))
    {
        throw new Exception("DecompressText() requires BLOB as parameter.");
    }
    var blob = (byte[])args[0];
    var encoding = args.Length > 1 ? Convert.ToString(args[1]) : null;
    return ArchiveHelper.DecompressText(blob, encoding);
}
/// <summary>
/// Resolves the price file to process. When <paramref name="InFile"/> is an
/// archive, the real price file is looked up via the source's extraction mask;
/// otherwise the input file itself is used.
/// </summary>
/// <param name="InFile">Downloaded file (possibly an archive).</param>
/// <param name="ExtrFile">Resulting file to process; empty when nothing matched.</param>
/// <param name="sourceTypeId">Source type id passed to the download log.</param>
/// <returns>true when a usable file was found; false otherwise (failure is logged).</returns>
protected bool ProcessPriceFile(string InFile, out string ExtrFile, ulong sourceTypeId)
{
    ExtrFile = InFile;
    if (ArchiveHelper.IsArchive(InFile)) {
        // Only search inside the archive when an extraction mask is configured for this source.
        if ((drCurrent[SourcesTableColumns.colExtrMask] is String) && !String.IsNullOrEmpty(drCurrent[SourcesTableColumns.colExtrMask].ToString())) {
            // NOTE(review): looks like the archive is expected to be already extracted
            // into InFile + ExtrDirSuffix — confirm with the caller.
            ExtrFile = PriceProcessor.FileHelper.FindFromArhive(
                InFile + ExtrDirSuffix, (string)drCurrent[SourcesTableColumns.colExtrMask]);
        }
        else {
            ExtrFile = String.Empty;
        }
    }
    if (String.IsNullOrEmpty(ExtrFile)) {
        // Log: "could not find the file inside the archive; mask: '...'".
        DownloadLogEntity.Log(sourceTypeId, CurrPriceItemId, String.Format(
            "Не удалось найти файл в архиве. Маска файла в архиве : '{0}'",
            drCurrent[SourcesTableColumns.colExtrMask]));
        return(false);
    }
    return(true);
}
/// <summary>
/// Imports a program from an .hgx file and registers it under a new program id.
/// If the file starts with the ZIP signature ("PK") it is treated as an Arduino
/// bundle: it is renamed to .zip, unpacked to a temp "import" folder, its "src"
/// folder is moved under the programs/arduino/&lt;pid&gt; folder, and the inner
/// "program.hgx" is deserialized; otherwise the file itself is deserialized.
/// </summary>
/// <param name="newPid">Address (id) assigned to the imported program.</param>
/// <param name="archiveName">Path of the .hgx file to import.</param>
/// <param name="groupName">Group the imported program is placed in.</param>
/// <returns>The newly added program block (starts disabled; compiled unless Arduino).</returns>
public ProgramBlock ProgramImport(int newPid, string archiveName, string groupName)
{
    // Peek at the first two bytes to detect a ZIP archive ("PK").
    var reader = new StreamReader(archiveName);
    var signature = new char[2];
    reader.Read(signature, 0, 2);
    reader.Close();
    if (signature[0] == 'P' && signature[1] == 'K')
    {
        // Read and uncompress zip file content (arduino program bundle)
        var zipFileName = archiveName.Replace(".hgx", ".zip");
        if (File.Exists(zipFileName))
        {
            File.Delete(zipFileName);
        }
        File.Move(archiveName, zipFileName);
        var destFolder = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, Utility.GetTmpFolder(), "import");
        if (Directory.Exists(destFolder))
        {
            Directory.Delete(destFolder, true);
        }
        ArchiveHelper.Unarchive(zipFileName, destFolder);
        // Replace any previous bundle stored for this program id.
        var bundleFolder = Path.Combine(FilePaths.ProgramsFolder, "arduino", newPid.ToString());
        if (Directory.Exists(bundleFolder))
        {
            Directory.Delete(bundleFolder, true);
        }
        if (!Directory.Exists(Path.Combine(FilePaths.ProgramsFolder, "arduino")))
        {
            Directory.CreateDirectory(Path.Combine(FilePaths.ProgramsFolder, "arduino"));
        }
        Directory.Move(Path.Combine(destFolder, "src"), bundleFolder);
        // The actual program definition lives inside the extracted bundle.
        reader = new StreamReader(Path.Combine(destFolder, "program.hgx"));
    }
    else
    {
        reader = new StreamReader(archiveName);
    }
    var serializer = new XmlSerializer(typeof(ProgramBlock));
    var newProgram = (ProgramBlock)serializer.Deserialize(reader);
    reader.Close();
    newProgram.Address = newPid;
    newProgram.Group = groupName;
    _homegenie.ProgramManager.ProgramAdd(newProgram);
    // Imported programs start disabled; non-Arduino programs are compiled immediately.
    newProgram.IsEnabled = false;
    newProgram.ScriptErrors = "";
    newProgram.Engine.SetHost(_homegenie);
    if (newProgram.Type.ToLower() != "arduino")
    {
        _homegenie.ProgramManager.CompileScript(newProgram);
    }
    return(newProgram);
}
/// <summary>Round-trips a string through CompressText/DecompressText.</summary>
public void TestCompressDecompressText()
{
    const string original = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";

    var compressed = ArchiveHelper.CompressText(original);
    var restored = ArchiveHelper.DecompressText(compressed);

    Assert.Equal(original, restored);
}
/// <summary>Round-trips a random 1000-element float vector through CompressVectorF/DecompressVectorF.</summary>
public void TestCompressDecompressVectorF()
{
    var original = Vector<float>.Build.Random(1000);

    var compressed = ArchiveHelper.CompressVectorF(original);
    var restored = ArchiveHelper.DecompressVectorF(compressed);

    Assert.Equal(original, restored);
}
/// <summary>Verifies that ArchiveHelper.IsArchive can probe a file without throwing.</summary>
public void TestUseLocation()
{
    try
    {
        ArchiveHelper.IsArchive(Environment.CurrentDirectory + "\\Data\\552.dbf");
    }
    catch (Exception ex)
    {
        // Message (Russian): "got an unexpected exception while invoking ArchiveHelper".
        Assert.Fail("получили неожидаемое исключение при активации ArchiveHelper: {0}", ex);
    }
}
/// <summary>
/// Fetches a single file from a running container as a tar archive and
/// returns its content as a string.
/// </summary>
/// <param name="containerOperations">Container API to fetch from.</param>
/// <param name="containerId">Target container id.</param>
/// <param name="sourcePath">Path of the file inside the container.</param>
public static async Task<string> GetFileContentFromContainerAsync(this IContainerOperations containerOperations, string containerId, string sourcePath)
{
    var parameters = new GetArchiveFromContainerParameters { Path = sourcePath };
    var response = await containerOperations
        .GetArchiveFromContainerAsync(containerId, parameters, false)
        .ConfigureAwait(false);
    return await ArchiveHelper.ExtractSingleFileFromTarToStringAsync(response.Stream);
}
/// <summary>
/// Download one Maven package and extract it to the target directory.
/// Fetches the -javadoc, -sources and main jar for the given coordinates.
/// </summary>
/// <param name="purl">Package URL of the package to download.</param>
/// <param name="doExtract">Whether to extract the downloaded jars.</param>
/// <param name="cached">Whether a previously extracted copy may be reused.</param>
/// <returns>Paths of the downloaded (or cached) artifacts.</returns>
public override async Task<IEnumerable<string>> DownloadVersionAsync(PackageURL purl, bool doExtract, bool cached = false)
{
    Logger.Trace("DownloadVersion {0}", purl?.ToString());
    string? packageNamespace = purl?.Namespace?.Replace('.', '/');
    string? packageName = purl?.Name;
    string? packageVersion = purl?.Version;
    List<string> downloadedPaths = new();
    if (string.IsNullOrWhiteSpace(packageNamespace) || string.IsNullOrWhiteSpace(packageName) ||
        string.IsNullOrWhiteSpace(packageVersion))
    {
        // Fix: the message logged three values but said "Both" — all three are required.
        Logger.Warn("Unable to download [{0} {1} {2}]. All three must be defined.", packageNamespace, packageName, packageVersion);
        return downloadedPaths;
    }
    try
    {
        // Javadoc and sources jars first, then the main artifact ("" suffix).
        string[] suffixes = new string[] { "-javadoc", "-sources", "" };
        foreach (string suffix in suffixes)
        {
            string url = $"{ENV_MAVEN_ENDPOINT}/{packageNamespace}/{packageName}/{packageVersion}/{packageName}-{packageVersion}{suffix}.jar";
            HttpClient httpClient = CreateHttpClient();
            System.Net.Http.HttpResponseMessage result = await httpClient.GetAsync(url);
            result.EnsureSuccessStatusCode();
            Logger.Debug($"Downloading {purl}...");
            string targetName = $"maven-{packageNamespace}-{packageName}{suffix}@{packageVersion}";
            targetName = targetName.Replace('/', '-');
            string extractionPath = Path.Combine(TopLevelExtractionDirectory, targetName);
            // Reuse a cached extraction when allowed (returns on the first cached hit).
            if (doExtract && Directory.Exists(extractionPath) && cached == true)
            {
                downloadedPaths.Add(extractionPath);
                return downloadedPaths;
            }
            if (doExtract)
            {
                downloadedPaths.Add(await ArchiveHelper.ExtractArchiveAsync(TopLevelExtractionDirectory, targetName, await result.Content.ReadAsStreamAsync(), cached));
            }
            else
            {
                extractionPath += Path.GetExtension(url) ?? "";
                await File.WriteAllBytesAsync(extractionPath, await result.Content.ReadAsByteArrayAsync());
                downloadedPaths.Add(extractionPath);
            }
        }
    }
    catch (Exception ex)
    {
        Logger.Warn(ex, "Error downloading Maven package: {0}", ex.Message);
    }
    return downloadedPaths;
}
/// <summary>
/// Brings this document's physical location under the home folder according to
/// the requested import action (cut/move, clone/copy, or refer in place), then
/// rewrites the location URI to the internal HomeRootPathProtocol form.
/// Locations that are already internal are left untouched.
/// Note: <c>Path</c> here is a member property, which is why System.IO.Path is
/// referenced fully qualified.
/// </summary>
/// <param name="action">How the underlying file should be imported.</param>
public void Internalize(ImportAction action)
{
    // Check for internalization
    if (_PhysicalLocationURI.IndexOf(HomeRootPathProtocol) != 0) // If we are not using an internal address
    {
        // Check to see if already in home, so we condense the path a bit
        if (action == ImportAction.Refer)
        {
            if (Path.IndexOf(Home.Location) == 0)
            {
                // File is already inside home: just rewrite to the protocol-relative form.
                _PhysicalLocationURI = HomeRootPathProtocol + _PhysicalLocationURI.Substring(Home.Location.Length);
            }
            else
            {
                // Referring to a file outside home: nothing to internalize.
                return;
            }
        }
        // Replace path
        else
        {
            string extension = System.IO.Path.GetExtension(Path);
            string relativePath;
            string newPath = GetLegalPhysicalLocation(out relativePath, extension);
            // Take appropriate file action using new path
            switch (action)
            {
                case ImportAction.Cut:
                    FileHelper.MoveOrRenameDirectoryOrFile(Path, newPath);
                    break;
                case ImportAction.Clone:
                    // Archive documents are directory-backed, so clone recursively;
                    // plain documents are single files and are copied directly.
                    if (Type == DocumentType.Archive)
                    {
                        ArchiveHelper.CopyDirectory(Path, newPath);
                    }
                    else
                    {
                        System.IO.File.Copy(Path, newPath);
                    }
                    break;
                case ImportAction.Refer:
                    throw new InvalidOperationException("Should have been handled.");
                default:
                    break;
            }
            // Change file path
            _PhysicalLocationURI = HomeRootPathProtocol + relativePath;
            // Notice original file's meta-name remains unchanged: meta names will tend to diverge from actual file name
        }
    }
}
/// <summary>
/// Downloads the source of the repository referenced by <paramref name="model"/>
/// (SVN for CodePlex/Google Code, a Mercurial-over-SVN URL rewrite hack for
/// Google Code hg repos, or GitHub), archives the fetched folder and returns
/// the archive path. The working folder is deleted after archiving.
/// </summary>
/// <param name="model">Download request carrying the repository URL and archive type.</param>
/// <returns>Path of the created archive.</returns>
public string Run(DownloadModel model)
{
    Guard.AssertNotNull(model, "Source code repository URL is required.");
    Guard.AssertNotNullOrEmpty(model.Url, "Source code repository URL is required.");
    _targetUrl = new Uri(model.Url);
    RepositoryType repositoryType = RepositoryHelper.GetRepositoryTypeFromUrl(_targetUrl);
    RepositoryBase repositoryBase;
    Arguments args = new Arguments { Url = _targetUrl.ToString(), Revision = null };
    Results? results = null;
    if (repositoryType == RepositoryType.GOOGLECODE_HG)
    {
        // hack - https://code.google.com/r/steverauny-treeview/ - https://steverauny-treeview.googlecode.com/hg/
        // NOTE(review): args.Url still holds the pre-rewrite URL here — confirm this is intended.
        string segment = _targetUrl.Segments[2].Replace("/", "");
        string newUrl = string.Format("{0}://{1}.{2}", _targetUrl.Scheme, segment, "googlecode.com/hg/");
        _targetUrl = new Uri(newUrl);
        repositoryBase = new SvnHttpManager();
        results = repositoryBase.Run(args);
    }
    else if (repositoryType == RepositoryType.CODEPLEX_SVN || repositoryType == RepositoryType.GOOGLECODE_SVN)
    {
        repositoryBase = new SvnHttpManager();
        results = repositoryBase.Run(args);
    }
    else if (repositoryType == RepositoryType.GITHUB)
    {
        repositoryBase = new GithubHttpManager();
        results = repositoryBase.Run(args);
    }
    if (results.HasValue)
    {
        TargetFolder = results.Value.Path;
    }
    ArchiveType archive = model.ArchiveType == ".tar.gz" ? ArchiveType.TAR_GZ : ArchiveType.ZIP;
    string archivePath = new ArchiveHelper().CreateArchive(TargetFolder, archive);
    // Fix: removed a catch-all that only rethrew (`catch (Exception) { throw; }`) —
    // it was a no-op; any deletion failure still propagates exactly as before.
    IOHelper.DeleteDirectory(TargetFolder);
    return archivePath;
}
/// <summary>
/// Verifies that a gzipped tar archive is extracted into its two expected files.
/// </summary>
public void ExtractFilesFromTarGz_WithGzipTarContentSpecified_ExtractsArchive()
{
    // Arrange
    // Act
    List<InMemoryFile> filesInArchive = new ArchiveHelper().ExtractFilesFromTarGz(validFileContents);

    // Assert
    Assert.True(filesInArchive.Count == 2, $"Extracted archive must contain 2 files.");
    Assert.True(string.Compare(Path.GetFileName(filesInArchive[0].FileName), "terms_of_service.css") == 0, $"Returned file 1 must have name of 'terms_of_service.css'.");
    // Fix: failure message said "file 1" for the second file (copy/paste error).
    Assert.True(string.Compare(Path.GetFileName(filesInArchive[1].FileName), "terms_of_service.html") == 0, $"Returned file 2 must have name of 'terms_of_service.html'.");
}
/// <summary>
/// Extracts <paramref name="file"/> into <paramref name="dstDir"/> when it is a
/// valid archive; returns null when it is not an archive or fails the archive test.
/// </summary>
/// <param name="file">Candidate archive file.</param>
/// <param name="dstDir">Destination directory for extraction.</param>
/// <param name="password">Optional archive password.</param>
public static string[] TryExtractArchive(string file, string dstDir, string password = null)
{
    // Short-circuit keeps the original evaluation order: IsArchive first, TestArchive only if it is one.
    if (!ArchiveHelper.IsArchive(file) || !ArchiveHelper.TestArchive(file))
    {
        return null;
    }
    return ExtractFromArhive(file, dstDir, password);
}
/// <summary>
/// Download one Cargo package and extract it to the target directory.
/// </summary>
/// <param name="purl"> Package URL of the package to download. </param>
/// <param name="doExtract"> Whether to extract the downloaded crate. </param>
/// <param name="cached"> Whether a previously extracted copy may be reused. </param>
/// <returns> Path to the downloaded package </returns>
public override async Task<IEnumerable<string>> DownloadVersionAsync(PackageURL purl, bool doExtract, bool cached = false)
{
    Logger.Trace("DownloadVersion {0}", purl?.ToString());
    string? packageName = purl?.Name;
    string? packageVersion = purl?.Version;
    string? fileName = purl?.ToStringFilename();
    List<string> downloadedPaths = new();
    if (string.IsNullOrWhiteSpace(packageName) || string.IsNullOrWhiteSpace(packageVersion) ||
        string.IsNullOrWhiteSpace(fileName))
    {
        Logger.Debug("Error with 'purl' argument. Unable to download [{0} {1}] @ {2}. Both must be defined.", packageName, packageVersion, fileName);
        return downloadedPaths;
    }
    string url = $"{ENV_CARGO_ENDPOINT}/api/v1/crates/{packageName}/{packageVersion}/download";
    try
    {
        string targetName = $"cargo-{fileName}";
        string extractionPath = Path.Combine(TopLevelExtractionDirectory, targetName);
        // if the cache is already present, no need to extract
        if (doExtract && cached && Directory.Exists(extractionPath))
        {
            downloadedPaths.Add(extractionPath);
            return downloadedPaths;
        }
        Logger.Debug("Downloading {0}", url);
        HttpClient httpClient = CreateHttpClient();
        System.Net.Http.HttpResponseMessage response = await httpClient.GetAsync(url);
        response.EnsureSuccessStatusCode();
        if (doExtract)
        {
            downloadedPaths.Add(await ArchiveHelper.ExtractArchiveAsync(TopLevelExtractionDirectory, targetName, await response.Content.ReadAsStreamAsync(), cached));
        }
        else
        {
            extractionPath += Path.GetExtension(url) ?? "";
            await File.WriteAllBytesAsync(extractionPath, await response.Content.ReadAsByteArrayAsync());
            downloadedPaths.Add(extractionPath);
        }
    }
    catch (Exception ex)
    {
        Logger.Debug(ex, "Error downloading Cargo package: {0}", ex.Message);
    }
    return downloadedPaths;
}
/// <summary>
/// Download one Hackage (Haskell) package and extract it to the target directory.
/// </summary>
/// <param name="purl"> Package URL of the package to download. </param>
/// <param name="doExtract"> Whether to extract the downloaded tarball. </param>
/// <param name="cached"> Whether a previously extracted copy may be reused. </param>
/// <returns> n/a </returns>
public override async Task<IEnumerable<string>> DownloadVersionAsync(PackageURL purl, bool doExtract, bool cached = false)
{
    Logger.Trace("DownloadVersion {0}", purl?.ToString());
    if (purl is null || purl.Name is null || purl.Version is null)
    {
        return Array.Empty<string>();
    }
    string packageName = purl.Name;
    string packageVersion = purl.Version;
    if (string.IsNullOrWhiteSpace(packageName) || string.IsNullOrWhiteSpace(packageVersion))
    {
        Logger.Debug("Unable to download [{0} {1}]. Both must be defined.", packageName, packageVersion);
        return Array.Empty<string>();
    }
    List<string> downloadedPaths = new();
    try
    {
        string url = $"{ENV_HACKAGE_ENDPOINT}/package/{packageName}-{packageVersion}/{packageName}-{packageVersion}.tar.gz";
        HttpClient httpClient = CreateHttpClient();
        System.Net.Http.HttpResponseMessage response = await httpClient.GetAsync(url);
        response.EnsureSuccessStatusCode();
        Logger.Debug("Downloading {0}...", purl.ToString());
        string targetName = $"hackage-{packageName}@{packageVersion}";
        string extractionPath = Path.Combine(TopLevelExtractionDirectory, targetName);
        // Reuse a cached extraction when allowed.
        if (doExtract && Directory.Exists(extractionPath) && cached == true)
        {
            downloadedPaths.Add(extractionPath);
            return downloadedPaths;
        }
        if (doExtract)
        {
            downloadedPaths.Add(await ArchiveHelper.ExtractArchiveAsync(TopLevelExtractionDirectory, targetName, await response.Content.ReadAsStreamAsync(), cached));
        }
        else
        {
            extractionPath += Path.GetExtension(url) ?? "";
            await File.WriteAllBytesAsync(extractionPath, await response.Content.ReadAsByteArrayAsync());
            downloadedPaths.Add(extractionPath);
        }
    }
    catch (Exception ex)
    {
        Logger.Debug(ex, "Error downloading Hackage package: {0}", ex.Message);
    }
    return downloadedPaths;
}
/// <summary>
/// For each comma-separated tag, downloads the addon archive from the given
/// site and emits C# mapping snippets for the archive's top-level folders into
/// txtOut: the first folder becomes the main addon mapping, the remaining
/// folders are emitted as sub-addons of it. Failures per tag are reported and
/// the loop continues.
/// </summary>
/// <param name="tag">Comma-separated list of addon tags.</param>
/// <param name="siteId">Addon site to resolve download links against.</param>
private void CreateMappings(string tag, AddonSiteId siteId)
{
    txtOut.Text = string.Empty;
    var tagList = tag.Split(',');
    foreach (var itTag in tagList)
    {
        var currTag = itTag.Trim();
        try
        {
            var site = AddonSiteBase.GetSite(siteId);
            var map = new Mapping(siteId, currTag) { Addon = new Addon("tag") };
            var downLink = site.GetFilePath(map);
            var archiveFilePath = WebHelper.DownloadFileToTemp(downLink);
            var cont = ArchiveHelper.GetArchiveContent(archiveFilePath);
            // Collect the distinct top-level folder names inside the archive.
            var folderList = new List<string>();
            foreach (var s in cont)
            {
                var index = s.IndexOf("\\");
                if (index > 0)
                {
                    Helpers.AddIfNeeded(folderList, s.Substring(0, index));
                }
            }
            var main = string.Empty;
            for (var i = 0; i < folderList.Count; i++)
            {
                var s = folderList[i];
                if (i == 0)
                {
                    // First folder: emit the main mapping line and remember its name.
                    txtOut.Text += string.Format(@"GetAddon(""{0}"").Mappings.Add(new Mapping(""{1}"", AddonSiteId.{2}));{3}", s, currTag, siteId, Environment.NewLine);
                    main = s;
                }
                else
                {
                    // Remaining folders are emitted as sub-addons of the first one.
                    txtOut.Text += string.Format(@"GetAddon(""{0}"").SubAddons.Add(GetAddon(""{1}""));{2}", main, s, Environment.NewLine);
                }
            }
        }
        catch
        {
            // Best-effort: report the failed tag and carry on with the next one.
            txtOut.Text += string.Format(@"Tag ""{0}"" failed{1}", currTag, Environment.NewLine);
        }
        txtOut.Text += Environment.NewLine;
        // Keep the UI responsive between downloads.
        Application.DoEvents();
        //System.Threading.Thread.Sleep(1000);
    }
}
/// <summary>
/// Returns the archive entry for the named attachment and counts the read.
/// </summary>
/// <param name="name">Attachment name.</param>
/// <exception cref="FileNotFoundException">Entry is missing or empty.</exception>
private ZipArchiveEntry GetEntry(string name)
{
    var entry = archive.GetEntry(ArchiveHelper.GetAttachmentPath(name));
    if (entry == null || entry.Length == 0)
    {
        throw new FileNotFoundException("Cannot find attachment.", name);
    }
    readAttachments++;
    return entry;
}
/// <summary>
/// Creates the attachment entry for <paramref name="name"/> and lets
/// <paramref name="handler"/> write its content into the entry stream.
/// </summary>
/// <param name="name">Attachment name; must not be null or empty.</param>
/// <param name="handler">Callback that writes the blob content.</param>
public async Task WriteBlobAsync(string name, Func<Stream, Task> handler)
{
    Guard.NotNullOrEmpty(name);
    Guard.NotNull(handler);
    var entry = archive.CreateEntry(ArchiveHelper.GetAttachmentPath(name));
    using (var entryStream = entry.Open())
    {
        await handler(entryStream);
    }
    writtenAttachments++;
}
/// <summary>
/// Download one NPM package and extract it to the target directory.
/// The tarball URL is resolved from the registry metadata for the exact version.
/// </summary>
/// <param name="purl">Package URL of the package to download.</param>
/// <param name="doExtract">Whether to extract the downloaded tarball.</param>
/// <param name="cached">Whether a previously extracted copy may be reused.</param>
/// <returns>n/a</returns>
public override async Task<IEnumerable<string>> DownloadVersionAsync(PackageURL purl, bool doExtract, bool cached = false)
{
    Logger.Trace("DownloadVersion {0}", purl?.ToString());
    string? packageName = purl?.Name;
    string? packageVersion = purl?.Version;
    List<string> downloadedPaths = new();
    // shouldn't happen here, but check
    if (string.IsNullOrWhiteSpace(packageName) || string.IsNullOrWhiteSpace(packageVersion))
    {
        Logger.Debug("Unable to download [{0} {1}]. Both must be defined.", packageName, packageVersion);
        return downloadedPaths;
    }
    try
    {
        HttpClient httpClient = CreateHttpClient();
        JsonDocument metadata = await GetJsonCache(httpClient, $"{ENV_NPM_API_ENDPOINT}/{packageName}");
        string? tarballUrl = metadata.RootElement
            .GetProperty("versions")
            .GetProperty(packageVersion)
            .GetProperty("dist")
            .GetProperty("tarball")
            .GetString();
        HttpResponseMessage response = await httpClient.GetAsync(tarballUrl);
        response.EnsureSuccessStatusCode();
        Logger.Debug("Downloading {0}...", purl?.ToString());
        string targetName = $"npm-{packageName}@{packageVersion}";
        string extractionPath = Path.Combine(TopLevelExtractionDirectory, targetName);
        // Reuse a cached extraction when allowed.
        if (doExtract && Directory.Exists(extractionPath) && cached == true)
        {
            downloadedPaths.Add(extractionPath);
            return downloadedPaths;
        }
        if (doExtract)
        {
            downloadedPaths.Add(await ArchiveHelper.ExtractArchiveAsync(TopLevelExtractionDirectory, targetName, await response.Content.ReadAsStreamAsync(), cached));
        }
        else
        {
            extractionPath += Path.GetExtension(tarballUrl) ?? "";
            await File.WriteAllBytesAsync(extractionPath, await response.Content.ReadAsByteArrayAsync());
            downloadedPaths.Add(extractionPath);
        }
    }
    catch (Exception ex)
    {
        Logger.Debug(ex, "Error downloading NPM package: {0}", ex.Message);
    }
    return downloadedPaths;
}
/// <summary>Round-trips a 1000-byte repeating pattern through CompressBytes/DecompressBytes.</summary>
public void TestCompressDecompress()
{
    var original = new byte[1000];
    for (var i = 0; i < original.Length; i++)
    {
        original[i] = (byte)((i + 1) % 256);
    }

    var compressed = ArchiveHelper.CompressBytes(original);
    var restored = ArchiveHelper.DecompressBytes(compressed);

    Assert.Equal(original, restored);
}
/// <summary>
/// Download one Go package and extract it to the target directory.
/// (The previous summary said "RubyGems", but the code downloads from the Go
/// module proxy endpoint and logs Go-package errors.)
/// </summary>
/// <param name="purl"> Package URL of the package to download. </param>
/// <param name="doExtract"> Whether to extract the downloaded archive. </param>
/// <param name="cached"> Whether a previously extracted copy may be reused. </param>
/// <returns> n/a </returns>
public override async Task <IEnumerable <string> > DownloadVersionAsync(PackageURL purl, bool doExtract, bool cached = false)
{
    Logger.Trace("DownloadVersion {0}", purl?.ToString());
    string? packageNamespace = purl?.Namespace;
    string? packageName = purl?.Name;
    string? packageVersion = purl?.Version;
    List <string> downloadedPaths = new();
    if (string.IsNullOrWhiteSpace(packageNamespace) || string.IsNullOrWhiteSpace(packageName) ||
        string.IsNullOrWhiteSpace(packageVersion))
    {
        Logger.Debug("Unable to download [{0} {1} {2}]. All three must be defined.", packageNamespace, packageName, packageVersion);
        return(downloadedPaths);
    }
    try
    {
        // Module proxy paths are lower-case by convention.
        string url = $"{ENV_GO_PROXY_ENDPOINT}/{packageNamespace.ToLowerInvariant()}/{packageName.ToLowerInvariant()}/@v/{packageVersion}.zip";
        HttpClient httpClient = CreateHttpClient();
        System.Net.Http.HttpResponseMessage result = await httpClient.GetAsync(url);
        result.EnsureSuccessStatusCode();
        Logger.Debug("Downloading {0}...", purl);
        string targetName = $"golang-{packageNamespace}-{packageName}@{packageVersion}";
        string extractionPath = Path.Combine(TopLevelExtractionDirectory, targetName);
        // Reuse a cached extraction when allowed.
        if (doExtract && Directory.Exists(extractionPath) && cached == true)
        {
            downloadedPaths.Add(extractionPath);
            return(downloadedPaths);
        }
        if (doExtract)
        {
            downloadedPaths.Add(await ArchiveHelper.ExtractArchiveAsync(TopLevelExtractionDirectory, targetName, await result.Content.ReadAsStreamAsync(), cached));
        }
        else
        {
            extractionPath += Path.GetExtension(url) ?? "";
            await File.WriteAllBytesAsync(extractionPath, await result.Content.ReadAsByteArrayAsync());
            downloadedPaths.Add(extractionPath);
        }
    }
    catch (Exception ex)
    {
        Logger.Debug(ex, "Error downloading Go package: {0}", ex.Message);
    }
    return(downloadedPaths);
}
/// <summary>
/// Serializes one stored event into a new archive entry named by the running
/// event counter, then advances the counter.
/// </summary>
/// <param name="storedEvent">Event to persist; must not be null.</param>
public void WriteEvent(StoredEvent storedEvent)
{
    Guard.NotNull(storedEvent);
    var entry = archive.CreateEntry(ArchiveHelper.GetEventPath(writtenEvents));
    using (var entryStream = entry.Open())
    {
        serializer.Serialize(converter(storedEvent), entryStream);
    }
    writtenEvents++;
}
/// <summary>
/// Serializes <paramref name="value"/> into the attachment entry for
/// <paramref name="name"/> and counts the write. Completes synchronously.
/// </summary>
/// <param name="name">Attachment name; must not be null or empty.</param>
/// <param name="value">Object to serialize.</param>
public Task WriteJsonAsync(string name, object value)
{
    Guard.NotNullOrEmpty(name);
    var entry = archive.CreateEntry(ArchiveHelper.GetAttachmentPath(name));
    using (var entryStream = entry.Open())
    {
        serializer.Serialize(value, entryStream);
    }
    writtenAttachments++;
    return TaskHelper.Done;
}