/// <summary>
/// Builds a zip archive in <paramref name="archiveStream"/> from the contents of
/// <paramref name="directory"/>: folder entries first, then file entries with their data.
/// </summary>
/// <param name="directory">Directory whose files and folders are archived.</param>
/// <param name="archiveStream">Destination stream; left open when the archive is disposed (leaveOpen: true).</param>
/// <param name="mode">Mode the ZipArchive is opened in.</param>
public static async Task CreateFromDir(string directory, Stream archiveStream, ZipArchiveMode mode)
{
    var files = FileData.InPath(directory);
    using (ZipArchive archive = new ZipArchive(archiveStream, mode, true))
    {
        // Emit directory entries first so they precede their contained files.
        foreach (var i in files)
        {
            if (i.IsFolder)
            {
                string entryName = i.FullName;
                // Zip format uses forward slashes; a trailing '/' marks a directory entry.
                ZipArchiveEntry e = archive.CreateEntry(entryName.Replace('\\', '/') + "/");
                e.LastWriteTime = i.LastModifiedDate;
            }
        }

        foreach (var i in files)
        {
            if (i.IsFile)
            {
                string entryName = i.FullName;
                // FIX: dispose the temp copy stream; the original leaked it.
                using (var installStream = await StreamHelpers.CreateTempCopyStream(Path.Combine(i.OrigFolder, i.FullName)))
                {
                    if (installStream != null)
                    {
                        ZipArchiveEntry e = archive.CreateEntry(entryName.Replace('\\', '/'));
                        e.LastWriteTime = i.LastModifiedDate;
                        using (Stream entryStream = e.Open())
                        {
                            installStream.CopyTo(entryStream);
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Verifies that a ZipArchive opened in Read mode rejects every mutating operation, that the
/// entry stream it hands out is read-only and non-seekable, and that the expected exception
/// types (a mix of NotSupportedException and ObjectDisposedException) are thrown once the
/// archive is disposed.
/// </summary>
public static async Task ReadModeInvalidOpsTest()
{
    ZipArchive archive = new ZipArchive(await StreamHelpers.CreateTempCopyStream(ZipTest.zfile("normal.zip")), ZipArchiveMode.Read);
    ZipArchiveEntry e = archive.GetEntry("first.txt");

    //should also do it on deflated stream

    //on archive
    Assert.Throws<NotSupportedException>(() => archive.CreateEntry("hi there")); //"Should not be able to create entry"

    //on entry
    Assert.Throws<NotSupportedException>(() => e.Delete()); //"Should not be able to delete entry"
    //Throws<NotSupportedException>(() => e.MoveTo("dirka"));
    Assert.Throws<NotSupportedException>(() => e.LastWriteTime = new DateTimeOffset()); //"Should not be able to update time"

    //on stream: opened from a Read-mode archive, so only reading is supported
    Stream s = e.Open();
    Assert.Throws<NotSupportedException>(() => s.Flush()); //"Should not be able to flush on read stream"
    Assert.Throws<NotSupportedException>(() => s.WriteByte(25)); //"should not be able to write to read stream"
    Assert.Throws<NotSupportedException>(() => s.Position = 4); //"should not be able to seek on read stream"
    Assert.Throws<NotSupportedException>(() => s.Seek(0, SeekOrigin.Begin)); //"should not be able to seek on read stream"
    Assert.Throws<NotSupportedException>(() => s.SetLength(0)); //"should not be able to resize read stream"

    archive.Dispose();

    //after disposed: note the expected exception type differs per member
    Assert.Throws<ObjectDisposedException>(() => { var x = archive.Entries; }); //"Should not be able to get entries on disposed archive"
    Assert.Throws<NotSupportedException>(() => archive.CreateEntry("dirka")); //"should not be able to create on disposed archive"
    Assert.Throws<ObjectDisposedException>(() => e.Open()); //"should not be able to open on disposed archive"
    Assert.Throws<NotSupportedException>(() => e.Delete()); //"should not be able to delete on disposed archive"
    Assert.Throws<ObjectDisposedException>(() => { e.LastWriteTime = new DateTimeOffset(); }); //"Should not be able to update on disposed archive"
    Assert.Throws<NotSupportedException>(() => s.ReadByte()); //"should not be able to read on disposed archive"

    s.Dispose();
}
/// <summary>
/// Verifies that the response stream's data matches the expected stream value.
/// </summary>
/// <param name="expectedStreamValue">The expected stream value (may represent null).</param>
/// <param name="response">The stream response whose content is verified.</param>
/// <returns>True when the response stream content equals the expected bytes.</returns>
protected bool VerifyStreams(AstoriaQueryStreamValue expectedStreamValue, DataServiceStreamResponse response)
{
    // A null expected value compares equal to an empty stream.
    byte[] expectedBytes = expectedStreamValue.IsNull ? new byte[0] : expectedStreamValue.Value;
    var expectedStream = new MemoryStream(expectedBytes);
    try
    {
        ExceptionUtilities.Assert(response.Stream.CanRead, "Cannot read from the stream");
        return StreamHelpers.CompareStream(response.Stream, expectedStream);
    }
    finally
    {
        // Both streams are consumed either way; always release them.
        response.Stream.Dispose();
        expectedStream.Dispose();
    }
}
/// <summary>
/// Decrypts the PGP-encrypted stream <paramref name="st"/> to <paramref name="destinationFile"/>
/// using the private key and password retrieved from the vault; the key ring bundle is cached.
/// </summary>
private async Task DecryptAsync(Stream st, string destinationFile)
{
    // Fetch both secrets before entering the lock — no awaits are allowed inside it.
    var password = await Vault.GetSecretAsync(Bal.Services.Vault.CommonSecretUris.TraffkPgpPrivateKeyPasswordUri);
    var privateKey = await Vault.GetSecretAsync(Bal.Services.Vault.CommonSecretUris.TraffkPgpPrivateKeyUri);
    lock (DecryptAsyncLocker)
    {
        // The key ring bundle is expensive to build, so it is cached per private key.
        var ring = Cache.DataCacher.FindOrCreateValWithSimpleKey(privateKey, () =>
        {
            using (var keyStream = StreamHelpers.Create(privateKey, System.Text.ASCIIEncoding.ASCII))
            {
                return EasyNetPGP.PgpEncryptorDecryptor.CreatePgpSecretKeyRingBundle(keyStream);
            }
        });
        EasyNetPGP.PgpEncryptorDecryptor.DecryptFile(st, null, password, destinationFile, ring);
    }
}
/// <summary>
/// Initiates an asynchronous server-side copy of <paramref name="sourceDatabase"/> on
/// <paramref name="sourceServer"/> into a new database named
/// <paramref name="destinationDatabase"/> via the Azure SQL management REST API (PUT).
/// </summary>
/// <remarks>https://docs.microsoft.com/en-us/rest/api/</remarks>
/// <remarks>https://docs.microsoft.com/en-us/rest/api/sql/databases</remarks>
/// <remarks>https://docs.microsoft.com/en-us/azure/sql-database/sql-database-copy</remarks>
/// <remarks>https://docs.microsoft.com/en-us/powershell/module/azure/start-azuresqldatabasecopy?view=azuresmps-4.0.0</remarks>
/// <remarks>https://docs.microsoft.com/en-us/azure/azure-resource-manager/powershell-azure-resource-manager</remarks>
/// <remarks>https://docs.microsoft.com/en-us/azure/sql-database/scripts/sql-database-copy-database-to-new-server-powershell?toc=%2fpowershell%2fmodule%2ftoc.json</remarks>
private async Task CopyDatabaseInitiateAsync(Microsoft.Azure.Management.Sql.Models.Server sourceServer, Microsoft.Azure.Management.Sql.Models.Database sourceDatabase, string destinationDatabase)
{
    Requires.NonNull(sourceDatabase, nameof(sourceDatabase));
    Requires.Text(destinationDatabase, nameof(destinationDatabase));

    var req = new CopyDatabaseRequest
    {
        location = sourceDatabase.Location,
        properties = new CopyDatabaseRequest.Properties
        {
            sourceDatabaseId = sourceDatabase.Id
        }
    };

    using (var c = new HttpClient())
    {
        using (var st = StreamHelpers.Create(req.ToJson()))
        {
            var token = await CredentialFactory.GetTokenCredentialAsync(SqlAzureManagementApiResource);
            c.DefaultRequestHeaders.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", token);
            c.DefaultRequestHeaders.Add("CommandName", "New-AzureRmSqlDatabaseCopy");
            c.DefaultRequestHeaders.Add("ParameterSetName", "__AllParameterSets");
            var content = new StreamContent(st);
            content.Headers.ContentType = MimeType.Application.Json;
            var resp = await c.PutAsync(
                $"https://management.azure.com/subscriptions/{ConfigOptions.Value.SubscriptionId}/resourceGroups/{ConfigOptions.Value.ResourceGroupName}/providers/Microsoft.Sql/servers/{sourceServer.Name}/databases/{destinationDatabase}?api-version=2014-04-01",
                content);
            // FIX: the original assigned the Task returned by ReadAsStringAsync without
            // awaiting it, so the response body was never actually read. Await it here.
            // NOTE(review): the status code is still not checked; consider
            // resp.EnsureSuccessStatusCode() if a failed copy initiation should surface.
            var ret = await resp.Content.ReadAsStringAsync();
        }
    }
}
/// <summary>
/// Verifies that a file can be added to an archive opened in Update mode, including when
/// the entry collection is enumerated before or after the addition.
/// </summary>
public static async Task UpdateAddFile()
{
    // Runs one add-file scenario, optionally touching archive.Entries before/after the add,
    // then verifies the result against the expected "addFile" layout.
    async Task RunScenarioAsync(bool readEntriesBefore, bool readEntriesAfter)
    {
        var testArchive = await StreamHelpers.CreateTempCopyStream(ZipTest.zfile("normal.zip"));
        using (ZipArchive archive = new ZipArchive(testArchive, ZipArchiveMode.Update, true))
        {
            if (readEntriesBefore)
            {
                var x = archive.Entries;
            }
            await updateArchive(archive, ZipTest.zmodified(Path.Combine("addFile", "added.txt")), "added.txt");
            if (readEntriesAfter)
            {
                var x = archive.Entries;
            }
        }
        ZipTest.IsZipSameAsDir(testArchive, ZipTest.zmodified("addFile"), ZipArchiveMode.Read, false, false);
    }

    //add file
    await RunScenarioAsync(readEntriesBefore: false, readEntriesAfter: false);
    //add file and read entries before
    await RunScenarioAsync(readEntriesBefore: true, readEntriesAfter: false);
    //add file and read entries after
    await RunScenarioAsync(readEntriesBefore: false, readEntriesAfter: true);
}
/// <summary>
/// File-based variant: verifies that a file can be added to an archive opened via
/// ZipFile.Open in Update mode, with the entry collection optionally enumerated
/// before or after the addition.
/// </summary>
public static async Task UpdateAddFile()
{
    // Runs one add-file scenario against a temp copy of normal.zip, optionally touching
    // archive.Entries before/after the add, then verifies the resulting archive.
    async Task RunScenarioAsync(bool readEntriesBefore, bool readEntriesAfter)
    {
        String testArchive = StreamHelpers.CreateTempCopyFile(zfile("normal.zip"));
        using (ZipArchive archive = ZipFile.Open(testArchive, ZipArchiveMode.Update))
        {
            if (readEntriesBefore)
            {
                var x = archive.Entries;
            }
            await UpdateArchive(archive, zmodified(Path.Combine("addFile", "added.txt")), "added.txt");
            if (readEntriesAfter)
            {
                var x = archive.Entries;
            }
        }
        await IsZipSameAsDirAsync(testArchive, zmodified("addFile"), ZipArchiveMode.Read);
    }

    //add file
    await RunScenarioAsync(readEntriesBefore: false, readEntriesAfter: false);
    //add file and read entries before
    await RunScenarioAsync(readEntriesBefore: true, readEntriesAfter: false);
    //add file and read entries after
    await RunScenarioAsync(readEntriesBefore: false, readEntriesAfter: true);
}
/// <summary>
/// Every typed reader must throw EndOfStreamException on an empty stream, and ReadString
/// must throw when the compact-encoded length exceeds the remaining bytes.
/// </summary>
public void StreamHelpers_EmptyArray()
{
    // One delegate per typed reader; each is invoked against a fresh empty stream.
    var readers = new Func<Stream, object>[]
    {
        s => StreamHelpers.ReadSByte(s),
        s => StreamHelpers.ReadByte(s),
        s => StreamHelpers.ReadInt16(s),
        s => StreamHelpers.ReadUInt16(s),
        s => StreamHelpers.ReadInt32(s),
        s => StreamHelpers.ReadUInt32(s),
        s => StreamHelpers.ReadInt64(s),
        s => StreamHelpers.ReadUInt64(s),
        s => StreamHelpers.ReadSingle(s),
        s => StreamHelpers.ReadDouble(s),
        s => StreamHelpers.ReadDecimal(s),
        s => StreamHelpers.ReadChar(s),
        s => StreamHelpers.ReadBoolean(s),
        s => StreamHelpers.ReadGuid(s),
    };
    foreach (var read in readers)
    {
        Assert.ThrowsException<EndOfStreamException>(() => read(new MemoryStream()));
    }

    // A compact-encoded string length of 42 with no payload bytes must also fail.
    using var stream = new MemoryStream();
    stream.WriteUInt32Compact(42u);
    stream.Position = 0;
    Assert.ThrowsException<EndOfStreamException>(() => StreamHelpers.ReadString(stream));
}
/// <summary>
/// Copies this stream to <paramref name="destination"/> using pooled buffers.
/// </summary>
public override Task CopyToAsync(Stream destination, int bufferSize, CancellationToken cancellationToken) =>
    StreamHelpers.ArrayPoolCopyToAsync(this, destination, bufferSize, cancellationToken);
/// <summary>
/// Exercises EOCD-comment edge cases: an archive whose end-of-central-directory record
/// carries a 64KB comment must remain readable after large entries are added, but
/// appending 64KB of trailing garbage (pushing the EOCD past the maximum backward search
/// range) or prepending garbage to a copy (so no EOCD is found) must make opening fail.
/// </summary>
public static void ReadArchive_WithEOCDComment_TrailingPrecedingGarbage()
{
    // Writes a new entry whose content is one line of text.
    void InsertEntry(ZipArchive archive, string name, string contents)
    {
        ZipArchiveEntry entry = archive.CreateEntry(name);
        using (StreamWriter writer = new StreamWriter(entry.Open()))
        {
            writer.WriteLine(contents);
        }
    }

    // Reads an entry fully and returns the decoded character count.
    int GetEntryContentsLength(ZipArchiveEntry entry)
    {
        int length = 0;
        using (Stream stream = entry.Open())
        {
            using (var reader = new StreamReader(stream))
            {
                length = reader.ReadToEnd().Length;
            }
        }
        return length;
    }

    // Asserts the entry exists, has the expected name, and at least the expected content size.
    void VerifyValidEntry(ZipArchiveEntry entry, string expectedName, int expectedMinLength)
    {
        Assert.NotNull(entry);
        Assert.Equal(expectedName, entry.Name);
        // The file has a few more bytes, but should be at least as large as its contents
        Assert.True(GetEntryContentsLength(entry) >= expectedMinLength);
    }

    string name0 = "huge0.txt";
    string name1 = "huge1.txt";
    string str64KB = new string('x', ushort.MaxValue);
    byte[] byte64KB = Text.Encoding.ASCII.GetBytes(str64KB);

    // Open empty file with 64KB EOCD comment
    string path = strange("extradata/emptyWith64KBComment.zip");
    // NOTE(review): .Result blocks on an async call; consider making this test async — confirm framework constraints.
    using (MemoryStream archiveStream = StreamHelpers.CreateTempCopyStream(path).Result)
    {
        // Insert 2 64KB txt entries
        using (ZipArchive archive = new ZipArchive(archiveStream, ZipArchiveMode.Update, leaveOpen: true))
        {
            InsertEntry(archive, name0, str64KB);
            InsertEntry(archive, name1, str64KB);
        }

        // Open and verify items
        archiveStream.Seek(0, SeekOrigin.Begin);
        using (ZipArchive archive = new ZipArchive(archiveStream, ZipArchiveMode.Read, leaveOpen: true))
        {
            Assert.Equal(2, archive.Entries.Count);
            VerifyValidEntry(archive.Entries[0], name0, ushort.MaxValue);
            VerifyValidEntry(archive.Entries[1], name1, ushort.MaxValue);
        }

        // Append 64KB of garbage
        archiveStream.Seek(0, SeekOrigin.End);
        archiveStream.Write(byte64KB, 0, byte64KB.Length);

        // Open should not be possible because we can't find the EOCD in the max search length from the end
        Assert.Throws<InvalidDataException>(() =>
        {
            ZipArchive archive = new ZipArchive(archiveStream, ZipArchiveMode.Read, leaveOpen: true);
        });

        // Create stream with 64KB of prepended garbage, then the above stream appended
        // Attempting to create a ZipArchive should fail: no EOCD found
        using (MemoryStream prependStream = new MemoryStream())
        {
            prependStream.Write(byte64KB, 0, byte64KB.Length);
            archiveStream.WriteTo(prependStream);
            Assert.Throws<InvalidDataException>(() =>
            {
                ZipArchive archive = new ZipArchive(prependStream, ZipArchiveMode.Read);
            });
        }
    }
}
/// <summary>
/// Round-trips a "strange" archive through Update mode and compares it to its folder layout.
/// </summary>
public static async Task StrangeFiles(string zipFile, string zipFolder, bool requireExplicit)
{
    var archiveStream = await StreamHelpers.CreateTempCopyStream(strange(zipFile));
    IsZipSameAsDir(archiveStream, zfolder(zipFolder), ZipArchiveMode.Update, requireExplicit, checkTimes: true);
}
/// <summary>
/// Creates a serializer rooted at <paramref name="workDirectory"/>, ensuring the directory exists.
/// </summary>
protected AlbumSerializer(string workDirectory)
{
    _workDirectory = workDirectory;
    StreamHelpers.CheckDir(workDirectory);
}
/// <summary>
/// Lazily yields the inclusive [first, last] coordinates of the segments of
/// <paramref name="what"/> obtained by splitting on occurrences of the delimiter
/// <paramref name="by"/>. The stream position is restored when enumeration finishes.
/// </summary>
/// <param name="what">Stream to split; its position is saved and restored.</param>
/// <param name="by">Delimiter byte pattern.</param>
/// <param name="whatFirst">Inclusive start offset of the searched region.</param>
/// <param name="whatLast">Inclusive end offset; defaults to the last byte of the stream.</param>
/// <param name="byFirst">Inclusive start offset within the delimiter pattern.</param>
/// <param name="byLast">Inclusive end offset within the pattern; null means "to the end".</param>
/// <param name="seekBuffSize">Buffer size forwarded to the underlying Find.</param>
public static IEnumerable<Tuple<long, long>> Split(Stream what, byte[] by, long whatFirst = 0, long? whatLast = null, long byFirst = 0, long? byLast = null, int seekBuffSize = BUF_SIZE)
{
    long initialSrcPos = what.Position;
    try
    {
        if (!whatLast.HasValue)
        {
            whatLast = what.Length - 1;
        }
        // An empty stream yields no segments.
        if (what.Length == 0)
        {
            yield break;
        }
        // An empty delimiter: the whole region is a single segment.
        if (by.Length == 0)
        {
            yield return(new Tuple<long, long>(whatFirst, whatLast.Value));
            yield break;
        }
        // Number of delimiter bytes actually used for matching (honors byFirst/byLast).
        long bySignificantLength = byLast.HasValue ? byLast.Value - byFirst + 1 : by.Length - byFirst;
        long prevMatch = NOT_FOUND_POS;
        long curMatch = NOT_FOUND_POS;
        while (true)
        {
            curMatch = StreamHelpers.Find(what, by, whatFirst, whatLast, byFirst, byLast, seekBuffSize);
            if (curMatch == NOT_FOUND_POS)
            {
                // No more delimiters: emit the trailing segment (if non-empty) and stop.
                if (prevMatch == NOT_FOUND_POS)
                {
                    yield break;
                }
                long curSegmentFirst = prevMatch + bySignificantLength;
                if (curSegmentFirst <= whatLast)
                {
                    yield return(new Tuple<long, long>(curSegmentFirst, whatLast.Value));
                }
                yield break;
            }
            else
            {
                if (prevMatch == NOT_FOUND_POS)
                {
                    // Leading segment before the first delimiter.
                    if (whatFirst < curMatch)
                    {
                        yield return(new Tuple<long, long>(whatFirst, curMatch - 1));
                    }
                }
                else
                {
                    // Segment between two consecutive delimiters.
                    long curSegmentFirst = prevMatch + bySignificantLength;
                    if (curSegmentFirst < curMatch)
                    {
                        yield return(new Tuple<long, long>(curSegmentFirst, curMatch - 1));
                    }
                }
            }
            prevMatch = curMatch;
            // NOTE(review): advances by the full pattern length rather than
            // bySignificantLength — looks inconsistent when byFirst/byLast select a
            // sub-range of the pattern; confirm whether that combination is ever used.
            whatFirst = curMatch + by.Length;
        }
    }
    finally
    {
        what.Position = initialSrcPos;
    }
}
/// <summary>
/// Verifies an archive produced by another MS tool matches its expected layout in Update mode.
/// </summary>
public static async Task CompatibilityTestsMsFiles(string withTrailing, string withoutTrailing, bool requireExplicit, bool checkTimes)
{
    var archiveStream = await StreamHelpers.CreateTempCopyStream(compat(withTrailing));
    IsZipSameAsDir(archiveStream, compat(withoutTrailing), ZipArchiveMode.Update, requireExplicit, checkTimes);
}
/// <summary>
/// An archive whose local headers disagree with the central directory (name, time, sizes)
/// must still round-trip correctly in Update mode.
/// </summary>
public static async Task StrangeFiles5()
{
    var archiveStream = await StreamHelpers.CreateTempCopyStream(ZipTest.strange("filenameTimeAndSizesDifferentInLH.zip"));
    ZipTest.IsZipSameAsDir(archiveStream, ZipTest.zfolder("verysmall"), ZipArchiveMode.Update, true, false);
}
/// <summary>
/// Builds a zip archive from a directory, optionally exercising the Span overloads of
/// Write or chunked writes with a nonzero offset.
/// </summary>
/// <param name="directory">Directory whose files and folders are archived.</param>
/// <param name="archiveStream">Destination stream; left open when the archive is disposed.</param>
/// <param name="mode">Mode the ZipArchive is opened in.</param>
/// <param name="useSpansForWriting">Tests the Span overloads of Write</param>
/// <param name="writeInChunks">Writes in chunks of 5 to test Write with a nonzero offset</param>
public static async Task CreateFromDir(string directory, Stream archiveStream, ZipArchiveMode mode, bool useSpansForWriting = false, bool writeInChunks = false)
{
    var files = FileData.InPath(directory);
    using (ZipArchive archive = new ZipArchive(archiveStream, mode, true))
    {
        // Directory entries first (trailing '/' marks a folder in the zip format).
        foreach (var i in files)
        {
            if (i.IsFolder)
            {
                string entryName = i.FullName;
                ZipArchiveEntry e = archive.CreateEntry(entryName.Replace('\\', '/') + "/");
                e.LastWriteTime = i.LastModifiedDate;
            }
        }

        foreach (var i in files)
        {
            if (i.IsFile)
            {
                string entryName = i.FullName;
                // FIX: dispose the temp copy stream; the original leaked it.
                using (var installStream = await StreamHelpers.CreateTempCopyStream(Path.Combine(i.OrigFolder, i.FullName)))
                {
                    if (installStream != null)
                    {
                        ZipArchiveEntry e = archive.CreateEntry(entryName.Replace('\\', '/'));
                        e.LastWriteTime = i.LastModifiedDate;
                        using (Stream entryStream = e.Open())
                        {
                            int bytesRead;
                            var buffer = new byte[1024];
                            if (useSpansForWriting)
                            {
                                // Span-based read/write path.
                                while ((bytesRead = installStream.Read(new Span<byte>(buffer))) != 0)
                                {
                                    entryStream.Write(new ReadOnlySpan<byte>(buffer, 0, bytesRead));
                                }
                            }
                            else if (writeInChunks)
                            {
                                // Write each buffer in 5-byte slices to exercise nonzero offsets.
                                while ((bytesRead = installStream.Read(buffer, 0, buffer.Length)) != 0)
                                {
                                    for (int k = 0; k < bytesRead; k += 5)
                                    {
                                        entryStream.Write(buffer, k, Math.Min(5, bytesRead - k));
                                    }
                                }
                            }
                            else
                            {
                                // Plain buffered copy.
                                while ((bytesRead = installStream.Read(buffer, 0, buffer.Length)) != 0)
                                {
                                    entryStream.Write(buffer, 0, bytesRead);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// An archive with extra data following a zip64 extra field must round-trip in Update mode.
/// </summary>
public static async Task StrangeFiles3()
{
    var archiveStream = await StreamHelpers.CreateTempCopyStream(ZipTest.strange(Path.Combine("extradata", "zip64ThenExtraData.zip")));
    ZipTest.IsZipSameAsDir(archiveStream, ZipTest.zfolder("verysmall"), ZipArchiveMode.Update, false, false);
}
/// <summary>
/// Connects to Twitch IRC, joins the channel, and records chat messages for a live stream
/// into the database in batches of 50 until cancellation is requested (stream finished).
/// </summary>
/// <param name="channel">Channel name to join (without the leading '#').</param>
/// <param name="vodId">Stream/VOD id the chat rows are associated with.</param>
/// <param name="cancellationToken">Signals that the live stream has finished.</param>
public Task DownloadLiveStreamChatLogic(string channel, long vodId, CancellationToken cancellationToken)
{
    using (var irc = new TcpClient("irc.chat.twitch.tv", 6667))
    using (var stream = irc.GetStream())
    using (var reader = new StreamReader(stream))
    using (var writer = new StreamWriter(stream))
    {
        writer.AutoFlush = true;
        string accessToken;
        using (var context = new MainDataContext())
        {
            accessToken = context.Authentications.First().accessToken; //todo check for working auth on all external calls
        }
        var dbUserName = GlobalConfig.GetGlobalConfig("userName");
        // Fall back to fetching and persisting user details when no name is stored yet.
        var userName = dbUserName ?? new UserDetails().SaveUserDataToDb();

        // IRC handshake plus capability request so messages arrive with Twitch tags.
        writer.WriteLine($"PASS oauth:{accessToken}");
        writer.WriteLine($"NICK {userName}");
        writer.WriteLine($"JOIN #{channel}");
        writer.WriteLine("CAP REQ :twitch.tv/tags");

        string inputLine;
        int databaseCounter = 0;
        List<Chat> chats = new List<Chat>();
        // NOTE(review): ReadLine blocks, so cancellation is only observed after the next
        // line arrives — confirm this is acceptable for quiet channels.
        while ((inputLine = reader.ReadLine()) != null)
        {
            if (cancellationToken.IsCancellationRequested)
            {
                _logger.Warn("IRC shut down initiated, stream must have finished...");
                // Flush whatever is still buffered before marking the download finished.
                AddLiveStreamChatToDb(chats, vodId);
                StreamHelpers.SetChatDownloadToFinished(vodId, true);
                _logger.Info("Done!");
                break;
            }
            // Keep-alive: server pings must be answered or the connection is dropped.
            if (inputLine == "PING :tmi.twitch.tv")
            {
                writer.WriteLine("PONG :tmi.twitch.tv");
            }
            if (inputLine.Contains("PRIVMSG"))
            {
                Chat chat = MessageBuilder(inputLine);
                chat.streamId = vodId;
                chats.Add(chat);
                databaseCounter++;
            }
            // Persist in batches of 50 to limit database round-trips.
            if (databaseCounter == 50)
            {
                AddLiveStreamChatToDb(chats, vodId);
                databaseCounter = 0;
                chats.Clear();
            }
        }
        // todo check emote compatibility; does it send offline notification in irc??
    }
    return(Task.CompletedTask);
}
/// <summary>
/// Reads a texture-atlas entry from <paramref name="input"/> (big-endian fields), including
/// all sub-atlases and either an embedded texture or a by-name reference to one.
/// </summary>
/// <param name="input">Source stream positioned at the start of an atlas entry.</param>
/// <param name="isManifest">True when reading a manifest; manifest entries always reference textures by name.</param>
/// <returns>The populated <c>AtlasEntry</c>.</returns>
/// <exception cref="EntryReadException">Thrown when the leading size field is negative.</exception>
public static AtlasEntry Read(Stream input, bool isManifest)
{
    // Validate input
    var size = input.ReadInt32BE();
    if (size < 0)
    {
        throw new EntryReadException(string.Format(ERR_ATLAS_SIZE, size));
    }

    // Read the header of the atlas.
    int atlasVersionCode = 0;
    int numSubAtlases = input.ReadInt32BE();
    if (numSubAtlases == NEW_ATLAS_VERSION_MAGIC)
    {
        // New texture atlas format: Ignore the first integer
        atlasVersionCode = input.ReadInt32BE();
        numSubAtlases = input.ReadInt32BE();
    }

    var entry = new AtlasEntry();
    entry.Entries = new List<SubAtlas>();
    entry.VersionCode = atlasVersionCode;

    // Read all sub atlases.
    for (int i = 0; i < numSubAtlases; i++)
    {
        string name = StreamHelpers.ReadString(input);
        Rectangle rect = new Rectangle(input.ReadInt32BE(), input.ReadInt32BE(), input.ReadInt32BE(), input.ReadInt32BE());
        Point topLeft = new Point(input.ReadInt32BE(), input.ReadInt32BE());
        Point originalSize = new Point(input.ReadInt32BE(), input.ReadInt32BE());
        Vector2 scaleRatio = new Vector2(input.ReadSingleBE(), input.ReadSingleBE());
        bool isMultiTexture = false;
        bool isMip = false;
        if (atlasVersionCode > 0)
        {
            // Version 1 stores a boolean byte; version 2+ packs flag bits into that byte.
            int atlasType = input.ReadByte();
            if (atlasVersionCode > 1)
            {
                isMultiTexture = (atlasType & IS_MULTI_TEXTURE_FLAG) != 0;
                isMip = (atlasType & IS_MIP_FLAG) != 0;
            }
            else
            {
                isMultiTexture = atlasType != 0;
            }
        }
        // Version 3+ appends a point list (hull) per sub-atlas.
        List<IntVector2> hull = null;
        if (atlasVersionCode > 2)
        {
            int hullCount = input.ReadInt32BE();
            hull = new List<IntVector2>();
            for (int j = 0; j < hullCount; j++)
            {
                int num10 = input.ReadInt32BE();
                int num11 = input.ReadInt32BE();
                hull.Add(new IntVector2(num10, num11));
            }
        }
        entry.Entries.Add(new SubAtlas()
        {
            Parent = entry,
            Name = name,
            Rect = rect,
            TopLeft = topLeft,
            OriginalSize = originalSize,
            ScaleRatio = scaleRatio,
            IsMultiTexture = isMultiTexture,
            IsMip = isMip,
            Hull = hull,
        });
    }

    // Is this a reference to the texture, or the actual thing? If we're reading a manifest, then its always a
    // reference.
    //byte unkByte = (byte)input.ReadByte();
    byte refByte = (byte)input.ReadByte();
    bool isReference = refByte == REFERENCE_CODE || isManifest;
    entry.IsReference = isReference;
    if (isReference)
    {
        // Read the name.
        entry.ReferencedTextureName = StreamHelpers.ReadString(input);
    }
    else
    {
        entry.IncludedTextureEntry = TextureEntry.Read(input);
    }
    return(entry);
}
/// <summary>
/// Writes the SysEx buffer's backing stream into <paramref name="stream"/> starting at offset 0.
/// </summary>
private void Serialize(Stream stream, MidiSysExBuffer buffer) =>
    StreamHelpers.CopyTo(buffer.Stream, stream, 0);
/// <summary>
/// A normal archive opened in Update mode must match its expected folder layout exactly.
/// </summary>
public static async Task UpdateReadNormal(string zipFile, string zipFolder)
{
    var archiveStream = await StreamHelpers.CreateTempCopyStream(zfile(zipFile));
    IsZipSameAsDir(archiveStream, zfolder(zipFolder), ZipArchiveMode.Update, requireExplicit: true, checkTimes: true);
}
/// <summary>
/// Parses the multipart stream: detects the EOL style (CRLF or LF) from the first line,
/// reads and validates (or learns) the boundary, locates the "--boundary--" terminator,
/// and splits the stream into parts, stripping each part's trailing EOL.
/// </summary>
/// <exception cref="NFXException">On missing EOL, boundary mismatch, too-short boundary,
/// missing terminator, or an incorrectly terminated part segment.</exception>
private void parseStream()
{
    m_Stream.Position = 0;

    // The first line is the opening boundary; its terminator reveals the EOL convention.
    long firstEOLPos = StreamHelpers.Find(m_Stream, EOL_CRLF_BYTES);
    if (firstEOLPos == StreamHelpers.NOT_FOUND_POS)
    {
        firstEOLPos = StreamHelpers.Find(m_Stream, EOL_LF_BYTES);
        if (firstEOLPos == StreamHelpers.NOT_FOUND_POS)
        {
            throw new NFXException(StringConsts.MULTIPART_NO_LF_NOR_CRLF_ISNT_FOUND_ERROR + this.GetType().Name + ".ParseStream");
        }
        m_EOL = EOL_LF_BYTES;
    }
    else
    {
        m_EOL = EOL_CRLF_BYTES;
    }

    // FIX: the original allocated this buffer twice in a row; one allocation suffices.
    // NOTE(review): Read return values are ignored throughout; a short read would corrupt
    // parsing — confirm the stream is always fully buffered/seekable here.
    byte[] boundaryBytes = new byte[firstEOLPos];
    m_Stream.Read(boundaryBytes, 0, (int)firstEOLPos);
    if (m_Boundary != null)
    {
        // Caller supplied the boundary: the stream must start with "--" + m_Boundary.
        string streamBoundaryStr = m_Encoding.GetString(boundaryBytes).Substring(2);
        if (streamBoundaryStr != m_Boundary)
        {
            throw new NFXException(StringConsts.MULTIPART_BOUNDARY_MISMATCH_ERROR.Args(m_Boundary, streamBoundaryStr) + this.GetType().Name + ".ParseStream");
        }
    }
    else
    {
        // Learn the boundary from the stream ("--" + at least one character).
        var fullBoundary = m_Encoding.GetString(boundaryBytes);
        if (fullBoundary.Length < 3)
        {
            throw new NFXException(StringConsts.MULTIPART_BOUNDARY_COULDNT_BE_SHORTER_3_ERROR + this.GetType().Name + ".ParseStream");
        }
        m_Boundary = fullBoundary.Substring(2); // remove two leading hyphens
    }

    m_Stream.Position = 0;

    // The terminating boundary is the opening boundary followed by "--".
    int boundaryLength = boundaryBytes.Length;
    byte[] endBoundaryBytes = new byte[boundaryLength + 2];
    m_Stream.Read(endBoundaryBytes, 0, boundaryLength);
    endBoundaryBytes[boundaryLength] = HYPHEN_BYTE;
    endBoundaryBytes[boundaryLength + 1] = HYPHEN_BYTE;
    long terminatorPos = StreamHelpers.Find(m_Stream, endBoundaryBytes);
    if (terminatorPos == StreamHelpers.NOT_FOUND_POS)
    {
        throw new NFXException(StringConsts.MULTIPART_TERMINATOR_ISNT_FOUND_ERROR + this.GetType().Name + ".ParseStream");
    }

    // Split on boundaries up to the terminator; every part must end with the detected EOL.
    var splitSegmentCoordinates = StreamHelpers.Split(m_Stream, boundaryBytes, whatLast: terminatorPos).ToArray();
    foreach (var coordinate in splitSegmentCoordinates.Where(c => (c.Item2 - c.Item1) > m_EOL.Length))
    {
        if (!StreamHelpers.EndsWith(m_Stream, m_EOL, coordinate.Item1, coordinate.Item2))
        {
            throw new NFXException(StringConsts.MULTIPART_PART_SEGMENT_ISNT_TERMINATED_CORRECTLY_ERROR.Args(m_EOL) + this.GetType().Name + ".ParseStream");
        }
        // Strip the trailing EOL from the part's body span.
        var part = new MultiPart(m_Stream, coordinate.Item1, coordinate.Item2 - m_EOL.Length, m_EOL, m_Encoding);
        m_Parts.Add(part);
    }
}
/// <summary>
/// Copies the archive file into a temp stream and compares it against the directory.
/// </summary>
public static async Task IsZipSameAsDirAsync(string archiveFile, string directory, ZipArchiveMode mode, bool requireExplicit, bool checkTimes)
{
    IsZipSameAsDir(await StreamHelpers.CreateTempCopyStream(archiveFile), directory, mode, requireExplicit, checkTimes);
}
/// <summary>
/// For each configured repository: reads the last-processed timestamp from blob storage,
/// searches GitHub for open issues and PRs created in the last two days, emails an HTML
/// summary, and persists this run's upper-bound timestamp for the next run.
/// </summary>
public override void Execute()
{
    _log.LogInformation($"Started function execution: {DateTime.Now}");

    var storageConnString = Environment.GetEnvironmentVariable("AzureWebJobsStorage");
    BlobServiceClient bsc = new BlobServiceClient(storageConnString);
    BlobContainerClient bcc = bsc.GetBlobContainerClient(ContainerName);

    // create the container
    bcc.CreateIfNotExists();
    _log.LogInformation("Storage account accessed");

    DateTime to = DateTime.UtcNow;
    DateTime fromTwoDaysBack = DateTime.UtcNow.AddDays(-2);
    foreach (var repositoryConfig in _cmdLine.RepositoriesList)
    {
        // retrieve the last accessed time for this repository (one blob per owner_repo)
        BlobClient bc = bcc.GetBlobClient($"{repositoryConfig.Owner}_{repositoryConfig.Repo}");
        DateTime lastDateRun = DateTime.UtcNow.AddDays(-1);
        try
        {
            string content = StreamHelpers.GetContentAsString(bc.Download().Value.Content);
            lastDateRun = DateTime.Parse(content);
        }
        catch
        {
            // Deliberate best-effort: a missing/unreadable blob falls back to "one day back".
            // NOTE(review): this also swallows parse and transport errors — confirm intended.
        }
        _log.LogInformation("Last processed date for {0} is {1}", repositoryConfig, lastDateRun);

        string owner = repositoryConfig.Owner;
        string repo = repositoryConfig.Repo;
        _log.LogInformation("Processing repository {0}\\{1}", owner, repo);

        HtmlPageCreator emailBody = new HtmlPageCreator($"New items in {repo}");
        SearchIssuesRequest requestOptions = new SearchIssuesRequest()
        {
#pragma warning disable CS0618 // Type or member is obsolete
            Created = DateRange.Between(fromTwoDaysBack, to),
#pragma warning restore CS0618 // Type or member is obsolete
            Order = SortDirection.Descending,
            Repos = new RepositoryCollection()
        };
        requestOptions.Repos.Add(owner, repo);

        // get the issues (same request object reused; only the Is qualifier changes)
        requestOptions.Is = new[] { IssueIsQualifier.Open, IssueIsQualifier.Issue };
        RetrieveItemsFromGitHub(requestOptions, lastDateRun, emailBody, "New issues");

        // get the PRs
        requestOptions.Is = new[] { IssueIsQualifier.Open, IssueIsQualifier.PullRequest };
        RetrieveItemsFromGitHub(requestOptions, lastDateRun, emailBody, "New PRs");

        emailBody.AddContent($"<p>Last checked range: {lastDateRun} -> {to} </p>");
        _log.LogInformation("Sending email...");

        // send the email
        EmailSender.SendEmail(_cmdLine.EmailToken, _cmdLine.FromEmail, emailBody.GetContent(), repositoryConfig.ToEmail, repositoryConfig.CcEmail, $"New issues in the {repo} repo as of {to.ToShortDateString()}", _log);
        _log.LogInformation("Email sent...");

        // Persist this run's upper bound as the next run's starting point.
        bc.Upload(StreamHelpers.GetStreamForString(to.ToUniversalTime().ToString()), overwrite: true);
        _log.LogInformation($"Persisted last event time for {repositoryConfig.Owner}\\{repositoryConfig.Repo} as {to}");
    }
}
/// <summary>
/// An archive with extra data in local headers, central directory entries, and archive
/// comments must round-trip correctly in Update mode.
/// </summary>
public static async Task StrangeFiles1()
{
    var archiveStream = await StreamHelpers.CreateTempCopyStream(ZipTest.strange(Path.Combine("extradata", "extraDataLHandCDentryAndArchiveComments.zip")));
    ZipTest.IsZipSameAsDir(archiveStream, ZipTest.zfolder("verysmall"), ZipArchiveMode.Update, false, false);
}
/// <summary>
/// Exercises Update-mode modifications: delete + "move" (copy to a new name, delete the
/// original), append to an existing entry, and overwrite an entry's contents. Each result
/// is compared against the corresponding pre-built expected archive layout.
/// </summary>
public static async Task UpdateModifications()
{
    //delete and move
    var testArchive = await StreamHelpers.CreateTempCopyStream(ZipTest.zfile("normal.zip"));
    using (ZipArchive archive = new ZipArchive(testArchive, ZipArchiveMode.Update, true))
    {
        ZipArchiveEntry toBeDeleted = archive.GetEntry("binary.wmv");
        toBeDeleted.Delete();
        toBeDeleted.Delete(); //delete twice should be okay
        // "Move" = create a new entry, copy contents and timestamp, delete the original.
        ZipArchiveEntry moved = archive.CreateEntry("notempty/secondnewname.txt");
        ZipArchiveEntry orig = archive.GetEntry("notempty/second.txt");
        using (Stream origMoved = orig.Open(), movedStream = moved.Open())
        {
            origMoved.CopyTo(movedStream);
        }
        moved.LastWriteTime = orig.LastWriteTime;
        orig.Delete();
    }
    ZipTest.IsZipSameAsDir(testArchive, ZipTest.zmodified("deleteMove"), ZipArchiveMode.Read, false, false);

    //append
    testArchive = await StreamHelpers.CreateTempCopyStream(ZipTest.zfile("normal.zip"));
    using (ZipArchive archive = new ZipArchive(testArchive, ZipArchiveMode.Update, true))
    {
        ZipArchiveEntry e = archive.GetEntry("first.txt");
        using (StreamWriter s = new StreamWriter(e.Open()))
        {
            // Seek to the end so the write appends rather than overwriting from the start.
            s.BaseStream.Seek(0, SeekOrigin.End);
            s.Write("\r\n\r\nThe answer my friend, is blowin' in the wind.");
        }
        e.LastWriteTime = new DateTimeOffset(2010, 7, 7, 11, 57, 18, new TimeSpan(-7, 0, 0));
    }
    ZipTest.IsZipSameAsDir(testArchive, ZipTest.zmodified("append"), ZipArchiveMode.Read, false, false);

    //Overwrite file
    testArchive = await StreamHelpers.CreateTempCopyStream(ZipTest.zfile("normal.zip"));
    using (ZipArchive archive = new ZipArchive(testArchive, ZipArchiveMode.Update, true))
    {
        String fileName = ZipTest.zmodified(Path.Combine("overwrite", "first.txt"));
        ZipArchiveEntry e = archive.GetEntry("first.txt");
        var file = FileData.GetFile(fileName);
        e.LastWriteTime = file.LastModifiedDate;
        using (var stream = await StreamHelpers.CreateTempCopyStream(fileName))
        {
            using (Stream es = e.Open())
            {
                // Truncate first so stale bytes from a longer original cannot survive.
                es.SetLength(0);
                stream.CopyTo(es);
            }
        }
    }
    ZipTest.IsZipSameAsDir(testArchive, ZipTest.zmodified("overwrite"), ZipArchiveMode.Read, false, false);
}
/// <summary>
/// An archive using data descriptors must round-trip correctly in Update mode.
/// </summary>
public static async Task StrangeFiles4()
{
    var archiveStream = await StreamHelpers.CreateTempCopyStream(ZipTest.strange("dataDescriptor.zip"));
    ZipTest.IsZipSameAsDir(archiveStream, ZipTest.zfolder("normalWithoutBinary"), ZipArchiveMode.Update, true, false);
}
/// <summary>
/// Verifies an archive produced by another tool matches its expected folder in Update mode.
/// </summary>
public static async Task CompatibilityTests(string zipFile, string zipFolder, bool requireExplicit, bool checkTimes)
{
    var archiveStream = await StreamHelpers.CreateTempCopyStream(compat(zipFile));
    IsZipSameAsDir(archiveStream, zfolder(zipFolder), ZipArchiveMode.Update, requireExplicit, checkTimes);
}
/// <summary>
/// Reads two entries both sequentially and interleaved — including two independent streams
/// over the same entry — and verifies every read path produces identical bytes.
/// </summary>
public static async Task ReadInterleaved()
{
    using (ZipArchive archive = new ZipArchive(await StreamHelpers.CreateTempCopyStream(zfile("normal.zip"))))
    {
        ZipArchiveEntry e1 = archive.GetEntry("first.txt");
        ZipArchiveEntry e2 = archive.GetEntry("notempty/second.txt");

        //read all of e1 and e2's contents
        Byte[] e1readnormal = new Byte[e1.Length];
        Byte[] e2readnormal = new Byte[e2.Length];
        Byte[] e1interleaved = new Byte[e1.Length];
        Byte[] e2interleaved = new Byte[e2.Length];
        using (Stream e1s = e1.Open())
        {
            ReadBytes(e1s, e1readnormal, e1.Length);
        }
        using (Stream e2s = e2.Open())
        {
            ReadBytes(e2s, e2readnormal, e2.Length);
        }

        //now read interleaved, assume we are working with < 4gb files
        const Int32 bytesAtATime = 15;
        using (Stream e1s = e1.Open(), e2s = e2.Open())
        {
            Int32 e1pos = 0;
            Int32 e2pos = 0;
            while (e1pos < e1.Length || e2pos < e2.Length)
            {
                if (e1pos < e1.Length)
                {
                    Int32 e1bytesRead = e1s.Read(e1interleaved, e1pos, bytesAtATime + e1pos > e1.Length ? (Int32)e1.Length - e1pos : bytesAtATime);
                    e1pos += e1bytesRead;
                }
                if (e2pos < e2.Length)
                {
                    Int32 e2bytesRead = e2s.Read(e2interleaved, e2pos, bytesAtATime + e2pos > e2.Length ? (Int32)e2.Length - e2pos : bytesAtATime);
                    e2pos += e2bytesRead;
                }
            }
        }

        //now compare to original read
        ArraysEqual<Byte>(e1readnormal, e1interleaved, e1readnormal.Length);
        ArraysEqual<Byte>(e2readnormal, e2interleaved, e2readnormal.Length);

        //now read one entry interleaved via two independent streams over the same entry.
        // FIX: the original sized e1selfInterleaved2 from e2.Length and read into the
        // already-verified e1interleaved/e2interleaved buffers, leaving both
        // e1selfInterleaved buffers unwritten before asserting on them.
        Byte[] e1selfInterleaved1 = new Byte[e1.Length];
        Byte[] e1selfInterleaved2 = new Byte[e1.Length];
        using (Stream s1 = e1.Open(), s2 = e1.Open())
        {
            Int32 s1pos = 0;
            Int32 s2pos = 0;
            while (s1pos < e1.Length || s2pos < e1.Length)
            {
                if (s1pos < e1.Length)
                {
                    Int32 s1bytesRead = s1.Read(e1selfInterleaved1, s1pos, bytesAtATime + s1pos > e1.Length ? (Int32)e1.Length - s1pos : bytesAtATime);
                    s1pos += s1bytesRead;
                }
                if (s2pos < e1.Length)
                {
                    Int32 s2bytesRead = s2.Read(e1selfInterleaved2, s2pos, bytesAtATime + s2pos > e1.Length ? (Int32)e1.Length - s2pos : bytesAtATime);
                    s2pos += s2bytesRead;
                }
            }
        }

        //now compare to original read
        ArraysEqual<Byte>(e1readnormal, e1selfInterleaved1, e1readnormal.Length);
        ArraysEqual<Byte>(e1readnormal, e1selfInterleaved2, e1readnormal.Length);
    }
}
/// <summary>
/// A Deflate64-compressed archive must match its expected folder layout in Update mode.
/// </summary>
public static async Task Deflate64Zip()
{
    var archiveStream = await StreamHelpers.CreateTempCopyStream(compat("deflate64.zip"));
    IsZipSameAsDir(archiveStream, zfolder("normal"), ZipArchiveMode.Update, requireExplicit: true, checkTimes: true);
}