protected override int ProcessIncoming(NetContext context, Connection connection, Stream incomingBuffer)
        {
            // Locate the end of the header section; a non-positive length means the
            // headers are not fully buffered yet, so consume nothing and wait for more data.
            int headerLength = FindHeadersLength(incomingBuffer);
            if (headerLength <= 0) return 0;

            incomingBuffer.Position = 0;

            int consumed;
            if (headerLength >= NetContext.BufferSize)
            {
                // Headers don't fit a pooled buffer; window the incoming stream directly
                // instead of copying.
                using (var window = new SubStream(incomingBuffer, headerLength))
                {
                    consumed = ProcessHeadersAndUpgrade(context, connection, window, incomingBuffer, headerLength);
                }
            }
            else
            {
                // Copy the header bytes into a pooled buffer and parse from an in-memory view.
                byte[] scratch = null;
                try
                {
                    scratch = context.GetBuffer();
                    NetContext.Fill(incomingBuffer, scratch, headerLength);
                    using (var ms = new MemoryStream(scratch, 0, headerLength))
                    {
                        consumed = ProcessHeadersAndUpgrade(context, connection, ms, incomingBuffer, headerLength);
                    }
                }
                finally
                {
                    // Return the pooled buffer even if parsing threw.
                    context.Recycle(scratch);
                }
            }

            // A negative result signals the request wasn't usable (yet); report nothing consumed.
            return consumed < 0 ? 0 : headerLength + consumed;
        }
Exemple #2
0
 /// <summary>
 /// Attempts to open a read-only view over a file stored inside the backing stream.
 /// </summary>
 /// <param name="path">Key identifying the file in the internal file table.</param>
 /// <param name="stream">On success, a <see cref="SubStream"/> windowing the file's
 /// region of the backing stream; <c>null</c> when the path is unknown.</param>
 /// <returns><c>true</c> when the path exists in the file table; otherwise <c>false</c>.</returns>
 internal bool TryOpenFile(string path, out Stream stream)
 {
     // Single dictionary lookup instead of ContainsKey + indexer (double lookup).
     if (_files.TryGetValue(path, out FileRecord file))
     {
         stream = new SubStream(_fileStream, file.Start, file.Length);
         return true;
     }

     stream = null;
     return false;
 }
        /// <summary>
        /// Computes the on-disk layout of a hosted sparse (VMDK-style) extent and returns
        /// the ordered list of builder extents that, written out, produce the extent file.
        /// </summary>
        /// <param name="totalLength">Receives the total length of the generated extent, in bytes.</param>
        /// <returns>The builder extents making up the sparse extent.</returns>
        internal override List <BuilderExtent> FixExtents(out long totalLength)
        {
            List <BuilderExtent> extents = new List <BuilderExtent>();

            MemoryStream descriptorStream = new MemoryStream();

            _descriptor.Write(descriptorStream);

            // Figure out grain size and number of grain tables, and adjust actual extent size to be a multiple
            // of grain size
            const int GtesPerGt      = 512;
            long      grainSize      = 128;
            int       numGrainTables = (int)Utilities.Ceil(_content.Length, grainSize * GtesPerGt * Sizes.Sector);

            // Descriptor space is reserved at a fixed 10KiB rather than sized from the
            // stream (the rounded-up alternative is kept commented out).
            long descriptorLength = 10 * Sizes.OneKiB; // Utilities.RoundUp(descriptorStream.Length, Sizes.Sector);
            long descriptorStart  = 0;

            if (descriptorLength != 0)
            {
                descriptorStart = 1;
            }

            // Layout (offsets below are in sectors): descriptor, redundant grain
            // directory + tables, primary grain directory + tables, then data.
            // Directory entries are 4 bytes each; each grain table holds GtesPerGt
            // 4-byte entries rounded up to a whole sector.
            long redundantGrainDirStart  = Math.Max(descriptorStart, 1) + Utilities.Ceil(descriptorLength, Sizes.Sector);
            long redundantGrainDirLength = numGrainTables * 4;

            long redundantGrainTablesStart  = redundantGrainDirStart + Utilities.Ceil(redundantGrainDirLength, Sizes.Sector);
            long redundantGrainTablesLength = numGrainTables * Utilities.RoundUp(GtesPerGt * 4, Sizes.Sector);

            long grainDirStart  = redundantGrainTablesStart + Utilities.Ceil(redundantGrainTablesLength, Sizes.Sector);
            long grainDirLength = numGrainTables * 4;

            long grainTablesStart  = grainDirStart + Utilities.Ceil(grainDirLength, Sizes.Sector);
            long grainTablesLength = numGrainTables * Utilities.RoundUp(GtesPerGt * 4, Sizes.Sector);

            // Data begins at the next grain-size-aligned sector after the metadata.
            long dataStart = Utilities.RoundUp(grainTablesStart + Utilities.Ceil(grainTablesLength, Sizes.Sector), grainSize);

            // Generate the header, and write it
            HostedSparseExtentHeader header = new HostedSparseExtentHeader();

            header.Flags            = HostedSparseExtentFlags.ValidLineDetectionTest | HostedSparseExtentFlags.RedundantGrainTable;
            header.Capacity         = Utilities.RoundUp(_content.Length, grainSize * Sizes.Sector) / Sizes.Sector;
            header.GrainSize        = grainSize;
            header.DescriptorOffset = descriptorStart;
            header.DescriptorSize   = descriptorLength / Sizes.Sector;
            header.NumGTEsPerGT     = GtesPerGt;
            header.RgdOffset        = redundantGrainDirStart;
            header.GdOffset         = grainDirStart;
            header.Overhead         = dataStart;

            extents.Add(new BuilderBytesExtent(0, header.GetBytes()));

            // The descriptor extent
            if (descriptorLength > 0)
            {
                extents.Add(new BuilderStreamExtent(descriptorStart * Sizes.Sector, descriptorStream));
            }

            // The grain directory extents
            extents.Add(new GrainDirectoryExtent(redundantGrainDirStart * Sizes.Sector, redundantGrainTablesStart, numGrainTables, GtesPerGt));
            extents.Add(new GrainDirectoryExtent(grainDirStart * Sizes.Sector, grainTablesStart, numGrainTables, GtesPerGt));

            // For each graintable span that's present...
            long dataSectorsUsed = 0;
            long gtSpan          = GtesPerGt * grainSize * Sizes.Sector;

            foreach (var gtRange in StreamExtent.Blocks(_content.Extents, grainSize * GtesPerGt * Sizes.Sector))
            {
                for (long i = 0; i < gtRange.Count; ++i)
                {
                    int gt = (int)(gtRange.Offset + i);

                    // One stream per grain table, shared by the data extent and both
                    // (redundant + primary) grain-table extents below.
                    SubStream gtStream = new SubStream(_content, gt * gtSpan, Math.Min(gtSpan, _content.Length - (gt * gtSpan)));

                    GrainTableDataExtent dataExtent = new GrainTableDataExtent((dataStart + dataSectorsUsed) * Sizes.Sector, gtStream, grainSize);
                    extents.Add(dataExtent);

                    extents.Add(new GrainTableExtent(GrainTablePosition(redundantGrainTablesStart, gt, GtesPerGt), gtStream, dataStart + dataSectorsUsed, GtesPerGt, grainSize));
                    extents.Add(new GrainTableExtent(GrainTablePosition(grainTablesStart, gt, GtesPerGt), gtStream, dataStart + dataSectorsUsed, GtesPerGt, grainSize));

                    dataSectorsUsed += dataExtent.Length / Sizes.Sector;
                }
            }

            totalLength = (dataStart + dataSectorsUsed) * Sizes.Sector;
            return(extents);
        }
Exemple #4
0
        /// <summary>
        /// Parses a G4TX texture archive into a list of archive file entries, one per
        /// texture, each carrying its sub-texture entries and a plugin id.
        /// </summary>
        /// <param name="input">Stream positioned at the start of the archive.</param>
        /// <returns>The parsed archive file infos.</returns>
        public IList <IArchiveFileInfo> Load(Stream input)
        {
            using var br = new BinaryReaderX(input, true);

            // Read header
            _header = br.ReadType <G4txHeader>();

            // Read entries
            _entries    = br.ReadMultiple <G4txEntry>(_header.textureCount);
            _subEntries = br.ReadMultiple <G4txSubEntry>(_header.subTextureCount);
            br.SeekAlignment();

            // Skip hashes (one uint per texture + sub-texture)
            br.ReadMultiple <uint>(_header.totalCount);

            // Read ids
            _ids = br.ReadMultiple <byte>(_header.totalCount);
            br.SeekAlignment(4);

            // Prepare string reader. The texture data region starts at the next 16-byte
            // boundary after header + table; the string pool occupies the gap between
            // the current position and that base.
            var nxtchBase    = (_header.headerSize + _header.tableSize + 0xF) & ~0xF;
            var stringSize   = nxtchBase - input.Position;
            var stringStream = new SubStream(input, input.Position, stringSize);

            using var stringBr = new BinaryReaderX(stringStream);

            // Read string offsets (used as positions inside the string pool)
            var stringOffsets = br.ReadMultiple <short>(_header.totalCount);

            // Add files
            // TODO: Check if name is set by order of entries or ID
            var result     = new List <IArchiveFileInfo>();
            // Sub-texture names/ids are stored after the texture ones, so their running
            // index starts at textureCount.
            var subEntryId = _header.textureCount;

            for (var i = 0; i < _header.textureCount; i++)
            {
                var entry = _entries[i];

                // Prepare base information (name from the string pool, data as a window
                // over the input stream)
                stringStream.Position = stringOffsets[i];
                var name = stringBr.ReadCStringASCII();

                var fileStream = new SubStream(input, nxtchBase + entry.nxtchOffset, entry.nxtchSize);

                // Prepare sub entries belonging to this texture
                var subEntries = new List <G4txSubTextureEntry>();
                foreach (var unkEntry in _subEntries.Where(x => x.entryId == i))
                {
                    stringStream.Position = stringOffsets[subEntryId];
                    var subName = stringBr.ReadCStringASCII();

                    subEntries.Add(new G4txSubTextureEntry(_ids[subEntryId++], unkEntry, subName));
                }

                result.Add(new G4txArchiveFileInfo(fileStream, name + ".nxtch", entry, _ids[i], subEntries)
                {
                    PluginIds = new[] { Guid.Parse("89222f8f-a345-45ed-9b79-e9e873bda1e9") }
                });
            }

            return(result);
        }
Exemple #5
0
        /// <summary>
        /// Parses a Wii disc image and returns all files found in the U8 file systems
        /// of its data partitions.
        /// </summary>
        /// <param name="input">Stream over the raw disc image.</param>
        /// <returns>The archive file infos collected from every data partition.</returns>
        /// <exception cref="InvalidOperationException">Thrown when the disc magic word check fails.</exception>
        public IList <ArchiveFileInfo> Load(Stream input)
        {
            var wiiDiscStream = new WiiDiscStream(input);

            using var br = new BinaryReaderX(wiiDiscStream, ByteOrder.BigEndian);

            // Read disc header
            var header = br.ReadType <WiiDiscHeader>();

            // Read partition infos (fixed location on disc)
            br.BaseStream.Position = 0x40000;
            var partitionInformation = br.ReadType <WiiDiscPartitionInformation>();

            // Read partitions; offsets on disc are stored shifted right by 2
            var partitions = new List <WiiDiscPartitionEntry>();

            br.BaseStream.Position = partitionInformation.partitionOffset1 << 2;
            partitions.AddRange(br.ReadMultiple <WiiDiscPartitionEntry>(partitionInformation.partitionCount1));

            // Read region settings
            br.BaseStream.Position = 0x4E000;
            var regionSettings = br.ReadType <WiiDiscRegionSettings>();

            // Read magic word
            br.BaseStream.Position = 0x4FFFC;
            var magic = br.ReadUInt32();

            if (magic != 0xC3F81A8E)
            {
                throw new InvalidOperationException("Invalid Wii disc magic word.");
            }

            // Read data partitions (type 0)
            var result = new List <ArchiveFileInfo>();

            foreach (var partition in partitions.Where(x => x.type == 0))
            {
                br.BaseStream.Position = partition.offset << 2;
                var partitionHeader = br.ReadType <WiiDiscPartitionHeader>();

                // Window the partition's data region and wrap it in a partition data
                // stream (its exact transformation lives in WiiDiscPartitionDataStream).
                var partitionStream     = new SubStream(wiiDiscStream, (partition.offset << 2) + ((long)partitionHeader.dataOffset << 2), (long)partitionHeader.dataSize << 2);
                var partitionDataStream = new WiiDiscPartitionDataStream(partitionStream);

                using (var partitionBr = new BinaryReaderX(partitionDataStream, true, ByteOrder.BigEndian))
                {
                    // Read partition data header
                    var partitionDataHeader = partitionBr.ReadType <WiiDiscHeader>();

                    // Read file system offset/size (also stored shifted right by 2)
                    partitionBr.BaseStream.Position = 0x424;
                    var fileSystemOffset = partitionBr.ReadInt32() << 2;
                    var fileSystemSize   = partitionBr.ReadInt32() << 2;

                    // Parse file system
                    var fileSystem = new U8FileSystem("DATA");
                    result.AddRange(fileSystem.Parse(partitionDataStream, fileSystemOffset, fileSystemSize, fileSystemOffset));
                }
            }

            return(result);
        }
        /// <summary>
        /// Writes the initial on-disk structures of a dynamic VHDX file: the file header,
        /// two VHDX headers, the region table (stored twice), a 1MiB log area, a 1MiB
        /// metadata region and a BAT region sized for the requested capacity.
        /// </summary>
        /// <param name="stream">Destination stream; written starting at position 0.</param>
        /// <param name="capacity">Virtual disk capacity in bytes.</param>
        /// <param name="blockSize">Payload block size in bytes.</param>
        /// <exception cref="ArgumentOutOfRangeException">
        /// Thrown when <paramref name="blockSize"/> is not a power of 2 between 1MB and 256MB.
        /// </exception>
        private static void InitializeDynamicInternal(Stream stream, long capacity, long blockSize)
        {
            if (blockSize < Sizes.OneMiB || blockSize > Sizes.OneMiB * 256 || !Utilities.IsPowerOfTwo(blockSize))
            {
                throw new ArgumentOutOfRangeException("blockSize", blockSize, "BlockSize must be a power of 2 between 1MB and 256MB");
            }

            int logicalSectorSize = 512;
            int physicalSectorSize = 4096;

            // Chunk ratio: number of payload blocks covered by one sector-bitmap block.
            long chunkRatio = (0x800000L * logicalSectorSize) / blockSize;
            long dataBlocksCount = Utilities.Ceil(capacity, blockSize);

            // The BAT interleaves one sector-bitmap entry after every chunkRatio data entries.
            long totalBatEntriesDynamic = dataBlocksCount + ((dataBlocksCount - 1) / chunkRatio);

            FileHeader fileHeader = new FileHeader() { Creator = ".NET DiscUtils" };

            // The 1MiB log area immediately follows the fixed 1MiB header section.
            long fileEnd = Sizes.OneMiB;

            VhdxHeader header1 = new VhdxHeader();
            header1.SequenceNumber = 0;
            header1.FileWriteGuid = Guid.NewGuid();
            header1.DataWriteGuid = Guid.NewGuid();
            header1.LogGuid = Guid.Empty; // empty GUID: no log entries to replay
            header1.LogVersion = 0;
            header1.Version = 1;
            header1.LogLength = (uint)Sizes.OneMiB;
            header1.LogOffset = (ulong)fileEnd;
            header1.CalcChecksum();

            fileEnd += header1.LogLength;

            // Second header is a copy of the first with a bumped sequence number.
            VhdxHeader header2 = new VhdxHeader(header1);
            header2.SequenceNumber = 1;
            header2.CalcChecksum();

            RegionTable regionTable = new RegionTable();

            RegionEntry metadataRegion = new RegionEntry();
            metadataRegion.Guid = RegionEntry.MetadataRegionGuid;
            metadataRegion.FileOffset = fileEnd;
            metadataRegion.Length = (uint)Sizes.OneMiB;
            metadataRegion.Flags = RegionFlags.Required;
            regionTable.Regions.Add(metadataRegion.Guid, metadataRegion);

            fileEnd += metadataRegion.Length;

            RegionEntry batRegion = new RegionEntry();
            batRegion.Guid = RegionEntry.BatGuid;
            // Was a hard-coded "3 * Sizes.OneMiB", which silently duplicated the running
            // offset (1MiB headers + 1MiB log + 1MiB metadata); track fileEnd instead so
            // the layout stays consistent if region sizes ever change.
            batRegion.FileOffset = fileEnd;
            batRegion.Length = (uint)Utilities.RoundUp(totalBatEntriesDynamic * 8, Sizes.OneMiB);
            batRegion.Flags = RegionFlags.Required;
            regionTable.Regions.Add(batRegion.Guid, batRegion);

            fileEnd += batRegion.Length;

            stream.Position = 0;
            Utilities.WriteStruct(stream, fileHeader);

            stream.Position = 64 * Sizes.OneKiB;
            Utilities.WriteStruct(stream, header1);

            stream.Position = 128 * Sizes.OneKiB;
            Utilities.WriteStruct(stream, header2);

            // The region table is stored twice for redundancy.
            stream.Position = 192 * Sizes.OneKiB;
            Utilities.WriteStruct(stream, regionTable);

            stream.Position = 256 * Sizes.OneKiB;
            Utilities.WriteStruct(stream, regionTable);

            // Set stream to min size by touching the final byte.
            stream.Position = fileEnd - 1;
            stream.WriteByte(0);

            // Metadata region: file parameters, virtual disk size and sector sizes
            // (no parent locator for a dynamic, non-differencing disk).
            FileParameters fileParams = new FileParameters() { BlockSize = (uint)blockSize, Flags = FileParametersFlags.None };

            Stream metadataStream = new SubStream(stream, metadataRegion.FileOffset, metadataRegion.Length);
            Metadata.Initialize(metadataStream, fileParams, (ulong)capacity, (uint)logicalSectorSize, (uint)physicalSectorSize, null);
        }
Exemple #7
0
        /// <summary>
        /// Serves one HTTP media request. Supported URL forms are
        /// <c>/videolocal/{userid}/{videoLocalId}</c> and <c>/file/{userid}/{base64Path}</c>.
        /// Honours byte-range requests, answers OPTIONS pre-flights, and for HEAD returns
        /// only the metadata. When streaming a known video for a non-zero user, marks it
        /// watched once the client passes the watched threshold.
        /// </summary>
        /// <param name="obj">The listener context for the request being served.</param>
        private void Process(System.Net.HttpListenerContext obj)
        {
            Stream org = null;

            try
            {
                // Expected layout: /{command}/{user}/{argument}
                string[] dta = obj.Request.RawUrl.Split(new char[] { '/' }, StringSplitOptions.RemoveEmptyEntries);
                if (dta.Length < 3)
                    return;
                string cmd = dta[0].ToLower();
                string user = dta[1];
                string arg = dta[2];
                string fullname;

                // A userid of 0 (or unparsable) means anonymous: watched tracking is skipped.
                int userid = 0;
                int.TryParse(user, out userid);

                VideoLocal loc = null;
                if (cmd == "videolocal")
                {
                    int sid = 0;
                    int.TryParse(arg, out sid);
                    if (sid == 0)
                    {
                        obj.Response.StatusCode = (int)HttpStatusCode.BadRequest;
                        obj.Response.StatusDescription = "Stream Id missing.";
                        return;
                    }
                    VideoLocalRepository rep = new VideoLocalRepository();
                    loc = rep.GetByID(sid);
                    if (loc == null)
                    {
                        obj.Response.StatusCode = (int)HttpStatusCode.NotFound;
                        obj.Response.StatusDescription = "Stream Id not found.";
                        return;
                    }
                    fullname = loc.FullServerPath;
                }
                else if (cmd == "file")
                {
                    // The argument is a URL-safe base64 encoded absolute path.
                    fullname = Base64DecodeUrl(arg);
                }
                else
                {
                    obj.Response.StatusCode = (int)HttpStatusCode.BadRequest;
                    obj.Response.StatusDescription = "Unknown command."; // fixed broken message "Not know command"
                    return;
                }

                // Verify the file is reachable before committing to a success response.
                try
                {
                    if (!File.Exists(fullname))
                    {
                        obj.Response.StatusCode = (int)HttpStatusCode.NotFound;
                        obj.Response.StatusDescription = "File '" + fullname + "' not found.";
                        return;
                    }
                }
                catch (Exception)
                {
                    obj.Response.StatusCode = (int)HttpStatusCode.InternalServerError;
                    obj.Response.StatusDescription = "Unable to access File '" + fullname + "'.";
                    return;
                }

                obj.Response.ContentType = GetMime(fullname);
                obj.Response.AddHeader("Accept-Ranges", "bytes");
                obj.Response.AddHeader("X-Plex-Protocol", "1.0");

                // CORS pre-flight: advertise allowed methods/headers, send no body.
                if (obj.Request.HttpMethod == "OPTIONS")
                {
                    obj.Response.AddHeader("Access-Control-Allow-Methods", "POST, GET, OPTIONS, DELETE, PUT, HEAD");
                    obj.Response.AddHeader("Access-Control-Max-Age", "1209600");
                    obj.Response.AddHeader("Access-Control-Allow-Headers",
                        "accept, x-plex-token, x-plex-client-identifier, x-plex-username, x-plex-product, x-plex-device, x-plex-platform, x-plex-platform-version, x-plex-version, x-plex-device-name");
                    obj.Response.AddHeader("Cache-Control", "no-cache");
                    obj.Response.ContentType = "text/plain";
                    return;
                }

                // "Range: bytes=start-end" may arrive with either header casing.
                string rangevalue = null;
                if (obj.Request.Headers.AllKeys.Contains("Range"))
                    rangevalue = obj.Request.Headers["Range"].Replace("bytes=", string.Empty).Trim();
                if (obj.Request.Headers.AllKeys.Contains("range"))
                    rangevalue = obj.Request.Headers["range"].Replace("bytes=", string.Empty).Trim();

                if (obj.Request.HttpMethod != "HEAD")
                {
                    org = new FileStream(fullname, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
                    long totalsize = org.Length;
                    long start = 0;
                    long end = 0;
                    bool range = false;
                    if (!string.IsNullOrEmpty(rangevalue))
                    {
                        range = true;
                        string[] split = rangevalue.Split('-');
                        if (split.Length == 2)
                        {
                            if (string.IsNullOrEmpty(split[0]) && !string.IsNullOrEmpty(split[1]))
                            {
                                // Suffix form "-N": the final N bytes of the file.
                                long e = long.Parse(split[1]);
                                start = totalsize - e;
                                end = totalsize - 1;
                            }
                            else if (!string.IsNullOrEmpty(split[0]) && string.IsNullOrEmpty(split[1]))
                            {
                                // Open-ended form "N-": from N to end of file.
                                start = long.Parse(split[0]);
                                end = totalsize - 1;
                            }
                            else if (!string.IsNullOrEmpty(split[0]) && !string.IsNullOrEmpty(split[1]))
                            {
                                // Bounded form "N-M", clamped to the file size.
                                start = long.Parse(split[0]);
                                end = long.Parse(split[1]);
                                if (start > totalsize - 1)
                                    start = totalsize - 1;
                                if (end > totalsize - 1)
                                    end = totalsize - 1;
                            }
                            else
                            {
                                start = 0;
                                end = totalsize - 1;
                            }
                        }
                    }

                    SubStream outstream;
                    if (range)
                    {
                        obj.Response.StatusCode = (int)HttpStatusCode.PartialContent;
                        obj.Response.AddHeader("Content-Range", "bytes " + start + "-" + end + "/" + totalsize);
                        outstream = new SubStream(org, start, end - start + 1);
                        obj.Response.ContentLength64 = end - start + 1;
                    }
                    else
                    {
                        outstream = new SubStream(org, 0, totalsize);
                        obj.Response.ContentLength64 = totalsize;
                        obj.Response.StatusCode = (int)HttpStatusCode.OK;
                    }

                    // Fire a watched-status update in the background once the client
                    // streams past the threshold position.
                    if ((userid != 0) && (loc != null))
                    {
                        outstream.CrossPosition = (long)((double)totalsize * WatchedThreshold);
                        outstream.CrossPositionCrossed += (a) =>
                        {
                            Task.Factory.StartNew(() =>
                            {
                                loc.ToggleWatchedStatus(true, userid);
                            }, new CancellationToken(), TaskCreationOptions.LongRunning, TaskScheduler.Default);
                        };
                    }

                    obj.Response.SendChunked = false;
                    outstream.CopyTo(obj.Response.OutputStream);
                    obj.Response.OutputStream.Close();
                    outstream.Close();
                }
                else
                {
                    // HEAD: report size and status without a body.
                    obj.Response.SendChunked = false;
                    obj.Response.StatusCode = (int)HttpStatusCode.OK;
                    obj.Response.ContentLength64 = new FileInfo(fullname).Length;
                    obj.Response.KeepAlive = false;
                    obj.Response.OutputStream.Close();
                }
            }
            catch (HttpListenerException)
            {
                // Client disconnected mid-stream; nothing useful to log.
            }
            catch (Exception e)
            {
                // Was logger.Error(e.ToString) — passing the method group rather than
                // invoking it; log the actual exception text.
                logger.Error(e.ToString());
            }
            finally
            {
                if (org != null)
                    org.Close();
                if ((obj != null) && (obj.Response != null) && (obj.Response.OutputStream != null))
                    obj.Response.OutputStream.Close();
            }
        }
        /// <summary>
        /// Reads a <see cref="VersionResource"/> object from the current <see cref="Stream"/>.
        /// </summary>
        /// <returns>
        /// A <see cref="VersionResource"/> object.
        /// </returns>
        /// <exception cref="VersionResourceFormatException">
        /// Thrown when the root element is not a binary "VS_VERSION_INFO" block, or when a
        /// StringFileInfo/VarFileInfo child is not of text type.
        /// </exception>
        public VersionResource Read()
        {
            this.Stream.Position = 0;

            VersionResource value = new VersionResource();

            // Window the full stream; leaveParentOpen keeps this.Stream open after disposal.
            using (SubStream stream = new SubStream(this.Stream, 0, this.Stream.Length, leaveParentOpen: true))
                using (BinaryReader reader = new BinaryReader(stream, Encoding.Default))
                {
                    long offset = this.Stream.Position;

                    // The header's Length covers the root block and all of its children.
                    var  versionInfo = reader.ReadVersionInfo();
                    long end         = offset + versionInfo.Header.Length;

                    // The root element MUST be a "VS_VERSION_INFO" element of binary type.
                    // https://msdn.microsoft.com/en-us/library/windows/desktop/ms647001(v=vs.85).aspx
                    // It contains at most three children - a VS_FIXEDFILEINFO object, a StringFileInfo struct
                    // and a VarFileInfo struct.
                    if (versionInfo.Key != "VS_VERSION_INFO")
                    {
                        throw new VersionResourceFormatException();
                    }

                    if (versionInfo.Header.Type != VersionDataType.Binary)
                    {
                        throw new VersionResourceFormatException();
                    }

                    // We know a VS_FIXEDFILEINFO struct is present if the ValueLength > 0
                    if (versionInfo.Header.ValueLength != 0)
                    {
                        // Read the file info
                        value.FixedFileInfo = reader.ReadStruct <VS_FIXEDFILEINFO>();
                        reader.Align();
                    }

                    // Read the children: At most one StringFileInfo and at most one VarFileInfo
                    while (this.Stream.Position < end)
                    {
                        var childOffset = this.Stream.Position;

                        var  childInfo = reader.ReadVersionInfo();
                        long childEnd  = childOffset + childInfo.Header.Length;

                        switch (childInfo.Key)
                        {
                        case "VarFileInfo":
                            if (childInfo.Header.Type != VersionDataType.Text)
                            {
                                throw new VersionResourceFormatException();
                            }

                            value.VarFileInfo = this.ReadVarFileInfo(reader);
                            break;

                        case "StringFileInfo":
                            if (childInfo.Header.Type != VersionDataType.Text)
                            {
                                throw new VersionResourceFormatException();
                            }

                            value.StringFileInfo = this.ReadStringFileInfo(reader, childEnd);
                            break;
                        }
                        // NOTE(review): a child with an unrecognized key is not explicitly
                        // skipped to childEnd; this assumes ReadVersionInfo already consumed
                        // the whole child - confirm against the reader implementation.
                    }

                    return(value);
                }
        }
Exemple #9
0
        /// <summary>
        /// <para>
        /// Uploads a file in multiple parts from the local workstation to S3, returning the
        /// <see cref="DownloadManifest"/> details. required by <see cref="DeploymentHelper.DownloadMultiPart(DownloadManifest, string, DownloadProgressDelegate, IRetryPolicy, TimeSpan)"/>
        /// and <see cref="DeploymentHelper.DownloadMultiPartAsync(DownloadManifest, string, DownloadProgressDelegate, TimeSpan, IRetryPolicy, System.Threading.CancellationToken)"/>
        /// to actually download the entire file.  The URI to the uploaded <see cref="DownloadManifest"/> details is also returned.
        /// </para>
        /// <para>
        /// See the remarks for details about how this works.
        /// </para>
        /// </summary>
        /// <param name="sourcePath">Path to the file being uploaded.</param>
        /// <param name="targetFolderUri">
        /// <para>
        /// The target S3 URI structured like <b>https://s3.REGION.amazonaws.com/BUCKET/...</b>
        /// URI referencing an S3 bucket and the optional folder where the file's download information
        /// and parts will be uploaded.
        /// </para>
        /// <note>
        /// The <b>s3://</b> URI scheme is not supported.
        /// </note>
        /// </param>
        /// <param name="version">Optionally specifies the download file version.</param>
        /// <param name="name">Optionally overrides the download file name specified by <paramref name="sourcePath"/> to initialize <see cref="DownloadManifest.Name"/>.</param>
        /// <param name="filename">Optionally overrides the download file name specified by <paramref name="sourcePath"/> to initialize <see cref="DownloadManifest.Filename"/>.</param>
        /// <param name="noMd5File">
        /// This method creates a file named [<paramref name="sourcePath"/>.md5] with the MD5 hash for the entire
        /// uploaded file by default.  You may override this behavior by passing <paramref name="noMd5File"/>=<c>true</c>.
        /// </param>
        /// <param name="maxPartSize">Optionally overrides the maximum part size (defaults to 100 MiB).</param>
        /// <param name="publicReadAccess">Optionally grant the upload public read access.</param>
        /// <param name="progressAction">Optional action called as the file is uploaded, passing the <c>long</c> percent complete.</param>
        /// <returns>The <see cref="DownloadManifest"/> information.</returns>
        /// <returns>The <see cref="DownloadManifest"/> information as well as the URI to the uploaded manifest.</returns>
        /// <remarks>
        /// <para>
        /// This method works by splitting the <paramref name="sourcePath"/> file into parts no larger than
        /// <paramref name="maxPartSize"/> bytes each and the uploading these parts to the specified bucket
        /// and path along with a file holding <see cref="DownloadManifest"/> information describing the download
        /// and its constituent parts.  This information includes details about the download including the
        /// overall MD5 and size as well records describing each part including their URIs, sizes and MD5.
        /// </para>
        /// <para>
        /// The <see cref="DownloadManifest"/> details returned include all of the information required by
        /// <see cref="DeploymentHelper.DownloadMultiPart(DownloadManifest, string, DownloadProgressDelegate, IRetryPolicy, TimeSpan)"/> and
        /// <see cref="DeploymentHelper.DownloadMultiPartAsync(DownloadManifest, string, DownloadProgressDelegate, TimeSpan, IRetryPolicy, System.Threading.CancellationToken)"/>
        /// to actually download the entire file and the URI returned references these same details as
        /// uploaded to S3.
        /// </para>
        /// <para>
        /// You'll need to pass <paramref name="sourcePath"/> as the path to the file being uploaded
        /// and <paramref name="targetFolderUri"/> as the S3 location where the download information and the
        /// file parts will be uploaded.  <paramref name="targetFolderUri"/> may use with the <b>https://</b>
        /// or <b>s3://</b> URI scheme.
        /// </para>
        /// <para>
        /// By default the uploaded file and parts names will be based on the filename part of <paramref name="sourcePath"/>,
        /// but this can be overridden via <paramref name="filename"/>.  The <see cref="DownloadManifest"/> information for the
        /// file will be uploaded as <b>FILENAME.manifest</b> and the parts will be written to a subfolder named
        /// <b>FILENAME.parts</b>.  For example, uploading a large file named <b>myfile.json</b> to <b>https://s3.uswest.amazonaws.com/mybucket</b>
        /// will result S3 file layout like:
        /// </para>
        /// <code>
        /// https://s3.uswest.amazonaws.com/mybucket
        ///     myfile.json.manifest
        ///     myfile.json.parts/
        ///         part-0000
        ///         part-0001
        ///         part-0002
        ///         ...
        /// </code>
        /// <para>
        /// The URI returned in this case will be <b>https://s3.uswest.amazonaws.com/mybucket/myfile.json.manifest</b>.
        /// </para>
        /// </remarks>
        public static (DownloadManifest manifest, string manifestUri) S3UploadMultiPart(
            string sourcePath,
            string targetFolderUri,
            string version               = null,
            string name                  = null,
            string filename              = null,
            bool noMd5File               = false,
            long maxPartSize             = (long)(100 * ByteUnits.MebiBytes),
            bool publicReadAccess        = false,
            Action <long> progressAction = null)
        {
            Covenant.Requires <ArgumentNullException>(!string.IsNullOrEmpty(sourcePath), nameof(sourcePath));
            Covenant.Requires <ArgumentNullException>(!string.IsNullOrEmpty(targetFolderUri), nameof(targetFolderUri));

            if (!Uri.TryCreate(targetFolderUri, UriKind.Absolute, out var uriCheck))
            {
                Covenant.Assert(false, $"Invalid [{nameof(targetFolderUri)}={targetFolderUri}].");
            }

            Covenant.Assert(uriCheck.Scheme == "https", $"Invalid scheme in [{nameof(targetFolderUri)}={targetFolderUri}].  Only [https://] is supported.");

            // Default both the manifest name and the uploaded file name to the
            // file name portion of the source path.

            name     = name ?? Path.GetFileName(sourcePath);
            filename = filename ?? Path.GetFileName(sourcePath);

            // Determine the base URI for the download manifest and parts on S3.

            var baseUri = targetFolderUri;

            if (!baseUri.EndsWith('/'))
            {
                baseUri += '/';
            }

            baseUri += filename;

            // Remove any existing manifest object as well as any parts.

            var manifestUri = $"{baseUri}.manifest";
            var partsFolder = $"{baseUri}.parts/";

            progressAction?.Invoke(0L);

            S3Remove(manifestUri);
            S3Remove(partsFolder, recursive: true, include: $"{partsFolder}*");

            // We're going to upload the parts first, while initializing the download manifest as we go.

            var manifest = new DownloadManifest()
            {
                Name = name, Version = version, Filename = filename
            };

            using (var input = File.OpenRead(sourcePath))
            {
                var partCount   = NeonHelper.PartitionCount(input.Length, maxPartSize);
                var partNumber  = 0;
                var partStart   = 0L;
                var cbRemaining = input.Length;

                // Hash the entire file up front and then rewind for the part uploads.

                manifest.Md5   = CryptoHelper.ComputeMD5String(input);
                input.Position = 0;

                while (cbRemaining > 0)
                {
                    var partSize = Math.Min(cbRemaining, maxPartSize);

                    // NOTE: We use the [0000] format (not the previous [000#]) so that part
                    // zero renders as [part-0000].  The trailing [#] placeholder suppresses
                    // an insignificant zero, which named the first part [part-000] and broke
                    // the documented part layout.  Parts 1+ are formatted identically.

                    var part     = new DownloadPart()
                    {
                        Uri    = $"{partsFolder}part-{partNumber:0000}",
                        Number = partNumber,
                        Size   = partSize,
                    };

                    // We're going to use a substream to compute the MD5 hash for the part
                    // as well as to actually upload the part to S3.

                    using (var partStream = new SubStream(input, partStart, partSize))
                    {
                        part.Md5            = CryptoHelper.ComputeMD5String(partStream);
                        partStream.Position = 0;

                        S3Upload(partStream, part.Uri, publicReadAccess: publicReadAccess);
                    }

                    manifest.Parts.Add(part);

                    // Loop to handle the next part (if any).

                    partNumber++;
                    partStart   += partSize;
                    cbRemaining -= partSize;

                    // Report progress, capping at 99% until the manifest itself is uploaded below.

                    progressAction?.Invoke(Math.Min(99L, (long)(100.0 * (double)partNumber / (double)partCount)));
                }

                manifest.Size = manifest.Parts.Sum(part => part.Size);
            }

            // Upload the manifest.

            S3UploadText(NeonHelper.JsonSerialize(manifest, Formatting.Indented), manifestUri, metadata: $"Content-Type={DeploymentHelper.DownloadManifestContentType}", publicReadAccess: publicReadAccess);

            // Write the MD5 file unless disabled.

            if (!noMd5File)
            {
                File.WriteAllText($"{sourcePath}.md5", manifest.Md5);
            }

            progressAction?.Invoke(100L);

            return (manifest: manifest, manifestUri: manifestUri);
        }
        /// <summary>
        /// Services one request from the embedded HTTP listener.  The raw URL must contain at
        /// least four segments: command, user id, autowatch flag and an argument — a VideoLocal
        /// id for the <b>videolocal</b> command or a base64 encoded file path for <b>file</b>.
        /// Supports OPTIONS (CORS preflight), HEAD, and ranged GET requests; optionally toggles
        /// the watched state once enough of the stream has been sent.
        /// </summary>
        private void Process(System.Net.HttpListenerContext obj)
        {
            Stream org = null;

            try
            {
                string[] dta = obj.Request.RawUrl.Split(new char[] {'/'}, StringSplitOptions.RemoveEmptyEntries);
                if (dta.Length < 4)
                    return;
                string cmd = dta[0].ToLower();
                string user = dta[1];
                string aw = dta[2];
                string arg = dta[3];
                string fullname = string.Empty;
                int userid = 0;
                int autowatch = 0;

                // Parse failures leave these at 0, which disables the autowatch handling below.
                int.TryParse(user, out userid);
                int.TryParse(aw, out autowatch);
                VideoLocal loc = null;
                IFile file = null;
                if (cmd == "videolocal")
                {
                    int sid = 0;
                    int.TryParse(arg, out sid);
                    if (sid == 0)
                    {
                        obj.Response.StatusCode = (int) HttpStatusCode.BadRequest;
                        obj.Response.StatusDescription = "Stream Id missing.";
                        return;
                    }
                    loc = RepoFactory.VideoLocal.GetByID(sid);
                    if (loc == null)
                    {
                        obj.Response.StatusCode = (int)HttpStatusCode.NotFound;
                        obj.Response.StatusDescription = "Stream Id not found.";
                        return;
                    }
            #if DEBUG_STREAM
                    if (loc.VideoLocalID == 6393488934891)
                    {
                        FileSystemResult<IFileSystem> ff = CloudFileSystemPluginFactory.Instance.List.FirstOrDefault(a => a.Name == "Local File System")?.Init("", null, null);
                        if (ff == null || !ff.IsOk)
                            throw new Exception(ff?.Error ?? "Error Opening Local Filesystem");
                        FileSystemResult<IObject> o=ff.Result.Resolve(@"C:\test\unsort\[FTV-Wasurenai] 11eyes - 01 [1280x720 BD H264] [07238189].mkv");
                        if (o.IsOk)
                            file = (IFile) o.Result;
                    }
                    else
            #endif
                        file = loc.GetBestFileLink();
                    if (file == null)
                    {
                        obj.Response.StatusCode = (int) HttpStatusCode.NotFound;
                        obj.Response.StatusDescription = "Stream Id not found.";
                        return;
                    }
                    fullname = file.FullName;
                }
                else if (cmd == "file")
                {
                    fullname = Base64DecodeUrl(arg);
                    file = VideoLocal.ResolveFile(fullname);
                    if (file == null)
                    {
                        obj.Response.StatusCode = (int) HttpStatusCode.NotFound;
                        obj.Response.StatusDescription = "File not found.";
                        return;
                    }
                }
                else
                {
                    obj.Response.StatusCode = (int) HttpStatusCode.BadRequest;
                    obj.Response.StatusDescription = "Not know command";
                    return;
                }

                bool range = false;
                obj.Response.ContentType = GetMime(fullname);
                obj.Response.AddHeader("Accept-Ranges", "bytes");
                obj.Response.AddHeader("X-Plex-Protocol", "1.0");

                // CORS preflight: answer with the allowed methods/headers and stop here.
                if (obj.Request.HttpMethod == "OPTIONS")
                {
                    obj.Response.AddHeader("Access-Control-Allow-Methods", "POST, GET, OPTIONS, DELETE, PUT, HEAD");
                    obj.Response.AddHeader("Access-Control-Max-Age", "1209600");
                    obj.Response.AddHeader("Access-Control-Allow-Headers",
                        "accept, x-plex-token, x-plex-client-identifier, x-plex-username, x-plex-product, x-plex-device, x-plex-platform, x-plex-platform-version, x-plex-version, x-plex-device-name");
                    obj.Response.AddHeader("Cache-Control", "no-cache");
                    obj.Response.ContentType = "text/plain";
                    return;
                }

                // Header lookup is case-sensitive here, so check both common spellings.
                string rangevalue = null;
                if (obj.Request.Headers.AllKeys.Contains("Range"))
                    rangevalue = obj.Request.Headers["Range"].Replace("bytes=", string.Empty).Trim();
                if (obj.Request.Headers.AllKeys.Contains("range"))
                    rangevalue = obj.Request.Headers["range"].Replace("bytes=", string.Empty).Trim();

                if (obj.Request.HttpMethod != "HEAD")
                {
                    FileSystemResult<Stream> fr = file.OpenRead();
                    if (fr == null || !fr.IsOk)
                    {
                        obj.Response.StatusCode = (int) HttpStatusCode.InternalServerError;
                        // Parenthesized: '+' binds tighter than '??', so the original
                        // "... + fr?.Error ?? string.Empty" never applied the fallback.
                        obj.Response.StatusDescription = "Unable to open '" + fullname + "' " +
                                                         (fr?.Error ?? string.Empty);
                        return;
                    }
                    org = fr.Result;
                    long totalsize = org.Length;
                    long start = 0;
                    long end = 0;
                    if (!string.IsNullOrEmpty(rangevalue))
                    {
                        range = true;
                        // Range grammar: "start-end", "start-" (to EOF) or "-suffixLength".
                        string[] split = rangevalue.Split('-');
                        if (split.Length == 2)
                        {
                            if (string.IsNullOrEmpty(split[0]) && !string.IsNullOrEmpty(split[1]))
                            {
                                // "-N": last N bytes of the file.
                                long e = long.Parse(split[1]);
                                start = totalsize - e;
                                end = totalsize - 1;
                            }
                            else if (!string.IsNullOrEmpty(split[0]) && string.IsNullOrEmpty(split[1]))
                            {
                                // "N-": from N to end of file.
                                start = long.Parse(split[0]);
                                end = totalsize - 1;
                            }
                            else if (!string.IsNullOrEmpty(split[0]) && !string.IsNullOrEmpty(split[1]))
                            {
                                // "N-M": explicit range, clamped to the file size.
                                start = long.Parse(split[0]);
                                end = long.Parse(split[1]);
                                if (start > totalsize - 1)
                                    start = totalsize - 1;
                                if (end > totalsize - 1)
                                    end = totalsize - 1;
                            }
                            else
                            {
                                start = 0;
                                end = totalsize - 1;
                            }
                        }
                    }
                    SubStream outstream;
                    if (range)
                    {
                        obj.Response.StatusCode = (int) HttpStatusCode.PartialContent;
                        obj.Response.AddHeader("Content-Range", "bytes " + start + "-" + end + "/" + totalsize);
                        outstream = new SubStream(org, start, end - start + 1);
                        obj.Response.ContentLength64 = end - start + 1;
                    }
                    else
                    {
                        outstream = new SubStream(org, 0, totalsize);
                        obj.Response.ContentLength64 = totalsize;
                        obj.Response.StatusCode = (int) HttpStatusCode.OK;
                    }

                    // Autowatch: once the stream position crosses the watched threshold,
                    // toggle the watched state on a background task.  Only possible for the
                    // [videolocal] command since [file] leaves loc null.
                    if ((userid != 0) && (loc != null) && autowatch == 1)
                    {
                        outstream.CrossPosition = (long) ((double) totalsize*WatchedThreshold);
                        outstream.CrossPositionCrossed +=
                            (a) =>
                            {
                                Task.Factory.StartNew(() => { loc.ToggleWatchedStatus(true, userid); },
                                    new CancellationToken(),
                                    TaskCreationOptions.LongRunning, TaskScheduler.Default);
                            };
                    }
                    obj.Response.SendChunked = false;
                    outstream.CopyTo(obj.Response.OutputStream);
                    obj.Response.OutputStream.Close();
                    outstream.Close();
                }
                else
                {
                    // HEAD: report the size without sending a body.
                    // NOTE(review): uses the local FileInfo length; for cloud-backed files this
                    // path may not resolve locally — confirm.
                    obj.Response.SendChunked = false;
                    obj.Response.StatusCode = (int) HttpStatusCode.OK;
                    obj.Response.ContentLength64 = new FileInfo(fullname).Length;
                    obj.Response.KeepAlive = false;
                    obj.Response.OutputStream.Close();
                }
            }
            catch (HttpListenerException)
            {
                // Client disconnected mid-stream; nothing useful to report.
            }
            catch (Exception e)
            {
                try
                {
                    obj.Response.StatusCode = (int)HttpStatusCode.InternalServerError;
                    obj.Response.StatusDescription = "Internal Server Error";
                }
                catch
                {
                    // Response may already be committed/closed.
                }
                // Call ToString() explicitly rather than passing the method group.
                logger.Warn(e.ToString());
            }
            finally
            {
                try
                {
                    org?.Dispose();
                }
                catch
                {
                    // ignored
                }
                try
                {
                    obj?.Response.OutputStream?.Close();
                    obj?.Response.Close();
                }
                catch
                {
                    // ignored
                }
            }
        }
Exemple #11
0
        /// <summary>
        /// Rebuilds the NCCH image into <paramref name="output"/>: writes each section
        /// (ExHeader, logo region, plain region, ExeFS, RomFS) in sequence, updating the
        /// cached NCCH header's offsets, sizes and SHA-256 hashes as it goes, then writes
        /// the header at position 0.
        /// </summary>
        /// <param name="output">The stream receiving the rebuilt image.</param>
        /// <param name="files">Files to pack; sections are matched by file name or folder.</param>
        public void Save(Stream output, IList <ArchiveFileInfo> files)
        {
            var sha256 = new Kryptography.Hash.Sha256();

            using var bw = new BinaryWriterX(output);

            // Skip past the header area; the header itself is written last (see bottom).
            bw.BaseStream.Position = _ncchHeaderSize;

            // Write and update exHeader information
            var exHeaderFile = files.FirstOrDefault(f => f.FilePath.GetName() == ExHeaderFileName_);

            if (exHeaderFile != null)
            {
                var exHeaderPosition = bw.BaseStream.Position;
                var writtenSize      = exHeaderFile.SaveFileData(output);

                bw.WriteAlignment(MediaSize_);

                // NOTE(review): the header size field is half of the file's size —
                // presumably the ExHeader blob holds two copies while the header field
                // reflects only one; confirm against the loader.
                _ncchHeader.exHeaderSize = (int)(exHeaderFile.FileSize / 2);
                _ncchHeader.exHeaderHash = sha256.Compute(new SubStream(output, exHeaderPosition, _ncchHeader.exHeaderSize));
            }
            else
            {
                // Section absent: zero the 0x20-byte SHA-256 hash and the size field.
                Array.Clear(_ncchHeader.exHeaderHash, 0, 0x20);
                _ncchHeader.exHeaderSize = 0;
            }

            // Write and update logo region information
            var logoRegionFile = files.FirstOrDefault(f => f.FilePath.GetName() == LogoRegionFileName_);

            if (logoRegionFile != null)
            {
                var logoRegionPosition = bw.BaseStream.Position;
                var writtenSize        = logoRegionFile.SaveFileData(output);

                bw.WriteAlignment(MediaSize_);

                // Offsets and sizes in the NCCH header are stored in media units.
                _ncchHeader.logoRegionOffset = (int)(logoRegionPosition / MediaSize_);
                _ncchHeader.logoRegionSize   = (int)((bw.BaseStream.Position - logoRegionPosition) / MediaSize_);
                _ncchHeader.logoRegionHash   = sha256.Compute(new SubStream(output, logoRegionPosition, writtenSize));
            }
            else
            {
                _ncchHeader.logoRegionOffset = 0;
                _ncchHeader.logoRegionSize   = 0;
                Array.Clear(_ncchHeader.logoRegionHash, 0, 0x20);
            }

            // Write and update plain region information
            var plainRegionFile = files.FirstOrDefault(f => f.FilePath.GetName() == PlainRegionFileName_);

            if (plainRegionFile != null)
            {
                var plainRegionPosition = bw.BaseStream.Position;
                plainRegionFile.SaveFileData(output);

                bw.WriteAlignment(MediaSize_);

                // The plain region is not hashed — only offset/size are recorded.
                _ncchHeader.plainRegionOffset = (int)(plainRegionPosition / MediaSize_);
                _ncchHeader.plainRegionSize   = (int)((bw.BaseStream.Position - plainRegionPosition) / MediaSize_);
            }
            else
            {
                _ncchHeader.plainRegionOffset = 0;
                _ncchHeader.plainRegionSize   = 0;
            }

            // Write and update ExeFs
            var exeFsFiles = files.Where(x => x.FilePath.ToRelative().IsInDirectory(ExeFsFolder_, true)).ToArray();

            if (exeFsFiles.Any())
            {
                var exeFsPosition = bw.BaseStream.Position;
                var exeFsSize     = ExeFsBuilder.Build(output, exeFsFiles);

                _ncchHeader.exeFsOffset         = (int)(exeFsPosition / MediaSize_);
                _ncchHeader.exeFsSize           = (int)(exeFsSize / MediaSize_);
                _ncchHeader.exeFsHashRegionSize = _exeFsHeaderSize / MediaSize_;
                // The super block hash covers only the ExeFS header region.
                _ncchHeader.exeFsSuperBlockHash = sha256.Compute(new SubStream(output, exeFsPosition, _exeFsHeaderSize));

                bw.WriteAlignment(0x1000);
            }
            else
            {
                _ncchHeader.exeFsOffset         = 0;
                _ncchHeader.exeFsSize           = 0;
                _ncchHeader.exeFsHashRegionSize = 0;
                Array.Clear(_ncchHeader.exeFsSuperBlockHash, 0, 0x20);
            }

            // Write and update RomFs
            var romFsFiles = files.Where(x => x.FilePath.ToRelative().IsInDirectory(RomFsFolder_, true)).ToArray();

            if (romFsFiles.Any())
            {
                var romFsPosition = bw.BaseStream.Position;
                var romFsSize1    = RomFsBuilder.CalculateRomFsSize(romFsFiles, RomFsFolder_);

                // Pre-extend the stream with zeroes (0x4000 bytes at a time) so the RomFs
                // builder can write into an already-sized SubStream window below.
                var buffer = new byte[0x4000];
                var size   = romFsSize1;
                while (size > 0)
                {
                    var length = (int)Math.Min(size, 0x4000);
                    bw.BaseStream.Write(buffer, 0, length);

                    size -= length;
                }
                var romFsStream = new SubStream(bw.BaseStream, romFsPosition, romFsSize1);

                var(_, _) = RomFsBuilder.Build(romFsStream, romFsFiles, RomFsFolder_);

                _ncchHeader.romFsOffset         = (int)(romFsPosition / MediaSize_);
                _ncchHeader.romFsSize           = (int)(romFsSize1 / MediaSize_);
                _ncchHeader.romFsHashRegionSize = 1;    // Only the first 0x200 of the RomFs get into the hash region apparently
                _ncchHeader.romFsSuperBlockHash = sha256.Compute(new SubStream(output, romFsPosition, MediaSize_));
            }
            else
            {
                _ncchHeader.romFsOffset         = 0;
                _ncchHeader.romFsSize           = 0;
                _ncchHeader.romFsHashRegionSize = 0;
                Array.Clear(_ncchHeader.romFsSuperBlockHash, 0, 0x20);
            }

            // Write header
            // HINT: Set NCCH flags to NoCrypto mode
            _ncchHeader.ncchFlags[7] = 4;
            _ncchHeader.ncchSize     = (int)(output.Length / MediaSize_);

            bw.BaseStream.Position = 0;
            bw.WriteType(_ncchHeader);
        }
Exemple #12
0
        /// <summary>
        /// Builds a streaming response body for the file described by <paramref name="r"/>,
        /// honoring an optional HTTP Range header and, when requested, toggling the watched
        /// state once the stream position crosses the watched threshold.
        /// </summary>
        /// <param name="r">Resolved file/user/video information for the request.</param>
        /// <param name="autowatch">When true (and user/video are present), enables the watched toggle.</param>
        /// <returns>A <see cref="SubStream"/> over the requested byte range, or an error status result.</returns>
        private object StreamFromIFile(InfoResult r, bool?autowatch)
        {
            try
            {
                // Header lookup is done for both common spellings of the Range header.
                string rangevalue = Request.Headers["Range"].FirstOrDefault() ??
                                    Request.Headers["range"].FirstOrDefault();


                Stream fr    = null;
                string error = null;
                try
                {
                    fr = r.File?.OpenRead();
                }
                catch (Exception e)
                {
                    Logger.Error(e);
                    error = e.ToString();
                }

                if (fr == null)
                {
                    return(StatusCode((int)HttpStatusCode.BadRequest,
                                      "Unable to open file '" + r.File?.FullName + "': " + error));
                }
                long totalsize = fr.Length;
                long start     = 0;
                long end       = totalsize - 1;

                rangevalue = rangevalue?.Replace("bytes=", string.Empty);
                bool range = !string.IsNullOrEmpty(rangevalue);

                if (range)
                {
                    // range: bytes=split[0]-split[1]
                    string[] split = rangevalue.Split('-');
                    if (split.Length == 2)
                    {
                        // bytes=-split[1] - tail of specified length
                        if (string.IsNullOrEmpty(split[0]) && !string.IsNullOrEmpty(split[1]))
                        {
                            long e = long.Parse(split[1]);
                            start = totalsize - e;
                            end   = totalsize - 1;
                        }
                        // bytes=split[0] - split[0] to end of file
                        else if (!string.IsNullOrEmpty(split[0]) && string.IsNullOrEmpty(split[1]))
                        {
                            start = long.Parse(split[0]);
                            end   = totalsize - 1;
                        }
                        // bytes=split[0]-split[1] - specified beginning and end
                        else if (!string.IsNullOrEmpty(split[0]) && !string.IsNullOrEmpty(split[1]))
                        {
                            start = long.Parse(split[0]);
                            end   = long.Parse(split[1]);
                            if (start > totalsize - 1)
                            {
                                start = totalsize - 1;
                            }
                            if (end > totalsize - 1)
                            {
                                end = totalsize - 1;
                            }
                        }
                    }
                }

                // NOTE(review): Content-Range is emitted even for non-range requests
                // (where it spans the whole file) — confirm clients tolerate this.
                Response.ContentType = r.Mime;
                Response.Headers.Add("Server", SERVER_VERSION);
                Response.Headers.Add("Connection", "keep-alive");
                Response.Headers.Add("Accept-Ranges", "bytes");
                Response.Headers.Add("Content-Range", "bytes " + start + "-" + end + "/" + totalsize);
                Response.ContentLength = end - start + 1;

                Response.StatusCode = (int)(range ? HttpStatusCode.PartialContent : HttpStatusCode.OK);

                var outstream = new SubStream(fr, start, end - start + 1);
                if (r.User != null && autowatch.HasValue && autowatch.Value && r.VideoLocal != null)
                {
                    // Fire the watched toggle on a background task once enough has streamed.
                    outstream.CrossPosition         = (long)(totalsize * WatchedThreshold);
                    outstream.CrossPositionCrossed +=
                        a =>
                    {
                        Task.Factory.StartNew(() => { r.VideoLocal.ToggleWatchedStatus(true, r.User.JMMUserID); },
                                              new CancellationToken(),
                                              TaskCreationOptions.LongRunning, TaskScheduler.Default);
                    };
                }

                return(outstream);
            }
            catch (Exception e)
            {
                Logger.Error("An error occurred while serving a file: " + e);
                return(StatusCode(500, e.Message));
            }
        }
Exemple #13
0
        /// <summary>
        /// Parses an NCCH image from <paramref name="input"/> and exposes its sections
        /// (ExHeader, plain region, logo, ExeFS files, RomFS files) as archive entries
        /// backed by <see cref="SubStream"/>s over the input stream.
        /// </summary>
        /// <param name="input">Stream containing the NCCH image; left open.</param>
        /// <returns>The file entries discovered in the image.</returns>
        public IList <ArchiveFileInfo> Load(Stream input)
        {
            using var br = new BinaryReaderX(input, true);

            // Read header
            _ncchHeader = br.ReadType <NcchHeader>();

            var result = new List <ArchiveFileInfo>();

            // Add ExtendedHeader
            if (_ncchHeader.exHeaderSize != 0)
            {
                // ExHeader is stored 2 times, but stored size only reflects one of them
                var exHeaderStream = new SubStream(input, br.BaseStream.Position, _ncchHeader.exHeaderSize * 2);
                result.Add(new ArchiveFileInfo(exHeaderStream, ExHeaderFileName_));
            }

            // Add PlainRegion
            // Header offsets/sizes are stored in media units; multiply by MediaSize_ for bytes.
            if (_ncchHeader.plainRegionOffset != 0 && _ncchHeader.plainRegionSize != 0)
            {
                var plainRegionStream = new SubStream(input, _ncchHeader.plainRegionOffset * MediaSize_, _ncchHeader.plainRegionSize * MediaSize_);
                result.Add(new ArchiveFileInfo(plainRegionStream, PlainRegionFileName_));
            }

            // Add LogoRegion
            if (_ncchHeader.logoRegionOffset != 0 && _ncchHeader.logoRegionSize != 0)
            {
                var logoStream = new SubStream(input, _ncchHeader.logoRegionOffset * MediaSize_, _ncchHeader.logoRegionSize * MediaSize_);
                result.Add(new ArchiveFileInfo(logoStream, LogoRegionFileName_));
                // TODO: Add Guid for logo icn
            }

            // Add ExeFS
            if (_ncchHeader.exeFsOffset != 0 && _ncchHeader.exeFsSize != 0)
            {
                // Read and resolve ExeFS data
                br.BaseStream.Position = _ncchHeader.exeFsOffset * MediaSize_;
                var exeFs             = br.ReadType <NcchExeFsHeader>();
                var exeFsFilePosition = br.BaseStream.Position;

                // Add Files from ExeFS
                foreach (var file in exeFs.fileEntries)
                {
                    // A zeroed entry marks the end of the used portion of the file table.
                    if (file.offset == 0 && file.size == 0)
                    {
                        break;
                    }

                    // ExeFS file offsets are relative to the end of the ExeFS header.
                    var exeFsFileStream = new SubStream(input, exeFsFilePosition + file.offset, file.size);
                    result.Add(new ArchiveFileInfo(exeFsFileStream, ExeFsFolder_ + "/" + file.name.TrimEnd('\0')));
                    // TODO: Add decompression if file.name == ".code" && (exHeader.sci.flag & 0x1) == 1
                }
            }

            // Add RomFS
            if (_ncchHeader.romFsOffset != 0 && _ncchHeader.romFsSize != 0)
            {
                // Read and resolve RomFS data
                br.BaseStream.Position = _ncchHeader.romFsOffset * MediaSize_;
                var romFs = new NcchRomFs(input);

                // Add Files from RomFS
                foreach (var file in romFs.Files)
                {
                    var romFsFileStream = new SubStream(br.BaseStream, file.fileOffset, file.fileSize);
                    result.Add(new ArchiveFileInfo(romFsFileStream, RomFsFolder_ + file.filePath));
                }
            }

            return(result);
        }
Exemple #14
0
        /// <summary>
        /// Write IVFC hash levels.
        /// </summary>
        /// <param name="output">The stream to write to.</param>
        /// <param name="metaDataPosition">The position of the initial data to hash.</param>
        /// <param name="metaDataSize">The size of the initial data to hash.</param>
        /// <param name="masterHashPosition">The separate position at which the master hash level is written.</param>
        /// <param name="levels">Number of levels to write.</param>
        /// <returns>Position, written size, and aligned size of each written level.</returns>
        private static IList <(long, long, long)> WriteIvfcLevels(Stream output, long metaDataPosition, long metaDataSize,
                                                                  long masterHashPosition, int levels)
        {
            // Pre-calculate hash level sizes; each level hashes the previous one in
            // BlockSize_ chunks (one 0x20-byte SHA-256 per chunk), aligned up to BlockSize_.
            var hashLevelSizes = new long[levels];

            var alignedMetaDataSize = (metaDataSize + BlockSize_ - 1) & ~(BlockSize_ - 1);

            for (var level = 0; level < levels - 1; level++)
            {
                var previousSize     = level == 0 ? alignedMetaDataSize : hashLevelSizes[level - 1];
                var levelSize        = previousSize / BlockSize_ * 0x20;
                var alignedLevelSize = (levelSize + BlockSize_ - 1) & ~(BlockSize_ - 1);

                hashLevelSizes[level] = alignedLevelSize;
            }

            // Pre-calculate hash level positions; the levels are laid out after the meta
            // data with higher intermediate levels first (level 0 written last).
            var hashLevelPositions = new long[levels];

            var alignedMetaDataPosition = (metaDataPosition + BlockSize_ - 1) & ~(BlockSize_ - 1);

            for (var level = 0; level < levels - 1; level++)
            {
                var levelPosition = alignedMetaDataPosition + alignedMetaDataSize + hashLevelSizes.Skip(level + 1).Take(levels - level - 2).Sum(x => x);

                hashLevelPositions[level] = levelPosition;
            }

            // The master hash level lives at a caller-supplied position and is one block long.
            hashLevelSizes[levels - 1]     = BlockSize_;
            hashLevelPositions[levels - 1] = masterHashPosition;

            // Write hash levels
            var result = new List <(long, long, long)>();
            var sha256 = new Kryptography.Hash.Sha256();

            var previousLevelPosition = alignedMetaDataPosition;
            var previousLevelSize     = alignedMetaDataSize;

            for (var level = 0; level < levels; level++)
            {
                var previousLevelStream = new SubStream(output, previousLevelPosition, previousLevelSize);
                var levelStream         = new SubStream(output, hashLevelPositions[level], hashLevelSizes[level]);

                var block = new byte[BlockSize_];
                while (previousLevelStream.Position < previousLevelStream.Length)
                {
                    // Stream.Read is not guaranteed to fill the buffer in one call; loop
                    // until the block is complete (or the stream ends early).  The original
                    // single Read call could hash stale bytes left over from the previous
                    // iteration on a short read.
                    var filled = 0;
                    while (filled < BlockSize_)
                    {
                        var bytesRead = previousLevelStream.Read(block, filled, BlockSize_ - filled);
                        if (bytesRead == 0)
                            break;

                        filled += bytesRead;
                    }

                    // Zero any unfilled tail so a short final block hashes deterministically.
                    if (filled < BlockSize_)
                        Array.Clear(block, filled, BlockSize_ - filled);

                    var hash = sha256.Compute(block);
                    levelStream.Write(hash);
                }

                result.Add((hashLevelPositions[level], levelStream.Position, hashLevelSizes[level]));

                // The level just written becomes the input for the next one.
                previousLevelPosition = hashLevelPositions[level];
                previousLevelSize     = hashLevelSizes[level];
            }

            return result;
        }
Exemple #15
0
        /// <summary>
        /// Parses a DARC archive from <paramref name="input"/> and returns the contained files.
        /// The byte order is detected from the marker at offset 4 before the header is read.
        /// </summary>
        /// <param name="input">Stream positioned at the start of the DARC data; left open.</param>
        /// <returns>One <c>ArchiveFileInfo</c> per file entry, with full path resolved.</returns>
        public IList <ArchiveFileInfo> Load(Stream input)
        {
            using var br = new BinaryReaderX(input, true);

            // Select byte order
            br.ByteOrder           = ByteOrder.BigEndian;
            br.BaseStream.Position = 4;
            _byteOrder             = br.ReadType <ByteOrder>();

            br.ByteOrder = _byteOrder;

            // Read header
            br.BaseStream.Position = 0;
            var header = br.ReadType <DarcHeader>();

            // Read entries
            // The root entry's size field holds the total number of entries,
            // so the table is read twice: once for the root, once for all entries.
            br.BaseStream.Position = header.tableOffset;
            var rootEntry = br.ReadType <DarcEntry>();

            br.BaseStream.Position = header.tableOffset;
            var entries = br.ReadMultiple <DarcEntry>(rootEntry.size);

            // Read names
            // The name table sits between the entry table and the data section.
            var nameStream = new SubStream(input, br.BaseStream.Position, header.dataOffset - br.BaseStream.Position);

            // Add files
            using var nameBr = new BinaryReaderX(nameStream);

            var result             = new List <ArchiveFileInfo>();
            var lastDirectoryEntry = entries[0];

            foreach (var entry in entries.Skip(1))
            {
                // A file does not know of its parent directory
                // The tree is structured so that the last directory entry read must hold the current file

                // Remember the last directory entry
                if (entry.IsDirectory)
                {
                    lastDirectoryEntry = entry;
                    continue;
                }

                // Find whole path recursively from lastDirectoryEntry
                // Directory entries chain to their parent via the offset field
                // (used as an index into the entry table); walk up to the root.
                var currentDirectoryEntry = lastDirectoryEntry;
                var currentPath           = UPath.Empty;
                while (currentDirectoryEntry != entries[0])
                {
                    nameBr.BaseStream.Position = currentDirectoryEntry.NameOffset;
                    currentPath = nameBr.ReadCStringUTF16() / currentPath;

                    currentDirectoryEntry = entries[currentDirectoryEntry.offset];
                }

                // Get file name
                nameBr.BaseStream.Position = entry.NameOffset;
                var fileName = currentPath / nameBr.ReadCStringUTF16();

                // File data is referenced by absolute offset/size into the input stream.
                var fileStream = new SubStream(input, entry.offset, entry.size);
                result.Add(new ArchiveFileInfo(fileStream, fileName.FullName));
            }

            return(result);
        }
Exemple #16
0
        /// <summary>
        /// Writes an MT Framework archive to <paramref name="output"/>: file data first
        /// (optionally Blowfish-encrypted), then the entry table, then the header.
        /// </summary>
        /// <param name="output">Destination stream; written non-sequentially via explicit positioning.</param>
        /// <param name="files">Files to store; must all be <c>MtArchiveFileInfo</c> instances.</param>
        public void Save(Stream output, IList <IArchiveFileInfo> files)
        {
            var key = GetCipherKey("imaguy_uyrag_igurustim_", "enokok_ikorodo_odohuran");

            using var bw = new BinaryWriterX(output, _byteOrder);

            // Little-endian archives with versions other than 7/8/9 carry 4 extra header bytes.
            var isExtendedHeader = _byteOrder == ByteOrder.LittleEndian && _header.version != 7 && _header.version != 8 && _header.version != 9;

            // Calculate offsets
            var entryOffset = HeaderSize + (isExtendedHeader ? 4 : 0);
            var fileOffset  = MtArcSupport.DetermineFileOffset(_byteOrder, _header.version, files.Count, entryOffset);

            // Write files
            var entries = new List <IMtEntry>();

            var filePosition = fileOffset;

            foreach (var file in files.Cast <MtArchiveFileInfo>())
            {
                output.Position = filePosition;

                long writtenSize;
                if (!_isEncrypted)
                {
                    writtenSize = file.SaveFileData(output);
                }
                else
                {
                    // Encrypt into a temporary buffer, then copy the ciphertext out.
                    var fileStream = file.GetFinalStream();

                    var ms = new MemoryStream();
                    var encryptedStream = new MtBlowfishStream(ms, key);

                    fileStream.CopyTo(encryptedStream);

                    ms.Position = 0;
                    ms.CopyTo(output);

                    // NOTE(review): writtenSize is the plaintext length, but ms.Length bytes
                    // were actually copied to output. If MtBlowfishStream pads to a cipher
                    // block boundary the two differ and subsequent files would overlap —
                    // confirm ms.Length == fileStream.Length for this cipher wrapper.
                    writtenSize = fileStream.Length;
                }

                // Record placement and sizes in the entry for the table written below.
                file.Entry.Offset = filePosition;
                file.Entry.SetDecompressedSize((int)file.FileSize, _platform);
                file.Entry.CompSize = (int)writtenSize;
                entries.Add(file.Entry);

                filePosition += (int)writtenSize;
            }

            // Write entries
            // The entry table is written through a SubStream window so offsets in the
            // table stay relative to entryOffset; encrypted archives encrypt it too.
            Stream entryStream = new SubStream(output, entryOffset, output.Length - entryOffset);

            if (_isEncrypted)
            {
                entryStream = new MtBlowfishStream(entryStream, key);
            }

            using var entryBw = new BinaryWriterX(entryStream, _byteOrder);
            entryBw.WriteMultiple(entries);

            // Write header
            _header.entryCount = (short)files.Count;

            output.Position = 0;
            bw.WriteType(_header);
        }
Exemple #17
0
 /// <summary>
 /// Initializes a new instance of the <see cref="Asn1Real"/> class.
 /// Preferably used when reading REAL. The encoding of a real value shall be primitive.
 /// </summary>
 /// <param name="content">BER encoded value in a Stream.</param>
 /// <param name="constructed">Flag if type is constructed or primitive.</param>
 /// <remarks>Delegates all parsing to the base type, tagging the value as UNIVERSAL REAL.</remarks>
 internal Asn1Real(SubStream content, bool constructed)
     : base(Asn1Class.Universal, constructed, (int)Asn1Type.Real, content)
 {
 }
        /// <summary>
        /// Reads a Sims 3 CLIP resource (TID 0x6B20C4F3) from the given stream, populating
        /// the clip data, slot table, actor name, end data and event table.
        /// Format reference: http://simswiki.info/wiki.php?title=Sims_3:0x6B20C4F3
        /// </summary>
        /// <param name="Source">Stream positioned at the start of the resource. Closed on completion.</param>
        /// <exception cref="InvalidDataException">A signature, version or event-type check failed.</exception>
        private void Import(Stream Source)
        {
            BinaryReader Reader = new BinaryReader(Source);

            // Type id must identify a CLIP resource.
            UInt32 TID = Reader.ReadUInt32();
            if (TID != 0x6b20c4f3)
                throw new InvalidDataException();

            Reader.ReadUInt32(); // offset field — value unused, read only to advance the stream

            UInt32 ClipSize = Reader.ReadUInt32();

            // The next four offsets are stored relative to the position of the field itself.
            UInt32 ClipOffset = (UInt32)Reader.BaseStream.Position + Reader.ReadUInt32();
            UInt32 SlotOffset = (UInt32)Reader.BaseStream.Position + Reader.ReadUInt32();
            UInt32 ActorOffset = (UInt32)Reader.BaseStream.Position + Reader.ReadUInt32();
            UInt32 EventOffset = (UInt32)Reader.BaseStream.Position + Reader.ReadUInt32();

            Unknown1 = Reader.ReadUInt32();
            Unknown2 = Reader.ReadUInt32();

            UInt32 EndOffset = Reader.ReadUInt32();
            Reader.ReadBytes(16); // 16 unknown bytes — skipped (position is set explicitly below)

            // Read the four floats stored at the end-data offset.
            Reader.BaseStream.Position = EndOffset;
            EndData = new Single[4];
            EndData[0] = Reader.ReadSingle();
            EndData[1] = Reader.ReadSingle();
            EndData[2] = Reader.ReadSingle();
            EndData[3] = Reader.ReadSingle();

            // Clip data.
            Stream ClipStream = new SubStream(Source, ClipOffset, ClipSize);
            ImportClip(ClipStream);

            // Slot table occupies the gap between the slot and actor offsets.
            Stream SlotStream = new SubStream(Source, SlotOffset, ActorOffset - SlotOffset);
            ImportSlotTable(SlotStream);

            // Actor name: NUL-terminated ASCII, padded up to the event offset.
            Reader.BaseStream.Position = ActorOffset;
            int actorlen = (int)(EventOffset - ActorOffset);
            ActorName = ReadNullASCIIString(Reader, actorlen);

            // Event table, introduced by a "=CE=" signature and version 0x0103.
            Reader.BaseStream.Position = EventOffset;
            string ceSIG = Encoding.ASCII.GetString(Reader.ReadBytes(4));
            if (ceSIG != "=CE=")
                throw new InvalidDataException();

            UInt32 ceVersion = Reader.ReadUInt32();
            if (ceVersion != 0x0103)
                throw new InvalidDataException();

            UInt32 ceCount = Reader.ReadUInt32();
            UInt32 ceSize = Reader.ReadUInt32();   // unused, read to advance the stream
            UInt32 ceOffset = Reader.ReadUInt32(); // unused, read to advance the stream

            ClipTable = new ClipEvent[ceCount];

            for (int ceI = 0; ceI < ceCount; ceI++)
            {
                UInt16 ceType = Reader.ReadUInt16();

                ClipEvent Event;

                // The 16-bit type tag selects the concrete event payload class.
                switch (ceType)
                {
                    case 1: Event = new ClipEventAttach(); break;
                    case 2: Event = new ClipEventUnParent(); break;
                    case 3: Event = new ClipEventPlaySound(); break;
                    case 4: Event = new ClipEventSACS(); break;
                    case 5: Event = new ClipEventPlayEffect(); break;
                    case 6: Event = new ClipEventVisibility(); break;
                    case 9: Event = new ClipEventDestroyProp(); break;
                    case 10: Event = new ClipEventStopEffect(); break;
                    default: throw new InvalidDataException();
                }

                ClipTable[ceI] = Event;

                Event.Import(Reader);
            }

            Source.Close();
        }
Exemple #19
0
            /// <summary>
            /// Lays out the extents of a dynamic VHDX image: file header, two VHDX headers,
            /// two copies of the region table, the metadata region, the BAT, and the data
            /// blocks that are present in the source content.
            /// </summary>
            /// <param name="totalLength">Receives the total length of the generated file.</param>
            /// <returns>The ordered list of extents to write.</returns>
            /// <exception cref="NotSupportedException">The disk type is not dynamic.</exception>
            internal override List<BuilderExtent> FixExtents(out long totalLength)
            {
                if (_diskType != DiskType.Dynamic)
                {
                    throw new NotSupportedException("Creation of only dynamic disks currently implemented");
                }

                List<BuilderExtent> extents = new List<BuilderExtent>();

                int logicalSectorSize = 512;
                int physicalSectorSize = 4096;

                // Number of data blocks covered by one sector-bitmap block.
                long chunkRatio = (0x800000L * logicalSectorSize) / _blockSize;
                long dataBlocksCount = Utilities.Ceil(_content.Length, _blockSize);

                // The BAT interleaves one sector-bitmap entry after each chunk of data entries.
                long totalBatEntriesDynamic = dataBlocksCount + ((dataBlocksCount - 1) / chunkRatio);

                FileHeader fileHeader = new FileHeader() { Creator = ".NET DiscUtils" };

                // The log region follows the fixed 1 MiB header area.
                long fileEnd = Sizes.OneMiB;

                VhdxHeader header1 = new VhdxHeader();
                header1.SequenceNumber = 0;
                header1.FileWriteGuid = Guid.NewGuid();
                header1.DataWriteGuid = Guid.NewGuid();
                header1.LogGuid = Guid.Empty;
                header1.LogVersion = 0;
                header1.Version = 1;
                header1.LogLength = (uint)Sizes.OneMiB;
                header1.LogOffset = (ulong)fileEnd;
                header1.CalcChecksum();

                fileEnd += header1.LogLength;

                // Second header is a copy with a bumped sequence number.
                VhdxHeader header2 = new VhdxHeader(header1);
                header2.SequenceNumber = 1;
                header2.CalcChecksum();

                RegionTable regionTable = new RegionTable();

                RegionEntry metadataRegion = new RegionEntry();
                metadataRegion.Guid = RegionEntry.MetadataRegionGuid;
                metadataRegion.FileOffset = fileEnd;
                metadataRegion.Length = (uint)Sizes.OneMiB;
                metadataRegion.Flags = RegionFlags.Required;
                regionTable.Regions.Add(metadataRegion.Guid, metadataRegion);

                fileEnd += metadataRegion.Length;

                RegionEntry batRegion = new RegionEntry();
                batRegion.Guid = RegionEntry.BatGuid;
                batRegion.FileOffset = fileEnd;
                batRegion.Length = (uint)Utilities.RoundUp(totalBatEntriesDynamic * 8, Sizes.OneMiB);
                batRegion.Flags = RegionFlags.Required;
                regionTable.Regions.Add(batRegion.Guid, batRegion);

                fileEnd += batRegion.Length;

                // Fixed-position structures; the region table is stored twice.
                extents.Add(ExtentForStruct(fileHeader, 0));
                extents.Add(ExtentForStruct(header1, 64 * Sizes.OneKiB));
                extents.Add(ExtentForStruct(header2, 128 * Sizes.OneKiB));
                extents.Add(ExtentForStruct(regionTable, 192 * Sizes.OneKiB));
                extents.Add(ExtentForStruct(regionTable, 256 * Sizes.OneKiB));

                // Metadata region content.
                FileParameters fileParams = new FileParameters() { BlockSize = (uint)_blockSize, Flags = FileParametersFlags.None };

                byte[] metadataBuffer = new byte[metadataRegion.Length];
                MemoryStream metadataStream = new MemoryStream(metadataBuffer);
                Metadata.Initialize(metadataStream, fileParams, (ulong)_content.Length, (uint)logicalSectorSize, (uint)physicalSectorSize, null);
                extents.Add(new BuilderBufferExtent(metadataRegion.FileOffset, metadataBuffer));

                List<Range<long, long>> presentBlocks = new List<Range<long, long>>(StreamExtent.Blocks(_content.Extents, _blockSize));

                // BAT
                BlockAllocationTableBuilderExtent batExtent = new BlockAllocationTableBuilderExtent(batRegion.FileOffset, batRegion.Length, presentBlocks, fileEnd, _blockSize, chunkRatio);
                extents.Add(batExtent);

                // Data blocks that are actually present in the source stream.
                foreach (var range in presentBlocks)
                {
                    long substreamStart = range.Offset * _blockSize;
                    long substreamCount = Math.Min(_content.Length - substreamStart, range.Count * _blockSize);

                    SubStream dataSubStream = new SubStream(_content, substreamStart, substreamCount);
                    BuilderSparseStreamExtent dataExtent = new BuilderSparseStreamExtent(fileEnd, dataSubStream);
                    extents.Add(dataExtent);

                    fileEnd += range.Count * _blockSize;
                }

                totalLength = fileEnd;

                return extents;
            }
 /// <summary>
 /// Loads the VMDK descriptor from the given stream. The stream either contains a
 /// plain-text descriptor file, or a hosted sparse extent with an embedded descriptor
 /// (detected by the magic number in the first sector). When opened for write access,
 /// the content id is regenerated and the descriptor is written back in place.
 /// </summary>
 /// <param name="s">Stream over the descriptor file or sparse extent.</param>
 private void LoadDescriptor(Stream s)
 {
     s.Position = 0;
     byte[] header = Utilities.ReadFully(s, (int)Math.Min(Sizes.Sector, s.Length));
     if (header.Length < Sizes.Sector || Utilities.ToUInt32LittleEndian(header, 0) != HostedSparseExtentHeader.VmdkMagicNumber)
     {
         // Plain descriptor file (no sparse-extent magic).
         s.Position = 0;
         _descriptor = new DescriptorFile(s);
         if (_access != FileAccess.Read)
         {
             // Regenerate the content id and rewrite the whole file, truncating
             // any leftover bytes from the previous (possibly longer) descriptor.
             _descriptor.ContentId = (uint)_rng.Next();
             s.Position = 0;
             _descriptor.Write(s);
             s.SetLength(s.Position);
         }
     }
     else
     {
         // This is a sparse disk extent, hopefully with embedded descriptor...
         // NOTE(review): when DescriptorOffset is 0, _descriptor is left unset —
         // confirm callers handle a sparse extent without an embedded descriptor.
         HostedSparseExtentHeader hdr = HostedSparseExtentHeader.Read(header, 0);
         if (hdr.DescriptorOffset != 0)
         {
             // Offsets/sizes in the header are in sectors.
             Stream descriptorStream = new SubStream(s, hdr.DescriptorOffset * Sizes.Sector, hdr.DescriptorSize * Sizes.Sector);
             _descriptor = new DescriptorFile(descriptorStream);
             if (_access != FileAccess.Read)
             {
                 _descriptor.ContentId = (uint)_rng.Next();
                 descriptorStream.Position = 0;
                 _descriptor.Write(descriptorStream);
                 // Zero-fill the remainder of the descriptor area so stale text
                 // from a previous descriptor cannot survive.
                 byte[] blank = new byte[descriptorStream.Length - descriptorStream.Position];
                 descriptorStream.Write(blank, 0, blank.Length);
             }
         }
     }
 }
Exemple #21
0
            /// <summary>
            /// Lays out the extents of a dynamic VHDX image: file header, two VHDX headers,
            /// two copies of the region table, the metadata region, the BAT, and the data
            /// blocks that are present in the source content.
            /// </summary>
            /// <param name="totalLength">Receives the total length of the generated file.</param>
            /// <returns>The ordered list of extents to write.</returns>
            /// <exception cref="NotSupportedException">The disk type is not dynamic.</exception>
            internal override List<BuilderExtent> FixExtents(out long totalLength)
            {
                if (_diskType != DiskType.Dynamic)
                {
                    throw new NotSupportedException("Creation of only dynamic disks currently implemented");
                }

                List<BuilderExtent> extents = new List<BuilderExtent>();

                int logicalSectorSize = 512;
                int physicalSectorSize = 4096;

                // Number of data blocks covered by one sector-bitmap block.
                long chunkRatio = (0x800000L * logicalSectorSize) / _blockSize;
                long dataBlocksCount = Utilities.Ceil(_content.Length, _blockSize);

                // The BAT interleaves one sector-bitmap entry after each chunk of data entries.
                long totalBatEntriesDynamic = dataBlocksCount + ((dataBlocksCount - 1) / chunkRatio);

                FileHeader fileHeader = new FileHeader() { Creator = ".NET DiscUtils" };

                // The log region follows the fixed 1 MiB header area.
                long fileEnd = Sizes.OneMiB;

                VhdxHeader header1 = new VhdxHeader();
                header1.SequenceNumber = 0;
                header1.FileWriteGuid = Guid.NewGuid();
                header1.DataWriteGuid = Guid.NewGuid();
                header1.LogGuid = Guid.Empty;
                header1.LogVersion = 0;
                header1.Version = 1;
                header1.LogLength = (uint)Sizes.OneMiB;
                header1.LogOffset = (ulong)fileEnd;
                header1.CalcChecksum();

                fileEnd += header1.LogLength;

                // Second header is a copy with a bumped sequence number.
                VhdxHeader header2 = new VhdxHeader(header1);
                header2.SequenceNumber = 1;
                header2.CalcChecksum();

                RegionTable regionTable = new RegionTable();

                RegionEntry metadataRegion = new RegionEntry();
                metadataRegion.Guid = RegionEntry.MetadataRegionGuid;
                metadataRegion.FileOffset = fileEnd;
                metadataRegion.Length = (uint)Sizes.OneMiB;
                metadataRegion.Flags = RegionFlags.Required;
                regionTable.Regions.Add(metadataRegion.Guid, metadataRegion);

                fileEnd += metadataRegion.Length;

                RegionEntry batRegion = new RegionEntry();
                batRegion.Guid = RegionEntry.BatGuid;
                batRegion.FileOffset = fileEnd;
                batRegion.Length = (uint)Utilities.RoundUp(totalBatEntriesDynamic * 8, Sizes.OneMiB);
                batRegion.Flags = RegionFlags.Required;
                regionTable.Regions.Add(batRegion.Guid, batRegion);

                fileEnd += batRegion.Length;

                // Fixed-position structures; the region table is stored twice.
                extents.Add(ExtentForStruct(fileHeader, 0));
                extents.Add(ExtentForStruct(header1, 64 * Sizes.OneKiB));
                extents.Add(ExtentForStruct(header2, 128 * Sizes.OneKiB));
                extents.Add(ExtentForStruct(regionTable, 192 * Sizes.OneKiB));
                extents.Add(ExtentForStruct(regionTable, 256 * Sizes.OneKiB));

                // Metadata region content.
                FileParameters fileParams = new FileParameters() { BlockSize = (uint)_blockSize, Flags = FileParametersFlags.None };

                byte[] metadataBuffer = new byte[metadataRegion.Length];
                MemoryStream metadataStream = new MemoryStream(metadataBuffer);
                Metadata.Initialize(metadataStream, fileParams, (ulong)_content.Length, (uint)logicalSectorSize, (uint)physicalSectorSize, null);
                extents.Add(new BuilderBufferExtent(metadataRegion.FileOffset, metadataBuffer));

                List<Range<long, long>> presentBlocks = new List<Range<long, long>>(StreamExtent.Blocks(_content.Extents, _blockSize));

                // BAT
                BlockAllocationTableBuilderExtent batExtent = new BlockAllocationTableBuilderExtent(batRegion.FileOffset, batRegion.Length, presentBlocks, fileEnd, _blockSize, chunkRatio);
                extents.Add(batExtent);

                // Data blocks that are actually present in the source stream.
                foreach (var range in presentBlocks)
                {
                    long substreamStart = range.Offset * _blockSize;
                    long substreamCount = Math.Min(_content.Length - substreamStart, range.Count * _blockSize);

                    SubStream dataSubStream = new SubStream(_content, substreamStart, substreamCount);
                    BuilderSparseStreamExtent dataExtent = new BuilderSparseStreamExtent(fileEnd, dataSubStream);
                    extents.Add(dataExtent);

                    fileEnd += range.Count * _blockSize;
                }

                totalLength = fileEnd;

                return extents;
            }
Exemple #22
0
        /// <summary>
        /// Bundle streams to one in given order.
        /// Stream-1 (startPos->End) ... Stream-n (Begin->End) ... Stream-last (Begin->endPos)
        /// endPos = Stream-last.length - (AllStreams.length - startPos - numOfBytes)
        /// </summary>
        /// <param name="streams">Ordered streams to concatenate; must be non-empty and contain no nulls.</param>
        /// <param name="startPos">Position in first stream, where bundle should begin.</param>
        /// <param name="numOfBytes">Number of bytes the uber-stream covers.</param>
        /// <exception cref="ArgumentNullException"><paramref name="streams"/> is null or contains a null element.</exception>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="startPos"/> is negative or the requested range exceeds the combined length.</exception>
        /// <exception cref="ArgumentException">The streams are too short for the requested range.</exception>
        public UberStream(Stream[] streams, long startPos, long numOfBytes)
            : base()
        {
            if (streams == null)
            {
                throw new ArgumentNullException(nameof(streams));
            }
            if (streams.Length == 0)
            {
                throw new ArgumentException("streams must contain at least one stream.", nameof(streams));
            }
            if (streams.Any(s => s == null))
            {
                throw new ArgumentNullException(nameof(streams), "streams must not contain null elements.");
            }
            if (startPos < 0)
            {
                // BUG FIX: the message was previously passed as the paramName argument.
                throw new ArgumentOutOfRangeException(nameof(startPos), "startPos can not be less than 0.");
            }
            if (streams.All(s => s.CanSeek) && (startPos + numOfBytes) > streams.Sum(s => s.Length))
            {
                throw new ArgumentOutOfRangeException(nameof(numOfBytes));
            }

            m_position    = 0;
            m_totalLength = numOfBytes;

            m_streams = new SubStream[streams.Length];

            var first = streams[0];
            if (first.Length < startPos)
            {
                throw new ArgumentException("First stream is shorter than startPos.");
            }

            // First element starts at startPos and runs to the end of its stream.
            m_streams[0] = new SubStream()
            {
                Stream    = first,
                StartPos  = startPos,
                Length    = first.Length - startPos,
                PosOffset = 0
            };

            // Middle (and, for now, last) elements cover their streams fully;
            // PosOffset accumulates the logical position of each segment.
            for (var i = 1; i < streams.Length; i++)
            {
                m_streams[i] = new SubStream()
                {
                    Stream    = streams[i],
                    StartPos  = 0,
                    Length    = streams[i].Length,
                    PosOffset = m_streams[i - 1].PosOffset + m_streams[i - 1].Length
                };
            }

            // Trim the last element so the total covered length equals numOfBytes.
            var streamsTotalLength = streams.Sum(stream => stream.Length);
            var diff = streamsTotalLength - startPos - numOfBytes;

            if (diff < 0)
            {
                throw new ArgumentException("streams don't have enough length (< startPos + numOfBytes).");
            }

            var lastIndex = m_streams.Length - 1;
            if (streams[lastIndex].Length < diff)
            {
                throw new ArgumentException("Last stream is shorter than the requested bytes.");
            }

            m_streams[lastIndex] = new SubStream()
            {
                Stream    = m_streams[lastIndex].Stream,
                StartPos  = m_streams[lastIndex].StartPos,
                Length    = m_streams[lastIndex].Length - diff,
                PosOffset = m_streams[lastIndex].PosOffset
            };
        }
Exemple #23
0
        /// <summary>
        /// Uploads one part of a multipart upload via HTTP PUT and returns the part's ETag.
        /// On failure, retries recursively (recursion depth bounded by
        /// MaxRetriesForMultiPartUpload) after restoring the stream position.
        /// </summary>
        /// <param name="baseStream">Source stream positioned at the start of this part.</param>
        /// <param name="client">HTTP client used for the PUT request.</param>
        /// <param name="url">Destination URL for this part.</param>
        /// <param name="length">Number of bytes of this part to send.</param>
        /// <param name="retryCount">Current retry attempt; pass 0 for the initial call.</param>
        /// <returns>The ETag header value returned by the server for this part.</returns>
        private string UploadPart(Stream baseStream, HttpClient client, string url, long length, int retryCount)
        {
            // saving the position if we need to retry
            var position = baseStream.Position;

            using (var subStream = new SubStream(baseStream, offset: 0, length: length))
            {
                var now = SystemTime.UtcNow;

                // stream is disposed by the HttpClient
                var content = new ProgressableStreamContent(subStream, Progress)
                {
                    Headers =
                    {
                        { "Content-Length", subStream.Length.ToString(CultureInfo.InvariantCulture) }
                    }
                };

                UpdateHeaders(content.Headers, now, subStream);

                var headers = ConvertToHeaders(content.Headers);
                client.DefaultRequestHeaders.Authorization = CalculateAuthorizationHeaderValue(HttpMethods.Put, url, now, headers);

                try
                {
                    // NOTE(review): .Result blocks synchronously on the async call
                    // (sync-over-async); acceptable only if this whole path is synchronous.
                    var response = client.PutAsync(url, content, CancellationToken).Result;
                    if (response.IsSuccessStatusCode)
                    {
                        // NOTE(review): GetValues throws if the server omits the ETag
                        // header — confirm the target service always returns it.
                        var etagHeader = response.Headers.GetValues("ETag");
                        return(etagHeader.First());
                    }

                    // Non-success status: fall through to retry unless retries exhausted.
                    if (retryCount == MaxRetriesForMultiPartUpload)
                    {
                        throw StorageException.FromResponseMessage(response);
                    }
                }
                catch (Exception)
                {
                    // Swallow and retry below; rethrow once retries are exhausted.
                    if (retryCount == MaxRetriesForMultiPartUpload)
                    {
                        throw;
                    }
                }

                // revert the uploaded count before retry
                Progress?.UploadProgress.UpdateUploaded(-content.Uploaded);
            }

            // wait for one second before trying again to send the request
            // maybe there was a network issue?
            CancellationToken.WaitHandle.WaitOne(1000);
            CancellationToken.ThrowIfCancellationRequested();

            retryCount++;

            if (_logger?.IsInfoEnabled == true)
            {
                _logger.Info($"Trying to send the request again. Retries count: '{retryCount}', BucketName: '{_bucketName}'.");
            }

            // restore the stream position before retrying
            baseStream.Position = position;
            return(UploadPart(baseStream, client, url, length, retryCount));
        }
        /// <summary>
        /// Opens a read stream over the given file entry inside the VPK archive, lazily
        /// opening and caching the backing chunk stream per archive index.
        /// </summary>
        /// <param name="path">Archive-relative path of the file to open.</param>
        /// <returns>A buffered stream over the entry's data.</returns>
        /// <exception cref="FileNotFoundException">No entry exists for <paramref name="path"/>.</exception>
        public Stream OpenFile(string path)
        {
            var entry = GetEntry(path);
            if (entry == null) throw new FileNotFoundException();

            // NOTE: locking on 'this' is kept for compatibility; a private lock
            // object would be preferable.
            lock (this)
            {
                if (!_streams.ContainsKey(entry.ArchiveIndex))
                {
                    var file = _directory.Chunks[entry.ArchiveIndex];
                    var stream = _directory.OpenFile(file);
                    _streams.Add(entry.ArchiveIndex, stream);
                }

                // Entries stored in the directory chunk itself sit past the header and tree.
                var offset = entry.ArchiveIndex == VpkDirectory.DirectoryIndex ? _directory.HeaderLength + _directory.TreeLength + entry.EntryOffset : entry.EntryOffset;

                // BUG FIX: read the cached stream inside the lock — the original indexed
                // _streams outside the lock, racing with another thread's Add.
                var sub = new SubStream(_streams[entry.ArchiveIndex], offset, entry.EntryLength);
                return new BufferedStream(new VpkEntryStream(entry, sub));
            }
        }
Exemple #25
0
 /// <summary>
 /// Initializes a new instance of the <see cref="Asn1GeneralizedTime"/> class. Preferably used when reading GENERALIZED TIME.
 /// </summary>
 /// <param name="content">BER encoded value in a Stream.</param>
 /// <param name="constructed">Flag if type is constructed or primitive.</param>
 /// <remarks>Delegates all parsing to the base type, tagging the value as UNIVERSAL GeneralizedTime.</remarks>
 internal Asn1GeneralizedTime(SubStream content, bool constructed)
     : base(Asn1Class.Universal, constructed, (int)Asn1Type.GeneralizedTime, content)
 {
 }
Exemple #26
0
 /// <summary>
 /// Initializes a new instance of the <see cref="Asn1Utf8String"/> class.
 /// Preferably used when reading UTF8 STRING.
 /// </summary>
 /// <param name="content">BER encoded value in a Stream.</param>
 /// <param name="constructed">Flag if type is constructed or primitive.</param>
 /// <remarks>Delegates all parsing to the base type, tagging the value as UNIVERSAL UTF8String.</remarks>
 internal Asn1Utf8String(SubStream content, bool constructed)
     : base(Asn1Class.Universal, constructed, (int)Asn1Type.Utf8String, content)
 {
 }
        /// <summary>
        /// Loads the *RES.bin (ANMC) file from the archive file system and maps each PBI
        /// name to the index of its parent image entry and its own PBI index.
        /// </summary>
        /// <param name="archiveFileSystem">File system containing a *RES.bin in its root.</param>
        /// <returns>Dictionary from PBI name to (image entry index, PBI index).</returns>
        /// <exception cref="InvalidOperationException">The file lacks the ANMC magic, or no matching image area/entry exists.</exception>
        private async Task <IDictionary <string, (int imageIndex, int pbiIndex)> > LoadResBin(IFileSystem archiveFileSystem)
        {
            var resBinPath = archiveFileSystem
                             .EnumeratePaths(UPath.Root, "*RES.bin", SearchOption.TopDirectoryOnly, SearchTarget.File)
                             .First();
            var resFileStream = await archiveFileSystem.OpenFileAsync(resBinPath);

            using var br = new BinaryReaderX(resFileStream);

            // Check header magic
            if (br.PeekString() != "ANMC")
            {
                throw new InvalidOperationException("RES.bin is no valid ANMC file.");
            }

            // Read header
            var resHeader = br.ReadType <ResHeader>();

            // Read image tables
            var imageEntries = br.ReadMultiple <ResImageEntry>(resHeader.imageTables[0].entryCount);
            var imageAreas   = br.ReadMultiple <ResImageArea>(resHeader.imageTables[1].entryCount);

            // Read pbi entries
            // Offsets in the header are stored in 4-byte units, hence the << 2.
            br.BaseStream.Position = resHeader.tableCluster2[1].offset << 2;
            var unkEntries = br.ReadMultiple <TableCluster2Table2>(resHeader.tableCluster2[1].entryCount);

            IList <ResPbiDimensionEntry> pbiEntries = Array.Empty <ResPbiDimensionEntry>();

            // The PBI dimension table only exists when a fourth cluster entry is present.
            if (resHeader.tableCluster2.Length >= 4)
            {
                br.BaseStream.Position = resHeader.tableCluster2[3].offset << 2;
                pbiEntries             = br.ReadMultiple <ResPbiDimensionEntry>(resHeader.tableCluster2[3].entryCount);
            }

            // Create string reader
            var stringOffset = resHeader.stringTablesOffset << 2;
            var stringStream = new SubStream(resFileStream, stringOffset, resFileStream.Length - stringOffset);

            using var stringBr = new BinaryReaderX(stringStream);

            // Create string connection to data
            var result = new Dictionary <string, (int, int)>();

            for (var i = 0; i < resHeader.tableCluster2[1].entryCount; i++)
            {
                // Fall back to the numeric index when no dimension entry names this PBI.
                var pbiName = i.ToString();
                if (pbiEntries.Count > i)
                {
                    stringBr.BaseStream.Position = pbiEntries[i].stringPointer.offset;
                    pbiName = stringBr.ReadCStringSJIS();
                }

                if (i >= unkEntries.Count)
                {
                    continue;
                }

                // Resolve PBI -> image area -> image entry via CRC32 name hashes.
                var imageAreaParent = unkEntries[i].imageAreaParent;
                var imageArea       = imageAreas.First(x => x.stringPointer.crc32 == imageAreaParent);

                var imageEntryParent = imageArea.imageEntryParent;
                var imageEntry       = imageEntries.First(x => x.stringPointer.crc32 == imageEntryParent);
                var imageEntryIndex  = imageEntries.IndexOf(imageEntry);

                result[pbiName] = (imageEntryIndex, i);
            }

            return(result);
        }
Exemple #28
0
        /// <summary>
        /// Opens the file behind <paramref name="r"/> and returns a response stream,
        /// honoring the HTTP Range request header for partial content. Optionally wires
        /// a watched-threshold callback that marks the video watched once playback
        /// crosses the threshold position.
        /// </summary>
        /// <param name="r">Resolved file info (file handle, mime type, user/video context).</param>
        /// <param name="autowatch">When true (and a user/video is present), auto-mark watched.</param>
        /// <returns>A <c>StreamWithResponse</c> (derives from Stream) carrying status, headers and body.</returns>
        private Stream StreamFromIFile(InfoResult r, bool?autowatch)
        {
            Nancy.Request request = RestModule.CurrentModule.Request;

            FileSystemResult <Stream> fr = r.File.OpenRead();

            if (fr == null || !fr.IsOk)
            {
                return(new StreamWithResponse(HttpStatusCode.InternalServerError,
                                              "Unable to open file '" + r.File.FullName + "': " + fr?.Error));
            }
            Stream org       = fr.Result;
            long   totalsize = org.Length;
            long   start     = 0;
            long   end       = totalsize - 1;

            // Header lookup is case-sensitive here, so probe both spellings.
            string rangevalue = request.Headers["Range"].FirstOrDefault() ?? request.Headers["range"].FirstOrDefault();

            rangevalue = rangevalue?.Replace("bytes=", string.Empty);
            bool range = !string.IsNullOrEmpty(rangevalue);

            if (range)
            {
                // range: bytes=split[0]-split[1]
                string[] split = rangevalue.Split('-');
                if (split.Length == 2)
                {
                    // bytes=-split[1] - tail of specified length
                    // NOTE(review): if the requested tail length exceeds totalsize,
                    // start goes negative and the SubStream below gets a negative
                    // offset — confirm this cannot happen or clamp start to 0.
                    if (string.IsNullOrEmpty(split[0]) && !string.IsNullOrEmpty(split[1]))
                    {
                        long e = long.Parse(split[1]);
                        start = totalsize - e;
                        end   = totalsize - 1;
                    }
                    // bytes=split[0] - split[0] to end of file
                    else if (!string.IsNullOrEmpty(split[0]) && string.IsNullOrEmpty(split[1]))
                    {
                        start = long.Parse(split[0]);
                        end   = totalsize - 1;
                    }
                    // bytes=split[0]-split[1] - specified beginning and end
                    else if (!string.IsNullOrEmpty(split[0]) && !string.IsNullOrEmpty(split[1]))
                    {
                        start = long.Parse(split[0]);
                        end   = long.Parse(split[1]);
                        // Clamp both ends to the last valid byte index.
                        if (start > totalsize - 1)
                        {
                            start = totalsize - 1;
                        }
                        if (end > totalsize - 1)
                        {
                            end = totalsize - 1;
                        }
                    }
                }
            }
            var outstream = new SubStream(org, start, end - start + 1);
            var resp      = new StreamWithResponse {
                ContentType = r.Mime
            };

            resp.Headers.Add("Server", ServerVersion);
            resp.Headers.Add("Connection", "keep-alive");
            resp.Headers.Add("Accept-Ranges", "bytes");
            resp.Headers.Add("Content-Range", "bytes " + start + "-" + end + "/" + totalsize);
            resp.ContentLength = end - start + 1;

            // 206 Partial Content only when the client actually sent a Range header.
            resp.ResponseStatus = range ? HttpStatusCode.PartialContent : HttpStatusCode.OK;

            if (r.User != null && autowatch.HasValue && autowatch.Value && r.VideoLocal != null)
            {
                // Fire the watched toggle on a background task once the stream read
                // position crosses the configured threshold of the total size.
                outstream.CrossPosition         = (long)((double)totalsize * WatchedThreshold);
                outstream.CrossPositionCrossed +=
                    (a) =>
                {
                    Task.Factory.StartNew(() => { r.VideoLocal.ToggleWatchedStatus(true, r.User.JMMUserID); },
                                          new CancellationToken(),
                                          TaskCreationOptions.LongRunning, TaskScheduler.Default);
                };
            }
            resp.Stream = outstream;
            return(resp);
        }
Exemple #29
0
        /// <summary>
        /// Creates a new stream that contains the XVA image.
        /// </summary>
        /// <returns>The new stream</returns>
        /// <remarks>
        /// The XVA layout is a tar archive holding "ova.xml" plus, per disk, one 1 MiB
        /// chunk file ("Ref:{diskId}/{chunkIdx}") and a SHA-1 checksum file per chunk.
        /// Only chunks overlapping allocated extents are emitted (sparse format).
        /// </remarks>
        public override SparseStream Build()
        {
            TarFileBuilder tarBuilder = new TarFileBuilder();

            int[] diskIds;

            // Generates the manifest and assigns the per-disk ids used in chunk names.
            string ovaFileContent = GenerateOvaXml(out diskIds);

            tarBuilder.AddFile("ova.xml", Encoding.ASCII.GetBytes(ovaFileContent));

            int diskIdx = 0;

            foreach (var diskRec in _disks)
            {
                SparseStream        diskStream = diskRec.Second;
                List <StreamExtent> extents    = new List <StreamExtent>(diskStream.Extents);

                // Tracks the most recently emitted chunk so overlapping extents
                // that share a chunk don't emit it twice.
                int lastChunkAdded = -1;
                foreach (StreamExtent extent in extents)
                {
                    int firstChunk = (int)(extent.Start / Sizes.OneMiB);
                    int lastChunk  = (int)((extent.Start + extent.Length - 1) / Sizes.OneMiB);

                    for (int i = firstChunk; i <= lastChunk; ++i)
                    {
                        if (i != lastChunkAdded)
                        {
                            // One hash algorithm instance per chunk: the HashStream feeds it
                            // while the chunk is written, and ChecksumStream emits the digest.
                            HashAlgorithm hashAlg = new SHA1Managed();
                            Stream        chunkStream;

                            long diskBytesLeft = diskStream.Length - (i * Sizes.OneMiB);
                            if (diskBytesLeft < Sizes.OneMiB)
                            {
                                // Final, partial chunk: pad to a full 1 MiB with zeros.
                                chunkStream = new ConcatStream(
                                    Ownership.Dispose,
                                    new SubStream(diskStream, i * Sizes.OneMiB, diskBytesLeft),
                                    new ZeroStream(Sizes.OneMiB - diskBytesLeft));
                            }
                            else
                            {
                                chunkStream = new SubStream(diskStream, i * Sizes.OneMiB, Sizes.OneMiB);
                            }

                            HashStream chunkHashStream = new HashStream(chunkStream, Ownership.Dispose, hashAlg);

                            // The checksum entry must follow its chunk entry; the digest is only
                            // final after the chunk has been fully read by the tar builder.
                            tarBuilder.AddFile(string.Format(CultureInfo.InvariantCulture, "Ref:{0}/{1:D8}", diskIds[diskIdx], i), chunkHashStream);
                            tarBuilder.AddFile(string.Format(CultureInfo.InvariantCulture, "Ref:{0}/{1:D8}.checksum", diskIds[diskIdx], i), new ChecksumStream(hashAlg));

                            lastChunkAdded = i;
                        }
                    }
                }

                // Make sure the last chunk is present, filled with zero's if necessary
                // (importers use it to determine the disk's true length).
                int lastActualChunk = (int)((diskStream.Length - 1) / Sizes.OneMiB);
                if (lastChunkAdded < lastActualChunk)
                {
                    HashAlgorithm hashAlg         = new SHA1Managed();
                    Stream        chunkStream     = new ZeroStream(Sizes.OneMiB);
                    HashStream    chunkHashStream = new HashStream(chunkStream, Ownership.Dispose, hashAlg);
                    tarBuilder.AddFile(string.Format(CultureInfo.InvariantCulture, "Ref:{0}/{1:D8}", diskIds[diskIdx], lastActualChunk), chunkHashStream);
                    tarBuilder.AddFile(string.Format(CultureInfo.InvariantCulture, "Ref:{0}/{1:D8}.checksum", diskIds[diskIdx], lastActualChunk), new ChecksumStream(hashAlg));
                }

                ++diskIdx;
            }

            return(tarBuilder.Build());
        }
        /// <summary>
        /// Creates a RPM Package.
        /// </summary>
        /// <param name="archiveEntries">
        /// The archive entries which make up the RPM package.
        /// </param>
        /// <param name="payloadStream">
        /// A <see cref="Stream"/> which contains the CPIO archive for the RPM package.
        /// </param>
        /// <param name="name">
        /// The name of the package.
        /// </param>
        /// <param name="version">
        /// The version of the software.
        /// </param>
        /// <param name="arch">
        /// The architecture targetted by the package.
        /// </param>
        /// <param name="release">
        /// The release version.
        /// </param>
        /// <param name="createUser">
        /// <see langword="true"/> to create a user account; otherwise, <see langword="false"/>.
        /// </param>
        /// <param name="userName">
        /// The name of the user account to create.
        /// </param>
        /// <param name="installService">
        /// <see langword="true"/> to install a system service, otherwise, <see langword="false"/>.
        /// </param>
        /// <param name="serviceName">
        /// The name of the system service to create.
        /// </param>
        /// <param name="vendor">
        /// The package vendor.
        /// </param>
        /// <param name="description">
        /// The package description.
        /// </param>
        /// <param name="url">
        /// The package URL.
        /// </param>
        /// <param name="prefix">
        /// A prefix to use.
        /// </param>
        /// <param name="preInstallScript">
        /// Pre-Install script
        /// </param>
        /// <param name="postInstallScript">
        /// Post-Install script
        /// </param>
        /// <param name="preRemoveScript">
        /// Pre-Remove script
        /// </param>
        /// <param name="postRemoveScript">
        /// Post-Remove script
        /// </param>
        /// <param name="additionalDependencies">
        /// Additional dependencies to add to the RPM package.
        /// </param>
        /// <param name="additionalMetadata">
        /// Any additional metadata.
        /// </param>
        /// <param name="signer">
        /// The signer to use when signing the package.
        /// </param>
        /// <param name="targetStream">
        /// The <see cref="Stream"/> to which to write the package.
        /// </param>
        /// <param name="includeVersionInName">
        /// <see langword="true"/> to include the version number and release number
        /// in the <see cref="RpmLead.Name"/>; <see langword="false"/> to only
        /// use the package name.
        /// </param>
        /// <param name="payloadIsCompressed">
        /// <see langword="true"/> if <paramref name="payloadStream"/> is already
        /// compressed. In this case, the <paramref name="payloadStream"/> will be
        /// copied "as is" to the resulting RPM package.
        /// </param>
        public void CreatePackage(
            List <ArchiveEntry> archiveEntries,
            Stream payloadStream,
            string name,
            string version,
            string arch,
            string release,
            bool createUser,
            string userName,
            bool installService,
            string serviceName,
            string vendor,
            string description,
            string url,
            string prefix,
            string preInstallScript,
            string postInstallScript,
            string preRemoveScript,
            string postRemoveScript,
            IEnumerable <PackageDependency> additionalDependencies,
            Action <RpmMetadata> additionalMetadata,
            IPackageSigner signer,
            Stream targetStream,
            bool includeVersionInName = false,
            bool payloadIsCompressed  = false)
        {
            // This routine goes roughly like:
            // 1. Calculate all the metadata, including a signature,
            //    but use an empty compressed payload to calculate
            //    the signature
            // 2. Write out the rpm file, and compress the payload
            // 3. Update the signature
            //
            // This way, we avoid having to compress the payload into a temporary
            // file.
            //
            // NOTE: targetStream must therefore be seekable and writable; it is
            // truncated and rewound twice below.

            // Core routine to populate files and dependencies (part of the metadata
            // in the header)
            RpmPackage package  = new RpmPackage();
            var        metadata = new RpmMetadata(package)
            {
                Name    = name,
                Version = version,
                Arch    = arch,
                Release = release,
            };

            this.AddPackageProvides(metadata);
            this.AddLdDependencies(metadata);

            var files = this.CreateFiles(archiveEntries);

            metadata.Files = files;

            this.AddRpmDependencies(metadata, additionalDependencies);

            // Try to define valid defaults for most metadata
            metadata.Locales = new Collection <string> {
                "C"
            };                                                 // Should come before any localizable data.
            metadata.BuildHost         = "dotnet-rpm";
            metadata.BuildTime         = DateTimeOffset.Now;
            metadata.Cookie            = "dotnet-rpm";
            metadata.FileDigetsAlgo    = PgpHashAlgo.PGPHASHALGO_SHA256;
            metadata.Group             = "System Environment/Libraries";
            metadata.OptFlags          = string.Empty;
            metadata.Os                = "linux";
            // Payload is a cpio archive, xz-compressed, matching the compressor used below.
            metadata.PayloadCompressor = "xz";
            metadata.PayloadFlags      = "2";
            metadata.PayloadFormat     = "cpio";
            metadata.Platform          = "x86_64-redhat-linux-gnu";
            metadata.RpmVersion        = "4.11.3";
            metadata.SourcePkgId       = new byte[0x10];
            metadata.SourceRpm         = $"{name}-{version}-{release}.src.rpm";

            // Scripts which run before & after installation and removal.
            var preIn  = preInstallScript ?? string.Empty;
            var postIn = postInstallScript ?? string.Empty;
            var preUn  = preRemoveScript ?? string.Empty;
            var postUn = postRemoveScript ?? string.Empty;

            if (createUser)
            {
                // Add the user and group, under which the service runs.
                // These users are never removed because UIDs are re-used on Linux.
                preIn += $"/usr/sbin/groupadd -r {userName} 2>/dev/null || :\n" +
                         $"/usr/sbin/useradd -g {userName} -s /sbin/nologin -r -d {prefix} {userName} 2>/dev/null || :\n";
            }

            if (installService)
            {
                // Install and activate the service.
                // $1 is the rpm scriptlet argument: the count of package instances
                // that will remain after the operation completes.
                postIn +=
                    $"if [ $1 -eq 1 ] ; then \n" +
                    $"    systemctl enable --now {serviceName}.service >/dev/null 2>&1 || : \n" +
                    $"fi\n";

                preUn +=
                    $"if [ $1 -eq 0 ] ; then \n" +
                    $"    # Package removal, not upgrade \n" +
                    $"    systemctl --no-reload disable --now {serviceName}.service > /dev/null 2>&1 || : \n" +
                    $"fi\n";

                postUn +=
                    $"if [ $1 -ge 1 ] ; then \n" +
                    $"    # Package upgrade, not uninstall \n" +
                    $"    systemctl try-restart {serviceName}.service >/dev/null 2>&1 || : \n" +
                    $"fi\n";
            }

            // Remove all directories marked as such (these are usually directories which contain temporary files)
            foreach (var entryToRemove in archiveEntries.Where(e => e.RemoveOnUninstall))
            {
                preUn +=
                    $"if [ $1 -eq 0 ] ; then \n" +
                    $"    # Package removal, not upgrade \n" +
                    $"    /usr/bin/rm -rf {entryToRemove.TargetPath}\n" +
                    $"fi\n";
            }

            // Only emit scriptlet headers when there is something to run;
            // empty scriptlets would trip rpmlint.
            if (!string.IsNullOrEmpty(preIn))
            {
                metadata.PreInProg = "/bin/sh";
                metadata.PreIn     = preIn;
            }

            if (!string.IsNullOrEmpty(postIn))
            {
                metadata.PostInProg = "/bin/sh";
                metadata.PostIn     = postIn;
            }

            if (!string.IsNullOrEmpty(preUn))
            {
                metadata.PreUnProg = "/bin/sh";
                metadata.PreUn     = preUn;
            }

            if (!string.IsNullOrEmpty(postUn))
            {
                metadata.PostUnProg = "/bin/sh";
                metadata.PostUn     = postUn;
            }

            // Not providing these (or setting empty values) would cause rpmlint errors
            metadata.Description = string.IsNullOrEmpty(description)
                ? $"{name} version {version}-{release}"
                : description;
            metadata.Summary = $"{name} version {version}-{release}";
            metadata.License = $"{name} License";

            metadata.Distribution = string.Empty;
            metadata.DistUrl      = string.Empty;
            metadata.Url          = url ?? string.Empty;
            metadata.Vendor       = vendor ?? string.Empty;

            metadata.ChangelogEntries = new Collection <ChangelogEntry>()
            {
                new ChangelogEntry(DateTimeOffset.Now, "dotnet-rpm", "Created a RPM package using dotnet-rpm")
            };

            // User-set metadata
            // (runs last so the callback can override any of the defaults above)
            if (additionalMetadata != null)
            {
                additionalMetadata(metadata);
            }

            this.CalculateHeaderOffsets(package);

            // Produce a throwaway compressed payload so a structurally valid signature
            // region (with correct offsets) exists before the real payload is written.
            using (MemoryStream dummyCompressedPayload = new MemoryStream())
            {
                using (XZOutputStream dummyPayloadCompressor = new XZOutputStream(dummyCompressedPayload, 1, XZOutputStream.DefaultPreset, leaveOpen: true))
                {
                    dummyPayloadCompressor.Write(new byte[] { 0 }, 0, 1);
                }

                this.CalculateSignature(package, signer, dummyCompressedPayload);
            }

            this.CalculateSignatureOffsets(package);

            // Write out all the data - includes the lead
            // The lead name field is a fixed 66-byte, NUL-padded buffer.
            byte[] nameBytes  = new byte[66];
            var    nameInLead = includeVersionInName ? $"{name}-{version}-{release}" : name;

            Encoding.UTF8.GetBytes(nameInLead, 0, nameInLead.Length, nameBytes, 0);

            var lead = new RpmLead()
            {
                ArchNum       = 1,
                // 0xedabeedb is the fixed RPM lead magic number.
                Magic         = 0xedabeedb,
                Major         = 0x03,
                Minor         = 0x00,
                NameBytes     = nameBytes,
                OsNum         = 0x0001,
                Reserved      = new byte[16],
                SignatureType = 0x0005,
                Type          = 0x0000,
            };

            // Write out the lead, signature and header
            targetStream.Position = 0;
            targetStream.SetLength(0);

            targetStream.WriteStruct(lead);
            this.WriteSignature(package, targetStream);
            this.WriteHeader(package, targetStream);

            // Write out the compressed payload
            int compressedPayloadOffset = (int)targetStream.Position;

            // The user can choose to pass an already-comrpessed
            // payload. In this case, no need to re-compress.
            if (payloadIsCompressed)
            {
                payloadStream.CopyTo(targetStream);
            }
            else
            {
                using (XZOutputStream compressor = new XZOutputStream(targetStream, 1, XZOutputStream.DefaultPreset, leaveOpen: true))
                {
                    payloadStream.Position = 0;
                    payloadStream.CopyTo(compressor);
                }
            }

            // Recompute the signature over the real compressed payload bytes just written.
            using (SubStream compressedPayloadStream = new SubStream(
                       targetStream,
                       compressedPayloadOffset,
                       targetStream.Length - compressedPayloadOffset,
                       leaveParentOpen: true,
                       readOnly: true))
            {
                this.CalculateSignature(package, signer, compressedPayloadStream);
                this.CalculateSignatureOffsets(package);
            }

            // Update the lead and signature
            // (header and payload are untouched; only the prefix is rewritten in place).
            targetStream.Position = 0;

            targetStream.WriteStruct(lead);
            this.WriteSignature(package, targetStream);
        }
 /// <summary>
 /// Verifies that the parameterless constructor of the SubStream subclass
 /// produces an instance backed by a valid (non-zero) native handle.
 /// </summary>
 public void SubclassedCtor()
 {
     using (var stream = new SubStream())
     {
         Assert.That(stream.Handle, Is.Not.EqualTo(IntPtr.Zero), "Handle");
     }
 }
Exemple #32
0
 /// <summary>
 /// handles the go button for automated encoding
 /// checks if we're in automated 2 pass video mode and that we're not using the snow codec
 /// then the video and audio configuration is checked, and if it checks out
 /// the audio job, video jobs and muxing job are generated, audio and video job are linked
 /// and encoding is started
 /// </summary>
 /// <param name="sender"></param>
 /// <param name="e"></param>
 private void queueButton_Click(object sender, System.EventArgs e)
 {
     // Nothing to do without an output filename.
     if (!this.muxedOutput.Text.Equals(""))
     {
         long desiredSizeBytes;
         if (!noTargetRadio.Checked)
         {
             try
             {
                 // UI field holds MB; convert to bytes (1048576 = 1 MiB).
                 desiredSizeBytes = Int64.Parse(this.muxedSizeMBs.Text) * 1048576L;
             }
             catch (Exception f)
             {
                 // Parse failure (empty/non-numeric target size): inform the user and abort.
                 MessageBox.Show("I'm not sure how you want me to reach a target size of <empty>.\r\nWhere I'm from that number doesn't exist.\r\n",
                                 "Target size undefined", MessageBoxButtons.OK);
                 Console.Write(f.Message);
                 return;
             }
         }
         else
         {
             // -1 signals "no target size"; profile settings decide the bitrate.
             desiredSizeBytes = -1;
         }
         int splitSize = 0;
         if (splitOutput.Checked)
         {
             // NOTE(review): unguarded parse — a non-numeric split size throws here,
             // unlike the target-size parse above which is wrapped. Confirm the UI
             // restricts this field to digits.
             splitSize = Int32.Parse(this.splitSize.Text);
         }
         if (desiredSizeBytes > 0)
         {
             logBuilder.Append("Desired size of this automated encoding series: " + desiredSizeBytes + " bytes, split size: " + splitSize + "MB\r\n");
         }
         else
         {
             logBuilder.Append("No desired size of this encode. The profile settings will be used");
         }
         // Partition configured audio into streams to re-encode vs. streams to mux as-is.
         SubStream[]   audio;
         AudioStream[] aStreams;
         MuxableType[] muxTypes;
         separateEncodableAndMuxableAudioStreams(out aStreams, out audio, out muxTypes);
         SubStream[]   subtitles   = new SubStream[0];
         string        chapters    = "";
         string        videoInput  = mainForm.Video.Info.VideoInput;
         string        videoOutput = mainForm.Video.Info.VideoOutput;
         string        muxedOutput = this.muxedOutput.Text;
         ContainerType cot         = this.container.SelectedItem as ContainerType;
         if (addSubsNChapters.Checked)
         {
             // Let the user pick extra subtitle/chapter streams via the mux dialog;
             // the dialog may also change the output name and container type.
             AdaptiveMuxWindow amw = new AdaptiveMuxWindow(mainForm);
             amw.setMinimizedMode(videoOutput, videoStream.VideoType, jobUtil.getFramerate(videoInput), audio,
                                  muxTypes, muxedOutput, splitSize, cot);
             if (amw.ShowDialog() == DialogResult.OK)
             {
                 amw.getAdditionalStreams(out audio, out subtitles, out chapters, out muxedOutput, out cot);
             }
             else // user aborted, abort the whole process
             {
                 return;
             }
         }
         // Streams that will be re-encoded must not also be muxed from the source.
         removeStreamsToBeEncoded(ref audio, aStreams);
         this.vUtil.GenerateJobSeries(this.videoStream, muxedOutput, aStreams, subtitles, chapters,
                                      desiredSizeBytes, splitSize, cot, this.prerender, audio, new List <string>());
         this.Close();
     }
 }
Exemple #33
0
        /// <summary>
        /// Handles one HTTP request for streaming a video file.
        /// URL layout: /{cmd}/{userid}/{autowatch}/{arg} where cmd is "videolocal"
        /// (arg = VideoLocal id) or "file" (arg = base64-encoded path).
        /// Supports byte-range requests, OPTIONS preflight and HEAD; when autowatch
        /// is 1 the video is marked watched once streaming passes WatchedThreshold.
        /// </summary>
        /// <param name="obj">The listener context; its response is always closed in the finally block.</param>
        private void Process(System.Net.HttpListenerContext obj)
        {
            Stream org = null;

            try
            {
                string[] dta = obj.Request.RawUrl.Split(new char[] { '/' }, StringSplitOptions.RemoveEmptyEntries);
                if (dta.Length < 4)
                {
                    return;
                }
                string cmd       = dta[0].ToLower();
                string user      = dta[1];
                string aw        = dta[2];
                string arg       = dta[3];
                string fullname  = string.Empty;
                int    userid    = 0;
                int    autowatch = 0;
                // Unparsable ids simply leave the defaults (0 = anonymous / no autowatch).
                int.TryParse(user, out userid);
                int.TryParse(aw, out autowatch);
                VideoLocal loc  = null;
                IFile      file = null;
                if (cmd == "videolocal")
                {
                    int sid = 0;
                    int.TryParse(arg, out sid);
                    if (sid == 0)
                    {
                        obj.Response.StatusCode        = (int)HttpStatusCode.BadRequest;
                        obj.Response.StatusDescription = "Stream Id missing.";
                        return;
                    }
                    loc = RepoFactory.VideoLocal.GetByID(sid);
                    if (loc == null)
                    {
                        obj.Response.StatusCode        = (int)HttpStatusCode.NotFound;
                        obj.Response.StatusDescription = "Stream Id not found.";
                        return;
                    }
#if DEBUG_STREAM
                    // Debug hook: a magic id resolves a fixed local file instead of the repository entry.
                    if (loc.VideoLocalID == 6393488934891)
                    {
                        FileSystemResult <IFileSystem> ff = CloudFileSystemPluginFactory.Instance.List.FirstOrDefault(a => a.Name == "Local File System")?.Init("", null, null);
                        if (ff == null || !ff.IsOk)
                        {
                            throw new Exception(ff?.Error ?? "Error Opening Local Filesystem");
                        }
                        FileSystemResult <IObject> o = ff.Result.Resolve(@"C:\test\unsort\[FTV-Wasurenai] 11eyes - 01 [1280x720 BD H264] [07238189].mkv");
                        if (o.IsOk)
                        {
                            file = (IFile)o.Result;
                        }
                    }
                    else
#endif
                    file = loc.GetBestFileLink();
                    if (file == null)
                    {
                        obj.Response.StatusCode        = (int)HttpStatusCode.NotFound;
                        obj.Response.StatusDescription = "Stream Id not found.";
                        return;
                    }
                    fullname = file.FullName;
                }
                else if (cmd == "file")
                {
                    fullname = Base64DecodeUrl(arg);
                    file     = VideoLocal.ResolveFile(fullname);
                    if (file == null)
                    {
                        obj.Response.StatusCode        = (int)HttpStatusCode.NotFound;
                        obj.Response.StatusDescription = "File not found.";
                        return;
                    }
                }
                else
                {
                    obj.Response.StatusCode        = (int)HttpStatusCode.BadRequest;
                    obj.Response.StatusDescription = "Not know command";
                    return;
                }

                bool range = false;
                obj.Response.ContentType = GetMime(fullname);
                obj.Response.AddHeader("Accept-Ranges", "bytes");
                obj.Response.AddHeader("X-Plex-Protocol", "1.0");
                if (obj.Request.HttpMethod == "OPTIONS")
                {
                    // CORS preflight: advertise supported methods/headers for Plex clients.
                    obj.Response.AddHeader("Access-Control-Allow-Methods", "POST, GET, OPTIONS, DELETE, PUT, HEAD");
                    obj.Response.AddHeader("Access-Control-Max-Age", "1209600");
                    obj.Response.AddHeader("Access-Control-Allow-Headers",
                                           "accept, x-plex-token, x-plex-client-identifier, x-plex-username, x-plex-product, x-plex-device, x-plex-platform, x-plex-platform-version, x-plex-version, x-plex-device-name");
                    obj.Response.AddHeader("Cache-Control", "no-cache");
                    obj.Response.ContentType = "text/plain";
                    return;
                }
                // Header lookup is case-sensitive on HttpListener, so probe both spellings.
                string rangevalue = null;
                if (obj.Request.Headers.AllKeys.Contains("Range"))
                {
                    rangevalue = obj.Request.Headers["Range"].Replace("bytes=", string.Empty).Trim();
                }
                if (obj.Request.Headers.AllKeys.Contains("range"))
                {
                    rangevalue = obj.Request.Headers["range"].Replace("bytes=", string.Empty).Trim();
                }

                if (obj.Request.HttpMethod != "HEAD")
                {
                    FileSystemResult <Stream> fr = file.OpenRead();
                    if (fr == null || !fr.IsOk)
                    {
                        obj.Response.StatusCode = (int)HttpStatusCode.InternalServerError;
                        // Parentheses matter: previously "?? string.Empty" bound to the whole
                        // concatenation (which is never null) instead of to fr?.Error.
                        obj.Response.StatusDescription = "Unable to open '" + fullname + "' " +
                                                         (fr?.Error ?? string.Empty);
                        return;
                    }
                    org = fr.Result;
                    long totalsize = org.Length;
                    long start     = 0;
                    // Default to the whole file so a malformed Range header no longer
                    // leaves end == 0 (which produced a bogus 1-byte partial response).
                    long end = totalsize - 1;
                    if (!string.IsNullOrEmpty(rangevalue))
                    {
                        range = true;
                        // Range format: bytes=<start>-<end>
                        string[] split = rangevalue.Split('-');
                        if (split.Length == 2)
                        {
                            // bytes=-N : the last N bytes of the file
                            if (string.IsNullOrEmpty(split[0]) && !string.IsNullOrEmpty(split[1]))
                            {
                                long e = long.Parse(split[1]);
                                start = totalsize - e;
                                if (start < 0)
                                {
                                    // Clamp: a suffix longer than the file must not go negative.
                                    start = 0;
                                }
                                end = totalsize - 1;
                            }
                            // bytes=N- : from N to the end of the file
                            else if (!string.IsNullOrEmpty(split[0]) && string.IsNullOrEmpty(split[1]))
                            {
                                start = long.Parse(split[0]);
                                end   = totalsize - 1;
                            }
                            // bytes=N-M : explicit begin and end, clamped to the file size
                            else if (!string.IsNullOrEmpty(split[0]) && !string.IsNullOrEmpty(split[1]))
                            {
                                start = long.Parse(split[0]);
                                end   = long.Parse(split[1]);
                                if (start > totalsize - 1)
                                {
                                    start = totalsize - 1;
                                }
                                if (end > totalsize - 1)
                                {
                                    end = totalsize - 1;
                                }
                            }
                            else
                            {
                                start = 0;
                                end   = totalsize - 1;
                            }
                        }
                    }
                    SubStream outstream;
                    if (range)
                    {
                        obj.Response.StatusCode = (int)HttpStatusCode.PartialContent;
                        obj.Response.AddHeader("Content-Range", "bytes " + start + "-" + end + "/" + totalsize);
                        outstream = new SubStream(org, start, end - start + 1);
                        obj.Response.ContentLength64 = end - start + 1;
                    }
                    else
                    {
                        outstream = new SubStream(org, 0, totalsize);
                        obj.Response.ContentLength64 = totalsize;
                        obj.Response.StatusCode      = (int)HttpStatusCode.OK;
                    }
                    if ((userid != 0) && (loc != null) && autowatch == 1)
                    {
                        // Toggle the watched flag once the client streams past the threshold fraction.
                        outstream.CrossPosition         = (long)((double)totalsize * WatchedThreshold);
                        outstream.CrossPositionCrossed +=
                            (a) =>
                        {
                            Task.Factory.StartNew(() => { loc.ToggleWatchedStatus(true, userid); },
                                                  new CancellationToken(),
                                                  TaskCreationOptions.LongRunning, TaskScheduler.Default);
                        };
                    }
                    obj.Response.SendChunked = false;
                    outstream.CopyTo(obj.Response.OutputStream);
                    obj.Response.OutputStream.Close();
                    outstream.Close();
                }
                else
                {
                    // HEAD: report the size without opening the source stream.
                    // NOTE(review): uses FileInfo on fullname; presumably incorrect for
                    // cloud-backed files where fullname is not a local path — confirm.
                    obj.Response.SendChunked     = false;
                    obj.Response.StatusCode      = (int)HttpStatusCode.OK;
                    obj.Response.ContentLength64 = new FileInfo(fullname).Length;
                    obj.Response.KeepAlive       = false;
                    obj.Response.OutputStream.Close();
                }
            }
            catch (HttpListenerException)
            {
                // Client disconnected mid-stream; nothing useful to do.
            }
            catch (Exception e)
            {
                try
                {
                    obj.Response.StatusCode        = (int)HttpStatusCode.InternalServerError;
                    obj.Response.StatusDescription = "Internal Server Error";
                }
                catch
                {
                    // Headers may already be sent; ignore.
                }
                logger.Warn(e.ToString());
            }
            finally
            {
                // Always release the source stream and the response, even on error paths.
                try
                {
                    org?.Dispose();
                }
                catch
                {
                    // ignored
                }
                try
                {
                    obj?.Response.OutputStream?.Close();
                    obj?.Response.Close();
                }
                catch
                {
                    // ignored
                }
            }
        }
        /// <summary>
        /// End-to-end test: rebuilds the libplist RPM's header from its own payload via
        /// <c>RpmPackageCreator</c>, asserts the regenerated header matches the original
        /// byte-for-byte, then computes the signature block and compares every signature
        /// tag against the original package.
        /// </summary>
        public void CalculateSignatureTest()
        {
            using (Stream stream = File.OpenRead(@"Rpm/libplist-2.0.1.151-1.1.x86_64.rpm"))
            {
                var originalPackage = RpmPackageReader.Read(stream);

                RpmPackageCreator    creator = new RpmPackageCreator(new PlistFileAnalyzer());
                Collection <RpmFile> files;

                // Reconstruct the file list from the package's own decompressed cpio payload.
                using (var payloadStream = RpmPayloadReader.GetDecompressedPayloadStream(originalPackage))
                    using (var cpio = new CpioFile(payloadStream, false))
                    {
                        ArchiveBuilder builder = new ArchiveBuilder(new PlistFileAnalyzer());
                        var            entries = builder.FromCpio(cpio);
                        files = creator.CreateFiles(entries);
                    }

                // Core routine to populate files and dependencies
                RpmPackage package  = new RpmPackage();
                var        metadata = new PublicRpmMetadata(package);
                metadata.Name    = "libplist";
                metadata.Version = "2.0.1.151";
                metadata.Arch    = "x86_64";
                metadata.Release = "1.1";

                creator.AddPackageProvides(metadata);
                creator.AddLdDependencies(metadata);

                metadata.Files = files;
                creator.AddRpmDependencies(metadata, null);

                PlistMetadata.ApplyDefaultMetadata(metadata);

                metadata.Vendor      = "obs://build.opensuse.org/home:qmfrederik";
                metadata.Description = "libplist is a library for manipulating Apple Binary and XML Property Lists";
                metadata.Url         = "http://www.libimobiledevice.org/";

                creator.CalculateHeaderOffsets(package);

                // Make sure the header is really correct
                using (Stream originalHeaderStream = new SubStream(
                           originalPackage.Stream,
                           originalPackage.HeaderOffset,
                           originalPackage.PayloadOffset - originalPackage.HeaderOffset,
                           leaveParentOpen: true,
                           readOnly: true))
                    using (Stream headerStream = creator.GetHeaderStream(package))
                    {
                        // NOTE(review): Stream.Read is not guaranteed to fill the buffer in a
                        // single call; these one-shot reads assume both streams return all
                        // bytes at once — TODO confirm or loop until fully read.
                        byte[] originalData = new byte[originalHeaderStream.Length];
                        originalHeaderStream.Read(originalData, 0, originalData.Length);

                        byte[] data = new byte[headerStream.Length];
                        headerStream.Read(data, 0, data.Length);

                        // Diagnostic only: locate the first differing byte and the header tag
                        // that owns it, to ease debugging when the assertion below fails.
                        // The computed values are not otherwise consumed.
                        int      delta     = 0;
                        int      dataDelta = 0;
                        IndexTag tag;
                        for (int i = 0; i < data.Length; i++)
                        {
                            if (originalData[i] != data[i])
                            {
                                delta     = i;
                                dataDelta = delta - package.Header.Records.Count * Marshal.SizeOf <IndexHeader>();
                                tag       = package.Header.Records.OrderBy(r => r.Value.Header.Offset).Last(r => r.Value.Header.Offset <= dataDelta).Key;

                                break;
                            }
                        }

                        Assert.Equal(originalData, data);
                    }

                var krgen         = PgpSigner.GenerateKeyRingGenerator("dotnet", "dotnet");
                var secretKeyRing = krgen.GenerateSecretKeyRing();
                var privateKey    = secretKeyRing.GetSecretKey().ExtractPrivateKey("dotnet".ToCharArray());

                using (var payload = RpmPayloadReader.GetCompressedPayloadStream(originalPackage))
                {
                    // Header should be OK now (see previous test), so now get the signature block and the
                    // trailer
                    creator.CalculateSignature(package, privateKey, payload);
                    creator.CalculateSignatureOffsets(package);

                    // All original signature records except HEADERSIGNATURES are compared
                    // first; HEADERSIGNATURES itself is checked last.
                    foreach (var record in originalPackage.Signature.Records)
                    {
                        if (record.Key == SignatureTag.RPMTAG_HEADERSIGNATURES)
                        {
                            continue;
                        }

                        this.AssertTagEqual(record.Key, originalPackage, package);
                    }

                    this.AssertTagEqual(SignatureTag.RPMTAG_HEADERSIGNATURES, originalPackage, package);
                }
            }
        }
Exemple #35
0
        /// <summary>
        /// Loads a MIDI file from disk and parses its header and track chunks.
        /// </summary>
        /// <param name="filePath">Path of the MIDI file to load.</param>
        /// <param name="midiFile">Receives the parsed <see cref="MidiFile"/>, or null on failure.</param>
        /// <param name="retainAll">Forwarded to the MidiFile parser; presumably controls whether all events are retained — TODO confirm.</param>
        /// <returns>true when the file was parsed and contains at least one track; otherwise false.</returns>
        public static bool LoadFile(
            string filePath,
            out MidiFile midiFile,
            bool retainAll = false)
        {
            midiFile = null;

            if (!File.Exists(filePath))
            {
                Debug.LogError($"File {filePath} does not exist.");
                return(false);
            }

            try
            {
                using (FileStream fileStream = File.OpenRead(filePath))
                {
                    //Now Read Chunks
                    //Each chunk starts with an 8 byte header, so we probably reached padding if
                    //there is less than that remaining
                    while (fileStream.CanRead && (fileStream.Length - fileStream.Position) >= 8)
                    {
                        byte[] smallBuffer = new byte[8];

                        //Bug fix: Stream.Read may return fewer bytes than requested; the
                        //return value was previously ignored, so a partial read would
                        //silently corrupt the chunk header. Loop until all 8 bytes arrive.
                        int headerBytes = 0;
                        while (headerBytes < 8)
                        {
                            int read = fileStream.Read(smallBuffer, headerBytes, 8 - headerBytes);
                            if (read <= 0)
                            {
                                throw new MidiParsingException("Unexpected end of file while reading a chunk header.");
                            }
                            headerBytes += read;
                        }

                        //First 4 bytes: ASCII chunk id. Next 4 bytes: big-endian chunk length.
                        string chunkID   = Encoding.UTF8.GetString(smallBuffer, 0, 4).Trim();
                        int    chunkSize =
                            smallBuffer[4] << 24 |
                                smallBuffer[5] << 16 |
                                smallBuffer[6] << 8 |
                                smallBuffer[7];

                        //NOTE(review): assumes SubStream advances fileStream past the chunk
                        //even when the chunk is skipped (default case) — confirm against
                        //the SubStream implementation.
                        using (Stream chunkStream = new SubStream(fileStream, chunkSize, ownsStream: false))
                        {
                            switch (chunkID)
                            {
                            case MidiFile.HEADER_CHUNK_NAME:
                                midiFile = new MidiFile(chunkStream, retainAll);
                                break;

                            case MidiTrack.TRACK_CHUNK_NAME:
                                if (midiFile == null)
                                {
                                    throw new MidiParsingException($"\"{MidiFile.HEADER_CHUNK_NAME}\" chunk not found before \"{MidiTrack.TRACK_CHUNK_NAME}\".");
                                }
                                midiFile.ReadTrack(chunkStream);
                                break;

                            default:
                                Debug.Log($"Skipping unexpected Chunk in File {filePath}: {chunkID}.");
                                //Do nothing with it
                                break;
                            }
                        }
                    }

                    if (midiFile == null)
                    {
                        throw new MidiParsingException($"Finished parsing file without locating Header or Track chunks");
                    }

                    if (midiFile.tracks.Count == 0)
                    {
                        midiFile = null;
                        throw new MidiParsingException($"Finished parsing file without locating Track chunks");
                    }
                }
            }
            catch (MidiParsingException excp)
            {
                Debug.LogException(new MidiParsingException($"Error parsing Midi file \"{filePath}\"", excp));
                midiFile = null;
                return(false);
            }

            return(midiFile != null);
        }
Exemple #36
0
        /// <summary>
        /// Opens a stream over the file section described by <paramref name="hdr"/>,
        /// wrapping it in a decompressing resource stream when the resource is compressed.
        /// </summary>
        /// <param name="hdr">Resource header giving the section's offset, size and flags.</param>
        /// <returns>The raw section stream, or a decompressing wrapper over it.</returns>
        internal SparseStream OpenResourceStream(ShortResourceHeader hdr)
        {
            SparseStream rawSection = new SubStream(_fileStream, Ownership.None, hdr.FileOffset, hdr.CompressedSize);

            bool isCompressed = (hdr.Flags & ResourceFlags.Compressed) != 0;
            if (!isCompressed)
            {
                return rawSection;
            }

            bool usesLzx = (_fileHeader.Flags & FileFlags.LzxCompression) != 0;
            return new FileResourceStream(rawSection, hdr, usesLzx, _fileHeader.CompressionSize);
        }
Exemple #37
0
        /// <summary>
        /// Handles one HTTP request for a local video file: resolves the target file from
        /// the URL (either a VideoLocal id or a base64-encoded path), then answers
        /// OPTIONS preflight, HEAD probes, and GET requests with byte-range support.
        /// </summary>
        /// <param name="obj">The listener context of the incoming request.</param>
        private void Process(System.Net.HttpListenerContext obj)
        {
            Stream org = null;

            try
            {
                // URL shape: /<command>/<argument>
                string[] dta = obj.Request.RawUrl.Split(new char[] { '/' }, StringSplitOptions.RemoveEmptyEntries);
                if (dta.Length < 2)
                {
                    return;
                }
                string cmd = dta[0].ToLower();
                string arg = dta[1];
                string fullname;
                if (cmd == "videolocal")
                {
                    // Argument is a VideoLocal database id.
                    int sid = 0;
                    int.TryParse(arg, out sid);
                    if (sid == 0)
                    {
                        obj.Response.StatusCode        = (int)HttpStatusCode.BadRequest;
                        obj.Response.StatusDescription = "Stream Id missing.";
                        return;
                    }
                    VideoLocalRepository rep = new VideoLocalRepository();
                    VideoLocal           loc = rep.GetByID(sid);
                    if (loc == null)
                    {
                        obj.Response.StatusCode        = (int)HttpStatusCode.NotFound;
                        obj.Response.StatusDescription = "Stream Id not found.";
                        return;
                    }
                    fullname = loc.FullServerPath;
                }
                else if (cmd == "file")
                {
                    // Argument is a base64-encoded file path.
                    fullname = Base64DecodeUrl(arg);
                }
                else
                {
                    obj.Response.StatusCode        = (int)HttpStatusCode.BadRequest;
                    obj.Response.StatusDescription = "Not know command";
                    return;
                }

                bool range = false;

                try
                {
                    if (!File.Exists(fullname))
                    {
                        obj.Response.StatusCode        = (int)HttpStatusCode.NotFound;
                        obj.Response.StatusDescription = "File '" + fullname + "' not found.";
                        return;
                    }
                }
                catch (Exception)
                {
                    // File.Exists can throw on malformed/inaccessible paths.
                    obj.Response.StatusCode        = (int)HttpStatusCode.InternalServerError;
                    obj.Response.StatusDescription = "Unable to access File '" + fullname + "'.";
                    return;
                }
                obj.Response.ContentType = GetMime(fullname);
                obj.Response.AddHeader("Accept-Ranges", "bytes");
                obj.Response.AddHeader("X-Plex-Protocol", "1.0");
                if (obj.Request.HttpMethod == "OPTIONS")
                {
                    // CORS preflight: advertise supported methods/headers, no body.
                    obj.Response.AddHeader("Access-Control-Allow-Methods", "POST, GET, OPTIONS, DELETE, PUT, HEAD");
                    obj.Response.AddHeader("Access-Control-Max-Age", "1209600");
                    obj.Response.AddHeader("Access-Control-Allow-Headers",
                                           "accept, x-plex-token, x-plex-client-identifier, x-plex-username, x-plex-product, x-plex-device, x-plex-platform, x-plex-platform-version, x-plex-version, x-plex-device-name");
                    obj.Response.AddHeader("Cache-Control", "no-cache");
                    obj.Response.ContentType = "text/plain";
                    return;
                }
                // Some clients send "Range", others "range"; when both are present the
                // lower-case one wins (preserved from the original behavior).
                string rangevalue = null;
                if (obj.Request.Headers.AllKeys.Contains("Range"))
                {
                    rangevalue = obj.Request.Headers["Range"].Replace("bytes=", string.Empty).Trim();
                }
                if (obj.Request.Headers.AllKeys.Contains("range"))
                {
                    rangevalue = obj.Request.Headers["range"].Replace("bytes=", string.Empty).Trim();
                }

                if (obj.Request.HttpMethod != "HEAD")
                {
                    org = new FileStream(fullname, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
                    long totalsize = org.Length;
                    long start     = 0;
                    long end       = 0;
                    if (!string.IsNullOrEmpty(rangevalue))
                    {
                        range = true;
                        string[] split = rangevalue.Split('-');
                        if (split.Length == 2)
                        {
                            // "-N"  => last N bytes of the file.
                            if (string.IsNullOrEmpty(split[0]) && !string.IsNullOrEmpty(split[1]))
                            {
                                long e = long.Parse(split[1]);
                                start = totalsize - e;
                                end   = totalsize - 1;
                            }
                            // "N-"  => from byte N to the end.
                            else if (!string.IsNullOrEmpty(split[0]) && string.IsNullOrEmpty(split[1]))
                            {
                                start = long.Parse(split[0]);
                                end   = totalsize - 1;
                            }
                            // "N-M" => explicit range, clamped to the file size.
                            else if (!string.IsNullOrEmpty(split[0]) && !string.IsNullOrEmpty(split[1]))
                            {
                                start = long.Parse(split[0]);
                                end   = long.Parse(split[1]);
                                if (start > totalsize - 1)
                                {
                                    start = totalsize - 1;
                                }
                                if (end > totalsize - 1)
                                {
                                    end = totalsize - 1;
                                }
                            }
                            else
                            {
                                start = 0;
                                end   = totalsize - 1;
                            }
                        }
                    }

                    if (range)
                    {
                        obj.Response.StatusCode = (int)HttpStatusCode.PartialContent;
                        obj.Response.AddHeader("Content-Range", "bytes " + start + "-" + end + "/" + totalsize);
                        // NOTE(review): whether the wrapped FileStream is closed depends on
                        // SubStream taking ownership of its base stream — confirm; the
                        // finally block only disposes the outermost stream.
                        org = new SubStream(org, start, end - start + 1);
                        obj.Response.ContentLength64 = end - start + 1;
                    }
                    else
                    {
                        obj.Response.ContentLength64 = totalsize;
                        obj.Response.StatusCode      = (int)HttpStatusCode.OK;
                    }

                    obj.Response.SendChunked = false;
                    org.CopyTo(obj.Response.OutputStream);
                    obj.Response.OutputStream.Close();
                    org.Close();
                }
                else
                {
                    // HEAD: headers only, no body.
                    obj.Response.SendChunked     = false;
                    obj.Response.StatusCode      = (int)HttpStatusCode.OK;
                    obj.Response.ContentLength64 = new FileInfo(fullname).Length;
                    obj.Response.KeepAlive       = false;
                    obj.Response.OutputStream.Close();
                }
            }
            catch (HttpListenerException)
            {
                // Client disconnected mid-transfer; nothing useful to report.
            }
            catch (Exception e)
            {
                // Try to report a 500 (mirrors the sibling Process overload); the response
                // may already be partially sent, so failures here are ignored.
                try
                {
                    obj.Response.StatusCode        = (int)HttpStatusCode.InternalServerError;
                    obj.Response.StatusDescription = "Internal Server Error";
                }
                catch
                {
                    // ignored
                }
                logger.Error(e.ToString());
            }
            finally
            {
                // Guard each cleanup step so a failure in one doesn't skip the other
                // (consistent with the sibling Process overload).
                try
                {
                    org?.Dispose();
                }
                catch
                {
                    // ignored
                }
                try
                {
                    obj?.Response.OutputStream?.Close();
                    obj?.Response.Close();
                }
                catch
                {
                    // ignored
                }
            }
        }
        /// <summary>
        /// Lays out all extents of a hosted sparse VMDK: header, embedded descriptor,
        /// redundant and primary grain directories/tables, and the grain data itself.
        /// </summary>
        /// <param name="totalLength">Receives the total byte length of the built image.</param>
        /// <returns>The ordered list of builder extents making up the image.</returns>
        internal override List<BuilderExtent> FixExtents(out long totalLength)
        {
            List<BuilderExtent> extents = new List<BuilderExtent>();

            MemoryStream descriptorStream = new MemoryStream();
            _descriptor.Write(descriptorStream);

            // Figure out grain size and number of grain tables, and adjust actual extent size to be a multiple
            // of grain size
            const int GtesPerGt = 512;
            long grainSize = 128;
            int numGrainTables = (int)Utilities.Ceil(_content.Length, grainSize * GtesPerGt * Sizes.Sector);

            // Descriptor space is fixed at 10 KiB rather than sized from the stream.
            long descriptorLength = 10 * Sizes.OneKiB; // Utilities.RoundUp(descriptorStream.Length, Sizes.Sector);
            long descriptorStart = 0;
            if (descriptorLength != 0)
            {
                // Descriptor lives at sector 1, immediately after the header sector.
                descriptorStart = 1;
            }

            // Sector offsets (not bytes) for each on-disk region; each region starts
            // right after the previous one, rounded up to whole sectors.
            long redundantGrainDirStart = Math.Max(descriptorStart, 1) + Utilities.Ceil(descriptorLength, Sizes.Sector);
            long redundantGrainDirLength = numGrainTables * 4;

            long redundantGrainTablesStart = redundantGrainDirStart + Utilities.Ceil(redundantGrainDirLength, Sizes.Sector);
            long redundantGrainTablesLength = numGrainTables * Utilities.RoundUp(GtesPerGt * 4, Sizes.Sector);

            long grainDirStart = redundantGrainTablesStart + Utilities.Ceil(redundantGrainTablesLength, Sizes.Sector);
            long grainDirLength = numGrainTables * 4;

            long grainTablesStart = grainDirStart + Utilities.Ceil(grainDirLength, Sizes.Sector);
            long grainTablesLength = numGrainTables * Utilities.RoundUp(GtesPerGt * 4, Sizes.Sector);

            // Data area starts after all metadata, aligned to a grain boundary.
            long dataStart = Utilities.RoundUp(grainTablesStart + Utilities.Ceil(grainTablesLength, Sizes.Sector), grainSize);

            // Generate the header, and write it
            HostedSparseExtentHeader header = new HostedSparseExtentHeader();
            header.Flags = HostedSparseExtentFlags.ValidLineDetectionTest | HostedSparseExtentFlags.RedundantGrainTable;
            header.Capacity = Utilities.RoundUp(_content.Length, grainSize * Sizes.Sector) / Sizes.Sector;
            header.GrainSize = grainSize;
            header.DescriptorOffset = descriptorStart;
            header.DescriptorSize = descriptorLength / Sizes.Sector;
            header.NumGTEsPerGT = GtesPerGt;
            header.RgdOffset = redundantGrainDirStart;
            header.GdOffset = grainDirStart;
            header.Overhead = dataStart;

            extents.Add(new BuilderBytesExtent(0, header.GetBytes()));

            // The descriptor extent
            if (descriptorLength > 0)
            {
                extents.Add(new BuilderStreamExtent(descriptorStart * Sizes.Sector, descriptorStream));
            }

            // The grain directory extents
            extents.Add(new GrainDirectoryExtent(redundantGrainDirStart * Sizes.Sector, redundantGrainTablesStart, numGrainTables, GtesPerGt));
            extents.Add(new GrainDirectoryExtent(grainDirStart * Sizes.Sector, grainTablesStart, numGrainTables, GtesPerGt));

            // For each graintable span that's present...
            long dataSectorsUsed = 0;
            long gtSpan = GtesPerGt * grainSize * Sizes.Sector;
            foreach (var gtRange in StreamExtent.Blocks(_content.Extents, grainSize * GtesPerGt * Sizes.Sector))
            {
                for (long i = 0; i < gtRange.Count; ++i)
                {
                    int gt = (int)(gtRange.Offset + i);

                    // The last grain-table span may cover less than a full gtSpan of content.
                    SubStream gtStream = new SubStream(_content, gt * gtSpan, Math.Min(gtSpan, _content.Length - (gt * gtSpan)));

                    GrainTableDataExtent dataExtent = new GrainTableDataExtent((dataStart + dataSectorsUsed) * Sizes.Sector, gtStream, grainSize);
                    extents.Add(dataExtent);

                    // Same grain-table content is written to both the redundant and
                    // primary grain-table regions.
                    extents.Add(new GrainTableExtent(GrainTablePosition(redundantGrainTablesStart, gt, GtesPerGt), gtStream, dataStart + dataSectorsUsed, GtesPerGt, grainSize));
                    extents.Add(new GrainTableExtent(GrainTablePosition(grainTablesStart, gt, GtesPerGt), gtStream, dataStart + dataSectorsUsed, GtesPerGt, grainSize));

                    dataSectorsUsed += dataExtent.Length / Sizes.Sector;
                }
            }

            totalLength = (dataStart + dataSectorsUsed) * Sizes.Sector;
            return extents;
        }
Exemple #39
0
        /// <summary>
        /// Extracts every entry of the loaded PKG into a user-chosen folder.
        /// Encrypted entries are decrypted when a passcode is available; otherwise the
        /// raw encrypted bytes are saved after a warning.
        /// </summary>
        private void extractAllToolStripMenuItem_Click(object sender, EventArgs e)
        {
            FolderBrowserDialog savepath = new FolderBrowserDialog();

            if (savepath.ShowDialog() == DialogResult.OK)
            {
                Logger.log("Extracting all entry items..");
                try
                {
                    // Pair each entry id with its display name.
                    var numbersAndWords = idEntryList.Zip(nameEntryList, (n, w) => new { id = n, name = w });
                    foreach (var nw in numbersAndWords)
                    {
                        var pkgPath = filenames;
                        var idx     = int.Parse(nw.id);
                        var name    = nw.name;
                        // Entry names encode the extension with an underscore; restore the dot.
                        var outPath = savepath.SelectedPath + "\\" + name.Replace("_SHA", ".SHA").Replace("_DAT", ".DAT").Replace("_SFO", ".SFO").Replace("_XML", ".XML").Replace("_SIG", ".SIG").Replace("_PNG", ".PNG").Replace("_JSON", ".JSON").Replace("_DDS", ".DDS").Replace("_TRP", ".TRP").Replace("_AT9", ".AT9");

                        using (var pkgFile = File.OpenRead(pkgPath))
                        {
                            var pkg = new PkgReader(pkgFile).ReadPkg();
                            if (idx < 0 || idx >= pkg.Metas.Metas.Count)
                            {
                                DarkMessageBox.ShowError("Error: entry number out of range.", "PS4 PKG Tool");
                                Logger.log("Error: entry number out of range.");
                                return;
                            }
                            using (var outFile = File.Create(outPath))
                            {
                                var meta = pkg.Metas.Metas[idx];
                                outFile.SetLength(meta.DataSize);
                                if (meta.Encrypted)
                                {
                                    if (passcode == null)
                                    {
                                        DarkMessageBox.ShowWarning("Warning: Entry is encrypted but no passcode was provided! Saving encrypted bytes.", "PS4 PKG Tool");
                                        Logger.log("Warning: Entry is encrypted but no passcode was provided! Saving encrypted bytes.");
                                    }
                                    else
                                    {
                                        // Encrypted payload is padded to a 16-byte boundary.
                                        // NOTE(review): single Read assumes SubStream fills the
                                        // buffer in one call — confirm.
                                        var entry = new SubStream(pkgFile, meta.DataOffset, (meta.DataSize + 15) & ~15);
                                        var tmp   = new byte[entry.Length];
                                        entry.Read(tmp, 0, tmp.Length);
                                        tmp = LibOrbisPkg.PKG.Entry.Decrypt(tmp, pkg.Header.content_id, passcode, meta);
                                        outFile.Write(tmp, 0, (int)meta.DataSize);
                                        // Bug fix: was 'return', which aborted the whole
                                        // "extract all" loop after the first decrypted entry.
                                        continue;
                                    }
                                }
                                // Plain copy (also used for encrypted entries without a passcode).
                                new SubStream(pkgFile, meta.DataOffset, meta.DataSize).CopyTo(outFile);
                            }
                        }
                    }

                    DarkMessageBox.ShowInformation("All entry item extracted.", "PS4 PKG Tool");
                    Logger.log("All entry item extracted.");
                }
                catch (Exception a)
                {
                    DarkMessageBox.ShowError(a.Message, "PS4 PKG Tool");
                    Logger.log(a.Message);
                }
            }
        }
        /// <summary>
        /// Opens a stream over one chunk of the resource, wrapping it in the appropriate
        /// decompression stream when the chunk is stored compressed.
        /// </summary>
        /// <param name="chunk">Zero-based index of the chunk to open.</param>
        /// <returns>A stream yielding the chunk's uncompressed bytes.</returns>
        private Stream OpenChunkStream(int chunk)
        {
            // The final chunk may hold fewer bytes than the nominal chunk size.
            bool lastChunk = chunk == _chunkLength.Length - 1;
            int targetUncompressed = lastChunk ? (int)(Length - _position) : _chunkSize;

            Stream rawChunkStream = new SubStream(_baseStream, _offsetDelta + _chunkOffsets[chunk], _chunkLength[chunk]);

            // A chunk whose stored length equals its uncompressed length is stored raw,
            // even in a compressed resource.
            bool storedCompressed =
                (_header.Flags & ResourceFlags.Compressed) != 0 &&
                _chunkLength[chunk] != targetUncompressed;

            if (!storedCompressed)
            {
                return rawChunkStream;
            }

            return _lzxCompression
                ? (Stream)new LzxStream(rawChunkStream, 15, E8DecodeFileSize)
                : new XpressStream(rawChunkStream, targetUncompressed);
        }
Exemple #41
0
        /// <summary>
        /// Opens a stream over this extent's data, resolving the storage type
        /// (inline / regular / prealloc) and then un-wrapping any compression.
        /// </summary>
        /// <param name="context">Filesystem context used to map logical to physical addresses.</param>
        /// <returns>A readable stream positioned at the start of the extent data.</returns>
        /// <exception cref="IOException">Encryption, an invalid extent type, or an unsupported compression.</exception>
        /// <exception cref="NotImplementedException">Pre-allocated extents are not supported.</exception>
        public Stream GetStream(Context context)
        {
            if (Encryption)
            {
                throw new IOException("Extent encryption is not supported");
            }
            Stream stream;

            // First select the raw (possibly still compressed) data source.
            switch (Type)
            {
            case ExtentDataType.Inline:
                // Data is embedded directly in the extent item.
                byte[] data = InlineData;
                stream = new MemoryStream(data);
                break;

            case ExtentDataType.Regular:
                var address = ExtentAddress;
                if (address == 0)
                {
                    // Address 0 denotes a hole: read as zeros of the logical size.
                    stream = new ZeroStream((long)LogicalSize);
                }
                else
                {
                    var physicalAddress = context.MapToPhysical(address);
                    stream = new SubStream(context.RawStream, Ownership.None, (long)(physicalAddress + ExtentOffset), (long)ExtentSize);
                }
                break;

            case ExtentDataType.PreAlloc:
                throw new NotImplementedException();

            default:
                throw new IOException("invalid extent type");
            }
            // Then un-wrap compression on top of the raw stream.
            switch (Compression)
            {
            case ExtentDataCompression.None:
                break;

            case ExtentDataCompression.Zlib:
            {
                // Wrap so the result reports the logical (uncompressed) length and
                // starts at position 0 despite the underlying decompressor.
                var zlib   = new ZlibStream(stream, CompressionMode.Decompress, false);
                var sparse = SparseStream.FromStream(zlib, Ownership.Dispose);
                var length = new LengthWrappingStream(sparse, (long)LogicalSize, Ownership.Dispose);
                stream = new PositionWrappingStream(length, 0, Ownership.Dispose);
                break;
            }

            case ExtentDataCompression.Lzo:
            {
                // Layout: u32 total length, then repeated (u32 part length, part bytes).
                var  buffer      = StreamUtilities.ReadExact(stream, sizeof(uint));
                var  totalLength = EndianUtilities.ToUInt32LittleEndian(buffer, 0);
                long processed   = sizeof(uint);
                var  parts       = new List <SparseStream>();
                var  remaining   = (long)LogicalSize;
                while (processed < totalLength)
                {
                    stream.Position = processed;
                    StreamUtilities.ReadExact(stream, buffer, 0, sizeof(uint));
                    var partLength = EndianUtilities.ToUInt32LittleEndian(buffer, 0);
                    processed += sizeof(uint);
                    var part         = new SubStream(stream, Ownership.Dispose, processed, partLength);
                    var uncompressed = new SeekableLzoStream(part, CompressionMode.Decompress, false);
                    // Each part decompresses to at most 4 KiB (or whatever remains).
                    uncompressed.SetLength(Math.Min(Sizes.OneKiB * 4, remaining));
                    remaining -= uncompressed.Length;
                    parts.Add(SparseStream.FromStream(uncompressed, Ownership.Dispose));
                    processed += partLength;
                }
                stream = new ConcatStream(Ownership.Dispose, parts.ToArray());
                break;
            }

            default:
                throw new IOException($"Unsupported extent compression ({Compression})");
            }
            return(stream);
        }
Exemple #42
0
        /// <summary>
        /// Initialises the root directory from the appropriate on-disk region for the
        /// detected FAT variant.
        /// </summary>
        private void LoadRootDirectory()
        {
            Stream dirStream;
            if (_type == FatType.Fat32)
            {
                // FAT32: the root directory is an ordinary cluster chain.
                dirStream = new ClusterStream(this, FileAccess.ReadWrite, _bpbRootClus, uint.MaxValue);
            }
            else
            {
                // FAT12/16: fixed root region after the reserved sectors and the FATs;
                // each directory entry occupies 32 bytes.
                dirStream = new SubStream(_data, (_bpbRsvdSecCnt + (_bpbNumFATs * _bpbFATSz16)) * _bpbBytesPerSec, _bpbRootEntCnt * 32);
            }

            _rootDir = new Directory(this, dirStream);
        }
Exemple #43
0
        /// <summary>
        /// Uploads a single part of a multi-part S3 upload and returns the part's ETag.
        /// Retries recursively (up to MaxRetriesForMultiPartUpload) after a one-second
        /// back-off, restoring the stream position between attempts.
        /// </summary>
        /// <param name="baseStream">Source stream positioned at the start of the part.</param>
        /// <param name="client">HTTP client used for the PUT request.</param>
        /// <param name="url">Pre-built upload URL for this part.</param>
        /// <param name="length">Number of bytes of <paramref name="baseStream"/> to upload.</param>
        /// <param name="retryCount">Current attempt number; callers pass 0.</param>
        private async Task <string> UploadPart(Stream baseStream, HttpClient client, string url, long length, int retryCount)
        {
            // saving the position if we need to retry
            var position = baseStream.Position;

            using (var subStream = new SubStream(baseStream, offset: 0, length: length))
            {
                var now         = SystemTime.UtcNow;
                var payloadHash = RavenAwsHelper.CalculatePayloadHash(subStream);

                // stream is disposed by the HttpClient
                var content = new ProgressableStreamContent(subStream, Progress)
                {
                    Headers =
                    {
                        { "x-amz-date",           RavenAwsHelper.ConvertToString(now)                     },
                        { "x-amz-content-sha256", payloadHash                                             },
                        { "Content-Length",       subStream.Length.ToString(CultureInfo.InvariantCulture) }
                    }
                };

                var headers = ConvertToHeaders(content.Headers);
                client.DefaultRequestHeaders.Authorization = CalculateAuthorizationHeaderValue(HttpMethods.Put, url, now, headers);

                try
                {
                    var response = await client.PutAsync(url, content, CancellationToken);

                    if (response.IsSuccessStatusCode)
                    {
                        // Throws InvalidOperationException if the ETag header is absent.
                        var etagHeader = response.Headers.GetValues("ETag");
                        return(etagHeader.First());
                    }

                    if (retryCount == MaxRetriesForMultiPartUpload)
                    {
                        throw StorageException.FromResponseMessage(response);
                    }
                }
                catch (Exception)
                {
                    // Swallow and retry unless this was the last allowed attempt.
                    if (retryCount == MaxRetriesForMultiPartUpload)
                    {
                        throw;
                    }
                }

                // revert the uploaded count before retry
                Progress?.UploadProgress.UpdateUploaded(-content.Uploaded);
            }

            // wait for one second before trying again to send the request
            // maybe there was a network issue?
            // Bug fix: honour cancellation during the back-off instead of always
            // sleeping the full second before noticing the token.
            await Task.Delay(1000, CancellationToken);

            CancellationToken.ThrowIfCancellationRequested();

            // restore the stream position before retrying
            baseStream.Position = position;
            return(await UploadPart(baseStream, client, url, length, ++retryCount));
        }
        /// <summary>
        /// Creates a new stream that contains the XVA image.
        /// </summary>
        /// <returns>The new stream.</returns>
        public override SparseStream Build()
        {
            TarFileBuilder tarBuilder = new TarFileBuilder();

            int[] diskIds;
            tarBuilder.AddFile("ova.xml", Encoding.ASCII.GetBytes(GenerateOvaXml(out diskIds)));

            int diskIdx = 0;
            foreach (var diskRec in _disks)
            {
                SparseStream content = diskRec.Second;
                long contentLength = content.Length;

                int previousChunk = -1;
                foreach (StreamExtent extent in new List<StreamExtent>(content.Extents))
                {
                    int firstChunk = (int)(extent.Start / Sizes.OneMiB);
                    int lastChunk = (int)((extent.Start + extent.Length - 1) / Sizes.OneMiB);

                    for (int chunk = firstChunk; chunk <= lastChunk; ++chunk)
                    {
                        if (chunk == previousChunk)
                        {
                            // Consecutive extents can share a chunk - emit each chunk once.
                            continue;
                        }

                        long bytesRemaining = contentLength - (chunk * Sizes.OneMiB);
                        Stream chunkData;
                        if (bytesRemaining >= Sizes.OneMiB)
                        {
                            chunkData = new SubStream(content, chunk * Sizes.OneMiB, Sizes.OneMiB);
                        }
                        else
                        {
                            // Final partial chunk: pad with zeros up to the fixed 1 MiB chunk size.
                            chunkData = new ConcatStream(
                                Ownership.Dispose,
                                new SubStream(content, chunk * Sizes.OneMiB, bytesRemaining),
                                new ZeroStream(Sizes.OneMiB - bytesRemaining));
                        }

                        // Hash the chunk as it is consumed, then emit the checksum entry
                        // immediately after the data entry so the pair stays together.
                        HashAlgorithm hashAlg = new SHA1Managed();
                        tarBuilder.AddFile(
                            string.Format(CultureInfo.InvariantCulture, "Ref:{0}/{1:D8}", diskIds[diskIdx], chunk),
                            new HashStream(chunkData, Ownership.Dispose, hashAlg));
                        tarBuilder.AddFile(
                            string.Format(CultureInfo.InvariantCulture, "Ref:{0}/{1:D8}.checksum", diskIds[diskIdx], chunk),
                            new ChecksumStream(hashAlg));

                        previousChunk = chunk;
                    }
                }

                // Make sure the last chunk is present, filled with zero's if necessary
                int finalChunk = (int)((contentLength - 1) / Sizes.OneMiB);
                if (previousChunk < finalChunk)
                {
                    HashAlgorithm hashAlg = new SHA1Managed();
                    tarBuilder.AddFile(
                        string.Format(CultureInfo.InvariantCulture, "Ref:{0}/{1:D8}", diskIds[diskIdx], finalChunk),
                        new HashStream(new ZeroStream(Sizes.OneMiB), Ownership.Dispose, hashAlg));
                    tarBuilder.AddFile(
                        string.Format(CultureInfo.InvariantCulture, "Ref:{0}/{1:D8}.checksum", diskIds[diskIdx], finalChunk),
                        new ChecksumStream(hashAlg));
                }

                ++diskIdx;
            }

            return tarBuilder.Build();
        }
        /// <summary>
        /// Writes a run of bytes at the current position, coalescing any other
        /// sub-streams that start inside the written range into a single
        /// backing sub-stream before the data is stored.
        /// </summary>
        /// <param name="buffer">The source array containing the data to write.</param>
        /// <param name="offset">Index of the first byte in <paramref name="buffer"/> to write.</param>
        /// <param name="count">The number of bytes to write.</param>
        public override void Write(byte[] buffer, int offset, int count)
        {
            // Locate the sub-stream that already covers (or ends exactly at) Position;
            // create a fresh one starting at Position when none exists.
            SubStream target = Substreams.SingleOrDefault(
                s => s.StartPosition <= Position && s.StartPosition + s.Length >= Position);
            if (target == null)
            {
                target = new SubStream(Position);
                Substreams.Add(target);
            }

            // Fold every other sub-stream that begins inside the written range into
            // the target, then drop it from the collection.
            List<SubStream> toMerge = Substreams
                .Where(s => s != target && s.StartPosition >= Position && s.StartPosition < Position + count)
                .ToList();
            foreach (SubStream overlapped in toMerge)
            {
                target.Seek(overlapped.StartPosition - target.StartPosition, SeekOrigin.Begin);
                overlapped.Seek(0, SeekOrigin.Begin);
                overlapped.CopyTo(target);
                Substreams.Remove(overlapped);
            }

            // Store the new data; it overwrites any merged content in the same range.
            target.Seek(Position - target.StartPosition, SeekOrigin.Begin);
            target.Write(buffer, offset, count);

            Position = target.StartPosition + target.Position;
        }