private void doAnneal(BakedVolumes rec)
    {
        Console.WriteLine($"[Anneal: {rec._id}]");

        var partList = new Dictionary<int, BakedVolumes_PartValues?>();

        var ps = new string[] { "A", "B", "C", "D" };

        for (var i = 0; i < ps.Length; i++)
        {
            partList[i] = null;
            if (rec.Parts.ContainsKey(ps[i]))
            {
                partList[i] = rec.Parts[ps[i]];
            }
        }

        var skip = false;       // No need to skip unless missing pieces

        for (var i = 0; i < ps.Length; i++)
        {
            if (!partList.ContainsKey(i) ||
                !checkPart(partList[i]))
            {
                skip = true;
                continue;
            }

            procPart(partList[i], skip, ps[i], i);

            skip = false;
        }
    }
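
    // A minimal sketch of the per-part validation that doAnneal depends on. The real
    // checkPart is not shown here, so this is an assumption based on the Path/Name
    // fields used in OpenStream below, not the actual implementation.
    private bool checkPartSketch(BakedVolumes_PartValues? part)
    {
        // A part is only usable if its record exists and its backing file is present on disk
        return part != null &&
               File.Exists(Path.Combine(part.Path, part.Name));
    }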
        //ulong real_position = 0;
        //ulong decomp_position = 0;

        public AssetFile(string subpath, IMongoDatabase db, IMongoCollection<BakedAssets> bac, IMongoCollection<BakedVolumes> bVol = null)
        {
            this.db      = db;
            this.bac     = bac;
            this.subpath = subpath.ToLowerInvariant();   // These come in upper case occasionally

            // We may want to enforce a file hierarchy here, but for now we just need an
            // asset record to match (the key is simply the sha1, in lower-case text)

            var theFilter = Builders<BakedAssets>.Filter.Eq("_id", this.subpath);

            baRec = bac.FindSync(theFilter).FirstOrDefault();

            // This file could conceivably exist as an unannealed file, but we would
            // need a different abstraction here to read it -- we can generate a different
            // Stream, but we will need a different file service.
            // Also, this scheme needs access controls.
            if (baRec == null)
            {
                Console.WriteLine($"Can't file BakedAsset: _id: {this.subpath}");
                return;
            }

            var vCol = bVol ?? db.BakedVolumes();

            try
            {
                var volFilter = Builders<BakedVolumes>.Filter.Eq("_id", baRec.Volume);

                volRec = vCol.FindSync(volFilter).FirstOrDefault();
            }
            catch (Exception ex)
            {
                Console.WriteLine($"Error: ${ex.Message}");
            }

            unBaker = new UnbakeContext(db, volRec, baRec);

            //var channel = GrpcChannel.ForAddress("http://feanor:5000");
            //assetClient = new BakedVolumeData.BakedVolumeDataClient(channel);
        }
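
        // Usage sketch: how an AssetFile might be constructed. The connection string,
        // database name, and collection name below are assumptions for illustration;
        // only the constructor signature comes from the code above.
        private static AssetFile OpenAssetExample()
        {
            var client = new MongoClient("mongodb://localhost:27017");    // assumed connection
            var db     = client.GetDatabase("baked");                     // assumed database name
            var bac    = db.GetCollection<BakedAssets>("BakedAssets");    // assumed collection name

            // The key is the asset's sha1; the constructor lower-cases it for us
            return new AssetFile("0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33", db, bac);
        }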
            public FetchPayload OpenStream()
            {
                var retRec = new FetchPayload();

                var filter = Builders<BakedVolumes>.Filter.Eq("_id", Volume);

                VolumeRec = Bvs.FindSync(filter).FirstOrDefault();
                if (VolumeRec == null)
                {
                    retRec.Error = $"Volume: {Volume} does not exist";
                    return(retRec);
                }

                if (!VolumeRec.Parts.ContainsKey(Part))
                {
                    retRec.Error = $"Volume: {Volume} Part {Part} does not exist";
                    return(retRec);
                }

                var p = VolumeRec.Parts[Part];

                fName = Path.Combine(p.Path, p.Name);

                try
                {
                    binOpen      = new BinaryReader(File.Open(fName, FileMode.Open, FileAccess.Read));
                    VolumeStream = binOpen.BaseStream;
                }
                catch (Exception ex)
                {
                    retRec.Error = ex.Message;
                    if (logger != null)
                    {
                        logger.LogError(ex, $"Cannot open file {fName} for {Volume}/{Part}");
                    }
                    return(retRec);
                }

                //Console.WriteLine($"[Open Volume: {fName}]");

                return(retRec);
            }
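
            // Callers are expected to check FetchPayload.Error rather than catch exceptions.
            // A minimal usage sketch (the `part` variable is illustrative only):
            //
            //   var payload = part.OpenStream();
            //   if (!string.IsNullOrEmpty(payload.Error))
            //   {
            //       Console.WriteLine($"Open failed: {payload.Error}");
            //       return;
            //   }
            //   // VolumeStream is now an open, read-only stream over the part file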
 public MakeStream(BakedAssets baRec, BakedVolumes volRec, AssetFile file)
 {
     this.baRec  = baRec;
     this.volRec = volRec;
     fileCreator = file;
 }
            public UnbakeContext(IMongoDatabase db, BakedVolumes volRec, BakedAssets baRec)
            {
                this.db     = db;
                this.volRec = volRec;
                this.baRec  = baRec;

                //Console.WriteLine($"UnbakeContext: {volRec._id} file={baRec.Part} Offset={baRec.Offset}/Block={baRec.Block}");

                volumelength = volRec.ArchSize / volRec.NumParts;

                //Console.WriteLine($"Unbake Open: Length={volumelength} Arch={volRec.ArchSize} Parts={volRec.NumParts}");

                // Read starting settings

                //block = baRec.Block + 1;        // Block is pointer to tar directory entry (+1 is our content)
                //address = block << 9;

                //FileNum = Convert.ToInt32(address / volumelength);

                //offsetx = address % volumelength;
                //remain = volumelength - offsetx;


                // Check block

                var checkFileNum = baRec.Part[0] - (byte)'A';

                //var newBlock = baRec.Offset + ((ulong) checkFileNum * volumelength);
                //newBlock >>= 9;

                //if (baRec.Block != newBlock)
                //    Console.WriteLine($"File Block: {baRec.Block} != Computed: {newBlock}");

                // Alternate check computation

                var checkOffset = baRec.Offset + 512;
                var checkRemain = volumelength - checkOffset;

                FileNum = checkFileNum;
                offsetx = checkOffset;
                remain  = checkRemain;

                //if (FileNum != checkFileNum ||
                //    offsetx != checkOffset ||
                //    remain != checkRemain)
                //{
                //    Console.WriteLine($"Unbake Open: Org Offset={offsetx}, Remain={remain} File={FileNum}");
                //    Console.WriteLine($"Unbake Open: Tst Offset={checkOffset}, Remain={checkRemain} File={checkFileNum} - override");

                //    FileNum = checkFileNum;
                //    offsetx = checkOffset;
                //    remain = checkRemain;
                //}

                // File Info (meaning the file we're pulling out)

                realLength = baRec.RealLength;
                fileRemain = realLength;

                fileRead = 0;

                // Part Info (the piece of the archive we're reading)

                currentPartRemain = volumelength - offsetx;
                currentFileNum    = -1;
                currentPart       = null;

                //Console.WriteLine($"Compressed={realLength} Actual={baRec.FileLength}");

                //bytesRead = 0;
            }
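
            // Worked example of the offset arithmetic above (the numbers are illustrative):
            // with ArchSize = 4,000,000,000 and NumParts = 4, volumelength = 1,000,000,000.
            // For baRec.Part = "B" and baRec.Offset = 1,024:
            //   FileNum = 'B' - 'A'              = 1            (second part file)
            //   offsetx = Offset + 512           = 1,536        (+512 skips the 512-byte tar
            //                                                    header block to the content,
            //                                                    per the Block comment above)
            //   remain  = volumelength - offsetx = 999,998,464  bytes left in this part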