Example #1
        // BackgroundWorker handler that extracts the requested chunks from each field in an LGP archive
        void bw_DoWork(object sender, DoWorkEventArgs e)
        {
            ExtractArgs ea = (ExtractArgs)e.Argument;

            using (var fs = new System.IO.FileStream(ea.Input, System.IO.FileMode.Open)) {
                var df   = ProcMonParser.FF7Files.LoadLGP(fs, ea.Input);
                int file = 0;
                foreach (var item in df.Items)
                {
                    (sender as BackgroundWorker).ReportProgress(100 * file++ / df.Items.Count);
                    // Entries without an extension are the field files
                    if (System.IO.Path.GetExtension(item.Name).Length == 0)
                    {
                        // Skip the 24-byte entry header and read the raw field data
                        byte[] ff = new byte[item.Length - 24];
                        fs.Position = item.Start + 24;
                        fs.Read(ff, 0, ff.Length);
                        var chunks = _7thWrapperLib.FieldFile.Unchunk(ff);
                        if (chunks.Count > 0)
                        {
                            // Write each requested chunk (1-based) out to its own file
                            foreach (int i in ea.Chunks)
                            {
                                string fn = System.IO.Path.Combine(ea.Output, System.IO.Path.GetFileNameWithoutExtension(item.Name) + ".chunk." + i);
                                System.IO.File.WriteAllBytes(fn, chunks[i - 1]);
                            }
                        }
                    }
                }
            }
        }
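
The ExtractArgs class itself is not shown in this example. A minimal sketch of the payload shape it implies (Input, Output and a 1-based Chunks list) and of how the handler might be wired to a BackgroundWorker, assuming the usual System.ComponentModel and System.Collections.Generic usings and a hypothetical progressBar control, could look like this:

        // Hypothetical payload shape inferred from the handler above.
        class ExtractArgs
        {
            public string Input;        // path to the source LGP archive
            public string Output;       // directory that receives the extracted chunk files
            public List<int> Chunks;    // 1-based chunk numbers to write out
        }

        void StartExtract(string input, string output)
        {
            var bw = new BackgroundWorker { WorkerReportsProgress = true };
            bw.DoWork += bw_DoWork;
            bw.ProgressChanged += (s, e) => progressBar.Value = e.ProgressPercentage; // progressBar is assumed
            bw.RunWorkerAsync(new ExtractArgs { Input = input, Output = output, Chunks = new List<int> { 1 } });
        }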
Example #2
        // Validates the arguments, opens a connection and extracts the deployment history to the target directory
        public void Extract(ExtractArgs args)
        {
            args.Validate();

            var connectionString = args.GetConnectionString();

            using (var connection = new SqlConnection(connectionString))
            {
                connection.Open();

                Log.Information(
                    LoggingMessageTemplates.Extract,
                    args.SchemaName,
                    args.DeploymentId,
                    args.TargetDirectory,
                    args.DatabaseName,
                    args.ServerName,
                    args.NoVerify);

                DeploymentHistory.Extract(
                    args.DeploymentId,
                    connection,
                    args.SchemaName,
                    args.TargetDirectory,
                    args.NoVerify);

                connection.Close();
            }
        }
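
No call site is shown for this method. Assuming the ExtractArgs used here exposes settable properties matching the values it logs (the property types below are guesses) and that Validate() throws on invalid input, a caller might look roughly like this:

        var args = new ExtractArgs
        {
            ServerName      = "localhost",
            DatabaseName    = "MyDatabase",
            SchemaName      = "dbo",
            DeploymentId    = 42,
            TargetDirectory = @"C:\temp\extract",
            NoVerify        = false
        };

        new Extractor().Extract(args);   // Extractor stands in for the declaring class, which is not shown

Note that the explicit connection.Close() is harmless but redundant: disposing the SqlConnection at the end of the using block closes it anyway.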
Example #3
        // BackgroundWorker handler that unchunks a PSX field DAT and recompresses it into a new DAT
        void bw_CompressPSX(object sender, DoWorkEventArgs e)
        {
            ExtractArgs ea = (ExtractArgs)e.Argument;

            using (var fs = new FileStream(ea.Input, FileMode.Open))
            {
                string dat = Path.Combine(ea.Output, "NEW.DAT");

                byte[] ff = new byte[fs.Length];
                fs.Position = 0;
                fs.Read(ff, 0, ff.Length);

                List<byte[]> chunks = PSXFieldFile.Unchunk(ff, dat);

                // >>>>
                // Can adjust the uncompressed chunks here if need be before they go to recompression
                // Randomisation tasks should be called here, triggered based on section #
                // >>>>

                // Writes the first uncompressed chunk into the new DAT ahead of the recompressed field
                using (var stream = new FileStream(dat, FileMode.Append))
                {
                    stream.Write(chunks[0], 0, chunks[0].Length);
                }

                // Recompresses the chunks into a field
                var field = PSXFieldFile.Chunk(chunks, fs.Name);

                // Write the compressed output as a new DAT file
                using (var stream = new FileStream(dat, FileMode.Append))
                {
                    stream.Write(field, 0, field.Length);
                }
            }
        }
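
One caveat that applies to the whole-file reads in these handlers: Stream.Read is not guaranteed to fill the buffer in a single call, even for a FileStream. A small helper along these lines (hypothetical, not part of the example, assuming the same System.IO using) makes the read robust:

        // Reads the entire stream into a byte array, looping until the buffer is full.
        static byte[] ReadAllBytes(FileStream fs)
        {
            byte[] buffer = new byte[fs.Length];
            int total = 0;
            fs.Position = 0;
            while (total < buffer.Length)
            {
                int read = fs.Read(buffer, total, buffer.Length - total);
                if (read == 0)
                    throw new EndOfStreamException("Unexpected end of file");
                total += read;
            }
            return buffer;
        }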
Example #4
        // Runs the gtest binary with --gtest_list_tests and streams its output into the test-list parser
        private void Extract(ExtractArgs args)
        {
            GTestTestListParser parser = new GTestTestListParser(args.Suite);
            DataReceivedEventHandler handler = (sender, eventArgs) => parser.ParseLine(eventArgs.Data);
            parser.TestDiscovered += OnTestDiscovered;

            ProcessStartInfo gtestProcInfo = new ProcessStartInfo(args.Suite.RunTarget)
            {
                RedirectStandardOutput = true,
                CreateNoWindow = true,
                UseShellExecute = false,
                Arguments = "--gtest_list_tests"
            };

            Process gtestProc = new Process();
            gtestProc.StartInfo = gtestProcInfo;
            gtestProc.OutputDataReceived += handler;

            gtestProc.Start();
            gtestProc.BeginOutputReadLine();

            // Poll for exit so a pending cancellation on the worker can abort the wait early
            bool cancelled = false;
            while (!gtestProc.HasExited)
            {
                gtestProc.WaitForExit(500);
                if (_worker.CancellationPending)
                {
                    cancelled = true;
                    break;
                }
            }

            if (!cancelled)
            {
                gtestProc.WaitForExit(); // clear stdout buffer
            }

            gtestProc.OutputDataReceived -= handler;
            parser.TestDiscovered -= OnTestDiscovered;
            gtestProc.Close(); // release the process handle; calling WaitForExit after Close would throw
        }
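
The GTestTestListParser used above is not shown. For reference, --gtest_list_tests prints each suite as an unindented line ending in a dot, with the test names indented beneath it, and the parser presumably maps those lines to fully qualified test names. A standalone sketch of that mapping, purely illustrative and not the project's parser (assuming the usual System and System.Collections.Generic usings), might look like:

        // Sample --gtest_list_tests output:
        //   MathTest.
        //     Addition
        //     Subtraction
        static IEnumerable<string> ParseListing(IEnumerable<string> lines)
        {
            string suite = null;
            foreach (var line in lines)
            {
                if (string.IsNullOrWhiteSpace(line))
                    continue;
                if (!char.IsWhiteSpace(line[0]))
                    suite = line.Trim();                  // e.g. "MathTest."
                else if (suite != null)
                    yield return suite + line.Trim();     // e.g. "MathTest.Addition"
            }
        }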
Example #5
 /// <summary>
 ///     Extracts all the files in the specified archive to the specified directory
 ///     on the file system.
 /// </summary>
 /// <param name="srcFile">
 ///     The path of the archive to extract.
 /// </param>
 /// <param name="destDir">
 ///     The path to the directory to place the extracted files in.
 /// </param>
 /// <param name="windowStyle">
 ///     The window state to use when the process is started.
 /// </param>
 /// <param name="dispose">
 ///     <see langword="true"/> to release all resources used by the
 ///     <see cref="Process"/> component; otherwise, <see langword="false"/>.
 /// </param>
 /// <returns>
 ///     The started <see cref="Process"/>, or <see langword="null"/> if the extraction
 ///     executable could not be found.
 /// </returns>
 public Process Extract(string srcFile, string destDir, ProcessWindowStyle windowStyle = ProcessWindowStyle.Minimized, bool dispose = false) =>
     !File.Exists(ExtractExePath) ? null : ProcessEx.Start(ExtractExePath, ExtractArgs.FormatCurrent(srcFile, destDir), false, windowStyle, dispose);
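
ExtractExePath and ExtractArgs are members of the declaring class and are not shown; assuming they point at a command-line extractor and its argument template, a hypothetical call site could be:

 // archiver is a hypothetical instance of the declaring class.
 var process = archiver.Extract(@"C:\downloads\package.zip", @"C:\downloads\extracted");
 if (process == null)
 {
     // ExtractExePath did not exist, so nothing was started.
 }
 else
 {
     process.WaitForExit(); // safe here because dispose defaults to false
 }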
Example #6
        // Splits each field in an flevel into its chunks, optionally randomises them, then re-assembles the archive
        void bw_Compress(object sender, DoWorkEventArgs e)
        {
            ExtractArgs ea = (ExtractArgs)e.Argument;

            using (var fs = new FileStream(ea.Input, FileMode.Open))
            {
                var           df     = FF7Files.LoadLGP(fs, ea.Input);
                int           file   = 0;
                List <byte[]> chunks = new List <byte[]>();
                //string flev = Path.Combine(ea.Output, Path.GetFileName("flevel.lgp"));
                string flev = Directory.GetCurrentDirectory() + "\\PC\\Output File\\flevel.lgp";

                // Copies the original ToC and CRC block into the new flevel; the ToC offsets are patched below
                byte[] addStart = new byte[23301];
                fs.Position = 0;
                fs.Read(addStart, 0, addStart.Length);
                using (var appendStart = new FileStream(flev, FileMode.Append))
                {
                    appendStart.Write(addStart, 0, addStart.Length);
                }

                int   tocPointer  = 36;     // Holds pointer location for where to write new offset for ToC
                ulong fieldOffset = 0x5B05; // Holds offset value to write into tocPointer
                int   fieldCount  = 0;      // Counts fields
                foreach (var item in df.Items)
                {
                    (sender as BackgroundWorker).ReportProgress(100 * file++ / df.Items.Count);
                    // This route is for field files
                    if (Path.GetExtension(item.Name).Length == 0 &&
                        item.Name != "maplist"
                        // These files 'shrink' when included; seem functional but excluded for now
                        && item.Name != "blackbgb" &&
                        item.Name != "frcyo" &&
                        item.Name != "fship_4" &&
                        item.Name != "las0_8" &&
                        item.Name != "las2_1" &&
                        item.Name != "uutai1"
                        )
                    {
                        byte[] ff = new byte[item.Length - 24];
                        fs.Position = item.Start + 24;
                        fs.Read(ff, 0, ff.Length);
                        chunks = FieldFile.Unchunk(ff);

                        // >>>>
                        // Can adjust the uncompressed chunks here if need be before they go to recompression
                        // Randomisation tasks should be called here, triggered based on section #
                        // >>>>

                        // If using consistent allocation of models, then that logic should be done somewhere here so
                        // that all fields passing through have access to the newly assigned HRC strings and can pass
                        // them through to the rando logic.

                        // Sends Field Script chunk of field to be randomised
                        if (chkItems.Checked)
                        {
                            chunks[0] = FieldScript.ChangeItemsMateria(chunks[0], item.Name);
                        }

                        // Sends Model Loader chunk of field to be randomised
                        if (chkModels.Checked)
                        {
                            chunks[2] = ModelLoader.SwapFieldModels(chunks[2]);
                        }

                        // Recompresses the chunks into a field
                        var field = FieldFile.Chunk(chunks, item.Name);

                        // Skip the first ToC offset as this won't change
                        if (fieldCount != 0)
                        {
                            // Advances to the next 27-byte ToC entry so its offset field can be patched
                            tocPointer += 27;
                            byte[] byteOffset = EndianMethods.GetLittleEndianConvert(fieldOffset);
                            using (Stream stream = File.Open(flev, FileMode.Open))
                            {
                                stream.Position = tocPointer;
                                stream.Write(byteOffset, 0, 4);
                            }
                        }

                        // Takes the size of the chunked field; used to determine next offset for ToC
                        fieldOffset += (ulong)field.Length;
                        fieldCount++;

                        // Writes it into the new flevel
                        using (var stream = new FileStream(flev, FileMode.Append))
                        {
                            stream.Write(field, 0, field.Length);
                        }
                    }
                    // This route is for non-field files
                    else
                    {
                        byte[] field = new byte[item.Length];
                        fs.Position = item.Start;
                        fs.Read(field, 0, field.Length);

                        // Advances to the next 27-byte ToC entry so its offset field can be patched
                        tocPointer += 27;
                        byte[] byteOffset = EndianMethods.GetLittleEndianConvert(fieldOffset);
                        using (Stream stream = File.Open(flev, FileMode.Open))
                        {
                            stream.Position = tocPointer;
                            stream.Write(byteOffset, 0, 4);
                        }
                        // Takes the size of the misc file
                        fieldOffset += (ulong)field.Length;
                        fieldCount++;

                        // Writes it into the new flevel
                        using (var stream = new FileStream(flev, FileMode.Append))
                        {
                            stream.Write(field, 0, field.Length);
                        }
                    }
                }
                // Adds the final terminating string ("FINAL FANTASY7" in ASCII) to the flevel
                byte[] terminate = new byte[] { 0x46, 0x49, 0x4E, 0x41, 0x4C, 0x20, 0x46, 0x41, 0x4E, 0x54, 0x41, 0x53, 0x59, 0x37 };
                using (var finalStream = new FileStream(flev, FileMode.Append))
                {
                    finalStream.Write(terminate, 0, terminate.Length);
                }
            }
        }
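
The EndianMethods helper is not shown in this example. A minimal sketch of what GetLittleEndianConvert is assumed to do, producing the offset's bytes in little-endian order so that the first four can be patched into each 27-byte ToC entry (assuming a using for System), could be:

        static class EndianMethods
        {
            // Hypothetical reconstruction: returns the value's bytes in little-endian order.
            public static byte[] GetLittleEndianConvert(ulong value)
            {
                byte[] bytes = BitConverter.GetBytes(value);
                if (!BitConverter.IsLittleEndian)
                    Array.Reverse(bytes);
                return bytes;
            }
        }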