/// <summary>
/// Appends a key/pointer record to the end of the index file and updates the in-memory index.
/// Older records for the same key are intentionally left in place: records are append-only,
/// so on read the entry written last is the authoritative one.
/// </summary>
/// <param name="key">Key to associate with the pointer; must not be null.</param>
/// <param name="ptr">Pointer payload to persist; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="key"/> or <paramref name="ptr"/> is null.</exception>
public void Set(TKey key, IPtr ptr)
{
    if (key == null)
    {
        throw new ArgumentNullException(nameof(key));
    }

    if (ptr == null)
    {
        throw new ArgumentNullException(nameof(ptr));
    }

    // Serialize outside the lock to keep the critical section as short as possible.
    var keydata = serializeKey(key);
    var ptrdata = ptr.Serialize();
    var lenkey = BitConverter.GetBytes((short)keydata.Length);
    var lenptr = BitConverter.GetBytes((short)ptrdata.Length);

    long position;
    Idx newidx;
    lock (indexFile)
    {
        // Record layout: [1-byte marker][2-byte key length][key bytes][2-byte ptr length][ptr bytes].
        position = indexFile.Seek(0, SeekOrigin.End);
        indexFile.WriteByte(1);
        indexFile.Write(lenkey, 0, 2);
        indexFile.Write(keydata, 0, keydata.Length);
        indexFile.Write(lenptr, 0, 2);
        indexFile.Write(ptrdata, 0, ptrdata.Length);

        newidx = new Idx() { IndexFilePosition = position, Ptr = ptr };
        indexes.AddOrUpdate(key, newidx, (key1, idx) => newidx);
    }

    // We do not care at all what was stored before us: all new records are appended to the
    // end of the file, i.e. when reading the file the correct entry is the one written last.
#if DEBUG
    onSet(key, newidx.Ptr);
#endif
}
/// <summary>
/// Injects <see cref="InputFile"/> into a KH2 ISO image: locates KH2.IDX/KH2.IMG, finds the
/// IDX entry matching <see cref="FilePath"/>, overwrites the file's data inside the IMG and
/// patches the IDX entry accordingly.
/// </summary>
/// <param name="app">Command line application context (not used directly).</param>
/// <returns>0 on success.</returns>
/// <exception cref="IOException">When the LBA offsets of KH2.IDX or KH2.IMG cannot be located.</exception>
/// <exception cref="CustomException">When the IDX block looks invalid, the target file is not found, or the replacement data is too big.</exception>
protected int OnExecute(CommandLineApplication app)
{
    const long EstimatedMaximumImgFileSize = 4L * 1024 * 1024 * 1024; // 4GB
    const int EstimatedMaximumIdxFileSize = 600 * 1024; // 600KB
    const int EstimatedMaximumIdxEntryAmountToBeValid = EstimatedMaximumIdxFileSize / 0x10 - 4;

    using var isoStream = File.Open(InputIso, FileMode.Open, FileAccess.ReadWrite);
    if (IdxIsoBlock == -1 || ImgIsoBlock == -1)
    {
        // Scan the ISO directory area for the "KH2.IMG;1" / "KH2.IDX;1" identifier records.
        const int needleLength = 0x0B;
        var imgNeedle = new byte[needleLength] { 0x01, 0x09, 0x4B, 0x48, 0x32, 0x2E, 0x49, 0x4D, 0x47, 0x3B, 0x31 };
        var idxNeedle = new byte[needleLength] { 0x01, 0x09, 0x4B, 0x48, 0x32, 0x2E, 0x49, 0x44, 0x58, 0x3B, 0x31 };
        const uint basePosition = 0x105 * 0x800;
        for (int i = 0; i < 0x500; i++)
        {
            isoStream.Position = basePosition + i;
            var hayRead = isoStream.ReadBytes(needleLength);
            var idxCmp = hayRead.SequenceEqual(idxNeedle);
            var imgCmp = hayRead.SequenceEqual(imgNeedle);
            if (imgCmp || idxCmp)
            {
                // The LBA is stored 0x24 bytes before the identifier; bytes are reversed before decoding.
                isoStream.Position -= 0x24;
                var blockStack = isoStream.ReadBytes(0x04);
                var blockCorrect = new byte[0x04] { blockStack[3], blockStack[2], blockStack[1], blockStack[0] };
                if (idxCmp && IdxIsoBlock == -1)
                {
                    IdxIsoBlock = BitConverter.ToInt32(blockCorrect);
                }
                else if (imgCmp && ImgIsoBlock == -1)
                {
                    ImgIsoBlock = BitConverter.ToInt32(blockCorrect);
                }
            }
        }

        if (IdxIsoBlock == -1 || ImgIsoBlock == -1)
        {
            throw new IOException("Could not determine the LBA Offsets of KH2.IDX or KH2.IMG, is this ISO valid?");
        }
    }

    var idxStream = OpenIsoSubStream(isoStream, IdxIsoBlock, EstimatedMaximumIdxFileSize);
    using var imgStream = OpenIsoSubStream(isoStream, ImgIsoBlock, EstimatedMaximumImgFileSize);

    // Sanity check: an implausibly large entry count means we are not looking at a real IDX.
    var idxEntryCount = idxStream.ReadInt32();
    if (idxEntryCount > EstimatedMaximumIdxEntryAmountToBeValid)
    {
        throw new CustomException("There is a high chance that the IDX block is not valid, therefore the injection will terminate to avoid corruption.");
    }

    var idxEntries = Idx.Read(idxStream.SetPosition(0));
    var entry = idxEntries.FirstOrDefault(x => x.Hash32 == Idx.GetHash32(FilePath) && x.Hash16 == Idx.GetHash16(FilePath));
    if (entry == null)
    {
        // Not in the top-level IDX; search the sub-IDX files embedded inside the IMG.
        idxStream = GetIdxStreamWhichContainsTargetedFile(idxEntries, imgStream, FilePath);
        if (idxStream == null)
        {
            throw new CustomException($"The file {FilePath} has not been found inside the KH2.IDX, therefore the injection will terminate.");
        }

        idxEntries = Idx.Read(idxStream.SetPosition(0));
        entry = idxEntries.FirstOrDefault(x => x.Hash32 == Idx.GetHash32(FilePath) && x.Hash16 == Idx.GetHash16(FilePath));
    }

    var inputData = File.ReadAllBytes(InputFile);
    var decompressedLength = inputData.Length;
    if (Uncompressed == false)
    {
        inputData = Img.Compress(inputData);
    }

    // Round up to 0x800-byte blocks; the replacement must fit in the slot reserved by the entry.
    var blockCountRequired = (inputData.Length + 0x7ff) / 0x800 - 1;
    if (blockCountRequired > entry.BlockLength)
    {
        throw new CustomException($"The file to inject is too big: the actual is {inputData.Length} but the maximum allowed is {GetLength(entry.BlockLength)}.");
    }

    imgStream.SetPosition(GetOffset(entry.Offset));
    // Clean completely the content of the previous file to not mess up the decompression
    imgStream.Write(new byte[GetLength(entry.BlockLength)]);
    imgStream.SetPosition(GetOffset(entry.Offset));
    imgStream.Write(inputData);

    entry.IsCompressed = !Uncompressed;
    entry.Length = decompressedLength;
    // we are intentionally not patching entry.BlockLength because it would not allow to insert back bigger files.
    Idx.Write(idxStream.SetPosition(0), idxEntries);

    return 0;
}
/// <summary>
/// Executes a single RETS Search transaction for the given query, optionally saving the raw
/// XML response to disk and/or persisting the deserialized result to the database, then
/// downloads every large photo attached to the returned listing.
/// </summary>
/// <param name="SearchType">RETS resource to search (e.g. Property).</param>
/// <param name="Class">RETS class within the resource.</param>
/// <param name="QueryType">Query language identifier (e.g. DMQL2).</param>
/// <param name="Query">Listing ID; wrapped as "(ID=&lt;value&gt;)" in the request.</param>
/// <param name="saveFile">When true, the raw XML response is saved under the configured load folder.</param>
/// <param name="PersistOnDatabase">When true, the deserialized result is persisted.</param>
/// <param name="Count">RETS Count argument.</param>
/// <param name="Limit">RETS Limit argument.</param>
/// <param name="Offset">RETS Offset argument.</param>
/// <param name="Culture">Culture of the returned data.</param>
/// <param name="Format">Response format requested from the server.</param>
public static void SearchTransactionOneByOne(string SearchType, string Class, string QueryType, string Query, Boolean saveFile, Boolean PersistOnDatabase, int Count = 1, string Limit = "None", int Offset = 1, string Culture = "en-CA", string Format = "STANDARD-XML")
{
    string requestArguments =
        "?Format=" + Format +
        "&SearchType=" + SearchType +
        "&Class=" + Class +
        "&QueryType=" + QueryType +
        "&Query=" + String.Format("(ID={0})", Query) +
        "&Count=" + Count +
        "&Limit=" + Limit +
        "&Offset=" + Offset +
        "&Culture=" + Culture;
    string searchService = RetsUrl + "/Search.svc/Search" + requestArguments;

    httpWebRequest = (HttpWebRequest)WebRequest.Create(searchService);
    httpWebRequest.CookieContainer = cookieJar;       //GRAB THE COOKIE
    httpWebRequest.Credentials = requestCredentials;  //PASS CREDENTIALS

    DumpJson dump = DumpJson.Get();
    try
    {
        using (HttpWebResponse httpResponse = httpWebRequest.GetResponse() as HttpWebResponse)
        using (Stream stream = httpResponse.GetResponseStream()) // dispose the response stream explicitly
        {
            String Content = String.Empty;
            if (saveFile)
            {
                // Persist the raw response to disk, then read it back for processing.
                // Path.Combine avoids the hard-coded '\' separator of the previous String.Format.
                String filecomplete = Path.Combine(AppSettings.GetAppSettings().load_folder, Query + ".xml");
                using (Stream file = File.Create(filecomplete))
                {
                    CopyStream(stream, file);
                }
                Content = Archive.ReadFile(filecomplete);
            }
            else
            {
                Content = Treatment.StreamToString(stream);
            }

            // Normalize known payload quirks before XML deserialization.
            Content = Treatment.Replaces(Content);
            XmlSerializer serializer = new XmlSerializer(typeof(Idx));
            using (TextReader reader = new StringReader(Content))
            {
                Idx result = (Idx)serializer.Deserialize(reader);
                if (PersistOnDatabase)
                {
                    result.Persist(dump);
                }

                // Fetch every large photo attached to this listing, one request per photo index.
                Int32 TotalPhotos = Treatment.GetPhotosQuantity(result);
                for (int i = 0; i < TotalPhotos; i++)
                {
                    GetImagesFromId(Query, Model.Type.TYPE_LARGET_PHOTO, String.Format("{0}", i));
                }
            }
        }
    }
    catch (Exception ex)
    {
        // Best-effort: log and continue so one failed listing does not abort a batch run.
        Console.WriteLine(ex);
    }
}
/// <summary>
/// Verifies that <see cref="Idx.GetHash16"/> produces the expected 16-bit hash for the given text.
/// </summary>
/// <param name="text">Input string to hash.</param>
/// <param name="hash">Expected 16-bit hash value.</param>
public void CalculateHash16(string text, ushort hash)
{
    var actual = Idx.GetHash16(text);
    Assert.Equal(hash, actual);
}
/// <summary>
/// Verifies that <see cref="Idx.GetHash32"/> produces the expected 32-bit hash for the given text.
/// </summary>
/// <param name="text">Input string to hash.</param>
/// <param name="hash">Expected 32-bit hash value.</param>
public void CalculateHash32(string text, uint hash)
{
    var actual = Idx.GetHash32(text);
    Assert.Equal(hash, actual);
}
/// <summary>
/// Injects <see cref="InputFile"/> into a KH2 ISO image: locates KH2.IDX/KH2.IMG via the
/// ISO9660 directory, finds the IDX entry matching <see cref="FilePath"/>, overwrites the
/// file's data inside the IMG and patches the IDX entry accordingly.
/// </summary>
/// <param name="app">Command line application context (not used directly).</param>
/// <returns>0 on success.</returns>
/// <exception cref="IOException">When the LBA offsets of KH2.IDX or KH2.IMG cannot be located.</exception>
/// <exception cref="CustomException">When the IDX block looks invalid, the target file is not found, or the replacement data is too big.</exception>
protected int OnExecute(CommandLineApplication app)
{
    const long EstimatedMaximumImgFileSize = 4L * 1024 * 1024 * 1024; // 4GB
    const int EstimatedMaximumIdxFileSize = 600 * 1024; // 600KB
    const int EstimatedMaximumIdxEntryAmountToBeValid = EstimatedMaximumIdxFileSize / 0x10 - 4;

    using var isoStream = File.Open(InputIso, FileMode.Open, FileAccess.ReadWrite);
    if (IdxIsoBlock == -1 || ImgIsoBlock == -1)
    {
        // Resolve the LBA offsets through the ISO directory records.
        var bufferedStream = new BufferedStream(isoStream);
        IdxIsoBlock = IsoUtility.GetFileOffset(bufferedStream, "KH2.IDX;1");
        ImgIsoBlock = IsoUtility.GetFileOffset(bufferedStream, "KH2.IMG;1");
        if (IdxIsoBlock == -1 || ImgIsoBlock == -1)
        {
            throw new IOException("Could not determine the LBA Offsets of KH2.IDX or KH2.IMG, is this ISO valid?");
        }
    }

    var idxStream = OpenIsoSubStream(isoStream, IdxIsoBlock, EstimatedMaximumIdxFileSize);
    using var imgStream = OpenIsoSubStream(isoStream, ImgIsoBlock, EstimatedMaximumImgFileSize);

    // Sanity check: an implausibly large entry count means we are not looking at a real IDX.
    var idxEntryCount = idxStream.ReadInt32();
    if (idxEntryCount > EstimatedMaximumIdxEntryAmountToBeValid)
    {
        throw new CustomException("There is a high chance that the IDX block is not valid, therefore the injection will terminate to avoid corruption.");
    }

    var idxEntries = Idx.Read(idxStream.SetPosition(0));
    var entry = idxEntries.FirstOrDefault(x => x.Hash32 == Idx.GetHash32(FilePath) && x.Hash16 == Idx.GetHash16(FilePath));
    if (entry == null)
    {
        // Not in the top-level IDX; search the sub-IDX files embedded inside the IMG.
        idxStream = GetIdxStreamWhichContainsTargetedFile(idxEntries, imgStream, FilePath);
        if (idxStream == null)
        {
            throw new CustomException($"The file {FilePath} has not been found inside the KH2.IDX, therefore the injection will terminate.");
        }

        idxEntries = Idx.Read(idxStream.SetPosition(0));
        entry = idxEntries.FirstOrDefault(x => x.Hash32 == Idx.GetHash32(FilePath) && x.Hash16 == Idx.GetHash16(FilePath));
    }

    var inputData = File.ReadAllBytes(InputFile);
    var decompressedLength = inputData.Length;
    if (Uncompressed == false)
    {
        inputData = Img.Compress(inputData);
    }

    // Round up to 0x800-byte blocks; the replacement must fit in the slot reserved by the entry.
    var blockCountRequired = (inputData.Length + 0x7ff) / 0x800 - 1;
    if (blockCountRequired > entry.BlockLength)
    {
        throw new CustomException($"The file to inject is too big: the actual is {inputData.Length} but the maximum allowed is {GetLength(entry.BlockLength)}.");
    }

    imgStream.SetPosition(GetOffset(entry.Offset));
    // Clean completely the content of the previous file to not mess up the decompression
    imgStream.Write(new byte[GetLength(entry.BlockLength)]);
    imgStream.SetPosition(GetOffset(entry.Offset));
    imgStream.Write(inputData);

    entry.IsCompressed = !Uncompressed;
    entry.Length = decompressedLength;
    // we are intentionally not patching entry.BlockLength because it would not allow to insert back bigger files.
    Idx.Write(idxStream.SetPosition(0), idxEntries);

    return 0;
}
/// <summary>
/// Opens an IDX file from disk and deserializes it; the underlying stream is
/// closed before returning.
/// </summary>
/// <param name="fileName">Path of the IDX file to read.</param>
/// <returns>The parsed IDX content.</returns>
private static Idx OpenIdx(string fileName)
{
    using var idxStream = File.OpenRead(fileName);
    return Idx.Read(idxStream);
}
/// <summary>
/// Wraps an IDX/IMG stream pair as a data content source: the IDX stream is read
/// eagerly and the resulting table is handed to the IMG wrapper.
/// </summary>
/// <param name="idxStream">Stream containing the IDX table.</param>
/// <param name="imgStream">Stream containing the IMG archive data.</param>
public IdxDataContent(Stream idxStream, Stream imgStream)
{
    var entries = Idx.Read(idxStream);
    _idx = entries;
    _img = new Img(imgStream, entries, false);
}
/// <summary>
/// Get lines of a given index type and value.
/// </summary>
/// <param name="IndexType">Name of the trait index to look up.</param>
/// <param name="IndexValue">Value to match within that index.</param>
/// <returns>The matching log entries, or an empty collection when the index type is unknown.</returns>
/// <remarks>Never returns null; an unknown <paramref name="IndexType"/> yields an empty collection.</remarks>
public LogEntryCollection<LogEntry> Lines(string IndexType, string IndexValue)
{
    // Local renamed from "Idx" so it no longer shadows the Idx type used elsewhere.
    TraitTypes.TryGetValue(IndexType, out var traitIndex);
    return traitIndex?.LinesFromTraitValue(IndexValue) ?? new LogEntryCollection<LogEntry>();
}
/// <summary>
/// Builds the form-encoded parameter collection for the SMS send request:
/// credentials, recipients, message text, boolean flags and optional metadata.
/// Parameters are added in the same order the API has always received them.
/// </summary>
/// <returns>The populated request parameter collection.</returns>
protected override NameValueCollection Values()
{
    // The API expects boolean flags serialized as "1"/"0".
    string Flag(bool value) => value ? "1" : "0";

    var collection = new NameValueCollection
    {
        { "format", "json" },
        { "username", Credentials.Username },
        { "password", Credentials.Password }
    };

    if (Sender != null)
    {
        collection.Add("from", Sender);
    }
    if (To != null)
    {
        collection.Add("to", String.Join(",", To));
    }
    if (Group != null)
    {
        collection.Add("group", Group);
    }

    collection.Add("message", Text);
    collection.Add("single", Flag(Single));
    collection.Add("nounicode", Flag(NoUnicode));
    collection.Add("flash", Flag(Flash));
    collection.Add("fast", Flag(Fast));

    if (DataCoding != null)
    {
        collection.Add("datacoding", DataCoding);
    }
    if (MaxParts > 0)
    {
        collection.Add("max_parts", MaxParts.ToString());
    }
    if (DateSent != null)
    {
        collection.Add("date", DateSent);
    }
    if (DateExpire != null)
    {
        collection.Add("expiration_date", DateExpire);
    }
    if (Partner != null)
    {
        collection.Add("partner_id", Partner);
    }

    collection.Add("encoding", Encoding);

    if (Normalize == true)
    {
        collection.Add("normalize", "1");
    }
    if (Test == true)
    {
        collection.Add("test", "1");
    }
    if (Idx != null && Idx.Any())
    {
        collection.Add("check_idx", Flag(IdxCheck));
        collection.Add("idx", string.Join("|", Idx));
    }
    if (Details == true)
    {
        collection.Add("details", "1");
    }

    if (Params != null)
    {
        // Parameters are 1-based on the wire: param1, param2, ...
        for (int i = 0; i < Params.Length; i++)
        {
            if (Params[i] != null)
            {
                collection.Add("param" + (i + 1).ToString(), Params[i]);
            }
        }
    }

    return collection;
}