/// <summary>
/// Issues an asynchronous file read operation and returns a <see cref="ReadHandle"/>.
/// </summary>
/// <param name="filename">The filename to read from.</param>
/// <param name="readCmds">A pointer to an array of ReadCommand structs that specify offset, size, and destination buffer.</param>
/// <param name="readCmdCount">The number of read commands pointed to by readCmds.</param>
/// <returns>A handle used to monitor the progress and status of the read command.</returns>
public static unsafe ReadHandle Read(string filename, ReadCommand *readCmds, uint readCmdCount)
{
    // Only a single, whole-file read is supported for now: the sole consumer is
    // the AsyncReadManager usage in Unity.Scenes, which has no other use case.
    Assert.IsTrue(readCmdCount == 1);
    Assert.IsTrue(readCmds[0].Offset == 0);

    // A fresh buffer is always provided for the read itself, because the on-disk
    // data is compressed and must later be decoded into the caller's buffer.
    var handle = default(ReadHandle);
    handle.mAsyncOp = IOService.RequestAsyncRead(filename);
#if !UNITY_SINGLETHREADED_JOBS
    handle.mJobHandle = new ReadJob { m_Op = handle.mAsyncOp }.Schedule();
#endif
    return handle;
}
/// <summary>
/// Issues a read of this file described by the given read commands.
/// </summary>
/// <param name="readCmds">Pointer to the array of read commands to execute.</param>
/// <param name="cmdCount">Number of commands in <paramref name="readCmds"/>.</param>
/// <param name="mode">Async returns immediately; Blocking waits for completion before returning.</param>
/// <returns>The handle for the scheduled read.</returns>
public ReadHandle Read(ReadCommand *readCmds, uint cmdCount, ReadMode mode = ReadMode.Async)
{
    ReadHandle result = AsyncReadManager.Read(FilePath, readCmds, cmdCount);
    // A blocking read is simply an async read that is waited on before returning.
    if (mode == ReadMode.Blocking)
    {
        result.JobHandle.Complete();
    }
    return result;
}
/// <summary>
/// Schedules an async read of <paramref name="readSize"/> bytes of a constant-size-element
/// array entry, starting at <paramref name="firstElement"/>, into <paramref name="dst"/>.
/// The read is split into one ReadCommand per storage chunk, since consecutive chunks
/// are not guaranteed to be contiguous in the file.
/// </summary>
/// <param name="entry">The entry whose backing block is read.</param>
/// <param name="firstElement">Index of the first element to read.</param>
/// <param name="readSize">Total number of bytes to read.</param>
/// <param name="dst">Destination buffer; must hold at least <paramref name="readSize"/> bytes.</param>
/// <returns>The handle of the scheduled async read.</returns>
unsafe ReadHandle ScheduleConstSizeElementArrayRead(Entry entry, uint firstElement, long readSize, void *dst)
{
    var block = m_Blocks[(int)entry.Header.BlockIndex];
    // Byte offset of the first element inside the block (EntriesMeta is the element stride here).
    var blockOffset = entry.Header.EntriesMeta * firstElement;
    var chunkSize = block.Header.ChunkSize;
    var chunkIndex = (uint)(blockOffset / chunkSize);
    var chunk = block.GetOffsetsPtr() + chunkIndex;
    // File offset of the first byte: chunk's file offset plus the intra-chunk offset.
    var chunkWithLocalOffset = *chunk + (uint)(blockOffset % chunkSize);
    byte *dstPtr = (byte *)dst;
    using (NativeBlockList <ReadCommand> chunkReads = new NativeBlockList <ReadCommand>(64, 64))
    {
        // BUGFIX: the first command must be clamped to the remainder of the first chunk.
        // Previously it requested the full readSize starting mid-chunk, over-reading past
        // the chunk boundary whenever the request spanned multiple chunks (which need not
        // be contiguous in the file).
        var firstReadSize = Math.Min(readSize, (long)(chunkSize - (blockOffset % chunkSize)));
        chunkReads.Push(ReadCommandBufferUtils.GetCommand(dstPtr, firstReadSize, chunkWithLocalOffset));
        dstPtr += firstReadSize;
        readSize -= firstReadSize;
        // Remaining data starts at the beginning of each subsequent chunk.
        while (readSize > 0)
        {
            ++chunkIndex;
            var chunkReadSize = Math.Min(readSize, (long)chunkSize);
            chunk = block.GetOffsetsPtr() + chunkIndex;
            chunkReads.Push(ReadCommandBufferUtils.GetCommand(dstPtr, chunkReadSize, *chunk));
            dstPtr += chunkReadSize;
            readSize -= chunkReadSize;
        }
        //TODO: find a way to use the block array chunks directly for scheduling, probably add readcommandbuffer
        using (var tempCmds = new NativeArray <ReadCommand>((int)chunkReads.Count, Allocator.TempJob))
        {
            ReadCommand *cmdPtr = (ReadCommand *)tempCmds.GetUnsafePtr();
            for (int i = 0; i < tempCmds.Length; ++i)
            {
                *(cmdPtr + i) = chunkReads[i];
            }
            // NOTE(review): tempCmds is disposed when this using block exits while the read is
            // still Async/in-flight; this assumes the reader copies the commands at schedule
            // time — TODO confirm against m_FileReader.Read's contract.
            return m_FileReader.Read(cmdPtr, (uint)tempCmds.Length, ReadMode.Async);
        }
    }
}
/// <summary>
/// Reads the BlockHeader at each offset in <paramref name="blockOffsets"/>, constructs a
/// Block per header, then reads each block's chunk-offset table into the block's storage.
/// Both passes batch one ReadCommand per block and execute them as a single blocking read.
/// </summary>
/// <param name="file">The open file to read from.</param>
/// <param name="blockOffsets">File offsets of the block headers.</param>
/// <param name="blockStorage">Receives the constructed blocks (Persistent allocation; caller owns disposal, including on failure).</param>
/// <returns>Success, or FileReadFailed if either blocking read does not complete.</returns>
unsafe static ReadError BuildDataBlocks(LowLevelFileReader file, NativeArray <long> blockOffsets, out NativeArray <Block> blockStorage)
{
    blockStorage = new NativeArray <Block>(blockOffsets.Length, Allocator.Persistent);
    using (var blockReads = new NativeArray <ReadCommand>(blockOffsets.Length, Allocator.Temp))
    {
        using (var headers = new NativeArray <BlockHeader>(blockOffsets.Length, Allocator.Temp))
        {
            BlockHeader *headerPtr = (BlockHeader *)headers.GetUnsafePtr();
            ReadCommand *commandPtr = (ReadCommand *)blockReads.GetUnsafePtr();
            // Pass 1: queue one header read per block offset.
            for (int i = 0; i < blockStorage.Length; ++i)
            {
                var blockOffset = blockOffsets[i];
                *commandPtr++ = ReadCommandBufferUtils.GetCommand(headerPtr++, sizeof(BlockHeader), blockOffset);
            }
            commandPtr = (ReadCommand *)blockReads.GetUnsafePtr();
            if (file.Read((ReadCommand *)blockReads.GetUnsafePtr(), (uint)blockReads.Length, ReadMode.Blocking).Status != ReadStatus.Complete)
            {
                return(ReadError.FileReadFailed);
            }
            // Pass 2: construct each Block from its (now populated) header and queue a read
            // of its chunk-offset table, which is stored immediately after the header.
            for (int i = 0; i < blockStorage.Length; ++i)
            {
                var blockOffset = blockOffsets[i];
                var block = new Block(headers[i]);
                blockStorage[i] = block;
                *commandPtr++ = ReadCommandBufferUtils.GetCommand(block.GetOffsetsPtr(), sizeof(long) * block.OffsetCount, blockOffset + sizeof(BlockHeader));
            }
            commandPtr = (ReadCommand *)blockReads.GetUnsafePtr();
            if (file.Read((ReadCommand *)blockReads.GetUnsafePtr(), (uint)blockReads.Length, ReadMode.Blocking).Status != ReadStatus.Complete)
            {
                return(ReadError.FileReadFailed);
            }
        }
    }
    return(ReadError.Success);
}
/// <summary>
/// Issues an asynchronous read of <paramref name="filename"/> described by the given
/// read commands, forwarding all metadata to the internal implementation.
/// </summary>
public static ReadHandle Read(string filename, ReadCommand *readCmds, uint readCmdCount, string assetName = "", UInt64 typeID = 0, AssetLoadingSubsystem subsystem = AssetLoadingSubsystem.Scripts)
    => ReadInternal(filename, readCmds, readCmdCount, assetName, typeID, subsystem);
/// <summary>
/// Issues an asynchronous read of <paramref name="filename"/> described by the given
/// read commands; thin forwarder to the internal implementation.
/// </summary>
public static ReadHandle Read(string filename, ReadCommand *readCmds, uint readCmdCount)
    => ReadInternal(filename, readCmds, readCmdCount);
/// <summary>
/// Issues an asynchronous read of <paramref name="filename"/> described by the given
/// read commands, forwarding to AsyncReadManager's internal entry point (which takes
/// the command buffer as an untyped pointer).
/// </summary>
public unsafe static ReadHandle Read(string filename, ReadCommand *readCmds, uint readCmdCount, string assetName = "", ulong typeID = 0uL, AssetLoadingSubsystem subsystem = AssetLoadingSubsystem.Scripts)
    => AsyncReadManager.ReadInternal(filename, (void *)readCmds, readCmdCount, assetName, typeID, subsystem);
/// <summary>
/// Reads the EntryHeader for every present entry type (offset != 0), constructs the Entry
/// table, reads the per-element offset arrays for DynamicSizeElementArray entries, and
/// finally rotates each such offset array so the total size ends up in the header meta slot.
/// </summary>
/// <param name="file">The open file to read from.</param>
/// <param name="entryTypeOffsets">File offset of each entry type's header; 0 means the type is absent.</param>
/// <param name="entryStorage">Receives the constructed entries (Persistent allocation; caller owns disposal, including on failure).</param>
/// <returns>Success, or FileReadFailed if either blocking read does not complete.</returns>
unsafe static ReadError BuildDataEntries(LowLevelFileReader file, NativeArray <long> entryTypeOffsets, out NativeArray <Entry> entryStorage)
{
    entryStorage = new NativeArray <Entry>(entryTypeOffsets.Length, Allocator.Persistent);
    using (var headers = new NativeArray <EntryHeader>(entryTypeOffsets.Length, Allocator.TempJob, NativeArrayOptions.ClearMemory))
    {
        uint writtenCommands = 0;
        EntryHeader *headersPtr = (EntryHeader *)headers.GetUnsafePtr();
        using (var readCommands = new NativeArray <ReadCommand>(entryTypeOffsets.Length, Allocator.TempJob))
        {
            ReadCommand *readCommandsPtr = (ReadCommand *)readCommands.GetUnsafePtr();
            // Pass 1: queue one header read per present entry type. Absent types keep
            // their zero-initialized header (ClearMemory above).
            // (Removed unused local `typeSize` — it was assigned but never read.)
            for (int i = 0; i < entryTypeOffsets.Length; ++i)
            {
                var offset = entryTypeOffsets[i];
                if (offset != 0)
                {
                    readCommandsPtr[writtenCommands++] = ReadCommandBufferUtils.GetCommand((headersPtr + i), sizeof(EntryHeader), offset);
                }
            }
            if (file.Read(readCommandsPtr, writtenCommands, ReadMode.Blocking).Status != ReadStatus.Complete)
            {
                return(ReadError.FileReadFailed);
            }
            // Pass 2: build entries from the headers; for dynamic-size element arrays,
            // queue a read of the per-element offset table stored right after the header.
            writtenCommands = 0;
            for (int i = 0; i < headers.Length; ++i)
            {
                var entry = new Entry(*(headersPtr + i));
                entryStorage[i] = entry;
                var header = (EntryHeader *)(&entry);
                if (header->Format == EntryFormat.DynamicSizeElementArray)
                {
                    readCommandsPtr[writtenCommands++] = ReadCommandBufferUtils.GetCommand(entry.GetAdditionalStoragePtr(), sizeof(long) * entry.Count, entryTypeOffsets[i] + sizeof(EntryHeader));
                }
            }
            if (file.Read(readCommandsPtr, writtenCommands, ReadMode.Blocking).Status != ReadStatus.Complete)
            {
                return(ReadError.FileReadFailed);
            }
        }
    }
    var entriesBegin = (Entry *)entryStorage.GetUnsafePtr();
    var entriesEnd = entriesBegin + entryStorage.Length;
    // (Removed unused local `counter` — it was incremented but never read.)
    while (entriesBegin != entriesEnd)
    {
        if (entriesBegin->Header.Format == EntryFormat.DynamicSizeElementArray)
        {
            // Swap back the first entry we read during the header read with the total size
            // at the end of the entries array; also memmove the array by one to the right
            // to make space for the first entry. This is required so computing sizes does
            // not take cache hits from jumping to the end of the array and back.
            long *storagePtr = entriesBegin->GetAdditionalStoragePtr();
            // NOTE(review): assumes the header's meta slot lies at offset
            // sizeof(EntryFormat) + 2 * sizeof(uint) inside Entry — confirm against the
            // EntryHeader layout if it changes.
            long *headerMetaPtr = (long *)((byte *)entriesBegin + sizeof(EntryFormat) + sizeof(uint) * 2);
            long totalSize = storagePtr[entriesBegin->Count - 1];
            UnsafeUtility.MemMove(storagePtr + 1, storagePtr, sizeof(long) * (entriesBegin->Count - 1));
            *storagePtr = *headerMetaPtr;
            *headerMetaPtr = totalSize;
        }
        ++entriesBegin;
    }
    return(ReadError.Success);
}
/// <summary>
/// Validates the file's header/footer signatures and chapter structure, then resolves the
/// file offsets of the entries section and the blocks section.
/// </summary>
/// <param name="file">The open file to validate and read from.</param>
/// <param name="blockEntriesOffsets">Receives two longs: the entries-section offset, then the blocks-section offset (past its version tag).</param>
/// <returns>Success, or the specific integrity/read error encountered.</returns>
unsafe static ReadError TryGetBlockEntriesOffsetsWithIntegrityChecks(LowLevelFileReader file, out Blob16Byte blockEntriesOffsets)
{
    const int readCommandCount = 3;
    Blob16Byte offsets;
    FormatSignature *sig = stackalloc FormatSignature[2];
    ReadCommand * readCommands = stackalloc ReadCommand[readCommandCount];
    // Scratch for 8-byte offsets read from the file.
    long _8ByteBuffer = -1;
    //read header sig
    readCommands[0] = ReadCommandBufferUtils.GetCommand(sig, sizeof(uint), 0);
    //read tail sig
    readCommands[1] = ReadCommandBufferUtils.GetCommand(sig + 1, sizeof(uint), file.FileLength - sizeof(uint));
    //read chapters start offset (stored just before the footer signature)
    readCommands[2] = ReadCommandBufferUtils.GetCommand(&_8ByteBuffer, sizeof(ulong), readCommands[1].Offset - sizeof(ulong));
    if (file.Read(readCommands, readCommandCount, ReadMode.Blocking).Status != ReadStatus.Complete)
    {
        return(ReadError.FileReadFailed);
    }
    if (*sig != FormatSignature.HeaderSignature)
    {
        return(ReadError.InvalidHeaderSignature);
    }
    if (*(sig + 1) != FormatSignature.FooterSignature)
    {
        return(ReadError.InvalidFooterSignature);
    }
    // The chapter offset must point inside the file.
    if (!(_8ByteBuffer < file.FileLength && _8ByteBuffer > 0))
    {
        return(ReadError.InvalidChapterLocation);
    }
    //read directory signature
    readCommands[0] = ReadCommandBufferUtils.GetCommand(sig, sizeof(uint), _8ByteBuffer);
    //read chapter version
    readCommands[1] = ReadCommandBufferUtils.GetCommand(sig + 1, sizeof(uint), _8ByteBuffer + sizeof(uint));
    //read blocks offset
    readCommands[2] = ReadCommandBufferUtils.GetCommand(&_8ByteBuffer, sizeof(ulong), readCommands[1].Offset + sizeof(uint));
    // CONSISTENCY FIX: use the named constant instead of the magic literal 3.
    if (file.Read(readCommands, readCommandCount, ReadMode.Blocking).Status != ReadStatus.Complete)
    {
        return(ReadError.FileReadFailed);
    }
    if (*sig != FormatSignature.DirectorySignature)
    {
        return(ReadError.InvalidDirectorySignature);
    }
    if (*(sig + 1) != FormatSignature.ChapterSectionVersion)
    {
        return(ReadError.InvalidChapterSectionVersion);
    }
    //computed offset in file for entries
    var tmpEntriesOffset = readCommands[2].Offset + sizeof(ulong);
    readCommands[0] = ReadCommandBufferUtils.GetCommand(sig, sizeof(uint), _8ByteBuffer);
    if (file.Read(readCommands, 1, ReadMode.Blocking).Status != ReadStatus.Complete)
    {
        return(ReadError.FileReadFailed);
    }
    if (*sig != FormatSignature.BlockSectionVersion)
    {
        return(ReadError.InvalidBlockSectionVersion);
    }
    // Pack the two resolved offsets into the 16-byte blob: first the entries-section
    // offset, then the blocks-section offset (skipping its 4-byte version tag).
    long *dataPtr = (long *)(&offsets);
    *dataPtr++ = tmpEntriesOffset;
    *dataPtr = _8ByteBuffer + sizeof(uint);
    blockEntriesOffsets = offsets;
    return(ReadError.Success);
}
/// <summary>
/// Opens <paramref name="filePath"/>, validates its integrity, and loads its block and
/// entry tables into m_Blocks and m_Entries. Any previously opened file is disposed first.
/// </summary>
/// <param name="filePath">Path of the file to open.</param>
/// <returns>Success, or the first error encountered while validating/reading.</returns>
unsafe ReadError InternalOpen(string filePath)
{
    Dispose();
    m_FileReader = new LowLevelFileReader(filePath);
    //first 8byte are entriesCount offset, the next are blockCount offset
    Blob16Byte fileOffsets;
    var error = TryGetBlockEntriesOffsetsWithIntegrityChecks(m_FileReader, out fileOffsets);
    if (error != ReadError.Success)
    {
        return(error);
    }
    long * fileOffsetsPtr = (long *)(&fileOffsets);
    int * counts = stackalloc int[2];
    ReadCommand *commands = stackalloc ReadCommand[2];
    //read entry offset count
    commands[0] = ReadCommandBufferUtils.GetCommand(counts, sizeof(int), *fileOffsetsPtr);
    //read block offset count
    commands[1] = ReadCommandBufferUtils.GetCommand(counts + 1, sizeof(int), *(fileOffsetsPtr + 1));
    if (m_FileReader.Read(commands, 2, ReadMode.Blocking).Status != ReadStatus.Complete)
    {
        return(ReadError.FileReadFailed);
    }
    if (*(counts + 1) < 1)
    {
        return(ReadError.InvalidBlockSectionCount);
    }
    // Clamp the entry count: newer files may list more entry types than this build knows.
    if (*counts > (int)EntryType.Count)
    {
        *counts = (int)EntryType.Count;
    }
    var entryTypeToChapterOffset = new NativeArray <long>(counts[0], Allocator.TempJob, NativeArrayOptions.ClearMemory);
    var dataBlockOffsets = new NativeArray <long>(counts[1], Allocator.TempJob);
    //read entry offsets (the tables start right after their 4-byte counts)
    commands[0] = ReadCommandBufferUtils.GetCommand(entryTypeToChapterOffset.GetUnsafePtr(), sizeof(long) * counts[0], commands[0].Offset + sizeof(int));
    //read block offsets
    commands[1] = ReadCommandBufferUtils.GetCommand(dataBlockOffsets.GetUnsafePtr(), sizeof(long) * counts[1], commands[1].Offset + sizeof(uint));
    if (m_FileReader.Read(commands, 2, ReadMode.Blocking).Status != ReadStatus.Complete)
    {
        // BUGFIX: dispose the TempJob allocations on this failure path — previously both
        // NativeArrays were leaked when the blocking read did not complete.
        entryTypeToChapterOffset.Dispose();
        dataBlockOffsets.Dispose();
        return(ReadError.FileReadFailed);
    }
    error = BuildDataBlocks(m_FileReader, dataBlockOffsets, out m_Blocks);
    dataBlockOffsets.Dispose(); //dispose of the block offsets as they are no longer needed
    if (error != ReadError.Success)
    {
        entryTypeToChapterOffset.Dispose();
        return(error);
    }
    error = BuildDataEntries(m_FileReader, entryTypeToChapterOffset, out m_Entries);
    entryTypeToChapterOffset.Dispose();
    if (error != ReadError.Success)
    {
        return(error);
    }
    return(ReadError.Success);
}
/// <summary>
/// Polls all outstanding file reads. For each completed read, disposes its handle and
/// schedules the comment-deletion job on the decoded TextFile. Once no read is still in
/// progress, advances to the DeleteCommentAsync stage and frees the raw-file, read-handle,
/// and read-command buffers.
/// </summary>
private void ReadAsyncUpdate()
{
    bool isAnyRunning = false;
    for (int i = 0; i < Length; i++)
    {
        // Skip handles that were never issued or were already disposed.
        if (!ReadHandles[i].IsValid())
        {
            continue;
        }
        switch (ReadHandles[i].Status)
        {
        case ReadStatus.Complete:
            ReadHandles[i].Dispose();
            // Decode the raw bytes with the encoding chosen at load time, then kick off
            // the comment-deletion job for this file.
            if (IsUtf16)
            {
                FilesPtr[0][i] = TextFile.FromRawTextFileUtf16(RawFiles[i], allocator);
                DeleteCommentJobHandles[i] = DeleteCommentJob.Schedule(FilesPtr[0] + i, IsDebug);
            }
            else
            {
                FilesPtr[0][i] = TextFile.FromRawTextFileCp932(RawFiles[i], allocator);
                DeleteCommentJobHandles[i] = DeleteCommentJob.Schedule(FilesPtr[0] + i, IsDebug);
            }
            break;
        case ReadStatus.Failed:
            // NOTE(review): a failed read only disposes its handle — FilesPtr[0][i] is
            // never assigned and no job is scheduled for this slot; confirm the next
            // stage tolerates that.
            ReadHandles[i].Dispose();
            break;
        case ReadStatus.InProgress:
            isAnyRunning = true;
            break;
        }
    }
    // Keep polling until every read has settled.
    if (isAnyRunning)
    {
        return;
    }
    _currentStage = Stage.DeleteCommentAsync;
    if (RawFiles != null)
    {
        if (IsUtf16)
        {
            // NOTE(review): the UTF-16 path frees only the array, not the individual
            // RawFiles[i] — presumably their buffers are adopted by FromRawTextFileUtf16;
            // confirm, since the CP932 path disposes each element first.
            UnsafeUtility.Free(RawFiles, allocator);
        }
        else
        {
            for (int i = 0; i < Length; i++)
            {
                RawFiles[i].Dispose();
            }
            UnsafeUtility.Free(RawFiles, allocator);
        }
        RawFiles = null;
    }
    if (ReadHandles != null)
    {
        UnsafeUtility.Free(ReadHandles, allocator);
        ReadHandles = null;
    }
    if (ReadCommands != null)
    {
        UnsafeUtility.Free(ReadCommands, allocator);
        ReadCommands = null;
    }
}