void writeTempTerrainToZip(ZipArchive zip)
{
    int version = 0;

    // Write our terrain information
    AbstractFile md = zip.CreateFile("terrain.tdl", true);

    // Write the size of our data..
    Stream stream = md.OpenWrite(true); //File.Open(filename, FileMode.OpenOrCreate, FileAccess.Write);
    BinaryWriter bw = new BinaryWriter(stream);

    bw.Write(version);
    bw.Write(TerrainGlobals.getTerrain().getNumXVerts());
    bw.Write(TerrainGlobals.getTerrain().getNumZVerts());
    bw.Write(TerrainGlobals.getTerrain().getTileScale());

    // min
    Vector3 min = TerrainGlobals.getTerrain().getQuadNodeRoot().getDesc().m_min;
    bw.Write(min.X);
    bw.Write(min.Y);
    bw.Write(min.Z);

    // max
    Vector3 max = TerrainGlobals.getTerrain().getQuadNodeRoot().getDesc().m_max;
    bw.Write(max.X);
    bw.Write(max.Y);
    bw.Write(max.Z);

    // write terrain positions
    for (int x = 0; x < TerrainGlobals.getTerrain().getNumXVerts(); x++)
    {
        for (int z = 0; z < TerrainGlobals.getTerrain().getNumZVerts(); z++)
        {
            Vector3 pos = TerrainGlobals.getTerrain().getRelPos(x, z);
            bw.Write(pos.X);
            bw.Write(pos.Y);
            bw.Write(pos.Z);
        }
    }

    // write terrain normals
    for (int x = 0; x < TerrainGlobals.getTerrain().getNumXVerts(); x++)
    {
        for (int z = 0; z < TerrainGlobals.getTerrain().getNumZVerts(); z++)
        {
            Vector3 pos = TerrainGlobals.getTerrain().getNormal(x, z);
            bw.Write(pos.X);
            bw.Write(pos.Y);
            bw.Write(pos.Z);
        }
    }

    // write quadnodes
    BTerrainQuadNode[] mNodes = TerrainGlobals.getTerrain().getQuadNodeLeafArray();
    bw.Write(mNodes.Length);
    for (int i = 0; i < mNodes.Length; i++)
    {
        BTerrainQuadNodeDesc desc = mNodes[i].getDesc();
        bw.Write(desc.mMinXVert);
        bw.Write(desc.mMinZVert);
    }

    bw.Close();
    stream.Close();
}
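// A minimal usage sketch, not from the original source: how writeTempTerrainToZip
// might be invoked, assuming the Xceed-style DiskFile/ZipArchive API that SaveData()
// further down uses to build an archive. The archive path is hypothetical.
void writeTempTerrainExample()
{
    DiskFile zipFile = new DiskFile("terrainTemp.zip"); // hypothetical temp path
    zipFile.Create();
    ZipArchive zip = new ZipArchive(zipFile);
    writeTempTerrainToZip(zip);
}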
public void Test1()
{
    FileManager newSys = new FileManager();
    DateTime date1 = new DateTime(2019, 7, 20, 18, 30, 25);
    AbstractFile file1 = new AbstractFile("Test1", 100, date1);
    AbstractFile file2 = new AbstractFile("Test2", 100, date1);
    newSys.AddFile(file1);
    newSys.AddFile(file2);
    List<AbstractFile> list = newSys.EndEditing();

    StorageAlgorithms algorithm = new StorageAlgorithms();
    var pointInfo1 = algorithm.SeparateBackup(list, Type.Full);
    var pointInfo2 = algorithm.SeparateBackup(list, Type.Full);

    RestoreSystem newChain = new RestoreSystem();
    newChain.AddPoint(pointInfo1);
    newChain.AddPoint(pointInfo2);

    ICleaningPoints clean = new CleanByPoints(1);
    clean.StartClean(newChain.Points);

    Assert.AreEqual(1, newChain.ShowRestorePoints().Count);
}
public void Test2()
{
    FileManager newSys = new FileManager();
    DateTime date1 = new DateTime(2019, 7, 20, 18, 30, 25);
    AbstractFile file1 = new AbstractFile("Test1", 100, date1);
    AbstractFile file2 = new AbstractFile("Test2", 100, date1);
    newSys.AddFile(file1);
    newSys.AddFile(file2);
    List<AbstractFile> list = newSys.EndEditing();

    StorageAlgorithms algorithm = new StorageAlgorithms();
    var pointInfo1 = algorithm.SeparateBackup(list, Type.Full);
    var pointInfo2 = algorithm.SeparateBackup(list, Type.Full);

    RestoreSystem newChain = new RestoreSystem();
    newChain.AddPoint(pointInfo1);
    newChain.AddPoint(pointInfo2);

    ICleaningPoints clean = new CleanBySize(250);
    clean.StartClean(newChain.Points);

    // Not entirely sure why exactly one backup should remain here: we add two full
    // backups (200 MB each), so in theory they can be deleted without consequences,
    // and each point weighing more than 150 would be removed.
    Assert.AreEqual(1, newChain.ShowRestorePoints().Count);
}
public async Task Rename(string path, AbstractFile file)
{
    using (NewFolderForm CrtFldrFrm = new NewFolderForm())
    {
        if (file.Type() == FileDir.File)
        {
            ConcreteFile CurrentFile = (ConcreteFile)file;
            CrtFldrFrm.SetItemName(CurrentFile.FileName);
            CrtFldrFrm.ShowDialog();
            if (CrtFldrFrm.DialogResult == DialogResult.OK)
            {
                await client.Files.MoveAsync(new RelocationArg(CurrentFile.FilePath, path + "/" + CrtFldrFrm.FolderName));
            }
        }
        if (file.Type() == FileDir.Folder)
        {
            ConcreteFolder CurrentFolder = (ConcreteFolder)file;
            CrtFldrFrm.SetItemName(CurrentFolder.FolderName);
            CrtFldrFrm.ShowDialog();
            if (CrtFldrFrm.DialogResult == DialogResult.OK)
            {
                await client.Files.MoveAsync(new RelocationArg(CurrentFolder.FolderPath, path + "/" + CrtFldrFrm.FolderName));
            }
        }
    }
}
private static ZipArchive CreateZipArchive(AbstractFile zipFile)
{
    // Now that the file does not exist, we can create a new zip file.
    ZipArchive archive = new ZipArchive(zipFile);
    archive.DefaultCompressionMethod = DefaultCompressionMethod;
    archive.DefaultCompressionLevel = DefaultCompressionLevel;
    return archive;
}
/// <summary>
/// Uploads a file, optionally resuming a partially completed transfer.
/// </summary>
/// <param name="ftpAddress"></param>
/// <param name="localFilename"></param>
/// <param name="remoteFilename"></param>
/// <param name="resumeOperation"></param>
public bool UploadFile(string ftpAddress, string localFilename, string remoteFilename, bool resumeOperation)
{
    FtpSiteData siteData = ParseFtpAddress(ftpAddress);
    if (siteData == null)
    {
        throw new ArgumentException("Invalid ftp address format!");
    }
    using (FtpConnection connection = new FtpConnection(siteData.Host, siteData.Port, siteData.UserName, siteData.Password))
    {
        SetConnection(connection);
        AbstractFolder remoteFolder = new FtpFolder(connection);
        AbstractFile remoteFile = remoteFolder.GetFile(remoteFilename);
        AbstractFile localFile = new DiskFile(localFilename);

        if (!resumeOperation || !remoteFile.Exists || remoteFile.Size > localFile.Size)
        {
            // No resume requested, nothing on the server yet, or the remote copy is
            // larger than the local file: upload from scratch.
            localFile.CopyTo(remoteFile, true);
        }
        else if (remoteFile.Size == localFile.Size)
        {
            return true;
        }
        else // remoteFile.Size < localFile.Size: append the missing tail
        {
            byte[] buf = new byte[1024];
            int cnt;
            using (System.IO.Stream remoteStream = remoteFile.OpenWrite(false))
            {
                using (System.IO.Stream localStream = localFile.OpenRead())
                {
                    localStream.Seek(remoteFile.Size, System.IO.SeekOrigin.Begin);
                    // The remote stream can't seek; OpenWrite appends automatically
                    // when overwrite is false.
                    //remoteStream.Seek(0, System.IO.SeekOrigin.End);

                    // Read until EOF; Read may legitimately return fewer bytes than
                    // requested, so loop on the return value rather than buf.Length.
                    while ((cnt = localStream.Read(buf, 0, buf.Length)) > 0)
                    {
                        remoteStream.Write(buf, 0, cnt);
                    }
                }
            }
        }
        return true;
    }
    //FtpClient client = LoginFtp(ftpAddress);
    //client.SendFile(localFilename, remoteFilename);
}
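// A hedged usage sketch, not in the original source: resuming an interrupted upload.
// The exact address format ParseFtpAddress accepts is not shown above, so the URI
// below is an assumption; file names are illustrative.
void uploadFileExample()
{
    bool ok = UploadFile("ftp://user:password@ftp.example.com:21",
                         @"C:\data\payload.bin", // local file
                         "payload.bin",          // remote file name
                         true);                  // resume a partial transfer if possible
}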
public AbstractFile GetNew(FileDir type)
{
    AbstractFile result = null;
    switch (type)
    {
        case FileDir.File:
            result = new ConcreteFile();
            break;

        case FileDir.Folder:
            result = new ConcreteFolder();
            break;
    }
    return result;
}
TreeNode CreateVirtualFileNode(AbstractFile f, TreeNode parent)
{
    TreeNode node = new TreeNode();
    node.ImageKey = "FileForm";
    node.SelectedImageKey = node.ImageKey;
    node.Tag = f;
    node.Name = f.filename;
    node.Text = f.ToString();
    if (f.IsSubfileProvider)
    {
        node.Nodes.Add(CreateDummyNode());
    }
    parent.Nodes.Add(node);
    return node;
}
public void Test3() // Test of separate storage: verify that all files make it to the restore point.
{
    FileManager newSys = new FileManager();
    DateTime date1 = new DateTime(2019, 7, 20, 18, 30, 25);
    AbstractFile file1 = new AbstractFile("Test1", 100, date1);
    AbstractFile file2 = new AbstractFile("Test2", 100, date1);
    newSys.AddFile(file1);
    newSys.AddFile(file2);
    List<AbstractFile> list = newSys.EndEditing();

    StorageAlgorithms algorithm = new StorageAlgorithms();
    var pointInfo1 = algorithm.SeparateBackup(list, Type.Full);

    RestoreSystem newChain = new RestoreSystem();
    newChain.AddPoint(pointInfo1);

    List<FileRestoreCopyInfo> restoredFiles = newChain.ShowRestoreFiles(1);
    Assert.AreEqual(2, restoredFiles.Count);
}
public void Test4()
{
    FileManager newSys = new FileManager();
    DateTime date1 = new DateTime(2019, 7, 20, 18, 30, 25);
    AbstractFile file1 = new AbstractFile("Test1", 100, date1);
    AbstractFile file2 = new AbstractFile("Test2", 100, date1);
    newSys.AddFile(file1);
    newSys.AddFile(file2);
    List<AbstractFile> list = newSys.EndEditing();

    StorageAlgorithms algorithm = new StorageAlgorithms();
    var pointInfo1 = algorithm.SeparateBackup(list, Type.Full);

    RestoreSystem newChain = new RestoreSystem();
    newChain.AddPoint(pointInfo1);

    List<RestorePoint> restoredFiles = newChain.ShowRestorePoints();
    Assert.AreEqual(200, restoredFiles[0].BackupSize);
}
public static bool loadModelFromArchive(string filename, AbstractFolder folder)
{
    AbstractFile file = folder.GetFile(filename);
    if (!file.Exists)
    {
        return false;
    }

    ModelContainer model = new ModelContainer();
    model.mGR2Name = filename;
    {
        // Since granny is unmanaged, we have to allocate a contiguous block of memory for this
        Stream stream = file.OpenRead();
        BinaryReader br = new BinaryReader(stream);
        byte[] fullFile = br.ReadBytes((int)file.Size);
        br.Close();
        stream.Close();

        IntPtr pMarshaledIndexMem = System.Runtime.InteropServices.Marshal.AllocHGlobal(fullFile.Length);
        System.Runtime.InteropServices.Marshal.Copy(fullFile, 0, pMarshaledIndexMem, fullFile.Length);
        model.mVisualModel = GrannyBridge.LoadGR2FromIntPtr(pMarshaledIndexMem, fullFile.Length);
        System.Runtime.InteropServices.Marshal.FreeHGlobal(pMarshaledIndexMem);
    }
    mModels.Add(model);
    return true;
}
/// <summary>
/// Reads a session archive zip file into an array of Session objects
/// </summary>
/// <param name="sFilename">Filename to load</param>
/// <param name="bVerboseDialogs"></param>
/// <returns>Loaded array of sessions or null, in case of failure</returns>
private static Session[] ReadSessionArchive(string sFilename, bool bVerboseDialogs)
{
    /* Okay, given the zip, we gotta:
     * Unzip
     * Find all matching pairs of request, response
     * Create new Session object for each pair
     */
    if (!File.Exists(sFilename))
    {
        FiddlerApplication.Log.LogString("SAZFormat> ReadSessionArchive Failed. File " + sFilename + " does not exist.");
        return null;
    }

    ZipArchive oZip = null;
    List<Session> outSessions = new List<Session>();
    try
    {
        // Sniff for a ZIP file: a valid archive starts with the 'PK' signature.
        FileStream oSniff = File.Open(sFilename, FileMode.Open, FileAccess.Read, FileShare.Read);
        if (oSniff.Length < 64 || oSniff.ReadByte() != 0x50 || oSniff.ReadByte() != 0x4B)
        {
            FiddlerApplication.Log.LogString("SAZFormat> ReadSessionArchive Failed. " + sFilename + " is not a Fiddler-generated .SAZ archive of HTTP Sessions.");
            oSniff.Close();
            return null;
        }
        oSniff.Close();

        oZip = new ZipArchive(new DiskFile(sFilename));
        oZip.BeginUpdate();
        AbstractFolder oRaw = oZip.GetFolder("raw");
        if (!oRaw.Exists)
        {
            FiddlerApplication.Log.LogString("SAZFormat> ReadSessionArchive Failed. The selected ZIP is not a Fiddler-generated .SAZ archive of HTTP Sessions.");
            oZip.EndUpdate();
            return null;
        }

        foreach (AbstractFile oRequestFile in oRaw.GetFiles(true, @"*_c.txt"))
        {
            try
            {
                byte[] arrRequest = new byte[oRequestFile.Size];
                Stream oFS;

RetryWithPassword:
                try
                {
                    oFS = oRequestFile.OpenRead(FileShare.Read);
                }
                catch (Xceed.Zip.InvalidDecryptionPasswordException)
                {
                    Console.Write("Password-Protected Session Archive.\nEnter the password to decrypt, or enter nothing to abort opening.\n>");
                    string sPassword = Console.ReadLine();
                    if (sPassword != String.Empty)
                    {
                        oZip.DefaultDecryptionPassword = sPassword;
                        goto RetryWithPassword;
                    }
                    return null;
                }

                int iRead = Utilities.ReadEntireStream(oFS, arrRequest);
                oFS.Close();
                Debug.Assert(iRead == arrRequest.Length, "Failed to read entire request.");

                AbstractFile oResponseFile = oRaw.GetFile(oRequestFile.Name.Replace("_c.txt", "_s.txt"));
                if (!oResponseFile.Exists)
                {
                    FiddlerApplication.Log.LogString("Could not find a server response for: " + oResponseFile.FullName);
                    continue;
                }
                byte[] arrResponse = new byte[oResponseFile.Size];
                oFS = oResponseFile.OpenRead();
                iRead = Utilities.ReadEntireStream(oFS, arrResponse);
                oFS.Close();
                Debug.Assert(iRead == arrResponse.Length, "Failed to read entire response.");

                // Session metadata, if present, lives in a matching _m.xml entry.
                oResponseFile = oRaw.GetFile(oRequestFile.Name.Replace("_c.txt", "_m.xml"));
                Session oSession = new Session(arrRequest, arrResponse);
                if (oResponseFile.Exists)
                {
                    oSession.LoadMetadata(oResponseFile.OpenRead());
                }
                oSession.oFlags["x-LoadedFrom"] = oRequestFile.Name.Replace("_c.txt", "_s.txt");
                outSessions.Add(oSession);
            }
            catch (Exception eX)
            {
                FiddlerApplication.Log.LogString("SAZFormat> ReadSessionArchive incomplete. Invalid data was present in session: " + oRequestFile.FullName + ".\n\n\n" + eX.Message + "\n" + eX.StackTrace);
            }
        }
    }
    catch (Exception eX)
    {
        FiddlerApplication.ReportException(eX, "ReadSessionArchive Error");
        return null;
    }

    if (null != oZip)
    {
        oZip.EndUpdate();
        oZip = null;
    }
    return outSessions.ToArray();
}
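// A hedged usage sketch, not part of the original code: loading a .SAZ capture and
// logging where each session's response was loaded from. The path is hypothetical.
void readSessionArchiveExample()
{
    Session[] oSessions = ReadSessionArchive(@"C:\captures\trace.saz", false);
    if (oSessions != null)
    {
        foreach (Session oS in oSessions)
        {
            FiddlerApplication.Log.LogString("Loaded: " + oS.oFlags["x-LoadedFrom"]);
        }
    }
}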
public override void Remove(AbstractFile file)
{
    System.Console.WriteLine("Not Supported!");
}
public bool SaveData(string fileName, bool overwrite, bool checkReadOnly)
{
    try
    {
        using (PerfSection p = new PerfSection("SaveData() " + Path.GetFileName(fileName)))
        {
            if (File.Exists(fileName))
            {
                if (!overwrite)
                {
                    return false;
                }
                // FileAttributes is a flags enum, so test the ReadOnly bit rather
                // than comparing the whole value for equality.
                if (checkReadOnly && (File.GetAttributes(fileName) & FileAttributes.ReadOnly) != 0)
                {
                    return false;
                }
                File.SetAttributes(fileName, FileAttributes.Normal);
                File.Delete(fileName);
            }

            DiskFile zipFile = new DiskFile(fileName);
            zipFile.Create();
            if (!zipFile.Exists)
            {
                return false;
            }

            ZipArchive zip = new ZipArchive(zipFile);
            Dictionary<string, IDataStream>.Enumerator it = mStreams.GetEnumerator();
            while (it.MoveNext())
            {
                AbstractFile md = zip.CreateFile(it.Current.Key, true);
                Stream s = md.OpenWrite(true);
                BufferedStream bs = null;
                if (CoreGlobals.OutOfMemory == false)
                {
                    bs = new BufferedStream(s, 10000000); // ~10MB buffer
                    it.Current.Value.Save(bs);
                }
                else
                {
                    it.Current.Value.Save(s);
                }
                if (bs != null)
                {
                    bs.Flush();
                    bs.Close();
                }
                else
                {
                    s.Close();
                }
            }
        }
        return true;
    }
    catch (System.Exception)
    {
        CoreGlobals.FatalEdDataSaveError = true;
        throw; // rethrow without resetting the stack trace
    }
}
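// A hedged usage sketch, not in the original source: persisting every registered
// stream into a scenario archive. The path is hypothetical, and mStreams is assumed
// to have been populated by the surrounding class before the call.
void saveDataExample()
{
    bool saved = SaveData(@"C:\work\scenario.zip", true /*overwrite*/, true /*checkReadOnly*/);
}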
bool loadModelsSet(ZipArchive zip)
{
    AbstractFile file = zip.GetFile("modelPositions.xml");
    if (!file.Exists)
    {
        return false;
    }

    SceneObjectsXML objectsXML = new SceneObjectsXML();
    Stream stream = null;
    try
    {
        stream = file.OpenRead();
        XmlSerializer s = new XmlSerializer(typeof(SceneObjectsXML), new Type[] { });
        objectsXML = (SceneObjectsXML)s.Deserialize(stream);
        stream.Close();
    }
    catch (Exception e)
    {
        if (stream != null)
        {
            stream.Close();
        }
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine("Error deserializing the modelPositions.xml file: " + e.Message);
        Console.ForegroundColor = ConsoleColor.White;
        return false;
    }

    try
    {
        AbstractFolder fold = zip.GetFolder("models");
        for (int modelIdx = 0; modelIdx < objectsXML.objectGR2Names.Count; modelIdx++)
        {
            if (!ModelManager.loadModelFromDisk(TerrainGlobals.mGameDir + objectsXML.objectGR2Names[modelIdx]))
            {
                // if (!ModelManager.loadModelFromArchive(objectsXML.objectGR2Names[modelIdx], fold))
                Console.WriteLine("Error loading model " + objectsXML.objectGR2Names[modelIdx]);
            }
        }
        for (int instIdx = 0; instIdx < objectsXML.objectinstances.Count; instIdx++)
        {
            Matrix orient = objectsXML.objectinstances[instIdx].getMatrix();
            ModelManager.addInstance(TerrainGlobals.mGameDir + objectsXML.objectinstances[instIdx].GR2Filename, orient);
        }
        ModelManager.calcModelInstanceBuffers();

        if (objectsXML.aabbmin != null)
        {
            ModelManager.mAABBMin = Vec3FromString(objectsXML.aabbmin);
        }
        if (objectsXML.aabbmax != null)
        {
            ModelManager.mAABBMax = Vec3FromString(objectsXML.aabbmax);
        }
    }
    catch (Exception e)
    {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine("Error loading gr2 model data from source file: " + e.Message);
        Console.ForegroundColor = ConsoleColor.White;
        return false;
    }
    return true;
}
public abstract void Remove(AbstractFile file);
public override void Delete(AbstractFile file)
{
    throw new NotImplementedException();
}
void writeTempModelsToZip(ZipArchive zip)
{
    SceneObjectsXML sceneObjects = new SceneObjectsXML();
    BBoundingBox objectAABB = new BBoundingBox();
    objectAABB.empty();
    string baseDir = CoreGlobals.getWorkPaths().mGameDirectory;

    if (mIncludeObjects)
    {
        // Serialize an XML file to a memory stream holding positions and model names.
        List<EditorObject> editObjs = SimGlobals.getSimMain().getEditorObjects(false, SimMain.eFilterTypes.cFilterAll, -1, false);
        for (int objIdx = 0; objIdx < editObjs.Count; objIdx++)
        {
            if (editObjs[objIdx] == null)
            {
                continue;
            }
            if (editObjs[objIdx].GetType() == typeof(SimObject))
            {
                SimObject obj = editObjs[objIdx] as SimObject;
                if (obj.IgnoreToAO)
                {
                    continue;
                }
                if (obj != null && obj.ProtoObject != null)
                {
                    string grannyName = obj.ProtoObject.getGrannyFileName();
                    if (grannyName == "")
                    {
                        continue;
                    }
                    if (grannyName.Contains(baseDir))
                    {
                        grannyName = grannyName.Remove(0, baseDir.Length);
                    }

                    // If this GR2 isn't already listed, then list it.
                    if (!sceneObjects.objectGR2Names.Contains(grannyName))
                    {
                        sceneObjects.objectGR2Names.Add(grannyName);
                    }

                    // Add our instance.
                    ObjectInstanceXML inst = new ObjectInstanceXML();
                    inst.GR2Filename = grannyName;
                    inst.setOrientation(obj.getMatrix());
                    sceneObjects.objectinstances.Add(inst);

                    // Add our transformed BB to the global BB list.
                    if (obj != null && obj.mVisual != null)
                    {
                        if (!obj.IgnoreToAO)
                        {
                            objectAABB.addPoint(obj.mAABB.max + obj.getPosition());
                            objectAABB.addPoint(obj.mAABB.min + obj.getPosition());
                        }
                    }
                }
            }
        }
        sceneObjects.aabbmin = TextVectorHelper.ToString(objectAABB.min);
        sceneObjects.aabbmax = TextVectorHelper.ToString(objectAABB.max);
    }

    // Write it to an XML stream.
    AbstractFile md = zip.CreateFile("modelPositions.xml", true);
    Stream stream = md.OpenWrite(true);
    XmlSerializer s = new XmlSerializer(typeof(SceneObjectsXML), new Type[] { });
    s.Serialize(stream, sceneObjects);
    stream.Close();

    // Create a folder and copy our GR2s into it
    //AbstractFolder fold = zip.CreateFolder("models");
    //if (mIncludeObjects)
    //{
    //   for (int modelIdx = 0; modelIdx < sceneObjects.objectGR2Names.Count; modelIdx++)
    //   {
    //      if (mWorkerThread.CancellationPending)
    //         return;
    //      try
    //      {
    //         if (fullGR2Names[modelIdx] == "")
    //            continue;
    //         DiskFile modelFile = new DiskFile(fullGR2Names[modelIdx]);
    //         modelFile.CopyTo(fold, true);
    //      }
    //      catch (Exception e)
    //      {
    //         continue;
    //      }
    //   }
    //}
}
public void Test5() // Combine date and count limits (all conditions must be satisfied).
{
    FileManager newSys = new FileManager();
    DateTime date1 = new DateTime(2019, 7, 20, 18, 30, 25);
    DateTime date2 = new DateTime(2020, 10, 20, 12, 30, 30);
    AbstractFile file1 = new AbstractFile("Test1", 100, date1);
    AbstractFile file2 = new AbstractFile("Test2", 100, date1);
    AbstractFile file3 = new AbstractFile("Test3", 100, date1);
    AbstractFile file4 = new AbstractFile("Test1_a", 100, date2);
    AbstractFile file5 = new AbstractFile("Test2_a", 100, date2);
    AbstractFile file6 = new AbstractFile("Test3_a", 100, date2);
    newSys.AddFile(file1);
    newSys.AddFile(file2);
    newSys.AddFile(file3);
    List<AbstractFile> list = newSys.EndEditing();

    StorageAlgorithms algorithm = new StorageAlgorithms();
    RestoreSystem newChain = new RestoreSystem();
    var pointInfo1 = algorithm.SeparateBackup(list, Type.Full);
    newChain.AddPoint(pointInfo1);
    newSys.DelFile(2);
    var pointInfo2 = algorithm.SeparateBackup(list, Type.Incremental);
    newChain.AddPoint(pointInfo2);
    newSys.DelFile(0);
    newSys.DelFile(0);
    newSys.AddFile(file4);
    newSys.AddFile(file5);
    newSys.AddFile(file6);
    List<AbstractFile> list1 = newSys.EndEditing();

    Thread.Sleep(5000);
    DateTime maxDate = DateTime.Now;
    Thread.Sleep(10000);

    var pointInfo3 = algorithm.SeparateBackup(list1, Type.Full);
    newChain.AddPoint(pointInfo3);
    var pointInfo4 = algorithm.SeparateBackup(list1, Type.Incremental);
    newChain.AddPoint(pointInfo4);
    var pointInfo5 = algorithm.SeparateBackup(list1, Type.Incremental);
    newChain.AddPoint(pointInfo5);
    var pointInfo6 = algorithm.SeparateBackup(list1, Type.Full);
    newChain.AddPoint(pointInfo6);
    var pointInfo7 = algorithm.SeparateBackup(list1, Type.Incremental);
    newChain.AddPoint(pointInfo7);
    var pointInfo8 = algorithm.SeparateBackup(list1, Type.Incremental);
    newChain.AddPoint(pointInfo8);

    List<ICleaningPoints> hybrid1 = new List<ICleaningPoints>();
    ICleaningPoints cleanByPoints = new CleanByPoints(3);
    ICleaningPoints cleanByDate = new CleanByDate(maxDate);
    hybrid1.Add(cleanByPoints);
    hybrid1.Add(cleanByDate);
    ICleaningPoints hybridAllTerm = new HybridCleanAllTerm(hybrid1);
    hybridAllTerm.StartClean(newChain.Points);

    Assert.AreEqual(6, newChain.ShowRestorePoints().Count);
}
public Task Rename(string path, AbstractFile file)
{
    throw new NotImplementedException();
}
bool loadTerrainDat(ZipArchive zip)
{
    // Note: the writer above creates this entry as "terrain.tdl".
    AbstractFile file = zip.GetFile("terrain.tdl");
    if (!file.Exists)
    {
        return false;
    }
    try
    {
        Stream stream = file.OpenRead();
        BinaryReader br = new BinaryReader(stream);

        TLDHeader header = new TLDHeader();
        header.Version = br.ReadInt32();

        // TERRAIN DATA
        TerrainGlobals.getTerrain().mNumXVerts = br.ReadUInt32();
        TerrainGlobals.getTerrain().mNumZVerts = br.ReadUInt32();
        TerrainGlobals.getTerrain().mTileScale = br.ReadSingle();

        TerrainGlobals.getTerrain().mTerrainBBMin = new Vector3();
        TerrainGlobals.getTerrain().mTerrainBBMin.X = br.ReadSingle();
        TerrainGlobals.getTerrain().mTerrainBBMin.Y = br.ReadSingle();
        TerrainGlobals.getTerrain().mTerrainBBMin.Z = br.ReadSingle();

        TerrainGlobals.getTerrain().mTerrainBBMax = new Vector3();
        TerrainGlobals.getTerrain().mTerrainBBMax.X = br.ReadSingle();
        TerrainGlobals.getTerrain().mTerrainBBMax.Y = br.ReadSingle();
        TerrainGlobals.getTerrain().mTerrainBBMax.Z = br.ReadSingle();

        int numVerts = (int)(TerrainGlobals.getTerrain().mNumXVerts * TerrainGlobals.getTerrain().mNumZVerts);
        TerrainGlobals.getTerrain().mTerrainRelativePositions = new Vector3[numVerts];
        TerrainGlobals.getTerrain().mTerrainNormals = new Vector3[numVerts];
        // AO values are allocated here but are not stored in the .tdl stream.
        TerrainGlobals.getTerrain().mTerrainAOVals = new float[numVerts];

        // Read terrain positions.
        for (int i = 0; i < numVerts; i++)
        {
            TerrainGlobals.getTerrain().mTerrainRelativePositions[i] = new Vector3();
            TerrainGlobals.getTerrain().mTerrainRelativePositions[i].X = br.ReadSingle();
            TerrainGlobals.getTerrain().mTerrainRelativePositions[i].Y = br.ReadSingle();
            TerrainGlobals.getTerrain().mTerrainRelativePositions[i].Z = br.ReadSingle();
        }

        // Read terrain normals.
        for (int i = 0; i < numVerts; i++)
        {
            TerrainGlobals.getTerrain().mTerrainNormals[i] = new Vector3();
            TerrainGlobals.getTerrain().mTerrainNormals[i].X = br.ReadSingle();
            TerrainGlobals.getTerrain().mTerrainNormals[i].Y = br.ReadSingle();
            TerrainGlobals.getTerrain().mTerrainNormals[i].Z = br.ReadSingle();
        }

        // Read our quadnode descriptions.
        int numQuadNodes = br.ReadInt32();
        TerrainGlobals.getTerrain().mQuadNodeDescArray = new BTerrainQuadNodeDesc[numQuadNodes];
        for (int i = 0; i < numQuadNodes; i++)
        {
            TerrainGlobals.getTerrain().mQuadNodeDescArray[i] = new BTerrainQuadNodeDesc();
            TerrainGlobals.getTerrain().mQuadNodeDescArray[i].mMinXVert = br.ReadInt32();
            TerrainGlobals.getTerrain().mQuadNodeDescArray[i].mMinZVert = br.ReadInt32();
        }

        br.Close();
        stream.Close();
    }
    catch (Exception e)
    {
        Console.WriteLine("TDL loading FAILED: " + e.Message);
        return false;
    }
    return true;
}
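// A hedged sketch (an assumption, not original code): writeTempTerrainToZip above
// writes version 0, but loadTerrainDat never checks header.Version after reading it.
// A small guard like this, called right after the header read, would reject unknown
// versions before the rest of the stream is parsed:
static bool isSupportedTdlVersion(int version)
{
    const int supportedVersion = 0; // matches the writer above
    if (version != supportedVersion)
    {
        Console.WriteLine("Unsupported terrain.tdl version: " + version);
        return false;
    }
    return true;
}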
public abstract void add(AbstractFile element);
public abstract void Add(AbstractFile file);
public abstract void remove(AbstractFile element);
public void Test6() // Combine count and size limits (at least one of the conditions must be satisfied).
{
    FileManager newSys = new FileManager();
    DateTime date1 = new DateTime(2019, 7, 20, 18, 30, 25);
    DateTime date2 = new DateTime(2020, 10, 20, 12, 30, 30);
    AbstractFile file1 = new AbstractFile("Test1", 100, date1);
    AbstractFile file2 = new AbstractFile("Test2", 100, date1);
    AbstractFile file3 = new AbstractFile("Test3", 100, date1);
    AbstractFile file4 = new AbstractFile("Test1_a", 100, date2);
    AbstractFile file5 = new AbstractFile("Test2_a", 100, date2);
    AbstractFile file6 = new AbstractFile("Test3_a", 100, date2);
    newSys.AddFile(file1);
    newSys.AddFile(file2);
    newSys.AddFile(file3);
    List<AbstractFile> list = newSys.EndEditing();

    StorageAlgorithms algorithm = new StorageAlgorithms();
    RestoreSystem newChain = new RestoreSystem();
    var pointInfo1 = algorithm.SeparateBackup(list, Type.Full);
    newChain.AddPoint(pointInfo1);
    newSys.DelFile(2);
    var pointInfo2 = algorithm.SeparateBackup(list, Type.Incremental);
    newChain.AddPoint(pointInfo2);
    newSys.DelFile(0);
    newSys.DelFile(0);
    newSys.AddFile(file4);
    newSys.AddFile(file5);
    newSys.AddFile(file6);
    List<AbstractFile> list1 = newSys.EndEditing();

    var pointInfo3 = algorithm.SeparateBackup(list1, Type.Full);
    newChain.AddPoint(pointInfo3);
    var pointInfo4 = algorithm.SeparateBackup(list1, Type.Incremental);
    newChain.AddPoint(pointInfo4);
    var pointInfo5 = algorithm.SeparateBackup(list1, Type.Incremental);
    newChain.AddPoint(pointInfo5);
    var pointInfo6 = algorithm.SeparateBackup(list1, Type.Full);
    newChain.AddPoint(pointInfo6);
    var pointInfo7 = algorithm.SeparateBackup(list1, Type.Incremental);
    newChain.AddPoint(pointInfo7);
    var pointInfo8 = algorithm.SeparateBackup(list1, Type.Incremental);
    newChain.AddPoint(pointInfo8);

    List<ICleaningPoints> hybrid2 = new List<ICleaningPoints>();
    ICleaningPoints cleanByPoints = new CleanByPoints(4);
    ICleaningPoints cleanBySize = new CleanBySize(2000);
    hybrid2.Add(cleanBySize);
    hybrid2.Add(cleanByPoints);
    ICleaningPoints cleanByOneTerm = new HybridCleanOneTerm(hybrid2);
    cleanByOneTerm.StartClean(newChain.Points);

    Assert.AreEqual(6, newChain.ShowRestorePoints().Count);
}