/// <summary>
/// Verify that the control file is the first entry in the saved IAR byte stream and that
/// InventoryArchiveReadRequest can parse it.
/// </summary>
public void TestOrder()
{
    TestHelpers.InMethod();
//  log4net.Config.XmlConfigurator.Configure();

    // Re-open the previously saved IAR bytes as a tar stream.
    MemoryStream iarStream = new MemoryStream(m_iarStreamBytes);
    TarArchiveReader reader = new TarArchiveReader(iarStream);

    string entryPath;
    TarArchiveReader.TarEntryType entryKind;
    byte[] entryData = reader.ReadEntry(out entryPath, out entryKind);

    // The control file must be the very first entry in the archive.
    Assert.That(entryPath, Is.EqualTo(ArchiveConstants.CONTROL_FILE_PATH));

    // A bare request object is enough to exercise control file parsing.
    InventoryArchiveReadRequest readRequest
        = new InventoryArchiveReadRequest(UUID.Random(), null, null, null, null, null, null, (Stream)null, false);
    readRequest.LoadControlFile(entryPath, entryData);

    Assert.That(readRequest.ControlFileLoaded, Is.True);
}
/// <summary>
/// Dearchive (load) a version 0.* OAR from m_loadStream into the scene.
/// First pass reads the whole tar, loading assets/terrain/settings immediately and buffering
/// serialized scene objects; second pass deserializes, re-IDs and restores the objects.
/// </summary>
private void DearchiveRegion0DotStar()
{
    int successfulAssetRestores = 0;
    int failedAssetRestores = 0;
    List<string> serializedSceneObjects = new List<string>();
    // Kept up to date inside the read loop so the catch block can report which entry failed.
    string filePath = "NONE";

    try
    {
        TarArchiveReader archive = new TarArchiveReader(m_loadStream);

        byte[] data;
        TarArchiveReader.TarEntryType entryType;

        // Pass 1: walk every tar entry, dispatching on its path prefix.
        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            //m_log.DebugFormat(
            //    "[ARCHIVER]: Successfully read {0} ({1} bytes)", filePath, data.Length);

            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
                continue;

            if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
            {
                // Buffer scene object XML for the second pass rather than deserializing now.
                serializedSceneObjects.Add(Encoding.UTF8.GetString(data));
            }
            else if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
            {
                if (LoadAsset(filePath, data))
                    successfulAssetRestores++;
                else
                    failedAssetRestores++;
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.TERRAINS_PATH))
            {
                // Terrain and region settings are only replaced on a full (non-merge) load.
                LoadTerrain(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.SETTINGS_PATH))
            {
                LoadRegionSettings(filePath, data);
            }
        }

        //m_log.Debug("[ARCHIVER]: Reached end of archive");

        archive.Close();
    }
    catch (Exception e)
    {
        // Abort the whole load on any read/parse error; notify listeners with the error text.
        m_log.ErrorFormat(
            "[ARCHIVER]: Aborting load with error in archive file {0}. {1}", filePath, e);
        m_errorMessage += e.ToString();
        m_scene.EventManager.TriggerOarFileLoaded(m_requestId, m_errorMessage);
        return;
    }

    m_log.InfoFormat("[ARCHIVER]: Restored {0} assets", successfulAssetRestores);

    if (failedAssetRestores > 0)
    {
        m_log.ErrorFormat("[ARCHIVER]: Failed to load {0} assets", failedAssetRestores);
        m_errorMessage += String.Format("Failed to load {0} assets", failedAssetRestores);
    }

    // Reload serialized prims
    m_log.InfoFormat("[ARCHIVER]: Preparing {0} scene objects. Please wait.", serializedSceneObjects.Count);

    IRegionSerializerModule serializer = m_scene.RequestModuleInterface<IRegionSerializerModule>();
    int sceneObjectsLoadedCount = 0;

    List<SceneObjectGroup> backupObjects = new List<SceneObjectGroup>();
    // Maps new (post-ResetIDs) group UUID -> original UUID from the archive.
    Dictionary<UUID, UUID> OriginalBackupIDs = new Dictionary<UUID, UUID>();

    bool objectFixingFailed = false;

    // Pass 2: deserialize each buffered object, remap its IDs and repair ownership.
    foreach (string serializedSceneObject in serializedSceneObjects)
    {
        SceneObjectGroup sceneObject;
        try
        {
            sceneObject = serializer.DeserializeGroupFromXml2(serializedSceneObject);
        }
        catch (Exception e)
        {
            m_log.InfoFormat("[ARCHIVER]: Error while deserializing group: {0}", e);
            if (m_skipErrorGroups) continue;
            else throw;
        }

        if (sceneObject == null)
        {
            if (m_skipErrorGroups) continue;
            else throw new Exception("Error while deserializing group");
        }

        // For now, give all incoming scene objects new uuids.  This will allow scenes to be cloned
        // on the same region server and multiple examples of a single object archive to be imported
        // to the same scene (when this is possible).
        UUID OldUUID = sceneObject.UUID;
        sceneObject.ResetIDs();
        // if sceneObject is no-copy, save the old ID with the new ID.
        OriginalBackupIDs[sceneObject.UUID] = OldUUID;

        // Try to retain the original creator/owner/lastowner if their uuid is present on this grid
        // otherwise, use the master avatar uuid instead
        UUID masterAvatarId = m_scene.RegionInfo.MasterAvatarAssignedUUID;

        // Prefer the estate owner as the fallback identity when one is set.
        if (m_scene.RegionInfo.EstateSettings.EstateOwner != UUID.Zero)
            masterAvatarId = m_scene.RegionInfo.EstateSettings.EstateOwner;

        foreach (SceneObjectPart part in sceneObject.GetParts())
        {
            if (!ResolveUserUuid(part.CreatorID))
            {
                m_log.WarnFormat("[ARCHIVER]: Could not resolve av/group ID {0} for object '{1}' part creator", part.CreatorID, sceneObject.Name);
                objectFixingFailed = true;
                part.CreatorID = masterAvatarId;
            }

            if (!ResolveUserUuid(part.OwnerID))
            {
                m_log.WarnFormat("[ARCHIVER]: Could not resolve av/group ID {0} for object '{1}' part owner", part.OwnerID, sceneObject.Name);
                objectFixingFailed = true;
                part.OwnerID = masterAvatarId;
            }

            if (!ResolveUserUuid(part.LastOwnerID))
            {
                m_log.WarnFormat("[ARCHIVER]: Could not resolve av/group ID {0} for object '{1}' part last owner", part.LastOwnerID, sceneObject.Name);
                objectFixingFailed = true;
                part.LastOwnerID = masterAvatarId;
            }

            // Fix ownership/creator of inventory items
            // Not doing so results in inventory items
            // being no copy/no mod for everyone
            lock (part.TaskInventory)
            {
                TaskInventoryDictionary inv = part.TaskInventory;
                foreach (KeyValuePair<UUID, TaskInventoryItem> kvp in inv)
                {
                    if (!ResolveUserUuid(kvp.Value.OwnerID))
                    {
                        m_log.WarnFormat("[ARCHIVER]: Could not resolve av/group ID {0} for object '{1}' inventory item owner", kvp.Value.OwnerID, sceneObject.Name);
                        objectFixingFailed = true;
                        kvp.Value.OwnerID = masterAvatarId;
                    }

                    if (!ResolveUserUuid(kvp.Value.CreatorID))
                    {
                        m_log.WarnFormat("[ARCHIVER]: Could not resolve av/group ID {0} for object '{1}' inventory item creator", kvp.Value.CreatorID, sceneObject.Name);
                        objectFixingFailed = true;
                        kvp.Value.CreatorID = masterAvatarId;
                    }
                }
            }
        }

        backupObjects.Add(sceneObject);
    }

    // When user reassignment is disallowed, any unresolved identity aborts the restore.
    if (objectFixingFailed && !m_allowUserReassignment)
    {
        m_log.Error("[ARCHIVER]: Could not restore scene objects. One or more avatar accounts not found.");
        return;
    }

    // Maps original UUID -> existing in-scene group that was spared from deletion (no-copy).
    Dictionary<UUID, SceneObjectGroup> ExistingNoCopyObjects = new Dictionary<UUID,SceneObjectGroup>();
    if (!m_merge)
    {
        m_log.Info("[ARCHIVER]: Clearing all existing scene objects");
        m_scene.DeleteAllSceneObjectsExcept(delegate(SceneObjectGroup existingSOG)
        {
            // Return true if this object should be skipped in the delete.
            // Don't delete any no-copy objects.
            if (NoCopyObjectOrContents(existingSOG))
            {
                ExistingNoCopyObjects.Add(existingSOG.UUID, existingSOG);
                return true;
            }
            return false;
        });
    }

    m_log.InfoFormat("[ARCHIVER]: Loading {0} scene objects. Please wait.", serializedSceneObjects.Count);

    // sceneObject is the one from backup to restore to the scene
    foreach (SceneObjectGroup backupObject in backupObjects)
    {
        SceneObjectGroup existingObject = null;
        UUID originalUUID = OriginalBackupIDs[backupObject.UUID];
        // Don't restore any no-copy objects unless there was an existing matching UUID in the scene.
        if (ExistingNoCopyObjects.ContainsKey(originalUUID))
            existingObject = ExistingNoCopyObjects[originalUUID];
        // existingSOG here means existing NO-COPY object, not deleted from scene above

        if (NoCopyObjectOrContents(backupObject))
        {
            if ((existingObject != null) && !existingObject.IsAttachment)
            {
                // copy only position and rotation from backup
                existingObject.Rotation = backupObject.Rotation;
                existingObject.AbsolutePosition = backupObject.AbsolutePosition;
            }
            // don't restore no-copy items
        }
        else if (m_scene.AddRestoredSceneObject(backupObject, true, false))
        {
            // this may have added 2nd copyable copy if existingObject is no-copy
            sceneObjectsLoadedCount++;
            backupObject.CreateScriptInstances(0, ScriptStartFlags.PostOnRez, m_scene.DefaultScriptEngine, 0, null);
        }
    }

    m_log.InfoFormat("[ARCHIVER]: Restored {0} scene objects to the scene", sceneObjectsLoadedCount);

    int ignoredObjects = serializedSceneObjects.Count - sceneObjectsLoadedCount;

    if (ignoredObjects > 0)
        m_log.WarnFormat("[ARCHIVER]: Ignored {0} scene objects that already existed in the scene", ignoredObjects);

    m_log.InfoFormat("[ARCHIVER]: Successfully loaded archive");

    m_scene.EventManager.TriggerOarFileLoaded(m_requestId, m_errorMessage);
}
/// <summary>
/// Test saving a single inventory item to a version 0.1 IAR and verify the item's file
/// appears in the written archive under the expected path.
/// </summary>
public void TestSaveItemToIarV0_1()
{
    TestHelper.InMethod();
//  log4net.Config.XmlConfigurator.Configure();

    InventoryArchiverModule archiverModule = new InventoryArchiverModule(true);

    Scene scene = SceneSetupHelpers.SetupScene("Inventory");
    SceneSetupHelpers.SetupSceneModules(scene, archiverModule);

    // Create user
    string userFirstName = "Jock";
    string userLastName = "Stirrup";
    string userPassword = "******";
    UUID userId = UUID.Parse("00000000-0000-0000-0000-000000000020");
    UserProfileTestUtils.CreateUserWithInventory(scene, userFirstName, userLastName, userId, userPassword);

    // Create asset
    SceneObjectGroup object1;
    SceneObjectPart part1;
    {
        string partName = "My Little Dog Object";
        UUID ownerId = UUID.Parse("00000000-0000-0000-0000-000000000040");
        PrimitiveBaseShape shape = PrimitiveBaseShape.CreateSphere();
        Vector3 groupPosition = new Vector3(10, 20, 30);
        Quaternion rotationOffset = new Quaternion(20, 30, 40, 50);
        Vector3 offsetPosition = new Vector3(5, 10, 15);

        part1 = new SceneObjectPart(ownerId, shape, groupPosition, rotationOffset, offsetPosition);
        part1.Name = partName;

        object1 = new SceneObjectGroup(part1);
        scene.AddNewSceneObject(object1, false);
    }

    UUID asset1Id = UUID.Parse("00000000-0000-0000-0000-000000000060");
    AssetBase asset1 = AssetHelpers.CreateAsset(asset1Id, object1);
    scene.AssetService.Store(asset1);

    // Create item
    UUID item1Id = UUID.Parse("00000000-0000-0000-0000-000000000080");
    string item1Name = "My Little Dog";
    InventoryItemBase item1 = new InventoryItemBase();
    item1.Name = item1Name;
    item1.AssetID = asset1.FullID;
    item1.ID = item1Id;
    // Place the item in the user's standard "Objects" folder.
    InventoryFolderBase objsFolder = InventoryArchiveUtils.FindFolderByPath(scene.InventoryService, userId, "Objects")[0];
    item1.Folder = objsFolder.ID;
    scene.AddInventoryItem(item1);

    MemoryStream archiveWriteStream = new MemoryStream();
    archiverModule.OnInventoryArchiveSaved += SaveCompleted;

    // SaveCompleted signals this event when the async archive save finishes.
    mre.Reset();
    archiverModule.ArchiveInventory(
        Guid.NewGuid(), userFirstName, userLastName, "Objects/" + item1Name, userPassword, archiveWriteStream);
    mre.WaitOne(60000, false);

    byte[] archive = archiveWriteStream.ToArray();
    MemoryStream archiveReadStream = new MemoryStream(archive);
    TarArchiveReader tar = new TarArchiveReader(archiveReadStream);

    //bool gotControlFile = false;
    bool gotObject1File = false;
    //bool gotObject2File = false;

    string expectedObject1FileName = InventoryArchiveWriteRequest.CreateArchiveItemName(item1);
    string expectedObject1FilePath = string.Format(
        "{0}{1}",
        ArchiveConstants.INVENTORY_PATH,
        expectedObject1FileName);

    string filePath;
    TarArchiveReader.TarEntryType tarEntryType;

//  Console.WriteLine("Reading archive");

    // Scan every entry in the written archive looking for the item's xml file.
    while (tar.ReadEntry(out filePath, out tarEntryType) != null)
    {
        Console.WriteLine("Got {0}", filePath);

//        if (ArchiveConstants.CONTROL_FILE_PATH == filePath)
//        {
//            gotControlFile = true;
//        }

        if (filePath.StartsWith(ArchiveConstants.INVENTORY_PATH) && filePath.EndsWith(".xml"))
        {
//            string fileName = filePath.Remove(0, "Objects/".Length);
//
//            if (fileName.StartsWith(part1.Name))
//            {
                Assert.That(expectedObject1FilePath, Is.EqualTo(filePath));
                gotObject1File = true;
//            }
//            else if (fileName.StartsWith(part2.Name))
//            {
//                Assert.That(fileName, Is.EqualTo(expectedObject2FileName));
//                gotObject2File = true;
//            }
        }
    }

//  Assert.That(gotControlFile, Is.True, "No control file in archive");
    Assert.That(gotObject1File, Is.True, "No item1 file in archive");
//  Assert.That(gotObject2File, Is.True, "No object2 file in archive");

    // TODO: Test presence of more files and contents of files.
}
/// <summary>
/// Execute the request
/// </summary>
/// <param name="loadAll">
/// If true, collect and return every root node loaded; if false, no node tracking is done
/// and null is returned.
/// </param>
/// <returns>
/// A list of the inventory nodes loaded.  If folders were loaded then only the root folders are
/// returned.  Null when <paramref name="loadAll"/> is false or the destination path does not exist.
/// </returns>
public HashSet<InventoryNodeBase> Execute(bool loadAll)
{
    try
    {
        // Tracks the current tar entry so failures can be attributed to a file.
        string filePath = "ERROR";
        int successfulAssetRestores = 0;
        int failedAssetRestores = 0;
        int successfulItemRestores = 0;

        HashSet<InventoryNodeBase> loadedNodes = loadAll ? new HashSet<InventoryNodeBase>() : null;

        List<InventoryFolderBase> folderCandidates
            = InventoryArchiveUtils.FindFolderByPath(
                m_registry.RequestModuleInterface<IInventoryService>(), m_userInfo.PrincipalID, m_invPath);

        if (folderCandidates.Count == 0)
        {
            // Possibly provide an option later on to automatically create this folder if it does not exist
            MainConsole.Instance.ErrorFormat("[INVENTORY ARCHIVER]: Inventory path {0} does not exist", m_invPath);
            return loadedNodes;
        }

        InventoryFolderBase rootDestinationFolder = folderCandidates[0];
        archive = new TarArchiveReader(m_loadStream);

        // In order to load identically named folders, we need to keep track of the folders that we have already
        // resolved
        Dictionary<string, InventoryFolderBase> resolvedFolders = new Dictionary<string, InventoryFolderBase>();

        byte[] data;
        TarArchiveReader.TarEntryType entryType;

        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
            {
                if (LoadAsset(filePath, data))
                {
                    successfulAssetRestores++;

                    // Progress is reported only on successful restores; previously the check ran on
                    // failures too, repeatedly logging "Loaded 0 assets..." (0 % 50 == 0).
                    if (successfulAssetRestores % 50 == 0)
                        MainConsole.Instance.InfoFormat(
                            "[INVENTORY ARCHIVER]: Loaded {0} assets...", successfulAssetRestores);
                }
                else
                {
                    failedAssetRestores++;
                }
            }
            else if (filePath.StartsWith(ArchiveConstants.INVENTORY_PATH))
            {
                filePath = filePath.Substring(ArchiveConstants.INVENTORY_PATH.Length);

                // Trim off the file portion if we aren't already dealing with a directory path
                if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY != entryType)
                    filePath = filePath.Remove(filePath.LastIndexOf("/") + 1);

                InventoryFolderBase foundFolder
                    = ReplicateArchivePathToUserInventory(
                        filePath, rootDestinationFolder, ref resolvedFolders, ref loadedNodes);

                if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY != entryType)
                {
                    InventoryItemBase item = LoadItem(data, foundFolder);

                    if (item != null)
                    {
                        successfulItemRestores++;
                        if (successfulItemRestores % 50 == 0)
                            MainConsole.Instance.InfoFormat(
                                "[INVENTORY ARCHIVER]: Loaded {0} items...", successfulItemRestores);

                        // If we aren't loading the folder containing the item then we'll need to update the
                        // viewer separately for that item.
                        if (loadAll && !loadedNodes.Contains(foundFolder))
                            loadedNodes.Add(item);
                    }
                    item = null;
                }
            }
            data = null;
        }

        MainConsole.Instance.InfoFormat(
            "[INVENTORY ARCHIVER]: Successfully loaded {0} assets with {1} failures",
            successfulAssetRestores, failedAssetRestores);
        MainConsole.Instance.InfoFormat("[INVENTORY ARCHIVER]: Successfully loaded {0} items", successfulItemRestores);

        return loadedNodes;
    }
    finally
    {
        // Always release the source stream, even on exceptions thrown mid-load.
        m_loadStream.Close();
    }
}
/// <summary>
/// Test that saving an OAR with the "noassets" option writes object files but no asset
/// files into the archive.
/// </summary>
public void TestSaveOarNoAssets()
{
    TestHelpers.InMethod();
//  log4net.Config.XmlConfigurator.Configure();

    SceneObjectPart part1 = CreateSceneObjectPart1();
    SceneObjectGroup sog1 = new SceneObjectGroup(part1);
    m_scene.AddNewSceneObject(sog1, false);

    SceneObjectPart part2 = CreateSceneObjectPart2();

    // Give the second object a notecard item so there is a referenced asset that
    // would normally be saved.
    AssetNotecard nc = new AssetNotecard();
    nc.BodyText = "Hello World!";
    nc.Encode();
    UUID ncAssetUuid = new UUID("00000000-0000-0000-1000-000000000000");
    UUID ncItemUuid = new UUID("00000000-0000-0000-1100-000000000000");
    AssetBase ncAsset = AssetHelpers.CreateAsset(ncAssetUuid, AssetType.Notecard, nc.AssetData, UUID.Zero);
    m_scene.AssetService.Store(ncAsset);
    SceneObjectGroup sog2 = new SceneObjectGroup(part2);
    TaskInventoryItem ncItem = new TaskInventoryItem { Name = "ncItem", AssetID = ncAssetUuid, ItemID = ncItemUuid };
    part2.Inventory.AddInventoryItem(ncItem, true);

    m_scene.AddNewSceneObject(sog2, false);

    MemoryStream archiveWriteStream = new MemoryStream();

    Guid requestId = new Guid("00000000-0000-0000-0000-808080808080");

    Dictionary<string, Object> options = new Dictionary<string, Object>();
    options.Add("noassets", true);
    m_archiverModule.ArchiveRegion(archiveWriteStream, requestId, options);

    // Don't wait for completion - with --noassets save oar happens synchronously
//  Monitor.Wait(this, 60000);

    Assert.That(m_lastRequestId, Is.EqualTo(requestId));

    byte[] archive = archiveWriteStream.ToArray();
    MemoryStream archiveReadStream = new MemoryStream(archive);
    TarArchiveReader tar = new TarArchiveReader(archiveReadStream);

    List<string> foundPaths = new List<string>();
    List<string> expectedPaths = new List<string>();
    expectedPaths.Add(ArchiveHelpers.CreateObjectPath(sog1));
    expectedPaths.Add(ArchiveHelpers.CreateObjectPath(sog2));

    string filePath;
    TarArchiveReader.TarEntryType tarEntryType;

    // The control file must be the first entry and must be parseable.
    byte[] data = tar.ReadEntry(out filePath, out tarEntryType);
    Assert.That(filePath, Is.EqualTo(ArchiveConstants.CONTROL_FILE_PATH));

    Dictionary<string, object> archiveOptions = new Dictionary<string, object>();
    ArchiveReadRequest arr = new ArchiveReadRequest(m_scene, (Stream)null, Guid.Empty, archiveOptions);
    arr.LoadControlFile(filePath, data, new DearchiveScenesInfo());

    Assert.That(arr.ControlFileLoaded, Is.True);

    // Scan remaining entries: any asset file means "noassets" was ignored.
    while (tar.ReadEntry(out filePath, out tarEntryType) != null)
    {
        if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
        {
            Assert.Fail("Asset was found in saved oar of TestSaveOarNoAssets()");
        }
        else if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
        {
            foundPaths.Add(filePath);
        }
    }

    Assert.That(foundPaths, Is.EquivalentTo(expectedPaths));

    // TODO: Test presence of more files and contents of files.
}
/// <summary>
/// Run the inventory archive load.
/// </summary>
/// <remarks>
/// Only call this once per request object; construct a new request to load another IAR.
/// </remarks>
/// <returns>
/// The set of inventory nodes loaded.  When folders were loaded, only the root folders are included.
/// </returns>
/// <exception cref="System.Exception">Thrown if the load fails.</exception>
public HashSet<InventoryNodeBase> Execute()
{
    try
    {
        string entryName = "ERROR";

        // Resolve the destination folder; bail out early if the inventory path doesn't exist.
        List<InventoryFolderBase> destinationCandidates
            = InventoryArchiveUtils.FindFolderByPath(
                m_scene.InventoryService, m_userInfo.PrincipalID, m_invPath);

        if (destinationCandidates.Count == 0)
        {
            // Possibly provide an option later on to automatically create this folder if it does not exist
            m_log.ErrorFormat("[INVENTORY ARCHIVER]: Inventory path {0} does not exist", m_invPath);
            return m_loadedNodes;
        }

        m_rootDestinationFolder = destinationCandidates[0];
        archive = new TarArchiveReader(m_loadStream);

        TarArchiveReader.TarEntryType entryKind;
        byte[] entryData;

        // Dispatch each tar entry to the matching loader based on its path.
        while ((entryData = archive.ReadEntry(out entryName, out entryKind)) != null)
        {
            if (entryName == ArchiveConstants.CONTROL_FILE_PATH)
                LoadControlFile(entryName, entryData);
            else if (entryName.StartsWith(ArchiveConstants.ASSETS_PATH))
                LoadAssetFile(entryName, entryData);
            else if (entryName.StartsWith(ArchiveConstants.INVENTORY_PATH))
                LoadInventoryFile(entryName, entryKind, entryData);
        }

        archive.Close();

        m_log.DebugFormat(
            "[INVENTORY ARCHIVER]: Successfully loaded {0} assets with {1} failures",
            m_successfulAssetRestores, m_failedAssetRestores);
        m_log.InfoFormat("[INVENTORY ARCHIVER]: Successfully loaded {0} items", m_successfulItemRestores);

        return m_loadedNodes;
    }
    finally
    {
        // The source stream is always released, whether or not the load succeeded.
        m_loadStream.Close();
    }
}
/// <summary>
/// Dearchive (load) a version 0.* OAR from m_loadStream into the scene, optionally merging
/// with existing content, applying region offsets/flips, and repairing unresolvable
/// creator/owner UUIDs by substituting the estate owner.
/// </summary>
private void DearchiveRegion0DotStar()
{
    int successfulAssetRestores = 0;
    int failedAssetRestores = 0;
    //List<string> serialisedSceneObjects = new List<string>();
    List<string> serialisedParcels = new List<string>();
    // Tracks the current tar entry so the catch block can report which file failed.
    string filePath = "NONE";
    DateTime start = DateTime.Now;

    TarArchiveReader archive = new TarArchiveReader(m_loadStream);

    byte[] data;
    TarArchiveReader.TarEntryType entryType;

    // Single low-priority worker used later for asynchronous asset saving.
    if (!m_skipAssets)
        m_threadpool = new Aurora.Framework.AuroraThreadPool(new Aurora.Framework.AuroraThreadPoolStartInfo()
        {
            Threads = 1,
            priority = System.Threading.ThreadPriority.BelowNormal
        });

    IBackupModule backup = m_scene.RequestModuleInterface<IBackupModule>();
    if (!m_merge)
    {
        DateTime before = DateTime.Now;
        m_log.Info("[ARCHIVER]: Clearing all existing scene objects");
        if (backup != null)
            backup.DeleteAllSceneObjects();
        m_log.Info("[ARCHIVER]: Cleared all existing scene objects in " + (DateTime.Now - before).Minutes + ":" + (DateTime.Now - before).Seconds);
    }

    IScriptModule[] modules = m_scene.RequestModuleInterfaces<IScriptModule>();
    //Disable the script engine so that it doesn't load in the background and kill OAR loading
    foreach (IScriptModule module in modules)
    {
        module.Disabled = true;
    }
    //Disable backup for now as well
    if (backup != null)
        backup.LoadingPrims = true;

    IRegionSerialiserModule serialiser = m_scene.RequestModuleInterface<IRegionSerialiserModule>();
    int sceneObjectsLoadedCount = 0;

    //We save the groups so that we can back them up later
    List<SceneObjectGroup> groupsToBackup = new List<SceneObjectGroup>();

    try
    {
        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            //m_log.DebugFormat(
            //    "[ARCHIVER]: Successfully read {0} ({1} bytes)", filePath, data.Length);

            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
                continue;

            if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
            {
                /*
                m_log.DebugFormat("[ARCHIVER]: Loading xml with raw size {0}", serialisedSceneObject.Length);

                // Really large xml files (multi megabyte) appear to cause
                // memory problems
                // when loading the xml.  But don't enable this check yet

                if (serialisedSceneObject.Length > 5000000)
                {
                    m_log.Error("[ARCHIVER]: Ignoring xml since size > 5000000);");
                    continue;
                }
                */
                SceneObjectGroup sceneObject = (SceneObjectGroup)serialiser.DeserializeGroupFromXml2(data, m_scene);
                if (sceneObject == null)
                {
                    //! big error!
                    m_log.Error("Error reading SOP XML (Please mantis this!): " + m_asciiEncoding.GetString(data));
                    continue;
                }

                // Repair identities that don't exist on this grid and strip sit targets.
                foreach (SceneObjectPart part in sceneObject.ChildrenList)
                {
                    if (!ResolveUserUuid(part.CreatorID))
                        part.CreatorID = m_scene.RegionInfo.EstateSettings.EstateOwner;

                    if (!ResolveUserUuid(part.OwnerID))
                        part.OwnerID = m_scene.RegionInfo.EstateSettings.EstateOwner;

                    if (!ResolveUserUuid(part.LastOwnerID))
                        part.LastOwnerID = m_scene.RegionInfo.EstateSettings.EstateOwner;

                    // And zap any troublesome sit target information
                    part.SitTargetOrientation = new Quaternion(0, 0, 0, 1);
                    part.SitTargetPosition = new Vector3(0, 0, 0);

                    // Fix ownership/creator of inventory items
                    // Not doing so results in inventory items
                    // being no copy/no mod for everyone
                    lock (part.TaskInventory)
                    {
                        TaskInventoryDictionary inv = part.TaskInventory;
                        foreach (KeyValuePair<UUID, TaskInventoryItem> kvp in inv)
                        {
                            if (!ResolveUserUuid(kvp.Value.OwnerID))
                            {
                                kvp.Value.OwnerID = m_scene.RegionInfo.EstateSettings.EstateOwner;
                            }
                            if (!ResolveUserUuid(kvp.Value.CreatorID))
                            {
                                kvp.Value.CreatorID = m_scene.RegionInfo.EstateSettings.EstateOwner;
                            }
                        }
                    }
                }

                //Add the offsets of the region
                Vector3 newPos = new Vector3(sceneObject.AbsolutePosition.X + m_offsetX,
                    sceneObject.AbsolutePosition.Y + m_offsetY,
                    sceneObject.AbsolutePosition.Z + m_offsetZ);
                if (m_flipX)
                    newPos.X = m_scene.RegionInfo.RegionSizeX - newPos.X;
                if (m_flipY)
                    newPos.Y = m_scene.RegionInfo.RegionSizeY - newPos.Y;
                sceneObject.SetAbsolutePosition(false, newPos);

                if (m_scene.SceneGraph.AddPrimToScene(sceneObject))
                {
                    groupsToBackup.Add(sceneObject);
                    sceneObject.ScheduleGroupUpdate(PrimUpdateFlags.ForcedFullUpdate);
                    // BUGFIX: this counter was previously also incremented unconditionally after
                    // this if-block, double-counting successes and counting failed adds too.
                    sceneObjectsLoadedCount++;
                    sceneObject.CreateScriptInstances(0, false, 0, UUID.Zero);
                    sceneObject.ResumeScripts();
                }
                if (sceneObjectsLoadedCount % 250 == 0)
                    m_log.Info("[ARCHIVER]: Loaded " + sceneObjectsLoadedCount + " objects...");
            }
            else if (!m_skipAssets && filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
            {
                if (LoadAsset(filePath, data))
                    successfulAssetRestores++;
                else
                    failedAssetRestores++;

                if ((successfulAssetRestores + failedAssetRestores) % 250 == 0)
                    m_log.Info("[ARCHIVER]: Loaded " + successfulAssetRestores + " assets and failed to load " + failedAssetRestores + " assets...");
            }
            else if (filePath.StartsWith(ArchiveConstants.TERRAINS_PATH))
            {
                LoadTerrain(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.SETTINGS_PATH))
            {
                LoadRegionSettings(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.LANDDATA_PATH))
            {
                // Parcels are buffered and applied after the read loop completes.
                serialisedParcels.Add(m_utf8Encoding.GetString(data));
            }
            else if (filePath == ArchiveConstants.CONTROL_FILE_PATH)
            {
                LoadControlFile(filePath, data);
            }
            else
            {
                m_log.Debug("[ARCHIVER]:UNKNOWN PATH: " + filePath);
            }
        }
        //m_log.Debug("[ARCHIVER]: Reached end of archive");
    }
    catch (Exception e)
    {
        m_log.ErrorFormat(
            "[ARCHIVER]: Aborting load with error in archive file {0}. {1}", filePath, e);
        m_errorMessage += e.ToString();
        m_scene.EventManager.TriggerOarFileLoaded(UUID.Zero.Guid, m_errorMessage);
        return;
    }
    finally
    {
        archive.Close();
        m_loadStream.Close();
        m_loadStream.Dispose();

        //Re-enable now that we are done
        foreach (IScriptModule module in modules)
        {
            module.Disabled = false;
        }
        //Reset backup too
        if (backup != null)
            backup.LoadingPrims = false;
    }

    //Now back up the prims
    foreach (SceneObjectGroup grp in groupsToBackup)
    {
        //Backup!
        grp.HasGroupChanged = true;
    }

    if (!m_skipAssets)
    {
        // Save restored assets either on the background worker or inline.
        if (m_useAsync && !AssetSaverIsRunning)
            m_threadpool.QueueEvent(SaveAssets, 0);
        else if (!AssetSaverIsRunning)
            SaveAssets();
    }

    if (!m_skipAssets)
    {
        m_log.InfoFormat("[ARCHIVER]: Restored {0} assets", successfulAssetRestores);

        if (failedAssetRestores > 0)
        {
            m_log.ErrorFormat("[ARCHIVER]: Failed to load {0} assets", failedAssetRestores);
            m_errorMessage += String.Format("Failed to load {0} assets", failedAssetRestores);
        }
    }

    // Try to retain the original creator/owner/lastowner if their uuid is present on this grid
    // otherwise, use the master avatar uuid instead

    // Reload serialized parcels
    if (!m_merge)
    {
        m_log.InfoFormat("[ARCHIVER]: Loading {0} parcels. Please wait.", serialisedParcels.Count);
        List<LandData> landData = new List<LandData>();
        foreach (string serialisedParcel in serialisedParcels)
        {
            LandData parcel = LandDataSerializer.Deserialize(serialisedParcel);
            if (!ResolveUserUuid(parcel.OwnerID))
                parcel.OwnerID = m_scene.RegionInfo.EstateSettings.EstateOwner;
            landData.Add(parcel);
        }
        IParcelManagementModule parcelManagementModule = m_scene.RequestModuleInterface<IParcelManagementModule>();
        if (parcelManagementModule != null)
            parcelManagementModule.ClearAllParcels();
        if (landData.Count > 0)
        {
            m_scene.EventManager.TriggerIncomingLandDataFromStorage(landData);
            //Update the database as well!
            if (parcelManagementModule != null)
            {
                foreach (LandData parcel in landData)
                {
                    parcelManagementModule.UpdateLandObject(parcel.LocalID, parcel);
                }
            }
        }
        else if (parcelManagementModule != null)
            parcelManagementModule.ResetSimLandObjects();
        m_log.InfoFormat("[ARCHIVER]: Restored {0} parcels.", landData.Count);
        //Clean it out
        landData.Clear();
        serialisedParcels.Clear();
    }

    m_log.InfoFormat("[ARCHIVER]: Successfully loaded archive in " + (DateTime.Now - start).Minutes + ":" + (DateTime.Now - start).Seconds);

    m_validUserUuids.Clear();
    m_scene.EventManager.TriggerOarFileLoaded(UUID.Zero.Guid, m_errorMessage);
}
/// <summary>
/// Searches through the files in the archive for the control file, and reads it.
/// We must read the control file first, in order to know which regions are available.
/// </summary>
/// <remarks>
/// In most cases the control file *is* first, since that's how we create archives. However,
/// it's possible that someone rewrote the archive externally so we can't rely on this fact.
/// </remarks>
/// <param name="archive">Receives a reader positioned after the control file, or at the start
/// of a freshly reopened archive if the stream had to be reset.</param>
/// <param name="dearchivedScenes">Receives the scene information parsed from the control file.</param>
private void FindAndLoadControlFile(out TarArchiveReader archive, out DearchiveScenesInfo dearchivedScenes)
{
    archive = new TarArchiveReader(m_loadStream);
    dearchivedScenes = new DearchiveScenesInfo();

    string filePath;
    byte[] data;
    TarArchiveReader.TarEntryType entryType;
    bool firstFile = true;

    while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
    {
        if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
            continue;

        if (filePath == ArchiveConstants.CONTROL_FILE_PATH)
        {
            LoadControlFile(filePath, data, dearchivedScenes);

            // Find which scenes are available in the simulator
            ArchiveScenesGroup simulatorScenes = new ArchiveScenesGroup();
            SceneManager.Instance.ForEachScene(delegate(Scene scene2)
            {
                simulatorScenes.AddScene(scene2);
            });
            simulatorScenes.CalcSceneLocations();
            dearchivedScenes.SetSimulatorScenes(m_rootScene, simulatorScenes);

            // If the control file wasn't the first file then reset the read pointer
            if (!firstFile)
            {
                m_log.Warn("Control file wasn't the first file in the archive");
                if (m_loadStream.CanSeek)
                {
                    // Cheap path: rewind the existing stream.
                    m_loadStream.Seek(0, SeekOrigin.Begin);
                }
                else if (m_loadPath != null)
                {
                    // Non-seekable stream (e.g. gzip): close everything and reopen from the
                    // original file path instead.
                    archive.Close();
                    archive = null;
                    m_loadStream.Close();
                    m_loadStream = null;
                    m_loadStream = new GZipStream(ArchiveHelpers.GetStream(m_loadPath), CompressionMode.Decompress);
                    archive = new TarArchiveReader(m_loadStream);
                }
                else
                {
                    // There isn't currently a scenario where this happens, but it's best to add a check just in case
                    throw new Exception("Error reading archive: control file wasn't the first file, and the input stream doesn't allow seeking");
                }
            }

            return;
        }

        firstFile = false;
    }

    throw new Exception("Control file not found");
}
/// <summary>
/// Execute the request
/// </summary>
/// <returns>
/// A list of the inventory nodes loaded.  If folders were loaded then only the root folders are
/// returned.  Empty if the destination inventory path does not exist.
/// </returns>
public List<InventoryNodeBase> Execute()
{
    // Tracks the current tar entry so failures can be attributed to a file.
    string filePath = "ERROR";
    int successfulAssetRestores = 0;
    int failedAssetRestores = 0;
    int successfulItemRestores = 0;
    List<InventoryNodeBase> nodesLoaded = new List<InventoryNodeBase>();

    //InventoryFolderImpl rootDestinationFolder = m_userInfo.RootFolder.FindFolderByPath(m_invPath);
    InventoryFolderBase rootDestinationFolder
        = InventoryArchiveUtils.FindFolderByPath(
            m_scene.InventoryService, m_userInfo.UserProfile.ID, m_invPath);

    if (null == rootDestinationFolder)
    {
        // Possibly provide an option later on to automatically create this folder if it does not exist
        m_log.ErrorFormat("[INVENTORY ARCHIVER]: Inventory path {0} does not exist", m_invPath);
        return nodesLoaded;
    }

    archive = new TarArchiveReader(m_loadStream);

    // In order to load identically named folders, we need to keep track of the folders that we have already
    // created
    Dictionary<string, InventoryFolderBase> foldersCreated = new Dictionary<string, InventoryFolderBase>();

    byte[] data;
    TarArchiveReader.TarEntryType entryType;

    try
    {
        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
            {
                if (LoadAsset(filePath, data))
                {
                    successfulAssetRestores++;

                    // Progress is reported only on successful restores; previously the check ran on
                    // failures too, repeatedly logging "Loaded 0 assets..." (0 % 50 == 0).
                    if (successfulAssetRestores % 50 == 0)
                        m_log.DebugFormat("[INVENTORY ARCHIVER]: Loaded {0} assets...", successfulAssetRestores);
                }
                else
                {
                    failedAssetRestores++;
                }
            }
            else if (filePath.StartsWith(ArchiveConstants.INVENTORY_PATH))
            {
                InventoryFolderBase foundFolder
                    = ReplicateArchivePathToUserInventory(
                        filePath, TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType,
                        rootDestinationFolder, foldersCreated, nodesLoaded);

                if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY != entryType)
                {
                    InventoryItemBase item = LoadItem(data, foundFolder);

                    if (item != null)
                    {
                        successfulItemRestores++;

                        // If we're loading an item directly into the given destination folder then we need to record
                        // it separately from any loaded root folders
                        if (rootDestinationFolder == foundFolder)
                            nodesLoaded.Add(item);
                    }
                }
            }
        }
    }
    finally
    {
        // Ensure the archive reader is released even if an entry fails to load.
        archive.Close();
    }

    m_log.DebugFormat(
        "[INVENTORY ARCHIVER]: Successfully loaded {0} assets with {1} failures",
        successfulAssetRestores, failedAssetRestores);
    m_log.InfoFormat("[INVENTORY ARCHIVER]: Successfully loaded {0} items", successfulItemRestores);

    return nodesLoaded;
}
/// <summary>
/// Tests saving an OAR across a 2x2 grid of regions ("all" option) and verifies that the
/// archive contains the control file, per-region object paths and the shared notecard assets.
/// </summary>
public void TestSaveMultiRegionOar()
{
    TestHelpers.InMethod();

    // Create test regions

    int WIDTH = 2;
    int HEIGHT = 2;

    List<Scene> scenes = new List<Scene>();

    // Maps (Directory in OAR file -> scene)
    Dictionary<string, Scene> regionPaths = new Dictionary<string, Scene>();

    // Maps (Scene -> expected object paths)
    Dictionary<UUID, List<string>> expectedPaths = new Dictionary<UUID, List<string>>();

    // List of expected assets
    List<UUID> expectedAssets = new List<UUID>();

    for (uint y = 0; y < HEIGHT; y++)
    {
        for (uint x = 0; x < WIDTH; x++)
        {
            Scene scene;
            if (x == 0 && y == 0)
            {
                scene = m_scene; // this scene was already created in SetUp()
            }
            else
            {
                scene = m_sceneHelpers.SetupScene(
                    string.Format("Unit test region {0}", (y * WIDTH) + x + 1),
                    UUID.Random(), 1000 + x, 1000 + y);
                SceneHelpers.SetupSceneModules(
                    scene, new ArchiverModule(), m_serialiserModule, new TerrainModule());
            }
            scenes.Add(scene);

            // Directory name inside the OAR is "<col>_<row>_<region name>" with spaces replaced.
            string dir = String.Format("{0}_{1}_{2}", x + 1, y + 1, scene.RegionInfo.RegionName.Replace(" ", "_"));
            regionPaths[dir] = scene;

            SceneObjectGroup sog1;
            SceneObjectGroup sog2;
            UUID ncAssetUuid;

            CreateTestObjects(scene, out sog1, out sog2, out ncAssetUuid);

            expectedPaths[scene.RegionInfo.RegionID] = new List<string>();
            expectedPaths[scene.RegionInfo.RegionID].Add(ArchiveHelpers.CreateObjectPath(sog1));
            expectedPaths[scene.RegionInfo.RegionID].Add(ArchiveHelpers.CreateObjectPath(sog2));

            expectedAssets.Add(ncAssetUuid);
        }
    }

    // Save OAR
    MemoryStream archiveWriteStream = new MemoryStream();
    m_scene.EventManager.OnOarFileSaved += SaveCompleted;

    Guid requestId = new Guid("00000000-0000-0000-0000-808080808080");

    Dictionary<string, Object> options = new Dictionary<string, Object>();
    options.Add("all", true);

    // Archiving is asynchronous; SaveCompleted pulses this monitor when the OAR is written.
    lock (this)
    {
        m_archiverModule.ArchiveRegion(archiveWriteStream, requestId, options);
        Monitor.Wait(this, 60000);
    }

    // Check that the OAR contains the expected data
    Assert.That(m_lastRequestId, Is.EqualTo(requestId));

    byte[] archive = archiveWriteStream.ToArray();
    MemoryStream archiveReadStream = new MemoryStream(archive);
    TarArchiveReader tar = new TarArchiveReader(archiveReadStream);

    Dictionary<UUID, List<string>> foundPaths = new Dictionary<UUID, List<string>>();
    List<UUID> foundAssets = new List<UUID>();

    foreach (Scene scene in scenes)
    {
        foundPaths[scene.RegionInfo.RegionID] = new List<string>();
    }

    string filePath;
    TarArchiveReader.TarEntryType tarEntryType;

    // First entry must be the control file; check it parses.
    byte[] data = tar.ReadEntry(out filePath, out tarEntryType);
    Assert.That(filePath, Is.EqualTo(ArchiveConstants.CONTROL_FILE_PATH));

    Dictionary<string, object> archiveOptions = new Dictionary<string, object>();
    ArchiveReadRequest arr = new ArchiveReadRequest(m_scene, (Stream)null, Guid.Empty, archiveOptions);
    arr.LoadControlFile(filePath, data, new DearchiveScenesInfo());

    Assert.That(arr.ControlFileLoaded, Is.True);

    while (tar.ReadEntry(out filePath, out tarEntryType) != null)
    {
        if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
        {
            // Assets are shared, so this file doesn't belong to any specific region.
            string fileName = filePath.Remove(0, ArchiveConstants.ASSETS_PATH.Length);
            if (fileName.EndsWith("_notecard.txt"))
            {
                foundAssets.Add(UUID.Parse(fileName.Substring(0, fileName.Length - "_notecard.txt".Length)));
            }
        }
        else
        {
            // This file belongs to one of the regions. Find out which one.
            Assert.IsTrue(filePath.StartsWith(ArchiveConstants.REGIONS_PATH));

            // Split into at most 3 parts: "regions" / "<region dir>" / "<relative path>".
            string[] parts = filePath.Split(new Char[] { '/' }, 3);
            Assert.AreEqual(3, parts.Length);
            string regionDirectory = parts[1];
            string relativePath = parts[2];
            Scene scene = regionPaths[regionDirectory];

            if (relativePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
            {
                foundPaths[scene.RegionInfo.RegionID].Add(relativePath);
            }
        }
    }

    Assert.AreEqual(scenes.Count, foundPaths.Count);
    foreach (Scene scene in scenes)
    {
        Assert.That(foundPaths[scene.RegionInfo.RegionID], Is.EquivalentTo(expectedPaths[scene.RegionInfo.RegionID]));
    }

    Assert.That(foundAssets, Is.EquivalentTo(expectedAssets));
}
/// <summary>
/// Tests that saving an OAR with the "noassets" option writes object entries but no asset
/// entries. With --noassets the save runs synchronously, so no event wait is needed.
/// </summary>
public void TestSaveOarNoAssets()
{
    TestHelpers.InMethod();
    // log4net.Config.XmlConfigurator.Configure();

    // First scene object: a plain part.
    SceneObjectPart part1 = CreateSceneObjectPart1();
    SceneObjectGroup group1 = new SceneObjectGroup(part1);
    m_scene.AddNewSceneObject(group1, false);

    // Second scene object: carries a notecard in its task inventory, backed by a stored asset.
    SceneObjectPart part2 = CreateSceneObjectPart2();

    AssetNotecard nc = new AssetNotecard();
    nc.BodyText = "Hello World!";
    nc.Encode();

    UUID ncAssetUuid = new UUID("00000000-0000-0000-1000-000000000000");
    UUID ncItemUuid = new UUID("00000000-0000-0000-1100-000000000000");
    AssetBase ncAsset
        = AssetHelpers.CreateAsset(ncAssetUuid, AssetType.Notecard, nc.AssetData, UUID.Zero);
    m_scene.AssetService.Store(ncAsset);

    SceneObjectGroup group2 = new SceneObjectGroup(part2);
    TaskInventoryItem ncItem
        = new TaskInventoryItem { Name = "ncItem", AssetID = ncAssetUuid, ItemID = ncItemUuid };
    part2.Inventory.AddInventoryItem(ncItem, true);
    m_scene.AddNewSceneObject(group2, false);

    // Save the OAR with assets suppressed.
    MemoryStream writeStream = new MemoryStream();
    Guid requestId = new Guid("00000000-0000-0000-0000-808080808080");

    Dictionary<string, Object> options = new Dictionary<string, Object>();
    options.Add("noassets", true);

    m_archiverModule.ArchiveRegion(writeStream, requestId, options);

    // Don't wait for completion - with --noassets save oar happens synchronously
    // Monitor.Wait(this, 60000);

    Assert.That(m_lastRequestId, Is.EqualTo(requestId));

    // Read the archive back and verify its contents.
    TarArchiveReader tar = new TarArchiveReader(new MemoryStream(writeStream.ToArray()));

    List<string> expectedPaths = new List<string>();
    expectedPaths.Add(ArchiveHelpers.CreateObjectPath(group1));
    expectedPaths.Add(ArchiveHelpers.CreateObjectPath(group2));

    List<string> foundPaths = new List<string>();

    string entryPath;
    TarArchiveReader.TarEntryType entryKind;

    // The control file must be the first entry, and it must parse cleanly.
    byte[] controlData = tar.ReadEntry(out entryPath, out entryKind);
    Assert.That(entryPath, Is.EqualTo(ArchiveConstants.CONTROL_FILE_PATH));

    Dictionary<string, object> archiveOptions = new Dictionary<string, object>();
    ArchiveReadRequest arr = new ArchiveReadRequest(m_scene, (Stream)null, Guid.Empty, archiveOptions);
    arr.LoadControlFile(entryPath, controlData, new DearchiveScenesInfo());
    Assert.That(arr.ControlFileLoaded, Is.True);

    // No asset entry may appear; collect the object entries for comparison.
    while (tar.ReadEntry(out entryPath, out entryKind) != null)
    {
        if (entryPath.StartsWith(ArchiveConstants.ASSETS_PATH))
            Assert.Fail("Asset was found in saved oar of TestSaveOarNoAssets()");
        else if (entryPath.StartsWith(ArchiveConstants.OBJECTS_PATH))
            foundPaths.Add(entryPath);
    }

    Assert.That(foundPaths, Is.EquivalentTo(expectedPaths));

    // TODO: Test presence of more files and contents of files.
}
/// <summary>
/// Tests saving a full OAR (assets included) and verifies the control file, the notecard
/// asset file and the object paths are present in the archive.
/// </summary>
public void TestSaveOar()
{
    TestHelpers.InMethod();
    // log4net.Config.XmlConfigurator.Configure();

    SceneObjectGroup sog1;
    SceneObjectGroup sog2;
    UUID ncAssetUuid;
    CreateTestObjects(m_scene, out sog1, out sog2, out ncAssetUuid);

    MemoryStream archiveWriteStream = new MemoryStream();
    m_scene.EventManager.OnOarFileSaved += SaveCompleted;

    Guid requestId = new Guid("00000000-0000-0000-0000-808080808080");

    // Archiving is asynchronous; SaveCompleted pulses this monitor when finished.
    lock (this)
    {
        m_archiverModule.ArchiveRegion(archiveWriteStream, requestId);
        //AssetServerBase assetServer = (AssetServerBase)scene.CommsManager.AssetCache.AssetServer;
        //while (assetServer.HasWaitingRequests())
        //    assetServer.ProcessNextRequest();

        Monitor.Wait(this, 60000);
    }

    Assert.That(m_lastRequestId, Is.EqualTo(requestId));

    byte[] archive = archiveWriteStream.ToArray();
    MemoryStream archiveReadStream = new MemoryStream(archive);
    TarArchiveReader tar = new TarArchiveReader(archiveReadStream);

    bool gotNcAssetFile = false;

    string expectedNcAssetFileName = string.Format("{0}_{1}", ncAssetUuid, "notecard.txt");

    List<string> foundPaths = new List<string>();
    List<string> expectedPaths = new List<string>();
    expectedPaths.Add(ArchiveHelpers.CreateObjectPath(sog1));
    expectedPaths.Add(ArchiveHelpers.CreateObjectPath(sog2));

    string filePath;
    TarArchiveReader.TarEntryType tarEntryType;

    // First entry must be the control file; check it parses.
    byte[] data = tar.ReadEntry(out filePath, out tarEntryType);
    Assert.That(filePath, Is.EqualTo(ArchiveConstants.CONTROL_FILE_PATH));

    Dictionary<string, object> archiveOptions = new Dictionary<string, object>();
    ArchiveReadRequest arr = new ArchiveReadRequest(m_scene, (Stream)null, Guid.Empty, archiveOptions);
    arr.LoadControlFile(filePath, data, new DearchiveScenesInfo());

    Assert.That(arr.ControlFileLoaded, Is.True);

    while (tar.ReadEntry(out filePath, out tarEntryType) != null)
    {
        if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
        {
            // The only expected asset is the notecard created by CreateTestObjects().
            string fileName = filePath.Remove(0, ArchiveConstants.ASSETS_PATH.Length);
            Assert.That(fileName, Is.EqualTo(expectedNcAssetFileName));
            gotNcAssetFile = true;
        }
        else if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
        {
            foundPaths.Add(filePath);
        }
    }

    Assert.That(gotNcAssetFile, Is.True, "No notecard asset file in archive");
    Assert.That(foundPaths, Is.EquivalentTo(expectedPaths));

    // TODO: Test presence of more files and contents of files.
}
/// <summary>
/// Save a backup of the sim
/// </summary>
/// <remarks>
/// Writes to "&lt;fileName&gt;.tmp" first and only renames it over the real file at the end, so a
/// failure mid-save does not corrupt the existing backup. If a previous backup file exists, an
/// incremental save is performed: changed entities are re-serialized while unchanged ones are
/// copied straight from the old archive; otherwise a full save is delegated to the
/// IAuroraBackupArchiver module.
/// </remarks>
/// <param name="appendedFilePath">The file path where the backup will be saved</param>
/// <param name="saveAssets">If true, assets referenced by scene entities are gathered and written into the backup.</param>
protected virtual void SaveBackup (string appendedFilePath, bool saveAssets)
{
    if (appendedFilePath == "/")
        appendedFilePath = "";

    IBackupModule backupModule = m_scene.RequestModuleInterface<IBackupModule> ();
    if (backupModule != null && backupModule.LoadingPrims) //Something is changing lots of prims
    {
        m_log.Info ("[Backup]: Not saving backup because the backup module is loading prims");
        return;
    }

    //Save any script state saves that might be around
    IScriptModule[] engines = m_scene.RequestModuleInterfaces<IScriptModule> ();
    try
    {
        if (engines != null)
        {
            foreach (IScriptModule engine in engines)
            {
                if (engine != null)
                {
                    engine.SaveStateSaves ();
                }
            }
        }
    }
    catch(Exception ex)
    {
        m_log.WarnFormat ("[Backup]: Exception caught: {0}", ex.ToString());
    }

    m_log.Info ("[FileBasedSimulationData]: Saving Backup for region " + m_scene.RegionInfo.RegionName);
    string fileName = appendedFilePath + m_scene.RegionInfo.RegionName + m_saveAppenedFileName + ".abackup";
    if (File.Exists (fileName))
    {
        //Do new style saving here!
        // Incremental save: write a new gzipped tar to ".tmp" while reading the previous backup.
        GZipStream m_saveStream = new GZipStream (new FileStream (fileName + ".tmp", FileMode.Create), CompressionMode.Compress);
        TarArchiveWriter writer = new TarArchiveWriter (m_saveStream);
        GZipStream m_loadStream = new GZipStream (new FileStream (fileName, FileMode.Open), CompressionMode.Decompress);
        TarArchiveReader reader = new TarArchiveReader (m_loadStream);

        // Serialize all parcels as binary LLSD.
        writer.WriteDir ("parcels");

        IParcelManagementModule module = m_scene.RequestModuleInterface<IParcelManagementModule> ();
        if (module != null)
        {
            List<ILandObject> landObject = module.AllParcels ();
            foreach (ILandObject parcel in landObject)
            {
                OSDMap parcelMap = parcel.LandData.ToOSD ();
                writer.WriteFile ("parcels/" + parcel.LandData.GlobalID.ToString (), OSDParser.SerializeLLSDBinary (parcelMap));
                parcelMap = null;
            }
        }

        // Serialize the terrain, revert terrain and (optionally) water heightmaps.
        writer.WriteDir ("newstyleterrain");
        writer.WriteDir ("newstylerevertterrain");
        writer.WriteDir ("newstylewater");
        writer.WriteDir ("newstylerevertwater");
        ITerrainModule tModule = m_scene.RequestModuleInterface<ITerrainModule> ();
        if (tModule != null)
        {
            try
            {
                byte[] sdata = WriteTerrainToStream (tModule.TerrainMap);
                writer.WriteFile ("newstyleterrain/" + m_scene.RegionInfo.RegionID.ToString () + ".terrain", sdata);
                sdata = null;
                sdata = WriteTerrainToStream (tModule.TerrainRevertMap);
                writer.WriteFile ("newstylerevertterrain/" + m_scene.RegionInfo.RegionID.ToString () + ".terrain", sdata);
                sdata = null;
                if (tModule.TerrainWaterMap != null)
                {
                    sdata = WriteTerrainToStream (tModule.TerrainWaterMap);
                    writer.WriteFile ("newstylewater/" + m_scene.RegionInfo.RegionID.ToString () + ".terrain", sdata);
                    sdata = null;
                    sdata = WriteTerrainToStream (tModule.TerrainWaterRevertMap);
                    writer.WriteFile ("newstylerevertwater/" + m_scene.RegionInfo.RegionID.ToString () + ".terrain", sdata);
                    sdata = null;
                }
            }
            catch (Exception ex)
            {
                m_log.WarnFormat ("[Backup]: Exception caught: {0}", ex.ToString ());
            }
        }

        IDictionary<UUID, AssetType> assets = new Dictionary<UUID, AssetType>();
        UuidGatherer assetGatherer = new UuidGatherer(m_scene.AssetService);
        ISceneEntity[] saveentities = m_scene.Entities.GetEntities();

        // Entities whose serialized form can be copied from the old archive (unchanged ones).
        List<UUID> entitiesToSave = new List<UUID>();
        foreach(ISceneEntity entity in saveentities)
        {
            try
            {
                // Attachments and temporary prims are never persisted.
                if(entity.IsAttachment ||
                    ((entity.RootChild.Flags & PrimFlags.Temporary) == PrimFlags.Temporary) ||
                    ((entity.RootChild.Flags & PrimFlags.TemporaryOnRez) == PrimFlags.TemporaryOnRez))
                    continue;
                if(entity.HasGroupChanged)
                {
                    entity.HasGroupChanged = false;
                    //Write all entities
                    byte[] xml = ((ISceneObject)entity).ToBinaryXml2();
                    writer.WriteFile("entities/" + entity.UUID.ToString(), xml);
                    xml = null;
                }
                else
                    entitiesToSave.Add(entity.UUID);
                if(saveAssets)
                    assetGatherer.GatherAssetUuids(entity, assets, m_scene);
            }
            catch(Exception ex)
            {
                // On a serialization failure, fall back to copying the old archive's copy.
                m_log.WarnFormat("[Backup]: Exception caught: {0}", ex.ToString());
                entitiesToSave.Add(entity.UUID);
            }
        }

        byte[] data;
        string filePath;
        TarArchiveReader.TarEntryType entryType;
        //Load the archive data that we need
        // Copy unchanged entities verbatim from the previous backup into the new one.
        try
        {
            while ((data = reader.ReadEntry (out filePath, out entryType)) != null)
            {
                if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
                    continue;
                if (filePath.StartsWith ("entities/"))
                {
                    // Strip the 9-character "entities/" prefix to recover the entity UUID.
                    UUID entityID = UUID.Parse (filePath.Remove (0, 9));
                    if (entitiesToSave.Contains (entityID))
                    {
                        writer.WriteFile (filePath, data);
                        entitiesToSave.Remove (entityID);
                    }
                }
                data = null;
            }
        }
        catch (Exception ex)
        {
            m_log.WarnFormat ("[Backup]: Exception caught: {0}", ex.ToString ());
        }

        // Safety net: anything not found in the old archive is re-serialized from the scene.
        if (entitiesToSave.Count > 0)
        {
            m_log.Fatal (entitiesToSave.Count + " PRIMS WERE NOT GOING TO BE SAVED! FORCE SAVING NOW! ");
            foreach (ISceneEntity entity in saveentities)
            {
                if (entitiesToSave.Contains(entity.UUID))
                {
                    if (entity.IsAttachment ||
                        ((entity.RootChild.Flags & PrimFlags.Temporary) == PrimFlags.Temporary) ||
                        ((entity.RootChild.Flags & PrimFlags.TemporaryOnRez) == PrimFlags.TemporaryOnRez))
                        continue;
                    //Write all entities
                    byte[] xml = ((ISceneObject)entity).ToBinaryXml2 ();
                    writer.WriteFile ("entities/" + entity.UUID.ToString (), xml);
                    xml = null;
                }
            }
        }

        // Write out every gathered asset, preferring the cache over the asset service.
        if(saveAssets)
        {
            foreach(UUID assetID in new List<UUID>(assets.Keys))
            {
                try
                {
                    AssetBase asset = m_scene.AssetService.GetCached(assetID.ToString());
                    if(asset == null)
                        asset = m_scene.AssetService.Get(assetID.ToString());
                    WriteAsset(assetID.ToString(), asset, writer);
                }
                catch(Exception ex)
                {
                    m_log.WarnFormat("[Backup]: Exception caught: {0}", ex.ToString());
                }
            }
        }

        writer.Close ();
        m_loadStream.Close ();
        m_saveStream.Close ();
        GC.Collect ();

        if (m_keepOldSave && !m_oldSaveHasBeenSaved)
        {
            //Havn't moved it yet, so make sure the directory exists, then move it
            m_oldSaveHasBeenSaved = true;
            if (!Directory.Exists (m_oldSaveDirectory))
                Directory.CreateDirectory (m_oldSaveDirectory);
            // NOTE(review): this copies the freshly written ".tmp" save, but the intent described
            // below (and for keepOldSave generally) is to preserve the *original* backup —
            // confirm whether the copy source should be fileName rather than fileName + ".tmp".
            File.Copy(fileName + ".tmp", Path.Combine(m_oldSaveDirectory, m_scene.RegionInfo.RegionName + SerializeDateTime() + m_saveAppenedFileName + ".abackup"));
        }
        //Just remove the file
        File.Delete (fileName);
    }
    else
    {
        //Add the .temp since we might need to make a backup and so that if something goes wrong, we don't corrupt the main backup
        GZipStream m_saveStream = new GZipStream (new FileStream (fileName + ".tmp", FileMode.Create), CompressionMode.Compress);
        TarArchiveWriter writer = new TarArchiveWriter (m_saveStream);
        IAuroraBackupArchiver archiver = m_scene.RequestModuleInterface<IAuroraBackupArchiver> ();
        //Turn off prompting so that we don't ask the user questions every time we need to save the backup
        archiver.AllowPrompting = false;
        archiver.SaveRegionBackup (writer, m_scene);
        archiver.AllowPrompting = true;

        //If we got this far, we assume that everything went well, so now we move the stuff around
        if (File.Exists (fileName))
        {
            //If keepOldSave is enabled, the user wants us to move the first backup that we originally loaded from into the oldSaveDirectory
            if(m_keepOldSave && !m_oldSaveHasBeenSaved)
            {
                //Havn't moved it yet, so make sure the directory exists, then move it
                m_oldSaveHasBeenSaved = true;
                if(!Directory.Exists(m_oldSaveDirectory))
                    Directory.CreateDirectory(m_oldSaveDirectory);
                // NOTE(review): as above, this copies the new ".tmp" file even though the comment
                // says the originally loaded backup should be moved — confirm the intended source.
                File.Copy(fileName + ".tmp", Path.Combine(m_oldSaveDirectory, m_scene.RegionInfo.RegionName + SerializeDateTime() + m_saveAppenedFileName + ".abackup"));
            }
            //Just remove the file
            File.Delete(fileName);
        }
    }

    // Clear change flags now that everything has been persisted.
    ISceneEntity[] entities = m_scene.Entities.GetEntities ();
    try
    {
        foreach (ISceneEntity entity in entities)
        {
            if (entity.HasGroupChanged)
                entity.HasGroupChanged = false;
        }
    }
    catch (Exception ex)
    {
        m_log.WarnFormat ("[Backup]: Exception caught: {0}", ex.ToString ());
    }

    //Now make it the full file again
    File.Move (fileName + ".tmp", fileName);
    m_log.Info ("[FileBasedSimulationData]: Saved Backup for region " + m_scene.RegionInfo.RegionName);
}
/// <summary>
/// Test saving a V0.1 OpenSim Inventory Archive (subject to change since there is no fixed format yet).
/// </summary>
/// <remarks>
/// Currently disabled (the [Test] attribute is commented out). Creates a user with inventory,
/// stores a serialized scene object as an asset, files it under the "Objects" folder and then
/// checks the saved IAR contains the item at the expected path.
/// NOTE(review): unlike sibling tests, the log4net configuration call below is left enabled —
/// consider commenting it out if this test is re-enabled.
/// </remarks>
//[Test]
public void TestSaveIarV0_1()
{
    TestHelper.InMethod();
    log4net.Config.XmlConfigurator.Configure();

    InventoryArchiverModule archiverModule = new InventoryArchiverModule();

    Scene scene = SceneSetupHelpers.SetupScene("");
    SceneSetupHelpers.SetupSceneModules(scene, archiverModule);
    CommunicationsManager cm = scene.CommsManager;

    // Create user
    string userFirstName = "Jock";
    string userLastName = "Stirrup";
    UUID userId = UUID.Parse("00000000-0000-0000-0000-000000000020");

    CachedUserInfo userInfo;

    // User creation is asynchronous; InventoryReceived pulses this monitor.
    lock (this)
    {
        userInfo = UserProfileTestUtils.CreateUserWithInventory(
            cm, userFirstName, userLastName, userId, InventoryReceived);
        Monitor.Wait(this, 60000);
    }

    /*
    cm.UserAdminService.AddUser(userFirstName, userLastName, string.Empty, string.Empty, 1000, 1000, userId);
    CachedUserInfo userInfo = cm.UserProfileCacheService.GetUserDetails(userId, InventoryReceived);
    userInfo.FetchInventory();
    for (int i = 0 ; i < 50 ; i++)
    {
        if (userInfo.HasReceivedInventory == true)
            break;
        Thread.Sleep(200);
    }
    Assert.That(userInfo.HasReceivedInventory, Is.True, "FetchInventory timed out (10 seconds)");
    */

    // Create asset
    SceneObjectGroup object1;
    SceneObjectPart part1;
    {
        string partName = "My Little Dog Object";
        UUID ownerId = UUID.Parse("00000000-0000-0000-0000-000000000040");
        PrimitiveBaseShape shape = PrimitiveBaseShape.CreateSphere();
        Vector3 groupPosition = new Vector3(10, 20, 30);
        Quaternion rotationOffset = new Quaternion(20, 30, 40, 50);
        Vector3 offsetPosition = new Vector3(5, 10, 15);

        part1
            = new SceneObjectPart(
                ownerId, shape, groupPosition, rotationOffset, offsetPosition);
        part1.Name = partName;

        object1 = new SceneObjectGroup(part1);
        scene.AddNewSceneObject(object1, false);
    }

    UUID asset1Id = UUID.Parse("00000000-0000-0000-0000-000000000060");
    AssetBase asset1 = new AssetBase();
    asset1.FullID = asset1Id;
    // The asset body is the XML2 serialization of the scene object.
    asset1.Data = Encoding.ASCII.GetBytes(SceneObjectSerializer.ToXml2Format(object1));
    scene.AssetService.Store(asset1);

    // Create item
    UUID item1Id = UUID.Parse("00000000-0000-0000-0000-000000000080");
    InventoryItemBase item1 = new InventoryItemBase();
    item1.Name = "My Little Dog";
    item1.AssetID = asset1.FullID;
    item1.ID = item1Id;
    item1.Folder = userInfo.RootFolder.FindFolderByPath("Objects").ID;
    scene.AddInventoryItem(userId, item1);

    MemoryStream archiveWriteStream = new MemoryStream();
    archiverModule.OnInventoryArchiveSaved += SaveCompleted;

    // Archive saving is asynchronous; SaveCompleted pulses this monitor.
    lock (this)
    {
        archiverModule.ArchiveInventory(userFirstName, userLastName, "Objects", archiveWriteStream);
        Monitor.Wait(this, 60000);
    }

    byte[] archive = archiveWriteStream.ToArray();
    MemoryStream archiveReadStream = new MemoryStream(archive);
    TarArchiveReader tar = new TarArchiveReader(archiveReadStream);

    InventoryFolderImpl objectsFolder = userInfo.RootFolder.FindFolderByPath("Objects");

    //bool gotControlFile = false;
    bool gotObject1File = false;
    //bool gotObject2File = false;

    // Expected archive path: "<inventory path>Objects<sep><folder id>/<item name>_<item id>.xml".
    string expectedObject1FilePath = string.Format(
        "{0}{1}/{2}_{3}.xml",
        ArchiveConstants.INVENTORY_PATH,
        string.Format(
            "Objects{0}{1}",
            ArchiveConstants.INVENTORY_NODE_NAME_COMPONENT_SEPARATOR, objectsFolder.ID),
        item1.Name,
        item1Id);

    // string expectedObject2FileName = string.Format(
    //     "{0}_{1:000}-{2:000}-{3:000}__{4}.xml",
    //     part2.Name,
    //     Math.Round(part2.GroupPosition.X), Math.Round(part2.GroupPosition.Y), Math.Round(part2.GroupPosition.Z),
    //     part2.UUID);

    string filePath;
    TarArchiveReader.TarEntryType tarEntryType;

    Console.WriteLine("Reading archive");

    while (tar.ReadEntry(out filePath, out tarEntryType) != null)
    {
        Console.WriteLine("Got {0}", filePath);

        // if (ArchiveConstants.CONTROL_FILE_PATH == filePath)
        // {
        //     gotControlFile = true;
        // }

        if (filePath.StartsWith(ArchiveConstants.INVENTORY_PATH) && filePath.EndsWith(".xml"))
        {
            // string fileName = filePath.Remove(0, "Objects/".Length);
            //
            // if (fileName.StartsWith(part1.Name))
            // {
            Assert.That(filePath, Is.EqualTo(expectedObject1FilePath));
            gotObject1File = true;
            // }
            // else if (fileName.StartsWith(part2.Name))
            // {
            //     Assert.That(fileName, Is.EqualTo(expectedObject2FileName));
            //     gotObject2File = true;
            // }
        }
    }

    // Assert.That(gotControlFile, Is.True, "No control file in archive");
    Assert.That(gotObject1File, Is.True, "No item1 file in archive");
    // Assert.That(gotObject2File, Is.True, "No object2 file in archive");

    // TODO: Test presence of more files and contents of files.
}
/// <summary>
/// Tests saving a V0.2 OAR containing two objects, one of which carries a notecard asset in
/// its task inventory, and verifies the control file, the asset file and the object paths.
/// </summary>
public void TestSaveOarV0_2()
{
    TestHelper.InMethod();
    //log4net.Config.XmlConfigurator.Configure();

    SceneObjectPart part1 = CreateSceneObjectPart1();
    SceneObjectGroup sog1 = new SceneObjectGroup(part1);
    m_scene.AddNewSceneObject(sog1, false);

    SceneObjectPart part2 = CreateSceneObjectPart2();

    // Notecard asset referenced by part2's task inventory.
    AssetNotecard nc = new AssetNotecard();
    nc.BodyText = "Hello World!";
    nc.Encode();
    UUID ncAssetUuid = new UUID("00000000-0000-0000-1000-000000000000");
    UUID ncItemUuid = new UUID("00000000-0000-0000-1100-000000000000");
    AssetBase ncAsset
        = AssetHelpers.CreateAsset(ncAssetUuid, AssetType.Notecard, nc.AssetData, UUID.Zero);
    m_scene.AssetService.Store(ncAsset);
    SceneObjectGroup sog2 = new SceneObjectGroup(part2);
    TaskInventoryItem ncItem
        = new TaskInventoryItem { Name = "ncItem", AssetID = ncAssetUuid, ItemID = ncItemUuid };
    part2.Inventory.AddInventoryItem(ncItem, true);

    m_scene.AddNewSceneObject(sog2, false);

    MemoryStream archiveWriteStream = new MemoryStream();
    m_scene.EventManager.OnOarFileSaved += SaveCompleted;

    Guid requestId = new Guid("00000000-0000-0000-0000-808080808080");

    // Archiving is asynchronous; SaveCompleted pulses this monitor when finished.
    lock (this)
    {
        m_archiverModule.ArchiveRegion(archiveWriteStream, requestId);
        //AssetServerBase assetServer = (AssetServerBase)scene.CommsManager.AssetCache.AssetServer;
        //while (assetServer.HasWaitingRequests())
        //    assetServer.ProcessNextRequest();

        Monitor.Wait(this, 60000);
    }

    Assert.That(m_lastRequestId, Is.EqualTo(requestId));

    byte[] archive = archiveWriteStream.ToArray();
    MemoryStream archiveReadStream = new MemoryStream(archive);
    TarArchiveReader tar = new TarArchiveReader(archiveReadStream);

    bool gotControlFile = false;
    bool gotNcAssetFile = false;

    string expectedNcAssetFileName = string.Format("{0}_{1}", ncAssetUuid, "notecard.txt");

    List<string> foundPaths = new List<string>();
    List<string> expectedPaths = new List<string>();
    expectedPaths.Add(ArchiveHelpers.CreateObjectPath(sog1));
    expectedPaths.Add(ArchiveHelpers.CreateObjectPath(sog2));

    string filePath;
    TarArchiveReader.TarEntryType tarEntryType;

    while (tar.ReadEntry(out filePath, out tarEntryType) != null)
    {
        if (ArchiveConstants.CONTROL_FILE_PATH == filePath)
        {
            gotControlFile = true;
        }
        else if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
        {
            // The only expected asset entry is the notecard stored above.
            string fileName = filePath.Remove(0, ArchiveConstants.ASSETS_PATH.Length);
            Assert.That(fileName, Is.EqualTo(expectedNcAssetFileName));
            gotNcAssetFile = true;
        }
        else if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
        {
            foundPaths.Add(filePath);
        }
    }

    Assert.That(gotControlFile, Is.True, "No control file in archive");
    Assert.That(gotNcAssetFile, Is.True, "No notecard asset file in archive");
    Assert.That(foundPaths, Is.EquivalentTo(expectedPaths));

    // TODO: Test presence of more files and contents of files.
}
/// <summary>
/// Tests saving a V0.2 OAR with two plain objects and verifies that the control file is
/// written and that each object's entry uses the expected position-encoded file name.
/// </summary>
public void TestSaveOarV0_2()
{
    TestHelper.InMethod();
    //log4net.Config.XmlConfigurator.Configure();

    SceneObjectPart part1 = CreateSceneObjectPart1();
    m_scene.AddNewSceneObject(new SceneObjectGroup(part1), false);

    SceneObjectPart part2 = CreateSceneObjectPart2();
    m_scene.AddNewSceneObject(new SceneObjectGroup(part2), false);

    MemoryStream writeStream = new MemoryStream();
    m_scene.EventManager.OnOarFileSaved += SaveCompleted;

    Guid requestId = new Guid("00000000-0000-0000-0000-808080808080");

    // Archiving completes asynchronously; SaveCompleted pulses this monitor.
    lock (this)
    {
        m_archiverModule.ArchiveRegion(writeStream, requestId);
        //AssetServerBase assetServer = (AssetServerBase)scene.CommsManager.AssetCache.AssetServer;
        //while (assetServer.HasWaitingRequests())
        //    assetServer.ProcessNextRequest();
        Monitor.Wait(this, 60000);
    }

    Assert.That(m_lastRequestId, Is.EqualTo(requestId));

    TarArchiveReader tar = new TarArchiveReader(new MemoryStream(writeStream.ToArray()));

    bool sawControlFile = false;
    bool sawObject1 = false;
    bool sawObject2 = false;

    // File name the serializer is expected to produce for a given part:
    // "<name>_<x>-<y>-<z>__<uuid>.xml" with zero-padded, rounded coordinates.
    Func<SceneObjectPart, string> expectedName = p => string.Format(
        "{0}_{1:000}-{2:000}-{3:000}__{4}.xml",
        p.Name,
        Math.Round(p.GroupPosition.X), Math.Round(p.GroupPosition.Y), Math.Round(p.GroupPosition.Z),
        p.UUID);

    string expectedObject1FileName = expectedName(part1);
    string expectedObject2FileName = expectedName(part2);

    string entryPath;
    TarArchiveReader.TarEntryType entryKind;

    while (tar.ReadEntry(out entryPath, out entryKind) != null)
    {
        if (ArchiveConstants.CONTROL_FILE_PATH == entryPath)
        {
            sawControlFile = true;
            continue;
        }

        if (!entryPath.StartsWith(ArchiveConstants.OBJECTS_PATH))
            continue;

        string objectFileName = entryPath.Remove(0, ArchiveConstants.OBJECTS_PATH.Length);

        if (objectFileName.StartsWith(part1.Name))
        {
            Assert.That(objectFileName, Is.EqualTo(expectedObject1FileName));
            sawObject1 = true;
        }
        else if (objectFileName.StartsWith(part2.Name))
        {
            Assert.That(objectFileName, Is.EqualTo(expectedObject2FileName));
            sawObject2 = true;
        }
    }

    Assert.That(sawControlFile, Is.True, "No control file in archive");
    Assert.That(sawObject1, Is.True, "No object1 file in archive");
    Assert.That(sawObject2, Is.True, "No object2 file in archive");

    // TODO: Test presence of more files and contents of files.
}
/// <summary>
/// Tests saving an IAR rooted at a non-root inventory folder ("f1") and verifies that the
/// folder, its nested folder and both contained items appear at their expected archive paths.
/// </summary>
public void TestSaveNonRootFolderToIar()
{
    TestHelpers.InMethod();
    // TestHelpers.EnableLogging();

    string userFirstName = "Jock";
    string userLastName = "Stirrup";
    string userPassword = "******";
    UUID userId = TestHelpers.ParseTail(0x20);

    UserAccountHelpers.CreateUserWithInventory(m_scene, userFirstName, userLastName, userId, userPassword);

    // Create base folder
    InventoryFolderBase f1
        = UserInventoryHelpers.CreateInventoryFolder(m_scene.InventoryService, userId, "f1", true);

    // Create item1
    SceneObjectGroup so1 = SceneHelpers.CreateSceneObject(1, userId, "My Little Dog Object", 0x5);
    InventoryItemBase i1 = UserInventoryHelpers.AddInventoryItem(m_scene, so1, 0x50, 0x60, "f1");

    // Create embedded folder
    InventoryFolderBase f1_1
        = UserInventoryHelpers.CreateInventoryFolder(m_scene.InventoryService, userId, "f1/f1.1", true);

    // Create embedded item
    SceneObjectGroup so1_1 = SceneHelpers.CreateSceneObject(1, userId, "My Little Cat Object", 0x6);
    InventoryItemBase i2 = UserInventoryHelpers.AddInventoryItem(m_scene, so1_1, 0x500, 0x600, "f1/f1.1");

    MemoryStream archiveWriteStream = new MemoryStream();
    m_archiverModule.OnInventoryArchiveSaved += SaveCompleted;

    // Archiving is asynchronous; SaveCompleted signals the event when the IAR is written.
    mre.Reset();
    m_archiverModule.ArchiveInventory(
        UUID.Random(), userFirstName, userLastName, "f1", userPassword, archiveWriteStream);
    mre.WaitOne(60000, false);

    // Test created iar
    byte[] archive = archiveWriteStream.ToArray();
    MemoryStream archiveReadStream = new MemoryStream(archive);
    TarArchiveReader tar = new TarArchiveReader(archiveReadStream);

    // InventoryArchiveUtils.
    bool gotf1 = false, gotf1_1 = false, gotso1 = false, gotso2 = false;

    // Expected archive paths, built from the same helpers the writer uses.
    string f1FileName
        = string.Format("{0}{1}", ArchiveConstants.INVENTORY_PATH, InventoryArchiveWriteRequest.CreateArchiveFolderName(f1));
    string f1_1FileName
        = string.Format("{0}{1}", f1FileName, InventoryArchiveWriteRequest.CreateArchiveFolderName(f1_1));
    string so1FileName
        = string.Format("{0}{1}", f1FileName, InventoryArchiveWriteRequest.CreateArchiveItemName(i1));
    string so2FileName
        = string.Format("{0}{1}", f1_1FileName, InventoryArchiveWriteRequest.CreateArchiveItemName(i2));

    string filePath;
    TarArchiveReader.TarEntryType tarEntryType;

    while (tar.ReadEntry(out filePath, out tarEntryType) != null)
    {
        // Console.WriteLine("Got {0}", filePath);

        if (filePath == f1FileName)
            gotf1 = true;
        else if (filePath == f1_1FileName)
            gotf1_1 = true;
        else if (filePath == so1FileName)
            gotso1 = true;
        else if (filePath == so2FileName)
            gotso2 = true;
    }

    // Assert.That(gotControlFile, Is.True, "No control file in archive");
    Assert.That(gotf1, Is.True);
    Assert.That(gotf1_1, Is.True);
    Assert.That(gotso1, Is.True);
    Assert.That(gotso2, Is.True);

    // TODO: Test presence of more files and contents of files.
}
/// <summary>
/// Dearchives a version 0.x OAR from m_loadStream into the scene.
/// </summary>
/// <remarks>
/// Streams the tar entries and dispatches each by path: scene objects and parcel data are
/// buffered as strings and loaded after the read pass; assets, terrain, region settings and
/// the control file are processed inline. On any read error the load is aborted and
/// TriggerOarFileLoaded is raised with the accumulated error message. The archive is always
/// closed via the finally block.
/// </remarks>
private void DearchiveRegion0DotStar()
{
    int successfulAssetRestores = 0;
    int failedAssetRestores = 0;
    List<string> serialisedSceneObjects = new List<string>();
    List<string> serialisedParcels = new List<string>();
    string filePath = "NONE";

    TarArchiveReader archive = new TarArchiveReader(m_loadStream);
    byte[] data;
    TarArchiveReader.TarEntryType entryType;

    try
    {
        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            //m_log.DebugFormat(
            //    "[ARCHIVER]: Successfully read {0} ({1} bytes)", filePath, data.Length);

            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
                continue;

            if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
            {
                // Buffer scene objects; they are deserialized after the whole archive is read.
                serialisedSceneObjects.Add(Encoding.UTF8.GetString(data));
            }
            else if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH) && !m_skipAssets)
            {
                if (LoadAsset(filePath, data))
                    successfulAssetRestores++;
                else
                    failedAssetRestores++;

                // Periodic progress logging every 250 processed assets.
                if ((successfulAssetRestores + failedAssetRestores) % 250 == 0)
                    m_log.Debug("[ARCHIVER]: Loaded " + successfulAssetRestores + " assets and failed to load " + failedAssetRestores + " assets...");
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.TERRAINS_PATH))
            {
                LoadTerrain(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.SETTINGS_PATH))
            {
                LoadRegionSettings(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.LANDDATA_PATH))
            {
                // Buffer parcel data; loaded after the read pass, before scene objects.
                serialisedParcels.Add(Encoding.UTF8.GetString(data));
            }
            else if (filePath == ArchiveConstants.CONTROL_FILE_PATH)
            {
                LoadControlFile(filePath, data);
            }
        }

        //m_log.Debug("[ARCHIVER]: Reached end of archive");
    }
    catch (Exception e)
    {
        m_log.ErrorFormat(
            "[ARCHIVER]: Aborting load with error in archive file {0}. {1}", filePath, e);
        m_errorMessage += e.ToString();
        m_scene.EventManager.TriggerOarFileLoaded(m_requestId, m_errorMessage);
        return;
    }
    finally
    {
        archive.Close();
    }

    if (!m_skipAssets)
    {
        m_log.InfoFormat("[ARCHIVER]: Restored {0} assets", successfulAssetRestores);

        if (failedAssetRestores > 0)
        {
            m_log.ErrorFormat("[ARCHIVER]: Failed to load {0} assets", failedAssetRestores);
            m_errorMessage += String.Format("Failed to load {0} assets", failedAssetRestores);
        }
    }

    // A non-merge load replaces the region's existing content entirely.
    if (!m_merge)
    {
        m_log.Info("[ARCHIVER]: Clearing all existing scene objects");
        m_scene.DeleteAllSceneObjects();
    }

    LoadParcels(serialisedParcels);
    LoadObjects(serialisedSceneObjects);

    m_log.InfoFormat("[ARCHIVER]: Successfully loaded archive");

    m_scene.EventManager.TriggerOarFileLoaded(m_requestId, m_errorMessage);
}
/// <summary>
/// Test saving a single inventory item to an IAR with the "noassets" option: the archive must
/// contain the item's xml file but no entries under the assets path.
/// </summary>
public void TestSaveItemToIarNoAssets()
{
    TestHelpers.InMethod();
//            log4net.Config.XmlConfigurator.Configure();

    // Create user
    string userFirstName = "Jock";
    string userLastName = "Stirrup";
    string userPassword = "******";
    UUID userId = UUID.Parse("00000000-0000-0000-0000-000000000020");
    UserAccountHelpers.CreateUserWithInventory(m_scene, userFirstName, userLastName, userId, userPassword);

    // Create asset
    UUID ownerId = UUID.Parse("00000000-0000-0000-0000-000000000040");
    SceneObjectGroup object1 = SceneHelpers.CreateSceneObject(1, ownerId, "My Little Dog Object", 0x50);

    UUID asset1Id = UUID.Parse("00000000-0000-0000-0000-000000000060");
    AssetBase asset1 = AssetHelpers.CreateAsset(asset1Id, object1);
    m_scene.AssetService.Store(asset1);

    // Create item referencing the stored asset, filed under the user's "Objects" folder.
    UUID item1Id = UUID.Parse("00000000-0000-0000-0000-000000000080");
    string item1Name = "My Little Dog";
    InventoryItemBase item1 = new InventoryItemBase();
    item1.Name = item1Name;
    item1.AssetID = asset1.FullID;
    item1.ID = item1Id;
    InventoryFolderBase objsFolder
        = InventoryArchiveUtils.FindFoldersByPath(m_scene.InventoryService, userId, "Objects")[0];
    item1.Folder = objsFolder.ID;
    m_scene.AddInventoryItem(item1);

    MemoryStream archiveWriteStream = new MemoryStream();

    Dictionary<string, Object> options = new Dictionary<string, Object>();
    options.Add("noassets", true);

    // When we're not saving assets, archiving is being done synchronously.
    // (So no event subscription / wait handle is needed here, unlike the asset-saving tests.)
    m_archiverModule.ArchiveInventory(
        UUID.Random(), userFirstName, userLastName, "Objects/" + item1Name, userPassword, archiveWriteStream, options);

    byte[] archive = archiveWriteStream.ToArray();
    MemoryStream archiveReadStream = new MemoryStream(archive);
    TarArchiveReader tar = new TarArchiveReader(archiveReadStream);

    //bool gotControlFile = false;
    bool gotObject1File = false;
    //bool gotObject2File = false;

    string expectedObject1FileName = InventoryArchiveWriteRequest.CreateArchiveItemName(item1);
    string expectedObject1FilePath = string.Format(
        "{0}{1}",
        ArchiveConstants.INVENTORY_PATH,
        expectedObject1FileName);

    string filePath;
    TarArchiveReader.TarEntryType tarEntryType;

//            Console.WriteLine("Reading archive");

    while (tar.ReadEntry(out filePath, out tarEntryType) != null)
    {
        Console.WriteLine("Got {0}", filePath);

//                if (ArchiveConstants.CONTROL_FILE_PATH == filePath)
//                {
//                    gotControlFile = true;
//                }

        if (filePath.StartsWith(ArchiveConstants.INVENTORY_PATH) && filePath.EndsWith(".xml"))
        {
//                    string fileName = filePath.Remove(0, "Objects/".Length);
//
//                    if (fileName.StartsWith(part1.Name))
//                    {
            Assert.That(expectedObject1FilePath, Is.EqualTo(filePath));
            gotObject1File = true;
//                    }
//                    else if (fileName.StartsWith(part2.Name))
//                    {
//                        Assert.That(fileName, Is.EqualTo(expectedObject2FileName));
//                        gotObject2File = true;
//                    }
        }
        else if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
        {
            // "noassets" must suppress all asset entries in the archive.
            Assert.Fail("Found asset path in TestSaveItemToIarNoAssets()");
        }
    }

//            Assert.That(gotControlFile, Is.True, "No control file in archive");
    Assert.That(gotObject1File, Is.True, "No item1 file in archive");
//            Assert.That(gotObject2File, Is.True, "No object2 file in archive");

    // TODO: Test presence of more files and contents of files.
}
/// <summary>
/// Dearchive a version 0.* OAR from m_loadStream into the scene, restoring parcels and
/// scene objects inline after the tar pass.
/// </summary>
/// <remarks>
/// First pass: read every tar entry, loading assets/terrain/settings immediately and buffering
/// serialized parcels and scene objects. Second pass: deserialize and install the buffered
/// parcels, then the buffered objects. Creator/owner/last-owner UUIDs that cannot be resolved
/// on this grid are replaced by the estate owner. On any read error the method logs, records
/// the error, fires the OAR-loaded event and returns; the archive is closed via finally.
/// </remarks>
private void DearchiveRegion0DotStar()
{
    int successfulAssetRestores = 0;
    int failedAssetRestores = 0;
    List<string> serialisedSceneObjects = new List<string>();
    List<string> serialisedParcels = new List<string>();
    // Tracks the entry currently being read so the catch block can report which file failed.
    string filePath = "NONE";

    TarArchiveReader archive = new TarArchiveReader(m_loadStream);
    byte[] data;
    TarArchiveReader.TarEntryType entryType;

    try
    {
        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            //m_log.DebugFormat(
            //    "[ARCHIVER]: Successfully read {0} ({1} bytes)", filePath, data.Length);

            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
                continue;

            if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
            {
                // Buffer object XML; deserialized after the whole archive has been read.
                serialisedSceneObjects.Add(Encoding.UTF8.GetString(data));
            }
            else if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH) && !m_skipAssets)
            {
                if (LoadAsset(filePath, data))
                    successfulAssetRestores++;
                else
                    failedAssetRestores++;

                // Progress log every 250 assets so long restores show signs of life.
                if ((successfulAssetRestores + failedAssetRestores) % 250 == 0)
                    m_log.Debug("[ARCHIVER]: Loaded " + successfulAssetRestores + " assets and failed to load " + failedAssetRestores + " assets...");
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.TERRAINS_PATH))
            {
                // Terrain/settings/parcels are only replaced when not merging into an existing region.
                LoadTerrain(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.SETTINGS_PATH))
            {
                LoadRegionSettings(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.LANDDATA_PATH))
            {
                serialisedParcels.Add(Encoding.UTF8.GetString(data));
            }
            else if (filePath == ArchiveConstants.CONTROL_FILE_PATH)
            {
                LoadControlFile(filePath, data);
            }
        }

        //m_log.Debug("[ARCHIVER]: Reached end of archive");
    }
    catch (Exception e)
    {
        m_log.ErrorFormat(
            "[ARCHIVER]: Aborting load with error in archive file {0}. {1}", filePath, e);
        m_errorMessage += e.ToString();
        m_scene.EventManager.TriggerOarFileLoaded(m_requestId, m_errorMessage);
        return;
    }
    finally
    {
        archive.Close();
    }

    if (!m_skipAssets)
    {
        m_log.InfoFormat("[ARCHIVER]: Restored {0} assets", successfulAssetRestores);

        if (failedAssetRestores > 0)
        {
            m_log.ErrorFormat("[ARCHIVER]: Failed to load {0} assets", failedAssetRestores);
            m_errorMessage += String.Format("Failed to load {0} assets", failedAssetRestores);
        }
    }

    if (!m_merge)
    {
        // Replace (not merge) mode: wipe the region before restoring archive contents.
        m_log.Info("[ARCHIVER]: Clearing all existing scene objects");
        m_scene.DeleteAllSceneObjects();
    }

    // Try to retain the original creator/owner/lastowner if their uuid is present on this grid
    // otherwise, use the master avatar uuid instead

    // Reload serialized parcels
    m_log.InfoFormat("[ARCHIVER]: Loading {0} parcels. Please wait.", serialisedParcels.Count);
    List<LandData> landData = new List<LandData>();
    foreach (string serialisedParcel in serialisedParcels)
    {
        LandData parcel = LandDataSerializer.Deserialize(serialisedParcel);
        if (!ResolveUserUuid(parcel.OwnerID))
            parcel.OwnerID = m_scene.RegionInfo.EstateSettings.EstateOwner;
        landData.Add(parcel);
    }
    m_scene.EventManager.TriggerIncomingLandDataFromStorage(landData);
    m_log.InfoFormat("[ARCHIVER]: Restored {0} parcels.", landData.Count);

    // Reload serialized prims
    m_log.InfoFormat("[ARCHIVER]: Loading {0} scene objects. Please wait.", serialisedSceneObjects.Count);

    IRegionSerialiserModule serialiser = m_scene.RequestModuleInterface<IRegionSerialiserModule>();
    int sceneObjectsLoadedCount = 0;

    foreach (string serialisedSceneObject in serialisedSceneObjects)
    {
        /*
        m_log.DebugFormat("[ARCHIVER]: Loading xml with raw size {0}", serialisedSceneObject.Length);

        // Really large xml files (multi megabyte) appear to cause
        // memory problems
        // when loading the xml. But don't enable this check yet

        if (serialisedSceneObject.Length > 5000000)
        {
            m_log.Error("[ARCHIVER]: Ignoring xml since size > 5000000);");
            continue;
        }
        */

        SceneObjectGroup sceneObject = serialiser.DeserializeGroupFromXml2(serialisedSceneObject);

        // For now, give all incoming scene objects new uuids.  This will allow scenes to be cloned
        // on the same region server and multiple examples a single object archive to be imported
        // to the same scene (when this is possible).
        sceneObject.ResetIDs();

        foreach (SceneObjectPart part in sceneObject.Parts)
        {
            // Fall back to the estate owner for any identity that doesn't exist on this grid.
            if (!ResolveUserUuid(part.CreatorID))
                part.CreatorID = m_scene.RegionInfo.EstateSettings.EstateOwner;

            if (!ResolveUserUuid(part.OwnerID))
                part.OwnerID = m_scene.RegionInfo.EstateSettings.EstateOwner;

            if (!ResolveUserUuid(part.LastOwnerID))
                part.LastOwnerID = m_scene.RegionInfo.EstateSettings.EstateOwner;

            // And zap any troublesome sit target information
            part.SitTargetOrientation = new Quaternion(0, 0, 0, 1);
            part.SitTargetPosition = new Vector3(0, 0, 0);

            // Fix ownership/creator of inventory items
            // Not doing so results in inventory items
            // being no copy/no mod for everyone
            lock (part.TaskInventory)
            {
                TaskInventoryDictionary inv = part.TaskInventory;
                foreach (KeyValuePair<UUID, TaskInventoryItem> kvp in inv)
                {
                    if (!ResolveUserUuid(kvp.Value.OwnerID))
                    {
                        kvp.Value.OwnerID = m_scene.RegionInfo.EstateSettings.EstateOwner;
                    }
                    if (!ResolveUserUuid(kvp.Value.CreatorID))
                    {
                        kvp.Value.CreatorID = m_scene.RegionInfo.EstateSettings.EstateOwner;
                    }
                }
            }
        }

        // AddRestoredSceneObject returns false for objects already present (e.g. UUID clash);
        // scripts are only started for objects that were actually added.
        if (m_scene.AddRestoredSceneObject(sceneObject, true, false))
        {
            sceneObjectsLoadedCount++;
            sceneObject.CreateScriptInstances(0, false, m_scene.DefaultScriptEngine, 0);
            sceneObject.ResumeScripts();
        }
    }

    m_log.InfoFormat("[ARCHIVER]: Restored {0} scene objects to the scene", sceneObjectsLoadedCount);

    int ignoredObjects = serialisedSceneObjects.Count - sceneObjectsLoadedCount;

    if (ignoredObjects > 0)
        m_log.WarnFormat("[ARCHIVER]: Ignored {0} scene objects that already existed in the scene", ignoredObjects);

    m_log.InfoFormat("[ARCHIVER]: Successfully loaded archive");

    m_scene.EventManager.TriggerOarFileLoaded(m_requestId, m_errorMessage);
}
public void TestSaveRootFolderToIar() { TestHelpers.InMethod(); // TestHelpers.EnableLogging(); string userFirstName = "Jock"; string userLastName = "Stirrup"; string userPassword = "******"; UUID userId = TestHelpers.ParseTail(0x20); UserAccountHelpers.CreateUserWithInventory(m_scene, userFirstName, userLastName, userId, userPassword); MemoryStream archiveWriteStream = new MemoryStream(); m_archiverModule.OnInventoryArchiveSaved += SaveCompleted; mre.Reset(); m_archiverModule.ArchiveInventory( UUID.Random(), userFirstName, userLastName, "/", userPassword, archiveWriteStream); mre.WaitOne(60000, false); // Test created iar byte[] archive = archiveWriteStream.ToArray(); MemoryStream archiveReadStream = new MemoryStream(archive); TarArchiveReader tar = new TarArchiveReader(archiveReadStream); // InventoryArchiveUtils. bool gotObjectsFolder = false; string objectsFolderName = string.Format( "{0}{1}", ArchiveConstants.INVENTORY_PATH, InventoryArchiveWriteRequest.CreateArchiveFolderName( UserInventoryHelpers.GetInventoryFolder(m_scene.InventoryService, userId, "Objects"))); string filePath; TarArchiveReader.TarEntryType tarEntryType; while (tar.ReadEntry(out filePath, out tarEntryType) != null) { // Console.WriteLine("Got {0}", filePath); // Lazily, we only bother to look for the system objects folder created when we call CreateUserWithInventory() // XXX: But really we need to stop all that stuff being created in tests or check for such folders // more thoroughly if (filePath == objectsFolderName) gotObjectsFolder = true; } Assert.That(gotObjectsFolder, Is.True); }
/// <summary>
/// Test basic OAR saving: the archive must start with the control file, contain the notecard
/// asset referenced from a prim's task inventory, and contain one object file per scene object.
/// </summary>
public void TestSaveOar()
{
    TestHelpers.InMethod();
//            log4net.Config.XmlConfigurator.Configure();

    SceneObjectGroup sog1;
    SceneObjectGroup sog2;
    UUID ncAssetUuid;
    CreateTestObjects(m_scene, out sog1, out sog2, out ncAssetUuid);

    MemoryStream archiveWriteStream = new MemoryStream();
    m_scene.EventManager.OnOarFileSaved += SaveCompleted;

    Guid requestId = new Guid("00000000-0000-0000-0000-808080808080");

    // Archiving completes asynchronously; SaveCompleted pulses this monitor when done.
    lock (this)
    {
        m_archiverModule.ArchiveRegion(archiveWriteStream, requestId);
        //AssetServerBase assetServer = (AssetServerBase)scene.CommsManager.AssetCache.AssetServer;
        //while (assetServer.HasWaitingRequests())
        //    assetServer.ProcessNextRequest();

        Monitor.Wait(this, 60000);
    }

    Assert.That(m_lastRequestId, Is.EqualTo(requestId));

    byte[] archive = archiveWriteStream.ToArray();
    MemoryStream archiveReadStream = new MemoryStream(archive);
    TarArchiveReader tar = new TarArchiveReader(archiveReadStream);

    bool gotNcAssetFile = false;

    string expectedNcAssetFileName = string.Format("{0}_{1}", ncAssetUuid, "notecard.txt");

    List<string> foundPaths = new List<string>();
    List<string> expectedPaths = new List<string>();
    expectedPaths.Add(ArchiveHelpers.CreateObjectPath(sog1));
    expectedPaths.Add(ArchiveHelpers.CreateObjectPath(sog2));

    string filePath;
    TarArchiveReader.TarEntryType tarEntryType;

    // The control file must be the very first entry in an OAR.
    byte[] data = tar.ReadEntry(out filePath, out tarEntryType);
    Assert.That(filePath, Is.EqualTo(ArchiveConstants.CONTROL_FILE_PATH));

    Dictionary<string, object> archiveOptions = new Dictionary<string, object>();
    ArchiveReadRequest arr = new ArchiveReadRequest(m_scene, (Stream)null, Guid.Empty, archiveOptions);
    arr.LoadControlFile(filePath, data, new DearchiveScenesInfo());

    Assert.That(arr.ControlFileLoaded, Is.True);

    while (tar.ReadEntry(out filePath, out tarEntryType) != null)
    {
        if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
        {
            // The only expected asset is the notecard created by CreateTestObjects().
            string fileName = filePath.Remove(0, ArchiveConstants.ASSETS_PATH.Length);
            Assert.That(fileName, Is.EqualTo(expectedNcAssetFileName));
            gotNcAssetFile = true;
        }
        else if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
        {
            foundPaths.Add(filePath);
        }
    }

    Assert.That(gotNcAssetFile, Is.True, "No notecard asset file in archive");
    Assert.That(foundPaths, Is.EquivalentTo(expectedPaths));

    // TODO: Test presence of more files and contents of files.
}
protected virtual void ReadBackup (IScene scene) { List<uint> foundLocalIDs = new List<uint> (); GZipStream m_loadStream; try { m_loadStream = new GZipStream (ArchiveHelpers.GetStream (m_loadDirectory + m_fileName), CompressionMode.Decompress); } catch { CheckForOldDataBase (); return; } TarArchiveReader reader = new TarArchiveReader (m_loadStream); byte[] data; string filePath; TarArchiveReader.TarEntryType entryType; //Load the archive data that we need while ((data = reader.ReadEntry (out filePath, out entryType)) != null) { if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType) continue; if (filePath.StartsWith ("parcels/")) { //Only use if we are not merging LandData parcel = new LandData (); OSD parcelData = OSDParser.DeserializeLLSDBinary (data); parcel.FromOSD ((OSDMap)parcelData); m_parcels.Add (parcel); } else if (filePath.StartsWith ("terrain/")) { m_terrain = data; } else if (filePath.StartsWith ("revertterrain/")) { m_revertTerrain = data; } else if (filePath.StartsWith ("water/")) { m_water = data; } else if (filePath.StartsWith ("revertwater/")) { m_revertWater = data; } else if (filePath.StartsWith ("entities/")) { MemoryStream ms = new MemoryStream (data); SceneObjectGroup sceneObject = SceneObjectSerializer.FromXml2Format (ms, scene); ms.Close (); ms = null; data = null; foreach (ISceneChildEntity part in sceneObject.ChildrenEntities ()) { if (!foundLocalIDs.Contains (part.LocalId)) foundLocalIDs.Add (part.LocalId); else part.LocalId = 0; //Reset it! Only use it once! } m_groups.Add (sceneObject); } } m_loadStream.Close (); m_loadStream = null; foundLocalIDs.Clear (); GC.Collect (); }
/// <summary>
/// Test saving a multi-region OAR ("all" option) for a WIDTH x HEIGHT grid of regions:
/// shared assets live at the top level while each region's objects live under its own
/// regions/ directory named "col_row_RegionName".
/// </summary>
public void TestSaveMultiRegionOar()
{
    TestHelpers.InMethod();

    // Create test regions

    int WIDTH = 2;
    int HEIGHT = 2;

    List<Scene> scenes = new List<Scene>();

    // Maps (Directory in OAR file -> scene)
    Dictionary<string, Scene> regionPaths = new Dictionary<string, Scene>();

    // Maps (Scene -> expected object paths)
    Dictionary<UUID, List<string>> expectedPaths = new Dictionary<UUID, List<string>>();

    // List of expected assets
    List<UUID> expectedAssets = new List<UUID>();

    for (uint y = 0; y < HEIGHT; y++)
    {
        for (uint x = 0; x < WIDTH; x++)
        {
            Scene scene;
            if (x == 0 && y == 0)
            {
                scene = m_scene; // this scene was already created in SetUp()
            }
            else
            {
                scene = m_sceneHelpers.SetupScene(string.Format("Unit test region {0}", (y * WIDTH) + x + 1), UUID.Random(), 1000 + x, 1000 + y);
                SceneHelpers.SetupSceneModules(scene, new ArchiverModule(), m_serialiserModule, new TerrainModule());
            }
            scenes.Add(scene);

            // Directory name format matches what the multi-region OAR writer produces.
            string dir = String.Format("{0}_{1}_{2}", x + 1, y + 1, scene.RegionInfo.RegionName.Replace(" ", "_"));
            regionPaths[dir] = scene;

            SceneObjectGroup sog1;
            SceneObjectGroup sog2;
            UUID ncAssetUuid;

            CreateTestObjects(scene, out sog1, out sog2, out ncAssetUuid);

            expectedPaths[scene.RegionInfo.RegionID] = new List<string>();
            expectedPaths[scene.RegionInfo.RegionID].Add(ArchiveHelpers.CreateObjectPath(sog1));
            expectedPaths[scene.RegionInfo.RegionID].Add(ArchiveHelpers.CreateObjectPath(sog2));

            expectedAssets.Add(ncAssetUuid);
        }
    }

    // Save OAR
    MemoryStream archiveWriteStream = new MemoryStream();
    m_scene.EventManager.OnOarFileSaved += SaveCompleted;

    Guid requestId = new Guid("00000000-0000-0000-0000-808080808080");

    Dictionary<string, Object> options = new Dictionary<string, Object>();
    options.Add("all", true);

    // Archiving completes asynchronously; SaveCompleted pulses this monitor when done.
    lock (this)
    {
        m_archiverModule.ArchiveRegion(archiveWriteStream, requestId, options);
        Monitor.Wait(this, 60000);
    }

    // Check that the OAR contains the expected data
    Assert.That(m_lastRequestId, Is.EqualTo(requestId));

    byte[] archive = archiveWriteStream.ToArray();
    MemoryStream archiveReadStream = new MemoryStream(archive);
    TarArchiveReader tar = new TarArchiveReader(archiveReadStream);

    Dictionary<UUID, List<string>> foundPaths = new Dictionary<UUID, List<string>>();
    List<UUID> foundAssets = new List<UUID>();

    foreach (Scene scene in scenes)
    {
        foundPaths[scene.RegionInfo.RegionID] = new List<string>();
    }

    string filePath;
    TarArchiveReader.TarEntryType tarEntryType;

    // The control file must be the very first entry in an OAR.
    byte[] data = tar.ReadEntry(out filePath, out tarEntryType);
    Assert.That(filePath, Is.EqualTo(ArchiveConstants.CONTROL_FILE_PATH));

    Dictionary<string, object> archiveOptions = new Dictionary<string, object>();
    ArchiveReadRequest arr = new ArchiveReadRequest(m_scene, (Stream)null, Guid.Empty, archiveOptions);
    arr.LoadControlFile(filePath, data, new DearchiveScenesInfo());
    Assert.That(arr.ControlFileLoaded, Is.True);

    while (tar.ReadEntry(out filePath, out tarEntryType) != null)
    {
        if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
        {
            // Assets are shared, so this file doesn't belong to any specific region.
            string fileName = filePath.Remove(0, ArchiveConstants.ASSETS_PATH.Length);
            if (fileName.EndsWith("_notecard.txt"))
                foundAssets.Add(UUID.Parse(fileName.Substring(0, fileName.Length - "_notecard.txt".Length)));
        }
        else
        {
            // This file belongs to one of the regions. Find out which one.
            Assert.IsTrue(filePath.StartsWith(ArchiveConstants.REGIONS_PATH));
            // Split into ["regions", "<dir>", "<relative path>"].
            string[] parts = filePath.Split(new Char[] { '/' }, 3);
            Assert.AreEqual(3, parts.Length);
            string regionDirectory = parts[1];
            string relativePath = parts[2];
            Scene scene = regionPaths[regionDirectory];

            if (relativePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
            {
                foundPaths[scene.RegionInfo.RegionID].Add(relativePath);
            }
        }
    }

    Assert.AreEqual(scenes.Count, foundPaths.Count);
    foreach (Scene scene in scenes)
    {
        Assert.That(foundPaths[scene.RegionInfo.RegionID], Is.EquivalentTo(expectedPaths[scene.RegionInfo.RegionID]));
    }

    Assert.That(foundAssets, Is.EquivalentTo(expectedAssets));
}
/// <summary>
/// Test basic OAR saving (older API variant): builds two prims — one carrying a notecard in
/// its task inventory — saves the region, then verifies the control file comes first and the
/// archive contains the notecard asset and both object files.
/// </summary>
public void TestSaveOar()
{
    TestHelper.InMethod();
//            log4net.Config.XmlConfigurator.Configure();

    SceneObjectPart part1 = CreateSceneObjectPart1();
    SceneObjectGroup sog1 = new SceneObjectGroup(part1);
    m_scene.AddNewSceneObject(sog1, false);

    SceneObjectPart part2 = CreateSceneObjectPart2();

    // Build a notecard asset and attach it to part2's task inventory so the
    // archiver has a non-trivial asset to save.
    AssetNotecard nc = new AssetNotecard();
    nc.BodyText = "Hello World!";
    nc.Encode();
    UUID ncAssetUuid = new UUID("00000000-0000-0000-1000-000000000000");
    UUID ncItemUuid = new UUID("00000000-0000-0000-1100-000000000000");
    AssetBase ncAsset = AssetHelpers.CreateAsset(ncAssetUuid, AssetType.Notecard, nc.AssetData, UUID.Zero);
    m_scene.AssetService.Store(ncAsset);
    SceneObjectGroup sog2 = new SceneObjectGroup(part2);
    TaskInventoryItem ncItem = new TaskInventoryItem { Name = "ncItem", AssetID = ncAssetUuid, ItemID = ncItemUuid };
    part2.Inventory.AddInventoryItem(ncItem, true);

    m_scene.AddNewSceneObject(sog2, false);

    MemoryStream archiveWriteStream = new MemoryStream();
    m_scene.EventManager.OnOarFileSaved += SaveCompleted;

    Guid requestId = new Guid("00000000-0000-0000-0000-808080808080");

    // Archiving completes asynchronously; SaveCompleted pulses this monitor when done.
    lock (this)
    {
        m_archiverModule.ArchiveRegion(archiveWriteStream, requestId);
        //AssetServerBase assetServer = (AssetServerBase)scene.CommsManager.AssetCache.AssetServer;
        //while (assetServer.HasWaitingRequests())
        //    assetServer.ProcessNextRequest();

        Monitor.Wait(this, 60000);
    }

    Assert.That(m_lastRequestId, Is.EqualTo(requestId));

    byte[] archive = archiveWriteStream.ToArray();
    MemoryStream archiveReadStream = new MemoryStream(archive);
    TarArchiveReader tar = new TarArchiveReader(archiveReadStream);

    bool gotNcAssetFile = false;

    string expectedNcAssetFileName = string.Format("{0}_{1}", ncAssetUuid, "notecard.txt");

    List<string> foundPaths = new List<string>();
    List<string> expectedPaths = new List<string>();
    expectedPaths.Add(ArchiveHelpers.CreateObjectPath(sog1));
    expectedPaths.Add(ArchiveHelpers.CreateObjectPath(sog2));

    string filePath;
    TarArchiveReader.TarEntryType tarEntryType;

    // The control file must be the very first entry in an OAR.
    byte[] data = tar.ReadEntry(out filePath, out tarEntryType);
    Assert.That(filePath, Is.EqualTo(ArchiveConstants.CONTROL_FILE_PATH));

    ArchiveReadRequest arr = new ArchiveReadRequest(m_scene, (Stream)null, false, false, Guid.Empty);
    arr.LoadControlFile(filePath, data);

    Assert.That(arr.ControlFileLoaded, Is.True);

    while (tar.ReadEntry(out filePath, out tarEntryType) != null)
    {
        if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
        {
            // The only expected asset is the notecard stored above.
            string fileName = filePath.Remove(0, ArchiveConstants.ASSETS_PATH.Length);
            Assert.That(fileName, Is.EqualTo(expectedNcAssetFileName));
            gotNcAssetFile = true;
        }
        else if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
        {
            foundPaths.Add(filePath);
        }
    }

    Assert.That(gotNcAssetFile, Is.True, "No notecard asset file in archive");
    Assert.That(foundPaths, Is.EquivalentTo(expectedPaths));

    // TODO: Test presence of more files and contents of files.
}
/// <summary>
/// Test saving a v0.2 OAR: two prims are added to a fresh scene, the region is archived, and
/// the archive is checked for the control file and per-object files whose names follow the
/// "Name_XXX-YYY-ZZZ__UUID.xml" convention (coordinates rounded to whole metres).
/// </summary>
public void TestSaveOarV0_2()
{
    TestHelper.InMethod();
    //log4net.Config.XmlConfigurator.Configure();

    ArchiverModule archiverModule = new ArchiverModule();
    SerialiserModule serialiserModule = new SerialiserModule();
    TerrainModule terrainModule = new TerrainModule();

    Scene scene = SceneSetupHelpers.SetupScene("asset");
    SceneSetupHelpers.SetupSceneModules(scene, archiverModule, serialiserModule, terrainModule);

    SceneObjectPart part1;

    // Create and add prim 1
    {
        string partName = "My Little Pony";
        UUID ownerId = UUID.Parse("00000000-0000-0000-0000-000000000015");
        PrimitiveBaseShape shape = PrimitiveBaseShape.CreateSphere();
        Vector3 groupPosition = new Vector3(10, 20, 30);
        Quaternion rotationOffset = new Quaternion(20, 30, 40, 50);
        Vector3 offsetPosition = new Vector3(5, 10, 15);

        part1 = new SceneObjectPart(
            ownerId, shape, groupPosition, rotationOffset, offsetPosition);
        part1.Name = partName;

        scene.AddNewSceneObject(new SceneObjectGroup(part1), false);
    }

    SceneObjectPart part2;

    // Create and add prim 2
    {
        string partName = "Action Man";
        UUID ownerId = UUID.Parse("00000000-0000-0000-0000-000000000016");
        PrimitiveBaseShape shape = PrimitiveBaseShape.CreateCylinder();
        Vector3 groupPosition = new Vector3(90, 80, 70);
        Quaternion rotationOffset = new Quaternion(60, 70, 80, 90);
        Vector3 offsetPosition = new Vector3(20, 25, 30);

        part2 = new SceneObjectPart(
            ownerId, shape, groupPosition, rotationOffset, offsetPosition);
        part2.Name = partName;

        scene.AddNewSceneObject(new SceneObjectGroup(part2), false);
    }

    MemoryStream archiveWriteStream = new MemoryStream();
    scene.EventManager.OnOarFileSaved += SaveCompleted;

    Guid requestId = new Guid("00000000-0000-0000-0000-808080808080");

    // Archiving completes asynchronously; SaveCompleted pulses this monitor when done.
    lock (this)
    {
        archiverModule.ArchiveRegion(archiveWriteStream, requestId);
        //AssetServerBase assetServer = (AssetServerBase)scene.CommsManager.AssetCache.AssetServer;
        //while (assetServer.HasWaitingRequests())
        //    assetServer.ProcessNextRequest();

        Monitor.Wait(this, 60000);
    }

    Assert.That(m_lastRequestId, Is.EqualTo(requestId));

    byte[] archive = archiveWriteStream.ToArray();
    MemoryStream archiveReadStream = new MemoryStream(archive);
    TarArchiveReader tar = new TarArchiveReader(archiveReadStream);

    bool gotControlFile = false;
    bool gotObject1File = false;
    bool gotObject2File = false;

    // Expected object file names encode name, rounded group position and UUID.
    string expectedObject1FileName = string.Format(
        "{0}_{1:000}-{2:000}-{3:000}__{4}.xml",
        part1.Name,
        Math.Round(part1.GroupPosition.X), Math.Round(part1.GroupPosition.Y), Math.Round(part1.GroupPosition.Z),
        part1.UUID);
    string expectedObject2FileName = string.Format(
        "{0}_{1:000}-{2:000}-{3:000}__{4}.xml",
        part2.Name,
        Math.Round(part2.GroupPosition.X), Math.Round(part2.GroupPosition.Y), Math.Round(part2.GroupPosition.Z),
        part2.UUID);

    string filePath;
    TarArchiveReader.TarEntryType tarEntryType;

    while (tar.ReadEntry(out filePath, out tarEntryType) != null)
    {
        if (ArchiveConstants.CONTROL_FILE_PATH == filePath)
        {
            gotControlFile = true;
        }
        else if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
        {
            string fileName = filePath.Remove(0, ArchiveConstants.OBJECTS_PATH.Length);

            if (fileName.StartsWith(part1.Name))
            {
                Assert.That(fileName, Is.EqualTo(expectedObject1FileName));
                gotObject1File = true;
            }
            else if (fileName.StartsWith(part2.Name))
            {
                Assert.That(fileName, Is.EqualTo(expectedObject2FileName));
                gotObject2File = true;
            }
        }
    }

    Assert.That(gotControlFile, Is.True, "No control file in archive");
    Assert.That(gotObject1File, Is.True, "No object1 file in archive");
    Assert.That(gotObject2File, Is.True, "No object2 file in archive");

    // TODO: Test presence of more files and contents of files.
}
/// <summary>
/// Execute the request: load an IAR from m_loadStream into the user's inventory under
/// the destination folder given by m_invPath.
/// </summary>
/// <returns>
/// A list of the inventory nodes loaded.  If folders were loaded then only the root folders are
/// returned
/// </returns>
public List<InventoryNodeBase> Execute()
{
    string filePath = "ERROR";
    int successfulAssetRestores = 0;
    int failedAssetRestores = 0;
    int successfulItemRestores = 0;
    List<InventoryNodeBase> nodesLoaded = new List<InventoryNodeBase>();

    if (!m_userInfo.HasReceivedInventory)
    {
        // If the region server has access to the user admin service (by which users are created),
        // then we'll assume that it's okay to fiddle with the user's inventory even if they are not on the
        // server.
        //
        // FIXME: FetchInventory should probably be assumed to by async anyway, since even standalones might
        // use a remote inventory service, though this is vanishingly rare at the moment.
        if (null == m_commsManager.UserAdminService)
        {
            m_log.ErrorFormat(
                "[INVENTORY ARCHIVER]: Have not yet received inventory info for user {0} {1}",
                m_userInfo.UserProfile.Name, m_userInfo.UserProfile.ID);

            return nodesLoaded;
        }
        else
        {
            m_userInfo.FetchInventory();
            // Poll for inventory arrival: up to 50 * 200ms = 10 seconds before giving up
            // and proceeding anyway.
            for (int i = 0 ; i < 50 ; i++)
            {
                if (m_userInfo.HasReceivedInventory == true)
                    break;
                Thread.Sleep(200);
            }
        }
    }

    InventoryFolderImpl rootDestinationFolder = m_userInfo.RootFolder.FindFolderByPath(m_invPath);

    if (null == rootDestinationFolder)
    {
        // Possibly provide an option later on to automatically create this folder if it does not exist
        m_log.ErrorFormat("[INVENTORY ARCHIVER]: Inventory path {0} does not exist", m_invPath);

        return nodesLoaded;
    }

    archive = new TarArchiveReader(m_loadStream);

    // In order to load identically named folders, we need to keep track of the folders that we have already
    // created
    Dictionary <string, InventoryFolderImpl> foldersCreated = new Dictionary<string, InventoryFolderImpl>();

    byte[] data;
    TarArchiveReader.TarEntryType entryType;
    // NOTE(review): unlike the newer loaders, a read error here propagates to the caller and
    // archive.Close() below is skipped — confirm callers handle/close on exception.
    while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
    {
        if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
        {
            if (LoadAsset(filePath, data))
                successfulAssetRestores++;
            else
                failedAssetRestores++;
        }
        else if (filePath.StartsWith(ArchiveConstants.INVENTORY_PATH))
        {
            // Ensure the folder hierarchy for this entry exists in the user's inventory.
            InventoryFolderImpl foundFolder
                = ReplicateArchivePathToUserInventory(
                    filePath, TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType,
                    rootDestinationFolder, foldersCreated, nodesLoaded);

            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY != entryType)
            {
                InventoryItemBase item = UserInventoryItemSerializer.Deserialize(data);

                // Don't use the item ID that's in the file
                item.ID = UUID.Random();

                // OSPA creator names resolve to local UUIDs where the account exists on this grid.
                UUID ospResolvedId = OspResolver.ResolveOspa(item.CreatorId, m_commsManager);
                if (UUID.Zero != ospResolvedId)
                    item.CreatorIdAsUuid = ospResolvedId;

                item.Owner = m_userInfo.UserProfile.ID;

                // Reset folder ID to the one in which we want to load it
                item.Folder = foundFolder.ID;

                m_userInfo.AddItem(item);
                successfulItemRestores++;

                // If we're loading an item directly into the given destination folder then we need to record
                // it separately from any loaded root folders
                if (rootDestinationFolder == foundFolder)
                    nodesLoaded.Add(item);
            }
        }
    }

    archive.Close();

    m_log.DebugFormat("[INVENTORY ARCHIVER]: Restored {0} assets", successfulAssetRestores);
    m_log.InfoFormat("[INVENTORY ARCHIVER]: Restored {0} items", successfulItemRestores);

    return nodesLoaded;
}
public void LoadRegionBackup(TarArchiveReader reader, IScene scene) { IAuroraBackupModule[] modules = scene.RequestModuleInterfaces<IAuroraBackupModule>(); byte[] data; string filePath; TarArchiveReader.TarEntryType entryType; foreach (IAuroraBackupModule module in modules) module.BeginLoadModuleFromArchive(scene); while ((data = reader.ReadEntry(out filePath, out entryType)) != null) { if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType) continue; foreach (IAuroraBackupModule module in modules) module.LoadModuleFromArchive(data, filePath, entryType, scene); } reader.Close(); foreach (IAuroraBackupModule module in modules) module.EndLoadModuleFromArchive(scene); }
/// <summary>
/// Execute the request
/// </summary>
/// <remarks>
/// Only call this once. To load another IAR, construct another request object.
/// Dispatches each tar entry by path (control file, assets, inventory) to the dedicated
/// loader methods. Success or failure is reported via TriggerInventoryArchiveLoaded when an
/// archiver module is present; m_loadStream is always closed by the finally block.
/// </remarks>
/// <returns>
/// A list of the inventory nodes loaded. If folders were loaded then only the root folders are
/// returned
/// </returns>
/// <exception cref="System.Exception">Thrown if load fails.</exception>
public HashSet<InventoryNodeBase> Execute()
{
    try
    {
        // Kept so the success-path event call has an explicit "no exception" argument.
        Exception reportedException = null;
        string filePath = "ERROR";

        List<InventoryFolderBase> folderCandidates
            = InventoryArchiveUtils.FindFoldersByPath(
                m_InventoryService, m_userInfo.PrincipalID, m_invPath);

        if (folderCandidates.Count == 0)
        {
            // Possibly provide an option later on to automatically create this folder if it does not exist
            m_log.ErrorFormat("[INVENTORY ARCHIVER]: Inventory path {0} does not exist", m_invPath);
            return m_loadedNodes;
        }

        // Multiple folders may match the path; the first candidate wins.
        m_rootDestinationFolder = folderCandidates[0];
        archive = new TarArchiveReader(m_loadStream);
        byte[] data;
        TarArchiveReader.TarEntryType entryType;

        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            if (filePath == ArchiveConstants.CONTROL_FILE_PATH)
            {
                LoadControlFile(filePath, data);
            }
            else if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
            {
                LoadAssetFile(filePath, data);
            }
            else if (filePath.StartsWith(ArchiveConstants.INVENTORY_PATH))
            {
                LoadInventoryFile(filePath, entryType, data);
            }
        }

        // NOTE(review): archive.Close() is skipped if the loop throws; the finally below
        // closes the underlying m_loadStream, which is presumably sufficient — confirm.
        archive.Close();

        m_log.DebugFormat(
            "[INVENTORY ARCHIVER]: Successfully loaded {0} assets with {1} failures",
            m_successfulAssetRestores, m_failedAssetRestores);

        //Alicia: When this is called by LibraryModule or Tests, m_module will be null as event is not required
        if(m_module != null)
            m_module.TriggerInventoryArchiveLoaded(m_id, true, m_userInfo, m_invPath, m_loadStream, reportedException, m_successfulItemRestores);

        return m_loadedNodes;
    }
    catch(Exception Ex)
    {
        // Trigger saved event with failed result and exception data
        if (m_module != null)
            m_module.TriggerInventoryArchiveLoaded(m_id, false, m_userInfo, m_invPath, m_loadStream, Ex, 0);

        return m_loadedNodes;
    }
    finally
    {
        m_loadStream.Close();
    }
}