/// <summary>
/// Execute the request
/// </summary>
/// <remarks>
/// Only call this once. To load another IAR, construct another request object.
/// </remarks>
/// <returns>
/// A list of the inventory nodes loaded. If folders were loaded then only the root folders are
/// returned
/// </returns>
/// <exception cref="System.Exception">Thrown if load fails.</exception>
public HashSet<InventoryNodeBase> Execute()
{
    try
    {
        string filePath = "ERROR";

        List<InventoryFolderBase> folderCandidates
            = InventoryArchiveUtils.FindFoldersByPath(
                m_InventoryService, m_userInfo.PrincipalID, m_invPath);

        if (folderCandidates.Count == 0)
        {
            // Possibly provide an option later on to automatically create this folder if it does not exist
            m_log.ErrorFormat("[INVENTORY ARCHIVER]: Inventory path {0} does not exist", m_invPath);
            return m_loadedNodes;
        }

        m_rootDestinationFolder = folderCandidates[0];

        TarArchiveReader archive = null;

        try
        {
            archive = new TarArchiveReader(m_loadStream);

            byte[] data;
            TarArchiveReader.TarEntryType entryType;

            while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
            {
                if (filePath == ArchiveConstants.CONTROL_FILE_PATH)
                {
                    LoadControlFile(filePath, data);
                }
                else if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
                {
                    LoadAssetFile(filePath, data);
                }
                else if (filePath.StartsWith(ArchiveConstants.INVENTORY_PATH))
                {
                    LoadInventoryFile(filePath, entryType, data);
                }
            }
        }
        finally
        {
            // Guard against the TarArchiveReader constructor having thrown before archive was assigned
            if (archive != null)
                archive.Close();
        }

        FixupInventoryLinks();

        m_log.DebugFormat(
            "[INVENTORY ARCHIVER]: Successfully loaded {0} assets and {1} failures",
            m_successfulAssetRestores, m_failedAssetRestores);
        m_log.InfoFormat(
            "[INVENTORY ARCHIVER]: Successfully loaded {0} items and {1} failures",
            m_successfulItemRestores, m_failedItemRestores);

        return m_loadedNodes;
    }
    finally
    {
        m_loadStream.Close();
    }
}
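A usage sketch for the request object above. The class name and constructor arguments here are assumptions for illustration; only the single-use contract comes from the doc comment:

// Hypothetical driver code: each read request object is single-use.
var request = new InventoryArchiveReadRequest(           // assumed class name and ctor
    m_InventoryService, userInfo, "/Imports", loadStream);
HashSet<InventoryNodeBase> rootNodes = request.Execute();
// To load a second IAR, construct a fresh request; Execute() must only be called once per object.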
public void LoadRegionBackup(TarArchiveReader reader, IScene scene)
{
    IAuroraBackupModule[] modules = scene.RequestModuleInterfaces<IAuroraBackupModule>();

    byte[] data;
    string filePath;
    TarArchiveReader.TarEntryType entryType;

    foreach (IAuroraBackupModule module in modules)
        module.BeginLoadModuleFromArchive(scene);

    while ((data = reader.ReadEntry(out filePath, out entryType)) != null)
    {
        if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
            continue;

        foreach (IAuroraBackupModule module in modules)
            module.LoadModuleFromArchive(data, filePath, entryType, scene);
    }

    reader.Close();

    foreach (IAuroraBackupModule module in modules)
        module.EndLoadModuleFromArchive(scene);
}
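The loop above relies on exactly three lifecycle calls per module. A minimal no-op implementation, as a sketch (the real IAuroraBackupModule interface may declare additional members not shown in this fragment):

class NullBackupModule : IAuroraBackupModule
{
    // Called once before any archive entries are dispatched
    public void BeginLoadModuleFromArchive(IScene scene) { }

    // Called for every non-directory entry; a real module would filter on filePath
    public void LoadModuleFromArchive(byte[] data, string filePath,
        TarArchiveReader.TarEntryType entryType, IScene scene) { }

    // Called once after the reader is closed
    public void EndLoadModuleFromArchive(IScene scene) { }
}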
/// <summary>
/// Searches through the files in the archive for the control file, and reads it.
/// We must read the control file first, in order to know which regions are available.
/// </summary>
/// <remarks>
/// In most cases the control file *is* first, since that's how we create archives. However,
/// it's possible that someone rewrote the archive externally so we can't rely on this fact.
/// </remarks>
/// <param name="archive"></param>
/// <param name="dearchivedScenes"></param>
private void FindAndLoadControlFile(out TarArchiveReader archive, out DearchiveScenesInfo dearchivedScenes)
{
    archive = new TarArchiveReader(m_loadStream);
    dearchivedScenes = new DearchiveScenesInfo();

    string filePath;
    byte[] data;
    TarArchiveReader.TarEntryType entryType;
    bool firstFile = true;

    while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
    {
        if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
            continue;

        if (filePath == ArchiveConstants.CONTROL_FILE_PATH)
        {
            LoadControlFile(filePath, data, dearchivedScenes);

            // Find which scenes are available in the simulator
            ArchiveScenesGroup simulatorScenes = new ArchiveScenesGroup();
            SceneManager.Instance.ForEachScene(delegate(Scene scene2)
            {
                simulatorScenes.AddScene(scene2);
            });
            simulatorScenes.CalcSceneLocations();
            dearchivedScenes.SetSimulatorScenes(m_rootScene, simulatorScenes);

            // If the control file wasn't the first file then reset the read pointer
            if (!firstFile)
            {
                m_log.Warn("Control file wasn't the first file in the archive");

                if (m_loadStream.CanSeek)
                {
                    m_loadStream.Seek(0, SeekOrigin.Begin);
                }
                else if (m_loadPath != null)
                {
                    archive.Close();
                    archive = null;
                    m_loadStream.Close();
                    m_loadStream = null;
                    m_loadStream = new GZipStream(ArchiveHelpers.GetStream(m_loadPath), CompressionMode.Decompress);
                    archive = new TarArchiveReader(m_loadStream);
                }
                else
                {
                    // There isn't currently a scenario where this happens, but it's best to add a check just in case
                    throw new Exception("Error reading archive: control file wasn't the first file, and the input stream doesn't allow seeking");
                }
            }

            return;
        }

        firstFile = false;
    }

    throw new Exception("Control file not found");
}
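The rewind logic above generalizes to any format whose control entry must be read before the rest of the stream. A minimal sketch of the same fallback chain (the helper name is hypothetical): seek when the stream supports it, reopen from a path when it does not, and fail loudly otherwise:

private Stream ResetToStart(Stream stream, string path)
{
    if (stream.CanSeek)
    {
        // Cheap case: just move the read pointer back to the beginning
        stream.Seek(0, SeekOrigin.Begin);
        return stream;
    }
    if (path != null)
    {
        // GZip streams are forward-only, so re-wrap a fresh file stream instead
        stream.Close();
        return new GZipStream(File.OpenRead(path), CompressionMode.Decompress);
    }
    throw new Exception("Input stream cannot be rewound and no source path is available");
}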
private void LoadIAR(string fileName)
{
    // Load the iar into memory
    TarArchiveReader archive
        = new TarArchiveReader(new GZipStream(ArchiveHelpers.GetStream(fileName), CompressionMode.Decompress));

    byte[] data;
    TarArchiveReader.TarEntryType entryType;
    string filePath;

    InventoryFolderBase rootDestFolder = new InventoryFolderBase(UUID.Zero, UUID.Zero);
    Dictionary<string, InventoryFolderBase> resolvedFolders = new Dictionary<string, InventoryFolderBase>();

    while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
    {
        if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
        {
            LoadAsset(filePath, data);
        }
        else if (filePath.StartsWith(ArchiveConstants.INVENTORY_PATH))
        {
            filePath = filePath.Substring(ArchiveConstants.INVENTORY_PATH.Length);

            // Trim off the file portion if we aren't already dealing with a directory path
            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY != entryType)
                filePath = filePath.Remove(filePath.LastIndexOf("/") + 1);

            InventoryFolderBase foundFolder
                = ReplicateArchivePathToUserInventory(filePath, rootDestFolder, ref resolvedFolders);

            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY != entryType)
                LoadItem(data, foundFolder);
        }
    }

    archive.Close();

    // Got the .iar loaded into memory now
    // Time to put it into the GUI
    RebuildTreeView();
}
/// <summary>
/// Execute the request
/// </summary>
/// <remarks>
/// Only call this once. To load another IAR, construct another request object.
/// </remarks>
/// <returns>
/// The loaded inventory nodes, keyed by UUID. If folders were loaded then only the root folders are
/// returned
/// </returns>
/// <exception cref="System.Exception">Thrown if load fails.</exception>
public Dictionary<UUID, InventoryNodeBase> Execute()
{
    try
    {
        Exception reportedException = null;
        string filePath = "ERROR";

        List<InventoryFolderBase> folderCandidates
            = InventoryArchiveUtils.FindFoldersByPath(
                m_InventoryService, m_userInfo.PrincipalID, m_invPath);

        if (folderCandidates.Count == 0)
        {
            // Possibly provide an option later on to automatically create this folder if it does not exist
            m_log.ErrorFormat("[INVENTORY ARCHIVER]: Inventory path {0} does not exist", m_invPath);
            return m_loadedNodes;
        }

        m_rootDestinationFolder = folderCandidates[0];

        archive = new TarArchiveReader(m_loadStream);

        byte[] data;
        TarArchiveReader.TarEntryType entryType;

        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            if (filePath == ArchiveConstants.CONTROL_FILE_PATH)
            {
                LoadControlFile(filePath, data);
            }
            else if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
            {
                LoadAssetFile(filePath, data);
            }
            else if (filePath.StartsWith(ArchiveConstants.INVENTORY_PATH))
            {
                LoadInventoryFile(filePath, entryType, data);
            }
        }

        archive.Close();

        LoadInventoryLinks();

        m_log.DebugFormat(
            "[INVENTORY ARCHIVER]: Successfully loaded {0} assets with {1} failures",
            m_successfulAssetRestores, m_failedAssetRestores);

        // Alicia: When this is called by LibraryModule or Tests, m_module will be null as event is not required
        if (m_module != null)
            m_module.TriggerInventoryArchiveLoaded(m_id, true, m_userInfo, m_invPath, m_loadStream,
                reportedException, m_successfulItemRestores);

        return m_loadedNodes;
    }
    catch (Exception ex)
    {
        // Trigger saved event with failed result and exception data
        if (m_module != null)
            m_module.TriggerInventoryArchiveLoaded(m_id, false, m_userInfo, m_invPath, m_loadStream, ex, 0);

        return m_loadedNodes;
    }
    finally
    {
        m_loadStream.Close();
    }
}
/// <summary>
/// Dearchive the region embodied in this request.
/// </summary>
public void DearchiveRegion()
{
    int successfulAssetRestores = 0;
    int failedAssetRestores = 0;

    DearchiveScenesInfo dearchivedScenes;

    // We dearchive all the scenes at once, because the files in the TAR archive might be mixed.
    // Therefore, we have to keep track of the dearchive context of all the scenes.
    Dictionary<UUID, DearchiveContext> sceneContexts = new Dictionary<UUID, DearchiveContext>();

    string fullPath = "NONE";
    TarArchiveReader archive = null;
    byte[] data;
    TarArchiveReader.TarEntryType entryType;

    try
    {
        FindAndLoadControlFile(out archive, out dearchivedScenes);

        while ((data = archive.ReadEntry(out fullPath, out entryType)) != null)
        {
            //m_log.DebugFormat(
            //    "[ARCHIVER]: Successfully read {0} ({1} bytes)", filePath, data.Length);

            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
                continue;

            // Find the scene that this file belongs to
            Scene scene;
            string filePath;
            if (!dearchivedScenes.GetRegionFromPath(fullPath, out scene, out filePath))
                continue;   // this file belongs to a region that we're not loading

            DearchiveContext sceneContext = null;
            if (scene != null)
            {
                if (!sceneContexts.TryGetValue(scene.RegionInfo.RegionID, out sceneContext))
                {
                    sceneContext = new DearchiveContext(scene);
                    sceneContexts.Add(scene.RegionInfo.RegionID, sceneContext);
                }
            }

            // Process the file. Note: the scene-scoped branches below assume GetRegionFromPath
            // returned a non-null scene (and hence sceneContext is non-null) for scene-scoped
            // paths; only assets and the control file are scene-independent.
            if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
            {
                sceneContext.SerialisedSceneObjects.Add(Encoding.UTF8.GetString(data));
            }
            else if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH) && !m_skipAssets)
            {
                if (LoadAsset(filePath, data))
                    successfulAssetRestores++;
                else
                    failedAssetRestores++;

                if ((successfulAssetRestores + failedAssetRestores) % 250 == 0)
                    m_log.Debug("[ARCHIVER]: Loaded " + successfulAssetRestores
                        + " assets and failed to load " + failedAssetRestores + " assets...");
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.TERRAINS_PATH))
            {
                LoadTerrain(scene, filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.SETTINGS_PATH))
            {
                LoadRegionSettings(scene, filePath, data, dearchivedScenes);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.LANDDATA_PATH))
            {
                sceneContext.SerialisedParcels.Add(Encoding.UTF8.GetString(data));
            }
            else if (filePath == ArchiveConstants.CONTROL_FILE_PATH)
            {
                // Ignore, because we already read the control file
            }
        }

        //m_log.Debug("[ARCHIVER]: Reached end of archive");
    }
    catch (Exception e)
    {
        m_log.Error(
            String.Format("[ARCHIVER]: Aborting load with error in archive file {0} ", fullPath), e);
        m_errorMessage += e.ToString();
        m_rootScene.EventManager.TriggerOarFileLoaded(m_requestId, new List<UUID>(), m_errorMessage);
        return;
    }
    finally
    {
        if (archive != null)
            archive.Close();
    }

    if (!m_skipAssets)
    {
        m_log.InfoFormat("[ARCHIVER]: Restored {0} assets", successfulAssetRestores);

        if (failedAssetRestores > 0)
        {
            m_log.ErrorFormat("[ARCHIVER]: Failed to load {0} assets", failedAssetRestores);
            m_errorMessage += String.Format("Failed to load {0} assets", failedAssetRestores);
        }
    }

    foreach (DearchiveContext sceneContext in sceneContexts.Values)
    {
        m_log.InfoFormat("[ARCHIVER]: Loading region {0}", sceneContext.Scene.RegionInfo.RegionName);

        if (!m_merge)
        {
            m_log.Info("[ARCHIVER]: Clearing all existing scene objects");
            sceneContext.Scene.DeleteAllSceneObjects();
        }

        try
        {
            LoadParcels(sceneContext.Scene, sceneContext.SerialisedParcels);
            LoadObjects(sceneContext.Scene, sceneContext.SerialisedSceneObjects, sceneContext.SceneObjects);

            // Inform any interested parties that the region has changed. We waited until now so that all
            // of the region's objects will be loaded when we send this notification.
            IEstateModule estateModule = sceneContext.Scene.RequestModuleInterface<IEstateModule>();
            if (estateModule != null)
                estateModule.TriggerRegionInfoChange();
        }
        catch (Exception e)
        {
            m_log.Error("[ARCHIVER]: Error loading parcels or objects ", e);
            m_errorMessage += e.ToString();
            m_rootScene.EventManager.TriggerOarFileLoaded(m_requestId, new List<UUID>(), m_errorMessage);
            return;
        }
    }

    // Start the scripts. We delayed this because we want the OAR to finish loading ASAP, so
    // that users can enter the scene. If we allow the scripts to start in the loop above
    // then they significantly increase the time until the OAR finishes loading.
    Util.FireAndForget(delegate(object o)
    {
        Thread.Sleep(15000);
        m_log.Info("[ARCHIVER]: Starting scripts in scene objects");

        foreach (DearchiveContext sceneContext in sceneContexts.Values)
        {
            foreach (SceneObjectGroup sceneObject in sceneContext.SceneObjects)
            {
                sceneObject.CreateScriptInstances(0, false, sceneContext.Scene.DefaultScriptEngine, 0); // StateSource.RegionStart
                sceneObject.ResumeScripts();
            }

            sceneContext.SceneObjects.Clear();
        }
    });

    m_log.InfoFormat("[ARCHIVER]: Successfully loaded archive");

    m_rootScene.EventManager.TriggerOarFileLoaded(m_requestId, dearchivedScenes.GetLoadedScenes(), m_errorMessage);
}
private void DearchiveRegion0DotStar()
{
    int successfulAssetRestores = 0;
    int failedAssetRestores = 0;
    List<string> serialisedSceneObjects = new List<string>();
    string filePath = "NONE";

    try
    {
        TarArchiveReader archive = new TarArchiveReader(m_loadStream);

        byte[] data;
        TarArchiveReader.TarEntryType entryType;

        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            //m_log.DebugFormat(
            //    "[ARCHIVER]: Successfully read {0} ({1} bytes)", filePath, data.Length);

            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
                continue;

            if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
            {
                serialisedSceneObjects.Add(m_asciiEncoding.GetString(data));
            }
            else if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
            {
                if (LoadAsset(filePath, data))
                    successfulAssetRestores++;
                else
                    failedAssetRestores++;
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.TERRAINS_PATH))
            {
                LoadTerrain(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.SETTINGS_PATH))
            {
                LoadRegionSettings(filePath, data);
            }
        }

        //m_log.Debug("[ARCHIVER]: Reached end of archive");

        archive.Close();
    }
    catch (Exception e)
    {
        m_log.ErrorFormat(
            "[ARCHIVER]: Aborting load with error in archive file {0}. {1}", filePath, e);
        m_errorMessage += e.ToString();
        m_scene.EventManager.TriggerOarFileLoaded(m_requestId, m_errorMessage);
        return;
    }

    m_log.InfoFormat("[ARCHIVER]: Restored {0} assets", successfulAssetRestores);

    if (failedAssetRestores > 0)
    {
        m_log.ErrorFormat("[ARCHIVER]: Failed to load {0} assets", failedAssetRestores);
        m_errorMessage += String.Format("Failed to load {0} assets", failedAssetRestores);
    }

    if (!m_merge)
    {
        m_log.Info("[ARCHIVER]: Clearing all existing scene objects");
        m_scene.DeleteAllSceneObjects();
    }

    // Reload serialized prims
    m_log.InfoFormat("[ARCHIVER]: Loading {0} scene objects. Please wait.", serialisedSceneObjects.Count);

    IRegionSerialiserModule serialiser = m_scene.RequestModuleInterface<IRegionSerialiserModule>();
    int sceneObjectsLoadedCount = 0;

    foreach (string serialisedSceneObject in serialisedSceneObjects)
    {
        /*
         * m_log.DebugFormat("[ARCHIVER]: Loading xml with raw size {0}", serialisedSceneObject.Length);
         *
         * // Really large xml files (multi megabyte) appear to cause
         * // memory problems
         * // when loading the xml. But don't enable this check yet
         *
         * if (serialisedSceneObject.Length > 5000000)
         * {
         *     m_log.Error("[ARCHIVER]: Ignoring xml since size > 5000000);");
         *     continue;
         * }
         */

        SceneObjectGroup sceneObject = serialiser.DeserializeGroupFromXml2(serialisedSceneObject);

        // For now, give all incoming scene objects new uuids. This will allow scenes to be cloned
        // on the same region server and multiple examples of a single object archive to be imported
        // to the same scene (when this is possible).
        sceneObject.ResetIDs();

        // Try to retain the original creator/owner/lastowner if their uuid is present on this grid
        // otherwise, use the master avatar uuid instead
        UUID masterAvatarId = m_scene.RegionInfo.MasterAvatarAssignedUUID;

        if (m_scene.RegionInfo.EstateSettings.EstateOwner != UUID.Zero)
            masterAvatarId = m_scene.RegionInfo.EstateSettings.EstateOwner;

        foreach (SceneObjectPart part in sceneObject.Children.Values)
        {
            if (!ResolveUserUuid(part.CreatorID))
                part.CreatorID = masterAvatarId;

            if (!ResolveUserUuid(part.OwnerID))
                part.OwnerID = masterAvatarId;

            if (!ResolveUserUuid(part.LastOwnerID))
                part.LastOwnerID = masterAvatarId;

            // And zap any troublesome sit target information
            part.SitTargetOrientation = new Quaternion(0, 0, 0, 1);
            part.SitTargetPosition = new Vector3(0, 0, 0);

            // Fix ownership/creator of inventory items
            // Not doing so results in inventory items
            // being no copy/no mod for everyone
            lock (part.TaskInventory)
            {
                TaskInventoryDictionary inv = part.TaskInventory;
                foreach (KeyValuePair<UUID, TaskInventoryItem> kvp in inv)
                {
                    if (!ResolveUserUuid(kvp.Value.OwnerID))
                        kvp.Value.OwnerID = masterAvatarId;

                    if (!ResolveUserUuid(kvp.Value.CreatorID))
                        kvp.Value.CreatorID = masterAvatarId;
                }
            }
        }

        if (m_scene.AddRestoredSceneObject(sceneObject, true, false))
        {
            sceneObjectsLoadedCount++;
            sceneObject.CreateScriptInstances(0, true, m_scene.DefaultScriptEngine, 0);
        }
    }

    m_log.InfoFormat("[ARCHIVER]: Restored {0} scene objects to the scene", sceneObjectsLoadedCount);

    int ignoredObjects = serialisedSceneObjects.Count - sceneObjectsLoadedCount;

    if (ignoredObjects > 0)
        m_log.WarnFormat("[ARCHIVER]: Ignored {0} scene objects that already existed in the scene", ignoredObjects);

    m_log.InfoFormat("[ARCHIVER]: Successfully loaded archive");

    m_scene.EventManager.TriggerOarFileLoaded(m_requestId, m_errorMessage);
}
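ResolveUserUuid itself is not shown in these fragments. A plausible cached implementation is sketched below; the account-service call is an assumption, not the project's actual code:

private readonly Dictionary<UUID, bool> m_validUserUuids = new Dictionary<UUID, bool>();

private bool ResolveUserUuid(UUID uuid)
{
    bool valid;
    if (!m_validUserUuids.TryGetValue(uuid, out valid))
    {
        // Hypothetical lookup; the real grid/user service API may differ.
        valid = m_scene.UserAccountService.GetUserAccount(m_scene.RegionInfo.ScopeID, uuid) != null;
        m_validUserUuids[uuid] = valid;   // cache both hits and misses
    }
    return valid;
}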
private void DearchiveRegion0DotStar()
{
    if (m_loadStream == null)
        return;

    int successfulAssetRestores = 0;
    int failedAssetRestores = 0;
    string filePath = "NONE";
    DateTime start = DateTime.Now;

    TarArchiveReader archive = new TarArchiveReader(m_loadStream);

    if (!m_skipAssets)
        m_threadpool = new AuroraThreadPool(new AuroraThreadPoolStartInfo()
        {
            Threads = 1,
            priority = System.Threading.ThreadPriority.BelowNormal
        });

    IBackupModule backup = m_scene.RequestModuleInterface<IBackupModule>();

    if (!m_merge)
    {
        DateTime before = DateTime.Now;
        MainConsole.Instance.Info("[ARCHIVER]: Clearing all existing scene objects");
        if (backup != null)
            backup.DeleteAllSceneObjects();
        MainConsole.Instance.Info("[ARCHIVER]: Cleared all existing scene objects in "
            + (DateTime.Now - before).Minutes + ":" + (DateTime.Now - before).Seconds);
    }

    IScriptModule[] modules = m_scene.RequestModuleInterfaces<IScriptModule>();
    // Disable the script engine so that it doesn't load in the background and kill OAR loading
    foreach (IScriptModule module in modules)
        module.Disabled = true;
    // Disable backup for now as well
    if (backup != null)
        backup.LoadingPrims = true;

    IRegionSerialiserModule serialiser = m_scene.RequestModuleInterface<IRegionSerialiserModule>();
    int sceneObjectsLoadedCount = 0;

    // We save the groups so that we can back them up later
    List<ISceneEntity> groupsToBackup = new List<ISceneEntity>();
    List<LandData> landData = new List<LandData>();

    // must save off some stuff until after assets have been saved and received new uuids
    // keeping these collections local because I am sure they will get large and garbage collection is better that way
    List<byte[]> sceneObjectGroups = new List<byte[]>();
    Dictionary<UUID, UUID> assetBinaryChangeRecord = new Dictionary<UUID, UUID>();
    Queue<UUID> assets2Save = new Queue<UUID>();

    try
    {
        byte[] data;
        TarArchiveReader.TarEntryType entryType;

        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
                continue;

            if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
            {
                sceneObjectGroups.Add(data);
            }
            else if (!m_skipAssets && filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
            {
                AssetBase asset;
                if (LoadAsset(filePath, data, out asset))
                {
                    successfulAssetRestores++;
                    if (m_useAsync)
                    {
                        lock (AssetsToAdd)
                            AssetsToAdd.Add(asset);
                    }
                    else
                    {
                        if (asset.IsBinaryAsset)
                        {
                            UUID aid = asset.ID;
                            asset.ID = m_scene.AssetService.Store(asset);
                            if (asset.ID != aid && asset.ID != UUID.Zero)
                                assetBinaryChangeRecord.Add(aid, asset.ID);
                        }
                        else
                        {
                            if (!assetNonBinaryCollection.ContainsKey(asset.ID))
                            {
                                assetNonBinaryCollection.Add(asset.ID, asset);
                                // I need something I can safely loop through
                                assets2Save.Enqueue(asset.ID);
                            }
                        }
                    }
                }
                else
                {
                    failedAssetRestores++;
                }

                if ((successfulAssetRestores + failedAssetRestores) % 250 == 0)
                    MainConsole.Instance.Info("[ARCHIVER]: Loaded " + successfulAssetRestores
                        + " assets and failed to load " + failedAssetRestores + " assets...");
            }
            else if (filePath.StartsWith(ArchiveConstants.TERRAINS_PATH))
            {
                LoadTerrain(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.SETTINGS_PATH))
            {
                LoadRegionSettings(filePath, data);
            }
            else if (filePath.StartsWith(ArchiveConstants.LANDDATA_PATH))
            {
                LandData parcel = LandDataSerializer.Deserialize(m_utf8Encoding.GetString(data));
                parcel.OwnerID = ResolveUserUuid(parcel.OwnerID, UUID.Zero, "", Vector3.Zero, null);
                landData.Add(parcel);
            }
            else if (filePath == ArchiveConstants.CONTROL_FILE_PATH)
            {
                LoadControlFile(data);
            }
        }

        // Save Assets
        int savingAssetsCount = 0;
        while (assets2Save.Count > 0)
        {
            try
            {
                UUID assetid = assets2Save.Dequeue();
                SaveNonBinaryAssets(assetid, assetNonBinaryCollection[assetid], assetBinaryChangeRecord);
                savingAssetsCount++;
                if ((savingAssetsCount) % 250 == 0)
                    MainConsole.Instance.Info("[ARCHIVER]: Saving " + savingAssetsCount + " assets...");
            }
            catch (Exception ex)
            {
                MainConsole.Instance.Info("[ARCHIVER]: Exception in saving an asset: " + ex.ToString());
            }
        }

        foreach (byte[] data2 in sceneObjectGroups)
        {
            byte[] data3 = data2;

            // Rewrite any asset UUIDs that changed when the assets were re-stored above
            string stringData = Utils.BytesToString(data3);
            MatchCollection mc = Regex.Matches(stringData, sPattern);
            bool didChange = false;
            if (mc.Count >= 1)
            {
                foreach (Match match in mc)
                {
                    UUID thematch = new UUID(match.Value);
                    UUID newvalue = thematch;
                    if (assetNonBinaryCollection.ContainsKey(thematch))
                        newvalue = assetNonBinaryCollection[thematch].ID;
                    else if (assetBinaryChangeRecord.ContainsKey(thematch))
                        newvalue = assetBinaryChangeRecord[thematch];
                    if (thematch == newvalue)
                        continue;
                    stringData = stringData.Replace(thematch.ToString().Trim(), newvalue.ToString().Trim());
                    didChange = true;
                }
            }
            if (didChange)
                data3 = Utils.StringToBytes(stringData);

            ISceneEntity sceneObject = serialiser.DeserializeGroupFromXml2(data3, m_scene);

            if (sceneObject == null)
            {
                //! big error!
                MainConsole.Instance.Error("Error reading SOP XML (Please mantis this!): "
                    + m_asciiEncoding.GetString(data3));
                continue;
            }

            foreach (ISceneChildEntity part in sceneObject.ChildrenEntities())
            {
                if (string.IsNullOrEmpty(part.CreatorData))
                    part.CreatorID = ResolveUserUuid(part.CreatorID, part.CreatorID, part.CreatorData,
                        part.AbsolutePosition, landData);

                part.OwnerID = ResolveUserUuid(part.OwnerID, part.CreatorID, part.CreatorData,
                    part.AbsolutePosition, landData);

                part.LastOwnerID = ResolveUserUuid(part.LastOwnerID, part.CreatorID, part.CreatorData,
                    part.AbsolutePosition, landData);

                // And zap any troublesome sit target information
                part.SitTargetOrientation = new Quaternion(0, 0, 0, 1);
                part.SitTargetPosition = new Vector3(0, 0, 0);

                // Fix ownership/creator of inventory items
                // Not doing so results in inventory items
                // being no copy/no mod for everyone
                lock (part.TaskInventory)
                {
                    TaskInventoryDictionary inv = part.TaskInventory;
                    foreach (KeyValuePair<UUID, TaskInventoryItem> kvp in inv)
                    {
                        kvp.Value.OwnerID = ResolveUserUuid(kvp.Value.OwnerID, kvp.Value.CreatorID,
                            kvp.Value.CreatorData, part.AbsolutePosition, landData);
                        if (string.IsNullOrEmpty(kvp.Value.CreatorData))
                            kvp.Value.CreatorID = ResolveUserUuid(kvp.Value.CreatorID, kvp.Value.CreatorID,
                                kvp.Value.CreatorData, part.AbsolutePosition, landData);
                    }
                }
            }

            // Add the offsets of the region
            Vector3 newPos = new Vector3(sceneObject.AbsolutePosition.X + m_offsetX,
                                         sceneObject.AbsolutePosition.Y + m_offsetY,
                                         sceneObject.AbsolutePosition.Z + m_offsetZ);
            if (m_flipX)
                newPos.X = m_scene.RegionInfo.RegionSizeX - newPos.X;
            if (m_flipY)
                newPos.Y = m_scene.RegionInfo.RegionSizeY - newPos.Y;
            sceneObject.SetAbsolutePosition(false, newPos);

            if (m_scene.SceneGraph.AddPrimToScene(sceneObject))
            {
                groupsToBackup.Add(sceneObject);
                sceneObject.ScheduleGroupUpdate(PrimUpdateFlags.ForcedFullUpdate);
                sceneObject.CreateScriptInstances(0, false, StateSource.RegionStart, UUID.Zero, true);
            }
            sceneObjectsLoadedCount++;
            if (sceneObjectsLoadedCount % 250 == 0)
                MainConsole.Instance.Info("[ARCHIVER]: Loaded " + sceneObjectsLoadedCount + " objects...");
        }
        assetNonBinaryCollection.Clear();
        assetBinaryChangeRecord.Clear();
        sceneObjectGroups.Clear();
    }
    catch (Exception e)
    {
        MainConsole.Instance.ErrorFormat(
            "[ARCHIVER]: Aborting load with error in archive file {0}. {1}", filePath, e);
        m_errorMessage += e.ToString();
        m_scene.EventManager.TriggerOarFileLoaded(UUID.Zero.Guid, m_errorMessage);
        return;
    }
    finally
    {
        archive.Close();
        m_loadStream.Close();
        m_loadStream.Dispose();

        // Re-enable now that we are done
        foreach (IScriptModule module in modules)
            module.Disabled = false;

        // Reset backup too
        if (backup != null)
            backup.LoadingPrims = false;
    }

    // Now back up the prims
    foreach (ISceneEntity grp in groupsToBackup)
    {
        // Backup!
        grp.HasGroupChanged = true;
    }

    if (!m_skipAssets && m_useAsync && !AssetSaverIsRunning)
        m_threadpool.QueueEvent(SaveAssets, 0);

    if (!m_skipAssets)
    {
        MainConsole.Instance.InfoFormat("[ARCHIVER]: Restored {0} assets", successfulAssetRestores);

        if (failedAssetRestores > 0)
        {
            MainConsole.Instance.ErrorFormat("[ARCHIVER]: Failed to load {0} assets", failedAssetRestores);
            m_errorMessage += String.Format("Failed to load {0} assets", failedAssetRestores);
        }
    }

    // Try to retain the original creator/owner/lastowner if their uuid is present on this grid
    // otherwise, use the master avatar uuid instead

    // Reload serialized parcels
    MainConsole.Instance.InfoFormat("[ARCHIVER]: Loading {0} parcels. Please wait.", landData.Count);

    IParcelManagementModule parcelManagementModule = m_scene.RequestModuleInterface<IParcelManagementModule>();
    if (parcelManagementModule != null)
        parcelManagementModule.IncomingLandDataFromOAR(landData, m_merge, new Vector2(m_offsetX, m_offsetY));

    MainConsole.Instance.InfoFormat("[ARCHIVER]: Restored {0} parcels.", landData.Count);

    // Clean it out
    landData.Clear();

    MainConsole.Instance.InfoFormat("[ARCHIVER]: Successfully loaded archive in "
        + (DateTime.Now - start).Minutes + ":" + (DateTime.Now - start).Seconds);

    m_validUserUuids.Clear();
    m_scene.EventManager.TriggerOarFileLoaded(UUID.Zero.Guid, m_errorMessage);
}
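The UUID-rewrite pass above depends on the field sPattern, which is not shown in this fragment. The same idea in isolation, as a sketch with a hypothetical pattern constant and Regex.Replace performing the substitution in one pass:

// Matches the canonical 8-4-4-4-12 UUID text form (assumed to be what sPattern contains).
const string UuidPattern =
    @"[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}";

static string RemapAssetIds(string xml, IDictionary<UUID, UUID> remap)
{
    return Regex.Replace(xml, UuidPattern, match =>
    {
        UUID oldId;
        UUID newId;
        if (UUID.TryParse(match.Value, out oldId) && remap.TryGetValue(oldId, out newId))
            return newId.ToString();   // rewrite references to re-stored assets
        return match.Value;            // unknown UUIDs pass through untouched
    });
}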
/// <summary>
///   Save a backup of the sim
/// </summary>
/// <param name = "appendedFilePath">The file path where the backup will be saved</param>
protected virtual void SaveBackup(string appendedFilePath, bool saveAssets)
{
    if (appendedFilePath == "/")
        appendedFilePath = "";
    if (m_scene.RegionInfo.HasBeenDeleted)
        return;

    IBackupModule backupModule = m_scene.RequestModuleInterface<IBackupModule>();
    if (backupModule != null && backupModule.LoadingPrims) // Something is changing lots of prims
    {
        MainConsole.Instance.Info("[Backup]: Not saving backup because the backup module is loading prims");
        return;
    }

    // Save any script state saves that might be around
    IScriptModule[] engines = m_scene.RequestModuleInterfaces<IScriptModule>();
    try
    {
        if (engines != null)
        {
#if (!ISWIN)
            foreach (IScriptModule engine in engines)
            {
                if (engine != null)
                {
                    engine.SaveStateSaves();
                }
            }
#else
            foreach (IScriptModule engine in engines.Where(engine => engine != null))
            {
                engine.SaveStateSaves();
            }
#endif
        }
    }
    catch (Exception ex)
    {
        MainConsole.Instance.WarnFormat("[Backup]: Exception caught: {0}", ex);
    }

    MainConsole.Instance.Info("[FileBasedSimulationData]: Saving backup for region " + m_scene.RegionInfo.RegionName);

    string fileName = appendedFilePath + m_scene.RegionInfo.RegionName + m_saveAppendedFileName + ".abackup";
    if (File.Exists(fileName))
    {
        // Do new style saving here!
        GZipStream m_saveStream = new GZipStream(new FileStream(fileName + ".tmp", FileMode.Create),
                                                 CompressionMode.Compress);
        TarArchiveWriter writer = new TarArchiveWriter(m_saveStream);
        GZipStream m_loadStream = new GZipStream(new FileStream(fileName, FileMode.Open),
                                                 CompressionMode.Decompress);
        TarArchiveReader reader = new TarArchiveReader(m_loadStream);

        writer.WriteDir("parcels");

        IParcelManagementModule module = m_scene.RequestModuleInterface<IParcelManagementModule>();
        if (module != null)
        {
            List<ILandObject> landObject = module.AllParcels();
            foreach (ILandObject parcel in landObject)
            {
                OSDMap parcelMap = parcel.LandData.ToOSD();
                var binary = OSDParser.SerializeLLSDBinary(parcelMap);
                writer.WriteFile("parcels/" + parcel.LandData.GlobalID.ToString(), binary);
                binary = null;
                parcelMap = null;
            }
        }

        writer.WriteDir("newstyleterrain");
        writer.WriteDir("newstylerevertterrain");
        writer.WriteDir("newstylewater");
        writer.WriteDir("newstylerevertwater");

        ITerrainModule tModule = m_scene.RequestModuleInterface<ITerrainModule>();
        if (tModule != null)
        {
            try
            {
                byte[] sdata = WriteTerrainToStream(tModule.TerrainMap);
                writer.WriteFile("newstyleterrain/" + m_scene.RegionInfo.RegionID.ToString() + ".terrain", sdata);
                sdata = null;

                sdata = WriteTerrainToStream(tModule.TerrainRevertMap);
                writer.WriteFile("newstylerevertterrain/" + m_scene.RegionInfo.RegionID.ToString() + ".terrain", sdata);
                sdata = null;

                if (tModule.TerrainWaterMap != null)
                {
                    sdata = WriteTerrainToStream(tModule.TerrainWaterMap);
                    writer.WriteFile("newstylewater/" + m_scene.RegionInfo.RegionID.ToString() + ".terrain", sdata);
                    sdata = null;

                    sdata = WriteTerrainToStream(tModule.TerrainWaterRevertMap);
                    writer.WriteFile("newstylerevertwater/" + m_scene.RegionInfo.RegionID.ToString() + ".terrain", sdata);
                    sdata = null;
                }
            }
            catch (Exception ex)
            {
                MainConsole.Instance.WarnFormat("[Backup]: Exception caught: {0}", ex);
            }
        }

        IDictionary<UUID, AssetType> assets = new Dictionary<UUID, AssetType>();
        UuidGatherer assetGatherer = new UuidGatherer(m_scene.AssetService);
        ISceneEntity[] saveentities = m_scene.Entities.GetEntities();
        List<UUID> entitiesToSave = new List<UUID>();
        foreach (ISceneEntity entity in saveentities)
        {
            try
            {
                if (entity.IsAttachment ||
                    ((entity.RootChild.Flags & PrimFlags.Temporary) == PrimFlags.Temporary) ||
                    ((entity.RootChild.Flags & PrimFlags.TemporaryOnRez) == PrimFlags.TemporaryOnRez))
                    continue;

                if (entity.HasGroupChanged)
                {
                    entity.HasGroupChanged = false;
                    // Write all entities
                    byte[] xml = ((ISceneObject)entity).ToBinaryXml2();
                    writer.WriteFile("entities/" + entity.UUID.ToString(), xml);
                    xml = null;
                }
                else
                {
                    entitiesToSave.Add(entity.UUID);
                }

                if (saveAssets)
                    assetGatherer.GatherAssetUuids(entity, assets, m_scene);
            }
            catch (Exception ex)
            {
                MainConsole.Instance.WarnFormat("[Backup]: Exception caught: {0}", ex);
                entitiesToSave.Add(entity.UUID);
            }
        }

        byte[] data;
        string filePath;
        TarArchiveReader.TarEntryType entryType;
        // Load the archive data that we need
        try
        {
            while ((data = reader.ReadEntry(out filePath, out entryType)) != null)
            {
                if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
                    continue;
                if (filePath.StartsWith("entities/"))
                {
                    UUID entityID = UUID.Parse(filePath.Remove(0, 9));
                    if (entitiesToSave.Contains(entityID))
                    {
                        writer.WriteFile(filePath, data);
                        entitiesToSave.Remove(entityID);
                    }
                }
                data = null;
            }
        }
        catch (Exception ex)
        {
            MainConsole.Instance.WarnFormat("[Backup]: Exception caught: {0}", ex);
        }

        if (entitiesToSave.Count > 0)
        {
            MainConsole.Instance.Fatal(entitiesToSave.Count + " PRIMS WERE NOT GOING TO BE SAVED! FORCE SAVING NOW! ");
            foreach (ISceneEntity entity in saveentities)
            {
                if (entitiesToSave.Contains(entity.UUID))
                {
                    if (entity.IsAttachment ||
                        ((entity.RootChild.Flags & PrimFlags.Temporary) == PrimFlags.Temporary) ||
                        ((entity.RootChild.Flags & PrimFlags.TemporaryOnRez) == PrimFlags.TemporaryOnRez))
                        continue;
                    // Write all entities
                    byte[] xml = ((ISceneObject)entity).ToBinaryXml2();
                    writer.WriteFile("entities/" + entity.UUID.ToString(), xml);
                    xml = null;
                }
            }
        }

        if (saveAssets)
        {
            foreach (UUID assetID in new List<UUID>(assets.Keys))
            {
                try
                {
                    WriteAsset(assetID.ToString(), m_scene.AssetService.Get(assetID.ToString()), writer);
                }
                catch (Exception ex)
                {
                    MainConsole.Instance.WarnFormat("[Backup]: Exception caught: {0}", ex);
                }
            }
        }

        reader.Close();
        writer.Close();
        m_loadStream.Close();
        m_saveStream.Close();
        GC.Collect();

        if (m_keepOldSave && !m_oldSaveHasBeenSaved)
        {
            // Haven't moved it yet, so make sure the directory exists, then move it
            m_oldSaveHasBeenSaved = true;
            if (!Directory.Exists(m_oldSaveDirectory))
                Directory.CreateDirectory(m_oldSaveDirectory);
            File.Copy(fileName + ".tmp",
                      Path.Combine(m_oldSaveDirectory,
                                   m_scene.RegionInfo.RegionName + SerializeDateTime() + m_saveAppendedFileName + ".abackup"));
        }
        // Just remove the file
        File.Delete(fileName);
    }
    else
    {
        // Add the .tmp since we might need to make a backup and so that if something goes wrong,
        // we don't corrupt the main backup
        GZipStream m_saveStream = new GZipStream(new FileStream(fileName + ".tmp", FileMode.Create),
                                                 CompressionMode.Compress);
        TarArchiveWriter writer = new TarArchiveWriter(m_saveStream);
        IAuroraBackupArchiver archiver = m_scene.RequestModuleInterface<IAuroraBackupArchiver>();
        // Turn off prompting so that we don't ask the user questions every time we need to save the backup
        archiver.AllowPrompting = false;
        archiver.SaveRegionBackup(writer, m_scene);
        archiver.AllowPrompting = true;

        m_saveStream.Close();
        writer.Close();
        GC.Collect();
    }
    File.Move(fileName + ".tmp", fileName);

    ISceneEntity[] entities = m_scene.Entities.GetEntities();
    try
    {
#if (!ISWIN)
        foreach (ISceneEntity entity in entities)
        {
            if (entity.HasGroupChanged)
            {
                entity.HasGroupChanged = false;
            }
        }
#else
        foreach (ISceneEntity entity in entities.Where(entity => entity.HasGroupChanged))
        {
            entity.HasGroupChanged = false;
        }
#endif
    }
    catch (Exception ex)
    {
        MainConsole.Instance.WarnFormat("[Backup]: Exception caught: {0}", ex);
    }

    // Now make it the full file again
    MapTileNeedsGenerated = true;
    MainConsole.Instance.Info("[FileBasedSimulationData]: Saved Backup for region " + m_scene.RegionInfo.RegionName);
}
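The ".tmp then File.Move" sequence above is a crash-safety pattern: the previous good backup is never destroyed until the replacement has been fully written and closed. The same pattern in isolation (helper name hypothetical):

private static void AtomicReplace(string path, Action<Stream> writeContents)
{
    string tmp = path + ".tmp";
    using (FileStream fs = new FileStream(tmp, FileMode.Create))
        writeContents(fs);                       // write the new copy completely first
    if (File.Exists(path))
        File.Delete(path);                       // classic File.Move won't overwrite
    File.Move(tmp, path);                        // then swap it in
}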
private void DearchiveRegion0DotStar()
{
    int successfulAssetRestores = 0;
    int failedAssetRestores = 0;
    List<string> serializedSceneObjects = new List<string>();
    string filePath = "NONE";

    try
    {
        TarArchiveReader archive = new TarArchiveReader(m_loadStream);

        byte[] data;
        TarArchiveReader.TarEntryType entryType;
        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            //m_log.DebugFormat(
            //    "[ARCHIVER]: Successfully read {0} ({1} bytes)", filePath, data.Length);

            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
                continue;

            if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
            {
                serializedSceneObjects.Add(Encoding.UTF8.GetString(data));
            }
            else if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
            {
                if (LoadAsset(filePath, data))
                    successfulAssetRestores++;
                else
                    failedAssetRestores++;
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.TERRAINS_PATH))
            {
                LoadTerrain(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.SETTINGS_PATH))
            {
                LoadRegionSettings(filePath, data);
            }
        }

        //m_log.Debug("[ARCHIVER]: Reached end of archive");

        archive.Close();
    }
    catch (Exception e)
    {
        m_log.ErrorFormat(
            "[ARCHIVER]: Aborting load with error in archive file {0}. {1}", filePath, e);
        m_errorMessage += e.ToString();
        m_scene.EventManager.TriggerOarFileLoaded(m_requestId, m_errorMessage);
        return;
    }

    m_log.InfoFormat("[ARCHIVER]: Restored {0} assets", successfulAssetRestores);

    if (failedAssetRestores > 0)
    {
        m_log.ErrorFormat("[ARCHIVER]: Failed to load {0} assets", failedAssetRestores);
        m_errorMessage += String.Format("Failed to load {0} assets", failedAssetRestores);
    }

    // Reload serialized prims
    m_log.InfoFormat("[ARCHIVER]: Preparing {0} scene objects. Please wait.", serializedSceneObjects.Count);

    IRegionSerializerModule serializer = m_scene.RequestModuleInterface<IRegionSerializerModule>();
    int sceneObjectsLoadedCount = 0;
    List<SceneObjectGroup> backupObjects = new List<SceneObjectGroup>();
    Dictionary<UUID, UUID> OriginalBackupIDs = new Dictionary<UUID, UUID>();

    bool objectFixingFailed = false;

    foreach (string serializedSceneObject in serializedSceneObjects)
    {
        SceneObjectGroup sceneObject;
        try
        {
            sceneObject = serializer.DeserializeGroupFromXml2(serializedSceneObject);
        }
        catch (Exception e)
        {
            m_log.InfoFormat("[ARCHIVER]: Error while deserializing group: {0}", e);
            if (m_skipErrorGroups)
                continue;
            else
                throw;
        }

        if (sceneObject == null)
        {
            if (m_skipErrorGroups)
                continue;
            else
                throw new Exception("Error while deserializing group");
        }

        // For now, give all incoming scene objects new uuids. This will allow scenes to be cloned
        // on the same region server and multiple examples of a single object archive to be imported
        // to the same scene (when this is possible).
        UUID OldUUID = sceneObject.UUID;
        sceneObject.ResetIDs();
        // if sceneObject is no-copy, save the old ID with the new ID.
        OriginalBackupIDs[sceneObject.UUID] = OldUUID;

        // Try to retain the original creator/owner/lastowner if their uuid is present on this grid
        // otherwise, use the master avatar uuid instead
        UUID masterAvatarId = m_scene.RegionInfo.MasterAvatarAssignedUUID;

        if (m_scene.RegionInfo.EstateSettings.EstateOwner != UUID.Zero)
            masterAvatarId = m_scene.RegionInfo.EstateSettings.EstateOwner;

        foreach (SceneObjectPart part in sceneObject.GetParts())
        {
            if (!ResolveUserUuid(part.CreatorID))
            {
                m_log.WarnFormat("[ARCHIVER]: Could not resolve av/group ID {0} for object '{1}' part creator",
                                 part.CreatorID, sceneObject.Name);
                // Don't fail to load an object owned by a valid user, just because a creator no longer
                // exists in the DB. (We've seen this with some of YadNi's stuff.)
                // objectFixingFailed = true;
                // part.CreatorID = masterAvatarId;
            }

            if (!ResolveUserUuid(part.OwnerID))
            {
                m_log.WarnFormat("[ARCHIVER]: Could not resolve av/group ID {0} for object '{1}' part owner",
                                 part.OwnerID, sceneObject.Name);
                objectFixingFailed = true;
                part.OwnerID = masterAvatarId;
            }

            if (!ResolveUserUuid(part.LastOwnerID))
            {
                m_log.WarnFormat("[ARCHIVER]: Could not resolve av/group ID {0} for object '{1}' part last owner",
                                 part.LastOwnerID, sceneObject.Name);
                objectFixingFailed = true;
                part.LastOwnerID = masterAvatarId;
            }

            // Fix ownership/creator of inventory items
            // Not doing so results in inventory items
            // being no copy/no mod for everyone
            lock (part.TaskInventory)
            {
                TaskInventoryDictionary inv = part.TaskInventory;
                foreach (KeyValuePair<UUID, TaskInventoryItem> kvp in inv)
                {
                    if (!ResolveUserUuid(kvp.Value.OwnerID))
                    {
                        m_log.WarnFormat("[ARCHIVER]: Could not resolve av/group ID {0} for object '{1}' inventory item owner",
                                         kvp.Value.OwnerID, sceneObject.Name);
                        objectFixingFailed = true;
                        kvp.Value.OwnerID = masterAvatarId;
                    }

                    if (!ResolveUserUuid(kvp.Value.CreatorID))
                    {
                        m_log.WarnFormat("[ARCHIVER]: Could not resolve av/group ID {0} for object '{1}' inventory item creator",
                                         kvp.Value.CreatorID, sceneObject.Name);
                        // Don't fail to load an object owned by a valid user, just because a creator no longer
                        // exists in the DB. (We've seen this with some of YadNi's stuff.)
                        // objectFixingFailed = true;
                        // kvp.Value.CreatorID = masterAvatarId;
                    }
                }
            }
        }

        backupObjects.Add(sceneObject);
    }

    if (objectFixingFailed && !m_allowUserReassignment)
    {
        m_log.Error("[ARCHIVER]: Could not restore scene objects. One or more avatar accounts not found.");
        return;
    }

    Dictionary<UUID, SceneObjectGroup> ExistingNoCopyObjects = new Dictionary<UUID, SceneObjectGroup>();
    if (!m_merge)
    {
        m_log.Info("[ARCHIVER]: Clearing all existing scene objects");
        m_scene.DeleteAllSceneObjectsExcept(delegate(SceneObjectGroup existingSOG)
        {
            // Return true if this object should be skipped in the delete.
            // Don't delete any no-copy objects.
            if (NoCopyObjectOrContents(existingSOG))
            {
                ExistingNoCopyObjects.Add(existingSOG.UUID, existingSOG);
                return true;
            }
            return false;
        });
    }

    m_log.InfoFormat("[ARCHIVER]: Loading {0} scene objects. Please wait.", serializedSceneObjects.Count);

    // sceneObject is the one from backup to restore to the scene
    foreach (SceneObjectGroup backupObject in backupObjects)
    {
        SceneObjectGroup existingObject = null;
        UUID originalUUID = OriginalBackupIDs[backupObject.UUID];
        // Don't restore any no-copy objects unless there was an existing matching UUID in the scene.
        if (ExistingNoCopyObjects.ContainsKey(originalUUID))
            existingObject = ExistingNoCopyObjects[originalUUID];
        // existingSOG here means existing NO-COPY object, not deleted from scene above

        if (NoCopyObjectOrContents(backupObject))
        {
            if ((existingObject != null) && !existingObject.IsAttachment)
            {
                // copy only position and rotation from backup
                existingObject.Rotation = backupObject.Rotation;
                existingObject.AbsolutePosition = backupObject.AbsolutePosition;
            }
            // don't restore no-copy items
        }
        else if (m_scene.AddRestoredSceneObject(backupObject, true, false))
        {
            // this may have added a 2nd copyable copy if existingObject is no-copy
            sceneObjectsLoadedCount++;
            backupObject.CreateScriptInstances(0, ScriptStartFlags.PostOnRez, m_scene.DefaultScriptEngine, 0, null);
        }
    }

    m_log.InfoFormat("[ARCHIVER]: Restored {0} scene objects to the scene", sceneObjectsLoadedCount);

    int ignoredObjects = serializedSceneObjects.Count - sceneObjectsLoadedCount;

    if (ignoredObjects > 0)
        m_log.WarnFormat("[ARCHIVER]: Ignored {0} scene objects that already existed in the scene", ignoredObjects);

    m_log.InfoFormat("[ARCHIVER]: Successfully loaded archive");

    m_scene.EventManager.TriggerOarFileLoaded(m_requestId, m_errorMessage);
}
/// <summary>
/// Execute the request
/// </summary>
/// <returns>
/// A list of the inventory nodes loaded. If folders were loaded then only the root folders are
/// returned
/// </returns>
public HashSet<InventoryNodeBase> Execute(bool loadAll)
{
    try
    {
        string filePath = "ERROR";
        int successfulAssetRestores = 0;
        int failedAssetRestores = 0;
        int successfulItemRestores = 0;

        HashSet<InventoryNodeBase> loadedNodes = new HashSet<InventoryNodeBase>();

        List<InventoryFolderBase> folderCandidates
            = InventoryArchiveUtils.FindFolderByPath(
                m_registry.RequestModuleInterface<IInventoryService>(), m_userInfo.PrincipalID, m_invPath);

        if (folderCandidates.Count == 0)
        {
            // Possibly provide an option later on to automatically create this folder if it does not exist
            m_log.ErrorFormat("[INVENTORY ARCHIVER]: Inventory path {0} does not exist", m_invPath);
            return loadedNodes;
        }

        InventoryFolderBase rootDestinationFolder = folderCandidates[0];
        archive = new TarArchiveReader(m_loadStream);

        // In order to load identically named folders, we need to keep track of the folders that we have already
        // resolved
        Dictionary<string, InventoryFolderBase> resolvedFolders = new Dictionary<string, InventoryFolderBase>();

        byte[] data;
        TarArchiveReader.TarEntryType entryType;

        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
            {
                if (LoadAsset(filePath, data))
                    successfulAssetRestores++;
                else
                    failedAssetRestores++;

                if ((successfulAssetRestores) % 50 == 0)
                    m_log.DebugFormat(
                        "[INVENTORY ARCHIVER]: Loaded {0} assets...", successfulAssetRestores);
            }
            else if (filePath.StartsWith(ArchiveConstants.INVENTORY_PATH))
            {
                filePath = filePath.Substring(ArchiveConstants.INVENTORY_PATH.Length);

                // Trim off the file portion if we aren't already dealing with a directory path
                if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY != entryType)
                    filePath = filePath.Remove(filePath.LastIndexOf("/") + 1);

                InventoryFolderBase foundFolder
                    = ReplicateArchivePathToUserInventory(
                        filePath, rootDestinationFolder, resolvedFolders, loadedNodes);

                if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY != entryType)
                {
                    InventoryItemBase item = LoadItem(data, foundFolder);

                    if (item != null)
                    {
                        successfulItemRestores++;

                        // If we aren't loading the folder containing the item then we'll need to update the
                        // viewer separately for that item.
                        if (!loadedNodes.Contains(foundFolder))
                        {
                            if (loadAll)
                                loadedNodes.Add(item);
                        }
                    }
                }
            }
        }

        archive.Close();

        m_log.DebugFormat(
            "[INVENTORY ARCHIVER]: Successfully loaded {0} assets with {1} failures",
            successfulAssetRestores, failedAssetRestores);
        m_log.InfoFormat("[INVENTORY ARCHIVER]: Successfully loaded {0} items", successfulItemRestores);

        return loadedNodes;
    }
    finally
    {
        m_loadStream.Close();
    }
}
private void DearchiveRegion0DotStar()
{
    int successfulAssetRestores = 0;
    int failedAssetRestores = 0;
    List<string> serialisedSceneObjects = new List<string>();
    List<string> serialisedParcels = new List<string>();
    string filePath = "NONE";

    TarArchiveReader archive = new TarArchiveReader(m_loadStream);
    byte[] data;
    TarArchiveReader.TarEntryType entryType;

    try
    {
        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            //m_log.DebugFormat(
            //    "[ARCHIVER]: Successfully read {0} ({1} bytes)", filePath, data.Length);

            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
                continue;

            if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
            {
                serialisedSceneObjects.Add(Encoding.UTF8.GetString(data));
            }
            else if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH) && !m_skipAssets)
            {
                if (LoadAsset(filePath, data))
                    successfulAssetRestores++;
                else
                    failedAssetRestores++;

                if ((successfulAssetRestores + failedAssetRestores) % 250 == 0)
                    m_log.Debug("[ARCHIVER]: Loaded " + successfulAssetRestores
                        + " assets and failed to load " + failedAssetRestores + " assets...");
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.TERRAINS_PATH))
            {
                LoadTerrain(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.SETTINGS_PATH))
            {
                LoadRegionSettings(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.LANDDATA_PATH))
            {
                serialisedParcels.Add(Encoding.UTF8.GetString(data));
            }
            else if (filePath == ArchiveConstants.CONTROL_FILE_PATH)
            {
                LoadControlFile(filePath, data);
            }
        }

        //m_log.Debug("[ARCHIVER]: Reached end of archive");
    }
    catch (Exception e)
    {
        m_log.ErrorFormat(
            "[ARCHIVER]: Aborting load with error in archive file {0}. {1}", filePath, e);
        m_errorMessage += e.ToString();
        m_scene.EventManager.TriggerOarFileLoaded(m_requestId, m_errorMessage);
        return;
    }
    finally
    {
        archive.Close();
    }

    if (!m_skipAssets)
    {
        m_log.InfoFormat("[ARCHIVER]: Restored {0} assets", successfulAssetRestores);

        if (failedAssetRestores > 0)
        {
            m_log.ErrorFormat("[ARCHIVER]: Failed to load {0} assets", failedAssetRestores);
            m_errorMessage += String.Format("Failed to load {0} assets", failedAssetRestores);
        }
    }

    if (!m_merge)
    {
        m_log.Info("[ARCHIVER]: Clearing all existing scene objects");
        m_scene.DeleteAllSceneObjects();
    }

    LoadParcels(serialisedParcels);
    LoadObjects(serialisedSceneObjects);

    m_log.InfoFormat("[ARCHIVER]: Successfully loaded archive");

    m_scene.EventManager.TriggerOarFileLoaded(m_requestId, m_errorMessage);
}
private void DearchiveRegion0DotStar()
{
    int successfulAssetRestores = 0;
    int failedAssetRestores = 0;
    //List<string> serialisedSceneObjects = new List<string>();
    List<string> serialisedParcels = new List<string>();
    string filePath = "NONE";
    DateTime start = DateTime.Now;

    TarArchiveReader archive = new TarArchiveReader(m_loadStream);
    byte[] data;
    TarArchiveReader.TarEntryType entryType;

    if (!m_skipAssets)
        m_threadpool = new Aurora.Framework.AuroraThreadPool(new Aurora.Framework.AuroraThreadPoolStartInfo()
        {
            Threads = 1,
            priority = System.Threading.ThreadPriority.BelowNormal
        });

    IBackupModule backup = m_scene.RequestModuleInterface<IBackupModule>();
    if (!m_merge)
    {
        DateTime before = DateTime.Now;
        m_log.Info("[ARCHIVER]: Clearing all existing scene objects");
        if (backup != null)
            backup.DeleteAllSceneObjects();
        m_log.Info("[ARCHIVER]: Cleared all existing scene objects in "
            + (DateTime.Now - before).Minutes + ":" + (DateTime.Now - before).Seconds);
    }

    IScriptModule[] modules = m_scene.RequestModuleInterfaces<IScriptModule>();
    // Disable the script engine so that it doesn't load in the background and kill OAR loading
    foreach (IScriptModule module in modules)
        module.Disabled = true;
    // Disable backup for now as well
    if (backup != null)
        backup.LoadingPrims = true;

    IRegionSerialiserModule serialiser = m_scene.RequestModuleInterface<IRegionSerialiserModule>();
    int sceneObjectsLoadedCount = 0;

    // We save the groups so that we can back them up later
    List<SceneObjectGroup> groupsToBackup = new List<SceneObjectGroup>();

    try
    {
        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            //m_log.DebugFormat(
            //    "[ARCHIVER]: Successfully read {0} ({1} bytes)", filePath, data.Length);

            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
                continue;

            if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
            {
                string sogdata = m_utf8Encoding.GetString(data);
                //serialisedSceneObjects.Add(m_utf8Encoding.GetString(data));
                /*
                 * m_log.DebugFormat("[ARCHIVER]: Loading xml with raw size {0}", serialisedSceneObject.Length);
                 *
                 * // Really large xml files (multi megabyte) appear to cause
                 * // memory problems
                 * // when loading the xml. But don't enable this check yet
                 *
                 * if (serialisedSceneObject.Length > 5000000)
                 * {
                 *     m_log.Error("[ARCHIVER]: Ignoring xml since size > 5000000);");
                 *     continue;
                 * }
                 */
                string serialisedSceneObject = sogdata;
                SceneObjectGroup sceneObject
                    = (SceneObjectGroup)serialiser.DeserializeGroupFromXml2(serialisedSceneObject, m_scene);

                if (sceneObject == null)
                {
                    //! big error!
                    m_log.Error("Error reading SOP XML (Please mantis this!): " + serialisedSceneObject);
                    continue;
                }

                foreach (SceneObjectPart part in sceneObject.ChildrenList)
                {
                    if (!ResolveUserUuid(part.CreatorID))
                        part.CreatorID = m_scene.RegionInfo.EstateSettings.EstateOwner;
                    if (!ResolveUserUuid(part.OwnerID))
                        part.OwnerID = m_scene.RegionInfo.EstateSettings.EstateOwner;
                    if (!ResolveUserUuid(part.LastOwnerID))
                        part.LastOwnerID = m_scene.RegionInfo.EstateSettings.EstateOwner;

                    // And zap any troublesome sit target information
                    part.SitTargetOrientation = new Quaternion(0, 0, 0, 1);
                    part.SitTargetPosition = new Vector3(0, 0, 0);

                    // Fix ownership/creator of inventory items
                    // Not doing so results in inventory items
                    // being no copy/no mod for everyone
                    lock (part.TaskInventory)
                    {
                        TaskInventoryDictionary inv = part.TaskInventory;
                        foreach (KeyValuePair<UUID, TaskInventoryItem> kvp in inv)
                        {
                            if (!ResolveUserUuid(kvp.Value.OwnerID))
                                kvp.Value.OwnerID = m_scene.RegionInfo.EstateSettings.EstateOwner;
                            if (!ResolveUserUuid(kvp.Value.CreatorID))
                                kvp.Value.CreatorID = m_scene.RegionInfo.EstateSettings.EstateOwner;
                        }
                    }
                }

                // Add the offsets of the region
                Vector3 newPos = new Vector3(sceneObject.AbsolutePosition.X + m_offsetX,
                                             sceneObject.AbsolutePosition.Y + m_offsetY,
                                             sceneObject.AbsolutePosition.Z + m_offsetZ);
                if (m_flipX)
                    newPos.X = m_scene.RegionInfo.RegionSizeX - newPos.X;
                if (m_flipY)
                    newPos.Y = m_scene.RegionInfo.RegionSizeY - newPos.Y;
                sceneObject.SetAbsolutePosition(false, newPos);

                if (m_scene.SceneGraph.AddPrimToScene(sceneObject))
                {
                    groupsToBackup.Add(sceneObject);
                    sceneObject.ScheduleGroupUpdate(PrimUpdateFlags.FullUpdate);
                    // Count each group once here; the original code incremented this counter a
                    // second time below, which double-counted successfully added objects.
                    sceneObjectsLoadedCount++;
                    sceneObject.CreateScriptInstances(0, false, 0, UUID.Zero);
                    sceneObject.ResumeScripts();
                }

                if (sceneObjectsLoadedCount % 250 == 0)
                    m_log.Debug("[ARCHIVER]: Loaded " + sceneObjectsLoadedCount + " objects...");
            }
            else if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
            {
                if (!m_skipAssets)
                {
                    if (LoadAsset(filePath, data))
                        successfulAssetRestores++;
                    else
                        failedAssetRestores++;

                    if ((successfulAssetRestores + failedAssetRestores) % 250 == 0)
                        m_log.Debug("[ARCHIVER]: Loaded " + successfulAssetRestores
                            + " assets and failed to load " + failedAssetRestores + " assets...");
                }
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.TERRAINS_PATH))
            {
                LoadTerrain(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.SETTINGS_PATH))
            {
                LoadRegionSettings(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.LANDDATA_PATH))
            {
                serialisedParcels.Add(m_utf8Encoding.GetString(data));
            }
            else if (filePath == ArchiveConstants.CONTROL_FILE_PATH)
            {
                LoadControlFile(filePath, data);
            }
            else
            {
                m_log.Debug("[ARCHIVER]: UNKNOWN PATH: " + filePath);
            }
        }

        //m_log.Debug("[ARCHIVER]: Reached end of archive");
    }
    catch (Exception e)
    {
        m_log.ErrorFormat(
            "[ARCHIVER]: Aborting load with error in archive file {0}. {1}", filePath, e);
        m_errorMessage += e.ToString();
        m_scene.EventManager.TriggerOarFileLoaded(UUID.Zero.Guid, m_errorMessage);
        return;
    }
    finally
    {
        archive.Close();
        m_loadStream.Close();
        m_loadStream.Dispose();
    }

    // Re-enable now that we are done
    foreach (IScriptModule module in modules)
        module.Disabled = false;
    // Reset backup too
    if (backup != null)
        backup.LoadingPrims = false;

    // Now back up the prims
    foreach (SceneObjectGroup grp in groupsToBackup)
    {
        // Backup!
        grp.HasGroupChanged = true;
    }

    if (!m_skipAssets)
    {
        if (m_useAsync && !AssetSaverIsRunning)
            m_threadpool.QueueEvent(SaveAssets, 0);
        else if (!AssetSaverIsRunning)
            SaveAssets();
    }

    if (!m_skipAssets)
    {
        m_log.InfoFormat("[ARCHIVER]: Restored {0} assets", successfulAssetRestores);

        if (failedAssetRestores > 0)
        {
            m_log.ErrorFormat("[ARCHIVER]: Failed to load {0} assets", failedAssetRestores);
            m_errorMessage += String.Format("Failed to load {0} assets", failedAssetRestores);
        }
    }

    // Try to retain the original creator/owner/lastowner if their uuid is present on this grid
    // otherwise, use the master avatar uuid instead

    // Reload serialized parcels
    m_log.InfoFormat("[ARCHIVER]: Loading {0} parcels. Please wait.", serialisedParcels.Count);

    List<LandData> landData = new List<LandData>();
    foreach (string serialisedParcel in serialisedParcels)
    {
        LandData parcel = LandDataSerializer.Deserialize(serialisedParcel);
        if (!ResolveUserUuid(parcel.OwnerID))
            parcel.OwnerID = m_scene.RegionInfo.EstateSettings.EstateOwner;
        landData.Add(parcel);
    }

    m_scene.EventManager.TriggerIncomingLandDataFromStorage(landData);

    // Update the database as well!
    IParcelManagementModule parcelManagementModule = m_scene.RequestModuleInterface<IParcelManagementModule>();
    if (parcelManagementModule != null)
    {
        foreach (LandData parcel in landData)
        {
            parcelManagementModule.UpdateLandObject(parcel.LocalID, parcel);
        }
    }

    m_log.InfoFormat("[ARCHIVER]: Restored {0} parcels.", landData.Count);

    // Clean it out
    landData.Clear();
    serialisedParcels.Clear();

    m_log.InfoFormat("[ARCHIVER]: Successfully loaded archive in "
        + (DateTime.Now - start).Minutes + ":" + (DateTime.Now - start).Seconds);

    m_validUserUuids.Clear();
    m_scene.EventManager.TriggerOarFileLoaded(UUID.Zero.Guid, m_errorMessage);
}
public bool SaveBackup(string fileName, RegionData regiondata)
{
    try
    {
        bool oldFileExists = File.Exists(fileName);

        // Do new style saving here!
        GZipStream m_saveStream = new GZipStream(new FileStream(fileName + ".tmp", FileMode.Create),
                                                 CompressionMode.Compress);
        TarArchiveWriter writer = new TarArchiveWriter(m_saveStream);

        // Only open the old backup for reading if it actually exists; the original code opened it
        // unconditionally, which threw FileNotFoundException on a first-time save.
        GZipStream m_loadStream = null;
        TarArchiveReader reader = null;
        if (oldFileExists)
        {
            m_loadStream = new GZipStream(new FileStream(fileName, FileMode.Open), CompressionMode.Decompress);
            reader = new TarArchiveReader(m_loadStream);
        }

        writer.WriteDir("parcels");

        foreach (LandData parcel in regiondata.Parcels)
        {
            OSDMap parcelMap = parcel.ToOSD();
            var binary = OSDParser.SerializeLLSDBinary(parcelMap);
            writer.WriteFile("parcels/" + parcel.GlobalID.ToString(), binary);
            binary = null;
            parcelMap = null;
        }

        writer.WriteDir("newstyleterrain");
        writer.WriteDir("newstylerevertterrain");
        writer.WriteDir("newstylewater");
        writer.WriteDir("newstylerevertwater");
        writer.WriteDir("regioninfo");

        byte[] regionData = OSDParser.SerializeLLSDBinary(regiondata.RegionInfo.PackRegionInfoData());
        writer.WriteFile("regioninfo/regioninfo", regionData);

        try
        {
            writer.WriteFile("newstyleterrain/" + regiondata.RegionInfo.RegionID.ToString() + ".terrain",
                             regiondata.Terrain);
            writer.WriteFile("newstylerevertterrain/" + regiondata.RegionInfo.RegionID.ToString() + ".terrain",
                             regiondata.RevertTerrain);
            if (regiondata.Water != null)
            {
                writer.WriteFile("newstylewater/" + regiondata.RegionInfo.RegionID.ToString() + ".terrain",
                                 regiondata.Water);
                writer.WriteFile("newstylerevertwater/" + regiondata.RegionInfo.RegionID.ToString() + ".terrain",
                                 regiondata.RevertWater);
            }
        }
        catch (Exception ex)
        {
            MainConsole.Instance.WarnFormat("[Backup]: Exception caught: {0}", ex);
        }

        List<UUID> entitiesToSave = new List<UUID>();
        foreach (ISceneEntity entity in regiondata.Groups)
        {
            try
            {
                if (entity.IsAttachment ||
                    ((entity.RootChild.Flags & PrimFlags.Temporary) == PrimFlags.Temporary) ||
                    ((entity.RootChild.Flags & PrimFlags.TemporaryOnRez) == PrimFlags.TemporaryOnRez))
                    continue;

                if (entity.HasGroupChanged || !oldFileExists)
                {
                    entity.HasGroupChanged = false;
                    // Write all entities
                    writer.WriteFile("entities/" + entity.UUID.ToString(), entity.ToBinaryXml2());
                }
                else
                {
                    entitiesToSave.Add(entity.UUID);
                }
            }
            catch (Exception ex)
            {
                MainConsole.Instance.WarnFormat("[Backup]: Exception caught: {0}", ex);
                entitiesToSave.Add(entity.UUID);
            }
        }

        if (oldFileExists)
        {
            byte[] data;
            string filePath;
            TarArchiveReader.TarEntryType entryType;
            // Load the archive data that we need
            try
            {
                while ((data = reader.ReadEntry(out filePath, out entryType)) != null)
                {
                    if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
                        continue;
                    if (filePath.StartsWith("entities/"))
                    {
                        UUID entityID = UUID.Parse(filePath.Remove(0, 9));
                        if (entitiesToSave.Contains(entityID))
                        {
                            writer.WriteFile(filePath, data);
                            entitiesToSave.Remove(entityID);
                        }
                    }
                    data = null;
                }
            }
            catch (Exception ex)
            {
                MainConsole.Instance.WarnFormat("[Backup]: Exception caught: {0}", ex);
            }

            if (entitiesToSave.Count > 0)
            {
                MainConsole.Instance.Fatal(entitiesToSave.Count + " PRIMS WERE NOT GOING TO BE SAVED! FORCE SAVING NOW! ");
                foreach (ISceneEntity entity in regiondata.Groups)
                {
                    if (entitiesToSave.Contains(entity.UUID))
                    {
                        if (entity.IsAttachment ||
                            ((entity.RootChild.Flags & PrimFlags.Temporary) == PrimFlags.Temporary) ||
                            ((entity.RootChild.Flags & PrimFlags.TemporaryOnRez) == PrimFlags.TemporaryOnRez))
                            continue;
                        // Write all entities
                        byte[] xml = entity.ToBinaryXml2();
                        writer.WriteFile("entities/" + entity.UUID.ToString(), xml);
                        xml = null;
                    }
                }
            }

            reader.Close();
            m_loadStream.Close();
        }

        writer.Close();
        m_saveStream.Close();
        GC.Collect();
    }
    catch (Exception ex)
    {
        MainConsole.Instance.Warn("[ProtobufRegionLoader]: Failed to save backup: " + ex.ToString());
        return false;
    }
    return true;
}
/// <summary>
/// Execute the request
/// </summary>
/// <returns>
/// A list of the inventory nodes loaded. If folders were loaded then only the root folders are
/// returned
/// </returns>
public List<InventoryNodeBase> Execute()
{
    string filePath = "ERROR";
    int successfulAssetRestores = 0;
    int failedAssetRestores = 0;
    int successfulItemRestores = 0;
    List<InventoryNodeBase> nodesLoaded = new List<InventoryNodeBase>();

    if (!m_userInfo.HasReceivedInventory)
    {
        // If the region server has access to the user admin service (by which users are created),
        // then we'll assume that it's okay to fiddle with the user's inventory even if they are not on the
        // server.
        //
        // FIXME: FetchInventory should probably be assumed to be async anyway, since even standalones might
        // use a remote inventory service, though this is vanishingly rare at the moment.
        if (null == m_commsManager.UserAdminService)
        {
            m_log.ErrorFormat(
                "[INVENTORY ARCHIVER]: Have not yet received inventory info for user {0} {1}",
                m_userInfo.UserProfile.Name, m_userInfo.UserProfile.ID);

            return nodesLoaded;
        }
        else
        {
            m_userInfo.FetchInventory();
        }
    }

    InventoryFolderImpl rootDestinationFolder = m_userInfo.RootFolder.FindFolderByPath(m_invPath);

    if (null == rootDestinationFolder)
    {
        // Possibly provide an option later on to automatically create this folder if it does not exist
        m_log.ErrorFormat("[INVENTORY ARCHIVER]: Inventory path {0} does not exist", m_invPath);

        return nodesLoaded;
    }

    archive = new TarArchiveReader(m_loadStream);

    // In order to load identically named folders, we need to keep track of the folders that we have already
    // created
    Dictionary<string, InventoryFolderImpl> foldersCreated = new Dictionary<string, InventoryFolderImpl>();

    byte[] data;
    TarArchiveReader.TarEntryType entryType;

    while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
    {
        if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
        {
            if (LoadAsset(filePath, data))
                successfulAssetRestores++;
            else
                failedAssetRestores++;
        }
        else if (filePath.StartsWith(ArchiveConstants.INVENTORY_PATH))
        {
            InventoryFolderImpl foundFolder
                = ReplicateArchivePathToUserInventory(
                    filePath, TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType,
                    rootDestinationFolder, foldersCreated, nodesLoaded);

            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY != entryType)
            {
                InventoryItemBase item = UserInventoryItemSerializer.Deserialize(data);

                // Don't use the item ID that's in the file
                item.ID = UUID.Random();

                UUID ospResolvedId = OspResolver.ResolveOspa(item.CreatorId, m_commsManager);
                if (UUID.Zero != ospResolvedId)
                    item.CreatorIdAsUuid = ospResolvedId;

                item.Owner = m_userInfo.UserProfile.ID;

                // Reset folder ID to the one in which we want to load it
                item.Folder = foundFolder.ID;

                m_userInfo.AddItem(item);
                successfulItemRestores++;

                // If we're loading an item directly into the given destination folder then we need to record
                // it separately from any loaded root folders
                if (rootDestinationFolder == foundFolder)
                    nodesLoaded.Add(item);
            }
        }
    }

    archive.Close();

    m_log.DebugFormat("[INVENTORY ARCHIVER]: Restored {0} assets", successfulAssetRestores);
    m_log.InfoFormat("[INVENTORY ARCHIVER]: Restored {0} items", successfulItemRestores);

    return nodesLoaded;
}