bool DearchiveRegion0DotStar()
{
    if (m_loadStream == null)
        return false;

    int successfulAssetRestores = 0;
    int failedAssetRestores = 0;
    string filePath = "NONE";
    DateTime start = DateTime.Now;

    TarArchiveReader archive = new TarArchiveReader(m_loadStream);

    if (!m_skipAssets)
    {
        m_threadpool = new UniverseThreadPool(
            new UniverseThreadPoolStartInfo()
            {
                Threads = 1,
                priority = System.Threading.ThreadPriority.BelowNormal
            });
    }

    IBackupModule backup = m_scene.RequestModuleInterface<IBackupModule>();

    if (!m_merge)
    {
        DateTime before = DateTime.Now;
        MainConsole.Instance.Info("[Archiver]: Clearing all existing scene objects");
        if (backup != null)
            backup.DeleteAllSceneObjects();
        MainConsole.Instance.Info("[Archiver]: Cleared all existing scene objects in " +
            (DateTime.Now - before).Minutes + ":" + (DateTime.Now - before).Seconds);
    }

    IScriptModule[] modules = m_scene.RequestModuleInterfaces<IScriptModule>();

    // Disable the script engine so that it doesn't load in the background and kill OAR loading
    foreach (IScriptModule module in modules)
        module.Disabled = true;

    // Disable backup for now as well
    if (backup != null)
        backup.LoadingPrims = true;

    IRegionSerializerModule serializer = m_scene.RequestModuleInterface<IRegionSerializerModule>();
    int sceneObjectsLoadedCount = 0;

    // We save the groups so that we can back them up later
    List<ISceneEntity> groupsToBackup = new List<ISceneEntity>();
    List<LandData> landData = new List<LandData>();

    // Must save off some stuff until after assets have been saved and received new uuids;
    // keeping these collections local because they will get large and garbage collection is better that way
    List<byte[]> sceneObjectGroups = new List<byte[]>();
    Dictionary<UUID, UUID> assetBinaryChangeRecord = new Dictionary<UUID, UUID>();
    Queue<UUID> assets2Save = new Queue<UUID>();

    MainConsole.Instance.Info("[Archiver]: Commencing load from archive");
    int ticker = 0;

    try
    {
        byte[] data;
        TarArchiveReader.TarEntryType entryType;

        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
                continue;

            if (TarArchiveReader.TarEntryType.TYPE_NORMAL_FILE == entryType)
            {
                string fName;
                try
                {
                    fName = Path.GetFileName(filePath);
                    if (fName.StartsWith(".", StringComparison.Ordinal))     // ignore hidden files
                        continue;
                }
                catch
                {
                    MainConsole.Instance.ErrorFormat("[Archiver]: Invalid file name in archive: {0}", filePath);
                    continue;
                }
            }

            ticker++;
            if (ticker % 10 == 0)
                MainConsole.Instance.Ticker();

            if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH, StringComparison.Ordinal))
            {
                sceneObjectGroups.Add(data);
                if (sceneObjectGroups.Count % 100 == 0)
                    MainConsole.Instance.Ticker(
                        string.Format("[Archiver]: Found {0} scene object groups...", sceneObjectGroups.Count), true);
            }
            else if (!m_skipAssets && filePath.StartsWith(ArchiveConstants.ASSETS_PATH, StringComparison.Ordinal))
            {
                AssetBase asset;
                if (LoadAsset(filePath, data, out asset))
                {
                    successfulAssetRestores++;

                    if (m_useAsync)
                    {
                        lock (AssetsToAdd)
                            AssetsToAdd.Add(asset);
                    }
                    else
                    {
                        if (asset.IsBinaryAsset)
                        {
                            UUID aid = asset.ID;
                            asset.ID = m_scene.AssetService.Store(asset);
                            if (asset.ID != aid && asset.ID != UUID.Zero)
                                assetBinaryChangeRecord.Add(aid, asset.ID);
                        }
                        else
                        {
                            if (!assetNonBinaryCollection.ContainsKey(asset.ID))
                            {
                                assetNonBinaryCollection.Add(asset.ID, asset);
                                // I need something I can safely loop through
                                assets2Save.Enqueue(asset.ID);
                            }
                        }
                    }
                }
                else
                {
                    failedAssetRestores++;
                }

                if ((successfulAssetRestores + failedAssetRestores) % 100 == 0)
                    MainConsole.Instance.Ticker(
                        string.Format("[Archiver]: Loaded {0} assets, with {1} failures...",
                                      successfulAssetRestores, failedAssetRestores), true);
            }
            else if (!m_skipTerrain && filePath.StartsWith(ArchiveConstants.TERRAINS_PATH, StringComparison.Ordinal))
            {
                LoadTerrain(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.SETTINGS_PATH, StringComparison.Ordinal))
            {
                LoadRegionSettings(filePath, data);
            }
            else if (!m_skipTerrain && filePath.StartsWith(ArchiveConstants.LANDDATA_PATH, StringComparison.Ordinal))
            {
                var parcel = LoadLandData(data);
                landData.Add(parcel);
            }
            else if (filePath == ArchiveConstants.CONTROL_FILE_PATH)
            {
                LoadControlFile(data);
            }
        }

        MainConsole.Instance.CleanInfo("");
        MainConsole.Instance.Info("[Archiver]: Saving non binary assets");
        ticker = 0;

        // Save assets
        int savingAssetsCount = 0;
        while (assets2Save.Count > 0)
        {
            ticker++;
            if (ticker % 10 == 0)
                MainConsole.Instance.Ticker();

            try
            {
                UUID assetid = assets2Save.Dequeue();
                SaveNonBinaryAssets(assetid, assetNonBinaryCollection[assetid], assetBinaryChangeRecord);
                savingAssetsCount++;
                if (savingAssetsCount % 100 == 0)
                    MainConsole.Instance.Ticker(
                        string.Format("[Archiver]: Saved {0} assets...", savingAssetsCount), true);
            }
            catch (Exception ex)
            {
                MainConsole.Instance.Info("[Archiver]: Exception in saving an asset: " + ex);
            }
        }

        MainConsole.Instance.CleanInfo("");
        MainConsole.Instance.Info("[Archiver]: Saving loaded objects");
        ticker = 0;

        foreach (byte[] data2 in sceneObjectGroups)
        {
            ticker++;
            if (ticker % 10 == 0)
                MainConsole.Instance.Ticker();

            byte[] data3 = data2;
            string stringData = Utils.BytesToString(data3);
            MatchCollection mc = Regex.Matches(stringData, sPattern);
            bool didChange = false;

            if (mc.Count >= 1)
            {
                foreach (Match match in mc)
                {
                    UUID thematch = new UUID(match.Value);
                    UUID newvalue = thematch;
                    if (assetNonBinaryCollection.ContainsKey(thematch))
                        newvalue = assetNonBinaryCollection[thematch].ID;
                    else if (assetBinaryChangeRecord.ContainsKey(thematch))
                        newvalue = assetBinaryChangeRecord[thematch];
                    if (thematch == newvalue)
                        continue;

                    stringData = stringData.Replace(thematch.ToString().Trim(), newvalue.ToString().Trim());
                    didChange = true;
                }
            }

            if (didChange)
                data3 = Utils.StringToBytes(stringData);

            ISceneEntity sceneObject = serializer.DeserializeGroupFromXml2(data3, m_scene);
            if (sceneObject == null)
            {
                //! big error!
                MainConsole.Instance.Error("Error reading SOP XML (Please mantis this!): " +
                    m_asciiEncoding.GetString(data3));
                continue;
            }

            // Check sceneObject ownership...
            sceneObject.OwnerID = ResolveUserUuid(sceneObject.OwnerID, sceneObject.LastSignificantPosition, landData);

            // ...and children
            foreach (ISceneChildEntity part in sceneObject.ChildrenEntities())
            {
                // Check user IDs
                part.CreatorID = ResolveUserUuid(part.CreatorID, part.AbsolutePosition, landData);
                part.OwnerID = ResolveUserUuid(part.OwnerID, part.AbsolutePosition, landData);
                part.LastOwnerID = ResolveUserUuid(part.LastOwnerID, part.AbsolutePosition, landData);

                // Check group IDs
                part.GroupID = ResolveGroupUuid(part.GroupID);

                // And zap any troublesome sit target information
                part.SitTargetOrientation = new Quaternion(0, 0, 0, 1);
                part.SitTargetPosition = new Vector3(0, 0, 0);

                // Fix ownership/creator of inventory items
                // Not doing so results in inventory items
                // being no copy/no mod for everyone
                lock (part.TaskInventory)
                {
                    TaskInventoryDictionary inv = part.TaskInventory;
                    foreach (KeyValuePair<UUID, TaskInventoryItem> kvp in inv)
                    {
                        // Check user IDs
                        kvp.Value.OwnerID = ResolveUserUuid(kvp.Value.OwnerID, part.AbsolutePosition, landData);
                        kvp.Value.LastOwnerID = ResolveUserUuid(kvp.Value.LastOwnerID, part.AbsolutePosition, landData);
                        kvp.Value.CreatorID = ResolveUserUuid(kvp.Value.CreatorID, part.AbsolutePosition, landData);

                        // ...and possible group IDs
                        kvp.Value.GroupID = ResolveGroupUuid(kvp.Value.GroupID);
                    }
                }
            }

            // Add the offsets of the region
            Vector3 newPos = new Vector3(sceneObject.AbsolutePosition.X + m_offsetX,
                                         sceneObject.AbsolutePosition.Y + m_offsetY,
                                         sceneObject.AbsolutePosition.Z + m_offsetZ);
            if (m_flipX)
                newPos.X = m_scene.RegionInfo.RegionSizeX - newPos.X;
            if (m_flipY)
                newPos.Y = m_scene.RegionInfo.RegionSizeY - newPos.Y;
            sceneObject.SetAbsolutePosition(false, newPos);

            if (m_scene.SceneGraph.AddPrimToScene(sceneObject))
            {
                groupsToBackup.Add(sceneObject);
                sceneObject.ScheduleGroupUpdate(PrimUpdateFlags.ForcedFullUpdate);
                sceneObject.CreateScriptInstances(0, false, StateSource.RegionStart, UUID.Zero, true);
            }

            sceneObjectsLoadedCount++;
            if (sceneObjectsLoadedCount % 100 == 0)
                MainConsole.Instance.Ticker(
                    string.Format("[Archiver]: Saved {0} objects...", sceneObjectsLoadedCount), true);
        }

        assetNonBinaryCollection.Clear();
        assetBinaryChangeRecord.Clear();
        sceneObjectGroups.Clear();
    }
    catch (Exception e)
    {
        MainConsole.Instance.ErrorFormat(
            "[Archiver]: Aborting load with error in archive file {0}. {1}", filePath, e);
        m_errorMessage += e.ToString();
        m_scene.EventManager.TriggerOarFileLoaded(UUID.Zero.Guid, m_errorMessage);
        return false;
    }
    finally
    {
        archive.Close();
        m_loadStream.Close();
        m_loadStream.Dispose();
        assets2Save.Clear();
        assetNonBinaryCollection.Clear();

        // Re-enable scripts now that we are done
        foreach (IScriptModule module in modules)
            module.Disabled = false;

        // Reset backup too
        if (backup != null)
            backup.LoadingPrims = false;
    }

    // Finished with the ticker
    MainConsole.Instance.CleanInfo("");

    // Now back up the prims
    foreach (ISceneEntity grp in groupsToBackup)
    {
        // Backup!
        grp.HasGroupChanged = true;
    }

    if (!m_skipAssets && m_useAsync && !AssetSaverIsRunning)
        m_threadpool.QueueEvent(SaveAssets, 0);

    if (!m_skipAssets)
    {
        MainConsole.Instance.InfoFormat("[Archiver]: Restored {0} assets", successfulAssetRestores);

        if (failedAssetRestores > 0)
        {
            MainConsole.Instance.ErrorFormat("[Archiver]: Failed to load {0} assets", failedAssetRestores);
            m_errorMessage += string.Format("Failed to load {0} assets", failedAssetRestores);
        }
    }

    // Reload serialized parcels
    if (!m_skipTerrain)
    {
        MainConsole.Instance.InfoFormat("[Archiver]: Loading {0} parcels.", landData.Count);

        IParcelManagementModule parcelManagementModule = m_scene.RequestModuleInterface<IParcelManagementModule>();
        if (parcelManagementModule != null)
            parcelManagementModule.IncomingLandDataFromOAR(landData, m_merge, new Vector2(m_offsetX, m_offsetY));

        MainConsole.Instance.InfoFormat("[Archiver]: Restored {0} parcels.", landData.Count);
    }

    // Clean it out
    landData.Clear();

    MainConsole.Instance.InfoFormat("[Archiver]: Successfully loaded archive in " +
        (DateTime.Now - start).Minutes + ":" + (DateTime.Now - start).Seconds);

    m_validUserUuids.Clear();
    m_validGroupUuids.Clear();
    m_scene.EventManager.TriggerOarFileLoaded(UUID.Zero.Guid, m_errorMessage);
    return true;    // all good
}
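
// --- Illustrative sketch (not part of the archiver above) ---
// The object-loading loop rewrites asset UUIDs embedded in each serialized group whenever the
// asset store handed back a different ID on Store(). A minimal, self-contained version of that
// remapping idea follows; RemapAssetUuids is a hypothetical helper, the binary/non-binary change
// records are collapsed into a single oldToNew map, and the inline regex is an assumed stand-in
// for the class's sPattern field, which is defined elsewhere and not shown in this excerpt.
static string RemapAssetUuids(string serializedXml, Dictionary<UUID, UUID> oldToNew)
{
    // Matches the usual 8-4-4-4-12 hexadecimal UUID form (assumption; sPattern may differ).
    const string uuidPattern =
        "[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}";

    return Regex.Replace(serializedXml, uuidPattern, delegate(Match match)
    {
        UUID oldId = new UUID(match.Value);
        UUID newId;
        // Only rewrite IDs that actually changed; everything else passes through untouched.
        if (oldToNew.TryGetValue(oldId, out newId) && newId != oldId)
            return newId.ToString();
        return match.Value;
    });
}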
private void DearchiveRegion0DotStar()
{
    int successfulAssetRestores = 0;
    int failedAssetRestores = 0;
    List<string> serializedSceneObjects = new List<string>();
    string filePath = "NONE";

    try
    {
        TarArchiveReader archive = new TarArchiveReader(m_loadStream);

        byte[] data;
        TarArchiveReader.TarEntryType entryType;

        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            //m_log.DebugFormat(
            //    "[ARCHIVER]: Successfully read {0} ({1} bytes)", filePath, data.Length);

            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
                continue;

            if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
            {
                serializedSceneObjects.Add(Encoding.UTF8.GetString(data));
            }
            else if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
            {
                if (LoadAsset(filePath, data))
                    successfulAssetRestores++;
                else
                    failedAssetRestores++;
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.TERRAINS_PATH))
            {
                LoadTerrain(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.SETTINGS_PATH))
            {
                LoadRegionSettings(filePath, data);
            }
        }

        //m_log.Debug("[ARCHIVER]: Reached end of archive");
        archive.Close();
    }
    catch (Exception e)
    {
        m_log.ErrorFormat(
            "[ARCHIVER]: Aborting load with error in archive file {0}. {1}", filePath, e);
        m_errorMessage += e.ToString();
        m_scene.EventManager.TriggerOarFileLoaded(m_requestId, m_errorMessage);
        return;
    }

    m_log.InfoFormat("[ARCHIVER]: Restored {0} assets", successfulAssetRestores);

    if (failedAssetRestores > 0)
    {
        m_log.ErrorFormat("[ARCHIVER]: Failed to load {0} assets", failedAssetRestores);
        m_errorMessage += String.Format("Failed to load {0} assets", failedAssetRestores);
    }

    // Reload serialized prims
    m_log.InfoFormat("[ARCHIVER]: Preparing {0} scene objects. Please wait.", serializedSceneObjects.Count);

    IRegionSerializerModule serializer = m_scene.RequestModuleInterface<IRegionSerializerModule>();
    int sceneObjectsLoadedCount = 0;

    List<SceneObjectGroup> backupObjects = new List<SceneObjectGroup>();
    Dictionary<UUID, UUID> OriginalBackupIDs = new Dictionary<UUID, UUID>();

    bool objectFixingFailed = false;

    foreach (string serializedSceneObject in serializedSceneObjects)
    {
        SceneObjectGroup sceneObject;
        try
        {
            sceneObject = serializer.DeserializeGroupFromXml2(serializedSceneObject);
        }
        catch (Exception e)
        {
            m_log.InfoFormat("[ARCHIVER]: Error while deserializing group: {0}", e);
            if (m_skipErrorGroups)
                continue;
            else
                throw;
        }

        if (sceneObject == null)
        {
            if (m_skipErrorGroups)
                continue;
            else
                throw new Exception("Error while deserializing group");
        }

        // For now, give all incoming scene objects new uuids. This will allow scenes to be cloned
        // on the same region server and multiple copies of a single object archive to be imported
        // to the same scene (when this is possible).
        UUID OldUUID = sceneObject.UUID;
        sceneObject.ResetIDs();
        // If sceneObject is no-copy, save the old ID with the new ID.
        OriginalBackupIDs[sceneObject.UUID] = OldUUID;

        // Try to retain the original creator/owner/lastowner if their uuid is present on this grid;
        // otherwise, use the master avatar uuid instead
        UUID masterAvatarId = m_scene.RegionInfo.MasterAvatarAssignedUUID;

        if (m_scene.RegionInfo.EstateSettings.EstateOwner != UUID.Zero)
            masterAvatarId = m_scene.RegionInfo.EstateSettings.EstateOwner;

        foreach (SceneObjectPart part in sceneObject.GetParts())
        {
            if (!ResolveUserUuid(part.CreatorID))
            {
                m_log.WarnFormat("[ARCHIVER]: Could not resolve av/group ID {0} for object '{1}' part creator",
                    part.CreatorID, sceneObject.Name);
                // Don't fail to load an object owned by a valid user, just because a creator no longer
                // exists in the DB. (We've seen this with some of YadNi's stuff.)
                // objectFixingFailed = true;
                // part.CreatorID = masterAvatarId;
            }

            if (!ResolveUserUuid(part.OwnerID))
            {
                m_log.WarnFormat("[ARCHIVER]: Could not resolve av/group ID {0} for object '{1}' part owner",
                    part.OwnerID, sceneObject.Name);
                objectFixingFailed = true;
                part.OwnerID = masterAvatarId;
            }

            if (!ResolveUserUuid(part.LastOwnerID))
            {
                m_log.WarnFormat("[ARCHIVER]: Could not resolve av/group ID {0} for object '{1}' part last owner",
                    part.LastOwnerID, sceneObject.Name);
                objectFixingFailed = true;
                part.LastOwnerID = masterAvatarId;
            }

            // Fix ownership/creator of inventory items
            // Not doing so results in inventory items
            // being no copy/no mod for everyone
            lock (part.TaskInventory)
            {
                TaskInventoryDictionary inv = part.TaskInventory;
                foreach (KeyValuePair<UUID, TaskInventoryItem> kvp in inv)
                {
                    if (!ResolveUserUuid(kvp.Value.OwnerID))
                    {
                        m_log.WarnFormat("[ARCHIVER]: Could not resolve av/group ID {0} for object '{1}' inventory item owner",
                            kvp.Value.OwnerID, sceneObject.Name);
                        objectFixingFailed = true;
                        kvp.Value.OwnerID = masterAvatarId;
                    }

                    if (!ResolveUserUuid(kvp.Value.CreatorID))
                    {
                        m_log.WarnFormat("[ARCHIVER]: Could not resolve av/group ID {0} for object '{1}' inventory item creator",
                            kvp.Value.CreatorID, sceneObject.Name);
                        // Don't fail to load an object owned by a valid user, just because a creator no longer
                        // exists in the DB. (We've seen this with some of YadNi's stuff.)
                        // objectFixingFailed = true;
                        // kvp.Value.CreatorID = masterAvatarId;
                    }
                }
            }
        }

        backupObjects.Add(sceneObject);
    }

    if (objectFixingFailed && !m_allowUserReassignment)
    {
        m_log.Error("[ARCHIVER]: Could not restore scene objects. One or more avatar accounts not found.");
        return;
    }

    Dictionary<UUID, SceneObjectGroup> ExistingNoCopyObjects = new Dictionary<UUID, SceneObjectGroup>();
    if (!m_merge)
    {
        m_log.Info("[ARCHIVER]: Clearing all existing scene objects");
        m_scene.DeleteAllSceneObjectsExcept(delegate(SceneObjectGroup existingSOG)
        {
            // Return true if this object should be skipped in the delete.
            // Don't delete any no-copy objects.
            if (NoCopyObjectOrContents(existingSOG))
            {
                ExistingNoCopyObjects.Add(existingSOG.UUID, existingSOG);
                return true;
            }
            return false;
        });
    }

    m_log.InfoFormat("[ARCHIVER]: Loading {0} scene objects. Please wait.", serializedSceneObjects.Count);

    // backupObject is the one from backup to restore to the scene
    foreach (SceneObjectGroup backupObject in backupObjects)
    {
        SceneObjectGroup existingObject = null;
        UUID originalUUID = OriginalBackupIDs[backupObject.UUID];

        // Don't restore any no-copy objects unless there was an existing matching UUID in the scene.
        if (ExistingNoCopyObjects.ContainsKey(originalUUID))
            existingObject = ExistingNoCopyObjects[originalUUID];
        // existingObject here means an existing NO-COPY object, not deleted from the scene above

        if (NoCopyObjectOrContents(backupObject))
        {
            if ((existingObject != null) && !existingObject.IsAttachment)
            {
                // Copy only position and rotation from backup
                existingObject.Rotation = backupObject.Rotation;
                existingObject.AbsolutePosition = backupObject.AbsolutePosition;
            }
            // Don't restore no-copy items
        }
        else if (m_scene.AddRestoredSceneObject(backupObject, true, false))
        {
            // This may have added a second copyable copy if existingObject is no-copy
            sceneObjectsLoadedCount++;
            backupObject.CreateScriptInstances(0, ScriptStartFlags.PostOnRez, m_scene.DefaultScriptEngine, 0, null);
        }
    }

    m_log.InfoFormat("[ARCHIVER]: Restored {0} scene objects to the scene", sceneObjectsLoadedCount);

    int ignoredObjects = serializedSceneObjects.Count - sceneObjectsLoadedCount;
    if (ignoredObjects > 0)
        m_log.WarnFormat("[ARCHIVER]: Ignored {0} scene objects that already existed in the scene", ignoredObjects);

    m_log.InfoFormat("[ARCHIVER]: Successfully loaded archive");

    m_scene.EventManager.TriggerOarFileLoaded(m_requestId, m_errorMessage);
}
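
// --- Illustrative sketch (not part of the archiver above) ---
// The ownership-fixing pass falls back to a single default owner (the estate owner when set,
// otherwise the region's master avatar) whenever an archived owner UUID cannot be resolved on
// this grid, and records that a reassignment happened so the load can be refused when
// m_allowUserReassignment is off. ResolveOrReassign is a hypothetical helper expressing that
// rule in one place; the isKnownUser delegate stands in for this class's ResolveUserUuid.
static UUID ResolveOrReassign(UUID archivedId, UUID defaultOwnerId,
                              Func<UUID, bool> isKnownUser, ref bool reassignmentApplied)
{
    if (isKnownUser(archivedId))
        return archivedId;            // keep the original account when it exists on this grid

    reassignmentApplied = true;       // caller may abort the load if reassignment is disallowed
    return defaultOwnerId;
}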