public MaintenanceThread(ScriptEngine Engine)
{
    m_ScriptEngine = Engine;
    EventManager = Engine.EventManager;

    RunInMainProcessingThread = Engine.Config.GetBoolean("RunInMainProcessingThread", false);
    RunInMainProcessingThread = false; // temporarily false until the code is fixed to work with true

    // There IS a reason we start this, even if RunInMain is enabled.
    // If this isn't enabled, we run into issues with the CmdHandlerQueue,
    // as it always must be async, so we must run the pool anyway.
    UniverseThreadPoolStartInfo info = new UniverseThreadPoolStartInfo
    {
        priority = ThreadPriority.Normal,
        Threads = 1,
        MaxSleepTime = Engine.Config.GetInt("SleepTime", 100),
        SleepIncrementTime = Engine.Config.GetInt("SleepIncrementTime", 1),
        Name = "Script Cmd Thread Pools"
    };
    cmdThreadpool = new UniverseThreadPool(info);

    UniverseThreadPoolStartInfo scinfo = new UniverseThreadPoolStartInfo
    {
        priority = ThreadPriority.Normal,
        Threads = 1,
        MaxSleepTime = Engine.Config.GetInt("SleepTime", 100),
        SleepIncrementTime = Engine.Config.GetInt("SleepIncrementTime", 1),
        Name = "Script Loading Thread Pools"
    };
    scriptChangeThreadpool = new UniverseThreadPool(scinfo);

    MaxScriptThreads = Engine.Config.GetInt("MaxScriptThreads", 100); // leave control threads out of user option

    UniverseThreadPoolStartInfo sinfo = new UniverseThreadPoolStartInfo
    {
        priority = ThreadPriority.Normal,
        Threads = MaxScriptThreads,
        MaxSleepTime = Engine.Config.GetInt("SleepTime", 100),
        SleepIncrementTime = Engine.Config.GetInt("SleepIncrementTime", 1),
        KillThreadAfterQueueClear = true,
        Name = "Script Event Thread Pools"
    };
    scriptThreadpool = new UniverseThreadPool(sinfo);

    AppDomain.CurrentDomain.AssemblyResolve += m_ScriptEngine.AssemblyResolver.OnAssemblyResolve;
}
public virtual void RegionLoaded(IScene scene)
{
    if (!m_Enabled)
        return;

    UniverseThreadPoolStartInfo info = new UniverseThreadPoolStartInfo
    {
        priority = ThreadPriority.Lowest,
        Threads = 1
    };
    threadpool = new UniverseThreadPool(info);
    blockthreadpool = new UniverseThreadPool(info);
}
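// --- Illustrative sketch, not part of the original source ---
// Shows how work is typically handed to one of the pools created above. The only pool API
// visible in this file is UniverseThreadPool.QueueEvent(callback, priority), used further
// down as m_threadpool.QueueEvent(SaveAssets, 0); the method and callback names below are
// hypothetical placeholders under that assumption.
void QueueExampleWork()
{
    // Hand a parameterless callback to the low-priority pool created in RegionLoaded;
    // the pool invokes it later on one of its worker threads.
    threadpool.QueueEvent(ExampleWorkItem, 0);
}

void ExampleWorkItem()
{
    // Placeholder work item body.
}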
bool DearchiveRegion0DotStar()
{
    if (m_loadStream == null)
        return false;

    int successfulAssetRestores = 0;
    int failedAssetRestores = 0;
    string filePath = "NONE";
    DateTime start = DateTime.Now;

    TarArchiveReader archive = new TarArchiveReader(m_loadStream);

    if (!m_skipAssets)
        m_threadpool = new UniverseThreadPool(new UniverseThreadPoolStartInfo()
        {
            Threads = 1,
            priority = System.Threading.ThreadPriority.BelowNormal
        });

    IBackupModule backup = m_scene.RequestModuleInterface<IBackupModule>();
    if (!m_merge)
    {
        DateTime before = DateTime.Now;
        MainConsole.Instance.Info("[ARCHIVER]: Clearing all existing scene objects");
        if (backup != null)
            backup.DeleteAllSceneObjects();
        MainConsole.Instance.Info("[ARCHIVER]: Cleared all existing scene objects in " +
                                  (DateTime.Now - before).Minutes + ":" + (DateTime.Now - before).Seconds);
    }

    IScriptModule[] modules = m_scene.RequestModuleInterfaces<IScriptModule>();
    // Disable the script engine so that it doesn't load in the background and kill OAR loading
    foreach (IScriptModule module in modules)
    {
        module.Disabled = true;
    }
    // Disable backup for now as well
    if (backup != null)
        backup.LoadingPrims = true;

    IRegionSerialiserModule serialiser = m_scene.RequestModuleInterface<IRegionSerialiserModule>();
    int sceneObjectsLoadedCount = 0;

    // We save the groups so that we can back them up later
    List<ISceneEntity> groupsToBackup = new List<ISceneEntity>();
    List<LandData> landData = new List<LandData>();

    // Must save off some data until after assets have been saved and received new UUIDs.
    // Keeping these collections local because they will get large and garbage collection is better that way.
    List<byte[]> seneObjectGroups = new List<byte[]>();
    Dictionary<UUID, UUID> assetBinaryChangeRecord = new Dictionary<UUID, UUID>();
    Queue<UUID> assets2Save = new Queue<UUID>();

    MainConsole.Instance.Info("[ARCHIVER]: Commencing load from archive");
    int ticker = 0;

    try
    {
        byte[] data;
        TarArchiveReader.TarEntryType entryType;

        while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
        {
            if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY == entryType)
                continue;

            if (TarArchiveReader.TarEntryType.TYPE_NORMAL_FILE == entryType)
            {
                var fName = Path.GetFileName(filePath);
                if (fName.StartsWith("."))          // ignore hidden files
                    continue;
            }

            ticker++;
            if (ticker % 10 == 0)
                MainConsole.Instance.Ticker();

            if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
            {
                seneObjectGroups.Add(data);
                if (seneObjectGroups.Count % 100 == 0)
                    MainConsole.Instance.InfoFormat("[ARCHIVER]: Found {0} scene object groups...",
                                                    seneObjectGroups.Count);
            }
            else if (!m_skipAssets && filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
            {
                AssetBase asset;
                if (LoadAsset(filePath, data, out asset))
                {
                    successfulAssetRestores++;
                    if (m_useAsync)
                        lock (AssetsToAdd)
                            AssetsToAdd.Add(asset);
                    else
                    {
                        if (asset.IsBinaryAsset)
                        {
                            UUID aid = asset.ID;
                            asset.ID = m_scene.AssetService.Store(asset);
                            if (asset.ID != aid && asset.ID != UUID.Zero)
                                assetBinaryChangeRecord.Add(aid, asset.ID);
                        }
                        else
                        {
                            if (!assetNonBinaryCollection.ContainsKey(asset.ID))
                            {
                                assetNonBinaryCollection.Add(asset.ID, asset);
                                // queue the ID so there is something we can safely loop through later
                                assets2Save.Enqueue(asset.ID);
                            }
                        }
                    }
                }
                else
                    failedAssetRestores++;

                if ((successfulAssetRestores + failedAssetRestores) % 100 == 0)
                    MainConsole.Instance.Info("[ARCHIVER]: Loaded " + successfulAssetRestores +
                                              " assets and failed to load " + failedAssetRestores + " assets...");
            }
            else if (!m_skipTerrain && filePath.StartsWith(ArchiveConstants.TERRAINS_PATH))
            {
                LoadTerrain(filePath, data);
            }
            else if (!m_merge && filePath.StartsWith(ArchiveConstants.SETTINGS_PATH))
            {
                LoadRegionSettings(filePath, data);
            }
            else if (!m_skipTerrain && filePath.StartsWith(ArchiveConstants.LANDDATA_PATH))
            {
                var parcel = LoadLandData(data);
                landData.Add(parcel);
            }
            else if (filePath == ArchiveConstants.CONTROL_FILE_PATH)
            {
                LoadControlFile(data);
            }
        }

        MainConsole.Instance.CleanInfo("");
        MainConsole.Instance.Info("[ARCHIVER]: Saving loaded assets");
        ticker = 0;

        // Save assets
        int savingAssetsCount = 0;
        while (assets2Save.Count > 0)
        {
            ticker++;
            if (ticker % 10 == 0)
                MainConsole.Instance.Ticker();

            try
            {
                UUID assetid = assets2Save.Dequeue();
                SaveNonBinaryAssets(assetid, assetNonBinaryCollection[assetid], assetBinaryChangeRecord);
                savingAssetsCount++;
                if (savingAssetsCount % 100 == 0)
                    MainConsole.Instance.Info("[ARCHIVER]: Saving " + savingAssetsCount + " assets...");
            }
            catch (Exception ex)
            {
                MainConsole.Instance.Info("[ARCHIVER]: Exception in saving an asset: " + ex.ToString());
            }
        }

        MainConsole.Instance.CleanInfo("");
        MainConsole.Instance.Info("[ARCHIVER]: Saving loaded objects");
        ticker = 0;

        foreach (byte[] data2 in seneObjectGroups)
        {
            ticker++;
            if (ticker % 10 == 0)
                MainConsole.Instance.Ticker();

            byte[] data3 = data2;
            string stringData = Utils.BytesToString(data3);
            MatchCollection mc = Regex.Matches(stringData, sPattern);
            bool didChange = false;
            if (mc.Count >= 1)
            {
                foreach (Match match in mc)
                {
                    UUID thematch = new UUID(match.Value);
                    UUID newvalue = thematch;
                    if (assetNonBinaryCollection.ContainsKey(thematch))
                        newvalue = assetNonBinaryCollection[thematch].ID;
                    else if (assetBinaryChangeRecord.ContainsKey(thematch))
                        newvalue = assetBinaryChangeRecord[thematch];
                    if (thematch == newvalue)
                        continue;
                    stringData = stringData.Replace(thematch.ToString().Trim(), newvalue.ToString().Trim());
                    didChange = true;
                }
            }
            if (didChange)
                data3 = Utils.StringToBytes(stringData);

            ISceneEntity sceneObject = serialiser.DeserializeGroupFromXml2(data3, m_scene);
            if (sceneObject == null)
            {
                //! big error!
                MainConsole.Instance.Error("Error reading SOP XML (Please mantis this!): " +
                                           m_asciiEncoding.GetString(data3));
                continue;
            }

            // check sceneObject ownership...
            sceneObject.OwnerID = ResolveUserUuid(sceneObject.OwnerID, sceneObject.LastSignificantPosition, landData);

            // ... and children
            foreach (ISceneChildEntity part in sceneObject.ChildrenEntities())
            {
                // check user IDs
                part.CreatorID = ResolveUserUuid(part.CreatorID, part.AbsolutePosition, landData);
                part.OwnerID = ResolveUserUuid(part.OwnerID, part.AbsolutePosition, landData);
                part.LastOwnerID = ResolveUserUuid(part.LastOwnerID, part.AbsolutePosition, landData);

                // check group IDs
                part.GroupID = ResolveGroupUuid(part.GroupID);

                // And zap any troublesome sit target information
                part.SitTargetOrientation = new Quaternion(0, 0, 0, 1);
                part.SitTargetPosition = new Vector3(0, 0, 0);

                // Fix ownership/creator of inventory items.
                // Not doing so results in inventory items
                // being no copy/no mod for everyone
                lock (part.TaskInventory)
                {
                    TaskInventoryDictionary inv = part.TaskInventory;
                    foreach (KeyValuePair<UUID, TaskInventoryItem> kvp in inv)
                    {
                        // check user IDs
                        kvp.Value.OwnerID = ResolveUserUuid(kvp.Value.OwnerID, part.AbsolutePosition, landData);
                        kvp.Value.LastOwnerID = ResolveUserUuid(kvp.Value.LastOwnerID, part.AbsolutePosition, landData);
                        kvp.Value.CreatorID = ResolveUserUuid(kvp.Value.CreatorID, part.AbsolutePosition, landData);

                        // ... and possible group IDs
                        kvp.Value.GroupID = ResolveGroupUuid(kvp.Value.GroupID);
                    }
                }
            }

            // Add the offsets of the region
            Vector3 newPos = new Vector3(sceneObject.AbsolutePosition.X + m_offsetX,
                                         sceneObject.AbsolutePosition.Y + m_offsetY,
                                         sceneObject.AbsolutePosition.Z + m_offsetZ);
            if (m_flipX)
                newPos.X = m_scene.RegionInfo.RegionSizeX - newPos.X;
            if (m_flipY)
                newPos.Y = m_scene.RegionInfo.RegionSizeY - newPos.Y;
            sceneObject.SetAbsolutePosition(false, newPos);

            if (m_scene.SceneGraph.AddPrimToScene(sceneObject))
            {
                groupsToBackup.Add(sceneObject);
                sceneObject.ScheduleGroupUpdate(PrimUpdateFlags.ForcedFullUpdate);
                sceneObject.CreateScriptInstances(0, false, StateSource.RegionStart, UUID.Zero, true);
            }

            sceneObjectsLoadedCount++;
            if (sceneObjectsLoadedCount % 100 == 0)
                MainConsole.Instance.Info("[ARCHIVER]: Loaded " + sceneObjectsLoadedCount + " objects...");
        }

        assetNonBinaryCollection.Clear();
        assetBinaryChangeRecord.Clear();
        seneObjectGroups.Clear();
    }
    catch (Exception e)
    {
        MainConsole.Instance.ErrorFormat(
            "[ARCHIVER]: Aborting load with error in archive file {0}. {1}", filePath, e);
        m_errorMessage += e.ToString();
        m_scene.EventManager.TriggerOarFileLoaded(UUID.Zero.Guid, m_errorMessage);
        return false;
    }
    finally
    {
        archive.Close();
        m_loadStream.Close();
        m_loadStream.Dispose();

        // Re-enable scripts now that we are done
        foreach (IScriptModule module in modules)
        {
            module.Disabled = false;
        }
        // Reset backup too
        if (backup != null)
            backup.LoadingPrims = false;
    }

    // finished with the ticker
    MainConsole.Instance.CleanInfo("");

    // Now back up the prims
    foreach (ISceneEntity grp in groupsToBackup)
    {
        // Backup!
        grp.HasGroupChanged = true;
    }

    if (!m_skipAssets && m_useAsync && !AssetSaverIsRunning)
        m_threadpool.QueueEvent(SaveAssets, 0);

    if (!m_skipAssets)
    {
        MainConsole.Instance.InfoFormat("[ARCHIVER]: Restored {0} assets", successfulAssetRestores);

        if (failedAssetRestores > 0)
        {
            MainConsole.Instance.ErrorFormat("[ARCHIVER]: Failed to load {0} assets", failedAssetRestores);
            m_errorMessage += String.Format("Failed to load {0} assets", failedAssetRestores);
        }
    }

    // Reload serialized parcels
    if (!m_skipTerrain)
    {
        MainConsole.Instance.InfoFormat("[ARCHIVER]: Loading {0} parcels. Please wait.", landData.Count);
        IParcelManagementModule parcelManagementModule = m_scene.RequestModuleInterface<IParcelManagementModule>();
        if (parcelManagementModule != null)
            parcelManagementModule.IncomingLandDataFromOAR(landData, m_merge, new Vector2(m_offsetX, m_offsetY));
        MainConsole.Instance.InfoFormat("[ARCHIVER]: Restored {0} parcels.", landData.Count);
    }
    // Clean it out
    landData.Clear();

    MainConsole.Instance.InfoFormat("[ARCHIVER]: Successfully loaded archive in " +
                                    (DateTime.Now - start).Minutes + ":" + (DateTime.Now - start).Seconds);

    m_validUserUuids.Clear();
    m_validGroupUuids.Clear();
    m_scene.EventManager.TriggerOarFileLoaded(UUID.Zero.Guid, m_errorMessage);
    return true;    // all good
}
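// --- Illustrative sketch, not part of the original source ---
// Condenses the asset UUID remapping step used in DearchiveRegion0DotStar above: the serialized
// object XML is scanned for UUIDs (the original keeps its own pattern in sPattern), and any asset
// ID that was re-issued when the asset was stored is substituted in the text before the group is
// deserialized. The pattern string, method name, and sample dictionary here are hypothetical
// stand-ins; only the technique is taken from the code above.
static string RemapAssetIds(string serializedXml, Dictionary<UUID, UUID> idChanges)
{
    // A plain UUID pattern; assumed equivalent in intent to the original sPattern.
    const string uuidPattern = @"[0-9a-fA-F]{8}-([0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12}";

    foreach (Match match in Regex.Matches(serializedXml, uuidPattern))
    {
        UUID oldId = new UUID(match.Value);
        UUID newId;
        // Only touch IDs that were actually re-issued when the asset was stored
        if (idChanges.TryGetValue(oldId, out newId) && newId != oldId)
            serializedXml = serializedXml.Replace(oldId.ToString(), newId.ToString());
    }
    return serializedXml;
}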