public void BasicReadWriteTest()
{
	// Fixture values to round-trip through the serializer.
	String fileName = "testSerialiser.dat";
	Vector3 expectedVector = new Vector3( 0.3f, 15.2f, -12.0f );
	String expectedString = "Some text here";
	int expectedValue = 99;
	int[] expectedArray = new int[ 5 ]
	                      {
	                      	5, 4, 3, 2, 1
	                      };

	uint chunkID = StreamSerializer.MakeIdentifier( "TEST" );
	byte[] buffer = new byte[ 1024 ];

	// Phase 1: write the fixture data into the shared in-memory buffer.
	{
		Stream stream = new MemoryStream( buffer ); // arch.Create(fileName, true));
		using ( StreamSerializer writer = new StreamSerializer( stream ) )
		{
			writer.WriteChunkBegin( chunkID );

			writer.Write( expectedVector );
			writer.Write( expectedString );
			writer.Write( expectedValue );
			writer.Write( expectedArray );
			writer.WriteChunkEnd( chunkID );
		}
	}

	// Phase 2: read the buffer back and verify every field survived intact.
	{
		Stream stream = new MemoryStream( buffer ); //arch.Open(fileName);
		using ( StreamSerializer reader = new StreamSerializer( stream ) )
		{
			Chunk c = reader.ReadChunkBegin();
			Assert.AreEqual( chunkID, c.id );
			// Expected payload: Vector3 (3 floats) + int + string (chars + 4-byte
			// length prefix) + int[] (elements + 4-byte length prefix).
			Assert.AreEqual(
				sizeof( float ) * 3 + sizeof( int ) + expectedString.Length + 4 +
				sizeof( int ) * expectedArray.Length + sizeof( int ),
				(int)c.length );

			Vector3 actualVector;
			String actualString;
			int actualValue;
			int[] actualArray;
			reader.Read( out actualVector );
			reader.Read( out actualString );
			reader.Read( out actualValue );
			reader.Read( out actualArray );

			reader.ReadChunkEnd( chunkID );

			Assert.AreEqual( expectedVector, actualVector );
			Assert.AreEqual( expectedString, actualString );
			Assert.AreEqual( expectedValue, actualValue );
			Assert.AreEqual( expectedArray, actualArray );
		}
	}
}
/// <summary>
/// Writes this collection as a subclass chunk: header, every content item in
/// list order, then the chunk footer.
/// </summary>
/// <param name="stream">Serializer to write the chunk into.</param>
public override void Save( StreamSerializer stream )
{
	stream.WriteChunkBegin( SUBCLASS_CHUNK_ID, SUBCLASS_CHUNK_VERSION );
	foreach ( PageContent content in mContentList )
	{
		content.Save( stream );
	}
	stream.WriteChunkEnd( SUBCLASS_CHUNK_ID );
}
/// <summary>
/// Load this section from a stream (returns true if successful).
/// </summary>
/// <param name="stream">Serializer positioned at this section's chunk.</param>
/// <returns>True if the chunk header matched and the section was read.</returns>
public virtual bool Load( StreamSerializer stream )
{
	if ( stream.ReadChunkBegin( CHUNK_ID, CHUNK_VERSION, "PagedWorldSection" ) == null )
	{
		return false;
	}

	// name
	stream.Read( out mName );

	// AABB
	stream.Read( out mAABB );

	// page strategy name
	string stratName = string.Empty;
	stream.Read( out stratName );
	SetStrategy( stratName );

	// page strategy data — a failure here is logged but deliberately non-fatal,
	// so the rest of the world file can still be loaded.
	bool strategyDataOk = mStrategyData.Load( stream );
	if ( !strategyDataOk )
	{
		// BUGFIX: corrected typo "contens" -> "contents" in the log message.
		LogManager.Instance.Write( "Error: PageStrategyData for section '" + mName +
		                           "' was not loaded correctly, check file contents" );
	}

	stream.ReadChunkEnd( CHUNK_ID );
	return true;
}
/// <summary>
/// Reads this strategy data back from its chunk; field order must match Save().
/// </summary>
/// <param name="stream">Serializer positioned at this chunk.</param>
/// <returns>True if the chunk header matched and the data was read.</returns>
public bool Load( StreamSerializer stream )
{
	if ( stream.ReadChunkBegin( CHUNK_ID, CHUNK_VERSION, "Grid2DPageStrategyData" ) == null )
	{
		return false;
	}

	// Mode is persisted as a single byte.
	byte modeByte = 0;
	stream.Read( out modeByte );
	this.mMode = (Grid2Mode)modeByte;

	// Assign through the Origin property (not the field), as the original did.
	Vector3 worldOrigin;
	stream.Read( out worldOrigin );
	Origin = worldOrigin;

	stream.Read( out this.mCellSize );
	stream.Read( out this.mLoadRadius );
	stream.Read( out this.mHoldRadius );
	stream.Read( out this.mMinCellX );
	stream.Read( out this.mMaxCellX );
	stream.Read( out this.mMinCellY );
	stream.Read( out this.mMaxCellY );

	stream.ReadChunkEnd( CHUNK_ID );
	return true;
}
/// <summary>
/// Serializes this page: chunk header, page id, each attached content
/// collection, then the chunk footer.
/// </summary>
/// <param name="stream">Serializer to write into.</param>
public virtual void Save( StreamSerializer stream )
{
	stream.WriteChunkBegin( CHUNK_ID, CHUNK_VERSION );

	// page id
	stream.Write( mID.Value );

	// content collections
	foreach ( PageContentCollection collection in mContentCollections )
	{
		collection.Save( stream );
	}

	stream.WriteChunkEnd( CHUNK_ID );
}
/// <summary>
/// Save world data to a raw stream by wrapping it in a StreamSerializer.
/// The stream itself stays owned (and is closed) by the caller.
/// </summary>
/// <param name="stream">Destination stream.</param>
public void Save( Stream stream )
{
	Save( new StreamSerializer( stream ) );
}
/// <summary>
/// Load world data from a raw stream by wrapping it in a StreamSerializer.
/// The stream itself stays owned (and is closed) by the caller.
/// </summary>
/// <param name="stream">Source stream.</param>
public void Load( Stream stream )
{
	Load( new StreamSerializer( stream ) );
}
/// <summary>
/// Persists the max height delta of each LOD level owned by this node, then
/// recurses into all four children for interior nodes.
/// </summary>
/// <param name="stream">Serializer to write into.</param>
public void Save( StreamSerializer stream )
{
	// save LOD data we need
	foreach ( var level in this.mLodLevels )
	{
		stream.Write( level.MaxHeightDelta );
	}

	if ( !IsLeaf )
	{
		for ( int child = 0; child < 4; ++child )
		{
			this.mChildren[ child ].Save( stream );
		}
	}
}
/// <summary>
/// Opens the named resource and prepares from it.
/// </summary>
/// <param name="fileName">Resource name to open in <c>DerivedResourceGroup</c>.</param>
/// <returns>Result of <see cref="Prepare(StreamSerializer)"/> on the opened stream.</returns>
public bool Prepare( string fileName )
{
	// BUGFIX: the original never closed the FileStream, leaking the file
	// handle. The using block guarantees disposal even if Prepare throws.
	using ( var stream = (FileStream)ResourceGroupManager.Instance.OpenResource( fileName, DerivedResourceGroup ) )
	{
		var ser = new StreamSerializer( stream );
		return Prepare( ser );
	}
}
/// <summary>
/// Reads a list of layer instances written by <c>WriteLayerInstanceList</c>.
/// </summary>
/// <param name="stream">Serializer positioned at the layer-count byte.</param>
/// <param name="numSamplers">Texture names stored per layer (one per sampler).</param>
/// <param name="targetLayers">Receives a freshly allocated list of layers.</param>
/// <returns>False if a layer-instance chunk header failed to match.</returns>
public static bool ReadLayerInstanceList( ref StreamSerializer stream, int numSamplers, ref List<LayerInstance> targetLayers )
{
	// Layer count travels as a single byte.
	byte layerCount;
	stream.Read( out layerCount );
	targetLayers = new List<LayerInstance>( layerCount );

	for ( int layerIndex = 0; layerIndex < layerCount; ++layerIndex )
	{
		if ( stream.ReadChunkBegin( TERRAINLAYERINSTANCE_CHUNK_ID, TERRAINLAYERINSTANCE_CHUNK_VERSION ) == null )
		{
			return false;
		}

		var layer = new LayerInstance();
		stream.Read( out layer.WorldSize );

		// One texture name per declared sampler.
		layer.TextureNames = new List<string>( numSamplers );
		for ( int samplerIndex = 0; samplerIndex < numSamplers; ++samplerIndex )
		{
			string textureName;
			stream.Read( out textureName );
			layer.TextureNames.Add( textureName );
		}

		stream.ReadChunkEnd( TERRAINLAYERINSTANCE_CHUNK_ID );
		targetLayers.Add( layer );
	}

	return true;
}
/// <summary>
/// Writes a list of layer instances: a single count byte followed by one
/// chunk per layer (world size, then every texture name).
/// </summary>
/// <param name="layers">Layers to serialize (at most 255).</param>
/// <param name="stream">Serializer to write into.</param>
public static void WriteLayerInstanceList( List<LayerInstance> layers, ref StreamSerializer stream )
{
	// Layer count is stored as a single byte.
	stream.Write( (byte)layers.Count );

	foreach ( var layer in layers )
	{
		stream.WriteChunkBegin( TERRAINLAYERINSTANCE_CHUNK_ID, TERRAINLAYERINSTANCE_CHUNK_VERSION );
		stream.Write( layer.WorldSize );
		foreach ( var textureName in layer.TextureNames )
		{
			stream.Write( textureName );
		}
		stream.WriteChunkEnd( TERRAINLAYERINSTANCE_CHUNK_ID );
	}
}
/// <summary>
/// Reads a terrain layer declaration written by <c>WriteLayerDeclaration</c>:
/// the sampler list followed by the sampler-element list.
/// </summary>
/// <param name="stream">Serializer positioned at the declaration chunk.</param>
/// <param name="targetDecl">Receives the samplers and elements.</param>
/// <returns>False if any expected chunk header failed to match.</returns>
public static bool ReadLayerDeclaration( ref StreamSerializer stream, ref TerrainLayerDeclaration targetDecl )
{
	if ( stream.ReadChunkBegin( TERRAINLAYERDECLARATION_CHUNK_ID, TERRAINLAYERDECLARATION_CHUNK_VERSION ) == null )
	{
		return false;
	}

	// -- samplers --
	byte samplerCount;
	stream.Read( out samplerCount );
	targetDecl.Samplers = new List<TerrainLayerSampler>( samplerCount );
	for ( int s = 0; s < samplerCount; ++s )
	{
		if ( stream.ReadChunkBegin( TERRAINLAYERSAMPLER_CHUNK_ID, TERRAINLAYERSAMPLER_CHUNK_VERSION ) == null )
		{
			return false;
		}

		var sampler = new TerrainLayerSampler();
		stream.Read( out sampler.Alias );

		// Pixel format travels as a byte.
		byte formatByte;
		stream.Read( out formatByte );
		sampler.Format = (PixelFormat)formatByte;

		stream.ReadChunkEnd( TERRAINLAYERSAMPLER_CHUNK_ID );
		targetDecl.Samplers.Add( sampler );
	}

	// -- elements --
	byte elementCount;
	stream.Read( out elementCount );
	targetDecl.Elements = new List<TerrainLayerSamplerElement>( elementCount );
	for ( int e = 0; e < elementCount; ++e )
	{
		if ( stream.ReadChunkBegin( TERRAINLAYERSAMPLERELEMENT_CHUNK_ID, TERRAINLAYERSAMPLERELEMENT_CHUNK_VERSION ) == null )
		{
			return false;
		}

		var element = new TerrainLayerSamplerElement();
		stream.Read( out element.Source );

		// Semantic travels as a byte.
		byte semanticByte;
		stream.Read( out semanticByte );
		element.Semantic = (TerrainLayerSamplerSemantic)semanticByte;

		stream.Read( out element.ElementStart );
		stream.Read( out element.ElementCount );

		stream.ReadChunkEnd( TERRAINLAYERSAMPLERELEMENT_CHUNK_ID );
		targetDecl.Elements.Add( element );
	}

	stream.ReadChunkEnd( TERRAINLAYERDECLARATION_CHUNK_ID );
	return true;
}
/// <summary>
/// Writes a terrain layer declaration: the sampler list followed by the
/// sampler-element list, each entry in its own chunk.
/// </summary>
/// <param name="decl">Declaration to serialize (at most 255 of each).</param>
/// <param name="stream">Serializer to write into.</param>
public static void WriteLayerDeclaration( TerrainLayerDeclaration decl, ref StreamSerializer stream )
{
	// Layer declaration
	stream.WriteChunkBegin( TERRAINLAYERDECLARATION_CHUNK_ID, TERRAINLAYERDECLARATION_CHUNK_VERSION );

	// -- samplers --
	stream.Write( (byte)decl.Samplers.Count );
	foreach ( var sampler in decl.Samplers )
	{
		stream.WriteChunkBegin( TERRAINLAYERSAMPLER_CHUNK_ID, TERRAINLAYERSAMPLER_CHUNK_VERSION );
		stream.Write( sampler.Alias );
		// Pixel format travels as a byte.
		stream.Write( (byte)sampler.Format );
		stream.WriteChunkEnd( TERRAINLAYERSAMPLER_CHUNK_ID );
	}

	// -- elements --
	stream.Write( (byte)decl.Elements.Count );
	foreach ( var element in decl.Elements )
	{
		stream.WriteChunkBegin( TERRAINLAYERSAMPLERELEMENT_CHUNK_ID, TERRAINLAYERSAMPLERELEMENT_CHUNK_VERSION );
		stream.Write( element.Source );
		// Semantic travels as a byte.
		stream.Write( (byte)element.Semantic );
		stream.Write( element.ElementStart );
		stream.Write( element.ElementCount );
		stream.WriteChunkEnd( TERRAINLAYERSAMPLERELEMENT_CHUNK_ID );
	}

	stream.WriteChunkEnd( TERRAINLAYERDECLARATION_CHUNK_ID );
}
/// <summary>
/// Serializes the full terrain state: basic parameters, height data, layer
/// declaration and instances, packed blend maps, derived maps (normal, color,
/// light, composite), delta data and the quadtree.
/// </summary>
/// <param name="stream">Serializer to write into.</param>
public void Save( StreamSerializer stream )
{
	// wait for any queued processes to finish
	WaitForDerivedProcesses();

	if ( IsHeightDataModified )
	{
		// When modifying, for efficiency we only increase the max deltas at each LOD,
		// we never reduce them (since that would require re-examining more samples)
		// Since we now save this data in the file though, we need to make sure we've
		// calculated the optimal
		var rect = new Rectangle();
		rect.Top = 0;
		rect.Bottom = this.mSize;
		rect.Left = 0;
		rect.Right = this.mSize;
		CalculateHeightDeltas( rect );
		FinalizeHeightDeltas( rect, false );
	}

	stream.WriteChunkBegin( TERRAIN_CHUNK_ID, TERRAIN_CHUNK_VERSION );

	// Basic terrain parameters.
	var align = (byte)Alignment;
	stream.Write( align );
	stream.Write( this.mSize );
	stream.Write( this.mWorldSize );
	stream.Write( this.mMaxBatchSize );
	stream.Write( this.mMinBatchSize );
	stream.Write( this.mPos );

	// Height data, one float per vertex.
	for ( var i = 0; i < this.mHeightData.Length; i++ )
	{
		stream.Write( this.mHeightData[ i ] );
	}

	WriteLayerDeclaration( this.mLayerDecl, ref stream );

	// Layers
	CheckLayers( false );
	var numLayers = (byte)this.mLayers.Count;
	WriteLayerInstanceList( this.mLayers, ref stream );

	// packed layer blend data
	if ( this.mCpuBlendMapStorage.Count > 0 )
	{
		// save from CPU data if it's there, it means GPU data was never created
		stream.Write( this.mLayerBlendMapSize );

		// save packed cpu data
		var numBlendTex = (byte)GetBlendTextureCount( numLayers );
		for ( var i = 0; i < numBlendTex; ++i )
		{
			var fmt = GetBlendTextureFormat( (byte)i, numLayers );
			var channels = PixelUtil.GetNumElemBytes( fmt );
			var dataSz = channels*this.mLayerBlendMapSize*this.mLayerBlendMapSize;
			var pData = this.mCpuBlendMapStorage[ i ];
			// NOTE(review): data is written before its size here while the read
			// path computes the size itself and never reads it back — confirm
			// against StreamSerializer.Write(byte[]) semantics.
			stream.Write( pData );
			stream.Write( dataSz );
		}
	}
	else
	{
		if ( this.mLayerBlendMapSize != this.mLayerBlendSizeActual )
		{
			LogManager.Instance.Write( @"WARNING: blend maps were requested at a size larger than was supported on this hardware, which means the quality has been degraded" );
		}
		stream.Write( this.mLayerBlendSizeActual );
		var tmpData = new byte[this.mLayerBlendSizeActual*this.mLayerBlendSizeActual*4];
		// BUGFIX: dispose the wrapper — the original leaked it, while every
		// other BufferBase.Wrap in this method sits in a using block.
		using ( var pTmpDataF = BufferBase.Wrap( tmpData ) )
		{
			foreach ( var tex in this.mBlendTextureList )
			{
				var dst = new PixelBox( this.mLayerBlendSizeActual, this.mLayerBlendSizeActual, 1, tex.Format, pTmpDataF );
				tex.GetBuffer().BlitToMemory( dst );
				int dataSz = PixelUtil.GetNumElemBytes( tex.Format )*this.mLayerBlendSizeActual*this.mLayerBlendSizeActual;
				stream.Write( tmpData );
				stream.Write( dataSz );
			}
		}
	}

	// other data
	// normals
	stream.WriteChunkBegin( TERRAINDERIVEDDATA_CHUNK_ID, TERRAINDERIVEDDATA_CHUNK_VERSION );
	stream.Write( "normalmap" );
	stream.Write( this.mSize );
	if ( this.mCpuTerrainNormalMap != null )
	{
		var aData = new byte[this.mSize*this.mSize*3];
		using ( var dest = BufferBase.Wrap( aData ) )
		{
			Memory.Copy( this.mCpuTerrainNormalMap.Data, dest, aData.Length );
		}
		// save from CPU data if it's there, it means GPU data was never created
		stream.Write( aData );
	}
	else
	{
		var tmpData = new byte[this.mSize*this.mSize*3];
		using ( var wrap = BufferBase.Wrap( tmpData ) )
		{
			var dst = new PixelBox( this.mSize, this.mSize, 1, PixelFormat.BYTE_RGB, wrap );
			TerrainNormalMap.GetBuffer().BlitToMemory( dst );
			stream.Write( tmpData );
		}
		tmpData = null;
	}
	stream.WriteChunkEnd( TERRAINDERIVEDDATA_CHUNK_ID );

	// color map
	if ( IsGlobalColorMapEnabled )
	{
		stream.WriteChunkBegin( TERRAINDERIVEDDATA_CHUNK_ID, TERRAINDERIVEDDATA_CHUNK_VERSION );
		stream.Write( "colormap" );
		stream.Write( GlobalColorMapSize );
		if ( this.mCpuColorMapStorage != null )
		{
			// save from CPU data if it's there, it means GPU data was never created
			stream.Write( this.mCpuColorMapStorage );
		}
		else
		{
			var aData = new byte[GlobalColorMapSize*GlobalColorMapSize*3];
			using ( var pDataF = BufferBase.Wrap( aData ) )
			{
				var dst = new PixelBox( GlobalColorMapSize, GlobalColorMapSize, 1, PixelFormat.BYTE_RGB, pDataF );
				GlobalColorMap.GetBuffer().BlitToMemory( dst );
			}
			stream.Write( aData );
		}
		stream.WriteChunkEnd( TERRAINDERIVEDDATA_CHUNK_ID );
	}

	// lightmap
	if ( this.mLightMapRequired )
	{
		stream.WriteChunkBegin( TERRAINDERIVEDDATA_CHUNK_ID, TERRAINDERIVEDDATA_CHUNK_VERSION );
		stream.Write( "lightmap" );
		stream.Write( LightMapSize );
		if ( this.mCpuLightmapStorage != null )
		{
			// save from CPU data if it's there, it means GPU data was never created
			stream.Write( this.mCpuLightmapStorage );
		}
		else
		{
			var aData = new byte[LightMapSize*LightMapSize];
			using ( var pDataF = BufferBase.Wrap( aData ) )
			{
				var dst = new PixelBox( LightMapSize, LightMapSize, 1, PixelFormat.L8, pDataF );
				LightMap.GetBuffer().BlitToMemory( dst );
			}
			stream.Write( aData );
		}
		// BUGFIX: this chunk must be closed with TERRAINDERIVEDDATA_CHUNK_ID —
		// the original ended it with TERRAIN_CHUNK_ID, breaking the chunk
		// nesting whenever a lightmap was saved.
		stream.WriteChunkEnd( TERRAINDERIVEDDATA_CHUNK_ID );
	}

	// composite map
	if ( this.mCompositeMapRequired )
	{
		stream.WriteChunkBegin( TERRAINDERIVEDDATA_CHUNK_ID, TERRAINDERIVEDDATA_CHUNK_VERSION );
		stream.Write( "compositemap" );
		stream.Write( this.mCompositeMapSize );
		if ( this.mCpuCompositeMapStorage != null )
		{
			// save from CPU data if it's there, it means GPU data was never created
			stream.Write( this.mCpuCompositeMapStorage );
		}
		else
		{
			// composite map is 4 channel, 3x diffuse, 1x specular mask
			var aData = new byte[this.mCompositeMapSize*this.mCompositeMapSize*4];
			using ( var pDataF = BufferBase.Wrap( aData ) )
			{
				// BUGFIX: the buffer is sized for 4 channels, so the blit must
				// use an RGBA format; BYTE_RGB only filled 3 bytes per pixel.
				var dst = new PixelBox( this.mCompositeMapSize, this.mCompositeMapSize, 1, PixelFormat.BYTE_RGBA, pDataF );
				CompositeMap.GetBuffer().BlitToMemory( dst );
			}
			stream.Write( aData );
		}
		stream.WriteChunkEnd( TERRAINDERIVEDDATA_CHUNK_ID );
	}

	// write deltas
	stream.Write( this.mDeltaDataPtr );

	// write the quadtree
	QuadTree.Save( stream );

	stream.WriteChunkEnd( TERRAIN_CHUNK_ID );

	IsModified = false;
	IsHeightDataModified = false;
}
/// <summary>
/// Saves to the named file via a newly created file stream.
/// </summary>
/// <param name="filename">Destination file in <c>DerivedResourceGroup</c>.</param>
public void Save( string filename )
{
	// BUGFIX: the original never disposed the created file stream, leaving the
	// handle open; the using block guarantees it is closed even on exceptions.
	using ( var stream = Root.Instance.CreateFileStream( filename, DerivedResourceGroup, true ) )
	{
		var ser = new StreamSerializer( stream );
		Save( ser );
	}
}
/// <summary>
/// Writes the TerrainGroup definition for this subtype. The parameters held in
/// the Grid2DStrategyData will have already been saved by the main save routine.
/// </summary>
/// <param name="ser">Serializer to write into.</param>
protected override void SaveSubtypeData( StreamSerializer ser )
{
	this.terrainGroup.SaveGroupDefinition( ref ser );
}
/// <summary>
/// Reads the per-LOD height deltas for this node, recurses into children, and
/// (on the root) runs the post-delta calculation over the covered area.
/// </summary>
/// <param name="stream">Serializer positioned at this node's LOD data.</param>
public void Prepare( StreamSerializer stream )
{
	// Only the 'calc' slot is stored; copy it straight into the final value.
	// The calc/final separation only matters for real-time recalculation —
	// this mirrors what finaliseHeightDeltas does on the calc path.
	for ( int i = 0; i < this.mLodLevels.Count; ++i )
	{
		var level = this.mLodLevels[ i ];
		stream.Read( out level.CalcMaxHeightDelta );
		level.MaxHeightDelta = level.CalcMaxHeightDelta;
		level.LastCFactor = 0;
	}

	if ( !IsLeaf )
	{
		for ( int child = 0; child < 4; ++child )
		{
			this.mChildren[ child ].Prepare( stream );
		}
	}

	// If this is the root, do the post delta calc to finish
	if ( this.mParent == null )
	{
		var rect = new Rectangle();
		rect.Top = this.mOffsetY;
		rect.Bottom = this.mBoundaryY;
		rect.Left = this.mOffsetX;
		rect.Right = this.mBoundaryX;
		PostDeltaCalculation( rect );
	}
}
/// <summary>
/// Reads the full terrain state written by Save(): basic parameters, height
/// data, layer declaration and instances, packed blend maps, derived maps and
/// the delta/quadtree data. GPU resources are created later by Load().
/// </summary>
/// <param name="stream">Serializer positioned at the terrain chunk.</param>
/// <returns>False if any expected chunk header failed to match.</returns>
public bool Prepare( StreamSerializer stream )
{
	FreeTemporaryResources();
	FreeCPUResources();

	CopyGlobalOptions();

	if ( stream.ReadChunkBegin( TERRAIN_CHUNK_ID, TERRAIN_CHUNK_VERSION ) == null )
	{
		return false;
	}

	// Basic terrain parameters.
	byte align;
	stream.Read( out align );
	Alignment = (Alignment)align;
	stream.Read( out this.mSize );
	stream.Read( out this.mWorldSize );
	stream.Read( out this.mMaxBatchSize );
	stream.Read( out this.mMinBatchSize );
	stream.Read( out this.mPos );
	RootSceneNode.Position = this.mPos;
	UpdateBaseScale();
	DetermineLodLevels();

	// Height data, one float per vertex.
	int numVertices = this.mSize*this.mSize;
	this.mHeightData = new float[numVertices];
	stream.Read( out this.mHeightData );

	// layer declaration
	if ( !ReadLayerDeclaration( ref stream, ref this.mLayerDecl ) )
	{
		return false;
	}
	CheckDeclaration();

	// Layers
	// BUGFIX: each layer stores one texture name per *sampler*, so the sampler
	// count must be passed here — the original passed Elements.Count, which
	// desynchronizes the stream whenever the two counts differ (the write side
	// and LoadGroupDefinition both use Samplers.Count).
	if ( !ReadLayerInstanceList( ref stream, this.mLayerDecl.Samplers.Count, ref this.mLayers ) )
	{
		return false;
	}
	DeriveUVMultipliers();

	// Packed layer blend data
	var numLayers = (byte)this.mLayers.Count;
	stream.Read( out this.mLayerBlendMapSize );
	this.mLayerBlendSizeActual = this.mLayerBlendMapSize; // for now, until we check

	// load packed CPU data
	var numBlendTex = GetBlendTextureCount( numLayers );
	for ( var i = 0; i < numBlendTex; ++i )
	{
		var fmt = GetBlendTextureFormat( (byte)i, numLayers );
		var channels = PixelUtil.GetNumElemBytes( fmt );
		var dataSz = channels*this.mLayerBlendMapSize*this.mLayerBlendMapSize;
		var data = new byte[dataSz];
		stream.Read( out data );
		this.mCpuBlendMapStorage.AddRange( data );
	}

	// derived data: optional named sub-chunks until the terrain chunk ends
	while ( !stream.IsEndOfChunk( TERRAIN_CHUNK_ID ) && stream.NextChunkId == TERRAINDERIVEDDATA_CHUNK_ID )
	{
		stream.ReadChunkBegin( TERRAINDERIVEDDATA_CHUNK_ID, TERRAINDERIVEDDATA_CHUNK_VERSION );
		// name
		var name = string.Empty;
		stream.Read( out name );
		ushort sz;
		stream.Read( out sz );
		if ( name == "normalmap" )
		{
			this.mNormalMapRequired = true;
			var data = new byte[sz*sz*3];
			stream.Read( out data );
			using ( var pDataF = BufferBase.Wrap( data ) )
			{
				this.mCpuTerrainNormalMap = new PixelBox( sz, sz, 1, PixelFormat.BYTE_RGB, pDataF );
			}
		}
		else if ( name == "colormap" )
		{
			IsGlobalColorMapEnabled = true;
			GlobalColorMapSize = sz;
			this.mCpuColorMapStorage = new byte[sz*sz*3];
			stream.Read( out this.mCpuColorMapStorage );
		}
		else if ( name == "lightmap" )
		{
			this.mLightMapRequired = true;
			LightMapSize = sz;
			this.mCpuLightmapStorage = new byte[sz*sz];
			stream.Read( out this.mCpuLightmapStorage );
		}
		else if ( name == "compositemap" )
		{
			this.mCompositeMapRequired = true;
			this.mCompositeMapSize = sz;
			this.mCpuCompositeMapStorage = new byte[sz*sz*4];
			stream.Read( out this.mCpuCompositeMapStorage );
		}

		stream.ReadChunkEnd( TERRAINDERIVEDDATA_CHUNK_ID );
	}

	// Load delta data
	var deltaData = new byte[ sizeof( float ) * numVertices ];
	stream.Read( out deltaData );
	this.mDeltaDataPtr = BufferBase.Wrap( deltaData );

	// Create and load quadtree
	QuadTree = new TerrainQuadTreeNode( this, null, 0, 0, this.mSize, (ushort)( NumLodLevels - 1 ), 0, 0 );
	QuadTree.Prepare();

	stream.ReadChunkEnd( TERRAIN_CHUNK_ID );

	DistributeVertexData();

	IsModified = false;
	IsHeightDataModified = false;

	return true;
}
/// <summary>
/// Prepares from the stream and, on success, finishes loading; throws on a
/// failed prepare.
/// </summary>
/// <param name="stream">Serializer positioned at the terrain chunk.</param>
public void Load( StreamSerializer stream )
{
	// Guard-clause form: bail with the same exception the original threw.
	if ( !Prepare( stream ) )
	{
		throw new AxiomException( "Error while preparing from stream, see log for details. Terrain.Load" );
	}

	Load();
}
/// <summary>
/// Wraps every content item of this collection in a single subclass chunk.
/// </summary>
/// <param name="stream">Serializer to write the chunk into.</param>
public override void Save( StreamSerializer stream )
{
	stream.WriteChunkBegin( SUBCLASS_CHUNK_ID, SUBCLASS_CHUNK_VERSION );

	foreach ( var content in this.mContentList )
	{
		content.Save( stream );
	}

	stream.WriteChunkEnd( SUBCLASS_CHUNK_ID );
}
/// <summary>
/// Load world data from a serialiser (returns true if successful).
/// </summary>
/// <param name="stream">Serializer positioned at the world chunk.</param>
/// <returns>True if the world chunk header matched.</returns>
public bool Load( StreamSerializer stream )
{
	if ( stream.ReadChunkBegin( CHUNK_ID, CHUNK_VERSION, "PageWorld" ) == null )
	{
		return false;
	}

	// name
	stream.Read( out mName );

	// sections: keep consuming section chunks until something else shows up
	while ( stream.NextChunkId == PagedWorldSection.CHUNK_ID )
	{
		PagedWorldSection section = new PagedWorldSection( this );
		if ( section.Load( stream ) )
		{
			mSections.Add( section.Name, section );
		}
		else
		{
			// Failed section: drop it and stop reading further sections.
			section = null;
			break;
		}
	}

	stream.ReadChunkEnd( CHUNK_ID );
	return true;
}
/// <summary>
/// Prepares every content item from this collection's subclass chunk.
/// </summary>
/// <param name="stream">Serializer positioned at the subclass chunk.</param>
/// <returns>True only if the chunk matched and every item prepared successfully.</returns>
public override bool Prepare( StreamSerializer stream )
{
	if ( stream.ReadChunkBegin( SUBCLASS_CHUNK_ID, SUBCLASS_CHUNK_VERSION, "SimplePageContentCollection" ) == null )
	{
		return false;
	}

	// Every item still gets the chance to consume its data even after one
	// fails; the combined result reports whether all succeeded.
	bool allPrepared = true;
	foreach ( var content in this.mContentList )
	{
		allPrepared &= content.Prepare( stream );
	}

	stream.ReadChunkEnd( SUBCLASS_CHUNK_ID );
	return allPrepared;
}
/// <summary>
/// Save world data to a serialiser: chunk header, world name, every section,
/// then the chunk footer.
/// </summary>
/// <param name="stream">Serializer to write into.</param>
public void Save( StreamSerializer stream )
{
	stream.WriteChunkBegin( CHUNK_ID, CHUNK_VERSION );

	// name
	stream.Write( mName );

	// sections
	foreach ( PagedWorldSection section in mSections.Values )
	{
		section.Save( stream );
	}

	stream.WriteChunkEnd( CHUNK_ID );
}
/// <summary>
/// Writes the TerrainGroup definition chunk: base group settings followed by
/// the default import data, its layer declaration and its layer list.
/// The write order here must stay in sync with LoadGroupDefinition.
/// </summary>
/// <param name="stream">Serializer to write the chunk into.</param>
public void SaveGroupDefinition(ref StreamSerializer stream)
{
	stream.WriteChunkBegin(ChunkID, ChunkVersion);

	// Base group settings.
	stream.Write(_alignment);
	stream.Write(_terrainSize);
	stream.Write(_terrainWorldSize);
	stream.Write(_filenamePrefix);
	stream.Write(_filenameExtension);
	stream.Write(_resourceGroup);
	stream.Write(_origin);

	// Default import data used for new terrains in this group.
	stream.Write(_defaultImportData.ConstantHeight);
	stream.Write(_defaultImportData.InputBias);
	stream.Write(_defaultImportData.InputScale);
	stream.Write(_defaultImportData.MaxBatchSize);
	stream.Write(_defaultImportData.MinBatchSize);
	Terrain.WriteLayerDeclaration(_defaultImportData.LayerDeclaration, ref stream);
	Terrain.WriteLayerInstanceList(_defaultImportData.LayerList, ref stream);

	stream.WriteChunkEnd(ChunkID);
}
/// <summary>
/// Reads this page's data from the stream: verifies the stored page ID against
/// the expected one, then instantiates each serialized content collection via
/// its registered factory.
/// </summary>
/// <param name="stream">Serializer positioned at the page chunk.</param>
/// <returns>True if the chunk header and page ID matched.</returns>
protected override bool PrepareImpl( StreamSerializer stream )
{
	// now do the real loading
	if ( stream.ReadChunkBegin( CHUNK_ID, CHUNK_VERSION, "Page" ) == null )
	{
		return false;
	}

	// pageID check (we should know the ID we're expecting)
	int storedID = -1;
	stream.Read( out storedID );
	if ( mID.Value != storedID )
	{
		LogManager.Instance.Write( "Error: Tried to populate Page ID " + mID.Value +
		                           " with data corresponding to page ID " + storedID );
		// Rewind so the caller can deal with the chunk itself.
		stream.UndoReadChunk( CHUNK_ID );
		return false;
	}

	PageManager manager = Manager;

	while ( stream.NextChunkId == PageContentCollection.CHUNK_ID )
	{
		Chunk collectionChunk = stream.ReadChunkBegin();
		string factoryName = string.Empty;
		stream.Read( out factoryName );

		// Supported type?
		IPageContentCollectionFactory factory = manager.GetContentCollectionFactory( factoryName );
		if ( factory == null )
		{
			LogManager.Instance.Write( "Unsupported PageContentCollection type: " + factoryName + " in + " + this.ToString() );
			// Skip over the unrecognized chunk and carry on with the next one.
			stream.ReadChunkEnd( collectionChunk.id );
			continue;
		}

		PageContentCollection collection = factory.CreateInstance();
		if ( collection.Prepare( stream ) )
		{
			AttachContentCollection( collection );
		}
		else
		{
			LogManager.Instance.Write( "Error preparing PageContentCollection type: " + factoryName + " in + " + this.ToString() );
			factory.DestroyInstance( ref collection );
		}
	}

	return true;
}
/// <summary>
/// Reads the TerrainGroup definition chunk written by SaveGroupDefinition:
/// base group settings, then the default import data, its layer declaration
/// and its layer list. The read order must mirror the write order exactly.
/// </summary>
/// <param name="stream">Serializer positioned at the TerrainGroup chunk.</param>
/// <exception cref="AxiomException">If the chunk header does not match.</exception>
public void LoadGroupDefinition(ref StreamSerializer stream)
{
	if (stream.ReadChunkBegin(ChunkID, ChunkVersion) == null)
		throw new AxiomException("Item not found!,Stream does not contain TerrainGroup data", new object[] { });

	// Base details
	stream.Read(out _alignment);
	stream.Read(out _terrainSize);
	stream.Read(out _terrainWorldSize);
	stream.Read(out _filenamePrefix);
	stream.Read(out _filenameExtension);
	stream.Read(out _resourceGroup);
	stream.Read(out _origin);

	// Default import data for new terrains in this group.
	stream.Read(out _defaultImportData.ConstantHeight);
	stream.Read(out _defaultImportData.InputBias);
	stream.Read(out _defaultImportData.InputScale);
	stream.Read(out _defaultImportData.MaxBatchSize);
	stream.Read(out _defaultImportData.MinBatchSize);
	_defaultImportData.LayerDeclaration = new TerrainLayerDeclaration();
	Terrain.ReadLayerDeclaration(ref stream, ref _defaultImportData.LayerDeclaration);
	_defaultImportData.LayerList = new List<LayerInstance>();
	// One texture name per sampler in the declaration.
	Terrain.ReadLayerInstanceList(ref stream, _defaultImportData.LayerDeclaration.Samplers.Count,
		ref _defaultImportData.LayerList);

	// Mirror the loaded settings into the import data used for new terrains.
	_defaultImportData.TerrainAlign = _alignment;
	_defaultImportData.TerrainSize = _terrainSize;
	_defaultImportData.WorldSize = _terrainWorldSize;
	_defaultImportData.DeleteInputData = true;

	stream.ReadChunkEnd(ChunkID);
}
/// <summary>
/// Writes this strategy data as a single chunk; the field order here must
/// stay in sync with the read order in Load().
/// </summary>
/// <param name="stream">Serializer to write the chunk into.</param>
public void Save( StreamSerializer stream )
{
	stream.WriteChunkBegin( CHUNK_ID, CHUNK_VERSION );

	// Mode is persisted as a single byte.
	stream.Write( (byte)this.mMode );
	stream.Write( this.mWorldOrigin );
	stream.Write( this.mCellSize );
	stream.Write( this.mLoadRadius );
	stream.Write( this.mHoldRadius );
	// Cell index bounds.
	stream.Write( this.mMinCellX );
	stream.Write( this.mMaxCellX );
	stream.Write( this.mMinCellY );
	stream.Write( this.mMaxCellY );

	stream.WriteChunkEnd( CHUNK_ID );
}
/// <summary>
/// Loads the TerrainGroup definition for this subtype, creating the group on
/// first use. The parameters held in the Grid2DStrategyData will have already
/// been loaded by the main load routine.
/// </summary>
/// <param name="ser">Serializer positioned at the group definition.</param>
protected override void LoadSubtypeData( StreamSerializer ser )
{
	// Lazily create the group before pulling its definition from the stream.
	if ( this.terrainGroup == null )
	{
		this.terrainGroup = new TerrainGroup( SceneManager );
	}

	this.terrainGroup.LoadGroupDefinition( ref ser );

	SyncSettings();
}
/// <summary>
/// Save this section to a stream: name, bounds, strategy name and strategy
/// data, wrapped in this section's chunk. Page data itself is not yet written
/// (see the warning below).
/// </summary>
/// <param name="stream">Serializer to write the chunk into.</param>
public virtual void Save(StreamSerializer stream)
{
	stream.WriteChunkBegin(CHUNK_ID, CHUNK_VERSION);

	//name
	stream.Write(mName);

	//AABB
	stream.Write(mAABB);

	//page strategy name
	stream.Write(mStrategy.Name);

	//page strategy data
	mStrategyData.Save(stream);

	//save all pages
#warning TODO: save all pages.

	stream.WriteChunkEnd(CHUNK_ID);
}
/// <summary>
/// Saves this section's chunk (name, bounds, scene manager identity, strategy
/// name/data and subtype data), then saves each page to its own file.
/// </summary>
/// <param name="stream">Serializer to write the section chunk into.</param>
public virtual void Save( StreamSerializer stream )
{
	stream.WriteChunkBegin( CHUNK_ID, CHUNK_VERSION );

	// name
	stream.Write( this.mName );

	// AABB
	stream.Write( this.mAABB );

	// SceneManager type & name
	stream.Write( this.mSceneMgr.TypeName );
	stream.Write( this.mSceneMgr.Name );

	// page strategy name
	stream.Write( this.mStrategy.Name );

	// page strategy data
	this.mStrategyData.Save( stream );

	// Save any data specific to a subtype of this class
	SaveSubtypeData( stream );

	stream.WriteChunkEnd( CHUNK_ID );

	// save all pages (in separate files)
	foreach ( var pageEntry in this.mPages )
	{
		pageEntry.Value.Save();
	}
}