public void BasicReadWriteTest()
{
	// Round-trips a vector, a string, an int and an int[] through a single
	// chunk held in an in-memory buffer, then verifies the chunk header,
	// the reported payload length, and every value read back.
	String fileName = "testSerialiser.dat";
	Vector3 vectorOut = new Vector3( 0.3f, 15.2f, -12.0f );
	String stringOut = "Some text here";
	int valueOut = 99;
	int[] arrayOut = new int[5] { 5, 4, 3, 2, 1 };
	uint chunkID = StreamSerializer.MakeIdentifier( "TEST" );
	byte[] buffer = new byte[1024];

	// write the data
	{
		Stream stream = new MemoryStream( buffer ); // arch.Create(fileName, true));
		using ( StreamSerializer serializer = new StreamSerializer( stream ) )
		{
			serializer.WriteChunkBegin( chunkID );
			serializer.Write( vectorOut );
			serializer.Write( stringOut );
			serializer.Write( valueOut );
			serializer.Write( arrayOut );
			serializer.WriteChunkEnd( chunkID );
		}
	}

	// read it back
	{
		Stream stream = new MemoryStream( buffer ); //arch.Open(fileName);
		using ( StreamSerializer serializer = new StreamSerializer( stream ) )
		{
			Chunk c = serializer.ReadChunkBegin();
			Assert.AreEqual( chunkID, c.id );
			// 3 floats (vector) + int + chars + 4 (string) + elements + int (array)
			Assert.AreEqual(
				sizeof( float ) * 3 + sizeof( int ) + stringOut.Length + 4 + sizeof( int ) * arrayOut.Length + sizeof( int ),
				(int)c.length );

			Vector3 vectorIn;
			String stringIn;
			int valueIn;
			int[] arrayIn;

			serializer.Read( out vectorIn );
			serializer.Read( out stringIn );
			serializer.Read( out valueIn );
			serializer.Read( out arrayIn );
			serializer.ReadChunkEnd( chunkID );

			Assert.AreEqual( vectorOut, vectorIn );
			Assert.AreEqual( stringOut, stringIn );
			Assert.AreEqual( valueOut, valueIn );
			Assert.AreEqual( arrayOut, arrayIn );
		}
	}
}
/// <summary>
/// Save this section to a stream
/// </summary>
/// <param name="stream">Serializer positioned where the section chunk should be written.</param>
public virtual void Save(StreamSerializer stream)
{
    stream.WriteChunkBegin(CHUNK_ID, CHUNK_VERSION);
    //name
    stream.Write(mName);
    //AABB
    stream.Write(mAABB);
    //page strategy name (identifies the strategy so it can be re-resolved on load)
    stream.Write(mStrategy.Name);
    //page strategy data (strategy writes its own chunk)
    mStrategyData.Save(stream);
    //save all pages
#warning TODO: save all pages.
    stream.WriteChunkEnd(CHUNK_ID);
}
/// <summary>
/// Save this strategy data as a single chunk. The write order below is the
/// on-disk format; it must match the corresponding Load exactly.
/// </summary>
/// <param name="stream">Serializer to write to.</param>
public void Save( StreamSerializer stream )
{
	stream.WriteChunkBegin( CHUNK_ID, CHUNK_VERSION );
	// paging mode, stored as a single byte
	stream.Write( (byte)this.mMode );
	// world-space origin of the grid
	stream.Write( this.mWorldOrigin );
	stream.Write( this.mCellSize );
	stream.Write( this.mLoadRadius );
	stream.Write( this.mHoldRadius );
	// inclusive cell index bounds of the grid
	stream.Write( this.mMinCellX );
	stream.Write( this.mMaxCellX );
	stream.Write( this.mMinCellY );
	stream.Write( this.mMaxCellY );
	stream.WriteChunkEnd( CHUNK_ID );
}
/// <summary>
/// Save this page to a stream: the page id followed by each content
/// collection (each collection writes its own chunk).
/// </summary>
/// <param name="stream">Serializer to write the page chunk to.</param>
public virtual void Save(StreamSerializer stream)
{
    stream.WriteChunkBegin(CHUNK_ID, CHUNK_VERSION);
    //page id
    stream.Write(mID.Value);
    //content collections
    foreach (PageContentCollection coll in mContentCollections)
    {
        coll.Save(stream);
    }
    stream.WriteChunkEnd(CHUNK_ID);
}
/// <summary>
/// Save world data to a serialiser
/// </summary>
/// <param name="stream">Serializer to write the world chunk to.</param>
public void Save(StreamSerializer stream)
{
    stream.WriteChunkBegin(CHUNK_ID, CHUNK_VERSION);
    //name
    stream.Write(mName);
    //sections (each section writes its own chunk)
    foreach (PagedWorldSection section in mSections.Values)
        section.Save(stream);
    stream.WriteChunkEnd(CHUNK_ID);
}
/// <summary>
/// Save the subclass chunk: each content item in list order, wrapped in the
/// subclass chunk header so a loader can skip it wholesale.
/// </summary>
/// <param name="stream">Serializer to write to.</param>
public override void Save( StreamSerializer stream )
{
	stream.WriteChunkBegin( SUBCLASS_CHUNK_ID, SUBCLASS_CHUNK_VERSION );
	// each content item serializes itself
	foreach ( var c in this.mContentList )
	{
		c.Save( stream );
	}
	stream.WriteChunkEnd( SUBCLASS_CHUNK_ID );
}
/// <summary>
/// Save the group definition (settings shared by all terrains in the group)
/// as a single chunk. Write order is the on-disk format.
/// </summary>
/// <param name="stream">Serializer to write to (ref to match the Terrain static helpers).</param>
public void SaveGroupDefinition(ref StreamSerializer stream)
{
    stream.WriteChunkBegin(ChunkID, ChunkVersion);
    // base details
    stream.Write(_alignment);
    stream.Write(_terrainSize);
    stream.Write(_terrainWorldSize);
    stream.Write(_filenamePrefix);
    stream.Write(_filenameExtension);
    stream.Write(_resourceGroup);
    stream.Write(_origin);
    // default import settings (those not duplicated by the above)
    stream.Write(_defaultImportData.ConstantHeight);
    stream.Write(_defaultImportData.InputBias);
    stream.Write(_defaultImportData.InputScale);
    stream.Write(_defaultImportData.MaxBatchSize);
    stream.Write(_defaultImportData.MinBatchSize);
    // layer declaration and default layer list are written by Terrain's helpers
    Terrain.WriteLayerDeclaration(_defaultImportData.LayerDeclaration, ref stream);
    Terrain.WriteLayerInstanceList(_defaultImportData.LayerList, ref stream);
    stream.WriteChunkEnd(ChunkID);
}
/// <summary>
/// Serialize a layer declaration: a count-prefixed list of sampler chunks
/// followed by a count-prefixed list of sampler-element chunks, all wrapped
/// in the declaration chunk.
/// </summary>
/// <param name="decl">Declaration to serialize.</param>
/// <param name="stream">Serializer to write to.</param>
public static void WriteLayerDeclaration( TerrainLayerDeclaration decl, ref StreamSerializer stream )
{
	// Layer declaration
	stream.WriteChunkBegin( TERRAINLAYERDECLARATION_CHUNK_ID, TERRAINLAYERDECLARATION_CHUNK_VERSION );

	// samplers: byte count, then one chunk per sampler
	stream.Write( (byte)decl.Samplers.Count );
	foreach ( var smp in decl.Samplers )
	{
		stream.WriteChunkBegin( TERRAINLAYERSAMPLER_CHUNK_ID, TERRAINLAYERSAMPLER_CHUNK_VERSION );
		stream.Write( smp.Alias );
		stream.Write( (byte)smp.Format );
		stream.WriteChunkEnd( TERRAINLAYERSAMPLER_CHUNK_ID );
	}

	// elements: byte count, then one chunk per element
	stream.Write( (byte)decl.Elements.Count );
	foreach ( var el in decl.Elements )
	{
		stream.WriteChunkBegin( TERRAINLAYERSAMPLERELEMENT_CHUNK_ID, TERRAINLAYERSAMPLERELEMENT_CHUNK_VERSION );
		stream.Write( el.Source );
		stream.Write( (byte)el.Semantic );
		stream.Write( el.ElementStart );
		stream.Write( el.ElementCount );
		stream.WriteChunkEnd( TERRAINLAYERSAMPLERELEMENT_CHUNK_ID );
	}

	stream.WriteChunkEnd( TERRAINLAYERDECLARATION_CHUNK_ID );
}
/// <summary>
/// Serialize a list of layer instances: a byte count prefix, then one chunk
/// per layer containing its world size and texture names.
/// </summary>
/// <param name="layers">Layer instances to serialize.</param>
/// <param name="stream">Serializer to write to.</param>
public static void WriteLayerInstanceList( List<LayerInstance> layers, ref StreamSerializer stream )
{
	stream.Write( (byte)layers.Count );
	foreach ( var layer in layers )
	{
		stream.WriteChunkBegin( TERRAINLAYERINSTANCE_CHUNK_ID, TERRAINLAYERINSTANCE_CHUNK_VERSION );
		stream.Write( layer.WorldSize );
		foreach ( var textureName in layer.TextureNames )
		{
			stream.Write( textureName );
		}
		stream.WriteChunkEnd( TERRAINLAYERINSTANCE_CHUNK_ID );
	}
}
/// <summary>
/// Save the subclass chunk: each content item in list order, wrapped in the
/// subclass chunk header so a loader can skip it wholesale.
/// </summary>
/// <param name="stream">Serializer to write to.</param>
public override void Save(StreamSerializer stream)
{
    stream.WriteChunkBegin(SUBCLASS_CHUNK_ID, SUBCLASS_CHUNK_VERSION);
    // each content item serializes itself
    foreach (PageContent c in mContentList)
        c.Save(stream);
    stream.WriteChunkEnd(SUBCLASS_CHUNK_ID);
}
/// <summary>
/// Save terrain data to a serializer as one TERRAIN chunk: basic geometry,
/// raw height samples, layer declaration and instances, packed blend maps,
/// derived-data chunks (normal map, colour map, lightmap, composite map),
/// deltas and finally the quadtree. Clears the modified flags on completion.
/// </summary>
/// <param name="stream">Serializer to write to.</param>
public void Save( StreamSerializer stream )
{
	// wait for any queued processes to finish
	WaitForDerivedProcesses();

	if ( IsHeightDataModified )
	{
		// When modifying, for efficiency we only increase the max deltas at each LOD,
		// we never reduce them (since that would require re-examining more samples)
		// Since we now save this data in the file though, we need to make sure we've
		// calculated the optimal
		var rect = new Rectangle();
		rect.Top = 0;
		rect.Bottom = this.mSize;
		rect.Left = 0;
		rect.Right = this.mSize;
		CalculateHeightDeltas( rect );
		FinalizeHeightDeltas( rect, false );
	}

	stream.WriteChunkBegin( TERRAIN_CHUNK_ID, TERRAIN_CHUNK_VERSION );

	// basic dimensions and placement
	var align = (byte)Alignment;
	stream.Write( align );
	stream.Write( this.mSize );
	stream.Write( this.mWorldSize );
	stream.Write( this.mMaxBatchSize );
	stream.Write( this.mMinBatchSize );
	stream.Write( this.mPos );

	// raw height samples
	for ( var i = 0; i < this.mHeightData.Length; i++ )
	{
		stream.Write( this.mHeightData[ i ] );
	}

	WriteLayerDeclaration( this.mLayerDecl, ref stream );

	//Layers
	CheckLayers( false );
	var numLayers = (byte)this.mLayers.Count;
	WriteLayerInstanceList( this.mLayers, ref stream );

	//packed layer blend data
	if ( this.mCpuBlendMapStorage.Count > 0 )
	{
		// save from CPU data if it's there, it means GPU data was never created
		stream.Write( this.mLayerBlendMapSize );

		// load packed cpu data
		var numBlendTex = (byte)GetBlendTextureCount( numLayers );
		for ( var i = 0; i < numBlendTex; ++i )
		{
			var fmt = GetBlendTextureFormat( (byte)i, numLayers );
			var channels = PixelUtil.GetNumElemBytes( fmt );
			var dataSz = channels*this.mLayerBlendMapSize*this.mLayerBlendMapSize;
			var pData = this.mCpuBlendMapStorage[ i ];
			stream.Write( pData );
			stream.Write( dataSz );
		}
	}
	else
	{
		if ( this.mLayerBlendMapSize != this.mLayerBlendSizeActual )
		{
			LogManager.Instance.Write(
				@"WARNING: blend maps were requested at a size larger than was supported on this hardware, which means the quality has been degraded" );
		}
		stream.Write( this.mLayerBlendSizeActual );
		var tmpData = new byte[this.mLayerBlendSizeActual*this.mLayerBlendSizeActual*4];
		// fix: dispose the wrapper (it was leaked; every sibling branch uses 'using')
		using ( var pTmpDataF = BufferBase.Wrap( tmpData ) )
		{
			foreach ( var tex in this.mBlendTextureList )
			{
				var dst = new PixelBox( this.mLayerBlendSizeActual, this.mLayerBlendSizeActual, 1, tex.Format, pTmpDataF );
				tex.GetBuffer().BlitToMemory( dst );
				int dataSz = PixelUtil.GetNumElemBytes( tex.Format )*this.mLayerBlendSizeActual*this.mLayerBlendSizeActual;
				stream.Write( tmpData );
				stream.Write( dataSz );
			}
		}
	}

	//other data
	//normals
	stream.WriteChunkBegin( TERRAINDERIVEDDATA_CHUNK_ID, TERRAINDERIVEDDATA_CHUNK_VERSION );
	stream.Write( "normalmap" );
	stream.Write( this.mSize );
	if ( this.mCpuTerrainNormalMap != null )
	{
		var aData = new byte[this.mSize*this.mSize*3];
		using ( var dest = BufferBase.Wrap( aData ) )
		{
			Memory.Copy( this.mCpuTerrainNormalMap.Data, dest, aData.Length );
		}
		// save from CPU data if it's there, it means GPU data was never created
		stream.Write( aData );
	}
	else
	{
		var tmpData = new byte[this.mSize*this.mSize*3];
		using ( var wrap = BufferBase.Wrap( tmpData ) )
		{
			var dst = new PixelBox( this.mSize, this.mSize, 1, PixelFormat.BYTE_RGB, wrap );
			TerrainNormalMap.GetBuffer().BlitToMemory( dst );
			stream.Write( tmpData );
		}
		tmpData = null;
	}
	stream.WriteChunkEnd( TERRAINDERIVEDDATA_CHUNK_ID );

	//color map
	if ( IsGlobalColorMapEnabled )
	{
		stream.WriteChunkBegin( TERRAINDERIVEDDATA_CHUNK_ID, TERRAINDERIVEDDATA_CHUNK_VERSION );
		stream.Write( "colormap" );
		stream.Write( GlobalColorMapSize );
		if ( this.mCpuColorMapStorage != null )
		{
			// save from CPU data if it's there, it means GPU data was never created
			stream.Write( this.mCpuColorMapStorage );
		}
		else
		{
			var aData = new byte[GlobalColorMapSize*GlobalColorMapSize*3];
			using ( var pDataF = BufferBase.Wrap( aData ) )
			{
				var dst = new PixelBox( GlobalColorMapSize, GlobalColorMapSize, 1, PixelFormat.BYTE_RGB, pDataF );
				GlobalColorMap.GetBuffer().BlitToMemory( dst );
			}
			stream.Write( aData );
		}
		stream.WriteChunkEnd( TERRAINDERIVEDDATA_CHUNK_ID );
	}

	//ligthmap
	if ( this.mLightMapRequired )
	{
		stream.WriteChunkBegin( TERRAINDERIVEDDATA_CHUNK_ID, TERRAINDERIVEDDATA_CHUNK_VERSION );
		stream.Write( "lightmap" );
		stream.Write( LightMapSize );
		if ( this.mCpuLightmapStorage != null )
		{
			// save from CPU data if it's there, it means GPU data was never created
			stream.Write( this.mCpuLightmapStorage );
		}
		else
		{
			var aData = new byte[LightMapSize*LightMapSize];
			using ( var pDataF = BufferBase.Wrap( aData ) )
			{
				var dst = new PixelBox( LightMapSize, LightMapSize, 1, PixelFormat.L8, pDataF );
				LightMap.GetBuffer().BlitToMemory( dst );
			}
			stream.Write( aData );
		}
		// fix: was WriteChunkEnd(TERRAIN_CHUNK_ID), which closed the wrong chunk
		// (the chunk opened above is TERRAINDERIVEDDATA_CHUNK_ID)
		stream.WriteChunkEnd( TERRAINDERIVEDDATA_CHUNK_ID );
	}

	// composite map
	if ( this.mCompositeMapRequired )
	{
		stream.WriteChunkBegin( TERRAINDERIVEDDATA_CHUNK_ID, TERRAINDERIVEDDATA_CHUNK_VERSION );
		stream.Write( "compositemap" );
		stream.Write( this.mCompositeMapSize );
		if ( this.mCpuCompositeMapStorage != null )
		{
			// save from CPU data if it's there, it means GPU data was never created
			stream.Write( this.mCpuCompositeMapStorage );
		}
		else
		{
			// composite map is 4 channel, 3x diffuse, 1x specular mask
			var aData = new byte[this.mCompositeMapSize*this.mCompositeMapSize*4];
			using ( var pDataF = BufferBase.Wrap( aData ) )
			{
				// fix: blit as 4-channel to match the 4-byte-per-pixel buffer
				// (was BYTE_RGB, which fills only 3 of the 4 bytes per pixel;
				// OGRE upstream uses PF_BYTE_RGBA here)
				var dst = new PixelBox( this.mCompositeMapSize, this.mCompositeMapSize, 1, PixelFormat.BYTE_RGBA, pDataF );
				CompositeMap.GetBuffer().BlitToMemory( dst );
			}
			stream.Write( aData );
		}
		stream.WriteChunkEnd( TERRAINDERIVEDDATA_CHUNK_ID );
	}

	//write deltas
	stream.Write( this.mDeltaDataPtr );

	//write the quadtree
	QuadTree.Save( stream );

	stream.WriteChunkEnd( TERRAIN_CHUNK_ID );

	IsModified = false;
	IsHeightDataModified = false;
}
/// <summary>
/// Save this section to a stream: name, bounds, scene-manager identity,
/// strategy name/data and subtype data in the section chunk, then each page
/// to its own separate file.
/// </summary>
/// <param name="stream">Serializer to write the section chunk to.</param>
public virtual void Save( StreamSerializer stream )
{
	stream.WriteChunkBegin( CHUNK_ID, CHUNK_VERSION );
	//name
	stream.Write( this.mName );
	//AABB
	stream.Write( this.mAABB );
	// SceneManager type & name
	stream.Write( this.mSceneMgr.TypeName );
	stream.Write( this.mSceneMgr.Name );
	//page strategy name
	stream.Write( this.mStrategy.Name );
	//page strategy data
	this.mStrategyData.Save( stream );
	// Save any data specific to a subtype of this class
	SaveSubtypeData( stream );
	stream.WriteChunkEnd( CHUNK_ID );
	// save all pages (in separate files)
	foreach ( var i in this.mPages )
	{
		i.Value.Save();
	}
}
/// <summary>
/// Save this page: the page id, then for each content collection a
/// declaration chunk (its type) followed by the collection's own data.
/// Clears the modified flag when done.
/// </summary>
/// <param name="stream">Serializer to write the page chunk to.</param>
public virtual void Save( StreamSerializer stream )
{
	stream.WriteChunkBegin( CHUNK_ID, CHUNK_VERSION );
	//page id
	stream.Write( this.mID.Value );
	//content collections
	foreach ( var coll in this.mContentCollections )
	{
		//declaration
		stream.WriteChunkBegin( CHUNK_CONTENTCOLLECTION_DECLARATION_ID );
		stream.Write( coll.Type );
		stream.WriteChunkEnd( CHUNK_CONTENTCOLLECTION_DECLARATION_ID );
		//data
		coll.Save( stream );
	}
	stream.WriteChunkEnd( CHUNK_ID );
	this.mModified = false;
}
/// <summary>
/// Save this data to a stream
/// </summary>
/// <param name="stream">Serializer to write to.</param>
public virtual void Save(StreamSerializer stream)
{
    stream.WriteChunkBegin(CHUNK_ID, CHUNK_VERSION);
    // paging mode, stored as a 32-bit int
    stream.Write((int)mMode);
    // world-space origin
    stream.Write(mWorldOrigin);
    stream.Write(mCellSize);
    stream.Write(mLoadRadius);
    stream.Write(mHoldRadius);
    stream.WriteChunkEnd(CHUNK_ID);
}
/// <summary>
/// Save the group definition (settings shared by all terrains in the group)
/// as a single chunk. Write order is the on-disk format.
/// </summary>
/// <param name="stream">Serializer to write to (ref to match the Terrain static helpers).</param>
public void SaveGroupDefinition( ref StreamSerializer stream )
{
	stream.WriteChunkBegin( ChunkID, ChunkVersion );
	// Base details
	stream.Write( this._alignment );
	stream.Write( this._terrainSize );
	stream.Write( this._terrainWorldSize );
	stream.Write( this._filenamePrefix );
	stream.Write( this._filenameExtension );
	stream.Write( this._resourceGroup );
	stream.Write( this._origin );
	// Default import settings (those not duplicated by the above)
	stream.Write( this._defaultImportData.ConstantHeight );
	stream.Write( this._defaultImportData.InputBias );
	stream.Write( this._defaultImportData.InputScale );
	stream.Write( this._defaultImportData.MaxBatchSize );
	stream.Write( this._defaultImportData.MinBatchSize );
	// layer declaration and default layer list are written by Terrain's helpers
	Terrain.WriteLayerDeclaration( this._defaultImportData.LayerDeclaration, ref stream );
	Terrain.WriteLayerInstanceList( this._defaultImportData.LayerList, ref stream );
	stream.WriteChunkEnd( ChunkID );
}
/// <summary>
/// Save world data to a serialiser: the world name, then for each section a
/// declaration chunk (type + name) followed by the section's own data.
/// </summary>
/// <param name="stream">Serializer to write the world chunk to.</param>
public void Save( StreamSerializer stream )
{
	stream.WriteChunkBegin( CHUNK_ID, CHUNK_VERSION );
	//name
	stream.Write( this.mName );
	//sections
	foreach ( var section in this.mSections.Values )
	{
		//declaration
		// fix: was stream.Write(CHUNK_SECTIONDECLARATION_ID), which wrote the id
		// as raw data and never opened the chunk that WriteChunkEnd below closes
		stream.WriteChunkBegin( CHUNK_SECTIONDECLARATION_ID );
		stream.Write( section.Type );
		stream.Write( section.Name );
		stream.WriteChunkEnd( CHUNK_SECTIONDECLARATION_ID );
		//data
		section.Save( stream );
	}
	stream.WriteChunkEnd( CHUNK_ID );
}