/// <summary>
/// Load this strategy data from a stream (returns true if successful).
/// Reads the grid mode, origin, cell size, load/hold radii and cell bounds.
/// </summary>
/// <param name="stream">Serializer positioned at a Grid2DPageStrategyData chunk.</param>
/// <returns>true if the expected chunk was found and read; false otherwise.</returns>
public bool Load( StreamSerializer stream )
{
	if ( stream.ReadChunkBegin( CHUNK_ID, CHUNK_VERSION, "Grid2DPageStrategyData" ) == null )
	{
		return false;
	}

	// mode is stored as a single byte
	byte modeValue = 0;
	stream.Read( out modeValue );
	this.mMode = (Grid2Mode)modeValue;

	// go through the property so any side effects of setting Origin run
	Vector3 worldOrigin;
	stream.Read( out worldOrigin );
	Origin = worldOrigin;

	stream.Read( out this.mCellSize );
	stream.Read( out this.mLoadRadius );
	stream.Read( out this.mHoldRadius );
	stream.Read( out this.mMinCellX );
	stream.Read( out this.mMaxCellX );
	stream.Read( out this.mMinCellY );
	stream.Read( out this.mMaxCellY );

	stream.ReadChunkEnd( CHUNK_ID );
	return true;
}
/// <summary>
/// Load this section from a stream (returns true if successful).
/// </summary>
/// <param name="stream">Serializer positioned at a PagedWorldSection chunk.</param>
/// <returns>true if the expected chunk was found; false otherwise.</returns>
public virtual bool Load(StreamSerializer stream)
{
    if (stream.ReadChunkBegin(CHUNK_ID, CHUNK_VERSION, "PagedWorldSection") == null)
    {
        return false;
    }

    //name
    stream.Read(out mName);

    // AABB
    stream.Read(out mAABB);

    //page strategy name
    string stratName = string.Empty;
    stream.Read(out stratName);
    SetStrategy(stratName);

    //page strategy data
    bool strategyDataOk = mStrategyData.Load(stream);
    if (!strategyDataOk)
    {
        // Fixed typo in log message ("contens" -> "contents")
        LogManager.Instance.Write("Error: PageStrategyData for section '" + mName +
                                  "' was not loaded correctly, check file contents");
    }

    stream.ReadChunkEnd(CHUNK_ID);
    return true;
}
/// <summary>
/// Load world data from a serialiser (returns true if successful).
/// </summary>
/// <param name="stream">Serializer positioned at a PageWorld chunk.</param>
public bool Load(StreamSerializer stream)
{
    if (stream.ReadChunkBegin(CHUNK_ID, CHUNK_VERSION, "PageWorld") == null)
    {
        return false;
    }

    // world name
    stream.Read(out mName);

    // read every section chunk that follows the name
    while (stream.NextChunkId == PagedWorldSection.CHUNK_ID)
    {
        PagedWorldSection section = new PagedWorldSection(this);
        if (section.Load(stream))
        {
            mSections.Add(section.Name, section);
        }
        else
        {
            // a broken section aborts the loop; the partial section is discarded
            section = null;
            break;
        }
    }

    stream.ReadChunkEnd(CHUNK_ID);
    return true;
}
/// <summary>
/// Reads this Page's content collections from a stream (returns true if successful).
/// Verifies the stored page ID matches this page before populating.
/// </summary>
/// <param name="stream">Serializer positioned at a Page chunk.</param>
/// <returns>true if the chunk matched this page and was read; false otherwise.</returns>
protected override bool PrepareImpl(StreamSerializer stream)
{
    //now do the real loading
    if (stream.ReadChunkBegin(CHUNK_ID, CHUNK_VERSION, "Page") == null)
    {
        return false;
    }

    // pageID check (we should know the ID we're expecting)
    int storedID = -1;
    stream.Read(out storedID);
    if (mID.Value != storedID)
    {
        LogManager.Instance.Write("Error: Tried to populate Page ID " + mID.Value +
                                  " with data corresponding to page ID " + storedID);
        stream.UndoReadChunk(CHUNK_ID);
        return false;
    }

    PageManager mgr = Manager;

    while (stream.NextChunkId == PageContentCollection.CHUNK_ID)
    {
        Chunk collChunk = stream.ReadChunkBegin();
        string factoryName = string.Empty;
        stream.Read(out factoryName);

        //Supported type?
        IPageContentCollectionFactory collFact = mgr.GetContentCollectionFactory(factoryName);
        if (collFact != null)
        {
            PageContentCollection collInst = collFact.CreateInstance();
            if (collInst.Prepare(stream))
            {
                AttachContentCollection(collInst);
            }
            else
            {
                // Fixed garbled log message (was "... in + " + ToString())
                LogManager.Instance.Write("Error preparing PageContentCollection type: " + factoryName +
                                          " in " + this.ToString());
                collFact.DestroyInstance(ref collInst);
            }
        }
        else
        {
            // Fixed garbled log message (was "... in + " + ToString())
            LogManager.Instance.Write("Unsupported PageContentCollection type: " + factoryName +
                                      " in " + this.ToString());
            //skip the unrecognized collection chunk
            stream.ReadChunkEnd(collChunk.id);
        }
    }

    return true;
}
/// <summary>
/// Loads the shared TerrainGroup definition (alignment, sizes, naming scheme,
/// origin and default import settings) from a stream.
/// </summary>
/// <param name="stream">Serializer positioned at a TerrainGroup chunk.</param>
/// <exception cref="AxiomException">If the stream does not contain TerrainGroup data.</exception>
public void LoadGroupDefinition(ref StreamSerializer stream)
{
    if (stream.ReadChunkBegin(ChunkID, ChunkVersion) == null)
    {
        // Removed redundant empty params array from the exception call.
        throw new AxiomException("Item not found!,Stream does not contain TerrainGroup data");
    }

    // Base details
    stream.Read(out _alignment);
    stream.Read(out _terrainSize);
    stream.Read(out _terrainWorldSize);
    stream.Read(out _filenamePrefix);
    stream.Read(out _filenameExtension);
    stream.Read(out _resourceGroup);
    stream.Read(out _origin);

    // Default import settings (those not duplicated by the above)
    stream.Read(out _defaultImportData.ConstantHeight);
    stream.Read(out _defaultImportData.InputBias);
    stream.Read(out _defaultImportData.InputScale);
    stream.Read(out _defaultImportData.MaxBatchSize);
    stream.Read(out _defaultImportData.MinBatchSize);
    _defaultImportData.LayerDeclaration = new TerrainLayerDeclaration();
    Terrain.ReadLayerDeclaration(ref stream, ref _defaultImportData.LayerDeclaration);
    _defaultImportData.LayerList = new List<LayerInstance>();
    Terrain.ReadLayerInstanceList(ref stream, _defaultImportData.LayerDeclaration.Samplers.Count,
                                  ref _defaultImportData.LayerList);

    // copy data that would have normally happened on construction
    _defaultImportData.TerrainAlign = _alignment;
    _defaultImportData.TerrainSize = _terrainSize;
    _defaultImportData.WorldSize = _terrainWorldSize;
    _defaultImportData.DeleteInputData = true;

    stream.ReadChunkEnd(ChunkID);
}
/// <summary>
/// Writes a Vector3, a string, an int and an int array into one chunk of an
/// in-memory StreamSerializer, then reads them back and checks every value
/// plus the recorded chunk id and payload length.
/// </summary>
public void BasicReadWriteTest()
{
	String fileName = "testSerialiser.dat";
	Vector3 expectedVector = new Vector3( 0.3f, 15.2f, -12.0f );
	String expectedString = "Some text here";
	int expectedValue = 99;
	int[] expectedArray = new int[5]
	                      {
	                      	5, 4, 3, 2, 1
	                      };

	uint chunkID = StreamSerializer.MakeIdentifier( "TEST" );
	byte[] buffer = new byte[1024];

	// write the data
	{
		Stream stream = new MemoryStream( buffer ); // arch.Create(fileName, true));
		using ( StreamSerializer serializer = new StreamSerializer( stream ) )
		{
			serializer.WriteChunkBegin( chunkID );

			serializer.Write( expectedVector );
			serializer.Write( expectedString );
			serializer.Write( expectedValue );
			serializer.Write( expectedArray );

			serializer.WriteChunkEnd( chunkID );
		}
	}

	// read it back
	{
		Stream stream = new MemoryStream( buffer ); //arch.Open(fileName);
		using ( StreamSerializer serializer = new StreamSerializer( stream ) )
		{
			Chunk c = serializer.ReadChunkBegin();

			Assert.AreEqual( chunkID, c.id );
			// payload: 3 floats + length-prefixed string + int + length-prefixed int array
			Assert.AreEqual(
				sizeof ( float ) * 3 + sizeof ( int ) + expectedString.Length + 4 + sizeof ( int ) * expectedArray.Length + sizeof ( int ),
				(int)c.length );

			Vector3 actualVector;
			String actualString;
			int actualValue;
			int[] actualArray;

			serializer.Read( out actualVector );
			serializer.Read( out actualString );
			serializer.Read( out actualValue );
			serializer.Read( out actualArray );

			serializer.ReadChunkEnd( chunkID );

			Assert.AreEqual( expectedVector, actualVector );
			Assert.AreEqual( expectedString, actualString );
			Assert.AreEqual( expectedValue, actualValue );
			Assert.AreEqual( expectedArray, actualArray );
		}
	}
}
/// <summary>
/// Prepares terrain CPU-side data from a serialized stream (returns true if successful).
/// Reads basic settings, height data, the layer declaration and instances, packed
/// blend-map data, optional derived data (normal/color/light/composite maps),
/// delta data and finally builds the quadtree.
/// </summary>
/// <param name="stream">Serializer positioned at a TERRAIN chunk.</param>
/// <returns>true if the chunk was found and fully read; false otherwise.</returns>
public bool Prepare( StreamSerializer stream )
{
	// Drop any state left over from a previous load before re-reading.
	FreeTemporaryResources();
	FreeCPUResources();

	CopyGlobalOptions();

	if ( stream.ReadChunkBegin( TERRAIN_CHUNK_ID, TERRAIN_CHUNK_VERSION ) == null )
	{
		return false;
	}

	// Basic settings; alignment is stored as a single byte.
	byte align;
	stream.Read( out align );
	Alignment = (Alignment)align;
	stream.Read( out this.mSize );
	stream.Read( out this.mWorldSize );
	stream.Read( out this.mMaxBatchSize );
	stream.Read( out this.mMinBatchSize );
	stream.Read( out this.mPos );
	RootSceneNode.Position = this.mPos;
	UpdateBaseScale();
	DetermineLodLevels();

	// Height data: one float per vertex of the mSize x mSize grid.
	int numVertices = this.mSize*this.mSize;
	this.mHeightData = new float[numVertices];
	stream.Read( out this.mHeightData );

	// layer declaration
	if ( !ReadLayerDeclaration( ref stream, ref this.mLayerDecl ) )
	{
		return false;
	}
	CheckDeclaration();

	// Layers
	if ( !ReadLayerInstanceList( ref stream, this.mLayerDecl.Elements.Count, ref this.mLayers ) )
	{
		return false;
	}
	DeriveUVMultipliers();

	// Packed layer blend data
	var numLayers = (byte)this.mLayers.Count;
	stream.Read( out this.mLayerBlendMapSize );
	this.mLayerBlendSizeActual = this.mLayerBlendMapSize; // for now, until we check

	//load packed CPU data
	var numBlendTex = GetBlendTextureCount( numLayers );
	for ( var i = 0; i < numBlendTex; ++i )
	{
		var fmt = GetBlendTextureFormat( (byte)i, numLayers );
		var channels = PixelUtil.GetNumElemBytes( fmt );
		// one square map of mLayerBlendMapSize^2 texels, 'channels' bytes each
		var dataSz = channels*this.mLayerBlendMapSize*this.mLayerBlendMapSize;
		var data = new byte[dataSz];
		stream.Read( out data );
		this.mCpuBlendMapStorage.AddRange( data );
	}

	//derived data (optional named sub-chunks until the terrain chunk ends)
	while ( !stream.IsEndOfChunk( TERRAIN_CHUNK_ID ) && stream.NextChunkId == TERRAINDERIVEDDATA_CHUNK_ID )
	{
		stream.ReadChunkBegin( TERRAINDERIVEDDATA_CHUNK_ID, TERRAINDERIVEDDATA_CHUNK_VERSION );
		//name
		var name = string.Empty;
		stream.Read( out name );
		ushort sz;
		stream.Read( out sz );
		if ( name == "normalmap" )
		{
			this.mNormalMapRequired = true;
			var data = new byte[sz*sz*3]; // 3 bytes per texel (RGB)
			stream.Read( out data );
			using ( var pDataF = BufferBase.Wrap( data ) )
			{
				this.mCpuTerrainNormalMap = new PixelBox( sz, sz, 1, PixelFormat.BYTE_RGB, pDataF );
			}
		}
		else if ( name == "colormap" )
		{
			IsGlobalColorMapEnabled = true;
			GlobalColorMapSize = sz;
			this.mCpuColorMapStorage = new byte[sz*sz*3]; // RGB
			stream.Read( out this.mCpuColorMapStorage );
		}
		else if ( name == "lightmap" )
		{
			this.mLightMapRequired = true;
			LightMapSize = sz;
			this.mCpuLightmapStorage = new byte[sz*sz]; // single channel
			stream.Read( out this.mCpuLightmapStorage );
		}
		else if ( name == "compositemap" )
		{
			this.mCompositeMapRequired = true;
			this.mCompositeMapSize = sz;
			this.mCpuCompositeMapStorage = new byte[sz*sz*4]; // 4 bytes per texel
			stream.Read( out this.mCpuCompositeMapStorage );
		}
		// NOTE(review): unknown derived-data names fall through without skipping
		// their payload — presumably ReadChunkEnd resynchronizes; confirm.
		stream.ReadChunkEnd( TERRAINDERIVEDDATA_CHUNK_ID );
	}

	//Load delta data
	var deltaData = new byte[ sizeof( float ) * numVertices ];
	stream.Read( out deltaData );
	this.mDeltaDataPtr = BufferBase.Wrap( deltaData );

	//Create and load quadtree
	QuadTree = new TerrainQuadTreeNode( this, null, 0, 0, this.mSize, (ushort)( NumLodLevels - 1 ), 0, 0 );
	QuadTree.Prepare();

	stream.ReadChunkEnd( TERRAIN_CHUNK_ID );

	DistributeVertexData();

	IsModified = false;
	IsHeightDataModified = false;

	return true;
}
/// <summary>
/// Prepares every content item in this collection from the stream; the result is
/// true only if all items prepared successfully.
/// </summary>
/// <param name="stream">Serializer positioned at a SimplePageContentCollection chunk.</param>
public override bool Prepare( StreamSerializer stream )
{
	if ( stream.ReadChunkBegin( SUBCLASS_CHUNK_ID, SUBCLASS_CHUNK_VERSION, "SimplePageContentCollection" ) == null )
	{
		return false;
	}

	// accumulate success across all items; keep preparing even after a failure
	var allPrepared = true;
	foreach ( var content in this.mContentList )
	{
		allPrepared &= content.Prepare( stream );
	}

	stream.ReadChunkEnd( SUBCLASS_CHUNK_ID );
	return allPrepared;
}
/// <summary>
/// Reads a terrain layer declaration (samplers followed by sampler elements)
/// from a stream into <paramref name="targetDecl"/> (returns true if successful).
/// </summary>
/// <param name="stream">Serializer positioned at a layer declaration chunk.</param>
/// <param name="targetDecl">Declaration whose Samplers/Elements lists are replaced.</param>
public static bool ReadLayerDeclaration( ref StreamSerializer stream, ref TerrainLayerDeclaration targetDecl )
{
	if ( stream.ReadChunkBegin( TERRAINLAYERDECLARATION_CHUNK_ID, TERRAINLAYERDECLARATION_CHUNK_VERSION ) == null )
	{
		return false;
	}

	// samplers
	byte samplerCount;
	stream.Read( out samplerCount );
	targetDecl.Samplers = new List<TerrainLayerSampler>( samplerCount );
	for ( var idx = 0; idx < samplerCount; ++idx )
	{
		if ( stream.ReadChunkBegin( TERRAINLAYERSAMPLER_CHUNK_ID, TERRAINLAYERSAMPLER_CHUNK_VERSION ) == null )
		{
			return false;
		}

		var entry = new TerrainLayerSampler();
		stream.Read( out entry.Alias );
		// pixel format is serialized as a single byte
		byte formatValue;
		stream.Read( out formatValue );
		entry.Format = (PixelFormat)formatValue;
		stream.ReadChunkEnd( TERRAINLAYERSAMPLER_CHUNK_ID );
		targetDecl.Samplers.Add( entry );
	}

	// elements
	byte elementCount;
	stream.Read( out elementCount );
	targetDecl.Elements = new List<TerrainLayerSamplerElement>( elementCount );
	for ( var idx = 0; idx < elementCount; ++idx )
	{
		if ( stream.ReadChunkBegin( TERRAINLAYERSAMPLERELEMENT_CHUNK_ID, TERRAINLAYERSAMPLERELEMENT_CHUNK_VERSION ) == null )
		{
			return false;
		}

		var element = new TerrainLayerSamplerElement();
		stream.Read( out element.Source );
		// semantic is serialized as a single byte
		byte semanticValue;
		stream.Read( out semanticValue );
		element.Semantic = (TerrainLayerSamplerSemantic)semanticValue;
		stream.Read( out element.ElementStart );
		stream.Read( out element.ElementCount );
		stream.ReadChunkEnd( TERRAINLAYERSAMPLERELEMENT_CHUNK_ID );
		targetDecl.Elements.Add( element );
	}

	stream.ReadChunkEnd( TERRAINLAYERDECLARATION_CHUNK_ID );
	return true;
}
/// <summary>
/// Reads a list of layer instances from a stream; each instance carries a world
/// size plus one texture name per declared sampler (returns true if successful).
/// </summary>
/// <param name="stream">Serializer positioned at the layer-instance data.</param>
/// <param name="numSamplers">Number of texture names to read per layer.</param>
/// <param name="targetLayers">Replaced with the freshly read list.</param>
public static bool ReadLayerInstanceList( ref StreamSerializer stream, int numSamplers, ref List<LayerInstance> targetLayers )
{
	// layer count is serialized as a single byte
	byte layerCount;
	stream.Read( out layerCount );
	targetLayers = new List<LayerInstance>( layerCount );
	for ( var layerIdx = 0; layerIdx < layerCount; ++layerIdx )
	{
		if ( stream.ReadChunkBegin( TERRAINLAYERINSTANCE_CHUNK_ID, TERRAINLAYERINSTANCE_CHUNK_VERSION ) == null )
		{
			return false;
		}

		var instance = new LayerInstance();
		stream.Read( out instance.WorldSize );
		instance.TextureNames = new List<string>( numSamplers );
		for ( var samplerIdx = 0; samplerIdx < numSamplers; ++samplerIdx )
		{
			string textureName;
			stream.Read( out textureName );
			instance.TextureNames.Add( textureName );
		}

		stream.ReadChunkEnd( TERRAINLAYERINSTANCE_CHUNK_ID );
		targetLayers.Add( instance );
	}

	return true;
}
/// <summary>
/// Load this section from a stream (returns true if successful).
/// Restores the name, bounds, scene manager, page strategy and subtype data.
/// </summary>
/// <param name="stream">Serializer positioned at a PagedWorldSection chunk.</param>
public virtual bool Load( StreamSerializer stream )
{
	if ( stream.ReadChunkBegin( CHUNK_ID, CHUNK_VERSION, "PagedWorldSection" ) == null )
	{
		return false;
	}

	//name
	stream.Read( out this.mName );

	// AABB
	stream.Read( out this.mAABB );

	// SceneManager type & instance: reuse an existing manager when one with
	// the stored instance name is already registered.
	string smType, smInstanceName;
	stream.Read( out smType );
	stream.Read( out smInstanceName );
	SceneManager sm = Root.Instance.HasSceneManager( smInstanceName )
	                  	? Root.Instance.GetSceneManager( smInstanceName )
	                  	: Root.Instance.CreateSceneManager( smType, smInstanceName );
	SceneManager = sm;

	//page strategy name
	string stratName = string.Empty;
	stream.Read( out stratName );
	SetStrategy( stratName );

	//page strategy data
	bool strategyDataOk = this.mStrategyData.Load( stream );
	if ( !strategyDataOk )
	{
		// Fixed typo in log message ("contens" -> "contents")
		LogManager.Instance.Write( "Error: PageStrategyData for section '{0}' was not loaded correctly, check file contents", this.mName );
	}

	// Load any data specific to a subtype of this class
	LoadSubtypeData( stream );

	stream.ReadChunkEnd( CHUNK_ID );
	return true;
}
/// <summary>
/// Reads this Page's data from a stream into <paramref name="dataToPopulate"/>
/// (returns true if successful). Verifies the stored page ID matches this page
/// before populating.
/// </summary>
/// <param name="stream">Serializer positioned at a Page chunk.</param>
/// <param name="dataToPopulate">Receives the prepared content collections.</param>
protected virtual bool PrepareImpl( StreamSerializer stream, ref PageData dataToPopulate )
{
	//now do the real loading
	if ( stream.ReadChunkBegin( CHUNK_ID, CHUNK_VERSION, "Page" ) == null )
	{
		return false;
	}

	// pageID check (we should know the ID we're expecting)
	int storedID = -1;
	stream.Read( out storedID );
	if ( this.mID.Value != storedID )
	{
		LogManager.Instance.Write( "Error: Tried to populate Page ID {0} with data corresponding to page ID {1}", this.mID.Value, storedID );
		stream.UndoReadChunk( CHUNK_ID );
		return false;
	}

	PageManager mgr = Manager;

	// Each content collection is preceded by a small declaration chunk naming
	// the factory that can instantiate it.
	while ( stream.NextChunkId == Page.CHUNK_CONTENTCOLLECTION_DECLARATION_ID )
	{
		Chunk collChunk = stream.ReadChunkBegin();
		string factoryName;
		stream.Read( out factoryName );
		stream.ReadChunkEnd( CHUNK_CONTENTCOLLECTION_DECLARATION_ID );
		//Supported type?
		IPageContentCollectionFactory collFact = mgr.GetContentCollectionFactory( factoryName );
		if ( collFact != null )
		{
			PageContentCollection collInst = collFact.CreateInstance();
			if ( collInst.Prepare( stream ) )
			{
				// defer attachment: collected here, applied by the caller
				dataToPopulate.collectionsToAdd.Add( collInst );
			}
			else
			{
				LogManager.Instance.Write( "Error preparing PageContentCollection type: {0} in {1}", factoryName, ToString() );
				collFact.DestroyInstance( ref collInst );
			}
		}
		else
		{
			LogManager.Instance.Write( "Unsupported PageContentCollection type: {0} in {1}", factoryName, ToString() );
			//skip
			// NOTE(review): the declaration chunk was already ended above, so this
			// second ReadChunkEnd with the same id looks suspicious — presumably it
			// skips past the collection's data; confirm against StreamSerializer.
			stream.ReadChunkEnd( collChunk.id );
		}
	}

	this.mModified = false;
	return true;
}
/// <summary>
/// Load this data from a stream (returns true if successful)
/// </summary>
/// <param name="stream">Serializer positioned at a Grid2DPageStrategyData chunk.</param>
/// <returns>true if the expected chunk was found and read; false otherwise.</returns>
public virtual bool Load(StreamSerializer stream)
{
    if (stream.ReadChunkBegin(CHUNK_ID, CHUNK_VERSION, "Grid2DPageStrategyData") == null)
        return false;

    // Mode is serialized as a single byte (the companion Load overload reads a
    // byte here); reading a 4-byte int would desynchronize every later field.
    byte readMode = 0;
    stream.Read(out readMode);
    mMode = (Grid2Mode)readMode;

    Vector3 orgin = new Vector3();
    stream.Read(out orgin);
    Origin = orgin;

    stream.Read(out mCellSize);
    stream.Read(out mLoadRadius);
    stream.Read(out mHoldRadius);

    // Close the chunk we opened. Previously this passed a bogus id of 0 and
    // then overwrote CHUNK_ID with it, corrupting the chunk identifier.
    stream.ReadChunkEnd(CHUNK_ID);
    return true;
}
/// <summary>
/// Loads the shared TerrainGroup definition (alignment, sizes, naming scheme,
/// origin and default import settings) from a stream.
/// </summary>
/// <param name="stream">Serializer positioned at a TerrainGroup chunk.</param>
/// <exception cref="AxiomException">If the stream does not contain TerrainGroup data.</exception>
public void LoadGroupDefinition( ref StreamSerializer stream )
{
	if ( stream.ReadChunkBegin( ChunkID, ChunkVersion ) == null )
	{
		throw new AxiomException( "Item not found!,Stream does not contain TerrainGroup data" );
	}

	// Base details
	stream.Read( out this._alignment );
	stream.Read( out this._terrainSize );
	stream.Read( out this._terrainWorldSize );
	stream.Read( out this._filenamePrefix );
	stream.Read( out this._filenameExtension );
	stream.Read( out this._resourceGroup );
	stream.Read( out this._origin );

	// Default import settings (those not duplicated by the above)
	stream.Read( out this._defaultImportData.ConstantHeight );
	stream.Read( out this._defaultImportData.InputBias );
	stream.Read( out this._defaultImportData.InputScale );
	stream.Read( out this._defaultImportData.MaxBatchSize );
	stream.Read( out this._defaultImportData.MinBatchSize );
	// layer declaration and instances are nested chunks read by Terrain helpers
	this._defaultImportData.LayerDeclaration = new TerrainLayerDeclaration();
	Terrain.ReadLayerDeclaration( ref stream, ref this._defaultImportData.LayerDeclaration );
	this._defaultImportData.LayerList = new List<LayerInstance>();
	Terrain.ReadLayerInstanceList( ref stream, this._defaultImportData.LayerDeclaration.Samplers.Count, ref this._defaultImportData.LayerList );

	// copy data that would have normally happened on construction
	this._defaultImportData.TerrainAlign = this._alignment;
	this._defaultImportData.TerrainSize = this._terrainSize;
	this._defaultImportData.WorldSize = this._terrainWorldSize;
	this._defaultImportData.DeleteInputData = true;

	stream.ReadChunkEnd( ChunkID );
}
/// <summary>
/// Load world data from a serializer (returns true if successful).
/// Reads the world name, then one declaration chunk plus data block per section.
/// </summary>
/// <param name="stream">Serializer positioned at a PageWorld chunk.</param>
public bool Load( StreamSerializer stream )
{
	if ( stream.ReadChunkBegin( CHUNK_ID, CHUNK_VERSION, "PageWorld" ) == null )
	{
		return false;
	}

	// world name
	stream.Read( out this.mName );

	// each section is announced by a declaration chunk carrying its type and name
	while ( stream.NextChunkId == PagedWorld.CHUNK_SECTIONDECLARATION_ID )
	{
		stream.ReadChunkBegin();
		string typeName, secName;
		stream.Read( out typeName );
		stream.Read( out secName );
		stream.ReadChunkEnd( CHUNK_SECTIONDECLARATION_ID );

		// Scene manager will be loaded
		PagedWorldSection section = CreateSection( null, typeName, secName );
		if ( !section.Load( stream ) )
		{
			// failed to load: tear the section back down
			DestroySection( section );
		}
	}

	stream.ReadChunkEnd( CHUNK_ID );
	return true;
}