Example #1
0
        /// <summary>
        /// Compiles the given mixin tree to effect bytecode via the base compiler, persists the
        /// result in the object database (plus a mixinObjectId -> bytecodeId mapping) and registers
        /// it in the in-memory bytecode cache.
        /// </summary>
        /// <param name="mixinTree">Shader mixin tree to compile.</param>
        /// <param name="effectParameters">Effect parameters forwarded to the base compiler.</param>
        /// <param name="compilerParameters">Compiler parameters forwarded to the base compiler; also logged for new compilations.</param>
        /// <param name="mixinObjectId">Id of the mixin; keys both the in-flight compilation set and the database cache entry.</param>
        /// <param name="database">Database used to persist the compiled bytecode.</param>
        /// <param name="compiledUrl">Content url under which the bytecode id is indexed.</param>
        /// <returns>The compilation result, or a result carrying only the log when compilation produced errors.</returns>
        private EffectBytecodeCompilerResult CompileBytecode(ShaderMixinSource mixinTree, EffectCompilerParameters effectParameters, CompilerParameters compilerParameters, ObjectId mixinObjectId, DatabaseFileProvider database, string compiledUrl)
        {
            // Open the database for writing
            var log       = new LoggerResult();
            var effectLog = GlobalLogger.GetLogger("EffectCompilerCache");

            // Note: this compiler is expected to not be async and directly write stuff in localLogger
            var compiledShader = base.Compile(mixinTree, effectParameters, compilerParameters).WaitForResult();

            compiledShader.CompilationLog.CopyTo(log);

            // If there are any errors, return immediately
            if (log.HasErrors)
            {
                // Unregister the in-flight compilation before bailing out
                lock (compilingShaders)
                {
                    compilingShaders.Remove(mixinObjectId);
                }

                log.CopyTo(effectLog);
                return(new EffectBytecodeCompilerResult(null, log));
            }

            // Compute the bytecodeId
            var newBytecodeId = compiledShader.Bytecode.ComputeId();

            // Check if we really need to store the bytecode
            lock (bytecodes)
            {
                // Using custom serialization to the database to store an object with a custom id
                // TODO: Check if we really need to write the bytecode everytime even if id is not changed
                var memoryStream = new MemoryStream();
                compiledShader.Bytecode.WriteTo(memoryStream);

                // Write current cache at the end (not part of the pure bytecode, but we use this as meta info)
                var writer = new BinarySerializationWriter(memoryStream);
                writer.Write(CurrentCache);

                memoryStream.Position = 0;
                database.ObjectDatabase.Write(memoryStream, newBytecodeId, true);
                database.ContentIndexMap[compiledUrl] = newBytecodeId;

                // Save bytecode Id to the database cache as well
                // (reuse the same stream: truncate it and write only the raw hash bytes)
                memoryStream.SetLength(0);
                memoryStream.Write((byte[])newBytecodeId, 0, ObjectId.HashSize);
                memoryStream.Position = 0;
                database.ObjectDatabase.Write(memoryStream, mixinObjectId, true);

                if (!bytecodes.ContainsKey(newBytecodeId))
                {
                    log.Verbose($"New effect compiled #{effectCompileCount} [{mixinObjectId}] (db: {newBytecodeId})\r\n{compilerParameters?.ToStringPermutationsDetailed()}");
                    Interlocked.Increment(ref effectCompileCount);

                    // Replace or add new bytecode
                    bytecodes[newBytecodeId] = new KeyValuePair <EffectBytecode, EffectBytecodeCacheLoadSource>(compiledShader.Bytecode, EffectBytecodeCacheLoadSource.JustCompiled);
                }
            }

            // Compilation succeeded: unregister the in-flight compilation (mirrors the error path above)
            lock (compilingShaders)
            {
                compilingShaders.Remove(mixinObjectId);
            }

            log.CopyTo(effectLog);
            return(compiledShader);
        }
Example #2
0
File: Command.cs Project: rohitshe/Code
 /// <summary>
 /// Writes this command's parameters into <paramref name="writer"/> so they contribute to the
 /// command's parameter hash. The base implementation writes nothing; overrides append whatever
 /// state should affect the hash.
 /// </summary>
 /// <param name="writer">Writer the parameter data is serialized into.</param>
 protected virtual void ComputeParameterHash(BinarySerializationWriter writer)
 {
     // Do nothing by default
 }
Example #3
0
        /// <summary>
        /// Adds this command's parameters to the hash: the base contribution first, then this
        /// instance's full state via the extended serializer.
        /// </summary>
        /// <param name="writer">Writer the parameter data is serialized into.</param>
        protected override void ComputeParameterHash(BinarySerializationWriter writer)
        {
            base.ComputeParameterHash(writer);

            // Serialize the whole command object so any serialized field change alters the hash.
            writer.SerializeExtended(this, ArchiveMode.Serialize);
        }
Example #4
0
        /// <summary>
        /// Adds this command's parameters to the hash: the base contribution followed by the
        /// command id.
        /// </summary>
        /// <param name="writer">Writer the parameter data is serialized into.</param>
        protected override void ComputeParameterHash(BinarySerializationWriter writer)
        {
            base.ComputeParameterHash(writer);

            // Only commandId contributes beyond the base implementation.
            writer.Write(commandId);
        }
Example #5
0
File: ScriptSync.cs Project: cg123/xenko
        /// <summary>
        /// Serializes <paramref name="obj"/> to a byte array using a <see cref="BinarySerializationWriter"/>.
        /// </summary>
        /// <param name="obj">Object to serialize (may be null; handled by the extended serializer).</param>
        /// <param name="serializer">Optional serializer that overrides the writer's default context serializer.</param>
        /// <returns>The serialized bytes.</returns>
        private static byte[] Encode(object obj, Serializer serializer = null)
        {
            // Fix: the MemoryStream was never disposed; dispose it deterministically.
            // ToArray() is taken inside the using, before the stream is closed.
            using (var result = new MemoryStream())
            {
                var stream = new BinarySerializationWriter(result);
                if (serializer != null)
                    stream.Context.Serializer = serializer;
                stream.SerializeExtended(null, ref obj, ArchiveMode.Serialize);

                return result.ToArray();
            }
        }
Example #6
0
        /// <summary>
        /// Serves a single editor client: registers packet handlers for file transfer and entity
        /// changes, builds the view-model contexts exposed to the client (entities, render passes,
        /// scripts, effects, assets, editor commands), then loops forever applying incoming change
        /// packets and sending serialized view-model updates back.
        /// NOTE(review): the trailing while(true) loop has no exit, so this task never completes.
        /// </summary>
        /// <param name="engineContext">Engine services (entities, rendering, scripts, assets, scheduler).</param>
        /// <param name="socketContext">Main socket used for request/response packets and periodic updates.</param>
        /// <param name="socketContextAsync">Secondary socket used by the background thread to push async value updates.</param>
        public async static Task ProcessClient(EngineContext engineContext, SocketContext socketContext, SocketContext socketContextAsync)
        {
            // File download: read the whole virtual file and send it back in one packet.
            socketContext.AddPacketHandler<DownloadFileQuery>(
                async (packet) =>
                {
                    var stream = await VirtualFileSystem.OpenStreamAsync(packet.Url, VirtualFileMode.Open, VirtualFileAccess.Read);
                    var data = new byte[stream.Length];
                    // NOTE(review): a single ReadAsync call is not guaranteed to fill the buffer —
                    // confirm these virtual streams always complete in one read.
                    await stream.ReadAsync(data, 0, data.Length);
                    stream.Close();
                    socketContext.Send(new DownloadFileAnswer { StreamId = packet.StreamId, Data = data });
                });

            // File upload: write the received bytes to the virtual file system.
            socketContext.AddPacketHandler<UploadFilePacket>(
                async (packet) =>
                {
                    var stream = await VirtualFileSystem.OpenStreamAsync(packet.Url, VirtualFileMode.Create, VirtualFileAccess.Write);
                    await stream.WriteAsync(packet.Data, 0, packet.Data.Length);
                    stream.Close();
                });

            var viewModelGlobalContext = new ViewModelGlobalContext();

            // Context for the currently selected entities; its Root is assigned later on selection.
            selectedEntitiesContext = new ViewModelContext(viewModelGlobalContext);
            selectedEntitiesContext.ChildrenPropertyEnumerators.Add(new EntityComponentEnumerator(engineContext));
            selectedEntitiesContext.ChildrenPropertyEnumerators.Add(new RenderPassPluginEnumerator());
            selectedEntitiesContext.ChildrenPropertyEnumerators.Add(new ChildrenPropertyInfoEnumerator());
            //selectedEntitiesContext.ChildrenPropertyEnumerators.Add(new EffectPropertyEnumerator(engineContext));

            // Render pass hierarchy, rooted at the engine's root render pass.
            var renderPassHierarchyContext = new ViewModelContext(viewModelGlobalContext);
            renderPassHierarchyContext.ChildrenPropertyEnumerators.Add(new RenderPassHierarchyEnumerator());
            renderPassHierarchyContext.Root = new ViewModelNode("Root", engineContext.RenderContext.RootRenderPass).GenerateChildren(renderPassHierarchyContext);

            // Render pass plugins, lazily enumerated from the render context.
            var renderPassPluginsContext = new ViewModelContext(viewModelGlobalContext);
            renderPassPluginsContext.ChildrenPropertyEnumerators.Add(new RenderPassPluginsEnumerator { SelectedRenderPassPluginContext = selectedEntitiesContext });
            renderPassPluginsContext.Root = new ViewModelNode("Root", new EnumerableViewModelContent<ViewModelReference>(
                () => engineContext.RenderContext.RenderPassPlugins.Select(x => new ViewModelReference(x, true))));


            // Entity hierarchy: only root entities (no transformation parent) appear at the top level.
            var entityHierarchyEnumerator = new EntityHierarchyEnumerator(engineContext.EntityManager, selectedEntitiesContext);
            var entityHierarchyContext = new ViewModelContext(viewModelGlobalContext);
            entityHierarchyContext.ChildrenPropertyEnumerators.Add(entityHierarchyEnumerator);
            entityHierarchyContext.ChildrenPropertyEnumerators.Add(new ChildrenPropertyInfoEnumerator());
            entityHierarchyContext.Root = new ViewModelNode("EntityHierarchyRoot", new EnumerableViewModelContent<ViewModelReference>(
                        () => engineContext.EntityManager.Entities
                                           .Where(x =>
                                           {
                                               var transformationComponent = x.Transformation;
                                               return (transformationComponent == null || transformationComponent.Parent == null);
                                           })
                                           .Select(x => new ViewModelReference(x, true))));

            // Propagate selection changes from the hierarchy enumerator to the selected-entities context.
            entityHierarchyEnumerator.SelectedEntities.CollectionChanged += (sender, args) =>
                {
                    SelectEntity(entityHierarchyEnumerator.SelectedEntities);
                };
            //entityHierarchyContext.Root.Children.Add(new ViewModelNode("SelectedItems", EnumerableViewModelContent.FromUnaryLambda<ViewModelReference, ViewModelReference>(new NullViewModelContent(),
            //    (x) => { return new[] { new ViewModelReference(pickingSystem.SelectedEntity) }; })));
                /*(value) =>
                    {
                        var entityModelView = value != null ? entityHierarchyContext.GetModelView(value.Guid) : null;
                        var entity = entityModelView != null ? (Entity)entityModelView.NodeValue : null;
                        SelectEntity(entity);
                    })));*/
            // Drag & drop command: re-parent an entity inside the hierarchy.
            entityHierarchyContext.Root.Children.Add(new ViewModelNode("DropEntity", new RootViewModelContent((ExecuteCommand)((viewModel2, parameter) =>
                {
                    var dropParameters = (DropCommandParameters)parameter;

                    var movedItem = dropParameters.Data is Guid ? entityHierarchyContext.GetModelView((Guid)dropParameters.Data) : null;
                    var newParent = dropParameters.Parent is Guid ? entityHierarchyContext.GetModelView((Guid)dropParameters.Parent) : null;

                    if (newParent == null || movedItem == null)
                        return;

                    var parent = ((Entity)newParent.NodeValue).Transformation;
                    if (dropParameters.TargetIndex > parent.Children.Count)
                        return;

                    // Detach first, then insert at the requested index under the new parent.
                    var transformationComponent = ((Entity)movedItem.NodeValue).Transformation;
                    transformationComponent.Parent = null;
                    parent.Children.Insert(dropParameters.TargetIndex, transformationComponent);
                }))));

            // Drag & drop command: instantiate a prefab asset into the scene.
            entityHierarchyContext.Root.Children.Add(new ViewModelNode("DropAsset", new RootViewModelContent((ExecuteCommand)(async (viewModel2, parameter) =>
                {
                    var dropParameters = (DropCommandParameters)parameter;

                    var assetUrl = (string)dropParameters.Data;
                    /*var newParent = entityHierarchyContext.GetModelView((Guid)dropParameters.Parent);

                    if (newParent == null || assetUrl == null)
                        return;

                    var parent = ((Entity)newParent.NodeValue).Transformation;
                    if (dropParameters.ItemIndex > parent.Children.Count)
                        return;*/

                    // Run the actual load/instantiate on the engine scheduler (engine thread).
                    engineContext.Scheduler.Add(async () =>
                    {
                        // Load prefab entity
                        var loadedEntityPrefab = await engineContext.AssetManager.LoadAsync<Entity>(assetUrl + "#");

                        // Build another entity from prefab
                        var loadedEntity = Prefab.Inherit(loadedEntityPrefab);

                        // Add it to scene
                        engineContext.EntityManager.AddEntity(loadedEntity);

                        if (loadedEntity.ContainsKey(AnimationComponent.Key))
                        {
                            Scheduler.Current.Add(() => AnimScript.AnimateFBXModel(engineContext, loadedEntity));
                        }
                    });
                }))));

            // Script assemblies plus a command to run a script by "TypeName.MethodName".
            var scriptEngineContext = new ViewModelContext(viewModelGlobalContext);
            scriptEngineContext.ChildrenPropertyEnumerators.Add(new ScriptAssemblyEnumerator(engineContext));
            scriptEngineContext.ChildrenPropertyEnumerators.Add(new ChildrenPropertyInfoEnumerator());
            scriptEngineContext.Root = new ViewModelNode(new EnumerableViewModelContent<ViewModelReference>(
                        () => engineContext.ScriptManager.ScriptAssemblies.Select(x => new ViewModelReference(x, true))));
            scriptEngineContext.Root.Children.Add(new ViewModelNode("RunScript", new RootViewModelContent((ExecuteCommand)(async (viewModel2, parameter) =>
                {
                    var scriptName = (string)parameter;
                    var matchingScript = engineContext.ScriptManager.Scripts.Where(x => x.TypeName + "." + x.MethodName == scriptName);
                    if (matchingScript.Any())
                    {
                        var scriptEntry = matchingScript.Single();
                        var microThread = engineContext.ScriptManager.RunScript(scriptEntry, null);
                    }
                }))));

            // Currently running micro-threads.
            var runningScriptsContext = new ViewModelContext(viewModelGlobalContext);
            runningScriptsContext.ChildrenPropertyEnumerators.Add(new MicroThreadEnumerator(selectedEntitiesContext));
            runningScriptsContext.ChildrenPropertyEnumerators.Add(new ChildrenPropertyInfoEnumerator());
            runningScriptsContext.Root = new ViewModelNode("MicroThreads", new EnumerableViewModelContent<ViewModelReference>(
                    () => engineContext.Scheduler.MicroThreads.Select(x => new ViewModelReference(x, true))
                ));

            // Loaded effects.
            var effectsContext = new ViewModelContext(viewModelGlobalContext);
            effectsContext.ChildrenPropertyEnumerators.Add(new EffectEnumerator(selectedEntitiesContext));
            effectsContext.ChildrenPropertyEnumerators.Add(new ChildrenPropertyInfoEnumerator());
            effectsContext.Root = new ViewModelNode("Effects", new EnumerableViewModelContent<ViewModelReference>(
                    () => engineContext.RenderContext.Effects.Select(x => new ViewModelReference(x, true))
                ));
            //effectsContext.Root.Children.Add(new ViewModelNode("PluginDefinitions", new RootViewModelContent()));

            // Asset browser tree.
            var assetBrowserContext = new ViewModelContext(viewModelGlobalContext);
            assetBrowserContext.ChildrenPropertyEnumerators.Add(new AssetBrowserEnumerator(engineContext));
            assetBrowserContext.ChildrenPropertyEnumerators.Add(new ChildrenPropertyInfoEnumerator());
            assetBrowserContext.Root = new ViewModelNode("Root", "Root").GenerateChildren(assetBrowserContext);

            // Editor commands switching the gizmo mode of the picking system.
            var editorContext = new ViewModelContext(viewModelGlobalContext);
            editorContext.Root = new ViewModelNode("Root");
            editorContext.Root.Children.Add(new ViewModelNode("SwitchSelectionMode", new CommandViewModelContent((sender, parameters) => { pickingSystem.ActiveGizmoActionMode = PickingSystem.GizmoAction.None; })));
            editorContext.Root.Children.Add(new ViewModelNode("SwitchTranslationMode", new CommandViewModelContent((sender, parameters) => { pickingSystem.ActiveGizmoActionMode = PickingSystem.GizmoAction.Translation; })));
            editorContext.Root.Children.Add(new ViewModelNode("SwitchRotationMode", new CommandViewModelContent((sender, parameters) => { pickingSystem.ActiveGizmoActionMode = PickingSystem.GizmoAction.Rotation; })));

            // All contexts sent to the client, keyed by group name; each paired with its serialization state.
            var contexts = new Dictionary<string, Tuple<ViewModelContext, ViewModelState>>();
            contexts.Add("Editor", Tuple.Create(editorContext, new ViewModelState()));
            contexts.Add("RenderPassPlugins", Tuple.Create(renderPassPluginsContext, new ViewModelState()));
            contexts.Add("RenderPasses", Tuple.Create(renderPassHierarchyContext, new ViewModelState()));
            contexts.Add("SelectedEntities", Tuple.Create(selectedEntitiesContext, new ViewModelState()));
            contexts.Add("EntityHierarchy", Tuple.Create(entityHierarchyContext, new ViewModelState()));
            contexts.Add("ScriptEngine", Tuple.Create(scriptEngineContext, new ViewModelState()));
            contexts.Add("MicroThreads", Tuple.Create(runningScriptsContext, new ViewModelState()));
            contexts.Add("AssetBrowser", Tuple.Create(assetBrowserContext, new ViewModelState()));
            contexts.Add("Effects", Tuple.Create(effectsContext, new ViewModelState()));

            // Index of the last change packet applied, echoed back to the client as an ack.
            int lastAckPacket = 0;

            // Incoming change packets are queued and drained by the main loop below.
            var entitiesChangePackets = new ConcurrentQueue<EntitiesChangePacket>();
            socketContext.AddPacketHandler<EntitiesChangePacket>(
                (packet) =>
                    {
                        entitiesChangePackets.Enqueue(packet);
                        entitiesChangePacketEvent.Set();
                    });

            // Background thread: polls every 100 ms for pending async nodes and pushes at most one
            // change per sweep on the async socket.
            Action asyncThreadStart = () =>
                {
                    while (true)
                    {
                        Thread.Sleep(100);
                        foreach (var context in contexts)
                        {
                            // Process async data
                            Guid[] path = null;
                            object value = null;
                            lock (context.Value.Item1)
                            {
                                var pendingNode = context.Value.Item1.GetNextPendingAsyncNode();
                                if (pendingNode != null)
                                {
                                    value = pendingNode.Value;
                                    path = ViewModelController.BuildPath(pendingNode);
                                }
                            }
                            if (path != null)
                            {
                                // Temporary encoding through our serializer (until our serializer are used for packets)
                                var memoryStream = new MemoryStream();
                                var writer = new BinarySerializationWriter(memoryStream);
                                writer.SerializeExtended(null, value, ArchiveMode.Serialize);

                                var change = new NetworkChange { Path = path.ToArray(), Type = NetworkChangeType.ValueUpdateAsync, Value = memoryStream.ToArray() };
                                var packet = new EntitiesChangePacket { GroupKey = context.Key, Changes = new NetworkChange[] { change } };
                                socketContextAsync.Send(packet);
                                break;
                            }
                        }
                    }
                };

            new Thread(new ThreadStart(asyncThreadStart)).Start();

            // TODO: Move some of this code directly inside ViewModelContext/Controller classes
            while (true)
            {
                // Wake up on either a timeout (periodic refresh) or an incoming change packet.
                await TaskEx.WhenAny(TaskEx.Delay(250), entitiesChangePacketEvent.WaitAsync());

                EntitiesChangePacket packet;
                while (entitiesChangePackets.TryDequeue(out packet))
                {
                    ViewModelController.NetworkApplyChanges(contexts[packet.GroupKey].Item1, packet.Changes);
                    lastAckPacket = packet.Index;
                }

                // Wait a single frame so that network updates get applied properly by all rendering systems for next update
                await Scheduler.Current.NextFrame();

                // If entity disappeared, try to replace it with new one (happen during file reload)
                // It's little bit cumbersome to test, need some simplification of this specific entity view model root.
                if (selectedEntitiesContext.Root != null
                    && selectedEntitiesContext.Root.Parent != null
                    && selectedEntitiesContext.Root.Parent.NodeValue is Entity)
                {
                    var entity = (Entity)selectedEntitiesContext.Root.Parent.NodeValue;
                    if (!engineContext.EntityManager.Entities.Contains(entity))
                    {
                        // Rebind the selection to the reloaded entity sharing the same Guid, if any.
                        entity = engineContext.EntityManager.Entities.FirstOrDefault(x => x.Guid == entity.Guid);
                        if (entity != null)
                        {
                            selectedEntitiesContext.ViewModelByGuid.Clear();
                            selectedEntitiesContext.Root = selectedEntitiesContext.GetModelView(entity).Children.First(x => x.PropertyName == "Components");
                        }
                    }
                }

                // Serialize every context (under its lock) into the per-group payload.
                var data = new Dictionary<string, byte[]>();
                foreach (var context in contexts)
                {
                    lock (context.Value.Item1)
                    {
                        if (context.Value.Item1.Root != null)
                            context.Value.Item1.AddModelView(context.Value.Item1.Root);
                        ViewModelController.UpdateReferences(context.Value.Item1, true);
                        data[context.Key] = ViewModelController.NetworkSerialize(context.Value.Item1, context.Value.Item2);
                    }
                }

                viewModelGlobalContext.UpdateObjects(contexts.Select(x => x.Value.Item1));

                //Console.WriteLine("DataSize: {0}", data.Sum(x => x.Value.Length));
                await Task.Factory.StartNew(() => socketContext.Send(new EntitiesUpdatePacket { AckIndex = lastAckPacket, Data = data }));
            }
        }
Example #7
0
        /// <summary>
        /// Asset-upgrade step: collects the agent settings embedded in each navigation mesh asset,
        /// deduplicates them by content hash into shared groups added to the game settings, and
        /// rewrites each navigation mesh to reference those groups instead of inlining the settings.
        /// </summary>
        /// <param name="navigationMeshAssets">Navigation mesh asset files to upgrade.</param>
        /// <param name="gameSettingsAsset">Game settings asset that receives the generated groups.</param>
        private void UpgradeNavigationMeshGroups(IEnumerable <PackageLoadingAssetFile> navigationMeshAssets, PackageLoadingAssetFile gameSettingsAsset)
        {
            // Collect all unique groups from all navigation mesh assets
            Dictionary <ObjectId, YamlMappingNode> agentSettings = new Dictionary <ObjectId, YamlMappingNode>();

            foreach (var navigationMeshAsset in navigationMeshAssets)
            {
                using (var navigationMesh = navigationMeshAsset.AsYamlAsset())
                {
                    HashSet <ObjectId> selectedGroups = new HashSet <ObjectId>();
                    foreach (var setting in navigationMesh.DynamicRootNode.NavigationMeshAgentSettings)
                    {
                        var currentAgentSettings = setting.Value;
                        // Hash the settings' fields so identical settings across assets map to the same group id.
                        using (DigestStream digestStream = new DigestStream(Stream.Null))
                        {
                            BinarySerializationWriter writer = new BinarySerializationWriter(digestStream);
                            writer.Write((float)currentAgentSettings.Height);
                            writer.Write((float)currentAgentSettings.Radius);
                            writer.Write((float)currentAgentSettings.MaxClimb);
                            writer.Write((float)currentAgentSettings.MaxSlope.Radians);
                            if (!agentSettings.ContainsKey(digestStream.CurrentHash))
                            {
                                agentSettings.Add(digestStream.CurrentHash, currentAgentSettings.Node);
                            }
                            selectedGroups.Add(digestStream.CurrentHash);
                        }
                    }

                    // Replace agent settings with group reference on the navigation mesh
                    navigationMesh.DynamicRootNode.NavigationMeshAgentSettings = DynamicYamlEmpty.Default;
                    dynamic selectedGroupsMapping = navigationMesh.DynamicRootNode.SelectedGroups = new DynamicYamlMapping(new YamlMappingNode());
                    foreach (var selectedGroup in selectedGroups)
                    {
                        // Key is a fresh guid; value is the group id derived from the content hash.
                        selectedGroupsMapping.AddChild(Guid.NewGuid().ToString("N"), selectedGroup.ToGuid().ToString("D"));
                    }
                }
            }

            // Add them to the game settings
            int groupIndex = 0;

            using (var gameSettings = gameSettingsAsset.AsYamlAsset())
            {
                var defaults = gameSettings.DynamicRootNode.Defaults;
                foreach (var setting in defaults)
                {
                    // Only the NavigationSettings defaults entry receives the generated groups.
                    if (setting.Node.Tag == "!SiliconStudio.Xenko.Navigation.NavigationSettings,SiliconStudio.Xenko.Navigation")
                    {
                        var groups = setting.Groups as DynamicYamlArray;
                        foreach (var groupToAdd in agentSettings)
                        {
                            dynamic newGroup = new DynamicYamlMapping(new YamlMappingNode());
                            newGroup.Id            = groupToAdd.Key.ToGuid().ToString("D");
                            newGroup.Name          = $"Group {groupIndex++}";
                            newGroup.AgentSettings = groupToAdd.Value;
                            groups.Add(newGroup);
                        }
                    }
                }
            }
        }
        /// <inheritdoc/>
        /// <remarks>
        /// For the "glsl" profile the source is used as-is; for HLSL profiles ("vs_*", "ps_*") the
        /// source is converted to GLSL first. The returned bytecode is the serialized
        /// OpenGLShaderBytecodeData containing GLSL source text, not a compiled binary program.
        /// </remarks>
        public CompilationResult Compile(string shaderSource, string entryPoint, string profile, string sourceFileName = "unknown")
        {
            string realShaderSource;

            if (profile == "glsl")
            {
                // Compile directly as GLSL
                realShaderSource = shaderSource;
            }
            else
            {
                // Convert HLSL to GLSL
                PipelineStage stage;
                var           profileParts = profile.Split('_');
                switch (profileParts[0])
                {
                case "vs":
                    stage = PipelineStage.Vertex;
                    break;

                case "ps":
                    stage = PipelineStage.Pixel;
                    break;

                case "gs":
                case "hs":
                case "ds":
                case "cs":
                    throw new NotImplementedException("This shader stage can't be converted to OpenGL.");

                default:
                    throw new NotSupportedException("Unknown shader profile.");
                }

                // Convert from HLSL to GLSL
                // Note that for now we parse from shader as a string, but we could simply clone effectPass.Shader to avoid multiple parsing.
                var glslConvertor = new ShaderConverter(isOpenGLES);
                var glslShader    = glslConvertor.Convert(shaderSource, entryPoint, stage);

                // Add std140 layout
                foreach (var constantBuffer in glslShader.Declarations.OfType <ConstantBuffer>())
                {
                    constantBuffer.Qualifiers |= new LayoutQualifier(new LayoutKeyValue("std140"));
                }

                // Output the result
                var glslShaderWriter = new HlslToGlslWriter();

                if (isOpenGLES)
                {
                    // GLES has no uniform blocks and no float suffixes; rewrite in/out qualifiers too.
                    glslShaderWriter.TrimFloatSuffix       = true;
                    glslShaderWriter.GenerateUniformBlocks = false;
                    foreach (var variable in glslShader.Declarations.OfType <Variable>())
                    {
                        if (variable.Qualifiers.Contains(ParameterQualifier.In))
                        {
                            variable.Qualifiers.Values.Remove(ParameterQualifier.In);
                            // "in" becomes "attribute" in VS, "varying" in other stages
                            variable.Qualifiers.Values.Add(
                                stage == PipelineStage.Vertex
                                    ? global::SiliconStudio.Shaders.Ast.Glsl.ParameterQualifier.Attribute
                                    : global::SiliconStudio.Shaders.Ast.Glsl.ParameterQualifier.Varying);
                        }
                        if (variable.Qualifiers.Contains(ParameterQualifier.Out))
                        {
                            variable.Qualifiers.Values.Remove(ParameterQualifier.Out);
                            variable.Qualifiers.Values.Add(global::SiliconStudio.Shaders.Ast.Glsl.ParameterQualifier.Varying);
                        }
                    }
                }

                // Write shader
                glslShaderWriter.Visit(glslShader);

                // Build shader source
                var glslShaderCode = new StringBuilder();

                // Append some header depending on target
                if (!isOpenGLES)
                {
                    glslShaderCode
                    .AppendLine("#version 410")
                    .AppendLine();

                    if (stage == PipelineStage.Pixel)
                    {
                        glslShaderCode
                        .AppendLine("out vec4 gl_FragData[1];")
                        .AppendLine();
                    }
                }

                if (isOpenGLES)
                {
                    if (stage == PipelineStage.Pixel)
                    {
                        glslShaderCode
                        .AppendLine("precision mediump float;")
                        .AppendLine();
                    }
                }

                glslShaderCode.Append(glslShaderWriter.Text);

                realShaderSource = glslShaderCode.ToString();
            }

            // Wrap the (possibly converted) source with its metadata for serialization.
            var shaderBytecodeData = new OpenGLShaderBytecodeData
            {
                IsBinary   = false,
                EntryPoint = entryPoint,
                Profile    = profile,
                Source     = realShaderSource,
            };

            // Encode shader source to a byte array (no universal StageBytecode format for OpenGL)
            var memoryStream = new MemoryStream();
            var binarySerializationWriter = new BinarySerializationWriter(memoryStream);

            shaderBytecodeData.Serialize(binarySerializationWriter, ArchiveMode.Serialize);

            return(new CompilationResult(new ShaderBytecode(memoryStream.ToArray()), false, null));
        }
Example #9
0
            /// <inheritdoc />
            protected override async Task <ResultStatus> DoCommandOverride(ICommandContext commandContext)
            {
                // Get path to ffmpeg
                var installationDir = DirectoryHelper.GetPackageDirectory("Xenko");
                var binDir          = UPath.Combine(installationDir, new UDirectory("Bin"));

                binDir = UPath.Combine(binDir, new UDirectory("Windows"));
                var ffmpeg = UPath.Combine(binDir, new UFile("ffmpeg.exe")).ToWindowsPath();

                if (!File.Exists(ffmpeg))
                {
                    throw new AssetException("Failed to compile a sound asset. ffmpeg was not found.");
                }

                // Get absolute path of asset source on disk
                var assetDirectory = Parameters.Source.GetParent();
                var assetSource    = UPath.Combine(assetDirectory, Parameters.Source);

                // Execute ffmpeg to convert source to PCM and then encode with Celt
                var tempFile = Path.GetTempFileName();

                try
                {
                    var channels    = Parameters.Spatialized ? 1 : 2;
                    var commandLine = "  -hide_banner -loglevel error" +                                          // hide most log output
                                      $" -i \"{assetSource.ToWindowsPath()}\"" +                                  // input file
                                      $" -f f32le -acodec pcm_f32le -ac {channels} -ar {Parameters.SampleRate}" + // codec
                                      $" -map 0:{Parameters.Index}" +                                             // stream index
                                      $" -y \"{tempFile}\"";                                                      // output file (always overwrite)
                    var ret = await ShellHelper.RunProcessAndGetOutputAsync(ffmpeg, commandLine, commandContext.Logger);

                    if (ret != 0 || commandContext.Logger.HasErrors)
                    {
                        throw new AssetException($"Failed to compile a sound asset, ffmpeg failed to convert {assetSource}");
                    }

                    var encoder = new Celt(Parameters.SampleRate, CompressedSoundSource.SamplesPerFrame, channels, false);

                    var uncompressed = CompressedSoundSource.SamplesPerFrame * channels * sizeof(short); //compare with int16 for CD quality comparison.. but remember we are dealing with 32 bit floats for encoding!!
                    var target       = (int)Math.Floor(uncompressed / (float)Parameters.CompressionRatio);

                    var dataUrl  = Url + "_Data";
                    var newSound = new Sound
                    {
                        CompressedDataUrl = dataUrl,
                        Channels          = channels,
                        SampleRate        = Parameters.SampleRate,
                        StreamFromDisk    = Parameters.StreamFromDisk,
                        Spatialized       = Parameters.Spatialized,
                    };

                    //make sure we don't compress celt data
                    commandContext.AddTag(new ObjectUrl(UrlType.Content, dataUrl), Builder.DoNotCompressTag);

                    var frameSize = CompressedSoundSource.SamplesPerFrame * channels;
                    using (var reader = new BinaryReader(new FileStream(tempFile, FileMode.Open, FileAccess.Read)))
                        using (var outputStream = MicrothreadLocalDatabases.DatabaseFileProvider.OpenStream(dataUrl, VirtualFileMode.Create, VirtualFileAccess.Write, VirtualFileShare.Read, StreamFlags.Seekable))
                        {
                            var writer = new BinarySerializationWriter(outputStream);

                            var outputBuffer = new byte[target];
                            var buffer       = new float[frameSize];
                            var count        = 0;
                            var length       = reader.BaseStream.Length; // Cache the length, because this getter is expensive to use
                            for (var position = 0; position < length; position += sizeof(float))
                            {
                                if (count == frameSize) //flush
                                {
                                    var len = encoder.Encode(buffer, outputBuffer);
                                    writer.Write((short)len);
                                    outputStream.Write(outputBuffer, 0, len);

                                    newSound.Samples += count / channels;
                                    newSound.NumberOfPackets++;
                                    newSound.MaxPacketLength = Math.Max(newSound.MaxPacketLength, len);

                                    count = 0;
                                    Array.Clear(buffer, 0, frameSize);
                                }

                                buffer[count] = reader.ReadSingle();
                                count++;
                            }

                            if (count > 0) //flush
                            {
                                var len = encoder.Encode(buffer, outputBuffer);
                                writer.Write((short)len);
                                outputStream.Write(outputBuffer, 0, len);

                                newSound.Samples += count / channels;
                                newSound.NumberOfPackets++;
                                newSound.MaxPacketLength = Math.Max(newSound.MaxPacketLength, len);
                            }
                        }

                    var assetManager = new ContentManager(MicrothreadLocalDatabases.ProviderService);
                    assetManager.Save(Url, newSound);

                    return(ResultStatus.Successful);
                }
                finally
                {
                    File.Delete(tempFile);
                }
            }
示例#10
0
        /// <summary>
        /// Synchronizes files to an android device, uploading only files that changed
        /// since the last run and removing files that are no longer mapped.
        /// </summary>
        /// <param name="logger">The logger (may be null).</param>
        /// <param name="device">The device.</param>
        /// <param name="fileMapping">The file mapping (relative target path, source HDD filename).</param>
        /// <param name="androidPath">The android path.</param>
        /// <param name="cacheFile">The cache file storing the state of the previous synchronization.</param>
        public static void Synchronize(Logger logger, string device, Dictionary <string, string> fileMapping, string androidPath, string cacheFile)
        {
            // Normalize the remote path so it always ends with a directory separator.
            if (!androidPath.EndsWith("/"))
            {
                androidPath += "/";
            }

            // Snapshot the current version (size + timestamp) of every source file.
            var currentVersions = fileMapping.ToDictionary(x => x.Key, x => new FileVersion(x.Value));

            // Load the state recorded by the previous synchronization, if any.
            var previousVersions = new Dictionary <string, FileVersion>();
            try
            {
                using (var file = File.OpenRead(cacheFile))
                {
                    var binaryReader = new BinarySerializationReader(file);
                    binaryReader.Serialize(ref previousVersions, ArchiveMode.Deserialize);
                }
            }
            catch (IOException)
            {
                // No usable cache (first run or unreadable file): treat everything as new.
            }

            // Files known from the previous run but absent from the mapping must be deleted remotely.
            var filesToRemove = previousVersions.Keys.Where(key => !currentVersions.ContainsKey(key)).ToList();

            // Files that are new, or whose size/timestamp differ from the cache, must be (re)uploaded.
            var filesToUpload = new List <string>();
            foreach (var current in currentVersions)
            {
                FileVersion cached;
                var upToDate = previousVersions.TryGetValue(current.Key, out cached) &&
                               cached.FileSize == current.Value.FileSize &&
                               cached.LastModifiedDate == current.Value.LastModifiedDate;
                if (!upToDate)
                {
                    filesToUpload.Add(current.Key);
                }
            }

            // Push outdated/new files to the device.
            foreach (var file in filesToUpload)
            {
                if (logger != null)
                {
                    logger.Verbose("Copying file {0}", file);
                }
                RunAdb(device, string.Format("push \"{0}\" \"{1}\"", fileMapping[file], androidPath + file.Replace('\\', '/')));
            }

            // Delete files that are no longer part of the mapping.
            foreach (var file in filesToRemove)
            {
                if (logger != null)
                {
                    logger.Verbose("Deleting file {0}", file);
                }
                RunAdb(device, string.Format("shell \"rm {0}\"", androidPath + file.Replace('\\', '/')));
            }

            // Persist the new state for the next synchronization run.
            using (var file = File.Create(cacheFile))
            {
                var binaryWriter = new BinarySerializationWriter(file);
                binaryWriter.Write(currentVersions);
            }
        }
示例#11
0
        public void Test()
        {
            // Build a prefab containing one entity whose ModelComponent references
            // two identifiable null-materials with fixed ids.
            var prefab = new PrefabAsset();

            var modelComponent = new ModelComponent();
            var entity = new Entity() { modelComponent };

            prefab.Hierarchy.Entities.Add(entity);
            prefab.Hierarchy.RootEntities.Add(entity.Id);

            var material1 = new MaterialNull();
            IdentifiableHelper.SetId(material1, new Guid("39E2B226-8752-4678-8E93-76FFBFBA337B"));

            var material2 = new MaterialNull();
            IdentifiableHelper.SetId(material2, new Guid("CC4F1B31-FBB7-4360-A3E7-060BDFDA0695"));

            modelComponent.Materials.Add(material1);
            modelComponent.Materials.Add(material2);

            // Shared verification: a round-tripped prefab must still expose both
            // materials, with their original ids preserved.
            Action<PrefabAsset> checkPrefab = roundTripped =>
            {
                var entityDesign = roundTripped.Hierarchy.Entities.FirstOrDefault();
                Assert.NotNull(entityDesign);

                var component = entityDesign.Entity.Get<ModelComponent>();
                Assert.NotNull(component);

                Assert.AreEqual(2, component.Materials.Count);

                Assert.AreEqual(IdentifiableHelper.GetId(material1), IdentifiableHelper.GetId(component.Materials[0]));
                Assert.AreEqual(IdentifiableHelper.GetId(material2), IdentifiableHelper.GetId(component.Materials[1]));
            };

            // Yaml (design-time) serialization round-trip.
            using (var stream = new MemoryStream())
            {
                AssetSerializer.Save(stream, prefab);

                stream.Position = 0;
                var serializedVersion = Encoding.UTF8.GetString(stream.ToArray());
                Console.WriteLine(serializedVersion);

                stream.Position = 0;
                checkPrefab((PrefabAsset)AssetSerializer.Load(stream, "myentity"));
            }

            // Cloning round-trip.
            checkPrefab((PrefabAsset)AssetCloner.Clone(prefab));

            // Runtime (binary) serialization: MaterialNull instances are stripped and
            // replaced by null entries, so both material slots must come back as null.
            {
                var stream = new MemoryStream();
                var writer = new BinarySerializationWriter(stream)
                {
                    Context = { SerializerSelector = SerializerSelector.AssetWithReuse }
                };
                writer.SerializeExtended(entity, ArchiveMode.Serialize);
                writer.Flush();
                stream.Position = 0;

                var reader = new BinarySerializationReader(stream)
                {
                    Context = { SerializerSelector = SerializerSelector.AssetWithReuse }
                };

                Entity newEntity = null;
                reader.SerializeExtended(ref newEntity, ArchiveMode.Deserialize);
                Assert.NotNull(newEntity);

                var component = newEntity.Get<ModelComponent>();
                Assert.NotNull(component);

                Assert.AreEqual(2, component.Materials.Count);

                Assert.Null(component.Materials[0]);
                Assert.Null(component.Materials[1]);
            }
        }
示例#12
0
        /// <summary>
        /// Merges the hardcoded "test" branch into HEAD and, for every conflicting
        /// ".hotei" scene file, performs a custom 3-way entity merge and stages the result.
        /// </summary>
        /// <param name="engineContext">The engine context (currently unused).</param>
        public static void MergeTest(EngineContext engineContext)
        {
            // TODO: Currently hardcoded
            var db            = new FileRepository(new FilePath(@"C:\DEV\hotei_scene", Constants.DOT_GIT));
            var git           = new Git(db);
            var tree1Ref      = db.GetRef("test");
            var tree2Ref      = db.GetRef(Constants.HEAD);
            var tree1CommitId = tree1Ref.GetObjectId();
            var tree2CommitId = tree2Ref.GetObjectId();

            // Merge tree1 into current tree
            var mergeResult = git.Merge().Include(tree1CommitId).Call();

            if (mergeResult.GetMergeStatus() == MergeStatus.CONFLICTING)
            {
                foreach (var conflict in mergeResult.GetConflicts())
                {
                    // Only scene files get the custom merge; other conflicts keep git's default handling.
                    if (!conflict.Key.EndsWith(".hotei"))
                    {
                        continue;
                    }

                    // Search base tree (common ancestor), if any
                    var walk = new RevWalk(db);
                    walk.SetRevFilter(RevFilter.MERGE_BASE);
                    walk.MarkStart(walk.ParseCommit(tree1CommitId));
                    walk.MarkStart(walk.ParseCommit(tree2CommitId));
                    var baseTree = walk.Next();

                    var tw = new NameConflictTreeWalk(db);
                    tw.AddTree(new RevWalk(db).ParseTree(tree1CommitId).ToObjectId());
                    tw.AddTree(new RevWalk(db).ParseTree(tree2CommitId).ToObjectId());
                    if (baseTree != null)
                    {
                        tw.AddTree(new RevWalk(db).ParseTree(baseTree.ToObjectId()).ToObjectId());
                    }
                    tw.Filter = PathFilter.Create(conflict.Key);

                    // Should be only one iteration
                    while (tw.Next())
                    {
                        // Base (common ancestor) is optional; it was only added as tree index 2 when found.
                        var tree0 = baseTree != null ? tw.GetTree <AbstractTreeIterator>(2) : null;
                        var tree1 = tw.GetTree <AbstractTreeIterator>(0);
                        var tree2 = tw.GetTree <AbstractTreeIterator>(1);

                        // Get contents of every version for the 3-way merge (base content may be absent)
                        var data0 = baseTree != null ? LoadEntities(new MemoryStream(tw.ObjectReader.Open(tree0.EntryObjectId).GetBytes())) : null;
                        var data1 = LoadEntities(new MemoryStream(tw.ObjectReader.Open(tree1.EntryObjectId).GetBytes()));
                        var data2 = LoadEntities(new MemoryStream(tw.ObjectReader.Open(tree2.EntryObjectId).GetBytes()));

                        // Perform 3-way merge
                        var entities = new List <EntityDefinition>();
                        ThreeWayMergeOrdered.Merge(entities, data0, data1, data2, x => x.Guid, (x, y) => x == y, ResolveEntityConflicts);

                        // Save new merged file. Using 'using' guarantees the stream is closed even
                        // if serialization throws (the previous manual Close() leaked on exception).
                        using (var fileStream = new FileStream(new FilePath(db.WorkTree, conflict.Key), FileMode.Create, FileAccess.Write))
                        {
                            var stream = new BinarySerializationWriter(fileStream);
                            stream.Context.Serializer = Serializer;
                            stream.SerializeClass(null, ref entities, ArchiveMode.Serialize);
                        }

                        // TODO: Check if all conflicts are really resolved
                        // Add resolved file for merge commit
                        git.Add().AddFilepattern(conflict.Key).Call();
                    }
                }
            }
        }
示例#13
0
 /// <inheritdoc/>
 protected override void ComputeParameterHash(BinarySerializationWriter writer)
 {
     base.ComputeParameterHash(writer);

     // Bump this constant whenever the prefiltering algorithm changes so that
     // previously cached results are invalidated and recomputed.
     const int prefilteringAlgorithmVersion = 2;
     writer.Write(prefilteringAlgorithmVersion);
 }
示例#14
0
        /// <summary>
        /// Packs the given objects into a bundle file at <paramref name="bundleUrl"/>, optionally
        /// splitting new objects into a separate incremental bundle file stored next to it.
        /// If the existing bundle already matches (same dependencies, index and objects), nothing is rewritten.
        /// </summary>
        /// <param name="bundleUrl">Virtual file system url of the bundle to write.</param>
        /// <param name="backend">Object database backend used to read the raw object streams.</param>
        /// <param name="objectIds">Ids of the objects to pack; must not be empty.</param>
        /// <param name="disableCompressionIds">Objects whose payload must be stored uncompressed (no LZ4).</param>
        /// <param name="indexMap">Mapping from content url to object id, written as the bundle index.</param>
        /// <param name="dependencies">Urls of bundles this bundle depends on.</param>
        /// <param name="useIncrementalBundle">True to reuse/create incremental bundle files; false to delete them.</param>
        /// <exception cref="InvalidOperationException">Thrown when <paramref name="objectIds"/> is empty.</exception>
        public static void CreateBundle(string bundleUrl, IOdbBackend backend, ObjectId[] objectIds, ISet <ObjectId> disableCompressionIds, Dictionary <string, ObjectId> indexMap, IList <string> dependencies, bool useIncrementalBundle)
        {
            if (objectIds.Length == 0)
            {
                throw new InvalidOperationException("Nothing to pack.");
            }

            // Map each object id to its slot in 'objects' so entries can be updated in place
            // when an existing incremental bundle already contains the object.
            var objectsToIndex = new Dictionary <ObjectId, int>(objectIds.Length);

            var objects = new List <KeyValuePair <ObjectId, ObjectInfo> >();

            for (int i = 0; i < objectIds.Length; ++i)
            {
                objectsToIndex.Add(objectIds[i], objects.Count);
                objects.Add(new KeyValuePair <ObjectId, ObjectInfo>(objectIds[i], new ObjectInfo()));
            }

            var incrementalBundles = new List <ObjectId>();

            // If there is a .bundle, add incremental id before it
            var bundleExtensionLength = (bundleUrl.EndsWith(BundleExtension) ? BundleExtension.Length : 0);

            // Early exit if package didn't change (header-check only)
            if (VirtualFileSystem.FileExists(bundleUrl))
            {
                try
                {
                    using (var packStream = VirtualFileSystem.OpenStream(bundleUrl, VirtualFileMode.Open, VirtualFileAccess.Read))
                    {
                        var bundle = ReadBundleDescription(packStream);

                        // If package didn't change since last time, early exit!
                        if (ArrayExtensions.ArraysEqual(bundle.Dependencies, dependencies) &&
                            ArrayExtensions.ArraysEqual(bundle.Assets.OrderBy(x => x.Key).ToList(), indexMap.OrderBy(x => x.Key).ToList()) &&
                            ArrayExtensions.ArraysEqual(bundle.Objects.Select(x => x.Key).OrderBy(x => x).ToList(), objectIds.OrderBy(x => x).ToList()))
                        {
                            // Make sure all incremental bundles exist
                            // Also, if we don't want incremental bundles but we have some (or vice-versa), let's force a regeneration
                            if ((useIncrementalBundle == (bundle.IncrementalBundles.Count > 0)) &&
                                bundle.IncrementalBundles.Select(x => bundleUrl.Insert(bundleUrl.Length - bundleExtensionLength, "." + x)).All(x =>
                            {
                                if (!VirtualFileSystem.FileExists(x))
                                {
                                    return(false);
                                }
                                using (var incrementalStream = VirtualFileSystem.OpenStream(x, VirtualFileMode.Open, VirtualFileAccess.Read))
                                    return(ValidateHeader(incrementalStream));
                            }))
                            {
                                return;
                            }
                        }
                    }

                    // Process existing incremental bundles one by one
                    // Try to find if there is enough to reuse in each of them
                    var filename  = VirtualFileSystem.GetFileName(bundleUrl);
                    var directory = VirtualFileSystem.GetParentFolder(bundleUrl);

                    foreach (var incrementalBundleUrl in VirtualFileSystem.ListFiles(directory, filename.Insert(filename.Length - bundleExtensionLength, ".*"), VirtualSearchOption.TopDirectoryOnly).Result)
                    {
                        // Incremental bundles are named "<name>.<objectid>.bundle"; extract and parse the id part.
                        var      incrementalIdString = incrementalBundleUrl.Substring(incrementalBundleUrl.Length - bundleExtensionLength - ObjectId.HashStringLength, ObjectId.HashStringLength);
                        ObjectId incrementalId;
                        if (!ObjectId.TryParse(incrementalIdString, out incrementalId))
                        {
                            continue;
                        }

                        // If we don't want incremental bundles, delete old ones from previous build
                        if (!useIncrementalBundle)
                        {
                            VirtualFileSystem.FileDelete(incrementalBundleUrl);
                            continue;
                        }

                        long sizeNeededItems = 0;
                        long sizeTotal       = 0;

                        BundleDescription incrementalBundle;
                        try
                        {
                            using (var packStream = VirtualFileSystem.OpenStream(incrementalBundleUrl, VirtualFileMode.Open, VirtualFileAccess.Read))
                            {
                                incrementalBundle = ReadBundleDescription(packStream);
                            }

                            // Compute size of objects (needed ones and everything)
                            foreach (var @object in incrementalBundle.Objects)
                            {
                                var objectCompressedSize = @object.Value.EndOffset - @object.Value.StartOffset;

                                // TODO: Detect object that are stored without ObjectId being content hash: we need to check actual content hash is same in this case
                                if (objectsToIndex.ContainsKey(@object.Key))
                                {
                                    sizeNeededItems += objectCompressedSize;
                                }
                                sizeTotal += objectCompressedSize;
                            }

                            // Check if we would reuse at least 50% of the incremental bundle, otherwise let's just get rid of it
                            var reuseRatio = (float)((double)sizeNeededItems / (double)sizeTotal);
                            if (reuseRatio < 0.5f)
                            {
                                VirtualFileSystem.FileDelete(incrementalBundleUrl);
                            }
                            else
                            {
                                // We will reuse this incremental bundle
                                // Let's add ObjectId entries
                                foreach (var @object in incrementalBundle.Objects)
                                {
                                    int objectIndex;
                                    if (objectsToIndex.TryGetValue(@object.Key, out objectIndex))
                                    {
                                        var objectInfo = @object.Value;
                                        objectInfo.IncrementalBundleIndex = incrementalBundles.Count + 1;
                                        objects[objectIndex] = new KeyValuePair <ObjectId, ObjectInfo>(@object.Key, objectInfo);
                                    }
                                }

                                // Add this incremental bundle in the list
                                incrementalBundles.Add(incrementalId);
                            }
                        }
                        catch (Exception)
                        {
                            // Could not read incremental bundle (format changed?)
                            // Let's delete it
                            VirtualFileSystem.FileDelete(incrementalBundleUrl);
                        }
                    }
                }
                catch (Exception)
                {
                    // Could not read previous bundle (format changed?)
                    // Let's just mute this error as new bundle will overwrite it anyway
                }
            }

            // Count objects which needs to be saved
            var incrementalObjects = new List <KeyValuePair <ObjectId, ObjectInfo> >();

            if (useIncrementalBundle)
            {
                for (int i = 0; i < objectIds.Length; ++i)
                {
                    // Skip if already part of an existing incremental package
                    if (objects[i].Value.IncrementalBundleIndex > 0)
                    {
                        continue;
                    }

                    incrementalObjects.Add(new KeyValuePair <ObjectId, ObjectInfo>(objects[i].Key, new ObjectInfo()));
                }
            }

            // Create an incremental package
            var newIncrementalId       = ObjectId.New();
            var incrementalBundleIndex = incrementalBundles.Count;

            if (useIncrementalBundle && incrementalObjects.Count > 0)
            {
                incrementalBundles.Add(newIncrementalId);
            }

            using (var packStream = VirtualFileSystem.OpenStream(bundleUrl, VirtualFileMode.Create, VirtualFileAccess.Write))
            {
                var header = new Header();
                header.MagicHeader = Header.MagicHeaderValid;

                var packBinaryWriter = new BinarySerializationWriter(packStream);
                packBinaryWriter.Write(header);
                // Write dependencies
                packBinaryWriter.Write(dependencies.ToList());
                // Write incremental bundles
                packBinaryWriter.Write(incrementalBundles.ToList());

                // Save location of object ids
                var packObjectIdPosition = packStream.Position;

                // Write empty object ids (reserve space, will be rewritten later)
                packBinaryWriter.Write(objects);

                // Write index
                packBinaryWriter.Write(indexMap.ToList());

                using (var incrementalStream = incrementalObjects.Count > 0 ? VirtualFileSystem.OpenStream(bundleUrl.Insert(bundleUrl.Length - bundleExtensionLength, "." + newIncrementalId), VirtualFileMode.Create, VirtualFileAccess.Write) : null)
                {
                    var  incrementalBinaryWriter     = incrementalStream != null ? new BinarySerializationWriter(incrementalStream) : null;
                    long incrementalObjectIdPosition = 0;
                    if (incrementalStream != null)
                    {
                        // The incremental bundle has the same layout as the main one,
                        // but with empty dependency/incremental/index sections.
                        incrementalBinaryWriter.Write(header);
                        // Write dependencies
                        incrementalBinaryWriter.Write(new List <string>());
                        // Write incremental bundles
                        incrementalBinaryWriter.Write(new List <ObjectId>());

                        // Save location of object ids
                        incrementalObjectIdPosition = incrementalStream.Position;

                        // Write empty object ids (reserve space, will be rewritten later)
                        incrementalBinaryWriter.Write(incrementalObjects);

                        // Write index
                        incrementalBinaryWriter.Write(new List <KeyValuePair <string, ObjectId> >());
                    }

                    // New object payloads go to the incremental bundle when one is being created,
                    // otherwise straight into the main bundle.
                    var objectOutputStream     = incrementalStream ?? packStream;
                    int incrementalObjectIndex = 0;
                    for (int i = 0; i < objectIds.Length; ++i)
                    {
                        // Skip if already part of an existing incremental package
                        if (objects[i].Value.IncrementalBundleIndex > 0)
                        {
                            continue;
                        }

                        using (var objectStream = backend.OpenStream(objectIds[i]))
                        {
                            // Prepare object info
                            var objectInfo = new ObjectInfo {
                                StartOffset = objectOutputStream.Position, SizeNotCompressed = objectStream.Length
                            };

                            // re-order the file content so that it is not necessary to seek while reading the input stream (header/object/refs -> header/refs/object)
                            var inputStream          = objectStream;
                            var originalStreamLength = objectStream.Length;
                            var streamReader         = new BinarySerializationReader(inputStream);
                            var chunkHeader          = ChunkHeader.Read(streamReader);
                            if (chunkHeader != null)
                            {
                                // create the reordered stream
                                var reorderedStream = new MemoryStream((int)originalStreamLength);

                                // copy the header
                                var streamWriter = new BinarySerializationWriter(reorderedStream);
                                chunkHeader.Write(streamWriter);

                                // copy the references
                                var newOffsetReferences = reorderedStream.Position;
                                inputStream.Position = chunkHeader.OffsetToReferences;
                                inputStream.CopyTo(reorderedStream);

                                // copy the object
                                var newOffsetObject = reorderedStream.Position;
                                inputStream.Position = chunkHeader.OffsetToObject;
                                inputStream.CopyTo(reorderedStream, chunkHeader.OffsetToReferences - chunkHeader.OffsetToObject);

                                // rewrite the chunk header with correct offsets
                                chunkHeader.OffsetToObject     = (int)newOffsetObject;
                                chunkHeader.OffsetToReferences = (int)newOffsetReferences;
                                reorderedStream.Position       = 0;
                                chunkHeader.Write(streamWriter);

                                // change the input stream to use reordered stream
                                inputStream          = reorderedStream;
                                inputStream.Position = 0;
                            }

                            // compress the stream
                            if (!disableCompressionIds.Contains(objectIds[i]))
                            {
                                objectInfo.IsCompressed = true;

                                var lz4OutputStream = new LZ4Stream(objectOutputStream, CompressionMode.Compress);
                                inputStream.CopyTo(lz4OutputStream);
                                lz4OutputStream.Flush();
                            }
                            // copy the stream "as is"
                            else
                            {
                                // Write stream
                                inputStream.CopyTo(objectOutputStream);
                            }

                            // release the reordered created stream
                            if (chunkHeader != null)
                            {
                                inputStream.Dispose();
                            }

                            // Add updated object info
                            objectInfo.EndOffset = objectOutputStream.Position;
                            // Note: we add 1 because 0 is reserved for self; first incremental bundle starts at 1
                            objectInfo.IncrementalBundleIndex = objectOutputStream == incrementalStream ? incrementalBundleIndex + 1 : 0;
                            objects[i] = new KeyValuePair <ObjectId, ObjectInfo>(objectIds[i], objectInfo);

                            if (useIncrementalBundle)
                            {
                                // Also update incremental bundle object info
                                objectInfo.IncrementalBundleIndex            = 0; // stored in same bundle
                                incrementalObjects[incrementalObjectIndex++] = new KeyValuePair <ObjectId, ObjectInfo>(objectIds[i], objectInfo);
                            }
                        }
                    }

                    // First finish to write incremental package so that main one can't be valid on the HDD without the incremental one being too
                    if (incrementalStream != null)
                    {
                        // Rewrite headers
                        header.Size = incrementalStream.Length;
                        incrementalStream.Position = 0;
                        incrementalBinaryWriter.Write(header);

                        // Rewrite object with updated offsets/size
                        incrementalStream.Position = incrementalObjectIdPosition;
                        incrementalBinaryWriter.Write(incrementalObjects);
                    }
                }

                // Rewrite headers
                header.Size         = packStream.Length;
                packStream.Position = 0;
                packBinaryWriter.Write(header);

                // Rewrite object with updated offsets/size
                packStream.Position = packObjectIdPosition;
                packBinaryWriter.Write(objects);
            }
        }
示例#15
0
        /// <summary>
        /// Serializes <paramref name="obj"/> to <paramref name="url"/> in the file provider,
        /// registers it as a loaded asset, and enqueues every non-proxy content reference it
        /// holds so referenced objects get serialized too.
        /// </summary>
        /// <param name="serializeOperations">Queue receiving follow-up serialize operations for referenced objects.</param>
        /// <param name="url">Destination url in the file provider.</param>
        /// <param name="obj">Object to serialize.</param>
        /// <param name="publicReference">True to create the asset reference as public.</param>
        /// <param name="storageType">Optional storage type used to resolve the content serializer; defaults to the object's runtime type.</param>
        private void SerializeObject(Queue <SerializeOperation> serializeOperations, string url, object obj, bool publicReference, Type storageType = null)
        {
            // Don't create context in case we don't want to serialize referenced objects
            //if (!SerializeReferencedObjects && obj != RootObject)
            //    return null;

            // Already saved?
            // TODO: Ref counting? Should we change it on save? Probably depends if we cache or not.
            if (LoadedAssetReferences.ContainsKey(obj))
            {
                return;
            }

            var serializer = Serializer.GetSerializer(storageType, obj.GetType());

            if (serializer == null)
            {
                throw new InvalidOperationException($"Content serializer for {obj.GetType()} could not be found.");
            }

            var contentSerializerContext = new ContentSerializerContext(url, ArchiveMode.Serialize, this);

            using (var stream = FileProvider.OpenStream(url, VirtualFileMode.Create, VirtualFileAccess.Write))
            {
                var streamWriter = new BinarySerializationWriter(stream);
                PrepareSerializerContext(contentSerializerContext, streamWriter.Context);

                ChunkHeader header = null;

                // Allocate space in the stream, and also include header version in the hash computation, which is better
                // If serialization type is null, it means there should be no header.
                var serializationType = serializer.SerializationType;
                if (serializationType != null)
                {
                    header = new ChunkHeader {
                        Type = serializer.SerializationType.AssemblyQualifiedName
                    };
                    // Write a provisional header first; its offsets are patched below once the
                    // real positions of the object and reference sections are known.
                    header.Write(streamWriter);
                    header.OffsetToObject = (int)streamWriter.NativeStream.Position;
                }

                contentSerializerContext.SerializeContent(streamWriter, serializer, obj);

                // Write references and updated header
                if (header != null)
                {
                    header.OffsetToReferences = (int)streamWriter.NativeStream.Position;
                    contentSerializerContext.SerializeReferences(streamWriter);

                    // Move back to the pre-allocated header position in the stream
                    stream.Seek(0, SeekOrigin.Begin);

                    // Write actual header (now carrying the final object/reference offsets).
                    header.Write(new BinarySerializationWriter(stream));
                }
            }

            var assetReference = new Reference(url, publicReference);

            SetAssetObject(assetReference, obj);

            // Process content references
            // TODO: Should we work at ChunkReference level?
            foreach (var contentReference in contentSerializerContext.ContentReferences)
            {
                if (contentReference.ObjectValue != null)
                {
                    var attachedReference = AttachedReferenceManager.GetAttachedReference(contentReference.ObjectValue);
                    // Skip values with no attached reference, and proxies: neither is standalone content to save.
                    if (attachedReference == null || attachedReference.IsProxy)
                    {
                        continue;
                    }

                    // Referenced content is always enqueued as a non-public reference.
                    serializeOperations.Enqueue(new SerializeOperation(contentReference.Location, contentReference.ObjectValue, false));
                }
            }
        }
示例#16
0
 /// <summary>
 /// Folds the design resolution into the command's parameter hash on top of the
 /// base parameters — presumably so a resolution change invalidates the cached
 /// result (TODO confirm against the build-cache caller).
 /// </summary>
 /// <param name="writer">Writer the hashed parameters are serialized into.</param>
 protected override void ComputeParameterHash(BinarySerializationWriter writer)
 {
     base.ComputeParameterHash(writer);
     writer.Write(designResolution);
 }
示例#17
0
File: Store.cs  Project: vol16bit/xenko
        /// <summary>
        /// Writes a single store entry to the given stream using binary serialization.
        /// </summary>
        /// <param name="stream">Destination stream for the serialized entry.</param>
        /// <param name="value">The value to serialize.</param>
        protected virtual void WriteEntry(Stream stream, T value)
        {
            // Fix: the local was misleadingly named "reader" although it is a serialization writer.
            var writer = new BinarySerializationWriter(stream);

            writer.Serialize(ref value, ArchiveMode.Serialize);
        }
示例#18
0
File: ScriptSync.cs  Project: cg123/xenko
        /// <summary>
        /// Tests 3-way merging of ".hotei" scene files during a git merge: for each conflicting
        /// entry it loads the base/ours/theirs versions, merges them entity-by-entity, writes
        /// the result back to the work tree and stages it for the merge commit.
        /// </summary>
        /// <param name="engineContext">Engine context (not used by the current body).</param>
        public static void MergeTest(EngineContext engineContext)
        {
            // TODO: Currently hardcoded
            var db = new FileRepository(new FilePath(@"C:\DEV\hotei_scene", Constants.DOT_GIT));
            var git = new Git(db);
            var tree1Ref = db.GetRef("test");
            var tree2Ref = db.GetRef(Constants.HEAD);
            var tree1CommitId = tree1Ref.GetObjectId();
            var tree2CommitId = tree2Ref.GetObjectId();

            // Merge tree1 into current tree
            var mergeResult = git.Merge().Include(tree1CommitId).Call();

            if (mergeResult.GetMergeStatus() == MergeStatus.CONFLICTING)
            {
                foreach (var conflict in mergeResult.GetConflicts())
                {
                    if (conflict.Key.EndsWith(".hotei"))
                    {
                        // Search base tree (common ancestor), if any
                        var walk = new RevWalk(db);
                        walk.SetRevFilter(RevFilter.MERGE_BASE);
                        walk.MarkStart(walk.ParseCommit(tree1CommitId));
                        walk.MarkStart(walk.ParseCommit(tree2CommitId));
                        var baseTree = walk.Next();

                        var tw = new NameConflictTreeWalk(db);
                        tw.AddTree(new RevWalk(db).ParseTree(tree1CommitId).ToObjectId());
                        tw.AddTree(new RevWalk(db).ParseTree(tree2CommitId).ToObjectId());
                        if (baseTree != null)
                            tw.AddTree(new RevWalk(db).ParseTree(baseTree.ToObjectId()).ToObjectId());
                        tw.Filter = PathFilter.Create(conflict.Key);

                        // Should be only one iteration
                        while (tw.Next())
                        {
                            // Tree index 2 is the common ancestor; only present when a base was found.
                            var tree0 = baseTree != null ? tw.GetTree<AbstractTreeIterator>(2) : null;
                            var tree1 = tw.GetTree<AbstractTreeIterator>(0);
                            var tree2 = tw.GetTree<AbstractTreeIterator>(1);

                            // Get contents of every versions for the 3-way merge
                            var data0 = baseTree != null ? LoadEntities(new MemoryStream(tw.ObjectReader.Open(tree0.EntryObjectId).GetBytes())) : null;
                            var data1 = LoadEntities(new MemoryStream(tw.ObjectReader.Open(tree1.EntryObjectId).GetBytes()));
                            var data2 = LoadEntities(new MemoryStream(tw.ObjectReader.Open(tree2.EntryObjectId).GetBytes()));

                            // Perform 3-way merge
                            var entities = new List<EntityDefinition>();
                            ThreeWayMergeOrdered.Merge(entities, data0, data1, data2, x => x.Guid, (x, y) => x == y, ResolveEntityConflicts);

                            // Save new merged file
                            // Fix: wrap the stream in "using" so it is closed even if serialization
                            // throws (previously only the happy path called Close(), leaking the handle).
                            using (var fileStream = new FileStream(new FilePath(db.WorkTree, conflict.Key), FileMode.Create, FileAccess.Write))
                            {
                                var stream = new BinarySerializationWriter(fileStream);
                                stream.Context.Serializer = Serializer;
                                stream.SerializeClass(null, ref entities, ArchiveMode.Serialize);
                            }

                            // TODO: Check if all conflicts are really resolved
                            // Add resolved file for merge commit
                            git.Add().AddFilepattern(conflict.Key).Call();
                        }
                    }
                }
            }
        }
示例#19
0
 /// <summary>
 /// Extends the base parameter hash with the compile-time dependencies of the asset
 /// (resolved through its package), so dependency changes alter the hash.
 /// </summary>
 /// <param name="writer">Writer the hashed parameters are serialized into.</param>
 protected override void ComputeParameterHash(BinarySerializationWriter writer)
 {
     base.ComputeParameterHash(writer);
     ComputeCompileTimeDependenciesHash(package, writer, AssetParameters);
 }
示例#20
0
File: ScriptSync.cs  Project: cg123/xenko
        /// <summary>
        /// Exports the versionable state of every entity named "him" to the hardcoded scene
        /// file: for each component, fields and properties marked [Versionable] are encoded
        /// into definitions and the whole list is binary-serialized to disk.
        /// </summary>
        /// <param name="engineContext">Engine context providing the entities to export.</param>
        public static void SaveAssets(EngineContext engineContext)
        {
            var entities = new List<EntityDefinition>();

            // Entities and components are enumerated in sorted order for a deterministic output file.
            foreach (var entity in engineContext.EntityManager.Entities.OrderBy(x => x.Guid).Where(x => x.Name == "him"))
            {
                var entityDefinition = new EntityDefinition(entity.Guid);
                entities.Add(entityDefinition);

                foreach (var entityComponent in entity.Properties.Where(x => x.Value is EntityComponent).OrderBy(x => x.Key.Name))
                {
                    var componentDefinition = new EntityComponentDefinition { Name = entityComponent.Key.Name, Properties = new List<EntityComponentProperty>() };
                    entityDefinition.Components.Add(componentDefinition);

                    var entityComponentValue = entityComponent.Value as EntityComponent;

                    // Only fields explicitly marked [Versionable] are exported.
                    foreach (var field in entityComponentValue.GetType().GetFields())
                    {
                        if (field.GetCustomAttributes(typeof(VersionableAttribute), true).Length == 0)
                            continue;

                        componentDefinition.Properties.Add(new EntityComponentProperty(EntityComponentPropertyType.Field, field.Name, Encode(field.GetValue(entityComponentValue))));
                    }

                    // Same [Versionable] filtering for properties.
                    foreach (var property in entityComponentValue.GetType().GetProperties())
                    {
                        if (property.GetCustomAttributes(typeof(VersionableAttribute), true).Length == 0)
                            continue;

                        componentDefinition.Properties.Add(new EntityComponentProperty(EntityComponentPropertyType.Property, property.Name, Encode(property.GetValue(entityComponentValue, null))));
                    }

                    componentDefinition.Properties = componentDefinition.Properties.OrderBy(x => x.Name).ToList();
                }
            }

            // Fix: wrap the stream in "using" so it is closed even if serialization throws
            // (previously only the happy path called Close(), leaking the handle on error).
            using (var fileStream = new FileStream(@"C:\DEV\hotei_scene\scene.hotei", FileMode.Create, FileAccess.Write))
            {
                var stream = new BinarySerializationWriter(fileStream);
                stream.Context.Serializer = Serializer;
                stream.SerializeClass(null, ref entities, ArchiveMode.Serialize);
            }
        }
示例#21
0
 protected override void ComputeParameterHash(BinarySerializationWriter writer)
 {
     writer.Write(Source);
     writer.Write(OutputUrl);
 }
示例#22
0
        /// <summary>
        /// Synchronizes files to an android device: uploads new/changed files, deletes files
        /// no longer in the mapping, and persists a version cache so only deltas are pushed
        /// on the next run.
        /// </summary>
        /// <param name="logger">The logger (may be null).</param>
        /// <param name="device">The device.</param>
        /// <param name="fileMapping">The file mapping (relative target path, source HDD filename).</param>
        /// <param name="androidPath">The android path.</param>
        /// <param name="cacheFile">The cache file.</param>
        public static void Synchronize(Logger logger, string device, Dictionary<string, string> fileMapping, string androidPath, string cacheFile)
        {
            // Ensure android path ends up with directory separator
            // Fix: use ordinal comparison — path separators are not culture-sensitive text (CA1310).
            if (!androidPath.EndsWith("/", StringComparison.Ordinal))
                androidPath = androidPath + "/";

            // Search files
            var currentVersions = fileMapping
                .ToDictionary(x => x.Key, x => new FileVersion(x.Value));

            // Try to read previous cache file
            var previousVersions = new Dictionary<string, FileVersion>();
            try
            {
                using (var file = File.OpenRead(cacheFile))
                {
                    var binaryReader = new BinarySerializationReader(file);
                    binaryReader.Serialize(ref previousVersions, ArchiveMode.Deserialize);
                }
            }
            catch (IOException)
            {
                // Best-effort: a missing or unreadable cache just means everything is re-uploaded.
            }

            var filesToRemove = new List<string>();
            var filesToUpload = new List<string>();

            // Remove unnecessary files (in previousVersions but not in currentVersions)
            foreach (var file in previousVersions.Where(x => !currentVersions.ContainsKey(x.Key)))
            {
                filesToRemove.Add(file.Key);
            }

            // Upload files that are either not uploaded yet, or not up to date
            foreach (var file in currentVersions)
            {
                FileVersion fileVersion;
                if (!previousVersions.TryGetValue(file.Key, out fileVersion)
                    || fileVersion.FileSize != file.Value.FileSize
                    || fileVersion.LastModifiedDate != file.Value.LastModifiedDate)
                {
                    filesToUpload.Add(file.Key);
                }
            }

            // Upload files
            foreach (var file in filesToUpload)
            {
                logger?.Verbose("Copying file {0}", file);
                RunAdb(device, string.Format("push \"{0}\" \"{1}\"", fileMapping[file], androidPath + file.Replace('\\', '/')));
            }

            // Remove files
            foreach (var file in filesToRemove)
            {
                logger?.Verbose("Deleting file {0}", file);
                RunAdb(device, string.Format("shell \"rm {0}\"", androidPath + file.Replace('\\', '/')));
            }

            // Write new cache file
            using (var file = File.Create(cacheFile))
            {
                var binaryWriter = new BinarySerializationWriter(file);
                binaryWriter.Write(currentVersions);
            }
        }
示例#23
0
            /// <summary>
            /// Converts the source sound asset to raw 32-bit float PCM with ffmpeg, compresses
            /// it frame by frame with Celt and saves the resulting <see cref="Sound"/> object.
            /// </summary>
            /// <param name="commandContext">Build command context used to tag the produced data url.</param>
            /// <returns>A completed task holding <see cref="ResultStatus.Successful"/> on success.</returns>
            /// <exception cref="AssetException">ffmpeg is missing or fails to convert the source.</exception>
            protected override Task <ResultStatus> DoCommandOverride(ICommandContext commandContext)
            {
                var assetManager = new ContentManager();

                // Get absolute path of asset source on disk
                var assetDirectory = AssetParameters.Source.GetParent();
                var assetSource    = UPath.Combine(assetDirectory, AssetParameters.Source);

                var installationDir = DirectoryHelper.GetInstallationDirectory("Xenko");
                var binDir          = UPath.Combine(installationDir, new UDirectory("Bin"));

                binDir = UPath.Combine(binDir, new UDirectory("Windows-Direct3D11"));
                var ffmpeg = UPath.Combine(binDir, new UFile("ffmpeg.exe"));

                if (!File.Exists(ffmpeg))
                {
                    throw new AssetException("Failed to compile a sound asset, ffmpeg was not found.");
                }

                // Spatialized sounds are encoded mono, everything else stereo.
                var channels    = AssetParameters.Spatialized ? 1 : 2;
                var tempPcmFile = Path.GetTempFileName();

                // Fix: delete the temporary PCM file in a finally block so it is not leaked
                // when conversion or encoding throws (previously only two paths deleted it).
                try
                {
                    var ret = RunProcessAndGetOutput(ffmpeg, $"-i \"{assetSource}\" -f f32le -acodec pcm_f32le -ac {channels} -ar {AssetParameters.SampleRate} -y \"{tempPcmFile}\"");

                    if (ret != 0)
                    {
                        throw new AssetException($"Failed to compile a sound asset, ffmpeg failed to convert {assetSource}");
                    }

                    var encoder = new Celt(AssetParameters.SampleRate, CompressedSoundSource.SamplesPerFrame, channels, false);

                    var uncompressed = CompressedSoundSource.SamplesPerFrame * channels * sizeof(short); //compare with int16 for CD quality comparison.. but remember we are dealing with 32 bit floats for encoding!!
                    var target       = (int)Math.Floor(uncompressed / (float)AssetParameters.CompressionRatio);

                    var dataUrl  = Url + "_Data";
                    var newSound = new Sound
                    {
                        CompressedDataUrl = dataUrl,
                        Channels          = channels,
                        SampleRate        = AssetParameters.SampleRate,
                        StreamFromDisk    = AssetParameters.StreamFromDisk,
                        Spatialized       = AssetParameters.Spatialized,
                    };

                    //make sure we don't compress celt data
                    commandContext.AddTag(new ObjectUrl(UrlType.ContentLink, dataUrl), disableCompressionSymbol);

                    var frameSize = CompressedSoundSource.SamplesPerFrame * channels;

                    using (var reader = new BinaryReader(new FileStream(tempPcmFile, FileMode.Open, FileAccess.Read)))
                    using (var outputStream = ContentManager.FileProvider.OpenStream(dataUrl, VirtualFileMode.Create, VirtualFileAccess.Write, VirtualFileShare.Read, StreamFlags.Seekable))
                    {
                        var writer = new BinarySerializationWriter(outputStream);

                        var outputBuffer = new byte[target];
                        var buffer       = new float[frameSize];
                        var count        = 0;

                        // Fix: test for end-of-stream before reading so an empty PCM file no longer
                        // throws EndOfStreamException; the flush-then-read ordering is unchanged.
                        while (reader.BaseStream.Position < reader.BaseStream.Length)
                        {
                            if (count == frameSize) //flush
                            {
                                var len = encoder.Encode(buffer, outputBuffer);
                                // Each packet is stored as a 16-bit length prefix followed by the payload.
                                writer.Write((short)len);
                                outputStream.Write(outputBuffer, 0, len);

                                count = 0;
                                Array.Clear(buffer, 0, frameSize);

                                newSound.NumberOfPackets++;
                                newSound.MaxPacketLength = Math.Max(newSound.MaxPacketLength, len);
                            }

                            buffer[count] = reader.ReadSingle();
                            count++;
                        }

                        // Flush the final (possibly partial) frame; the buffer tail is zero-padded.
                        if (count > 0) //flush
                        {
                            var len = encoder.Encode(buffer, outputBuffer);
                            writer.Write((short)len);
                            outputStream.Write(outputBuffer, 0, len);

                            newSound.NumberOfPackets++;
                            newSound.MaxPacketLength = Math.Max(newSound.MaxPacketLength, len);
                        }
                    }

                    assetManager.Save(Url, newSound);

                    return(Task.FromResult(ResultStatus.Successful));
                }
                finally
                {
                    File.Delete(tempPcmFile);
                }
            }