Example #1
public async Task OnActionExecutionAsync(ActionExecutingContext context, ActionExecutionDelegate next)
        {
            // Read the Request-Id header so we can assign it to the activity.
            string parentOperationId = context.HttpContext.Request.Headers["Request-Id"];


            OpenActivityOptions options = default;

            // Set the parent id.
            if (!string.IsNullOrEmpty(parentOperationId))
            {
                options.SyntheticParentId = parentOperationId;
            }
            else
            {
                options.IsSyntheticRootId = true;
            }

            // Process cross-context properties (aka baggage).
            string correlationContext = context.HttpContext.Request.Headers["Correlation-Context"];

            if (!string.IsNullOrEmpty(correlationContext))
            {
                options.Properties = ParseCorrelationContext(correlationContext)?.ToArray();
            }

            var request = context.HttpContext.Request;

            using (var activity = logger.Default.OpenActivity(Semantic("Request", ("Path", request.Path),
                                                                       ("Query", request.QueryString), ("Method", request.Method)), options))
            {
                try
                {
                    await next();

                    var response = context.HttpContext.Response;

                    if (response.StatusCode >= 200 && response.StatusCode <= 299)
                    {
                        // Success.
                        activity.SetOutcome(LogLevel.Info, Semantic("Success", ("StatusCode", response.StatusCode)));
                    }
                    else
                    {
                        // Failure.
                        activity.SetOutcome(LogLevel.Warning, Semantic("Failure", ("StatusCode", response.StatusCode)));
                    }
                }
                catch (Exception e)
                {
                    activity.SetException(e);
                }
            }
        }
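The method above matches the IAsyncActionFilter contract from ASP.NET Core MVC, so the surrounding class (not shown here) can be registered as a global filter. A minimal registration sketch, assuming the class is named RequestLoggingFilter and receives its logger through the constructor; both the name and the constructor shape are assumptions, not taken from the snippet:

        // ConfigureServices: register the hypothetical RequestLoggingFilter for every controller.
        // Filters.Add<T> lets MVC construct the filter, resolving constructor arguments from DI.
        public void ConfigureServices(IServiceCollection services)
        {
            services.AddControllers(options =>
            {
                options.Filters.Add<RequestLoggingFilter>();
            });
        }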
Example #2
        public void Put(int id, [FromBody] string value)
        {
            if (value.Length > 5)
            {
                throw new ArgumentOutOfRangeException(nameof(value));
            }

            logSource.Debug.EnabledOrNull?.Write(Semantic("SetValue", ("Id", id), ("Value", value)));

            this.values[id] = value;
        }
Example #3
        public IActionResult Post(int id, [FromBody] string value)
        {
            if (value.Length > 5)
            {
                throw new ArgumentOutOfRangeException(nameof(value));
            }

            logSource.Debug.EnabledOrNull?.Write(Semantic("AddValue", ("Id", id), ("Value", value)));

            if (!this.values.TryAdd(id, value))
            {
                // Surface the duplicate-id case as HTTP 409 rather than a silent success.
                return this.Conflict();
            }

            return this.Ok();
        }
Example #4
        private void AddSemantic(Semantic s)
        {
            this.schema.Add(s);
            var p = new NamedPattern(this.property, this.compiledPattern);

            this.composer.Add(p);
        }
Example #5
        /*--------------------------------------------------------------------------------------------*/
        private static void InsertLexAndSemForSynSet(ISession pSess, string pSynSetId, SynSet pSynSet)
        {
            Synset dbSynSet = SynsetCache[pSynSetId];
            List <LexicalRelation> lexRels = pSynSet.GetLexicallyRelated();

            foreach (LexicalRelation lr in lexRels)
            {
                var dbLex = new Lexical();
                dbLex.Synset       = dbSynSet;
                dbLex.Word         = WordCache[dbLex.Synset.SsId + "|" + lr.FromWord];
                dbLex.RelationId   = (byte)lr.Relation;
                dbLex.TargetSynset = SynsetCache[lr.ToSyn.ID];
                dbLex.TargetWord   = WordCache[dbLex.TargetSynset.SsId + "|" + lr.ToWord];
                pSess.Save(dbLex);
            }

            foreach (WordNetEngine.SynSetRelation rel in pSynSet.SemanticRelations)
            {
                Set <SynSet> relSet = pSynSet.GetRelatedSynSets(rel, false);

                foreach (SynSet rs in relSet)
                {
                    var dbSem = new Semantic();
                    dbSem.Synset       = dbSynSet;
                    dbSem.RelationId   = (byte)rel;
                    dbSem.TargetSynset = SynsetCache[rs.ID];
                    pSess.Save(dbSem);
                }
            }
        }
Example #6
 internal override void SemanticValidation(Semantic.SemanticContext semanticContext)
 {
     foreach (var expr in this.Exprlist)
     {
         expr.SemanticValidation(semanticContext);
     }
 }
Example #7
 public void analisaComandos(Lexicon lexicon, Semantic semantic)
 {
     if (!isErrorToken(token) && token.simbolo == "sinicio")
     {
         updateToken(lexicon);
         analisaComandoSimples(lexicon, semantic);
         while (token.simbolo != "sfim")
         {
             if (!isErrorToken(token) && token.simbolo == "sponto_virgula")
             {
                 updateToken(lexicon);
                 if (!isErrorToken(token) && token.simbolo != "sfim")
                 {
                     analisaComandoSimples(lexicon, semantic);
                 }
             }
             else
             {
                 line = token.line;
                 //message = "Ponto e vírgula não encontrado";
                 message = "'" + token.lexema + "'" + " encontrado, mas era esperado ';'";
                 throw new SyntacticException(token.line, message);
             }
         }
         updateToken(lexicon);
     }
     else
     {
         line = token.line;
         //message = "Inicio não encontrado";
         message = "'" + token.lexema + "'" + " encontrado, mas era esperado 'inicio'";
         throw new SyntacticException(token.line, message);
     }
 }
Example #8
 public void SetSource(Semantic semantic, Vector2[] value)
 {
     removeSource(semantic);
     Sources.Add(new Source {
         Semantic = semantic, Number = 0, DataVector2 = value
     });
 }
Example #9
        static void Main(string[] args)
        {
            // Depending on the command-line arguments, parse either the file named by the
            // first argument or standard input.
            ICharStream input = args.Length == 1 ? (ICharStream) new ANTLRFileStream(args[0])
                                                 : (ICharStream) new ANTLRReaderStream(Console.In);
            MyGrammarLexer    lexer   = new MyGrammarLexer(input);
            CommonTokenStream tokens  = new CommonTokenStream(lexer);
            MyGrammarParser   parser  = new MyGrammarParser(tokens);
            ITree             program = (ITree)parser.execute().Tree;
            //AstNodePrinter.Print(program);

            Semantic semantic = new Semantic(program);

            AstNodePrinter.Print(program);

            Codegenerator gen = new Codegenerator();

            dynamic root   = program;
            string  output = gen.Start(root, semantic.Context.MethodList, semantic.Context.Vars);

            Console.WriteLine(output);

            FileWorker file = new FileWorker();

            file.WriteToFile(output);

            Console.Read();
        }
Example #10
        public static InputOffset CreateWithSemantic(string name, Semantic semantic, ulong dataSource, ulong set = 0, ulong offset = 0)
        {
            switch (semantic)
            {
            case Semantic.VERTEX:
                return(new InputOffset("VERTEX", "#" + name + "-VERTEX", offset));

            case Semantic.NORMAL:
                return(new InputOffset("NORMAL", "#" + name + "-Normal" + dataSource, offset));

            case Semantic.TEXCOORD:
                return(new InputOffset("TEXCOORD", "#" + name + "-UV" + dataSource, offset, set));

            case Semantic.COLOR:
                return(new InputOffset("COLOR", "#" + name + "-VERTEX_COLOR" + dataSource, offset, set));

            case Semantic.JOINT:
                return(new InputOffset("JOINT", "#" + name + "-Joints", offset));

            case Semantic.WEIGHT:
                return(new InputOffset("WEIGHT", "#" + name + "-Weights", offset));

            case Semantic.INV_BIND_MATRIX:
                return(new InputOffset("INV_BIND_MATRIX", "#" + name + "-Matrices", offset));
            }

            return(null);
        }
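For illustration, a hedged usage sketch of the factory above; the mesh name and numbers are invented and only show how the TEXCOORD branch composes its source reference and forwards the optional set and offset arguments:

            // Hypothetical call: yields an InputOffset named "TEXCOORD" whose source is
            // "#boxMesh-UV0" (dataSource 0), placed at offset 2 in set 0.
            InputOffset texcoordInput = InputOffset.CreateWithSemantic("boxMesh", Semantic.TEXCOORD, 0, set: 0, offset: 2);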
Example #11
        public void analisaEtVariaveis(Lexicon lexicon, Semantic semantic)
        {
            quantDecleredVars = 0;

            if (!isErrorToken(token) && token.simbolo == "svar")
            {
                updateToken(lexicon);
                if (!isErrorToken(token) && token.simbolo == "sidentificador")
                {
                    while (!isErrorToken(token) && token.simbolo == "sidentificador")
                    {
                        analisaVariaveis(lexicon, semantic);
                        if (!isErrorToken(token) && token.simbolo == "sponto_virgula")
                        {
                            updateToken(lexicon);
                        }
                        else
                        {
                            line = token.line;
                            //message = "Ponto e vírgula não encontrado";
                            message = "'" + token.lexema + "'" + " encontrado, mas era esperado ';'";
                            throw new SyntacticException(token.line, message);
                        }
                    }
                }
                else
                {
                    line = token.line;
                    //message = "Identificador não encontrado";
                    message = "'" + token.lexema + "'" + " encontrado, mas era esperado 'identificador'";
                    throw new SyntacticException(token.line, message);
                }
            }
        }
Example #12
        public void analisaComandoSimples(Lexicon lexicon, Semantic semantic)
        {
            isReturnDeclared = false;

            if (!isErrorToken(token) && token.simbolo == "sidentificador")
            {
                AnalisaAtribChprocedimento(lexicon, semantic);
            }
            else if (!isErrorToken(token) && token.simbolo == "sse")
            {
                analisaSe(lexicon, semantic);
            }
            else if (!isErrorToken(token) && token.simbolo == "senquanto")
            {
                analisaEnquanto(lexicon, semantic);
            }
            else if (!isErrorToken(token) && token.simbolo == "sleia")
            {
                analisaLeia(lexicon, semantic);
            }
            else if (!isErrorToken(token) && token.simbolo == "sescreva")
            {
                analisaEscreva(lexicon, semantic);
            }
            else
            {
                analisaComandos(lexicon, semantic);
            }
        }
Example #13
 internal override void SemanticValidation(Semantic.SemanticContext semanticContext)
 {
     /* if ( semanticContext.IdExistInSameScope(this.Id))
     {
         throw new Semantic.SemanticValidationException("Redefinición de id en el mismo alcance: " + this.Id);
     }*/
 }
Example #14
        public void AnalisaAtribChprocedimento(Lexicon lexicon, Semantic semantic)
        {
            string flagFuncName;

            atribVarName = token.lexema;

            if (semantic.pesquisaProcTabela(token.lexema) != 0)
            {
                analisaChamadaProcedimento(lexicon);
            }
            else
            {
                int ret = semantic.validaAtribuicao(token.lexema);
                if (ret != -1)
                {
                    flagFuncName = token.lexema;
                    updateToken(lexicon);

                    if (FunctionSatck.Count > 0 && flagFuncName.Equals(FunctionSatck.Peek())) // function return assignment
                    {
                        analisaAtribuicao(lexicon, semantic);
                        if (semantic.validaCompatibilidadeTipo(postfix.convertedExpression) != ret)
                        {
                            line    = token.line;
                            message = "Tipos nao compativeis";
                            throw new SemanticException(token.line, "Tipos nao compativeis");
                        }
                        isReturnDeclared = true;
                        ReturnDeclaredCount++;
                        flagFuncName = "";
                    }

                    else if (!isErrorToken(token) && token.simbolo == "satribuicao")
                    {
                        analisaAtribuicao(lexicon, semantic);
                        if (semantic.validaCompatibilidadeTipo(postfix.convertedExpression) != ret)
                        {
                            line    = token.line;
                            message = "Tipos nao compativeis";
                            throw new SemanticException(token.line, "Tipos nao compativeis");
                        }
                    }
                    else
                    {
                        if (semantic.pesquisaFuncTabela(flagFuncName) != 0)
                        {
                            line    = token.line;
                            message = "Chamada de função inválida";
                            throw new SemanticException(token.line, "Chamada de função inválida");
                        }
                    }
                }
                else
                {
                    line    = token.line;
                    message = "Formato da Atribuicao ou da chamada de Procedimento incorreto";
                    throw new SemanticException(token.line, "Formato da Atribuicao ou da chamada de Procedimento incorreto");
                }
            }
        }
Example #15
        private static TelemetryKey register(String code, Semantic semantic, Subsystem subsystem, int subsystemId = 0)
        {
            TelemetryKey key = new TelemetryKey(subsystem, code);

            TelemetryFields.Add(key, TelemetryField.Create(code, semantic, subsystem, subsystemId));
            return(key);
        }
Example #16
        public static Object GetSemanticObject(this VertexDivided value, string _semantic)
        {
            Type type = value.GetType();

            // Get fieldinfo for this type
            FieldInfo[] fieldInfos = type.GetFields();
            Object      ret        = null;

            foreach (FieldInfo fi in fieldInfos)
            {
                Semantic attrib = (fi.GetCustomAttributes(typeof(SemanticMatch), false) as SemanticMatch[]).First().InputSemantic;
                if (attrib.ToString() == _semantic)
                {
                    ret = fi.GetValue(value);
                    return(ret);
                }
            }

            // No field carried a matching semantic; ret is still null at this point.
            try
            {
                return(ret);
            }
            catch (Exception ex)
            {
                throw new SystemException(ex.Message);
            }
        }
Example #17
        public void analisaBloco(Lexicon lexicon, Semantic semantic)
        {
            updateToken(lexicon);
            analisaEtVariaveis(lexicon, semantic);

            if (quantDecleredVars > 0)
            {
                codeGenerator.generate("", "ALLOC", quantAllocatedVars.ToString(), quantDecleredVars.ToString());
                quantAllocatedVars += quantDecleredVars;
            }
            localDecleredVars.Push(quantDecleredVars);
            localAllocatedVars.Push(quantAllocatedVars);

            analisaSubrotinas(lexicon, semantic);
            analisaComandos(lexicon, semantic);

            if (localDecleredVars.Count > 0)
            {
                quantDecleredVars  = localDecleredVars.Pop();
                quantAllocatedVars = localAllocatedVars.Pop();

                quantAllocatedVars -= quantDecleredVars;
                if (quantDecleredVars > 0)
                {
                    codeGenerator.generate("", "DALLOC", quantAllocatedVars.ToString(), quantDecleredVars.ToString());
                }
            }
        }
Example #18
        public void analisaVariaveis(Lexicon lexicon, Semantic semantic)
        {
            flagVar = "";
            int cont = 0;

            do
            {
                if (!isErrorToken(token) && token.simbolo == "sidentificador")
                {
                    if (semantic.pesquisaDuplicVarTabela(token.lexema) == 0) // no duplicate identifier
                    {
                        if (cont == 0)
                        {
                            flagVar = token.lexema;
                        }
                        semantic.insereTabela(token.lexema, "var", 0, quantAllocatedVars + quantDecleredVars);
                        quantDecleredVars++;
                        updateToken(lexicon);
                        if (!isErrorToken(token) && (token.simbolo == "svirgula" || token.simbolo == "sdoispontos"))
                        {
                            if (!isErrorToken(token) && token.simbolo == "svirgula")
                            {
                                updateToken(lexicon);
                                if (!isErrorToken(token) && token.simbolo == "sdoispontos")
                                {
                                    line = token.line;
                                    //message = "Dois pontos não encontrado";
                                    message = "'" + token.lexema + "'" + " encontrado, mas era esperado ':'";
                                    throw new SyntacticException(token.line, message);
                                }
                            }
                        }
                        else
                        {
                            line = token.line;
                            //message = "Vírgula ou dois pontos não encontrado";
                            message = "'" + token.lexema + "'" + " encontrado, mas era esperado ', ou :'";
                            throw new SyntacticException(token.line, message);
                        }
                    }
                    else // duplicate identifier found
                    {
                        line    = token.line;
                        message = "Variavel com identificador duplicado";
                        throw new SemanticException(token.line, "Variavel com identificador duplicado");
                    }
                }
                else
                {
                    line = token.line;
                    //message = "Identificador não encontrado";
                    message = "'" + token.lexema + "'" + " encontrado, mas era esperado 'identificador'";
                    throw new SyntacticException(token.line, message);
                }
                cont++;
            } while (token.simbolo != "sdoispontos");
            updateToken(lexicon);
            analisaTipo(lexicon, semantic);
        }
Example #19
        public override int GetHashCode()
        {
            int hashCode = 396976950;

            hashCode = hashCode * -1521134295 + Semantic.GetHashCode();
            hashCode = hashCode * -1521134295 + Stage.GetHashCode();
            return(hashCode);
        }
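The multiply-by-prime constants above are the combination pattern Visual Studio generates. If the project targets .NET Core 2.1 / .NET Standard 2.1 or later (an assumption about the target framework), the same two-field hash can be written with the framework helper:

        // Equivalent intent using System.HashCode (available since .NET Core 2.1).
        public override int GetHashCode()
        {
            return HashCode.Combine(Semantic, Stage);
        }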
Example #20
        public VertexStream(int _vertexCount, bool canRead, bool canWrite, Semantic _vertexDefinition)
        {
            vertexDefinition = _vertexDefinition.reorderDefinition();
            vertexType = DynamicVertex.CreateVertex(vertexDefinition);
            dataStream = new DataStream(vertexType.SizeOf()*_vertexCount, canRead, canWrite);

            createReadWriteMethods();
        }
Example #21
 public Text getText(Semantic sem)
 {
     if (!textMap.ContainsKey(sem))
     {
         return(null);
     }
     return(textMap[sem]);
 }
Example #22
 public SLRParser(Semantic semantic, Syntactic syntactic)
 {
     _tableParser = new TableParser();
     Generated    = false;
     IsSLR        = true;
     _semantic    = semantic;
     _syntactic   = syntactic;
 }
Example #23
        public void analisaSe(Lexicon lexicon, Semantic semantic)
        {
            int auxrot1, auxrot2;

            updateToken(lexicon);
            postfix.clearExpression();
            analisaExpressao(lexicon, semantic);
            if (semantic.validaCompatibilidadeTipo(postfix.convertedExpression) == 0)
            {
                if (!isErrorToken(token) && token.simbolo == "sentao")
                {
                    auxrot1 = rotulo;
                    codeGenerator.generate("", "JMPF", auxrot1.ToString(), "");
                    rotulo++;

                    bool ifReturnDeclared = false, elseReturnDeclared = false;

                    updateToken(lexicon);
                    analisaComandoSimples(lexicon, semantic);

                    ifReturnDeclared = isReturnDeclared;

                    auxrot2 = rotulo;
                    codeGenerator.generate("", "JMP", auxrot2.ToString(), "");
                    rotulo++;

                    codeGenerator.generate(auxrot1.ToString(), "NULL", "", "");

                    if (!isErrorToken(token) && token.simbolo == "ssenao")
                    {
                        updateToken(lexicon);
                        analisaComandoSimples(lexicon, semantic);

                        elseReturnDeclared = isReturnDeclared;
                    }
                    else
                    {
                        elseReturnDeclared = false;
                    }
                    isReturnDeclared = ifReturnDeclared && elseReturnDeclared;

                    codeGenerator.generate(auxrot2.ToString(), "NULL", "", "");
                }
                else
                {
                    line = token.line;
                    //message = "Então não encontrado";
                    message = "'" + token.lexema + "'" + " encontrado, mas era esperado 'entao'";
                    throw new SyntacticException(token.line, message);
                }
            }
            else
            {
                line    = token.line - 1;
                message = "Expressao do 'se' nao booleana";
                throw new SemanticException(token.line, "Expressao do 'se' nao booleana");
            }
        }
Example #24
        public void analisaEscreva(Lexicon lexicon, Semantic semantic)
        {
            updateToken(lexicon);
            if (!isErrorToken(token) && token.simbolo == "sabre_parenteses")
            {
                updateToken(lexicon);
                if (!isErrorToken(token) && token.simbolo == "sidentificador")
                {
                    if (semantic.pesquisaDeclVarFuncTabela(token.lexema) != 0 && semantic.validaEscrevaELeia(token.lexema) != 0)
                    {
                        string readVarName = token.lexema;
                        updateToken(lexicon);
                        if (!isErrorToken(token) && token.simbolo == "sfecha_parenteses")
                        {
                            if (semantic.getPesquisaTabela(readVarName).tipo == "funcInteiro" || semantic.getPesquisaTabela(readVarName).tipo == "funcBooleano")
                            {
                                codeGenerator.generate("", "CALL", semantic.getPesquisaTabela(readVarName).rotulo.ToString(), "");
                                codeGenerator.generate("", "LDV", "0", "");
                            }
                            else
                            {
                                codeGenerator.generate("", "LDV", semantic.getPesquisaTabela(readVarName).rotulo.ToString(), "");
                            }
                            codeGenerator.generate("", "PRN", "", "");

                            updateToken(lexicon);
                        }
                        else
                        {
                            line = token.line;
                            //message = "Fecha parentes não encontrado";
                            message = "'" + token.lexema + "'" + " encontrado, mas era esperado ')'";
                            throw new SyntacticException(token.line, message);
                        }
                    }
                    else
                    {
                        line    = token.line;
                        message = "Formato Escreva incorreto";
                        throw new SemanticException(token.line, "Formato Escreva incorreto");
                    }
                }
                else
                {
                    line = token.line;
                    //message = "Identificador não encontrado";
                    message = "'" + token.lexema + "'" + " encontrado, mas era esperado 'identificador'";
                    throw new SyntacticException(token.line, message);
                }
            }
            else
            {
                line = token.line;
                //message = "Abre parentes não encontrado";
                message = "'" + token.lexema + "'" + " encontrado, mas era esperado '('";
                throw new SyntacticException(token.line, message);
            }
        }
Example #25
        public void SemanticUnderStandTest()
        {
            var accessToken       = AccessTokenContainer.GetToken(_appId);
            var result            = Semantic.SemanticUnderStand(accessToken, "百度一下明天从北京到上海的南航机票", "flight,search", "北京", _appId);
            SearchResultJson json = new SearchResultJson();

            Assert.IsNotNull(result.query);
            Assert.AreEqual("百度一下明天从北京到上海的南航机票", result.query);
        }
Example #26
        private void removeSource(Semantic semantic)
        {
            var old = Sources.Find(o => o.Semantic == semantic);

            if (old != null)
            {
                Sources.Remove(old);
            }
        }
Example #27
        public Destination(Replica parent, Semantic semantic) : base()
        {
            parent.OnFreeze   += Parent_OnFreeze;
            parent.OnUnfreeze += Parent_OnUnfreeze;
            parent.OnStart    += Parent_OnStart;
            parent.OnInterval += Parent_OnInterval;

            Semantic = semantic;
        }
Example #28
 public LALRParser(Semantic semantic, Syntactic syntactic)
 {
     Generated    = false;
     IsLALR       = true;
     _semantic    = semantic;
     _syntactic   = syntactic;
     _tableParser = new TableParser();
     states       = new List <LRState>();
 }
Example #29
        public void SemanticUnderStandTest()
        {
            var accessToken       = AccessTokenContainer.GetToken(_appId);
            var result            = Semantic.SemanticSend(accessToken, SemanticPostData);
            SearchResultJson json = new SearchResultJson();

            Assert.IsNotNull(result.query);
            Assert.AreEqual("附近有什么川菜馆", result.query);
        }
Example #30
 internal override void SemanticValidation(Semantic.SemanticContext semanticContext)
 {
     Expression.SemanticValidation(semanticContext);
     if (!(Expression.GetIRType() is BoolType))
     {
         throw new Semantic.SemanticValidationException("No se puede negar");
     }
     else
     {
         returnType = new BoolType(); // the expression must be negatable
     }
 }
Example #31
 protected VertexStream(SerializationInfo info, StreamingContext context)
 {
     vertexDefinition = (Semantic)info.GetValue("vertexDefinition", typeof(Semantic));
     vertexType = DynamicVertex.CreateVertex(vertexDefinition);
     buffer = (byte[])info.GetValue("buffer", typeof(byte[]));
     dataStream = new DataStream(buffer.Length, true, true);
     dataStream.Write(buffer, 0, buffer.Length);
     createReadWriteMethods();
 }
Example #32
        /// <summary>
        /// Parse stream output declarations.
        /// Format is "[slot :] semantic[index][.mask] ; ...".
        /// </summary>
        /// <param name="entries">The parsed entries.</param>
        /// <param name="strides">The output strides.</param>
        /// <param name="streams">The output declarations to parse.</param>
        public static void Parse(IList <ShaderStreamOutputDeclarationEntry> entries, out int[] strides, string[] streams, IList <Variable> fields)
        {
            strides = new int[4];

            var fieldsBySemantic = fields.ToDictionary(x => Semantic.Parse(x.Qualifiers.OfType <Semantic>().Single().Name));

            for (int streamIndex = 0; streamIndex < streams.Length; ++streamIndex)
            {
                // Parse multiple declarations separated by semicolon
                var stream = streams[streamIndex];
                foreach (var streamOutput in stream.Split(';'))
                {
                    // Parse a single declaration: "[slot :] semantic[index][.mask]"
                    var match = streamOutputRegex.Match(streamOutput);
                    if (!match.Success)
                    {
                        throw new InvalidOperationException("Could not parse stream output.");
                    }

                    var streamOutputDecl = new ShaderStreamOutputDeclarationEntry();

                    // Split semantic into (name, index)
                    var semantic = Semantic.Parse(match.Groups[3].Value);

                    streamOutputDecl.SemanticName  = semantic.Key;
                    streamOutputDecl.SemanticIndex = semantic.Value;
                    //if (streamOutputDecl.SemanticName == "$SKIP")
                    //    streamOutputDecl.SemanticName = null;

                    var matchingField     = fieldsBySemantic[semantic];
                    var matchingFieldType = matchingField.Type.TypeInference.TargetType ?? matchingField.Type;

                    if (matchingFieldType is VectorType)
                    {
                        streamOutputDecl.ComponentCount = (byte)((VectorType)matchingFieldType).Dimension;
                    }
                    else if (matchingFieldType is ScalarType)
                    {
                        streamOutputDecl.ComponentCount = 1;
                    }
                    else
                    {
                        throw new InvalidOperationException(string.Format("Could not recognize type of stream output for {0}.", matchingField));
                    }

                    var mask = match.Groups[5].Value;
                    ParseMask(mask, ref streamOutputDecl.StartComponent, ref streamOutputDecl.ComponentCount);

                    byte.TryParse(match.Groups[2].Value, out streamOutputDecl.OutputSlot);

                    streamOutputDecl.Stream = streamIndex;

                    strides[streamOutputDecl.OutputSlot] += streamOutputDecl.ComponentCount * sizeof(float);
                    entries.Add(streamOutputDecl);
                }
            }
        }
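To make the "[slot :] semantic[index][.mask] ; ..." format from the XML comment concrete, here is a hedged example of the strings the streams parameter is expected to carry; the semantic names are illustrative only and must match semantics actually declared on the fields argument:

            // Stream 0 outputs SV_POSITION plus the xy components of TEXCOORD0 into slot 0;
            // stream 1 outputs the xyz components of NORMAL0 (the slot defaults to 0 when omitted).
            string[] streams =
            {
                "0: SV_POSITION; 0: TEXCOORD0.xy",
                "NORMAL0.xyz"
            };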
Example #33
 internal override void SemanticValidation(Semantic.SemanticContext semanticContext)
 {
     /* Expression.SemanticValidation(semanticContext);
     if (!(Expression.GetIRType() is NumericType))
     {
         throw new Semantic.SemanticValidationException("No se puede PostIncrement");
     }
     else
         returnType = new IntType();*/
 }
Example #34
 internal override void SemanticValidation(Semantic.SemanticContext semanticContext)
 {
     Expression.SemanticValidation(semanticContext);
     if (!(Expression.GetIRType() is NumericType))
     {
         throw new Semantic.SemanticValidationException("No se puede negar");
     }
     else
         returnType = new PointerType();
 }
Example #35
        protected Keyword KeywordOf(Semantic semantic)
        {
            var defined = Require(semantic);
            var keyword = Semantics.Values.FirstOrDefault(builtin => (builtin is Keyword) && Equals(((Keyword)builtin).Semantic, defined)) as Keyword;

            if (keyword == null)
            {
                throw Error("missing reserved keyword definition: {0}", defined.Method.Name);
            }
            return(keyword);
        }
Example #36
        public TablesForm(Syntactic syntactic, Semantic semantic)
        {
            _semantic   = semantic;
            _syntactic  = syntactic;
            _llanalyser = new LLParser(_semantic, _syntactic);
            _lrParser   = new LRParser(_semantic, _syntactic);
            _slrParser  = new SLRParser(_semantic, _syntactic);
            _lalrParser = new LALRParser(_semantic, _syntactic);

            InitializeComponent();
        }
Example #37
        public void analisaTermo(Lexicon lexicon, Semantic semantic)
        {
            analisaFator(lexicon, semantic);

            while (token.simbolo == "smult" || token.simbolo == "sdiv" || token.simbolo == "se")
            {
                expression.Add(token.lexema);
                updateToken(lexicon);
                analisaFator(lexicon, semantic);
            }
        }
Example #38
        internal override void SemanticValidation(Semantic.SemanticContext semanticContext)
        {
            Left.SemanticValidation(semanticContext);
            Right.SemanticValidation(semanticContext);

            if (!(Left.GetIRType() is NumericType && Right.GetIRType() is NumericType))
            {
                throw new Semantic.SemanticValidationException("No se puede operar");
            }
            else
            {
                returnType = Left.GetIRType();
            }
        }
Example #39
        internal override void SemanticValidation(Semantic.SemanticContext semanticContext)
        {
            Left.SemanticValidation(semanticContext);
            Right.SemanticValidation(semanticContext);

            if (!(Left.GetIRType() is NumericType && Right.GetIRType() is NumericType))
            {
                throw new Semantic.SemanticValidationException("No se puede asignar");
            }
            else
            {
                returnType = Left.GetIRType(); // evaluate which type could be assigned depending on its size
            }
        }
Example #40
        private void VerifyCssSelector(string selector, Semantic.LineInfo lineInfo)
        {
            if (string.IsNullOrEmpty(selector))
                return;

            HtmlDocument doc = new HtmlDocument();
            doc.LoadHtml("<html></html>");
            try
            {
                doc.DocumentNode.QuerySelector(selector);
            }
            catch(Exception e)
            {
                Errors.Add(new BadCssSelector(e.Message, lineInfo));
            }
        }
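The method above leans on HtmlAgilityPack plus a CSS-selector extension providing QuerySelector (presumably Fizzler.Systems.HtmlAgilityPack; the exact package is not visible in the snippet). A standalone sketch of the same validation trick, under that assumption:

 using System;
 using HtmlAgilityPack;
 using Fizzler.Systems.HtmlAgilityPack; // assumed provider of the QuerySelector extension

 class SelectorCheck
 {
     static void Main()
     {
         var doc = new HtmlDocument();
         doc.LoadHtml("<html></html>");
         try
         {
             // "div[" is deliberately malformed; the selector engine throws while parsing it,
             // which is exactly the condition VerifyCssSelector records as a BadCssSelector.
             doc.DocumentNode.QuerySelector("div[");
             Console.WriteLine("Selector parsed.");
         }
         catch (Exception e)
         {
             Console.WriteLine("Bad selector: " + e.Message);
         }
     }
 }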
Example #41
 internal override void SemanticValidation(Semantic.SemanticContext semanticContext)
 {
     /*this.FunctionDeclaration = semanticContext.FunctionDeclarations.Where(f => {
         bool match = true;
         match = f.Id == this.Id && f.Parameters.Count == this.Parameters.Exprlist.Count;
         if ( match )
         {
             for (int i = 0; i < f.Parameters.Count; i++)
             {
                 match = f.Parameters[i].Type == this.Parameters.Exprlist[i].GetIRType();
                 if (!match)
                     break;
             }
         }
         return match;
     }).First();*/
 }
Example #42
 internal override void SemanticValidation(Semantic.SemanticContext semanticContext)
 {
 }
Example #43
 internal override void SemanticValidation(Semantic.SemanticContext semanticContext)
 {
     this.Expression.SemanticValidation(semanticContext);
 }
Example #44
 internal override void SemanticValidation(Semantic.SemanticContext semanticContext)
 {
     //throw new NotImplementedException();
 }
Example #45
 internal override void SemanticValidation(Semantic.SemanticContext semanticContext)
 {
     returnType = new FloatType();
 }
Example #46
 public static void SetSemantic(string _semantic_string)
 {
     List<string> semantic_Keywords = _semantic_string.Split(' ').ToList();
     List<Semantic> list_keywords = new List<Semantic>();
     foreach (string keyword in semantic_Keywords)
     {
         list_keywords.Add((Semantic)Enum.Parse(typeof(Semantic), keyword));
     }
     semantic = list_keywords[0];
     for (int i = 1; i < list_keywords.Count; i++)
     {
         semantic |= list_keywords[i];
     }
 }
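A hedged usage sketch for the keyword parser above, assuming Semantic is a [Flags]-style enum whose member names match the space-separated keywords; the member names used below are guesses for illustration only:

     // Hypothetical keywords; each token must name a Semantic member, and the parsed
     // values are OR-ed together into the static 'semantic' field.
     SetSemantic("POSITION NORMAL TEXCOORD");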
Example #47
 public static void SetSemantic(Semantic _semantic)
 {
     semantic = _semantic;
 }
Example #48
        public static void ComputeVertexData(IMesh _mMesh, IISkinContextData _skin, Semantic _semantic, string _filepath)
        {
            Dictionary<int, VertexUndivided> verticesFullData = new Dictionary<int, VertexUndivided>();
            List<FaceData> facesFullData = new List<FaceData>();
            List<List<VertexDivided>> verticesParsedData = new List<List<VertexDivided>>();

            IList<IFace> faces = _mMesh.Faces;
            IList<ITVFace> Tfaces = _mMesh.TvFace;
            IList<IPoint3> vertices = _mMesh.Verts;
            IList<IPoint3> Tvertices = _mMesh.TVerts;

            /*
            foreach (IPoint3 _v in vertices)
            {
                float temp = _v.Y;
                _v.Y = -_v.Z;
                _v.Z = temp;
            }
            */
            for (int _fID = 0; _fID < faces.Count; _fID++)
            {
                FaceData _face = new FaceData((int)faces[_fID].SmGroup);

                // vectors are inverted to make up for max being clockwise
                Vector3 A_B = vertices[(int)faces[_fID].GetVert(1)].convertToVector3() - vertices[(int)faces[_fID].GetVert(0)].convertToVector3();
                Vector3 A_C = vertices[(int)faces[_fID].GetVert(2)].convertToVector3() - vertices[(int)faces[_fID].GetVert(0)].convertToVector3();
                Vector3 U = Tvertices[(int)Tfaces[_fID].GetTVert(1)].convertToVector3() - Tvertices[(int)Tfaces[_fID].GetTVert(0)].convertToVector3();
                Vector3 V = Tvertices[(int)Tfaces[_fID].GetTVert(2)].convertToVector3() - Tvertices[(int)Tfaces[_fID].GetTVert(0)].convertToVector3();

                Vector3 normUnsure = Vector3.Cross(A_B, A_C);
                normUnsure.Normalize();

                float area = U.X * V.Y - U.Y * V.X;
                int sign = area < 0 ? -1 : 1;
                Vector3 tangent = new Vector3(0, 0, 1);
                tangent.X = A_B.X * V.Y - U.Y * A_C.X;
                tangent.Y = A_B.Y * V.Y - U.Y * A_C.Y;
                tangent.Z = A_B.Z * V.Y - U.Y * A_C.Z;
                tangent.Normalize();
                tangent *= sign;

                for (int i = 0; i < 3; i++)
                {
                    _face.vertices.Add((int)faces[_fID].GetVert(i));

                    if (verticesFullData.ContainsKey((int)faces[_fID].GetVert(i)))
                    {
                        VertexUndivided _v = verticesFullData[(int)faces[_fID].GetVert(i)];
                        _v.faceInfo.Add(new PerFaceInfo(_fID, (int)faces[_fID].SmGroup, (Tvertices[(int)Tfaces[_fID].GetTVert(i)]).convertToVector2(), normUnsure, tangent));
                    }
                    else
                    {
                        VertexUndivided _v = new VertexUndivided();
                        _v.ID = faces[_fID].GetVert(i);
                        int nbBonesA=_skin.GetNumAssignedBones((int)_v.ID);
                        List<int> bonesID = new List<int>();
                        List<float> bonesWeights = new List<float>();
                        for (int b = 0; b < 4; b++)
                        {
                            if(nbBonesA<b+1)
                            {
                                bonesID.Add(0);
                                bonesWeights.Add(0);
                            }
                            else
                            {
                                bonesID.Add(_skin.GetAssignedBone((int)_v.ID, b));
                                bonesWeights.Add(_skin.GetBoneWeight((int)_v.ID, b));
                            }
                        }
                        _v.bonesID = new ROD_core.BoneIndices(bonesID[0], bonesID[1], bonesID[2], bonesID[3]);
                        _v.bonesWeights = new Vector4(bonesWeights[0], bonesWeights[1], bonesWeights[2], bonesWeights[3]);
                        _v.pos = (vertices[(int)_v.ID]).convertToVector3();
                        _v.faceInfo.Add(new PerFaceInfo(_fID, (int)faces[_fID].SmGroup, (Tvertices[(int)Tfaces[_fID].GetTVert(i)]).convertToVector2(), normUnsure, tangent));
                        verticesFullData.Add((int)faces[_fID].GetVert(i), _v);
                    }
                }

                facesFullData.Add(_face);
            }
            List<int> IndexBuffer = new List<int>();
            List<VertexDivided> VertexBuffer = new List<VertexDivided>();
            // vertex index in verticesFullData ("undivided") and its value as a function of the smoothing group (SMG)
            Dictionary<int, Dictionary<int, int>> VertexDictionary = new Dictionary<int, Dictionary<int, int>>();

            Mesh mesh = new Mesh();

            for (int _faceID = 0; _faceID < facesFullData.Count; _faceID++)
            {
                facesFullData[_faceID].vertices.Reverse();
                foreach (int _vertex in facesFullData[_faceID].vertices)
                {
                    Dictionary<int, int> vertexTranslation;
                    int _vID = (int)verticesFullData[_vertex].ID;
                    List<PerFaceInfo> unitedVertex = verticesFullData[_vertex].faceInfo.Where(x => x.SMG == facesFullData[_faceID].SMG).ToList();
                    if (!VertexDictionary.ContainsKey(_vID))
                    {
                        VertexDictionary.Add(_vID, new Dictionary<int, int>());
                    }
                    vertexTranslation = VertexDictionary[_vID];
                    VertexDivided _newVertex = new VertexDivided();
                    _newVertex.pos = verticesFullData[_vertex].pos;

                    _newVertex.UV.X = verticesFullData[_vertex].faceInfo.Where(x => x.ID == _faceID).FirstOrDefault().UV.X;
                    _newVertex.UV.Y = 1 - verticesFullData[_vertex].faceInfo.Where(x => x.ID == _faceID).FirstOrDefault().UV.Y;
                    Vector3 _normal_aggreagate = new Vector3(0, 0, 0);
                    Vector3 _tangent_aggreagate = new Vector3(0, 0, 0);
                    foreach (PerFaceInfo _FI in unitedVertex)
                    {
                        _normal_aggreagate += _FI.normal;
                        _tangent_aggreagate += _FI.tangent;
                    }
                    _normal_aggreagate.Normalize();
                    _tangent_aggreagate.Normalize();
                    _newVertex.normal = _normal_aggreagate;
                    _newVertex.tangent = _tangent_aggreagate;
                    _newVertex.binormal = Vector3.Cross(_normal_aggreagate, _tangent_aggreagate);
                    _newVertex.bonesID = verticesFullData[_vertex].bonesID;
                    _newVertex.bonesWeights = verticesFullData[_vertex].bonesWeights;
                    IndexBuffer.Add(VertexBuffer.Count);
                    VertexBuffer.Add(_newVertex);
                }
            }
            mesh._indexStream = new IndexStream(IndexBuffer.Count, typeof(UInt16), true, true);
            mesh._vertexStream = new VertexStream(VertexBuffer.Count, true, true, _semantic);
            foreach (int id in IndexBuffer)
            {

                UInt16 _id = Convert.ToUInt16(id);
                VertexDivided res = VertexBuffer[_id];
                mesh._indexStream.WriteIndex(_id);
            }
            Type dv = DynamicVertex.CreateVertex(_semantic);
            FieldInfo[] PI = dv.GetFields();

            foreach (VertexDivided vd in VertexBuffer)
            {
                if (mesh._boundingBox.Minimum == null)
                {
                    mesh._boundingBox.Minimum = new Vector3(vd.pos.X, vd.pos.Y, vd.pos.Z);
                    mesh._boundingBox.Maximum = new Vector3(vd.pos.X, vd.pos.Y, vd.pos.Z);
                }
                mesh._boundingBox.Minimum.X = Math.Min(mesh._boundingBox.Minimum.X, vd.pos.X);
                mesh._boundingBox.Minimum.Y = Math.Min(mesh._boundingBox.Minimum.Y, vd.pos.Y);
                mesh._boundingBox.Minimum.Z = Math.Min(mesh._boundingBox.Minimum.Z, vd.pos.Z);
                mesh._boundingBox.Maximum.X = Math.Max(mesh._boundingBox.Maximum.X, vd.pos.X);
                mesh._boundingBox.Maximum.Y = Math.Max(mesh._boundingBox.Maximum.Y, vd.pos.Y);
                mesh._boundingBox.Maximum.Z = Math.Max(mesh._boundingBox.Maximum.Z, vd.pos.Z);
                List<object> vertexData = new List<object>();
                for (int i = 0; i < PI.Length; i++)
                {
                    string fieldSemantic = ((InputElementAttribute)PI[i].GetCustomAttributes(true).First()).Semantic;
                    vertexData.Add(vd.GetSemanticObject(fieldSemantic));
                }
                object[] obj = vertexData.ToArray();
                //object[] obj = new object[] { vd.pos, vd.normal, vd.UV, vd.binormal, vd.bonesID, vd.bonesWeights, vd.tangent };
                //object[] obj = new object[] { vd.pos, vd.normal, vd.UV, vd.binormal, vd.tangent };
                //object[] obj = new object[] { vd.pos, vd.normal, vd.UV};
                mesh._vertexStream.WriteVertex(obj);
            }
            Mesh.saveToFile(mesh, _filepath);
        }
Example #49
 private void OnPalettePropertyChanged(Semantic.Reporting.Windows.Common.Internal.ResourceDictionaryCollection newValue)
 {
     if (this.PaletteDispenser == null)
         return;
     this.PaletteDispenser.ResourceDictionaries = (IList<ResourceDictionary>)newValue;
 }
Example #50
 public SemanticMatch(Semantic _inputSemantic)
 {
     this.inputSemantic = _inputSemantic;
 }
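Examples #16 and #50 together suggest the intended usage: fields of VertexDivided are decorated with SemanticMatch, and GetSemanticObject reflects over them to find the field whose InputSemantic matches a given string. A hedged sketch of such a declaration; the field names follow Example #48, while the Semantic members are borrowed from Example #10 and may not match this project's enum:

 public class VertexDivided
 {
     // Illustrative only: the real attribute placement is not shown in these examples.
     [SemanticMatch(Semantic.VERTEX)]
     public Vector3 pos;

     [SemanticMatch(Semantic.NORMAL)]
     public Vector3 normal;
 }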