/// <summary>
/// Reads one external term from <paramref name="reader"/>: optionally consumes and
/// validates the start marker, then dispatches on the tag byte to the matching
/// term reader.
/// </summary>
/// <param name="reader">Binary reader positioned at the term (or its marker).</param>
/// <param name="needMarker">When true, a leading start-marker byte is expected and checked.</param>
/// <returns>The decoded term.</returns>
/// <exception cref="BadTermMarkerException">The leading byte is not <c>TermStartMarker</c>.</exception>
/// <exception cref="NotImplementedException">The tag byte maps to an unsupported term type.</exception>
public static IExtTerm BinaryToTerm(EndianBinaryReader reader, bool needMarker = true)
{
    if (needMarker)
    {
        var startByte = reader.ReadByte();
        if (startByte != TermStartMarker)
        {
            throw new BadTermMarkerException(startByte, TermStartMarker);
        }
    }

    var termTag = (EExtTermType)reader.ReadByte();

    // Dispatch directly; each case delegates to the specialized reader.
    return termTag switch
    {
        EExtTermType.List => TermList.Read(reader),
        EExtTermType.Tuple8 => TermTuple.Read(reader, ETermSize.Small),
        EExtTermType.Tuple32 => TermTuple.Read(reader, ETermSize.Large),
        EExtTermType.AtomLatin18 => TermAtom.Read(reader, EEncodingType.Latin1, ETermSize.Small),
        EExtTermType.AtomLatin116 => TermAtom.Read(reader, EEncodingType.Latin1, ETermSize.Large),
        EExtTermType.AtomUtf88 => TermAtom.Read(reader, EEncodingType.Utf8, ETermSize.Small),
        EExtTermType.AtomUtf816 => TermAtom.Read(reader, EEncodingType.Utf8, ETermSize.Large),
        EExtTermType.String => TermString.Read(reader),
        EExtTermType.Nil => TermNil.Read(),
        EExtTermType.FloatString => TermFloatString.Read(reader),
        EExtTermType.Int32 => TermInt32.Read(reader),
        EExtTermType.UInt8 => TermUInt8.Read(reader),
        EExtTermType.Big8 => TermBigNumber.Read(reader, ETermSize.Small),
        EExtTermType.Big32 => TermBigNumber.Read(reader, ETermSize.Large),
        EExtTermType.NewFloat64 => TermNewFloat64.Read(reader),
        EExtTermType.Binary => TermBinary.Read(reader),
        _ => throw new NotImplementedException($"{termTag} not supported"),
    };
}
/// <summary>
/// Return lookup lemmas for each element in the old relation as well as the definition of the old relation, which
/// is defined via an association list
/// </summary>
/// <param name="oldGlobalVars">Global variables referenced via "old(...)"; one tuple and one lookup lemma is produced per translatable variable.</param>
/// <param name="variableTranslation">Maps Boogie variables to their Isabelle term ids; variables that fail translation are skipped silently.</param>
/// <param name="oldRelDecls">Receives the generated declarations for the old relation (out parameter).</param>
// NOTE(review): this member appears truncated in the visible chunk (the foreach/method
// braces are not closed here) — the remainder presumably builds StateRelationData from
// the accumulated lists; verify against the full file.
private static StateRelationData OldRelation(
    ISet <Variable> oldGlobalVars,
    IVariableTranslation <Variable> variableTranslation,
    out IList <OuterDecl> oldRelDecls)
{
    //assume that passive version representing old variable of "g" is "g" itself
    var oldRelTuples = new List <Term>();
    var varList = new List <Variable>();
    var varToLookupLemma = new Dictionary <Variable, LemmaDecl>();
    var uniqueNamer = new IsaUniqueNamer();
    foreach (var v in oldGlobalVars)
    {
        // Only variables the translation can resolve contribute to the relation.
        if (variableTranslation.TryTranslateVariableId(v, out var varTermId, out _))
        {
            // Association-list entry: (id, Inl id) — the old value is the variable itself.
            oldRelTuples.Add(new TermTuple(varTermId, IsaCommonTerms.Inl(varTermId)));
            // Lemma: looking up the id in the old relation yields Some (Inl id);
            // proved by unfolding both definitions and simp.
            var lemma = new LemmaDecl(
                "lookup_old_rel" + uniqueNamer.GetName(v, v.Name),
                TermBinary.Eq(new TermApp(oldRel, varTermId),
                    IsaCommonTerms.SomeOption(IsaCommonTerms.Inl(varTermId))),
                new Proof(new List <string>
                {
                    "unfolding " + oldRelListName + "_def " + oldRelName + "_def",
                    "by simp"
                })
            );
            varToLookupLemma.Add(v, lemma);
            varList.Add(v);
        }
/// <summary>
/// Builds the term asserting that the binder state of <paramref name="normalState"/>
/// is the empty map.
/// </summary>
public static Term BinderStateEmpty(Term normalState)
{
    var binderState = IsaBoogieTerm.BinderState(normalState);
    return TermBinary.Eq(binderState, IsaCommonTerms.EmptyMap);
}
/// <summary>
/// Renders a binary term as "(lhs op rhs)", dispatching recursively into both operands.
/// </summary>
public override string VisitTermBinary(TermBinary t)
{
    //TODO: for critical operations, use a stack
    var opSymbol = GetStringFromBinary(t.Op);
    var lhs = t.ArgLeft.Dispatch(this);
    var rhs = t.ArgRight.Dispatch(this);
    return H.Parenthesis($"{lhs} {opSymbol} {rhs}");
}
/// <summary>
/// Builds the assumption equating the abstract-value type of <paramref name="vcVar"/>
/// with the Isabelle translation of the declared type of <paramref name="v"/>.
/// </summary>
public static Term VariableTypeAssumption(
    Variable v,
    Term vcVar,
    TypeIsaVisitor typeIsaVisitor,
    Term absValTyMap)
{
    return TermBinary.Eq(
        IsaBoogieTerm.TypeToVal(absValTyMap, vcVar),
        typeIsaVisitor.Translate(v.TypedIdent.Type));
}
/// <summary>
/// Builds the conclusion for a block lemma: either "finalState = Magic" (magic case),
/// or "finalState ≠ Failure ∧ (∀ n_s'. finalState = Normal n_s' ⟶ n_s' = initState
/// [∧ VCs of successors])".
/// </summary>
private static Term ConclusionBlock(
    IEnumerable <Block> b_successors,
    Term normalInitState,
    Term finalState,
    IDictionary <NamedDeclaration, Term> declToVCMapping,
    VCInstantiation <Block> vcinst,
    bool useMagicFinalState = false)
{
    // Magic short-circuit: the block is known to end in the magic state.
    if (useMagicFinalState)
    {
        return new TermBinary(finalState, IsaBoogieTerm.Magic(), TermBinary.BinaryOpCode.Eq);
    }

    var notFailure =
        new TermBinary(finalState, IsaBoogieTerm.Failure(), TermBinary.BinaryOpCode.Neq);

    var normalFinalState = IsaCommonTerms.TermIdentFromName("n_s'");
    Term isNormalState =
        new TermBinary(finalState, IsaBoogieTerm.Normal(normalFinalState), TermBinary.BinaryOpCode.Eq);
    Term statesAgree =
        new TermBinary(normalFinalState, normalInitState, TermBinary.BinaryOpCode.Eq);

    // With successors, additionally require their VC conjunction to hold.
    Term normalCase;
    if (b_successors.Any())
    {
        normalCase = new TermBinary(
            statesAgree,
            LemmaHelper.ConjunctionOfSuccessorBlocks(b_successors, declToVCMapping, vcinst),
            TermBinary.BinaryOpCode.And);
    }
    else
    {
        normalCase = statesAgree;
    }

    Term normalImplication = new TermQuantifier(
        TermQuantifier.QuantifierKind.ALL,
        new List <Identifier> { normalFinalState.Id },
        new TermBinary(isNormalState, normalCase, TermBinary.BinaryOpCode.Implies));

    return new TermBinary(notFailure, normalImplication, TermBinary.BinaryOpCode.And);
}
/// <summary>
/// Builds the assumption that every closed type is inhabited:
/// ⋀ t. closed t ⟹ (∃ v. typeOfVal v = t), w.r.t. <paramref name="absValTyMap"/>.
/// </summary>
public static Term NonEmptyTypesAssumption(Term absValTyMap)
{
    var typeId = new SimpleIdentifier("t");
    var valueId = new SimpleIdentifier("v");
    Term typeTerm = new TermIdent(typeId);
    Term valueTerm = new TermIdent(valueId);

    // ∃ v. typeToVal(absValTyMap, v) = t
    var witnessExists = TermQuantifier.Exists(
        new List <Identifier> { valueId },
        null,
        TermBinary.Eq(IsaBoogieTerm.TypeToVal(absValTyMap, valueTerm), typeTerm));

    // ⋀ t. closed t ⟹ witnessExists
    return TermQuantifier.MetaAll(
        new List <Identifier> { typeId },
        null,
        TermBinary.MetaImplies(IsaBoogieTerm.IsClosedType(typeTerm), witnessExists));
}
/// <summary>
/// Generates the local correctness lemma for <paramref name="block"/>:
/// "cmds reduce ⟹ VC of final-CFG block ⟹ conclusion", where the conclusion
/// covers non-failure and successor VCs (or the magic final state).
/// </summary>
public LemmaDecl GenerateBlockLemma(Block block, Block finalCfgBlock,
    IEnumerable <Block> finalCfgSuccessors, string lemmaName, string vcHintsName)
{
    var cmdsReduce = IsaBoogieTerm.RedCmdList(
        boogieContext,
        IsaCommonTerms.TermIdentFromName(isaBlockInfo.CmdsQualifiedName(block)),
        initState,
        finalState);

    //do not use separate assumption, leads to issues
    var vcAssm = vcinst.GetVCObjInstantiation(finalCfgBlock, declToVCMapping);

    var conclusion = ConclusionBlock(
        finalCfgSuccessors, normalInitState, finalState, declToVCMapping, vcinst,
        LemmaHelper.FinalStateIsMagic(block));

    // Chain both premises via meta-implication into a single statement.
    var statement = TermBinary.MetaImplies(cmdsReduce, TermBinary.MetaImplies(vcAssm, conclusion));
    var proof = BlockCorrectProof(block, finalCfgBlock, vcHintsName);

    return new LemmaDecl(lemmaName, ContextElem.CreateEmptyContext(), statement, proof);
}
/// <summary>
/// Builds the assumption relating the Boogie interpretation of function <paramref name="f"/>
/// to its VC counterpart: for all bound type/value arguments, applying the interpreted
/// function yields Some of the (converted) VC function application, guarded by
/// closedness and type-membership constraints.
/// </summary>
/// <param name="f">The Boogie function being related.</param>
/// <param name="funInterpMapping">Maps each function to the term denoting its Boogie interpretation.</param>
/// <param name="declToVCMapping">Maps declarations (incl. <paramref name="f"/>) to their VC-level terms.</param>
/// <param name="boogieContext">Supplies the abstract-value type map.</param>
public static Term FunctionVcCorresAssm(
    Function f,
    IDictionary <Function, TermIdent> funInterpMapping,
    IDictionary <NamedDeclaration, Term> declToVCMapping,
    BoogieContextIsa boogieContext
)
{
    var converter = new PureToBoogieValConverter();

    //TODO: unique naming scheme
    var boundParamVars = GetNames("farg", f.InParams.Count);
    TypeUtil.SplitTypeParams(f.TypeParameters, f.InParams.Select(v => v.TypedIdent.Type),
        out var explicitTypeVars, out var implicitTypeVars);
    var boundTypeVars = GetNames("targ", f.TypeParameters.Count);

    // Substitute each of f's type parameters by its bound "targ_i" identifier.
    IDictionary <TypeVariable, Term> substitution = new Dictionary <TypeVariable, Term>();
    var i = 0;
    foreach (var tv in f.TypeParameters)
    {
        substitution.Add(tv, new TermIdent(boundTypeVars[i]));
        i++;
    }

    var varSubstitution = new SimpleVarSubstitution <TypeVariable>(substitution);
    var typeIsaVisitor = new TypeIsaVisitor(varSubstitution);

    // For every non-primitive value parameter, constrain its abstract type to the
    // parameter's declared type. NOTE: deferred LINQ — evaluated later via Any()/Aggregate().
    IEnumerable <Term> valueArgConstraints = f.InParams
        .Select((v, idx) => !TypeUtil.IsPrimitive(v.TypedIdent.Type)
            ? TermBinary.Eq(
                IsaBoogieTerm.TypeToVal(boogieContext.absValTyMap, new TermIdent(boundParamVars[idx])),
                typeIsaVisitor.Translate(v.TypedIdent.Type))
            : null)
        .Where(t => t != null);

    var boogieFunTyArgs = boundTypeVars.Select(id => (Term) new TermIdent(id)).ToList();

    // The VC function only takes the explicit type arguments, each wrapped as closed.
    var vcFunTyArgs = new List <Term>();
    f.TypeParameters.ZipDo(boogieFunTyArgs, (tv, tvTerm) =>
    {
        if (explicitTypeVars.Contains(tv))
        {
            vcFunTyArgs.Add(IsaBoogieVC.TyToClosed(tvTerm));
        }
    });

    var boogieFunValArgs = f.InParams.Select(
        (v, idx) => converter.ConvertToBoogieVal(v.TypedIdent.Type, new TermIdent(boundParamVars[idx]))
    ).ToList();

    // LHS: interpreted function applied to [type args] [value args].
    Term left = new TermApp(funInterpMapping[f], new List <Term>
    {
        new TermList(boogieFunTyArgs),
        new TermList(boogieFunValArgs)
    });

    // RHS: Some (conversion of the VC function application).
    // NOTE(review): Union (not Concat) of type and value arg terms — assumes the two
    // sequences never contain equal terms, which holds for distinct "targ"/"farg" idents.
    Term vcFunApp = new TermApp(declToVCMapping[f], vcFunTyArgs.Union(
        boundParamVars.Select(bv => (Term) new TermIdent(bv)).ToList()
    ).ToList());

    var outputType = f.OutParams.First().TypedIdent.Type;
    var right = IsaCommonTerms.SomeOption(
        converter.ConvertToBoogieVal(outputType, vcFunApp)
    );

    Term equation = TermBinary.Eq(left, right);
    Term conclusion;
    if (!TypeUtil.IsPrimitive(outputType))
    {
        //if type is not primitive, then the type information is not yet included
        conclusion = TermBinary.And(equation, TermBinary.Eq(
            IsaBoogieTerm.TypeToVal(boogieContext.absValTyMap, vcFunApp),
            typeIsaVisitor.Translate(outputType)
        ));
    }
    else
    {
        conclusion = equation;
    }

    // Guard by the value-argument type constraints (folded right-to-left: note
    // the deliberately swapped And(t2, t1) accumulation).
    if (valueArgConstraints.Any())
    {
        conclusion = TermBinary.MetaImplies(valueArgConstraints.Aggregate((t1, t2) => TermBinary.And(t2, t1)),
            conclusion);
    }

    // Guard by closedness of every bound type argument (same swapped fold).
    if (boogieFunTyArgs.Any())
    {
        var closednessAssms = boogieFunTyArgs.Select(t1 => IsaBoogieTerm.IsClosedType(t1))
            .Aggregate((t1, t2) => TermBinary.And(t2, t1));
        conclusion = TermBinary.MetaImplies(closednessAssms, conclusion);
    }

    // Universally (meta) quantify over all bound value and type variables.
    if (boundParamVars.Any())
    {
        return(new TermQuantifier(TermQuantifier.QuantifierKind.META_ALL,
            boundParamVars.Union(boundTypeVars).ToList(), conclusion));
    }

    return(conclusion);
}
/// <summary>
/// Builds the assumption that the global state of <paramref name="normalState"/> equals
/// its old global state.
/// </summary>
public static Term OldGlobalStateAssumption(Term normalState)
{
    var current = IsaBoogieTerm.GlobalState(normalState);
    var old = IsaBoogieTerm.OldGlobalState(normalState);
    return TermBinary.Eq(current, old);
}
/// <summary>
/// return declarations to rewrite vc expression
/// </summary>
private List <LemmaDecl> RewriteVcLemmas(Expr expr, bool proveVc, bool hasTypeQuantification)
{
    // To be safe create new erasers (erasers have state that change when certain methods are applied)
    /* Note that vcExtracted is supposed to be the same as directly erasing translatedVcExpr. The reason we
     * translate the Boogie expression again to a VC expression before erasing it, is that erasure of a VCExpr
     * has side effects on that VCExpr. */
    VCExpr EraseToVc(bool extractArgs, int polarity) =>
        _typeEraserFactory.NewEraser(extractArgs).TranslateAndErase(expr, polarity);

    /* Direction selection:
     * - proveVc: prove vcNotOptimized ==> vcOptimized. Easier — type quantifiers in the
     *   premise can be instantiated directly with the extracted versions (extract on RHS).
     * - otherwise: prove vcOptimized ==> vcNotOptimized. Harder (extract on LHS). */
    var lhsExtractArgs = !proveVc;

    string proofMethod;
    if (!hasTypeQuantification)
    {
        proofMethod = "by blast";
    }
    else if (proveVc)
    {
        proofMethod = "unfolding Let_def using prim_type_vc_lemmas by blast";
    }
    else
    {
        proofMethod = "using vc_extractor_lemmas by smt";
    }

    var lemmaNameNeg = $"expr_equiv_{_lemmaId}_neg";
    var lemmaNamePos = $"expr_equiv_{_lemmaId}_pos";
    _lemmaId++;

    // Translate both polarities of both sides (fresh eraser per call, see note above).
    var lhsNeg = _vcToIsaTranslator.Translate(EraseToVc(lhsExtractArgs, -1));
    var lhsPos = _vcToIsaTranslator.Translate(EraseToVc(lhsExtractArgs, 1));
    var rhsNeg = _vcToIsaTranslator.Translate(EraseToVc(!lhsExtractArgs, -1));
    var rhsPos = _vcToIsaTranslator.Translate(EraseToVc(!lhsExtractArgs, 1));

    return new List <LemmaDecl>
    {
        new LemmaDecl(lemmaNameNeg, TermBinary.MetaImplies(lhsNeg, rhsNeg),
            new Proof(new List <string> { proofMethod })),
        new LemmaDecl(lemmaNamePos, TermBinary.MetaImplies(lhsPos, rhsPos),
            new Proof(new List <string> { proofMethod }))
    };
}
/// <summary>
/// Generates the global (CFG-level) lemma for <paramref name="block"/>: from a multi-step
/// CFG reduction starting in this block (plus, when applicable, the block's VC assumption),
/// conclude that the final state is not a failure. The proof script steps backwards through
/// the reduction, discharging the local block lemma and recursing into successor CFG lemmas.
/// </summary>
/// <param name="block">Block in the original CFG.</param>
/// <param name="finalCfgBlock">Corresponding block in the final (optimized) CFG, if contained.</param>
/// <param name="isContainedInFinalCfg">Whether the block survives into the final CFG.</param>
/// <param name="successors">Successors in the original CFG (drives the proof's case split).</param>
/// <param name="finalCfgSuccessors">Reachable successors in the final CFG (used for the VC assumption when the block itself was removed).</param>
/// <param name="cfg">Term denoting the CFG being reduced.</param>
/// <param name="cfgLemmaName">Naming function for CFG lemmas (also used for successor lemma references).</param>
/// <param name="BlockLemma">The already-generated local lemma for this block.</param>
public LemmaDecl GenerateCfgLemma(
    Block block,
    Block finalCfgBlock,
    bool isContainedInFinalCfg,
    IEnumerable <Block> successors,
    IEnumerable <Block> finalCfgSuccessors,
    Term cfg,
    Func <Block, string> cfgLemmaName,
    LemmaDecl BlockLemma)
{
    // Assumption 1: multi-step reduction from (this node, Normal initial state) to the final config.
    var red = IsaBoogieTerm.RedCFGMulti(
        boogieContext,
        cfg,
        IsaBoogieTerm.CFGConfigNode(new NatConst(isaBlockInfo.BlockIds[block]),
            IsaBoogieTerm.Normal(normalInitState)),
        IsaBoogieTerm.CFGConfig(finalNode, finalState));
    var assumption = new List <Term> { red };
    var hasVcAssm = false;
    if (isContainedInFinalCfg)
    {
        // Assumption 2: the VC of the corresponding final-CFG block.
        assumption.Add(vcinst.GetVCObjInstantiation(finalCfgBlock, declToVCMapping));
        hasVcAssm = true;
    }
    else
    {
        //vc assumption is conjunction of reachable successors in final cfg
        if (finalCfgSuccessors.Any())
        {
            assumption.Add(
                LemmaHelper.ConjunctionOfSuccessorBlocks(finalCfgSuccessors, declToVCMapping, vcinst));
            hasVcAssm = true;
        }
    }

    // Conclusion: the reduction cannot end in Failure.
    Term conclusion = new TermBinary(finalState, IsaBoogieTerm.Failure(), TermBinary.BinaryOpCode.Neq);

    var nodeLemma = isaBlockInfo.BlockCmdsMembershipLemma(block);
    var outEdgesLemma = isaBlockInfo.OutEdgesMembershipLemma(block);
    var proofMethods = new List <string>();
    // Discharge the local block lemma; feed it assms(2) only when a VC assumption exists.
    var eruleLocalBlock =
        "erule " + (hasVcAssm ? ProofUtil.OF(BlockLemma.Name, "_", "assms(2)") : BlockLemma.Name);

    // Special case: block ends in the magic state — no successor reasoning needed.
    if (isContainedInFinalCfg && LemmaHelper.FinalStateIsMagic(block))
    {
        proofMethods.Add("apply (rule converse_rtranclpE2[OF assms(1)], fastforce)");
        proofMethods.Add(ProofUtil.Apply("rule " +
            ProofUtil.OF("red_cfg_multi_backwards_step_magic", "assms(1)", nodeLemma)));
        proofMethods.Add(ProofUtil.By(eruleLocalBlock));
        return(new LemmaDecl(cfgLemmaName(block), ContextElem.CreateWithAssumptions(assumption),
            conclusion, new Proof(proofMethods)));
    }

    if (successors.Any())
    {
        proofMethods.Add("apply (rule converse_rtranclpE2[OF assms(1)], fastforce)");
        // Step rule variant depends on whether final-CFG successors carry VC obligations.
        var cfg_lemma = finalCfgSuccessors.Any()
            ? "red_cfg_multi_backwards_step"
            : "red_cfg_multi_backwards_step_2";
        proofMethods.Add(ProofUtil.Apply("rule " + ProofUtil.OF(cfg_lemma, "assms(1)", nodeLemma)));
        proofMethods.Add(ProofUtil.Apply(eruleLocalBlock));
        proofMethods.Add("apply (" + ProofUtil.Simp(outEdgesLemma) + ")");
        // One membership elimination + recursive CFG lemma application per successor.
        foreach (var bSuc in successors)
        {
            proofMethods.Add("apply (erule member_elim, simp)");
            proofMethods.Add("apply (erule " + cfgLemmaName(bSuc) + ", simp?" + ")");
        }
        proofMethods.Add("by (simp add: member_rec(2))");
    }
    else
    {
        // No successors: close the proof with the no-successor backwards-step rule.
        proofMethods.Add("apply (rule converse_rtranclpE2[OF assms(1)], fastforce)");
        proofMethods.Add("apply (rule " + ProofUtil.OF("red_cfg_multi_backwards_step_no_succ",
            "assms(1)", nodeLemma, outEdgesLemma) + ")");
        if (isContainedInFinalCfg)
        {
            proofMethods.Add("using " + ProofUtil.OF(BlockLemma.Name, "_", "assms(2)") + " by blast");
        }
        else
        {
            proofMethods.Add("using " + BlockLemma.Name + " by blast");
        }
    }

    return(new LemmaDecl(cfgLemmaName(block), ContextElem.CreateWithAssumptions(assumption),
        conclusion, new Proof(proofMethods)));
}