/bin/storm.py
Python | 2396 lines | 2394 code | 1 blank | 1 comment | 5 complexity | 89c98e850de8cef6534394a249c839f8 MD5 | raw file
Large files files are truncated, but you can click here to view the full file
# Main STORM instrumentation script

# Command-line help text printed for -h/--help (runtime string; do not edit).
usage = """
STORM 0.1
Usage: python storm.py -d directory_name [options ...]
STORM command line options:
-d (or --dir) - directory of the example
-h (or --help) - help
-o (or --output) - final instrumented output bpl file name (default is a.bpl)
-k - max number of contexts K (default is 1, which is sequential
case)
--no-exceptions - don't throw exceptions in Schedule, etc. This can be unsound,
but improves performance.
--no-check - skip checking (running boogie) and just generate instrumented
bpl file
--no-compile - do not compile the code, just use already created test.bpl
(speed up testing if the example is compiling slowly)
--only-compile - just compile the code with HAVOC. HAVOC's output is test.bpl.
--generate-sx - just generate the sx query with the given name.
--generate-smt - just generate the smt query with the given name.
--havoc-output - specify HAVOC's output file to be used as STORM's input.
Default is test.bpl.
--generate-trace - adds instrumentation necessary for generating error traces.
--time - measure Boogie execution time using the ptime utility (ptime
has to be installed for this switch to work)
-z (or --z3) - prover log output name. If specified, Z3 is going to be
executed on the query after Boogie. Needed only if we want to
measure Z3 time separately.
--cegar - runs the cegar algorithm for refining tracked fields
"""
import sys, re, os, ConfigParser, getopt
from collections import deque
from generate_error_trace import generateErrorTrace
# Globals
# Locate boogie.exe through the HAVOC_BOOGIE_DIR environment variable
# (Windows path separators throughout -- this script targets Windows).
boogieDir = os.environ.get("HAVOC_BOOGIE_DIR")
assert boogieDir, "Set HAVOC_BOOGIE_DIR environment variable"
boogieBin = str(boogieDir) + '\\boogie.exe'
# Smoke-test: run boogie.exe with no arguments and verify it prints the
# expected "no input files" error, which proves the binary is callable.
f = os.popen(boogieBin, 'r')
boogieCheckStr = f.read()
f.close()
assert boogieCheckStr == '*** Error: No input files were specified.\n', "Calling booogie.exe failed or unexpected output from boogie.exe"
# Base Boogie options used for every check run.
boogieOpts = '/noinfer /timeLimit:600 /errorLimit:1'
# Alternative Z3/Boogie option sets; runOnlyBoogie tries them in order and
# stops at the first one that does not time out.
boogieHeuristics = [
    ['/z3opt:ARRAY_WEAK=true',
     '/z3opt:ARRAY_EXTENSIONAL=false',
     '/useArrayTheory'],
    ['/z3opt:RELEVANCY=0',
     '/z3opt:ARRAY_WEAK=true',
     '/z3opt:ARRAY_EXTENSIONAL=false',
     '/useArrayTheory']
]
# Boogie declarations prepended to every instrumented program: the exception
# flag, error flag, context counter k, atomic/init section flags and thread ids.
stormPrelude = """
var raiseException : bool;
var errorReached : bool;
var k : int;
var __storm_atomic : bool;
var __storm_init : bool;
var tid : int;
var tidCount : int;
procedure /* dummy comment */ {:inline 1} storm_getThreadID() returns (tidRet:int)
{
tidRet := tid;
return;
}
"""
# Declaration of the context-switch (Schedule) procedure; %%K%% is replaced
# with the concrete context bound in initialProcessing.
contextSwitch = """
procedure contextSwitch();
modifies k;
ensures __storm_atomic ==> old(k) == k;
ensures(old(k) <= k);
ensures(k < %%K%%);
"""
# Statement inserted at every possible context-switch point; extended with a
# per-context storm_context_i dispatch in processCommandLine.
contextSwitchCall = """
call contextSwitch();
"""
# Name of the entry procedure that gets the k/errorReached instrumentation.
mainProcedure = 'storm_main'
# Procedures that must never be inlined (primitives and instrumentation hooks).
procNotInlined = set(['havoc_assert', 'havoc_assume', '__HAVOC_free',
'__HAVOC_malloc', 'nondet_choice', 'det_choice', 'storm_nondet',
'storm_getThreadID',
'_strdup', '_xstrcasecmp', '_xstrcmp', 'contextSwitch',
'__storm_assert_dummy',
'__storm_atomic_begin_dummy', '__storm_atomic_end_dummy',
'__storm_init_begin_dummy', '__storm_init_end_dummy',
'__storm_end_dummy', '__storm_switch_dummy'])
# Calls to these procedures do not get a raiseException check afterwards.
procNoRaiseExceptionCheck = set([mainProcedure])
# Globals tracked per-context (filled from storm.config).
instrumentedGlobals = set()
# Globals turned into plain scalars but not split per context.
uninstrumentedGlobals = set()
# Scalar globals introduced by the instrumentation itself.
globalVars = set(['alloc', 'k', 'errorReached', 'raiseException', '__storm_atomic',
'__storm_init', 'tid', 'tidCount'])
# Field-abstraction configuration (filled from storm.config).
abstractKeepFields = []
abstractKeepFieldsRead = []
abstractKeepFieldsAll = set()
constantFields = []
resources = []
# Command-line controlled settings with their defaults.
K = 1
outputBplFile = 'a.bpl'
outputZ3File = ''
runBoogie = True
noExceptions = False
measureTime = False
exampleDir = ''
compile = True
onlyCompile = False
generateSx = ''
generateSmt = ''
havocOutput = 'test.bpl'
generateTrace = False
cegar = False
loopUnroll = ''
# Main - this is where everything starts
def main():
    """Top-level driver: parse options, compile the example with HAVOC,
    instrument the resulting Boogie program, and run the checker."""
    processCommandLine()
    os.chdir(exampleDir) # go to the example directory
    readConfiguration()

    # compile the example with HAVOC
    if compile or onlyCompile:
        runHavoc()
        if onlyCompile:
            sys.exit()
    # read output of HAVOC into bpl
    fin = open(havocOutput, 'r')
    bpl = fin.read()
    fin.close()
    if cegar:
        # cegarLoop is defined later in this (truncated) file.
        cegarLoop(bpl)
    else:
        bpl = initialProcessing(bpl) # get rid of alloc assumes, havoc_free etc.
        emptyProcedures = findEmptyProcedures(bpl) # Don't inline procedures without bodies
        procNotInlined.update(emptyProcedures)
        bpl = instrumentStormMain(bpl)
        bpl = instrumentThreads(bpl)
        bpl = replaceGlobalsWithScalars(bpl) # replace Mem[errorReached] --> errorReached, Mem[k] --> k
        if not noExceptions:
            bpl = putRaiseExceptionChecks(bpl)
        boogieResult = runOnlyBoogie(bpl)
        if generateTrace:
            # Only produce a trace when Boogie did NOT fully verify,
            # i.e. an error was found.
            l = re.compile('Boogie program verifier finished with 1 verified, 0 errors')
            m = l.search(boogieResult)
            if not m:
                # Derive <output>_trace.txt from the output bpl file name.
                l = re.compile('(.*).bpl')
                m = l.match(outputBplFile)
                assert m
                traceName = str(m.group(1)) + '_trace.txt'
                generateErrorTrace(boogieResult, 'tmp.bpl', outputBplFile, traceName)
- # Process command line arguments
- def processCommandLine():
- global K
- global outputBplFile
- global outputZ3File
- global runBoogie
- global noExceptions
- global measureTime
- global contextSwitch
- global contextSwitchCall
- global exampleDir
- global compile
- global onlyCompile
- global generateSx
- global generateSmt
- global havocOutput
- global generateTrace
- global cegar
- global loopUnroll
- try:
- opts, args = getopt.getopt(sys.argv[1:], "hd:o:k:z:",
- ["help", "dir=", "output=", "no-exceptions", "no-check",
- "no-compile", "only-compile", "generate-sx=", "generate-smt=",
- "havoc-output=", "generate-trace", "time", "z3=", "cegar",
- "loop-unroll="])
- except getopt.GetoptError, err:
- # print help information and exit:
- print str(err) # will print something like "option -a not recognized"
- print "Type python storm.py -h for help."
- sys.exit(2)
- for o, a in opts:
- if o in ("-h", "--help"):
- print usage
- sys.exit()
- elif o in ("-d", "--dir"):
- exampleDir = a
- elif o in ("-o", "--output"):
- outputBplFile = a
- elif o in ("-k"):
- K = int(a)
- elif o in ("--no-exceptions"):
- noExceptions = True
- elif o in ("--no-check"):
- runBoogie = False
- elif o in ("--no-compile"):
- compile = False
- elif o in ("--only-compile"):
- onlyCompile = True
- elif o in ("--generate-sx"):
- generateSx = a
- elif o in ("--generate-smt"):
- generateSmt = a
- elif o in ("--havoc-output"):
- havocOutput = a
- elif o in ("--generate-trace"):
- generateTrace = True
- elif o in ("--time"):
- measureTime = True
- elif o in ("-z", "--z3"):
- outputZ3File = a
- elif o in ("--cegar"):
- cegar = True
- elif o in ("--loop-unroll"):
- loopUnroll = '/loopUnroll:' + str(int(a))
- else:
- print "Unhandled option.\nType python storm.py -h for help."
- sys.exit(2)
- # print "Dir = " + exampleDir
- # print "Output = " + outputBplFile
- # print "K = " + K
- if exampleDir == '':
- print "Example directory not specified.\nType python storm.py -h for help."
- sys.exit(2)
- assert generateSx == '' or generateSmt == ''
- assert K >= 1
- if generateTrace:
- assert loopUnroll, "Error trace generation works only with the loop-unroll option"
- contextSwitchCall += '\n'
- for i in range(0, K):
- if i == 0:
- contextSwitchCall += ' if (k == 0) {\n'
- else:
- contextSwitchCall += ' } else if (k == ' + str(i) + ') {\n'
- contextSwitchCall += ' call storm_context_' + str(i) + '();\n'
- contextSwitchCall += ' }\n'
# Read configuration from storm.config
def readConfiguration():
    """Load the per-example settings from storm.config (or storm_cegar.config
    when --cegar is given) into the module-level collections.

    All options are comma/space separated identifier lists parsed by
    tokenize(). Assumes the current directory is the example directory."""
    config = ConfigParser.ConfigParser()
    if cegar:
        config.read("storm_cegar.config")
    else:
        config.read("storm.config")
    instrumentedGlobals.update(set(tokenize(config.get("storm_config", "instrumented_globals"))))
    # uninstrumented_globals is the only optional key; the rest must exist.
    if config.has_option("storm_config", "uninstrumented_globals"):
        uninstrumentedGlobals.update(set(tokenize(config.get("storm_config", "uninstrumented_globals"))))
    procNotInlined.update(set(tokenize(config.get("storm_config", "proc_not_inlined"))))
    # Never add raiseException checks after calls to non-inlined procedures.
    procNoRaiseExceptionCheck.update(procNotInlined)
    abstractKeepFields.extend(tokenize(config.get("storm_config", "abstract_keep_fields")))
    abstractKeepFieldsRead.extend(tokenize(config.get("storm_config", "abstract_keep_fields_read")))
    constantFields.extend(tokenize(config.get("storm_config", "cegar_constant_fields")))
    resources.extend(tokenize(config.get("storm_config", "resources")))
    # Union of fields tracked for writes and for reads.
    abstractKeepFieldsAll.update(set(abstractKeepFields), set(abstractKeepFieldsRead))
def tokenize(configStr):
    """Split a comma/space separated storm.config value into its identifier
    tokens ([a-zA-Z0-9_]+) and return them as a list, preserving order."""
    wordPattern = re.compile('([a-zA-Z0-9_]+)([, ]*)')
    return [word.group(1) for word in wordPattern.finditer(configStr)]
# Running HAVOC in the example's directory
def runHavoc():
    """Compile the C example in the current directory down to test.bpl with
    the HAVOC toolchain (EspFe front-end, cfglink, ctobpl translator).

    Pure side-effect function: shells out via os.system/os.popen and prints
    progress. Assumes STORM_ROOT is set and nmake is on the PATH."""
    print 'Compiling example directory with HAVOC...\n'
    havocDir = os.environ.get("STORM_ROOT") + '\\havoc'
    # Run EspFe
    os.system('nmake -nologo > espfe.dmp')
    os.system('dir /b/s *.rawcfgf > system.cfg.files')
    # Generate blob
    os.system(havocDir + '\\cfglink /DoVMap=false /DoSCFGMerging=true /RemoveLinkage=true /CFGFiles=@system.cfg.files')
    # Run translator
    os.system(havocDir + '\\ctobpl /BreakAtAssert=1 /RunDirectory=. /SETTINGS=' + havocDir + '\\havoc.config')
    # Clean (output of nmake clean is deliberately discarded)
    f = os.popen('nmake clean', 'r')
    f.close()
    print 'Done compiling...\n'
# Initial processing doing a few replacementes not falling into any category...
# Most of this stuff should be done by HAVOC.
def initialProcessing(bpl):
    """Prepend the STORM prelude/context-switch machinery to the HAVOC output
    and strip or rewrite HAVOC artifacts (alloc assumes, Base/Match axioms,
    the 'name' type, dummy instrumentation calls, ...) via a long sequence of
    order-sensitive regex substitutions. Returns the rewritten bpl text."""
    print 'Initial processing...\n'
    # Substitute the concrete context bound K into the contextSwitch contract
    # and declare one storm_context_i procedure per context.
    p = re.compile('%%K%%')
    contextSwitchTmp = p.sub(str(K), contextSwitch)
    contextProcedures = '\n'
    for i in range(0, K):
        procNotInlined.add('storm_context_' + str(i))
        contextProcedures += 'procedure storm_context_' + str(i) + '();\n'
    bpl = stormPrelude + contextProcedures + contextSwitchTmp + bpl
    # --- Strip alloc-related contracts and assumes ---
    p = re.compile('//TAG: alloc is always > 0.*?free ensures INT_LEQ\(old\(alloc\), alloc\);', re.DOTALL)
    bpl = p.sub('', bpl)
    p = re.compile('free ensures .*;\n')
    bpl = p.sub('', bpl)
    p = re.compile('//TAG: havoc memory locations by default[ ]*\n')
    bpl = p.sub('', bpl)
    p = re.compile('//TAG: requires .*?\n')
    bpl = p.sub('', bpl)
    p = re.compile('requires INT_GEQ\(obj_size, 0\);.*\n')
    bpl = p.sub('free requires INT_GEQ(obj_size, 0);\n', bpl)
    p = re.compile('assume LOOP_([a-zA-Z0-9_]*)_Res_.+ == Res_.+;\n')
    bpl = p.sub('', bpl)
    p = re.compile('ensures old\(alloc\) <= alloc;\n')
    bpl = p.sub('', bpl)
    p = re.compile('[ ]*assume[ ]+INT_LT\(.+,[ ]*alloc\)[ ]*;[ ]*\n')
    bpl = p.sub('', bpl)
    p = re.compile('/\*assert \*/ assume INT_LEQ\(.+, alloc\);\n')
    bpl = p.sub('', bpl)
    # storm_getThreadID is supplied by the prelude; drop HAVOC's declaration.
    p = re.compile('procedure storm_getThreadID\(\) returns \(ret:int\);\n')
    bpl = p.sub('', bpl)
    # Removing the "name" type
    p = re.compile('type name;\n')
    bpl = p.sub('', bpl)
    p = re.compile('var Mem: \[name\]\[int\]int;\n')
    bpl = p.sub('', bpl)
    p = re.compile('function Field\(int\) returns \(name\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('const unique .*:name;\n')
    bpl = p.sub('', bpl)
    p = re.compile('var LOOP_.*_Mem:\[name\]\[int\]int;\n')
    bpl = p.sub('', bpl)
    p = re.compile('LOOP_.*_Mem := Mem;\n')
    bpl = p.sub('', bpl)
    # Get rid of all Base stuff - sometimes those blow up
    p = re.compile('function Base\(int\) returns \(int\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('//axiom\(forall x: int :: {Base\(x\)} Base\(x\) <= x\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('axiom\(forall x: int :: {Base\(x\)} INT_LEQ\(Base\(x\), x\)\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('axiom\(forall b:int, a:int, t:name :: {MatchBase\(b, a, T.Ptr\(t\)\)} MatchBase\(b, a, T.Ptr\(t\)\) <==> Base\(a\) == b\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('axiom\(forall v:int, t:name :: {HasType\(v, T.Ptr\(t\)\)} HasType\(v, T.Ptr\(t\)\) <==> \(v == 0 \|\| \(INT_GT\(v, 0\) && Match\(v, t\) && MatchBase\(Base\(v\), v, t\)\)\)\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('ensures Base\(new\) == new;\n')
    bpl = p.sub('', bpl)
    p = re.compile('axiom\(Base\(.+\) == .+\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('assume \(Base\(.+\) == .+\);\n')
    bpl = p.sub('', bpl)
    # --- Drop whole axiom/function groups (DOTALL spans multiple lines) ---
    p = re.compile('type byte;.*FourBytesToInt\(c0, c1, c2, c3\) ==> b0 == c0 && b1 == c1 && b2 == c2 && b3 == c3\);', re.DOTALL)
    bpl = p.sub('', bpl)
    p = re.compile('function Equal\(\[int\]bool, \[int\]bool\) returns \(bool\);.*Unified\(M\[Field\(x\) := M\[Field\(x\)\]\[x := y\]\]\) == Unified\(M\)\[x := y\]\);', re.DOTALL)
    bpl = p.sub('', bpl)
    p = re.compile('function Match\(a:int, t:name\) returns \(bool\);.*Field\(a\) == T.Ptr\(t\)\);', re.DOTALL)
    bpl = p.sub('', bpl)
    p = re.compile('axiom\(forall a:int, b:int :: {BIT_BAND\(a,b\)}.*{BIT_BAND\(a,b\)} a == 0 \|\| b == 0 ==> BIT_BAND\(a,b\) == 0\);', re.DOTALL)
    bpl = p.sub('', bpl)
    p = re.compile('axiom\(forall a:int, b:int :: {DIV\(a,b\)}.*a > b \* \(DIV\(a,b\) \+ 1\)[ ]*\n[ ]*\);', re.DOTALL)
    bpl = p.sub('', bpl)
    p = re.compile('function POW2\(a:int\) returns \(bool\);.*axiom POW2\(33554432\);', re.DOTALL)
    bpl = p.sub('', bpl)
    p = re.compile('procedure nondet_choice\(\) returns \(x:int\);.*ensures x == DetChoiceFunc\(old\(detChoiceCnt\)\);', re.DOTALL)
    bpl = p.sub('', bpl)
    # --- Expand the __storm_* dummy calls into real instrumentation ---
    p = re.compile('[ ]*call[ ]+__storm_assert_dummy[ ]*\(\);\n([ ]*goto[ ]+[^;]*;)\n')
    bpl = p.sub('errorReached := true;\nraiseException := true;\n__storm_atomic := false;\n__storm_init := false;\ngoto label_1;\n', bpl)
    p = re.compile('[ ]*call[ ]+__storm_atomic_begin_dummy[ ]*\(\);\n')
    bpl = p.sub('__storm_atomic := true;\n', bpl)
    p = re.compile('[ ]*call[ ]+__storm_atomic_end_dummy[ ]*\(\);\n')
    bpl = p.sub('if (!__storm_init) {__storm_atomic := false;}\n' + contextSwitchCall, bpl)
    p = re.compile('[ ]*call[ ]+__storm_init_begin_dummy[ ]*\(\);\n')
    bpl = p.sub('__storm_atomic := true; __storm_init := true;\n', bpl)
    p = re.compile('[ ]*call[ ]+__storm_init_end_dummy[ ]*\(\);\n')
    bpl = p.sub('__storm_atomic := false; __storm_init := false;\n', bpl)
    p = re.compile('[ ]*call[ ]+__storm_switch_dummy[ ]*\(\);\n')
    bpl = p.sub(contextSwitchCall, bpl)
    # --- Drop Inv/_S_ helper functions and their axioms ---
    p = re.compile('function .*Inv\(int\) returns \(int\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('function _S_.*Inv\(\[int\]bool\) returns \(\[int\]bool\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('function _S_.*\(\[int\]bool\) returns \(\[int\]bool\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('axiom \(forall x:int :: {.*Inv\(.*\(x\)\)} .*Inv\(.*\(x\)\) == x\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('axiom \(forall x:int :: {.*Inv\(x\)} .*\(.*Inv\(x\)\) == x\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('axiom \(forall x:int, S:\[int\]bool :: {_S_.*\(S\)\[x\]} _S_.*\(S\)\[x\] <==> S\[.*Inv\(x\)\]\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('axiom \(forall x:int, S:\[int\]bool :: {_S_.*Inv\(S\)\[x\]} _S_.*Inv\(S\)\[x\] <==> S\[.*\(x\)\]\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('axiom \(forall x:int, S:\[int\]bool :: {S\[x\], _S_.*\(S\)} S\[x\] ==> _S_.*\(S\)\[.*\(x\)\]\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('axiom \(forall x:int, S:\[int\]bool :: {_S_.*\(S\)\[x\]} _S_.*\(S\)\[x\] <==> S\[.*Inv\(x\)\]\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('axiom \(forall x:int, S:\[int\]bool :: {S\[x\], _S_.*Inv\(S\)} S\[x\] ==> _S_.*Inv\(S\)\[.*Inv\(x\)\]\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('//axiom \(forall x:int :: {.*Inv\(x\)} .*Inv\(x\) == x - .*\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('axiom \(forall x:int :: {.*Inv\(x\)} .*Inv\(x\) == INT_SUB\(x,.+\)\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('axiom \(forall x:int :: {MINUS_BOTH_PTR_OR_BOTH_INT\(x, .*, .*\)} MINUS_BOTH_PTR_OR_BOTH_INT\(x, .*, .*\) == .*Inv\(x\)\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('axiom \(forall x:int :: {MINUS_LEFT_PTR\(x, .*, .*\)} MINUS_LEFT_PTR\(x, .*, .*\) == .*Inv\(x\)\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('//axiom \(forall x:int :: {.+\(x\)} .+\(x\) == PLUS\(x, 1, [0-9]+\)\);\n')
    bpl = p.sub('', bpl)
    p = re.compile('axiom \(forall x:int :: {.+\(x\)} .+\(x\) == PLUS\(x, 1, [0-9]+\)\);\n')
    bpl = p.sub('', bpl)
    return bpl
- # Find procedures without bodies in our bpl file. We should not try to inline
- # those.
- def findEmptyProcedures(bpl):
- print 'Finding empty procedures...'
-
- lines = bpl.splitlines(True)
- emptyProcedures = set()
- procedureEmpty = False
- for line in lines:
- procDef = re.compile('procedure[ ]*([a-zA-Z0-9_]*)\((.*)\)[ ]*(;?)')
- m = procDef.match(line)
- if m:
- if procedureEmpty and not procedureName in procNotInlined:
- emptyProcedures.add(procedureName)
- procedureName = m.group(1)
- procedureEmpty = True
- start = re.compile('start:')
- m = start.match(line)
- if m:
- procedureEmpty = False
- if procedureEmpty and not procedureName in procNotInlined:
- emptyProcedures.add(procedureName)
- print 'Empty procedures: ' + ', '.join(emptyProcedures) + '\n'
- return emptyProcedures
# Add k = 0, errorReached = false in the beginning of storm_main, and the
# final assertion in the end.
def instrumentStormMain(bpl):
    """Instrument the storm_main procedure: declare one __storm_thread_done_i
    flag per async thread, initialize the instrumentation state at 'start:',
    expand __storm_end_dummy into the end-of-execution assumes, and assert
    !errorReached at 'label_1:'. Returns the rewritten bpl text."""
    print 'Instrumenting storm_main...\n'

    lines = bpl.splitlines(True)
    # count the total number of threads
    threadCounter = 0
    asyncRe = re.compile('[ ]*call[ ]+{:async}[ ]+.*;\n')
    for line in lines:
        match = asyncRe.match(line)
        if match:
            threadCounter += 1
    # create __storm_thread_done variables
    newLines = []
    for i in range(threadCounter):
        globalVars.add('__storm_thread_done_' + str(i))
        newLines.append('var __storm_thread_done_' + str(i) + ' : bool;\n')
    # instrument storm_main
    foundMain = False
    for line in lines:
        # Header of storm_main: add the free requires on alloc/tid.
        l = re.compile('[ ]*procedure[ ]+' + mainProcedure + '.*\)\n')
        m = l.match(line)
        if m:
            foundMain = True
            newLines.append(line)
            newLines.append('free requires INT_LT(0, alloc);\n')
            newLines.append('free requires INT_LT(0, tid);\n')
            newLines.append('free requires INT_LT(tid, tidCount);\n')
            continue

        # 'start:' inside storm_main: initialize instrumentation state.
        l = re.compile('[ ]*start:\n')
        m = l.match(line)
        if m and foundMain:
            newLines.append(line)
            for i in range(threadCounter):
                newLines.append('__storm_thread_done_' + str(i) + ' := false;\n')
            newLines.append('k := 0;\n')
            newLines.append('errorReached := false;\n')
            newLines.append('__storm_atomic := false;\n')
            newLines.append('__storm_init := false;\n')
            continue
        # End-of-execution marker: require every thread to have finished
        # (unless an error was reached) and move to the last context.
        l = re.compile('[ ]*call[ ]+__storm_end_dummy[ ]*\(\);\n')
        m = l.match(line)
        if m:
            for i in range(threadCounter):
                newLines.append('assume errorReached || __storm_thread_done_' + str(i) + ';\n')
            newLines.append('if (!errorReached) {k := ' + str(K - 1) + ';}\n')
            newLines.append('raiseException := false;\n')
            continue
        # 'label_1:' in storm_main: place the final safety assertion.
        l = re.compile('[ ]*label_1:\n')
        m = l.match(line)
        if m and foundMain:
            foundMain = False
            newLines.append(line)
            newLines.append('assert !errorReached;\n')
            continue
        newLines.append(line)
    return ''.join(newLines)
# Add k = 0, raiseException = false in the beginning of each thread.
def instrumentThreads(bpl):
    """Rewrite every 'call {:async} foo(...)' into the sequential encoding:
    save/restore k and tid around the call, allocate a fresh thread id, run
    the context dispatch, and record thread completion in the matching
    __storm_thread_done_i flag. Helper variable declarations are inserted at
    the top of each procedure that creates threads."""
    print 'Instrumenting threads...\n'

    lines = bpl.splitlines(True)
    inProc = False # used to mark procedure entry
    newLines = []
    procLines = [] # body lines of the current procedure, buffered so that
                   # declarations can be prepended when the body ends
    totalThreadCounter = 0 # thread index across the whole program
    for line in lines:
        if not inProc:
            newLines.append(line)
        # matching procedure start
        l = re.compile('{\n')
        m = l.match(line)
        if m:
            threadCounter = 0 # thread index within this procedure
            inProc = True
            continue
        if not inProc:
            continue
        # matching procedure end
        l = re.compile('}\n')
        m = l.match(line)
        if m:
            # adding declarations of introduced help variables to each procedure
            if 0 < threadCounter:
                procLines.insert(0, 'var tidCount_old : int;\n')
            for i in range(threadCounter):
                procLines.insert(0, 'var k_old_' + str(i) + ' : int;\n')
                procLines.insert(0, 'var tid_old_' + str(i) + ' : int;\n')
            newLines.extend(procLines)
            del procLines[:]
            newLines.append(line)
            inProc = False
            continue
        asyncRe = re.compile('[ ]*call[ ]+{:async}[ ]+(.*);\n')
        match = asyncRe.match(line)
        if match:
            # Save scheduling state and mint a strictly larger thread id.
            procLines.append('k_old_' + str(threadCounter) + ' := k;\n')
            procLines.append('tid_old_' + str(threadCounter) + ' := tid;\n')
            procLines.append('tidCount_old := tidCount; havoc tidCount; assume tidCount_old < tidCount;\n')
            procLines.append('tid := tidCount;\n')
            procLines.append('raiseException := false;\n')
            # Per-context dispatch before entering the thread body.
            for i in range(0, K):
                if i == 0:
                    procLines.append(' if (k == 0) {\n')
                else:
                    procLines.append(' } else if (k == ' + str(i) + ') {\n')
                procLines.append(' call storm_context_' + str(i) + '();\n')
            procLines.append(' }\n')
            procLines.append(contextSwitchCall)
            procLines.append('/* Don\'t put raiseException check on this call */ call ' + match.group(1) + ';\n')
            procLines.append('if (errorReached || !raiseException) {__storm_thread_done_' + str(totalThreadCounter) + ' := true;}\n')
            # Restore the creating thread's scheduling state.
            procLines.append('k := k_old_' + str(threadCounter) + ';\n')
            procLines.append('tid := tid_old_' + str(threadCounter) + ';\n')
            threadCounter += 1
            totalThreadCounter += 1
        else:
            procLines.append(line)
    return ''.join(newLines)
# Replace instrumented globals stored on heap with scalars, i.e. replace Mem[x] with x
# when x is an instrumented global. Also add those globals to modifies sets of
# all procedures that are inlined and mark those procedures with ":inline 1".
def replaceGlobalsWithScalars(bpl):
    """Turn heap-resident globals into Boogie scalar variables: instrumented
    globals get one copy per context (G_0..G_{K-1}) plus saved copies
    (G_s_1..), uninstrumented globals become plain vars, Mem_T accesses to
    all of them are collapsed to direct reads/writes, and every inlined
    procedure receives the full modifies clause."""
    print 'Replacing instrumented globals with scalars...\n'
    orGlobalVars = '|'.join(globalVars)
    # Drop the address constants HAVOC declared for the instrumentation vars.
    p = re.compile('const unique (' + orGlobalVars + ') : int;')
    bpl = p.sub('', bpl)
    # replace declarations of instrumented globals with scalars and introduce
    # their copies
    orInstrumentedGlobals = '|'.join(instrumentedGlobals)
    p = re.compile('const unique (' + orInstrumentedGlobals + ') : int;')
    substitution = ''
    # NOTE: \n inside a raw replacement string is still expanded to a newline
    # by re.sub's replacement-template processing.
    for i in range(0, K):
        substitution += r'var \1_' + str(i) + r' : int;\n'
    for i in range(1, K):
        substitution += r'var \1_s_' + str(i) + r' : int;\n'
    bpl = p.sub(substitution, bpl)
    orUninstrumentedGlobals = '|'.join(uninstrumentedGlobals)
    p = re.compile('const unique (' + orUninstrumentedGlobals + ') : int;')
    bpl = p.sub(r'var \1 : int;\n', bpl)
    # Full modifies set for inlined procedures: instrumentation vars,
    # uninstrumented globals, and every per-context copy.
    modifiedGlobals = set(globalVars)
    modifiedGlobals.update(uninstrumentedGlobals)
    modifiedGlobals.update(g + '_' + str(i) for i in range(0, K) for g in instrumentedGlobals)
    modifiedGlobals.update(g + '_s_' + str(i) for i in range(1, K) for g in instrumentedGlobals)
    p = re.compile('procedure[ ]*([a-zA-Z0-9_]*)\((.*)\)[ ]*(;?)')
    bpl = p.sub(lambda match: addInlineModifies(match, modifiedGlobals), bpl)

    orGlobalVars = '|'.join(globalVars | instrumentedGlobals | uninstrumentedGlobals)
    p = re.compile('axiom\((' + orGlobalVars + ') != 0\);\n')
    bpl = p.sub('', bpl)
    # p = re.compile('axiom\(Base\((' + orGlobalVars + ')\) == (' + orGlobalVars + ')\);\n')
    # bpl = p.sub('', bpl)
    # Collapse heap reads: Mem_T.f[G] --> G
    p = re.compile('Mem_T.[a-zA-Z0-9_]*\[(' + orGlobalVars + ')\]')
    bpl = p.sub(r'\1', bpl)

    # Same for the LOOP_* snapshot copies used by HAVOC's 'old' translation.
    p = re.compile('LOOP_[a-zA-Z0-9_]*_Mem_T.[a-zA-Z0-9_]*\[(LOOP_[a-zA-Z0-9_]*_(' + orGlobalVars + '))\]')
    bpl = p.sub(r'\1', bpl)

    # Collapse heap writes: Mem_T.f := Mem_T.f[G := e] --> G := e
    p = re.compile('Mem_T.[a-zA-Z0-9_]* := Mem_T.[a-zA-Z0-9_]*\[(' + orGlobalVars + ') := ([a-zA-Z0-9_$.]*)\];')
    bpl = p.sub(r'\1 := \2;', bpl)
    return bpl
def addInlineModifies(match, modifiedGlobals):
    """re.sub callback for procedure headers: tag procedures that will be
    inlined with {:inline 1} and attach the full modifies clause to them.
    Non-inlined procedures and storm_main keep a plain header."""
    name = match.group(1)
    keepPlain = name in procNotInlined or name == mainProcedure
    attr = '' if keepPlain else '{:inline 1} '
    header = 'procedure ' + attr + name + '(' + match.group(2) + ')' + match.group(3)
    if name not in procNotInlined:
        header += '\nmodifies ' + ', '.join(modifiedGlobals) + ';'
    return header
- # Putting raiseException checks after procedure calls.
- #
- # raiseException variable is used to model exceptions that are introduced during
- # instrumentation (see CAV paper). raiseException is going to be set to true
- # when exception is thrown. Once raiseException is set, we have to return from
- # all procedures in the current call chain. That is accomplished by instrumenting
- # each procedure call like this:
- #
- # call foo(); ---> call foo(); if (raiseException) {return;}
- #
- def putRaiseExceptionChecks(bpl):
- print 'Putting raiseException checks after procedure calls...\n'
-
- lines = bpl.splitlines(True)
- newLines = []
- for line in lines:
- newLines.append(line);
- l = re.compile('call .*?([^ :=()]*)[ ]*\(.*;\n')
- m = l.match(line)
- if m:
- if m.group(1) not in procNoRaiseExceptionCheck:
- newLines.append('if (raiseException) {goto label_1;}\n')
- return ''.join(newLines)
def justRunBoogie(checkCmd):
    """Execute the given Boogie command line and return everything it wrote
    to stdout as a single string."""
    pipe = os.popen(checkCmd, 'r')
    try:
        output = pipe.read()
    finally:
        pipe.close()
    return output
# Run just boogie on abstraction
def runOnlyBoogie(bpl):
    """Apply field abstraction and the explicit case split, inline the
    program, write the final bpl to outputBplFile, and (unless --no-check)
    run Boogie on it, retrying with each heuristic option set until one does
    not time out. Optionally replays the prover log through Z3. Returns the
    captured checker output (empty string when checking is skipped)."""
    bpl = fieldAbstraction(bpl) # havocs reads/removes writes based on "abstract_keep_fields"
    bpl = explicitCaseSplit(bpl, abstractKeepFields, [], True) # if (k==i) St[Mem_i/Mem]
    if resources:
        bpl = explicitCaseSplitResource(bpl, resources, True); # if (k==i) St[Res_i/Res]
    bpl, header = inline(bpl) # write tmp.bpl to disk, call Boogie to do inlining, collect inlined storm_main
    # write output
    fout = open(outputBplFile, 'w')
    fout.write(bpl)
    fout.close()

    boogieResult = ''
    if runBoogie:
        # run Boogie
        print 'Running Boogie...\n'
        if measureTime:
            checkCmd = 'ptime ' + boogieBin + ' ' + outputBplFile + ' ' + boogieOpts
        else:
            checkCmd = boogieBin + ' ' + outputBplFile + ' ' + boogieOpts
        if not outputZ3File == '':
            checkCmd += ' /proverLog:' + outputZ3File
        # Query-generation modes override the timeouts with short ones.
        if not generateSx == '':
            checkCmd += ' /noinfer /timeLimit:5 /errorLimit:1 /proverLog:' + generateSx
        elif not generateSmt == '':
            checkCmd += ' /noinfer /timeLimit:5 /errorLimit:1 /prover:SMTLib /smtOutput:' + generateSmt
        # Try each heuristic option set until one finishes without timing out.
        p = re.compile('time out')
        for parameters in boogieHeuristics:
            checkCmdTmp = checkCmd
            for param in parameters:
                checkCmdTmp += ' ' + param
            boogieResult = justRunBoogie(checkCmdTmp)
            if not p.search(boogieResult):
                break
        print boogieResult
        # run Z3
        # NOTE(review): assumed to be inside the runBoogie branch, since the
        # prover log only exists after a Boogie run -- confirm against the
        # full (untruncated) file.
        if not outputZ3File == '':
            print 'Running Z3...\n'
            if measureTime:
                z3Cmd = 'ptime z3 ' + outputZ3File + ' /T:3600'
            else:
                z3Cmd = 'z3 ' + outputZ3File + ' /T:3600'
            f = os.popen(z3Cmd, 'r')
            boogieResult = f.read()
            f.close()
            print boogieResult
    return boogieResult
# Field abstraction (see CAV paper).
# Havocks (i.e. introduces nondet values) memory reads and skips (i.e. removes)
# memory writes to untracked fields.
def fieldAbstraction(bpl):
    """Abstract away untracked heap fields: drop their Mem_T declarations and
    modifies clauses, pull every memory read out into a helper scalar
    (myVar_n for tracked fields, havocked myNondetVar_n for untracked ones),
    and delete writes to untracked fields. Returns the rewritten bpl text."""
    print 'Field abstraction...\n'

    # Remove declarations / modifies entries of untracked Mem_T maps.
    p = re.compile('var Mem_T.([a-zA-Z0-9_]*) : \[int\]int;\n')
    bpl = p.sub(removeMem, bpl)

    p = re.compile('modifies Mem_T.([a-zA-Z0-9_]*);\n')
    bpl = p.sub(removeMem, bpl)

    lines = bpl.splitlines(True)

    inProc = False # used to mark procedure entry
    newLines = []
    procLines = [] # buffered body of the current procedure, so helper var
                   # declarations can be prepended at the closing brace
    for line in lines:
        if not inProc:
            newLines.append(line)

        # matching procedure start
        l = re.compile('{\n')
        m = l.match(line)
        if m:
            inProc = True
            # Highest helper-variable index needed anywhere in this body.
            nondetVarCounterMax = 0
            varCounterMax = 0
            continue
        if not inProc:
            continue
        # matching procedure end
        l = re.compile('}\n')
        m = l.match(line)
        if m:
            # adding declarations of introduced help variables to each procedure
            for i in reversed(range(0, varCounterMax)):
                procLines.insert(0, 'var myVar_' + str(i) + ':int;\n')
            for i in reversed(range(0, nondetVarCounterMax)):
                procLines.insert(0, 'var myNondetVar_' + str(i) + ':int;\n')
            newLines.extend(procLines)
            del procLines[:]
            newLines.append(line)
            inProc = False
            continue

        # Assertions are left untouched by the abstraction.
        l = re.compile('assert[ ]+(.*);\n')
        m = l.match(line)
        if m:
            procLines.append(line)
            continue

        # Helper indices restart on every statement (vars are reused).
        nondetVarCounter = 0
        varCounter = 0
        l = re.compile('call .*;\n')
        m = l.match(line)
        if m:
            # replacing all memory reads in a call statement with scalar variables
            # myVar_n or myNondetVar_n (where n is a counter) depending on whether the
            # field is tracked or not (myNondetVar_n is a nondeterministic variable,
            # i.e. havocked, introduced for untracked fields)
            l = re.compile('(.*)(Mem_T.([a-zA-Z0-9_]*)\[[^\[\]=]*\])(.*);\n')
            m = l.match(line)
            while m:
                field = m.group(3)
                if field in abstractKeepFieldsAll:
                    var = 'myVar_' + str(varCounter)
                    procLines.append(var + ' := ' + m.group(2) + ';\n')
                    varCounter += 1
                else:
                    var = 'myNondetVar_' + str(nondetVarCounter)
                    procLines.append('havoc ' + var + ';\n')
                    nondetVarCounter += 1
                line = m.group(1) + var + m.group(4) + ';\n'
                m = l.match(line)
        else:
            # replacing all memory reads of untracked fields in a statement with
            # nondeterministic scalar variables myNondetVar_n (where n is a counter)
            # Innermost accesses first: 'brackets' grows so that nested
            # Mem_T reads are rewritten inside-out.
            brackets = 0
            atLeastOneMatch = True
            while atLeastOneMatch:
                atLeastOneMatch = False
                lineChanged = True
                # this regular expression is black magic :), but it works
                l = re.compile('Mem_T.([a-zA-Z0-9_]*)\[([^=]*?\[){' + str(brackets) + ',' + str(brackets) + '}' \
                               '[^\[\]=]*?(\][^=]*?){' + str(brackets) + ',' + str(brackets) + '}\]')
                brackets += 1
                while lineChanged:
                    lineChanged = False
                    iter = l.finditer(line)
                    for match in iter:
                        atLeastOneMatch = True
                        field = match.group(1)
                        if field in abstractKeepFieldsAll:
                            var = 'myVar_' + str(varCounter)
                            procLines.append(var + ' := ' + line[match.start():match.end()] + ';\n')
                            line = line[0:match.start()] + var + line[match.end():]
                            varCounter += 1
                            lineChanged = True
                            break
                        else:
                            var = 'myNondetVar_' + str(nondetVarCounter)
                            procLines.append('havoc ' + var + ';\n')
                            line = line[0:match.start()] + var + line[match.end():]
                            nondetVarCounter += 1
                            lineChanged = True
                            break
        if varCounter > varCounterMax:
            varCounterMax = varCounter
        if nondetVarCounter > nondetVarCounterMax:
            nondetVarCounterMax = nondetVarCounter
        # skipping memory writes to untracked fields
        l = re.compile('Mem_T.([a-zA-Z0-9_]*) := Mem_T.[a-zA-Z0-9_]*\[.*\];\n')
        m = l.match(line)
        if m and not m.group(1) in abstractKeepFieldsAll:
            procLines.append('// skip memory write\n')
            continue
        # removing untracked fields from loop invariants (loop assignments used in
        # the translation of 'old' by HAVOC)
        l = re.compile('LOOP_[a-zA-Z0-9_]*_Mem_T.([a-zA-Z0-9_]*):=Mem_T.[a-zA-Z0-9_]*;\n')
        m = l.match(line)
        if m and not m.group(1) in abstractKeepFieldsAll:
            continue

        procLines.append(line)
    return ''.join(newLines)
def removeMem(match):
    """re.sub callback used by fieldAbstraction: keep the matched Mem_T
    declaration/modifies line only when its field is tracked; untracked
    fields have their line erased."""
    if match.group(1) in abstractKeepFieldsAll:
        return match.group()
    return ''
- # Explicit case split for memory and instrumented globals.
- # Replaces every memory and instrumented globals access (i.e. read or write of
- # one of the Mem maps, or read or write to an instrumented global G) in a
- # statement St with the appropriate copy of the Mem map/global and then
- # introduces the case-split on k (see CAV paper):
- #
- # St --->
- # if (k==0) St[Mem_0/Mem,G_0/G];
- # else if (k==1) St[Mem_1/Mem,G_1/G];
- # ....
- # else if (k==i) St[Mem_i/Mem,G_i/G];
- # ....
- # else if (k==K - 1) St[Mem_K-1/Mem,G_K-1/G]
- #
- # In addition, a call to contextSwitch (i.e. procedure Schedule from CAV paper)
- # is added after each such statement.
- # In the end, adds instrumentation assumes to the beggining and end of
- # storm_main.
- # Note: processedFields argument is used only with CEGAR
- #
def explicitCaseSplit(bpl, abstractKeepFields, processedFields, putSwitch):
    """Explicitly case-split tracked memory maps and instrumented globals on k.

    Every statement St that reads or writes a tracked Mem_T.<field> map or an
    instrumented global G is replicated once per context copy 0..K-1
    (Mem_i_T.<field> / G_i), emitted either as an 'if (k == i) ...' chain or,
    for asserts/assumes, as per-copy implications 'k == i ==> ...'.  When
    putSwitch is true, a call to contextSwitchCall (procedure Schedule from
    the CAV paper) is appended after each instrumented statement.
    Declarations ('var'), LOOP_* copies and 'modifies' clauses of tracked
    maps are likewise replaced by their K copies, plus symbolic constants
    Mem_s_i_T.<field> consumed by the instrumentation assumes.

    bpl                - Boogie program text to instrument
    abstractKeepFields - names of tracked (kept) memory map fields
    processedFields    - fields already handled by an earlier pass
                         (used only with CEGAR)
    putSwitch          - whether to append contextSwitchCall after each
                         instrumented statement

    Uses module globals: K, instrumentedGlobals, contextSwitchCall,
    procNotInlined.  Returns the rewritten program text with the
    instrumentation assumes added by addInstrumentationAssumes.
    """
    print 'Explicit case splitting memories and instrumented globals...\n'

    lines = bpl.splitlines(True)
    newLines = [] # freshly generated lines that will replace what we currently have
    # Pass 1: rewrite every statement that touches a tracked Mem map or an
    # instrumented global into its per-context form.
    for line in lines:
        # matches any statement that accesses some Mem_T.<field> map
        memRegExpr = re.compile('(.*)Mem_T\.([a-zA-Z0-9_]+)\[(.*);\n')
        if instrumentedGlobals:
            orGlobalVars = '|'.join(instrumentedGlobals)
            # an instrumented global G not adjacent to '_' (so already-renamed
            # copies G_i and longer identifiers do not match)
            instrGlobalRegExpr = re.compile('(.*[^_]|^)(' + orGlobalVars + ')([^_].*;|;)\n')
            instrGlobalMatch = instrGlobalRegExpr.match(line)
        else:
            instrGlobalRegExpr = None
            instrGlobalMatch = None
        memMatch = memRegExpr.match(line)
        if memMatch and memMatch.group(2) in abstractKeepFields:
            # the statement contains tracked Mem reads/writes
            instrLines = []
            for i in range(0, K):
                # replace Mem and G with Mem_i and G_i and store into instrLines
                tmpLine = line
                memMatch = memRegExpr.match(tmpLine)
                while memMatch:
                    tmpLine = memRegExpr.sub(r'\1Mem_' + str(i) + r'_T.\2[\3;\n', tmpLine)
                    memMatch = memRegExpr.match(tmpLine)
                if instrGlobalRegExpr:
                    instrGlobalMatch = instrGlobalRegExpr.match(tmpLine)
                    while instrGlobalMatch:
                        tmpLine = instrGlobalRegExpr.sub(r'\1\2_' + str(i) + r'\3\n', tmpLine)
                        instrGlobalMatch = instrGlobalRegExpr.match(tmpLine)
                instrLines.append(tmpLine)
            # assert(F); is instrumented as:
            #   assert(k == 0 ==> F[Mem_0/Mem,G_0/G]);
            #   ....
            #   assert(k == i ==> F[Mem_i/Mem,G_i/G]);
            #   ....
            #   assert(k == K-1 ==> F[Mem_K-1/Mem,G_K-1/G]);
            l = re.compile('[ ]*assert[ ]*(.*);\n')
            m = l.match(line)
            if m:
                for i in range(0, K):
                    m = l.match(instrLines[i])
                    newLines.append('assert (k == ' + str(i) + ' ==> (' + m.group(1) + '));\n')
                continue

            # assume(F); is instrumented as:
            #   assume(k == 0 ==> F[Mem_0/Mem,G_0/G]);
            #   ....
            #   assume(k == i ==> F[Mem_i/Mem,G_i/G]);
            #   ....
            #   assume(k == K-1 ==> F[Mem_K-1/Mem,G_K-1/G]);
            l = re.compile('[ ]*assume[ ]*(.*);\n')
            m = l.match(line)
            if m:
                for i in range(0, K):
                    m = l.match(instrLines[i])
                    newLines.append('assume (k == ' + str(i) + ' ==> (' + m.group(1) + '));\n')
                if putSwitch:
                    newLines.append(contextSwitchCall)
                continue

            l = re.compile('[ ]*Mem.*:=.*;\n')
            m = l.match(line)
            if not m:
                # the statement is not a memory assignment: emit the k-chain
                if K > 1:
                    newLines.append(' if (k == 0) {\n')
                    newLines.append(instrLines[0])
                    for i in range(1, K):
                        newLines.append(' } else if (k == ' + str(i) + ') {\n')
                        newLines.append(instrLines[i])
                    newLines.append(' }\n')
                else:
                    newLines.append(instrLines[0])
                if putSwitch:
                    newLines.append(contextSwitchCall)
                continue

            l = re.compile('[ ]*Mem_T\.([a-zA-Z0-9_]+) := Mem_T\.([a-zA-Z0-9_]+)\[.*\];\n')
            m = l.match(line)
            if m and m.group(1) in abstractKeepFields and m.group(2) in abstractKeepFields:
                # whole-map memory assignment Mem_T.f := Mem_T.g[...];
                # re-emitted per copy with both sides renamed to copy i
                if K > 1:
                    newLines.append(' if (k == 0) {\n')
                    l = re.compile('[ ]*Mem_T\.([a-zA-Z0-9_]*) := Mem_0_T\.([a-zA-Z0-9_]*\[.*\]);\n')
                    m = l.match(instrLines[0])
                    newLines.append('Mem_0_T.' + m.group(1) + ' := Mem_0_T.' + m.group(2) + ';\n')
                    for i in range(1, K):
                        newLines.append(' } else if (k == ' + str(i) + ') {\n')
                        l = re.compile('[ ]*Mem_T\.([a-zA-Z0-9_]*) := Mem_' + str(i) + '_T\.([a-zA-Z0-9_]*\[.*\]);\n')
                        m = l.match(instrLines[i])
                        newLines.append('Mem_' + str(i) + '_T.' + m.group(1) + ' := Mem_' + str(i) + '_T.' + m.group(2) + ';\n')
                    newLines.append(' }\n')
                else:
                    l = re.compile('[ ]*Mem_T\.([a-zA-Z0-9_]*) := Mem_0_T\.([a-zA-Z0-9_]*\[.*\]);\n')
                    m = l.match(instrLines[0])
                    newLines.append('Mem_0_T.' + m.group(1) + ' := Mem_0_T.' + m.group(2) + ';\n')
                if putSwitch:
                    newLines.append(contextSwitchCall)
                continue
            # a tracked-Mem statement of a shape we do not handle: fail loudly
            assert False
        elif instrGlobalMatch and not line.startswith('modifies ') and not line.startswith('var '):
            # the statement contains only instrumented global reads/writes
            instrLines = []
            for i in range(0, K):
                tmpLine = line
                instrGlobalMatch = instrGlobalRegExpr.match(tmpLine)
                while instrGlobalMatch:
                    tmpLine = instrGlobalRegExpr.sub(r'\1\2_' + str(i) + r'\3\n', tmpLine)
                    instrGlobalMatch = instrGlobalRegExpr.match(tmpLine)
                instrLines.append(tmpLine)

            # asserts over globals become per-copy implications
            l = re.compile('assert[ ]*(.*);\n')
            m = l.match(line)
            if m:
                for i in range(0, K):
                    m = l.match(instrLines[i])
                    newLines.append('assert (k == ' + str(i) + ' ==> (' + m.group(1) + '));\n')
                continue

            # assumes over globals become per-copy implications
            l = re.compile('assume[ ]*(.*);\n')
            m = l.match(line)
            if m:
                for i in range(0, K):
                    m = l.match(instrLines[i])
                    newLines.append('assume (k == ' + str(i) + ' ==> (' + m.group(1) + '));\n')
                if putSwitch:
                    newLines.append(contextSwitchCall)
                continue

            # any other statement: emit the k-chain of copies
            if K > 1:
                newLines.append(' if (k == 0) {\n')
                newLines.append(instrLines[0])
                for i in range(1, K):
                    newLines.append(' } else if (k == ' + str(i) + ') {\n')
                    newLines.append(instrLines[i])
                newLines.append(' }\n')
            else:
                newLines.append(instrLines[0])
            if putSwitch and not line.startswith('LOOP'):
                newLines.append(contextSwitchCall)
        else:
            newLines.append(line)
    lines = newLines

    # Pass 2: rewrite declarations, LOOP copies and modifies clauses of the
    # tracked memory maps into their K copies.
    newLines = []
    memories = []
    inlinedProcedure = True
    for line in lines:
        # track whether we are inside a procedure that gets inlined; modifies
        # clauses of tracked maps in non-inlined procedures are dropped below
        l = re.compile('[ ]*procedure([ ]+{:inline 1}[ ]+|[ ]+)([a-zA-Z0-9_]*)\((.*)\)')
        m = l.search(line)
        if m:
            procName = m.group(2)
            if procName in procNotInlined:
                inlinedProcedure = False
            else:
                inlinedProcedure = True
        l = re.compile('[ ]*var Mem_T\.([a-zA-Z0-9_]+) : \[int\]int;\n')
        m = l.match(line)
        if m and m.group(1) in abstractKeepFields:
            # replace memory map declarations with declarations of copies of
            # memory maps (Mem_i) and unconstrained constants (Mem_s_i)
            if not m.group(1) in processedFields:
                for i in range(0, K):
                    newLines.append('var Mem_' + str(i) + '_T.' + m.group(1) + ' : [int]int;\n')
                for i in range(1, K):
                    newLines.append('var Mem_s_' + str(i) + '_T.' + m.group(1) + ' : [int]int;\n')
                memories.append(m.group(1));
            continue

        l = re.compile('[ ]*var LOOP_([a-zA-Z0-9_]*)_Mem_T\.([a-zA-Z0-9_]+):\[int\]int;\n')
        m = l.match(line)
        if m and m.group(2) in abstractKeepFields:
            # NOTE(review): the processedFields test below uses group(1) (the
            # loop label) while the tracked-field test above uses group(2)
            # (the field name) -- possibly intended to be group(2); confirm.
            if not m.group(1) in processedFields:
                for i in range(0, K):
                    newLines.append('var LOOP_' + m.group(1) + '_Mem_' + str(i) + '_T.' + m.group(2) + ':[int]int;\n')
            continue

        l = re.compile('[ ]*LOOP_([a-zA-Z0-9_]*)_Mem_T\.([a-zA-Z0-9_]+):=Mem_T\.[a-zA-Z0-9_]*;\n')
        m = l.match(line)
        if m and m.group(2) in abstractKeepFields:
            # NOTE(review): same group(1)-vs-group(2) question as above.
            if not m.group(1) in processedFields:
                for i in range(0, K):
                    newLines.append('LOOP_' + m.group(1) + '_Mem_' + str(i) + '_T.' + m.group(2) + ':=Mem_' + str(i) + '_T.' + m.group(2) + ';\n')
            continue

        l = re.compile('[ ]*modifies Mem_T\.([a-zA-Z0-9_]+);\n')
        m = l.match(line)
        if m and m.group(1) in abstractKeepFields:
            # only inlined procedures get per-copy modifies clauses; the
            # original clause is dropped either way
            if inlinedProcedure and not m.group(1) in processedFields:
                for i in range(0, K):
                    newLines.append('modifies Mem_' + str(i) + '_T.' + m.group(1) + ';\n')
            continue

        newLines.append(line)
    return addInstrumentationAssumes(''.join(newLines), memories)
- # Adds instrumentation assumes for memories and instrumented globals to the
- # beginning of the storm_main procedure and in the end, just before the
- # errorReached assertion
def addInstrumentationAssumes(bpl, memories):
    """Insert the memory/global instrumentation assumes into storm_main.

    At the 'start' label of the main procedure, each memory copy Mem_i
    (i >= 1) and instrumented global G_i is assumed equal to its symbolic
    constant (Mem_s_i / G_s_i).  Just before the final '!errorReached'
    assertion, copy i is assumed equal to the symbolic constant of copy
    i+1, chaining consecutive contexts together.

    bpl      - Boogie program text
    memories - tracked field names whose Mem maps were copied
    Uses module globals: mainProcedure, K, instrumentedGlobals.
    """
    out = []
    insideMain = False
    for curLine in bpl.splitlines(True):
        if re.match('(.*)' + mainProcedure + '\(\)\n', curLine):
            insideMain = True

        # entry of storm_main: tie every copy i >= 1 to its symbolic constant
        if insideMain and re.match('[ ]*start(#[0-9])?:\n', curLine):
            out.append(curLine)
            for mem in memories:
                for i in range(1, K):
                    out.append('assume Mem_' + str(i) + '_T.' + mem + ' == Mem_s_' + str(i) + '_T.' + mem + ';\n')
            for var in instrumentedGlobals:
                for i in range(1, K):
                    out.append('assume ' + var + '_' + str(i) + ' == ' + var + '_s_' + str(i) + ';\n')
            continue

        # exit of storm_main: link copy i to the symbolic constant of copy i+1
        if insideMain and re.match('[ ]*assert.+!errorReached.*;\n', curLine):
            insideMain = False
            for mem in memories:
                for i in range(0, K - 1):
                    out.append('assume Mem_' + str(i) + '_T.' + mem + ' == Mem_s_' + str(i + 1) + '_T.' + mem + ';\n')
            for var in instrumentedGlobals:
                for i in range(0, K - 1):
                    out.append('assume ' + var + '_' + str(i) + ' == ' + var + '_s_' + str(i + 1) + ';\n')

        out.append(curLine)
    return ''.join(out)
- # Explicit case split for HAVOC's resources.
- # Replaces every resource access (i.e. read or write of one of the resource
- # maps) in a statement St with the appropriate copy of the resource and then
- # introduces the case-split on k (see CAV paper):
- #
- # St --->
- # if (k==0) St[Res_0/Res];
- # else if (k==1) St[Res_1/Res];
- # ....
- # else if (k==i) St[Res_i/Res];
- # ....
- # else if (k==K - 1) St[Res_K-1/Res]
- #
- # In addition, a call to contextSwitch (i.e. procedure Schedule from CAV paper)
- # is added after each such statement.
# In the end, adds instrumentation assumes to the beginning and end of
- # storm_main.
- #
def explicitCaseSplitResource(bpl, resources, putSwitch):
    """Explicitly case-split HAVOC resource maps on the context counter k.

    Analogous to explicitCaseSplit, but for HAVOC's resource maps
    Res_<name>: declarations, LOOP copies and 'modifies' clauses are
    replaced by per-context copies Res_i_<name> (plus symbolic constants
    Res_s_i_<name>), and every statement accessing a resource is rewritten
    into an 'if (k == i)' chain or per-copy assert/assume implications,
    with an optional contextSwitchCall appended after each instrumented
    statement.

    bpl       - Boogie program text to instrument
    resources - resource names to case-split
    putSwitch - whether to append contextSwitchCall after each statement

    Uses module globals: K, contextSwitchCall, procNotInlined.
    Returns the result of addInstrumentationResourceAssumes on the
    rewritten text.
    """
    print 'Explicit case splitting resources...\n'
    lines = bpl.splitlines(True)
    orResources = '|'.join(resources)
    usedResources = []
    newLines = []
    inlinedProcedure = True
    # Pass 1: rewrite declarations, LOOP copies and modifies clauses of the
    # resource maps into their K copies.
    for line in lines:
        # track whether we are inside a procedure that gets inlined; modifies
        # clauses in non-inlined procedures are dropped below
        l = re.compile('[ ]*procedure([ ]+{:inline 1}[ ]+|[ ]+)([a-zA-Z0-9_]*)\((.*)\)')
        m = l.search(line)
        if m:
            procName = m.group(2)
            if procName in procNotInlined:
                inlinedProcedure = False
            else:
                inlinedProcedure = True
        # a resource declaration is replaced by K copies plus K-1 symbolic
        # constants Res_s_i used by the instrumentation assumes
        resourceRegExpr = re.compile('[ ]*var Res_(' + orResources + '):\[int\]int;\n')
        resourceMatch = resourceRegExpr.match(line)
        if resourceMatch:
            usedResources.append(resourceMatch.group(1))
            for i in range(0, K):
                newLines.append('var Res_' + str(i) + '_' + resourceMatch.group(1) + ' : [int]int;\n')
            for i in range(1, K):
                newLines.append('var Res_s_' + str(i) + '_' + resourceMatch.group(1) + ' : [int]int;\n')
            continue
        l = re.compile('[ ]*var LOOP_([a-zA-Z0-9_]*)_Res_(' + orResources + '):\[int\]int;\n')
        m = l.match(line)
        if m:
            for i in range(0, K):
                newLines.append('var LOOP_' + m.group(1) + '_Res_' + str(i) + '_' + m.group(2) + ':[int]int;\n')
            continue
        l = re.compile('[ ]*LOOP_([a-zA-Z0-9_]*)_Res_(' + orResources + ') := Res_(' + orResources + ');\n')
        m = l.match(line)
        if m:
            for i in range(0, K):
                newLines.append('LOOP_' + m.group(1) + '_Res_' + str(i) + '_' + m.group(2) + ' := Res_' + str(i) + '_' + m.group(2) + ';\n')
            continue

        l = re.compile('[ ]*modifies Res_(' + orResources + ');\n')
        m = l.match(line)
        if m:
            # only inlined procedures get per-copy modifies clauses; the
            # original clause is dropped either way
            if inlinedProcedure:
                for i in range(0, K):
                    newLines.append('modifies Res_' + str(i) + '_' + m.group(1) + ';\n')
            continue

        newLines.append(line)
    lines = newLines
    # Pass 2: rewrite every statement that accesses a resource map into its
    # per-context form.
    newLines = []
    for line in lines:
        l = re.compile('(.*)Res_(' + orResources + ')(.*);\n')
        m = l.match(line)
        if m:
            # build the K per-copy versions of the statement
            instrLines = []
            for i in range(0, K):
                tmpLine = line
                m = l.match(tmpLine)
                while m:
                    tmpLine = l.sub(r'\1Res_' + str(i) + r'_\2\3;\n', tmpLine)
                    m = l.match(tmpLine)
                instrLines.append(tmpLine)

            # asserts become per-copy implications 'k == i ==> ...'
            l = re.compile('[ ]*assert[ ]*(.*);\n')
            m = l.match(line)
            if m:
                for i in range(0, K):
                    m = l.match(instrLines[i])
                    newLines.append('assert (k == ' + str(i) + ' ==> (' + m.group(1) + '));\n')
                continue

            # assumes become per-copy implications 'k == i ==> ...'
            l = re.compile('[ ]*assume[ ]*(.*);\n')
            m = l.match(line)
            if m:
                for i in range(0, K):
                    m = l.match(instrLines[i])
                    newLines.append('assume (k == ' + str(i) + ' ==> (' + m.group(1) + '));\n')
                if putSwitch:
                    newLines.append(contextSwitchCall)
                continue

            l = re.compile('[ ]*Res.*:=.*;\n')
            m = l.match(line)
            if not m:
                # the statement is not a resource assignment: emit the k-chain
                if K > 1:
                    newLines.append(' if (k == 0) {\n')
                    newLines.append(instrLines[0])
                    for i in range(1, K):
                        newLines.append(' } else if (k == ' + str(i) + ') {\n')
                        newLines.append(instrLines[i])
                    newLines.append(' }\n')
                else:
                    newLines.append(instrLines[0])
                if putSwitch:
                    newLines.append(contextSwitchCall)
                continue

            l = re.compile('[ ]*Res_(' + orResources + ') := Res_((' + orResources + ')\[.*\]);\n')
            m = l.match(line)
            if m:
                # whole-map resource assignment Res_x := Res_y[...];
                # re-emitted per copy with both sides renamed to copy i
                if K > 1:
                    newLines.append(' if (k == 0) {\n')
                    l = re.compile('[ ]*Res_0_(' + orResources + ') := Res_0_([a-zA-Z0-9_]*\[.*\]);\n')
                    m = l.match(instrLines[0])
                    newLines.append('Res_0_' + m.group(1) + ' := Res_0_' + m.group(2) + ';\n')
                    for i in range(1, K):
                        newLines.append(' } else if (k == ' + str(i) + ') {\n')
                        l = re.compile('[ ]*Res_' + str(i) + '_(' + orResources + ') := Res_' + str(i) + '_([a-zA-Z0-9_]*\[.*\]);\n')
                        m = l.match(instrLines[i])
                        newLines.append('Res_' + str(i) + '_' + m.group(1) + ' := Res_' + str(i) + '_' + m.group(2) + ';\n')
                    newLines.append(' }\n')
                else:
                    l = re.compile('[ ]*Res_0_(' + orResources + ') := Res_0_([a-zA-Z0-9_]*\[.*\]);\n')
                    m = l.match(instrLines[0])
                    newLines.append('Res_0_' + m.group(1) + ' := Res_0_' + m.group(2) + ';\n')
                if putSwitch:
                    newLines.append(contextSwitchCall)
                continue
            # a resource statement of a shape we do not handle: fail loudly
            assert False
        else:
            newLines.append(line)
    return addInstrumentationResourceAssumes(''.join(newLines), usedResources)
- # Adds instrumentation assumes for used resources to the beginning of the storm_main
- # procedure and in the end, just before the errorReached assertion
def addInstrumentationResourceAssumes(bpl, usedResources):
    """Insert the resource instrumentation assumes into storm_main.

    At the 'start' label of the main procedure, each resource copy Res_i
    (i >= 1) is assumed equal to its symbolic constant Res_s_i.  Just
    before the final '!errorReached' assertion, copy i is assumed equal
    to the symbolic constant of copy i+1, chaining consecutive contexts.

    bpl           - Boogie program text
    usedResources - names of resources whose maps were copied
    Uses module globals: mainProcedure, K.
    """
    out = []
    insideMain = False
    for curLine in bpl.splitlines(True):
        if re.match('(.*)' + mainProcedure + '\(\)\n', curLine):
            insideMain = True

        # entry of storm_main: tie every copy i >= 1 to its symbolic constant
        if insideMain and re.match('[ ]*start:\n', curLine):
            out.append(curLine)
            for res in usedResources:
                for i in range(1, K):
                    out.append('assume Res_' + str(i) + '_' + res + ' == Res_s_' + str(i) + '_' + res + ';\n')
            continue

        # exit of storm_main: link copy i to the symbolic constant of copy i+1
        if insideMain and re.match('[ ]*assert.+!errorReached.*;\n', curLine):
            insideMain = False
            for res in usedResources:
                for i in range(0, K - 1):
                    out.append('assume Res_' + str(i) + '_' + res + ' == Res_s_' + str(i + 1) + '_' + res + ';\n')

        out.append(curLine)
    return ''.join(out)
- # Inlining: write tmp.bpl to disk, call Boogie to do inlining, collect inlined storm_main
- def inline(bpl):
- # bpl = cleanUpNondets(bpl) # clean up nondet variables that we don't need
- # Raise exceptions before __hv_assumes
- if not noExceptions:
- p = re.compile('//TAG: .*?\n')
- bpl = p.sub('havoc raiseException; if (raiseException) {return;}\n', bpl)
- print 'Inlining...\n'
- fout = open('tmp.bpl', 'w')
- fout.write(bpl)
- fout.close()
-
- inlineCmd = boogieBin + ' tmp.bpl /noinfer /inline:assume /proc:' + mainProcedure + ' /noVerify /coalesceBlocks:0 /printInstrumented'
- f = os.popen(inlineCmd, 'r')
- inlined = f.read()
- f.close()
- p = re.compile(r'\*\*\* Error:')
- if p.search(inlined):
- print 'Boogie ERROR when inlining!'
- sys.exit()
-
- p = re.compile('\A.*(procedure ' + mainProcedure + '\(\);(.*?))(after inlining procedure calls|Boogie program verifier finished|procedure[ ]+{:inline 1}[ ]+|\Z).*', re.DOTALL)
- match = p.match(inlined)
- inlined = match.group(1)
- p = re.compile('\A(.*?)(procedure {:inline 1}|procedure ' + mainProcedure + '[ ]*\(\))', re.DOTALL)
- match = p.match(bpl)
- header = match.group(1)
- bpl = header + inlined
- bpl = removeDeadVariables(bpl) # remove dead vars
- bpl = removeDeadVariables(bpl) # remove dead vars
- print 'Unrolling loops...\n'
- fout = open('tmp1.bpl', 'w')
- fout.write(bpl)
- fout.close()
-
- unrollCmd = boogieBin + ' tmp1.bpl /noinfer /proc:' + mainProcedure + ' /noVerify /coalesceBlocks:0 /printInstrumented ' + loopUnroll
- f …
Large files files are truncated, but you can click here to view the full file