
/ruffus/test/test_transform_inputs.py

https://code.google.com/p/ruffus/
#!/usr/bin/env python
"""

    test_transform_inputs.py

        test the inputs() modifier to @transform

"""


#888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   options

#888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

from optparse import OptionParser
import sys, os
import os.path
import StringIO
import re, time

# add self to search path for testing
exe_path = os.path.split(os.path.abspath(sys.argv[0]))[0]
sys.path.insert(0, os.path.abspath(os.path.join(exe_path, "..", "..")))
if __name__ == '__main__':
    module_name = os.path.split(sys.argv[0])[1]
    module_name = os.path.splitext(module_name)[0]
else:
    module_name = __name__


import ruffus
print "\tRuffus Version = ", ruffus.__version__
parser = OptionParser(version="%%prog v1.0, ruffus v%s" % ruffus.ruffus_version.__version)
parser.add_option("-t", "--target_tasks", dest="target_tasks",
                  action="append",
                  default=list(),
                  metavar="JOBNAME",
                  type="string",
                  help="Target task(s) of pipeline.")
parser.add_option("-f", "--forced_tasks", dest="forced_tasks",
                  action="append",
                  default=list(),
                  metavar="JOBNAME",
                  type="string",
                  help="Pipeline task(s) which will be included even if they are up to date.")
parser.add_option("-j", "--jobs", dest="jobs",
                  default=1,
                  metavar="jobs",
                  type="int",
                  help="Specifies the number of jobs (commands) to run simultaneously.")
parser.add_option("-v", "--verbose", dest="verbose",
                  action="count", default=0,
                  help="Print more verbose messages for each additional verbose level.")
parser.add_option("-d", "--dependency", dest="dependency_file",
                  #default="simple.svg",
                  metavar="FILE",
                  type="string",
                  help="Print a dependency graph of the pipeline that would be executed "
                       "to FILE, but do not execute it.")
parser.add_option("-F", "--dependency_graph_format", dest="dependency_graph_format",
                  metavar="FORMAT",
                  type="string",
                  default='svg',
                  help="format of dependency graph file. Can be 'ps' (PostScript), "
                       "'svg' 'svgz' (Structured Vector Graphics), "
                       "'png' 'gif' (bitmap graphics) etc ")
parser.add_option("-n", "--just_print", dest="just_print",
                  action="store_true", default=False,
                  help="Print a description of the jobs that would be executed, "
                       "but do not execute them.")
parser.add_option("-M", "--minimal_rebuild_mode", dest="minimal_rebuild_mode",
                  action="store_true", default=False,
                  help="Rebuild a minimum of tasks necessary for the target. "
                       "Ignore upstream out of date tasks if intervening tasks are fine.")
parser.add_option("-K", "--no_key_legend_in_graph", dest="no_key_legend_in_graph",
                  action="store_true", default=False,
                  help="Do not print out legend and key for dependency graph.")
parser.add_option("-H", "--draw_graph_horizontally", dest="draw_horizontally",
                  action="store_true", default=False,
                  help="Draw horizontal dependency graph.")

parameters = []




#888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   imports

#888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

import StringIO
import re
import operator
import sys, os
from collections import defaultdict
import random

sys.path.append(os.path.abspath(os.path.join(exe_path, "..", "..")))
from ruffus import *

# use simplejson in place of json for python < 2.6
try:
    import json
except ImportError:
    import simplejson
    json = simplejson

#888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Main logic

#888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888


# get help string
f = StringIO.StringIO()
parser.print_help(f)
helpstr = f.getvalue()
(options, remaining_args) = parser.parse_args()


#888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Tasks

#888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
tempdir = "tempdir/"
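
# Pipeline under test:
#   task1, task2 : seed the pipeline with four empty files under tempdir/
#   task3        : @transform of task1 whose input parameters are replaced via inputs()
#   task4        : @merge of all task3 outputs into tempdir/final.output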
@follows(mkdir(tempdir))
@files([[None, tempdir + "a.1"], [None, tempdir + "b.1"]])
def task1(i, o):
    open(o, "w")


@follows(mkdir(tempdir))
@files([[None, tempdir + "c.1"], [None, tempdir + "d.1"]])
def task2(i, o):
    open(o, "w")


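# regex(r"(.*)") matches each task1 output filename in full; inputs(...) then
# discards the original input parameters and substitutes, per job:
#   r"\1"                      - the matched task1 output file itself
#   task2                      - all output files of task2
#   "test_transform_inputs.*"  - files matching this glob (i.e. this script)
# The output filename is the matched name with ".output" appended.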
@transform(task1, regex(r"(.*)"), inputs(((r"\1"), task2, "test_transform_inputs.*")), r"\1.output")
def task3(i, o):
    # o is a single output file name: record the sorted input file names in it
    names = ",".join(sorted(i))
    open(o, "w").write(names)

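# task4 concatenates "<file name>:<file contents>;" for every task3 output
# into one summary file, which the unit test below checks verbatim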
@merge(task3, tempdir + "final.output")
def task4(i, o):
    o_file = open(o, "w")
    for f in sorted(i):
        o_file.write(f + ":" + open(f).read() + ";")

import unittest

class Test_task(unittest.TestCase):

    def tearDown(self):
        """
        Remove the temporary working directory and its contents after each test.
        """
        import glob
        for f in glob.glob(tempdir + "*"):
            os.unlink(f)
        os.rmdir(tempdir)

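    # run the pipeline through task4, then check that task3 received the
    # substituted inputs by comparing the merged summary file verbatim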
    def test_task(self):
        pipeline_run([task4], options.forced_tasks, multiprocess=options.jobs,
                     verbose=options.verbose)

        correct_output = ("tempdir/a.1.output:tempdir/a.1,tempdir/c.1,tempdir/d.1,test_transform_inputs.py;"
                          "tempdir/b.1.output:tempdir/b.1,tempdir/c.1,tempdir/d.1,test_transform_inputs.py;")
        real_output = open(tempdir + "final.output").read()
        self.assertEqual(correct_output, real_output)

if __name__ == '__main__':
    if options.just_print:
        pipeline_printout(sys.stdout, options.target_tasks, options.forced_tasks,
                          verbose=options.verbose,
                          gnu_make_maximal_rebuild_mode=not options.minimal_rebuild_mode)

    elif options.dependency_file:
        pipeline_printout_graph(open(options.dependency_file, "w"),
                                options.dependency_graph_format,
                                options.target_tasks,
                                options.forced_tasks,
                                draw_vertically=not options.draw_horizontally,
                                gnu_make_maximal_rebuild_mode=not options.minimal_rebuild_mode,
                                no_key_legend=options.no_key_legend_in_graph)
    else:
        # strip this script's own command line options so unittest does not see them
        sys.argv = sys.argv[0:1]
        unittest.main()