replace optparse by argparse
This commit is contained in:
parent
d5ba77346e
commit
bfe6b95b4a
2 changed files with 140 additions and 165 deletions
163
flowy.py
163
flowy.py
|
@@ -1,32 +1,149 @@
|
|||
#!/usr/bin/python
|
||||
import options
|
||||
from optparse import OptionParser
|
||||
import flowy_exec
|
||||
import sys
|
||||
#!/usr/bin/env python
|
||||
|
||||
from parser import Parser
|
||||
from filter_validator import FilterValidator
|
||||
from splitter_validator import SplitterValidator
|
||||
from grouper_validator import GrouperValidator
|
||||
from groupfilter_validator import GroupFilterValidator
|
||||
from merger_validator import MergerValidator
|
||||
from ungrouper_validator import UngrouperValidator
|
||||
from threading import Thread
|
||||
import argparse
|
||||
import profiler
|
||||
import time
|
||||
import ply
|
||||
import pickle
|
||||
#profiler.profile_on()
|
||||
|
||||
start = time.clock()
|
||||
print start
|
||||
|
||||
def run(flwfile):
|
||||
|
||||
#valstart_elapsed = (time.clock() - start)
|
||||
#print "Parsing and validation started:", valstart_elapsed
|
||||
|
||||
p = Parser()
|
||||
|
||||
doc = flwfile.read()
|
||||
|
||||
p.parse(doc)
|
||||
|
||||
#inps = get_inputs_list(p)
|
||||
#print get_input_fields_types(inps[0])
|
||||
# hdf_file = "../testFT2.h5"
|
||||
# r = pytables.FlowRecordsTable(hdf_file)
|
||||
# recordReader = record.RecordReader(r)
|
||||
f = FilterValidator(p)
|
||||
# fl = f.impl
|
||||
s = SplitterValidator(p, f)
|
||||
spl = s.impl
|
||||
|
||||
|
||||
gr = GrouperValidator(p, s)
|
||||
# grs = gr.impl
|
||||
|
||||
gr_filt = GroupFilterValidator(p, gr)
|
||||
# Returns a number of group-filter instances
|
||||
# with accordance to the number of branches.
|
||||
gr_filters = gr_filt.impl
|
||||
|
||||
|
||||
mr = MergerValidator(p, gr_filt)
|
||||
mergers = mr.impl
|
||||
|
||||
#valend_elapsed = (time.clock() - start)
|
||||
#print "Parsing and validation finished:", valend_elapsed
|
||||
|
||||
splitter_thread = Thread(target=spl.go)
|
||||
|
||||
gf_threads = [Thread(target=gf.go)for gf in gr_filters]
|
||||
|
||||
splitter_elapsed = (time.clock() - start)
|
||||
print "Splitter time estarted:", splitter_elapsed
|
||||
splitter_thread.start()
|
||||
|
||||
|
||||
|
||||
groupfil_start= (time.clock() - start)
|
||||
print "Group filter time started:", groupfil_start
|
||||
for gf_thread in gf_threads:
|
||||
gf_thread.start()
|
||||
|
||||
#Originally it was after gf_thread.start()
|
||||
splitter_thread.join()
|
||||
print "Splitter finished"
|
||||
|
||||
splitter_elapsed = (time.clock() - start)
|
||||
print "Splitter time elapsed:", splitter_elapsed
|
||||
|
||||
for gf_thread in gf_threads:
|
||||
gf_thread.join()
|
||||
|
||||
groupfil_elapsed = (time.clock() - start)
|
||||
print "Group filter threads joined:", groupfil_elapsed
|
||||
|
||||
merger_threads = [Thread(target=m.go()) for m in mergers]
|
||||
for merger_thread in merger_threads:
|
||||
merger_thread.start()
|
||||
|
||||
|
||||
for merger_thread in merger_threads:
|
||||
merger_thread.join()
|
||||
|
||||
|
||||
merger_elapsed = (time.clock() - start)
|
||||
print "Merger time elapsed:", merger_elapsed
|
||||
|
||||
|
||||
ung = UngrouperValidator(p, mr)
|
||||
ungroupers = ung.impl
|
||||
|
||||
ungrouper_threads = [Thread(target=u.go) for u in ungroupers]
|
||||
for ungrouper_thread in ungrouper_threads:
|
||||
ungrouper_thread.start()
|
||||
|
||||
for ungrouper_thread in ungrouper_threads:
|
||||
ungrouper_thread.join()
|
||||
|
||||
|
||||
# profiler.profile_off()
|
||||
# import pickle
|
||||
# stats = profiler.get_profile_stats()
|
||||
# sorted_stats = sorted(stats.iteritems(), key=lambda a: a[1][1]/a[1][0])
|
||||
# for st in sorted_stats:
|
||||
# print st
|
||||
# print ' '
|
||||
|
||||
print "FINISHED!"
|
||||
overall_elapsed = (time.clock() - start)
|
||||
print "Overall time elapsed:", overall_elapsed
|
||||
# fname = mergers[0].merger_table.tuples_table.file_path
|
||||
# print fname
|
||||
|
||||
|
||||
|
||||
import ft2hdf
|
||||
|
||||
if __name__ == '__main__':
    # Command-line entry point.  The optparse-based option handling
    # (OptionParser, the option_names loop, the manual len(arguments)
    # check and flowy_exec.run) was superseded by argparse in this
    # commit; only the argparse path is kept here.
    parser = argparse.ArgumentParser(description='some meaningful description here')
    parser.add_argument('-p', '--profile', action='store_true', help="turn profiling on")
    parser.add_argument('--version', action='version', version='%(prog)s 2.0')
    # argparse.FileType opens the file for us and reports a usage error
    # if it cannot be read.
    parser.add_argument('flwfile', type=argparse.FileType('r'), help="*.flw file to evaluate")
    args = parser.parse_args()

    if args.profile:
        profiler.profile_on()

    try:
        run(args.flwfile)
    except (ply.yacc.YaccError, SyntaxError) as e:
        import sys
        sys.stderr.write(str(e) + '\n')

    if args.profile:
        profiler.profile_off()
        stats = profiler.get_profile_stats()
        # Sort by the first accumulated counter (total time per call site).
        sorted_stats = sorted(stats.iteritems(), key=lambda a: a[1][0])
        with open('./profile_stats1', 'w') as f:
            pickle.dump(sorted_stats, f)
|
||||
|
|
142
flowy_exec.py
142
flowy_exec.py
|
@@ -1,142 +0,0 @@
|
|||
from parser import Parser
|
||||
from filter_validator import FilterValidator
|
||||
from splitter_validator import SplitterValidator
|
||||
from grouper_validator import GrouperValidator
|
||||
from groupfilter_validator import GroupFilterValidator
|
||||
from merger_validator import MergerValidator
|
||||
from ungrouper_validator import UngrouperValidator
|
||||
from threading import Thread
|
||||
import options
|
||||
import profiler
|
||||
import time
|
||||
#profiler.profile_on()
|
||||
start = time.clock()
|
||||
print start
|
||||
|
||||
def run(filename):
|
||||
|
||||
#valstart_elapsed = (time.clock() - start)
|
||||
#print "Parsing and validation started:", valstart_elapsed
|
||||
|
||||
p = Parser()
|
||||
|
||||
file = open(filename)
|
||||
doc = file.read()
|
||||
|
||||
p.parse(doc)
|
||||
|
||||
#inps = get_inputs_list(p)
|
||||
#print get_input_fields_types(inps[0])
|
||||
# hdf_file = "../testFT2.h5"
|
||||
# r = pytables.FlowRecordsTable(hdf_file)
|
||||
# recordReader = record.RecordReader(r)
|
||||
f = FilterValidator(p)
|
||||
# fl = f.impl
|
||||
s = SplitterValidator(p, f)
|
||||
spl = s.impl
|
||||
|
||||
|
||||
gr = GrouperValidator(p, s)
|
||||
# grs = gr.impl
|
||||
|
||||
gr_filt = GroupFilterValidator(p, gr)
|
||||
# Returns a number of group-filter instances
|
||||
# with accordance to the number of branches.
|
||||
gr_filters = gr_filt.impl
|
||||
|
||||
|
||||
mr = MergerValidator(p, gr_filt)
|
||||
mergers = mr.impl
|
||||
|
||||
#valend_elapsed = (time.clock() - start)
|
||||
#print "Parsing and validation finished:", valend_elapsed
|
||||
|
||||
splitter_thread = Thread(target=spl.go)
|
||||
|
||||
gf_threads = [Thread(target=gf.go)for gf in gr_filters]
|
||||
|
||||
splitter_elapsed = (time.clock() - start)
|
||||
print "Splitter time estarted:", splitter_elapsed
|
||||
splitter_thread.start()
|
||||
|
||||
|
||||
|
||||
groupfil_start= (time.clock() - start)
|
||||
print "Group filter time started:", groupfil_start
|
||||
for gf_thread in gf_threads:
|
||||
gf_thread.start()
|
||||
|
||||
#Originally it was after gf_thread.start()
|
||||
splitter_thread.join()
|
||||
print "Splitter finished"
|
||||
|
||||
splitter_elapsed = (time.clock() - start)
|
||||
print "Splitter time elapsed:", splitter_elapsed
|
||||
|
||||
for gf_thread in gf_threads:
|
||||
gf_thread.join()
|
||||
|
||||
groupfil_elapsed = (time.clock() - start)
|
||||
print "Group filter threads joined:", groupfil_elapsed
|
||||
|
||||
merger_threads = [Thread(target=m.go()) for m in mergers]
|
||||
for merger_thread in merger_threads:
|
||||
merger_thread.start()
|
||||
|
||||
|
||||
for merger_thread in merger_threads:
|
||||
merger_thread.join()
|
||||
|
||||
|
||||
merger_elapsed = (time.clock() - start)
|
||||
print "Merger time elapsed:", merger_elapsed
|
||||
|
||||
|
||||
ung = UngrouperValidator(p, mr)
|
||||
ungroupers = ung.impl
|
||||
|
||||
ungrouper_threads = [Thread(target=u.go) for u in ungroupers]
|
||||
for ungrouper_thread in ungrouper_threads:
|
||||
ungrouper_thread.start()
|
||||
|
||||
for ungrouper_thread in ungrouper_threads:
|
||||
ungrouper_thread.join()
|
||||
|
||||
|
||||
# profiler.profile_off()
|
||||
# import pickle
|
||||
# stats = profiler.get_profile_stats()
|
||||
# sorted_stats = sorted(stats.iteritems(), key=lambda a: a[1][1]/a[1][0])
|
||||
# for st in sorted_stats:
|
||||
# print st
|
||||
# print ' '
|
||||
|
||||
print "FINISHED!"
|
||||
overall_elapsed = (time.clock() - start)
|
||||
print "Overall time elapsed:", overall_elapsed
|
||||
# fname = mergers[0].merger_table.tuples_table.file_path
|
||||
# print fname
|
||||
|
||||
|
||||
|
||||
import ft2hdf
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Script entry point: evaluate a fixed sample query with temp-file
    # cleanup enabled.  (The commented-out profiler scaffolding that used
    # to live here is preserved below, unchanged.)
    options.delete_temp_files = True
    import ply
    # import profiler
    # profiler.profile_on()
    run('www_one_dir.flw')
    #
    #
    # profiler.profile_off()
    # import pickle
    # stats = profiler.get_profile_stats()
    # sorted_stats = sorted(stats.iteritems(), key=lambda a: a[1][0])
    # for st in sorted_stats:
    #     print st
    #
    # f = open('./profile_stats1', 'w')
    # pickle.dump(sorted_stats,f)
|
Loading…
Reference in a new issue