diff --git a/3http-download.flw b/3http-download.flw new file mode 100644 index 0000000..3f7de34 --- /dev/null +++ b/3http-download.flw @@ -0,0 +1,72 @@ +splitter S {} + +filter www_req { + dstport = 80 +} + +filter www_res { + srcport = 80 +} + +filter www_res1 { + srcport = 80 +} + +grouper g_www_req { + module g1 { + srcip = srcip + dstip = dstip + etime < stime delta 1s + } + aggregate srcip, dstip, sum(bytes) as bytes, count(rec_id) as n, + bitOR(tcp_flags) as flags, union(srcport) as srcports +} + +grouper g_www_res { + module g1 { + srcip = srcip + dstip = dstip + etime < stime delta 1s + } + aggregate srcip, dstip, sum(bytes) as bytes, count(rec_id) as n, + bitOR(tcp_flags) as flags, union(dstport) as dstports +} + +grouper g_www_res1 { + module g1 { + srcip = srcip + dstip = dstip + etime < stime delta 5s + } + aggregate srcip, dstip, sum(bytes) as bytes, count(rec_id) as n, + bitOR(tcp_flags) as flags, union(dstport) as dstports +} + +groupfilter ggf { + bitAND(flags, 0x13) = 0x13 +} + +merger M { + module m1 { + branches C, B, A + A.srcip = B.dstip + A.srcip = C.dstip + A.srcports = B.dstports + A.srcports = C.dstports + A.bytes < B.bytes + A.bytes < C.bytes + B oi A OR B d A + C o B + C m A + + } + export m1 +} + +ungrouper U {} + +"./netflow-trace.h5" -> S +S branch A -> www_req -> g_www_req -> ggf -> M +S branch B -> www_res -> g_www_res -> ggf -> M +S branch C -> www_res1 -> g_www_res1 -> ggf -> M +M->U->"./ungroped.h5" diff --git a/README b/README new file mode 100644 index 0000000..03bbed3 --- /dev/null +++ b/README @@ -0,0 +1,17 @@ +Flowy - Network Flow Analysis Application + +Requirements: + +Python 2.5 or higher (tested with Python 2.6) +Pytables 2.1 or higher +PLY (Python Lex-Yacc) 2.5 or higher +pyflowtools 3.1 or higher + + +Usage: + +ft2hdf.py - convert flow-tools traces to hdf hdf file +printhdf.py - print flowy hdf file +print_hdf_in_step.py - print two or more hdf files printing one record + from each file at each step +flowy.py - the 
main flowy program diff --git a/__init__.py b/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/aggr_operators.py b/aggr_operators.py new file mode 100644 index 0000000..8d198bb --- /dev/null +++ b/aggr_operators.py @@ -0,0 +1,152 @@ +import options +from tables import UInt32Col, UInt64Col + +if options.import_grouper_ops: + external_import = __import__(options.import_grouper_ops) + +class last(object): + __slots__ = ['field', 'gr_field', 'field_type', 'last'] + def __init__(self, field, gr_field, field_type): + self.field = field + self.gr_field = gr_field + self.field_type = field_type + self.last = None + + def __call__(self, record = None): + if record == None: + return self.last + else: + self.last = getattr(record, self.field) + return self.last + + +class sum(object): + __slots__ = ['field', 'gr_field', 'field_type','sum'] + def __init__(self, field, gr_field, field_type): + self.field = field + self.gr_field = gr_field + self.field_type = field_type + self.sum = 0 + + def __call__(self, record = None): + if record == None: + return self.sum + else: + self.sum += getattr(record, self.field) + return self.sum + +class avg(object): + __slots__ = ['field', 'gr_field', 'field_type','sum','n','avg'] + def __init__(self, field, gr_field, field_type): + self.field = field + self.gr_field = gr_field + self.field_type = field_type + self.sum = 0 + self.n = 0 + self.avg = None + + def __call__(self, record = None): + if record == None: + if str(self.field_type).find('Int') != -1: + return int(round(self.avg)) + else: + return self.avg + else: + self.sum += getattr(record, self.field) + self.n += 1 + self.avg = self.sum / self.n + return self.avg + +class max(object): + __slots__ = ['field', 'gr_field', 'field_type','max'] + def __init__(self, field, gr_field, field_type): + self.field = field + self.gr_field = gr_field + self.field_type = field_type + self.max = float("-inf") + + def __call__(self, record = None): + if record == None: + return 
self.max + else: + new_val = getattr(record, self.field) + if self.max < new_val: + self.max = new_val + return self.max + +class min(object): + __slots__ = ['field', 'gr_field', 'field_type','min'] + def __init__(self, field, gr_field, field_type): + self.field = field + self.gr_field = gr_field + self.field_type = field_type + self.min = float("inf") + + def __call__(self, record = None): + if record == None: + return self.min + else: + new_val = getattr(record, self.field) + if self.min > new_val: + self.min = new_val + return self.min + +class count(object): + __slots__ = ['field', 'gr_field', 'field_type','count'] + def __init__(self, field, gr_field, field_type): + self.field = field + self.gr_field = gr_field + self.field_type = field_type + self.count = 0 + + def __call__(self, record = None): + if record == None: + return self.count + else: + self.count += 1 + return self.count + +class union(object): + __slots__ = ['field', 'gr_field', 'field_type','union'] + def __init__(self, field, gr_field, field_type): + self.field = field + self.gr_field = gr_field + self.field_type = field_type + self.union = [] + + def __call__(self, record = None): + if record == None: + return sorted(set(self.union)) + else: + self.union.append(getattr(record, self.field)) + return self.union + +class bitAND(object): + __slots__ = ['field', 'gr_field', 'field_type','bitAND'] + def __init__(self, field, gr_field, field_type): + self.field = field + self.gr_field = gr_field + self.field_type = field_type + self.bitAND = pow(2,field_type.size) - 1 # all 1s for the given size + + def __call__(self, record = None): + if record == None: + return self.bitAND + else: + self.bitAND &= getattr(record, self.field) + return self.bitAND + +class bitOR(object): + __slots__ = ['field', 'gr_field', 'field_type','bitOR'] + def __init__(self, field, gr_field, field_type): + self.field = field + self.gr_field = gr_field + self.field_type = field_type + self.bitOR = 0 + + def __call__(self, record 
= None): + if record == None: + return self.bitOR + else: + self.bitOR |= getattr(record, self.field) + return self.bitOR diff --git a/aggr_operators.pyc b/aggr_operators.pyc new file mode 100644 index 0000000..1345a1d Binary files /dev/null and b/aggr_operators.pyc differ diff --git a/allen_index.py b/allen_index.py new file mode 100644 index 0000000..cdc6993 --- /dev/null +++ b/allen_index.py @@ -0,0 +1,172 @@ +class LT(object): + """ + X < Y + x before y + """ + def __init__(self, src, target, delta): + self.delta = delta + self.src = src + self.target = target + + def __call__(self, x): + return x.etime, x.etime + self.delta + +class GT(object): + """ + X > Y + x after y + """ + def __init__(self, src, target, delta): + self.delta = delta + self.src = src + self.target = target + + def __call__(self, x): + return x.stime - self.delta, x.stime + +class m(object): + """ + X m Y + x meets y (x starts before y) + y should occur at end of x + """ + def __init__(self, src, target, delta=1): + self.delta = delta + self.src = src + self.target = target + + def __call__(self, x): + return x.etime, x.etime + self.delta + +class mi(object): + """ + X mi Y + inverse x meets y (x starts after y) + y should occur at the beginning of x + """ + def __init__(self, src, target, delta=1): + self.delta = delta + self.src = src + self.target = target + + def __call__(self, x): + return x.stime - self.delta, x.stime + +class o(object): + """ + X o Y + x overlaps y (x starts before y) + y should occur at the end of x + """ + def __init__(self, src, target, delta=0): + self.delta = delta + self.src = src + self.target = target + + def __call__(self, x): + return x.etime-self.delta, x.etime+self.delta + +class oi(object): + """ + X oi Y + inverse x overlaps y (x starts after y) + """ + def __init__(self, src, target, delta=0): + self.delta = delta + self.src = src + self.target = target + + def __call__(self, x): + return x.stime, x.stime + +class d(object): + """ + X d Y + x during y 
+ """ + def __init__(self, src, target, delta=0): + self.delta = delta + self.src = src + self.target = target + + def __call__(self, x): + return x.stime, x.stime + +class di(object): + """ + X di Y + inverse x during y (y during x) + """ + def __init__(self, src, target, delta=0): + self.delta = delta + self.src = src + self.target = target + + def __call__(self, x): + return x.stime, x.etime + + +class f(object): + """ + X f Y + x finishes y (x starts after y, x and y end together) + """ + def __init__(self, src, target, delta=1): + self.delta = delta + self.src = src + self.target = target + + def __call__(self, x): + return x.etime - self.delta, x.etime + self.delta + +class fi(object): + """ + X fi Y + inverse x finishes y (x is finished by y) + """ + def __init__(self, src, target, delta=1): + self.delta = delta + self.src = src + self.target = target + + def __call__(self, x): + return x.etime - self.delta, x.etime + self.delta + +class s(object): + """ + X s Y + x starts y (x ends before y, x and y starts together) + """ + def __init__(self, src, target, delta=0): + self.delta = delta + self.src = src + self.target = target + + def __call__(self, x): + return x.stime - self.delta, x.stime + self.delta + +class si(object): + """ + X si Y + inverse x starts y (x is started by y) + """ + def __init__(self, src, target, delta=1): + self.delta = delta + self.src = src + self.target = target + + def __call__(self, x): + return x.stime - self.delta, x.stime + self.delta + +class EQ(object): + """ + X = Y + X lasts the same time as Y and both start together. 
+ """ + def __init__(self, src, target, delta=1): + self.delta = delta + self.src = src + self.target = target + + def __call__(self, x): + return x.stime - self.delta, x.stime + self.delta diff --git a/allen_index.pyc b/allen_index.pyc new file mode 100644 index 0000000..3e3569d Binary files /dev/null and b/allen_index.pyc differ diff --git a/allen_ops.py b/allen_ops.py new file mode 100644 index 0000000..686d15b --- /dev/null +++ b/allen_ops.py @@ -0,0 +1,232 @@ +from math import floor, ceil + +def inv_op_str(op_name_string): + inverse = { + 'LT' : 'GT', + 'GT' : 'LT', + 'm' : 'mi', + 'mi' : 'm', + 'o' : 'oi', + 'oi' : 'o', + 's' : 'si', + 'si' : 's', + 'd' : 'di', + 'di' : 'd', + 'f' : 'fi', + 'fi' : 'f', + '=' : '=' + } + return inverse[op_name_string] + +class AllenOpIndex(object): + def __init__(self, index): + self.index + + def LT(self, x, delta): + """ + X < Y + x before y + """ + return x.etime, x.etime + delta + + def GT(self, x, delta): + """ + X > Y + x after y + """ + return x.stime - delta, x.stime + + def m(self, x, delta=1): + """ + X m Y + x meets y (x starts before y) + y should occur at end of x + """ + return x.etime, x.etime + delta + + def mi(self, x, delta=1): + """ + X mi Y + inverse x meets y (x starts after y) + y should occur at the beginning of x + """ + return x.stime - delta, x.stime + + def o(self, x, delta=1): + """ + X o Y + x overlaps y (x starts before y) + y should occur at the end of x + """ + return x.etime-delta, x.etime+delta + + def oi(self, x, delta=1): + """ + X oi Y + inverse x overlaps y (x starts after y) + """ + return x.stime, x.stime + + def d(self, x, delta=0): + """ + X d Y + x during y + """ + return x.stime, x.stime + + def di(self, x, delta=0): + """ + X di Y + inverse x during y (y during x) + """ + return x.stime, x.etime + + + def f(self, x, delta=1): + """ + X f Y + x finishes y (x starts after y, x and y end together) + """ + # delta disregarded here + return x.etime - delta, x.etime + delta + + def 
fi(self, x, delta=1): + """ + X fi Y + inverse x finishes y (x is finished by y) + """ + return x.etime - delta, x.etime + delta + + def s(self, x, delta=1): + """ + X s Y + x starts y (x ends before y, x and y starts together) + """ + return x.stime - delta, x.stime + delta + + def si(self, x, delta=1): + """ + X si Y + inverse x starts y (x is started by y) + """ + # delta disregarded here + return x.stime - delta, x.stime + delta + + def EQ(self, x, delta=1): + """ + X = Y + X lasts the same time as Y + """ + # delta disregarded here + return int((x.stime + x.etime)/2) - delta, int((x.stime + + x.etime)/2) + delta + + def composite_intervals(self, op_x_delta_tuples): + intervals = set() + for op_x_delta in op_x_delta_tuples: + op = op_x_delta[0] + args = op_x_delta[1:] + intervals.update(getattr(self, op)(*args)) + + res = list(intervals) + res.sort() + return res + + +def LT(x, y, delta=0): + """ + X < Y + x before y + """ + return x.etime < y.stime + +def GT(x, y, delta=1): + """ + X > Y + x after y + """ + return x.stime > y.etime + +def m(x, y, delta=1): + """ + X m Y + x meets y (x starts before y) + y should occur at end of x + """ + return abs(x.etime - y.stime) < delta + +def mi(x, y, delta=1): + """ + X mi Y + inverse x meets y (x starts after y) + y should occur at the beginning of x + """ + return abs(x.stime - y.etime) < delta + +def o(x, y, delta=1): + """ + X o Y + x overlaps y (x starts before y) + y should occur at the end of x + """ + return y.stime < x.etime < y.etime + +def oi(x, y, delta=1): + """ + X oi Y + inverse x overlaps y (x starts after y) + """ + return y.stime < x.stime < y.etime + +def d(x, y, delta=0): + """ + X d Y + x during y + """ + return y.stime < x.stime and x.etime < y.etime + +def di(x, y, delta=0): + """ + X di Y + inverse x during y (y during x) + """ + return y.stime > x.stime and x.etime > y.etime + + +def f(x, y, delta=1): + """ + X f Y + x finishes y (x starts after y, x and y end together) + """ + # delta 
disregarded here + return x.stime > y.etime and abs(x.etime - y.etime) < delta + +def fi(x, y, delta=1): + """ + X fi Y + inverse x finishes y (x is finished by y) + """ + return x.stime < y.etime and abs(x.etime - y.etime) < delta + +def s(x, y, delta=1): + """ + X s Y + x starts y (x ends before y, x and y start together) + """ + return x.etime < y.etime and abs(x.stime - y.stime) < delta + +def si(x, y, delta=1): + """ + X si Y + inverse x starts y (x is started by y) + """ + # delta disregarded here + return x.etime > y.etime and abs(x.stime - y.stime) < delta + +def EQ(x, y, delta=1): + """ + X fi Y + inverse x finishes y (x is finished by y) + """ + # delta disregarded here + return abs(x.stime - y.stime) < delta and abs(x.etime - y.etime) < delta diff --git a/allen_ops.pyc b/allen_ops.pyc new file mode 100644 index 0000000..ede6cf2 Binary files /dev/null and b/allen_ops.pyc differ diff --git a/custops.py b/custops.py new file mode 100644 index 0000000..6b8e9de --- /dev/null +++ b/custops.py @@ -0,0 +1,5 @@ +def minus(*args): + res = args[0] + for arg in args[1:]: + res -= arg + return res \ No newline at end of file diff --git a/custops.pyc b/custops.pyc new file mode 100644 index 0000000..251b0b5 Binary files /dev/null and b/custops.pyc differ diff --git a/filter.py b/filter.py new file mode 100644 index 0000000..1082688 --- /dev/null +++ b/filter.py @@ -0,0 +1,197 @@ +from copy import deepcopy +from copy import copy +from statement import Field +from record import RecordReader +import time +import profiler + +class NoMatch(Exception): + pass + +class Filter(object): + def __init__(self,rules, records, br_mask, nbranches): + self.rules = rules + self.records = records + self.br_mask = br_mask + +# print "The filter has just been initiated" + + # Iteration of the filter happens at the splitter function go() + # In this teration function, each of the records is being matched + # against all of the conditions in each of the filters, and based + # on what 
condition it matches, it is assigned an appropriate + # branch mask. I.e., if Branch A has a srcport=443, then the record + # that matches this requirement gets a mask of [True, False], else + # if Branch B's filter is matched, then a mask of [False, True] is + # assigned. + def __iter__(self): + print "Started filtering" +# start = time.clock() +# print "Fitlering time started at:", start + for record in self.records: + self.br_mask.reset() + try: + for rule in self.rules: + rule_result = rule.match(record) + self.br_mask.mask(rule.branch_mask, rule_result) + except NoMatch: + continue + + branches = self.br_mask.final_result() + if True in branches: + yield record, branches + + +# print "Finished filtering" +# time_elapsed = (time.clock() - start) +# print "Filtering required:", time_elapsed + +#class Field(object): +# def __init__(self, name): +# self.name = name +# def __repr__(self): +# return "Field('%s')"%self.name + +# Implementation of a self-defined deepcopy function that operates +# for the simple data types. 
+ +def deep_copy(org): + out = dict().fromkeys(org) + for k,v in org.iteritems(): + try: + out[k] = v.copy() # dicts, sets + except AttributeError: + try: + out[k] = v[:] # lists, tuples, strings, unicode + except TypeError: + out[k] = v # ints + + return out + + + +class BranchMask(object): + def __init__(self, branch_masks, pseudo_branches, n_real_branches): + self.masks = branch_masks +# self.orig_mask = deepcopy(branch_masks) + self.orig_mask = deep_copy(branch_masks) +# self.pseudo_branches = deepcopy(pseudo_branches) + self.pseudo_branches = deep_copy(pseudo_branches) + self.n_real_branches = n_real_branches + + + def reset(self): +# self.masks = deepcopy(self.orig_mask) + self.masks = deep_copy(self.orig_mask) + #self.masks = copy(self.orig_mask) +# self.masks = self.orig_mask + + + def mask(self, sub_branches, result): + for br, sub_br, NOT in sub_branches: + res = not result if NOT else result + if sub_br == 0: + self.masks[br][sub_br] = self.masks[br][sub_br] and res + else: + self.masks[br][sub_br] = self.masks[br][sub_br] or res + + + def final_result(self): + final_mask = {} + + for br, mask in self.masks.iteritems(): + final_mask[br] = True if False not in mask else False + result = [] + for id in xrange(self.n_real_branches): + try: + result.append(final_mask[id]) + + except KeyError: + gr_res = True + for or_group in self.pseudo_branches[id]: + res = False + for ref in or_group: + if ref[1]: + res = res or not final_mask[ref[0]] + else: + res = res or final_mask[ref[0]] + + gr_res = gr_res and res + + result.append(gr_res) + + return result + + +class Rule(object): + def __init__(self, branch_mask, operation, args): + self.operation = operation + self.args = args + self.branch_mask = branch_mask + + # This match operation is used at both the filtering and group-filering + # stages, since group-filter also relies on this Rule class. 
+ def match(self, record): + args = [] + for arg in self.args: + if type(arg) is Field: # Used both at filterin and group-filtering stages + args.append(getattr(record, arg.name)) + elif type(arg) is Rule: # Used only at the group-fitlering stage + args.append(arg.match(record)) + else: # Used at both stages. The actual argument numbers, i.e., port 80 + args.append(arg) + + return self.operation(*args) + +class PreSplitRule(Rule): + def match(self,record): + result = Rule.match(self,record) + if not result: + raise NoMatch() + +class GroupFilter(object): + def __init__(self, rules, records, branch_name, groups_table, index): + self.rules = rules + self.records = records + self.branch_name = branch_name + self.index = index + self.groups_table = groups_table + self.record_reader = RecordReader(self.groups_table) + + def go(self): + + count = 0 + for record in self.records: + for or_rules in self.rules: + matched = False + for rule in or_rules: + if rule.match(record): + matched = True + break + if not matched: + break + if matched: + record.rec_id = count + count += 1 + self.index.add(record) + self.groups_table.append(record) + print "Finished filtering groups for branch " + self.branch_name + self.groups_table.flush() + + def __iter__(self): + for rec in self.record_reader: + yield rec + +class AcceptGroupFilter(GroupFilter): + def __init__(self, records, branch_name, groups_table, index): + GroupFilter.__init__(self, None, records, branch_name, groups_table, + index) + def go(self): + count = 0 + for record in self.records: + record.rec_id = count + count += 1 + self.index.add(record) + self.groups_table.append(record) + print "Finished filtering groups for branch " + self.branch_name + self.groups_table.flush() diff --git a/filter.pyc b/filter.pyc new file mode 100644 index 0000000..0defe92 Binary files /dev/null and b/filter.pyc differ diff --git a/filter_validator.py b/filter_validator.py new file mode 100644 index 0000000..fe7b76f --- /dev/null +++ 
b/filter_validator.py @@ -0,0 +1,178 @@ +from validator_common import * +from copy import deepcopy +from record import RecordReader +from statement import FilterRef +from filter import Rule as RuleImpl +from filter import Filter as FilterImpl +from filter import BranchMask + +class FilterValidator(object): + def __init__(self, parser): + self.parser = parser + self.n_real_branches = len(self.parser.branch_names) + self.filters = deepcopy(parser.filters) + self.filter_names = dict((filter.name, filter) for filter in self.filters) + self.branch_names = self.parser.branch_names # note! not a copy + # get_input_fields_types() comes from validator_common.py + # get_input_reader()comes from validator_common.py, takes parsed query + # as an input and returns a reader for the parser's input - a reader + # object for an HDF table of flow records + self.fields = get_input_fields_types(get_input_reader(self.parser)).keys() + self.pseudo_branches = {} + # Argument is a reader object that has an access to the description of the + # stored records, and can create a list of available fields + self.input_reader = RecordReader(get_input_reader(parser)) + self.impl = self.create_impl() + + def check_for_unused_filters(self): + for filter in self.filters: + if len(filter.branches) == 0: + msg = "Warning filter %s "%filter.name + msg += "defined on line %s"%filter.line + msg += " is not used in any branch." + print msg + continue # skips unused filters + + def check_duplicate_filter_names(self): + duplicates = {} + for filter in self.filters: + old_val = duplicates.setdefault(filter.name, 0) + duplicates[filter.name] = old_val + 1 + + duplicate_names = [k for k,v in duplicates.iteritems() if v > 1] + if len(duplicate_names) > 0: + msg = "Filter(s) %s"%duplicate_names + msg += " is/are all defined more than once." 
+ raise SyntaxError(msg) + + def check_field_refs(self): + "Check record field references, for unknown fields" + for filter in self.filters: + for rule in iterate_rules(filter): + check_rule_fields(rule, self.fields) + + def change_branch_names_to_id(self): + """ + Turn branch names into numerical ids. This helps with mask creation. + """ + # create numerical branch id's: + self.branches_ids = dict((branch, id) + for id, branch in enumerate(self.parser.branch_names)) + self.ids_branches = dict((id, branch) + for id, branch in enumerate(self.parser.branch_names)) + for filter in self.filters: + filter.branches = [self.branches_ids[br] for br in filter.branches] + + def create_pseudobranches(self): + """ + Finds all Filter ref's and adds their branches to the referenced + filters. If a filter is ORed with another a new branch is created for + each OR-ed rule. + """ + + max_id = len(self.branches_ids) + for filter in self.filters: + for or_rule in filter.rules: + if type(or_rule[0]) is not FilterRef: + # Not a composite rule, so there can't be need for + # pseudo branches + break + if len(or_rule) == 1: + # Not an ORed FilterRef. 
Just add FilterRef's branches + # to the referenced filter + ref_filt = self.parser.names[or_rule[0].name] + ref_filt.branches.update(filter.branches) + else: + # ORed FilteRef create pseudo branches + pseudo_branch_group = [] + for br in filter.branches: + for filter_ref in or_rule: + try: + ref_filt = self.filter_names[filter_ref.name] + except KeyError, ex: + msg = "Filter %s referenced in "%ex.message + msg += "%s is not defined"%filter.name + raise SyntaxError(msg) + id = max_id + max_id += 1 + self.branch_names.add(id) + ref_filt.branches.append(id) + pseudo_branch_group.append((id, filter_ref.NOT)) + ps_br_set = self.pseudo_branches.setdefault(br, []) + ps_br_set.append(pseudo_branch_group) + + def create_masks(self): + branches_masks = {} + rule_masks = {} + for filter in self.filters: + if type(filter.rules[0][0]) is FilterRef: + continue + for branch in filter.branches: + + for or_rule in filter.rules: + if len(or_rule) == 1: + #not an OR rule: + branches_masks.setdefault(branch,[True])[0] = True + sub_br_id = 0 + else: + branches_masks.setdefault(branch, + [True]).append(False) + sub_br_id = len(branches_masks[branch]) - 1 + + for rule in or_rule: + rule_masks.setdefault(rule,[]).append((branch, + sub_br_id, + rule.NOT)) + + self.branches_masks = branches_masks + self.rule_masks = rule_masks + + def create_rule_implementations(self): + rules = [] + for rule, br_mask in self.rule_masks.iteritems(): +# print rule, br_mask + self.replace_nested_rules(rule) +# print rule, br_mask + op = find_op(rule) + args = rule.args + rules.append(RuleImpl(br_mask, op, args)) + + return rules + + def replace_nested_rules(self, rule): + if Rule not in map(type, rule.args): + op = find_op(rule) + args = rule.args + return RuleImpl(None, op, args) + + for i, arg in enumerate(rule.args): + if type(arg) is Rule: + rule.args[i] = self.replace_nested_rules(arg) + + def validate(self): + self.check_duplicate_filter_names() + self.check_field_refs() + 
self.change_branch_names_to_id() + for filter in self.filters: + replace_bound_rules(filter) + replace_with_vals(filter) + + self.create_pseudobranches() + self.check_for_unused_filters() + self.create_masks() + + def create_impl(self): + self.validate() + rules = self.create_rule_implementations() + pseudo_branches = self.pseudo_branches + branch_masks = self.branches_masks + br_mask = BranchMask(branch_masks, pseudo_branches, + self.n_real_branches) + + filter_impl = FilterImpl(rules, self.input_reader, br_mask, + self.n_real_branches) + + + return filter_impl + + diff --git a/filter_validator.pyc b/filter_validator.pyc new file mode 100644 index 0000000..51bd382 Binary files /dev/null and b/filter_validator.pyc differ diff --git a/flowy-py-files.tar.gz b/flowy-py-files.tar.gz new file mode 100644 index 0000000..a7b4064 Binary files /dev/null and b/flowy-py-files.tar.gz differ diff --git a/flowy-run/GroupsA-merged.h5 b/flowy-run/GroupsA-merged.h5 new file mode 100644 index 0000000..af2f7ca Binary files /dev/null and b/flowy-run/GroupsA-merged.h5 differ diff --git a/flowy-run/GroupsA.h5 b/flowy-run/GroupsA.h5 new file mode 100644 index 0000000..5665bd6 Binary files /dev/null and b/flowy-run/GroupsA.h5 differ diff --git a/flowy-run/GroupsB-merged.h5 b/flowy-run/GroupsB-merged.h5 new file mode 100644 index 0000000..4a792c4 Binary files /dev/null and b/flowy-run/GroupsB-merged.h5 differ diff --git a/flowy-run/GroupsB.h5 b/flowy-run/GroupsB.h5 new file mode 100644 index 0000000..8dc8eda Binary files /dev/null and b/flowy-run/GroupsB.h5 differ diff --git a/flowy-run/GroupsC.h5 b/flowy-run/GroupsC.h5 new file mode 100644 index 0000000..b3a0cb3 Binary files /dev/null and b/flowy-run/GroupsC.h5 differ diff --git a/flowy-run/GroupsD.h5 b/flowy-run/GroupsD.h5 new file mode 100644 index 0000000..c501743 Binary files /dev/null and b/flowy-run/GroupsD.h5 differ diff --git a/flowy-run/MergedM.h5 b/flowy-run/MergedM.h5 new file mode 100644 index 0000000..97ac88e Binary files 
/dev/null and b/flowy-run/MergedM.h5 differ diff --git a/flowy.py b/flowy.py new file mode 100755 index 0000000..580a065 --- /dev/null +++ b/flowy.py @@ -0,0 +1,32 @@ +#!/usr/bin/python +import options +from optparse import OptionParser +import flowy_exec +import sys +import ply + +if __name__ == '__main__': + usage = 'usage: %prog [options] input_file.flw' + p = OptionParser(usage) + option_names = ['--time_index_interval_ms', '--max_unsatisfiable_deltas', + '--unsat_delta_threshold_mul', '--do_not_expand_groups'] + for opt_name in option_names: + p.add_option(opt_name) + opts, arguments = p.parse_args() + + for opt_name in map(lambda x: x[2:], option_names): + opt = getattr(opts, opt_name) + if opt: + setattr(options, opt_name, opt) + + if len(arguments) != 1: + sys.stderr.write('Exactly one argument expected\n') + exit(1) + + file = arguments[0] + + try: + flowy_exec.run(file) + except (ply.yacc.YaccError, SyntaxError) as e: + import sys + sys.stderr.write(str(e)+'\n') diff --git a/flowy_exec.py b/flowy_exec.py new file mode 100644 index 0000000..3125819 --- /dev/null +++ b/flowy_exec.py @@ -0,0 +1,142 @@ +from parser import Parser +from filter_validator import FilterValidator +from splitter_validator import SplitterValidator +from grouper_validator import GrouperValidator +from groupfilter_validator import GroupFilterValidator +from merger_validator import MergerValidator +from ungrouper_validator import UngrouperValidator +from threading import Thread +import options +import profiler +import time +#profiler.profile_on() +start = time.clock() +print start + +def run(filename): + + #valstart_elapsed = (time.clock() - start) + #print "Parsing and validation started:", valstart_elapsed + + p = Parser() + + file = open(filename) + doc = file.read() + + p.parse(doc) + + #inps = get_inputs_list(p) + #print get_input_fields_types(inps[0]) +# hdf_file = "../testFT2.h5" +# r = pytables.FlowRecordsTable(hdf_file) +# recordReader = record.RecordReader(r) + f = 
FilterValidator(p) +# fl = f.impl + s = SplitterValidator(p, f) + spl = s.impl + + + gr = GrouperValidator(p, s) +# grs = gr.impl + + gr_filt = GroupFilterValidator(p, gr) + # Returns a number of group-filter instances + # with accordance to the number of branches. + gr_filters = gr_filt.impl + + + mr = MergerValidator(p, gr_filt) + mergers = mr.impl + + #valend_elapsed = (time.clock() - start) + #print "Parsing and validation finished:", valend_elapsed + + splitter_thread = Thread(target=spl.go) + + gf_threads = [Thread(target=gf.go)for gf in gr_filters] + + splitter_elapsed = (time.clock() - start) + print "Splitter time estarted:", splitter_elapsed + splitter_thread.start() + + + + groupfil_start= (time.clock() - start) + print "Group filter time started:", groupfil_start + for gf_thread in gf_threads: + gf_thread.start() + + #Originally it was after gf_thread.start() + splitter_thread.join() + print "Splitter finished" + + splitter_elapsed = (time.clock() - start) + print "Splitter time elapsed:", splitter_elapsed + + for gf_thread in gf_threads: + gf_thread.join() + + groupfil_elapsed = (time.clock() - start) + print "Group filter threads joined:", groupfil_elapsed + + merger_threads = [Thread(target=m.go()) for m in mergers] + for merger_thread in merger_threads: + merger_thread.start() + + + for merger_thread in merger_threads: + merger_thread.join() + + + merger_elapsed = (time.clock() - start) + print "Merger time elapsed:", merger_elapsed + + + ung = UngrouperValidator(p, mr) + ungroupers = ung.impl + + ungrouper_threads = [Thread(target=u.go) for u in ungroupers] + for ungrouper_thread in ungrouper_threads: + ungrouper_thread.start() + + for ungrouper_thread in ungrouper_threads: + ungrouper_thread.join() + + +# profiler.profile_off() +# import pickle +# stats = profiler.get_profile_stats() +# sorted_stats = sorted(stats.iteritems(), key=lambda a: a[1][1]/a[1][0]) +# for st in sorted_stats: +# print st +# print ' ' + + print "FINISHED!" 
+ overall_elapsed = (time.clock() - start) + print "Overall time elapsed:", overall_elapsed +# fname = mergers[0].merger_table.tuples_table.file_path +# print fname + + + + import ft2hdf + + + +if __name__ == '__main__': + options.delete_temp_files = True + import ply +# import profiler +# profiler.profile_on() + run('www_one_dir.flw') +# +# +# profiler.profile_off() +# import pickle +# stats = profiler.get_profile_stats() +# sorted_stats = sorted(stats.iteritems(), key=lambda a: a[1][0]) +# for st in sorted_stats: +# print st +# +# f = open('./profile_stats1', 'w') +# pickle.dump(sorted_stats,f) diff --git a/flowy_exec.pyc b/flowy_exec.pyc new file mode 100644 index 0000000..5cc95e6 Binary files /dev/null and b/flowy_exec.pyc differ diff --git a/ft2hdf.py b/ft2hdf.py new file mode 100755 index 0000000..344c3e2 --- /dev/null +++ b/ft2hdf.py @@ -0,0 +1,183 @@ +#!/usr/bin/python +from pytables import FlowRecordsTable +import pytables +import ftreader +import record +import os +from os.path import split, join, islink +import re +import sys +from bisect import bisect, bisect_left +from operator import itemgetter +from optparse import OptionParser + +#def ft2hdf(ft_file, hdf_file): +# ft_fields = ftreader.find_fields(ft_file) +# fields = ftreader.translate_field_names(ft_fields, +# ftreader.default_names_dict) +# field_types = dict((field,pytables.default_ft_types[field]) +# for field in fields) +## print field_types +# pytables.create_table_file(hdf_file, field_types) +# rec_table = pytables.FlowRecordsTable(hdf_file) +# # since pytables is initiated with dictionary there is no way to +# # sort the fields order, so we have to translate back in order +# # to keep the fields names order +# ordered_ft_fields = ftreader.translate_field_names(rec_table.fields, +# ftreader.reverse_names_dict) +# flow_set = ftreader.FlowToolsReader(ft_file, ordered_ft_fields) +# for flow in flow_set: +# rec_table.append(flow) +# rec_table.close() + + +def ft2hdf_single(ft_file, hdf_file): + 
ft_fields = ftreader.find_fields(ft_file) + fields = ftreader.translate_field_names(ft_fields, + ftreader.default_names_dict) + field_types = dict((field,pytables.default_ft_types[field]) + for field in fields) +# print field_types + pytables.create_table_file(hdf_file, field_types) + rec_table = pytables.FlowRecordsTable(hdf_file) + # since pytables is initiated with dictionary there is no way to + # sort the fields order, so we have to translate back in order + # to keep the fields names order + ordered_ft_fields = ftreader.translate_field_names(rec_table.fields, + ftreader.reverse_names_dict) + flow_set = ftreader.FlowToolsReader(ft_file, + ordered_ft_fields, rec_table.fields[1:]) + rec_set = record.RecordReader(flow_set) + for flow in rec_set: + rec_table.append(flow) + rec_table.close() + +def ft2hdf(many_files, hdf_file): + ft_file = many_files[0] + ft_fields = ftreader.find_fields(ft_file) # returns fields present in the flow record + fields = ftreader.translate_field_names(ft_fields, ftreader.default_names_dict) + field_types = dict((field,pytables.default_ft_types[field]) for field in fields) +# print ft_fields +# print fields + pytables.create_table_file(hdf_file, field_types) + rec_table = pytables.FlowRecordsTable(hdf_file) + # since pytables is initiated with dictionary there is no way to + # sort the fields order, so we have to translate back in order + # to keep the fields names order + ordered_ft_fields = ftreader.translate_field_names(rec_table.fields, ftreader.reverse_names_dict) + + for ft_file in many_files: + flow_set = ftreader.FlowToolsReader(ft_file, ordered_ft_fields, rec_table.fields[1:]) # all fields except 'id_rec' + rec_set = record.RecordReader(flow_set) + for flow in rec_set: + rec_table.append(flow) + rec_table.close() + +def printHDF(hdf_file): + r = pytables.FlowRecordsTable(hdf_file) + recordReader = record.RecordReader(r) + for rec in recordReader: + print rec + +class FSLoop(Exception): + pass + +def findFiles(path, start_time, 
end_time, filter_files = False): + timeExp = re.compile(r"ft-v05\.(\d{4})-(\d{2})-(\d{2}).(\d{6}).(\d{4})") + + time_file_list = [] + dir_links = [path] + def walkDirs(dir_links): + file_list = [] + more_dir_links = [] + for link in dir_links: + for root, dirs, files in os.walk(link): + for file in files: + match = timeExp.search(file) + if match: + element = (int(''.join(match.groups()[:-1])), join(root,file)) + if element in time_file_list: + raise FSLoop + file_list.append(element) + for dir in dirs: + if islink(join(root,dir)): + print file + more_dir_links.append(join(root,dir)) + return file_list, more_dir_links + + while len(dir_links) > 0: + tf, dir_links = walkDirs(dir_links) + time_file_list.extend(tf) + + def cmp((a,x),(b,y)): + if a-b < 0: + return -1 + elif a-b>0: + return 1 + else: + return 0 + + time_file_list.sort(cmp) + + if (filter_files): + keys = [r[0] for r in time_file_list] + begin = 0 + end = len(time_file_list) + if start_time is not None: + begin = bisect_left(keys, long(start_time)) + if end_time is not None: + end = bisect(keys, long(end_time)) + # the start and end time must be converted to long + time_file_list = time_file_list[begin:end] + + time_file_list = map(lambda (x,y):y,time_file_list) + return time_file_list + +def dateToInt(date): + number_of_digits = [4, 2, 2, 2, 2, 2] + separators = '[- :/]*' + expr = "\d{%s}"%number_of_digits[0] + for digit in number_of_digits[1:]: + expr += separators + "(\d{%s})"%digit + timeExp = re.compile(expr) + result = timeExp.match(date) + if result is None: + raise ValueError("invalid date format") + return date.translate(None, '- :/') + +def lotsOfFolders(paths, start_time=None, end_time=None): + full_file_paths=[] + start_time, end_time = [dateToInt(d) if d != None else d for d in (start_time, end_time)] + for path in paths: + full_file_paths.extend(findFiles(path, start_time, end_time, True)) + # sort the results + split_paths = map(split, full_file_paths) + split_paths = set(split_paths) + 
split_paths = sorted(split_paths, key=itemgetter(1)) + full_file_paths = [join(x, y) for x, y in split_paths] + + return full_file_paths + +def main(): + usage = 'usage: %prog [options] input_path1 [input_path2 [...]] output_file.h5' + p = OptionParser(usage) + p.add_option('--start-time', '-s') + p.add_option('--end-time', '-e') + options, arguments = p.parse_args() + start_time = options.start_time + end_time = options.end_time + folders = arguments[:-1] + output = arguments[-1] + if not (output[output.find('.h5'):] == '.h5'): + sys.stderr.write('Output file should have an .h5 extension\n') + exit(1) + file_paths = lotsOfFolders(folders, start_time,end_time) + if len(file_paths) < 1: + sys.stderr.write('No flow-tools files found\n') + exit(1) + ft2hdf(file_paths, output) + + +if __name__ == "__main__": + main() + diff --git a/ft2hdf.pyc b/ft2hdf.pyc new file mode 100644 index 0000000..0ee4ebe Binary files /dev/null and b/ft2hdf.pyc differ diff --git a/ftreader.py b/ftreader.py new file mode 100644 index 0000000..b52eb34 --- /dev/null +++ b/ftreader.py @@ -0,0 +1,109 @@ +import flowtools +from os.path import exists + +default_names_dict = { + 'dFlows': 'dFlows', 'dOctets': 'bytes', 'dPkts': 'dPkts', + 'dst_as': 'dst_as', 'dst_mask': 'dst_mask', 'dst_tag': 'dst_tag', + 'dstaddr_raw': 'dstip', 'dstport': 'dstport', + 'engine_id': 'engine_id', 'engine_type': 'engine_type', + 'exaddr_raw': 'exaddr', 'extra_pkts': 'extra_pkts', + 'first_raw': 'stime', 'in_encaps': 'in_encaps', + 'input': 'input', 'last_raw': 'etime', 'marked_tos': 'marked_tos', + 'nexthop_raw': 'nexthop', 'out_encaps': 'out_encaps', + 'output': 'output', 'peer_nexthop_raw': 'peer_nexthop', + 'prot': 'prot', 'router_sc': 'router_sc', 'src_as': 'src_as', + 'src_mask': 'src_mask', 'src_tag': 'src_tag', + 'srcaddr_raw': 'srcip', 'srcport': 'srcport', + 'sysUpTime': 'sysUpTime', 'tcp_flags': 'tcp_flags', + 'tos': 'tos', 'unix_nsecs': 'unix_nsecs', + 'unix_secs': 'unix_secs'} + +reverse_names_dict = 
dict(zip(default_names_dict.values(), + default_names_dict.keys())) + +# list of the possible fields in the flow tools file +flow_tools_fields = ['dFlows', 'dOctets', 'dPkts', 'dst_as', 'dst_mask', + 'dst_tag', 'dstaddr_raw', 'dstport', 'engine_id', + 'engine_type', 'exaddr_raw', 'extra_pkts', 'first_raw', + 'in_encaps', 'input', 'last_raw', 'marked_tos', + 'nexthop_raw', 'out_encaps', 'output', 'peer_nexthop_raw', + 'prot', 'router_sc', 'src_as', 'src_mask', 'src_tag', + 'srcaddr_raw', 'srcport', 'sysUpTime', 'tcp_flags', 'tos', + 'unix_nsecs', 'unix_secs'] + +def find_fields(flowtools_file, fields_of_interest=flow_tools_fields): + ''' + Returns list of fields_of_interest which are present in + flotools_file. + Arguments: + flowtools_file - path to flowtools records file + fields_of_interest - names of the fields for which to check + if none is given all possible fields are searched for. + ''' + # read first record to see which fields are present: + flowset = flowtools.FlowSet(flowtools_file) + flow = iter(flowset).next() + + # Find which fields are present in the file + # (The flow record should have these attributes): + present_fields = [k for k in fields_of_interest if hasattr(flow,k)] + return present_fields + +def translate_field_names(fields_list, dictionary): + ''' + Translates names of fields which have keys dictionary. + For names not present in dictionary the name remains unchanged. 
+ ''' + return [dictionary[k] for k in fields_list if dictionary.has_key(k)] + +def create_flowtools_value_reader(fields): + def get_fields(record): + x = tuple(getattr(record,attr) for attr in fields) + return x + + return get_fields + + +class FlowToolsReader(object): + + def __init__(self, path, ft_fields=None, fields=None): + self.ft_fields = ft_fields if ft_fields else flow_tools_fields + self.fields = fields if fields else ft_fields + self.fields = ('rec_id',) + self.fields + self.get_vals = create_flowtools_value_reader(self.ft_fields) + if exists(path): + self.path = path + else: + raise IOError("File %s cannot be accessed."%path) + + + def __iter__(self): + flowset = flowtools.FlowSet(self.path) + for id, flow in enumerate(flowset): + yield (id,) + self.get_vals(flow) + raise StopIteration + + + +#ft_file = "../ft-v05.2008-10-02.120001+0200" +#ft_fields = find_fields(ft_file) +#print ft_fields +#fields = translate_field_names(ft_fields, default_names_dict) +#import pytables +#field_types = dict((field,pytables.default_ft_types[field]) for field in fields) +#ordered_ft_fields = translate_field_names(field_types.keys(), reverse_names_dict) +#print ordered_ft_fields +#flow_set = FlowToolsReader(ft_file, ft_fields, ft_fields) +#import record +#rec_set = record.RecordReader(flow_set) +#print len(flow_set.fields) +#unix_secs = 0 +#sysuptime = 0 +#uptime_set = set() +#for i, flow in enumerate(rec_set): +# if sysuptime != flow.sysUpTime: +# sysuptime = flow.sysUpTime +# uptime_set.add(sysuptime) +# print i, 'ut', flow.sysUpTime - flow.last_raw, 'usecs', flow.unix_secs, 'first - last', flow.last_raw - flow.first_raw +# +#print uptime_set diff --git a/ftreader.pyc b/ftreader.pyc new file mode 100644 index 0000000..7382fa3 Binary files /dev/null and b/ftreader.pyc differ diff --git a/gnuplot-http.dat b/gnuplot-http.dat new file mode 100644 index 0000000..d7d7c1e --- /dev/null +++ b/gnuplot-http.dat @@ -0,0 +1,7 @@ +# Records # Splitter(s) Grouper(s) Merger(s) Branch 
A Branch B Records Match +3811 0.6 0.74 2.81 146 143 68 +26521 24.8 34.95 144.75 1800 1816 1683 +56992 53.06 57.68 443.36 1985 2004 2438 +99925 100.03 136.09 960.34 3644 3684 4038 +298063 475.83 1415.34 11485 16412 16666 15131 +916633 1706.32 50141 50338 diff --git a/gnuplot-https.dat b/gnuplot-https.dat new file mode 100644 index 0000000..e1cd8b3 --- /dev/null +++ b/gnuplot-https.dat @@ -0,0 +1,6 @@ +# Records Splitter(s) Grouper(s) Merger(s) Branch A Branch B Records Match +26521 6.1 6.17 6.23 243 243 486 +56992 13.2 13.2 13.23 158 61 219 +99925 +298063 +916633 diff --git a/gnuplot.dat b/gnuplot.dat new file mode 100644 index 0000000..eeca73e --- /dev/null +++ b/gnuplot.dat @@ -0,0 +1,5 @@ +# Records # Splitter Grouper Merger +3811 +26521 +56992 +298063 diff --git a/grouper.py b/grouper.py new file mode 100644 index 0000000..f2156d5 --- /dev/null +++ b/grouper.py @@ -0,0 +1,238 @@ +import record +import options +from aggr_operators import count +import time +import profiler + +class UnsatisfiableDelta(Exception): + pass + +class Grouper(object): + def __init__(self, id, modules, aggr_ops, records, branch_name): + self.modules = modules + self.records = records + self.aggr_ops = aggr_ops + self.group_record_fields = self.create_gr_record_fields_list() + self.group_record_fields = ('rec_id',) + self.group_record_fields + self.group_record_types = self.create_gr_record_fields_types() + self.group_records = [] + self.branch_name = branch_name + self.Record = record.get_record_class(self.group_record_fields) + + #profiler.profile_on() + + #profiler.profile_off() + #import pickle + #stats = profiler.get_profile_stats() + #sorted_stats = sorted(stats.iteritems(), key=lambda a: a[1][1]/a[1][0]) + #for st in sorted_stats: + # print st + # print + + def new_group(self, record): + group = Group(record, self.modules, self.aggr_ops) + return group + + def __iter__(self): + count = 0 + start2 = time.clock() + #print "Grouping started at:", start2 + + # For each of the records 
that have passed either + # of the branched conditions we try to find a + for record in self.records: +# print record + matched = False + count = count + 1 +# print len(self.group_records) + del_list = [] + try: + for i, group_record in enumerate(self.group_records): +# print i + if group_record.satisfiable: + if group_record.match(record): # match from Group class + matched = True + break + else: + yield self.Record(*((count,)+group_record.export())) + count += 1 + del_list.append(i) + except ValueError: + # Group Records list is empty + # WARNING may catch ValueError from another place + # group list is still empty + matched = False # this will signal new group creation + if not matched: + self.group_records.append(self.new_group(record)) + + # remove exported groups: + for n_removed, i in enumerate(del_list): + # count removed elements with n_removed and compensate + # because positions change when removing elements + # Fortunately del_list is sorted so '-' works as + # a compensation, as all removed elements are before the + # current one + del self.group_records[i - n_removed] + + print "Number of records in branch "+self.branch_name, count + + for group_record in self.group_records: + yield self.Record(*((count,)+group_record.export())) + count += 1 + print "Finished grouping branch "+self.branch_name + #time_elapsed2 = (time.clock() - start2) + #print "Grouping time finished for branch "+self.branch_name, time_elapsed2 + #print "Current time is: ", time.clock() + + def create_gr_record_fields_list(self): + field_list = [] + for module in self.modules: + for op in module.aggr_ops: + field_list.append(op.gr_field) + + for op in self.aggr_ops: + field_list.append(op.gr_field) + return tuple(field_list) + + def create_gr_record_fields_types(self): + type_list = [None] + for module in self.modules: + for op in module.aggr_ops: + type_list.append(op.field_type) + + for op in self.aggr_ops: + if type(op) == count: + type_list[0] = op.field_type # set the type for 
rec_id + type_list.append(op.field_type) + return tuple(type_list) + +class AggrOp(object): + def __init__(self, op, field, gr_field, field_type): + self.op = op + self.field = field + self.gr_field = gr_field # field name used for the grouping of a set of common entries + self.field_type = field_type + + def new_op(self): + return self.op(self.field, self.gr_field, self.field_type) + +class GrouperModule(object): + def __init__(self, name, rules, aggr_ops): + self.name = name + self.rules = rules + self.aggr_ops = aggr_ops # set of the defined aggregation operations, plus 3 implicit operations + + def match(self, record, group): + for rule in self.rules: + if not rule.match(record, group): + return False + return True + +class GrouperRule(object): + def __init__(self, op, old_rec_field, new_record_field, + delta=None, relative=False): + self.op = op + self.old_rec_field = old_rec_field + self.new_rec_field = new_record_field + self.delta = delta + self.relative = relative + self.is_shortcut = self.check_is_shortcut() +# print self.op, self.old_rec_field, self.new_rec_field + + def check_is_shortcut(self): + if self.delta: + if (self.old_rec_field in ('stime', 'etime') and + self.new_rec_field in ('stime', 'etime')): + return True + + return False + + def match(self, record, group): + new = getattr(record, self.new_rec_field) + if self.relative: + old = getattr(group.last_record, self.old_rec_field) + else: + old = getattr(group.first_record, self.old_rec_field) + + if self.delta: + if self.op(abs(new - old), self.delta): + return True + elif (self.is_shortcut and + not self.op(abs(new - old), + self.delta * options.unsat_delta_threshold_mul )): +# print abs(new - old)/1000.0, (self.delta * options.unsat_delta_threshold_mul)/1000.0 + raise UnsatisfiableDelta + else: + return True + else: + return self.op(old, new) + +class Group(object): + __slots__ = ['modules', 'modules_aggr_ops', 'aggr_ops', 'records', + 'first_record', 'last_record', 'satisfiable', + 
'n_unsatisfiable_deltas', 'max_unsat_deltas'] + + def __init__(self, first_record, modules, aggr_ops, + max_unsat_deltas=options.max_unsatisfiable_deltas): + self.first_record = first_record + self.last_record = first_record # changes with each new matched record + self.modules = modules + # list of lists of aggr_ops each corresponding to a module + self.modules_aggr_ops = self.create_modules_aggr() + self.aggr_ops = self.create_aggr_ops(aggr_ops) + self.satisfiable = True + self.n_unsatisfiable_deltas = 0 + self.max_unsat_deltas = max_unsat_deltas + + def create_modules_aggr(self): + modules_aggr_ops = [] + for module in self.modules: + aggr = [op.new_op() for op in module.aggr_ops] + for op in aggr: + op(self.first_record) + modules_aggr_ops.append(aggr) + return modules_aggr_ops + + def create_aggr_ops(self, aggr_ops): + aggr = [op.new_op() for op in aggr_ops] + for op in aggr: + op(self.first_record) + return aggr + + def match(self, record): + matched = False + for module, aggr_ops in zip(self.modules, self.modules_aggr_ops): + try: + if module.match(record, self): + for op in aggr_ops: + op(record) + matched = True + except UnsatisfiableDelta: + if matched: + continue + self.n_unsatisfiable_deltas += 1 + if self.n_unsatisfiable_deltas > self.max_unsat_deltas: + self.satisfiable = False + + if matched: + # self.aggr_ops contains the fields from the aggregation statement of the grouper module + # as well as 3 other implicitly stated aggregation operations (etime, stime, records...) 
+ for aggr_op in self.aggr_ops: + aggr_op(record) +# print aggr_op.gr_field, aggr_op() +# print self.records + self.n_unsatisfiable_deltas = 0 + return True + else: + return False + + def export(self): + fields = [] + for aggr_ops in self.modules_aggr_ops: + for op in aggr_ops: + fields.append(op()) + + for op in self.aggr_ops: + fields.append(op()) + + return tuple(fields) diff --git a/grouper.pyc b/grouper.pyc new file mode 100644 index 0000000..201c9e7 Binary files /dev/null and b/grouper.pyc differ diff --git a/grouper_validator.py b/grouper_validator.py new file mode 100644 index 0000000..e928ce3 --- /dev/null +++ b/grouper_validator.py @@ -0,0 +1,179 @@ +from validator_common import * +from copy import deepcopy +from tables import UIntAtom, UIntCol +from grouper import GrouperModule as GrouperModuleImpl +from grouper import Grouper as GrouperImpl +from grouper import GrouperRule as GrouperRuleImpl +from grouper import AggrOp as AggrOpImpl +import profiler + +class GrouperValidator(object): + def __init__(self, parser, splitter_validator): + self.parser = parser + self.fields_types = get_input_fields_types( + get_input_reader(self.parser)) + self.groupers = deepcopy(parser.groupers) +# print splitter_validator.br_name_to_br + self.br_name_to_br = splitter_validator.br_name_to_br + self.br_name_to_grouper = {} + self.impl = self.create_impl() + ' ' + + def validate(self): + self.check_field_refs() + self.check_duplicate_grouper_names() + for grouper in self.groupers: + self.check_duplicate_module_names(grouper) + + for module in grouper.modules: + # Both of these come from the validator_common.py + # module in this case is/are the modules present in + # each instance of the grouper + #print module + replace_bound_rules(module) + replace_with_vals(module) + + def check_duplicate_grouper_names(self): + duplicates = {} + for grouper in self.groupers: + old_val = duplicates.setdefault(grouper.name, 0) + duplicates[grouper.name] = old_val + 1 + + duplicate_names = 
[k for k,v in duplicates.iteritems() if v > 1] + if len(duplicate_names) > 0: + msg = "Grouper(s) %s"%duplicate_names + msg += " is/are all defined more than once." + raise SyntaxError(msg) + + def check_duplicate_module_names(self, grouper): + duplicates = {} + for module in grouper.modules: + old_val = duplicates.setdefault(module.name, 0)# Insert (key, value) into the dictionary + duplicates[module.name] = old_val + 1 + + duplicate_names = [k for k,v in duplicates.iteritems() if v > 1] + if len(duplicate_names) > 0: + msg = "Module(s) %s"%duplicate_names + msg += " is/are all defined more than once in grouper" + msg += " %s."%grouper.name + raise SyntaxError(msg) + + # Check for presence of the reference fields + def check_field_refs(self): + for grouper in self.groupers: + for module in grouper.modules: + for rule in module.rules: + # Checks if the rule names of modules match those that were established + # from the flow records (passed as a second argument here). Defined in + # validator_common + check_rule_fields(rule[0], self.fields_types.keys()) + + # This section checks the correctness of the field names passed to the aggregator + # section of the grouper stage. field_types are defined in init and are also + # obtained in the validaton_common module. 
+ for aggr in grouper.aggr: + for arg in aggr.args: + if type(arg) == Field: + mod, _, field = arg.name.partition('.') + if field != '': + if field not in self.fields_types.keys(): + msg = 'There is no such field %s, '%arg.name + msg += 'referenced at line %s'%aggr.line + raise SyntaxError(msg) + else: + if mod not in self.fields_types.keys(): + msg = 'There is no such field %s, '%arg.name + msg += 'referenced at line %s'%aggr.line + raise SyntaxError(msg) + + # + def create_grouper_rules_impl(self, grouper): + modules_list = [] + for module in grouper.modules: + rule_impl_list = self.convert_module_rules(module) + aggr_ops_list = self.convert_module_aggr_ops(grouper, module) + module_impl = GrouperModuleImpl(module.name, rule_impl_list, + aggr_ops_list) + modules_list.append(module_impl) + + grouper_aggr_ops = [] + for aggr in grouper.aggr: + init_args = self.create_aggr_impl_init_args(aggr) +# print init_args + spl = str.split(init_args[1], '.') + if len(spl) > 1: + msg = 'There is no such grouper module %s, '%spl + msg += 'referenced on line %s'%aggr.line + raise SyntaxError(msg) + impl = AggrOpImpl(*init_args) + grouper_aggr_ops.append(impl) + + groupers = [GrouperImpl(grouper.name, modules_list, grouper_aggr_ops, + self.br_name_to_br[br_name], br_name) + for br_name in grouper.branches] + + for grouper in groupers: + self.br_name_to_grouper[grouper.branch_name] = grouper +# print self.br_name_to_grouper + return groupers + + + def convert_module_aggr_ops(self, grouper, module): + aggr_ops_list = [] + del_list = [] + for aggr in grouper.aggr: + op, field, gr_field, field_type = self.create_aggr_impl_init_args( + aggr) + mod_name, _, f = str.partition(field, '.') + if f != '': + if module.name == mod_name: + impl = AggrOpImpl(op, f, gr_field, field_type) + aggr_ops_list.append(impl) + del_list.append(aggr) + + for a in del_list: + grouper.aggr.remove(a) + + return aggr_ops_list + + def create_aggr_impl_init_args(self, aggr): + field = aggr.args[0].name + if '.' 
in field: + _, _, non_qid_field = field.partition('.') + else: + non_qid_field = field + gr_field = aggr.args[1] + if aggr.op == 'count': + field_type = UIntCol(self.fields_types['rec_id'].itemsize) + elif aggr.op == 'union': + field_type = UIntAtom(self.fields_types[non_qid_field].itemsize) + else: + field_type = UIntCol(self.fields_types[non_qid_field].itemsize) + + op = find_op(aggr, 'aggr_operators') + + return op, field, gr_field, field_type + + def convert_module_rules(self, module): + rule_impl_list = [] + for rules in module.rules: + for rule in rules: + op = find_op(rule) + args = [arg.name if type(arg) is Field else arg + for arg in rule.args] + rule_impl_list.append(GrouperRuleImpl(op, *args)) + return rule_impl_list + + def create_impl(self): + self.validate() + groupers_impls = [] + for grouper in self.groupers: + groupers_impls.extend(self.create_grouper_rules_impl(grouper)) + +# print self.br_name_to_grouper + for br_name in self.br_name_to_br.keys(): + if br_name not in self.br_name_to_grouper.keys(): + msg = 'There is no grouper for branch %s.'%br_name + raise SyntaxError(msg) + + return groupers_impls diff --git a/grouper_validator.pyc b/grouper_validator.pyc new file mode 100644 index 0000000..deb99df Binary files /dev/null and b/grouper_validator.pyc differ diff --git a/groupfilter.py b/groupfilter.py new file mode 100644 index 0000000..ac6e7cf --- /dev/null +++ b/groupfilter.py @@ -0,0 +1,62 @@ +from record import RecordReader +from filter import Rule +import profiler + +class GroupFilter(object): + def __init__(self, rules, records, branch_name, groups_table, index): + self.rules = rules + self.records = records + self.branch_name = branch_name + self.index = index + self.groups_table = groups_table + self.record_reader = RecordReader(self.groups_table) + + def go(self): + count = 0 + for record in self.records: # These are the grouped records according to the groupers/modules +# print record + matched = False + for or_rules in self.rules: +# 
matched = False + for rule in or_rules: # This for-loop, just extracts the rule from the list +# print rule + if rule.match(record): +# print rule.operation + matched = True + break + if not matched: + break + if matched: + record.rec_id = count + count += 1 + # Adds a record to the TimeIndex class' time interval + # as an index value, over those times that the record + # covers with its start-/end-time intervals. + self.index.add(record) + self.groups_table.append(record) + print "Finished group-filtering for branch " + self.branch_name + + self.groups_table.flush() + + + + def __iter__(self): + for rec in self.record_reader: + yield rec + +class AcceptGroupFilter(GroupFilter): + def __init__(self, records, branch_name, groups_table, index): + GroupFilter.__init__(self, None, records, branch_name, groups_table, + index) +# NIK commented out on Feb 08 +# This function is not used anywhere +# in the code +# def go(self): +# count = 0 +# for record in self.records: +# record.rec_id = count +# count += 1 +# self.index.add(record) +# self.groups_table.append(record) +# print "Finished filtering groups for branch " + self.branch_name +# self.groups_table.flush() diff --git a/groupfilter.pyc b/groupfilter.pyc new file mode 100644 index 0000000..d9284ad Binary files /dev/null and b/groupfilter.pyc differ diff --git a/groupfilter_validator.py b/groupfilter_validator.py new file mode 100644 index 0000000..d95fe7b --- /dev/null +++ b/groupfilter_validator.py @@ -0,0 +1,141 @@ +import options +from copy import deepcopy +from validator_common import * +from groupfilter import Rule as RuleImpl +from groupfilter import GroupFilter as GroupFilterImpl +from groupfilter import AcceptGroupFilter as AcceptGroupFilterImpl +from operators import NOT +import pytables +from timeindex import TimeIndex +import time + + + +class GroupFilterValidator(object): + # The initiation of the GroupFilterValidator happens only ones. 
+ def __init__(self, parser, grouper_validator): + self.parser = parser + self.grouper_validator = grouper_validator + self.filters = deepcopy(parser.group_filters) + self.branches_fields = self.get_branches_fields() + self.br_name_to_grouper = grouper_validator.br_name_to_grouper + self.br_name_to_gr_filter = {} + self.impl = self.create_impl() + + def check_duplicate_filter_names(self): + duplicates = {} + for filter in self.filters: + old_val = duplicates.setdefault(filter.name, 0) + duplicates[filter.name] = old_val + 1 + + duplicate_names = [k for k,v in duplicates.iteritems() if v > 1] + if len(duplicate_names) > 0: + msg = "Group filter(s) %s"%duplicate_names + msg += " is/are all defined more than once." + raise SyntaxError(msg) + + def check_field_refs(self): + "Check record field references, for unknown fields" + for filter in self.filters: + for rule in iterate_rules(filter): + for branch in filter.branches: + check_rule_fields(rule, self.branches_fields[branch]) + + + def get_branches_fields(self): + branches_fields = {} + for grouper in self.grouper_validator.impl: + branches_fields[grouper.branch_name] = grouper.group_record_fields + return branches_fields + + def validate(self): + self.check_for_unused_filters() + self.check_field_refs() + self.check_duplicate_filter_names() + + def check_for_unused_filters(self): + for filter in self.filters: + if len(filter.branches) == 0: + msg = "Warning groupfilter %s "%filter.name + msg += "defined on line %s"%filter.line + msg += " is not used in any branch." 
+ print msg + continue # skips unused filters + + def get_rule_impl(self, rule): + op = find_op(rule) + args = [self.get_rule_impl(arg) if type(arg) == Rule else arg + for arg in rule.args] + impl = RuleImpl(None, NOT(op) if rule.NOT else op, args) + + return impl + + def get_rules_impl(self, filter): + replace_bound_rules(filter) + replace_with_vals(filter) + rules_impl = [] + for or_rule in filter.rules: + or_rule_list = [] + for rule in or_rule: + impl = self.get_rule_impl(rule) + or_rule_list.append(impl) + rules_impl.append(or_rule_list) + + return rules_impl + + def create_impl(self): + + #start = time.clock() + #print "GF validation started at:", start + self.validate() + group_filters_impl = [] + + for filter in self.filters: + rules_impl = self.get_rules_impl(filter) + for br_name in filter.branches: + records = self.br_name_to_grouper[br_name] + index = TimeIndex(5000) + grouper = records + field_types = dict(zip(grouper.group_record_fields, + grouper.group_record_types)) +# print records + fname = options.temp_path + options.groups_file_prefix + fname += br_name+".h5" + if options.delete_temp_files: if_exists_delete(fname) + file = pytables.create_table_file(fname, field_types) + groups_table = pytables.FlowRecordsTable(fname) # Create separate table files for each of the branches + filt_impl = GroupFilterImpl(rules_impl, records, br_name, + groups_table, index) + group_filters_impl.append(filt_impl) + + self.br_name_to_gr_filter = dict((filt.branch_name, filt) + for filt in group_filters_impl) + + + # Check for branches that don't have group filters and and put accept + # filters on them + for br_name in self.br_name_to_grouper.keys(): + if br_name not in self.br_name_to_gr_filter.keys(): +# print "We get here if the group-filter is removed" + records = self.br_name_to_grouper[br_name] + index = TimeIndex(5000) + grouper = records + field_types = dict(zip(grouper.group_record_fields, + grouper.group_record_types)) + fname = options.temp_path + 
options.groups_file_prefix + fname += br_name+".h5" + if options.delete_temp_files: if_exists_delete(fname) + file = pytables.create_table_file(fname, field_types) + groups_table = pytables.FlowRecordsTable(fname) + filt_impl = AcceptGroupFilterImpl(records, br_name, + groups_table, index) # This class is called in case some branch is missing + # the definition of a group-filter. Essentially a plain + # GroupFilter, but with no rules as an argument. + self.br_name_to_gr_filter[br_name] = filt_impl + group_filters_impl.append(filt_impl) + + #time_elapsed = (time.clock() - start) + #print "GF Validation required:", time_elapsed + return group_filters_impl + + diff --git a/groupfilter_validator.pyc b/groupfilter_validator.pyc new file mode 100644 index 0000000..8f3fadd Binary files /dev/null and b/groupfilter_validator.pyc differ diff --git a/h5ports.h5 b/h5ports.h5 new file mode 100644 index 0000000..7919985 Binary files /dev/null and b/h5ports.h5 differ diff --git a/http-download.flw b/http-download.flw new file mode 100644 index 0000000..16a6a6d --- /dev/null +++ b/http-download.flw @@ -0,0 +1,66 @@ +splitter S {} + +filter www_req { + dstport = 80 +} + +filter www_res { + srcport = 80 +} + +filter www_res1 { + srcport = 80 +} + +grouper g_www_req { + module g1 { + srcip = srcip + dstip = dstip + etime < stime delta 1s + } + aggregate srcip, dstip, sum(bytes) as bytes, count(rec_id) as n, + bitOR(tcp_flags) as flags, union(srcport) as srcports +} + +grouper g_www_res { + module g1 { + srcip = srcip + dstip = dstip + etime < stime delta 1s + } + aggregate srcip, dstip, sum(bytes) as bytes, count(rec_id) as n, + bitOR(tcp_flags) as flags, union(dstport) as dstports +} + +grouper g_www_res1 { + module g1 { + srcip = srcip + dstip = dstip + etime < stime delta 5s + } + aggregate srcip, dstip, sum(bytes) as bytes, count(rec_id) as n, + bitOR(tcp_flags) as flags, union(dstport) as dstports +} + +groupfilter ggf { + bitAND(flags, 0x13) = 0x13 +} + +merger M { + module m1 
{ + branches C, B, A + A.srcip = B.dstip + A.srcports = B.dstports + A.bytes < B.bytes + B oi A OR B d A + } + export m1 +} + +ungrouper U {} + +"./netflow-trace.h5" -> S +S branch A -> www_req -> g_www_req -> ggf -> M +S branch B -> www_res -> g_www_res -> ggf -> M +S branch C -> www_res1 -> g_www_res1 -> ggf -> M +M->U->"./ungroped.h5" diff --git a/https-flows.flw b/https-flows.flw new file mode 100644 index 0000000..9e4a2cf --- /dev/null +++ b/https-flows.flw @@ -0,0 +1,44 @@ +splitter S {} + +filter fil_dstport { + dstport = 443 +} + +filter fil_srcport { + srcport = 443 +} + +grouper g_fil_dstport { + module g1 { + } + aggregate sum(bytes) as bytes, bitOR(tcp_flags) as flags +# aggregate srcip, dstip, sum(bytes) as bytes, count(rec_id) as n, +# bitOR(tcp_flags) as flags, union(srcport) as srcports +} + +grouper g_fil_srcport { + module g1 { + } + aggregate sum(bytes) as bytes, bitOR(tcp_flags) as flags +# aggregate srcip, dstip, sum(bytes) as bytes, count(rec_id) as n, +# bitOR(tcp_flags) as flags, union(dstport) as dstports +} + +groupfilter ggf { + bitAND(flags, 0x13) = 0x13 +} + +merger M { + module m1 { + branches B, A + A m B delta 1440min + } + export m1 +} + +ungrouper U {} + +"./netflow-trace.h5" -> S +S branch A -> fil_dstport -> g_fil_dstport -> ggf -> M +S branch B -> fil_srcport -> g_fil_srcport -> ggf -> M +M->U->"./ungroped.h5" diff --git a/merger.py b/merger.py new file mode 100644 index 0000000..b4d4833 --- /dev/null +++ b/merger.py @@ -0,0 +1,190 @@ + + +class MergerStorage(object): + def __init__(self, id, tuples_table, record_class): + self.id = id + self.tuples_table = tuples_table + self.RecordClass = record_class + + def put(self, gr_rec_tuple): + self.tuples_table.append(self.RecordClass(*gr_rec_tuple)) + + def flush(self): + self.tuples_table.flush() + + +class MergerRule(object): + def __init__(self, op, args, br_to_record): + # The records are changed externally from branches: + self.br_to_record = br_to_record + self.args = args + 
self.op = op + + def match(self): + # The records are changed externally by another object + args = [] + for arg in self.args: + if type(arg) is MergerRule: + args.append(arg.match()) + elif type(arg) is tuple: + br, field = arg + record = self.br_to_record[br] + if field: + # normal rule get field of the record + args.append(getattr(record, field)) + else: + # allen rule, argument is the record + args.append(record) + else: + args.append(arg) + return self.op(*args) + +class Reject(Exception): + pass + +class Accept(Exception): + pass + +# This class represents only the first branch loop and no nested loops, unlike MergerBranch class +class Merger(object): + def __init__ (self, name, br_name, records, name_to_branch, + next_branches_names, export_branches, br_to_record, + index, index_rules, rules, merger_table): + self.name = name + self.merger_table = merger_table + self.records = records + self.export_branches = export_branches + self.br_name = br_name + self.name_to_branch = name_to_branch + self.rules = rules + self.index = index + self.br_to_record = br_to_record + self.next_branches_names = next_branches_names + self.remaining_rec = dict((name, None) for name + in next_branches_names) + self.index_rules = index_rules + + @property + def next_branch(self): + if not self.next_branches_names: + return False + return self.name_to_branch[self.next_branches_names[0]] + + def match(self): + for rule in self.rules: + if not rule.match(): + return False + return True + + def pass_allen_indices_down(self, record): + new_br_remaining_rec = {} + for rules in self.index_rules: + br_name = rules[0].target + rec_set = set() + branch = self.name_to_branch[br_name] + index = branch.index + for rule in rules: + interval = rule(record) + rec_set.update(index.get_interval_records(*interval)) + + # note {}.get(k) return none if {} has no key k + set_from_parent = self.remaining_rec[br_name] + if set_from_parent: + # there is a set of records defined by parent + # do an 
intersection + new_br_remaining_rec[br_name] = rec_set & set_from_parent + else: + # no set from parent, just add this rec_set + new_br_remaining_rec[br_name] = rec_set + + # pass to next branch + if len(new_br_remaining_rec) == 0: + self.next_branch.remaining_rec = self.remaining_rec + else: + self.next_branch.remaining_rec = new_br_remaining_rec +# print "passing",self.next_branch.remaining_rec + + + def go(self): + for rec in self.records.record_reader: + self.br_to_record[self.br_name] = rec + self.pass_allen_indices_down(rec) + self.next_branch.next() + print "Finished merging branches: ", + print [self.br_name] + self.next_branches_names + self.merger_table.flush() + self.merger_table.tuples_table.close() + +class MergerBranch(Merger): + def __init__ (self, br_name, records, name_to_branch, next_branches_names, + export_branches, br_to_record ,index, index_rules, rules, + merger_table): + Merger.__init__(self, None, br_name, records, name_to_branch, + next_branches_names, export_branches, br_to_record, + index, index_rules, rules, merger_table) + + def next(self): + remaining = self.remaining_rec[self.br_name] + for rec in self.records.record_reader.read_rows_list(remaining): + self.br_to_record[self.br_name] = rec + if not self.match(): + continue + + self.pass_allen_indices_down(rec) + try: + self.next_branch.next() + except Accept: + # the reject modules did not reject this tuple + res = tuple(self.br_to_record[br].rec_id for br + in self.export_branches) + self.merger_table.put(res) + except Reject: + # this tuple matched reject module so we go on + pass + +class MergerLastBranch(Merger): + def __init__ (self, br_name, records, name_to_branch, next_branches_names, + export_branches, br_to_record ,index, index_rules, rules, + merger_table): + Merger.__init__(self, None, br_name, records, name_to_branch, + next_branches_names, export_branches, br_to_record, + index, index_rules, rules, merger_table) + def next(self): + remaining = 
self.remaining_rec[self.br_name] + for rec in self.records.record_reader.read_rows_list(remaining): + self.br_to_record[self.br_name] = rec + if not self.match(): + continue + + # last branch and no reject branches + # append the record + res = tuple(self.br_to_record[br].rec_id for br + in self.export_branches) + self.merger_table.put(res) + +class MergerRejectBranch(Merger): + def __init__ (self, br_name, records, name_to_branch, next_branches_names, + export_branches, br_to_record ,index, index_rules, rules, + merger_table): + Merger.__init__(self, None, br_name, records, name_to_branch, + next_branches_names, export_branches, br_to_record, + index, index_rules, rules, merger_table) + def next(self): + remaining = self.remaining_rec[self.br_name] + for rec in self.records.record_reader.read_rows_list(remaining): + self.br_to_record[self.br_name] = rec + if self.match(): + raise Reject # goes all the way up to last normal branch + else: + try: + if self.next_branch: + self.pass_allen_indices_down(rec) + self.next_branch.next() + else: + # this is the last branch, so go on + pass + except Accept: + # this Accept is from lower reject-branch so just + # go on and raise Accept when this branch finishes + pass + raise Accept diff --git a/merger.pyc b/merger.pyc new file mode 100644 index 0000000..95b4822 Binary files /dev/null and b/merger.pyc differ diff --git a/merger_validator.py b/merger_validator.py new file mode 100644 index 0000000..a6a5000 --- /dev/null +++ b/merger_validator.py @@ -0,0 +1,505 @@ +from validator_common import * +from copy import deepcopy +from tables import UIntCol +from merger import MergerStorage +from merger import Merger as MergerImpl +from merger import MergerBranch as MergerBranchImpl +from merger import MergerLastBranch as MergerLastBranchImpl +from merger import MergerRejectBranch as MergerRejectBranchImpl +from merger import MergerRule as MergerRuleImpl +import itertools +import allen_ops +import pytables +import record +import 
options + +class MergerValidator(object): + def __init__(self, parser, gr_filter_validator): + self.parser = parser + self.gr_filter_validator = gr_filter_validator + self.mergers = deepcopy(parser.mergers) + # The last field returns a list of the present fields for each branch + # ('rec_id', 'etime', 'stime', 'records', 'srcip', 'dstip', 'bytes', 'n', 'flags', 'srcports') + # ('rec_id', 'etime', 'stime', 'records', 'srcip', 'dstip', 'bytes', 'n', 'flags', 'dstports') + self.branches_fields = gr_filter_validator.branches_fields + # A simple dictionary mapptin of branch name to a GroupFilter + # {'A': , 'B': } + self.br_name_to_gr_filter = gr_filter_validator.br_name_to_gr_filter + # Checks that all the defined merger modules are actually exported + # Returns a dictionay of a merger name and module implementation + self.megers_export_modules = self.find_mergers_export_modules() + # Returns the size of the field type of the 'records' field, 4 bytes + self.id_size = self.get_id_size() + self.impl = self.get_mergers_impl() + + # Returns the size of the field type of the 'records' field, 4 bytess + def get_id_size(self): + rec_reader = self.gr_filter_validator.impl[0].records + field_types = dict(zip(rec_reader.group_record_fields, + rec_reader.group_record_types)) + id_size = field_types['records'].itemsize + return id_size + + # Check for duplicate merger names + def check_duplicate_merger_names(self): + duplicates = {} + for merger in self.mergers: + old_val = duplicates.setdefault(merger.name, 0) + duplicates[merger.name] = old_val + 1 + + duplicate_names = [k for k,v in duplicates.iteritems() if v > 1] + if len(duplicate_names) > 0: + msg = "Merger(s) %s"%duplicate_names + msg += " is/are all defined more than once." 
+ raise SyntaxError(msg) + + # Check for duplicate module names + def check_duplicate_module_names(self, merger): + duplicates = {} + for module in merger.modules: + old_val = duplicates.setdefault(module.name, 0) + duplicates[module.name] = old_val + 1 + + duplicate_names = [k for k,v in duplicates.iteritems() if v > 1] + if len(duplicate_names) > 0: + msg = "Module(s) %s"%duplicate_names + msg += " is/are all defined more than once in merger" + msg += " %s."%merger.name + raise SyntaxError(msg) + + + # Checks that all the defined merger modules are actually exported + # Returns a dictionay of a merger name and module implementation + def find_mergers_export_modules(self): + merger_to_export_module = {} + for merger in self.mergers: + exp = None + for module in merger.modules: + if merger.export == module.name: + exp = module + break + + if exp: + merger_to_export_module[merger.name] = exp +# print merger_to_export_module + else: + msg = "Merger %s"%merger.name + msg += " export module %s is not defined."%merger.export + + return merger_to_export_module + +#--------------------------------------ALLEN CHECKS-------------------------------------# +#All the operations on rules are around a sample set like: {'M': Module('m1', 38, [[Rule('EQ', 40, [Field('A.srcip'), Field('B.dstip')], False)], [Rule('EQ', 41, [Field('A.srcports'), Field('B.dstports')], False)], [Rule('LT', 42, [Field('A.bytes'), Field('B.bytes')], False)], [AllenRule('oi', 43, [Field('B'), Field('A')], False), AllenRule('d', 43, [Field('B'), Field('A')], False)]], ['B', 'A'])} + + #Returns only the Allen rules + def iterate_module_allen_op_groups(self, merger): + for module in merger.modules: + for rules in module.rules: + if type(rules[0]) is not AllenRule: + continue + else: + for op in rules: + yield op + + # Orders allen operations and the branches that they influence in a reverse order, if not already so + def order_allen_ops_args(self, merger): + order = 
self.get_merger_branches_order(merger)#Orders merger branches, exported module's branches being first + arg_combinaions = tuple(itertools.combinations(order, 2))#combinations('ABCD', 2) --> AB AC AD BC BD CD + for allen_op in self.iterate_module_allen_op_groups(merger):#Returns only the Allen rules + first, second = allen_op.args[:2] # Returns Field('B') Field('A') + op = allen_op.op # operations like oi, d + if (first.name, second.name) not in arg_combinaions: + allen_op.args = [second, first] + allen_op.args[2:]# reverse names + allen_op.op = allen_ops.inv_op_str(op)# and operations + + # A number of different checks of the AllenRule + def check_allen_ops(self, merger): + allen_arg_pairs = [] + arg_pairs_to_line = {} + for module in merger.modules: + for rules in module.rules: + if type(rules[0]) is not AllenRule: + continue + + first_arg = rules[0].args[0].name # Get the branch names influenced by the AllenRule + second_arg = rules[0].args[1].name + line = rules[0].line + order = (first_arg, second_arg) + allen_arg_pairs.append(order)# [('B', 'A')] + + self.check_allen_satisfiability(arg_pairs_to_line, order, line) + self.check_allen_consistency(first_arg, second_arg, rules) + self.check_allen_deltas(rules) + + self.check_allen_reachability(allen_arg_pairs, merger) + + # The following 3 methods run different tests on the allen arguments and rules + def check_allen_satisfiability(self, arg_pairs_to_line, order, line): + if arg_pairs_to_line.has_key(order): + msg = "Unsatisfiable Allen op group. 
" + msg += "All allen ops concerning a pair of branches should" + msg += " be connected with and OR into a single group " + msg += "within a single module.\n" + msg += "Argument pair %s on line %s"%(order, line) + msg += " is also used on line %s."%arg_pairs_to_line[order] + raise SyntaxError(msg) + else: + arg_pairs_to_line[order] = line + def check_allen_consistency(self, first_arg, second_arg, rules): + for al_op in rules: + first = al_op.args[0].name + second = al_op.args[1].name + + if (first != first_arg or second != second_arg): + msg = "Inconsistent group of Allen statements " + msg += "on line %s"%rules[0].line + msg += ": %s, %s.\n"%(first, second) + msg += "All branches in this group should have " + msg += "%s and %s"%(first_arg, second_arg) + msg += " as left and righthand side arguments " + msg += "respectively." + raise SyntaxError(msg) + def check_allen_deltas(self, rules): + for al_op in rules: + if al_op.op == 'LT' or al_op.op == 'GT': + if len(al_op.args) < 3: + msg = "Allen op < or > on line %s "%al_op.line + msg += " should have delta explicitly stated." 
+ raise SyntaxError(msg) + # A check for reachability of subsequent branches from the first one + def check_allen_reachability(self, allen_arg_pairs, merger): + br_order = self.get_merger_branches_order(merger) + # check reachability through allen index from initial branch + # of export module: + reachable = br_order[0:1] # list of first branch of exp module + unreachable = br_order[1:] + change = True + while(change): + change = False + for arg1, arg2 in allen_arg_pairs: + if arg1 in reachable and arg2 in unreachable: + unreachable.remove(arg2) + reachable.append(arg2) + change = True + if len(unreachable) > 0: + msg = "Branch(es): %s"%unreachable + msg += " in merger %s"%merger.name + msg += " is/are unreachable through an allen op or chain of" + msg += " allen ops from the first branch of the exported module" + raise SyntaxError(msg) +#--------------------------------------END ALLEN CHECKS---------------------------------# + + # Orders the merger modules s.t. the exported module comes first + def order_modules(self): + for merger in self.mergers: + exp_module = self.megers_export_modules[merger.name] + new_modules_order = [exp_module] + new_modules_order += [m for m in merger.modules if m != exp_module] + merger.modules = new_modules_order + + # Checks that the modules are interconnected among each other with at least one branch + def check_for_disjoint_modules(self): + for merger in self.mergers: + exp_module = self.megers_export_modules[merger.name] + exp_branches = set(exp_module.branches) + for module in merger.modules: + branches = set(module.branches) + # NOTE & is set intersection + if len(exp_branches & branches) < 1: + msg = "Merger module %s.%s"%(merger.name,module.name) + msg += " in has no overlaping branches with the" + msg += " export module." 
+ raise SyntaxError(msg) + + + # Check the validity of the AllenRule, by seeing if the branch names are all defined + def check_branch_id_ref(self, rule, module_branches): + for arg in rule.args: + if type(arg) is Field: + id_ref = arg.name + if id_ref not in self.br_name_to_gr_filter.keys(): + msg = 'Branch %s referenced on line'%id_ref + msg += ' %s is not defined.'%rule.line + raise SyntaxError(msg) + if id_ref not in module_branches: + msg = 'Branch %s referenced on line'%id_ref + msg += " %s "%rule.line + msg += "is not in module's branches statement." + raise SyntaxError(msg) + + # Check the validity of the Rule, GrouperRule and statements like A.bytes + def check_qid_field_ref(self, rule, module_branches): + for arg in rule.args: + if type(arg) is Field: + qid_field = arg.name + branch, _, field = qid_field.partition('.') #Separates statements like A.bytes + try: + if field not in self.branches_fields[branch]: + msg = 'Wrong field %s on line %s. '%(qid_field, + rule.line) + msg += 'Branch %s does not have field %s.'%(branch, + field) + raise SyntaxError(msg) + except KeyError: + msg = 'Branch %s referenced on line'%branch + msg += ' %s is not defined'%rule.line + raise SyntaxError(msg) + if branch not in module_branches: + msg = 'Branch %s referenced on line'%branch + msg += " %s "%rule.line + msg += "is not in module's branches statement." 
+ raise SyntaxError(msg) + + # Orders merger branches with the exported module's branches being first + def get_merger_branches_order(self, merger): + br_order = [] + # first add export module + for module in merger.modules: + if module.name == merger.export: + for br in module.branches: + if br not in br_order: + br_order.append(br) + + # add all the others: + for module in merger.modules: + for br in module.branches: + if br not in br_order: + br_order.append(br) + + return br_order + + # + def order_merger_rules(self, merger): + """ + Produces mapping between incrementally larger available branches tuples + (A,B,C,etc) ordered as they will appear in the implementation. + """ + br_order = self.get_merger_branches_order(merger) + needed_brs_to_rule = {} + for module in merger.modules: + replace_with_vals(module) + replace_bound_rules(module) + for rules in module.rules: + rule_branches = self.get_rule_needed_branches(rules[0]) + + ordered_branches = tuple(br for br in br_order + if br in rule_branches) + + if len(rules) > 1: + rule = Rule('or_op', 0, rules) + else: + rule = rules[0] + needed_brs_to_rule.setdefault(ordered_branches, + []).append(rule) + + avail_to_rules = {} + tup = () + # create sets - needed for the set intersection operation + needed_sets = map(set, needed_brs_to_rule.keys()) + # incrementaly add branches to the tuple of available branches + # and check which rules have their branch needs satisfied + for br in br_order: + tup += (br,) + # find how many of the needed branches are in this tuple + # of branches. It makes elementwise intesection of the sets + # of the needed branches and the tuple of available branches + intersect = map(set(tup).intersection , needed_sets ) + for el, intersection, key in zip(needed_sets , intersect, + needed_brs_to_rule.keys()): + if len(intersection) == len(el): + # Lenght is the same, which means all needed branches + # are present. 
Remove these elements, take the rules from + # the needed_brs_to_rule and delete the key their to + # keep the zip() in sync + needed_sets.remove(el) + avail_to_rules[tup] = needed_brs_to_rule[key] + del needed_brs_to_rule[key] + return avail_to_rules + + # + def get_rule_needed_branches(self, rule): + args_list = set() + for sub_rule in iterate_subrules(rule): + for arg in sub_rule.args: + if type(arg) is Field: + args_list.add(arg.name) + + for arg in rule.args: + if type(arg) is Field: + args_list.add(arg.name) + + if type(rule) is AllenRule: + return list(args_list) + + else: + return [qid.partition('.')[0] for qid in args_list] + + + # Validates the correctness of the merger stage + def validate(self): + self.check_duplicate_merger_names() + for merger in self.mergers: + self.check_duplicate_module_names(merger) + for module in merger.modules: + # Checks the whole rule list to see that all + # the rules fall into [Rule, GrouperRule, AllenRule] + # Returns the actual rules + for rule in iterate_rules(module): + # Checks that all the rule entries are correctly specified + if type(rule) is AllenRule: + self.check_branch_id_ref(rule, module.branches) + else: + self.check_qid_field_ref(rule, module.branches) + # Orders allen operations and the branches that they influence in a reverse order + self.order_allen_ops_args(merger) + # Performs several checks on the branches and the operations (consistency, reachability, etc.) + self.check_allen_ops(merger) + + # Orders the merger modules s.t. the exported module comes first + self.order_modules() + # Checks that the modules are interconnected among each other with at least one branch + self.check_for_disjoint_modules() + + + # Get the allen indexing operations for each branch. + def get_branches_allen_index_ops(self, merger): + """ + Get the allen indexing operations for each branch. 
+ """ + br_to_allen_ind_ops = {} + for module in merger.modules: + for rules in module.rules: + if type(rules[0]) != AllenRule: + continue + br = rules[0].args[0].name + br_to_allen_ind_ops.setdefault(br, []).append(rules) + return br_to_allen_ind_ops + + # + def get_rule_impl(self, rule, br_to_record): + if type(rule) == AllenRule: + op = find_op(rule, module='allen_ops') + args = [ (arg.name, None) + if type(arg) == Field else arg + for arg in rule.args] + else: + args = [] + op = find_op(rule) + for arg in rule.args: + if type(arg) == Rule: + arg_impl = self.get_rule_impl(arg, br_to_record) + elif type(arg) == Field: + branch, _, field = arg.name.partition('.') + arg_impl = (branch, field) + else: + arg_impl = arg + + args.append(arg_impl) + return MergerRuleImpl(op, args, br_to_record) + + # Create indexing rules implementation for AllenRules + def get_index_rule_impl(self, rules): + res = [] + for or_rules in rules: + or_rules_impl = [] + for rule in or_rules: + op = find_op(rule, 'allen_index') + args = [arg.name if type(arg) == Field else arg + for arg in rule.args] + # replace with values + args = [arg.value if type(arg) == Arg else arg + for arg in args] + #[, ] + or_rules_impl.append(op(*args)) + res.append(or_rules_impl) + return res + + # Creates a file MergedM.h5 for further storage of the merged files + def get_merger_table_impl(self, merger): + fields = self.megers_export_modules[merger.name].branches + types = [UIntCol(self.id_size) for _ in fields] + field_types = dict(zip(fields,types)) + recordClass = record.get_record_class(fields, types) + # TODO fix file names + fname = options.temp_path + options.merger_file_prefix + fname += merger.name + ".h5" + if options.delete_temp_files: if_exists_delete(fname) + pytables.create_table_file(fname, field_types) + mergerTable = FlowRecordsTable(fname) + + return MergerStorage(merger.name, mergerTable, recordClass) + + # Actual implementation of the merger stage + def get_merger_impl(self, merger): + # 
Create merger storage + merger_table = self.get_merger_table_impl(merger) + + # Create indexing rules implementation + br_to_index_rule_impl = {} + +# {'B': [[AllenRule('oi', 43, [Field('B'), Field('A')], False), AllenRule('d', 43, [Field('B'), Field('A')], False)]]} + for br, rules in self.get_branches_allen_index_ops(merger).iteritems(): + br_to_index_rule_impl[br] = self.get_index_rule_impl(rules)# List of allen index rules implemented + + for br in self.get_merger_branches_order(merger):#orders branches with the exported branch being first + if br not in br_to_index_rule_impl.keys(): + br_to_index_rule_impl[br] = [] + + # some "globals" shared among branches or needed for their creation + needed_brs = self.order_merger_rules(merger) # Re-orders the rules as they will appear in the implementation + tup = () # tuple of available branches + name = merger.name + br_order = self.get_merger_branches_order(merger) # Returns reversely-ordered branch names of the merger + export_branches = self.megers_export_modules[merger.name].branches # Returns branch names contained in the export module + br_to_record = {} + name_to_branch = {} + merger_impl = None + for br_name in br_order: # For each branch in the ordered branch set + tup += (br_name,) + next_branches_names = [br for br in br_order if br not in tup] + records = self.br_name_to_gr_filter[br_name] # Group-filters associated with each branch + index_rules = br_to_index_rule_impl[br_name] # Allen index rule associated with each branch + index = records.index # Time index object + if len(tup)<2: # If tuple contains only one branch, then execute the initial Merger class + # first branch + rules = [] + impl = MergerImpl(name, br_name, records, name_to_branch, + next_branches_names, export_branches, + br_to_record, index, index_rules, rules, + merger_table) + merger_impl = impl + else: + unimpl_rules = needed_brs[tup] + rules = [self.get_rule_impl(rule, br_to_record) + for rule in unimpl_rules] + if br_name not in 
export_branches: + # Reject branch + impl = MergerRejectBranchImpl(br_name, records, + name_to_branch, next_branches_names, + export_branches, br_to_record, index, + index_rules, rules, merger_table) + + elif not next_branches_names: + # Last non-rejecting branch + impl = MergerLastBranchImpl(br_name, records, + name_to_branch, next_branches_names, + export_branches, br_to_record, index, + index_rules, rules, merger_table) + + else: + # For normal middle branches execute the MergerBranch class + impl = MergerBranchImpl(br_name, records, name_to_branch, + next_branches_names, export_branches, + br_to_record, index, index_rules, + rules, merger_table) + + name_to_branch[br_name] = impl + + return merger_impl + + def get_mergers_impl(self): + self.validate() + mergers_impl = [self.get_merger_impl(merger) + for merger in self.mergers] + + return mergers_impl diff --git a/merger_validator.pyc b/merger_validator.pyc new file mode 100644 index 0000000..92114e5 Binary files /dev/null and b/merger_validator.pyc differ diff --git a/netflow-trace.h5 b/netflow-trace.h5 new file mode 100644 index 0000000..50d6464 Binary files /dev/null and b/netflow-trace.h5 differ diff --git a/operators.py b/operators.py new file mode 100644 index 0000000..83074b3 --- /dev/null +++ b/operators.py @@ -0,0 +1,111 @@ +import options +from socket import getprotobyname + + +if options.import_ops: + external_import = __import__(options.import_ops) + +def NOT(op): + def not_op(*args): + op_result = op(*args) + return not op_result + + return not_op + +def and_op(*args, **kwargs): + res = True + + for arg in args: + res = res and arg + + for arg in kwargs.values(): + res = res and arg + + return res + +def bitAND(*args): + res = args[0] + + for arg in args[1:]: + res &= arg + + return res + +def bitOR(*args): + res = args[0] + + for arg in args[1:]: + res |= arg + + return res + +def or_op(*args, **kwargs): + res = False + + for arg in args: + res = res or arg + + for arg in kwargs.values(): + res = 
res or arg + + return res + + +def protocol(name): + return getprotobyname(name) + +def SUM(*args): + sum = 0 + for arg in args: + sum += arg + return sum + +def EQ(*args): + prev_arg = args[0] + result = True + for arg in args[1:]: + result = result and prev_arg == arg + prev_arg = arg + return result + +def LT(*args): + prev_arg = args[0] + result = True + for arg in args[1:]: + result = result and prev_arg < arg + prev_arg = arg + return result + +def GT(*args): + prev_arg = args[0] + result = True + for arg in args[1:]: + result = result and prev_arg > arg + prev_arg = arg + return result + +def GTEQ(*args): + prev_arg = args[0] + result = True + for arg in args[1:]: + result = result and prev_arg >= arg + prev_arg = arg + return result + +def LTEQ(*args): + prev_arg = args[0] + result = True + for arg in args[1:]: + result = result and prev_arg <= arg + prev_arg = arg + return result + +def IN(*args): + last_arg = args[-1] # probably subnet mask + result = True + for arg in args[:-1]: + result = result and arg & last_arg + return result + +def true(*args): + return True \ No newline at end of file diff --git a/operators.pyc b/operators.pyc new file mode 100644 index 0000000..cf567e8 Binary files /dev/null and b/operators.pyc differ diff --git a/options.py b/options.py new file mode 100644 index 0000000..a6fe63e --- /dev/null +++ b/options.py @@ -0,0 +1,19 @@ +import_ops = "custops" +import_grouper_ops = None + + +delete_temp_files = True +time_index_interval_ms = 5000 +unsat_delta_threshold_mul = 10 +max_unsatisfiable_deltas = 20 + +do_not_expand_groups = False + +temp_path = "./flowy-run/" +import os +try: + os.mkdir(temp_path) +except OSError: + pass +groups_file_prefix = "Groups" +merger_file_prefix = "Merged" diff --git a/options.pyc b/options.pyc new file mode 100644 index 0000000..9e3b8ea Binary files /dev/null and b/options.pyc differ diff --git a/parser.out b/parser.out new file mode 100644 index 0000000..ec33226 --- /dev/null +++ b/parser.out @@ -0,0 
+1,4298 @@ +Created by PLY version 3.2 (http://www.dabeaz.com/ply) + +Grammar + +Rule 0 S' -> file +Rule 1 file -> pipeline_stage_1n +Rule 2 pipeline_stage_1n -> pipeline_stage pipeline_stage_1n +Rule 3 pipeline_stage_1n -> +Rule 4 pipeline_stage -> splitter +Rule 5 pipeline_stage -> filter +Rule 6 pipeline_stage -> composite_filter +Rule 7 pipeline_stage -> branch +Rule 8 pipeline_stage -> ungrouper +Rule 9 pipeline_stage -> grouper +Rule 10 pipeline_stage -> group_filter +Rule 11 pipeline_stage -> merger +Rule 12 splitter -> splitterKeyword id { } +Rule 13 filter -> filterKeyword id { filter_rule_1n } +Rule 14 composite_filter -> filterKeyword id { filter_ref_rule_1n } +Rule 15 group_filter -> groupFilterKeyword id { filter_rule_1n } +Rule 16 filter_rule_1n -> filter_rule filter_rule_1n +Rule 17 filter_rule_1n -> +Rule 18 filter_rule -> or_rule +Rule 19 filter_ref_rule_1n -> filter_ref_rule filter_ref_rule_1n +Rule 20 filter_ref_rule_1n -> filter_ref_rule +Rule 21 filter_ref_rule -> or_id +Rule 22 or_id -> not_id opt_or_id +Rule 23 opt_or_id -> ORKeyword not_id opt_or_id +Rule 24 opt_or_id -> +Rule 25 not_id -> NOTKeyword id +Rule 26 not_id -> id +Rule 27 or_rule -> rule_or_not opt_rule +Rule 28 opt_rule -> ORKeyword rule_or_not opt_rule +Rule 29 opt_rule -> +Rule 30 rule_or_not -> rule +Rule 31 rule_or_not -> NOTKeyword rule +Rule 32 rule -> infix_rule +Rule 33 rule -> prefix_rule +Rule 34 infix_rule -> arg op arg +Rule 35 op -> EQ +Rule 36 op -> LT +Rule 37 op -> GT +Rule 38 op -> LTEQ +Rule 39 op -> GTEQ +Rule 40 op -> ML +Rule 41 op -> MG +Rule 42 op -> inKeyword +Rule 43 op -> notinKeyword +Rule 44 prefix_rule -> id ( args ) +Rule 45 prefix_rule -> bitANDKeyword ( args ) +Rule 46 prefix_rule -> bitORKeyword ( args ) +Rule 47 args -> arg , args +Rule 48 args -> arg +Rule 49 args -> +Rule 50 arg -> id +Rule 51 arg -> IPv4 +Rule 52 arg -> IPv6 +Rule 53 arg -> CIDR +Rule 54 arg -> MAC +Rule 55 arg -> int +Rule 56 arg -> float +Rule 57 arg -> hex +Rule 58 arg -> 
prefix_rule +Rule 59 arg -> string +Rule 60 CIDR -> IPv4 / int +Rule 61 CIDR -> IPv6 / int +Rule 62 branch -> id arrow mid_branch +Rule 63 branch -> string arrow mid_branch +Rule 64 branch -> id branchKeyword mid_branch +Rule 65 mid_branch -> id arrow mid_branch +Rule 66 mid_branch -> end_branch +Rule 67 end_branch -> id +Rule 68 end_branch -> string +Rule 69 arrow -> - GT +Rule 70 ungrouper -> ungrouperKeyword id { } +Rule 71 grouper -> grouperKeyword id { module1_n aggregate } +Rule 72 module1_n -> module module1_n +Rule 73 module1_n -> +Rule 74 module -> moduleKeyword id { grouper_rule1_n } +Rule 75 grouper_rule1_n -> grouper_rule grouper_rule1_n +Rule 76 grouper_rule1_n -> +Rule 77 grouper_rule -> id grouper_op id +Rule 78 grouper_rule -> id grouper_op id deltaKeyword delta_arg +Rule 79 grouper_rule -> id grouper_op id rdeltaKeyword delta_arg +Rule 80 grouper_op -> EQ +Rule 81 grouper_op -> LT +Rule 82 grouper_op -> GT +Rule 83 grouper_op -> GTEQ +Rule 84 grouper_op -> LTEQ +Rule 85 delta_arg -> time +Rule 86 delta_arg -> int +Rule 87 time -> int sKeyword +Rule 88 time -> int msKeyword +Rule 89 time -> int minKeyword +Rule 90 aggregate -> aggregateKeyword aggr1_n +Rule 91 aggr1_n -> aggr opt_aggr +Rule 92 opt_aggr -> , aggr opt_aggr +Rule 93 opt_aggr -> +Rule 94 aggr -> aggr_op ( id_or_qid ) asKeyword id +Rule 95 aggr -> id_or_qid asKeyword id +Rule 96 aggr -> id_or_qid +Rule 97 qid -> id . 
id +Rule 98 id_or_qid -> id +Rule 99 id_or_qid -> qid +Rule 100 aggr_op -> minKeyword +Rule 101 aggr_op -> maxKeyword +Rule 102 aggr_op -> sumKeyword +Rule 103 aggr_op -> avgKeyword +Rule 104 aggr_op -> unionKeyword +Rule 105 aggr_op -> countKeyword +Rule 106 aggr_op -> bitANDKeyword +Rule 107 aggr_op -> bitORKeyword +Rule 108 merger -> mergerKeyword id { merger_module1_n export } +Rule 109 merger_module1_n -> merger_module merger_module1_n +Rule 110 merger_module1_n -> +Rule 111 merger_module -> moduleKeyword id { merger_branches merger_rule1_n } +Rule 112 merger_branches -> branchesKeyword branches1_n +Rule 113 branches1_n -> id , branches1_n +Rule 114 branches1_n -> id +Rule 115 export -> exportKeyword id +Rule 116 merger_rule1_n -> merger_rule merger_rule1_n +Rule 117 merger_rule1_n -> +Rule 118 merger_rule -> merger_prefix_rule +Rule 119 merger_rule -> merger_infix_rule +Rule 120 merger_rule -> NOTKeyword merger_prefix_rule +Rule 121 merger_rule -> NOTKeyword merger_infix_rule +Rule 122 merger_infix_rule -> qid_arg op qid_arg +Rule 123 merger_prefix_rule -> id ( qid_args ) +Rule 124 qid_args -> qid_arg , qid_args +Rule 125 qid_args -> qid_arg +Rule 126 qid_args -> +Rule 127 qid_arg -> qid +Rule 128 qid_arg -> IPv4 +Rule 129 qid_arg -> IPv6 +Rule 130 qid_arg -> CIDR +Rule 131 qid_arg -> MAC +Rule 132 qid_arg -> int +Rule 133 qid_arg -> float +Rule 134 qid_arg -> hex +Rule 135 qid_arg -> merger_prefix_rule +Rule 136 qid_arg -> string +Rule 137 merger_rule -> allen_rule opt_or_allen_rule +Rule 138 opt_or_allen_rule -> ORKeyword allen_rule opt_or_allen_rule +Rule 139 opt_or_allen_rule -> +Rule 140 allen_rule -> id allen_op id opt_allen_delta +Rule 141 opt_allen_delta -> deltaKeyword time +Rule 142 opt_allen_delta -> +Rule 143 allen_op -> LT +Rule 144 allen_op -> GT +Rule 145 allen_op -> EQ +Rule 146 allen_op -> mKeyword +Rule 147 allen_op -> miKeyword +Rule 148 allen_op -> oKeyword +Rule 149 allen_op -> oiKeyword +Rule 150 allen_op -> sKeyword +Rule 151 allen_op 
-> siKeyword +Rule 152 allen_op -> dKeyword +Rule 153 allen_op -> diKeyword +Rule 154 allen_op -> fKeyword +Rule 155 allen_op -> fiKeyword +Rule 156 allen_op -> eqKeyword + +Terminals, with rules where they appear + +( : 44 45 46 94 123 +) : 44 45 46 94 123 +, : 47 92 113 124 +- : 69 +. : 97 +/ : 60 61 +EQ : 35 80 145 +GT : 37 69 82 144 +GTEQ : 39 83 +IPv4 : 51 60 128 +IPv6 : 52 61 129 +LT : 36 81 143 +LTEQ : 38 84 +MAC : 54 131 +MG : 41 +ML : 40 +NOTKeyword : 25 31 120 121 +ORKeyword : 23 28 138 +aggregateKeyword : 90 +asKeyword : 94 95 +avgKeyword : 103 +bitANDKeyword : 45 106 +bitORKeyword : 46 107 +branchKeyword : 64 +branchesKeyword : 112 +countKeyword : 105 +dKeyword : 152 +deltaKeyword : 78 141 +diKeyword : 153 +eqKeyword : 156 +error : +exportKeyword : 115 +fKeyword : 154 +fiKeyword : 155 +filterKeyword : 13 14 +float : 56 133 +groupFilterKeyword : 15 +grouperKeyword : 71 +hex : 57 134 +id : 12 13 14 15 25 26 44 50 62 64 65 67 70 71 74 77 77 78 78 79 79 94 95 97 97 98 108 111 113 114 115 123 140 140 +inKeyword : 42 +int : 55 60 61 86 87 88 89 132 +mKeyword : 146 +maxKeyword : 101 +mergerKeyword : 108 +miKeyword : 147 +minKeyword : 89 100 +moduleKeyword : 74 111 +msKeyword : 88 +notinKeyword : 43 +oKeyword : 148 +oiKeyword : 149 +rdeltaKeyword : 79 +sKeyword : 87 150 +siKeyword : 151 +splitterKeyword : 12 +string : 59 63 68 136 +sumKeyword : 102 +ungrouperKeyword : 70 +unionKeyword : 104 +{ : 12 13 14 15 70 71 74 108 111 +} : 12 13 14 15 70 71 74 108 111 + +Nonterminals, with rules where they appear + +CIDR : 53 130 +aggr : 91 92 +aggr1_n : 90 +aggr_op : 94 +aggregate : 71 +allen_op : 140 +allen_rule : 137 138 +arg : 34 34 47 48 +args : 44 45 46 47 +arrow : 62 63 65 +branch : 7 +branches1_n : 112 113 +composite_filter : 6 +delta_arg : 78 79 +end_branch : 66 +export : 108 +file : 0 +filter : 5 +filter_ref_rule : 19 20 +filter_ref_rule_1n : 14 19 +filter_rule : 16 +filter_rule_1n : 13 15 16 +group_filter : 10 +grouper : 9 +grouper_op : 77 78 79 +grouper_rule : 
75 +grouper_rule1_n : 74 75 +id_or_qid : 94 95 96 +infix_rule : 32 +merger : 11 +merger_branches : 111 +merger_infix_rule : 119 121 +merger_module : 109 +merger_module1_n : 108 109 +merger_prefix_rule : 118 120 135 +merger_rule : 116 +merger_rule1_n : 111 116 +mid_branch : 62 63 64 65 +module : 72 +module1_n : 71 72 +not_id : 22 23 +op : 34 122 +opt_aggr : 91 92 +opt_allen_delta : 140 +opt_or_allen_rule : 137 138 +opt_or_id : 22 23 +opt_rule : 27 28 +or_id : 21 +or_rule : 18 +pipeline_stage : 2 +pipeline_stage_1n : 1 2 +prefix_rule : 33 58 +qid : 99 127 +qid_arg : 122 122 124 125 +qid_args : 123 124 +rule : 30 31 +rule_or_not : 27 28 +splitter : 4 +time : 85 141 +ungrouper : 8 + +Parsing method: LALR + +state 0 + + (0) S' -> . file + (1) file -> . pipeline_stage_1n + (2) pipeline_stage_1n -> . pipeline_stage pipeline_stage_1n + (3) pipeline_stage_1n -> . + (4) pipeline_stage -> . splitter + (5) pipeline_stage -> . filter + (6) pipeline_stage -> . composite_filter + (7) pipeline_stage -> . branch + (8) pipeline_stage -> . ungrouper + (9) pipeline_stage -> . grouper + (10) pipeline_stage -> . group_filter + (11) pipeline_stage -> . merger + (12) splitter -> . splitterKeyword id { } + (13) filter -> . filterKeyword id { filter_rule_1n } + (14) composite_filter -> . filterKeyword id { filter_ref_rule_1n } + (62) branch -> . id arrow mid_branch + (63) branch -> . string arrow mid_branch + (64) branch -> . id branchKeyword mid_branch + (70) ungrouper -> . ungrouperKeyword id { } + (71) grouper -> . grouperKeyword id { module1_n aggregate } + (15) group_filter -> . groupFilterKeyword id { filter_rule_1n } + (108) merger -> . mergerKeyword id { merger_module1_n export } + + $end reduce using rule 3 (pipeline_stage_1n -> .) 
+ splitterKeyword shift and go to state 10 + filterKeyword shift and go to state 9 + id shift and go to state 5 + string shift and go to state 13 + ungrouperKeyword shift and go to state 15 + grouperKeyword shift and go to state 4 + groupFilterKeyword shift and go to state 17 + mergerKeyword shift and go to state 14 + + splitter shift and go to state 16 + branch shift and go to state 11 + ungrouper shift and go to state 6 + grouper shift and go to state 7 + merger shift and go to state 8 + pipeline_stage shift and go to state 1 + group_filter shift and go to state 2 + filter shift and go to state 18 + composite_filter shift and go to state 19 + file shift and go to state 3 + pipeline_stage_1n shift and go to state 12 + +state 1 + + (2) pipeline_stage_1n -> pipeline_stage . pipeline_stage_1n + (2) pipeline_stage_1n -> . pipeline_stage pipeline_stage_1n + (3) pipeline_stage_1n -> . + (4) pipeline_stage -> . splitter + (5) pipeline_stage -> . filter + (6) pipeline_stage -> . composite_filter + (7) pipeline_stage -> . branch + (8) pipeline_stage -> . ungrouper + (9) pipeline_stage -> . grouper + (10) pipeline_stage -> . group_filter + (11) pipeline_stage -> . merger + (12) splitter -> . splitterKeyword id { } + (13) filter -> . filterKeyword id { filter_rule_1n } + (14) composite_filter -> . filterKeyword id { filter_ref_rule_1n } + (62) branch -> . id arrow mid_branch + (63) branch -> . string arrow mid_branch + (64) branch -> . id branchKeyword mid_branch + (70) ungrouper -> . ungrouperKeyword id { } + (71) grouper -> . grouperKeyword id { module1_n aggregate } + (15) group_filter -> . groupFilterKeyword id { filter_rule_1n } + (108) merger -> . mergerKeyword id { merger_module1_n export } + + $end reduce using rule 3 (pipeline_stage_1n -> .) 
+ splitterKeyword shift and go to state 10 + filterKeyword shift and go to state 9 + id shift and go to state 5 + string shift and go to state 13 + ungrouperKeyword shift and go to state 15 + grouperKeyword shift and go to state 4 + groupFilterKeyword shift and go to state 17 + mergerKeyword shift and go to state 14 + + splitter shift and go to state 16 + ungrouper shift and go to state 6 + grouper shift and go to state 7 + merger shift and go to state 8 + pipeline_stage shift and go to state 1 + group_filter shift and go to state 2 + filter shift and go to state 18 + composite_filter shift and go to state 19 + branch shift and go to state 11 + pipeline_stage_1n shift and go to state 20 + +state 2 + + (10) pipeline_stage -> group_filter . + + splitterKeyword reduce using rule 10 (pipeline_stage -> group_filter .) + filterKeyword reduce using rule 10 (pipeline_stage -> group_filter .) + id reduce using rule 10 (pipeline_stage -> group_filter .) + string reduce using rule 10 (pipeline_stage -> group_filter .) + ungrouperKeyword reduce using rule 10 (pipeline_stage -> group_filter .) + grouperKeyword reduce using rule 10 (pipeline_stage -> group_filter .) + groupFilterKeyword reduce using rule 10 (pipeline_stage -> group_filter .) + mergerKeyword reduce using rule 10 (pipeline_stage -> group_filter .) + $end reduce using rule 10 (pipeline_stage -> group_filter .) + + +state 3 + + (0) S' -> file . + + + +state 4 + + (71) grouper -> grouperKeyword . id { module1_n aggregate } + + id shift and go to state 21 + + +state 5 + + (62) branch -> id . arrow mid_branch + (64) branch -> id . branchKeyword mid_branch + (69) arrow -> . - GT + + branchKeyword shift and go to state 24 + - shift and go to state 22 + + arrow shift and go to state 23 + +state 6 + + (8) pipeline_stage -> ungrouper . + + splitterKeyword reduce using rule 8 (pipeline_stage -> ungrouper .) + filterKeyword reduce using rule 8 (pipeline_stage -> ungrouper .) 
+ id reduce using rule 8 (pipeline_stage -> ungrouper .) + string reduce using rule 8 (pipeline_stage -> ungrouper .) + ungrouperKeyword reduce using rule 8 (pipeline_stage -> ungrouper .) + grouperKeyword reduce using rule 8 (pipeline_stage -> ungrouper .) + groupFilterKeyword reduce using rule 8 (pipeline_stage -> ungrouper .) + mergerKeyword reduce using rule 8 (pipeline_stage -> ungrouper .) + $end reduce using rule 8 (pipeline_stage -> ungrouper .) + + +state 7 + + (9) pipeline_stage -> grouper . + + splitterKeyword reduce using rule 9 (pipeline_stage -> grouper .) + filterKeyword reduce using rule 9 (pipeline_stage -> grouper .) + id reduce using rule 9 (pipeline_stage -> grouper .) + string reduce using rule 9 (pipeline_stage -> grouper .) + ungrouperKeyword reduce using rule 9 (pipeline_stage -> grouper .) + grouperKeyword reduce using rule 9 (pipeline_stage -> grouper .) + groupFilterKeyword reduce using rule 9 (pipeline_stage -> grouper .) + mergerKeyword reduce using rule 9 (pipeline_stage -> grouper .) + $end reduce using rule 9 (pipeline_stage -> grouper .) + + +state 8 + + (11) pipeline_stage -> merger . + + splitterKeyword reduce using rule 11 (pipeline_stage -> merger .) + filterKeyword reduce using rule 11 (pipeline_stage -> merger .) + id reduce using rule 11 (pipeline_stage -> merger .) + string reduce using rule 11 (pipeline_stage -> merger .) + ungrouperKeyword reduce using rule 11 (pipeline_stage -> merger .) + grouperKeyword reduce using rule 11 (pipeline_stage -> merger .) + groupFilterKeyword reduce using rule 11 (pipeline_stage -> merger .) + mergerKeyword reduce using rule 11 (pipeline_stage -> merger .) + $end reduce using rule 11 (pipeline_stage -> merger .) + + +state 9 + + (13) filter -> filterKeyword . id { filter_rule_1n } + (14) composite_filter -> filterKeyword . id { filter_ref_rule_1n } + + id shift and go to state 25 + + +state 10 + + (12) splitter -> splitterKeyword . 
id { } + + id shift and go to state 26 + + +state 11 + + (7) pipeline_stage -> branch . + + splitterKeyword reduce using rule 7 (pipeline_stage -> branch .) + filterKeyword reduce using rule 7 (pipeline_stage -> branch .) + id reduce using rule 7 (pipeline_stage -> branch .) + string reduce using rule 7 (pipeline_stage -> branch .) + ungrouperKeyword reduce using rule 7 (pipeline_stage -> branch .) + grouperKeyword reduce using rule 7 (pipeline_stage -> branch .) + groupFilterKeyword reduce using rule 7 (pipeline_stage -> branch .) + mergerKeyword reduce using rule 7 (pipeline_stage -> branch .) + $end reduce using rule 7 (pipeline_stage -> branch .) + + +state 12 + + (1) file -> pipeline_stage_1n . + + $end reduce using rule 1 (file -> pipeline_stage_1n .) + + +state 13 + + (63) branch -> string . arrow mid_branch + (69) arrow -> . - GT + + - shift and go to state 22 + + arrow shift and go to state 27 + +state 14 + + (108) merger -> mergerKeyword . id { merger_module1_n export } + + id shift and go to state 28 + + +state 15 + + (70) ungrouper -> ungrouperKeyword . id { } + + id shift and go to state 29 + + +state 16 + + (4) pipeline_stage -> splitter . + + splitterKeyword reduce using rule 4 (pipeline_stage -> splitter .) + filterKeyword reduce using rule 4 (pipeline_stage -> splitter .) + id reduce using rule 4 (pipeline_stage -> splitter .) + string reduce using rule 4 (pipeline_stage -> splitter .) + ungrouperKeyword reduce using rule 4 (pipeline_stage -> splitter .) + grouperKeyword reduce using rule 4 (pipeline_stage -> splitter .) + groupFilterKeyword reduce using rule 4 (pipeline_stage -> splitter .) + mergerKeyword reduce using rule 4 (pipeline_stage -> splitter .) + $end reduce using rule 4 (pipeline_stage -> splitter .) + + +state 17 + + (15) group_filter -> groupFilterKeyword . id { filter_rule_1n } + + id shift and go to state 30 + + +state 18 + + (5) pipeline_stage -> filter . + + splitterKeyword reduce using rule 5 (pipeline_stage -> filter .) 
+ filterKeyword reduce using rule 5 (pipeline_stage -> filter .) + id reduce using rule 5 (pipeline_stage -> filter .) + string reduce using rule 5 (pipeline_stage -> filter .) + ungrouperKeyword reduce using rule 5 (pipeline_stage -> filter .) + grouperKeyword reduce using rule 5 (pipeline_stage -> filter .) + groupFilterKeyword reduce using rule 5 (pipeline_stage -> filter .) + mergerKeyword reduce using rule 5 (pipeline_stage -> filter .) + $end reduce using rule 5 (pipeline_stage -> filter .) + + +state 19 + + (6) pipeline_stage -> composite_filter . + + splitterKeyword reduce using rule 6 (pipeline_stage -> composite_filter .) + filterKeyword reduce using rule 6 (pipeline_stage -> composite_filter .) + id reduce using rule 6 (pipeline_stage -> composite_filter .) + string reduce using rule 6 (pipeline_stage -> composite_filter .) + ungrouperKeyword reduce using rule 6 (pipeline_stage -> composite_filter .) + grouperKeyword reduce using rule 6 (pipeline_stage -> composite_filter .) + groupFilterKeyword reduce using rule 6 (pipeline_stage -> composite_filter .) + mergerKeyword reduce using rule 6 (pipeline_stage -> composite_filter .) + $end reduce using rule 6 (pipeline_stage -> composite_filter .) + + +state 20 + + (2) pipeline_stage_1n -> pipeline_stage pipeline_stage_1n . + + $end reduce using rule 2 (pipeline_stage_1n -> pipeline_stage pipeline_stage_1n .) + + +state 21 + + (71) grouper -> grouperKeyword id . { module1_n aggregate } + + { shift and go to state 31 + + +state 22 + + (69) arrow -> - . GT + + GT shift and go to state 32 + + +state 23 + + (62) branch -> id arrow . mid_branch + (65) mid_branch -> . id arrow mid_branch + (66) mid_branch -> . end_branch + (67) end_branch -> . id + (68) end_branch -> . string + + id shift and go to state 35 + string shift and go to state 34 + + end_branch shift and go to state 33 + mid_branch shift and go to state 36 + +state 24 + + (64) branch -> id branchKeyword . mid_branch + (65) mid_branch -> . 
id arrow mid_branch + (66) mid_branch -> . end_branch + (67) end_branch -> . id + (68) end_branch -> . string + + id shift and go to state 35 + string shift and go to state 34 + + end_branch shift and go to state 33 + mid_branch shift and go to state 37 + +state 25 + + (13) filter -> filterKeyword id . { filter_rule_1n } + (14) composite_filter -> filterKeyword id . { filter_ref_rule_1n } + + { shift and go to state 38 + + +state 26 + + (12) splitter -> splitterKeyword id . { } + + { shift and go to state 39 + + +state 27 + + (63) branch -> string arrow . mid_branch + (65) mid_branch -> . id arrow mid_branch + (66) mid_branch -> . end_branch + (67) end_branch -> . id + (68) end_branch -> . string + + id shift and go to state 35 + string shift and go to state 34 + + end_branch shift and go to state 33 + mid_branch shift and go to state 40 + +state 28 + + (108) merger -> mergerKeyword id . { merger_module1_n export } + + { shift and go to state 41 + + +state 29 + + (70) ungrouper -> ungrouperKeyword id . { } + + { shift and go to state 42 + + +state 30 + + (15) group_filter -> groupFilterKeyword id . { filter_rule_1n } + + { shift and go to state 43 + + +state 31 + + (71) grouper -> grouperKeyword id { . module1_n aggregate } + (72) module1_n -> . module module1_n + (73) module1_n -> . + (74) module -> . moduleKeyword id { grouper_rule1_n } + + aggregateKeyword reduce using rule 73 (module1_n -> .) + moduleKeyword shift and go to state 44 + + module1_n shift and go to state 45 + module shift and go to state 46 + +state 32 + + (69) arrow -> - GT . + + id reduce using rule 69 (arrow -> - GT .) + string reduce using rule 69 (arrow -> - GT .) + + +state 33 + + (66) mid_branch -> end_branch . + + splitterKeyword reduce using rule 66 (mid_branch -> end_branch .) + filterKeyword reduce using rule 66 (mid_branch -> end_branch .) + id reduce using rule 66 (mid_branch -> end_branch .) + string reduce using rule 66 (mid_branch -> end_branch .) 
+ ungrouperKeyword reduce using rule 66 (mid_branch -> end_branch .) + grouperKeyword reduce using rule 66 (mid_branch -> end_branch .) + groupFilterKeyword reduce using rule 66 (mid_branch -> end_branch .) + mergerKeyword reduce using rule 66 (mid_branch -> end_branch .) + $end reduce using rule 66 (mid_branch -> end_branch .) + + +state 34 + + (68) end_branch -> string . + + splitterKeyword reduce using rule 68 (end_branch -> string .) + filterKeyword reduce using rule 68 (end_branch -> string .) + id reduce using rule 68 (end_branch -> string .) + string reduce using rule 68 (end_branch -> string .) + ungrouperKeyword reduce using rule 68 (end_branch -> string .) + grouperKeyword reduce using rule 68 (end_branch -> string .) + groupFilterKeyword reduce using rule 68 (end_branch -> string .) + mergerKeyword reduce using rule 68 (end_branch -> string .) + $end reduce using rule 68 (end_branch -> string .) + + +state 35 + + (65) mid_branch -> id . arrow mid_branch + (67) end_branch -> id . + (69) arrow -> . - GT + + splitterKeyword reduce using rule 67 (end_branch -> id .) + filterKeyword reduce using rule 67 (end_branch -> id .) + id reduce using rule 67 (end_branch -> id .) + string reduce using rule 67 (end_branch -> id .) + ungrouperKeyword reduce using rule 67 (end_branch -> id .) + grouperKeyword reduce using rule 67 (end_branch -> id .) + groupFilterKeyword reduce using rule 67 (end_branch -> id .) + mergerKeyword reduce using rule 67 (end_branch -> id .) + $end reduce using rule 67 (end_branch -> id .) + - shift and go to state 22 + + arrow shift and go to state 47 + +state 36 + + (62) branch -> id arrow mid_branch . + + splitterKeyword reduce using rule 62 (branch -> id arrow mid_branch .) + filterKeyword reduce using rule 62 (branch -> id arrow mid_branch .) + id reduce using rule 62 (branch -> id arrow mid_branch .) + string reduce using rule 62 (branch -> id arrow mid_branch .) + ungrouperKeyword reduce using rule 62 (branch -> id arrow mid_branch .) 
+ grouperKeyword reduce using rule 62 (branch -> id arrow mid_branch .) + groupFilterKeyword reduce using rule 62 (branch -> id arrow mid_branch .) + mergerKeyword reduce using rule 62 (branch -> id arrow mid_branch .) + $end reduce using rule 62 (branch -> id arrow mid_branch .) + + +state 37 + + (64) branch -> id branchKeyword mid_branch . + + splitterKeyword reduce using rule 64 (branch -> id branchKeyword mid_branch .) + filterKeyword reduce using rule 64 (branch -> id branchKeyword mid_branch .) + id reduce using rule 64 (branch -> id branchKeyword mid_branch .) + string reduce using rule 64 (branch -> id branchKeyword mid_branch .) + ungrouperKeyword reduce using rule 64 (branch -> id branchKeyword mid_branch .) + grouperKeyword reduce using rule 64 (branch -> id branchKeyword mid_branch .) + groupFilterKeyword reduce using rule 64 (branch -> id branchKeyword mid_branch .) + mergerKeyword reduce using rule 64 (branch -> id branchKeyword mid_branch .) + $end reduce using rule 64 (branch -> id branchKeyword mid_branch .) + + +state 38 + + (13) filter -> filterKeyword id { . filter_rule_1n } + (14) composite_filter -> filterKeyword id { . filter_ref_rule_1n } + (16) filter_rule_1n -> . filter_rule filter_rule_1n + (17) filter_rule_1n -> . + (19) filter_ref_rule_1n -> . filter_ref_rule filter_ref_rule_1n + (20) filter_ref_rule_1n -> . filter_ref_rule + (18) filter_rule -> . or_rule + (21) filter_ref_rule -> . or_id + (27) or_rule -> . rule_or_not opt_rule + (22) or_id -> . not_id opt_or_id + (30) rule_or_not -> . rule + (31) rule_or_not -> . NOTKeyword rule + (25) not_id -> . NOTKeyword id + (26) not_id -> . id + (32) rule -> . infix_rule + (33) rule -> . prefix_rule + (34) infix_rule -> . arg op arg + (44) prefix_rule -> . id ( args ) + (45) prefix_rule -> . bitANDKeyword ( args ) + (46) prefix_rule -> . bitORKeyword ( args ) + (50) arg -> . id + (51) arg -> . IPv4 + (52) arg -> . IPv6 + (53) arg -> . CIDR + (54) arg -> . MAC + (55) arg -> . int + (56) arg -> . 
float + (57) arg -> . hex + (58) arg -> . prefix_rule + (59) arg -> . string + (60) CIDR -> . IPv4 / int + (61) CIDR -> . IPv6 / int + + } reduce using rule 17 (filter_rule_1n -> .) + NOTKeyword shift and go to state 57 + id shift and go to state 56 + bitANDKeyword shift and go to state 48 + bitORKeyword shift and go to state 58 + IPv4 shift and go to state 70 + IPv6 shift and go to state 71 + MAC shift and go to state 68 + int shift and go to state 51 + float shift and go to state 52 + hex shift and go to state 60 + string shift and go to state 62 + + filter_ref_rule_1n shift and go to state 49 + or_rule shift and go to state 50 + filter_rule shift and go to state 59 + not_id shift and go to state 66 + or_id shift and go to state 63 + rule shift and go to state 67 + infix_rule shift and go to state 53 + rule_or_not shift and go to state 69 + prefix_rule shift and go to state 54 + arg shift and go to state 55 + filter_rule_1n shift and go to state 64 + CIDR shift and go to state 65 + filter_ref_rule shift and go to state 61 + +state 39 + + (12) splitter -> splitterKeyword id { . } + + } shift and go to state 72 + + +state 40 + + (63) branch -> string arrow mid_branch . + + splitterKeyword reduce using rule 63 (branch -> string arrow mid_branch .) + filterKeyword reduce using rule 63 (branch -> string arrow mid_branch .) + id reduce using rule 63 (branch -> string arrow mid_branch .) + string reduce using rule 63 (branch -> string arrow mid_branch .) + ungrouperKeyword reduce using rule 63 (branch -> string arrow mid_branch .) + grouperKeyword reduce using rule 63 (branch -> string arrow mid_branch .) + groupFilterKeyword reduce using rule 63 (branch -> string arrow mid_branch .) + mergerKeyword reduce using rule 63 (branch -> string arrow mid_branch .) + $end reduce using rule 63 (branch -> string arrow mid_branch .) + + +state 41 + + (108) merger -> mergerKeyword id { . merger_module1_n export } + (109) merger_module1_n -> . 
merger_module merger_module1_n + (110) merger_module1_n -> . + (111) merger_module -> . moduleKeyword id { merger_branches merger_rule1_n } + + exportKeyword reduce using rule 110 (merger_module1_n -> .) + moduleKeyword shift and go to state 75 + + merger_module shift and go to state 73 + merger_module1_n shift and go to state 74 + +state 42 + + (70) ungrouper -> ungrouperKeyword id { . } + + } shift and go to state 76 + + +state 43 + + (15) group_filter -> groupFilterKeyword id { . filter_rule_1n } + (16) filter_rule_1n -> . filter_rule filter_rule_1n + (17) filter_rule_1n -> . + (18) filter_rule -> . or_rule + (27) or_rule -> . rule_or_not opt_rule + (30) rule_or_not -> . rule + (31) rule_or_not -> . NOTKeyword rule + (32) rule -> . infix_rule + (33) rule -> . prefix_rule + (34) infix_rule -> . arg op arg + (44) prefix_rule -> . id ( args ) + (45) prefix_rule -> . bitANDKeyword ( args ) + (46) prefix_rule -> . bitORKeyword ( args ) + (50) arg -> . id + (51) arg -> . IPv4 + (52) arg -> . IPv6 + (53) arg -> . CIDR + (54) arg -> . MAC + (55) arg -> . int + (56) arg -> . float + (57) arg -> . hex + (58) arg -> . prefix_rule + (59) arg -> . string + (60) CIDR -> . IPv4 / int + (61) CIDR -> . IPv6 / int + + } reduce using rule 17 (filter_rule_1n -> .) 
+ NOTKeyword shift and go to state 78 + id shift and go to state 77 + bitANDKeyword shift and go to state 48 + bitORKeyword shift and go to state 58 + IPv4 shift and go to state 70 + IPv6 shift and go to state 71 + MAC shift and go to state 68 + int shift and go to state 51 + float shift and go to state 52 + hex shift and go to state 60 + string shift and go to state 62 + + or_rule shift and go to state 50 + filter_rule shift and go to state 59 + rule shift and go to state 67 + infix_rule shift and go to state 53 + rule_or_not shift and go to state 69 + prefix_rule shift and go to state 54 + arg shift and go to state 55 + filter_rule_1n shift and go to state 79 + CIDR shift and go to state 65 + +state 44 + + (74) module -> moduleKeyword . id { grouper_rule1_n } + + id shift and go to state 80 + + +state 45 + + (71) grouper -> grouperKeyword id { module1_n . aggregate } + (90) aggregate -> . aggregateKeyword aggr1_n + + aggregateKeyword shift and go to state 81 + + aggregate shift and go to state 82 + +state 46 + + (72) module1_n -> module . module1_n + (72) module1_n -> . module module1_n + (73) module1_n -> . + (74) module -> . moduleKeyword id { grouper_rule1_n } + + aggregateKeyword reduce using rule 73 (module1_n -> .) + moduleKeyword shift and go to state 44 + + module1_n shift and go to state 83 + module shift and go to state 46 + +state 47 + + (65) mid_branch -> id arrow . mid_branch + (65) mid_branch -> . id arrow mid_branch + (66) mid_branch -> . end_branch + (67) end_branch -> . id + (68) end_branch -> . string + + id shift and go to state 35 + string shift and go to state 34 + + end_branch shift and go to state 33 + mid_branch shift and go to state 84 + +state 48 + + (45) prefix_rule -> bitANDKeyword . ( args ) + + ( shift and go to state 85 + + +state 49 + + (14) composite_filter -> filterKeyword id { filter_ref_rule_1n . } + + } shift and go to state 86 + + +state 50 + + (18) filter_rule -> or_rule . 
+ + NOTKeyword reduce using rule 18 (filter_rule -> or_rule .) + id reduce using rule 18 (filter_rule -> or_rule .) + bitANDKeyword reduce using rule 18 (filter_rule -> or_rule .) + bitORKeyword reduce using rule 18 (filter_rule -> or_rule .) + IPv4 reduce using rule 18 (filter_rule -> or_rule .) + IPv6 reduce using rule 18 (filter_rule -> or_rule .) + MAC reduce using rule 18 (filter_rule -> or_rule .) + int reduce using rule 18 (filter_rule -> or_rule .) + float reduce using rule 18 (filter_rule -> or_rule .) + hex reduce using rule 18 (filter_rule -> or_rule .) + string reduce using rule 18 (filter_rule -> or_rule .) + } reduce using rule 18 (filter_rule -> or_rule .) + + +state 51 + + (55) arg -> int . + + , reduce using rule 55 (arg -> int .) + ) reduce using rule 55 (arg -> int .) + EQ reduce using rule 55 (arg -> int .) + LT reduce using rule 55 (arg -> int .) + GT reduce using rule 55 (arg -> int .) + LTEQ reduce using rule 55 (arg -> int .) + GTEQ reduce using rule 55 (arg -> int .) + ML reduce using rule 55 (arg -> int .) + MG reduce using rule 55 (arg -> int .) + inKeyword reduce using rule 55 (arg -> int .) + notinKeyword reduce using rule 55 (arg -> int .) + ORKeyword reduce using rule 55 (arg -> int .) + NOTKeyword reduce using rule 55 (arg -> int .) + id reduce using rule 55 (arg -> int .) + bitANDKeyword reduce using rule 55 (arg -> int .) + bitORKeyword reduce using rule 55 (arg -> int .) + IPv4 reduce using rule 55 (arg -> int .) + IPv6 reduce using rule 55 (arg -> int .) + MAC reduce using rule 55 (arg -> int .) + int reduce using rule 55 (arg -> int .) + float reduce using rule 55 (arg -> int .) + hex reduce using rule 55 (arg -> int .) + string reduce using rule 55 (arg -> int .) + } reduce using rule 55 (arg -> int .) + + +state 52 + + (56) arg -> float . + + , reduce using rule 56 (arg -> float .) + ) reduce using rule 56 (arg -> float .) + EQ reduce using rule 56 (arg -> float .) + LT reduce using rule 56 (arg -> float .) 
+ GT reduce using rule 56 (arg -> float .) + LTEQ reduce using rule 56 (arg -> float .) + GTEQ reduce using rule 56 (arg -> float .) + ML reduce using rule 56 (arg -> float .) + MG reduce using rule 56 (arg -> float .) + inKeyword reduce using rule 56 (arg -> float .) + notinKeyword reduce using rule 56 (arg -> float .) + ORKeyword reduce using rule 56 (arg -> float .) + NOTKeyword reduce using rule 56 (arg -> float .) + id reduce using rule 56 (arg -> float .) + bitANDKeyword reduce using rule 56 (arg -> float .) + bitORKeyword reduce using rule 56 (arg -> float .) + IPv4 reduce using rule 56 (arg -> float .) + IPv6 reduce using rule 56 (arg -> float .) + MAC reduce using rule 56 (arg -> float .) + int reduce using rule 56 (arg -> float .) + float reduce using rule 56 (arg -> float .) + hex reduce using rule 56 (arg -> float .) + string reduce using rule 56 (arg -> float .) + } reduce using rule 56 (arg -> float .) + + +state 53 + + (32) rule -> infix_rule . + + ORKeyword reduce using rule 32 (rule -> infix_rule .) + NOTKeyword reduce using rule 32 (rule -> infix_rule .) + id reduce using rule 32 (rule -> infix_rule .) + bitANDKeyword reduce using rule 32 (rule -> infix_rule .) + bitORKeyword reduce using rule 32 (rule -> infix_rule .) + IPv4 reduce using rule 32 (rule -> infix_rule .) + IPv6 reduce using rule 32 (rule -> infix_rule .) + MAC reduce using rule 32 (rule -> infix_rule .) + int reduce using rule 32 (rule -> infix_rule .) + float reduce using rule 32 (rule -> infix_rule .) + hex reduce using rule 32 (rule -> infix_rule .) + string reduce using rule 32 (rule -> infix_rule .) + } reduce using rule 32 (rule -> infix_rule .) + + +state 54 + + (33) rule -> prefix_rule . + (58) arg -> prefix_rule . + + ORKeyword reduce using rule 33 (rule -> prefix_rule .) + NOTKeyword reduce using rule 33 (rule -> prefix_rule .) + id reduce using rule 33 (rule -> prefix_rule .) + bitANDKeyword reduce using rule 33 (rule -> prefix_rule .) 
+ bitORKeyword reduce using rule 33 (rule -> prefix_rule .) + IPv4 reduce using rule 33 (rule -> prefix_rule .) + IPv6 reduce using rule 33 (rule -> prefix_rule .) + MAC reduce using rule 33 (rule -> prefix_rule .) + int reduce using rule 33 (rule -> prefix_rule .) + float reduce using rule 33 (rule -> prefix_rule .) + hex reduce using rule 33 (rule -> prefix_rule .) + string reduce using rule 33 (rule -> prefix_rule .) + } reduce using rule 33 (rule -> prefix_rule .) + EQ reduce using rule 58 (arg -> prefix_rule .) + LT reduce using rule 58 (arg -> prefix_rule .) + GT reduce using rule 58 (arg -> prefix_rule .) + LTEQ reduce using rule 58 (arg -> prefix_rule .) + GTEQ reduce using rule 58 (arg -> prefix_rule .) + ML reduce using rule 58 (arg -> prefix_rule .) + MG reduce using rule 58 (arg -> prefix_rule .) + inKeyword reduce using rule 58 (arg -> prefix_rule .) + notinKeyword reduce using rule 58 (arg -> prefix_rule .) + + +state 55 + + (34) infix_rule -> arg . op arg + (35) op -> . EQ + (36) op -> . LT + (37) op -> . GT + (38) op -> . LTEQ + (39) op -> . GTEQ + (40) op -> . ML + (41) op -> . MG + (42) op -> . inKeyword + (43) op -> . notinKeyword + + EQ shift and go to state 95 + LT shift and go to state 93 + GT shift and go to state 88 + LTEQ shift and go to state 91 + GTEQ shift and go to state 89 + ML shift and go to state 90 + MG shift and go to state 87 + inKeyword shift and go to state 92 + notinKeyword shift and go to state 94 + + op shift and go to state 96 + +state 56 + + (26) not_id -> id . + (44) prefix_rule -> id . ( args ) + (50) arg -> id . + + ORKeyword reduce using rule 26 (not_id -> id .) + NOTKeyword reduce using rule 26 (not_id -> id .) + id reduce using rule 26 (not_id -> id .) + } reduce using rule 26 (not_id -> id .) + ( shift and go to state 97 + EQ reduce using rule 50 (arg -> id .) + LT reduce using rule 50 (arg -> id .) + GT reduce using rule 50 (arg -> id .) + LTEQ reduce using rule 50 (arg -> id .) 
+ GTEQ reduce using rule 50 (arg -> id .) + ML reduce using rule 50 (arg -> id .) + MG reduce using rule 50 (arg -> id .) + inKeyword reduce using rule 50 (arg -> id .) + notinKeyword reduce using rule 50 (arg -> id .) + + +state 57 + + (31) rule_or_not -> NOTKeyword . rule + (25) not_id -> NOTKeyword . id + (32) rule -> . infix_rule + (33) rule -> . prefix_rule + (34) infix_rule -> . arg op arg + (44) prefix_rule -> . id ( args ) + (45) prefix_rule -> . bitANDKeyword ( args ) + (46) prefix_rule -> . bitORKeyword ( args ) + (50) arg -> . id + (51) arg -> . IPv4 + (52) arg -> . IPv6 + (53) arg -> . CIDR + (54) arg -> . MAC + (55) arg -> . int + (56) arg -> . float + (57) arg -> . hex + (58) arg -> . prefix_rule + (59) arg -> . string + (60) CIDR -> . IPv4 / int + (61) CIDR -> . IPv6 / int + + id shift and go to state 99 + bitANDKeyword shift and go to state 48 + bitORKeyword shift and go to state 58 + IPv4 shift and go to state 70 + IPv6 shift and go to state 71 + MAC shift and go to state 68 + int shift and go to state 51 + float shift and go to state 52 + hex shift and go to state 60 + string shift and go to state 62 + + prefix_rule shift and go to state 54 + infix_rule shift and go to state 53 + rule shift and go to state 98 + arg shift and go to state 55 + CIDR shift and go to state 65 + +state 58 + + (46) prefix_rule -> bitORKeyword . ( args ) + + ( shift and go to state 100 + + +state 59 + + (16) filter_rule_1n -> filter_rule . filter_rule_1n + (16) filter_rule_1n -> . filter_rule filter_rule_1n + (17) filter_rule_1n -> . + (18) filter_rule -> . or_rule + (27) or_rule -> . rule_or_not opt_rule + (30) rule_or_not -> . rule + (31) rule_or_not -> . NOTKeyword rule + (32) rule -> . infix_rule + (33) rule -> . prefix_rule + (34) infix_rule -> . arg op arg + (44) prefix_rule -> . id ( args ) + (45) prefix_rule -> . bitANDKeyword ( args ) + (46) prefix_rule -> . bitORKeyword ( args ) + (50) arg -> . id + (51) arg -> . IPv4 + (52) arg -> . IPv6 + (53) arg -> . 
CIDR + (54) arg -> . MAC + (55) arg -> . int + (56) arg -> . float + (57) arg -> . hex + (58) arg -> . prefix_rule + (59) arg -> . string + (60) CIDR -> . IPv4 / int + (61) CIDR -> . IPv6 / int + + } reduce using rule 17 (filter_rule_1n -> .) + NOTKeyword shift and go to state 78 + id shift and go to state 77 + bitANDKeyword shift and go to state 48 + bitORKeyword shift and go to state 58 + IPv4 shift and go to state 70 + IPv6 shift and go to state 71 + MAC shift and go to state 68 + int shift and go to state 51 + float shift and go to state 52 + hex shift and go to state 60 + string shift and go to state 62 + + or_rule shift and go to state 50 + filter_rule shift and go to state 59 + rule shift and go to state 67 + infix_rule shift and go to state 53 + rule_or_not shift and go to state 69 + prefix_rule shift and go to state 54 + arg shift and go to state 55 + filter_rule_1n shift and go to state 101 + CIDR shift and go to state 65 + +state 60 + + (57) arg -> hex . + + , reduce using rule 57 (arg -> hex .) + ) reduce using rule 57 (arg -> hex .) + EQ reduce using rule 57 (arg -> hex .) + LT reduce using rule 57 (arg -> hex .) + GT reduce using rule 57 (arg -> hex .) + LTEQ reduce using rule 57 (arg -> hex .) + GTEQ reduce using rule 57 (arg -> hex .) + ML reduce using rule 57 (arg -> hex .) + MG reduce using rule 57 (arg -> hex .) + inKeyword reduce using rule 57 (arg -> hex .) + notinKeyword reduce using rule 57 (arg -> hex .) + ORKeyword reduce using rule 57 (arg -> hex .) + NOTKeyword reduce using rule 57 (arg -> hex .) + id reduce using rule 57 (arg -> hex .) + bitANDKeyword reduce using rule 57 (arg -> hex .) + bitORKeyword reduce using rule 57 (arg -> hex .) + IPv4 reduce using rule 57 (arg -> hex .) + IPv6 reduce using rule 57 (arg -> hex .) + MAC reduce using rule 57 (arg -> hex .) + int reduce using rule 57 (arg -> hex .) + float reduce using rule 57 (arg -> hex .) + hex reduce using rule 57 (arg -> hex .) + string reduce using rule 57 (arg -> hex .) 
+ } reduce using rule 57 (arg -> hex .) + + +state 61 + + (19) filter_ref_rule_1n -> filter_ref_rule . filter_ref_rule_1n + (20) filter_ref_rule_1n -> filter_ref_rule . + (19) filter_ref_rule_1n -> . filter_ref_rule filter_ref_rule_1n + (20) filter_ref_rule_1n -> . filter_ref_rule + (21) filter_ref_rule -> . or_id + (22) or_id -> . not_id opt_or_id + (25) not_id -> . NOTKeyword id + (26) not_id -> . id + + } reduce using rule 20 (filter_ref_rule_1n -> filter_ref_rule .) + NOTKeyword shift and go to state 102 + id shift and go to state 104 + + filter_ref_rule_1n shift and go to state 103 + or_id shift and go to state 63 + filter_ref_rule shift and go to state 61 + not_id shift and go to state 66 + +state 62 + + (59) arg -> string . + + , reduce using rule 59 (arg -> string .) + ) reduce using rule 59 (arg -> string .) + EQ reduce using rule 59 (arg -> string .) + LT reduce using rule 59 (arg -> string .) + GT reduce using rule 59 (arg -> string .) + LTEQ reduce using rule 59 (arg -> string .) + GTEQ reduce using rule 59 (arg -> string .) + ML reduce using rule 59 (arg -> string .) + MG reduce using rule 59 (arg -> string .) + inKeyword reduce using rule 59 (arg -> string .) + notinKeyword reduce using rule 59 (arg -> string .) + ORKeyword reduce using rule 59 (arg -> string .) + NOTKeyword reduce using rule 59 (arg -> string .) + id reduce using rule 59 (arg -> string .) + bitANDKeyword reduce using rule 59 (arg -> string .) + bitORKeyword reduce using rule 59 (arg -> string .) + IPv4 reduce using rule 59 (arg -> string .) + IPv6 reduce using rule 59 (arg -> string .) + MAC reduce using rule 59 (arg -> string .) + int reduce using rule 59 (arg -> string .) + float reduce using rule 59 (arg -> string .) + hex reduce using rule 59 (arg -> string .) + string reduce using rule 59 (arg -> string .) + } reduce using rule 59 (arg -> string .) + + +state 63 + + (21) filter_ref_rule -> or_id . + + NOTKeyword reduce using rule 21 (filter_ref_rule -> or_id .) 
+ id reduce using rule 21 (filter_ref_rule -> or_id .) + } reduce using rule 21 (filter_ref_rule -> or_id .) + + +state 64 + + (13) filter -> filterKeyword id { filter_rule_1n . } + + } shift and go to state 105 + + +state 65 + + (53) arg -> CIDR . + + , reduce using rule 53 (arg -> CIDR .) + ) reduce using rule 53 (arg -> CIDR .) + EQ reduce using rule 53 (arg -> CIDR .) + LT reduce using rule 53 (arg -> CIDR .) + GT reduce using rule 53 (arg -> CIDR .) + LTEQ reduce using rule 53 (arg -> CIDR .) + GTEQ reduce using rule 53 (arg -> CIDR .) + ML reduce using rule 53 (arg -> CIDR .) + MG reduce using rule 53 (arg -> CIDR .) + inKeyword reduce using rule 53 (arg -> CIDR .) + notinKeyword reduce using rule 53 (arg -> CIDR .) + ORKeyword reduce using rule 53 (arg -> CIDR .) + NOTKeyword reduce using rule 53 (arg -> CIDR .) + id reduce using rule 53 (arg -> CIDR .) + bitANDKeyword reduce using rule 53 (arg -> CIDR .) + bitORKeyword reduce using rule 53 (arg -> CIDR .) + IPv4 reduce using rule 53 (arg -> CIDR .) + IPv6 reduce using rule 53 (arg -> CIDR .) + MAC reduce using rule 53 (arg -> CIDR .) + int reduce using rule 53 (arg -> CIDR .) + float reduce using rule 53 (arg -> CIDR .) + hex reduce using rule 53 (arg -> CIDR .) + string reduce using rule 53 (arg -> CIDR .) + } reduce using rule 53 (arg -> CIDR .) + + +state 66 + + (22) or_id -> not_id . opt_or_id + (23) opt_or_id -> . ORKeyword not_id opt_or_id + (24) opt_or_id -> . + + ORKeyword shift and go to state 106 + NOTKeyword reduce using rule 24 (opt_or_id -> .) + id reduce using rule 24 (opt_or_id -> .) + } reduce using rule 24 (opt_or_id -> .) + + opt_or_id shift and go to state 107 + +state 67 + + (30) rule_or_not -> rule . + + ORKeyword reduce using rule 30 (rule_or_not -> rule .) + NOTKeyword reduce using rule 30 (rule_or_not -> rule .) + id reduce using rule 30 (rule_or_not -> rule .) + bitANDKeyword reduce using rule 30 (rule_or_not -> rule .) + bitORKeyword reduce using rule 30 (rule_or_not -> rule .) 
+ IPv4 reduce using rule 30 (rule_or_not -> rule .) + IPv6 reduce using rule 30 (rule_or_not -> rule .) + MAC reduce using rule 30 (rule_or_not -> rule .) + int reduce using rule 30 (rule_or_not -> rule .) + float reduce using rule 30 (rule_or_not -> rule .) + hex reduce using rule 30 (rule_or_not -> rule .) + string reduce using rule 30 (rule_or_not -> rule .) + } reduce using rule 30 (rule_or_not -> rule .) + + +state 68 + + (54) arg -> MAC . + + , reduce using rule 54 (arg -> MAC .) + ) reduce using rule 54 (arg -> MAC .) + EQ reduce using rule 54 (arg -> MAC .) + LT reduce using rule 54 (arg -> MAC .) + GT reduce using rule 54 (arg -> MAC .) + LTEQ reduce using rule 54 (arg -> MAC .) + GTEQ reduce using rule 54 (arg -> MAC .) + ML reduce using rule 54 (arg -> MAC .) + MG reduce using rule 54 (arg -> MAC .) + inKeyword reduce using rule 54 (arg -> MAC .) + notinKeyword reduce using rule 54 (arg -> MAC .) + ORKeyword reduce using rule 54 (arg -> MAC .) + NOTKeyword reduce using rule 54 (arg -> MAC .) + id reduce using rule 54 (arg -> MAC .) + bitANDKeyword reduce using rule 54 (arg -> MAC .) + bitORKeyword reduce using rule 54 (arg -> MAC .) + IPv4 reduce using rule 54 (arg -> MAC .) + IPv6 reduce using rule 54 (arg -> MAC .) + MAC reduce using rule 54 (arg -> MAC .) + int reduce using rule 54 (arg -> MAC .) + float reduce using rule 54 (arg -> MAC .) + hex reduce using rule 54 (arg -> MAC .) + string reduce using rule 54 (arg -> MAC .) + } reduce using rule 54 (arg -> MAC .) + + +state 69 + + (27) or_rule -> rule_or_not . opt_rule + (28) opt_rule -> . ORKeyword rule_or_not opt_rule + (29) opt_rule -> . + + ORKeyword shift and go to state 109 + NOTKeyword reduce using rule 29 (opt_rule -> .) + id reduce using rule 29 (opt_rule -> .) + bitANDKeyword reduce using rule 29 (opt_rule -> .) + bitORKeyword reduce using rule 29 (opt_rule -> .) + IPv4 reduce using rule 29 (opt_rule -> .) + IPv6 reduce using rule 29 (opt_rule -> .) 
+ MAC reduce using rule 29 (opt_rule -> .) + int reduce using rule 29 (opt_rule -> .) + float reduce using rule 29 (opt_rule -> .) + hex reduce using rule 29 (opt_rule -> .) + string reduce using rule 29 (opt_rule -> .) + } reduce using rule 29 (opt_rule -> .) + + opt_rule shift and go to state 108 + +state 70 + + (51) arg -> IPv4 . + (60) CIDR -> IPv4 . / int + + , reduce using rule 51 (arg -> IPv4 .) + ) reduce using rule 51 (arg -> IPv4 .) + EQ reduce using rule 51 (arg -> IPv4 .) + LT reduce using rule 51 (arg -> IPv4 .) + GT reduce using rule 51 (arg -> IPv4 .) + LTEQ reduce using rule 51 (arg -> IPv4 .) + GTEQ reduce using rule 51 (arg -> IPv4 .) + ML reduce using rule 51 (arg -> IPv4 .) + MG reduce using rule 51 (arg -> IPv4 .) + inKeyword reduce using rule 51 (arg -> IPv4 .) + notinKeyword reduce using rule 51 (arg -> IPv4 .) + ORKeyword reduce using rule 51 (arg -> IPv4 .) + NOTKeyword reduce using rule 51 (arg -> IPv4 .) + id reduce using rule 51 (arg -> IPv4 .) + bitANDKeyword reduce using rule 51 (arg -> IPv4 .) + bitORKeyword reduce using rule 51 (arg -> IPv4 .) + IPv4 reduce using rule 51 (arg -> IPv4 .) + IPv6 reduce using rule 51 (arg -> IPv4 .) + MAC reduce using rule 51 (arg -> IPv4 .) + int reduce using rule 51 (arg -> IPv4 .) + float reduce using rule 51 (arg -> IPv4 .) + hex reduce using rule 51 (arg -> IPv4 .) + string reduce using rule 51 (arg -> IPv4 .) + } reduce using rule 51 (arg -> IPv4 .) + / shift and go to state 110 + + +state 71 + + (52) arg -> IPv6 . + (61) CIDR -> IPv6 . / int + + , reduce using rule 52 (arg -> IPv6 .) + ) reduce using rule 52 (arg -> IPv6 .) + EQ reduce using rule 52 (arg -> IPv6 .) + LT reduce using rule 52 (arg -> IPv6 .) + GT reduce using rule 52 (arg -> IPv6 .) + LTEQ reduce using rule 52 (arg -> IPv6 .) + GTEQ reduce using rule 52 (arg -> IPv6 .) + ML reduce using rule 52 (arg -> IPv6 .) + MG reduce using rule 52 (arg -> IPv6 .) + inKeyword reduce using rule 52 (arg -> IPv6 .) 
+ notinKeyword reduce using rule 52 (arg -> IPv6 .) + ORKeyword reduce using rule 52 (arg -> IPv6 .) + NOTKeyword reduce using rule 52 (arg -> IPv6 .) + id reduce using rule 52 (arg -> IPv6 .) + bitANDKeyword reduce using rule 52 (arg -> IPv6 .) + bitORKeyword reduce using rule 52 (arg -> IPv6 .) + IPv4 reduce using rule 52 (arg -> IPv6 .) + IPv6 reduce using rule 52 (arg -> IPv6 .) + MAC reduce using rule 52 (arg -> IPv6 .) + int reduce using rule 52 (arg -> IPv6 .) + float reduce using rule 52 (arg -> IPv6 .) + hex reduce using rule 52 (arg -> IPv6 .) + string reduce using rule 52 (arg -> IPv6 .) + } reduce using rule 52 (arg -> IPv6 .) + / shift and go to state 111 + + +state 72 + + (12) splitter -> splitterKeyword id { } . + + splitterKeyword reduce using rule 12 (splitter -> splitterKeyword id { } .) + filterKeyword reduce using rule 12 (splitter -> splitterKeyword id { } .) + id reduce using rule 12 (splitter -> splitterKeyword id { } .) + string reduce using rule 12 (splitter -> splitterKeyword id { } .) + ungrouperKeyword reduce using rule 12 (splitter -> splitterKeyword id { } .) + grouperKeyword reduce using rule 12 (splitter -> splitterKeyword id { } .) + groupFilterKeyword reduce using rule 12 (splitter -> splitterKeyword id { } .) + mergerKeyword reduce using rule 12 (splitter -> splitterKeyword id { } .) + $end reduce using rule 12 (splitter -> splitterKeyword id { } .) + + +state 73 + + (109) merger_module1_n -> merger_module . merger_module1_n + (109) merger_module1_n -> . merger_module merger_module1_n + (110) merger_module1_n -> . + (111) merger_module -> . moduleKeyword id { merger_branches merger_rule1_n } + + exportKeyword reduce using rule 110 (merger_module1_n -> .) + moduleKeyword shift and go to state 75 + + merger_module shift and go to state 73 + merger_module1_n shift and go to state 112 + +state 74 + + (108) merger -> mergerKeyword id { merger_module1_n . export } + (115) export -> . 
exportKeyword id + + exportKeyword shift and go to state 113 + + export shift and go to state 114 + +state 75 + + (111) merger_module -> moduleKeyword . id { merger_branches merger_rule1_n } + + id shift and go to state 115 + + +state 76 + + (70) ungrouper -> ungrouperKeyword id { } . + + splitterKeyword reduce using rule 70 (ungrouper -> ungrouperKeyword id { } .) + filterKeyword reduce using rule 70 (ungrouper -> ungrouperKeyword id { } .) + id reduce using rule 70 (ungrouper -> ungrouperKeyword id { } .) + string reduce using rule 70 (ungrouper -> ungrouperKeyword id { } .) + ungrouperKeyword reduce using rule 70 (ungrouper -> ungrouperKeyword id { } .) + grouperKeyword reduce using rule 70 (ungrouper -> ungrouperKeyword id { } .) + groupFilterKeyword reduce using rule 70 (ungrouper -> ungrouperKeyword id { } .) + mergerKeyword reduce using rule 70 (ungrouper -> ungrouperKeyword id { } .) + $end reduce using rule 70 (ungrouper -> ungrouperKeyword id { } .) + + +state 77 + + (44) prefix_rule -> id . ( args ) + (50) arg -> id . + + ( shift and go to state 97 + EQ reduce using rule 50 (arg -> id .) + LT reduce using rule 50 (arg -> id .) + GT reduce using rule 50 (arg -> id .) + LTEQ reduce using rule 50 (arg -> id .) + GTEQ reduce using rule 50 (arg -> id .) + ML reduce using rule 50 (arg -> id .) + MG reduce using rule 50 (arg -> id .) + inKeyword reduce using rule 50 (arg -> id .) + notinKeyword reduce using rule 50 (arg -> id .) + + +state 78 + + (31) rule_or_not -> NOTKeyword . rule + (32) rule -> . infix_rule + (33) rule -> . prefix_rule + (34) infix_rule -> . arg op arg + (44) prefix_rule -> . id ( args ) + (45) prefix_rule -> . bitANDKeyword ( args ) + (46) prefix_rule -> . bitORKeyword ( args ) + (50) arg -> . id + (51) arg -> . IPv4 + (52) arg -> . IPv6 + (53) arg -> . CIDR + (54) arg -> . MAC + (55) arg -> . int + (56) arg -> . float + (57) arg -> . hex + (58) arg -> . prefix_rule + (59) arg -> . string + (60) CIDR -> . IPv4 / int + (61) CIDR -> . 
IPv6 / int + + id shift and go to state 77 + bitANDKeyword shift and go to state 48 + bitORKeyword shift and go to state 58 + IPv4 shift and go to state 70 + IPv6 shift and go to state 71 + MAC shift and go to state 68 + int shift and go to state 51 + float shift and go to state 52 + hex shift and go to state 60 + string shift and go to state 62 + + prefix_rule shift and go to state 54 + infix_rule shift and go to state 53 + rule shift and go to state 98 + arg shift and go to state 55 + CIDR shift and go to state 65 + +state 79 + + (15) group_filter -> groupFilterKeyword id { filter_rule_1n . } + + } shift and go to state 116 + + +state 80 + + (74) module -> moduleKeyword id . { grouper_rule1_n } + + { shift and go to state 117 + + +state 81 + + (90) aggregate -> aggregateKeyword . aggr1_n + (91) aggr1_n -> . aggr opt_aggr + (94) aggr -> . aggr_op ( id_or_qid ) asKeyword id + (95) aggr -> . id_or_qid asKeyword id + (96) aggr -> . id_or_qid + (100) aggr_op -> . minKeyword + (101) aggr_op -> . maxKeyword + (102) aggr_op -> . sumKeyword + (103) aggr_op -> . avgKeyword + (104) aggr_op -> . unionKeyword + (105) aggr_op -> . countKeyword + (106) aggr_op -> . bitANDKeyword + (107) aggr_op -> . bitORKeyword + (98) id_or_qid -> . id + (99) id_or_qid -> . qid + (97) qid -> . id . id + + minKeyword shift and go to state 121 + maxKeyword shift and go to state 120 + sumKeyword shift and go to state 125 + avgKeyword shift and go to state 127 + unionKeyword shift and go to state 126 + countKeyword shift and go to state 130 + bitANDKeyword shift and go to state 119 + bitORKeyword shift and go to state 128 + id shift and go to state 123 + + aggr_op shift and go to state 118 + qid shift and go to state 124 + aggr shift and go to state 129 + id_or_qid shift and go to state 122 + aggr1_n shift and go to state 131 + +state 82 + + (71) grouper -> grouperKeyword id { module1_n aggregate . } + + } shift and go to state 132 + + +state 83 + + (72) module1_n -> module module1_n . 
+ + aggregateKeyword reduce using rule 72 (module1_n -> module module1_n .) + + +state 84 + + (65) mid_branch -> id arrow mid_branch . + + splitterKeyword reduce using rule 65 (mid_branch -> id arrow mid_branch .) + filterKeyword reduce using rule 65 (mid_branch -> id arrow mid_branch .) + id reduce using rule 65 (mid_branch -> id arrow mid_branch .) + string reduce using rule 65 (mid_branch -> id arrow mid_branch .) + ungrouperKeyword reduce using rule 65 (mid_branch -> id arrow mid_branch .) + grouperKeyword reduce using rule 65 (mid_branch -> id arrow mid_branch .) + groupFilterKeyword reduce using rule 65 (mid_branch -> id arrow mid_branch .) + mergerKeyword reduce using rule 65 (mid_branch -> id arrow mid_branch .) + $end reduce using rule 65 (mid_branch -> id arrow mid_branch .) + + +state 85 + + (45) prefix_rule -> bitANDKeyword ( . args ) + (47) args -> . arg , args + (48) args -> . arg + (49) args -> . + (50) arg -> . id + (51) arg -> . IPv4 + (52) arg -> . IPv6 + (53) arg -> . CIDR + (54) arg -> . MAC + (55) arg -> . int + (56) arg -> . float + (57) arg -> . hex + (58) arg -> . prefix_rule + (59) arg -> . string + (60) CIDR -> . IPv4 / int + (61) CIDR -> . IPv6 / int + (44) prefix_rule -> . id ( args ) + (45) prefix_rule -> . bitANDKeyword ( args ) + (46) prefix_rule -> . bitORKeyword ( args ) + + ) reduce using rule 49 (args -> .) + id shift and go to state 136 + IPv4 shift and go to state 70 + IPv6 shift and go to state 71 + MAC shift and go to state 68 + int shift and go to state 51 + float shift and go to state 52 + hex shift and go to state 60 + string shift and go to state 62 + bitANDKeyword shift and go to state 48 + bitORKeyword shift and go to state 58 + + CIDR shift and go to state 65 + args shift and go to state 133 + prefix_rule shift and go to state 134 + arg shift and go to state 135 + +state 86 + + (14) composite_filter -> filterKeyword id { filter_ref_rule_1n } . 
+ + splitterKeyword reduce using rule 14 (composite_filter -> filterKeyword id { filter_ref_rule_1n } .) + filterKeyword reduce using rule 14 (composite_filter -> filterKeyword id { filter_ref_rule_1n } .) + id reduce using rule 14 (composite_filter -> filterKeyword id { filter_ref_rule_1n } .) + string reduce using rule 14 (composite_filter -> filterKeyword id { filter_ref_rule_1n } .) + ungrouperKeyword reduce using rule 14 (composite_filter -> filterKeyword id { filter_ref_rule_1n } .) + grouperKeyword reduce using rule 14 (composite_filter -> filterKeyword id { filter_ref_rule_1n } .) + groupFilterKeyword reduce using rule 14 (composite_filter -> filterKeyword id { filter_ref_rule_1n } .) + mergerKeyword reduce using rule 14 (composite_filter -> filterKeyword id { filter_ref_rule_1n } .) + $end reduce using rule 14 (composite_filter -> filterKeyword id { filter_ref_rule_1n } .) + + +state 87 + + (41) op -> MG . + + IPv4 reduce using rule 41 (op -> MG .) + IPv6 reduce using rule 41 (op -> MG .) + MAC reduce using rule 41 (op -> MG .) + int reduce using rule 41 (op -> MG .) + float reduce using rule 41 (op -> MG .) + hex reduce using rule 41 (op -> MG .) + string reduce using rule 41 (op -> MG .) + id reduce using rule 41 (op -> MG .) + bitANDKeyword reduce using rule 41 (op -> MG .) + bitORKeyword reduce using rule 41 (op -> MG .) + + +state 88 + + (37) op -> GT . + + IPv4 reduce using rule 37 (op -> GT .) + IPv6 reduce using rule 37 (op -> GT .) + MAC reduce using rule 37 (op -> GT .) + int reduce using rule 37 (op -> GT .) + float reduce using rule 37 (op -> GT .) + hex reduce using rule 37 (op -> GT .) + string reduce using rule 37 (op -> GT .) + id reduce using rule 37 (op -> GT .) + bitANDKeyword reduce using rule 37 (op -> GT .) + bitORKeyword reduce using rule 37 (op -> GT .) + + +state 89 + + (39) op -> GTEQ . + + IPv4 reduce using rule 39 (op -> GTEQ .) + IPv6 reduce using rule 39 (op -> GTEQ .) + MAC reduce using rule 39 (op -> GTEQ .) 
+ int reduce using rule 39 (op -> GTEQ .) + float reduce using rule 39 (op -> GTEQ .) + hex reduce using rule 39 (op -> GTEQ .) + string reduce using rule 39 (op -> GTEQ .) + id reduce using rule 39 (op -> GTEQ .) + bitANDKeyword reduce using rule 39 (op -> GTEQ .) + bitORKeyword reduce using rule 39 (op -> GTEQ .) + + +state 90 + + (40) op -> ML . + + IPv4 reduce using rule 40 (op -> ML .) + IPv6 reduce using rule 40 (op -> ML .) + MAC reduce using rule 40 (op -> ML .) + int reduce using rule 40 (op -> ML .) + float reduce using rule 40 (op -> ML .) + hex reduce using rule 40 (op -> ML .) + string reduce using rule 40 (op -> ML .) + id reduce using rule 40 (op -> ML .) + bitANDKeyword reduce using rule 40 (op -> ML .) + bitORKeyword reduce using rule 40 (op -> ML .) + + +state 91 + + (38) op -> LTEQ . + + IPv4 reduce using rule 38 (op -> LTEQ .) + IPv6 reduce using rule 38 (op -> LTEQ .) + MAC reduce using rule 38 (op -> LTEQ .) + int reduce using rule 38 (op -> LTEQ .) + float reduce using rule 38 (op -> LTEQ .) + hex reduce using rule 38 (op -> LTEQ .) + string reduce using rule 38 (op -> LTEQ .) + id reduce using rule 38 (op -> LTEQ .) + bitANDKeyword reduce using rule 38 (op -> LTEQ .) + bitORKeyword reduce using rule 38 (op -> LTEQ .) + + +state 92 + + (42) op -> inKeyword . + + IPv4 reduce using rule 42 (op -> inKeyword .) + IPv6 reduce using rule 42 (op -> inKeyword .) + MAC reduce using rule 42 (op -> inKeyword .) + int reduce using rule 42 (op -> inKeyword .) + float reduce using rule 42 (op -> inKeyword .) + hex reduce using rule 42 (op -> inKeyword .) + string reduce using rule 42 (op -> inKeyword .) + id reduce using rule 42 (op -> inKeyword .) + bitANDKeyword reduce using rule 42 (op -> inKeyword .) + bitORKeyword reduce using rule 42 (op -> inKeyword .) + + +state 93 + + (36) op -> LT . + + IPv4 reduce using rule 36 (op -> LT .) + IPv6 reduce using rule 36 (op -> LT .) + MAC reduce using rule 36 (op -> LT .) + int reduce using rule 36 (op -> LT .) 
+ float reduce using rule 36 (op -> LT .) + hex reduce using rule 36 (op -> LT .) + string reduce using rule 36 (op -> LT .) + id reduce using rule 36 (op -> LT .) + bitANDKeyword reduce using rule 36 (op -> LT .) + bitORKeyword reduce using rule 36 (op -> LT .) + + +state 94 + + (43) op -> notinKeyword . + + IPv4 reduce using rule 43 (op -> notinKeyword .) + IPv6 reduce using rule 43 (op -> notinKeyword .) + MAC reduce using rule 43 (op -> notinKeyword .) + int reduce using rule 43 (op -> notinKeyword .) + float reduce using rule 43 (op -> notinKeyword .) + hex reduce using rule 43 (op -> notinKeyword .) + string reduce using rule 43 (op -> notinKeyword .) + id reduce using rule 43 (op -> notinKeyword .) + bitANDKeyword reduce using rule 43 (op -> notinKeyword .) + bitORKeyword reduce using rule 43 (op -> notinKeyword .) + + +state 95 + + (35) op -> EQ . + + IPv4 reduce using rule 35 (op -> EQ .) + IPv6 reduce using rule 35 (op -> EQ .) + MAC reduce using rule 35 (op -> EQ .) + int reduce using rule 35 (op -> EQ .) + float reduce using rule 35 (op -> EQ .) + hex reduce using rule 35 (op -> EQ .) + string reduce using rule 35 (op -> EQ .) + id reduce using rule 35 (op -> EQ .) + bitANDKeyword reduce using rule 35 (op -> EQ .) + bitORKeyword reduce using rule 35 (op -> EQ .) + + +state 96 + + (34) infix_rule -> arg op . arg + (50) arg -> . id + (51) arg -> . IPv4 + (52) arg -> . IPv6 + (53) arg -> . CIDR + (54) arg -> . MAC + (55) arg -> . int + (56) arg -> . float + (57) arg -> . hex + (58) arg -> . prefix_rule + (59) arg -> . string + (60) CIDR -> . IPv4 / int + (61) CIDR -> . IPv6 / int + (44) prefix_rule -> . id ( args ) + (45) prefix_rule -> . bitANDKeyword ( args ) + (46) prefix_rule -> . 
bitORKeyword ( args ) + + id shift and go to state 136 + IPv4 shift and go to state 70 + IPv6 shift and go to state 71 + MAC shift and go to state 68 + int shift and go to state 51 + float shift and go to state 52 + hex shift and go to state 60 + string shift and go to state 62 + bitANDKeyword shift and go to state 48 + bitORKeyword shift and go to state 58 + + CIDR shift and go to state 65 + prefix_rule shift and go to state 134 + arg shift and go to state 137 + +state 97 + + (44) prefix_rule -> id ( . args ) + (47) args -> . arg , args + (48) args -> . arg + (49) args -> . + (50) arg -> . id + (51) arg -> . IPv4 + (52) arg -> . IPv6 + (53) arg -> . CIDR + (54) arg -> . MAC + (55) arg -> . int + (56) arg -> . float + (57) arg -> . hex + (58) arg -> . prefix_rule + (59) arg -> . string + (60) CIDR -> . IPv4 / int + (61) CIDR -> . IPv6 / int + (44) prefix_rule -> . id ( args ) + (45) prefix_rule -> . bitANDKeyword ( args ) + (46) prefix_rule -> . bitORKeyword ( args ) + + ) reduce using rule 49 (args -> .) + id shift and go to state 136 + IPv4 shift and go to state 70 + IPv6 shift and go to state 71 + MAC shift and go to state 68 + int shift and go to state 51 + float shift and go to state 52 + hex shift and go to state 60 + string shift and go to state 62 + bitANDKeyword shift and go to state 48 + bitORKeyword shift and go to state 58 + + CIDR shift and go to state 65 + args shift and go to state 138 + prefix_rule shift and go to state 134 + arg shift and go to state 135 + +state 98 + + (31) rule_or_not -> NOTKeyword rule . + + ORKeyword reduce using rule 31 (rule_or_not -> NOTKeyword rule .) + NOTKeyword reduce using rule 31 (rule_or_not -> NOTKeyword rule .) + id reduce using rule 31 (rule_or_not -> NOTKeyword rule .) + bitANDKeyword reduce using rule 31 (rule_or_not -> NOTKeyword rule .) + bitORKeyword reduce using rule 31 (rule_or_not -> NOTKeyword rule .) + IPv4 reduce using rule 31 (rule_or_not -> NOTKeyword rule .) 
+ IPv6 reduce using rule 31 (rule_or_not -> NOTKeyword rule .) + MAC reduce using rule 31 (rule_or_not -> NOTKeyword rule .) + int reduce using rule 31 (rule_or_not -> NOTKeyword rule .) + float reduce using rule 31 (rule_or_not -> NOTKeyword rule .) + hex reduce using rule 31 (rule_or_not -> NOTKeyword rule .) + string reduce using rule 31 (rule_or_not -> NOTKeyword rule .) + } reduce using rule 31 (rule_or_not -> NOTKeyword rule .) + + +state 99 + + (25) not_id -> NOTKeyword id . + (44) prefix_rule -> id . ( args ) + (50) arg -> id . + + ORKeyword reduce using rule 25 (not_id -> NOTKeyword id .) + NOTKeyword reduce using rule 25 (not_id -> NOTKeyword id .) + id reduce using rule 25 (not_id -> NOTKeyword id .) + } reduce using rule 25 (not_id -> NOTKeyword id .) + ( shift and go to state 97 + EQ reduce using rule 50 (arg -> id .) + LT reduce using rule 50 (arg -> id .) + GT reduce using rule 50 (arg -> id .) + LTEQ reduce using rule 50 (arg -> id .) + GTEQ reduce using rule 50 (arg -> id .) + ML reduce using rule 50 (arg -> id .) + MG reduce using rule 50 (arg -> id .) + inKeyword reduce using rule 50 (arg -> id .) + notinKeyword reduce using rule 50 (arg -> id .) + + +state 100 + + (46) prefix_rule -> bitORKeyword ( . args ) + (47) args -> . arg , args + (48) args -> . arg + (49) args -> . + (50) arg -> . id + (51) arg -> . IPv4 + (52) arg -> . IPv6 + (53) arg -> . CIDR + (54) arg -> . MAC + (55) arg -> . int + (56) arg -> . float + (57) arg -> . hex + (58) arg -> . prefix_rule + (59) arg -> . string + (60) CIDR -> . IPv4 / int + (61) CIDR -> . IPv6 / int + (44) prefix_rule -> . id ( args ) + (45) prefix_rule -> . bitANDKeyword ( args ) + (46) prefix_rule -> . bitORKeyword ( args ) + + ) reduce using rule 49 (args -> .) 
+ id shift and go to state 136 + IPv4 shift and go to state 70 + IPv6 shift and go to state 71 + MAC shift and go to state 68 + int shift and go to state 51 + float shift and go to state 52 + hex shift and go to state 60 + string shift and go to state 62 + bitANDKeyword shift and go to state 48 + bitORKeyword shift and go to state 58 + + CIDR shift and go to state 65 + args shift and go to state 139 + prefix_rule shift and go to state 134 + arg shift and go to state 135 + +state 101 + + (16) filter_rule_1n -> filter_rule filter_rule_1n . + + } reduce using rule 16 (filter_rule_1n -> filter_rule filter_rule_1n .) + + +state 102 + + (25) not_id -> NOTKeyword . id + + id shift and go to state 140 + + +state 103 + + (19) filter_ref_rule_1n -> filter_ref_rule filter_ref_rule_1n . + + } reduce using rule 19 (filter_ref_rule_1n -> filter_ref_rule filter_ref_rule_1n .) + + +state 104 + + (26) not_id -> id . + + ORKeyword reduce using rule 26 (not_id -> id .) + NOTKeyword reduce using rule 26 (not_id -> id .) + id reduce using rule 26 (not_id -> id .) + } reduce using rule 26 (not_id -> id .) + + +state 105 + + (13) filter -> filterKeyword id { filter_rule_1n } . + + splitterKeyword reduce using rule 13 (filter -> filterKeyword id { filter_rule_1n } .) + filterKeyword reduce using rule 13 (filter -> filterKeyword id { filter_rule_1n } .) + id reduce using rule 13 (filter -> filterKeyword id { filter_rule_1n } .) + string reduce using rule 13 (filter -> filterKeyword id { filter_rule_1n } .) + ungrouperKeyword reduce using rule 13 (filter -> filterKeyword id { filter_rule_1n } .) + grouperKeyword reduce using rule 13 (filter -> filterKeyword id { filter_rule_1n } .) + groupFilterKeyword reduce using rule 13 (filter -> filterKeyword id { filter_rule_1n } .) + mergerKeyword reduce using rule 13 (filter -> filterKeyword id { filter_rule_1n } .) + $end reduce using rule 13 (filter -> filterKeyword id { filter_rule_1n } .) + + +state 106 + + (23) opt_or_id -> ORKeyword . 
not_id opt_or_id + (25) not_id -> . NOTKeyword id + (26) not_id -> . id + + NOTKeyword shift and go to state 102 + id shift and go to state 104 + + not_id shift and go to state 141 + +state 107 + + (22) or_id -> not_id opt_or_id . + + NOTKeyword reduce using rule 22 (or_id -> not_id opt_or_id .) + id reduce using rule 22 (or_id -> not_id opt_or_id .) + } reduce using rule 22 (or_id -> not_id opt_or_id .) + + +state 108 + + (27) or_rule -> rule_or_not opt_rule . + + NOTKeyword reduce using rule 27 (or_rule -> rule_or_not opt_rule .) + id reduce using rule 27 (or_rule -> rule_or_not opt_rule .) + bitANDKeyword reduce using rule 27 (or_rule -> rule_or_not opt_rule .) + bitORKeyword reduce using rule 27 (or_rule -> rule_or_not opt_rule .) + IPv4 reduce using rule 27 (or_rule -> rule_or_not opt_rule .) + IPv6 reduce using rule 27 (or_rule -> rule_or_not opt_rule .) + MAC reduce using rule 27 (or_rule -> rule_or_not opt_rule .) + int reduce using rule 27 (or_rule -> rule_or_not opt_rule .) + float reduce using rule 27 (or_rule -> rule_or_not opt_rule .) + hex reduce using rule 27 (or_rule -> rule_or_not opt_rule .) + string reduce using rule 27 (or_rule -> rule_or_not opt_rule .) + } reduce using rule 27 (or_rule -> rule_or_not opt_rule .) + + +state 109 + + (28) opt_rule -> ORKeyword . rule_or_not opt_rule + (30) rule_or_not -> . rule + (31) rule_or_not -> . NOTKeyword rule + (32) rule -> . infix_rule + (33) rule -> . prefix_rule + (34) infix_rule -> . arg op arg + (44) prefix_rule -> . id ( args ) + (45) prefix_rule -> . bitANDKeyword ( args ) + (46) prefix_rule -> . bitORKeyword ( args ) + (50) arg -> . id + (51) arg -> . IPv4 + (52) arg -> . IPv6 + (53) arg -> . CIDR + (54) arg -> . MAC + (55) arg -> . int + (56) arg -> . float + (57) arg -> . hex + (58) arg -> . prefix_rule + (59) arg -> . string + (60) CIDR -> . IPv4 / int + (61) CIDR -> . 
IPv6 / int + + NOTKeyword shift and go to state 78 + id shift and go to state 77 + bitANDKeyword shift and go to state 48 + bitORKeyword shift and go to state 58 + IPv4 shift and go to state 70 + IPv6 shift and go to state 71 + MAC shift and go to state 68 + int shift and go to state 51 + float shift and go to state 52 + hex shift and go to state 60 + string shift and go to state 62 + + prefix_rule shift and go to state 54 + infix_rule shift and go to state 53 + rule_or_not shift and go to state 142 + rule shift and go to state 67 + arg shift and go to state 55 + CIDR shift and go to state 65 + +state 110 + + (60) CIDR -> IPv4 / . int + + int shift and go to state 143 + + +state 111 + + (61) CIDR -> IPv6 / . int + + int shift and go to state 144 + + +state 112 + + (109) merger_module1_n -> merger_module merger_module1_n . + + exportKeyword reduce using rule 109 (merger_module1_n -> merger_module merger_module1_n .) + + +state 113 + + (115) export -> exportKeyword . id + + id shift and go to state 145 + + +state 114 + + (108) merger -> mergerKeyword id { merger_module1_n export . } + + } shift and go to state 146 + + +state 115 + + (111) merger_module -> moduleKeyword id . { merger_branches merger_rule1_n } + + { shift and go to state 147 + + +state 116 + + (15) group_filter -> groupFilterKeyword id { filter_rule_1n } . + + splitterKeyword reduce using rule 15 (group_filter -> groupFilterKeyword id { filter_rule_1n } .) + filterKeyword reduce using rule 15 (group_filter -> groupFilterKeyword id { filter_rule_1n } .) + id reduce using rule 15 (group_filter -> groupFilterKeyword id { filter_rule_1n } .) + string reduce using rule 15 (group_filter -> groupFilterKeyword id { filter_rule_1n } .) + ungrouperKeyword reduce using rule 15 (group_filter -> groupFilterKeyword id { filter_rule_1n } .) + grouperKeyword reduce using rule 15 (group_filter -> groupFilterKeyword id { filter_rule_1n } .) 
+ groupFilterKeyword reduce using rule 15 (group_filter -> groupFilterKeyword id { filter_rule_1n } .) + mergerKeyword reduce using rule 15 (group_filter -> groupFilterKeyword id { filter_rule_1n } .) + $end reduce using rule 15 (group_filter -> groupFilterKeyword id { filter_rule_1n } .) + + +state 117 + + (74) module -> moduleKeyword id { . grouper_rule1_n } + (75) grouper_rule1_n -> . grouper_rule grouper_rule1_n + (76) grouper_rule1_n -> . + (77) grouper_rule -> . id grouper_op id + (78) grouper_rule -> . id grouper_op id deltaKeyword delta_arg + (79) grouper_rule -> . id grouper_op id rdeltaKeyword delta_arg + + } reduce using rule 76 (grouper_rule1_n -> .) + id shift and go to state 150 + + grouper_rule1_n shift and go to state 148 + grouper_rule shift and go to state 149 + +state 118 + + (94) aggr -> aggr_op . ( id_or_qid ) asKeyword id + + ( shift and go to state 151 + + +state 119 + + (106) aggr_op -> bitANDKeyword . + + ( reduce using rule 106 (aggr_op -> bitANDKeyword .) + + +state 120 + + (101) aggr_op -> maxKeyword . + + ( reduce using rule 101 (aggr_op -> maxKeyword .) + + +state 121 + + (100) aggr_op -> minKeyword . + + ( reduce using rule 100 (aggr_op -> minKeyword .) + + +state 122 + + (95) aggr -> id_or_qid . asKeyword id + (96) aggr -> id_or_qid . + + asKeyword shift and go to state 152 + , reduce using rule 96 (aggr -> id_or_qid .) + } reduce using rule 96 (aggr -> id_or_qid .) + + +state 123 + + (98) id_or_qid -> id . + (97) qid -> id . . id + + ) reduce using rule 98 (id_or_qid -> id .) + asKeyword reduce using rule 98 (id_or_qid -> id .) + , reduce using rule 98 (id_or_qid -> id .) + } reduce using rule 98 (id_or_qid -> id .) + . shift and go to state 153 + + +state 124 + + (99) id_or_qid -> qid . + + ) reduce using rule 99 (id_or_qid -> qid .) + asKeyword reduce using rule 99 (id_or_qid -> qid .) + , reduce using rule 99 (id_or_qid -> qid .) + } reduce using rule 99 (id_or_qid -> qid .) + + +state 125 + + (102) aggr_op -> sumKeyword . 
+ + ( reduce using rule 102 (aggr_op -> sumKeyword .) + + +state 126 + + (104) aggr_op -> unionKeyword . + + ( reduce using rule 104 (aggr_op -> unionKeyword .) + + +state 127 + + (103) aggr_op -> avgKeyword . + + ( reduce using rule 103 (aggr_op -> avgKeyword .) + + +state 128 + + (107) aggr_op -> bitORKeyword . + + ( reduce using rule 107 (aggr_op -> bitORKeyword .) + + +state 129 + + (91) aggr1_n -> aggr . opt_aggr + (92) opt_aggr -> . , aggr opt_aggr + (93) opt_aggr -> . + + , shift and go to state 154 + } reduce using rule 93 (opt_aggr -> .) + + opt_aggr shift and go to state 155 + +state 130 + + (105) aggr_op -> countKeyword . + + ( reduce using rule 105 (aggr_op -> countKeyword .) + + +state 131 + + (90) aggregate -> aggregateKeyword aggr1_n . + + } reduce using rule 90 (aggregate -> aggregateKeyword aggr1_n .) + + +state 132 + + (71) grouper -> grouperKeyword id { module1_n aggregate } . + + splitterKeyword reduce using rule 71 (grouper -> grouperKeyword id { module1_n aggregate } .) + filterKeyword reduce using rule 71 (grouper -> grouperKeyword id { module1_n aggregate } .) + id reduce using rule 71 (grouper -> grouperKeyword id { module1_n aggregate } .) + string reduce using rule 71 (grouper -> grouperKeyword id { module1_n aggregate } .) + ungrouperKeyword reduce using rule 71 (grouper -> grouperKeyword id { module1_n aggregate } .) + grouperKeyword reduce using rule 71 (grouper -> grouperKeyword id { module1_n aggregate } .) + groupFilterKeyword reduce using rule 71 (grouper -> grouperKeyword id { module1_n aggregate } .) + mergerKeyword reduce using rule 71 (grouper -> grouperKeyword id { module1_n aggregate } .) + $end reduce using rule 71 (grouper -> grouperKeyword id { module1_n aggregate } .) + + +state 133 + + (45) prefix_rule -> bitANDKeyword ( args . ) + + ) shift and go to state 156 + + +state 134 + + (58) arg -> prefix_rule . + + , reduce using rule 58 (arg -> prefix_rule .) + ) reduce using rule 58 (arg -> prefix_rule .) 
+ ORKeyword reduce using rule 58 (arg -> prefix_rule .) + NOTKeyword reduce using rule 58 (arg -> prefix_rule .) + id reduce using rule 58 (arg -> prefix_rule .) + bitANDKeyword reduce using rule 58 (arg -> prefix_rule .) + bitORKeyword reduce using rule 58 (arg -> prefix_rule .) + IPv4 reduce using rule 58 (arg -> prefix_rule .) + IPv6 reduce using rule 58 (arg -> prefix_rule .) + MAC reduce using rule 58 (arg -> prefix_rule .) + int reduce using rule 58 (arg -> prefix_rule .) + float reduce using rule 58 (arg -> prefix_rule .) + hex reduce using rule 58 (arg -> prefix_rule .) + string reduce using rule 58 (arg -> prefix_rule .) + } reduce using rule 58 (arg -> prefix_rule .) + + +state 135 + + (47) args -> arg . , args + (48) args -> arg . + + , shift and go to state 157 + ) reduce using rule 48 (args -> arg .) + + +state 136 + + (50) arg -> id . + (44) prefix_rule -> id . ( args ) + + , reduce using rule 50 (arg -> id .) + ) reduce using rule 50 (arg -> id .) + ORKeyword reduce using rule 50 (arg -> id .) + NOTKeyword reduce using rule 50 (arg -> id .) + id reduce using rule 50 (arg -> id .) + bitANDKeyword reduce using rule 50 (arg -> id .) + bitORKeyword reduce using rule 50 (arg -> id .) + IPv4 reduce using rule 50 (arg -> id .) + IPv6 reduce using rule 50 (arg -> id .) + MAC reduce using rule 50 (arg -> id .) + int reduce using rule 50 (arg -> id .) + float reduce using rule 50 (arg -> id .) + hex reduce using rule 50 (arg -> id .) + string reduce using rule 50 (arg -> id .) + } reduce using rule 50 (arg -> id .) + ( shift and go to state 97 + + +state 137 + + (34) infix_rule -> arg op arg . + + ORKeyword reduce using rule 34 (infix_rule -> arg op arg .) + NOTKeyword reduce using rule 34 (infix_rule -> arg op arg .) + id reduce using rule 34 (infix_rule -> arg op arg .) + bitANDKeyword reduce using rule 34 (infix_rule -> arg op arg .) + bitORKeyword reduce using rule 34 (infix_rule -> arg op arg .) + IPv4 reduce using rule 34 (infix_rule -> arg op arg .) 
+ IPv6 reduce using rule 34 (infix_rule -> arg op arg .) + MAC reduce using rule 34 (infix_rule -> arg op arg .) + int reduce using rule 34 (infix_rule -> arg op arg .) + float reduce using rule 34 (infix_rule -> arg op arg .) + hex reduce using rule 34 (infix_rule -> arg op arg .) + string reduce using rule 34 (infix_rule -> arg op arg .) + } reduce using rule 34 (infix_rule -> arg op arg .) + + +state 138 + + (44) prefix_rule -> id ( args . ) + + ) shift and go to state 158 + + +state 139 + + (46) prefix_rule -> bitORKeyword ( args . ) + + ) shift and go to state 159 + + +state 140 + + (25) not_id -> NOTKeyword id . + + ORKeyword reduce using rule 25 (not_id -> NOTKeyword id .) + NOTKeyword reduce using rule 25 (not_id -> NOTKeyword id .) + id reduce using rule 25 (not_id -> NOTKeyword id .) + } reduce using rule 25 (not_id -> NOTKeyword id .) + + +state 141 + + (23) opt_or_id -> ORKeyword not_id . opt_or_id + (23) opt_or_id -> . ORKeyword not_id opt_or_id + (24) opt_or_id -> . + + ORKeyword shift and go to state 106 + NOTKeyword reduce using rule 24 (opt_or_id -> .) + id reduce using rule 24 (opt_or_id -> .) + } reduce using rule 24 (opt_or_id -> .) + + opt_or_id shift and go to state 160 + +state 142 + + (28) opt_rule -> ORKeyword rule_or_not . opt_rule + (28) opt_rule -> . ORKeyword rule_or_not opt_rule + (29) opt_rule -> . + + ORKeyword shift and go to state 109 + NOTKeyword reduce using rule 29 (opt_rule -> .) + id reduce using rule 29 (opt_rule -> .) + bitANDKeyword reduce using rule 29 (opt_rule -> .) + bitORKeyword reduce using rule 29 (opt_rule -> .) + IPv4 reduce using rule 29 (opt_rule -> .) + IPv6 reduce using rule 29 (opt_rule -> .) + MAC reduce using rule 29 (opt_rule -> .) + int reduce using rule 29 (opt_rule -> .) + float reduce using rule 29 (opt_rule -> .) + hex reduce using rule 29 (opt_rule -> .) + string reduce using rule 29 (opt_rule -> .) + } reduce using rule 29 (opt_rule -> .) 
+ + opt_rule shift and go to state 161 + +state 143 + + (60) CIDR -> IPv4 / int . + + EQ reduce using rule 60 (CIDR -> IPv4 / int .) + LT reduce using rule 60 (CIDR -> IPv4 / int .) + GT reduce using rule 60 (CIDR -> IPv4 / int .) + LTEQ reduce using rule 60 (CIDR -> IPv4 / int .) + GTEQ reduce using rule 60 (CIDR -> IPv4 / int .) + ML reduce using rule 60 (CIDR -> IPv4 / int .) + MG reduce using rule 60 (CIDR -> IPv4 / int .) + inKeyword reduce using rule 60 (CIDR -> IPv4 / int .) + notinKeyword reduce using rule 60 (CIDR -> IPv4 / int .) + , reduce using rule 60 (CIDR -> IPv4 / int .) + ) reduce using rule 60 (CIDR -> IPv4 / int .) + NOTKeyword reduce using rule 60 (CIDR -> IPv4 / int .) + id reduce using rule 60 (CIDR -> IPv4 / int .) + IPv4 reduce using rule 60 (CIDR -> IPv4 / int .) + IPv6 reduce using rule 60 (CIDR -> IPv4 / int .) + MAC reduce using rule 60 (CIDR -> IPv4 / int .) + int reduce using rule 60 (CIDR -> IPv4 / int .) + float reduce using rule 60 (CIDR -> IPv4 / int .) + hex reduce using rule 60 (CIDR -> IPv4 / int .) + string reduce using rule 60 (CIDR -> IPv4 / int .) + } reduce using rule 60 (CIDR -> IPv4 / int .) + ORKeyword reduce using rule 60 (CIDR -> IPv4 / int .) + bitANDKeyword reduce using rule 60 (CIDR -> IPv4 / int .) + bitORKeyword reduce using rule 60 (CIDR -> IPv4 / int .) + + +state 144 + + (61) CIDR -> IPv6 / int . + + EQ reduce using rule 61 (CIDR -> IPv6 / int .) + LT reduce using rule 61 (CIDR -> IPv6 / int .) + GT reduce using rule 61 (CIDR -> IPv6 / int .) + LTEQ reduce using rule 61 (CIDR -> IPv6 / int .) + GTEQ reduce using rule 61 (CIDR -> IPv6 / int .) + ML reduce using rule 61 (CIDR -> IPv6 / int .) + MG reduce using rule 61 (CIDR -> IPv6 / int .) + inKeyword reduce using rule 61 (CIDR -> IPv6 / int .) + notinKeyword reduce using rule 61 (CIDR -> IPv6 / int .) + , reduce using rule 61 (CIDR -> IPv6 / int .) + ) reduce using rule 61 (CIDR -> IPv6 / int .) + NOTKeyword reduce using rule 61 (CIDR -> IPv6 / int .) 
+ id reduce using rule 61 (CIDR -> IPv6 / int .) + IPv4 reduce using rule 61 (CIDR -> IPv6 / int .) + IPv6 reduce using rule 61 (CIDR -> IPv6 / int .) + MAC reduce using rule 61 (CIDR -> IPv6 / int .) + int reduce using rule 61 (CIDR -> IPv6 / int .) + float reduce using rule 61 (CIDR -> IPv6 / int .) + hex reduce using rule 61 (CIDR -> IPv6 / int .) + string reduce using rule 61 (CIDR -> IPv6 / int .) + } reduce using rule 61 (CIDR -> IPv6 / int .) + ORKeyword reduce using rule 61 (CIDR -> IPv6 / int .) + bitANDKeyword reduce using rule 61 (CIDR -> IPv6 / int .) + bitORKeyword reduce using rule 61 (CIDR -> IPv6 / int .) + + +state 145 + + (115) export -> exportKeyword id . + + } reduce using rule 115 (export -> exportKeyword id .) + + +state 146 + + (108) merger -> mergerKeyword id { merger_module1_n export } . + + splitterKeyword reduce using rule 108 (merger -> mergerKeyword id { merger_module1_n export } .) + filterKeyword reduce using rule 108 (merger -> mergerKeyword id { merger_module1_n export } .) + id reduce using rule 108 (merger -> mergerKeyword id { merger_module1_n export } .) + string reduce using rule 108 (merger -> mergerKeyword id { merger_module1_n export } .) + ungrouperKeyword reduce using rule 108 (merger -> mergerKeyword id { merger_module1_n export } .) + grouperKeyword reduce using rule 108 (merger -> mergerKeyword id { merger_module1_n export } .) + groupFilterKeyword reduce using rule 108 (merger -> mergerKeyword id { merger_module1_n export } .) + mergerKeyword reduce using rule 108 (merger -> mergerKeyword id { merger_module1_n export } .) + $end reduce using rule 108 (merger -> mergerKeyword id { merger_module1_n export } .) + + +state 147 + + (111) merger_module -> moduleKeyword id { . merger_branches merger_rule1_n } + (112) merger_branches -> . branchesKeyword branches1_n + + branchesKeyword shift and go to state 163 + + merger_branches shift and go to state 162 + +state 148 + + (74) module -> moduleKeyword id { grouper_rule1_n . 
} + + } shift and go to state 164 + + +state 149 + + (75) grouper_rule1_n -> grouper_rule . grouper_rule1_n + (75) grouper_rule1_n -> . grouper_rule grouper_rule1_n + (76) grouper_rule1_n -> . + (77) grouper_rule -> . id grouper_op id + (78) grouper_rule -> . id grouper_op id deltaKeyword delta_arg + (79) grouper_rule -> . id grouper_op id rdeltaKeyword delta_arg + + } reduce using rule 76 (grouper_rule1_n -> .) + id shift and go to state 150 + + grouper_rule1_n shift and go to state 165 + grouper_rule shift and go to state 149 + +state 150 + + (77) grouper_rule -> id . grouper_op id + (78) grouper_rule -> id . grouper_op id deltaKeyword delta_arg + (79) grouper_rule -> id . grouper_op id rdeltaKeyword delta_arg + (80) grouper_op -> . EQ + (81) grouper_op -> . LT + (82) grouper_op -> . GT + (83) grouper_op -> . GTEQ + (84) grouper_op -> . LTEQ + + EQ shift and go to state 171 + LT shift and go to state 170 + GT shift and go to state 166 + GTEQ shift and go to state 167 + LTEQ shift and go to state 169 + + grouper_op shift and go to state 168 + +state 151 + + (94) aggr -> aggr_op ( . id_or_qid ) asKeyword id + (98) id_or_qid -> . id + (99) id_or_qid -> . qid + (97) qid -> . id . id + + id shift and go to state 123 + + qid shift and go to state 124 + id_or_qid shift and go to state 172 + +state 152 + + (95) aggr -> id_or_qid asKeyword . id + + id shift and go to state 173 + + +state 153 + + (97) qid -> id . . id + + id shift and go to state 174 + + +state 154 + + (92) opt_aggr -> , . aggr opt_aggr + (94) aggr -> . aggr_op ( id_or_qid ) asKeyword id + (95) aggr -> . id_or_qid asKeyword id + (96) aggr -> . id_or_qid + (100) aggr_op -> . minKeyword + (101) aggr_op -> . maxKeyword + (102) aggr_op -> . sumKeyword + (103) aggr_op -> . avgKeyword + (104) aggr_op -> . unionKeyword + (105) aggr_op -> . countKeyword + (106) aggr_op -> . bitANDKeyword + (107) aggr_op -> . bitORKeyword + (98) id_or_qid -> . id + (99) id_or_qid -> . qid + (97) qid -> . id . 
id + + minKeyword shift and go to state 121 + maxKeyword shift and go to state 120 + sumKeyword shift and go to state 125 + avgKeyword shift and go to state 127 + unionKeyword shift and go to state 126 + countKeyword shift and go to state 130 + bitANDKeyword shift and go to state 119 + bitORKeyword shift and go to state 128 + id shift and go to state 123 + + aggr_op shift and go to state 118 + qid shift and go to state 124 + id_or_qid shift and go to state 122 + aggr shift and go to state 175 + +state 155 + + (91) aggr1_n -> aggr opt_aggr . + + } reduce using rule 91 (aggr1_n -> aggr opt_aggr .) + + +state 156 + + (45) prefix_rule -> bitANDKeyword ( args ) . + + ORKeyword reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + NOTKeyword reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + id reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + bitANDKeyword reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + bitORKeyword reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + IPv4 reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + IPv6 reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + MAC reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + int reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + float reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + hex reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + string reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + } reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + EQ reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + LT reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + GT reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + LTEQ reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + GTEQ reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) 
+ ML reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + MG reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + inKeyword reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + notinKeyword reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + , reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + ) reduce using rule 45 (prefix_rule -> bitANDKeyword ( args ) .) + + +state 157 + + (47) args -> arg , . args + (47) args -> . arg , args + (48) args -> . arg + (49) args -> . + (50) arg -> . id + (51) arg -> . IPv4 + (52) arg -> . IPv6 + (53) arg -> . CIDR + (54) arg -> . MAC + (55) arg -> . int + (56) arg -> . float + (57) arg -> . hex + (58) arg -> . prefix_rule + (59) arg -> . string + (60) CIDR -> . IPv4 / int + (61) CIDR -> . IPv6 / int + (44) prefix_rule -> . id ( args ) + (45) prefix_rule -> . bitANDKeyword ( args ) + (46) prefix_rule -> . bitORKeyword ( args ) + + ) reduce using rule 49 (args -> .) + id shift and go to state 136 + IPv4 shift and go to state 70 + IPv6 shift and go to state 71 + MAC shift and go to state 68 + int shift and go to state 51 + float shift and go to state 52 + hex shift and go to state 60 + string shift and go to state 62 + bitANDKeyword shift and go to state 48 + bitORKeyword shift and go to state 58 + + CIDR shift and go to state 65 + args shift and go to state 176 + prefix_rule shift and go to state 134 + arg shift and go to state 135 + +state 158 + + (44) prefix_rule -> id ( args ) . + + ORKeyword reduce using rule 44 (prefix_rule -> id ( args ) .) + NOTKeyword reduce using rule 44 (prefix_rule -> id ( args ) .) + id reduce using rule 44 (prefix_rule -> id ( args ) .) + bitANDKeyword reduce using rule 44 (prefix_rule -> id ( args ) .) + bitORKeyword reduce using rule 44 (prefix_rule -> id ( args ) .) + IPv4 reduce using rule 44 (prefix_rule -> id ( args ) .) + IPv6 reduce using rule 44 (prefix_rule -> id ( args ) .) + MAC reduce using rule 44 (prefix_rule -> id ( args ) .) 
+ int reduce using rule 44 (prefix_rule -> id ( args ) .) + float reduce using rule 44 (prefix_rule -> id ( args ) .) + hex reduce using rule 44 (prefix_rule -> id ( args ) .) + string reduce using rule 44 (prefix_rule -> id ( args ) .) + } reduce using rule 44 (prefix_rule -> id ( args ) .) + EQ reduce using rule 44 (prefix_rule -> id ( args ) .) + LT reduce using rule 44 (prefix_rule -> id ( args ) .) + GT reduce using rule 44 (prefix_rule -> id ( args ) .) + LTEQ reduce using rule 44 (prefix_rule -> id ( args ) .) + GTEQ reduce using rule 44 (prefix_rule -> id ( args ) .) + ML reduce using rule 44 (prefix_rule -> id ( args ) .) + MG reduce using rule 44 (prefix_rule -> id ( args ) .) + inKeyword reduce using rule 44 (prefix_rule -> id ( args ) .) + notinKeyword reduce using rule 44 (prefix_rule -> id ( args ) .) + , reduce using rule 44 (prefix_rule -> id ( args ) .) + ) reduce using rule 44 (prefix_rule -> id ( args ) .) + + +state 159 + + (46) prefix_rule -> bitORKeyword ( args ) . + + ORKeyword reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + NOTKeyword reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + id reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + bitANDKeyword reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + bitORKeyword reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + IPv4 reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + IPv6 reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + MAC reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + int reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + float reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + hex reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + string reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + } reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + EQ reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) 
+ LT reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + GT reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + LTEQ reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + GTEQ reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + ML reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + MG reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + inKeyword reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + notinKeyword reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + , reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + ) reduce using rule 46 (prefix_rule -> bitORKeyword ( args ) .) + + +state 160 + + (23) opt_or_id -> ORKeyword not_id opt_or_id . + + NOTKeyword reduce using rule 23 (opt_or_id -> ORKeyword not_id opt_or_id .) + id reduce using rule 23 (opt_or_id -> ORKeyword not_id opt_or_id .) + } reduce using rule 23 (opt_or_id -> ORKeyword not_id opt_or_id .) + + +state 161 + + (28) opt_rule -> ORKeyword rule_or_not opt_rule . + + NOTKeyword reduce using rule 28 (opt_rule -> ORKeyword rule_or_not opt_rule .) + id reduce using rule 28 (opt_rule -> ORKeyword rule_or_not opt_rule .) + bitANDKeyword reduce using rule 28 (opt_rule -> ORKeyword rule_or_not opt_rule .) + bitORKeyword reduce using rule 28 (opt_rule -> ORKeyword rule_or_not opt_rule .) + IPv4 reduce using rule 28 (opt_rule -> ORKeyword rule_or_not opt_rule .) + IPv6 reduce using rule 28 (opt_rule -> ORKeyword rule_or_not opt_rule .) + MAC reduce using rule 28 (opt_rule -> ORKeyword rule_or_not opt_rule .) + int reduce using rule 28 (opt_rule -> ORKeyword rule_or_not opt_rule .) + float reduce using rule 28 (opt_rule -> ORKeyword rule_or_not opt_rule .) + hex reduce using rule 28 (opt_rule -> ORKeyword rule_or_not opt_rule .) + string reduce using rule 28 (opt_rule -> ORKeyword rule_or_not opt_rule .) + } reduce using rule 28 (opt_rule -> ORKeyword rule_or_not opt_rule .) 
+ + +state 162 + + (111) merger_module -> moduleKeyword id { merger_branches . merger_rule1_n } + (116) merger_rule1_n -> . merger_rule merger_rule1_n + (117) merger_rule1_n -> . + (118) merger_rule -> . merger_prefix_rule + (119) merger_rule -> . merger_infix_rule + (120) merger_rule -> . NOTKeyword merger_prefix_rule + (121) merger_rule -> . NOTKeyword merger_infix_rule + (137) merger_rule -> . allen_rule opt_or_allen_rule + (123) merger_prefix_rule -> . id ( qid_args ) + (122) merger_infix_rule -> . qid_arg op qid_arg + (140) allen_rule -> . id allen_op id opt_allen_delta + (127) qid_arg -> . qid + (128) qid_arg -> . IPv4 + (129) qid_arg -> . IPv6 + (130) qid_arg -> . CIDR + (131) qid_arg -> . MAC + (132) qid_arg -> . int + (133) qid_arg -> . float + (134) qid_arg -> . hex + (135) qid_arg -> . merger_prefix_rule + (136) qid_arg -> . string + (97) qid -> . id . id + (60) CIDR -> . IPv4 / int + (61) CIDR -> . IPv6 / int + + } reduce using rule 117 (merger_rule1_n -> .) + NOTKeyword shift and go to state 185 + id shift and go to state 183 + IPv4 shift and go to state 192 + IPv6 shift and go to state 193 + MAC shift and go to state 191 + int shift and go to state 177 + float shift and go to state 179 + hex shift and go to state 186 + string shift and go to state 188 + + merger_rule shift and go to state 184 + merger_prefix_rule shift and go to state 178 + merger_rule1_n shift and go to state 180 + qid shift and go to state 187 + allen_rule shift and go to state 189 + CIDR shift and go to state 190 + merger_infix_rule shift and go to state 181 + qid_arg shift and go to state 182 + +state 163 + + (112) merger_branches -> branchesKeyword . branches1_n + (113) branches1_n -> . id , branches1_n + (114) branches1_n -> . id + + id shift and go to state 195 + + branches1_n shift and go to state 194 + +state 164 + + (74) module -> moduleKeyword id { grouper_rule1_n } . + + moduleKeyword reduce using rule 74 (module -> moduleKeyword id { grouper_rule1_n } .) 
+ aggregateKeyword reduce using rule 74 (module -> moduleKeyword id { grouper_rule1_n } .) + + +state 165 + + (75) grouper_rule1_n -> grouper_rule grouper_rule1_n . + + } reduce using rule 75 (grouper_rule1_n -> grouper_rule grouper_rule1_n .) + + +state 166 + + (82) grouper_op -> GT . + + id reduce using rule 82 (grouper_op -> GT .) + + +state 167 + + (83) grouper_op -> GTEQ . + + id reduce using rule 83 (grouper_op -> GTEQ .) + + +state 168 + + (77) grouper_rule -> id grouper_op . id + (78) grouper_rule -> id grouper_op . id deltaKeyword delta_arg + (79) grouper_rule -> id grouper_op . id rdeltaKeyword delta_arg + + id shift and go to state 196 + + +state 169 + + (84) grouper_op -> LTEQ . + + id reduce using rule 84 (grouper_op -> LTEQ .) + + +state 170 + + (81) grouper_op -> LT . + + id reduce using rule 81 (grouper_op -> LT .) + + +state 171 + + (80) grouper_op -> EQ . + + id reduce using rule 80 (grouper_op -> EQ .) + + +state 172 + + (94) aggr -> aggr_op ( id_or_qid . ) asKeyword id + + ) shift and go to state 197 + + +state 173 + + (95) aggr -> id_or_qid asKeyword id . + + , reduce using rule 95 (aggr -> id_or_qid asKeyword id .) + } reduce using rule 95 (aggr -> id_or_qid asKeyword id .) + + +state 174 + + (97) qid -> id . id . + + EQ reduce using rule 97 (qid -> id . id .) + LT reduce using rule 97 (qid -> id . id .) + GT reduce using rule 97 (qid -> id . id .) + LTEQ reduce using rule 97 (qid -> id . id .) + GTEQ reduce using rule 97 (qid -> id . id .) + ML reduce using rule 97 (qid -> id . id .) + MG reduce using rule 97 (qid -> id . id .) + inKeyword reduce using rule 97 (qid -> id . id .) + notinKeyword reduce using rule 97 (qid -> id . id .) + , reduce using rule 97 (qid -> id . id .) + ) reduce using rule 97 (qid -> id . id .) + asKeyword reduce using rule 97 (qid -> id . id .) + } reduce using rule 97 (qid -> id . id .) + NOTKeyword reduce using rule 97 (qid -> id . id .) + id reduce using rule 97 (qid -> id . id .) 
+ IPv4 reduce using rule 97 (qid -> id . id .) + IPv6 reduce using rule 97 (qid -> id . id .) + MAC reduce using rule 97 (qid -> id . id .) + int reduce using rule 97 (qid -> id . id .) + float reduce using rule 97 (qid -> id . id .) + hex reduce using rule 97 (qid -> id . id .) + string reduce using rule 97 (qid -> id . id .) + + +state 175 + + (92) opt_aggr -> , aggr . opt_aggr + (92) opt_aggr -> . , aggr opt_aggr + (93) opt_aggr -> . + + , shift and go to state 154 + } reduce using rule 93 (opt_aggr -> .) + + opt_aggr shift and go to state 198 + +state 176 + + (47) args -> arg , args . + + ) reduce using rule 47 (args -> arg , args .) + + +state 177 + + (132) qid_arg -> int . + + EQ reduce using rule 132 (qid_arg -> int .) + LT reduce using rule 132 (qid_arg -> int .) + GT reduce using rule 132 (qid_arg -> int .) + LTEQ reduce using rule 132 (qid_arg -> int .) + GTEQ reduce using rule 132 (qid_arg -> int .) + ML reduce using rule 132 (qid_arg -> int .) + MG reduce using rule 132 (qid_arg -> int .) + inKeyword reduce using rule 132 (qid_arg -> int .) + notinKeyword reduce using rule 132 (qid_arg -> int .) + , reduce using rule 132 (qid_arg -> int .) + ) reduce using rule 132 (qid_arg -> int .) + NOTKeyword reduce using rule 132 (qid_arg -> int .) + id reduce using rule 132 (qid_arg -> int .) + IPv4 reduce using rule 132 (qid_arg -> int .) + IPv6 reduce using rule 132 (qid_arg -> int .) + MAC reduce using rule 132 (qid_arg -> int .) + int reduce using rule 132 (qid_arg -> int .) + float reduce using rule 132 (qid_arg -> int .) + hex reduce using rule 132 (qid_arg -> int .) + string reduce using rule 132 (qid_arg -> int .) + } reduce using rule 132 (qid_arg -> int .) + + +state 178 + + (118) merger_rule -> merger_prefix_rule . + (135) qid_arg -> merger_prefix_rule . + + NOTKeyword reduce using rule 118 (merger_rule -> merger_prefix_rule .) + id reduce using rule 118 (merger_rule -> merger_prefix_rule .) 
+ IPv4 reduce using rule 118 (merger_rule -> merger_prefix_rule .) + IPv6 reduce using rule 118 (merger_rule -> merger_prefix_rule .) + MAC reduce using rule 118 (merger_rule -> merger_prefix_rule .) + int reduce using rule 118 (merger_rule -> merger_prefix_rule .) + float reduce using rule 118 (merger_rule -> merger_prefix_rule .) + hex reduce using rule 118 (merger_rule -> merger_prefix_rule .) + string reduce using rule 118 (merger_rule -> merger_prefix_rule .) + } reduce using rule 118 (merger_rule -> merger_prefix_rule .) + EQ reduce using rule 135 (qid_arg -> merger_prefix_rule .) + LT reduce using rule 135 (qid_arg -> merger_prefix_rule .) + GT reduce using rule 135 (qid_arg -> merger_prefix_rule .) + LTEQ reduce using rule 135 (qid_arg -> merger_prefix_rule .) + GTEQ reduce using rule 135 (qid_arg -> merger_prefix_rule .) + ML reduce using rule 135 (qid_arg -> merger_prefix_rule .) + MG reduce using rule 135 (qid_arg -> merger_prefix_rule .) + inKeyword reduce using rule 135 (qid_arg -> merger_prefix_rule .) + notinKeyword reduce using rule 135 (qid_arg -> merger_prefix_rule .) + + +state 179 + + (133) qid_arg -> float . + + EQ reduce using rule 133 (qid_arg -> float .) + LT reduce using rule 133 (qid_arg -> float .) + GT reduce using rule 133 (qid_arg -> float .) + LTEQ reduce using rule 133 (qid_arg -> float .) + GTEQ reduce using rule 133 (qid_arg -> float .) + ML reduce using rule 133 (qid_arg -> float .) + MG reduce using rule 133 (qid_arg -> float .) + inKeyword reduce using rule 133 (qid_arg -> float .) + notinKeyword reduce using rule 133 (qid_arg -> float .) + , reduce using rule 133 (qid_arg -> float .) + ) reduce using rule 133 (qid_arg -> float .) + NOTKeyword reduce using rule 133 (qid_arg -> float .) + id reduce using rule 133 (qid_arg -> float .) + IPv4 reduce using rule 133 (qid_arg -> float .) + IPv6 reduce using rule 133 (qid_arg -> float .) + MAC reduce using rule 133 (qid_arg -> float .) + int reduce using rule 133 (qid_arg -> float .) 
+ float reduce using rule 133 (qid_arg -> float .) + hex reduce using rule 133 (qid_arg -> float .) + string reduce using rule 133 (qid_arg -> float .) + } reduce using rule 133 (qid_arg -> float .) + + +state 180 + + (111) merger_module -> moduleKeyword id { merger_branches merger_rule1_n . } + + } shift and go to state 199 + + +state 181 + + (119) merger_rule -> merger_infix_rule . + + NOTKeyword reduce using rule 119 (merger_rule -> merger_infix_rule .) + id reduce using rule 119 (merger_rule -> merger_infix_rule .) + IPv4 reduce using rule 119 (merger_rule -> merger_infix_rule .) + IPv6 reduce using rule 119 (merger_rule -> merger_infix_rule .) + MAC reduce using rule 119 (merger_rule -> merger_infix_rule .) + int reduce using rule 119 (merger_rule -> merger_infix_rule .) + float reduce using rule 119 (merger_rule -> merger_infix_rule .) + hex reduce using rule 119 (merger_rule -> merger_infix_rule .) + string reduce using rule 119 (merger_rule -> merger_infix_rule .) + } reduce using rule 119 (merger_rule -> merger_infix_rule .) + + +state 182 + + (122) merger_infix_rule -> qid_arg . op qid_arg + (35) op -> . EQ + (36) op -> . LT + (37) op -> . GT + (38) op -> . LTEQ + (39) op -> . GTEQ + (40) op -> . ML + (41) op -> . MG + (42) op -> . inKeyword + (43) op -> . notinKeyword + + EQ shift and go to state 95 + LT shift and go to state 93 + GT shift and go to state 88 + LTEQ shift and go to state 91 + GTEQ shift and go to state 89 + ML shift and go to state 90 + MG shift and go to state 87 + inKeyword shift and go to state 92 + notinKeyword shift and go to state 94 + + op shift and go to state 200 + +state 183 + + (123) merger_prefix_rule -> id . ( qid_args ) + (140) allen_rule -> id . allen_op id opt_allen_delta + (97) qid -> id . . id + (143) allen_op -> . LT + (144) allen_op -> . GT + (145) allen_op -> . EQ + (146) allen_op -> . mKeyword + (147) allen_op -> . miKeyword + (148) allen_op -> . oKeyword + (149) allen_op -> . oiKeyword + (150) allen_op -> . 
sKeyword + (151) allen_op -> . siKeyword + (152) allen_op -> . dKeyword + (153) allen_op -> . diKeyword + (154) allen_op -> . fKeyword + (155) allen_op -> . fiKeyword + (156) allen_op -> . eqKeyword + + ( shift and go to state 204 + . shift and go to state 153 + LT shift and go to state 206 + GT shift and go to state 203 + EQ shift and go to state 215 + mKeyword shift and go to state 212 + miKeyword shift and go to state 209 + oKeyword shift and go to state 205 + oiKeyword shift and go to state 210 + sKeyword shift and go to state 207 + siKeyword shift and go to state 213 + dKeyword shift and go to state 211 + diKeyword shift and go to state 216 + fKeyword shift and go to state 214 + fiKeyword shift and go to state 201 + eqKeyword shift and go to state 202 + + allen_op shift and go to state 208 + +state 184 + + (116) merger_rule1_n -> merger_rule . merger_rule1_n + (116) merger_rule1_n -> . merger_rule merger_rule1_n + (117) merger_rule1_n -> . + (118) merger_rule -> . merger_prefix_rule + (119) merger_rule -> . merger_infix_rule + (120) merger_rule -> . NOTKeyword merger_prefix_rule + (121) merger_rule -> . NOTKeyword merger_infix_rule + (137) merger_rule -> . allen_rule opt_or_allen_rule + (123) merger_prefix_rule -> . id ( qid_args ) + (122) merger_infix_rule -> . qid_arg op qid_arg + (140) allen_rule -> . id allen_op id opt_allen_delta + (127) qid_arg -> . qid + (128) qid_arg -> . IPv4 + (129) qid_arg -> . IPv6 + (130) qid_arg -> . CIDR + (131) qid_arg -> . MAC + (132) qid_arg -> . int + (133) qid_arg -> . float + (134) qid_arg -> . hex + (135) qid_arg -> . merger_prefix_rule + (136) qid_arg -> . string + (97) qid -> . id . id + (60) CIDR -> . IPv4 / int + (61) CIDR -> . IPv6 / int + + } reduce using rule 117 (merger_rule1_n -> .) 
+ NOTKeyword shift and go to state 185 + id shift and go to state 183 + IPv4 shift and go to state 192 + IPv6 shift and go to state 193 + MAC shift and go to state 191 + int shift and go to state 177 + float shift and go to state 179 + hex shift and go to state 186 + string shift and go to state 188 + + merger_rule shift and go to state 184 + merger_prefix_rule shift and go to state 178 + merger_rule1_n shift and go to state 217 + qid shift and go to state 187 + allen_rule shift and go to state 189 + CIDR shift and go to state 190 + merger_infix_rule shift and go to state 181 + qid_arg shift and go to state 182 + +state 185 + + (120) merger_rule -> NOTKeyword . merger_prefix_rule + (121) merger_rule -> NOTKeyword . merger_infix_rule + (123) merger_prefix_rule -> . id ( qid_args ) + (122) merger_infix_rule -> . qid_arg op qid_arg + (127) qid_arg -> . qid + (128) qid_arg -> . IPv4 + (129) qid_arg -> . IPv6 + (130) qid_arg -> . CIDR + (131) qid_arg -> . MAC + (132) qid_arg -> . int + (133) qid_arg -> . float + (134) qid_arg -> . hex + (135) qid_arg -> . merger_prefix_rule + (136) qid_arg -> . string + (97) qid -> . id . id + (60) CIDR -> . IPv4 / int + (61) CIDR -> . IPv6 / int + + id shift and go to state 220 + IPv4 shift and go to state 192 + IPv6 shift and go to state 193 + MAC shift and go to state 191 + int shift and go to state 177 + float shift and go to state 179 + hex shift and go to state 186 + string shift and go to state 188 + + qid shift and go to state 187 + merger_prefix_rule shift and go to state 218 + CIDR shift and go to state 190 + merger_infix_rule shift and go to state 219 + qid_arg shift and go to state 182 + +state 186 + + (134) qid_arg -> hex . + + EQ reduce using rule 134 (qid_arg -> hex .) + LT reduce using rule 134 (qid_arg -> hex .) + GT reduce using rule 134 (qid_arg -> hex .) + LTEQ reduce using rule 134 (qid_arg -> hex .) + GTEQ reduce using rule 134 (qid_arg -> hex .) + ML reduce using rule 134 (qid_arg -> hex .) 
+ MG reduce using rule 134 (qid_arg -> hex .) + inKeyword reduce using rule 134 (qid_arg -> hex .) + notinKeyword reduce using rule 134 (qid_arg -> hex .) + , reduce using rule 134 (qid_arg -> hex .) + ) reduce using rule 134 (qid_arg -> hex .) + NOTKeyword reduce using rule 134 (qid_arg -> hex .) + id reduce using rule 134 (qid_arg -> hex .) + IPv4 reduce using rule 134 (qid_arg -> hex .) + IPv6 reduce using rule 134 (qid_arg -> hex .) + MAC reduce using rule 134 (qid_arg -> hex .) + int reduce using rule 134 (qid_arg -> hex .) + float reduce using rule 134 (qid_arg -> hex .) + hex reduce using rule 134 (qid_arg -> hex .) + string reduce using rule 134 (qid_arg -> hex .) + } reduce using rule 134 (qid_arg -> hex .) + + +state 187 + + (127) qid_arg -> qid . + + EQ reduce using rule 127 (qid_arg -> qid .) + LT reduce using rule 127 (qid_arg -> qid .) + GT reduce using rule 127 (qid_arg -> qid .) + LTEQ reduce using rule 127 (qid_arg -> qid .) + GTEQ reduce using rule 127 (qid_arg -> qid .) + ML reduce using rule 127 (qid_arg -> qid .) + MG reduce using rule 127 (qid_arg -> qid .) + inKeyword reduce using rule 127 (qid_arg -> qid .) + notinKeyword reduce using rule 127 (qid_arg -> qid .) + , reduce using rule 127 (qid_arg -> qid .) + ) reduce using rule 127 (qid_arg -> qid .) + NOTKeyword reduce using rule 127 (qid_arg -> qid .) + id reduce using rule 127 (qid_arg -> qid .) + IPv4 reduce using rule 127 (qid_arg -> qid .) + IPv6 reduce using rule 127 (qid_arg -> qid .) + MAC reduce using rule 127 (qid_arg -> qid .) + int reduce using rule 127 (qid_arg -> qid .) + float reduce using rule 127 (qid_arg -> qid .) + hex reduce using rule 127 (qid_arg -> qid .) + string reduce using rule 127 (qid_arg -> qid .) + } reduce using rule 127 (qid_arg -> qid .) + + +state 188 + + (136) qid_arg -> string . + + EQ reduce using rule 136 (qid_arg -> string .) + LT reduce using rule 136 (qid_arg -> string .) + GT reduce using rule 136 (qid_arg -> string .) 
+ LTEQ reduce using rule 136 (qid_arg -> string .) + GTEQ reduce using rule 136 (qid_arg -> string .) + ML reduce using rule 136 (qid_arg -> string .) + MG reduce using rule 136 (qid_arg -> string .) + inKeyword reduce using rule 136 (qid_arg -> string .) + notinKeyword reduce using rule 136 (qid_arg -> string .) + , reduce using rule 136 (qid_arg -> string .) + ) reduce using rule 136 (qid_arg -> string .) + NOTKeyword reduce using rule 136 (qid_arg -> string .) + id reduce using rule 136 (qid_arg -> string .) + IPv4 reduce using rule 136 (qid_arg -> string .) + IPv6 reduce using rule 136 (qid_arg -> string .) + MAC reduce using rule 136 (qid_arg -> string .) + int reduce using rule 136 (qid_arg -> string .) + float reduce using rule 136 (qid_arg -> string .) + hex reduce using rule 136 (qid_arg -> string .) + string reduce using rule 136 (qid_arg -> string .) + } reduce using rule 136 (qid_arg -> string .) + + +state 189 + + (137) merger_rule -> allen_rule . opt_or_allen_rule + (138) opt_or_allen_rule -> . ORKeyword allen_rule opt_or_allen_rule + (139) opt_or_allen_rule -> . + + ORKeyword shift and go to state 222 + NOTKeyword reduce using rule 139 (opt_or_allen_rule -> .) + id reduce using rule 139 (opt_or_allen_rule -> .) + IPv4 reduce using rule 139 (opt_or_allen_rule -> .) + IPv6 reduce using rule 139 (opt_or_allen_rule -> .) + MAC reduce using rule 139 (opt_or_allen_rule -> .) + int reduce using rule 139 (opt_or_allen_rule -> .) + float reduce using rule 139 (opt_or_allen_rule -> .) + hex reduce using rule 139 (opt_or_allen_rule -> .) + string reduce using rule 139 (opt_or_allen_rule -> .) + } reduce using rule 139 (opt_or_allen_rule -> .) + + opt_or_allen_rule shift and go to state 221 + +state 190 + + (130) qid_arg -> CIDR . + + EQ reduce using rule 130 (qid_arg -> CIDR .) + LT reduce using rule 130 (qid_arg -> CIDR .) + GT reduce using rule 130 (qid_arg -> CIDR .) + LTEQ reduce using rule 130 (qid_arg -> CIDR .) 
+ GTEQ reduce using rule 130 (qid_arg -> CIDR .) + ML reduce using rule 130 (qid_arg -> CIDR .) + MG reduce using rule 130 (qid_arg -> CIDR .) + inKeyword reduce using rule 130 (qid_arg -> CIDR .) + notinKeyword reduce using rule 130 (qid_arg -> CIDR .) + , reduce using rule 130 (qid_arg -> CIDR .) + ) reduce using rule 130 (qid_arg -> CIDR .) + NOTKeyword reduce using rule 130 (qid_arg -> CIDR .) + id reduce using rule 130 (qid_arg -> CIDR .) + IPv4 reduce using rule 130 (qid_arg -> CIDR .) + IPv6 reduce using rule 130 (qid_arg -> CIDR .) + MAC reduce using rule 130 (qid_arg -> CIDR .) + int reduce using rule 130 (qid_arg -> CIDR .) + float reduce using rule 130 (qid_arg -> CIDR .) + hex reduce using rule 130 (qid_arg -> CIDR .) + string reduce using rule 130 (qid_arg -> CIDR .) + } reduce using rule 130 (qid_arg -> CIDR .) + + +state 191 + + (131) qid_arg -> MAC . + + EQ reduce using rule 131 (qid_arg -> MAC .) + LT reduce using rule 131 (qid_arg -> MAC .) + GT reduce using rule 131 (qid_arg -> MAC .) + LTEQ reduce using rule 131 (qid_arg -> MAC .) + GTEQ reduce using rule 131 (qid_arg -> MAC .) + ML reduce using rule 131 (qid_arg -> MAC .) + MG reduce using rule 131 (qid_arg -> MAC .) + inKeyword reduce using rule 131 (qid_arg -> MAC .) + notinKeyword reduce using rule 131 (qid_arg -> MAC .) + , reduce using rule 131 (qid_arg -> MAC .) + ) reduce using rule 131 (qid_arg -> MAC .) + NOTKeyword reduce using rule 131 (qid_arg -> MAC .) + id reduce using rule 131 (qid_arg -> MAC .) + IPv4 reduce using rule 131 (qid_arg -> MAC .) + IPv6 reduce using rule 131 (qid_arg -> MAC .) + MAC reduce using rule 131 (qid_arg -> MAC .) + int reduce using rule 131 (qid_arg -> MAC .) + float reduce using rule 131 (qid_arg -> MAC .) + hex reduce using rule 131 (qid_arg -> MAC .) + string reduce using rule 131 (qid_arg -> MAC .) + } reduce using rule 131 (qid_arg -> MAC .) + + +state 192 + + (128) qid_arg -> IPv4 . + (60) CIDR -> IPv4 . 
/ int + + EQ reduce using rule 128 (qid_arg -> IPv4 .) + LT reduce using rule 128 (qid_arg -> IPv4 .) + GT reduce using rule 128 (qid_arg -> IPv4 .) + LTEQ reduce using rule 128 (qid_arg -> IPv4 .) + GTEQ reduce using rule 128 (qid_arg -> IPv4 .) + ML reduce using rule 128 (qid_arg -> IPv4 .) + MG reduce using rule 128 (qid_arg -> IPv4 .) + inKeyword reduce using rule 128 (qid_arg -> IPv4 .) + notinKeyword reduce using rule 128 (qid_arg -> IPv4 .) + , reduce using rule 128 (qid_arg -> IPv4 .) + ) reduce using rule 128 (qid_arg -> IPv4 .) + NOTKeyword reduce using rule 128 (qid_arg -> IPv4 .) + id reduce using rule 128 (qid_arg -> IPv4 .) + IPv4 reduce using rule 128 (qid_arg -> IPv4 .) + IPv6 reduce using rule 128 (qid_arg -> IPv4 .) + MAC reduce using rule 128 (qid_arg -> IPv4 .) + int reduce using rule 128 (qid_arg -> IPv4 .) + float reduce using rule 128 (qid_arg -> IPv4 .) + hex reduce using rule 128 (qid_arg -> IPv4 .) + string reduce using rule 128 (qid_arg -> IPv4 .) + } reduce using rule 128 (qid_arg -> IPv4 .) + / shift and go to state 110 + + +state 193 + + (129) qid_arg -> IPv6 . + (61) CIDR -> IPv6 . / int + + EQ reduce using rule 129 (qid_arg -> IPv6 .) + LT reduce using rule 129 (qid_arg -> IPv6 .) + GT reduce using rule 129 (qid_arg -> IPv6 .) + LTEQ reduce using rule 129 (qid_arg -> IPv6 .) + GTEQ reduce using rule 129 (qid_arg -> IPv6 .) + ML reduce using rule 129 (qid_arg -> IPv6 .) + MG reduce using rule 129 (qid_arg -> IPv6 .) + inKeyword reduce using rule 129 (qid_arg -> IPv6 .) + notinKeyword reduce using rule 129 (qid_arg -> IPv6 .) + , reduce using rule 129 (qid_arg -> IPv6 .) + ) reduce using rule 129 (qid_arg -> IPv6 .) + NOTKeyword reduce using rule 129 (qid_arg -> IPv6 .) + id reduce using rule 129 (qid_arg -> IPv6 .) + IPv4 reduce using rule 129 (qid_arg -> IPv6 .) + IPv6 reduce using rule 129 (qid_arg -> IPv6 .) + MAC reduce using rule 129 (qid_arg -> IPv6 .) + int reduce using rule 129 (qid_arg -> IPv6 .) 
+ float reduce using rule 129 (qid_arg -> IPv6 .) + hex reduce using rule 129 (qid_arg -> IPv6 .) + string reduce using rule 129 (qid_arg -> IPv6 .) + } reduce using rule 129 (qid_arg -> IPv6 .) + / shift and go to state 111 + + +state 194 + + (112) merger_branches -> branchesKeyword branches1_n . + + NOTKeyword reduce using rule 112 (merger_branches -> branchesKeyword branches1_n .) + id reduce using rule 112 (merger_branches -> branchesKeyword branches1_n .) + IPv4 reduce using rule 112 (merger_branches -> branchesKeyword branches1_n .) + IPv6 reduce using rule 112 (merger_branches -> branchesKeyword branches1_n .) + MAC reduce using rule 112 (merger_branches -> branchesKeyword branches1_n .) + int reduce using rule 112 (merger_branches -> branchesKeyword branches1_n .) + float reduce using rule 112 (merger_branches -> branchesKeyword branches1_n .) + hex reduce using rule 112 (merger_branches -> branchesKeyword branches1_n .) + string reduce using rule 112 (merger_branches -> branchesKeyword branches1_n .) + } reduce using rule 112 (merger_branches -> branchesKeyword branches1_n .) + + +state 195 + + (113) branches1_n -> id . , branches1_n + (114) branches1_n -> id . + + , shift and go to state 223 + NOTKeyword reduce using rule 114 (branches1_n -> id .) + id reduce using rule 114 (branches1_n -> id .) + IPv4 reduce using rule 114 (branches1_n -> id .) + IPv6 reduce using rule 114 (branches1_n -> id .) + MAC reduce using rule 114 (branches1_n -> id .) + int reduce using rule 114 (branches1_n -> id .) + float reduce using rule 114 (branches1_n -> id .) + hex reduce using rule 114 (branches1_n -> id .) + string reduce using rule 114 (branches1_n -> id .) + } reduce using rule 114 (branches1_n -> id .) + + +state 196 + + (77) grouper_rule -> id grouper_op id . + (78) grouper_rule -> id grouper_op id . deltaKeyword delta_arg + (79) grouper_rule -> id grouper_op id . rdeltaKeyword delta_arg + + id reduce using rule 77 (grouper_rule -> id grouper_op id .) 
+ } reduce using rule 77 (grouper_rule -> id grouper_op id .) + deltaKeyword shift and go to state 225 + rdeltaKeyword shift and go to state 224 + + +state 197 + + (94) aggr -> aggr_op ( id_or_qid ) . asKeyword id + + asKeyword shift and go to state 226 + + +state 198 + + (92) opt_aggr -> , aggr opt_aggr . + + } reduce using rule 92 (opt_aggr -> , aggr opt_aggr .) + + +state 199 + + (111) merger_module -> moduleKeyword id { merger_branches merger_rule1_n } . + + moduleKeyword reduce using rule 111 (merger_module -> moduleKeyword id { merger_branches merger_rule1_n } .) + exportKeyword reduce using rule 111 (merger_module -> moduleKeyword id { merger_branches merger_rule1_n } .) + + +state 200 + + (122) merger_infix_rule -> qid_arg op . qid_arg + (127) qid_arg -> . qid + (128) qid_arg -> . IPv4 + (129) qid_arg -> . IPv6 + (130) qid_arg -> . CIDR + (131) qid_arg -> . MAC + (132) qid_arg -> . int + (133) qid_arg -> . float + (134) qid_arg -> . hex + (135) qid_arg -> . merger_prefix_rule + (136) qid_arg -> . string + (97) qid -> . id . id + (60) CIDR -> . IPv4 / int + (61) CIDR -> . IPv6 / int + (123) merger_prefix_rule -> . id ( qid_args ) + + IPv4 shift and go to state 192 + IPv6 shift and go to state 193 + MAC shift and go to state 191 + int shift and go to state 177 + float shift and go to state 179 + hex shift and go to state 186 + string shift and go to state 188 + id shift and go to state 229 + + qid shift and go to state 187 + merger_prefix_rule shift and go to state 227 + CIDR shift and go to state 190 + qid_arg shift and go to state 228 + +state 201 + + (155) allen_op -> fiKeyword . + + id reduce using rule 155 (allen_op -> fiKeyword .) + + +state 202 + + (156) allen_op -> eqKeyword . + + id reduce using rule 156 (allen_op -> eqKeyword .) + + +state 203 + + (144) allen_op -> GT . + + id reduce using rule 144 (allen_op -> GT .) + + +state 204 + + (123) merger_prefix_rule -> id ( . qid_args ) + (124) qid_args -> . qid_arg , qid_args + (125) qid_args -> . 
qid_arg + (126) qid_args -> . + (127) qid_arg -> . qid + (128) qid_arg -> . IPv4 + (129) qid_arg -> . IPv6 + (130) qid_arg -> . CIDR + (131) qid_arg -> . MAC + (132) qid_arg -> . int + (133) qid_arg -> . float + (134) qid_arg -> . hex + (135) qid_arg -> . merger_prefix_rule + (136) qid_arg -> . string + (97) qid -> . id . id + (60) CIDR -> . IPv4 / int + (61) CIDR -> . IPv6 / int + (123) merger_prefix_rule -> . id ( qid_args ) + + ) reduce using rule 126 (qid_args -> .) + IPv4 shift and go to state 192 + IPv6 shift and go to state 193 + MAC shift and go to state 191 + int shift and go to state 177 + float shift and go to state 179 + hex shift and go to state 186 + string shift and go to state 188 + id shift and go to state 229 + + qid shift and go to state 187 + qid_args shift and go to state 230 + merger_prefix_rule shift and go to state 227 + qid_arg shift and go to state 231 + CIDR shift and go to state 190 + +state 205 + + (148) allen_op -> oKeyword . + + id reduce using rule 148 (allen_op -> oKeyword .) + + +state 206 + + (143) allen_op -> LT . + + id reduce using rule 143 (allen_op -> LT .) + + +state 207 + + (150) allen_op -> sKeyword . + + id reduce using rule 150 (allen_op -> sKeyword .) + + +state 208 + + (140) allen_rule -> id allen_op . id opt_allen_delta + + id shift and go to state 232 + + +state 209 + + (147) allen_op -> miKeyword . + + id reduce using rule 147 (allen_op -> miKeyword .) + + +state 210 + + (149) allen_op -> oiKeyword . + + id reduce using rule 149 (allen_op -> oiKeyword .) + + +state 211 + + (152) allen_op -> dKeyword . + + id reduce using rule 152 (allen_op -> dKeyword .) + + +state 212 + + (146) allen_op -> mKeyword . + + id reduce using rule 146 (allen_op -> mKeyword .) + + +state 213 + + (151) allen_op -> siKeyword . + + id reduce using rule 151 (allen_op -> siKeyword .) + + +state 214 + + (154) allen_op -> fKeyword . + + id reduce using rule 154 (allen_op -> fKeyword .) + + +state 215 + + (145) allen_op -> EQ . 
+ + id reduce using rule 145 (allen_op -> EQ .) + + +state 216 + + (153) allen_op -> diKeyword . + + id reduce using rule 153 (allen_op -> diKeyword .) + + +state 217 + + (116) merger_rule1_n -> merger_rule merger_rule1_n . + + } reduce using rule 116 (merger_rule1_n -> merger_rule merger_rule1_n .) + + +state 218 + + (120) merger_rule -> NOTKeyword merger_prefix_rule . + (135) qid_arg -> merger_prefix_rule . + + NOTKeyword reduce using rule 120 (merger_rule -> NOTKeyword merger_prefix_rule .) + id reduce using rule 120 (merger_rule -> NOTKeyword merger_prefix_rule .) + IPv4 reduce using rule 120 (merger_rule -> NOTKeyword merger_prefix_rule .) + IPv6 reduce using rule 120 (merger_rule -> NOTKeyword merger_prefix_rule .) + MAC reduce using rule 120 (merger_rule -> NOTKeyword merger_prefix_rule .) + int reduce using rule 120 (merger_rule -> NOTKeyword merger_prefix_rule .) + float reduce using rule 120 (merger_rule -> NOTKeyword merger_prefix_rule .) + hex reduce using rule 120 (merger_rule -> NOTKeyword merger_prefix_rule .) + string reduce using rule 120 (merger_rule -> NOTKeyword merger_prefix_rule .) + } reduce using rule 120 (merger_rule -> NOTKeyword merger_prefix_rule .) + EQ reduce using rule 135 (qid_arg -> merger_prefix_rule .) + LT reduce using rule 135 (qid_arg -> merger_prefix_rule .) + GT reduce using rule 135 (qid_arg -> merger_prefix_rule .) + LTEQ reduce using rule 135 (qid_arg -> merger_prefix_rule .) + GTEQ reduce using rule 135 (qid_arg -> merger_prefix_rule .) + ML reduce using rule 135 (qid_arg -> merger_prefix_rule .) + MG reduce using rule 135 (qid_arg -> merger_prefix_rule .) + inKeyword reduce using rule 135 (qid_arg -> merger_prefix_rule .) + notinKeyword reduce using rule 135 (qid_arg -> merger_prefix_rule .) + + +state 219 + + (121) merger_rule -> NOTKeyword merger_infix_rule . + + NOTKeyword reduce using rule 121 (merger_rule -> NOTKeyword merger_infix_rule .) + id reduce using rule 121 (merger_rule -> NOTKeyword merger_infix_rule .) 
+ IPv4 reduce using rule 121 (merger_rule -> NOTKeyword merger_infix_rule .) + IPv6 reduce using rule 121 (merger_rule -> NOTKeyword merger_infix_rule .) + MAC reduce using rule 121 (merger_rule -> NOTKeyword merger_infix_rule .) + int reduce using rule 121 (merger_rule -> NOTKeyword merger_infix_rule .) + float reduce using rule 121 (merger_rule -> NOTKeyword merger_infix_rule .) + hex reduce using rule 121 (merger_rule -> NOTKeyword merger_infix_rule .) + string reduce using rule 121 (merger_rule -> NOTKeyword merger_infix_rule .) + } reduce using rule 121 (merger_rule -> NOTKeyword merger_infix_rule .) + + +state 220 + + (123) merger_prefix_rule -> id . ( qid_args ) + (97) qid -> id . . id + + ( shift and go to state 204 + . shift and go to state 153 + + +state 221 + + (137) merger_rule -> allen_rule opt_or_allen_rule . + + NOTKeyword reduce using rule 137 (merger_rule -> allen_rule opt_or_allen_rule .) + id reduce using rule 137 (merger_rule -> allen_rule opt_or_allen_rule .) + IPv4 reduce using rule 137 (merger_rule -> allen_rule opt_or_allen_rule .) + IPv6 reduce using rule 137 (merger_rule -> allen_rule opt_or_allen_rule .) + MAC reduce using rule 137 (merger_rule -> allen_rule opt_or_allen_rule .) + int reduce using rule 137 (merger_rule -> allen_rule opt_or_allen_rule .) + float reduce using rule 137 (merger_rule -> allen_rule opt_or_allen_rule .) + hex reduce using rule 137 (merger_rule -> allen_rule opt_or_allen_rule .) + string reduce using rule 137 (merger_rule -> allen_rule opt_or_allen_rule .) + } reduce using rule 137 (merger_rule -> allen_rule opt_or_allen_rule .) + + +state 222 + + (138) opt_or_allen_rule -> ORKeyword . allen_rule opt_or_allen_rule + (140) allen_rule -> . id allen_op id opt_allen_delta + + id shift and go to state 234 + + allen_rule shift and go to state 233 + +state 223 + + (113) branches1_n -> id , . branches1_n + (113) branches1_n -> . id , branches1_n + (114) branches1_n -> . 
id + + id shift and go to state 195 + + branches1_n shift and go to state 235 + +state 224 + + (79) grouper_rule -> id grouper_op id rdeltaKeyword . delta_arg + (85) delta_arg -> . time + (86) delta_arg -> . int + (87) time -> . int sKeyword + (88) time -> . int msKeyword + (89) time -> . int minKeyword + + int shift and go to state 236 + + delta_arg shift and go to state 238 + time shift and go to state 237 + +state 225 + + (78) grouper_rule -> id grouper_op id deltaKeyword . delta_arg + (85) delta_arg -> . time + (86) delta_arg -> . int + (87) time -> . int sKeyword + (88) time -> . int msKeyword + (89) time -> . int minKeyword + + int shift and go to state 236 + + delta_arg shift and go to state 239 + time shift and go to state 237 + +state 226 + + (94) aggr -> aggr_op ( id_or_qid ) asKeyword . id + + id shift and go to state 240 + + +state 227 + + (135) qid_arg -> merger_prefix_rule . + + , reduce using rule 135 (qid_arg -> merger_prefix_rule .) + ) reduce using rule 135 (qid_arg -> merger_prefix_rule .) + NOTKeyword reduce using rule 135 (qid_arg -> merger_prefix_rule .) + id reduce using rule 135 (qid_arg -> merger_prefix_rule .) + IPv4 reduce using rule 135 (qid_arg -> merger_prefix_rule .) + IPv6 reduce using rule 135 (qid_arg -> merger_prefix_rule .) + MAC reduce using rule 135 (qid_arg -> merger_prefix_rule .) + int reduce using rule 135 (qid_arg -> merger_prefix_rule .) + float reduce using rule 135 (qid_arg -> merger_prefix_rule .) + hex reduce using rule 135 (qid_arg -> merger_prefix_rule .) + string reduce using rule 135 (qid_arg -> merger_prefix_rule .) + } reduce using rule 135 (qid_arg -> merger_prefix_rule .) + + +state 228 + + (122) merger_infix_rule -> qid_arg op qid_arg . + + NOTKeyword reduce using rule 122 (merger_infix_rule -> qid_arg op qid_arg .) + id reduce using rule 122 (merger_infix_rule -> qid_arg op qid_arg .) + IPv4 reduce using rule 122 (merger_infix_rule -> qid_arg op qid_arg .) 
+ IPv6 reduce using rule 122 (merger_infix_rule -> qid_arg op qid_arg .) + MAC reduce using rule 122 (merger_infix_rule -> qid_arg op qid_arg .) + int reduce using rule 122 (merger_infix_rule -> qid_arg op qid_arg .) + float reduce using rule 122 (merger_infix_rule -> qid_arg op qid_arg .) + hex reduce using rule 122 (merger_infix_rule -> qid_arg op qid_arg .) + string reduce using rule 122 (merger_infix_rule -> qid_arg op qid_arg .) + } reduce using rule 122 (merger_infix_rule -> qid_arg op qid_arg .) + + +state 229 + + (97) qid -> id . . id + (123) merger_prefix_rule -> id . ( qid_args ) + + . shift and go to state 153 + ( shift and go to state 204 + + +state 230 + + (123) merger_prefix_rule -> id ( qid_args . ) + + ) shift and go to state 241 + + +state 231 + + (124) qid_args -> qid_arg . , qid_args + (125) qid_args -> qid_arg . + + , shift and go to state 242 + ) reduce using rule 125 (qid_args -> qid_arg .) + + +state 232 + + (140) allen_rule -> id allen_op id . opt_allen_delta + (141) opt_allen_delta -> . deltaKeyword time + (142) opt_allen_delta -> . + + deltaKeyword shift and go to state 243 + ORKeyword reduce using rule 142 (opt_allen_delta -> .) + NOTKeyword reduce using rule 142 (opt_allen_delta -> .) + id reduce using rule 142 (opt_allen_delta -> .) + IPv4 reduce using rule 142 (opt_allen_delta -> .) + IPv6 reduce using rule 142 (opt_allen_delta -> .) + MAC reduce using rule 142 (opt_allen_delta -> .) + int reduce using rule 142 (opt_allen_delta -> .) + float reduce using rule 142 (opt_allen_delta -> .) + hex reduce using rule 142 (opt_allen_delta -> .) + string reduce using rule 142 (opt_allen_delta -> .) + } reduce using rule 142 (opt_allen_delta -> .) + + opt_allen_delta shift and go to state 244 + +state 233 + + (138) opt_or_allen_rule -> ORKeyword allen_rule . opt_or_allen_rule + (138) opt_or_allen_rule -> . ORKeyword allen_rule opt_or_allen_rule + (139) opt_or_allen_rule -> . 
+ + ORKeyword shift and go to state 222 + NOTKeyword reduce using rule 139 (opt_or_allen_rule -> .) + id reduce using rule 139 (opt_or_allen_rule -> .) + IPv4 reduce using rule 139 (opt_or_allen_rule -> .) + IPv6 reduce using rule 139 (opt_or_allen_rule -> .) + MAC reduce using rule 139 (opt_or_allen_rule -> .) + int reduce using rule 139 (opt_or_allen_rule -> .) + float reduce using rule 139 (opt_or_allen_rule -> .) + hex reduce using rule 139 (opt_or_allen_rule -> .) + string reduce using rule 139 (opt_or_allen_rule -> .) + } reduce using rule 139 (opt_or_allen_rule -> .) + + opt_or_allen_rule shift and go to state 245 + +state 234 + + (140) allen_rule -> id . allen_op id opt_allen_delta + (143) allen_op -> . LT + (144) allen_op -> . GT + (145) allen_op -> . EQ + (146) allen_op -> . mKeyword + (147) allen_op -> . miKeyword + (148) allen_op -> . oKeyword + (149) allen_op -> . oiKeyword + (150) allen_op -> . sKeyword + (151) allen_op -> . siKeyword + (152) allen_op -> . dKeyword + (153) allen_op -> . diKeyword + (154) allen_op -> . fKeyword + (155) allen_op -> . fiKeyword + (156) allen_op -> . eqKeyword + + LT shift and go to state 206 + GT shift and go to state 203 + EQ shift and go to state 215 + mKeyword shift and go to state 212 + miKeyword shift and go to state 209 + oKeyword shift and go to state 205 + oiKeyword shift and go to state 210 + sKeyword shift and go to state 207 + siKeyword shift and go to state 213 + dKeyword shift and go to state 211 + diKeyword shift and go to state 216 + fKeyword shift and go to state 214 + fiKeyword shift and go to state 201 + eqKeyword shift and go to state 202 + + allen_op shift and go to state 208 + +state 235 + + (113) branches1_n -> id , branches1_n . + + NOTKeyword reduce using rule 113 (branches1_n -> id , branches1_n .) + id reduce using rule 113 (branches1_n -> id , branches1_n .) + IPv4 reduce using rule 113 (branches1_n -> id , branches1_n .) + IPv6 reduce using rule 113 (branches1_n -> id , branches1_n .) 
+ MAC reduce using rule 113 (branches1_n -> id , branches1_n .) + int reduce using rule 113 (branches1_n -> id , branches1_n .) + float reduce using rule 113 (branches1_n -> id , branches1_n .) + hex reduce using rule 113 (branches1_n -> id , branches1_n .) + string reduce using rule 113 (branches1_n -> id , branches1_n .) + } reduce using rule 113 (branches1_n -> id , branches1_n .) + + +state 236 + + (86) delta_arg -> int . + (87) time -> int . sKeyword + (88) time -> int . msKeyword + (89) time -> int . minKeyword + + id reduce using rule 86 (delta_arg -> int .) + } reduce using rule 86 (delta_arg -> int .) + sKeyword shift and go to state 246 + msKeyword shift and go to state 247 + minKeyword shift and go to state 248 + + +state 237 + + (85) delta_arg -> time . + + id reduce using rule 85 (delta_arg -> time .) + } reduce using rule 85 (delta_arg -> time .) + + +state 238 + + (79) grouper_rule -> id grouper_op id rdeltaKeyword delta_arg . + + id reduce using rule 79 (grouper_rule -> id grouper_op id rdeltaKeyword delta_arg .) + } reduce using rule 79 (grouper_rule -> id grouper_op id rdeltaKeyword delta_arg .) + + +state 239 + + (78) grouper_rule -> id grouper_op id deltaKeyword delta_arg . + + id reduce using rule 78 (grouper_rule -> id grouper_op id deltaKeyword delta_arg .) + } reduce using rule 78 (grouper_rule -> id grouper_op id deltaKeyword delta_arg .) + + +state 240 + + (94) aggr -> aggr_op ( id_or_qid ) asKeyword id . + + , reduce using rule 94 (aggr -> aggr_op ( id_or_qid ) asKeyword id .) + } reduce using rule 94 (aggr -> aggr_op ( id_or_qid ) asKeyword id .) + + +state 241 + + (123) merger_prefix_rule -> id ( qid_args ) . + + NOTKeyword reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + id reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + IPv4 reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + IPv6 reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) 
+ MAC reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + int reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + float reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + hex reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + string reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + } reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + EQ reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + LT reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + GT reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + LTEQ reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + GTEQ reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + ML reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + MG reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + inKeyword reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + notinKeyword reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + , reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + ) reduce using rule 123 (merger_prefix_rule -> id ( qid_args ) .) + + +state 242 + + (124) qid_args -> qid_arg , . qid_args + (124) qid_args -> . qid_arg , qid_args + (125) qid_args -> . qid_arg + (126) qid_args -> . + (127) qid_arg -> . qid + (128) qid_arg -> . IPv4 + (129) qid_arg -> . IPv6 + (130) qid_arg -> . CIDR + (131) qid_arg -> . MAC + (132) qid_arg -> . int + (133) qid_arg -> . float + (134) qid_arg -> . hex + (135) qid_arg -> . merger_prefix_rule + (136) qid_arg -> . string + (97) qid -> . id . id + (60) CIDR -> . IPv4 / int + (61) CIDR -> . IPv6 / int + (123) merger_prefix_rule -> . id ( qid_args ) + + ) reduce using rule 126 (qid_args -> .) 
+ IPv4 shift and go to state 192 + IPv6 shift and go to state 193 + MAC shift and go to state 191 + int shift and go to state 177 + float shift and go to state 179 + hex shift and go to state 186 + string shift and go to state 188 + id shift and go to state 229 + + qid shift and go to state 187 + qid_args shift and go to state 249 + merger_prefix_rule shift and go to state 227 + qid_arg shift and go to state 231 + CIDR shift and go to state 190 + +state 243 + + (141) opt_allen_delta -> deltaKeyword . time + (87) time -> . int sKeyword + (88) time -> . int msKeyword + (89) time -> . int minKeyword + + int shift and go to state 250 + + time shift and go to state 251 + +state 244 + + (140) allen_rule -> id allen_op id opt_allen_delta . + + ORKeyword reduce using rule 140 (allen_rule -> id allen_op id opt_allen_delta .) + NOTKeyword reduce using rule 140 (allen_rule -> id allen_op id opt_allen_delta .) + id reduce using rule 140 (allen_rule -> id allen_op id opt_allen_delta .) + IPv4 reduce using rule 140 (allen_rule -> id allen_op id opt_allen_delta .) + IPv6 reduce using rule 140 (allen_rule -> id allen_op id opt_allen_delta .) + MAC reduce using rule 140 (allen_rule -> id allen_op id opt_allen_delta .) + int reduce using rule 140 (allen_rule -> id allen_op id opt_allen_delta .) + float reduce using rule 140 (allen_rule -> id allen_op id opt_allen_delta .) + hex reduce using rule 140 (allen_rule -> id allen_op id opt_allen_delta .) + string reduce using rule 140 (allen_rule -> id allen_op id opt_allen_delta .) + } reduce using rule 140 (allen_rule -> id allen_op id opt_allen_delta .) + + +state 245 + + (138) opt_or_allen_rule -> ORKeyword allen_rule opt_or_allen_rule . + + NOTKeyword reduce using rule 138 (opt_or_allen_rule -> ORKeyword allen_rule opt_or_allen_rule .) + id reduce using rule 138 (opt_or_allen_rule -> ORKeyword allen_rule opt_or_allen_rule .) + IPv4 reduce using rule 138 (opt_or_allen_rule -> ORKeyword allen_rule opt_or_allen_rule .) 
+ IPv6 reduce using rule 138 (opt_or_allen_rule -> ORKeyword allen_rule opt_or_allen_rule .) + MAC reduce using rule 138 (opt_or_allen_rule -> ORKeyword allen_rule opt_or_allen_rule .) + int reduce using rule 138 (opt_or_allen_rule -> ORKeyword allen_rule opt_or_allen_rule .) + float reduce using rule 138 (opt_or_allen_rule -> ORKeyword allen_rule opt_or_allen_rule .) + hex reduce using rule 138 (opt_or_allen_rule -> ORKeyword allen_rule opt_or_allen_rule .) + string reduce using rule 138 (opt_or_allen_rule -> ORKeyword allen_rule opt_or_allen_rule .) + } reduce using rule 138 (opt_or_allen_rule -> ORKeyword allen_rule opt_or_allen_rule .) + + +state 246 + + (87) time -> int sKeyword . + + id reduce using rule 87 (time -> int sKeyword .) + } reduce using rule 87 (time -> int sKeyword .) + ORKeyword reduce using rule 87 (time -> int sKeyword .) + NOTKeyword reduce using rule 87 (time -> int sKeyword .) + IPv4 reduce using rule 87 (time -> int sKeyword .) + IPv6 reduce using rule 87 (time -> int sKeyword .) + MAC reduce using rule 87 (time -> int sKeyword .) + int reduce using rule 87 (time -> int sKeyword .) + float reduce using rule 87 (time -> int sKeyword .) + hex reduce using rule 87 (time -> int sKeyword .) + string reduce using rule 87 (time -> int sKeyword .) + + +state 247 + + (88) time -> int msKeyword . + + id reduce using rule 88 (time -> int msKeyword .) + } reduce using rule 88 (time -> int msKeyword .) + ORKeyword reduce using rule 88 (time -> int msKeyword .) + NOTKeyword reduce using rule 88 (time -> int msKeyword .) + IPv4 reduce using rule 88 (time -> int msKeyword .) + IPv6 reduce using rule 88 (time -> int msKeyword .) + MAC reduce using rule 88 (time -> int msKeyword .) + int reduce using rule 88 (time -> int msKeyword .) + float reduce using rule 88 (time -> int msKeyword .) + hex reduce using rule 88 (time -> int msKeyword .) + string reduce using rule 88 (time -> int msKeyword .) + + +state 248 + + (89) time -> int minKeyword . 
+ + id reduce using rule 89 (time -> int minKeyword .) + } reduce using rule 89 (time -> int minKeyword .) + ORKeyword reduce using rule 89 (time -> int minKeyword .) + NOTKeyword reduce using rule 89 (time -> int minKeyword .) + IPv4 reduce using rule 89 (time -> int minKeyword .) + IPv6 reduce using rule 89 (time -> int minKeyword .) + MAC reduce using rule 89 (time -> int minKeyword .) + int reduce using rule 89 (time -> int minKeyword .) + float reduce using rule 89 (time -> int minKeyword .) + hex reduce using rule 89 (time -> int minKeyword .) + string reduce using rule 89 (time -> int minKeyword .) + + +state 249 + + (124) qid_args -> qid_arg , qid_args . + + ) reduce using rule 124 (qid_args -> qid_arg , qid_args .) + + +state 250 + + (87) time -> int . sKeyword + (88) time -> int . msKeyword + (89) time -> int . minKeyword + + sKeyword shift and go to state 246 + msKeyword shift and go to state 247 + minKeyword shift and go to state 248 + + +state 251 + + (141) opt_allen_delta -> deltaKeyword time . + + ORKeyword reduce using rule 141 (opt_allen_delta -> deltaKeyword time .) + NOTKeyword reduce using rule 141 (opt_allen_delta -> deltaKeyword time .) + id reduce using rule 141 (opt_allen_delta -> deltaKeyword time .) + IPv4 reduce using rule 141 (opt_allen_delta -> deltaKeyword time .) + IPv6 reduce using rule 141 (opt_allen_delta -> deltaKeyword time .) + MAC reduce using rule 141 (opt_allen_delta -> deltaKeyword time .) + int reduce using rule 141 (opt_allen_delta -> deltaKeyword time .) + float reduce using rule 141 (opt_allen_delta -> deltaKeyword time .) + hex reduce using rule 141 (opt_allen_delta -> deltaKeyword time .) + string reduce using rule 141 (opt_allen_delta -> deltaKeyword time .) + } reduce using rule 141 (opt_allen_delta -> deltaKeyword time .) 
+ diff --git a/parser.py b/parser.py new file mode 100644 index 0000000..823e358 --- /dev/null +++ b/parser.py @@ -0,0 +1,931 @@ +# -*- coding: utf-8 -*- +import ply.lex as lex +import ply.yacc as yacc +from statement import * +from ply.yacc import YaccError +import netaddr + + +class Lexer(object): + def __init__(self,**kwargs): + self.lexer = lex.lex(module=self, **kwargs) + + reserved = { + 'splitter' : 'splitterKeyword', + 'groupfilter' : 'groupFilterKeyword', + 'filter' : 'filterKeyword', + 'grouper' : 'grouperKeyword', + 'module' : 'moduleKeyword', + 'merger' : 'mergerKeyword', + 'export' : 'exportKeyword', + 'ungrouper' : 'ungrouperKeyword', + 'branches' : 'branchesKeyword', + 'branch' : 'branchKeyword', + 'aggregate' : 'aggregateKeyword', + 'as' : 'asKeyword', + 'min' : 'minKeyword', + 'max' : 'maxKeyword', + 'avg' : 'avgKeyword', + 'sum' : 'sumKeyword', + 'count' : 'countKeyword', + 'union' : 'unionKeyword', + 'in' : 'inKeyword', + 'notin' : 'notinKeyword', + 'OR' : 'ORKeyword', + 'NOT' : 'NOTKeyword', + 'bitOR': 'bitORKeyword', + 'bitAND' : 'bitANDKeyword', + 'm' : 'mKeyword', + 'mi' : 'miKeyword', + 'o' : 'oKeyword', + 'oi' : 'oiKeyword', + 's' : 'sKeyword', + 'si' : 'siKeyword', + 'd' : 'dKeyword', + 'di' : 'diKeyword', + 'f' : 'fKeyword', + 'fi' : 'fiKeyword', + 'eq' : 'eqKeyword', # prevent clash with = for match rules + 'delta': 'deltaKeyword', + 'rdelta' : 'rdeltaKeyword', + 'ms' : 'msKeyword' + } + + + def t_LTEQ(self, t): + r'<=' + t.value = 'LTEQ' + return t + + def t_GTEQ(self, t): + r'>=' + t.value = 'GTEQ' + return t + + def t_ML(self, t): + r'<<' + t.value = 'ML' + return t + + def t_MG(self, t): + r'>>' + t.value = 'MG' + return t + + def t_LT(self, t): + r'<' + t.value = 'LT' + return t + + def t_EQ(self, t): + r'=' + t.value = 'EQ' + return t + + def t_GT(self, t): + r'>' + t.value = 'GT' + return t + + + tokens = ['id', 'LT', 'EQ', 'GT', + 'LTEQ', 'GTEQ', 'ML', 'MG', + 'MAC', 'IPv4', 'IPv6', + 'int', 'float', 'hex', + 'string'] + 
list(reserved.values()) + + t_ignore = ' \t' + t_ignore_comment = r'\#.*' + + literals = "+-*/(){},." + + def t_string(self, t): + r'"[^"\\\r\n]*(?:\\.[^"\\\r\n]*)*"' + t.value = Arg("string", t.value[1:-1].replace("\\",''), t.value) + return t + + def t_IPv4(self, t): + r'\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}' + #the regex does include invalid IPs but they are + #checked later during conversion + try: + t.value =Arg("addr_IPv4", int(netaddr.IP(t.value)), t.value) + return t + except netaddr.AddrFormatError: + message = 'Bad IPv4 format %s at line %s' %(t.value, + t.lexer.lineno) + raise SyntaxError(message) + + def t_MAC(self, t): + r'([a-fA-F0-9]{2}[:\-]){5}[a-fA-F0-9]{2}' + try: + t.value = Arg("addr_MAC", int(netaddr.EUI(t.value)), t.value) + return t + except netaddr.AddrFormatError: + message = 'Bad MAC format %s at line %s' %(t.value, + t.lexer.lineno) + raise SyntaxError(message) + + def t_IPv6(self, t): + r'(::[0-9a-f]{1,4}[0-9a-f:]*)|([0-9a-f]:[0-9a-f:]*)' + # the regular expression is very genral, so this rule should be + # after the other address rules. + try: + t.value = Arg("addr_IPv6", int(netaddr.IP(t.value)), t.value) + return t + except netaddr.AddrFormatError: + message = 'Bad IPv6 format %s at line %s' %(t.value, + t.lexer.lineno) + raise SyntaxError(message) + + def t_float(self, t): + '[0-9]*\.[0-9]+([eE][+-]?[0-9]+)?' 
+ t.value = Arg("float", float(t.value), t.value) + return t + + def t_hex(self, t): + r'0[xX][0-9a-fA-F]+' + t.value = Arg("int", int(t.value, 0), t.value) + return t + + def t_int(self, t): + r'\d+' + t.value = Arg("int", int(t.value), t.value) + return t + #All the reserved words are matched in this rule + def t_id(self, t): + r'[a-zA-Z_][a-zA-Z_0-9]*' + # matches also keywords, so be careful + t.type = self.reserved.get(t.value,'id') # Check for reserved words + return t + + def t_newline(self, t): + r'\n+' + t.lexer.lineno += len(t.value) + + # Error handling rule + def t_error(self,t): + msg = "Illegal character '%s'" % t.value[0] + raise SyntaxError(msg) + + # Test it output + def test(self,data): + self.lexer.input(data) + while True: + tok = self.lexer.token() + if not tok: break + print tok + +class Parser(object): + # the tokens from the lexer class: + tokens = Lexer.tokens + + def __init__(self): + self.filters = [] + self.groupers = [] + self.splitter = None + self.group_filters = [] + self.mergers = [] + self.branch_names = set() + self.ungroupers = [] + self.branches = [] + self.input = None + self.outputs = [] + self.names = {} + self.lexer = Lexer().lexer + self.parser = yacc.yacc(module=self) + + def p_file(self,p): + '''file : pipeline_stage_1n''' +# for k, v in self.names.iteritems(): +# print k, v + + def p_pipeline_stage_1n(self,p): + 'pipeline_stage_1n : pipeline_stage pipeline_stage_1n' + # add a name mapping: + try: + # branch statements dont have names + # so we skip them with try/except + self.names[p[1].name] = p[1] + except AttributeError: + pass + + def p_pipeline_stage_end(self,p): + 'pipeline_stage_1n :' + + def p_pipeline_stage(self,p): + ''' + pipeline_stage : splitter + | filter + | composite_filter + | branch + | ungrouper + | grouper + | group_filter + | merger + ''' + + p[0] = p[1] + + def p_splitter(self,p): + ''' + splitter : splitterKeyword id '{' '}' + ''' + p[0] = Splitter(p[2], p.lineno(2)) + if self.splitter != None: + 
raise SyntaxError( + "More than one splitter defined in file at line %s",p.lineno(2)) + + self.splitter = p[0] + + def p_filter(self,p): + ''' + filter : filterKeyword id '{' filter_rule_1n '}' + ''' + # Note that p[4] is a list of lists of rules. + # If the list has one element the rule is simple. + # If the rule has more than one element, the + # rule is OR-ed of all the rules in the list + p[0] = Filter(p[2], p.lineno(2), p[4]) + self.filters.append(p[0]) + + + def p_composite_filter(self, p): + ''' + composite_filter : filterKeyword id '{' filter_ref_rule_1n '}' + ''' + # Note that p[4] is a list of lists of rules. + # If the list has one element the rule is simple. + # If the rule has more than one element, the + # rule is OR-ed of all the rules in the list + p[0] = Filter(p[2], p.lineno(2), p[4]) + self.filters.append(p[0]) + + def p_group_filter(self, p): + ''' + group_filter : groupFilterKeyword id '{' filter_rule_1n '}' + ''' + # Note that p[4] is a list of lists of rules. + # If the list has one element the rule is simple. 
+ # If the rule has more than one element, the + # rule is OR-ed of all the rules in the list + p[0] = Filter(p[2], p.lineno(2), p[4]) + self.group_filters.append(p[0]) + + def p_filter_rule_1n(self,p): + 'filter_rule_1n : filter_rule filter_rule_1n' + p[2].extend([p[1]]) + p[0] = p[2] + + def p_filter_rule_0(self,p): + 'filter_rule_1n :' + p[0] = [] + + def p_filter_rule(self,p): + ''' + filter_rule : or_rule + ''' + p[0] = p[1] + + def p_filter_ref_rule_1n(self,p): + 'filter_ref_rule_1n : filter_ref_rule filter_ref_rule_1n' + p[2].extend([p[1]]) + p[0] = p[2] + + def p_filter_ref_rule_0(self,p): + 'filter_ref_rule_1n : filter_ref_rule' + p[0] = [p[1]] + + def p_filter_ref_rule(self,p): + ''' + filter_ref_rule : or_id + ''' + p[0] = p[1] + + def p_or_id(self, p): + 'or_id : not_id opt_or_id' + p[1].extend(p[2]) + p[0] = p[1] + + def p_opt_or_id(self, p): + ''' + opt_or_id : ORKeyword not_id opt_or_id + ''' + p[2].extend(p[3]) + p[0] = p[2] + + def p_opt_or_id_end(self, p): + 'opt_or_id :' + p[0] = [] + + def p_not_id(self, p): + ''' + not_id : NOTKeyword id + | id + ''' + try: + p[0] = [FilterRef(p[2], p.lineno(2), True)] + except IndexError: + p[0] = [FilterRef(p[1], p.lineno(1))] + + def p_or_optrule(self,p): + 'or_rule : rule_or_not opt_rule' + if len(p[2]) > 0: + ors = [p[1]] + ors.extend(p[2]) + p[0] = ors + else: + p[0] = [p[1]] + + def p_or_rule(self, p): + 'opt_rule : ORKeyword rule_or_not opt_rule' + res = [p[2]] + res.extend(p[3]) + p[0] = res + + def p_term_opt_rule(self,p): + 'opt_rule :' + p[0] = [] + + def p_rule_or_not(self, p): + ''' + rule_or_not : rule + | NOTKeyword rule + ''' + try: + p[2].NOT = True + p[0] = p[2] + except IndexError: + p[0] = p[1] + + def p_rule(self,p): + ''' + rule : infix_rule + | prefix_rule + ''' + p[0] = p[1] + + def p_infix_rule(self,p): + 'infix_rule : arg op arg' + p[1].extend(p[3]) # concatenate args to get [arg, arg] + # for some unknown reason p.lineno(2) does not work in this production + # so p[2] is (op, lineno) 
+ p[0] = Rule(p[2][0], p[2][1], p[1]) # (op, line, args) From filter.py + + def p_op(self, p): + ''' + op : EQ + | LT + | GT + | LTEQ + | GTEQ + | ML + | MG + | inKeyword + | notinKeyword + ''' + p[0] = (p[1], p.lineno(1)) + + def p_rule_prefix(self,p): + ''' + prefix_rule : id '(' args ')' + | bitANDKeyword '(' args ')' + | bitORKeyword '(' args ')' + ''' + p[0] = Rule(p[1], p.lineno(1), p[3]) + + def p_args(self,p): + ''' + args : arg ',' args + ''' + p[0] = p[1] + p[0].extend(p[3]) # concatenate the rest of the args to arg + + def p_args_more(self,p): + 'args : arg' + p[0] = p[1] + + def p_no_args(self, p): + 'args :' + p[0] = [] + + def p_arg(self, p): + ''' + arg : id + | IPv4 + | IPv6 + | CIDR + | MAC + | int + | float + | hex + | prefix_rule + | string + ''' + if type(p[1]) is type("string"): + p[1] = Field(p[1]) # Was defined in filter.py, but the definition was commented out. + p[0] = [p[1]] # list of one element for easy [].extend later + + def p_cidr(self, p): + ''' + CIDR : IPv4 '/' int + | IPv6 '/' int + ''' + p[0] = Rule('cidr_mask', p[1], p[3]) + + def p_start_branch(self, p): + ''' + branch : id arrow mid_branch + ''' + br = [BranchNode(p[1], p.lineno(1))] # In statement.py + br.extend(p[3]) + p[0] = br + self.branches.append(p[0]) + + def p_input_branch(self, p): + ''' + branch : string arrow mid_branch + ''' + if self.input != None: + raise SyntaxError("More than one input defined in file at line %s", + p.lineno(1)) + self.input = Input(p[1].value, p.lineno(1)) + br = [self.input] + br.extend(p[3]) + p[0] = br + self.branches.append(p[0]) + + def p_split_branch(self, p): + ''' + branch : id branchKeyword mid_branch + ''' + br = [BranchNode(p[1], p.lineno(1))] + p[3][0] = Branch(p[3][0].name, p[3][0].line) + br.extend(p[3]) + p[0] = br + self.branches.append(p[0]) + + def p_mid_branch(self, p): + ''' + mid_branch : id arrow mid_branch + ''' + br = [BranchNode(p[1], p.lineno(1))] + br.extend(p[3]) + p[0] = br + + + def p_mid_branch_terminate(self, 
p): + ''' + mid_branch : end_branch + ''' + p[0] = p[1] + + def p_end_branch(self, p): + 'end_branch : id' + p[0] = [BranchNode(p[1], p.lineno(1))] + + def p_output_branch(self, p): + 'end_branch : string' + out = Output(p[1].value, p.lineno(1)) + self.outputs.append(out) + p[0] = [out] + + + def p_arrow(self, p): + """arrow : "-" GT""" + pass + + def p_ungrouper(self, p): + ''' + ungrouper : ungrouperKeyword id '{' '}' + ''' + p[0] = Ungrouper(p[2], p.lineno(2)) + self.ungroupers.append(p[0]) + + def p_grouper(self, p): + "grouper : grouperKeyword id '{' module1_n aggregate '}'" + p[0] = Grouper(p[2], p.lineno(2), p[4], p[5]) + # insert aggregation of record ids (needed for ungrouping later) + p[0].aggr.insert(0,(Rule('union', p.lineno(2), [Field('rec_id'), + 'records']))) + p[0].aggr.insert(0,(Rule('min', p.lineno(2), [Field('stime'), + 'stime']))) + p[0].aggr.insert(0,(Rule('max', p.lineno(2), [Field('etime'), + 'etime']))) + self.groupers.append(p[0]) + + def p_module1_n(self, p): + 'module1_n : module module1_n' + p[1].extend(p[2]) + p[0] = p[1] + + def p_module0(self, p): + 'module1_n :' + p[0] = [] + + def p_module(self, p): + "module : moduleKeyword id '{' grouper_rule1_n '}'" + p[0] = [Module(p[2], p.lineno(2), p[4])] + + def p_grouper_rule1_n(self, p): + 'grouper_rule1_n : grouper_rule grouper_rule1_n' + p[1].extend(p[2]) + p[0] = p[1] + + def p_grouper_rule0(self, p): + 'grouper_rule1_n :' + p[0] = [] + + def p_grouper_rule(self, p): + 'grouper_rule : id grouper_op id' + p[0] = [[GrouperRule(p[2], p.lineno(2), [Field(p[1]), Field(p[3]), + None, False])]] + + def p_grouper_rule_delta(self, p): + ''' + grouper_rule : id grouper_op id deltaKeyword delta_arg + ''' + p[0] = [[GrouperRule(p[2], p.lineno(2), [Field(p[1]), Field(p[3]), + p[5], False])]] + + def p_grouper_rule_rel_delta(self, p): + ''' + grouper_rule : id grouper_op id rdeltaKeyword delta_arg + ''' + p[0] = [[GrouperRule(p[2], p.lineno(2), [Field(p[1]), Field(p[3]), + p[5], True])]] + + def 
p_grouper_op(self, p): + ''' + grouper_op : EQ + | LT + | GT + | GTEQ + | LTEQ + ''' + p[0] = p[1] + def p_delta_arg(self, p): + ''' + delta_arg : time + | int + ''' + p[0] = p[1] + + def p_time(self, p): + ''' + time : int sKeyword + | int msKeyword + | int minKeyword + ''' + # the number should be in ms: + if p[2] == 's': + p[1].value = p[1].value * 1000 + if p[2] == 'min': + p[1].value = p[1].value * 60 * 1000 + p[0] = p[1] + + def p_aggregate(self, p): + 'aggregate : aggregateKeyword aggr1_n' + for aggr in p[2]: + if aggr.line == 0: + aggr.line = p.lineno(1) + p[0] = p[2] + + def p_aggr1_n(self, p): + 'aggr1_n : aggr opt_aggr' + p[1].extend(p[2]) + p[0] = p[1] + + def p_opt_aggr(self, p): + "opt_aggr : ',' aggr opt_aggr" + p[2].extend(p[3]) + p[0] = p[2] + + def p_opt_aggr_end(self, p): + 'opt_aggr :' + p[0] = [] + + def p_aggr(self, p): + "aggr : aggr_op '(' id_or_qid ')' asKeyword id" + args = [Field(p[3]), p[6]] # [id_or_qid, id, aggr_op] + p[0] = [Rule(p[1], p.lineno(4), args)] + + def p_simple_agg(self, p): + 'aggr : id_or_qid asKeyword id' + args = [Field(p[1]), p[3]] # [qid, id] + p[0] = [Rule('last', p.lineno(2), args)] + + def p_simple_agg_same_name(self, p): + 'aggr : id_or_qid' + args = [Field(p[1]), p[1]] # [qid, id] + p[0] = [Rule('last', p.lineno(1), args)] + + def p_qid(self, p): + ''' + qid : id '.' 
id + ''' + p[0] = p[1] + p[2] + p[3] + + def p_id_or_qid(self, p): + ''' + id_or_qid : id + | qid + ''' + p[0] = p[1] + + def p_aggr_op(self, p): + ''' + aggr_op : minKeyword + | maxKeyword + | sumKeyword + | avgKeyword + | unionKeyword + | countKeyword + | bitANDKeyword + | bitORKeyword + ''' + p[0] = p[1] + + def p_merger(self, p): + "merger : mergerKeyword id '{' merger_module1_n export '}'" + p[0] = Merger(p[2], p.lineno(2), p[4], p[5]) + self.mergers.append(p[0]) + + + def p_merger_module1_n(self, p): + 'merger_module1_n : merger_module merger_module1_n' + p[1].extend(p[2]) + p[0] = p[1] + + def p_merger_module0(self, p): + 'merger_module1_n : ' + p[0] = [] + + def p_merger_module(self, p): + """ + merger_module : moduleKeyword id '{' merger_branches merger_rule1_n '}' + """ + p[0] = [Module(p[2], p.lineno(2), p[5], p[4])] + + def p_merger_branches(self, p): + 'merger_branches : branchesKeyword branches1_n' + p[0] = p[2] + + def p_branches1_n(self, p): + """ + branches1_n : id ',' branches1_n + """ + p[0] = [p[1]] + p[0].extend(p[3]) + + def p_branches1(self, p): + ' branches1_n : id' + p[0] = [p[1]] + + def p_export(self, p): + 'export : exportKeyword id' + p[0] = p[2] + + def p_merger_rule1_n(self, p): + 'merger_rule1_n : merger_rule merger_rule1_n' + p[1].extend(p[2]) + p[0] = p[1] + + def p_merger_rule0(self,p): + 'merger_rule1_n :' + p[0] = [] + + def p_merger_rule(self, p): + ''' + merger_rule : merger_prefix_rule + | merger_infix_rule + ''' + p[0] = [[p[1]]] + + def p_not_merger_rule(self, p): + ''' + merger_rule : NOTKeyword merger_prefix_rule + | NOTKeyword merger_infix_rule + ''' + p[2].NOT = True + p[0] = [[p[2]]] + + def p_merger_infix_rule(self, p): + 'merger_infix_rule : qid_arg op qid_arg' + p[1].extend(p[3]) + p[0] = Rule(p[2][0], p[2][1], p[1]) + + def p_merger_prefix_rule(self,p): + ''' + merger_prefix_rule : id '(' qid_args ')' + ''' + p[0] = Rule(p[1], p.lineno(1), p[3]) + + def p_qid_args(self,p): + ''' + qid_args : qid_arg ',' qid_args + 
''' + p[0] = p[1] + p[0].extend(p[3]) # concatenate the rest of the args to arg + + def p__qid_args_more(self,p): + 'qid_args : qid_arg' + p[0] = p[1] + + def p_no_qid_args(self, p): + 'qid_args :' + p[0] = [] + + def p_qid_arg(self, p): + ''' + qid_arg : qid + | IPv4 + | IPv6 + | CIDR + | MAC + | int + | float + | hex + | merger_prefix_rule + | string + ''' + if type(p[1]) is type("string"): + p[1] = Field(p[1]) + p[0] = [p[1]] # list of one element for easy [].extend later + + def p_merger_rule_al_op(self, p): + 'merger_rule : allen_rule opt_or_allen_rule' + p[1].extend(p[2]) + p[0] = [p[1]] + + def p_opt_or_allen_rule(self, p): + 'opt_or_allen_rule : ORKeyword allen_rule opt_or_allen_rule' + p[2].extend(p[3]) + p[0] = p[2] + + def p_opt_op_rule_end(self, p): + 'opt_or_allen_rule : ' + p[0] = [] + + def p_allen_rule(self, p): + 'allen_rule : id allen_op id opt_allen_delta' + args = [Field(p[1]), Field(p[3])] + args.extend(p[4]) # add the delta time to [arg, arg] + p[0] = [AllenRule(p[2], p.lineno(1), args)] # (op, line, args) + + def p_opt_allen_delta(self, p): + ''' + opt_allen_delta : deltaKeyword time + ''' + p[0] = [p[2]] + + def p_no_allen_delta(self, p): + 'opt_allen_delta :' + p[0] = [] + + def p_allen_op(self, p): + ''' + allen_op : LT + | GT + | EQ + | mKeyword + | miKeyword + | oKeyword + | oiKeyword + | sKeyword + | siKeyword + | dKeyword + | diKeyword + | fKeyword + | fiKeyword + | eqKeyword + ''' + # for some strange reason upper level refuses to recognize lineno: + p[0] = p[1] + + def p_error(self, p): + msg ="Syntax error. Unexpected token " + msg +="%s (%s)"%(p.value, p.type) + msg += " at line %s"% self.lexer.lineno + raise SyntaxError(msg) + + def parse(self, text): + self.parser.parse(text, lexer=self.lexer) # parse method is called from ply.yacc + self.resolve_branches() + + def find_io_nodes(self): + ''' + Finds which branch nodes are inputs and which are outputs. + The rest of the branches are processing stages. 
+ ''' + + pass + + def check_branching(self): + pass + + def check_branch_nodes(self): + for b in self.branch_nodes.values(): + if not b.is_branch: + try: + node = self.names[b.name] + if len(b.inputs) == 0: + msg = "Node %s at line" % b.name + msg += " %s does not have input." % b.line + raise SyntaxError(msg) + if len(b.outputs) == 0: + msg = "Node %s at line" % b.name + msg += " %s does not have output." % b.line + raise SyntaxError(msg) + if len(b.inputs) > 1 and type(node) is not Merger: + msg = "Non-Merger node %s at line" % b.name + msg += " %s has more than one input." % b.line + raise SyntaxError(msg) + if len(b.outputs) > 1 and type(node) is not Splitter: + msg = "Non-Splitter node %s at line" % b.name + msg += " %s has more than one output." % b.line + raise SyntaxError(msg) + + except KeyError: + # check whether this is some middle node + if len(b.inputs) != 0 and len(b.outputs) !=0: + msg = "Node %s refferenced at line" % b.name + msg += " %s not defined" % b.line + raise SyntaxError(msg) + + #check whether the node name is actually parser string(Arg) + if type(b.name) is not Arg: + msg = "Node %s refferenced at line" % b.name + msg += " %s not defined" % b.line + raise SyntaxError(msg) + else: + if len(b.inputs) != 1 or len(b.outputs) != 1: + msg = "Branch Node %s at line" % b.name + msg += " %s must have 1 input and 1 output." 
% b.line + raise SyntaxError(msg) + + + + def resolve_branches(self): + noname_branchings = [] + for branch in self.branches: +# print branch +# print "" + br_name = False + br_index = 0 + for i, node in enumerate(branch): + if type(node) is BranchNode: + try: + branch[i] = self.names[node.name] + except KeyError: + msg = "Node %s refferenced at line" % node.name + msg += " %s not defined" % node.line + raise SyntaxError(msg) + if type(node) is Branch: + br_name = node.name + br_index = i + self.branch_names.add(br_name) + + if type(node) is Input and i != 0: + msg = "Input node %s at line" % node.name + msg += " %s should be at first posigion" % node.line + msg += " of branching statement" + raise SyntaxError(msg) + + if type(node) is Output and i != (len(branch) - 1): + msg = "Output node %s at line" % node.name + msg += " %s should be at position posigion" % node.line + msg += " of branching statement" + raise SyntaxError(msg) + + if br_name: + del(branch[br_index]) + for node in branch: + node.branches.add(br_name) + else: + noname_branchings.append(branch) + + # second iteration to fix the remaining node, which don't have branches + for branch in noname_branchings: + s = set() + for node in branch: + s.update(node.branches) + for node in branch: + node.branches.update(s) + + +class ParsedFile(object): + def __init__(self, filters, groupers, splitters, group_filters, + mergers, branches, ungroupers, input, output, names): + self.filters = filters + self.groupers = groupers + self.splitters = splitters + self.group_filters = group_filters + self.mergers = mergers + self.branches = branches + self.ungroupers = ungroupers + self.input = input + self.output = output + self.names = names + + diff --git a/parser.pyc b/parser.pyc new file mode 100644 index 0000000..c5fe8c3 Binary files /dev/null and b/parser.pyc differ diff --git a/parsetab.py b/parsetab.py new file mode 100644 index 0000000..5a85983 --- /dev/null +++ b/parsetab.py @@ -0,0 +1,185 @@ + +# parsetab.py 
+# This file is automatically generated. Do not edit. +_tabversion = '3.2' + +_lr_method = 'LALR' + +_lr_signature = '#\xf5F:\x8e\x0c\xf3A\xdcQx\xee\x88c\xc4I' + +_lr_action_items = {'bitANDKeyword':([38,43,50,51,52,53,54,57,59,60,62,65,67,68,69,70,71,78,81,85,87,88,89,90,91,92,93,94,95,96,97,98,100,108,109,134,136,137,142,143,144,154,156,157,158,159,161,],[48,48,-18,-55,-56,-32,-33,48,48,-57,-59,-53,-30,-54,-29,-51,-52,48,119,48,-41,-37,-39,-40,-38,-42,-36,-43,-35,48,48,-31,48,-27,48,-58,-50,-34,-29,-60,-61,119,-45,48,-44,-46,-28,]),'moduleKeyword':([31,41,46,73,164,199,],[44,75,44,75,-74,-111,]),'maxKeyword':([81,154,],[120,120,]),'GTEQ':([51,52,54,55,56,60,62,65,68,70,71,77,99,143,144,150,156,158,159,174,177,178,179,182,186,187,188,190,191,192,193,218,241,],[-55,-56,-58,89,-50,-57,-59,-53,-54,-51,-52,-50,-50,-60,-61,167,-45,-44,-46,-97,-132,-135,-133,89,-134,-127,-136,-130,-131,-128,-129,-135,-123,]),'int':([38,43,50,51,52,53,54,57,59,60,62,65,67,68,69,70,71,78,85,87,88,89,90,91,92,93,94,95,96,97,98,100,108,109,110,111,134,136,137,142,143,144,156,157,158,159,161,162,174,177,178,179,181,184,185,186,187,188,189,190,191,192,193,194,195,200,204,218,219,221,224,225,227,228,232,233,235,241,242,243,244,245,246,247,248,251,],[51,51,-18,-55,-56,-32,-33,51,51,-57,-59,-53,-30,-54,-29,-51,-52,51,51,-41,-37,-39,-40,-38,-42,-36,-43,-35,51,51,-31,51,-27,51,143,144,-58,-50,-34,-29,-60,-61,-45,51,-44,-46,-28,177,-97,-132,-118,-133,-119,177,177,-134,-127,-136,-139,-130,-131,-128,-129,-112,-114,177,177,-120,-121,-137,236,236,-135,-122,-142,-139,-113,-123,177,250,-140,-138,-87,-88,-89,-141,]),'-':([5,13,35,],[22,22,22,]),'float':([38,43,50,51,52,53,54,57,59,60,62,65,67,68,69,70,71,78,85,87,88,89,90,91,92,93,94,95,96,97,98,100,108,109,134,136,137,142,143,144,156,157,158,159,161,162,174,177,178,179,181,184,185,186,187,188,189,190,191,192,193,194,195,200,204,218,219,221,227,228,232,233,235,241,242,244,245,246,247,248,251,],[52,52,-18,-55,-56,-32,-33,52,52,-57,-59,-53,-30,-54,-29,-51,-5
2,52,52,-41,-37,-39,-40,-38,-42,-36,-43,-35,52,52,-31,52,-27,52,-58,-50,-34,-29,-60,-61,-45,52,-44,-46,-28,179,-97,-132,-118,-133,-119,179,179,-134,-127,-136,-139,-130,-131,-128,-129,-112,-114,179,179,-120,-121,-137,-135,-122,-142,-139,-113,-123,179,-140,-138,-87,-88,-89,-141,]),'oKeyword':([183,234,],[205,205,]),'inKeyword':([51,52,54,55,56,60,62,65,68,70,71,77,99,143,144,156,158,159,174,177,178,179,182,186,187,188,190,191,192,193,218,241,],[-55,-56,-58,92,-50,-57,-59,-53,-54,-51,-52,-50,-50,-60,-61,-45,-44,-46,-97,-132,-135,-133,92,-134,-127,-136,-130,-131,-128,-129,-135,-123,]),'dKeyword':([183,234,],[211,211,]),'avgKeyword':([81,154,],[127,127,]),'/':([70,71,192,193,],[110,111,110,111,]),'asKeyword':([122,123,124,174,197,],[152,-98,-99,-97,226,]),'countKeyword':([81,154,],[130,130,]),'ML':([51,52,54,55,56,60,62,65,68,70,71,77,99,143,144,156,158,159,174,177,178,179,182,186,187,188,190,191,192,193,218,241,],[-55,-56,-58,90,-50,-57,-59,-53,-54,-51,-52,-50,-50,-60,-61,-45,-44,-46,-97,-132,-135,-133,90,-134,-127,-136,-130,-131,-128,-129,-135,-123,]),'id':([0,1,2,4,6,7,8,9,10,11,14,15,16,17,18,19,23,24,27,32,33,34,35,36,37,38,40,43,44,47,50,51,52,53,54,56,57,59,60,61,62,63,65,66,67,68,69,70,71,72,75,76,78,81,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,102,104,105,106,107,108,109,113,116,117,132,134,136,137,140,141,142,143,144,146,149,151,152,153,154,156,157,158,159,160,161,162,163,166,167,168,169,170,171,174,177,178,179,181,184,185,186,187,188,189,190,191,192,193,194,195,196,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,218,219,221,222,223,226,227,228,232,233,235,236,237,238,239,241,242,244,245,246,247,248,251,],[5,5,-10,21,-8,-9,-11,25,26,-7,28,29,-4,30,-5,-6,35,35,35,-69,-66,-68,-67,-62,-64,56,-63,77,80,35,-18,-55,-56,-32,-33,-26,99,77,-57,104,-59,-21,-53,-24,-30,-54,-29,-51,-52,-12,115,-70,77,123,-65,136,-14,-41,-37,-39,-40,-38,-42,-36,-43,-35,136,136,-31,-25,136,140,-26,-13,104,-22,-27,77,145,-15,150,-71,-58,-50,-34,-25,-24,-29,-60,-6
1,-108,150,123,173,174,123,-45,136,-44,-46,-23,-28,183,195,-82,-83,196,-84,-81,-80,-97,-132,-118,-133,-119,183,220,-134,-127,-136,-139,-130,-131,-128,-129,-112,-114,-77,229,-155,-156,-144,229,-148,-143,-150,232,-147,-149,-152,-146,-151,-154,-145,-153,-120,-121,-137,234,195,240,-135,-122,-142,-139,-113,-86,-85,-79,-78,-123,229,-140,-138,-87,-88,-89,-141,]),'NOTKeyword':([38,43,50,51,52,53,54,56,59,60,61,62,63,65,66,67,68,69,70,71,98,99,104,106,107,108,109,134,136,137,140,141,142,143,144,156,158,159,160,161,162,174,177,178,179,181,184,186,187,188,189,190,191,192,193,194,195,218,219,221,227,228,232,233,235,241,244,245,246,247,248,251,],[57,78,-18,-55,-56,-32,-33,-26,78,-57,102,-59,-21,-53,-24,-30,-54,-29,-51,-52,-31,-25,-26,102,-22,-27,78,-58,-50,-34,-25,-24,-29,-60,-61,-45,-44,-46,-23,-28,185,-97,-132,-118,-133,-119,185,-134,-127,-136,-139,-130,-131,-128,-129,-112,-114,-120,-121,-137,-135,-122,-142,-139,-113,-123,-140,-138,-87,-88,-89,-141,]),'deltaKeyword':([196,232,],[225,243,]),'bitORKeyword':([38,43,50,51,52,53,54,57,59,60,62,65,67,68,69,70,71,78,81,85,87,88,89,90,91,92,93,94,95,96,97,98,100,108,109,134,136,137,142,143,144,154,156,157,158,159,161,],[58,58,-18,-55,-56,-32,-33,58,58,-57,-59,-53,-30,-54,-29,-51,-52,58,128,58,-41,-37,-39,-40,-38,-42,-36,-43,-35,58,58,-31,58,-27,58,-58,-50,-34,-29,-60,-61,128,-45,58,-44,-46,-28,]),')':([51,52,60,62,65,68,70,71,85,97,100,123,124,133,134,135,136,138,139,143,144,156,157,158,159,172,174,176,177,179,186,187,188,190,191,192,193,204,227,230,231,241,242,249,],[-55,-56,-57,-59,-53,-54,-51,-52,-49,-49,-49,-98,-99,156,-58,-48,-50,158,159,-60,-61,-45,-49,-44,-46,197,-97,-47,-132,-133,-134,-127,-136,-130,-131,-128,-129,-126,-135,241,-125,-123,-126,-124,]),'(':([48,56,58,77,99,118,119,120,121,125,126,127,128,130,136,183,220,229,],[85,97,100,97,97,151,-106,-101,-100,-102,-104,-103,-107,-105,97,204,204,204,]),'hex':([38,43,50,51,52,53,54,57,59,60,62,65,67,68,69,70,71,78,85,87,88,89,90,91,92,93,94,95,96,97,98,100,108,109,134,136,137,14
2,143,144,156,157,158,159,161,162,174,177,178,179,181,184,185,186,187,188,189,190,191,192,193,194,195,200,204,218,219,221,227,228,232,233,235,241,242,244,245,246,247,248,251,],[60,60,-18,-55,-56,-32,-33,60,60,-57,-59,-53,-30,-54,-29,-51,-52,60,60,-41,-37,-39,-40,-38,-42,-36,-43,-35,60,60,-31,60,-27,60,-58,-50,-34,-29,-60,-61,-45,60,-44,-46,-28,186,-97,-132,-118,-133,-119,186,186,-134,-127,-136,-139,-130,-131,-128,-129,-112,-114,186,186,-120,-121,-137,-135,-122,-142,-139,-113,-123,186,-140,-138,-87,-88,-89,-141,]),',':([51,52,60,62,65,68,70,71,122,123,124,129,134,135,136,143,144,156,158,159,173,174,175,177,179,186,187,188,190,191,192,193,195,227,231,240,241,],[-55,-56,-57,-59,-53,-54,-51,-52,-96,-98,-99,154,-58,157,-50,-60,-61,-45,-44,-46,-95,-97,154,-132,-133,-134,-127,-136,-130,-131,-128,-129,223,-135,242,-94,-123,]),'filterKeyword':([0,1,2,6,7,8,11,16,18,19,33,34,35,36,37,40,72,76,84,86,105,116,132,146,],[9,9,-10,-8,-9,-11,-7,-4,-5,-6,-66,-68,-67,-62,-64,-63,-12,-70,-65,-14,-13,-15,-71,-108,]),'grouperKeyword':([0,1,2,6,7,8,11,16,18,19,33,34,35,36,37,40,72,76,84,86,105,116,132,146,],[4,4,-10,-8,-9,-11,-7,-4,-5,-6,-66,-68,-67,-62,-64,-63,-12,-70,-65,-14,-13,-15,-71,-108,]),'.':([123,183,220,229,],[153,153,153,153,]),'LT':([51,52,54,55,56,60,62,65,68,70,71,77,99,143,144,150,156,158,159,174,177,178,179,182,183,186,187,188,190,191,192,193,218,234,241,],[-55,-56,-58,93,-50,-57,-59,-53,-54,-51,-52,-50,-50,-60,-61,170,-45,-44,-46,-97,-132,-135,-133,93,206,-134,-127,-136,-130,-131,-128,-129,-135,206,-123,]),'oiKeyword':([183,234,],[210,210,]),'unionKeyword':([81,154,],[126,126,]),'mKeyword':([183,234,],[212,212,]),'siKeyword':([183,234,],[213,213,]),'$end':([0,1,2,3,6,7,8,11,12,16,18,19,20,33,34,35,36,37,40,72,76,84,86,105,116,132,146,],[-3,-3,-10,0,-8,-9,-11,-7,-1,-4,-5,-6,-2,-66,-68,-67,-62,-64,-63,-12,-70,-65,-14,-13,-15,-71,-108,]),'fiKeyword':([183,234,],[201,201,]),'GT':([22,51,52,54,55,56,60,62,65,68,70,71,77,99,143,144,150,156,158,159,174,177,178,179,182,183,186,1
87,188,190,191,192,193,218,234,241,],[32,-55,-56,-58,88,-50,-57,-59,-53,-54,-51,-52,-50,-50,-60,-61,166,-45,-44,-46,-97,-132,-135,-133,88,203,-134,-127,-136,-130,-131,-128,-129,-135,203,-123,]),'msKeyword':([236,250,],[247,247,]),'string':([0,1,2,6,7,8,11,16,18,19,23,24,27,32,33,34,35,36,37,38,40,43,47,50,51,52,53,54,57,59,60,62,65,67,68,69,70,71,72,76,78,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,100,105,108,109,116,132,134,136,137,142,143,144,146,156,157,158,159,161,162,174,177,178,179,181,184,185,186,187,188,189,190,191,192,193,194,195,200,204,218,219,221,227,228,232,233,235,241,242,244,245,246,247,248,251,],[13,13,-10,-8,-9,-11,-7,-4,-5,-6,34,34,34,-69,-66,-68,-67,-62,-64,62,-63,62,34,-18,-55,-56,-32,-33,62,62,-57,-59,-53,-30,-54,-29,-51,-52,-12,-70,62,-65,62,-14,-41,-37,-39,-40,-38,-42,-36,-43,-35,62,62,-31,62,-13,-27,62,-15,-71,-58,-50,-34,-29,-60,-61,-108,-45,62,-44,-46,-28,188,-97,-132,-118,-133,-119,188,188,-134,-127,-136,-139,-130,-131,-128,-129,-112,-114,188,188,-120,-121,-137,-135,-122,-142,-139,-113,-123,188,-140,-138,-87,-88,-89,-141,]),'branchesKeyword':([147,],[163,]),'minKeyword':([81,154,236,250,],[121,121,248,248,]),'eqKeyword':([183,234,],[202,202,]),'miKeyword':([183,234,],[209,209,]),'ungrouperKeyword':([0,1,2,6,7,8,11,16,18,19,33,34,35,36,37,40,72,76,84,86,105,116,132,146,],[15,15,-10,-8,-9,-11,-7,-4,-5,-6,-66,-68,-67,-62,-64,-63,-12,-70,-65,-14,-13,-15,-71,-108,]),'sumKeyword':([81,154,],[125,125,]),'exportKeyword':([41,73,74,112,199,],[-110,-110,113,-109,-111,]),'fKeyword':([183,234,],[214,214,]),'EQ':([51,52,54,55,56,60,62,65,68,70,71,77,99,143,144,150,156,158,159,174,177,178,179,182,183,186,187,188,190,191,192,193,218,234,241,],[-55,-56,-58,95,-50,-57,-59,-53,-54,-51,-52,-50,-50,-60,-61,171,-45,-44,-46,-97,-132,-135,-133,95,215,-134,-127,-136,-130,-131,-128,-129,-135,215,-123,]),'ORKeyword':([51,52,53,54,56,60,62,65,66,67,68,69,70,71,98,99,104,134,136,137,140,141,142,143,144,156,158,159,189,232,233,244,246,247,248,251,],[-55,-56,-32,-33,
-26,-57,-59,-53,106,-30,-54,109,-51,-52,-31,-25,-26,-58,-50,-34,-25,106,109,-60,-61,-45,-44,-46,222,-142,222,-140,-87,-88,-89,-141,]),'MG':([51,52,54,55,56,60,62,65,68,70,71,77,99,143,144,156,158,159,174,177,178,179,182,186,187,188,190,191,192,193,218,241,],[-55,-56,-58,87,-50,-57,-59,-53,-54,-51,-52,-50,-50,-60,-61,-45,-44,-46,-97,-132,-135,-133,87,-134,-127,-136,-130,-131,-128,-129,-135,-123,]),'splitterKeyword':([0,1,2,6,7,8,11,16,18,19,33,34,35,36,37,40,72,76,84,86,105,116,132,146,],[10,10,-10,-8,-9,-11,-7,-4,-5,-6,-66,-68,-67,-62,-64,-63,-12,-70,-65,-14,-13,-15,-71,-108,]),'groupFilterKeyword':([0,1,2,6,7,8,11,16,18,19,33,34,35,36,37,40,72,76,84,86,105,116,132,146,],[17,17,-10,-8,-9,-11,-7,-4,-5,-6,-66,-68,-67,-62,-64,-63,-12,-70,-65,-14,-13,-15,-71,-108,]),'sKeyword':([183,234,236,250,],[207,207,246,246,]),'}':([38,39,42,43,49,50,51,52,53,54,56,59,60,61,62,63,64,65,66,67,68,69,70,71,79,82,98,99,101,103,104,107,108,114,117,122,123,124,129,131,134,136,137,140,141,142,143,144,145,148,149,155,156,158,159,160,161,162,165,173,174,175,177,178,179,180,181,184,186,187,188,189,190,191,192,193,194,195,196,198,217,218,219,221,227,228,232,233,235,236,237,238,239,240,241,244,245,246,247,248,251,],[-17,72,76,-17,86,-18,-55,-56,-32,-33,-26,-17,-57,-20,-59,-21,105,-53,-24,-30,-54,-29,-51,-52,116,132,-31,-25,-16,-19,-26,-22,-27,146,-76,-96,-98,-99,-93,-90,-58,-50,-34,-25,-24,-29,-60,-61,-115,164,-76,-91,-45,-44,-46,-23,-28,-117,-75,-95,-97,-93,-132,-118,-133,199,-119,-117,-134,-127,-136,-139,-130,-131,-128,-129,-112,-114,-77,-92,-116,-120,-121,-137,-135,-122,-142,-139,-113,-86,-85,-79,-78,-94,-123,-140,-138,-87,-88,-89,-141,]),'aggregateKeyword':([31,45,46,83,164,],[-73,81,-73,-72,-74,]),'rdeltaKeyword':([196,],[224,]),'MAC':([38,43,50,51,52,53,54,57,59,60,62,65,67,68,69,70,71,78,85,87,88,89,90,91,92,93,94,95,96,97,98,100,108,109,134,136,137,142,143,144,156,157,158,159,161,162,174,177,178,179,181,184,185,186,187,188,189,190,191,192,193,194,195,200,204,218,219,221,227,228,232,23
3,235,241,242,244,245,246,247,248,251,],[68,68,-18,-55,-56,-32,-33,68,68,-57,-59,-53,-30,-54,-29,-51,-52,68,68,-41,-37,-39,-40,-38,-42,-36,-43,-35,68,68,-31,68,-27,68,-58,-50,-34,-29,-60,-61,-45,68,-44,-46,-28,191,-97,-132,-118,-133,-119,191,191,-134,-127,-136,-139,-130,-131,-128,-129,-112,-114,191,191,-120,-121,-137,-135,-122,-142,-139,-113,-123,191,-140,-138,-87,-88,-89,-141,]),'IPv4':([38,43,50,51,52,53,54,57,59,60,62,65,67,68,69,70,71,78,85,87,88,89,90,91,92,93,94,95,96,97,98,100,108,109,134,136,137,142,143,144,156,157,158,159,161,162,174,177,178,179,181,184,185,186,187,188,189,190,191,192,193,194,195,200,204,218,219,221,227,228,232,233,235,241,242,244,245,246,247,248,251,],[70,70,-18,-55,-56,-32,-33,70,70,-57,-59,-53,-30,-54,-29,-51,-52,70,70,-41,-37,-39,-40,-38,-42,-36,-43,-35,70,70,-31,70,-27,70,-58,-50,-34,-29,-60,-61,-45,70,-44,-46,-28,192,-97,-132,-118,-133,-119,192,192,-134,-127,-136,-139,-130,-131,-128,-129,-112,-114,192,192,-120,-121,-137,-135,-122,-142,-139,-113,-123,192,-140,-138,-87,-88,-89,-141,]),'mergerKeyword':([0,1,2,6,7,8,11,16,18,19,33,34,35,36,37,40,72,76,84,86,105,116,132,146,],[14,14,-10,-8,-9,-11,-7,-4,-5,-6,-66,-68,-67,-62,-64,-63,-12,-70,-65,-14,-13,-15,-71,-108,]),'IPv6':([38,43,50,51,52,53,54,57,59,60,62,65,67,68,69,70,71,78,85,87,88,89,90,91,92,93,94,95,96,97,98,100,108,109,134,136,137,142,143,144,156,157,158,159,161,162,174,177,178,179,181,184,185,186,187,188,189,190,191,192,193,194,195,200,204,218,219,221,227,228,232,233,235,241,242,244,245,246,247,248,251,],[71,71,-18,-55,-56,-32,-33,71,71,-57,-59,-53,-30,-54,-29,-51,-52,71,71,-41,-37,-39,-40,-38,-42,-36,-43,-35,71,71,-31,71,-27,71,-58,-50,-34,-29,-60,-61,-45,71,-44,-46,-28,193,-97,-132,-118,-133,-119,193,193,-134,-127,-136,-139,-130,-131,-128,-129,-112,-114,193,193,-120,-121,-137,-135,-122,-142,-139,-113,-123,193,-140,-138,-87,-88,-89,-141,]),'{':([21,25,26,28,29,30,80,115,],[31,38,39,41,42,43,117,147,]),'diKeyword':([183,234,],[216,216,]),'branchKeyword':([5,],[24,]),'LTEQ':([51,
52,54,55,56,60,62,65,68,70,71,77,99,143,144,150,156,158,159,174,177,178,179,182,186,187,188,190,191,192,193,218,241,],[-55,-56,-58,91,-50,-57,-59,-53,-54,-51,-52,-50,-50,-60,-61,169,-45,-44,-46,-97,-132,-135,-133,91,-134,-127,-136,-130,-131,-128,-129,-135,-123,]),'notinKeyword':([51,52,54,55,56,60,62,65,68,70,71,77,99,143,144,156,158,159,174,177,178,179,182,186,187,188,190,191,192,193,218,241,],[-55,-56,-58,94,-50,-57,-59,-53,-54,-51,-52,-50,-50,-60,-61,-45,-44,-46,-97,-132,-135,-133,94,-134,-127,-136,-130,-131,-128,-129,-135,-123,]),} + +_lr_action = { } +for _k, _v in _lr_action_items.items(): + for _x,_y in zip(_v[0],_v[1]): + if not _x in _lr_action: _lr_action[_x] = { } + _lr_action[_x][_k] = _y +del _lr_action_items + +_lr_goto_items = {'aggr_op':([81,154,],[118,118,]),'allen_rule':([162,184,222,],[189,189,233,]),'grouper_rule1_n':([117,149,],[148,165,]),'filter_ref_rule_1n':([38,61,],[49,103,]),'grouper_rule':([117,149,],[149,149,]),'or_rule':([38,43,59,],[50,50,50,]),'qid':([81,151,154,162,184,185,200,204,242,],[124,124,124,187,187,187,187,187,187,]),'pipeline_stage':([0,1,],[1,1,]),'merger_rule1_n':([162,184,],[180,217,]),'module':([31,46,],[46,46,]),'end_branch':([23,24,27,47,],[33,33,33,33,]),'infix_rule':([38,43,57,59,78,109,],[53,53,53,53,53,53,]),'export':([74,],[114,]),'file':([0,],[3,]),'arg':([38,43,57,59,78,85,96,97,100,109,157,],[55,55,55,55,55,135,137,135,135,55,135,]),'delta_arg':([224,225,],[238,239,]),'merger_infix_rule':([162,184,185,],[181,181,219,]),'qid_arg':([162,184,185,200,204,242,],[182,182,182,228,231,231,]),'merger_rule':([162,184,],[184,184,]),'opt_rule':([69,142,],[108,161,]),'branches1_n':([163,223,],[194,235,]),'ungrouper':([0,1,],[6,6,]),'grouper':([0,1,],[7,7,]),'merger':([0,1,],[8,8,]),'opt_or_allen_rule':([189,233,],[221,245,]),'prefix_rule':([38,43,57,59,78,85,96,97,100,109,157,],[54,54,54,54,54,134,134,134,134,54,134,]),'branch':([0,1,],[11,11,]),'id_or_qid':([81,151,154,],[122,172,122,]),'pipeline_stage_1n':([0,1,],[12,20,
]),'merger_module1_n':([41,73,],[74,112,]),'filter_ref_rule':([38,61,],[61,61,]),'args':([85,97,100,157,],[133,138,139,176,]),'grouper_op':([150,],[168,]),'module1_n':([31,46,],[45,83,]),'or_id':([38,61,],[63,63,]),'opt_allen_delta':([232,],[244,]),'allen_op':([183,234,],[208,208,]),'group_filter':([0,1,],[2,2,]),'opt_or_id':([66,141,],[107,160,]),'aggregate':([45,],[82,]),'filter_rule_1n':([38,43,59,],[64,79,101,]),'CIDR':([38,43,57,59,78,85,96,97,100,109,157,162,184,185,200,204,242,],[65,65,65,65,65,65,65,65,65,65,65,190,190,190,190,190,190,]),'aggr1_n':([81,],[131,]),'opt_aggr':([129,175,],[155,198,]),'splitter':([0,1,],[16,16,]),'merger_branches':([147,],[162,]),'merger_module':([41,73,],[73,73,]),'qid_args':([204,242,],[230,249,]),'not_id':([38,61,106,],[66,66,141,]),'merger_prefix_rule':([162,184,185,200,204,242,],[178,178,218,227,227,227,]),'rule':([38,43,57,59,78,109,],[67,67,98,67,98,67,]),'mid_branch':([23,24,27,47,],[36,37,40,84,]),'filter':([0,1,],[18,18,]),'filter_rule':([38,43,59,],[59,59,59,]),'composite_filter':([0,1,],[19,19,]),'rule_or_not':([38,43,59,109,],[69,69,69,142,]),'time':([224,225,243,],[237,237,251,]),'arrow':([5,13,35,],[23,27,47,]),'aggr':([81,154,],[129,175,]),'op':([55,182,],[96,200,]),} + +_lr_goto = { } +for _k, _v in _lr_goto_items.items(): + for _x,_y in zip(_v[0],_v[1]): + if not _x in _lr_goto: _lr_goto[_x] = { } + _lr_goto[_x][_k] = _y +del _lr_goto_items +_lr_productions = [ + ("S' -> file","S'",1,None,None,None), + ('file -> pipeline_stage_1n','file',1,'p_file','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',198), + ('pipeline_stage_1n -> pipeline_stage pipeline_stage_1n','pipeline_stage_1n',2,'p_pipeline_stage_1n','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',203), + ('pipeline_stage_1n -> ','pipeline_stage_1n',0,'p_pipeline_stage_end','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',213), + ('pipeline_stage -> 
splitter','pipeline_stage',1,'p_pipeline_stage','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',217), + ('pipeline_stage -> filter','pipeline_stage',1,'p_pipeline_stage','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',218), + ('pipeline_stage -> composite_filter','pipeline_stage',1,'p_pipeline_stage','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',219), + ('pipeline_stage -> branch','pipeline_stage',1,'p_pipeline_stage','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',220), + ('pipeline_stage -> ungrouper','pipeline_stage',1,'p_pipeline_stage','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',221), + ('pipeline_stage -> grouper','pipeline_stage',1,'p_pipeline_stage','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',222), + ('pipeline_stage -> group_filter','pipeline_stage',1,'p_pipeline_stage','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',223), + ('pipeline_stage -> merger','pipeline_stage',1,'p_pipeline_stage','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',224), + ('splitter -> splitterKeyword id { }','splitter',4,'p_splitter','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',231), + ('filter -> filterKeyword id { filter_rule_1n }','filter',5,'p_filter','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',242), + ('composite_filter -> filterKeyword id { filter_ref_rule_1n }','composite_filter',5,'p_composite_filter','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',254), + ('group_filter -> groupFilterKeyword id { filter_rule_1n }','group_filter',5,'p_group_filter','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',265), + ('filter_rule_1n -> filter_rule filter_rule_1n','filter_rule_1n',2,'p_filter_rule_1n','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',275), + ('filter_rule_1n -> 
','filter_rule_1n',0,'p_filter_rule_0','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',280), + ('filter_rule -> or_rule','filter_rule',1,'p_filter_rule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',285), + ('filter_ref_rule_1n -> filter_ref_rule filter_ref_rule_1n','filter_ref_rule_1n',2,'p_filter_ref_rule_1n','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',290), + ('filter_ref_rule_1n -> filter_ref_rule','filter_ref_rule_1n',1,'p_filter_ref_rule_0','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',295), + ('filter_ref_rule -> or_id','filter_ref_rule',1,'p_filter_ref_rule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',300), + ('or_id -> not_id opt_or_id','or_id',2,'p_or_id','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',305), + ('opt_or_id -> ORKeyword not_id opt_or_id','opt_or_id',3,'p_opt_or_id','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',311), + ('opt_or_id -> ','opt_or_id',0,'p_opt_or_id_end','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',317), + ('not_id -> NOTKeyword id','not_id',2,'p_not_id','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',322), + ('not_id -> id','not_id',1,'p_not_id','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',323), + ('or_rule -> rule_or_not opt_rule','or_rule',2,'p_or_optrule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',331), + ('opt_rule -> ORKeyword rule_or_not opt_rule','opt_rule',3,'p_or_rule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',340), + ('opt_rule -> ','opt_rule',0,'p_term_opt_rule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',346), + ('rule_or_not -> rule','rule_or_not',1,'p_rule_or_not','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',351), + ('rule_or_not -> NOTKeyword 
rule','rule_or_not',2,'p_rule_or_not','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',352), + ('rule -> infix_rule','rule',1,'p_rule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',362), + ('rule -> prefix_rule','rule',1,'p_rule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',363), + ('infix_rule -> arg op arg','infix_rule',3,'p_infix_rule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',368), + ('op -> EQ','op',1,'p_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',376), + ('op -> LT','op',1,'p_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',377), + ('op -> GT','op',1,'p_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',378), + ('op -> LTEQ','op',1,'p_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',379), + ('op -> GTEQ','op',1,'p_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',380), + ('op -> ML','op',1,'p_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',381), + ('op -> MG','op',1,'p_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',382), + ('op -> inKeyword','op',1,'p_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',383), + ('op -> notinKeyword','op',1,'p_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',384), + ('prefix_rule -> id ( args )','prefix_rule',4,'p_rule_prefix','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',390), + ('prefix_rule -> bitANDKeyword ( args )','prefix_rule',4,'p_rule_prefix','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',391), + ('prefix_rule -> bitORKeyword ( args )','prefix_rule',4,'p_rule_prefix','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',392), + ('args -> arg , args','args',3,'p_args','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',398), + ('args -> 
arg','args',1,'p_args_more','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',404), + ('args -> ','args',0,'p_no_args','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',408), + ('arg -> id','arg',1,'p_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',413), + ('arg -> IPv4','arg',1,'p_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',414), + ('arg -> IPv6','arg',1,'p_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',415), + ('arg -> CIDR','arg',1,'p_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',416), + ('arg -> MAC','arg',1,'p_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',417), + ('arg -> int','arg',1,'p_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',418), + ('arg -> float','arg',1,'p_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',419), + ('arg -> hex','arg',1,'p_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',420), + ('arg -> prefix_rule','arg',1,'p_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',421), + ('arg -> string','arg',1,'p_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',422), + ('CIDR -> IPv4 / int','CIDR',3,'p_cidr','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',430), + ('CIDR -> IPv6 / int','CIDR',3,'p_cidr','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',431), + ('branch -> id arrow mid_branch','branch',3,'p_start_branch','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',437), + ('branch -> string arrow mid_branch','branch',3,'p_input_branch','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',446), + ('branch -> id branchKeyword mid_branch','branch',3,'p_split_branch','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',459), + ('mid_branch -> id arrow 
mid_branch','mid_branch',3,'p_mid_branch','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',469), + ('mid_branch -> end_branch','mid_branch',1,'p_mid_branch_terminate','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',478), + ('end_branch -> id','end_branch',1,'p_end_branch','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',483), + ('end_branch -> string','end_branch',1,'p_output_branch','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',487), + ('arrow -> - GT','arrow',2,'p_arrow','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',494), + ('ungrouper -> ungrouperKeyword id { }','ungrouper',4,'p_ungrouper','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',499), + ('grouper -> grouperKeyword id { module1_n aggregate }','grouper',6,'p_grouper','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',505), + ('module1_n -> module module1_n','module1_n',2,'p_module1_n','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',517), + ('module1_n -> ','module1_n',0,'p_module0','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',522), + ('module -> moduleKeyword id { grouper_rule1_n }','module',5,'p_module','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',526), + ('grouper_rule1_n -> grouper_rule grouper_rule1_n','grouper_rule1_n',2,'p_grouper_rule1_n','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',530), + ('grouper_rule1_n -> ','grouper_rule1_n',0,'p_grouper_rule0','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',535), + ('grouper_rule -> id grouper_op id','grouper_rule',3,'p_grouper_rule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',539), + ('grouper_rule -> id grouper_op id deltaKeyword delta_arg','grouper_rule',5,'p_grouper_rule_delta','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',545), + ('grouper_rule -> id grouper_op id rdeltaKeyword 
delta_arg','grouper_rule',5,'p_grouper_rule_rel_delta','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',552), + ('grouper_op -> EQ','grouper_op',1,'p_grouper_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',559), + ('grouper_op -> LT','grouper_op',1,'p_grouper_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',560), + ('grouper_op -> GT','grouper_op',1,'p_grouper_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',561), + ('grouper_op -> GTEQ','grouper_op',1,'p_grouper_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',562), + ('grouper_op -> LTEQ','grouper_op',1,'p_grouper_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',563), + ('delta_arg -> time','delta_arg',1,'p_delta_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',568), + ('delta_arg -> int','delta_arg',1,'p_delta_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',569), + ('time -> int sKeyword','time',2,'p_time','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',575), + ('time -> int msKeyword','time',2,'p_time','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',576), + ('time -> int minKeyword','time',2,'p_time','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',577), + ('aggregate -> aggregateKeyword aggr1_n','aggregate',2,'p_aggregate','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',587), + ('aggr1_n -> aggr opt_aggr','aggr1_n',2,'p_aggr1_n','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',594), + ('opt_aggr -> , aggr opt_aggr','opt_aggr',3,'p_opt_aggr','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',599), + ('opt_aggr -> ','opt_aggr',0,'p_opt_aggr_end','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',604), + ('aggr -> aggr_op ( id_or_qid ) asKeyword id','aggr',6,'p_aggr','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',608), + ('aggr 
-> id_or_qid asKeyword id','aggr',3,'p_simple_agg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',613), + ('aggr -> id_or_qid','aggr',1,'p_simple_agg_same_name','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',618), + ('qid -> id . id','qid',3,'p_qid','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',624), + ('id_or_qid -> id','id_or_qid',1,'p_id_or_qid','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',630), + ('id_or_qid -> qid','id_or_qid',1,'p_id_or_qid','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',631), + ('aggr_op -> minKeyword','aggr_op',1,'p_aggr_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',637), + ('aggr_op -> maxKeyword','aggr_op',1,'p_aggr_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',638), + ('aggr_op -> sumKeyword','aggr_op',1,'p_aggr_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',639), + ('aggr_op -> avgKeyword','aggr_op',1,'p_aggr_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',640), + ('aggr_op -> unionKeyword','aggr_op',1,'p_aggr_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',641), + ('aggr_op -> countKeyword','aggr_op',1,'p_aggr_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',642), + ('aggr_op -> bitANDKeyword','aggr_op',1,'p_aggr_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',643), + ('aggr_op -> bitORKeyword','aggr_op',1,'p_aggr_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',644), + ('merger -> mergerKeyword id { merger_module1_n export }','merger',6,'p_merger','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',649), + ('merger_module1_n -> merger_module merger_module1_n','merger_module1_n',2,'p_merger_module1_n','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',655), + ('merger_module1_n -> 
','merger_module1_n',0,'p_merger_module0','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',660), + ('merger_module -> moduleKeyword id { merger_branches merger_rule1_n }','merger_module',6,'p_merger_module','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',665), + ('merger_branches -> branchesKeyword branches1_n','merger_branches',2,'p_merger_branches','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',670), + ('branches1_n -> id , branches1_n','branches1_n',3,'p_branches1_n','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',675), + ('branches1_n -> id','branches1_n',1,'p_branches1','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',681), + ('export -> exportKeyword id','export',2,'p_export','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',685), + ('merger_rule1_n -> merger_rule merger_rule1_n','merger_rule1_n',2,'p_merger_rule1_n','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',689), + ('merger_rule1_n -> ','merger_rule1_n',0,'p_merger_rule0','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',694), + ('merger_rule -> merger_prefix_rule','merger_rule',1,'p_merger_rule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',699), + ('merger_rule -> merger_infix_rule','merger_rule',1,'p_merger_rule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',700), + ('merger_rule -> NOTKeyword merger_prefix_rule','merger_rule',2,'p_not_merger_rule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',706), + ('merger_rule -> NOTKeyword merger_infix_rule','merger_rule',2,'p_not_merger_rule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',707), + ('merger_infix_rule -> qid_arg op qid_arg','merger_infix_rule',3,'p_merger_infix_rule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',713), + ('merger_prefix_rule -> id ( qid_args 
)','merger_prefix_rule',4,'p_merger_prefix_rule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',719), + ('qid_args -> qid_arg , qid_args','qid_args',3,'p_qid_args','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',725), + ('qid_args -> qid_arg','qid_args',1,'p__qid_args_more','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',731), + ('qid_args -> ','qid_args',0,'p_no_qid_args','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',735), + ('qid_arg -> qid','qid_arg',1,'p_qid_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',740), + ('qid_arg -> IPv4','qid_arg',1,'p_qid_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',741), + ('qid_arg -> IPv6','qid_arg',1,'p_qid_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',742), + ('qid_arg -> CIDR','qid_arg',1,'p_qid_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',743), + ('qid_arg -> MAC','qid_arg',1,'p_qid_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',744), + ('qid_arg -> int','qid_arg',1,'p_qid_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',745), + ('qid_arg -> float','qid_arg',1,'p_qid_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',746), + ('qid_arg -> hex','qid_arg',1,'p_qid_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',747), + ('qid_arg -> merger_prefix_rule','qid_arg',1,'p_qid_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',748), + ('qid_arg -> string','qid_arg',1,'p_qid_arg','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',749), + ('merger_rule -> allen_rule opt_or_allen_rule','merger_rule',2,'p_merger_rule_al_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',756), + ('opt_or_allen_rule -> ORKeyword allen_rule 
opt_or_allen_rule','opt_or_allen_rule',3,'p_opt_or_allen_rule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',761), + ('opt_or_allen_rule -> ','opt_or_allen_rule',0,'p_opt_op_rule_end','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',766), + ('allen_rule -> id allen_op id opt_allen_delta','allen_rule',4,'p_allen_rule','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',770), + ('opt_allen_delta -> deltaKeyword time','opt_allen_delta',2,'p_opt_allen_delta','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',777), + ('opt_allen_delta -> ','opt_allen_delta',0,'p_no_allen_delta','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',782), + ('allen_op -> LT','allen_op',1,'p_allen_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',787), + ('allen_op -> GT','allen_op',1,'p_allen_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',788), + ('allen_op -> EQ','allen_op',1,'p_allen_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',789), + ('allen_op -> mKeyword','allen_op',1,'p_allen_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',790), + ('allen_op -> miKeyword','allen_op',1,'p_allen_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',791), + ('allen_op -> oKeyword','allen_op',1,'p_allen_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',792), + ('allen_op -> oiKeyword','allen_op',1,'p_allen_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',793), + ('allen_op -> sKeyword','allen_op',1,'p_allen_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',794), + ('allen_op -> siKeyword','allen_op',1,'p_allen_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',795), + ('allen_op -> dKeyword','allen_op',1,'p_allen_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',796), + ('allen_op -> 
diKeyword','allen_op',1,'p_allen_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',797), + ('allen_op -> fKeyword','allen_op',1,'p_allen_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',798), + ('allen_op -> fiKeyword','allen_op',1,'p_allen_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',799), + ('allen_op -> eqKeyword','allen_op',1,'p_allen_op','/home/melnikovkolya/classes/semester-3-project/flowy/parser.py',800), +] diff --git a/parsetab.pyc b/parsetab.pyc new file mode 100644 index 0000000..c71571e Binary files /dev/null and b/parsetab.pyc differ diff --git a/port-filter.flw b/port-filter.flw new file mode 100644 index 0000000..ca0af11 --- /dev/null +++ b/port-filter.flw @@ -0,0 +1,48 @@ +splitter S {} + +filter www_req { + dstport = 80 +} + +filter www_res { + dstport = 80 +} + +grouper g_www_req { + module g1 { + dstport = dstport + etime < stime rdelta 1s + } + aggregate srcip, dstip, sum(bytes) as bytes, count(rec_id) as n, + bitOR(tcp_flags) as flags, union(srcport) as srcports +} + +grouper g_www_res { + module g1 { + srcport = srcport + etime < stime rdelta 1s + } + aggregate srcip, dstip, sum(bytes) as bytes, count(rec_id) as n, + bitOR(tcp_flags) as flags, union(srcport) as srcports +} + + + +groupfilter ggf { + bitAND(flags, 0x13) = 0x13 +} + + +merger M { + module m1 { + branches A, B + } + export m1 +} + +ungrouper U {} + +"./netflow-trace.h5" -> S +S branch A -> www_req -> g_www_req -> ggf -> M +S branch B -> www_res -> g_www_res -> ggf -> M +M->U->"./ungroped.h5" diff --git a/ports.flw b/ports.flw new file mode 100644 index 0000000..3ec13fd --- /dev/null +++ b/ports.flw @@ -0,0 +1,46 @@ +splitter S {} + +filter www_req { + dstport = 443 OR dstport = 80 OR dstport = 8080 + unix_secs > 1259413200 + unix_secs < 1259445600 +} + +filter www_res { + unix_secs < 1259445600 + unix_secs > 1259413200 + srcport = 443 OR srcport = 80 OR srcport = 8080 +} + +grouper g_www_req { + module g1 { + } + 
aggregate bitOR(tcp_flags) as flags +} + +grouper g_www_res { + module g1 { + } + aggregate bitOR(tcp_flags) as flags +} + +groupfilter ggf { + bitAND(flags, 0x13) = 0x13 +} + +merger M { + module m1 { + branches B, A +# B.stime = 1259413200 AND B.etime = 1259445600 + A d B OR B d A +# B o A delta 32400s + } + export m1 +} + +ungrouper U {} + +"./h5ports.h5" -> S +S branch A -> www_req -> g_www_req -> ggf -> M +S branch B -> www_res -> g_www_res -> ggf -> M +M->U->"./portsungroped.h5" diff --git a/portsungroped.h5 b/portsungroped.h5 new file mode 100644 index 0000000..a8290cd Binary files /dev/null and b/portsungroped.h5 differ diff --git a/print_hdf_in_step.py b/print_hdf_in_step.py new file mode 100755 index 0000000..97e51ee --- /dev/null +++ b/print_hdf_in_step.py @@ -0,0 +1,18 @@ +#!/usr/bin/python +from record import RecordReader +from pytables import FlowRecordsTable +from itertools import izip +from optparse import OptionParser + +if __name__ == '__main__': + usage = 'usage: %prog [options] input files' + p = OptionParser(usage) + opts, arguments = p.parse_args() + + mg_readers = [RecordReader(FlowRecordsTable(f)) for f in arguments] + + for rec_tuple in izip(*mg_readers): + print "" + for r in rec_tuple: + print r + diff --git a/printhdf.py b/printhdf.py new file mode 100755 index 0000000..138eaf6 --- /dev/null +++ b/printhdf.py @@ -0,0 +1,21 @@ +#!/usr/bin/python +from optparse import OptionParser +import pytables +import record +import sys + +def printHDF(hdf_file): + r = pytables.FlowRecordsTable(hdf_file) + recordReader = record.RecordReader(r) + for rec in recordReader: + print rec + +if __name__ == "__main__": + usage = 'usage: %prog file_name.h5' + p = OptionParser(usage) + options, arguments = p.parse_args() + if len(arguments) != 1: + sys.stderr.write('Exactly one argument expected\n') + exit(1) + + printHDF(arguments[0]) \ No newline at end of file diff --git a/profiler.py b/profiler.py new file mode 100644 index 0000000..c019d0d --- /dev/null +++ 
b/profiler.py @@ -0,0 +1,98 @@ +from time import time +import threading +import sys +from collections import deque +try: + from resource import getrusage, RUSAGE_SELF +except ImportError: + RUSAGE_SELF = 0 + def getrusage(who=0): + return [0.0, 0.0] # on non-UNIX platforms cpu_time always 0.0 + +p_stats = None +p_start_time = None + +def profiler(frame, event, arg): + if event not in ('call','return'): return profiler + #### gather stats #### + rusage = getrusage(RUSAGE_SELF) + t_cpu = rusage[0] + rusage[1] # user time + system time + code = frame.f_code + fun = (code.co_name, code.co_filename, code.co_firstlineno) + #### get stack with functions entry stats #### + ct = threading.currentThread() + try: + p_stack = ct.p_stack + except AttributeError: + ct.p_stack = deque() + p_stack = ct.p_stack + #### handle call and return #### + if event == 'call': + p_stack.append((time(), t_cpu, fun)) + elif event == 'return': + try: + t,t_cpu_prev,f = p_stack.pop() + assert f == fun + except IndexError: # TODO investigate + t,t_cpu_prev,f = p_start_time, 0.0, None + call_cnt, t_sum, t_cpu_sum = p_stats.get(fun, (0, 0.0, 0.0)) + p_stats[fun] = (call_cnt+1, t_sum+time()-t, t_cpu_sum+t_cpu-t_cpu_prev) + return profiler + + +def profile_on(): + global p_stats, p_start_time + p_stats = {} + p_start_time = time() + threading.setprofile(profiler) + sys.setprofile(profiler) + + +def profile_off(): + threading.setprofile(None) + sys.setprofile(None) + +def get_profile_stats(): + """ + returns dict[function_tuple] -> stats_tuple + where + function_tuple = (function_name, filename, lineno) + stats_tuple = (call_cnt, real_time, cpu_time) + """ + return p_stats + + +#### EXAMPLE ################################################################## + +if __name__ == '__main__': + from time import sleep + from threading import Thread + import random + + def test_function(): + pass + + class T(Thread): + def __init__(self): + Thread.__init__(self) + def run(self): # takes about 5 seconds + for i 
in xrange(100): + self.test_method() + test_function() + def test_method(self): + sleep(random.random() / 10) + + profile_on() + ####################### + threads = [T() for i in xrange(3)] + for t in threads: + t.start() + for i in xrange(100): + test_function() + for t in threads: + t.join() + ####################### + profile_off() + + from pprint import pprint + pprint(get_profile_stats()) diff --git a/profiler.pyc b/profiler.pyc new file mode 100644 index 0000000..3570dbd Binary files /dev/null and b/profiler.pyc differ diff --git a/profiling-heavy-functions.txt b/profiling-heavy-functions.txt new file mode 100644 index 0000000..3f6505a --- /dev/null +++ b/profiling-heavy-functions.txt @@ -0,0 +1,885 @@ +/var/netflow/ft-data-fall09/sne-ft-data/2009/sneze/2009-11-29/ + 26521 records + 1683 records matched the http request + + + +deepcopy + +Splitter initiated +Parsing and validation finished: 0.31 +Started filtering +Finished filtering +Filters ready +Splitter time elapsed: 27.19 +Finished grouping branch A +Finished grouping branch B +Finished filtering groups for branch A +Finished filtering groups for branch B +Group filter time elapsed: 45.0 +Finished merging branches: ['B', 'A'] +Merger time elapsed: 162.3 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 168.99 +Closing remaining open files: ./flowy-run/GroupsB.h5... done ./flowy-run/GroupsA.h5... done + +real 2m49.129s +user 2m44.070s +sys 0m5.824s + +Splitter initiated +Parsing and validation finished: 0.33 +Started filtering +Finished filtering +Filters ready +Splitter time elapsed: 30.16 +Finished grouping branch B +Finished grouping branch A +Finished filtering groups for branch B +Finished filtering groups for branch A +Group filter time elapsed: 34.2 +Finished merging branches: ['B', 'A'] +Merger time elapsed: 138.3 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 143.71 +Closing remaining open files: ./flowy-run/GroupsB.h5... done ./flowy-run/GroupsA.h5... 
done + +real 2m24.193s +user 2m19.957s +sys 0m4.608s + + + +deep_copy + +Splitter initiated +Parsing and validation finished: 0.36 +Started filtering +Finished filtering +Filters ready +Splitter time elapsed: 24.02 +Finished grouping branch B +Finished grouping branch A +Finished filtering groups for branch B +Finished filtering groups for branch A +Group filter time elapsed: 32.74 +Finished merging branches: ['B', 'A'] +Merger time elapsed: 155.7 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 162.56 +Closing remaining open files: ./flowy-run/GroupsA.h5... done ./flowy-run/GroupsB.h5... done + +real 2m43.294s +user 2m38.782s +sys 0m4.628s + +Splitter initiated +Parsing and validation finished: 0.26 +Started filtering +Finished filtering +Filters ready +Splitter time elapsed: 24.8 +Finished grouping branch B +Finished grouping branch A +Finished filtering groups for branch A +Finished filtering groups for branch B +Group filter time elapsed: 34.95 +Finished merging branches: ['B', 'A'] +Merger time elapsed: 144.75 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 149.73 +Closing remaining open files: ./flowy-run/GroupsA.h5... done ./flowy-run/GroupsB.h5... done + +real 2m36.640s +user 2m27.385s +sys 0m3.508s + +Splitter initiated +Parsing and validation finished: 0.3 +Started filtering +Finished filtering +Filters ready +Splitter time elapsed: 24.2 +Finished grouping branch B + Finished grouping branch A +Finished filtering groups for branch A +Finished filtering groups for branch B +Group filter time elapsed: 31.15 +Finished merging branches: ['B', 'A'] +Merger time elapsed: 145.9 +Ungrouper U finished exectution +FINISHED! 
+Overall time elapsed: 151.19 + +real 2m31.325s +user 2m26.629s +sys 0m5.412s + +modified reset/deepcopy +(('reset', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 64), (26521, 11.015153884887695, 11.560714000001838)) + + + + +(('new_group', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 21), (1466, 6.5672850608825684, 5.3123339999998507)) +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 25), (1468, 775.12532043457031, 766.78390699999591)) +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 15), (3228, 155.0828640460968, 160.51002500000152)) +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/splitter.py', 37), (3229, 87.616034030914307, 89.193573000000356)) +(('append', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 118), (3490, 35.743690967559814, 30.529941999999664)) +(('notify', '/usr/lib/python2.6/threading.py', 270), (6570, 10.859287977218628, 10.72066600000062)) +(('_is_owned', '/usr/lib/python2.6/threading.py', 219), (6695, 9.4564809799194336, 9.1245670000004111)) +(('final_result', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 57), (26521, 5.4859673976898193, 5.0482840000003648)) + +(('reset', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 45), (26521, 85.135001659393311, 88.205508000023968)) + +(('_deepcopy_dict', '/usr/lib/python2.6/copy.py', 251), (26712, 73.298033714294434, 75.524687000011454)) +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/record.py', 129), (27270, 27.118208885192871, 27.781735000003209)) +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 72), (97059, 33.632721662521362, 30.013754000007793)) +(('read_row', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 99), (99360, 518.74268817901611, 468.40537100055235)) +(('iterrows', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 1441), (99377, 
118.15105223655701, 106.11463399998161)) +(('read_rows_list', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 105), (99797, 522.83437442779541, 472.12965100054475)) +(('read_rows_list', '/home/melnikovkolya/classes/semester-3-project/flowy/record.py', 133), (99797, 550.52120852470398, 497.50723100058826)) +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 23), (147484, 24.74915337562561, 21.317261000004237)) +(('deepcopy', '/usr/lib/python2.6/copy.py', 144), (187567, 161.90160441398621, 165.33823200019515)) +(('read', '/usr/local/lib/python2.6/dist-packages/tables/vlarray.py', 700), (195364, 274.85678458213806, 246.25141199899576)) +(('', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 102), (294714, 294.22120332717896, 264.55258099813909)) +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 178), (856942, 596.70967555046082, 576.32406800022113)) +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 101), (861729, 430.92800951004028, 418.1861820004529)) +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 126), (989422, 290.51547265052795, 272.90903400041935)) +(('idx2long', '/usr/local/lib/python2.6/dist-packages/tables/utils.py', 66), (784846, 75.146798133850098, 69.772329999996373)) +(('is_idx', '/usr/local/lib/python2.6/dist-packages/tables/utils.py', 44), (784846, 26.284930467605591, 21.873351000002572)) + + + + + +/var/netflow/ft-data-fall09/kur-ft-data/2009-11-17/ + 56992 records + 2438 records matched the http request + +With profiler off: +real 8m8.700s +user 7m47.945s +sys 0m12.909s + + +Splitter initiated +Parsing and validation finished: 1.29 +Started filtering +Finished filtering +Filters ready +Splitter time elapsed: 58.21 +Finished grouping branch B +Finished grouping branch A +Finished filtering groups for branch A +Finished filtering groups for branch B +Group filter time elapsed: 59.8 +Finished merging 
branches: ['B', 'A'] +Merger time elapsed: 471.27 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 480.68 + +56992 +root@melnikovkolya-laptop:/home/melnikovkolya/classes/semester-3-project/flowy# time python flowy.py http-download.flw +0.72 +Splitter initiated +Started filtering + Group filter time started: 0.29 +3955 +Filters ready +Splitter finished +Splitter time elapsed: 53.06 +Number of records in branch A 1985 +Number of records in branch B 2004 +Finished grouping branch A +Finished group-filtering for branch A +Finished grouping branch B +Finished group-filtering for branch B +Group filter threads joined: 57.68 +Finished merging branches: ['B', 'A'] +Merger time elapsed: 443.36 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 452.1 +Closing remaining open files: ./flowy-run/GroupsB.h5... done ./flowy-run/GroupsA.h5... done + +real 7m46.456s +user 7m21.036s +sys 0m11.921s + + +(('new_group', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 21), (1824, 9.5541517734527588, 9.8766150000006974)) +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 25), (1828, 1249.1410629749298, 1300.497268999989)) +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/splitter.py', 37), (3955, 59.615991353988647, 62.479928999999061)) +(('split', '/home/melnikovkolya/classes/semester-3-project/flowy/splitter.py', 17), (3955, 30.423548460006714, 32.126016000000902)) +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 15), (3956, 456.31715869903564, 475.12168400000229)) +(('get', '/usr/lib/python2.6/Queue.py', 150), (3957, 35.274902582168579, 37.742364999999495)) +(('append', '/usr/local/lib/python2.6/dist-packages/tables/vlarray.py', 452), (5486, 76.012235879898071, 76.39678599999479)) +(('append', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 118), (5785, 81.44921350479126, 81.341101000000435)) +(('notify', 
'/usr/lib/python2.6/threading.py', 270), (8002, 17.408251523971558, 17.825101000000359)) +(('_is_owned', '/usr/lib/python2.6/threading.py', 219), (8101, 14.244855642318726, 15.092936000000464)) +(('final_result', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 57), (56992, 15.892577886581421, 15.040958000006583)) + +(('reset', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 45), (56992, 255.76119065284729, 262.48040000008808)) + +(('_deepcopy_dict', '/usr/lib/python2.6/copy.py', 251), (57183, 218.50618243217468, 224.26205200008098)) +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 89), (58365, 30.709211587905884, 31.189945000012358)) +(('iterate_fixed_fields', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 93), (58365, 19.963983297348022, 19.749231000007512)) +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/record.py', 129), (58365, 86.714945554733276, 88.23755700004449)) +(('_deepcopy_list', '/usr/lib/python2.6/copy.py', 224), (114144, 72.901082038879395, 73.184596000045076)) +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 90), (117636, 47.137009859085083, 43.210651000023745)) +(('_deepcopy_atomic', '/usr/lib/python2.6/copy.py', 197), (171331, 14.566928386688232, 13.152824000005694)) +(('_keep_alive', '/usr/lib/python2.6/copy.py', 261), (343098, 47.557926893234253, 39.274455000023863)) +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 72), (343759, 89.168351411819458, 86.809352999718158)) +(('read_row', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 99), (347405, 1355.7759656906128, 1345.6080879980259)) +(('iterrows', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 1441), (347422, 306.37827634811401, 304.82301899932509)) +(('read_rows_list', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 105), (348171, 1369.7901601791382, 1360.4090329980108)) + 
+(('deepcopy', '/usr/lib/python2.6/copy.py', 144), (400864, 485.14781737327576, 489.78665900019996)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/record.py', 82), (408197, 79.613070487976074, 80.693067999662162)) +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 23), (527995, 64.410658597946167, 62.123842999773387)) +(('read', '/usr/local/lib/python2.6/dist-packages/tables/vlarray.py', 700), (689950, 714.14480590820312, 706.58424299669286)) +(('', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 102), (1037339, 765.8496515750885, 758.55947299578656)) +(('_processRangeRead', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 449), (1037372, 470.43238306045532, 463.84111299771757)) +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 101), (1380363, 747.47748589515686, 753.67501099601259)) +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 178), (1380426, 1028.9652721881866, 1053.8537989941979)) +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 126), (1740570, 498.78313732147217, 495.35881499854258)) +(('EQ', '/home/melnikovkolya/classes/semester-3-project/flowy/operators.py', 63), (2370745, 182.36606240272522, 156.70575899921459)) +(('idx2long', '/usr/local/lib/python2.6/dist-packages/tables/utils.py', 66), (2764694, 214.65504741668701, 203.63286399914659)) +(('is_idx', '/usr/local/lib/python2.6/dist-packages/tables/utils.py', 44), (2764694, 75.347645044326782, 63.899976999761293)) + + +(('reset', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 64), (56992, 31.726502895355225, 31.213908000036554)) +(('deep_copy', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 41), (56994, 15.406083345413208, 16.889049000018872)) + + +7 days of data +python ft2hdf.py /var/netflow/ft-data-fall09/sne-ft-data/2009/sneze/2009-12-0* netflow-trace.h5 + +246350 records in total +12394 records 
match the query + +profiling: +Splitter initiated +Parsing and validation finished: 2.22 +Started filtering +Finished filtering +Filters ready +Splitter time elapsed: 1130.1 +Finished grouping branch B +Finished filtering groups for branch B +Finished grouping branch A +Finished filtering groups for branch A +Group filter time elapsed: 2123.665408 +Finished merging branches: ['B', 'A'] +Merger time elapsed: -185.553776 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: -114.543776 + +no profiling: +Splitter initiated +Parsing and validation finished: 0.26 +Started filtering +Finished filtering +Filters ready +Splitter time elapsed: 320.43 +Finished grouping branch B +Finished grouping branch A +Finished filtering groups for branch B +Finished filtering groups for branch A +Group filter time elapsed: 922.42 +Finished merging branches: ['B', 'A'] +Merger time elapsed: 1039.122704 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 1074.252704 +Closing remaining open files: ./flowy-run/GroupsB.h5... done ./flowy-run/GroupsA.h5... done + + +start()real 90m16.511s +user 86m23.020s +sys 3m7.356s + +Splitter initiated +Parsing and validation finished: 0.31 +Started filtering +Finished filtering +Filters ready +Splitter time elapsed: 346.66 +Finished grouping branch B +Finished grouping branch A +Finished filtering groups for branch B +Finished filtering groups for branch A +Group filter time elapsed: 916.19 +Finished merging branches: ['B', 'A'] +Merger time elapsed: 1037.532704 +Ungrouper U finished exectution +FINISHED! 
+Overall time elapsed: 1073.552704 + +(('reset', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 64), (246349, 940.52704691886902, 994.15005099796895)) + + +(('reset', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 64), (246349, 111.18868279457092, 105.20649999988791)) +(('deep_copy', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 41), (246351, 61.105264902114868, 52.447237999959725)) +(('read_rows_list', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 105), (3155228, 13582.554839611053, 13318.368595361764)) +(('read_rows_list', '/home/melnikovkolya/classes/semester-3-project/flowy/record.py', 133), (3155228, 14223.106386899948, 13948.747855334786)) +(('read', '/usr/local/lib/python2.6/dist-packages/tables/vlarray.py', 700), (6280932, 6694.1691343784332, 6541.9808274548959)) +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 178), (30651429, 17337.516788959503, 17566.637794171394)) +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 101), (30677828, 12477.594463348389, 12583.8665639143)) +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 126), (35383022, 8230.0888061523438, 8037.6824171527333)) +(('EQ', '/home/melnikovkolya/classes/semester-3-project/flowy/operators.py', 63), (40143460, 2728.9575715065002, 2304.1001345953482)) + + +root@melnikovkolya-laptop:/home/melnikovkolya/classes/semester-3-project/flowy# time flow-cat /var/netflow/ft-data-fall09/kur-ft-data/2009-11-16/ | flow-print | wc -l +99925 + +root@melnikovkolya-laptop:/home/melnikovkolya/classes/semester-3-project/flowy# time python flowy.py http-download.flw0.77 +Splitter initiated +Started filtering +Group filter time started: 0.33 +7222 +Filters ready +Splitter finished +Splitter time elapsed: 100.03 +Number of records in branch B 3684 +Number of records in branch A 3644 +Finished grouping branch A +Finished group-filtering for branch A 
+Finished grouping branch B +Finished group-filtering for branch B +Group filter threads joined: 136.09 +Finished merging branches: ['B', 'A'] +Merger time elapsed: 960.34 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 974.11 +Closing remaining open files: ./flowy-run/GroupsA.h5... done ./flowy-run/GroupsB.h5... done + +real 16m39.728s +user 15m49.067s +sys 0m26.002s + + + + + + + + + +root@melnikovkolya-laptop:/home/melnikovkolya/classes/semester-3-project/flowy# python ft2hdf.py /var/netflow/ft-data-fall09/sne-ft-data/2009/sneze/2009-11-* netflow-trace.h5 + +298063 + +root@melnikovkolya-laptop:/home/melnikovkolya/classes/semester-3-project/flowy# time python flowy.py http-download.flw +0.84 +Splitter initiated +Started filtering +29448 +Filters ready +Splitter finished +Splitter time elapsed: 475.83 +Number of records in branch B 16666 +Number of records in branch A 16412 +Finished grouping branch B +Finished group-filtering for branch B +Finished grouping branch A +Finished group-filtering for branch A +Group filter threads joined: 1415.34 +Finished merging branches: ['B', 'A'] +Merger time elapsed: -1347.101888 = 11485 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: -1301.531888 = 11531 +Closing remaining open files: ./flowy-run/GroupsA.h5... done ./flowy-run/GroupsB.h5... done + +real 192m11.507s = 11531 +user 185m39.648s +sys 7m25.104s + + + +The following example shows how heavy is the influence of performing the match operations (and the consequences in case of a match being found) by having a different load for 2 different branches. +We can see that processing of Branch A, which has been matching records against a port number 443, has a running time of around 17 seconds, while the branch, which checked whether a record entry has a TCP protocol has taken around 90 seconds. 
The reason for a larger running time is that many more entries that match the prot=TCP requirement have been found, and it takes additional processing to index and to append the record to a group. + +Though each of the executions of the reset function takes on average 9-10 times more time than the match function, the number of the executions of the match function is at least 30 times greater than that of the reset function. + +After spot-profiling (running the multi-thread profiler on certain sections of the code only), I could verify that the time spent in executing the match calls of the grouper module was causing the most significant slow-down in all of the code, up to the merger module. Depending on the complexity of each of the match() calls, the execution time varied for the same number of function calls. The three match() calls from different classes form a nested chain, where one match() function relies on another match(). The heaviest (in terms of time per execution) of all three match functions is the top-most match(), which comes from the Group class of the grouper module. Besides relying on a double-nested match call from two other classes, it also +calculates the first and the last records of a newly-formed group, which is necessary for relative comparisons. 
+ +with an average time spent per cycle (including the profiler overhead) being: +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 193), (280246, 151.20158743858337, 154.237679000132)) + + +A very simple GrouperModule class match, with a single + + + def match(self, record, group): + for rule in self.rules: + if not rule.match(record, group): + return False + return True + +This was followed by the filtering operation + + +Splitter initiated +GF validation started at: 0.89 +GF Validation required: 0.09 +Parsing and validation finished: 0.32 +Started filtering + Grouping started at:Fitlering time started at: Grouping started at: 1.0 +1.0 +1.0 +Finished filtering +Filtering required: 16.87 +Filters ready +Splitter time elapsed: 17.11 +Finished grouping branch A +Grouping time required branch A 17.34 +Current time is: 18.34 +Finished filtering groups for branch A +Finished grouping branch B +Grouping time required branch B 90.08 +Current time is: 91.08 +Finished filtering groups for branch B +Group filter time elapsed: 90.41 +Finished merging branches: ['B', 'A'] +Merger time elapsed: 111.58 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 111.75 +Closing remaining open files: ./flowy-run/GroupsA.h5... done ./flowy-run/GroupsB.h5... done + +real 1m52.516s +user 1m50.411s +sys 0m2.136s + + +The performance of the group-filters differs significantly, depending on how many matching records have been found in the filtering stage. +I.e., a filter for the port 443 results in relatively few records, while the filter for a port number >30000 results in many record matching it. The matching records need to be processed and stored for further group-filters, where the group filters try to form groups from the matched records. An example of running a query, which identifies flows with a destination port 443 and a source port > 30000, is shown next. 
It can be seen, that group-filtering of branch B, which is responsible for filtering out the srcport > 30000 request has a much larger running time than that of branch A, which looks only for those few records with a destination port of 443. +Splitter initiated +Parsing and validation finished: 0.28 +Started filtering +Fitlering time started at: 0.98 +Finished filtering +Filtering required: 33.49 +Filters ready +Splitter time elapsed: 33.61 +Finished grouping branch A +Grouping time finished for branch A 40.49 +Finished filtering groups for branch A +Finished grouping branch B +Grouping time finished for branch B 228.46 +Finished filtering groups for branch B +Group filter time elapsed: 227.86 +Finished merging branches: ['B', 'A'] +Merger time elapsed: 252.77 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 253.31 +Closing remaining open files: ./flowy-run/GroupsB.h5... done ./flowy-run/GroupsA.h5... done + +real 4m11.671s +user 4m9.296s +sys 0m4.880s + + + +==================================HTTPS PROFILE=========================================== + + + + +A query that selects source and destination ports is defined as follows. 
We used that query to compare simple running times of different tools: + +splitter S {} + +filter www_tcp { + dstport = 443 +} + +filter www_port { + srcport = 443 + +} + +grouper g_www_tcp { + module g1 { + } + aggregate srcip, dstip, sum(bytes) as bytes, count(rec_id) as n, + bitOR(tcp_flags) as flags, union(srcport) as srcports +} + +grouper g_www_port { + module g1 { + } + aggregate srcip, dstip, sum(bytes) as bytes, count(rec_id) as n, + bitOR(tcp_flags) as flags, union(dstport) as dstports +} + +groupfilter ggf { + bitAND(flags, 0x13) = 0x13 +} + +merger M { + module m1 { + branches B, A + A m B delta 1440min + } + export m1 +} + +ungrouper U {} + +"./netflow-trace.h5" -> S +S branch A -> www_tcp -> g_www_tcp -> ggf -> M +S branch B -> www_port -> g_www_port -> ggf -> M +M->U->"./ungroped.h5" + +The same number of + +/var/netflow/ft-data-fall09/sne-ft-data/2009/sneze/2009-11-29/ + +26521 records in total +486 records match + +root@melnikovkolya-laptop:/home/melnikovkolya/classes/semester-3-project/flowy# time python flowy.py https-flows.flw +0.73 +Splitter initiated +Started filtering +486 +Filters ready +Splitter finished +Number of records in branch A 243 Number of records in branch B 243 + +Finished grouping branch A +Finished group-filtering for branch A +Finished grouping branch B +Finished group-filtering for branch B +Group filter threads joined +Finished merging branches: ['B', 'A'] +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 6.61 +Closing remaining open files: ./flowy-run/GroupsB.h5... done ./flowy-run/GroupsA.h5... done ./netflow-trace.h5... done ./netflow-trace.h5... done ./flowy-run/GroupsB-merged.h5... done ./flowy-run/GroupsB.h5... done ./flowy-run/GroupsA-merged.h5... done ./flowy-run/MergedM.h5... done ./flowy-run/MergedM.h5... done ./ungroped.h5... done ./flowy-run/GroupsA.h5... 
done + +real 0m14.245s +user 0m7.168s +sys 0m0.280s +root@melnikovkolya-laptop:/home/melnikovkolya/classes/semester-3-project/flowy# time python flowy.py https-flows.flw +0.81 +Splitter initiated +Started filtering +486 +Filters ready +Splitter finished +Number of records in branch B 243Number of records in branch A 243 + +Finished grouping branch A +Finished group-filtering for branch A +Finished grouping branch B +Finished group-filtering for branch B +Group filter threads joined +Finished merging branches: ['B', 'A'] +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 6.31 +Closing remaining open files: ./netflow-trace.h5... done ./flowy-run/GroupsA.h5... done ./flowy-run/MergedM.h5... done ./flowy-run/GroupsB-merged.h5... done ./flowy-run/GroupsB.h5... done ./flowy-run/GroupsA.h5... done ./ungroped.h5... done ./netflow-trace.h5... done ./flowy-run/MergedM.h5... done ./flowy-run/GroupsA-merged.h5... done ./flowy-run/GroupsB.h5... done + +real 0m9.051s +user 0m7.072s +sys 0m0.160s + +root@melnikovkolya-laptop:/home/melnikovkolya/classes/semester-3-project/flowy# time python flowy.py https-flows.flw +0.83 +Splitter initiated +Started filtering +Group filter time started: 0.23 +486 +Filters ready +Number of records in branch A 243 + Splitter finished +Splitter time elapsed: 6.1 +Finished grouping branch A +Finished group-filtering for branch A +Number of records in branch B 243 +Finished grouping branch B +Finished group-filtering for branch B +Group filter threads joined: 6.17 +Finished merging branches: ['B', 'A'] +Merger time elapsed: 6.23 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 7.36 +Closing remaining open files: ./flowy-run/GroupsB.h5... done ./netflow-trace.h5... done ./flowy-run/GroupsB.h5... done ./flowy-run/GroupsA.h5... done ./netflow-trace.h5... done ./flowy-run/GroupsB-merged.h5... done ./flowy-run/GroupsA.h5... done ./flowy-run/MergedM.h5... done ./flowy-run/GroupsA-merged.h5... done ./ungroped.h5... 
done ./flowy-run/MergedM.h5... done + +real 0m15.893s +user 0m7.440s +sys 0m0.868s + + + +Most frequent: + +(('final_result', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 99), (26521, 1.8366894721984863, 1.7001189999999156)) + +(('reset', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 83), (26521, 3.138737678527832, 3.0042079999998066)) + +(('deep_copy', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 58), (26523, 1.7581963539123535, 1.6681159999999338)) + +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 92), (26526, 3.3419792652130127, 3.0921969999998495)) + +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/record.py', 130), (26526, 9.8621282577514648, 9.6565820000015421)) + +(('iterate_fixed_fields', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 96), (26526, 1.9721605777740479, 1.7561189999999769)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/record.py', 82), (27015, 4.6438140869140625, 4.6482780000005732)) + +(('mask', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 90), (53042, 1.6173598766326904, 1.5800989999999153)) + +(('EQ', '/home/melnikovkolya/classes/semester-3-project/flowy/operators.py', 63), (53044, 1.4263303279876709, 1.1120729999999632)) + +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 134), (53046, 5.1699655055999756, 4.6562810000002663)) + + +Heaviest: + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/groupfilter_validator.py', 16), (1, 0.18725490570068359, 0.18801199999999962)) + +(('get_interval_records', '/home/melnikovkolya/classes/semester-3-project/flowy/timeindex.py', 57), (1, 0.2019498348236084, 0.20001300000000199)) + +(('pass_allen_indices_down', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 79), (1, 0.20258498191833496, 0.20001300000000199)) + +(('go', 
'/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 108), (1, 0.2162168025970459, 0.21201300000000245)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper_validator.py', 11), (1, 0.22698211669921875, 0.22401300000000002)) + +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 36), (4, 1.1266498565673828, 1.1920739999999945)) + +(('_form_master_re', '/usr/lib/pymodules/python2.6/ply/lex.py', 482), (1, 0.30334997177124023, 0.22801499999999986)) + +(('validate_rules', '/usr/lib/pymodules/python2.6/ply/lex.py', 723), (1, 0.33556008338928223, 0.31602000000000008)) + +(('validate_all', '/usr/lib/pymodules/python2.6/ply/lex.py', 567), (1, 0.33656787872314453, 0.31602000000000008)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/filter_validator.py', 10), (1, 0.37907099723815918, 0.3560230000000002)) + +(('go', '/home/melnikovkolya/classes/semester-3-project/flowy/groupfilter.py', 14), (2, 1.1871206760406494, 1.248076999999995)) + +(('create_impl', '/home/melnikovkolya/classes/semester-3-project/flowy/ungrouper_validator.py', 76), (1, 0.60985612869262695, 0.60803800000000052)) + +(('lex', '/usr/lib/pymodules/python2.6/ply/lex.py', 865), (1, 0.65552186965942383, 0.56003499999999995)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 9), (1, 0.6572871208190918, 0.56403499999999995)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/ungrouper_validator.py', 10), (1, 0.67348289489746094, 0.67204200000000114)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 182), (1, 0.71254801750183105, 0.6200389999999999)) + +(('go', '/home/melnikovkolya/classes/semester-3-project/flowy/ungrouper.py', 29), (1, 1.85223388671875, 1.8081130000000023)) + + + +/var/netflow/ft-data-fall09/kur-ft-data/2009-11-17/ + 56992 records + +root@melnikovkolya-laptop:/home/melnikovkolya/classes/semester-3-project/flowy# python 
ft2hdf.py /var/netflow/ft-data-fall09/kur-ft-data/2009-11-17/ netflow-trace.h5 +root@melnikovkolya-laptop:/home/melnikovkolya/classes/semester-3-project/flowy# time python flowy.py https-flows.flw +0.7 +Splitter initiated +Started filtering + Group filter time started: 0.27 +219 +Filters ready +Splitter finished +Splitter time elapsed: 13.2 +Number of records in branch A 158 +Finished grouping branch A +Finished group-filtering for branch A +Number of records in branch B 61 +Finished grouping branch B +Finished group-filtering for branch B +Group filter threads joined: 13.18 +Finished merging branches: ['B', 'A'] +Merger time elapsed: 13.23 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 13.83 +Closing remaining open files: ./netflow-trace.h5... done ./flowy-run/GroupsB.h5... done ./flowy-run/GroupsA.h5... done ./netflow-trace.h5... done ./flowy-run/GroupsB-merged.h5... done ./flowy-run/GroupsA.h5... done ./flowy-run/GroupsB.h5... done ./flowy-run/MergedM.h5... done ./flowy-run/GroupsA-merged.h5... done ./ungroped.h5... done ./flowy-run/MergedM.h5... done + +real 0m15.696s +user 0m13.653s +sys 0m1.004s +root@melnikovkolya-laptop:/home/melnikovkolya/classes/semester-3-project/flowy# python printhdf.py ungroped.h5 | wc -l +Closing remaining open files: ungroped.h5... 
done +219 + + + +root@melnikovkolya-laptop:/home/melnikovkolya/classes/semester-3-project/flowy# python ft2hdf.py /var/netflow/ft-data-fall09/kur-ft-data/2009-11-16/ netflow-trace.h5 + +99924 + +root@melnikovkolya-laptop:/home/melnikovkolya/classes/semester-3-project/flowy# time python flowy.py https-flows.flw +0.71 +Splitter initiated +Started filtering + Group filter time started: 0.27 +1434 +Filters ready +Splitter finished +Splitter time elapsed: 23.19 +Number of records in branch A 748 +Finished grouping branch A +Finished group-filtering for branch A +Number of records in branch B 686 +Finished grouping branch B +Finished group-filtering for branch B +Group filter threads joined: 23.23 +Finished merging branches: ['B', 'A'] +Merger time elapsed: 23.31 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 26.48 +Closing remaining open files: ./netflow-trace.h5... done ./flowy-run/GroupsB.h5... done ./flowy-run/GroupsB.h5... done ./flowy-run/GroupsA.h5... done ./netflow-trace.h5... done ./flowy-run/GroupsB-merged.h5... done ./flowy-run/GroupsA.h5... done ./flowy-run/MergedM.h5... done ./flowy-run/GroupsA-merged.h5... done ./ungroped.h5... done ./flowy-run/MergedM.h5... done + +real 0m28.767s +user 0m24.486s +sys 0m2.840s +root@melnikovkolya-laptop:/home/melnikovkolya/classes/semester-3-project/flowy# python printhdf.py ungroped.h5 | wc -l +Closing remaining open files: ungroped.h5... done +1434 + + + +root@melnikovkolya-laptop:/home/melnikovkolya/classes/semester-3-project/flowy# python ft2hdf.py /var/netflow/ft-data-fall09/sne-ft-data/2009/sneze/2009-11-* netflow-trace.h5 + +298063 + +root@melnikovkolya-laptop:/home/melnikovkolya/classes/semester-3-project/flowy# python printhdf.py ungroped.h5 | wc -l +Closing remaining open files: ungroped.h5... 
done +4087 + + +0.76 +Splitter initiated +Started filtering +4087 +Filters ready +Splitter finished +Group filter time started: 53.73 +Splitter time elapsed: 53.73 +Number of records in branch A 2041 +Finished grouping branch A +Finished group-filtering for branch A +Number of records in branch B 2046 +Finished grouping branch B +Finished group-filtering for branch B +Group filter threads joined: 54.37 +Finished merging branches: ['B', 'A'] +Merger time elapsed: 54.47 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 63.47 +Closing remaining open files: ./flowy-run/GroupsB-merged.h5... done ./netflow-trace.h5... done ./ungroped.h5... done ./flowy-run/MergedM.h5... done ./flowy-run/MergedM.h5... done ./netflow-trace.h5... done ./flowy-run/GroupsA.h5... done ./flowy-run/GroupsA.h5... done ./flowy-run/GroupsA-merged.h5... done ./flowy-run/GroupsB.h5... done ./flowy-run/GroupsB.h5... done + +real 1m8.146s +user 1m3.576s +sys 0m0.776s + + + + + + + + + +\begin{mytinylisting} +\begin{verbatim} +splitter S {} + +filter www_req { + dstport = 443 OR dstport = 80 OR dstport = 8080 + unix_secs > 1259413200 + unix_secs < 1259445600 +} + +filter www_res { + srcport = 443 OR srcport = 80 OR srcport = 8080 + unix_secs < 1259445600 + unix_secs > 1259413200 +} + +grouper g_www_req { + module g1 { + } + aggregate bitOR(tcp_flags) as flags +} + +grouper g_www_res { + module g1 { + } + aggregate bitOR(tcp_flags) as flags +} + +groupfilter ggf { + bitAND(flags, 0x13) = 0x13 +} + +merger M { + module m1 { + branches B, A + A d B OR B d A + } + export m1 +} + +ungrouper U {} + +"./h5ports.h5" -> S +S branch A -> www_req -> g_www_req -> ggf -> M +S branch B -> www_res -> g_www_res -> ggf -> M +M->U->"./portsungroped.h5" +\end{verbatim} +\end{mytinylisting} +% +Execution of that query +\begin{verbatim} +flowy# time python flowy.py ports.flw +0.83 +Splitter initiated +Started filtering +Group filter time started: 0.3 +1463 +Filters ready +Splitter finished +Splitter time 
elapsed: 7.12 +Number of records in branch B 1463 +Finished grouping branch B +Finished group-filtering for branch B + Number of records in branch A 1463 +Finished grouping branch A +Finished group-filtering for branch A +Group filter threads joined: 7.26 +Finished merging branches: ['B', 'A'] +Merger time elapsed: 7.26 +Ungrouper U finished exectution +FINISHED! +Overall time elapsed: 13.92 + +real 0m14.788s +user 0m13.969s +sys 0m0.900s +\end{verbatim} + + diff --git a/pytables.py b/pytables.py new file mode 100644 index 0000000..0afa740 --- /dev/null +++ b/pytables.py @@ -0,0 +1,383 @@ +import tables +import os.path +from itertools import izip +from math import ceil, floor + + +default_fields = [] +default_types = [] + +def create_flowtools_value_reader(fields): + def get_fields(record): + x = tuple(getattr(record,attr) for attr in fields) + return x + + return get_fields + +#class RecordsTable(object): +# def __init__(self, file_path, tree_path, id_size): +# if os.path.exists(file_path): +# self.file_path = file_path +# else: +# raise IOError("File %s cannot be accessed."%file_path) +# self.tree_path = tree_path +# # open for reading +# self.file = tables.openFile(self.file_path, mode="r+") +# try: +# #try to open the table as specified by path if node does not +# #exist create it +# self.table = self.file.getNode(self.tree_path) +# except tables.exceptions.NoSuchNodeError: +# raise IOError("file %s does not contain table %s"% +# (self.file_path,self.tree_path)) +# self.fields = tuple(self.table.description._v_names) +# self.types = tuple([self.table.description._v_dtypes[name] +# for name in self.fields]) +# # add the id field base on row number: +# self.fields += ('rec_id',) +# self.types += (id_size,) +# +# def __del__(self): +# self.file.close() +# +# def close(self): +# self.file.close() + + + +class Table(object): + def __init__(self, file_path, id_size): + if os.path.exists(file_path): + self.file_path = file_path + else: + raise IOError("File %s cannot 
be accessed."%file_path) + # open for reading + self.file = tables.openFile(self.file_path, mode="r+") + + # Returns the names of the fields that matter at the filter stage. + # i.e. srcport/dstport/prot/srcip/... + var_nodes = ['/' + field + for field in self.file.root._v_attrs.variable_fields] + self.table = self.file.getNode("/fixed_length_fields") +# print var_nodes + self.tables =[self.table.iterrows()] + map(self.file.getNode, var_nodes) +# print self.tables + self.fixed_fields = self.file.root._v_attrs.fixed_fields +# print self.fixed_fields + self.variable_fields = self.file.root._v_attrs.variable_fields + # add the id field base on row number: + self.fields = tuple(['rec_id'] + + self.fixed_fields + + self.variable_fields) + self.types = tuple([id_size] + + self.file.root._v_attrs.fixed_types + + self.file.root._v_attrs.variable_types) + +# print self.file.root._v_attrs.variable_fields + + def __del__(self): + self.file.close() + + def close(self): + self.file.close() + + def flush(self): + self.file.flush() + +class FlowRecordsTable(Table): + """A reader object for an HDF table of flow records""" + def __init__(self, file_path, expected_id_size = tables.UInt32Col()): + Table.__init__(self, file_path, id_size = expected_id_size) + + def __iter__(self): + for row in izip(self.iterate_fixed_fields(), *self.tables[1:]): + yield row[0] + tuple(row[1:]) + + def iterate_fixed_fields(self): + for row in self.table: + yield tuple([row.nrow] + + [row[field] for field in self.fixed_fields]) + raise StopIteration + + def read_row(self, row_n): + row = [r for r in self.table.iterrows(row_n, row_n + 1)][0] #Is iterrows actually heavy itself? 
+ fixed = tuple([row[field] for field in self.fixed_fields]) + variable = tuple(table.read(row_n)[0] for table in self.tables[1:]) +# print (row_n,) + fixed + variable + return (row_n,) + fixed + variable + + def read_rows_list(self, rows_list): + for row_n in rows_list: + yield self.read_row(row_n) + + def iter_ids(self, id_list): + return self.table.readCoordinates(id_list) + + def get_record_by_id(self,id): + return self.table[id] + + def __del__(self): + self.file.close() + + def append(self, record): + self.row = self.table.row + for field in self.fixed_fields: + self.row[field] = getattr(record, field) + self.row.append() + for field in self.variable_fields: + getattr(self.file.root, field).append(getattr(record, field)) + + def get_current_row(self): + return self.row.nrow + + @property + def len(self): + return self.table.nrows + +class TimeIndex(FlowRecordsTable): + def __init__(self, fname, id_size_bytes=4): + FlowRecordsTable.__init__(self, fname, id_size_bytes) + self.start_time = self.file.root._v_attrs.start_time + self.delta = self.file.root._v_attrs.delta + self.id_size = id_size_bytes + self.index = self.tables[0] + + + def get_intervals_list(self, stime, etime): + start_interval = int(floor((stime - self.start_time) / self.delta)) + end_interval = int(ceil((etime - self.start_time) / self.delta)) + if start_interval < 1 or end_interval < 1: + raise ValueError("Something's wrong with index intervals") + + return xrange(start_interval, end_interval) + + def get_intervals_before(self, record, time_before): + res = self.get_intervals_list(record.stime - time_before, record.stime) + return res + + def get_intervals_after(self, record, time_after): + res = self.get_intervals_list(record.etime, record.etime + time_after) + return res + + def during(self, record): + return self.index.get_intervals_list + + def time_to_index_row(self, time): + return int(floor((time - self.start_time) / self.delta)) + + def index(self, record): + for i in 
self.get_intervals_list(record.stime, record.etime): + self.index[i] = self.index[i].append(record.rec_id) + +#class FlowRecordsTable(RecordsTable): +# """A reader object for an HDF table of flow records""" +# def __init__(self, file_path, expected_id_size = tables.UInt32Col()): +# RecordsTable.__init__(self, file_path, "/flow_records", +# id_size = expected_id_size) +# +# def __iter__(self): +# for row in self.table: +# yield row[:] + (row.nrow,) # tuple concatenation +# +# raise StopIteration +# +# def iter_ids(self, id_list): +# return self.table.readCoordinates(id_list) +# +# def get_record_by_id(self,id): +# return self.table[id] +# +# def __del__(self): +# self.file.close() +# +# def append(self,args): +# self.row = self.table.row +## print zip(self.fields, args) +# for field, val in zip(self.fields, args): +# self.row[field]= val +# self.row.append() + +def create_Table(file, fields, table_name, field_types, filters): + file.createTable(file.root, table_name, field_types, + "Records Table", filters=filters) + +def create_VLArray(file, name, atom, description, filters): + array = file.createVLArray(file.root, name, + atom, + "variable length field "+name, + filters=filters) + array.flavor = 'python' + +#def create_table_file(file_path, field_types, table_name="flow_records", +# complib='lzo', complevel=9): +# if os.path.exists(file_path): +# raise IOError("File %s already exists"%file_path) +# +# file = tables.openFile(file_path, mode="w") +# filters = tables.Filters(complevel=complevel, complib=complib) +# file.createTable(file.root, table_name, field_types, +# "Records Table", filters=filters) +# file.close() + +def create_index_file(file_path, start_time, delta, id_size_bytes, + complib='lzo', complevel=9, itemsize_in_bytes = 4): + if os.path.exists(file_path): + raise IOError("File %s already exists"%file_path) + + file = tables.openFile(file_path, mode="w") + filters = tables.Filters(complevel=complevel, complib=complib) + array = 
create_VLArray(file.root, 'time_index', + tables.UIntAtom(itemsize=itemsize_in_bytes), + "time_index", filters=filters) + array.flavor = 'python' + file.root._v_attrs.variable_fields = ['time_index'] + file.root._v_attrs.variable_types = [ + tables.UIntAtom(itemsize=itemsize_in_bytes)] + file.root._v_attrs.start_time = start_time + file.root._v_attrs.delta = delta + file.close() + + +def create_table_file(file_path, field_types, + complib='lzo', complevel=9): + if os.path.exists(file_path): + raise IOError("File %s already exists"%file_path) + + file = tables.openFile(file_path, mode="w") + filters = tables.Filters(complevel=complevel, complib=complib) +# filters = tables.Filters() + if 'rec_id' in field_types: + del field_types['rec_id'] + fixed_length_fields = {} + variable_length_fields = {} + for k, v in field_types.iteritems(): +# print str(type(v)), str(type(v)).find('atom') + if str(type(v)).find('atom') == -1: + fixed_length_fields[k] = v + else: + variable_length_fields[k] = v + + file.createTable(file.root, "fixed_length_fields", fixed_length_fields, + "Records Table", filters=filters) + + for field_name, atom in variable_length_fields.iteritems(): + array = file.createVLArray(file.root, field_name, atom, "field " + + field_name, filters) + array.flavor = 'python' + file.root._v_attrs.fixed_fields = fixed_length_fields.keys() + file.root._v_attrs.fixed_types = fixed_length_fields.values() + + file.root._v_attrs.variable_fields = variable_length_fields.keys() + file.root._v_attrs.variable_types = variable_length_fields.values() + + file.close() + +class GroupsMembersTable(object): + def __init__(self, file_path, tree_path): + self.file_path = file_path + self.tree_path = tree_path + # open for reading + self.file = tables.openFile(self.file_path, mode="r+") + try: + #try to open the table as specified by path if node does not + #exist create it + self.table = self.file.getNode(self.tree_path) + except tables.exceptions.NoSuchNodeError: + raise 
IOError("file %s does not contain table %s"% + (self.file_path,self.tree_path)) + + def __iter__(self): + for row in self.table: + yield row + + raise StopIteration + + def iter_ids(self, id_list): + for id in id_list: + yield self.table[id] + + def get_group_by_id(self): + return self.table[id] + + def __del__(self): +# self.table.flush() + self.file.close() + + def append(self, val_list): + self.table.append(val_list) + +# Performs ungrouping, based on the iterator of group records and an +# iterator over flow records +class GroupsExpander(object): + def __init__(self, groups_file_path, records_file_path): + self.groups = GroupsMembersTable(groups_file_path, "gr1") + self.records = FlowRecordsTable(self.records_file_path) + + + def group_members(self,group_id): + grp_member_ids = self.groups.get_group_by_id(group_id) + return self.record.iter_ids(grp_member_ids) + + +default_ft_types = { + 'dFlows' : tables.UInt32Col(), 'bytes' : tables.UInt32Col(), + 'dPkts' : tables.UInt32Col(), 'dst_as' : tables.UInt16Col(), + 'dst_mask' : tables.UInt8Col(), 'dst_tag' : tables.UInt32Col(), + 'dstip' : tables.UInt32Col(), 'dstport' : tables.UInt16Col(), + 'engine_id' : tables.UInt8Col(), 'engine_type' : tables.UInt8Col(), + 'exaddr' : tables.UInt32Col(), 'extra_pkts' : tables.UInt32Col(), + 'stime' : tables.UInt32Col(), 'in_encaps' : tables.UInt8Col(), + 'input' : tables.UInt16Col(), 'etime' : tables.UInt32Col(), + 'marked_tos' : tables.UInt8Col(), 'nexthop' : tables.UInt32Col(), + 'out_encaps' : tables.UInt8Col(), 'output' : tables.UInt16Col(), + 'peer_nexthop' : tables.UInt32Col(), 'prot' : tables.UInt8Col(), + 'router_sc' : tables.UInt32Col(), 'src_as' : tables.UInt16Col(), + 'src_mask' : tables.UInt8Col(), 'src_tag' : tables.UInt32Col(), + 'srcip' : tables.UInt32Col(), 'srcport' : tables.UInt16Col(), + 'sysUpTime' : tables.UInt32Col(), 'tcp_flags' : tables.UInt8Col(), + 'tos' : tables.UInt8Col(), 'unix_nsecs' : tables.UInt32Col(), + 'unix_secs' : tables.UInt32Col() + } 
+#tab = FlowRecordsTable("../dynZip9.h5") + +#for x in tab: +# print x + +#print tab.fields + +#wr = TableWriter("../test.h5","/dumps/table1") + +#create_group_file("../grptest.h5", "gr1") +#grp = GroupsMembersTable("../grptest.h5", "/gr1") +#grp.append([1,3,5]) +#grp.append([2,4]) +#grp.append([4324904231490123,98]) +# +#for ls in grp.iter_ids([1,2]): +# print ls + +#grp.__del__() +#print [1,4,543,32] + +#from os import remove +#fname = "../comp.h5" +#remove(fname) +#field_types = {'info': tables.UInt8Col(), +# 'records': tables.UInt8Atom(), 'info_list': tables.UInt8Atom()} +#create_complex_file(fname, field_types) +##h5f = tables.openFile(fname, 'r') +##print h5f.root._v_attrs.fixed_fields +##print h5f.root._v_attrs.fixed_types +##print h5f.root._v_attrs.variable_fields +##print h5f.root._v_attrs.variable_types +# +#cread = FlRecordsTable(fname) +# +#cread.append((999,[1,3],[1])) +#cread.append((2,[1,4],[2,4,999])) +#cread.close() +# +#read = FlRecordsTable(fname) +#for r in read: +# print r + diff --git a/pytables.pyc b/pytables.pyc new file mode 100644 index 0000000..ecd2ef7 Binary files /dev/null and b/pytables.pyc differ diff --git a/record.py b/record.py new file mode 100644 index 0000000..ce6bd19 --- /dev/null +++ b/record.py @@ -0,0 +1,165 @@ +""" +This module provides methods for dynamically creating flow and +group record classes. +""" + +def get_record_class(attributes, types=None, default_vals=None): + ''' + Creates a record class for given attribute names. + + Arguments: + attributes - a sequence of attribute names + types - optional sequence of attribute types, which + correspond to the attribute names in attributes. + Types may be of any type, and are not used by the + Record class, but are useful for external storage, + where data type has to be predetermined. + default_val - a sequence of default values which + correspond to the attribute names in attributes + + Lists are used instead of dictionaries because the order + may be important. 
+ + Return: + Record class which has attributes with the names given + by attributes list. The class uses __slots__ to lower + memory usage as potentially millions of instance will + be present during runtime. The class has a constructor, + which takes as argument values for the attributes ordered + the same way as in the attributes list. If default values + are specified there is a default(no argument) constructor + as well. + NOTE that this method returns a class not an instance. + + Raises: + ValueError if number of types or default values doesn't + match number of attributes. + ''' + if default_vals and len(attributes) != len(default_vals): + raise ValueError( + "Number of attributes(%d) and number of default values(%d)"% + (len(attributes),len(default_vals))+" don't match") + if types and len(attributes) != len(types): + raise ValueError( + "Number of attributes(%d) and number of default types(%d)"% + (len(attributes),len(default_vals))+" don't match") + elif types: + types_dict = dict(zip(attributes, types)) + else: + types_dict = {} + class Record(object): + ''' + Record class contains flow or group record information. + + It uses __slots__ to save memory because potentially millions of + FlowRecords will be used during run time. + Attributes: + attribute names are specified in cls.__slots__ + defaults - contains the default values for attributes used + with default constructor. + attr_types - contains a dictionary of the types of + the attributes. + + Methods: + __init__ - when defaults is specified __init__() + creates an object with default values. If no + defaults are specified during class creation + __init__() raises TypeError. + __init__(*args) takes exactly the same number + of arguments as the classes' number of attributes, + and creates new instance with the given values. 
+ Argument order corresponds to the order of + attributes in cls.__slots__ + + ''' + # set slots to conserve memory + # copy ([:]) don't reference to protect from unexpected changes + __slots__ = attributes[:] + attr_types = types_dict + num_of_fields = len(__slots__) + defaults = default_vals[:] if default_vals else None + + def __init__(self, *args): + num_args = len(args) + if num_args == self.num_of_fields: + for name, value in zip(self.__slots__,args): + setattr(self, name, value) + elif num_args == 0 and self.defaults != None: + for name, value in zip(self.__slots__,self.defaults): + setattr(self, name, value) + elif self.defaults == None: + raise TypeError( + "__init__() takes %d arguments (%d given)"% + ( self.num_of_fields + 1, num_args+1)) + else: + raise TypeError( + "__init__() takes either 1 or %d arguments (%d given)"% + ( self.num_of_fields + 1, num_args+1)) + + def tuple(self): + return tuple(getattr(self, field) for field in self.slots) + + def __repr__(self): + res = "Recod(" + for field in self.__slots__: + val = getattr(self, field) + if type(val) is str: + val = "'" + str(val) + "'" + else: + val = str(val) + res += val + ", " + res =res[:-2] + ")" + return res + + def __str__(self): + res = "Recod: " + for field in self.__slots__: + val = getattr(self, field) + res += field + "->" + str(val) + ", " + res =res[:-2] + return res + return Record + + +class RecordReader(object): + def __init__(self, reader_object): + self.reader = reader_object + #print self.reader.fields + self.Record = get_record_class(self.reader.fields) + + def __iter__(self): + for tuple in self.reader: + yield self.Record(*tuple) + + def read_rows_list(self, rows_list): + for tuple in self.reader.read_rows_list(rows_list): + yield self.Record(*tuple) + + def read_row(self, row_n): + tup = self.reader.read_row(row_n) + return self.Record(*tup) + +#from flowy import pytables +#ptread = pytables.FlowRecordsTable("../testFT.h5" ) +#rr = RecordReader(ptread) +#for i in rr: +# 
print i.dOctets + +# +# +#FlowRecord = get_record_class(["a","b"],["str","uint"],[1,6]) +# +#def printSth(self): +# print "sth" +# +#FlowRecord.p = printSth +# +#x = FlowRecord(1,6) +# +# +#print x.a, x.b +#print x.__slots__ +# +#t = FlowRecord() +#print t.a +#t.p() diff --git a/record.pyc b/record.pyc new file mode 100644 index 0000000..ff33caa Binary files /dev/null and b/record.pyc differ diff --git a/run-output.txt b/run-output.txt new file mode 100644 index 0000000..6fd9cb2 --- /dev/null +++ b/run-output.txt @@ -0,0 +1,1387 @@ +0.97 +[Input('./netflow-trace.h5', 50, set([]), set([]), set([])), BranchNode('S', 50, set([]), set([]))] + +[BranchNode('S', 51, set([]), set([])), Branch('A', 51, None, set([]), set([])), BranchNode('www_req', 51, set([]), set([])), BranchNode('g_www_req', 51, set([]), set([])), BranchNode('ggf', 51, set([]), set([])), BranchNode('M', 51, set([]), set([]))] + +[BranchNode('S', 52, set([]), set([])), Branch('B', 52, None, set([]), set([])), BranchNode('www_res', 52, set([]), set([])), BranchNode('g_www_res', 52, set([]), set([])), BranchNode('ggf', 52, set([]), set([])), BranchNode('M', 52, set([]), set([]))] + +[BranchNode('M', 53, set([]), set([])), BranchNode('U', 53, set([]), set([])), Output('./ungroped.h5', 53, set([]), set([]), set([]))] + +Splitter initiated +Parsing and validation finished: 2.22 +Started filtering +Finished filtering +Filters ready +Splitter time elapsed: 1130.1 +Finished grouping branch B +Finished filtering groups for branch B +Finished grouping branch A +Finished filtering groups for branch A +Group filter time elapsed: 2123.665408 +Finished merging branches: ['B', 'A'] +Merger time elapsed: -185.553776 +Ungrouper U finished exectution +FINISHED! 
+Overall time elapsed: -114.543776 +(('IncrementalEncoder', '/usr/lib/python2.6/encodings/latin_1.py', 20), (1, 3.0994415283203125e-05, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 2), (1, 3.0994415283203125e-05, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/groupfilter_validator.py', 12), (1, 0.35587811470031738, 0.34402200000000027)) + +(('p_group_filter', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 263), (1, 0.00046992301940917969, 0.0)) + +(('close', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 78), (1, 0.33764791488647461, 0.0039999999971769284)) + +(('get_branches_allen_index_ops', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 313), (1, 8.0108642578125e-05, 0.0)) + +(('Codec', '/usr/lib/python2.6/encodings/latin_1.py', 13), (1, 3.910064697265625e-05, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 17), (1, 0.14102602005004883, 0.1320089999999996)) + +(('validate_tokens', '/usr/lib/pymodules/python2.6/ply/lex.py', 594), (1, 0.00038218498229980469, 0.0)) + +(('__init__', '/usr/lib/pymodules/python2.6/ply/yacc.py', 118), (1, 3.0994415283203125e-05, 0.0)) + +(('__repr__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 43), (1, 6.008148193359375e-05, 0.0)) + +(('oi', '/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 69), (1, 2.3126602172851562e-05, 0.0)) + +(('p_args_more', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 403), (1, 0.00016498565673828125, 0.0)) + +(('validate', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 329), (1, 0.002529144287109375, 0.0040000000000000036)) + +(('get_merger_table_impl', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 382), (1, 0.09451603889465332, 0.084004999999999885)) + +(('d', 
'/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 82), (1, 2.4080276489257812e-05, 0.0)) + +(('p_branches1', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 680), (1, 0.00017094612121582031, 0.0)) + +(('go', '/home/melnikovkolya/classes/semester-3-project/flowy/splitter.py', 11), (1, 1090.2936701774597, 1133.1228150000002)) + +(('get_default_values', '/usr/lib/python2.6/optparse.py', 1313), (1, 0.00064682960510253906, 0.0)) + +(('', '/home/melnikovkolya/classes/semester-3-project/flowy/flowy_exec.py', 1), (1, 0.00023794174194335938, 0.98005999999999993)) + +(('check_allen_reachability', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 150), (1, 0.00019001960754394531, 0.0)) + +(('', '/home/melnikovkolya/classes/semester-3-project/flowy/parsetab.py', 4), (1, 0.01084589958190918, 0.012000000000000011)) + +(('go', '/home/melnikovkolya/classes/semester-3-project/flowy/ungrouper.py', 29), (1, 77.383197784423828, 70.288392999998905)) + +(('__init__', '/usr/lib/python2.6/optparse.py', 365), (1, 0.00017595291137695312, 0.0)) + +(('get_tokens', '/usr/lib/pymodules/python2.6/ply/lex.py', 574), (1, 5.3882598876953125e-05, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 55), (1, 0.00030398368835449219, 0.0)) + +(('p_merger_module0', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 659), (1, 0.0001010894775390625, 0.0)) + +(('sort_branches', '/home/melnikovkolya/classes/semester-3-project/flowy/splitter_validator.py', 15), (1, 0.00016307830810546875, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 4), (1, 4.00543212890625e-05, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 87), (1, 3.7908554077148438e-05, 0.0)) + +(('setprofile', '/usr/lib/python2.6/threading.py', 84), (1, 2.5987625122070312e-05, 0.0)) + +(('p_export', 
'/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 684), (1, 0.00016498565673828125, 0.0)) + +(('find_name_to_merger_records_file', '/home/melnikovkolya/classes/semester-3-project/flowy/ungrouper_validator.py', 39), (1, 1.6223840713500977, 0.044001999998727115)) + +(('_populate_option_list', '/usr/lib/python2.6/optparse.py', 1252), (1, 0.0051400661468505859, 0.0040000000000000036)) + +(('order_merger_rules', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 247), (1, 0.0043649673461914062, 0.0040000000000000036)) + +(('p_output_branch', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 486), (1, 0.00031900405883789062, 0.0)) + +(('find_name_to_groups', '/home/melnikovkolya/classes/semester-3-project/flowy/ungrouper_validator.py', 47), (1, 0.28166508674621582, 0.11600700000053621)) + +(('__init__', '/usr/lib/python2.6/optparse.py', 932), (1, 0.00022912025451660156, 0.0)) + +(('_splitext', '/usr/lib/python2.6/genericpath.py', 85), (1, 7.7962875366210938e-05, 0.0)) + +(('get_index_rule_impl', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 367), (1, 0.0034859180450439453, 0.0040009999999996992)) + +(('dgettext', '/usr/lib/python2.6/gettext.py', 542), (1, 0.0038430690765380859, 0.0040000000000000036)) + +(('mi', '/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 41), (1, 3.1948089599609375e-05, 0.0)) + +(('check_field_refs', '/home/melnikovkolya/classes/semester-3-project/flowy/groupfilter_validator.py', 33), (1, 0.00054502487182617188, 0.0)) + +(('set_parser', '/usr/lib/python2.6/optparse.py', 224), (1, 2.4080276489257812e-05, 0.0)) + +(('order_allen_ops_args', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 85), (1, 0.00036096572875976562, 0.0)) + +(('read_table', '/usr/lib/pymodules/python2.6/ply/yacc.py', 1821), (1, 0.027589082717895508, 0.028001000000000165)) + +(('m', 
'/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 27), (1, 2.288818359375e-05, 0.0)) + +(('create_impl', '/home/melnikovkolya/classes/semester-3-project/flowy/filter_validator.py', 158), (1, 0.0055849552154541016, 0.0080000000000000071)) + +(('_add_help_option', '/usr/lib/python2.6/optparse.py', 1242), (1, 0.0050837993621826172, 0.0040000000000000036)) + +(('check_duplicate_filter_names', '/home/melnikovkolya/classes/semester-3-project/flowy/filter_validator.py', 32), (1, 0.00013208389282226562, 0.0)) + +(('validate_all', '/usr/lib/pymodules/python2.6/ply/lex.py', 567), (1, 0.28138995170593262, 0.27201699999999995)) + +(('get_rules_impl', '/home/melnikovkolya/classes/semester-3-project/flowy/groupfilter_validator.py', 69), (1, 0.0014450550079345703, 0.0)) + +(('parseopt_notrack', '/usr/lib/pymodules/python2.6/ply/yacc.py', 869), (1, 0.16629600524902344, 0.16800999999999999)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 12), (1, 3.1948089599609375e-05, 0.0)) + +(('__repr__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 29), (1, 7.1048736572265625e-05, 0.0)) + +(('s', '/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 135), (1, 3.3140182495117188e-05, 0.0)) + +(('check_for_unused_filters', '/home/melnikovkolya/classes/semester-3-project/flowy/groupfilter_validator.py', 52), (1, 3.3140182495117188e-05, 0.0)) + +(('check_allen_ops', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 96), (1, 0.00050592422485351562, 0.0040000000000000036)) + +(('', '/usr/lib/python2.6/hashlib.py', 55), (1, 0.0026788711547851562, 0.0)) + +(('create_rule_implementations', '/home/melnikovkolya/classes/semester-3-project/flowy/filter_validator.py', 126), (1, 0.00099492073059082031, 0.0040000000000000036)) + +(('check_allen_satisfiability', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 116), (1, 4.1961669921875e-05, 0.0)) + 
+(('create_impl', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper_validator.py', 151), (1, 0.038751840591430664, 0.040001999999999871)) + +(('_get_args', '/usr/lib/python2.6/optparse.py', 1356), (1, 7.4863433837890625e-05, 0.0)) + +(('StreamConverter', '/usr/lib/python2.6/encodings/latin_1.py', 34), (1, 3.4093856811523438e-05, 0.0)) + +(('find_mergers_export_modules', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 59), (1, 4.5061111450195312e-05, 0.0)) + +(('validate_file', '/usr/lib/pymodules/python2.6/ply/lex.py', 829), (1, 0.06090092658996582, 0.060003999999999946)) + +(('p_opt_or_allen_rule', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 760), (1, 0.00027298927307128906, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 74), (1, 3.4809112548828125e-05, 0.0)) + +(('_process_args', '/usr/lib/python2.6/optparse.py', 1414), (1, 4.6014785766601562e-05, 0.0)) + +(('ready', '/home/melnikovkolya/classes/semester-3-project/flowy/splitter.py', 24), (1, 6.5088272094726562e-05, 0.0)) + +(('__init__', '/usr/lib/pymodules/python2.6/ply/lex.py', 79), (1, 3.1948089599609375e-05, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 182), (1, 0.56630086898803711, 0.55203500000000005)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 36), (1, 4.7206878662109375e-05, 0.0)) + +(('_get_all_options', '/usr/lib/python2.6/optparse.py', 1307), (1, 3.4093856811523438e-05, 0.0)) + +(('parse_args', '/usr/lib/python2.6/optparse.py', 1362), (1, 0.00095796585083007812, 0.0040000000000000036)) + +(('change_branch_names_to_id', '/home/melnikovkolya/classes/semester-3-project/flowy/filter_validator.py', 50), (1, 0.00050210952758789062, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 9), (1, 0.48897290229797363, 0.47603000000000018)) + +(('__init__', 
'/home/melnikovkolya/classes/semester-3-project/flowy/grouper_validator.py', 10), (1, 0.39739584922790527, 0.35602199999999984)) + +(('get_merger_impl', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 396), (1, 0.10486102104187012, 0.096006999999999731)) + +(('p_branches1_n', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 673), (1, 0.00031900405883789062, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/splitter_validator.py', 7), (1, 0.55339598655700684, 0.30001899999999981)) + +(('evaluate_rule', '/home/melnikovkolya/classes/semester-3-project/flowy/validator_common.py', 72), (1, 4.6014785766601562e-05, 0.0)) + +(('__init__', '/usr/lib/pymodules/python2.6/ply/yacc.py', 240), (1, 2.9087066650390625e-05, 0.0)) + +(('__init__', '/usr/lib/pymodules/python2.6/ply/lex.py', 545), (1, 6.8902969360351562e-05, 0.0)) + +(('input', '/usr/lib/pymodules/python2.6/ply/lex.py', 251), (1, 6.6041946411132812e-05, 0.0040000000000000036)) + +(('set_description', '/usr/lib/python2.6/optparse.py', 964), (1, 2.3126602172851562e-05, 0.0)) + +(('check_for_unused_filters', '/home/melnikovkolya/classes/semester-3-project/flowy/filter_validator.py', 23), (1, 5.6028366088867188e-05, 0.0)) + +(('go', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 107), (1, 15145.504531860352, 14870.641358000001)) + +(('GT', '/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 14), (1, 2.3126602172851562e-05, 0.0)) + +(('validate', '/home/melnikovkolya/classes/semester-3-project/flowy/groupfilter_validator.py', 47), (1, 0.00074887275695800781, 0.0)) + +(('__init__', '/usr/lib/python2.6/optparse.py', 200), (1, 0.00011801719665527344, 0.0)) + +(('create_impl', '/home/melnikovkolya/classes/semester-3-project/flowy/splitter_validator.py', 21), (1, 0.0024619102478027344, 0.0)) + +(('lex', '/usr/lib/pymodules/python2.6/ply/lex.py', 865), (1, 0.48882603645324707, 0.47603000000000018)) + +(('__init__', 
'/usr/lib/python2.6/optparse.py', 1185), (1, 0.0058839321136474609, 0.0080009999999999248)) + +(('check_field_refs', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper_validator.py', 53), (1, 0.0012209415435791016, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 22), (1, 3.0994415283203125e-05, 0.0)) + +(('get_states', '/usr/lib/pymodules/python2.6/ply/lex.py', 621), (1, 8.58306884765625e-05, 0.0040000000000000036)) + +(('flush', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 12), (1, 0.66196799278259277, 0.0080010000019683503)) + +(('fi', '/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 122), (1, 5.0067901611328125e-05, 0.0)) + +(('LT', '/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 1), (1, 2.4080276489257812e-05, 0.0)) + +(('o', '/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 55), (1, 2.5987625122070312e-05, 0.0040009999999996992)) + +(('set_conflict_handler', '/usr/lib/python2.6/optparse.py', 959), (1, 2.288818359375e-05, 0.0)) + +(('profile_on', '/home/melnikovkolya/classes/semester-3-project/flowy/profiler.py', 43), (1, 0.0001010894775390625, 0.98005999999999993)) + +(('validate_rules', '/usr/lib/pymodules/python2.6/ply/lex.py', 723), (1, 0.2807769775390625, 0.27201699999999995)) + +(('StreamWriter', '/usr/lib/python2.6/encodings/latin_1.py', 28), (1, 3.3140182495117188e-05, 0.0)) + +(('_create_option_list', '/usr/lib/python2.6/optparse.py', 1237), (1, 8.20159912109375e-05, 0.0)) + +(('resolve_branches', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 868), (1, 0.0021741390228271484, 0.0040010000000001433)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/splitter.py', 5), (1, 5.91278076171875e-05, 0.0)) + +(('create_masks', '/home/melnikovkolya/classes/semester-3-project/flowy/filter_validator.py', 100), (1, 0.001110076904296875, 0.0)) + +(('check_duplicate_module_names', 
'/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 46), (1, 6.198883056640625e-05, 0.0)) + +(('__init__', '/usr/lib/pymodules/python2.6/ply/yacc.py', 192), (1, 2.5033950805664062e-05, 0.0)) + +(('p_merger_rule_al_op', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 755), (1, 0.00028109550476074219, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 145), (1, 0.0002269744873046875, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/ungrouper.py', 3), (1, 3.0994415283203125e-05, 0.0)) + +(('_form_master_re', '/usr/lib/pymodules/python2.6/ply/lex.py', 482), (1, 0.18841695785522461, 0.18401200000000006)) + +(('get_pfunctions', '/usr/lib/pymodules/python2.6/ply/yacc.py', 2958), (1, 0.01446986198425293, 0.012000999999999928)) + +(('check_duplicate_merger_names', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 34), (1, 6.389617919921875e-05, 0.0)) + +(('get_error_func', '/usr/lib/pymodules/python2.6/ply/yacc.py', 2865), (1, 4.6968460083007812e-05, 0.0)) + +(('_create_option_mappings', '/usr/lib/python2.6/optparse.py', 943), (1, 3.3855438232421875e-05, 0.0)) + +(('create_impl', '/home/melnikovkolya/classes/semester-3-project/flowy/groupfilter_validator.py', 82), (1, 0.33964705467224121, 0.32802100000000056)) + +(('StreamReader', '/usr/lib/python2.6/encodings/latin_1.py', 31), (1, 3.2186508178710938e-05, 0.0)) + +(('f', '/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 109), (1, 3.7908554077148438e-05, 0.0)) + +(('find', '/usr/lib/python2.6/gettext.py', 421), (1, 0.0036578178405761719, 0.0040000000000000036)) + +(('p_merger_rule0', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 693), (1, 8.9883804321289062e-05, 0.0)) + +(('__init__', '/usr/lib/pymodules/python2.6/ply/lex.py', 115), (1, 4.00543212890625e-05, 0.0)) + +(('get_id_size', 
'/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 27), (1, 6.7949295043945312e-05, 0.0)) + +(('__init__', '/usr/lib/pymodules/python2.6/ply/yacc.py', 2763), (1, 6.6995620727539062e-05, 0.0)) + +(('validate', '/home/melnikovkolya/classes/semester-3-project/flowy/filter_validator.py', 146), (1, 0.0040280818939208984, 0.0040000000000000036)) + +(('p_ungrouper', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 497), (1, 0.00028586387634277344, 0.0)) + +(('check_values', '/usr/lib/python2.6/optparse.py', 1401), (1, 2.8133392333984375e-05, 0.0)) + +(('di', '/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 95), (1, 2.288818359375e-05, 0.0)) + +(('check_for_disjoint_modules', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 172), (1, 6.29425048828125e-05, 0.0)) + +(('', '/usr/lib/python2.6/encodings/latin_1.py', 8), (1, 0.0010180473327636719, 0.0040000000000000036)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/filter_validator.py', 10), (1, 0.38966488838195801, 0.34802100000000014)) + +(('splitext', '/usr/lib/python2.6/posixpath.py', 94), (1, 0.0001430511474609375, 0.0)) + +(('p_pipeline_stage_end', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 212), (1, 2.9087066650390625e-05, 0.0)) + +(('create_impl', '/home/melnikovkolya/classes/semester-3-project/flowy/ungrouper_validator.py', 76), (1, 1.5083169937133789, 0.72004500000184635)) + +(('validate_literals', '/usr/lib/pymodules/python2.6/ply/lex.py', 609), (1, 0.00011110305786132812, 0.0)) + +(('check_allen_deltas', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 142), (1, 3.4093856811523438e-05, 0.0)) + +(('get_mergers_impl', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 461), (1, 0.1074979305267334, 0.10000699999999973)) + +(('get_precedence', '/usr/lib/pymodules/python2.6/ply/yacc.py', 2923), (1, 4.6014785766601562e-05, 0.0)) + 
+(('translation', '/usr/lib/python2.6/gettext.py', 476), (1, 0.0037548542022705078, 0.0040000000000000036)) + +(('check_allen_consistency', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 127), (1, 3.910064697265625e-05, 0.0)) + +(('p_file', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 197), (1, 2.8848648071289062e-05, 0.0)) + +(('p_args', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 396), (1, 0.00030803680419921875, 0.0)) + +(('find_name_to_gr_output', '/home/melnikovkolya/classes/semester-3-project/flowy/ungrouper_validator.py', 65), (1, 0.2986290454864502, 0.24001500000304077)) + +(('get_all', '/usr/lib/pymodules/python2.6/ply/lex.py', 560), (1, 0.013720989227294922, 0.016001000000000154)) + +(('check_duplicate_filter_names', '/home/melnikovkolya/classes/semester-3-project/flowy/groupfilter_validator.py', 21), (1, 6.7949295043945312e-05, 0.0)) + +(('order_modules', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 185), (1, 4.291534423828125e-05, 0.0)) + +(('IncrementalDecoder', '/usr/lib/python2.6/encodings/latin_1.py', 24), (1, 3.4093856811523438e-05, 0.0)) + +(('validate', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper_validator.py', 19), (1, 0.0040161609649658203, 0.0040000000000004476)) + +(('getregentry', '/usr/lib/python2.6/encodings/latin_1.py', 41), (1, 0.00015878677368164062, 0.0)) + +(('p_input_branch', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 444), (1, 0.00034213066101074219, 0.0)) + +(('p_splitter', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 229), (1, 0.00028204917907714844, 0.0)) + +(('p_merger_branches', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 669), (1, 0.00039505958557128906, 0.0040000000000000036)) + +(('__new__', '/usr/lib/python2.6/codecs.py', 77), (1, 6.8187713623046875e-05, 0.0)) + +(('check_field_refs', 
'/home/melnikovkolya/classes/semester-3-project/flowy/filter_validator.py', 44), (1, 0.00067901611328125, 0.0040000000000000036)) + +(('_init_parsing_state', '/usr/lib/python2.6/optparse.py', 1262), (1, 3.0994415283203125e-05, 0.0040009999999999213)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/ungrouper_validator.py', 10), (1, 3.5059800148010254, 0.78004800000053365)) + +(('parse', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 811), (1, 0.16866111755371094, 0.17201100000000014)) + +(('find_name_to_otput', '/home/melnikovkolya/classes/semester-3-project/flowy/ungrouper_validator.py', 55), (1, 3.1948089599609375e-05, 0.0)) + +(('gettext', '/usr/lib/python2.6/gettext.py', 580), (1, 0.0039150714874267578, 0.0040000000000000036)) + +(('get_start', '/usr/lib/pymodules/python2.6/ply/yacc.py', 2855), (1, 4.7206878662109375e-05, 0.0)) + +(('set_usage', '/usr/lib/python2.6/optparse.py', 1271), (1, 4.100799560546875e-05, 0.0)) + +(('get_tokens', '/usr/lib/pymodules/python2.6/ply/yacc.py', 2889), (1, 6.6995620727539062e-05, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 151), (1, 4.8875808715820312e-05, 0.0)) + +(('p_merger_module', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 663), (1, 0.00040102005004882812, 0.0)) + +(('parse', '/usr/lib/pymodules/python2.6/ply/yacc.py', 257), (1, 0.16637682914733887, 0.16800999999999999)) + +(('p_opt_op_rule_end', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 765), (1, 8.7976455688476562e-05, 0.0)) + +(('bind_callables', '/usr/lib/pymodules/python2.6/ply/yacc.py', 1870), (1, 0.011104822158813477, 0.0080009999999999248)) + +(('get_rules', '/usr/lib/pymodules/python2.6/ply/lex.py', 652), (1, 0.013356924057006836, 0.01200100000000015)) + +(('find_ungrouper_to_merger', '/home/melnikovkolya/classes/semester-3-project/flowy/ungrouper_validator.py', 20), (1, 6.198883056640625e-05, 0.0)) + +(('get_branches_fields', 
'/home/melnikovkolya/classes/semester-3-project/flowy/groupfilter_validator.py', 41), (1, 3.1948089599609375e-05, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 10), (1, 3.6954879760742188e-05, 0.0)) + +(('p_rule_prefix', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 388), (1, 0.00041985511779785156, 0.0040010000000001433)) + +(('search_function', '/usr/lib/python2.6/encodings/__init__.py', 71), (1, 0.0019559860229492188, 0.0040000000000000036)) + +(('check_duplicate_grouper_names', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper_validator.py', 28), (1, 0.00011610984802246094, 0.0)) + +(('yacc', '/usr/lib/pymodules/python2.6/ply/yacc.py', 3036), (1, 0.077062129974365234, 0.076004999999999878)) + +(('__init__', '/usr/lib/pymodules/python2.6/ply/yacc.py', 1815), (1, 4.1961669921875e-05, 0.0)) + +(('create_pseudobranches', '/home/melnikovkolya/classes/semester-3-project/flowy/filter_validator.py', 62), (1, 8.0108642578125e-05, 0.0)) + +(('get_literals', '/usr/lib/pymodules/python2.6/ply/lex.py', 605), (1, 4.00543212890625e-05, 0.0)) + +(('si', '/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 148), (1, 3.6001205444335938e-05, 0.0)) + +(('signature', '/usr/lib/pymodules/python2.6/ply/yacc.py', 2796), (1, 0.0085060596466064453, 0.012000999999999928)) + +(('p_merger_module1_n', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 654), (1, 0.00029706954956054688, 0.0)) + +(('p_start_branch', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 435), (1, 0.00042104721069335938, 0.0)) + +(('__init__', '/usr/lib/python2.6/optparse.py', 837), (1, 8.106231689453125e-05, 0.0)) + +(('p_merger', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 648), (1, 0.00057721138000488281, 0.0)) + +(('EQ', '/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 161), (1, 3.1948089599609375e-05, 0.0)) + +(('get_all', 
'/usr/lib/pymodules/python2.6/ply/yacc.py', 2778), (1, 0.014896154403686523, 0.012000999999999928)) + +(('', '/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 1), (1, 0.0024480819702148438, 0.0040009999999996992)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/timeindex.py', 22), (2, 0.00011086463928222656, 0.0)) + +(('__init__', '/usr/lib/python2.6/Queue.py', 22), (2, 0.0018391609191894531, 0.0)) + +(('p_time', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 573), (2, 0.00082492828369140625, 0.0)) + +(('p_module', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 525), (2, 0.00074410438537597656, 0.0)) + +(('create_gr_record_fields_types', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 73), (2, 0.0001678466796875, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 50), (2, 9.1075897216796875e-05, 0.0)) + +(('_initLoop', '/usr/local/lib/python2.6/dist-packages/tables/vlarray.py', 542), (2, 0.047768115997314453, 0.0)) + +(('p_grouper_rule0', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 534), (2, 0.00021195411682128906, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 96), (2, 5.1975250244140625e-05, 0.0)) + +(('check_branch_id_ref', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 192), (2, 0.0001239776611328125, 0.0)) + +(('p_opt_aggr_end', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 603), (2, 0.00020813941955566406, 0.0)) + +(('p_module0', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 521), (2, 0.0001888275146484375, 0.0)) + +(('p_grouper', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 504), (2, 0.0028209686279296875, 0.0040000000000000036)) + +(('p_allen_op', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 785), (2, 0.00029993057250976562, 0.0)) + 
+(('__repr__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 58), (2, 9.7036361694335938e-05, 0.0)) + +(('p_delta_arg', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 566), (2, 0.00029873847961425781, 0.0)) + +(('__iter__', '/usr/local/lib/python2.6/dist-packages/tables/vlarray.py', 516), (2, 0.061742782592773438, 0.0)) + +(('convert_module_aggr_ops', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper_validator.py', 105), (2, 0.014819860458374023, 0.016001999999999406)) + +(('__del__', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 115), (2, 0.011918067932128906, 0.012000000000000011)) + +(('__getitem__', '/usr/lib/python2.6/UserDict.py', 17), (2, 7.9870223999023438e-05, 0.0)) + +(('p_no_allen_delta', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 781), (2, 0.00017881393432617188, 0.0)) + +(('p_module1_n', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 516), (2, 0.00076985359191894531, 0.0)) + +(('get_input_fields_types', '/home/melnikovkolya/classes/semester-3-project/flowy/validator_common.py', 52), (2, 0.00232696533203125, 0.0)) + +(('p_filter', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 240), (2, 0.00073194503784179688, 0.0)) + +(('create_grouper_rules_impl', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper_validator.py', 75), (2, 0.034547090530395508, 0.036001999999999423)) + +(('_init', '/usr/lib/python2.6/Queue.py', 197), (2, 8.0108642578125e-05, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 9), (2, 0.0010058879852294922, 0.0)) + +(('p_allen_rule', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 769), (2, 0.0012629032135009766, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 49), (2, 0.00036001205444335938, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 138), (2, 
8.869171142578125e-05, 0.0)) + +(('t_string', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 101), (2, 0.00020503997802734375, 0.0)) + +(('create_gr_record_fields_list', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 63), (2, 0.00015211105346679688, 0.0)) + +(('convert_module_rules', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper_validator.py', 141), (2, 0.0013997554779052734, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/groupfilter.py', 5), (2, 0.00083112716674804688, 0.0)) + +(('p_aggr1_n', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 593), (2, 0.00061702728271484375, 0.0)) + +(('get_rule_impl', '/home/melnikovkolya/classes/semester-3-project/flowy/groupfilter_validator.py', 61), (2, 0.00050306320190429688, 0.0)) + +(('replace_nested_rules', '/home/melnikovkolya/classes/semester-3-project/flowy/filter_validator.py', 136), (2, 0.00048112869262695312, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/splitter.py', 32), (2, 0.0020008087158203125, 0.0)) + +(('t_hex', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 145), (2, 0.00022983551025390625, 0.0)) + +(('p_grouper_rule_rel_delta', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 550), (2, 0.0012071132659912109, 0.0)) + +(('_expand_lang', '/usr/lib/python2.6/gettext.py', 130), (2, 0.00095987319946289062, 0.0040000000000000036)) + +(('check_duplicate_module_names', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper_validator.py', 40), (2, 0.00018310546875, 0.0)) + +(('p_aggregate', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 586), (2, 0.00080180168151855469, 0.0)) + +(('go', '/home/melnikovkolya/classes/semester-3-project/flowy/groupfilter.py', 13), (2, 20632.220237016678, 21338.749588999999)) + +(('p_split_branch', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 457), (2, 0.0015261173248291016, 
0.0)) + +(('normalize_encoding', '/usr/lib/python2.6/encodings/__init__.py', 49), (2, 0.00016188621520996094, 0.0)) + +(('normalize', '/usr/lib/python2.6/locale.py', 316), (2, 0.00043177604675292969, 0.0)) + +(('__iter__', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 1473), (3, 0.43885898590087891, 0.0080010000019683503)) + +(('p_filter_rule_1n', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 274), (3, 0.00074124336242675781, 0.0)) + +(('', '/home/melnikovkolya/classes/semester-3-project/flowy/filter_validator.py', 57), (3, 9.7751617431640625e-05, 0.0)) + +(('p_term_opt_rule', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 345), (3, 0.00038623809814453125, 0.0040000000000000036)) + +(('p_or_optrule', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 330), (3, 0.00062799453735351562, 0.0)) + +(('', '/home/melnikovkolya/classes/semester-3-project/flowy/filter_validator.py', 14), (3, 8.9406967163085938e-05, 0.0)) + +(('check_qid_field_ref', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 206), (3, 0.0002079010009765625, 0.0)) + +(('', '/home/melnikovkolya/classes/semester-3-project/flowy/filter_validator.py', 55), (3, 0.00010633468627929688, 0.0)) + +(('flush', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 81), (3, 2.4475281238555908, 0.32802200000514858)) + +(('get_input_reader', '/home/melnikovkolya/classes/semester-3-project/flowy/validator_common.py', 48), (3, 0.54138565063476562, 0.46803000000000017)) + +(('p_infix_rule', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 367), (3, 0.0013358592987060547, 0.0)) + +(('', '/home/melnikovkolya/classes/semester-3-project/flowy/groupfilter_validator.py', 102), (3, 0.00010514259338378906, 0.0)) + +(('p_filter_rule_0', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 279), (3, 0.00025320053100585938, 0.0)) + +(('p_filter_rule', 
'/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 283), (3, 0.00044512748718261719, 0.0)) + +(('t_LT', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 74), (3, 9.0837478637695312e-05, 0.0)) + +(('p_rule_or_not', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 349), (3, 0.00063610076904296875, 0.0)) + +(('p_rule', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 360), (3, 0.00040578842163085938, 0.0)) + +(('iterate_module_allen_op_groups', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 76), (3, 9.6797943115234375e-05, 0.0)) + +(('p_merger_infix_rule', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 712), (3, 0.0015790462493896484, 0.0)) + +(('p_merger_rule', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 697), (3, 0.00050592422485351562, 0.0)) + +(('flush', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 1951), (3, 2.390861988067627, 0.32802200000514858)) + +(('p_end_branch', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 482), (3, 0.00078797340393066406, 0.0)) + +(('', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 62), (3, 0.0001010894775390625, 0.0)) + +(('__hash__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 94), (4, 0.00078701972961425781, 0.0)) + +(('_g_preKillHook', '/usr/local/lib/python2.6/dist-packages/tables/node.py', 354), (4, 9.8943710327148438e-05, 0.0)) + +(('__contains__', '/usr/lib/python2.6/UserDict.py', 69), (4, 0.00011372566223144531, 0.0)) + +(('', 'flowy.py', 17), (4, 0.00011277198791503906, 0.0)) + +(('p_simple_agg_same_name', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 617), (4, 0.0017957687377929688, 0.0080000000000000071)) + +(('get_rule_impl', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 346), (4, 0.0010442733764648438, 0.0)) + +(('get', '/usr/lib/python2.6/UserDict.py', 57), (4, 
0.00040292739868164062, 0.0)) + +(('__cmp__', '/usr/local/lib/python2.6/dist-packages/tables/misc/lrucache.py', 108), (4, 0.00013422966003417969, 0.0)) + +(('isdigit', '/usr/lib/python2.6/sre_parse.py', 219), (4, 0.00011396408081054688, 0.0)) + +(('__repr__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 106), (4, 0.00013685226440429688, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 17), (4, 0.00012159347534179688, 0.0)) + +(('get_rule_needed_branches', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 296), (4, 0.00063681602478027344, 0.0)) + +(('t_int', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 150), (4, 0.00040483474731445312, 0.0)) + +(('p_mid_branch_terminate', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 476), (4, 0.00052571296691894531, 0.0)) + +(('__str__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 88), (4, 0.00048184394836425781, 0.0)) + +(('p_grouper_rule', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 538), (4, 0.0019702911376953125, 0.0)) + +(('p_merger_rule1_n', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 688), (4, 0.0011007785797119141, 0.0)) + +(('start', '/usr/lib/python2.6/threading.py', 461), (5, 3.6452641487121582, 0.93205800000214367)) + +(('Event', '/usr/lib/python2.6/threading.py', 357), (5, 0.076456308364868164, 0.0040000000008149073)) + +(('_set_attrs', '/usr/lib/python2.6/optparse.py', 609), (5, 0.00063014030456542969, 0.0)) + +(('__init__', '/usr/lib/python2.6/threading.py', 364), (5, 0.065616846084594727, 0.0040000000008149073)) + +(('_check_opt_strings', '/usr/lib/python2.6/optparse.py', 579), (5, 0.00018310546875, 0.0)) + +(('_check_nargs', '/usr/lib/python2.6/optparse.py', 699), (5, 0.000148773193359375, 0.0)) + +(('run', '/usr/lib/python2.6/threading.py', 474), (5, 21799.923771858215, 22542.164796999998)) + +(('join', 
'/usr/lib/python2.6/threading.py', 622), (5, 10595.639101982117, 10780.325727000003)) + +(('__init__', '/usr/lib/python2.6/threading.py', 424), (5, 0.1553959846496582, 0.0040000000008149073)) + +(('_check_type', '/usr/lib/python2.6/optparse.py', 635), (5, 0.00016474723815917969, 0.0)) + +(('__init__', '/usr/lib/python2.6/optparse.py', 560), (5, 0.0038492679595947266, 0.0)) + +(('_set_opt_strings', '/usr/lib/python2.6/optparse.py', 588), (5, 0.00031185150146484375, 0.0)) + +(('get_merger_branches_order', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 229), (5, 0.00030994415283203125, 0.0)) + +(('_check_const', '/usr/lib/python2.6/optparse.py', 693), (5, 0.00014805793762207031, 0.0)) + +(('_check_conflict', '/usr/lib/python2.6/optparse.py', 980), (5, 0.00019383430480957031, 0.0)) + +(('_check_choice', '/usr/lib/python2.6/optparse.py', 665), (5, 0.00015211105346679688, 0.0)) + +(('wait', '/usr/lib/python2.6/threading.py', 389), (5, 1.4045898914337158, 0.92805800000132876)) + +(('add_option', '/usr/lib/python2.6/optparse.py', 1007), (5, 0.0047156810760498047, 0.0)) + +(('_set_daemon', '/usr/lib/python2.6/threading.py', 444), (5, 0.019243001937866211, 0.0)) + +(('_newname', '/usr/lib/python2.6/threading.py', 399), (5, 0.013895273208618164, 0.0)) + +(('__stop', '/usr/lib/python2.6/threading.py', 581), (5, 0.0017960071563720703, 0.0)) + +(('isbasestring', '/usr/lib/python2.6/optparse.py', 832), (5, 0.00019145011901855469, 0.0)) + +(('daemon', '/usr/lib/python2.6/threading.py', 676), (5, 0.00013899803161621094, 0.0)) + +(('_check_dest', '/usr/lib/python2.6/optparse.py', 678), (5, 0.00020813941955566406, 0.0)) + +(('_check_action', '/usr/lib/python2.6/optparse.py', 629), (5, 0.00015115737915039062, 0.0)) + +(('_check_callback', '/usr/lib/python2.6/optparse.py', 708), (5, 0.00014495849609375, 0.0)) + +(('__bootstrap', '/usr/lib/python2.6/threading.py', 483), (5, 73152.289892196655, 73724.271475999994)) + +(('__bootstrap_inner', 
'/usr/lib/python2.6/threading.py', 506), (5, 73152.289488077164, 73724.267475000001)) + +(('_calc_chunkshape', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 352), (6, 0.0176849365234375, 0.0040000000008149073)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 120), (6, 0.00021100044250488281, 0.0)) + +(('replace_bound_rules', '/home/melnikovkolya/classes/semester-3-project/flowy/validator_common.py', 66), (6, 0.0030739307403564453, 0.0040000000000000036)) + +(('p_op', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 374), (6, 0.0013499259948730469, 0.0)) + +(('createTable', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 625), (6, 0.35201072692871094, 0.24001599999974133)) + +(('_getmaindim', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 218), (6, 0.00025892257690429688, 0.0)) + +(('_g_preKillHook', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 2429), (6, 0.00019288063049316406, 0.0)) + +(('isdir', '/usr/lib/python2.6/genericpath.py', 38), (6, 0.00091314315795898438, 0.004001000001153443)) + +(('p_qid_arg', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 738), (6, 0.0026998519897460938, 0.0)) + +(('row', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 295), (6, 0.032982349395751953, 0.0)) + +(('check_is_shortcut', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 118), (6, 0.00016331672668457031, 0.0)) + +(('_g_create', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 730), (6, 0.22419285774230957, 0.17201199999908567)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 108), (6, 0.00050306320190429688, 0.0)) + +(('replace_with_vals', '/home/melnikovkolya/classes/semester-3-project/flowy/validator_common.py', 106), (6, 0.0029850006103515625, 0.0)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/filters.py', 212), (6, 0.0076298713684082031, 0.0)) + 
+(('create_table_file', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 237), (6, 0.75770020484924316, 0.46802799999932176)) + +(('dirname', '/usr/lib/python2.6/posixpath.py', 117), (6, 0.0095231533050537109, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 108), (6, 0.00019216537475585938, 0.0)) + +(('p_grouper_op', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 557), (6, 0.00089693069458007812, 0.0040000000000000036)) + +(('p_grouper_rule1_n', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 529), (6, 0.0016539096832275391, 0.0)) + +(('S_ISDIR', '/usr/lib/python2.6/stat.py', 40), (6, 0.00046300888061523438, 0.004001000001153443)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 110), (6, 0.0001811981201171875, 0.0)) + +(('if_exists_delete', '/home/melnikovkolya/classes/semester-3-project/flowy/validator_common.py', 11), (6, 0.096372842788696289, 0.0)) + +(('p_qid', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 622), (6, 0.001857757568359375, 0.0)) + +(('_g_addChildrenNames', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 376), (6, 0.010331392288208008, 0.0040000000008149073)) + +(('', '', 1), (7, 0.012396812438964844, 0.012000000000000011)) + +(('', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 892), (7, 0.00074839591979980469, 0.0080000000016298145)) + +(('createVLArray', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 837), (8, 0.043541193008422852, 0.040002000001550186)) + +(('p_arg', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 411), (8, 0.0022470951080322266, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 174), (8, 0.00025129318237304688, 0.0)) + +(('_calc_chunkshape', '/usr/local/lib/python2.6/dist-packages/tables/vlarray.py', 303), (8, 0.0019943714141845703, 0.0)) + +(('_g_create', 
'/usr/local/lib/python2.6/dist-packages/tables/vlarray.py', 324), (8, 0.024519920349121094, 0.020000000000436557)) + +(('_setflavor', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 233), (8, 0.032287120819091797, 0.028001999995467486)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/record.py', 125), (8, 0.24568963050842285, 0.0040000000008149073)) + +(('check_flavor', '/usr/local/lib/python2.6/dist-packages/tables/flavor.py', 101), (8, 0.00023317337036132812, 0.0)) + +(('p_aggr', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 607), (8, 0.0041449069976806641, 0.0080009999999999248)) + +(('p_aggr_op', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 635), (8, 0.0012972354888916016, 0.0)) + +(('copy', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 544), (8, 0.010071754455566406, 0.0080000000016298145)) + +(('join', '/usr/lib/python2.6/posixpath.py', 59), (8, 0.00067973136901855469, 0.0)) + +(('__del__', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 297), (9, 0.0087170600891113281, 0.0)) + +(('_g_closeDescendents', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 857), (9, 0.21888160705566406, 0.039999999997235136)) + +(('_g_close', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 911), (9, 0.0067708492279052734, 0.0080000000016298145)) + +(('_g_close', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 2754), (9, 0.051886081695556641, 0.011999999998806743)) + +(('__len__', '/usr/local/lib/python2.6/dist-packages/tables/misc/lrucache.py', 138), (9, 0.00029063224792480469, 0.0)) + +(('_f_close', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 921), (9, 0.24245715141296387, 0.052000999996380415)) + +(('t_EQ', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 79), (9, 0.00025415420532226562, 0.0)) + +(('close', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 1972), (9, 0.38457894325256348, 0.056000999993557343)) + 
+(('p_mid_branch', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 467), (9, 0.0031220912933349609, 0.0080009999999999248)) + +(('_f_close', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 2464), (9, 0.17920565605163574, 0.027999999994790414)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/misc/lrucache.py', 102), (10, 0.00034022331237792969, 0.0)) + +(('__setitem__', '/usr/local/lib/python2.6/dist-packages/tables/misc/lrucache.py', 144), (10, 0.050474882125854492, 0.0)) + +(('_g_postReviveHook', '/usr/local/lib/python2.6/dist-packages/tables/node.py', 359), (10, 0.00025463104248046875, 0.0)) + +(('currentThread', '/usr/lib/python2.6/threading.py', 801), (10, 0.00030732154846191406, 0.0)) + +(('_killNode', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 2101), (10, 0.052376985549926758, 0.0)) + +(('isSet', '/usr/lib/python2.6/threading.py', 369), (10, 0.00030684471130371094, 0.0)) + +(('pop', '/usr/local/lib/python2.6/dist-packages/tables/misc/lrucache.py', 192), (10, 0.00054502487182617188, 0.0)) + +(('_reviveNode', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 2127), (10, 0.0037400722503662109, 0.0040000000008149073)) + +(('p_opt_aggr', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 598), (10, 0.0032141208648681641, 0.0040000000000000036)) + +(('__getseqn', '/usr/local/lib/python2.6/dist-packages/tables/misc/lrucache.py', 115), (10, 0.00094175338745117188, 0.0)) + +(('', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 904), (10, 0.0042827129364013672, 0.0040000000008149073)) + +(('get_record_class', '/home/melnikovkolya/classes/semester-3-project/flowy/record.py', 6), (11, 0.2394108772277832, 0.0040000000008149073)) + +(('t_GT', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 84), (11, 0.0003261566162109375, 0.0)) + +(('Record', '/home/melnikovkolya/classes/semester-3-project/flowy/record.py', 50), (11, 0.0004711151123046875, 0.0)) + +(('p_arrow', 
'/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 493), (11, 0.00031614303588867188, 0.0)) + +(('', '/usr/lib/python2.6/hashlib.py', 109), (11, 0.00038290023803710938, 0.0)) + +(('p_pipeline_stage', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 215), (12, 0.0017573833465576172, 0.0)) + +(('__contains__', '/usr/local/lib/python2.6/dist-packages/tables/attributeset.py', 545), (12, 0.00031352043151855469, 0.0)) + +(('_g_getparent', '/usr/local/lib/python2.6/dist-packages/tables/node.py', 211), (12, 0.0019068717956542969, 0.0)) + +(('', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 304), (12, 0.00034046173095703125, 0.0)) + +(('', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 390), (12, 0.00038623809814453125, 0.0)) + +(('p_pipeline_stage_1n', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 202), (12, 0.0019559860229492188, 0.0040000000000000036)) + +(('check_rule_fields', '/home/melnikovkolya/classes/semester-3-project/flowy/validator_common.py', 56), (12, 0.0004558563232421875, 0.0)) + +(('', '/home/melnikovkolya/classes/semester-3-project/flowy/merger_validator.py', 260), (12, 0.00038480758666992188, 0.0)) + +(('p_id_or_qid', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 628), (12, 0.0019271373748779297, 0.0)) + +(('flush', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 2408), (12, 1.6777589321136475, 0.3200200000028417)) + +(('_g_open', '/usr/local/lib/python2.6/dist-packages/tables/vlarray.py', 369), (12, 0.0656280517578125, 0.024000999997952022)) + +(('__repr__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 71), (13, 0.00061392784118652344, 0.0)) + +(('_g_checkName', '/usr/local/lib/python2.6/dist-packages/tables/node.py', 905), (14, 0.00046086311340332031, 0.0)) + +(('_saveBufferedRows', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 1823), (14, 1.1933684349060059, 0.32402199999341974)) + +(('_class_escape', 
'/usr/lib/python2.6/sre_parse.py', 231), (14, 0.00063347816467285156, 0.0)) + +(('_getOrCreatePath', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 568), (14, 0.0032899379730224609, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 86), (14, 2.8468761444091797, 1.1680750000050466)) + +(('csformula', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 51), (14, 0.000576019287109375, 0.0)) + +(('_g_open', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 788), (14, 1.5102775096893311, 0.8320540000058827)) + +(('_f_list', '/usr/local/lib/python2.6/dist-packages/tables/attributeset.py', 275), (14, 0.000469207763671875, 0.0040000000008149073)) + +(('calc_chunksize', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 69), (14, 0.014083623886108398, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 49), (14, 2.8359520435333252, 1.1680750000050466)) + +(('limit_es', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 60), (14, 0.00038599967956542969, 0.0)) + +(('__contains__', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 442), (14, 0.0029871463775634766, 0.0)) + +(('_indexNameOf', '/usr/local/lib/python2.6/dist-packages/tables/_table_common.py', 13), (14, 0.00043606758117675781, 0.0)) + +(('isVisibleName', '/usr/local/lib/python2.6/dist-packages/tables/path.py', 170), (14, 0.00056886672973632812, 0.0040000000008149073)) + +(('_g_refNode', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 514), (14, 0.020660638809204102, 0.012000000002444722)) + +(('alltrue', '/usr/lib/python2.6/dist-packages/numpy/core/fromnumeric.py', 1289), (14, 0.042331218719482422, 0.0040000000008149073)) + +(('_indexPathnameOf', '/usr/local/lib/python2.6/dist-packages/tables/_table_common.py', 16), (14, 0.010383844375610352, 0.004001000001153443)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 65), (15, 0.00060582160949707031, 
0.0040009999999999213)) + +(('flush', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 692), (16, 1.0141937732696533, 0.024003000002267072)) + +(('tell', '/usr/lib/python2.6/sre_parse.py', 211), (16, 0.00046443939208984375, 0.0)) + +(('_slotnames', '/usr/lib/python2.6/copy_reg.py', 95), (16, 0.0020489692687988281, 0.0080000000000000071)) + +(('__init__', '/usr/lib/python2.6/threading.py', 179), (16, 0.063062906265258789, 0.0)) + +(('Condition', '/usr/lib/python2.6/threading.py', 174), (16, 0.064303398132324219, 0.0)) + +(('_mk_bitmap', '/usr/lib/python2.6/sre_compile.py', 264), (16, 0.0048615932464599609, 0.0040000000000000036)) + +(('_f_close', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 704), (17, 0.091074943542480469, 0.0080000000016298145)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 86), (18, 0.00052547454833984375, 0.0)) + +(('_statetoken', '/usr/lib/pymodules/python2.6/ply/lex.py', 521), (19, 0.0021924972534179688, 0.0)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 1103), (20, 0.027700662612915039, 0.024000000004889444)) + +(('__init__', '/usr/lib/python2.6/sre_parse.py', 73), (20, 0.00057554244995117188, 0.0)) + +(('expanduser', '/usr/lib/python2.6/posixpath.py', 245), (20, 0.00081634521484375, 0.0)) + +(('_g_setPathNames', '/usr/local/lib/python2.6/dist-packages/tables/description.py', 521), (20, 0.034388065338134766, 0.024001999994652579)) + +(('_g_postInitHook', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 581), (20, 0.074328899383544922, 0.060003999999025837)) + +(('_compile', '/usr/lib/python2.6/re.py', 229), (20, 0.42908382415771484, 0.42002599999999979)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 2576), (20, 0.016957759857177734, 0.011999999998806743)) + +(('correct_byteorder', '/usr/local/lib/python2.6/dist-packages/tables/utils.py', 35), (20, 0.00051355361938476562, 0.0)) + +(('_g_postInitHook', 
'/usr/local/lib/python2.6/dist-packages/tables/vlarray.py', 297), (20, 0.00396728515625, 0.012000999999145279)) + +(('__init__', '/usr/lib/python2.6/sre_parse.py', 184), (20, 0.0021224021911621094, 0.0)) + +(('_checkfilters', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 136), (20, 0.00059771537780761719, 0.0)) + +(('_v_wdflts', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 318), (20, 0.015527009963989258, 0.0)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 379), (20, 1.9717285633087158, 1.1520739999978105)) + +(('_calc_nrowsinbuf', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 393), (20, 0.13779807090759277, 0.0040000000008149073)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/description.py', 380), (20, 0.26455187797546387, 0.1840109999939159)) + +(('parse', '/usr/lib/python2.6/sre_parse.py', 669), (20, 0.2817232608795166, 0.25601699999999994)) + +(('_g_setNestedNamesDescr', '/usr/local/lib/python2.6/dist-packages/tables/description.py', 502), (20, 0.0041193962097167969, 0.0)) + +(('_cacheDescriptionData', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 849), (20, 0.11352849006652832, 0.10801000000719796)) + +(('_compile_info', '/usr/lib/python2.6/sre_compile.py', 367), (20, 0.032503366470336914, 0.044001999999999875)) + +(('getColsInOrder', '/usr/local/lib/python2.6/dist-packages/tables/description.py', 529), (20, 0.00079584121704101562, 0.0)) + +(('_g_postInitHook', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 267), (20, 0.40326571464538574, 0.10000800000125309)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/misc/lrucache.py', 123), (20, 0.0061318874359130859, 0.0080009999983303715)) + +(('_getEnumMap', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 720), (20, 0.026581048965454102, 0.0080010000019683503)) + +(('compile', '/usr/lib/python2.6/re.py', 188), (20, 0.43025588989257812, 0.42002599999999979)) + +(('__init__', 
'/usr/local/lib/python2.6/dist-packages/tables/utils.py', 316), (20, 0.00078845024108886719, 0.0)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 237), (20, 0.00099825859069824219, 0.0039999999971769284)) + +(('openFile', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 168), (20, 1.0345497131347656, 0.21601500000542728)) + +(('__getRootGroup', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 538), (20, 0.040925502777099609, 0.028001000002404908)) + +(('compile', '/usr/lib/python2.6/sre_compile.py', 501), (20, 0.42527437210083008, 0.41202399999999995)) + +(('expandvars', '/usr/lib/python2.6/posixpath.py', 276), (20, 0.0098223686218261719, 0.0)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/vlarray.py', 191), (20, 0.12205791473388672, 0.084005000000615837)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 467), (20, 1.0327165126800537, 0.21601500000542728)) + +(('_code', '/usr/lib/python2.6/sre_compile.py', 486), (20, 0.13938212394714355, 0.14800499999999994)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/misc/proxydict.py', 20), (24, 0.00069141387939453125, 0.0)) + +(('__del__', '/usr/local/lib/python2.6/dist-packages/tables/node.py', 319), (25, 0.053544521331787109, 0.0080000000016298145)) + +(('_g_close', '/usr/local/lib/python2.6/dist-packages/tables/attributeset.py', 617), (26, 0.0006389617919921875, 0.0)) + +(('_wrapit', '/usr/lib/python2.6/dist-packages/numpy/core/fromnumeric.py', 32), (26, 0.077532291412353516, 0.0080000000016298145)) + +(('asarray', '/usr/lib/python2.6/dist-packages/numpy/core/numeric.py', 180), (26, 0.025931119918823242, 0.0040000000008149073)) + +(('_g_getChildLeafClass', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 338), (26, 0.010759830474853516, 0.0)) + +(('_f_close', '/usr/local/lib/python2.6/dist-packages/tables/node.py', 506), (26, 0.064695119857788086, 0.011999999998806743)) + +(('__init__', 
'/usr/lib/python2.6/threading.py', 59), (26, 0.00082802772521972656, 0.0)) + +(('_g_delLocation', '/usr/local/lib/python2.6/dist-packages/tables/node.py', 468), (26, 0.028501033782958984, 0.0040000000008149073)) + +(('prod', '/usr/lib/python2.6/dist-packages/numpy/core/fromnumeric.py', 1679), (26, 0.11123895645141602, 0.0080000000016298145)) + +(('__setitem__', '/usr/local/lib/python2.6/dist-packages/tables/misc/proxydict.py', 33), (28, 0.00091433525085449219, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 77), (28, 0.00099396705627441406, 0.0040010000000001433)) + +(('S_ISREG', '/usr/lib/python2.6/stat.py', 49), (28, 0.0023477077484130859, 0.0040000000008149073)) + +(('__iter__', '/usr/local/lib/python2.6/dist-packages/tables/misc/lrucache.py', 202), (28, 0.012085437774658203, 0.0)) + +(('isfile', '/usr/lib/python2.6/genericpath.py', 26), (28, 0.012959957122802734, 0.0040000000008149073)) + +(('__contains__', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 1300), (28, 0.049411773681640625, 0.028001999995467486)) + +(('_escape', '/usr/lib/python2.6/sre_parse.py', 263), (31, 0.0017206668853759766, 0.0)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 604), (32, 0.017023563385009766, 0.032003000000258908)) + +(('__setattr__', '/usr/local/lib/python2.6/dist-packages/tables/attributeset.py', 411), (32, 0.11370277404785156, 0.064002999999502208)) + +(('', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 599), (32, 0.00083613395690917969, 0.0)) + +(('isname', '/usr/lib/python2.6/sre_parse.py', 222), (34, 0.012839794158935547, 0.012001999999999846)) + +(('S_IFMT', '/usr/lib/python2.6/stat.py', 24), (34, 0.00092267990112304688, 0.0)) + +(('create_aggr_impl_init_args', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper_validator.py', 123), (36, 0.028400659561157227, 0.036001999999999423)) + +(('closeNodes', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 862), (36, 
0.19506335258483887, 0.039999999997235136)) + +(('_unrefNode', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 2088), (36, 0.011468887329101562, 0.0)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 119), (38, 0.019418239593505859, 0.024000999999999717)) + +(('_g_postInitHook', '/usr/local/lib/python2.6/dist-packages/tables/node.py', 490), (40, 0.0010602474212646484, 0.0)) + +(('_getTypeColNames', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 712), (40, 0.043287038803100586, 0.040003000001888722)) + +(('_g_postInitHook', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 330), (40, 0.0068311691284179688, 0.0039999999971769284)) + +(('_g_setLocation', '/usr/local/lib/python2.6/dist-packages/tables/node.py', 386), (40, 0.028193235397338867, 0.024002999998629093)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/node.py', 250), (40, 2.0035805702209473, 1.1840770000017073)) + +(('isstring', '/usr/lib/python2.6/sre_compile.py', 480), (40, 0.0014374256134033203, 0.0)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 259), (40, 2.0063085556030273, 1.1840770000017073)) + +(('_g_checkGroup', '/usr/local/lib/python2.6/dist-packages/tables/node.py', 873), (40, 0.0016260147094726562, 0.0)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/statement.py', 104), (43, 0.0013353824615478516, 0.0)) + +(('t_newline', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 161), (43, 0.0016865730285644531, 0.0)) + +(('closegroup', '/usr/lib/python2.6/sre_parse.py', 89), (44, 0.0015876293182373047, 0.0)) + +(('opengroup', '/usr/lib/python2.6/sre_parse.py', 78), (44, 0.0021440982818603516, 0.0)) + +(('iterate_subrules', '/home/melnikovkolya/classes/semester-3-project/flowy/validator_common.py', 15), (46, 0.0019969940185546875, 0.0)) + +(('isUndoEnabled', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 1432), (46, 0.0038013458251953125, 
0.0080009999983303715)) + +(('', '/usr/lib/pymodules/python2.6/ply/lex.py', 709), (47, 0.0077641010284423828, 0.0040010000000001433)) + +(('checkFileAccess', '/usr/local/lib/python2.6/dist-packages/tables/utils.py', 130), (48, 0.10146737098693848, 0.016000999999960186)) + +(('__setitem__', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 256), (50, 0.0097527503967285156, 0.0040000000008149073)) + +(('', '/home/melnikovkolya/classes/semester-3-project/flowy/validator_common.py', 53), (50, 0.0011332035064697266, 0.0)) + +(('__setitem__', '/usr/lib/python2.6/sre_parse.py', 140), (52, 0.0015087127685546875, 0.0)) + +(('_simple', '/usr/lib/python2.6/sre_compile.py', 360), (52, 0.0086431503295898438, 0.0080000000000000071)) + +(('splitPath', '/usr/local/lib/python2.6/dist-packages/tables/path.py', 147), (52, 0.0021691322326660156, 0.004001000001153443)) + +(('', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 308), (52, 0.0015707015991210938, 0.0)) + +(('split_type', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 72), (53, 0.0048263072967529297, 0.0)) + +(('from_type', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 414), (53, 0.050877571105957031, 0.052004000001034001)) + +(('exists', '/usr/lib/python2.6/genericpath.py', 15), (54, 0.088840723037719727, 0.0)) + +(('find_op', '/home/melnikovkolya/classes/semester-3-project/flowy/validator_common.py', 35), (54, 0.0060968399047851562, 0.0080009999999997028)) + +(('_g_loadChild', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 1146), (54, 1.7912564277648926, 0.98006199999508681)) + +(('iterate_rules', '/home/melnikovkolya/classes/semester-3-project/flowy/validator_common.py', 26), (55, 0.0049200057983398438, 0.0040000000000000036)) + +(('', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 601), (59, 0.0014562606811523438, 0.0)) + +(('_compile_charset', '/usr/lib/python2.6/sre_compile.py', 184), (59, 0.038069248199462891, 0.032000999999999946)) + +(('_optimize_charset', 
'/usr/lib/python2.6/sre_compile.py', 213), (59, 0.027762889862060547, 0.028000999999999943)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/attributeset.py', 213), (60, 0.63532543182373047, 0.29602099999829079)) + +(('_v_attrs', '/usr/local/lib/python2.6/dist-packages/tables/node.py', 221), (60, 0.64392971992492676, 0.30402099999264465)) + +(('__contains__', '/usr/local/lib/python2.6/dist-packages/tables/misc/lrucache.py', 141), (64, 0.0020375251770019531, 0.0)) + +(('lineno', '/usr/lib/pymodules/python2.6/ply/yacc.py', 210), (65, 0.0025455951690673828, 0.0040000000000000036)) + +(('_parse_sub', '/usr/lib/python2.6/sre_parse.py', 307), (66, 0.48796296119689941, 0.46803300000000037)) + +(('_reduce_ex', '/usr/lib/python2.6/copy_reg.py', 59), (67, 0.01495361328125, 0.012000999999145279)) + +(('getNode', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 905), (68, 1.809065580368042, 1.0040639999933774)) + +(('_refNode', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 2073), (70, 0.013490438461303711, 0.0040000000008149073)) + +(('_parse', '/usr/lib/python2.6/sre_parse.py', 385), (84, 0.46760940551757812, 0.45203300000000035)) + +(('from_dtype', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 384), (85, 0.091119289398193359, 0.088008000002446352)) + +(('__setattr__', '/usr/local/lib/python2.6/dist-packages/tables/misc/lrucache.py', 209), (90, 0.0032684803009033203, 0.0080009999983303715)) + +(('checkNameValidity', '/usr/local/lib/python2.6/dist-packages/tables/path.py', 71), (99, 0.035396814346313477, 0.0039999999971769284)) + +(('close', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 3200), (101, 0.011583328247070312, 0.0)) + +(('_g__setattr', '/usr/local/lib/python2.6/dist-packages/tables/attributeset.py', 358), (109, 0.23629546165466309, 0.1600109999999404)) + +(('notifyAll', '/usr/lib/python2.6/threading.py', 288), (116, 0.3298649787902832, 0.31601800000134972)) + +(('_checkOpen', 
'/usr/local/lib/python2.6/dist-packages/tables/file.py', 1400), (117, 0.0032482147216796875, 0.0)) + +(('_acquire_restore', '/usr/lib/python2.6/threading.py', 216), (119, 0.52397418022155762, 0.52003399999739486)) + +(('wait', '/usr/lib/python2.6/threading.py', 228), (119, 10657.345429897308, 10841.357548999993)) + +(('_release_save', '/usr/lib/python2.6/threading.py', 213), (119, 0.0046584606170654297, 0.0040000000008149073)) + +(('t_id', '/home/melnikovkolya/classes/semester-3-project/flowy/parser.py', 155), (121, 0.0049595832824707031, 0.0040009999999999213)) + +(('_compile', '/usr/lib/python2.6/sre_compile.py', 38), (139, 0.34765720367431641, 0.36801000000000017)) + +(('__init__', '/usr/lib/python2.6/sre_parse.py', 96), (139, 0.0040760040283203125, 0.0040010000000001433)) + +(('bind', '/usr/lib/pymodules/python2.6/ply/yacc.py', 1240), (157, 0.0056459903717041016, 0.0080009999999999248)) + +(('__init__', '/usr/lib/pymodules/python2.6/ply/yacc.py', 1226), (157, 0.0065135955810546875, 0.0040000000000000036)) + +(('_deepcopy_list', '/usr/lib/python2.6/copy.py', 224), (158, 3.1772556304931641, 1.89612599999964)) + +(('from_atom', '/usr/local/lib/python2.6/dist-packages/tables/description.py', 136), (163, 0.40211606025695801, 0.20401399999900605)) + +(('append', '/usr/lib/python2.6/sre_parse.py', 144), (164, 0.0057592391967773438, 0.01200100000000015)) + +(('ismethod', '/usr/lib/python2.6/inspect.py', 67), (171, 0.023562192916870117, 0.0)) + +(('getargs', '/usr/lib/python2.6/inspect.py', 731), (171, 0.053690433502197266, 0.040003999998589279)) + +(('getargspec', '/usr/lib/python2.6/inspect.py', 791), (171, 0.20256519317626953, 0.072004999998171115)) + +(('isfunction', '/usr/lib/python2.6/inspect.py', 142), (171, 0.0052220821380615234, 0.0040000000008149073)) + +(('iscode', '/usr/lib/python2.6/inspect.py', 209), (171, 0.0052673816680908203, 0.0079999999979918357)) + +(('_get_init_args', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 573), (171, 
0.28468012809753418, 0.11200800000005984)) + +(('func_code', '/usr/lib/pymodules/python2.6/ply/yacc.py', 91), (174, 0.0055019855499267578, 0.0)) + +(('func_code', '/usr/lib/pymodules/python2.6/ply/lex.py', 51), (177, 0.00527191162109375, 0.0)) + +(('_deepcopy_dict', '/usr/lib/python2.6/copy.py', 251), (189, 1.6356081962585449, 1.0400690000024042)) + +(('_reconstructor', '/usr/lib/python2.6/copy_reg.py', 46), (189, 0.023641347885131836, 0.004001000001153443)) + +(('getwidth', '/usr/lib/python2.6/sre_parse.py', 146), (191, 0.052240133285522461, 0.060001999999999667)) + +(('prefix', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 352), (193, 0.0070273876190185547, 0.0)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/description.py', 218), (193, 0.11548066139221191, 0.10000799999761512)) + +(('isident', '/usr/lib/python2.6/sre_parse.py', 216), (206, 0.0056557655334472656, 0.0040000000000000036)) + +(('__setitem__', '/usr/lib/pymodules/python2.6/ply/yacc.py', 201), (208, 0.0063018798828125, 0.0040000000000000036)) + +(('joinPaths', '/usr/local/lib/python2.6/dist-packages/tables/description.py', 533), (216, 0.0057065486907958984, 0.0039999999971769284)) + +(('', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 514), (216, 0.0078222751617431641, 0.012000999999145279)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 2890), (216, 0.0081388950347900391, 0.0040000000008149073)) + +(('copy', '/usr/lib/python2.6/copy.py', 65), (217, 0.084427833557128906, 0.072005000001809094)) + +(('_deepcopy_tuple', '/usr/lib/python2.6/copy.py', 232), (220, 0.15613865852355957, 0.068004000000655651)) + +(('token', '/usr/lib/pymodules/python2.6/ply/lex.py', 304), (222, 0.043691873550415039, 0.044003999999999932)) + +(('_identityfunction', '/usr/lib/python2.6/sre_compile.py', 24), (228, 0.0059294700622558594, 0.0)) + +(('__len__', '/usr/lib/python2.6/sre_parse.py', 132), (250, 0.0083570480346679688, 0.012000999999999928)) + 
+(('from_kind', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 440), (260, 0.22029495239257812, 0.17201100001329905)) + +(('_g_col', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 2641), (317, 0.02419281005859375, 0.01599999999962165)) + +(('__setattr__', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 803), (328, 0.012391090393066406, 0.024000000001251465)) + +(('__new__', '', 8), (342, 0.01979374885559082, 0.012001999999483814)) + +(('_deepcopy_atomic', '/usr/lib/python2.6/copy.py', 197), (352, 0.017214298248291016, 0.024000000001251465)) + +(('__newobj__', '/usr/lib/python2.6/copy_reg.py', 92), (406, 0.016112804412841797, 0.016000999996322207)) + +(('__getitem__', '/usr/lib/pymodules/python2.6/ply/yacc.py', 197), (436, 0.013558387756347656, 0.0080010000000001469)) + +(('_reconstruct', '/usr/lib/python2.6/copy.py', 300), (437, 2.0792980194091797, 1.3320890000031795)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 656), (437, 0.25737309455871582, 0.18001000001095235)) + +(('_normalize_shape', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 130), (469, 0.053843259811401367, 0.052005000005010515)) + +(('', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 527), (469, 0.012288808822631836, 0.012001000002783258)) + +(('__init__', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 519), (469, 0.23362064361572266, 0.17601300001115305)) + +(('_normalize_default', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 152), (469, 0.041225671768188477, 0.024002000001928536)) + +(('', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 150), (469, 0.012531280517578125, 0.016000999999960186)) + +(('match', '/usr/lib/python2.6/sre_parse.py', 201), (512, 0.037920475006103516, 0.036000000000000032)) + +(('', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 580), (513, 0.016402482986450195, 0.0040000000008149073)) + +(('__getitem__', '/usr/lib/python2.6/sre_parse.py', 136), (539, 
0.02490234375, 0.036001000000000172)) + +(('get', '/usr/lib/python2.6/sre_parse.py', 207), (709, 0.077857732772827148, 0.076003000000000043)) + +(('__getattr__', '/usr/local/lib/python2.6/dist-packages/tables/attributeset.py', 293), (826, 0.53875136375427246, 0.24001400000270223)) + +(('issysattrname', '/usr/local/lib/python2.6/dist-packages/tables/attributeset.py', 76), (861, 0.05054020881652832, 0.040002000001550186)) + +(('_g_getnode', '/usr/local/lib/python2.6/dist-packages/tables/attributeset.py', 207), (915, 0.13512516021728516, 0.12000599999373662)) + +(('__next', '/usr/lib/python2.6/sre_parse.py', 188), (987, 0.052829265594482422, 0.044001000000000179)) + +(('_keep_alive', '/usr/lib/python2.6/copy.py', 261), (1139, 0.094742774963378906, 0.044003999999404186)) + +(('put', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 9), (1633, 0.53054642677307129, 0.4280369999796676)) + +(('or_op', '/home/melnikovkolya/classes/semester-3-project/flowy/operators.py', 42), (1633, 0.06103062629699707, 0.064007000011770288)) + +(('_f_walk', '/usr/local/lib/python2.6/dist-packages/tables/description.py', 597), (1641, 0.07251429557800293, 0.076007000003301073)) + +(('deepcopy', '/usr/lib/python2.6/copy.py', 144), (1912, 7.6241989135742188, 4.6243060000015248)) + +(('__call__', '/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 79), (2392, 0.066572666168212891, 0.064002999999502208)) + +(('__call__', '/home/melnikovkolya/classes/semester-3-project/flowy/allen_index.py', 92), (2392, 0.063406229019165039, 0.052002999997057486)) + +(('next', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 151), (2392, 15133.992086648941, 14863.716920000006)) + +(('pass_allen_indices_down', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 78), (2392, 5.3669826984405518, 4.0682569999771658)) + +(('read_row', '/home/melnikovkolya/classes/semester-3-project/flowy/record.py', 137), (3266, 15.117149591445923, 14.024838999965141)) + 
+(('get_interval_records', '/home/melnikovkolya/classes/semester-3-project/flowy/timeindex.py', 57), (4784, 3.6127510070800781, 2.3081400000082795)) + +(('next', '/usr/local/lib/python2.6/dist-packages/tables/vlarray.py', 552), (4784, 2.3432192802429199, 1.4280830000025162)) + +(('next_branch', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 66), (4784, 0.13327312469482422, 0.12400600000182749)) + +(('add', '/home/melnikovkolya/classes/semester-3-project/flowy/timeindex.py', 48), (4820, 4.9994432926177979, 2.6681820000048901)) + +(('update_min_max_time', '/home/melnikovkolya/classes/semester-3-project/flowy/timeindex.py', 38), (4820, 0.22450447082519531, 0.10000299999774143)) + +(('len', '/home/melnikovkolya/classes/semester-3-project/flowy/timeindex.py', 29), (4820, 0.31543779373168945, 0.14800999998988118)) + +(('', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 160), (4899, 0.32368016242980957, 0.14000999999188934)) + +(('new_group', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 21), (8209, 38.275750398635864, 37.966419999997015)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/aggr_operators.py', 62), (8209, 0.55724692344665527, 0.8800680000003922)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/aggr_operators.py', 141), (8209, 0.56306171417236328, 0.69204699999681907)) + +(('create_modules_aggr', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 163), (8209, 2.0085594654083252, 1.9321239999972022)) + +(('bitAND', '/home/melnikovkolya/classes/semester-3-project/flowy/operators.py', 26), (8209, 0.44151520729064941, 0.30002200000126322)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/aggr_operators.py', 25), (8209, 0.5666811466217041, 0.74404600000525534)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/aggr_operators.py', 96), (8209, 1.3313522338867188, 0.96006100000522565)) + +(('__init__', 
'/home/melnikovkolya/classes/semester-3-project/flowy/aggr_operators.py', 79), (8209, 0.98292064666748047, 0.31201400000281865)) + +(('create_aggr_ops', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 172), (8209, 33.414367437362671, 33.61013400000229)) + +(('export', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 203), (8209, 18.683615446090698, 9.036556999946697)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 151), (8209, 36.858602523803711, 36.630342999991626)) + +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 25), (8211, 20219.451322078705, 20932.852232000798)) + +(('get_interval', '/home/melnikovkolya/classes/semester-3-project/flowy/timeindex.py', 33), (9604, 1.2529714107513428, 1.2960850000126811)) + +(('convertToNPAtom', '/usr/local/lib/python2.6/dist-packages/tables/utils.py', 79), (16172, 18.547049999237061, 16.853049000066676)) + +(('_f_getChild', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 663), (16172, 16.804285764694214, 11.332684000026347)) + +(('flavor_of', '/usr/local/lib/python2.6/dist-packages/tables/flavor.py', 171), (16172, 6.6775972843170166, 6.4084059999877354)) + +(('_conv_python_to_numpy', '/usr/local/lib/python2.6/dist-packages/tables/flavor.py', 389), (16172, 3.2069950103759766, 1.860116999996535)) + +(('_getnobjects', '/usr/local/lib/python2.6/dist-packages/tables/vlarray.py', 398), (16172, 94.946576595306396, 96.20998100007273)) + +(('_is_python', '/usr/local/lib/python2.6/dist-packages/tables/flavor.py', 323), (16172, 1.4184434413909912, 2.4401610000131768)) + +(('append', '/usr/local/lib/python2.6/dist-packages/tables/vlarray.py', 452), (16172, 385.18879699707031, 391.28442500016899)) + +(('array_of_flavor', '/usr/local/lib/python2.6/dist-packages/tables/flavor.py', 187), (16172, 15.438640832901001, 14.396902000040427)) + +(('conv_to_numpy', '/usr/local/lib/python2.6/dist-packages/tables/flavor.py', 344), 
(16172, 4.9252064228057861, 3.016180999991775)) + +(('convertToNPAtom2', '/usr/local/lib/python2.6/dist-packages/tables/utils.py', 114), (16172, 21.558700323104858, 18.093125000079453)) + +(('_is_numpy', '/usr/local/lib/python2.6/dist-packages/tables/flavor.py', 328), (16172, 1.5470161437988281, 0.96406699998988188)) + +(('__getattr__', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 785), (16178, 19.058515310287476, 12.80476400004045)) + +(('sum', '/usr/lib/python2.6/dist-packages/numpy/core/fromnumeric.py', 1185), (16180, 86.684595346450806, 90.773652000061702)) + +(('joinPath', '/usr/local/lib/python2.6/dist-packages/tables/path.py', 122), (16200, 1.5187370777130127, 0.94805099999211961)) + +(('_isWritable', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 1410), (16226, 1.0091426372528076, 0.4920290000118257)) + +(('_checkWritable', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 1415), (16226, 4.2642014026641846, 3.4121979999981704)) + +(('_g_checkHasChild', '/usr/local/lib/python2.6/dist-packages/tables/group.py', 417), (16240, 4.112175464630127, 1.6361010000036913)) + +(('_g_checkOpen', '/usr/local/lib/python2.6/dist-packages/tables/node.py', 374), (16240, 0.88990879058837891, 0.90805100001671235)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/aggr_operators.py', 9), (16416, 0.9982764720916748, 1.0080560000005789)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/aggr_operators.py', 111), (16418, 0.97502565383911133, 1.3320870000061404)) + +(('append', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 118), (22113, 413.17530179023743, 412.41372800016688)) + +(('task_done', '/usr/lib/python2.6/Queue.py', 45), (22810, 67.266116380691528, 69.376329999999143)) + +(('_get', '/usr/lib/python2.6/Queue.py', 208), (22811, 4.7672314643859863, 1.2320720000116125)) + +(('_put', '/usr/lib/python2.6/Queue.py', 204), (22811, 1.259291410446167, 2.4361499999988609)) + +(('put', 
'/usr/lib/python2.6/Queue.py', 107), (22811, 165.07228136062622, 175.91898199991579)) + +(('split', '/home/melnikovkolya/classes/semester-3-project/flowy/splitter.py', 17), (22811, 171.19394636154175, 182.82742799990979)) + +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 15), (22812, 914.74895477294922, 945.61510499965675)) + +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/splitter.py', 37), (22813, 258.71072959899902, 266.49664500002473)) + +(('get', '/usr/lib/python2.6/Queue.py', 150), (22814, 150.85909199714661, 153.76158300002135)) + +(('empty', '/usr/lib/python2.6/Queue.py', 93), (22815, 35.282126903533936, 36.642283999995925)) + +(('__call__', '/home/melnikovkolya/classes/semester-3-project/flowy/aggr_operators.py', 68), (31017, 2.7790284156799316, 2.5281559999912133)) + +(('__call__', '/home/melnikovkolya/classes/semester-3-project/flowy/aggr_operators.py', 31), (31020, 3.7186253070831299, 2.5441570000075444)) + +(('__call__', '/home/melnikovkolya/classes/semester-3-project/flowy/aggr_operators.py', 102), (31020, 1.7362370491027832, 1.8761179999855813)) + +(('__call__', '/home/melnikovkolya/classes/semester-3-project/flowy/aggr_operators.py', 85), (31020, 2.9057433605194092, 2.7481640000096377)) + +(('__call__', '/home/melnikovkolya/classes/semester-3-project/flowy/aggr_operators.py', 147), (31020, 2.3475019931793213, 2.2001420000087819)) + +(('notify', '/usr/lib/python2.6/threading.py', 270), (45738, 140.78615832328796, 150.28141099993445)) + +(('_qsize', '/usr/lib/python2.6/Queue.py', 200), (45741, 4.5908727645874023, 4.4162810000161699)) + +(('_is_owned', '/usr/lib/python2.6/threading.py', 219), (45855, 125.10483431816101, 133.50436399995306)) + +(('_note', '/usr/lib/python2.6/threading.py', 64), (45873, 3.5862228870391846, 3.1721919999945385)) + +(('__call__', '/home/melnikovkolya/classes/semester-3-project/flowy/aggr_operators.py', 117), (62034, 7.7535879611968994, 4.1762439999965864)) + 
+(('__call__', '/home/melnikovkolya/classes/semester-3-project/flowy/aggr_operators.py', 15), (62034, 4.9642887115478516, 3.5122199999968871)) + +(('new_op', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 92), (73881, 16.079083919525146, 14.632925999982035)) + +(('LT', '/home/melnikovkolya/classes/semester-3-project/flowy/operators.py', 71), (152069, 13.969250202178955, 9.8246090000284312)) + +(('final_result', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 80), (246349, 60.500575304031372, 54.087404999956789)) + +(('reset', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 64), (246349, 111.18868279457092, 105.20649999988791)) + +(('deep_copy', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 41), (246351, 61.105264902114868, 52.447237999959725)) + +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 89), (250377, 128.07371306419373, 122.62363699984053)) + +(('iterate_fixed_fields', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 93), (250377, 81.752978563308716, 74.712657999931253)) + +(('__iter__', '/home/melnikovkolya/classes/semester-3-project/flowy/record.py', 129), (250377, 336.43062138557434, 343.01345199924253)) + +(('mask', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 71), (492698, 59.059696674346924, 48.559062999958314)) + +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/filter.py', 113), (509112, 201.0359148979187, 195.16825499970582)) + +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 72), (3137176, 770.75759077072144, 749.61094304290964)) + +(('read_row', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 99), (3152836, 13413.966747999191, 13149.705791366749)) + +(('iterrows', '/usr/local/lib/python2.6/dist-packages/tables/table.py', 1441), (3152853, 2847.1434261798859, 2791.3104361655387)) + +(('_get_container', 
'/usr/local/lib/python2.6/dist-packages/tables/table.py', 669), (3152853, 169.87609815597534, 154.98565101060376)) + +(('', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 197), (3152867, 86.064342260360718, 76.52479300505729)) + +(('read_rows_list', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 105), (3155228, 13582.554839611053, 13318.368595361764)) + +(('read_rows_list', '/home/melnikovkolya/classes/semester-3-project/flowy/record.py', 133), (3155228, 14223.106386899948, 13948.747855334786)) + +(('newfget', '/usr/local/lib/python2.6/dist-packages/tables/utils.py', 224), (3175278, 99.648912668228149, 88.921667005884956)) + +(('__init__', '/home/melnikovkolya/classes/semester-3-project/flowy/record.py', 82), (3413051, 522.62214875221252, 501.33129404957072)) + +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/merger.py', 23), (4397019, 553.19540309906006, 531.55333503122165)) + +(('read', '/usr/local/lib/python2.6/dist-packages/tables/vlarray.py', 700), (6280932, 6694.1691343784332, 6541.9808274548959)) + +(('', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 243), (6280932, 165.18905258178711, 141.83687900967561)) + +(('internal_to_flavor', '/usr/local/lib/python2.6/dist-packages/tables/flavor.py', 150), (6285668, 1241.2321634292603, 1200.2193500868016)) + +(('flavor_to_flavor', '/usr/local/lib/python2.6/dist-packages/tables/flavor.py', 130), (6285668, 920.82357239723206, 885.05146706402957)) + +(('_conv_numpy_to_python', '/usr/local/lib/python2.6/dist-packages/tables/flavor.py', 465), (6285668, 245.37669491767883, 212.76530701557567)) + +(('', '/usr/local/lib/python2.6/dist-packages/tables/atom.py', 511), (6297148, 176.32034802436829, 156.25368801116201)) + +(('array_of_flavor2', '/usr/local/lib/python2.6/dist-packages/tables/flavor.py', 110), (6301840, 594.53782534599304, 562.19098804149326)) + +(('__getitem__', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 249), (6322857, 253.65313673019409, 
221.51785001508688)) + +(('_getNode', '/usr/local/lib/python2.6/dist-packages/tables/file.py', 880), (6323076, 648.34474611282349, 619.23076704233608)) + +(('', '/home/melnikovkolya/classes/semester-3-project/flowy/pytables.py', 102), (9433720, 7178.6332139968872, 7026.6352255172096)) + +(('_processRangeRead', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 449), (9433785, 4368.2701687812805, 4262.7936753118302)) + +(('__len__', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 293), (9433785, 249.99315619468689, 206.95274501475433)) + +(('_processRange', '/usr/local/lib/python2.6/dist-packages/tables/leaf.py', 425), (9433785, 3676.16250872612, 3576.8311242603595)) + +(('idx2long', '/usr/local/lib/python2.6/dist-packages/tables/utils.py', 66), (25148502, 1995.707435131073, 1876.7011831339769)) + +(('is_idx', '/usr/local/lib/python2.6/dist-packages/tables/utils.py', 44), (25148502, 700.56697034835815, 591.32907204183357)) + +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 178), (30651429, 17337.516788959503, 17566.637794171394)) + +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 101), (30677828, 12477.594463348389, 12583.8665639143)) + +(('match', '/home/melnikovkolya/classes/semester-3-project/flowy/grouper.py', 126), (35383022, 8230.0888061523438, 8037.6824171527333)) + +(('EQ', '/home/melnikovkolya/classes/semester-3-project/flowy/operators.py', 63), (40143460, 2728.9575715065002, 2304.1001345953482)) + diff --git a/run-output2.txt b/run-output2.txt new file mode 100644 index 0000000..ead60df --- /dev/null +++ b/run-output2.txt @@ -0,0 +1,25 @@ +0.82 +[Input('./netflow-trace.h5', 50, set([]), set([]), set([])), BranchNode('S', 50, set([]), set([]))] + +[BranchNode('S', 51, set([]), set([])), Branch('A', 51, None, set([]), set([])), BranchNode('www_req', 51, set([]), set([])), BranchNode('g_www_req', 51, set([]), set([])), BranchNode('ggf', 51, set([]), set([])), BranchNode('M', 51, 
# splitter.py -- fan records from the filter stage out to per-branch queues.
# (The unused `import profiler` was removed.)
try:
    from Queue import Queue, Empty    # Python 2 (the codebase targets 2.5+)
except ImportError:
    from queue import Queue, Empty    # Python 3 fallback


class Splitter(object):
    """Routes each record produced by the filter to the branches selected
    by the record's branch mask.

    `name_to_br` maps branch names (e.g. 'A', 'B') to Branch queues; the
    iteration order of its values must match the bit order of the masks
    yielded by `filter`.
    """

    def __init__(self, name_to_br, filter):
        # NOTE: `filter` shadows the builtin, but the name is part of the
        # public constructor signature, so it is kept.
        self.branches = name_to_br.values()
        self.name_to_branch = name_to_br
        self.filter = filter
        print("Splitter initiated")

    def go(self):
        """Drain the filter, dispatch every record, then mark all branches
        ready so their consumers can terminate."""
        count = 0
        # The filter yields one (record, branch_mask) tuple per iteration.
        for rec, mask in self.filter:
            self.split(mask, rec)
            count += 1
        print(count)
        self.ready()

    def split(self, branch_mask, record):
        """Put `record` on every branch whose mask entry is truthy."""
        for branch, active in zip(self.branches, branch_mask):
            if active:
                branch.put(record)

    def ready(self):
        """Signal end-of-stream to all branch consumers."""
        print("Filters ready")
        for br in self.branches:
            br.ready = True


class Branch(Queue):
    """An unbounded queue feeding one processing branch.

    Iterating a Branch yields records until the queue is empty AND the
    producer has set `ready` (the end-of-stream flag).
    """

    def __init__(self, name):
        Queue.__init__(self, 0)   # maxsize 0 == unbounded
        self.name = name
        self.ready = False

    def __iter__(self):
        while True:
            if self.empty() and self.ready:
                # End the generator. (Was `raise StopIteration`, which is
                # identical on Python 2 but a RuntimeError under PEP 479.)
                return
            try:
                record = self.get(timeout=3)
                yield record
                self.task_done()
            except Empty:
                # Timed out: stop if the producer is done, otherwise keep
                # polling for more records.
                if self.ready:
                    return
# splitter_validator.py -- wires parsed splitter/branch declarations to the
# runtime Splitter/Branch implementations.
from copy import copy, deepcopy


class SplitterValidator(object):
    """Builds the runtime Splitter from the parser's output.

    Branch queues are created in the order given by the filter validator's
    branch ids, so queue order matches the filter's mask bit order.
    """

    def __init__(self, parser, filter_validator):
        self.splitter = copy(parser.splitter)
        self.branches = deepcopy(parser.branches)
        self.branch_ids = filter_validator.branches_ids
        self.filter_impl = filter_validator.impl
        self.br_name_to_br = {}
        self.impl = self.create_impl()

    def sort_branches(self):
        """Return branch names sorted by their numeric branch id."""
        id_to_branch = dict(zip(self.branch_ids.values(),
                                self.branch_ids.keys()))
        return [id_to_branch[k] for k in sorted(id_to_branch.keys())]

    def create_impl(self):
        """Instantiate one Branch queue per name and a Splitter over them."""
        # Imported lazily so this module can be loaded without pulling in
        # the runtime implementation.
        from splitter import Branch as BranchImpl
        from splitter import Splitter as SplitterImpl
        br_names = self.sort_branches()
        # One queue instance per branch name, mapped as e.g. {'A': <Branch>}.
        branches = [BranchImpl(name) for name in br_names]
        name_to_br = dict(zip(br_names, branches))
        self.br_name_to_br = name_to_br
        return SplitterImpl(name_to_br, self.filter_impl)


# statement.py -- AST node classes produced by the flow-language parser.

class Splitter(object):
    """Parsed `splitter` statement."""
    def __init__(self, name, line, branches=None):
        self.name = name
        self.line = line
        self.branches = branches if branches else set()

    def __repr__(self):
        return "Splitter('%s', %s, %s)" % (self.name, self.line, self.branches)


class Ungrouper(object):
    """Parsed `ungrouper` statement."""
    def __init__(self, name, line, branches=None):
        self.name = name
        self.line = line
        self.branches = branches if branches else set()

    def __repr__(self):
        return "Ungrouper('%s', %s, %s)" % (self.name, self.line,
                                            self.branches)


class Input(object):
    """Pipeline input node (a source file path)."""
    def __init__(self, name, line, inputs=None, outputs=None, branches=None):
        self.name = name
        self.line = line
        self.branches = branches if branches else set()
        self.inputs = inputs if inputs is not None else set()
        self.outputs = outputs if outputs is not None else set()

    def __repr__(self):
        return "Input('%s', %s, %s, %s, %s)" % (self.name, self.line,
                                                self.branches, self.inputs,
                                                self.outputs)


class Output(object):
    """Pipeline output node (a destination file path)."""
    def __init__(self, name, line, inputs=None, outputs=None, branches=None):
        self.name = name
        self.line = line
        self.branches = branches if branches else set()
        self.inputs = inputs if inputs is not None else set()
        self.outputs = outputs if outputs is not None else set()

    def __repr__(self):
        return "Output('%s', %s, %s, %s, %s)" % (self.name, self.line,
                                                 self.branches, self.inputs,
                                                 self.outputs)


class Branch(object):
    """Parsed branch declaration (e.g. `S branch A -> ...`)."""
    def __init__(self, name, line, members=None, inputs=None, outputs=None):
        self.name = name
        # NOTE(review): unlike the other fields, `members` may stay None
        # rather than defaulting to set() -- confirm against callers.
        self.members = members
        self.line = line
        # FIX: removed a dead `self.inputs = members ...` assignment that
        # was immediately overwritten by the line below (copy-paste slip).
        self.inputs = inputs if inputs is not None else set()
        self.outputs = outputs if outputs is not None else set()

    def __repr__(self):
        return "Branch('%s', %s, %s, %s, %s)" % (self.name, self.line,
                                                 self.members, self.inputs,
                                                 self.outputs)


class BranchNode(object):
    """A named node occurring on a branch line of the pipeline."""
    def __init__(self, name, line, inputs=None, outputs=None):
        self.name = name
        self.line = line
        self.inputs = inputs if inputs is not None else set()
        self.outputs = outputs if outputs is not None else set()

    def __repr__(self):
        return "BranchNode('%s', %s, %s, %s)" % (self.name, self.line,
                                                 self.inputs, self.outputs)


class Rule(object):
    """A single rule: an operator applied to arguments.

    Equality and hashing are based on str(self) (op + args), deliberately
    ignoring line number and negation.
    """
    def __init__(self, op, line, args, NOT=False):
        self.op = op
        self.args = args
        # FIX: the original assigned the constant False here, silently
        # discarding the caller's NOT argument.
        self.NOT = NOT
        self.line = line

    def __repr__(self):
        return "Rule('%s', %s, %s, %s)" % (self.op, self.line,
                                           self.args, self.NOT)

    def __str__(self):
        return "%s%s" % (self.op, self.args)

    def __eq__(self, other):
        return str(self) == str(other)

    def __hash__(self):
        return hash(str(self))


class AllenRule(Rule):
    """A Rule using an Allen interval-algebra operator (o, m, d, ...)."""
    def __repr__(self):
        return "AllenRule('%s', %s, %s, %s)" % (self.op, self.line,
                                                self.args, self.NOT)


class Field(object):
    """A reference to a record field by name."""
    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return "Field('%s')" % self.name


class GrouperRule(object):
    """A rule inside a grouper module."""
    def __init__(self, op, line, args):
        self.line = line
        self.args = args
        self.op = op

    def __repr__(self):
        return "GrouperRule('%s', %s, %s)" % (self.op, self.line, self.args)


class Filter(object):
    """Parsed `filter` statement: a named list of rules."""
    def __init__(self, name, line, rules, branches=None):
        self.name = name
        self.rules = rules
        self.line = line
        self.branches = branches if branches else set()

    def __repr__(self):
        return "Filter('%s', %s, %s, %s)" % (self.name, self.line, self.rules,
                                             self.branches)


class Module(Filter):
    """A `module` block; structurally identical to Filter."""
    def __repr__(self):
        return "Module('%s', %s, %s, %s)" % (self.name, self.line,
                                             self.rules, self.branches)


class Grouper(object):
    """Parsed `grouper` statement: modules plus an aggregate clause."""
    def __init__(self, name, line, modules, aggr, branches=None):
        self.name = name
        self.aggr = aggr
        self.modules = modules
        self.line = line
        self.branches = branches if branches else set()

    def __repr__(self):
        return "Grouper('%s', %s, %s, %s, %s)" % (self.name, self.line,
                                                  self.modules, self.aggr,
                                                  self.branches)


class Merger(object):
    """Parsed `merger` statement: modules plus an export clause."""
    def __init__(self, name, line, modules, export, branches=None):
        self.name = name
        self.export = export
        self.modules = modules
        self.line = line
        self.branches = branches if branches else set()

    def __repr__(self):
        return "Merger('%s', %s, %s, %s, %s)" % (self.name, self.line,
                                                 self.modules, self.export,
                                                 self.branches)


class FilterRef(object):
    """A (possibly negated) reference to a named filter."""
    def __init__(self, name, line, NOT=False):
        self.name = name
        self.NOT = NOT
        self.line = line

    def __repr__(self):
        return "FilterRef('%s', %s, %s)" % (self.name, self.line, self.NOT)


class Arg(object):
    """A typed argument value with its original source text."""
    def __init__(self, type, value, str=''):
        self.type = type
        self.value = value
        self.str = str

    def __repr__(self):
        return "Arg('%s', %s, '%s')" % (self.type, repr(self.value), self.str)


# stats_print.py -- dump pickled profiler statistics sorted by total time.
import pickle


def print_profile_stats(path='./profile_stats1'):
    """Load the pickled stats list from `path` and print its entries sorted
    by cumulative time, descending.

    FIX: the original ran at import time, never closed the file, and opened
    the pickle in text mode.
    """
    f = open(path, 'rb')
    try:
        stats = pickle.load(f)
    finally:
        f.close()
    for st in sorted(stats, key=lambda a: a[1][1], reverse=True):
        print(st)


if __name__ == '__main__':
    print_profile_stats()
# timeindex.py -- index flow records into fixed-width time buckets.
# (A large commented-out earlier TimeIndex draft was removed.)
from math import floor

try:
    xrange            # Python 2
except NameError:     # Python 3: xrange was removed
    xrange = range


class TimeIndex(object):
    """Maps bucket numbers (interval-sized time slots) to the set of
    record ids whose [stime, etime] span overlaps that bucket."""

    def __init__(self, interval=1000, maxsize=10**5):
        self.interval = float(interval)   # float, so division below is true division
        self.index = {}                   # bucket number -> set(rec_id)
        self.maxsize = maxsize            # soft cap; add() only warns past it
        # Sentinels; replaced by real record timestamps on the first add().
        self.mintime = float('inf')
        self.maxtime = float('-inf')

    @property
    def len(self):
        """Number of populated buckets."""
        return len(self.index)

    def get_interval(self, stime, etime):
        """Bucket numbers overlapped by [stime, etime] (end-inclusive)."""
        first = int(floor(stime / self.interval))
        last = int(floor(etime / self.interval) + 1)
        return xrange(first, last)

    def update_min_max_time(self, record):
        """Widen the known [mintime, maxtime] span to cover `record`."""
        if self.mintime > record.stime:
            self.mintime = record.stime
        if self.maxtime < record.etime:
            self.maxtime = record.etime

    def get_total_interval(self):
        """Buckets spanning every record seen so far."""
        return self.get_interval(self.mintime, self.maxtime)

    def add(self, record):
        """Register `record.rec_id` in every bucket its time span overlaps."""
        for i in self.get_interval(record.stime, record.etime):
            self.index.setdefault(i, set()).add(record.rec_id)
        self.update_min_max_time(record)
        if self.len > self.maxsize:
            print("Warning large index")

    def get_interval_records(self, stime, etime):
        """Sorted ids of all records overlapping [stime, etime].

        FIX: the original used setdefault() here, permanently inserting an
        empty set for every queried-but-empty bucket, so read-only queries
        grew the index without bound.
        """
        res = set()
        for i in self.get_interval(stime, etime):
            res |= self.index.get(i, set())  # set union; no mutation of index
        return sorted(res)
a/traces/2009-10-24/ft-v05.2009-10-24.003501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.003501+0200 new file mode 100644 index 0000000..4237b91 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.003501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.004001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.004001+0200 new file mode 100644 index 0000000..ca9df03 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.004001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.004501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.004501+0200 new file mode 100644 index 0000000..0e47e1a Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.004501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.005001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.005001+0200 new file mode 100644 index 0000000..3a29e0c Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.005001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.005501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.005501+0200 new file mode 100644 index 0000000..714984b Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.005501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.010001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.010001+0200 new file mode 100644 index 0000000..d113d15 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.010001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.010501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.010501+0200 new file mode 100644 index 0000000..7716d0e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.010501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.011001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.011001+0200 new file mode 100644 index 0000000..3dd0c7a Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.011001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.011501+0200 
b/traces/2009-10-24/ft-v05.2009-10-24.011501+0200 new file mode 100644 index 0000000..8352daa Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.011501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.012001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.012001+0200 new file mode 100644 index 0000000..393a08f Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.012001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.012501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.012501+0200 new file mode 100644 index 0000000..1bf93c1 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.012501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.013001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.013001+0200 new file mode 100644 index 0000000..db28b89 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.013001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.013501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.013501+0200 new file mode 100644 index 0000000..d274253 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.013501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.014001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.014001+0200 new file mode 100644 index 0000000..cc3d295 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.014001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.014501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.014501+0200 new file mode 100644 index 0000000..bc4222d Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.014501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.015001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.015001+0200 new file mode 100644 index 0000000..0ab59b0 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.015001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.015501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.015501+0200 new file mode 100644 index 
0000000..46e4ac7 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.015501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.020001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.020001+0200 new file mode 100644 index 0000000..718d43f Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.020001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.020501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.020501+0200 new file mode 100644 index 0000000..19ce584 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.020501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.021001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.021001+0200 new file mode 100644 index 0000000..8298d98 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.021001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.021501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.021501+0200 new file mode 100644 index 0000000..24c2757 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.021501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.022001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.022001+0200 new file mode 100644 index 0000000..91633e8 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.022001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.022501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.022501+0200 new file mode 100644 index 0000000..af95347 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.022501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.023001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.023001+0200 new file mode 100644 index 0000000..5fc05c5 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.023001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.023501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.023501+0200 new file mode 100644 index 0000000..fc9fe4b Binary files /dev/null and 
b/traces/2009-10-24/ft-v05.2009-10-24.023501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.024001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.024001+0200 new file mode 100644 index 0000000..7131803 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.024001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.024501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.024501+0200 new file mode 100644 index 0000000..3aedd09 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.024501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.025001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.025001+0200 new file mode 100644 index 0000000..1549481 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.025001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.025501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.025501+0200 new file mode 100644 index 0000000..2d0eb1d Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.025501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.030001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.030001+0200 new file mode 100644 index 0000000..7f7982d Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.030001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.030501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.030501+0200 new file mode 100644 index 0000000..10c3d8c Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.030501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.031001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.031001+0200 new file mode 100644 index 0000000..7a6fe55 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.031001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.031501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.031501+0200 new file mode 100644 index 0000000..286b330 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.031501+0200 differ diff --git 
a/traces/2009-10-24/ft-v05.2009-10-24.032001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.032001+0200 new file mode 100644 index 0000000..c2fbab7 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.032001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.032501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.032501+0200 new file mode 100644 index 0000000..b56524f Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.032501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.033001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.033001+0200 new file mode 100644 index 0000000..c8b41f3 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.033001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.033501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.033501+0200 new file mode 100644 index 0000000..ef9e9f1 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.033501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.034001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.034001+0200 new file mode 100644 index 0000000..a95627a Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.034001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.034501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.034501+0200 new file mode 100644 index 0000000..731555f Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.034501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.035001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.035001+0200 new file mode 100644 index 0000000..165c72d Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.035001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.035501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.035501+0200 new file mode 100644 index 0000000..578d376 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.035501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.040001+0200 
b/traces/2009-10-24/ft-v05.2009-10-24.040001+0200 new file mode 100644 index 0000000..11f1c0e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.040001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.040501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.040501+0200 new file mode 100644 index 0000000..b5c3661 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.040501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.041001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.041001+0200 new file mode 100644 index 0000000..54d912f Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.041001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.041501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.041501+0200 new file mode 100644 index 0000000..fdcf089 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.041501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.042001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.042001+0200 new file mode 100644 index 0000000..543361a Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.042001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.042501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.042501+0200 new file mode 100644 index 0000000..ea6bf0c Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.042501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.043001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.043001+0200 new file mode 100644 index 0000000..32132ac Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.043001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.043501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.043501+0200 new file mode 100644 index 0000000..98807c8 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.043501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.044001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.044001+0200 new file mode 100644 index 
0000000..3726580 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.044001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.044501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.044501+0200 new file mode 100644 index 0000000..e59c3d6 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.044501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.045001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.045001+0200 new file mode 100644 index 0000000..e11c092 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.045001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.045501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.045501+0200 new file mode 100644 index 0000000..1de6900 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.045501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.050001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.050001+0200 new file mode 100644 index 0000000..768bd0c Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.050001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.050501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.050501+0200 new file mode 100644 index 0000000..3b301a9 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.050501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.051001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.051001+0200 new file mode 100644 index 0000000..fccbdd8 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.051001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.051501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.051501+0200 new file mode 100644 index 0000000..956acc1 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.051501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.052001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.052001+0200 new file mode 100644 index 0000000..a4b8ece Binary files /dev/null and 
b/traces/2009-10-24/ft-v05.2009-10-24.052001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.052501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.052501+0200 new file mode 100644 index 0000000..5968cd0 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.052501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.053001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.053001+0200 new file mode 100644 index 0000000..1d3d7d5 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.053001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.053501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.053501+0200 new file mode 100644 index 0000000..96a23af Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.053501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.054001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.054001+0200 new file mode 100644 index 0000000..920a514 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.054001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.054501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.054501+0200 new file mode 100644 index 0000000..9447431 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.054501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.055001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.055001+0200 new file mode 100644 index 0000000..cc50299 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.055001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.055501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.055501+0200 new file mode 100644 index 0000000..6b58f22 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.055501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.060001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.060001+0200 new file mode 100644 index 0000000..2e7e57a Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.060001+0200 differ diff --git 
a/traces/2009-10-24/ft-v05.2009-10-24.060501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.060501+0200 new file mode 100644 index 0000000..e30b353 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.060501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.061001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.061001+0200 new file mode 100644 index 0000000..8f57954 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.061001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.061501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.061501+0200 new file mode 100644 index 0000000..66085d7 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.061501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.062001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.062001+0200 new file mode 100644 index 0000000..5eb1479 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.062001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.062501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.062501+0200 new file mode 100644 index 0000000..57f7428 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.062501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.063001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.063001+0200 new file mode 100644 index 0000000..7a0a8b4 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.063001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.063501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.063501+0200 new file mode 100644 index 0000000..d3fb017 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.063501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.064001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.064001+0200 new file mode 100644 index 0000000..ac9507b Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.064001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.064501+0200 
b/traces/2009-10-24/ft-v05.2009-10-24.064501+0200 new file mode 100644 index 0000000..d14b73c Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.064501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.065001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.065001+0200 new file mode 100644 index 0000000..9d9f42b Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.065001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.065501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.065501+0200 new file mode 100644 index 0000000..3e1badd Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.065501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.070001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.070001+0200 new file mode 100644 index 0000000..631d75f Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.070001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.070501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.070501+0200 new file mode 100644 index 0000000..9adae50 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.070501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.071001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.071001+0200 new file mode 100644 index 0000000..e39f5a5 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.071001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.071501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.071501+0200 new file mode 100644 index 0000000..497a15d Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.071501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.072001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.072001+0200 new file mode 100644 index 0000000..e6339a9 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.072001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.072501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.072501+0200 new file mode 100644 index 
0000000..8b204c5 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.072501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.073001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.073001+0200 new file mode 100644 index 0000000..6e3e78c Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.073001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.073501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.073501+0200 new file mode 100644 index 0000000..566ec18 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.073501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.074001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.074001+0200 new file mode 100644 index 0000000..43681ac Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.074001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.074501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.074501+0200 new file mode 100644 index 0000000..2744492 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.074501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.075001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.075001+0200 new file mode 100644 index 0000000..434dc74 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.075001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.075501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.075501+0200 new file mode 100644 index 0000000..21040f4 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.075501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.080001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.080001+0200 new file mode 100644 index 0000000..7b06053 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.080001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.080501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.080501+0200 new file mode 100644 index 0000000..e7922fb Binary files /dev/null and 
b/traces/2009-10-24/ft-v05.2009-10-24.080501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.081001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.081001+0200 new file mode 100644 index 0000000..4fcad14 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.081001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.081501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.081501+0200 new file mode 100644 index 0000000..ec61a9d Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.081501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.082001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.082001+0200 new file mode 100644 index 0000000..f54a7dc Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.082001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.082501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.082501+0200 new file mode 100644 index 0000000..687534e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.082501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.083001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.083001+0200 new file mode 100644 index 0000000..b56dd00 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.083001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.083501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.083501+0200 new file mode 100644 index 0000000..cb028c9 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.083501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.084001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.084001+0200 new file mode 100644 index 0000000..f4dab08 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.084001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.084501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.084501+0200 new file mode 100644 index 0000000..80307b1 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.084501+0200 differ diff --git 
a/traces/2009-10-24/ft-v05.2009-10-24.085001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.085001+0200 new file mode 100644 index 0000000..bd58d12 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.085001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.085501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.085501+0200 new file mode 100644 index 0000000..41a2bc8 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.085501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.090001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.090001+0200 new file mode 100644 index 0000000..74031d6 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.090001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.090501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.090501+0200 new file mode 100644 index 0000000..7588cbe Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.090501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.091001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.091001+0200 new file mode 100644 index 0000000..e4b5542 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.091001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.091501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.091501+0200 new file mode 100644 index 0000000..bfcb897 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.091501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.092001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.092001+0200 new file mode 100644 index 0000000..b0963b5 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.092001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.092501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.092501+0200 new file mode 100644 index 0000000..f8bdbfa Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.092501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.093001+0200 
b/traces/2009-10-24/ft-v05.2009-10-24.093001+0200 new file mode 100644 index 0000000..0ea369a Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.093001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.093501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.093501+0200 new file mode 100644 index 0000000..b46ab1e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.093501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.094001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.094001+0200 new file mode 100644 index 0000000..7302da1 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.094001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.094501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.094501+0200 new file mode 100644 index 0000000..bbbbfb7 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.094501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.095001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.095001+0200 new file mode 100644 index 0000000..13b3585 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.095001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.095501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.095501+0200 new file mode 100644 index 0000000..fdd0832 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.095501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.100001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.100001+0200 new file mode 100644 index 0000000..4392dd1 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.100001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.100501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.100501+0200 new file mode 100644 index 0000000..9ec674e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.100501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.101001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.101001+0200 new file mode 100644 index 
0000000..048b264 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.101001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.101501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.101501+0200 new file mode 100644 index 0000000..41c1a48 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.101501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.102001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.102001+0200 new file mode 100644 index 0000000..58855c9 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.102001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.102501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.102501+0200 new file mode 100644 index 0000000..6e64aef Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.102501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.103001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.103001+0200 new file mode 100644 index 0000000..070c7bb Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.103001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.103501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.103501+0200 new file mode 100644 index 0000000..c119f4a Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.103501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.104001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.104001+0200 new file mode 100644 index 0000000..743e452 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.104001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.104501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.104501+0200 new file mode 100644 index 0000000..29d38db Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.104501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.105001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.105001+0200 new file mode 100644 index 0000000..691a634 Binary files /dev/null and 
b/traces/2009-10-24/ft-v05.2009-10-24.105001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.105501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.105501+0200 new file mode 100644 index 0000000..4af6661 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.105501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.110001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.110001+0200 new file mode 100644 index 0000000..f08221d Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.110001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.110501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.110501+0200 new file mode 100644 index 0000000..f40d485 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.110501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.111001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.111001+0200 new file mode 100644 index 0000000..6b572b3 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.111001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.111501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.111501+0200 new file mode 100644 index 0000000..7c42240 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.111501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.112001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.112001+0200 new file mode 100644 index 0000000..5dcac5f Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.112001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.112501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.112501+0200 new file mode 100644 index 0000000..a21a1cf Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.112501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.113001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.113001+0200 new file mode 100644 index 0000000..c78db92 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.113001+0200 differ diff --git 
a/traces/2009-10-24/ft-v05.2009-10-24.113501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.113501+0200 new file mode 100644 index 0000000..19aa708 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.113501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.114001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.114001+0200 new file mode 100644 index 0000000..6e72dcf Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.114001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.114501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.114501+0200 new file mode 100644 index 0000000..8e7fe8b Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.114501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.115001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.115001+0200 new file mode 100644 index 0000000..697920d Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.115001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.115501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.115501+0200 new file mode 100644 index 0000000..423b443 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.115501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.120001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.120001+0200 new file mode 100644 index 0000000..e51d845 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.120001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.120501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.120501+0200 new file mode 100644 index 0000000..0660b24 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.120501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.121001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.121001+0200 new file mode 100644 index 0000000..b66407e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.121001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.121501+0200 
b/traces/2009-10-24/ft-v05.2009-10-24.121501+0200 new file mode 100644 index 0000000..15585d9 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.121501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.122001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.122001+0200 new file mode 100644 index 0000000..192e8fe Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.122001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.122501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.122501+0200 new file mode 100644 index 0000000..043498f Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.122501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.123001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.123001+0200 new file mode 100644 index 0000000..5156bc0 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.123001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.123501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.123501+0200 new file mode 100644 index 0000000..62ee341 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.123501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.124001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.124001+0200 new file mode 100644 index 0000000..739062a Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.124001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.124501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.124501+0200 new file mode 100644 index 0000000..063f669 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.124501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.125001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.125001+0200 new file mode 100644 index 0000000..8c8e2b8 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.125001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.125501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.125501+0200 new file mode 100644 index 
0000000..f9a498e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.125501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.130001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.130001+0200 new file mode 100644 index 0000000..2f0c904 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.130001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.130501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.130501+0200 new file mode 100644 index 0000000..3849a90 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.130501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.131001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.131001+0200 new file mode 100644 index 0000000..6203be3 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.131001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.131501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.131501+0200 new file mode 100644 index 0000000..e3bb6ef Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.131501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.132001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.132001+0200 new file mode 100644 index 0000000..4dcfffc Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.132001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.132501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.132501+0200 new file mode 100644 index 0000000..8566b38 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.132501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.133001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.133001+0200 new file mode 100644 index 0000000..b848ec9 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.133001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.133501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.133501+0200 new file mode 100644 index 0000000..05e9530 Binary files /dev/null and 
b/traces/2009-10-24/ft-v05.2009-10-24.133501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.134001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.134001+0200 new file mode 100644 index 0000000..fdff215 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.134001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.134501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.134501+0200 new file mode 100644 index 0000000..e15f285 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.134501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.135001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.135001+0200 new file mode 100644 index 0000000..4a6de51 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.135001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.135501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.135501+0200 new file mode 100644 index 0000000..e285eb8 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.135501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.140001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.140001+0200 new file mode 100644 index 0000000..d2fb412 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.140001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.140501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.140501+0200 new file mode 100644 index 0000000..a56d7b7 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.140501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.141001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.141001+0200 new file mode 100644 index 0000000..e9798bf Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.141001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.141501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.141501+0200 new file mode 100644 index 0000000..21e1e7f Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.141501+0200 differ diff --git 
a/traces/2009-10-24/ft-v05.2009-10-24.142001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.142001+0200 new file mode 100644 index 0000000..bf6dec5 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.142001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.142501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.142501+0200 new file mode 100644 index 0000000..91f9150 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.142501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.143001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.143001+0200 new file mode 100644 index 0000000..89ea6cf Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.143001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.143501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.143501+0200 new file mode 100644 index 0000000..06bff2e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.143501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.144001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.144001+0200 new file mode 100644 index 0000000..62711c6 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.144001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.144501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.144501+0200 new file mode 100644 index 0000000..8db6822 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.144501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.145001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.145001+0200 new file mode 100644 index 0000000..4fafe39 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.145001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.145501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.145501+0200 new file mode 100644 index 0000000..9403aee Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.145501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.150001+0200 
b/traces/2009-10-24/ft-v05.2009-10-24.150001+0200 new file mode 100644 index 0000000..2308eb0 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.150001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.150501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.150501+0200 new file mode 100644 index 0000000..3f0d5e4 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.150501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.151001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.151001+0200 new file mode 100644 index 0000000..6b4d006 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.151001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.151501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.151501+0200 new file mode 100644 index 0000000..3bf761b Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.151501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.152001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.152001+0200 new file mode 100644 index 0000000..c43af23 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.152001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.152501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.152501+0200 new file mode 100644 index 0000000..7e05f6b Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.152501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.153001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.153001+0200 new file mode 100644 index 0000000..9cf2420 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.153001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.153501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.153501+0200 new file mode 100644 index 0000000..e3412ef Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.153501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.154001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.154001+0200 new file mode 100644 index 
0000000..bc08a99 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.154001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.154501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.154501+0200 new file mode 100644 index 0000000..011dd15 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.154501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.155001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.155001+0200 new file mode 100644 index 0000000..115f850 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.155001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.155501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.155501+0200 new file mode 100644 index 0000000..bb1e698 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.155501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.160001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.160001+0200 new file mode 100644 index 0000000..b877836 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.160001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.160113+0200 b/traces/2009-10-24/ft-v05.2009-10-24.160113+0200 new file mode 100644 index 0000000..06dd80a Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.160113+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.160501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.160501+0200 new file mode 100644 index 0000000..50661f2 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.160501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.161001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.161001+0200 new file mode 100644 index 0000000..ce98939 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.161001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.161501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.161501+0200 new file mode 100644 index 0000000..e8bb900 Binary files /dev/null and 
b/traces/2009-10-24/ft-v05.2009-10-24.161501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.162001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.162001+0200 new file mode 100644 index 0000000..a155c63 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.162001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.162501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.162501+0200 new file mode 100644 index 0000000..20ec9fe Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.162501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.163001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.163001+0200 new file mode 100644 index 0000000..4e63675 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.163001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.163501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.163501+0200 new file mode 100644 index 0000000..0f9388d Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.163501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.164001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.164001+0200 new file mode 100644 index 0000000..4a012b0 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.164001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.164501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.164501+0200 new file mode 100644 index 0000000..443198e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.164501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.165001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.165001+0200 new file mode 100644 index 0000000..60364f3 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.165001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.165501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.165501+0200 new file mode 100644 index 0000000..a8c735e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.165501+0200 differ diff --git 
a/traces/2009-10-24/ft-v05.2009-10-24.170001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.170001+0200 new file mode 100644 index 0000000..5c0534c Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.170001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.170501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.170501+0200 new file mode 100644 index 0000000..36271d0 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.170501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.171001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.171001+0200 new file mode 100644 index 0000000..8a7a319 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.171001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.171501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.171501+0200 new file mode 100644 index 0000000..bf9ebc7 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.171501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.172001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.172001+0200 new file mode 100644 index 0000000..9a4ed4e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.172001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.172501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.172501+0200 new file mode 100644 index 0000000..cd3b6aa Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.172501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.173001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.173001+0200 new file mode 100644 index 0000000..b16c80f Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.173001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.173501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.173501+0200 new file mode 100644 index 0000000..1616d18 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.173501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.174001+0200 
b/traces/2009-10-24/ft-v05.2009-10-24.174001+0200 new file mode 100644 index 0000000..d27595e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.174001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.174501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.174501+0200 new file mode 100644 index 0000000..6bb6adc Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.174501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.175001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.175001+0200 new file mode 100644 index 0000000..d5cae31 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.175001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.175501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.175501+0200 new file mode 100644 index 0000000..e0d63f0 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.175501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.180001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.180001+0200 new file mode 100644 index 0000000..9f8db5b Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.180001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.180501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.180501+0200 new file mode 100644 index 0000000..5e4349c Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.180501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.181001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.181001+0200 new file mode 100644 index 0000000..9fe5229 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.181001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.181501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.181501+0200 new file mode 100644 index 0000000..96f02ca Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.181501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.182001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.182001+0200 new file mode 100644 index 
0000000..60c1191 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.182001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.182501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.182501+0200 new file mode 100644 index 0000000..901401e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.182501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.183001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.183001+0200 new file mode 100644 index 0000000..746d7a1 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.183001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.183501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.183501+0200 new file mode 100644 index 0000000..b03e155 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.183501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.184001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.184001+0200 new file mode 100644 index 0000000..a50b07e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.184001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.184501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.184501+0200 new file mode 100644 index 0000000..17f2ccb Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.184501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.185001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.185001+0200 new file mode 100644 index 0000000..982de98 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.185001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.185501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.185501+0200 new file mode 100644 index 0000000..81e335f Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.185501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.190001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.190001+0200 new file mode 100644 index 0000000..959a403 Binary files /dev/null and 
b/traces/2009-10-24/ft-v05.2009-10-24.190001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.190501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.190501+0200 new file mode 100644 index 0000000..40bed15 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.190501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.191001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.191001+0200 new file mode 100644 index 0000000..8a0e01b Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.191001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.191501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.191501+0200 new file mode 100644 index 0000000..175e61c Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.191501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.192001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.192001+0200 new file mode 100644 index 0000000..7a89493 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.192001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.192501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.192501+0200 new file mode 100644 index 0000000..dd65dda Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.192501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.193001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.193001+0200 new file mode 100644 index 0000000..3b9ad0e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.193001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.193501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.193501+0200 new file mode 100644 index 0000000..1aecb02 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.193501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.194001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.194001+0200 new file mode 100644 index 0000000..59808aa Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.194001+0200 differ diff --git 
a/traces/2009-10-24/ft-v05.2009-10-24.194501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.194501+0200 new file mode 100644 index 0000000..194db31 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.194501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.195001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.195001+0200 new file mode 100644 index 0000000..b3f7ea8 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.195001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.195501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.195501+0200 new file mode 100644 index 0000000..0693363 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.195501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.200001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.200001+0200 new file mode 100644 index 0000000..df399de Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.200001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.200501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.200501+0200 new file mode 100644 index 0000000..463d98e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.200501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.201001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.201001+0200 new file mode 100644 index 0000000..978b08e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.201001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.201501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.201501+0200 new file mode 100644 index 0000000..a410f33 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.201501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.202001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.202001+0200 new file mode 100644 index 0000000..95d3ce6 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.202001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.202501+0200 
b/traces/2009-10-24/ft-v05.2009-10-24.202501+0200 new file mode 100644 index 0000000..80cdf67 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.202501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.203001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.203001+0200 new file mode 100644 index 0000000..f6ae577 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.203001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.203501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.203501+0200 new file mode 100644 index 0000000..c724d87 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.203501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.204001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.204001+0200 new file mode 100644 index 0000000..b9df66e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.204001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.204501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.204501+0200 new file mode 100644 index 0000000..daae757 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.204501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.205001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.205001+0200 new file mode 100644 index 0000000..726adc2 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.205001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.205501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.205501+0200 new file mode 100644 index 0000000..b00e5ed Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.205501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.210001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.210001+0200 new file mode 100644 index 0000000..cbde491 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.210001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.210501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.210501+0200 new file mode 100644 index 
0000000..7091f5b Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.210501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.211001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.211001+0200 new file mode 100644 index 0000000..1b16af5 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.211001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.211501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.211501+0200 new file mode 100644 index 0000000..dadbb84 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.211501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.212001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.212001+0200 new file mode 100644 index 0000000..43cc537 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.212001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.212501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.212501+0200 new file mode 100644 index 0000000..2416f89 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.212501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.213001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.213001+0200 new file mode 100644 index 0000000..89017b3 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.213001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.213501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.213501+0200 new file mode 100644 index 0000000..8b6f488 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.213501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.214001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.214001+0200 new file mode 100644 index 0000000..6057a3a Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.214001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.214501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.214501+0200 new file mode 100644 index 0000000..12251d2 Binary files /dev/null and 
b/traces/2009-10-24/ft-v05.2009-10-24.214501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.215001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.215001+0200 new file mode 100644 index 0000000..55bcdef Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.215001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.215501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.215501+0200 new file mode 100644 index 0000000..960f446 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.215501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.220001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.220001+0200 new file mode 100644 index 0000000..a1a8a68 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.220001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.220501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.220501+0200 new file mode 100644 index 0000000..6b5a9c7 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.220501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.221001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.221001+0200 new file mode 100644 index 0000000..2bbba88 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.221001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.221501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.221501+0200 new file mode 100644 index 0000000..d76be31 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.221501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.222001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.222001+0200 new file mode 100644 index 0000000..7de3057 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.222001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.222501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.222501+0200 new file mode 100644 index 0000000..9af5408 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.222501+0200 differ diff --git 
a/traces/2009-10-24/ft-v05.2009-10-24.223001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.223001+0200 new file mode 100644 index 0000000..9e8f54c Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.223001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.223501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.223501+0200 new file mode 100644 index 0000000..de1ee4e Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.223501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.224001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.224001+0200 new file mode 100644 index 0000000..5b9f0ed Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.224001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.224501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.224501+0200 new file mode 100644 index 0000000..51081f7 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.224501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.225001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.225001+0200 new file mode 100644 index 0000000..8c8f5b2 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.225001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.225501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.225501+0200 new file mode 100644 index 0000000..28a74cd Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.225501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.230001+0200 b/traces/2009-10-24/ft-v05.2009-10-24.230001+0200 new file mode 100644 index 0000000..e2d2c1f Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.230001+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.230501+0200 b/traces/2009-10-24/ft-v05.2009-10-24.230501+0200 new file mode 100644 index 0000000..9d29722 Binary files /dev/null and b/traces/2009-10-24/ft-v05.2009-10-24.230501+0200 differ diff --git a/traces/2009-10-24/ft-v05.2009-10-24.231001+0200 
class Ungrouper(object):
    """Expand the merger's output tuples back into groups and flow records.

    Each record produced by the merger carries, for every branch in
    ``br_order``, the row id of a group record in that branch's groups
    table.  The ungrouper resolves those ids and (optionally) expands each
    group into its underlying flow records.

    Parameters (stored verbatim on the instance):
        name           -- ungrouper name, used in the completion message
        file_name      -- output file name as resolved by the validator
        merger         -- iterable reader over the merger's result tuples
        br_order       -- branch names in the merger's export order
        br_to_groups   -- branch name -> reader over that branch's groups
        records        -- reader over the original input flow records
        output_file    -- table that receives expanded flow records
        br_to_gr_output-- branch name -> table receiving matched groups
    """
    def __init__(self, name, file_name, merger, br_order, br_to_groups,
                 records, output_file, br_to_gr_output):
        self.merger = merger
        self.name = name
        self.file_name = file_name
        self.br_order = br_order
        self.br_to_groups = br_to_groups
        self.flow_records = records
        self.output_file = output_file
        self.br_to_gr_output = br_to_gr_output

    def _iter_group_records(self):
        # Shared traversal used by groups(), records() and go(): one
        # (branch, group record) pair per branch column of every merged
        # tuple, in merger order.  Factored out of the three methods that
        # previously repeated this double loop verbatim.
        for rec in self.merger:
            for br in self.br_order:
                gr_rec_id = getattr(rec, br)
                yield br, self.br_to_groups[br].read_row(gr_rec_id)

    def groups(self):
        """Yield every group record referenced by the merger output."""
        for br, gr_rec in self._iter_group_records():
            yield gr_rec

    def records(self):
        """Yield the individual flow records of every referenced group."""
        for br, gr_rec in self._iter_group_records():
            for record in self.flow_records.read_rows_list(gr_rec.records):
                yield record

    def go(self):
        """Write matched group records to the per-branch group outputs and,
        unless options.do_not_expand_groups is set, also expand each group
        into flow records appended to the main output file."""
        expand = not options.do_not_expand_groups
        for br, gr_rec in self._iter_group_records():
            self.br_to_gr_output[br].append(gr_rec)
            if expand:
                for record in self.flow_records.read_rows_list(
                        gr_rec.records):
                    self.output_file.append(record)

        # Single-argument print() is valid in both Python 2 and 3;
        # message typo fixed ('exectution' -> 'execution').
        print('Ungrouper %s finished execution' % self.name)
class UngrouperValidator(object):
    """Validate parsed ungrouper statements and build their implementations.

    Matches each parsed ungrouper to its merger (by branch set), resolves
    the output file name, and constructs an ungrouper.Ungrouper per
    statement.  The ready implementations end up in self.impl.
    """
    def __init__(self, parser, merger_validator):
        # deepcopy so later mutation of the parser's AST does not leak here
        self.ungroupers = deepcopy(parser.ungroupers)
        self.outputs = deepcopy(parser.outputs)
        self.input = deepcopy(parser.input)
        self.merger_validator = merger_validator
        self.name_to_merger = self.find_ungrouper_to_merger()
        # NOTE(review): method name has a typo ('otput'); kept because
        # renaming would change the class's public interface.
        self.name_to_output = self.find_name_to_otput()
        self.name_to_merger_table = self.find_name_to_merger_records_file()
        self.impl = self.create_impl()

    def find_ungrouper_to_merger(self):
        """Map each ungrouper name to the merger *implementation* whose
        declaration shares the ungrouper's branch set.

        Raises SyntaxError when no merger with matching branches (or no
        implementation for the matched merger) exists.
        """
        n_to_m = {}
        for ungrouper in self.ungroupers:
            m = None
            for merger in self.merger_validator.mergers:
                if ungrouper.branches == merger.branches:
                    # branch sets match: now find the built implementation
                    # that carries this merger's name
                    for merger_impl in self.merger_validator.impl:
                        if merger_impl.name == merger.name:
                            m = merger_impl
                            break
                    break
            if not m:
                msg = "Could not find merger for Ungrouper %s,"%ungrouper.name
                raise SyntaxError(msg)
            else:
                n_to_m[ungrouper.name] = m

        return n_to_m

    def find_name_to_merger_records_file(self):
        """Map each ungrouper name to a RecordReader over its merger's
        result-tuples table (re-opened from the merger's output file)."""
        n_to_merger_records_file = {}
        for name, m in self.name_to_merger.iteritems():
            file_name = m.merger_table.tuples_table.file_path
            table = FlowRecordsTable(file_name)
            n_to_merger_records_file[name] = RecordReader(table)
        return n_to_merger_records_file

    def find_name_to_groups(self, ungrouper):
        """Map each of the ungrouper's branches to a RecordReader over
        that branch's groups file (<temp>/<groups_prefix><branch>.h5)."""
        br_to_groups = {}
        for branch in ungrouper.branches:
            fname = options.temp_path + options.groups_file_prefix
            fname += branch + '.h5'
            br_to_groups[branch] = RecordReader(FlowRecordsTable(fname))
        return br_to_groups

    def find_name_to_otput(self):
        """Map each ungrouper name to the output statement's file name,
        matched by identical branch set.  Ungroupers with no matching
        output statement are silently left out of the mapping."""
        n_to_o = {}
        for ungrouper in self.ungroupers:
            for output in self.outputs:
                if output.branches == ungrouper.branches:
                    n_to_o[ungrouper.name] = output.name
                    break

        return n_to_o

    def find_name_to_gr_output(self, br_order, br_to_groups):
        """Create one '<branch>-merged.h5' groups-output table per branch
        (schema copied from the branch's groups reader) and return the
        branch -> writable table mapping."""
        br_to_gr_output = {}
        for br in br_order:
            fname = options.temp_path + options.groups_file_prefix
            fname += br + '-merged' + '.h5'
            src = br_to_groups[br].reader
            # must delete any stale file before (re)creating the table
            if options.delete_temp_files: if_exists_delete(fname)
            create_table_file(fname, dict(zip(src.fields, src.types)))
            br_to_gr_output[br] = FlowRecordsTable(fname)
        return br_to_gr_output

    def create_impl(self):
        """Assemble one UngrouperImpl per parsed ungrouper, wiring together
        the merger reader, per-branch group readers/outputs, the input
        flow-records reader and the freshly created output table."""
        ungr_impl = []
        for ungrouper in self.ungroupers:
            name = ungrouper.name
            merger_impl = self.name_to_merger[name]
            merger_name = merger_impl.name
            br_order = merger_impl.export_branches
            merger_file = options.temp_path + options.merger_file_prefix
            merger_file += merger_name + '.h5'
            merger = RecordReader(FlowRecordsTable(merger_file))
            br_to_groups = self.find_name_to_groups(ungrouper)
            # .get() returns None when no output statement matched this
            # ungrouper; the [name] lookup below would raise instead --
            # NOTE(review): both forms reference the same mapping.
            file_name = self.name_to_output.get(name)
            records_table = FlowRecordsTable(self.input.name)
            records = RecordReader(records_table)
            output_file = self.name_to_output[name]
            if options.delete_temp_files: if_exists_delete(output_file)
            create_table_file(output_file, dict(zip(records_table.fields,
                                                    records_table.types)))
            br_to_gr_output = self.find_name_to_gr_output(br_order,
                                                          br_to_groups)
            output = FlowRecordsTable(output_file)
            ungr_impl.append(UngrouperImpl(name, file_name, merger, br_order,
                                           br_to_groups, records, output,
                                           br_to_gr_output))

        return ungr_impl
def flatten(l):
    """Recursively reduce a nested list of numbers to their total sum.

    NOTE(review): despite its name this does NOT flatten the structure --
    sum(map(flatten, l)) collapses everything to one number, e.g.
    flatten([1, [2, 3]]) == 6.  Behavior kept as-is because callers may
    rely on the numeric result.
    """
    if isinstance(l, list):
        return sum(map(flatten, l))
    else:
        return l

def if_exists_delete(path):
    """Delete *path* if it exists and names an h5 file; otherwise no-op.

    Bug fix: the original test ``path[path.find('h5'):] == 'h5'`` matched
    on the FIRST occurrence of 'h5', so any path mentioning 'h5' earlier
    (e.g. 'h5dir/out.h5') was silently never deleted.  endswith() checks
    the suffix, which is what was intended.
    """
    if os.path.exists(path) and path.endswith('h5'):
        os.remove(path)

def iterate_subrules(rule):
    """Yield every nested sub-rule of *rule*, depth-first (children before
    the sub-rule that contains them)."""
    for arg in rule.args:
        if type(arg) in [Rule, GrouperRule, AllenRule]:
            for r in iterate_subrules(arg):
                yield r
            yield arg

def iterate_args(rule):
    """Yield the direct arguments of *rule*."""
    for arg in rule.args:
        yield arg

def iterate_rules(filter):
    """Yield every rule of *filter* (top-level rules and all their nested
    sub-rules), skipping entries that are not rule objects."""
    for rule_list in filter.rules:
        for rule in rule_list:
            if type(rule) not in [Rule, GrouperRule, AllenRule]:
                continue
            yield rule
            for r in iterate_subrules(rule):
                yield r

def find_op(rule, module='operators'):
    """Return the operator implementation named by rule.op.

    Looks the name up in *module* (operators.py by default) first, then in
    that module's ``external_import`` (user-supplied operators).  Raises
    SyntaxError with the rule's line number when the name is unknown.
    """
    imp = __import__(module)
    op_name = rule.op
    try:
        return getattr(imp, op_name)
    except AttributeError:
        try:
            external_imp = getattr(imp, 'external_import')
            return getattr(external_imp, op_name)
        except AttributeError:
            # message typo fixed: 'Uknown' -> 'Unknown'
            raise SyntaxError('Unknown operator %s at line %s.'%(op_name,
                                                                 rule.line))

def get_input_reader(parser):
    """Return a reader for the parser's input (e.g. ./netflow-trace.h5)."""
    return FlowRecordsTable(parser.input.name)

def get_input_fields_types(input_reader):
    """Return a field-name -> field-type mapping for *input_reader*."""
    return dict((f, t) for f, t in zip(input_reader.fields,
                                       input_reader.types))

def check_rule_fields(rule, fields):
    """Raise SyntaxError if *rule* references a field not in *fields*."""
    for arg in rule.args:
        if type(arg) is Field:
            if arg.name in fields:
                continue
            else:
                msg = 'There is no such field %s, '%arg.name
                msg += 'referenced at line %s'%rule.line
                raise SyntaxError(msg)

def replace_bound_rules(filter):
    '''
    Iterate over the rules replacing evaluatable rules with their values
    until there's nothing left to replace i.e. all remaining rules have
    field references.
    '''
    def evaluate_rule(rule):
        '''
        Evaluates rule if possible i.e. rule contains no record field
        references. If evaluation is not possible returns the original
        rule.
        '''
        arg_types = [type(x) for x in rule.args]
        if Rule in arg_types or Field in arg_types:
            return rule
        else:
            # no references to record fields - evaluate now
            replace_bound_rules.count += 1
            op = find_op(rule)
            # hasattr() takes care of already replaced values
            args = [a.value if hasattr(a, 'value') else a
                    for a in rule.args]
            result = op(*args) if not rule.NOT else not op(*args)

            return result
    replace_bound_rules.count = 0
    for rule in iterate_rules(filter):
        newargs = [evaluate_rule(arg) if type(arg) is Rule else arg
                   for arg in rule.args]
        rule.args = newargs

        # we have to break because nasty stuff happens
        # because the iterable is being changed while iterated
        if replace_bound_rules.count != 0:
            break

    # something was replaced this pass: restart until a full clean pass
    if replace_bound_rules.count != 0:
        replace_bound_rules.count = 0
        replace_bound_rules(filter)

def replace_with_vals(filter):
    """Replace every Arg argument of every rule with its literal value."""
    for rule in iterate_rules(filter):
        for i, arg in enumerate(rule.args):
            if type(arg) is Arg:
                rule.args[i] = arg.value