# HG changeset patch
# User Laman
# Date 2024-06-27 21:53:24
# Node ID c532c271f40784e1392e9c2948b8660ba7b00caf
# Parent 61a2b8f09823dc2e37b77489eb3319679f93372b
refactoring: rules as a vector instead of a hashmap

diff --git a/regexp.py b/regexp.py
--- a/regexp.py
+++ b/regexp.py
@@ -1,3 +1,5 @@
+import math
+import itertools
 from abc import abstractmethod
 from collections import deque
 
@@ -205,21 +207,21 @@ def parse(pattern, offset=0):
 
 class Regexp:
     def __init__(self, pattern):
-        (self.rules, self.end_states) = self._parse(pattern)
-
-    def _parse(self, s):
-        r = parse(s)
+        r = parse(pattern)
         rules = dict()
+        alphabet = set()
 
         for i in r.list_first():
-            c = s[i]
+            c = pattern[i]
+            alphabet.add(c)
             key = (-1, c)
             if key not in rules:
                 rules[key] = set()
             rules[key].add(i)
 
         for (i, j) in r.list_neighbours():
-            c = s[j]
+            c = pattern[j]
+            alphabet.add(c)
             key = (i, c)
             if key not in rules:
                 rules[key] = set()
@@ -229,7 +231,9 @@ class Regexp:
         if r.is_skippable:
             end_states.add(-1)
 
-        return rules, end_states
+        self.rules = rules
+        self.end_states = end_states
+        self.alphabet = alphabet
 
     def match(self, s):
         current = {-1}
@@ -245,10 +249,12 @@ class Regexp:
         return any(st in self.end_states for st in current)
 
     def determinize(self):
-        rules = dict()
-        end_states = {-1} if -1 in self.end_states else set()
+        alphabet_index = {c: i for (i, c) in enumerate(sorted(self.alphabet))}
+        n = len(alphabet_index)
+        compact_rules = [-1] * n
+        end_states = {0} if -1 in self.end_states else set()
 
-        index = {(-1,): -1}
+        index = {(-1,): 0}
         stack = [(-1,)]
         while stack:
             multistate = stack.pop()
@@ -262,37 +268,39 @@
             for (c, target_set) in new_rules.items():
                 target_tup = tuple(sorted(target_set))
                 if target_tup not in index:
-                    new_target = len(index)-1
+                    new_target = len(index)
                     index[target_tup] = new_target
+                    compact_rules.extend([-1] * n)
                     stack.append(target_tup)
-                rules[(index[multistate], c)] = index[target_tup]
+                compact_rules[index[multistate]*n + alphabet_index[c]] = index[target_tup]
                 if any(st in self.end_states for st in target_set):
                     end_states.add(index[target_tup])
 
-        return (rules, end_states)
+        return (compact_rules, end_states, alphabet_index)
 
 
 class RegexpDFA:
-    def __init__(self, rules, end_states):
+    def __init__(self, rules, end_states, alphabet_index):
         self.rules = rules
         self.end_states = end_states
+        self.alphabet_index = alphabet_index
 
     @classmethod
     def create(cls, pattern):
         r = Regexp(pattern)
-        (rules, end_states) = r.determinize()
+        (rules, end_states, alphabet_index) = r.determinize()
 
-        return cls(rules, end_states)
+        return cls(rules, end_states, alphabet_index)
 
     def match(self, s):
-        st = -1
+        st = 0
+        n = len(self.alphabet_index)
         for c in s:
-            key = (st, c)
-            if key in self.rules:
-                st = self.rules[key]
-            else:
+            if c not in self.alphabet_index or st < 0:
                 return False
+            key = (st*n + self.alphabet_index[c])
+            st = self.rules[key]
 
         return st in self.end_states
@@ -300,42 +308,40 @@ class RegexpDFA:
         equivalents = self._find_equivalent_states()
         (rules, end_states) = self._collapse_states(equivalents)
 
-        return RegexpDFA(rules, end_states)
+        return RegexpDFA(rules, end_states, self.alphabet_index)
 
     def normalize(self):
-        index = {-1: -1}
-        queue = deque([-1])
+        n = len(self.alphabet_index)
+        index = {-1: -1, 0: 0}
+        queue = deque([0])
+
+        rules = []
 
         while queue:
-            state = queue.popleft()
-            edges = [((st, c), t) for ((st, c), t) in self.rules.items() if st == state]
-            edges.sort()
-            for ((st, c), t) in edges:
-                if t not in index:
-                    index[t] = len(index)-1
-                    queue.append(t)
+            si = queue.popleft()
+            row = self.rules[si*n:(si+1)*n]
+            for sj in row:
+                if sj not in index:
+                    index[sj] = len(index)-1
+                    queue.append(sj)
+            rules.extend(index[sj] for sj in row)
 
-        rules = dict(((index[st], c), index[t]) for ((st, c), t) in self.rules.items())
-        end_states = {index[st] for st in self.end_states}
+        end_states = {index[si] for si in self.end_states}
 
-        return RegexpDFA(rules, end_states)
+        return RegexpDFA(rules, end_states, self.alphabet_index)
 
     def _find_equivalent_states(self):
-        state_set = [-2, -1] + sorted(set(self.rules.values()))
-        alphabet = {c for (st, c) in self.rules.keys()}
-        equivalents = {(s1, s2) for (i, s1) in enumerate(state_set) for s2 in state_set[i+1:]}
-
-        for (s1, s2) in equivalents.copy():
-            if (s1 in self.end_states and s2 not in self.end_states) or (s1 not in self.end_states and s2 in self.end_states):
-                equivalents.remove((s1, s2))
+        n = len(self.alphabet_index)
+        state_list = list(range(len(self.rules) // n))
+        equivalents = {(s1, s2) for (i, s1) in enumerate(state_list) for s2 in state_list[i+1:] if (s1 in self.end_states) == (s2 in self.end_states)}
 
         ctrl = True
         while ctrl:
             ctrl = False
             for (s1, s2) in equivalents.copy():
-                for c in alphabet:
-                    t1 = self.rules.get((s1, c), -2)
-                    t2 = self.rules.get((s2, c), -2)
+                for ci in range(n):
+                    t1 = self.rules[s1*n + ci]
+                    t2 = self.rules[s2*n + ci]
                     key = (min(t1, t2), max(t1, t2))
                     if t1 != t2 and key not in equivalents:
                         equivalents.remove((s1, s2))
@@ -345,17 +351,28 @@ class RegexpDFA:
         return equivalents
 
     def _collapse_states(self, equivalents):
-        rules = self.rules.items()
-        end_states = self.end_states.copy()
+        n = len(self.alphabet_index)
+        rules = []
+
+        eq_mapping = dict()
+        for (s1, s2) in equivalents:
+            eq_mapping[s2] = min(s1, eq_mapping.get(s2, math.inf))
+
+        discard_mapping = {-1: -1}
+        discard_count = 0
 
-        for (s1, s2) in sorted(equivalents):
-            rules = map(
-                lambda item: (item[0], s1 if item[1] == s2 else item[1]),
-                filter(lambda item: item[0][0] != s2, rules)
-            )
-            end_states.discard(s2)
+        for i in range(0, len(self.rules), n):
+            si = i//n
+            if si in eq_mapping:
+                discard_count += 1
+                continue
+            discard_mapping[si] = si - discard_count
+            rules.extend(map(lambda st: eq_mapping.get(st, st), self.rules[i:i+n]))
 
-        return (dict(rules), end_states)
+        rules = [discard_mapping[st] for st in rules]
+        end_states = {discard_mapping[eq_mapping.get(st, st)] for st in self.end_states}
+
+        return (rules, end_states)
 
 
 if __name__ == "__main__":
diff --git a/src/regexp.rs b/src/regexp.rs
--- a/src/regexp.rs
+++ b/src/regexp.rs
@@ -1,13 +1,14 @@
-use std::collections::{HashMap, HashSet, VecDeque};
+use std::{collections::{HashMap, HashSet, VecDeque}, iter};
 
 mod token;
 pub use token::ParsingError;
 use token::parse;
 
-const START: i32 = -1;
-const FAIL: i32 = -2;
+const START_NFA: usize = usize::MAX;
+const START_DFA: usize = 0;
+const FAIL: usize = usize::MAX>>1;
 
-fn encode_set(set: &HashSet<i32>) -> String {
+fn encode_set(set: &HashSet<usize>) -> String {
     let mut v = Vec::from_iter(set.iter());
     v.sort();
     let res: Vec<String> = v.into_iter().map(|x| x.to_string()).collect();
@@ -16,44 +17,51 @@ fn encode_set(set: &HashSet<i32>) -> Str
 
 #[derive(Debug)]
 pub struct Regexp {
-    rules: HashMap<(i32, char), HashSet<i32>>,
-    end_states: HashSet<i32>
+    rules: HashMap<(usize, char), HashSet<usize>>,
+    end_states: HashSet<usize>,
+    alphabet: Vec<char>
 }
 
 impl Regexp {
     pub fn new(pattern: &String) -> Result<Regexp, ParsingError> {
         let r = parse(pattern, 0)?;
         let pattern_chars = Vec::from_iter(pattern.chars());
-        let mut rules: HashMap<(i32, char), HashSet<i32>> = HashMap::new();
+        let mut rules: HashMap<(usize, char), HashSet<usize>> = HashMap::new();
+        let mut alphabet: HashSet<char> = HashSet::new();
 
         for i in r.list_first() {
             let c = pattern_chars[i];
-            let key = (START, c);
+            alphabet.insert(c);
+            let key = (START_NFA, c);
             match rules.get_mut(&key) {
-                Some(set) => {set.insert(i as i32);},
-                None => {rules.insert(key, HashSet::from([i as i32]));}
+                Some(set) => {set.insert(i);},
+                None => {rules.insert(key, HashSet::from([i]));}
             };
         }
         for (i, j) in r.list_neighbours() {
             let c = pattern_chars[j];
-            let key = (i as i32, c);
+            alphabet.insert(c);
+            let key = (i, c);
             match rules.get_mut(&key) {
-                Some(set) => {set.insert(j as i32);},
-                None => {rules.insert(key, HashSet::from([j as i32]));}
+                Some(set) => {set.insert(j);},
+                None => {rules.insert(key, HashSet::from([j]));}
             };
         }
-        let mut end_states = HashSet::from_iter(r.list_last().into_iter().map(|i| i as i32));
+        let mut end_states = HashSet::from_iter(r.list_last().into_iter());
         if r.is_skippable() {
-            end_states.insert(START);
+            end_states.insert(START_NFA);
         }
 
-        return Ok(Regexp{rules, end_states});
+        let mut alphabet_vec = Vec::from_iter(alphabet.into_iter());
+        alphabet_vec.sort();
+
+        return Ok(Regexp{rules, end_states, alphabet: alphabet_vec});
     }
 
     pub fn eval(&self, s: String) -> bool {
-        let mut multistate = HashSet::from([START]);
+        let mut multistate = HashSet::from([START_NFA]);
 
         for c in s.chars() {
             let mut new_multistate = HashSet::new();
@@ -72,18 +80,22 @@ impl Regexp {
     }
 
     pub fn determinize(&self) -> RegexpDFA {
-        let mut rules: HashMap<(i32, char), i32> = HashMap::new();
-        let mut end_states: HashSet<i32> = HashSet::new();
-        if self.end_states.contains(&START) {end_states.insert(START);}
+        let alphabet_index: HashMap<char, usize> = self.alphabet.iter().enumerate().map(|(i, c)| (*c, i)).collect();
+        let n = alphabet_index.len();
+        let mut compact_rules = vec![FAIL; n];
+        let mut end_states: HashSet<usize> = HashSet::new();
+        if self.end_states.contains(&START_NFA) {end_states.insert(START_DFA);}
 
-        let mut index_new = HashMap::from([(START.to_string(), START)]);
-        let mut index_multi = HashMap::from([(START.to_string(), HashSet::from([START]))]);
-        let mut stack = Vec::from([START.to_string()]);
+        // string hash -> single int DFA state
+        let mut index_new = HashMap::from([(START_NFA.to_string(), START_DFA)]);
+        // string hash -> HashSet NFA multistate
+        let mut index_multi = HashMap::from([(START_NFA.to_string(), HashSet::from([START_NFA]))]);
+        let mut stack = Vec::from([START_NFA.to_string()]);
 
         while !stack.is_empty() {
             let state_hash = stack.pop().unwrap();
             let multistate = &index_multi[&state_hash];
-            let mut new_rules: HashMap<char, HashSet<i32>> = HashMap::new();
+            let mut new_rules: HashMap<char, HashSet<usize>> = HashMap::new();
 
             for key in self.rules.keys().filter(|key| multistate.contains(&key.0)) {
                 let (_st, c) = key;
@@ -99,79 +111,90 @@
                 let target_hash = encode_set(&target_set);
                 let is_end = target_set.iter().any(|st| self.end_states.contains(st));
                 if !index_new.contains_key(&target_hash) {
-                    let target_new = index_new.len() as i32;
+                    let target_new = index_new.len();
                     index_new.insert(target_hash.clone(), target_new);
                     index_multi.insert(target_hash.clone(), target_set);
+                    compact_rules.extend(iter::repeat(FAIL).take(n));
                     stack.push(target_hash.clone());
                 }
-                rules.insert((index_new[&state_hash], c), index_new[&target_hash]);
+                compact_rules[index_new[&state_hash]*n + alphabet_index[&c]] = index_new[&target_hash];
                 if is_end {
                     end_states.insert(index_new[&target_hash]);
                 }
             }
         }
 
-        return RegexpDFA{rules, end_states};
+        return RegexpDFA{rules: compact_rules, end_states, alphabet_index};
    }
 }
 
 pub struct RegexpDFA {
-    rules: HashMap<(i32, char), i32>,
-    end_states: HashSet<i32>
+    rules: Vec<usize>,
+    end_states: HashSet<usize>,
+    alphabet_index: HashMap<char, usize>
 }
 
 impl RegexpDFA {
     pub fn eval(&self, s: String) -> bool {
-        let mut state = START;
+        let n = self.alphabet_index.len();
+        let mut state = START_DFA;
         for c in s.chars() {
-            if let Some(x) = self.rules.get(&(state, c)) {
-                state = *x;
+            if let Some(ci) = self.alphabet_index.get(&c) {
+                state = self.rules[state*n + ci];
             } else {
                 return false;
             }
+            if state == FAIL {
+                return false;
+            }
         }
         return self.end_states.contains(&state);
     }
 
     pub fn reduce(&self) -> RegexpDFA {
+        if self.alphabet_index.len() == 0 {
+            return RegexpDFA{rules: self.rules.clone(), end_states: self.end_states.clone(), alphabet_index: self.alphabet_index.clone()};
+        }
         let equivalents = self.find_equivalent_states();
         return self.collapse_states(equivalents);
     }
 
     pub fn normalize(&self) -> RegexpDFA {
-        let mut index = HashMap::from([(START, START)]);
-        let mut queue = VecDeque::from([START]);
+        let n = self.alphabet_index.len();
+        if n == 0 {
+            return RegexpDFA{rules: self.rules.clone(), end_states: self.end_states.clone(), alphabet_index: self.alphabet_index.clone()};
+        }
+        let m = self.rules.len()/n;
+        let mut index: Vec<usize> = vec![FAIL;m];
+        index[0] = 0;
+        let mut queue = VecDeque::from([START_DFA]);
+
+        let mut rules = vec![];
+        let mut k = 1;
 
         while !queue.is_empty() {
-            let state = queue.pop_front().unwrap();
-            let mut edges: Vec<((i32, char), i32)> = self.rules.iter()
-                .filter(|((st, c), t)| *st == state)
-                .map(|((st, c), t)| ((*st, *c), *t)).collect();
-            edges.sort();
-            for ((_st, _c), t) in edges {
-                if !index.contains_key(&t) {
-                    index.insert(t, index.len() as i32);
-                    queue.push_back(t);
+            let si = queue.pop_front().unwrap();
+            let row = &self.rules[si*n..(si+1)*n];
+            for &sj in row {
+                if sj != FAIL && index[sj] == FAIL {
+                    index[sj] = k;
+                    k += 1;
+                    queue.push_back(sj);
                 }
             }
+            rules.extend(row.iter().map(|&st| if st != FAIL {index[st]} else {FAIL}));
         }
 
-        let rules = self.rules.iter().map(|((st, c), t)| ((index[st], *c), index[t])).collect();
-        let end_states = self.end_states.iter().map(|st| index[st]).collect();
+        let end_states = self.end_states.iter().map(|st| index[*st]).collect();
 
-        return RegexpDFA{rules, end_states};
+        return RegexpDFA{rules, end_states, alphabet_index: self.alphabet_index.clone()};
     }
 
-    fn find_equivalent_states(&self) -> Vec<(i32, i32)> {
-        let state_set: HashSet<i32> = HashSet::from_iter(self.rules.values().copied());
-        let mut state_vec: Vec<i32> = Vec::from_iter(state_set.into_iter());
-        state_vec.push(START);
-        state_vec.push(FAIL);
-        state_vec.sort();
-        let alphabet: HashSet<char> = self.rules.keys().map(|(_st, c)| c).copied().collect();
-
+    fn find_equivalent_states(&self) -> Vec<(usize, usize)> {
+        let n = self.alphabet_index.len();
+        let state_vec: Vec<usize> = (0..self.rules.len()/n).collect();
         let mut equivalents = HashSet::new();
         state_vec.iter().enumerate().for_each(|(i, s1)| {
             equivalents.extend(
@@ -181,14 +204,14 @@ impl RegexpDFA {
             );
         });
 
-        let mut n = usize::MAX;
-        while equivalents.len() < n {
-            n = equivalents.len();
+        let mut m = usize::MAX;
+        while equivalents.len() < m {
+            m = equivalents.len();
             equivalents = equivalents.iter().filter(|(s1, s2)| {
-                !alphabet.iter().any(|c| {
-                    let t1 = self.rules.get(&(*s1, *c)).unwrap_or(&FAIL);
-                    let t2 = self.rules.get(&(*s2, *c)).unwrap_or(&FAIL);
-                    let key = (*t1.min(t2), *t1.max(t2));
+                !(0..n).any(|ci| {
+                    let t1 = self.rules[s1*n + ci];
+                    let t2 = self.rules[s2*n + ci];
+                    let key = (t1.min(t2), t2.max(t1));
                     return t1 != t2 && !equivalents.contains(&key);
                })
             }).copied().collect();
@@ -197,18 +220,31 @@ impl RegexpDFA {
         return Vec::from_iter(equivalents.into_iter());
     }
 
-    fn collapse_states(&self, mut equivalents: Vec<(i32, i32)>) -> RegexpDFA {
-        let mut rules = self.rules.clone();
-        let mut end_states = self.end_states.clone();
-        equivalents.sort();
+    fn collapse_states(&self, equivalents: Vec<(usize, usize)>) -> RegexpDFA {
+        let n = self.alphabet_index.len();
+        let m = self.rules.len()/n;
+        let mut rules = Vec::new();
+        let mut eq_mapping: Vec<usize> = ((0..m)).collect();
 
         for (s1, s2) in equivalents.into_iter() {
-            rules = rules.into_iter()
-                .filter(|((st, _c), _t)| *st != s2)
-                .map(|(key, t)| (key, if t==s2 {s1} else {t})).collect();
-            end_states.remove(&s2);
+            eq_mapping[s2] = eq_mapping[s2].min(s1);
         }
 
-        return RegexpDFA{rules, end_states};
+        let mut discard_mapping: Vec<usize> = ((0..m)).collect();
+        let mut discard_count = 0;
+
+        for si in 0..m {
+            if eq_mapping[si] != si {
+                discard_count += 1;
+                continue;
+            }
+            discard_mapping[si] = si-discard_count;
+            rules.extend(self.rules[si*n..(si+1)*n].iter().map(|&st| if st!=FAIL {eq_mapping[st]} else {FAIL}));
+        }
+
+        rules = rules.into_iter().map(|st| if st!=FAIL {discard_mapping[st]} else {FAIL}).collect();
+        let end_states = self.end_states.iter().map(|st| discard_mapping[eq_mapping[*st]]).collect();
+
+        return RegexpDFA{rules, end_states, alphabet_index: self.alphabet_index.clone()};
     }
 }
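
Illustration of the new table layout (an editor's sketch, not part of the patch): after this change the DFA transition table is a flat row-major vector, and the transition from state s on character c sits at rules[s*n + alphabet_index[c]], with -1 in Python (FAIL = usize::MAX>>1 in Rust) marking a missing edge. Assuming the patched regexp.py is importable as a module named regexp, the pattern "ab" being only an example input:

    from regexp import RegexpDFA

    r = RegexpDFA.create("ab")      # r.rules is a flat list now, not a dict
    n = len(r.alphabet_index)       # row length = alphabet size
    state = 0                       # the DFA start state has index 0
    for c in "ab":
        # row-major lookup, the same indexing RegexpDFA.match() performs
        # (minus its guards for unknown characters and dead states)
        state = r.rules[state*n + r.alphabet_index[c]]
    print(state in r.end_states)    # True, same answer as r.match("ab")

The compact_rules vector built by determinize() on the Rust side holds the same layout, one row of length n per DFA state.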