Changeset - fd790596c353
[Not reviewed]
default
0 2 0
Laman - 10 months ago 2024-06-30 16:15:19

implemented comparison of regexps with different alphabets
2 files changed with 65 insertions and 2 deletions:
0 comments (0 inline, 0 general)
regexp.py
Show inline comments
 
@@ -113,359 +113,391 @@ class Alternative(Token):
 
	def is_skippable(self):
		"""Return True if this alternative can match the empty string,
		i.e. at least one of its variants is skippable.

		NOTE(review): other code (e.g. Chain.list_first) reads
		`token.is_skippable` without calling it, which suggests this is
		meant to be a @property like Chain.is_skippable — confirm the
		decorator is present just above this hunk.
		"""
		return any(x.is_skippable for x in self.variants)
 

	
 
class AlternativeSeparator:
	"""Marker token emitted by parse() for the "|" / "+" operators;
	presumably split on when the surrounding Alternative is built —
	confirm against Alternative.__init__ (outside this hunk)."""
	pass
 

	
 
class Chain(Token):
	"""A concatenation of tokens: matches its children in sequence."""

	def __init__(self, content: list):
		self.content = content

	def list_first(self):
		"""Yield positions of symbols that can start a match: the first
		symbols of each leading token, up to and including the first
		non-skippable one."""
		for token in self.content:
			yield from token.list_first()
			if not token.is_skippable:
				break

	def list_last(self):
		"""Yield positions of symbols that can end a match (the mirror
		image of list_first, scanning from the right)."""
		for token in reversed(self.content):
			yield from token.list_last()
			if not token.is_skippable:
				break

	def list_neighbours(self):
		"""Yield all (i, j) pairs of symbol positions that can appear
		adjacently in some matching string."""
		previous = []
		for token in self.content:
			# pair the possible last symbols of every directly preceding
			# token (including a run of skippable ones) with the possible
			# first symbols of this token
			for t in previous:
				for x in t.list_last():
					for y in token.list_first():
						yield (x, y)
			yield from token.list_neighbours()

			# a skippable token extends the set of possible predecessors;
			# a non-skippable one replaces it
			if token.is_skippable:
				previous.append(token)
			else:
				previous = [token]

	@property
	def is_skippable(self):
		# the whole chain matches "" only if every part can
		return all(x.is_skippable for x in self.content)

	def __str__(self):
		return "(" + "".join(str(x) for x in self.content) + ")"
 

	
 

	
 
def find_closing_parenthesis(pattern, k):
	"""Return the index of the ")" matching the "(" at position `k`.

	Scans forward from `k` while tracking the nesting depth.

	:raises ParsingError: when the parenthesis is never closed
	"""
	depth = 0
	pos = k
	while pos < len(pattern):
		ch = pattern[pos]
		if ch == "(":
			depth += 1
		elif ch == ")":
			depth -= 1
		if depth == 0:
			return pos
		pos += 1

	raise ParsingError(f'A closing parenthesis not found. Pattern: "{pattern}", position: {k}')
 

	
 

	
 
def parse(pattern, offset=0):
	"""Parse a regexp pattern into a token tree.

	:param pattern: the regexp string, possibly a parenthesised slice
		of a larger pattern
	:param offset: index of `pattern` within the full original pattern,
		so that Symbol positions always refer to the original string
	:return: an Alternative if a top-level "|"/"+" is present,
		otherwise a Chain
	:raises ParsingError: on unbalanced parentheses or a dangling "*"
	"""
	res = []
	is_alternative = False

	i = 0
	while i < len(pattern):
		c = pattern[i]
		if c == "(":
			# recurse into the parenthesised group
			j = find_closing_parenthesis(pattern, i)
			inner_content = parse(pattern[i+1:j], offset+i+1)
			res.append(inner_content)
			i = j+1
		elif c == "*":
			try:
				token = res.pop()
			except IndexError:
				# suppress the IndexError context; the ParsingError is self-explanatory
				raise ParsingError(f'The asterisk operator is missing an argument. Pattern: "{pattern}", position {i}') from None
			# an asterisk right after "|"/"+" (e.g. "a|*") has no operand either
			if isinstance(token, AlternativeSeparator):
				raise ParsingError(f'The asterisk operator is missing an argument. Pattern: "{pattern}", position {i}')
			res.append(Asterisk(token))
			i += 1
		elif c == ")":
			raise ParsingError(f'An opening parenthesis not found. Pattern: "{pattern}", position: {i}')
		elif c == "|" or c == "+":
			is_alternative = True
			res.append(AlternativeSeparator())
			i += 1
		elif c == "_":
			# "_" denotes the empty string (lambda)
			res.append(Lambda())
			i += 1
		else:
			res.append(Symbol(i+offset, c))
			i += 1

	if is_alternative:
		return Alternative(res)
	else:
		return Chain(res)
 

	
 

	
 
def print_dfa(dfa, label=""):
	"""Debug helper: dump a DFA's transition table, one row per state,
	followed by its set of accepting states."""
	n = len(dfa.alphabet_index)
	print(label)
	for (state, i) in enumerate(range(0, len(dfa.rules), n)):
		print(state, dfa.rules[i:i+n])
	print(dfa.end_states)
 

	
 

	
 
class Regexp:
	"""A nondeterministic finite automaton built from a regexp pattern
	as a position (Glushkov-style) automaton: each symbol position in
	the pattern becomes an NFA state, with -1 as the initial state."""

	def __init__(self, pattern):
		"""
		:param pattern: a regexp string supporting (), |, + (same as |),
			* and _ (the empty string)
		"""
		r = parse(pattern)
		rules = dict()
		alphabet = set()

		# transitions from the initial state to every possible first symbol
		for i in r.list_first():
			c = pattern[i]
			alphabet.add(c)
			key = (-1, c)
			if key not in rules:
				rules[key] = set()
			rules[key].add(i)

		# transitions between every pair of symbols that can be adjacent
		for (i, j) in r.list_neighbours():
			c = pattern[j]
			alphabet.add(c)
			key = (i, c)
			if key not in rules:
				rules[key] = set()
			rules[key].add(j)

		# positions that can end a match are accepting; if the whole
		# pattern can match "", so is the initial state
		end_states = set(r.list_last())
		if r.is_skippable:
			end_states.add(-1)

		# (state, char) -> set of target states
		self.rules = rules
		self.end_states = end_states
		self.alphabet = alphabet

	def match(self, s):
		"""Simulate the NFA on `s`; return True iff it accepts."""
		current = {-1}

		for c in s:
			new_state = set()
			for st in current:
				key = (st, c)
				if key in self.rules:
					new_state.update(self.rules[key])
			current = new_state

		return any(st in self.end_states for st in current)

	def determinize(self):
		"""Convert the NFA to a DFA via the powerset construction.

		:return: (rules, end_states, alphabet_index) where rules is a
			flattened state*char -> state matrix with an extra all-fail
			row appended at the end
		"""
		alphabet_index = {c: i for (i, c) in enumerate(sorted(self.alphabet))}
		n = len(alphabet_index)
		compact_rules = [-1] * n
		end_states = {0} if -1 in self.end_states else set()

		# multistate (sorted tuple of NFA states) -> DFA state number
		index = {(-1,): 0}
		stack = [(-1,)]
		while stack:
			multistate = stack.pop()
			new_rules = dict()
			
			# collect all transitions leaving any state of the multistate
			for ((st, c), target) in filter(lambda item: item[0][0] in multistate, self.rules.items()):
				if c not in new_rules:
					new_rules[c] = set()
				new_rules[c].update(target)
			
			for (c, target_set) in new_rules.items():
				target_tup = tuple(sorted(target_set))
				if target_tup not in index:
					new_target = len(index)
					index[target_tup] = new_target
					compact_rules.extend([-1] * n)
					stack.append(target_tup)
				compact_rules[index[multistate]*n + alphabet_index[c]] = index[target_tup]
				if any(st in self.end_states for st in target_set):
					end_states.add(index[target_tup])

		# replace the -1 placeholders with an explicit fail state and
		# append its (self-looping) row
		fail = len(index)
		compact_rules = [(st if st >= 0 else fail) for st in compact_rules]
		compact_rules.extend([fail] * n)
		
		return (compact_rules, end_states, alphabet_index)
 

	
 

	
 
class RegexpDFA:
	"""A deterministic finite automaton recognizing a regular language.

	`rules` is a flattened transition matrix: `rules[i*n + j]` is the
	target state for state i and the j-th alphabet character, with
	n = len(alphabet_index). The matrix contains one extra row for a
	non-accepting fail state that loops to itself.
	"""

	def __init__(self, rules, end_states, alphabet_index):
		"""
		:param rules: flattened state*char -> state transition matrix
		:param end_states: set of accepting state numbers
		:param alphabet_index: char -> column index into `rules`
		"""
		self.rules = rules
		self.end_states = end_states
		self.alphabet_index = alphabet_index

	@classmethod
	def create(cls, pattern):
		"""Parse `pattern` and build a DFA by determinizing its NFA.

		:raises ParsingError: on a malformed pattern
		"""
		r = Regexp(pattern)
		(rules, end_states, alphabet_index) = r.determinize()

		return cls(rules, end_states, alphabet_index)

	def match(self, s):
		"""Return True iff the DFA accepts the whole string `s`."""
		n = len(self.alphabet_index)
		# degenerate automaton with an empty alphabet: it has no
		# transitions, so only the empty string can be accepted
		# (mirrors the Rust implementation; previously this raised
		# ZeroDivisionError)
		if n == 0:
			return s == ""
		st = 0
		fail = len(self.rules) // n

		for c in s:
			# NOTE(review): `st == fail` appears to be a (harmless) dead
			# early-exit — transition targets stay below the row count —
			# kept as-is
			if c not in self.alphabet_index or st == fail:
				return False
			key = (st*n + self.alphabet_index[c])
			st = self.rules[key]

		return st in self.end_states

	def reduce(self):
		"""Return an equivalent DFA with all equivalent states merged."""
		# nothing to reduce without an alphabet (also avoids a division
		# by zero; mirrors the Rust implementation)
		if not self.alphabet_index:
			return RegexpDFA(list(self.rules), set(self.end_states), dict(self.alphabet_index))
		equivalents = self._find_equivalent_states()
		(rules, end_states) = self._collapse_states(equivalents)

		return RegexpDFA(rules, end_states, self.alphabet_index)

	def normalize(self):
		"""Renumber states in BFS order from the start state, dropping
		unreachable ones. Two equivalent reduced DFAs become identical
		after normalization."""
		n = len(self.alphabet_index)
		# empty alphabet: nothing to renumber (mirrors the Rust code)
		if n == 0:
			return RegexpDFA(list(self.rules), set(self.end_states), dict(self.alphabet_index))
		index = {0: 0}
		queue = deque([0])

		rules = []

		while queue:
			si = queue.popleft()
			row = self.rules[si*n:(si+1)*n]
			for sj in row:
				if sj not in index:
					index[sj] = len(index)
					queue.append(sj)
			rules.extend(index[sj] for sj in row)

		# unreachable accepting states are dropped along with their rows
		end_states = {index[si] for si in self.end_states if si in index}

		return RegexpDFA(rules, end_states, self.alphabet_index)

	def find_distinguishing_string(self, r):
		"""Return a string accepted by exactly one of `self` and `r`,
		or None when the two automata are equivalent.

		Both DFAs are expected to be reduced and normalized; their
		alphabets may differ.
		"""
		if self.rules == r.rules and self.end_states == r.end_states:
			return None

		# align both automata on the union of their alphabets before
		# building the product (the old direct product computed here was
		# dead code left over from before alphabet expansion existed)
		r1 = self._expand_alphabet(r.alphabet_index)
		r2 = r._expand_alphabet(self.alphabet_index)
		product = r1._build_product_automaton(r2)

		n = len(product.alphabet_index)
		reverse_alphabet_index = {v: k for (k, v) in product.alphabet_index.items()}
		# BFS from the start state: the first accepting product state
		# reached yields a shortest distinguishing string
		queue = deque([(0, "")])
		visited = {0}
		while queue:
			(state, acc) = queue.popleft()
			if state in product.end_states:
				return acc
			for (i, target) in enumerate(product.rules[state*n:(state+1)*n]):
				if target not in visited:
					queue.append((target, acc+reverse_alphabet_index[i]))
					visited.add(target)

		# the automata differ, so the product must contain a reachable
		# accepting state
		assert False

	def _find_equivalent_states(self):
		"""Return the set of pairs (s1, s2), s1 < s2, of mutually
		equivalent states, found by iterated refinement of the
		"same acceptance status" relation."""
		n = len(self.alphabet_index)
		state_list = list(range(len(self.rules) // n))
		equivalents = {(s1, s2) for (i, s1) in enumerate(state_list) for s2 in state_list[i+1:] if (s1 in self.end_states) == (s2 in self.end_states)}

		# drop pairs whose transitions lead to non-equivalent pairs,
		# until a fixed point is reached
		ctrl = True
		while ctrl:
			ctrl = False
			for (s1, s2) in equivalents.copy():
				for ci in range(n):
					t1 = self.rules[s1*n + ci]
					t2 = self.rules[s2*n + ci]
					key = (min(t1, t2), max(t1, t2))
					if t1 != t2 and key not in equivalents:
						equivalents.remove((s1, s2))
						ctrl = True
						break

		return equivalents

	def _collapse_states(self, equivalents):
		"""Merge each equivalence pair into its lowest-numbered state and
		renumber the remaining states contiguously.

		:param equivalents: pairs (s1, s2), s1 < s2, of equivalent states
		:return: (rules, end_states) of the collapsed automaton
		"""
		n = len(self.alphabet_index)
		rules = []

		# map every state to the lowest state it is equivalent to
		eq_mapping = dict()
		for (s1, s2) in equivalents:
			eq_mapping[s2] = min(s1, eq_mapping.get(s2, math.inf))

		# renumber the surviving states to fill the gaps
		discard_mapping = dict()
		discard_count = 0

		for i in range(0, len(self.rules), n):
			si = i//n
			if si in eq_mapping:
				discard_count += 1
				continue
			discard_mapping[si] = si - discard_count
			rules.extend(map(lambda st: eq_mapping.get(st, st), self.rules[i:i+n]))

		rules = [discard_mapping[st] for st in rules]
		end_states = {discard_mapping[eq_mapping.get(st, st)] for st in self.end_states}

		return (rules, end_states)

	def _expand_alphabet(self, alphabet_index):
		"""Return an equivalent DFA over the union of `self.alphabet_index`
		and `alphabet_index`; every newly added character leads straight
		to the fail state."""
		if alphabet_index == self.alphabet_index:
			return self

		combined_alphabet = set(self.alphabet_index.keys()) | set(alphabet_index.keys())
		combined_index = {c: i for (i, c) in enumerate(sorted(combined_alphabet))}
		n2 = len(combined_alphabet)

		n1 = len(self.alphabet_index)
		if n1 == 0:
			# degenerate empty-alphabet DFA (previously a ZeroDivisionError):
			# a single live state 0 plus a fail state 1, every character
			# leads to fail
			rules = [1]*n2 + [1]*n2
			return RegexpDFA(rules, self.end_states, combined_index).reduce().normalize()

		m = len(self.rules) // n1
		conversion_index = {v: combined_index[k] for (k, v) in self.alphabet_index.items()}

		rules = []
		for i in range(0, len(self.rules), n1):
			# default every column to the fail state, then copy the
			# original transitions into their new columns
			row = ([m]*n2)
			for (j, st) in enumerate(self.rules[i:i+n1]):
				row[conversion_index[j]] = st
			rules.extend(row)
		rules.extend([m]*n2)

		return RegexpDFA(rules, self.end_states, combined_index).reduce().normalize()

	def _build_product_automaton(self, r):
		"""Build the product of `self` and `r` accepting the symmetric
		difference of their languages. Both automata must share the same
		alphabet_index (see _expand_alphabet)."""
		n = len(self.alphabet_index)
		m = len(r.rules) // n
		k = len(self.rules) // n

		rules = []
		end_states = set()

		for s1 in range(k):
			row1 = self.rules[s1*n:(s1+1)*n]
			for s2 in range(m):
				row2 = r.rules[s2*n:(s2+1)*n]
				rules.extend([x*m + y for (x, y) in zip(row1, row2)])
				# accepting iff exactly one of the automata accepts
				if (s1 in self.end_states) != (s2 in r.end_states):
					end_states.add(s1*m + s2)

		return RegexpDFA(rules, end_states, self.alphabet_index).reduce().normalize()
 

	
 

	
 
def test():
	"""Smoke test: build a DFA for several sample patterns and print the
	match result for a fixed battery of input strings."""
	inputs = ["", "a", "ab", "aabb", "abab", "abcd", "abcbcdbcd"]
	patterns = [
		"a(b|c)",
		"a*b*",
		"(ab)*",
		"a((bc)*d)*",
		"(a|b)*a(a|b)(a|b)(a|b)",
		"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz",
	]
	for pattern in patterns:
		print("#", pattern)
		try:
			automaton = RegexpDFA.create(pattern).reduce().normalize()
		except ParsingError as e:
			print("Failed to parse the regexp:")
			print(e)
			continue
		for s in inputs:
			print(s, automaton.match(s))
		print()
 

	
 

	
 
if __name__ == "__main__":
	parser = argparse.ArgumentParser()
	subparsers = parser.add_subparsers()

	test_parser = subparsers.add_parser("test")
	test_parser.set_defaults(name="test")

	match_parser = subparsers.add_parser("match")
	match_parser.add_argument("pattern")
	match_parser.add_argument("string")
	match_parser.set_defaults(name="match")

	compare_parser = subparsers.add_parser("compare")
	compare_parser.add_argument("pattern1")
	compare_parser.add_argument("pattern2")
	compare_parser.set_defaults(name="compare")

	args = parser.parse_args()
	# without a subcommand argparse leaves `name` unset; fall through to
	# help instead of raising AttributeError
	command = getattr(args, "name", None)

	if command == "test":
		test()
	elif command == "match":
		try:
			r = RegexpDFA.create(args.pattern).reduce().normalize()
		except ParsingError as e:
			print("Failed to parse the regexp:")
			print(e)
		else:
			# only match when parsing succeeded; previously `r` could be
			# unbound here, raising NameError after a parse failure
			print(r.match(args.string))
	elif command == "compare":
		try:
			r1 = RegexpDFA.create(args.pattern1).reduce().normalize()
			r2 = RegexpDFA.create(args.pattern2).reduce().normalize()
		except ParsingError as e:
			# report malformed patterns consistently with the "match" command
			print("Failed to parse the regexp:")
			print(e)
		else:
			print(r1.find_distinguishing_string(r2))
	else:
		parser.print_help()
src/regexp.rs
Show inline comments
 
@@ -39,270 +39,301 @@ impl Regexp {
 
		}
 

	
 
		for (i, j) in r.list_neighbours() {
 
			let c = pattern_chars[j];
 
			alphabet.insert(c);
 
			let key = (i, c);
 
			match rules.get_mut(&key) {
 
				Some(set) => {set.insert(j);},
 
				None => {rules.insert(key, HashSet::from([j]));}
 
			};
 
		}
 

	
 
		let mut end_states = HashSet::from_iter(r.list_last().into_iter());
 
		if r.is_skippable() {
 
			end_states.insert(START_NFA);
 
		}
 

	
 
		let mut alphabet_vec = Vec::from_iter(alphabet.into_iter());
 
		alphabet_vec.sort();
 

	
 
		return Ok(Regexp{rules, end_states, alphabet: alphabet_vec});
 
	}
 

	
 
	/// Simulate the NFA on `s`; return true iff it accepts the whole string.
	pub fn eval(&self, s: String) -> bool {
		let mut states = HashSet::from([START_NFA]);

		for c in s.chars() {
			let mut next = HashSet::new();

			for st in states {
				// an exact rule for the character wins; the '.' wildcard
				// is only consulted as a fallback
				let targets = self.rules.get(&(st, c)).or_else(|| self.rules.get(&(st, '.')));
				if let Some(t) = targets {
					next.extend(t.iter().copied());
				}
			}
			states = next;
		}

		return states.iter().any(|x| self.end_states.contains(x));
	}
 

	
 
	/// Convert the NFA to a DFA via the powerset construction.
	///
	/// Multistates are tracked by a string encoding (via `encode_set`);
	/// the resulting flattened transition matrix gets an extra all-fail
	/// row appended at the end.
	pub fn determinize(&self) -> RegexpDFA {
		// placeholder for "no transition yet", replaced by the real fail
		// state number at the end
		const FAIL: usize = usize::MAX;
		let alphabet_index: HashMap<char, usize> = self.alphabet.iter().enumerate().map(|(i, c)| (*c, i)).collect();
		let n = alphabet_index.len();
		let mut compact_rules = vec![FAIL; n];
		let mut end_states: HashSet<usize> = HashSet::new();
		if self.end_states.contains(&START_NFA) {end_states.insert(START_DFA);}

		// string hash -> single int DFA state
		let mut index_new = HashMap::from([(START_NFA.to_string(), START_DFA)]);
		// string hash -> HashSet NFA multistate
		let mut index_multi = HashMap::from([(START_NFA.to_string(), HashSet::from([START_NFA]))]);
		let mut stack = Vec::from([START_NFA.to_string()]);

		while !stack.is_empty() {
			let state_hash = stack.pop().unwrap();
			let multistate = &index_multi[&state_hash];
			let mut new_rules: HashMap<char, HashSet<usize>> = HashMap::new();

			// collect all transitions leaving any NFA state of the multistate,
			// grouped by character
			for key in self.rules.keys().filter(|key| multistate.contains(&key.0)) {
				let (_st, c) = key;
				if !new_rules.contains_key(c) {
					new_rules.insert(*c, HashSet::new());
				}
				for target in &self.rules[key] {
					new_rules.get_mut(c).unwrap().insert(*target);
				}
			}

			// register every target multistate (assigning it a fresh DFA
			// state number on first sight) and record the transition
			for (c, target_set) in new_rules.into_iter() {
				let target_hash = encode_set(&target_set);
				let is_end = target_set.iter().any(|st| self.end_states.contains(st));
				if !index_new.contains_key(&target_hash) {
					let target_new = index_new.len();
					index_new.insert(target_hash.clone(), target_new);
					index_multi.insert(target_hash.clone(), target_set);
					compact_rules.extend(iter::repeat(FAIL).take(n));
					stack.push(target_hash.clone());
				}
				compact_rules[index_new[&state_hash]*n + alphabet_index[&c]] = index_new[&target_hash];
				if is_end {
					end_states.insert(index_new[&target_hash]);
				}
			}
		}

		// materialize the fail state: replace placeholders and append its
		// (self-looping) row
		let fail = index_new.len();
		compact_rules = compact_rules.into_iter().map(|st| if st != FAIL {st} else {fail}).collect();
		compact_rules.extend(iter::repeat(fail).take(n));

		return RegexpDFA{rules: compact_rules, end_states, alphabet_index};
	}
 
}
 

	
 
#[derive(Clone)]
pub struct RegexpDFA {
	/// flattened transition matrix: `rules[state*n + char_column]` is the
	/// target state, where n is the alphabet size
	rules: Vec<usize>,
	/// accepting states
	end_states: HashSet<usize>,
	/// maps each alphabet character to its column in `rules`
	alphabet_index: HashMap<char, usize>
}
 

	
 
impl RegexpDFA {
	/// Decide whether the automaton accepts the whole string `s`.
	pub fn eval(&self, s: String) -> bool {
		let n = self.alphabet_index.len();
		if n == 0 {
			// a degenerate automaton without an alphabet accepts only ""
			return s.len() == 0;
		}
		let fail = self.rules.len() / n;
		let mut state = START_DFA;

		for c in s.chars() {
			if let Some(ci) = self.alphabet_index.get(&c) {
				state = self.rules[state*n + ci];
			} else {
				// a character outside the alphabet cannot be matched
				return false;
			}
			if state == fail {
				return false;
			}
		}

		return self.end_states.contains(&state);
	}

	/// Return an equivalent automaton with all equivalent states merged.
	pub fn reduce(&self) -> RegexpDFA {
		if self.alphabet_index.len() == 0 {
			return RegexpDFA{rules: self.rules.clone(), end_states: self.end_states.clone(), alphabet_index: self.alphabet_index.clone()};
		}
		let equivalents = self.find_equivalent_states();
		return self.collapse_states(equivalents);
	}

	/// Renumber the states in BFS order from the start state, dropping
	/// unreachable ones. Equivalent reduced automata become identical.
	pub fn normalize(&self) -> RegexpDFA {
		let n = self.alphabet_index.len();
		if n == 0 {
			return RegexpDFA{rules: self.rules.clone(), end_states: self.end_states.clone(), alphabet_index: self.alphabet_index.clone()};
		}
		let m = self.rules.len()/n;
		let fail = m;
		// `fail` doubles as the "not indexed yet" marker
		let mut index: Vec<usize> = vec![fail;m];
		index[0] = 0;
		let mut queue = VecDeque::from([START_DFA]);

		let mut rules = vec![];
		let mut k = 1;

		while !queue.is_empty() {
			let si = queue.pop_front().unwrap();
			let row = &self.rules[si*n..(si+1)*n];
			for &sj in row {
				if sj != fail && index[sj] == fail {
					index[sj] = k;
					k += 1;
					queue.push_back(sj);
				}
			}
			rules.extend(row.iter().map(|&st| index[st]));
		}

		let end_states = self.end_states.iter().map(|st| index[*st]).collect();

		return RegexpDFA{rules, end_states, alphabet_index: self.alphabet_index.clone()};
	}

	/// Return a string accepted by exactly one of `self` and `other`,
	/// or None when the two automata are equivalent.
	///
	/// Both automata are expected to be reduced and normalized; their
	/// alphabets may differ.
	pub fn find_distinguishing_string(&self, other: &RegexpDFA) -> Option<String> {
		if self.rules == other.rules && self.end_states == other.end_states {
			return None;
		}

		// align both automata on the union of their alphabets before
		// building the product (the old direct product computed here was
		// dead code left over from before alphabet expansion existed)
		let r1 = self.expand_alphabet(&other.alphabet_index);
		let r2 = other.expand_alphabet(&self.alphabet_index);
		let product = r1.build_product_automaton(&r2);
		let n = product.alphabet_index.len();
		let reverse_alphabet_index: HashMap<usize, char> = HashMap::from_iter(product.alphabet_index.iter().map(|(&k, &v)| (v, k)));

		// BFS from the start state: the first accepting product state
		// reached yields a shortest distinguishing string; the start
		// state is marked visited up front so it cannot be re-enqueued
		let mut queue = VecDeque::from([(0, "".to_string())]);
		let mut visited: HashSet<usize> = HashSet::from([0]);
		while !queue.is_empty() {
			let (state, acc) = queue.pop_front().unwrap();
			if product.end_states.contains(&state) {
				return Some(acc);
			}
			for (i, target) in product.rules[state*n..(state+1)*n].iter().enumerate() {
				if !visited.contains(target) {
					queue.push_back((*target, acc.clone()+&String::from(reverse_alphabet_index[&i])));
					visited.insert(*target);
				}
			}
		}

		// the automata differ, so the product must contain a reachable
		// accepting state
		panic!();
	}

	/// Collect all pairs (s1, s2), s1 < s2, of mutually equivalent states
	/// by iterated refinement of the "same acceptance status" relation.
	fn find_equivalent_states(&self) -> Vec<(usize, usize)> {
		let n = self.alphabet_index.len();
		let state_vec: Vec<usize> = (0..self.rules.len()/n).collect();
		let mut equivalents = HashSet::new();
		state_vec.iter().enumerate().for_each(|(i, s1)| {
			equivalents.extend(
				state_vec[i+1..].iter()
				.filter(|s2| !(self.end_states.contains(s1)^self.end_states.contains(s2)))
				.map(|s2| (*s1, *s2))
			);
		});

		// refine until a fixed point: a pair survives only if every
		// character leads it to a (still potentially) equivalent pair
		let mut m = usize::MAX;
		while equivalents.len() < m {
			m = equivalents.len();
			equivalents = equivalents.iter().filter(|(s1, s2)| {
				!(0..n).any(|ci| {
					let t1 = self.rules[s1*n + ci];
					let t2 = self.rules[s2*n + ci];
					let key = (t1.min(t2), t2.max(t1));
					return t1 != t2 && !equivalents.contains(&key);
				})
			}).copied().collect();
		}

		return Vec::from_iter(equivalents.into_iter());
	}

	/// Merge each equivalence pair into its lowest-numbered state and
	/// renumber the remaining states contiguously.
	fn collapse_states(&self, equivalents: Vec<(usize, usize)>) -> RegexpDFA {
		let n = self.alphabet_index.len();
		let m = self.rules.len()/n;
		let mut rules = Vec::new();

		// map every state to the lowest state it is equivalent to
		let mut eq_mapping: Vec<usize> = ((0..m)).collect();
		for (s1, s2) in equivalents.into_iter() {
			eq_mapping[s2] = eq_mapping[s2].min(s1);
		}

		// renumber the surviving states to fill the gaps
		let mut discard_mapping: Vec<usize> = ((0..m)).collect();
		let mut discard_count = 0;

		for si in 0..m {
			if eq_mapping[si] != si {
				discard_count += 1;
				continue;
			}
			discard_mapping[si] = si-discard_count;
			rules.extend(self.rules[si*n..(si+1)*n].iter().map(|&st| eq_mapping[st]));
		}

		rules = rules.into_iter().map(|st| discard_mapping[st]).collect();
		let end_states = self.end_states.iter().map(|st| discard_mapping[eq_mapping[*st]]).collect();

		return RegexpDFA{rules, end_states, alphabet_index: self.alphabet_index.clone()};
	}

	/// Reinterpret the automaton over the union of its own alphabet and
	/// `alphabet_index`; every newly added character leads straight to
	/// the fail state.
	fn expand_alphabet(&self, alphabet_index: &HashMap<char, usize>) -> RegexpDFA {
		if *alphabet_index == self.alphabet_index {
			return self.clone();
		}

		let combined_alphabet: HashSet<char> = HashSet::from_iter(self.alphabet_index.keys().chain(alphabet_index.keys()).copied());
		let mut combined_vec = Vec::from_iter(combined_alphabet.into_iter());
		combined_vec.sort();
		let combined_index = HashMap::from_iter(combined_vec.iter().enumerate().map(|(i, c)| (*c, i)));
		let n2 = combined_vec.len();

		let n1 = self.alphabet_index.len();
		if n1 == 0 {
			// degenerate empty-alphabet automaton (previously a division
			// by zero): a single live state 0 plus a fail state 1, every
			// character leads to fail
			let rules = vec![1; 2*n2];
			return RegexpDFA{rules, end_states: self.end_states.clone(), alphabet_index: combined_index}.reduce().normalize();
		}

		let m = self.rules.len() / n1;
		let conversion_index: HashMap<usize, usize> = HashMap::from_iter(self.alphabet_index.iter().map(|(k, v)| (*v, combined_index[k])));

		let mut rules = vec![];
		for i in 0..m {
			// default every column to the fail state, then copy the
			// original transitions into their new columns
			let mut row = vec![m;n2];
			for (j, st) in self.rules[i*n1..(i+1)*n1].iter().enumerate() {
				row[conversion_index[&j]] = *st;
			}
			rules.append(&mut row);
		}
		rules.append(&mut vec![m;n2]);

		return RegexpDFA{rules, end_states: self.end_states.clone(), alphabet_index: combined_index}.reduce().normalize();
	}

	/// Build the product of `self` and `other` accepting the symmetric
	/// difference of their languages. Both automata must share the same
	/// alphabet_index (see `expand_alphabet`).
	fn build_product_automaton(&self, other: &RegexpDFA) -> RegexpDFA {
		let n = self.alphabet_index.len();
		let m = other.rules.len() / n;
		let k = self.rules.len() / n;

		let mut rules = vec![];
		let mut end_states = HashSet::new();

		for s1 in 0..k {
			let row1 = &self.rules[s1*n..(s1+1)*n];
			for s2 in 0..m {
				let row2 = &other.rules[s2*n..(s2+1)*n];
				rules.extend(row1.iter().zip(row2.iter()).map(|(x, y)| x*m + y));
				// accepting iff exactly one of the automata accepts
				if (self.end_states.contains(&s1)) ^ (other.end_states.contains(&s2)) {
					end_states.insert(s1*m + s2);
				}
			}
		}

		return RegexpDFA{rules, end_states, alphabet_index: self.alphabet_index.clone()}.reduce().normalize();
	}
}
0 comments (0 inline, 0 general)