Changeset - 61a2b8f09823
Branch: default
Laman - 2024-06-24 13:47:20

refactoring: states represented by basic integers
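The gist of the change, sketched below in plain Python (illustrative only, not code taken from the diff): determinized states used to be identified by tuples of NFA positions on the Python side and by a bitmask integer on the Rust side; after this commit every multistate discovered during the subset construction is assigned a small sequential integer through an index dictionary, with -1 reserved for the start state.

	# illustrative sketch only, not part of this changeset
	index = {(-1,): -1}                # the start multistate keeps the reserved id -1

	def state_id(multistate):
		"""Return the integer id of a multistate, allocating a fresh one on first sight."""
		key = tuple(sorted(multistate))
		if key not in index:
			index[key] = len(index) - 1    # ids 0, 1, 2, ... in discovery order
		return index[key]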
3 files changed with 40 insertions and 51 deletions:
regexp.py
 
@@ -153,220 +153,221 @@ class Chain(Token):
 

	
 
def find_closing_parenthesis(pattern, k):
 
	counter = 0
 

	
 
	for (i, c) in enumerate(pattern[k:]):
 
		if c == "(":
 
			counter += 1
 
		elif c == ")":
 
			counter -= 1
 
		if counter == 0:
 
			return k+i
 

	
 
	raise ParsingError(f'A closing parenthesis not found. Pattern: "{pattern}", position: {k}')
 

	
 

	
 
def parse(pattern, offset=0):
 
	res = []
 
	is_alternative = False
 

	
 
	i = 0
 
	while i < len(pattern):
 
		c = pattern[i]
 
		if c == "(":
 
			j = find_closing_parenthesis(pattern, i)
 
			inner_content = parse(pattern[i+1:j], offset+i+1)
 
			res.append(inner_content)
 
			i = j+1
 
		elif c == "*":
 
			try:
 
				token = res.pop()
 
			except IndexError as e:
 
				raise ParsingError(f'The asterisk operator is missing an argument. Pattern: "{pattern}", position {i}')
 
			res.append(Asterisk(token))
 
			i += 1
 
		elif c == ")":
 
			raise ParsingError(f'An opening parenthesis not found. Pattern: "{pattern}", position: {i}')
 
		elif c == "|" or c == "+":
 
			is_alternative = True
 
			res.append(AlternativeSeparator())
 
			i += 1
 
		elif c == "_":
 
			res.append(Lambda())
 
			i += 1
 
		else:
 
			res.append(Symbol(i+offset, c))
 
			i += 1
 

	
 
	if is_alternative:
 
		return Alternative(res)
 
	else:
 
		return Chain(res)
 

	
 

	
 
class Regexp:
 
	def __init__(self, pattern):
 
		(self.rules, self.end_states) = self._parse(pattern)
 

	
 
	def _parse(self, s):
 
		r = parse(s)
 
		rules = dict()
 

	
 
		for i in r.list_first():
 
			c = s[i]
 
			key = (-1, c)
 
			if key not in rules:
 
				rules[key] = set()
 
			rules[key].add(i)
 

	
 
		for (i, j) in r.list_neighbours():
 
			c = s[j]
 
			key = (i, c)
 
			if key not in rules:
 
				rules[key] = set()
 
			rules[key].add(j)
 

	
 
		end_states = set(r.list_last())
 
		if r.is_skippable:
 
			end_states.add(-1)
 

	
 
		return rules, end_states
 

	
 
	def match(self, s):
 
		current = {-1}
 

	
 
		for c in s:
 
			new_state = set()
 
			for st in current:
 
				key = (st, c)
 
				if key in self.rules:
 
					new_state.update(self.rules[key])
 
			current = new_state
 

	
 
		return any(st in self.end_states for st in current)
 

	
 
	def determinize(self):
 
		rules = dict()
 
-		end_states = {(-1,)} if -1 in self.end_states else set()
+		end_states = {-1} if -1 in self.end_states else set()

+		index = {(-1,): -1}
		stack = [(-1,)]
-		processed_states = set()
 
		while stack:
 
			multistate = stack.pop()
 
			new_rules = dict()
 
			
 
			for ((st, c), target) in filter(lambda item: item[0][0] in multistate, self.rules.items()):
 
				if c not in new_rules:
 
					new_rules[c] = set()
 
				new_rules[c].update(target)
 
			
 
			for (c, target_set) in new_rules.items():
 
-				new_target = tuple(sorted(target_set))
-				rules[(multistate, c)] = new_target
-				if any(st in self.end_states for st in new_target):
-					end_states.add(new_target)
-				if new_target not in processed_states:
-					stack.append(new_target)
-					processed_states.add(new_target)
+				target_tup = tuple(sorted(target_set))
+				if target_tup not in index:
+					new_target = len(index)-1
+					index[target_tup] = new_target
+					stack.append(target_tup)
+				rules[(index[multistate], c)] = index[target_tup]
+				if any(st in self.end_states for st in target_set):
+					end_states.add(index[target_tup])
 
		
 
		return (rules, end_states)
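A rough usage sketch of the rewritten determinize (assuming the classes from this file; the concrete state numbers depend on dict iteration order, so the values shown are only indicative):

	r = Regexp("a(b|c)")
	(rules, end_states) = r.determinize()
	# rules now maps (int_state, char) -> int_state, e.g. {(-1, 'a'): 0, (0, 'b'): 1, (0, 'c'): 2}
	# end_states is a plain set of ints, e.g. {1, 2}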
 

	
 

	
 
class RegexpDFA:
 
	def __init__(self, rules, end_states):
 
		self.rules = rules
 
		self.end_states = end_states
 

	
 
	@classmethod
 
	def create(cls, pattern):
 
		r = Regexp(pattern)
 
		(rules, end_states) = r.determinize()
 

	
 
		return cls(rules, end_states)
 

	
 
	def match(self, s):
 
-		st = 0
+		st = -1
 

	
 
		for c in s:
 
			key = (st, c)
 
			if key in self.rules:
 
				st = self.rules[key]
 
			else:
 
				return False
 

	
 
		return st in self.end_states
 

	
 
	def reduce(self):
 
		equivalents = self._find_equivalent_states()
 
		(rules, end_states) = self._collapse_states(equivalents)
 

	
 
		return RegexpDFA(rules, end_states)
 

	
 
	def normalize(self):
 
-		index = {(-1,): 0}
-		queue = deque([(-1,)])
+		index = {-1: -1}
+		queue = deque([-1])
 

	
 
		while queue:
 
			state = queue.popleft()
 
			edges = [((st, c), t) for ((st, c), t) in self.rules.items() if st == state]
 
			edges.sort()
 
			for ((st, c), t) in edges:
 
				if t not in index:
 
-					index[t] = len(index)
+					index[t] = len(index)-1
 
					queue.append(t)
 
		
 
		rules = dict(((index[st], c), index[t]) for ((st, c), t) in self.rules.items())
 
		end_states = {index[st] for st in self.end_states}
 

	
 
		return RegexpDFA(rules, end_states)
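A hedged sketch of normalize after the change (again assuming the classes from this file): states are renumbered breadth-first from the start state, which keeps the id -1, so equivalent automata end up with comparable rule tables.

	d = RegexpDFA.create("(ab)*").reduce().normalize()
	# the start state stays -1; the remaining states are relabelled 0, 1, ... in BFS order
	print(d.rules)        # e.g. {(-1, 'a'): 0, (0, 'b'): -1}
	print(d.end_states)   # e.g. {-1}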
 

	
 
	def _find_equivalent_states(self):
 
-		state_set = [(-2,), (-1,)] + sorted(set(self.rules.values()))
+		state_set = [-2, -1] + sorted(set(self.rules.values()))
 
		alphabet = {c for (st, c) in self.rules.keys()}
 
		equivalents = {(s1, s2) for (i, s1) in enumerate(state_set) for s2 in state_set[i+1:]}
 

	
 
		for (s1, s2) in equivalents.copy():
 
			if (s1 in self.end_states and s2 not in self.end_states) or (s1 not in self.end_states and s2 in self.end_states):
 
				equivalents.remove((s1, s2))
 
		
 
		ctrl = True
 
		while ctrl:
 
			ctrl = False
 
			for (s1, s2) in equivalents.copy():
 
				for c in alphabet:
 
-					t1 = self.rules.get((s1, c), (-2,))
-					t2 = self.rules.get((s2, c), (-2,))
+					t1 = self.rules.get((s1, c), -2)
+					t2 = self.rules.get((s2, c), -2)
 
					key = (min(t1, t2), max(t1, t2))
 
					if t1 != t2 and key not in equivalents:
 
						equivalents.remove((s1, s2))
 
						ctrl = True
 
						break
 
		
 
		return equivalents
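An illustrative example (not part of the diff): with integer states, -2 serves as the implicit fail state, and a pair of states stays in equivalents only while every character sends both to equivalent targets.

	d = RegexpDFA.create("a(b|c)")
	# the two accepting states (whatever ids they received) send every character
	# to the fail state -2, so they survive the refinement as a pair and reduce() merges them
	d2 = d.reduce()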
 
	
 
	def _collapse_states(self, equivalents):
 
		rules = self.rules.items()
 
		end_states = self.end_states.copy()
 

	
 
		for (s1, s2) in sorted(equivalents):
 
			rules = map(
 
				lambda item: (item[0], s1 if item[1] == s2 else item[1]),
 
				filter(lambda item: item[0][0] != s2, rules)
 
			)
 
			end_states.discard(s2)
 
		
 
		return (dict(rules), end_states)
 

	
 

	
 
if __name__ == "__main__":
 
	tests = ["", "a", "ab", "aabb", "abab", "abcd", "abcbcdbcd"]
 
	for pattern in ["a(b|c)", "a*b*", "(ab)*", "a((bc)*d)*", "(a|b)*a(a|b)(a|b)(a|b)"]:
 
	for pattern in ["a(b|c)", "a*b*", "(ab)*", "a((bc)*d)*", "(a|b)*a(a|b)(a|b)(a|b)", "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz"]:
 
		print("#", pattern)
 
		try:
 
			r = RegexpDFA.create(pattern).reduce().normalize()
 
		except ParsingError as e:
 
			print("Failed to parse the regexp:")
 
			print(e)
 
			continue
 
		for t in tests:
 
			print(t, r.match(t))
 
		print()
src/main.rs
 
use regexp::Regexp;
 

	
 
fn main() {
 
	let tests = ["", "a", "ab", "aabb", "abab", "abcd", "abcbcdbcd"];
 
	for pattern in ["a(b|c)", "a*b*", "(ab)*", "a((bc)*d)*", "(a|b)*a(a|b)(a|b)(a|b)"] {
 
	for pattern in ["a(b|c)", "a*b*", "(ab)*", "a((bc)*d)*", "(a|b)*a(a|b)(a|b)(a|b)", "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz"] {
 
		println!("# {pattern}");
 
		let r = match Regexp::new(&pattern.to_string()) {
 
			Ok(r1) => r1.determinize().reduce().normalize(),
 
			Err(e) => {
 
				println!("{e}");
 
				continue;
 
			}
 
		};
 
		for &t in tests.iter() {
 
			println!("{t} {}", r.eval(t.to_string()));
 
		}
 
		println!();
 
	}
 
}
src/regexp.rs
 
use std::collections::{HashMap, HashSet, VecDeque};
 

	
 
mod token;
 
pub use token::ParsingError;
 
use token::parse;
 

	
 
const START: i32 = -1;
 
const FAIL: i32 = -2;
 

	
 
fn encode_set(set: &HashSet<i32>) -> i32 {
 
	let mut res = 0;
 
	for x in set.iter() {
 
		assert!(*x >= 0);
 
		res ^= 1<<x;
 
	}
 
	return res;
 
}
 

	
 
fn decode_set(x: i32) -> HashSet<i32> {
 
	if x == START {return HashSet::from([START]);}
 

	
 
	let mut x = x;
 
	let mut res: HashSet<i32> = HashSet::new();
 
	
 
	while x > 0 {
 
		let y = x.trailing_zeros();
 
		res.insert(y as i32);
 
		x ^= 1 << y;
 
	}
 

	
 
	return res;
 
fn encode_set(set: &HashSet<i32>) -> String {
 
	let mut v = Vec::from_iter(set.iter());
 
	v.sort();
 
	let res: Vec<String> = v.into_iter().map(|x| x.to_string()).collect();
 
	return res.join(",");
 
}
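The removed bitmask encoding can only distinguish positions 0 to 30 in an i32, whereas the new encode_set builds a canonical string key, playing the role the sorted tuple plays on the Python side. A hedged Python rendering of the same idea:

	def encode_set(state_set):
		# canonical, order-independent key, analogous to the Rust helper above
		return ",".join(str(x) for x in sorted(state_set))

	encode_set({5, 0, 2})   # returns "0,2,5"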
 

	
 
#[derive(Debug)]
 
pub struct Regexp {
 
	rules: HashMap<(i32, char), HashSet<i32>>,
 
	end_states: HashSet<i32>
 
}
 

	
 
impl Regexp {
 
	pub fn new(pattern: &String) -> Result<Regexp, ParsingError> {
 
		let r = parse(pattern, 0)?;
 
		let pattern_chars = Vec::from_iter(pattern.chars());
 
		let mut rules: HashMap<(i32, char), HashSet<i32>> = HashMap::new();
 
		
 
		for i in r.list_first() {
 
			let c = pattern_chars[i];
 
			let key = (START, c);
 
			match rules.get_mut(&key) {
 
				Some(set) => {set.insert(i as i32);},
 
				None => {rules.insert(key, HashSet::from([i as i32]));}
 
			};
 
		}
 

	
 
		for (i, j) in r.list_neighbours() {
 
			let c = pattern_chars[j];
 
			let key = (i as i32, c);
 
			match rules.get_mut(&key) {
 
				Some(set) => {set.insert(j as i32);},
 
				None => {rules.insert(key, HashSet::from([j as i32]));}
 
			};
 
		}
 

	
 
		let mut end_states = HashSet::from_iter(r.list_last().into_iter().map(|i| i as i32));
 
		if r.is_skippable() {
 
			end_states.insert(START);
 
		}
 

	
 
		return Ok(Regexp{rules, end_states});
 
	}
 

	
 
	pub fn eval(&self, s: String) -> bool {
 
		let mut multistate = HashSet::from([START]);
 

	
 
		for c in s.chars() {
 
			let mut new_multistate = HashSet::new();
 

	
 
			for state in multistate {
 
				if let Some(x) = self.rules.get(&(state, c)) {
 
					new_multistate = new_multistate.union(&x).map(|&y| y).collect();
 
				} else if let Some(x) = self.rules.get(&(state, '.')) {
 
					new_multistate = new_multistate.union(&x).map(|&y| y).collect();
 
				}
 
			}
 
			multistate = new_multistate;
 
		}
 

	
 
		return multistate.iter().any(|x| self.end_states.contains(x));
 
	}
 

	
 
	pub fn determinize(&self) -> RegexpDFA {
 
		let mut rules: HashMap<(i32, char), i32> = HashMap::new();
 
		let mut end_states: HashSet<i32> = HashSet::new();
 
		if self.end_states.contains(&START) {end_states.insert(START);}
 

	
 
-		let mut stack = Vec::from([START]);
-		let mut processed_states = HashSet::new();
+		let mut index_new = HashMap::from([(START.to_string(), START)]);
+		let mut index_multi = HashMap::from([(START.to_string(), HashSet::from([START]))]);
+		let mut stack = Vec::from([START.to_string()]);
 

	
 
		while !stack.is_empty() {
 
-			let state = stack.pop().unwrap();
-			let multistate = decode_set(state);
+			let state_hash = stack.pop().unwrap();
+			let multistate = &index_multi[&state_hash];
 
			let mut new_rules: HashMap<char, HashSet<i32>> = HashMap::new();
 

	
 
			for key in self.rules.keys().filter(|key| multistate.contains(&key.0)) {
 
				let (_st, c) = key;
 
				if !new_rules.contains_key(c) {
 
					new_rules.insert(*c, HashSet::new());
 
				}
 
				for target in &self.rules[key] {
 
					new_rules.get_mut(c).unwrap().insert(*target);
 
				}
 
			}
 

	
 
			for (c, target_set) in new_rules.into_iter() {
 
-				let encoded_target = encode_set(&target_set);
-				rules.insert((state, c), encoded_target);
-				if target_set.iter().any(|st| self.end_states.contains(st)) {
-					end_states.insert(encoded_target);
+				let target_hash = encode_set(&target_set);
+				let is_end = target_set.iter().any(|st| self.end_states.contains(st));
+				if !index_new.contains_key(&target_hash) {
+					let target_new = index_new.len() as i32;
+					index_new.insert(target_hash.clone(), target_new);
+					index_multi.insert(target_hash.clone(), target_set);
+					stack.push(target_hash.clone());
				}
-				if !processed_states.contains(&encoded_target) {
-					stack.push(encoded_target);
-					processed_states.insert(encoded_target);
+				rules.insert((index_new[&state_hash], c), index_new[&target_hash]);
+				if is_end {
+					end_states.insert(index_new[&target_hash]);
 
				}
 
			}
 
		}
 

	
 
		return RegexpDFA{rules, end_states};
 
	}
 
}
 

	
 
pub struct RegexpDFA {
 
	rules: HashMap<(i32, char), i32>,
 
	end_states: HashSet<i32>
 
}
 

	
 
impl RegexpDFA {
 
	pub fn eval(&self, s: String) -> bool {
 
		let mut state = START;
 

	
 
		for c in s.chars() {
 
			if let Some(x) = self.rules.get(&(state, c)) {
 
				state = *x;
 
			} else {
 
				return false;
 
			}
 
		}
 

	
 
		return self.end_states.contains(&state);
 
	}
 

	
 
	pub fn reduce(&self) -> RegexpDFA {
 
		let equivalents = self.find_equivalent_states();
 
		return self.collapse_states(equivalents);
 
	}
 

	
 
	pub fn normalize(&self) -> RegexpDFA {
 
		let mut index = HashMap::from([(START, START)]);
 
		let mut queue = VecDeque::from([START]);
 

	
 
		while !queue.is_empty() {
 
			let state = queue.pop_front().unwrap();
 
			let mut edges: Vec<((i32, char), i32)> = self.rules.iter()
 
				.filter(|((st, c), t)| *st == state)
 
				.map(|((st, c), t)| ((*st, *c), *t)).collect();
 
			edges.sort();
 
			for ((_st, _c), t) in edges {
 
				if !index.contains_key(&t) {
 
					index.insert(t, index.len() as i32);
 
					queue.push_back(t);
 
				}
 
			}
 
		}
 

	
 
		let rules = self.rules.iter().map(|((st, c), t)| ((index[st], *c), index[t])).collect();
 
		let end_states = self.end_states.iter().map(|st| index[st]).collect();
 
		
 
		return RegexpDFA{rules, end_states};
 
	}
 

	
 
	fn find_equivalent_states(&self) -> Vec<(i32, i32)> {
 
		let state_set: HashSet<i32> = HashSet::from_iter(self.rules.values().copied());
 
		let mut state_vec: Vec<i32> = Vec::from_iter(state_set.into_iter());
 
		state_vec.push(START);
 
		state_vec.push(FAIL);
 
		state_vec.sort();
 
		let alphabet: HashSet<char> = self.rules.keys().map(|(_st, c)| c).copied().collect();
 

	
 
		let mut equivalents = HashSet::new();
 
		state_vec.iter().enumerate().for_each(|(i, s1)| {
 
			equivalents.extend(
 
				state_vec[i+1..].iter()
 
				.filter(|s2| !(self.end_states.contains(s1)^self.end_states.contains(s2)))
 
				.map(|s2| (*s1, *s2))
 
			);
 
		});
 

	
 
		let mut n = usize::MAX;
 
		while equivalents.len() < n {
 
			n = equivalents.len();
 
			equivalents = equivalents.iter().filter(|(s1, s2)| {
 
				!alphabet.iter().any(|c| {
 
					let t1 = self.rules.get(&(*s1, *c)).unwrap_or(&FAIL);
 
					let t2 = self.rules.get(&(*s2, *c)).unwrap_or(&FAIL);
 
					let key = (*t1.min(t2), *t1.max(t2));
 
					return t1 != t2 && !equivalents.contains(&key);
 
				})
 
			}).copied().collect();
 
		}
 

	
 
		return Vec::from_iter(equivalents.into_iter());
 
	}
 

	
 
	fn collapse_states(&self, mut equivalents: Vec<(i32, i32)>) -> RegexpDFA {
 
		let mut rules = self.rules.clone();
 
		let mut end_states = self.end_states.clone();
 
		equivalents.sort();
 

	
 
		for (s1, s2) in equivalents.into_iter() {