Files @ fd790596c353
Location: Regular-Expresso/src/regexp.rs

Laman: implemented comparison of regexps with different alphabets
use std::{collections::{HashMap, HashSet, VecDeque}, iter};
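
// A regular expression is parsed into a position NFA, determinized into a compact
// table-driven DFA, minimized and normalized into a canonical form, and can then be
// compared with another regexp, even one built over a different alphabet.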

mod token;
pub use token::ParsingError;
use token::parse;

const START_NFA: usize = usize::MAX;
const START_DFA: usize = 0;

fn encode_set(set: &HashSet<usize>) -> String {
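	// Serialize a set of NFA states into a canonical key (sorted ids joined by commas)
	// so that a multistate can be used as a HashMap key during determinization.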
	let mut v = Vec::from_iter(set.iter());
	v.sort();
	let res: Vec<String> = v.into_iter().map(|x| x.to_string()).collect();
	return res.join(",");
}

#[derive(Debug)]
pub struct Regexp {
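	// Nondeterministic automaton built from a pattern: `rules` maps a (state, character)
	// pair to the set of states reachable on that character; START_NFA is the virtual
	// start state and '.' acts as a wildcard during evaluation.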
	rules: HashMap<(usize, char), HashSet<usize>>,
	end_states: HashSet<usize>,
	alphabet: Vec<char>
}

impl Regexp {
	pub fn new(pattern: &String) -> Result<Regexp, ParsingError> {
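		// Position (Glushkov) construction: every character occurrence in the pattern is
		// an NFA state; transitions connect occurrences that may follow one another, with
		// the virtual START_NFA state feeding the possible first characters.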
		let r = parse(pattern, 0)?;
		let pattern_chars = Vec::from_iter(pattern.chars());
		let mut rules: HashMap<(usize, char), HashSet<usize>> = HashMap::new();
		let mut alphabet: HashSet<char> = HashSet::new();
		
		for i in r.list_first() {
			let c = pattern_chars[i];
			alphabet.insert(c);
			let key = (START_NFA, c);
			match rules.get_mut(&key) {
				Some(set) => {set.insert(i);},
				None => {rules.insert(key, HashSet::from([i]));}
			};
		}

		for (i, j) in r.list_neighbours() {
			let c = pattern_chars[j];
			alphabet.insert(c);
			let key = (i, c);
			match rules.get_mut(&key) {
				Some(set) => {set.insert(j);},
				None => {rules.insert(key, HashSet::from([j]));}
			};
		}

		let mut end_states = HashSet::from_iter(r.list_last().into_iter());
		if r.is_skippable() {
			end_states.insert(START_NFA);
		}

		let mut alphabet_vec = Vec::from_iter(alphabet.into_iter());
		alphabet_vec.sort();

		return Ok(Regexp{rules, end_states, alphabet: alphabet_vec});
	}

	pub fn eval(&self, s: String) -> bool {
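		// Simulate the NFA by carrying the whole set of currently reachable states;
		// '.' rules serve as a wildcard fallback for characters without an exact rule.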
		let mut multistate = HashSet::from([START_NFA]);

		for c in s.chars() {
			let mut new_multistate = HashSet::new();

			for state in multistate {
				if let Some(x) = self.rules.get(&(state, c)) {
					new_multistate.extend(x.iter().copied());
				} else if let Some(x) = self.rules.get(&(state, '.')) {
					new_multistate.extend(x.iter().copied());
				}
			}
			multistate = new_multistate;
		}

		return multistate.iter().any(|x| self.end_states.contains(x));
	}

	pub fn determinize(&self) -> RegexpDFA {
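		// Powerset construction: every reachable set of NFA states becomes one DFA state,
		// keyed by its canonical string hash. Transitions go into a flat row-major table
		// (state * alphabet_size + char_index); unfilled entries are later redirected to
		// an extra trap state appended as the last row.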
		const FAIL: usize = usize::MAX;
		let alphabet_index: HashMap<char, usize> = self.alphabet.iter().enumerate().map(|(i, c)| (*c, i)).collect();
		let n = alphabet_index.len();
		let mut compact_rules = vec![FAIL; n];
		let mut end_states: HashSet<usize> = HashSet::new();
		if self.end_states.contains(&START_NFA) {end_states.insert(START_DFA);}

		// string hash -> single int DFA state
		let mut index_new = HashMap::from([(START_NFA.to_string(), START_DFA)]);
		// string hash -> HashSet NFA multistate
		let mut index_multi = HashMap::from([(START_NFA.to_string(), HashSet::from([START_NFA]))]);
		let mut stack = Vec::from([START_NFA.to_string()]);

		while !stack.is_empty() {
			let state_hash = stack.pop().unwrap();
			let multistate = &index_multi[&state_hash];
			let mut new_rules: HashMap<char, HashSet<usize>> = HashMap::new();

			for key in self.rules.keys().filter(|key| multistate.contains(&key.0)) {
				let (_st, c) = key;
				if !new_rules.contains_key(c) {
					new_rules.insert(*c, HashSet::new());
				}
				for target in &self.rules[key] {
					new_rules.get_mut(c).unwrap().insert(*target);
				}
			}

			for (c, target_set) in new_rules.into_iter() {
				let target_hash = encode_set(&target_set);
				let is_end = target_set.iter().any(|st| self.end_states.contains(st));
				if !index_new.contains_key(&target_hash) {
					let target_new = index_new.len();
					index_new.insert(target_hash.clone(), target_new);
					index_multi.insert(target_hash.clone(), target_set);
					compact_rules.extend(iter::repeat(FAIL).take(n));
					stack.push(target_hash.clone());
				}
				compact_rules[index_new[&state_hash]*n + alphabet_index[&c]] = index_new[&target_hash];
				if is_end {
					end_states.insert(index_new[&target_hash]);
				}
			}
		}

		let fail = index_new.len();
		compact_rules = compact_rules.into_iter().map(|st| if st != FAIL {st} else {fail}).collect();
		compact_rules.extend(iter::repeat(fail).take(n));

		return RegexpDFA{rules: compact_rules, end_states, alphabet_index};
	}
}

#[derive(Clone)]
pub struct RegexpDFA {
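	// Deterministic automaton over a fixed alphabet; `rules` is a row-major transition
	// table indexed as rules[state * alphabet_size + char_index]. Missing transitions
	// lead to a non-accepting trap state.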
	rules: Vec<usize>,
	end_states: HashSet<usize>,
	alphabet_index: HashMap<char, usize>
}

impl RegexpDFA {
	pub fn eval(&self, s: String) -> bool {
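		// Walk the transition table one character at a time; characters outside the
		// alphabet reject immediately, and the final state decides acceptance.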
		let n = self.alphabet_index.len();
		if n == 0 {
			return s.is_empty();
		}
		let fail = self.rules.len() / n;
		let mut state = START_DFA;

		for c in s.chars() {
			if let Some(ci) = self.alphabet_index.get(&c) {
				state = self.rules[state*n + ci];
			} else {
				return false;
			}
			if state == fail {
				return false;
			}
		}

		return self.end_states.contains(&state);
	}

	pub fn reduce(&self) -> RegexpDFA {
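		// Minimize the automaton by merging pairs of states recognized as equivalent.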
		if self.alphabet_index.len() == 0 {
			return RegexpDFA{rules: self.rules.clone(), end_states: self.end_states.clone(), alphabet_index: self.alphabet_index.clone()};
		}
		let equivalents = self.find_equivalent_states();
		return self.collapse_states(equivalents);
	}

	pub fn normalize(&self) -> RegexpDFA {
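		// Renumber states in breadth-first order from the start state and drop unreachable
		// ones, so that (after reduce) automata accepting the same language over the same
		// alphabet end up with identical tables.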
		let n = self.alphabet_index.len();
		if n == 0 {
			return RegexpDFA{rules: self.rules.clone(), end_states: self.end_states.clone(), alphabet_index: self.alphabet_index.clone()}; 
		}
		let m = self.rules.len()/n;
		let fail = m;
		let mut index: Vec<usize> = vec![fail;m];
		index[0] = 0;
		let mut queue = VecDeque::from([START_DFA]);

		let mut rules = vec![];
		let mut k = 1;

		while !queue.is_empty() {
			let si = queue.pop_front().unwrap();
			let row = &self.rules[si*n..(si+1)*n];
			for &sj in row {
				if sj != fail && index[sj] == fail {
					index[sj] = k;
					k += 1;
					queue.push_back(sj);
				}
			}
			rules.extend(row.iter().map(|&st| index[st]));
		}

		let end_states = self.end_states.iter().map(|st| index[*st]).collect();
		
		return RegexpDFA{rules, end_states, alphabet_index: self.alphabet_index.clone()};
	}

	pub fn find_distinguishing_string(&self, other: &RegexpDFA) -> Option<String> {
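		// Find the shortest string accepted by exactly one of the two automata, or None
		// if the languages are the same. Works even when the regexps use different
		// alphabets: both are first expanded to the union alphabet, then a product
		// automaton accepting the symmetric difference is searched breadth-first.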
		if self.alphabet_index == other.alphabet_index && self.rules == other.rules && self.end_states == other.end_states {
			return None;
		}

		let r1 = self.expand_alphabet(&other.alphabet_index);
		let r2 = other.expand_alphabet(&self.alphabet_index);
		let product = r1.build_product_automaton(&r2);
		let n = product.alphabet_index.len();
		let reverse_alphabet_index: HashMap<usize, char> = HashMap::from_iter(product.alphabet_index.iter().map(|(&k, &v)| (v, k)));

		let mut queue = VecDeque::from([(0, "".to_string())]);
		let mut visited = HashSet::new();
		while !queue.is_empty() {
			let (state, acc) = queue.pop_front().unwrap();
			if product.end_states.contains(&state) {
				return Some(acc);
			}
			for (i, target) in product.rules[state*n..(state+1)*n].iter().enumerate() {
				if !visited.contains(target) {
					queue.push_back((*target, acc.clone()+&String::from(reverse_alphabet_index[&i])));
					visited.insert(target);
				}
			}
		}

		panic!("the automata differ, but no distinguishing string was found");
	}

	fn find_equivalent_states(&self) -> Vec<(usize, usize)> {
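		// Table-filling equivalence test: start with every pair of states that agree on
		// acceptance and repeatedly discard pairs that some character maps to an already
		// distinguished pair, until a fixed point is reached.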
		let n = self.alphabet_index.len();
		let state_vec: Vec<usize> = (0..self.rules.len()/n).collect();
		let mut equivalents = HashSet::new();
		state_vec.iter().enumerate().for_each(|(i, s1)| {
			equivalents.extend(
				state_vec[i+1..].iter()
				.filter(|s2| !(self.end_states.contains(s1)^self.end_states.contains(s2)))
				.map(|s2| (*s1, *s2))
			);
		});

		let mut m = usize::MAX;
		while equivalents.len() < m {
			m = equivalents.len();
			equivalents = equivalents.iter().filter(|(s1, s2)| {
				!(0..n).any(|ci| {
					let t1 = self.rules[s1*n + ci];
					let t2 = self.rules[s2*n + ci];
					let key = (t1.min(t2), t2.max(t1));
					return t1 != t2 && !equivalents.contains(&key);
				})
			}).copied().collect();
		}

		return Vec::from_iter(equivalents.into_iter());
	}

	fn collapse_states(&self, equivalents: Vec<(usize, usize)>) -> RegexpDFA {
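		// Merge each equivalence class into its lowest-numbered state, then renumber the
		// surviving states so the transition table stays dense.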
		let n = self.alphabet_index.len();
		let m = self.rules.len()/n;
		let mut rules = Vec::new();

		let mut eq_mapping: Vec<usize> = (0..m).collect();
		for (s1, s2) in equivalents.into_iter() {
			eq_mapping[s2] = eq_mapping[s2].min(s1);
		}

		let mut discard_mapping: Vec<usize> = (0..m).collect();
		let mut discard_count = 0;

		for si in 0..m {
			if eq_mapping[si] != si {
				discard_count += 1;
				continue;
			}
			discard_mapping[si] = si-discard_count;
			rules.extend(self.rules[si*n..(si+1)*n].iter().map(|&st| eq_mapping[st]));
		}

		rules = rules.into_iter().map(|st| discard_mapping[st]).collect();
		let end_states = self.end_states.iter().map(|st| discard_mapping[eq_mapping[*st]]).collect();

		return RegexpDFA{rules, end_states, alphabet_index: self.alphabet_index.clone()};
	}

	fn expand_alphabet(&self, alphabet_index: &HashMap<char, usize>) -> RegexpDFA {
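		// Rebuild the automaton over the union of its own alphabet and the given one;
		// characters it has never seen lead to a newly appended trap state.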
		if *alphabet_index == self.alphabet_index {
			return self.clone();
		}

		let n1 = self.alphabet_index.len();
		let m = self.rules.len() / n1;

		let combined_alphabet: HashSet<char> = HashSet::from_iter(self.alphabet_index.keys().chain(alphabet_index.keys()).copied());
		let mut combined_vec = Vec::from_iter(combined_alphabet.into_iter());
		combined_vec.sort();
		let combined_index = HashMap::from_iter(combined_vec.iter().enumerate().map(|(i, c)| (*c, i)));
		let conversion_index: HashMap<usize, usize> = HashMap::from_iter(self.alphabet_index.iter().map(|(k, v)| (*v, combined_index[k])));
		let n2 = combined_vec.len();

		let mut rules = vec![];
		for i in 0..m {
			let mut row = vec![m;n2];
			for (j, st) in self.rules[i*n1..(i+1)*n1].iter().enumerate() {
				row[conversion_index[&j]] = *st;
			}
			rules.append(&mut row);
		}
		rules.append(&mut vec![m;n2]);

		return RegexpDFA{rules, end_states: self.end_states.clone(), alphabet_index: combined_index}.reduce().normalize();
	}

	fn build_product_automaton(&self, other: &RegexpDFA) -> RegexpDFA {
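		// Run both automata in lockstep: pair (s1, s2) becomes state s1*m + s2, and a
		// pair is accepting iff exactly one of the automata accepts (symmetric
		// difference), so any reachable accepting state marks a distinguishing word.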
		let n = self.alphabet_index.len();
		let m = other.rules.len() / n;
		let k = self.rules.len() / n;

		let mut rules = vec![];
		let mut end_states = HashSet::new();

		for s1 in 0..k {
			let row1 = &self.rules[s1*n..(s1+1)*n];
			for s2 in 0..m {
				let row2 = &other.rules[s2*n..(s2+1)*n];
				rules.extend(row1.iter().zip(row2.iter()).map(|(x, y)| x*m + y));
				if (self.end_states.contains(&s1)) ^ (other.end_states.contains(&s2)) {
					end_states.insert(s1*m + s2);
				}
			}
		}

		return RegexpDFA{rules, end_states, alphabet_index: self.alphabet_index.clone()}.reduce().normalize();
	}
}
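
// Usage sketch (not part of the original file): a hypothetical test showing how the
// pieces above fit together to compare two patterns built over different alphabets.
// It assumes that `token::parse` accepts plain literal patterns such as "ab" and "cd".
#[cfg(test)]
mod usage_example {
	use super::*;

	// Hypothetical helper: parse a pattern and turn it into a canonical minimal DFA.
	fn build(pattern: &str) -> RegexpDFA {
		match Regexp::new(&pattern.to_string()) {
			Ok(r) => r.determinize().reduce().normalize(),
			Err(_) => panic!("the example pattern should parse"),
		}
	}

	#[test]
	fn distinguishes_patterns_over_different_alphabets() {
		let r1 = build("ab");
		let r2 = build("cd");
		// The two languages differ, so there must be a shortest word accepted by
		// exactly one of the automata.
		let w = r1.find_distinguishing_string(&r2).expect("the languages differ");
		assert!(r1.eval(w.clone()) ^ r2.eval(w));
	}
}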