// Regular-Expresso/src/regexp.rs @ bac2e7f972b5

use std::{collections::{HashMap, HashSet, VecDeque}, iter};

mod token;
pub use token::ParsingError;
use token::parse;

/// Virtual start state of the position NFA; no pattern position maps to it.
const START_NFA: usize = usize::MAX;
/// Start state of the determinized automaton.
const START_DFA: usize = 0;

/// A nondeterministic finite automaton built directly from a regexp pattern
/// out of its first/follow/last sets (a Glushkov-style position automaton):
/// every character position in the pattern is a state, plus the virtual start
/// state `START_NFA`.
#[derive(Debug)]
pub struct Regexp {
	/// Transitions: (source state, character) -> set of target states.
	rules: HashMap<(usize, char), HashSet<usize>>,
	/// States in which a match may end.
	end_states: HashSet<usize>,
	/// All characters occurring in the pattern, sorted.
	alphabet: Vec<char>
}
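
// For illustration, assuming the `token` parser produces the usual Glushkov
// first/follow/last sets: the pattern "ab", with 'a' at position 0 and 'b' at
// position 1, would be encoded roughly as
//
//	rules:      {(START_NFA, 'a') -> {0}, (0, 'b') -> {1}}
//	end_states: {1}
//	alphabet:   ['a', 'b']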

impl Regexp {
	/// Builds the position NFA for `pattern`: the parsed pattern's `list_first`
	/// positions become transitions out of `START_NFA`, its `list_neighbours`
	/// pairs become the remaining transitions, and its `list_last` positions
	/// become the accepting states.
	pub fn new(pattern: &String) -> Result<Regexp, ParsingError> {
		let r = parse(pattern, 0)?;
		let pattern_chars = Vec::from_iter(pattern.chars());
		let mut rules: HashMap<(usize, char), HashSet<usize>> = HashMap::new();
		let mut alphabet: HashSet<char> = HashSet::new();
		
		// Transitions out of the start state: one per position that can begin a match.
		for i in r.list_first() {
			let c = pattern_chars[i];
			alphabet.insert(c);
			let key = (START_NFA, c);
			match rules.get_mut(&key) {
				Some(set) => {set.insert(i);},
				None => {rules.insert(key, HashSet::from([i]));}
			};
		}

		// Transitions between positions, as given by the follow/neighbour pairs.
		for (i, j) in r.list_neighbours() {
			let c = pattern_chars[j];
			alphabet.insert(c);
			let key = (i, c);
			match rules.get_mut(&key) {
				Some(set) => {set.insert(j);},
				None => {rules.insert(key, HashSet::from([j]));}
			};
		}

		// Positions where a match may end; a skippable (nullable) pattern also
		// matches the empty string, so the start state becomes accepting too.
		let mut end_states = HashSet::from_iter(r.list_last().into_iter());
		if r.is_skippable() {
			end_states.insert(START_NFA);
		}

		let mut alphabet_vec = Vec::from_iter(alphabet.into_iter());
		alphabet_vec.sort();

		return Ok(Regexp{rules, end_states, alphabet: alphabet_vec});
	}

	/// Runs the NFA on `s` by tracking the set of currently possible states;
	/// '.' entries in the rules act as a single-character wildcard fallback.
	pub fn eval(&self, s: String) -> bool {
		let mut multistate = HashSet::from([START_NFA]);

		for c in s.chars() {
			let mut new_multistate = HashSet::new();

			for state in multistate {
				// Prefer an exact transition on `c`; otherwise fall back to the
				// '.' wildcard transition, if the pattern has one here.
				if let Some(x) = self.rules.get(&(state, c)) {
					new_multistate = new_multistate.union(x).copied().collect();
				} else if let Some(x) = self.rules.get(&(state, '.')) {
					new_multistate = new_multistate.union(x).copied().collect();
				}
			}
			multistate = new_multistate;
		}

		return multistate.iter().any(|x| self.end_states.contains(x));
	}

	/// Converts the NFA into a DFA by the subset construction: each DFA state
	/// corresponds to a set of NFA states, and missing transitions are routed
	/// to an explicit fail (sink) state appended at the end.
	pub fn determinize(&self) -> RegexpDFA {
		// Placeholder for transitions not discovered yet; remapped to the real
		// fail state once all reachable multistates are known.
		const FAIL: usize = usize::MAX;
		let alphabet_index: HashMap<char, usize> = self.alphabet.iter().enumerate().map(|(i, c)| (*c, i)).collect();
		let n = alphabet_index.len();
		let mut compact_rules = vec![FAIL; n];
		let mut end_states: HashSet<usize> = HashSet::new();
		if self.end_states.contains(&START_NFA) {end_states.insert(START_DFA);}

		// Maps each sorted set of NFA states (a "multistate") to its DFA state id.
		let mut index = HashMap::from([(vec![START_NFA], START_DFA)]);
		let mut stack = vec![vec![START_NFA]];

		while !stack.is_empty() {
			let multistate = stack.pop().unwrap();
			// For every character, collect the union of NFA targets reachable
			// from any NFA state inside the current multistate.
			let mut new_rules: HashMap<char, HashSet<usize>> = HashMap::new();

			for key in self.rules.keys().filter(|(st, _c)| multistate.binary_search(st).is_ok()) {
				let (_st, c) = key;
				if !new_rules.contains_key(c) {
					new_rules.insert(*c, HashSet::new());
				}
				for target in &self.rules[key] {
					new_rules.get_mut(c).unwrap().insert(*target);
				}
			}

			for (c, target_set) in new_rules.into_iter() {
				let mut target_vec = Vec::from_iter(target_set.into_iter());
				target_vec.sort();
				let is_end = target_vec.iter().any(|st| self.end_states.contains(st));
				if !index.contains_key(&target_vec) {
					let target_new = index.len();
					index.insert(target_vec.clone(), target_new);
					compact_rules.extend(iter::repeat(FAIL).take(n));
					stack.push(target_vec.clone());
				}
				compact_rules[index[&multistate]*n + alphabet_index[&c]] = index[&target_vec];
				if is_end {
					end_states.insert(index[&target_vec]);
				}
			}
		}

		// Any transition still marked FAIL goes to a newly appended sink state.
		let fail = index.len();
		compact_rules = compact_rules.into_iter().map(|st| if st != FAIL {st} else {fail}).collect();
		compact_rules.extend(iter::repeat(fail).take(n));
		
		return RegexpDFA::new(compact_rules, end_states, alphabet_index);
	}
}
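
// Usage sketch: the tests below assume the `token` parser accepts plain
// concatenation of literal characters and a '.' wildcard position (the
// wildcard fallback is visible in `Regexp::eval`); adjust the patterns if the
// grammar differs.
#[cfg(test)]
mod nfa_usage_sketch {
	use super::*;

	#[test]
	fn literal_pattern_matches_only_itself() {
		let r = Regexp::new(&String::from("abc")).unwrap();
		assert!(r.eval(String::from("abc")));
		assert!(!r.eval(String::from("ab")));
		assert!(!r.eval(String::from("abd")));
	}

	#[test]
	fn wildcard_matches_any_single_character() {
		// "a.c" is assumed to parse as: 'a', any single character, 'c'.
		let r = Regexp::new(&String::from("a.c")).unwrap();
		assert!(r.eval(String::from("abc")));
		assert!(r.eval(String::from("axc")));
		assert!(!r.eval(String::from("ac")));
	}
}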

/// A deterministic finite automaton with a complete, flattened transition table.
#[derive(Clone)]
pub struct RegexpDFA {
	/// Row-major transition table: the transition from state `s` on character
	/// `c` is `rules[s * alphabet_index.len() + alphabet_index[&c]]`.
	rules: Vec<usize>,
	/// Accepting states.
	end_states: HashSet<usize>,
	/// Maps each character of the alphabet to its column in `rules`.
	alphabet_index: HashMap<char, usize>
}
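
// Layout illustration: a DFA over the alphabet {'a': 0, 'b': 1} accepting
// exactly "ab" could be stored as
//
//	state 0 (start):  rules[0] = 1 (on 'a'),  rules[1] = 3 (on 'b', fail)
//	state 1:          rules[2] = 3,           rules[3] = 2
//	state 2 (accept): rules[4] = 3,           rules[5] = 3
//	state 3 (fail):   rules[6] = 3,           rules[7] = 3
//
// i.e. rules = [1, 3, 3, 2, 3, 3, 3, 3] and end_states = {2}.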

impl RegexpDFA {
	/// Wraps the given transition table, or, when `rules` is empty (a pattern
	/// with an empty alphabet), builds a degenerate two-state automaton over a
	/// dummy alphabet that can accept at most the empty string.
	pub fn new(rules: Vec<usize>, end_states: HashSet<usize>, alphabet_index: HashMap<char, usize>) -> RegexpDFA {
		if rules.len() > 0 {
			return RegexpDFA{rules, end_states, alphabet_index};
		} else {
			return RegexpDFA{
				rules: vec![1, 1],
				end_states,
				alphabet_index: HashMap::from([('\0', 0)])
			};
		}
	}

	/// Runs the DFA on `s`; any character outside the alphabet rejects immediately.
	pub fn eval(&self, s: String) -> bool {
		let n = self.alphabet_index.len();
		let mut state = START_DFA;

		for c in s.chars() {
			if let Some(ci) = self.alphabet_index.get(&c) {
				state = self.rules[state*n + ci];
			} else {
				return false;
			}
		}

		return self.end_states.contains(&state);
	}

	/// Returns an equivalent minimal DFA: equivalent states are found by
	/// partition refinement and then merged.
	pub fn reduce(&self) -> RegexpDFA {
		let partition = self.find_equivalent_states();
		return self.collapse_states(partition);
	}

	/// Renumbers the states canonically in breadth-first order from the start
	/// state (unreachable states are dropped), so that two equal-language
	/// minimal DFAs over the same alphabet end up with identical tables.
	pub fn normalize(&self) -> RegexpDFA {
		let n = self.alphabet_index.len();
		let m = self.rules.len()/n;
		// `index[old_state]` holds the new, BFS-order number of the state;
		// `fail` doubles as the "not visited yet" marker.
		let fail = m;
		let mut index: Vec<usize> = vec![fail;m];
		index[0] = 0;
		let mut queue = VecDeque::from([START_DFA]);

		let mut rules = vec![];
		let mut k = 1;

		while !queue.is_empty() {
			let si = queue.pop_front().unwrap();
			let row = &self.rules[si*n..(si+1)*n];
			for &sj in row {
				if sj != fail && index[sj] == fail {
					index[sj] = k;
					k += 1;
					queue.push_back(sj);
				}
			}
			rules.extend(row.iter().map(|&st| index[st]));
		}

		let end_states = self.end_states.iter().map(|st| index[*st]).collect();
		
		return RegexpDFA{rules, end_states, alphabet_index: self.alphabet_index.clone()};
	}

	/// Returns the shortest string accepted by exactly one of the two automata,
	/// or `None` if they are equivalent. Both automata are expected to be
	/// reduced and normalized, so that equivalence over the same alphabet shows
	/// up as identical tables.
	pub fn find_distinguishing_string(&self, other: &RegexpDFA) -> Option<String> {
		// Identical tables only certify equivalence when the alphabets match too.
		if self.alphabet_index == other.alphabet_index && self.rules == other.rules && self.end_states == other.end_states {
			return None;
		}

		let r1 = self.expand_alphabet(&other.alphabet_index);
		let r2 = other.expand_alphabet(&self.alphabet_index);
		let product = r1.build_product_automaton(&r2);
		let n = product.alphabet_index.len();
		let reverse_alphabet_index: HashMap<usize, char> = HashMap::from_iter(product.alphabet_index.iter().map(|(&k, &v)| (v, k)));

		// Breadth-first search for the shortest path from the start state to any
		// accepting state of the product automaton.
		let mut queue = VecDeque::from([(0, "".to_string())]);
		let mut visited = HashSet::new();
		while !queue.is_empty() {
			let (state, acc) = queue.pop_front().unwrap();
			if product.end_states.contains(&state) {
				return Some(acc);
			}
			for (i, target) in product.rules[state*n..(state+1)*n].iter().enumerate() {
				if !visited.contains(target) {
					queue.push_back((*target, acc.clone()+&String::from(reverse_alphabet_index[&i])));
					visited.insert(target);
				}
			}
		}

		// With reduced, normalized inputs the equality check above already caught
		// equivalent automata, so a distinguishing string must have been found.
		panic!();
	}

	/// Hopcroft-style partition refinement: starts from the accepting versus
	/// non-accepting split and refines it until every block contains only
	/// mutually indistinguishable states.
	fn find_equivalent_states(&self) -> Vec<HashSet<usize>> {
		let n = self.alphabet_index.len();
		let m = self.rules.len() / n;
		let mut inverse_rules = vec![HashSet::<usize>::new();m*n];

		for i in 0..m {
			for j in 0..n {
				let target = self.rules[i*n + j];
				inverse_rules[target*n + j].insert(i);
			}
		}

		// `set_bag` stores every block ever created; `res` holds the indices of
		// the blocks forming the current partition and `work` the blocks still
		// waiting to be used as splitters.
		let mut set_bag = vec![
			self.end_states.clone(),
			HashSet::from_iter(0..m).difference(&self.end_states).copied().collect()
		];
		let mut res = HashSet::<usize>::from([0, 1]);
		let mut work = HashSet::<usize>::from([0, 1]);

		while !work.is_empty() {
			let key = *work.iter().next().unwrap();
			work.remove(&key);
			let target_set = set_bag[key].clone();
			for j in 0..n {
				let source_set = HashSet::<usize>::from_iter(target_set.iter().flat_map(|&t| inverse_rules[t*n+j].iter()).copied());
				for k in res.clone().into_iter() {
					let part = &set_bag[k];
					let intersection = part.intersection(&source_set).copied().collect::<HashSet<usize>>();
					let diff = part.difference(&source_set).copied().collect::<HashSet<usize>>();
					if intersection.is_empty() || diff.is_empty() {
						continue;
					}
					res.remove(&k);
					let ki = set_bag.len();
					set_bag.push(intersection);
					res.insert(ki);
					let kd = set_bag.len();
					set_bag.push(diff);
					res.insert(kd);
					if work.contains(&k) {
						work.remove(&k);
						work.insert(ki);
						work.insert(kd);
					} else if set_bag[ki].len() < set_bag[kd].len() {
						work.insert(ki);
					} else {
						work.insert(kd);
					}
				}
			}
		}

		return Vec::from_iter(res.into_iter().map(|k| std::mem::take(&mut set_bag[k])));
	}

	/// Merges each block of equivalent states into a single state (labelled by
	/// its smallest member) and renumbers the survivors densely.
	fn collapse_states(&self, partition: Vec<HashSet<usize>>) -> RegexpDFA {
		let n = self.alphabet_index.len();
		let m = self.rules.len()/n;
		let mut rules = vec![];

		// eq_mapping[state] = representative (smallest) state of its block.
		let mut eq_mapping: Vec<usize> = (0..m).collect();
		for eq_set in partition.into_iter() {
			let mut eq_vec = Vec::from_iter(eq_set.into_iter());
			eq_vec.sort();
			let label = eq_vec[0];
			for st in eq_vec.into_iter() {
				eq_mapping[st] = label;
			}
		}

		// discard_mapping renumbers the surviving representatives densely.
		let mut discard_mapping: Vec<usize> = (0..m).collect();
		let mut discard_count = 0;

		for si in 0..m {
			if eq_mapping[si] != si {
				discard_count += 1;
				continue;
			}
			discard_mapping[si] = si-discard_count;
			rules.extend(self.rules[si*n..(si+1)*n].iter().map(|&st| eq_mapping[st]));
		}

		rules = rules.into_iter().map(|st| discard_mapping[st]).collect();
		let end_states = self.end_states.iter().map(|st| discard_mapping[eq_mapping[*st]]).collect();

		return RegexpDFA{rules, end_states, alphabet_index: self.alphabet_index.clone()};
	}

	/// Rebuilds the automaton over the union of both alphabets; characters the
	/// original alphabet lacks lead to a fresh fail state. The result is
	/// reduced and normalized again.
	fn expand_alphabet(&self, alphabet_index: &HashMap<char, usize>) -> RegexpDFA {
		if *alphabet_index == self.alphabet_index {
			return self.clone();
		}

		let n1 = self.alphabet_index.len();
		let m = self.rules.len() / n1;

		let combined_alphabet: HashSet<char> = HashSet::from_iter(self.alphabet_index.keys().chain(alphabet_index.keys()).copied());
		let mut combined_vec = Vec::from_iter(combined_alphabet.into_iter());
		combined_vec.sort();
		let combined_index = HashMap::from_iter(combined_vec.iter().enumerate().map(|(i, c)| (*c, i)));
		let conversion_index: HashMap<usize, usize> = HashMap::from_iter(self.alphabet_index.iter().map(|(k, v)| (combined_index[k], *v)));
		let n2 = combined_vec.len();

		let rules: Vec<usize> = (0..m*n2).map(
			|i| {
				let (j, k) = (i/n2, i%n2);
				return if conversion_index.contains_key(&k) {
					self.rules[j*n1 + conversion_index[&k]]
				} else {m};
			}
		).chain(std::iter::repeat(m).take(n2)).collect();

		return RegexpDFA{rules, end_states: self.end_states.clone(), alphabet_index: combined_index}.reduce().normalize();
	}

	/// Builds the product automaton of the two DFAs (which must share an
	/// alphabet); a product state accepts iff exactly one component accepts, so
	/// the result recognizes the symmetric difference of the two languages.
	fn build_product_automaton(&self, other: &RegexpDFA) -> RegexpDFA {
		let n = self.alphabet_index.len();
		let m = other.rules.len() / n;
		let k = self.rules.len() / n;

		let mut rules = vec![];
		let mut end_states = HashSet::new();

		for s1 in 0..k {
			let row1 = &self.rules[s1*n..(s1+1)*n];
			for s2 in 0..m {
				let row2 = &other.rules[s2*n..(s2+1)*n];
				rules.extend(row1.iter().zip(row2.iter()).map(|(x, y)| x*m + y));
				if (self.end_states.contains(&s1)) ^ (other.end_states.contains(&s2)) {
					end_states.insert(s1*m + s2);
				}
			}
		}

		return RegexpDFA{rules, end_states, alphabet_index: self.alphabet_index.clone()}.reduce().normalize();
	}
}
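
// Usage sketch for the DFA pipeline, assuming plain literal patterns are
// accepted by the parser. The distinguishing-string check only relies on
// `reduce().normalize()` preserving the language and on the product
// construction above.
#[cfg(test)]
mod dfa_usage_sketch {
	use super::*;

	#[test]
	fn determinized_dfa_agrees_with_the_nfa() {
		let r = Regexp::new(&String::from("abc")).unwrap();
		let d = r.determinize().reduce().normalize();
		assert_eq!(r.eval(String::from("abc")), d.eval(String::from("abc")));
		assert!(d.eval(String::from("abc")));
		assert!(!d.eval(String::from("ab")));
		assert!(!d.eval(String::from("abcd")));
	}

	#[test]
	fn distinguishing_string_is_accepted_by_exactly_one_automaton() {
		let d1 = Regexp::new(&String::from("ab")).unwrap().determinize().reduce().normalize();
		let d2 = Regexp::new(&String::from("ba")).unwrap().determinize().reduce().normalize();
		// An automaton is equivalent to itself.
		assert!(d1.find_distinguishing_string(&d1).is_none());
		// Different languages yield a shortest witness matched by exactly one side.
		let witness = d1.find_distinguishing_string(&d2).unwrap();
		assert!(d1.eval(witness.clone()) ^ d2.eval(witness));
	}
}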