Files @ c532c271f407
Location: Regular-Expresso/src/regexp.rs
c532c271f407: refactoring: rules as a vector instead of a hashmap

use std::{collections::{HashMap, HashSet, VecDeque}, iter};

mod token;
pub use token::ParsingError;
use token::parse;

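// Sentinel state identifiers: START_NFA is the virtual initial state of the NFA (it does not
// correspond to any position in the pattern), START_DFA is the initial state of the
// determinized automaton, and FAIL marks a missing transition in the dense DFA table.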
const START_NFA: usize = usize::MAX;
const START_DFA: usize = 0;
const FAIL: usize = usize::MAX>>1;

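/// Builds a canonical string key for a set of states (sorted, comma-separated),
/// so that equal multistates map to the same key during determinization.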
fn encode_set(set: &HashSet<usize>) -> String {
	let mut v = Vec::from_iter(set.iter());
	v.sort();
	let res: Vec<String> = v.into_iter().map(|x| x.to_string()).collect();
	return res.join(",");
}

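/// A nondeterministic position automaton built directly from the pattern: states are indices
/// of characters in the pattern (plus the virtual START_NFA state), `rules` maps
/// (state, character) to the set of successor states, `end_states` holds the accepting states
/// and `alphabet` the sorted characters occurring in the pattern.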
#[derive(Debug)]
pub struct Regexp {
	rules: HashMap<(usize, char), HashSet<usize>>,
	end_states: HashSet<usize>,
	alphabet: Vec<char>
}

impl Regexp {
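	/// Builds the NFA from the parsed pattern: start transitions come from `r.list_first()`,
	/// transitions between positions from `r.list_neighbours()`, and accepting states from
	/// `r.list_last()` (plus the start state itself when the pattern can match the empty string).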
	pub fn new(pattern: &String) -> Result<Regexp, ParsingError> {
		let r = parse(pattern, 0)?;
		let pattern_chars = Vec::from_iter(pattern.chars());
		let mut rules: HashMap<(usize, char), HashSet<usize>> = HashMap::new();
		let mut alphabet: HashSet<char> = HashSet::new();
		
		// transitions from the virtual start state to every position that can begin a match
		for i in r.list_first() {
			let c = pattern_chars[i];
			alphabet.insert(c);
			rules.entry((START_NFA, c)).or_default().insert(i);
		}

		// transitions between positions that may be adjacent in a match
		for (i, j) in r.list_neighbours() {
			let c = pattern_chars[j];
			alphabet.insert(c);
			rules.entry((i, c)).or_default().insert(j);
		}

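		// accepting states are the positions that can end a match; if the whole pattern
		// can match the empty string, the virtual start state accepts as well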
		let mut end_states = HashSet::from_iter(r.list_last().into_iter());
		if r.is_skippable() {
			end_states.insert(START_NFA);
		}

		let mut alphabet_vec = Vec::from_iter(alphabet.into_iter());
		alphabet_vec.sort();

		return Ok(Regexp{rules, end_states, alphabet: alphabet_vec});
	}

	/// Simulates the NFA on `s`, tracking the set of currently active states.
	pub fn eval(&self, s: String) -> bool {
		let mut multistate = HashSet::from([START_NFA]);

		for c in s.chars() {
			let mut new_multistate = HashSet::new();

			for state in multistate {
				// follow both the literal transition and the '.' wildcard transition
				if let Some(x) = self.rules.get(&(state, c)) {
					new_multistate.extend(x.iter().copied());
				}
				if let Some(x) = self.rules.get(&(state, '.')) {
					new_multistate.extend(x.iter().copied());
				}
			}
			multistate = new_multistate;
		}

		return multistate.iter().any(|x| self.end_states.contains(x));
	}

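	/// Determinizes the NFA by the subset construction. DFA states correspond to sets of NFA
	/// states, encoded via `encode_set`; the result stores transitions in a flat vector indexed
	/// by `state * |alphabet| + character index`, with missing transitions marked as FAIL.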
	pub fn determinize(&self) -> RegexpDFA {
		let alphabet_index: HashMap<char, usize> = self.alphabet.iter().enumerate().map(|(i, c)| (*c, i)).collect();
		let n = alphabet_index.len();
		let mut compact_rules = vec![FAIL; n];
		let mut end_states: HashSet<usize> = HashSet::new();
		if self.end_states.contains(&START_NFA) {end_states.insert(START_DFA);}

		// string hash -> single int DFA state
		let mut index_new = HashMap::from([(START_NFA.to_string(), START_DFA)]);
		// string hash -> HashSet NFA multistate
		let mut index_multi = HashMap::from([(START_NFA.to_string(), HashSet::from([START_NFA]))]);
		let mut stack = Vec::from([START_NFA.to_string()]);

		while !stack.is_empty() {
			let state_hash = stack.pop().unwrap();
			let multistate = &index_multi[&state_hash];
			let mut new_rules: HashMap<char, HashSet<usize>> = HashMap::new();

			// for each character, collect the union of NFA successors reachable from the multistate
			for (key, targets) in self.rules.iter().filter(|(key, _)| multistate.contains(&key.0)) {
				new_rules.entry(key.1).or_default().extend(targets.iter().copied());
			}

			for (c, target_set) in new_rules.into_iter() {
				let target_hash = encode_set(&target_set);
				let is_end = target_set.iter().any(|st| self.end_states.contains(st));
				if !index_new.contains_key(&target_hash) {
					let target_new = index_new.len();
					index_new.insert(target_hash.clone(), target_new);
					index_multi.insert(target_hash.clone(), target_set);
					compact_rules.extend(iter::repeat(FAIL).take(n));
					stack.push(target_hash.clone());
				}
				compact_rules[index_new[&state_hash]*n + alphabet_index[&c]] = index_new[&target_hash];
				if is_end {
					end_states.insert(index_new[&target_hash]);
				}
			}
		}

		return RegexpDFA{rules: compact_rules, end_states, alphabet_index};
	}
}

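/// A deterministic automaton with a dense transition table: `rules[state*n + c]` is the
/// successor of `state` on the character with index `c` in `alphabet_index` (n = alphabet size).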
pub struct RegexpDFA {
	rules: Vec<usize>,
	end_states: HashSet<usize>,
	alphabet_index: HashMap<char, usize>
}

impl RegexpDFA {
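	/// Runs the DFA on `s`; a character outside the alphabet or a FAIL transition rejects immediately.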
	pub fn eval(&self, s: String) -> bool {
		let n = self.alphabet_index.len();
		let mut state = START_DFA;

		for c in s.chars() {
			if let Some(ci) = self.alphabet_index.get(&c) {
				state = self.rules[state*n + ci];
			} else {
				return false;
			}
			if state == FAIL {
				return false;
			}
		}

		return self.end_states.contains(&state);
	}

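	/// Returns an equivalent DFA with equivalent states merged (a minimization step).
	/// The degenerate case of an empty alphabet is returned as a plain copy.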
	pub fn reduce(&self) -> RegexpDFA {
		if self.alphabet_index.is_empty() {
			return RegexpDFA{rules: self.rules.clone(), end_states: self.end_states.clone(), alphabet_index: self.alphabet_index.clone()};
		}
		let equivalents = self.find_equivalent_states();
		return self.collapse_states(equivalents);
	}

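	/// Renumbers the states in breadth-first order from the start state, giving the automaton
	/// a canonical transition table (states never reached get no row in the new table).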
	pub fn normalize(&self) -> RegexpDFA {
		let n = self.alphabet_index.len();
		if n == 0 {
			return RegexpDFA{rules: self.rules.clone(), end_states: self.end_states.clone(), alphabet_index: self.alphabet_index.clone()}; 
		}
		let m = self.rules.len()/n;
		let mut index: Vec<usize> = vec![FAIL;m];
		index[0] = 0;
		let mut queue = VecDeque::from([START_DFA]);

		let mut rules = vec![];
		let mut k = 1;

		while !queue.is_empty() {
			let si = queue.pop_front().unwrap();
			let row = &self.rules[si*n..(si+1)*n];
			for &sj in row {
				if sj != FAIL && index[sj] == FAIL {
					index[sj] = k;
					k += 1;
					queue.push_back(sj);
				}
			}
			rules.extend(row.iter().map(|&st| if st != FAIL {index[st]} else {FAIL}));
		}

		let end_states = self.end_states.iter().map(|st| index[*st]).collect();
		
		return RegexpDFA{rules, end_states, alphabet_index: self.alphabet_index.clone()};
	}

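	/// Finds pairs of equivalent states by fixed-point refinement: start from all pairs that
	/// agree on acceptance, then repeatedly discard pairs whose transitions on some character
	/// lead to states not known to be equivalent.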
	fn find_equivalent_states(&self) -> Vec<(usize, usize)> {
		let n = self.alphabet_index.len();
		let state_vec: Vec<usize> = (0..self.rules.len()/n).collect();
		let mut equivalents = HashSet::new();
		state_vec.iter().enumerate().for_each(|(i, s1)| {
			equivalents.extend(
				state_vec[i+1..].iter()
				.filter(|s2| !(self.end_states.contains(s1)^self.end_states.contains(s2)))
				.map(|s2| (*s1, *s2))
			);
		});

		let mut m = usize::MAX;
		while equivalents.len() < m {
			m = equivalents.len();
			equivalents = equivalents.iter().filter(|(s1, s2)| {
				!(0..n).any(|ci| {
					let t1 = self.rules[s1*n + ci];
					let t2 = self.rules[s2*n + ci];
					let key = (t1.min(t2), t2.max(t1));
					return t1 != t2 && !equivalents.contains(&key);
				})
			}).copied().collect();
		}

		return Vec::from_iter(equivalents.into_iter());
	}

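	/// Merges every state into the smallest state it is equivalent to and renumbers the
	/// remaining states densely, rebuilding the transition table and the accepting set.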
	fn collapse_states(&self, equivalents: Vec<(usize, usize)>) -> RegexpDFA {
		let n = self.alphabet_index.len();
		let m = self.rules.len()/n;
		let mut rules = Vec::new();

		let mut eq_mapping: Vec<usize> = (0..m).collect();
		for (s1, s2) in equivalents.into_iter() {
			eq_mapping[s2] = eq_mapping[s2].min(s1);
		}

		let mut discard_mapping: Vec<usize> = (0..m).collect();
		let mut discard_count = 0;

		for si in 0..m {
			if eq_mapping[si] != si {
				discard_count += 1;
				continue;
			}
			discard_mapping[si] = si-discard_count;
			rules.extend(self.rules[si*n..(si+1)*n].iter().map(|&st| if st!=FAIL {eq_mapping[st]} else {FAIL}));
		}

		rules = rules.into_iter().map(|st| if st!=FAIL {discard_mapping[st]} else {FAIL}).collect();
		let end_states = self.end_states.iter().map(|st| discard_mapping[eq_mapping[*st]]).collect();

		return RegexpDFA{rules, end_states, alphabet_index: self.alphabet_index.clone()};
	}
}