├── cli_demo ├── debug │ ├── stdin.txt │ └── inputs.txt ├── Cargo.toml └── src │ └── main.rs ├── .cargo └── config ├── hanzi_lookup.gif ├── Cargo.toml ├── hanzi_lookup ├── data │ └── mmah.bin ├── Cargo.toml └── src │ ├── entities.rs │ ├── lib.rs │ ├── match_collector.rs │ ├── cubic_curve_2d.rs │ ├── analyzed_character.rs │ └── matcher.rs ├── web_demo ├── hanzi_lookup_bg.wasm ├── worker.js ├── index.html ├── hanzi_lookup.js ├── drawingBoard.js └── jquery-3.4.1.min.js ├── mmah_json_convert ├── data │ └── mmah.bin ├── Cargo.toml └── src │ └── main.rs ├── .gitignore ├── .travis.yml ├── .vscode └── launch.json ├── README.md ├── LICENSE-APL └── LICENSE /cli_demo/debug/stdin.txt: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.cargo/config: -------------------------------------------------------------------------------- 1 | [build] 2 | target-dir = ".target" 3 | -------------------------------------------------------------------------------- /hanzi_lookup.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gugray/hanzi_lookup/HEAD/hanzi_lookup.gif -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["cli_demo", "hanzi_lookup"] 3 | exclude = ["mmah_json_convert"] 4 | -------------------------------------------------------------------------------- /hanzi_lookup/data/mmah.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gugray/hanzi_lookup/HEAD/hanzi_lookup/data/mmah.bin -------------------------------------------------------------------------------- /web_demo/hanzi_lookup_bg.wasm: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/gugray/hanzi_lookup/HEAD/web_demo/hanzi_lookup_bg.wasm -------------------------------------------------------------------------------- /mmah_json_convert/data/mmah.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gugray/hanzi_lookup/HEAD/mmah_json_convert/data/mmah.bin -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | .target/ 3 | dist/ 4 | **/debug/stderr.txt 5 | **/debug/stdout.txt 6 | Cargo.lock 7 | **/*.rs.bk 8 | -------------------------------------------------------------------------------- /cli_demo/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "cli_demo" 3 | version = "0.1.0" 4 | authors = ["gugray "] 5 | edition = "2018" 6 | 7 | [dependencies] 8 | hanzi_lookup = { path = "../hanzi_lookup" } 9 | serde = { version = "1.0", features = ["derive"] } 10 | serde_derive = "1.0.90" 11 | serde_json = "1.0" 12 | -------------------------------------------------------------------------------- /mmah_json_convert/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "mmah_json_convert" 3 | version = "0.1.0" 4 | authors = ["gugray "] 5 | edition = "2018" 6 | 7 | [dependencies] 8 | serde = { version = "1.0", features = ["derive"] } 9 | serde_json = "1.0" 10 | base64 = "0.10.1" 11 | bincode = "1.1.3" 12 | serde_derive = "1.0.90" 13 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: rust 2 | 3 | rust: 4 | - nightly 5 | 6 | matrix: 7 | include: 8 | - language: rust 9 | rust: nightly 10 | name: "check wasm32 support" 11 | install: rustup target add 
wasm32-unknown-unknown 12 | 13 | cache: cargo 14 | 15 | script: 16 | - cd hanzi_lookup 17 | - cargo build --target wasm32-unknown-unknown --release 18 | - cargo test 19 | -------------------------------------------------------------------------------- /web_demo/worker.js: -------------------------------------------------------------------------------- 1 | onmessage = (e) => { 2 | if ("wasm_uri" in e.data) { 3 | importScripts("hanzi_lookup.js"); 4 | wasm_bindgen(e.data.wasm_uri).then(() => { 5 | postMessage({ what: "loaded" }); 6 | }); 7 | } 8 | else if ("strokes" in e.data) { 9 | const json = wasm_bindgen.lookup(e.data.strokes, e.data.limit); 10 | const matches = JSON.parse(json); 11 | postMessage({ what: "lookup", matches: matches }); 12 | } 13 | }; 14 | -------------------------------------------------------------------------------- /hanzi_lookup/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "hanzi_lookup" 3 | version = "1.0.0" 4 | authors = ["gugray "] 5 | 6 | [lib] 7 | crate-type = ["lib", "cdylib"] 8 | 9 | [dependencies] 10 | wasm-bindgen = { version = "0.2.42", features = ["serde-serialize"] } 11 | serde = { version = "1.0", features = ["derive"] } 12 | bincode = "1.1.3" 13 | serde_derive = "1.0.90" 14 | serde_json = "1.0" 15 | 16 | [build-dependencies] 17 | wasm-bindgen-cli = "= 0.2.42" 18 | -------------------------------------------------------------------------------- /hanzi_lookup/src/entities.rs: -------------------------------------------------------------------------------- 1 | // One analyzed stroke 2 | pub struct AnalyzedStroke<'a> { 3 | // The stroke's points 4 | pub points: &'a Vec, 5 | // Indexes of pivot points delimiting substrokes 6 | pub pivot_indexes: Vec, 7 | // The substrokes delineated by the identified pivot points 8 | pub sub_strokes: Vec, 9 | } 10 | 11 | // A single analyzed substroke 12 | #[derive(Debug, Clone, Copy)] 13 | pub struct SubStroke { 14 | // The 
substroke's direction; normalized into 0..256 from 0..2*PI 15 | pub direction: f32, 16 | // The substroke's length, normalized into 0..256, from 0..1 17 | pub length: f32, 18 | // The substroke centerpoint's X coordinate, in 0..256 19 | pub center_x: f32, 20 | // The substroke centerpoint's Y coordinate, in 0..256 21 | pub center_y: f32, 22 | } -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "type": "lldb", 6 | "request": "launch", 7 | "name": "Debug-cli-demo", 8 | "cargo": { 9 | "args": [ "build" ] 10 | }, 11 | "program": "${workspaceRoot}/.target/debug/cli_demo", 12 | "args": [], 13 | "stdio": { 14 | "stdin": "cli_demo/debug/stdin.txt", 15 | "stdout": "cli_demo/debug/stdout.txt", 16 | "stderr": "cli_demo/debug/stderr.txt" 17 | }, 18 | "cwd": "${workspaceRoot}/cli_demo", 19 | "sourceLanguages": [ 20 | "rust" 21 | ] 22 | }, 23 | { 24 | "type": "lldb", 25 | "request": "launch", 26 | "name": "Debug-lib-tests", 27 | "cargo": { 28 | "args": [ "test", "--no-run" ], 29 | "filter": { "kind": "bin" } 30 | }, 31 | "program": "${workspaceRoot}/.target/debug/hanzi_lookup-012bdc0c20f3fa85.exe", 32 | "args": [], 33 | "cwd": "${workspaceRoot}/hanzi_lookup", 34 | "sourceLanguages": [ 35 | "rust" 36 | ] 37 | } 38 | ] 39 | } -------------------------------------------------------------------------------- /cli_demo/debug/inputs.txt: -------------------------------------------------------------------------------- 1 | [[[70,124],[71,124],[79,124],[104,124],[119,124],[132,125],[151,126],[168,126],[169,126],[189,125],[191,124],[191,124]]] 2 | 
[[[76,127],[77,127],[84,127],[97,128],[119,128],[125,129],[138,130],[147,130],[153,131],[154,131],[158,131],[162,131],[167,131],[168,131],[169,131],[169,131]],[[129,60],[129,62],[128,74],[128,102],[128,118],[129,143],[130,162],[130,170],[130,178],[131,184],[131,188],[131,193],[131,196],[131,198],[131,203],[131,203]]] 3 | [[[86,65],[98,66],[146,69],[152,69],[161,69],[166,69],[170,68],[170,68]],[[47,97],[48,97],[54,97],[89,103],[117,104],[146,101],[169,100],[176,98],[180,98],[184,98],[189,98],[193,98],[195,98],[195,98]],[[103,109],[103,110],[99,132],[91,156],[70,180],[56,190],[53,192]],[[143,105],[143,106],[142,114],[140,134],[138,149],[138,160],[138,167],[140,174],[144,182],[150,186],[155,190],[161,193],[166,194],[172,196],[188,197],[193,197],[197,197],[206,197],[206,196],[207,196],[208,196],[208,194],[204,182],[203,174],[202,174],[202,175],[202,176]]] 4 | [[[76,32],[76,33],[75,37],[73,43],[70,51],[67,58],[64,66],[61,72],[57,77],[52,82],[50,85],[50,85]],[[68,58],[69,58],[76,58],[90,59],[100,60],[110,62],[118,62],[132,62],[136,62],[141,62],[145,62],[146,62],[148,62],[148,62]],[[68,95],[69,95],[77,96],[96,96],[105,96],[110,96],[126,97],[144,98],[146,98],[154,98],[156,98],[156,98]],[[59,126],[60,126],[67,126],[90,130],[107,131],[120,132],[134,132],[149,132],[151,132],[156,132],[158,133],[158,134],[156,142],[154,147],[153,155],[152,160],[151,166],[150,172],[150,179],[150,183],[150,186],[150,190],[151,194],[152,199],[156,204],[158,206],[162,209],[167,213],[171,215],[175,216],[184,220],[192,222],[196,223],[200,224],[204,225],[208,225],[210,225],[214,225],[218,223],[218,222],[216,214],[214,208],[214,207],[214,207]],[[79,147],[82,148],[87,155],[91,161],[91,161]],[[124,148],[123,148],[116,155],[110,162],[108,164],[108,164]],[[73,175],[75,175],[88,178],[98,180],[104,180],[111,182],[117,182],[122,182],[125,182]],[[100,148],[100,151],[102,172],[102,195],[103,204],[103,211],[104,216],[104,220],[104,224]],[[94,189],[93,189],[81,204],[72,210],[71,210]],[[109,192],[112,194],[120,199
],[132,208],[133,210],[133,210]]] 5 | -------------------------------------------------------------------------------- /hanzi_lookup/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![allow(dead_code)] 2 | #![allow(unused_imports)] 3 | 4 | extern crate wasm_bindgen; 5 | extern crate serde_derive; 6 | extern crate bincode; 7 | 8 | mod analyzed_character; 9 | mod cubic_curve_2d; 10 | mod entities; 11 | mod match_collector; 12 | mod matcher; 13 | 14 | use serde_derive::{Deserialize, Serialize}; 15 | use std::cell::RefCell; 16 | use wasm_bindgen::prelude::*; 17 | 18 | use match_collector::*; 19 | use analyzed_character::*; 20 | use match_collector::*; 21 | use matcher::*; 22 | 23 | #[derive(Serialize, Deserialize)] 24 | struct Action { 25 | action: String, 26 | points: Vec>, 27 | } 28 | 29 | #[derive(Serialize, Deserialize)] 30 | struct Input { 31 | char: String, 32 | ix: i64, 33 | duration: i64, 34 | actions: Vec, 35 | } 36 | 37 | 38 | #[wasm_bindgen] 39 | pub fn lookup(input: &JsValue, limit: usize) -> String { 40 | // Input is vector of vector of vector of numbers - how strokes and their points are represented in JS 41 | let input: Vec>> = input.into_serde().unwrap(); 42 | // Convert to typed form: vector of strokes 43 | let mut strokes: Vec = Vec::with_capacity(input.len()); 44 | for i in 0..input.len() { 45 | let mut stroke = Stroke { 46 | points: Vec::with_capacity(input[i].len()), 47 | }; 48 | for j in 0..input[i].len() { 49 | stroke.points.push(Point { 50 | x: input[i][j][0].round() as u8, 51 | y: input[i][j][1].round() as u8, 52 | }); 53 | } 54 | strokes.push(stroke); 55 | } 56 | let lookup_res = match_typed(&strokes, limit); 57 | serde_json::to_string(&lookup_res).unwrap() 58 | } 59 | 60 | #[derive(Debug, Clone, Copy, PartialEq)] 61 | pub struct Point { 62 | pub x: u8, 63 | pub y: u8, 64 | } 65 | 66 | #[derive(Debug)] 67 | pub struct Stroke { 68 | pub points: Vec, 69 | } 70 | 71 | #[derive(Serialize, 
Deserialize, Debug, Clone, Copy, PartialEq)] 72 | pub struct Match { 73 | pub hanzi: char, 74 | pub score: f32, 75 | } 76 | 77 | thread_local!(static MATCHER: RefCell = RefCell::new(Matcher::new())); 78 | 79 | pub fn match_typed(strokes: &Vec, limit: usize) -> Vec { 80 | let mut res: Vec = Vec::with_capacity(limit); 81 | let mut collector = MatchCollector::new(&mut res, limit); 82 | MATCHER.with(|matcher| { 83 | matcher.borrow_mut().lookup(strokes, &mut collector); 84 | }); 85 | res 86 | } 87 | -------------------------------------------------------------------------------- /cli_demo/src/main.rs: -------------------------------------------------------------------------------- 1 | extern crate serde_derive; 2 | extern crate hanzi_lookup; 3 | 4 | use std::time::{Instant}; 5 | use std::fs::File; 6 | use std::io::{BufRead, BufReader}; 7 | use std::fmt::Write; 8 | use hanzi_lookup::{Stroke, Point}; 9 | 10 | const ITERS: usize = 10; 11 | 12 | 13 | fn parse_sample(str_strokes: &str) -> Vec { 14 | let vec_strokes: Vec>> = serde_json::from_str(str_strokes).unwrap(); 15 | let mut strokes: Vec = Vec::new(); 16 | for vec_stroke in &vec_strokes { 17 | let mut points: Vec = Vec::new(); 18 | for vec_point in vec_stroke { 19 | points.push(Point { 20 | x: vec_point[0], 21 | y: vec_point[1], 22 | }); 23 | } 24 | strokes.push(Stroke { 25 | points: points, 26 | }); 27 | } 28 | strokes 29 | } 30 | 31 | fn read_inputs(fname: &str) -> Vec> { 32 | let mut res: Vec> = Vec::new(); 33 | let file = File::open(fname).expect("Failed to open file."); 34 | for line in BufReader::new(file).lines() { 35 | let line = line.expect("Line huh?"); 36 | if line.is_empty() { continue; } 37 | let strokes = parse_sample(&line); 38 | res.push(strokes); 39 | } 40 | return res; 41 | } 42 | 43 | fn clone_stroke(stroke: &Stroke) -> Stroke { 44 | let mut res = Stroke { 45 | points: Vec::with_capacity(stroke.points.len()), 46 | }; 47 | for i in 0..stroke.points.len() { 48 | res.points.push(Point { 49 | x: 
stroke.points[i].x, 50 | y: stroke.points[i].y, 51 | }); 52 | } 53 | res 54 | } 55 | 56 | fn incremental_replay(chars: &Vec>) -> Vec> { 57 | let mut res: Vec> = Vec::new(); 58 | for i in 0..chars.len() { 59 | let this_char = &chars[i]; 60 | for j in 1..this_char.len() { 61 | res.push(Vec::new()); 62 | let strokes: &mut Vec = res.last_mut().unwrap(); 63 | for k in 0 ..j { 64 | strokes.push(clone_stroke(&this_char[k])); 65 | } 66 | } 67 | } 68 | res 69 | } 70 | 71 | fn main() { 72 | println!("Loading evaluation data."); 73 | let inputs = read_inputs("debug/inputs.txt"); 74 | println!("Loaded {} inputs.", inputs.len()); 75 | let inputs = incremental_replay(&inputs); 76 | println!("Generated {} inputs with stroke-by-stroke replay of characters.", inputs.len()); 77 | println!("Running {} lookup iterations.", ITERS); 78 | for _ in 0..ITERS { 79 | for input in &inputs { 80 | let start = Instant::now(); 81 | let matches = hanzi_lookup::match_typed(&input, 8); 82 | let duration = start.elapsed(); 83 | let mut chars = String::new(); 84 | for i in 0..matches.len() { 85 | write!(&mut chars, "{}", matches[i].hanzi).unwrap(); 86 | } 87 | println!("{:?} {} strokes Chars: {}", duration, input.len(), chars); 88 | } 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /mmah_json_convert/src/main.rs: -------------------------------------------------------------------------------- 1 | extern crate serde_derive; 2 | extern crate base64; 3 | extern crate bincode; 4 | 5 | use serde_derive::{Serialize, Deserialize}; 6 | use serde_json::{Result, Value}; 7 | use std::fs::File; 8 | 9 | #[derive(Serialize, Deserialize, PartialEq, Debug)] 10 | struct SubStrokeTriple { 11 | dir: u8, 12 | length: u8, 13 | center: u8, 14 | } 15 | 16 | #[derive(Serialize, Deserialize, PartialEq, Debug)] 17 | struct CharData { 18 | hanzi: char, 19 | stroke_count: u16, 20 | substrokes: Vec, 21 | } 22 | 23 | fn parse_json_strokes(fname: &str) -> Result> { 24 | let file = 
File::open(fname) 25 | .expect("file should open read only"); 26 | let json: serde_json::Value = serde_json::from_reader(file) 27 | .expect("file should be proper JSON"); 28 | 29 | let mut res: Vec = Vec::new(); 30 | 31 | // "substrokes" member of json is one u8 blob in base64 32 | let mut bytes: Vec = Vec::new(); 33 | if let Value::String(substrokes) = &json["substrokes"] { 34 | bytes = base64::decode(&substrokes).unwrap(); 35 | } 36 | 37 | // "chars" member of json lists concise info about characters 38 | if let Value::Array(chars) = &json["chars"] { 39 | for x in chars { 40 | // Each character is an array of four items like this: ["丿",1,2,0] 41 | // Character / Stroke Count / Substroke Count / First-substroke-index in byte array 42 | let mut char_data = CharData { 43 | hanzi: ' ', 44 | stroke_count: 0, 45 | substrokes: Vec::new(), 46 | }; 47 | // Get our character 48 | if let Value::String(chr) = &x[0] { 49 | let first_char = chr.chars().next().unwrap(); 50 | char_data.hanzi = first_char; 51 | } 52 | // Stroke count 53 | if let Value::Number(val) = &x[1] { 54 | char_data.stroke_count = val.as_u64().unwrap() as u16; 55 | } 56 | // Substroke count 57 | let mut substroke_count: u64 = 0; 58 | if let Value::Number(val) = &x[2] { 59 | substroke_count = val.as_u64().unwrap(); 60 | } 61 | // Start in byte array 62 | let mut start_ix: u64 = 0; 63 | if let Value::Number(val) = &x[3] { 64 | start_ix = val.as_u64().unwrap(); 65 | } 66 | // Copy out the relevant triplets (as many as there are substrokes) 67 | for cnt in 0..substroke_count { 68 | let sst = SubStrokeTriple { 69 | dir: bytes[(start_ix + cnt * 3) as usize], 70 | length: bytes[(start_ix + cnt * 3 + 1) as usize], 71 | center: bytes[(start_ix + cnt * 3 + 2) as usize], 72 | }; 73 | char_data.substrokes.push(sst); 74 | } 75 | // Append to result 76 | res.push(char_data); 77 | } 78 | } 79 | Ok(res) 80 | } 81 | 82 | fn main() { 83 | let char_data = parse_json_strokes("./data/mmah.json").expect("Failed to parse json."); 
84 | let mut f = File::create("./data/mmah.bin").expect("Failed to create binary file."); 85 | bincode::serialize_into(&mut f, &char_data).expect("Failed to serialize into binary file."); 86 | } 87 | -------------------------------------------------------------------------------- /hanzi_lookup/src/match_collector.rs: -------------------------------------------------------------------------------- 1 | use super::Match; 2 | 3 | pub struct MatchCollector<'a> { 4 | limit: usize, 5 | matches: &'a mut Vec, 6 | } 7 | 8 | impl<'a> MatchCollector<'a> { 9 | pub fn new(matches: &mut Vec, limit: usize) -> MatchCollector { 10 | assert!(limit > 0, "Expected a positive number for the maximum number of matches."); 11 | assert!(matches.len() == 0, "The pre-existing matches vector must be empty."); 12 | MatchCollector { 13 | limit: limit, 14 | matches: matches, 15 | } 16 | } 17 | 18 | fn remove_existing_lower(&mut self, mc: &Match) -> bool { 19 | let mut ix: i32 = -1; 20 | for i in 0..self.matches.len() { 21 | if self.matches[i].hanzi == mc.hanzi { 22 | ix = i as i32; 23 | break; 24 | } 25 | } 26 | // Not there yet: we're good, match doesn't need to be skipped 27 | if ix == -1 { 28 | return false; 29 | } 30 | // New score is not better: skip new match 31 | if mc.score <= self.matches[ix as usize].score { 32 | return true; 33 | } 34 | // Remove existing match; don't skip new. Means shifting array left. 35 | self.matches.remove(ix as usize); 36 | return false; 37 | } 38 | 39 | pub fn file_match(&mut self, mc: Match) { 40 | // Already at limit: don't bother if new match's score is smaller than current minimum 41 | if self.matches.len() == self.limit as usize && mc.score <= self.matches.last().unwrap().score { 42 | return; 43 | } 44 | // Remove if we already have this character with a lower score 45 | // If we get "true", we should skip new match (already there with higher score) 46 | if self.remove_existing_lower(&mc) { 47 | return; 48 | } 49 | // Where does new match go? 
(Keep array sorted largest score to smallest.) 50 | // Largest score is always at start of vector. 51 | let ix = self.matches.iter().position(|x| x.score < mc.score); 52 | match ix { 53 | Some(ix) => self.matches.insert(ix, mc), 54 | None => self.matches.push(mc) 55 | } 56 | // Beyond limit? Drop last item. 57 | if self.matches.len() > self.limit as usize { 58 | self.matches.pop(); 59 | } 60 | } 61 | 62 | } 63 | 64 | #[cfg(test)] 65 | mod tests { 66 | use super::*; 67 | use super::super::Match; 68 | 69 | //#[ignore] 70 | #[test] 71 | #[should_panic] 72 | fn test_new_fail1() { 73 | let mut matches: Vec = Vec::new(); 74 | let mut _collector = MatchCollector::new(&mut matches, 0); 75 | } 76 | 77 | //#[ignore] 78 | #[test] 79 | #[should_panic] 80 | fn test_new_fail2() { 81 | let mut matches: Vec = Vec::new(); 82 | matches.push(Match { 83 | hanzi: '我', 84 | score: 1.0, 85 | }); 86 | let mut _collector = MatchCollector::new(&mut matches, 1); 87 | } 88 | 89 | #[test] 90 | fn test_filing() { 91 | let mut matches: Vec = Vec::new(); 92 | let mut collector = MatchCollector::new(&mut matches, 3); 93 | let mc1 = Match { 94 | hanzi: '我', 95 | score: 0.8, 96 | }; 97 | let mc2 = Match { 98 | hanzi: '你', 99 | score: 0.9, 100 | }; 101 | let mc3 = Match { 102 | hanzi: '我', 103 | score: 0.7, 104 | }; 105 | let mc4 = Match { 106 | hanzi: '他', 107 | score: 0.7, 108 | }; 109 | let mc5 = Match { 110 | hanzi: '鸡', 111 | score: 1.0, 112 | }; 113 | collector.file_match(mc1); 114 | collector.file_match(mc2); 115 | collector.file_match(mc3); 116 | collector.file_match(mc4); 117 | collector.file_match(mc5); 118 | assert_eq!(matches, [mc5, mc2, mc1]); 119 | } 120 | } 121 | 122 | -------------------------------------------------------------------------------- /hanzi_lookup/src/cubic_curve_2d.rs: -------------------------------------------------------------------------------- 1 | pub struct CubicCurve2D { 2 | pub x1: f32, 3 | pub y1: f32, 4 | pub ctrlx1: f32, 5 | pub ctrly1: f32, 6 | pub ctrlx2: 
f32, 7 | pub ctrly2: f32, 8 | pub x2: f32, 9 | pub y2: f32, 10 | } 11 | 12 | impl CubicCurve2D { 13 | pub fn new(x1: f32, y1: f32, ctrlx1: f32, ctrly1: f32, ctrlx2: f32, ctrly2: f32, x2: f32, y2: f32) -> CubicCurve2D { 14 | CubicCurve2D { 15 | x1: x1, 16 | y1: y1, 17 | ctrlx1: ctrlx1, 18 | ctrly1: ctrly1, 19 | ctrlx2: ctrlx2, 20 | ctrly2: ctrly2, 21 | x2: x2, 22 | y2: y2, 23 | } 24 | } 25 | 26 | fn get_cubic_ax(&self) -> f32 { 27 | return self.x2 - self.x1 - self.get_cubic_bx() - self.get_cubic_cx(); 28 | } 29 | fn get_cubic_ay(&self) -> f32 { 30 | return self.y2 - self.y1 - self.get_cubic_by() - self.get_cubic_cy(); 31 | } 32 | fn get_cubic_bx(&self) -> f32 { 33 | return 3.0 * (self.ctrlx2 - self.ctrlx1) - self.get_cubic_cx(); 34 | } 35 | fn get_cubic_by(&self) -> f32 { 36 | return 3.0 * (self.ctrly2 - self.ctrly1) - self.get_cubic_cy(); 37 | } 38 | fn get_cubic_cx(&self) -> f32 { 39 | return 3.0 * (self.ctrlx1 - self.x1); 40 | } 41 | fn get_cubic_cy(&self) -> f32 { 42 | return 3.0 * (self.ctrly1 - self.y1); 43 | } 44 | 45 | pub fn solve_for_x(&self, x: f32) -> (f32, f32, f32, usize) { 46 | let mut res = (std::f32::NAN, std::f32::NAN, std::f32::NAN, 0); 47 | let a = self.get_cubic_ax(); 48 | let b = self.get_cubic_bx(); 49 | let c = self.get_cubic_cx(); 50 | let d = self.x1 - x; 51 | let f = ((3.0 * c / a) - (b*b / (a*a))) / 3.0; 52 | let g = ((2.0 * b*b*b / (a*a*a)) - (9.0 * b * c / (a*a)) + (27.0 * d / a)) / 27.0; 53 | let h = (g * g / 4.0) + (f * f * f / 27.0); 54 | // There is only one real root 55 | if h > 0f32 { 56 | let u = 0f32 - g; 57 | let r = (u / 2.0) + h.powf(0.5); 58 | let s6 = r.powf(1.0 / 3.0); 59 | let s8 = s6; 60 | let t8 = (u / 2.0) - h.powf(0.5); 61 | let v7 = (0f32 - t8).powf(1.0 / 3.0); 62 | let v8 = v7; 63 | let x3 = (s8 - v8) - (b / (3.0 * a)); 64 | res.0 = x3; 65 | res.3 = 1; 66 | } 67 | // All 3 roots are real and equal 68 | else if f == 0.0 && g == 0.0 && h == 0.0 { 69 | res.0 = -(d / a).powf(1.0 / 3.0); 70 | res.3 = 1; 71 | } 72 | // 
All three roots are real (h <= 0) 73 | else { 74 | let i = ((g * g / 4.0) - h).sqrt(); 75 | let j = i.powf(1.0 / 3.0); 76 | let k = (-g / (2.0 * i)).acos(); 77 | let l = j * -1.0; 78 | let m = (k / 3.0).cos(); 79 | let n = (3f32).sqrt() * (k / 3.0).sin(); 80 | let p = (b / (3.0 * a)) * -1.0; 81 | res.0 = 2.0 * j * (k / 3.0).cos() - (b / (3.0 * a)); 82 | res.1 = l * (m + n) + p; 83 | res.2 = l * (m - n) + p; 84 | res.3 = 3; 85 | } 86 | res 87 | } 88 | 89 | pub fn get_first_solution_for_x(&self, x: f32) -> f32 { 90 | let solutions = self.solve_for_x(x); 91 | for i in 0..solutions.3 { 92 | let d; 93 | if i == 0 { d = solutions.0; } 94 | else if i == 1 { d = solutions.1; } 95 | else if i == 2 { d = solutions.2; } 96 | else { unreachable!(); } 97 | if d >= -0.0000001 && d <= 1.0000001 { 98 | if d >= 0.0 && d <= 1.0 { return d; } 99 | if d < 0.0 { return 0.0; } 100 | return 1.0; 101 | } 102 | } 103 | return std::f32::NAN; 104 | } 105 | 106 | pub fn get_y_on_curve(&self, t: f32) -> f32 { 107 | let ay = self.get_cubic_ay(); 108 | let by = self.get_cubic_by(); 109 | let cy = self.get_cubic_cy(); 110 | let t_squared = t * t; 111 | let t_cubed = t * t_squared; 112 | let y = (ay * t_cubed) + (by * t_squared) + (cy * t) + self.y1; 113 | return y; 114 | } 115 | } 116 | 117 | #[cfg(test)] 118 | mod tests { 119 | use super::*; 120 | 121 | #[test] 122 | fn test_cubic_curve() { 123 | let curve = CubicCurve2D::new(0f32, 1.0, 0.5, 1.0, 0.25, -2.0, 1.0, 1.0); 124 | let sol = curve.get_first_solution_for_x(0.0); 125 | assert_eq!(sol, 0.0); 126 | let sol = curve.get_first_solution_for_x(1.0); 127 | assert_eq!(sol, 1.0); 128 | } 129 | } 130 | -------------------------------------------------------------------------------- /web_demo/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | HanziLookup Rust/WASM demo 6 | 7 | 41 | 42 | 43 |
44 |

HanziLookup Rust/WASM demo

45 | github.com/gugray/hanzi_lookup 46 |
47 |
48 |
49 |

Stroke input

50 |
Loading...
51 |
52 |
Undo
53 |
Clear

54 |
55 |
56 |
57 |

Recognized characters

58 |

Make Me a Hanzi data

59 |
60 |
61 | Elapsed: -- 62 |
63 |
64 |
65 | 66 | 67 | 68 | 124 | 125 | 126 | -------------------------------------------------------------------------------- /web_demo/hanzi_lookup.js: -------------------------------------------------------------------------------- 1 | (function() { 2 | const __exports = {}; 3 | let wasm; 4 | 5 | const heap = new Array(32); 6 | 7 | heap.fill(undefined); 8 | 9 | heap.push(undefined, null, true, false); 10 | 11 | let stack_pointer = 32; 12 | 13 | function addBorrowedObject(obj) { 14 | if (stack_pointer == 1) throw new Error('out of js stack'); 15 | heap[--stack_pointer] = obj; 16 | return stack_pointer; 17 | } 18 | 19 | let cachedTextDecoder = new TextDecoder('utf-8'); 20 | 21 | let cachegetUint8Memory = null; 22 | function getUint8Memory() { 23 | if (cachegetUint8Memory === null || cachegetUint8Memory.buffer !== wasm.memory.buffer) { 24 | cachegetUint8Memory = new Uint8Array(wasm.memory.buffer); 25 | } 26 | return cachegetUint8Memory; 27 | } 28 | 29 | function getStringFromWasm(ptr, len) { 30 | return cachedTextDecoder.decode(getUint8Memory().subarray(ptr, ptr + len)); 31 | } 32 | 33 | let cachedGlobalArgumentPtr = null; 34 | function globalArgumentPtr() { 35 | if (cachedGlobalArgumentPtr === null) { 36 | cachedGlobalArgumentPtr = wasm.__wbindgen_global_argument_ptr(); 37 | } 38 | return cachedGlobalArgumentPtr; 39 | } 40 | 41 | let cachegetUint32Memory = null; 42 | function getUint32Memory() { 43 | if (cachegetUint32Memory === null || cachegetUint32Memory.buffer !== wasm.memory.buffer) { 44 | cachegetUint32Memory = new Uint32Array(wasm.memory.buffer); 45 | } 46 | return cachegetUint32Memory; 47 | } 48 | /** 49 | * @param {any} input 50 | * @param {number} limit 51 | * @returns {string} 52 | */ 53 | __exports.lookup = function(input, limit) { 54 | const retptr = globalArgumentPtr(); 55 | try { 56 | wasm.lookup(retptr, addBorrowedObject(input), limit); 57 | const mem = getUint32Memory(); 58 | const rustptr = mem[retptr / 4]; 59 | const rustlen = mem[retptr / 4 + 1]; 60 | 
61 | const realRet = getStringFromWasm(rustptr, rustlen).slice(); 62 | wasm.__wbindgen_free(rustptr, rustlen * 1); 63 | return realRet; 64 | 65 | 66 | } finally { 67 | heap[stack_pointer++] = undefined; 68 | 69 | } 70 | 71 | }; 72 | 73 | let WASM_VECTOR_LEN = 0; 74 | 75 | let cachedTextEncoder = new TextEncoder('utf-8'); 76 | 77 | let passStringToWasm; 78 | if (typeof cachedTextEncoder.encodeInto === 'function') { 79 | passStringToWasm = function(arg) { 80 | 81 | let size = arg.length; 82 | let ptr = wasm.__wbindgen_malloc(size); 83 | let writeOffset = 0; 84 | while (true) { 85 | const view = getUint8Memory().subarray(ptr + writeOffset, ptr + size); 86 | const { read, written } = cachedTextEncoder.encodeInto(arg, view); 87 | writeOffset += written; 88 | if (read === arg.length) { 89 | break; 90 | } 91 | arg = arg.substring(read); 92 | ptr = wasm.__wbindgen_realloc(ptr, size, size += arg.length * 3); 93 | } 94 | WASM_VECTOR_LEN = writeOffset; 95 | return ptr; 96 | }; 97 | } else { 98 | passStringToWasm = function(arg) { 99 | 100 | const buf = cachedTextEncoder.encode(arg); 101 | const ptr = wasm.__wbindgen_malloc(buf.length); 102 | getUint8Memory().set(buf, ptr); 103 | WASM_VECTOR_LEN = buf.length; 104 | return ptr; 105 | }; 106 | } 107 | 108 | function getObject(idx) { return heap[idx]; } 109 | 110 | __exports.__wbindgen_json_serialize = function(idx, ptrptr) { 111 | const ptr = passStringToWasm(JSON.stringify(getObject(idx))); 112 | getUint32Memory()[ptrptr / 4] = ptr; 113 | return WASM_VECTOR_LEN; 114 | }; 115 | 116 | let heap_next = heap.length; 117 | 118 | function dropObject(idx) { 119 | if (idx < 36) return; 120 | heap[idx] = heap_next; 121 | heap_next = idx; 122 | } 123 | 124 | __exports.__wbindgen_object_drop_ref = function(i) { dropObject(i); }; 125 | 126 | function init(module) { 127 | let result; 128 | const imports = { './hanzi_lookup': __exports }; 129 | if (module instanceof URL || typeof module === 'string' || module instanceof Request) { 130 | 131 | 
const response = fetch(module); 132 | if (typeof WebAssembly.instantiateStreaming === 'function') { 133 | result = WebAssembly.instantiateStreaming(response, imports) 134 | .catch(e => { 135 | console.warn("`WebAssembly.instantiateStreaming` failed. Assuming this is because your server does not serve wasm with `application/wasm` MIME type. Falling back to `WebAssembly.instantiate` which is slower. Original error:\n", e); 136 | return response 137 | .then(r => r.arrayBuffer()) 138 | .then(bytes => WebAssembly.instantiate(bytes, imports)); 139 | }); 140 | } else { 141 | result = response 142 | .then(r => r.arrayBuffer()) 143 | .then(bytes => WebAssembly.instantiate(bytes, imports)); 144 | } 145 | } else { 146 | 147 | result = WebAssembly.instantiate(module, imports) 148 | .then(result => { 149 | if (result instanceof WebAssembly.Instance) { 150 | return { instance: result, module }; 151 | } else { 152 | return result; 153 | } 154 | }); 155 | } 156 | return result.then(({instance, module}) => { 157 | wasm = instance.exports; 158 | init.__wbindgen_wasm_module = module; 159 | 160 | return wasm; 161 | }); 162 | } 163 | 164 | self.wasm_bindgen = Object.assign(init, __exports); 165 | 166 | })(); 167 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # hanzi_lookup 2 | 3 | ![Version](https://img.shields.io/github/tag/gugray/hanzi_lookup.svg) 4 | [![Build Status](https://travis-ci.com/gugray/hanzi_lookup.svg?branch=master)](https://travis-ci.com/gugray/hanzi_lookup) 5 | [![](https://img.shields.io/badge/license-LGPL-blue.svg)](https://opensource.org/licenses/LGPL-3.0) 6 | 7 | Free, open-source, browser-based Chinese handwriting recognition in Rust / WebAssembly 8 | 9 | This library is a Rust port of [HanziLookupJS](https://github.com/gugray/HanziLookupJS), which is itself based on Jordan Kiang's 
[HanziLookup](http://kiang.org/jordan/software/hanzilookup). It contains data derived from Shaunak Kishore's [Make Me a Hanzi](https://github.com/skishore/makemeahanzi), and an improved character recognition algorithm. 10 | 11 | Online demo: 12 | 13 | ![hanzi_lookup demo](hanzi_lookup.gif) 14 | 15 | ## Getting started 16 | If you are only interested in using the library in your own project, all you need is `hanzi_lookup_bg.wasm` and `hanzi_lookup.js`. These two files are the Rust library's output, and they are included in the `web_demo` folder. 17 | 18 | - You can use `web_demo` directly if you publish it with a lightweight HTTP server, or even if you just open `index.html` directly from the file system. 19 | 20 | - The demo project loads the WebAssembly module within a tiny Web Worker, contained in `worker.js`. This adds a little extra complexity to the demo because of the event-based communication between the Javascript in `index.html` and the Web Worker. But it's this setup that creates a really smooth user experience by offloading the costly character lookup from the browser's UI thread. 21 | 22 | - The demo project includes `drawingBoard.js`, which is a simple component for drawing characters on the screen. It is not needed for `hanzi_lookup` itself to work, and if you do choose to use it, you also need to include jQuery in your page. The compiled library has no external dependencies. 23 | 24 | - The WebAssembly module exposes one function, called lookup, accessible by calling `wasm_bindgen.lookup(strokes, limit)`, as seen in `worker.js`. The first parameter is an array of strokes. Every stroke, in turn, consists of an array of points. Every point is a two-dimensional array representing its X and Y coordinates. The second parameter is the maximum number of matching characters returned; 8 is a reasonable number here. 25 | 26 | - The lookup function returns a JSON string, which you need to convert by `JSON.parse`. 
The result is an array of match objects, each of which has a `hanzi` member containing the character itself, and a `score` member. The array is ordered by score. 27 | 28 | - The compiled library contains all the stroke information embedded as binary data. For details about the origin of the strokes data file and its licensing, see the related sections below. 29 | 30 | ## Building the library 31 | 32 | You need Rust nightly to build the library; I have been compiling with `rustc 1.36.0-nightly` specifically. In order to generate the WebAssembly module, you also need to install the WASM target `wasm32-unknown-unknown`, best done by the following command: 33 | 34 | $ rustup target add wasm32-unknown-unknown 35 | 36 | With this in place, building the library is a two-step process: 37 | 38 | $ cargo build --target wasm32-unknown-unknown --release 39 | $ wasm-bindgen .target/wasm32-unknown-unknown/release/hanzi_lookup.wasm --out-dir ./dist --no-modules --no-typescript 40 | 41 | I included these steps in `build.cmd`, a simple Windows batch file. If you are using Linux/Mac, an equivalent shell script can be derived trivially. The second command, `wasm-bindgen`, is what produces the `hanzi_lookup.js` file that makes the WebAssembly module comfortably accessible from Javascript. 42 | 43 | Some more details if you want to delve deeper: 44 | 45 | - The command-line demo `cli_demo` also refers to the `hanzi_lookup` library but has regular Debug and Release targets; it doesn't require the WASM target. You can simply run it by switching to its folder and executing `cargo run`. 46 | 47 | - You can run the library's unit tests via `cargo test` either from the root or from `hanzi_lookup`. 48 | 49 | - There is a `launch.json` file in .vscode with two configurations, for debugging the library's tests, and for running the command-line demo. If using VS Code, you will need the [Rust (rls)](https://marketplace.visualstudio.com/items?itemName=rust-lang.rust) plugin. 
50 | 51 | - A great intro to Rust, WebAssembly & Web Workers is this post: [Rust, WebAssembly & Web Workers for speed and profit](https://asquera.de/blog/2018-10-01/webassembly-and-wasm-bindgen/). 52 | 53 | ## The data file 54 | 55 | The library no longer includes the original strokes data from Jordan Kiang's HanziLookup. If you're interested, you can still find it in my related project, [HanziLookupJS](https://github.com/gugray/HanziLookupJS). 56 | 57 | The data in this library is based on `mmah.json`, which is derived from Make Me a Hanzi's `graphics.txt` and encodes 9,507 characters. This file is richer than Jordan Kiang's original because its substroke data also contains the normalized location (center point) of every substroke. The matching algorithm calculates the score accordingly: a substroke that is in the wrong place counts for less. Each substroke is represented by 3 bytes: (1) Direction in radians, with 0\-2\*PI normalized to 0\-255; (2) Length normalized to 0\-255, where 255 is the bounding square's full width; (3) Centerpoint X and Y, both normalized to 0\-15, with X in the 4 higher bits. 58 | 59 | The Rust code loads strokes data from an embedded binary file. You can find the tiny tool I used to convert HanziLookupJS's JSON file into the binary format in the `mmah_json_convert` folder. 60 | 61 | 62 | ## License 63 | 64 | This Rust library is derived from Jordan Kiang's original [HanziLookup](http://kiang.org/jordan/software/hanzilookup). In compliance with the original, it is licensed under [GNU LGPL](https://www.gnu.org/licenses/lgpl-3.0.html). 
65 | 66 | The strokes data is ultimately derived from the following fonts, via Make Me a Hanzi's `graphics.txt` and HanziLookupJS's `mmah.json`: 67 | - Arphic PL KaitiM GB - https://apps.ubuntu.com/cat/applications/precise/fonts-arphic-gkai00mp/ 68 | - Arphic PL UKai - https://apps.ubuntu.com/cat/applications/fonts-arphic-ukai/ 69 | 70 | You can redistribute and/or modify `mmah.json` or the derived binary file under the terms of the Arphic Public License as published by Arphic Technology Co., Ltd. The license is reproduced in LICENSE-APL; you can also find it online at . 71 | -------------------------------------------------------------------------------- /LICENSE-APL: -------------------------------------------------------------------------------- 1 | ARPHIC PUBLIC LICENSE 2 | 3 | Copyright (C) 1999 Arphic Technology Co., Ltd. 4 | 11Fl. No.168, Yung Chi Rd., Taipei, 110 Taiwan 5 | All rights reserved except as specified below. 6 | 7 | Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is forbidden. 8 | 9 | Preamble 10 | 11 | The licenses for most software are designed to take away your freedom to share and change it. By contrast, the ARPHIC PUBLIC LICENSE specifically permits and encourages you to use this software, provided that you give the recipients all the rights that we gave you and make sure they can get the modifications of this software. 12 | 13 | Legal Terms 14 | 15 | 0. Definitions: 16 | Throughout this License, "Font" means the TrueType fonts "AR PL Mingti2L Big5", "AR PL KaitiM Big5" (BIG-5 character set) and "AR PL SungtiL GB", "AR PL KaitiM GB" (GB character set) which are originally distributed by Arphic, and the derivatives of those fonts created through any modification including modifying glyph, reordering glyph, converting format, changing font name, or adding/deleting some characters in/from glyph table. 17 | 18 | "PL" means "Public License". 
19 | 20 | "Copyright Holder" means whoever is named in the copyright or copyrights for the Font. 21 | 22 | "You" means the licensee, or person copying, redistributing or modifying the Font. 23 | 24 | "Freely Available" means that you have the freedom to copy or modify the Font as well as redistribute copies of the Font under the same conditions you received, not price. If you wish, you can charge for this service. 25 | 26 | 1. Copying & Distribution 27 | You may copy and distribute verbatim copies of this Font in any medium, without restriction, provided that you retain this license file (ARPHICPL.TXT) unaltered in all copies. 28 | 29 | 2. Modification 30 | You may otherwise modify your copy of this Font in any way, including modifying glyph, reordering glyph, converting format, changing font name, or adding/deleting some characters in/from glyph table, and copy and distribute such modifications under the terms of Section 1 above, provided that the following conditions are met: 31 | 32 | a) You must insert a prominent notice in each modified file stating how and when you changed that file. 33 | 34 | b) You must make such modifications Freely Available as a whole to all third parties under the terms of this License, such as by offering access to copy the modifications from a designated place, or distributing the modifications on a medium customarily used for software interchange. 35 | 36 | c) If the modified fonts normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the Font under these conditions, and telling the user how to view a copy of this License. 37 | 38 | These requirements apply to the modified work as a whole. 
If identifiable sections of that work are not derived from the Font, and can be reasonably considered independent and separate works in themselves, then this License and its terms, do not apply to those sections when you distribute them as separate works. Therefore, mere aggregation of another work not based on the Font with the Font on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 39 | 40 | 3. Condition Subsequent 41 | You may not copy, modify, sublicense, or distribute the Font except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Font will automatically retroactively void your rights under this License. However, parties who have received copies or rights from you under this License will keep their licenses valid so long as such parties remain in full compliance. 42 | 43 | 4. Acceptance 44 | You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to copy, modify, sublicense or distribute the Font. These actions are prohibited by law if you do not accept this License. Therefore, by copying, modifying, sublicensing or distributing the Font, you indicate your acceptance of this License and all its terms and conditions. 45 | 46 | 5. Automatic Receipt 47 | Each time you redistribute the Font, the recipient automatically receives a license from the original licensor to copy, distribute or modify the Font subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 48 | 49 | 6. 
Contradiction 50 | If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Font at all. For example, if a patent license would not permit royalty-free redistribution of the Font by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Font. 51 | 52 | If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. 53 | 54 | 7. NO WARRANTY 55 | BECAUSE THE FONT IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE FONT, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS OR OTHER PARTIES PROVIDE THE FONT "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE FONT IS WITH YOU. SHOULD THE FONT PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 56 | 57 | 8. 
DAMAGES WAIVER 58 | UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING, IN NO EVENT WILL ANY COPYRIGHTT HOLDERS, OR OTHER PARTIES WHO MAY COPY, MODIFY OR REDISTRIBUTE THE FONT AS PERMITTED ABOVE, BE LIABLE TO YOU FOR ANY DIRECT, INDIRECT, CONSEQUENTIAL, INCIDENTAL, SPECIAL OR EXEMPLARY DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE FONT (INCLUDING BUT NOT LIMITED TO PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA OR PROFITS; OR BUSINESS INTERRUPTION), EVEN IF SUCH HOLDERS OR OTHER PARTIES HAVE BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 59 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU LESSER GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | 9 | This version of the GNU Lesser General Public License incorporates 10 | the terms and conditions of version 3 of the GNU General Public 11 | License, supplemented by the additional permissions listed below. 12 | 13 | 0. Additional Definitions. 14 | 15 | As used herein, "this License" refers to version 3 of the GNU Lesser 16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU 17 | General Public License. 18 | 19 | "The Library" refers to a covered work governed by this License, 20 | other than an Application or a Combined Work as defined below. 21 | 22 | An "Application" is any work that makes use of an interface provided 23 | by the Library, but which is not otherwise based on the Library. 24 | Defining a subclass of a class defined by the Library is deemed a mode 25 | of using an interface provided by the Library. 26 | 27 | A "Combined Work" is a work produced by combining or linking an 28 | Application with the Library. 
The particular version of the Library 29 | with which the Combined Work was made is also called the "Linked 30 | Version". 31 | 32 | The "Minimal Corresponding Source" for a Combined Work means the 33 | Corresponding Source for the Combined Work, excluding any source code 34 | for portions of the Combined Work that, considered in isolation, are 35 | based on the Application, and not on the Linked Version. 36 | 37 | The "Corresponding Application Code" for a Combined Work means the 38 | object code and/or source code for the Application, including any data 39 | and utility programs needed for reproducing the Combined Work from the 40 | Application, but excluding the System Libraries of the Combined Work. 41 | 42 | 1. Exception to Section 3 of the GNU GPL. 43 | 44 | You may convey a covered work under sections 3 and 4 of this License 45 | without being bound by section 3 of the GNU GPL. 46 | 47 | 2. Conveying Modified Versions. 48 | 49 | If you modify a copy of the Library, and, in your modifications, a 50 | facility refers to a function or data to be supplied by an Application 51 | that uses the facility (other than as an argument passed when the 52 | facility is invoked), then you may convey a copy of the modified 53 | version: 54 | 55 | a) under this License, provided that you make a good faith effort to 56 | ensure that, in the event an Application does not supply the 57 | function or data, the facility still operates, and performs 58 | whatever part of its purpose remains meaningful, or 59 | 60 | b) under the GNU GPL, with none of the additional permissions of 61 | this License applicable to that copy. 62 | 63 | 3. Object Code Incorporating Material from Library Header Files. 64 | 65 | The object code form of an Application may incorporate material from 66 | a header file that is part of the Library. 
You may convey such object 67 | code under terms of your choice, provided that, if the incorporated 68 | material is not limited to numerical parameters, data structure 69 | layouts and accessors, or small macros, inline functions and templates 70 | (ten or fewer lines in length), you do both of the following: 71 | 72 | a) Give prominent notice with each copy of the object code that the 73 | Library is used in it and that the Library and its use are 74 | covered by this License. 75 | 76 | b) Accompany the object code with a copy of the GNU GPL and this license 77 | document. 78 | 79 | 4. Combined Works. 80 | 81 | You may convey a Combined Work under terms of your choice that, 82 | taken together, effectively do not restrict modification of the 83 | portions of the Library contained in the Combined Work and reverse 84 | engineering for debugging such modifications, if you also do each of 85 | the following: 86 | 87 | a) Give prominent notice with each copy of the Combined Work that 88 | the Library is used in it and that the Library and its use are 89 | covered by this License. 90 | 91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license 92 | document. 93 | 94 | c) For a Combined Work that displays copyright notices during 95 | execution, include the copyright notice for the Library among 96 | these notices, as well as a reference directing the user to the 97 | copies of the GNU GPL and this license document. 98 | 99 | d) Do one of the following: 100 | 101 | 0) Convey the Minimal Corresponding Source under the terms of this 102 | License, and the Corresponding Application Code in a form 103 | suitable for, and under terms that permit, the user to 104 | recombine or relink the Application with a modified version of 105 | the Linked Version to produce a modified Combined Work, in the 106 | manner specified by section 6 of the GNU GPL for conveying 107 | Corresponding Source. 
108 | 109 | 1) Use a suitable shared library mechanism for linking with the 110 | Library. A suitable mechanism is one that (a) uses at run time 111 | a copy of the Library already present on the user's computer 112 | system, and (b) will operate properly with a modified version 113 | of the Library that is interface-compatible with the Linked 114 | Version. 115 | 116 | e) Provide Installation Information, but only if you would otherwise 117 | be required to provide such information under section 6 of the 118 | GNU GPL, and only to the extent that such information is 119 | necessary to install and execute a modified version of the 120 | Combined Work produced by recombining or relinking the 121 | Application with a modified version of the Linked Version. (If 122 | you use option 4d0, the Installation Information must accompany 123 | the Minimal Corresponding Source and Corresponding Application 124 | Code. If you use option 4d1, you must provide the Installation 125 | Information in the manner specified by section 6 of the GNU GPL 126 | for conveying Corresponding Source.) 127 | 128 | 5. Combined Libraries. 129 | 130 | You may place library facilities that are a work based on the 131 | Library side by side in a single library together with other library 132 | facilities that are not Applications and are not covered by this 133 | License, and convey such a combined library under terms of your 134 | choice, if you do both of the following: 135 | 136 | a) Accompany the combined library with a copy of the same work based 137 | on the Library, uncombined with any other library facilities, 138 | conveyed under the terms of this License. 139 | 140 | b) Give prominent notice with the combined library that part of it 141 | is a work based on the Library, and explaining where to find the 142 | accompanying uncombined form of the same work. 143 | 144 | 6. Revised Versions of the GNU Lesser General Public License. 
145 | 146 | The Free Software Foundation may publish revised and/or new versions 147 | of the GNU Lesser General Public License from time to time. Such new 148 | versions will be similar in spirit to the present version, but may 149 | differ in detail to address new problems or concerns. 150 | 151 | Each version is given a distinguishing version number. If the 152 | Library as you received it specifies that a certain numbered version 153 | of the GNU Lesser General Public License "or any later version" 154 | applies to it, you have the option of following the terms and 155 | conditions either of that published version or of any later version 156 | published by the Free Software Foundation. If the Library as you 157 | received it does not specify a version number of the GNU Lesser 158 | General Public License, you may choose any version of the GNU Lesser 159 | General Public License ever published by the Free Software Foundation. 160 | 161 | If the Library as you received it specifies that a proxy can decide 162 | whether future versions of the GNU Lesser General Public License shall 163 | apply, that proxy's public statement of acceptance of any version is 164 | permanent authorization for you to choose that version for the 165 | Library. 
166 | -------------------------------------------------------------------------------- /web_demo/drawingBoard.js: -------------------------------------------------------------------------------- 1 | var HanziLookup = HanziLookup || {}; 2 | 3 | HanziLookup.DrawingBoard = (function (elmHost, strokeFinished) { 4 | "use strict"; 5 | 6 | var _elmHost = elmHost; 7 | var _strokeFinised = strokeFinished; 8 | var _canvas; 9 | var _ctx; 10 | 11 | // Global options ****************************** 12 | // Width of strokes drawn on screen 13 | var strokeWidth = 5; 14 | 15 | // UI state 16 | var clicking = false; 17 | var lastTouchX = -1; 18 | var lastTouchY = -1; 19 | var tstamp; 20 | var lastPt; 21 | 22 | // An array of arrays; each element is the coordinate sequence for one stroke from the canvas 23 | // Where "stroke" is everything between button press - move - button release 24 | var _rawStrokes = []; 25 | 26 | // Canvas coordinates of each point in current stroke, in raw (unanalyzed) form. 27 | var _currentStroke = null; 28 | 29 | // Overlay. If null, no overlay. 30 | var _overlay = null; 31 | var _showSubstrokes = false; 32 | var _showBoundary = false; 33 | var _showControlMedians = false; 34 | 35 | // Initializes handwriting recognition (events etc.) 
36 | _canvas = $(''); 37 | _elmHost.append(_canvas); 38 | _ctx = _canvas[0].getContext("2d"); 39 | _canvas.mousemove(function (e) { 40 | if (!clicking) return; 41 | var x = e.pageX - $(this).offset().left; 42 | var y = e.pageY - $(this).offset().top; 43 | dragClick(x, y); 44 | }); 45 | _canvas.mousedown(function (e) { 46 | var x = e.pageX - $(this).offset().left; 47 | var y = e.pageY - $(this).offset().top; 48 | startClick(x, y); 49 | }).mouseup(function (e) { 50 | var x = e.pageX - $(this).offset().left; 51 | var y = e.pageY - $(this).offset().top; 52 | endClick(x, y); 53 | }); 54 | _canvas.bind("touchmove", function (e) { 55 | if (!clicking) return; 56 | e.preventDefault(); 57 | var x = e.originalEvent.touches[0].pageX - $(this).offset().left; 58 | lastTouchX = x; 59 | var y = e.originalEvent.touches[0].pageY - $(this).offset().top; 60 | lastTouchY = y; 61 | dragClick(x, y); 62 | }); 63 | _canvas.bind("touchstart", function (e) { 64 | e.preventDefault(); 65 | document.activeElement.blur(); 66 | var x = e.originalEvent.touches[0].pageX - $(this).offset().left; 67 | var y = e.originalEvent.touches[0].pageY - $(this).offset().top; 68 | startClick(x, y); 69 | }).bind("touchend", function (e) { 70 | e.preventDefault(); 71 | document.activeElement.blur(); 72 | endClick(lastTouchX, lastTouchY); 73 | lastTouchX = lastTouchY = -1; 74 | }); 75 | 76 | drawClearCanvas(); 77 | 78 | // Draws a clear canvas, with gridlines 79 | function drawClearCanvas() { 80 | _ctx.clearRect(0, 0, _ctx.canvas.width, _ctx.canvas.height); 81 | _ctx.setLineDash([1, 1]); 82 | _ctx.lineWidth = 0.5; 83 | _ctx.strokeStyle = "grey"; 84 | _ctx.beginPath(); 85 | _ctx.moveTo(0, 0); 86 | _ctx.lineTo(_ctx.canvas.width, 0); 87 | _ctx.lineTo(_ctx.canvas.width, _ctx.canvas.height); 88 | _ctx.lineTo(0,_ctx.canvas.height); 89 | _ctx.lineTo(0, 0); 90 | _ctx.stroke(); 91 | _ctx.beginPath(); 92 | _ctx.moveTo(0, 0); 93 | _ctx.lineTo(_ctx.canvas.width, _ctx.canvas.height); 94 | _ctx.stroke(); 95 | _ctx.beginPath(); 
96 | _ctx.moveTo(_ctx.canvas.width, 0); 97 | _ctx.lineTo(0, _ctx.canvas.height); 98 | _ctx.stroke(); 99 | _ctx.beginPath(); 100 | _ctx.moveTo(_ctx.canvas.width / 2, 0); 101 | _ctx.lineTo(_ctx.canvas.width / 2, _ctx.canvas.height); 102 | _ctx.stroke(); 103 | _ctx.beginPath(); 104 | _ctx.moveTo(0, _ctx.canvas.height / 2); 105 | _ctx.lineTo(_ctx.canvas.width, _ctx.canvas.height / 2); 106 | _ctx.stroke(); 107 | } 108 | 109 | function startClick(x, y) { 110 | clicking = true; 111 | _currentStroke = []; 112 | lastPt = [x, y]; 113 | _currentStroke.push(lastPt); 114 | _ctx.strokeStyle = "grey"; 115 | _ctx.setLineDash([]); 116 | _ctx.lineWidth = strokeWidth; 117 | _ctx.beginPath(); 118 | _ctx.moveTo(x, y); 119 | tstamp = new Date(); 120 | } 121 | 122 | function dragClick(x, y) { 123 | if ((new Date().getTime() - tstamp) < 50) return; 124 | tstamp = new Date(); 125 | var pt = [x, y]; 126 | if ((pt[0] == lastPt[0]) && (pt[1] == lastPt[1])) return; 127 | _currentStroke.push(pt); 128 | lastPt = pt; 129 | _ctx.lineTo(x, y); 130 | _ctx.stroke(); 131 | } 132 | 133 | function endClick(x, y) { 134 | clicking = false; 135 | if (x == -1) return; 136 | _ctx.lineTo(x, y); 137 | _ctx.stroke(); 138 | _currentStroke.push([x, y]); 139 | _rawStrokes.push(_currentStroke); 140 | _currentStroke = []; 141 | // Tell the world a stroke has finished 142 | if (_strokeFinised) _strokeFinised(); 143 | } 144 | 145 | // Redraws raw strokes on the canvas. 
146 | function redrawInput() { 147 | // Draw strokes proper 148 | for (var i1 in _rawStrokes) { 149 | _ctx.strokeStyle = "grey"; 150 | _ctx.setLineDash([]); 151 | _ctx.lineWidth = strokeWidth; 152 | _ctx.beginPath(); 153 | _ctx.moveTo(_rawStrokes[i1][0][0], _rawStrokes[i1][0][1]); 154 | var len = _rawStrokes[i1].length; 155 | for (var i2 = 0; i2 < len - 1; i2++) { 156 | _ctx.lineTo(_rawStrokes[i1][i2][0], _rawStrokes[i1][i2][1]); 157 | _ctx.stroke(); 158 | } 159 | _ctx.lineTo(_rawStrokes[i1][len - 1][0], _rawStrokes[i1][len - 1][1]); 160 | _ctx.stroke(); 161 | } 162 | 163 | // No additional info: quit here. 164 | if (!_overlay) return; 165 | 166 | // Bounding rectangle 167 | if (_showBoundary) { 168 | _ctx.strokeStyle = "blue"; 169 | _ctx.setLineDash([1, 1]); 170 | _ctx.lineWidth = 0.5; 171 | _ctx.beginPath(); 172 | _ctx.moveTo(_overlay.left, _overlay.top); 173 | _ctx.lineTo(_overlay.right, _overlay.top); 174 | _ctx.stroke(); 175 | _ctx.lineTo(_overlay.right, _overlay.bottom); 176 | _ctx.stroke(); 177 | _ctx.lineTo(_overlay.left, _overlay.bottom); 178 | _ctx.stroke(); 179 | _ctx.lineTo(_overlay.left, _overlay.top); 180 | _ctx.stroke(); 181 | } 182 | 183 | // Skeleton strokes 184 | if (_showSubstrokes) { 185 | for (var six = 0; six != _overlay.xStrokes.length; ++six) { 186 | var xstroke = _overlay.xStrokes[six]; 187 | _ctx.strokeStyle = "red"; 188 | _ctx.setLineDash([]); 189 | _ctx.lineWidth = 1; 190 | _ctx.beginPath(); 191 | _ctx.moveTo(xstroke[0][0], xstroke[0][1]); 192 | _ctx.arc(xstroke[0][0], xstroke[0][1], 3, 0, 2 * Math.PI, true); 193 | _ctx.fillStyle = "red"; 194 | _ctx.fill(); 195 | for (var i = 1; i < xstroke.length; ++i) { 196 | _ctx.lineTo(xstroke[i][0], xstroke[i][1]); 197 | _ctx.stroke(); 198 | _ctx.beginPath(); 199 | _ctx.arc(xstroke[i][0], xstroke[i][1], 3, 0, 2 * Math.PI, true); 200 | _ctx.fillStyle = "red"; 201 | _ctx.fill(); 202 | } 203 | } 204 | } 205 | 206 | // Control character medians 207 | if (_showControlMedians && _overlay.yStrokes) { 208 | 
for (var six = 0; six != _overlay.yStrokes.length; ++six) { 209 | var ystroke = _overlay.yStrokes[six]; 210 | _ctx.strokeStyle = "#e6cee6"; 211 | _ctx.setLineDash([]); 212 | _ctx.lineWidth = strokeWidth; 213 | _ctx.beginPath(); 214 | _ctx.moveTo(ystroke[0][0], ystroke[0][1]); 215 | for (var i = 1; i < ystroke.length; ++i) { 216 | _ctx.lineTo(ystroke[i][0], ystroke[i][1]); 217 | _ctx.stroke(); 218 | } 219 | } 220 | } 221 | 222 | // Control character's skeleton strokes 223 | if (_overlay.zStrokes) { 224 | for (var six = 0; six != _overlay.zStrokes.length; ++six) { 225 | var xstroke = _overlay.zStrokes[six]; 226 | _ctx.strokeStyle = "green"; 227 | _ctx.setLineDash([]); 228 | _ctx.lineWidth = 1; 229 | _ctx.beginPath(); 230 | _ctx.moveTo(xstroke[0][0], xstroke[0][1]); 231 | _ctx.arc(xstroke[0][0], xstroke[0][1], 3, 0, 2 * Math.PI, true); 232 | _ctx.fillStyle = "green"; 233 | _ctx.fill(); 234 | for (var i = 1; i < xstroke.length; ++i) { 235 | _ctx.lineTo(xstroke[i][0], xstroke[i][1]); 236 | _ctx.stroke(); 237 | _ctx.beginPath(); 238 | _ctx.arc(xstroke[i][0], xstroke[i][1], 3, 0, 2 * Math.PI, true); 239 | _ctx.fillStyle = "green"; 240 | _ctx.fill(); 241 | } 242 | } 243 | } 244 | } 245 | 246 | return { 247 | // Clear canvas and resets gathered strokes data for new input. 248 | clearCanvas: function () { 249 | _rawStrokes.length = 0; 250 | // Caller must make canvas redraw! And they will. 251 | }, 252 | 253 | // Undoes the last stroke input by the user. 254 | undoStroke: function () { 255 | // Sanity check: nothing to do if input is empty (no strokes yet) 256 | if (_rawStrokes.length == 0) return; 257 | // Remove last stroke 258 | _rawStrokes.length = _rawStrokes.length - 1; 259 | // Caller must make canvas redraw! And they will. 260 | }, 261 | 262 | // Clones the strokes accumulated so far. 
Three-dimensional array: 263 | // - array of strokes, each of which is 264 | // - array of points, each of which is 265 | // - two-dimensional array of coordinates 266 | cloneStrokes: function () { 267 | var res = []; 268 | for (var i = 0; i != _rawStrokes.length; ++i) { 269 | var stroke = []; 270 | for (var j = 0; j != _rawStrokes[i].length; ++j) { 271 | stroke.push([_rawStrokes[i][j][0], _rawStrokes[i][j][1]]); 272 | } 273 | res.push(stroke); 274 | } 275 | return res; 276 | }, 277 | 278 | // Redraw canvas, e.g., after undo or clear 279 | redraw: function() { 280 | drawClearCanvas(); 281 | redrawInput(); 282 | }, 283 | 284 | // Adds overlay to visualize analysis 285 | enrich: function(overlay, showSubstrokes, showBoundary, showControlMedians) { 286 | _overlay = overlay; 287 | _showBoundary = showBoundary; 288 | _showSubstrokes = showSubstrokes; 289 | _showControlMedians = showControlMedians; 290 | drawClearCanvas(); 291 | redrawInput(); 292 | } 293 | }; 294 | 295 | }); 296 | -------------------------------------------------------------------------------- /hanzi_lookup/src/analyzed_character.rs: -------------------------------------------------------------------------------- 1 | extern crate serde_derive; 2 | 3 | use super::entities::*; 4 | use super::*; 5 | 6 | const MIN_SEGMENT_LENGTH: f32 = 12.5; 7 | const MAX_LOCAL_LENGTH_RATIO: f32 = 1.1; 8 | const MAX_RUNNING_LENGTH_RATIO: f32 = 1.09; 9 | 10 | struct Rect { 11 | pub top: f32, 12 | pub bottom: f32, 13 | pub left: f32, 14 | pub right: f32, 15 | } 16 | 17 | pub struct AnalyzedCharacter<'a> { 18 | pub analyzed_strokes: Vec>, 19 | pub sub_stroke_count: usize, 20 | } 21 | 22 | impl<'a> AnalyzedCharacter<'a> { 23 | pub fn from_strokes(strokes: &Vec) -> AnalyzedCharacter { 24 | let bounding_rect = get_bounding_rect(strokes); 25 | let analyzed_strokes: Vec = build_analyzed_strokes(strokes, &bounding_rect); 26 | let mut sub_stroke_count: usize = 0; 27 | for i in 0..analyzed_strokes.len() { 28 | sub_stroke_count += 
analyzed_strokes[i].sub_strokes.len(); 29 | } 30 | AnalyzedCharacter { 31 | analyzed_strokes: analyzed_strokes, 32 | sub_stroke_count: sub_stroke_count, 33 | } 34 | } 35 | 36 | pub fn get_analyzed_strokes(&self) -> Vec { 37 | let mut res: Vec = Vec::with_capacity(self.sub_stroke_count); 38 | for i in 0..self.analyzed_strokes.len() { 39 | for j in 0..self.analyzed_strokes[i].sub_strokes.len() { 40 | res.push(self.analyzed_strokes[i].sub_strokes[j]); 41 | } 42 | } 43 | res 44 | } 45 | } 46 | 47 | // Gets distance between two points 48 | fn dist(a: Point, b: Point) -> f32 { 49 | let dx = (a.x as f32) - (b.x as f32); 50 | let dy = (a.y as f32) - (b.y as f32); 51 | (dx * dx + dy * dy).sqrt() 52 | } 53 | 54 | // Gets normalized distance between two points 55 | // Normalized based on bounding rectangle 56 | fn norm_dist(a: Point, b: Point, bounding_rect: &Rect) -> f32 { 57 | let width = bounding_rect.right - bounding_rect.left; 58 | let height = bounding_rect.bottom - bounding_rect.top; 59 | // normalizer is a diagonal along a square with sides of size the larger dimension of the bounding box 60 | let dim_squared; 61 | if width > height { dim_squared = width * width; } 62 | else { dim_squared = height * height; } 63 | let normalizer = (dim_squared + dim_squared).sqrt(); 64 | let dist_norm = dist(a, b) / normalizer; 65 | // Cap at 1 (...why is this needed??) 66 | f32::min(dist_norm, 1f32) 67 | } 68 | 69 | // Gets direction, in radians, from point a to b 70 | // 0 is to the right, PI / 2 is up, etc. 
71 | fn dir(a: Point, b: Point) -> f32 { 72 | let dx = (a.x as f32) - (b.x as f32); 73 | let dy = (a.y as f32) - (b.y as f32); 74 | let dir = dy.atan2(dx); 75 | std::f32::consts::PI - dir 76 | } 77 | 78 | fn get_norm_center(a: Point, b: Point, bounding_rect: &Rect) -> (f32, f32) { 79 | let mut x = ((a.x as f32) + (b.x as f32)) / 2f32; 80 | let mut y = ((a.y as f32) + (b.y as f32)) / 2f32; 81 | let side; 82 | // Bounding rect is landscape 83 | if bounding_rect.right - bounding_rect.left > bounding_rect.bottom - bounding_rect.top { 84 | side = bounding_rect.right - bounding_rect.left; 85 | let height = bounding_rect.bottom - bounding_rect.top; 86 | x = x - bounding_rect.left; 87 | y = y - bounding_rect.top + (side - height) / 2f32; 88 | } 89 | // Portrait 90 | else { 91 | side = bounding_rect.bottom - bounding_rect.top; 92 | let width = bounding_rect.right - bounding_rect.left; 93 | x = x - bounding_rect.left + (side - width) / 2f32; 94 | y = y - bounding_rect.top; 95 | } 96 | (x / side, y / side) 97 | } 98 | 99 | // Calculates array with indexes of pivot points in raw stroke 100 | fn get_pivot_indexes(stroke: &Stroke) -> Vec { 101 | 102 | let points = &stroke.points; 103 | 104 | // One item for each point: true if it's a pivot 105 | let mut markers: Vec = Vec::with_capacity(points.len()); 106 | for _ in 0..points.len() { markers.push(false); } 107 | 108 | // Cycle variables 109 | let mut prev_pt_ix = 0; 110 | let mut first_pt_ix = 0; 111 | let mut pivot_pt_ix = 1; 112 | 113 | // The first point of a Stroke is always a pivot point. 114 | markers[0] = true; 115 | 116 | // localLength keeps track of the immediate distance between the latest three points. 117 | // We can use localLength to find an abrupt change in substrokes, such as at a corner. 118 | // We do this by checking localLength against the distance between the first and last 119 | // of the three points. 
If localLength is more than a certain amount longer than the 120 | // length between the first and last point, then there must have been a corner of some kind. 121 | let mut local_length = dist(points[first_pt_ix], points[pivot_pt_ix]); 122 | 123 | // runningLength keeps track of the length between the start of the current SubStroke 124 | // and the point we are currently examining. If the runningLength becomes a certain 125 | // amount longer than the straight distance between the first point and the current 126 | // point, then there is a new SubStroke. This accounts for a more gradual change 127 | // from one SubStroke segment to another, such as at a longish curve. 128 | let mut running_length = local_length; 129 | 130 | // Cycle through rest of stroke points. 131 | let mut i = 2; 132 | while i < points.len() { 133 | let next_point = points[i]; 134 | 135 | // pivotPoint is the point we're currently examining to see if it's a pivot. 136 | // We get the distance between this point and the next point and add it 137 | // to the length sums we're using. 138 | let pivot_length = dist(points[pivot_pt_ix], next_point); 139 | local_length += pivot_length; 140 | running_length += pivot_length; 141 | 142 | // Check the lengths against the ratios. If the lengths are a certain among 143 | // longer than a straight line between the first and last point, then we 144 | // mark the point as a pivot. 145 | let dist_from_previous = dist(points[prev_pt_ix], next_point); 146 | let dist_from_first = dist(points[first_pt_ix], next_point); 147 | if local_length > MAX_LOCAL_LENGTH_RATIO * dist_from_previous || 148 | running_length > MAX_RUNNING_LENGTH_RATIO * dist_from_first { 149 | // If the previous point was a pivot and was very close to this point, 150 | // which we are about to mark as a pivot, then unmark the previous point as a pivot. 
151 | if markers[prev_pt_ix] && dist(points[prev_pt_ix], points[pivot_pt_ix]) < MIN_SEGMENT_LENGTH { 152 | markers[prev_pt_ix] = false; 153 | } 154 | markers[pivot_pt_ix] = true; 155 | running_length = pivot_length; 156 | first_pt_ix = pivot_pt_ix; 157 | } 158 | local_length = pivot_length; 159 | prev_pt_ix = pivot_pt_ix; 160 | pivot_pt_ix = i; 161 | 162 | i += 1; 163 | } 164 | 165 | // last point (currently referenced by pivotPoint) has to be a pivot 166 | markers[pivot_pt_ix] = true; 167 | // Point before the final point may need to be handled specially. 168 | // Often mouse action will produce an unintended small segment at the end. 169 | // We'll want to unmark the previous point if it's also a pivot and very close to the lat point. 170 | // However if the previous point is the first point of the stroke, then don't unmark it, because 171 | // then we'd only have one pivot. 172 | if markers[prev_pt_ix] && dist(points[prev_pt_ix], points[pivot_pt_ix]) < MIN_SEGMENT_LENGTH && prev_pt_ix != 0 { 173 | markers[prev_pt_ix] = false; 174 | } 175 | 176 | // Return result in the form of an index array: includes indexes where marker is true 177 | let mut marker_count = 0; 178 | for x in &markers { 179 | if *x { 180 | marker_count += 1; 181 | } 182 | } 183 | let mut res: Vec = Vec::with_capacity(marker_count); 184 | for ix in 0..markers.len() { 185 | if markers[ix] { 186 | res.push(ix); 187 | } 188 | } 189 | res 190 | } 191 | 192 | // Builds array of substrokes from stroke's points, pivots, and character's bounding rectangle 193 | fn build_sub_strokes(stroke: &Stroke, pivot_indexes: &Vec, bounding_rect: &Rect) -> Vec { 194 | let mut res: Vec = Vec::new(); 195 | let mut prev_ix: usize = 0; 196 | for i in 0..pivot_indexes.len() { 197 | let ix = pivot_indexes[i]; 198 | if ix == prev_ix { continue; } 199 | let mut direction = dir(stroke.points[prev_ix], stroke.points[ix]); 200 | direction = (direction * 256f32 / std::f32::consts::PI / 2f32).round(); 201 | if direction >= 256f32 
{ direction = 0f32; } 202 | let mut norm_length = norm_dist(stroke.points[prev_ix], stroke.points[ix], bounding_rect); 203 | norm_length = (norm_length * 255f32).round(); 204 | let center = get_norm_center(stroke.points[prev_ix], stroke.points[ix], bounding_rect); 205 | res.push(SubStroke { 206 | direction: direction, 207 | length: norm_length, 208 | center_x: (center.0 * 15f32).round(), 209 | center_y: (center.1 * 15f32).round(), 210 | }); 211 | prev_ix = ix; 212 | } 213 | res 214 | } 215 | 216 | // Analyze raw input, store result in _analyzedStrokes member. 217 | fn build_analyzed_strokes<'a>(strokes: &'a Vec, bounding_rect: &Rect) -> Vec> { 218 | let mut res: Vec = Vec::new(); 219 | // Process each stroke 220 | for stroke in strokes { 221 | // Identify pivot points 222 | let pivot_indexes = get_pivot_indexes(stroke); 223 | // Abstract away substrokes 224 | let sub_strokes = build_sub_strokes(stroke, &pivot_indexes, bounding_rect); 225 | // Store all this 226 | res.push(AnalyzedStroke{ 227 | points: &stroke.points, 228 | pivot_indexes: pivot_indexes, 229 | sub_strokes: sub_strokes, 230 | }); 231 | } 232 | res 233 | } 234 | 235 | fn get_bounding_rect(strokes: &Vec) -> Rect { 236 | let mut res = Rect { 237 | top: std::f32::MAX, 238 | bottom: std::f32::MIN, 239 | left: std::f32::MAX, 240 | right: std::f32::MIN, 241 | }; 242 | for stroke in strokes { 243 | for pt in &stroke.points { 244 | if (pt.x as f32) < res.left { res.left = pt.x as f32; } 245 | if (pt.x as f32) > res.right { res.right = pt.x as f32; } 246 | if (pt.y as f32) < res.top { res.top = pt.y as f32; } 247 | if (pt.y as f32) > res.bottom { res.bottom = pt.y as f32; } 248 | } 249 | } 250 | if res.top > 255f32 { res.top = 0f32; } 251 | if res.bottom < 0f32 { res.bottom = 255f32; } 252 | if res.left > 255f32 { res.left = 0f32; } 253 | if res.right < 0f32 { res.right = 255f32; } 254 | res 255 | } 256 | 257 | #[cfg(test)] 258 | #[allow(non_snake_case)] 259 | mod tests { 260 | use serde_derive::{Serialize, 
Deserialize}; 261 | 262 | use super::*; 263 | use super::super::Point; 264 | 265 | #[derive(Serialize, Deserialize)] 266 | struct SampleAnSubStroke { 267 | direction: u8, 268 | length: u8, 269 | centerX: u8, 270 | centerY: u8, 271 | } 272 | 273 | #[derive(Serialize, Deserialize)] 274 | struct SampleAnStroke { 275 | points: Vec>, 276 | pivotIndexes: Vec, 277 | subStrokes: Vec, 278 | } 279 | 280 | #[derive(Serialize, Deserialize)] 281 | struct SampleAnChar { 282 | top: u8, 283 | bottom: u8, 284 | left: u8, 285 | right: u8, 286 | analyzedStrokes: Vec, 287 | subStrokeCount: usize, 288 | } 289 | 290 | // These manual samples are custom-saved from a tweaked version of the HanziLookupJS demo 291 | 292 | // This is a hand-drawn 一 293 | static STROKES_1: &str = "[[[70,124],[71,124],[79,124],[104,124],[119,124],[132,125],[151,126],[168,126],[169,126],[189,125],[191,124],[191,124]]]"; 294 | static AN_CHAR_1: &str = "{\"top\":124,\"bottom\":126,\"left\":70,\"right\":191,\"analyzedStrokes\":[{\"points\":[[70,124],[71,124],[79,124],[104,124],[119,124],[132,125],[151,126],[168,126],[169,126],[189,125],[191,124],[191,124]],\"pivotIndexes\":[0,11],\"subStrokes\":[{\"direction\":0,\"length\":180,\"centerX\":8,\"centerY\":7}]}],\"subStrokeCount\":1}"; 295 | 296 | // This is a hand-drawn 十 297 | static STROKES_2: &str = "[[[76,127],[77,127],[84,127],[97,128],[119,128],[125,129],[138,130],[147,130],[153,131],[154,131],[158,131],[162,131],[167,131],[168,131],[169,131],[169,131]],[[129,60],[129,62],[128,74],[128,102],[128,118],[129,143],[130,162],[130,170],[130,178],[131,184],[131,188],[131,193],[131,196],[131,198],[131,203],[131,203]]]"; 298 | static AN_CHAR_2: &str = 
"{\"top\":60,\"bottom\":203,\"left\":76,\"right\":169,\"analyzedStrokes\":[{\"points\":[[76,127],[77,127],[84,127],[97,128],[119,128],[125,129],[138,130],[147,130],[153,131],[154,131],[158,131],[162,131],[167,131],[168,131],[169,131],[169,131]],\"pivotIndexes\":[0,15],\"subStrokes\":[{\"direction\":254,\"length\":117,\"centerX\":8,\"centerY\":7}]},{\"points\":[[129,60],[129,62],[128,74],[128,102],[128,118],[129,143],[130,162],[130,170],[130,178],[131,184],[131,188],[131,193],[131,196],[131,198],[131,203],[131,203]],\"pivotIndexes\":[0,15],\"subStrokes\":[{\"direction\":193,\"length\":180,\"centerX\":8,\"centerY\":8}]}],\"subStrokeCount\":2}"; 299 | 300 | // This is a hand-drawn 元 301 | static STROKES_3: &str = "[[[86,65],[98,66],[146,69],[152,69],[161,69],[166,69],[170,68],[170,68]],[[47,97],[48,97],[54,97],[89,103],[117,104],[146,101],[169,100],[176,98],[180,98],[184,98],[189,98],[193,98],[195,98],[195,98]],[[103,109],[103,110],[99,132],[91,156],[70,180],[56,190],[53,192]],[[143,105],[143,106],[142,114],[140,134],[138,149],[138,160],[138,167],[140,174],[144,182],[150,186],[155,190],[161,193],[166,194],[172,196],[188,197],[193,197],[197,197],[206,197],[206,196],[207,196],[208,196],[208,194],[204,182],[203,174],[202,174],[202,175],[202,176]]]"; 302 | static AN_CHAR_3: &str = 
"{\"top\":65,\"bottom\":197,\"left\":47,\"right\":208,\"analyzedStrokes\":[{\"points\":[[86,65],[98,66],[146,69],[152,69],[161,69],[166,69],[170,68],[170,68]],\"pivotIndexes\":[0,7],\"subStrokes\":[{\"direction\":255,\"length\":94,\"centerX\":8,\"centerY\":1}]},{\"points\":[[47,97],[48,97],[54,97],[89,103],[117,104],[146,101],[169,100],[176,98],[180,98],[184,98],[189,98],[193,98],[195,98],[195,98]],\"pivotIndexes\":[0,13],\"subStrokes\":[{\"direction\":0,\"length\":166,\"centerX\":7,\"centerY\":4}]},{\"points\":[[103,109],[103,110],[99,132],[91,156],[70,180],[56,190],[53,192]],\"pivotIndexes\":[0,6],\"subStrokes\":[{\"direction\":170,\"length\":109,\"centerX\":3,\"centerY\":9}]},{\"points\":[[143,105],[143,106],[142,114],[140,134],[138,149],[138,160],[138,167],[140,174],[144,182],[150,186],[155,190],[161,193],[166,194],[172,196],[188,197],[193,197],[197,197],[206,197],[206,196],[207,196],[208,196],[208,194],[204,182],[203,174],[202,174],[202,175],[202,176]],\"pivotIndexes\":[0,10,18,20,24,26],\"subStrokes\":[{\"direction\":198,\"length\":96,\"centerX\":10,\"centerY\":9},{\"direction\":251,\"length\":58,\"centerX\":12,\"centerY\":13},{\"direction\":0,\"length\":2,\"centerX\":15,\"centerY\":14},{\"direction\":75,\"length\":26,\"centerX\":15,\"centerY\":13},{\"direction\":192,\"length\":2,\"centerX\":14,\"centerY\":12}]}],\"subStrokeCount\":8}"; 303 | 304 | // This is a hand-drawn 氣 305 | static STROKES_4: &str = 
"[[[76,32],[76,33],[75,37],[73,43],[70,51],[67,58],[64,66],[61,72],[57,77],[52,82],[50,85],[50,85]],[[68,58],[69,58],[76,58],[90,59],[100,60],[110,62],[118,62],[132,62],[136,62],[141,62],[145,62],[146,62],[148,62],[148,62]],[[68,95],[69,95],[77,96],[96,96],[105,96],[110,96],[126,97],[144,98],[146,98],[154,98],[156,98],[156,98]],[[59,126],[60,126],[67,126],[90,130],[107,131],[120,132],[134,132],[149,132],[151,132],[156,132],[158,133],[158,134],[156,142],[154,147],[153,155],[152,160],[151,166],[150,172],[150,179],[150,183],[150,186],[150,190],[151,194],[152,199],[156,204],[158,206],[162,209],[167,213],[171,215],[175,216],[184,220],[192,222],[196,223],[200,224],[204,225],[208,225],[210,225],[214,225],[218,223],[218,222],[216,214],[214,208],[214,207],[214,207]],[[79,147],[82,148],[87,155],[91,161],[91,161]],[[124,148],[123,148],[116,155],[110,162],[108,164],[108,164]],[[73,175],[75,175],[88,178],[98,180],[104,180],[111,182],[117,182],[122,182],[125,182]],[[100,148],[100,151],[102,172],[102,195],[103,204],[103,211],[104,216],[104,220],[104,224]],[[94,189],[93,189],[81,204],[72,210],[71,210]],[[109,192],[112,194],[120,199],[132,208],[133,210],[133,210]]]"; 306 | static AN_CHAR_4: &str = 
"{\"top\":32,\"bottom\":225,\"left\":50,\"right\":218,\"analyzedStrokes\":[{\"points\":[[76,32],[76,33],[75,37],[73,43],[70,51],[67,58],[64,66],[61,72],[57,77],[52,82],[50,85],[50,85]],\"pivotIndexes\":[0,11],\"subStrokes\":[{\"direction\":173,\"length\":55,\"centerX\":2,\"centerY\":2}]},{\"points\":[[68,58],[69,58],[76,58],[90,59],[100,60],[110,62],[118,62],[132,62],[136,62],[141,62],[145,62],[146,62],[148,62],[148,62]],\"pivotIndexes\":[0,13],\"subStrokes\":[{\"direction\":254,\"length\":75,\"centerX\":5,\"centerY\":2}]},{\"points\":[[68,95],[69,95],[77,96],[96,96],[105,96],[110,96],[126,97],[144,98],[146,98],[154,98],[156,98],[156,98]],\"pivotIndexes\":[0,11],\"subStrokes\":[{\"direction\":255,\"length\":82,\"centerX\":6,\"centerY\":5}]},{\"points\":[[59,126],[60,126],[67,126],[90,130],[107,131],[120,132],[134,132],[149,132],[151,132],[156,132],[158,133],[158,134],[156,142],[154,147],[153,155],[152,160],[151,166],[150,172],[150,179],[150,183],[150,186],[150,190],[151,194],[152,199],[156,204],[158,206],[162,209],[167,213],[171,215],[175,216],[184,220],[192,222],[196,223],[200,224],[204,225],[208,225],[210,225],[214,225],[218,223],[218,222],[216,214],[214,208],[214,207],[214,207]],\"pivotIndexes\":[0,10,26,39,43],\"subStrokes\":[{\"direction\":253,\"length\":93,\"centerX\":6,\"centerY\":8},{\"direction\":194,\"length\":71,\"centerX\":10,\"centerY\":11},{\"direction\":247,\"length\":54,\"centerX\":12,\"centerY\":14},{\"direction\":75,\"length\":15,\"centerX\":14,\"centerY\":14}]},{\"points\":[[79,147],[82,148],[87,155],[91,161],[91,161]],\"pivotIndexes\":[0,4],\"subStrokes\":[{\"direction\":221,\"length\":17,\"centerX\":4,\"centerY\":9}]},{\"points\":[[124,148],[123,148],[116,155],[110,162],[108,164],[108,164]],\"pivotIndexes\":[0,5],\"subStrokes\":[{\"direction\":160,\"length\":21,\"centerX\":6,\"centerY\":10}]},{\"points\":[[73,175],[75,175],[88,178],[98,180],[104,180],[111,182],[117,182],[122,182],[125,182]],\"pivotIndexes\":[0,8],\"subStrokes\":[{\"direction\":2
51,\"length\":49,\"centerX\":5,\"centerY\":11}]},{\"points\":[[100,148],[100,151],[102,172],[102,195],[103,204],[103,211],[104,216],[104,220],[104,224]],\"pivotIndexes\":[0,8],\"subStrokes\":[{\"direction\":194,\"length\":71,\"centerX\":5,\"centerY\":12}]},{\"points\":[[94,189],[93,189],[81,204],[72,210],[71,210]],\"pivotIndexes\":[0,4],\"subStrokes\":[{\"direction\":158,\"length\":29,\"centerX\":3,\"centerY\":13}]},{\"points\":[[109,192],[112,194],[120,199],[132,208],[133,210],[133,210]],\"pivotIndexes\":[0,5],\"subStrokes\":[{\"direction\":230,\"length\":28,\"centerX\":6,\"centerY\":13}]}],\"subStrokeCount\":13}"; 307 | 308 | fn parse_sample(str_strokes: &str, str_an_char: &str) -> (Vec, SampleAnChar) { 309 | let vec_strokes: Vec>> = serde_json::from_str(str_strokes).unwrap(); 310 | let mut strokes: Vec = Vec::new(); 311 | for vec_stroke in &vec_strokes { 312 | let mut points: Vec = Vec::new(); 313 | for vec_point in vec_stroke { 314 | points.push(Point { 315 | x: vec_point[0], 316 | y: vec_point[1], 317 | }); 318 | } 319 | strokes.push(Stroke { 320 | points: points, 321 | }); 322 | } 323 | let an_char: SampleAnChar = serde_json::from_str(str_an_char).unwrap(); 324 | (strokes, an_char) 325 | } 326 | 327 | fn assert_same(sample_anc: &SampleAnChar, anc: &AnalyzedCharacter) { 328 | assert!(sample_anc.analyzedStrokes.len() == anc.analyzed_strokes.len(), "Expected same number of analyzed strokes."); 329 | for as_ix in 0..sample_anc.analyzedStrokes.len() { 330 | // Reference same analyzed stroke 331 | let sample_ans = &sample_anc.analyzedStrokes[as_ix]; 332 | let ans = &anc.analyzed_strokes[as_ix]; 333 | // Analyze stroke must have same points 334 | assert!(sample_ans.points.len() == ans.points.len(), "Analyzed stroke expected to have same number of points."); 335 | for pt_ix in 0..sample_ans.points.len() { 336 | assert!(sample_ans.points[pt_ix][0] == ans.points[pt_ix].x, "Analyzed stroke expected to have the exact same points."); 337 | 
assert!(sample_ans.points[pt_ix][1] == ans.points[pt_ix].y, "Analyzed stroke expected to have the exact same points."); 338 | } 339 | // Analyze stroke must have same pivot indexes 340 | assert!(sample_ans.pivotIndexes.len() == ans.pivot_indexes.len(), "Analyzed stroke expected to have same number of pivot indexes."); 341 | for pivot_ix in 0..sample_ans.pivotIndexes.len() { 342 | assert!(sample_ans.pivotIndexes[pivot_ix] == ans.pivot_indexes[pivot_ix], "Analyzed stroke expected to have the exact same pivot indexes."); 343 | } 344 | // Analyze stroke must have same substrokes 345 | assert!(sample_ans.subStrokes.len() == ans.sub_strokes.len(), "Analyzed stroke expected to have same number of substrokes."); 346 | for ss_ix in 0..sample_ans.subStrokes.len() { 347 | assert!(sample_ans.subStrokes[ss_ix].direction as f32 == ans.sub_strokes[ss_ix].direction, 348 | "Analyzed stroke must have the exact same substrokes."); 349 | assert!(sample_ans.subStrokes[ss_ix].length as f32 == ans.sub_strokes[ss_ix].length, 350 | "Analyzed stroke must have the exact same substrokes."); 351 | assert!(sample_ans.subStrokes[ss_ix].centerX as f32 == ans.sub_strokes[ss_ix].center_x, 352 | "Analyzed stroke must have the exact same substrokes."); 353 | assert!(sample_ans.subStrokes[ss_ix].centerY as f32 == ans.sub_strokes[ss_ix].center_y, 354 | "Analyzed stroke must have the exact same substrokes."); 355 | } 356 | } 357 | } 358 | 359 | #[test] 360 | fn test_samples() { 361 | { 362 | let sample = parse_sample(STROKES_1, AN_CHAR_1); 363 | let ac = AnalyzedCharacter::from_strokes(&sample.0); 364 | assert_same(&sample.1, &ac); 365 | } 366 | { 367 | let sample = parse_sample(STROKES_2, AN_CHAR_2); 368 | let ac = AnalyzedCharacter::from_strokes(&sample.0); 369 | assert_same(&sample.1, &ac); 370 | } 371 | { 372 | let sample = parse_sample(STROKES_3, AN_CHAR_3); 373 | let ac = AnalyzedCharacter::from_strokes(&sample.0); 374 | assert_same(&sample.1, &ac); 375 | } 376 | { 377 | let sample = 
parse_sample(STROKES_4, AN_CHAR_4); 378 | let ac = AnalyzedCharacter::from_strokes(&sample.0); 379 | assert_same(&sample.1, &ac); 380 | } 381 | } 382 | } 383 | -------------------------------------------------------------------------------- /hanzi_lookup/src/matcher.rs: -------------------------------------------------------------------------------- 1 | use super::entities::*; 2 | use super::cubic_curve_2d::*; 3 | use super::match_collector::*; 4 | use super::analyzed_character::*; 5 | use super::*; 6 | 7 | #[derive(Serialize, Deserialize, PartialEq, Debug)] 8 | struct SubStrokeTriple { 9 | dir: u8, 10 | length: u8, 11 | center: u8, 12 | } 13 | 14 | #[derive(Serialize, Deserialize, PartialEq, Debug)] 15 | struct CharData { 16 | hanzi: char, 17 | stroke_count: u16, 18 | sub_strokes: Vec, 19 | } 20 | 21 | thread_local!(static CHAR_DATA: Vec = load_strokes()); 22 | 23 | fn load_strokes() -> Vec { 24 | let hwbytes = include_bytes!("../data/mmah.bin"); 25 | let reader = std::io::BufReader::new(&hwbytes[..]); 26 | let res = bincode::deserialize_from(reader).expect("Failed to deserialize."); 27 | res 28 | } 29 | 30 | // The algorithm's magic numbers. Allow shouting snake case because we look at these as effective constants. 
31 | #[allow(non_snake_case)] 32 | #[derive(Clone, Copy)] 33 | pub struct MatcherParams { 34 | pub MAX_CHARACTER_STROKE_COUNT: usize, 35 | pub MAX_CHARACTER_SUB_STROKE_COUNT: usize, 36 | pub DEFAULT_LOOSENESS: f32, 37 | pub AVG_SUBSTROKE_LENGTH: f32, // an average length (out of 1) 38 | pub SKIP_PENALTY_MULTIPLIER: f32, // penalty mulitplier for skipping a stroke 39 | pub CORRECT_NUM_STROKES_BONUS: f32, // max multiplier bonus if characters has the correct number of strokes 40 | pub CORRECT_NUM_STROKES_CAP: usize, // characters with more strokes than this will not be multiplied 41 | } 42 | #[warn(non_snake_case)] 43 | 44 | impl Default for MatcherParams { 45 | fn default() -> MatcherParams { 46 | MatcherParams { 47 | MAX_CHARACTER_STROKE_COUNT: 48, 48 | MAX_CHARACTER_SUB_STROKE_COUNT: 64, 49 | DEFAULT_LOOSENESS: 0.15, 50 | AVG_SUBSTROKE_LENGTH: 0.33, 51 | SKIP_PENALTY_MULTIPLIER: 1.75, 52 | CORRECT_NUM_STROKES_BONUS: 0.1, 53 | CORRECT_NUM_STROKES_CAP: 10, 54 | } 55 | } 56 | } 57 | 58 | pub struct Matcher { 59 | // Magic numbers; can be overridden 60 | params: MatcherParams, 61 | // N*N dimensional matrix where N = MAX_CHARACTER_SUB_STROKE_COUNT + 1 62 | score_matrix: Vec>, 63 | // Values pre-computed as solutions of a 2D quadratic curve 64 | direction_score_table: Vec, 65 | // Values pre-computed as solutions of a 2D quadratic curve 66 | length_score_table: Vec, 67 | // Values pre-computed as solutions of a 2D quadratic curve 68 | pos_score_table: Vec, 69 | } 70 | 71 | impl Matcher { 72 | pub fn new() -> Matcher { 73 | let params = MatcherParams::default(); 74 | Matcher::with_params(¶ms) 75 | } 76 | 77 | pub fn with_params(params: &MatcherParams) -> Matcher { 78 | let mut res = Matcher { 79 | score_matrix: Vec::with_capacity(params.MAX_CHARACTER_SUB_STROKE_COUNT + 1), 80 | params: *params, 81 | direction_score_table: Vec::with_capacity(256), 82 | length_score_table: Vec::with_capacity(129), 83 | pos_score_table: Vec::with_capacity(450), 84 | }; 85 | 
init_score_tables(&mut res.direction_score_table, &mut res.length_score_table, &mut res.pos_score_table); 86 | res.init_score_matrix(); 87 | res 88 | } 89 | 90 | pub fn lookup(&mut self, strokes: &Vec, collector: &mut MatchCollector) { 91 | let input_char = AnalyzedCharacter::from_strokes(strokes); 92 | 93 | // Edge case: empty input should return no matches; but permissive lookup does find a few... 94 | if input_char.analyzed_strokes.len() == 0 { 95 | return; 96 | } 97 | 98 | CHAR_DATA.with(|char_data| { 99 | 100 | // Flat format: matching needs this. Only transform once. 101 | let input_sub_strokes = input_char.get_analyzed_strokes(); 102 | 103 | // Some pre-computed looseness magic 104 | let stroke_count = input_char.analyzed_strokes.len(); 105 | let sub_stroke_count = input_char.sub_stroke_count; 106 | // Get the range of strokes to compare against based on the loosness. 107 | // Characters with fewer strokes than stroke_count - stroke_range 108 | // or more than stroke_count + stroke_range won't even be considered. 109 | let stroke_range = self.get_strokes_range(stroke_count, self.params.DEFAULT_LOOSENESS); 110 | let minimum_strokes = usize::max(stroke_count - stroke_range, 1); 111 | let maximum_strokes = usize::min(stroke_count + stroke_range, self.params.MAX_CHARACTER_STROKE_COUNT); 112 | // Get the range of substrokes to compare against based on looseness. 113 | // When trying to match sub stroke patterns, won't compare sub strokes 114 | // that are farther about in sequence than this range. This is to make 115 | // computing matches less expensive for low loosenesses. 
116 | let sub_strokes_range = self.get_sub_strokes_range(sub_stroke_count, self.params.DEFAULT_LOOSENESS); 117 | let min_sub_strokes = usize::max(sub_stroke_count - sub_strokes_range, 1); 118 | let max_sub_strokes = usize::min(sub_stroke_count + sub_strokes_range, self.params.MAX_CHARACTER_SUB_STROKE_COUNT); 119 | // Iterate over all characters in repo 120 | for cix in 0..char_data.len() { 121 | let repo_char = &char_data[cix]; 122 | let cmp_stroke_count = repo_char.stroke_count; 123 | let cmp_sub_strokes = &repo_char.sub_strokes; 124 | if (cmp_stroke_count as usize) < minimum_strokes || cmp_stroke_count as usize > maximum_strokes { 125 | continue; 126 | } 127 | if cmp_sub_strokes.len() < min_sub_strokes || cmp_sub_strokes.len() > max_sub_strokes { 128 | continue; 129 | } 130 | // Match against character in repo 131 | let char_match = self.match_one(stroke_count, &input_sub_strokes, sub_strokes_range, &repo_char); 132 | // File; collector takes care of comparisons and keeping N-best 133 | collector.file_match(char_match); 134 | } 135 | }); 136 | } 137 | 138 | fn match_one( &mut self, 139 | input_stroke_count: usize, 140 | input_sub_strokes: &Vec, 141 | sub_strokes_range: usize, 142 | repo_char: &CharData) -> Match { 143 | // Calculate score. This is the *actual* meat. 144 | let mut score = self.compute_match_score(input_sub_strokes, sub_strokes_range, repo_char); 145 | // If the input character and the character in the repository have the same number of strokes, assign a small bonus. 146 | // Might be able to remove this, doesn't really add much, only semi-useful for characters with only a couple strokes. 147 | if input_stroke_count == repo_char.stroke_count as usize && input_stroke_count < self.params.CORRECT_NUM_STROKES_CAP { 148 | // The bonus declines linearly as the number of strokes increases, writing 2 instead of 3 strokes is worse than 9 for 10. 
149 | let bonus = self.params.CORRECT_NUM_STROKES_BONUS * 150 | (i32::max(self.params.CORRECT_NUM_STROKES_CAP as i32 - input_stroke_count as i32, 0) as f32) / 151 | (self.params.CORRECT_NUM_STROKES_CAP as f32); 152 | score += bonus * score; 153 | } 154 | Match { 155 | hanzi: repo_char.hanzi, 156 | score: score, 157 | } 158 | } 159 | 160 | fn compute_match_score( &mut self, 161 | input_sub_strokes: &Vec, 162 | sub_strokes_range: usize, 163 | repo_char: &CharData) -> f32 { 164 | // 165 | for x in 0..input_sub_strokes.len() { 166 | // For each of the input substrokes... 167 | let input_direction = input_sub_strokes[x].direction.round() as u8; 168 | let input_length = input_sub_strokes[x].length.round() as u8; 169 | let input_center = Point { 170 | x: input_sub_strokes[x].center_x as u8, 171 | y: input_sub_strokes[x].center_y as u8, 172 | }; 173 | for y in 0..repo_char.sub_strokes.len() { 174 | // For each of the compare substrokes... 175 | // initialize the score as being not usable, it will only be set to a good 176 | // value if the two substrokes are within the range. 177 | let mut new_score = std::f32::MIN; 178 | let range = ((x as i32) - (y as i32)).abs() as usize; 179 | if range <= sub_strokes_range { 180 | // The range is based on looseness. If the two substrokes fall out of the range 181 | // then the comparison score for those two substrokes remains Double.MIN_VALUE and will not be used. 182 | let cmp_dir = repo_char.sub_strokes[y].dir; 183 | let cmp_length = repo_char.sub_strokes[y].length; 184 | let cmp_center = Point { 185 | x: (repo_char.sub_strokes[y].center & 0xf0).wrapping_shr(4), 186 | y: repo_char.sub_strokes[y].center & 0x0f, 187 | }; 188 | // We incur penalties for skipping substrokes. 189 | // Get the scores that would be incurred either for skipping the substroke from the descriptor, or from the repository. 
190 | let skip1_score = self.score_matrix[x][y + 1] - 191 | (input_length as f32 / 256.0 * self.params.SKIP_PENALTY_MULTIPLIER); 192 | let skip2_score = self.score_matrix[x + 1][y] - 193 | (cmp_length as f32 / 256.0 * self.params.SKIP_PENALTY_MULTIPLIER); 194 | // The skip score is the maximum of the scores that would result from skipping one of the substrokes. 195 | let skip_score = f32::max(skip1_score, skip2_score); 196 | // The match_score is the score of actually comparing the two substrokes. 197 | let match_score = self.compute_sub_stroke_score(input_direction, 198 | input_length, 199 | cmp_dir, 200 | cmp_length, 201 | input_center, 202 | cmp_center); 203 | // Previous score is the score we'd add to if we compared the two substrokes. 204 | let prev_score = self.score_matrix[x][y]; 205 | // Result score is the maximum of skipping a substroke, or comparing the two. 206 | new_score = f32::max(prev_score + match_score, skip_score); 207 | } 208 | // Set the score for comparing the two substrokes. 209 | self.score_matrix[x + 1][y + 1] = new_score; 210 | } 211 | } 212 | // At the end the score is the score at the opposite corner of the matrix... 213 | // don't need to use count - 1 since seed values occupy indices 0 214 | self.score_matrix[input_sub_strokes.len()][repo_char.sub_strokes.len()] 215 | } 216 | 217 | fn compute_sub_stroke_score(&self, 218 | input_direction: u8, 219 | input_length: u8, 220 | repo_direction: u8, 221 | repo_length: u8, 222 | input_center: Point, 223 | repo_center: Point) -> f32 { 224 | // Score drops off after directions get sufficiently apart, start to rise again as the substrokes approach opposite directions. 225 | // This in particular reflects that occasionally strokes will be written backwards, this isn't totally bad, they get 226 | // some score for having the stroke oriented correctly. 
227 | let direction_score = self.get_direction_score(input_direction, repo_direction, input_length); 228 | //var direction_score = Math.max(Math.cos(2.0 * theta), 0.3 * Math.cos((1.5 * theta) + (Math.PI / 3.0))); 229 | 230 | // Length score gives an indication of how similar the lengths of the substrokes are. 231 | // Get the ratio of the smaller of the lengths over the longer of the lengths. 232 | let length_score = self.get_length_score(input_length, repo_length); 233 | // Ratios that are within a certain range are fine, but after that they drop off, scores not more than 1. 234 | //var length_score = Math.log(length_score + (1.0 / Math.E)) + 1; 235 | //length_score = Math.min(length_score, 1.0); 236 | 237 | // For the final "classic" score we just multiply the two scores together. 238 | let mut score = length_score * direction_score; 239 | 240 | // Reduce score if strokes are farther apart 241 | let dx = input_center.x as i32 - repo_center.x as i32; 242 | let dy = input_center.y as i32 - repo_center.y as i32; 243 | let closeness = self.pos_score_table[(dx * dx + dy * dy) as usize]; 244 | 245 | // var dist = Math.sqrt(dx * dx + dy * dy); 246 | // // Distance is [0 .. 21.21] because X and Y are all [0..15] 247 | // // Square distance is [0..450] 248 | // // TO-DO: a cubic function for this too 249 | // var closeness = 1 - dist / 22; 250 | // Closeness is always [0..1]. We reduce positive score, and make negative more negative. 251 | if score > 0.0 { score *= closeness; } 252 | else { score /= closeness; } 253 | 254 | // Done 255 | score 256 | } 257 | 258 | fn get_direction_score(&self, direction1: u8, direction2: u8, input_length: u8) -> f32 { 259 | // Both directions are [0..255], integer 260 | let theta = (direction1 as i32 - direction2 as i32).abs() as usize; 261 | // Lookup table for actual score function 262 | let mut direction_score = self.direction_score_table[theta]; 263 | // Add bonus if the input length is small. 
        // Direction doesn't really matter for small dian-like strokes.
        if input_length < 64 {
            // Cap the bonus so the combined score never exceeds 1.0.
            let short_length_bonus_max = f32::min(1.0, 1.0 - direction_score);
            // The shorter the stroke, the larger its share of the bonus.
            let short_length_bonus = short_length_bonus_max * (1.0 - (input_length as f32 / 64.0));
            direction_score += short_length_bonus;
        }
        direction_score
    }

    // Looks up the length-similarity score for two sub-stroke lengths.
    fn get_length_score(&self, length1: u8, length2: u8) -> f32 {
        // Get the ratio between the two lengths less than one.
        let ratio: usize;
        // Shift for "times 128"
        if length1 > length2 { ratio = ((length2 as f32 * 128.0) / length1 as f32).round() as usize; }
        else { ratio = ((length1 as f32 * 128.0) / length2 as f32).round() as usize; }
        // Lookup table for actual score function
        self.length_score_table[ratio]
    }

    // Lazily allocates, then zeroes and seeds the DP score matrix before each comparison.
    fn init_score_matrix(&mut self) {
        // Allocate if this is the first time we're initializing
        if self.score_matrix.len() == 0 {
            for i in 0..self.params.MAX_CHARACTER_SUB_STROKE_COUNT + 1 {
                self.score_matrix.push(Vec::with_capacity(self.params.MAX_CHARACTER_SUB_STROKE_COUNT + 1));
                for _ in 0..self.params.MAX_CHARACTER_SUB_STROKE_COUNT + 1 {
                    self.score_matrix[i].push(0f32);
                }
            }
        }
        // For starters, everything is zero
        for i in 0..self.score_matrix.len() {
            for j in 0..self.score_matrix[i].len() {
                self.score_matrix[i][j] = 0f32;
            }
        }
        // Seed the first row and column with base values.
        // Starting from a cell that isn't at 0,0 to skip strokes incurs a penalty.
        for i in 0..self.score_matrix.len() {
            let penalty = -self.params.AVG_SUBSTROKE_LENGTH * self.params.SKIP_PENALTY_MULTIPLIER * (i as f32);
            self.score_matrix[i][0] = penalty;
            self.score_matrix[0][i] = penalty;
        }
    }

    // Maps looseness [0..1] to the stroke-count band within which characters are compared.
    fn get_strokes_range(&self, stroke_count: usize, looseness: f32) -> usize {
        if looseness == 0f32 { return 0; }
        if looseness == 1f32 { return self.params.MAX_CHARACTER_STROKE_COUNT; }
        // We use a CubicCurve that grows slowly at first and then rapidly near the end to the maximum.
        // This is so a looseness at or near 1.0 will return a range that will consider all characters.
        let ctrl1_x = 0.35;
        let ctrl1_y = (stroke_count as f32) * 0.4;
        let ctrl2_x = 0.6;
        let ctrl2_y = stroke_count as f32;
        let curve = CubicCurve2D::new(0.0, 0.0, ctrl1_x, ctrl1_y, ctrl2_x, ctrl2_y, 1.0, self.params.MAX_CHARACTER_STROKE_COUNT as f32);
        let t = curve.get_first_solution_for_x(looseness);
        // We get the t value on the parametrized curve where the x value matches the looseness.
        // Then we compute the y value for that t. This gives the range.
        let res = curve.get_y_on_curve(t).round();
        return res as usize;
    }

    // Maps looseness [0..1] to the sub-stroke-count band; analogous to get_strokes_range.
    fn get_sub_strokes_range(&self, sub_stroke_count: usize, looseness: f32) -> usize {
        // Return the maximum if looseness = 1.0.
        // Otherwise we'd have to ensure that the floating point value led to exactly the right int count.
        if looseness == 1.0 { return self.params.MAX_CHARACTER_SUB_STROKE_COUNT; }
        // We use a CubicCurve that grows slowly at first and then rapidly near the end to the maximum.
330 | let y0 = (sub_stroke_count as f32) * 0.25; 331 | let ctrl1_x = 0.4; 332 | let ctrl1_y = 1.5 * y0; 333 | let ctrl2_x = 0.75; 334 | let ctrl2_y = 1.5 * ctrl1_y; 335 | let curve = CubicCurve2D::new(0.0, y0, ctrl1_x, ctrl1_y, ctrl2_x, ctrl2_y, 1.0, self.params.MAX_CHARACTER_SUB_STROKE_COUNT as f32); 336 | let t = curve.get_first_solution_for_x(looseness); 337 | // We get the t value on the parametrized curve where the x value matches the looseness. 338 | // Then we compute the y value for that t. This gives the range. 339 | let res = curve.get_y_on_curve(t).round(); 340 | return res as usize; 341 | } 342 | } 343 | 344 | 345 | fn init_score_tables(direction_score_table: &mut Vec, length_score_table: &mut Vec, pos_score_table: &mut Vec) { 346 | // Builds a precomputed array of values to use when getting the score between two substroke directions. 347 | // Two directions should differ by 0 - Pi, and the score should be the (difference / Pi) * score table's length 348 | // The curve drops as the difference grows, but rises again some at the end because 349 | // a stroke that is 180 degrees from the expected direction maybe OK passable. 350 | let curve = CubicCurve2D::new(0f32, 1.0, 0.5, 1.0, 0.25, -2.0, 1.0, 1.0); 351 | init_sc_from_curve(direction_score_table, &curve, 256); 352 | 353 | // Builds a precomputed array of values to use when getting the score between two substroke lengths. 354 | // A ratio less than one is computed for the two lengths, and the score should be the ratio * score table's length. 355 | // Curve grows rapidly as the ratio grows and levels off quickly. 356 | // This is because we don't really expect lengths to lety a lot. 357 | // We are really just trying to distinguish between tiny strokes and long strokes. 
358 | let curve = CubicCurve2D::new(0f32, 0f32, 0.25, 1.0, 0.75, 1.0, 1.0, 1.0); 359 | init_sc_from_curve(length_score_table, &curve, 129); 360 | 361 | pos_score_table.clear(); 362 | for i in 0..450 { 363 | pos_score_table.push(1.0 - (i as f32).sqrt() / 22.0); 364 | } 365 | } 366 | 367 | fn init_sc_from_curve(score_table: &mut Vec, curve: &CubicCurve2D, samples: usize) { 368 | score_table.clear(); 369 | let x1 = curve.x1; 370 | let x2 = curve.x2; 371 | let range = x2 - x1; 372 | let x_inc = range / (samples as f32); // even incrementer to increment x value by when sampling across the curve 373 | let mut x = x1; 374 | // Sample evenly across the curve and set the samples into the table. 375 | for _ in 0..samples { 376 | let t = curve.get_first_solution_for_x(f32::min(x, x2)); 377 | score_table.push(curve.get_y_on_curve(t)); 378 | x += x_inc; 379 | } 380 | } 381 | 382 | 383 | #[cfg(test)] 384 | mod tests { 385 | use std::fmt::Write; 386 | use std::time::{Instant}; 387 | use super::*; 388 | 389 | #[test] 390 | fn test_score_tables() { 391 | let mut direction_score_table: Vec = Vec::new(); 392 | let mut length_score_table: Vec = Vec::new(); 393 | let mut pos_score_table: Vec = Vec::new(); 394 | init_score_tables(&mut direction_score_table, &mut length_score_table, &mut pos_score_table); 395 | 396 | assert_eq!(direction_score_table.len(), 256); 397 | assert!(direction_score_table[0] > 0.99); 398 | assert!(direction_score_table[96] > 0.0); 399 | assert!(direction_score_table[97] < 0.0); 400 | assert!(direction_score_table[183] < 0.0); 401 | assert!(direction_score_table[184] > 0.0); 402 | assert!(direction_score_table[255] > 0.98); 403 | 404 | assert_eq!(length_score_table.len(), 129); 405 | assert!(length_score_table[0] >= 0.0); 406 | assert!(length_score_table[0] < 0.01); 407 | assert!(length_score_table[23] < 0.5); 408 | assert!(length_score_table[24] > 0.5); 409 | assert!(length_score_table[128] > 0.99); 410 | 411 | assert!(pos_score_table.len() == 450); 412 | 
        assert!(pos_score_table[0] == 1.0);
        // sqrt(121) = 11, and 1 - 11/22 = 0.5 exactly.
        assert!(pos_score_table[121] == 0.5);
        assert!(pos_score_table[449] < 0.04);
    }

    // These manual samples are custom-saved from a tweaked version of the HanziLookupJS demo
    // This is a hand-drawn 一
    static STROKES_1: &str = "[[[70,124],[71,124],[79,124],[104,124],[119,124],[132,125],[151,126],[168,126],[169,126],[189,125],[191,124],[191,124]]]";
    // This is a hand-drawn 十
    static STROKES_2: &str = "[[[76,127],[77,127],[84,127],[97,128],[119,128],[125,129],[138,130],[147,130],[153,131],[154,131],[158,131],[162,131],[167,131],[168,131],[169,131],[169,131]],[[129,60],[129,62],[128,74],[128,102],[128,118],[129,143],[130,162],[130,170],[130,178],[131,184],[131,188],[131,193],[131,196],[131,198],[131,203],[131,203]]]";
    // This is a hand-drawn 元
    static STROKES_3: &str = "[[[86,65],[98,66],[146,69],[152,69],[161,69],[166,69],[170,68],[170,68]],[[47,97],[48,97],[54,97],[89,103],[117,104],[146,101],[169,100],[176,98],[180,98],[184,98],[189,98],[193,98],[195,98],[195,98]],[[103,109],[103,110],[99,132],[91,156],[70,180],[56,190],[53,192]],[[143,105],[143,106],[142,114],[140,134],[138,149],[138,160],[138,167],[140,174],[144,182],[150,186],[155,190],[161,193],[166,194],[172,196],[188,197],[193,197],[197,197],[206,197],[206,196],[207,196],[208,196],[208,194],[204,182],[203,174],[202,174],[202,175],[202,176]]]";
    // This is a hand-drawn 氣
    static STROKES_4: &str =
"[[[76,32],[76,33],[75,37],[73,43],[70,51],[67,58],[64,66],[61,72],[57,77],[52,82],[50,85],[50,85]],[[68,58],[69,58],[76,58],[90,59],[100,60],[110,62],[118,62],[132,62],[136,62],[141,62],[145,62],[146,62],[148,62],[148,62]],[[68,95],[69,95],[77,96],[96,96],[105,96],[110,96],[126,97],[144,98],[146,98],[154,98],[156,98],[156,98]],[[59,126],[60,126],[67,126],[90,130],[107,131],[120,132],[134,132],[149,132],[151,132],[156,132],[158,133],[158,134],[156,142],[154,147],[153,155],[152,160],[151,166],[150,172],[150,179],[150,183],[150,186],[150,190],[151,194],[152,199],[156,204],[158,206],[162,209],[167,213],[171,215],[175,216],[184,220],[192,222],[196,223],[200,224],[204,225],[208,225],[210,225],[214,225],[218,223],[218,222],[216,214],[214,208],[214,207],[214,207]],[[79,147],[82,148],[87,155],[91,161],[91,161]],[[124,148],[123,148],[116,155],[110,162],[108,164],[108,164]],[[73,175],[75,175],[88,178],[98,180],[104,180],[111,182],[117,182],[122,182],[125,182]],[[100,148],[100,151],[102,172],[102,195],[103,204],[103,211],[104,216],[104,220],[104,224]],[[94,189],[93,189],[81,204],[72,210],[71,210]],[[109,192],[112,194],[120,199],[132,208],[133,210],[133,210]]]"; 426 | 427 | fn parse_sample(str_strokes: &str) -> Vec { 428 | let vec_strokes: Vec>> = serde_json::from_str(str_strokes).unwrap(); 429 | let mut strokes: Vec = Vec::new(); 430 | for vec_stroke in &vec_strokes { 431 | let mut points: Vec = Vec::new(); 432 | for vec_point in vec_stroke { 433 | points.push(Point { 434 | x: vec_point[0], 435 | y: vec_point[1], 436 | }); 437 | } 438 | strokes.push(Stroke { 439 | points: points, 440 | }); 441 | } 442 | strokes 443 | } 444 | 445 | #[test] 446 | fn test_matches() { 447 | let mut barf = String::new(); 448 | let mut matcher = Matcher::new(); 449 | let mut res: Vec = Vec::with_capacity(8); 450 | { 451 | let sample = parse_sample(STROKES_1); 452 | res.clear(); 453 | let mut collector = MatchCollector::new(&mut res, 8); 454 | matcher.lookup(&sample, &mut collector); 455 | 
//write!(&mut barf, "#1: {}", res[0].hanzi).unwrap(); 456 | assert!(res[0].hanzi == '一'); 457 | } 458 | { 459 | let sample = parse_sample(STROKES_2); 460 | res.clear(); 461 | let mut collector = MatchCollector::new(&mut res, 8); 462 | matcher.lookup(&sample, &mut collector); 463 | //write!(&mut barf, "#1: {} #2: {} #3: {} #4: {}", res[0].hanzi, res[1].hanzi, res[2].hanzi, res[3].hanzi).unwrap(); 464 | assert!(res[0].hanzi == '十'); 465 | } 466 | { 467 | let sample = parse_sample(STROKES_3); 468 | res.clear(); 469 | let mut collector = MatchCollector::new(&mut res, 8); 470 | matcher.lookup(&sample, &mut collector); 471 | //write!(&mut barf, "#1: {} #2: {} #3: {} #4: {}", res[0].hanzi, res[1].hanzi, res[2].hanzi, res[3].hanzi).unwrap(); 472 | assert!(res[1].hanzi == '元'); // Here we get the right char as the second match! 473 | } 474 | { 475 | let sample = parse_sample(STROKES_4); 476 | res.clear(); 477 | let start = Instant::now(); 478 | let mut collector = MatchCollector::new(&mut res, 8); 479 | matcher.lookup(&sample, &mut collector); 480 | let duration = start.elapsed(); 481 | write!(&mut barf, "Duration: {:?}", duration).unwrap(); 482 | println!("Duration: {:?}", duration); 483 | //write!(&mut barf, "#1: {} #2: {} #3: {} #4: {}", res[0].hanzi, res[1].hanzi, res[2].hanzi, res[3].hanzi).unwrap(); 484 | assert!(res[0].hanzi == '氣'); 485 | } 486 | } 487 | } 488 | -------------------------------------------------------------------------------- /web_demo/jquery-3.4.1.min.js: -------------------------------------------------------------------------------- 1 | /*! 
jQuery v3.4.1 | (c) JS Foundation and other contributors | jquery.org/license */ 2 | !function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],E=C.document,r=Object.getPrototypeOf,s=t.slice,g=t.concat,u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l=a.call(Object),y={},m=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType},x=function(e){return null!=e&&e===e.window},c={type:!0,src:!0,nonce:!0,noModule:!0};function b(e,t,n){var r,i,o=(n=n||E).createElement("script");if(o.text=e,t)for(r in c)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function w(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[o.call(e)]||"object":typeof e}var f="3.4.1",k=function(e,t){return new k.fn.init(e,t)},p=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g;function d(e){var t=!!e&&"length"in e&&e.length,n=w(e);return!m(e)&&!x(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+M+")"+M+"*"),U=new RegExp(M+"|>"),X=new RegExp($),V=new RegExp("^"+I+"$"),G={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+W),PSEUDO:new RegExp("^"+$),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,J=/^h\d$/i,K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new RegExp("\\\\([\\da-f]{1,6}"+M+"?|("+M+")|.)","ig"),ne=function(e,t,n){var r="0x"+t-65536;return 
r!=r||n?t:r<0?String.fromCharCode(r+65536):String.fromCharCode(r>>10|55296,1023&r|56320)},re=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ie=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},oe=function(){T()},ae=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{H.apply(t=O.call(m.childNodes),m.childNodes),t[m.childNodes.length].nodeType}catch(e){H={apply:t.length?function(e,t){L.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function se(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&((e?e.ownerDocument||e:m)!==C&&T(e),e=e||C,E)){if(11!==p&&(u=Z.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return n.push(a),n}else if(f&&(a=f.getElementById(i))&&y(e,a)&&a.id===i)return n.push(a),n}else{if(u[2])return H.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&d.getElementsByClassName&&e.getElementsByClassName)return H.apply(n,e.getElementsByClassName(i)),n}if(d.qsa&&!A[t+" "]&&(!v||!v.test(t))&&(1!==p||"object"!==e.nodeName.toLowerCase())){if(c=t,f=e,1===p&&U.test(t)){(s=e.getAttribute("id"))?s=s.replace(re,ie):e.setAttribute("id",s=k),o=(l=h(t)).length;while(o--)l[o]="#"+s+" "+xe(l[o]);c=l.join(","),f=ee.test(t)&&ye(e.parentNode)||e}try{return H.apply(n,f.querySelectorAll(c)),n}catch(e){A(t,!0)}finally{s===k&&e.removeAttribute("id")}}}return g(t.replace(B,"$1"),e,n,r)}function ue(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function le(e){return e[k]=!0,e}function ce(e){var t=C.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function fe(e,t){var n=e.split("|"),r=n.length;while(r--)b.attrHandle[n[r]]=t}function pe(e,t){var 
n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function de(t){return function(e){return"input"===e.nodeName.toLowerCase()&&e.type===t}}function he(n){return function(e){var t=e.nodeName.toLowerCase();return("input"===t||"button"===t)&&e.type===n}}function ge(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&ae(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function ve(a){return le(function(o){return o=+o,le(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function ye(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}for(e in d=se.support={},i=se.isXML=function(e){var t=e.namespaceURI,n=(e.ownerDocument||e).documentElement;return!Y.test(t||n&&n.nodeName||"HTML")},T=se.setDocument=function(e){var t,n,r=e?e.ownerDocument||e:m;return r!==C&&9===r.nodeType&&r.documentElement&&(a=(C=r).documentElement,E=!i(C),m!==C&&(n=C.defaultView)&&n.top!==n&&(n.addEventListener?n.addEventListener("unload",oe,!1):n.attachEvent&&n.attachEvent("onunload",oe)),d.attributes=ce(function(e){return e.className="i",!e.getAttribute("className")}),d.getElementsByTagName=ce(function(e){return e.appendChild(C.createComment("")),!e.getElementsByTagName("*").length}),d.getElementsByClassName=K.test(C.getElementsByClassName),d.getById=ce(function(e){return a.appendChild(e).id=k,!C.getElementsByName||!C.getElementsByName(k).length}),d.getById?(b.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(te,ne);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return 
t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=d.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):d.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},b.find.CLASS=d.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&E)return t.getElementsByClassName(e)},s=[],v=[],(d.qsa=K.test(C.querySelectorAll))&&(ce(function(e){a.appendChild(e).innerHTML="",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+k+"-]").length||v.push("~="),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+k+"+*").length||v.push(".#.+[+~]")}),ce(function(e){e.innerHTML="";var t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",$)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var 
n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},D=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)===(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e===C||e.ownerDocument===m&&y(m,e)?-1:t===C||t.ownerDocument===m&&y(m,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e===C?-1:t===C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]===m?-1:s[r]===m?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if((e.ownerDocument||e)!==C&&T(e),d.matchesSelector&&E&&!A[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){A(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var 
t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=p[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&p(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=se.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,n,r){return m(n)?k.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?k.grep(e,function(e){return e===n!==r}):"string"!=typeof n?k.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(k.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||q,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:L.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof k?t[0]:t,k.merge(this,k.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:E,!0)),D.test(r[1])&&k.isPlainObject(t))for(r in t)m(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=E.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):m(e)?void 0!==n.ready?n.ready(e):e(k):k.makeArray(e,this)}).prototype=k.fn,q=k(E);var H=/^(?:parents|prev(?:Until|All))/,O={children:!0,contents:!0,next:!0,prev:!0};function P(e,t){while((e=e[t])&&1!==e.nodeType);return e}k.fn.extend({has:function(e){var t=k(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,he=/^$|^module$|\/(?:java|ecma)script/i,ge={option:[1,""],thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};function ve(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?k.merge([e],n):n}function ye(e,t){for(var n=0,r=e.length;nx",y.noCloneChecked=!!me.cloneNode(!0).lastChild.defaultValue;var Te=/^key/,Ce=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,Ee=/^([^.]*)(?:\.(.+)|)/;function ke(){return!0}function Se(){return!1}function Ne(e,t){return e===function(){try{return E.activeElement}catch(e){}}()==("focus"===t)}function Ae(e,t,n,r,i,o){var a,s;if("object"==typeof t){for(s in"string"!=typeof n&&(r=r||n,n=void 0),t)Ae(e,s,n,r,t[s],o);return e}if(null==r&&null==i?(i=n,r=n=void 0):null==i&&("string"==typeof n?(i=r,r=void 0):(i=r,r=n,n=void 0)),!1===i)i=Se;else if(!i)return e;return 1===o&&(a=i,(i=function(e){return k().off(e),a.apply(this,arguments)}).guid=a.guid||(a.guid=k.guid++)),e.each(function(){k.event.add(this,t,i,r,n)})}function De(e,i,o){o?(Q.set(e,i,!1),k.event.add(e,i,{namespace:!1,handler:function(e){var t,n,r=Q.get(this,i);if(1&e.isTrigger&&this[i]){if(r.length)(k.event.special[i]||{}).delegateType&&e.stopPropagation();else if(r=s.call(arguments),Q.set(this,i,r),t=o(this,i),this[i](),r!==(n=Q.get(this,i))||t?Q.set(this,i,!1):n={},r!==n)return e.stopImmediatePropagation(),e.preventDefault(),n.value}else r.length&&(Q.set(this,i,{value:k.event.trigger(k.extend(r[0],k.Event.prototype),r.slice(1),this)}),e.stopImmediatePropagation())}})):void 0===Q.get(e,i)&&k.event.add(e,i,ke)}k.event={global:{},add:function(t,e,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=Q.get(t);if(v){n.handler&&(n=(o=n).handler,i=o.selector),i&&k.find.matchesSelector(ie,i),n.guid||(n.guid=k.guid++),(u=v.events)||(u=v.events={}),(a=v.handle)||(a=v.handle=function(e){return"undefined"!=typeof k&&k.event.triggered!==e.type?k.event.dispatch.apply(t,arguments):void 
0}),l=(e=(e||"").match(R)||[""]).length;while(l--)d=g=(s=Ee.exec(e[l])||[])[1],h=(s[2]||"").split(".").sort(),d&&(f=k.event.special[d]||{},d=(i?f.delegateType:f.bindType)||d,f=k.event.special[d]||{},c=k.extend({type:d,origType:g,data:r,handler:n,guid:n.guid,selector:i,needsContext:i&&k.expr.match.needsContext.test(i),namespace:h.join(".")},o),(p=u[d])||((p=u[d]=[]).delegateCount=0,f.setup&&!1!==f.setup.call(t,r,h,a)||t.addEventListener&&t.addEventListener(d,a)),f.add&&(f.add.call(t,c),c.handler.guid||(c.handler.guid=n.guid)),i?p.splice(p.delegateCount++,0,c):p.push(c),k.event.global[d]=!0)}},remove:function(e,t,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=Q.hasData(e)&&Q.get(e);if(v&&(u=v.events)){l=(t=(t||"").match(R)||[""]).length;while(l--)if(d=g=(s=Ee.exec(t[l])||[])[1],h=(s[2]||"").split(".").sort(),d){f=k.event.special[d]||{},p=u[d=(r?f.delegateType:f.bindType)||d]||[],s=s[2]&&new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"),a=o=p.length;while(o--)c=p[o],!i&&g!==c.origType||n&&n.guid!==c.guid||s&&!s.test(c.namespace)||r&&r!==c.selector&&("**"!==r||!c.selector)||(p.splice(o,1),c.selector&&p.delegateCount--,f.remove&&f.remove.call(e,c));a&&!p.length&&(f.teardown&&!1!==f.teardown.call(e,h,v.handle)||k.removeEvent(e,d,v.handle),delete u[d])}else for(d in u)k.event.remove(e,d+t[l],n,r,!0);k.isEmptyObject(u)&&Q.remove(e,"handle events")}},dispatch:function(e){var t,n,r,i,o,a,s=k.event.fix(e),u=new Array(arguments.length),l=(Q.get(this,"events")||{})[s.type]||[],c=k.event.special[s.type]||{};for(u[0]=s,t=1;t\x20\t\r\n\f]*)[^>]*)\/>/gi,qe=/\s*$/g;function Oe(e,t){return A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&k(e).children("tbody")[0]||e}function Pe(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function Re(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Me(e,t){var n,r,i,o,a,s,u,l;if(1===t.nodeType){if(Q.hasData(e)&&(o=Q.access(e),a=Q.set(t,o),l=o.events))for(i in delete 
a.handle,a.events={},l)for(n=0,r=l[i].length;n")},clone:function(e,t,n){var r,i,o,a,s,u,l,c=e.cloneNode(!0),f=oe(e);if(!(y.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||k.isXMLDoc(e)))for(a=ve(c),r=0,i=(o=ve(e)).length;r").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),E.head.appendChild(r[0])},abort:function(){i&&i()}}});var Vt,Gt=[],Yt=/(=)\?(?=&|$)|\?\?/;k.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Gt.pop()||k.expando+"_"+kt++;return this[e]=!0,e}}),k.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Yt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Yt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=m(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Yt,"$1"+r):!1!==e.jsonp&&(e.url+=(St.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||k.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=C[r],C[r]=function(){o=arguments},n.always(function(){void 0===i?k(C).removeProp(r):C[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Gt.push(r)),o&&m(i)&&i(o[0]),o=i=void 0}),"script"}),y.createHTMLDocument=((Vt=E.implementation.createHTMLDocument("").body).innerHTML="
",2===Vt.childNodes.length),k.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(y.createHTMLDocument?((r=(t=E.implementation.createHTMLDocument("")).createElement("base")).href=E.location.href,t.head.appendChild(r)):t=E),o=!n&&[],(i=D.exec(e))?[t.createElement(i[1])]:(i=we([e],t,o),o&&o.length&&k(o).remove(),k.merge([],i.childNodes)));var r,i,o},k.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(k.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},k.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){k.fn[t]=function(e){return this.on(t,e)}}),k.expr.pseudos.animated=function(t){return k.grep(k.timers,function(e){return t===e.elem}).length},k.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=k.css(e,"position"),c=k(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=k.css(e,"top"),u=k.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),m(t)&&(t=t.call(e,n,k.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},k.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){k.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var 
e,t,n,r=this[0],i={top:0,left:0};if("fixed"===k.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===k.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=k(e).offset()).top+=k.css(e,"borderTopWidth",!0),i.left+=k.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-k.css(r,"marginTop",!0),left:t.left-i.left-k.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===k.css(e,"position"))e=e.offsetParent;return e||ie})}}),k.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;k.fn[t]=function(e){return _(this,function(e,t,n){var r;if(x(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),k.each(["top","left"],function(e,n){k.cssHooks[n]=ze(y.pixelPosition,function(e,t){if(t)return t=_e(e,n),$e.test(t)?k(e).position()[n]+"px":t})}),k.each({Height:"height",Width:"width"},function(a,s){k.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){k.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return _(this,function(e,t,n){var r;return x(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?k.css(e,t,i):k.style(e,t,n,i)},s,n?e:void 0,n)}})}),k.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){k.fn[n]=function(e,t){return 0