├── .gitattributes ├── .gitignore ├── README.md ├── requirements.txt ├── serve.py ├── src ├── .entry.js.swp ├── content.js ├── entry.js └── stt.js ├── templates ├── index.jinja2 └── viewer.jinja2 └── webpack.config.js /.gitattributes: -------------------------------------------------------------------------------- 1 | webpack.config.js linguist-vendored 2 | 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *.cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | 56 | # Flask stuff: 57 | instance/ 58 | .webassets-cache 59 | 60 | # Scrapy stuff: 61 | .scrapy 62 | 63 | # Sphinx documentation 64 | docs/_build/ 65 | 66 | # PyBuilder 67 | target/ 68 | 69 | # IPython Notebook 70 | .ipynb_checkpoints 71 | 72 | # pyenv 73 | .python-version 74 | 75 | # celery beat schedule file 76 | celerybeat-schedule 77 | 78 | # dotenv 79 | .env 80 | 81 | # virtualenv 82 | venv/ 83 | ENV/ 84 | 85 | # Spyder project settings 86 | .spyderproject 87 | 88 | # Rope project settings 89 | .ropeproject 90 | 91 | static/* 92 | assets/* 93 | images/* 94 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SpeechPortal 2 | 3 | # Installation 4 | 5 | ``` 6 | virtualenv -p python3 .env 7 | source .env/bin/activate 8 | pip install -r requirements.txt 9 | 10 | npm install webpack -g 11 | webpack 12 | 13 | mkdir images assets 14 | python serve.py 15 | ``` 16 | 17 | # Inspiration 18 | Over the course of human history, formal speech has proven to be the single most powerful way to communicate a message to a group of people. Whether you're a Ph.D student studying in Korea, or a refugee escaping Syria, you will find yourself having to give a speech at some point in your life. And whether it's about defending the past five years of your research or desperately seeking help for your younger siblings still at home, preparing for that important speech can be difficult, tedious, and nerve-wracking. 
19 | 20 | And as college students who have had to give speeches to protect their school programs, to inspire suffering communities, and to defend others from bigotry, we believe everyone should have the essential human ability to communicate to others. 21 | 22 | That's why we built SpeechPortal, to make preparing for your next speech fun, easy, and intuitive! Using novel webVR and in-browser speech recognition technology, SpeechPortal creates a dynamic training ground for people who need to quickly and intelligently memorize their next talk! Accessible to anyone with a mobile phone or computer! 23 | 24 | # What it does 25 | SpeechPortal is a webVR app that produces a dynamic virtual memory palace made up of memorable Google Streetview locations that holds image clues generated from an inputted speech. Once you use the clues around you to help you recite the first portion of your speech, the speech-processing portion of our project advances you to the next location (and paragraph in your speech), and will wait to give you a 'hint' if you hesitate/say the wrong thing. 26 | 27 | # How we built it 28 | We used Python and Flask to serve as a container to host the VR environment, a multithreaded Python script to download streetview images onto an SSD server, WebVR/OpenGL technologies for VR rendering, and Javascript to render the auditory processing, perform image stitching, and perform secondary VR processing client-side. We made extensive use of Google Street View Image API and Web Speech API (supported by Google speech processing). On the NLP side, we used NLTK to break down the speech into sections and extract keywords/phrases, and for each keyword, we found a related image using Unsplash. 29 | 30 | # Challenges we ran into 31 | Each VR frame of Google Street view contains a massive amount of data, which we had to resolve using parallel image downloads and non-blocking compressions on an SSD server in order to allow fast mobile rendering. 
32 | 33 | # Built With 34 | python 35 | javascript 36 | google-web-speech-api 37 | nltk 38 | flask 39 | opengl 40 | google-streetview 41 | 42 | # Developed by 43 | Ricky Han, Johann Miller, and Kevin Chen 44 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | appdirs==1.4.0 2 | click==6.7 3 | Flask==0.12 4 | itsdangerous==0.24 5 | Jinja2==2.9.5 6 | MarkupSafe==0.23 7 | olefile==0.44 8 | packaging==16.8 9 | Pillow==4.0.0 10 | pkg-resources==0.0.0 11 | pyparsing==2.1.10 12 | requests==2.13.0 13 | six==1.10.0 14 | Werkzeug==0.11.15 15 | -------------------------------------------------------------------------------- /serve.py: -------------------------------------------------------------------------------- 1 | from flask import Flask, send_file, render_template, abort, send_from_directory, jsonify 2 | from PIL import Image 3 | import urllib.request 4 | import requests 5 | import os, threading, json, glob, queue 6 | from textblob import TextBlob 7 | from unsplash_python.unsplash import Unsplash 8 | 9 | unsplash = Unsplash({ 10 | 'application_id': '30a7d2c59ecd38ab3631dad0bc18005c87d65d6af473c93e6df4ce6f2dbd9d07', 11 | 'secret': '427ee640ed1607b02cab74987bd261383dad9fbe84e9ebc4546ef173352b613b', 12 | 'callback_url': 'http://www.something.com' 13 | }) 14 | 15 | app = Flask(__name__) 16 | 17 | AWESOME_LOCS = [ 18 | [-26.938312,-68.74491499999999], 19 | [60.534114,-149.55007899999998], 20 | [60.070409,6.542388999999957], 21 | [30.184983,-84.72466199999997], 22 | [36.252972,136.90053699999999], 23 | [48.865937,2.312376], 24 | [36.2381539,137.9683151], 25 | [64.0444798,-16.1711884], 26 | [42.658402,11.633269], 27 | [30.3248983,35.4471292], 28 | [47.51075,10.390309], 29 | [53.043081,57.064946], 30 | [-8.4226166,115.3124971], 31 | [35.659607,139.700378], 32 | [50.087586,14.421231], 33 | [-13.165713,-72.545542], 34 | [41.403286,2.174673], 35 | 
[-14.251967,-170.689851], 36 | [33.461503,126.939297], 37 | [-64.731988,-62.594564], 38 | [27.17557,78.041462], 39 | [68.19649,13.53183], 40 | [53.2783229,107.3506844], 41 | [59.9387245,30.3163621], 42 | [40.4900264,-75.0729199], 43 | [14.5841104,120.9799109], 44 | [17.5707683,120.3886023], 45 | [10.6422373,122.2358045], 46 | [18.0619395,120.5205914], 47 | [17.5713349,120.3887765], 48 | [0.5738293,37.5750599], 49 | [-1.3766622,36.7743556] 50 | ] 51 | 52 | WIDTHS = [ 416, 832, 1664, 3328, 6656, 13312 ] 53 | HEIGHTS = [ 416, 416, 832, 1664, 3328, 6656 ] 54 | LEVELSW = [ 1, 2, 4, 7, 13, 26 ] 55 | LEVELSH = [ 1, 1, 2, 4, 7, 13 ] 56 | 57 | 58 | ROOT_FOLDER = "./images/" 59 | 60 | X_RANGE = range(0,7) 61 | Y_RANGE = range(0,3) 62 | 63 | ZOOM = 3 64 | 65 | def equirect(zoom): 66 | global X_RANGE, Y_RANGE 67 | 68 | width = WIDTHS[zoom] 69 | height = HEIGHTS[zoom] 70 | cols = LEVELSW[zoom] 71 | rows = LEVELSH[zoom] 72 | squareW = 512 73 | squareH = 512 74 | 75 | X_RANGE = range(0,cols) 76 | Y_RANGE = range(0,rows) 77 | 78 | return { 79 | 'columns': cols, 80 | 'rows': rows, 81 | 'tileWidth': squareW, 82 | 'tileHeight': squareH, 83 | 'width': width, 84 | 'height': height 85 | } 86 | 87 | 88 | class ThreadUrl(threading.Thread): 89 | def __init__(self, myQueue): 90 | threading.Thread.__init__(self) 91 | self.myQueue = myQueue 92 | 93 | def run(self): 94 | while True: 95 | (panoid,X,Y) = self.myQueue.get() 96 | url = _format_tile(panoid, X, Y) 97 | input_image = ROOT_FOLDER + str(panoid)+'-'+str(X)+'-'+str(Y)+".jpg" 98 | urllib.request.urlretrieve(url,input_image) 99 | self.myQueue.task_done() 100 | 101 | 102 | def download_all_img(data): 103 | myQueue = queue.Queue() 104 | 105 | for i in range(len(data)): 106 | t = ThreadUrl(myQueue) 107 | t.setDaemon(True) 108 | t.start() 109 | for datum in data: 110 | myQueue.put(datum) 111 | 112 | myQueue.join() 113 | 114 | 115 | def _format_id(panoid): 116 | return 
'http://maps.google.com/cbk?output=json&cb_client=apiv3&v=4&dm=1&pm=1&ph=1&hl=en&panoid=' + panoid 117 | 118 | def _format_loc(lat, lng, rad): 119 | return 'https://cbks0.google.com/cbk?cb_client=apiv3&authuser=0&hl=en&output=polygon&it=1%3A1&rank=closest&ll=' + str(lat)+ ',' + str(lng) + '&radius=' + str(rad) 120 | 121 | def _format_tile(panoid, x, y): 122 | return 'https://geo0.ggpht.com/cbk?cb_client=maps_sv.tactile&authuser=0&hl=en&panoid=' + str(panoid) + '&output=tile&x=' + str(x) + '&y=' + str(y) + '&zoom=' + str(ZOOM) + '&nbt&fover=2' 123 | 124 | 125 | def get_panoid_by_loc(lat,lng): 126 | ''' 127 | return panoid from location, False if not found 128 | ''' 129 | r = requests.get(_format_loc(lat,lng,400)) 130 | print(r.text) 131 | if r.text == '{}': return False 132 | return json.loads(r.text)['result'][0]['id'] 133 | 134 | def get_links_by_panoid(panoid): 135 | ''' 136 | returns the linked panoid (frames), i.e. clicking the arrow button in google street view 137 | ''' 138 | r = requests.get(_format_id(panoid)) 139 | return json.loads(r.text)['Links'] 140 | 141 | def get_api_by_panoid(panoid): 142 | ''' 143 | returns the linked panoid (frames), i.e. 
clicking the arrow button in google street view 144 | ''' 145 | r = requests.get(_format_id(panoid)) 146 | return json.loads(r.text) 147 | 148 | def stitch(data, hd): 149 | ''' 150 | stitch tiles together and save a stitched file 151 | ''' 152 | if hd: 153 | ext = '.png' 154 | else : 155 | ext = '.jpg' 156 | rect = equirect(ZOOM) 157 | images = [Image.open(ROOT_FOLDER +str(panoid)+'-'+str(X)+'-'+str(Y)+'.jpg') for (panoid, X, Y) in data ] 158 | total_size = (rect['width'], rect['height']) 159 | stitched = Image.new('RGB', total_size) 160 | panoid = data[0][0] 161 | for (datum, im) in zip(data, images): 162 | (panoid, x, y) = datum 163 | stitched.paste(im=im, box=(512*x,512*y)) 164 | if hd: 165 | blah = 'hd' 166 | else: 167 | blah = '' 168 | 169 | fname = ROOT_FOLDER + blah +'stitched-'+panoid+ext 170 | if hd: 171 | stitched = stitched.resize((4096,2048)) 172 | else: 173 | stitched = stitched.resize((1024,512)) 174 | 175 | stitched.save(fname) 176 | 177 | 178 | @app.route('/location//') 179 | def api(lat,lng): 180 | ''' 181 | GET the equirectangular streetview image at location 182 | ''' 183 | panoid = get_panoid_by_loc(lat,lng) 184 | if not panoid: 185 | abort(404) 186 | output_image = ROOT_FOLDER + 'stitched-'+panoid+'.jpg' 187 | if os.path.exists(output_image): 188 | pass 189 | else: 190 | data = [(panoid,X,Y) for X in X_RANGE for Y in Y_RANGE] 191 | download_all_img(data) 192 | stitch(data, False) 193 | return send_file(output_image) 194 | 195 | @app.route('/panoid/') 196 | def panoid_get(panoid): 197 | ''' 198 | downloads equirectangular streetview image at panoid 199 | http://localhost:5000/panoid/DtaclnuEVvssSuojH8CPpw 200 | ''' 201 | output_image = ROOT_FOLDER + 'stitched-'+panoid+'.jpg' 202 | if os.path.exists(output_image): 203 | pass 204 | else: 205 | data = [(panoid,X,Y) for X in X_RANGE for Y in Y_RANGE] 206 | download_all_img(data) 207 | stitch(data, False) 208 | return send_file(output_image) 209 | 210 | 211 | @app.route('/keypanoid/') 212 | def 
keypanoid_get(panoid): 213 | ''' 214 | downloads equirectangular streetview image at panoid 215 | http://localhost:5000/panoid/DtaclnuEVvssSuojH8CPpw 216 | ''' 217 | output_image = ROOT_FOLDER + 'hdstitched-'+panoid+'.png' 218 | if os.path.exists(output_image): 219 | pass 220 | else: 221 | data = [(panoid,X,Y) for X in X_RANGE for Y in Y_RANGE] 222 | download_all_img(data) 223 | stitch(data, True) 224 | return send_file(output_image) 225 | 226 | 227 | @app.route('/api/') 228 | def panoid_api_get(panoid): 229 | ''' 230 | returns the original panoid api with depth map etc... 231 | ''' 232 | return jsonify(get_api_by_panoid(panoid)) 233 | 234 | 235 | @app.route('/next/') 236 | def get_next_linked_panoid(panoid): 237 | ''' 238 | GET the next panoid 239 | 240 | http://localhost:5000/next/DtaclnuEVvssSuojH8CPpw 241 | ''' 242 | return jsonify(get_links_by_panoid(panoid)) 243 | 244 | def init(): 245 | equirect(ZOOM) 246 | 247 | @app.route('/nouns/') 248 | def nlp(text): 249 | blob = [word for (word, tag) in TextBlob(text).tags if tag == "NN"] 250 | print(blob) # convert this list of strings(nouns) to a list of urls 251 | 252 | searchq = [ json.loads(requests.get("https://pixabay.com/api/?key=4600901-0a61793485c8e08b555145cd1&q="+x+"&image_type=photo&pretty=false").text) for x in blob ] 253 | ret = [ x['hits'][0]['webformatURL'] for x in searchq ] 254 | 255 | return jsonify(ret) 256 | 257 | @app.route('/images/') 258 | def send_static(path): 259 | return send_from_directory('images', path) 260 | 261 | 262 | @app.route('/assets/') 263 | def send_assets(path): 264 | return send_from_directory('assets', path) 265 | 266 | @app.route('/viewer') 267 | def viewer(): 268 | return render_template('viewer.jinja2') 269 | 270 | @app.route('/') 271 | def index(): 272 | return render_template('index.jinja2') 273 | 274 | if __name__ == "__main__": 275 | init() 276 | app.run('127.0.0.1','5000') 277 | 278 | # panoid = get_panoid_by_loc(AWESOME_LOCS[0][0],AWESOME_LOCS[0][1]) 279 | # data = 
[(panoid,X,Y) for X in X_RANGE for Y in Y_RANGE] 280 | # download_all_img(data) 281 | # stitch(data) 282 | # get_links_by_panoid(panoid) 283 | -------------------------------------------------------------------------------- /src/.entry.js.swp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/turtlehacks/speechportal/9e3559b45c5d2cae605fea93fe04d4c97f8b5c5a/src/.entry.js.swp -------------------------------------------------------------------------------- /src/content.js: -------------------------------------------------------------------------------- 1 | export default "It works from content.js." -------------------------------------------------------------------------------- /src/entry.js: -------------------------------------------------------------------------------- 1 | window.current_panoid = ''; 2 | 3 | var COUNTER = 0; 4 | 5 | const places = [ 6 | 'ABqudn7qGGDzdu-3HZFuZw', 7 | 'E9JGW1q48jvgvFkjx0kYjw', 8 | 'ocuWPJcOa4FUFc_pxh0fNQ', 9 | '_znQ1nR9o95RXrK1kEwP0Q', 10 | 'BXw6CgL5yxDtsFlLJ_AdUw', 11 | 'EVUfulQv-D5ypQwXKoF-hA', 12 | '_fFPxZTrOI7MHyReJhWung', 13 | ] 14 | 15 | const xPosValues = [0, -2.5, 2.5, -4, 4]; //picture x positions 16 | const zPosValues = [1,2,2,2,2]; 17 | const yRotValues = [0, 50, -50, 40, -40]; 18 | 19 | 20 | function init() { 21 | const init_panoid = "Jf1XdypK_M7bjLeY1N581g"; 22 | setHDSky(init_panoid); 23 | 24 | fetch(document.location.origin+'/nouns/'+window.paragraph_list[window.curr_paragraph]) 25 | .then((resp)=>resp.json()) 26 | .then(insertImgs) 27 | .catch(console.error) 28 | 29 | } 30 | 31 | function setSky(panoid){ 32 | const sky = document.querySelector('a-sky'); 33 | sky.setAttribute('src', document.location.origin + "/panoid/" + panoid); 34 | window.current_panoid = panoid; 35 | } 36 | function setHDSky(panoid){ 37 | const sky = document.querySelector('a-sky'); 38 | sky.setAttribute('src', document.location.origin + "/keypanoid/" + panoid); 39 | 
window.current_panoid = panoid; 40 | } 41 | function setBGTransparent(){ 42 | const sky = document.querySelector('a-sky'); 43 | sky.setAttribute('opacity', '0'); 44 | sky.setAttribute('color', '#fff'); 45 | } 46 | 47 | function unsetBGTransparent(){ 48 | const sky = document.querySelector('a-sky'); 49 | sky.setAttribute('opacity', '1'); 50 | sky.setAttribute('color', '#fff'); 51 | } 52 | 53 | function updateSky(){ 54 | fetch(document.location.origin + "/next/" + window.current_panoid) 55 | .then( 56 | (res) => res.json() 57 | ).then((json) => { 58 | var sorted = json.sort((a,b)=>a.yawDeg - b.yawDeg) 59 | setSky(sorted[0].panoId); 60 | }).catch((ex) => { 61 | console.log('parsing failed', ex) 62 | }) 63 | } 64 | 65 | function insertImgs(srcs){ 66 | console.log(srcs) 67 | var scene = document.querySelector('a-scene'); 68 | srcs.forEach((src,i)=>{ 69 | var img = document.createElement('a-image'); 70 | img.setAttribute('src', src); 71 | img.setAttribute('position', xPosValues[i] + ' ' + 2 + ' ' + zPosValues[i] ); 72 | img.setAttribute('rotation', 0 + ' ' + yRotValues[i] + ' ' + 0 ); 73 | scene.appendChild(img); 74 | }); 75 | } 76 | 77 | function deleteImgs(){ 78 | var imgs = document.querySelectorAll('a-image'); 79 | if(imgs){ 80 | for(let i=0; iupdateSky(), 100); 90 | deleteImgs(); 91 | 92 | setTimeout(()=>{ 93 | clearInterval(transition); 94 | // setHDSky(window.current_panoid); 95 | 96 | setBGTransparent(); 97 | 98 | setTimeout(()=>{ 99 | setHDSky(places[COUNTER]); 100 | COUNTER++; 101 | setTimeout(()=>{ 102 | unsetBGTransparent(); 103 | 104 | fetch(document.location.origin+'/nouns/'+window.paragraph_list[window.curr_paragraph]) 105 | .then((resp)=>resp.json()) 106 | .then(insertImgs); 107 | 108 | var event = new Event('startTimer'); 109 | window.dispatchEvent(event); 110 | },1500); 111 | }, 1000); 112 | }, 3000); 113 | } 114 | 115 | 116 | window.onload = function() { 117 | require('./stt.js'); 118 | init(); 119 | window.addEventListener('startTransition', 
startTransition); 120 | }; 121 | -------------------------------------------------------------------------------- /src/stt.js: -------------------------------------------------------------------------------- 1 | var isMobile = { // check if user device is mobile 2 | Android: function() { 3 | return navigator.userAgent.match(/Android/i); 4 | }, 5 | BlackBerry: function() { 6 | return navigator.userAgent.match(/BlackBerry/i); 7 | }, 8 | iOS: function() { 9 | return navigator.userAgent.match(/iPhone|iPad|iPod/i); 10 | }, 11 | Opera: function() { 12 | return navigator.userAgent.match(/Opera Mini/i); 13 | }, 14 | Windows: function() { 15 | return navigator.userAgent.match(/IEMobile/i); 16 | }, 17 | any: function() { 18 | return (isMobile.Android() || isMobile.BlackBerry() || isMobile.iOS() || isMobile.Opera() || isMobile.Windows()); 19 | } 20 | }; 21 | 22 | window.paragraph_list = ["The winter evening settles down With smell of steaks in passageways.", 23 | "Six o'clock. The burnt-out ends of smoky days.", 24 | "And now a gusty shower wraps The grimy scraps", 25 | "Of withered leaves about your feet And newspapers from vacant lots;", 26 | "The showers beat On broken blinds and chimney-pots,", 27 | "And at the corner of the street A lonely cab-horse steams and stamps.", 28 | "And then the lighting of the lamps."] 29 | 30 | // [ 31 | // "Some say the world will end in fire, Some say in ice.", 32 | // "From what I’ve tasted of desire, I hold with those who favor fire.", 33 | // "But if it had to perish twice, I think I know enough of hate", 34 | // "To say that for destruction ice, Is also great and would suffice."] 35 | 36 | var unimportant_words = new Set('i','is','was','am','are','a','and','the'); 37 | 38 | 39 | // instantiate recognition 40 | var recognition = new webkitSpeechRecognition(); //Chrome supports webkit prefixed, firefox doesn't 41 | recognition.continuous = true; // doesn't turn off recognition during pause 42 | recognition.interimResults = true; // can 
see the interim results 43 | 44 | document.body.onkeydown = function(e){ 45 | if(e.keyCode == 32){ // spacebar press 46 | next_frame(); 47 | } 48 | if(e.keyCode == 90){ // z press 49 | resetAll(); 50 | } 51 | } 52 | 53 | if( isMobile.any() ){ // user device is mobile 54 | document.querySelector("canvas").ontouchstart = function(e){ // screen touch 55 | next_frame(); 56 | } 57 | } 58 | 59 | //global vars 60 | window.curr_paragraph; //index 61 | var input_set; //all words said by user 62 | var input_set_size_past; //size of set at previous check 63 | var conf_score; //accuracy of input compared to master paragraph 64 | var conf_score_past; //accuracy of input at previous check 65 | var total_results; //num of times the user has spoken 66 | var total_results_past; //num of times at previous check 67 | var restart_recog; //true so that recording doesn't stop 68 | var speech_pause_timer; //check if user is speaking 69 | var text_to_speech; 70 | var conf_cuttoff = 0.8; // when to move to next line 71 | var reading; 72 | 73 | initialize(); 74 | 75 | // instantiate 76 | function initialize(){ 77 | window.curr_paragraph = 0; 78 | input_set = new Set(); 79 | input_set_size_past = 0; 80 | conf_score = 0; 81 | conf_score_past = 0; 82 | total_results = 0; 83 | total_results_past = 0; 84 | restart_recog = true; 85 | reading = false; 86 | 87 | recognition.start(); 88 | setTimeout(()=>{ 89 | startTimer(); 90 | },5000); 91 | window.addEventListener('startTimer', startTimer); 92 | 93 | } 94 | 95 | recognition.onresult = function(event) { 96 | total_results++; 97 | //list of recognized phrases, split by pauses. 
If the phrase is final, add to perm string 98 | for(var i = event.resultIndex; i < event.results.length; ++i) { 99 | var input_raw = event.results[i][0].transcript.toLowerCase().toLowerCase().replace(/[.,\/#!$%\^&\*;:{}=\-_`~()]/g,"").replace(/\s{2,}/g," ") 100 | var input_split = input_raw.split(" ") 101 | for(let j=0; jinput_set_size_past){ 115 | input_set_size_past = input_set_size; 116 | conf_score = calc_conf_score(input_set, window.paragraph_list[window.curr_paragraph]); 117 | // let conf = calc_conf_score(perm_trans, window.paragraph_list[curr_paragraph]); 118 | console.log(conf_score); 119 | if(conf_score >= conf_cuttoff) { 120 | next_frame(); 121 | } 122 | } 123 | } 124 | 125 | recognition.onend = function(event){ 126 | console.log("Restart: "+restart_recog+event) 127 | if(restart_recog){ 128 | recognition.start(); 129 | } 130 | } 131 | 132 | // Calculates how close two strings are to each other 133 | function calc_conf_score(input_set, master_paragraph){ 134 | 135 | let num_same_words = 0; 136 | // clean string and split by spaces 137 | let master_paragraph_split = master_paragraph.toLowerCase().replace(/[.,\/#!$%\^&\*;:{}=\-_`~()]/g,"").replace(/\s{2,}/g," ").split(" "); 138 | let master_paragraph_split_filtered = master_paragraph_split.filter((x)=>!unimportant_words.has(x)); 139 | let num_words_paragraph = master_paragraph_split_filtered.length; 140 | 141 | for(var i = 0; i < master_paragraph_split_filtered.length; i++){ 142 | if (input_set.has(master_paragraph_split_filtered[i])){ 143 | num_same_words++; 144 | } 145 | } 146 | return (num_same_words / num_words_paragraph); 147 | } 148 | 149 | // Resets the input for the next line 150 | function next_frame(){ 151 | clearInterval(speech_pause_timer); 152 | if(window.curr_paragraph 2 | 3 | 4 | SpeechPortal 5 | 6 | 7 | 8 |
9 |
10 |
11 |

SpeechPortal

12 |

[1st Place HopHacks] A dynamic webVR memory palace for speech training

13 |

Click anywhere to get started!

14 |
15 |
16 |
17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /templates/viewer.jinja2: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Speech Portal 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | entry: "./src/entry.js", 3 | output: { 4 | path: __dirname, 5 | filename: "assets/bundle.js" 6 | }, 7 | module: { 8 | loaders: [ 9 | { test: /\.css$/, loader: "style!css" } 10 | ] 11 | } 12 | }; 13 | --------------------------------------------------------------------------------