├── .github
│   └── FUNDING.yml
├── 1. Get all available numbers.md
├── 2. Find cool numbers.md
├── 3. Collect words.txt
├── LICENSE
├── README.md
├── har-to-curl.html
└── har-to-curl.js

--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 | 
3 | github: [fulldecent]
4 | custom: ["https://www.paypal.me/fulldecent", "https://amazon.com/hz/wishlist/ls/EE78A23EEGQB"]
5 | 
--------------------------------------------------------------------------------
/1. Get all available numbers.md:
--------------------------------------------------------------------------------
1 | # Get All Available Numbers
2 | 
3 | Run all of these instructions in `bash` or a compatible shell.
4 | 
5 | ## Prepare to harvest
6 | 
7 | 1. Use Chrome and open this URL (yes, I know it returns a 404):
8 | 
9 |    ```sh
10 |    https://www.google.com/voice/b/0/404
11 |    ```
12 | 
13 | 2. Extract your authentication cookies
14 | 
15 |    1. Open developer tools -> Network
16 |    2. Select the `0/` request and right-click to `Save as HAR`
17 |    3. Open that file, select all, copy
18 |    4. Open `har-to-curl.html` and paste
19 |    5. Locate the `-H "cookie: gv=... GV_NR=1; ..."` part; it is long
20 |    6. Copy the text between, and not including, those double quotes
21 | 
22 | 3. Record those cookies by pasting them below
23 | 
24 |    ```sh
25 |    mkdir ~/Desktop/GOOGLEVOICE; cd ~/Desktop/GOOGLEVOICE
26 |    URL="https://www.google.com/voice/b/0/setup/searchnew/"
27 |    HEADER="PASTE HERE"
28 |    ```
29 | 
30 | 4. Test that it works
31 | 
32 |    ```sh
33 |    curl -H "$HEADER" "${URL}?ac=484&q=484&start=0&country=US"
34 |    ```
35 | 
36 |    If you see a brief response (fewer than 100 lines) containing a redirect, your cookies did not work; go back and re-copy them.
37 | 
38 | Here are the available area codes:
39 | 
40 | ```sh
41 | curl -H "$HEADER" "${URL}?ac=[201-999]" | grep -o "+1[0-9]\{3\}" | cut -b3-5 | sort -u > areacodes
42 | ```
43 | 
44 | Now seed the `numbers` file with one number in each area code you want:
45 | 
46 | ```sh
47 | echo -e "7472222222\n2122222222" > numbers
48 | ```
49 | 
50 | ## Harvest — the public blog way
51 | 
52 | ```sh
53 | for a in $(cat areacodes); do echo "${a}0000000"; done > numbers # GET ALL NUMBERS
54 | cut -b1-3 numbers | sort -u | (while read LINE; do curl -H "$HEADER" "${URL}?ac=${LINE:0:3}&q=$LINE[0-9]&start=0"; done) | grep -Pho '\d{10}\b' | sort -u >> numbers
55 | cut -b1-4 numbers | sort -u | (while read LINE; do curl -H "$HEADER" "${URL}?ac=${LINE:0:3}&q=$LINE[0-9]&start=0"; done) | grep -Pho '\d{10}\b' | sort -u >> numbers
56 | cut -b1-5 numbers | sort -u | (while read LINE; do curl -H "$HEADER" "${URL}?ac=${LINE:0:3}&q=$LINE[0-9]&start=0"; done) | grep -Pho '\d{10}\b' | sort -u >> numbers
57 | cut -b1-6 numbers | sort -u | (while read LINE; do curl -H "$HEADER" "${URL}?ac=${LINE:0:3}&q=$LINE[0-9]&start=0"; done) | grep -Pho '\d{10}\b' | sort -u >> numbers
58 | cut -b1-7 numbers | sort -u | (while read LINE; do curl -H "$HEADER" "${URL}?ac=${LINE:0:3}&q=$LINE[0-9]&start=0"; done) | grep -Pho '\d{10}\b' | sort -u >> numbers
59 | cut -b1-8 numbers | sort -u | (while read LINE; do curl -H "$HEADER" "${URL}?ac=${LINE:0:3}&q=$LINE[0-9]&start=0"; done) | grep -Pho '\d{10}\b' | sort -u >> numbers
60 | cut -b1-8 numbers | sort -u | (while read LINE; do curl -H "$HEADER" "${URL}?ac=${LINE:0:3}&q=$LINE[0-9]&start=5"; done) | grep -Pho '\d{10}\b' | sort -u >> numbers
61 | ```
62 | 
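The eight commands above differ only in the prefix length they expand (3 through 8 digits, plus a final `start=5` pass to page past the first five results per query). The same idea written as a loop, as a sketch: single-threaded, reusing the `URL` and `HEADER` set above, and leaving the `start=5` pass out:

```sh
# Expand known prefixes one digit at a time; curl's [0-9] glob issues ten requests per prefix.
for len in 3 4 5 6 7 8; do
  cut -b1-"$len" numbers | sort -u | while read -r PREFIX; do
    curl --silent -H "$HEADER" "${URL}?ac=${PREFIX:0:3}&q=${PREFIX}[0-9]&start=0"
  done | grep -Pho '\d{10}\b' | sort -u >> numbers
done
```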
63 | ## Improvements not posted to blog (Mac + Linux)
64 | 
65 | Differences: log in to a server you control and run this there, not on your laptop! (`ssh root@…`, then `screen`)
66 | 
67 | ```sh
68 | curl -H "$HEADER" "${URL}?ac=[201-999]&start=0" | egrep -ho '[0-9]{10}\b' > numbers
69 | cut -b1-3 numbers | sort -u > areacodes
70 | cut -b1-3 numbers | sort -u | sed "s|^\(...\).*|$URL?ac=\1\&q=&[0-9]|" | xargs -P10 -- curl -H "$HEADER" | egrep -o '[0-9]{10}\b' >> numbers
71 | cut -b1-4 numbers | sort -u | sed "s|^\(...\).*|$URL?ac=\1\&q=&[0-9]|" | xargs -P10 -- curl -H "$HEADER" | egrep -o '[0-9]{10}\b' >> numbers
72 | cut -b1-5 numbers | sort -u | sed "s|^\(...\).*|$URL?ac=\1\&q=&[0-9]|" | xargs -P10 -- curl -H "$HEADER" | egrep -o '[0-9]{10}\b' >> numbers
73 | cut -b1-6 numbers | sort -u | sed "s|^\(...\).*|$URL?ac=\1\&q=&[0-9]|" | xargs -P10 -- curl -H "$HEADER" | egrep -o '[0-9]{10}\b' >> numbers
74 | cut -b1-7 numbers | sort -u | sed "s|^\(...\).*|$URL?ac=\1\&q=&[0-9]|" | xargs -P10 -- curl -H "$HEADER" | egrep -o '[0-9]{10}\b' >> numbers
75 | cut -b1-8 numbers | sort -u | sed "s|^\(...\).*|$URL?ac=\1\&q=&[0-9]|" | xargs -P10 -- curl -H "$HEADER" | egrep -o '[0-9]{10}\b' >> numbers
76 | cut -b1-8 numbers | sort -u | sed "s|^\(...\).*|$URL?ac=\1\&q=&[0-9]\&start=5|" | xargs -P10 -- curl -H "$HEADER" | egrep -o '[0-9]{10}\b' | sort -u >> numbers
77 | say done
78 | ```
79 | 
80 | ## Crazy new parallel way
81 | 
82 | List the servers you can SSH into in `--sshlogin` below; `:` means run locally.
83 | 
84 | ```sh
85 | NUM2URL=(sed "s|^\(...\).*|$URL?ac=\1\&q=&[0-9]|")
86 | PARALLEL_OPTS=(-n5 --max-procs 2 --sshlogin :,A,B,C,... --eta)
87 | PROGRAM="curl --silent -H \"$HEADER\" {} | grep -o '[0-9]\{10\}\b' | sort -u"
88 | cut -b1-3 numbers | sort -u | "${NUM2URL[@]}" | parallel "${PARALLEL_OPTS[@]}" "$PROGRAM" >> numbers
89 | cut -b1-4 numbers | sort -u | "${NUM2URL[@]}" | parallel "${PARALLEL_OPTS[@]}" "$PROGRAM" >> numbers
90 | cut -b1-4 numbers | sort -u | "${NUM2URL[@]}" | parallel "${PARALLEL_OPTS[@]}" "$PROGRAM" >> numbers
91 | cut -b1-5 numbers | sort -u | "${NUM2URL[@]}" | parallel "${PARALLEL_OPTS[@]}" "$PROGRAM" >> numbers
92 | cut -b1-6 numbers | sort -u | "${NUM2URL[@]}" | parallel "${PARALLEL_OPTS[@]}" "$PROGRAM" >> numbers
93 | cut -b1-7 numbers | sort -u | "${NUM2URL[@]}" | parallel "${PARALLEL_OPTS[@]}" "$PROGRAM" >> numbers
94 | cut -b1-8 numbers | sort -u | "${NUM2URL[@]}" | parallel "${PARALLEL_OPTS[@]}" "$PROGRAM" >> numbers
95 | cut -b1-9 numbers | sort -u | "${NUM2URL[@]}" | parallel "${PARALLEL_OPTS[@]}" "$PROGRAM" >> numbers
96 | cut -b1-9 numbers | sort -u | sed "s|^\(...\).*|$URL?ac=\1\&q=&[0-9]\&start=5|" | parallel "${PARALLEL_OPTS[@]}" "$PROGRAM" >> numbers
97 | say done
98 | ```
99 | 
100 | 
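Long harvests can outlive the Google session. A small helper, sketched here by reusing the step-4 test and its fewer-than-100-lines heuristic, confirms the cookies still work; call `check_session || exit` between passes:

```sh
# Hypothetical helper: succeeds while the authenticated search still returns a full page.
check_session() {
  local lines
  lines=$(curl --silent -H "$HEADER" "${URL}?ac=484&q=484&start=0&country=US" | wc -l)
  if [ "$lines" -lt 100 ]; then
    echo "Session looks expired; extract fresh cookies before continuing" >&2
    return 1
  fi
}
```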
--------------------------------------------------------------------------------
/2. Find cool numbers.md:
--------------------------------------------------------------------------------
1 | # Find Cool Numbers
2 | 
3 | First, clean up your numbers file:
4 | 
5 | ```sh
6 | sort -u numbers > tmp; mv tmp numbers
7 | wc -l numbers                  # how many numbers
8 | cut -b 1-3 numbers | uniq -c   # how many numbers per area code
9 | ```
10 | 
11 | Now, find fun numbers:
12 | 
13 | ```sh
14 | # repeated strings of digits
15 | grep -P "(\d{5}).*\1" numbers #5
16 | grep -P "(\d{4}).*\1" numbers #4
17 | 
18 | # the whole number, with a limited number of unique digits
19 | grep -P "^(\d)\1*(\d)\1*\2*\1*\2*\1*\2*\1*\2*(\d)(\1|\2|\3)*$" numbers #3
20 | grep -P "^(\d)\1*(\d)(\1|\2)*$" numbers #2
21 | 
22 | # ex-area code, limited number of unique digits
23 | grep -P "^...(\d)\1*(\d)\1*\2*\1*\2*\1*\2*\1*\2*(\d)(\1|\2|\3)*$" numbers #3
24 | grep -P "^...(\d)\1*(\d)(\1|\2)*$" numbers #2
25 | 
26 | # digit runs
27 | grep -P '(\d)\1{3}' numbers #4
28 | 
29 | # sets of triples
30 | grep -P "(\d)\1\1.*(\d)\2\2" numbers #2
31 | grep -P '(\d)\1{2,}' numbers #1
32 | 
33 | # sets of doubles
34 | grep -P "(\d)\1(\d)\2(\d)\3(\d)\4(\d)\5" numbers #5
35 | grep -P "(\d)\1.(\d)\2.(\d)\3.*(\d)\4" numbers #4
36 | grep -P "(\d)\1.(\d)\2.(\d)\3" numbers #3
37 | 
38 | # palindrome
39 | grep -P "(\d)(\d)(\d)(\d)(\d)\5\4\3\2\1" numbers #10
40 | grep -P "(\d)(\d)(\d)(\d)(\d)\4\3\2\1" numbers #9
41 | grep -P "(\d)(\d)(\d)(\d)\4\3\2\1" numbers #8
42 | grep -P "(\d)(\d)(\d)(\d)\3\2\1" numbers #7
43 | 
44 | # toggles
45 | grep -P "(\d)(\d)\1\2\1\2\1" numbers #7
46 | grep -P "(\d)(\d)\1\2\1\2" numbers #6
47 | grep -P "(\d)(\d)\1\2\1" numbers #5
48 | ```
49 | 
--------------------------------------------------------------------------------
/3. Collect words.txt:
--------------------------------------------------------------------------------
1 | # Find Words
2 | 
3 | **Prerequisites:**
4 | 
5 | * `./numbers` contains all the harvested numbers
6 | * You need to supply your own word lists (see the `.import` paths below)
7 | 
8 | 
9 | ## Put numbers into database
10 | 
11 | ##
12 | ## Dictionaries are in word-number format (for example, "flowers" becomes 3569377):
13 | ## tr abcdefghijklmnopqrstuvwxyz 22233344455566677778889999 < 9letter.txt > 9letter.txt123
14 | ## paste PHRASES.10 PHRASES.10123 > PHRASES.10txt123
15 | ##
16 | 
17 | ##
18 | ## Put numbers in database
19 | ##
20 | sqlite3 numbers.db
21 | 
22 | 
23 | CREATE TABLE numbersStaging (num, PRIMARY KEY(num));
24 | CREATE TABLE numbers (num, last7, last4, PRIMARY KEY(num));
25 | CREATE INDEX l7 on numbers (last7);
26 | CREATE INDEX l4 on numbers (last4);
27 | 
28 | .import numbers numbersStaging
29 | 
30 | INSERT OR IGNORE INTO numbers SELECT num, substr(num,4), substr(num,7) FROM numbersStaging;
31 | 
32 | 
33 | ##
34 | ## Put words in database
35 | ##
36 | CREATE TABLE wordsStaging (word, PRIMARY KEY(word));
37 | CREATE TABLE words (word, wnum, wnumfirst10, PRIMARY KEY(word));
38 | CREATE INDEX wnum on words (wnum);
39 | CREATE INDEX w10 on words (wnumfirst10);
40 | 
41 | .import ../Wordlists/MarkDavies/PHRASES wordsStaging
42 | .import ../Wordlists/Scrabble/10letter.txt wordsStaging
43 | .import ../Wordlists/Scrabble/9letter.txt wordsStaging
44 | 
45 | INSERT OR IGNORE INTO words SELECT word,word,word FROM wordsStaging;
46 | UPDATE words SET wnum = 
replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(word,'a','2'),'b','2'),'c','2'),'d','3'),'e','3'),'f','3'),'g','4'),'h','4'),'i','4'),'j','5'),'k','5'),'l','5'),'m','6'),'n','6'),'o','6'),'p','7'),'q','7'),'r','7'),'s','7'),'t','8'),'u','8'),'v','8'),'w','9'),'x','9'),'y','9'),'z','9'); 47 | UPDATE words SET wnumfirst10 = substr(wnum,1,10); 48 | 49 | 50 | ## 51 | ## Extract 52 | ## 53 | SELECT * FROM numbers,words WHERE num=wnum; 54 | SELECT * FROM numbers,words WHERE last7=wnum; -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 William Entriken 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # google-voice-numbers 2 | Retrieves the full list of available Google Voice numbers and finds the best ones 3 | -------------------------------------------------------------------------------- /har-to-curl.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Convert HAR (HTTP Archive) to cURL command 6 | 7 | 50 | 56 | 57 | 58 |
Only the page's visible text survives in this export; the HTML markup was stripped. A sketch of an equivalent page follows below.

HAR goes on the left and cURL comes out on the right.

har-to-curl.js is hosted on GitHub. By @mcaruanagalizia.
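A minimal page with the same behavior might look like the following. This is a sketch, not the original file: the element ids, layout, and event wiring are invented; only the visible text and the call into `harToCurl` (defined in har-to-curl.js below) come from the repository.

```html
<!-- Sketch of a HAR-to-cURL page; ids and layout are guesses, not the original markup. -->
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8">
  <title>Convert HAR (HTTP Archive) to cURL command</title>
  <script src="har-to-curl.js"></script>
</head>
<body>
  <h1>Convert HAR (HTTP Archive) to cURL command</h1>
  <p>HAR goes on the left and cURL comes out on the right.</p>
  <textarea id="har" placeholder="Paste HAR JSON here"></textarea>
  <textarea id="curl" readonly></textarea>
  <p>har-to-curl.js is hosted on GitHub. By @mcaruanagalizia.</p>
  <script>
    document.getElementById('har').addEventListener('input', function () {
      var out;
      try {
        out = harToCurl(this.value); // string for one entry, array of strings for a full log
      } catch (e) {
        out = 'Invalid HAR: ' + e.message;
      }
      document.getElementById('curl').value = Array.isArray(out) ? out.join('\n\n') : (out || '');
    });
  </script>
</body>
</html>
```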
64 | 65 | 66 | -------------------------------------------------------------------------------- /har-to-curl.js: -------------------------------------------------------------------------------- 1 | /** 2 | * A CommonJS utility for converting a HAR (HTTP Archive) format JSON object to a cURL command string for use on the command line. 3 | * 4 | * @overview 5 | * @author Matthew Caruana Galizia 6 | * @license MIT 7 | * @copyright Copyright (c) 2012, Matthew Caruana Galizia 8 | * @version 0.3.0 9 | * @preserve 10 | */ 11 | 12 | /*jslint node: true */ 13 | 14 | var harToCurl = function(har) { 15 | 'use strict'; 16 | if (typeof har === 'string') { 17 | har = JSON.parse(har); 18 | } 19 | 20 | if (!har || typeof har !== 'object') { 21 | return; 22 | } 23 | 24 | if (har.request) { 25 | return harToCurl.fromEntry(har); 26 | } 27 | 28 | if (har.log && Array.isArray(har.log.entries)) { 29 | return harToCurl.fromLog(har.log); 30 | } 31 | 32 | if (Array.isArray(har)) { 33 | return harToCurl.fromEntries(har); 34 | } 35 | 36 | if (Array.isArray(har.entries)) { 37 | return harToCurl.fromLog(har); 38 | } 39 | }; 40 | 41 | harToCurl.fromLog = function(log) { 42 | 'use strict'; 43 | if (!log || !Array.isArray(log.entries)) { 44 | return; 45 | } 46 | 47 | return harToCurl.fromEntries(log.entries); 48 | }; 49 | 50 | harToCurl.fromEntries = function(entries) { 51 | 'use strict'; 52 | return entries.map(harToCurl.fromEntry); 53 | }; 54 | 55 | harToCurl.fromEntry = function(entry) { 56 | 'use strict'; 57 | var command; 58 | 59 | if (!entry || !entry.request) { 60 | return ''; 61 | } 62 | 63 | command = 'curl -X ' + entry.request.method; 64 | 65 | if (entry.request.httpVersion === 'HTTP/1.0') { 66 | command += ' -0'; 67 | } 68 | 69 | if (entry.request.cookies.length) { 70 | command += ' -b "' + entry.request.cookies.map(function(cookie) { 71 | return encodeURIComponent(cookie.name) + '=' + encodeURIComponent(cookie.value); 72 | }).join('&') + '"'; 73 | } 74 | 75 | command += entry.request.headers.map(function(header) { 76 | return ' -H "' + header.name + ': ' + header.value + '"'; 77 | }).join(''); 78 | 79 | if (entry.request.postData) { 80 | command += ' -d "' + entry.request.postData.text + '"'; 81 | } 82 | 83 | return command + ' ' + entry.request.url; 84 | }; 85 | --------------------------------------------------------------------------------
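To use the converter outside the page, a small Node wrapper works too. This is a sketch: it assumes a `module.exports = harToCurl;` line is appended to har-to-curl.js, since the listing above defines the function without exporting it.

```js
// convert.js (hypothetical): print cURL commands for every entry in a saved HAR file.
// Usage: node convert.js recording.har
var fs = require('fs');
var harToCurl = require('./har-to-curl.js'); // assumes `module.exports = harToCurl;` was added

var har = JSON.parse(fs.readFileSync(process.argv[2], 'utf8'));
var commands = harToCurl(har); // a single string, or one string per HAR entry

if (!commands) {
  console.error('Not a recognizable HAR structure');
  process.exit(1);
}

(Array.isArray(commands) ? commands : [commands]).forEach(function (command) {
  console.log(command + '\n');
});
```

The cookie header needed in step 2 of "1. Get all available numbers.md" is then just a matter of grepping this output for `cookie:`.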