├── .gitignore
├── Cakefile
├── LICENSE
├── README.md
├── docs
│   ├── docco.css
│   └── index.html
├── examples
│   ├── README.md
│   └── server.js
├── index.js
├── package.json
├── samples
│   └── venues.js
├── src
│   ├── examples
│   │   └── server.coffee
│   ├── index.coffee
│   └── test
│       ├── autocomplete.test.coffee
│       └── helper.test.coffee
└── test
    ├── autocomplete.test.js
    └── helper.test.js

/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | node_modules
3 | .sublime-project
--------------------------------------------------------------------------------
/Cakefile:
--------------------------------------------------------------------------------
1 | # Module requires
2 | {spawn, exec} = require 'child_process'
3 | sys = require 'sys'
4 | 
5 | # ## Helpers
6 | 
7 | # Helper function for showing output and error messages if anything happens
8 | printOutput = (process) ->
9 |   process.stdout.on 'data', (data) -> sys.print data
10 |   process.stderr.on 'data', (data) -> sys.print data
11 | 
12 | # Watch CoffeeScript for changes and compile to JavaScript
13 | watchJS = ->
14 |   coffee = exec 'coffee -cw -o ./ src/'
15 |   printOutput(coffee)
16 | 
17 | runTests = ->
18 |   expresso = exec 'expresso -b test/*.test.js'
19 |   printOutput(expresso)
20 | 
21 | # Tasks
22 | task 'watch', 'Watches all CoffeeScript(JS) files', ->
23 |   watchJS()
24 | 
25 | task 'docs', 'Create documentation using Docco', ->
26 |   docco = exec """
27 |     docco src/index.coffee
28 |   """
29 |   printOutput(docco)
30 | 
31 | task 'sbuild', 'Build task for Sublime Text', ->
32 |   watchJS()
33 |   # runTests()
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2011 Teng Siong Ong
2 | 
3 | Permission is hereby granted, free of charge, to any person obtaining a copy
4 | of this software and associated documentation files (the "Software"), to deal
5 | in the Software without restriction, including without limitation the rights
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | copies of the Software, and to permit persons to whom the Software is
8 | furnished to do so, subject to the following conditions:
9 | 
10 | The above copyright notice and this permission notice shall be included in
11 | all copies or substantial portions of the Software.
12 | 
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
19 | THE SOFTWARE.
20 | 
21 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | flo
2 | ===
3 | **flo** is a [redis](http://redis.io) powered [node.js](http://nodejs.org) autocompleter inspired by [soulmate](https://github.com/seatgeek/soulmate). You can use this anywhere you want since this is just a module. If you look into the examples folder, I have provided an example of how to get it working with [express](http://expressjs.com/).
4 | 
5 | If you want to see a real-world example of this, you should try out the search box at [SeatGeek](http://seatgeek.com) or [Quora](http://quora.com).
6 | 
7 | Documentation
8 | ==============
9 | 
10 | First, connect to the redis instance:
11 | 
12 | Sets up a new Redis Connection.
13 | 
14 |     var flo = require('flo').connect();
15 | 
16 | `options` - Optional Hash of options.
17 | 
18 | * `redis` - An existing redis connection to use.
19 | * `host` - String Redis host. (Default: Redis' default)
20 | * `port` - Integer Redis port. (Default: Redis' default)
21 | * `password` - String Redis password.
22 | * `namespace` - String namespace prefix for Redis keys. (Default: flo).
23 | * `mincomplete` - Minimum number of characters required for autocompletion. (Default: 1)
24 | * `database` - Integer of the Redis database to select.
25 | 
26 | Returns a Connection instance.
27 | 
28 | These are the public functions:
29 | 
30 | Add a new term
31 | --------------
32 | 
33 | `add_term(type, id, term, score, data, callback)`:
34 | 
35 | * `type` - the type of data of this term (String)
36 | * `id` - unique identifier (within the specific type)
37 | * `term` - the phrase you wish to provide completions for
38 | * `score` - user-specified ranking metric (redis will order things lexicographically for items with the same score)
39 | * `data` - container for metadata that you would like to return when this item is matched (optional)
40 | * `callback` - callback to be run (optional)
41 | 
42 | Returns nothing.
43 | 
44 | Search for a term
45 | -----------------
46 | 
47 | `search_term(types, phrase, limit, callback)`:
48 | 
49 | * `types` - the types of term that you are looking for (Array of String)
50 | * `phrase` - the phrase or phrases you want to be autocompleted
51 | * `limit` - the maximum number of results to return for each type (optional, default: 5)
52 | * `callback(err, result)` - `err` is any error and `result` is the result object described below
53 | 
54 | This call:
55 | 
56 | `search_term(["chinese", "indian"], "rice", 1, cb);`
57 | 
58 | will return a result in JSON format like:
59 | 
60 |     {
61 |       term: "rice",
62 |       chinese: [
63 |         {
64 |           id: 3,
65 |           term: "mongolian fried rice",
66 |           score: 10,
67 |           data: {
68 |             name: "Gonghu Chinese Restaurant",
69 |             address: "304, University Avenue, Palo Alto"
70 |           }
71 |         }
72 |       ],
73 |       indian: [
74 |         {
75 |           id: 1,
76 |           term: "Briyani Chicken Rice",
77 |           score: 5,
78 |           data: {
79 |             name: "Bombay Grill",
80 |             address: "100 Green St, Urbana"
81 |           }
82 |         }
83 |       ]
84 |     }
85 | 
86 | Remove a term
87 | -------------
88 | 
89 | `remove_term(type, id, callback)`:
90 | 
91 | * `type` - the type of data of this term (String)
92 | * `id` - unique identifier (within the specific type)
93 | * `callback` - callback to be run (optional)
94 | 
95 | Returns nothing.
96 | 
97 | Get the IDs for a term
98 | ----------------------
99 | 
100 | `get_ids(type, term, callback)`:
101 | 
102 | * `type` - the type of data for this term
103 | * `term` - the term to find the unique identifiers for
104 | * `callback(err, result)` - `result` is an array of IDs for the term; an empty array if none were found
105 | 
106 | Get the data for an ID
107 | -----------------------
108 | 
109 | `get_data(type, id, callback)`:
110 | 
111 | * `type` - the type of data for this term
112 | * `id` - unique identifier (within the specific type)
113 | * `callback(err, result)` - `result` is the stored data
114 | 
115 | For more information, you can read the annotated source [here](https://github.com/siong1987/flo/tree/master/docs).
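Putting it together, below is a minimal usage sketch built only from the calls documented above. It assumes a local Redis server is running and that the module is installed as `flo`; the type name, ids, terms and metadata are made-up example values.

    // minimal sketch: connect, add a term, then autocomplete against it
    var flo = require('flo').connect();

    flo.add_term('chinese', 3, 'mongolian fried rice', 10, {
      name: 'Gonghu Chinese Restaurant',
      address: '304, University Avenue, Palo Alto'
    }, function() {
      // the term is indexed once add_term's callback fires
      flo.search_term(['chinese', 'indian'], 'rice', 5, function(err, result) {
        // result looks like { term: 'rice', chinese: [...], indian: [...] }
        console.log(err || result);
        flo.end(); // close the Redis connection when you are done
      });
    });

The express app in `examples/server.js` below wires the same `add_term`/`search_term` calls behind HTTP routes.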
116 | 117 | ## Tests 118 | To run tests, first make sure your local redis is running, then: 119 | 120 | ./node_modules/expresso/bin/expresso test/*.test.js 121 | 122 | ### License 123 | [MIT License](https://github.com/siong1987/flo/blob/master/LICENSE) 124 | 125 | --- 126 | ### Author 127 | [Teng Siong Ong](https://github.com/siong1987/) 128 | 129 | ### Company 130 | [FLOChip](http://flochip.com) 131 | -------------------------------------------------------------------------------- /docs/docco.css: -------------------------------------------------------------------------------- 1 | /*--------------------- Layout and Typography ----------------------------*/ 2 | body { 3 | font-family: 'Palatino Linotype', 'Book Antiqua', Palatino, FreeSerif, serif; 4 | font-size: 15px; 5 | line-height: 22px; 6 | color: #252519; 7 | margin: 0; padding: 0; 8 | } 9 | a { 10 | color: #261a3b; 11 | } 12 | a:visited { 13 | color: #261a3b; 14 | } 15 | p { 16 | margin: 0 0 15px 0; 17 | } 18 | h1, h2, h3, h4, h5, h6 { 19 | margin: 0px 0 15px 0; 20 | } 21 | h1 { 22 | margin-top: 40px; 23 | } 24 | #container { 25 | position: relative; 26 | } 27 | #background { 28 | position: fixed; 29 | top: 0; left: 525px; right: 0; bottom: 0; 30 | background: #f5f5ff; 31 | border-left: 1px solid #e5e5ee; 32 | z-index: -1; 33 | } 34 | #jump_to, #jump_page { 35 | background: white; 36 | -webkit-box-shadow: 0 0 25px #777; -moz-box-shadow: 0 0 25px #777; 37 | -webkit-border-bottom-left-radius: 5px; -moz-border-radius-bottomleft: 5px; 38 | font: 10px Arial; 39 | text-transform: uppercase; 40 | cursor: pointer; 41 | text-align: right; 42 | } 43 | #jump_to, #jump_wrapper { 44 | position: fixed; 45 | right: 0; top: 0; 46 | padding: 5px 10px; 47 | } 48 | #jump_wrapper { 49 | padding: 0; 50 | display: none; 51 | } 52 | #jump_to:hover #jump_wrapper { 53 | display: block; 54 | } 55 | #jump_page { 56 | padding: 5px 0 3px; 57 | margin: 0 0 25px 25px; 58 | } 59 | #jump_page .source { 60 | display: block; 61 | padding: 5px 10px; 62 | text-decoration: none; 63 | border-top: 1px solid #eee; 64 | } 65 | #jump_page .source:hover { 66 | background: #f5f5ff; 67 | } 68 | #jump_page .source:first-child { 69 | } 70 | table td { 71 | border: 0; 72 | outline: 0; 73 | } 74 | td.docs, th.docs { 75 | max-width: 450px; 76 | min-width: 450px; 77 | min-height: 5px; 78 | padding: 10px 25px 1px 50px; 79 | overflow-x: hidden; 80 | vertical-align: top; 81 | text-align: left; 82 | } 83 | .docs pre { 84 | margin: 15px 0 15px; 85 | padding-left: 15px; 86 | } 87 | .docs p tt, .docs p code { 88 | background: #f8f8ff; 89 | border: 1px solid #dedede; 90 | font-size: 12px; 91 | padding: 0 0.2em; 92 | } 93 | .pilwrap { 94 | position: relative; 95 | } 96 | .pilcrow { 97 | font: 12px Arial; 98 | text-decoration: none; 99 | color: #454545; 100 | position: absolute; 101 | top: 3px; left: -20px; 102 | padding: 1px 2px; 103 | opacity: 0; 104 | -webkit-transition: opacity 0.2s linear; 105 | } 106 | td.docs:hover .pilcrow { 107 | opacity: 1; 108 | } 109 | td.code, th.code { 110 | padding: 14px 15px 16px 25px; 111 | width: 100%; 112 | vertical-align: top; 113 | background: #f5f5ff; 114 | border-left: 1px solid #e5e5ee; 115 | } 116 | pre, tt, code { 117 | font-size: 12px; line-height: 18px; 118 | font-family: Monaco, Consolas, "Lucida Console", monospace; 119 | margin: 0; padding: 0; 120 | } 121 | 122 | 123 | /*---------------------- Syntax Highlighting -----------------------------*/ 124 | td.linenos { background-color: #f0f0f0; padding-right: 10px; } 125 | span.lineno { 
background-color: #f0f0f0; padding: 0 5px 0 5px; } 126 | body .hll { background-color: #ffffcc } 127 | body .c { color: #408080; font-style: italic } /* Comment */ 128 | body .err { border: 1px solid #FF0000 } /* Error */ 129 | body .k { color: #954121 } /* Keyword */ 130 | body .o { color: #666666 } /* Operator */ 131 | body .cm { color: #408080; font-style: italic } /* Comment.Multiline */ 132 | body .cp { color: #BC7A00 } /* Comment.Preproc */ 133 | body .c1 { color: #408080; font-style: italic } /* Comment.Single */ 134 | body .cs { color: #408080; font-style: italic } /* Comment.Special */ 135 | body .gd { color: #A00000 } /* Generic.Deleted */ 136 | body .ge { font-style: italic } /* Generic.Emph */ 137 | body .gr { color: #FF0000 } /* Generic.Error */ 138 | body .gh { color: #000080; font-weight: bold } /* Generic.Heading */ 139 | body .gi { color: #00A000 } /* Generic.Inserted */ 140 | body .go { color: #808080 } /* Generic.Output */ 141 | body .gp { color: #000080; font-weight: bold } /* Generic.Prompt */ 142 | body .gs { font-weight: bold } /* Generic.Strong */ 143 | body .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ 144 | body .gt { color: #0040D0 } /* Generic.Traceback */ 145 | body .kc { color: #954121 } /* Keyword.Constant */ 146 | body .kd { color: #954121; font-weight: bold } /* Keyword.Declaration */ 147 | body .kn { color: #954121; font-weight: bold } /* Keyword.Namespace */ 148 | body .kp { color: #954121 } /* Keyword.Pseudo */ 149 | body .kr { color: #954121; font-weight: bold } /* Keyword.Reserved */ 150 | body .kt { color: #B00040 } /* Keyword.Type */ 151 | body .m { color: #666666 } /* Literal.Number */ 152 | body .s { color: #219161 } /* Literal.String */ 153 | body .na { color: #7D9029 } /* Name.Attribute */ 154 | body .nb { color: #954121 } /* Name.Builtin */ 155 | body .nc { color: #0000FF; font-weight: bold } /* Name.Class */ 156 | body .no { color: #880000 } /* Name.Constant */ 157 | body .nd { color: #AA22FF } /* Name.Decorator */ 158 | body .ni { color: #999999; font-weight: bold } /* Name.Entity */ 159 | body .ne { color: #D2413A; font-weight: bold } /* Name.Exception */ 160 | body .nf { color: #0000FF } /* Name.Function */ 161 | body .nl { color: #A0A000 } /* Name.Label */ 162 | body .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */ 163 | body .nt { color: #954121; font-weight: bold } /* Name.Tag */ 164 | body .nv { color: #19469D } /* Name.Variable */ 165 | body .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */ 166 | body .w { color: #bbbbbb } /* Text.Whitespace */ 167 | body .mf { color: #666666 } /* Literal.Number.Float */ 168 | body .mh { color: #666666 } /* Literal.Number.Hex */ 169 | body .mi { color: #666666 } /* Literal.Number.Integer */ 170 | body .mo { color: #666666 } /* Literal.Number.Oct */ 171 | body .sb { color: #219161 } /* Literal.String.Backtick */ 172 | body .sc { color: #219161 } /* Literal.String.Char */ 173 | body .sd { color: #219161; font-style: italic } /* Literal.String.Doc */ 174 | body .s2 { color: #219161 } /* Literal.String.Double */ 175 | body .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */ 176 | body .sh { color: #219161 } /* Literal.String.Heredoc */ 177 | body .si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */ 178 | body .sx { color: #954121 } /* Literal.String.Other */ 179 | body .sr { color: #BB6688 } /* Literal.String.Regex */ 180 | body .s1 { color: #219161 } /* Literal.String.Single */ 181 | body .ss { color: #19469D } /* 
Literal.String.Symbol */ 182 | body .bp { color: #954121 } /* Name.Builtin.Pseudo */ 183 | body .vc { color: #19469D } /* Name.Variable.Class */ 184 | body .vg { color: #19469D } /* Name.Variable.Global */ 185 | body .vi { color: #19469D } /* Name.Variable.Instance */ 186 | body .il { color: #666666 } /* Literal.Number.Integer.Long */ -------------------------------------------------------------------------------- /docs/index.html: -------------------------------------------------------------------------------- 1 | index.coffee

[docs/index.html is the Docco-generated HTML documentation for index.coffee; its rendered text mirrors the annotated source in /src/index.coffee below.]
-------------------------------------------------------------------------------- /examples/README.md: -------------------------------------------------------------------------------- 1 | Examples 2 | ======== 3 | 4 | * server.js - This is a simple expressjs app that shows how you could integrate [flo](https://github.com/FLOChip/flo) with it. 5 | 6 | After you get the express app running, you can try to accees it with `http://localhost:3000/search/venues,food/stad/1` 7 | -------------------------------------------------------------------------------- /examples/server.js: -------------------------------------------------------------------------------- 1 | // Generated by CoffeeScript 1.3.1 2 | (function() { 3 | var app, async, flo, venues; 4 | 5 | app = require('express').createServer(); 6 | 7 | flo = require('../index').connect(); 8 | 9 | async = require('async'); 10 | 11 | app.get('/', function(req, res) { 12 | return res.send('Up and running'); 13 | }); 14 | 15 | app.get('/search/:types/:term/:limit', function(req, res) { 16 | var types; 17 | types = req.params.types.split(','); 18 | return flo.search_term(types, req.params.term, parseInt(req.params.limit), function(err, results) { 19 | return res.send(JSON.stringify(results)); 20 | }); 21 | }); 22 | 23 | venues = require('../samples/venues').venues; 24 | 25 | async.forEach(venues, (function(venue, cb) { 26 | return flo.add_term("venues", venue.id, venue.term, venue.score, venue.data, function() { 27 | return cb(); 28 | }); 29 | }), function() { 30 | return app.listen(3000); 31 | }); 32 | 33 | }).call(this); 34 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | // Generated by CoffeeScript 1.3.1 2 | (function() { 3 | var Connection, Helper, async, connectToRedis, _, 4 | __slice = [].slice; 5 | 6 | _ = require("underscore"); 7 | 8 | async = require("async"); 9 | 10 | exports.connect = function(options) { 11 | return new exports.Connection(options || {}); 12 | }; 13 | 14 | Connection = (function() { 15 | 16 | Connection.name = 'Connection'; 17 | 18 | function Connection(options) { 19 | this.helper = new Helper; 20 | this.redis = options.redis || connectToRedis(options); 21 | this.namespace = options.namespace || 'flo'; 22 | this.mincomplete = options.mincomplete || 1; 23 | if (options.database != null) { 24 | this.redis.select(options.database); 25 | } 26 | } 27 | 28 | Connection.prototype.prefixes_for_phrase = function(phrase) { 29 | var words, 30 | _this = this; 31 | words = this.helper.normalize(phrase).split(' '); 32 | return _.uniq(_.flatten(_.map(words, function(w) { 33 | var _i, _ref, _ref1, _results; 34 | return _.map((function() { 35 | _results = []; 36 | for (var _i = _ref = _this.mincomplete - 1, _ref1 = w.length - 1; _ref <= _ref1 ? _i <= _ref1 : _i >= _ref1; _ref <= _ref1 ? _i++ : _i--){ _results.push(_i); } 37 | return _results; 38 | }).apply(this), function(l) { 39 | return w.slice(0, l + 1 || 9e9); 40 | }); 41 | }))); 42 | }; 43 | 44 | Connection.prototype.search_term = function() { 45 | var args, callback, limit, phrase, types, 46 | _this = this; 47 | types = arguments[0], phrase = arguments[1], args = 3 <= arguments.length ? 
__slice.call(arguments, 2) : []; 48 | if (typeof args[0] === 'number') { 49 | limit = args[0]; 50 | } else { 51 | limit = 5; 52 | } 53 | callback = args[args.length - 1]; 54 | return async.map(types, function(type, callb) { 55 | var cachekey, words; 56 | words = _.uniq(_this.helper.normalize(phrase).split(' ')).sort(); 57 | cachekey = _this.key(type, "cache", words.join('|')); 58 | return async.waterfall([ 59 | (function(cb) { 60 | return _this.redis.exists(cachekey, cb); 61 | }), (function(exists, cb) { 62 | var interkeys, _ref; 63 | if (!exists) { 64 | interkeys = _.map(words, function(w) { 65 | return _this.key(type, "index", w); 66 | }); 67 | return (_ref = _this.redis).zinterstore.apply(_ref, [cachekey, interkeys.length].concat(__slice.call(interkeys), [function(err, count) { 68 | return _this.redis.expire(cachekey, 10 * 60, function() { 69 | return cb(); 70 | }); 71 | }])); 72 | } else { 73 | return cb(); 74 | } 75 | }), (function(cb) { 76 | return _this.redis.zrevrange(cachekey, 0, limit - 1, function(err, ids) { 77 | var _ref; 78 | if (ids.length > 0) { 79 | return (_ref = _this.redis).hmget.apply(_ref, [_this.key(type, "data")].concat(__slice.call(ids), [cb])); 80 | } else { 81 | return cb(null, []); 82 | } 83 | }); 84 | }) 85 | ], function(err, results) { 86 | var data; 87 | data = {}; 88 | data[type] = results; 89 | return callb(err, data); 90 | }); 91 | }, function(err, results) { 92 | results = _.extend.apply(_, results); 93 | results.term = phrase; 94 | return callback(err, results); 95 | }); 96 | }; 97 | 98 | Connection.prototype.add_term = function() { 99 | var args, callback, data, id, score, term, type, 100 | _this = this; 101 | type = arguments[0], id = arguments[1], term = arguments[2], score = arguments[3], args = 5 <= arguments.length ? 
__slice.call(arguments, 4) : []; 102 | if (typeof args[0] !== 'function') { 103 | data = args[0]; 104 | callback = args[args.length - 1]; 105 | } else if (typeof args[0] === 'function') { 106 | callback = args[0]; 107 | } 108 | return async.parallel([ 109 | (function(callb) { 110 | return _this.redis.hset(_this.key(type, "data"), id, JSON.stringify({ 111 | id: id, 112 | term: term, 113 | score: score, 114 | data: data || [] 115 | }), function() { 116 | return callb(); 117 | }); 118 | }), (function(callb) { 119 | return async.forEach(_this.prefixes_for_phrase(term), (function(w, cb) { 120 | return _this.redis.zadd(_this.key(type, "index", w), score, id, function() { 121 | return cb(); 122 | }); 123 | }), function() { 124 | return callb(); 125 | }); 126 | }), (function(callb) { 127 | var key; 128 | key = _this.key(type, _this.helper.normalize(term)); 129 | return _this.redis.get(key, function(err, result) { 130 | var arr; 131 | if (err) { 132 | return callb(err); 133 | } 134 | if (result) { 135 | arr = JSON.parse(result); 136 | arr.push(id); 137 | arr = _.uniq(arr); 138 | } else { 139 | arr = [id]; 140 | } 141 | return _this.redis.set(key, JSON.stringify(arr), callb); 142 | }); 143 | }) 144 | ], function() { 145 | if (callback != null) { 146 | return callback(); 147 | } 148 | }); 149 | }; 150 | 151 | Connection.prototype.remove_term = function(type, id, callback) { 152 | var _this = this; 153 | return this.redis.hget(this.key(type, "data"), id, function(err, result) { 154 | var term; 155 | if (err) { 156 | return callback(err); 157 | } 158 | if (result === null) { 159 | return callback(new Error("Invalid term id")); 160 | } 161 | term = JSON.parse(result).term; 162 | return async.parallel([ 163 | (function(callb) { 164 | return _this.redis.hdel(_this.key(type, "data"), id, callb); 165 | }), (function(callb) { 166 | return async.forEach(_this.prefixes_for_phrase(term), (function(w, cb) { 167 | return _this.redis.zrem(_this.key(type, "index", w), id, function() { 168 | return cb(); 169 | }); 170 | }), callb); 171 | }), (function(callb) { 172 | var key; 173 | key = _this.key(type, _this.helper.normalize(term)); 174 | return _this.redis.get(key, function(err, result) { 175 | var arr; 176 | if (err) { 177 | return callb(err); 178 | } 179 | if (result === null) { 180 | return cb(new Error("Couldn't delete " + id(+". 
No such entry."))); 181 | } 182 | arr = JSON.parse(result); 183 | if (arr.toString() === [id].toString()) { 184 | return _this.redis.del(key, callb); 185 | } 186 | return _this.redis.set(key, JSON.stringify(_.without(arr, id)), callb); 187 | }); 188 | }) 189 | ], function(err) { 190 | if (callback != null) { 191 | return callback(err); 192 | } 193 | }); 194 | }); 195 | }; 196 | 197 | Connection.prototype.get_ids = function(type, term, callback) { 198 | return this.redis.get(this.key(type, this.helper.normalize(term)), function(err, result) { 199 | var arr; 200 | if (err) { 201 | return callback(err); 202 | } 203 | arr = JSON.parse(result); 204 | if (arr === null) { 205 | return callback(null, []); 206 | } 207 | return callback(null, arr); 208 | }); 209 | }; 210 | 211 | Connection.prototype.get_data = function(type, id, callback) { 212 | return this.redis.hget(this.key(type, "data"), id, function(err, result) { 213 | if (err) { 214 | return callback(err); 215 | } 216 | return callback(null, JSON.parse(result)); 217 | }); 218 | }; 219 | 220 | Connection.prototype.redis = function() { 221 | return this.redis; 222 | }; 223 | 224 | Connection.prototype.end = function() { 225 | return this.redis.quit(); 226 | }; 227 | 228 | Connection.prototype.key = function() { 229 | var args; 230 | args = 1 <= arguments.length ? __slice.call(arguments, 0) : []; 231 | args.unshift(this.namespace); 232 | return args.join(":"); 233 | }; 234 | 235 | return Connection; 236 | 237 | })(); 238 | 239 | Helper = (function() { 240 | 241 | Helper.name = 'Helper'; 242 | 243 | function Helper() {} 244 | 245 | Helper.prototype.normalize = function(term) { 246 | return this.strip(this.gsub(term.toLowerCase(), /[^a-z0-9 ]/i, '')); 247 | }; 248 | 249 | Helper.prototype.gsub = function(source, pattern, replacement) { 250 | var match, result; 251 | if (!((pattern != null) && (replacement != null))) { 252 | return source; 253 | } 254 | result = ''; 255 | while (source.length > 0) { 256 | if ((match = source.match(pattern))) { 257 | result += source.slice(0, match.index); 258 | result += replacement; 259 | source = source.slice(match.index + match[0].length); 260 | } else { 261 | result += source; 262 | source = ''; 263 | } 264 | } 265 | return result; 266 | }; 267 | 268 | Helper.prototype.strip = function(source) { 269 | return source.replace(/^\s+/, '').replace(/\s+$/, ''); 270 | }; 271 | 272 | return Helper; 273 | 274 | })(); 275 | 276 | connectToRedis = function(options) { 277 | var redis; 278 | redis = require('redis').createClient(options.port, options.host); 279 | if (options.password != null) { 280 | redis.auth(options.password); 281 | } 282 | return redis; 283 | }; 284 | 285 | exports.Helper = new Helper; 286 | 287 | exports.Connection = Connection; 288 | 289 | }).call(this); 290 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "flo", 3 | "description": "Redis powered node.js autocompleter inspired by soulmate", 4 | "version": "0.2.0", 5 | "author": "Teng Siong Ong ", 6 | "dependencies": { 7 | "redis": ">= 0.6.7", 8 | "async": "0.1.9", 9 | "underscore": ">= 1.1.7" 10 | }, 11 | "devDependencies": { 12 | "expresso": "0.8.1", 13 | "should": "0.2.1", 14 | "express": "2.4.3" 15 | }, 16 | "keywords": ["autocompleter", "soulmate", "redis", "flo"], 17 | "repository": "git://github.com/FLOChip/flo", 18 | "main": "index", 19 | "engines": { "node": ">= 0.4.9 < 0.7.0" } 20 | } 21 | 
-------------------------------------------------------------------------------- /samples/venues.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | venues: [ 3 | {id:1, term:"Dodger Stadium", score:84, data:{url:"\/dodger-stadium-tickets\/", subtitle:"Los Angeles, CA"}}, 4 | {id:28, term:"Angel Stadium", score:90, data:{url:"\/angel-stadium-tickets\/", subtitle:"Anaheim, CA"}}, 5 | {id:30, term:"Chase Field ", score:80, data:{url:"\/chase-field-tickets\/", subtitle:"Phoenix, AZ"}}, 6 | {id:29, term:"Sun Life Stadium",score:75, data:{url:"\/sun-life-stadium-tickets\/",subtitle:"Miami, FL"}}, 7 | {id:2, term:"Turner Field", score:50, data:{url:"\/turner-field-tickets\/", subtitle:"Atlanta, GA"}}, 8 | {id:3, term:"Citi Field", score:92, data:{url:"\/citi-field-tickets\/", subtitle:"Atlanta, GA"}} 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /src/examples/server.coffee: -------------------------------------------------------------------------------- 1 | app = require('express').createServer() 2 | flo = require('../index').connect() 3 | async = require 'async' 4 | 5 | app.get '/', (req, res) -> 6 | res.send('Up and running') 7 | 8 | app.get '/search/:types/:term/:limit', (req, res) -> 9 | types = req.params.types.split(',') 10 | flo.search_term types, req.params.term, parseInt(req.params.limit), (err, results) -> 11 | res.send(JSON.stringify(results)) 12 | 13 | venues = require('../samples/venues').venues 14 | async.forEach venues, 15 | ((venue, cb) -> 16 | flo.add_term("venues", venue.id, venue.term, venue.score, venue.data, -> 17 | cb() 18 | ) 19 | ), -> 20 | app.listen(3000) 21 | 22 | -------------------------------------------------------------------------------- /src/index.coffee: -------------------------------------------------------------------------------- 1 | # **[flo](https://github.com/FLOChip/flo)** is an redis powered node.js autocompleter inspired by [soulmate](https://github.com/seatgeek/soulmate). 2 | # You can check out some examples [here](https://github.com/FLOChip/flo/tree/master/examples). 3 | _ = require "underscore" 4 | async = require "async" 5 | 6 | # Sets up a new Redis Connection. 7 | # 8 | # options - Optional Hash of options. 9 | # 10 | # * `redis` - An existing redis connection to use. 11 | # * `host` - String Redis host. (Default: Redis' default) 12 | # * `port` - Integer Redis port. (Default: Redis' default) 13 | # * `password` - String Redis password. 14 | # * `namespace` - String namespace prefix for Redis keys. 15 | # (Default: flo). 16 | # * `mincomplete` - Minimum completion of keys required for auto completion. 17 | # (Default: 1) 18 | # * `database` - Integer of the Redis database to select. 19 | # 20 | # Returns a Connection instance. 21 | exports.connect = (options) -> 22 | new exports.Connection options || {} 23 | 24 | # Handles the connection to the Redis server. 25 | class Connection 26 | constructor: (options) -> 27 | @helper = new Helper 28 | @redis = options.redis || connectToRedis options 29 | @namespace = options.namespace || 'flo' 30 | @mincomplete = options.mincomplete || 1 31 | @redis.select options.database if options.database? 
32 | 33 | # Public: Get all prefixes for a phrase 34 | # 35 | # * `phrase` - the phrase that needs to be parsed into many prefixes 36 | # 37 | # Returns an array of unique prefixes for the phrase 38 | prefixes_for_phrase: (phrase) -> 39 | words = @helper.normalize(phrase).split(' ') 40 | _.uniq( 41 | _.flatten( 42 | _.map(words, (w) => 43 | _.map([(@mincomplete-1)..(w.length-1)], (l) -> 44 | w[0..l] 45 | ) 46 | ) 47 | ) 48 | ) 49 | 50 | # Public: Search for a term 51 | # 52 | # * `types` - types of term that you are looking for (Array of String) 53 | # * `phrase` - the phrase or phrases you want to be autocompleted 54 | # * `limit` - the count of the number you want to return (optional, default: 5) 55 | # * `callback(err, result)` - err is the error and results is the results 56 | search_term: (types, phrase, args...) -> 57 | if typeof(args[0]) == 'number' 58 | limit = args[0] 59 | else 60 | limit = 5 61 | callback = args[args.length-1] 62 | 63 | async.map types, (type, callb) => 64 | words = _.uniq( 65 | @helper.normalize(phrase).split(' ') 66 | ).sort() 67 | 68 | # for caching purpose 69 | cachekey = @key(type, "cache", words.join('|')) 70 | async.waterfall([ 71 | ((cb) => 72 | @redis.exists cachekey, cb 73 | ), 74 | ((exists, cb) => 75 | if !exists 76 | interkeys = _.map(words, (w) => 77 | @key(type, "index", w) 78 | ) 79 | @redis.zinterstore cachekey, interkeys.length, interkeys..., (err, count) => 80 | @redis.expire cachekey, 10 * 60, -> # expire after 10 minutes 81 | cb() 82 | else 83 | cb() 84 | ), 85 | ((cb) => 86 | @redis.zrevrange cachekey, 0, (limit - 1), (err, ids) => 87 | if ids.length > 0 88 | @redis.hmget @key(type, "data"), ids..., cb 89 | else 90 | cb(null, []) 91 | ) 92 | ], (err, results) -> 93 | data = {} 94 | data[type] = results 95 | callb(err, data) 96 | ) 97 | , (err, results) -> 98 | results = _.extend results... 99 | results.term = phrase 100 | callback(err, results) 101 | 102 | # Public: Add a new term 103 | # 104 | # * `type` - the type of data of this term (String) 105 | # * `id` - unique identifier(within the specific type) 106 | # * `term` - the phrase you wish to provide completions for 107 | # * `score` - user specified ranking metric (redis will order things lexicographically for items with the same score) 108 | # * `data` - container for metadata that you would like to return when this item is matched (optional) 109 | # * `callback` - callback to be run (optional) 110 | # 111 | # Returns nothing. 112 | add_term: (type, id, term, score, args...) -> 113 | if typeof(args[0]) != 'function' 114 | data = args[0] 115 | callback = args[args.length-1] 116 | else if typeof(args[0]) == 'function' 117 | callback = args[0] 118 | 119 | # store the data in parallel 120 | async.parallel([ 121 | ((callb) => 122 | @redis.hset @key(type, "data"), id, 123 | JSON.stringify id: id, term: term, score: score, data: (data || []), 124 | -> 125 | callb() 126 | ), 127 | ((callb) => 128 | async.forEach @prefixes_for_phrase(term), 129 | ((w, cb) => 130 | @redis.zadd @key(type, "index", w), score, id, # sorted set 131 | -> cb() 132 | ), -> 133 | callb() 134 | ), 135 | ((callb) => 136 | key = @key(type, @helper.normalize(term)) 137 | # do we already have this term? 
138 | @redis.get key, (err, result) => 139 | if (err) 140 | return callb(err) 141 | 142 | if (result) 143 | # append to existing ids (without duplicates) 144 | arr = JSON.parse(result) 145 | arr.push(id) 146 | arr = _.uniq(arr) 147 | else 148 | arr = [id] 149 | 150 | # store the id 151 | @redis.set key, JSON.stringify(arr), callb 152 | ) 153 | ], -> 154 | callback() if callback? 155 | ) 156 | 157 | # Public: Remove a term 158 | # 159 | # * `type` - the type of data of this term (String) 160 | # * `id` - unique identifier (within the specific type) 161 | # * `callback(err)` - callback to be run (optional) 162 | # 163 | # Returns nothing. 164 | remove_term: (type, id, callback) -> 165 | #get the term 166 | @redis.hget @key(type, "data"), id, 167 | (err, result) => 168 | if err 169 | return callback(err) 170 | if result == null 171 | return callback(new Error("Invalid term id")) 172 | 173 | term = JSON.parse(result).term 174 | 175 | # remove 176 | async.parallel([ 177 | ((callb) => 178 | @redis.hdel @key(type, "data"), id, callb 179 | ), 180 | ((callb) => 181 | async.forEach @prefixes_for_phrase(term), 182 | ((w, cb) => 183 | @redis.zrem @key(type, "index", w), id, 184 | -> cb() 185 | ), callb 186 | ), 187 | ((callb) => 188 | key = @key(type, @helper.normalize(term)) 189 | @redis.get key, (err, result) => 190 | if (err) 191 | return callb(err) 192 | 193 | if (result == null) 194 | return cb(new Error("Couldn't delete "+ id +". No such entry.")) 195 | 196 | arr = JSON.parse(result) 197 | 198 | if (arr.toString() == [id].toString()) 199 | # delete it 200 | return @redis.del key, callb 201 | 202 | @redis.set key, JSON.stringify(_.without(arr, id)), callb 203 | ) 204 | ], (err) -> 205 | callback(err) if callback? 206 | ) 207 | 208 | # Public: Returns an array of IDs for a term 209 | 210 | # * 'type' - the type of data for this term 211 | # * 'term' - the term to find the unique identifiers for 212 | # * 'callback(err, result)' - result is the ID for the term 213 | 214 | # Returns nothing. 215 | get_ids: (type, term, callback) -> 216 | @redis.get @key(type, @helper.normalize(term)), (err, result) -> 217 | if (err) 218 | return callback(err) 219 | 220 | arr = JSON.parse(result) 221 | 222 | if (arr == null) 223 | return callback(null, []) 224 | 225 | callback(null, arr) 226 | 227 | 228 | # Public: Returns the data for an ID 229 | 230 | # * 'type' - the type of data for this term 231 | # * `id` - unique identifier (within the specific type) 232 | # * 'callback(err, result)' - result is the data 233 | 234 | # Returns nothing. 235 | get_data: (type, id, callback) -> 236 | @redis.hget @key(type, "data"), id, 237 | (err, result) -> 238 | if err 239 | return callback(err) 240 | 241 | callback(null, JSON.parse(result)) 242 | 243 | # Public: Get the redis instance 244 | # 245 | # Returns the redis instance. 246 | redis: -> 247 | @redis 248 | 249 | # Public: Quits the connection to the Redis server. 250 | # 251 | # Returns nothing. 252 | end: -> 253 | @redis.quit() 254 | 255 | # Builds a namespaced Redis key with the given arguments. 256 | # 257 | # * `type` - Type of the param 258 | # * `args` - Array of Strings. 259 | # 260 | # Returns an assembled String key. 261 | key: (args...) -> 262 | args.unshift @namespace 263 | args.join ":" 264 | 265 | class Helper 266 | # Public: Normalize a term to remove all other characters than a-z and 0-9. 267 | # 268 | # * `term` - the term to be normalized 269 | # 270 | # Returns a normalized term. 
271 | normalize: (term) -> 272 | @strip(@gsub(term.toLowerCase(), /[^a-z0-9 ]/i, '')) 273 | 274 | # Public: This function partially simulates the Ruby's String gsub method. 275 | # 276 | # * `source` - the source string 277 | # * `pattern` - the Regex pattern 278 | # * `replacement` - the replacement text 279 | # 280 | # Example: 281 | # 282 | # gsub("-abc-abc-", /[^a-z0-9 ]/i, '') # returns "abcabc" 283 | # gsub("-abc-abc-", /[^a-z0-9 ]/i, '*') # returns "*abc*abc*" 284 | # 285 | # Returns the modified string. 286 | gsub: (source, pattern, replacement) -> 287 | unless pattern? and replacement? 288 | return source 289 | 290 | result = '' 291 | while source.length > 0 292 | if (match = source.match(pattern)) 293 | result += source.slice(0, match.index) 294 | result += replacement 295 | source = source.slice(match.index + match[0].length) 296 | else 297 | result += source 298 | source = '' 299 | 300 | result 301 | 302 | # Public: Strip out leading and trailing whitespaces. 303 | # 304 | # * `source` - string to be stripped 305 | # 306 | # Returns a copy of str with leading and trailing whitespace removed. 307 | strip: (source) -> 308 | source.replace(/^\s+/, '').replace(/\s+$/, '') 309 | 310 | connectToRedis = (options) -> 311 | redis = require('redis').createClient options.port, options.host 312 | redis.auth options.password if options.password? 313 | redis 314 | 315 | exports.Helper = new Helper 316 | exports.Connection = Connection 317 | -------------------------------------------------------------------------------- /src/test/autocomplete.test.coffee: -------------------------------------------------------------------------------- 1 | flo = require('../index').connect() 2 | async = require 'async' 3 | _ = require 'underscore' 4 | assert = require 'assert' 5 | 6 | module.exports = 7 | 'test prefixes_for_phrase': () -> 8 | result = flo.prefixes_for_phrase("abc") 9 | assert.eql(["a", "ab", "abc"], result) 10 | 11 | result = flo.prefixes_for_phrase("abc abc") 12 | assert.eql(["a", "ab", "abc"], result) 13 | 14 | result = flo.prefixes_for_phrase("a(*&^%bc") 15 | assert.eql(["a", "ab", "abc"], result) 16 | 17 | result = flo.prefixes_for_phrase("how are you") 18 | assert.eql(["h","ho","how","a","ar","are","y","yo","you"], result) 19 | 20 | 'test key': () -> 21 | result = flo.key("fun", "abc") 22 | assert.equal("flo:fun:abc", result) 23 | 24 | 'test add_term': () -> 25 | term_type = 'book' 26 | term_id = 1 27 | term = "Algorithms for Noob" 28 | term_score = 10 29 | term_data = 30 | ISBN: "123AOU123" 31 | Publisher: "Siong Publication" 32 | flo.add_term(term_type, term_id, term, term_score, term_data, -> 33 | async.parallel([ 34 | ((callback) -> 35 | flo.redis.hget flo.key(term_type, "data"), term_id, (err, reply) -> 36 | result = JSON.parse(reply) 37 | assert.equal(term, result.term) 38 | assert.equal(term_score, result.score) 39 | assert.eql(term_data, result.data) 40 | 41 | callback() 42 | ), 43 | ((callback) -> 44 | async.map flo.prefixes_for_phrase(term), 45 | ((w, cb) => 46 | flo.redis.zrange flo.key(term_type, "index", w), 0, -1, cb# sorted set 47 | ), 48 | (err, results) -> 49 | assert.equal(17, results.length) 50 | results = _.uniq(_.flatten(results)) 51 | assert.equal(1, results[0]) 52 | callback() 53 | ) 54 | ]) 55 | ) 56 | 57 | 'test search_term': () -> 58 | venues = require('../samples/venues').venues 59 | async.series([ 60 | ((callback) -> 61 | async.forEach venues, 62 | ((venue, cb) -> 63 | flo.add_term("venues", venue.id, venue.term, venue.score, venue.data, -> 64 | cb() 65 | ) 66 
| ), callback), 67 | ((callback) -> 68 | flo.search_term ["venues", "food"], "stadium", 69 | (err, results) -> 70 | assert.equal(3, results.venues.length) 71 | callback() 72 | ), 73 | ((callback) -> 74 | # set limit to 1 75 | flo.search_term ["venues"], "stadium", 1, 76 | (err, results) -> 77 | assert.equal(1, results.venues.length) 78 | callback() 79 | ) 80 | ]) 81 | 82 | 'test remove_term': () -> 83 | term_type = "foods" 84 | term_id = 2 85 | term_id2 = 3 86 | term = "Burger" 87 | term_score = 10 88 | term_data = 89 | temp:"data" 90 | 91 | all_data = 92 | id:term_id 93 | term:term 94 | score:term_score 95 | data:term_data 96 | 97 | async.series([ 98 | ((next) -> 99 | flo.add_term term_type, term_id, term, term_score, term_data, next 100 | ), 101 | ((next) -> 102 | flo.get_ids term_type, term, 103 | (err, ids) -> 104 | assert.isNull err 105 | assert.eql ids, [term_id] 106 | next() 107 | ), 108 | ((next) -> 109 | flo.get_data term_type, term_id, 110 | (err, data) -> 111 | assert.isNull err 112 | assert.eql data, all_data 113 | next() 114 | ), 115 | ((next) -> 116 | # add a duplicate with new id 117 | flo.add_term term_type, term_id2, term, term_score, term_data, next 118 | ), 119 | ((next) -> 120 | # check to see if we get both ids back 121 | flo.get_ids term_type, term, 122 | (err, ids) -> 123 | assert.isNull err 124 | assert.eql ids, [term_id, term_id2] 125 | next() 126 | ), 127 | ((next) -> 128 | flo.remove_term term_type, term_id, next 129 | ), 130 | ((next) -> 131 | flo.remove_term term_type, term_id2, next 132 | ), 133 | ((next) -> 134 | # second call should return an error 135 | flo.remove_term term_type, term_id, 136 | (err) -> 137 | assert.isNotNull(err) 138 | next() 139 | ), 140 | ((next) -> 141 | flo.get_ids term_type, term, 142 | (err, ids) -> 143 | assert.eql ids, [] 144 | next() 145 | ), 146 | ((next) -> 147 | flo.search_term [term_type], term, 148 | (err, result) -> 149 | eql = 150 | term: term 151 | eql[term_type] = [] 152 | assert.eql result, eql 153 | next() 154 | ) 155 | ], (err, results) -> 156 | flo.end() 157 | ) 158 | -------------------------------------------------------------------------------- /src/test/helper.test.coffee: -------------------------------------------------------------------------------- 1 | helper = require('../index').Helper 2 | assert = require 'assert' 3 | 4 | module.exports = 5 | 'test strip': () -> 6 | result = helper.strip(" abc") 7 | assert.equal("abc", result) 8 | 9 | result = helper.strip("abc ") 10 | assert.equal("abc", result) 11 | 12 | result = helper.strip(" abc ") 13 | assert.equal("abc", result) 14 | 15 | 'test gsub': () -> 16 | result = helper.gsub("-abc-abc-", /[^a-z0-9 ]/i, '') 17 | assert.equal("abcabc", result) 18 | 19 | result = helper.gsub("-abc-abc-", /[^a-z0-9 ]/i, '*') 20 | assert.equal("*abc*abc*", result) 21 | 22 | result = helper.gsub("!@#abc-!@#abc!@#", /[^a-z0-9 ]/i, '') 23 | assert.equal("abcabc", result) 24 | 25 | 'test gsub with errors': () -> 26 | # missing arguments should just return false 27 | result = helper.gsub("-abc-abc-") 28 | assert.equal("-abc-abc-", result) 29 | 30 | result = helper.gsub("-abc-abc-", //) 31 | assert.equal("-abc-abc-", result) 32 | 33 | 'test normalize': () -> 34 | normalized_str = helper.normalize("a-bc") 35 | assert.equal("abc", normalized_str) 36 | 37 | normalized_str = helper.normalize("a bc") 38 | assert.equal("a bc", normalized_str) 39 | 40 | normalized_str = helper.normalize("a-b!@#$%^&*()c") 41 | assert.equal("abc", normalized_str) 42 | 43 | 
-------------------------------------------------------------------------------- /test/autocomplete.test.js: -------------------------------------------------------------------------------- 1 | // Generated by CoffeeScript 1.3.1 2 | (function() { 3 | var assert, async, flo, _; 4 | 5 | flo = require('../index').connect(); 6 | 7 | async = require('async'); 8 | 9 | _ = require('underscore'); 10 | 11 | assert = require('assert'); 12 | 13 | module.exports = { 14 | 'test prefixes_for_phrase': function() { 15 | var result; 16 | result = flo.prefixes_for_phrase("abc"); 17 | assert.eql(["a", "ab", "abc"], result); 18 | result = flo.prefixes_for_phrase("abc abc"); 19 | assert.eql(["a", "ab", "abc"], result); 20 | result = flo.prefixes_for_phrase("a(*&^%bc"); 21 | assert.eql(["a", "ab", "abc"], result); 22 | result = flo.prefixes_for_phrase("how are you"); 23 | return assert.eql(["h", "ho", "how", "a", "ar", "are", "y", "yo", "you"], result); 24 | }, 25 | 'test key': function() { 26 | var result; 27 | result = flo.key("fun", "abc"); 28 | return assert.equal("flo:fun:abc", result); 29 | }, 30 | 'test add_term': function() { 31 | var term, term_data, term_id, term_score, term_type; 32 | term_type = 'book'; 33 | term_id = 1; 34 | term = "Algorithms for Noob"; 35 | term_score = 10; 36 | term_data = { 37 | ISBN: "123AOU123", 38 | Publisher: "Siong Publication" 39 | }; 40 | return flo.add_term(term_type, term_id, term, term_score, term_data, function() { 41 | return async.parallel([ 42 | (function(callback) { 43 | return flo.redis.hget(flo.key(term_type, "data"), term_id, function(err, reply) { 44 | var result; 45 | result = JSON.parse(reply); 46 | assert.equal(term, result.term); 47 | assert.equal(term_score, result.score); 48 | assert.eql(term_data, result.data); 49 | return callback(); 50 | }); 51 | }), (function(callback) { 52 | var _this = this; 53 | return async.map(flo.prefixes_for_phrase(term), (function(w, cb) { 54 | return flo.redis.zrange(flo.key(term_type, "index", w), 0, -1, cb); 55 | }), function(err, results) { 56 | assert.equal(17, results.length); 57 | results = _.uniq(_.flatten(results)); 58 | assert.equal(1, results[0]); 59 | return callback(); 60 | }); 61 | }) 62 | ]); 63 | }); 64 | }, 65 | 'test search_term': function() { 66 | var venues; 67 | venues = require('../samples/venues').venues; 68 | return async.series([ 69 | (function(callback) { 70 | return async.forEach(venues, (function(venue, cb) { 71 | return flo.add_term("venues", venue.id, venue.term, venue.score, venue.data, function() { 72 | return cb(); 73 | }); 74 | }), callback); 75 | }), (function(callback) { 76 | return flo.search_term(["venues", "food"], "stadium", function(err, results) { 77 | assert.equal(3, results.venues.length); 78 | return callback(); 79 | }); 80 | }), (function(callback) { 81 | return flo.search_term(["venues"], "stadium", 1, function(err, results) { 82 | assert.equal(1, results.venues.length); 83 | return callback(); 84 | }); 85 | }) 86 | ]); 87 | }, 88 | 'test remove_term': function() { 89 | var all_data, term, term_data, term_id, term_id2, term_score, term_type; 90 | term_type = "foods"; 91 | term_id = 2; 92 | term_id2 = 3; 93 | term = "Burger"; 94 | term_score = 10; 95 | term_data = { 96 | temp: "data" 97 | }; 98 | all_data = { 99 | id: term_id, 100 | term: term, 101 | score: term_score, 102 | data: term_data 103 | }; 104 | return async.series([ 105 | (function(next) { 106 | return flo.add_term(term_type, term_id, term, term_score, term_data, next); 107 | }), (function(next) { 108 | return 
flo.get_ids(term_type, term, function(err, ids) { 109 | assert.isNull(err); 110 | assert.eql(ids, [term_id]); 111 | return next(); 112 | }); 113 | }), (function(next) { 114 | return flo.get_data(term_type, term_id, function(err, data) { 115 | assert.isNull(err); 116 | assert.eql(data, all_data); 117 | return next(); 118 | }); 119 | }), (function(next) { 120 | return flo.add_term(term_type, term_id2, term, term_score, term_data, next); 121 | }), (function(next) { 122 | return flo.get_ids(term_type, term, function(err, ids) { 123 | assert.isNull(err); 124 | assert.eql(ids, [term_id, term_id2]); 125 | return next(); 126 | }); 127 | }), (function(next) { 128 | return flo.remove_term(term_type, term_id, next); 129 | }), (function(next) { 130 | return flo.remove_term(term_type, term_id2, next); 131 | }), (function(next) { 132 | return flo.remove_term(term_type, term_id, function(err) { 133 | assert.isNotNull(err); 134 | return next(); 135 | }); 136 | }), (function(next) { 137 | return flo.get_ids(term_type, term, function(err, ids) { 138 | assert.eql(ids, []); 139 | return next(); 140 | }); 141 | }), (function(next) { 142 | return flo.search_term([term_type], term, function(err, result) { 143 | var eql; 144 | eql = { 145 | term: term 146 | }; 147 | eql[term_type] = []; 148 | assert.eql(result, eql); 149 | return next(); 150 | }); 151 | }) 152 | ], function(err, results) { 153 | return flo.end(); 154 | }); 155 | } 156 | }; 157 | 158 | }).call(this); 159 | -------------------------------------------------------------------------------- /test/helper.test.js: -------------------------------------------------------------------------------- 1 | // Generated by CoffeeScript 1.3.1 2 | (function() { 3 | var assert, helper; 4 | 5 | helper = require('../index').Helper; 6 | 7 | assert = require('assert'); 8 | 9 | module.exports = { 10 | 'test strip': function() { 11 | var result; 12 | result = helper.strip(" abc"); 13 | assert.equal("abc", result); 14 | result = helper.strip("abc "); 15 | assert.equal("abc", result); 16 | result = helper.strip(" abc "); 17 | return assert.equal("abc", result); 18 | }, 19 | 'test gsub': function() { 20 | var result; 21 | result = helper.gsub("-abc-abc-", /[^a-z0-9 ]/i, ''); 22 | assert.equal("abcabc", result); 23 | result = helper.gsub("-abc-abc-", /[^a-z0-9 ]/i, '*'); 24 | assert.equal("*abc*abc*", result); 25 | result = helper.gsub("!@#abc-!@#abc!@#", /[^a-z0-9 ]/i, ''); 26 | return assert.equal("abcabc", result); 27 | }, 28 | 'test gsub with errors': function() { 29 | var result; 30 | result = helper.gsub("-abc-abc-"); 31 | assert.equal("-abc-abc-", result); 32 | result = helper.gsub("-abc-abc-", /(?:)/); 33 | return assert.equal("-abc-abc-", result); 34 | }, 35 | 'test normalize': function() { 36 | var normalized_str; 37 | normalized_str = helper.normalize("a-bc"); 38 | assert.equal("abc", normalized_str); 39 | normalized_str = helper.normalize("a bc"); 40 | assert.equal("a bc", normalized_str); 41 | normalized_str = helper.normalize("a-b!@#$%^&*()c"); 42 | return assert.equal("abc", normalized_str); 43 | } 44 | }; 45 | 46 | }).call(this); 47 | --------------------------------------------------------------------------------