├── .dockerignore
├── .gitignore
├── .modulusignore
├── .npmignore
├── Dockerfile
├── LICENSE
├── README.md
├── app.coffee
├── app.yaml
├── clean.coffee
├── configure.coffee
├── database
│   ├── index.coffee
│   ├── plugins
│   │   ├── lastModified.coffee
│   │   └── paginate.coffee
│   └── schema
│       ├── package.coffee
│       ├── plunk.coffee
│       ├── session.coffee
│       └── user.coffee
├── errors.coffee
├── index.js
├── middleware
│   ├── analytics.coffee
│   ├── cors.coffee
│   ├── nocache.coffee
│   ├── schema.coffee
│   ├── session.coffee
│   └── version.coffee
├── package.json
├── resources
│   ├── comments.coffee
│   ├── packages.coffee
│   ├── plunks.coffee
│   ├── schema
│   │   ├── packages
│   │   │   ├── create.json
│   │   │   ├── update.json
│   │   │   └── versions
│   │   │       ├── create.json
│   │   │       └── update.json
│   │   └── plunks
│   │       ├── create.json
│   │       ├── fork.json
│   │       └── update.json
│   ├── sessions.coffee
│   ├── tags.coffee
│   └── users.coffee
└── server.js
/.dockerignore:
--------------------------------------------------------------------------------
1 | lib-cov
2 | *.seed
3 | *.log
4 | *.csv
5 | *.dat
6 | *.out
7 | *.pid
8 | *.gz
9 |
10 | pids
11 | logs
12 | results
13 |
14 | .git
15 | node_modules
16 |
17 | npm-debug.log
18 |
19 | .c9revisions
20 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | lib-cov
2 | *.seed
3 | *.log
4 | *.csv
5 | *.dat
6 | *.out
7 | *.pid
8 | *.gz
9 |
10 | pids
11 | logs
12 | results
13 |
14 | config.development.json
15 | config.production.json
16 |
17 | node_modules
18 |
19 | npm-debug.log
20 |
21 | .c9revisions
22 |
23 | sessions.json
24 |
--------------------------------------------------------------------------------
/.modulusignore:
--------------------------------------------------------------------------------
1 | lib-cov
2 | *.seed
3 | *.log
4 | *.csv
5 | *.dat
6 | *.out
7 | *.pid
8 | *.gz
9 |
10 | pids
11 | logs
12 | results
13 |
14 | node_modules
15 |
16 | npm-debug.log
17 |
18 | .c9revisions
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | lib-cov
2 | *.seed
3 | *.log
4 | *.csv
5 | *.dat
6 | *.out
7 | *.pid
8 | *.gz
9 |
10 | pids
11 | logs
12 | results
13 |
14 | node_modules
15 |
16 | npm-debug.log
17 |
18 | .c9revisions
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM google/nodejs-runtime
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2013 Geoffrey Goodman
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in
13 | all copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 | THE SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | plunker_api
2 | ===========
3 |
4 | The public API off-which Plunker runs
--------------------------------------------------------------------------------
/app.coffee:
--------------------------------------------------------------------------------
1 | appengine = require('appengine')
2 | express = require("express")
3 | morgan = require("morgan")
4 | nconf = require("nconf")
5 | cors = require("cors")
6 |
7 |
8 | # Set defaults in nconf
9 | require "./configure"
10 |
11 |
12 | validateSchema = require("./middleware/schema")
13 | apiErrors = require("./errors")
14 |
15 |
16 |
17 | app = module.exports = express()
18 | apiUrl = nconf.get("url:api")
19 | wwwUrl = nconf.get("url:www")
20 |
21 |
22 | {Session} = require("./database")
23 |
24 |
25 | errorHandler = (err, req, res, next) ->
26 | if err instanceof apiErrors.ApiError
27 | res.json err.httpCode, err.toJSON()
28 | console.log "[ERR]", err.toJSON()
29 | else next(err)
30 |
31 | #allowedCorsOrigins = [nconf.get('url:www'), nconf.get('url:embed'), "http://plnkr.co"]
32 | corsOptions =
33 | origin: true
34 | exposeHeaders: "Link"
35 | maxAge: 60
36 |
37 |
38 | app.set "jsonp callback", true
39 |
40 | app.use appengine.middleware.base
41 | app.use morgan("short")
42 | app.use require("./middleware/cors").middleware()
43 | app.use express.bodyParser()
44 | app.use require("./middleware/version").middleware()
45 | app.use require("./middleware/nocache").middleware()
46 | app.use require("./middleware/session").middleware(sessions: Session)
47 | app.use app.router
48 | app.use errorHandler
49 | app.use express.errorHandler()
50 |
51 |
52 |
53 | # Sessions
54 | sessions = require "./resources/sessions"
55 |
56 |
57 | # Users
58 | users = require "./resources/users"
59 |
60 |
61 | app.get "/_ah/start", (req, res) ->
62 | res.send(200, "OK")
63 | app.get "/_ah/stop", (req, res) ->
64 | res.send(200, "OK")
65 | process.exit(0)
66 | app.get "/_ah/health", (req, res) ->
67 | res.send(200, "OK")
68 |
69 | protocolRelativeUrl = wwwUrl.replace(/^https?:/, '')
70 |
71 | app.get "/proxy.html", (req, res) ->
72 | res.send """
73 |
74 |
75 |
81 | """
82 |
83 |
84 |
85 | app.get "/sessions", sessions.findOrCreate
86 | app.post "/sessions", sessions.create
87 | app.get "/sessions/:id", sessions.read
88 |
89 | app.post "/sessions/:id/user", sessions.withSession, sessions.setUser
90 | app.del "/sessions/:id/user", sessions.withSession, sessions.unsetUser
91 |
92 |
93 | # Make sure all non-user, non-session put/post requests have a session assigned
94 | app.put "*", sessions.withCurrentSession
95 | app.post "*", sessions.withCurrentSession
96 |
97 |
98 |
99 | # Plunks
100 | plunks = require "./resources/plunks"
101 |
102 |
103 | app.get "/plunks", plunks.createListing (req, res) ->
104 | baseUrl: "#{apiUrl}/plunks"
105 | sort: "-updated_at"
106 | ignorePrivate: true
107 | onlyPublic: true
108 | app.get "/plunks/trending", plunks.createListing (req, res) ->
109 | baseUrl: "#{apiUrl}/plunks/trending"
110 | sort: "-score -updated_at"
111 | ignorePrivate: true
112 | onlyPublic: true
113 | app.get "/plunks/popular", plunks.createListing (req, res) ->
114 | baseUrl: "#{apiUrl}/plunks/popular"
115 | sort: "-thumbs -updated_at"
116 | ignorePrivate: true
117 | onlyPublic: true
118 |
119 | app.get "/plunks/views", plunks.createListing (req, res) ->
120 | baseUrl: "#{apiUrl}/plunks/views"
121 | sort: "-views -updated_at"
122 | ignorePrivate: true
123 | onlyPublic: true
124 |
125 | app.get "/plunks/forked", plunks.createListing (req, res) ->
126 | baseUrl: "#{apiUrl}/plunks/forked"
127 | sort: "-forked -updated_at"
128 | ignorePrivate: true
129 | onlyPublic: true
130 |
131 |
132 | app.get "/plunks/remembered", users.withCurrentUser, plunks.createListing (req, res) ->
133 | sort: "-updated_at"
134 | ignorePrivate: true
135 | query: {rememberers: req.currentUser._id}
136 | baseUrl: "#{apiUrl}/users/#{req.params.login}/remembered"
137 |
138 | app.post "/plunks", sessions.withCurrentSession, validateSchema(plunks.schema.create), plunks.create
139 | app.get "/plunks/:id", plunks.withPlunk, plunks.read
140 | app.post "/plunks/:id", sessions.withCurrentSession, validateSchema(plunks.schema.update), plunks.withPlunk, plunks.ownsPlunk, plunks.update
141 | app.del "/plunks/:id", plunks.withPlunk, plunks.ownsPlunk, plunks.destroy
142 |
143 | app.post "/plunks/:id/freeze", sessions.withCurrentSession, plunks.withPlunk, plunks.ownsPlunk, plunks.freeze
144 | app.del "/plunks/:id/freeze", sessions.withCurrentSession, plunks.withPlunk, plunks.ownsPlunk, plunks.unfreeze
145 |
146 | app.post "/plunks/:id/thumb", sessions.withCurrentSession, plunks.withPlunk, plunks.setThumbed
147 | app.del "/plunks/:id/thumb", sessions.withCurrentSession, plunks.withPlunk, plunks.unsetThumbed
148 |
149 | app.post "/plunks/:id/remembered", sessions.withCurrentSession, plunks.withPlunk, plunks.setRemembered
150 | app.del "/plunks/:id/remembered", sessions.withCurrentSession, plunks.withPlunk, plunks.unsetRemembered
151 |
152 | forkSchema = (req) ->
153 | if req.apiVersion is 0 then plunks.schema.create
154 | else if req.apiVersion is 1 then plunks.schema.fork
155 |
156 | app.post "/plunks/:id/forks", sessions.withCurrentSession, validateSchema(forkSchema), plunks.withPlunk, plunks.fork
157 | app.get "/plunks/:id/forks", plunks.createListing (req, res) ->
158 | query: {fork_of: req.params.id}
159 | baseUrl: "#{apiUrl}/plunk/#{req.params.id}/forks"
160 | sort: "-updated_at"
161 | ignorePrivate: true
162 | onlyPublic: true
163 |
164 |
165 |
166 | #app.get "/templates", plunks.createListing (req, res) ->
167 | #query = type: "template"
168 | #
169 | #if taglist = req.query.taglist then query.tags = {$all: taglist.split(",")}
170 | #
171 | #baseUrl: "#{apiUrl}/templates"
172 | #query: query
173 | #sort: "-thumbs -updated_at"
174 | #onlyPublic: true
175 |
176 |
177 |
178 |
179 | app.get "/users/:login", users.withUser, users.read
180 |
181 | app.get "/users/:login/plunks", users.withUser, plunks.createListing (req, res) ->
182 | sort: "-updated_at"
183 | query: {user: req.user._id}
184 | baseUrl: "#{apiUrl}/users/#{req.params.login}/plunks"
185 | ignorePrivate: req.currentUser and req.currentUser.login == req.params.login
186 | onlyPublic: !req.currentUser or req.currentUser.login != req.params.login
187 | app.get "/users/:login/plunks/tagged/:tag", users.withUser, plunks.createListing (req, res) ->
188 | sort: "-updated_at"
189 | query: {user: req.user._id, tags: req.params.tag}
190 | baseUrl: "#{apiUrl}/users/#{req.params.login}/plunks/tagged/#{req.params.tag}"
191 | ignorePrivate: req.currentUser and req.currentUser.login == req.params.login
192 | onlyPublic: !req.currentUser or req.currentUser.login != req.params.login
193 | app.get "/users/:login/thumbed", users.withUser, plunks.createListing (req, res) ->
194 | sort: "-updated_at"
195 | query: {voters: req.user._id}
196 | baseUrl: "#{apiUrl}/users/#{req.params.login}/thumbed"
197 | ignorePrivate: req.currentUser and req.currentUser.login == req.params.login
198 | onlyPublic: !req.currentUser or req.currentUser.login != req.params.login
199 | app.get "/users/:login/remembered", users.withUser, plunks.createListing (req, res) ->
200 | sort: "-updated_at"
201 | query: {rememberers: req.user._id}
202 | baseUrl: "#{apiUrl}/users/#{req.params.login}/remembered"
203 | ignorePrivate: req.currentUser and req.currentUser.login == req.params.login
204 | onlyPublic: !req.currentUser or req.currentUser.login != req.params.login
205 |
206 | ###
207 |
208 | # Comments
209 | comments = require "./resources/comments"
210 |
211 |
212 | app.post "/plunks/:id/comments", comments.create
213 | app.get "/comments/:id", comments.read
214 | app.post "/comments/:id", comments.update
215 | app.del "/comments/:id", comments.destroy
216 |
217 | ###
218 |
219 | # Catalogue
220 | packages = require "./resources/packages"
221 |
222 | app.get "/catalogue/packages", packages.createListing (req, res) ->
223 | if q = req.param("query") then query: name: $regex: "^#{q}"
224 | else {}
225 |
226 | #app.post "/catalogue/packages", validateSchema(packages.schema.create), packages.create
227 | app.post "/catalogue/packages", validateSchema(packages.schema.create), users.withCurrentUser, packages.create
228 | app.get "/catalogue/packages/:name", packages.withPackage, packages.read
229 | app.post "/catalogue/packages/:name", validateSchema(packages.schema.update), users.withCurrentUser, packages.withPackage, packages.update
230 | app.post "/catalogue/packages/:name/bump", users.withCurrentUser, packages.withPackage, packages.bump
231 | app.del "/catalogue/packages/:name", packages.withOwnPackage, packages.destroy
232 |
233 | app.post "/catalogue/packages/:name/maintainers", packages.withOwnPackage, packages.addMaintainer
234 | app.del "/catalogue/packages/:name/maintainers", packages.withOwnPackage, packages.removeMaintainer
235 |
236 | app.post "/catalogue/packages/:name/versions", validateSchema(packages.schema.versions.create), users.withCurrentUser, packages.withPackage, packages.versions.create
237 | #app.get "/catalogue/packages/:name/versions/:semver", packages.withPackage, packages.readVersion
238 | app.post "/catalogue/packages/:name/versions/:semver", validateSchema(packages.schema.versions.update), users.withCurrentUser, packages.withPackage, packages.versions.update
239 | app.del "/catalogue/packages/:name/versions/:semver", packages.withOwnPackage, packages.versions.destroy
240 |
241 | # Tags
242 | tags = require "./resources/tags"
243 |
244 | app.get "/tags", tags.list
245 | app.get "/tags/:taglist", plunks.createListing (req, res) ->
246 | taglist = req.params.taglist.split(",")
247 |
248 | return res.json [] unless taglist.length
249 |
250 | query: if taglist.length > 1 then {tags: {$all: taglist}} else {tags: taglist[0]}
251 | baseUrl: "#{apiUrl}/tags/#{req.params.taglist}"
252 | sort: "-score -updated_at"
253 |
254 | #app.get "/robots.txt", (req, res, next) ->
255 | # res.send """
256 | # User-Agent: *
257 | # Disallow: /
258 | # """
259 |
260 | app.get "/favicon.ico", (req, res, next) ->
261 | res.send("")
262 |
263 | app.all "*", (req, res, next) -> next(new apiErrors.NotFound)
264 |
265 |
266 | PRUNE_FREQUENCY = 1000 * 60 * 60 * 6 # Prune the sessions every 6 hours
267 |
268 | pruneSessions = ->
269 | console.log "[INFO] Pruning sessions"
270 | sessions.prune (err, numDocs) ->
271 | if err then console.log "[ERR] Pruning failed", err.message
272 | else console.log "[OK] Pruned #{numDocs} sessions"
273 |
274 | setTimeout pruneSessions, PRUNE_FREQUENCY
275 |
276 | pruneSessions()
277 |
--------------------------------------------------------------------------------
/app.yaml:
--------------------------------------------------------------------------------
1 | runtime: nodejs
2 | vm: true
3 | api_version: 1
4 |
5 | manual_scaling:
6 | instances: 1
--------------------------------------------------------------------------------
/clean.coffee:
--------------------------------------------------------------------------------
1 | config = require("./configure")
2 | db = require("./database")
3 |
4 | db.Plunk.count({_id: $in: [
5 |
6 | ]}, (err) -> console.log "DONE", arguments...)
--------------------------------------------------------------------------------
/configure.coffee:
--------------------------------------------------------------------------------
1 | nconf = require("nconf")
2 | analytics = require("analytics-node")
3 |
4 | env = process.env.NODE_ENV or "development"
5 |
6 | nconf.use("memory")
7 | .file({file: "config.#{env}.json"})
8 | .defaults({
9 | PORT: 8888,
10 | })
11 |
12 | analytics.init(config) if config = nconf.get("analytics")
13 |
14 | unless host = nconf.get("host")
15 | console.error "The 'host' option is required for Plunker to run."
16 | process.exit(1)
--------------------------------------------------------------------------------
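The config.<env>.json files referenced above are git-ignored, so as orientation here is a sketch of the keys the rest of the codebase reads through nconf.get, written as the CoffeeScript object nconf would expose. Every value below is a placeholder, not something taken from this repository.

# Hypothetical shape of config.development.json, shown as the object nconf
# would return. All values are placeholders.
#   host         - required; configure.coffee exits when it is missing
#   PORT         - defaults to 8888 (see above)
#   url:*        - read elsewhere as nconf.get("url:api"), "url:www", "url:run", "url:embed"
#   mongodb:uri  - read in database/index.coffee
#   analytics    - optional; handed to analytics.init when present
exampleConfig =
  host: "localhost"
  PORT: 8888
  url:
    api: "http://api.localhost:8888"
    www: "http://localhost:8080"
    run: "http://run.localhost:8080"
    embed: "http://embed.localhost:8080"
  mongodb:
    uri: "mongodb://localhost/plunker"
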
/database/index.coffee:
--------------------------------------------------------------------------------
1 | mongoose = require("mongoose")
2 | nconf = require("nconf")
3 | mime = require("mime")
4 | genid = require("genid")
5 | url = require("url")
6 |
7 |
8 | apiUrl = nconf.get('url:api')
9 | wwwUrl = nconf.get('url:www')
10 | runUrl = nconf.get('url:run')
11 |
12 | errorConnecting = ->
13 | console.error "Error connecting to mongodb"
14 | process.exit(1)
15 |
16 | plunkerDb = mongoose.createConnection nconf.get("mongodb:uri")
17 | plunkerDbTimeout = setTimeout(errorConnecting, 1000 * 30)
18 |
19 | plunkerDb.on "open", -> clearTimeout(plunkerDbTimeout)
20 | plunkerDb.on "error", (err) -> console.log "[ERR] Database error:", err
21 | plunkerDb.on "disconnected", (err) -> console.log "[WARN] Database disconnected:", arguments...
22 | plunkerDb.on "reconnected", (err) -> console.log "[WARN] Database reconnected:", arguments...
23 |
24 |
25 |
26 | # Enable Query::paginate
27 | require "./plugins/paginate"
28 |
29 |
30 |
31 | module.exports =
32 | Session: plunkerDb.model "Session", require("./schema/session").SessionSchema
33 | User: plunkerDb.model "User", require("./schema/user").UserSchema
34 | Plunk: plunkerDb.model "Plunk", require("./schema/plunk").PlunkSchema
35 | Package:plunkerDb.model "Package", require("./schema/package").PackageSchema
36 |
37 |
38 |
--------------------------------------------------------------------------------
/database/plugins/lastModified.coffee:
--------------------------------------------------------------------------------
1 | module.exports = (schema, options = {}) ->
2 | schema.add updated_at: Date
3 | schema.pre "save", (next) ->
4 | @updated_at = Date.now()
5 | next()
6 |
7 | if options.index then schema.path("updated_at").index(options.index)
--------------------------------------------------------------------------------
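A minimal usage sketch for this plugin. The schemas under database/schema currently declare updated_at themselves, so this is illustrative only; SomeSchema is a hypothetical schema.

# Illustrative only: a hypothetical schema opting into the plugin so that
# updated_at is refreshed on every save and indexed.
mongoose = require("mongoose")
lastModified = require("./plugins/lastModified")   # path assumed relative to database/

SomeSchema = new mongoose.Schema
  name: String

SomeSchema.plugin lastModified, index: true
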
/database/plugins/paginate.coffee:
--------------------------------------------------------------------------------
1 | mongoose = require("mongoose")
2 | memoize = require("memoize")
3 |
4 | {Query} = mongoose
5 |
6 | # Add a pagination method to mongoose to simplify this repetitive stuff
7 | Query::paginate = (page, limit, cb) ->
8 | page = Math.max(1, parseInt(page, 10))
9 | limit = Math.max(4, Math.min(12, parseInt(limit, 10))) # [4, 10]
10 | query = @
11 | countRecords = memoize @model.count.bind(@model),
12 | expire: 1000 * 60 # One minute
13 |
14 | query.skip(page * limit - limit).limit(limit).exec (err, docs) ->
15 | if err then return cb(err, null, null)
16 | countRecords query._conditions, (err, count) ->
17 | if err then return cb(err, null, null)
18 | cb(null, docs, count, Math.ceil(count / limit), page)
--------------------------------------------------------------------------------
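For reference, a minimal sketch of how the resource modules call this helper; it mirrors the listing code in resources/plunks.coffee, where page and limit come from the p and pp query parameters.

# Usage sketch, mirroring resources/plunks.coffee: paginate clamps the limit
# to [4, 12] and hands back docs plus count/pages/current for building links.
{Plunk} = require("../database")   # path assumed relative to a resources/ module

Plunk.find('private': false)
  .sort("-updated_at")
  .paginate 2, 8, (err, plunks, count, pages, current) ->
    return console.error "[ERR]", err.message if err
    console.log "page #{current}/#{pages}, #{plunks.length} of #{count} plunks"
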
/database/schema/package.coffee:
--------------------------------------------------------------------------------
1 | mongoose = require("mongoose")
2 |
3 |
4 | {Schema, Document, Query} = mongoose
5 | {ObjectId, Mixed} = Schema.Types
6 |
7 | PackageDependencySchema = new Schema
8 | name: String
9 | range: String
10 |
11 | PackageVersionSchema = new Schema
12 | semver: String
13 | unstable: { type: Boolean, default: false }
14 | scripts: [String]
15 | styles: [String]
16 | dependencies: [PackageDependencySchema]
17 |
18 | PackageSchema = new Schema
19 | name: { type: String, match: /^[-_.a-z0-9]+$/i, index: true, unique: true }
20 | created_at: { type: Date, default: Date.now() }
21 | versionCount: { type: Number, default: 0 }
22 | description: { type: String }
23 | homepage: String
24 | documentation: String
25 | keywords: [{type: String, index: true}]
26 | versions: [PackageVersionSchema]
27 | categories: [String]
28 | bumps: { type: Number, default: 0, index: true }
29 | maintainers: [{ type: String, index: true }]
30 |
31 | ###
32 | PackageSchema.index {
33 | name: "text"
34 | description: "text"
35 | keywords: "text"
36 | }, {
37 | name: "typeahead"
38 | weights:
39 | name: 3
40 | description: 1
41 | keywords: 2
42 | }
43 | ###
44 |
45 | exports.PackageSchema = PackageSchema
--------------------------------------------------------------------------------
/database/schema/plunk.coffee:
--------------------------------------------------------------------------------
1 | mongoose = require("mongoose")
2 | genid = require("genid")
3 | nconf = require("nconf")
4 | mime = require("mime")
5 |
6 | {Schema, Document, Query} = mongoose
7 | {ObjectId, Mixed} = Schema.Types
8 |
9 | wwwUrl = nconf.get("url:www")
10 | apiUrl = nconf.get("url:api")
11 | runUrl = nconf.get("url:run")
12 |
13 |
14 | PlunkFileSchema = new Schema
15 | filename: String
16 | content: String
17 |
18 | #PlunkFileSchema.virtual("mime").get -> mime.lookup(@filename, "text/plain")
19 |
20 | PlunkVoteSchema = new Schema
21 | user: { type: Schema.ObjectId, ref: "User" }
22 | created_at: { type: Date, 'default': Date.now }
23 |
24 | PlunkChangeSchema = new Schema
25 | fn: String # Current/new filename
26 | pn: String # Previous filename
27 | pl: String # Payload (contents / diff)
28 |
29 | PlunkHistorySchema = new Schema
30 | event: { type: String, 'enum': ["create", "update", "fork"] }
31 | user: { type: Schema.ObjectId, ref: "User" }
32 | changes: [PlunkChangeSchema]
33 |
34 | PlunkHistorySchema.virtual("created_at").get -> new Date(parseInt(@_id.toString().substring(0, 8), 16) * 1000)
35 |
36 | PlunkSchema = new Schema
37 | _id: { type: String, index: true }
38 | description: String
39 | score: { type: Number, 'default': Date.now }
40 | thumbs: { type: Number, 'default': 0 }
41 | created_at: { type: Date, 'default': Date.now }
42 | updated_at: { type: Date, 'default': Date.now }
43 | token: { type: String, 'default': genid.bind(null, 16) }
44 | 'private': { type: Boolean, 'default': false, index: true }
45 | template: { type: Boolean, 'default': false }
46 | source: {}
47 | files: [PlunkFileSchema]
48 | user: { type: Schema.ObjectId, ref: "User", index: true }
49 | comments: { type: Number, 'default': 0 }
50 | fork_of: { type: String, ref: "Plunk", index: true }
51 | forks: [{ type: String, ref: "Plunk", index: true }]
52 | tags: [{ type: String, index: true}]
53 | voters: [{ type: Schema.ObjectId, ref: "Users", index: true }]
54 | rememberers: [{ type: Schema.ObjectId, ref: "Users", index: true }]
55 | history: [PlunkHistorySchema]
56 | type: { type: String, 'default': "plunk", 'enum': "plunk template".split(" "), index: true }
57 | views: { type: Number, 'default': 0 }
58 | forked: { type: Number, 'default': 0 }
59 | frozen_version: { type: Number }
60 | frozen_at: { type: Date, 'default': 0, index: true }
61 |
62 | PlunkSchema.index(score: -1, updated_at: -1)
63 | PlunkSchema.index(thumbs: -1, updated_at: -1)
64 | PlunkSchema.index(views: -1, updated_at: -1)
65 | PlunkSchema.index(forked: -1, updated_at: -1)
66 |
67 | PlunkSchema.virtual("url").get -> apiUrl + "/plunks/#{@_id}"
68 | PlunkSchema.virtual("raw_url").get -> runUrl + "/plunks/#{@_id}/"
69 | PlunkSchema.virtual("comments_url").get -> wwwUrl + "/#{@_id}/comments"
70 |
71 | exports.PlunkSchema = PlunkSchema
--------------------------------------------------------------------------------
/database/schema/session.coffee:
--------------------------------------------------------------------------------
1 | mongoose = require("mongoose")
2 | nconf = require("nconf")
3 | genid = require("genid")
4 |
5 | apiUrl = nconf.get("url:api")
6 |
7 |
8 | {Schema, Document, Query} = mongoose
9 | {ObjectId, Mixed} = Schema.Types
10 |
11 |
12 | TokenSchema = new Schema
13 | _id: { type: String, ref: "Plunk" }
14 | token: { type: String }
15 |
16 |
17 | SessionSchema = new Schema
18 | user:
19 | type: Schema.ObjectId
20 | ref: "User"
21 | user_info: {}
22 | last_access: { type: Date, index: true, 'default': Date.now }
23 | public_id: { type: String, 'default': genid }
24 | auth: {}
25 | keychain: [TokenSchema]
26 |
27 | SessionSchema.virtual("url").get -> apiUrl + "/sessions/#{@_id}"
28 | SessionSchema.virtual("user_url").get -> apiUrl + "/sessions/#{@_id}/user"
29 | SessionSchema.virtual("age").get -> Date.now() - @last_access
30 |
31 | SessionSchema.set "toJSON",
32 | virtuals: true
33 | getters: true
34 | transform: (session, json, options) ->
35 | json.id = json._id
36 |
37 | json.user = json.user_info if json.user_info
38 |
39 | delete json.user_info
40 | delete json._id
41 | delete json.__v
42 |
43 | json
44 | exports.SessionSchema = SessionSchema
--------------------------------------------------------------------------------
/database/schema/user.coffee:
--------------------------------------------------------------------------------
1 | mongoose = require("mongoose")
2 |
3 |
4 | {Schema, Document, Query} = mongoose
5 | {ObjectId, Mixed} = Schema.Types
6 |
7 |
8 | exports.UserSchema = UserSchema = new Schema
9 | login: { type: String, index: true }
10 | gravatar_id: String
11 | service_id: { type: String, index: { unique: true } }
12 |
13 | UserSchema.virtual("created_at").get -> new Date(parseInt(@_id.toString().substring(0, 8), 16) * 1000)
14 |
15 | UserSchema.set "toJSON",
16 | virtuals: true
17 | transform: (user, json, options) ->
18 | json.id = json._id
19 |
20 | delete json._id
21 | delete json.__v
22 | delete json.service_id
23 |
24 | json
25 |
--------------------------------------------------------------------------------
/errors.coffee:
--------------------------------------------------------------------------------
1 | exports.ApiError = class ApiError extends Error
2 |
3 | createErrorClass = (name, classOptions = {}) ->
4 |
5 | classOptions.message ||= "Unknown error"
6 | classOptions.httpCode ||= 500
7 | classOptions.initialize ||= (message, options = {}) ->
8 | @message = message if message
9 | @[prop] = val for prop, val of options
10 | classOptions.toJSON ||= ->
11 | error: @message
12 |
13 |
14 |
15 | class extends ApiError
16 | @::[prop] = val for prop, val of classOptions
17 |
18 | constructor: ->
19 | Error.call(@)
20 | Error.captureStackTrace(@, arguments.callee)
21 |
22 | @name = name
23 |
24 | @initialize(arguments...)
25 |
26 |
27 |
28 | errorTypes =
29 | ResourceExists:
30 | httpCode: 400
31 | message: "Resource exists"
32 | DatabaseError:
33 | httpCode: 400
34 | message: "Database error"
35 | initialize: (err) -> console.error("[ERR] #{@message}", err, err.stack)
36 | InvalidBody:
37 | httpCode: 400
38 | message: "Invalid payload"
39 | initialize: (err) -> @invalid = err.message
40 | toJSON: ->
41 | message: @message
42 | invalid: @invalid
43 | NotFound:
44 | message: "Not Found"
45 | httpCode: 404
46 | PermissionDenied:
47 | message: "Permission denied"
48 | httpCode: 404
49 | ImpossibleError:
50 | message: "Impossibru"
51 | initialize: (err) -> console.error("[ERR] #{@message}", err)
52 |
53 | exports[name] = createErrorClass(name, errDef) for name, errDef of errorTypes
54 |
--------------------------------------------------------------------------------
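A short usage sketch for the classes generated above: route handlers pass instances to next(), and the errorHandler wired in app.coffee replies with err.httpCode and err.toJSON().

# Usage sketch: ApiError subclasses carry an httpCode and a toJSON() payload.
apiErrors = require("./errors")

notFound = new apiErrors.NotFound
console.log notFound.httpCode   # 404
console.log notFound.toJSON()   # { error: 'Not Found' }

invalid = new apiErrors.InvalidBody(new Error("description is required"))
console.log invalid.httpCode    # 400
console.log invalid.toJSON()    # { message: 'Invalid payload', invalid: 'description is required' }
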
/index.js:
--------------------------------------------------------------------------------
1 | require("coffee-script");
2 |
3 | process.env.NODE_ENV = "production";
4 |
5 | var nconf = require("nconf")
6 | , http = require("http")
7 | , server = require("./app")
8 | , domain = require("domain")
9 | , serverDomain = domain.create();
10 |
11 |
12 | serverDomain.run(function(){
13 | http.createServer(function(req, res){
14 | var reqd = domain.create();
15 | reqd.add(req);
16 | reqd.add(res);
17 |
18 | // On error dispose of the domain
19 | reqd.on('error', function (error) {
20 | console.log('[ERR]', error.code, error.message, req.url);
21 | reqd.dispose();
22 | });
23 |
24 | // Pass the request to express
25 | server(req, res);
26 |
27 | }).listen(nconf.get("PORT"), function(){
28 | console.log("[OK] Server started");
29 | });
30 |
31 | });
32 |
33 | serverDomain.on("error", function (err) {
34 | console.log('[ERR]', "Server level error", err.stack);
35 | });
36 |
37 | process.on('uncaughtException', function(err) {
38 | console.log("[ERR] Uncaught exception: " + err);
39 | });
--------------------------------------------------------------------------------
/middleware/analytics.coffee:
--------------------------------------------------------------------------------
1 | analytics = require("analytics-node")
2 | _ = require("underscore")._
3 |
4 | module.exports = (config = {}) ->
5 | (req, res, next) ->
6 | # Tracking is currently a no-op stub
7 | req.track = (event, properties = {}) ->
8 |
9 | req.analytics = {}
10 | req.analytics.userId = req.currentUser._id if req.currentUser
11 |
12 | next()
13 |
14 |
--------------------------------------------------------------------------------
/middleware/cors.coffee:
--------------------------------------------------------------------------------
1 | nconf = require("nconf")
2 | _ = require("lodash")
3 |
4 | module.exports.middleware = (config = {}) ->
5 |
6 | valid = [nconf.get('url:www'), nconf.get('url:embed'), "http://plnkr.co"]
7 |
8 | (req, res, next) ->
9 | # Just send the headers all the time. That way we won't miss the right request ;-)
10 | # Other CORS middleware just wouldn't work for me
11 | # TODO: Minimize these headers to only those needed at the right time
12 |
13 | res.header("Access-Control-Allow-Origin", if req.headers.origin in valid then req.headers.origin else "*")
14 | res.header("Access-Control-Allow-Methods", "OPTIONS,GET,PUT,POST,DELETE")
15 |
16 | if requestHeaders = req.headers['access-control-request-headers']
17 | allowHeaders = _(requestHeaders.split(",")).invoke("trim").invoke("toLowerCase").sort().value().join(", ")
18 | res.header("Access-Control-Allow-Headers", allowHeaders)
19 |
20 | res.header("Access-Control-Expose-Headers", "Link")
21 | res.header("Access-Control-Max-Age", "60")
22 |
23 | if "OPTIONS" == req.method then res.send(200)
24 | else next()
--------------------------------------------------------------------------------
/middleware/nocache.coffee:
--------------------------------------------------------------------------------
1 | module.exports.middleware = (config = {}) ->
2 | (req, res, next) ->
3 | res.set
4 | "Cache-Control": "no-cache"
5 | "Expires": 0
6 |
7 | next()
--------------------------------------------------------------------------------
/middleware/schema.coffee:
--------------------------------------------------------------------------------
1 | _ = require("underscore")._
2 | apiErrors = require("../errors")
3 |
4 | module.exports = (schema) ->
5 | (req, res, next) ->
6 | activeSchema = if _.isFunction(schema) then schema(req) else schema
7 | activeSchema.validate req.body, (err, json) ->
8 | console.log "[INFO] Invalid schema", err if err
9 | if err then next(new apiErrors.InvalidBody(err))
10 | else next()
--------------------------------------------------------------------------------
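A wiring sketch for this middleware, mirroring app.coffee: validateSchema accepts either a json-gate schema object or a function of the request that picks a schema per call, as forkSchema does there. Other route middleware is omitted for brevity.

# Wiring sketch (mirrors app.coffee): a fixed schema and a per-request one.
express = require("express")
validateSchema = require("./middleware/schema")   # path assumed relative to the app root
plunks = require("./resources/plunks")

app = express()

# Fixed schema: every POST body is validated against plunks.schema.create
app.post "/plunks", validateSchema(plunks.schema.create), plunks.create

# Per-request schema: the function is re-evaluated for every request
forkSchema = (req) ->
  if req.apiVersion is 0 then plunks.schema.create else plunks.schema.fork

app.post "/plunks/:id/forks", validateSchema(forkSchema), plunks.fork
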
/middleware/session.coffee:
--------------------------------------------------------------------------------
1 | nconf = require("nconf")
2 | sessions = require("../resources/sessions")
3 | errors = require("../errors")
4 |
5 | badSessions = ["52a56d9f1aeed79fe80163ea"]
6 |
7 | module.exports.middleware = (config = {}) ->
8 | (req, res, next) ->
9 | if req.query.sessid then sessid = req.query.sessid
10 | else if auth = req.get("authorization") then [header, sessid] = auth.match(/^token (\S+)$/i) or []
11 |
12 | if sessid
13 | sessions.loadSession sessid, (err, session) ->
14 | return next(err) if err
15 | return next() unless session
16 |
17 | unless 0 > badSessions.indexOf(sessid)
18 | console.log "[SPAM] Filtering out spam for sessid: #{sessid}"
19 | return next(new errors.NotFound)
20 |
21 | req.currentSession = session
22 | req.currentUser = session.user if session.user
23 |
24 | next()
25 | else
26 | next()
--------------------------------------------------------------------------------
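For reference, a client-side sketch of the two ways this middleware picks up a session id: the sessid query parameter, or an Authorization header of the form token <sessid>. The host and session id are placeholders; request is the module listed in package.json.

# Client sketch: both requests below are matched by the middleware above.
request = require("request")
sessid = "PLACEHOLDER_SESSION_ID"

# 1) sessid as a query-string parameter (?sessid=...)
request.get {url: "https://api.example.com/plunks", qs: {sessid: sessid}}, (err, res) ->
  console.log "query param:", res?.statusCode

# 2) sessid in an Authorization header ("token <sessid>")
request.get {url: "https://api.example.com/plunks", headers: {Authorization: "token #{sessid}"}}, (err, res) ->
  console.log "auth header:", res?.statusCode
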
/middleware/version.coffee:
--------------------------------------------------------------------------------
1 | module.exports.middleware = (config = {}) ->
2 | (req, res, next) ->
3 |
4 | req.apiVersion = (if v = req.param("api") then parseInt(v, 10) else 0)
5 |
6 | next()
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "plunker-api",
3 | "subdomain": "plunker-api",
4 | "domains": [
5 | "api.plnkr.co",
6 | "api.plunker.co"
7 | ],
8 | "version": "0.5.1",
9 | "private": true,
10 | "description": "The RESTful backend that powers Plunker",
11 | "main": "server.js",
12 | "dependencies": {
13 | "analytics-node": "~0.3.0",
14 | "appengine": "^0.3.0",
15 | "cache-helpers": "~1.3.0",
16 | "coffee-script": "~1.6.3",
17 | "cors": "^2.4.1",
18 | "express": "~3.10.5",
19 | "forever-monitor": "^1.2.3",
20 | "genid": "~0.1.0",
21 | "googlediff": "~0.1.0",
22 | "json-gate": "~0.8.21",
23 | "jsonwebtoken": "^3.2.2",
24 | "lodash": "~2.4.1",
25 | "lru-cache": "~2.5.0",
26 | "memoize": "~0.1.1",
27 | "mime": "~1.2.9",
28 | "mongoose": "^4.1.6",
29 | "morgan": "^1.2.2",
30 | "nconf": "~0.6.7",
31 | "request": "~2.16.6",
32 | "semver": "~1.1.4",
33 | "underscore": "~1.4.4",
34 | "url": "~0.7.9"
35 | },
36 | "devDependencies": {
37 | "coffee-script": "~1.6.1"
38 | },
39 | "scripts": {
40 | "test": "echo \"Error: no test specified\" && exit 1",
41 | "start": "node server.js"
42 | },
43 | "engines": {
44 | "node": "0.10.x"
45 | },
46 | "repository": {
47 | "type": "git",
48 | "url": "git://github.com/filearts/plunker_api.git"
49 | },
50 | "author": "Geoffrey Goodman",
51 | "license": "MIT"
52 | }
53 |
--------------------------------------------------------------------------------
/resources/comments.coffee:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/filearts/plunker_api/7179216520646926b4ba81c9e95fcea816310be1/resources/comments.coffee
--------------------------------------------------------------------------------
/resources/packages.coffee:
--------------------------------------------------------------------------------
1 | nconf = require("nconf")
2 | analytics = require("analytics-node")
3 | users = require("./users")
4 | apiErrors = require("../errors")
5 | gate = require("json-gate")
6 | semver = require("semver")
7 | _ = require("underscore")._
8 |
9 | {Package} = require("../database")
10 |
11 | apiUrl = nconf.get("url:api")
12 |
13 |
14 | exports.schema =
15 | create: gate.createSchema(require("./schema/packages/create.json"))
16 | update: gate.createSchema(require("./schema/packages/update.json"))
17 | versions:
18 | create: gate.createSchema(require("./schema/packages/versions/create.json"))
19 | update: gate.createSchema(require("./schema/packages/versions/update.json"))
20 |
21 | createLinksObject = (baseUrl, page, pages, limit) ->
22 | links = {}
23 |
24 | if page < pages
25 | links.next = "#{baseUrl}?p=#{page+1}&pp=#{limit}"
26 | links.last = "#{baseUrl}?p=#{pages}&pp=#{limit}"
27 | if page > 1
28 | links.prev = "#{baseUrl}?p=#{page-1}&pp=#{limit}"
29 | links.first = "#{baseUrl}?p=1&pp=#{limit}"
30 |
31 | links
32 |
33 | preparePackage = (pkg, json, options) ->
34 | # This is a sub-document of the pkg
35 | return json if 'function' == typeof pkg.ownerDocument
36 |
37 | json.maintainer = true if options.session?.user?.login# in json.maintainers
38 |
39 | delete json._id
40 | delete json.__v
41 |
42 | json
43 |
44 | preparePackages = (session, pkgs) ->
45 | _.map pkgs, (pkg) ->
46 | pkg.toJSON
47 | session: session
48 | transform: preparePackage
49 | virtuals: true
50 | getters: true
51 |
52 |
53 |
54 | module.exports.loadPackage = loadPackage = (query, cb) ->
55 | return cb() unless query
56 |
57 | Package.findOne(query).exec (err, pkg) ->
58 | if err then cb(err)
59 | else unless pkg then cb()
60 | else cb(null, pkg)
61 |
62 |
63 |
64 | # Package-related middleware
65 |
66 | module.exports.withPackage = (req, res, next) ->
67 | loadPackage {name: req.params.name}, (err, pkg) ->
68 | if err then next(new apiErrors.DatabaseError(err))
69 | else unless pkg then next(new apiErrors.NotFound)
70 | else
71 | req.pkg = pkg
72 | next()
73 |
74 | module.exports.withOwnPackage = (req, res, next) ->
75 | console.log "withOwnPackage", req.currentUser?
76 | return next(new apiErrors.NotFound) unless req.currentUser
77 |
78 | loadPackage {name: req.params.name, maintainers: req.currentUser.login}, (err, pkg) ->
79 | #loadPackage {name: req.params.name}, (err, pkg) ->
80 | if err then next(new apiErrors.DatabaseError(err))
81 | else unless pkg then next(new apiErrors.NotFound)
82 | else
83 | req.pkg = pkg
84 | next()
85 |
86 |
87 | exports.createListing = (config = {}) ->
88 | (req, res, next) ->
89 | options =
90 | if _.isFunction(config) then config(req, res)
91 | else _.clone(config)
92 |
93 | options.baseUrl ||= "#{apiUrl}/catalogue/packages"
94 | options.query ||= {}
95 |
96 | page = parseInt(req.param("p", "1"), 10)
97 | limit = parseInt(req.param("pp", "8"), 10)
98 |
99 | # Build the Mongoose Query
100 | query = Package.find(options.query)
101 | query.sort(options.sort or {bumps: -1})
102 |
103 | query.paginate page, limit, (err, packages, count, pages, current) ->
104 | if err then next(new apiErrors.DatabaseError(err))
105 | else
106 | res.links createLinksObject(options.baseUrl, current, pages, limit)
107 | res.json preparePackages(req.currentSession, packages)
108 |
109 |
110 |
111 | # Request handlers
112 |
113 | exports.create = (req, res, next) ->
114 | pkg = new Package(req.body)
115 | pkg.maintainers.push(req.currentUser.login) if req.currentUser?.login
116 | pkg.save (err, pkg) ->
117 | if err
118 | if err.code is 11000 then next(new apiErrors.ResourceExists)
119 | else next(new apiErrors.DatabaseError(err))
120 | else
121 | json = pkg.toJSON
122 | session: req.currentSession
123 | transform: preparePackage
124 | virtuals: true
125 | getters: true
126 |
127 | res.json(201, json)
128 |
129 |
130 | exports.read = (req, res, next) ->
131 | if req.param("bump")
132 | req.pkg.update({$inc: {bumps: 1}}).exec() # Send async request to update db copy
133 | req.pkg.bumps++
134 |
135 | json = req.pkg.toJSON
136 | session: req.currentSession
137 | transform: preparePackage
138 | virtuals: true
139 | getters: true
140 |
141 | res.json json
142 |
143 |
144 |
145 | exports.update = (req, res, next) ->
146 | req.pkg.set(req.body).save (err, pkg) ->
147 | if err then next(new apiErrors.DatabaseError(err))
148 | else
149 | json = pkg.toJSON
150 | session: req.currentSession
151 | transform: preparePackage
152 | virtuals: true
153 | getters: true
154 |
155 | res.json json
156 |
157 |
158 | exports.bump = (req, res, next) ->
159 | req.pkg.bumps++
160 |
161 | json = req.pkg.toJSON
162 | session: req.currentSession
163 | transform: preparePackage
164 | virtuals: true
165 | getters: true
166 |
167 | res.json json
168 |
169 | req.pkg.update({$inc: {bumps: 1}}).exec() # Send async request to update db copy
170 |
171 | exports.destroy = (req, res, next) ->
172 | req.pkg.remove (err) ->
173 | if err then next(new apiErrors.DatabaseError(err))
174 | else res.send(204)
175 |
176 |
177 | exports.addMaintainer = (req, res, next) ->
178 | req.pkg.maintainers.push(req.body.login)
179 | req.pkg.save (err, pkg) ->
180 | if err then next(new apiErrors.DatabaseError(err))
181 | else
182 | json = pkg.toJSON
183 | session: req.currentSession
184 | transform: preparePackage
185 | virtuals: true
186 | getters: true
187 |
188 | res.json 201, json
189 |
190 |
191 | exports.removeMaintainer = (req, res, next) ->
192 | req.pkg.maintainers.pull(req.body.login)
193 | req.pkg.save (err, pkg) ->
194 | if err then next(new apiErrors.DatabaseError(err))
195 | else
196 | json = pkg.toJSON
197 | session: req.currentSession
198 | transform: preparePackage
199 | virtuals: true
200 | getters: true
201 |
202 | res.json 200, json
203 |
204 | exports.versions = {}
205 |
206 | exports.versions.create = (req, res, next) ->
207 | req.pkg.versions.push(req.body)
208 | req.pkg.save (err, pkg) ->
209 | if err then next(new apiErrors.DatabaseError(err))
210 | else
211 | json = pkg.toJSON
212 | session: req.currentSession
213 | transform: preparePackage
214 | virtuals: true
215 | getters: true
216 |
217 | res.json 201, json
218 |
219 | # There is no specific versions.read
220 |
221 | exports.versions.update = (req, res, next) ->
222 | version = _.find req.pkg.versions, (ver) -> ver.semver == req.params.semver
223 |
224 | return next(new apiErrors.NotFound) unless version
225 |
226 | _.extend version, req.body
227 |
228 | req.pkg.save (err, pkg) ->
229 | if err then next(new apiErrors.DatabaseError(err))
230 | else
231 | json = pkg.toJSON
232 | session: req.currentSession
233 | transform: preparePackage
234 | virtuals: true
235 | getters: true
236 |
237 | res.json 200, json
238 |
239 |
240 | exports.versions.destroy = (req, res, next) ->
241 | version = _.find req.pkg.versions, (ver) -> ver.semver == req.params.semver
242 |
243 | return next(new apiErrors.NotFound) unless version
244 |
245 | req.pkg.versions.remove(version)
246 |
247 | req.pkg.save (err, pkg) ->
248 | if err then next(new apiErrors.DatabaseError(err))
249 | else
250 | json = pkg.toJSON
251 | session: req.currentSession
252 | transform: preparePackage
253 | virtuals: true
254 | getters: true
255 |
256 | res.json 200, json
--------------------------------------------------------------------------------
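A paging sketch for the listings above: the p and pp query parameters select the page and page size, and the next/prev/first/last URLs built by createLinksObject are sent back through Express's res.links as a Link header. The host is a placeholder; request is the module from package.json.

# Client sketch: fetch page 2 of the package listing and read the Link header
# that res.links builds from createLinksObject.
request = require("request")

request.get {url: "https://api.example.com/catalogue/packages", qs: {p: 2, pp: 8}}, (err, res, body) ->
  return console.error "[ERR]", err.message if err
  # Something like: <...?p=3&pp=8>; rel="next", <...?p=1&pp=8>; rel="prev", ...
  console.log res.headers.link
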
/resources/plunks.coffee:
--------------------------------------------------------------------------------
1 | _ = require("lodash")
2 | nconf = require("nconf")
3 | genid = require("genid")
4 | diff_match_patch = require("googlediff")
5 | gate = require("json-gate")
6 | analytics = require("analytics-node")
7 | LRU = require("lru-cache")
8 |
9 |
10 |
11 | gdiff = new diff_match_patch()
12 | apiErrors = require("../errors")
13 | apiUrl = nconf.get('url:api')
14 | database = require("../database")
15 |
16 | {Plunk} = database
17 |
18 |
19 |
20 | exports.schema =
21 | create: gate.createSchema(require("./schema/plunks/create.json"))
22 | fork: gate.createSchema(require("./schema/plunks/fork.json"))
23 | update: gate.createSchema(require("./schema/plunks/update.json"))
24 |
25 | createLinksObject = (baseUrl, page, pages, limit) ->
26 | links = {}
27 |
28 | if page < pages
29 | links.next = "#{baseUrl}?p=#{page+1}&pp=#{limit}"
30 | links.last = "#{baseUrl}?p=#{pages}&pp=#{limit}"
31 | if page > 1
32 | links.prev = "#{baseUrl}?p=#{page-1}&pp=#{limit}"
33 | links.first = "#{baseUrl}?p=1&pp=#{limit}"
34 |
35 | links
36 |
37 | createEvent = (type, user) ->
38 | event =
39 | event: type or "create"
40 | changes: []
41 |
42 | event.user = user._id if user
43 |
44 | event
45 |
46 | # Versions are 0-indexed
47 | dmp = new diff_match_patch()
48 | verCache = LRU(128) # 128 is an arbitrary cache size
49 | revertTo = (current, version, cb) ->
50 | # Return current if requesting a version that exceeds current version
51 | return current unless current.history?.length
52 | return current if version >= current.history.length
53 |
54 | # Don't cache plunks for people who are the owners
55 | if !current.token and cached = verCache.get("#{current.id}/#{version}")
56 | return cached
57 |
58 | size = (current.history.length or 1) - 1
59 | rel = size - version
60 |
61 | rename = (fn, to) ->
62 | if file = current.files[fn]
63 | file.filename = to
64 | delete current.files[fn]
65 | current.files[to] = file
66 |
67 | patch = (fn, patches) ->
68 | if file = current.files[fn]
69 | [file.content] = dmp.patch_apply(patches, file.content)
70 |
71 | remove = (fn) ->
72 | delete current.files[fn]
73 | try
74 | for i in [0...rel]
75 | for chg, j in current.history[size - i].changes
76 | # The changed file existed previously
77 | if chg.pn
78 | if chg.fn
79 | # File changed
80 | if chg.pl
81 | #console.log "Patching", chg.fn, "to", chg.pl
82 | patch(chg.fn, dmp.patch_fromText(chg.pl))
83 | # File renamed
84 | if chg.pn != chg.fn
85 | #console.log "Renaming", chg.pn, "to", chg.fn
86 | rename(chg.fn, chg.pn)
87 | else # Deleted the file
88 | #console.log "Adding", chg.fn, chg.pl
89 | current.files[chg.pn] =
90 | filename: chg.pn
91 | content: chg.pl
92 | else if chg.fn
93 | #console.log "Deleting", chg.fn
94 | remove(chg.fn)
95 | catch e
96 | console.trace "[ERR] Failed to revert #{current.id} to version #{version}:", e
97 |
98 | current.currentVersion = version
99 |
100 | unless current.token
101 | verCache.set "#{current.id}/#{version}", current
102 |
103 | current
104 |
105 | ownsPlunk = (session, json) ->
106 | owner = false
107 |
108 | if session
109 | owner ||= !!(json.user and session.user and json.user is session.user._id)
110 | owner ||= !!(json.user and session.user and json.user.login is session.user.login)
111 | owner ||= !!(session.keychain and session.keychain.id(json.id)?.token is json.token)
112 |
113 | owner
114 |
115 | saveNewPlunk = (plunk, cb) ->
116 | # Keep generating new ids until not taken
117 | savePlunk = ->
118 | plunk._id = if !!plunk.private then genid(20) else genid(6)
119 |
120 | plunk.save (err) ->
121 | if err
122 | if err.code is 11000 then savePlunk()
123 | else
124 | console.error "[ERR]", err.message, err
125 | return cb(new apiErrors.DatabaseError(err))
126 | else return cb(null, plunk)
127 |
128 | savePlunk()
129 |
130 | populatePlunk = (json, options = {}) ->
131 | plunk = options.plunk or new Plunk
132 | plunk.description = json.description or "Untitled"
133 | plunk.private = json.private != false
134 | plunk.source = json.source
135 | plunk.user = options.user._id if options.user
136 | plunk.fork_of = options.parent._id if options.parent
137 | plunk.tags.push(tag) for tag in json.tags unless options.skipTags
138 | plunk.type = if options.type in ["plunk", "template"] then options.type else "plunk"
139 |
140 | unless options.skipFiles then for filename, file of json.files
141 | plunk.files.push
142 | filename: file.filename or filename
143 | content: file.content
144 |
145 | plunk
146 |
147 | preparePlunk = (plunk, json, options) ->
148 | # This is a sub-document of the plunk
149 | return json if 'function' == typeof plunk.ownerDocument
150 |
151 | corrected = false
152 |
153 | if (was = plunk.voters?.length) != plunk.thumbs
154 | console.log "[INFO] Correcting thumbs for #{plunk.id} from #{plunk.thumbs} to #{plunk.voters.length}"
155 |
156 | plunk.thumbs = plunk.voters.length
157 |
158 | corrected = true
159 |
160 | if (was = plunk.forks?.length) != plunk.forked
161 | console.log "[INFO] Correcting forks for #{plunk.id} from #{was} to #{plunk.forks.length}"
162 |
163 | plunk.forked = plunk.forks.length
164 |
165 | corrected = true
166 |
167 | if (was = plunk.score) and plunk.score != plunk.created_at.valueOf() + calculateScore(plunk.thumbs)
168 | delta = calculateScore(plunk.thumbs)
169 | console.log "[INFO] Correcting score for #{plunk.id} from #{was} to #{delta / (1000 * 60 * 60)}"
170 | plunk.score = json.score = plunk.created_at.valueOf() + delta
171 |
172 | corrected = true
173 |
174 |
175 | if corrected then plunk.save() # Issue a save asynchronously and don't care about result
176 |
177 | delete json.token unless ownsPlunk(options.session, plunk)
178 | delete json.voters
179 | delete json.rememberers
180 | delete json._id
181 | delete json.__v
182 | if json.files then json.files = do ->
183 | files = {}
184 | for file in json.files
185 | file.raw_url = "#{json.raw_url}#{file.filename}"
186 | files[file.filename] = file
187 | files
188 |
189 | # Unless the current user owns the plunk, serve the frozen version and trim its history
190 | if !json.token and json.frozen_at and json.history
191 | json.frozen_version ?= json.history.length - 1
192 | json = revertTo json, json.frozen_version
193 | json.history = json.history.slice(0, json.frozen_version + 1) if json.history
194 |
195 |
196 | json.thumbed = options.session?.user? and plunk.voters?.indexOf("#{options.session.user._id}") >= 0
197 | json.remembered = options.session?.user? and plunk.rememberers?.indexOf("#{options.session.user._id}") >= 0
198 | json.user = options.user.toJSON() if options.user
199 |
200 | json
201 |
202 | preparePlunks = (session, plunks) ->
203 | _.map plunks, (plunk) ->
204 | plunk.toJSON
205 | session: session
206 | transform: preparePlunk
207 | virtuals: true
208 | getters: true
209 |
210 | applyFilesDeltaToPlunk = (plunk, json) ->
211 | oldFiles = {}
212 | changes = []
213 |
214 | return changes unless json.files
215 |
216 | # Create a map of filename=>file (subdocument) of existing files
217 | for file, index in plunk.files
218 | oldFiles[file.filename] = file
219 |
220 | # For each change proposed in the json
221 | for filename, file of json.files
222 |
223 | # Attempt to delete
224 | if file is null
225 | if old = oldFiles[filename]
226 | changes.push
227 | pn: filename
228 | pl: old.content
229 |
230 | # The old file may be a subdocument (when updating) OR a simple field (when forking)
231 | # Handle both cases
232 | if old.remove? then oldFiles[filename].remove()
233 | else delete oldFiles[filename]
234 |
235 | # Modification to an existing file
236 | else if old = oldFiles[filename]
237 | chg =
238 | pn: old.filename
239 | fn: file.filename or old.filename
240 |
241 | if file.filename
242 | old.filename = file.filename
243 | if file.content?
244 | chg.pl = gdiff.patch_toText(gdiff.patch_make(file.content, old.content))
245 | old.content = file.content
246 |
247 | if chg.fn or file.filename
248 | changes.push(chg)
249 |
250 | # New file; handle only if content provided
251 | else if file.content
252 | changes.push
253 | fn: filename
254 | pl: file.content
255 | plunk.files.push
256 | filename: filename
257 | content: file.content
258 |
259 | changes
260 |
261 | applyTagsDeltaToPlunk = (plunk, json) ->
262 | changes = []
263 |
264 | if json.tags
265 | plunk.tags ||= []
266 |
267 | for tagname, add of json.tags
268 | if add
269 | plunk.tags.push(tagname) if (idx = plunk.tags.indexOf(tagname)) < 0
270 | else
271 | plunk.tags.splice(idx, 1) if (idx = plunk.tags.indexOf(tagname)) >= 0
272 |
273 | changes
274 |
275 |
276 |
277 |
278 | exports.loadPlunk = loadPlunk = (id, cb) ->
279 | return cb() unless id and id.length
280 |
281 | Plunk.findById(id).populate("user", 'gravatar_id login service_id').populate("history.user", 'gravatar_id login service_id').exec (err, plunk) ->
282 | changed = false
283 |
284 | # Fix duplicate tags
285 | if plunk?.tags and _.uniq(plunk.tags).length != plunk.tags.length
286 | seen = []
287 | dups = []
288 | idx = plunk.tags.length - 1
289 |
290 | while idx >= 0
291 | tagname = plunk.tags[idx]
292 |
293 | unless 0 > seen.indexOf(tagname)
294 | dups.push(tagname)
295 | plunk.tags.splice(idx, 1)
296 | else seen.push(tagname)
297 |
298 | idx--
299 |
300 | changed ||= dups.length > 0
301 |
302 | if dups.length
303 | console.log "[INFO] Removing duplicate tags: #{dups.join(', ')} for #{id}"
304 |
305 | if changed then return plunk.save (err) ->
306 | console.log "[OK] Duplicate tags removed" unless err
307 | cb(err, plunk)
308 |
309 | if err then cb(err)
310 | else unless plunk then cb()
311 | else cb(null, plunk)
312 |
313 |
314 | return
315 |
316 |
317 | exports.withPlunk = (req, res, next) ->
318 | loadPlunk req.params.id, (err, plunk) ->
319 | if err then next(new apiErrors.DatabaseError(err))
320 | else unless plunk then next(new apiErrors.NotFound)
321 | else
322 | req.plunk = plunk
323 | next()
324 |
325 | return
326 |
327 | exports.ownsPlunk = (req, res, next) ->
328 | unless ownsPlunk(req.currentSession, req.plunk) then next(new apiErrors.NotFound)
329 | else next()
330 |
331 | return
332 |
333 |
334 | exports.createListing = (config = {}) ->
335 | (req, res, next) ->
336 | options = config(req, res) if _.isFunction(config)
337 | options ||= {}
338 |
339 | options.baseUrl ||= "#{apiUrl}/plunks"
340 | options.query ||= {}
341 |
342 | page = parseInt(req.param("p", "1"), 10)
343 | limit = parseInt(req.param("pp", "8"), 10)
344 |
345 | # Filter on plunks that are visible to the active user
346 | unless options.ignorePrivate
347 | if req.currentUser
348 | options.query.$or = [
349 | 'private': false
350 | ,
351 | user: req.currentUser._id
352 | ]
353 | else
354 | options.query.private = false
355 | else if options.onlyPublic
356 | options.query.private = false
357 |
358 | # Build the Mongoose Query
359 | query = Plunk.find(options.query)
360 | query.sort(options.sort or "-updated_at")
361 | query.select("-files") unless req.param("files") is "yes" # We exclude files from plunk listings
362 | query.select("-files.content") if req.param("file.contents") is "no"
363 | query.select("-history") # We exclude history from plunk listings
364 | query.populate("user", 'gravatar_id login service_id').paginate page, limit, (err, plunks, count, pages, current) ->
365 |
366 | if err then next(new apiErrors.DatabaseError(err))
367 | else
368 | res.links createLinksObject(options.baseUrl, current, pages, limit)
369 | res.json preparePlunks(req.currentSession, plunks)
370 |
371 | # Is a memory leak here?
372 | options = page = limit = null
373 |
374 | return
375 |
376 |
377 | # Request handlers
378 |
379 | exports.read = (req, res, next) ->
380 | loadPlunk req.params.id, (err, plunk) ->
381 | if err then next(new apiErrors.DatabaseError(err))
382 | else unless plunk then next(new apiErrors.NotFound)
383 | else if plunk
384 |
385 | unless req.param("nv")
386 | plunk.views++
387 | plunk.save()
388 |
389 | json = plunk.toJSON
390 | session: req.currentSession
391 | transform: preparePlunk
392 | virtuals: true
393 | getters: true
394 |
395 | json = revertTo json, parseInt(req.param("v"), 10) if req.param("v")
396 |
397 | res.json json
398 |
399 | return
400 |
401 | exports.create = (req, res, next) ->
402 | event = createEvent("create", req.currentUser)
403 |
404 | plunk = populatePlunk(req.body, user: req.currentUser)
405 | plunk.history.push(event)
406 |
407 | if !req.currentUser and !plunk.private
408 | return next(new apiErrors.NotFound)
409 |
410 | saveNewPlunk plunk, (err, plunk) ->
411 | if err then next(new apiErrors.DatabaseError(err))
412 | else
413 | if !req.currentUser and req.currentSession and req.currentSession.keychain
414 | req.currentSession.keychain.push _id: plunk._id, token: plunk.token
415 | req.currentSession.save()
416 |
417 | json = plunk.toJSON
418 | session: req.currentSession
419 | transform: preparePlunk
420 | virtuals: true
421 | getters: true
422 |
423 | json.user = req.currentUser.toJSON() if req.currentUser
424 | json.history[json.history.length - 1].user = req.currentUser.toJSON() if req.currentUser
425 |
426 | res.json(201, json)
427 |
428 | return
429 |
430 |
431 |
432 | exports.update = (req, res, next) ->
433 | return next(new Error("request.plunk is required for update()")) unless req.plunk
434 |
435 | event = createEvent "update", req.currentUser
436 | event.changes.push(e) for e in applyFilesDeltaToPlunk(req.plunk, req.body)
437 | event.changes.push(e) for e in applyTagsDeltaToPlunk(req.plunk, req.body)
438 |
439 | req.plunk.updated_at = new Date
440 | req.plunk.description = req.body.description if req.body.description
441 | req.plunk.user = req.currentUser._id if req.currentUser
442 |
443 | req.plunk.history.push(event)
444 |
445 | req.plunk.save (err, plunk) ->
446 | if err then next(new apiErrors.DatabaseError(err))
447 | else
448 |
449 | json = plunk.toJSON
450 | user: req.currentUser
451 | session: req.currentSession
452 | transform: preparePlunk
453 | virtuals: true
454 | getters: true
455 |
456 | json.history[json.history.length - 1].user = req.currentUser.toJSON() if req.currentUser
457 |
458 | res.json json
459 |
460 | return
461 |
462 | exports.freeze = (req, res, next) ->
463 | return next(new Error("request.plunk is required for freeze()")) unless req.plunk
464 |
465 | json = req.plunk.toJSON
466 | user: req.currentUser
467 | session: req.currentSession
468 | transform: preparePlunk
469 | virtuals: true
470 | getters: true
471 |
472 | json = revertTo json, parseInt(req.param("v"), 10) if req.param("v")
473 |
474 | req.plunk.frozen_at = new Date
475 | req.plunk.frozen_version = if req.param("v") then parseInt(req.param("v"), 10) else req.plunk.history.length - 1
476 |
477 | req.plunk.save (err, plunk) ->
478 | json = plunk.toJSON
479 | session: req.currentSession
480 | transform: preparePlunk
481 | virtuals: true
482 | getters: true
483 |
484 | if err then next(new apiErrors.DatabaseError(err))
485 | else res.send 200, json
486 |
487 | exports.unfreeze = (req, res, next) ->
488 | return next(new Error("request.plunk is required for freeze()")) unless req.plunk
489 |
490 | req.plunk.frozen_at = undefined
491 | req.plunk.frozen_version = undefined
492 |
493 | req.plunk.save (err, plunk) ->
494 | json = plunk.toJSON
495 | session: req.currentSession
496 | transform: preparePlunk
497 | virtuals: true
498 | getters: true
499 |
500 | if err then next(new apiErrors.DatabaseError(err))
501 | else res.send 200, json
502 |
503 | exports.fork = (req, res, next) ->
504 |   return next(new Error("request.plunk is required for fork()")) unless req.plunk
505 |
506 | if !req.currentUser
507 |     req.body.private = true # Force forked plunks to be private for anonymous (not logged-in) users
508 |
509 | event = createEvent "fork", req.currentUser
510 |
511 | if req.apiVersion is 1
512 | json = req.plunk.toJSON()
513 | json.description = req.body.description if req.body.description
514 | json.private = req.body.private if req.body.private?
515 |
516 | event.changes.push(e) for e in applyFilesDeltaToPlunk(json, req.body)
517 | event.changes.push(e) for e in applyTagsDeltaToPlunk(json, req.body)
518 |
519 | else if req.apiVersion is 0
520 | json = req.body
521 |
522 |
523 | fork = populatePlunk(json, user: req.currentUser, parent: req.plunk)
524 | fork.history.push(evt) for evt in req.plunk.history
525 | fork.history.push(event)
526 |
527 | saveNewPlunk fork, (err, plunk) ->
528 | if err then next(new apiErrors.DatabaseError(err))
529 | else
530 | if !req.currentUser and req.currentSession and req.currentSession.keychain
531 | req.currentSession.keychain.push _id: plunk._id, token: plunk.token
532 | req.currentSession.save()
533 |
534 | json = plunk.toJSON
535 | session: req.currentSession
536 | transform: preparePlunk
537 | virtuals: true
538 | getters: true
539 |
540 | json.user = req.currentUser.toJSON() if req.currentUser
541 | json.history[json.history.length - 1].user = req.currentUser.toJSON() if req.currentUser
542 |
543 | res.json(201, json)
544 |
545 |       # Update the parent's fork list after the response has been sent;
546 |       # it is not critical if this bookkeeping fails
547 | req.plunk.forks.push(plunk._id)
548 | req.plunk.forked++
549 | req.plunk.save()
550 |
551 | return
552 |
553 | exports.destroy = (req, res, next) ->
554 |   return next(new Error("request.plunk is required for destroy()")) unless req.plunk
555 |
556 | if req.plunk.fork_of then loadPlunk req.plunk.fork_of, (err, parent) ->
557 | if parent
558 |       parent.forks.remove(req.plunk._id)
559 | parent.forked--
560 | parent.save()
561 |
562 | unless ownsPlunk(req.currentSession, req.plunk) then next(new apiErrors.NotFound)
563 | else req.plunk.remove ->
564 | res.send(204)
565 |
566 | return
567 |
568 | calculateScore = (count = 0) ->
569 | score = 0
570 |
571 | while count > 0
572 | score += calculateScoreDelta(count - 1)
573 | count--
574 |
575 | score
576 |
577 |
578 | calculateScoreDelta = (count = 0) ->
579 | baseIncrement = 1000 * 60 * 60 * 12 # The first vote will move the plunk forward 12 hours in time
580 | decayFactor = 1.2
581 |
582 | baseIncrement / Math.pow(decayFactor, count)
583 |
584 |
585 | exports.setThumbed = (req, res, next) ->
586 | return next(new apiErrors.PermissionDenied) unless req.currentUser
587 | return next(new apiErrors.NotFound) unless 0 > req.plunk.voters.indexOf(req.currentUser._id)
588 |
589 | req.plunk.score ||= req.plunk.created_at.valueOf()
590 | req.plunk.thumbs ||= 0
591 |
592 | req.plunk.voters.addToSet(req.currentUser._id)
593 | req.plunk.score += calculateScoreDelta(req.plunk.thumbs)
594 | req.plunk.thumbs++
595 |
596 | req.plunk.save (err, plunk) ->
597 | if err then next(new apiErrors.DatabaseError(err))
598 |     else res.json(201, { thumbs: plunk.get("thumbs"), score: plunk.score })
599 |
600 | exports.unsetThumbed = (req, res, next) ->
601 | return next(new apiErrors.PermissionDenied) unless req.currentUser
602 | return next(new apiErrors.NotFound) if 0 > req.plunk.voters.indexOf(req.currentUser._id)
603 |
604 | unless 0 > req.plunk.voters.indexOf(req.currentUser._id)
605 | req.plunk.voters.remove(req.currentUser._id)
606 | req.plunk.score -= calculateScoreDelta(req.plunk.thumbs - 1)
607 | req.plunk.thumbs--
608 |
609 | req.plunk.save (err, plunk) ->
610 | if err then next(new apiErrors.DatabaseError(err))
611 |     else res.json(200, { thumbs: plunk.get("thumbs"), score: plunk.score })
612 |
613 |
614 | exports.setRemembered = (req, res, next) ->
615 | return next(new apiErrors.PermissionDenied) unless req.currentUser
616 |
617 | req.plunk.rememberers.addToSet(req.currentUser._id)
618 |
619 | req.plunk.save (err, plunk) ->
620 | if err then next(new apiErrors.DatabaseError(err))
621 |     else res.json(201, { status: "OK" })
622 |
623 | exports.unsetRemembered = (req, res, next) ->
624 | return next(new apiErrors.PermissionDenied) unless req.currentUser
625 |
626 | req.plunk.rememberers.remove(req.currentUser._id)
627 |
628 | req.plunk.save (err, plunk) ->
629 | if err then next(new apiErrors.DatabaseError(err))
630 |     else res.json(200, { status: "OK" })
631 |
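632 | # For reference, the thumb-vote scoring above (setThumbed / unsetThumbed) is a
633 | # decaying geometric series: `score` is a millisecond timestamp seeded from
634 | # created_at, and with baseIncrement = 12 hours and decayFactor = 1.2 the n-th
635 | # vote pushes it forward by 12h / 1.2^(n-1). So calculateScore(1) = 12h,
636 | # calculateScore(2) = 22h, calculateScore(3) ~= 30.33h, and the total boost
637 | # from any number of votes is bounded by 12h / (1 - 1/1.2) = 72h.
638 | # Quick sanity check (illustrative only; the `hours` helper is not part of
639 | # this module):
640 | #
641 | #   hours = (ms) -> ms / (1000 * 60 * 60)
642 | #   hours calculateScore(3)       # => ~30.33
643 | #   hours calculateScoreDelta(2)  # => ~8.33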
--------------------------------------------------------------------------------
/resources/schema/packages/create.json:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "additionalProperties": false,
4 | "properties": {
5 | "name": {
6 | "type": "string",
7 | "pattern": "^[-_.a-zA-Z0-9]+$",
8 | "required": true
9 | },
10 | "description": {
11 | "type": "string",
12 | "default": ""
13 | },
14 | "homepage": {
15 | "type": "string",
16 | "default": ""
17 | },
18 | "documentation": {
19 | "type": "string",
20 | "default": ""
21 | },
22 | "categories": {
23 | "type": "array",
24 | "additionalItems": false,
25 | "uniqueItems": true,
26 | "items": {
27 | "type": "string"
28 | }
29 | },
30 | "maintainers": {
31 | "type": "array",
32 | "additionalItems": false,
33 | "uniqueItems": true,
34 | "items": {
35 | "type": "string"
36 | }
37 | }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/resources/schema/packages/update.json:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "additionalProperties": false,
4 | "properties": {
5 | "description": {
6 | "type": "string",
7 | "default": ""
8 | },
9 |     "homepage": {
10 |       "type": "string",
11 |       "default": ""
12 |     },
13 |     "documentation": {
14 |       "type": "string",
15 |       "default": ""
16 |     },
17 |     "categories": {
18 |       "type": "array",
19 |       "additionalItems": false,
20 |       "uniqueItems": true,
21 |       "items": {
22 |         "type": "string"
23 |       }
24 |     }
25 |   }
26 | }
27 |
--------------------------------------------------------------------------------
/resources/schema/packages/versions/create.json:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "additionalProperties": false,
4 | "properties": {
5 | "semver": {
6 | "type": "string",
7 | "required": true
8 | },
9 | "unstable": {
10 | "type": "boolean"
11 | },
12 | "scripts": {
13 | "type": "array",
14 | "items": {
15 | "type": "string"
16 | }
17 | },
18 | "styles": {
19 | "type": "array",
20 | "items": {
21 | "type": "string"
22 | }
23 | },
24 | "dependencies": {
25 | "type": "array",
26 | "additionalItems": false,
27 | "items": {
28 | "type": "object",
29 | "properties": {
30 | "range": {
31 | "type": "string"
32 | },
33 | "name": {
34 | "type": "string"
35 | }
36 | }
37 | }
38 | }
39 | }
40 | }
--------------------------------------------------------------------------------
/resources/schema/packages/versions/update.json:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "additionalProperties": false,
4 | "properties": {
5 | "unstable": {
6 | "type": "boolean"
7 | },
8 | "scripts": {
9 | "type": "array",
10 | "items": {
11 | "type": "string"
12 | }
13 | },
14 | "styles": {
15 | "type": "array",
16 | "items": {
17 | "type": "string"
18 | }
19 | },
20 | "dependencies": {
21 | "type": "array",
22 | "additionalItems": false,
23 | "items": {
24 | "type": "object",
25 | "properties": {
26 | "range": {
27 | "type": "string"
28 | },
29 | "name": {
30 | "type": "string"
31 | }
32 | }
33 | }
34 | }
35 | }
36 | }
--------------------------------------------------------------------------------
/resources/schema/plunks/create.json:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "additionalProperties": false,
4 | "properties": {
5 | "description": {
6 | "type": "string",
7 | "default": ""
8 | },
9 | "tags": {
10 | "type": "array",
11 | "additionalItems": false,
12 | "uniqueItems": true,
13 | "items": {
14 | "type": "string"
15 | }
16 | },
17 | "private": {
18 | "type": "boolean",
19 | "default": false
20 | },
21 | "source": {
22 | "type": [
23 | {
24 | "type": "null"
25 | }, {
26 | "type": "object",
27 | "additionalProperties": false,
28 | "properties": {
29 | "type": {
30 | "type": "string",
31 | "required": true,
32 | "enum": [ "plunker_no_de", "gist" ]
33 | },
34 | "url": {
35 | "type": "string",
36 | "required": true
37 | },
38 | "title": {
39 | "type": "string",
40 | "required": true
41 | },
42 | "description": {
43 | "type": "string"
44 | }
45 | }
46 | }
47 | ]
48 | },
49 | "files": {
50 | "required": true,
51 | "type": "object",
52 | "additionalProperties": false,
53 | "patternProperties": {
54 | "^[\\w\\-. +/]+$": {
55 | "type": "object",
56 | "properties": {
57 | "content": {
58 | "type": "string",
59 | "required": true
60 | }
61 | }
62 | }
63 | }
64 | }
65 | }
66 | }
--------------------------------------------------------------------------------
/resources/schema/plunks/fork.json:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "additionalProperties": false,
4 | "minProperties": 1,
5 | "properties": {
6 | "description": {
7 | "type": "string",
8 | "default": ""
9 | },
10 | "tags": {
11 | "type": "object",
12 | "additionalProperties": false,
13 | "patternProperties": {
14 | "^[a-zA-Z0-9_][-a-zA-Z0-9_ :]*$": {
15 | "type": "boolean"
16 | }
17 | }
18 | },
19 | "private": {
20 | "type": "boolean"
21 | },
22 | "files": {
23 | "type": "object",
24 | "additionalProperties": false,
25 | "patternProperties": {
26 | "^[\\w\\-. +/]+$": {
27 | "type": [
28 | {
29 | "type": "null"
30 | }, {
31 | "type": "object",
32 | "properties": {
33 | "filename": {
34 | "type": "string"
35 | },
36 | "content": {
37 | "type": "string"
38 | }
39 | },
40 | "minProperties": 1
41 | }
42 | ]
43 | }
44 | }
45 | }
46 | }
47 | }
--------------------------------------------------------------------------------
/resources/schema/plunks/update.json:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "additionalProperties": false,
4 | "minProperties": 1,
5 | "properties": {
6 | "description": {
7 | "type": "string"
8 | },
9 | "tags": {
10 | "type": "object",
11 | "additionalProperties": false,
12 | "patternProperties": {
13 | "^[a-zA-Z0-9_][-a-zA-Z0-9_ :]*$": {
14 | "type": "boolean"
15 | }
16 | }
17 | },
18 | "files": {
19 | "type": "object",
20 | "additionalProperties": false,
21 | "patternProperties": {
22 | "^[\\w\\-. +/]+$": {
23 | "type": [
24 | {
25 | "type": "null"
26 | }, {
27 | "type": "object",
28 | "properties": {
29 | "filename": {
30 | "type": "string"
31 | },
32 | "content": {
33 | "type": "string"
34 | }
35 | },
36 | "minProperties": 1
37 | }
38 | ]
39 | }
40 | }
41 | }
42 | }
43 | }
--------------------------------------------------------------------------------
/resources/sessions.coffee:
--------------------------------------------------------------------------------
1 | nconf = require("nconf")
2 | analytics = require("analytics-node")
3 | request = require("request")
4 | users = require("./users")
5 | apiErrors = require("../errors")
6 | _ = require("underscore")._
7 | LRU = require("lru-cache")
8 |
9 | sessionCache = LRU
10 | max: 400
11 | maxAge: 1000 * 60 * 60 * 24 * 7 # One week
12 |
13 | {Session} = require("../database")
14 |
15 |
16 | MAX_AGE = 1000 * 60 * 60 * 24 * 7 * 2
17 |
18 | # Session-related helpers
19 |
20 | module.exports.prune = (cb = ->) ->
21 | Session.remove(last_access: $lt: Date.now() - MAX_AGE).exec(cb)
22 |
23 | module.exports.createSession = createSession = (user, cb) ->
24 | session =
25 | last_access: new Date
26 | keychain: {}
27 |
28 | if user
29 | session.user = user._id
30 | #session.user_info = user.toJSON()
31 |
32 | Session.create(session, cb)
33 |
34 |
35 | module.exports.loadSession = loadSession = (sessid, cb) ->
36 | return cb() unless sessid and sessid.length
37 |
38 | if sessionData = sessionCache.get(sessid)
39 | return cb(null, sessionData)
40 |
41 | sessionData =
42 | last_access: Date.now()
43 | expires_at: Date.now() + 1000 * 60 * 60 * 24 * 7 * 2 # Two weeks
44 |
45 | query = Session.findByIdAndUpdate sessid, sessionData
46 | query.populate("user", 'gravatar_id login service_id')
47 | query.exec (err, session) ->
48 | if err then cb(err)
49 | else
50 | sessionCache.set sessid, session
51 | cb(null, session)
52 |
53 |
54 |
55 | # Session-related middleware
56 |
57 | module.exports.withSession = (req, res, next) ->
58 | loadSession req.params.id, (err, session) ->
59 | if err then next(new apiErrors.DatabaseError(err))
60 | else unless session then next(new apiErrors.NotFound)
61 | else
62 | req.session = session
63 | next()
64 |
65 | module.exports.withCurrentSession = (req, res, next) ->
66 | if req.currentSession then next()
67 | else next(new apiErrors.NotFound)
68 |
69 |
70 | # Session-related request handlers
71 |
72 | module.exports.findOrCreate = (req, res, next) ->
73 | if req.session then res.json req.session.toJSON()
74 | else createSession null, (err, session) ->
75 | if err then next(new apiErrors.DatabaseError(err))
76 | else if session then res.json session.toJSON()
77 | else
78 | console.log "[ERR] findOrCreate"
79 | next(new apiErrors.ImpossibleError)
80 |
81 |
82 | module.exports.read = (req, res, next) ->
83 | loadSession req.params.id, (err, session) ->
84 | if err then next(new apiErrors.DatabaseError(err))
85 | else if session then res.json session.toJSON()
86 | else next(new apiErrors.NotFound)
87 |
88 |
89 | module.exports.create = (req, res, next) ->
90 | createSession null, (err, session) ->
91 | if err then next(new apiErrors.DatabaseError(err))
92 | else if session then res.json session.toJSON()
93 | else
94 | console.log "[ERR] createSession"
95 | next(new apiErrors.ImpossibleError)
96 |
97 |
98 | module.exports.setUser = (req, res, next) ->
99 | token = req.param("token")
100 | users.authenticateGithubToken token, (err, ghuser) ->
101 | return next(new apiErrors.DatabaseError(err)) if err
102 | return next(new apiErrors.NotFound) unless ghuser
103 |
104 | userInfo =
105 | login: ghuser.login
106 | gravatar_id: ghuser.gravatar_id
107 | service_id: "github:#{ghuser.id}"
108 |
109 | users.upsert userInfo, (err, user) ->
110 | return next(new apiErrors.DatabaseError(err)) if err
111 | return next(new apiErrors.NotFound) unless user
112 |
113 | users.correct("github:#{ghuser.login}", user._id)
114 |
115 | #analytics.identify user._id,
116 | # username: user.login
117 | # created: user.created_at
118 |
119 | req.session.user = user._id
120 | #req.session.user_info = user.toJSON()
121 | req.session.auth =
122 | service_name: "github"
123 | service_token: token
124 | req.session.save (err, session) ->
125 | if err then next(new apiErrors.DatabaseError(err))
126 | else if session
127 | sessionCache.del req.session._id
128 | res.json(201, _.extend(session.toJSON(), user: user.toJSON()))
129 | else
130 | console.log "[ERR] setUser->session.save", arguments...
131 | next(new apiErrors.ImpossibleError)
132 |
133 |
134 |
135 | module.exports.unsetUser = (req, res, next) ->
136 | req.session.user = null
137 | #req.session.user_info = null
138 | req.session.auth = null
139 |
140 | req.session.save (err, session) ->
141 |     if err then next(new apiErrors.DatabaseError(err))
142 | else if session
143 | sessionCache.set req.session._id, req.session
144 | res.json session.toJSON()
145 | else
146 | console.log "[ERR] unsetUser->session.save", arguments...
147 | next(new apiErrors.ImpossibleError)
148 |
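149 | # For reference, createSession persists a Session document roughly shaped like
150 | # the sketch below (the Session model defines the authoritative schema):
151 | #
152 | #   last_access: <Date>      # refreshed (along with expires_at) by loadSession
153 | #   keychain:    {}          # resources/plunks.coffee pushes { _id, token }
154 | #                            # entries here for plunks created anonymously
155 | #   user:        <ObjectId>  # set by setUser once the GitHub token is verified
156 | #   auth:        { service_name: "github", service_token: <token> }
157 | #
158 | # Sessions are also cached in-process (sessionCache) for up to a week; setUser
159 | # deletes the cached entry so the next loadSession re-reads the populated user,
160 | # while unsetUser overwrites the cached copy in place.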
--------------------------------------------------------------------------------
/resources/tags.coffee:
--------------------------------------------------------------------------------
1 | _ = require("underscore")._
2 | nconf = require("nconf")
3 | LRU = require("lru-cache")
4 |
5 | plunks = require("./plunks")
6 |
7 |
8 | apiErrors = require("../errors")
9 | apiUrl = nconf.get('url:api')
10 | database = require("../database")
11 | cache = LRU(max: 200, maxAge: 1000 * 60 * 60 * 24)
12 |
13 | {Plunk} = database
14 |
15 |
16 | exports.list = (req, res, next) ->
17 | pipeline = []
18 |
19 | pipeline.push $unwind:
20 | "$tags"
21 | pipeline.push $group:
22 | _id:
23 | $toLower: "$tags"
24 | count: $sum: 1
25 | if q = req.param("q") then pipeline.push $match:
26 | _id: $regex: "^#{q}", $options: "i"
27 | pipeline.push $sort:
28 | count: -1
29 | pipeline.push $limit:
30 | 10
31 |
32 | Plunk.aggregate pipeline, (err, results) ->
33 | return next(err) if err
34 |
35 | res.json _.map results, (record) ->
36 | tag: record._id
37 | count: record.count
38 |
39 |
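40 | # For reference, the pipeline above unwinds every plunk's tags, groups them
41 | # case-insensitively ($toLower), optionally keeps only tags starting with the
42 | # `q` prefix (the $match runs after $group, so it matches against the
43 | # lower-cased tag), sorts by usage and returns the ten most used tags. The
44 | # response is an array shaped like this (counts illustrative):
45 | #
46 | #   [ { "tag": "angularjs", "count": 1234 }, { "tag": "css", "count": 87 } ]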
--------------------------------------------------------------------------------
/resources/users.coffee:
--------------------------------------------------------------------------------
1 | nconf = require("nconf")
2 | request = require("request")
3 | users = require("./users")
4 | apiErrors = require("../errors")
5 |
6 | {User, Plunk} = require("../database")
7 |
8 |
9 |
10 | # User-related helpers
11 |
12 | module.exports.authenticateGithubToken = authenticateGithubToken = (token, cb) ->
13 | return cb() unless token
14 |
15 | config =
16 | url: "https://api.github.com/user?access_token=#{token}"
17 | json: true
18 | timeout: 6000
19 | headers: { 'User-Agent': "plunker-api" }
20 |
21 | request.get config, (err, res, body) ->
22 | return cb(err) if err
23 |     return cb(new apiErrors.PermissionDenied) if res.statusCode >= 400
24 |
25 | cb(null, body)
26 |
27 |
28 | module.exports.upsert = (userInfo, cb) ->
29 | query = service_id: userInfo.service_id
30 | update = (user) ->
31 | user.set(userInfo).save (err) -> cb(err, user)
32 |
33 | User.findOne(query, 'gravatar_id login service_id').exec (err, user) ->
34 | if err then cb(err)
35 | else if user then update(user)
36 | else update(new User)
37 |
38 | # Fix plunks saved with invalid "github:<login>"-style service_ids
39 | module.exports.correct = (invalid_id, correct_id) ->
40 | User.findOne {service_id: invalid_id}, 'gravatar_id login service_id', (err, user) ->
41 | if err then console.log "[ERR] Failed to query for #{invalid_id}"
42 | else if user then Plunk.update {user: user._id}, {user: correct_id}, {multi: true}, (err, numAffected) ->
43 | console.log "[OK] Fixed #{numAffected} plunks by #{user.login} incorrectly attributed to #{invalid_id}"
44 |
45 | user.remove (err) ->
46 | if err then console.log "[ERR] Failed to remove duplicate user #{user.login}"
47 | else console.log "[OK] Removed duplicate user #{user.login}"
48 |
49 |
50 |
51 | # User-related middleware
52 |
53 | module.exports.withUser = withUser = (req, res, next) ->
54 | User.findOne({login: req.params.login}, 'gravatar_id login service_id').exec (err, user) ->
55 | return next(new apiErrors.DatabaseError(err)) if err
56 | return next(new apiErrors.NotFound) unless user
57 |
58 | req.user = user
59 | next()
60 |
61 | module.exports.withCurrentUser = withCurrentUser = (req, res, next) ->
62 | return next(new apiErrors.NotFound) unless req.currentUser
63 | next()
64 |
65 |
66 | # User-related request handlers
67 |
68 | module.exports.read = (req, res, next) ->
69 | res.json req.user.toJSON()
70 |
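71 | # For reference, authenticateGithubToken performs
72 | #   GET https://api.github.com/user?access_token=<token>
73 | # and hands GitHub's JSON user object back unchanged; the fields consumed
74 | # downstream (see setUser in resources/sessions.coffee) are `login`,
75 | # `gravatar_id` and `id`, and users are keyed by a service_id of the form
76 | # "github:<numeric id>". upsert() looks that service_id up and either updates
77 | # the matching user or creates a new one.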
--------------------------------------------------------------------------------
/server.js:
--------------------------------------------------------------------------------
1 | require('coffee-script');
2 |
3 | var App = require('./app.coffee');
4 | var Nconf = require('nconf');
5 |
6 | App.listen(Nconf.get('PORT'), function(err) {
7 | if (err) throw err;
8 |
9 | console.log('[OK] API server listening on port `' + Nconf.get('PORT') + '`.');
10 | });
11 |
--------------------------------------------------------------------------------