├── src ├── lib │ ├── database.coffee │ ├── delayedqueue.coffee │ ├── accountinfo.coffee │ ├── agent.coffee │ ├── mungedetector.coffee │ ├── entity.coffee │ ├── scutil.coffee │ ├── broadcast.coffee │ ├── utils.js │ ├── requestfactory.coffee │ └── leaflet.js ├── plugins │ ├── trace.coffee │ ├── export-portals.coffee │ ├── request-faction.coffee │ ├── request-broadcasts.coffee │ ├── find-farms.coffee │ └── request-portals.coffee └── app.coffee ├── .gitignore ├── package.json ├── LICENSE ├── Gruntfile.js ├── config.cson.default ├── README.md └── npm-shrinkwrap.json /src/lib/database.coffee: -------------------------------------------------------------------------------- 1 | mongo = require 'mongoskin' 2 | db = mongo.db Config.Database.ConnectString, Config.Database.Options 3 | 4 | Database = GLOBAL.Database = {} 5 | Database.db = db -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | lib-cov 2 | *.seed 3 | *.log 4 | *.csv 5 | *.dat 6 | *.out 7 | *.pid 8 | *.gz 9 | 10 | pids 11 | logs 12 | results 13 | 14 | npm-debug.log 15 | node_modules 16 | 17 | /ingress-exporter.log 18 | /config.cson 19 | /build -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ingress-exporter", 3 | "version": "0.0.1", 4 | "repository": { 5 | "type": "git", 6 | "url": "https://github.com/breeswish/ingress-exporter.git" 7 | }, 8 | "dependencies": { 9 | "async": "*", 10 | "color": "*", 11 | "escope": "*", 12 | "esprima": "*", 13 | "grunt": "*", 14 | "grunt-contrib-coffee": "*", 15 | "grunt-contrib-copy": "*", 16 | "grunt-contrib-watch": "*", 17 | "grunt-cson": "*", 18 | "kerberos": "0.0.17", 19 | "moment": "*", 20 | "mongodb": "*", 21 | "mongoskin": "*", 22 | "ndarray": "*", 23 | "optimist": "*", 24 | "point-in-polygon": "*", 25 | 
"request": "*", 26 | "require-all": "*", 27 | "winston": "*" 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/lib/delayedqueue.coffee: -------------------------------------------------------------------------------- 1 | async = require 'async' 2 | 3 | class delayedQueue 4 | 5 | constructor: (worker, delay) -> 6 | 7 | @lastTS = null 8 | @worker = worker 9 | @delay = delay 10 | @queue = async.queue @_work, 1 11 | 12 | push: (task) => 13 | 14 | @queue.push task 15 | 16 | _work: (task, callback) => 17 | 18 | main = => 19 | @worker task 20 | @lastTS = Date.now() 21 | callback() 22 | 23 | if @lastTS is null 24 | main() 25 | else 26 | TSnow = Date.now() 27 | if TSnow - @lastTS < @delay 28 | setTimeout main, @delay - (TSnow - @lastTS) 29 | else 30 | main() 31 | 32 | module.exports = delayedQueue -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2013 Breezewish 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so, 10 | subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR 18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /src/plugins/trace.coffee: -------------------------------------------------------------------------------- 1 | moment = require 'moment' 2 | 3 | module.exports = 4 | 5 | onBootstrap: (callback) -> 6 | 7 | if argv.trace 8 | bootstrap -> 9 | callback 'end' 10 | else 11 | callback() 12 | 13 | bootstrap = (callback) -> 14 | 15 | cursor = Database.db.collection('Chat.Public').find( 16 | 'markup.PLAYER1.plain': argv.player 17 | time: 18 | $gte: Date.now() - 30 * 24 * 60 * 60 * 1000 19 | ).sort({time: -1}).limit(500).toArray (err, logs) -> 20 | 21 | if err 22 | logger.error '[Trace] %s', err.message 23 | return callback() 24 | 25 | lines = [] 26 | 27 | for item in logs 28 | 29 | line = [] 30 | line.push JSON.stringify item.text.toString() 31 | line.push JSON.stringify moment(item.time).format('LLLL').toString() 32 | 33 | lines.push line.join(',') 34 | 35 | if argv.output 36 | fs = require 'fs' 37 | fs.writeFileSync argv.output, lines.join('\n') 38 | logger.info '[Trace] Outputed %d records', lines.length 39 | else 40 | console.log lines.join('\n') 41 | 42 | callback() -------------------------------------------------------------------------------- /src/plugins/export-portals.coffee: -------------------------------------------------------------------------------- 1 | module.exports = 2 | 3 | onBootstrap: (callback) -> 4 | 5 | if argv.export 6 | bootstrap -> 7 | callback 'end' 8 | else 9 | callback() 10 | 11 | bootstrap = (callback) -> 12 | 13 | cursor = Database.db.collection('Portals').find().toArray (err, portals) -> 14 | 15 | if err 16 | logger.error '[Export] %s', err.message 17 | return callback() 18 | 19 | lines 
= [] 20 | 21 | for po in portals 22 | 23 | line = [] 24 | line.push po.title.replace(/,/g, '-').trim() if argv.title or argv.t 25 | line.push po.latE6 / 1e6 if argv.latlng or argv.l 26 | line.push po.lngE6 / 1e6 if argv.latlng or argv.l 27 | line.push po.image if argv.image or argv.I 28 | line.push po._id if argv.id or argv.i 29 | line.push po.capturedTime or '' if argv.time or argv.T 30 | line.push po.owner or '' if argv.owner or argv.o 31 | 32 | lines.push line.join(',') 33 | 34 | if argv.output 35 | fs = require 'fs' 36 | fs.writeFileSync argv.output, lines.join('\n') 37 | logger.info '[Export] Exported %d portals', lines.length 38 | else 39 | console.log lines.join('\n') 40 | 41 | callback() 42 | -------------------------------------------------------------------------------- /Gruntfile.js: -------------------------------------------------------------------------------- 1 | module.exports = function (grunt) 2 | { 3 | 4 | grunt.util.linefeed = '\n'; 5 | 6 | grunt.initConfig({ 7 | 8 | pkg: grunt.file.readJSON('package.json'), 9 | 10 | copy: { 11 | 12 | project: { 13 | files: [{ 14 | expand: true, 15 | cwd: 'src/', 16 | src: ['**/*.js'], 17 | dest: 'build/', 18 | ext: '.js', 19 | extDot: 'last' 20 | }] 21 | } 22 | 23 | }, 24 | 25 | coffee: { 26 | 27 | project: { 28 | files: [{ 29 | expand: true, 30 | cwd: 'src/', 31 | src: ['**/*.coffee'], 32 | dest: 'build/', 33 | ext: '.js', 34 | extDot: 'last' 35 | }] 36 | } 37 | }, 38 | 39 | cson: { 40 | 41 | project: { 42 | files: [{ 43 | src: 'config.cson', 44 | dest: 'build/config.json' 45 | }] 46 | } 47 | 48 | }, 49 | 50 | watch: { 51 | 52 | project: { 53 | files: ['src/**/*', 'config.cson'], 54 | tasks: ['copy', 'coffee', 'cson'] 55 | } 56 | 57 | } 58 | 59 | }); 60 | 61 | grunt.loadNpmTasks('grunt-contrib-copy'); 62 | grunt.loadNpmTasks('grunt-contrib-coffee'); 63 | grunt.loadNpmTasks('grunt-contrib-watch'); 64 | grunt.loadNpmTasks('grunt-cson'); 65 | 66 | grunt.registerTask('default', ['copy', 'coffee', 'cson']); 67 | 
grunt.registerTask('debug', ['copy', 'coffee', 'cson', 'watch']); 68 | 69 | }; -------------------------------------------------------------------------------- /src/lib/accountinfo.coffee: -------------------------------------------------------------------------------- 1 | async = require 'async' 2 | requestFactory = require './requestfactory.js' 3 | request = requestFactory() 4 | 5 | AccountInfo = GLOBAL.AccountInfo = 6 | 7 | getAccount: (session, callback) -> 8 | 9 | request.get '/intel', (error, response, body) -> 10 | 11 | return callback error if error 12 | 13 | body = body.toString() 14 | 15 | MAGIC_1 = 'var PLAYER = ' 16 | MAGIC_2 = ';' 17 | 18 | p1 = body.indexOf MAGIC_1 19 | p2 = body.indexOf MAGIC_2, p1 + MAGIC_1.length 20 | 21 | return callback new Error('Failed to fetch information. (#' + session.index + ')') if p1 is -1 or p2 is -1 22 | 23 | try 24 | player = JSON.parse body.substring(p1 + MAGIC_1.length, p2) 25 | catch e 26 | return callback new Error('Failed to parse player information. (#' + session.index + ')') 27 | 28 | callback null, player 29 | 30 | , session 31 | 32 | fetch: (callback) -> 33 | 34 | logger.info '[AccountInfo] Fetching current account information...' 
35 | 36 | accounts = [] 37 | 38 | async.each requestFactory.sessions, (session, callback) -> 39 | 40 | AccountInfo.getAccount session, (err, player) -> 41 | 42 | if err 43 | callback err 44 | else 45 | accounts.push player 46 | callback() 47 | 48 | , (err) -> 49 | 50 | if err 51 | logger.error '[AccountInfo] %s', err.message 52 | return callback err 53 | 54 | logger.info '[AccountInfo] %s (%s)', player.nickname, player.team for player in accounts 55 | logger.warn '[AccountInfo] %s', 'Please immediately press Ctrl+C if you are using an incorrect account.'.yellow 56 | 57 | callback() 58 | -------------------------------------------------------------------------------- /config.cson.default: -------------------------------------------------------------------------------- 1 | Auth: 2 | 3 | # You could fill this field via copying from: 4 | # Chrome Dev Tools -> Network -> (select a non-static request entity) -> 5 | # Request Headers -> Cookie 6 | CookieRaw: 'SACSID=xxx; csrftoken=xxx' 7 | 8 | ZoomLevel: 17 9 | 10 | # LatLngs of your desired requesting polygon region. 11 | # See README(# How to generate polygon data via IITC drawtool) to learn how to generate it easily. 12 | # Format: [[lat1, lng1], [lat2, lng2], ...] 
13 | Region: [[31.751525328078905,121.10504150390625],[31.84139930209406,121.18606567382812],[31.88105608497267,121.30142211914061],[31.666239488179933,121.7340087890625],[31.577365480690492,121.981201171875],[31.339562861784987,122.0416259765625],[31.25507418542194,121.79855346679686],[30.97643166961479,121.99493408203125],[30.883369321692268,121.9921875],[30.851542445605972,121.915283203125],[30.66035956584673,122.025146484375],[30.64618278002518,122.19268798828126],[30.56462594065098,122.23526000976561],[30.53151083364524,122.04437255859375],[30.66035956584673,122.00592041015626],[30.850363469502337,121.8988037109375],[30.83385628265907,121.65573120117188],[30.768979140567136,121.45523071289061],[30.659178246433786,121.26434326171875],[30.753638146218577,121.22589111328126],[30.77723866322742,121.1077880859375],[30.881012137733634,121.08581542968751],[30.85743710875022,120.97595214843749],[31.02234042904364,120.94024658203124],[31.113025,120.845669],[31.147006308556566,121.03363037109374],[31.252726196836825,121.0418701171875],[31.323140440957427,121.13250732421876],[31.37005403926066,121.09405517578125],[31.440380540593694,121.14624023437499],[31.501287521196705,121.22039794921874],[31.54343066661174,121.343994140625],[31.57853542647338,121.3385009765625]] 14 | 15 | Request: 16 | 17 | UserAgent: 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36' 18 | MaxParallel: 5 19 | MinIntervalMS: 100 # Too fast requesting would get IP banned 20 | 21 | Tile: 22 | 23 | MaxFailRetry: 3 # server responses a failure 24 | MaxErrorRetry: 3 # network errors, etc 25 | 26 | TileBucket: 27 | 28 | Max: 4 # tiles may get TIMEOUT if larger than 4 29 | Min: 4 30 | 31 | Chat: 32 | 33 | SplitTimespanMS: 34 | Broadcast: 10*60*1000 # 10 min 35 | Faction: 3*60*60*1000 # 3 hours 36 | TraceTimespanMS: 10*24*60*60*1000 # new task: trace 10 days 37 | 38 | PlayerLookup: 39 | 40 | Max: 15 41 | 42 | Database: 43 | 44 | 
ConnectString: 'mongodb://localhost:27017/ingress' 45 | 46 | MaxParallel: 10 47 | 48 | Options: 49 | 50 | safe: true 51 | -------------------------------------------------------------------------------- /src/app.coffee: -------------------------------------------------------------------------------- 1 | GLOBAL.argv = require('optimist').argv 2 | 3 | ####################### 4 | # initialize logger 5 | logger = GLOBAL.logger = require 'winston' 6 | logger.exitOnError = false 7 | logger.remove logger.transports.Console 8 | logger.add logger.transports.Console, 9 | colorize: if argv.raw then false else true 10 | timestamp: if argv.raw then false else true 11 | logger.add logger.transports.File, 12 | filename: 'ingress-exporter.log' 13 | 14 | ####################### 15 | 16 | noop = GLOBAL.noop = -> null 17 | 18 | ####################### 19 | 20 | async = require('async') 21 | 22 | Config = GLOBAL.Config = require './config.json' 23 | 24 | require './lib/leaflet.js' 25 | require './lib/utils.js' 26 | require './lib/database.js' 27 | require './lib/agent.js' 28 | require './lib/entity.js' 29 | require './lib/mungedetector.js' 30 | require './lib/accountinfo.js' 31 | 32 | require 'color' 33 | 34 | plugins = require('require-all')( 35 | dirname: __dirname + '/plugins' 36 | filter : /(.+)\.js$/, 37 | ) 38 | 39 | ####################### 40 | # bootstrap 41 | 42 | if argv.detect? 
43 | argv.detectmunge = argv.detect 44 | argv.detectplayer = argv.detect 45 | 46 | bootstrap = -> 47 | 48 | async.series [ 49 | 50 | (callback) -> 51 | 52 | if argv.detectmunge isnt 'false' 53 | MungeDetector.detect callback 54 | else 55 | MungeDetector.initFromDatabase callback 56 | 57 | (callback) -> 58 | 59 | if argv.detectplayer isnt 'false' 60 | AccountInfo.fetch callback 61 | else 62 | callback() 63 | 64 | Agent.initFromDatabase 65 | 66 | ], (err) -> 67 | 68 | if err 69 | 70 | Database.db.close() 71 | return 72 | 73 | if not err 74 | 75 | async.eachSeries pluginList, (plugin, callback) -> 76 | 77 | if plugin.onBootstrap 78 | plugin.onBootstrap callback 79 | else 80 | callback() 81 | 82 | , (err) -> 83 | 84 | Database.db.close() 85 | 86 | ####################### 87 | # main 88 | 89 | pluginList = [] 90 | pluginList.push plugin for pname, plugin of plugins 91 | 92 | # the terminate plugin 93 | pluginList.push 94 | onBootstrap: (callback) -> 95 | callback 'end' 96 | 97 | async.each pluginList, (plugin, callback) -> 98 | if plugin.onInitialize 99 | plugin.onInitialize callback 100 | else 101 | callback() 102 | , -> 103 | bootstrap() 104 | -------------------------------------------------------------------------------- /src/lib/agent.coffee: -------------------------------------------------------------------------------- 1 | async = require 'async' 2 | 3 | TEAM_ENLIGHTENED = 1 4 | TEAM_RESISTANCE = 2 5 | 6 | StrTeamMapping = 7 | ENLIGHTENED: TEAM_ENLIGHTENED 8 | RESISTANCE: TEAM_RESISTANCE 9 | 10 | Agent = GLOBAL.Agent = 11 | 12 | data: {} 13 | 14 | initFromDatabase: (callback) -> 15 | 16 | Database.db.collection('Agent').find().toArray (err, agents) -> 17 | 18 | # ignore error 19 | 20 | Agent.data[agent._id] = agent for agent in agents if agents 21 | callback && callback() 22 | 23 | strToTeam: (val) -> 24 | 25 | StrTeamMapping[val] 26 | 27 | resolveFromPortalDetail: (portal, callback) -> 28 | 29 | return callback() if not portal.team? 
30 | 31 | agentTeam = Agent.strToTeam portal.team 32 | 33 | async.each portal.resonators, (resonator, callback) -> 34 | 35 | if resonator isnt null 36 | 37 | Agent.resolved resonator.owner, 38 | level: resonator.level 39 | team: agentTeam 40 | , callback 41 | 42 | else 43 | callback() 44 | 45 | , callback 46 | 47 | resolved: (agentId, data, callback) -> 48 | 49 | # name has been resolved as agentId 50 | # data: team, level 51 | 52 | need_update = false 53 | 54 | if not Agent.data[agentId]? 55 | need_update = true 56 | Agent.data[agentId] = 57 | team: null 58 | level: 0 59 | inUpdateProgress: false 60 | 61 | if data.team? and Agent.data[agentId].team isnt data.team 62 | need_update = true 63 | Agent.data[agentId].team = data.team 64 | 65 | if data.level? and Agent.data[agentId].level < data.level 66 | need_update = true 67 | Agent.data[agentId].level = data.level 68 | 69 | if need_update and not Agent.data[agentId].inUpdateProgress 70 | 71 | Agent.data[agentId].inUpdateProgress = true 72 | 73 | Database.db.collection('Agent').update 74 | _id: agentId 75 | , 76 | $set: 77 | team: Agent.data[agentId].team 78 | level: Agent.data[agentId].level 79 | , 80 | upsert: true 81 | , (err) -> 82 | 83 | Agent.data[agentId].inUpdateProgress = false 84 | callback && callback() 85 | 86 | else 87 | 88 | callback() 89 | 90 | _resolveDatabase: (callback) -> 91 | 92 | Database.db.collection('Portals').find( 93 | team: 94 | $ne: 'NEUTRAL' 95 | resonators: 96 | $exists: true 97 | , 98 | resonators: true 99 | team: true 100 | ).toArray (err, portals) -> 101 | 102 | if err 103 | logger.error '[AgentResolver] Failed to fetch portal list: %s', err.message 104 | return callback() 105 | 106 | # TODO: reduce memory usage 107 | if portals? 
108 | async.eachSeries portals, Agent.resolveFromPortalDetail, callback 109 | else 110 | callback() -------------------------------------------------------------------------------- /src/plugins/request-faction.coffee: -------------------------------------------------------------------------------- 1 | moment = require 'moment' 2 | async = require 'async' 3 | scutil = require '../lib/scutil.js' 4 | broadcastTasker = require '../lib/broadcast.js' 5 | 6 | messageCount = 0 7 | messageReceived = 0 8 | messageInserted = 0 9 | noMoreMessages = false 10 | 11 | taskCount = 0 12 | taskCompleted = 0 13 | 14 | module.exports = 15 | 16 | onBootstrap: (callback) -> 17 | 18 | if argv.faction 19 | bootstrap -> 20 | callback 'end' 21 | else 22 | callback() 23 | 24 | dbQueue = async.queue (task, callback) -> 25 | 26 | task callback 27 | 28 | , Config.Database.MaxParallel 29 | 30 | bootstrap = (callback) -> 31 | 32 | indexes = [ 33 | {time: 1} 34 | {time: -1} 35 | {'markup.SENDER1.plain': 1} 36 | {'markup.PLAYER1.plain': 1} 37 | ] 38 | 39 | # ensure indexes 40 | async.each indexes, (index, callback) -> 41 | 42 | Database.db.collection('Chat.Faction').ensureIndex index, callback 43 | 44 | , -> 45 | 46 | dbQueue.drain = -> 47 | callback() if noMoreMessages 48 | 49 | region = scutil.getLatLngRegion Config.Region 50 | 51 | broadcast = broadcastTasker 52 | instanceId: 'req-broadcast-faction' 53 | type: 'faction' 54 | splitTimespanMS: Config.Chat.SplitTimespanMS.Faction 55 | region: region 56 | 57 | broadcast.on 'error', (err) -> 58 | logger.error "[Faction] #{err.message}" 59 | 60 | broadcast.on 'complete', -> 61 | noMoreMessages = true 62 | 63 | broadcast.on 'receive', (response) -> 64 | for rec in response 65 | messageReceived++ 66 | insertMessage rec[0], rec[1], rec[2] 67 | 68 | broadcast.on 'response', (data, done, max) -> 69 | logger.info '[Faction] [%s - %s] %d% [%d/%d] [%d/%d]\tReceived %d (all %d)', 70 | moment(data.minTimestampMs).format('MMM Do, HH:mm:ss'), 71 | 
moment(data.maxTimestampMs).format('MMM Do, HH:mm:ss'), 72 | Math.round(taskCompleted / taskCount * 100), 73 | taskCompleted, 74 | taskCount, 75 | done, 76 | max, 77 | messageReceived, 78 | messageCount 79 | 80 | broadcast.on 'taskcreated', (preparedLength, allLength) -> 81 | taskCount = allLength 82 | logger.info "[Faction] Created #{preparedLength} tasks (all #{allLength} tasks)." 83 | 84 | broadcast.on 'taskcompleted', -> 85 | taskCompleted++ 86 | callback() if dbQueue.length() is 0 and taskCompleted is taskCount 87 | 88 | broadcast.on 'beforestart', -> 89 | logger.info "[Faction] Begin requesting..." 90 | 91 | tsMax = Date.now() 92 | 93 | if argv.tracedays 94 | tsMin = tsMax - parseFloat(argv.tracedays) * 24 * 60 * 60 * 1000 95 | else 96 | tsMin = tsMax - Config.Chat.TraceTimespanMS 97 | 98 | broadcast.start tsMin, tsMax, not (argv.new or argv.n) 99 | 100 | insertMessage = (id, timestamp, data) -> 101 | 102 | if messageInserted % 100 is 0 103 | 104 | Database.db.collection('Chat.Faction').count {}, (err, count) -> 105 | messageCount = count if count 106 | 107 | messageInserted++ 108 | 109 | data2 = data.plext 110 | 111 | # parse markup 112 | markup = {} 113 | count = {} 114 | 115 | for m in data.plext.markup 116 | count[m[0]] = 0 if not count[m[0]]? 
117 | count[m[0]]++ 118 | markup[m[0]+count[m[0]].toString()] = m[1] 119 | 120 | data2.markup = markup 121 | 122 | dbQueue.push (done) -> 123 | 124 | doc = data2 125 | doc._id = id 126 | doc.time = timestamp 127 | 128 | Database.db.collection('Chat.Faction').insert doc, done -------------------------------------------------------------------------------- /src/plugins/request-broadcasts.coffee: -------------------------------------------------------------------------------- 1 | moment = require 'moment' 2 | async = require 'async' 3 | scutil = require '../lib/scutil.js' 4 | broadcastTasker = require '../lib/broadcast.js' 5 | 6 | messageCount = 0 7 | messageReceived = 0 8 | messageInserted = 0 9 | noMoreMessages = false 10 | 11 | taskCount = 0 12 | taskCompleted = 0 13 | 14 | module.exports = 15 | 16 | onBootstrap: (callback) -> 17 | 18 | if argv.broadcasts 19 | bootstrap -> 20 | callback 'end' 21 | else 22 | callback() 23 | 24 | dbQueue = async.queue (task, callback) -> 25 | 26 | task callback 27 | 28 | , Config.Database.MaxParallel 29 | 30 | bootstrap = (callback) -> 31 | 32 | indexes = [ 33 | {time: -1} 34 | {'markup.PLAYER1.plain': 1} 35 | {'markup.PORTAL1.guid': 1} 36 | ] 37 | 38 | # ensure indexes 39 | async.each indexes, (index, callback) -> 40 | 41 | Database.db.collection('Chat.Public').ensureIndex index, callback 42 | 43 | , -> 44 | 45 | dbQueue.drain = -> 46 | callback() if noMoreMessages 47 | 48 | region = scutil.getLatLngRegion Config.Region 49 | 50 | broadcast = broadcastTasker 51 | instanceId: 'req-broadcast-all' 52 | type: 'all' 53 | splitTimespanMS: Config.Chat.SplitTimespanMS.Broadcast 54 | region: region 55 | 56 | broadcast.on 'error', (err) -> 57 | logger.error "[Broadcasts] #{err.message}" 58 | 59 | broadcast.on 'complete', -> 60 | noMoreMessages = true 61 | 62 | broadcast.on 'receive', (response) -> 63 | for rec in response 64 | messageReceived++ 65 | insertMessage rec[0], rec[1], rec[2] 66 | 67 | broadcast.on 'response', (data, done, max) -> 68 
| logger.info '[Broadcasts] [%s - %s] %d% [%d/%d] [%d/%d]\tReceived %d (all %d)', 69 | moment(data.minTimestampMs).format('MMM Do, HH:mm:ss'), 70 | moment(data.maxTimestampMs).format('MMM Do, HH:mm:ss'), 71 | Math.round(taskCompleted / taskCount * 100), 72 | taskCompleted, 73 | taskCount, 74 | done, 75 | max, 76 | messageReceived, 77 | messageCount 78 | 79 | broadcast.on 'taskcreated', (preparedLength, allLength) -> 80 | taskCount = allLength 81 | logger.info "[Broadcasts] Created #{preparedLength} tasks (all #{allLength} tasks)." 82 | 83 | broadcast.on 'taskcompleted', -> 84 | taskCompleted++ 85 | callback() if dbQueue.length() is 0 and taskCompleted is taskCount 86 | 87 | broadcast.on 'beforestart', -> 88 | logger.info "[Broadcasts] Begin requesting..." 89 | 90 | tsMax = Date.now() 91 | 92 | if argv.tracedays 93 | tsMin = tsMax - parseFloat(argv.tracedays) * 24 * 60 * 60 * 1000 94 | else 95 | tsMin = tsMax - Config.Chat.TraceTimespanMS 96 | 97 | broadcast.start tsMin, tsMax, not (argv.new or argv.n) 98 | 99 | insertMessage = (id, timestamp, data) -> 100 | 101 | if messageInserted % 100 is 0 102 | 103 | Database.db.collection('Chat.Public').count {}, (err, count) -> 104 | messageCount = count if count 105 | 106 | messageInserted++ 107 | 108 | data2 = data.plext 109 | 110 | # parse markup 111 | markup = {} 112 | count = {} 113 | 114 | for m in data.plext.markup 115 | count[m[0]] = 0 if not count[m[0]]? 116 | count[m[0]]++ 117 | markup[m[0]+count[m[0]].toString()] = m[1] 118 | 119 | data2.markup = markup 120 | 121 | return if markup.PLAYER1?.plain? and ig.indexOf(markup.PLAYER1.plain) > -1 122 | 123 | dbQueue.push (done) -> 124 | 125 | doc = data2 126 | doc._id = id 127 | doc.time = timestamp 128 | 129 | async.series [ 130 | 131 | (callback) -> 132 | 133 | Database.db.collection('Chat.Public').insert doc, callback 134 | 135 | (callback) -> 136 | 137 | # resove player names 138 | if doc.markup.PLAYER1? 
139 | 140 | level = 0 141 | 142 | if doc.markup.TEXT1.plain is ' deployed an ' 143 | level = parseInt doc.markup.TEXT2.plain.substr(1) 144 | 145 | Agent.resolved doc.markup.PLAYER1.plain, 146 | team: Agent.strToTeam(doc.markup.PLAYER1.team) 147 | level: level 148 | , callback 149 | 150 | else 151 | 152 | callback() 153 | 154 | ], done -------------------------------------------------------------------------------- /src/plugins/find-farms.coffee: -------------------------------------------------------------------------------- 1 | id2index = {} 2 | id2portal = {} 3 | dis = [] 4 | 5 | module.exports = 6 | 7 | onBootstrap: (callback) -> 8 | 9 | if argv.farm 10 | bootstrap -> 11 | callback 'end' 12 | else 13 | callback() 14 | 15 | bootstrap = (callback) -> 16 | 17 | teamMapper = 18 | resistance: 'RESISTANCE' 19 | res: 'RESISTANCE' 20 | enlightened:'ENLIGHTENED' 21 | enl: 'ENLIGHTENED' 22 | neutral: 'NEUTRAL' 23 | all: null 24 | 25 | # default condition: more than 5 portals (>=lv7) within 500m 26 | argv.radius = 500 if not argv.radius? 27 | argv.nearby = 5 if not argv.nearby? 28 | argv.level = 7 if not argv.level? 29 | argv.radius = parseFloat argv.radius 30 | argv.level = parseInt argv.level 31 | argv.nearby = parseInt argv.nearby 32 | argv.team = teamMapper[argv.team.toLowerCase()] if argv.team 33 | argv.level = 0 if argv.team is 'NEUTRAL' 34 | 35 | Database.db.collection('Portals').find().toArray (err, portals) -> 36 | 37 | logger.info '[FarmHunter] Optimizing (part 1)...' if argv.output 38 | 39 | dis = new Array(portals.length) 40 | for po, i in portals 41 | # transform for faster calculation 42 | po.posXY = millerXY po.latE6 / 1e6, po.lngE6 / 1e6 43 | id2index[po._id] = i 44 | dis[i] = new Array(portals.length) 45 | 46 | # pre-calculate distances 47 | logger.info '[FarmHunter] Optimizing (part 2)...'
if argv.output 48 | 49 | for po1, i in portals 50 | for po2, j in portals 51 | dis[i][j] = getPortalDistance po1, po2 52 | dis[j][i] = dis[i][j] 53 | 54 | # get possible farm portals 55 | logger.info '[FarmHunter] Calculating nearby portals...' if argv.output 56 | 57 | possibleFarmPortals = [] 58 | 59 | for po, i in portals 60 | continue if po.level < argv.level 61 | continue if po.team isnt argv.team if argv.team 62 | po.nearbys = getNearbyPortals po, portals 63 | possibleFarmPortals.push po if po.nearbys.length > argv.nearby 64 | 65 | 66 | 67 | # DFS 68 | logger.info '[FarmHunter] Finding farms...' if argv.output 69 | 70 | farms = [] 71 | visited = {} 72 | 73 | searchNearby = (po, onFound) -> 74 | 75 | for nearby in po.nearbys 76 | 77 | continue if visited[nearby._id] 78 | 79 | onFound nearby 80 | visited[nearby._id] = true 81 | 82 | searchNearby nearby, onFound 83 | 84 | for po in possibleFarmPortals 85 | 86 | continue if visited[po._id] 87 | 88 | farmPortals = [] 89 | farmPortals.push po 90 | visited[po._id] = true 91 | 92 | searchNearby po, (portal) -> 93 | farmPortals.push portal 94 | 95 | farms.push farmPortals 96 | 97 | # output 98 | outputFarm = [] 99 | 100 | for farm in farms 101 | 102 | fNew = [] 103 | 104 | for po in farm 105 | fNew.push 106 | title: po.title 107 | guid: po._id 108 | latE6: po.latE6 109 | lngE6: po.lngE6 110 | level: po.level 111 | team: po.team 112 | 113 | outputFarm.push fNew 114 | 115 | if argv.output 116 | fs = require 'fs' 117 | fs.writeFileSync argv.output, JSON.stringify(outputFarm, null, 4) 118 | else 119 | console.log JSON.stringify(outputFarm, null, 4) 120 | 121 | logger.info '[FarmHunter] Found %d farms', farms.length if argv.team 122 | 123 | callback() 124 | 125 | getNearbyPortals = (portal, portals) -> 126 | 127 | nearbyPortals = [] 128 | index = id2index[portal._id] 129 | 130 | for po in portals 131 | 132 | continue if po._id is portal._id 133 | continue if po.team isnt argv.team if argv.team 134 | continue if
parseInt(po.level) < argv.level 135 | continue if dis[index][id2index[po._id]] > argv.radius 136 | 137 | nearbyPortals.push po 138 | 139 | nearbyPortals 140 | 141 | getPortalDistance = (po1, po2) -> 142 | 143 | x = po1.posXY.x - po2.posXY.x 144 | y = po1.posXY.y - po2.posXY.y 145 | 146 | Math.sqrt x * x + y * y 147 | 148 | millerXY = (lat, lng) -> 149 | 150 | L = 6381372 * Math.PI * 2 151 | W = L 152 | H = L / 2 153 | mill = 2.3 154 | x = lng * Math.PI / 180 155 | y = lat * Math.PI / 180 156 | 157 | y = 1.25 * Math.log( Math.tan( 0.25 * Math.PI + 0.4 * y ) ) 158 | 159 | x = ( W / 2 ) + ( W / (2 * Math.PI) ) * x 160 | y = ( H / 2 ) - ( H / ( 2 * mill ) ) * y 161 | 162 | x: x 163 | y: y 164 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ingress-exporter 2 | ================ 3 | 4 | Export all portals, links, fields and system broadcasts in a specific area. 5 | 6 | # SEEKING FOR MAINTAINER 7 | 8 | I'm busy now since I'm an undergraduate student. Even I don't have much time to open Ingress every week :-( 9 | 10 | It would be fantastic if anyone could help maintain this project, especially in reverse engineering & bypassing the botguard mechanism ([#11](https://github.com/breeswish/ingress-exporter/issues/11)). 11 | 12 | Thanks! 13 | 14 | # KNOWN ISSUES 15 | 16 | The project is currently broken because of the [botguard](https://github.com/breeswish/ingress-exporter/issues/11). 17 | 18 | # Requirement 19 | 20 | Node.js, MongoDB 21 | 22 | # Install 23 | 24 | ```bash 25 | npm install 26 | npm install -g grunt-cli 27 | grunt 28 | ``` 29 | 30 | # Upgrade Note (Aug 15, 2014) 31 | 32 | The latest ingress-exporter uses [esprima](https://github.com/ariya/esprima) to parse and generate AST for JavaScript script from ingress.com/intel, so you need to run `npm install` again if you are upgrading from older versions. 
33 | 34 | # Usage 35 | 36 | ### Config 37 | 38 | See `config.cson.default` for details. 39 | You need to copy and rename to `config.cson` first before running. 40 | 41 | Remember to execute `grunt` after you modifying `config.cson` or updating repo. 42 | 43 | #### How to generate polygon data via IITC drawtool 44 | 45 | 1. Install [IITC](http://iitc.jonatkins.com/?page=desktop) 46 | 47 | 2. Install Draw Tools plugin of IITC 48 | 49 | 3. Draw a polygon on the map (your desired requesting region) 50 | 51 | 4. Open Developer Tools -> Terminal 52 | 53 | 5. Paste the code below & press ENTER 54 | 55 | ```javascript 56 | window.plugin.drawTools.drawnItems.eachLayer(function(layer) { 57 | if (!(layer instanceof L.GeodesicPolygon)) { 58 | return; 59 | } 60 | var latlngs = []; 61 | layer.getLatLngs().forEach(function(p) { 62 | latlngs.push([p.lat, p.lng]); 63 | }); 64 | console.log(JSON.stringify(latlngs) + '\n'); 65 | }); 66 | ``` 67 | 68 | 6. Copy output to your `config.cson`. 69 | 70 | **Notice:** If there are more than 1 polygon on the map, the code will output data of all polygons. Please choose the one your desired. 71 | 72 | ### Example 73 | 74 | #### Request all portals and details (resonators, mods, owner, ...) 
75 | 76 | ``` 77 | node build/app.js --portals --new 78 | ``` 79 | 80 | #### Request all portals without details 81 | 82 | ``` 83 | node build/app.js --portals --detail false --new 84 | ``` 85 | 86 | #### Request public messages 87 | 88 | ``` 89 | node build/app.js --broadcasts 90 | ``` 91 | 92 | #### Request faction messages 93 | 94 | ``` 95 | node build/app.js --faction 96 | ``` 97 | 98 | #### Export all portals to csv file with title and image 99 | 100 | ``` 101 | node build/app.js --export -tI --output output.csv --detect false 102 | ``` 103 | 104 | #### Trace a player's activities based on database and output to csv file 105 | 106 | ``` 107 | node build/app.js --trace --player Vivian --detect false 108 | ``` 109 | 110 | #### Search farms based on database and output to json file 111 | 112 | ``` 113 | node build/app.js --farm --output farm.json --detect false 114 | ``` 115 | 116 | ### Options 117 | 118 | ``` 119 | node build/app.js 120 | 121 | --portals Request portals information 122 | --broadcasts Request public broadcast messages 123 | --faction Request faction messages 124 | --export Export portals (output to stdout) 125 | --trace Trace a player's destroy/deploy/link history (experimental) 126 | --farm Find farms (experimental) 127 | --detect false Don't detect munge data & player info (optional) 128 | Overwrites --detectmunge and --detectplayer 129 | (You may need this when using --export, --trace or --farm) 130 | --detectmunge false Don't detect munge data (optional) 131 | --detectplayer false Don't detect player info (optional) 132 | --cookie COOKIE The cookie to use (overwrite config.cson, optional) 133 | --proxy PROXY_URL HTTP proxy URL (for example, http://127.0.0.1, optional) 134 | ``` 135 | 136 | Requesting portals (`--portals`): 137 | 138 | ``` 139 | --new -n Start new requests (otherwise continue, optional) 140 | --fast Start new requests based on existing tile data 141 | (overwrite --new, optional) 142 | App will only request tiles that 
containing portals. 143 | It can effectively improve speed when region is large, 144 | but lose some new approved protals. 145 | --detail false Don't request portals details (faster, optional) 146 | ``` 147 | 148 | Requesting public/faction (`--broadcasts` or `--faction`): 149 | 150 | ``` 151 | --new -n Start new requests (otherwise continue, optional) 152 | --tracedays N Trace history of N days (overwrite config.cson, optional) 153 | ``` 154 | 155 | Exporting portals (`--export`): 156 | 157 | ``` 158 | --title -t Include title 159 | --latlng -l Include lat & lng 160 | --id -i Include guid 161 | --image -I Include image URI 162 | --time -T Include captured time (if available) 163 | --owner -o Include owner (if available) 164 | --output FILE Output to the file instead of stdout (optional) 165 | ``` 166 | 167 | Tracing player (`--trace`): 168 | 169 | ``` 170 | --player PLAYER The player to trace (case sensitive) 171 | --output FILE Output to the file instead of stdout (optional) 172 | ``` 173 | 174 | Finding farms (`--farm`): 175 | 176 | ``` 177 | --radius R Minimum distance of portals (unit: m) (default: 500) 178 | --nearby N Minimum nearby portals for a farm (default: 5) 179 | --level LEVEL Minimum level of portals (default: 7) 180 | --team RES|ENL|ALL Farm filter (default: ALL) 181 | --output FILE Output to the file instead of stdout (optional) 182 | ``` 183 | 184 | # License 185 | 186 | The MIT License 187 | -------------------------------------------------------------------------------- /src/lib/mungedetector.coffee: -------------------------------------------------------------------------------- 1 | async = require 'async' 2 | requestFactory = require './requestfactory.js' 3 | request = requestFactory() 4 | request.ignoreMungeError = true 5 | 6 | Munges = GLOBAL.Munges = 7 | Data: null 8 | ActiveSet: 0 9 | 10 | MungeDetector = GLOBAL.MungeDetector = 11 | 12 | initFromDatabase: (callback) -> 13 | 14 | kid = "\x72\x65\x6D\x6F\x76\x65"; 15 | kkey = 
  # Establish a working munge parameter set, trying sources in order:
  #   0. load whatever is cached in MongoDB (errors ignored),
  #   1. probe the cached/internal set with a test request,
  #   2. scrape a fresh set from ingress.com/intel and probe that.
  #
  # async.series "stop on first error" semantics are used as control flow:
  # a step finishes with one of the sentinel strings 'done' (cached set
  # works), 'new' (freshly scraped set works) or 'fail' (nothing works) to
  # short-circuit the remaining steps.  A newly scraped set is persisted
  # back to the MungeData collection so step 0 succeeds on the next run.
  #
  # callback(err): err is undefined on success, an Error when detection
  # failed entirely.
  detect: (callback) ->

    async.series [

      (callback) ->

        # 0. retrieve munge data from database

        # ignore errors -- a missing cache just means we fall through
        MungeDetector.initFromDatabase (err) -> callback()

      (callback) ->

        # 1. test by internal munge-set

        # No munges in database: skip this step
        if Munges.Data is null
          callback()
          return

        logger.info '[MungeDetector] Trying to use internal munge data.'

        tryMungeSet (err) ->

          if not err?
            # sentinel: cached set works, stop the series
            callback 'done'
            return

          logger.warn '[MungeDetector] Failed.'
          callback()

      (callback) ->

        # 2. extract munge data from Ingress.com/intel

        logger.info '[MungeDetector] Trying to extract munge data from ingress.com/intel.'

        extractMunge (err) ->

          if not err?
            # sentinel: scraped set works and still needs to be saved
            callback 'new'
            return

          logger.warn '[MungeDetector] Failed.'
          callback()

      (callback) ->

        # :( no usable munge-set anywhere

        callback 'fail'

    ], (err) ->

      if err is 'done' or err is 'new'

        logger.info '[MungeDetector] Detect successfully.'

        if err is 'new'

          # persist the freshly scraped set for future runs (upsert keeps
          # a single document keyed by _id: 'munge')
          Database.db.collection('MungeData').update
            _id: 'munge'
          ,
            $set:
              data: Munges.Data
              index: Munges.ActiveSet
              #func: Munges.NormalizeParamCount.body
          ,
            upsert: true
          , (err) ->

            # ignore error -- failing to cache is not fatal
            if err
              logger.error '[MungeDetector] Failed to save mungedata: %s', err.message
            else
              logger.info '[MungeDetector] Munge data saved.'

            callback && callback()
            return

        else

          callback && callback()
          return

      else

        logger.error '[MungeDetector] Could not detect munge data. Tasks are terminated.'
        callback new Error('Munge detection failed')
# Download the stock intel dashboard script and extract a fresh munge
# parameter set from it.  On success the new set is installed as the only
# active set (Munges.Data / Munges.ActiveSet) and verified with a probe
# request before reporting success.
#
# callback(err): invoked with no argument on success, or with the string
# sentinel 'fail' (not an Error) on any failure.
extractMunge = (callback) ->

  # NOTE(review): the dashboard script path is hard-coded and embeds a
  # content hash; it presumably changes when the site is redeployed --
  # confirm before relying on this long-term.
  request.get '/jsc/gen_dashboard_0dd97f6072ca647f1348626bc4b6ba2743017e6d.js', (error, response, body) ->

    if error
      callback 'fail'
      return

    # body is a Buffer (the factory requests with encoding: null); decode it
    body = body.toString()

    try
      result = Utils.extractIntelData body
    catch err
      console.log err
      callback 'fail'
      return

    # install the extracted parameters as the single active munge set
    Munges.Data = [result]
    Munges.ActiveSet = 0

    # verify the new set actually works before declaring success
    tryMungeSet (err) ->

      if not err?
        callback()
        return

      callback 'fail'
  # Queue a getPortalDetails request for one portal and merge the result
  # into the Portals collection, then resolve agent information from the
  # updated document.
  #
  # guid          - portal guid (also used as the Portals _id)
  # outerCallback - invoked as soon as the request has been ENQUEUED, not
  #                 when it completes; overall completion is signalled by
  #                 the request queue draining.
  requestPortalDetail: (guid, outerCallback) ->

    # De-duplication guard: each guid is requested at most once per
    # process lifetime, even across repeated requestMissingPortals passes.
    return outerCallback() if requested_guid[guid]?

    requested_guid[guid] = true

    # request start time, for the per-request latency log line below
    t = 0

    request.push

      action: 'getPortalDetails'
      data:
        guid: guid
      beforeRequest: (callback) ->

        t = Date.now()
        callback()

      onSuccess: (response, callback) ->
        # translate the positional array payload into named fields
        response = remap response.result

        if response.capturedTime?
          response.capturedTime = parseInt response.capturedTime

        # skip portals owned by ignored players (see GLOBAL.ig)
        return callback() if ig.indexOf(response.owner) > -1

        Database.db.collection('Portals').findAndModify
          _id: guid #query
        ,
          _id: 1 #sort
        ,
          $set: response #update
        ,
          new: true # options: hand back the updated document
        , (err, data) ->

          if err
            logger.error '[Details] Failed to update portal detail (guid=%s) in database: %s', guid, err.message

          # resolve agent information from the updated portal document
          Agent.resolveFromPortalDetail data, callback

      onError: (err, callback) ->

        logger.error "[Details] #{err.message}"
        callback()

      afterResponse: (callback) ->

        # progress line: percent done, done/max counters, round-trip time
        logger.info "[Details] " +
          Math.round(request.done / request.max * 100).toString() +
          "%\t[#{request.done}/#{request.max}]\t#{Date.now() - t}ms"

        callback()
    outerCallback()
Database.db.collection('Portals').find( 127 | team: 128 | $ne: 'NEUTRAL' 129 | resonators: 130 | $exists: false 131 | , 132 | _id: true 133 | ).toArray (err, portals) -> 134 | 135 | if err 136 | logger.error '[Details] Failed to fetch missing portal list: %s', err.message 137 | return callback() 138 | 139 | if portals 140 | async.each portals, (po, callback) -> 141 | Entity.requestPortalDetail po._id, callback 142 | , callback 143 | else 144 | callback() 145 | 146 | createEntity = (collection, id, timestamp, data, callback) -> 147 | 148 | data.time = timestamp 149 | 150 | Database.db.collection(collection).update 151 | _id: id 152 | , 153 | $set: 154 | data 155 | , 156 | upsert: true 157 | , (err) -> 158 | 159 | if err 160 | logger.error '[Entity] Failed to insert entity (id=%s) into database: %s', id, err.message 161 | 162 | # ignore error 163 | callback() 164 | 165 | createPortalEntity = (id, timestamp, data, callback) -> 166 | 167 | data.pos = 168 | lat: data.latE6 / 1e6 169 | lng: data.lngE6 / 1e6 170 | 171 | createEntity 'Portals', id, timestamp, data, -> 172 | 173 | if data.team isnt 'NEUTRAL' and argv.detail isnt 'false' 174 | Entity.requestPortalDetail id, -> 175 | callback && callback 'portal' 176 | else 177 | callback && callback 'portal' 178 | 179 | createFieldEntity = (id, timestamp, data, callback) -> 180 | 181 | createEntity 'Fields', id, timestamp, data, -> 182 | callback && callback 'field' 183 | 184 | createLinkEntity = (id, timestamp, data, callback) -> 185 | 186 | createEntity 'Links', id, timestamp, data, -> 187 | callback && callback 'link' 188 | 189 | 190 | types = { 191 | 'p' : 'portal' 192 | 'r' : 'region' 193 | 'e' : 'edge' 194 | } 195 | 196 | teams = { 197 | 'E' : 'ENLIGHTENED' 198 | 'R' : 'RESISTANCE' 199 | 'N' : 'NEUTRAL' 200 | } 201 | 202 | remap = (data) -> 203 | #TODO: links and fields 204 | result = { 205 | type: types[data[0]] 206 | team: teams[data[1]] 207 | latE6: data[2] 208 | lngE6: data[3] 209 | level: data[4] 210 | health: 
# Convert the positional mod tuples of a portal payload into objects with
# named fields.  Empty mod slots are preserved as null, and a new array of
# the same length is always returned.
remapMods = (mods) ->
  for mod in mods
    if mod
      owner: mod[0]
      name: mod[1]
      rarity: mod[2]
      stats: mod[3]
    else
      null
| # extendLeft 58 | minX = r[0] 59 | y = r[2] 60 | 61 | if r[4] 62 | while minX > 0 and map.get(minX - 1, y) isnt 1 63 | minX-- 64 | map.set minX, y, 1 65 | 66 | # extendRight 67 | maxX = r[1] 68 | 69 | if r[5] 70 | while maxX < width - 1 and map.get(maxX + 1, y) isnt 1 71 | maxX++ 72 | map.set maxX, y, 1 73 | 74 | # extend range ignored from previous line 75 | r[0]-- 76 | r[1]++ 77 | 78 | addNextLine = (newY, isNext, downwards) -> 79 | 80 | rMinX = minX 81 | inRange = false 82 | 83 | for x in [minX .. maxX] 84 | 85 | # skip testing, if testing previous line within previous range 86 | empty = (isNext or (x < r[0] or x > r[1])) and map.get(x, newY) isnt 1 87 | 88 | if not inRange and empty 89 | rMinX = x 90 | inRange = true 91 | else if inRange and not empty 92 | ranges.push [rMinX, x - 1, newY, downwards, rMinX is minX, false] 93 | inRange = false 94 | 95 | map.set x, newY, 1 if inRange 96 | 97 | # skip 98 | x = r[1] if not isNext and x is r[0] 99 | 100 | ranges.push [rMinX, x - 1, newY, downwards, rMinX is minX, true] if inRange 101 | 102 | addNextLine y + 1, not up, true if y < height 103 | addNextLine y - 1, not down, false if y > 0 104 | 105 | true 106 | 107 | supercoverLine: (map, p1, p2) -> 108 | 109 | x1 = p1[0] 110 | y1 = p1[1] 111 | x2 = p2[0] 112 | y2 = p2[1] 113 | x = x1 114 | y = y1 115 | dx = x2 - x1 116 | dy = y2 - y1 117 | map.set x1, y1, 1 118 | 119 | if dy < 0 120 | ystep = -1 121 | dy = -dy 122 | else 123 | ystep = 1 124 | 125 | if dx < 0 126 | xstep = -1 127 | dx = -dx 128 | else 129 | xstep = 1 130 | 131 | ddy = 2 * dy # work with double values for full precision 132 | ddx = 2 * dx 133 | 134 | if ddx >= ddy 135 | 136 | # compulsory initialization (even for errorprev, needed when dx==dy) 137 | errorprev = error = dx # start in the middle of the square 138 | 139 | for i in [0 ... 
dx] 140 | 141 | x += xstep 142 | error += ddy 143 | 144 | if error > ddx # increment y if AFTER the middle ( > ) 145 | 146 | y += ystep 147 | error -= ddx 148 | 149 | # three cases (octant == right->right-top for directions below): 150 | if error + errorprev < ddx # bottom square also 151 | map.set x, y-ystep, 1 152 | else if error + errorprev > ddx # left square also 153 | map.set x-xstep, y, 1 154 | else # corner: bottom and left squares also 155 | map.set x, y-ystep, 1 156 | map.set x-xstep, y, 1 157 | 158 | map.set x, y, 1 159 | errorprev = error 160 | 161 | else 162 | 163 | errorprev = error = dy 164 | 165 | for i in [0 ... dy] 166 | 167 | y += ystep 168 | error += ddx 169 | 170 | if error > ddy 171 | 172 | x += xstep 173 | error -= ddy 174 | 175 | if error + errorprev < ddy 176 | map.set x-xstep, y, 1 177 | else if error + errorprev > ddy 178 | map.set x, y-ystep, 1 179 | else 180 | map.set x-xstep, y, 1 181 | map.set x, y-ystep, 1 182 | 183 | map.set x, y, 1 184 | errorprev = error 185 | 186 | getBoundary: (points) -> 187 | 188 | box = new Box2d() 189 | box.updateRange p for p in points 190 | 191 | box 192 | 193 | getLatLngRegion: (points) -> 194 | 195 | box = scutil.getBoundary(points) 196 | 197 | NorthEast: 198 | Lat: box.x_max 199 | Lng: box.y_max 200 | SouthWest: 201 | Lat: box.x_min 202 | Lng: box.y_min 203 | 204 | discretize: (points) -> 205 | 206 | # calculate bound offsets 207 | boundary = scutil.getBoundary points 208 | offsetx = -boundary.x_min 209 | offsety = -boundary.y_min 210 | dx = boundary.dX() 211 | dy = boundary.dY() 212 | 213 | # generate offseted points 214 | polygon = [] 215 | polygon.push [p[0] + offsetx, p[1] + offsety] for p in points 216 | 217 | # generate map 218 | map = ndarray new Uint8Array((dx + 1) * (dy + 1)), [dx + 1, dy + 1] 219 | 220 | # cover border 221 | scutil.supercoverLine map, polygon[0], polygon[polygon.length - 1] 222 | scutil.supercoverLine map, polygon[i - 1], polygon[i] for i in [1 ... 
polygon.length] 223 | 224 | # find a point in polygon 225 | while true 226 | x = Math.floor(Math.random() * dx) 227 | y = Math.floor(Math.random() * dy) 228 | if inside([x, y], polygon) 229 | scutil.floodfillScanline map, x, y 230 | break 231 | 232 | # iterate all points 233 | ret = [] 234 | for i in [0 .. dx] 235 | for j in [0 .. dy] 236 | if map.get(i, j) is 1 237 | ret.push 238 | x: i - offsetx 239 | y: j - offsety 240 | 241 | ret 242 | 243 | module.exports = scutil -------------------------------------------------------------------------------- /src/lib/broadcast.coffee: -------------------------------------------------------------------------------- 1 | async = require 'async' 2 | events = require 'events' 3 | requestFactory = require './requestfactory.js' 4 | 5 | ObjectID = require('mongoskin').ObjectID 6 | 7 | STATUS_PENDING = 0 8 | STATUS_ERROR = 1 9 | STATUS_NOTCOMPLETE = 2 10 | STATUS_COMPLETE = 3 11 | 12 | FETCH_ITEM_COUNT = 50 13 | 14 | class BroadcastTasker 15 | 16 | ### 17 | options: 18 | type: all|faction 19 | splitTimespanMS: int 20 | region: SouthWest {Lat, Lng}, NorthEast {Lat, Lng} 21 | [instanceId]: string 22 | events: 23 | error (err) 24 | complete (err) 25 | receive (obj) 26 | response (done, max) 27 | createtask 28 | taskcreated (preparedLength, allLength) 29 | taskcompleted (taskid) 30 | updatequeue 31 | queueupdated 32 | beforestart 33 | ### 34 | constructor: (options) -> 35 | 36 | @request = requestFactory() 37 | @emitter = new events.EventEmitter() 38 | @options = options 39 | @options.instanceId = Date.now().toString(16) + Math.floor(Math.random() * 0xEFFF + 0x1000).toString(16) if not @options.instanceId? 
40 | 41 | addListener: (event, listener) => 42 | 43 | @emitter.addListener event, listener 44 | 45 | on: (event, listener) => 46 | 47 | @emitter.on event, listener 48 | 49 | once: (event, listener) => 50 | 51 | @emitter.once event, listener 52 | 53 | removeListener: (event, listener) => 54 | 55 | @emitter.removeListener event, listener 56 | 57 | removeAllListeners: (event) => 58 | 59 | @emitter.removeAllListeners event 60 | 61 | start: (timestampMin, timestampMax, continueTask) => 62 | 63 | @tasks = {} 64 | @length = 0 65 | 66 | continueTask = true if not continueTask? 67 | 68 | if not continueTask 69 | 70 | @createTasks timestampMin, timestampMax, @_start 71 | 72 | else 73 | 74 | tsMin = timestampMin 75 | 76 | async.series [ 77 | 78 | (callback) => 79 | 80 | # query queued tasks 81 | Database.db.collection('chat_queue').find({instance: @options.instanceId}).toArray (err, tasks) => 82 | 83 | if tasks? 84 | for task in tasks 85 | @tasks[task._id.toString()] = task 86 | @length++ 87 | 88 | callback() 89 | 90 | (callback) => 91 | 92 | # get last timestamp 93 | Database.db.collection('chat_meta').findOne {_id: @options.instanceId}, (err, meta) -> 94 | 95 | tsMin = meta.timestamp - 10 if meta?.timestamp? 96 | tsMin = timestampMin if tsMin < timestampMin 97 | 98 | callback() 99 | 100 | (callback) => 101 | 102 | # create tasks 103 | @createTasks tsMin, timestampMax, callback 104 | 105 | ], @_start 106 | 107 | createTasks: (timestampMin, timestampMax, taskCreatedCallback) => 108 | 109 | @emitter.emit 'createtask' 110 | 111 | preparedTasks = [] 112 | 113 | for TSmin in [timestampMin..timestampMax] by @options.splitTimespanMS 114 | 115 | TSmax = Math.min(timestampMax, TSmin + @options.splitTimespanMS) 116 | continue if TSmax is TSmin 117 | 118 | if argv.safe 119 | REPEAT_TIMES = 3 120 | else 121 | REPEAT_TIMES = 1 122 | 123 | for i in [1 .. 
  # Reset every queued task for this instance to PENDING in the database,
  # then fire all of them into the shared request queue.  'complete' is
  # emitted either immediately (nothing queued) or by the queue's drain
  # handler once the last outstanding request has finished.
  _start: =>

    taskList = []
    taskList.push taskId for taskId of @tasks

    # nothing queued for this instance: we are already done
    if taskList.length is 0
      @emitter.emit 'complete'
      return

    @emitter.emit 'updatequeue'
    async.eachLimit taskList, Config.Database.MaxParallel, (taskId, callback) =>

      # mark the task pending again so an interrupted run can be resumed
      @tasks[taskId].status = STATUS_PENDING

      Database.db.collection('chat_queue').update
        _id: new ObjectID(taskId)
      ,
        $set:
          status: STATUS_PENDING
      , callback

    , (err) =>

      @emitter.emit 'queueupdated'

      # the request queue draining means every task has completed
      @request.queue.drain = =>
        @emitter.emit 'complete'

      @emitter.emit 'beforestart'

      @requestTask taskId for taskId in taskList
'getPlexts' 202 | data: d 203 | onSuccess: (response, callback) => 204 | 205 | @emitter.emit 'receive', response.result 206 | @parseChatResponse taskId, response.result, callback 207 | 208 | onError: (err, callback) => 209 | 210 | @emitter.emit 'error', err 211 | @requestTask taskId 212 | callback() 213 | 214 | afterResponse: (callback) => 215 | 216 | @emitter.emit 'response', d, @request.done, @request.max 217 | callback() 218 | 219 | parseChatResponse: (taskId, response, parseCompleteCallback) => 220 | 221 | if response.length < FETCH_ITEM_COUNT 222 | 223 | # no more messages: remove task 224 | delete @tasks[taskId] 225 | @length-- 226 | 227 | Database.db.collection('chat_queue').remove 228 | _id: new ObjectID(taskId) 229 | , 230 | single: true 231 | , => 232 | @emitter.emit 'taskcompleted', taskId 233 | parseCompleteCallback() 234 | 235 | else 236 | 237 | # records are in descend order. 238 | # set the maxtimestamp equal to mintimestamp of the response 239 | maxTimestamp = parseInt(response[response.length - 1][1]) - 1 240 | 241 | @tasks[taskId].data.maxTimestampMs = maxTimestamp 242 | @tasks[taskId].status = STATUS_NOTCOMPLETE 243 | 244 | Database.db.collection('chat_queue').update 245 | _id: new ObjectID(taskId) 246 | , 247 | $set: 248 | status: STATUS_NOTCOMPLETE 249 | 'data.maxTimestampMs': maxTimestamp 250 | , (err) => 251 | 252 | # insert into queue again 253 | @requestTask taskId 254 | parseCompleteCallback() 255 | 256 | module.exports = (options) -> 257 | 258 | return new BroadcastTasker options 259 | -------------------------------------------------------------------------------- /src/lib/utils.js: -------------------------------------------------------------------------------- 1 | var Utils = GLOBAL.Utils = { 2 | 3 | extractIntelData: function(jsSource) { 4 | // To stay the same with IITC, we don't extract essential data directly, 5 | // instead, we build a virual environment, then call IITC functions. 
6 | // 7 | // Because of there are no `window` object in NodeJS, we need to expose 8 | // those global variables in order to let IITC functions work without 9 | // any modification. 10 | var window = {}; 11 | var source = jsSource; 12 | 13 | // extract global variables 14 | var globalVars = []; 15 | 16 | var esprima = require('esprima'); 17 | var escope = require('escope'); 18 | 19 | var tree = esprima.parse(source); 20 | globalScope = escope.analyze(tree).scopes[0]; 21 | globalScope.variables.forEach(function (v) { 22 | globalVars.push(v.identifiers[0].name); 23 | }); 24 | 25 | // expose global variables 26 | globalVars.forEach(function(name) { 27 | source = source + ';window.' + name + ' = ' + name + ';'; 28 | }); 29 | 30 | // stimulate Google Map object 31 | source = 'var google={maps:{OverlayView:function(){},Marker:function(){},InfoWindow:function(){}}};' + source; 32 | source = 'var IS_TABLET=false;' + source; 33 | 34 | // execute JavaScript 35 | eval(source); 36 | Utils.extractFromStock(window); 37 | 38 | if (window.niantic_params.CURRENT_VERSION == undefined) { 39 | throw new Error('Failed to extract version'); 40 | } 41 | 42 | return window.niantic_params; 43 | }, 44 | 45 | // from IITC code 46 | extractFromStock: function(window) { 47 | var niantic_params = window.niantic_params = {}; 48 | 49 | // extract the former nemesis.dashboard.config.CURRENT_VERSION from the code 50 | var reVersion = new RegExp('"X-CSRFToken".*[a-z].v="([a-f0-9]{40})";'); 51 | 52 | var minified = new RegExp('^[a-zA-Z$][a-zA-Z$0-9]?$'); 53 | 54 | for (var topLevel in window) { 55 | if (minified.test(topLevel)) { 56 | // a minified object - check for minified prototype entries 57 | 58 | var topObject = window[topLevel]; 59 | if (topObject && topObject.prototype) { 60 | // the object has a prototype - iterate through the properties of that 61 | for (var secLevel in topObject.prototype) { 62 | if (minified.test(secLevel)) { 63 | // looks like we've found an object of the format 
"XX.prototype.YY"... 64 | var item = topObject.prototype[secLevel]; 65 | 66 | if (item && typeof(item) == "function") { 67 | // a function - test it against the relevant regular expressions 68 | var funcStr = item.toString(); 69 | var match = reVersion.exec(funcStr); 70 | if (match) { 71 | console.log('Found former CURRENT_VERSION in '+topLevel+'.prototype.'+secLevel); 72 | niantic_params.CURRENT_VERSION = match[1]; 73 | } 74 | } 75 | } 76 | } 77 | } //end 'if .prototype' 78 | 79 | if (topObject && Array.isArray && Array.isArray(topObject)) { 80 | // find all non-zero length arrays containing just numbers 81 | if (topObject.length>0) { 82 | var justInts = true; 83 | for (var i=0; i= 12 && topObject.length <= 18) { 91 | if (topObject[0] == 8) { 92 | var decreasing = true; 93 | for (var i=1; i= 9000 && topObject[topObject.length-1] <= 64000) { 105 | var increasing = true; 106 | for (var i=1; i topObject[i]) { 108 | increasing = false; 109 | break; 110 | } 111 | } 112 | if (increasing) { 113 | console.log ('int array '+topLevel+' looks like TILES_PER_EDGE: '+JSON.stringify(topObject)); 114 | niantic_params.TILES_PER_EDGE = topObject; 115 | } 116 | } //end if (topObject[topObject.length-1] == 9000) { 117 | 118 | } 119 | } 120 | } 121 | } 122 | } 123 | } 124 | }, 125 | 126 | getMapZoomTileParameters: function(zoom) { 127 | // these arrays/constants are based on those in the stock intel site. it's essential we keep them in sync with their code 128 | // (it may be worth reading the values from their code rather than using our own copies? 
it's a case of either 129 | // breaking if they rename their variables if we do, or breaking if they change the values if we don't) 130 | var ZOOM_TO_TILES_PER_EDGE = [1, 1, 1, 40, 40, 80, 80, 320, 1000, 2000, 2000, 4000, 8000, 16000, 16000, 32000]; 131 | var MAX_TILES_PER_EDGE = 32000; 132 | var ZOOM_TO_LEVEL = [ 8, 8, 8, 8, 7, 7, 7, 6, 6, 5, 4, 4, 3, 2, 2, 1, 1 ]; 133 | 134 | return { 135 | level: ZOOM_TO_LEVEL[zoom] || 0, // default to level 0 (all portals) if not in array 136 | tilesPerEdge: ZOOM_TO_TILES_PER_EDGE[zoom] || MAX_TILES_PER_EDGE, 137 | zoom: zoom // include the zoom level, for reference 138 | }; 139 | }, 140 | 141 | lngToTile: function(lng, params) { 142 | return Math.floor((lng + 180) / 360 * params.tilesPerEdge); 143 | }, 144 | 145 | latToTile: function(lat, params) { 146 | return Math.floor((1 - Math.log(Math.tan(lat * Math.PI / 180) + 147 | 1 / Math.cos(lat * Math.PI / 180)) / Math.PI) / 2 * params.tilesPerEdge); 148 | }, 149 | 150 | tileToLng: function(x, params) { 151 | return x / params.tilesPerEdge * 360 - 180; 152 | }, 153 | 154 | tileToLat: function(y, params) { 155 | var n = Math.PI - 2 * Math.PI * y / params.tilesPerEdge; 156 | return 180 / Math.PI * Math.atan(0.5 * (Math.exp(n) - Math.exp(-n))); 157 | }, 158 | 159 | pointToTileId: function(params, x, y) { 160 | //change to quadkey construction 161 | //as of 2014-05-06: zoom_x_y_minlvl_maxlvl_maxhealth 162 | 163 | return params.zoom + "_" + x + "_" + y + "_" + params.level + "_8_100"; 164 | }, 165 | 166 | clampLat: function(lat) { 167 | // the map projection used does not handle above approx +- 85 degrees north/south of the equator 168 | if (lat > 85.051128) 169 | lat = 85.051128; 170 | else if (lat < -85.051128) 171 | lat = -85.051128; 172 | return lat; 173 | }, 174 | 175 | clampLng: function(lng) { 176 | if (lng > 179.999999) 177 | lng = 179.999999; 178 | else if (lng < -180.0) 179 | lng = -180.0; 180 | return lng; 181 | }, 182 | 183 | clampLatLng: function(latlng) { 184 | return new 
L.LatLng ( Utils.clampLat(latlng.lat), Utils.clampLng(latlng.lng) ); 185 | }, 186 | 187 | clampLatLngBounds: function(bounds) { 188 | return new L.LatLngBounds ( Utils.clampLatLng(bounds.getSouthWest()), Utils.clampLatLng(bounds.getNorthEast()) ); 189 | }, 190 | 191 | //$.extend 192 | extend: function() { 193 | 194 | for (var i = 1; i < arguments.length; i++) 195 | for (var key in arguments[i]) 196 | if (arguments[i].hasOwnProperty(key)) 197 | arguments[0][key] = arguments[i][key]; 198 | 199 | return arguments[0]; 200 | 201 | }, 202 | 203 | } 204 | -------------------------------------------------------------------------------- /src/lib/requestfactory.coffee: -------------------------------------------------------------------------------- 1 | delayedQueue = require './delayedqueue.js' 2 | zlib = require 'zlib' 3 | async = require 'async' 4 | request = require 'request' 5 | 6 | delayedRequestQueue = new delayedQueue (task) -> 7 | 8 | task() 9 | 10 | , Config.Request.MinIntervalMS 11 | 12 | delayedRequest = 13 | 14 | post: (options, callback) -> 15 | 16 | delayedRequestQueue.push -> 17 | request.post options, callback 18 | 19 | get: (options, callback) -> 20 | 21 | delayedRequestQueue.push -> 22 | request.get options, callback 23 | 24 | entry = -> new RequestFactory() 25 | entry.sessions = [] 26 | 27 | if argv.cookie? 
28 | cookieRaw = argv.cookie 29 | else 30 | cookieRaw = Config.Auth.CookieRaw 31 | 32 | # turn into an array 33 | cookieRaw = [cookieRaw] if typeof cookieRaw is 'string' 34 | 35 | for cookies, index in cookieRaw 36 | 37 | map = {} 38 | jar = request.jar() 39 | 40 | for cookie in cookies.split(';') 41 | 42 | cookie = cookie.trim() 43 | continue if cookie.length is 0 44 | 45 | jar.setCookie request.cookie(cookie), 'https://www.ingress.com' if cookie.length isnt 0 46 | 47 | pair = cookie.split '=' 48 | map[pair[0]] = unescape pair[1] 49 | 50 | entry.sessions.push 51 | index: index 52 | cookies: map 53 | jar: jar 54 | 55 | class RequestFactory 56 | 57 | constructor: -> 58 | 59 | @max = 0 60 | @done = 0 61 | @ignoreMungeError = false 62 | 63 | @queue = async.queue (task, callback) => 64 | 65 | task.before => 66 | 67 | @post '/r/' + task.m, task.d, (error, response, body) => 68 | 69 | if error 70 | console.log error.stack 71 | 72 | if task.emitted? 73 | console.warn '[DEBUG] Ignored reemitted event' 74 | return 75 | 76 | task.emitted = true 77 | 78 | @done++ 79 | 80 | if not error 81 | if body.error? 82 | if body.error is 'missing version' and not @ignoreMungeError 83 | logger.error '[Request] Failed to request using current munge data.' 
84 | process.exit 0 85 | return 86 | 87 | error = new Error(body.error) 88 | else 89 | error = @processResponse error, response, body 90 | 91 | if error 92 | 93 | task.error error, -> 94 | task.response -> 95 | callback() 96 | 97 | return 98 | 99 | task.success body, -> 100 | task.response -> 101 | callback() 102 | 103 | , Config.Request.MaxParallel 104 | 105 | generate: (options) => 106 | 107 | versionStr = Munges.Data[Munges.ActiveSet].CURRENT_VERSION 108 | post_data = Utils.extend({v: versionStr, b: "", c: ""}, options.data) 109 | 110 | return { 111 | #m: methodName 112 | m: options.action 113 | d: post_data 114 | before: options.beforeRequest || (callback) -> callback() 115 | success: options.onSuccess || (body, callback) -> callback() 116 | error: options.onError || (error, callback) -> callback() 117 | response: options.afterResponse || (callback) -> callback() 118 | } 119 | 120 | push: (options) => 121 | 122 | @max++ 123 | task = @generate options 124 | @queue.push task 125 | 126 | unshift: (options) => 127 | 128 | @max++ 129 | task = @generate options 130 | @queue.unshift task 131 | 132 | post: (url, data, callback, session) => 133 | 134 | session = entry.sessions[Math.floor(Math.random() * entry.sessions.length)] if not session? 
135 | 136 | delayedRequest.post 137 | 138 | url: 'https://www.ingress.com' + url 139 | body: JSON.stringify data 140 | jar: session.jar 141 | proxy: null || argv.proxy 142 | maxSockets: 50 143 | encoding: null 144 | timeout: 90000 145 | headers: 146 | 'Accept': 'application/json, text/javascript, */*; q=0.01' 147 | 'Accept-Encoding': 'gzip,deflate' 148 | 'Content-type': 'application/json; charset=utf-8' 149 | 'Origin': 'https://www.ingress.com' 150 | 'Referer': 'https://www.ingress.com/intel' 151 | 'User-Agent': Config.Request.UserAgent 152 | 'X-CSRFToken': session.cookies.csrftoken 153 | 154 | , @_gzipDecode @_jsonDecode callback 155 | 156 | get: (url, callback, session) => 157 | 158 | session = entry.sessions[Math.floor(Math.random() * entry.sessions.length)] if not session? 159 | 160 | delayedRequest.get 161 | 162 | url: 'https://www.ingress.com' + url 163 | jar: session.jar 164 | proxy: null || argv.proxy 165 | maxSockets: 50 166 | encoding: null 167 | timeout: 90000 168 | headers: 169 | 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' 170 | 'Accept-Encoding': 'gzip,deflate' 171 | 'Cache-Control': 'max-age=0' 172 | 'Origin': 'https://www.ingress.com' 173 | 'Referer': 'https://www.ingress.com/intel' 174 | 'User-Agent': Config.Request.UserAgent 175 | 176 | , @_gzipDecode callback 177 | 178 | processResponse: (error, response, body) -> 179 | 180 | if typeof body is 'string' 181 | 182 | if body.indexOf('CSRF verification failed. Request aborted.') > -1 183 | logger.error '[Auth] CSRF verification failed. Please make sure that the cookie is right.' 184 | process.exit 0 185 | return false 186 | 187 | if body.indexOf('Sign in') > -1 or body.indexOf('User not authenticated') > -1 188 | logger.error '[Auth] Authentication failed. Please update the cookie.' 189 | process.exit 0 190 | return false 191 | 192 | if body.indexOf('NIA takedown in progress. Stand by.') > -1 193 | return new Error 'Request rejected by Ingress Server.' 
194 | 195 | if body.indexOf('but your computer or network may be sending automated queries') > -1 196 | return new Error 'Request rejected. Please try changing IP.' 197 | 198 | if body.trim().length is 0 199 | return new Error 'Empty server response. Please try adding more accounts.' 200 | 201 | return new Error 'unknown server response' 202 | 203 | return null 204 | 205 | _gzipDecode: (callback) -> 206 | 207 | return (error, response, buffer) -> 208 | 209 | if error? 210 | callback error, response 211 | return 212 | 213 | if response.headers['content-encoding']? 214 | 215 | encoding = response.headers['content-encoding'] 216 | 217 | if encoding is 'gzip' 218 | 219 | zlib.gunzip buffer, (err, body) -> 220 | callback err, response, body && body.toString() 221 | return 222 | 223 | else if encoding is 'deflate' 224 | 225 | zlib.inflate buffer, (err, body) -> 226 | callback err, response, body && body.toString() 227 | return 228 | 229 | callback error, response, buffer && buffer.toString() 230 | 231 | _jsonDecode: (callback) -> 232 | 233 | return (error, response, body) -> 234 | 235 | if error? 236 | callback error, response 237 | return 238 | 239 | if response.headers['content-type']? 
240 | 241 | if response.headers['content-type'].indexOf('json') > -1 242 | 243 | try 244 | decoded = JSON.parse body 245 | catch err 246 | callback err, response, body 247 | return 248 | 249 | callback err, response, decoded 250 | return 251 | 252 | callback error, response, body 253 | 254 | 255 | 256 | module.exports = entry 257 | -------------------------------------------------------------------------------- /src/plugins/request-portals.coffee: -------------------------------------------------------------------------------- 1 | async = require 'async' 2 | scutil = require '../lib/scutil.js' 3 | requestFactory = require '../lib/requestfactory.js' 4 | request = requestFactory() 5 | 6 | timeoutTiles = [] 7 | failTiles = [] 8 | panicTiles = [] 9 | 10 | STATUS_PENDING = 0 11 | STATUS_REQUESTING = 1 12 | STATUS_TIMEOUT = 2 13 | STATUS_FAIL = 3 14 | STATUS_PANIC = 4 15 | STATUS_COMPLETE = 5 16 | 17 | module.exports = 18 | 19 | onBootstrap: (callback) -> 20 | 21 | if argv.portals 22 | bootstrap -> 23 | callback 'end' 24 | else 25 | callback() 26 | 27 | bootstrap = (callback) -> 28 | 29 | if argv.fast 30 | async.series [ 31 | Tile.prepareFast 32 | Tile.start 33 | ], callback 34 | else if argv.new or argv.n 35 | async.series [ 36 | Tile.prepareNew 37 | Tile.start 38 | ], callback 39 | else 40 | async.series [ 41 | if argv.detail isnt 'false' then Entity.requestMissingPortals else (c) -> c() 42 | Tile.prepareFromDatabase 43 | Tile.start 44 | ], callback 45 | 46 | tileBucket = async.cargo (tiles, callback) -> 47 | 48 | Tile.data[id].status = STATUS_PENDING for id in tiles 49 | 50 | data = 51 | tileKeys: tiles 52 | 53 | # reset status in database 54 | async.eachLimit tiles, Config.Database.MaxParallel, (id, callback) -> 55 | 56 | Database.db.collection('Tiles').update 57 | _id: id 58 | , 59 | $set: 60 | status: STATUS_PENDING 61 | , 62 | upsert: true 63 | , callback 64 | 65 | , (err) -> 66 | # onFinish 67 | 68 | t1 = 0 69 | t2 = 0 70 | 71 | request.push 72 | 73 | action: 
'getEntities' 74 | data: data 75 | beforeRequest: (callback) -> 76 | 77 | t1 = Date.now() 78 | callback() 79 | 80 | onSuccess: (response, callback) -> 81 | 82 | t2 = Date.now() 83 | processSuccessTileResponse response, tiles, callback 84 | 85 | onError: (err, callback) -> 86 | 87 | logger.error "[Portals] #{err.message}" 88 | 89 | t2 = Date.now() 90 | processErrorTileResponse tiles, callback 91 | 92 | afterResponse: (callback) -> 93 | 94 | checkTimeoutAndFailTiles -> 95 | 96 | logger.info "[Portals] " + 97 | Math.round(request.done / request.max * 100).toString() + 98 | "%\t[#{request.done}/#{request.max}]\t#{t2 - t1}ms" + 99 | "\t#{Entity.counter.portals} portals, #{Entity.counter.links} links, #{Entity.counter.fields} fields" 100 | 101 | callback() 102 | 103 | callback() 104 | 105 | , Config.TileBucket.Max 106 | 107 | Tile = 108 | 109 | list: [] 110 | data: {} 111 | 112 | # calculate region tiles 113 | calculateTileKeys: -> 114 | 115 | tileParams = Utils.getMapZoomTileParameters Config.ZoomLevel 116 | 117 | polygon = [] 118 | for latlng in Config.Region 119 | polygon.push [ 120 | Utils.latToTile(Utils.clampLat(latlng[0]), tileParams) 121 | Utils.lngToTile(Utils.clampLng(latlng[1]), tileParams) 122 | ] 123 | 124 | tiles = scutil.discretize polygon 125 | ret = [] 126 | 127 | for tile in tiles 128 | tileId = Utils.pointToTileId tileParams, tile.y, tile.x 129 | ret.push tileId 130 | 131 | ret 132 | 133 | prepareFromDatabase: (callback) -> 134 | 135 | # get all tiles 136 | tiles = Tile.calculateTileKeys() 137 | completedTiles = {} 138 | 139 | logger.info "[Portals] Querying #{tiles.length} tile status..." 140 | 141 | tileIdsBucket = [] 142 | bucketSize = 100 143 | for i in [0 ... tiles.length] by bucketSize 144 | tileIdsBucket.push tiles[i ... 
i + bucketSize] 145 | 146 | async.eachLimit tileIdsBucket, Config.Database.MaxParallel, (tileIds, callback) -> 147 | # find this tile in the database 148 | Database.db.collection('Tiles').find 149 | _id: 150 | $in: tileIds 151 | status: STATUS_COMPLETE 152 | .toArray (err, _tiles) -> 153 | 154 | # tile exists: it is downloaded, ignore. 155 | return callback err if err 156 | completedTiles[_t._id] = true for _t in _tiles 157 | callback() 158 | 159 | , (err) -> 160 | 161 | # which tile is not downloaded 162 | for id in tiles 163 | Tile.list.push id if not completedTiles[id] 164 | 165 | Tile.prepareTiles callback 166 | 167 | prepareFast: (callback) -> 168 | 169 | # get all tiles that has portals 170 | 171 | tiles = Tile.calculateTileKeys() 172 | completedTiles = {} 173 | 174 | logger.info "[Portals] Querying #{tiles.length} tile status..." 175 | 176 | tileIdsBucket = [] 177 | bucketSize = 100 178 | for i in [0 ... tiles.length] by bucketSize 179 | tileIdsBucket.push tiles[i ... i + bucketSize] 180 | 181 | async.eachLimit tileIdsBucket, Config.Database.MaxParallel, (tileIds, callback) -> 182 | 183 | Database.db.collection('Tiles').find 184 | _id: 185 | $in: tileIds 186 | portals: 0 187 | .toArray (err, _tiles) -> 188 | 189 | return callback err if err 190 | completedTiles[_t._id] = true for _t in _tiles 191 | callback() 192 | 193 | , (err) -> 194 | 195 | for id in tiles 196 | Tile.list.push id if not completedTiles[id] 197 | 198 | Tile.prepareTiles callback 199 | 200 | prepareNew: (callback) -> 201 | 202 | tiles = Tile.calculateTileKeys() 203 | Tile.list.push id for id in tiles 204 | 205 | Tile.prepareTiles callback 206 | 207 | prepareTiles: (callback) -> 208 | 209 | logger.info "[Portals] Prepared #{Tile.list.length} tiles" 210 | 211 | Database.db.collection('Tiles').ensureIndex {status: 1}, -> 212 | 213 | for id in Tile.list 214 | 215 | Tile.data[id] = 216 | status: STATUS_PENDING 217 | fails: 0 218 | errors: 0 219 | portals: 0 220 | 221 | callback && callback() 222 
| 223 | start: (callback) -> 224 | 225 | if Tile.list.length is 0 226 | logger.info "[Portals] Nothing to request" 227 | return callback() 228 | 229 | logger.info "[Portals] Begin requesting..." 230 | 231 | # push each tile into buckets and request them 232 | tileBucket.push id for id in Tile.list 233 | 234 | request.queue.drain = callback 235 | 236 | processSuccessTileResponse = (response, tiles, callback) -> 237 | 238 | # invalid response 239 | if not response.result?.map? 240 | return processErrorTileResponse tiles, callback 241 | 242 | list = [] 243 | list.push {id: id, tile: tileValue} for id, tileValue of response.result.map 244 | 245 | async.eachLimit list, Config.Database.MaxParallel, (t, callback) -> 246 | 247 | ((update) -> 248 | 249 | if t.tile.error? and Tile.data[t.id].status is STATUS_PENDING 250 | 251 | # FAIL / TIMEOUT 252 | if t.tile.error is 'TIMEOUT' 253 | 254 | Tile.data[t.id].status = STATUS_TIMEOUT 255 | timeoutTiles.push t.id 256 | 257 | else 258 | 259 | Tile.data[t.id].status = STATUS_FAIL 260 | Tile.data[t.id].fails++ 261 | 262 | if Tile.data[t.id].fails > Config.Tiles.MaxFailRetry 263 | 264 | logger.error "PANIC: tile id=#{t.id}" 265 | Tile.data[t.id].status = STATUS_PANIC # no more try 266 | panicTiles.push t.id 267 | 268 | else 269 | 270 | failTiles.push t.id 271 | 272 | return update() 273 | 274 | else 275 | 276 | Tile.data[t.id].status = STATUS_COMPLETE 277 | Tile.data[t.id].portals = 0 278 | 279 | if t.tile.gameEntities?
280 | 281 | async.each t.tile.gameEntities, (entity, callback) -> 282 | 283 | Entity.add entity[0], entity[1], entity[2], (type) -> 284 | Tile.data[t.id].portals++ if type is 'portal' 285 | callback() 286 | 287 | , (err) -> 288 | 289 | return update() 290 | 291 | else 292 | 293 | return update() 294 | 295 | ) () -> 296 | 297 | updater = 298 | $set: 299 | status: Tile.data[t.id].status 300 | portals: Tile.data[t.id].portals 301 | 302 | Database.db.collection('Tiles').update {_id: t.id}, updater, callback 303 | 304 | , callback 305 | 306 | processErrorTileResponse = (tiles, callback) -> 307 | 308 | for id in tiles 309 | 310 | if Tile.data[id].status is STATUS_PENDING 311 | 312 | Tile.data[id].status = STATUS_FAIL 313 | Tile.data[id].errors++ 314 | 315 | if Tile.data[id].errors > Config.Tile.MaxErrorRetry 316 | 317 | logger.error "PANIC: tile id=#{id}" 318 | Tile.data[id].status = STATUS_PANIC 319 | panicTiles.push id 320 | 321 | else 322 | 323 | failTiles.push id 324 | 325 | async.eachLimit tiles, Config.Database.MaxParallel, (id, callback) -> 326 | 327 | Database.db.collection('Tiles').update 328 | _id: id 329 | , 330 | $set: 331 | status: Tile.data[id].status 332 | , callback 333 | 334 | , callback 335 | 336 | checkTimeoutAndFailTiles = (callback) -> 337 | 338 | tileBucket.push id for id in timeoutTiles 339 | tileBucket.push id for id in failTiles 340 | 341 | timeoutTiles = [] 342 | failTiles = [] 343 | 344 | callback() 345 | -------------------------------------------------------------------------------- /src/lib/leaflet.js: -------------------------------------------------------------------------------- 1 | var L = GLOBAL.L = {}; 2 | 3 | /* 4 | * L.Util contains various utility functions used throughout Leaflet code. 
5 | */ 6 | 7 | L.Util = { 8 | extend: function (dest) { // (Object[, Object, ...]) -> 9 | var sources = Array.prototype.slice.call(arguments, 1), 10 | i, j, len, src; 11 | 12 | for (j = 0, len = sources.length; j < len; j++) { 13 | src = sources[j] || {}; 14 | for (i in src) { 15 | if (src.hasOwnProperty(i)) { 16 | dest[i] = src[i]; 17 | } 18 | } 19 | } 20 | return dest; 21 | }, 22 | 23 | bind: function (fn, obj) { // (Function, Object) -> Function 24 | var args = arguments.length > 2 ? Array.prototype.slice.call(arguments, 2) : null; 25 | return function () { 26 | return fn.apply(obj, args || arguments); 27 | }; 28 | }, 29 | 30 | stamp: (function () { 31 | var lastId = 0, 32 | key = '_leaflet_id'; 33 | return function (obj) { 34 | obj[key] = obj[key] || ++lastId; 35 | return obj[key]; 36 | }; 37 | }()), 38 | 39 | invokeEach: function (obj, method, context) { 40 | var i, args; 41 | 42 | if (typeof obj === 'object') { 43 | args = Array.prototype.slice.call(arguments, 3); 44 | 45 | for (i in obj) { 46 | method.apply(context, [i, obj[i]].concat(args)); 47 | } 48 | return true; 49 | } 50 | 51 | return false; 52 | }, 53 | 54 | limitExecByInterval: function (fn, time, context) { 55 | var lock, execOnUnlock; 56 | 57 | return function wrapperFn() { 58 | var args = arguments; 59 | 60 | if (lock) { 61 | execOnUnlock = true; 62 | return; 63 | } 64 | 65 | lock = true; 66 | 67 | setTimeout(function () { 68 | lock = false; 69 | 70 | if (execOnUnlock) { 71 | wrapperFn.apply(context, args); 72 | execOnUnlock = false; 73 | } 74 | }, time); 75 | 76 | fn.apply(context, args); 77 | }; 78 | }, 79 | 80 | falseFn: function () { 81 | return false; 82 | }, 83 | 84 | formatNum: function (num, digits) { 85 | var pow = Math.pow(10, digits || 5); 86 | return Math.round(num * pow) / pow; 87 | }, 88 | 89 | trim: function (str) { 90 | return str.trim ? 
str.trim() : str.replace(/^\s+|\s+$/g, ''); 91 | }, 92 | 93 | splitWords: function (str) { 94 | return L.Util.trim(str).split(/\s+/); 95 | }, 96 | 97 | setOptions: function (obj, options) { 98 | obj.options = L.extend({}, obj.options, options); 99 | return obj.options; 100 | }, 101 | 102 | getParamString: function (obj, existingUrl, uppercase) { 103 | var params = []; 104 | for (var i in obj) { 105 | params.push(encodeURIComponent(uppercase ? i.toUpperCase() : i) + '=' + encodeURIComponent(obj[i])); 106 | } 107 | return ((!existingUrl || existingUrl.indexOf('?') === -1) ? '?' : '&') + params.join('&'); 108 | }, 109 | 110 | compileTemplate: function (str, data) { 111 | // based on https://gist.github.com/padolsey/6008842 112 | str = str.replace(/\{ *([\w_]+) *\}/g, function (str, key) { 113 | return '" + o["' + key + '"]' + (typeof data[key] === 'function' ? '(o)' : '') + ' + "'; 114 | }); 115 | // jshint evil: true 116 | return new Function('o', 'return "' + str + '";'); 117 | }, 118 | 119 | template: function (str, data) { 120 | var cache = L.Util._templateCache = L.Util._templateCache || {}; 121 | cache[str] = cache[str] || L.Util.compileTemplate(str, data); 122 | return cache[str](data); 123 | }, 124 | 125 | isArray: Array.isArray || function (obj) { 126 | return (Object.prototype.toString.call(obj) === '[object Array]'); 127 | }, 128 | 129 | emptyImageUrl: 'data:image/gif;base64,R0lGODlhAQABAAD/ACwAAAAAAQABAAACADs=' 130 | }; 131 | 132 | // shortcuts for most used utility functions 133 | L.extend = L.Util.extend; 134 | L.bind = L.Util.bind; 135 | L.stamp = L.Util.stamp; 136 | L.setOptions = L.Util.setOptions; 137 | 138 | /* 139 | * L.LatLng represents a geographical point with latitude and longitude coordinates. 
140 | */ 141 | 142 | L.LatLng = function (rawLat, rawLng) { // (Number, Number) 143 | var lat = parseFloat(rawLat), 144 | lng = parseFloat(rawLng); 145 | 146 | if (isNaN(lat) || isNaN(lng)) { 147 | throw new Error('Invalid LatLng object: (' + rawLat + ', ' + rawLng + ')'); 148 | } 149 | 150 | this.lat = lat; 151 | this.lng = lng; 152 | }; 153 | 154 | L.extend(L.LatLng, { 155 | DEG_TO_RAD: Math.PI / 180, 156 | RAD_TO_DEG: 180 / Math.PI, 157 | MAX_MARGIN: 1.0E-9 // max margin of error for the "equals" check 158 | }); 159 | 160 | L.LatLng.prototype = { 161 | equals: function (obj) { // (LatLng) -> Boolean 162 | if (!obj) { return false; } 163 | 164 | obj = L.latLng(obj); 165 | 166 | var margin = Math.max( 167 | Math.abs(this.lat - obj.lat), 168 | Math.abs(this.lng - obj.lng)); 169 | 170 | return margin <= L.LatLng.MAX_MARGIN; 171 | }, 172 | 173 | toString: function (precision) { // (Number) -> String 174 | return 'LatLng(' + 175 | L.Util.formatNum(this.lat, precision) + ', ' + 176 | L.Util.formatNum(this.lng, precision) + ')'; 177 | }, 178 | 179 | // Haversine distance formula, see http://en.wikipedia.org/wiki/Haversine_formula 180 | // TODO move to projection code, LatLng shouldn't know about Earth 181 | distanceTo: function (other) { // (LatLng) -> Number 182 | other = L.latLng(other); 183 | 184 | var R = 6378137, // earth radius in meters 185 | d2r = L.LatLng.DEG_TO_RAD, 186 | dLat = (other.lat - this.lat) * d2r, 187 | dLon = (other.lng - this.lng) * d2r, 188 | lat1 = this.lat * d2r, 189 | lat2 = other.lat * d2r, 190 | sin1 = Math.sin(dLat / 2), 191 | sin2 = Math.sin(dLon / 2); 192 | 193 | var a = sin1 * sin1 + sin2 * sin2 * Math.cos(lat1) * Math.cos(lat2); 194 | 195 | return R * 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a)); 196 | }, 197 | 198 | wrap: function (a, b) { // (Number, Number) -> LatLng 199 | var lng = this.lng; 200 | 201 | a = a || -180; 202 | b = b || 180; 203 | 204 | lng = (lng + b) % (b - a) + (lng < a || lng === b ? 
b : a); 205 | 206 | return new L.LatLng(this.lat, lng); 207 | } 208 | }; 209 | 210 | L.latLng = function (a, b) { // (LatLng) or ([Number, Number]) or (Number, Number) 211 | if (a instanceof L.LatLng) { 212 | return a; 213 | } 214 | if (L.Util.isArray(a)) { 215 | if (typeof a[0] === 'number' || typeof a[0] === 'string') { 216 | return new L.LatLng(a[0], a[1]); 217 | } else { 218 | return null; 219 | } 220 | } 221 | if (a === undefined || a === null) { 222 | return a; 223 | } 224 | if (typeof a === 'object' && 'lat' in a) { 225 | return new L.LatLng(a.lat, 'lng' in a ? a.lng : a.lon); 226 | } 227 | if (b === undefined) { 228 | return null; 229 | } 230 | return new L.LatLng(a, b); 231 | }; 232 | 233 | /* 234 | * L.LatLngBounds represents a rectangular area on the map in geographical coordinates. 235 | */ 236 | 237 | L.LatLngBounds = function (southWest, northEast) { // (LatLng, LatLng) or (LatLng[]) 238 | if (!southWest) { return; } 239 | 240 | var latlngs = northEast ? [southWest, northEast] : southWest; 241 | 242 | for (var i = 0, len = latlngs.length; i < len; i++) { 243 | this.extend(latlngs[i]); 244 | } 245 | }; 246 | 247 | L.LatLngBounds.prototype = { 248 | // extend the bounds to contain the given point or bounds 249 | extend: function (obj) { // (LatLng) or (LatLngBounds) 250 | if (!obj) { return this; } 251 | 252 | var latLng = L.latLng(obj); 253 | if (latLng !== null) { 254 | obj = latLng; 255 | } else { 256 | obj = L.latLngBounds(obj); 257 | } 258 | 259 | if (obj instanceof L.LatLng) { 260 | if (!this._southWest && !this._northEast) { 261 | this._southWest = new L.LatLng(obj.lat, obj.lng); 262 | this._northEast = new L.LatLng(obj.lat, obj.lng); 263 | } else { 264 | this._southWest.lat = Math.min(obj.lat, this._southWest.lat); 265 | this._southWest.lng = Math.min(obj.lng, this._southWest.lng); 266 | 267 | this._northEast.lat = Math.max(obj.lat, this._northEast.lat); 268 | this._northEast.lng = Math.max(obj.lng, this._northEast.lng); 269 | } 270 | } else if 
(obj instanceof L.LatLngBounds) { 271 | this.extend(obj._southWest); 272 | this.extend(obj._northEast); 273 | } 274 | return this; 275 | }, 276 | 277 | // extend the bounds by a percentage 278 | pad: function (bufferRatio) { // (Number) -> LatLngBounds 279 | var sw = this._southWest, 280 | ne = this._northEast, 281 | heightBuffer = Math.abs(sw.lat - ne.lat) * bufferRatio, 282 | widthBuffer = Math.abs(sw.lng - ne.lng) * bufferRatio; 283 | 284 | return new L.LatLngBounds( 285 | new L.LatLng(sw.lat - heightBuffer, sw.lng - widthBuffer), 286 | new L.LatLng(ne.lat + heightBuffer, ne.lng + widthBuffer)); 287 | }, 288 | 289 | getCenter: function () { // -> LatLng 290 | return new L.LatLng( 291 | (this._southWest.lat + this._northEast.lat) / 2, 292 | (this._southWest.lng + this._northEast.lng) / 2); 293 | }, 294 | 295 | getSouthWest: function () { 296 | return this._southWest; 297 | }, 298 | 299 | getNorthEast: function () { 300 | return this._northEast; 301 | }, 302 | 303 | getNorthWest: function () { 304 | return new L.LatLng(this.getNorth(), this.getWest()); 305 | }, 306 | 307 | getSouthEast: function () { 308 | return new L.LatLng(this.getSouth(), this.getEast()); 309 | }, 310 | 311 | getWest: function () { 312 | return this._southWest.lng; 313 | }, 314 | 315 | getSouth: function () { 316 | return this._southWest.lat; 317 | }, 318 | 319 | getEast: function () { 320 | return this._northEast.lng; 321 | }, 322 | 323 | getNorth: function () { 324 | return this._northEast.lat; 325 | }, 326 | 327 | contains: function (obj) { // (LatLngBounds) or (LatLng) -> Boolean 328 | if (typeof obj[0] === 'number' || obj instanceof L.LatLng) { 329 | obj = L.latLng(obj); 330 | } else { 331 | obj = L.latLngBounds(obj); 332 | } 333 | 334 | var sw = this._southWest, 335 | ne = this._northEast, 336 | sw2, ne2; 337 | 338 | if (obj instanceof L.LatLngBounds) { 339 | sw2 = obj.getSouthWest(); 340 | ne2 = obj.getNorthEast(); 341 | } else { 342 | sw2 = ne2 = obj; 343 | } 344 | 345 | return 
(sw2.lat >= sw.lat) && (ne2.lat <= ne.lat) && 346 | (sw2.lng >= sw.lng) && (ne2.lng <= ne.lng); 347 | }, 348 | 349 | intersects: function (bounds) { // (LatLngBounds) 350 | bounds = L.latLngBounds(bounds); 351 | 352 | var sw = this._southWest, 353 | ne = this._northEast, 354 | sw2 = bounds.getSouthWest(), 355 | ne2 = bounds.getNorthEast(), 356 | 357 | latIntersects = (ne2.lat >= sw.lat) && (sw2.lat <= ne.lat), 358 | lngIntersects = (ne2.lng >= sw.lng) && (sw2.lng <= ne.lng); 359 | 360 | return latIntersects && lngIntersects; 361 | }, 362 | 363 | toBBoxString: function () { 364 | return [this.getWest(), this.getSouth(), this.getEast(), this.getNorth()].join(','); 365 | }, 366 | 367 | equals: function (bounds) { // (LatLngBounds) 368 | if (!bounds) { return false; } 369 | 370 | bounds = L.latLngBounds(bounds); 371 | 372 | return this._southWest.equals(bounds.getSouthWest()) && 373 | this._northEast.equals(bounds.getNorthEast()); 374 | }, 375 | 376 | isValid: function () { 377 | return !!(this._southWest && this._northEast); 378 | } 379 | }; 380 | 381 | //TODO International date line? 
382 | 383 | L.latLngBounds = function (a, b) { // (LatLngBounds) or (LatLng, LatLng) 384 | if (!a || a instanceof L.LatLngBounds) { 385 | return a; 386 | } 387 | return new L.LatLngBounds(a, b); 388 | }; -------------------------------------------------------------------------------- /npm-shrinkwrap.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ingress-exporter", 3 | "version": "0.0.1", 4 | "dependencies": { 5 | "async": { 6 | "version": "0.9.0", 7 | "from": "async@*", 8 | "resolved": "https://registry.npmjs.org/async/-/async-0.9.0.tgz" 9 | }, 10 | "color": { 11 | "version": "0.7.1", 12 | "from": "color@*", 13 | "resolved": "https://registry.npmjs.org/color/-/color-0.7.1.tgz", 14 | "dependencies": { 15 | "color-convert": { 16 | "version": "0.5.0", 17 | "from": "color-convert@0.5.x", 18 | "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-0.5.0.tgz" 19 | }, 20 | "color-string": { 21 | "version": "0.2.1", 22 | "from": "color-string@0.2.x", 23 | "resolved": "https://registry.npmjs.org/color-string/-/color-string-0.2.1.tgz" 24 | } 25 | } 26 | }, 27 | "escope": { 28 | "version": "1.0.1", 29 | "from": "escope@*", 30 | "resolved": "https://registry.npmjs.org/escope/-/escope-1.0.1.tgz", 31 | "dependencies": { 32 | "estraverse": { 33 | "version": "1.5.1", 34 | "from": "estraverse@>= 0.0.2", 35 | "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-1.5.1.tgz" 36 | } 37 | } 38 | }, 39 | "esprima": { 40 | "version": "1.2.2", 41 | "from": "esprima@*", 42 | "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.2.2.tgz" 43 | }, 44 | "grunt": { 45 | "version": "0.4.5", 46 | "from": "grunt@*", 47 | "resolved": "https://registry.npmjs.org/grunt/-/grunt-0.4.5.tgz", 48 | "dependencies": { 49 | "async": { 50 | "version": "0.1.22", 51 | "from": "async@~0.1.22", 52 | "resolved": "https://registry.npmjs.org/async/-/async-0.1.22.tgz" 53 | }, 54 | "coffee-script": { 55 | "version": "1.3.3", 56 | 
"from": "coffee-script@~1.3.3", 57 | "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.3.3.tgz" 58 | }, 59 | "colors": { 60 | "version": "0.6.2", 61 | "from": "colors@~0.6.2", 62 | "resolved": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz" 63 | }, 64 | "dateformat": { 65 | "version": "1.0.2-1.2.3", 66 | "from": "dateformat@1.0.2-1.2.3", 67 | "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.2-1.2.3.tgz" 68 | }, 69 | "eventemitter2": { 70 | "version": "0.4.14", 71 | "from": "eventemitter2@~0.4.13", 72 | "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-0.4.14.tgz" 73 | }, 74 | "findup-sync": { 75 | "version": "0.1.3", 76 | "from": "findup-sync@~0.1.2", 77 | "resolved": "https://registry.npmjs.org/findup-sync/-/findup-sync-0.1.3.tgz", 78 | "dependencies": { 79 | "glob": { 80 | "version": "3.2.11", 81 | "from": "glob@~3.2.9", 82 | "resolved": "https://registry.npmjs.org/glob/-/glob-3.2.11.tgz", 83 | "dependencies": { 84 | "inherits": { 85 | "version": "2.0.1", 86 | "from": "inherits@2", 87 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" 88 | }, 89 | "minimatch": { 90 | "version": "0.3.0", 91 | "from": "minimatch@0.3", 92 | "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.3.0.tgz", 93 | "dependencies": { 94 | "lru-cache": { 95 | "version": "2.5.0", 96 | "from": "lru-cache@2", 97 | "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.5.0.tgz" 98 | }, 99 | "sigmund": { 100 | "version": "1.0.0", 101 | "from": "sigmund@~1.0.0", 102 | "resolved": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.0.tgz" 103 | } 104 | } 105 | } 106 | } 107 | }, 108 | "lodash": { 109 | "version": "2.4.1", 110 | "from": "lodash@~2.4.1", 111 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.1.tgz" 112 | } 113 | } 114 | }, 115 | "glob": { 116 | "version": "3.1.21", 117 | "from": "glob@~3.1.21", 118 | "resolved": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz", 119 
| "dependencies": { 120 | "graceful-fs": { 121 | "version": "1.2.3", 122 | "from": "graceful-fs@~1.2.0", 123 | "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz" 124 | }, 125 | "inherits": { 126 | "version": "1.0.0", 127 | "from": "inherits@1", 128 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-1.0.0.tgz" 129 | } 130 | } 131 | }, 132 | "hooker": { 133 | "version": "0.2.3", 134 | "from": "hooker@~0.2.3", 135 | "resolved": "https://registry.npmjs.org/hooker/-/hooker-0.2.3.tgz" 136 | }, 137 | "iconv-lite": { 138 | "version": "0.2.11", 139 | "from": "iconv-lite@~0.2.11", 140 | "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz" 141 | }, 142 | "minimatch": { 143 | "version": "0.2.14", 144 | "from": "minimatch@~0.2.12", 145 | "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", 146 | "dependencies": { 147 | "lru-cache": { 148 | "version": "2.5.0", 149 | "from": "lru-cache@2", 150 | "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.5.0.tgz" 151 | }, 152 | "sigmund": { 153 | "version": "1.0.0", 154 | "from": "sigmund@~1.0.0", 155 | "resolved": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.0.tgz" 156 | } 157 | } 158 | }, 159 | "nopt": { 160 | "version": "1.0.10", 161 | "from": "nopt@~1.0.10", 162 | "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz", 163 | "dependencies": { 164 | "abbrev": { 165 | "version": "1.0.5", 166 | "from": "abbrev@1", 167 | "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.5.tgz" 168 | } 169 | } 170 | }, 171 | "rimraf": { 172 | "version": "2.2.8", 173 | "from": "rimraf@~2.2.8", 174 | "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz" 175 | }, 176 | "lodash": { 177 | "version": "0.9.2", 178 | "from": "lodash@~0.9.2", 179 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-0.9.2.tgz" 180 | }, 181 | "underscore.string": { 182 | "version": "2.2.1", 183 | "from": "underscore.string@~2.2.1", 184 | "resolved": 
"https://registry.npmjs.org/underscore.string/-/underscore.string-2.2.1.tgz" 185 | }, 186 | "which": { 187 | "version": "1.0.5", 188 | "from": "which@~1.0.5", 189 | "resolved": "https://registry.npmjs.org/which/-/which-1.0.5.tgz" 190 | }, 191 | "js-yaml": { 192 | "version": "2.0.5", 193 | "from": "js-yaml@~2.0.5", 194 | "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-2.0.5.tgz", 195 | "dependencies": { 196 | "argparse": { 197 | "version": "0.1.15", 198 | "from": "argparse@~ 0.1.11", 199 | "resolved": "https://registry.npmjs.org/argparse/-/argparse-0.1.15.tgz", 200 | "dependencies": { 201 | "underscore": { 202 | "version": "1.4.4", 203 | "from": "underscore@~1.4.3", 204 | "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.4.4.tgz" 205 | }, 206 | "underscore.string": { 207 | "version": "2.3.3", 208 | "from": "underscore.string@~2.3.1", 209 | "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.3.3.tgz" 210 | } 211 | } 212 | }, 213 | "esprima": { 214 | "version": "1.0.4", 215 | "from": "esprima@~ 1.0.2", 216 | "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz" 217 | } 218 | } 219 | }, 220 | "exit": { 221 | "version": "0.1.2", 222 | "from": "exit@~0.1.1", 223 | "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz" 224 | }, 225 | "getobject": { 226 | "version": "0.1.0", 227 | "from": "getobject@~0.1.0", 228 | "resolved": "https://registry.npmjs.org/getobject/-/getobject-0.1.0.tgz" 229 | }, 230 | "grunt-legacy-util": { 231 | "version": "0.2.0", 232 | "from": "grunt-legacy-util@~0.2.0", 233 | "resolved": "https://registry.npmjs.org/grunt-legacy-util/-/grunt-legacy-util-0.2.0.tgz" 234 | }, 235 | "grunt-legacy-log": { 236 | "version": "0.1.1", 237 | "from": "grunt-legacy-log@~0.1.0", 238 | "resolved": "https://registry.npmjs.org/grunt-legacy-log/-/grunt-legacy-log-0.1.1.tgz", 239 | "dependencies": { 240 | "lodash": { 241 | "version": "2.4.1", 242 | "from": "lodash@~2.4.1", 243 | "resolved": 
"https://registry.npmjs.org/lodash/-/lodash-2.4.1.tgz" 244 | }, 245 | "underscore.string": { 246 | "version": "2.3.3", 247 | "from": "underscore.string@~2.3.3", 248 | "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-2.3.3.tgz" 249 | } 250 | } 251 | } 252 | } 253 | }, 254 | "grunt-contrib-coffee": { 255 | "version": "0.11.0", 256 | "from": "grunt-contrib-coffee@*", 257 | "resolved": "https://registry.npmjs.org/grunt-contrib-coffee/-/grunt-contrib-coffee-0.11.0.tgz", 258 | "dependencies": { 259 | "coffee-script": { 260 | "version": "1.7.1", 261 | "from": "coffee-script@~1.7.0", 262 | "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.7.1.tgz", 263 | "dependencies": { 264 | "mkdirp": { 265 | "version": "0.3.5", 266 | "from": "mkdirp@~0.3.5", 267 | "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz" 268 | } 269 | } 270 | }, 271 | "chalk": { 272 | "version": "0.5.1", 273 | "from": "chalk@~0.5.0", 274 | "resolved": "https://registry.npmjs.org/chalk/-/chalk-0.5.1.tgz", 275 | "dependencies": { 276 | "ansi-styles": { 277 | "version": "1.1.0", 278 | "from": "ansi-styles@^1.1.0", 279 | "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-1.1.0.tgz" 280 | }, 281 | "escape-string-regexp": { 282 | "version": "1.0.1", 283 | "from": "escape-string-regexp@^1.0.0", 284 | "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.1.tgz" 285 | }, 286 | "has-ansi": { 287 | "version": "0.1.0", 288 | "from": "has-ansi@^0.1.0", 289 | "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-0.1.0.tgz", 290 | "dependencies": { 291 | "ansi-regex": { 292 | "version": "0.2.1", 293 | "from": "ansi-regex@^0.2.0", 294 | "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz" 295 | } 296 | } 297 | }, 298 | "strip-ansi": { 299 | "version": "0.3.0", 300 | "from": "strip-ansi@^0.3.0", 301 | "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.3.0.tgz", 302 | 
"dependencies": { 303 | "ansi-regex": { 304 | "version": "0.2.1", 305 | "from": "ansi-regex@^0.2.1", 306 | "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz" 307 | } 308 | } 309 | }, 310 | "supports-color": { 311 | "version": "0.2.0", 312 | "from": "supports-color@^0.2.0", 313 | "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-0.2.0.tgz" 314 | } 315 | } 316 | }, 317 | "lodash": { 318 | "version": "2.4.1", 319 | "from": "lodash@~2.4.1", 320 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.1.tgz" 321 | } 322 | } 323 | }, 324 | "grunt-contrib-copy": { 325 | "version": "0.5.0", 326 | "from": "grunt-contrib-copy@*", 327 | "resolved": "https://registry.npmjs.org/grunt-contrib-copy/-/grunt-contrib-copy-0.5.0.tgz" 328 | }, 329 | "grunt-contrib-watch": { 330 | "version": "0.6.1", 331 | "from": "grunt-contrib-watch@*", 332 | "resolved": "https://registry.npmjs.org/grunt-contrib-watch/-/grunt-contrib-watch-0.6.1.tgz", 333 | "dependencies": { 334 | "gaze": { 335 | "version": "0.5.1", 336 | "from": "gaze@~0.5.1", 337 | "resolved": "https://registry.npmjs.org/gaze/-/gaze-0.5.1.tgz", 338 | "dependencies": { 339 | "globule": { 340 | "version": "0.1.0", 341 | "from": "globule@~0.1.0", 342 | "resolved": "https://registry.npmjs.org/globule/-/globule-0.1.0.tgz", 343 | "dependencies": { 344 | "lodash": { 345 | "version": "1.0.1", 346 | "from": "lodash@~1.0.1", 347 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-1.0.1.tgz" 348 | }, 349 | "glob": { 350 | "version": "3.1.21", 351 | "from": "glob@~3.1.21", 352 | "resolved": "https://registry.npmjs.org/glob/-/glob-3.1.21.tgz", 353 | "dependencies": { 354 | "graceful-fs": { 355 | "version": "1.2.3", 356 | "from": "graceful-fs@~1.2.0", 357 | "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-1.2.3.tgz" 358 | }, 359 | "inherits": { 360 | "version": "1.0.0", 361 | "from": "inherits@1", 362 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-1.0.0.tgz" 
363 | } 364 | } 365 | }, 366 | "minimatch": { 367 | "version": "0.2.14", 368 | "from": "minimatch@~0.2.11", 369 | "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-0.2.14.tgz", 370 | "dependencies": { 371 | "lru-cache": { 372 | "version": "2.5.0", 373 | "from": "lru-cache@2", 374 | "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.5.0.tgz" 375 | }, 376 | "sigmund": { 377 | "version": "1.0.0", 378 | "from": "sigmund@~1.0.0", 379 | "resolved": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.0.tgz" 380 | } 381 | } 382 | } 383 | } 384 | } 385 | } 386 | }, 387 | "tiny-lr-fork": { 388 | "version": "0.0.5", 389 | "from": "tiny-lr-fork@0.0.5", 390 | "resolved": "https://registry.npmjs.org/tiny-lr-fork/-/tiny-lr-fork-0.0.5.tgz", 391 | "dependencies": { 392 | "qs": { 393 | "version": "0.5.6", 394 | "from": "qs@~0.5.2", 395 | "resolved": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz" 396 | }, 397 | "faye-websocket": { 398 | "version": "0.4.4", 399 | "from": "faye-websocket@~0.4.3", 400 | "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.4.4.tgz" 401 | }, 402 | "noptify": { 403 | "version": "0.0.3", 404 | "from": "noptify@~0.0.3", 405 | "resolved": "https://registry.npmjs.org/noptify/-/noptify-0.0.3.tgz", 406 | "dependencies": { 407 | "nopt": { 408 | "version": "2.0.0", 409 | "from": "nopt@~2.0.0", 410 | "resolved": "https://registry.npmjs.org/nopt/-/nopt-2.0.0.tgz", 411 | "dependencies": { 412 | "abbrev": { 413 | "version": "1.0.5", 414 | "from": "abbrev@1", 415 | "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.5.tgz" 416 | } 417 | } 418 | } 419 | } 420 | }, 421 | "debug": { 422 | "version": "0.7.4", 423 | "from": "debug@~0.7.0", 424 | "resolved": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz" 425 | } 426 | } 427 | }, 428 | "lodash": { 429 | "version": "2.4.1", 430 | "from": "lodash@~2.4.1", 431 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.1.tgz" 432 | }, 433 | "async": { 434 | "version": 
"0.2.10", 435 | "from": "async@~0.2.9", 436 | "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz" 437 | } 438 | } 439 | }, 440 | "grunt-cson": { 441 | "version": "0.10.0", 442 | "from": "grunt-cson@*", 443 | "resolved": "https://registry.npmjs.org/grunt-cson/-/grunt-cson-0.10.0.tgz", 444 | "dependencies": { 445 | "underscore-plus": { 446 | "version": "1.5.1", 447 | "from": "underscore-plus@1.x", 448 | "resolved": "https://registry.npmjs.org/underscore-plus/-/underscore-plus-1.5.1.tgz", 449 | "dependencies": { 450 | "underscore": { 451 | "version": "1.6.0", 452 | "from": "underscore@~1.6.0", 453 | "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz" 454 | } 455 | } 456 | }, 457 | "cson-safe": { 458 | "version": "0.1.1", 459 | "from": "cson-safe@~0.1.1", 460 | "resolved": "https://registry.npmjs.org/cson-safe/-/cson-safe-0.1.1.tgz", 461 | "dependencies": { 462 | "coffee-script-redux": { 463 | "version": "2.0.0-beta8", 464 | "from": "coffee-script-redux@2.0.0-beta8", 465 | "resolved": "https://registry.npmjs.org/coffee-script-redux/-/coffee-script-redux-2.0.0-beta8.tgz", 466 | "dependencies": { 467 | "StringScanner": { 468 | "version": "0.0.3", 469 | "from": "StringScanner@~0.0.3", 470 | "resolved": "https://registry.npmjs.org/StringScanner/-/StringScanner-0.0.3.tgz" 471 | }, 472 | "nopt": { 473 | "version": "2.1.2", 474 | "from": "nopt@~2.1.2", 475 | "resolved": "https://registry.npmjs.org/nopt/-/nopt-2.1.2.tgz", 476 | "dependencies": { 477 | "abbrev": { 478 | "version": "1.0.5", 479 | "from": "abbrev@1", 480 | "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.5.tgz" 481 | } 482 | } 483 | }, 484 | "esmangle": { 485 | "version": "0.0.17", 486 | "from": "esmangle@~0.0.8", 487 | "resolved": "https://registry.npmjs.org/esmangle/-/esmangle-0.0.17.tgz", 488 | "dependencies": { 489 | "esprima": { 490 | "version": "1.0.4", 491 | "from": "esprima@~ 1.0.2", 492 | "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz" 
493 | }, 494 | "estraverse": { 495 | "version": "1.3.2", 496 | "from": "estraverse@~ 1.3.2", 497 | "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-1.3.2.tgz" 498 | }, 499 | "esshorten": { 500 | "version": "0.0.2", 501 | "from": "esshorten@~ 0.0.2", 502 | "resolved": "https://registry.npmjs.org/esshorten/-/esshorten-0.0.2.tgz", 503 | "dependencies": { 504 | "estraverse": { 505 | "version": "1.2.0", 506 | "from": "estraverse@~ 1.2.0", 507 | "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-1.2.0.tgz" 508 | } 509 | } 510 | } 511 | } 512 | }, 513 | "source-map": { 514 | "version": "0.1.11", 515 | "from": "source-map@0.1.11", 516 | "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.1.11.tgz", 517 | "dependencies": { 518 | "amdefine": { 519 | "version": "0.1.0", 520 | "from": "amdefine@>=0.0.4", 521 | "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-0.1.0.tgz" 522 | } 523 | } 524 | }, 525 | "escodegen": { 526 | "version": "0.0.28", 527 | "from": "escodegen@~0.0.24", 528 | "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-0.0.28.tgz", 529 | "dependencies": { 530 | "esprima": { 531 | "version": "1.0.4", 532 | "from": "esprima@~1.0.2", 533 | "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz" 534 | }, 535 | "estraverse": { 536 | "version": "1.3.2", 537 | "from": "estraverse@~1.3.0", 538 | "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-1.3.2.tgz" 539 | } 540 | } 541 | }, 542 | "cscodegen": { 543 | "version": "0.1.0", 544 | "from": "cscodegen@git://github.com/michaelficarra/cscodegen.git#73fd7202ac086c26f18c9d56f025b18b3c6f5383", 545 | "resolved": "git://github.com/michaelficarra/cscodegen.git#73fd7202ac086c26f18c9d56f025b18b3c6f5383" 546 | } 547 | } 548 | } 549 | } 550 | } 551 | } 552 | }, 553 | "moment": { 554 | "version": "2.8.1", 555 | "from": "moment@*", 556 | "resolved": "https://registry.npmjs.org/moment/-/moment-2.8.1.tgz" 557 | }, 558 | "mongoskin": { 559 | 
"version": "1.4.4", 560 | "from": "mongoskin@*", 561 | "resolved": "https://registry.npmjs.org/mongoskin/-/mongoskin-1.4.4.tgz", 562 | "dependencies": { 563 | "mongodb": { 564 | "version": "1.4.4", 565 | "from": "mongodb@1.4.4", 566 | "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-1.4.4.tgz", 567 | "dependencies": { 568 | "bson": { 569 | "version": "0.2.8", 570 | "from": "bson@0.2.8", 571 | "resolved": "https://registry.npmjs.org/bson/-/bson-0.2.8.tgz", 572 | "dependencies": { 573 | "nan": { 574 | "version": "1.0.0", 575 | "from": "nan@~1.0.0", 576 | "resolved": "https://registry.npmjs.org/nan/-/nan-1.0.0.tgz" 577 | } 578 | } 579 | }, 580 | "kerberos": { 581 | "version": "0.0.3", 582 | "from": "kerberos@0.0.3", 583 | "resolved": "https://registry.npmjs.org/kerberos/-/kerberos-0.0.3.tgz" 584 | } 585 | } 586 | } 587 | } 588 | }, 589 | "ndarray": { 590 | "version": "1.0.15", 591 | "from": "ndarray@*", 592 | "resolved": "https://registry.npmjs.org/ndarray/-/ndarray-1.0.15.tgz", 593 | "dependencies": { 594 | "iota-array": { 595 | "version": "1.0.0", 596 | "from": "iota-array@^1.0.0", 597 | "resolved": "https://registry.npmjs.org/iota-array/-/iota-array-1.0.0.tgz" 598 | } 599 | } 600 | }, 601 | "optimist": { 602 | "version": "0.6.1", 603 | "from": "optimist@*", 604 | "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", 605 | "dependencies": { 606 | "wordwrap": { 607 | "version": "0.0.2", 608 | "from": "wordwrap@~0.0.2", 609 | "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz" 610 | }, 611 | "minimist": { 612 | "version": "0.0.10", 613 | "from": "minimist@~0.0.1", 614 | "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz" 615 | } 616 | } 617 | }, 618 | "point-in-polygon": { 619 | "version": "0.0.0", 620 | "from": "point-in-polygon@*", 621 | "resolved": "https://registry.npmjs.org/point-in-polygon/-/point-in-polygon-0.0.0.tgz" 622 | }, 623 | "request": { 624 | "version": "2.40.0", 625 | "from": 
"request@*", 626 | "resolved": "https://registry.npmjs.org/request/-/request-2.40.0.tgz", 627 | "dependencies": { 628 | "qs": { 629 | "version": "1.0.2", 630 | "from": "qs@~1.0.0", 631 | "resolved": "https://registry.npmjs.org/qs/-/qs-1.0.2.tgz" 632 | }, 633 | "json-stringify-safe": { 634 | "version": "5.0.0", 635 | "from": "json-stringify-safe@~5.0.0", 636 | "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.0.tgz" 637 | }, 638 | "mime-types": { 639 | "version": "1.0.2", 640 | "from": "mime-types@~1.0.1", 641 | "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-1.0.2.tgz" 642 | }, 643 | "forever-agent": { 644 | "version": "0.5.2", 645 | "from": "forever-agent@~0.5.0", 646 | "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.5.2.tgz" 647 | }, 648 | "node-uuid": { 649 | "version": "1.4.1", 650 | "from": "node-uuid@~1.4.0", 651 | "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.1.tgz" 652 | }, 653 | "tough-cookie": { 654 | "version": "0.12.1", 655 | "from": "tough-cookie@>=0.12.0", 656 | "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-0.12.1.tgz", 657 | "dependencies": { 658 | "punycode": { 659 | "version": "1.3.1", 660 | "from": "punycode@>=0.2.0", 661 | "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.1.tgz" 662 | } 663 | } 664 | }, 665 | "form-data": { 666 | "version": "0.1.4", 667 | "from": "form-data@~0.1.0", 668 | "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.1.4.tgz", 669 | "dependencies": { 670 | "combined-stream": { 671 | "version": "0.0.5", 672 | "from": "combined-stream@~0.0.4", 673 | "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.5.tgz", 674 | "dependencies": { 675 | "delayed-stream": { 676 | "version": "0.0.5", 677 | "from": "delayed-stream@0.0.5", 678 | "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz" 679 | } 680 | } 681 | }, 682 | "mime": { 683 | 
"version": "1.2.11", 684 | "from": "mime@~1.2.11", 685 | "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" 686 | } 687 | } 688 | }, 689 | "tunnel-agent": { 690 | "version": "0.4.0", 691 | "from": "tunnel-agent@~0.4.0", 692 | "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.0.tgz" 693 | }, 694 | "http-signature": { 695 | "version": "0.10.0", 696 | "from": "http-signature@~0.10.0", 697 | "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-0.10.0.tgz", 698 | "dependencies": { 699 | "assert-plus": { 700 | "version": "0.1.2", 701 | "from": "assert-plus@0.1.2", 702 | "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.2.tgz" 703 | }, 704 | "asn1": { 705 | "version": "0.1.11", 706 | "from": "asn1@0.1.11", 707 | "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz" 708 | }, 709 | "ctype": { 710 | "version": "0.5.2", 711 | "from": "ctype@0.5.2", 712 | "resolved": "https://registry.npmjs.org/ctype/-/ctype-0.5.2.tgz" 713 | } 714 | } 715 | }, 716 | "oauth-sign": { 717 | "version": "0.3.0", 718 | "from": "oauth-sign@~0.3.0", 719 | "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.3.0.tgz" 720 | }, 721 | "hawk": { 722 | "version": "1.1.1", 723 | "from": "hawk@1.1.1", 724 | "resolved": "https://registry.npmjs.org/hawk/-/hawk-1.1.1.tgz", 725 | "dependencies": { 726 | "hoek": { 727 | "version": "0.9.1", 728 | "from": "hoek@0.9.x", 729 | "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.9.1.tgz" 730 | }, 731 | "boom": { 732 | "version": "0.4.2", 733 | "from": "boom@0.4.x", 734 | "resolved": "https://registry.npmjs.org/boom/-/boom-0.4.2.tgz" 735 | }, 736 | "cryptiles": { 737 | "version": "0.2.2", 738 | "from": "cryptiles@0.2.x", 739 | "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.2.2.tgz" 740 | }, 741 | "sntp": { 742 | "version": "0.2.4", 743 | "from": "sntp@0.2.x", 744 | "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.2.4.tgz" 745 | } 746 | } 747 | }, 748 
| "aws-sign2": { 749 | "version": "0.5.0", 750 | "from": "aws-sign2@~0.5.0", 751 | "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.5.0.tgz" 752 | }, 753 | "stringstream": { 754 | "version": "0.0.4", 755 | "from": "stringstream@~0.0.4", 756 | "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.4.tgz" 757 | } 758 | } 759 | }, 760 | "require-all": { 761 | "version": "0.0.8", 762 | "from": "require-all@*", 763 | "resolved": "https://registry.npmjs.org/require-all/-/require-all-0.0.8.tgz" 764 | }, 765 | "winston": { 766 | "version": "0.7.3", 767 | "from": "winston@*", 768 | "resolved": "https://registry.npmjs.org/winston/-/winston-0.7.3.tgz", 769 | "dependencies": { 770 | "async": { 771 | "version": "0.2.10", 772 | "from": "async@0.2.x", 773 | "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz" 774 | }, 775 | "colors": { 776 | "version": "0.6.2", 777 | "from": "colors@0.6.x", 778 | "resolved": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz" 779 | }, 780 | "cycle": { 781 | "version": "1.0.3", 782 | "from": "cycle@1.0.x", 783 | "resolved": "https://registry.npmjs.org/cycle/-/cycle-1.0.3.tgz" 784 | }, 785 | "eyes": { 786 | "version": "0.1.8", 787 | "from": "eyes@0.1.x", 788 | "resolved": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz" 789 | }, 790 | "pkginfo": { 791 | "version": "0.3.0", 792 | "from": "pkginfo@0.3.x", 793 | "resolved": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.3.0.tgz" 794 | }, 795 | "request": { 796 | "version": "2.16.6", 797 | "from": "request@2.16.x", 798 | "resolved": "https://registry.npmjs.org/request/-/request-2.16.6.tgz", 799 | "dependencies": { 800 | "form-data": { 801 | "version": "0.0.10", 802 | "from": "form-data@~0.0.3", 803 | "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.0.10.tgz", 804 | "dependencies": { 805 | "combined-stream": { 806 | "version": "0.0.5", 807 | "from": "combined-stream@~0.0.4", 808 | "resolved": 
"https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.5.tgz", 809 | "dependencies": { 810 | "delayed-stream": { 811 | "version": "0.0.5", 812 | "from": "delayed-stream@0.0.5", 813 | "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz" 814 | } 815 | } 816 | } 817 | } 818 | }, 819 | "mime": { 820 | "version": "1.2.11", 821 | "from": "mime@~1.2.7", 822 | "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" 823 | }, 824 | "hawk": { 825 | "version": "0.10.2", 826 | "from": "hawk@~0.10.2", 827 | "resolved": "https://registry.npmjs.org/hawk/-/hawk-0.10.2.tgz", 828 | "dependencies": { 829 | "hoek": { 830 | "version": "0.7.6", 831 | "from": "hoek@0.7.x", 832 | "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.7.6.tgz" 833 | }, 834 | "boom": { 835 | "version": "0.3.8", 836 | "from": "boom@0.3.x", 837 | "resolved": "https://registry.npmjs.org/boom/-/boom-0.3.8.tgz" 838 | }, 839 | "cryptiles": { 840 | "version": "0.1.3", 841 | "from": "cryptiles@0.1.x", 842 | "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.1.3.tgz" 843 | }, 844 | "sntp": { 845 | "version": "0.1.4", 846 | "from": "sntp@0.1.x", 847 | "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.1.4.tgz" 848 | } 849 | } 850 | }, 851 | "node-uuid": { 852 | "version": "1.4.1", 853 | "from": "node-uuid@~1.4.0", 854 | "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.1.tgz" 855 | }, 856 | "cookie-jar": { 857 | "version": "0.2.0", 858 | "from": "cookie-jar@~0.2.0", 859 | "resolved": "https://registry.npmjs.org/cookie-jar/-/cookie-jar-0.2.0.tgz" 860 | }, 861 | "aws-sign": { 862 | "version": "0.2.0", 863 | "from": "aws-sign@~0.2.0", 864 | "resolved": "https://registry.npmjs.org/aws-sign/-/aws-sign-0.2.0.tgz" 865 | }, 866 | "oauth-sign": { 867 | "version": "0.2.0", 868 | "from": "oauth-sign@~0.2.0", 869 | "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.2.0.tgz" 870 | }, 871 | "forever-agent": { 872 | "version": "0.2.0", 873 
| "from": "forever-agent@~0.2.0", 874 | "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.2.0.tgz" 875 | }, 876 | "tunnel-agent": { 877 | "version": "0.2.0", 878 | "from": "tunnel-agent@~0.2.0", 879 | "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.2.0.tgz" 880 | }, 881 | "json-stringify-safe": { 882 | "version": "3.0.0", 883 | "from": "json-stringify-safe@~3.0.0", 884 | "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-3.0.0.tgz" 885 | }, 886 | "qs": { 887 | "version": "0.5.6", 888 | "from": "qs@~0.5.4", 889 | "resolved": "https://registry.npmjs.org/qs/-/qs-0.5.6.tgz" 890 | } 891 | } 892 | }, 893 | "stack-trace": { 894 | "version": "0.0.9", 895 | "from": "stack-trace@0.0.x", 896 | "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz" 897 | } 898 | } 899 | } 900 | } 901 | } 902 | --------------------------------------------------------------------------------