├── .gitattributes ├── .github └── workflows │ └── default.yaml ├── .gitignore ├── .npmignore ├── LICENSE ├── README.md ├── README.md.tpl ├── commands └── recon │ ├── index.js │ ├── lib │ ├── globals │ │ ├── cache.js │ │ ├── options.js │ │ └── recon.js │ └── handlers │ │ ├── file.js │ │ └── output.js │ └── sub │ ├── add │ └── index.js │ ├── cache │ ├── index.js │ └── sub │ │ ├── clear.js │ │ └── set.js │ ├── diff │ └── index.js │ ├── edit │ └── index.js │ ├── exec │ └── index.js │ ├── export │ └── index.js │ ├── group │ └── index.js │ ├── import │ └── index.js │ ├── layout │ └── index.js │ ├── load │ └── index.js │ ├── merge │ └── index.js │ ├── options │ ├── index.js │ └── sub │ │ ├── clear.js │ │ ├── delete.js │ │ ├── get.js │ │ ├── list.js │ │ └── set.js │ ├── remote │ ├── add.js │ ├── index.js │ ├── list.js │ └── remove.js │ ├── remove │ └── index.js │ ├── save │ └── index.js │ ├── select │ └── index.js │ ├── summary │ └── index.js │ ├── template │ ├── index.js │ └── sub │ │ └── run │ │ └── index.js │ ├── transform │ ├── index.js │ └── transforms.js │ ├── traverse │ ├── index.js │ └── sub │ │ ├── del.js │ │ ├── get.js │ │ ├── set.js │ │ └── traverse.js │ └── ungroup │ └── index.js ├── examples ├── auto.pown ├── cache-options.pown ├── coderecon.pown ├── dns.pown ├── githubenum.pown ├── group.pown ├── measure.pown ├── named-traversals.pown ├── noop.pown ├── permissive-cors-1.pown ├── permissive-cors-1.yaml ├── permissive-cors-2.pown ├── permissive-cors-2.yaml ├── request.pown ├── request.yaml ├── script-traversals.pown ├── traverse.pown ├── whocode.pown ├── whodev.pown └── worker.js ├── lib ├── cache │ ├── dynamodb.js │ └── memcached.js ├── common.js ├── cytoscape.js ├── detect.js ├── graph.js ├── index.js ├── normalize.js ├── options.js ├── plugins │ └── traverse.js ├── recon.js ├── remote.js ├── scheduler.js ├── template.js ├── transform.js ├── types.js ├── utils.js └── worker │ ├── index.js │ ├── utils.js │ └── worker.js ├── package-lock.json ├── package.json ├── test ├── detect.js ├── normalize.js ├── recon.js └── worker │ ├── env.js │ └── utils.js └── transforms ├── bitbucket └── index.js ├── bufferoverrun └── index.js ├── certspotter └── index.js ├── cloudflare └── index.js ├── crtsh └── index.js ├── dns └── index.js ├── dockerhub └── index.js ├── gravatar └── index.js ├── hackertarget └── index.js ├── http └── index.js ├── ipinfoio └── index.js ├── omnisint └── index.js ├── pks └── index.js ├── pwndb └── index.js ├── riddler └── index.js ├── script └── index.js ├── scylla └── index.js ├── securitytrails └── index.js ├── shodan └── index.js ├── spyse └── index.js ├── tcp ├── index.js └── ports.js ├── threatcrowd └── index.js ├── urlscan └── index.js ├── utils └── index.js ├── virustotal └── index.js ├── vulners └── index.js ├── wappalyzer └── index.js ├── worker └── index.js └── zonecruncher └── index.js /.gitattributes: -------------------------------------------------------------------------------- 1 | *.png filter=lfs diff=lfs merge=lfs -text 2 | -------------------------------------------------------------------------------- /.github/workflows/default.yaml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: master 4 | 5 | jobs: 6 | install: 7 | name: Install 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v2 11 | - uses: actions/setup-node@v2 12 | - uses: actions/cache@v2 13 | with: 14 | path: '**/node_modules' 15 | key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} 16 | - name: Install npm 17 | if: steps.npm-cache.outputs.cache-hit != 'true' 18 | run: npm install 19 | 20 | test: 21 | name: Test 22 | runs-on: ubuntu-latest 23 | steps: 24 | - uses: actions/checkout@v2 25 | - uses: actions/setup-node@v2 26 | - uses: actions/cache@v2 27 | with: 28 | path: '**/node_modules' 29 | key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} 30 | - name: Install npm 31 | if: steps.npm-cache.outputs.cache-hit != 'true' 32 | run: npm install 33 | - name: Run testing 34 | run: npm run test 35 | 36 | render_readme: 37 | name: Render README 38 | runs-on: ubuntu-latest 39 | steps: 40 | - uses: actions/checkout@v2 41 | - uses: actions/setup-node@v2 42 | - uses: actions/cache@v2 43 | with: 44 | path: '**/node_modules' 45 | key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} 46 | - name: Install npm 47 | if: steps.npm-cache.outputs.cache-hit != 'true' 48 | run: npm install 49 | - name: Generate readme 50 | run: | 51 | USAGE=`npm run usage --silent` 52 | cat README.md.tpl | awk -v USAGE="$USAGE" '{ gsub("{{usage}}",USAGE) }1' > README.md 53 | - name: Commit results 54 | run: | 55 | git config --global user.email "action@github.com" 56 | git config --global user.name "Github Action" 57 | git commit README.md -m 'Re-build README.md' || echo "No changes to commit" 58 | git push origin || echo "No changes to commit" 59 | 60 | publish: 61 | name: Publish 62 | runs-on: ubuntu-latest 63 | needs: [test, render_readme] 64 | steps: 65 | - uses: actions/checkout@v2 66 | - uses: actions/setup-node@v2 67 | - uses: actions/cache@v2 68 | with: 69 | path: '**/node_modules' 70 | key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} 71 | - name: Install npm 72 | if: steps.npm-cache.outputs.cache-hit != 'true' 73 | run: npm install 74 | - uses: JS-DevTools/npm-publish@v1 75 | with: 76 | token: ${{ secrets.NPM_TOKEN }} 77 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | 6 | # Runtime data 7 | pids 8 | *.pid 9 | *.seed 10 | 11 | # Directory for instrumented libs generated by jscoverage/JSCover 12 | lib-cov 13 | 14 | # Coverage directory used by tools like istanbul 15 | coverage 16 | 17 | # nyc test coverage 18 | .nyc_output 19 | 20 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 21 | .grunt 22 | 23 | # node-waf configuration 24 | .lock-wscript 25 | 26 | # Compiled binary addons (http://nodejs.org/api/addons.html) 27 | build/Release 28 | 29 | # Dependency directories 30 | node_modules 31 | jspm_packages 32 | 33 | # Optional npm cache directory 34 | .npm 35 | 36 | # Optional REPL history 37 | .node_repl_history 38 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | examples 2 | screenshots 3 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 pownjs 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /commands/recon/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'recon ', 3 | describe: 'Target recon', 4 | 5 | builder: (yargs) => { 6 | yargs.command(require('./sub/transform').yargs) 7 | yargs.command(require('./sub/template').yargs) 8 | yargs.command(require('./sub/select').yargs) 9 | yargs.command(require('./sub/traverse').yargs) 10 | yargs.command(require('./sub/options').yargs) 11 | yargs.command(require('./sub/cache').yargs) 12 | yargs.command(require('./sub/add').yargs) 13 | yargs.command(require('./sub/remove').yargs) 14 | yargs.command(require('./sub/edit').yargs) 15 | yargs.command(require('./sub/merge').yargs) 16 | yargs.command(require('./sub/diff').yargs) 17 | yargs.command(require('./sub/group').yargs) 18 | yargs.command(require('./sub/ungroup').yargs) 19 | yargs.command(require('./sub/load').yargs) 20 | yargs.command(require('./sub/save').yargs) 21 | yargs.command(require('./sub/import').yargs) 22 | yargs.command(require('./sub/export').yargs) 23 | yargs.command(require('./sub/remote').yargs) 24 | yargs.command(require('./sub/layout').yargs) 25 | yargs.command(require('./sub/summary').yargs) 26 | yargs.command(require('./sub/exec').yargs) 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /commands/recon/lib/globals/cache.js: -------------------------------------------------------------------------------- 1 | let cache 2 | 3 | const getCache = () => { 4 | return cache 5 | } 6 | 7 | const setCache = (c) => { 8 | cache = c 9 | } 10 | 11 | const clearCache = (c) => { 12 | cache = undefined 13 | } 14 | 15 | module.exports = { getCache, setCache, clearCache } 16 | -------------------------------------------------------------------------------- /commands/recon/lib/globals/options.js: -------------------------------------------------------------------------------- 1 | const { Options } = require('../../../../lib/options') 2 | 3 | const options = new Options() 4 | 5 | module.exports = { options } 6 | -------------------------------------------------------------------------------- /commands/recon/lib/globals/recon.js: -------------------------------------------------------------------------------- 1 | const { Bar } = require('@pown/cli/lib/bar') 2 | 3 | const { Recon } = require('../../../../lib/recon') 4 | 5 | const recon = new Recon() 6 | 7 | recon.on('info', console.info.bind(console)) 8 | recon.on('warn', console.warn.bind(console)) 9 | recon.on('error', console.error.bind(console)) 10 | recon.on('debug', console.debug.bind(console)) 11 | 12 | // TODO: show internal-error 13 | 14 | const bars = {} 15 | 16 | recon.on('barStart', (name, { total = 0 }) => { 17 | if (total < 1000) { 18 | return 19 | } 20 | 21 | const bar = new Bar() 22 | 23 | bar.start(total, 0) 24 | 25 | bars[name] = bar 26 | }) 27 | 28 | recon.on('barStep', (name, { step = 0 }) => { 29 | const bar = bars[name] 30 | 31 | if (!bar) { 32 | return 33 | } 34 | 35 | bar.update(step) 36 | }) 37 | 38 | recon.on('barEnd', (name) => { 39 | const bar = bars[name] 40 | 41 | if (!bar) { 42 | return 43 | } 44 | 45 | bar.stop() 46 | 47 | delete bars[name] 48 | }) 49 | 50 | module.exports = { recon } 51 | -------------------------------------------------------------------------------- /commands/recon/lib/handlers/file.js: -------------------------------------------------------------------------------- 1 | const installReadOptions = (yargs) => { 2 | yargs.option('read', { 3 | alias: 'r', 4 | type: 'string', 5 | description: 'Read file' 6 | }) 7 | } 8 | 9 | const installWriteOptions = (yargs) => { 10 | yargs.option('write', { 11 | alias: 'w', 12 | type: 'string', 13 | description: 'Write file' 14 | }) 15 | } 16 | 17 | const handleReadOptions = async(argv, recon) => { 18 | const { read } = argv 19 | 20 | if (!read) { 21 | return 22 | } 23 | 24 | console.warn(`reading from file ${read}`) 25 | 26 | const { readFile } = require('@pown/file/lib/readFile') 27 | 28 | let data 29 | 30 | try { 31 | data = await readFile(read) 32 | } 33 | catch (e) { 34 | console.error(`Cannot read file ${read}`) 35 | 36 | return 37 | } 38 | 39 | console.debug(`read ${data.length} bytes`) 40 | 41 | let json 42 | 43 | try { 44 | json = JSON.parse(data, (key, value) => { 45 | if (value && value.type === 'Buffer' && Array.isArray(value.data)) { 46 | return Buffer.from(value.data) 47 | } 48 | else { 49 | return value 50 | } 51 | }) 52 | } 53 | catch (e) { 54 | console.error(`Cannot parse recon data`) 55 | 56 | return 57 | } 58 | 59 | try { 60 | recon.deserialize(json) 61 | } 62 | catch (e) { 63 | console.error(`Cannot deserialize recon data`) 64 | } 65 | } 66 | 67 | const handleWriteOptions = async(argv, recon) => { 68 | const { write } = argv 69 | 70 | if (!write) { 71 | return 72 | } 73 | 74 | console.warn(`writing to file ${write}`) 75 | 76 | const { writeFile } = require('@pown/file/lib/writeFile') 77 | 78 | let json 79 | 80 | try { 81 | json = recon.serialize() 82 | } 83 | catch (e) { 84 | console.error(`Cannot serialize recon data`) 85 | 86 | return 87 | } 88 | 89 | let data 90 | 91 | try { 92 | data = JSON.stringify(json) 93 | } 94 | catch (e) { 95 | console.error(`Cannot stringify recon data`) 96 | 97 | return 98 | } 99 | 100 | if (data) { 101 | try { 102 | await writeFile(write, data) 103 | } 104 | catch (e) { 105 | console.error(`Cannot write file ${write}`) 106 | 107 | return 108 | } 109 | } 110 | 111 | console.debug(`wrote ${data.length} bytes`) 112 | } 113 | 114 | module.exports = { installReadOptions, installWriteOptions, handleReadOptions, handleWriteOptions } 115 | -------------------------------------------------------------------------------- /commands/recon/lib/handlers/output.js: -------------------------------------------------------------------------------- 1 | const installOutputOptions = (yargs) => { 2 | yargs.options('output-format', { 3 | description: 'Output format', 4 | alias: 'o', 5 | type: 'string', 6 | default: 'table', 7 | choices: ['table', 'grid', 'csv', 'json', 'jsonstream', 'none'] 8 | }) 9 | 10 | yargs.options('output-ids', { 11 | description: 'Output ids', 12 | type: 'boolean', 13 | default: false 14 | }) 15 | 16 | yargs.options('output-labels', { 17 | description: 'Output labels', 18 | type: 'boolean', 19 | default: false 20 | }) 21 | 22 | yargs.options('output-fields', { 23 | description: 'Output fields', 24 | type: 'string', 25 | default: '' 26 | }) 27 | 28 | yargs.options('max-output-size', { 29 | description: 'Maximum amount of nodes to output', 30 | type: 'number', 31 | default: Infinity 32 | }) 33 | } 34 | 35 | const handleOutputOptions = (argv, nodes) => { 36 | const { outputFormat, outputIds, outputLabels, outputFields, maxOutputSize } = argv 37 | 38 | let propsFilter 39 | 40 | if (outputFields) { 41 | const fields = outputFields.split(/[\s,]+/g).map(f => f.trim()).filter(f => f) 42 | 43 | propsFilter = (props) => Object.entries(props).filter(([key]) => fields.includes(key)).slice(0, 5) 44 | } 45 | else { 46 | propsFilter = (props) => Object.entries(props).slice(0, 5) 47 | } 48 | 49 | switch (outputFormat) { 50 | case 'table': 51 | const tables = {} 52 | 53 | nodes.forEach(({ type, id, label, image, parent, props = {} }) => { 54 | const row = {} 55 | 56 | if (type === 'group') { 57 | row['label'] = label 58 | } 59 | 60 | if (outputIds) { 61 | row['id'] = id 62 | } 63 | 64 | if (outputLabels) { 65 | row['label'] = label 66 | } 67 | 68 | propsFilter(props).forEach(([name, value]) => { 69 | row[name] = value 70 | }) 71 | 72 | let table = tables[type] || [] 73 | 74 | table.push(row) 75 | 76 | tables[type] = table 77 | }) 78 | 79 | Object.entries(tables).forEach(([type, table]) => { 80 | console.group(type) 81 | console.table(table.slice(0, maxOutputSize)) 82 | console.groupEnd() 83 | }) 84 | 85 | break 86 | 87 | case 'grid': 88 | const table = [] 89 | 90 | nodes.forEach(({ type, id, label, image, parent, props = {} }) => { 91 | const row = {} 92 | 93 | row['type'] = type 94 | 95 | if (type === 'group') { 96 | row['label'] = label 97 | } 98 | 99 | if (outputIds) { 100 | row['id'] = id 101 | } 102 | 103 | if (outputLabels) { 104 | row['label'] = label 105 | } 106 | 107 | propsFilter(props).forEach(([name, value]) => { 108 | row[name] = value 109 | }) 110 | 111 | table.push(row) 112 | }) 113 | 114 | console.table(table.slice(0, maxOutputSize)) 115 | 116 | break 117 | 118 | case 'csv': 119 | const fields = {} 120 | const lines = [] 121 | 122 | nodes.forEach(({ id, type, label, image, parent, props }) => { 123 | const line = {} 124 | 125 | fields['type'] = 1 126 | line['type'] = type 127 | 128 | if (type === 'group') { 129 | fields['label'] = 1 130 | line['label'] = label 131 | } 132 | 133 | if (outputIds) { 134 | fields['id'] = 1 135 | line['id'] = id 136 | } 137 | 138 | if (outputLabels) { 139 | fields['label'] = 1 140 | line['label'] = label 141 | } 142 | 143 | propsFilter(props).forEach(([name, value]) => { 144 | fields[name] = 1 145 | line[name] = value 146 | }) 147 | 148 | lines.push(line) 149 | }) 150 | 151 | const fieldNames = Object.keys(fields) 152 | 153 | console.log('#' + fieldNames.join(',')) 154 | 155 | lines.slice(0, maxOutputSize).forEach((line) => { 156 | const fieldValues = fieldNames.map((name) => JSON.stringify(line[name] || '')) 157 | 158 | console.log(fieldValues.join(',')) 159 | }) 160 | 161 | break 162 | 163 | case 'json': 164 | console.log('['); 165 | 166 | const lastIndex = nodes.length - 1 167 | 168 | nodes.slice(0, maxOutputSize).forEach((node, index) => { 169 | try { 170 | console.log(' ', JSON.stringify(node) + (index === lastIndex ? '' : ',')) 171 | } 172 | catch (e) { 173 | console.error(e) 174 | } 175 | }) 176 | 177 | console.log(']'); 178 | 179 | break 180 | 181 | case 'jsonstream': 182 | nodes.slice(0, maxOutputSize).forEach((node) => { 183 | try { 184 | console.log(JSON.stringify(node)) 185 | } 186 | catch (e) { 187 | console.error(e) 188 | } 189 | }) 190 | 191 | break 192 | } 193 | } 194 | 195 | module.exports = { installOutputOptions, handleOutputOptions } 196 | -------------------------------------------------------------------------------- /commands/recon/sub/add/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'add ', 3 | describe: 'Add nodes', 4 | aliases: ['a'], 5 | 6 | builder: (yargs) => { 7 | yargs.options('group', { 8 | alias: 'g', 9 | type: 'string', 10 | describe: 'Group nodes', 11 | default: '' 12 | }) 13 | 14 | yargs.option('node-type', { 15 | type: 'string', 16 | describe: 'The type for new nodes', 17 | default: 'string' 18 | }) 19 | 20 | yargs.option('node-props', { 21 | type: 'string', 22 | describe: 'The props for new nodes', 23 | default: '' 24 | }) 25 | 26 | yargs.option('node-props-file', { 27 | type: 'string', 28 | describe: 'A file for the props for new nodes', 29 | default: '' 30 | }) 31 | 32 | yargs.options('select', { 33 | alias: 's', 34 | type: 'string', 35 | describe: 'Select graph. Nodes will be added and linked only if graph contains at least one node', 36 | default: '' 37 | }) 38 | 39 | yargs.options('traverse', { 40 | alias: 'v', 41 | type: 'string', 42 | describe: 'Traverse graph. Nodes will be added and linked only if graph contains at least one node', 43 | default: false 44 | }) 45 | 46 | const { installReadOptions, installWriteOptions } = require('../../lib/handlers/file') 47 | 48 | installReadOptions(yargs) 49 | installWriteOptions(yargs) 50 | 51 | const { installOutputOptions } = require('../../lib/handlers/output') 52 | 53 | installOutputOptions(yargs) 54 | }, 55 | 56 | handler: async(argv) => { 57 | const { group, nodeType, nodeProps, nodePropsFile, select, traverse, nodes } = argv 58 | 59 | const { recon } = require('../../lib/globals/recon') 60 | 61 | const { handleReadOptions, handleWriteOptions } = require('../../lib/handlers/file') 62 | 63 | await handleReadOptions(argv, recon) 64 | 65 | let ifNodes 66 | 67 | if (select) { 68 | ifNodes = recon.select(select) 69 | } 70 | else 71 | if (traverse) { 72 | ifNodes = recon.traverse(traverse) 73 | } 74 | 75 | if (!ifNodes || (ifNodes && ifNodes.length > 0)) { 76 | const { makeId } = require('../../../../lib/utils') 77 | 78 | const { readFile } = require('fs') 79 | const { promisify } = require('util') 80 | 81 | const readFileAsync = promisify(readFile) 82 | 83 | const nodePropsObj = nodeProps ? JSON.parse(nodeProps) : {} 84 | 85 | const nodePropsFileObj = nodePropsFile ? JSON.parse(await readFileAsync(nodePropsFile)) : {} 86 | 87 | const nodeEdges = (ifNodes ? ifNodes.map((node) => node.data('id')) : []) 88 | 89 | const properNodes = nodes 90 | .filter((node) => { 91 | return node 92 | }) 93 | .map((node) => ({ 94 | id: makeId(nodeType, node), 95 | type: nodeType, 96 | label: node, 97 | props: { 98 | [nodeType]: node, 99 | 100 | ...nodePropsObj, 101 | ...nodePropsFileObj 102 | }, 103 | edges: nodeEdges 104 | })) 105 | 106 | await recon.addNodes(properNodes) 107 | 108 | if (group) { 109 | recon.group(group) 110 | } 111 | 112 | await handleWriteOptions(argv, recon) 113 | 114 | const { handleOutputOptions } = require('../../lib/handlers/output') 115 | 116 | await handleOutputOptions(argv, properNodes) 117 | } 118 | } 119 | } 120 | -------------------------------------------------------------------------------- /commands/recon/sub/cache/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'cache ', 3 | describe: 'Manage cache', 4 | aliases: [], 5 | 6 | builder: (yargs) => { 7 | yargs.command(require('./sub/set').yargs) 8 | yargs.command(require('./sub/clear').yargs) 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /commands/recon/sub/cache/sub/clear.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'clear [options]', 3 | describe: 'Clear cache configuration', 4 | aliases: ['c'], 5 | 6 | builder: (yargs) => {}, 7 | 8 | handler: (argv) => { 9 | const { clearCache } = require('../../../lib/globals/cache') 10 | 11 | clearCache() 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /commands/recon/sub/cache/sub/set.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'set [options]', 3 | describe: 'Set cache configuration', 4 | aliases: ['s'], 5 | 6 | builder: (yargs) => { 7 | yargs.options('cache-memcached-server', { 8 | type: 'string', 9 | describe: 'A memcached server address[:port]', 10 | default: '', 11 | alias: ['memcached-server'] 12 | }) 13 | 14 | yargs.options('cache-dynamodb-table', { 15 | type: 'string', 16 | describe: 'A dynamodb table name', 17 | default: '', 18 | alias: ['dynamodb-table'] 19 | }) 20 | 21 | yargs.options('cache-ttl', { 22 | type: 'number', 23 | describe: 'Cache max lifetime (seconds)', 24 | default: 60, 25 | alias: ['ttl'] 26 | }) 27 | 28 | yargs.options('cache-key-prefix', { 29 | type: 'string', 30 | describe: 'Prefix to add to keys', 31 | default: '', 32 | alias: ['key-prefix'] 33 | }) 34 | 35 | yargs.options('cache-key-suffix', { 36 | type: 'string', 37 | describe: 'Suffix to add to keys', 38 | default: '', 39 | alias: ['key-suffix'] 40 | }) 41 | }, 42 | 43 | handler: (argv) => { 44 | const { cacheMemcachedServer, cacheDynamodbTable, cacheTtl, cacheKeyPrefix, cacheKeySuffix } = argv 45 | 46 | const { setCache } = require('../../../lib/globals/cache') 47 | 48 | let cache 49 | 50 | if (cacheMemcachedServer) { 51 | const { Cache } = require('../../../../../lib/cache/memcached') 52 | 53 | cache = new Cache({ hosts: [cacheMemcachedServer], ttl: cacheTtl, keyPrefix: cacheKeyPrefix, keySuffix: cacheKeySuffix }) 54 | 55 | console.info('caching with memcached server', JSON.stringify(cacheMemcachedServer)) 56 | } 57 | else 58 | if (cacheDynamodbTable) { 59 | const { Cache } = require('../../../../../lib/cache/dynamodb') 60 | 61 | cache = new Cache({ table: cacheDynamodbTable, ttl: cacheTtl, keyPrefix: cacheKeyPrefix, keySuffix: cacheKeySuffix }) 62 | 63 | console.info('caching with dynamodb table', JSON.stringify(cacheDynamodbTable)) 64 | } 65 | 66 | if (cache) { 67 | setCache(cache) 68 | } 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /commands/recon/sub/diff/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'diff ', 3 | describe: 'Perform a diff between two recon files', 4 | aliases: ['d'], 5 | 6 | builder: (yargs) => { 7 | yargs.option('subset', { 8 | alias: 's', 9 | choices: ['left', 'right', 'both'], 10 | default: 'left', 11 | describe: 'The subset to select' 12 | }) 13 | 14 | const { installWriteOptions } = require('../../lib/handlers/file') 15 | 16 | installWriteOptions(yargs) 17 | 18 | const { installOutputOptions } = require('../../lib/handlers/output') 19 | 20 | installOutputOptions(yargs) 21 | }, 22 | 23 | handler: async(argv) => { 24 | const { fileA, fileB, subset } = argv 25 | 26 | const { readFile } = require('@pown/file/lib/readFile') // TODO: remove and use fs instead 27 | 28 | const { Recon } = require('../../../../lib/recon') 29 | 30 | let fileAData 31 | 32 | try { 33 | fileAData = await readFile(fileA) 34 | } 35 | catch (e) { 36 | console.error(`Cannot read file ${fileA}`) 37 | 38 | return 39 | } 40 | 41 | let fileAJSON 42 | 43 | try { 44 | fileAJSON = JSON.parse(fileAData, (key, value) => { 45 | if (value && value.type === 'Buffer' && Array.isArray(value.data)) { 46 | return Buffer.from(value.data) 47 | } 48 | else { 49 | return value 50 | } 51 | }) 52 | } 53 | catch (e) { 54 | console.error(`Cannot unpack file ${fileA}`) 55 | 56 | return 57 | } 58 | 59 | const reconA = new Recon() 60 | 61 | reconA.on('info', console.info.bind(console)) 62 | reconA.on('warn', console.warn.bind(console)) 63 | reconA.on('error', console.error.bind(console)) 64 | reconA.on('debug', console.debug.bind(console)) 65 | 66 | try { 67 | reconA.deserialize(fileAJSON) 68 | } 69 | catch (e) { 70 | console.error(`Cannot load file ${fileA}`) 71 | 72 | return 73 | } 74 | 75 | let fileBData 76 | 77 | try { 78 | fileBData = await readFile(fileB) 79 | } 80 | catch (e) { 81 | console.error(`Cannot read file ${fileB}`) 82 | 83 | return 84 | } 85 | 86 | let fileBJSON 87 | 88 | try { 89 | fileBJSON = JSON.parse(fileBData, (key, value) => { 90 | if (value && value.type === 'Buffer' && Array.isArray(value.data)) { 91 | return Buffer.from(value.data) 92 | } 93 | else { 94 | return value 95 | } 96 | }) 97 | } 98 | catch (e) { 99 | console.error(`Cannot parse file ${fileB}`) 100 | 101 | return 102 | } 103 | 104 | const reconB = new Recon() 105 | 106 | reconB.on('info', console.info.bind(console)) 107 | reconB.on('warn', console.warn.bind(console)) 108 | reconB.on('error', console.error.bind(console)) 109 | reconB.on('debug', console.debug.bind(console)) 110 | 111 | try { 112 | reconB.deserialize(fileBJSON) 113 | } 114 | catch (e) { 115 | console.error(`Cannot load file ${fileB}`) 116 | 117 | return 118 | } 119 | 120 | const { 121 | [subset]: collection 122 | } = reconA.cy.elements().diff(reconB.cy.elements()) 123 | 124 | const resultNodes = collection.map(node => node.data()) 125 | 126 | const { handleWriteOptions } = require('../../lib/handlers/file') 127 | 128 | const recon = new Recon() 129 | 130 | recon.on('info', console.info.bind(console)) 131 | recon.on('warn', console.warn.bind(console)) 132 | recon.on('error', console.error.bind(console)) 133 | recon.on('debug', console.debug.bind(console)) 134 | 135 | recon.cy.add({ 136 | group: 'nodes', 137 | data: { 138 | id: 'previous', 139 | type: 'previous', 140 | label: 'Previous', 141 | props: {} 142 | } 143 | }) 144 | 145 | collection.nodes().forEach((node) => { 146 | recon.cy.add(node) 147 | }) 148 | 149 | collection.edges().forEach((edge) => { 150 | const data = edge.data() 151 | 152 | let { source, target } = data 153 | 154 | let move 155 | 156 | if (!source || !collection.getElementById(source).length) { 157 | source = 'previous' 158 | move = true 159 | } 160 | 161 | if (!target || !collection.getElementById(target).length) { 162 | target = 'previous' 163 | move = true 164 | } 165 | 166 | if (move) { 167 | recon.cy.add({ 168 | group: 'edges', 169 | data: { ...data, source, target } 170 | }) 171 | } 172 | else { 173 | recon.cy.add({ 174 | group: 'edges', 175 | data: data 176 | }) 177 | } 178 | }) 179 | 180 | await handleWriteOptions(argv, recon) 181 | 182 | const { handleOutputOptions } = require('../../lib/handlers/output') 183 | 184 | await handleOutputOptions(argv, resultNodes) 185 | 186 | } 187 | } 188 | -------------------------------------------------------------------------------- /commands/recon/sub/edit/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'edit ', 3 | describe: 'Edit nodes', 4 | aliases: ['e'], 5 | 6 | builder: (yargs) => { 7 | const { installReadOptions, installWriteOptions } = require('../../lib/handlers/file') 8 | 9 | installReadOptions(yargs) 10 | installWriteOptions(yargs) 11 | 12 | const { installOutputOptions } = require('../../lib/handlers/output') 13 | 14 | installOutputOptions(yargs) 15 | 16 | yargs.options('traverse', { 17 | alias: 'v', 18 | type: 'boolean', 19 | describe: 'Traverse graph', 20 | default: false 21 | }) 22 | 23 | yargs.options('auto-weight', { 24 | alias: 't', 25 | type: 'boolean', 26 | describe: 'Auto weight nodes', 27 | default: false 28 | }) 29 | }, 30 | 31 | handler: async(argv) => { 32 | const { traverse, autoWeight, expressions } = argv 33 | 34 | const { recon } = require('../../lib/globals/recon') 35 | 36 | const { handleReadOptions, handleWriteOptions } = require('../../lib/handlers/file') 37 | 38 | await handleReadOptions(argv, recon) 39 | 40 | let resultNodes 41 | 42 | if (traverse) { 43 | resultNodes = recon.traverse(...expressions) 44 | } 45 | else { 46 | resultNodes = recon.select(...expressions) 47 | } 48 | 49 | if (autoWeight) { 50 | recon.measure(resultNodes) 51 | } 52 | 53 | resultNodes = resultNodes.map(node => node.data()) 54 | 55 | await handleWriteOptions(argv, recon) 56 | 57 | const { handleOutputOptions } = require('../../lib/handlers/output') 58 | 59 | await handleOutputOptions(argv, resultNodes) 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /commands/recon/sub/exec/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'exec ', 3 | describe: 'Execute js file', 4 | aliases: ['c'], 5 | 6 | builder: (yargs) => { 7 | const { installReadOptions, installWriteOptions } = require('../../lib/handlers/file') 8 | 9 | installReadOptions(yargs) 10 | installWriteOptions(yargs) 11 | 12 | const { installOutputOptions } = require('../../lib/handlers/output') 13 | 14 | installOutputOptions(yargs) 15 | }, 16 | 17 | handler: async(argv) => { 18 | const { files } = argv 19 | 20 | const path = require('path') 21 | const process = require('process') 22 | 23 | const { recon } = require('../../lib/globals/recon') 24 | 25 | const { handleReadOptions, handleWriteOptions } = require('../../lib/handlers/file') 26 | 27 | await handleReadOptions(argv, recon) 28 | 29 | for (let file of Array.isArray(files) ? files : [files]) { 30 | const module = require(path.join(process.cwd(), file)) // TODO: we should not be doing our own path resolve 31 | 32 | if (typeof(module) === 'function') { 33 | await module(recon) 34 | } 35 | } 36 | 37 | const resultNodes = recon.selection.map(node => node.data()) 38 | 39 | await handleWriteOptions(argv, recon) 40 | 41 | const { handleOutputOptions } = require('../../lib/handlers/output') 42 | 43 | await handleOutputOptions(argv, resultNodes) 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /commands/recon/sub/export/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'export ', 3 | describe: 'Export to file', 4 | aliases: ['x'], 5 | 6 | builder: (yargs) => {}, 7 | 8 | handler: async(argv) => {} 9 | } 10 | -------------------------------------------------------------------------------- /commands/recon/sub/group/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'group ', 3 | describe: 'Group nodes', 4 | aliases: ['g'], 5 | 6 | builder: (yargs) => { 7 | const { installReadOptions, installWriteOptions } = require('../../lib/handlers/file') 8 | 9 | installReadOptions(yargs) 10 | installWriteOptions(yargs) 11 | 12 | const { installOutputOptions } = require('../../lib/handlers/output') 13 | 14 | installOutputOptions(yargs) 15 | 16 | yargs.options('traverse', { 17 | alias: 'v', 18 | type: 'boolean', 19 | describe: 'Traverse graph', 20 | default: false 21 | }) 22 | }, 23 | 24 | handler: async(argv) => { 25 | const { traverse, name, expressions } = argv 26 | 27 | const { recon } = require('../../lib/globals/recon') 28 | 29 | const { handleReadOptions, handleWriteOptions } = require('../../lib/handlers/file') 30 | 31 | await handleReadOptions(argv, recon) 32 | 33 | let resultNodes 34 | 35 | if (traverse) { 36 | resultNodes = recon.traverse(...expressions).map(node => node.data()) 37 | } 38 | else { 39 | resultNodes = recon.select(...expressions).map(node => node.data()) 40 | } 41 | 42 | await recon.group(name) 43 | 44 | await handleWriteOptions(argv, recon) 45 | 46 | const { handleOutputOptions } = require('../../lib/handlers/output') 47 | 48 | await handleOutputOptions(argv, resultNodes) 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /commands/recon/sub/import/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'import ', 3 | describe: 'Import file', 4 | aliases: ['i'], 5 | 6 | builder: (yargs) => { 7 | yargs.option('file-type', { 8 | choices: ['json'], 9 | describe: 'Import file type' 10 | }) 11 | 12 | yargs.option('node-type', { 13 | describe: 'Imported nodes types', 14 | default: 'string' 15 | }) 16 | 17 | yargs.option('id-field', { 18 | alias: 'i', 19 | describe: 'The id field' 20 | }) 21 | 22 | yargs.option('type-field', { 23 | alias: 'i', 24 | describe: 'The type field' 25 | }) 26 | 27 | yargs.option('label-field', { 28 | alias: 'l', 29 | describe: 'The label field' 30 | }) 31 | 32 | yargs.option('image-field', { 33 | alias: 'm', 34 | describe: 'The image field' 35 | }) 36 | 37 | yargs.options('group', { 38 | alias: 'g', 39 | type: 'string', 40 | describe: 'Group nodes', 41 | default: '' 42 | }) 43 | 44 | const { installOutputOptions } = require('../../lib/handlers/output') 45 | 46 | installOutputOptions(yargs) 47 | }, 48 | 49 | handler: async(argv) => { 50 | const { fileType: _fileType, nodeType, idField, typeField, labelField, imageField, group, file } = argv 51 | 52 | const path = require('path') 53 | 54 | const fileType = _fileType ? _fileType : path.extname(file).slice(1) 55 | 56 | const { recon } = require('../../lib/globals/recon') 57 | 58 | const nodes = [] 59 | 60 | await new Promise((resolve, reject) => { 61 | const fs = require('fs') 62 | const { chain } = require('stream-chain') 63 | const { parser } = require('stream-csv-as-json') 64 | const { asObjects } = require('stream-csv-as-json/AsObjects') 65 | const StreamValues = require('stream-json/streamers/StreamValues') 66 | const { streamValues } = require('stream-json/streamers/StreamValues') 67 | 68 | let pipeline 69 | 70 | switch (fileType) { 71 | case 'csv': 72 | pipeline = chain([ 73 | fs.createReadStream(file), 74 | parser(), 75 | asObjects(), 76 | streamValues() 77 | ]) 78 | 79 | break 80 | 81 | case 'json': 82 | default: 83 | pipeline = chain([ 84 | fs.createReadStream(file), 85 | StreamValues.withParser() 86 | ]) 87 | } 88 | 89 | pipeline.on('data', ({ value }) => { 90 | const item = { 91 | props: value 92 | } 93 | 94 | if (nodeType) { 95 | item.type = nodeType 96 | } 97 | 98 | if (idField) { 99 | item.id = value[idField] 100 | } 101 | 102 | if (typeField) { 103 | item.typeField = value[typeField] 104 | } 105 | 106 | if (labelField) { 107 | item.label = value[labelField] 108 | } 109 | 110 | if (imageField) { 111 | item.image = value[imageField] 112 | } 113 | 114 | nodes.push(item) 115 | }) 116 | 117 | pipeline.on('error', (error) => { 118 | reject(error) 119 | }) 120 | 121 | pipeline.on('end', () => { 122 | resolve() 123 | }) 124 | }) 125 | 126 | const resultNodes = (await recon.addNodes(nodes)).map(node => node.data()) 127 | 128 | const { handleOutputOptions } = require('../../lib/handlers/output') 129 | 130 | await handleOutputOptions(argv, resultNodes) 131 | } 132 | } 133 | -------------------------------------------------------------------------------- /commands/recon/sub/layout/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'layout ', 3 | describe: 'Layout the graph', 4 | aliases: ['k'], 5 | 6 | builder: (yargs) => { 7 | const { installReadOptions, installWriteOptions } = require('../../lib/handlers/file') 8 | 9 | installReadOptions(yargs) 10 | installWriteOptions(yargs) 11 | }, 12 | 13 | handler: async(argv) => { 14 | const { name } = argv 15 | 16 | const { cytoscape } = require('../../../../lib/cytoscape') 17 | 18 | const klay = require('cytoscape-klay') 19 | const dagre = require('cytoscape-dagre') 20 | const euler = require('cytoscape-euler') 21 | 22 | cytoscape.use(klay) 23 | cytoscape.use(dagre) 24 | cytoscape.use(euler) 25 | 26 | const { recon } = require('../../lib/globals/recon') 27 | 28 | recon.resetGraph({ 29 | headless: true, 30 | 31 | styleEnabled: true 32 | }) 33 | 34 | const { handleReadOptions, handleWriteOptions } = require('../../lib/handlers/file') 35 | 36 | await handleReadOptions(argv, recon) 37 | 38 | const layout = recon.elements().layout({ 39 | boundingBox: { x1: 0, y1: 0, w: 4096, h: 3072 }, 40 | 41 | name: name, 42 | 43 | animate: false, 44 | 45 | nodeDimensionsIncludeLabels: true 46 | }) 47 | 48 | const promise = layout.pon('layoutstop') 49 | 50 | await layout.run() 51 | 52 | await promise 53 | 54 | await handleWriteOptions(argv, recon) 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /commands/recon/sub/load/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'load ', 3 | describe: 'Load a file', 4 | aliases: ['l'], 5 | 6 | builder: (yargs) => { 7 | const { installWriteOptions } = require('../../lib/handlers/file') 8 | 9 | installWriteOptions(yargs) 10 | }, 11 | 12 | handler: async(argv) => { 13 | const { file } = argv 14 | 15 | const { recon } = require('../../lib/globals/recon') 16 | 17 | const { handleWriteOptions, handleReadOptions } = require('../../lib/handlers/file') 18 | 19 | await handleReadOptions({ read: file }, recon) 20 | 21 | await handleWriteOptions(argv, recon) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /commands/recon/sub/merge/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'merge ', 3 | describe: 'Perform a merge between at least two recon files', 4 | aliases: ['m'], 5 | 6 | builder: (yargs) => { 7 | const { installWriteOptions } = require('../../lib/handlers/file') 8 | 9 | installWriteOptions(yargs) 10 | }, 11 | 12 | handler: async(argv) => { 13 | const { files } = argv 14 | 15 | const { readFile } = require('@pown/file/lib/readFile') // TODO: remove and use fs instead 16 | 17 | const { Recon } = require('../../../../lib/recon') 18 | 19 | const recon = new Recon() 20 | 21 | recon.on('info', console.info.bind(console)) 22 | recon.on('warn', console.warn.bind(console)) 23 | recon.on('error', console.error.bind(console)) 24 | recon.on('debug', console.debug.bind(console)) 25 | 26 | await Promise.all(files.map(async(file) => { 27 | let data 28 | 29 | try { 30 | data = await readFile(file) 31 | } 32 | catch (e) { 33 | console.error(`Cannot read file ${file}`) 34 | 35 | return 36 | } 37 | 38 | let json 39 | 40 | try { 41 | json = JSON.parse(data, (key, value) => { 42 | if (value && value.type === 'Buffer' && Array.isArray(value.data)) { 43 | return Buffer.from(value.data) 44 | } 45 | else { 46 | return value 47 | } 48 | }) 49 | } 50 | catch (e) { 51 | console.error(`Cannot unpack file ${file}`) 52 | 53 | return 54 | } 55 | 56 | const reconFile = new Recon() 57 | 58 | reconFile.on('info', console.info.bind(console)) 59 | reconFile.on('warn', console.warn.bind(console)) 60 | reconFile.on('error', console.error.bind(console)) 61 | reconFile.on('debug', console.debug.bind(console)) 62 | 63 | try { 64 | reconFile.deserialize(json) 65 | } 66 | catch (e) { 67 | console.error(`Cannot load file ${file}`) 68 | 69 | return 70 | } 71 | 72 | recon.cy.add(reconFile.cy.elements()) 73 | })) 74 | 75 | const { handleWriteOptions } = require('../../lib/handlers/file') 76 | 77 | await handleWriteOptions(argv, recon) 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /commands/recon/sub/options/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'options ', 3 | describe: 'Manage options', 4 | aliases: ['option'], 5 | 6 | builder: (yargs) => { 7 | yargs.command(require('./sub/list').yargs) 8 | yargs.command(require('./sub/set').yargs) 9 | yargs.command(require('./sub/get').yargs) 10 | yargs.command(require('./sub/delete').yargs) 11 | yargs.command(require('./sub/clear').yargs) 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /commands/recon/sub/options/sub/clear.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'clear', 3 | describe: 'Clear options', 4 | aliases: ['c'], 5 | 6 | builder: (yargs) => { 7 | yargs.option('category', { 8 | alias: ['c'], 9 | describe: 'Select category', 10 | default: 'global' 11 | }) 12 | }, 13 | 14 | handler: (argv) => { 15 | const { options } = require('../../../lib/globals/options') 16 | 17 | const { category } = argv 18 | 19 | options.clearOptions(category) 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /commands/recon/sub/options/sub/delete.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'delete ', 3 | describe: 'Delete option', 4 | aliases: ['d'], 5 | 6 | builder: (yargs) => { 7 | yargs.option('category', { 8 | alias: ['c'], 9 | describe: 'Select category', 10 | default: 'global' 11 | }) 12 | }, 13 | 14 | handler: (argv) => { 15 | const { options } = require('../../../lib/globals/options') 16 | 17 | const { category, name } = argv 18 | 19 | options.deleteOption(category, name) 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /commands/recon/sub/options/sub/get.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'get ', 3 | describe: 'Get option', 4 | aliases: ['g'], 5 | 6 | builder: (yargs) => { 7 | yargs.option('category', { 8 | alias: ['c'], 9 | describe: 'Select category', 10 | default: 'global' 11 | }) 12 | }, 13 | 14 | handler: (argv) => { 15 | const { options } = require('../../../lib/globals/options') 16 | 17 | const { category, name } = argv 18 | 19 | const value = options.getOption(category, name) 20 | 21 | console.log(value) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /commands/recon/sub/options/sub/list.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'list', 3 | describe: 'List option', 4 | aliases: ['l'], 5 | 6 | builder: (yargs) => { 7 | yargs.option('category', { 8 | alias: ['c'], 9 | describe: 'Select category', 10 | default: 'global' 11 | }) 12 | }, 13 | 14 | handler: (argv) => { 15 | const { options } = require('../../../lib/globals/options') 16 | 17 | const { category } = argv 18 | 19 | const table = [] 20 | 21 | for (let option of options.listOptions(category)) { 22 | table.push(option) 23 | } 24 | 25 | console.table(table) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /commands/recon/sub/options/sub/set.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'set ', 3 | describe: 'Set option', 4 | aliases: ['s'], 5 | 6 | builder: (yargs) => { 7 | yargs.option('category', { 8 | alias: ['c'], 9 | type: 'string', 10 | describe: 'Select category', 11 | default: 'global' 12 | }) 13 | 14 | yargs.option('json', { 15 | alias: ['j'], 16 | type: 'boolean', 17 | describe: 'Assume the option is a json string', 18 | default: false 19 | }) 20 | }, 21 | 22 | handler: (argv) => { 23 | const { options } = require('../../../lib/globals/options') 24 | 25 | const { category, json, name, value } = argv 26 | 27 | if (json) { 28 | options.setOption(category, name, JSON.parse(value)) 29 | } 30 | else { 31 | options.setOption(category, name, value) 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /commands/recon/sub/remote/add.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'add ', 3 | describe: 'Add remote', 4 | aliases: ['a'], 5 | 6 | handler: async(argv) => { 7 | const { uris } = argv 8 | 9 | const { getPreferences, setPreferences } = require('@pown/preferences') 10 | 11 | const { fetchRemoteTransforms } = require('../../../../lib/remote') 12 | 13 | const preferences = await getPreferences('recon') 14 | 15 | preferences.remotes = { 16 | ...preferences.remotes, 17 | 18 | ...Object.assign({}, ...(await Promise.all(uris.map(async(uri) => { 19 | const transforms = await fetchRemoteTransforms(uri) 20 | 21 | console.group(uri) 22 | 23 | Object.entries(transforms).forEach(([name, def]) => { 24 | console.table([{ ...def, name }], ['name', 'title', 'description']) 25 | }) 26 | 27 | console.groupEnd() 28 | 29 | return { 30 | [uri]: transforms 31 | } 32 | })))) 33 | } 34 | 35 | await setPreferences('recon', preferences) 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /commands/recon/sub/remote/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'remote ', 3 | describe: 'Remote managment', 4 | aliases: ['remotes', 'f'], 5 | 6 | builder: (yargs) => { 7 | yargs.command(require('./list').yargs) 8 | yargs.command(require('./add').yargs) 9 | yargs.command(require('./remove').yargs) 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /commands/recon/sub/remote/list.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'list', 3 | describe: 'List remotes', 4 | aliases: ['l'], 5 | 6 | handler: async(argv) => { 7 | const { getPreferences } = require('@pown/preferences') 8 | 9 | const { remotes = {} } = await getPreferences('recon') 10 | 11 | Object.entries(remotes).forEach(([uri, defs]) => { 12 | console.group(uri) 13 | 14 | Object.entries(defs).forEach(([name, def]) => { 15 | console.table([{ ...def, name }], ['name', 'title', 'description']) 16 | }) 17 | 18 | console.groupEnd() 19 | }) 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /commands/recon/sub/remote/remove.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'remove ', 3 | describe: 'Remove remote', 4 | aliases: ['a'], 5 | 6 | handler: async(argv) => { 7 | const { uris } = argv 8 | 9 | const { getPreferences, setPreferences } = require('@pown/preferences') 10 | 11 | const preferences = await getPreferences('recon') 12 | 13 | uris.forEach((uri) => { 14 | delete preferences.remotes[uri] 15 | }) 16 | 17 | setPreferences('recon', preferences) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /commands/recon/sub/remove/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'remove ', 3 | describe: 'Remove nodes', 4 | aliases: ['r'], 5 | 6 | builder: (yargs) => { 7 | const { installReadOptions, installWriteOptions } = require('../../lib/handlers/file') 8 | 9 | installReadOptions(yargs) 10 | installWriteOptions(yargs) 11 | 12 | const { installOutputOptions } = require('../../lib/handlers/output') 13 | 14 | installOutputOptions(yargs) 15 | 16 | yargs.options('traverse', { 17 | alias: 'v', 18 | type: 'boolean', 19 | describe: 'Traverse graph', 20 | default: false 21 | }) 22 | }, 23 | 24 | handler: async(argv) => { 25 | const { traverse, expressions } = argv 26 | 27 | const { recon } = require('../../lib/globals/recon') 28 | 29 | const { handleReadOptions, handleWriteOptions } = require('../../lib/handlers/file') 30 | 31 | await handleReadOptions(argv, recon) 32 | 33 | let resultNodes 34 | 35 | if (traverse) { 36 | resultNodes = recon.traverse(...expressions).map(node => node.data()) 37 | } 38 | else { 39 | resultNodes = recon.select(...expressions).map(node => node.data()) 40 | } 41 | 42 | recon.removeNodes(resultNodes) 43 | 44 | await handleWriteOptions(argv, recon) 45 | 46 | const { handleOutputOptions } = require('../../lib/handlers/output') 47 | 48 | await handleOutputOptions(argv, resultNodes) 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /commands/recon/sub/save/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'save ', 3 | describe: 'Save to file', 4 | aliases: ['o'], 5 | 6 | builder: (yargs) => { 7 | const { installReadOptions } = require('../../lib/handlers/file') 8 | 9 | installReadOptions(yargs) 10 | }, 11 | 12 | handler: async(argv) => { 13 | const { file } = argv 14 | 15 | const { recon } = require('../../lib/globals/recon') 16 | 17 | const { handleReadOptions, handleWriteOptions } = require('../../lib/handlers/file') 18 | 19 | await handleReadOptions(argv, recon) 20 | 21 | await handleWriteOptions({ write: file }, recon) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /commands/recon/sub/select/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'select ', 3 | describe: 'Select nodes', 4 | aliases: ['s'], 5 | 6 | builder: (yargs) => { 7 | const { installReadOptions, installWriteOptions } = require('../../lib/handlers/file') 8 | 9 | installReadOptions(yargs) 10 | installWriteOptions(yargs) 11 | 12 | const { installOutputOptions } = require('../../lib/handlers/output') 13 | 14 | installOutputOptions(yargs) 15 | }, 16 | 17 | handler: async(argv) => { 18 | const { expressions } = argv 19 | 20 | const { recon } = require('../../lib/globals/recon') 21 | 22 | const { handleReadOptions, handleWriteOptions } = require('../../lib/handlers/file') 23 | 24 | await handleReadOptions(argv, recon) 25 | 26 | const resultNodes = recon.select(...expressions).map(node => node.data()) 27 | 28 | await handleWriteOptions(argv, recon) 29 | 30 | const { handleOutputOptions } = require('../../lib/handlers/output') 31 | 32 | await handleOutputOptions(argv, resultNodes) 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /commands/recon/sub/template/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'template ', 3 | describe: 'Recon template commands', 4 | aliases: ['p', 'templates'], 5 | 6 | builder: (yargs) => { 7 | yargs.command(require('./sub/run').yargs) 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /commands/recon/sub/template/sub/run/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'run ', 3 | describe: 'Run template', 4 | aliases: ['r'], 5 | 6 | builder: (yargs) => { 7 | const { installReadOptions, installWriteOptions } = require('../../../../lib/handlers/file') 8 | 9 | installReadOptions(yargs) 10 | installWriteOptions(yargs) 11 | 12 | const { installOutputOptions } = require('../../../../lib/handlers/output') 13 | 14 | installOutputOptions(yargs) 15 | }, 16 | 17 | handler: async(argv) => { 18 | const { templates } = argv 19 | 20 | const { recon: gRecon } = require('../../../../lib/globals/recon') 21 | 22 | const { getCompoundTransforms } = require('../../../transform/transforms') 23 | 24 | const compoundTransforms = await getCompoundTransforms() 25 | 26 | gRecon.registerTransforms(compoundTransforms) 27 | 28 | const { handleReadOptions, handleWriteOptions } = require('../../../../lib/handlers/file') 29 | 30 | await handleReadOptions(argv, gRecon) 31 | 32 | const jsYaml = require('js-yaml') 33 | 34 | const { extname, join } = require('path') 35 | const { statSync, readdirSync, readFileSync } = require('fs') 36 | 37 | const { ReconTemplate } = require('../../../../../../lib/template') 38 | 39 | const { Scheduler } = require('../../../../../../lib/scheduler') 40 | 41 | const scheduler = new Scheduler() 42 | 43 | const findTemplates = function*(paths) { 44 | for (let path of paths) { 45 | const stat = statSync(path) 46 | 47 | if (stat.isDirectory()) { 48 | for (let dir of readdirSync(path)) { 49 | yield* findTemplates([join(path, dir)]) 50 | } 51 | } 52 | else { 53 | const ext = extname(path) 54 | 55 | let doc 56 | 57 | if (['.yaml', '.yml'].includes(ext)) { 58 | const data = readFileSync(path) 59 | 60 | doc = jsYaml.load(data) 61 | } 62 | else 63 | if (['.json'].includes(ext)) { 64 | const data = readFileSync(path) 65 | 66 | doc = JSON.parse(data) 67 | } 68 | else { 69 | return 70 | } 71 | 72 | const template = new ReconTemplate(doc, { scheduler }) 73 | 74 | template.path = path 75 | 76 | yield template 77 | } 78 | } 79 | } 80 | 81 | const { handleOutputOptions } = require('../../../../lib/handlers/output') 82 | 83 | for (let template of findTemplates(templates)) { 84 | console.info(`running template ${template.id || template.path}`) 85 | 86 | await template.run({}, gRecon) 87 | 88 | const resultNodes = gRecon.selection.map(node => node.data()) 89 | 90 | await handleOutputOptions(argv, resultNodes) 91 | } 92 | 93 | await handleWriteOptions(argv, gRecon) 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /commands/recon/sub/transform/transforms.js: -------------------------------------------------------------------------------- 1 | const { extractSync } = require('@pown/modules') 2 | const { getPreferencesSync } = require('@pown/preferences') 3 | 4 | const { buildRemoteTransforms } = require('../../../../lib/remote') 5 | 6 | const getCompoundTransforms = async () => { 7 | const { remotes = {} } = getPreferencesSync('recon') 8 | 9 | const remoteTransforms = buildRemoteTransforms(remotes) 10 | 11 | const { loadableTransforms } = extractSync() 12 | 13 | return { 14 | ...remoteTransforms, 15 | 16 | ...Object.assign({}, ...(await Promise.all(loadableTransforms.map(async (module) => { 17 | let transforms 18 | 19 | try { 20 | transforms = require(module) 21 | } 22 | catch (e) { 23 | if (e.code === 'ERR_REQUIRE_ESM' || e.message === 'Cannot use import statement outside a module') { 24 | return await import(module) 25 | } 26 | 27 | return {} 28 | } 29 | 30 | if (!transforms) { 31 | return {} 32 | } 33 | 34 | return Object.assign({}, ...Object.entries(transforms).map(([name, Transform]) => { 35 | return { 36 | [name]: class extends Transform { 37 | static loadableTransformModule = module; 38 | static loadableTransformName = name; 39 | } 40 | } 41 | })) 42 | })))) 43 | } 44 | } 45 | 46 | module.exports = { getCompoundTransforms } 47 | -------------------------------------------------------------------------------- /commands/recon/sub/traverse/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'traverse ', 3 | describe: 'Traverse nodes', 4 | aliases: ['v'], 5 | 6 | builder: (yargs) => { 7 | yargs.command(require('./sub/traverse').yargs) 8 | yargs.command(require('./sub/set').yargs) 9 | yargs.command(require('./sub/get').yargs) 10 | yargs.command(require('./sub/del').yargs) 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /commands/recon/sub/traverse/sub/del.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'del ', 3 | describe: 'Delete named traversal', 4 | 5 | builder: (yargs) => {}, 6 | 7 | handler: async(argv) => { 8 | const { name } = argv 9 | 10 | const { recon } = require('../../../lib/globals/recon') 11 | 12 | recon.cy.delTraversalByName(name) 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /commands/recon/sub/traverse/sub/get.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'get ', 3 | describe: 'Get named traversal', 4 | 5 | builder: (yargs) => {}, 6 | 7 | handler: async(argv) => { 8 | const { name } = argv 9 | 10 | const { recon } = require('../../../lib/globals/recon') 11 | 12 | console.log(recon.cy.getTraversalByName(name)) 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /commands/recon/sub/traverse/sub/set.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'set ', 3 | describe: 'Set named traversal', 4 | 5 | builder: (yargs) => {}, 6 | 7 | handler: async(argv) => { 8 | const { name, expression } = argv 9 | 10 | const { recon } = require('../../../lib/globals/recon') 11 | 12 | recon.cy.setTraversalByName(name, expression) 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /commands/recon/sub/traverse/sub/traverse.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: '$0 ', 3 | 4 | builder: (yargs) => { 5 | const { installReadOptions, installWriteOptions } = require('../../../lib/handlers/file') 6 | 7 | installReadOptions(yargs) 8 | installWriteOptions(yargs) 9 | 10 | const { installOutputOptions } = require('../../../lib/handlers/output') 11 | 12 | installOutputOptions(yargs) 13 | }, 14 | 15 | handler: async(argv) => { 16 | const { expressions } = argv 17 | 18 | const { recon } = require('../../../lib/globals/recon') 19 | 20 | const { handleReadOptions, handleWriteOptions } = require('../../../lib/handlers/file') 21 | 22 | await handleReadOptions(argv, recon) 23 | 24 | const resultNodes = recon.traverse(...expressions).map(node => node.data()) 25 | 26 | await handleWriteOptions(argv, recon) 27 | 28 | const { handleOutputOptions } = require('../../../lib/handlers/output') 29 | 30 | await handleOutputOptions(argv, resultNodes) 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /commands/recon/sub/ungroup/index.js: -------------------------------------------------------------------------------- 1 | exports.yargs = { 2 | command: 'ungroup ', 3 | describe: 'Ungroup nodes', 4 | aliases: ['u'], 5 | 6 | builder: (yargs) => { 7 | const { installReadOptions, installWriteOptions } = require('../../lib/handlers/file') 8 | 9 | installReadOptions(yargs) 10 | installWriteOptions(yargs) 11 | 12 | const { installOutputOptions } = require('../../lib/handlers/output') 13 | 14 | installOutputOptions(yargs) 15 | 16 | yargs.options('traverse', { 17 | alias: 'v', 18 | type: 'boolean', 19 | describe: 'Traverse graph', 20 | default: false 21 | }) 22 | }, 23 | 24 | handler: async(argv) => { 25 | const { traverse, expressions } = argv 26 | 27 | const { recon } = require('../../lib/globals/recon') 28 | 29 | const { handleReadOptions, handleWriteOptions } = require('../../lib/handlers/file') 30 | 31 | await handleReadOptions(argv, recon) 32 | 33 | let resultNodes 34 | 35 | if (traverse) { 36 | resultNodes = recon.traverse(...expressions).map(node => node.data()) 37 | } 38 | else { 39 | resultNodes = recon.select(...expressions).map(node => node.data()) 40 | } 41 | 42 | await recon.ungroup() 43 | 44 | await handleWriteOptions(argv, recon) 45 | 46 | const { handleOutputOptions } = require('../../lib/handlers/output') 47 | 48 | await handleOutputOptions(argv, resultNodes) 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /examples/auto.pown: -------------------------------------------------------------------------------- 1 | echo Setting brand 2 | recon a --node-type brand pownjs 3 | 4 | echo Auto recon phase 1 5 | recon t auto -s 'node' 6 | 7 | echo Auto recon phase 2 8 | recon t auto -s 'node' 9 | -------------------------------------------------------------------------------- /examples/cache-options.pown: -------------------------------------------------------------------------------- 1 | # The following script demonstrates how to use cache features with options. The options 2 | # can be used to persist cache configurations across multiple transforms 3 | 4 | set -x 5 | 6 | recon cache set --dynamodb-table Cache 7 | 8 | recon add --node-type domain "secapps.com" 9 | 10 | recon t build_uri -s 'node[type="domain"]' 11 | recon t http_fingerprint -s 'node[type="uri"]' 12 | -------------------------------------------------------------------------------- /examples/coderecon.pown: -------------------------------------------------------------------------------- 1 | set -x 2 | 3 | recon add --node-type brand "$1" 4 | 5 | recon t auto --auto-group --auto-weight --name 'github*' -s 'node[type="brand"]' 6 | recon t auto --auto-group --auto-weight --name 'bitbucket*' -s 'node[type="brand"]' 7 | recon t auto --auto-group --auto-weight --name 'dockerhub*' -s 'node[type="brand"]' 8 | 9 | recon t auto --auto-group --auto-weight --name 'github*' -s 'node[type="github:member"]' 10 | 11 | recon save "$2" 12 | -------------------------------------------------------------------------------- /examples/dns.pown: -------------------------------------------------------------------------------- 1 | echo DNS Recon Example 2 | recon t auto --name 'dnsresolve' --node-type domain secapps.com websecurify.com 3 | -------------------------------------------------------------------------------- /examples/githubenum.pown: -------------------------------------------------------------------------------- 1 | echo Listing members 2 | recon t ghlm pownjs 3 | 4 | echo Extracting member repositories 5 | recon t ghlr -s 'node[type="github:member"]' 6 | -------------------------------------------------------------------------------- /examples/group.pown: -------------------------------------------------------------------------------- 1 | set -xe 2 | 3 | # The following script demonstrates that grouped nodes can be regroupd into new groups. 4 | 5 | recon add a b 6 | recon group 'AB' 'node[type!="group"]' 7 | recon s 'node[id="group:AB"] > node' 8 | 9 | recon add c d 10 | recon group 'CD' 'node[type!="group"]' 11 | recon s 'node[id="group:CD"] > node' 12 | -------------------------------------------------------------------------------- /examples/measure.pown: -------------------------------------------------------------------------------- 1 | echo Measure Example 2 | recon a a 3 | recon a b 4 | recon e --auto-weight '*' 5 | -------------------------------------------------------------------------------- /examples/named-traversals.pown: -------------------------------------------------------------------------------- 1 | set -xe 2 | 3 | recon add --node-type letter a b c d c 4 | recon add --node-type number 1 2 3 4 5 5 | 6 | recon v set letters 'filter node[type="letter"]' 7 | recon v set numbers 'filter node[type="number"]' 8 | 9 | recon v 'traverseByName letters' 10 | recon v 'traverseByName numbers' 11 | 12 | recon v 'traverseByName letters & traverseByName numbers' 13 | 14 | recon v 'traverseByName numbers | traverseByScript node.data("label") % 2 === 0' 15 | 16 | recon v set script 'traverseByScript node.data("label") % 2 === 0' 17 | 18 | recon v 'traverseByName numbers | traverseByName script' 19 | -------------------------------------------------------------------------------- /examples/noop.pown: -------------------------------------------------------------------------------- 1 | recon t noop a 2 | recon t noop b 3 | recon t noop c 4 | -------------------------------------------------------------------------------- /examples/permissive-cors-1.pown: -------------------------------------------------------------------------------- 1 | set -x 2 | 3 | recon a --node-type uri 'https://www.googleoptimize.com/optimize.js?id=GTM-WLFPCHW' 4 | 5 | recon t http_fingerprint -s 'node[type="uri"]' 6 | 7 | # TODO: files cannot be relative to the script because of bug in the script module 8 | # the script should be setting the cwd to its folder 9 | 10 | recon template run ./examples/permissive-cors-1.yaml 11 | -------------------------------------------------------------------------------- /examples/permissive-cors-1.yaml: -------------------------------------------------------------------------------- 1 | id: permissive-cors 2 | 3 | add: 4 | select: node[type="uri"] 5 | 6 | matchers: 7 | - type: word 8 | word: '*' 9 | part: props.responseHeaders['access-control-allow-origin'] 10 | 11 | - type: word 12 | word: 'true' 13 | part: props.responseHeaders['access-control-allow-credentials'] 14 | 15 | extractors: 16 | - type: value 17 | path: props.responseHeaders['access-control-allow-origin'] 18 | name: props.origin 19 | 20 | - type: value 21 | path: props.responseHeaders['access-control-allow-credentials'] 22 | name: props.credentials 23 | 24 | type: issue 25 | label: Permissive Cross-Origin Resource Sharing 26 | props: 27 | level: 1 28 | title: Permissive Cross-Origin Resource Sharing 29 | description: > 30 | Cross-origin Resource Sharing (CORS) is a specification, which allows 31 | Web applications the ability to offer its resources for public consumption 32 | from different domains. CORS is typically used in cross-origin APIs designed 33 | to be consumed by JavaScript applications. 34 | -------------------------------------------------------------------------------- /examples/permissive-cors-2.pown: -------------------------------------------------------------------------------- 1 | set -x 2 | 3 | recon a --node-type uri 'https://www.googleoptimize.com/optimize.js?id=GTM-WLFPCHW' 4 | 5 | recon t http_fingerprint -s 'node[type="uri"]' 6 | 7 | # TODO: files cannot be relative to the script because of bug in the script module 8 | # the script should be setting the cwd to its folder 9 | 10 | recon template run ./examples/permissive-cors-2.yaml 11 | -------------------------------------------------------------------------------- /examples/permissive-cors-2.yaml: -------------------------------------------------------------------------------- 1 | id: permissive-cors 2 | 3 | select: 4 | select: node[type="uri"] 5 | 6 | add: 7 | matchers: 8 | - type: word 9 | word: '*' 10 | part: props.responseHeaders['access-control-allow-origin'] 11 | 12 | - type: word 13 | word: 'true' 14 | part: props.responseHeaders['access-control-allow-credentials'] 15 | 16 | extractors: 17 | - type: value 18 | path: props.responseHeaders['access-control-allow-origin'] 19 | name: props.origin 20 | 21 | - type: value 22 | path: props.responseHeaders['access-control-allow-credentials'] 23 | name: props.credentials 24 | 25 | type: issue 26 | label: Permissive Cross-Origin Resource Sharing 27 | props: 28 | level: 1 29 | title: Permissive Cross-Origin Resource Sharing 30 | description: > 31 | Cross-origin Resource Sharing (CORS) is a specification, which allows 32 | Web applications the ability to offer its resources for public consumption 33 | from different domains. CORS is typically used in cross-origin APIs designed 34 | to be consumed by JavaScript applications. 35 | -------------------------------------------------------------------------------- /examples/request.pown: -------------------------------------------------------------------------------- 1 | set -x 2 | 3 | # TODO: files cannot be relative to the script because of bug in the script module 4 | # the script should be setting the cwd to its folder 5 | 6 | recon template run ./examples/request.yaml 7 | -------------------------------------------------------------------------------- /examples/request.yaml: -------------------------------------------------------------------------------- 1 | id: request 2 | 3 | request: 4 | uri: https://httpbin.org/status/200 5 | 6 | extract: 7 | value: responseCode 8 | name: code 9 | 10 | add: 11 | type: string 12 | label: HTTP/${code} 13 | props: 14 | string: HTTP/${code} 15 | -------------------------------------------------------------------------------- /examples/script-traversals.pown: -------------------------------------------------------------------------------- 1 | set -xe 2 | 3 | recon add --node-type number 1 2 3 4 5 4 | recon v 'traverseByScript node.data("label") % 2 === 0' 5 | -------------------------------------------------------------------------------- /examples/traverse.pown: -------------------------------------------------------------------------------- 1 | set -xe 2 | 3 | recon add --node-type uri http://0.0.0.0 4 | recon add --node-type uri http://google.com 5 | 6 | recon t http_fingerprint --auto-group --auto-weight -s 'node[type="uri"]' 7 | 8 | recon remove -v 'filter node[type="code"] | neighborhood node[type="uri"]' 9 | 10 | recon select node 11 | -------------------------------------------------------------------------------- /examples/whocode.pown: -------------------------------------------------------------------------------- 1 | set -x 2 | 3 | recon add --node-type brand "$1" 4 | 5 | recon options set -c whoaretheyreport categories -j ["coding"] 6 | recon options list -c whoaretheyreport 7 | 8 | recon t whoaretheyreport -s 'node[type="brand"]' 9 | -------------------------------------------------------------------------------- /examples/whodev.pown: -------------------------------------------------------------------------------- 1 | set -x 2 | 3 | recon add --node-type brand "$1" 4 | 5 | recon options set -c whoaretheyreport categories -j ["coding"] 6 | recon options list -c whoaretheyreport 7 | 8 | recon t auto -s 'node[type="brand"]' 9 | -------------------------------------------------------------------------------- /examples/worker.js: -------------------------------------------------------------------------------- 1 | const { parentPort } = require('worker_threads') 2 | 3 | parentPort.on('message', (node) => { 4 | parentPort.postMessage({ type: 'new-type', label: 'new-label', props: { title: 'It works!' } }) 5 | }) 6 | -------------------------------------------------------------------------------- /lib/cache/dynamodb.js: -------------------------------------------------------------------------------- 1 | const awsSdk = require('aws-sdk') 2 | const querystring = require('querystring') 3 | 4 | class Cache { 5 | constructor(options) { 6 | const { ttl, ttlKeyName, keyPrefix, keySuffix, keyName, table, ...documentClientOptions } = options || {} 7 | 8 | this.ttl = ttl || 60 9 | this.ttlKeyName = ttlKeyName || 'ttl' 10 | 11 | this.keyPrefix = keyPrefix || '' 12 | this.keySuffix = keySuffix || '' 13 | this.keyName = keyName || 'key' 14 | 15 | this.table = table 16 | 17 | this.client = new awsSdk.DynamoDB.DocumentClient(documentClientOptions) 18 | } 19 | 20 | key(transform, node, options) { 21 | return [this.keyPrefix, transform, node.id, this.keySuffix].filter(i => i).map(encodeURIComponent).join('/') + '?' + querystring.stringify(options || {}) 22 | } 23 | 24 | async get(transform, node, options) { 25 | const { Item: item } = await this.client.get({ 26 | TableName: this.table, 27 | Key: { 28 | [this.keyName]: this.key(transform, node, options) 29 | } 30 | }).promise() 31 | 32 | if (item) { 33 | if (item.ttl >= Date.now() / 1000) { 34 | return item.value 35 | } 36 | } 37 | } 38 | 39 | async set(transform, node, options, value, ttl = this.ttl) { 40 | await this.client.put({ 41 | TableName: this.table, 42 | Item: { 43 | [this.keyName]: this.key(transform, node, options), 44 | [this.ttlKeyName]: Math.round((Date.now() / 1000) + ttl), 45 | 46 | value: value 47 | } 48 | }).promise() 49 | } 50 | 51 | async end() {} 52 | } 53 | 54 | module.exports = { Cache } 55 | -------------------------------------------------------------------------------- /lib/cache/memcached.js: -------------------------------------------------------------------------------- 1 | const Memcached = require('memcached') 2 | const querystring = require('querystring') 3 | 4 | class Cache { 5 | constructor(options) { 6 | const { ttl, keyPrefix, keySuffix, hosts, ...memcachedOptions } = options || {} 7 | 8 | this.ttl = ttl || 60 9 | 10 | this.keyPrefix = keyPrefix || '' 11 | this.keySuffix = keySuffix || '' 12 | 13 | this.client = new Memcached(hosts, memcachedOptions) 14 | } 15 | 16 | invoke(op, ...args) { 17 | return new Promise((resolve, reject) => { 18 | this.client[op](...args, (err, result) => { 19 | if (err) { 20 | reject(err) 21 | } 22 | else { 23 | resolve(result) 24 | } 25 | }) 26 | }) 27 | } 28 | 29 | key(transform, node, options) { 30 | return [this.keyPrefix, transform, node.id, this.keySuffix].filter(i => i).map(encodeURIComponent).join('/') + '?' + querystring.stringify(options || {}) 31 | } 32 | 33 | async get(transform, node, options) { 34 | return this.invoke('get', this.key(transform, node, options)) 35 | } 36 | 37 | async set(transform, node, options, value, ttl = this.ttl) { 38 | return this.invoke('set', this.key(transform, node, options), value, ttl) 39 | } 40 | 41 | async end() { 42 | return this.invoke('end') 43 | } 44 | } 45 | 46 | module.exports = { Cache } 47 | -------------------------------------------------------------------------------- /lib/common.js: -------------------------------------------------------------------------------- 1 | const { isDomain } = require('./detect') 2 | const { Transform } = require('./transform') 3 | const { normalizeDomain } = require('./normalize') 4 | const { DOMAIN_TYPE, SUBDOMAIN_TYPE } = require('./types') 5 | 6 | class SubdomainTransform extends Transform { 7 | async handle({ id: source = '', label = '' }) { 8 | let { subdomains, ...rest } = await this.getResults(label) 9 | 10 | const results = subdomains 11 | .filter(({ subdomain }) => subdomain) 12 | .map(({ subdomain, ...props }) => ({ subdomain: normalizeDomain(subdomain), ...props })) 13 | .filter(({ subdomain }) => subdomain && isDomain(subdomain) && subdomain.endsWith(`.${label}`)) 14 | .map(({ subdomain, ...props }) => ({ type: DOMAIN_TYPE, label: subdomain, props: { domain: subdomain, ...props }, edges: [{ source, type: SUBDOMAIN_TYPE }] })) 15 | 16 | results.push(...Object.values(rest).filter(value => Array.isArray(value))) 17 | 18 | return results 19 | } 20 | } 21 | 22 | module.exports = { SubdomainTransform } 23 | -------------------------------------------------------------------------------- /lib/cytoscape.js: -------------------------------------------------------------------------------- 1 | const cytoscape = require('cytoscape') 2 | 3 | const { traverse } = require('./plugins/traverse') 4 | 5 | cytoscape.use(traverse) 6 | 7 | module.exports = { cytoscape } 8 | -------------------------------------------------------------------------------- /lib/detect.js: -------------------------------------------------------------------------------- 1 | // TODO: do better detection than this 2 | 3 | const urlRegex = /^https?:\/\//i 4 | 5 | const emailRegex = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/ 6 | 7 | const ipv4Regex = /^(\d\d?\d?\.\d\d?\d?\.\d\d?\d?\.\d\d?\d?|\d{10})$/ 8 | 9 | const ipv6Regex = /^([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4}$/ 10 | 11 | const domainRegex = /^(([a-zA-Z]{1})|([a-zA-Z]{1}[a-zA-Z]{1})|([a-zA-Z]{1}[0-9]{1})|([0-9]{1}[a-zA-Z]{1})|([a-zA-Z0-9][a-zA-Z0-9-_]{1,61}[a-zA-Z0-9]))\.([a-zA-Z]{2,6}|[a-zA-Z0-9-]{2,30}\.[a-zA-Z]{2,3})$/ 12 | 13 | const isUrl = (input) => { 14 | return urlRegex.test(input || '') 15 | } 16 | 17 | const isEmail = (input) => { 18 | return emailRegex.test(input || '') 19 | } 20 | 21 | const isIpv4 = (input) => { 22 | return ipv4Regex.test(input || '') 23 | } 24 | 25 | const isIpv6 = (input) => { 26 | return ipv6Regex.test(input || '') 27 | } 28 | 29 | const isIp = (input) => { 30 | return isIpv4(input) || isIpv6(input) 31 | } 32 | 33 | const isDomain = (input) => { 34 | return domainRegex.test(input || '') 35 | } 36 | 37 | const isSubdomain = (input) => { 38 | // NOTE: this method rather basic and error prone 39 | // TODO: fix me 40 | 41 | return isDomain(input) && input.split('.').length - 1 >= 2 42 | } 43 | 44 | const isSubdomainOf = (input, root) => { 45 | if (!root.startsWith('.')) { 46 | root = '.' + root 47 | } 48 | 49 | input = input.toLowerCase().trim() 50 | root = root.toLowerCase().trim() 51 | 52 | return input.endsWith(root) 53 | } 54 | 55 | module.exports = { isUrl, isEmail, isIpv4, isIpv6, isIp, isDomain, isSubdomain, isSubdomainOf } 56 | -------------------------------------------------------------------------------- /lib/index.js: -------------------------------------------------------------------------------- 1 | module.exports = require('./recon') 2 | -------------------------------------------------------------------------------- /lib/normalize.js: -------------------------------------------------------------------------------- 1 | const normalizeDomain = (input) => { 2 | return input.trim().toLowerCase().replace(/\.+/g, '.').replace(/^(\*\.)+/, '').replace(/^\.+/, '').replace(/\.+$/, '') 3 | } 4 | 5 | module.exports = { 6 | normalizeDomain 7 | } 8 | -------------------------------------------------------------------------------- /lib/options.js: -------------------------------------------------------------------------------- 1 | class Store { 2 | constructor() { 3 | this.store = {} 4 | } 5 | 6 | getOption(name) { 7 | return this.store[name] 8 | } 9 | 10 | setOption(name, value) { 11 | this.store[name] = value 12 | } 13 | 14 | deleteOption(name) { 15 | delete this.store[name] 16 | } 17 | 18 | clearOptions() { 19 | this.store = {} 20 | } 21 | 22 | listOptions() { 23 | return Object.entries(this.store).map(([name, value]) => ({ name, value })) 24 | } 25 | 26 | getOptions() { 27 | return { 28 | ...this.store 29 | } 30 | } 31 | } 32 | 33 | class Options { 34 | constructor() { 35 | this.stores = {} 36 | } 37 | 38 | getStore(category) { 39 | if (!this.stores[category]) { 40 | this.stores[category] = new Store() 41 | } 42 | 43 | return this.stores[category] 44 | } 45 | 46 | getOption(category, name) { 47 | return this.getStore(category).getOption(name) 48 | } 49 | 50 | setOption(category, name, value) { 51 | this.getStore(category).setOption(name, value) 52 | } 53 | 54 | deleteOption(category, name) { 55 | this.getStore(category).deleteOption(name) 56 | } 57 | 58 | clearOptions(category) { 59 | this.getStore(category).clearOptions() 60 | } 61 | 62 | listOptions(category) { 63 | return this.getStore(category).listOptions() 64 | } 65 | 66 | getOptions(category) { 67 | return this.getStore(category).getOptions() 68 | } 69 | } 70 | 71 | module.exports = { Options, Store } 72 | -------------------------------------------------------------------------------- /lib/plugins/traverse.js: -------------------------------------------------------------------------------- 1 | const esprima = require('esprima') 2 | const staticEval = require('static-eval') 3 | 4 | const traverseFunctions = { 5 | filter: {}, 6 | 7 | neighborhood: {}, 8 | 9 | openNeighborhood: {}, 10 | 11 | closedNeighborhood: {}, 12 | 13 | component: {}, 14 | 15 | components: {}, 16 | 17 | edgesWith: {}, 18 | 19 | edgesTo: {}, 20 | 21 | connectedNodes: {}, 22 | 23 | connectedEdges: {}, 24 | 25 | sources: {}, 26 | 27 | targets: {}, 28 | 29 | parallelEdges: {}, 30 | 31 | codirectedEdges: {}, 32 | 33 | roots: {}, 34 | 35 | leaves: {}, 36 | 37 | outgoers: {}, 38 | 39 | successors: {}, 40 | 41 | incomers: {}, 42 | 43 | predecessors: {}, 44 | 45 | eq: {}, 46 | 47 | first: {}, 48 | 49 | last: {}, 50 | 51 | traverseByName: {}, 52 | 53 | traverseByScript: {} 54 | } 55 | 56 | function parseDate(date) { 57 | if (date === 'now') { 58 | return new Date() 59 | } 60 | else { 61 | return new Date(date) 62 | } 63 | } 64 | 65 | function traverse(cytoscape) { 66 | const namedTraversals = {} 67 | 68 | cytoscape('core', 'setTraversalByName', function(name, traversal) { 69 | namedTraversals[name] = traversal 70 | }) 71 | 72 | cytoscape('core', 'getTraversalByName', function(name) { 73 | return namedTraversals[name] 74 | }) 75 | 76 | cytoscape('core', 'delTraversalByName', function(name) { 77 | delete namedTraversals[name] 78 | }) 79 | 80 | const traverseByName = function(name) { 81 | const traversal = namedTraversals[name] 82 | 83 | if (!traversal) { 84 | throw new Error(`Unrecognized traversal name ${name}`) 85 | } 86 | 87 | return this.traverse(traversal) 88 | } 89 | 90 | cytoscape('core', 'traverseByName', traverseByName) 91 | cytoscape('collection', 'traverseByName', traverseByName) 92 | 93 | const traverseByScript = function(script) { 94 | const expression = esprima.parse(script || 'false').body[0].expression 95 | 96 | return this.filter((node) => { 97 | return staticEval(expression, { node, parseDate, parseInt, parseFloat }) 98 | }) 99 | } 100 | 101 | cytoscape('core', 'traverseByScript', traverseByScript) 102 | cytoscape('collection', 'traverseByScript', traverseByScript) 103 | 104 | const traverse = function(expression) { 105 | const traversorCollections = expression.match(/(\\.|[^&])+/gm).map(p => p.trim()).filter(p => p).map((part) => { 106 | part = part.replace(/\\\&/g, '&') 107 | 108 | const traversorGroups = part.match(/(\\.|[^/])+/gm).map(p => p.trim()).filter(p => p).map((part) => { 109 | part = part.replace(/\\\//g, '/') 110 | 111 | return part.match(/(\\.|[^|])+/gm).map(p => p.trim()).filter(p => p).map((part) => { 112 | part = part.replace(/\\\|/g, '|') 113 | 114 | const [name, ...input] = part.split(' ') 115 | 116 | return { 117 | name: name.toLowerCase().trim() || '', 118 | input: input.join(' ').trim() || '*' 119 | } 120 | }) 121 | }) 122 | 123 | let returnCollection = this 124 | 125 | traversorGroups.forEach((traversors, index) => { 126 | if (index == 0) { 127 | traversors.forEach(({ name, input }) => { 128 | for (let traverseFunction of Object.keys(traverseFunctions)) { 129 | if (traverseFunction.toLowerCase() === name) { 130 | returnCollection = returnCollection[traverseFunction](input) 131 | 132 | return 133 | } 134 | } 135 | 136 | throw new Error(`Unrecognized traverse function ${name}`) 137 | }) 138 | } 139 | else { 140 | let subCollection 141 | 142 | returnCollection.map((node) => { 143 | let subSubCollection = this.collection().add(node) 144 | 145 | traversors.forEach(({ name, input }) => { 146 | for (let traverseFunction of Object.keys(traverseFunctions)) { 147 | if (traverseFunction.toLowerCase() === name) { 148 | subSubCollection = subSubCollection[traverseFunction](input) 149 | 150 | return 151 | } 152 | } 153 | 154 | throw new Error(`Unrecognized traverse function ${name}`) 155 | }) 156 | 157 | if (subCollection) { 158 | subCollection = subCollection.union(subSubCollection) 159 | } 160 | else { 161 | subCollection = subSubCollection 162 | } 163 | }) 164 | 165 | returnCollection = subCollection 166 | } 167 | }) 168 | 169 | return returnCollection 170 | }) 171 | 172 | let returnCollection 173 | 174 | traversorCollections.forEach((traversorCollection) => { 175 | if (returnCollection) { 176 | returnCollection = returnCollection.union(traversorCollection) 177 | } 178 | else { 179 | returnCollection = traversorCollection 180 | } 181 | }) 182 | 183 | return returnCollection 184 | } 185 | 186 | cytoscape('core', 'traverse', traverse) 187 | cytoscape('collection', 'traverse', traverse) 188 | } 189 | 190 | module.exports = { traverse } 191 | -------------------------------------------------------------------------------- /lib/scheduler.js: -------------------------------------------------------------------------------- 1 | const { sleep } = require('@pown/async/lib/sleep') 2 | const { Scheduler: PownScheduler } = require('@pown/request/lib/scheduler') 3 | 4 | class Scheduler extends PownScheduler { 5 | // NOTE: it is tempting to overload the request method but we do not do that here because the scheduler could be used by other 6 | // libraries and as a result this specific behaviour might be unexpected side-effect 7 | 8 | async tryRequest(request) { 9 | if (request.headers && !request.headers['user-agent']) { 10 | request = { ...request, headers: { ...request.headers, 'user-agent': 'pown' } } 11 | } 12 | 13 | const { maxRetries = 5, retryDely = 5000 } = request 14 | 15 | let res 16 | 17 | for (let i = 0; i < maxRetries; i++) { 18 | if (i > 0) { 19 | await sleep(retryDely) 20 | } 21 | 22 | try { 23 | res = await this.request(request) 24 | } 25 | catch (e) { 26 | if (process.env.NODE_ENV !== 'production') { 27 | console.error(e) 28 | } 29 | 30 | continue 31 | } 32 | 33 | if (res.info.error) { 34 | console.error(`${res.method || 'GET'}`, res.uri, '->', res.info.error) 35 | } 36 | else { 37 | console.debug(`${res.method || 'GET'}`, res.uri, '->', res.responseCode, Buffer.from(res.responseBody).slice(0, 512).toString('base64')) 38 | 39 | if (res.responseCode < 500) { 40 | break 41 | } 42 | } 43 | } 44 | 45 | if (!res || res.responseCode >= 500) { 46 | throw new Error(`Cannot request ${res.method} ${res.uri} -> ${res.responseCode}`) 47 | } 48 | 49 | if (request.toJson || request.toJSON) { 50 | return JSON.parse(res.responseBody) 51 | } 52 | else { 53 | return res 54 | } 55 | } 56 | } 57 | 58 | module.exports = { Scheduler } 59 | -------------------------------------------------------------------------------- /lib/template.js: -------------------------------------------------------------------------------- 1 | const deepmerge = require('deepmerge') 2 | const { Template } = require('@pown/engine/lib/template') 3 | 4 | class ReconTemplate extends Template { 5 | constructor(doc, options) { 6 | const { scheduler, ...restOfOptions } = options || {} 7 | 8 | super(doc, restOfOptions) 9 | 10 | this.scheduler = scheduler 11 | } 12 | 13 | async executeTask(taskName, task, input = {}) { 14 | switch (taskName) { 15 | case 'request': 16 | return this.scheduler.request(this.interpolate(task, input)) 17 | 18 | default: 19 | throw new Error(`Unrecognized task ${taskName}`) 20 | } 21 | } 22 | 23 | async runTask(taskName, task, input = {}, recon, nodes, ...args) { 24 | if (['request'].includes(taskName)) { 25 | return super.runTask(taskName, task, input) 26 | } 27 | else { 28 | const { select, traverse, ...data } = await this.getTaskDefinition(task) 29 | 30 | if (select) { 31 | nodes = recon.select(select) 32 | } 33 | else 34 | if (traverse) { 35 | nodes = recon.traverse(traverse) 36 | } 37 | 38 | if (nodes) { 39 | if (nodes.length) { 40 | if (['add'].includes(taskName)) { 41 | nodes = await Promise.all(nodes.map(async(node) => { 42 | const nodeData = node.data() 43 | 44 | if (await this.matchWithTask(task, nodeData)) { 45 | const extract = await this.extractWithTask(task, nodeData) 46 | 47 | const { id, type, label, props = {} } = deepmerge(extract, data) 48 | 49 | return { id: id ? id : `${type}:${label}`, type, label: this.interpolate(label, input), props: this.interpolate(props, input), edges: [nodeData.id] } 50 | } 51 | })) 52 | 53 | nodes = nodes.filter(node => node) 54 | 55 | await recon.add(nodes) 56 | } 57 | else 58 | if (['remove'].includes(taskName)) { 59 | nodes = await Promise.all(nodes.map(async(node) => { 60 | const nodeData = node.data() 61 | 62 | if (await this.matchWithTask(task, nodeData)) { 63 | return { id: nodeData.id } 64 | } 65 | })) 66 | 67 | nodes = nodes.filter(node => node) 68 | 69 | await recon.remove(nodes) 70 | } 71 | else 72 | if (['transform'].includes(taskName)) { 73 | nodes = await Promise.all(nodes.map(async(node) => { 74 | const nodeData = node.data() 75 | 76 | if (await this.matchWithTask(task, nodeData)) { 77 | return nodeData 78 | } 79 | })) 80 | 81 | nodes = nodes.filter(node => node) 82 | 83 | const { transformation, transform = transformation, ...rest } = data 84 | 85 | await recon.transform(transform, nodes, rest, rest) 86 | } 87 | else { 88 | throw new Error(`Unrecognized task ${taskName}`) 89 | } 90 | 91 | return { id: task.id, name: taskName, result: {}, input, matches: true, extracts: {}, output: {} } 92 | } 93 | } 94 | else { 95 | if (['add'].includes(taskName)) { 96 | if (await this.matchWithTask(task, input)) { 97 | const extract = await this.extractWithTask(task, input) 98 | 99 | const { id, type, label, props = {} } = deepmerge(extract, data) 100 | 101 | await recon.add({ id: id ? id : `${type}:${label}`, type, label: this.interpolate(label, input), props: this.interpolate(props, input) }) 102 | } 103 | } 104 | 105 | return { id: task.id, name: taskName, result: {}, input, matches: true, extracts: {}, output: {} } 106 | } 107 | } 108 | } 109 | 110 | async * runTaskSetIt(taskName, tasks, input = {}, recon, ...args) { 111 | if (['op', 'ops', 'operation', 'operations'].includes(taskName)) { 112 | for (let task of tasks) { 113 | yield* this.runTaskDefinitionsIt(task, input, recon, ...args) 114 | } 115 | } 116 | else 117 | if (['select', 'selection'].includes(taskName)) { 118 | for (let task of tasks) { 119 | const { selection, select = selection, exp = selection, expression = exp, ...rest } = task 120 | 121 | const nodes = recon.select(expression) 122 | 123 | yield* super.runTaskDefinitionsIt(rest, input, recon, nodes, ...args) 124 | } 125 | } 126 | else 127 | if (['traverse', 'traversal'].includes(taskName)) { 128 | for (let task of tasks) { 129 | const { traversal, traverse = traversal, exp = traverse, expression = exp, ...rest } = task 130 | 131 | const nodes = recon.traverse(expression) 132 | 133 | yield* super.runTaskDefinitionsIt(rest, input, recon, nodes, ...args) 134 | } 135 | } 136 | else { 137 | yield* super.runTaskSetIt(taskName, tasks, input, recon, ...args) 138 | } 139 | } 140 | } 141 | 142 | module.exports = { ReconTemplate } 143 | -------------------------------------------------------------------------------- /lib/transform.js: -------------------------------------------------------------------------------- 1 | const { EventEmitter } = require('events') 2 | const { isIterable } = require('@pown/async/lib/utils') 3 | const { eachOfLimit } = require('@pown/async/lib/eachOfLimit') 4 | const { iterateOverEmitter } = require('@pown/async/lib/iterateOverEmitter') 5 | 6 | class Transform extends EventEmitter { 7 | constructor(options) { 8 | super() 9 | 10 | const { concurrency = Infinity, ...rest } = options || {} 11 | 12 | this.concurrency = concurrency 13 | 14 | Object.assign(this, rest) 15 | } 16 | 17 | info(...args) { 18 | this.emit('info', ...args) 19 | } 20 | 21 | warn(...args) { 22 | this.emit('warn', ...args) 23 | } 24 | 25 | error(...args) { 26 | this.emit('error', ...args) 27 | } 28 | 29 | debug(...args) { 30 | this.emit('debug', ...args) 31 | } 32 | 33 | progress(...args) { 34 | this.emit('progress', ...args) 35 | } 36 | 37 | async handle() { 38 | throw new Error(`Not implemented`) // NOTE: virtual method 39 | } 40 | 41 | async * itr(nodes, options = {}, concurrency = this.concurrency) { 42 | let updateProgress 43 | 44 | if (Array.isArray(nodes)) { 45 | let i = 0 46 | let l = nodes.length 47 | 48 | updateProgress = (c = 0) => this.progress((i += c), l) 49 | } 50 | else 51 | if (isIterable(nodes)) { 52 | let i = 0 53 | 54 | updateProgress = (c = 0) => this.progress((i += c)) 55 | } 56 | else { 57 | throw new Error(`Non-iterable nodes detected`) 58 | } 59 | 60 | const em = new EventEmitter() 61 | 62 | eachOfLimit(nodes, concurrency, async(node) => { 63 | updateProgress() 64 | 65 | let results 66 | 67 | try { 68 | results = await this.handle(node, options) 69 | } 70 | catch (e) { 71 | this.error(e) // NOTE: report the error but do not break execution for other nodes 72 | 73 | updateProgress(1) 74 | 75 | return 76 | } 77 | 78 | if (!results) { 79 | return 80 | } 81 | 82 | if (!isIterable(results)) { 83 | results = [results] 84 | } 85 | 86 | try { 87 | for await (let result of results) { 88 | em.emit('result', result) 89 | } 90 | } 91 | catch (e) { 92 | this.error(e) // NOTE: report the error but do not break execution for other nodes 93 | 94 | updateProgress(1) 95 | 96 | return 97 | } 98 | 99 | updateProgress(1) 100 | }).then(() => em.emit('end')).catch((error) => em.emit('error', error)) 101 | 102 | yield* iterateOverEmitter(em, 'result') 103 | } 104 | 105 | async run(...args) { 106 | const results = [] 107 | 108 | for await (let result of this.itr(...args)) { 109 | results.push(result) 110 | } 111 | 112 | return results 113 | } 114 | } 115 | 116 | module.exports = { Transform } 117 | -------------------------------------------------------------------------------- /lib/types.js: -------------------------------------------------------------------------------- 1 | const ALL_TYPE = '*' 2 | const STRING_TYPE = 'string' 3 | const DOMAIN_TYPE = 'domain' 4 | const SUBDOMAIN_TYPE = 'subdomain' 5 | const IPV4_TYPE = 'ipv4' 6 | const IPV6_TYPE = 'ipv6' 7 | const PORT_TYPE = 'port' 8 | const URI_TYPE = 'uri' 9 | const BRAND_TYPE = 'brand' 10 | const PERSON_TYPE = 'person' 11 | const EMAIL_TYPE = 'email' 12 | const NICK_TYPE = 'nick' 13 | const ORG_TYPE = 'org' 14 | const CODE_TYPE = 'code' 15 | const TITLE_TYPE = 'title' 16 | const SOFTWARE_TYPE = 'software' 17 | const MIME_TYPE = 'mime' 18 | const IMAGE_TYPE = 'image' 19 | const SCREENSHOT_TYPE = 'screenshot' 20 | const SHA1_TYPE = 'sha1' 21 | const FINGERPRINT_TYPE = 'fingerprint' 22 | const SIGNATURE_TYPE = 'signature' 23 | const WHOIS_TYPE = 'whois' 24 | const ISSUE_TYPE = 'issue' 25 | const EXPLOIT_TYPE = 'exploit' 26 | const HASH_TYPE = 'hash' 27 | const SANDOMAIN_TYPE = 'sandomain' 28 | const BANNER_TYPE = 'banner' 29 | const CERTIFICATE_TYPE = 'certificate' 30 | const TLS_TYPE = 'tls' 31 | 32 | module.exports = { 33 | ALL_TYPE, 34 | STRING_TYPE, 35 | DOMAIN_TYPE, 36 | SUBDOMAIN_TYPE, 37 | IPV4_TYPE, 38 | IPV6_TYPE, 39 | PORT_TYPE, 40 | URI_TYPE, 41 | BRAND_TYPE, 42 | PERSON_TYPE, 43 | EMAIL_TYPE, 44 | NICK_TYPE, 45 | ORG_TYPE, 46 | CODE_TYPE, 47 | TITLE_TYPE, 48 | SOFTWARE_TYPE, 49 | MIME_TYPE, 50 | IMAGE_TYPE, 51 | SCREENSHOT_TYPE, 52 | SHA1_TYPE, 53 | FINGERPRINT_TYPE, 54 | SIGNATURE_TYPE, 55 | WHOIS_TYPE, 56 | ISSUE_TYPE, 57 | EXPLOIT_TYPE, 58 | HASH_TYPE, 59 | SANDOMAIN_TYPE, 60 | BANNER_TYPE, 61 | CERTIFICATE_TYPE, 62 | TLS_TYPE 63 | } 64 | -------------------------------------------------------------------------------- /lib/utils.js: -------------------------------------------------------------------------------- 1 | const makeId = (type, name, ...extra) => { 2 | return `${type ? type + ':' : ''}${[name || Math.random().toString(32).substring(2)].concat(extra).join(':')}` 3 | } 4 | 5 | const makeNode = (node) => { 6 | const { id: _id, type = '', label = '', props = {}, edges = [], ...rest } = node 7 | 8 | const id = _id || makeId(type, label) 9 | 10 | return { ...rest, id, type, label, props, edges } 11 | } 12 | 13 | const flatten = (array, times) => { 14 | let result = array 15 | 16 | for (let i = 0; i < times; i++) { 17 | result = [].concat(...result) 18 | } 19 | 20 | return result 21 | } 22 | 23 | module.exports = { makeId, makeNode, flatten } 24 | -------------------------------------------------------------------------------- /lib/worker/index.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const { EventEmitter } = require('events') 3 | const { Worker } = require('worker_threads') 4 | const { iterateOverEmitter } = require('@pown/async/lib/iterateOverEmitter') 5 | 6 | const { deserialize } = require('./utils') 7 | 8 | class WorkerError extends Error { 9 | constructor({ type, message, stack }) { 10 | super() 11 | 12 | this.type = type 13 | this.message = message 14 | this.stack = stack 15 | } 16 | } 17 | 18 | const unwrapError = (error) => { 19 | return new WorkerError(error) 20 | } 21 | 22 | const unwrapArgs = (args) => { 23 | return args.map((arg) => arg && arg.__isError ? unwrapError(arg) : arg) 24 | } 25 | 26 | const streamIterable = async(iterable, worker) => { 27 | for await (let node of iterable) { 28 | worker.postMessage({ type: 'stream.put', node }) 29 | } 30 | 31 | worker.postMessage({ type: 'stream.end' }) 32 | } 33 | 34 | const wrapInWorker = (Transform) => { 35 | const { loadableTransformModule, loadableTransformName } = Transform 36 | 37 | if (loadableTransformModule && loadableTransformModule) { 38 | return class extends Transform { 39 | async * itr(nodes, options, concurrency) { 40 | this.warn('starting in worker') 41 | 42 | const emitter = new EventEmitter() 43 | 44 | const worker = new Worker(path.join(__dirname, 'worker.js')) 45 | 46 | worker.postMessage({ type: 'run', transformModule: loadableTransformModule, transformName: loadableTransformName, transformOptions: options, transformConcurrency: concurrency }) 47 | 48 | worker.on('message', ({ type, result, error, args }) => { 49 | switch (type) { 50 | case 'yield': 51 | emitter.emit('yield', deserialize(result)) 52 | 53 | break 54 | 55 | case 'error': 56 | emitter.emit('error', unwrapError(error)) 57 | 58 | break 59 | 60 | case 'end': 61 | emitter.emit('end') 62 | 63 | break 64 | 65 | case 'transform.info': 66 | this.info(...unwrapArgs(args)) 67 | 68 | break 69 | 70 | case 'transform.warn': 71 | this.warn(...unwrapArgs(args)) 72 | 73 | break 74 | 75 | case 'transform.error': 76 | this.error(...unwrapArgs(args)) 77 | 78 | break 79 | 80 | case 'transform.debug': 81 | this.debug(...unwrapArgs(args)) 82 | 83 | break 84 | 85 | case 'transform.progress': 86 | this.progress(...unwrapArgs(args)) 87 | 88 | break 89 | } 90 | }) 91 | 92 | worker.on('mesageerror', (error) => { 93 | emitter.emit('error', error) 94 | }) 95 | 96 | worker.on('error', (error) => { 97 | emitter.emit('error', error) 98 | }) 99 | 100 | worker.on('exit', (code) => { 101 | if (code) { 102 | emitter.emit('error', new Error(`Worker exited with code ${code}`)) 103 | } 104 | 105 | emitter.emit('end') 106 | }) 107 | 108 | streamIterable(nodes, worker) 109 | 110 | let error 111 | 112 | try { 113 | yield* iterateOverEmitter(emitter, 'yield') 114 | } 115 | catch (e) { 116 | error = unwrapError(e) 117 | } 118 | 119 | worker.removeAllListeners('exit') 120 | 121 | worker.terminate() 122 | 123 | this.warn('finished in worker') 124 | 125 | if (error) { 126 | throw error 127 | } 128 | } 129 | } 130 | } 131 | else { 132 | return Transform 133 | } 134 | } 135 | 136 | module.exports = { wrapInWorker } 137 | -------------------------------------------------------------------------------- /lib/worker/utils.js: -------------------------------------------------------------------------------- 1 | const serialize = (obj) => { 2 | // NOTE: this method is only available for consistency reasons because we rely on the structured 3 | // cloning algorithm to do the heavy lifting, but keep in mind that there is a chance that the 4 | // serialised and deseiralised object will be different, which should not be a problem for most 5 | // intents and purposes 6 | 7 | return obj 8 | } 9 | 10 | const deserialize = (obj) => { 11 | // NOTE: there is no need to handle String, Number, Boolean and other native objects because 12 | // we assume this method is only used in the context of deserializing object created by the 13 | // structured clone algorithm - i.e. due to message passing 14 | 15 | switch (true) { 16 | case obj === null: 17 | return obj 18 | 19 | case Array.isArray(obj): 20 | return obj.map(deserialize) 21 | 22 | case ArrayBuffer.isView(obj): 23 | return Buffer.from(obj, obj.byteOffset, obj.byteLength) 24 | 25 | case typeof(obj) === 'object': 26 | if (obj.type === 'Buffer' && Array.isArray(obj.data)) { 27 | return Buffer.from(obj.data) 28 | } 29 | else { 30 | return Object.assign({}, ...Object.entries(obj).map(([key, value]) => ({ 31 | [key]: deserialize(value) 32 | }))) 33 | } 34 | 35 | default: 36 | return obj 37 | } 38 | } 39 | 40 | module.exports = { serialize, deserialize } 41 | -------------------------------------------------------------------------------- /lib/worker/worker.js: -------------------------------------------------------------------------------- 1 | const { EventEmitter } = require('events') 2 | const { parentPort } = require('worker_threads') 3 | const { iterateOverEmitter } = require('@pown/async/lib/iterateOverEmitter') 4 | 5 | const { serialize } = require('./utils') 6 | const { Scheduler } = require('../scheduler') 7 | 8 | const getSafeError = (error) => { 9 | return { __isError: true, type: error.type, message: error.message, stack: error.stack } 10 | } 11 | 12 | const getSafeArgs = (args) => { 13 | return args.map((arg) => arg && arg instanceof Error ? getSafeError(arg) : arg) 14 | } 15 | 16 | console.info = (...args) => { 17 | parentPort.postMessage({ type: 'transform.info', args: getSafeArgs(args) }) 18 | } 19 | 20 | console.warn = (...args) => { 21 | parentPort.postMessage({ type: 'transform.warn', args: getSafeArgs(args) }) 22 | } 23 | 24 | console.error = (...args) => { 25 | parentPort.postMessage({ type: 'transform.error', args: getSafeArgs(args) }) 26 | } 27 | 28 | console.debug = (...args) => { 29 | parentPort.postMessage({ type: 'transform.debug', args: getSafeArgs(args) }) 30 | } 31 | 32 | const stream = new class extends EventEmitter { 33 | constructor() { 34 | super() 35 | 36 | this.finished = false 37 | } 38 | 39 | async put(options) { 40 | if (this.finished) { 41 | throw new Error(`Stream already finished`) 42 | } 43 | else { 44 | const { node } = options 45 | 46 | this.emit('node', node) 47 | } 48 | } 49 | 50 | async end() { 51 | if (this.finished) { 52 | throw new Error(`Stream already finished`) 53 | } 54 | else { 55 | this.finished = true 56 | } 57 | 58 | this.emit('end') 59 | } 60 | } 61 | 62 | const transform = new class { 63 | constructor() { 64 | this.running = false 65 | } 66 | 67 | info(...args) { 68 | parentPort.postMessage({ type: 'transform.info', args: getSafeArgs(args) }) 69 | } 70 | 71 | warn(...args) { 72 | parentPort.postMessage({ type: 'transform.warn', args: getSafeArgs(args) }) 73 | } 74 | 75 | error(...args) { 76 | parentPort.postMessage({ type: 'transform.error', args: getSafeArgs(args) }) 77 | } 78 | 79 | debug(...args) { 80 | parentPort.postMessage({ type: 'transform.debug', args: getSafeArgs(args) }) 81 | } 82 | 83 | progress(...args) { 84 | parentPort.postMessage({ type: 'transform.progress', args: getSafeArgs(args) }) 85 | } 86 | 87 | async run(options) { 88 | if (this.running) { 89 | throw new Error(`Transform already running`) 90 | } 91 | else { 92 | this.running = true 93 | } 94 | 95 | const { transformModule, transformName, transformOptions, transformConcurrency } = options 96 | 97 | const module = require(transformModule) 98 | 99 | const Transform = transformName ? module[transformName] : module 100 | 101 | const transform = new Transform({ scheduler: new Scheduler() }) 102 | 103 | transform.on('info', this.info) 104 | transform.on('warn', this.warn) 105 | transform.on('error', this.error) 106 | transform.on('debug', this.debug) 107 | transform.on('progress', this.progress) 108 | 109 | for await (let result of transform.itr(iterateOverEmitter(stream, 'node'), transformOptions, transformConcurrency)) { 110 | parentPort.postMessage({ type: 'yield', result: serialize(result) }) 111 | } 112 | 113 | parentPort.postMessage({ type: 'end' }) 114 | } 115 | } 116 | 117 | const onMessage = async({ type, ...options }) => { 118 | switch (type) { 119 | case 'stream.put': 120 | await stream.put(options) 121 | 122 | break 123 | 124 | case 'stream.end': 125 | await stream.end(options) 126 | 127 | break 128 | 129 | case 'run': 130 | await transform.run(options) 131 | 132 | break 133 | 134 | default: 135 | throw new Error(`Unrecognized message type ${type}`) 136 | } 137 | } 138 | 139 | parentPort.on('message', (message) => { 140 | onMessage(message).catch((error) => parentPort.postMessage({ type: 'error', error: getSafeError(error) })) 141 | }) 142 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@pown/recon", 3 | "version": "2.176.0", 4 | "description": "Pownage guaranteed", 5 | "main": "lib/index.js", 6 | "bin": {}, 7 | "scripts": { 8 | "usage": "POWN_ROOT=. pown-cli recon --help; echo; POWN_ROOT=. pown-cli recon transform --help", 9 | "recon": "POWN_ROOT=. pown-cli recon", 10 | "script": "POWN_ROOT=. pown-cli script", 11 | "shell": "POWN_ROOT=. pown-cli shell", 12 | "prefs": "POWN_ROOT=. pown-cli prefs", 13 | "build": "true", 14 | "test": "NODE_ENV=test npx mocha --recursive" 15 | }, 16 | "repository": { 17 | "type": "git", 18 | "url": "git+https://github.com/pownjs/pown-recon.git" 19 | }, 20 | "keywords": [ 21 | "exploit", 22 | "framework" 23 | ], 24 | "author": "pdp ", 25 | "contributors": [], 26 | "license": "MIT", 27 | "bugs": { 28 | "url": "https://github.com/pownjs/pown-recon/issues" 29 | }, 30 | "homepage": "https://github.com/pownjs/pown-recon#readme", 31 | "peerDependencies": { 32 | "aws-sdk": "^2.899.0", 33 | "memcached": "^2.2.2" 34 | }, 35 | "devDependencies": { 36 | "@pown/script": "^2.21.0", 37 | "@pown/shell": "^2.6.0", 38 | "aws-sdk": "^2.1221.0", 39 | "memcached": "^2.2.2", 40 | "mocha": "^9.0.3" 41 | }, 42 | "dependencies": { 43 | "@pown/async": "^2.18.1", 44 | "@pown/cli": "^2.40.1", 45 | "@pown/connect": "^2.2.1", 46 | "@pown/engine": "^2.8.0", 47 | "@pown/file": "^2.1.0", 48 | "@pown/modules": "^2.11.0", 49 | "@pown/preferences": "^2.7.0", 50 | "@pown/request": "^2.28.0", 51 | "cytoscape": "^3.19.0", 52 | "cytoscape-dagre": "^2.3.2", 53 | "cytoscape-euler": "^1.2.2", 54 | "cytoscape-klay": "^3.1.4", 55 | "deepmerge": "^4.2.2", 56 | "esprima": "^4.0.1", 57 | "js-yaml": "^4.1.0", 58 | "jsonpath": "^1.1.1", 59 | "name": "^0.0.2", 60 | "static-eval": "^2.1.0", 61 | "stream-chain": "^2.2.4", 62 | "stream-csv-as-json": "^1.0.4", 63 | "stream-json": "^1.7.2", 64 | "uuid": "^8.3.2" 65 | }, 66 | "pown": { 67 | "commands": [ 68 | "commands/recon" 69 | ], 70 | "transforms": [ 71 | "transforms/bitbucket", 72 | "transforms/bufferoverrun", 73 | "transforms/certspotter", 74 | "transforms/cloudflare", 75 | "transforms/crtsh", 76 | "transforms/dns", 77 | "transforms/dockerhub", 78 | "transforms/gravatar", 79 | "transforms/hackertarget", 80 | "transforms/http", 81 | "transforms/ipinfoio", 82 | "transforms/omnisint", 83 | "transforms/pks", 84 | "transforms/pwndb", 85 | "transforms/riddler", 86 | "transforms/script", 87 | "transforms/scylla", 88 | "transforms/securitytrails", 89 | "transforms/shodan", 90 | "transforms/spyse", 91 | "transforms/tcp", 92 | "transforms/threatcrowd", 93 | "transforms/urlscan", 94 | "transforms/utils", 95 | "transforms/virustotal", 96 | "transforms/vulners", 97 | "transforms/wappalyzer", 98 | "transforms/worker", 99 | "transforms/zonecruncher" 100 | ] 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /test/detect.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert') 2 | 3 | const { isSubdomain, isSubdomainOf } = require('../lib/detect') 4 | 5 | describe('detect', () => { 6 | it('#isSubdomain', () => { 7 | assert.ok(!isSubdomain('acme.com'), 'acme.com is not subodmain') 8 | assert.ok(!!isSubdomain('sub.acme.com'), 'sub.acme.com is subodmain') 9 | }) 10 | 11 | it('#isSubdomainOf', () => { 12 | assert.ok(!isSubdomainOf('acme.com', 'acme.com', 'acme.com is not subdomain of acme.com')) 13 | assert.ok(!!isSubdomainOf('sub.acme.com', 'acme.com', 'sub.acme.com is subdomain of acme.com')) 14 | assert.ok(!isSubdomainOf('sub.test.com', 'acme.com', 'sub.test.com is not subdomain of acme.com')) 15 | }) 16 | }) 17 | -------------------------------------------------------------------------------- /test/normalize.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert') 2 | 3 | const { normalizeDomain } = require('../lib/normalize') 4 | 5 | describe('normalize', () => { 6 | it('#normalizeDomain', () => { 7 | assert.equal(normalizeDomain('acme.com'), 'acme.com') 8 | assert.equal(normalizeDomain('acme.com.'), 'acme.com') 9 | assert.equal(normalizeDomain('Acme.com.'), 'acme.com') 10 | assert.equal(normalizeDomain('*.Acme.com.'), 'acme.com') 11 | assert.equal(normalizeDomain('*.*.Acme.com.'), 'acme.com') 12 | }) 13 | }) 14 | -------------------------------------------------------------------------------- /test/recon.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert') 2 | 3 | const { Recon } = require('../lib/recon') 4 | const { makeNode } = require('../lib/utils') 5 | 6 | describe('recon', () => { 7 | describe('#addNodes', () => { 8 | it('must add node', async function() { 9 | const r = new Recon() 10 | 11 | await r.addNodes([makeNode({ type: 'string', label: 'test' })]) 12 | 13 | r.select('*') 14 | 15 | assert.ok(r.selection.length === 1) 16 | assert.ok(r.selection[0].data('props').string === undefined) 17 | }) 18 | 19 | it('must update node if node exists', async function() { 20 | const r = new Recon() 21 | 22 | await r.addNodes([makeNode({ type: 'string', label: 'test' })]) 23 | 24 | r.select('*') 25 | 26 | assert.ok(r.selection.length === 1) 27 | assert.ok(r.selection[0].data('props').string === undefined) 28 | 29 | await r.addNodes([makeNode({ type: 'string', label: 'test', props: { string: 'test' } })]) 30 | 31 | r.select('*') 32 | 33 | assert.ok(r.selection.length === 1) 34 | assert.ok(r.selection[0].data('props').string === 'test') 35 | }) 36 | 37 | it('must preserve buffers in data structs', async function() { 38 | const r = new Recon() 39 | 40 | const buffer = Buffer.from('buffer') 41 | 42 | await r.addNodes([makeNode({ type: 'buffer', label: 'buffer', props: { buffer } })]) 43 | 44 | r.select('*') 45 | 46 | assert.ok(r.selection.length === 1) 47 | assert.ok(r.selection[0].data('props').buffer === buffer) 48 | 49 | const d = await r.serialize() 50 | 51 | assert.ok(d.elements.nodes.length === 1) 52 | assert.ok(d.elements.nodes[0].data.props.buffer === buffer) 53 | }) 54 | }) 55 | }) 56 | -------------------------------------------------------------------------------- /test/worker/env.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert') 2 | 3 | describe('env', () => { 4 | it('ensure env is test', async() => { 5 | assert.equal(process.env.NODE_ENV, 'test', 'the env is set to test') 6 | }) 7 | }) 8 | -------------------------------------------------------------------------------- /test/worker/utils.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert') 2 | 3 | const { deserialize } = require('../../lib/worker/utils') 4 | 5 | describe('worker/utils', () => { 6 | describe('#deserialize', () => { 7 | it('must deserialize', () => { 8 | const t = (i) => deserialize(i) 9 | 10 | assert.deepEqual(t(null), null, 'handle null') 11 | assert.deepEqual(t(0), 0, 'handle zero') 12 | assert.deepEqual(t(1), 1, 'handle number') 13 | assert.deepEqual(t(true), true, 'handle true') 14 | assert.deepEqual(t(false), false, 'handle false') 15 | assert.deepEqual(t('a'), 'a', 'handle string') 16 | assert.deepEqual(t([]), [], 'handle empty array') 17 | assert.deepEqual(t(['a']), ['a'], 'handle array with elements') 18 | assert.deepEqual(t({}), {}, 'handle empty object') 19 | assert.deepEqual(t({ a: 'a' }), { a: 'a' }, 'handle object with props') 20 | assert.deepEqual(t(Buffer.from('a')), Buffer.from('a'), 'handle buffer') 21 | }) 22 | 23 | it('must handle json', () => { 24 | const t = (i) => deserialize(JSON.parse(JSON.stringify(i))) 25 | 26 | assert.deepEqual(t(null), null, 'handle null') 27 | assert.deepEqual(t(0), 0, 'handle zero') 28 | assert.deepEqual(t(1), 1, 'handle number') 29 | assert.deepEqual(t(true), true, 'handle true') 30 | assert.deepEqual(t(false), false, 'handle false') 31 | assert.deepEqual(t('a'), 'a', 'handle string') 32 | assert.deepEqual(t([]), [], 'handle empty array') 33 | assert.deepEqual(t(['a']), ['a'], 'handle array with elements') 34 | assert.deepEqual(t({}), {}, 'handle empty object') 35 | assert.deepEqual(t({ a: 'a' }), { a: 'a' }, 'handle object with props') 36 | assert.deepEqual(t(Buffer.from('a')), Buffer.from('a'), 'handle buffer') 37 | }) 38 | }) 39 | }) 40 | -------------------------------------------------------------------------------- /transforms/bitbucket/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { makeId } = require('../../lib//utils') 4 | const { BRAND_TYPE } = require('../../lib//types') 5 | const { Transform } = require('../../lib//transform') 6 | 7 | const BITBUCKET_REPO_TYPE = 'bitbucket:repo' 8 | const BITBUCKET_SNIPPET_TYPE = 'bitbucket:snippet' 9 | const BITBUCKET_MEMBER_TYPE = 'bitbucket:member' 10 | 11 | const bitbucketListRepos = class extends Transform { 12 | static get alias() { 13 | return ['bitbucket_list_repos', 'bblr'] 14 | } 15 | 16 | static get title() { 17 | return 'List Bitbucket Repos' 18 | } 19 | 20 | static get description() { 21 | return 'List Bitbucket repositories' 22 | } 23 | 24 | static get group() { 25 | return this.title 26 | } 27 | 28 | static get tags() { 29 | return ['ce'] 30 | } 31 | 32 | static get types() { 33 | return [BRAND_TYPE, BITBUCKET_MEMBER_TYPE] 34 | } 35 | 36 | static get priority() { 37 | return 1 38 | } 39 | 40 | static get noise() { 41 | return 1 42 | } 43 | 44 | static get options() { 45 | return {} 46 | } 47 | 48 | async handle({ id: source = '', label = '' }) { 49 | const results = [] 50 | 51 | const search = querystring.escape(label) 52 | 53 | const { responseBody } = await this.scheduler.tryRequest({ uri: `https://api.bitbucket.org/2.0/repositories/${search}` }) 54 | 55 | const json = JSON.parse(responseBody.toString() || '{}') || {} 56 | 57 | const { error, values = [] } = json 58 | 59 | if (error) { 60 | this.error(error.message) 61 | 62 | return results 63 | } 64 | 65 | values.forEach(({ full_name: fullName = '', owner = {} }) => { 66 | const repoId = makeId(BITBUCKET_REPO_TYPE, `${label}`) 67 | 68 | const uri = `https://bitbucket.com/${fullName}` 69 | 70 | results.push({ id: repoId, type: BITBUCKET_REPO_TYPE, label: fullName, props: { uri, fullName }, edges: [source] }) 71 | 72 | const { username: ownerUsername = '', displayName: ownerDisplayName = '', links: ownerLinks = {} } = owner 73 | 74 | const { html: ownerUri = {}, avatar: ownerAvatar = {} } = ownerLinks 75 | 76 | const ownerId = makeId(BITBUCKET_MEMBER_TYPE, ownerUsername) 77 | const ownerLabel = ownerDisplayName || ownerUsername 78 | 79 | results.push({ id: ownerId, type: BITBUCKET_MEMBER_TYPE, label: ownerLabel, image: ownerAvatar.href, props: { username: ownerUsername, uri: ownerUri.href }, edges: [repoId] }) 80 | }) 81 | 82 | return results 83 | } 84 | } 85 | 86 | const bitbucketListSnippets = class extends Transform { 87 | static get alias() { 88 | return ['bitbucket_list_snippets', 'bbls'] 89 | } 90 | 91 | static get title() { 92 | return 'List Bitbucket Snippets' 93 | } 94 | 95 | static get description() { 96 | return 'List Bitbucket snippets' 97 | } 98 | 99 | static get group() { 100 | return this.title 101 | } 102 | 103 | static get tags() { 104 | return ['ce'] 105 | } 106 | 107 | static get types() { 108 | return [BRAND_TYPE, BITBUCKET_MEMBER_TYPE] 109 | } 110 | 111 | static get priority() { 112 | return 1 113 | } 114 | 115 | static get noise() { 116 | return 1 117 | } 118 | 119 | static get options() { 120 | return {} 121 | } 122 | 123 | async handle({ id: source = '', label = '' }) { 124 | const results = [] 125 | 126 | const search = querystring.escape(label) 127 | 128 | const { responseBody } = await this.scheduler.tryRequest({ uri: `https://api.bitbucket.org/2.0/snippets/${search}` }) 129 | 130 | const json = JSON.parse(responseBody.toString() || '{}') || {} 131 | 132 | const { error, values = [] } = json 133 | 134 | if (error) { 135 | this.error(error.message) 136 | 137 | return results 138 | } 139 | 140 | values.forEach(({ id = '', links = {}, title = '', owner = {} }) => { 141 | const { html: htmlLinks = {} } = links 142 | 143 | const { href: uri = '' } = htmlLinks 144 | 145 | const snippetId = makeId(BITBUCKET_SNIPPET_TYPE, `${label}:${id}`) 146 | 147 | results.push({ id: snippetId, type: BITBUCKET_SNIPPET_TYPE, label: title, props: { title, uri }, edges: [source] }) 148 | 149 | const { username: ownerUsername = '', displayName: ownerDisplayName = '', links: ownerLinks = {} } = owner 150 | 151 | const { html: ownerUri = {}, avatar: ownerAvatar = {} } = ownerLinks 152 | 153 | const ownerId = makeId(BITBUCKET_MEMBER_TYPE, ownerUsername) 154 | const ownerLabel = ownerDisplayName || ownerUsername 155 | 156 | results.push({ id: ownerId, type: BITBUCKET_MEMBER_TYPE, label: ownerLabel, image: ownerAvatar.href, props: { username: ownerUsername, uri: ownerUri.href }, edges: [snippetId] }) 157 | }) 158 | 159 | return results 160 | } 161 | } 162 | 163 | const bitbucketListTeamRepos = class extends Transform { 164 | static get alias() { 165 | return ['bitbucket_list_team_repos', 'bbltr'] 166 | } 167 | 168 | static get title() { 169 | return 'List Bitbucket Team Repos' 170 | } 171 | 172 | static get description() { 173 | return 'List Bitbucket team repos' 174 | } 175 | 176 | static get group() { 177 | return this.title 178 | } 179 | 180 | static get tags() { 181 | return ['ce'] 182 | } 183 | 184 | static get types() { 185 | return [BRAND_TYPE, BITBUCKET_MEMBER_TYPE] 186 | } 187 | 188 | static get options() { 189 | return {} 190 | } 191 | 192 | static get priority() { 193 | return 1 194 | } 195 | 196 | static get noise() { 197 | return 1 198 | } 199 | 200 | async handle({ id: source = '', label = '' }, options) { 201 | const results = [] 202 | 203 | const search = querystring.escape(label) 204 | 205 | const { responseBody } = await this.scheduler.tryRequest({ uri: `https://api.bitbucket.org/2.0/teams/${search}/repositories` }) 206 | 207 | const json = JSON.parse(responseBody.toString() || '{}') || {} 208 | 209 | const { error, values = [] } = json 210 | 211 | if (error) { 212 | this.error(error.message) 213 | 214 | return results 215 | } 216 | 217 | // TODO: add code here 218 | 219 | return results 220 | } 221 | } 222 | 223 | const bitbucketListTeamMembers = class extends Transform { 224 | static get alias() { 225 | return ['bitbucket_list_team_members', 'bbltm'] 226 | } 227 | 228 | static get title() { 229 | return 'List Bitbucket Team Members' 230 | } 231 | 232 | static get description() { 233 | return 'List Bitbucket team members' 234 | } 235 | 236 | static get group() { 237 | return this.title 238 | } 239 | 240 | static get tags() { 241 | return ['ce'] 242 | } 243 | 244 | static get types() { 245 | return [BRAND_TYPE, BITBUCKET_MEMBER_TYPE] 246 | } 247 | 248 | static get options() { 249 | return {} 250 | } 251 | 252 | static get priority() { 253 | return 1 254 | } 255 | 256 | static get noise() { 257 | return 1 258 | } 259 | 260 | async handle({ id: source = '', label = '' }, options) { 261 | const results = [] 262 | 263 | const search = querystring.escape(label) 264 | 265 | const { responseBody } = await this.scheduler.tryRequest({ uri: `https://api.bitbucket.org/2.0/teams/${search}/members` }) 266 | 267 | const json = JSON.parse(responseBody.toString() || '{}') || {} 268 | 269 | const { error, values = [] } = json 270 | 271 | if (error) { 272 | this.error(error.message) 273 | 274 | return results 275 | } 276 | 277 | // TODO: add code here 278 | 279 | return results 280 | } 281 | } 282 | 283 | module.exports = { bitbucketListRepos, bitbucketListSnippets, bitbucketListTeamRepos, bitbucketListTeamMembers } 284 | -------------------------------------------------------------------------------- /transforms/bufferoverrun/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { makeId } = require('../../lib//utils') 4 | const { isIpv4 } = require('../../lib//detect') 5 | const { Transform } = require('../../lib//transform') 6 | const { DOMAIN_TYPE, SUBDOMAIN_TYPE, IPV4_TYPE } = require('../../lib//types') 7 | 8 | const DEFAULT_EXTRACT_IPS = false 9 | 10 | const bufferoverrunSubdomainSearch = class extends Transform { 11 | static get alias() { 12 | return ['bufferoverrun_subdomain_search', 'brss'] 13 | } 14 | 15 | static get title() { 16 | return 'Bufferover.run Subdomain Search' 17 | } 18 | 19 | static get description() { 20 | return 'Obtain a list of subdomains using bufferover.run DNS service' 21 | } 22 | 23 | static get group() { 24 | return this.title 25 | } 26 | 27 | static get tags() { 28 | return ['ce'] 29 | } 30 | 31 | static get types() { 32 | return [DOMAIN_TYPE] 33 | } 34 | 35 | static get options() { 36 | return { 37 | extractIps: { 38 | description: 'Extract IPs', 39 | type: 'boolean', 40 | default: DEFAULT_EXTRACT_IPS 41 | } 42 | } 43 | } 44 | 45 | static get priority() { 46 | return 1 47 | } 48 | 49 | static get noise() { 50 | return 1 51 | } 52 | 53 | async handle({ id: source = '', label = '' }, { extractIps = DEFAULT_EXTRACT_IPS }) { 54 | const needle = `.${label}` 55 | 56 | const query = querystring.stringify({ 57 | q: needle 58 | }) 59 | 60 | const { FDNS_A, RDNS } = await this.scheduler.tryRequest({ uri: `https://dns.bufferover.run/dns?${query}`, toJson: true }) 61 | 62 | const results = [] 63 | 64 | ; // WTF? 65 | 66 | [FDNS_A || [], RDNS || []].forEach((category) => { 67 | category.forEach((item) => { 68 | let [ptr, domain] = item.split(',') 69 | 70 | if (!domain) { 71 | domain = ptr 72 | ptr = '' 73 | } 74 | 75 | if (!domain.endsWith(needle)) { 76 | return 77 | } 78 | 79 | const label = domain 80 | const type = DOMAIN_TYPE 81 | const id = makeId(type, label) 82 | 83 | results.push({ id, type, label, props: { domain, ...{ previouslySeenIPv4: (ptr && isIpv4(ptr) ? ptr : undefined) } }, edges: [{ source, type: SUBDOMAIN_TYPE }] }) 84 | 85 | if (extractIps && ptr && isIpv4(ptr)) { 86 | results.push({ type: IPV4_TYPE, label: ptr, props: { ipv4: ptr, domain }, edges: [id] }) 87 | } 88 | }) 89 | }) 90 | 91 | return results 92 | } 93 | } 94 | 95 | module.exports = { bufferoverrunSubdomainSearch } 96 | -------------------------------------------------------------------------------- /transforms/certspotter/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { Transform } = require('../../lib//transform') 4 | const { normalizeDomain } = require('../../lib//normalize') 5 | const { DOMAIN_TYPE, SUBDOMAIN_TYPE } = require('../../lib//types') 6 | 7 | const certspotterIssuances = class extends Transform { 8 | static get alias() { 9 | return ['certspotter_issuances', 'csi'] 10 | } 11 | 12 | static get title() { 13 | return 'Certspotter Issuances' 14 | } 15 | 16 | static get description() { 17 | return 'Obtain issuances from Certspotter' 18 | } 19 | 20 | static get group() { 21 | return this.title 22 | } 23 | 24 | static get tags() { 25 | return ['ce'] 26 | } 27 | 28 | static get types() { 29 | return [DOMAIN_TYPE] 30 | } 31 | 32 | static get options() { 33 | return {} 34 | } 35 | 36 | static get priority() { 37 | return 1 38 | } 39 | 40 | static get noise() { 41 | return 1 42 | } 43 | 44 | async handle({ id: source = '', label = '' }, options) { 45 | const query = querystring.stringify({ 46 | domain: label, 47 | include_subdomains: 'true', 48 | expand: 'dns_names' 49 | }) 50 | 51 | const items = await this.scheduler.tryRequest({ uri: `https://api.certspotter.com/v1/issuances?${query}`, toJson: true }) 52 | 53 | return [].concat(...items.map(({ dns_names }) => { 54 | return dns_names 55 | .filter((name) => name.slice(-label.length - 1) === `.${label}`) 56 | .map((name) => name.replace(/^\*\./, '')) 57 | .filter((name) => name !== label) 58 | .map((name) => { 59 | const domain = normalizeDomain(name) 60 | 61 | return { type: DOMAIN_TYPE, label: domain, props: { domain }, edges: [{ source, type: SUBDOMAIN_TYPE }] } 62 | }) 63 | })) 64 | } 65 | } 66 | 67 | module.exports = { certspotterIssuances } 68 | -------------------------------------------------------------------------------- /transforms/cloudflare/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { Transform } = require('../../lib//transform') 4 | const { normalizeDomain } = require('../../lib//normalize') 5 | const { DOMAIN_TYPE, IPV4_TYPE, IPV6_TYPE, STRING_TYPE } = require('../../lib//types') 6 | 7 | const cloudflareDnsQuery = class extends Transform { 8 | static get alias() { 9 | return ['cloudflare_dns_query', 'cfdq'] 10 | } 11 | 12 | static get title() { 13 | return 'CloudFlare DNS Query' 14 | } 15 | 16 | static get description() { 17 | return 'Query CloudFlare DNS API' 18 | } 19 | 20 | static get group() { 21 | return this.title 22 | } 23 | 24 | static get tags() { 25 | return ['ce', 'dns'] 26 | } 27 | 28 | static get types() { 29 | return [DOMAIN_TYPE, IPV4_TYPE, IPV6_TYPE] 30 | } 31 | 32 | static get options() { 33 | return { 34 | type: { 35 | description: 'Record type', 36 | type: 'string', 37 | default: 'ALL', 38 | choices: ['ALL', 'A', 'AAAA', 'CNAME', 'MX', 'NS', 'TXT'] 39 | } 40 | } 41 | } 42 | 43 | static get priority() { 44 | return 1 45 | } 46 | 47 | static get noise() { 48 | return 1 49 | } 50 | 51 | async query(name, type, source) { 52 | const query = querystring.stringify({ 53 | name: name, 54 | type: type 55 | }) 56 | 57 | const headers = { 58 | 'Accept': 'application/dns-json' 59 | } 60 | 61 | const { Answer = [] } = await this.scheduler.tryRequest({ uri: `https://cloudflare-dns.com/dns-query?${query}`, headers, toJson: true }) 62 | 63 | return Answer.filter(({ data }) => data).map(({ type, data }) => { 64 | if (type === 1) { 65 | const ipv4 = data 66 | const domain = name 67 | 68 | return { type: IPV4_TYPE, label: ipv4, props: { ipv4, type: 'A', domain }, edges: [source] } 69 | } 70 | else 71 | if (type === 28) { 72 | const ipv6 = data 73 | const domain = name 74 | 75 | return { type: IPV6_TYPE, label: ipv6, props: { ipv6, type: 'AAAA', domain }, edges: [source] } 76 | } 77 | else 78 | if (type === 5) { 79 | const domain = normalizeDomain(data.slice(0, -1)) 80 | const targetDomain = name 81 | 82 | return { type: DOMAIN_TYPE, label: domain, props: { domain, type: 'CNAME', targetDomain }, edges: [source] } 83 | } 84 | else 85 | if (type === 2) { 86 | const domain = data.slice(0, -1).toLowerCase() 87 | const targetDomain = name 88 | 89 | return { type: DOMAIN_TYPE, label: domain, props: { domain, type: 'NS', targetDomain }, edges: [source] } 90 | } 91 | else 92 | if (type === 15) { 93 | const domain = normalizeDomain(data.split(' ')[1].slice(0, -1)) 94 | const targetDomain = name 95 | 96 | return { type: DOMAIN_TYPE, label: domain, props: { domain, type: 'MX', targetDomain }, edges: [source] } 97 | } 98 | else 99 | if (type === 16) { 100 | const string = data.slice(1, -1) 101 | const domain = name 102 | 103 | return { type: STRING_TYPE, label: string, props: { string, type: 'TXT', domain }, edges: [source] } 104 | } 105 | else { 106 | const string = data 107 | 108 | return { type: STRING_TYPE, label: string, props: { string }, edges: [source] } 109 | } 110 | }) 111 | } 112 | 113 | async doAll(item) { 114 | const safe = async(func, ...args) => { 115 | try { 116 | return await func(...args) 117 | } 118 | catch (e) { 119 | console.error(e) 120 | 121 | return [] 122 | } 123 | } 124 | 125 | return [].concat( 126 | await safe(this.doA.bind(this), item), 127 | await safe(this.doAAAA.bind(this), item), 128 | await safe(this.doCNAME.bind(this), item), 129 | await safe(this.doNS.bind(this), item), 130 | await safe(this.doMX.bind(this), item), 131 | await safe(this.doTXT.bind(this), item) 132 | ) 133 | } 134 | 135 | async doA({ id: source = '', label = '' }) { 136 | return this.query(label, 'A', source) 137 | } 138 | 139 | async doAAAA({ id: source = '', label = '' }) { 140 | return this.query(label, 'AAAA', source) 141 | } 142 | 143 | async doCNAME({ id: source = '', label = '' }) { 144 | return this.query(label, 'CNAME', source) 145 | } 146 | 147 | async doNS({ id: source = '', label = '' }) { 148 | return this.query(label, 'NS', source) 149 | } 150 | 151 | async doMX({ id: source = '', label = '' }) { 152 | return this.query(label, 'MX', source) 153 | } 154 | 155 | async doTXT({ id: source = '', label = '' }) { 156 | return this.query(label, 'TXT', source) 157 | } 158 | 159 | async handle(item, options) { 160 | const { type = 'ALL' } = options 161 | 162 | switch (type) { 163 | case 'ALL': 164 | return this.doAll(item) 165 | 166 | case 'A': 167 | return this.doA(item) 168 | 169 | case 'AAAA': 170 | return this.doAAAA(item) 171 | 172 | case 'CNAME': 173 | return this.doCNAME(item) 174 | 175 | case 'NS': 176 | return this.doNS(item) 177 | 178 | case 'MX': 179 | return this.doMX(item) 180 | 181 | case 'TXT': 182 | return this.doTXT(item) 183 | 184 | default: 185 | throw new Error(`Unrecognized record type ${type}`) 186 | } 187 | } 188 | } 189 | 190 | module.exports = { cloudflareDnsQuery } 191 | -------------------------------------------------------------------------------- /transforms/crtsh/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { Transform } = require('../../lib//transform') 4 | const { normalizeDomain } = require('../../lib//normalize') 5 | const { DOMAIN_TYPE, SUBDOMAIN_TYPE } = require('../../lib//types') 6 | 7 | const crtshCNDomainReport = class extends Transform { 8 | static get alias() { 9 | return ['crtsh_cn_domain_report', 'crtshcdr'] 10 | } 11 | 12 | static get title() { 13 | return 'CRT.SH CN Domain Report' 14 | } 15 | 16 | static get description() { 17 | return 'Obtain crt.sh domain report which helps enumerating potential target subdomains' 18 | } 19 | 20 | static get group() { 21 | return this.title 22 | } 23 | 24 | static get tags() { 25 | return ['ce'] 26 | } 27 | 28 | static get types() { 29 | return [DOMAIN_TYPE] 30 | } 31 | 32 | static get options() { 33 | return {} 34 | } 35 | 36 | static get priority() { 37 | return 1 38 | } 39 | 40 | static get noise() { 41 | return 1 42 | } 43 | 44 | async handle({ id: source = '', label = '' }, options) { 45 | const query = querystring.stringify({ 46 | CN: `%.${label}` 47 | }) 48 | 49 | const { responseBody } = await this.scheduler.tryRequest({ uri: `https://crt.sh/?${query}` }) 50 | 51 | const text = responseBody.toString() 52 | const expr = new RegExp(/(?:\*\.)?([^\s]+?)\.?<\/TD>/g) 53 | 54 | const domains = [] 55 | 56 | let match = expr.exec(text) 57 | 58 | while (match) { 59 | match[1].split(/
/gi).forEach((line) => { 60 | domains.push(line.toLowerCase().replace(/^\.+|\.+$/g, '').trim()) 61 | }) 62 | 63 | match = expr.exec(text) 64 | } 65 | 66 | const results = [] 67 | 68 | Array.from(new Set(domains)).forEach((domain) => { 69 | domain = normalizeDomain(domain) 70 | 71 | if (!domain) { 72 | return 73 | } 74 | 75 | if (!domain.endsWith(`.${label}`)) { 76 | return 77 | } 78 | 79 | results.push({ type: DOMAIN_TYPE, label: domain, props: { domain }, edges: [{ source, type: SUBDOMAIN_TYPE }] }) 80 | }) 81 | 82 | return results 83 | } 84 | } 85 | 86 | const crtshSANDomainReport = class extends Transform { 87 | static get alias() { 88 | return ['crtsh_san_domain_report', 'crtshsdr'] 89 | } 90 | 91 | static get title() { 92 | return 'CRT.SH SAN Domain Report' 93 | } 94 | 95 | static get description() { 96 | return 'Obtain crt.sh domain report which helps enumerating potential target subdomains' 97 | } 98 | 99 | static get group() { 100 | return this.title 101 | } 102 | 103 | static get tags() { 104 | return ['ce'] 105 | } 106 | 107 | static get types() { 108 | return [DOMAIN_TYPE] 109 | } 110 | 111 | static get options() { 112 | return {} 113 | } 114 | 115 | static get priority() { 116 | return 1 117 | } 118 | 119 | static get noise() { 120 | return 1 121 | } 122 | 123 | async handle({ id: source = '', label = '' }, options) { 124 | const query = querystring.stringify({ 125 | dNSName: `%.${label}` 126 | }) 127 | 128 | const { responseBody } = await this.scheduler.tryRequest({ uri: `https://crt.sh/?${query}` }) 129 | 130 | const text = responseBody.toString() 131 | const expr = new RegExp(/(?:\*\.)?([^\s]+?)\.?<\/TD>/g) 132 | 133 | const domains = [] 134 | 135 | let match = expr.exec(text) 136 | 137 | while (match) { 138 | match[1].split(/
/gi).forEach((line) => { 139 | domains.push(line.toLowerCase().replace(/^\.+|\.+$/g, '').trim()) 140 | }) 141 | 142 | match = expr.exec(text) 143 | } 144 | 145 | const results = [] 146 | 147 | Array.from(new Set(domains)).forEach((domain) => { 148 | domain = normalizeDomain(domain) 149 | 150 | if (!domain) { 151 | return 152 | } 153 | 154 | if (!domain.endsWith(`.${label}`)) { 155 | return 156 | } 157 | 158 | results.push({ type: DOMAIN_TYPE, label: domain, props: { domain }, edges: [{ source, type: SUBDOMAIN_TYPE }] }) 159 | }) 160 | 161 | return results 162 | } 163 | } 164 | 165 | module.exports = { crtshCNDomainReport, crtshSANDomainReport } 166 | -------------------------------------------------------------------------------- /transforms/dns/index.js: -------------------------------------------------------------------------------- 1 | const util = require('util') 2 | const { Resolver } = require('dns') 3 | 4 | const { flatten } = require('../../lib//utils') 5 | const { Transform } = require('../../lib//transform') 6 | const { normalizeDomain } = require('../../lib//normalize') 7 | const { DOMAIN_TYPE, IPV4_TYPE, IPV6_TYPE, STRING_TYPE } = require('../../lib//types') 8 | 9 | const DEFAULT_CONCURRENCY = 500 10 | 11 | const dnsResolve = class extends Transform { 12 | static get alias() { 13 | return ['dns_resolve', 'dr', 'dns'] 14 | } 15 | 16 | static get title() { 17 | return 'DNS Resolve' 18 | } 19 | 20 | static get description() { 21 | return 'Performs DNS resolution' 22 | } 23 | 24 | static get group() { 25 | return this.title 26 | } 27 | 28 | static get tags() { 29 | return ['ce', 'local', 'dns'] 30 | } 31 | 32 | static get types() { 33 | return [DOMAIN_TYPE, IPV4_TYPE, IPV6_TYPE] 34 | } 35 | 36 | static get options() { 37 | return { 38 | type: { 39 | description: 'Record type', 40 | type: 'string', 41 | default: 'ALL', 42 | choices: ['ALL', 'A', 'AAAA', 'CNAME', 'MX', 'NS', 'TXT'] 43 | }, 44 | 45 | servers: { 46 | description: 'DNS servers', 47 | type: 'string', 48 | default: '' 49 | } 50 | } 51 | } 52 | 53 | static get priority() { 54 | return 1 55 | } 56 | 57 | static get noise() { 58 | return 1 59 | } 60 | 61 | async doAll(item, resolve) { 62 | const safe = async(func, ...args) => { 63 | try { 64 | return await func(...args) 65 | } 66 | catch (e) { 67 | if (!['ENOTFOUND', 'ENODATA'].includes(e.code)) { 68 | console.error(e) 69 | } 70 | 71 | return [] 72 | } 73 | } 74 | 75 | return [].concat( 76 | await safe(this.doA.bind(this), item, resolve), 77 | await safe(this.doAAAA.bind(this), item, resolve), 78 | await safe(this.doCNAME.bind(this), item, resolve), 79 | await safe(this.doNS.bind(this), item, resolve), 80 | await safe(this.doMX.bind(this), item, resolve), 81 | await safe(this.doTXT.bind(this), item, resolve) 82 | ) 83 | } 84 | 85 | async doA({ id: source = '', label = '' }, resolve) { 86 | const records = await resolve(label, 'A') 87 | 88 | return records.filter(r => r).map((record) => { 89 | const ipv4 = record.toLowerCase() 90 | const domain = label 91 | 92 | return { type: IPV4_TYPE, label: record, props: { ipv4, type: 'A', domain }, edges: [source] } 93 | }) 94 | } 95 | 96 | async doAAAA({ id: source = '', label = '' }, resolve) { 97 | const records = await resolve(label, 'AAAA') 98 | 99 | return records.filter(r => r).map((record) => { 100 | const ipv6 = record.toLowerCase() 101 | const domain = label 102 | 103 | return { type: IPV6_TYPE, label: record, props: { ipv6, type: 'AAAA', domain }, edges: [source] } 104 | }) 105 | } 106 | 107 | async doCNAME({ id: source = '', label = '' }, resolve) { 108 | const records = await resolve(label, 'CNAME') 109 | 110 | return records.filter(r => r).map((record) => { 111 | const domain = normalizeDomain(record) 112 | const targetDomain = label 113 | 114 | return { type: DOMAIN_TYPE, label: record, props: { domain, type: 'CNAME', targetDomain }, edges: [source] } 115 | }) 116 | } 117 | 118 | async doNS({ id: source = '', label = '' }, resolve) { 119 | const records = await resolve(label, 'NS') 120 | 121 | return records.filter(r => r).map((record) => { 122 | const domain = normalizeDomain(record) 123 | const targetDomain = label 124 | 125 | return { type: DOMAIN_TYPE, label: record, props: { domain, type: 'NS', targetDomain }, edges: [source] } 126 | }) 127 | } 128 | 129 | async doMX({ id: source = '', label = '' }, resolve) { 130 | const records = await resolve(label, 'MX') 131 | 132 | return records.filter(({ exchange }) => exchange).map((record) => { 133 | const { exchange } = record 134 | 135 | const domain = normalizeDomain(exchange) 136 | const targetDomain = label 137 | 138 | return { type: DOMAIN_TYPE, label: domain, props: { domain, type: 'MX', targetDomain }, edges: [source] } 139 | }) 140 | } 141 | 142 | async doTXT({ id: source = '', label = '' }, resolve) { 143 | const records = await resolve(label, 'TXT') 144 | 145 | const results = records.map((records) => { 146 | return records.filter(r => r).map((record) => { 147 | const string = record 148 | const domain = label 149 | 150 | return { type: STRING_TYPE, label: record, props: { string, type: 'TXT', domain }, edges: [source] } 151 | }) 152 | }) 153 | 154 | return flatten(results, 2) 155 | } 156 | 157 | async handle(item, options) { 158 | const { type = 'ALL', servers = '' } = options 159 | 160 | // TODO: Add the ability to interpolate options so that the resolver can be created only once. 161 | 162 | const resolver = new Resolver() 163 | 164 | if (servers) { 165 | const resolverServers = servers.split(',').map(s => s.trim()).filter(s => s) 166 | 167 | if (resolverServers.length) { 168 | console.warn(`using dns servers ${resolverServers.join(', ')}`) 169 | 170 | resolver.setServers(resolverServers) 171 | } 172 | } 173 | 174 | const resolve = util.promisify(resolver.resolve.bind(resolver)) 175 | 176 | switch (type) { 177 | case 'ALL': 178 | return this.doAll(item, resolve) 179 | 180 | case 'A': 181 | return this.doA(item, resolve) 182 | 183 | case 'AAAA': 184 | return this.doAAAA(item, resolve) 185 | 186 | case 'CNAME': 187 | return this.doCNAME(item, resolve) 188 | 189 | case 'NS': 190 | return this.doNS(item, resolve) 191 | 192 | case 'MX': 193 | return this.doMX(item, resolve) 194 | 195 | case 'TXT': 196 | return this.doTXT(item, resolve) 197 | 198 | default: 199 | throw new Error(`Recognized record type ${type}`) 200 | } 201 | } 202 | } 203 | 204 | module.exports = { dnsResolve } 205 | -------------------------------------------------------------------------------- /transforms/dockerhub/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { Transform } = require('../../lib//transform') 4 | 5 | const DOCKERHUB_REPO_TYPE = 'dockerhub:repo' 6 | 7 | const dockerhubListRepos = class extends Transform { 8 | static get alias() { 9 | return ['dockerhub_list_repos', 'dhlr'] 10 | } 11 | 12 | static get title() { 13 | return 'List DockerHub Repos' 14 | } 15 | 16 | static get description() { 17 | return 'List DockerHub repositories for a given member or org' 18 | } 19 | 20 | static get group() { 21 | return this.title 22 | } 23 | 24 | static get tags() { 25 | return ['ce'] 26 | } 27 | 28 | static get types() { 29 | return ['brand'] 30 | } 31 | 32 | static get options() { 33 | return { 34 | dockerhubKey: { 35 | description: 'DockerHub API Key', 36 | type: 'string' 37 | }, 38 | 39 | count: { 40 | description: 'Results per page', 41 | type: 'number', 42 | default: 100 43 | }, 44 | 45 | pages: { 46 | description: 'Number of pages', 47 | type: 'number' 48 | } 49 | } 50 | } 51 | 52 | static get priority() { 53 | return 1 54 | } 55 | 56 | static get noise() { 57 | return 1 58 | } 59 | 60 | async handle({ id: source = '', label = '' }, options) { 61 | const { dockerhubKey = process.env.DOCKERHUB_KEY, count = 100, pages = Infinity } = options 62 | 63 | const search = querystring.escape(label) 64 | 65 | const results = [] 66 | 67 | for (let page = 1; page <= pages; page++) { 68 | const query = querystring.stringify({ 69 | page: page, 70 | per_page: count 71 | }) 72 | 73 | const { results: items = [] } = await this.scheduler.tryRequest({ uri: `https://hub.docker.com/v2/repositories/${search}/?${query}`, toJson: true }) 74 | 75 | if (!items.length) { 76 | break 77 | } 78 | 79 | items.forEach(({ name, namespace, description }) => { 80 | const fullName = `${namespace}/${name}` 81 | const uri = `https://hub.docker.com/r/${fullName}` 82 | 83 | results.push({ type: DOCKERHUB_REPO_TYPE, label: fullName, props: { uri, fullName, description }, edges: [source] }) 84 | }) 85 | } 86 | 87 | return results 88 | } 89 | } 90 | 91 | module.exports = { dockerhubListRepos } 92 | -------------------------------------------------------------------------------- /transforms/gravatar/index.js: -------------------------------------------------------------------------------- 1 | const crypto = require('crypto') 2 | const querystring = require('querystring') 3 | 4 | const { Transform } = require('../../lib//transform') 5 | 6 | const GRAVATAR_TYPE = 'gravatar' 7 | 8 | const gravatar = class extends Transform { 9 | static get alias() { 10 | return [] 11 | } 12 | 13 | static get title() { 14 | return 'Gravatar' 15 | } 16 | 17 | static get description() { 18 | return 'Get gravatar' 19 | } 20 | 21 | static get group() { 22 | return this.title 23 | } 24 | 25 | static get tags() { 26 | return ['ce'] 27 | } 28 | 29 | static get types() { 30 | return ['email'] 31 | } 32 | 33 | static get options() { 34 | return {} 35 | } 36 | 37 | static get priority() { 38 | return 1 39 | } 40 | 41 | static get noise() { 42 | return 1 43 | } 44 | 45 | async handle({ id: source = '', label = '' }, options) { 46 | const gravatar = crypto.createHash('md5').update(label).digest('hex') 47 | 48 | const query = querystring.stringify({ 49 | s: 256, 50 | d: 'identicon' 51 | }) 52 | 53 | const uri = `https://gravatar.com/avatar/${gravatar}?${query}` 54 | 55 | return [ 56 | { type: GRAVATAR_TYPE, label, image: uri, props: { gravatar, uri }, edges: [source] } 57 | ] 58 | } 59 | } 60 | 61 | module.exports = { gravatar } 62 | -------------------------------------------------------------------------------- /transforms/hackertarget/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { Transform } = require('../../lib//transform') 4 | const { normalizeDomain } = require('../../lib//normalize') 5 | const { DOMAIN_TYPE, IPV4_TYPE, IPV6_TYPE, PORT_TYPE } = require('../../lib//types') 6 | 7 | const hackertargetReverseIpLookup = class extends Transform { 8 | static get alias() { 9 | return ['hackertarget_reverse_ip_lookup', 'htril'] 10 | } 11 | 12 | static get title() { 13 | return 'HackerTarget Reverse IP Lookup' 14 | } 15 | 16 | static get description() { 17 | return 'Obtain reverse IP information from hackertarget.com' 18 | } 19 | 20 | static get group() { 21 | return this.title 22 | } 23 | 24 | static get tags() { 25 | return ['ce'] 26 | } 27 | 28 | static get types() { 29 | return [DOMAIN_TYPE, IPV4_TYPE, IPV6_TYPE] 30 | } 31 | 32 | static get options() { 33 | return { 34 | noiseThreshold: { 35 | description: 'The number of minimum nodes before considering the result set as noise', 36 | type: 'number', 37 | default: 100 38 | } 39 | } 40 | } 41 | 42 | static get priority() { 43 | return 100 44 | } 45 | 46 | static get noise() { 47 | return 9 48 | } 49 | 50 | async handle({ id: source = '', label = '', type: sourceType = '' }, options) { 51 | const { noiseThreshold = 100 } = options 52 | 53 | const query = querystring.stringify({ 54 | q: label 55 | }) 56 | 57 | const { responseBody } = await this.scheduler.tryRequest({ uri: `http://api.hackertarget.com/reverseiplookup/?${query}` }) 58 | 59 | const text = responseBody.toString() 60 | 61 | if (/^\s*\ line.trim().toLowerCase()).filter(line => line) 66 | 67 | if (lines.length === 0 || (lines.length === 1 && lines[0].indexOf(' ') >= 0)) { 68 | return [] 69 | } 70 | 71 | if (lines.length > noiseThreshold) { 72 | this.warn(`query ${label} unlikely to produce specific results due to result set length (${lines.length}) greather than noise threshold (${noiseThreshold})`) 73 | 74 | return [] 75 | } 76 | 77 | return lines.map((domain) => { 78 | domain = normalizeDomain(domain) 79 | 80 | return { type: DOMAIN_TYPE, label: domain, props: { domain }, edges: [source] } 81 | }) 82 | } 83 | } 84 | 85 | const hackertargetOnlinePortScan = class extends Transform { 86 | static get alias() { 87 | return ['hackertarget_online_port_scan', 'htps'] 88 | } 89 | 90 | static get title() { 91 | return 'HackerTarget Online Port Scan' 92 | } 93 | 94 | static get description() { 95 | return 'Obtain port information from hackertarget.com' 96 | } 97 | 98 | static get group() { 99 | return this.title 100 | } 101 | 102 | static get tags() { 103 | return ['ce'] 104 | } 105 | 106 | static get types() { 107 | return [DOMAIN_TYPE, IPV4_TYPE, IPV6_TYPE] 108 | } 109 | 110 | static get options() { 111 | return {} 112 | } 113 | 114 | static get priority() { 115 | return 100 116 | } 117 | 118 | static get noise() { 119 | return 1 120 | } 121 | 122 | async handle({ id: source = '', label = '' }, options) { 123 | const query = querystring.stringify({ 124 | q: label 125 | }) 126 | 127 | const { responseBody } = await this.scheduler.tryRequest({ uri: `https://api.hackertarget.com/nmap/?${query}` }) 128 | 129 | const text = responseBody.toString() 130 | 131 | if (/^\s*\ line.trim().toLowerCase()).filter(line => line && /^\d+\/\w+\s+open\s+/.test(line)) 136 | 137 | return lines.map((line) => { 138 | const [_label, _state, _service] = line.split(/\s+/) 139 | 140 | const label = _label.toUpperCase() 141 | 142 | const [port, protocol] = label.split('/') 143 | 144 | const state = _state.toLowerCase() 145 | const services = [_service.toLowerCase()] 146 | 147 | return { type: PORT_TYPE, label, props: { port, protocol, state, services }, edges: [source] } 148 | }) 149 | } 150 | } 151 | 152 | module.exports = { hackertargetReverseIpLookup, hackertargetOnlinePortScan } 153 | -------------------------------------------------------------------------------- /transforms/http/index.js: -------------------------------------------------------------------------------- 1 | const uuid = require('uuid') 2 | const crypto = require('crypto') 3 | 4 | const { Transform } = require('../../lib//transform') 5 | const { URI_TYPE, STRING_TYPE, CODE_TYPE, TITLE_TYPE, SOFTWARE_TYPE, MIME_TYPE, SHA1_TYPE, FINGERPRINT_TYPE, SIGNATURE_TYPE } = require('../../lib//types') 6 | 7 | const NAMESPACE = 'a456d553-d024-45d2-99af-7719a80050b7' 8 | 9 | const DEFAULT_TIMEOUT = 30000 10 | const DEFAULT_FOLLOW = false 11 | const DEFAULT_AUGMENT = true 12 | const DEFAULT_DEFUSE = false 13 | const DEFAULT_RESPONSE_BODY_SNIFF_SIZE = 512 14 | 15 | const httpFingerprint = class extends Transform { 16 | static get alias() { 17 | return ['http_fingerprint', 'hf'] 18 | } 19 | 20 | static get title() { 21 | return 'HTTP Fingerprint' 22 | } 23 | 24 | static get description() { 25 | return 'Performs a fingerprint on the HTTP server and application' 26 | } 27 | 28 | static get group() { 29 | return this.title 30 | } 31 | 32 | static get tags() { 33 | return ['ce', 'local', 'http'] 34 | } 35 | 36 | static get types() { 37 | return [URI_TYPE] 38 | } 39 | 40 | static get options() { 41 | return { 42 | timeout: { 43 | description: 'HTTP timeout interval', 44 | type: 'number', 45 | default: DEFAULT_TIMEOUT 46 | }, 47 | 48 | follow: { 49 | description: 'Follow redirects', 50 | type: 'boolean', 51 | default: DEFAULT_FOLLOW 52 | }, 53 | 54 | augment: { 55 | description: 'Augment input node with result nodes', 56 | type: 'boolean', 57 | default: DEFAULT_AUGMENT 58 | }, 59 | 60 | defuse: { 61 | description: 'Prevention extraction of sub types', 62 | type: 'boolean', 63 | default: DEFAULT_DEFUSE 64 | }, 65 | 66 | responseBodySniffSize: { 67 | description: 'The size of the response body sniff', 68 | type: 'number', 69 | default: DEFAULT_RESPONSE_BODY_SNIFF_SIZE 70 | } 71 | } 72 | } 73 | 74 | static get priority() { 75 | return 1 76 | } 77 | 78 | static get noise() { 79 | return 1 80 | } 81 | 82 | // NOTE: It is possible to keep the number of nodes at minimum by default. This transform can simply generate a fingerprint node. If other transforms 83 | // require to work on individual fields, they can perform their own extraction first. Something to consider for future improvements. 84 | 85 | async handle({ id: source = '', label = '', props, ...rest }, { timeout = DEFAULT_TIMEOUT, follow = DEFAULT_FOLLOW, augment = DEFAULT_AUGMENT, defuse = DEFAULT_DEFUSE, responseBodySniffSize = DEFAULT_RESPONSE_BODY_SNIFF_SIZE }) { 86 | const results = [] 87 | 88 | // TODO: build a separate module to fingerprint http servers and use instead of this 89 | 90 | const headers = { 91 | 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:73.0) Gecko/20100101 Firefox/73.0', 92 | 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8', 93 | 'Accept-Language': 'en;q=0.5', 94 | 'Accept-Encoding': 'deflate, gzip' 95 | } 96 | 97 | const { url = label, uri = url } = props 98 | 99 | const { responseCode, responseHeaders, responseBody } = await this.scheduler.request({ uri, headers, timeout, follow, downloadLimit: DEFAULT_RESPONSE_BODY_SNIFF_SIZE, rejectUnauthorized: false }) 100 | 101 | if (!responseCode) { 102 | return results 103 | } 104 | 105 | const responseBodySniff = responseBody ? responseBody.slice(0, responseBodySniffSize).toString() : '' 106 | 107 | const signature = uuid.v5([responseCode, responseHeaders['location'], responseHeaders['content-location'], responseBodySniff].filter(f => f).join(':::'), NAMESPACE) 108 | 109 | const getHeader = (headers, name) => { 110 | let header = headers[name] 111 | 112 | if (!header) { 113 | return 114 | } 115 | 116 | if (Array.isArray(header)) { 117 | header = header[0] 118 | } 119 | 120 | if (!header) { 121 | return 122 | } 123 | 124 | return header.trim() 125 | } 126 | 127 | let server = getHeader(responseHeaders, 'server') 128 | let contentType = getHeader(responseHeaders, 'content-type') 129 | 130 | let sha1 131 | let title 132 | let software 133 | 134 | if (responseBodySniff) { 135 | sha1 = crypto.createHash('sha1').update(responseBodySniff).digest('hex') 136 | 137 | const titleMatch = responseBodySniff.match(/([^<]+)/i) 138 | 139 | if (titleMatch) { 140 | title = titleMatch[1] 141 | } 142 | 143 | const generatorMatch = responseBodySniff.match(/<meta\s+name="generator"\s+content="(.+?)"|<meta\s+content="(.+?)"\s+name="generator"/i) 144 | 145 | if (generatorMatch) { 146 | software = generatorMatch[1].toLowerCase() 147 | } 148 | } 149 | 150 | if (augment) { 151 | results.push({ id: source, label, props: { ...props, responseCode, responseHeaders, responseBodySniff, signature, title, software, sha1 }, ...rest }) 152 | } 153 | 154 | if (!augment || !defuse) { 155 | if (signature) { 156 | results.push({ type: SIGNATURE_TYPE, label: signature, props: { signature }, edges: [{ source, type: FINGERPRINT_TYPE }] }) 157 | } 158 | 159 | if (sha1) { 160 | results.push({ type: SHA1_TYPE, label: sha1, props: { sha1 }, edges: [{ source, type: FINGERPRINT_TYPE }] }) 161 | } 162 | 163 | if (title) { 164 | results.push({ type: TITLE_TYPE, label: title, props: { title }, edges: [{ source, type: FINGERPRINT_TYPE }] }) 165 | } 166 | 167 | if (software) { 168 | results.push({ type: SOFTWARE_TYPE, label: `${software}`, props: { software }, edges: [{ source, type: FINGERPRINT_TYPE }] }) 169 | } 170 | 171 | if (responseCode) { 172 | results.push({ type: CODE_TYPE, label: `${responseCode}/HTTP`, props: { code: responseCode }, edges: [{ source, type: FINGERPRINT_TYPE }] }) 173 | } 174 | 175 | if (server) { 176 | server = server.trim() 177 | 178 | results.push({ type: SOFTWARE_TYPE, label: `${server}`, props: { server }, edges: [{ source, type: FINGERPRINT_TYPE }] }) 179 | } 180 | 181 | if (contentType) { 182 | contentType = contentType.trim().toLowerCase() 183 | 184 | results.push({ type: MIME_TYPE, label: `${contentType}`, props: { contentType }, edges: [{ source, type: FINGERPRINT_TYPE }] }) 185 | } 186 | } 187 | 188 | return results 189 | } 190 | } 191 | 192 | module.exports = { httpFingerprint } 193 | -------------------------------------------------------------------------------- /transforms/ipinfoio/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { Transform } = require('../../lib//transform') 4 | const { WHOIS_TYPE, IPV4_TYPE } = require('../../lib//types') 5 | 6 | const ipinfoioWidgetSearch = class extends Transform { 7 | static get alias() { 8 | return ['ipinfoio_widget_search', 'iiiows'] 9 | } 10 | 11 | static get title() { 12 | return 'ipinfo.io widget search' 13 | } 14 | 15 | static get description() { 16 | return 'Obtain ipinfo.io whois report via the web widget' 17 | } 18 | 19 | static get group() { 20 | return this.title 21 | } 22 | 23 | static get tags() { 24 | return ['ce'] 25 | } 26 | 27 | static get types() { 28 | return [IPV4_TYPE] 29 | } 30 | 31 | static get options() { 32 | return {} 33 | } 34 | 35 | static get priority() { 36 | return 1 37 | } 38 | 39 | static get noise() { 40 | return 1 41 | } 42 | 43 | async handle({ id: source = '', label = '' }, options) { 44 | const search = querystring.escape(label) 45 | 46 | const headers = { 47 | 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:88.0) Gecko/20100101 Firefox/88.0', 48 | 'Referer': 'https://ipinfo.io/' 49 | } 50 | 51 | const { org, ...rest } = await this.scheduler.tryRequest({ uri: `https://ipinfo.io/widget/${search}`, headers, toJson: true }) 52 | 53 | return [{ 54 | type: WHOIS_TYPE, 55 | label: org, 56 | props: { org, ...rest } 57 | }] 58 | } 59 | } 60 | 61 | module.exports = { ipinfoioWidgetSearch } 62 | -------------------------------------------------------------------------------- /transforms/omnisint/index.js: -------------------------------------------------------------------------------- 1 | const { SubdomainTransform } = require('../../lib//common') 2 | 3 | const { DOMAIN_TYPE } = require('../../lib//types') 4 | 5 | class OmnisintSubdomainReport extends SubdomainTransform { 6 | static alias = ['omnisint_subdomain_report']; 7 | 8 | static title = 'Omnisint Subdomain Report'; 9 | 10 | static description = 'Obtain omnisint domain report which helps enumerating target subdomains'; 11 | 12 | static types = [DOMAIN_TYPE]; 13 | 14 | static group = 'Omnisint Subdomain Report'; 15 | 16 | static tags = ['ce']; 17 | 18 | static options = {}; 19 | 20 | static priority = 1; 21 | 22 | static noise = 1; 23 | 24 | async getResults(domain) { 25 | const json = await this.scheduler.tryRequest({ uri: `https://sonar.omnisint.io/subdomains/${domain}`, toJson: true }) 26 | 27 | return { subdomains: [...new Set(json)].map((subdomain) => ({ subdomain })) } 28 | } 29 | } 30 | 31 | module.exports = { OmnisintSubdomainReport } 32 | -------------------------------------------------------------------------------- /transforms/pks/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { makeId } = require('../../lib//utils') 4 | const { isEmail } = require('../../lib//detect') 5 | const { Transform } = require('../../lib//transform') 6 | const { DOMAIN_TYPE, EMAIL_TYPE } = require('../../lib//types') 7 | 8 | const pksLookupKeys = class extends Transform { 9 | static get alias() { 10 | return ['pks_lookup_keys', 'pkslk'] 11 | } 12 | 13 | static get title() { 14 | return 'PKS Lookup' 15 | } 16 | 17 | static get description() { 18 | return 'Look the the PKS database at pool.sks-keyservers.net which pgp.mit.edu is part of' 19 | } 20 | 21 | static get group() { 22 | return this.title 23 | } 24 | 25 | static get tags() { 26 | return ['ce'] 27 | } 28 | 29 | static get types() { 30 | return [DOMAIN_TYPE, EMAIL_TYPE] 31 | } 32 | 33 | static get options() { 34 | return {} 35 | } 36 | 37 | static get priority() { 38 | return 1 39 | } 40 | 41 | static get noise() { 42 | return 1 43 | } 44 | 45 | async handle({ id: source = '', label = '' }, options) { 46 | const query = querystring.stringify({ 47 | search: label, 48 | op: 'index' 49 | }) 50 | 51 | const server = 'http://hkps.pool.sks-keyservers.net' 52 | 53 | const { responseBody } = await this.scheduler.tryRequest({ uri: `${server}/pks/lookup?${query}`, maxRetries: 5 }) 54 | 55 | const text = responseBody.toString() 56 | const regx = /<pre>([\s\S]+?)<\/pre>/g 57 | 58 | const results = [] 59 | 60 | let match 61 | 62 | while (match = regx.exec(text)) { 63 | let inner = match[1] || '' 64 | 65 | if (inner.length > 1024) { 66 | continue // NOTE: too big and probably junk 67 | } 68 | 69 | inner = inner.replace(/&/g, '&').replace(/</g, '<').replace(/>/g, '>') 70 | 71 | let key = '' 72 | let uri = '' 73 | let addresses = [] 74 | 75 | const innerMatch = inner.match(/<a href="([\s\S]+?)">([\s\S]+?)<\/a>[\s\S]+?<a href="([\s\S]+?)">([\s\S]+?)<\/a>((?:.|[\r\n])*?)$/) 76 | 77 | if (innerMatch) { 78 | uri = `${server}${innerMatch[1].trim()}` 79 | key = innerMatch[2].trim() 80 | 81 | addresses.push(innerMatch[4].trim()) 82 | 83 | innerMatch[5].split(/\r|\n/g).forEach((line) => { 84 | line = line.trim() 85 | 86 | if (!line) { 87 | return 88 | } 89 | 90 | addresses.push(line) 91 | }) 92 | } 93 | 94 | if (key && uri) { 95 | if (!/^[0-9A-Z]{8}$/.test(key)) { 96 | continue 97 | } 98 | 99 | const keyNode = { id: makeId('pks:key', key), type: 'pks:key', label: key, props: { uri, key }, edges: [source] } 100 | 101 | results.push(keyNode) 102 | 103 | addresses.forEach((address) => { 104 | let name = '' 105 | let email = '' 106 | 107 | const emailMatch = address.match(/^(.*?)<(.*?)>$/) 108 | 109 | if (emailMatch) { 110 | const [_, _name = '', _email = ''] = emailMatch 111 | 112 | name = _name.trim() 113 | email = _email.trim() 114 | } 115 | else { 116 | email = address.trim() 117 | } 118 | 119 | if (!isEmail(email)) { 120 | return 121 | } 122 | 123 | const emailNode = { id: makeId('email', email), type: 'email', label: email, props: { email, name }, edges: [keyNode.id] } 124 | 125 | results.push(emailNode) 126 | 127 | if (name) { 128 | results.push({ id: makeId('person', name), type: 'person', label: name, props: { name, email }, edges: [emailNode.id] }) 129 | } 130 | }) 131 | } 132 | } 133 | 134 | return results 135 | } 136 | } 137 | 138 | module.exports = { pksLookupKeys } 139 | -------------------------------------------------------------------------------- /transforms/pwndb/index.js: -------------------------------------------------------------------------------- 1 | const { makeId } = require('../../lib//utils') 2 | const { Transform } = require('../../lib//transform') 3 | const { DOMAIN_TYPE, EMAIL_TYPE } = require('../../lib//types') 4 | 5 | const PWNDB_ACCOUNT_TYPE = 'pwndb:account' 6 | 7 | const pwndbSearch = class extends Transform { 8 | static get alias() { 9 | return ['pwndb_search', 'pds'] 10 | } 11 | 12 | static get title() { 13 | return 'PwnDB Search' 14 | } 15 | 16 | static get description() { 17 | return 'Searching the PwnDB database' 18 | } 19 | 20 | static get group() { 21 | return this.title 22 | } 23 | 24 | static get tags() { 25 | return ['ce'] 26 | } 27 | 28 | static get types() { 29 | return [DOMAIN_TYPE, EMAIL_TYPE] 30 | } 31 | 32 | static get options() { 33 | return {} 34 | } 35 | 36 | static get priority() { 37 | return 1 38 | } 39 | 40 | static get noise() { 41 | return 1 42 | } 43 | 44 | async handle({ id: source = '', label = '', type = '' }) { 45 | const results = [] 46 | 47 | let body 48 | 49 | switch (type) { 50 | case DOMAIN_TYPE: 51 | body = `luser=%25&domain=${encodeURIComponent(label)}&luseropr=1&domainopr=0&submitform=em` 52 | 53 | break 54 | 55 | case EMAIL_TYPE: 56 | const [user = '', domain = ''] = label.split('@') 57 | 58 | body = `luser=${encodeURIComponent(user)}&domain=${encodeURIComponent(domain)}&luseropr=1&domainopr=0&submitform=em` 59 | 60 | break 61 | 62 | default: 63 | if (label.indexOf('@') > 0) { 64 | const [user = '', domain = ''] = label.split('@') 65 | 66 | body = `luser=${encodeURIComponent(user)}&domain=${encodeURIComponent(domain)}&luseropr=1&domainopr=0&submitform=em` 67 | } 68 | else { 69 | body = `luser=%25&domain=${encodeURIComponent(label)}&luseropr=1&domainopr=0&submitform=em` 70 | } 71 | } 72 | 73 | const { responseBody } = await this.scheduler.tryRequest({ method: 'POST', uri: 'http://pwndb2am4tzkvold.onion.pet/', body, maxRetries: 5 }) 74 | 75 | const text = responseBody.toString() 76 | 77 | const preMatch = text.match(/<pre>([\w\W]+?)<\/pre>/) 78 | 79 | if (preMatch) { 80 | const reg = /\d[\W]?Array[\W]+?\(([\w\W]*?)\)/g 81 | 82 | let arrayMatch 83 | 84 | while ((arrayMatch = reg.exec(preMatch[1])) !== null) { 85 | const entry = {} 86 | 87 | let reg = /\[(id|luser|domain|password)\]\s=>\s(.*)/g 88 | 89 | let detailsMatch 90 | 91 | while ((detailsMatch = reg.exec(arrayMatch[1])) !== null) { 92 | entry[detailsMatch[1]] = detailsMatch[2] 93 | } 94 | 95 | const { id: recordId, luser: user, domain, password } = entry 96 | 97 | if (user === 'donate' && domain === 'btc.thx') { 98 | continue 99 | } 100 | 101 | const email = `${user}@${domain}` 102 | const label = `${recordId}@${email}` 103 | 104 | const nodeId = makeId(PWNDB_ACCOUNT_TYPE, label) 105 | 106 | results.push({ id: nodeId, type: PWNDB_ACCOUNT_TYPE, label, props: { recordId, user, domain, password }, edges: [source] }) 107 | 108 | if (type !== EMAIL_TYPE) { 109 | results.push({ type: EMAIL_TYPE, label: email, props: { email }, edges: [nodeId] }) 110 | } 111 | } 112 | } 113 | 114 | return results 115 | } 116 | } 117 | 118 | module.exports = { pwndbSearch } 119 | -------------------------------------------------------------------------------- /transforms/riddler/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { Transform } = require('../../lib//transform') 4 | const { makeId, flatten } = require('../../lib//utils') 5 | const { normalizeDomain } = require('../../lib//normalize') 6 | const { DOMAIN_TYPE, SUBDOMAIN_TYPE, IPV4_TYPE, IPV6_TYPE } = require('../../lib//types') 7 | 8 | const DEFAULT_IGNORE_DOMAINS = true 9 | const DEFAULT_IGNORE_IPS = true 10 | 11 | const riddlerIpSearch = class extends Transform { 12 | static get alias() { 13 | return ['riddler_ip_search', 'rdis'] 14 | } 15 | 16 | static get title() { 17 | return 'Riddler IP Search' 18 | } 19 | 20 | static get description() { 21 | return 'Searches for IP references using F-Secure riddler.io' 22 | } 23 | 24 | static get group() { 25 | return this.title 26 | } 27 | 28 | static get tags() { 29 | return ['ce'] 30 | } 31 | 32 | static get types() { 33 | return [IPV4_TYPE, IPV6_TYPE] 34 | } 35 | 36 | static get options() { 37 | return { 38 | ignoreDomains: { 39 | description: 'Ignore the provided domains', 40 | type: 'boolean', 41 | default: DEFAULT_IGNORE_DOMAINS 42 | } 43 | } 44 | } 45 | 46 | static get priority() { 47 | return 1 48 | } 49 | 50 | static get noise() { 51 | return 1 52 | } 53 | 54 | async run(items, { ignoreDomains = DEFAULT_IGNORE_DOMAINS }) { 55 | const results = await Promise.all(items.map(async({ id: source = '', label = '' }) => { 56 | const query = querystring.stringify({ 57 | q: `ip:${label}` 58 | }) 59 | 60 | const { responseBody } = await this.scheduler.tryRequest({ uri: `https://riddler.io/search/exportcsv?${query}`, maxRetries: 5 }) 61 | 62 | const lines = responseBody.toString().trim().split('\n').slice(2).filter(l => l).map(l => l.split(',')) 63 | 64 | const ips = lines.map(([ipv4, _data, _c1, _c2, _country, _fqdn, _keywords, _pld, domain]) => { 65 | domain = normalizeDomain(domain) 66 | 67 | const ipId = makeId(IPV4_TYPE, ipv4) 68 | const domainId = makeId(DOMAIN_TYPE, domain) 69 | 70 | const results = [] 71 | 72 | results.push({ id: ipId, type: IPV4_TYPE, label: ipv4, props: { ipv4, domain }, edges: [source] }) 73 | 74 | if (!ignoreDomains) { 75 | results.push({ id: domainId, type: DOMAIN_TYPE, label: domain, props: { domain, ipv4 }, edges: [ipId] }) 76 | } 77 | 78 | return results 79 | }) 80 | 81 | return [].concat(ips) 82 | })) 83 | 84 | return flatten(results, 3) 85 | } 86 | } 87 | 88 | const riddlerDomainSearch = class extends Transform { 89 | static get alias() { 90 | return ['riddler_domain_search', 'rdds'] 91 | } 92 | 93 | static get title() { 94 | return 'Riddler Domain Search' 95 | } 96 | 97 | static get description() { 98 | return 'Searches for Domain references using F-Secure riddler.io' 99 | } 100 | 101 | static get group() { 102 | return this.title 103 | } 104 | 105 | static get tags() { 106 | return ['ce'] 107 | } 108 | 109 | static get types() { 110 | return [DOMAIN_TYPE] 111 | } 112 | 113 | static get options() { 114 | return { 115 | ignoreIps: { 116 | description: 'Ignore the provided IPs', 117 | type: 'boolean', 118 | default: DEFAULT_IGNORE_IPS 119 | } 120 | } 121 | } 122 | 123 | static get priority() { 124 | return 1 125 | } 126 | 127 | static get noise() { 128 | return 1 129 | } 130 | 131 | async run(items, { ignoreIps = DEFAULT_IGNORE_IPS }) { 132 | const results = await Promise.all(items.map(async({ id: source = '', label = '' }) => { 133 | const query = querystring.stringify({ 134 | q: `pld:${label}` 135 | }) 136 | 137 | const { responseBody } = await this.scheduler.tryRequest({ uri: `https://riddler.io/search/exportcsv?${query}`, maxRetries: 5 }) 138 | 139 | const lines = responseBody.toString().trim().split('\n').slice(2).filter(l => l).map(l => l.split(',')) 140 | 141 | const ips = lines.map(([ipv4, _data, _c1, _c2, _country, fqdn, _keywords, _pld, domain]) => { 142 | domain = normalizeDomain(fqdn) 143 | 144 | const domainId = makeId(DOMAIN_TYPE, domain) 145 | const ipId = makeId(IPV4_TYPE, ipv4) 146 | 147 | const results = [] 148 | 149 | results.push({ id: domainId, type: DOMAIN_TYPE, label: domain, props: { domain, ipv4 }, edges: [{ source, type: SUBDOMAIN_TYPE }] }) 150 | 151 | if (!ignoreIps) { 152 | results.push({ id: ipId, type: IPV4_TYPE, label: ipv4, props: { ipv4, domain }, edges: [domainId] }) 153 | } 154 | 155 | return results 156 | }) 157 | 158 | return [].concat(ips) 159 | })) 160 | 161 | return flatten(results, 3) 162 | } 163 | } 164 | 165 | module.exports = { riddlerIpSearch, riddlerDomainSearch } 166 | -------------------------------------------------------------------------------- /transforms/script/index.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | 3 | const { Transform } = require('../../lib//transform') 4 | 5 | const script = class extends Transform { 6 | static get alias() { 7 | return ['script'] 8 | } 9 | 10 | static get title() { 11 | return 'Script' 12 | } 13 | 14 | static get description() { 15 | return 'Perform transformation with external script' 16 | } 17 | 18 | static get group() { 19 | return this.title 20 | } 21 | 22 | static get tags() { 23 | return ['ce', 'evil', 'local'] 24 | } 25 | 26 | static get types() { 27 | return [] 28 | } 29 | 30 | static get options() { 31 | return { 32 | script: { 33 | type: 'string', 34 | description: 'The path to the script to execute', 35 | default: '' 36 | }, 37 | 38 | arg: { 39 | type: 'array', 40 | description: 'List of arguments to pass to the script function', 41 | default: [] 42 | } 43 | } 44 | } 45 | 46 | static get priority() { 47 | return 100 48 | } 49 | 50 | static get noise() { 51 | return 100 52 | } 53 | 54 | async handle(node, options) { 55 | const { func, args } = options 56 | 57 | return await func.call(this, node, ...args) 58 | } 59 | 60 | async run(nodes, options, ...rest) { 61 | const { script, arg: args } = options 62 | 63 | if (!script) { 64 | throw new Error(`No script to load`) 65 | } 66 | 67 | const func = require(path.resolve(process.cwd(), script)) 68 | 69 | if (typeof(func) !== 'function') { 70 | throw new Error(`No function exported in ${script}`) 71 | } 72 | 73 | return super.run(nodes, { func, args }, ...rest) 74 | } 75 | } 76 | 77 | module.exports = { 78 | script 79 | } 80 | -------------------------------------------------------------------------------- /transforms/scylla/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { makeId } = require('../../lib//utils') 4 | const { Transform } = require('../../lib//transform') 5 | const { DOMAIN_TYPE, EMAIL_TYPE } = require('../../lib//types') 6 | 7 | const SCYLLA_ACCOUNT_TYPE = 'scylla:account' 8 | 9 | const scyllaSearch = class extends Transform { 10 | static get alias() { 11 | return ['scylla_search', 'scys'] 12 | } 13 | 14 | static get title() { 15 | return 'Scylla Search' 16 | } 17 | 18 | static get description() { 19 | return 'Searching the Scylla database' 20 | } 21 | 22 | static get group() { 23 | return this.title 24 | } 25 | 26 | static get tags() { 27 | return ['ce'] 28 | } 29 | 30 | static get types() { 31 | return [DOMAIN_TYPE, EMAIL_TYPE] 32 | } 33 | 34 | static get options() { 35 | return {} 36 | } 37 | 38 | static get priority() { 39 | return 1 40 | } 41 | 42 | static get noise() { 43 | return 1 44 | } 45 | 46 | async handle({ id: source = '', label = '', type = '' }) { 47 | const results = [] 48 | 49 | let field 50 | let search 51 | 52 | if (type === DOMAIN_TYPE) { 53 | field = 'email' 54 | search = label 55 | } 56 | else 57 | if (type === EMAIL_TYPE) { 58 | field = 'email' 59 | search = label 60 | } 61 | else { 62 | field = '*' 63 | search = label 64 | } 65 | 66 | const size = 100 67 | 68 | let start = 0 69 | 70 | for (;;) { 71 | const query = querystring.stringify({ 72 | q: `${field}:${search}`, 73 | size: size, 74 | start: start 75 | }) 76 | 77 | this.info(`retrieving scylla page with size ${start}`) 78 | 79 | const items = await this.scheduler.tryRequest({ uri: `https://scylla.so/search?${query}`, toJson: true }) 80 | 81 | if (!items.length) { 82 | break 83 | } 84 | 85 | items.forEach(({ id, fields }) => { 86 | const { username, domain, email } = fields 87 | 88 | let label 89 | 90 | if (email) { 91 | label = email 92 | } 93 | else 94 | if (username && domain) { 95 | label = `${username}@${domain}` 96 | } 97 | else 98 | if (username) { 99 | label = username 100 | } 101 | else { 102 | label = `${domain}/${Math.random().toString(32).slice(2)}` 103 | } 104 | 105 | const nodeId = makeId(SCYLLA_ACCOUNT_TYPE, label) 106 | 107 | results.push({ id: nodeId, type: SCYLLA_ACCOUNT_TYPE, label, props: { id, ...fields }, edges: [source] }) 108 | 109 | if (type !== EMAIL_TYPE) { 110 | results.push({ type: EMAIL_TYPE, label: email, props: { email }, edges: [nodeId] }) 111 | } 112 | }) 113 | 114 | if (items.length < size) { 115 | break 116 | } 117 | 118 | start += items.length 119 | } 120 | 121 | return results 122 | } 123 | } 124 | 125 | module.exports = { scyllaSearch } 126 | -------------------------------------------------------------------------------- /transforms/securitytrails/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { Transform } = require('../../lib//transform') 4 | const { normalizeDomain } = require('../../lib//normalize') 5 | const { BRAND_TYPE, DOMAIN_TYPE, SUBDOMAIN_TYPE } = require('../../lib//types') 6 | 7 | const securitytrailsSuggestions = class extends Transform { 8 | static get alias() { 9 | return ['securitytrails_domain_suggestions', 'stds'] 10 | } 11 | 12 | static get title() { 13 | return 'Security Trails Domain Suggestions' 14 | } 15 | 16 | static get description() { 17 | return 'Get a list of domain suggestions from securitytrails.com' 18 | } 19 | 20 | static get group() { 21 | return this.title 22 | } 23 | 24 | static get tags() { 25 | return ['ce'] 26 | } 27 | 28 | static get types() { 29 | return [BRAND_TYPE] 30 | } 31 | 32 | static get options() { 33 | return {} 34 | } 35 | 36 | static get priority() { 37 | return 1 38 | } 39 | 40 | static get noise() { 41 | return 9 42 | } 43 | 44 | async handle({ id: source = '', label = '' }) { 45 | const search = querystring.escape(label) 46 | 47 | const { suggestions = [] } = await this.scheduler.tryRequest({ uri: `https://securitytrails.com/app/api/autocomplete/domain/${search}`, toJson: true }) 48 | 49 | return suggestions.map((domain) => { 50 | domain = normalizeDomain(domain) 51 | 52 | return { type: DOMAIN_TYPE, label: domain, props: { domain }, edges: [source] } 53 | }) 54 | } 55 | } 56 | 57 | module.exports = { securitytrailsSuggestions } 58 | -------------------------------------------------------------------------------- /transforms/spyse/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { Transform } = require('../../lib//transform') 4 | const { normalizeDomain } = require('../../lib//normalize') 5 | const { DOMAIN_TYPE, SUBDOMAIN_TYPE } = require('../../lib//types') 6 | 7 | class SpyseTransform extends Transform { 8 | getCallOptions() { 9 | throw new Error(`Not implemented`) 10 | } 11 | 12 | filterRecord() { 13 | return true 14 | } 15 | 16 | extractRecord() { 17 | throw new Error(`Not implemented`) 18 | } 19 | 20 | async handle({ id: source = '', label = '' }, { spyseKey = process.env.SPYSE_KEY, ...options }) { 21 | if (!spyseKey) { 22 | this.warn(`no spyse api key specified`) 23 | 24 | return 25 | } 26 | 27 | const { pathname: p, query: q } = this.getCallOptions({ source, label }, options) 28 | 29 | const results = [] 30 | 31 | let page = 1 32 | let count = 0 33 | 34 | for (;;) { 35 | const query = querystring.stringify({ 36 | ...q, 37 | 38 | page: page 39 | }) 40 | 41 | this.info(`retrieving spyse page ${page}`) 42 | 43 | const headers = { 44 | Authorization: `Bearer ${spyseKey}` 45 | } 46 | 47 | const { records = [], count: total } = await this.scheduler.tryRequest({ uri: `https://api.spyse.com/v3/data/${p.replace(/^\/+/, '')}?${query}`, headers, toJson: true }) 48 | 49 | if (!records.length) { 50 | break 51 | } 52 | 53 | records.forEach((record) => { 54 | if (!this.filterRecord({ source, label }, record)) { 55 | return 56 | } 57 | 58 | results.push(this.extractRecord({ source, label }, record)) 59 | }) 60 | 61 | count += records.length 62 | 63 | if (count >= total) { 64 | break 65 | } 66 | 67 | page += 1 68 | } 69 | 70 | return results 71 | } 72 | } 73 | 74 | const spyseSubdomains = class extends SpyseTransform { 75 | static get category() { 76 | return ['shodan'] 77 | } 78 | 79 | static get alias() { 80 | return ['spyse_subdomains', 'ssds'] 81 | } 82 | 83 | static get title() { 84 | return 'Spyse Subdomains' 85 | } 86 | 87 | static get description() { 88 | return 'Performs subdomain searching with Spyse' 89 | } 90 | 91 | static get group() { 92 | return this.title 93 | } 94 | 95 | static get tags() { 96 | return ['ce'] 97 | } 98 | 99 | static get types() { 100 | return [DOMAIN_TYPE] 101 | } 102 | 103 | static get options() { 104 | return { 105 | spyseKey: { 106 | type: 'string', 107 | description: 'Spyse API key' 108 | } 109 | } 110 | } 111 | 112 | static get priority() { 113 | return 1 114 | } 115 | 116 | static get noise() { 117 | return 1 118 | } 119 | 120 | getCallOptions({ label }) { 121 | return { 122 | pathname: '/domain/subdomain', 123 | query: { 124 | domain: label 125 | } 126 | } 127 | } 128 | 129 | extractRecord({ source }, record) { 130 | const { domain: _domain } = record 131 | 132 | const domain = normalizeDomain(_domain) 133 | 134 | return { 135 | type: DOMAIN_TYPE, 136 | label: domain, 137 | props: { domain }, 138 | edges: [{ source, type: SUBDOMAIN_TYPE }] 139 | } 140 | } 141 | } 142 | 143 | module.exports = { spyseSubdomains } 144 | -------------------------------------------------------------------------------- /transforms/tcp/ports.js: -------------------------------------------------------------------------------- 1 | const relationalDatabases = { 2 | MaxDB: [7210], 3 | MySQL: [3306], 4 | OracleDB: [1521, 1830], 5 | PostgreSQL: [5432], 6 | MSSQL: [1433, 1434], 7 | Sphinx: [9306] 8 | } 9 | 10 | const nosqlDatabases = { 11 | ArangoDB: [8529], 12 | Cassandra: [7000, 7001, 9042], 13 | CouchDB: [5984], 14 | Elasticsearch: [9200, 9243, 9300], 15 | MongoDB: [27017, 27018, 27019, 28017], 16 | Neo4J: [7473, 7474], 17 | Redis: [6379], 18 | Riak: [8087, 8098], 19 | RethinkDB: [8080, 28015, 29015], 20 | Solr: [7574, 8983] 21 | } 22 | 23 | const webApplicationServers = { 24 | JBoss: [3528, 3529, 4447, 8009, 8080, 8443, 9990, 9999], 25 | Jetty: [8080], 26 | Tomcat: [8005, 8009, 8080], 27 | WildFly: [4712, 4713, 8009, 8080, 8443, 9990, 9993], 28 | WebLogic: [5556, 7001, 7002, 8001], 29 | WebSphere: [8008, 9043, 9060, 9080, 9443] 30 | } 31 | 32 | const containerStores = { 33 | Consul: [8300, 8301, 8302, 8400, 8500, 8600], 34 | etcd: [2379, 2380], 35 | Kubernetes: [6443, 8080], 36 | Mesos: [5050, 5051], 37 | ZooKeeper: [2181, 2888, 3888] 38 | } 39 | 40 | const defaultProtocols = { 41 | DNS: [53], 42 | DNSTLS: [853], 43 | FTP: [20, 21], 44 | FTPS: [989, 990], 45 | HTTP: [80], 46 | HTTPS: [443], 47 | IMAP: [143], 48 | IMAPS: [993], 49 | Kerberos: [543, 544, 749, 750, 751, 752, 753, 754, 760], 50 | LDAP: [389, 636], 51 | NetBIOS: [137, 138, 139], 52 | NFS: [944], 53 | NTP: [123], 54 | RPC: [530], 55 | RSH: [514], 56 | rsync: [873], 57 | SMB: [445], 58 | SNMP: [161, 162, 199], 59 | SSH: [22], 60 | Telnet: [23, 992], 61 | SMTP: [25], 62 | SMTPTLS: [465], 63 | WHOIS: [43] 64 | } 65 | 66 | const categories = { 67 | relationalDatabases, 68 | 69 | nosqlDatabases, 70 | 71 | webApplicationServers, 72 | 73 | containerStores, 74 | 75 | defaultProtocols 76 | } 77 | 78 | const services = Object.assign({}, ...Object.values(categories)) 79 | 80 | const ports = Object.entries(services).reduce((acc, [service, ports]) => { 81 | ports.forEach((port) => { 82 | if (!acc[port]) { 83 | acc[port] = [] 84 | } 85 | 86 | acc[port].push(service) 87 | }) 88 | 89 | return acc 90 | }, {}) 91 | 92 | const protocols = { 93 | 'HTTP': [80, 9200], 94 | 'HTTPS': [443, 6443, 7443, 8443, 9243, 9443] 95 | } 96 | 97 | module.exports = { categories, services, ports, protocols } 98 | -------------------------------------------------------------------------------- /transforms/threatcrowd/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { Transform } = require('../../lib//transform') 4 | const { normalizeDomain } = require('../../lib//normalize') 5 | const { DOMAIN_TYPE, SUBDOMAIN_TYPE, EMAIL_TYPE, IPV4_TYPE, IPV6_TYPE } = require('../../lib//types') 6 | 7 | const threatcrowdDomainReport = class extends Transform { 8 | static get alias() { 9 | return ['threatcrowd_domain_report', 'tcdr'] 10 | } 11 | 12 | static get title() { 13 | return 'Threatcrowd Domain Report' 14 | } 15 | 16 | static get description() { 17 | return 'Obtain threatcrowd domain report which helps enumerating potential target subdomains and email addresses' 18 | } 19 | 20 | static get types() { 21 | return [DOMAIN_TYPE] 22 | } 23 | 24 | static get group() { 25 | return this.title 26 | } 27 | 28 | static get tags() { 29 | return ['ce'] 30 | } 31 | 32 | static get options() { 33 | return {} 34 | } 35 | 36 | static get priority() { 37 | return 1 38 | } 39 | 40 | static get noise() { 41 | return 1 42 | } 43 | 44 | async handle({ id: source = '', label = '' }, options) { 45 | const query = querystring.stringify({ 46 | domain: label 47 | }) 48 | 49 | const { subdomains: domains = [], emails = [] } = await this.scheduler.tryRequest({ uri: `https://www.threatcrowd.org/searchApi/v2/domain/report/?${query}`, toJson: true }) 50 | 51 | const results = [] 52 | 53 | domains.forEach((domain) => { 54 | domain = normalizeDomain(domain) 55 | 56 | if (!domain) { 57 | return 58 | } 59 | 60 | results.push({ type: DOMAIN_TYPE, label: domain, props: { domain }, edges: [{ source, type: SUBDOMAIN_TYPE }] }) 61 | }) 62 | 63 | emails.forEach((email) => { 64 | email = email.trim().toLocaleLowerCase() 65 | 66 | if (!email) { 67 | return 68 | } 69 | 70 | results.push({ type: EMAIL_TYPE, label: email, props: { email }, edges: [source] }) 71 | }) 72 | 73 | return results 74 | } 75 | } 76 | 77 | const threatcrowdIpReport = class extends Transform { 78 | static get alias() { 79 | return ['threatcrowd_ip_report', 'tcir'] 80 | } 81 | 82 | static get title() { 83 | return 'Threatcrowd IP Report' 84 | } 85 | 86 | static get description() { 87 | return 'Obtain threatcrowd ip report which helps enumerating virtual hosts' 88 | } 89 | 90 | static get group() { 91 | return this.title 92 | } 93 | 94 | static get tags() { 95 | return ['ce'] 96 | } 97 | 98 | static get types() { 99 | return [IPV4_TYPE, IPV6_TYPE] 100 | } 101 | 102 | static get options() { 103 | return {} 104 | } 105 | 106 | async handle({ id: source = '', label = '' }, options) { 107 | const query = querystring.stringify({ 108 | ip: label 109 | }) 110 | 111 | const { resolutions = [] } = await this.scheduler.tryRequest({ uri: `https://www.threatcrowd.org/searchApi/v2/ip/report/?${query}`, toJson: true }) 112 | 113 | const results = [] 114 | 115 | resolutions.forEach(({ domain }) => { 116 | domain = normalizeDomain(domain) 117 | 118 | if (!domain) { 119 | return 120 | } 121 | 122 | results.push({ type: DOMAIN_TYPE, label: domain, props: { domain }, edges: [source] }) 123 | }) 124 | 125 | return results 126 | } 127 | } 128 | 129 | module.exports = { threatcrowdDomainReport, threatcrowdIpReport } 130 | -------------------------------------------------------------------------------- /transforms/urlscan/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { isUrl } = require('../../lib//detect') 4 | const { Transform } = require('../../lib//transform') 5 | const { normalizeDomain } = require('../../lib//normalize') 6 | const { URI_TYPE, BRAND_TYPE, SCREENSHOT_TYPE, DOMAIN_TYPE, SUBDOMAIN_TYPE } = require('../../lib//types') 7 | 8 | const urlscanLiveshot = class extends Transform { 9 | static get alias() { 10 | return ['urlscan_liveshot', 'usls'] 11 | } 12 | 13 | static get title() { 14 | return 'Urlscan Liveshot' 15 | } 16 | 17 | static get description() { 18 | return 'Generates a liveshot of any public site via urlscan' 19 | } 20 | 21 | static get group() { 22 | return this.title 23 | } 24 | 25 | static get tags() { 26 | return ['ce'] 27 | } 28 | 29 | static get types() { 30 | return [URI_TYPE] 31 | } 32 | 33 | static get options() { 34 | return { 35 | mix: { 36 | description: 'Mix input nodes with result nodes', 37 | type: 'boolean', 38 | default: true 39 | } 40 | } 41 | } 42 | 43 | static get priority() { 44 | return 1 45 | } 46 | 47 | static get noise() { 48 | return 1 49 | } 50 | 51 | async handle({ id: source = '', label = '', props = {} }, { mix = true }) { 52 | const uri = isUrl(label) ? label : isUrl(props.url) ? props.url : isUrl(props.uri) ? props.uri : '' 53 | 54 | if (!uri) { 55 | throw new Error(`Cannot find url/uri`) 56 | } 57 | 58 | const query = querystring.stringify({ 59 | width: 1024, 60 | height: 768, 61 | url: uri 62 | }) 63 | 64 | const urlscanUri = `https://urlscan.io/liveshot/?${query}` 65 | 66 | if (mix) { 67 | return [ 68 | { id: source, screenshot: urlscanUri, props: { screenshot: urlscanUri } } 69 | ] 70 | } 71 | else { 72 | return [ 73 | { type: SCREENSHOT_TYPE, label: uri, screenshot: urlscanUri, props: { screenshot: urlscanUri, uri }, edges: [source] } 74 | ] 75 | } 76 | } 77 | } 78 | 79 | const urlscanSubdomains = class extends Transform { 80 | static get alias() { 81 | return ['urlscan_subdomains', 'uss'] 82 | } 83 | 84 | static get title() { 85 | return 'Urlscan Subdomains' 86 | } 87 | 88 | static get description() { 89 | return 'Find subdomains via urlscan' 90 | } 91 | 92 | static get group() { 93 | return this.title 94 | } 95 | 96 | static get tags() { 97 | return ['ce'] 98 | } 99 | 100 | static get types() { 101 | return [DOMAIN_TYPE] 102 | } 103 | 104 | static get options() { 105 | return { 106 | urlscanKey: { 107 | type: 'string', 108 | description: 'Urlscan API key' 109 | } 110 | } 111 | } 112 | 113 | static get priority() { 114 | return 1 115 | } 116 | 117 | static get noise() { 118 | return 1 119 | } 120 | 121 | async handle({ id: source = '', label = '' }, { urlscanKey = process.env.URLSCAN_KEY, ...options }) { 122 | if (!urlscanKey) { 123 | this.warn(`no urlscan api key specified`) 124 | } 125 | 126 | const headers = {} 127 | 128 | if (urlscanKey) { 129 | headers['API-Key'] = urlscanKey 130 | } 131 | 132 | let domains = [] 133 | 134 | let count = 0 135 | 136 | for (;;) { 137 | const query = querystring.stringify({ 138 | q: `domain:${label}`, 139 | 140 | size: 100, 141 | 142 | offset: count 143 | }) 144 | 145 | this.info(`retrieving urlscan page with offset ${count}`) 146 | 147 | const { results, total } = await this.scheduler.tryRequest({ uri: `https://urlscan.io/api/v1/search/?${query}`, headers, toJson: true }) 148 | 149 | this.info(`total results in this query is ${total}`) 150 | 151 | if (!results || !results.length) { 152 | break 153 | } 154 | 155 | results.forEach((result) => { 156 | const { task, page } = result || {} 157 | 158 | const { url } = task || {} 159 | const { domain } = page || {} 160 | 161 | const domainMatch = url.match(/^https?:\/\/([^\/]+)/) 162 | 163 | if (domainMatch) { 164 | const domain = domainMatch[1] 165 | 166 | if (domain.endsWith(`.${label}`)) { 167 | domains.push(domain.trim().toLowerCase()) 168 | } 169 | } 170 | 171 | if (domain) { 172 | if (domain.endsWith(`.${label}`)) { 173 | domains.push(domain.trim().toLowerCase()) 174 | } 175 | } 176 | }) 177 | 178 | count += results.length 179 | 180 | if (count >= total) { 181 | break 182 | } 183 | } 184 | 185 | const nodes = [] 186 | 187 | domains = Array.from(new Set(domains)) 188 | 189 | domains.forEach((domain) => { 190 | domain = normalizeDomain(domain) 191 | 192 | nodes.push({ 193 | type: DOMAIN_TYPE, 194 | label: domain, 195 | props: { 196 | domain: domain 197 | }, 198 | edges: [{ source, type: SUBDOMAIN_TYPE }] 199 | }) 200 | }) 201 | 202 | return nodes 203 | } 204 | } 205 | 206 | module.exports = { urlscanLiveshot, urlscanSubdomains } 207 | -------------------------------------------------------------------------------- /transforms/virustotal/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { Transform } = require('../../lib//transform') 4 | const { normalizeDomain } = require('../../lib//normalize') 5 | const { DOMAIN_TYPE, SUBDOMAIN_TYPE } = require('../../lib//types') 6 | 7 | const virustotalSubdomains = class extends Transform { 8 | static get alias() { 9 | return ['virustotal_subdomains', 'vtsd'] 10 | } 11 | 12 | static get title() { 13 | return 'Virustotal Subdomains' 14 | } 15 | 16 | static get description() { 17 | return 'Obtain subdomains from Virustotal' 18 | } 19 | 20 | static get group() { 21 | return this.title 22 | } 23 | 24 | static get tags() { 25 | return ['ce'] 26 | } 27 | 28 | static get types() { 29 | return [DOMAIN_TYPE] 30 | } 31 | 32 | static get options() { 33 | return {} 34 | } 35 | 36 | static get priority() { 37 | return 1 38 | } 39 | 40 | static get noise() { 41 | return 1 42 | } 43 | 44 | async handle({ id: source = '', label = '' }, options) { 45 | const query = querystring.stringify({ 46 | limit: 40 47 | }) 48 | 49 | const { data = [] } = await this.scheduler.tryRequest({ uri: `https://www.virustotal.com/ui/domains/${encodeURIComponent(label)}/subdomains?${query}`, toJson: true }) 50 | 51 | return data.map(({ id }) => { 52 | const domain = normalizeDomain(id) 53 | 54 | return { type: DOMAIN_TYPE, label: domain, props: { domain }, edges: [{ source, type: SUBDOMAIN_TYPE }] } 55 | }) 56 | } 57 | } 58 | 59 | module.exports = { virustotalSubdomains } 60 | -------------------------------------------------------------------------------- /transforms/vulners/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { Transform } = require('../../lib//transform') 4 | const { SOFTWARE_TYPE, ISSUE_TYPE, EXPLOIT_TYPE } = require('../../lib//types') 5 | 6 | const vulnersSearch = class extends Transform { 7 | static get alias() { 8 | return ['vulners_search', 'vs'] 9 | } 10 | 11 | static get title() { 12 | return 'Vulners Search' 13 | } 14 | 15 | static get description() { 16 | return 'Obtain vulnerability information via vulners.com' 17 | } 18 | 19 | static get group() { 20 | return this.title 21 | } 22 | 23 | static get tags() { 24 | return ['ce'] 25 | } 26 | 27 | static get types() { 28 | return [SOFTWARE_TYPE] 29 | } 30 | 31 | static get options() { 32 | return { 33 | family: { 34 | description: 'Bulletin family', 35 | type: 'string', 36 | default: '', 37 | choices: [ 38 | '', 39 | 'exploit', 40 | 'nvd' 41 | ] 42 | } 43 | } 44 | } 45 | 46 | static get priority() { 47 | return 1 48 | } 49 | 50 | static get noise() { 51 | return 1 52 | } 53 | 54 | async handle({ id: source = '', label = '' }, options) { 55 | const { family } = options 56 | 57 | const tokens = label.split(/((?:\d+\.?)+)/) 58 | 59 | const software = (tokens[0] || '').replace(/\W+/g, '').toLowerCase().trim() 60 | const version = (tokens[1] || '').trim() 61 | 62 | if (!software || !version) { 63 | return 64 | } 65 | 66 | const query = querystring.stringify({ software, version, type: 'software' }) 67 | 68 | const { data = {} } = await this.scheduler.tryRequest({ uri: `https://vulners.com/api/v3/burp/software/?${query}`, toJson: true }) 69 | 70 | const { search = [] } = data 71 | 72 | const results = [] 73 | 74 | search.forEach(({ _source }) => { 75 | const { id, title, description, cvss, bulletinFamily } = _source || {} 76 | const { score } = cvss || {} 77 | 78 | if (family) { 79 | if (family !== bulletinFamily.toLowerCase()) { 80 | return 81 | } 82 | } 83 | 84 | const scoreValue = parseFloat(score) 85 | 86 | let severity 87 | 88 | if (scoreValue >= 9.0) { 89 | severity = 'Critical' 90 | } 91 | else 92 | if (scoreValue >= 7.0) { 93 | severity = 'High' 94 | } 95 | else 96 | if (scoreValue >= 4.0) { 97 | severity = 'Medium' 98 | } 99 | else 100 | if (scoreValue >= 0.1) { 101 | severity = 'Low' 102 | } 103 | else { 104 | severity = 'Informational' 105 | } 106 | 107 | if (bulletinFamily === 'NVD') { 108 | results.push({ 109 | type: ISSUE_TYPE, 110 | id: id, 111 | label: title, 112 | props: { 113 | description: description, 114 | cvss: score, 115 | severity: severity 116 | }, 117 | edges: [source] 118 | }) 119 | } 120 | else { 121 | results.push({ 122 | type: EXPLOIT_TYPE, 123 | id: id, 124 | label: title, 125 | props: { 126 | description: description, 127 | cvss: score, 128 | severity: severity 129 | }, 130 | edges: [source] 131 | }) 132 | } 133 | }) 134 | 135 | return results 136 | } 137 | } 138 | 139 | module.exports = { vulnersSearch } 140 | -------------------------------------------------------------------------------- /transforms/wappalyzer/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { flatten } = require('../../lib//utils') 4 | const { Transform } = require('../../lib//transform') 5 | 6 | const WAPPALYZER_APPLICATION_TYPE = 'wappalyzer:application' 7 | 8 | const wappalyzerProfile = class extends Transform { 9 | static get alias() { 10 | return ['wappalyzer_profile', 'wzp'] 11 | } 12 | 13 | static get title() { 14 | return 'Wappalyzer Profile' 15 | } 16 | 17 | static get description() { 18 | return 'Enumerate technologies with api.wappalyzer.com' 19 | } 20 | 21 | static get group() { 22 | return this.title 23 | } 24 | 25 | static get tags() { 26 | return ['ce'] 27 | } 28 | 29 | static get types() { 30 | return ['uri'] 31 | } 32 | 33 | static get options() { 34 | return { 35 | wappalyzerKey: { 36 | type: 'string', 37 | description: 'Shodan API key' 38 | } 39 | } 40 | } 41 | 42 | static get priority() { 43 | return 1 44 | } 45 | 46 | static get noise() { 47 | return 1 48 | } 49 | 50 | async handle({ id: source = '', label = '' }, { wappalyzerKey = process.env.WAPPALYZER_KEY || 'wappalyzer.api.demo.key' }) { 51 | const query = querystring.stringify({ 52 | url: label 53 | }) 54 | 55 | const headers = { 56 | 'X-Api-Key': wappalyzerKey 57 | } 58 | 59 | const results = await this.scheduler.tryRequest({ uri: `https://api.wappalyzer.com/lookup/v1/?${query}`, headers, toJson: true }) 60 | 61 | if (!Array.isArray(results)) { 62 | return 63 | } 64 | 65 | const applications = Array.from(new Set(flatten(results.map(({ applications }) => { 66 | return applications.map(({ name }) => name) 67 | }), 1))) 68 | 69 | return applications.map((application) => { 70 | return { type: WAPPALYZER_APPLICATION_TYPE, label: application, props: { application }, edges: [source] } 71 | }) 72 | } 73 | } 74 | 75 | module.exports = { wappalyzerProfile } 76 | -------------------------------------------------------------------------------- /transforms/worker/index.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const { Worker } = require('worker_threads') 3 | 4 | const { Transform } = require('../../lib//transform') 5 | 6 | const worker = class extends Transform { 7 | static get alias() { 8 | return ['worker'] 9 | } 10 | 11 | static get title() { 12 | return 'Worker' 13 | } 14 | 15 | static get description() { 16 | return 'Perform transformation with external worker' 17 | } 18 | 19 | static get group() { 20 | return this.title 21 | } 22 | 23 | static get tags() { 24 | return ['ce', 'evil', 'local'] 25 | } 26 | 27 | static get types() { 28 | return [] 29 | } 30 | 31 | static get options() { 32 | return { 33 | worker: { 34 | type: 'string', 35 | description: 'The path to the worker to execute', 36 | default: '' 37 | }, 38 | 39 | arg: { 40 | type: 'array', 41 | description: 'List of arguments to pass to the worker function', 42 | default: [] 43 | }, 44 | 45 | perNode: { 46 | type: 'boolean', 47 | description: 'Run new worker per node', 48 | default: false 49 | } 50 | } 51 | } 52 | 53 | static get priority() { 54 | return 100 55 | } 56 | 57 | static get noise() { 58 | return 100 59 | } 60 | 61 | async handle(node, options) { 62 | if (options.perNode) { 63 | const { worker: script, arg: args } = options 64 | 65 | if (!script) { 66 | throw new Error(`No worker to load`) 67 | } 68 | 69 | const results = [] 70 | 71 | const workerPath = path.resolve(process.cwd(), script) 72 | 73 | const worker = new Worker(workerPath, { 74 | workerData: { args } 75 | }) 76 | 77 | worker.on('message', (result) => { 78 | results.push(result) 79 | }) 80 | 81 | worker.postMessage(node) 82 | 83 | await new Promise((resolve, reject) => { 84 | worker.once('exit', (code) => { 85 | if (code === 0) { 86 | resolve() 87 | } 88 | else { 89 | reject(new Error(`Worker stopped with exit code ${code}`)) 90 | } 91 | }) 92 | 93 | worker.once('error', reject) 94 | }) 95 | 96 | return results 97 | } 98 | else { 99 | const { worker } = options 100 | 101 | worker.postMessage(node) 102 | 103 | return new Promise((resolve, reject) => { 104 | worker.once('message', resolve) 105 | 106 | worker.once('error', reject) 107 | }) 108 | } 109 | } 110 | 111 | async run(nodes, options, ...rest) { 112 | if (options.perNode) { 113 | return super.run(nodes, options, ...rest) 114 | } 115 | else { 116 | const { worker: script, arg: args } = options 117 | 118 | if (!script) { 119 | throw new Error(`No worker to load`) 120 | } 121 | 122 | const workerPath = path.resolve(process.cwd(), script) 123 | 124 | const worker = new Worker(workerPath, { 125 | workerData: { args } 126 | }) 127 | 128 | const workerPromise = new Promise((resolve, reject) => { 129 | worker.once('exit', (code) => { 130 | if (code === 0) { 131 | resolve() 132 | } 133 | else { 134 | reject(new Error(`Worker stopped with exit code ${code}`)) 135 | } 136 | }) 137 | }) 138 | 139 | const runnerPromise = super.run(nodes, { worker }, ...rest) 140 | 141 | await Promise.race([workerPromise, runnerPromise]) 142 | 143 | worker.terminate() 144 | 145 | return runnerPromise 146 | } 147 | } 148 | } 149 | 150 | module.exports = { 151 | worker 152 | } 153 | -------------------------------------------------------------------------------- /transforms/zonecruncher/index.js: -------------------------------------------------------------------------------- 1 | const querystring = require('querystring') 2 | 3 | const { Transform } = require('../../lib//transform') 4 | const { normalizeDomain } = require('../../lib//normalize') 5 | const { DOMAIN_TYPE, SUBDOMAIN_TYPE } = require('../../lib//types') 6 | 7 | class ZonecruncherTransform extends Transform { 8 | getCallOptions() { 9 | throw new Error(`Not implemented`) 10 | } 11 | 12 | filterRecord() { 13 | return true 14 | } 15 | 16 | extractRecord() { 17 | throw new Error(`Not implemented`) 18 | } 19 | 20 | async handle({ id: source = '', label = '' }, { zonecruncherKey = process.env.ZONECRUNCHER_KEY, ...options }) { 21 | if (!zonecruncherKey) { 22 | throw new Error(`No zonecruncher api key specified`) 23 | } 24 | 25 | const { pathname: p, query: q } = this.getCallOptions({ source, label }, options) 26 | 27 | const query = querystring.stringify({ 28 | ...q, 29 | 30 | token: zonecruncherKey 31 | }) 32 | 33 | const { results = [] } = await this.scheduler.tryRequest({ uri: `https://zonecruncher.com/api/v1/${p.replace(/^\/+/, '')}?${query}`, toJson: true }) 34 | 35 | const nodes = [] 36 | 37 | results.forEach((result) => { 38 | if (!this.filterRecord({ source, label }, result)) { 39 | return 40 | } 41 | 42 | nodes.push(this.extractRecord({ source, label }, result)) 43 | }) 44 | 45 | return nodes 46 | } 47 | } 48 | 49 | const zonecruncherSubdomains = class extends ZonecruncherTransform { 50 | static get category() { 51 | return ['zonecruncher'] 52 | } 53 | 54 | static get alias() { 55 | return ['zonecruncher_subdomains', 'zcss'] 56 | } 57 | 58 | static get title() { 59 | return 'Zonecruncher Subdomains' 60 | } 61 | 62 | static get description() { 63 | return 'Performs subdomain searching with Zonecruncher' 64 | } 65 | 66 | static get group() { 67 | return this.title 68 | } 69 | 70 | static get tags() { 71 | return ['ce'] 72 | } 73 | 74 | static get types() { 75 | return [DOMAIN_TYPE] 76 | } 77 | 78 | static get options() { 79 | return { 80 | zonecruncherKey: { 81 | type: 'string', 82 | description: 'Zonecruncher API key' 83 | } 84 | } 85 | } 86 | 87 | static get priority() { 88 | return 1 89 | } 90 | 91 | static get noise() { 92 | return 1 93 | } 94 | 95 | getCallOptions({ label }) { 96 | return { 97 | pathname: '/subdomains', 98 | query: { 99 | q: label, 100 | sort: 'last' 101 | } 102 | } 103 | } 104 | 105 | extractRecord({ source }, record) { 106 | const { qname, first_seen: firstSeen, last_seen: lastSeen } = record 107 | 108 | const domain = normalizeDomain(qname) 109 | 110 | return { 111 | type: DOMAIN_TYPE, 112 | label: domain, 113 | props: { domain, firstSeen, lastSeen }, 114 | edges: [{ source, type: SUBDOMAIN_TYPE }] 115 | } 116 | } 117 | } 118 | 119 | module.exports = { zonecruncherSubdomains } 120 | --------------------------------------------------------------------------------