├── .eslintrc ├── .github └── workflows │ └── build.yml ├── .gitignore ├── .travis.yml ├── CHANGES.md ├── LICENSE ├── README.md ├── default_config.json ├── docker-compose.yml ├── docs └── create-a-release.md ├── lib ├── intern.js └── qbuilder.js ├── mysql-store.js ├── package.json ├── script └── schema.sql └── test ├── mysql.autoincrement.test.js ├── mysql.ext.test.js ├── mysql.test.js └── support ├── db ├── config.js └── seed │ └── schema.sql └── helpers.js /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "seneca", 3 | "rules": { 4 | "space-before-function-paren": "off", 5 | "brace-style": ["error", "1tbs"] 6 | }, 7 | "parserOptions": { 8 | "ecmaVersion": 10 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | # This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions 3 | 4 | name: build 5 | 6 | on: 7 | push: 8 | branches: [ master ] 9 | pull_request: 10 | branches: [ master ] 11 | 12 | jobs: 13 | build: 14 | timeout-minutes: 4 15 | 16 | strategy: 17 | fail-fast: false 18 | matrix: 19 | os: [ubuntu-latest] 20 | node-version: [12.x] 21 | 22 | runs-on: ${{ matrix.os }} 23 | 24 | services: 25 | mysql: 26 | image: mysql:5.7 27 | ports: 28 | - 3306:3306 29 | environment: 30 | MYSQL_ROOT_PASSWORD: "itsasekret_ci_6g9b75t2gt528az" 31 | MYSQL_DATABASE: "senecatest_ci_578gw9f6wf7" 32 | 33 | steps: 34 | - uses: actions/checkout@v2 35 | - name: Use Node.js ${{ matrix.node-version }} 36 | uses: actions/setup-node@v1 37 | with: 38 | node-version: ${{ matrix.node-version }} 39 | - run: npm install 40 | - run: npm test 41 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | .DS_Store 3 | 4 | lib-cov 5 | *.seed 6 | *.log 7 | *.csv 8 | *.dat 9 | *.out 10 | *.pid 11 | *.gz 12 | 13 | pids 14 | logs 15 | results 16 | 17 | node_modules 18 | npm-debug.log 19 | 20 | README.html 21 | 22 | test/dbconfig.mine.js 23 | 24 | .idea/ 25 | .settings/ 26 | .vscode/ 27 | 28 | docs/annotated 29 | docs/coverage.html 30 | package-lock.json 31 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: true 2 | language: node_js 3 | 4 | env: 5 | - SENECA_VER=@1.x.x 6 | - SENECA_VER=@2.x.x 7 | - SENECA_VER=@3.x.x 8 | 9 | node_js: 10 | - '6' 11 | - '4' 12 | 13 | cache: 14 | directories: 15 | - node_modules 16 | 17 | services: 18 | - mysql 19 | 20 | before_script: 21 | - mysql -h localhost -u root --password="" < ./script/schema.sql 22 | - npm uninstall seneca 23 | - npm install seneca$SENECA_VER 24 | 25 | after_script: 26 | - npm run coveralls 27 | -------------------------------------------------------------------------------- /CHANGES.md: -------------------------------------------------------------------------------- 1 | # 1.1.0 2016/08/27 2 | * Added Seneca 3 and Node 6 support 3 | * Dropped Node 0.10, 0.12, 5 support 4 | * Updated dependencies 5 | 6 | # 1.0.1 2016/08/05 7 | * Fixed incorrect package.json "files" section 8 | 9 | # 1.0.0 2016/08/04 
10 | * Replaced query generation with seneca-standard-query 11 | * Extended and unified the patterns with seneca-postgres-store 12 | * Update dependencies 13 | 14 | 15 | # 0.4.1 2015/11/10 16 | * Updated for seneca 0.7.x 17 | * Cleanup code 18 | * Improve tests 19 | * Dockerize tests 20 | 21 | # 0.4.0 2015/02/04 22 | * Updated for seneca 0.6.1 23 | 24 | # 0.3.0 2015/01/30 25 | * Add query reporting including SLOW, SLOWER, SLOWEST tags 26 | * Update version references 27 | * Add connection pooling 28 | * Fix sort$, add skip$ 29 | * SQL injection protection 30 | * Add standard store-test 31 | 32 | # 0.2.0 2014/10/24 33 | * Do not add metadata for date columns 34 | 35 | # 0.1.0 2014/10/24 36 | * Use seneca-store-test and make them pass 37 | * Add support for auto_increment columns (use auto_increment: true option) 38 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2012-2016 Mircea Alexandru and other contributors 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![Seneca](http://senecajs.org/files/assets/seneca-logo.png) 2 | > A [Seneca.js](http://senecajs.org) Data Storage Plugin 3 | 4 | # seneca-mysql-store 5 | 6 | [![npm version][npm-badge]][npm-url] 7 | [![Build Status][travis-badge]][travis-url] 8 | [![Coverage Status][coverage-badge]][coverage-url] 9 | [![Dependency Status][david-badge]][david-url] 10 | [![Coveralls][BadgeCoveralls]][Coveralls] 11 | [![Gitter][gitter-badge]][gitter-url] 12 | 13 | ## Description 14 | 15 | A storage engine that uses [mySql][] to persist data. It may also be used as an example on how to 16 | implement a storage plugin for Seneca using an underlying relational store. 17 | 18 | If you're using this module, and need help, you can: 19 | 20 | - Post a [github issue][], 21 | - Tweet to [@senecajs][], 22 | - Ask on the [Gitter][gitter-url]. 23 | 24 | If you are new to Seneca in general, please take a look at [senecajs.org][]. We have everything from 25 | tutorials to sample apps to help get you up and running quickly. 
26 | 27 | ### Seneca compatibility 28 | Supports Seneca versions **1.x** - **3.x** 29 | 30 | ### Supported functionality 31 | All Seneca data store supported functionality is implemented in [seneca-store-test](https://github.com/senecajs/seneca-store-test) as a test suite. The tests represent the store functionality specifications. 32 | 33 | 34 | ## Install 35 | To install, simply use npm. Remember you will need to install [Seneca.js][] 36 | separately. 37 | 38 | ``` 39 | npm install seneca 40 | npm install seneca-mysql-store 41 | ``` 42 | 43 | ## Quick Example 44 | ```js 45 | var seneca = require('seneca')() 46 | seneca.use('mysql-store', { 47 | name:'senecatest', 48 | host:'localhost', 49 | user:'senecatest', 50 | password:'senecatest', 51 | port:3306 52 | }) 53 | 54 | seneca.ready(function () { 55 | var apple = seneca.make$('fruit') 56 | apple.name = 'Pink Lady' 57 | apple.price = 0.99 58 | apple.save$(function (err, apple) { 59 | console.log("apple.id = " + apple.id) 60 | }) 61 | }) 62 | ``` 63 | 64 | ## Usage 65 | You don't use this module directly. It provides an underlying data storage engine for the Seneca entity API: 66 | 67 | ```js 68 | var entity = seneca.make$('typename') 69 | entity.someproperty = "something" 70 | entity.anotherproperty = 100 71 | 72 | entity.save$(function (err, entity) { ... }) 73 | entity.load$({id: ...}, function (err, entity) { ... }) 74 | entity.list$({property: ...}, function (err, entity) { ... }) 75 | entity.remove$({id: ...}, function (err, entity) { ... }) 76 | ``` 77 | 78 | ### Query Support 79 | 80 | The standard Seneca query format is supported. See the [seneca-standard-query][standard-query] plugin for more details. 81 | 82 | ## Extended Query Support 83 | 84 | By using the [seneca-store-query][store-query] plugin its query capabilities can be extended. See the plugin page for more details. 85 | 86 | ### Native Driver 87 | As with all seneca stores, you can access the native driver, in this case, the `mysql` 88 | `connectionPool` object using `entity.native$(function (err, connectionPool) {...})`. 89 | 90 | ## Contributing 91 | The [Senecajs org][] encourage open participation. If you feel you can help in any way, be it with 92 | documentation, examples, extra testing, or new features please get in touch. 93 | 94 | ## To run tests with Docker 95 | Build the MySQL Docker image: 96 | 97 | ```sh 98 | npm run build 99 | 100 | ``` 101 | 102 | Start the MySQL container: 103 | ```sh 104 | npm run start 105 | ``` 106 | 107 | Stop the MySQL container: 108 | ```sh 109 | npm run stop 110 | ``` 111 | 112 | While the container is running you can run the tests into another terminal: 113 | ```sh 114 | npm run test 115 | ``` 116 | 117 | #### Testing for Mac users 118 | Before the tests can be run you must run `docker-machine env default` and copy the docker host address (example: '192.168.99.100'). 119 | This address must be inserted into the test/dbconfig.example.js file as the value for the host variable. The tests can now be run. 120 | 121 | ## License 122 | Copyright (c) 2012-2016, Mircea Alexandru and other contributors. 123 | Licensed under [MIT][]. 
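
As a usage sketch for the Native Driver section above (assuming the `product` table created by `script/schema.sql`), a raw query through the pooled `mysql` connection could look like this:

```js
// Illustrative sketch only: the table and query are examples, not part of this README.
var seneca = require('seneca')()
seneca.use('mysql-store', { /* connection options as in the Quick Example */ })

seneca.ready(function () {
  var product = seneca.make$('product')

  // native$ hands back the underlying mysql connection pool
  product.native$(function (err, connectionPool) {
    if (err) throw err

    connectionPool.query(
      'SELECT id, name, price FROM product WHERE price < ?',
      [100],
      function (err, rows) {
        if (err) throw err
        console.log(rows)
      }
    )
  })
})
```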
124 | 125 | [npm-badge]: https://img.shields.io/npm/v/seneca-mysql-store.svg 126 | [npm-url]: https://npmjs.com/package/seneca-mysql-store 127 | [travis-badge]: https://travis-ci.org/senecajs/seneca-mysql-store.svg 128 | [travis-url]: https://travis-ci.org/senecajs/seneca-mysql-store 129 | [codeclimate-badge]: https://codeclimate.com/github/senecajs/seneca-mysql-store/badges/gpa.svg 130 | [codeclimate-url]: https://codeclimate.com/github/senecajs/seneca-mysql-store 131 | [coverage-badge]: https://coveralls.io/repos/senecajs/seneca-mysql-store/badge.svg?branch=master&service=github 132 | [coverage-url]: https://coveralls.io/github/senecajs/seneca-mysql-store?branch=master 133 | [david-badge]: https://david-dm.org/senecajs/seneca-mysql-store.svg 134 | [david-url]: https://david-dm.org/senecajs/seneca-mysql-store 135 | [gitter-badge]: https://badges.gitter.im/Join%20Chat.svg 136 | [gitter-url]: https://gitter.im/senecajs/seneca 137 | [mySql]: https://www.mysql.com/ 138 | [node-mysqldb-native]: http://mysqldb.github.com/node-mysqldb-native/markdown-docs/queries.html 139 | [MIT]: ./LICENSE 140 | [Senecajs org]: https://github.com/senecajs/ 141 | [Seneca.js]: https://www.npmjs.com/package/seneca 142 | [senecajs.org]: http://senecajs.org/ 143 | [github issue]: https://github.com/senecajs/seneca-mysql-store/issues 144 | [@senecajs]: http://twitter.com/senecajs 145 | [Coveralls]: https://coveralls.io/github/senecajs/seneca-mysql-store?branch=master 146 | [BadgeCoveralls]: https://coveralls.io/repos/github/senecajs/seneca-mysql-store/badge.svg?branch=master 147 | [standard-query]: https://github.com/senecajs/seneca-standard-query 148 | [store-query]: https://github.com/senecajs/seneca-store-query 149 | -------------------------------------------------------------------------------- /default_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "minwait": 16, 3 | "maxwait": 65336, 4 | "query_log_level": "debug", 5 | "auto_increment": false, 6 | "benchmark": { 7 | "rules": [ 8 | { 9 | "time": 100, 10 | "tag": "SLOW" 11 | }, 12 | { 13 | "time": 300, 14 | "tag": "SLOWER" 15 | }, 16 | { 17 | "time": 500, 18 | "tag": "SLOWEST" 19 | } 20 | ] 21 | } 22 | } -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | mysql: 2 | image: mysql:5.7 3 | ports: 4 | - "3306:3306" 5 | volumes: 6 | - ./test/support/db/seed:/docker-entrypoint-initdb.d 7 | working_dir: /var/mysql 8 | environment: 9 | MYSQL_ROOT_PASSWORD: "itsasekret_85a96vbFdh" 10 | MYSQL_DATABASE: "senecatest" 11 | -------------------------------------------------------------------------------- /docs/create-a-release.md: -------------------------------------------------------------------------------- 1 | # Creating a release 2 | 3 | 1. Review github issues, triage, close and merge issues related to the release. 4 | 2. Update CHANGES.md, with date release, notes, and version. 5 | 3. Pull down the repository locally on the master branch. 6 | 4. Ensure there are no outstanding commits and the branch is clean. 7 | 5. Run `npm install` and ensure all dependencies correctly install. 8 | 6. Run `npm run build` to build the Docker container. 9 | 7. Run `npm run start` to start the mysql Docker container. 10 | 8. Run `npm run test` and ensure testing and linting passes. 11 | 9. Run `npm version vx.x.x -m "version x.x.x"` where `x.x.x` is the version. 12 | 10. 
Run `git push upstream master --tags` 13 | 11. Run `npm publish` 14 | 12. Go to the [Github release page][Releases] and hit 'Draft a new release'. 15 | 13. Paste the Changelog content for this release and add additional release notes. 16 | 14. Choose the tag version and a title matching the release and publish. 17 | 15. Notify core maintainers of the release via email. 18 | 19 | [Releases]: https://github.com/senecajs/seneca-mysql-store/releases -------------------------------------------------------------------------------- /lib/intern.js: -------------------------------------------------------------------------------- 1 | const Assert = require('assert') 2 | const Util = require('util') 3 | const Uuid = require('uuid') 4 | const Q = require('./qbuilder') 5 | 6 | 7 | const intern = { 8 | generateid() { 9 | return { id: Uuid() } 10 | }, 11 | 12 | 13 | async insertrow(args, ctx) { 14 | const query = Q.insertstm(args) 15 | return intern.execquery(query, ctx) 16 | }, 17 | 18 | 19 | async insertrowwherenotexists(args, ctx) { 20 | const query = Q.insertwherenotexistsstm(args) 21 | return intern.execquery(query, ctx) 22 | }, 23 | 24 | 25 | async updaterows(args, ctx) { 26 | const query = Q.updatestm(args) 27 | return intern.execquery(query, ctx) 28 | }, 29 | 30 | 31 | async deleterows(args, ctx) { 32 | const query = Q.deletestm(args) 33 | return intern.execquery(query, ctx) 34 | }, 35 | 36 | 37 | async selectrows(args, ctx) { 38 | const query = Q.selectstm(args) 39 | return intern.execquery(query, ctx) 40 | }, 41 | 42 | 43 | async execquery(query, ctx) { 44 | const { db } = ctx 45 | const exec = Util.promisify(db.query).bind(db) 46 | 47 | if ('string' === typeof query) { 48 | return exec(query) 49 | } 50 | 51 | return exec(query.sql, query.bindings) 52 | }, 53 | 54 | 55 | is_upsert(msg) { 56 | const { q } = msg 57 | 58 | if (!Array.isArray(q.upsert$)) { 59 | return null 60 | } 61 | 62 | const upsert_fields = q.upsert$.filter((p) => !p.includes('$')) 63 | 64 | if (0 === upsert_fields.length) { 65 | return null 66 | } 67 | 68 | return upsert_fields 69 | }, 70 | 71 | 72 | is_update(msg) { 73 | const { ent } = msg 74 | return null != ent.id 75 | }, 76 | 77 | 78 | async transaction(f, ctx) { 79 | const { db } = ctx 80 | 81 | const getConnection = Util.promisify(db.getConnection).bind(db) 82 | const trx = await getConnection() 83 | 84 | try { 85 | const beginTransaction = Util.promisify(trx.beginTransaction).bind(trx) 86 | const commit = Util.promisify(trx.commit).bind(trx) 87 | const rollback = Util.promisify(trx.rollback).bind(trx) 88 | 89 | try { 90 | await beginTransaction() 91 | const result = await f(trx) 92 | await commit() 93 | 94 | return result 95 | } catch (err) { 96 | await rollback() 97 | throw err 98 | } 99 | } finally { 100 | trx.release() 101 | } 102 | }, 103 | 104 | 105 | compact(obj) { 106 | return Object.keys(obj) 107 | .map(k => [k, obj[k]]) 108 | .filter(([, v]) => undefined !== v) 109 | .reduce((acc, [k, v]) => { 110 | acc[k] = v 111 | return acc 112 | }, {}) 113 | }, 114 | 115 | 116 | asyncmethod(f) { 117 | return function (msg, done) { 118 | const seneca = this 119 | const p = f.call(seneca, msg) 120 | 121 | Assert('function' === typeof p.then && 122 | 'function' === typeof p.catch, 123 | 'The function must be async, i.e. 
return a promise.') 124 | 125 | return p 126 | .then(result => done(null, result)) 127 | .catch(done) 128 | } 129 | }, 130 | 131 | 132 | async remove_many(msg, ctx) { 133 | const { seneca } = ctx 134 | const { q, qent } = msg 135 | 136 | 137 | const ent_table = intern.tablename(qent) 138 | 139 | const rows = await intern.selectrows({ 140 | columns: ['id'], 141 | from: ent_table, 142 | where: seneca.util.clean(q), 143 | limit: 0 <= q.limit$ ? q.limit$ : null, 144 | offset: 0 <= q.skip$ ? q.skip$ : null, 145 | order_by: q.sort$ || null 146 | }, ctx) 147 | 148 | 149 | await intern.deleteent({ 150 | ent: qent, 151 | where: { 152 | id: rows.map(x => x.id) 153 | } 154 | }, ctx) 155 | 156 | 157 | return 158 | }, 159 | 160 | 161 | async remove_one(msg, ctx) { 162 | const { seneca } = ctx 163 | const { q, qent } = msg 164 | 165 | 166 | const del_ent = await intern.loadent({ 167 | ent: qent, 168 | where: seneca.util.clean(q), 169 | offset: 0 <= q.skip$ ? q.skip$ : null, 170 | order_by: q.sort$ || null 171 | }, ctx) 172 | 173 | 174 | if (null === del_ent) { 175 | return null 176 | } 177 | 178 | 179 | await intern.deleteent({ 180 | ent: qent, 181 | where: { 182 | id: del_ent.id 183 | } 184 | }, ctx) 185 | 186 | 187 | if (q.load$) { 188 | return del_ent 189 | } 190 | 191 | 192 | return null 193 | }, 194 | 195 | 196 | async insertent (args, ctx) { 197 | const { ent } = args 198 | 199 | const ent_table = intern.tablename(ent) 200 | const entp = intern.makeentp(ent) 201 | 202 | const inserted = await intern.insertrow({ 203 | into: ent_table, 204 | values: intern.compact(entp) 205 | }, ctx) 206 | 207 | return intern.loadent({ 208 | ent, 209 | where: { id: ent.id || inserted.insertId } 210 | }, ctx) 211 | }, 212 | 213 | 214 | make_insertable(msg) { 215 | const { ent, q } = msg 216 | const { auto_increment$: auto_increment = false } = q 217 | 218 | 219 | // WARNING: It looks like Entity#clone$ does not clone private fields 220 | // of the parent entity, which may lead to some confusion. 
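// (Note, for example, that ent.id$ is read from the original entity below, not from the clone.)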
221 | // 222 | const out = ent.clone$() 223 | 224 | out.id = out.id || ent.id$ || intern.generateid().id 225 | 226 | if (auto_increment) { 227 | delete out.id 228 | } 229 | 230 | return out 231 | }, 232 | 233 | 234 | async do_create(msg, ctx) { 235 | const { seneca } = ctx 236 | 237 | const new_ent = intern.make_insertable(msg) 238 | const upsert_fields = intern.is_upsert(msg) 239 | 240 | 241 | let op_name 242 | let out 243 | 244 | if (null == upsert_fields) { 245 | op_name = 'save/insert' 246 | out = await intern.insertent({ ent: new_ent }, ctx) 247 | } else { 248 | op_name = 'save/upsert' 249 | out = await intern.upsertent(upsert_fields, { ent: new_ent }, ctx) 250 | } 251 | 252 | seneca.log.debug(op_name, 'ok', out) 253 | 254 | return out 255 | }, 256 | 257 | 258 | async do_update(msg, ctx) { 259 | const { ent } = msg 260 | const { seneca } = ctx 261 | 262 | const { id: ent_id } = ent 263 | const entp = intern.makeentp(ent) 264 | 265 | const update = await intern.updateent({ 266 | ent, 267 | set: intern.compact(entp), 268 | where: { id: ent_id } 269 | }, ctx) 270 | 271 | 272 | let out 273 | 274 | const updated_anything = update.affectedRows > 0 275 | 276 | if (updated_anything) { 277 | out = await intern.loadent({ ent, where: { id: ent.id } }, ctx) 278 | } else { 279 | out = await intern.insertent({ ent }, ctx) 280 | } 281 | 282 | seneca.log.debug('save/update', 'ok', out) 283 | 284 | return out 285 | }, 286 | 287 | 288 | is_empty(obj) { 289 | const num_keys = Object.keys(obj).length 290 | return 0 === num_keys 291 | }, 292 | 293 | 294 | async upsertent(upsert_fields, args, ctx) { 295 | const { ent } = args 296 | 297 | const entp = intern.makeentp(ent) 298 | const ent_table = intern.tablename(ent) 299 | 300 | return intern.transaction(async (trx) => { 301 | const trx_ctx = { ...ctx, db: trx } 302 | 303 | const update_q = upsert_fields 304 | .filter(c => undefined !== entp[c]) 305 | .reduce((h, c) => { 306 | h[c] = entp[c] 307 | return h 308 | }, {}) 309 | 310 | if (intern.is_empty(update_q)) { 311 | return intern.insertent({ ent }, trx_ctx) 312 | } 313 | 314 | const update_set = { ...entp } 315 | delete update_set.id 316 | 317 | await intern.updateent({ 318 | ent, 319 | where: update_q, 320 | set: update_set 321 | }, trx_ctx) 322 | 323 | 324 | await intern.insertrowwherenotexists({ 325 | into: ent_table, 326 | values: intern.compact(entp), 327 | where_not: update_q 328 | }, trx_ctx) 329 | 330 | // NOTE: Because MySQL does not support "RETURNING", we must fetch 331 | // the entity in a separate trip to the db. We can fetch the entity 332 | // by the query and not worry about duplicates - this is because 333 | // the query is unique by definition, because upserts can only work 334 | // for unique keys. 
335 | // 336 | return intern.loadent({ ent, where: update_q }, trx_ctx) 337 | }, ctx) 338 | }, 339 | 340 | 341 | where_of_q(q, ctx) { 342 | if ('string' === typeof q || Array.isArray(q)) { 343 | return { id: q } 344 | } 345 | 346 | const { seneca } = ctx 347 | 348 | return seneca.util.clean(q) 349 | }, 350 | 351 | 352 | async selectents(args, ctx) { 353 | const { ent } = args 354 | const from = intern.tablename(ent) 355 | 356 | const sel_args = { ...args, from, columns: '*' } 357 | delete sel_args.ent 358 | 359 | const rows = await intern.selectrows(sel_args, ctx) 360 | const out = rows.map(row => intern.makeent(ent, row)) 361 | 362 | return out 363 | }, 364 | 365 | 366 | async listents(args, ctx) { 367 | return intern.selectents(args, ctx) 368 | }, 369 | 370 | 371 | async loadent(args, ctx) { 372 | const load_args = { ...args, limit: 1 } 373 | const out = await intern.selectents(load_args, ctx) 374 | 375 | 376 | if (0 === out.length) { 377 | return null 378 | } 379 | 380 | return out[0] 381 | }, 382 | 383 | 384 | async deleteent(args, ctx) { 385 | const { ent } = args 386 | const ent_table = intern.tablename(ent) 387 | 388 | const del_args = { ...args, from: ent_table } 389 | delete del_args.ent 390 | 391 | return await intern.deleterows(del_args, ctx) 392 | }, 393 | 394 | 395 | async updateent(args, ctx) { 396 | const { ent } = args 397 | const ent_table = intern.tablename(ent) 398 | 399 | const update_args = { ...args, table: ent_table } 400 | delete update_args.ent 401 | 402 | return intern.updaterows(update_args, ctx) 403 | }, 404 | 405 | 406 | is_native(msg) { 407 | const { q } = msg 408 | 409 | if ('string' === typeof q.native$) { 410 | return q.native$ 411 | } 412 | 413 | if (Array.isArray(q.native$)) { 414 | Assert(0 < q.native$.length, 'q.native$.length') 415 | const [sql, ...bindings] = q.native$ 416 | 417 | return { sql, bindings } 418 | } 419 | 420 | return null 421 | }, 422 | 423 | 424 | tablename(ent) { 425 | const canon = ent.canon$({ object: true }) 426 | return (canon.base ? canon.base + '_' : '') + canon.name 427 | }, 428 | 429 | 430 | is_object(x) { 431 | const type = typeof x 432 | return (null != x) && ('object' === type || 'function' === type) 433 | }, 434 | 435 | 436 | is_date(x) { 437 | return '[object Date]' === toString.call(x) 438 | }, 439 | 440 | 441 | /** 442 | * NOTE: makeentp is used to create a new persistable entity from the entity 443 | * object. 444 | */ 445 | makeentp(ent) { 446 | const fields = ent.fields$() 447 | const entp = {} 448 | 449 | for (const field of fields) { 450 | if (!intern.is_date(ent[field]) && intern.is_object(ent[field])) { 451 | entp[field] = JSON.stringify(ent[field]) 452 | } else { 453 | entp[field] = ent[field] 454 | } 455 | } 456 | 457 | return entp 458 | }, 459 | 460 | 461 | /** 462 | * NOTE: makeent is used to create a new entity using a row from a database. 
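* Column values holding JSON strings (as written by makeentp) are parsed back into objects; all other values are kept as-is.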
463 | * 464 | */ 465 | makeent(ent, row) { 466 | if (!row) { 467 | return null 468 | } 469 | 470 | const fields = Object.keys(row) 471 | const entp = {} 472 | 473 | for (const field of fields) { 474 | let value = row[field] 475 | 476 | try { 477 | const parsed = JSON.parse(row[field]) 478 | 479 | if (intern.is_object(parsed)) { 480 | value = parsed 481 | } 482 | } catch (err) { 483 | if (!(err instanceof SyntaxError)) { 484 | throw err 485 | } 486 | } 487 | 488 | entp[field] = value 489 | } 490 | 491 | return ent.make$(entp) 492 | } 493 | } 494 | 495 | 496 | module.exports = { intern } 497 | -------------------------------------------------------------------------------- /lib/qbuilder.js: -------------------------------------------------------------------------------- 1 | function insertstm(args) { 2 | const { into, values } = args 3 | 4 | const col_names = Object.keys(values) 5 | const col_vals = Object.values(values) 6 | 7 | 8 | let bindings = [] 9 | let sql = '' 10 | 11 | 12 | sql += 'insert into ?? ' 13 | bindings.push(into) 14 | 15 | 16 | const col_placeholders = col_names.map(_ => '??') 17 | sql += '(' + col_placeholders.join(', ') + ') ' 18 | bindings = bindings.concat(col_names) 19 | 20 | 21 | const val_placeholders = col_vals.map(_ => '?') 22 | sql += 'values (' + val_placeholders.join(', ') + ') ' 23 | bindings = bindings.concat(col_vals) 24 | 25 | 26 | return { sql, bindings } 27 | } 28 | 29 | function insertwherenotexistsstm(args) { 30 | const { into, values, where_not } = args 31 | 32 | const col_names = Object.keys(values) 33 | const col_vals = Object.values(values) 34 | 35 | 36 | let bindings = [] 37 | let sql = '' 38 | 39 | 40 | sql += 'insert into ?? ' 41 | bindings.push(into) 42 | 43 | 44 | const col_placeholders = col_names.map(_ => '??') 45 | sql += '(' + col_placeholders.join(', ') + ') ' 46 | bindings = bindings.concat(col_names) 47 | 48 | const val_placeholders = col_vals.map(_ => '?') 49 | sql += 'select ' + val_placeholders.join(', ') + ' from dual ' 50 | bindings = bindings.concat(col_vals) 51 | 52 | 53 | const sub_sel = selectstm({ 54 | columns: '*', 55 | from: into, 56 | where: where_not 57 | }) 58 | 59 | sql += 'where not exists (' + sub_sel.sql + ')' 60 | bindings = bindings.concat(sub_sel.bindings) 61 | 62 | 63 | return { sql, bindings } 64 | } 65 | 66 | function wherestm(args) { 67 | const { where } = args 68 | const update_all = 0 === Object.keys(where).length 69 | 70 | 71 | let sql = '' 72 | let bindings = [] 73 | 74 | 75 | if (update_all) { 76 | sql += '1' 77 | } else { 78 | let first_where = true 79 | 80 | for (const where_col in where) { 81 | const where_val = where[where_col] 82 | 83 | if (!first_where) { 84 | sql += ' and ' 85 | } 86 | 87 | if (Array.isArray(where_val)) { 88 | const val_placeholders = where_val.map(_ => '?').join(', ') 89 | 90 | if (0 === val_placeholders.length) { 91 | sql += '0' 92 | } else { 93 | sql += '?? in (' + val_placeholders + ')' 94 | 95 | bindings.push(where_col) 96 | bindings = bindings.concat(where_val) 97 | } 98 | } else { 99 | bindings.push(where_col) 100 | 101 | if (null == where_val) { 102 | sql += '?? is null' 103 | } else { 104 | bindings.push(where_val) 105 | sql += '?? = ?' 106 | } 107 | } 108 | 109 | first_where = false 110 | } 111 | } 112 | 113 | return { sql, bindings } 114 | } 115 | 116 | function updatestm(args) { 117 | const { table, set, where } = args 118 | 119 | 120 | let bindings = [] 121 | let sql = '' 122 | 123 | 124 | sql += 'update ?? 
' 125 | bindings.push(table) 126 | 127 | sql += 'set ' 128 | 129 | 130 | let first_set = true 131 | 132 | for (const set_col in set) { 133 | const set_val = set[set_col] 134 | 135 | if (!first_set) { 136 | sql += ', ' 137 | } 138 | 139 | sql += '?? = ?' 140 | bindings.push(set_col) 141 | bindings.push(set_val) 142 | 143 | first_set = false 144 | } 145 | 146 | sql += ' ' 147 | 148 | 149 | const where_q = wherestm({ where }) 150 | 151 | sql += 'where ' + where_q.sql 152 | bindings = bindings.concat(where_q.bindings) 153 | 154 | 155 | return { sql, bindings } 156 | } 157 | 158 | function orderbystm(args) { 159 | const { order_by } = args 160 | 161 | 162 | let sql = '' 163 | let bindings = [] 164 | 165 | 166 | let first_pair = true 167 | 168 | for (const order_col in order_by) { 169 | if (!first_pair) { 170 | sql += ', ' 171 | } 172 | 173 | first_pair = false 174 | 175 | 176 | const order_val = order_by[order_col] 177 | const order = 0 <= order_val ? 'asc' : 'desc' 178 | 179 | sql += '?? ' + order 180 | bindings.push(order_col) 181 | } 182 | 183 | 184 | return { sql, bindings } 185 | } 186 | 187 | function selectstm(args) { 188 | const { 189 | from, 190 | columns = '*', 191 | where = null, 192 | offset = null, 193 | limit = null, 194 | order_by = null 195 | } = args 196 | 197 | 198 | let bindings = [] 199 | let sql = '' 200 | 201 | 202 | sql += 'select ' 203 | 204 | 205 | if ('*' === columns) { 206 | sql += '*' 207 | } else { 208 | const col_placeholders = columns.map(_ => '??') 209 | sql += col_placeholders.join(', ') 210 | 211 | bindings = bindings.concat(columns) 212 | } 213 | 214 | 215 | sql += ' from ??' 216 | bindings.push(from) 217 | 218 | 219 | if (null != where) { 220 | const where_q = wherestm({ where }) 221 | 222 | sql += ' where ' + where_q.sql 223 | bindings = bindings.concat(where_q.bindings) 224 | } 225 | 226 | 227 | if (null != order_by) { 228 | const order_q = orderbystm({ order_by }) 229 | 230 | sql += ' order by ' + order_q.sql 231 | bindings = bindings.concat(order_q.bindings) 232 | } 233 | 234 | 235 | if (null != limit) { 236 | sql += ' limit ?' 237 | bindings.push(limit) 238 | } 239 | 240 | 241 | if (null != offset) { 242 | sql += ' offset ?' 243 | bindings.push(offset) 244 | } 245 | 246 | 247 | return { sql, bindings } 248 | } 249 | 250 | function deletestm(args) { 251 | const { from, where = null, limit = null } = args 252 | 253 | let sql = '' 254 | let bindings = [] 255 | 256 | sql += 'delete from ?? ' 257 | bindings.push(from) 258 | 259 | if (null != where) { 260 | const where_q = wherestm({ where }) 261 | 262 | sql += 'where ' + where_q.sql 263 | bindings = bindings.concat(where_q.bindings) 264 | } 265 | 266 | if (null != limit) { 267 | sql += ' limit ?' 
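// MySQL supports LIMIT on DELETE but not OFFSET, so only the row count is bound here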
268 | bindings.push(limit) 269 | } 270 | 271 | return { sql, bindings } 272 | } 273 | 274 | module.exports = { selectstm, insertstm, deletestm, updatestm, wherestm, insertwherenotexistsstm } 275 | -------------------------------------------------------------------------------- /mysql-store.js: -------------------------------------------------------------------------------- 1 | const MySQL = require('mysql') 2 | const DefaultConfig = require('./default_config.json') 3 | 4 | const Util = require('util') 5 | const { intern } = require('./lib/intern') 6 | const { asyncmethod } = intern 7 | 8 | const STORE_NAME = 'mysql-store' 9 | 10 | function mysql_store (options) { 11 | const seneca = this 12 | 13 | const opts = seneca.util.deepextend(DefaultConfig, options) 14 | 15 | const internals = { 16 | name: STORE_NAME, 17 | opts 18 | } 19 | 20 | function configure(spec, done) { 21 | const conf = get_config(spec) 22 | 23 | const default_conn_opts = { 24 | connectionLimit: conf.poolSize || 5, 25 | host: conf.host, 26 | user: conf.user || conf.username, 27 | password: conf.password, 28 | database: conf.name, 29 | port: conf.port || 3306 30 | } 31 | 32 | const conn_opts = conf.conn || default_conn_opts 33 | 34 | internals.connectionPool = MySQL.createPool(conn_opts) 35 | internals.spec = spec 36 | 37 | return ensure_connected(done) 38 | 39 | 40 | function get_config(spec) { 41 | if ('string' === typeof spec) { 42 | const urlM = /^mysql:\/\/((.*?):(.*?)@)?(.*?)(:?(\d+))?\/(.*?)$/.exec(spec) 43 | 44 | const conf = { 45 | name: urlM[7], 46 | server: urlM[4], 47 | username: urlM[2], 48 | password: urlM[3], 49 | port: urlM[6] ? parseInt(conf.port, 10) : null 50 | } 51 | 52 | return conf 53 | } 54 | 55 | return spec 56 | } 57 | 58 | 59 | function ensure_connected(done) { 60 | return internals.connectionPool.getConnection((err, conn) => { 61 | if (err) { 62 | return done(err) 63 | } 64 | 65 | conn.release() 66 | 67 | return done() 68 | }) 69 | } 70 | } 71 | 72 | const store = { 73 | name: STORE_NAME, 74 | 75 | close: asyncmethod(async function (_msg) { 76 | const { connectionPool: pool = null } = internals 77 | 78 | if (pool) { 79 | const end = Util.promisify(pool.end).bind(pool) 80 | 81 | try { 82 | await end() 83 | } catch (err) { 84 | return seneca.fail('connection/end', { 85 | store: internals.name, 86 | error: err 87 | }) 88 | } 89 | } 90 | 91 | seneca.log.debug('Closed the connection to the db') 92 | }), 93 | 94 | save: asyncmethod(async function (msg) { 95 | const seneca = this 96 | const ctx = { seneca, db: internals.connectionPool } 97 | 98 | if (intern.is_update(msg)) { 99 | return intern.do_update(msg, ctx) 100 | } 101 | 102 | return intern.do_create(msg, ctx) 103 | }), 104 | 105 | load: asyncmethod(async function (msg) { 106 | const seneca = this 107 | const { qent, q } = msg 108 | const ctx = { seneca, db: internals.connectionPool } 109 | 110 | const where = intern.where_of_q(q, ctx) 111 | 112 | const out = await intern.loadent({ 113 | ent: qent, 114 | where, 115 | limit: 1, 116 | offset: 0 <= q.skip$ ? 
q.skip$ : null, 117 | order_by: q.sort$ || null 118 | }, ctx) 119 | 120 | seneca.log.debug('load', 'ok', q, out) 121 | 122 | return out 123 | }), 124 | 125 | list: asyncmethod(async function (msg) { 126 | const seneca = this 127 | const { qent, q } = msg 128 | const ctx = { seneca, db: internals.connectionPool } 129 | 130 | 131 | let out 132 | 133 | const nat_query = intern.is_native(msg) 134 | 135 | if (null == nat_query) { 136 | const where = intern.where_of_q(q, ctx) 137 | 138 | out = await intern.listents({ 139 | ent: qent, 140 | where, 141 | limit: 0 <= q.limit$ ? q.limit$ : null, 142 | offset: 0 <= q.skip$ ? q.skip$ : null, 143 | order_by: q.sort$ || null 144 | }, ctx) 145 | } else { 146 | const rows = await intern.execquery(nat_query, ctx) 147 | out = rows.map(row => intern.makeent(qent, row)) 148 | } 149 | 150 | seneca.log.debug('list', 'ok', q, out.length) 151 | 152 | return out 153 | }), 154 | 155 | remove: asyncmethod(async function (msg) { 156 | const seneca = this 157 | const { q } = msg 158 | const ctx = { seneca, db: internals.connectionPool } 159 | 160 | let op_name 161 | let out 162 | 163 | if (q.all$) { 164 | op_name = 'remove/all' 165 | out = await intern.remove_many(msg, ctx) 166 | } else { 167 | op_name = 'remove/one' 168 | out = await intern.remove_one(msg, ctx) 169 | } 170 | 171 | seneca.log.debug(op_name, 'ok', q) 172 | 173 | return out 174 | }), 175 | 176 | native: asyncmethod(async function (_msg) { 177 | return internals.connectionPool 178 | }) 179 | } 180 | 181 | 182 | const meta = seneca.store.init(seneca, opts, store) 183 | 184 | internals.desc = meta.desc 185 | 186 | seneca.add({ init: store.name, tag: meta.tag }, function (args, done) { 187 | configure(internals.opts, function (err) { 188 | if (err) { 189 | return seneca.fail('entity/configure', { 190 | store: internals.name, 191 | error: err, 192 | desc: internals.desc 193 | }) 194 | } 195 | 196 | seneca.log.debug('Successfully connected to the database') 197 | 198 | return done() 199 | }) 200 | }) 201 | 202 | return { name: store.name, tag: meta.tag } 203 | } 204 | 205 | 206 | module.exports = mysql_store 207 | 208 | 209 | module.exports.errors = { 210 | 'entity/configure': 'Failed to connect to the database, store "<%=store%>", ' + 211 | 'error: "<%=error%>", desc: <%=desc%>', 212 | 213 | 'connection/end': 'Failed to close the connection, store "<%=store%>, ' + 214 | 'error: "<%=error%>"' 215 | } 216 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "seneca-mysql-store", 3 | "version": "1.1.0", 4 | "description": "MySQL database layer for Seneca framework", 5 | "main": "mysql-store.js", 6 | "license": "MIT", 7 | "author": { 8 | "name": "Mircea Alexandru (http://www.alexandrumircea.ro)", 9 | "email": "mircea.alexandru@gmail.com" 10 | }, 11 | "contributors": [ 12 | "Peter Elger (http://peterelger.com/)", 13 | "Chico Charlesworth (https://twitter.com/ccharlesworth)", 14 | "Dean McDonnell (http://mcdonnelldean.me)", 15 | "Paolo Chiodi (https://github.com/paolochiodi)", 16 | "Guy Ellis (https://github.com/guyellis)", 17 | "Richard Rodger (http://richardrodger.com/)", 18 | "Cristian Kiss (https://github.com/ckiss)", 19 | "Mihai Dima ", 20 | "Max Nachlinger (https://github.com/maxnachlinger)", 21 | "Shane Lacey (https://github.com/shanel262)", 22 | "Thinh (https://github.com/voxuanthinh)", 23 | "Seamus D'Arcy (https://github.com/darsee)", 24 | "Kamil Mech 
(https://github.com/kamil-mech)" 25 | ], 26 | "scripts": { 27 | "build": "docker-compose build", 28 | "start": "docker-compose up", 29 | "stop": "docker-compose kill", 30 | "test": "lab -P '\\.test' test -r console -v -L -m 3000 -t 72", 31 | "test-some": "lab -P '\\.test' test -r console -v -L -g ", 32 | "lint": "lab -dL", 33 | "coverage": "lab -v -P test -L -t 80 -r html > docs/coverage.html", 34 | "annotate": "docco mysql-store.js -o docs/annotated" 35 | }, 36 | "bugs": { 37 | "url": "https://github.com/senecajs/seneca-mysql-store/issues" 38 | }, 39 | "homepage": "https://github.com/senecajs/seneca-mysql-store", 40 | "repository": { 41 | "type": "git", 42 | "url": "git://github.com/senecajs/seneca-mysql-store.git" 43 | }, 44 | "keywords": [ 45 | "seneca", 46 | "mysql", 47 | "plugin" 48 | ], 49 | "dependencies": { 50 | "eraro": "0.4.1", 51 | "mysql": "2.11.1", 52 | "node-uuid": "1.4.7" 53 | }, 54 | "files": [ 55 | "lib", 56 | "CHANGES.md", 57 | "LICENSE", 58 | "README.md", 59 | "default_config.json", 60 | "mysql-store.js", 61 | "package.json", 62 | "query-builder.js" 63 | ], 64 | "devDependencies": { 65 | "@hapi/code": "8.0.3", 66 | "@hapi/lab": "24.2.1", 67 | "async": "3.2.0", 68 | "chai": "3.5.0", 69 | "coveralls": "2.11.x", 70 | "docco": "0.7.x", 71 | "eslint-config-seneca": "3.x.x", 72 | "eslint-plugin-hapi": "4.x.x", 73 | "eslint-plugin-standard": "2.x.x", 74 | "pre-commit": "1.1.3", 75 | "seneca": "3.23.3", 76 | "seneca-entity": "14.0.0", 77 | "seneca-store-test": "4.0.2" 78 | }, 79 | "pre-commit": [ 80 | "test" 81 | ] 82 | } 83 | -------------------------------------------------------------------------------- /script/schema.sql: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2012 Mircea Alexandru */ 2 | /* execute using mysql -u root < dbschema.sql */ 3 | 4 | DROP DATABASE IF EXISTS senecatest; 5 | CREATE DATABASE senecatest; 6 | 7 | USE senecatest; 8 | 9 | /* Create user senecatest with harmless privilege so that DROP USER does not throw error on next line if user does not exist */ 10 | GRANT USAGE ON *.* TO 'senecatest'@'localhost'; 11 | DROP USER 'senecatest'@'localhost'; 12 | CREATE USER 'senecatest'@'localhost' IDENTIFIED BY 'senecatest'; 13 | GRANT ALL PRIVILEGES ON senecatest.* TO senecatest@localhost; 14 | FLUSH PRIVILEGES; 15 | 16 | CREATE TABLE foo (id VARCHAR(36), p1 VARCHAR(255), p2 VARCHAR(255), p3 VARCHAR(255), seneca VARCHAR(125)); 17 | 18 | CREATE TABLE moon_bar ( 19 | id VARCHAR(36), 20 | str VARCHAR(255), 21 | `int` INT, 22 | bol BOOLEAN, 23 | wen TIMESTAMP, 24 | mark VARCHAR(255), 25 | `dec` REAL, 26 | arr TEXT, 27 | obj TEXT, 28 | seneca VARCHAR(125)); 29 | 30 | CREATE TABLE product (id VARCHAR(36), name VARCHAR(255), price INT); 31 | 32 | CREATE TABLE incremental ( 33 | id INT AUTO_INCREMENT, 34 | p1 VARCHAR(255), 35 | PRIMARY KEY (id) 36 | ); 37 | 38 | -------------------------------------------------------------------------------- /test/mysql.autoincrement.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { expect } = require('@hapi/code') 4 | const { make_it } = require('./support/helpers') 5 | 6 | function autoincrementTest (settings) { 7 | const { script, seneca: si } = settings 8 | 9 | const { describe, beforeEach, afterEach } = script 10 | const it = make_it(script) 11 | 12 | describe('Autoincrement tests', () => { 13 | beforeEach(() => clearDb(si)) 14 | afterEach(() => clearDb(si)) 15 | 16 | it('delegates id generation to the 
db', (done) => { 17 | const inc = si.make('incremental') 18 | inc.p1 = 'v1' 19 | 20 | inc.save$({ auto_increment$: true }, (err, inc1) => { 21 | if (err) { 22 | return done(err) 23 | } 24 | 25 | expect(typeof inc1.id).to.equal('number') 26 | 27 | return inc.load$({ id: inc1.id }, (err, inc2) => { 28 | if (err) { 29 | return done(err) 30 | } 31 | 32 | expect(inc2).to.contain({ 33 | id: inc1.id, 34 | p1: 'v1' 35 | }) 36 | 37 | expect(null == inc2).to.equal(false) 38 | 39 | expect(inc2).to.contain({ 40 | id: inc1.id, 41 | p1: 'v1' 42 | }) 43 | 44 | return done() 45 | }) 46 | }) 47 | }) 48 | 49 | it('delegates id generation to the db, when upserting/creating', (done) => { 50 | const inc = si.make('incremental') 51 | inc.p1 = 'v1' 52 | 53 | inc.save$({ upsert$: ['uniq'], auto_increment$: true }, (err, inc1) => { 54 | if (err) { 55 | return done(err) 56 | } 57 | 58 | expect(typeof inc1.id).to.equal('number') 59 | 60 | return inc.load$({ id: inc1.id }, (err, inc2) => { 61 | if (err) { 62 | return done(err) 63 | } 64 | 65 | expect(inc2).to.contain({ 66 | id: inc1.id, 67 | p1: 'v1' 68 | }) 69 | 70 | expect(null == inc2).to.equal(false) 71 | 72 | expect(inc2).to.contain({ 73 | id: inc1.id, 74 | p1: 'v1' 75 | }) 76 | 77 | return done() 78 | }) 79 | }) 80 | }) 81 | 82 | it('delegates id generation to the db, when upserting/matching', (done) => { 83 | const new_id = 37 84 | 85 | si.make('incremental').data$({ id: new_id, uniq: 1 }).save$((err) => { 86 | if (err) { 87 | return done(err) 88 | } 89 | 90 | return si.make('incremental') 91 | .data$({ p1: 'v1', uniq: 1 }) 92 | .save$({ upsert$: ['uniq'], auto_increment$: true }, (err, inc1) => { 93 | if (err) { 94 | return done(err) 95 | } 96 | 97 | expect(inc1.id).to.equal(new_id) 98 | 99 | return si.make('incremental').load$({ id: inc1.id }, (err, inc2) => { 100 | if (err) { 101 | return done(err) 102 | } 103 | 104 | expect(inc2).to.contain({ 105 | id: inc1.id, 106 | p1: 'v1' 107 | }) 108 | 109 | expect(null == inc2).to.equal(false) 110 | 111 | expect(inc2).to.contain({ 112 | id: inc1.id, 113 | p1: 'v1' 114 | }) 115 | 116 | return done() 117 | }) 118 | }) 119 | }) 120 | }) 121 | 122 | async function clearDb(si) { 123 | return new Promise((resolve, reject) => { 124 | const done = (err, out) => err 125 | ? 
reject(err) 126 | : resolve(out) 127 | 128 | return si.make('incremental').remove$({ all$: true }, done) 129 | }) 130 | } 131 | }) 132 | } 133 | 134 | module.exports.autoincrementTest = autoincrementTest 135 | -------------------------------------------------------------------------------- /test/mysql.ext.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const Async = require('async') 4 | const { assert: Assert } = require('chai') 5 | const { make_it } = require('./support/helpers') 6 | 7 | const scratch = {} 8 | 9 | function extendTest (settings) { 10 | const { script, seneca: si } = settings 11 | 12 | const { describe } = script 13 | const it = make_it(script) 14 | 15 | describe('Extended tests', function () { 16 | it('Extended tests', function (done) { 17 | Async.series( 18 | { 19 | removeAll: function (next) { 20 | var foo = si.make({name$: 'foo'}) 21 | foo.remove$({all$: true}, function (err, res) { 22 | Assert(!err) 23 | next() 24 | }) 25 | }, 26 | listEmpty: function (next) { 27 | var foo = si.make({name$: 'foo'}) 28 | foo.list$({}, function (err, res) { 29 | Assert(!err) 30 | Assert.equal(0, res.length) 31 | next() 32 | }) 33 | }, 34 | insert2: function (next) { 35 | var foo = si.make({name$: 'foo'}) 36 | foo.p1 = 'v1' 37 | 38 | foo.save$(foo, function (err, foo) { 39 | Assert(!err) 40 | Assert.isNotNull(foo.id) 41 | Assert.equal('v1', foo.p1) 42 | scratch.foo1 = foo 43 | next() 44 | }) 45 | }, 46 | list1: function (next) { 47 | scratch.foo1.list$({}, function (err, res) { 48 | Assert(!err) 49 | Assert.equal(1, res.length) 50 | next() 51 | }) 52 | }, 53 | 54 | list2: function (next) { 55 | scratch.foo1.list$({id: scratch.foo1.id}, function (err, res) { 56 | Assert(!err) 57 | Assert.equal(1, res.length) 58 | next() 59 | }) 60 | }, 61 | load1: function (next) { 62 | scratch.foo1.load$({id: scratch.foo1.id}, function (err, res) { 63 | Assert(!err) 64 | Assert.isNotNull(res.id) 65 | next() 66 | }) 67 | }, 68 | 69 | update: function (next) { 70 | scratch.foo1.p1 = 'v2' 71 | 72 | scratch.foo1.save$(function (err, foo) { 73 | Assert(!err) 74 | Assert.isNotNull(foo.id) 75 | Assert.equal('v2', foo.p1) 76 | next() 77 | }) 78 | }, 79 | 80 | load2: function (next) { 81 | scratch.foo1.load$({id: scratch.foo1.id}, function (err, res) { 82 | Assert(!err) 83 | Assert.equal('v2', res.p1) 84 | next() 85 | }) 86 | }, 87 | 88 | insertwithsafe: function (next) { 89 | var foo = si.make({name$: 'foo'}) 90 | foo.p1 = 'v3' 91 | 92 | foo.save$(function (err, foo) { 93 | Assert(!err) 94 | Assert.isNotNull(foo.id) 95 | Assert.equal('v3', foo.p1) 96 | scratch.foo2 = foo 97 | next() 98 | }) 99 | }, 100 | 101 | list3: function (next) { 102 | scratch.foo2.list$({id: scratch.foo2.id}, function (err, res) { 103 | Assert(!err) 104 | Assert.equal(1, res.length) 105 | next() 106 | }) 107 | }, 108 | 109 | list4: function (next) { 110 | scratch.foo2.list$({id: scratch.foo2.id, limit$: 1}, function (err, res) { 111 | Assert(!err) 112 | Assert.equal(1, res.length) 113 | next() 114 | }) 115 | }, 116 | 117 | remove1: function (next) { 118 | scratch.foo2.remove$({id: scratch.foo2.id}, function (err) { 119 | Assert(!err) 120 | next() 121 | }) 122 | }, 123 | 124 | list5: function (next) { 125 | var foo = si.make('foo') 126 | foo.list$({}, function (err, res) { 127 | Assert(!err) 128 | Assert.equal(1, res.length) 129 | next() 130 | }) 131 | }, 132 | 133 | reportAllErrors: function (next) { 134 | const foo = si.make('foo') 135 | foo.missing_attribute = 'v1' 136 | 
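// 'missing_attribute' has no matching column in the foo table, so MySQL raises
// ER_BAD_FIELD_ERROR on save; process.stdout.write is patched below so the
// expected error log does not pollute the test output.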
137 | 138 | const BAD_FIELD_ERROR_CODE = 'ER_BAD_FIELD_ERROR' 139 | const stdoutWrite = process.stdout.write 140 | 141 | process.stdout.write = output => { 142 | if ('string' === typeof output && 143 | output.includes(BAD_FIELD_ERROR_CODE)) { 144 | return 145 | } 146 | 147 | return stdoutWrite.apply(process.stdout, [output]) 148 | } 149 | 150 | 151 | foo.save$(function (err, foo1) { 152 | process.stdout.write = stdoutWrite 153 | 154 | Assert.isNotNull(err) 155 | Assert(err.message.includes(BAD_FIELD_ERROR_CODE)) 156 | 157 | return next() 158 | }) 159 | } 160 | }, 161 | function (err, out) { 162 | Assert(!err) 163 | done() 164 | }) 165 | }) 166 | }) 167 | } 168 | 169 | module.exports.extendTest = extendTest 170 | -------------------------------------------------------------------------------- /test/mysql.test.js: -------------------------------------------------------------------------------- 1 | /* jslint node: true */ 2 | /* Copyright (c) 2012 Mircea Alexandru */ 3 | /* 4 | * These tests assume a MySQL database/structure is already created. 5 | * execute script/schema.sql to create 6 | */ 7 | 8 | const Seneca = require('seneca') 9 | const Shared = require('seneca-store-test') 10 | const Extra = require('./mysql.ext.test.js') 11 | const Autoincrement = require('./mysql.autoincrement.test.js') 12 | 13 | const Lab = require('@hapi/lab') 14 | const lab = exports.lab = Lab.script() 15 | const { describe, before, after } = lab 16 | 17 | const DbConfig = require('./support/db/config') 18 | 19 | 20 | describe('MySQL suite tests ', function () { 21 | const si = makeSeneca({ mysqlStoreOpts: DbConfig }) 22 | 23 | before({}, function (done) { 24 | si.ready(done) 25 | }) 26 | 27 | after({}, function (done) { 28 | si.close(done) 29 | }) 30 | 31 | Shared.basictest({ 32 | seneca: si, 33 | script: lab 34 | }) 35 | 36 | Shared.sorttest({ 37 | seneca: si, 38 | script: lab 39 | }) 40 | 41 | Shared.limitstest({ 42 | seneca: si, 43 | script: lab 44 | }) 45 | 46 | Shared.sqltest({ 47 | seneca: si, 48 | script: lab 49 | }) 50 | 51 | Shared.upserttest({ 52 | seneca: si, 53 | script: lab 54 | }) 55 | 56 | Extra.extendTest({ 57 | seneca: si, 58 | script: lab 59 | }) 60 | }) 61 | 62 | describe('MySQL autoincrement tests ', function () { 63 | const incrementConfig = Object.assign( 64 | {}, DbConfig, { 65 | map: {'-/-/incremental': '*'}, 66 | auto_increment: true 67 | } 68 | ) 69 | 70 | const si2 = makeSeneca({ mysqlStoreOpts: incrementConfig }) 71 | 72 | 73 | before({}, function (done) { 74 | si2.ready(done) 75 | }) 76 | 77 | after({}, function (done) { 78 | si2.close(done) 79 | }) 80 | 81 | Autoincrement.autoincrementTest({ 82 | seneca: si2, 83 | script: lab 84 | }) 85 | }) 86 | 87 | 88 | function makeSeneca (opts = {}) { 89 | const si = Seneca({ 90 | default_plugins: { 91 | 'mem-store': false 92 | } 93 | }) 94 | 95 | if (si.version >= '2.0.0') { 96 | si.use('entity') 97 | } 98 | 99 | const { mysqlStoreOpts = {} } = opts 100 | si.use(require('../mysql-store.js'), mysqlStoreOpts) 101 | 102 | return si 103 | } 104 | 105 | -------------------------------------------------------------------------------- /test/support/db/config.js: -------------------------------------------------------------------------------- 1 | function getConfig() { 2 | if (process.env.CI) { 3 | return { 4 | name: 'senecatest_ci_578gw9f6wf7', 5 | host: 'localhost', 6 | user: 'root', 7 | password: 'itsasekret_ci_6g9b75t2gt528az', 8 | port: 3306 9 | } 10 | } 11 | 12 | return { 13 | name: 'senecatest', 14 | host: 'localhost', 15 | user: 'root', 16 | 
password: 'itsasekret_85a96vbFdh', 17 | port: 3306 18 | } 19 | } 20 | 21 | module.exports = getConfig() 22 | -------------------------------------------------------------------------------- /test/support/db/seed/schema.sql: -------------------------------------------------------------------------------- 1 | /* Copyright (c) 2012 Mircea Alexandru */ 2 | /* execute using mysql -u root < dbschema.sql */ 3 | 4 | DROP DATABASE IF EXISTS senecatest; 5 | CREATE DATABASE senecatest; 6 | 7 | USE senecatest; 8 | 9 | CREATE TABLE foo ( 10 | id VARCHAR(36), 11 | p1 VARCHAR(255), 12 | p2 VARCHAR(255), 13 | p3 VARCHAR(255), 14 | x INT, 15 | y INT, 16 | int_arr JSON, 17 | seneca VARCHAR(125), 18 | 19 | PRIMARY KEY(id), 20 | UNIQUE(x) 21 | ); 22 | 23 | CREATE TABLE moon_bar ( 24 | id VARCHAR(36), 25 | str VARCHAR(255), 26 | `int` INT, 27 | bol BOOLEAN, 28 | wen TIMESTAMP, 29 | mark VARCHAR(255), 30 | `dec` REAL, 31 | arr JSON, 32 | obj JSON, 33 | seneca VARCHAR(125), 34 | 35 | PRIMARY KEY(id) 36 | ); 37 | 38 | CREATE TABLE players ( 39 | id VARCHAR(36) NOT NULL, 40 | username VARCHAR(255) NOT NULL, 41 | points INT DEFAULT NULL, 42 | points_history JSON DEFAULT NULL, 43 | 44 | PRIMARY KEY (id), 45 | UNIQUE(username) 46 | ); 47 | 48 | CREATE TABLE racers ( 49 | id VARCHAR(36) NOT NULL, 50 | points INT NOT NULL DEFAULT 0, 51 | username VARCHAR(255) NOT NULL, 52 | favorite_car VARCHAR(255) NOT NULL, 53 | 54 | PRIMARY KEY (id), 55 | UNIQUE(username) 56 | ); 57 | 58 | CREATE TABLE users ( 59 | id VARCHAR(36) NOT NULL, 60 | username VARCHAR(255) NOT NULL, 61 | email VARCHAR(255) NOT NULL, 62 | 63 | PRIMARY KEY (id), 64 | UNIQUE(email) 65 | ); 66 | 67 | CREATE TABLE customers ( 68 | id VARCHAR(36) NOT NULL, 69 | first_name VARCHAR(255) NOT NULL, 70 | last_name VARCHAR(255) NOT NULL, 71 | credits INT NOT NULL, 72 | 73 | PRIMARY KEY (id), 74 | UNIQUE(first_name, last_name) 75 | ); 76 | 77 | CREATE TABLE products ( 78 | id VARCHAR(36) NOT NULL, 79 | price VARCHAR(255) NOT NULL, 80 | label VARCHAR(255) DEFAULT NULL, 81 | coolness_factor INT DEFAULT NULL, 82 | 83 | PRIMARY KEY (id), 84 | UNIQUE(label), 85 | UNIQUE(label, price) 86 | ); 87 | 88 | CREATE TABLE incremental ( 89 | id INT AUTO_INCREMENT, 90 | p1 VARCHAR(255), 91 | uniq INT, 92 | 93 | PRIMARY KEY (id), 94 | UNIQUE(uniq) 95 | ); 96 | -------------------------------------------------------------------------------- /test/support/helpers.js: -------------------------------------------------------------------------------- 1 | const Util = require('util') 2 | 3 | 4 | function make_it(lab) { 5 | return function it(name, opts, func) { 6 | if ('function' === typeof opts) { 7 | func = opts 8 | opts = {} 9 | } 10 | 11 | lab.it( 12 | name, 13 | opts, 14 | Util.promisify(function (x, fin) { 15 | func(fin) 16 | }) 17 | ) 18 | } 19 | } 20 | 21 | 22 | module.exports = { make_it } 23 | --------------------------------------------------------------------------------
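
For orientation, here is a small illustrative sketch (not part of the repository) of how `lib/qbuilder.js` composes parameterised statements. The expected `sql`/`bindings` pairs are shown as comments; the `??`/`?` placeholders are expanded later by the `mysql` driver when `intern.execquery` passes `{ sql, bindings }` to `db.query`.

```js
// Usage sketch for lib/qbuilder.js (assumed example values, not repo code)
const Q = require('./lib/qbuilder')

const ins = Q.insertstm({
  into: 'foo',
  values: { id: 'abc', p1: 'v1' }
})
// ins.sql      -> 'insert into ?? (??, ??) values (?, ?) '
// ins.bindings -> ['foo', 'id', 'p1', 'abc', 'v1']

const sel = Q.selectstm({
  from: 'foo',
  where: { p1: 'v1' },
  order_by: { id: -1 },
  limit: 10
})
// sel.sql      -> 'select * from ?? where ?? = ? order by ?? desc limit ?'
// sel.bindings -> ['foo', 'p1', 'v1', 'id', 10]
```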