├── .editorconfig ├── .eslintignore ├── .eslintrc ├── .gitignore ├── .npmignore ├── LICENSE ├── README.md ├── dist └── main.js ├── examples ├── actionLogger │ └── actionLogger.js ├── counter │ ├── counter.js │ └── types.js ├── firebase │ ├── firebaseListeners.js │ ├── firebaseStartup.js │ ├── import │ │ └── firebase.js │ └── sagas │ │ └── defaultListeners.js ├── networkMonitor │ └── networkMonitor.js ├── productFetcher │ ├── productFetcher.js │ └── types.js ├── react-notification-system │ └── notifications.js ├── reducerOnly │ ├── reducerOnly.js │ └── types.js ├── reduxPersistor │ └── reduxPersistor.js └── websocket │ └── websocket.js ├── package.json ├── src ├── generators.js ├── main.js ├── process-lib │ ├── createActions.js │ ├── effects.js │ ├── helpers.js │ ├── process.js │ ├── reducerGenerators.js │ ├── registry.js │ ├── statics.js │ └── wildcard.js └── statics.js ├── test └── index.js └── webpack.config.js /.editorconfig: -------------------------------------------------------------------------------- 1 | # editorconfig.org 2 | root = true 3 | 4 | [*] 5 | indent_style = tab 6 | end_of_line = lf 7 | charset = utf-8 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | 11 | [*.md] 12 | trim_trailing_whitespace = false 13 | 14 | [*.json] 15 | indent_style = space 16 | indent_size = 2 17 | 18 | [.eslintrc] 19 | indent_style = space 20 | indent_size = 2 21 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | flow-typed/ 2 | tools/flow/ 3 | node_modules/ 4 | build/ 5 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "parser": "babel-eslint", 3 | "extends": "airbnb", 4 | "plugins": [ 5 | "flowtype" 6 | ], 7 | "env": { 8 | "browser": true, 9 | "es6": true, 10 | "node": true, 11 | "jest": true 12 | }, 13 | "ecmaFeatures": { 14 | "defaultParams": true 15 | }, 16 | "rules": { 17 | // We use the 'import' plugin which allows for cases "flow" awareness. 18 | "no-duplicate-imports": 0, 19 | // A .jsx extension is not required for files containing jsx. 20 | "react/jsx-filename-extension": 0, 21 | // This rule struggles with flow and class properties. 22 | "react/sort-comp": 0, 23 | // This rule struggles with flow. 24 | "react/prop-types": 0, 25 | // We use global requires in various places, e.g. code splitting instances. 26 | "global-require": 0 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .idea 3 | node_modules 4 | npm-debug.log 5 | .git 6 | yarn.lock 7 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | .DS_STORE 2 | .idea 3 | src 4 | test 5 | examples 6 | node_modules 7 | yarn.lock 8 | .git 9 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017, Braden R. 
Napier 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Redux Saga Process 2 | 3 | [![npm version](https://badge.fury.io/js/redux-saga-process.svg)](https://badge.fury.io/js/redux-saga-process) 4 | 5 | Saga Processes provide an encapsulated environment for processing complex (or simple) logic. 6 | The Saga Process pattern that is being presenting is heavily inspired (surprisingly) by the general 7 | concept of [processes](https://en.wikipedia.org/wiki/Process_(computing)). 8 | 9 | Each Process manages its logic, holds its local state, and makes intelligent decisions on when and how to dispatch a 10 | pure repesentation of such data to the rest of the Application to be rendered efficiently. Keep 11 | the logic out of your views. 12 | 13 | ### Package Dependencies 14 | - [redux](https://github.com/reactjs/redux) 15 | - [redux-saga](https://github.com/redux-saga/redux-saga) 16 | - [reselect](https://github.com/reactjs/reselect) 17 | 18 | # Installation 19 | 20 | ```bash 21 | $ yarn add redux-saga-process 22 | ``` 23 | **or** 24 | ```bash 25 | $ npm install --save redux-saga-process 26 | ``` 27 | 28 | # Overview 29 | 30 | Processes should remain a "pure environment" with a specific purpose or intent. They 31 | run as daemons in most cases which will live and respond throughout the lifetime of the 32 | application. 33 | 34 | Processes are run as sagas using the [redux-saga](https://github.com/redux-saga/redux-saga) 35 | library. They may also be configured to reduce a portion of your [redux](https://github.com/reactjs/redux) 36 | store, providing a means for dispatching a pure representation of our data to be rendered by our 37 | view-layer. 38 | 39 | # Migrating to version > 0.12 40 | 41 | Note that 0.12 no longer exports "default" and instead exports the named module "Process". 42 | 43 | ```javascript 44 | // from 45 | import Process from 'redux-saga-process' 46 | 47 | // to 48 | import { Process } from 'redux-saga-process' 49 | ``` 50 | 51 | ## Examples 52 | 53 | Here are a [few examples of some simple processes](https://github.com/Dash-OS/redux-saga-process/tree/master/examples) which are being used today. When built 54 | properly, a process should be a completely independent "module" which could be plugged 55 | into any other app. 
This should enable sharing of logic between your apps and/or each others. 56 | 57 | - [Network Monitor Process](https://github.com/Dash-OS/redux-saga-process/blob/master/examples/networkMonitor/networkMonitor.js) - this shows 58 | most of the options available in use. We specify this as a client-rendered process only, reduce part of our store, and observer push-style 59 | events. In use we do a bit more monitoring of different events, but stripped some of that out to make it more direct of an example. 60 | - [Action Logger](https://github.com/Dash-OS/redux-saga-process/blob/master/examples/actionLogger/actionLogger.js) - A minimal process which simply 61 | logs any actions dispatched. 62 | - [Redux Persistor](https://github.com/Dash-OS/redux-saga-process/blob/master/examples/reduxPersistor/reduxPersistor.js) - Handles the management of 63 | saving application data to persistent storage via [redux-persist](https://github.com/rt2zz/redux-persist) (in our case we use localForage). 64 | - [Firebase Listeners](https://github.com/Dash-OS/redux-saga-process/tree/master/examples/firebase) - Firebase Setup & Listeners 65 | 66 | *** 67 | 68 | #### Creating a Saga Redux Process 69 | 70 | 71 | ```javascript 72 | import { Process } from 'redux-saga-process' 73 | class MyProcess extends Process { /* ... */ } 74 | ``` 75 | 76 | *** 77 | 78 | #### Building your Processes 79 | 80 | Before we [create our Redux Store](http://redux.js.org/docs/basics/Store.html#store) we should start 81 | by building our processes. During its build phase, the properties of each Process will be parsed and 82 | an environment will be created for each. 83 | 84 | Below we show one example of how you may organize your processes. We have a root 85 | folder which has an index.js file that exports each "category" folder. This folder 86 | then exports each process that should be built. With this setup, processes can be 87 | thought of as "threads" while each category folder would represent an overall process. 88 | 89 | ``` 90 | > ./processes 91 | > |---- ./ShoppingCart 92 | > |-------- ./productFetcher 93 | > |-------------- ./productFetcherProcess.js 94 | > |-------- ./shoppingCart 95 | > |-------------- ./shoppingCartProcess.js 96 | > |-------- ./index.js 97 | > |---- ./index.js 98 | ``` 99 | 100 | > ***Note:*** When you build your processes, the library will search up to two levels deep for classes that can be 101 | > built and build any discovered processes. 102 | 103 | Given the above configuration, we would then build our processes easily enough. We would 104 | simply do the following: 105 | 106 | ```javascript 107 | // configureStore.js 108 | import { buildProcesses } from 'redux-saga-process' 109 | import * as processCategories from '../processes' 110 | const processes = buildProcesses(processCategories) 111 | ``` 112 | 113 | *** 114 | 115 | #### Adding your Processes Reducers (Optional) 116 | 117 | When your Process defines the [static reducer](https://github.com/Dash-OS/redux-saga-process#static-reducer) property, 118 | a redux-style reducer will be built and added to the list of reducers for you. This is accomplished by 119 | combining the reducers returned by ```buildProcesses``` (via Redux's [combineReducers](http://redux.js.org/docs/api/combineReducers.html)). These 120 | reducers are found on the responses "processReducers" property. 121 | 122 | > ***Tip:*** If multiple processes specify the same reducer name, they will be merged in the order they 123 | > were created. 
This is handled by using the [arrayMapReducer](https://github.com/Dash-OS/redux-saga-process#arraymapreducerinitialstate-reducerarray-context) generator from [reducerGenerators.js](https://github.com/Dash-OS/redux-saga-process/blob/master/src/lib/reducerGenerators.js). 124 | 125 | ```javascript 126 | // configureStore.js 127 | import { combineReducers } from 'redux' 128 | import { buildProcesses } from 'redux-saga-process' 129 | import * as processCategories from '../processes' 130 | const processes = buildProcesses(processCategories) 131 | 132 | const rootReducer = combineReducers({ 133 | /* Any other reducers you have may be added */ 134 | ...processes.processReducers, 135 | }) 136 | ``` 137 | 138 | *** 139 | 140 | #### Running your Processes 141 | 142 | Now that we have built our processes we need to run them. This is done from within your 143 | [root redux-saga](https://redux-saga.github.io/redux-saga/docs/introduction/BeginnerTutorial.html). 144 | 145 | ```javascript 146 | // configureSagas.js 147 | import { runProcesses } from 'redux-saga-process' 148 | import * as processCategories from '../processes' 149 | 150 | function* root() { 151 | // Processes are forked once ran, so we can simply use yield* in this case. 152 | // You could also use fork if you wish - but be careful with [spawn] if you 153 | // want to hot reload! 154 | yield* runProcesses(processCategories) 155 | } 156 | 157 | export default root 158 | ``` 159 | 160 | *** 161 | 162 | #### Hot Reloading Processes 163 | 164 | We have the ability to hot reload our processes. RSP will pass the `state` object, 165 | if it exists, to the newly built process through the constructor. This allows you to 166 | handle it however you wish based on what the process does. 167 | 168 | This allows us to hot reload the reducers **AND** the processes while maintaining state 169 | across all of them. 170 | 171 | ```js 172 | // when we run our sagas 173 | let sagaTask = sagaMiddleware.run(rootSaga) 174 | // then... 175 | if ( module.hot ) { 176 | module.hot.accept('./reducers', () => { 177 | sagaTask.cancel() 178 | sagaTask.done.then(() => { 179 | // dynamic import reducers and sagas 180 | Promise.all([ 181 | import('./reducers'), 182 | import('../ui/shared/sagas') 183 | ]).then( ([ new_reducers, new_sagas ]) => { 184 | // replace the reducers with the new reducers - this will 185 | // also rebuild our sagas efficiently (it doesnt re-build what it doesnt 186 | // have to). 187 | store.replaceReducer(new_reducers.default) 188 | // Update our sagaTask with the new task and run our sagas 189 | sagaTask = sagaMiddleware.run(new_sagas.default) 190 | // in case we want to handle the hot reload in our processes 191 | // (simply add an actionRoute for "hotReloaded") 192 | store.dispatch({ 193 | type: 'HOT_RELOADED' 194 | }) 195 | }) 196 | }) 197 | }) 198 | module.hot.accept('../ui/shared/sagas', () => { 199 | // Accept changes to all of our sagas! 200 | }) 201 | } 202 | ``` 203 | 204 | *** 205 | 206 | # Building your Processes 207 | 208 | So now lets look at what a Process actually looks like, and what it can do for us. As 209 | shown above we start by building an ES6 class which extends ```Process```: 210 | 211 | ```javascript 212 | import Process from 'redux-saga-process' 213 | class MyProcess extends Process { /* ... */ } 214 | ``` 215 | 216 | *** 217 | 218 | ## Process Properties 219 | 220 | Our classes can be configured using [static properties](http://exploringjs.com/es6/ch_classes.html). 
In our examples 221 | we are using the babel [transform-class-properties](https://babeljs.io/docs/plugins/transform-class-properties/) plugin. 222 | 223 | > ***Note:*** All of the properties are completely optional. 224 | 225 | *** 226 | 227 | 228 | ### static ```config``` 229 | 230 | 231 | ```javascript 232 | class MyProcess extends Process { 233 | static config = { 234 | /* Process Configuration Example */ 235 | // enabled? Setting to false will stop the process from being built and/or added 236 | // to the application. (default: true) 237 | enabled: true 238 | // providing this will indicate that we wish to reduce part of the redux store 239 | reduces: 'myState', 240 | // or 241 | // reduces: ['myState', 'myState2'] 242 | // Should we run on the server side as well? (default: true) 243 | ssr: true 244 | }; // don't forget to add the semi-colon! 245 | } 246 | ``` 247 | 248 | Providing a config property allows you to modify how the process will be built and handled. 249 | We plan to utilize this property to add flexibility and features in the future. Below are 250 | the properties that can be provided within this property. 251 | 252 | 253 | | Property | Type(s) | Description | 254 | | ------------- |:-------------:| ----- | 255 | | **pid** | _String_ | If using the `statics` connector, you will need to define a `pid` to use while importing a proceses exported values. | 256 | | **enabled** | _Boolean_ | true/false if this process is enabled. If set to "false" the process will be ignored on startup. | 257 | | **reduces** | _String_ || _Array_ | a string indicating the name of the [reducer](http://redux.js.org/docs/basics/Reducers.html) this process should reduce. Or an array to provide multiple reducer keys.
***Note:*** If this property is not defined a reducer will not be generated.
| 258 | | **ssr** | _Boolean_ | true/false if this process should run on the server as well as the client (default: true) | 259 | 260 | > ##### Overlapping Reducer Names / Reducer Merge 261 | > 262 | > There are times that we needed multiple processes to reduce against the same key within our 263 | > state. If you define multiple processes that reduce the same state we will merge them into 264 | > a single reducer and also attempt to merge and ```initialState``` that is provided. 265 | > 266 | > This is done internally by building a reducer which reduces an array of reducers while passing 267 | > and merging initialState and any reduction filters we have specified. 268 | > 269 | > It is probably inadvisable to do this as it can cause conflicts. It is generally a better 270 | > idea to have each process reduce its own key within your state. 271 | 272 | > ##### Defining Multiple Reducers for a Single Process 273 | > 274 | > Another option is to have a single process reduce multiple keys. This is another feature which should 275 | > be used lightly, however, it can be very useful in certain situations that you need to be able to place 276 | > data in various places. 277 | > 278 | > When an array is provided, the reducer is expected to be an Object Literal where the keys are the key to reducer 279 | > and the values are any accept reducer type. 280 | 281 | *** 282 | 283 | 284 | ### static ```initialState``` 285 | 286 | 287 | ```javascript 288 | class MyProcess extends Process { 289 | static config = { 290 | // enabled reducing a portion of the redux store 291 | reduces: 'myState' 292 | }; 293 | 294 | static initialState = { 295 | /* Initial 'myState' Reducer State */ 296 | myKey: 'myValue' 297 | }; 298 | } 299 | ``` 300 | 301 | When we are adding a reducer we may want to define an initialState that the 302 | store should use. This is done by providing the initialState property as 303 | shown above. When your reducer is built we will pass the initialState as the 304 | state on your first reduction. 305 | 306 | > ***Tip:*** We also return the compiled initialState of all your processes as a result of 307 | > the ```buildProcesses``` call. 308 | 309 | *** 310 | 311 | 312 | ### static ```reducer``` 313 | 314 | 315 | ```javascript 316 | import { MY_TYPE, MY_OTHER_TYPE } from '../constants' 317 | 318 | class MyProcess extends Process { 319 | static config = { 320 | reduces: 'myState' 321 | }; 322 | 323 | static initialState = { 324 | /* Initial 'myState' Reducer State */ 325 | myKey: 'myValue', 326 | anotherKey: 'anotherValue' 327 | }; 328 | 329 | // filters for MY_TYPE and MY_OTHER_TYPE 330 | static reducer = { 331 | [MY_TYPE]: (state, action) => ({ 332 | ...state, 333 | myKey: action.value 334 | }), 335 | [MY_OTHER_TYPE]: (state, action) => ({ 336 | ...state, 337 | anotherKey: action.value 338 | }) 339 | }; 340 | } 341 | ``` 342 | 343 | We use "higher-order-reducers" to build special reducers that are used to filter the appropriate 344 | actions into your processes reducers (and trigger your sagas if specified). Your reducer property 345 | can be either a ```Reducer Function``` which itself is a reducer, an ```Object Literal``` (as shown above) 346 | which maps specific types into a reducer function, or an ```Array``` where each element itself is a reducer. 347 | 348 | Object reducers may also use wildcard matching, and a special shortcut is given that may be used. 349 | 350 | ```javascript 351 | // shows shorthand method of specifying the type to filter for. 
this is 352 | // is identical to the above example. 353 | 354 | class MyProcess extends Process { 355 | static config = { 356 | reduces: 'myState' 357 | }; 358 | 359 | static initialState = { 360 | /* Initial 'myState' Reducer State */ 361 | myKey: 'myValue', 362 | anotherKey: 'anotherValue' 363 | }; 364 | 365 | // filters for MY_TYPE and MY_OTHER_TYPE 366 | static reducer = { 367 | myType: (state, action) => ({ 368 | ...state, 369 | myKey: action.value 370 | }), 371 | myOtherType: (state, action) => ({ 372 | ...state, 373 | anotherKey: action.value 374 | }) 375 | }; 376 | } 377 | ``` 378 | 379 | 380 | > ***Note:*** Our higher-order-reducers will automatically return an unmodified state if no types match your specified 381 | > handlers. 382 | 383 | > ***Note:*** Reducers should be pure. You can not access ```this``` within them. Instead 384 | > you should pass any desired properties within a dispatched action. 385 | 386 | 387 |
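As noted above, object reducers may also use wildcard matching on their keys. Below is a minimal, hedged sketch; the `USER_*` types and the `userState` key are purely illustrative, reusing the same `'PREFIX_*'` pattern style shown later for ```actionRoutes```.

```javascript
class MyProcess extends Process {
  static config = {
    reduces: 'userState'
  };

  static initialState = {
    lastUserActionType: undefined
  };

  // Any dispatched action whose type begins with "USER_" is reduced here.
  static reducer = {
    'USER_*': (state, action) => ({
      ...state,
      lastUserActionType: action.type
    })
  };
}
```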
388 | **An Example of using a Reducer Function**

389 | 390 | Here is an example of a reducer format that matches the style shown in the redux documentation: 391 | 392 | 393 | ```javascript 394 | static reducer = function(state, action) { 395 | switch(action.type) { 396 | case MY_TYPE: 397 | return { 398 | ...state, 399 | myKey: action.value 400 | } 401 | default: 402 | return state 403 | } 404 | }; 405 | ``` 406 | 407 |

408 | 409 |
410 | **An Example of a Higher-Order-Reducer Generator**

411 |
412 | This is not strictly important to see or understand, but for those of you that are interested 413 | in how we are building the reducers below is an example of the object filter reducer we showed 414 | in the first example. 415 | 416 | 417 | ```javascript 418 | const objectMapReducerGenerator = 419 | (initialState, handlers = {}) => 420 | (state = initialState, action) => 421 | ( 422 | ! action || ! action.type || ! handlers[action.type] && state || 423 | handlers[action.type](state, action) 424 | ) 425 | ``` 426 |
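If it helps to see the generator exercised, here is a small usage sketch (the types and state shape are only illustrative):

```javascript
const reducer = objectMapReducerGenerator(
  { count: 0 }, // initialState
  {
    INCREMENT: (state, action) => ({
      ...state,
      count: state.count + (action.by || 1)
    })
  }
)

reducer(undefined, { type: 'INCREMENT', by: 2 }) // -> { count: 2 }
reducer({ count: 5 }, { type: 'UNKNOWN' })       // -> { count: 5 } (unmodified state)
```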

427 | 428 | 429 | *** 430 | 431 | 432 | ### static ```actionRoutes``` 433 | 434 | ```javascript 435 | import { put } from 'redux-saga/effects' 436 | import { MY_TYPE, RECEIVED_ACTION } from '../constants' 437 | 438 | class MyProcess extends Process { 439 | 440 | static actionRoutes = { 441 | [MY_TYPE]: 'myMethod' 442 | }; 443 | 444 | * myMethod(action) { 445 | yield put({ type: RECEIVED_ACTION }) 446 | } 447 | 448 | } 449 | ``` 450 | 451 | Action Routes allow us to define types that we are interested in handling as a 452 | side-effect and maps it to a method within your process. If your method is a 453 | generator you can use any of the redux-saga API via yield within the method. 454 | 455 | actionRoutes support wildcard matching and the shorthand type defintion found 456 | in other properties. Below we match using the shorthand property and route 457 | any types that start with "ACTION_" to our actionHandler. 458 | 459 | ```javascript 460 | import { put } from 'redux-saga/effects' 461 | import { RECEIVED_ACTION } from '../constants' 462 | 463 | class MyProcess extends Process { 464 | 465 | static actionRoutes = { 466 | myType: 'myMethod', 467 | 'ACTION_*': 'actionHandler' 468 | }; 469 | 470 | * myMethod(action) { 471 | yield put({ type: RECEIVED_ACTION }) 472 | } 473 | 474 | * actionHandler(action) { 475 | /* Handle types starting with ACTION_ */ 476 | } 477 | } 478 | ``` 479 | 480 | *** 481 | 482 | ### static ```actionCreators``` 483 | 484 | ```javascript 485 | class MyProcess extends Process { 486 | 487 | static actionCreators = { 488 | trigger: [ 'action' ], 489 | myType: [ 'username', 'password' ], 490 | fooType: { staticKey: 'staticValue' }, 491 | fnType: (value, props, obj) => ({ value, props, ...obj }) 492 | }; 493 | 494 | * processStarts() { 495 | 496 | yield* this.dispatch('trigger', 'this') 497 | // dispatches action to reducers -> 498 | // { type: 'TRIGGER', action: 'this' } 499 | 500 | yield* this.dispatch('myType', 'myUsername', 'myPassword', { mergedKey: 'value' } ) 501 | // dispatches action to reducers -> 502 | // { type: 'MY_TYPE', username: 'myUsername', password: 'myPassword', mergedKey: 'value' }\ 503 | 504 | yield* this.dispatch('fooType', { mergedKey: 'value' }) 505 | // dispatches action to reducers -> 506 | // { type: 'FOO_TYPE', staticKey: 'staticValue', mergedKey: 'value' } 507 | 508 | yield* this.dispatch('fnType', 'foo', 'bar', { mergedKey: 'value' }) 509 | // dispatches action to reducers -> 510 | // { type: 'FN_TYPE', value: 'foo', props: 'bar', mergedKey: 'value' } 511 | 512 | } 513 | } 514 | ``` 515 | 516 | *** 517 | 518 | ### static ```selectors``` 519 | 520 | ```javascript 521 | class MyProcess extends Process { 522 | static config = { 523 | reduces: 'myState' 524 | }; 525 | 526 | static initialState = { 527 | /* Initial 'myState' Reducer State */ 528 | foo: { nested: 'bar' }, 529 | baz: 'qux' 530 | }; 531 | 532 | static selectors = { 533 | local: [ myState => myState.foo ], 534 | global: [ state => state, state => state.anotherKey.foo ], 535 | global2: [ state => state.myState, myState => myState.baz ] // identical to above 536 | composed: [ 537 | state => state.myState.foo.nested, 538 | state => state.myState.baz, 539 | (foo, baz) => ({ foo, baz }) 540 | ], 541 | }; 542 | 543 | * processStarts() { 544 | const local = yield* this.select('local') // myValue 545 | const global = yield* this.select('global') // value of foo in anotherState key 546 | const global2 = yield* this.select('global2') 547 | const composed = yield* this.select('composed') 548 | 
console.log(local, global, global2, composed) 549 | 550 | } 551 | } 552 | ``` 553 | 554 | Currently powered by the [reselect](https://github.com/reactjs/reselect) library (although at some point 555 | we would like to allow for plugins instead), selectors allow us to capture the state of our Application 556 | within our processes. While it is generally a best practice to handle state within the process for 557 | most things related to our processes encapsulated logic (example: ```this.state```), 558 | it can be helpful to capture our state. 559 | 560 | You can use the ```this.select``` function to conduct operations using the pre-built selectors. However, 561 | you can also dynamically specify selectors that were not built using the reselect package (using the selectors property). 562 | 563 | Under-the-hood this.select is simply calling redux-sagas [yield select(selector)](https://redux-saga.github.io/redux-saga/docs/api/index.html#selectselector-args) 564 | and feeding it a [reselect selector](https://github.com/reactjs/reselect#createselectorinputselectors--inputselectors-resultfunc) 565 | 566 | *** 567 | 568 | ### static ```cancelTypes``` 569 | ```javascript 570 | import { put } from 'redux-saga/effects' 571 | import { CANCEL_PROCESS, USER_LOGOUT } from '../constants' 572 | 573 | class MyProcess extends Process { 574 | 575 | static cancelTypes = [ 576 | { type: CANCEL_PROCESS }, 577 | USER_LOGOUT 578 | ]; 579 | 580 | * shouldProcessCancel(action) { 581 | switch(action.type) { 582 | case USER_LOGOUT: return true 583 | case CANCEL_PROCESS: 584 | if (action.name === this.name) return true 585 | default: return false 586 | } 587 | } 588 | 589 | * processCancels(action) { 590 | /* Conduct Cleanup */ 591 | } 592 | 593 | * processStarts() { 594 | /* Cancel Ourself on Process Startup (As Example) */ 595 | yield put({ type: CANCEL_PROCESS, name: this.name }) 596 | } 597 | 598 | } 599 | ``` 600 | 601 | # Connecting to React Components 602 | 603 | When we want to connect actions and selectors provided by our processes, we have 604 | the ability to use the "statics" module. This will essentially connect to the 605 | connect HOC that is provided - passing it the given processes public actions and/or 606 | selectors. 607 | 608 | You setup what actions and selectors should be public when defining your selectors 609 | or actions by prefixing them with "!" (otherwise all will be imported). 610 | 611 | ```javascript 612 | import React, { Component } from 'react' 613 | import { connect } from 'react-connect' 614 | import statics from 'redux-saga-process/statics' 615 | 616 | class DashboardGrid extends Component { 617 | render() { 618 | // ... 619 | } 620 | } 621 | 622 | export default statics( 623 | { 624 | grid: ['actions', 'selectors'], 625 | processTwo: ['actions'] 626 | }, 627 | ({ selectors, actions }) => ( 628 | connect( 629 | state => ({ 630 | grid: selectors.grid(state) 631 | }), 632 | actions 633 | )(DashboardGrid) 634 | ), 635 | { prefixed: false } 636 | ) 637 | ``` 638 | 639 | # Process API 640 | 641 | In addition to the static properties, process contains special API helpers 642 | that can be used within your process to make some patterns easier to work with 643 | and maintain. 644 | 645 | > More Information Coming Soon... 646 | 647 | *** 648 | 649 | ## Process Task System 650 | 651 | Tasks are essential to building powerful & asynchronous workflows within your 652 | projects. With the ```Process Task API``` it is simple to manage and maintain 653 | your tasks. 
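Before the details, here is a hedged sketch of how a process might put tasks to work. The poller process, its action types, and the interval are purely illustrative assumptions; only the ```this.task``` calls reflect the API documented below.

```javascript
import { call } from 'redux-saga/effects'
import { delay } from 'redux-saga'
import { Process } from 'redux-saga-process'

export default class PollerProcess extends Process {

  static actionRoutes = {
    pollStart: 'startPolling', // POLL_START
    pollStop: 'stopPolling'    // POLL_STOP
  };

  * startPolling({ id, interval = 5000 }) {
    // Creating a task with the same category / id cancels the previous one,
    // so dispatching POLL_START again simply restarts that poller.
    yield* this.task.create('pollers', id, this.pollLoop, id, interval)
    yield* this.task.onComplete('pollers', id, 'pollDone', id)
  }

  * stopPolling({ id }) {
    if (id) {
      yield* this.task.cancel('pollers', id) // cancel one poller
    } else {
      yield* this.task.cancel('pollers')     // cancel the whole category
    }
  }

  * pollLoop(id, interval) {
    while (true) {
      /* fetch / refresh whatever `id` refers to here */
      yield call(delay, interval)
    }
  }

  * pollDone(id) {
    console.log('poller task completed:', id)
  }

}
```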
654 | 655 | Here are a few key points about the Task API: 656 | 657 | - Tasks are given a `category` and `id` when created 658 | - If a task with the same `category` and `id` is created, the previous is automatically cancelled. 659 | - You can cancel a task by its `category` and `id` 660 | - You can cancel all tasks in a `category` 661 | - You can cancel all tasks 662 | - You can register callbacks to occur when a task is complete 663 | 664 | > ***Note:*** This API is really a convenience wrapper around the ```redux-saga``` 665 | > [task](https://redux-saga.github.io/redux-saga/docs/api/index.html#task) system 666 | > implemented using their [fork](https://redux-saga.github.io/redux-saga/docs/api/index.html#forkfn-args) 667 | > feature. 668 | 669 | *** 670 | 671 | ### this.task.create(category, id) 672 | 673 | ```javascript 674 | const prop1 = 'foo', prop2 = 'bar' 675 | yield* this.task.create('category', 'taskID', this.myTask, prop1, prop2) 676 | ``` 677 | 678 | *** 679 | 680 | ### this.task.save(Task, category, id) 681 | 682 | ```javascript 683 | const task = yield fork([this, this.myTask], prop1, prop2) 684 | yield* this.task.save(task, 'category', 'taskID') 685 | ``` 686 | 687 | *** 688 | 689 | ### this.task.task(category, id) 690 | 691 | ```javascript 692 | const task = yield* this.task.task('category', 'taskID') 693 | const category = yield* this.task.task('category') 694 | const all = yield* this.task.task() 695 | ``` 696 | 697 | *** 698 | 699 | ### this.task.cancel(category, id) 700 | 701 | ```javascript 702 | yield* this.task.cancel('category', 'taskID') 703 | // or 704 | yield* this.task.cancel('category') 705 | ``` 706 | 707 | *** 708 | 709 | ### this.task.onComplete(category, id, callback, ...props) 710 | 711 | ```javascript 712 | * registerCallback() { 713 | const foo = 'foo', bar = 'bar' 714 | yield* this.task.onComplete('category', 'taskID', 'onComplete', foo, bar) 715 | } 716 | 717 | * onComplete(foo, bar) { 718 | // foo === 'foo', bar === 'bar' 719 | } 720 | ``` 721 | 722 | *** 723 | 724 | ### this.task.cancelAll() 725 | 726 | ```javascript 727 | yield* this.task.cancelAll() 728 | ``` 729 | 730 | ## Process Observables 731 | 732 | Used to handle "push" events rather than "pull" events. For example, we use 733 | this API heavily for handling [Firebase](https://www.firebase.com) real-time 734 | events within our processes. 735 | 736 | Observables include a buffer, a cancellable promise, and more. We will add 737 | more information about how these works as we can, but feel free to try them 738 | out! 739 | 740 | > More Information Coming Soon... 741 | 742 | ```javascript 743 | * observer(ref, type, id) { 744 | const { getNext, onData } = this.observable.create(id) 745 | const observerID = ref.on(type, onData) 746 | try { 747 | while (true) { 748 | const data = yield call(getNext) 749 | yield fork([this, this.processReceived], data) 750 | } 751 | } catch(error) { 752 | console.warn('Catch Called in Observer', error.message) 753 | } finally { 754 | if (yield cancelled()) { 755 | console.log('Observable Cancelled') 756 | ref.off(type, observerID) 757 | } 758 | } 759 | } 760 | ``` 761 | 762 | *** 763 | 764 | ### this.observable.create() 765 | 766 | # Reducer Generators 767 | 768 | Internally we use ```reducer generators``` to build reducers which reduce reducers. 769 | This allows us to filter actions into our methods efficiently and allows us to build much 770 | of the syntax we use for the static properties such as ```actionRoutes```. 
You can 771 | import these and use them elsewhere if desired. 772 | 773 | ```javascript 774 | import { reducerReducer, arrayMapReducer, objectMapReducer } from 'redux-saga-process/generators' 775 | ``` 776 | 777 | *** 778 | 779 | ### objectMapReducer(initialState, handlers, context) 780 | 781 | Reduces an object with types as their keys and reducers as their values. 782 | 783 | ```javascript 784 | const reducer = objectMapReducer( 785 | { foo: 'bar' }, 786 | { 787 | 'MY_TYPE': (state, action) => ({ 788 | ...state, 789 | key: action.value 790 | }), 791 | 'ANOTHER_TYPE': (state, action) => ({ 792 | ...state, 793 | another: action.value 794 | }) 795 | } 796 | ) 797 | ``` 798 | 799 | *** 800 | 801 | ### arrayMapReducer(initialState, reducerArray, context) 802 | 803 | Reduces an array of reducers. 804 | 805 | ```javascript 806 | const reducer = arrayMapReducer( 807 | { foo: 'bar' }, // initialState 808 | [ 809 | (state, action, context) => { 810 | /* reducer here */ 811 | return state 812 | }, 813 | (state, action, context) => { 814 | /* reducer here */ 815 | return state 816 | } 817 | ], 818 | { shared: 'data' } 819 | ) 820 | ``` 821 | 822 | *** 823 | 824 | ### reducerReducer(initialState, reducer, context) 825 | 826 | A reducer which reduces a reducer. The main purpose of this generator is to allow us 827 | to inject information into the reducer before executing. This is mostly used internally 828 | but you may find a use for it. 829 | 830 | ```javascript 831 | reducerReducer( 832 | { foo: 'bar' }, // initialState 833 | (state, action, context) => { 834 | switch(action.type) { 835 | case 'MY_TYPE': 836 | return { 837 | ...state, 838 | key: action.value 839 | } 840 | default: 841 | return state 842 | } 843 | }, 844 | { shared: 'data' } // context 845 | ) 846 | ``` 847 | 848 | *** 849 | 850 | ### Special Thanks & Inspirations 851 | 852 | - **Dan Abramov [@gaearon](https://github.com/gaearon)** - Because it's the cool thing to do to thank him and obviously because of his endless contributions to the community including [redux](https://github.com/reactjs/redux) which is what this package is based upon (obviously). 853 | - **Yassine Elouafi [@yelouafi](https://github.com/yelouafi) / [@redux-saga](https://github.com/redux-saga)** - For bringing us [redux-sagas](https://github.com/redux-saga/redux-saga) and for indirectly inspiring the process concept while assisting us with questions. 854 | - **Steve Kellock [@skellock](https://github.com/skellock)** - [reduxsauce](https://github.com/skellock/reduxsauce) - Originally we used reduxsauce to handle some of the handling of data. Many parts of this package are heavily inspired by the great package they have provided! 
-------------------------------------------------------------------------------- /dist/main.js: -------------------------------------------------------------------------------- 1 | !function(e,r){"object"==typeof exports&&"object"==typeof module?module.exports=r(require("lodash"),require("redux-saga/effects"),require("to-redux-type"),require("invoke-if"),require("react"),require("saga-task-manager"),require("hoist-non-react-statics"),require("reducer-generator-object-map"),require("reducer-generator-array-map"),require("reducer-generator-reducer"),require("reducer-generator-wildcard"),require("redux-saga"),require("wildcard-utils"),require("promise-map-es6")):"function"==typeof define&&define.amd?define("redux-saga-process",["lodash","redux-saga/effects","to-redux-type","invoke-if","react","saga-task-manager","hoist-non-react-statics","reducer-generator-object-map","reducer-generator-array-map","reducer-generator-reducer","reducer-generator-wildcard","redux-saga","wildcard-utils","promise-map-es6"],r):"object"==typeof exports?exports["redux-saga-process"]=r(require("lodash"),require("redux-saga/effects"),require("to-redux-type"),require("invoke-if"),require("react"),require("saga-task-manager"),require("hoist-non-react-statics"),require("reducer-generator-object-map"),require("reducer-generator-array-map"),require("reducer-generator-reducer"),require("reducer-generator-wildcard"),require("redux-saga"),require("wildcard-utils"),require("promise-map-es6")):e["redux-saga-process"]=r(e.lodash,e["redux-saga/effects"],e["to-redux-type"],e["invoke-if"],e.react,e["saga-task-manager"],e["hoist-non-react-statics"],e["reducer-generator-object-map"],e["reducer-generator-array-map"],e["reducer-generator-reducer"],e["reducer-generator-wildcard"],e["redux-saga"],e["wildcard-utils"],e["promise-map-es6"])}(this,function(e,r,t,n,a,o,c,i,s,u,f,l,p,d){return function(e){function r(n){if(t[n])return t[n].exports;var a=t[n]={i:n,l:!1,exports:{}};return e[n].call(a.exports,a,a.exports,r),a.l=!0,a.exports}var t={};return r.m=e,r.c=t,r.d=function(e,t,n){r.o(e,t)||Object.defineProperty(e,t,{configurable:!1,enumerable:!0,get:n})},r.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(t,"a",t),t},r.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},r.p="",r(r.s=8)}([function(e,r,t){"use strict";function n(e){return"function"==typeof e&&Object.getPrototypeOf(e)&&Object.getPrototypeOf(e).isProcess===d}function a(e){var r;return o?(r=i.killAllTaskManagers(),o.set("hot",!0)):o=new Map(v()),c=i.createTaskManager("root",{name:"ROOT",log:e.log}),l.set("config",g(e)),r}t.d(r,"d",function(){return y}),t.d(r,"f",function(){return u}),t.d(r,"e",function(){return f}),t.d(r,"a",function(){return l}),t.d(r,"h",function(){return o}),t.d(r,"g",function(){return c}),t.d(r,"k",function(){return a}),t.d(r,"i",function(){return h}),t.d(r,"j",function(){return n}),t.d(r,"b",function(){return p}),t.d(r,"c",function(){return d});var o,c,i=t(11),s=(t.n(i),Object.assign||function(e){for(var r,t=1;t "+f+"."+b+" | is not a function");case 18:p=!0,i.next=8;break;case 21:i.next=27;break;case 23:i.prev=23,i.t1=i.catch(6),d=!0,y=i.t1;case 27:i.prev=27,i.prev=28,!p&&v.return&&v.return();case 30:if(i.prev=30,!d){i.next=33;break}throw y;case 33:return i.finish(30);case 34:return i.finish(27);case 35:case"end":return i.stop()}},i[0],this,[[6,23,27,35],[28,,30,34]])}r.a=n;var a=t(0),o=t(33),c=t(35),i=[n].map(regeneratorRuntime.mark)},function(e,r,t){"use strict";function 
n(e){if(Array.isArray(e)){for(var r=0,t=Array(e.length);r "+s+"."+n+" | uncaught error: "+o.t0.message);case 9:case"end":return o.stop()}},o[0],this,[[0,5]])}r.a=n;var a=t(2),o=(t.n(a),[n].map(regeneratorRuntime.mark))},function(e,r,t){"use strict";function n(e,r){return regeneratorRuntime.wrap(function(t){for(;;)switch(t.prev=t.next){case 0:if("function"!=typeof r.schema.instance.shouldProcessCancel){t.next=6;break}return t.next=3,a.call([r.schema.instance,r.schema.instance.shouldProcessCancel],e);case 3:return t.abrupt("return",t.sent);case 6:return t.abrupt("return",!0);case 7:case"end":return t.stop()}},o[0],this)}r.a=n;var a=t(2),o=(t.n(a),[n].map(regeneratorRuntime.mark))},function(e,r,t){"use strict";function n(e,r){var t,n,u;return regeneratorRuntime.wrap(function(s){for(;;)switch(s.prev=s.next){case 0:if(t=e.processID,n=e.schema,s.prev=1,!(n.instance&&"function"==typeof n.instance.processStarts)){s.next=10;break}return u=c.a(n.startOnAction,i({},r,{parseObject:"matches"})),s.next=6,o.take(u);case 6:return s.next=8,o.call([a.g,a.g.create],t,"processStarts",[n.instance,n.instance.processStarts],t);case 8:s.next=11;break;case 10:console.warn("[saga-process-manager]: You specified an await startup action but * processStarts does not exist. This is an anti-pattern. ",t);case 11:s.next=16;break;case 13:s.prev=13,s.t0=s.catch(1),console.error("Error while awaiting startup: ",s.t0.message);case 16:return s.prev=16,s.next=19,o.cancelled();case 19:if(!s.sent){s.next=21;break}console.warn("Process Cancelled while awaiting Startup ",t);case 21:return s.finish(16);case 22:case"end":return s.stop()}},s[0],this,[[1,13,16,22]])}r.a=n;var a=t(0),o=t(2),c=(t.n(o),t(4)),i=Object.assign||function(e){for(var r,t=1;t state[id] 23 | ], 24 | getCounters: [ state => state ] 25 | }; 26 | 27 | static reducer = { 28 | [INCREMENT]: (state, { id = '_default', by = 1 }) => ({ 29 | ...state, 30 | [id]: ( state[id] || 0 ) + by 31 | }), 32 | [DECREMENT]: (state, { id = '_default', by = 1 }) => ({ 33 | ...state, 34 | [id]: ( state[id] || 0 ) - by 35 | }), 36 | [RESET]: (state, { id = '_default' }) => ({ 37 | ...state, 38 | [id]: 0 39 | }) 40 | }; 41 | 42 | static actionRoutes = { 43 | [INCREMENT]: 'log', 44 | [DECREMENT]: 'log', 45 | [RESET]: 'log', 46 | [LOGGING]: 'setLogging' 47 | }; 48 | 49 | * setLogging({ enabled }) { 50 | this.logging !== enabled && 51 | console.info(`Logging has been ${ enabled ? 
'ENABLED' : 'DISABLED' }`) 52 | this.logging = enabled 53 | } 54 | 55 | * log({ id = 'default' }) { 56 | this.logging && 57 | console.log(`[LOG] COUNTER ${id}: ${yield* this.select('getCounter')}`) 58 | } 59 | 60 | /* 61 | Console should show: 62 | ------------------------ 63 | Logging has been ENABLED 64 | [LOG] COUNTER default: 0 65 | [LOG] COUNTER default: 1 66 | [LOG] COUNTER default: 11 67 | [LOG] COUNTER myCounter: 11 68 | Counters are: { _default: 11, myCounter: 5 } 69 | */ 70 | * processStarts() { 71 | // Turn on the logging programmatically 72 | yield* this.dispatch('logging', true) 73 | // Set the Default Counter to 0 (reset) 74 | yield* this.dispatch('reset') 75 | // Increment by 1 using a standard yield put: 76 | yield put({ type: INCREMENT }) 77 | // Increment using this.dispatch / actionCreators 78 | yield* this.dispatch('increment', 10) 79 | // Set a counter with an id 80 | yield* this.dispatch('increment', 5, 'myCounter') 81 | // Get all counters and output them 82 | const counters = yield* this.select('getCounters') 83 | console.log('Counters are: ', counters) 84 | } 85 | 86 | } 87 | -------------------------------------------------------------------------------- /examples/counter/types.js: -------------------------------------------------------------------------------- 1 | export const INCREMENT = 'INCREMENT'; 2 | export const DECREMENT = 'DECREMENT'; 3 | export const RESET = 'RESET'; 4 | export const LOGGING = 'LOGGING'; 5 | export const EXPR = 'EXPR'; 6 | -------------------------------------------------------------------------------- /examples/firebase/firebaseListeners.js: -------------------------------------------------------------------------------- 1 | /* global google */ 2 | import { Process } from 'redux-saga-process' 3 | 4 | import startListeners from './sagas/defaultListeners' 5 | 6 | import { call, apply, put, take, fork } from 'redux-saga/effects' 7 | import { delay } from 'redux-saga' 8 | import _ from 'lodash' 9 | 10 | /* 11 | Handles listening to, managing, and dispatching the data associated with 12 | firebase listeners. 13 | 14 | yield put({ 15 | type: 'FIREBASE_LISTENERS', 16 | category: 'projects', 17 | path: 'projects', 18 | events: [ 'once', 'child_added', 'child_changed', 'child_removed' ] 19 | }) 20 | 21 | yield put({ 22 | type: 'FIREBASE_CANCEL', 23 | category: 'projects', 24 | }) 25 | 26 | yield put({ 27 | type: 'FIREBASE_CANCEL', 28 | category: 'projects', 29 | event: 'child_changed' 30 | }) 31 | 32 | yield put({ 33 | type: 'FIREBASE_CANCEL', 34 | category: 'projects', 35 | events: [ 'child_changed', 'child_removed' ] 36 | }) 37 | 38 | */ 39 | export default class FirebaseGettersProcess extends Process { 40 | 41 | ref = undefined 42 | 43 | // This allows us to hot reload our state! 
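  // (Per the "Hot Reloading Processes" section of the README, the previous
  //  instance's state - if any - is handed to this constructor, and merging
  //  it below with `...state` lets our listener bookkeeping survive reloads.)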
44 | constructor(config, state) { 45 | super(config, state) 46 | this.state = { 47 | listeners: { 48 | // 'category': { 49 | // 'keys': [] 50 | // } 51 | }, 52 | ...state 53 | } 54 | } 55 | 56 | static config = { ssr: false, reduces: 'db' }; 57 | 58 | static initialState = { 59 | projects: {} 60 | }; 61 | 62 | // Composed Private Selectors 63 | static selectors = { 64 | '!ref': [ state => state.user, user => user.firebase.ref ] 65 | }; 66 | 67 | static actionRoutes = { 68 | firebaseReady: 'ready', 69 | firebaseListener: 'startListener', 70 | firebaseListeners: 'startListeners', 71 | firebaseCancel: 'cancelListeners' 72 | }; 73 | 74 | static actions = { 75 | '!firebaseData': null, 76 | '!firebaseRemove': ['category'], 77 | firebaseListeners: null, 78 | firebaseListener: null, 79 | firebaseCancel: ['category', 'events'] 80 | }; 81 | 82 | static reducer = { 83 | firebaseData: ( state, { category, event, method, value, key } ) => { 84 | switch(event) { 85 | case 'value': { 86 | return { 87 | ...state, 88 | [category]: value 89 | } 90 | } 91 | case 'child_removed': { 92 | return { 93 | ...state, 94 | [category]: _.omit(state[category], [ key ]) 95 | } 96 | } 97 | case 'child_changed': 98 | case 'child_added': { 99 | return { 100 | ...state, 101 | [category]: { 102 | ...state[category], 103 | [key]: value 104 | } 105 | } 106 | } 107 | } 108 | return state 109 | }, 110 | firebaseRemove: ( state, { category } ) => _.omit(state, [ category ]) 111 | }; 112 | 113 | // Saves our Firebase ref so that we can use it later to create children 114 | // refs. 115 | * ready({ ref }) { 116 | if ( ! ref && ! ( yield* this.getRef() ) ) { 117 | console.error('Firebase Ready was indicated but no ref was discovered') 118 | return 119 | } 120 | yield* startListeners(this.dispatch) 121 | } 122 | 123 | * getRef() { 124 | const _ref = yield* this.select('ref') 125 | if ( ! _ref ) { 126 | throw new Error('[Firebase Listeners Process] | Ref Unknown, Starting Listeners Not Possible') 127 | return 128 | } 129 | return _ref 130 | } 131 | 132 | // child_removed , child_added , child_changed , value 133 | * startListeners({ path, events, ...action }) { 134 | if ( ! Array.isArray(events) ) { 135 | console.error('Invalid Listeners Requested: ', path, events, action) 136 | } 137 | let paths 138 | if ( Array.isArray(path) ) { 139 | paths = path 140 | } else { paths = [ path ] } 141 | for ( let path of paths ) { 142 | 143 | let category 144 | if ( ! action.category ) { 145 | category = path 146 | } else { category = action.category } 147 | 148 | yield* this.createListenerCategory(category) 149 | 150 | if ( events.includes('once') ) { 151 | this.state.listeners[category].once = true 152 | yield fork([ this, this.startListener ], { 153 | path, category, event: 'value', method: 'once' 154 | }) 155 | yield take('FIREBASE_DATA') 156 | } 157 | 158 | for ( let event of events ) { 159 | if ( event === 'once' ) { continue } 160 | yield fork([ this, this.startListener ], { 161 | path, category, event 162 | }) 163 | } 164 | } 165 | } 166 | 167 | * startListener({ path, category, event, method = 'on' }) { 168 | if ( ! path || ! 
event ) { 169 | console.error('Realtime Listener Request Error: Missing Parameters: ', path, category, event, method) 170 | return 171 | } 172 | const _ref = yield* this.getRef() 173 | const ref = _ref.child(path) 174 | 175 | yield* this.task.create(category || path, `${method}_${event}`, this.observe, ref, category || path, event, method) 176 | } 177 | 178 | * cancelListeners({ category, event, events, method = 'on' }) { 179 | if ( event !== undefined ) { 180 | yield* this.task.cancel(category, `${method}_${event}`) 181 | } else if ( events !== undefined ) { 182 | // we actually allow events to be a string to specify a single event 183 | if ( typeof events === 'string' ) { 184 | yield* this.task.cancel(category, `${method}_${event}`) 185 | } else if ( Array.isArray(events) ) { 186 | for ( let event of events ) { 187 | if ( event === 'once' ) { continue } 188 | yield* this.task.cancel(category, `${method}_${event}`) 189 | } 190 | } else { console.warn('Cancellation Failure: "events" is defined but its not a string or array') } 191 | } else { 192 | yield* this.task.cancel(category) 193 | } 194 | } 195 | 196 | * observe(ref, category, event, method = 'on') { 197 | // console.log('Observe: ', method, event) 198 | const { getNext, onData, onCancel } = this.observable.create(`${category}::${method}_${event}`) 199 | const observerID = ref[method](event, onData, onCancel) 200 | try { 201 | while (true) { 202 | const data = yield call(getNext) 203 | yield fork([ this, this.handleReceived ], category, event, method, ...data.values) 204 | if ( method === 'once' ) { break } 205 | } 206 | } catch(error) { 207 | console.warn('Catch Called in Observer [System Comm]', error.message) 208 | } finally { 209 | if (yield onCancel()) { 210 | console.log('Firebase Listener Cancelled: ', category, method, event) 211 | } else { 212 | console.log('Error or Other Issue in Firebase Listeners Process?', category, method) 213 | } 214 | if ( method !== 'once' ) { ref.off(event, observerID) } 215 | } 216 | } 217 | 218 | * createListenerCategory(category) { 219 | if ( ! this.state.listeners[category] ) { 220 | this.state.listeners[category] = { 221 | keys: [] 222 | } 223 | return true 224 | } else { return false } 225 | } 226 | 227 | * removeKeyFromCategory(category, key) { 228 | _.pull(this.state.listeners[category].keys, key) 229 | if ( this.state.listeners[category].keys.length === 0 ) { 230 | // No more keys included - listener was removed? 231 | delete this.state.listers[category] 232 | return true 233 | } else { return false } 234 | } 235 | 236 | * addKeyToCategory(category, key) { 237 | yield* this.createListenerCategory(category) 238 | if ( ! this.state.listeners[category].keys.includes(key) ) { 239 | this.state.listeners[category].keys.push(key) 240 | return true 241 | } else { return false } 242 | } 243 | 244 | * setKeysToCategory(category, keys) { 245 | yield* this.createListenerCategory(category) 246 | this.state.listeners[category].keys = keys 247 | return true 248 | } 249 | 250 | * handleReceived(category, event, method, ...received) { 251 | //console.log('Firebase Receives: ', category, event, method) 252 | const [ snapshot, ...args ] = received 253 | let update, category_removed, value, key = snapshot.key 254 | switch(event) { 255 | case 'value': { 256 | if ( snapshot.exists() === false ) { 257 | // The request has no actual data! Removing 258 | category_removed = yield* this.removeKeyFromCategory(category, key) 259 | if ( ! 
category_removed ) { 260 | update = true 261 | } 262 | } else if ( snapshot.hasChildren() ) { 263 | // We have nested keys to add 264 | value = snapshot.val() 265 | yield* this.setKeysToCategory(category, Object.keys(value)) 266 | update = true 267 | } else { 268 | // This is a direct value! 269 | this.state.listeners[category].keys = [ key ] 270 | update = true 271 | } 272 | break 273 | } 274 | case 'child_added': { 275 | // We only want to update the data if we don't already have the child. 276 | // This is because we use 'once' to read the data and update it initially 277 | // then use child_added to add new keys from there. 278 | update = yield* this.addKeyToCategory(category, key) 279 | break 280 | } 281 | case 'child_changed': { 282 | yield* this.addKeyToCategory(category, key) 283 | update = true 284 | break 285 | } 286 | case 'child_removed': { 287 | // remove the key from the list of keys 288 | category_removed = yield* this.removeKeyFromCategory(category, key) 289 | if ( ! category_removed ) { 290 | update = true 291 | } 292 | break 293 | } 294 | } 295 | 296 | if ( category_removed === true ) { 297 | if ( ! value ) { value = snapshot.val() } 298 | yield* this.dispatch('firebaseRemove', category) 299 | } else if ( update === true ) { 300 | if ( ! value ) { value = snapshot.val() } 301 | yield* this.dispatch('firebaseData', { 302 | category, event, method, key, value 303 | }) 304 | } 305 | 306 | } 307 | 308 | } 309 | 310 | -------------------------------------------------------------------------------- /examples/firebase/firebaseStartup.js: -------------------------------------------------------------------------------- 1 | /* global google */ 2 | import Process from 'redux-saga-process' 3 | import { call, fork } from 'redux-saga/effects' 4 | 5 | /* 6 | Wraps import in a promise that can be resolved by sagas since 7 | call(import, './path') would cause an error. This allows us 8 | to dynamically import the package. Using webpack 2 this means 9 | that the package will be code split. 10 | */ 11 | function* ImportPackage(path) { 12 | const importPromise = new Promise(resolve => 13 | import(`./import/${path}`).then(module => resolve(module)) 14 | ) 15 | const awaitPromise = () => Promise.resolve(importPromise) 16 | return awaitPromise 17 | } 18 | 19 | export default class FirebaseStartupProcess extends Process { 20 | 21 | // Our Firebase Token & Module 22 | initialized = false 23 | token = undefined 24 | firebase = undefined 25 | authorizer = undefined 26 | dbRef = undefined 27 | 28 | /* We don't want to run firebase when we are server-rendered, we reduce the "user" key */ 29 | static config = { ssr: false, reduces: 'user' }; 30 | 31 | // Composed Selectors, these are private when prefixed with ! (only accessible by the process 32 | // and not exported for others to use with the [statics] package). These are memoized and 33 | // can be exported to be used by components as-needed. 34 | static selectors = { 35 | '!user': [ user => user ], 36 | '!fireToken': [ user => user.auth.fireToken ], 37 | '!fireConfig': [ user => user.firebase.config ] 38 | }; 39 | 40 | /* 41 | These will listen for redux actions and trigger the given sagas when they occur with the 42 | actions payload. 43 | 44 | They are converted to SCREAMING_SNAKE_CASE from screamingSnakeCase 45 | */ 46 | static actionRoutes = { 47 | authSuccess: 'authenticated', 48 | authLogout: 'logout' 49 | }; 50 | 51 | /* 52 | These help us to create actions which can be dispatched using [this.dispatch('firebaseReady', ref)]. 
If 53 | null, then we expect an object to dispatch with the event, otherwise they are added by the keys in the array. 54 | */ 55 | static actions = { 56 | firebaseReady: [ 'ref' ] 57 | }; 58 | 59 | /* 60 | Our reducer for this process. Reducers are merged among all processes and handled in the order they are 61 | registered. This allows multiple smaller processes to reduce the same key in the state. We generate the 62 | reducers in a way that they are efficiently parsed based on the registered types for each process. 63 | 64 | Note: this is using the object filter reducer style. 65 | */ 66 | static reducer = { 67 | firebaseReady: ( state, action ) => ({ 68 | ...state, 69 | firebase: { 70 | ...state.firebase, 71 | ref: action.ref 72 | } 73 | }) 74 | }; 75 | 76 | /* 77 | Called whenever AUTH_SUCCESS is dispatched. 78 | */ 79 | * authenticated(action) { 80 | if ( this.firebase === undefined ) { 81 | // When we are authenticated, if firebase is not defined in the process then we will 82 | // import the module (and use code splitting via webpack 2) to download the package. 83 | yield* this.task.create('init', 'firebase', this.importer, 'firebase', 'firebase.js') 84 | } 85 | } 86 | 87 | /* Logout of Firebase when the user logs out */ 88 | * logout(action) { 89 | if ( this.authorizer ) { 90 | this.authorizer.signOut() 91 | } 92 | this.ready = false 93 | } 94 | 95 | /* 96 | Dynamically load the firebase library using code-splitting. This way 97 | we can wait to load the library until needed. 98 | */ 99 | * importer(what, path) { 100 | const importer = yield* ImportPackage(path) 101 | switch(what) { 102 | case 'firebase': { 103 | this.firebase = ( yield call(importer) ).default 104 | yield* this.initializeFirebase() 105 | break 106 | } 107 | } 108 | } 109 | 110 | /* Once we have received the firebase module we will build it and authenticate. */ 111 | * initializeFirebase() { 112 | 113 | if ( ! this.initialized ) { 114 | // Grab our firebase configuration from the redux state and initialize the app. 115 | const config = yield* this.select('fireConfig') 116 | this.firebase.initializeApp(config) 117 | } 118 | 119 | // Create the authorizer 120 | if ( ! this.authorizer ) { this.authorizer = this.firebase.auth() } 121 | 122 | // Login with our Custom Token (which will be in the redux state) 123 | yield* this.provideToken() 124 | 125 | // Setup our initial dbRef 126 | this.dbRef = this.firebase.database().ref().child('ui') 127 | 128 | // Setup a task that will continually watch the onAuthStateChanged and handle the results 129 | // since this is a task, if another watcher is already running for some reason, it will 130 | // automatically be cancelled before running the new watcher. 131 | yield* this.task.create('init', 'watchAuth', this.watcher, 'onAuthStateChanged') 132 | 133 | } 134 | 135 | * watcher(evt) { 136 | /* 137 | An observable allows us to easily receive events continually and remain within our cancellable 138 | saga context. An observable returns onData which adds to the buffer, getNext which gets the 139 | next value from the buffer (or waits for the next), and onCancel which handles cancellations 140 | */ 141 | const { getNext, onData, onCancel } = this.observable.create(evt) 142 | 143 | // Call the Firebase authorizer with the given event (onAuthStateChanged), trigger an addition 144 | // to our buffer each time data is available. 145 | const cancelObserver = this.authorizer[evt](onData) 146 | 147 | try { 148 | // We will continually listen for new events until cancelled. 
149 | while (true) { 150 | // yield call(getNext) will pause execution until a new value is available in our 151 | // observable buffer. 152 | const data = yield call(getNext) 153 | // Fork the received data to our processor then wait for the next value in the buffer. 154 | yield fork([ this, this.authStateChanged ], ...data.values) 155 | } 156 | } catch(error) { 157 | // Lets jsut let us know for now when an error occurs. When an error occurs 158 | // we will automatically cleanup and cancel the observer (but we may simply want 159 | // to re-sschedule the observer as well in this case). 160 | console.warn('Catch Called in Observer [Firebase Startup]', error.message) 161 | } finally { 162 | // We call yield onCancel() to check if we reached here due to cancellation. 163 | if (yield onCancel()) { 164 | console.log('Firebase Init Observer Cancelled: ', evt) 165 | } 166 | // Cancel the Firebase Observer. 167 | cancelObserver() 168 | } 169 | } 170 | 171 | // When auth state changes will receive or null 172 | // https://firebase.google.com/docs/reference/node/firebase.User 173 | * authStateChanged(user) { 174 | // Get some values from our redux store to help with handling the auth state 175 | const { email, userIdentityID } = yield* this.select('user') 176 | 177 | if ( user === null ) { 178 | // User is no longer authenticated 179 | yield* this.logout() 180 | } else { 181 | 182 | if ( email && ! user.email ) { 183 | // Update the user email with firebase if we have one but the user object does not. 184 | if ( email ) { user.updateEmail(email).then(() => console.info('Updated Email to: ', email)) } 185 | } else if ( ! email ) { 186 | // Email not known? Don't accept this user as we can not validate it 187 | } else if ( email !== user.email ) { 188 | // TO DO: Handle this situation - should we logout the user? 189 | } 190 | 191 | if ( ! this.ready ) { 192 | // If we haven't authenticated yet, create the users ref and dispatch our FIREBASE_READY 193 | // action so that any other processes, sagas, or components can begin using the ref. 194 | const ref = this.dbRef.child(`dealers/${userIdentityID}`) 195 | yield* this.dispatch('firebaseReady', ref) 196 | } 197 | this.ready = true 198 | } 199 | } 200 | 201 | // https://firebase.google.com/docs/reference/node/firebase.auth.Auth 202 | * provideToken(retry = true) { 203 | // Select our custom token from our redux store 204 | const fireToken = yield* this.select('fireToken') 205 | if ( ! fireToken ) { throw new Error('Tried to Authenticate with Firebase but a Firebase Token was not found' ) } 206 | try { 207 | yield call(::this.authorizer.signInWithCustomToken, fireToken) 208 | } catch (e) { 209 | yield fork([ this, this.handleFirebaseError ], e, retry) 210 | } 211 | } 212 | 213 | * handleFirebaseError(e, retry) { 214 | const { code, message } = e 215 | switch(code) { 216 | case 'auth/invalid-custom-token': { 217 | // Thrown if the custom token format is incorrect. 218 | // This could also mean that the auth token has expired and 219 | // must be renewed. 220 | console.error('Failed to Login to the Realtime Database: ', message) 221 | break 222 | } 223 | case 'auth/custom-token-mismatch': { 224 | // Thrown if the custom token is for a different Firebase App. 
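          // (Nothing is retried here; a custom token generated for this
          //  app's configuration would be needed before signing in again.)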
225 | break 226 | } 227 | default: { 228 | console.warn('An Unknown Realtime Database Error has Occurred!') 229 | console.error(code, message) 230 | break 231 | } 232 | } 233 | } 234 | 235 | } 236 | 237 | -------------------------------------------------------------------------------- /examples/firebase/import/firebase.js: -------------------------------------------------------------------------------- 1 | import firebase from 'firebase/app' 2 | import 'firebase/auth' 3 | import 'firebase/database' 4 | 5 | export default firebase -------------------------------------------------------------------------------- /examples/firebase/sagas/defaultListeners.js: -------------------------------------------------------------------------------- 1 | import { put } from 'redux-saga/effects' 2 | 3 | /* 4 | The default listeners that we want to setup 5 | */ 6 | function* startListeners(dispatch) { 7 | 8 | /* We want to listen to all our projects continually */ 9 | yield* dispatch('firebaseListeners', { 10 | path: 'projects', 11 | events: [ 'once', 'child_added', 'child_changed', 'child_removed' ] 12 | }) 13 | 14 | 15 | } 16 | 17 | export default startListeners -------------------------------------------------------------------------------- /examples/networkMonitor/networkMonitor.js: -------------------------------------------------------------------------------- 1 | import Process from 'redux-saga-process' 2 | 3 | import { call, fork } from 'redux-saga/effects' 4 | 5 | export default class NetworkMonitorProcess extends Process { 6 | 7 | /* We do not want this to be server rendered and should reduce "app" */ 8 | static config = { reduces: 'app', ssr: false }; 9 | 10 | /* The initialState of our "app" reducer */ 11 | static initialState = { 12 | network: { online: undefined } 13 | }; 14 | 15 | /* Our "app" reducer itself */ 16 | static reducer = { 17 | networkStatus: ( state, action ) => ({ 18 | ...state, 19 | network: { 20 | ...state.network, 21 | online: action.online !== undefined 22 | ? action.online 23 | : state.network.online || false 24 | } 25 | }) 26 | }; 27 | 28 | /* Actions to update our network status in our "app" reducer */ 29 | static actions = { 30 | networkStatus: [ 'online' ] 31 | }; 32 | 33 | /* 34 | We update our "app" state with the latest value of navigator.onLine. This 35 | is currently only called on startup. 36 | */ 37 | * updateNetworkStatus() { 38 | const online = navigator && navigator.onLine === true 39 | yield* this.dispatch('networkStatus', online) 40 | } 41 | 42 | /* 43 | Our Observer will be called and create the observable. An observable 44 | is create a cancellable promise then creating a queue of responses when 45 | the callback is received. This allows us to stay within the saga pattern 46 | while handling "push" style events as well as allows us to cancel the 47 | promises if we need to (in the situation that our task or process are cancelled). 
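    A rough illustration of the contract (names here are only for illustration):

      const { getNext, onData } = this.observable.create('clicks')
      window.addEventListener('click', onData)
      const next = yield call(getNext) // resolves with { values, name, queued }

    onData may fire many times in a row; the values queue up internally and each
    yield call(getNext) drains exactly one entry, so no events are lost between
    iterations of the loop below.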
48 | */ 49 | * observer(event, type) { 50 | const { getNext, onData, onCancel } = this.observable.create(type) 51 | const observerID = window.addEventListener(event, onData) 52 | try { 53 | while (true) { 54 | const data = yield call(getNext) 55 | yield fork([this, this.processReceived ], data, type) 56 | } 57 | } catch(error) { 58 | console.warn('Catch Called in Observer [Watchdog]', error.message) 59 | } finally { 60 | if (yield onCancel()) { 61 | console.log('Observer Cancelled: ', type) 62 | } 63 | observerID.removeEventListener(event, onData) 64 | } 65 | } 66 | 67 | /* 68 | Whenever a new event is available from our observable we will receive 69 | it and dispatch the relevant event to our application for rendering. 70 | */ 71 | * processReceived(data, type) { 72 | const { values = [] } = data 73 | for ( let value of values ) { 74 | switch(type) { 75 | case 'networkOnline': 76 | value.type && ( yield* this.dispatch('networkStatus', value.type === 'online' ) ) 77 | break 78 | case 'networkOffline': 79 | value.type && ( yield* this.dispatch('networkStatus', value.type === 'online' ) ) 80 | break 81 | } 82 | } 83 | } 84 | 85 | /* 86 | Called when our process is started for the first time. We will first synchronously 87 | update our store with the current navigator.onLine value then we will create two forked 88 | tasks which will monitor the online/offline events and update our store accordingly. 89 | 90 | Any new events we want to monitor can be added here easily. 91 | */ 92 | * processStarts() { 93 | yield* this.updateNetworkStatus() 94 | yield* this.task.create('network', 'online', this.observer, 'online', 'networkOnline') 95 | yield* this.task.create('network', 'offline', this.observer, 'offline', 'networkOffline') 96 | } 97 | 98 | } 99 | 100 | -------------------------------------------------------------------------------- /examples/productFetcher/productFetcher.js: -------------------------------------------------------------------------------- 1 | import Process from 'redux-saga-process' 2 | 3 | import { call } from 'redux-saga/effects' 4 | 5 | /* 6 | ProductFetcherProcess 7 | This is a fairly straight forward example of how you might build a 8 | process which is used to handle fetching product data from a server, 9 | processing the results, optionally modify / normalize it, and dispatch 10 | the results to our UI (which would be connected to the products key within 11 | our store). 12 | 13 | We can store any data which does not pertain to our UI internally and only 14 | dispatch a pure representation to the UI once it is ready. 15 | */ 16 | 17 | export default class ProductFetcherProcess extends Process { 18 | 19 | // this.history will contain data to throttle requests 20 | history = {}; 21 | 22 | // Set a throttleTimeout property for throttling fetch requests. 23 | throttleTimeout = 30000 24 | 25 | // Reduce the 'products' key within our store. 26 | static config = { reduces: 'products' }; 27 | 28 | // Reduce the product details state when product data is received 29 | static reducer = { 30 | productDetails: (state, action) => ({ 31 | ...state, 32 | [action.productSKU]: action.data 33 | }), 34 | removeProducts: (state, action) => ({}) 35 | }; 36 | 37 | // call * getProduct method whenever GET_PRODUCT is dispatched. 
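// For example (illustrative values), a connected component could dispatch
//   { type: GET_PRODUCT, productSKU: 'sku-123' }
// (GET_PRODUCT is exported from ./types) and the process will run getProduct
// below inside a forked task.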
38 | static actionRoutes = { 39 | getProduct: 'getProduct' 40 | }; 41 | 42 | // yield* this.dispatch('removeProducts') dispatches 43 | // { type: 'REMOVE_PRODUCTS' } 44 | // yield* this.dispatch('productDetails', productSKU, product) dispatches 45 | // { type: 'PRODUCT_DETAILS', productSKU: productSKU, data: product } 46 | static actions = { 47 | removeProducts: null, 48 | productDetails: [ 'productSKU', 'data' ], 49 | }; 50 | 51 | // Get the product asynchronously within our method (called within a fork by 52 | // the Process) and dispatch the product details when completed 53 | * getProduct({ type, productSKU, force, ...action }) { 54 | const now = Date.now() 55 | if ( 56 | ! force && this.history[productSKU] 57 | && now - this.history[productSKU] < this.throttleTimeout 58 | ) { 59 | /* Only attempt fetch at most once every 30 seconds unless force = true */ 60 | return 61 | } 62 | this.history[productSKU] = now 63 | const product = yield call(fetch, /* ...fetch args... */) 64 | if ( product && product.productSKU === productSKU ) { 65 | yield* this.dispatch('productDetails', productSKU, product) 66 | } 67 | } 68 | 69 | } 70 | -------------------------------------------------------------------------------- /examples/productFetcher/types.js: -------------------------------------------------------------------------------- 1 | export const GET_PRODUCT = 'GET_PRODUCT' 2 | export const PRODUCT_DETAILS = 'PRODUCT_DETAILS' 3 | export const REMOVE_PRODUCTS = 'REMOVE_PRODUCTS' -------------------------------------------------------------------------------- /examples/react-notification-system/notifications.js: -------------------------------------------------------------------------------- 1 | import Process from 'redux-saga-process' 2 | import { put } from 'redux-saga/effects' 3 | 4 | const DEFAULT_NOTIFICATION = { 5 | title: 'Attention!', 6 | message: '', 7 | level: 'info', 8 | autoDismiss: 15, 9 | position: 'br', 10 | } 11 | 12 | /* 13 | Accepts the redux action NOTIFICATION and creates the notification. 14 | */ 15 | export default class NotificationsProcess extends Process { 16 | 17 | // Our action can be imported with 'notifications' as the key 18 | static config = { pid: 'notifications' }; 19 | 20 | static exports = [ 'actions' ]; 21 | 22 | static actionRoutes = { 23 | notificationRef: 'ref', 24 | notification: 'notification' 25 | }; 26 | 27 | static actions = { 28 | notification: null 29 | }; 30 | 31 | // Save the ref when it is updated 32 | * ref({ ref }) { this.notifier = ref } 33 | 34 | // Dispatch a message if we have the ref to do so 35 | * notification(message) { 36 | if ( this.notifier ) { 37 | this.notifier.addNotification({ 38 | ...DEFAULT_NOTIFICATION, 39 | ...message 40 | }) 41 | } 42 | } 43 | 44 | } -------------------------------------------------------------------------------- /examples/reducerOnly/reducerOnly.js: -------------------------------------------------------------------------------- 1 | import Process from 'redux-saga-process' 2 | 3 | import { INCREMENT, DECREMENT, RESET } from './types' 4 | 5 | /* 6 | ReducerOnlyProcess 7 | This is an example to show how we can easily build a reducer which 8 | will automatically be added for us. This will create a reducer with 9 | 'counters' as its key. It will then start reducing INCREMENT, DECREMENT, 10 | and RESET as shown. 11 | 12 | While this probably is far less efficient than a standard reducer and does 13 | not really add any new features, it is just an example to show the simplicity 14 | and composability of the package.
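  For comparison, a roughly equivalent hand-written reducer would look like
  this sketch:

    const counters = (state = {}, { type, id = '_default', by = 1 }) => {
      switch (type) {
        case INCREMENT: return { ...state, [id]: (state[id] || 0) + by }
        case DECREMENT: return { ...state, [id]: (state[id] || 0) - by }
        case RESET:     return { ...state, [id]: 0 }
        default:        return state
      }
    }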
15 | 16 | */ 17 | 18 | export default class ReducerOnlyProcess extends Process { 19 | 20 | static config = { reduces: 'counters' }; 21 | 22 | static reducer = { 23 | [INCREMENT]: (state, { id = '_default', by = 1 }) => ({ 24 | ...state, 25 | [id]: ( state[id] || 0 ) + by 26 | }), 27 | [DECREMENT]: (state, { id = '_default', by = 1 }) => ({ 28 | ...state, 29 | [id]: ( state[id] || 0 ) - by 30 | }), 31 | [RESET]: (state, { id = '_default' }) => ({ 32 | ...state, 33 | [id]: 0 34 | }) 35 | }; 36 | 37 | } 38 | -------------------------------------------------------------------------------- /examples/reducerOnly/types.js: -------------------------------------------------------------------------------- 1 | export const INCREMENT = 'INCREMENT'; 2 | export const DECREMENT = 'DECREMENT'; 3 | export const RESET = 'RESET'; 4 | export const LOGGING = 'LOGGING'; 5 | export const EXPR = 'EXPR'; 6 | -------------------------------------------------------------------------------- /examples/reduxPersistor/reduxPersistor.js: -------------------------------------------------------------------------------- 1 | import Process from 'rsp/src/main' 2 | 3 | import { take, apply, select, put } from 'redux-saga/effects' 4 | import _ from 'lodash' 5 | 6 | export default class ArchiverProcess extends Process { 7 | 8 | // setup our local state that we will use to manage 9 | // the process. 10 | 11 | // has our store been rehydrated from redux-persist yet? 12 | rehydrated = false; 13 | // should we block the ability to purge our stores from 14 | // the outside? 15 | blockPurge = false; 16 | // saved with the controller for localForage if we 17 | // ever need to access its API directly. 18 | persistor = undefined; 19 | // A queue of data to be saved into the store when it 20 | // is busy or not yet ready. 21 | queue = []; 22 | 23 | // signify that we wish to reduce "archives" within the 24 | // redux store it is connected to. 25 | static config = { reduces: 'archives' }; 26 | 27 | // Our Processes reducer, used to define the pure representation 28 | // of the data which should be saved to the redux store. 29 | static reducer = { 30 | // Called internally to save data to the reducer. 31 | // Should never be called directly by any component. 32 | archiveData: (state, { type, ...action }) => ({ 33 | ...state, 34 | ...action 35 | }), 36 | // Purge the redux store. 37 | archivePurge: (state, { type, ...action }) => ({}) 38 | }; 39 | 40 | static actionRoutes = { 41 | // signify that rehydration (by redux-persist) has completed 42 | // when REHYDRATE_COMPLETE has been dispatched. 43 | rehydrateComplete: 'rehydrateComplete', 44 | // will save the data presented 45 | archiveSave: 'archiveSave', 46 | // receive AUTH_SUCESS and archive important data received. 47 | authSuccess: 'archiveSave', 48 | // purge any archives when the user logs out of their account. 49 | authLogout: 'purgeArchives', 50 | // provide a means to retrieve data from the archives by dispatching 51 | // ARCHIVE_GET. Allow the action to specify an action to dispatch 52 | // with the requested data when it is ready. 53 | archiveGet: 'archiveGet' 54 | }; 55 | 56 | static actions = { 57 | // Used to allow queueing before rehydration completes. 58 | archiveSave: null, 59 | // Used internally to save after rehydration. 60 | archiveData: null, 61 | archivePurge: null 62 | }; 63 | 64 | // when called, the execution frame will be paused until 65 | // rehydration has been completed by redux-persist. the 66 | // rest of the app continues its functionality in the meantime. 
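// as an illustration (the responseType and pick values here are made up),
// archiveGet below awaits rehydration first, so another saga can safely do:
//   yield put({ type: 'ARCHIVE_GET', responseType: 'SESSION_RESTORED', pick: ['session'] })
//   const { requested } = yield take('SESSION_RESTORED')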
67 | // 68 | // we utilize this saga so that when a request from any other part of 69 | // our application is made, we can easily make sure we wait to respond 70 | // until our stores have been refilled from the storage medium used. 71 | * awaitRehydration() { 72 | ! this.rehydrated && ( yield take('REHYDRATE_COMPLETE') ) 73 | this.rehydrated = true 74 | } 75 | 76 | // Once the rehydration has completed we will empty our queue and 77 | // allow any save requests into our reducer. 78 | * rehydrateComplete({ type, persistor }) { 79 | this.rehydrated = true 80 | this.persistor = persistor 81 | for ( let action of this.queue ) { 82 | yield apply(this, this.archiveData, [ action ]) 83 | } 84 | this.queue = [] 85 | } 86 | 87 | 88 | // Gets data from the archives. If we have not yet finished rehydrating 89 | // we will wait for it to complete before resolving. Responds by dispatching 90 | // the type defined by the "responseType" key. This way any other process 91 | // may make the request then wait for the response (optionally with a timeout). 92 | * archiveGet({ type, ...data }) { 93 | if ( ! data.responseType ) { 94 | throw new Error('To Get Archived Data you must provide a responseType') 95 | } 96 | yield* this.awaitRehydration() 97 | const response = yield* this.select() 98 | const requested = data.pick && _.pick(response, data.pick) || response 99 | yield put({ type: data.responseType, requested }) 100 | } 101 | 102 | // Receive requests to save data to our persistent store and process 103 | // them before saving them. If our stores have not yet been rehydrated 104 | // we add items to a queue and save them when ready in a FIFO fashion. 105 | * archiveSave({ type, ...data }) { 106 | if ( type !== 'ARCHIVE_SAVE' && ! data.archiveKey ) { return } 107 | if ( ! this.rehydrated ) { 108 | this.queue.push({ type, ...data }) 109 | } else if ( data ) { 110 | if ( data.archiveKey ) { 111 | const { archiveKey, ...rest } = data 112 | yield* this.dispatch('archiveData', { [archiveKey]: rest }) 113 | } else { 114 | yield* this.dispatch('archiveData', data) 115 | } 116 | } else { 117 | console.error('Tried to Save an Empty Object into Persistent State') 118 | } 119 | } 120 | 121 | * purgeArchives(action = {}) { 122 | const { type, ...data } = action 123 | yield* this.awaitRehydration() 124 | if ( this.persistor && ( type === 'AUTH_LOGOUT' || ! 
this.blockPurge || data.force ) ) { 125 | yield* this.dispatch('archivePurge') 126 | yield apply(this, this.persistor.purge, [ [ 'archives' ] ]) 127 | } 128 | } 129 | 130 | } -------------------------------------------------------------------------------- /examples/websocket/websocket.js: -------------------------------------------------------------------------------- 1 | import Process from 'redux-saga-process' 2 | 3 | import { call, apply, put, take, fork } from 'redux-saga/effects' 4 | import { delay } from 'redux-saga' 5 | 6 | function tryJSON(str) { 7 | let r 8 | try { 9 | return JSON.parse(str) 10 | } catch (e) { 11 | return 12 | } 13 | } 14 | 15 | function tryToJSON(str) { 16 | let r 17 | try { 18 | r = JSON.stringify(str); 19 | } catch (e) { return str } 20 | return r 21 | } 22 | 23 | export default class UserAuthProcess extends Process { 24 | 25 | WS = undefined 26 | schema = undefined 27 | reconnect = true 28 | 29 | // imported with the key "ws" (using [statics]), reduces key "system" (redux store) 30 | static config = { pid: 'ws', reduces: 'system' }; 31 | 32 | // We want to export our action creators so others can use them 33 | static exports = [ 'actions' ]; 34 | 35 | 36 | static initialState = { 37 | connected: false, 38 | }; 39 | 40 | static reducer = { 41 | systemConnect: (state, action) => ({ 42 | connected: false 43 | }), 44 | systemConnected: (state, action) => ({ 45 | ...state, 46 | lanip: action.lanip, 47 | connected: true 48 | }), 49 | systemDisconnected: (state) => ({ 50 | ...state, 51 | lanip: undefined, 52 | connected: false 53 | }), 54 | systemReceive: (state, { type, uuid, ...action }) => ({ 55 | ...state, 56 | ...action 57 | }) 58 | }; 59 | 60 | static actions = { 61 | systemRequest: ['request', 'data'], 62 | systemSend: ['payload'], 63 | systemConnect: null, 64 | systemDisconnect: null, 65 | // Our private actions we don't wish to be made public outside of the process. 66 | '!systemConnected': null, 67 | '!systemDisconnected': null, 68 | '!systemReceive': null, 69 | }; 70 | 71 | static actionRoutes = { 72 | systemConnect: 'connect', 73 | systemDisconnected: 'reconnect', 74 | systemDisconnect: 'disconnect', 75 | systemSend: 'send', 76 | systemRequest: 'request', 77 | }; 78 | 79 | * notify(message) { 80 | // Dispatching a notification to react-notification-system 81 | yield put({ 82 | type: 'NOTIFICATION', 83 | autoDismiss: 2, 84 | dismissable: false, 85 | ...message 86 | }) 87 | } 88 | 89 | * connect(schema = this.schema, ...args) { 90 | this.reconnect = true 91 | if ( schema !== this.schema ) { 92 | // New Connection! save it for reconnection purposes 93 | this.schema = { 94 | hostname: schema.hostname || schema.lanip || window.location.hostname, 95 | port: schema.port || 9001, 96 | protocol: schema.protocol || 'wss', 97 | reconnect: schema.reconnect || true 98 | } 99 | } 100 | this.WS = new WebSocket(`${this.schema.protocol}://${this.schema.hostname}:${this.schema.port}`) 101 | yield* this.notify({ 102 | title: 'Connecting', 103 | message: `Connecting to System: ${this.schema.hostname}` 104 | }) 105 | // Create a task, define what events to listen for, automatically cancel 106 | // any previous connections if they were made. 107 | yield* this.task.create( 108 | 'ws', 109 | 'events', 110 | this.ws_events, 111 | [ 'open', 'close', 'error', 'message' ], 112 | this.schema.hostname 113 | ) 114 | } 115 | 116 | * reconnect() { 117 | // Auto Reconnect? 
(Default) 118 | if ( this.reconnect === true && this.schema.reconnect === true ) { 119 | yield* this.disconnect() 120 | console.log('[WebSocket] | Retry Connection in 10 Seconds') 121 | yield call(delay, 10000) 122 | yield fork([ this, this.connect ]) 123 | } 124 | } 125 | 126 | * disconnect({ reconnect = false }) { 127 | this.reconnect = reconnect 128 | this.WS.close() 129 | } 130 | 131 | * send({ payload }) { 132 | this.WS.send( tryToJSON(payload) ) 133 | if ( payload.uuid ) { 134 | // Dispatch the UUID being sent for logging and external 135 | // integration purposes. 136 | yield put({ 137 | type: `SYSTEM_SEND_${payload.uuid.toUpperCase()}` 138 | }) 139 | } 140 | } 141 | 142 | // Shortcut for sending tagged payloads with uuid / event. 143 | * request({ uuid, request, event, data }) { 144 | if ( event ) { 145 | yield* this.send({ 146 | payload: { 147 | uuid: uuid || event, 148 | event, ...data 149 | } 150 | }) 151 | } else if ( request ) { 152 | yield* this.send({ 153 | payload: { 154 | uuid: uuid || request, 155 | request, ...data 156 | } 157 | }) 158 | } 159 | } 160 | 161 | * ws_events(events, type) { 162 | const { getNext, onData, onCancel } = this.observable.create(type) 163 | for ( let event of events ) { 164 | // When specific events are received, call our handler (below) 165 | this.WS[`on${event}`] = received => onData(event, received) 166 | } 167 | try { 168 | while (true) { 169 | const data = yield call(getNext) 170 | yield* this.ws_event(...data.values) 171 | } 172 | } catch(error) { 173 | console.warn('Catch Called in Observer [Websocket]', error.message) 174 | } finally { 175 | if (yield onCancel()) { 176 | // Cancelled! 177 | } 178 | yield* this.disconnect() 179 | } 180 | } 181 | 182 | * ws_event(event, received) { 183 | switch(event) { 184 | case 'open': { 185 | yield* this.notify({ 186 | title: 'Connected', 187 | message: 'Connected to System!', 188 | level: 'success' 189 | }) 190 | yield* this.dispatch('systemConnected', { 191 | lanip: this.schema.hostname 192 | }) 193 | break 194 | } 195 | case 'close': { 196 | yield* this.notify({ 197 | title: 'Disconnected', 198 | message: 'System Connection Lost!', 199 | level: 'error' 200 | }) 201 | yield* this.dispatch('systemDisconnected') 202 | break 203 | } 204 | case 'error': { 205 | yield* this.notify({ 206 | title: 'Disconnected', 207 | message: received.message, 208 | level: 'error' 209 | }) 210 | break 211 | } 212 | case 'message': { 213 | const data = tryJSON(received.data) 214 | if ( data ) { 215 | yield* this.dispatch(`systemReceive`, data) 216 | if ( data.uuid ) { 217 | // When a UUID is present, dispatch an empty action so that callers 218 | // can listen for responses to their requests in other sagas / processes. 219 | yield put({ 220 | type: `SYSTEM_RECEIVE_${data.uuid.toUpperCase()}` 221 | }) 222 | } 223 | } else { 224 | console.warn('Invalid Data Received From WebSocket? 
(Not JSON) ', received.data) 225 | } 226 | break 227 | } 228 | } 229 | } 230 | 231 | } 232 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "redux-saga-process", 3 | "version": "1.0.3", 4 | "description": "Processes for Redux-Saga", 5 | "main": "dist/redux-saga-process.js", 6 | "scripts": { 7 | "test": "./node_modules/.bin/mocha --require co-mocha", 8 | "prepublish": "npm run build", 9 | "build": "cross-env-shell BABEL_ENV=production NODE_ENV=production \"rimraf dist && webpack\"" 10 | }, 11 | "repository": { 12 | "type": "git", 13 | "url": "git+https://github.com/Dash-OS/redux-saga-process.git" 14 | }, 15 | "files": [ 16 | "src", 17 | "dist", 18 | "webpack.config.js", 19 | "README.md", 20 | "LICENSE" 21 | ], 22 | "directories": { 23 | "lib": "src/process-lib" 24 | }, 25 | "bugs": { 26 | "url": "https://github.com/Dash-OS/redux-saga-process/issues" 27 | }, 28 | "keywords": [ 29 | "react", 30 | "react-native", 31 | "redux", 32 | "saga", 33 | "redux-saga", 34 | "process", 35 | "daemon", 36 | "generators", 37 | "yield", 38 | "class", 39 | "observer", 40 | "reselect", 41 | "selectors", 42 | "async" 43 | ], 44 | "author": "Braden R. Napier (https://www.dashos.net)", 45 | "license": "MIT", 46 | "homepage": "https://github.com/Dash-OS/redux-saga-process#readme", 47 | "devDependencies": { 48 | "babel-cli": "^6.24.1", 49 | "babel-loader": "^7.0.0", 50 | "babel-plugin-transform-class-properties": "^6.24.1", 51 | "babel-preset-babili": "^0.1.2", 52 | "babel-preset-env": "^1.5.2", 53 | "babel-preset-stage-0": "^6.24.1", 54 | "babili-webpack-plugin": "^0.1.1", 55 | "cross-env": "^5.0.1", 56 | "rimraf": "^2.6.1", 57 | "webpack": "^2.6.1", 58 | "webpack-node-externals": "^1.6.0", 59 | "yargs": "^8.0.2" 60 | }, 61 | "dependencies": { 62 | "react": "^15.5.4", 63 | "redux-saga": "^0.14.8", 64 | "reselect": "^3.0.0" 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /src/generators.js: -------------------------------------------------------------------------------- 1 | 2 | export { 3 | nilReducer, 4 | reducerReducer, 5 | arrayMapReducer, 6 | objectMapReducer, 7 | nestedObjectMapReducer, 8 | objectWildcardMapReducer, 9 | } from './process-lib/reducerGenerators' -------------------------------------------------------------------------------- /src/main.js: -------------------------------------------------------------------------------- 1 | export { 2 | runProcesses, 3 | runProcess, 4 | buildProcesses, 5 | reloadProcess, 6 | processContext, 7 | } from './process-lib/effects' 8 | 9 | export { 10 | toReduxType, 11 | configProcess 12 | } from './process-lib/helpers' 13 | 14 | export { 15 | createActions, 16 | buildCreator 17 | } from './process-lib/createActions' 18 | 19 | export { 20 | Wildcard, 21 | hasWildcard 22 | } from './process-lib/wildcard' 23 | 24 | export { 25 | default as Process 26 | } from './process-lib/process' 27 | 28 | export { 29 | default as statics 30 | } from './process-lib/statics' 31 | -------------------------------------------------------------------------------- /src/process-lib/createActions.js: -------------------------------------------------------------------------------- 1 | import { isObjLiteral, toReduxType } from './helpers' 2 | 3 | const buildTypes = types => { 4 | const compiled = {} 5 | for ( let _type of types ) { 6 | const type = _type.replace(/^!/, '') 7 | const snakeCase = 
toReduxType(type) 8 | compiled[type] = snakeCase 9 | } 10 | return compiled 11 | } 12 | 13 | const buildCreator = (type, keys) => (...args) => { 14 | var i = 0, compiled = { type } 15 | if ( keys === null || typeof keys === undefined ) { 16 | compiled = { ...compiled, ...(isObjLiteral(args[0]) && args[0]) } 17 | } else if ( Array.isArray(keys) ) { 18 | for ( const key of keys ) { compiled[key] = args[i++] } 19 | if (args.length > keys.length && isObjLiteral(args[i])) { 20 | compiled = { ...compiled, ...args[i] } 21 | } 22 | } else if ( typeof keys === 'function' ) { 23 | compiled = { ...compiled, ...keys(...args) } 24 | } else if ( isObjLiteral(keys) ) { 25 | compiled = { ...compiled, ...keys, ...(isObjLiteral(args[0]) && args[0]) } 26 | } else { throw new Error('Dont know how to handle action: ', type, keys, args) } 27 | return compiled 28 | } 29 | 30 | const buildActions = (actions) => { 31 | const compiled = { public: {}, private: {} } 32 | for ( let _type in actions ) { 33 | const scope = _type.startsWith('!') ? 'private' : 'public', 34 | type = _type.replace(/^!/, '') 35 | compiled[scope][type] = buildCreator(toReduxType(type), actions[_type]) 36 | } 37 | return compiled 38 | } 39 | 40 | const createActions = actions => { 41 | if ( ! actions ) { throw new Error('No Actions Received') } 42 | const _types = Object.keys(actions) 43 | const TYPES = buildTypes(_types), 44 | ACTIONS = buildActions(actions) 45 | return { TYPES, ACTIONS } 46 | } 47 | 48 | export { createActions, toReduxType, buildCreator } -------------------------------------------------------------------------------- /src/process-lib/effects.js: -------------------------------------------------------------------------------- 1 | import { spawn, fork, call, take } from 'redux-saga/effects' 2 | import { eventChannel } from 'redux-saga' 3 | import { createActions } from './createActions' 4 | import { createSelector, createStructuredSelector } from 'reselect' 5 | import { registerRecord } from './registry' 6 | import * as generate from './reducerGenerators' 7 | 8 | import { isReduxType, toReduxType, isObjLiteral, props } from './helpers' 9 | import { hasWildcard } from './wildcard' 10 | 11 | const isProcess = o => Object.getPrototypeOf(o) && Object.getPrototypeOf(o).isProcess === true; 12 | 13 | const isSSR = 14 | ( typeof window === undefined || typeof window !== 'object' || ! window || ! window.document ) 15 | 16 | function* runProcesses(categories, rebuild = false) { 17 | for ( const categoryID in categories ) { 18 | const category = categories[categoryID] 19 | if ( 20 | isProcess(category) !== true && 21 | typeof category !== 'object' 22 | ) { continue } 23 | if ( isProcess(category) === true ) { 24 | isProcessActive(category) && ( yield fork(runProcess, category, rebuild) ) 25 | } else { 26 | for ( const processID in category ) { 27 | const proc = category[processID] 28 | if ( isProcess(proc) === true ) { 29 | isProcessActive(proc) && ( yield fork(runProcess, proc, rebuild) ) 30 | } else { continue } 31 | } 32 | } 33 | } 34 | } 35 | 36 | // This will allow pushing events into the processes for development 37 | // function* buildCommChannel() { 38 | // if ( ! 
IPC ) { 39 | // if ( module.hot ) { 40 | // // Create our event emitter 41 | // const chan = eventChannel( emitter => { 42 | // ExternalEventEmitter = emitter 43 | // window.chan = emitter 44 | // return () => { 45 | // ExternalEventEmitter = undefined 46 | // } 47 | // } ) 48 | // while(true) { 49 | // const action = yield take(chan) 50 | // console.log('External Action: ', action) 51 | // } 52 | // } 53 | // } 54 | // } 55 | 56 | const isProcessActive = ({ config = {} }) => ( 57 | config.enabled === false 58 | || ( isSSR && config.ssr === false ) 59 | ? false 60 | : true 61 | ) 62 | 63 | function* runProcess(proc, rebuild = false) { 64 | if ( ! props.compiled ) { 65 | if ( props.log ) { 66 | console.warn('[rsp] Did not connect to reducers before calling runProcesses, building the process now') 67 | } 68 | buildProcess(proc) 69 | } 70 | const { config = {}, runningProcess } = proc 71 | 72 | let SagaProcess, state 73 | if ( runningProcess && ! rebuild ) { 74 | // We still want to rebuild the process, but we will move 75 | // its entire state to the newly built class when we rebuild. 76 | state = runningProcess.state 77 | // SagaProcess = runningProcess 78 | SagaProcess = new proc(config, state) 79 | } else { 80 | SagaProcess = new proc(config, state) 81 | } 82 | 83 | proc.runningProcess = SagaProcess 84 | yield fork(SagaProcess.__utils.init, proc) 85 | } 86 | 87 | function buildProcesses(categories) { 88 | if ( module.hot && props.hot === true ) { 89 | // if we are hot reloading and our config specifies to hot reload: 90 | props.compiled = false 91 | } 92 | if ( props.ssr === false && isSSR ) { 93 | if ( props.log ) { 94 | console.info('[rsp] Processes have been set to only run on the client, cancelling build') 95 | } 96 | return 97 | } 98 | if ( ! isObjLiteral(categories) ) { throw new Error('[rsp] buildProcesses expects an object') } 99 | const processes = { 100 | reducers: {}, 101 | initialState: {}, 102 | context: {}, 103 | } 104 | for ( const categoryID of Object.keys(categories) ) { 105 | const category = categories[categoryID] 106 | if ( 107 | isProcess(category) !== true 108 | && typeof category !== 'object' 109 | ) { continue } 110 | if ( isProcess(category) === true ) { 111 | const compiled = isProcessActive(category) && buildProcess(category) 112 | if ( ! compiled ) { continue } 113 | parseCompiledProcess(compiled, processes) 114 | } else { 115 | for ( const processID in category ) { 116 | const proc = category[processID] 117 | if ( isProcess(proc) === true ) { 118 | const compiled = isProcessActive(proc) && buildProcess(proc) 119 | if ( ! compiled ) { continue } 120 | parseCompiledProcess(compiled, processes) 121 | } 122 | } 123 | } 124 | } 125 | 126 | const buildObjectMapReducer = (reducerName, reducer, initialState, ctx) => { 127 | processes.reducers[reducerName] = 128 | props.wildcardMatch && hasWildcard(reducer) 129 | ? generate.wildcardMapReducer(initialState, reducer, ctx) 130 | : generate.objectMapReducer(initialState, reducer, ctx) 131 | } 132 | 133 | for ( const reducerName in processes.reducers ) { 134 | const reducer = processes.reducers[reducerName] 135 | if ( typeof reducer === 'function' ) { 136 | continue 137 | } else if ( Array.isArray(reducer) ) { 138 | processes.reducers[reducerName] = generate.arrayMapReducer( 139 | processes.initialState[reducerName], 140 | reducer.map(r => 141 | props.wildcardMatch && hasWildcard(r) 142 | ? 
generate.wildcardMapReducer(undefined, r, undefined) 143 | : generate.objectMapReducer(undefined, r, undefined) 144 | ), 145 | processes.context[reducerName] 146 | ) 147 | } else if ( isObjLiteral(reducer) && Object.keys(reducer).length > 0 ) { 148 | buildObjectMapReducer( 149 | reducerName, reducer, 150 | processes.initialState[reducerName], 151 | processes.context[reducerName] 152 | ) 153 | } else if ( typeof reducer === 'function' ) { 154 | processes.reducers[reducerName] = generate.reducerReducer( 155 | processes.initialState[reducerName], 156 | reducer, 157 | processes.context[reducerName] 158 | ) 159 | } else { 160 | //throw new Error('[rsp] Failed to Build Reducer: ', reducerName, processes.reducers) 161 | 162 | } 163 | } 164 | // if ( ! processes.reducers ) { 165 | // // We want to provide at least one reducer so we don't get an error 166 | // // when using this with combineReducers. 167 | // processes.reducers._e = generate.emptyReducer 168 | // } 169 | props.compiled = true 170 | 171 | return { 172 | reducerNames: Object.keys(processes.reducers), 173 | processReducers: processes.reducers, 174 | initialStates: processes.initialState 175 | } 176 | } 177 | 178 | function buildProcess(proc) { 179 | const compiled = {} 180 | if ( ( ! proc.isCompiled && ! props.compiled ) ) { 181 | // console.log('Building') 182 | buildReducer(proc, compiled) 183 | buildSelectors(proc, compiled) 184 | buildActions(proc, compiled) 185 | buildActionRoutes(proc, compiled) 186 | 187 | mutateProcess(proc, compiled) 188 | props.useRegistry && registerRecord(proc) 189 | 190 | proc.isCompiled = true 191 | } else { 192 | // console.log('Return Mutated: ', proc.actions, proc.actionRoutes) 193 | buildReducer(proc, compiled) 194 | /* Already compiled this process, return compiled data */ 195 | if ( proc.actions ) { compiled.actions = proc.actions } 196 | if ( proc.actionRoutes ) { compiled.actionRoutes = proc.actionRoutes } else { 197 | buildActionRoutes(proc, compiled) 198 | } 199 | if ( proc.selectors ) { compiled.selectors = proc.selectors } 200 | if ( proc.types ) { compiled.types = proc.types } 201 | } 202 | compiled.initialState = proc.initialState 203 | return compiled 204 | } 205 | 206 | const buildReducer = ({ config = {}, initialState, reducer, name, ...proc }, compiled = {}) => { 207 | if ( config.reduces && reducer ) { 208 | compiled.reducer = { 209 | reduces: config.reduces, 210 | reducer: undefined, 211 | initialState: undefined, 212 | } 213 | let preReducer, preState 214 | if ( Array.isArray(config.reduces) ) { 215 | // When we have an array then we indicate that we want to reduce multiple 216 | // keys in the store. We split the reducers appropriately and will distribute 217 | // them once we reach the higher-level handlers. 218 | if ( ! isObjLiteral(reducer) ) { 219 | // We expect that the reducer property is an object literal 220 | // mapping the given keys to a reducer property. 221 | throw new Error(`[rsp] | Process (${name}) indicates that it reduces multiple keys but the reducer property is not an object literal mapping the keys to reducers.`) 222 | } 223 | preReducer = [], preState = [] 224 | for ( let reducerKey of config.reduces ) { 225 | // Each key that we reduce will be reduced like any other reducer that we 226 | // allow. 227 | if ( ! reducer[reducerKey] ) { 228 | // We can't reduce a reducer that doesn't exist! 229 | throw new Error(`[rsp] | Process ${name} indicates that it reduces ${reducerKey} but does not provide a reducer for it! 
You have provided ${Object.keys(reducer)}`) 230 | } 231 | // Push our reducers into the compiled reducers in the same order that they are defined. 232 | const parsedReducer = parseReducer(reducer[reducerKey]) 233 | preReducer.push(parsedReducer) 234 | if ( initialState && initialState[reducerKey] ) { 235 | // If we have an initialState for this reducer then we will add it, 236 | // otherwise we provide an empty object literal. 237 | preState.push( 238 | isObjLiteral(initialState[reducerKey]) 239 | ? Object.assign({}, initialState[reducerKey]) 240 | : initialState[reducerKey] 241 | ) 242 | } else { preState.push({}) } 243 | } 244 | } else { 245 | preReducer = parseReducer(reducer) 246 | preState = isObjLiteral(initialState) 247 | ? Object.assign({}, initialState) 248 | : initialState 249 | } 250 | compiled.reducer.reducer = preReducer 251 | compiled.reducer.initialState = preState 252 | } else if ( config.reduces && ! reducer ) { 253 | if ( props.log ) { 254 | console.warn(`[rsp] | Process ${name && `(${name})`} indicates that it reduces "${config.reduces.toString()}" but does not provide a reducer. This will be ignored.`) 255 | } 256 | } 257 | return compiled 258 | } 259 | 260 | const parseReducer = (reducer) => { 261 | let parsed 262 | if ( typeof reducer === 'function' ) { 263 | parsed = reducer 264 | } else if ( isObjLiteral(reducer) ) { 265 | parsed = {} 266 | for ( const type in reducer ) { 267 | parsed[toReduxType(type)] = reducer[type] 268 | } 269 | } else { 270 | // We will likely need to handle arrays better and possibly other values 271 | parsed = reducer 272 | } 273 | return parsed 274 | } 275 | 276 | const buildSelectors = ({ selectors, config = {} }, compiled = {}) => { 277 | let deferredSelectors = [] 278 | let coreSelector 279 | if ( Array.isArray(config.reduces ) ) { 280 | coreSelector = (state, props) => state 281 | } else { 282 | coreSelector = config.reduces 283 | ? (state, props) => state[config.reduces] 284 | : (state, props) => state 285 | } 286 | if ( ! props.compiled && selectors ) { 287 | if ( ! compiled.selectors ) { compiled.selectors = { public: {}, private: {} } } 288 | for ( const _selector in selectors ) { 289 | const scope = _selector.startsWith('!') ? 'private' : 'public', 290 | selector = _selector.replace(/^!/, ''), 291 | selectorValue = selectors[_selector] 292 | if ( Array.isArray(selectorValue) ) { 293 | const deferCreation = selectorValue.some(s => typeof s === 'string') 294 | if ( deferCreation ) { 295 | deferredSelectors.push({ 296 | scope, selector, selectorValue 297 | }) 298 | continue 299 | } 300 | if ( selectorValue.length === 1 ) { 301 | compiled.selectors[scope][selector] = 302 | () => createSelector(coreSelector, ...selectorValue) 303 | } else { 304 | compiled.selectors[scope][selector] = 305 | () => createSelector(...selectorValue) 306 | } 307 | } else if ( isObjLiteral(selectorValue) ) { 308 | compiled.selectors[scope][selector] = 309 | () => createStructuredSelector(selectorValue) 310 | } else { throw new Error('Process Selectors must be an array or object of selectors') } 311 | } 312 | } 313 | // We defer selector creation when a composed selector is discovered (by providing a string reference) 314 | // This allows us to nest selectors. 
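// For example (illustrative), a process might declare:
//   static selectors = {
//     user:     [ state => state.user ],
//     userName: [ 'user', user => user && user.name ],
//   };
// The string 'user' refers to the selector of that name, so the creation of
// 'userName' is deferred until 'user' exists and can be composed via createSelector.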
315 | for ( let deferredSelector of deferredSelectors ) { 316 | const { scope, selector, selectorValue: _selectorValue } = deferredSelector 317 | if ( _selectorValue.length === 1 ) { 318 | throw new Error('[PROCESS BUILD ERROR - Selectors]: Composed Selectors may not be a single element, it makes no sense.') 319 | } else { 320 | const selectorValue = composeSelector(deferredSelector, compiled) 321 | compiled.selectors[scope][selector] = 322 | () => createSelector(...selectorValue) 323 | } 324 | } 325 | return compiled 326 | } 327 | 328 | const composeSelector = ({ selectorValue }, compiled) => { 329 | return selectorValue.map(value => { 330 | // We need to replace strings with our composed selectors 331 | if ( typeof value !== 'string' ) { return value } 332 | const composed = compiled.selectors.public[value] || compiled.selectors.private[value] 333 | if ( ! composed ) { throw new Error(`[rsp - Selectors]: Failed to discover composed selector: ${value}`) } 334 | return composed 335 | }) 336 | } 337 | 338 | const buildActions = (proc, compiled = {}) => { 339 | const creators = proc.actionCreators || proc.actions 340 | const actions = creators && createActions(creators) 341 | if ( actions ) { 342 | compiled.actions = actions.ACTIONS 343 | compiled.types = { ...compiled.types, ...actions.TYPES } 344 | } 345 | return compiled 346 | } 347 | 348 | const buildActionRoutes = (proc, compiled = {}) => { 349 | const actionRoutes = proc.actionRoutes 350 | if ( ! actionRoutes ) { return compiled } 351 | compiled.actionRoutes = {} 352 | for (let route in actionRoutes) { 353 | compiled.actionRoutes[toReduxType(route)] = actionRoutes[route] 354 | } 355 | } 356 | 357 | const mutateProcess = (process, compiled) => { 358 | if ( compiled.actions ) { process.actions = compiled.actions } 359 | if ( compiled.selectors ) { process.selectors = compiled.selectors } 360 | if ( compiled.types ) { process.types = compiled.types } 361 | if ( compiled.actionRoutes ) { process.actionRoutes = compiled.actionRoutes } 362 | } 363 | 364 | const parseCompiledProcess = (compiled, processes) => { 365 | if ( compiled.reducer ) { 366 | const reduces = compiled.reducer.reduces 367 | if ( compiled.reducer.reducer ) { 368 | if ( ! reduces ) { throw new Error('[rsp] Reducer Does Not Have a Reduces Value, this should not have occurred?') } 369 | // What type does our reduces value have? 370 | if ( Array.isArray(reduces) ) { 371 | // When we have an array then we are actually reducing multiple keys in 372 | // the store and need to map them appropriately. 373 | let i = 0 374 | for ( let reducerKey of reduces ) { 375 | const initialState = compiled.reducer.initialState[i], 376 | reducer = compiled.reducer.reducer[i] 377 | parseCompiledReducer(reducerKey, reducer, initialState, processes) 378 | i++ 379 | } 380 | } else { 381 | parseCompiledReducer(reduces, compiled.reducer.reducer, compiled.reducer.initialState, processes) 382 | } 383 | } 384 | } 385 | } 386 | 387 | const parseCompiledReducer = (name, reducer, initialState = {}, processes) => { 388 | // Received the compiled reducer, check if we need to merge with other reducers, 389 | // then save to our processes object. 390 | if ( processes.reducers[name] ) { 391 | mergeReducers(name, reducer, initialState, processes) 392 | } else { 393 | processes.reducers[name] = reducer 394 | processes.initialState[name] = initialState 395 | } 396 | } 397 | 398 | 399 | const mergeReducers = (name, reducer, initialState = {}, processes) => { 400 | if ( ! 
props.mergeReducers ) { 401 | throw new Error(`[rsp] Two processes are attempting to reduce the same key (${name}) in the state but mergeReducers is disabled.`) 402 | } 403 | if ( Array.isArray(processes.reducers[name]) ) { 404 | processes.reducers[name].push(reducer) 405 | } else { 406 | processes.reducers[name] = [ 407 | processes.reducers[name], 408 | reducer 409 | ] 410 | } 411 | if ( processes.initialState[name] ) { 412 | processes.initialState[name] = { 413 | ...processes.initialState[name], 414 | ...initialState 415 | } 416 | } else { processes.initialState[name] = initialState } 417 | } 418 | 419 | export { runProcesses, runProcess, buildProcesses, isProcess } 420 | 421 | -------------------------------------------------------------------------------- /src/process-lib/helpers.js: -------------------------------------------------------------------------------- 1 | import { hasWildcard } from './wildcard' 2 | var props = { 3 | compiled: false, 4 | mergeReducers: true, 5 | useRegistry: true, 6 | wildcardMatch: true, 7 | ssr: true, 8 | hot: true, 9 | log: true 10 | } 11 | 12 | const configProcess = config => ( config && ( props = { ...props, ...config } ) ) 13 | 14 | const isObjLiteral = 15 | o => ( o !== null && ! Array.isArray(o) && typeof o !== 'function' && typeof o === 'object' ) 16 | 17 | // const toReduxType = 18 | // str => isReduxType(str) ? str : str.replace(/(?!^)([A-Z])/g, '_$1').toUpperCase() 19 | 20 | const toReduxType = str => formatType(str) 21 | 22 | // Allow for wildcard in the types 23 | const isReduxType = str => /^[A-Z\*]+([_\*][A-Z\*]+)*?$/.test(str) 24 | 25 | const cancellablePromise = (p, onCancel, CANCEL) => { 26 | p[CANCEL] = onCancel // eslint-disable-line 27 | return p 28 | } 29 | 30 | /* 31 | Give a best effort to make the type formatting as reliable as possible. 32 | We start by splitting the string into upper case and lowercase elements. 33 | Then we iterate through the value and try to determine how we need to 34 | format it. 35 | 36 | We used to use: 37 | str => isReduxType(str) ? str : str.replace(/(?!^)([A-Z])/g, '_$1').toUpperCase() 38 | 39 | However, this led to certain situations causing an improperly formatted action. 40 | Specifically when not done properly (systemRX instead of systemRx). In order to 41 | attempt to work in all situations we parse it with some logic instead. 42 | 43 | systemHeartbeat -> 'SYSTEM_HEARTBEAT' 44 | FormatThis -> 'FORMAT_THIS' 45 | systemRx -> 'SYSTEM_RX' 46 | systemRX -> 'SYSTEM_RX' 47 | */ 48 | function formatType (type) { 49 | let wildcardType = hasWildcard(type) 50 | if ( isReduxType(type) ) { return type } 51 | var buffer = '', 52 | list = type 53 | .split(/([A-Z]+|[a-z]+)/) 54 | .reduce( 55 | (a, c) => { 56 | if ( c === '' ) { return a } 57 | a.push(c) 58 | return a 59 | }, [] 60 | ) 61 | if ( list.length === 1 ) { return type.toUpperCase() } 62 | let wasCapital = false 63 | for ( let e of list ) { 64 | if ( ! e.length ) { continue } 65 | const isCapital = /[A-Z]/.test(e) 66 | e = e.toUpperCase() 67 | if ( isReduxType(e) ) { 68 | if ( buffer === '' ) { 69 | buffer += e 70 | } else { 71 | if ( isCapital && ! wasCapital ) { 72 | buffer += '_' + e 73 | } else if ( wasCapital && isCapital ) { 74 | buffer += e 75 | } else if ( wasCapital && ! isCapital ) { 76 | if ( buffer.slice(-2, -1) === '_' || buffer.slice(-2, -1) === '' ) { 77 | buffer += e 78 | } else { buffer = buffer.slice(0, -1) + '_' + buffer.slice(-1) + e } 79 | } else if ( wasCapital && ! 
isCapital ) { 80 | if ( buffer.slice(-2, -1) === '_' || buffer.slice(-2, -1) === '' ) { 81 | buffer += e 82 | } else { buffer += '_' + e } 83 | } else { buffer += '_' + e } 84 | } 85 | } else if ( e.includes('*') ) { 86 | buffer += e 87 | } 88 | wasCapital = isCapital 89 | } 90 | if ( isReduxType(buffer) ) { 91 | return buffer 92 | } 93 | } 94 | 95 | export { isObjLiteral, toReduxType, isReduxType, props, configProcess, cancellablePromise } 96 | -------------------------------------------------------------------------------- /src/process-lib/process.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-constant-condition */ 2 | 3 | import { CANCEL, delay } from 'redux-saga' 4 | import { TASK } from 'redux-saga/utils' 5 | import { take, fork, put, cancel, call, race, apply, cancelled, select, spawn } from 'redux-saga/effects' 6 | 7 | import { 8 | isReduxType, 9 | toReduxType, 10 | props as processProps, 11 | isObjLiteral, 12 | cancellablePromise 13 | } from './helpers' 14 | 15 | import { Wildcard, hasWildcard } from './wildcard' 16 | 17 | const WC = processProps.wildcardMatch && new Wildcard() 18 | 19 | class Process { 20 | 21 | displayName = undefined 22 | 23 | constructor(config, state, ipc) { 24 | this.config = config 25 | 26 | // if ( state ) { this.state = state } 27 | 28 | this.task.classTasks = [] 29 | this.task.roster = {} 30 | 31 | this.__utils.init = this.__utils.init.bind(this) 32 | this.__utils.log = this.__utils.log.bind(this) 33 | 34 | this.task.create = this.task.create.bind(this) 35 | this.task.save = this.task.save.bind(this) 36 | this.task.cancel = this.task.cancel.bind(this) 37 | this.task.onComplete = this.task.onComplete.bind(this) 38 | this.task.task = this.task.task.bind(this) 39 | this.task.cleanup = this.task.cleanup.bind(this) 40 | this.task.cancelAll = this.task.cancelAll.bind(this) 41 | this.task.show = this.task.show.bind(this) 42 | 43 | this.observable.create = this.observable.create.bind(this) 44 | 45 | this.select = this.select.bind(this) 46 | this.dispatch = this.dispatch.bind(this) 47 | 48 | this.__utils.ipc = ipc 49 | 50 | } 51 | 52 | __utils = { 53 | ipc: undefined, 54 | refs: {}, 55 | 56 | log(type, msg, ...args) { 57 | try { 58 | if ( processProps.log !== true ) { return } 59 | let title = '', root = false 60 | if ( ! 
this.__utils.groupStart ) { 61 | title += `[RSP] | ${this.displayName} | ` 62 | root = true 63 | this.__utils.groupStart = true 64 | } 65 | if ( args.length === 0 ) { 66 | if ( root ) { 67 | console.groupCollapsed(title, msg) 68 | console.info('Process Context:', this) 69 | console.groupEnd() 70 | delete this.__utils.groupStart 71 | } else { console.log(msg) } 72 | } else { 73 | if ( type === 'error' ) { 74 | console.group(title, msg) 75 | console.error(msg) 76 | } else { 77 | console.groupCollapsed(title, msg) 78 | } 79 | 80 | for ( let arg of args ) { 81 | if ( typeof arg === 'function' ) { 82 | arg.call(this, arg) 83 | } else if ( typeof arg === 'string' ) { 84 | this.__utils.log(type, arg) 85 | } else { 86 | this.__utils.log(type, ...arg) 87 | } 88 | } 89 | if ( root ) { 90 | console.info('Process Context:', this) 91 | delete this.__utils.groupStart 92 | } 93 | console.groupEnd() 94 | } 95 | } catch (e) { 96 | console.error(e.message) 97 | } 98 | }, 99 | 100 | * init(target) { 101 | this.displayName = this.displayName || target.displayName || target.name || 'ANONYMOUS_PROCESS' 102 | const staticsTask = yield fork([ this, this.__utils.startProcessMonitor ], target) 103 | if ( target.compiledselectors ) { 104 | yield fork([ this, this.__utils.prepareSelectors ], target.compiledselectors) 105 | } else if ( target.selectors ) { 106 | yield fork([ this, this.__utils.prepareSelectors ], target.selectors) 107 | } 108 | let startTask 109 | if (typeof this.processStarts === 'function') { 110 | startTask = yield fork([this, this.processStarts]) 111 | } 112 | if ( startTask ) { 113 | this.task.classTasks.push(staticsTask, startTask) 114 | } 115 | 116 | }, 117 | 118 | * prepareSelectors(selectors) { 119 | this.__utils.selectors = {} 120 | for ( let scope in selectors ) { 121 | const selectorCreators = selectors[scope] 122 | for ( let selectorID in selectorCreators ) { 123 | const selectorCreator = selectors[scope][selectorID] 124 | this.__utils.selectors[selectorID] = selectorCreator() 125 | } 126 | } 127 | }, 128 | 129 | * startProcessMonitor(target) { 130 | const { 131 | actions, 132 | types, 133 | actionRoutes, 134 | selectors, 135 | cancelTypes, 136 | name 137 | } = target 138 | 139 | const config = { wildcard: false } 140 | const monitorPattern = actionRoutes && getPattern(actionRoutes, config) || '@@_PROCESS_DONT_MONITOR_TYPE_', 141 | cancelPattern = cancelTypes && getPattern(cancelTypes) || '@@_PROCESS_DONT_MONITOR_TYPE_' 142 | 143 | this.__utils.actions = actions 144 | this.__utils.target = target 145 | 146 | if ( monitorPattern === '@@_PROCESS_DONT_MONITOR_TYPE_' && cancelPattern === '@@_PROCESS_DONT_MONITOR_TYPE_') { 147 | //console.info(name, ' process does not monitor anything and will be killed when it completes its lifecycle') 148 | return 149 | } 150 | let stopCheck 151 | try { 152 | while ( ! 
stopCheck ) { 153 | const { monitorAction, ipcAction, cancelAction } = yield race({ 154 | monitorAction: take(monitorPattern), 155 | // ipcAction: take(this.__utils.ipc, monitorPattern), 156 | cancelAction: take(cancelPattern) 157 | }) 158 | if (monitorAction || ipcAction) { 159 | const action = monitorAction || ipcAction 160 | yield fork([this, this.__utils.handleMonitorExecution], action, config) 161 | continue 162 | } else if (cancelAction) { 163 | stopCheck = yield apply(this, this.__utils.handleCancelExecution, [ cancelAction, config ]) 164 | } 165 | } 166 | } catch (e) { 167 | this.__utils.log('error', `Process Monitor Error Occurred: ${e.message}`) 168 | throw new Error(e) 169 | } finally { 170 | if ( yield cancelled() ) { 171 | // Our process has been cancelled by an external source (such as sagaTask.cancel() or directly) 172 | this.__utils.log('info', 'Cancelled!') 173 | try { 174 | yield apply(this, this.__utils.handleCancelExecution, [ { type: CANCEL }, config ]) 175 | } catch(e) { 176 | console.error('[rsp] Error while handling process cancellation: ', e.message) 177 | } 178 | } 179 | // Cancel any tasks we have registered 180 | try { 181 | yield apply(this, this.task.cancelAll) 182 | } catch (e) { 183 | this.__utils.log('error', 'Error while cancelling saved tasks: ', e.message) 184 | } 185 | for ( let classTask of this.task.classTasks ) { 186 | try { 187 | yield cancel(classTask) 188 | } catch (e) { 189 | this.__utils.log('error', 'Error while Cancelling a Task: ', e.message, classTask) 190 | } 191 | } 192 | } 193 | 194 | this.task.classTasks = [] 195 | }, 196 | 197 | * handleMonitorExecution(action, config = {}) { 198 | try { 199 | const { actionRoutes, name } = this.__utils.target 200 | // If we are not being externally cancelled then we will run the 201 | // actionRoute before cancellation if it is specified. 202 | const route = actionRoutes[action.type] 203 | if (typeof this[route] !== 'function' ) { 204 | if ( ! 
config.wildcard ) { 205 | this.__utils.log('error', `Action Route ${route} is not a function`) 206 | return 207 | } 208 | } else { 209 | return yield apply(this, this[route], [ action ]) 210 | } 211 | if ( config.wildcard && this.config.matchWildcard !== false ) { 212 | const matches = WC.pattern(actionRoutes).search(action.type) 213 | for ( let match in matches ) { 214 | const fn = matches[match] 215 | if ( typeof this[fn] !== 'function' ) { 216 | this.__utils.log('error', `Action Route ${route} is not a function`) 217 | continue 218 | } else { 219 | yield apply(this, this[fn],[ action ]) 220 | } 221 | } 222 | } 223 | } catch (e) { 224 | this.__utils.log('error', 'While Executing a Monitored Action Event.', 'Dispatched Action: ', action, e.message) 225 | } 226 | }, 227 | 228 | * handleCancelExecution(action, config = {}) { 229 | var stopCheck = true 230 | if ( action.type !== CANCEL && typeof this.shouldProcessCancel === 'function') { 231 | stopCheck = yield apply(this, this.shouldProcessCancel, [ action ]) 232 | } else { stopCheck = true } 233 | if (stopCheck === true) { 234 | if ( typeof this.processWillCancel === 'function' ) { 235 | yield apply(this, this.processWillCancel, [ action ]) 236 | } 237 | } else if ( stopCheck !== false ) { 238 | this.__utils.log('warn', 'shouldProcessCancel expects a boolean value but received: ', stopCheck) 239 | } 240 | return stopCheck 241 | }, 242 | 243 | } 244 | 245 | // yield* this.task.create('handlers', 'clicks', 'handleClick', action) 246 | task = { 247 | * create(category, id, callback, ...props) { 248 | const task = yield fork([this, callback], ...props) 249 | yield* this.task.save(task, category, id) 250 | return task 251 | }, 252 | 253 | // prints all running tasks in a nested group when called 254 | show() { 255 | this.__utils.log( 256 | 'tasks', 257 | 'Currently Running Tasks:', 258 | () => { 259 | for ( let taskCategory in this.task.roster ) { 260 | this.__utils.log( 261 | false, 262 | taskCategory, 263 | () => { 264 | for ( let taskID in this.task.roster[taskCategory] ) { 265 | this.__utils.log( 266 | false, 267 | taskID, 268 | this.task.roster[taskCategory][taskID].name 269 | ) 270 | } 271 | } 272 | ) 273 | } 274 | } 275 | ) 276 | }, 277 | /* 278 | this.task.save(...) 279 | Saves a task with a category and id. 280 | */ 281 | * save(task, category, id) { 282 | const roster = this.task.roster 283 | // If we save a task that was already saved previously we 284 | // will cancel the previous task automatically. 285 | if ( roster[category] && roster[category][id] ) { 286 | yield apply(this, this.task.cancel, [ category, id ]) 287 | } 288 | this.task.roster = { 289 | ...roster, 290 | [category]: { 291 | ...roster[category], 292 | [id]: task 293 | } 294 | } 295 | yield fork([this, this.task.onComplete], category, id, ['task', 'cleanup'], category, id) 296 | }, 297 | 298 | * task(category, id) { 299 | if ( ! id && this.task.roster[category] ) { 300 | return this.task.roster[category] 301 | } else if ( id && category ) { 302 | return this.task.roster[category] && this.task.roster[category][id] 303 | } else if ( ! id && ! category ) { 304 | return this.task.roster 305 | } 306 | }, 307 | 308 | /* 309 | onComplete() 310 | Register a callback that will be made with the "this" context attached 311 | and as a redux-saga. The callback will be made once the given tasks 312 | promise (task.done) is resolved. The callback will be made whether the 313 | task was cancelled or not. 
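    A small usage sketch (category, id and method names are illustrative):

      yield* this.task.create('net', 'poll', this.poller)
      yield* this.task.onComplete('net', 'poll', 'pollFinished')

    which runs this.pollFinished() once the 'poll' task (and any forks it made)
    has resolved, whether it completed normally or was cancelled.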
314 | */ 315 | * onComplete(category, id, callback, ...props) { 316 | // Wait until the task has completed; this includes any forks but 317 | // not spawns. 318 | const task = yield* this.task.task(category, id) 319 | if ( ! task || ! task[TASK] || ! task.done ) { 320 | this.__utils.log('error', 'onComplete received an invalid task object: ', task) 321 | return 322 | } 323 | try { yield task.done } finally { 324 | // Make the callback if the function is found, otherwise log 325 | // a message to the console 326 | if ( ! callback ) { return } 327 | if ( Array.isArray(callback) ) { 328 | const fn = this[callback[0]] && this[callback[0]][callback[1]] 329 | if ( typeof fn === 'function' ) { yield apply(this, fn, props) } 330 | } else if ( typeof this[callback] === 'function' ) { 331 | yield apply(this, this[callback], props) 332 | } else if ( typeof callback === 'function' ) { 333 | yield apply(this, callback, props) 334 | } else { 335 | this.__utils.log('error', 'onComplete callback not found: ', callback) 336 | } 337 | } 338 | }, 339 | 340 | * cleanup(category, id) { 341 | const roster = this.task.roster 342 | if ( roster[category] && roster[category][id] ) { 343 | delete this.task.roster[category][id] 344 | if ( Object.keys(this.task.roster[category]).length === 0 ) { 345 | delete this.task.roster[category] 346 | } 347 | } 348 | }, 349 | 350 | * cancel(category, id) { 351 | const task = yield apply(this, this.task.task, [ category, id ]) 352 | if ( ! task ) { 353 | // Should we warn about the task not existing? 354 | return 355 | } 356 | if (task && task[TASK] && task.isRunning()) { 357 | //console.log('Cancelling Normally') 358 | yield cancel(task) 359 | } else if ( task && ! task[TASK] ) { 360 | //console.log('Cancel All') 361 | const ids = Object.keys(task) 362 | for (const id of ids) { 363 | yield fork([this, this.task.cancel], category, id) 364 | } 365 | } 366 | }, 367 | 368 | * cancelAll() { 369 | this.__utils.log('info', 'Cancelling All Tasks!') 370 | const categories = Object.keys(this.task.roster) 371 | for (const category of categories) { 372 | try { 373 | yield apply(this, this.task.cancel, [ category ]) 374 | } catch (e) { 375 | // We don't want one error to stop us from cancelling other tasks 376 | this.__utils.log('error', 'Could Not Cancel a Task Category!
377 | } 378 | } 379 | } 380 | 381 | }; 382 | 383 | observable = { 384 | create(name, handleCancel, ...cancelArgs) { 385 | 386 | const actionQueue = [], dispatchQueue = [], observerRef = Symbol(name) 387 | 388 | this.observable[observerRef] = (...values) => { 389 | const queued = actionQueue.length + dispatchQueue.length 390 | if (dispatchQueue.length) { 391 | const nextDispatch = dispatchQueue.shift() 392 | nextDispatch({ values, name, queued }) 393 | } else { 394 | actionQueue.push({ values, name, queued }) 395 | } 396 | } 397 | 398 | const onCancel = () => { 399 | delete this.observable[observerRef] 400 | if ( typeof handleCancel === 'function' ) { 401 | handleCancel.call(this, ...cancelArgs, name) 402 | } 403 | } 404 | 405 | return { 406 | onData: this.observable[observerRef], 407 | onCancel: () => { 408 | onCancel() 409 | return cancelled 410 | }, 411 | getNext() { 412 | let promise 413 | if (actionQueue.length) { 414 | promise = Promise.resolve(actionQueue.shift()) 415 | } else { 416 | promise = new Promise(resolve => dispatchQueue.push(resolve)) 417 | } 418 | return cancellablePromise(promise, onCancel, CANCEL) 419 | } 420 | } 421 | } 422 | }; 423 | 424 | * select(selector, props) { 425 | let results 426 | if ( 427 | typeof selector === 'string' && 428 | this.__utils.selectors && 429 | this.__utils.selectors[selector] 430 | ) { 431 | const selectFn = this.__utils.selectors[selector] 432 | return yield select(selectFn, props) 433 | } else if ( typeof selector === 'function' ) { 434 | return yield select(selector) 435 | } else if ( Array.isArray(selector) ) { 436 | results = [] 437 | for ( let selected of selector ) { 438 | results.push( yield apply(this, this.select, [ selected ]) ) 439 | } 440 | } else if ( 441 | typeof selector === 'string' && 442 | this.config && this.config.reduces 443 | ) { 444 | return yield select(state => state[this.config.reduces][selector]) 445 | } else if ( ! selector ) { 446 | return yield select(state => state[this.config.reduces]) 447 | } 448 | return results 449 | } 450 | 451 | * dispatch(action, ...args) { 452 | const actionFn = 453 | typeof action === 'string' 454 | && this.__utils.actions 455 | && ( this.__utils.actions.public[action] 456 | || this.__utils.actions.private[action] 457 | ) 458 | if ( actionFn ) { 459 | yield put(actionFn(...args)) 460 | } else if ( typeof action === 'object' && action.type ) { 461 | yield put(action) 462 | } else { throw new Error('Must dispatch either a registered action or a valid redux action object.') } 463 | } 464 | 465 | // * ipc(action, ...args) { 466 | // const chan = this.__utils.ipc 467 | // if ( !
chan ) { throw new Error('[rsp] IPC is not activated') } 468 | // const actionFn = 469 | // typeof action === 'string' 470 | // && this.__utils.actions 471 | // && ( this.__utils.actions.public[action] 472 | // || this.__utils.actions.private[action] 473 | // ) 474 | // if ( actionFn ) { 475 | // yield put(chan, actionFn(...args)) 476 | // } else if ( typeof action === 'object' && action.type ) { 477 | // yield put(chan, action) 478 | // } else { throw new Error('Must dispatch either a registered action or a valid redux action object.') } 479 | // } 480 | 481 | * setState(state) { 482 | let update 483 | for ( let key in state ) { 484 | if ( this.state[key] === undefined || this.state[key] !== state[key] ) { 485 | update = true 486 | break 487 | } 488 | } 489 | if ( update === true ) { this.state = Object.assign({}, this.state, state) } 490 | return update === true 491 | } 492 | 493 | } 494 | 495 | const getPattern = (_types, config) => { 496 | const patterns = [] 497 | let types, isObject 498 | 499 | if (isObjLiteral(_types)) { 500 | types = Object.keys(_types) 501 | isObject = true 502 | } else { 503 | types = _types 504 | isObject = false 505 | } 506 | 507 | if (types === undefined || types.length === 0) { 508 | return '@@_PROCESS_DONT_MONITOR_TYPE_' 509 | } 510 | 511 | for (const type of types) { 512 | parseTypePattern(type, isObject, _types, patterns, config) 513 | } 514 | 515 | return action => patterns.some(func => func(action)) 516 | } 517 | 518 | const parseTypePattern = (type, isObject, _types, patterns, config) => { 519 | const wildcardMatch = processProps.wildcardMatch && hasWildcard(type) 520 | if ( wildcardMatch ) { config.wildcard = true } 521 | let fn 522 | const params = isObject ? _types[type] : _types 523 | switch (typeof params) { 524 | case 'string': { 525 | fn = wildcardMatch 526 | ? action => WC.pattern(type).match(action.type) 527 | : action => action.type === type 528 | patterns.push(fn) 529 | break 530 | } 531 | case 'object': { 532 | let fn 533 | if (Array.isArray(type)) { 534 | fn = wildcardMatch 535 | ? 
action => WC.pattern(type).match(action.type) 536 | : action => action.type 537 | } else { 538 | fn = action => Object.keys(type).every(x => type[x] === action[x]) 539 | } 540 | patterns.push(fn) 541 | break 542 | } 543 | case 'function': 544 | patterns.push(fn) 545 | break 546 | default: 547 | console.error(`[rsp] | parseTypePattern | unsupported type ${type}`) 548 | } 549 | } 550 | 551 | Process.isProcess = true 552 | 553 | export default Process -------------------------------------------------------------------------------- /src/process-lib/reducerGenerators.js: -------------------------------------------------------------------------------- 1 | import { Wildcard } from './wildcard' 2 | 3 | // const WC = new Wildcard() 4 | // .logic('and') 5 | // .case(false) 6 | // .pattern({ 'NETWORK*': 'yes', 'FOO': 'no', '*REQUEST': 'ok' }) 7 | // .search('network_request') 8 | // //.filter('NETWORK_REQUEST') 9 | // //.pattern('NETWORK*') 10 | // //.filter({ 'NETWORK*': 'yes', 'FOO': 'no' }) 11 | // //.pattern({ 'NETWORK*': 'win', '*REQUEST': 'win', 'NO': 'fail' }) 12 | // //.pattern(['*NETWORK*', 'REQUEST', '*REQUEST']) 13 | // //.pattern('*NETWORK*') 14 | // //.filter(['One NETWORK', 'NETWORK', 'foo', 'bar', 'network']) 15 | // //.filter(['NETWORK', 'REQUEST', 'FOO']) 16 | 17 | // console.log(WC) 18 | 19 | const nilReducer = 20 | ( initialState = {} ) => ( state = initialState ) => state 21 | 22 | const arrayMapReducer = 23 | ( initialState, reducers, pcontext ) => ( state = initialState, action, context ) => 24 | ( reducers.reduce( (p, c) => c(p, action, { ...pcontext, ...context }), state ) ) 25 | 26 | const objectMapReducer = 27 | (initialState, handlers = {}, pcontext) => (state = initialState, action, context) => 28 | { 29 | if ( ! action || ! action.type || ! handlers[action.type] ) return state 30 | return handlers[action.type](state, action, { ...pcontext, ...context }) 31 | } 32 | 33 | const wildcardMapReducer = 34 | (initialState, handlers = {}, pcontext) => { 35 | const wcMatcher = new Wildcard(handlers) 36 | return (state = initialState, action, context) => 37 | { 38 | if ( ! action || ! action.type ) return state 39 | const matches = wcMatcher.search(action.type) 40 | return Object.keys(matches).reduce( (p, c) => 41 | matches[c](p, action, { ...pcontext, ...context }) 42 | , state ) 43 | } 44 | } 45 | 46 | const reducerReducer = 47 | ( initialState, reducer, pcontext ) => ( state = initialState, action, context ) => 48 | ( reducer(state, action, { ...pcontext, ...context }) ) 49 | 50 | const nestedObjectMapReducer = 51 | (initialState, handlers = {}, pcontext ) => ( state = initialState, action, context) => 52 | { 53 | if ( ! action || ! action.type || ! handlers[action.type] ) return state 54 | const _context = { ...pcontext, ...context } 55 | const { path } = _context 56 | if ( ! 
path ) return state 57 | const childState = handlers[action.type](state[path], action, _context) 58 | return { 59 | ...state, 60 | [path]: childState 61 | } 62 | } 63 | 64 | export { 65 | nilReducer, 66 | arrayMapReducer, 67 | objectMapReducer, 68 | nestedObjectMapReducer, 69 | reducerReducer, 70 | wildcardMapReducer 71 | } -------------------------------------------------------------------------------- /src/process-lib/registry.js: -------------------------------------------------------------------------------- 1 | 2 | const Errors = { 3 | exportsType() { 4 | throw new Error('[PROCESS] | [static exports] must be an Array of properties to export') 5 | }, 6 | recordsNotFound(...args) { 7 | throw new Error('[PROCESS] | Records Not Found: ', ...args) 8 | } 9 | } 10 | 11 | const RecordRegistry = {} 12 | 13 | class RecordContext { 14 | constructor(proc) { 15 | const { config } = proc 16 | const { pid } = config 17 | this.exported = {} 18 | this.pid = pid 19 | this.captureExports(proc) 20 | } 21 | captureExports = proc => { 22 | const { exports } = proc 23 | if ( ! exports ) { return } 24 | Array.isArray(exports) 25 | ? this.captureExported(proc) 26 | : Errors.exportsType() 27 | } 28 | captureExported = proc => { 29 | const { exports } = proc 30 | for (let $export of exports ) { 31 | this.exported[$export] = proc[$export] && proc[$export].public 32 | } 33 | } 34 | } 35 | 36 | const registerRecord = proc => { 37 | if ( ! proc.config.pid ) { return } 38 | const Record = new RecordContext(proc) 39 | RecordRegistry[Record.pid] = 40 | Array.isArray(RecordRegistry[Record.pid]) 41 | ? [ ...RecordRegistry[Record.pid], Record ] 42 | : [ Record ] 43 | } 44 | 45 | const BUILD = { 46 | selectors: raw => Object.keys(raw).reduce( (prev, id) => { 47 | prev[id] = raw[id]() 48 | return prev 49 | }, {} ) 50 | } 51 | 52 | /* Reduce all records by pid, merge props across them */ 53 | const getRecord = (id, props, config, accum) => { 54 | return RecordRegistry[id].reduce( (p, c) => { 55 | props.forEach( prop => { 56 | if ( c.exported[prop] !== undefined ) { 57 | const value = BUILD[prop] 58 | ? BUILD[prop](c.exported[prop]) 59 | : c.exported[prop] 60 | p[prop] = config.prefixed === true 61 | ? 
{ ...p[prop], [id]: value } 62 | : { ...p[prop], ...value } 63 | } 64 | }) 65 | return p 66 | }, accum ) 67 | } 68 | 69 | const buildSelector = () => { 70 | 71 | } 72 | 73 | 74 | /* Reduce an object container pid/selector pairs */ 75 | /* { modals: ['selectors', 'actions'] } */ 76 | function getRecords(records, config, accum = {}) { 77 | if ( Object.keys(RecordRegistry).length === 0 ) { return {} } 78 | return Object.keys(records) 79 | .reduce( (p, c) => getRecord(c, records[c], config, p), accum ) 80 | } 81 | 82 | 83 | // getRecords({ 84 | // modals: [ 'selectors' ] 85 | // }) 86 | // getRecord('modals', [ 'selectors' ]) 87 | 88 | export { registerRecord, getRecords, getRecord } -------------------------------------------------------------------------------- /src/process-lib/statics.js: -------------------------------------------------------------------------------- 1 | /* 2 | Process Connect 3 | - Connects a Component to a Process to receive its data as props 4 | 5 | export default statics( 6 | { 7 | dashboard: ['actions', 'selectors'] 8 | }, 9 | ({ selectors, actions }) => ( 10 | connect( 11 | state => ({ 12 | grid: selectors.grid(state) 13 | }), 14 | actions 15 | )(DashboardGrid) 16 | ), 17 | { prefixed: false } 18 | ) 19 | */ 20 | import React from 'react' 21 | import { getRecord, getRecords } from './registry' 22 | 23 | export default (selected, connector, config = {}) => { 24 | return connector(getRecords(selected, config) || {}) 25 | } 26 | -------------------------------------------------------------------------------- /src/process-lib/wildcard.js: -------------------------------------------------------------------------------- 1 | const isObjLiteral = 2 | o => ( o !== null && ! Array.isArray(o) && typeof o !== 'function' && typeof o === 'object' ) 3 | 4 | // const REXPS = { 5 | // hasWildcard: /\*/, 6 | // } 7 | 8 | const hasWildcard = (pattern) => ( 9 | typeof pattern === 'string' 10 | ? pattern.includes('*') 11 | : Array.isArray(pattern) 12 | ? pattern.some(x => x.includes('*') ) 13 | : isObjLiteral(pattern) 14 | ? hasWildcard(Object.keys(pattern)) 15 | : false 16 | ) 17 | 18 | class Wildcard { 19 | 20 | constructor(str) { 21 | this.config = { 22 | matchLogic: 'and', 23 | matchCase: true 24 | } 25 | if (str) this.pattern(str) 26 | return this 27 | } 28 | 29 | static toPattern(patterns, config = { matchLogic: 'and', matchCase: true }) { 30 | typeof patterns === 'string' && ( patterns = [patterns] ) 31 | || isObjLiteral(patterns) && ( patterns = Object.keys(patterns) ) 32 | var compiledRE = '' 33 | for ( let pattern of patterns ) { 34 | compiledRE !== '' && config.matchLogic === 'or' && (compiledRE += '|') 35 | const re = ['^'] 36 | let index = 0 37 | for ( const char of pattern.split('') ) { 38 | if ( index === 0 ) { 39 | if ( char === '!' ) { 40 | re.push('(?!') 41 | index++ 42 | continue 43 | } else { 44 | re.push('(?=') 45 | } 46 | } 47 | if ( char === '*' ) { 48 | re.push('.*?') 49 | } else { 50 | re.push(char) 51 | } 52 | index++ 53 | } 54 | re.push('$)') 55 | compiledRE += re.join('') 56 | } 57 | 58 | return compiledRE 59 | }; 60 | 61 | 62 | 63 | re = pattern => this.pattern(pattern) 64 | 65 | pattern = (pattern) => { 66 | this._raw = pattern 67 | this._pattern = new RegExp(Wildcard.toPattern(pattern, this.config), this.__flags()) 68 | return this 69 | } 70 | 71 | __flags = (flags = '') => { 72 | ! 
this.config.matchCase && ( flags += 'i' ) 73 | return flags 74 | } 75 | 76 | search = (pattern, nomatch = undefined) => this.filterReversed(pattern, nomatch) 77 | 78 | match = (data, pattern = this._pattern) => ( 79 | ( pattern instanceof RegExp || ( pattern = new RegExp(Wildcard.toPattern(pattern, this.config), this.__flags() ) ) ) && 80 | typeof data === 'string' 81 | ? pattern.test(data) 82 | : Array.isArray(data) 83 | ? data.some(x => this.match(x)) 84 | : isObjLiteral(data) 85 | ? this.match(Object.keys(data)) 86 | : false 87 | ) 88 | 89 | filter = (data, nomatch = undefined) => ( 90 | typeof data === 'string' 91 | ? this.match(data) && data 92 | : Array.isArray(data) 93 | ? data.filter(x => this.match(x)) 94 | : isObjLiteral(data) 95 | ? Object.keys(data).reduce( 96 | (p, c) => this.match(c) && ( p[c] = data[c] ) && p || p, 97 | {} 98 | ) 99 | : nomatch 100 | ) 101 | 102 | filterReversed = (data, nomatch = undefined) => ( 103 | Array.isArray(this._raw) 104 | ? this._raw.filter(x => this.match(data, x)) 105 | : isObjLiteral(this._raw) 106 | ? Object.keys(this._raw).reduce( 107 | (p, c) => this.match(data, c) 108 | ? ( p[c] = this._raw[c] ) && p || p 109 | : p, 110 | {} 111 | ) 112 | : nomatch 113 | ) 114 | 115 | hasWildcard = (pattern = this._raw) => hasWildcard(pattern) 116 | 117 | logic = matchLogic => { 118 | this.config.matchLogic = matchLogic || 'and' 119 | return this 120 | } 121 | 122 | case = matchCase => { 123 | this.config.matchCase = matchCase 124 | return this 125 | } 126 | 127 | reset = () => { 128 | this.config = { matchCase: true, matchLogic: 'and' } 129 | this._pattern = undefined 130 | this._raw = undefined 131 | return this 132 | } 133 | 134 | } 135 | 136 | 137 | 138 | export { Wildcard, hasWildcard } -------------------------------------------------------------------------------- /src/statics.js: -------------------------------------------------------------------------------- 1 | export { default } from './process-lib/statics' -------------------------------------------------------------------------------- /test/index.js: -------------------------------------------------------------------------------- 1 | // TODO: Add Tests -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | var path = require('path'); 2 | var webpack = require('webpack'); 3 | var UglifyJsPlugin = webpack.optimize.UglifyJsPlugin; 4 | //var env = require('yargs').argv.mode; 5 | var libraryName = 'redux-saga-process'; 6 | var plugins = [], 7 | outputFile; 8 | const BabiliPlugin = require('babili-webpack-plugin'); 9 | const NodeExternals = require('webpack-node-externals'); 10 | 11 | console.log('Env: ', process.env.NODE_ENV); 12 | if (process.env.NODE_ENV === 'production') { 13 | // plugins.push( 14 | // new webpack.optimize.UglifyJsPlugin({ 15 | // sourceMap: false, 16 | // compress: { 17 | // screw_ie8: true, 18 | // warnings: false, 19 | // }, 20 | // mangle: { 21 | // screw_ie8: true, 22 | // }, 23 | // output: { 24 | // comments: false, 25 | // screw_ie8: true, 26 | // }, 27 | // }), 28 | // ); 29 | outputFile = libraryName + '.min.js'; 30 | plugins.push( 31 | new webpack.LoaderOptionsPlugin({ 32 | minimize: true, 33 | }), 34 | ); 35 | plugins.push( 36 | new webpack.DefinePlugin({ 37 | 'process.env.NODE_ENV': process.env.NODE_ENV, 38 | }), 39 | ); 40 | } else { 41 | outputFile = libraryName + '.js'; 42 | } 43 | 44 | module.exports = { 45 | entry: [ 46 | 
path.resolve(__dirname, './src/statics.js'), 47 | path.resolve(__dirname, './src/main.js'), 48 | ], 49 | 50 | // target: 'async-node', 51 | 52 | devtool: process.env.NODE_ENV !== 'production' && 'source-map', 53 | 54 | output: { 55 | path: path.resolve(__dirname, './dist'), 56 | filename: 'redux-saga-process.js', 57 | library: libraryName, 58 | libraryTarget: 'umd', 59 | umdNamedDefine: true, 60 | }, 61 | 62 | resolve: { 63 | modules: ['node_modules'], 64 | }, 65 | 66 | plugins: plugins, 67 | 68 | module: { 69 | rules: [ 70 | { 71 | test: /\.jsx?$/, 72 | exclude: /node_modules/, 73 | include: [path.resolve(__dirname, './src')], 74 | use: [ 75 | { 76 | loader: 'babel-loader', 77 | options: { 78 | babelrc: false, 79 | plugins: ['transform-class-properties'], 80 | presets: [ 81 | [ 82 | 'env', 83 | { 84 | modules: false, 85 | targets: { 86 | browsers: ['last 2 Chrome versions'], 87 | node: 'current', 88 | }, 89 | }, 90 | ], 91 | 'stage-0', 92 | ], 93 | env: { 94 | production: { 95 | presets: ['babili'], 96 | }, 97 | }, 98 | }, 99 | }, 100 | ], 101 | }, 102 | ], 103 | }, 104 | 105 | externals: [NodeExternals()], 106 | }; 107 | --------------------------------------------------------------------------------