├── .github
├── issue_template.md
└── pull_request_template.md
├── .gitignore
├── .npmignore
├── .nvmrc
├── .travis.yml
├── CONTRIBUTING.md
├── Gruntfile.coffee
├── LICENSE
├── README.md
├── bin
└── product-csv-sync
├── create_config.sh
├── data
├── cat-order-hint-import.csv
├── minimum-header.csv
├── moneySetAttributeProduct.json
├── moneySetAttributeProductType.json
├── myimport.csv
├── simple-import.csv
└── template_sample.csv
├── features
├── common.feature
├── export.feature
├── import.feature
├── state.feature
└── support
│ └── env.rb
├── package-lock.json
├── package.json
├── renovate.json
├── src
├── coffee
│ ├── categories.coffee
│ ├── channels.coffee
│ ├── constants.coffee
│ ├── customergroups.coffee
│ ├── export.coffee
│ ├── exportmapping.coffee
│ ├── globals.coffee
│ ├── header.coffee
│ ├── helpers.coffee
│ ├── import.coffee
│ ├── io
│ │ ├── reader.coffee
│ │ └── writer.coffee
│ ├── logger.coffee
│ ├── main.coffee
│ ├── mapping.coffee
│ ├── matchutils.coffee
│ ├── queryutils.coffee
│ ├── resourceutils.coffee
│ ├── run.coffee
│ ├── states.coffee
│ ├── taxes.coffee
│ ├── types.coffee
│ └── validator.coffee
└── spec
│ ├── categories.spec.coffee
│ ├── export.spec.coffee
│ ├── exportmapping.spec.coffee
│ ├── header.spec.coffee
│ ├── import.spec.coffee
│ ├── integration
│ ├── export.spec.coffee
│ ├── exportXlsx.spec.coffee
│ ├── impex.spec.coffee
│ ├── import.spec.coffee
│ ├── importArchive.spec.coffee
│ ├── importPublish.spec.coffee
│ ├── importXlsx.spec.coffee
│ ├── state.spec.coffee
│ └── testhelpers.coffee
│ ├── mapping.spec.coffee
│ ├── matchutils.spec.coffee
│ ├── queryutils.spec.coffee
│ ├── reader.spec.coffee
│ ├── types.spec.coffee
│ ├── unless.spec.coffee
│ └── validator.spec.coffee
└── yarn.lock
/.github/issue_template.md:
--------------------------------------------------------------------------------
1 | ### Support reference number
2 |
3 |
4 | ### Description
5 |
6 |
7 | ```js
8 | var your => (code) => here;
9 | ```
10 |
11 | ### Expected Behavior
12 |
13 |
14 | ### Current Behavior
15 |
16 |
17 |
18 | ### Context
19 |
20 |
21 | ### Possible Solution
22 |
23 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | #### Summary
2 |
3 |
4 | #### Description
5 |
6 |
7 | #### Todo
8 |
9 | - Tests
10 | - [ ] Unit
11 | - [ ] Integration
12 | - [ ] Acceptance
13 | - [ ] Documentation
14 |
15 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | node_modules
3 | *.log
4 | lib
5 | test
6 | tmp
7 | config.js
8 | .coveralls.yml
9 | coverage
10 | package
11 | .sphere-project-credentials
12 | .sphere/
13 | .project
14 | exported.csv
15 | .idea
16 | DE-product-template.csv
17 | DE-product.csv
18 | DE-productfamily-template.csv
19 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | *.log
3 | src
4 | test
5 | tmp
6 | config.js
7 | coverage
8 | .travis.yml
9 | .coveralls.yml
10 | create_config.sh
11 | *.coffee
12 | package
--------------------------------------------------------------------------------
/.nvmrc:
--------------------------------------------------------------------------------
1 | 10
2 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: node_js
2 | # NOTE: `branches` was previously declared twice in this file; for duplicate
3 | # YAML keys the later occurrence silently overrides the earlier one, so the
4 | # two exclusion lists are merged into a single key here.
5 | branches:
6 |   except:
7 |   - latest
8 |   - /^v[0-9]\.[0-9]\.[0-9]$/
9 | # Keep in sync with .nvmrc, which pins Node 10.
10 | node_js:
11 | - '10'
12 | before_script:
13 | - npm install -g grunt-cli
14 | - ./create_config.sh
15 | - gem install cucumber
16 | - gem install aruba -v 0.7.4
17 | script:
18 | - npm test
19 | - cat data/simple-import.csv | ./bin/product-csv-sync import --projectKey ${SPHERE_PROJECT_KEY} --clientId ${SPHERE_CLIENT_ID} --clientSecret ${SPHERE_CLIENT_SECRET} --suppressMissingHeaderWarning --matchBy sku
20 | - cat data/template_sample.csv | ./bin/product-csv-sync export --projectKey ${SPHERE_PROJECT_KEY} --clientId ${SPHERE_CLIENT_ID} --clientSecret ${SPHERE_CLIENT_SECRET}
21 | - ./bin/product-csv-sync import --projectKey ${SPHERE_PROJECT_KEY} --clientId ${SPHERE_CLIENT_ID} --clientSecret ${SPHERE_CLIENT_SECRET} --csv data/simple-import.csv --suppressMissingHeaderWarning --matchBy sku
22 | - ./bin/product-csv-sync template --projectKey ${SPHERE_PROJECT_KEY} --clientId ${SPHERE_CLIENT_ID} --clientSecret ${SPHERE_CLIENT_SECRET} --out data/template_sample.csv --all
23 | - ./bin/product-csv-sync export --projectKey ${SPHERE_PROJECT_KEY} --clientId ${SPHERE_CLIENT_ID} --clientSecret ${SPHERE_CLIENT_SECRET} --template data/template_sample.csv --out data/exported.csv
24 | - ./bin/product-csv-sync state --projectKey ${SPHERE_PROJECT_KEY} --clientId ${SPHERE_CLIENT_ID} --clientSecret ${SPHERE_CLIENT_SECRET} --changeTo delete --forceDelete
25 | notifications:
26 |   hipchat:
27 |     rooms:
28 |       secure: KgryNM2IN5oc407jAiNxKFnKohyOccHdcJ7Tb+RamcuMOGj6xftMMnTwDvlqLGsXiaoMNQh44E5DGiHiBtrwPx7x1Qn7uXWD4lYxCQL3Tbd0DnsW+1PyFmOR6X1N7qcYKRtq3xwS+lW0WuTr1HkbXZ8pZpKR2/ipLzANQJUITgY=
29 |     template:
30 |     - '%{repository}#%{build_number} (%{author}): %{message} (Details/Compare)'
31 |     format: html
32 |     on_success: change
33 |     on_failure: change
34 | env:
35 |   global:
36 |   - secure: nd50EfL32KaVoqMGNp4/gyl1XwvTDDRVfiMpMjHajorp9kAfMfFWVqo7N43PT9or2Bkks1h9/cFrDMLFwOz/Oc+D6CCcuvhEJbRq7gcMvaR5Ei2rEo2hG+Liph5EAj7yeiFoI73D4Oj0iQY/+qleRh451I4x3XW5ef8AP8pV+D8=
37 |   - secure: dQ35ND3Dd5FSBcePLRv7InBPkbvYm0011/xkPJeyOkLPLSmB+SXxTzBZur50YZELld9CYfNvMLkBluaNTjP/hVcb921Q53UUKG/k36X38sWXACQzIwEvbOHnU7p+A42oJDQWGLi1AKZrTUvaMZiEFf4HyB04dKnf+Xsc5aXwKQE=
38 |   - secure: d06TBgyndHegzTVf9aX7EkJvaQx1sDavv6jcfWQ2umhIvkK+hFN5T2+i2aTdOAzsmkew/zjKgVnY7sJ5eTCmCzd5HVKZz+EVGX/xr7Wo6omdyEyRkXliARQoaYumFUs9JFAAZXEUEvT52ClkeulGOF8yPFgh98KMmismsE9qapA=
39 | sudo: false
40 |
38 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | Please take a moment to review this document in order to make the contribution
4 | process easy and effective for everyone involved.
5 |
6 | Following these guidelines helps to communicate that you respect the time of
7 | the developers managing and developing this open source project. In return,
8 | they should reciprocate that respect in addressing your issue or assessing
9 | patches and features.
10 |
11 | We now have a central repository for the SDK and our tools. Gradually, we will be migrating all our tools to that repository. Keep an eye on it [here](https://github.com/commercetools/nodejs).
12 |
13 | ## Setting Up a Local Copy
14 | 1. Clone the repo with `git clone git@github.com:sphereio/sphere-node-product-csv-sync.git`
15 | 2. Run `npm install`
16 | 3. Setup the credentials by running
17 | ```bash
18 | $ ./create_config.sh
19 | ```
20 | Make sure you have _SPHERE_PROJECT_KEY_, _SPHERE_CLIENT_ID_, _SPHERE_CLIENT_SECRET_ in your environment variables before running. You can see the credentials generated in `config.js`
21 | 4. To run all packages tests simply do `grunt test` (we use [jasmine-node](https://github.com/mhevery/jasmine-node)).
22 | 5. Linting and static checks are done by `grunt lint`.
23 | 6. You can check the code coverage by running `grunt coverage`
24 |
25 | ## Styleguide
26 | Regarding code style like indentation and whitespace, **follow the conventions you see used in the source already**. Please have a look at this [referenced coffeescript](https://github.com/polarmobile/coffeescript-style-guide) styleguide when doing changes to the code.
27 | We also have a coffee linter.
28 | You can lint your code by running `grunt lint`
29 |
30 | ## Commit message
31 | Make sure your commit messages follow [Angular's commit message format](https://github.com/angular/angular.js/blob/master/CONTRIBUTING.md#-git-commit-guidelines). An example is given below:
32 |
33 | ```
34 | docs(contributing): add example of a full commit message
35 |
36 | The example commit message in the contributing.md document is not a concrete example. This is a problem because the
37 | contributor is left to imagine what the commit message should look like based on a description rather than an example. Fix the
38 | problem by making the example concrete and imperative.
39 |
40 | Closes #1
41 | BREAKING CHANGE: imagination no longer works
42 | ```
43 |
44 | ## Branching
45 | When creating a branch, use the issue number (without the '#') as the prefix and add a short title, like: `1-commit-message-example`
46 |
47 | ## Labels
48 | We have two categories of labels, one for _status_ and another for _type_ of issue.
49 | Please add the relevant labels as needed. When working on an issue, please add the _status: in progress_ label. When you want the issue to be reviewed, add the _status: in review_ label and it will be reviewed.
50 |
51 | ## Tests
52 | Before submitting a PR, please make sure your code is well unit tested and that the build passes on CI.
53 | We use [jasmine-node](https://github.com/mhevery/jasmine-node) for testing.
54 |
55 | ## Submitting a Pull Request
56 | Good pull requests, such as patches, improvements, and new features, are a fantastic help. They should remain focused in scope and avoid containing unrelated commits.
57 |
58 | Please **ask first** if somebody else is already working on this or the core developers think your feature is in-scope for the related package / project. Generally always have a related issue with discussions for whatever you are including.
59 |
60 | Please also provide a **test plan**, i.e. specify how you verified that your addition works.
61 |
62 | Please adhere to the coding conventions used throughout a project (indentation,
63 | accurate comments, etc.) and any other requirements (such as test coverage).
64 |
65 | ## Assignees and reviewees
66 | After submitting a PR, assign the PR to yourself and add part of the NodeJS team in the reviewers section.
67 |
68 | ## Releases
69 | Releasing a new version is completely automated using the Grunt task `grunt release`.
70 |
71 | ```javascript
72 | grunt release // patch release
73 | grunt release:minor // minor release
74 | grunt release:major // major release
75 | ```
76 |
77 | Once the above steps are completed, please release the same version on GitHub.
78 | You can see the releases here https://github.com/sphereio/sphere-node-product-csv-sync/releases.
79 |
--------------------------------------------------------------------------------
/Gruntfile.coffee:
--------------------------------------------------------------------------------
1 | 'use strict'
2 |
3 | module.exports = (grunt) ->
4 |   # project configuration
5 |   grunt.initConfig
6 |     # load package information
7 |     pkg: grunt.file.readJSON 'package.json'
8 |
9 |     meta:
10 |       banner: "/* ===========================================================\n" +
11 |         "# <%= pkg.name %> - v<%= pkg.version %>\n" +
12 |         "# ==============================================================\n" +
13 |         "# Copyright (c) 2014 <%= pkg.author.name %>\n" +
14 |         "# Licensed under the MIT license.\n" +
15 |         "*/\n"
16 |
17 |     coffeelint: # static analysis; rules come from sphere-coffeelint's shared config
18 |       options: grunt.file.readJSON('node_modules/sphere-coffeelint/coffeelint.json')
19 |       default: ["Gruntfile.coffee", "src/**/*.coffee"]
20 |
21 |     clean: # remove compiled output: lib/ (build) and test/ (compiled specs)
22 |       default: "lib"
23 |       test: "test"
24 |
25 |     coffee: # compile CoffeeScript: src/coffee -> lib/, src/spec -> test/
26 |       options:
27 |         bare: true
28 |       default:
29 |         files: grunt.file.expandMapping(['**/*.coffee'], 'lib/',
30 |           flatten: false
31 |           cwd: 'src/coffee'
32 |           ext: '.js'
33 |           rename: (dest, matchedSrcPath) ->
34 |             dest + matchedSrcPath
35 |         )
36 |       test:
37 |         files: grunt.file.expandMapping(['**/*.spec.coffee'], 'test/',
38 |           flatten: false
39 |           cwd: 'src/spec'
40 |           ext: '.spec.js'
41 |           rename: (dest, matchedSrcPath) ->
42 |             dest + matchedSrcPath
43 |         )
44 |       testhelper:
45 |         files: grunt.file.expandMapping(['**/testhelpers.coffee'], 'test/',
46 |           flatten: false
47 |           cwd: 'src/spec'
48 |           ext: '.js'
49 |           rename: (dest, matchedSrcPath) ->
50 |             dest + matchedSrcPath
51 |         )
52 |
53 |     concat: # prepend the license banner (meta.banner) to every compiled file in lib/
54 |       options:
55 |         banner: "<%= meta.banner %>"
56 |       default:
57 |         expand: true
58 |         flatten: true
59 |         cwd: "lib"
60 |         src: ["*.js"]
61 |         dest: "lib"
62 |         ext: ".js"
63 |
64 |     jasmine: # NOTE(review): looks unused — grunt-contrib-jasmine is never loaded below; specs run via shell:jasmine. Confirm before removing.
65 |       default:
66 |         options:
67 |           oneFailurePerSpec: true
68 |           random: false
69 |
70 |     # watching for changes
71 |     watch:
72 |       default:
73 |         files: ["src/coffee/**/*.coffee"]
74 |         tasks: ["build"]
75 |       dev:
76 |         files: ["src/coffee/**/*.coffee"]
77 |         tasks: ["watchDev"]
78 |       test:
79 |         files: ["src/**/*.coffee"]
80 |         tasks: ["test"]
81 |       unittest:
82 |         files: ["src/**/*.coffee"]
83 |         tasks: ["test"]
84 |
85 |     shell: # external commands: jasmine test runner, istanbul coverage, npm publish
86 |       options:
87 |         stdout: true
88 |         stderr: true
89 |         failOnError: true
90 |       jasmine:
91 |         command: "jasmine test/**/*.spec.js"
92 |       coverage:
93 |         command: "node_modules/.bin/istanbul cover jasmine test/**/*.spec.js"
94 |       publish:
95 |         command: 'npm publish'
96 |
97 |     bump: # version bump + git commit/tag/push; driven by the `release` task below
98 |       options:
99 |         files: ['package.json']
100 |         updateConfigs: ['pkg']
101 |         commit: true
102 |         commitMessage: 'Bump version to %VERSION%'
103 |         commitFiles: ['-a']
104 |         createTag: true
105 |         tagName: 'v%VERSION%'
106 |         tagMessage: 'Version %VERSION%'
107 |         push: true
108 |         pushTo: 'origin'
109 |         gitDescribeOptions: '--tags --always --abbrev=1 --dirty=-d'
110 |
111 |   # load plugins that provide the tasks defined in the config
112 |   grunt.loadNpmTasks "grunt-coffeelint"
113 |   grunt.loadNpmTasks "grunt-contrib-clean"
114 |   grunt.loadNpmTasks "grunt-contrib-coffee"
115 |   grunt.loadNpmTasks "grunt-contrib-concat"
116 |   grunt.loadNpmTasks "grunt-contrib-watch"
117 |   grunt.loadNpmTasks "grunt-shell"
118 |   grunt.loadNpmTasks 'grunt-bump'
119 |
120 |   # register tasks
121 |   grunt.registerTask "build", ["clean", "coffeelint", "coffee", "concat"]
122 |   grunt.registerTask "watchDev", ["clean", "coffee"] # faster rebuild: skips lint/banner
123 |   grunt.registerTask "coverage", ["build", "shell:coverage"]
124 |   grunt.registerTask "test", ["build", "shell:jasmine"]
125 |   grunt.registerTask 'release', 'Release a new version, push it and publish it', (target) ->
126 |     target = 'patch' unless target # default to a patch release when no target given
127 |     grunt.task.run "bump-only:#{target}", 'bump-commit', 'shell:publish'
128 |     #grunt.task.run "bump-only:#{target}", 'test', 'bump-commit', 'shell:publish' Disabled for time being
129 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2014 SPHERE.IO
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of
6 | this software and associated documentation files (the "Software"), to deal in
7 | the Software without restriction, including without limitation the rights to
8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
9 | the Software, and to permit persons to whom the Software is furnished to do so,
10 | subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 |
--------------------------------------------------------------------------------
/bin/product-csv-sync:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | var path = require('path');
4 | require(path.join(__dirname,'../lib/run.js'));
5 |
--------------------------------------------------------------------------------
/create_config.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | cat > "config.js" << EOF
4 | /* SPHERE.IO credentials */
5 | exports.config = {
6 | client_id: "${SPHERE_CLIENT_ID}",
7 | client_secret: "${SPHERE_CLIENT_SECRET}",
8 | project_key: "${SPHERE_PROJECT_KEY}"
9 | }
10 |
11 | exports.logConfig = {
12 | levelStream: 'warn'
13 | }
14 |
15 | exports.logentries_token = "${LOGENTRIES_TOKEN}"
16 | EOF
17 |
18 | cat > "${HOME}/.sphere-project-credentials" << EOF
19 | ${SPHERE_PROJECT_KEY}:${SPHERE_CLIENT_ID}:${SPHERE_CLIENT_SECRET}
20 | EOF
21 |
--------------------------------------------------------------------------------
/data/cat-order-hint-import.csv:
--------------------------------------------------------------------------------
1 | productType,name,id,categoryOrderHints
2 | 214b3150-bdc3-4bab-a386-93cbb8b0746d,testtest,2a7880c6-691b-497b-96f4-91bfe48a5427,Snowboard equipment:0.4
3 |
--------------------------------------------------------------------------------
/data/minimum-header.csv:
--------------------------------------------------------------------------------
1 | productType,variantId
2 |
--------------------------------------------------------------------------------
/data/moneySetAttributeProduct.json:
--------------------------------------------------------------------------------
1 | {
2 | "key": "productKey",
3 | "productType": {
4 | "typeId": "product-type",
5 | "id": ""
6 | },
7 | "name": {
8 | "en": "Money product"
9 | },
10 | "description": {
11 | "en": "money product"
12 | },
13 | "categories": [],
14 | "slug": {
15 | "en": "money-product"
16 | },
17 | "masterVariant": {
18 | "id": 1,
19 | "key": "variantKey",
20 | "sku": "11111",
21 | "prices": [
22 | {
23 | "value": {
24 | "currencyCode": "EUR",
25 | "centAmount": 12900
26 | },
27 | "validFrom": "2001-09-11T14:00:00.000Z",
28 | "validUntil": "2015-09-11T14:00:00.000Z",
29 | "tiers": [
30 | {
31 | "value": {
32 | "currencyCode": "EUR",
33 | "centAmount": 12000
34 | },
35 | "minimumQuantity": 2000
36 | }
37 | ],
38 | "country": "DE"
39 | }
40 | ],
41 | "images": [],
42 | "attributes": [
43 | {
44 | "name": "money_attribute",
45 | "value": [
46 | {
47 | "centAmount": 123456,
48 | "currencyCode": "EUR"
49 | },
50 | {
51 | "centAmount": 98765,
52 | "currencyCode": "GBP"
53 | }
54 | ]
55 | }
56 | ],
57 | "assets": [],
58 | "availability": {
59 | "isOnStock": false,
60 | "availableQuantity": 0
61 | }
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/data/moneySetAttributeProductType.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "moneyProduct",
3 | "description": "test money product",
4 | "classifier": "Complex",
5 | "attributes": [
6 | {
7 | "name": "money_attribute",
8 | "label": {
9 | "en": "Test money attribute"
10 | },
11 | "isRequired": false,
12 | "type": {
13 | "name": "set",
14 | "elementType": {
15 | "name": "money"
16 | }
17 | },
18 | "isSearchable": false,
19 | "inputHint": "SingleLine"
20 | }
21 | ]
22 | }
23 |
--------------------------------------------------------------------------------
/data/myimport.csv:
--------------------------------------------------------------------------------
1 | searchKeywords.en,setOfLenum,color,productType,variantId,id,sku,name,name.de-DE,searchKeywords.fr-BE,color.en,color.de-DE,color.fr-BE,setOfLenum.de-DE,setOfLenum.fr-BE,myEnum,myEnumSet,searchKeywords
--------------------------------------------------------------------------------
/data/simple-import.csv:
--------------------------------------------------------------------------------
1 | productType,variantId,name,sku
2 | ImpEx with all types,1,myProduct,12345
3 |
--------------------------------------------------------------------------------
/data/template_sample.csv:
--------------------------------------------------------------------------------
1 | _published,_hasStagedChanges,productType,variantId,id,sku,prices,tax,categories,images,name.en,description.en,slug.en,metaTitle.en,metaDescription.en,metaKeywords.en,categoryOrderHints
2 |
--------------------------------------------------------------------------------
/features/common.feature:
--------------------------------------------------------------------------------
1 | Feature: Show common information for tooling
2 |
3 | Scenario: Show general help
4 | When I run `../../bin/product-csv-sync`
5 | Then the exit status should be 0
6 | And the output should contain:
7 | """
8 | Usage: product-csv-sync
9 | """
10 |
11 | Scenario: Show help when running import subcommand
12 | When I run `../../bin/product-csv-sync import`
13 | Then the exit status should be 0
14 | And the output should contain:
15 | """
16 | Usage: import
17 | """
18 |
19 | Scenario: Show help when running export subcommand
20 | When I run `../../bin/product-csv-sync export`
21 | Then the exit status should be 0
22 | And the output should contain:
23 | """
24 | Usage: export
25 | """
26 |
27 | Scenario: Show help when running state subcommand
28 | When I run `../../bin/product-csv-sync state`
29 | Then the exit status should be 0
30 | And the output should contain:
31 | """
32 | Usage: state
33 | """
34 |
35 | Scenario: Show help when running template subcommand
36 | When I run `../../bin/product-csv-sync template`
37 | Then the exit status should be 0
38 | And the output should contain:
39 | """
40 | Usage: template
41 | """
--------------------------------------------------------------------------------
/features/export.feature:
--------------------------------------------------------------------------------
1 | Feature: Export products
2 |
3 | Scenario: Import some products first
4 | Given a file named "i.csv" with:
5 | """
6 | productType,name,variantId,sku
7 | ImpEx with all types,Product 1,1,sku-1-123
8 | ImpEx with all types,Product 2,1,sku-2-123
9 | ImpEx with all types,Product 3,1,0123
10 | """
11 | When I run `../../bin/product-csv-sync import --projectKey sphere-node-product-csv-sync-94 --csv i.csv --matchBy sku`
12 | Then the exit status should be 0
13 | And the output should contain:
14 | """
15 | Finished processing 3 product(s)
16 | """
17 |
18 | @wip
19 | Scenario: Export products with --fillAllRows
20 | Given a file named "t.csv" with:
21 | """
22 | productType,name,sku
23 | """
24 | When I run `../../bin/product-csv-sync export --projectKey sphere-node-product-csv-sync-94 --template 't.csv' --out 'exported.csv' --fillAllRows`
25 | Then the exit status should be 0
26 | And the output should contain:
27 | """
28 | Fetched 3 product(s)
29 | """
30 | Then a file named "exported.csv" should exist
31 | And the file "exported.csv" should match /^productType,name,sku$/
32 | And the file "exported.csv" should match /^ImpEx with all types,Product 3,0123$/
33 | And the file "exported.csv" should match /^ImpEx with all types,Product 3,2345$/
34 |
35 | Scenario: Export products by query
36 | When I run `../../bin/product-csv-sync export --projectKey sphere-node-product-csv-sync-94 --template '../../data/template_sample.csv' --out '../../data/exported.csv' --queryString 'where=name(en = "Product 1")&staged=true'`
37 | Then the exit status should be 0
38 | And the output should contain:
39 | """
40 | Fetched 1 product(s)
41 | """
42 |
43 | Scenario: Export products by query (encoded)
44 | When I run `../../bin/product-csv-sync export --projectKey sphere-node-product-csv-sync-94 --template '../../data/template_sample.csv' --out '../../data/exported.csv' --queryString 'where=name(en%20%3D%20%22Product%201%22)&staged=true' --queryEncoded`
45 | Then the exit status should be 0
46 | And the output should contain:
47 | """
48 | Fetched 1 product(s)
49 | """
50 |
51 | @wip
52 | Scenario: Export products by search
53 | When I run `../../bin/product-csv-sync export --projectKey sphere-node-product-csv-sync-94 --template '../../data/template_sample.csv' --out '../../data/exported.csv' --queryString 'text.en=0123&staged=true' --queryType search`
54 | Then the exit status should be 0
55 | And the output should contain:
56 | """
57 | Fetched 1 product(s)
58 | """
59 |
60 | @wip
61 | Scenario: Export all products
62 | When I run `../../bin/product-csv-sync export --projectKey sphere-node-product-csv-sync-94 --out 'exported.zip' --fullExport`
63 | Then the exit status should be 0
64 | And the output should contain:
65 | """
66 | Processing products with productType "ImpEx with all types"
67 | Fetched 3 product(s).
68 | Processing products with productType "theType"
69 | Fetched 0 product(s).
70 | All productTypes were processed - archiving output folder
71 | Folder was archived and saved to exported.zip
72 | Export done.
73 | """
74 | Then a file named "exported.zip" should exist
--------------------------------------------------------------------------------
/features/import.feature:
--------------------------------------------------------------------------------
1 | Feature: Import products
2 |
3 | Scenario: Can't find product type
4 | Given a file named "i.csv" with:
5 | """
6 | productType,variantId
7 | foo,1
8 | """
9 | When I run `../../bin/product-csv-sync import --projectKey sphere-node-product-csv-sync-94 --csv i.csv --matchBy sku`
10 | Then the exit status should be 1
11 | And the output should contain:
12 | """
13 | CSV file with 2 row(s) loaded.
14 | [ '[row 2] Can\'t find product type for \'foo\'' ]
15 | """
16 |
17 | Scenario: Show message when delimiter selection clashes
18 | Given a file named "i.csv" with:
19 | """
20 | productType,variantId
21 | """
22 | When I run `../../bin/product-csv-sync import --projectKey sphere-node-product-csv-sync-94 --csvDelimiter ';' --csv i.csv --matchBy sku`
23 | Then the exit status should be 1
24 | And the output should contain:
25 | """
26 | [ 'Your selected delimiter clash with each other: {"csvDelimiter":";","csvQuote":"\\"","language":".","multiValue":";","categoryChildren":">"}' ]
27 | """
28 |
29 | Scenario: Import/update and remove a product
30 | When I run `../../bin/product-csv-sync state --projectKey sphere-node-product-csv-sync-94 --changeTo delete` interactively
31 | And I type "yes"
32 |
33 | Given a file named "i.csv" with:
34 | """
35 | productType,variantId,name,sku
36 | ImpEx with all types,1,myProduct,12345
37 | """
38 | When I run `../../bin/product-csv-sync import --projectKey sphere-node-product-csv-sync-94 --csv i.csv --matchBy sku`
39 | Then the exit status should be 0
40 | And the output should contain:
41 | """
42 | [ '[row 2] New product created.' ]
43 | """
44 |
45 | When I run `../../bin/product-csv-sync import --projectKey sphere-node-product-csv-sync-94 --csv i.csv --matchBy sku`
46 | Then the exit status should be 0
47 | And the output should contain:
48 | """
49 | [ '[row 2] Product update not necessary.' ]
50 | """
51 |
52 | Given a file named "u.csv" with:
53 | """
54 | productType,variantId,name,sku
55 | ImpEx with all types,1,myProductCHANGED,12345
56 | """
57 | When I run `../../bin/product-csv-sync import --projectKey sphere-node-product-csv-sync-94 --csv u.csv --matchBy sku`
58 | Then the exit status should be 0
59 | And the output should contain:
60 | """
61 | [ '[row 2] Product updated.' ]
62 | """
63 |
64 | When I run `../../bin/product-csv-sync state --projectKey sphere-node-product-csv-sync-94 --changeTo delete` interactively
65 | And I type "yes"
66 | Then the exit status should be 0
67 | And the output should contain:
68 | """
69 | [ '[row 0] Product deleted.' ]
70 | """
71 |
72 | Scenario: Match products
73 |
74 | Given a file named "i.csv" with:
75 | """
76 | id,productType,slug.en,variantId,name,sku,attr-text-n
77 | 0912,ImpEx with all types,slug_1,1,myProduct,12345,key_1
78 | """
79 | When I run `../../bin/product-csv-sync import --projectKey sphere-node-product-csv-sync-94 --csv i.csv --matchBy sku`
80 | Then the exit status should be 0
81 | And the output should contain:
82 | """
83 | [ '[row 2] New product created.' ]
84 | """
85 |
86 | Given a file named "u.csv" with:
87 | """
88 | id,productType,slug.en,variantId,name,sku,attr-text-n
89 | 0912,ImpEx with all types,slug_1,1,myProduct_mb_id,12345,key_1
90 | """
91 | When I run `../../bin/product-csv-sync import --projectKey sphere-node-product-csv-sync-94 --csv u.csv --matchBy sku`
92 | Then the exit status should be 0
93 | And the output should contain:
94 | """
95 | [ '[row 2] Product updated.' ]
96 | """
97 |
98 | Given a file named "u.csv" with:
99 | """
100 | id,productType,slug.en,variantId,name,sku,attr-text-n
101 | 0912,ImpEx with all types,slug_1,1,myProduct_mb_slug,12345,key_1
102 | """
103 | When I run `../../bin/product-csv-sync import -m slug --projectKey sphere-node-product-csv-sync-94 --csv u.csv --matchBy sku`
104 | Then the exit status should be 0
105 | And the output should contain:
106 | """
107 | [ '[row 2] Product updated.' ]
108 | """
109 |
110 | Given a file named "u.csv" with:
111 | """
112 | id,productType,slug.en,variantId,name,sku,attr-text-n
113 | 0912,ImpEx with all types,slug_1,1,myProduct_mb_sku,12345,key_1
114 | """
115 | When I run `../../bin/product-csv-sync import -m sku --projectKey sphere-node-product-csv-sync-94 --csv u.csv --matchBy sku`
116 | Then the exit status should be 0
117 | And the output should contain:
118 | """
119 | [ '[row 2] Product updated.' ]
120 | """
121 |
122 | Given a file named "u.csv" with:
123 | """
124 | id,productType,slug.en,variantId,name,sku,attr-text-n
125 | 0912,ImpEx with all types,slug_1,1,myProduct_mb_ca,12345,key_1
126 | """
127 | When I run `../../bin/product-csv-sync import -m attr-text-n --projectKey sphere-node-product-csv-sync-94 --csv u.csv --matchBy sku`
128 | Then the exit status should be 0
129 | And the output should contain:
130 | """
131 | [ '[row 2] Product updated.' ]
132 | """
133 |
134 | When I run `../../bin/product-csv-sync state --projectKey sphere-node-product-csv-sync-94 --changeTo delete` interactively
135 | And I type "yes"
136 | Then the exit status should be 0
137 | And the output should contain:
138 | """
139 | [ '[row 0] Product deleted.' ]
140 | """
141 |
142 | Scenario: Batch import
143 |
144 | Given a file named "i.csv" with:
145 | """
146 | productType,slug.en,name,variantId,sku,attr-text-n
147 | ImpEx with all types,slug_1,myProduct,1,1,key_1
148 | ,,,2,1_1,key_1_1
149 | ,,,2,1_2,key_1_2
150 | ImpEx with all types,slug_2,myProduct,1,2,key_1
151 | ,,,2,2_1,key_2_1
152 | ,,,2,2_2,key_2_2
153 | ImpEx with all types,slug_3,myProduct,1,3,key_1
154 | ImpEx with all types,slug_4,myProduct,1,4,key_1
155 | ImpEx with all types,slug_5,myProduct,1,5,key_1
156 | ImpEx with all types,slug_6,myProduct,1,6,key_1
157 | ImpEx with all types,slug_7,myProduct,1,7,key_1
158 | ImpEx with all types,slug_8,myProduct,1,8,key_1
159 | ImpEx with all types,slug_9,myProduct,1,9,key_1
160 | ImpEx with all types,slug_10,myProduct,1,10,key_1
161 | ImpEx with all types,slug_11,myProduct,1,11,key_1
162 | ImpEx with all types,slug_12,myProduct,1,12,key_1
163 | ImpEx with all types,slug_13,myProduct,1,13,key_1
164 | ImpEx with all types,slug_14,myProduct,1,14,key_1
165 | ImpEx with all types,slug_15,myProduct,1,15,key_1
166 | ImpEx with all types,slug_16,myProduct,1,16,key_1
167 | ImpEx with all types,slug_17,myProduct,1,17,key_1
168 | ImpEx with all types,slug_18,myProduct,1,18,key_1
169 | ImpEx with all types,slug_19,myProduct,1,19,key_1
170 | ImpEx with all types,slug_20,myProduct,1,20,key_1
171 | ImpEx with all types,slug_21,myProduct,1,21,key_1
172 | ,,,2,21_1,key_21_1
173 | ,,,2,21_2,key_21_2
174 | """
175 | When I run `../../bin/product-csv-sync import --projectKey sphere-node-product-csv-sync-94 --csv i.csv --matchBy slug`
176 | Then the exit status should be 0
177 | And the output should contain:
178 | """
179 | [ '[row 2] New product created.',
180 | '[row 5] New product created.',
181 | '[row 8] New product created.',
182 | '[row 9] New product created.',
183 | '[row 10] New product created.',
184 | '[row 11] New product created.',
185 | '[row 12] New product created.',
186 | '[row 13] New product created.',
187 | '[row 14] New product created.',
188 | '[row 15] New product created.',
189 | '[row 16] New product created.',
190 | '[row 17] New product created.',
191 | '[row 18] New product created.',
192 | '[row 19] New product created.',
193 | '[row 20] New product created.',
194 | '[row 21] New product created.',
195 | '[row 22] New product created.',
196 | '[row 23] New product created.',
197 | '[row 24] New product created.',
198 | '[row 25] New product created.',
199 | '[row 26] New product created.' ]
200 | """
201 |
202 | When I run `../../bin/product-csv-sync state --projectKey sphere-node-product-csv-sync-94 --changeTo delete` interactively
203 | And I type "yes"
204 | Then the exit status should be 0
205 | And the output should contain:
206 | """
207 | [ '[row 0] Product deleted.',
208 | '[row 0] Product deleted.',
209 | '[row 0] Product deleted.',
210 | '[row 0] Product deleted.',
211 | '[row 0] Product deleted.',
212 | '[row 0] Product deleted.',
213 | '[row 0] Product deleted.',
214 | '[row 0] Product deleted.',
215 | '[row 0] Product deleted.',
216 | '[row 0] Product deleted.',
217 | '[row 0] Product deleted.',
218 | '[row 0] Product deleted.',
219 | '[row 0] Product deleted.',
220 | '[row 0] Product deleted.',
221 | '[row 0] Product deleted.',
222 | '[row 0] Product deleted.',
223 | '[row 0] Product deleted.',
224 | '[row 0] Product deleted.',
225 | '[row 0] Product deleted.',
226 | '[row 0] Product deleted.',
227 | '[row 0] Product deleted.' ]
228 | """
229 |
--------------------------------------------------------------------------------
/features/state.feature:
--------------------------------------------------------------------------------
1 | Feature: Publish and unpublish products
2 |
3 | Scenario: Import publish, unpublish and remove a product
4 | When I run `../../bin/product-csv-sync state --projectKey sphere-node-product-csv-sync-94 --changeTo delete` interactively
5 | And I type "yes"
6 |
7 | Given a file named "i.csv" with:
8 | """
9 | productType,variantId,name,sku
10 | ImpEx with all types,1,myPublishedProduct,123456789
11 | """
12 | When I run `../../bin/product-csv-sync import --projectKey sphere-node-product-csv-sync-94 --csv i.csv --matchBy sku`
13 | Then the exit status should be 0
14 | And the output should contain:
15 | """
16 | [ '[row 2] New product created.' ]
17 | """
18 |
19 | When I run `../../bin/product-csv-sync state --projectKey sphere-node-product-csv-sync-94 --changeTo publish`
20 | Then the exit status should be 0
21 | And the output should contain:
22 | """
23 | [ '[row 0] Product published.' ]
24 | """
25 |
26 | When I run `../../bin/product-csv-sync state --projectKey sphere-node-product-csv-sync-94 --changeTo unpublish`
27 | Then the exit status should be 0
28 | And the output should contain:
29 | """
30 | [ '[row 0] Product unpublished.' ]
31 | """
32 |
33 | When I run `../../bin/product-csv-sync state --projectKey sphere-node-product-csv-sync-94 --changeTo delete` interactively
34 | And I type "yes"
35 | Then the exit status should be 0
36 | And the output should contain:
37 | """
38 | [ '[row 0] Product deleted.' ]
39 | """
40 |
--------------------------------------------------------------------------------
/features/support/env.rb:
--------------------------------------------------------------------------------
require 'aruba/cucumber'

# Reset per-scenario state before every Cucumber scenario:
# - @processes: fresh list for child processes spawned during the scenario
# - @aruba_timeout_seconds: allow slow CLI invocations up to 30s before
#   aruba treats them as timed out
Before do
  @processes = []
  @aruba_timeout_seconds = 30
end
7 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "sphere-node-product-csv-sync",
3 | "description": "Import and export products to and from a commercetools project.",
4 | "version": "4.8.0",
5 | "homepage": "https://github.com/sphereio/sphere-node-product-csv-sync",
6 | "private": false,
7 | "author": {
8 | "name": "Hajo Eichler",
9 | "email": "hajo.eichler@commercetools.de"
10 | },
11 | "maintainers": [
12 | {
13 | "name": "Nicola Molinari",
14 | "email": "nicola.molinari@commercetools.de"
15 | }
16 | ],
17 | "repository": {
18 | "type": "git",
19 | "url": "https://github.com/sphereio/sphere-node-product-csv-sync.git"
20 | },
21 | "bugs": {
22 | "url": "https://github.com/sphereio/sphere-node-product-csv-sync/issues"
23 | },
24 | "license": "MIT",
25 | "bin": {
26 | "product-csv-sync": "bin/product-csv-sync"
27 | },
28 | "main": "main.js",
29 | "engines": {
30 | "node": ">= 8"
31 | },
32 | "scripts": {
33 | "lint": "grunt coffeelint",
34 | "build": "grunt build",
35 | "watch": "grunt watch:dev",
36 | "test": "grunt coverage"
37 | },
38 | "dependencies": {
39 | "@commercetools/api-request-builder": "^5.0.0",
40 | "@commercetools/sdk-client": "^2.0.0",
41 | "@commercetools/sdk-middleware-auth": "^5.0.0",
42 | "@commercetools/sdk-middleware-http": "^5.0.0",
43 | "@commercetools/sdk-middleware-queue": "^2.0.0",
44 | "@commercetools/sdk-middleware-user-agent": "^2.0.0",
45 | "@commercetools/sync-actions": "^3.0.0",
46 | "archiver": "^3.0.0",
47 | "bluebird": "^3.5.1",
48 | "commander": "2.3.0",
49 | "csv": "0.3.7",
50 | "exceljs": "^1.1.1",
51 | "extract-zip": "^1.6.5",
52 | "iconv-lite": "0.5.0",
53 | "lodash.chunk": "^4.2.0",
54 | "lodash.merge": "^4.6.0",
55 | "node-fetch": "^2.6.0",
56 | "optimist": "0.6.1",
57 | "prompt": "1.0.0",
58 | "sphere-node-utils": "^2.0.1",
59 | "tmp": "0.1.0",
60 | "underscore": "1.9.1",
61 | "underscore-mixins": "*",
62 | "underscore.string": "^3.3.4",
63 | "walk-sync": "^1.0.0"
64 | },
65 | "devDependencies": {
66 | "coffeelint": "^2.1.0",
67 | "coveralls": "^3.0.0",
68 | "cuid": "2.1.6",
69 | "grunt": "^1.0.3",
70 | "grunt-bump": "^0.8.0",
71 | "grunt-coffeelint": "0.0.16",
72 | "grunt-contrib-clean": "^2.0.0",
73 | "grunt-contrib-coffee": "^2.0.0",
74 | "grunt-contrib-concat": "^1.0.1",
75 | "grunt-contrib-jasmine": "^2.2.0",
76 | "grunt-contrib-watch": "^1.0.0",
77 | "grunt-shell": "^3.0.0",
78 | "handlebars": "^4.7.6",
79 | "istanbul": "0.4.5",
80 | "jasmine": "3.4.0",
81 | "jasmine-spec-reporter": "^4.2.1",
82 | "sinon": "^7.0.0",
83 | "sphere-coffeelint": "git://github.com/sphereio/sphere-coffeelint.git#master"
84 | },
85 | "keywords": [
86 | "sphere",
87 | "sphereio",
88 | "api",
89 | "sync",
90 | "import",
91 | "export",
92 | "csv",
93 | "cli"
94 | ]
95 | }
96 |
--------------------------------------------------------------------------------
/renovate.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": [
3 | "config:base",
4 | ":preserveSemverRanges",
5 | ":automergeMinor",
6 | "group:allNonMajor",
7 | ":prHourlyLimitNone",
8 | ":prConcurrentLimitNone",
9 | ":automergeBranchMergeCommit",
10 | ":enableVulnerabilityAlerts",
11 | ":disableMajorUpdates",
12 | "schedule:monthly"
13 | ],
14 | "lockFileMaintenance": {
15 | "enabled": true
16 | },
17 | "ignoreDeps": ["csv", "commander"]
18 | }
19 |
--------------------------------------------------------------------------------
/src/coffee/categories.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | GLOBALS = require './globals'
3 | { fetchResources } = require './resourceutils'
4 |
5 | # TODO:
6 | # - JSDoc
7 | # - make it util only
# Lookup service for commercetools categories.
#
# Builds in-memory maps in both directions (id -> index/slug/externalId,
# name -> id, externalId -> id, key -> id, fully qualified name <-> id)
# so category references can be resolved without further API calls.
class Categories
  constructor: ->
    @id2index = {}
    @id2externalId = {}
    @id2slug = {}
    @name2id = {}
    @externalId2id = {}
    @fqName2id = {}
    @id2fqName = {}
    @key2Id = {}
    @duplicateNames = []

  # Fetch all categories of the project (paging handled by fetchResources).
  getAll: (client, projectKey) ->
    fetchResources(client, projectKey, 'categories')

  # Populate every lookup map from the given list of category resources.
  buildMaps: (categories) ->
    # first pass: direct per-category maps
    for category, position in categories
      localizedName = category.name[GLOBALS.DEFAULT_LANGUAGE]
      @id2index[category.id] = position
      @key2Id[category.key] = category.id if category.key
      @id2slug[category.id] = category.slug[GLOBALS.DEFAULT_LANGUAGE]
      # remember names that occur more than once - they are ambiguous lookups
      @duplicateNames.push localizedName if _.has(@name2id, localizedName)
      @name2id[localizedName] = category.id
      @id2externalId[category.id] = category.externalId
      @externalId2id[category.externalId] = category.id

    # second pass: fully qualified names built from the ancestor chain
    for category in categories
      fqName = ''
      if category.ancestors
        for ancestor in category.ancestors
          ancestorIndex = @id2index[ancestor.id]
          continue if isNaN(ancestorIndex)
          ancestorCategory = categories[ancestorIndex]
          continue unless ancestorCategory
          ancestorName = ancestorCategory.name[GLOBALS.DEFAULT_LANGUAGE]
          fqName = "#{fqName}#{ancestorName}#{GLOBALS.DELIM_CATEGORY_CHILD}"
      fqName = "#{fqName}#{category.name[GLOBALS.DEFAULT_LANGUAGE]}"
      @fqName2id[fqName] = category.id
      @id2fqName[category.id] = fqName
    # keep the collection as the method's return value
    categories

module.exports = Categories
55 |
--------------------------------------------------------------------------------
/src/coffee/channels.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | { fetchResources } = require './resourceutils'
3 |
4 | # TODO:
5 | # - JSDoc
6 | # - make it util only
# Lookup service for commercetools channels.
# Maintains bidirectional key <-> id maps.
class Channels
  constructor: ->
    @key2id = {}
    @id2key = {}

  # Fetch all channels of the project (paging handled by fetchResources).
  getAll: (client, projectKey) ->
    fetchResources(client, projectKey, 'channels')

  # Index the given channels by key and by id.
  buildMaps: (channels) ->
    _.each channels, (channel) =>
      @key2id[channel.key] = channel.id
      @id2key[channel.id] = channel.key


module.exports = Channels
25 |
--------------------------------------------------------------------------------
/src/coffee/constants.coffee:
--------------------------------------------------------------------------------
# Shared constants used across import/export: well-known CSV header names,
# product type attribute type names, and the regular expressions used to
# parse price/money/number cell values.
constants =
  HEADER_PRODUCT_TYPE: 'productType'
  HEADER_ID: 'id'
  HEADER_KEY: 'key'
  HEADER_EXTERNAL_ID: 'externalId'
  HEADER_VARIANT_ID: 'variantId'
  HEADER_VARIANT_KEY: 'variantKey'
  HEADER_STATE: 'state'

  HEADER_NAME: 'name'
  HEADER_DESCRIPTION: 'description'
  HEADER_CATEGORY_ORDER_HINTS: 'categoryOrderHints'
  HEADER_SLUG: 'slug'

  HEADER_META_TITLE: 'metaTitle'
  HEADER_META_DESCRIPTION: 'metaDescription'
  HEADER_META_KEYWORDS: 'metaKeywords'
  HEADER_SEARCH_KEYWORDS: 'searchKeywords'

  HEADER_TAX: 'tax'
  HEADER_CATEGORIES: 'categories'

  HEADER_SKU: 'sku'
  HEADER_PRICES: 'prices'
  HEADER_IMAGES: 'images'
  HEADER_IMAGE_LABELS: 'imageLabels'
  HEADER_IMAGE_DIMENSIONS: 'imageDimensions'

  # read-only export columns describing the product's publishing state
  HEADER_PUBLISHED: '_published'
  HEADER_HAS_STAGED_CHANGES: '_hasStagedChanges'
  HEADER_CREATED_AT: '_createdAt'
  HEADER_LAST_MODIFIED_AT: '_lastModifiedAt'

  HEADER_PUBLISH: 'publish'

  ATTRIBUTE_TYPE_SET: 'set'
  ATTRIBUTE_TYPE_LTEXT: 'ltext'
  ATTRIBUTE_TYPE_ENUM: 'enum'
  ATTRIBUTE_TYPE_LENUM: 'lenum'
  ATTRIBUTE_TYPE_NUMBER: 'number'
  ATTRIBUTE_TYPE_MONEY: 'money'
  ATTRIBUTE_TYPE_REFERENCE: 'reference'
  ATTRIBUTE_TYPE_BOOLEAN: 'boolean'

  ATTRIBUTE_CONSTRAINT_SAME_FOR_ALL: 'SameForAll'

  REGEX_PRICE: new RegExp /^(([A-Za-z]{2})-|)([A-Z]{3}) (-?\d+)(-?\|(\d+)|)( ([^~\$#%]*)|)(#([^~\$%]*)|)(\$([^~%]*)|)(~([^%]*)|)(%([A-Z]{3} -?\d+ @\d+(.*))|)$/
  REGEX_MONEY: new RegExp /^([A-Z]{3}) (-?\d+)$/
  REGEX_INTEGER: new RegExp /^-?\d+$/
  REGEX_FLOAT: new RegExp /^-?\d+(\.\d+)?$/


# re-export every constant as a top-level module property
for name, value of constants
  exports[name] = value

exports.BASE_HEADERS = [
  constants.HEADER_PRODUCT_TYPE,
  constants.HEADER_VARIANT_ID,
  constants.HEADER_VARIANT_KEY
]

exports.BASE_LOCALIZED_HEADERS = [
  constants.HEADER_NAME,
  constants.HEADER_DESCRIPTION
  constants.HEADER_SLUG,
  constants.HEADER_META_TITLE,
  constants.HEADER_META_DESCRIPTION,
  constants.HEADER_META_KEYWORDS,
  constants.HEADER_SEARCH_KEYWORDS
]

exports.SPECIAL_HEADERS = [
  constants.HEADER_ID,
  constants.HEADER_KEY,
  constants.HEADER_SKU,
  constants.HEADER_PRICES,
  constants.HEADER_TAX,
  constants.HEADER_CATEGORIES,
  constants.HEADER_IMAGES,
  constants.HEADER_STATE
  # TODO: image labels and dimensions
]

# this list was created from a documentation of productProjection properties:
# https://docs.commercetools.com/http-api-projects-productProjections.html#productprojection
# Note: HEADER_SEARCH_KEYWORDS was listed twice here; the duplicate entry
# was removed (membership semantics are unchanged).
exports.PRODUCT_LEVEL_PROPERTIES = [
  constants.HEADER_ID,
  constants.HEADER_KEY,
  constants.HEADER_PRODUCT_TYPE,
  constants.HEADER_NAME,
  constants.HEADER_DESCRIPTION,
  constants.HEADER_CATEGORY_ORDER_HINTS,
  constants.HEADER_SLUG,
  constants.HEADER_CATEGORIES
  constants.HEADER_META_TITLE
  constants.HEADER_META_DESCRIPTION
  constants.HEADER_META_KEYWORDS
  constants.HEADER_SEARCH_KEYWORDS
  constants.HEADER_STATE
  'version',
  'createdAt',
  'lastModifiedAt',
  'categories',
  'hasStagedChanges',
  'published',
  'masterVariant',
  'variants',
  'taxCategory',
  'reviewRatingStatistics'
]

exports.ALL_HEADERS = exports.BASE_HEADERS.concat(exports.BASE_LOCALIZED_HEADERS.concat(exports.SPECIAL_HEADERS))
114 |
--------------------------------------------------------------------------------
/src/coffee/customergroups.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | { fetchResources } = require './resourceutils'
3 |
4 | # TODO:
5 | # - JSDoc
6 | # - make it util only
# Lookup service for commercetools customer groups.
# Maintains bidirectional name <-> id maps.
class CustomerGroups
  constructor: ->
    @name2id = {}
    @id2name = {}

  # Fetch all customer groups of the project (paging handled by fetchResources).
  getAll: (client, projectKey) ->
    fetchResources(client, projectKey, 'customerGroups')

  # Index the given customer groups by name and by id.
  buildMaps: (customerGroups) ->
    _.each customerGroups, (group) =>
      @name2id[group.name] = group.id
      @id2name[group.id] = group.name


module.exports = CustomerGroups
24 |
--------------------------------------------------------------------------------
/src/coffee/export.coffee:
--------------------------------------------------------------------------------
1 | { createClient } = require '@commercetools/sdk-client'
2 | {
3 | createAuthMiddlewareForClientCredentialsFlow
4 | createAuthMiddlewareWithExistingToken
5 | } = require '@commercetools/sdk-middleware-auth'
6 | { createHttpMiddleware } = require '@commercetools/sdk-middleware-http'
7 | { createQueueMiddleware } = require '@commercetools/sdk-middleware-queue'
8 | { createUserAgentMiddleware } = require '@commercetools/sdk-middleware-user-agent'
9 | { createRequestBuilder } = require '@commercetools/api-request-builder'
10 | _ = require 'underscore'
11 | Csv = require 'csv'
12 | archiver = require 'archiver'
13 | path = require 'path'
14 | tmp = require 'tmp'
15 | Promise = require 'bluebird'
16 | fetch = require 'node-fetch'
17 | iconv = require 'iconv-lite'
18 | fs = Promise.promisifyAll require('fs')
19 | prompt = Promise.promisifyAll require('prompt')
20 | Types = require './types'
21 | Categories = require './categories'
22 | Channels = require './channels'
23 | CustomerGroups = require './customergroups'
24 | Header = require './header'
25 | States = require './states'
26 | Taxes = require './taxes'
27 | ExportMapping = require './exportmapping'
28 | Writer = require './io/writer'
29 | queryStringParser = require 'querystring'
30 | GLOBALS = require './globals'
31 |
32 | # will clean temporary files even when an uncaught exception occurs
33 | tmp.setGracefulCleanup()
34 |
35 | # TODO:
36 | # - JSDoc
# Exports commercetools products to CSV/XLSX.
#
# Fetches project metadata (product types, categories, channels, customer
# groups, taxes, states), pages through (staged) product projections and
# streams the mapped rows through an io/writer. Also generates CSV header
# templates from product types.
class Export

  # options.authConfig / httpConfig / userAgentConfig configure the SDK
  # client; options.export may carry a queryString plus variant attribute
  # and price filters (both in "a=1&b=true" form).
  constructor: (@options = {}) ->
    @projectKey = @options.authConfig.projectKey
    # defaults for output format/encoding
    @options.outputDelimiter = @options.outputDelimiter || ","
    @options.templateDelimiter = @options.templateDelimiter || ","
    @options.encoding = @options.encoding || "utf8"
    @options.exportFormat = @options.exportFormat || "csv"
    @options.onlyMasterVariants = @options.onlyMasterVariants || false

    @queryOptions =
      queryString: @options.export?.queryString?.trim()
      isQueryEncoded: @options.export?.isQueryEncoded
      filterVariantsByAttributes: @_parseQuery(
        @options.export?.filterVariantsByAttributes
      )
      filterPrices: @_parseQuery(@options.export?.filterPrices)

    @client = @_createClient()

    # TODO: using single mapping util instead of services
    @typesService = new Types()
    @categoryService = new Categories()
    @channelService = new Channels()
    @customerGroupService = new CustomerGroups()
    @taxService = new Taxes()
    @stateService = new States()

    # tracks output files that already received a header (see _processChunk)
    @createdFiles = {}

  # Assemble the commercetools SDK client from auth, user-agent, http and
  # (implicit) token middlewares. An already-known access token is passed
  # through createAuthMiddlewareWithExistingToken; otherwise the client
  # credentials flow is used.
  _createClient: (options = @options, projectKey = @projectKey) ->
    createClient(middlewares: [
      createAuthMiddlewareWithExistingToken(
        if options.authConfig.accessToken
        then "Bearer #{options.authConfig.accessToken}"
        else ''
      )
      createAuthMiddlewareForClientCredentialsFlow
        host: options.authConfig.host
        projectKey: projectKey
        credentials: options.authConfig.credentials
        fetch: fetch
      createUserAgentMiddleware options.userAgentConfig
      createHttpMiddleware options.httpConfig
    ])

  # Parse "name=value&name2=value2" into [{name, value}] pairs.
  # The literal strings 'true'/'false' are converted to booleans.
  # Returns null when the input is empty/undefined.
  _parseQuery: (queryStr) ->
    if !queryStr then return null
    return _.map(
      queryStr.split('&'),
      (filter) ->
        filter = filter.split('=')
        if filter[1] == 'true' || filter[1] == 'false'
          filter[1] = filter[1] == 'true'
        return {
          name: filter[0]
          value: filter[1]
        }
    )

  # Keep only prices whose fields match ALL of the given {name, value} filters.
  _filterPrices: (prices, filters) ->
    _.filter(prices, (price) ->
      return _.reduce(
        filters,
        (filterOutPrice, filter) ->
          return filterOutPrice && price[filter.name] == filter.value
      , true)
    )

  # Keep only variants whose attributes match ALL given filters (pass-through
  # when no filter is set), then apply the configured price filters to the
  # surviving variants. A variant whose prices are all filtered out is
  # replaced by null (callers compact the result).
  _filterVariantsByAttributes: (variants, filter) ->
    filteredVariants = _.filter(variants, (variant) ->
      return if filter?.length > 0
        _.reduce(
          filter,
          (filterOutVariant, filter) ->
            # filter attributes
            attribute = _.findWhere(variant.attributes, {
              name: filter.name
            })
            return filterOutVariant && !!attribute &&
              (attribute.value == filter.value)
        , true)
      else
        true
    )

    # filter prices of filtered variants
    return _.map(filteredVariants, (variant) =>
      if @queryOptions.filterPrices?.length > 0
        variant.prices = @_filterPrices(
          variant.prices,
          @queryOptions.filterPrices
        )
        if variant.prices.length == 0 then return null
      return variant
    )

  # Build an ExportMapping wired with all lookup services and the parsed
  # header. NOTE(review): this mutates @options via _.extend.
  _initMapping: (header) ->
    _.extend @options,
      channelService: @channelService
      categoryService: @categoryService
      typesService: @typesService
      customerGroupService: @customerGroupService
      taxService: @taxService
      stateService: @stateService
      header: header
    new ExportMapping(@options)

  # Parse a raw query string (e.g. "where=...&staged=true") into an object.
  _parseQueryString: (query) ->
    queryStringParser.parse(query)

  # AND-append a predicate to the query's existing `where` clause (or set it).
  _appendQueryStringPredicate: (query, predicate) ->
    query.where = if query.where then query.where + " AND "+predicate else predicate
    query

  # return the correct product service in case query string is used or not
  # Builds a {uri, method} request description for productProjections,
  # staged or current, 100 per page, honoring the user query string and/or
  # an extra where predicate.
  _getProductService: (staged = true, customWherePredicate = false) ->
    productsService = createRequestBuilder({@projectKey})
      .productProjections
      .staged(staged)
      .perPage(100)

    if @queryOptions.queryString
      query = @_parseQueryString(@queryOptions.queryString)

      if customWherePredicate
        query = @_appendQueryStringPredicate(query, customWherePredicate)

      productsService.where(query.where) if query.where

      uri: productsService.build()
      method: 'GET'
    else
      productsService.where(customWherePredicate) if customWherePredicate

      uri: productsService.build()
      method: 'GET'

  # Fetch all metadata resources in parallel and build their lookup maps.
  # Resolves with the raw resource lists (states are built but not returned).
  _fetchResources: =>
    data = [
      @typesService.getAll @client, @projectKey
      @categoryService.getAll @client, @projectKey
      @channelService.getAll @client, @projectKey
      @customerGroupService.getAll @client, @projectKey
      @taxService.getAll @client, @projectKey
      @stateService.getAll @client, @projectKey
    ]
    Promise.all(data)
    .then ([productTypes, categories, channels, customerGroups, taxes, states]) =>
      @typesService.buildMaps productTypes
      @categoryService.buildMaps categories
      @channelService.buildMaps channels
      @customerGroupService.buildMaps customerGroups
      @taxService.buildMaps taxes
      @stateService.buildMaps states

      console.warn "Fetched #{productTypes.length} product type(s)."
      Promise.resolve({productTypes, categories, channels, customerGroups, taxes})

  # Standard export: fetch metadata, then export all products matching the
  # given template into one output file (file is created even when empty).
  exportDefault: (templateContent, outputFile, staged = true) =>
    @_fetchResources()
    .then ({productTypes}) =>
      @export templateContent, outputFile, productTypes, staged, false, true

  # Zip the whole content of inputFolder into outputFile.
  _archiveFolder: (inputFolder, outputFile) ->
    output = fs.createWriteStream(outputFile)
    archive = archiver 'zip'

    new Promise (resolve, reject) ->
      output.on 'close', () -> resolve()
      archive.on 'error', (err) -> reject(err)
      archive.pipe output
      archive.glob('**', { cwd: inputFolder })
      archive.finalize()

  # Full export: one file per product type (auto-generated template, default
  # language only), written to a temp dir and zipped into `output`.
  # Product types are processed sequentially (concurrency: 1).
  exportFull: (output, staged = true) =>
    lang = GLOBALS.DEFAULT_LANGUAGE
    console.log 'Creating full export for "%s" language', lang

    @_fetchResources()
    .then ({productTypes}) =>
      if not productTypes.length
        return Promise.reject("Project does not have any productTypes.")

      tempDir = tmp.dirSync({ unsafeCleanup: true })
      console.log "Creating temp directory in %s", tempDir.name

      Promise.map productTypes, (type) =>
        console.log 'Processing products with productType "%s"', type.name
        csv = new ExportMapping().createTemplate(type, [lang])
        fileName = _.slugify(type.name)+"_"+type.id+"."+@options.exportFormat
        filePath = path.join(tempDir.name, fileName)
        condition = 'productType(id="'+type.id+'")'

        @export csv.join(@options.templateDelimiter), filePath, productTypes, staged, condition, false
      , { concurrency: 1}
      .then =>
        console.log "All productTypes were processed - archiving output folder"
        @_archiveFolder tempDir.name, output
      .then ->
        console.log "Folder was archived and saved to %s", output
        tempDir.removeCallback()
        Promise.resolve "Export done."

  # Map one page of products to rows and hand them to the writer. Writes the
  # header the first time a given outputFile is seen (tracked in
  # @createdFiles). Skips entirely when the page is empty and no file
  # should be created. NOTE(review): mutates the incoming product objects
  # (variants/masterVariant are filtered in place).
  _processChunk: (writer, products, productTypes, createFileWhenEmpty, header, exportMapper, outputFile) =>
    data = []
    # if there are no products to export
    if not products.length && not createFileWhenEmpty
      return Promise.resolve()

    (if @createdFiles[outputFile]
      Promise.resolve()
    else
      @createdFiles[outputFile] = 1
      writer.setHeader header.rawHeader
    )
    .then =>
      _.each products, (product) =>
        # filter variants
        product.variants = @_filterVariantsByAttributes(
          product.variants,
          @queryOptions.filterVariantsByAttributes
        )
        # filter masterVariant
        [ product.masterVariant ] = @_filterVariantsByAttributes(
          [ product.masterVariant ],
          @queryOptions.filterVariantsByAttributes
        )
        # remove all the variants that don't meet the price condition
        product.variants = _.compact(product.variants)
        data = data.concat exportMapper.mapProduct(
          product,
          productTypes
        )
      writer.write data
    .catch (err) ->
      console.log("Error while processing products batch", err)
      Promise.reject(err)

  # Core export loop: parse and validate the template header, then page
  # through matching products via @client.process, creating the writer
  # lazily (so empty full-export files are not created) and flushing it at
  # the end. Resolves with "Export done." or rejects with header errors /
  # the first processing error.
  export: (templateContent, outputFile, productTypes, staged = true, customWherePredicate = false, createFileWhenEmpty = false) ->
    @_parse(templateContent)
    .then (header) =>
      writer = null
      errors = header.validate()
      rowsReaded = 0

      unless _.size(errors) is 0
        Promise.reject errors
      else
        header.toIndex()
        header.toLanguageIndex()
        exportMapper = @_initMapping(header)

        _.each productTypes, (productType) ->
          header._productTypeLanguageIndexes(productType)
        productsService = @_getProductService(staged, customWherePredicate)
        @client.process(productsService, (res) =>
          rowsReaded += res.body.count
          console.warn "Fetched #{res.body.count} product(s)."

          # init writer and create output file
          # when doing full export - don't create empty files
          if not writer && (createFileWhenEmpty || rowsReaded)
            try
              writer = new Writer
                csvDelimiter: @options.outputDelimiter,
                encoding: @options.encoding,
                exportFormat: @options.exportFormat,
                outputFile: outputFile,
                debug: @options.debug
            catch e
              return Promise.reject e

          @_processChunk writer, res.body.results, productTypes, createFileWhenEmpty, header, exportMapper, outputFile
        , {accumulate: false})
        .then ->
          if createFileWhenEmpty || rowsReaded
            writer.flush()
          else
            Promise.resolve()
        .then ->
          Promise.resolve "Export done."
        .catch (err) ->
          console.dir(err, {depth: 10})
          Promise.reject err

  # Generate a CSV template. With allProductTypes, the union of all product
  # type headers is written; otherwise the user is prompted interactively to
  # pick one product type by number.
  createTemplate: (languages, outputFile, allProductTypes = false) ->
    @typesService.getAll(@client, @projectKey)
    .then (productTypes) =>
      if _.size(productTypes) is 0
        Promise.reject 'Can not find any product type.'
      else
        idsAndNames = _.map productTypes, (productType) ->
          productType.name

        if allProductTypes
          allHeaders = []
          exportMapping = new ExportMapping()
          _.each productTypes, (productType) ->
            allHeaders = allHeaders.concat exportMapping.createTemplate(productType, languages)
          csv = _.uniq allHeaders
          @_saveCSV(outputFile, [csv])
          .then -> Promise.resolve 'Template for all product types generated.'
        else
          _.each idsAndNames, (entry, index) ->
            console.warn ' %d) %s', index, entry
          prompt.start()
          property =
            name: 'number'
            message: 'Enter the number of the producttype.'
            validator: /\d+/
            warning: 'Please enter a valid number'
          prompt.getAsync property
          .then (result) =>
            productType = productTypes[parseInt(result.number)]
            if productType
              console.warn "Generating template for product type '#{productType.name}' (id: #{productType.id})."
              process.stdin.destroy()
              csv = new ExportMapping().createTemplate(productType, languages)
              @_saveCSV(outputFile, [csv])
              .then -> Promise.resolve 'Template generated.'
            else
              Promise.reject 'Please re-run and select a valid number.'

  # Serialize `content` as CSV to `file` (append when `append` is set) using
  # the configured delimiter and encoding; streams to stdout when no file
  # is given.
  _saveCSV: (file, content, append) =>
    flag = if append then 'a' else 'w'
    new Promise (resolve, reject) =>
      parsedCsv = Csv().from(content, {delimiter: @options.outputDelimiter})
      opts =
        flag: flag

      if file
        parsedCsv.to.string (res) =>
          converted = iconv.encode(res+'\n', @options.encoding)
          fs.writeFileAsync file, converted, opts
          .then -> resolve()
          .catch (err) -> reject err
      else
        parsedCsv.to.stream process.stdout, opts

      parsedCsv
      .on 'error', (err) -> reject err
      .on 'close', (count) -> resolve count

  # Parse a one-line CSV template string into a Header instance
  # (trimming stray leading/trailing delimiters first).
  _parse: (csvString) =>
    new Promise (resolve, reject) =>
      csvString = _.trim(csvString, @options.templateDelimiter)
      Csv().from.string(csvString, {delimiter: @options.templateDelimiter})
      .to.array (data, count) ->
        header = new Header(data[0])
        resolve header
      .on 'error', (err) -> reject err

module.exports = Export
391 |
--------------------------------------------------------------------------------
/src/coffee/exportmapping.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | _.mixin require('underscore.string').exports()
3 | CONS = require './constants'
4 | GLOBALS = require './globals'
5 |
6 | # TODO:
7 | # - JSDoc
8 | # - no services!!!
9 | # - utils only
# Maps product (projection) JSON to CSV rows and product types to CSV
# header templates. Configured with the lookup services that resolve ids
# into human readable values.
class ExportMapping

  constructor: (options = {}) ->
    # lookup services used to resolve resource ids (types, categories, ...)
    @typesService = options.typesService
    @categoryService = options.categoryService
    @channelService = options.channelService
    @stateService = options.stateService
    @customerGroupService = options.customerGroupService
    @taxService = options.taxService
    # parsed CSV header driving column positions (Header instance)
    @header = options.header
    # when true, variant rows start as a copy of the product row
    @fillAllRows = options.fillAllRows
    # when true, additional variants are not exported
    @onlyMasterVariants = options.onlyMasterVariants || false
    # how categories / categoryOrderHints are rendered (e.g. slug, externalId)
    @categoryBy = options.categoryBy
    @categoryOrderHintBy = options.categoryOrderHintBy
24 |
25 | mapProduct: (product, productTypes) ->
26 | productType = productTypes[@typesService.id2index[product.productType.id]]
27 | rows = []
28 | productRow = @_mapBaseProduct product, productType
29 | if product.masterVariant
30 | rows.push productRow
31 |
32 | if not @onlyMasterVariants and product.variants
33 | for variant in product.variants
34 | variantRow = if @fillAllRows
35 | _.deepClone productRow
36 | else
37 | []
38 | rows.push @_mapVariant variant, productType, variantRow
39 |
40 | rows
41 |
42 | createTemplate: (productType, languages = [GLOBALS.DEFAULT_LANGUAGE]) ->
43 | header = [ CONS.HEADER_PUBLISHED, CONS.HEADER_HAS_STAGED_CHANGES ].concat(CONS.BASE_HEADERS.concat(CONS.SPECIAL_HEADERS))
44 | _.each CONS.BASE_LOCALIZED_HEADERS, (locBaseAttrib) ->
45 | header = header.concat _.map languages, (lang) ->
46 | "#{locBaseAttrib}#{GLOBALS.DELIM_HEADER_LANGUAGE}#{lang}"
47 | _.each productType.attributes, (attribute) =>
48 | switch attribute.type.name
49 | when CONS.ATTRIBUTE_TYPE_SET then header = header.concat @_mapAttributeTypeDef attribute.type.elementType, attribute, languages
50 | else header = header.concat @_mapAttributeTypeDef attribute.type, attribute, languages
51 | header
52 |
53 | _mapAttributeTypeDef: (attributeTypeDef, attribute, languages) ->
54 | switch attributeTypeDef.name
55 | when CONS.ATTRIBUTE_TYPE_LTEXT then _.map languages, (lang) -> "#{attribute.name}#{GLOBALS.DELIM_HEADER_LANGUAGE}#{lang}"
56 | else [ attribute.name ]
57 |
  # Build the product-level CSV row: starts from the mapped master variant
  # (or an empty row) and fills every product-level column the header
  # declares (published/staged flags, id, key, state, product type, tax,
  # categories, timestamps, localized base fields and category order hints).
  _mapBaseProduct: (product, productType) ->
    row = if product.masterVariant
      @_mapVariant product.masterVariant, productType
    else
      []

    if @header.has(CONS.HEADER_PUBLISHED)
      row[@header.toIndex CONS.HEADER_PUBLISHED] = "#{product.published}"

    if @header.has(CONS.HEADER_HAS_STAGED_CHANGES)
      row[@header.toIndex CONS.HEADER_HAS_STAGED_CHANGES] = "#{product.hasStagedChanges}"

    if @header.has(CONS.HEADER_ID)
      row[@header.toIndex CONS.HEADER_ID] = product.id

    if @header.has(CONS.HEADER_KEY)
      row[@header.toIndex CONS.HEADER_KEY] = product.key

    # state is exported by its key (silently skipped when unknown)
    if @header.has(CONS.HEADER_STATE) and _.has(product, 'state')
      if _.has @stateService.id2key, product.state.id
        row[@header.toIndex CONS.HEADER_STATE] = @stateService.id2key[product.state.id]

    if @header.has(CONS.HEADER_PRODUCT_TYPE)
      row[@header.toIndex CONS.HEADER_PRODUCT_TYPE] = productType.name

    # tax category is exported by name (silently skipped when unknown)
    if @header.has(CONS.HEADER_TAX) and _.has(product, 'taxCategory')
      if _.has @taxService.id2name, product.taxCategory.id
        row[@header.toIndex CONS.HEADER_TAX] = @taxService.id2name[product.taxCategory.id]

    # categories joined with the multi-value delimiter; rendered by slug,
    # externalId or fully qualified name depending on @categoryBy
    if @header.has(CONS.HEADER_CATEGORIES)
      row[@header.toIndex CONS.HEADER_CATEGORIES] = _.reduce(product.categories or [], (memo, category, index) =>
        memo += GLOBALS.DELIM_MULTI_VALUE unless index is 0
        memo + if @categoryBy is CONS.HEADER_SLUG
          @categoryService.id2slug[category.id]
        else if @categoryBy is CONS.HEADER_EXTERNAL_ID
          @categoryService.id2externalId[category.id]
        else
          @categoryService.id2fqName[category.id]
      , '')

    if @header.has(CONS.HEADER_CREATED_AT)
      row[@header.toIndex CONS.HEADER_CREATED_AT] = product.createdAt

    if @header.has(CONS.HEADER_LAST_MODIFIED_AT)
      row[@header.toIndex CONS.HEADER_LAST_MODIFIED_AT] = product.lastModifiedAt

    # localized base columns (name.en, slug.de, ...); search keywords are
    # joined with the multi-value delimiter
    for attribName, h2i of @header.toLanguageIndex()
      for lang, index of h2i
        if product[attribName]
          if attribName is CONS.HEADER_SEARCH_KEYWORDS
            row[index] = _.reduce(product[attribName][lang], (memo, val, index) ->
              memo += GLOBALS.DELIM_MULTI_VALUE unless index is 0
              memo + val.text
            , '')
          else
            row[index] = product[attribName][lang]

    # category order hints as "categoryRef:hint" pairs; the category side is
    # the id or its externalId depending on @categoryOrderHintBy
    if @header.has(CONS.HEADER_CATEGORY_ORDER_HINTS)
      if product.categoryOrderHints?
        categoryIds = Object.keys product.categoryOrderHints
        categoryOrderHints = _.map categoryIds, (categoryId) =>
          categoryIdentificator = categoryId
          if @categoryOrderHintBy == 'externalId'
            categoryIdentificator = @categoryService.id2externalId[categoryId]
          return "#{categoryIdentificator}:#{product.categoryOrderHints[categoryId]}"
        row[@header.toIndex CONS.HEADER_CATEGORY_ORDER_HINTS] = categoryOrderHints.join GLOBALS.DELIM_MULTI_VALUE
      else
        row[@header.toIndex CONS.HEADER_CATEGORY_ORDER_HINTS] = ''

    row
128 |
  # Fill a csv row with variant-level columns (variantId, key, sku, prices,
  # images) and all custom attribute values. Localizable attribute types
  # (ltext, lenum and their set variants) are dispatched to special mappers
  # because they may span several language columns.
  _mapVariant: (variant, productType, row = []) ->
    if @header.has(CONS.HEADER_VARIANT_ID)
      row[@header.toIndex CONS.HEADER_VARIANT_ID] = variant.id

    if @header.has(CONS.HEADER_VARIANT_KEY)
      row[@header.toIndex CONS.HEADER_VARIANT_KEY] = variant.key

    if @header.has(CONS.HEADER_SKU)
      row[@header.toIndex CONS.HEADER_SKU] = variant.sku

    if @header.has(CONS.HEADER_PRICES)
      row[@header.toIndex CONS.HEADER_PRICES] = @_mapPrices(variant.prices)

    if @header.has(CONS.HEADER_IMAGES)
      row[@header.toIndex CONS.HEADER_IMAGES] = @_mapImages(variant.images)

    if variant.attributes
      for attribute in variant.attributes
        # look up the attribute's type definition from the cached product type
        attributeTypeDef = @typesService.id2nameAttributeDefMap[productType.id][attribute.name].type
        if attributeTypeDef.name is CONS.ATTRIBUTE_TYPE_LTEXT
          row = @_mapLocalizedAttribute attribute, productType, row
        else if attributeTypeDef.name is CONS.ATTRIBUTE_TYPE_SET and attributeTypeDef.elementType?.name is CONS.ATTRIBUTE_TYPE_LENUM
          # we need special treatment for set of lenums
          row = @_mapSetOfLenum(attribute, productType, row)
        else if attributeTypeDef.name is CONS.ATTRIBUTE_TYPE_SET and attributeTypeDef.elementType?.name is CONS.ATTRIBUTE_TYPE_LTEXT
          row = @_mapSetOfLtext(attribute, productType, row)
        else if attributeTypeDef.name is CONS.ATTRIBUTE_TYPE_LENUM # we need special treatment for lenums
          row = @_mapLenum(attribute, productType, row)
        else if @header.has attribute.name
          row[@header.toIndex attribute.name] = @_mapAttribute(attribute, attributeTypeDef)

    row
161 |
  # Serialize all prices of a variant into a single cell. Each price renders as
  #   [country-]currency cents[|discountedCents][ customerGroup][#channelKey][$validFrom][~validUntil][%tiers]
  # and multiple prices are joined with the multi-value delimiter.
  _mapPrices: (prices) ->
    _.reduce(prices, (acc, price, index) =>
      acc += GLOBALS.DELIM_MULTI_VALUE unless index is 0
      countryPart = ''
      if price.country
        countryPart = "#{price.country}-"
      # customer group is exported by name, channel by key - both resolved
      # through the respective service caches; unknown ids are silently omitted
      customerGroupPart = ''
      if price.customerGroup and _.has(@customerGroupService.id2name, price.customerGroup.id)
        customerGroupPart = " #{@customerGroupService.id2name[price.customerGroup.id]}"
      channelKeyPart = ''
      if price.channel and _.has(@channelService.id2key, price.channel.id)
        channelKeyPart = "##{@channelService.id2key[price.channel.id]}"
      discountedPricePart = ''

      validFromPart = ''
      if price.validFrom
        validFromPart = "$#{price.validFrom}"

      validUntilPart = ''
      if price.validUntil
        validUntilPart = "~#{price.validUntil}"

      tiersPart = ''
      if price.tiers
        tiersPart = "%#{@_mapTiers price.tiers}"

      if price.discounted?
        discountedPricePart = "|#{price.discounted.value.centAmount}"
      acc + "#{countryPart}#{price.value.currencyCode} #{price.value.centAmount}#{discountedPricePart}#{customerGroupPart}#{channelKeyPart}#{validFromPart}#{validUntilPart}#{tiersPart}"
    , '')
192 |
193 | _mapTiers: (tiers) ->
194 | _.reduce(tiers, (acc, priceTier, index) ->
195 | acc += GLOBALS.DELIM_TIERS_MULTI_VALUE unless index is 0
196 | acc + "#{priceTier.value.currencyCode} #{priceTier.value.centAmount} @#{priceTier.minimumQuantity}"
197 | , '')
198 |
199 | _mapMoney: (money) ->
200 | "#{money.currencyCode} #{money.centAmount}"
201 |
202 | _mapImages: (images) ->
203 | _.reduce(images, (acc, image, index) ->
204 | acc += GLOBALS.DELIM_MULTI_VALUE unless index is 0
205 | acc + image.url + GLOBALS.DELIM_URL_ATTRIBUTES_SEPERATOR +
206 | (image.label || "") + GLOBALS.DELIM_URL_ATTRIBUTES_SEPERATOR + (image?.dimensions?.w || 0)+
207 | GLOBALS.DELIM_DIMENSIONS_SEPERATOR + (image?.dimensions?.h || 0)
208 | , '')
209 |
210 | _mapAttribute: (attribute, attributeTypeDef) ->
211 | switch attributeTypeDef.name
212 | when CONS.ATTRIBUTE_TYPE_SET then @_mapSetAttribute(attribute, attributeTypeDef)
213 | when CONS.ATTRIBUTE_TYPE_ENUM then attribute.value.key
214 | when CONS.ATTRIBUTE_TYPE_MONEY then @_mapMoney attribute.value
215 | when CONS.ATTRIBUTE_TYPE_REFERENCE then attribute.value?.id
216 | when CONS.ATTRIBUTE_TYPE_BOOLEAN then attribute.value.toString()
217 | else attribute.value
218 |
219 | _mapLocalizedAttribute: (attribute, productType, row) ->
220 | h2i = @header.productTypeAttributeToIndex productType, attribute
221 | if h2i
222 | for lang, index of h2i
223 | if attribute.value
224 | row[index] = attribute.value[lang]
225 | row
226 |
227 | _mapLenum: (attribute, productType, row) ->
228 | noneLangIndex = @header.toIndex(attribute.name)
229 | # if my attribute has no language index, I want the key only
230 | if noneLangIndex
231 | row[noneLangIndex] = attribute.value.key
232 | h2i = @header.productTypeAttributeToIndex productType, attribute
233 | if h2i
234 | for lang, index of h2i
235 | if attribute.value
236 | row[index] = attribute.value.label[lang]
237 | else
238 | row[index] = attribute.value.key
239 | row
240 |
241 | _mapSetOfLenum: (attribute, productType, row) ->
242 | # if my attribute has no language index, I want the keys only
243 | noneLangIndex = @header.toIndex(attribute.name)
244 | if noneLangIndex
245 | row[noneLangIndex] = _.reduce(attribute.value, (memo, val, index) ->
246 | memo += GLOBALS.DELIM_MULTI_VALUE unless index is 0
247 | memo + val.key
248 | , '')
249 | h2i = @header.productTypeAttributeToIndex productType, attribute
250 | if h2i
251 | for lang, index of h2i
252 | if attribute.value
253 | row[index] = _.reduce(attribute.value, (memo, val, index) ->
254 | memo += GLOBALS.DELIM_MULTI_VALUE unless index is 0
255 | memo + val.label[lang]
256 | , '')
257 | else
258 | row[index] = attribute.value.key
259 |
260 | row
261 |
262 | _mapSetOfLtext: (attribute, productType, row) ->
263 | h2i = @header.productTypeAttributeToIndex productType, attribute
264 | for lang, index of h2i
265 | row[index] = _.reduce(attribute.value, (memo, val, index) ->
266 | return memo unless val[lang]?
267 |
268 | memo += GLOBALS.DELIM_MULTI_VALUE unless index is 0
269 | memo + val[lang]
270 | , '')
271 | row
272 |
273 | _mapSetAttribute: (attribute, attributeTypeDef) ->
274 | switch attributeTypeDef.elementType.name
275 | when CONS.ATTRIBUTE_TYPE_ENUM
276 | _.reduce(attribute.value, (memo, val, index) ->
277 | memo += GLOBALS.DELIM_MULTI_VALUE unless index is 0
278 | memo + val.key
279 | , '')
280 | when CONS.ATTRIBUTE_TYPE_MONEY
281 | _.reduce(attribute.value, (memo, val, index) =>
282 | memo += GLOBALS.DELIM_MULTI_VALUE unless index is 0
283 | memo + @_mapMoney val
284 | , '')
285 | else
286 | attribute.value.join GLOBALS.DELIM_MULTI_VALUE
287 |
288 |
289 | module.exports = ExportMapping
290 |
--------------------------------------------------------------------------------
/src/coffee/globals.coffee:
--------------------------------------------------------------------------------
# Project-wide defaults and the delimiters used when (de)serializing
# product csv/xlsx cell values.
module.exports =
  DEFAULT_LANGUAGE: 'en'
  # separates multiple values inside one cell (categories, prices, images, ...)
  DELIM_MULTI_VALUE: ';'
  # separates an attribute name from its language in a header (e.g. "name.en")
  DELIM_HEADER_LANGUAGE: '.'
  # separates parent and child in a category path (e.g. "root>child")
  DELIM_CATEGORY_CHILD: '>'
  # separates multiple price tiers inside one price entry
  DELIM_TIERS_MULTI_VALUE: '%'
  # separates url, label and dimensions inside one image entry
  DELIM_URL_ATTRIBUTES_SEPERATOR: '|'
  # separates width and height of an image dimension (e.g. "800x600")
  DELIM_DIMENSIONS_SEPERATOR: 'x'
9 |
10 |
--------------------------------------------------------------------------------
/src/coffee/header.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | CONS = require './constants'
3 | GLOBALS = require './globals'
4 |
5 | # TODO:
6 | # - JSDoc
7 | # - put it under utils
# Wraps the raw header row of an import/export file and provides lookups from
# plain and localized column names (e.g. "name.en") to column indexes.
class Header

  constructor: (@rawHeader) ->

  # checks some basic rules for the header row
  validate: ->
    errors = []
    if @rawHeader.length isnt _.unique(@rawHeader).length
      errors.push "There are duplicate header entries!"

    missingHeaders = _.difference [CONS.HEADER_PRODUCT_TYPE], @rawHeader
    if _.size(missingHeaders) > 0
      for missingHeader in missingHeaders
        errors.push "Can't find necessary base header '#{missingHeader}'!"

    if not _.contains(@rawHeader, CONS.HEADER_VARIANT_ID) and not _.contains(@rawHeader, CONS.HEADER_SKU)
      errors.push "You need either the column '#{CONS.HEADER_VARIANT_ID}' or '#{CONS.HEADER_SKU}' to identify your variants!"

    errors

  # "x,y,z"
  # toIndex:
  #   x: 0
  #   y: 1
  #   z: 2
  toIndex: (name) ->
    if not @h2i then @h2i = _.object _.map @rawHeader, (head, index) -> [head, index]
    return @h2i[name] if name
    @h2i

  # True when the header contains a column with exactly this name.
  has: (name) ->
    @toIndex() unless @h2i?
    _.has @h2i, name

  # Lazily built index for the base localized headers (name, slug, ...).
  toLanguageIndex: (name) ->
    @langH2i = @_languageToIndex CONS.BASE_LOCALIZED_HEADERS unless @langH2i
    return @langH2i[name] if name
    @langH2i

  hasLanguageForBaseAttribute: (name) ->
    _.has @langH2i, name

  hasLanguageForCustomAttribute: (name) ->
    foo = _.find @productTypeId2HeaderIndex, (productTypeLangH2i) ->
      _.has productTypeLangH2i, name
    foo?

  # "a,x.de,y,x.it,z"
  # productTypeAttributeToIndex for 'x'
  #   de: 1
  #   it: 3
  productTypeAttributeToIndex: (productType, attribute) ->
    @_productTypeLanguageIndexes(productType)[attribute.name]

  # "x,a1.de,foo,a1.it"
  # _languageToIndex =
  #   a1:
  #     de: 1
  #     it: 3
  _languageToIndex: (localizedAttributes) ->
    langH2i = {}
    for langAttribName in localizedAttributes
      for head, index in @rawHeader
        parts = head.split GLOBALS.DELIM_HEADER_LANGUAGE
        if _.size(parts) >= 2
          # "\\." yields a literal dot in the regexp. The previous "\." collapsed
          # to "." inside the string, so the pattern matched ANY character and
          # e.g. attribute 'material' wrongly captured 'materialType.de' headers
          # (which is why the hard coded workaround below was introduced).
          nameRegexp = new RegExp("^#{langAttribName}\\.")
          if head.match(nameRegexp)
            lang = _.last(parts)
            # TODO: check language
            # hard coded because materialType override material attribute because of sub string match
            # (kept for safety although the escaped regexp above already prevents the sub string match)
            if not (langAttribName == "material" && _.first(parts) != langAttribName)
              langH2i[langAttribName] || (langH2i[langAttribName] = {})
              langH2i[langAttribName][lang] = index

    langH2i

  # Stores the map between the id of product types and the language header index
  # Lenum and Set of Lenum are now first class localised citizens
  _productTypeLanguageIndexes: (productType) ->
    @productTypeId2HeaderIndex or= {}
    langH2i = @productTypeId2HeaderIndex[productType.id]
    unless langH2i
      ptLanguageAttributes = _.map productType.attributes, (attribute) ->
        if (attribute.type.name is CONS.ATTRIBUTE_TYPE_LTEXT) or
           (attribute.type.name is CONS.ATTRIBUTE_TYPE_SET and attribute.type.elementType?.name is CONS.ATTRIBUTE_TYPE_LTEXT) or
           (attribute.type.name is CONS.ATTRIBUTE_TYPE_LENUM) or
           (attribute.type.name is CONS.ATTRIBUTE_TYPE_SET and attribute.type.elementType?.name is CONS.ATTRIBUTE_TYPE_LENUM)
          # attribute names clashing with base headers are prefixed in the file
          if attribute.name in CONS.ALL_HEADERS
            "attribute.#{attribute.name}"
          else
            attribute.name

      langH2i = @_languageToIndex ptLanguageAttributes
      @productTypeId2HeaderIndex[productType.id] = langH2i
    langH2i

  # Attributes of the product type that have neither a plain nor a localized
  # column in this header.
  missingHeaderForProductType: (productType) ->
    @toIndex()
    _.filter productType.attributes, (attribute) =>
      not @has(attribute.name) and not @productTypeAttributeToIndex(productType, attribute)
108 |
109 | module.exports = Header
110 |
--------------------------------------------------------------------------------
/src/coffee/helpers.coffee:
--------------------------------------------------------------------------------
# Small collection of static helper functions.
class Helpers
  # Return a fresh object containing a single entry: { key: value }.
  @initMap: (key, value) ->
    result = {}
    result[key] = value
    result
6 |
7 | module.exports = Helpers
8 |
--------------------------------------------------------------------------------
/src/coffee/io/reader.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | Csv = require 'csv'
3 | path = require 'path'
4 | Promise = require 'bluebird'
5 | iconv = require 'iconv-lite'
6 | fs = Promise.promisifyAll require('fs')
7 | Excel = require 'exceljs'
8 |
# Reads product rows from csv or xlsx input (file or stdin) and returns them
# as arrays of string cell values.
class Reader

  # options: debug, encoding (default 'utf-8'), importFormat ('csv'|'xlsx'),
  # csvDelimiter
  constructor: (@options = {}) ->
    logLevel = if @options.debug then 'debug' else 'info'
    @Logger = require('../logger')('IO::Reader', logLevel)
    @Logger.debug "options:", JSON.stringify(@options)
    @options.encoding = @options.encoding || 'utf-8'
    @header = null
    @rows = []

  # Read all rows from the given file (or stdin when no file is passed).
  # Returns a promise resolving to an array of row arrays.
  read: (file) =>
    # read from file or from stdin?
    if file
      @Logger.debug "stream file %s", file
      @inputStream = fs.createReadStream file
    else
      @Logger.debug "stream stdin"
      @inputStream = process.stdin

    if @options.importFormat == 'xlsx'
      @_readXlsx(@inputStream)
    else
      @_readCsv(@inputStream)

  # Parse a csv string (or buffer, decoded via `encoding`) into row arrays.
  @parseCsv: (csv, delimiter, encoding) ->
    rows = []
    options =
      delimiter: delimiter
      skip_empty_lines: true
      trim: true # trim csv cells

    # only buffer can be decoded from another encoding
    if csv instanceof Buffer
      csv = @decode(csv, encoding)

    new Promise (resolve, reject) ->
      Csv()
      .from.string(csv, options)
      .on 'record', (row) ->
        rows.push(row)
      .on 'error', (err) ->
        reject(err)
      .on 'end', () ->
        resolve(rows)

  # Buffer the whole stream, decode it, then delegate to parseCsv.
  _readCsv: (stream) =>
    new Promise (resolve, reject) =>
      buffers = []

      # stream whole file to buffer because we need to decode it first from buffer
      # - iconv-lite does not support string to string decoding
      stream.on 'data', (buffer) ->
        buffers.push buffer
      stream.on 'error', (err) -> reject(err)
      stream.on 'end', () =>
        @Logger.debug "file was readed"
        buffer = Buffer.concat(buffers)
        Reader.parseCsv(buffer, @options.csvDelimiter, @options.encoding)
        .then (parsed) -> resolve(parsed)
        .catch (err) -> reject(err)

  # Read the first worksheet of an xlsx stream into row arrays; empty cells
  # become "" and rich text cells are flattened to plain text.
  _readXlsx: (stream) =>
    workbook = new Excel.Workbook()
    workbook.xlsx.read(stream)
    .then (workbook) =>
      @Logger.debug "file was readed"

      rows = []
      worksheet = workbook.getWorksheet(1)
      worksheet.eachRow (row) =>
        rowValues = row.values
        # exceljs row values are 1-based; drop the unused first slot
        rowValues.shift()

        rows.push _.map rowValues, (item) =>
          if not item?
            item = ""

          if _.isObject(item) and item.richText
            @_stringifyRichText(item.richText)
          else
            String(item)
      rows

  # Decode a buffer with the given encoding ('utf-8' short-circuits iconv).
  @decode: (buffer, encoding) =>
    if encoding == 'utf-8'
      return buffer.toString()

    if not iconv.encodingExists encoding
      throw new Error 'Encoding does not exist: '+ encoding

    iconv.decode buffer, encoding

  # NOTE(review): the `header` parameter is unused and @header is only ever
  # initialized to null in this file - verify where @header gets assigned.
  getHeader: (header) =>
    @Logger.debug "get header"
    @header

  # method will remove styling from richText and return a plain text
  _stringifyRichText: (richText) ->
    richText.reduce((text, chunk) ->
      text + chunk.text
    , '')
110 |
111 | module.exports = Reader
112 |
--------------------------------------------------------------------------------
/src/coffee/io/writer.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | Csv = require 'csv'
3 | path = require 'path'
4 | Promise = require 'bluebird'
5 | iconv = require 'iconv-lite'
6 | fs = Promise.promisifyAll require('fs')
7 | Excel = require 'exceljs'
8 |
# Writes export rows as csv or xlsx to a file or stdout.
class Writer

  # options: debug, encoding, exportFormat ('csv'|'xlsx'), outputFile,
  # csvDelimiter. Throws on unsupported exportFormat.
  constructor: (@options = {}) ->
    logLevel = if @options.debug then 'debug' else 'info'
    @Logger = require('../logger')('IO::Writer', logLevel)

    @Logger.debug "options:", JSON.stringify(@options)
    @options.defaultEncoding = "utf8"
    @options.availableFormats = ["xlsx", "csv"]

    # NOTE(review): "alowed" is a typo in this user-facing message
    if @options.availableFormats.indexOf(@options.exportFormat) < 0
      throw new Error("Unsupported file type: #{@options.exportFormat}, alowed formats are #{@options.availableFormats.toString()}")

    # write to file or to stdout?
    if @options.outputFile
      @Logger.debug "stream file %s", @options.outputFile
      @outputStream = fs.createWriteStream @options.outputFile
    else
      @Logger.debug "stream stdout"
      @outputStream = process.stdout


    # if we use xlsx export - create workbook first
    if @options.exportFormat == 'xlsx'
      @options.workbookOpts = {
        stream: @outputStream,
        useStyles: true,
        useSharedStrings: true
      }

      @workbook = new Excel.stream.xlsx.WorkbookWriter @options.workbookOpts
      @worksheet = @workbook.addWorksheet('Products')

  # Encode a string with @options.encoding (no-op for the default encoding);
  # throws when the encoding is unknown to iconv-lite.
  encode: (string) =>
    if @options.encoding == @options.defaultEncoding
      return string

    if not iconv.encodingExists @options.encoding
      throw new Error 'Encoding does not exist: '+ @options.encoding

    iconv.encode string, @options.encoding

  # create header
  setHeader: (header) =>
    @Logger.debug "writing header %s", header

    if @options.exportFormat == 'xlsx'
      @_writeXlsxHeader header
    else
      @_writeCsvRows [header]

  # Append data rows in the configured output format.
  write: (rows) ->
    @Logger.debug "writing rows len: %d", rows.length

    if @options.exportFormat == 'xlsx'
      @_writeXlsxRows rows
    else
      @_writeCsvRows rows

  # go through all cells and
  # - replace undefined and empty strings with null value (alias an empty xlsx cell)
  _fixXlsxRow: (row) =>
    # replace selected values with null
    _.map row, (item) ->
      if typeof item == "undefined" || item == '' then null else item

  # Commit rows one by one to keep the streaming workbook writer happy.
  _writeXlsxRows: (rows) =>
    Promise.map rows, (row) =>
      @worksheet.addRow(@_fixXlsxRow(row)).commit()
    , { concurrency: 1}

  _writeXlsxHeader: (header) =>
    header = header.map((name) => {header: name})
    @worksheet.columns = header
    Promise.resolve()

  # Serialize row arrays to csv and write them to the output stream.
  _writeCsvRows: (data) =>
    new Promise (resolve, reject) =>
      # trailing empty row forces a terminating newline
      data.push([])
      parsedCsv = Csv().from(data, {delimiter: @options.csvDelimiter})

      # can't use .pipe - it would close stream right after first batch
      parsedCsv.to.string (string) =>
        try
          string = @encode(string)
        catch e
          return reject e

        @outputStream.write(string)
        resolve()

      parsedCsv
      .on 'error', (err) -> reject err

  # Finalize the output (commits the xlsx workbook; csv needs no flushing).
  flush: () =>
    @Logger.debug "flushing content"
    if @options.exportFormat == 'xlsx'
      @workbook.commit()
    else
      Promise.resolve()
109 |
110 | module.exports = Writer
111 |
--------------------------------------------------------------------------------
/src/coffee/logger.coffee:
--------------------------------------------------------------------------------
1 | {Logger} = require 'sphere-node-utils'
2 |
# Logger subclass scoped to this application; the reported app name is read
# from this package's package.json.
class MyCustomLogger extends Logger
  @appName: require('../package.json').name
5 |
# Factory for a scoped logger. `scope` names the component in each log line;
# `logLevel` defaults to 'info'.
module.exports = (scope, logLevel = 'info') ->
  options =
    name: scope
    levelStream: logLevel
  new MyCustomLogger options
10 |
--------------------------------------------------------------------------------
/src/coffee/main.coffee:
--------------------------------------------------------------------------------
# Public entry points of this package.
module.exports =
  Export: require './export'
  ExportMapping: require './exportmapping'
  Categories: require './categories'
  Header: require './header'
  Import: require './import'
  Mapping: require './mapping'
  Validator: require './validator'
9 |
--------------------------------------------------------------------------------
/src/coffee/mapping.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | _.mixin require('underscore.string').exports()
3 | CONS = require './constants'
4 | GLOBALS = require './globals'
5 |
6 | # TODO:
7 | # - JSDoc
8 | # - no services!!!
9 | # - utils only
10 | class Mapping
11 |
12 | constructor: (options = {}) ->
13 | @types = options.types
14 | @customerGroups = options.customerGroups
15 | @categories = options.categories
16 | @taxes = options.taxes
17 | @states = options.states
18 | @channels = options.channels
19 | @continueOnProblems = options.continueOnProblems
20 | @errors = []
21 |
  # Assemble the full product draft for one csv group (master row plus its
  # variant rows) and return it together with row/publish bookkeeping.
  mapProduct: (raw, productType) ->
    # NOTE(review): this fallback assigns the raw product-type CELL (a string),
    # while mapBaseProduct reads productType.id - verify callers always pass a
    # resolved productType object here.
    productType or= raw.master[@header.toIndex CONS.HEADER_PRODUCT_TYPE]
    rowIndex = raw.startRow

    product = @mapBaseProduct raw.master, productType, rowIndex
    # the master variant defaults to id 1, further variants to their position + 2
    product.masterVariant = @mapVariant raw.master, Number(raw.master[@header.toIndex CONS.HEADER_VARIANT_ID]) || 1, productType, rowIndex, product
    _.each raw.variants, (entry, index) =>
      product.variants.push @mapVariant entry.variant, Number(entry.variant[@header.toIndex CONS.HEADER_VARIANT_ID]) || index + 2, productType, entry.rowIndex, product

    data =
      product: product
      rowIndex: raw.startRow
      header: @header
      publish: raw.publish
    data
37 |
  # Build the base product draft (variants not yet filled in) from the master
  # row: product type reference, id/key, meta fields, categories, tax, state,
  # category order hints, localized base attributes and a slug.
  mapBaseProduct: (rawMaster, productType, rowIndex) ->
    product =
      productType:
        typeId: 'product-type'
        id: productType.id
      masterVariant: {}
      variants: []

    if @header.has(CONS.HEADER_ID)
      product.id = rawMaster[@header.toIndex CONS.HEADER_ID]
    if @header.has(CONS.HEADER_KEY)
      product.key = rawMaster[@header.toIndex CONS.HEADER_KEY]
    if @header.has(CONS.HEADER_META_TITLE)
      product.metaTitle = rawMaster[@header.toIndex CONS.HEADER_META_TITLE] or {}
    if @header.has(CONS.HEADER_META_DESCRIPTION)
      product.metaDescription = rawMaster[@header.toIndex CONS.HEADER_META_DESCRIPTION] or {}
    if @header.has(CONS.HEADER_META_KEYWORDS)
      product.metaKeywords = rawMaster[@header.toIndex CONS.HEADER_META_KEYWORDS] or {}
    if @header.has(CONS.HEADER_SEARCH_KEYWORDS)
      product.searchKeywords = rawMaster[@header.toIndex CONS.HEADER_SEARCH_KEYWORDS] or {}

    product.categories = @mapCategories rawMaster, rowIndex
    tax = @mapTaxCategory rawMaster, rowIndex
    product.taxCategory = tax if tax
    state = @mapState rawMaster, rowIndex
    product.state = state if state
    product.categoryOrderHints = @mapCategoryOrderHints rawMaster, rowIndex

    # map all localized base attributes (name, description, slug, ...)
    for attribName in CONS.BASE_LOCALIZED_HEADERS
      if attribName is CONS.HEADER_SEARCH_KEYWORDS
        val = @mapSearchKeywords rawMaster, attribName, @header.toLanguageIndex()
      else
        val = @mapLocalizedAttrib rawMaster, attribName, @header.toLanguageIndex()
      product[attribName] = val if val

    # no slug given: generate one from the default-language name
    unless product.slug
      product.slug = {}
      if product.name? and product.name[GLOBALS.DEFAULT_LANGUAGE]?
        product.slug[GLOBALS.DEFAULT_LANGUAGE] = @ensureValidSlug(_.slugify product.name[GLOBALS.DEFAULT_LANGUAGE], rowIndex)
    product
78 |
  # Validate a slug and guarantee uniqueness within this import run.
  # A valid slug is a string longer than 2 characters; on collision a random
  # five digit suffix is appended and re-checked recursively. Returns the
  # (possibly suffixed) slug, or undefined after recording an error.
  ensureValidSlug: (slug, rowIndex, appendix = '') ->
    unless _.isString(slug) and slug.length > 2
      @errors.push "[row #{rowIndex}:#{CONS.HEADER_SLUG}] Can't generate valid slug out of '#{slug}'! If you did not provide slug in your file, please do so as slug could not be auto-generated from the product name given."
      return
    @slugs or= []
    currentSlug = "#{slug}#{appendix}"
    unless _.contains(@slugs, currentSlug)
      @slugs.push currentSlug
      return currentSlug
    @ensureValidSlug slug, rowIndex, Math.floor((Math.random() * 89999) + 10001) # five digits (10001..99999)
89 |
90 | hasValidValueForHeader: (row, headerName) ->
91 | return false unless @header.has(headerName)
92 | @isValidValue(row[@header.toIndex headerName])
93 |
94 | isValidValue: (rawValue) ->
95 | return _.isString(rawValue) and rawValue.length > 0
96 |
  # Resolve the categories column into category references. Each entry may be
  # given as external id, fully qualified name, (unique) name or key; entries
  # that cannot be resolved are reported via @errors, or only warned about
  # when continueOnProblems is set.
  mapCategories: (rawMaster, rowIndex) ->
    categories = []
    return categories unless @hasValidValueForHeader(rawMaster, CONS.HEADER_CATEGORIES)
    rawCategories = rawMaster[@header.toIndex CONS.HEADER_CATEGORIES].split GLOBALS.DELIM_MULTI_VALUE
    for rawCategory in rawCategories
      cat =
        typeId: 'category'
      if _.has(@categories.externalId2id, rawCategory)
        cat.id = @categories.externalId2id[rawCategory]
      else if _.has(@categories.fqName2id, rawCategory)
        cat.id = @categories.fqName2id[rawCategory]
      else if _.has(@categories.name2id, rawCategory)
        # names are only usable when they are unambiguous
        if _.contains(@categories.duplicateNames, rawCategory)
          msg = "[row #{rowIndex}:#{CONS.HEADER_CATEGORIES}] The category '#{rawCategory}' is not unqiue!"
          if @continueOnProblems
            console.warn msg
          else
            @errors.push msg
        else
          cat.id = @categories.name2id[rawCategory]
      else if _.has(@categories.key2Id, rawCategory)
        cat.id = @categories.key2Id[rawCategory]

      if cat.id
        categories.push cat

      else
        msg = "[row #{rowIndex}:#{CONS.HEADER_CATEGORIES}] Can not find category for '#{rawCategory}'!"
        if @continueOnProblems
          console.warn msg
        else
          @errors.push msg

    categories
131 |
132 | # parses the categoryOrderHints column for a given row
133 | mapCategoryOrderHints: (rawMaster, rowIndex) ->
134 | catOrderHints = {}
135 | # check if there actually is something to parse in the column
136 | return catOrderHints unless @hasValidValueForHeader(rawMaster, CONS.HEADER_CATEGORY_ORDER_HINTS)
137 | # parse the value to get a list of all catOrderHints
138 | rawCatOrderHints = rawMaster[@header.toIndex CONS.HEADER_CATEGORY_ORDER_HINTS].split GLOBALS.DELIM_MULTI_VALUE
139 | _.each rawCatOrderHints, (rawCatOrderHint) =>
140 | # extract the category id and the order hint from the raw value
141 | [rawCatId, rawOrderHint] = rawCatOrderHint.split ':'
142 | orderHint = parseFloat(rawOrderHint)
143 | # check if the product is actually assigned to the category
144 | catId =
145 | if _.has(@categories.id2fqName, rawCatId)
146 | rawCatId
147 | else if _.has(@categories.externalId2id, rawCatId)
148 | @categories.externalId2id[rawCatId]
149 | # in case the category was provided as the category name
150 | # check if the product is actually assigend to the category
151 | else if _.has(@categories.name2id, rawCatId)
152 | # get the actual category id instead of the category name
153 | @categories.name2id[rawCatId]
154 | # in case the category was provided using the category slug
155 | else if _.contains(@categories.id2slug, rawCatId)
156 | # get the actual category id instead of the category name
157 | _.findKey @categories.id2slug, (slug) ->
158 | slug == rawCatId
159 | else
160 | msg = "[row #{rowIndex}:#{CONS.HEADER_CATEGORY_ORDER_HINTS}] Can not find category for ID '#{rawCatId}'!"
161 | if @continueOnProblems
162 | console.warn msg
163 | else
164 | @errors.push msg
165 | null
166 |
167 | if orderHint == NaN
168 | msg = "[row #{rowIndex}:#{CONS.HEADER_CATEGORY_ORDER_HINTS}] Order hint has to be a valid number!"
169 | if @continueOnProblems
170 | console.warn msg
171 | else
172 | @errors.push msg
173 | else if !(orderHint > 0 && orderHint < 1)
174 | msg = "[row #{rowIndex}:#{CONS.HEADER_CATEGORY_ORDER_HINTS}] Order hint has to be < 1 and > 0 but was '#{orderHint}'!"
175 | if @continueOnProblems
176 | console.warn msg
177 | else
178 | @errors.push msg
179 | else
180 | if catId
181 | # orderHint and catId are ensured to be valid
182 | catOrderHints[catId] = orderHint.toString()
183 |
184 | catOrderHints
185 |
186 |
187 | mapTaxCategory: (rawMaster, rowIndex) ->
188 | return unless @hasValidValueForHeader(rawMaster, CONS.HEADER_TAX)
189 | rawTax = rawMaster[@header.toIndex CONS.HEADER_TAX]
190 | if _.contains(@taxes.duplicateNames, rawTax)
191 | @errors.push "[row #{rowIndex}:#{CONS.HEADER_TAX}] The tax category '#{rawTax}' is not unqiue!"
192 | return
193 | unless _.has(@taxes.name2id, rawTax)
194 | @errors.push "[row #{rowIndex}:#{CONS.HEADER_TAX}] The tax category '#{rawTax}' is unknown!"
195 | return
196 |
197 | tax =
198 | typeId: 'tax-category'
199 | id: @taxes.name2id[rawTax]
200 |
201 | mapState: (rawMaster, rowIndex) ->
202 | return unless @hasValidValueForHeader(rawMaster, CONS.HEADER_STATE)
203 | rawState = rawMaster[@header.toIndex CONS.HEADER_STATE]
204 | if _.contains(@states.duplicateKeys, rawState)
205 | @errors.push "[row #{rowIndex}:#{CONS.HEADER_STATE}] The state '#{rawState}' is not unqiue!"
206 | return
207 | unless _.has(@states.key2id, rawState)
208 | @errors.push "[row #{rowIndex}:#{CONS.HEADER_STATE}] The state '#{rawState}' is unknown!"
209 | return
210 |
211 | state =
212 | typeId: 'state'
213 | id: @states.key2id[rawState]
214 |
  # Build one variant draft (id, key, sku, attributes, prices, images) from a
  # raw row. sameForAll attributes are copied from the master variant instead
  # of being re-parsed. Returns undefined when an explicit variant id cell
  # cannot be parsed (mapInteger records the error).
  mapVariant: (rawVariant, variantId, productType, rowIndex, product) ->
    if variantId > 2 and @header.has(CONS.HEADER_VARIANT_ID)
      vId = @mapInteger rawVariant[@header.toIndex CONS.HEADER_VARIANT_ID], CONS.HEADER_VARIANT_ID, rowIndex
      if vId? and not _.isNaN vId
        variantId = vId
      else
        # we have no valid variant id - mapInteger already mentioned this as error
        return

    variant =
      id: variantId
      attributes: []

    if @header.has(CONS.HEADER_VARIANT_KEY)
      variant.key = rawVariant[@header.toIndex CONS.HEADER_VARIANT_KEY]

    variant.sku = rawVariant[@header.toIndex CONS.HEADER_SKU] if @header.has CONS.HEADER_SKU

    languageHeader2Index = @header._productTypeLanguageIndexes productType
    if productType.attributes
      for attribute in productType.attributes
        attrib = undefined
        # sameForAll attributes must be identical on all variants - reuse the
        # value already mapped on the master variant
        if attribute.attributeConstraint is CONS.ATTRIBUTE_CONSTRAINT_SAME_FOR_ALL
          attrib = _.find product.masterVariant.attributes, (a) -> a.name is attribute.name

        if not attrib
          attrib = @mapAttribute rawVariant, attribute, languageHeader2Index, rowIndex
        variant.attributes.push attrib if attrib
    variant.prices = @mapPrices rawVariant[@header.toIndex CONS.HEADER_PRICES], rowIndex
    variant.images = @mapImages rawVariant, variantId, rowIndex

    variant
247 |
248 | mapAttribute: (rawVariant, attribute, languageHeader2Index, rowIndex) ->
249 | # if attribute conflicts with some base product property prefix it with "attribute." string
250 | prefixedAttributeName = if attribute.name in CONS.PRODUCT_LEVEL_PROPERTIES.concat(CONS.ALL_HEADERS)
251 | "attribute.#{attribute.name}"
252 | else
253 | attribute.name
254 |
255 | value = @mapValue rawVariant, prefixedAttributeName, attribute, languageHeader2Index, rowIndex
256 | return undefined if _.isUndefined(value) or (_.isObject(value) and _.isEmpty(value)) or (_.isString(value) and _.isEmpty(value))
257 | attribute =
258 | name: attribute.name
259 | value: value
260 | attribute
261 |
262 | mapValue: (rawVariant, attributeName, attribute, languageHeader2Index, rowIndex) ->
263 | switch attribute.type.name
264 | when CONS.ATTRIBUTE_TYPE_SET then @mapSetAttribute rawVariant, attributeName, attribute.type.elementType, languageHeader2Index, rowIndex
265 | when CONS.ATTRIBUTE_TYPE_LTEXT then @mapLocalizedAttrib rawVariant, attributeName, languageHeader2Index
266 | when CONS.ATTRIBUTE_TYPE_NUMBER then @mapNumber rawVariant[@header.toIndex attributeName], attribute.name, rowIndex
267 | when CONS.ATTRIBUTE_TYPE_BOOLEAN then @mapBoolean rawVariant[@header.toIndex attributeName], attribute.name, rowIndex
268 | when CONS.ATTRIBUTE_TYPE_MONEY then @mapMoney rawVariant[@header.toIndex attributeName], attribute.name, rowIndex
269 | when CONS.ATTRIBUTE_TYPE_REFERENCE then @mapReference rawVariant[@header.toIndex attributeName], attribute.type
270 | when CONS.ATTRIBUTE_TYPE_ENUM then @mapEnumAttribute rawVariant[@header.toIndex attributeName], attribute.type.values
271 | when CONS.ATTRIBUTE_TYPE_LENUM then @mapEnumAttribute rawVariant[@header.toIndex attributeName], attribute.type.values
272 | else rawVariant[@header.toIndex attributeName] # works for text
273 |
274 | mapEnumAttribute: (enumKey, enumValues) ->
275 | if enumKey
276 | _.find enumValues, (value) -> value.key is enumKey
277 |
  # Map a multi-value (set) attribute cell into a list of values.
  # For ltext sets the per-language columns are zipped together by position:
  # the Nth entry of every language column becomes one localized object.
  # For all other element types the single cell is split on
  # GLOBALS.DELIM_MULTI_VALUE and each piece mapped individually.
  # Returns undefined when the cell is empty/invalid (no explicit value).
  mapSetAttribute: (rawVariant, attributeName, elementType, languageHeader2Index, rowIndex) ->
    if elementType.name is CONS.ATTRIBUTE_TYPE_LTEXT
      multiValObj = @mapLocalizedAttrib rawVariant, attributeName, languageHeader2Index
      value = []
      # underscore passes (value, key) -> (raw cell, language) for objects
      _.each multiValObj, (raw, lang) =>
        if @isValidValue(raw)
          languageVals = raw.split GLOBALS.DELIM_MULTI_VALUE
          _.each languageVals, (v, index) ->
            localized = {}
            localized[lang] = v
            # merge this language into the set entry at the same position
            value[index] = _.extend (value[index] or {}), localized
      value
    else
      raw = rawVariant[@header.toIndex attributeName]
      if @isValidValue(raw)
        rawValues = raw.split GLOBALS.DELIM_MULTI_VALUE
        _.map rawValues, (rawValue) =>
          switch elementType.name
            when CONS.ATTRIBUTE_TYPE_MONEY
              @mapMoney rawValue, attributeName, rowIndex
            when CONS.ATTRIBUTE_TYPE_NUMBER
              @mapNumber rawValue, attributeName, rowIndex
            when CONS.ATTRIBUTE_TYPE_BOOLEAN
              @mapBoolean rawValue, attributeName, rowIndex
            when CONS.ATTRIBUTE_TYPE_ENUM
              @mapEnumAttribute rawValue, elementType.values
            when CONS.ATTRIBUTE_TYPE_LENUM
              @mapEnumAttribute rawValue, elementType.values
            when CONS.ATTRIBUTE_TYPE_REFERENCE
              @mapReference rawValue, elementType
            else
              # plain text element type: keep the raw string
              rawValue
310 |
  # Parse the multi-value prices cell into commercetools price objects.
  # Each raw price is matched against CONS.REGEX_PRICE; unparsable entries
  # are skipped with a row-scoped error. Customer group names and channel
  # keys are resolved through the cached name/key -> id maps.
  # NOTE(review): a missing customer group or channel aborts with `return []`,
  # discarding ALL prices of this row — confirm that is intended.
  mapPrices: (raw, rowIndex) ->
    prices = []
    return prices unless @isValidValue(raw)
    rawPrices = raw.split GLOBALS.DELIM_MULTI_VALUE
    for rawPrice in rawPrices
      matchedPrice = CONS.REGEX_PRICE.exec rawPrice
      unless matchedPrice
        @errors.push "[row #{rowIndex}:#{CONS.HEADER_PRICES}] Can not parse price '#{rawPrice}'!"
        continue

      # capture-group positions defined by CONS.REGEX_PRICE — TODO confirm
      # against constants.coffee when changing the price syntax
      country = matchedPrice[2]
      currencyCode = matchedPrice[3]
      centAmount = matchedPrice[4]
      customerGroupName = matchedPrice[8]
      channelKey = matchedPrice[10]
      validFrom = matchedPrice[12]
      validUntil = matchedPrice[14]
      tiers = matchedPrice[16]
      price =
        value: @mapMoney "#{currencyCode} #{centAmount}", CONS.HEADER_PRICES, rowIndex
      # optional parts are only set when present in the raw price
      price.validFrom = validFrom if validFrom
      price.validUntil = validUntil if validUntil
      price.country = country if country
      price.tiers = @mapTiers tiers if tiers

      if customerGroupName
        unless _.has(@customerGroups.name2id, customerGroupName)
          @errors.push "[row #{rowIndex}:#{CONS.HEADER_PRICES}] Can not find customer group '#{customerGroupName}'!"
          return []
        price.customerGroup =
          typeId: 'customer-group'
          id: @customerGroups.name2id[customerGroupName]
      if channelKey
        unless _.has(@channels.key2id, channelKey)
          @errors.push "[row #{rowIndex}:#{CONS.HEADER_PRICES}] Can not find channel with key '#{channelKey}'!"
          return []
        price.channel =
          typeId: 'channel'
          id: @channels.key2id[channelKey]

      prices.push price

    prices
354 |
355 | mapTiers: (tiers) ->
356 | unless tiers
357 | return []
358 | tiers.split(GLOBALS.DELIM_TIERS_MULTI_VALUE).map((priceTier) ->
359 | matchedPriceTier = priceTier.split(/ |@/g)
360 | formattedTier =
361 | value:
362 | currencyCode: matchedPriceTier[0]
363 | centAmount: parseInt matchedPriceTier[1], 10
364 | minimumQuantity: parseInt matchedPriceTier[3], 10
365 | )
366 |
367 | # EUR 300
368 | # USD 999
369 | mapMoney: (rawMoney, attribName, rowIndex) ->
370 | return unless @isValidValue(rawMoney)
371 | matchedMoney = CONS.REGEX_MONEY.exec rawMoney
372 | unless matchedMoney
373 | @errors.push "[row #{rowIndex}:#{attribName}] Can not parse money '#{rawMoney}'!"
374 | return
375 | # TODO: check for correct currencyCode
376 |
377 | money =
378 | currencyCode: matchedMoney[1]
379 | centAmount: parseInt matchedMoney[2]
380 |
381 | mapReference: (rawReference, attributeType) ->
382 | return undefined unless rawReference
383 | ref =
384 | id: rawReference
385 | typeId: attributeType.referenceTypeId
386 |
387 | mapInteger: (rawNumber, attribName, rowIndex) ->
388 | parseInt @mapNumber(rawNumber, attribName, rowIndex, CONS.REGEX_INTEGER)
389 |
390 | mapNumber: (rawNumber, attribName, rowIndex, regEx = CONS.REGEX_FLOAT) ->
391 | return unless @isValidValue(rawNumber)
392 | matchedNumber = regEx.exec rawNumber
393 | unless matchedNumber
394 | @errors.push "[row #{rowIndex}:#{attribName}] The number '#{rawNumber}' isn't valid!"
395 | return
396 | parseFloat matchedNumber[0]
397 |
398 | mapBoolean: (rawBoolean, attribName, rowIndex) ->
399 | if _.isUndefined(rawBoolean) or (_.isString(rawBoolean) and _.isEmpty(rawBoolean))
400 | return
401 | errorMsg = "[row #{rowIndex}:#{attribName}] The value '#{rawBoolean}' isn't a valid boolean!"
402 | try
403 | b = JSON.parse(rawBoolean.toLowerCase())
404 | if _.isBoolean(b) or b == 0 or b == 1
405 | return Boolean(b)
406 | else
407 | @errors.push errorMsg
408 | return
409 | catch
410 | @errors.push errorMsg
411 |
  # Collect a localized attribute from per-language columns, e.g.
  #   header: "a.en,a.de,a.it"
  #   row:    "hi,Hallo,ciao"
  # ->
  #   values:
  #     de: 'Hallo'
  #     en: 'hi'
  #     it: 'ciao'
  # @param {Array} row - raw CSV row
  # @param {String} attribName - base attribute name (without language suffix)
  # @param {Object} langH2i - map of attribute name -> { language: columnIndex }
  # @return {Object} language -> value map, or undefined when nothing was set
  mapLocalizedAttrib: (row, attribName, langH2i) ->
    values = {}
    if _.has langH2i, attribName
      # underscore passes (value, key) -> (columnIndex, language) for objects
      _.each langH2i[attribName], (index, language) ->
        val = row[index]
        values[language] = val if val
    # fall back to non localized column if language columns could not be found
    if _.size(values) is 0
      return unless @header.has(attribName)
      val = row[@header.toIndex attribName]
      values[GLOBALS.DEFAULT_LANGUAGE] = val if val

    return if _.isEmpty values
    values
432 |
433 | # "a.en,a.de,a.it"
434 | # "hi,Hallo,ciao"
435 | # values:
436 | # de: 'Hallo'
437 | # en: 'hi'
438 | # it: 'ciao'
439 | mapSearchKeywords: (row, attribName, langH2i) ->
440 | values = {}
441 | if _.has langH2i, attribName
442 | _.each langH2i[attribName], (index, language) ->
443 | val = row[index]
444 | if not _.isString(val) || val == ""
445 | return
446 |
447 | singleValues = val.split GLOBALS.DELIM_MULTI_VALUE
448 | texts = []
449 | _.each singleValues, (v, index) ->
450 | texts.push { text: v}
451 | values[language] = texts
452 | # fall back to non localized column if language columns could not be found
453 | if _.size(values) is 0
454 | return unless @header.has(attribName)
455 | val = row[@header.toIndex attribName]
456 | values[GLOBALS.DEFAULT_LANGUAGE].text = val if val
457 |
458 | return if _.isEmpty values
459 | values
460 |
461 | mapImages: (rawVariant, variantId, rowIndex) ->
462 | images = []
463 | return images unless @hasValidValueForHeader(rawVariant, CONS.HEADER_IMAGES)
464 | rawImages = rawVariant[@header.toIndex CONS.HEADER_IMAGES].split GLOBALS.DELIM_MULTI_VALUE
465 |
466 | for rawImage in rawImages
467 | # Url Example :- https://jeanscentre-static.joggroup.net/sys|BundleThumb|200x200
468 | imageAttributes = rawImage.split GLOBALS.DELIM_URL_ATTRIBUTES_SEPERATOR
469 | dimensions = (imageAttributes[2] || "").split GLOBALS.DELIM_DIMENSIONS_SEPERATOR
470 | width = dimensions[0]
471 | height = dimensions[1]
472 | image =
473 | url: imageAttributes[0]
474 | dimensions:
475 | w: if isNaN(width) then 0 else Number width
476 | h: if isNaN(height) then 0 else Number height
477 | label: imageAttributes[1] || ""
478 | images.push image
479 |
480 | images
481 |
482 |
483 | module.exports = Mapping
484 |
--------------------------------------------------------------------------------
/src/coffee/matchutils.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | GLOBALS = require './globals'
3 | Helpers = require './helpers'
4 |
class MatchUtils

  # Create map of product.id -> product
  # @param {Object} product - Product
  @mapById: (product) ->
    Helpers.initMap(product.id, product)

  # Create map of product.slug (default language) -> product
  # @param {Object} product - Product
  @mapBySlug: (product) ->
    Helpers.initMap(product.slug[GLOBALS.DEFAULT_LANGUAGE], product)

  # Create map of every variant sku -> product
  # @param {Object} product - Product
  @mapBySku: (product) ->
    variants = [product.masterVariant].concat(product.variants || [])
    map = {}
    for variant in variants
      _.extend(map, Helpers.initMap(variant.sku, product))
    map

  # Create map of custom attribute value -> product
  # @param {String} attrName - name of the attribute
  # @param {Object} product - Product
  @mapByCustomAttribute: (attrName) -> (product) ->
    variants = [product.masterVariant].concat(product.variants || [])
    variants.reduce((map, variant) ->
      # NOTE(review): throws when a variant lacks the attribute — confirm inputs
      value = variant.attributes.filter((a) -> a.name == attrName)[0].value
      _.extend(map, Helpers.initMap(value, product))
    , {})

  # Resolve the identifier accessor for a matched entry
  # @param {String} matchBy - attribute which will be used as an identifier
  @mapIdentifier: (matchBy) ->
    switch matchBy
      when 'id' then (entry) -> entry.product.id
      when 'slug' then (entry) -> entry.product.slug[GLOBALS.DEFAULT_LANGUAGE]
      when 'sku' then (entry) -> entry.product.masterVariant.sku
      else (entry) ->
        attrs = entry.product.masterVariant.attributes
        attrs.filter((a) -> a.name == matchBy)[0].value

  # Resolve the map-builder matching the identifier attribute
  # @param {String} matchBy - attribute which will be used as an identifier
  @mapMapper: (matchBy) ->
    switch matchBy
      when 'id' then MatchUtils.mapById
      when 'slug' then MatchUtils.mapBySlug
      when 'sku' then MatchUtils.mapBySku
      else MatchUtils.mapByCustomAttribute(matchBy)

  # Build a matcher: given an entry, return the existing product that shares
  # its identifier (or undefined when none matches)
  # @param {String} matchBy - identifier attribute name
  # @param {Array} existingProducts - array of existing products
  @initMatcher: (matchBy, existingProducts) ->
    mapper = MatchUtils.mapMapper(matchBy)
    identifier = MatchUtils.mapIdentifier(matchBy)
    index = {}
    for product in existingProducts
      _.extend(index, mapper(product))
    (entry) ->
      index[identifier(entry)]
64 |
65 | module.exports = MatchUtils
66 |
--------------------------------------------------------------------------------
/src/coffee/queryutils.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | GLOBALS = require './globals'
3 |
# Internal predicate helpers for building query strings.
class Helpers

  # Truthiness check used to drop missing values from predicates.
  @exists: (x) -> Boolean(x)
7 |
# Builders for commercetools product-projection query predicates.
class QueryUtils

  # Pick the predicate builder matching the identifier attribute.
  @mapMatchFunction: (matchBy) ->
    switch matchBy
      when 'id' then QueryUtils.matchById
      when 'slug' then QueryUtils.matchBySlug
      when 'sku' then QueryUtils.matchBySku
      else QueryUtils.matchByCustomAttribute(matchBy)

  # Predicate matching products by `id`.
  # @param {Array} products
  @matchById: (products) ->
    ids = _.map products, (p) -> p.product.id
    QueryUtils.formatAttributePredicate("id", ids)

  # Predicate matching products by `slug` in the default language.
  # @param {Array} products
  @matchBySlug: (products) ->
    slugs = _.map products, (p) -> p.product.slug[GLOBALS.DEFAULT_LANGUAGE]
    "slug(#{QueryUtils.formatAttributePredicate(GLOBALS.DEFAULT_LANGUAGE, slugs)})"

  # Predicate matching products by masterVariant `sku` (missing skus dropped).
  # @param {Array} products
  @matchBySku: (products) ->
    skus = _.map(products, (p) -> p.product.masterVariant.sku)
      .filter(Helpers.exists)
    QueryUtils.applyPredicateToVariants(
      QueryUtils.formatAttributePredicate("sku", skus))

  # Predicate matching products by a custom attribute across all variants.
  # @param {String} attribute - attribute name
  @matchByCustomAttribute: (attribute) ->
    (products) ->
      nested = products.map (p) ->
        [QueryUtils.valueOf(p.product.masterVariant.attributes, attribute)]
          .concat(p.product.variants.map((v) ->
            QueryUtils.valueOf(v.attributes, attribute)))
      # deep flatten keeps set-attribute values usable in the predicate
      values = _.flatten(nested).filter(Helpers.exists)
      predicate = QueryUtils.formatCustomAttributePredicate(attribute, values)
      QueryUtils.applyPredicateToVariants(predicate)

  # Withdraw particular attribute value out of attributes collection
  # @param {Array} attributes - attributes collection
  # @param {String} name - name of the attribute
  # @return {Any} attribute value if found
  @valueOf: (attributes, name) ->
    match = _.find attributes, (attribute) -> attribute.name is name
    match?.value

  # "<name> in (...)" predicate over escaped items.
  @formatAttributePredicate: (name, items) ->
    "#{name} in (#{@escapeItems items})"

  # attributes(name=... and value in (...)) predicate over escaped items.
  @formatCustomAttributePredicate: (name, items) ->
    "attributes(name=\"#{name}\" and value in (#{@escapeItems items}))"

  # Apply the same predicate to masterVariant and variants.
  @applyPredicateToVariants: (predicate) ->
    "masterVariant(#{predicate}) or variants(#{predicate})"

  # JSON-escape and comma-join; empty list yields '""' so the
  # predicate stays syntactically valid.
  @escapeItems: (items) ->
    return '""' unless items.length
    (JSON.stringify(item) for item in items).join(", ")
79 |
80 | module.exports = QueryUtils
81 |
--------------------------------------------------------------------------------
/src/coffee/resourceutils.coffee:
--------------------------------------------------------------------------------
1 | { createRequestBuilder } = require '@commercetools/api-request-builder'
2 |
# Fetch every item of the given resource (e.g. 'states', 'productTypes')
# by paging through the API with `accumulate: true` and concatenating the
# `results` array of every page.
# @param {Object} client - commercetools sdk client
# @param {String} projectKey
# @param {String} resource - request-builder service name
# @return {Promise<Array>} all fetched resources
exports.fetchResources = (client, projectKey, resource) ->
  service = createRequestBuilder { projectKey }
  request =
    uri: service[resource].build()
    method: 'GET'
  passThrough = (payload) -> Promise.resolve(payload)
  client.process(request, passThrough, { accumulate: true })
    .then (pages) ->
      results = []
      for page in pages
        results = results.concat(page.body.results)
      results
--------------------------------------------------------------------------------
/src/coffee/states.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | { fetchResources } = require './resourceutils'
3 |
4 | # TODO:
5 | # - JSDoc
6 | # - make it util only
class States
  # Caches state key <-> id lookups for the project.
  constructor: ->
    @key2id = {}
    @id2key = {}
    @duplicateKeys = []

  # Fetch all states of the project.
  getAll: (client, projectKey) ->
    fetchResources(client, projectKey, 'states')

  # Populate the lookup maps. Keys seen more than once are recorded in
  # @duplicateKeys; the last id wins in @key2id.
  buildMaps: (states) ->
    for state in states
      key = state.key
      id = state.id

      @id2key[id] = key

      if _.has @key2id, key
        @duplicateKeys.push key
      @key2id[key] = id
    states
26 |
27 |
28 | module.exports = States
29 |
--------------------------------------------------------------------------------
/src/coffee/taxes.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | { fetchResources } = require './resourceutils'
3 |
4 | # TODO:
5 | # - JSDoc
6 | # - make it util only
class Taxes
  # Caches tax-category name <-> id lookups for the project.
  constructor: ->
    @name2id = {}
    @id2name = {}
    @duplicateNames = []

  # Fetch all tax categories of the project.
  getAll: (client, projectKey) ->
    fetchResources(client, projectKey, 'taxCategories')

  # Populate the lookup maps. Names seen more than once are recorded in
  # @duplicateNames; the last id wins in @name2id.
  buildMaps: (taxCategories) ->
    for taxCat in taxCategories
      name = taxCat.name
      id = taxCat.id

      @id2name[id] = name

      if _.has @name2id, name
        @duplicateNames.push name
      @name2id[name] = id
    taxCategories
26 |
27 |
28 | module.exports = Taxes
29 |
--------------------------------------------------------------------------------
/src/coffee/types.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | CONS = require './constants'
3 | { fetchResources } = require './resourceutils'
4 |
5 | # TODO:
6 | # - JSDoc
7 | # - make it util only
class Types
  # Caches product-type lookups: name -> id, id -> list index, plus the
  # per-type SameForAll attribute names and name -> attribute definitions.
  constructor: ->
    @id2index = {}
    @name2id = {}
    @duplicateNames = []
    @id2SameForAllAttributes = {}
    @id2nameAttributeDefMap = {}

  # Fetch all product types of the project.
  getAll: (client, projectKey) ->
    fetchResources(client, projectKey, 'productTypes')

  # Populate the lookup maps. Duplicate names are recorded in
  # @duplicateNames; the last id wins in @name2id.
  buildMaps: (productTypes) ->
    for pt, index in productTypes
      name = pt.name
      id = pt.id

      @id2index[id] = index
      @id2SameForAllAttributes[id] = []
      @id2nameAttributeDefMap[id] = {}

      if _.has @name2id, name
        @duplicateNames.push name
      @name2id[name] = id

      pt.attributes or= []
      for attribute in pt.attributes
        if attribute.attributeConstraint is CONS.ATTRIBUTE_CONSTRAINT_SAME_FOR_ALL
          @id2SameForAllAttributes[id].push(attribute.name)
        @id2nameAttributeDefMap[id][attribute.name] = attribute
    productTypes
36 |
37 |
38 | module.exports = Types
39 |
--------------------------------------------------------------------------------
/src/coffee/validator.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | _.mixin require('underscore.string').exports()
3 | Promise = require 'bluebird'
4 | Csv = require 'csv'
5 | CONS = require './constants'
6 | GLOBALS = require './globals'
7 | Mapping = require './mapping'
8 | Header = require './header'
9 |
# Parses product CSV content and validates it offline (headers, delimiters,
# product/variant structure) and online (product types fetched from the API).
# Accumulates problems in @errors and the parsed rows in @rawProducts.
class Validator

  # @param {Object} options - csv options plus the cache classes
  #   (types, customerGroups, categories, taxes, states, channels)
  # @param {Object} client - commercetools sdk client
  # @param {String} projectKey
  constructor: (options = {}, client, projectKey) ->
    @types = options.types
    @customerGroups = options.customerGroups
    @categories = options.categories
    @taxes = options.taxes
    @states = options.states
    @channels = options.channels

    options.validator = @
    # TODO:
    # - pass only correct options, not all classes
    # - avoid creating a new instance of the client, since it should be created from Import class
    @client = client
    @projectKey = projectKey
    @rawProducts = []
    @errors = []
    @suppressMissingHeaderWarning = false
    @csvOptions =
      delimiter: options.csvDelimiter or ','
      quote: options.csvQuote or '"'
      trim: true

  # Parse a CSV string into { header, data, count }; rejects on CSV errors.
  parse: (csvString) ->
    # TODO: use parser with streaming API
    # https://github.com/sphereio/sphere-node-product-csv-sync/issues/56
    new Promise (resolve, reject) =>
      Csv().from.string(csvString, @csvOptions)
      .on 'error', (error) -> reject error
      .to.array (data) =>
        data = @serialize(data)
        resolve data

  # Split parsed rows into the Header (first row) and the data rows.
  serialize: (data) =>
    @header = new Header(data[0])
    return {
      header: @header
      data: _.rest(data)
      count: data.length
    }

  # Run offline then online validation; resolves with @rawProducts or
  # rejects with the accumulated @errors.
  validate: (csvContent) ->
    @validateOffline csvContent
    @validateOnline()

  # Header/delimiter checks and grouping rows into products.
  # When only `sku` identifies variants, switches to variants-only mode.
  validateOffline: (csvContent) ->
    @header.validate()
    @checkDelimiters()

    variantHeader = CONS.HEADER_VARIANT_ID if @header.has(CONS.HEADER_VARIANT_ID)
    if @header.has(CONS.HEADER_SKU) and not variantHeader?
      variantHeader = CONS.HEADER_SKU
      @updateVariantsOnly = true
    @buildProducts csvContent, variantHeader

  # All configured delimiters must be distinct or parsing is ambiguous.
  checkDelimiters: ->
    allDelimiter =
      csvDelimiter: @csvOptions.delimiter
      csvQuote: @csvOptions.quote
      language: GLOBALS.DELIM_HEADER_LANGUAGE
      multiValue: GLOBALS.DELIM_MULTI_VALUE
      categoryChildren: GLOBALS.DELIM_CATEGORY_CHILD
    delims = _.map allDelimiter, (delim, _) -> delim
    if _.size(delims) isnt _.size(_.uniq(delims))
      @errors.push "Your selected delimiter clash with each other: #{JSON.stringify(allDelimiter)}"

  # Fetch all caches (product types, customer groups, categories, taxes,
  # states, channels) unless a prefetched `cache` array is given, then
  # build their lookup maps. Resolves with the raw resources array.
  fetchResources: (cache) =>
    promise = Promise.resolve(cache)
    if not cache
      promise = Promise.all([
        @types.getAll @client, @projectKey
        @customerGroups.getAll @client, @projectKey
        @categories.getAll @client, @projectKey
        @taxes.getAll @client, @projectKey
        @states.getAll @client, @projectKey
        @channels.getAll @client, @projectKey
      ])

    promise
    .then (resources) =>
      [productTypes, customerGroups, categories, taxes, states, channels] = resources
      @productTypes = productTypes
      @types.buildMaps @productTypes
      @customerGroups.buildMaps customerGroups
      @categories.buildMaps categories
      @taxes.buildMaps taxes
      @states.buildMaps states
      @channels.buildMaps channels
      Promise.resolve resources

  # Validate the grouped products against the fetched product types.
  # Resolves with @rawProducts only when no errors were accumulated.
  validateOnline: ->
    # TODO: too much parallel?
    # TODO: is it ok storing everything in memory?
    @valProducts @rawProducts # TODO: ???
    if _.size(@errors) is 0
      @valProductTypes @productTypes # TODO: ???
      if _.size(@errors) is 0
        Promise.resolve @rawProducts
      else
        Promise.reject @errors
    else
      Promise.reject @errors

  # True only when a `publish` column exists and the cell is the string 'true'.
  shouldPublish: (csvRow) ->
    if not @header.has CONS.HEADER_PUBLISH
      return false
    csvRow[@header.toIndex CONS.HEADER_PUBLISH] == 'true'

  # TODO: Allow to define a column that defines the variant relationship.
  # If the value is the same, they belong to the same product
  # Group the CSV rows into @rawProducts entries of the shape
  # { master, startRow, variants, publish }.
  buildProducts: (content, variantColumn) ->
    # variants-only mode: every row becomes its own product entry; a missing
    # productType is inherited from the previous row
    buildVariantsOnly = (aggr, row, index) =>
      rowIndex = index + 2 # Excel et all start counting at 1 and we already popped the header
      productTypeIndex = @header.toIndex CONS.HEADER_PRODUCT_TYPE
      productType = row[productTypeIndex]
      lastProduct = _.last @rawProducts

      # if there is no productType and no product above
      # skip this line
      if not productType and not lastProduct
        @errors.push "[row #{rowIndex}] Please provide a product type!"
        return aggr

      if not productType
        console.warn "[row #{rowIndex}] Using previous productType for variant update"
        lastProduct = _.last @rawProducts
        row[productTypeIndex] = lastProduct.master[productTypeIndex]

      @rawProducts.push({
        master: _.deepClone(row),
        startRow: rowIndex,
        variants: [],
        publish: @shouldPublish row
      })

      aggr

    # default mode: a row with a productType starts a new product; rows
    # without one are variants of the product started last
    buildProductsOnFly = (aggr, row, index) =>
      rowIndex = index + 2 # Excel et all start counting at 1 and we already popped the header
      publish = @shouldPublish row
      if @isProduct row, variantColumn
        product =
          master: row
          startRow: rowIndex
          variants: []
          publish: publish
        @rawProducts.push product
      else if @isVariant row, variantColumn
        product = _.last @rawProducts
        if product
          product.variants.push
            variant: row
            rowIndex: rowIndex
          if publish
            product.publish = true
        else
          @errors.push "[row #{rowIndex}] We need a product before starting with a variant!"
      else
        @errors.push "[row #{rowIndex}] Could not be identified as product or variant!"
      aggr

    reducer = if @updateVariantsOnly
      buildVariantsOnly
    else buildProductsOnFly
    content.reduce(reducer, {})

  # Warn (don't error) about product-type attributes with no matching header.
  valProductTypes: (productTypes) ->
    return if @suppressMissingHeaderWarning
    _.each productTypes, (pt) =>
      attributes = @header.missingHeaderForProductType pt
      unless _.isEmpty(attributes)
        console.warn "For the product type '#{pt.name}' the following attributes don't have a matching header:"
        _.each attributes, (attr) ->
          console.warn "  #{attr.name}: type '#{attr.type.name} #{if attr.type.name is 'set' then 'of ' + attr.type.elementType.name else ''}' - constraint '#{attr.attributeConstraint}' - #{if attr.isRequired then 'isRequired' else 'optional'}"

  # Validate every grouped product (mutates @errors).
  valProducts: (products) ->
    _.each products, (product) => @valProduct product

  # Resolve the productType cell (name or id) to the fetched product-type
  # object in place; errors on duplicate names and unknown types.
  valProduct: (raw) ->
    rawMaster = raw.master
    ptInfo = rawMaster[@header.toIndex CONS.HEADER_PRODUCT_TYPE]

    @errors.push "[row #{raw.startRow}] The product type name '#{ptInfo}' is not unique. Please use the ID!" if _.contains(@types.duplicateNames, ptInfo)

    if _.has(@types.name2id, ptInfo)
      ptInfo = @types.name2id[ptInfo]
    if _.has(@types.id2index, ptInfo)
      index = @types.id2index[ptInfo]
      rawMaster[@header.toIndex CONS.HEADER_PRODUCT_TYPE] = @productTypes[index]
    else
      @errors.push "[row #{raw.startRow}] Can't find product type for '#{ptInfo}'"

  # A row is a variant when its productType cell is blank (variantId mode)
  # or simply when it is not a product (sku mode).
  isVariant: (row, variantColumn) ->
    if variantColumn is CONS.HEADER_VARIANT_ID
      hasProductTypeColumn = not _.isBlank(row[@header.toIndex(CONS.HEADER_PRODUCT_TYPE)])
      return !hasProductTypeColumn
    else
      not @isProduct row

  # A row is a product when its productType cell is non-blank.
  # (variantColumn is currently unused here.)
  isProduct: (row, variantColumn) ->
    hasProductTypeColumn = not _.isBlank(row[@header.toIndex(CONS.HEADER_PRODUCT_TYPE)])
    return hasProductTypeColumn

  # True when the row has any value in the variant-identifying column.
  # (Note: local var name 'critertia' is a typo kept as-is.)
  _hasVariantCriteria: (row, variantColumn) ->
    critertia = row[@header.toIndex(variantColumn)]
    critertia?
217 |
218 | module.exports = Validator
219 |
--------------------------------------------------------------------------------
/src/spec/categories.spec.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | Categories = require '../lib/categories'
3 |
# Unit tests for the Categories cache: verifies that buildMaps populates
# the id/name/fqName/key/externalId lookup maps for flat, nested and
# externalId-carrying category trees.
describe 'Categories', ->
  beforeEach ->
    @categories = new Categories()

  describe '#constructor', ->
    it 'should construct', ->
      expect(@categories).toBeDefined()

  describe '#buildMaps', ->
    it 'should create maps for root categories', ->
      categories = [
        { id: 1, name: { en: 'cat1' }, slug: { en: 'cat-1'}, key: "test 1" }
        { id: 2, name: { en: 'cat2' }, slug: { en: 'cat-2'}, key: "test 2" }
      ]
      @categories.buildMaps categories
      expect(_.size @categories.id2index).toBe 2
      expect(_.size @categories.name2id).toBe 2
      expect(_.size @categories.fqName2id).toBe 2
      expect(_.size @categories.key2Id).toBe 2
      expect(_.size @categories.duplicateNames).toBe 0

    it 'should create maps for children categories', ->
      # ancestors chain drives the fully-qualified 'root>main>sub' names
      categories = [
        { id: 'idx', name: { en: 'root' }, slug: { en: 'root'} }
        { id: 'idy', name: { en: 'main' }, ancestors: [ { id: 'idx' } ], slug: { en: 'main'} }
        { id: 'idz', name: { en: 'sub' }, ancestors: [ { id: 'idx' }, { id: 'idy' } ], slug: { en: 'sub'} }
      ]
      @categories.buildMaps categories
      expect(_.size @categories.id2index).toBe 3
      expect(_.size @categories.name2id).toBe 3
      expect(_.size @categories.fqName2id).toBe 3
      expect(@categories.fqName2id['root']).toBe 'idx'
      expect(@categories.fqName2id['root>main']).toBe 'idy'
      expect(@categories.fqName2id['root>main>sub']).toBe 'idz'
      expect(_.size @categories.duplicateNames).toBe 0

    it 'should create maps for categories with externalId', ->
      # only two entries carry a key -> key2Id has 2, externalId2id has 3
      categories = [
        { id: 'idx', name: { en: 'root' }, externalId: '123', slug: { en: 'root'}, key: "test 1" }
        { id: 'idy', name: { en: 'main' }, externalId: '234', slug: { en: 'main'}, key: "test 2" }
        { id: 'idz', name: { en: 'sub' }, externalId: '345', slug: { en: 'sub'} }
      ]
      @categories.buildMaps categories
      expect(_.size @categories.id2index).toBe 3
      expect(_.size @categories.name2id).toBe 3
      expect(_.size @categories.fqName2id).toBe 3
      expect(_.size @categories.key2Id).toBe 2
      expect(_.size @categories.externalId2id).toBe 3
      expect(@categories.externalId2id['123']).toBe 'idx'
      expect(@categories.externalId2id['234']).toBe 'idy'
      expect(@categories.externalId2id['345']).toBe 'idz'
      expect(_.size @categories.duplicateNames).toBe 0
56 |
--------------------------------------------------------------------------------
/src/spec/export.spec.coffee:
--------------------------------------------------------------------------------
1 | fetch = require 'node-fetch'
2 | { Export } = require '../lib/main'
3 | Config = require '../config'
4 | _ = require 'underscore'
5 |
# Minimal price fixture; defaults to a US price.
priceFactory = (options = {}) ->
  country: options.country or 'US'
# Variant fixture with one price and a 'published' attribute.
# bugfix: use the existential operator so an explicit `published: false`
# is preserved instead of being clobbered to true by `or`.
variantFactory = ({ country, published } = {}) ->
  prices: [
    priceFactory({ country })
  ],
  attributes: [
    {
      name: 'published',
      value: published ? true
    }
  ]
19 |
20 | describe 'Export', ->
21 |
22 | beforeEach ->
23 | {client_id, client_secret, project_key} = Config.config
24 | authConfig =
25 | projectKey: project_key
26 | credentials:
27 | clientId: client_id
28 | clientSecret: client_secret
29 | fetch: fetch
30 | httpConfig = {fetch: fetch}
31 | userAgentConfig = {}
32 | @exporter = new Export({ authConfig, httpConfig, userAgentConfig })
33 |
34 | describe 'Function to filter variants by attributes', ->
35 |
36 | it 'should keep all variants if no filter for price is given', ->
37 |
38 | # init variant without price
39 | variant = variantFactory()
40 | variant.prices = []
41 | # filter for US prices -> no price should be left in variant
42 | filteredVariants = @exporter._filterVariantsByAttributes(
43 | [ variant ],
44 | []
45 | )
46 | actual = filteredVariants[0]
47 | expected = variant
48 |
49 | expect(actual).toEqual(expected)
50 |
51 | it 'should keep variants that meet the filter condition', ->
52 |
53 | variant = variantFactory()
54 | filteredVariants = @exporter._filterVariantsByAttributes(
55 | [variant],
56 | [{ name: 'published', value: true }]
57 | )
58 |
59 | actual = filteredVariants[0]
60 | expected = variant
61 |
62 | expect(actual).toEqual(expected)
63 |
64 | it 'should remove variants that don\'t meet the filter condition', ->
65 |
66 | variant = variantFactory()
67 | filteredVariants = @exporter._filterVariantsByAttributes(
68 | [variant],
69 | [{ name: 'published', value: false }]
70 | )
71 |
72 | actual = filteredVariants[0]
73 | expected = undefined
74 |
75 | expect(actual).toEqual(expected)
76 |
77 | it 'should filter prices if no variant filter is provided', ->
78 |
79 | # init variant with DE price
80 | variant = variantFactory({country: 'DE'})
81 | variant.prices.push(priceFactory({ country: 'US' }))
82 | # filter for US prices -> no price should be left in variant
83 | @exporter.queryOptions.filterPrices = [{ name: 'country', value: 'US' }]
84 | filteredVariants = @exporter._filterVariantsByAttributes(
85 | [ variant ],
86 | []
87 | )
88 |
89 | actual = filteredVariants[0]
90 | expected = _.extend(variant, { prices: [] })
91 |
92 | expect(actual).toEqual(expected)
93 |
94 | it 'should filter out a variant
95 | if the price filter filtered out all prices of the variant', ->
96 |
97 | # init variant with DE price
98 | variant = variantFactory({country: 'US'})
99 | variant.prices.push(priceFactory({ country: 'US' }))
100 | # filter for US prices -> no price should be left in variant
101 | @exporter.queryOptions.filterPrices = [{ name: 'country', value: 'DE' }]
102 | filteredVariants = @exporter._filterVariantsByAttributes(
103 | [ variant ],
104 | []
105 | )
106 | expect(filteredVariants[0]).toBeFalsy()
107 |
108 | describe 'Function to filter prices', ->
109 |
110 | it 'should keep prices that meet the filter condition', ->
111 |
112 | price = priceFactory()
113 | filteredVariants = @exporter._filterPrices(
114 | [ price ],
115 | [{ name: 'country', value: 'US' }]
116 | )
117 |
118 | actual = filteredVariants[0]
119 | expected = price
120 |
121 | expect(actual).toEqual(expected)
122 |
123 | it 'should remove prices that don\'t meet the filter condition', ->
124 |
125 | price = priceFactory({ country: 'DE' })
126 | usPrice = priceFactory({ country: 'US' })
127 | filteredPrices = @exporter._filterPrices(
128 | [ price, usPrice, usPrice, usPrice ],
129 | [{ name: 'country', value: 'DE' }]
130 | )
131 |
132 | actual = filteredPrices.length
133 | expected = 1
134 |
135 | expect(actual).toEqual(expected)
136 |
137 |
138 |   describe 'Product queryString', ->
139 | 
140 |     it 'should append custom condition to queryString', ->
141 |       query = 'where=productType(id="987") AND id="567"&staged=false'
142 |       expectedQuery = 'where=productType(id="987") AND id="567" AND productType(id="123")&staged=false'
143 |       customWherePredicate = 'productType(id="123")'
144 | 
145 |       parsed = @exporter._parseQueryString query
146 |       parsed = @exporter._appendQueryStringPredicate parsed, customWherePredicate  # predicate is AND-ed into the existing where clause
147 |       result = @exporter._stringifyQueryString parsed  # parse -> append -> stringify round trip
148 | 
149 |       expect(result).toEqual(expectedQuery)
150 |
--------------------------------------------------------------------------------
/src/spec/header.spec.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | _.mixin require('underscore-mixins')
3 | CONS = require '../lib/constants'
4 | {Header, Validator} = require '../lib/main'
5 |
6 | describe 'Header', ->
7 | beforeEach ->
8 | @validator = new Validator()
9 |
10 |   describe '#constructor', ->
11 |     it 'should initialize', ->
12 |       expect(-> new Header()).toBeDefined()  # NOTE(review): this only asserts the anonymous wrapper function is defined — the constructor is never invoked; consider expect(new Header()).toBeDefined()
13 | 
14 |     it 'should initialize rawHeader', ->
15 |       header = new Header ['name']
16 |       expect(header.rawHeader).toEqual ['name']
17 |
18 |   describe '#validate', ->
19 |     it 'should return error for each missing header', (done) ->
20 |       csv =
21 |         """
22 |         foo,sku
23 |         1,2
24 |         """
25 |       @validator.parse csv
26 |       .then =>
27 |         errors = @validator.header.validate()  # validate() returns an array of human-readable error messages
28 |         expect(errors.length).toBe 1
29 |         expect(errors[0]).toBe "Can't find necessary base header 'productType'!"
30 |         done()
31 |       .catch (err) -> done.fail _.prettify(err)
32 | 
33 |     it 'should return error when no sku and not variantId header', (done) ->
34 |       csv =
35 |         """
36 |         foo,productType
37 |         1,2
38 |         """
39 |       @validator.parse csv
40 |       .then =>
41 |         errors = @validator.header.validate()
42 |         expect(errors.length).toBe 1
43 |         expect(errors[0]).toBe "You need either the column 'variantId' or 'sku' to identify your variants!"
44 |         done()
45 |       .catch (err) -> done.fail _.prettify(err)
46 | 
47 |     it 'should return error on duplicate header', (done) ->
48 |       csv =
49 |         """
50 |         productType,name,variantId,name
51 |         1,2,3,4
52 |         """
53 |       @validator.parse csv
54 |       .then =>
55 |         errors = @validator.header.validate()
56 |         expect(errors.length).toBe 1
57 |         expect(errors[0]).toBe "There are duplicate header entries!"
58 |         done()
59 |       .catch (err) -> done.fail _.prettify(err)
60 |
61 |   describe '#toIndex', ->
62 |     it 'should create mapping', (done) ->
63 |       csv =
64 |         """
65 |         productType,foo,variantId
66 |         1,2,3
67 |         """
68 |       @validator.parse csv
69 |       .then =>
70 |         h2i = @validator.header.toIndex()  # h2i: header name -> zero-based column index
71 |         expect(_.size h2i).toBe 3
72 |         expect(h2i['productType']).toBe 0
73 |         expect(h2i['foo']).toBe 1
74 |         expect(h2i['variantId']).toBe 2
75 |         done()
76 |       .catch (err) -> done.fail _.prettify(err)
77 |
78 |   describe '#_productTypeLanguageIndexes', ->
79 |     beforeEach ->
80 |       @productType =
81 |         id: '213'
82 |         attributes: [
83 |           name: 'foo'
84 |           type:
85 |             name: 'ltext'
86 |         ]
87 |       @csv =
88 |         """
89 |         someHeader,foo.en,foo.de
90 |         """
91 |     it 'should create language header index for ltext attributes', (done) ->
92 |       @validator.parse @csv
93 |       .then =>
94 |         langH2i = @validator.header._productTypeLanguageIndexes @productType  # attribute name -> { language -> column index }
95 |         expect(_.size langH2i).toBe 1
96 |         expect(_.size langH2i['foo']).toBe 2
97 |         expect(langH2i['foo']['de']).toBe 2
98 |         expect(langH2i['foo']['en']).toBe 1
99 |         done()
100 |       .catch (err) -> done.fail _.prettify(err)
101 | 
102 |     it 'should provide access via productType', (done) ->
103 |       @validator.parse @csv
104 |       .then =>
105 |         expected =
106 |           de: 2
107 |           en: 1
108 |         expect(@validator.header.productTypeAttributeToIndex(@productType, @productType.attributes[0])).toEqual expected  # same mapping, addressed per attribute
109 |         done()
110 |       .catch (err) -> done.fail _.prettify(err)
111 |
112 |   describe '#_languageToIndex', ->
113 |     it 'should create mapping for language attributes', (done) ->
114 |       csv =
115 |         """
116 |         foo,a1.de,bar,a1.it
117 |         """
118 |       @validator.parse csv
119 |       .then =>
120 |         langH2i = @validator.header._languageToIndex(['a1'])  # only the listed attribute names get language -> column mappings
121 |         expect(_.size langH2i).toBe 1
122 |         expect(_.size langH2i['a1']).toBe 2
123 |         expect(langH2i['a1']['de']).toBe 1
124 |         expect(langH2i['a1']['it']).toBe 3
125 |         done()
126 |       .catch (err) -> done.fail _.prettify(err)
127 |
128 |   describe '#missingHeaderForProductType', ->
129 |     it 'should give list of attributes that are not covered by headers', (done) ->
130 |       csv =
131 |         """
132 |         foo,a1.de,bar,a1.it
133 |         """
134 |       productType =
135 |         id: 'whatAtype'
136 |         attributes: [
137 |           { name: 'foo', type: { name: 'text' } }
138 |           { name: 'bar', type: { name: 'enum' } }
139 |           { name: 'a1', type: { name: 'ltext' } }
140 |           { name: 'a2', type: { name: 'set' } }
141 |         ]
142 |       @validator.parse csv
143 |       .then =>
144 |         header = @validator.header
145 |         header.toIndex()
146 |         header.toLanguageIndex()  # precompute plain and language indexes first — presumably required by missingHeaderForProductType; confirm
147 |         missingHeaders = header.missingHeaderForProductType(productType)  # attributes with no matching CSV column ('a2' here)
148 |         expect(_.size missingHeaders).toBe 1
149 |         expect(missingHeaders[0]).toEqual { name: 'a2', type: { name: 'set' } }
150 |         done()
151 |       .catch (err) -> done.fail _.prettify(err)
152 |
--------------------------------------------------------------------------------
/src/spec/import.spec.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | fetch = require 'node-fetch'
3 | CONS = require '../lib/constants'
4 | { Import } = require '../lib/main'
5 | Config = require '../config'
6 |
7 | describe 'Import', ->
8 | { client_id, client_secret, project_key } = Config.config
9 | authConfig =
10 | projectKey: project_key
11 | credentials:
12 | clientId: client_id
13 | clientSecret: client_secret
14 | fetch: fetch
15 | httpConfig = {fetch: fetch}
16 | userAgentConfig = {}
17 | beforeEach ->
18 | @importer = new Import {
19 | authConfig: authConfig
20 | httpConfig: httpConfig
21 | userAgentConfig: userAgentConfig
22 | }
23 |
24 |   describe '#constructor', ->
25 |     it 'should initialize with options', ->
26 |       expect(@importer).toBeDefined()
27 |       expect(@importer.client).toBeDefined()  # the sdk client is set up by the Import constructor from authConfig/httpConfig
28 |
29 |   describe 'match on custom attribute', ->
30 |     xit 'should find match based on custom attribute', ->  # NOTE(review): disabled spec (xit) — re-enable or remove
31 |       product =
32 |         id: '123'
33 |         masterVariant:
34 |           attributes: [
35 |             { name: 'foo', value: 'bar' }
36 |           ]
37 |       @importer.customAttributeNameToMatch = 'foo'
38 | 
39 |       val = @importer.getCustomAttributeValue(product.masterVariant)
40 |       expect(val).toEqual 'bar'
41 | 
42 |       @importer.initMatcher [product]
43 |       expect(@importer.id2index).toEqual { 123: 0 }
44 |       expect(@importer.sku2index).toEqual {}
45 |       expect(@importer.slug2index).toEqual {}
46 |       expect(@importer.customAttributeValue2index).toEqual { 'bar': 0 }
47 | 
48 |       index = @importer._matchOnCustomAttribute product
49 |       expect(index).toBe 0
50 | 
51 |       match = @importer.match
52 |         product:
53 |           masterVariant:
54 |             attributes: []
55 |           variants: [
56 |             { attributes: [{ name: 'foo', value: 'bar' }] }
57 |           ]
58 |         header:
59 |           has: -> false
60 |           hasLanguageForBaseAttribute: -> false
61 | 
62 |       expect(match).toBe product
63 |
64 |   describe 'mapVariantsBasedOnSKUs', ->
65 |     beforeEach ->
66 |       @header = {}
67 |     it 'should map masterVariant', ->
68 |       existingProducts = [
69 |         { masterVariant: { id: 2, sku: "mySKU" }, variants: [] }
70 |       ]
71 |       #@importer.initMatcher existingProducts  # NOTE(review): commented-out call — remove if obsolete
72 |       entry =
73 |         product:
74 |           masterVariant: { sku: "mySKU", attributes: [ { foo: 'bar' } ] }
75 |       productsToUpdate = @importer.mapVariantsBasedOnSKUs(existingProducts, [entry])
76 |       expect(_.size productsToUpdate).toBe 1
77 |       product = productsToUpdate[0].product
78 |       expect(product.masterVariant).toBeDefined()
79 |       expect(product.masterVariant.id).toBe 2
80 |       expect(product.masterVariant.sku).toBe 'mySKU'
81 |       expect(_.size product.variants).toBe 0
82 |       expect(product.masterVariant.attributes).toEqual [{ foo: 'bar' }]
83 | 
84 |     xit 'should map several variants into one product', ->  # NOTE(review): disabled spec (xit) — re-enable or remove
85 |       existingProducts = [
86 |         { masterVariant: { id: 1, sku: "mySKU" }, variants: [] }
87 |         { masterVariant: { id: 1, sku: "mySKU1" }, variants: [
88 |           { id: 2, sku: "mySKU2", attributes: [ { foo: 'bar' } ] }
89 |           { id: 4, sku: "mySKU4", attributes: [ { foo: 'baz' } ] }
90 |         ] }
91 |       ]
92 |       #@importer.initMatcher existingProducts  # NOTE(review): commented-out call — remove if obsolete
93 |       entry =
94 |         product:
95 |           variants: [
96 |             { sku: "mySKU4", attributes: [ { foo: 'bar4' } ] }
97 |             { sku: "mySKU2", attributes: [ { foo: 'bar2' } ] }
98 |             { sku: "mySKU3", attributes: [ { foo: 'bar3' } ] }
99 |           ]
100 |       productsToUpdate = @importer.mapVariantsBasedOnSKUs(existingProducts, [entry])
101 |       expect(_.size productsToUpdate).toBe 1
102 |       product = productsToUpdate[0].product
103 |       expect(product.masterVariant.id).toBe 1
104 |       expect(product.masterVariant.sku).toBe 'mySKU1'
105 |       expect(_.size product.variants).toBe 2
106 |       expect(product.variants[0].id).toBe 2
107 |       expect(product.variants[0].sku).toBe 'mySKU2'
108 |       expect(product.variants[0].attributes).toEqual [ { foo: 'bar2' } ]
109 |       expect(product.variants[1].id).toBe 4
110 |       expect(product.variants[1].attributes).toEqual [ { foo: 'bar4' } ]
111 |
112 |   describe 'splitUpdateActionsArray', ->
113 |     it 'should split an array when exceeding max amount of allowed actions', ->
114 |       updateRequest = {
115 |         actions: [
116 |           { action: 'updateAction1', payload: 'bar1' },
117 |           { action: 'updateAction2', payload: 'bar2' },
118 |           { action: 'updateAction3', payload: 'bar3' },
119 |           { action: 'updateAction4', payload: 'bar4' },
120 |           { action: 'updateAction5', payload: 'bar5' },
121 |           { action: 'updateAction6', payload: 'bar6' },
122 |           { action: 'updateAction7', payload: 'bar7' },
123 |           { action: 'updateAction8', payload: 'bar8' },
124 |           { action: 'updateAction9', payload: 'bar9' },
125 |           { action: 'updateAction10', payload: 'bar10' }
126 |         ],
127 |         version: 1
128 |       }
129 |       # max amount of actions = 3
130 |       splitArray = @importer.splitUpdateActionsArray(updateRequest, 3)
131 |       # array of 10 actions divided by max of 3 becomes 4 arrays
132 |       expect(splitArray.length).toEqual 4  # ceil(10 / 3)
133 |
--------------------------------------------------------------------------------
/src/spec/integration/impex.spec.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | _.mixin require('underscore-mixins')
3 | Promise = require 'bluebird'
4 | fetch = require 'node-fetch'
5 | fs = Promise.promisifyAll require('fs')
6 | {Export, Import} = require '../../lib/main'
7 | Config = require '../../config'
8 | TestHelpers = require './testhelpers'
9 |
10 | TEXT_ATTRIBUTE_NONE = 'attr-text-n'
11 | LTEXT_ATTRIBUTE_COMBINATION_UNIQUE = 'attr-ltext-cu'
12 | SET_TEXT_ATTRIBUTE_NONE = 'attr-set-text-n'
13 | BOOLEAN_ATTRIBUTE_NONE = 'attr-boolean-n'
14 |
15 | describe 'Impex integration tests', ->
16 | { client_id, client_secret, project_key } = Config.config
17 | authConfig = {
18 | host: 'https://auth.sphere.io'
19 | projectKey: project_key
20 | credentials: {
21 | clientId: client_id
22 | clientSecret: client_secret
23 | }
24 | fetch: fetch
25 | }
26 | httpConfig = { host: 'https://api.sphere.io', fetch: fetch }
27 | userAgentConfig = {}
28 |
29 |   beforeEach (done) ->
30 |     jasmine.DEFAULT_TIMEOUT_INTERVAL = 120000 # 2mins
31 |     initOptions = {
32 |       authConfig: authConfig
33 |       httpConfig: httpConfig
34 |       userAgentConfig: userAgentConfig
35 |       encoding: 'utf8'
36 |     }
37 |     @importer = new Import initOptions
38 |     @importer.matchBy = 'slug'  # existing products are matched by slug during import
39 |     @importer.suppressMissingHeaderWarning = true
40 |     @exporter = new Export initOptions
41 |     @client = @importer.client  # reuse the importer's sdk client for direct API assertions
42 | 
43 |     @productType = TestHelpers.mockProductType()
44 | 
45 |     TestHelpers.setupProductType(@client, @productType, null, project_key)
46 |     .then (result) =>
47 |       @productType = result  # the created product type (with real id) replaces the mock
48 |       done()
49 |     .catch (err) -> done.fail _.prettify(err.body)
50 |   , 60000 # 60sec
51 |
52 |   it 'should import and re-export a simple product', (done) ->
53 |     header = "productType,name.en,slug.en,variantId,sku,prices,#{LTEXT_ATTRIBUTE_COMBINATION_UNIQUE}.en,#{TEXT_ATTRIBUTE_NONE},#{SET_TEXT_ATTRIBUTE_NONE},#{BOOLEAN_ATTRIBUTE_NONE}"
54 |     p1 =
55 |       """
56 |       #{@productType.name},myProduct1,my-slug1,1,sku1,FR-EUR 999;CHF 1099,some Text,foo,false
57 |       ,,,2,sku2,EUR 799,some other Text,foo,\"t1;t2;t3;Üß\"\"Let's see if we support multi
58 |       line value\"\"\",true
59 |       """
60 |     p2 =
61 |       """
62 |       #{@productType.name},myProduct2,my-slug2,1,sku3,USD 1899,,,,true
63 |       ,,,2,sku4,USD 1999,,,,false
64 |       ,,,3,sku5,USD 2099,,,,true
65 |       ,,,4,sku6,USD 2199,,,,false
66 |       """
67 |     csv =
68 |       """
69 |       #{header}
70 |       #{p1}
71 |       #{p2}
72 |       """
73 |     @importer.publishProducts = true  # publish imported products before exporting
74 |     @importer.import(csv)
75 |     .then (result) =>
76 |       console.log "import", result
77 |       expect(_.size result).toBe 2
78 |       expect(result[0]).toBe '[row 2] New product created.'
79 |       expect(result[1]).toBe '[row 4] New product created.'
80 |       file = '/tmp/impex.csv'
81 |       @exporter.exportDefault(csv, file)  # the import CSV doubles as the export template
82 |     .then (result) =>
83 |       console.log "export", result
84 |       expect(result).toBe 'Export done.'
85 |       service = TestHelpers.createService(project_key, 'products')
86 |       request = {
87 |         uri: service.build()
88 |         method: 'GET'
89 |       }
90 |       @client.execute request
91 |     .then (res) ->
92 |       console.log "products %j", res.body
93 |       fs.readFileAsync file, {encoding: 'utf8'}
94 |     .then (content) ->
95 |       console.log "export file content", content
96 |       expect(content).toMatch header
97 |       expect(content).toMatch p1
98 |       expect(content).toMatch p2
99 |       done()
100 |     .catch (err) -> done.fail _.prettify(err)
101 |
102 |   it 'should import and re-export SEO attributes', (done) ->
103 |     header = "productType,variantId,name.en,description.en,slug.en,metaTitle.en,metaDescription.en,metaKeywords.en,#{LTEXT_ATTRIBUTE_COMBINATION_UNIQUE}.en,searchKeywords.en"
104 |     p1 =
105 |       """
106 |       #{@productType.name},1,seoName,seoDescription,seoSlug,seoMetaTitle,seoMetaDescription,seoMetaKeywords,foo,new;search;keywords
107 |       ,2,,,,,,,bar
108 |       """
109 |     csv =
110 |       """
111 |       #{header}
112 |       #{p1}
113 |       """
114 |     @importer.publishProducts = true
115 |     @importer.import(csv)
116 |     .then (result) =>
117 |       console.log "import", result
118 |       expect(_.size result).toBe 1
119 |       expect(result[0]).toBe '[row 2] New product created.'
120 |       file = '/tmp/impex.csv'
121 |       @exporter.exportDefault(header, file)  # header-only CSV serves as the export template
122 |     .then (result) ->
123 |       console.log "export", result
124 |       expect(result).toBe 'Export done.'
125 |       fs.readFileAsync file, {encoding: 'utf8'}
126 |     .then (content) ->
127 |       console.log "export file content", content
128 |       expect(content).toMatch header
129 |       expect(content).toMatch p1
130 |       done()
131 |     .catch (err) -> done.fail _.prettify(err)
132 |
--------------------------------------------------------------------------------
/src/spec/integration/importArchive.spec.coffee:
--------------------------------------------------------------------------------
1 | Promise = require 'bluebird'
2 | fetch = require 'node-fetch'
3 | _ = require 'underscore'
4 | archiver = require 'archiver'
5 | _.mixin require('underscore-mixins')
6 | {Import} = require '../../lib/main'
7 | Config = require '../../config'
8 | TestHelpers = require './testhelpers'
9 | Excel = require 'exceljs'
10 | cuid = require 'cuid'
11 | path = require 'path'
12 | tmp = require 'tmp'
13 | fs = Promise.promisifyAll require('fs')
14 | # will clean temporary files even when an uncaught exception occurs
15 | tmp.setGracefulCleanup()
16 |
17 | CHANNEL_KEY = 'retailerA'
18 | { client_id, client_secret, project_key } = Config.config
19 | authConfig = {
20 | host: 'https://auth.sphere.io'
21 | projectKey: project_key
22 | credentials: {
23 | clientId: client_id
24 | clientSecret: client_secret
25 | }
26 | fetch: fetch
27 | }
28 | httpConfig = { host: 'https://api.sphere.io', fetch: fetch }
29 | userAgentConfig = {}
30 |
31 | createImporter = (format) ->
32 |   config = JSON.parse(JSON.stringify(Config)) # cloneDeep
33 |   config.importFormat = format || "csv"  # NOTE(review): `config` is never used below — Import does not receive it, so importFormat appears to have no effect here; verify
34 |   im = new Import {
35 |     authConfig: authConfig
36 |     httpConfig: httpConfig
37 |     userAgentConfig: userAgentConfig
38 |   }
39 |   im.matchBy = 'sku'
40 |   im.allowRemovalOfVariants = true
41 |   im.suppressMissingHeaderWarning = true
42 |   im
43 |
44 | writeXlsx = (filePath, data) ->  # data: array of rows; row 0 is the header row
45 |   workbook = new Excel.Workbook()
46 |   workbook.created = new Date()
47 |   worksheet = workbook.addWorksheet('Products')
48 |   console.log "Generating Xlsx file"
49 | 
50 |   data.forEach (items, index) ->
51 |     if index  # truthy for every row except the first (header) row
52 |       worksheet.addRow items
53 |     else
54 |       headers = []
55 |       for i of items
56 |         headers.push {
57 |           header: items[i]
58 |         }
59 |       worksheet.columns = headers
60 | 
61 |   workbook.xlsx.writeFile(filePath)  # assumed to return a promise (callers chain via Promise.map) — confirm exceljs API
62 |
63 | describe 'Import integration test', ->
64 |
65 |   beforeEach (done) ->
66 |     jasmine.DEFAULT_TIMEOUT_INTERVAL = 120000 # 2mins
67 |     @importer = createImporter()
68 |     @importer.suppressMissingHeaderWarning = true  # NOTE(review): redundant — createImporter already sets this
69 |     @client = @importer.client
70 | 
71 |     @productType = TestHelpers.mockProductType()
72 | 
73 |     TestHelpers.setupProductType(@client, @productType, null, project_key)
74 |     .then (result) =>
75 |       @productType = result
76 |       # Check if channel exists
77 |       service = TestHelpers.createService(project_key, 'channels')
78 |       request = {
79 |         uri: service
80 |         .where("key=\"#{CHANNEL_KEY}\"")
81 |         .build()
82 |         method: 'GET'
83 |       }
84 |       @client.execute request
85 |     .then (result) =>
86 |       # Create the channel if it doesn't exist else ignore
87 |       if (!result.body.total)
88 |         service = TestHelpers.createService(project_key, 'channels')
89 |         request = {
90 |           uri: service.build()
91 |           method: 'POST'
92 |           body:
93 |             key: CHANNEL_KEY
94 |             roles: ['InventorySupply']
95 |         }
96 |         @client.execute request
97 |     .then -> done()
98 |     .catch (err) -> done.fail _.prettify(err.body)
99 |   , 120000 # 2min
100 |
101 | describe '#import', ->
102 |
103 |     beforeEach ->
104 |       @newProductName = TestHelpers.uniqueId 'name-'
105 |       @newProductSlug = TestHelpers.uniqueId 'slug-'
106 |       @newProductSku = TestHelpers.uniqueId 'sku-'  # unique per test so repeated runs don't collide on name/slug/sku
107 |
108 |     it 'should import multiple archived products from CSV', (done) ->
109 |       tempDir = tmp.dirSync({ unsafeCleanup: true })
110 |       archivePath = path.join tempDir.name, 'products.zip'
111 | 
112 |       csv = [
113 |         """
114 |         productType,name,variantId,slug
115 |         #{@productType.id},#{@newProductName},1,#{@newProductSlug}
116 |         """,
117 |         """
118 |         productType,name,variantId,slug
119 |         #{@productType.id},#{@newProductName+1},1,#{@newProductSlug+1}
120 |         """
121 |       ]
122 | 
123 |       Promise.map csv, (content, index) ->
124 |         fs.writeFileAsync path.join(tempDir.name, "products-#{index}.csv"), content
125 |       .then ->
126 |         archive = archiver 'zip'
127 |         outputStream = fs.createWriteStream archivePath
128 | 
129 |         new Promise (resolve, reject) ->
130 |           outputStream.on 'close', () -> resolve()
131 |           archive.on 'error', (err) -> reject(err)
132 |           archive.pipe outputStream
133 |           archive.glob('**', { cwd: tempDir.name })  # add every generated CSV in the temp dir to the zip
134 |           archive.finalize()
135 |       .then =>
136 |         @importer.importManager(archivePath, true)  # second arg presumably flags archive input — confirm semantics
137 |       .then =>
138 |         service = TestHelpers.createService(project_key, 'productProjections')
139 |         request = {
140 |           uri: service
141 |           .sort("createdAt", "ASC")
142 |           .where("productType(id=\"#{@productType.id}\")")
143 |           .staged true
144 |           .build()
145 |           method: 'GET'
146 |         }
147 |         @client.execute request
148 |       .then (result) =>
149 |         expect(_.size result.body.results).toBe 2
150 | 
151 |         p = result.body.results[0]
152 |         expect(p.name).toEqual en: @newProductName
153 |         expect(p.slug).toEqual en: @newProductSlug
154 | 
155 |         p = result.body.results[1]
156 |         expect(p.name).toEqual en: @newProductName+1
157 |         expect(p.slug).toEqual en: @newProductSlug+1
158 | 
159 |         done()
160 |       .catch (err) -> done.fail _.prettify(err)
161 |       .finally ->
162 |         tempDir.removeCallback()
163 |
164 |   # TODO: Test broken; fixme!
165 |     xit 'should import multiple archived products from XLSX', (done) ->
166 |       importer = createImporter("xlsx")
167 |       tempDir = tmp.dirSync({ unsafeCleanup: true })
168 |       archivePath = path.join tempDir.name, 'products.zip'
169 | 
170 |       data = [
171 |         [
172 |           ["productType","name","variantId","slug"],
173 |           [@productType.id,@newProductName,1,@newProductSlug]
174 |         ],
175 |         [
176 |           ["productType","name","variantId","slug"],
177 |           [@productType.id,@newProductName+1,1,@newProductSlug+1]
178 |         ]
179 |       ]
180 | 
181 |       Promise.map data, (content, index) ->
182 |         writeXlsx(path.join(tempDir.name, "products-#{index}.xlsx"), content)
183 |       .then ->
184 |         archive = archiver 'zip'
185 |         outputStream = fs.createWriteStream archivePath
186 | 
187 |         new Promise (resolve, reject) ->
188 |           outputStream.on 'close', () -> resolve()
189 |           archive.on 'error', (err) -> reject(err)
190 |           archive.pipe outputStream
191 |           archive.glob('**', { cwd: tempDir.name })
192 |           archive.finalize()
193 |       .then =>
194 |         importer.importManager(archivePath, true)  # uses the local xlsx importer, not @importer
195 |       .then =>
196 |         service = TestHelpers.createService(project_key, 'productProjections')
197 |         request = {
198 |           uri: service
199 |           .sort("createdAt", "ASC")
200 |           .where("productType(id=\"#{@productType.id}\")")
201 |           .staged true
202 |           .build()
203 |           method: 'GET'
204 |         }
205 |         @client.execute request
206 |       .then (result) =>
207 |         expect(_.size result.body.results).toBe 2
208 | 
209 |         p = result.body.results[0]
210 |         expect(p.name).toEqual en: @newProductName
211 |         expect(p.slug).toEqual en: @newProductSlug
212 | 
213 |         p = result.body.results[1]
214 |         expect(p.name).toEqual en: @newProductName+1
215 |         expect(p.slug).toEqual en: @newProductSlug+1
216 | 
217 |         done()
218 |       .catch (err) -> done.fail _.prettify(err)
219 |       .finally ->
220 |         tempDir.removeCallback()
221 |
--------------------------------------------------------------------------------
/src/spec/integration/importPublish.spec.coffee:
--------------------------------------------------------------------------------
1 | Promise = require 'bluebird'
2 | fetch = require 'node-fetch'
3 | _ = require 'underscore'
4 | _.mixin require('underscore-mixins')
5 | {Import} = require '../../lib/main'
6 | Config = require '../../config'
7 | TestHelpers = require './testhelpers'
8 | cuid = require 'cuid'
9 | path = require 'path'
10 | tmp = require 'tmp'
11 | fs = Promise.promisifyAll require('fs')
12 | # will clean temporary files even when an uncaught exception occurs
13 | tmp.setGracefulCleanup()
14 |
15 | { client_id, client_secret, project_key } = Config.config
16 | authConfig = {
17 | host: 'https://auth.sphere.io'
18 | projectKey: project_key
19 | credentials: {
20 | clientId: client_id
21 | clientSecret: client_secret
22 | }
23 | fetch: fetch
24 | }
25 | httpConfig = { host: 'https://api.sphere.io', fetch: fetch }
26 | userAgentConfig = {}
27 |
28 | createImporter = ->
29 |   im = new Import {
30 |     authConfig: authConfig
31 |     httpConfig: httpConfig
32 |     userAgentConfig: userAgentConfig
33 |   }
34 |   im.matchBy = 'sku'  # match existing variants by SKU (not slug)
35 |   im.allowRemovalOfVariants = true
36 |   im.suppressMissingHeaderWarning = true
37 |   im
38 |
39 | CHANNEL_KEY = 'retailerA'
40 |
41 | describe 'Import and publish test', ->
42 |
43 |   beforeEach (done) ->
44 |     jasmine.DEFAULT_TIMEOUT_INTERVAL = 90000 # 90 sec
45 |     @importer = createImporter()
46 |     @client = @importer.client
47 | 
48 |     @productType = TestHelpers.mockProductType()
49 | 
50 |     TestHelpers.setupProductType(@client, @productType, null, project_key)
51 |     .then (result) =>
52 |       @productType = result  # the created product type (with real id) replaces the mock
53 |       # Check if channel exists
54 |       service = TestHelpers.createService(project_key, 'channels')
55 |       request = {
56 |         uri: service
57 |         .where("key=\"#{CHANNEL_KEY}\"")
58 |         .build()
59 |         method: 'GET'
60 |       }
61 |       @client.execute request
62 |     .then (result) =>
63 |       # Create the channel if it doesn't exist else ignore
64 |       if (!result.body.total)
65 |         service = TestHelpers.createService(project_key, 'channels')
66 |         request = {
67 |           uri: service.build()
68 |           method: 'POST'
69 |           body:
70 |             key: CHANNEL_KEY
71 |             roles: ['InventorySupply']
72 |         }
73 |         @client.execute request
74 |     .then -> done()
75 |     .catch (err) -> done.fail _.prettify(err.body) # fix: plain `done(err)` does not fail a Jasmine spec — use done.fail like the other integration specs
76 |   , 120000 # 2min
77 |
78 | describe '#import', ->
79 |
80 |     beforeEach ->
81 |       @newProductName = TestHelpers.uniqueId 'name-'
82 |       @newProductSlug = TestHelpers.uniqueId 'slug-'
83 |       @newProductSku = TestHelpers.uniqueId 'sku-'  # unique per test run to avoid slug/sku collisions
84 |
85 |     it 'should import products and publish them afterward', (done) ->
86 |       csv =
87 |         """
88 |         productType,name,variantId,slug,publish
89 |         #{@productType.id},#{@newProductName},1,#{@newProductSlug},true
90 |         #{@productType.id},#{@newProductName}1,1,#{@newProductSlug}1,false
91 |         """
92 | 
93 |       @importer.import(csv)  # row 2 publishes, row 3 stays unpublished
94 |       .then (result) =>
95 |         expect(_.size result).toBe 2
96 |         expect(result).toEqual [
97 |           '[row 2] New product created.',
98 |           '[row 3] New product created.'
99 |         ]
100 |         service = TestHelpers.createService(project_key, 'productProjections')
101 |         request = {
102 |           uri: service
103 |           .where("productType(id=\"#{@productType.id}\")")
104 |           .staged true
105 |           .build()
106 |           method: 'GET'
107 |         }
108 |         @client.execute request
109 |       .then (result) =>
110 |         expect(_.size result.body.results).toBe 2
111 |         products = result.body.results
112 |         p = _.where(products, { published: true})
113 |         expect(p.length).toBe 1
114 |         expect(p[0].slug).toEqual en: @newProductSlug
115 | 
116 |         p = _.where(products, { published: false})
117 |         expect(p.length).toBe 1
118 |         expect(p[0].slug).toEqual en: "#{@newProductSlug}1"
119 |         done()
120 |       .catch (err) -> done.fail _.prettify(err)
121 |
122 |
123 |     it 'should update products and publish them afterward', (done) ->
124 |       csv =
125 |         """
126 |         productType,variantId,sku,name,publish
127 |         #{@productType.id},1,#{@newProductSku},#{@newProductName},true
128 |         #{@productType.id},1,#{@newProductSku}1,#{@newProductName}1,true
129 |         """
130 | 
131 |       @importer.import(csv)
132 |       .then (result) =>
133 |         expect(_.size result).toBe 2
134 | 
135 |         csv =
136 |           """
137 |           productType,variantId,sku,name,publish
138 |           #{@productType.id},1,#{@newProductSku},#{@newProductName}2,true
139 |           #{@productType.id},1,#{@newProductSku}1,#{@newProductName}12,
140 |           """
141 |         im = createImporter()  # fresh importer for the update pass
142 |         im.import(csv)
143 |       .then (result) =>
144 |         expect(_.size result).toBe 2
145 |         expect(result).toEqual [
146 |           '[row 2] Product updated.',
147 |           '[row 3] Product updated.'
148 |         ]
149 |         service = TestHelpers.createService(project_key, 'productProjections')
150 |         request = {
151 |           uri: service
152 |           .where("productType(id=\"#{@productType.id}\")")
153 |           .staged true
154 |           .build()
155 |           method: 'GET'
156 |         }
157 |         @client.execute request
158 |       .then (result) =>
159 |         products = _.where(result.body.results, { published: true })
160 |         expect(_.size products).toBe 2
161 | 
162 |         p = _.where(products, { hasStagedChanges: false })
163 |         expect(p.length).toBe 1
164 |         expect(p[0].name).toEqual en: "#{@newProductName}2"
165 | 
166 |         p = _.where(products, { hasStagedChanges: true })  # the row with an empty publish cell keeps its staged changes
167 |         expect(p.length).toBe 1
168 |         expect(p[0].name).toEqual en: "#{@newProductName}12"
169 |         done()
170 |       .catch (err) -> done.fail _.prettify(err)
171 |
172 |     it 'should update and publish product when matching using SKU', (done) ->
173 |       csv =
174 |         """
175 |         productType,variantId,name,sku,publish
176 |         #{@productType.id},1,#{@newProductName}1,#{@newProductSku}1,true
177 |         ,2,,#{@newProductSku}2,false
178 |         #{@productType.id},1,#{@newProductName}3,#{@newProductSku}3,true
179 |         """
180 | 
181 |       @importer.import(csv)
182 |       .then (result) =>
183 |         expect(_.size result).toBe 2
184 |         expect(result).toEqual [
185 |           '[row 2] New product created.',
186 |           '[row 4] New product created.'
187 |         ]
188 | 
189 |       csv =
190 |         """
191 |         productType,sku,prices,publish
192 |         #{@productType.id},#{@newProductSku}1,EUR 111,true
193 |         #{@productType.id},#{@newProductSku}2,EUR 222,false
194 |         #{@productType.id},#{@newProductSku}3,EUR 333,false
195 |         """
196 |       im = createImporter()  # NOTE(review): this second import is at the outer indent level, i.e. NOT chained to the first import's promise — it races the initial import; indentation should likely move it inside the .then above
197 |       im.import(csv)
198 |       .then (result) =>
199 |         expect(_.size result).toBe 2
200 |         expect(result).toEqual [
201 |           '[row 2] Product updated.',
202 |           '[row 4] Product updated.'
203 |         ]
204 |         service = TestHelpers.createService(project_key, 'productProjections')
205 |         request = {
206 |           uri: service
207 |           .where("productType(id=\"#{@productType.id}\")")
208 |           .staged true
209 |           .build()
210 |           method: 'GET'
211 |         }
212 |         @client.execute request
213 |       .then (result) =>
214 |         products = _.where(result.body.results, { published: true })
215 |         expect(_.size products).toBe 2
216 | 
217 |         p = _.where(products, { hasStagedChanges: false })
218 |         expect(p.length).toBe 1
219 |         expect(p[0].variants.length).toBe 1
220 |         expect(p[0].name).toEqual en: "#{@newProductName}1"
221 |         expect(p[0].masterVariant.prices[0].value).toEqual jasmine.objectContaining(currencyCode: 'EUR', centAmount: 111)
222 |         expect(p[0].variants[0].prices[0].value).toEqual jasmine.objectContaining(currencyCode: 'EUR', centAmount: 222)
223 | 
224 |         p = _.where(products, { hasStagedChanges: true })
225 |         expect(p.length).toBe 1
226 |         expect(p[0].name).toEqual en: "#{@newProductName}3"
227 |         expect(p[0].masterVariant.prices[0].value).toEqual jasmine.objectContaining(currencyCode: 'EUR', centAmount: 333)
228 | 
229 |         done()
230 |       .catch (err) -> done.fail _.prettify(err)
231 |
232 |     it 'should publish even if there are no update actions', (done) ->
233 |       csv =
234 |         """
235 |         productType,variantId,name,sku
236 |         #{@productType.id},1,#{@newProductName}1,#{@newProductSku}1
237 |         ,2,,#{@newProductSku}2
238 |         #{@productType.id},1,#{@newProductName}3,#{@newProductSku}3
239 |         """
240 | 
241 |       @importer.import(csv)
242 |       .then (result) =>
243 |         expect(_.size result).toBe 2
244 |         expect(result).toEqual [
245 |           '[row 2] New product created.',
246 |           '[row 4] New product created.'
247 |         ]
248 | 
249 |       csv =
250 |         """
251 |         productType,sku,publish
252 |         #{@productType.id},#{@newProductSku}1,true
253 |         #{@productType.id},#{@newProductSku}3,false
254 |         """
255 |       im = createImporter()  # NOTE(review): this publish-only pass is NOT chained to the first import's promise — it races the initial import; indentation should likely move it inside the .then above
256 |       im.import(csv)
257 |       .then (result) =>
258 |         expect(_.size result).toBe 2
259 |         expect(result).toEqual [
260 |           '[row 2] Product updated.',
261 |           '[row 3] Product update not necessary.'
262 |         ]
263 | 
264 |         service = TestHelpers.createService(project_key, 'productProjections')
265 |         request = {
266 |           uri: service
267 |           .where("productType(id=\"#{@productType.id}\")")
268 |           .staged true
269 |           .build()
270 |           method: 'GET'
271 |         }
272 |         @client.execute request
273 |       .then (result) =>
274 |         p = _.where(result.body.results, { published: true })
275 |         expect(p.length).toBe 1
276 |         expect(p[0].name).toEqual en: "#{@newProductName}1"
277 | 
278 |         p = _.where(result.body.results, { published: false })
279 |         expect(p.length).toBe 1
280 |         expect(p[0].name).toEqual en: "#{@newProductName}3"
281 | 
282 |         done()
283 |       .catch (err) -> done.fail _.prettify(err)
284 |
285 |
--------------------------------------------------------------------------------
/src/spec/integration/importXlsx.spec.coffee:
--------------------------------------------------------------------------------
1 | Promise = require 'bluebird'
2 | fetch = require 'node-fetch'
3 | _ = require 'underscore'
4 | archiver = require 'archiver'
5 | _.mixin require('underscore-mixins')
6 | iconv = require 'iconv-lite'
7 | {Import} = require '../../lib/main'
8 | Config = require '../../config'
9 | TestHelpers = require './testhelpers'
10 | Excel = require 'exceljs'
11 | cuid = require 'cuid'
12 | path = require 'path'
13 | tmp = require 'tmp'
14 | fs = Promise.promisifyAll require('fs')
# will clean temporary files even when an uncaught exception occurs
tmp.setGracefulCleanup()
# Key of the supply channel referenced by discounted/channel price imports below
CHANNEL_KEY = 'retailerA'

# Build the commercetools SDK client configs from the project credentials
{ client_id, client_secret, project_key } = Config.config
authConfig = {
  host: 'https://auth.sphere.io'
  projectKey: project_key
  credentials: {
    clientId: client_id
    clientSecret: client_secret
  }
  fetch: fetch
}
httpConfig = { host: 'https://api.sphere.io', fetch: fetch }
userAgentConfig = {}
31 |
# Write `data` (array of rows; first row = headers) to an XLSX file with a
# single 'Products' worksheet. Returns the promise from exceljs' writeFile.
writeXlsx = (filePath, data) ->
  workbook = new Excel.Workbook()
  workbook.created = new Date()
  sheet = workbook.addWorksheet('Products')
  console.log "Generating Xlsx file"

  data.forEach (row, rowIndex) ->
    if rowIndex is 0
      # First row: turn every cell into a column definition
      sheet.columns = ({ header: cell } for cell in row)
    else
      sheet.addRow row

  workbook.xlsx.writeFile(filePath)
50 |
# Build a fresh Import instance configured for XLSX input, matching variants
# by SKU. Note: mutates the shared Config to switch the import format.
createImporter = ->
  Config.importFormat = "xlsx"
  importer = new Import {
    authConfig: authConfig
    httpConfig: httpConfig
    userAgentConfig: userAgentConfig
  }
  importer.matchBy = 'sku'
  importer.allowRemovalOfVariants = true
  importer.suppressMissingHeaderWarning = true
  importer
62 |
# Integration suite for importing products from generated XLSX files.
# NOTE(review): every spec in #importXlsx is currently disabled via `xit`
# (marked "Test broken; fixme!") — confirm intent before re-enabling.
describe 'Import integration test', ->

  # Recreate the importer, the mock product type and the supply channel
  # before every spec; runs against the live project in Config.
  beforeEach (done) ->
    jasmine.DEFAULT_TIMEOUT_INTERVAL = 90000 # 90 sec
    @importer = createImporter()
    @client = @importer.client

    @productType = TestHelpers.mockProductType()

    TestHelpers.setupProductType(@client, @productType, null, project_key)
    .then (result) =>
      @productType = result
      # Check if channel exists
      service = TestHelpers.createService(project_key, 'channels')
      request = {
        uri: service
          .where("key=\"#{CHANNEL_KEY}\"")
          .build()
        method: 'GET'
      }
      @client.execute request
    .then (result) =>
      # Create the channel if it doesn't exist else ignore
      if (!result.body.total)
        service = TestHelpers.createService(project_key, 'channels')
        request = {
          uri: service.build()
          method: 'POST'
          body:
            key: CHANNEL_KEY
            roles: ['InventorySupply']
        }
        @client.execute request
    .then -> done()
    .catch (err) -> done.fail _.prettify(err.body)
  , 120000 # 2min

  describe '#importXlsx', ->

    # Fresh unique identifiers per spec so runs don't collide
    beforeEach ->
      @newProductName = TestHelpers.uniqueId 'name-'
      @newProductSlug = TestHelpers.uniqueId 'slug-'
      @newProductSku = TestHelpers.uniqueId 'sku-'

    # TODO: Test broken; fixme!
    xit 'should import a simple product from xlsx', (done) ->
      filePath = "/tmp/test-import.xlsx"
      data = [
        ["productType","name","variantId","slug"],
        [@productType.id,@newProductName,1,@newProductSlug]
      ]

      writeXlsx(filePath, data)
      .then () =>
        @importer.importManager(filePath)
      .then (result) =>
        expect(_.size result).toBe 1
        expect(result[0]).toBe '[row 2] New product created.'

        # Verify the staged projection carries the imported name/slug
        service = TestHelpers.createService(project_key, 'productProjections')
        request = {
          uri: service
            .where("productType(id=\"#{@productType.id}\")")
            .staged(true)
            .build()
          method: 'GET'
        }
        @client.execute request
      .then (result) =>
        expect(_.size result.body.results).toBe 1
        p = result.body.results[0]
        expect(p.name).toEqual en: @newProductName
        expect(p.slug).toEqual en: @newProductSlug
        done()
      .catch (err) -> done.fail _.prettify(err)

    # TODO: Test broken; fixme!
    xit 'should import a product with prices (even when one of them is discounted)', (done) ->
      filePath = "/tmp/test-import.xlsx"
      data = [
        ["productType","name","variantId","slug","prices"],
        [@productType.id,@newProductName,1,@newProductSlug,"EUR 899;CH-EUR 999;DE-EUR 999|799;CH-USD 77777700 ##{CHANNEL_KEY}"]
      ]

      writeXlsx(filePath, data)
      .then () =>
        @importer.importManager(filePath)
      .then (result) =>
        expect(_.size result).toBe 1
        expect(result[0]).toBe '[row 2] New product created.'

        service = TestHelpers.createService(project_key, 'productProjections')
        request = {
          uri: service
            .where("productType(id=\"#{@productType.id}\")")
            .staged(true)
            .build()
          method: 'GET'
        }
        @client.execute request
      .then (result) ->
        expect(_.size result.body.results).toBe 1
        p = result.body.results[0]
        expect(_.size p.masterVariant.prices).toBe 4
        prices = p.masterVariant.prices
        expect(prices[0].value).toEqual jasmine.objectContaining(currencyCode: 'EUR', centAmount: 899)
        expect(prices[1].value).toEqual jasmine.objectContaining(currencyCode: 'EUR', centAmount: 999)
        expect(prices[1].country).toBe 'CH'
        expect(prices[2].country).toBe 'DE'
        expect(prices[2].value).toEqual jasmine.objectContaining(currencyCode: 'EUR', centAmount: 999)
        expect(prices[3].channel.typeId).toBe 'channel'
        expect(prices[3].channel.id).toBeDefined()
        done()
      .catch (err) -> done.fail _.prettify(err)

    # TODO: Test broken; fixme!
    xit 'should do nothing on 2nd import run', (done) ->
      filePath = "/tmp/test-import.xlsx"
      data = [
        ["productType","name","variantId","slug"],
        [@productType.id,@newProductName,1,@newProductSlug]
      ]

      writeXlsx(filePath, data)
      .then () =>
        @importer.importManager(filePath)
      .then (result) ->
        expect(_.size result).toBe 1
        expect(result[0]).toBe '[row 2] New product created.'

        # Second run with a fresh importer matched by slug must be a no-op
        im = createImporter()
        im.matchBy = 'slug'
        im.importManager(filePath)
      .then (result) ->
        expect(_.size result).toBe 1
        expect(result[0]).toBe '[row 2] Product update not necessary.'
        done()
      .catch (err) -> done.fail _.prettify(err)


    # TODO: Test broken; fixme!
    xit 'should do a partial update of prices based on SKUs', (done) ->
      filePath = "/tmp/test-import.xlsx"
      data = [
        ["productType","name","sku","variantId","prices"],
        [@productType.id,@newProductName,@newProductSku+1,1,"EUR 999"],
        [null,null,@newProductSku+2,2,"USD 70000"]
      ]

      writeXlsx(filePath, data)
      .then () =>
        @importer.importManager(filePath)
      .then (result) =>
        expect(_.size result).toBe 1
        expect(result[0]).toBe '[row 2] New product created.'
        # Follow-up CSV import touches prices only, matched by SKU
        csv =
          """
          sku,prices,productType
          #{@newProductSku+1},EUR 1999,#{@productType.name}
          #{@newProductSku+2},USD 80000,#{@productType.name}
          """
        im = createImporter()
        im.allowRemovalOfVariants = false
        im.updatesOnly = true
        im.import(csv)
      .then (result) =>
        expect(_.size result).toBe 1
        expect(result[0]).toBe '[row 2] Product updated.'

        service = TestHelpers.createService(project_key, 'productProjections')
        request = {
          uri: service
            .where("productType(id=\"#{@productType.id}\")")
            .staged(true)
            .build()
          method: 'GET'
        }
        @client.execute request
      .then (result) =>
        expect(_.size result.body.results).toBe 1
        p = result.body.results[0]
        expect(p.name).toEqual {en: @newProductName}
        expect(p.masterVariant.sku).toBe "#{@newProductSku}1"
        expect(p.masterVariant.prices[0].value).toEqual jasmine.objectContaining(centAmount: 1999, currencyCode: 'EUR')
        expect(p.variants[0].sku).toBe "#{@newProductSku}2"
        expect(p.variants[0].prices[0].value).toEqual jasmine.objectContaining(centAmount: 80000, currencyCode: 'USD')
        done()
      .catch (err) -> done.fail _.prettify(err)
251 |
--------------------------------------------------------------------------------
/src/spec/integration/state.spec.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | fetch = require 'node-fetch'
3 | _.mixin require('underscore-mixins')
4 | fetch = require 'node-fetch'
5 | {Import} = require '../../lib/main'
6 | Config = require '../../config'
7 | TestHelpers = require './testhelpers'
8 |
# Predicate handed to Import#changeState: selects every product.
performAllProducts = ->
  true

# Name of the unconstrained text attribute from the mock product type.
TEXT_ATTRIBUTE_NONE = 'attr-text-n'
12 |
# Integration tests for publishing, unpublishing and deleting products via
# Import#changeState. Runs against the live project configured in ../../config.
describe 'State integration tests', ->
  { client_id, client_secret, project_key } = Config.config
  authConfig = {
    host: 'https://auth.sphere.io'
    projectKey: project_key
    credentials: {
      clientId: client_id
      clientSecret: client_secret
    }
    fetch: fetch
  }
  httpConfig = { host: 'https://api.sphere.io', fetch: fetch }
  userAgentConfig = {}
  beforeEach (done) ->
    @importer = new Import {
      authConfig: authConfig
      httpConfig: httpConfig
      userAgentConfig: userAgentConfig
    }
    @importer.matchBy = 'sku'
    @importer.suppressMissingHeaderWarning = true
    @client = @importer.client

    @productType = TestHelpers.mockProductType()

    TestHelpers.setupProductType(@client, @productType, null, project_key)
    .then (result) =>
      @productType = result
      done()
    # Use done.fail so setup errors actually fail the spec (consistent with
    # the other integration suites).
    .catch (err) -> done.fail _.prettify(err.body)
  , 50000 # 50sec


  it 'should publish and unpublish products', (done) ->
    csv =
      """
      productType,name.en,slug.en,variantId,sku,#{TEXT_ATTRIBUTE_NONE}
      #{@productType.name},myProduct1,my-slug1,1,sku1,foo
      #{@productType.name},myProduct2,my-slug2,1,sku2,bar
      """
    @importer.import(csv)
    .then (result) =>
      expect(_.size result).toBe 2
      expect(result[0]).toBe '[row 2] New product created.'
      expect(result[1]).toBe '[row 3] New product created.'
      # changeState(publish, deleteOnUnpublish, predicate)
      @importer.changeState(true, false, performAllProducts)
    .then (result) =>
      expect(_.size result).toBe 2
      expect(result[0]).toBe '[row 0] Product published.'
      expect(result[1]).toBe '[row 0] Product published.'
      @importer.changeState(false, false, performAllProducts)
    .then (result) ->
      expect(_.size result).toBe 2
      expect(result[0]).toBe '[row 0] Product unpublished.'
      expect(result[1]).toBe '[row 0] Product unpublished.'
      done()
    .catch (err) -> done.fail _.prettify(err)
  , 50000 # 50sec

  it 'should only publish products with hasStagedChanges', (done) ->
    csv =
      """
      productType,name.en,slug.en,variantId,sku,#{TEXT_ATTRIBUTE_NONE}
      #{@productType.name},myProduct1,my-slug1,1,sku1,foo
      #{@productType.name},myProduct2,my-slug2,1,sku2,bar
      """
    @importer.import(csv)
    .then (result) =>
      expect(_.size result).toBe 2
      expect(result[0]).toBe '[row 2] New product created.'
      expect(result[1]).toBe '[row 3] New product created.'
      @importer.changeState(true, false, performAllProducts)
    .then (result) =>
      expect(_.size result).toBe 2
      expect(result[0]).toBe '[row 0] Product published.'
      expect(result[1]).toBe '[row 0] Product published.'
      # Re-import with one changed attribute so only product 2 gets staged
      # changes; a second publish should then touch only that product.
      csv =
        """
        productType,name.en,slug.en,variantId,sku,#{TEXT_ATTRIBUTE_NONE}
        #{@productType.name},myProduct1,my-slug1,1,sku1,foo
        #{@productType.name},myProduct2,my-slug2,1,sku2,baz
        """
      im = new Import {
        authConfig: authConfig
        httpConfig: httpConfig
        userAgentConfig: userAgentConfig
      }
      im.matchBy = 'slug'
      im.suppressMissingHeaderWarning = true
      im.import(csv)
    .then (result) =>
      expect(_.size result).toBe 2
      expect(result[0]).toBe '[row 2] Product update not necessary.'
      expect(result[1]).toBe '[row 3] Product updated.'
      @importer.changeState(true, false, performAllProducts)
    .then (result) ->
      expect(_.size result).toBe 2
      expect(_.contains(result, '[row 0] Product published.')).toBe true
      expect(_.contains(result, '[row 0] Product is already published - no staged changes.')).toBe true
      done()
    .catch (err) -> done.fail _.prettify(err)
  , 50000 # 50sec

  it 'should delete unpublished products', (done) ->
    csv =
      """
      productType,name.en,slug.en,variantId,sku,#{TEXT_ATTRIBUTE_NONE}
      #{@productType.name},myProduct1,my-slug1,1,sku1,foo
      #{@productType.name},myProduct2,my-slug2,1,sku2,bar
      """
    @importer.import(csv)
    .then (result) =>
      expect(_.size result).toBe 2
      expect(result[0]).toBe '[row 2] New product created.'
      expect(result[1]).toBe '[row 3] New product created.'
      # publish=true with remove=true deletes the matched products
      @importer.changeState(true, true, performAllProducts)
    .then (result) ->
      expect(_.size result).toBe 2
      expect(result[0]).toBe '[row 0] Product deleted.'
      expect(result[1]).toBe '[row 0] Product deleted.'
      done()
    .catch (err) -> done.fail _.prettify(err)
  , 50000 # 50sec
136 |
--------------------------------------------------------------------------------
/src/spec/integration/testhelpers.coffee:
--------------------------------------------------------------------------------
1 | { createRequestBuilder } = require '@commercetools/api-request-builder'
2 | _ = require 'underscore'
3 | _.mixin require('underscore-mixins')
4 | Promise = require 'bluebird'
5 |
# Build a unique id of the shape "<prefix><epoch millis>_<counter>",
# where the trailing counter comes from underscore's _.uniqueId.
exports.uniqueId = uniqueId = (prefix) ->
  timestamp = Date.now()
  _.uniqueId "#{prefix}#{timestamp}_"
8 |
# Build the full list of attribute definitions for one attributeConstraint
# value. The constraint is encoded into every attribute name via a suffix:
#   'Unique' -> 'u', 'CombinationUnique' -> 'cu', 'SameForAll' -> 'sfa',
#   anything else (i.e. 'None') -> 'n'.
getAllAttributesByConstraint = (constraint) ->
  lowerConstraint = switch constraint
    when 'Unique' then 'u'
    when 'CombinationUnique' then 'cu'
    when 'SameForAll' then 'sfa'
    else 'n'

  [
    # simple types
    { type: { name: 'text' }, name: "attr-text-#{lowerConstraint}", label: { en: "Attribute TEXT #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false, inputHint: 'SingleLine' }
    { type: { name: 'ltext' }, name: "attr-ltext-#{lowerConstraint}", label: { en: "Attribute LTEXT #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false, inputHint: 'SingleLine' }
    { type: { name: 'enum', values: [{ key: 'enum1', label: 'Enum1' }, { key: 'enum2', label: 'Enum2' }]}, name: "attr-enum-#{lowerConstraint}", label: { en: "Attribute ENUM #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'lenum', values: [{ key: 'lenum1', label: { en: 'Enum1' } }, { key: 'lenum2', label: { en: 'Enum2' } }]}, name: "attr-lenum-#{lowerConstraint}", label: { en: "Attribute LENUM #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'number' }, name: "attr-number-#{lowerConstraint}", label: { en: "Attribute NUMBER #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'boolean' }, name: "attr-boolean-#{lowerConstraint}", label: { en: "Attribute BOOLEAN #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'money' }, name: "attr-money-#{lowerConstraint}", label: { en: "Attribute MONEY #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'date' }, name: "attr-date-#{lowerConstraint}", label: { en: "Attribute DATE #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'time' }, name: "attr-time-#{lowerConstraint}", label: { en: "Attribute TIME #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'datetime' }, name: "attr-datetime-#{lowerConstraint}", label: { en: "Attribute DATETIME #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    # reference types (one per referenceable resource)
    { type: { name: 'reference', referenceTypeId: 'product' }, name: "attr-ref-product-#{lowerConstraint}", label: { en: "Attribute REFERENCE-PRODUCT #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'reference', referenceTypeId: 'product-type' }, name: "attr-ref-product-type-#{lowerConstraint}", label: { en: "Attribute REFERENCE-PRODUCT-TYPE #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'reference', referenceTypeId: 'channel' }, name: "attr-ref-channel-#{lowerConstraint}", label: { en: "Attribute REFERENCE-CHANNEL #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'reference', referenceTypeId: 'state' }, name: "attr-ref-state-#{lowerConstraint}", label: { en: "Attribute REFERENCE-STATE #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'reference', referenceTypeId: 'zone' }, name: "attr-ref-zone-#{lowerConstraint}", label: { en: "Attribute REFERENCE-ZONE #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'reference', referenceTypeId: 'shipping-method' }, name: "attr-ref-shipping-method-#{lowerConstraint}", label: { en: "Attribute REFERENCE-SHIPPING-METHOD #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'reference', referenceTypeId: 'category' }, name: "attr-ref-category-#{lowerConstraint}", label: { en: "Attribute REFERENCE-CATEGORY #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'reference', referenceTypeId: 'review' }, name: "attr-ref-review-#{lowerConstraint}", label: { en: "Attribute REFERENCE-REVIEW #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'reference', referenceTypeId: 'key-value-document' }, name: "attr-ref-key-value-#{lowerConstraint}", label: { en: "Attribute REFERENCE-KEY-VALUE-DOCUMENT #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    # set types (one per element type above)
    { type: { name: 'set', elementType: { name: 'text' } }, name: "attr-set-text-#{lowerConstraint}", label: { en: "Attribute SET-TEXT #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false, inputHint: 'SingleLine' }
    { type: { name: 'set', elementType: { name: 'ltext' } }, name: "attr-set-ltext-#{lowerConstraint}", label: { en: "Attribute SET-LTEXT #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false, inputHint: 'SingleLine' }
    { type: { name: 'set', elementType: { name: 'enum', values: [{ key: 'enum1', label: 'Enum1' }, { key: 'enum2', label: 'Enum2' }] } }, name: "attr-set-enum-#{lowerConstraint}", label: { en: "Attribute SET-ENUM #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'set', elementType: { name: 'lenum', values: [{ key: 'lenum1', label: { en: 'Enum1' } }, { key: 'lenum2', label: { en: 'Enum2' } }] } }, name: "attr-set-lenum-#{lowerConstraint}", label: { en: "Attribute SET-LENUM #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'set', elementType: { name: 'number' } }, name: "attr-set-number-#{lowerConstraint}", label: { en: "Attribute SET-NUMBER #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'set', elementType: { name: 'boolean' } }, name: "attr-set-boolean-#{lowerConstraint}", label: { en: "Attribute SET-BOOLEAN #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'set', elementType: { name: 'money' } }, name: "attr-set-money-#{lowerConstraint}", label: { en: "Attribute SET-MONEY #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'set', elementType: { name: 'date' } }, name: "attr-set-date-#{lowerConstraint}", label: { en: "Attribute SET-DATE #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'set', elementType: { name: 'time' } }, name: "attr-set-time-#{lowerConstraint}", label: { en: "Attribute SET-TIME #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'set', elementType: { name: 'datetime' } }, name: "attr-set-datetime-#{lowerConstraint}", label: { en: "Attribute SET-DATETIME #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'set', elementType: { name: 'reference', referenceTypeId: 'product' } }, name: "attr-set-ref-product-#{lowerConstraint}", label: { en: "Attribute SET-REFERENCE-PRODUCT #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'set', elementType: { name: 'reference', referenceTypeId: 'product-type' } }, name: "attr-set-ref-product-type-#{lowerConstraint}", label: { en: "Attribute SET-REFERENCE-PRODUCT-TYPE #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'set', elementType: { name: 'reference', referenceTypeId: 'channel' } }, name: "attr-set-ref-channel-#{lowerConstraint}", label: { en: "Attribute SET-REFERENCE-CHANNEL #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'set', elementType: { name: 'reference', referenceTypeId: 'state' } }, name: "attr-set-ref-state-#{lowerConstraint}", label: { en: "Attribute SET-REFERENCE-STATE #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'set', elementType: { name: 'reference', referenceTypeId: 'zone' } }, name: "attr-set-ref-zone-#{lowerConstraint}", label: { en: "Attribute SET-REFERENCE-ZONE #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'set', elementType: { name: 'reference', referenceTypeId: 'shipping-method' } }, name: "attr-set-ref-shipping-method-#{lowerConstraint}", label: { en: "Attribute SET-REFERENCE-SHIPPING-METHOD #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'set', elementType: { name: 'reference', referenceTypeId: 'category' } }, name: "attr-set-ref-category-#{lowerConstraint}", label: { en: "Attribute SET-REFERENCE-CATEGORY #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'set', elementType: { name: 'reference', referenceTypeId: 'review' } }, name: "attr-set-ref-review-#{lowerConstraint}", label: { en: "Attribute SET-REFERENCE-REVIEW #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
    { type: { name: 'set', elementType: { name: 'reference', referenceTypeId: 'key-value-document' } }, name: "attr-set-ref-key-value-#{lowerConstraint}", label: { en: "Attribute SET-REFERENCE-KEY-VALUE-DOCUMENT #{constraint}" }, attributeConstraint: constraint, isRequired: false, isSearchable: false }
  ]
56 |
# Build a product-type draft carrying every attribute type in every
# attributeConstraint flavour (None/Unique/CombinationUnique/SameForAll).
exports.mockProductType = ->
  constraints = ['None', 'Unique', 'CombinationUnique', 'SameForAll']
  {
    name: 'ImpEx with all types'
    description: 'A generic type with all attributes'
    attributes: _.flatten(constraints.map (c) -> getAllAttributesByConstraint(c))
  }
62 |
# Remove every product from the project: first unpublish all published
# products (page by page via client.process), then delete all products.
# Returns a promise resolving with the deletion results.
exports.cleanupProducts = (client, projectKey) ->
  console.log 'About to cleanup products...'
  productProjectionUri = createService(projectKey, 'productProjections')
    .sort('id')
    .where('published = "true"')
    .perPage(30)
    .build()

  productProjectionRequest = {
    uri: productProjectionUri
    method: 'GET'
  }
  client.process productProjectionRequest, (payload) ->
    Promise.map payload.body.results, (existingProduct) ->
      # Unpublish first — published products cannot be deleted directly
      data = {
        id: existingProduct.id
        version: existingProduct.version
        actions: [
          action: 'unpublish'
        ]
      }
      unpublishService = createService(projectKey, 'products')
      unpublishRequest = {
        uri: unpublishService.byId(existingProduct.id).build()
        method: 'POST'
        body: data
      }
      client.execute(unpublishRequest)
  .then ->
    service = createService(projectKey, 'products')
    request = {
      uri: service.perPage(30).build()
      method: 'GET'
    }
    client.process request, (payload) ->
      Promise.map payload.body.results, (existingProduct) ->
        deleteService = createService(projectKey, 'products')
        deleteRequest = {
          uri: deleteService
            .byId(existingProduct.id)
            .withVersion(existingProduct.version)
            .build()
          method: 'DELETE'
        }
        client.execute(deleteRequest)
    .then (result) ->
      # NOTE(review): result is the per-page output of client.process, so
      # _.size counts pages, not individual products — confirm if precision matters
      console.log "Deleted #{_.size result} products"
      result
111 |
112 | ###
113 | * You may omit the product in this case it resolves the created product type.
114 | * Otherwise the created product is resolved.
115 | ###
# Reset the project to a known state: clean up all products, delete any
# product types with the same name, recreate the product type, and
# (optionally) create the given product draft against it.
exports.setupProductType = (client, productType, product, projectKey) ->
  exports.cleanupProducts(client, projectKey)
  .then ->
    console.log "About to ensure productType"
    # ensure the productType exists, otherwise create it
    service = createService(projectKey, 'productTypes')
    request = {
      uri: service.where("name = \"#{productType.name}\"").perPage(10).build()
      method: 'GET'
    }
    client.execute(request)
  .then (result) ->
    if _.size(result.body.results) > 0
      console.log "Found #{_.size result.body.results} productType(s) with name '#{productType.name}' - deleting"
      Promise.all(_.map(result.body.results, (existingProductType) ->
        console.log "ProductType '#{existingProductType.name}' already exists - deleting"
        deleteService = createService(projectKey, 'productTypes')
        deleteRequest = {
          uri: deleteService
            .byId(existingProductType.id)
            .withVersion(existingProductType.version)
            .build()
          method: 'DELETE'
        }
        client.execute(deleteRequest)
      ))
  .then ->
    console.log "Ensuring productType '#{productType.name}'"
    service = createService(projectKey, 'productTypes')
    request = {
      uri: service.build()
      method: 'POST'
      body: productType
    }
    client.execute(request)
  .then (result) -> Promise.resolve(result.body)
  .then (pt) ->
    if product?
      product.productType.id = pt.id
      service = createService(projectKey, 'products')
      request = {
        uri: service.build()
        method: 'POST'
        body: product
      }
      client.execute(request)
      .then (result) ->
        Promise.resolve result.body # returns product
    else
      Promise.resolve pt # returns productType
167 |
168 |
# Delete every existing category (page by page), then create the categories
# in categoryList. Resolves with the created category bodies.
exports.ensureCategories = (client, categoryList, projectKey) ->
  console.log 'About to cleanup categories...'
  service = createService(projectKey, 'categories')
  request = {
    uri: service.perPage(30).build()
    method: 'GET'
  }
  client.process request, (payload) ->
    Promise.map payload.body.results, (category) ->
      deleteService = createService(projectKey, 'categories')
      deleteRequest = {
        uri: deleteService
          .byId(category.id)
          .withVersion(category.version)
          .build()
        method: 'DELETE'
      }
      client.execute(deleteRequest)
  .then (result) ->
    # NOTE(review): result is client.process' per-page output, so _.size
    # counts pages rather than categories — confirm before relying on the log
    console.log "Deleted #{_.size result} categories, creating new one"
    Promise.map categoryList, (category) ->
      service = createService(projectKey, 'categories')
      request = {
        uri: service.build()
        method: 'POST'
        body: category
      }
      client.execute(request)
      .then (result) -> result.body
198 |
# Build `len` category drafts numbered 1..len. Returns [] for len <= 0.
# NOTE(review): "Catgeory" in the English name looks like a typo for
# "Category" — kept as-is in case a spec asserts the exact string; confirm.
exports.generateCategories = (len) ->
  buildCategory = (i) ->
    "name": {
      "en": "Catgeory#{i}"
    }
    "slug": {
      "en": "category-#{i}"
    }
    "externalId": "#{i}"
  (buildCategory(i) for i in [1...len + 1])
212 |
# Make sure the 'previous-state' ProductState exists, creating it on demand.
exports.ensurePreviousState = (client, projectKey) ->
  stateService = createService(projectKey, 'states')
  lookup =
    uri: stateService.where("key=\"previous-state\"").build()
    method: 'GET'
  client.execute(lookup)
  .then (result) ->
    # Nothing to do when the state already exists
    return if result.body.total
    creation =
      uri: createService(projectKey, 'states').build()
      method: 'POST'
      body:
        key: 'previous-state'
        type: 'ProductState'
    client.execute creation
235 |
# Make sure the 'next-state' ProductState exists, creating it on demand.
exports.ensureNextState = (client, projectKey) ->
  stateService = createService(projectKey, 'states')
  lookup =
    uri: stateService.where("key=\"next-state\"").build()
    method: 'GET'
  client.execute(lookup)
  .then (result) ->
    # Nothing to do when the state already exists
    return if result.body.total
    creation =
      uri: createService(projectKey, 'states').build()
      method: 'POST'
      body:
        key: 'next-state'
        type: 'ProductState'
    client.execute creation
258 |
# Make sure a channel with the given key exists (with the InventorySupply
# role), creating it on demand.
exports.ensureChannels = (client, projectKey, channelKey) ->
  channelService = createService(projectKey, 'channels')
  lookup =
    uri: channelService.where("key=\"#{channelKey}\"").build()
    method: 'GET'
  client.execute(lookup)
  .then (result) ->
    # Nothing to do when the channel already exists
    return if result.body.total
    creation =
      uri: createService(projectKey, 'channels').build()
      method: 'POST'
      body:
        key: channelKey
        roles: ['InventorySupply']
    client.execute creation
281 |
# Resolve the request-builder service for a resource type (e.g. 'products',
# 'channels') scoped to the given project.
createService = (projectKey, type) ->
  createRequestBuilder({ projectKey })[type]

# Exported so this helper module and the spec files share one factory
exports.createService = createService
288 |
--------------------------------------------------------------------------------
/src/spec/matchutils.spec.coffee:
--------------------------------------------------------------------------------
1 | MatchUtils = require "../lib/matchutils"
2 | GLOBALS = require '../lib/globals'
3 |
# Fixture: product "1" — master variant sku/attribute "1", variants "2"/"3"
product1 =
  id: "1"
  slug:
    en: "1"
  masterVariant:
    id: 1
    sku: "1"
    attributes: [
      { name: "key", value: "1" }
    ]
  variants: [
    { id: 2, sku: "2", attributes: [{ name: "key", value: "2" }] }
    { id: 3, sku: "3", attributes: [{ name: "key", value: "3" }] }
  ]
42 |
# Fixture: product "2" — master variant sku/attribute "4", variants "5"/"6"
product2 =
  id: "2"
  slug:
    en: "2"
  masterVariant:
    id: 1
    sku: "4"
    attributes: [
      { name: "key", value: "4" }
    ]
  variants: [
    { id: 2, sku: "5", attributes: [{ name: "key", value: "5" }] }
    { id: 3, sku: "6", attributes: [{ name: "key", value: "6" }] }
  ]
81 |
# Entry-shaped fixtures ({ product: ... }) for matcher helpers.
# NOTE(review): appears unused by the specs below — confirm before removing.
products = [
  { product: product1 }
  { product: product2 }
]
90 |
# Unit tests for the MatchUtils lookup-map builders and matchers.
describe "MatchUtils", ->
  describe "mapById", ->
    it "should return the map of id -> product", ->
      result = MatchUtils.mapById(product1)
      expect(result).toEqual({ "1": product1 })

  describe "mapBySlug", ->
    it "should return the map of slug -> product", ->
      GLOBALS.DEFAULT_LANGUAGE = "en"
      result = MatchUtils.mapBySlug(product1)
      expect(result).toEqual({ "1": product1 })

  describe "mapBySku", ->
    it "should return the map of [skus] -> product", ->
      result = MatchUtils.mapBySku(product1)
      expect(result).toEqual({ "1": product1, "2": product1, "3": product1 })

  describe "mapByCustomAttribute", ->
    it "should return the map of [customAttributes] -> product", ->
      result = MatchUtils.mapByCustomAttribute("key")(product1)
      expect(result).toEqual({ "1": product1, "2": product1, "3": product1 })

  describe "mapIdentifier", ->
    it "should return function which returns an id of product entry", ->
      identify = MatchUtils.mapIdentifier("id")
      expect(identify({ product: product1 })).toBe("1")

    it "should return function which returns a slug of product entry", ->
      GLOBALS.DEFAULT_LANGUAGE = "en"
      identify = MatchUtils.mapIdentifier("slug")
      expect(identify({ product: product1 })).toBe("1")

    it "should return function which returns an sku of product entry", ->
      identify = MatchUtils.mapIdentifier("sku")
      expect(identify({ product: product1 })).toBe("1")

    it "should return function which returns a custom attribute value of product's master variant", ->
      identify = MatchUtils.mapIdentifier("key")
      expect(identify({ product: product1 })).toEqual("1")

  describe "initMatcher", ->
    it "should produce the function which maps the given entry to existing product based on id", ->
      match = MatchUtils.initMatcher("id", [product1, product2])
      expect(match({ product: product1 })).toEqual(product1)

    it "should produce the function which maps the given entry to existing product based on slug", ->
      GLOBALS.DEFAULT_LANGUAGE = "en"
      match = MatchUtils.initMatcher("slug", [product1, product2])
      expect(match({ product: product1 })).toEqual(product1)

    it "should produce the function which maps the given entry to existing product based on sku", ->
      match = MatchUtils.initMatcher("sku", [product1, product2])
      expect(match({ product: product1 })).toEqual(product1)

    it "should produce the function which maps the given entry to existing product based on custom attribute", ->
      match = MatchUtils.initMatcher("key", [product1, product2])
      expect(match({ product: product1 })).toEqual(product1)
--------------------------------------------------------------------------------
/src/spec/queryutils.spec.coffee:
--------------------------------------------------------------------------------
1 | QueryUtils = require "../lib/queryutils"
2 | GLOBALS = require '../lib/globals'
3 |
# Fixture: two wrapped products ({ product: ... }) as consumed by the
# QueryUtils match* functions below. The first product's master variant
# deliberately embeds double quotes in its sku ('1"foo"') and attribute
# value ('1"bar"') to exercise quote escaping in generated predicates.
products = [
  {
    product: {
      masterVariant: {
        id: 1,
        attributes: [
          {
            name: "key",
            value: '1"bar"'
          }
        ],
        sku: '1"foo"'
      },
      variants: [
        {
          attributes: [
            {
              name: "key",
              value: "2"
            }
          ],
          sku: "2"
        },
        {
          id: 3,
          attributes: [
            {
              name: "key",
              value: "3"
            }
          ],
          sku: "3"
        }
      ],
      id: "1",
      slug: {
        en: "1"
      }
    }
  },
  {
    product: {
      masterVariant: {
        id: 1,
        attributes: [
          {
            name: "key",
            value: "4"
          }
        ],
        sku: "4"
      },
      variants: [
        {
          attributes: [
            {
              name: "key",
              value: "5"
            }
          ],
          sku: "5"
        },
        {
          attributes: [
            {
              name: "key",
              value: "6"
            }
          ],
          sku: "6"
        }
      ],
      id: "2",
      slug: {
        en: "2"
      }
    }
  }
]
83 |
# Unit tests for QueryUtils: mapping a matcher name to its match function
# and building product query predicates (by id, slug, sku or custom
# attribute) from the wrapped-products fixture above.
# FIX: corrected the repeated test-name typo "predicte" -> "predicate".
describe "QueryUtils", ->

  describe "mapMatchFunction", ->

    it "should return \"matchById\" function if supplied with \"id\" value", ->
      matchFunction = QueryUtils.mapMatchFunction("id")
      expect(matchFunction).toBeDefined()
      expect(matchFunction).toBe QueryUtils.matchById

    it "should return \"matchBySlug\" function if supplied with \"slug\" value", ->
      matchFunction = QueryUtils.mapMatchFunction("slug")
      expect(matchFunction).toBeDefined()
      expect(matchFunction).toBe QueryUtils.matchBySlug

    it "should return \"matchBySku\" function if supplied with \"sku\" value", ->
      matchFunction = QueryUtils.mapMatchFunction("sku")
      expect(matchFunction).toBeDefined()
      expect(matchFunction).toBe QueryUtils.matchBySku

    it "should return \"matchByCustomAttribute\" function if supplied with random value", ->
      # Any name that is not id/slug/sku falls back to a custom-attribute matcher.
      matchFunction = QueryUtils.mapMatchFunction("custom_attribute_name")
      expect(matchFunction).toBeDefined()
      expect(typeof matchFunction).toEqual('function')

  describe "matchById", ->
    it "should return query predicate based on products provided", ->
      predicate = QueryUtils.matchById products
      expect(predicate).toEqual "id in (\"1\", \"2\")"

  describe "matchBySlug", ->
    it "should return query predicate based on products provided", ->
      # Predicate is built for the slug of GLOBALS.DEFAULT_LANGUAGE.
      GLOBALS.DEFAULT_LANGUAGE = "en"
      predicate = QueryUtils.matchBySlug products
      expect(predicate).toEqual "slug(en in (\"1\", \"2\"))"

  describe "matchBySku", ->
    it "should return query predicate based on products provided", ->
      # Only the masterVariant skus feed the predicate (variant skus 2/3/5/6
      # are absent); the embedded double quotes must come back escaped.
      predicate = QueryUtils.matchBySku(products)
      expect(predicate).toEqual("masterVariant(sku in " +
        '("1\\"foo\\"", "4")) or ' +
        'variants(sku in ("1\\"foo\\"", "4"))')

  describe "matchByCustomAttribute", ->
    it "should return query predicate based on products provided", ->
      # Attribute values from the master variant and all variants are
      # collected; CoffeeScript joins this multi-line string with spaces.
      predicate = QueryUtils.matchByCustomAttribute("key")(products)
      expect(predicate).toEqual(
        'masterVariant(attributes(name="key" and value in
        ("1\\"bar\\"", "2", "3", "4", "5", "6"))) or
        variants(attributes(name="key" and value in
        ("1\\"bar\\"", "2", "3", "4", "5", "6")))'
      )
135 |
--------------------------------------------------------------------------------
/src/spec/reader.spec.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | Promise = require 'bluebird'
3 | Excel = require 'exceljs'
4 | Reader = require '../lib/io/reader'
5 |
6 | tmp = require 'tmp'
7 | fs = Promise.promisifyAll require('fs')
8 |
9 | tmp.setGracefulCleanup()
10 |
11 |
# Write a one-sheet xlsx workbook to filePath: the first entry of `data`
# becomes the column headers, every later entry is appended as a row.
# Returns the promise from exceljs' writeFile.
writeXlsx = (filePath, data) ->
  workbook = new Excel.Workbook()
  workbook.created = new Date()
  worksheet = workbook.addWorksheet('Products')
  console.log "Generating Xlsx file"

  data.forEach (row, rowIndex) ->
    if rowIndex is 0
      worksheet.columns = ({ header: cell } for cell in row)
    else
      worksheet.addRow row

  workbook.xlsx.writeFile(filePath)
30 |
# Unit tests for the io/reader module: CSV parsing, rich-text flattening
# and reading back an xlsx file written by the writeXlsx helper above.
describe 'IO Reader test', ->
  it 'should trim csv header file', (done) ->
    sampleCsv =
      """
      myHeader ,name
      row1,name1
      """
    # NOTE(review): despite the test name, the expected rows still contain
    # the trailing space in 'myHeader ' and each row is a single unsplit
    # string — confirm this really matches Reader.parseCsv's contract.
    Reader.parseCsv(sampleCsv).then (data) =>
      expect(data).toEqual([ [ 'myHeader ,name' ], [ 'row1,name1' ] ])
      done()

  it 'should stringify richText', (done) ->
    # A rich-text value is a list of runs with font info; the reader is
    # expected to join the `text` fields and discard the fonts.
    expected = 'Stringified rich text'
    richText = [
      {
        font: { size: 10, name: 'Arial', family: 2, charset: 1 },
        text: 'Stringified '
      },
      {
        font: { size: 14, name: 'Arial', family: 2, charset: 1 },
        text: 'rich text'
      }
    ]

    reader = new Reader()
    actual = reader._stringifyRichText(richText)
    expect(actual).toBe(expected)
    done()

  it 'should read xlsx file', (done) ->
    # Round-trip: numbers, booleans and empty cells (null/undefined) are
    # expected back as strings.
    filePath = "/tmp/test.xlsx"
    expected = ['TEXT', '1', '2', '', '', 'false', 'true']
    data = [
      ["a","b","c","d","e","f","g"],
      ["TEXT",1,"2",null,undefined,false,true]
    ]

    writeXlsx(filePath, data)
    .then () =>
      reader = new Reader
        importFormat: "xlsx",
        debug: true,
      reader.read(filePath)
    .then (result) =>
      expect(result.length).toBe(2)
      expect(result[1]).toEqual(expected)
      done()
    .catch (err) -> done.fail _.prettify(err)
79 |
--------------------------------------------------------------------------------
/src/spec/types.spec.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | Types = require '../lib/types'
3 |
# Unit tests for Types#buildMaps: the lookup tables built from a list of
# product type definitions.
describe 'Types', ->
  beforeEach ->
    @types = new Types()

  describe '#constructor', ->
    it 'should construct', ->
      expect(@types).toBeDefined()

  describe '#buildMaps', ->
    it 'should create maps for product types', ->
      # pt1 and pt3 deliberately share the name 'myType' to exercise
      # duplicate-name handling.
      pt1 =
        id: 'pt1'
        name: 'myType'
      pt2 =
        id: 'pt2'
        name: 'myType2'
        attributes: [
          { name: 'foo', attributeConstraint: 'SameForAll' }
        ]
      pt3 =
        id: 'pt3'
        name: 'myType'
      @types.buildMaps [pt1, pt2, pt3]
      # id2index preserves the input order of the product types.
      expect(_.size @types.id2index).toBe 3
      expect(@types.id2index['pt1']).toBe 0
      expect(@types.id2index['pt2']).toBe 1
      expect(@types.id2index['pt3']).toBe 2
      # For a duplicated name the last type wins in name2id ...
      expect(@types.name2id['myType']).toBe 'pt3'
      expect(@types.name2id['myType2']).toBe 'pt2'
      # ... and the clashing name is recorded once in duplicateNames.
      expect(_.size @types.duplicateNames).toBe 1
      expect(@types.duplicateNames[0]).toBe 'myType'
      # Per-type list of attribute names constrained to SameForAll
      # (empty when the type declares no such attributes).
      expect(_.size @types.id2SameForAllAttributes).toBe 3
      expect(@types.id2SameForAllAttributes['pt1']).toEqual []
      expect(@types.id2SameForAllAttributes['pt2']).toEqual [ 'foo' ]
      expect(@types.id2SameForAllAttributes['pt3']).toEqual []
      # Per-type map of attribute name -> full attribute definition.
      expect(_.size @types.id2nameAttributeDefMap).toBe 3
      expectedObj =
        foo: pt2.attributes[0]
      expect(@types.id2nameAttributeDefMap['pt2']).toEqual expectedObj
43 |
--------------------------------------------------------------------------------
/src/spec/unless.spec.coffee:
--------------------------------------------------------------------------------
# Sanity checks documenting CoffeeScript's `unless` as a trailing modifier:
# `value unless cond` evaluates to `value` only when `cond` is falsy
# (null, undefined, '', "", 0, false); for any truthy condition the whole
# expression is undefined.
describe 'understanding unless', ->

  it 'with an object', ->
    # {} is truthy, so the expression yields undefined.
    expect(true unless {}).toBeUndefined()

  it 'with null', ->
    expect(true unless null).toBe true

  it 'with undefined', ->
    expect(true unless undefined).toBe true

  it 'with empty single quoted string', ->
    expect(true unless '').toBe true

  it 'with empty double quoted string', ->
    expect(true unless "").toBe true

  it 'with some string', ->
    expect(true unless "foo").toBeUndefined()

  it 'with 0', ->
    expect(true unless 0).toBe true

  it 'with 1', ->
    expect(true unless 1).toBeUndefined()

  it 'with false', ->
    expect(true unless false).toBe true

  it 'with true', ->
    expect(true unless true).toBeUndefined()
32 |
--------------------------------------------------------------------------------
/src/spec/validator.spec.coffee:
--------------------------------------------------------------------------------
1 | _ = require 'underscore'
2 | _.mixin require('underscore-mixins')
3 | CONS = require '../lib/constants'
4 | {Header, Validator} = require '../lib/main'
5 |
# Unit tests for Validator: CSV parsing, delimiter sanity checks,
# product/variant row classification and grouping of rows into raw products.
# FIXES: `toBeDefined` was referenced without being called in two places
# (the matcher never ran); test name "an error of two delimiters"
# corrected to "if two delimiters".
describe 'Validator', ->
  beforeEach ->
    @validator = new Validator()

  describe '@constructor', ->
    it 'should initialize', ->
      expect(@validator).toBeDefined()

  describe '#parse', ->
    it 'should parse string', (done) ->
      @validator.parse 'foo'
      .then (parsed) ->
        expect(parsed.count).toBe 1
        done()
      .catch (err) -> done.fail _.prettify(err)

    it 'should store header', (done) ->
      csv =
        """
        myHeader
        row1
        """
      @validator.parse csv
      .then =>
        # BUGFIX: matcher is now actually invoked.
        expect(@validator.header).toBeDefined()
        expect(@validator.header.rawHeader).toEqual ['myHeader']
        done()
      .catch (err) -> done.fail _.prettify(err)

    it 'should trim csv cells', (done) ->
      csv =
        """
        myHeader ,name
        row1,name1
        """
      @validator.parse csv
      .then =>
        # BUGFIX: matcher is now actually invoked.
        expect(@validator.header).toBeDefined()
        expect(@validator.header.rawHeader).toEqual ['myHeader', 'name']
        done()
      .catch (err) -> done.fail _.prettify(err)

    it 'should pass everything but the header as content to callback', (done) ->
      csv =
        """
        myHeader
        row1
        row2,foo
        """
      @validator.parse csv
      .then (parsed) ->
        expect(parsed.data.length).toBe 2
        expect(parsed.data[0]).toEqual ['row1']
        expect(parsed.data[1]).toEqual ['row2', 'foo']
        done()
      .catch (err) -> done.fail _.prettify(err)

  describe '#checkDelimiters', ->
    it 'should work if all delimiters are different', ->
      @validator = new Validator
        csvDelimiter: '#'
        csvQuote: "'"
      @validator.checkDelimiters()
      expect(_.size @validator.errors).toBe 0

    it 'should produce an error if two delimiters are the same', ->
      # ';' collides with the default multiValue delimiter.
      @validator = new Validator
        csvDelimiter: ';'
      @validator.checkDelimiters()
      expect(_.size @validator.errors).toBe 1
      expectedErrorMessage =
        '''
        Your selected delimiter clash with each other: {"csvDelimiter":";","csvQuote":"\\"","language":".","multiValue":";","categoryChildren":">"}
        '''
      expect(@validator.errors[0]).toBe expectedErrorMessage

  describe '#isVariant', ->
    beforeEach ->
      @validator.header = new Header CONS.BASE_HEADERS

    it 'should be true for a variant', ->
      # Empty productType + variantId > 1 marks a variant row.
      expect(@validator.isVariant ['', '2'], CONS.HEADER_VARIANT_ID).toBe true

    it 'should be false for a product', ->
      expect(@validator.isVariant ['myProduct', '1']).toBe false

  describe '#isProduct', ->
    beforeEach ->
      @validator.header = new Header CONS.BASE_HEADERS

    it 'should be false for a variantId > 1 with a product type given', ->
      expect(@validator.isProduct ['foo', '2'], CONS.HEADER_VARIANT_ID).toBe false

  describe '#buildProducts', ->
    beforeEach ->

    it 'should build 2 products and their variants', (done) ->
      csv =
        """
        productType,name,variantId
        foo,n1,1
        ,,2
        ,,3
        bar,n2,1
        ,,2
        """
      @validator.parse csv
      .then (parsed) =>
        @validator.buildProducts parsed.data, CONS.HEADER_VARIANT_ID
        expect(@validator.errors.length).toBe 0
        expect(@validator.rawProducts.length).toBe 2
        expect(@validator.rawProducts[0].master).toEqual ['foo', 'n1', '1']
        expect(@validator.rawProducts[0].variants.length).toBe 2
        # startRow is 1-based and counts the header line.
        expect(@validator.rawProducts[0].startRow).toBe 2
        expect(@validator.rawProducts[1].master).toEqual ['bar', 'n2', '1']
        expect(@validator.rawProducts[1].variants.length).toBe 1
        expect(@validator.rawProducts[1].startRow).toBe 5
        done()
      .catch (err) -> done.fail _.prettify(err)

    it 'should return error if row isnt a variant nor product', (done) ->
      csv =
        """
        productType,name,variantId
        myType,,1
        ,,1
        myType,,2
        ,,foo
        ,,
        """
      @validator.parse csv
      .then (parsed) =>
        @validator.buildProducts parsed.data, CONS.HEADER_VARIANT_ID
        expect(@validator.errors.length).toBe 3
        expect(@validator.errors[0]).toBe '[row 3] Could not be identified as product or variant!'
        expect(@validator.errors[1]).toBe '[row 5] Could not be identified as product or variant!'
        expect(@validator.errors[2]).toBe '[row 6] Could not be identified as product or variant!'
        done()
      .catch (err) -> done.fail _.prettify(err)

    it 'should return error if first row isnt a product row', (done) ->
      csv =
        """
        productType,name,variantId
        foo,,2
        """
      @validator.parse csv
      .then (parsed) =>
        @validator.buildProducts parsed.data, CONS.HEADER_VARIANT_ID
        expect(@validator.errors.length).toBe 1
        expect(@validator.errors[0]).toBe '[row 2] We need a product before starting with a variant!'
        done()
      .catch (err) -> done.fail _.prettify(err)

    it 'should build products without variantId', (done) ->
      csv =
        """
        productType,sku
        foo,123
        bar,234
        ,345
        ,456
        """
      @validator.parse csv
      .then (parsed) =>
        @validator.buildProducts parsed.data
        expect(@validator.errors.length).toBe 0
        expect(@validator.rawProducts.length).toBe 2
        expect(@validator.rawProducts[0].master).toEqual ['foo', '123']
        expect(@validator.rawProducts[0].variants.length).toBe 0
        expect(@validator.rawProducts[0].startRow).toBe 2
        expect(@validator.rawProducts[1].master).toEqual ['bar', '234']
        expect(@validator.rawProducts[1].variants.length).toBe 2
        expect(@validator.rawProducts[1].variants[0].variant).toEqual ['', '345']
        expect(@validator.rawProducts[1].variants[1].variant).toEqual ['', '456']
        expect(@validator.rawProducts[1].startRow).toBe 3
        done()
      .catch (err) -> done.fail _.prettify(err)

    # TODO: deprecated test. should be updated
    xit 'should build products per product type - sku update', (done) ->
      csv =
        """
        productType,sku
        foo,123
        bar,234
        bar,345
        foo,456
        """
      @validator.parse csv
      .then (parsed) =>
        @validator.updateVariantsOnly = true
        @validator.buildProducts parsed.data
        expect(@validator.errors.length).toBe 0
        expect(@validator.rawProducts.length).toBe 2
        expect(@validator.rawProducts[0].variants.length).toBe 2
        expect(@validator.rawProducts[0].startRow).toBe 2
        expect(@validator.rawProducts[0].variants[0].variant).toEqual ['foo', '123']
        expect(@validator.rawProducts[0].variants[0].rowIndex).toBe 2
        expect(@validator.rawProducts[0].variants[1].variant).toEqual ['foo', '456']
        expect(@validator.rawProducts[0].variants[1].rowIndex).toBe 5
        expect(@validator.rawProducts[1].variants.length).toBe 2
        expect(@validator.rawProducts[1].variants[0].variant).toEqual ['bar', '234']
        expect(@validator.rawProducts[1].variants[0].rowIndex).toBe 3
        expect(@validator.rawProducts[1].variants[1].variant).toEqual ['bar', '345']
        expect(@validator.rawProducts[1].variants[1].rowIndex).toBe 4
        expect(@validator.rawProducts[1].startRow).toBe 3
        done()
      .catch (err) -> done.fail _.prettify(err)

    it 'should use a previous productType if it is missing when doing sku update', (done) ->
      csv =
        """
        productType,sku
        foo,123
        bar,234
        ,345
        foo,456
        """
      @validator.parse csv
      .then (parsed) =>
        @validator.updateVariantsOnly = true
        @validator.buildProducts parsed.data
        expect(@validator.errors.length).toBe 0
        expect(_.size(@validator.rawProducts)).toBe 4
        # Row ',345' inherits 'bar' from the preceding row.
        expect(@validator.rawProducts[2].master).toEqual ["bar", "345"]
        done()
      .catch (err) -> done.fail _.prettify(err)

  describe '#valProduct', ->
    # Disabled: no assertions yet — only checks valProduct does not throw.
    xit 'should return no error', (done) ->
      csv =
        """
        productType,name,variantId
        foo,bar,bla
        """
      @validator.parse csv
      .then (parsed) =>
        @validator.valProduct parsed.data
        done()
      .catch (err) -> done.fail _.prettify(err)

  describe '#validateOffline', ->
    it 'should return no error', (done) ->
      csv =
        """
        productType,name,variantId
        foo,bar,1
        """
      @validator.parse csv
      .then (parsed) =>
        @validator.validateOffline parsed.data
        expect(@validator.errors).toEqual []
        done()
      .catch (err) -> done.fail _.prettify(err)
261 |
--------------------------------------------------------------------------------