├── .gitignore ├── README.md ├── automations └── copy_user_to_tenant.md ├── data └── consumersales │ ├── AggSales.qvd │ ├── Basket.qvd │ ├── BudgetPeriod.qvd │ ├── Channel.qvd │ ├── Consumer Sales.qvf │ ├── CustomerAddress.qvd │ ├── CustomerMapCoordinates.qvd │ ├── CustomerType.qvd │ ├── Division.qvd │ ├── FiscalRank.qvd │ ├── ItemBranch.qvd │ ├── ItemMaster.qvd │ ├── MasterPlanning.qvd │ ├── OrderDates1.qvd │ ├── ProductGroup.qvd │ ├── ProductSubGroup.qvd │ ├── Regional.qvd │ ├── SalesHeader.qvd │ ├── SalesRep.qvd │ ├── Salesdecile-1.qvd │ ├── ShipToAddress2.qvd │ ├── ZipData.qvd │ └── maptype.qvd ├── examples ├── qcs_deployment │ ├── clean_tenant.js │ ├── configure_tenant.js │ ├── create_jwt.js │ ├── create_jwt_client_managed.js │ ├── create_tenant.js │ ├── deploy_content.js │ ├── interactive_login.html │ ├── jwt_timing_tests.js │ ├── package-lock.json │ ├── package.json │ ├── qct_config_template.json │ └── resources │ │ ├── Template_Finance.qvf │ │ └── Transactions.txt ├── qs_encryption_qvd_performance │ ├── 00_Main.qvs │ ├── 01_Config.qvs │ ├── 02_sTimerEnd().qvs │ ├── 03_TimerLog & Start.qvs │ ├── 04_Badges.qvs │ ├── 05_Post History.qvs │ ├── 06_Posts.qvs │ ├── 07_Users.qvs │ ├── 08_Store Logging.qvs │ ├── 09_exit script.qvs │ └── Encrypted QVD Performance.qvf ├── qs_flexible_pivot.qvf ├── qs_variable_load │ ├── MyVariables.xlsx │ └── qs_variable_load.qvf └── recno_rowno_speed.qvf └── snippets ├── Server Disk Space v1.qvf ├── bat_DriveSpace.bat ├── bat_ServiceCheck.bat ├── node_gen_date_range.js ├── q_all_fields.md ├── q_colours.md ├── q_gitignore ├── q_groupby_performance.md ├── q_load_dynamic_date_fields.md ├── q_random_data.md ├── q_variable_creator.md ├── q_version_tab.qvs ├── qcs_audit_delete_data_connections.md ├── qcs_create_user_assign_license.md ├── qcs_deleteAllGroups.md ├── qcs_exporting_content.md ├── qcs_genericLink.md ├── qcs_monitor_reload_task_disabled.md ├── qcs_qpo_snippets.md ├── qcs_qvds_from_qvf.md ├── qcs_reloadTrigger.md ├── qcs_resource_update_owner_space.md ├── qcs_space_delete_files.md ├── qs-geoanalytics-eastings-northings.md ├── qs_attachedfiles.md ├── qs_backup_node.ps1 ├── qs_enumerations.md ├── qs_recursive_qvd_index.qvs ├── qs_repository_explorer.qvs ├── qs_rest_qrs_api.md ├── qs_section_access_search.md ├── qs_task_status.qvs └── qs_user_sessions.md /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | .pnpm-debug.log* 9 | 10 | # Diagnostic reports (https://nodejs.org/api/report.html) 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 | 13 | # Runtime data 14 | pids 15 | *.pid 16 | *.seed 17 | *.pid.lock 18 | 19 | # Directory for instrumented libs generated by jscoverage/JSCover 20 | lib-cov 21 | 22 | # Coverage directory used by tools like istanbul 23 | coverage 24 | *.lcov 25 | 26 | # nyc test coverage 27 | .nyc_output 28 | 29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 30 | .grunt 31 | 32 | # Bower dependency directory (https://bower.io/) 33 | bower_components 34 | 35 | # node-waf configuration 36 | .lock-wscript 37 | 38 | # Compiled binary addons (https://nodejs.org/api/addons.html) 39 | build/Release 40 | 41 | # Dependency directories 42 | node_modules/ 43 | jspm_packages/ 44 | 45 | # Snowpack dependency directory (https://snowpack.dev/) 46 | web_modules/ 47 | 48 | # TypeScript cache 49 | *.tsbuildinfo 50 | 51 | # Optional npm cache directory 52 | .npm 53 | 54 | # Optional eslint cache 55 | .eslintcache 56 | 57 | # Optional stylelint cache 58 | .stylelintcache 59 | 60 | # Microbundle cache 61 | .rpt2_cache/ 62 | .rts2_cache_cjs/ 63 | .rts2_cache_es/ 64 | .rts2_cache_umd/ 65 | 66 | # Optional REPL history 67 | .node_repl_history 68 | 69 | # Output of 'npm pack' 70 | *.tgz 71 | 72 | # Yarn Integrity file 73 | .yarn-integrity 74 | 75 | # dotenv environment variable files 76 | .env 77 | .env.development.local 78 | .env.test.local 79 | .env.production.local 80 | .env.local 81 | 82 | # parcel-bundler cache (https://parceljs.org/) 83 | .cache 84 | .parcel-cache 85 | 86 | # Next.js build output 87 | .next 88 | out 89 | 90 | # Nuxt.js build / generate output 91 | .nuxt 92 | dist 93 | 94 | # Gatsby files 95 | .cache/ 96 | # Comment in the public line in if your project uses Gatsby and not Next.js 97 | # https://nextjs.org/blog/next-9-1#public-directory-support 98 | # public 99 | 100 | # vuepress build output 101 | .vuepress/dist 102 | 103 | # vuepress v2.x temp and cache directory 104 | .temp 105 | .cache 106 | 107 | # Docusaurus cache and generated files 108 | .docusaurus 109 | 110 | # Serverless directories 111 | .serverless/ 112 | 113 | # FuseBox cache 114 | .fusebox/ 115 | 116 | # DynamoDB Local files 117 | .dynamodb/ 118 | 119 | # TernJS port file 120 | .tern-port 121 | 122 | # Stores VSCode versions used for testing VSCode extensions 123 | .vscode-test 124 | 125 | # yarn v2 126 | .yarn/cache 127 | .yarn/unplugged 128 | .yarn/build-state.yml 129 | .yarn/install-state.gz 130 | .pnp.* 131 | 132 | # Remove configs 133 | examples/qcs_deployment/qct_config.json 134 | examples/qcs_deployment/certs/ -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # qlik 2 | 3 | A bucket for snippets and examples that don't have their own repository. 4 | -------------------------------------------------------------------------------- /automations/copy_user_to_tenant.md: -------------------------------------------------------------------------------- 1 | ## Copy user to tenant 2 | 3 | Copies a user from one tenant to another, searched by email address 4 | 5 | ```JSON 6 | {"blocks":[{"id":"4CEA1436-BBF4-4E4A-AD72-C1CA041EFAE5","type":"StartBlock","disabled":false,"name":"Start","displayName":"Start","comment":"","childId":"8F88DA32-D889-4364-9320-C3C1CFC4E2AC","inputs":[{"id":"run_mode","value":"manual","type":"select","structure":{}}],"settings":[],"collapsed":[{"name":"loop","isCollapsed":false}],"x":0,"y":0},{"id":"CD3186BB-FE18-4C9B-8FC4-77D6D496A8AD","type":"EndpointBlock","disabled":false,"name":"createTenant","displayName":"Qlik Platform Operations - Create Tenant","comment":"","childId":"E8812B96-5A4E-4AD8-903A-38FE2937BB8B","inputs":[{"id":"349488c0-98c7-11ed-a36c-d756b982108d","value":"eu","type":"string","structure":[]},{"id":"804e0d80-eaff-11ec-9d3c-27c65d22b2cd","value":"{$.GetLicenseOverview.licenseKey}","type":"string","structure":[]}],"settings":[{"id":"datasource","value":null,"type":"select","structure":[]},{"id":"blendr_on_error","value":"stop","type":"select","structure":[]},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":573,"y":431,"datasourcetype_guid":"c7e48240-e0f2-11ec-ada1-d5ef75014b77","endpoint_guid":"dda61e00-e0f2-11ec-a33b-895d4cdc49ef","endpoint_role":"create"},{"id":"8F88DA32-D889-4364-9320-C3C1CFC4E2AC","type":"SnippetBlock","disabled":false,"name":"GetTenantNameAndRegion","displayName":"Qlik Platform Operations - Get Tenant Name And Region","comment":"","childId":"3B80F45B-164E-4E1A-AD7B-EB1356C65359","inputs":[{"id":"575d1740-b1e2-11ed-958a-598edfec33b8","value":"orchestration.eu.qlikcloud.com","type":"string","structure":[]}],"settings":[{"id":"datasource","value":null,"type":"select","structure":[]},{"id":"blendr_on_error","value":"stop","type":"select","structure":[]},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":-246,"y":96,"datasourcetype_guid":"c7e48240-e0f2-11ec-ada1-d5ef75014b77","snippet_guid":"bd5c1ce0-ad14-11ed-83f6-1d42e53790dd"},{"id":"3B80F45B-164E-4E1A-AD7B-EB1356C65359","type":"EndpointBlock","disabled":false,"name":"GetLicenseOverview","displayName":"Qlik Platform Operations - Get License Overview","comment":"","childId":"E62006ED-8248-4106-90CB-50E6510DD8D9","inputs":[{"id":"6ee8e9c0-9d78-11ed-9bc8-9be121ac3db0","value":"{$.GetTenantNameAndRegion}","type":"string","structure":[]}],"settings":[{"id":"datasource","value":null,"type":"select","structure":[]},{"id":"blendr_on_error","value":"stop","type":"select","structure":[]},{"id":"cache","value":"0","type":"select","structure":[]},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":-329,"y":213,"datasourcetype_guid":"c7e48240-e0f2-11ec-ada1-d5ef75014b77","endpoint_guid":"6ea9fa20-9d78-11ed-a228-a3d1b7dd2e0f","endpoint_role":"get"},{"id":"E62006ED-8248-4106-90CB-50E6510DD8D9","type":"EndpointBlock","disabled":false,"name":"listUsers","displayName":"Qlik Platform Operations - List Users","comment":"","childId":"CD3186BB-FE18-4C9B-8FC4-77D6D496A8AD","inputs":[{"id":"447b9eb0-7634-11ed-a9df-a788128e422e","value":"{$.GetTenantNameAndRegion}","type":"string","structure":[]},{"id":"eb56ff20-9ca8-11ed-82cb-752b6b37b1a7","value":"email eq \"email.address@email.com\"","type":"string","structure":[]}],"settings":[{"id":"datasource","value":null,"type":"select","structure":[]},{"id":"maxitemcount","value":"","type":"string","structure":[]},{"id":"blendr_on_error","value":"stop","type":"select","structure":[]},{"id":"cache","value":"0","type":"select","structure":[]},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":-346,"y":87,"datasourcetype_guid":"c7e48240-e0f2-11ec-ada1-d5ef75014b77","endpoint_guid":"445157c0-7634-11ed-9b40-9720b0f37135","endpoint_role":"list"},{"id":"763B3844-BE30-4D39-AA37-861CE293C44E","type":"EndpointBlock","disabled":false,"name":"listRoles","displayName":"Qlik Platform Operations - List Roles","comment":"","childId":"28F15612-088B-4373-9F27-B0630F206056","inputs":[{"id":"bae46270-b76f-11ed-a52b-d71383a5b940","value":"{$.GetTenantNameAndRegion2}","type":"string","structure":[]},{"id":"35bbb150-b77a-11ed-9a99-9b0cd6b2a9ab","value":"name eq \"TenantAdmin\"","type":"string","structure":[]}],"settings":[{"id":"datasource","value":null,"type":"select","structure":[]},{"id":"maxitemcount","value":"","type":"string","structure":[]},{"id":"blendr_on_error","value":"stop","type":"select","structure":[]},{"id":"cache","value":"0","type":"select","structure":[]},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":-341,"y":93,"datasourcetype_guid":"c7e48240-e0f2-11ec-ada1-d5ef75014b77","endpoint_guid":"ba493570-b76f-11ed-b6ab-6fd465c15105","endpoint_role":"list"},{"id":"28F15612-088B-4373-9F27-B0630F206056","type":"EndpointBlock","disabled":false,"name":"createUser","displayName":"Qlik Platform Operations - Create User","comment":"","childId":"86A71FBE-570D-40FA-8A2C-4BFD8C057454","inputs":[{"id":"6c07a470-44c9-11ed-81ec-2dfd98774628","value":"{$.GetTenantNameAndRegion2}","type":"string","structure":[]},{"id":"6beaed10-44c9-11ed-9f5a-2de80bbba99e","value":"{$.listUsers[0].name}","type":"string","structure":[]},{"id":"ca85a980-b9c0-11ed-823f-0315a7948c75","value":"{$.listUsers[0].email}","type":"string","structure":[]},{"id":"4410c760-44ca-11ed-b584-69b354d9bb1f","value":"{$.listUsers[0].subject}","type":"string","structure":[]},{"id":"32d487a0-44ca-11ed-842e-55c135bb046d","value":"{$.listRoles}","type":"string","structure":[]}],"settings":[{"id":"datasource","value":null,"type":"select","structure":[]},{"id":"blendr_on_error","value":"stop","type":"select","structure":[]},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":-368,"y":402,"datasourcetype_guid":"c7e48240-e0f2-11ec-ada1-d5ef75014b77","endpoint_guid":"6bd82bf0-44c9-11ed-80c7-0507c09f13f9","endpoint_role":"create"},{"id":"E8812B96-5A4E-4AD8-903A-38FE2937BB8B","type":"SnippetBlock","disabled":false,"name":"GetTenantNameAndRegion2","displayName":"Qlik Platform Operations - Get Tenant Name And Region 2","comment":"","childId":"763B3844-BE30-4D39-AA37-861CE293C44E","inputs":[{"id":"575d1740-b1e2-11ed-958a-598edfec33b8","value":"{$.createTenant.hostnames[0]}","type":"string","structure":[]}],"settings":[{"id":"datasource","value":null,"type":"select","structure":[]},{"id":"blendr_on_error","value":"stop","type":"select","structure":[]},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":-338,"y":391,"datasourcetype_guid":"c7e48240-e0f2-11ec-ada1-d5ef75014b77","snippet_guid":"bd5c1ce0-ad14-11ed-83f6-1d42e53790dd"},{"id":"86A71FBE-570D-40FA-8A2C-4BFD8C057454","type":"ShowBlock","disabled":false,"name":"output","displayName":"Output","comment":"","childId":null,"inputs":[{"id":"input","value":"## Created tenant and added user\n\nTenant: [{$.createTenant.hostnames[0]}](https://{$.createTenant.hostnames[0]})\nUser: { $.createUser.name } / { $.createUser.email } / { $.createUser.subject }","type":"string","structure":[]}],"settings":[{"id":"display_mode","value":"add","type":"select","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":408,"y":397}],"variables":[]} 7 | ``` -------------------------------------------------------------------------------- /data/consumersales/AggSales.qvd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/data/consumersales/AggSales.qvd -------------------------------------------------------------------------------- /data/consumersales/Basket.qvd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/data/consumersales/Basket.qvd -------------------------------------------------------------------------------- /data/consumersales/BudgetPeriod.qvd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/data/consumersales/BudgetPeriod.qvd -------------------------------------------------------------------------------- /data/consumersales/Channel.qvd: -------------------------------------------------------------------------------- 1 | 2 | 3 | 50667 4 | fc09b392-9e39-4cbc-a63d-924862edddd5 5 | 2023-04-13 18:39:25 6 | 7 | 8 | -1 9 | 10 | Channel 11 | 12 | 13 | Channel Name 14 | 0 15 | 3 16 | 0 17 | 18 | UNKNOWN 19 | 0 20 | 0 21 | 22 | 23 | 24 | 25 | 7 26 | 0 27 | 71 28 | 29 | 30 | $ascii 31 | $text 32 | 33 | 34 | 35 | Channel 36 | 3 37 | 5 38 | 0 39 | 40 | UNKNOWN 41 | 0 42 | 0 43 | 44 | 45 | 46 | 47 | 8 48 | 71 49 | 33 50 | 51 | 52 | $ascii 53 | $text 54 | $key 55 | 56 | 57 | 58 | 59 | 1 60 | 8 61 | 104 62 | 8 63 | 64 | 65 | 66 | 67 | 68 | 69 | CatalogDirectInternetHospitalGovernmentDistributionRetailC10C9C1C2C3C6C7C8#,5> -------------------------------------------------------------------------------- /data/consumersales/Consumer Sales.qvf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/data/consumersales/Consumer Sales.qvf -------------------------------------------------------------------------------- /data/consumersales/CustomerAddress.qvd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/data/consumersales/CustomerAddress.qvd -------------------------------------------------------------------------------- /data/consumersales/CustomerMapCoordinates.qvd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/data/consumersales/CustomerMapCoordinates.qvd -------------------------------------------------------------------------------- /data/consumersales/CustomerType.qvd: -------------------------------------------------------------------------------- 1 | 2 | 3 | 50667 4 | fc09b392-9e39-4cbc-a63d-924862edddd5 5 | 2023-04-13 18:39:31 6 | 7 | 8 | -1 9 | 10 | CustomerType 11 | 12 | 13 | Customer Type Name 14 | 0 15 | 2 16 | 0 17 | 18 | UNKNOWN 19 | 0 20 | 0 21 | 22 | 23 | 24 | 25 | 3 26 | 0 27 | 35 28 | 29 | 30 | $ascii 31 | $text 32 | 33 | 34 | 35 | Customer Type 36 | 2 37 | 6 38 | 0 39 | 40 | UNKNOWN 41 | 0 42 | 0 43 | 44 | 45 | 46 | 47 | 3 48 | 35 49 | 12 50 | 51 | 52 | $ascii 53 | $text 54 | $key 55 | 56 | 57 | 58 | 59 | 1 60 | 3 61 | 47 62 | 3 63 | 64 | 65 | 66 | 67 | 68 | 69 | InternationalDomesticCanadianG1G2G3 70 | -------------------------------------------------------------------------------- /data/consumersales/Division.qvd: -------------------------------------------------------------------------------- 1 | 2 | 3 | 50667 4 | fc09b392-9e39-4cbc-a63d-924862edddd5 5 | 2023-04-13 18:39:43 6 | 7 | 8 | -1 9 | 10 | Division 11 | 12 | 13 | Division Name 14 | 0 15 | 0 16 | 0 17 | 18 | UNKNOWN 19 | 0 20 | 0 21 | 22 | 23 | 24 | 25 | 1 26 | 0 27 | 10 28 | 29 | 30 | $ascii 31 | $text 32 | 33 | 34 | 35 | Division 36 | 0 37 | 8 38 | 0 39 | 40 | UNKNOWN 41 | 0 42 | 0 43 | 44 | 45 | 46 | 47 | 1 48 | 10 49 | 7 50 | 51 | 52 | $key 53 | $numeric 54 | $integer 55 | 56 | 57 | 58 | 59 | 1 60 | 1 61 | 17 62 | 1 63 | 64 | 65 | 66 | 67 | 68 | 69 | Domestic2 -------------------------------------------------------------------------------- /data/consumersales/FiscalRank.qvd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/data/consumersales/FiscalRank.qvd -------------------------------------------------------------------------------- /data/consumersales/ItemBranch.qvd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/data/consumersales/ItemBranch.qvd -------------------------------------------------------------------------------- /data/consumersales/ItemMaster.qvd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/data/consumersales/ItemMaster.qvd -------------------------------------------------------------------------------- /data/consumersales/MasterPlanning.qvd: -------------------------------------------------------------------------------- 1 | 2 | 3 | 50667 4 | fc09b392-9e39-4cbc-a63d-924862edddd5 5 | 2023-04-13 18:39:26 6 | 7 | 8 | -1 9 | 10 | MasterPlanning 11 | 12 | 13 | Master Planning Family Desc 14 | 0 15 | 0 16 | 0 17 | 18 | UNKNOWN 19 | 0 20 | 0 21 | 22 | 23 | 24 | 25 | 1 26 | 0 27 | 21 28 | 29 | 30 | $ascii 31 | $text 32 | 33 | 34 | 35 | Master Planning Family 36 | 0 37 | 8 38 | 0 39 | 40 | UNKNOWN 41 | 0 42 | 0 43 | 44 | 45 | 46 | 47 | 1 48 | 21 49 | 5 50 | 51 | 52 | $ascii 53 | $text 54 | 55 | 56 | 57 | 58 | 1 59 | 1 60 | 26 61 | 1 62 | 63 | 64 | 65 | 66 | 67 | 68 | Print on Price ListP01 -------------------------------------------------------------------------------- /data/consumersales/OrderDates1.qvd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/data/consumersales/OrderDates1.qvd -------------------------------------------------------------------------------- /data/consumersales/ProductGroup.qvd: -------------------------------------------------------------------------------- 1 | 2 | 3 | 50667 4 | fc09b392-9e39-4cbc-a63d-924862edddd5 5 | 2023-04-13 18:39:17 6 | 7 | 8 | -1 9 | 10 | ProductGroup 11 | 12 | 13 | Product Group Desc 14 | 0 15 | 8 16 | 0 17 | 18 | UNKNOWN 19 | 0 20 | 0 21 | 22 | 23 | 24 | 25 | 17 26 | 0 27 | 200 28 | 29 | 30 | $ascii 31 | $text 32 | 33 | 34 | 35 | Product Group 36 | 8 37 | 8 38 | 0 39 | 40 | UNKNOWN 41 | 0 42 | 0 43 | 44 | 45 | 46 | 47 | 17 48 | 200 49 | 129 50 | 51 | 52 | $key 53 | $numeric 54 | $integer 55 | 56 | 57 | 58 | 59 | 2 60 | 17 61 | 329 62 | 34 63 | 64 | 65 | 66 | 67 | 68 | 69 | Alcoholic BeveragesBaked GoodsBeveragesBreakfast FoodsCanned FoodsDairyDeliMeatProduceSeafoodSnack FoodsSnacksBaking GoodsCanned ProductsEggsFrozen FoodsStarchy Foods12456 70 | 10 11161920212237 12 1323 71 | 72 |  -------------------------------------------------------------------------------- /data/consumersales/ProductSubGroup.qvd: -------------------------------------------------------------------------------- 1 | 2 | 3 | 50667 4 | fc09b392-9e39-4cbc-a63d-924862edddd5 5 | 2023-04-13 18:39:18 6 | 7 | 8 | -1 9 | 10 | ProductSubGroup 11 | 12 | 13 | Product Sub Group Desc 14 | 0 15 | 8 16 | 0 17 | 18 | UNKNOWN 19 | 0 20 | 0 21 | 22 | 23 | 24 | 25 | 70 26 | 0 27 | 708 28 | 29 | 30 | $ascii 31 | $text 32 | 33 | 34 | 35 | Product Sub Group 36 | 8 37 | 8 38 | 0 39 | 40 | UNKNOWN 41 | 0 42 | 0 43 | 44 | 45 | 46 | 47 | 70 48 | 708 49 | 555 50 | 51 | 52 | $key 53 | $numeric 54 | $integer 55 | 56 | 57 | 58 | 59 | 2 60 | 70 61 | 1263 62 | 140 63 | 64 | 65 | 66 | 67 | 68 | 69 | NutsShellfishCanned FruitSpicesPastaYogurtCoffeeDeli MeatsIce CreamTV DinnerCheeseChipsFresh VegetablesSour CreamCottage CheeseDeli SaladsDried MeatSodaFresh FishBagelsMuffinsSliced BreadPancake MixPancakesJuiceJellyJamPreservesWafflesCerealChocolate CandyGumHard CandyBeerWineCookiesPretzelsSaucesCooking OilSugarChocolateFlavored DrinksPeanut ButterPopcornRiceSoupFrozen VegetablesCanned VegetablesFrench FriesPizzaHamburgerEggsTofuMilkFresh ChickenHot DogsCrackersDipsDonutsBolognaTunaShrimpAnchoviesClamsOystersSardinesFresh FruitFrozen ChickenDried FruitPopsicles123456Z908 9 70 | 10 11 12=6114151617192425262728293031 32!33"34#35$36%37&38+43,44-45.46048149250351452553654957:58<60>62?63@64A65B66C67L76M77Q81R82S83T84[91]93^94_95`96a97b98c99d100m109n110 71 | 72 |  !!""##$$%%&&''(())**++,,--..//00112233445566778899::;;<<==>>??@@AABBCCDDEE -------------------------------------------------------------------------------- /data/consumersales/Regional.qvd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/data/consumersales/Regional.qvd -------------------------------------------------------------------------------- /data/consumersales/SalesHeader.qvd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/data/consumersales/SalesHeader.qvd -------------------------------------------------------------------------------- /data/consumersales/SalesRep.qvd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/data/consumersales/SalesRep.qvd -------------------------------------------------------------------------------- /data/consumersales/Salesdecile-1.qvd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/data/consumersales/Salesdecile-1.qvd -------------------------------------------------------------------------------- /data/consumersales/ShipToAddress2.qvd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/data/consumersales/ShipToAddress2.qvd -------------------------------------------------------------------------------- /data/consumersales/ZipData.qvd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/data/consumersales/ZipData.qvd -------------------------------------------------------------------------------- /data/consumersales/maptype.qvd: -------------------------------------------------------------------------------- 1 | 2 | 3 | 50667 4 | fc09b392-9e39-4cbc-a63d-924862edddd5 5 | 2023-04-13 18:39:44 6 | 7 | 8 | -1 9 | 10 | maptype 11 | 12 | 13 | maptype 14 | 0 15 | 8 16 | 0 17 | 18 | UNKNOWN 19 | 0 20 | 0 21 | 22 | 23 | 24 | 25 | 5 26 | 0 27 | 45 28 | 29 | 30 | $ascii 31 | $text 32 | 33 | 34 | 35 | 36 | 1 37 | 5 38 | 45 39 | 5 40 | 41 | 42 | 43 | 44 | 45 | 46 | roadmapmobilesatelliteterrainhybrid -------------------------------------------------------------------------------- /examples/qcs_deployment/clean_tenant.js: -------------------------------------------------------------------------------- 1 | // Bring in config file with values for the qcs const below, template is in the package 2 | const config = require('./qct_config.json'); 3 | 4 | // Configure for your Qlik Cloud deployment 5 | const qcs = { 6 | targetTenant: 'name', // the tenant that you wish to configure, e.g. name for name.eu.qlikcloud.com 7 | region: config.region, // Qlik Cloud region URL e.g. name.eu.qlikcloud.com becomes eu.qlikcloud.com 8 | regionClientId: config.regionClientId, // our oauth client id, generated in MyQlik, e.g. '12345678912345678' 9 | regionClientSecret: config.regionClientSecret // our oauth client secret, generated in MyQlik, e.g. 'eac0dswfec23ewfweweg2g2vsebw77e1cce' 10 | } 11 | 12 | // This example uses just https, you may want to use a package 13 | const https = require('https'); 14 | 15 | 16 | // Prepare our oauth cred package for future auth requests 17 | const oauthClientData = JSON.stringify({ 18 | client_id: qcs.regionClientId, 19 | client_secret: qcs.regionClientSecret, 20 | grant_type: "client_credentials" 21 | }) 22 | 23 | // Prepare target tenant URL 24 | const targetTenantUrlFull = 'https://' + qcs.targetTenant + '.' + qcs.region; 25 | 26 | 27 | // Function to handle our requests 28 | async function httpsFetch(url, settings) { 29 | try { 30 | const fetchResponse = await fetch(url, settings); 31 | 32 | try { 33 | const data = await fetchResponse.json(); 34 | } catch { 35 | const data = await fetchResponse; 36 | } 37 | 38 | 39 | return data; 40 | 41 | } catch (e) { 42 | return e; 43 | } 44 | } 45 | 46 | async function cleanTenant() { 47 | 48 | // *************************** 49 | // 1 - Get access token for target tenant, for use will all future requests 50 | var data = await httpsFetch(targetTenantUrlFull + '/oauth/token', { 51 | method: 'POST', 52 | headers: { 53 | 'Content-Type': 'application/json' 54 | }, 55 | body: oauthClientData 56 | }); 57 | 58 | // For the demo, log out the response 59 | console.log('Output from step 1: ', data); 60 | // Save access token for next request 61 | let targetTenantToken = data.access_token; 62 | 63 | // 2 - Get list of apps and delete them 64 | var stillPages = true; 65 | var queryUrl = targetTenantUrlFull + '/api/v1/items?resourceType=app,qvapp&limit=100&noActions=true'; 66 | var pageIter = 0; 67 | var appIter = 0; 68 | while (stillPages) { 69 | var pageIter = pageIter + 1; 70 | var data = await httpsFetch(queryUrl, { 71 | method: 'GET', 72 | headers: { 73 | 'Content-Type': 'application/json', 74 | 'Authorization': 'Bearer ' + targetTenantToken 75 | } 76 | }); 77 | 78 | // For the demo, log out the response 79 | console.log('Output from step 2.' + pageIter + ': ', data); 80 | 81 | // Pagination checks 82 | if (data.links.next == undefined) { 83 | var stillPages = false; 84 | } else { 85 | var queryUrl = data.links.next.href; 86 | } 87 | 88 | // Loop through returned apps and call delete endpoint 89 | for (let appKey in data.data) { 90 | var appIter = appIter + 1; 91 | var appId = data.data[appKey].resourceId; 92 | 93 | var data = await httpsFetch(targetTenantUrlFull + '/api/v1/apps/' + appId, { 94 | method: 'DELETE', 95 | headers: { 96 | 'Content-Type': 'application/json', 97 | 'Authorization': 'Bearer ' + targetTenantToken 98 | } 99 | }); 100 | 101 | // For the demo, log out the response 102 | console.log('Output from step 2.' + pageIter + '/'+ appIter + ': ', data); 103 | } 104 | 105 | } 106 | 107 | // 3 - Get list of spaces and delete them 108 | var stillPages = true; 109 | var queryUrl = targetTenantUrlFull + '/api/v1/spaces?limit=100'; 110 | var pageIter = 0; 111 | var spaceIter = 0; 112 | while (stillPages) { 113 | var pageIter = pageIter + 1; 114 | var data = await httpsFetch(queryUrl, { 115 | method: 'GET', 116 | headers: { 117 | 'Content-Type': 'application/json', 118 | 'Authorization': 'Bearer ' + targetTenantToken 119 | } 120 | }); 121 | console.log(queryUrl); 122 | // For the demo, log out the response 123 | console.log('Output from step 3.' + spaceIter + ': ', data); 124 | 125 | // Pagination checks 126 | if (data.links.next == undefined) { 127 | var stillPages = false; 128 | } else { 129 | var queryUrl = data.links.next.href; 130 | } 131 | 132 | // Loop through returned spaces and call delete endpoint 133 | for (let spaceKey in data.data) { 134 | var spaceIter = spaceIter + 1; 135 | var spaceId = data.data[spaceKey].id; 136 | 137 | var data = await httpsFetch(targetTenantUrlFull + '/api/v1/spaces/' + spaceId, { 138 | method: 'DELETE', 139 | headers: { 140 | 'Content-Type': 'application/json', 141 | 'Authorization': 'Bearer ' + targetTenantToken 142 | } 143 | }); 144 | 145 | // For the demo, log out the response 146 | console.log('Output from step 3.' + pageIter + '/'+ spaceIter + ': ', data); 147 | } 148 | 149 | } 150 | 151 | 152 | }; 153 | 154 | // Let's deploy content 155 | cleanTenant(); -------------------------------------------------------------------------------- /examples/qcs_deployment/configure_tenant.js: -------------------------------------------------------------------------------- 1 | // Bring in config file with values for the qcs const below, template is in the package 2 | const config = require('./qct_config.json'); 3 | 4 | // This example uses just https, you may want to use a package for requests 5 | const https = require('https') 6 | 7 | // Required for jwt generation 8 | const fs = require('fs'); 9 | const uid = require('uid-safe'); 10 | const jwt = require('jsonwebtoken'); 11 | 12 | // Configure for your Qlik Cloud deployment 13 | const qcs = { 14 | targetTenant: 'name', // the tenant that you wish to configure, e.g. name for name.eu.qlikcloud.com 15 | region: config.region, // Qlik Cloud region URL e.g. name.eu.qlikcloud.com becomes eu.qlikcloud.com 16 | regionClientId: config.regionClientId, // our oauth client id, generated in MyQlik, e.g. '12345678912345678' 17 | regionClientSecret: config.regionClientSecret // our oauth client secret, generated in MyQlik, e.g. 'eac0dswfec23ewfweweg2g2vsebw77e1cce' 18 | } 19 | 20 | // Configure tenant settings 21 | const tenantSettings = { 22 | autoCreateGroups: true, 23 | autoAssignProfessional: false, 24 | autoAssignAnalyzer: false 25 | } 26 | 27 | // Config for IdP 28 | const idpSettings = { 29 | description: "Auth for my app or portal", 30 | } 31 | 32 | // JWT: Configure JWT IdP payload with the groups we want to seed 33 | const jwtPayload = { 34 | jti: uid.sync(32), // 32 bytes random string 35 | sub: 'mydomain\\yournamehere', 36 | subType: 'user', 37 | name: 'Your Name Here', 38 | email: 'yournamehere@example.com', 39 | email_verified: true, 40 | groups: ['Admin', 'Finance', 'Marketing', 'Sales'], 41 | }; 42 | 43 | // JWT: Identify keys 44 | const jwtPrivateKey = fs.readFileSync('./certs/nameap_privatekey.pem'); 45 | const jwtPublicKey = fs.readFileSync('./certs/nameap_publickey.cer'); 46 | 47 | // JWT: Provide signing options 48 | // kid and issuer have to match with the IDP config and the 49 | // audience has to be qlik.api/jwt-login-session 50 | const jwtSigningOptions = { 51 | keyid: 'myapporportal', 52 | algorithm: 'RS256', 53 | issuer: 'nameeustage', 54 | expiresIn: '30m', 55 | notBefore: '0s', 56 | audience: 'qlik.api/login/jwt-session' 57 | }; 58 | 59 | 60 | // Prepare our oauth cred package for future auth requests 61 | const oauthClientData = JSON.stringify({ 62 | client_id: qcs.regionClientId, 63 | client_secret: qcs.regionClientSecret, 64 | grant_type: "client_credentials" 65 | }) 66 | 67 | // Prepare target tenant URL 68 | const targetTenantUrl = qcs.targetTenant + '.' + qcs.region; 69 | 70 | // Function to handle our requests 71 | function httpsRequest(params, postBody) { 72 | return new Promise(function (resolve, reject) { 73 | var req = https.request(params, function (res) { 74 | 75 | // There is no error handling on HTTP response codes here 76 | // In your application, you should handle these responses in either the request handler or your calls 77 | //console.log(res); 78 | 79 | var body = []; 80 | res.on('data', function (chunk) { 81 | body.push(chunk); 82 | }); 83 | res.on('end', function () { 84 | try { 85 | body = JSON.parse(Buffer.concat(body).toString()); 86 | } catch (e) { 87 | // Here to catch empty responses and play them back nicely 88 | body = 'Status ' + res.statusCode + 'r/n/' + body; 89 | } 90 | resolve(body); 91 | }); 92 | }); 93 | req.on('error', function (err) { 94 | reject(err); 95 | }); 96 | if (postBody) { 97 | req.write(postBody); 98 | } 99 | req.end(); 100 | }); 101 | } 102 | 103 | async function configureTenant() { 104 | 105 | // *************************** 106 | // 1 - Get access token for target tenant, for use will all future requests 107 | var data = await httpsRequest({ 108 | hostname: targetTenantUrl, 109 | port: 443, 110 | path: '/oauth/token', 111 | method: 'POST', 112 | headers: { 113 | 'Content-Type': 'application/json' 114 | } 115 | }, oauthClientData); 116 | // For the demo, log out the response 117 | console.log('Output from step 1: ' + JSON.stringify(data)); 118 | // Save access token for next request 119 | let targetTenantToken = data.access_token; 120 | 121 | // *************************** 122 | // 2 - Configure autoCreateGroups 123 | var data = await httpsRequest({ 124 | hostname: targetTenantUrl, 125 | port: 443, 126 | path: '/api/v1/groups/settings', 127 | method: 'PATCH', 128 | headers: { 129 | 'Content-Type': 'application/json', 130 | 'Authorization': 'Bearer ' + targetTenantToken 131 | } 132 | }, JSON.stringify([{ 133 | op: 'replace', 134 | path: '/autoCreateGroups', 135 | value: tenantSettings.autoCreateGroups 136 | }])); 137 | // For the demo, log out the response 138 | console.log('Output from step 2: ' + data); 139 | 140 | // *************************** 141 | // 3 - Configure automatic assignment of professional and analyzer licenses 142 | var data = await httpsRequest({ 143 | hostname: targetTenantUrl, 144 | port: 443, 145 | path: '/api/v1/licenses/settings', 146 | method: 'PUT', 147 | headers: { 148 | 'Content-Type': 'Application/Json', 149 | 'Authorization': 'Bearer ' + targetTenantToken 150 | } 151 | }, JSON.stringify({ 152 | autoAssignProfessional: tenantSettings.autoAssignProfessional, 153 | autoAssignAnalyzer: tenantSettings.autoAssignAnalyzer 154 | })); 155 | // For the demo, log out the response 156 | console.log('Output from step 3: ' + JSON.stringify(data)); 157 | 158 | // *************************** 159 | // 4 - Get tenant id 160 | var data = await httpsRequest({ 161 | hostname: targetTenantUrl, 162 | port: 443, 163 | path: '/api/v1/tenants', 164 | method: 'GET', 165 | headers: { 166 | 'Content-Type': 'application/json', 167 | 'Authorization': 'Bearer ' + targetTenantToken 168 | } 169 | }); 170 | // For the demo, log out the response 171 | console.log('Output from step 4: ' + JSON.stringify(data)); 172 | 173 | // Keep a copy of the tenant id, we'll need it shortly 174 | const tenantId = data.data[0].id; 175 | 176 | // *************************** 177 | // 5 - Configure JWT IdP on the tenant 178 | // Prepare IdP configuration for the JWT IdP post with the tenant ID and config 179 | const idpConfiguration = JSON.stringify({ 180 | tenantIds: [ 181 | tenantId 182 | ], 183 | provider: "external", 184 | protocol: "jwtAuth", 185 | interactive: false, 186 | active: true, 187 | description: idpSettings.description, 188 | options: { 189 | jwtLoginEnabled: true, 190 | issuer: jwtSigningOptions.issuer, 191 | staticKeys: [ 192 | { 193 | kid: jwtSigningOptions.keyid, 194 | pem: jwtPublicKey.toString() 195 | } 196 | ] 197 | } 198 | }); 199 | 200 | // For the demo, log out the IdP config we're going to send 201 | console.log('Output from step 5a: ' + idpConfiguration); 202 | 203 | // Send request to create JWT IdP 204 | var data = await httpsRequest({ 205 | hostname: targetTenantUrl, 206 | port: 443, 207 | path: '/api/v1/identity-providers', 208 | method: 'POST', 209 | headers: { 210 | 'Content-Type': 'application/json', 211 | 'Authorization': 'Bearer ' + targetTenantToken 212 | } 213 | }, idpConfiguration); 214 | 215 | // For the demo, log out the response 216 | console.log('Output from step 5b: ' + JSON.stringify(data)); 217 | 218 | // *************************** 219 | // 6 - Send our JWT request and seed groups 220 | // Built our JWT with the groups we want to seed 221 | const seedToken = jwt.sign(jwtPayload, jwtPrivateKey, jwtSigningOptions); 222 | 223 | // For the demo, log out the token we're going to use to seed groups 224 | console.log('Output from step 6a: ' + seedToken); 225 | 226 | // Send the request 227 | var data = await httpsRequest({ 228 | hostname: targetTenantUrl, 229 | port: 443, 230 | path: '/login/jwt-session', 231 | method: 'POST', 232 | headers: { 233 | 'Content-Type': 'application/json', 234 | 'Authorization': 'Bearer ' + seedToken 235 | } 236 | }); 237 | 238 | // For the demo, log out the response 239 | console.log('Output from step 6b: ' + JSON.stringify(data)); 240 | 241 | // *************************** 242 | // 7 - Get groups on the tenant to give us way of verifying that they have been created 243 | var data = await httpsRequest({ 244 | hostname: targetTenantUrl, 245 | port: 443, 246 | path: '/api/v1/groups', 247 | method: 'GET', 248 | headers: { 249 | 'Content-Type': 'application/json', 250 | 'Authorization': 'Bearer ' + targetTenantToken 251 | } 252 | }); 253 | 254 | // For the demo, log out the response 255 | console.log('Output from step 7: ' + JSON.stringify(data)); 256 | } 257 | 258 | // Go (con)figure! 259 | configureTenant(); -------------------------------------------------------------------------------- /examples/qcs_deployment/create_jwt.js: -------------------------------------------------------------------------------- 1 | // This will attempt to create an IdP, then generate a JWT for that IdP for use with interactive_login.html 2 | 3 | // Bring in config file with values for the qcs const below, template is in the package 4 | const config = require('./qct_config.json'); 5 | 6 | // This example uses just https, you may want to use a package for requests 7 | const https = require('https'); 8 | 9 | // Required for jwt generation 10 | const fs = require('fs'); 11 | const uid = require('uid-safe'); 12 | const jwt = require('jsonwebtoken'); 13 | 14 | // Configure for your Qlik Cloud deployment 15 | const qcs = { 16 | targetTenant: '5ie54a4r13glxs5', // the tenant that you wish to configure, e.g. name for name.eu.qlikcloud.com 17 | region: config.region, // Qlik Cloud region URL e.g. name.eu.qlikcloud.com becomes eu.qlikcloud.com 18 | regionClientId: config.regionClientId, // our oauth client id, generated in MyQlik, e.g. '12345678912345678' 19 | regionClientSecret: config.regionClientSecret // our oauth client secret, generated in MyQlik, e.g. 'eac0dswfec23ewfweweg2g2vsebw77e1cce' 20 | } 21 | 22 | // To help with the JWT decoding step 23 | function parseJwt (token) { 24 | return JSON.parse(Buffer.from(token.split('.')[1], 'base64').toString()); 25 | } 26 | 27 | // Config for IdP 28 | const idpSettings = { 29 | description: "Auth for my app or portal", 30 | } 31 | 32 | // JWT: Configure JWT IdP payload with the groups we want to seed 33 | const jwtPayload = { 34 | jti: uid.sync(32), // 32 bytes random string 35 | sub: 'mydomain\\yournamehere', 36 | subType: 'user', 37 | name: 'Your Name Here', 38 | email: 'yournamehere@example.com', 39 | email_verified: true, 40 | groups: ['Admin'], 41 | }; 42 | 43 | // JWT: Identify keys 44 | const jwtPrivateKey = fs.readFileSync('./certs/nameap_privatekey.pem'); 45 | const jwtPublicKey = fs.readFileSync('./certs/nameap_publickey.cer'); 46 | 47 | // JWT: Provide signing options 48 | // kid and issuer have to match with the IDP config and the 49 | // audience has to be qlik.api/jwt-login-session 50 | const jwtSigningOptions = { 51 | keyid: 'myapporportal', 52 | algorithm: 'RS256', 53 | issuer: 'euqlikcloud', 54 | expiresIn: '5m', 55 | notBefore: '47s', 56 | audience: 'qlik.api/login/jwt-session' 57 | }; 58 | 59 | // Prepare our oauth cred package for future auth requests 60 | const oauthClientData = JSON.stringify({ 61 | client_id: qcs.regionClientId, 62 | client_secret: qcs.regionClientSecret, 63 | grant_type: "client_credentials" 64 | }) 65 | 66 | // Prepare target tenant URL 67 | const targetTenantUrl = qcs.targetTenant + '.' + qcs.region; 68 | 69 | // Function to handle our requests 70 | function httpsRequest(params, postBody) { 71 | return new Promise(function (resolve, reject) { 72 | var req = https.request(params, function (res) { 73 | 74 | var body = []; 75 | res.on('data', function (chunk) { 76 | body.push(chunk); 77 | }); 78 | res.on('end', function () { 79 | try { 80 | body = JSON.parse(Buffer.concat(body).toString()); 81 | } catch (e) { 82 | // Here to catch empty responses and play them back nicely 83 | body = 'Status ' + res.statusCode + 'r/n/' + body; 84 | } 85 | resolve(body); 86 | }); 87 | }); 88 | req.on('error', function (err) { 89 | reject(err); 90 | }); 91 | if (postBody) { 92 | req.write(postBody); 93 | } 94 | req.end(); 95 | }); 96 | } 97 | 98 | async function createJWT() { 99 | 100 | // *************************** 101 | // 1 - Get access token for target tenant, for use will all future requests 102 | var data = await httpsRequest({ 103 | hostname: targetTenantUrl, 104 | port: 443, 105 | path: '/oauth/token', 106 | method: 'POST', 107 | headers: { 108 | 'Content-Type': 'application/json' 109 | } 110 | }, oauthClientData); 111 | // For the demo, log out the response 112 | //console.log(Math.floor(Date.now()/1000), 'Output from step 1: ' + JSON.stringify(data)); 113 | // Save access token for next request 114 | let targetTenantToken = data.access_token; 115 | 116 | // *************************** 117 | // 2 - Get tenant id 118 | var data = await httpsRequest({ 119 | hostname: targetTenantUrl, 120 | port: 443, 121 | path: '/api/v1/tenants', 122 | method: 'GET', 123 | headers: { 124 | 'Content-Type': 'application/json', 125 | 'Authorization': 'Bearer ' + targetTenantToken 126 | } 127 | }); 128 | // For the demo, log out the response 129 | //console.log(Math.floor(Date.now()/1000), 'Output from step 2: ' + JSON.stringify(data)); 130 | 131 | // Keep a copy of the tenant id, we'll need it shortly 132 | const tenantId = data.data[0].id; 133 | 134 | const testClockToleranceSec = 47; 135 | 136 | // *************************** 137 | // 3 - Configure JWT IdP on the tenant 138 | // Prepare IdP configuration for the JWT IdP post with the tenant ID and config 139 | const idpConfiguration = JSON.stringify({ 140 | tenantIds: [ 141 | tenantId 142 | ], 143 | provider: "external", 144 | protocol: "jwtAuth", 145 | interactive: false, 146 | active: true, 147 | description: idpSettings.description, 148 | clockToleranceSec: testClockToleranceSec, 149 | options: { 150 | jwtLoginEnabled: true, 151 | issuer: jwtSigningOptions.issuer, 152 | staticKeys: [ 153 | { 154 | kid: jwtSigningOptions.keyid, 155 | pem: jwtPublicKey.toString() 156 | } 157 | ] 158 | } 159 | }); 160 | 161 | // For the demo, log out the IdP config we're going to send 162 | //console.log(Math.floor(Date.now()/1000), 'Output from step 3a: ' + idpConfiguration); 163 | 164 | // Send request to create JWT IdP 165 | var data = await httpsRequest({ 166 | hostname: targetTenantUrl, 167 | port: 443, 168 | path: '/api/v1/identity-providers', 169 | method: 'POST', 170 | headers: { 171 | 'Content-Type': 'application/json', 172 | 'Authorization': 'Bearer ' + targetTenantToken 173 | } 174 | }, idpConfiguration); 175 | 176 | // Get the JWT IdP id 177 | const idpId = data.id; 178 | 179 | // Get the created ts 180 | const idpCreated = Math.floor(new Date(data.created) / 1000); 181 | 182 | // For the demo, log out the response 183 | //console.log(Math.floor(Date.now()/1000), 'Output from step 3b, IdP created at:', idpCreated, 'with config:', JSON.stringify(data)); 184 | 185 | // *************************** 186 | // 4 - Send our JWT request and seed groups 187 | // Built our JWT with the groups we want to seed 188 | const seedToken = jwt.sign(jwtPayload, jwtPrivateKey, jwtSigningOptions); 189 | 190 | const testTimeJwtNbf = parseJwt(seedToken).nbf; 191 | 192 | // For the demo, log out the token we're going to use to seed groups 193 | console.log(Math.floor(Date.now()/1000), 'Generated JWT properties:', parseJwt(seedToken)); 194 | 195 | console.log(Math.floor(Date.now()/1000), 'Generated JWT:', seedToken,); 196 | 197 | } 198 | 199 | // Go test 200 | createJWT(); -------------------------------------------------------------------------------- /examples/qcs_deployment/create_jwt_client_managed.js: -------------------------------------------------------------------------------- 1 | // This will generate a JWT for a client-managed deployment 2 | 3 | // Required for jwt generation 4 | const fs = require('fs'); 5 | const jwt = require('jsonwebtoken'); 6 | 7 | // Creating a Qlik Sense on Windows payload. 8 | const jwtPayload = { 9 | userid: 'jwtuser', 10 | userdirectory: 'jwt', 11 | name: 'JWT User', 12 | email: 'jwt@example', 13 | groups: ['Adminstrators'], 14 | }; 15 | 16 | // JWT: Private key (pem file) 17 | const jwtPrivateKey = fs.readFileSync('./certs/qs-dev jwt/privatekey.pem'); 18 | 19 | // JWT: Provide signing options 20 | const jwtSigningOptions = { 21 | algorithm: 'RS256', 22 | expiresIn: '365d', 23 | audience: 'jwt', 24 | }; 25 | 26 | // Create the token 27 | const token = jwt.sign(jwtPayload, jwtPrivateKey, jwtSigningOptions); 28 | 29 | console.log('Generated JWT:', token); 30 | -------------------------------------------------------------------------------- /examples/qcs_deployment/create_tenant.js: -------------------------------------------------------------------------------- 1 | // Bring in config file with values for the qcs const below, template is in the package 2 | const config = require('./qct_config.json'); 3 | 4 | // Configure for your Qlik Cloud deployment 5 | const qcs = { 6 | licenseKey: config.licenseKey, // the license key for the organisation, e.g. eyJhbGciOiJFZERTQSIsImtp.... 7 | region: config.region, // Qlik Cloud region URL e.g. name.eu.qlikcloud.com becomes eu.qlikcloud.com 8 | regionClientId: config.regionClientId, // our oauth client id, generated in MyQlik, e.g. '12345678912345678' 9 | regionClientSecret: config.regionClientSecret // our oauth client secret, generated in MyQlik, e.g. 'eac0dswfec23ewfweweg2g2vsebw77e1cce' 10 | } 11 | 12 | // This example uses just https, you may want to use a package 13 | const https = require('https') 14 | 15 | // Prepare our oauth cred package for future auth requests 16 | const oauthClientData = JSON.stringify({ 17 | client_id: qcs.regionClientId, 18 | client_secret: qcs.regionClientSecret, 19 | grant_type: "client_credentials" 20 | }) 21 | 22 | // Function to handle our requests 23 | async function httpsFetch(url, settings) { 24 | try { 25 | const fetchResponse = await fetch(url, settings); 26 | const data = await fetchResponse.json(); 27 | return data; 28 | 29 | } catch (e) { 30 | return e; 31 | } 32 | } 33 | 34 | // Get an access token for a specified hostname and oauth client 35 | async function getAccessToken(hostname) { 36 | 37 | var data = await httpsFetch('https://' + hostname + '/oauth/token', { 38 | method: 'POST', 39 | headers: { 40 | 'Content-Type': 'application/json' 41 | }, 42 | body: oauthClientData 43 | }); 44 | 45 | return data; 46 | 47 | } 48 | 49 | async function createTenant() { 50 | 51 | // *************************** 52 | // 1 - Get access token for the regional register tenant, which we need to request a new tenant 53 | var data = await getAccessToken('register.' + qcs.region); 54 | 55 | // For the demo, log out the response 56 | console.log('Output from step 1: ' + JSON.stringify(data)); 57 | // Save access token for next request 58 | let registerTenantToken = data.access_token; 59 | 60 | // *************************** 61 | // 2 - Request new tenant in region 62 | var data = await httpsFetch('https://register.' + qcs.region + '/api/v1/tenants', { 63 | method: 'POST', 64 | headers: { 65 | 'Content-Type': 'application/json', 66 | 'Authorization': 'Bearer ' + registerTenantToken 67 | }, 68 | body: JSON.stringify({ 69 | licenseKey: qcs.licenseKey 70 | }) 71 | }); 72 | 73 | // For the demo, log out the response 74 | console.log('Output from step 2: ' + JSON.stringify(data)); 75 | 76 | // Save new tenant URL for next request 77 | let targetTenantUrl = data.hostnames[0]; 78 | console.log('Hostname from step 2: ' + targetTenantUrl); 79 | 80 | // *************************** 81 | // 3 - Get access token for new tenant 82 | var data = await getAccessToken(targetTenantUrl); 83 | 84 | // For the demo, log out the response 85 | console.log('Output from step 3: ' + JSON.stringify(data)); 86 | 87 | // Save target tenant access token for next request 88 | let targetTenantToken = data.access_token; 89 | 90 | 91 | // *************************** 92 | // 4 - Send a request to the new tenant (proof of life) 93 | var data = await httpsFetch('https://' + targetTenantUrl + '/api/v1/users/me', { 94 | method: 'GET', 95 | headers: { 96 | 'Content-Type': 'application/json', 97 | 'Authorization': 'Bearer ' + targetTenantToken 98 | } 99 | }); 100 | 101 | // For the demo, log out the response 102 | console.log('Output from step 4: ' + JSON.stringify(data)); 103 | 104 | }; 105 | 106 | // Create 1 tenant 107 | createTenant(); 108 | 109 | // // Go create 5 tenants 110 | // for (let i = 0; i < 5; i++) { 111 | // createTenant(); 112 | // } -------------------------------------------------------------------------------- /examples/qcs_deployment/deploy_content.js: -------------------------------------------------------------------------------- 1 | // Bring in config file with values for the qcs const below, template is in the package 2 | const config = require('./qct_config.json'); 3 | 4 | // Configure for your Qlik Cloud deployment 5 | const qcs = { 6 | targetTenant: 'name', // the tenant that you wish to configure, e.g. name for name.eu.qlikcloud.com 7 | region: config.region, // Qlik Cloud region URL e.g. name.eu.qlikcloud.com becomes eu.qlikcloud.com 8 | regionClientId: config.regionClientId, // our oauth client id, generated in MyQlik, e.g. '12345678912345678' 9 | regionClientSecret: config.regionClientSecret // our oauth client secret, generated in MyQlik, e.g. 'eac0dswfec23ewfweweg2g2vsebw77e1cce' 10 | } 11 | 12 | // This example uses just https, you may want to use a package 13 | const https = require('https'); 14 | const fs = require('fs'); 15 | 16 | // Configure which spaces to deploy 17 | let deploySpaces = [{ 18 | spaceName: 'Finance staging', 19 | spaceDesc: 'Staging area for deployment of finance applications, for the "ADMIN" user group', 20 | spaceType: 'shared', 21 | members: [ 22 | { 23 | "type": "group", 24 | "name": "admin", 25 | "roles": [ 26 | "consumer", 27 | "dataconsumer", 28 | "facilitator", 29 | "producer" 30 | ] 31 | }] 32 | }, 33 | { 34 | spaceName: 'Finance', 35 | spaceDesc: 'Applications for the "FINANCE" user group', 36 | spaceType: 'managed', 37 | members: [ 38 | { 39 | "type": "group", 40 | "name": "admin", 41 | "roles": [ 42 | "consumer", 43 | "contributor", 44 | "dataconsumer", 45 | "facilitator", 46 | "publisher" 47 | ] 48 | }, { 49 | "type": "group", 50 | "name": "finance", 51 | "roles": [ 52 | "consumer" 53 | ] 54 | }] 55 | }, { 56 | spaceName: 'Marketing staging', 57 | spaceDesc: 'Staging area for deployment of marketing applications, for the "ADMIN" user group', 58 | spaceType: 'shared', 59 | members: [ 60 | { 61 | "type": "group", 62 | "name": "admin", 63 | "roles": [ 64 | "consumer", 65 | "dataconsumer", 66 | "facilitator", 67 | "producer" 68 | ] 69 | }] 70 | }, 71 | { 72 | spaceName: 'Marketing', 73 | spaceDesc: 'Applications for the "MARKETING" user group', 74 | spaceType: 'managed', 75 | members: [ 76 | { 77 | "type": "group", 78 | "name": "admin", 79 | "roles": [ 80 | "consumer", 81 | "contributor", 82 | "dataconsumer", 83 | "facilitator", 84 | "publisher" 85 | ] 86 | }, { 87 | "type": "group", 88 | "name": "marketing", 89 | "roles": [ 90 | "consumer" 91 | ] 92 | }] 93 | }]; 94 | 95 | // Configure which apps to deploy where 96 | let deployApps = [ 97 | { 98 | appName: 'Finance Dashboard', 99 | appDescription: 'The finance dashboard gives you a full view of transactions.', 100 | appBinary: './resources/Template_Finance.qvf', 101 | spaceStage: 'finance staging', 102 | spaceProd: 'finance' 103 | }, 104 | { 105 | appName: 'Marketing Dashboard', 106 | appDescription: 'The marketing dashboard gives you a full view of leads, trials and other key metrics.', 107 | appBinary: './resources/Template_Finance.qvf', 108 | spaceStage: 'marketing staging', 109 | spaceProd: 'marketing' 110 | }, 111 | ] 112 | 113 | 114 | // Prepare our oauth cred package for future auth requests 115 | const oauthClientData = JSON.stringify({ 116 | client_id: qcs.regionClientId, 117 | client_secret: qcs.regionClientSecret, 118 | grant_type: "client_credentials" 119 | }) 120 | 121 | // Prepare target tenant URL 122 | const targetTenantUrl = qcs.targetTenant + '.' + qcs.region; 123 | const targetTenantUrlFull = 'https://' + qcs.targetTenant + '.' + qcs.region; 124 | 125 | // Function to handle our requests 126 | function httpsRequest(params, postBody) { 127 | return new Promise(function (resolve, reject) { 128 | var req = https.request(params, function (res) { 129 | 130 | // There is no error handling on HTTP response codes here 131 | // In your application, you should handle these responses in either the request handler or your calls 132 | //console.log(res); 133 | 134 | var body = []; 135 | res.on('data', function (chunk) { 136 | body.push(chunk); 137 | }); 138 | res.on('end', function () { 139 | try { 140 | body = JSON.parse(Buffer.concat(body).toString()); 141 | } catch (e) { 142 | // Here to catch empty responses and play them back nicely 143 | body = 'Status ' + res.statusCode + 'r/n/' + body; 144 | } 145 | resolve(body); 146 | }); 147 | }); 148 | req.on('error', function (err) { 149 | reject(err); 150 | }); 151 | if (postBody) { 152 | req.write(postBody); 153 | } 154 | req.end(); 155 | }); 156 | } 157 | 158 | // Function to handle our requests 159 | async function httpsFetch(url, settings) { 160 | try { 161 | const fetchResponse = await fetch(url, settings); 162 | const data = await fetchResponse.json(); 163 | return data; 164 | 165 | } catch (e) { 166 | return e; 167 | } 168 | } 169 | 170 | async function deployToTenant() { 171 | 172 | // *************************** 173 | // 1 - Get access token for target tenant, for use will all future requests 174 | var data = await httpsRequest({ 175 | hostname: targetTenantUrl, 176 | port: 443, 177 | path: '/oauth/token', 178 | method: 'POST', 179 | headers: { 180 | 'Content-Type': 'application/json' 181 | } 182 | }, oauthClientData); 183 | // For the demo, log out the response 184 | console.log('Output from step 1: ', data); 185 | // Save access token for next request 186 | let targetTenantToken = data.access_token; 187 | 188 | // *************************** 189 | // 2 - Loop through spaces 190 | for (let spaceKey in deploySpaces) { 191 | var iterSpace = parseInt(spaceKey) + 1; 192 | console.log('Deploying space ' + iterSpace + '/' + deploySpaces.length + ' <' + deploySpaces[spaceKey].spaceName + '>'); 193 | 194 | // Build space body 195 | var spaceData = JSON.stringify( 196 | { 197 | name: deploySpaces[spaceKey].spaceName, 198 | description: deploySpaces[spaceKey].spaceDesc, 199 | type: deploySpaces[spaceKey].spaceType 200 | } 201 | ); 202 | 203 | // 2.iterSpace.1 - Send space creation requests 204 | var data = await httpsRequest({ 205 | hostname: targetTenantUrl, 206 | port: 443, 207 | path: '/api/v1/spaces', 208 | method: 'POST', 209 | headers: { 210 | 'Content-Type': 'application/json', 211 | 'Authorization': 'Bearer ' + targetTenantToken 212 | } 213 | }, spaceData); 214 | // For the demo, log out the response 215 | console.log('Output from step 2.' + iterSpace + '.1: ', data); 216 | 217 | // Quick check in case space already created (just so that we gracefully handle this one error) 218 | if (data.code == 'SpaceNameConflict') { 219 | var data = await httpsRequest({ 220 | hostname: targetTenantUrl, 221 | port: 443, 222 | path: '/api/v1/spaces?type=' + deploySpaces[spaceKey].spaceType + '&name=' + encodeURIComponent(deploySpaces[spaceKey].spaceName), 223 | method: 'GET', 224 | headers: { 225 | 'Content-Type': 'application/json', 226 | 'Authorization': 'Bearer ' + targetTenantToken 227 | } 228 | }); 229 | 230 | // Set returned var for use in assignment 231 | var spaceId = data.data[0].id; 232 | console.log('There was an issue with creating the space, retrieving id: ', data); 233 | } else { 234 | // Set returned var for use in assignment 235 | var spaceId = data.id; 236 | } 237 | 238 | for (let memberKey in deploySpaces[spaceKey].members) { 239 | var iterMember = parseInt(memberKey) + 1; 240 | // Only do the group requests to keep this simple 241 | if (deploySpaces[spaceKey].members[memberKey].type == 'group') { 242 | // First get the group id (we only have the name) 243 | // This is a bit dumb, you should improve this 244 | var data = await httpsRequest({ 245 | hostname: targetTenantUrl, 246 | port: 443, 247 | path: '/api/v1/groups?filter=' + encodeURIComponent('name eq "' + deploySpaces[spaceKey].members[memberKey].name + '"'), 248 | method: 'GET', 249 | headers: { 250 | 'Content-Type': 'application/json', 251 | 'Authorization': 'Bearer ' + targetTenantToken 252 | } 253 | }); 254 | 255 | // Print out our group ID for validation 256 | console.log('Output from step 2.' + iterSpace + '.2.' + iterMember + '.1: Group id for <' + deploySpaces[spaceKey].members[memberKey].name + '> is <' + data.data[0].id + '>, to be assigned to space id ' + spaceId); 257 | 258 | // Build assignment payload 259 | var assignmentData = JSON.stringify({ 260 | "type": deploySpaces[spaceKey].members[memberKey].type, 261 | "assigneeId": data.data[0].id, 262 | "roles": deploySpaces[spaceKey].members[memberKey].roles 263 | }); 264 | 265 | // Finally send the assignment request for the space 266 | var data = await httpsRequest({ 267 | hostname: targetTenantUrl, 268 | port: 443, 269 | path: '/api/v1/spaces/' + spaceId + '/assignments', 270 | method: 'POST', 271 | headers: { 272 | 'Content-Type': 'application/json', 273 | 'Authorization': 'Bearer ' + targetTenantToken 274 | } 275 | }, assignmentData); 276 | // For the demo, log out the response 277 | console.log('Output from step 2.' + iterSpace + '.2.' + iterMember + '.2: ', data); 278 | 279 | } else { 280 | console.log('Notice: Only groups assignments are implemented by this script.') 281 | } 282 | 283 | }; 284 | } 285 | // 3 - Now deploy apps 286 | for (let appKey in deployApps) { 287 | var iterApp = parseInt(appKey) + 1; 288 | console.log('Step 3.' + iterApp + ': Deploying app <' + deployApps[appKey].appName + '> into staging space <' + deployApps[appKey].spaceStage + '>, production space <' + deployApps[appKey].spaceProd + '>'); 289 | 290 | // 3.iterApp.1 Let's resolve the staging space id 291 | // This is a bit dumb, you should improve this 292 | var data = await httpsRequest({ 293 | hostname: targetTenantUrl, 294 | port: 443, 295 | path: '/api/v1/spaces?type=shared&name=' + encodeURIComponent(deployApps[appKey].spaceStage), 296 | method: 'GET', 297 | headers: { 298 | 'Content-Type': 'application/json', 299 | 'Authorization': 'Bearer ' + targetTenantToken 300 | } 301 | }); 302 | console.log('Step 3.' + iterApp + '.1: Getting spaceId for space <' + deployApps[appKey].spaceStage + '>: ', data); 303 | var spaceIdStage = data.data[0].id; 304 | 305 | // 3.iterApp.2 Let's resolve the prod space id 306 | // This is a bit dumb, you should improve this 307 | var data = await httpsRequest({ 308 | hostname: targetTenantUrl, 309 | port: 443, 310 | path: '/api/v1/spaces?type=managed&name=' + encodeURIComponent(deployApps[appKey].spaceProd), 311 | method: 'GET', 312 | headers: { 313 | 'Content-Type': 'application/json', 314 | 'Authorization': 'Bearer ' + targetTenantToken 315 | } 316 | }); 317 | console.log('Step 3.' + iterApp + '.2: Getting spaceId for space <' + deployApps[appKey].spaceProd + '>: ', data); 318 | var spaceIdProd = data.data[0].id; 319 | 320 | // Grab our binary file 321 | const appBinary = fs.createReadStream(deployApps[appKey].appBinary); 322 | 323 | // 3.iterApp.3 Let's get the app landed into the tenant first 324 | var data = await httpsFetch(targetTenantUrlFull + '/api/v1/apps/import?name=' + encodeURIComponent(deployApps[appKey].appName), { 325 | method: 'POST', 326 | headers: { 327 | 'Content-Type': 'application/octet-stream', 328 | 'Authorization': 'Bearer ' + targetTenantToken 329 | }, 330 | body: appBinary 331 | }); 332 | // For the demo, log out the response 333 | console.log('Output from step 3.' + iterApp + '.3: ', data); 334 | // Get the app ID as imported 335 | var appId = data.attributes.id; 336 | 337 | // 3.iterApp.4 Now move into the shared space 338 | var data = await httpsFetch(targetTenantUrlFull + '/api/v1/apps/' + appId + '/space', { 339 | method: 'PUT', 340 | headers: { 341 | 'Content-Type': 'application/json', 342 | 'Authorization': 'Bearer ' + targetTenantToken 343 | }, 344 | body: JSON.stringify({ 345 | spaceId: spaceIdStage 346 | }) 347 | }); 348 | // For the demo, log out the response 349 | console.log('Output from step 3.' + iterApp + '.4: ', data); 350 | 351 | // 3.iterApp.5 Now publish into the managed space 352 | var data = await httpsFetch(targetTenantUrlFull + '/api/v1/apps/' + appId + '/publish', { 353 | method: 'POST', 354 | headers: { 355 | 'Content-Type': 'application/json', 356 | 'Authorization': 'Bearer ' + targetTenantToken 357 | }, 358 | body: JSON.stringify({ 359 | spaceId: spaceIdProd, 360 | data: 'source' 361 | }) 362 | }); 363 | // For the demo, log out the response 364 | console.log('Output from step 3.' + iterApp + '.5: ', data); 365 | 366 | } 367 | 368 | }; 369 | 370 | // Let's deploy content 371 | deployToTenant(); -------------------------------------------------------------------------------- /examples/qcs_deployment/interactive_login.html: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 45 | 46 | 47 | 48 | 51 |
52 |
53 |
54 | 56 | 58 |
59 | 60 |
61 | 63 | 65 |
66 | 67 |
68 | 70 | 72 |
73 | 74 |
75 | 79 |
80 |
81 |
82 | -------------------------------------------------------------------------------- /examples/qcs_deployment/jwt_timing_tests.js: -------------------------------------------------------------------------------- 1 | // Bring in config file with values for the qcs const below, template is in the package 2 | const config = require('./qct_config.json'); 3 | 4 | // This example uses just https, you may want to use a package for requests 5 | const https = require('https'); 6 | 7 | // Required for jwt generation 8 | const fs = require('fs'); 9 | const uid = require('uid-safe'); 10 | const jwt = require('jsonwebtoken'); 11 | 12 | // Configure for your Qlik Cloud deployment 13 | const qcs = { 14 | targetTenant: '5ie54a4r13glxs5', // the tenant that you wish to configure, e.g. name for name.eu.qlikcloud.com 15 | region: config.region, // Qlik Cloud region URL e.g. name.eu.qlikcloud.com becomes eu.qlikcloud.com 16 | regionClientId: config.regionClientId, // our oauth client id, generated in MyQlik, e.g. '12345678912345678' 17 | regionClientSecret: config.regionClientSecret // our oauth client secret, generated in MyQlik, e.g. 'eac0dswfec23ewfweweg2g2vsebw77e1cce' 18 | } 19 | 20 | // To help with the JWT decoding step 21 | function parseJwt (token) { 22 | return JSON.parse(Buffer.from(token.split('.')[1], 'base64').toString()); 23 | } 24 | 25 | // Config for IdP 26 | const idpSettings = { 27 | description: "Auth for my app or portal", 28 | } 29 | 30 | // JWT: Configure JWT IdP payload with the groups we want to seed 31 | const jwtPayload = { 32 | jti: uid.sync(32), // 32 bytes random string 33 | sub: 'mydomain\\yournamehere', 34 | subType: 'user', 35 | name: 'Your Name Here', 36 | email: 'yournamehere@example.com', 37 | email_verified: true, 38 | groups: ['Admin', 'Finance', 'Marketing', 'Sales'], 39 | }; 40 | 41 | // JWT: Identify keys 42 | const jwtPrivateKey = fs.readFileSync('./certs/nameap_privatekey.pem'); 43 | const jwtPublicKey = fs.readFileSync('./certs/nameap_publickey.cer'); 44 | 45 | // JWT: Provide signing options 46 | // kid and issuer have to match with the IDP config and the 47 | // audience has to be qlik.api/jwt-login-session 48 | const jwtSigningOptions = { 49 | keyid: 'myapporportal', 50 | algorithm: 'RS256', 51 | issuer: 'euqlikcloud', 52 | expiresIn: '5m', 53 | notBefore: '47s', 54 | audience: 'qlik.api/login/jwt-session' 55 | }; 56 | 57 | // Prepare our oauth cred package for future auth requests 58 | const oauthClientData = JSON.stringify({ 59 | client_id: qcs.regionClientId, 60 | client_secret: qcs.regionClientSecret, 61 | grant_type: "client_credentials" 62 | }) 63 | 64 | // Prepare target tenant URL 65 | const targetTenantUrl = qcs.targetTenant + '.' + qcs.region; 66 | 67 | // Function to handle our requests 68 | function httpsRequest(params, postBody) { 69 | return new Promise(function (resolve, reject) { 70 | var req = https.request(params, function (res) { 71 | 72 | var body = []; 73 | res.on('data', function (chunk) { 74 | body.push(chunk); 75 | }); 76 | res.on('end', function () { 77 | try { 78 | body = JSON.parse(Buffer.concat(body).toString()); 79 | } catch (e) { 80 | // Here to catch empty responses and play them back nicely 81 | body = 'Status ' + res.statusCode + 'r/n/' + body; 82 | } 83 | resolve(body); 84 | }); 85 | }); 86 | req.on('error', function (err) { 87 | reject(err); 88 | }); 89 | if (postBody) { 90 | req.write(postBody); 91 | } 92 | req.end(); 93 | }); 94 | } 95 | 96 | async function testJwtTiming() { 97 | 98 | // *************************** 99 | // 1 - Get access token for target tenant, for use will all future requests 100 | var data = await httpsRequest({ 101 | hostname: targetTenantUrl, 102 | port: 443, 103 | path: '/oauth/token', 104 | method: 'POST', 105 | headers: { 106 | 'Content-Type': 'application/json' 107 | } 108 | }, oauthClientData); 109 | // For the demo, log out the response 110 | console.log(Math.floor(Date.now()/1000), 'Output from step 1: ' + JSON.stringify(data)); 111 | // Save access token for next request 112 | let targetTenantToken = data.access_token; 113 | 114 | // *************************** 115 | // 2 - Get tenant id 116 | var data = await httpsRequest({ 117 | hostname: targetTenantUrl, 118 | port: 443, 119 | path: '/api/v1/tenants', 120 | method: 'GET', 121 | headers: { 122 | 'Content-Type': 'application/json', 123 | 'Authorization': 'Bearer ' + targetTenantToken 124 | } 125 | }); 126 | // For the demo, log out the response 127 | console.log(Math.floor(Date.now()/1000), 'Output from step 2: ' + JSON.stringify(data)); 128 | 129 | // Keep a copy of the tenant id, we'll need it shortly 130 | const tenantId = data.data[0].id; 131 | 132 | const testClockToleranceSec = 47; 133 | 134 | // *************************** 135 | // 3 - Configure JWT IdP on the tenant 136 | // Prepare IdP configuration for the JWT IdP post with the tenant ID and config 137 | const idpConfiguration = JSON.stringify({ 138 | tenantIds: [ 139 | tenantId 140 | ], 141 | provider: "external", 142 | protocol: "jwtAuth", 143 | interactive: false, 144 | active: true, 145 | description: idpSettings.description, 146 | clockToleranceSec: testClockToleranceSec, 147 | options: { 148 | jwtLoginEnabled: true, 149 | issuer: jwtSigningOptions.issuer, 150 | staticKeys: [ 151 | { 152 | kid: jwtSigningOptions.keyid, 153 | pem: jwtPublicKey.toString() 154 | } 155 | ] 156 | } 157 | }); 158 | 159 | // For the demo, log out the IdP config we're going to send 160 | console.log(Math.floor(Date.now()/1000), 'Output from step 3a: ' + idpConfiguration); 161 | 162 | // Send request to create JWT IdP 163 | var data = await httpsRequest({ 164 | hostname: targetTenantUrl, 165 | port: 443, 166 | path: '/api/v1/identity-providers', 167 | method: 'POST', 168 | headers: { 169 | 'Content-Type': 'application/json', 170 | 'Authorization': 'Bearer ' + targetTenantToken 171 | } 172 | }, idpConfiguration); 173 | 174 | // Get the JWT IdP id 175 | const idpId = data.id; 176 | 177 | // Get the created ts 178 | const idpCreated = Math.floor(new Date(data.created) / 1000); 179 | 180 | // For the demo, log out the response 181 | console.log(Math.floor(Date.now()/1000), 'Output from step 3b, IdP created at:', idpCreated, 'with config:', JSON.stringify(data)); 182 | 183 | // *************************** 184 | // 4 - Send our JWT request and seed groups 185 | // Built our JWT with the groups we want to seed 186 | const seedToken = jwt.sign(jwtPayload, jwtPrivateKey, jwtSigningOptions); 187 | 188 | const testTimeJwtNbf = parseJwt(seedToken).nbf; 189 | 190 | // For the demo, log out the token we're going to use to seed groups 191 | console.log(Math.floor(Date.now()/1000), 'Output from step 4a:', seedToken, parseJwt(seedToken)); 192 | 193 | // Send the request 194 | var data = await httpsRequest({ 195 | hostname: targetTenantUrl, 196 | port: 443, 197 | path: '/login/jwt-session', 198 | method: 'POST', 199 | headers: { 200 | 'Content-Type': 'application/json', 201 | 'Authorization': 'Bearer ' + seedToken 202 | } 203 | }); 204 | 205 | // For the demo, log out the response 206 | console.log(Math.floor(Date.now()/1000), 'Output from step 4b: ' + JSON.stringify(data)); 207 | 208 | // 5 - Delete the IdP 209 | // Send the request 210 | var data = await httpsRequest({ 211 | hostname: targetTenantUrl, 212 | port: 443, 213 | path: '/api/v1/identity-providers/' + idpId, 214 | method: 'DELETE', 215 | headers: { 216 | 'Content-Type': 'application/json', 217 | 'Authorization': 'Bearer ' + targetTenantToken 218 | } 219 | }); 220 | 221 | // For the demo, log out the response 222 | console.log(Math.floor(Date.now()/1000), 'Output from step 5: ' + JSON.stringify(data)); 223 | 224 | // For the test 225 | console.log(Math.floor(Date.now()/1000),'Test resulted in nbf of',testTimeJwtNbf,'and server time of',idpCreated,'which was a delta of',testTimeJwtNbf-idpCreated,'seconds.'); 226 | 227 | if (testTimeJwtNbf > idpCreated + testClockToleranceSec) { 228 | console.log(Math.floor(Date.now()/1000),'Error: System time +',testClockToleranceSec,'was lesser than nbf'); 229 | } 230 | } 231 | 232 | // Go test 233 | testJwtTiming(); -------------------------------------------------------------------------------- /examples/qcs_deployment/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "qcs_deployment", 3 | "version": "1.0.0", 4 | "lockfileVersion": 2, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "qcs_deployment", 9 | "version": "1.0.0", 10 | "license": "ISC", 11 | "dependencies": { 12 | "fetch": "^1.1.0", 13 | "jsonwebtoken": "^9.0.0", 14 | "uid-safe": "^2.1.5" 15 | } 16 | }, 17 | "node_modules/biskviit": { 18 | "version": "1.0.1", 19 | "resolved": "https://registry.npmjs.org/biskviit/-/biskviit-1.0.1.tgz", 20 | "integrity": "sha512-VGCXdHbdbpEkFgtjkeoBN8vRlbj1ZRX2/mxhE8asCCRalUx2nBzOomLJv8Aw/nRt5+ccDb+tPKidg4XxcfGW4w==", 21 | "dependencies": { 22 | "psl": "^1.1.7" 23 | }, 24 | "engines": { 25 | "node": ">=1.0.0" 26 | } 27 | }, 28 | "node_modules/buffer-equal-constant-time": { 29 | "version": "1.0.1", 30 | "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", 31 | "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" 32 | }, 33 | "node_modules/ecdsa-sig-formatter": { 34 | "version": "1.0.11", 35 | "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", 36 | "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", 37 | "dependencies": { 38 | "safe-buffer": "^5.0.1" 39 | } 40 | }, 41 | "node_modules/encoding": { 42 | "version": "0.1.12", 43 | "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.12.tgz", 44 | "integrity": "sha512-bl1LAgiQc4ZWr++pNYUdRe/alecaHFeHxIJ/pNciqGdKXghaTCOwKkbKp6ye7pKZGu/GcaSXFk8PBVhgs+dJdA==", 45 | "dependencies": { 46 | "iconv-lite": "~0.4.13" 47 | } 48 | }, 49 | "node_modules/fetch": { 50 | "version": "1.1.0", 51 | "resolved": "https://registry.npmjs.org/fetch/-/fetch-1.1.0.tgz", 52 | "integrity": "sha512-5O8TwrGzoNblBG/jtK4NFuZwNCkZX6s5GfRNOaGtm+QGJEuNakSC/i2RW0R93KX6E0jVjNXm6O3CRN4Ql3K+yA==", 53 | "dependencies": { 54 | "biskviit": "1.0.1", 55 | "encoding": "0.1.12" 56 | } 57 | }, 58 | "node_modules/iconv-lite": { 59 | "version": "0.4.24", 60 | "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", 61 | "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", 62 | "dependencies": { 63 | "safer-buffer": ">= 2.1.2 < 3" 64 | }, 65 | "engines": { 66 | "node": ">=0.10.0" 67 | } 68 | }, 69 | "node_modules/jsonwebtoken": { 70 | "version": "9.0.0", 71 | "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.0.tgz", 72 | "integrity": "sha512-tuGfYXxkQGDPnLJ7SibiQgVgeDgfbPq2k2ICcbgqW8WxWLBAxKQM/ZCu/IT8SOSwmaYl4dpTFCW5xZv7YbbWUw==", 73 | "dependencies": { 74 | "jws": "^3.2.2", 75 | "lodash": "^4.17.21", 76 | "ms": "^2.1.1", 77 | "semver": "^7.3.8" 78 | }, 79 | "engines": { 80 | "node": ">=12", 81 | "npm": ">=6" 82 | } 83 | }, 84 | "node_modules/jwa": { 85 | "version": "1.4.1", 86 | "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", 87 | "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", 88 | "dependencies": { 89 | "buffer-equal-constant-time": "1.0.1", 90 | "ecdsa-sig-formatter": "1.0.11", 91 | "safe-buffer": "^5.0.1" 92 | } 93 | }, 94 | "node_modules/jws": { 95 | "version": "3.2.2", 96 | "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", 97 | "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", 98 | "dependencies": { 99 | "jwa": "^1.4.1", 100 | "safe-buffer": "^5.0.1" 101 | } 102 | }, 103 | "node_modules/lodash": { 104 | "version": "4.17.21", 105 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", 106 | "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" 107 | }, 108 | "node_modules/lru-cache": { 109 | "version": "6.0.0", 110 | "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", 111 | "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", 112 | "dependencies": { 113 | "yallist": "^4.0.0" 114 | }, 115 | "engines": { 116 | "node": ">=10" 117 | } 118 | }, 119 | "node_modules/ms": { 120 | "version": "2.1.3", 121 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", 122 | "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" 123 | }, 124 | "node_modules/psl": { 125 | "version": "1.8.0", 126 | "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", 127 | "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" 128 | }, 129 | "node_modules/random-bytes": { 130 | "version": "1.0.0", 131 | "resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz", 132 | "integrity": "sha512-iv7LhNVO047HzYR3InF6pUcUsPQiHTM1Qal51DcGSuZFBil1aBBWG5eHPNek7bvILMaYJ/8RU1e8w1AMdHmLQQ==", 133 | "engines": { 134 | "node": ">= 0.8" 135 | } 136 | }, 137 | "node_modules/safe-buffer": { 138 | "version": "5.2.1", 139 | "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", 140 | "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", 141 | "funding": [ 142 | { 143 | "type": "github", 144 | "url": "https://github.com/sponsors/feross" 145 | }, 146 | { 147 | "type": "patreon", 148 | "url": "https://www.patreon.com/feross" 149 | }, 150 | { 151 | "type": "consulting", 152 | "url": "https://feross.org/support" 153 | } 154 | ] 155 | }, 156 | "node_modules/safer-buffer": { 157 | "version": "2.1.2", 158 | "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", 159 | "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" 160 | }, 161 | "node_modules/semver": { 162 | "version": "7.5.4", 163 | "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", 164 | "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", 165 | "dependencies": { 166 | "lru-cache": "^6.0.0" 167 | }, 168 | "bin": { 169 | "semver": "bin/semver.js" 170 | }, 171 | "engines": { 172 | "node": ">=10" 173 | } 174 | }, 175 | "node_modules/uid-safe": { 176 | "version": "2.1.5", 177 | "resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-2.1.5.tgz", 178 | "integrity": "sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==", 179 | "dependencies": { 180 | "random-bytes": "~1.0.0" 181 | }, 182 | "engines": { 183 | "node": ">= 0.8" 184 | } 185 | }, 186 | "node_modules/yallist": { 187 | "version": "4.0.0", 188 | "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", 189 | "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" 190 | } 191 | }, 192 | "dependencies": { 193 | "biskviit": { 194 | "version": "1.0.1", 195 | "resolved": "https://registry.npmjs.org/biskviit/-/biskviit-1.0.1.tgz", 196 | "integrity": "sha512-VGCXdHbdbpEkFgtjkeoBN8vRlbj1ZRX2/mxhE8asCCRalUx2nBzOomLJv8Aw/nRt5+ccDb+tPKidg4XxcfGW4w==", 197 | "requires": { 198 | "psl": "^1.1.7" 199 | } 200 | }, 201 | "buffer-equal-constant-time": { 202 | "version": "1.0.1", 203 | "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", 204 | "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" 205 | }, 206 | "ecdsa-sig-formatter": { 207 | "version": "1.0.11", 208 | "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", 209 | "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", 210 | "requires": { 211 | "safe-buffer": "^5.0.1" 212 | } 213 | }, 214 | "encoding": { 215 | "version": "0.1.12", 216 | "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.12.tgz", 217 | "integrity": "sha512-bl1LAgiQc4ZWr++pNYUdRe/alecaHFeHxIJ/pNciqGdKXghaTCOwKkbKp6ye7pKZGu/GcaSXFk8PBVhgs+dJdA==", 218 | "requires": { 219 | "iconv-lite": "~0.4.13" 220 | } 221 | }, 222 | "fetch": { 223 | "version": "1.1.0", 224 | "resolved": "https://registry.npmjs.org/fetch/-/fetch-1.1.0.tgz", 225 | "integrity": "sha512-5O8TwrGzoNblBG/jtK4NFuZwNCkZX6s5GfRNOaGtm+QGJEuNakSC/i2RW0R93KX6E0jVjNXm6O3CRN4Ql3K+yA==", 226 | "requires": { 227 | "biskviit": "1.0.1", 228 | "encoding": "0.1.12" 229 | } 230 | }, 231 | "iconv-lite": { 232 | "version": "0.4.24", 233 | "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", 234 | "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", 235 | "requires": { 236 | "safer-buffer": ">= 2.1.2 < 3" 237 | } 238 | }, 239 | "jsonwebtoken": { 240 | "version": "9.0.0", 241 | "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.0.tgz", 242 | "integrity": "sha512-tuGfYXxkQGDPnLJ7SibiQgVgeDgfbPq2k2ICcbgqW8WxWLBAxKQM/ZCu/IT8SOSwmaYl4dpTFCW5xZv7YbbWUw==", 243 | "requires": { 244 | "jws": "^3.2.2", 245 | "lodash": "^4.17.21", 246 | "ms": "^2.1.1", 247 | "semver": "^7.3.8" 248 | } 249 | }, 250 | "jwa": { 251 | "version": "1.4.1", 252 | "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", 253 | "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", 254 | "requires": { 255 | "buffer-equal-constant-time": "1.0.1", 256 | "ecdsa-sig-formatter": "1.0.11", 257 | "safe-buffer": "^5.0.1" 258 | } 259 | }, 260 | "jws": { 261 | "version": "3.2.2", 262 | "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", 263 | "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", 264 | "requires": { 265 | "jwa": "^1.4.1", 266 | "safe-buffer": "^5.0.1" 267 | } 268 | }, 269 | "lodash": { 270 | "version": "4.17.21", 271 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", 272 | "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" 273 | }, 274 | "lru-cache": { 275 | "version": "6.0.0", 276 | "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", 277 | "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", 278 | "requires": { 279 | "yallist": "^4.0.0" 280 | } 281 | }, 282 | "ms": { 283 | "version": "2.1.3", 284 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", 285 | "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" 286 | }, 287 | "psl": { 288 | "version": "1.8.0", 289 | "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", 290 | "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" 291 | }, 292 | "random-bytes": { 293 | "version": "1.0.0", 294 | "resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz", 295 | "integrity": "sha512-iv7LhNVO047HzYR3InF6pUcUsPQiHTM1Qal51DcGSuZFBil1aBBWG5eHPNek7bvILMaYJ/8RU1e8w1AMdHmLQQ==" 296 | }, 297 | "safe-buffer": { 298 | "version": "5.2.1", 299 | "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", 300 | "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" 301 | }, 302 | "safer-buffer": { 303 | "version": "2.1.2", 304 | "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", 305 | "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" 306 | }, 307 | "semver": { 308 | "version": "7.5.4", 309 | "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", 310 | "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", 311 | "requires": { 312 | "lru-cache": "^6.0.0" 313 | } 314 | }, 315 | "uid-safe": { 316 | "version": "2.1.5", 317 | "resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-2.1.5.tgz", 318 | "integrity": "sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==", 319 | "requires": { 320 | "random-bytes": "~1.0.0" 321 | } 322 | }, 323 | "yallist": { 324 | "version": "4.0.0", 325 | "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", 326 | "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" 327 | } 328 | } 329 | } 330 | -------------------------------------------------------------------------------- /examples/qcs_deployment/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "qcs_deployment", 3 | "version": "1.0.0", 4 | "description": "Create a tenant, configure the tenant, and deploy apps and content", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "fetch": "^1.1.0", 13 | "jsonwebtoken": "^9.0.0", 14 | "uid-safe": "^2.1.5" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /examples/qcs_deployment/qct_config_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "licenseKey": "eyJhbGciOiJ...", 3 | "sourceTenant": "name", 4 | "region": "eu.qlikcloud.com", 5 | "regionClientId": "1ef2caed0dbe0a6...", 6 | "regionClientSecret": "eac0d78212fd61e3e..." 7 | } -------------------------------------------------------------------------------- /examples/qcs_deployment/resources/Template_Finance.qvf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/examples/qcs_deployment/resources/Template_Finance.qvf -------------------------------------------------------------------------------- /examples/qs_encryption_qvd_performance/00_Main.qvs: -------------------------------------------------------------------------------- 1 | SET ThousandSep=','; 2 | SET DecimalSep='.'; 3 | SET MoneyThousandSep=','; 4 | SET MoneyDecimalSep='.'; 5 | SET MoneyFormat='$#,##0.00;-$#,##0.00'; 6 | SET TimeFormat='hh:mm:ss TT'; 7 | SET DateFormat='DD/MM/YYYY'; 8 | SET TimestampFormat='DD/MM/YYYY hh:mm:ss[.fff]'; 9 | SET FirstWeekDay=6; 10 | SET BrokenWeeks=1; 11 | SET ReferenceDay=0; 12 | SET FirstMonthOfYear=1; 13 | SET CollationLocale='en-US'; 14 | SET CreateSearchIndexOnReload=1; 15 | SET MonthNames='Jan;Feb;Mar;Apr;May;Jun;Jul;Aug;Sep;Oct;Nov;Dec'; 16 | SET LongMonthNames='January;February;March;April;May;June;July;August;September;October;November;December'; 17 | SET DayNames='Mon;Tue;Wed;Thu;Fri;Sat;Sun'; 18 | SET LongDayNames='Monday;Tuesday;Wednesday;Thursday;Friday;Saturday;Sunday'; 19 | SET NumericalAbbreviation='3:k;6:M;9:G;12:T;15:P;18:E;21:Z;24:Y;-3:m;-6:μ;-9:n;-12:p;-15:f;-18:a;-21:z;-24:y'; 20 | -------------------------------------------------------------------------------- /examples/qs_encryption_qvd_performance/01_Config.qvs: -------------------------------------------------------------------------------- 1 | // Configure shared variables 2 | 3 | SET vConn_QVD = 'lib://file_stackoverflow'; 4 | SET vConn_QVD_Data = '$(vConn_QVD)/datascience.stackexchange.com'; 5 | SET vConn_QVD_Math = '$(vConn_QVD)/math.stackexchange.com'; 6 | SET vConn_QVD_Server = '$(vConn_QVD)/serverfault.com'; 7 | 8 | // Set number of iterations to test for 9 | SET vTest_Iterations = 10; 10 | 11 | // Set context for test 12 | SET vTest_Mode = 'Encrypted QVD + QVF'; 13 | -------------------------------------------------------------------------------- /examples/qs_encryption_qvd_performance/02_sTimerEnd().qvs: -------------------------------------------------------------------------------- 1 | // Create a function to collect the log timestamp and append it to the timer log table 2 | 3 | SUB sTimerEnd(vSub_TestName) 4 | 5 | // Set the time now 6 | LET vSub_Now = now(); 7 | 8 | TRACE >> Event [$(vSub_TestName)] recorded with start [$(vLog_Time)] and end [$(vSub_Now)].; 9 | 10 | // Concatenate onto the logging table 11 | Concatenate(Logging) 12 | LOAD 13 | rowno() AS [Log ID], 14 | '$(vSub_Now)' AS [Log Timestamp], 15 | '$(vSub_TestName)' AS [Log Event], 16 | '$(vLog_Time)' AS [Log Event Start], 17 | '$(vSub_Now)' AS [Log Event End], 18 | ROUND(('$(vSub_Now)'-'$(vLog_Time)')*24*60*60,0.001) AS [Log Event Duration (s)], 19 | '$(vTest_Mode)' AS [Log Mode] 20 | AutoGenerate 1; 21 | 22 | // Now reset the log time variable 23 | LET vLog_Time = now(); 24 | 25 | END SUB; -------------------------------------------------------------------------------- /examples/qs_encryption_qvd_performance/03_TimerLog & Start.qvs: -------------------------------------------------------------------------------- 1 | // Create an empty table to append log records 2 | // Log table will eventually hold: 3 | // - Log Timestamp 4 | // - Log Event 5 | // - Log Event Start 6 | // - Log Event End 7 | // - Log Event Duration 8 | // - Log ID (autogenerated) 9 | Logging: 10 | LOAD 11 | null() AS [Log ID] 12 | AutoGenerate 0; 13 | 14 | // Initiate the timer 15 | LET vLog_Time = now(); -------------------------------------------------------------------------------- /examples/qs_encryption_qvd_performance/04_Badges.qvs: -------------------------------------------------------------------------------- 1 | // Pick which data set 2 | LET vConn_Test_Data = '$(vConn_QVD_Server)'; 3 | LET vConn_Test_File = 'Badges'; 4 | 5 | // Create table 6 | LET vConn_Test_Table = lower('$(vConn_Test_File)'); 7 | 8 | // Loop over test 9 | FOR iter_Test = 0 to vTest_Iterations-1 10 | 11 | /////////// Raw load 12 | [$(vConn_Test_File)]: 13 | LOAD 14 | Id, 15 | "UserId", 16 | Name, 17 | "Date", 18 | "Class", 19 | TagBased 20 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).xml] 21 | (XmlSimple, table is [$(vConn_Test_Table)/row]); 22 | 23 | Call sTimerEnd('$(vConn_Test_File) raw load [$(vConn_Test_Data)])'); 24 | 25 | /////////// Badge raw store & drop 26 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File).qvd] (qvd); 27 | DROP TABLE [$(vConn_Test_File)]; 28 | 29 | Call sTimerEnd('$(vConn_Test_File) raw store & drop [$(vConn_Test_Data)])'); 30 | 31 | 32 | /////////// QVD optimised load 33 | [$(vConn_Test_File)]: 34 | LOAD 35 | * 36 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).qvd] (qvd); 37 | 38 | Call sTimerEnd('$(vConn_Test_File) QVD optimised load [$(vConn_Test_Data)])'); 39 | 40 | /////////// Badge second store & drop 41 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File)_copy.qvd] (qvd); 42 | DROP TABLE [$(vConn_Test_File)]; 43 | 44 | Call sTimerEnd('$(vConn_Test_File) QVD store and drop [$(vConn_Test_Data)])'); 45 | 46 | /////////// Unoptimised load and store 47 | [$(vConn_Test_File)]: 48 | LOAD 49 | Id AS [Badge ID], 50 | "UserId" AS [Badge User ID], 51 | Name AS [Badge Name], 52 | Name & ' (' & [Class] & ')' AS [Badge Name & Class], 53 | Timestamp("Date") AS [Badge Timestamp], 54 | Date(Floor("Date")) AS [Badge Date], 55 | "Class" AS [Badge Class], 56 | IF(TagBased,'Yes','No') AS [Badge Tag Based] 57 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).qvd] 58 | (qvd); 59 | 60 | Call sTimerEnd('$(vConn_Test_File) QVD unoptimised load & transform 1 (load) [$(vConn_Test_Data)])'); 61 | 62 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File)_ult1.qvd] (qvd); 63 | DROP TABLE [$(vConn_Test_File)]; 64 | 65 | Call sTimerEnd('$(vConn_Test_File) QVD unoptimised load & transform 1 (store) [$(vConn_Test_Data)])'); 66 | 67 | /////////// Unoptimised load and store #2 68 | [$(vConn_Test_File)]: 69 | LOAD 70 | *, 71 | Hash128(Id) AS IdHash 72 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).qvd] 73 | (qvd) 74 | WHERE WildMatch(Name,'*a*') OR WildMatch(Id,'*1*'); 75 | 76 | Call sTimerEnd('$(vConn_Test_File) QVD unoptimised load & transform 2 (load) [$(vConn_Test_Data)])'); 77 | 78 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File)_ult2.qvd] (qvd); 79 | DROP TABLE [$(vConn_Test_File)]; 80 | 81 | Call sTimerEnd('$(vConn_Test_File) QVD unoptimised load & transform 2 (store) [$(vConn_Test_Data)])'); 82 | 83 | /////////// Optimised load and resident then store 84 | [$(vConn_Test_File)_Temp]: 85 | LOAD 86 | * 87 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).qvd] 88 | (qvd); 89 | 90 | [$(vConn_Test_File)]: 91 | NoConcatenate 92 | LOAD 93 | Id AS [Badge ID], 94 | "UserId" AS [Badge User ID], 95 | Name AS [Badge Name], 96 | Name & ' (' & [Class] & ')' AS [Badge Name & Class], 97 | Timestamp("Date") AS [Badge Timestamp], 98 | Date(Floor("Date")) AS [Badge Date], 99 | "Class" AS [Badge Class], 100 | IF(TagBased,'Yes','No') AS [Badge Tag Based] 101 | RESIDENT [$(vConn_Test_File)_Temp]; 102 | 103 | DROP TABLE [$(vConn_Test_File)_Temp]; 104 | 105 | Call sTimerEnd('$(vConn_Test_File) QVD optimised load & transform 1 (load) [$(vConn_Test_Data)])'); 106 | 107 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File)_olt1.qvd] (qvd); 108 | DROP TABLE [$(vConn_Test_File)]; 109 | 110 | Call sTimerEnd('$(vConn_Test_File) QVD optimised load & transform 1 (store) [$(vConn_Test_Data)])'); 111 | 112 | Next iter_Test; -------------------------------------------------------------------------------- /examples/qs_encryption_qvd_performance/05_Post History.qvs: -------------------------------------------------------------------------------- 1 | // Pick which data set 2 | LET vConn_Test_Data = '$(vConn_QVD_Server)'; 3 | LET vConn_Test_File = 'PostHistory'; 4 | 5 | // Create table 6 | LET vConn_Test_Table = lower('$(vConn_Test_File)'); 7 | 8 | // Loop over test 9 | FOR iter_Test = 0 to vTest_Iterations-1 10 | 11 | /////////// Raw load 12 | [$(vConn_Test_File)]: 13 | LOAD 14 | Id, 15 | PostHistoryTypeId, 16 | PostId, 17 | RevisionGUID, 18 | CreationDate, 19 | "UserId", 20 | "Text", 21 | "Comment", 22 | UserDisplayName 23 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).xml] 24 | (XmlSimple, table is [$(vConn_Test_Table)/row]); 25 | 26 | Call sTimerEnd('$(vConn_Test_File) raw load [$(vConn_Test_Data)])'); 27 | 28 | /////////// Badge raw store & drop 29 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File).qvd] (qvd); 30 | DROP TABLE [$(vConn_Test_File)]; 31 | 32 | Call sTimerEnd('$(vConn_Test_File) raw store & drop [$(vConn_Test_Data)])'); 33 | 34 | 35 | /////////// QVD optimised load 36 | [$(vConn_Test_File)]: 37 | LOAD 38 | * 39 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).qvd] (qvd); 40 | 41 | Call sTimerEnd('$(vConn_Test_File) QVD optimised load [$(vConn_Test_Data)])'); 42 | 43 | /////////// Badge second store & drop 44 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File)_copy.qvd] (qvd); 45 | DROP TABLE [$(vConn_Test_File)]; 46 | 47 | Call sTimerEnd('$(vConn_Test_File) QVD store and drop [$(vConn_Test_Data)])'); 48 | 49 | /////////// Unoptimised load and store 50 | [$(vConn_Test_File)]: 51 | LOAD 52 | Id AS [Post History ID], 53 | PostHistoryTypeId AS [Post History Type ID], 54 | PostId AS [Post ID], 55 | AutoNumber(RevisionGUID) AS [Post Revision ID], 56 | RevisionGUID AS [Post Revision GUID], 57 | Timestamp(CreationDate) AS [Post Created Timestamp], 58 | Date(Floor(CreationDate)) AS [Post Created Date], 59 | "UserId" AS [Post User ID], 60 | "Text" AS [Post Text], 61 | Hash256([Text]) AS [Post Text Hash], 62 | IF(WildMatch([Text],'*app*','*application*'),'Yes','No') AS [Post Mentions App], 63 | "Comment" AS [Post Comment], 64 | UserDisplayName AS [Post User Name] 65 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).qvd] 66 | (qvd); 67 | 68 | Call sTimerEnd('$(vConn_Test_File) QVD unoptimised load & transform 1 (load) [$(vConn_Test_Data)])'); 69 | 70 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File)_ult1.qvd] (qvd); 71 | DROP TABLE [$(vConn_Test_File)]; 72 | 73 | Call sTimerEnd('$(vConn_Test_File) QVD unoptimised load & transform 1 (store) [$(vConn_Test_Data)])'); 74 | 75 | /////////// Unoptimised load and store #2 76 | [$(vConn_Test_File)]: 77 | LOAD 78 | *, 79 | Hash128(Id) AS IdHash 80 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).qvd] 81 | (qvd) 82 | WHERE WildMatch([Text],'*app*') OR WildMatch(Id,'*1*'); 83 | 84 | Call sTimerEnd('$(vConn_Test_File) QVD unoptimised load & transform 2 (load) [$(vConn_Test_Data)])'); 85 | 86 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File)_ult2.qvd] (qvd); 87 | DROP TABLE [$(vConn_Test_File)]; 88 | 89 | Call sTimerEnd('$(vConn_Test_File) QVD unoptimised load & transform 2 (store) [$(vConn_Test_Data)])'); 90 | 91 | /////////// Optimised load and resident then store 92 | [$(vConn_Test_File)_Temp]: 93 | LOAD 94 | * 95 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).qvd] 96 | (qvd); 97 | 98 | [$(vConn_Test_File)]: 99 | NoConcatenate 100 | LOAD 101 | Id AS [Post History ID], 102 | PostHistoryTypeId AS [Post History Type ID], 103 | PostId AS [Post ID], 104 | AutoNumber(RevisionGUID) AS [Post Revision ID], 105 | RevisionGUID AS [Post Revision GUID], 106 | Timestamp(CreationDate) AS [Post Created Timestamp], 107 | Date(Floor(CreationDate)) AS [Post Created Date], 108 | "UserId" AS [Post User ID], 109 | "Text" AS [Post Text], 110 | Hash256([Text]) AS [Post Text Hash], 111 | IF(WildMatch([Text],'*app*','*application*'),'Yes','No') AS [Post Mentions App], 112 | "Comment" AS [Post Comment], 113 | UserDisplayName AS [Post User Name] 114 | RESIDENT [$(vConn_Test_File)_Temp]; 115 | 116 | DROP TABLE [$(vConn_Test_File)_Temp]; 117 | 118 | Call sTimerEnd('$(vConn_Test_File) QVD optimised load & transform 1 (load) [$(vConn_Test_Data)])'); 119 | 120 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File)_olt1.qvd] (qvd); 121 | DROP TABLE [$(vConn_Test_File)]; 122 | 123 | Call sTimerEnd('$(vConn_Test_File) QVD optimised load & transform 1 (store) [$(vConn_Test_Data)])'); 124 | 125 | Next iter_Test; -------------------------------------------------------------------------------- /examples/qs_encryption_qvd_performance/06_Posts.qvs: -------------------------------------------------------------------------------- 1 | // Pick which data set 2 | LET vConn_Test_Data = '$(vConn_QVD_Server)'; 3 | LET vConn_Test_File = 'Posts'; 4 | 5 | // Create table 6 | LET vConn_Test_Table = lower('$(vConn_Test_File)'); 7 | 8 | // Loop over test 9 | FOR iter_Test = 0 to vTest_Iterations-1 10 | 11 | /////////// Raw load 12 | [$(vConn_Test_File)]: 13 | LOAD 14 | Id, 15 | PostTypeId, 16 | AcceptedAnswerId, 17 | CreationDate, 18 | Score, 19 | ViewCount, 20 | Body, 21 | OwnerUserId, 22 | LastEditorUserId, 23 | LastEditDate, 24 | LastActivityDate, 25 | Title, 26 | "Tags", 27 | AnswerCount, 28 | CommentCount, 29 | FavoriteCount, 30 | ParentId, 31 | ClosedDate, 32 | CommunityOwnedDate, 33 | OwnerDisplayName, 34 | LastEditorDisplayName 35 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).xml] 36 | (XmlSimple, table is [$(vConn_Test_Table)/row]); 37 | 38 | Call sTimerEnd('$(vConn_Test_File) raw load [$(vConn_Test_Data)])'); 39 | 40 | /////////// Badge raw store & drop 41 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File).qvd] (qvd); 42 | DROP TABLE [$(vConn_Test_File)]; 43 | 44 | Call sTimerEnd('$(vConn_Test_File) raw store & drop [$(vConn_Test_Data)])'); 45 | 46 | 47 | /////////// QVD optimised load 48 | [$(vConn_Test_File)]: 49 | LOAD 50 | * 51 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).qvd] (qvd); 52 | 53 | Call sTimerEnd('$(vConn_Test_File) QVD optimised load [$(vConn_Test_Data)])'); 54 | 55 | /////////// Badge second store & drop 56 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File)_copy.qvd] (qvd); 57 | DROP TABLE [$(vConn_Test_File)]; 58 | 59 | Call sTimerEnd('$(vConn_Test_File) QVD store and drop [$(vConn_Test_Data)])'); 60 | 61 | /////////// Unoptimised load and store 62 | [$(vConn_Test_File)]: 63 | LOAD 64 | Id AS [Post ID], 65 | PostTypeId AS [Post Type ID], 66 | AcceptedAnswerId AS [Post Accepted Answer ID], 67 | Timestamp(CreationDate) AS [Post Created Timestamp], 68 | Date(Floor(CreationDate)) AS [Post Created Date], 69 | Score AS [Post Score], 70 | ViewCount AS [Post View Count], 71 | Body AS [Post Body], 72 | Hash256(Body) AS [Post Body Hash], 73 | OwnerUserId AS [Post Owner User ID], 74 | LastEditorUserId AS [Post Last Editor User ID], 75 | Timestamp(LastEditDate) AS [Post Last Edit Timestamp], 76 | Date(Floor(LastEditDate)) AS [Post Last Edit Date], 77 | Timestamp(LastActivityDate) AS [Post Last Activity Timestamp], 78 | Date(Floor(LastActivityDate)) AS [Post Last Activity Date], 79 | Title AS [Post Title], 80 | "Tags" AS [Post Tags], 81 | AnswerCount AS [Post Answer Count], 82 | CommentCount AS [Post Comment Count], 83 | FavoriteCount AS [Post Favourite Count], 84 | ParentId AS [Post Parent ID], 85 | Timestamp(ClosedDate) AS [Post Closed Timestamp], 86 | Date(Floor(ClosedDate)) AS [Post Closed Date], 87 | Timestamp(CommunityOwnedDate) AS [Post Community Owned Timestamp], 88 | Date(Floor(CommunityOwnedDate)) AS [Post Community Owned Date], 89 | OwnerDisplayName AS [Post Owner Name], 90 | LastEditorDisplayName AS [Post Last Editor Name] 91 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).qvd] 92 | (qvd); 93 | 94 | Call sTimerEnd('$(vConn_Test_File) QVD unoptimised load & transform 1 (load) [$(vConn_Test_Data)])'); 95 | 96 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File)_ult1.qvd] (qvd); 97 | DROP TABLE [$(vConn_Test_File)]; 98 | 99 | Call sTimerEnd('$(vConn_Test_File) QVD unoptimised load & transform 1 (store) [$(vConn_Test_Data)])'); 100 | 101 | /////////// Unoptimised load and store #2 102 | [$(vConn_Test_File)]: 103 | LOAD 104 | *, 105 | Hash128(Id) AS IdHash 106 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).qvd] 107 | (qvd) 108 | WHERE WildMatch([Body],'*app*') OR WildMatch(Id,'*1*'); 109 | 110 | Call sTimerEnd('$(vConn_Test_File) QVD unoptimised load & transform 2 (load) [$(vConn_Test_Data)])'); 111 | 112 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File)_ult2.qvd] (qvd); 113 | DROP TABLE [$(vConn_Test_File)]; 114 | 115 | Call sTimerEnd('$(vConn_Test_File) QVD unoptimised load & transform 2 (store) [$(vConn_Test_Data)])'); 116 | 117 | /////////// Optimised load and resident then store 118 | [$(vConn_Test_File)_Temp]: 119 | LOAD 120 | * 121 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).qvd] 122 | (qvd); 123 | 124 | [$(vConn_Test_File)]: 125 | NoConcatenate 126 | LOAD 127 | Id AS [Post ID], 128 | PostTypeId AS [Post Type ID], 129 | AcceptedAnswerId AS [Post Accepted Answer ID], 130 | Timestamp(CreationDate) AS [Post Created Timestamp], 131 | Date(Floor(CreationDate)) AS [Post Created Date], 132 | Score AS [Post Score], 133 | ViewCount AS [Post View Count], 134 | Body AS [Post Body], 135 | Hash256(Body) AS [Post Body Hash], 136 | OwnerUserId AS [Post Owner User ID], 137 | LastEditorUserId AS [Post Last Editor User ID], 138 | Timestamp(LastEditDate) AS [Post Last Edit Timestamp], 139 | Date(Floor(LastEditDate)) AS [Post Last Edit Date], 140 | Timestamp(LastActivityDate) AS [Post Last Activity Timestamp], 141 | Date(Floor(LastActivityDate)) AS [Post Last Activity Date], 142 | Title AS [Post Title], 143 | "Tags" AS [Post Tags], 144 | AnswerCount AS [Post Answer Count], 145 | CommentCount AS [Post Comment Count], 146 | FavoriteCount AS [Post Favourite Count], 147 | ParentId AS [Post Parent ID], 148 | Timestamp(ClosedDate) AS [Post Closed Timestamp], 149 | Date(Floor(ClosedDate)) AS [Post Closed Date], 150 | Timestamp(CommunityOwnedDate) AS [Post Community Owned Timestamp], 151 | Date(Floor(CommunityOwnedDate)) AS [Post Community Owned Date], 152 | OwnerDisplayName AS [Post Owner Name], 153 | LastEditorDisplayName AS [Post Last Editor Name] 154 | RESIDENT [$(vConn_Test_File)_Temp]; 155 | 156 | DROP TABLE [$(vConn_Test_File)_Temp]; 157 | 158 | Call sTimerEnd('$(vConn_Test_File) QVD optimised load & transform 1 (load) [$(vConn_Test_Data)])'); 159 | 160 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File)_olt1.qvd] (qvd); 161 | DROP TABLE [$(vConn_Test_File)]; 162 | 163 | Call sTimerEnd('$(vConn_Test_File) QVD optimised load & transform 1 (store) [$(vConn_Test_Data)])'); 164 | 165 | Next iter_Test; -------------------------------------------------------------------------------- /examples/qs_encryption_qvd_performance/07_Users.qvs: -------------------------------------------------------------------------------- 1 | // Pick which data set 2 | LET vConn_Test_Data = '$(vConn_QVD_Server)'; 3 | LET vConn_Test_File = 'Users'; 4 | 5 | // Create table 6 | LET vConn_Test_Table = lower('$(vConn_Test_File)'); 7 | 8 | // Loop over test 9 | FOR iter_Test = 0 to vTest_Iterations-1 10 | 11 | /////////// Raw load 12 | [$(vConn_Test_File)]: 13 | LOAD 14 | Id, 15 | Reputation, 16 | CreationDate, 17 | DisplayName, 18 | LastAccessDate, 19 | WebsiteUrl, 20 | Location, 21 | AboutMe, 22 | Views, 23 | UpVotes, 24 | DownVotes, 25 | AccountId, 26 | ProfileImageUrl 27 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).xml] 28 | (XmlSimple, table is [$(vConn_Test_Table)/row]); 29 | 30 | Call sTimerEnd('$(vConn_Test_File) raw load [$(vConn_Test_Data)])'); 31 | 32 | /////////// Badge raw store & drop 33 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File).qvd] (qvd); 34 | DROP TABLE [$(vConn_Test_File)]; 35 | 36 | Call sTimerEnd('$(vConn_Test_File) raw store & drop [$(vConn_Test_Data)])'); 37 | 38 | 39 | /////////// QVD optimised load 40 | [$(vConn_Test_File)]: 41 | LOAD 42 | * 43 | FROM [$(vConn_Test_Data)/$(vConn_Test_File).qvd] (qvd); 44 | 45 | Call sTimerEnd('$(vConn_Test_File) QVD optimised load [$(vConn_Test_Data)])'); 46 | 47 | /////////// Badge second store & drop 48 | STORE [$(vConn_Test_File)] INTO [$(vConn_Test_Data)/$(vConn_Test_File)_copy.qvd] (qvd); 49 | DROP TABLE [$(vConn_Test_File)]; 50 | 51 | Call sTimerEnd('$(vConn_Test_File) QVD store and drop [$(vConn_Test_Data)])'); 52 | 53 | Next iter_Test; -------------------------------------------------------------------------------- /examples/qs_encryption_qvd_performance/08_Store Logging.qvs: -------------------------------------------------------------------------------- 1 | // Store the logging to disk 2 | LET vLog_Timestamp = timestamp(now(),'YYYYMMDD_hhmmss'); 3 | STORE Logging INTO [$(vConn_QVD)/Logging_$(vLog_Timestamp).qvd] (qvd); 4 | LET vLog_Timestamp = ; -------------------------------------------------------------------------------- /examples/qs_encryption_qvd_performance/09_exit script.qvs: -------------------------------------------------------------------------------- 1 | exit script; -------------------------------------------------------------------------------- /examples/qs_encryption_qvd_performance/Encrypted QVD Performance.qvf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/examples/qs_encryption_qvd_performance/Encrypted QVD Performance.qvf -------------------------------------------------------------------------------- /examples/qs_flexible_pivot.qvf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/examples/qs_flexible_pivot.qvf -------------------------------------------------------------------------------- /examples/qs_variable_load/MyVariables.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/examples/qs_variable_load/MyVariables.xlsx -------------------------------------------------------------------------------- /examples/qs_variable_load/qs_variable_load.qvf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/examples/qs_variable_load/qs_variable_load.qvf -------------------------------------------------------------------------------- /examples/recno_rowno_speed.qvf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/examples/recno_rowno_speed.qvf -------------------------------------------------------------------------------- /snippets/Server Disk Space v1.qvf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/withdave/qlik/930874175e114138aa4ec560880bfa2d3ccad23d/snippets/Server Disk Space v1.qvf -------------------------------------------------------------------------------- /snippets/bat_DriveSpace.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | :: Batch file to log out a local drive report to a .log file in the same directory 3 | :: Log files are named based on the date and time 4 | 5 | :: Specify which servers to attempt to return disk space for (delimit with spaces) 6 | :: Enter hostname, and ensure the account running this script has local or domain admin rights 7 | set SERVER_LIST=server 8 | 9 | :: Set output date time (YYYYMMDD_hhmmss) 10 | set LOG_TIMESTAMP=%date:~-4,4%%date:~-7,2%%date:~0,2%_%time:~0,2%%time:~3,2%%time:~6,2% 11 | set LOG_DATE=%date:~-4,4%%date:~-7,2%%date:~0,2% 12 | set LOG_TIME=%time:~0,2%%time:~3,2%%time:~6,2% 13 | 14 | :: Specify output prefix - this uses an absolute path, if you prefer to use relative then ensure the scheduler includes a "start in" path 15 | set LOG_LOCATION=C:\DriveReport\DriveReport_%LOG_TIMESTAMP%.log 16 | 17 | :: Create empty output file 18 | >nul copy nul %LOG_LOCATION% 19 | 20 | :: Loop over each server to return stats and append to log file 21 | echo ServerName,LogDate,LogTime,Drive,Size,FreeSpace>>%LOG_LOCATION% 22 | for %%i in (%SERVER_LIST%) do ( 23 | 24 | for /f "tokens=1,2,3" %%a in ('wmic /node:"%%i" LogicalDisk Where DriveType^="3" Get DeviceID^,Size^,FreeSpace^|find ":"') do @echo %%i,%LOG_DATE%,%LOG_TIME%,%%a,%%c,%%b>>%LOG_LOCATION% 25 | 26 | ) 27 | -------------------------------------------------------------------------------- /snippets/bat_ServiceCheck.bat: -------------------------------------------------------------------------------- 1 | @Echo Off 2 | :: ServiceCheck.bat 3 | :: Accepts a service name, if it's running, exits. If it's not running, attempts to start it and creates a log file. 4 | :: Uplift to check and retry? 5 | 6 | Set ServiceName=%~1 7 | ::Set ServiceName=QlikSenseProxyService 8 | 9 | :: Get date in yyyyMMdd_HHmm format to use with file name. 10 | FOR /f "usebackq" %%i IN (`PowerShell ^(Get-Date^).ToString^('yyyy-MM-dd'^)`) DO SET LogDate=%%i 11 | 12 | SC queryex "%ServiceName%"|Find "STATE"|Find /v "RUNNING">Nul&&( 13 | echo %ServiceName% not running 14 | echo Start %ServiceName% 15 | 16 | Net start "%ServiceName%">nul||( 17 | Echo "%ServiceName%" wont start 18 | exit /b 1 19 | ) 20 | 21 | echo "%ServiceName%" started 22 | 23 | :: Now log out to a file so we have some sort of history 24 | echo ### Service [%ServiceName%] not running on %LogDate% & echo %Time% Attempting to start service.>>"%~dp0ServiceCheck_%ServiceName%_%LogDate%.log" 25 | exit /b 0 26 | 27 | )||( 28 | :: All OK, let's just write to console and exit 29 | echo "%ServiceName%" working 30 | exit /b 0 31 | ) 32 | -------------------------------------------------------------------------------- /snippets/node_gen_date_range.js: -------------------------------------------------------------------------------- 1 | /* 2 | To do: 3 | - Handle start date correctly 4 | - Wrap end date to last day 5 | */ 6 | 7 | function generateDateArray(timestamp) { 8 | var currentDate = new Date(); 9 | var currentDate = currentDate.setDate(currentDate.getDate() - 2); 10 | const startDate = new Date(timestamp); 11 | var iterDate = new Date(timestamp); 12 | const datesArray = []; 13 | 14 | // Set the start date's time to match the provided timestamp 15 | startDate.setHours(new Date(timestamp).getHours()); 16 | startDate.setMinutes(new Date(timestamp).getMinutes()); 17 | startDate.setSeconds(new Date(timestamp).getSeconds()); 18 | startDate.setMilliseconds(new Date(timestamp).getMilliseconds()); 19 | 20 | iterDate = new Date(JSON.parse(JSON.stringify(startDate))); 21 | iterDate.setHours(24,59,59,999); 22 | datesArray.push((new Date(startDate)).toISOString() + '/' + (new Date(iterDate)).toISOString()); 23 | 24 | 25 | while (startDate < currentDate) { 26 | //console.log(startDate + currentDate) 27 | startDate.setDate(startDate.getDate() + 1); 28 | //startDate.setHours(1); 29 | startDate.setHours(1,0,0,0); 30 | 31 | 32 | iterDate = new Date(JSON.parse(JSON.stringify(startDate))); 33 | iterDate.setHours(24,59,59,999); 34 | 35 | datesArray.push((new Date(startDate)).toISOString() + '/' + (new Date(iterDate)).toISOString()); 36 | } 37 | 38 | // Add the current timestamp 39 | var today = new Date(); 40 | var today = today.setHours(1,0,0,0); 41 | datesArray.push((new Date(today)).toISOString() + '/' + (new Date()).toISOString()); 42 | 43 | return datesArray; 44 | } 45 | 46 | // Example usage 47 | const inputTimestamp = 1684262549000; 48 | const dateArray = generateDateArray(inputTimestamp); 49 | 50 | console.log(dateArray); -------------------------------------------------------------------------------- /snippets/q_all_fields.md: -------------------------------------------------------------------------------- 1 | # Load all fields in a data model 2 | 3 | Script loads all fields in a data model into a single field. Useful for custom pivot sheets. 4 | 5 | ``` 6 | // Let's keep things tidy in the UI 7 | Set HidePrefix = '%'; 8 | 9 | // Load a table of field names 10 | // First iter over all tables 11 | FOR iter_tables=0 to NoOfTables() - 1 12 | 13 | // Next iter over all fields in this table 14 | FOR iter_fields=1 to NoOfFields(TableName($(iter_tables))) 15 | 16 | Island_Fields: 17 | LOAD 18 | TableName($(iter_tables)) AS %TableName, 19 | FieldName($(iter_fields),TableName($(iter_tables))) AS %FieldName 20 | AutoGenerate 1; 21 | 22 | NEXT iter_fields; 23 | NEXT iter_tables; 24 | ``` 25 | 26 | ## To then reference specific fields in a pivot table 27 | 28 | This can be used on the show column property of a pivot table. 29 | 30 | ``` 31 | SubStringCount('|' & Concat(distinct %FieldName, '|') & '|', '|Expression1|') 32 | ``` 33 | -------------------------------------------------------------------------------- /snippets/q_colours.md: -------------------------------------------------------------------------------- 1 | # Qlik colour list 2 | 3 | ## Green white & grey (QS 2020) 4 | 5 | 6 | | Colour | HEX | RGB | 7 | |--- |--- |--- | 8 | | Green | 009845 | 0,152,69 | 9 | | White | ffffff | 255,255,255 | 10 | | Light Grey (Alternative) | dddddd | 221,221,221 | 11 | | Dark Grey (Excluded) | a9a9a9 | 169,169,169 | 12 | -------------------------------------------------------------------------------- /snippets/q_gitignore: -------------------------------------------------------------------------------- 1 | ## GitIgnore built from own experience and using samples from https://github.com/github/gitignore 2 | ## DC - 20190920 3 | 4 | ## Qlik exclusions 5 | # Ignore all Qlik data files 6 | *.qvd 7 | 8 | 9 | ## Windows exclusions 10 | # Windows thumbnail cache files 11 | Thumbs.db 12 | Thumbs.db:encryptable 13 | ehthumbs.db 14 | ehthumbs_vista.db 15 | 16 | # Dump file 17 | *.stackdump 18 | 19 | # Folder config file 20 | [Dd]esktop.ini 21 | 22 | # Recycle Bin used on file shares 23 | $RECYCLE.BIN/ 24 | 25 | # Windows Installer files 26 | *.cab 27 | *.msi 28 | *.msix 29 | *.msm 30 | *.msp 31 | 32 | # Windows shortcuts 33 | *.lnk 34 | 35 | ## Dropbox exclusions 36 | # Dropbox settings and caches 37 | .dropbox 38 | .dropbox.attr 39 | .dropbox.cache 40 | 41 | ## MacOS exclusions 42 | # General 43 | .DS_Store 44 | .AppleDouble 45 | .LSOverride 46 | 47 | # Icon must end with two \r 48 | Icon 49 | 50 | 51 | # Thumbnails 52 | ._* 53 | 54 | # Files that might appear in the root of a volume 55 | .DocumentRevisions-V100 56 | .fseventsd 57 | .Spotlight-V100 58 | .TemporaryItems 59 | .Trashes 60 | .VolumeIcon.icns 61 | .com.apple.timemachine.donotpresent 62 | 63 | # Directories potentially created on remote AFP share 64 | .AppleDB 65 | .AppleDesktop 66 | Network Trash Folder 67 | Temporary Items 68 | .apdisk 69 | 70 | # Finally I want to exclude any packages I might make 71 | *.zip 72 | -------------------------------------------------------------------------------- /snippets/q_groupby_performance.md: -------------------------------------------------------------------------------- 1 | # Group By Performance 2 | 3 | This script explores whether it's faster to aggregate data using a straight group by, or if there's a benefit to sorting the data first. 4 | 5 | Tests: 6 | * Order by, group by (two resident loads) 7 | * Group by (one resident load) 8 | 9 | ## Script 10 | ``` 11 | ///$tab sLog 12 | Log: 13 | Load 14 | null() AS TestName 15 | AutoGenerate 0; 16 | 17 | Sub sLog(vTest,vStart,vFinish) 18 | 19 | TRACE >> Log [$(vTest)] started $(vStart) finished $(vFinish).; 20 | 21 | Concatenate(Log) 22 | LOAD 23 | '$(vTest)' AS TestName, 24 | '$(vStart)' AS TestStart, 25 | '$(vFinish)'AS TestFinish 26 | AutoGenerate 1; 27 | 28 | END SUB; 29 | 30 | 31 | ///$tab Load Sample Data 32 | // Load data from QVD 33 | Source: 34 | LOAD 35 | // TransLineID, 36 | // TransID, 37 | Number, 38 | Dimension1, 39 | // Dimension2, 40 | Dimension3, 41 | // Dimension4, 42 | Expression1 43 | // Expression2, 44 | // Expression3 45 | FROM [lib://dir_root_azurefiles/me/RandomData.qvd] 46 | (qvd); 47 | 48 | // Set number of iterations to run 49 | SET vIter = 5; 50 | 51 | ///$tab Order by, Group by 52 | For i=0 to vIter 53 | 54 | // Group by letter on a sum 55 | LET vTestStart = timestamp(now(),'hh:mm:ss'); 56 | 57 | // Load order by 58 | OrderBy: 59 | LOAD 60 | Dimension1, 61 | Expression1 62 | RESIDENT Source 63 | ORDER BY 64 | Dimension1; 65 | 66 | GroupBy: 67 | LOAD 68 | Dimension1, 69 | SUM(Expression1) AS Exp1 70 | RESIDENT OrderBy 71 | GROUP BY 72 | Dimension1; 73 | 74 | DROP Table OrderBy, GroupBy; 75 | 76 | Call sLog('Dimension1 - O&G','$(vTestStart)',timestamp(now(),'hh:mm:ss')); 77 | 78 | // Group by a large dimension 79 | LET vTestStart = timestamp(now(),'hh:mm:ss'); 80 | 81 | // Load order by 82 | OrderBy: 83 | LOAD 84 | Dimension1, 85 | Expression1 86 | RESIDENT Source 87 | ORDER BY 88 | Dimension1; 89 | 90 | GroupBy: 91 | LOAD 92 | Dimension1, 93 | SUM(Expression1) AS Exp1 94 | RESIDENT OrderBy 95 | GROUP BY 96 | Dimension1; 97 | 98 | DROP Table OrderBy, GroupBy; 99 | 100 | Call sLog('Dimension3 - O&G','$(vTestStart)',timestamp(now(),'hh:mm:ss')); 101 | 102 | // Group by number on a sum 103 | LET vTestStart_1 = timestamp(now(),'hh:mm:ss'); 104 | 105 | // Load order by 106 | OrderBy: 107 | LOAD 108 | Number, 109 | Expression1 110 | RESIDENT Source 111 | ORDER BY 112 | Number; 113 | 114 | GroupBy: 115 | LOAD 116 | Number, 117 | SUM(Expression1) AS Exp1 118 | RESIDENT OrderBy 119 | GROUP BY 120 | Number; 121 | 122 | DROP Table OrderBy, GroupBy; 123 | 124 | Call sLog('Number - O&G','$(vTestStart)',timestamp(now(),'hh:mm:ss')); 125 | 126 | 127 | Next i; 128 | 129 | 130 | ///$tab Group by 131 | For i=0 to vIter 132 | 133 | // Group by letter on a sum 134 | LET vTestStart = timestamp(now(),'hh:mm:ss'); 135 | 136 | Temp_1: 137 | LOAD 138 | Dimension1, 139 | SUM(Expression1) AS Exp1 140 | RESIDENT Source 141 | GROUP BY 142 | Dimension1; 143 | 144 | DROP Table Temp_1; 145 | 146 | Call sLog('Dimension1 - G','$(vTestStart)',timestamp(now(),'hh:mm:ss')); 147 | 148 | // Group by a large dimension 149 | LET vTestStart = timestamp(now(),'hh:mm:ss'); 150 | 151 | Temp_1: 152 | LOAD 153 | Dimension3, 154 | SUM(Expression1) AS Exp1 155 | RESIDENT Source 156 | GROUP BY 157 | Dimension3; 158 | 159 | DROP Table Temp_1; 160 | 161 | Call sLog('Dimension3 - G','$(vTestStart)',timestamp(now(),'hh:mm:ss')); 162 | 163 | // Group by number on a sum 164 | LET vTestStart_1 = timestamp(now(),'hh:mm:ss'); 165 | 166 | Temp_1: 167 | LOAD 168 | Number, 169 | SUM(Expression1) AS Exp1 170 | RESIDENT Source 171 | GROUP BY 172 | Number; 173 | 174 | DROP Table Temp_1; 175 | 176 | Call sLog('Number - G','$(vTestStart)',timestamp(now(),'hh:mm:ss')); 177 | 178 | 179 | Next i; 180 | 181 | 182 | ///$tab Generate Data 183 | // This is here just for reference 184 | exit script; 185 | // Generate some random data 186 | // This is roughly based off of the Qlik Ctrl+O+O default script, but with a bit more variety 187 | SET vRecordCount = 5000000; 188 | 189 | Transactions: 190 | Load 191 | IterNo() as TransLineID, 192 | RecNo() as TransID, 193 | mod(RecNo(),26)+1 as Number, 194 | chr(Floor(26*Rand())+65) as Dimension1, 195 | chr(Floor(26*Rand())+97) as Dimension2, 196 | chr(Floor(26*Rand())+pick(round(rand())+1,65,97)) 197 | &chr(Floor(26*Rand())+pick(round(rand())+1,65,97)) 198 | &chr(Floor(26*Rand())+pick(round(rand())+1,65,97)) 199 | &chr(Floor(26*Rand())+pick(round(rand())+1,65,97)) as Dimension3, 200 | // Hash128(Rand()) as Dimension4, 201 | round(1000000*Rand(),0.01) as Expression1, 202 | Round(1000*Rand()*Rand()) as Expression2, 203 | Round(Rand()*Rand()*Rand(),0.00001) as Expression3 204 | Autogenerate $(vRecordCount); 205 | 206 | // Add comments to the dimension fields 207 | Comment Field Dimension1 With "Random upper-case letter"; 208 | Comment Field Dimension2 With "Random lower-case letter"; 209 | Comment Field Dimension3 With "Random four letter string"; 210 | Comment Field Dimension4 With "Random string (hash128)"; 211 | Comment Field Expression1 With "Random value between 0 and 1000000 (2dp)"; 212 | Comment Field Expression2 With "Random value between 0 and 1000 (0dp)"; 213 | Comment Field Expression3 With "Random value between 0 and 1 (5dp)"; 214 | 215 | STORE Transactions INTO [lib://dir_root_qlikdatastore/me/RandomData.qvd] (qvd); 216 | DROP TABLE Transactions; 217 | 218 | ``` 219 | -------------------------------------------------------------------------------- /snippets/q_load_dynamic_date_fields.md: -------------------------------------------------------------------------------- 1 | 2 | # Load a source file with changing field names and crosstable 3 | For use where the data has columns named after dates, and the dates change each week. Likely to be a cleaner way of doing this! 4 | 5 | Data Example - Qlik Inline 6 | ``` 7 | LOAD * INLINE [ 8 | F1, F2, F3, F4, F5, F6, F7, F8, F9, F10, F11, F12, F13, F14, F15, F16 9 | Resource Name, Project Name, Assignment Bill Rate (USD), 29/03/2020, 05/04/2020, 12/04/2020, 19/04/2020, 26/04/2020, 03/05/2020, 10/05/2020, 17/05/2020, 24/05/2020, 31/05/2020, 07/06/2020, 14/06/2020, 21/06/2020 10 | Person A, Project 1, 100.00, -, -, -, -, -, -, -, 500.00, -, -, -, -, - 11 | Person B, Project 2, 100.00, 400.00, 0.00, 0.00, 0.00, 500.00, -, -, -, -, -, -, -, - 12 | Person C, Project 3, 100.00, -, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, -, 650.00, - 13 | Person A, Project 4, 100.00, -, -, -, -, 900.00, -, -, -, -, -, -, -, - 14 | Person B, Project 5, 100.00, 350.00, -, -, -, -, -, 750.00, -, -, -, -, -, - 15 | ]; 16 | ``` 17 | 18 | Data Example - Tabular 19 | | Resource Name | Project Name | Assignment Bill Rate (USD) | 29/03/2020 | 05/04/2020 | 12/04/2020 | 19/04/2020 | 26/04/2020 | 03/05/2020 | 10/05/2020 | 17/05/2020 | 24/05/2020 | 31/05/2020 | 07/06/2020 | 14/06/2020 | 21/06/2020 | 20 | |---------------|--------------|----------------------------|------------|------------|------------|------------|------------|------------|------------|------------|------------|------------|------------|------------|------------| 21 | | Person A | Project 1 | 100.00 | - | - | - | - | - | - | - | 500.00 | - | - | - | - | - | 22 | | Person B | Project 2 | 100.00 | 400.00 | 0.00 | 0.00 | 0.00 | 500.00 | - | - | - | - | - | - | - | - | 23 | | Person C | Project 3 | 100.00 | - | 0.00 | 0.00 | 0.00 | 0.00 | 0.00 | 0.00 | 0.00 | 0.00 | 0.00 | - | 650.00 | - | 24 | | Person A | Project 4 | 100.00 | - | - | - | - | 900.00 | - | - | - | - | - | - | - | - | 25 | | Person B | Project 5 | 100.00 | 350.00 | - | - | - | - | - | 750.00 | - | - | - | - | - | - | 26 | 27 | 28 | Qlik load script 29 | ``` 30 | // Load the table definition 31 | // Can be specified as A/B/C or with * 32 | Table_Definition: 33 | FIRST 1 34 | LOAD 35 | * 36 | FROM [lib://DLs/TLExport.xlsx] 37 | (ooxml, no labels, table is Sheet1); 38 | 39 | // Load and print field counts 40 | LET vDefinition_Count = NoOfFields('Table_Definition'); 41 | TRACE >> Found $(vDefinition_Count) fields.; 42 | 43 | // Create an empty field definition 44 | SET vDefinition_Fields = ''; 45 | 46 | // Iterate over the table to build the field definition 47 | FOR vIter = 1 TO vDefinition_Count 48 | 49 | LET vLoop_FieldValue = Peek(FieldName(vIter,'Table_Definition'),0,'Table_Definition'); 50 | LET vLoop_FieldValue = IF(IsNum('$(vLoop_FieldValue)'),NUM('$(vLoop_FieldValue)','00000'),'$(vLoop_FieldValue)'); 51 | LET vDefinition_Fields = '$(vDefinition_Fields)' & chr(91) & '$(vLoop_FieldValue)' & chr(93) & ','; 52 | 53 | Next vIter; 54 | 55 | // Clean the trailing ',', drop source table and trace out 56 | LET vDefinition_Fields = LEFT('$(vDefinition_Fields)',LEN('$(vDefinition_Fields)')-1); 57 | TRACE >> $(vDefinition_Fields); 58 | DROP TABLE Table_Definition; 59 | 60 | 61 | // Now load the new table with our custom definition 62 | MyData: 63 | CrossTable('Date','Revenue',3) 64 | LOAD 65 | $(vDefinition_Fields) 66 | FROM [lib://DLs/TLExport.xlsx] 67 | (ooxml, embedded labels, table is Sheet1); 68 | ``` 69 | -------------------------------------------------------------------------------- /snippets/q_random_data.md: -------------------------------------------------------------------------------- 1 | # Generate some random data 2 | 3 | ``` 4 | // Generate some random data 5 | // This is roughly based off of the Qlik Ctrl+O+O default script, but with a bit more variety 6 | SET vRecordCount = 50000; 7 | 8 | Transactions: 9 | Load 10 | IterNo() as TransLineID, 11 | RecNo() as TransID, 12 | mod(RecNo(),26)+1 as Number, 13 | chr(Floor(26*Rand())+65) as Dimension1, 14 | chr(Floor(26*Rand())+97) as Dimension2, 15 | chr(Floor(26*Rand())+pick(round(rand())+1,65,97)) 16 | &chr(Floor(26*Rand())+pick(round(rand())+1,65,97)) 17 | &chr(Floor(26*Rand())+pick(round(rand())+1,65,97)) 18 | &chr(Floor(26*Rand())+pick(round(rand())+1,65,97)) as Dimension3, 19 | Hash128(Rand()) as Dimension4, 20 | round(1000000*Rand(),0.01) as Expression1, 21 | Round(1000*Rand()*Rand()) as Expression2, 22 | Round(Rand()*Rand()*Rand(),0.00001) as Expression3 23 | Autogenerate $(vRecordCount); 24 | 25 | // Add comments to describe each field 26 | Comment Field Dimension1 With "Random upper-case letter"; 27 | Comment Field Dimension2 With "Random lower-case letter"; 28 | Comment Field Dimension3 With "Random four letter string"; 29 | Comment Field Dimension4 With "Random string (hash128)"; 30 | Comment Field Expression1 With "Random value between 0 and 1000000 (2dp)"; 31 | Comment Field Expression2 With "Random value between 0 and 1000 (0dp)"; 32 | Comment Field Expression3 With "Random value between 0 and 1 (5dp)"; 33 | ``` 34 | -------------------------------------------------------------------------------- /snippets/q_variable_creator.md: -------------------------------------------------------------------------------- 1 | # Variable generator 2 | 3 | Simple qvs to create a number of variables for testing APIs and client. 4 | 5 | ``` 6 | // Snippet to produce 1000 variables (all quite long, all with an integer at the start to make them unique) 7 | 8 | For i=0 to 1000 9 | 10 | LET [v_$(i)] = 'Var' & $(i) & ': This is a wonderfully long variable which doesnt fit into the bla bLorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industrys standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.'; 11 | 12 | Next i; 13 | ``` 14 | -------------------------------------------------------------------------------- /snippets/q_version_tab.qvs: -------------------------------------------------------------------------------- 1 | /* 2 | ********************* INFORMATION ********************* 3 | 4 | CREATED BY: withdave 5 | CREATED DATE: 19/03/2020 6 | PURPOSE: RAW Extractor for Teradata T3 datamarts 7 | 8 | ********************* CHANGE LOG ********************* 9 | */ 10 | 11 | // Update this version control table after each edit. Avoid using the ; delimiter in your change log 12 | Island_VersionControl: 13 | LOAD 14 | * 15 | INLINE [ 16 | %Ver_Version; %Ver_Date; %Ver_Author; %Ver_Change 17 | 1; 19/03/2020; withdave; Initial version 18 | 2; 21/04/2020; withdave; Changed store file names and added mapping for region from contact 19 | 3; 28/04/2020; withdave; Added transformed contact file 20 | ] (delimiter is ';'); 21 | 22 | 23 | // Do not edit the section below as this loads and sets the version variable in the app using the table above 24 | // Identify the maximum version 25 | Temp_Version: 26 | LOAD 27 | Max(%Ver_Version) AS Temp_Max_Version 28 | RESIDENT Island_VersionControl; 29 | 30 | // Set the version variable 31 | LET vVersion = Peek('Temp_Max_Version',0,'Temp_Version'); 32 | 33 | // Drop the temporary table 34 | DROP TABLE Temp_Version; 35 | 36 | -------------------------------------------------------------------------------- /snippets/qcs_audit_delete_data_connections.md: -------------------------------------------------------------------------------- 1 | # Auditing and deleting data connections by type 2 | 3 | It is not possible to disable specific data connection types on QSE SaaS. 4 | 5 | Instead, we could automate the platform to proactively search for and remove data connections that are not approved for use. 6 | 7 | Process: 8 | - Return data connections 9 | - Delete those which shouldn't be present 10 | - Store information on the deleted connections and owners 11 | 12 | ## Script 13 | 14 | The script grabs all data connections, creates a new object containing the type we're after, then blindly deletes the connections from the object. 15 | 16 | It also stores a list of the connections it's about to delete as a CSV into the same directory prior to deleting them. 17 | 18 | ``` 19 | # This script doesn't do error handling or tracking, it simply 'does' 20 | # Set the connection type to delete 21 | $connectionToDeleteType = 'File_AmazonS3Connector'; 22 | 23 | # Grab all non-datafiles connections 24 | $dataConnections = qlik raw get v1/data-connections --query noDatafiles=true | ConvertFrom-Json 25 | 26 | # Create a new object containing only those with the connection type we're after 27 | $connectionsToDelete = Foreach ($dataConnection in $dataConnections) { 28 | If ($dataConnection.datasourceID -eq $connectionToDeleteType) { 29 | New-Object PSObject $dataConnection; 30 | } 31 | } 32 | 33 | # Store this object down into a CSV before we try to run deletes 34 | $connectionsToDelete | Export-Csv -Path ".\ConnectionsToDelete_$($connectionToDeleteType)_$(get-date -f yyyyMMdd_hhmmss).csv" -NoTypeInformation 35 | 36 | # Create a new object containing only those with the connection type we're after 37 | Foreach ($connectionToDelete in $connectionsToDelete) { 38 | If ($connectionToDelete.datasourceID -eq $connectionToDeleteType) { 39 | qlik raw delete v1/data-connections/$($connectionToDelete.id) 40 | } 41 | } 42 | 43 | ``` 44 | 45 | ## Script to generate dummy data connections on S3 46 | 47 | This script generates 100 dummy S3 data connections to test the audit script above. Change the connection owner and space ID prior to running. 48 | 49 | ``` 50 | for ($i=1; $i -lt 101; $i++) { 51 | 52 | $ownerID = "" 53 | $s3Bucket = "my-best-bucket-$i" 54 | $s3BucketRegion = "eu-west-1" 55 | $connectionName = "A test S3 connection that should not be here ($i)" 56 | $accessKey = "" 57 | $secretKey = "" 58 | $spaceID = "" 59 | $payload = @{ 60 | "datasourceID" = "File_AmazonS3Connector" 61 | "owner" = "$ownerID" 62 | "qConnectStatement" = "CUSTOM CONNECT TO `\provider=QvWebStorageProviderConnectorPackage.exe; sourceType=File_AmazonS3Connector; region=$s3BucketRegion; bucketName=$s3Bucket;`\" 63 | "qName" = "$connectionName" 64 | "qType" = "QvWebStorageProviderConnectorPackage.exe" 65 | "qUsername" = $null 66 | "qPassword" = "mysupersecurekey" 67 | "space" = "$spaceID" 68 | } 69 | $payload = ConvertTo-Json $payload | % { $_ -replace '"', '\"' } | % { $_ -replace '\\\\', '\\\"' } 70 | qlik raw post v1/data-connections --body $payload --verbose; 71 | } 72 | 73 | ``` 74 | 75 | -------------------------------------------------------------------------------- /snippets/qcs_create_user_assign_license.md: -------------------------------------------------------------------------------- 1 | # Create users and assign licenses on a QCS tenant 2 | 3 | ## Approach 4 | 5 | This script: 6 | 1) Defines the tenant ID 7 | 2) Loads the planned user list from a CSV file 8 | 3) Uses Qlik CLI to create the users 9 | 4) Uses the QCS APIs to assign users licenses (due to a bug in the CLI on this endpoint) 10 | 11 | This will not overwrite or amend licenses already assigned. 12 | 13 | ## Input 14 | 15 | The input for this script is a CSV file which provides required user information and their associated license type. 16 | 17 | In the example below, some users aren't assigned licenses, which the script will report but not fail on. 18 | 19 | | UserSubject | UserEmail | UserName | UserLicense | 20 | |-------------|---------------------|----------|--------------| 21 | | test\user1 | user1@mydomain.com | User 1 | professional | 22 | | test\user2 | user2@mydomain.com | User 2 | analyzer | 23 | | test\user3 | user3@mydomain.com | User 3 | professional | 24 | | test\user4 | user4@mydomain.com | User 4 | professional | 25 | | test\user5 | user5@mydomain.com | User 5 | analyzer | 26 | | test\user6 | user6@mydomain.com | User 6 | professional | 27 | | test\user7 | user7@mydomain.com | User 7 | analyzer | 28 | | test\user8 | user8@mydomain.com | User 8 | | 29 | | test\user9 | user9@mydomain.com | User 9 | | 30 | | test\user10 | user10@mydomain.com | User 10 | | 31 | | test\user11 | user11@mydomain.com | User 11 | professional | 32 | | test\user12 | user12@mydomain.com | User 12 | analyzer | 33 | | test\user13 | user13@mydomain.com | User 13 | | 34 | | test\user14 | user14@mydomain.com | User 14 | | 35 | | test\user15 | user15@mydomain.com | User 15 | professional | 36 | | test\user16 | user16@mydomain.com | User 16 | analyzer | 37 | | test\user17 | user17@mydomain.com | User 17 | professional | 38 | | test\user18 | user18@mydomain.com | User 18 | analyzer | 39 | | test\user19 | user19@mydomain.com | User 19 | professional | 40 | | test\user20 | user20@mydomain.com | User 20 | analyzer | 41 | 42 | 43 | ## Script 44 | 45 | ``` 46 | # We need to get the tenant ID. Either specify it, or run the below to grab it 47 | # If you know it... 48 | $tenantId = ''; 49 | 50 | # If you don't know it...get it from somewhere that this API key has access to 51 | $apiKeys = qlik api-key ls | ConvertFrom-Json 52 | $tenantId = $apiKeys[0].tenantId 53 | 54 | # Load our user list from CSV 55 | $pathUsers = "C:\path\qcs_user_provisioning.csv" 56 | $users = Import-Csv -Path $Path 57 | 58 | function license-assignment-add { 59 | # This isn't great, but this endpoint is experimental and going via CLI doesn't currently work correctly (should be fixed soon) 60 | # This requires the tenant URL plus the API key (could also pull this from CLI) 61 | param( 62 | [parameter (position=0)] 63 | [string]$subject, 64 | [parameter (position=1)] 65 | [validateset(ignorecase=$false,'professional','analyzer')] 66 | [string]$type 67 | ) 68 | 69 | # Define your tenant URL 70 | $tenant = "saas.us.qlikcloud.com" 71 | 72 | # Define your API key 73 | $apikey = "YOURKEY" 74 | 75 | # Dummy value for the headers 76 | $hdrs = @{} 77 | 78 | # Add in the API key to the headers 79 | $hdrs.Add("Authorization","Bearer $($apikey)") 80 | 81 | # Handle TLS 1.2 only environments 82 | [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]'Ssl3,Tls,Tls11,Tls12' 83 | 84 | # Construct the payload 85 | $payload = @([pscustomobject]@{ 86 | "subject" = $subject 87 | "type" = $type 88 | }) 89 | 90 | # Construct the request to match the schema 91 | $assignment = @{ 92 | "add" = $payload 93 | } | ConvertTo-Json 94 | 95 | # Send the post request to create the assignment 96 | Invoke-WebRequest -Method post -Uri "https://$($tenant)/api/v1/licenses/assignments/actions/add" -Headers $hdrs -Body $assignment -Verbose 97 | 98 | } 99 | 100 | # Loop over each user in the list 101 | foreach ($user in $users) { 102 | 103 | # Print out what we're trying to do 104 | Write-Host "Attempting to create " $user.UserSubject " with " $user.UserLicense " license" 105 | 106 | # Create the user (will display an error if the email already exists) 107 | $createUser = qlik user create --email $user.UserEmail --name $user.UserName --subject $user.UserSubject --tenantId $tenantId 108 | 109 | # Assign a license to the user (to test whether this replaces/ adds) 110 | $assignUser = license-assignment-add $user.UserSubject $user.UserLicense 111 | 112 | } 113 | 114 | ``` 115 | -------------------------------------------------------------------------------- /snippets/qcs_deleteAllGroups.md: -------------------------------------------------------------------------------- 1 | # Delete all groups from a Qlik Cloud tenant 2 | 3 | This script selects 100 groups at a time from the tenant, and sends a delete request per group to remove them from the tenant. 4 | 5 | It is possible to do this via: 6 | * API & CLI 7 | * Application automation 8 | 9 | Notes: 10 | * Groups will be repopulated from the user's claims the next time a user logs into the tenant if `Creation of groups` is enabled on the tenant 11 | 12 | # CLI deletion of groups 13 | 14 | Notes: 15 | * If the script prints "No groups returned." then the initial request either returned no groups or an error 16 | * This works on the first 1000 groups - run multiple times or increase the limit if you have more groups 17 | 18 | ``` 19 | # Assign result of groups to a groups variable 20 | $groups=$(qlik group ls --limit 1000) | ConvertFrom-Json 21 | 22 | # Replay contents of group variable 23 | Write-Host $groups.count "groups returned." 24 | 25 | 26 | # If there are groups 27 | if ($groups.count -ne 0) { 28 | # Now iterate over all groups with a delete command 29 | $groups | ForEach { 30 | 31 | $group = $_.id 32 | Write-Host "Deleting group" $_.displayName "("$group")" 33 | $deleteResponse=$(qlik group rm $group) 34 | } 35 | } else { 36 | Write-Host "No groups found." 37 | } 38 | 39 | ``` 40 | 41 | ## Automation deletion of groups 42 | 43 | Doing this in application automation is very simple. Save this snippet as a JSON file and import it to a workspace to delete all groups in the tenant. 44 | 45 | ``` 46 | {"blocks":[{"id":"53859078-C158-4484-8B86-0CE96DE2DE47","type":"StartBlock","disabled":false,"name":"Start","displayName":"Start","comment":"","childId":"B762D871-CB09-4D20-9C13-33C5A07BE77D","inputs":[{"id":"run_mode","value":"manual","type":"select","structure":{}}],"settings":[],"collapsed":[{"name":"loop","isCollapsed":false}],"x":0,"y":0},{"id":"B762D871-CB09-4D20-9C13-33C5A07BE77D","type":"EndpointBlock","disabled":false,"name":"rawAPIListRequest","displayName":"Qlik Cloud Services - Raw API List Request","comment":"","childId":null,"inputs":[{"id":"c3a1c780-2076-11ec-98c4-dd329f9ef682","value":"groups","type":"string","structure":{}},{"id":"c6915fb0-2839-11ec-a450-03c7d8aebbe7","value":null,"type":"object","mode":"keyValue","structure":{}}],"settings":[{"id":"maxitemcount","value":null,"type":"string","structure":{}},{"id":"blendr_on_error","value":"stop","type":"select","structure":{}},{"id":"cache","value":"0","type":"select","structure":{}}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":440,"y":290,"loopBlockId":"3CD5CD8D-BFB0-44D4-BF5C-50B82747E118","datasourcetype_guid":"61a87510-c7a3-11ea-95da-0fb0c241e75c","endpoint_guid":"4b993580-2072-11ec-8f59-e5aaa8656a36","endpoint_role":"list"},{"id":"3CD5CD8D-BFB0-44D4-BF5C-50B82747E118","type":"EndpointBlock","disabled":false,"name":"rawAPIRequest","displayName":"Qlik Cloud Services - Raw API Request","comment":"","childId":null,"inputs":[{"id":"4add8960-2078-11ec-be2c-7fc55d771fe6","value":"groups/{$.rawAPIListRequest.item.id}","type":"string","structure":{}},{"id":"3b992a40-2072-11ec-9f0e-ed01586a7e1b","value":"3ba2a970-2072-11ec-be02-2dd5c73abb12","type":"select","displayValue":"DELETE","structure":{}},{"id":"3b8b9090-2072-11ec-8c77-5195fbb0ca65","value":null,"type":"object","mode":"keyValue","structure":{}},{"id":"993585d0-2839-11ec-b431-df4deed6c1ae","value":null,"type":"object","mode":"keyValue","structure":{}}],"settings":[{"id":"blendr_on_error","value":"stop","type":"select","structure":{}},{"id":"cache","value":"0","type":"select","structure":{}}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":-282,"y":94,"datasourcetype_guid":"61a87510-c7a3-11ea-95da-0fb0c241e75c","endpoint_guid":"3b75dd80-2072-11ec-9043-3b2aff6123af","endpoint_role":"get"}],"variables":[]} 47 | ``` 48 | -------------------------------------------------------------------------------- /snippets/qcs_exporting_content.md: -------------------------------------------------------------------------------- 1 | # Exporting content from QCS 2 | 3 | Qlik Sense Enterprise SaaS allows export of applications from personal and shared spaces, but other content is more difficult to extract from the platform. This page summarises available options for exporting content from QCS. 4 | 5 | Note that QCS has a constant release cycle, meaning the information below become out of date at any time. 6 | 7 | ## Content Types 8 | 9 | ### Applications (private/ shared spaces) 10 | 11 | When the user has the correct permissions for the space, go to the hub, click the ellipsis (three dots) on an app and then "Export with data" or "Export without data" to download the app with the data in the data model, or without the data in the data model. 12 | 13 | Tenant or analytics admins will be able to export apps from any private/ shared space. 14 | 15 | It is not possible to export objects from within an app in a shared space that aren't owned by the exporting user (i.e. those which aren't approved/ public). 16 | 17 | ### Applications (managed spaces) 18 | 19 | It is not possible to export applications in managed spaces via the hub or management console. 20 | 21 | It is not possible to export objects from within an app in a managed space that aren't owned by the exporting user. 22 | 23 | The alternatives are: 24 | * Export a copy of the app from a shared space (if it exists), or from the QSEoW platform that's distributing the app to that managed space (if distributed) 25 | * Duplicate sheets in the app and copy them into a new app (if the user has at least contribute permissions on the space, and the data model exists elsewhere) - this will not return the data model 26 | * Use the APIs to return the application definition and rebuild the app in a private or shared space 27 | 28 | #### Unbuild and build of apps in managed spaces 29 | 30 | Note: if the app has been distributed from a QSEoW environment, the copy on QCS will not contain a load script. Apps published from a QCS private or shared space will contain a load script and be recoverable. 31 | 32 | The unbuild command will create flat-file definitions of the application for all approved (i.e. base) and published (i.e. community) sheets and stories, along with the associated objects. 33 | 34 | To generate the unbuild app: 35 | 36 | ``` 37 | qlik app unbuild --app '{appId}' --dir "{localPathForUnbuildFiles}" 38 | ``` 39 | 40 | This can then be rebuilt into a new application in a private space. 41 | 42 | First, we need to create a shell app to add all of this content to. This creates a blank app in your personal space: 43 | 44 | ``` 45 | qlik app create --attributes-name "Rebuilt app" --attributes-description "Rebuilt via build command" 46 | 47 | ``` 48 | You can optionally specify "--attributes-spaceId" to set the target shared space. 49 | 50 | ``` 51 | qlik app build --app "Rebuilt app" --app-properties "app-properties.json" --dimensions "dimensions.json" --measures "measures.json" --objects "objects\*" --script "script.qvs" --variables "variables.json" --no-reload``` 52 | ``` 53 | 54 | Note that we do not re-import the connections.yml file as we only want the app and not the data connections recreated. We also cannot rebuild bookmarks as these aren't currently created during unbuild. Once rebuilt, base and community sheets will need to be made public again. 55 | 56 | Some additional examples of using these commands can be found here: https://qlik.dev/tutorials/migrate-apps-from-qlik-sense-on-windows-to-qlik-sense-saas 57 | 58 | ### Data (private/ shared/ managed spaces) 59 | 60 | Data from any space can be loaded into apps and stored (via load script) into a cloud accessible data store such as AWS S3, where it can then be exported. It is not generally possible to export data via the GUI or APIs from Qlik SaaS. 61 | 62 | Related support article: https://community.qlik.com/t5/Knowledge/Qlik-Sense-SaaS-Enable-download-or-export-of-data-files-from/ta-p/1858718 63 | 64 | ### Extensions 65 | 66 | Extensions can't be downloaded from the management console, but they can be returned via APIs. 67 | 68 | First get the extension ID. One method to do this is to return all extensions via the APIs: 69 | ``` 70 | GET /api/v1/extensions 71 | ``` 72 | 73 | Now request the file for the extension you wish to export, and it will provide the zip file for the extension in the response: 74 | ``` 75 | GET /api/v1/extensions/{extensionId}/file 76 | ``` 77 | The response will need to be saved to a file with the .zip format to open normally on Windows machines. 78 | 79 | Reference: https://qlik.dev/apis/rest/extensions 80 | 81 | ### Themes 82 | 83 | Themes can't be downloaded from the management console, but they can be returned via APIs. 84 | 85 | First get the theme ID. One method to do this is to return all themes via the APIs: 86 | 87 | ``` 88 | GET /api/v1/themes 89 | ``` 90 | 91 | Now request the file for the theme you wish to export, and it will provide the zip file for the theme in the response: 92 | 93 | ``` 94 | GET /api/v1/themes/{themeId}/file 95 | ``` 96 | 97 | The response will need to be saved to a file with the .zip format to open normally on Windows machines. 98 | 99 | Reference: https://qlik.dev/apis/rest/themes 100 | -------------------------------------------------------------------------------- /snippets/qcs_genericLink.md: -------------------------------------------------------------------------------- 1 | # Generic Links on Qlik Cloud Services 2 | 3 | ## Creating links via CLI 4 | 5 | We can pass an object/ array, or load a file using the CLI. It is not currently a public API endpoint. 6 | 7 | ``` 8 | qlik raw post v1/generic-links --body-file "link.json" 9 | ``` 10 | 11 | Contents of the JSON file 12 | ``` 13 | {"name": "Help","link" :" https://help.qlik.com/"} 14 | ``` 15 | -------------------------------------------------------------------------------- /snippets/qcs_monitor_reload_task_disabled.md: -------------------------------------------------------------------------------- 1 | # Monitoring for disabled reload tasks in QCS 2 | 3 | In QCS, a scheduled reload task will automatically be disabled by the platform after 5 consecutive reload failures. There is no notification in the QMC of this action, so this document explains options to notify administrators of these issues. 4 | 5 | ## Process 6 | 7 | The overall process for identifying a disabled task (in the absence of an event to track this): 8 | 1) Establish a regular schedule for the check and a distribution for the notification 9 | 2) Poll the reload task endpoint for events with the following message: "Scheduled reload has been disabled since exceeded limit of 5 consecutive reload failures. Please fix error and re-enable schedule." 10 | 11 | This has the following general limitations: 12 | - Loads all reload-tasks, irrespective of whether the corresponding apps are in a personal or shared space (e.g. in a space used for development or testing) 13 | - Uses an experimental API which may change at any time 14 | 15 | Possible enhancements include: 16 | - Filtering the reload tasks to show only apps in managed spaces prior to hitting the API (as we really only care about tasks on production apps which generally should always be running) 17 | - Loading the owner of the reload task or app and messaging them to notify it's been disabled 18 | 19 | ## In Qlik Application Automation 20 | 21 | This automation does the following: 22 | 1) Starts on an hourly schedule 23 | 2) Sets variables for the tenant URL and the bearer token (in the screenshot using an input form) 24 | 3) Call the /reload-tasks endpoint (an experimental endpoint) 25 | 4) Loop over each value in the response 26 | 5) Filter the list to only those responses which are disabled, and which have a log value matching "Scheduled reload has been disabled since exceeded limit of 5" 27 | 6) Get the app information 28 | 7) Get the user information 29 | 8) For those reload tasks whose app has a stream set, send a slack message and print the output 30 | 9) For those reload tasks whose app doesn't have a stream set, just print the output 31 | 10) Stop the loop 32 | 33 | In order: 34 | 35 | ![image](https://user-images.githubusercontent.com/825142/140746703-b40ce921-20e4-46d9-9c99-7f081281338d.png) 36 | 37 | ![image](https://user-images.githubusercontent.com/825142/140746768-cba2e22b-b01f-4990-a6e1-4b4c2568e8a0.png) 38 | 39 | ![image](https://user-images.githubusercontent.com/825142/140747441-1d06c320-4d72-4bef-8c35-4014adb96504.png) 40 | 41 | A view of the slack message: 42 | 43 | ![image](https://user-images.githubusercontent.com/825142/140747899-bbd5ed35-be43-40a2-accd-2c0e1413557e.png) 44 | 45 | Results in: 46 | 47 | ![image](https://user-images.githubusercontent.com/825142/140748130-a5ce1e0a-ac43-4edb-b8d3-eeae49259be2.png) 48 | 49 | 50 | Notes: 51 | - Excludes personal spaces as it's assumed that we only want to monitor apps in shared and managed spaces 52 | 53 | Limitations: 54 | 1) Doesn't load more than 100 reload tasks, pagination on the API needs to be added in. Currently there is no reload-tasks block in Qlik Application Automation 55 | 2) Notifies on all disabled apps every time it's run 56 | 57 | ## In an app 58 | 59 | Load the log field from the reload-tasks API, matching the logic above. 60 | 61 | Hoping for updates to the Reload Analyzer monitoring app...https://community.qlik.com/t5/Support-Updates-Blog/The-Reload-Analyzer-for-Qlik-SaaS-customers-is-available-NOW/ba-p/1826163 62 | 63 | -------------------------------------------------------------------------------- /snippets/qcs_qpo_snippets.md: -------------------------------------------------------------------------------- 1 | # Qlik Platform Operation snippets 2 | 3 | These snippets can be copied into an Application Automation workspace using your system copy & paste. 4 | 5 | ## Add role to a user 6 | 7 | This will maintain any existing roles that user has. 8 | 9 | The automation will prompt for the tenant you wish to update the user on, then the automation will prompt for the user, and the roles you wish to add. 10 | 11 | ``` 12 | 13 | {"blocks":[{"id":"B8905015-5A6B-4E55-8BBE-25EAA9995149","type":"StartBlock","disabled":false,"name":"Start","displayName":"Start","comment":"","childId":"9A49B9AA-762F-48A0-B78A-62179936240A","inputs":[{"id":"run_mode","value":"manual","type":"select","structure":{}}],"settings":[],"collapsed":[{"name":"loop","isCollapsed":false}],"x":0,"y":0},{"id":"9A49B9AA-762F-48A0-B78A-62179936240A","type":"FormBlock","disabled":false,"name":"inputs","displayName":"Inputs","comment":"Specify the tenant","childId":"BD74BDF4-6741-4D16-9B41-0DA86A2A4A49","inputs":[],"settings":[{"id":"persist_data","value":"yes","type":"select","displayValue":"Yes","structure":[]},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":0,"y":120,"form":[{"id":"inputs-input-0","type":"input","label":"Tenant URL","order":0,"values":null,"options":[],"helpText":"URL of the Qlik Cloud tenant (e.g. mytenant.eu.qlikcloud.com)","isRequired":true}],"persistData":"yes"},{"id":"A1B6E4B8-8737-470E-B77C-F0355E3EC6A4","type":"EndpointBlock","disabled":false,"name":"listRoles","displayName":"Qlik Platform Operations - List Roles","comment":"Collect tenant roles and ids","childId":"161A90B4-B781-48A3-ADDB-10EEE5822543","inputs":[{"id":"bae46270-b76f-11ed-a52b-d71383a5b940","value":"{$.GetTenantNameAndRegion}","type":"string","structure":[]},{"id":"35bbb150-b77a-11ed-9a99-9b0cd6b2a9ab","value":null,"type":"string","structure":[]}],"settings":[{"id":"datasource","value":null,"type":"select","structure":[]},{"id":"maxitemcount","value":"","type":"string","structure":[]},{"id":"blendr_on_error","value":"stop","type":"select","structure":[]},{"id":"cache","value":"0","type":"select","structure":[]},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":true}],"x":0,"y":840,"loopBlockId":null,"datasourcetype_guid":"c7e48240-e0f2-11ec-ada1-d5ef75014b77","endpoint_guid":"ba493570-b76f-11ed-b6ab-6fd465c15105","endpoint_role":"list"},{"id":"35C93190-3543-44CD-B6BA-92409974C81B","type":"FormBlock","disabled":false,"name":"inputs2","displayName":"Inputs 2","comment":"Select which user to update","childId":"A1B6E4B8-8737-470E-B77C-F0355E3EC6A4","inputs":[],"settings":[{"id":"persist_data","value":"no","type":"select","structure":[]},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":0,"y":600,"form":[{"id":"inputs2-input-0","type":"select","label":"User to amend","order":0,"values":"{ $.transformList }","options":[],"helpText":"Select the user you wish to amend","isRequired":true}],"persistData":"no"},{"id":"BD74BDF4-6741-4D16-9B41-0DA86A2A4A49","type":"SnippetBlock","disabled":false,"name":"GetTenantNameAndRegion","displayName":"Qlik Platform Operations - Get Tenant Name And Region","comment":"","childId":"7110D824-F15D-45D1-ABCF-5E0662FFE953","inputs":[{"id":"575d1740-b1e2-11ed-958a-598edfec33b8","value":"{$.inputs.'Tenant URL'}","type":"string","structure":[]}],"settings":[{"id":"datasource","value":null,"type":"select","structure":[]},{"id":"blendr_on_error","value":"stop","type":"select","structure":[]},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":-327,"y":157,"datasourcetype_guid":"c7e48240-e0f2-11ec-ada1-d5ef75014b77","snippet_guid":"bd5c1ce0-ad14-11ed-83f6-1d42e53790dd"},{"id":"161A90B4-B781-48A3-ADDB-10EEE5822543","type":"EndpointBlock","disabled":false,"name":"getUser","displayName":"Qlik Platform Operations - Get User","comment":"Retrieve the selected user record","childId":"FFB90886-559F-403B-85C7-A6BC23D962EC","inputs":[{"id":"dfb7c880-9caa-11ed-bae1-3325829d9725","value":"{ $.GetTenantNameAndRegion }","type":"string","structure":[]},{"id":"dfc724b0-9caa-11ed-8d7a-a5bc1a965960","value":"{ $.inputs2.'User to amend' }","type":"string","structure":[]}],"settings":[{"id":"datasource","value":null,"type":"select","structure":[]},{"id":"blendr_on_error","value":"stop","type":"select","structure":[]},{"id":"cache","value":"0","type":"select","structure":[]},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":0,"y":520,"datasourcetype_guid":"c7e48240-e0f2-11ec-ada1-d5ef75014b77","endpoint_guid":"df9f1570-9caa-11ed-ac68-8ffd59764433","endpoint_role":"get"},{"id":"7110D824-F15D-45D1-ABCF-5E0662FFE953","type":"EndpointBlock","disabled":false,"name":"listUsers","displayName":"Qlik Platform Operations - List Users","comment":"","childId":"671897F0-9D6A-4E4E-AD8F-FDF96F4BF01F","inputs":[{"id":"447b9eb0-7634-11ed-a9df-a788128e422e","value":"{$.GetTenantNameAndRegion}","type":"string","structure":[]},{"id":"eb56ff20-9ca8-11ed-82cb-752b6b37b1a7","value":null,"type":"string","structure":[]}],"settings":[{"id":"datasource","value":null,"type":"select","structure":[]},{"id":"maxitemcount","value":"","type":"string","structure":[]},{"id":"blendr_on_error","value":"stop","type":"select","structure":[]},{"id":"cache","value":"0","type":"select","structure":[]},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":true}],"x":-392,"y":105,"datasourcetype_guid":"c7e48240-e0f2-11ec-ada1-d5ef75014b77","endpoint_guid":"445157c0-7634-11ed-9b40-9720b0f37135","endpoint_role":"list"},{"id":"671897F0-9D6A-4E4E-AD8F-FDF96F4BF01F","type":"TransformListBlock","disabled":false,"name":"transformList","displayName":"Transform List","comment":"Format the user list for the input box","childId":"35C93190-3543-44CD-B6BA-92409974C81B","inputs":[{"id":"list","value":"{ $.listUsers }","type":"string","structure":[]},{"id":"mapping","value":[{"key":"id","value":"{$.transformList.item.id}"},{"key":"name","value":"{$.transformList.item.name} (sub: {$.transformList.item.subject} / email: {$.transformList.item.email} / id: {$.transformList.item.id})"}],"type":"object","mode":"keyValue","structure":[]}],"settings":[{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":true}],"x":-390,"y":480.181884765625},{"id":"239E980F-99AC-4DA2-89B0-4778DDFC2649","type":"FormBlock","disabled":false,"name":"inputs3","displayName":"Inputs 3","comment":"Select the roles to add","childId":"FDB44DB1-0A82-4C16-8E93-8355FE65CF82","inputs":[],"settings":[{"id":"persist_data","value":"no","type":"select","structure":[]},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":531,"y":802,"form":[{"id":"inputs3-input-0","label":"Role to add","helpText":"This list shows the roles which can be added to this user","type":"multi_select","values":"{$.compareLists}","isRequired":true,"options":{},"order":0}],"persistData":"no"},{"id":"FFB90886-559F-403B-85C7-A6BC23D962EC","type":"CompareListsBlock","disabled":false,"name":"compareLists","displayName":"Compare Lists","comment":"Highlight which available roles the user doesn't yet have","childId":"239E980F-99AC-4DA2-89B0-4778DDFC2649","inputs":[{"id":"list1","value":"{$.listRoles}","type":"string","structure":[]},{"id":"list2","value":"{$.getUser.assignedRoles}","type":"string","structure":[]}],"settings":[{"id":"list1_unique_key","value":"id","type":"field","structure":[]},{"id":"list2_unique_key","value":"id","type":"field","structure":[]},{"id":"compare_mode","value":"diff","type":"select","structure":[]},{"id":"case_insensitive","value":null,"type":"checkbox","structure":[]},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":true}],"x":-382,"y":824.107421875},{"id":"0AB65223-869A-4C29-AFC6-35438B8849C0","type":"MergeListsBlock","disabled":false,"name":"mergeLists","displayName":"Merge Lists","comment":"Merge original roles with new roles","childId":"E520F666-190A-4FB7-A9F9-39F9C97C2B55","inputs":[{"id":"list1","value":"{$.transformList2}","type":"string","structure":[]},{"id":"list2","value":"{ $.getUser.assignedRoles[*] }","type":"string","structure":[]}],"settings":[{"id":"merge_strategy","value":"merge","type":"select","structure":[]},{"id":"list1_unique_key","value":"id","type":"field","structure":[]},{"id":"list2_unique_key","value":"id","type":"field","structure":[]},{"id":"on_duplicate_key","value":"merge","type":"select","displayValue":"Merge item from list 2 into item from list 1","structure":[]},{"id":"on_existing_item_key","value":"keep_list1","type":"select","structure":[]},{"id":"case_insensitive","value":null,"type":"checkbox","structure":[]},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":-408,"y":482.7509765625},{"id":"4207BA4B-05C1-4EC7-A9B1-869FAF91DBDA","type":"TransformListBlock","disabled":false,"name":"transformList2","displayName":"Transform List 2","comment":"Format the selected roles into an id: 9898 format","childId":"0AB65223-869A-4C29-AFC6-35438B8849C0","inputs":[{"id":"list","value":"{$.roles}","type":"string","structure":[]},{"id":"mapping","value":[{"key":"id","value":"{$.transformList2.item}"}],"type":"object","mode":"keyValue","structure":[]}],"settings":[{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":0,"y":1460},{"id":"FDB44DB1-0A82-4C16-8E93-8355FE65CF82","type":"ForEachBlock","disabled":false,"name":"loop","displayName":"Loop","comment":"Add the selected roles to a variable","childId":"4207BA4B-05C1-4EC7-A9B1-869FAF91DBDA","inputs":[{"id":"input","value":"{$.inputs3.'Role to add'.selected}","type":"string","structure":{}}],"settings":[{"id":"automations_censor_data","value":false,"type":"checkbox","structure":{}}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":-685,"y":1231,"loopBlockId":"3F0DF83E-F1A4-4114-9B29-1350A7425DA1"},{"id":"3F0DF83E-F1A4-4114-9B29-1350A7425DA1","type":"VariableBlock","disabled":false,"name":"roles","displayName":"Variable - roles","comment":"","childId":null,"inputs":[],"settings":[],"collapsed":[{"name":"loop","isCollapsed":false}],"x":-710,"y":1228,"variableGuid":"44557A7A-9C01-42ED-8A8C-36E0B7B8D53C","operations":[{"key":"B2CEA259-4B0B-457F-9530-D28076342DBB","id":"add_item","name":"Add item to { variable }","value":"{$.loop.item}"}]},{"id":"E520F666-190A-4FB7-A9F9-39F9C97C2B55","type":"TransformListBlock","disabled":false,"name":"transformList3","displayName":"Transform List 3","comment":"Tidy up list to ensure we just have id","childId":"EC4A8FB0-0537-456E-88D3-E77F5BBBF0E7","inputs":[{"id":"list","value":"{$.mergeLists}","type":"string","structure":[]},{"id":"mapping","value":[{"key":"id","value":"{$.transformList3.item.id}"}],"type":"object","mode":"keyValue","structure":[]}],"settings":[{"id":"automations_censor_data","value":false,"type":"checkbox","structure":[]}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":250,"y":1460},{"id":"EC4A8FB0-0537-456E-88D3-E77F5BBBF0E7","type":"EndpointBlock","disabled":false,"name":"updateUser","displayName":"Qlik Platform Operations - Update User","comment":"","childId":null,"inputs":[{"id":"1275cc40-7d52-11ed-97f5-35480c4d133a","value":"{$.GetTenantNameAndRegion}","type":"string","structure":{}},{"id":"12813fb0-7d52-11ed-b2ac-cfed8b1c48f2","value":"{$.getUser.id}","type":"string","structure":{}},{"id":"55364350-7d52-11ed-889a-fb5bd7462102","value":null,"type":"string","structure":{}},{"id":"97589920-ceec-11ed-aede-33df8d01bc94","value":"{json: {$.transformList3}}","type":"string","structure":{}},{"id":"ae9800c0-ceec-11ed-bf0f-0510a2f5834b","value":null,"type":"string","structure":{}},{"id":"b440c910-ceec-11ed-b95b-c57e4f78d383","value":null,"type":"string","structure":{}},{"id":"b9f384a0-ceec-11ed-bd06-5f56dda1d03f","value":null,"type":"string","structure":{}},{"id":"bfb49fc0-ceec-11ed-9d75-f9d68f3e98a8","value":null,"type":"string","structure":{}}],"settings":[{"id":"datasource","value":null,"type":"select","structure":{}},{"id":"blendr_on_error","value":"stop","type":"select","structure":{}},{"id":"automations_censor_data","value":false,"type":"checkbox","structure":{}}],"collapsed":[{"name":"loop","isCollapsed":false}],"x":-258,"y":1759,"datasourcetype_guid":"c7e48240-e0f2-11ec-ada1-d5ef75014b77","endpoint_guid":"12491ca0-7d52-11ed-b5ad-e771eed0ab44","endpoint_role":"update"}],"variables":[{"guid":"44557A7A-9C01-42ED-8A8C-36E0B7B8D53C","name":"roles","type":"list"}]} 14 | 15 | ``` -------------------------------------------------------------------------------- /snippets/qcs_qvds_from_qvf.md: -------------------------------------------------------------------------------- 1 | # Create QVDs from a QVF 2 | 3 | This snippet creates QVDs from the tables in a QVF. 4 | 5 | Into another QVF, enter and run this script. 6 | 7 | ``` 8 | binary [66230f99-6da0-4d7f-9558-67b738c36e48]; 9 | 10 | for i = 0 to nooftables() - 1 11 | 12 | let vTableName = tablename(i); 13 | store [$(vTableName)] into [lib://Amazon_S3_V2/$(vTableName).qvd] (qvd); 14 | drop table [$(vTableName)]; 15 | 16 | next; 17 | ``` -------------------------------------------------------------------------------- /snippets/qcs_reloadTrigger.md: -------------------------------------------------------------------------------- 1 | # QCS Reload Trigger 2 | 3 | In QCS, task chaining doesn't currently exist. Instead, you need to utilise the APIs to trigger tasks. 4 | 5 | This snippet is intended for use at the end of your load scripts. You can reference the subroutine in an include file, then call it at the end of the script. It'll trigger the listed app, then write a log file to record the action. If the input parameters aren't correct then the reload of the containing app will fail. 6 | 7 | One of the known limitations here is around the API key permissions - these are currently all or nothing, and tied to your user account. If you share an API key that you've generated with others, then they can use the APIs to impersonate you. 8 | 9 | ``` 10 | Sub sTriggerReload(sub_appID,sub_connAPI,sub_connLog) 11 | 12 | /* 13 | 14 | This subroutine triggers the reload of a QCS application (directly, not using scheduled tasks) 15 | 16 | INPUTS: 17 | * sub_appID = the GUID for the app to be reloaded 18 | * sub_connAPI = a REST data connection that can access the tenant APIs with appropriate privileges 19 | * sub_connLog = a folder data connection for storing reload trigger log files (these will be stored as "ReloadLog__.qvd") 20 | 21 | OUTPUTS: 22 | * Send a POST message the task API to trigger the relevant app reload 23 | * Store a log file to record the reload trigger to assist with finding this event in audit logs if needed 24 | 25 | REST CONNECTION CONFIG 26 | * URL: https://..qlikcloud.com/api/v1/reloads 27 | * Type: POST 28 | * Body: {"appId":""} 29 | * Header: Authorization: Bearer 30 | 31 | */ 32 | 33 | // Connect to the REST connection 34 | LIB CONNECT TO '$(sub_connAPI)'; 35 | 36 | LET sub_QueryBody = '{""appId"":""$(sub_appID)""}'; 37 | 38 | // Collect data from the response for logging 39 | // Configure app ID for reload 40 | RestConnectorMasterTable: 41 | SQL SELECT 42 | "id", 43 | "appId", 44 | "tenantId", 45 | "userId", 46 | "type", 47 | "status", 48 | "creationTime", 49 | "__KEY_root" 50 | FROM JSON (wrap on) "root" PK "__KEY_root" 51 | WITH CONNECTION (BODY "$(sub_QueryBody)"); 52 | 53 | ReloadLog: 54 | LOAD DISTINCT 55 | [id] AS [Reload ID], 56 | [appId] AS [Reload App ID], 57 | [tenantId] AS [Reload Tenant ID], 58 | [userId] AS [Reload User ID], 59 | [type] AS [Reload Type], 60 | [status] AS [Reload Status], 61 | [creationTime] AS [Reload Creation Time], 62 | DocumentName() AS [Reload Trigger App ID], 63 | DocumentTitle() AS [Reload Trigger App Name] 64 | RESIDENT RestConnectorMasterTable 65 | WHERE NOT IsNull([__KEY_root]); 66 | 67 | // Set variables to produce log filenames 68 | LET sub_ReloadTime = Timestamp(Peek('Reload Creation Time',0),'YYYYMMDDhhmmss'); 69 | LET sub_ReloadID = Peek('Reload ID',0); 70 | 71 | // Check to see if the reload request returned rows, and the variables carry data. If not, fail this reload 72 | If (NoOfRows('ReloadLog') <> 1) OR ('$(sub_ReloadTime)' = '') OR ('$(sub_ReloadID)' = '') THEN 73 | // Fail with an error for the log 74 | Call Error('An unexpected number of rows was returned by the reloads API, or invalid data was found.'); 75 | END IF; 76 | 77 | TRACE >>> Returned reload $(sub_ReloadID) at $(sub_ReloadTime); 78 | 79 | // Store logs and clear model 80 | STORE ReloadLog INTO [lib://$(sub_connLog)/ReloadLog_$(sub_appID)_$(sub_ReloadID)_$(sub_ReloadTime).qvd] (qvd); 81 | DROP TABLE ReloadLog; 82 | DROP TABLE RestConnectorMasterTable; 83 | 84 | End Sub; 85 | 86 | // Call - pass in the app ID, the REST connection name, the folder connection name 87 | Call sTriggerReload('ab77b40d-4a30-46d9-9d2b-2943c6b82902','','DataFiles'); 88 | ``` 89 | -------------------------------------------------------------------------------- /snippets/qcs_resource_update_owner_space.md: -------------------------------------------------------------------------------- 1 | # Updating owners and spaces on objects 2 | 3 | This page documents how to change the owner and space properties on different resource types. 4 | 5 | Apologies, a bit of variety in tooling and languages below. 6 | 7 | ## Object types and coverage 8 | 9 | GUI and API availability varies per resource type in SaaS. 10 | 11 | | Resource Type | Change owner via GUI | Change owner via API | Change space via GUI | Change space via API | 12 | | --- | --- | --- | --- | --- | 13 | | Apps | Yes | Yes | Yes | Yes | 14 | | App Objects | No | No | N/A | N/A | 15 | | Data Connections | Yes | Yes | Yes | Yes | 16 | | Data Files | No | ? | No | Yes | 17 | | Spaces | Yes | Yes | N/A | N/A | 18 | 19 | 20 | ## App Objects 21 | 22 | ### Change owner via GUI 23 | 24 | This is not currently possible. 25 | 26 | ### Change owner via API 27 | 28 | This is not currently possible, as the object owner can't updated via the items API. 29 | 30 | It is possible to script the rebuild of objects (using unbuild/ build) and impersonation (using JWT). 31 | 32 | ## Data Connections 33 | 34 | ### Change owner via GUI 35 | 36 | Navigate to Management console > Data content, and there is an option to "move" any data connection (go to https://{hostname}.{region}.qlikcloud.com/console/content/data-connections) 37 | 38 | ### Change owner via API 39 | 40 | The `/actions/update` endpoint allows you to replace both or either the owner or space of an app: 41 | 42 | ``` 43 | curl -L -X POST 'https://{hostname}.{region}.qlikcloud.com/api/v1/data-connections/actions/update' \ 44 | -H 'Authorization: Bearer {token}' \ 45 | -H 'Content-type: application/json' \ 46 | -H 'Accept: application/json' \ 47 | --data-raw '{ 48 | "connections": [ 49 | { 50 | "id": "{data connection id}", 51 | "ownerId": "{new data connection owner id}" 52 | } 53 | ] 54 | }' 55 | ``` 56 | 57 | ### Change space via GUI 58 | 59 | Navigate to Management console > Data content, and there is an option to "move" any data connection (go to https://{hostname}.{region}.qlikcloud.com/console/content/data-connections) 60 | 61 | ### Change space via API 62 | 63 | The new way of changing the space via API is using the `/actions/update` endpoint, and a call like the following: 64 | 65 | ``` 66 | curl -L -X POST 'https://{hostname}.{region}.qlikcloud.com/api/v1/data-connections/actions/update' \ 67 | -H 'Authorization: Bearer {token}' \ 68 | -H 'Content-type: application/json' \ 69 | -H 'Accept: application/json' \ 70 | --data-raw '{ 71 | "connections": [ 72 | { 73 | "id": "{data connection id}", 74 | "spaceId": "{target space id}", 75 | "spaceType": "{personal|shared|managed}" 76 | } 77 | ] 78 | }' 79 | ``` 80 | 81 | The old way of doing this is below, in a powershell snippet: 82 | 83 | ``` 84 | # Script to do this in native PS as qlik CLI seems a bit buggy on this endpoint 85 | 86 | # Define your tenant URL 87 | $tenant = "tenant.region.qlikcloud.com" 88 | 89 | # Define your API key 90 | $apikey = "myapikey" 91 | 92 | # Define your data connection ID 93 | $dataconnid = "id of the data connection" 94 | 95 | # Define your target space 96 | $space = "id of the target space" 97 | 98 | # Dummy value for the headers 99 | $hdrs = @{} 100 | 101 | # Add in the API key to the headers 102 | $hdrs.Add("Authorization","Bearer $($apikey)") 103 | 104 | # Handle TLS 1.2 only environments 105 | [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]'Ssl3,Tls,Tls11,Tls12' 106 | 107 | # Get the Data Connection Info 108 | $dataconn = Invoke-WebRequest -Method Get -Uri "https://$($tenant)/api/v1/data-connections/$($dataconnid)" -Headers $hdrs | ConvertFrom-Json 109 | 110 | # Create the payload for updating the data connection 111 | $payload = @{ 112 | "qId" = $dataconn.qId 113 | "qName" = $dataconn.qName 114 | "qConnectStatement" = $dataconn.qConnectStatement 115 | "qType" = $dataconn.qType 116 | "qEngineObjectID" = $dataconn.qEngineObjectID 117 | "space" = $space 118 | } | ConvertTo-Json 119 | $payload = ConvertTo-Json $payload 120 | 121 | # Send the post request to update the data connection 122 | $dataconnresp = Invoke-WebRequest -Method put -Uri "https://$($tenant)/api/v1/data-connections/$($dataconnid)" -Headers $hdrs -Body $payload 123 | ``` 124 | 125 | ## Spaces 126 | 127 | ### Change owner via GUI 128 | 129 | This can be done from the QMC at https://{tenant}.{region}.qlikcloud.com/console/spaces/ 130 | 131 | ### Change owner via API 132 | 133 | This is simple and requires just the ID for the space and the user who will own the resource. 134 | ``` 135 | qlik space update {spaceId} --ownerId {userId} 136 | 137 | example: 138 | qlik space update 6176c4aae25328b217e952e2 --ownerId oVtOVXb4qg1oXZn7nF-LSh960MUnetxN 139 | ``` 140 | -------------------------------------------------------------------------------- /snippets/qcs_space_delete_files.md: -------------------------------------------------------------------------------- 1 | # Delete all files in a Qlik Cloud space 2 | 3 | This powershell script requires you to provide a spaceId, and will loop over and delete all data files in a space in chunks. 4 | 5 | ...use with caution. 6 | 7 | ``` 8 | # Set the space ID 9 | $spaceId = '63bc52d878cff58eaeda4c58'; 10 | 11 | # Get the space's data connections 12 | $dataConnections = qlik raw get v1/data-connections --query spaceId=$spaceId | ConvertFrom-Json 13 | 14 | # Find that blasted datafiles connection ID (this is a loop rather than search, sorry) 15 | foreach ($dataConnection in $dataConnections) { 16 | 17 | echo $("Checking connection: " + $dataConnection.qName); 18 | 19 | if ($dataConnection.qName -eq 'DataFiles') { 20 | 21 | # We got it 22 | echo $("Found DataFiles connection: " + $dataConnection.qName + ", id: " + $dataConnection.id); 23 | 24 | # Get the data files in that space 25 | $dataFiles = qlik data-file ls --connectionId $($dataConnection.id) | ConvertFrom-Json 26 | 27 | # Now remove them by page 28 | while ($($dataFiles.Length) -gt 1) { 29 | foreach ($dataFile in $dataFiles) { 30 | Write-Host "Deleting $($dataFile.name) with id $($dataFile.id)" 31 | qlik raw delete v1/data-files/$($dataFile.id) | Out-Null 32 | } 33 | $dataFiles = qlik data-file ls --connectionId $($dataConnection.id) | ConvertFrom-Json 34 | Write-Host 'Checking for more files...' 35 | } 36 | 37 | # Break here as we should have only one 38 | echo $("All data files removed, connection: " + $dataConnection.qName + " matched DataFiles, breaking."); 39 | break; 40 | } 41 | } 42 | ``` 43 | -------------------------------------------------------------------------------- /snippets/qs-geoanalytics-eastings-northings.md: -------------------------------------------------------------------------------- 1 | # Convert Eastings and Northings to Lat/ Long for QS maps 2 | 3 | For QSECM with GeoAnalytics connector 4 | 5 | ``` 6 | // Convert eastings and northings to lat long for Qlik Sense mapping 7 | 8 | // Load source where values correct 9 | Source: 10 | LOAD 11 | %KeyLocation, 12 | Easting, 13 | Northing, 14 | RowNo() AS RowNo 15 | FROM [lib://AttachedFiles/Dim_Location.qvd] 16 | (qvd) 17 | WHERE Easting>1000 AND Northing>1000 AND IsNum(Easting) AND IsNum(Northing); 18 | 19 | 20 | 21 | LIB CONNECT TO ''; 22 | 23 | /* Generated by GeoAnalytics for operation Load ---------------------- */ 24 | [_inlineMap_]: 25 | mapping LOAD * inline [ 26 | _char_, _utf_ 27 | "'", '\u0027' 28 | '"', '\u0022' 29 | "[", '\u005b' 30 | "/", '\u002f' 31 | "*", '\u002a' 32 | ";", '\u003b' 33 | "}", '\u007d' 34 | "{", '\u007b' 35 | "`", '\u0060' 36 | "´", '\u00b4' 37 | " ", '\u0009' 38 | ]; 39 | 40 | IF FieldNumber('%KeyLocation', 'Source') = 0 THEN 41 | call InvalidInlineData('The field %KeyLocation in Source is not available'); 42 | END IF 43 | IF FieldNumber('Easting', 'Source') = 0 THEN 44 | call InvalidInlineData('The field Easting in Source is not available'); 45 | END IF 46 | IF FieldNumber('Northing', 'Source') = 0 THEN 47 | call InvalidInlineData('The field Northing in Source is not available'); 48 | END IF 49 | Let [TranslatedInlineTable] = '%KeyLocation' & Chr(9) & 'Easting' & Chr(9) & 'Northing'; 50 | Let numRows = NoOfRows('Source'); 51 | Let chunkSize = 1000; 52 | Let chunks = numRows/chunkSize; 53 | For n = 0 to chunks 54 | Let chunkText = ''; 55 | Let chunk = n*chunkSize; 56 | For i = 0 To chunkSize-1 57 | Let row = ''; 58 | Let rowNr = chunk+i; 59 | Exit for when rowNr >= numRows; 60 | For Each f In '%KeyLocation', 'Easting', 'Northing' 61 | row = row & Chr(9) & MapSubString('_inlineMap_', Peek('$(f)', $(rowNr), 'Source')); 62 | Next 63 | chunkText = chunkText & Chr(10) & Mid('$(row)', 2); 64 | Next 65 | [TranslatedInlineTable] = [TranslatedInlineTable] & chunkText; 66 | Next 67 | chunkText='' 68 | 69 | 70 | [Translated]: 71 | SQL SELECT [%KeyLocation], [Translated_Geometry] FROM Load(dataset='Translated') 72 | DATASOURCE Translated INLINE tableName='Source', tableFields='%KeyLocation,Easting,Northing', geometryType='POINTLATLON', loadDistinct='NO', suffix='', crs='EPSG:27700' {$(TranslatedInlineTable)} 73 | ; 74 | tag field [%KeyLocation] with '$primarykey'; 75 | tag field [Translated_Geometry] with '$geopoint'; 76 | tag field [%KeyLocation] with '$geoname'; 77 | tag field [Translated_Geometry] with '$relates_%KeyLocation'; 78 | tag field [%KeyLocation] with '$relates_Translated_Geometry'; 79 | 80 | [TranslatedInlineTable] = ''; 81 | 82 | /* End GeoAnalytics operation Load ----------------------------------- */ 83 | ``` 84 | -------------------------------------------------------------------------------- /snippets/qs_attachedfiles.md: -------------------------------------------------------------------------------- 1 | # Identify attached files 2 | 3 | This powershell script scans the static content directory and outputs a CSV file containing file names and sizes. This can then be interrogated in Qlik Sense to add application names. 4 | 5 | Open a powershell window at the root of your persistance share (the location of this can be found in the "Service Cluster" section in the Qlik Sense QMC), and run the script below 6 | 7 | ``` 8 | dir -Path ".\StaticContent\AppContent" -Recurse -File | Select FullName, Length | Export-Csv "AttachedFiles.csv" 9 | ``` 10 | -------------------------------------------------------------------------------- /snippets/qs_backup_node.ps1: -------------------------------------------------------------------------------- 1 | # Backup QS - simple branch of qlik_migrate/blob/master/site_backup.ps1 (internal only) 2 | # Will copy connectors but nothing like web connectors, or on-system services 3 | 4 | # First drop all the restrictions on script execution (depends on policy) 5 | #Set-ExecutionPolicy Unrestricted 6 | 7 | # Need to set the PGP Pass file 8 | # PGPASS must have: localhost:4432:QSR:Postgres:[superuserpassword] 9 | SET PGPASSFILE=C:\Backups\Qlik\pgpass.conf 10 | 11 | # Set start date and time 12 | $Today = Get-Date -UFormat “%Y%m%d_%H%M” 13 | $StartTime = Get-Date -UFormat “%Y%m%d_%H%M” 14 | 15 | # Set usual postgres and program data locations 16 | $PostGreSQLLocation = “C:\Program Files\Qlik\Sense\Repository\PostgreSQL\9.6\bin” 17 | $PostGresBackupTarget = “C:\Backups\Qlik” 18 | $SenseProgramData = “C:\QlikShare\” # Shared Persistance Folder 19 | 20 | # Write out to console 21 | write-host “Attempting to shut down Qlik Sense services.” 22 | 23 | # Put some loginc in here and log when these don't go down 24 | # Also add more status updates as there is no indication of progress 25 | # Is -WarningAction SilentlyContinue appropriate? 26 | stop-service QlikSenseProxyService -WarningAction SilentlyContinue 27 | Start-Sleep -s 10 28 | stop-service QlikSenseEngineService -WarningAction SilentlyContinue 29 | Start-Sleep -s 10 30 | stop-service QlikSenseSchedulerService -WarningAction SilentlyContinue 31 | Start-Sleep -s 10 32 | stop-service QlikSensePrintingService -WarningAction SilentlyContinue 33 | Start-Sleep -s 10 34 | stop-service QlikSenseServiceDispatcher -WarningAction SilentlyContinue 35 | Start-Sleep -s 10 36 | stop-service QlikSenseRepositoryService -WarningAction SilentlyContinue 37 | # Add logging service in case we want to dump this too 38 | Start-Sleep -s 10 39 | Stop-Service QlikLoggingService -WarningAction SilentlyContinue 40 | 41 | # Write out to console 42 | write-host “Backing up Shared Persistance Data from $SenseProgramData.” 43 | 44 | # Copy Qlik Sense logs, apps, and other core content 45 | Copy-Item $SenseProgramData\ArchivedLogs -Destination $PostGresBackupTarget\$StartTime\ArchivedLogs -Recurse 46 | Copy-Item $SenseProgramData\Apps -Destination $PostGresBackupTarget\$StartTime\Apps -Recurse 47 | Copy-Item $SenseProgramData\StaticContent -Destination $PostGresBackupTarget\$StartTime\StaticContent -Recurse 48 | Copy-Item $SenseProgramData\CustomData -Destination $PostGresBackupTarget\$StartTime\CustomData -Recurse 49 | 50 | # Write out to console 51 | write-host “File Backup Completed” 52 | 53 | # Script can get lost here - waits for user to hit enter on prompt for password if we don't use the pgpass 54 | write-host “Backing up PostgreSQL Repository Database” 55 | 56 | # Change to postgres folder and execute dump (use pg_pass) 57 | cd $PostGreSQLLocation 58 | .\pg_dump.exe -h localhost -p 4432 -U postgres -b -F t -f “$PostGresBackupTarget\$StartTime\QSR_backup_$Today.tar” QSR 59 | 60 | # Write out to console 61 | write-host “PostgreSQL backup Completed, restarting Qlik Services” 62 | 63 | # Delay to make sure services can talk to each other 64 | start-service QlikSenseRepositoryService -WarningAction SilentlyContinue 65 | Start-Sleep -s 10 66 | start-service QlikSenseEngineService -WarningAction SilentlyContinue 67 | Start-Sleep -s 10 68 | start-service QlikSenseSchedulerService -WarningAction SilentlyContinue 69 | Start-Sleep -s 10 70 | start-service QlikSensePrintingService -WarningAction SilentlyContinue 71 | Start-Sleep -s 10 72 | start-service QlikSenseServiceDispatcher -WarningAction SilentlyContinue 73 | Start-Sleep -s 10 74 | start-service QlikSenseProxyService -WarningAction SilentlyContinue 75 | 76 | # Set end time 77 | $EndTime = Get-Date -UFormat “%Y%m%d_%H%M%S” 78 | 79 | # Write out to console 80 | write-host “This backup process started at ” $StartTime ” and ended at ” $EndTime 81 | -------------------------------------------------------------------------------- /snippets/qs_enumerations.md: -------------------------------------------------------------------------------- 1 | # Enumerations for Qlik Sense QRS API values 2 | 3 | ``` 4 | // Task status enums 5 | Map_TaskStatus: 6 | MAPPING LOAD 7 | * 8 | INLINE [ 9 | Key, Value 10 | 0, 0 - Never started 11 | 1, 1 - Triggered 12 | 2, 2 - Started 13 | 3, 3 - Queued 14 | 4, 4 - Abort initiated 15 | 5, 5 - Aborting 16 | 6, 6 - Aborted 17 | 7, 7 - Successful 18 | 8, 8 - Failed 19 | 9, 9 - Skipped 20 | 10, 10 - Retrying 21 | 11, 11 - Error 22 | 12, 12 - Reset 23 | ]; 24 | 25 | 26 | // Audit Activity Log Verbosity 27 | Map_AuditActivityLog: 28 | MAPPING LOAD 29 | * 30 | INLINE [ 31 | Key, Value 32 | 0, 0 - Off 33 | 1, 1 - Fatal 34 | 2, 2 - Error 35 | 3, 3 - Warning 36 | 4, 4 - Basic 37 | 5, 5 - Extended 38 | ]; 39 | 40 | ``` 41 | -------------------------------------------------------------------------------- /snippets/qs_recursive_qvd_index.qvs: -------------------------------------------------------------------------------- 1 | // Sub to recursively load QVD file metadata from a directory 2 | SUB sLoadQVDMetadata(vSub_Path) 3 | 4 | TRACE >> Loading files in path [$(vSub_Path)].; 5 | 6 | // Iterate over each QVD file in the directory and load metadata 7 | // Use backslash for compatibility with QlikView 12 8 | FOR EACH vSub_File in FileList('$(vSub_Path)\*.qvd') 9 | 10 | // For use with QlikView 12, comment out the two lineage rows 11 | Metadata_QVD: 12 | LOAD 13 | QvBuildNo, 14 | CreatorDoc, 15 | CreateUtcTime, 16 | SourceFileSize, 17 | "TableName", 18 | RecordByteSize, 19 | NoOfRecords, 20 | Offset, 21 | "Length", 22 | "Lineage/LineageInfo/Discriminator", 23 | "Lineage/LineageInfo/Statement", 24 | FileName() AS [File Name], 25 | '$(vSub_Path)' AS [File Data Connection], 26 | FilePath() AS [File Data Path], 27 | FileSize() AS [File Size] 28 | FROM [$(vSub_File)] 29 | (XmlSimple, table is [QvdTableHeader]); 30 | 31 | // Set a count and print to console 32 | LET vLoad_Rows = NoOfRows('Metadata_QVD'); 33 | TRACE >>> Loaded $(vLoad_Rows) rows, last file found: [$(vSub_File)].; 34 | 35 | NEXT vSub_File; 36 | 37 | // Now recursively call the function for each directory found in this path 38 | // Use backslash for compatibility with QlikView 12 39 | FOR EACH vSub_Directory in DirList('$(vSub_Path)\*') 40 | 41 | // Resursively call sub 42 | CALL sLoadQVDMetadata('$(vSub_Directory)'); 43 | 44 | NEXT vSub_Directory; 45 | 46 | END SUB; 47 | 48 | // Qlik Sense - i.e. lib, do not include trailing slash 49 | Call sLoadQVDMetadata('lib://Dir_QlikFiles'); 50 | // QlikView - i.e. path (do not include trailing slash) 51 | // Call sLoadQVDMetadata('D:\QlikFiles'); 52 | -------------------------------------------------------------------------------- /snippets/qs_repository_explorer.qvs: -------------------------------------------------------------------------------- 1 | // Qlik SenseRepository Explorer Script 2 | // Version date: 27/06/2019 3 | // This script loads each table from the repository to give you the ability to find field values and identify issues without downloading PgAdmin 4 | // Even in big deployments this app shouldn't be too large, as we're avoiding the logs database 5 | 6 | // Config steps: 7 | // 1) Create a data connection to the repository and note the name. For a simple single node install: 8 | // Connection Type: PostgreSQL 9 | // Host name: localhost 10 | // Port: 4432 11 | // Database: QSR 12 | // User name: postgres 13 | // Password: defined during Qlik Sense install (superuser password) 14 | // 2) Update the connection name in the script below, then paste it into an app and reload 15 | 16 | // -- Script start 17 | // Create a connection to localhost:4432 with postgres user and the superuser password 18 | LIB CONNECT TO 'PostgreSQL_QSR'; 19 | 20 | // Load the table listing from the default Postgres directory 21 | TableList: 22 | LOAD 23 | RowNo() AS %KeyTableList, 24 | table_catalog AS [Table Catalog], 25 | table_schema AS [Table Schema], 26 | table_name AS [Table Name], 27 | table_type AS [Table Type]; 28 | SELECT 29 | "table_catalog", 30 | "table_schema", 31 | "table_name", 32 | "table_type" 33 | FROM "information_schema"."tables" 34 | WHERE "table_catalog" = 'QSR' // Only load from repository tables 35 | AND "table_schema" = 'public' // Only load public tables 36 | ; 37 | 38 | // Set a variable with the table count and print this to the console 39 | LET vCount_Tables = NoOfRows('TableList'); 40 | TRACE >> Found $(vCount_Tables) tables in QSR.Public.; 41 | 42 | // Create an empty table to concatenate table rows to 43 | TableRows: 44 | LOAD 45 | null() AS %KeyTableList 46 | AutoGenerate 0; 47 | 48 | // Now loop over these tables and load their contents! 49 | FOR i=0 to vCount_Tables - 1 50 | 51 | LET vLoop_TableKey = Peek('%KeyTableList',i,'TableList'); 52 | LET vLoop_TableSchema = Peek('Table Schema',i,'TableList'); 53 | LET vLoop_TableName = Peek('Table Name',i,'TableList'); 54 | TRACE >>> Loading from $(vLoop_TableSchema).$(vLoop_TableName).; 55 | 56 | // Set qualify statement for all Qlik data tables 57 | QUALIFY *; 58 | 59 | // Get the data from the table 60 | [$(vLoop_TableName)]: 61 | LOAD 62 | *; 63 | SELECT 64 | * 65 | FROM "$(vLoop_TableSchema)"."$(vLoop_TableName)"; 66 | 67 | // Set unqualify statement now that we've done the data load 68 | UNQUALIFY *; 69 | 70 | // Get a row count from the table and join back to the table listing 71 | Concatenate(TableRows) 72 | LOAD 73 | '$(vLoop_TableKey)' AS %KeyTableList, 74 | num(tablerows,'#,##0') AS [Table Row Count]; 75 | SELECT 76 | COUNT(*) as tablerows 77 | FROM "$(vLoop_TableSchema)"."$(vLoop_TableName)"; 78 | 79 | Next i; 80 | 81 | // -- Script end 82 | -------------------------------------------------------------------------------- /snippets/qs_rest_qrs_api.md: -------------------------------------------------------------------------------- 1 | # Creating a server-bound connection to the QRS API 2 | 3 | These are effectively a copy of the existing REST monitoring app connections. 4 | 5 | ## Example of QRS App endpoint 6 | 7 | This is the default config for the app endpoint (before FQDN change). 8 | 9 | ``` 10 | CUSTOM CONNECT TO "provider=QvRestConnector.exe;url=https://localhost/win/qrs/app/full;timeout=900;method=GET;autoDetectResponseType=true;keyGenerationStrategy=0;authSchema=ntlm;skipServerCertificateValidation=true;useCertificate=No;certificateStoreLocation=LocalMachine;certificateStoreName=My;trustedLocations=qrs-proxy%2https://localhost:4244;queryParameters=xrfkey%20000000000000000;addMissingQueryParametersToFinalRequest=false;queryHeaders=X-Qlik-XrfKey%20000000000000000%1User-Agent%2Windows;PaginationType=None;" 11 | ``` 12 | 13 | ## Configuring via the Hub 14 | 15 | | Property | Value 1 | Value 2 | Notes | 16 | | -------- | ------- | ------- | ----- | 17 | | URL | https://localhost/qrs/app/full | | Swap out localhost for the FQDN of the CN | 18 | | Timeout | 60 | | The default is 900s, but this feels too high | 19 | | Authentication | Windows NTLM | | Ideal on Windows | 20 | | Username / Password | service account credentials | | | 21 | | Trusted locations | qrs-proxy | https://localhost:4244 | Swap out for FQDN | 22 | | Query parameters | xrfkey | 0000000000000000 | Use a random 16 character alphanumeric string to match header below | 23 | | Query headers | X-Qlik-XrfKey | 0000000000000000 | As above | 24 | | Query headers | User-Agent | Windows | | 25 | | Allow HTTPS only | True | | May as well build in this check, as it'll still accept self signed certs | 26 | 27 | ## Alternative option 28 | 29 | If validation fails / creds not available / etc, use an internet facing endpoint which doesn't require authentication to create the connection, then paste the new string and credentials into the QMC. 30 | 31 | URL: https://httpbin.org/get 32 | -------------------------------------------------------------------------------- /snippets/qs_section_access_search.md: -------------------------------------------------------------------------------- 1 | # Section Access Search 2 | 3 | This powershell script helps identify where apps have section access through analysis of the script logs. 4 | 5 | It: 6 | * Creates an array of unique app GUIDs in the script log directory 7 | * Iterates over each unique ID to scan the last modified log file for each for the phrase "Section Access" 8 | * Appends any data found to a CSV log file 9 | 10 | 11 | ``` 12 | # Set the path to the script logs - default C:\ProgramData\Qlik\Sense\Log\Script 13 | $LogPath = 'C:\ProgramData\Qlik\Sense\Log\Script'; 14 | 15 | # Get all app IDs that have a .log file from every reload (in case there are published apps with broken reloads) 16 | $LogFiles = Get-ChildItem $LogPath -filter *.log | 17 | ForEach-Object { 18 | Write-Output $_.BaseName.Split('.')[0]; 19 | } | 20 | Group-Object | 21 | Sort-Object -Property Name | 22 | Select-Object Name; 23 | 24 | # Iterate over each unique filename and search only the latest file for each GUID 25 | $LogFiles | 26 | foreach { 27 | $ScriptLog = $_.Name; 28 | Get-ChildItem -Path $LogPath -filter $ScriptLog*.log | Sort-Object -Descending -Property LastWriteTime | Select -First 1 | Select-String -Pattern 'Section Access' | Export-CSV -Append -Path 'C:\Temp\SectionAccess.csv' 29 | } 30 | ``` 31 | -------------------------------------------------------------------------------- /snippets/qs_task_status.qvs: -------------------------------------------------------------------------------- 1 | // Qlik Sense Task Status 2 | // Version date: 12/09/2019 3 | // Tested compatibility: Qlik Sense February 2018 - June 2019 4 | // Description: This script loads a specific set of tasks (based on the app stream) 5 | 6 | // Mapping load for task status 7 | Map_TaskStatus: 8 | MAPPING LOAD 9 | * 10 | INLINE [ 11 | Status Code, Status Description 12 | 0, 0 - Never started 13 | 1, 1 - Triggered 14 | 2, 2 - Started 15 | 3, 3 - Queued 16 | 4, 4 - Abort initiated 17 | 5, 5 - Aborting 18 | 6, 6 - Aborted 19 | 7, 7 - Successful 20 | 8, 8 - Failed 21 | 9, 9 - Skipped 22 | 10, 10 - Retrying 23 | 11, 11 - Error 24 | 12, 12 - Reset 25 | ]; 26 | 27 | 28 | // Connect to default monitoring connection 29 | LIB CONNECT TO 'monitor_apps_REST_task'; 30 | 31 | // Load temporary table with our data (all data) 32 | RestConnectorMasterTable: 33 | SQL SELECT 34 | "isManuallyTriggered", 35 | "name" AS "name_u2", 36 | "enabled", 37 | "__KEY_root", 38 | (SELECT 39 | "__KEY_app", 40 | "__FK_app", 41 | (SELECT 42 | "name", 43 | "__FK_stream" 44 | FROM "stream" FK "__FK_stream") 45 | FROM "app" PK "__KEY_app" FK "__FK_app"), 46 | (SELECT 47 | "nextExecution", 48 | "__KEY_operational", 49 | "__FK_operational", 50 | (SELECT 51 | "status", 52 | "startTime", 53 | "stopTime", 54 | "duration", 55 | "__FK_lastExecutionResult", 56 | "__KEY_lastExecutionResult" 57 | FROM "lastExecutionResult" PK "__KEY_lastExecutionResult" FK "__FK_lastExecutionResult") 58 | FROM "operational" PK "__KEY_operational" FK "__FK_operational") 59 | FROM JSON (wrap on) "root" PK "__KEY_root"; 60 | 61 | // Create a Tasks table starting at our streams, as we're using this to reduce the data loaded 62 | Fact_Tasks: 63 | LOAD 64 | [name] AS [Stream Name], 65 | [__FK_stream] AS [__KEY_app] 66 | RESIDENT RestConnectorMasterTable 67 | WHERE NOT IsNull([__FK_stream]) 68 | AND WildMatch([name],'*','*RF_UK'); // Where the name of the stream matches our request 69 | 70 | // Join in app information (we only want tasks with apps) 71 | LEFT JOIN (Fact_Tasks) 72 | LOAD 73 | [__KEY_app], 74 | [__FK_app] AS [__KEY_root] 75 | RESIDENT RestConnectorMasterTable 76 | WHERE NOT IsNull([__FK_app]); 77 | 78 | // Join in tasks for those apps using an inner join 79 | // Using an inner join avoids the manual reload tasks 80 | INNER JOIN (Fact_Tasks) 81 | LOAD 82 | [name_u2] AS [Task Name], 83 | [__KEY_root] 84 | RESIDENT RestConnectorMasterTable 85 | WHERE NOT IsNull([__KEY_root]) 86 | AND [isManuallyTriggered]='False' // Where is automatically triggered 87 | AND [enabled]='True'; // Where it's enabled 88 | 89 | // Join in operational history 90 | LEFT JOIN (Fact_Tasks) 91 | LOAD 92 | Timestamp(Floor(Timestamp#([nextExecution],'YYYY-MM-DDThh:mm:ss[.fff]Z'),1/24/60/60),'YYYY-MM-DD hh:mm:ss') AS [Next Execution], 93 | [__KEY_operational], 94 | [__FK_operational] AS [__KEY_root] 95 | RESIDENT RestConnectorMasterTable 96 | WHERE NOT IsNull([__FK_operational]); 97 | 98 | // Join in the execution history 99 | // Correct timestamp formats and round to nearest second (don't need milliseconds!) 100 | LEFT JOIN (Fact_Tasks) 101 | LOAD 102 | [status] AS [Task Status Code], 103 | ApplyMap('Map_TaskStatus',[status],[status]) AS [Task Status], 104 | Timestamp(Floor(Timestamp#([startTime],'YYYY-MM-DDThh:mm:ss[.fff]Z'),1/24/60/60),'YYYY-MM-DD hh:mm:ss') AS [Task Start Time], 105 | Timestamp(Floor(Timestamp#([stopTime],'YYYY-MM-DDThh:mm:ss[.fff]Z'),1/24/60/60),'YYYY-MM-DD hh:mm:ss') AS [Task Stop Time], 106 | CEIL([duration]/1000) AS [Task Duration (s)], 107 | CEIL([duration]/1000/60) AS [Task Duration (m)], 108 | [__FK_lastExecutionResult] AS [__KEY_operational] 109 | RESIDENT RestConnectorMasterTable 110 | WHERE NOT IsNull([__FK_lastExecutionResult]); 111 | 112 | // Now tidy up all those pesky key fields 113 | DROP FIELDS 114 | [__KEY_root], 115 | [__KEY_app], 116 | [__KEY_operational], 117 | [__FK_lastExecutionResult] 118 | ; 119 | 120 | // Drop our REST table 121 | DROP TABLE RestConnectorMasterTable; 122 | -------------------------------------------------------------------------------- /snippets/qs_user_sessions.md: -------------------------------------------------------------------------------- 1 | # Purpose 2 | 3 | TBC 4 | 5 | # Notes 6 | 7 | * Each load performs a full load of the archived log files - this could be optimised to take the latest modified log file, or strip the file name 8 | 9 | # Backlog 10 | 11 | * Verify session data 12 | * Add licence allocation 13 | 14 | # Script 15 | 16 | ``` 17 | 18 | // Extract user accounts and admin roles 19 | 20 | // Connect to user full endpoint (should match /qrs/user/full) 21 | LIB CONNECT TO 'monitor_apps_REST_user'; 22 | 23 | // Load temp table 24 | RestConnectorMasterTable: 25 | SQL SELECT 26 | "id" AS "User GUID", 27 | "userId" AS "User ID", 28 | "userDirectory" AS "User Directory", 29 | "name" AS "User Name", 30 | "inactive" AS "User Inactive", 31 | "removedExternally" AS "User Removed Externally", 32 | "blacklisted" AS "User Blacklisted", 33 | "deleteProhibited" AS "User Delete Prohibited", 34 | "__KEY_root", 35 | (SELECT 36 | "@Value" AS "User Role", 37 | "__FK_roles" 38 | FROM "roles" FK "__FK_roles" ArrayValueAlias "User Role") 39 | FROM JSON (wrap on) "root" PK "__KEY_root"; 40 | 41 | // Load all role assignments 42 | Dim_Roles: 43 | LOAD 44 | [User Role], 45 | [__FK_roles] AS %Key_User 46 | RESIDENT RestConnectorMasterTable 47 | WHERE NOT IsNull([__FK_roles]); 48 | 49 | // Load current user list 50 | Dim_User: 51 | LOAD 52 | [User GUID], 53 | [User ID], 54 | [User Directory], 55 | [User Directory] & '\' & [User ID] AS %Key_UserAccount, 56 | [User Name], 57 | [User Inactive], 58 | [User Removed Externally], 59 | [User Blacklisted], 60 | [User Delete Prohibited], 61 | [__KEY_root] AS %Key_User 62 | RESIDENT RestConnectorMasterTable 63 | WHERE NOT IsNull([__KEY_root]); 64 | 65 | // Drop the temp table 66 | DROP TABLE RestConnectorMasterTable; 67 | 68 | 69 | ``` --------------------------------------------------------------------------------