├── CanJoinOn.qvs ├── CreateFilledCalendar.qvs ├── CreateSectionAccess.txt ├── FillDateValues.txt ├── FixVariables.txt ├── GetScriptsFromGoogleDrive.qvs ├── GetScriptsFromSharepoint.qvs ├── GoogleSheetsAPI.txt ├── LICENSE ├── Next-Prev-Buttons.qvf ├── Pivot Sort Challenge.qvf ├── README.md ├── Uniqueness Checks.qvf ├── Worldmap dotted.qvf ├── allFieldsOfTable.txt ├── codepage-conv └── readme.md ├── create_Northwind_qvds ├── css ├── CSS Hacks.qvf ├── csshacks.txt └── readme.md ├── cyclicgroups ├── __CyclicGroupsAlt.qvf ├── cyclicgroups.txt └── readme.md ├── databridge logo.png ├── dotted_worldmap.txt ├── exec_script_from_google_drive.qvs ├── fieldCompare.qvs ├── fillcalendar.md ├── getCustomProp.qvs ├── json ├── 1.json ├── 2.json ├── README.md ├── _importJson.qvs ├── diagnose-claims.json ├── flights1.json ├── flights2.json ├── ganttsample.json ├── hierarchy.json ├── itacsample.json ├── object.json ├── qrs.json ├── sample2.json └── users.json ├── load_all_qvd.txt ├── next-prev-buttons.txt ├── pivotsort.txt ├── removeFieldsLike.qvs ├── rest-connector ├── BEST REST.qvf ├── ImportHierarchy.qvf ├── json-trees.txt └── readme.md ├── splitTimeSpans.txt ├── store_all_qvd.txt ├── syncSomeSelections.txt └── who_reloads_me.qvs /CanJoinOn.qvs: -------------------------------------------------------------------------------- 1 | 2 | SUB CanJoinOn ($tab1, $tab2, $commonField1, $commonField2, $commonField3, $commonField4, $commonField5) 3 | 4 | /* 5 | provide two tables that you plan to join, add 1 .. 5 fields which you expect that the two tables 6 | would have in common and, hence, over which the join will happen. 7 | 8 | If there are any differences in the effective list of common fields between the two tables in 9 | comparision to what you expected, the script will stop printing the error. 
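An example call with made-up table and field names (adjust these to your own data model):
CALL CanJoinOn('Orders', 'OrderDetails', 'OrderID', 'CustomerID');
If the fields the two tables actually have in common match the ones listed in the call, the reload continues; otherwise it stops.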
10 | */ 11 | 12 | // Find out which fields $tab1 and $tab2 have in common 13 | [~commonFields]: 14 | LOAD FieldName(RecNo(), '$($tab1)') AS [~commonField] 15 | AUTOGENERATE (NoOfFields('$($tab1)')); 16 | INNER JOIN 17 | LOAD FieldName(RecNo(), '$($tab2)') AS [~commonField] 18 | AUTOGENERATE (NoOfFields('$($tab2)')); 19 | LEFT JOIN LOAD 1 AS [~isEffective] AUTOGENERATE(1); 20 | 21 | [~expectedCommonFields]: 22 | LOAD * INLINE [$($commonField1), $($commonField2), $($commonField3), $($commonField4), $($commonField5)]; 23 | 24 | OUTER JOIN ([~commonFields]) 25 | LOAD FieldName(RecNo(), '~expectedCommonFields') AS [~commonField], 1 AS [~isExpected] 26 | AUTOGENERATE (NoOfFields('~expectedCommonFields')); 27 | DROP TABLE [~expectedCommonFields]; 28 | 29 | [~commonFieldsAggregated]: 30 | LOAD Concat('[' & [~commonField] & ']', ' , ') AS [~effectiveCommonFields] 31 | RESIDENT [~commonFields] 32 | WHERE [~isEffective] = 1; 33 | LEFT JOIN 34 | LOAD Concat('[' & [~commonField] & ']', ' , ') AS [~expectedCommonFields] 35 | RESIDENT [~commonFields] 36 | WHERE [~isExpected] = 1; 37 | 38 | LET priv_effectiveCommonFields = Peek('~effectiveCommonFields', -1, '~commonFieldsAggregated'); 39 | LET priv_expectedCommonFields = Peek('~expectedCommonFields', -1, '~commonFieldsAggregated'); 40 | 41 | IF priv_effectiveCommonFields <> priv_expectedCommonFields THEN 42 | TRACE `The tables '$($tab1)' and '$($tab2)' have the following field(s) in common: 43 | $(priv_effectiveCommonFields) 44 | but you wanted the join on those field(s) 45 | $(priv_expectedCommonFields) 46 | so the script is stopped to avoid undesired join effects.`; 47 | [abort here]; 48 | ELSE 49 | TRACE `The tables '$($tab1)' and '$($tab2)' will be joined on their common field(s) 50 | $(priv_effectiveCommonFields)`; 51 | END IF 52 | 53 | LET priv_effectiveCommonFields = Null(); 54 | LET priv_expectedCommonFields = Null(); 55 | 56 | END SUB 57 | -------------------------------------------------------------------------------- /CreateFilledCalendar.qvs: -------------------------------------------------------------------------------- 1 | SUB CreateFilledCalendar(paramDateField, paramDateFormat) 2 | TRACE Looking for Min/Max von $(paramDateField)...; 3 | 4 | tmpMinMaxDate: 5 | LOAD 6 | Max(FieldValue('$(paramDateField)',RecNo())) as MaxDate, 7 | Min(FieldValue('$(paramDateField)',RecNo())) as MinDate 8 | AUTOGENERATE FieldValueCount('$(paramDateField)'); 9 | 10 | LET vCalMinDate= Floor(peek('MinDate', 0, 'tmpMinMaxDate')); 11 | // Let vCalMaxDate= Floor(YearEnd(peek('MaxDate', 0, 'MinMaxDate'))); 12 | LET vCalMaxDate= Floor(peek('MaxDate', 0, 'tmpMinMaxDate')); 13 | 14 | DROP TABLE tmpMinMaxDate; 15 | 16 | LET vTrace = 'Min: ' & Date(vCalMinDate) & ' Max: ' & Date(vCalMaxDate); 17 | TRACE [$(vTrace)]; 18 | 19 | IF Alt(TableNumber('tmpCalendarFromMinToMax'),0) THEN 20 | 21 | // if tmpCalendarFromMinToMax already exists, drop it 22 | DROP TABLE tmpCalendarFromMinToMax; 23 | 24 | END IF 25 | 26 | tmpCalendarFromMinToMax: 27 | LOAD 28 | Date(RowNo() -1 + $(vCalMinDate), '$(paramDateFormat)') as [$(paramDateField)] 29 | AUTOGENERATE ($(vCalMaxDate) - $(vCalMinDate) + 1 ); 30 | 31 | END SUB 32 | 33 | CALL CreateFilledCalendar('orderDate', 'DD.MM.YYYY'); 34 | -------------------------------------------------------------------------------- /CreateSectionAccess.txt: -------------------------------------------------------------------------------- 1 | 2 | SUB 
CreateSectionAccess(vCentralFactTableName,vSecurityTableName,vSecurityFieldList,vMatchOperator,vSkipCondition) 3 | /* 4 | Note: If you are using Qlik Sense Desktop, SECTION ACCESS is not supported and you 5 | may simulate the Section Access table only as a "normal" table in your data model by 6 | selecting a USERID. If you use Qlik Sense Server and you would like to just test the Access table 7 | like with Desktop you can set the variable SectionAccess = '//' outside this sub before 8 | calling it. 9 | This sub doesn't run on QlikView (needs a few modifications) 10 | 11 | Preconditions: 12 | °°°°°°°°°°°°°° 13 | The central Fact table already has a %SECURITYMATCHHASH column, see parameter 1; 14 | 15 | The parameters for this SUB: 16 | °°°°°°°°°°°°°°°°°°°°°°°°°°°° 17 | 1) vCentralFactTableName (mandatory) 18 | Name of the central Facts table where the security will be linked to. This table 19 | must have a %SECURITYMATCHHASH field already, which should be added in the LOAD 20 | block with Hash128() and the same field list as given in argument 3 21 | 22 | 2) vSecurityTableName (mandatory) 23 | Name of the security-describing table. It must have USERID and ACCESS but should 24 | not be in SECTION ACCESS. That will be done inside here. The fields which define 25 | who sees what must have the same name as in the central fact table. 26 | 27 | 3) vSecurityFieldList (mandatory) 28 | List of fields to be considered for security settings in one string, comma- 29 | separated and the field-names must be in [square brackets] like '[Country],[Type]' 30 | 31 | 4) vMatchOperator (optional, defaults to exact match) 32 | You can use 'Wild' or 'mix' or '' (=default if you don't pass this argument) to 33 | compare the values in the Security table against the values in the central Fact table. 34 | If you put 'wild' it supports the usage of '?' and '*' wildcards in the Security 35 | settings table and does a case-insensitive comparison. 'mix' only does case- 36 | insensitive comparison without wildcards. The default is a case-sensitive comparison. 37 | 38 | 5) vSkipCondition (optional, defaults to 'Len({{field}})') 39 | This is used to determine the "no security limitation" setting in the Security table 40 | columns. Typically, if a column is left blank ('') or Null this means "no limit". 41 | Sometimes, people put '*' in that case, then the setting for vSkipCondition 42 | should be ' {{field}}=''*'' ' 43 | 44 | Purpose of this sub: 45 | °°°°°°°°°°°°°°°°°°°° 46 | It will replace the Security settings table with a table called SECTIONACCESSTABLE 47 | which is put in SECTION ACCESS (on Qlik Sense Server) to enforce security, and 48 | it will create a SECURITYLINK table between the SECTIONACCESSTABLE and your central 49 | Fact table with all combinations resolved. 50 | 51 | The SECTIONACCESSTABLE will inherit the columns ACCESS and USERID but not any of the 52 | original 'right-assigning' columns as they otherwise would create circular references 53 | in the data model. Instead, a proper Qlik WHERE formula that represents the original 54 | settings is created together with a hash of it. 55 | 56 | The SECURITYLINK will bridge the Facts to the SECTIONACCESSTABLE where each distinct 57 | SECURITYSEARCHHASH is matched to the SECURITYMATCHHASH. 58 | 59 | For convenience this sub will also create a table SECURITYFORMULAMATCHES which 60 | has a counter of how many matches of a given WHEREFORMULA were found in the Facts 61 | table.
This can be vital to spot misconfiguration that leads to users not seeing 62 | their data rows: Check if one of the rows has 0 in field WHEREFORMULAMATCHES which 63 | means, that rule returns no single data row. 64 | 65 | Examples to call this sub: 66 | °°°°°°°°°°°°°°°°°°°°°°°°°° 67 | SET vSecurityFields = [RegionID],[SalesRepID],[ProdGroupID]; 68 | Facts: LOAD *, Hash128($(vSecurityFields)) AS %SECURITYMATCHHASH; 69 | SQL SELECT RegionID, SalesRepID, ProdGroupID, Sales FROM Sales.dbo.Facts; 70 | SecurityTable: LOAD USERID, ACCESS, RegionID, SalesRepID, ProdGroupID 71 | FROM [lib://MyDataFolder (qtsel_csw)/SectionAcces.txt] 72 | (txt, utf8, embedded labels, delimiter is ',', msq); 73 | CALL CreateSectionAccess('Facts','SecurityTable','$(vSecurityFields)','wild'); 74 | 75 | */ 76 | 77 | TRACE *** Building Security Tables ***; 78 | TRACE vCentralFactTableName = '$(vCentralFactTableName)'; 79 | TRACE vSecurityTableName = '$(vSecurityTableName)'; 80 | SET q = [']; 81 | LET vSecurityFieldList2 = Replace(Replace(vSecurityFieldList,'[',q),']',q); 82 | TRACE vSecurityFieldList = $(vSecurityFieldList); 83 | //LET vMatchOperator = ' ' &Trim(If(Len(vMatchOperator),vMatchOperator, '='))& ' '; 84 | TRACE vMatchOperator = '$(vMatchOperator)'; 85 | LET vSkipCondition = If(Len(vSkipCondition)=0,'Len({{field}})',vSkipCondition); 86 | TRACE vSkipCondition = '$(vSkipCondition)'; 87 | SET vWhereFormula = ['True()']; 88 | 89 | FOR EACH vField IN $(vSecurityFieldList2) 90 | LET vWhereFormula = vWhereFormula 91 | & '& If('& Replace(vSkipCondition,'{{field}}','[$(vField)]') 92 | & ',$(q) AND $(vMatchOperator)Match([$(vField)],$(q)&CHR(39)&[$(vField)]&CHR(39)&$(q))$(q))'; 93 | // & ',$(q) AND [$(vField)]$(vMatchOperator)$(q)&CHR(39)&[$(vField)]&CHR(39))'; 94 | NEXT 95 | LET vWhereFormula = 'Replace(' & vWhereFormula & ',$(q)True() AND$(q),$(q)$(q))'; 96 | TRACE 'Creating SECTIONACCESSTABLE with WHEREFORMULA'; 97 | TRACE $(vWhereFormula); 98 | LET vField = Null(); 99 | 100 | 101 | IF FieldIndex('ACCESS','ADMIN') = 0 THEN 102 | You dont have any ADMIN level user. 
You may lock yourself out from this app!; 103 | END IF 104 | 105 | IF DocumentName() LIKE '*.qvf' AND '$(SectionAccess)' <> '//' THEN 106 | TRACE Seems you are running >>Qlik Sense Desktop<< 107 | Section Access it not supported, creating normal table ...; 108 | LET SectionAccess = '//'; 109 | END IF 110 | 111 | $(SectionAccess) SECTION ACCESS; 112 | 113 | SECTIONACCESSTABLE: 114 | LOAD 115 | Upper(USERID) AS USERID 116 | ,UPPER(ACCESS) AS ACCESS 117 | ,If (ACCESS LIKE 'USER', $(vWhereFormula)) AS WHEREFORMULA 118 | ,Hash128($(vWhereFormula)) AS %SECURITYSEARCHHASH 119 | RESIDENT [$(vSecurityTableName)]; 120 | 121 | $(SectionAccess) SECTION APPLICATION; 122 | 123 | DROP TABLE [$(vSecurityTableName)]; 124 | 125 | tmp_WHEREFORMULA: 126 | // Because FieldValue(), FieldValueCount() doesn't work within a 127 | // SECTION ACCESS table, create a temp copy of just the WHEREFORMULA column 128 | LOAD DISTINCT WHEREFORMULA AS WHEREFORMULA_tmp 129 | RESIDENT SECTIONACCESSTABLE; 130 | 131 | 132 | // Create empty tables with 2 columns 133 | SECURITYLINK: LOAD * INLINE [%SECURITYSEARCHHASH, %SECURITYMATCHHASH]; 134 | SECURITYFORMULAMATCHES: LOAD * INLINE [WHEREFORMULA, WHEREFORMULAMATCHES]; 135 | 136 | LET vMax = FieldValueCount('WHEREFORMULA_tmp'); 137 | FOR v = 1 TO vMax 138 | // Loop through all combinations of WHEREFORMULA and link the matches from Facts 139 | LET vWhere = FieldValue('WHEREFORMULA_tmp', v); 140 | TRACE Security Condition $(v)/$(vMax): $(vWhere); 141 | 142 | LET vBefore = NoOfRows('SECURITYLINK'); 143 | CONCATENATE (SECURITYLINK) 144 | LOAD 145 | Hash128('$(vWhere)') AS %SECURITYSEARCHHASH 146 | ,%SECURITYMATCHHASH 147 | RESIDENT [$(vCentralFactTableName)] 148 | WHERE $(vWhere); 149 | 150 | CONCATENATE (SECURITYFORMULAMATCHES) 151 | LOAD '$(vWhere)' AS WHEREFORMULA 152 | ,NoOfRows('SECURITYLINK') - $(vBefore) AS WHEREFORMULAMATCHES 153 | AUTOGENERATE (1); 154 | NEXT v 155 | 156 | // Delete temporary variables and table 157 | DROP TABLE tmp_WHEREFORMULA; 158 | LET v = Null(); 159 | LET vMax = Null(); 160 | LET vWhere = Null(); 161 | LET vUserExample = Null(); 162 | LET q = Null(); 163 | LET vWhereFormula = Null(); 164 | 165 | // Do not use full-text search on this technical fields: 166 | SEARCH EXCLUDE %SECURITYSEARCHHASH, %SECURITYMATCHHASH, WHEREFORMULA, WHEREFORMULAMATCHES; 167 | 168 | END SUB 169 | -------------------------------------------------------------------------------- /FillDateValues.txt: -------------------------------------------------------------------------------- 1 | /* 2 | This sub fills your fact table with date fields so that you can show Zeros in time series charts when you have no 3 | data in that timespan. 4 | 5 | How to use? 
6 | In your script put this call (it takes 4 arguments): 7 | CALL FillDateValues(<date field>, <table name>, <timestamp format>, <resolution in days>); 8 | - Arg1: Name of the date field 9 | - Arg2: Name of the table that contains this field 10 | - Arg3: proper Timestamp format string 11 | - Arg4: resolution in days (1 = one per day, 1/24 = 1 per hour, 0.5 = every 12 hours) 12 | Example: 13 | CALL FillDateValues('OrderDate', 'OrderTable', 'YYYY-MM-DD hh:mm:ss', 1); 14 | */ 15 | 16 | SUB FillDateValues (vFillDateField, vFillDateTable, vFillDateFormat, vFillResolution) 17 | 18 | TRACE >> 19 | Executing sub "FillDateValues" by Christof Schwarz 20 | Determining min and max value of [$(vFillDateField)] 21 | <<; 22 | 23 | [$tmp_MinMax$(vFillDateField)]: 24 | LOAD 25 | Min(Floor([$(vFillDateField)], $(vFillResolution))) AS $MinFillValue 26 | ,Max(Floor([$(vFillDateField)], $(vFillResolution))) AS $MaxFillValue 27 | RESIDENT [$(vFillDateTable)]; 28 | LET vMinFillDate = Peek('$MinFillValue',0, '$tmp_MinMax$(vFillDateField)'); 29 | LET vMaxFillDate = Peek('$MaxFillValue',0, '$tmp_MinMax$(vFillDateField)'); 30 | DROP TABLE [$tmp_MinMax$(vFillDateField)]; 31 | 32 | LET vFillRows = 'from ' & TimeStamp(vMinFillDate, vFillDateFormat) 33 | & ' to ' & TimeStamp(vMaxFillDate, vFillDateFormat); 34 | TRACE >> 35 | Filling values $(vFillRows) 36 | <<; 37 | 38 | LET vFillRows = NoOfRows(vFillDateTable); 39 | 40 | // Fill Dates: 41 | CONCATENATE ([$(vFillDateTable)]) 42 | LOAD 43 | $(vMinFillDate) + (RecNo()-1) * $(vFillResolution) AS [$(vFillDateField)], 44 | Num($(vMinFillDate) + (RecNo()-1) * $(vFillResolution),'','.',' ') AS [$FilledDates], 45 | 0 AS [$FillZeros] 46 | AUTOGENERATE(($(vMaxFillDate)-$(vMinFillDate)) / $(vFillResolution) + 1); 47 | 48 | LET vFillRows = NoOfRows(vFillDateTable) - vFillRows; 49 | TRACE >> 50 | $(vFillRows) values for field [$(vFillDateField)] added to [$(vFillDateTable)] 51 | <<; 52 | LET vCodeNumFormat = Replace('"","."," "', CHR(34), CHR(39)); // double-quotes -> single quotes 53 | LET vFillZerosSet = '{1 <$FilledDates = {">=$' 54 | & '(=Num(Min([$(vFillDateField)]),' & vCodeNumFormat &'))<=$' 55 | & '(=Num(Max([$(vFillDateField)]),' & vCodeNumFormat &'))"}>}'; 56 | 57 | LET vFillZeros = 'If(Count({1 <$FilledDates = {">=$' 58 | & '(=Num(Min([$(vFillDateField)]),' & vCodeNumFormat &'))<=$' 59 | & '(=Num(Max([$(vFillDateField)]),' & vCodeNumFormat &'))"}>} $FillZeros),0)'; 60 | // Delete temporary variables 61 | LET vFillRows = Null(); 62 | LET vFillDateTable = Null(); 63 | LET vFillDateField = Null(); 64 | LET vCodeNumFormat = Null(); 65 | LET vMinFillDate = Null(); 66 | LET vMaxFillDate = Null(); 67 | 68 | TAG FIELDS $FilledDates, $FillZeros WITH $hidden; 69 | END SUB 70 | 71 | -------------------------------------------------------------------------------- /FixVariables.txt: -------------------------------------------------------------------------------- 1 | TRACE including FixVariables.txt v1.0; 2 | 3 | SUB FixVariables (pVarListe) 4 | 5 | // replaces the $:( in variables with $( 6 | // and removes the Tab-sign (Chr 9) and Newline-sign (chr 10) 7 | // call it like this, comma-separate multiple variables in the parameter 8 | // CALL FixVariables('var1, var2, var3'); 9 | 10 | [~FixVariablen]: 11 | LOAD * INLINE [$(pVarListe)]; 12 | 13 | FOR priv = 1 TO NoOfFields('~FixVariablen'); 14 | LET vVarName = FieldName(priv, '~FixVariablen'); 15 | TRACE Fixing variable $(vVarName); 16 | LET [$(vVarName)] = Replace(Replace(Replace([$(vVarName)] 17 | ,'$:(', '$' & '('), CHR(9), ' '), CHR(10), ' '); 18 | NEXT priv; 19 | DROP TABLE [~FixVariablen]; 20 | LET priv
= Null(); 21 | LET vVarName = Null(); 22 | 23 | END SUB 24 | 25 | -------------------------------------------------------------------------------- /GetScriptsFromGoogleDrive.qvs: -------------------------------------------------------------------------------- 1 | LET vAllScripts = '/* please call SUB GetScriptsFromGoogleDrive */'; 2 | 3 | SUB GetScriptsFromGoogleDrive(_connection1, _connection2, _folder, _pattern) 4 | 5 | /* 6 | will load and execute all script files (define file pattern as 4th argument e.g. '*.qvs' 7 | 8 | needs two connection to Google Drive 9 | - Google Drive & Spreadsheets connection to read meta data about the Google Drive (file list/folder list) 10 | - Google Drive connection to extract the text files 11 | 12 | All *.qvs files found are put into variables vScript1, vScript2 ... vScriptN (alphabetically sorted) 13 | and a variable vAllScripts is set to call the vScripts. 14 | 15 | So to execute script from Google Drive Folder do the following 2 commands: 16 | 17 | CALL GetScriptsFromGoogleDrive('GDrive_meta_connection', 'GDrive_files_connection', 'rootfolder/subfolder', '*.qvs'); 18 | $(vAllScripts); 19 | 20 | One script file must not exceed 64kB 21 | 22 | */ 23 | LIB CONNECT TO '$(_connection1)'; 24 | [~gDriveContent]: 25 | HIERARCHY (id, parents_id, title, 'parent_name', title, 'path', '/', 'hierarchy_level') 26 | LOAD 27 | id, title, parents_id, 28 | if(mimeType like '*folder*', id) AS folderId, 29 | if(NOT mimeType like '*folder*', title) AS fileName 30 | WHERE 31 | NOT labels_trashed LIKE 'true' 32 | ; 33 | SELECT 34 | title, id, mimeType, parents_id, labels_trashed 35 | FROM 36 | ListFiles 37 | WITH PROPERTIES ( 38 | driveId='', 39 | query='' 40 | ); 41 | 42 | DISCONNECT; 43 | 44 | // reduce the full list of files to the ones inside and below the given _folder 45 | INNER JOIN ([~gDriveContent]) 46 | LOAD DISTINCT 47 | path 48 | RESIDENT 49 | [~gDriveContent] 50 | WHERE 51 | path LIKE '$(_folder)*'; 52 | 53 | IF FieldValueCount('folderId') = 0 THEN 54 | [Folder "$(_folder)" not found in connection $(_connection1)]; 55 | ELSEIF FieldValueCount('folderId') > 1 THEN 56 | [More than one folder "$(_folder)" found in connection $(_connection1)]; 57 | ELSE 58 | LET v_GAS_folderId = FieldValue('folderId', 1); 59 | TRACE Folder "$(_folder)" has id $(v_GAS_folderId); 60 | 61 | LET vAllScripts = ''; 62 | 63 | [~scriptFiles]: 64 | LOAD title AS [~scriptFileName] 65 | RESIDENT [~gDriveContent] 66 | WHERE title LIKE '$(_pattern)' 67 | ORDER BY title ASC; 68 | 69 | IF NoOfRows('~scriptFiles') = 0 THEN 70 | [No $(_pattern) script files found in folder $(_folder)]; 71 | ELSE 72 | FOR v_GAS_i = 1 TO FieldValueCount('~scriptFileName') 73 | LET v_GAS_FileName = FieldValue('~scriptFileName', v_GAS_i); 74 | 75 | reading: 76 | LOAD Concat(@1,CHR(10),RecNo()) AS [$script] 77 | FROM [lib://$(_connection2)/$(v_GAS_folderId)/$(v_GAS_FileName)] 78 | (txt, utf8, no labels, delimiter is '\n', no quotes); 79 | 80 | LET vScript$(v_GAS_i) = Peek('$script'); 81 | DROP TABLE reading; 82 | 83 | LET vAllScripts = vAllScripts & 'TRACE executing script $(_folder)/$(v_GAS_FileName);' & CHR(10) 84 | & '$' & '(vScript$(v_GAS_i));' & CHR(10) 85 | & 'LET vScript$(v_GAS_i) = Null();' & CHR(10); 86 | NEXT v_GAS_i 87 | 88 | TRACE GetAllScripts found $(v_GAS_i) *.qvs files in folder $(_folder); 89 | END IF 90 | LET v_GAS_i = Null(); 91 | LET v_GAS_FileName = Null(); 92 | LET v_GAS_folderId = Null(); 93 | END IF 94 | DROP TABLES [~scriptFiles], [~gDriveContent]; 95 | END SUB 96 | 
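// For illustration: assuming two files a.qvs and b.qvs are found in 'rootfolder/subfolder' (made-up names),
// the sub leaves vAllScripts holding roughly the following generated script:
// TRACE executing script rootfolder/subfolder/a.qvs;
// $(vScript1);
// LET vScript1 = Null();
// TRACE executing script rootfolder/subfolder/b.qvs;
// $(vScript2);
// LET vScript2 = Null();
// so a single $(vAllScripts); statement right after the CALL runs all downloaded scripts in order.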
-------------------------------------------------------------------------------- /GetScriptsFromSharepoint.qvs: -------------------------------------------------------------------------------- 1 | LET vAllScripts = '/* please call SUB GetScriptsFromSharepoint */'; 2 | 3 | SUB GetScriptsFromSharepoint(_connection1, _connection2, _site, _folder, _pattern) 4 | 5 | /* 6 | will load and execute all script files (define file pattern as 4th argument e.g. '*.qvs' 7 | 8 | needs two connection to Sharepoint 9 | - metadata connection to Sharepoint (file folder contents) 10 | - Sharepoint Drive connection to extract the text files 11 | 12 | All *.qvs files found are put into variables vScript1, vScript2 ... vScriptN (alphabetically sorted) 13 | and a variable vAllScripts is set to call the vScripts. 14 | 15 | So to execute script from Sharepoint do the following 2 commands: 16 | 17 | CALL GetScriptsFromSharepoint('Sharepoint_meta_connection', 'Sharepoint_files_connection', 'sites/team', 'folder/subfolder', '*.qvs'); 18 | $(vAllScripts); 19 | 20 | One script file must not exceed 64kB 21 | 22 | */ 23 | // Add a leading / to site and folder 24 | 25 | LET v_priv_SharepointSite = If(Left(_site, 1) <> '/', '/') & _site; 26 | LET v_priv_SharepointFolder = If(Left(_folder, 1) <> '/', '/') & _folder; 27 | 28 | // remove a trailling / from folder if provided in the param 29 | 30 | IF Right(v_priv_SharepointFolder, 1) = '/' THEN 31 | LET v_priv_SharepointFolder = Left(v_priv_SharepointFolder, Len(v_priv_SharepointFolder) - 1); 32 | ENDIF 33 | 34 | LIB CONNECT TO '$(_connection1)'; 35 | 36 | [~folderContent]: 37 | SELECT Name 38 | FROM ListFiles 39 | WITH PROPERTIES ( 40 | subSite='$(v_priv_SharepointSite)', 41 | folder='$(v_priv_SharepointSite)$(v_priv_SharepointFolder)', 42 | maxResults='' 43 | ); 44 | DISCONNECT; 45 | 46 | [~scriptFiles]: 47 | LOAD 48 | Name AS [~scriptFileName] 49 | RESIDENT 50 | [~folderContent] 51 | WHERE 52 | Name LIKE '$(_pattern)' 53 | ORDER BY 54 | Name ASC; 55 | 56 | DROP TABLE [~folderContent]; 57 | 58 | IF NoOfRows('~scriptFiles') = 0 THEN 59 | [No $(_pattern) script files found in folder $(_folder)]; 60 | ELSE 61 | FOR v_GAS_i = 1 TO FieldValueCount('~scriptFileName') 62 | LET v_GAS_FileName = FieldValue('~scriptFileName', v_GAS_i); 63 | 64 | reading: 65 | LOAD Concat(@1,CHR(10),RecNo()) AS [$script] 66 | FROM [lib://$(_connection2)$(v_priv_SharepointFolder)/$(v_GAS_FileName)] 67 | (txt, utf8, no labels, delimiter is '\n', no quotes); 68 | 69 | LET vScript$(v_GAS_i) = Peek('$script'); 70 | DROP TABLE reading; 71 | 72 | LET vAllScripts = vAllScripts & 'TRACE executing script $(_folder)/$(v_GAS_FileName);' & CHR(10) 73 | & '$' & '(vScript$(v_GAS_i));' & CHR(10) 74 | & 'LET vScript$(v_GAS_i) = Null();' & CHR(10); 75 | NEXT v_GAS_i 76 | 77 | TRACE GetAllScripts found $(v_GAS_i) *.qvs files in folder $(_folder); 78 | END IF 79 | 80 | LET v_GAS_i = Null(); 81 | LET v_GAS_FileName = Null(); 82 | LET v_priv_SharepointSite = Null(); 83 | LET v_priv_SharepointFolder = Null(); 84 | 85 | DROP TABLES [~scriptFiles]; 86 | 87 | END SUB 88 | -------------------------------------------------------------------------------- /GoogleSheetsAPI.txt: -------------------------------------------------------------------------------- 1 | // needed to encode url parameters 2 | EncodeChars: MAPPING LOAD * INLINE [ 3 | ' ' = %20 4 | '/' = %2F 5 | ] (no labels, delimiter is '='); 6 | 7 | /* 8 | To get a RefreshToken ... 
9 | Create a project at https://console.developers.google.com 10 | Under Credentials add a "Oauth 2.0 client ID" which is of type Web Client 11 | Note down its clientID and add below into UserSettings section. 12 | As "Authorized redirect URIs" put https://developers.google.com/oauthplayground 13 | Note down the clientSecret and add below into UserSettings section 14 | 15 | go to https://developers.google.com/oauthplayground/ 16 | under settings on the right checkmark "Use your own OAuth credentials" and reuse ClientID and ClientSecret 17 | Follow the Step 1 the left (taking you to a user consent, which you have to accept) 18 | Under Step 2 click "Exchange authorization code for tokens" 19 | Note down the Refresh token and add into UserSettings section 20 | */ 21 | 22 | UserSettings: MAPPING LOAD * INLINE [ 23 | SpreadsheetId = 1POih73B4T2v8wEgupinuRzs2bh_fQQfuYx8ogkKUKmU 24 | SheetRange = Form responses 1 25 | ClientID = 645709070951-e1re3ghvpq6vvnhjophtuu8c531ru7s2.apps.googleusercontent.com 26 | ClientSecret = NGBLOQLrYtCSL0oW9kSGkvbw 27 | RefreshToken = 1/NPvMZYKwLAq0VebH6FGl8t-SA3rIRGDUSDZ2tccICRk 28 | ] (no labels, delimiter is '='); 29 | 30 | LET vGoogleTokenAPI = 'https://www.googleapis.com/oauth2/v4/token'; 31 | LET vGoogleSheetAPI = 'https://sheets.googleapis.com/v4/spreadsheets/' & ApplyMap('UserSettings', 'SpreadsheetId') 32 | & '/values/' & MapSubString('EncodeChars', ApplyMap('UserSettings', 'SheetRange')); 33 | LET vClientID = ApplyMap('UserSettings', 'ClientID'); 34 | LET vClientSecret = ApplyMap('UserSettings', 'ClientSecret'); 35 | LET vRefreshToken = ApplyMap('UserSettings', 'RefreshToken'); 36 | 37 | 38 | LIB CONNECT TO 'Post-Request (qlikcloud_qlikid_csw)'; // <- replace with yours 39 | 40 | TRACE POST request to $(vGoogleTokenAPI); 41 | 42 | BearerToken: 43 | SQL SELECT 44 | "access_token", 45 | "expires_in", 46 | "scope", 47 | "token_type" 48 | FROM JSON (wrap on) "root" 49 | WITH CONNECTION ( 50 | URL "$(vGoogleTokenAPI)", 51 | QUERY "client_secret" "$(vClientSecret)", 52 | QUERY "grant_type" "refresh_token", 53 | QUERY "refresh_token" "$(vRefreshToken)", 54 | QUERY "client_id" "$(vClientID)", 55 | HTTPHEADER "content-type" "application/x-www-form-urlencoded" 56 | ); 57 | 58 | TRACE New Token requested:; 59 | LET vToken = Peek('access_token', -1, 'BearerToken'); 60 | TRACE $(vToken); 61 | DROP TABLE BearerToken; 62 | 63 | LIB CONNECT TO 'Get-Request (qlikcloud_qlikid_csw)'; // <- replace with yours 64 | 65 | TRACE GET request to $(vGoogleSheetAPI); 66 | 67 | GoogleSheetRaw: 68 | LOAD @Value 69 | ,__FK_values_u0 AS RowNo 70 | ,If(__FK_values_u0 = Peek('RowNo'), RangeSum(Peek('ColNo'), 1), 1) AS ColNo 71 | WHERE NOT IsNull(__FK_values_u0); 72 | SQL SELECT 73 | // "__KEY_root", 74 | (SELECT 75 | // "__FK_values", 76 | // "__KEY_values", 77 | (SELECT 78 | "@Value", 79 | "__FK_values_u0" 80 | FROM "values" FK "__FK_values_u0" ArrayValueAlias "@Value") 81 | FROM "values" PK "__KEY_values" FK "__FK_values") 82 | FROM JSON (wrap on) "root" PK "__KEY_root" 83 | WITH CONNECTION ( 84 | URL "$(vGoogleSheetAPI)" 85 | ,HTTPHEADER "Authorization" "Bearer $(vToken)" 86 | ); 87 | 88 | 89 | // Transpose the array into a table with column names 90 | 91 | LET vCols = 0; 92 | FOR v = 1 TO FieldValueCount('ColNo'); 93 | LET vCols = RangeMax(vCols, FieldValue('ColNo',v)); 94 | NEXT v 95 | TRACE $(vCols) columns found in spreadsheet; 96 | 97 | LET vPrefix = 'GoogleSheetData:'; 98 | FOR v = 1 TO vCols 99 | LET vColName = Lookup('@Value','ColNo',v,'GoogleSheetRaw'); 100 | WHEN Len(vColName) 
= 0 LET vColName = 'Col$(v)'; 101 | TRACE $(v) = $(vColName); 102 | WHEN v > 1 LET vPrefix = 'LEFT JOIN (GoogleSheetData)'; 103 | $(vPrefix) 104 | LOAD 105 | RowNo 106 | ,@Value AS [$(vColName)] RESIDENT GoogleSheetRaw 107 | WHERE ColNo = $(v) AND RowNo > 1; 108 | NEXT v; 109 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Christof Schwarz 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Next-Prev-Buttons.qvf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChristofSchwarz/QlikScripts/8f521fad886a813a94d4209e121002066b59cf5b/Next-Prev-Buttons.qvf -------------------------------------------------------------------------------- /Pivot Sort Challenge.qvf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChristofSchwarz/QlikScripts/8f521fad886a813a94d4209e121002066b59cf5b/Pivot Sort Challenge.qvf -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # QlikScripts 2 | Useful Qlik Script Code Snippets like 3 | * [AlmostAlternateStates (SyncSomeSelections)](https://github.com/ChristofSchwarz/QlikScripts/blob/master/syncSomeSelections.txt) 4 | * Substitute for BINARY load: [Store all tables to QVD](https://github.com/ChristofSchwarz/QlikScripts/blob/master/store_all_qvd.txt) and [Load all tables from QVD](https://github.com/ChristofSchwarz/QlikScripts/blob/master/load_all_qvd.txt) 5 | * [Advanced Section Access Mapping](https://github.com/ChristofSchwarz/QlikScripts/blob/master/CreateSectionAccess.txt) 6 | 7 | Feel free to either download my snippets or include them directly from GitHub. How? 8 | 9 | If you want to refer to the snippet from your Qlik Load script and include it online 10 | * click on "Raw" button of the snippet page here in Github and copy the link of the raw-text url ... 
11 | ![alttext](https://github.com/ChristofSchwarz/pics/raw/master/rawsnippet.png "screenshot") 12 | 13 | * Then create a new "Web File" connection type in Qlik Sense and paste the url 14 | ![alttext](https://github.com/ChristofSchwarz/pics/raw/master/webfileconn.png "screenshot") 15 | 16 | * Check the name of the new data connection (in my case "syncSomeSelections (qse-csw_admincsw)") and write the following line, which then will include the code live from Github. 17 | ``` 18 | $(must_include=[lib://syncSomeSelections (qse-csw_admincsw)]); 19 | ``` 20 | 21 | 22 | -------------------------------------------------------------------------------- /Uniqueness Checks.qvf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChristofSchwarz/QlikScripts/8f521fad886a813a94d4209e121002066b59cf5b/Uniqueness Checks.qvf -------------------------------------------------------------------------------- /Worldmap dotted.qvf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChristofSchwarz/QlikScripts/8f521fad886a813a94d4209e121002066b59cf5b/Worldmap dotted.qvf -------------------------------------------------------------------------------- /allFieldsOfTable.txt: -------------------------------------------------------------------------------- 1 | // Puts a comma-separated list of all fields of table except those matching 2 | // one of the fields or patterns in the vSkipFields variable 3 | 4 | LET vOfTable = 'Tabelle1'; 5 | SET vSkipFields = `'%*', 'Column 2'`; // ignore some fields starting with % or "Column 2" 6 | SET vSkipFields = `''`; // use this if you want all fields to be returned 7 | LET vFieldList = ''; 8 | FOR vFieldIndex = 1 TO NoOfFields(vOfTable); 9 | IF NOT WildMatch(FieldName(vFieldIndex, vOfTable), $(vSkipFields)) THEN 10 | LET vFieldList = If(Len(vFieldList), vFieldList & ',[', '[') & FieldName(vFieldIndex, vOfTable) & ']'; 11 | END IF 12 | NEXT vFieldIndex 13 | TRACE Field list of table $(vOfTable):; 14 | TRACE $(vFieldList); 15 | // delete temp variables 16 | LET vFieldIndex = Null(); 17 | LET vOfTable = Null(); 18 | LET vSkipFields = Null(); 19 | -------------------------------------------------------------------------------- /codepage-conv/readme.md: -------------------------------------------------------------------------------- 1 | read 2 | -------------------------------------------------------------------------------- /create_Northwind_qvds: -------------------------------------------------------------------------------- 1 | 2 | LIB CONNECT TO 'REST'; // REST GET connection with "allow WITH CONNECTION" enabled 3 | LET vSource = 'https://raw.githubusercontent.com/graphql-compose/graphql-compose-examples/refs/heads/master/examples/northwind/data/csv'; 4 | 5 | 6 | categories: 7 | SELECT 8 | "categoryID","categoryName","description" 9 | FROM CSV (header on, delimiter ",", quote """") "CSV_source" 10 | WITH CONNECTION ( URL "$(vSource)/categories.csv" ); 11 | 12 | DROP FIELD __extra_; 13 | STORE categories INTO [lib://Training/Northwind/categories.qvd] (qvd); 14 | 15 | employees: 16 | SELECT 17 | "employeeID","lastName","firstName","title","titleOfCourtesy","birthDate","hireDate","address","city","region","postalCode","country","homePhone","extension","notes","reportsTo" 18 | FROM CSV (header on, delimiter ",", quote """") "CSV_source" 19 | WITH CONNECTION ( URL "$(vSource)/employees.csv" ); 20 | 21 | DROP FIELD __extra_; 22 | STORE employees INTO
[lib://Training/Northwind/employees.qvd] (qvd); 23 | 24 | employee_territories: 25 | SELECT 26 | "employeeID","territoryID" 27 | FROM CSV (header on, delimiter ",", quote """") "CSV_source" 28 | WITH CONNECTION ( URL "$(vSource)/employee_territories.csv" ); 29 | 30 | DROP FIELD __extra_; 31 | STORE employee_territories INTO [lib://Training/Northwind/employee_territories.qvd] (qvd); 32 | 33 | orders: 34 | SELECT 35 | "orderID","customerID","employeeID","orderDate","requiredDate","shippedDate","shipVia","freight","shipName","shipAddress","shipCity","shipRegion","shipPostalCode","shipCountry" 36 | FROM CSV (header on, delimiter ",", quote """") "CSV_source" 37 | WITH CONNECTION ( URL "$(vSource)/orders.csv" ); 38 | 39 | DROP FIELD __extra_; 40 | STORE orders INTO [lib://Training/Northwind/orders.qvd] (qvd); 41 | 42 | 43 | order_details: 44 | SELECT 45 | "orderID","productID","unitPrice","quantity","discount" 46 | FROM CSV (header on, delimiter ",", quote """") "CSV_source" 47 | WITH CONNECTION ( URL "$(vSource)/order_details.csv" ); 48 | 49 | DROP FIELD __extra_; 50 | STORE order_details INTO [lib://Training/Northwind/order_details.qvd] (qvd); 51 | 52 | QUALIFY 'unitPrice'; 53 | products: 54 | SELECT 55 | "productID","productName","supplierID","categoryID","quantityPerUnit","unitPrice","unitsInStock","unitsOnOrder","reorderLevel","discontinued" 56 | FROM CSV (header on, delimiter ",", quote """") "CSV_source" 57 | WITH CONNECTION ( URL "$(vSource)/products.csv" ); 58 | 59 | DROP FIELD __extra_; 60 | STORE products INTO [lib://Training/Northwind/products.qvd] (qvd); 61 | 62 | 63 | territories: 64 | SELECT 65 | "territoryID","territoryDescription","regionID" 66 | FROM CSV (header on, delimiter ",", quote """") "CSV_source" 67 | WITH CONNECTION ( URL "$(vSource)/territories.csv" ); 68 | 69 | DROP FIELD __extra_; 70 | STORE territories INTO [lib://Training/Northwind/territories.qvd] (qvd); 71 | 72 | 73 | regions: 74 | SELECT 75 | "regionID","regionDescription" 76 | FROM CSV (header on, delimiter ",", quote """") "CSV_source" 77 | WITH CONNECTION ( URL "$(vSource)/regions.csv" ); 78 | 79 | DROP FIELD __extra_; 80 | STORE regions INTO [lib://Training/Northwind/regions.qvd] (qvd); 81 | 82 | 83 | // QUALIFY *; 84 | // UNQUALIFY '*ID', '__extra_'; 85 | suppliers: 86 | SELECT 87 | "supplierID","companyName","contactName","contactTitle","address","city","region","postalCode","country","phone","fax" 88 | FROM CSV (header on, delimiter ",", quote """") "CSV_source" 89 | WITH CONNECTION ( URL "$(vSource)/suppliers.csv" ); 90 | 91 | DROP FIELD __extra_; 92 | STORE suppliers INTO [lib://Training/Northwind/suppliers.qvd] (qvd); 93 | 94 | 95 | shippers: 96 | SELECT 97 | "shipperID","companyName","phone" 98 | FROM CSV (header on, delimiter ",", quote """") "CSV_source" 99 | WITH CONNECTION ( URL "$(vSource)/shippers.csv" ); 100 | 101 | DROP FIELD __extra_; 102 | STORE shippers INTO [lib://Training/Northwind/shippers.qvd] (qvd); 103 | 104 | 105 | // QUALIFY *; 106 | // UNQUALIFY '*ID', '__extra_'; 107 | 108 | 109 | customers: 110 | SELECT 111 | "customerID","companyName","contactName","contactTitle","address","city","region","postalCode","country","phone","fax" 112 | FROM CSV (header on, delimiter ",", quote """") "CSV_source" 113 | WITH CONNECTION ( URL "$(vSource)/customers.csv" ); 114 | 115 | DROP FIELD __extra_; 116 | STORE customers INTO [lib://Training/Northwind/customers.qvd] (qvd); 117 | 118 | 119 | EXIT SCRIPT; 120 | --------------------------------------------------------------------------------
/css/CSS Hacks.qvf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChristofSchwarz/QlikScripts/8f521fad886a813a94d4209e121002066b59cf5b/css/CSS Hacks.qvf -------------------------------------------------------------------------------- /css/csshacks.txt: -------------------------------------------------------------------------------- 1 | $Effect | $CSS 2 | Pivot: Null Values invisible | .cell.ng-scope.null-value {background-color:rgba(0,0,0,0); color: rgba(0,0,0,0); font-size:0px;} 3 | Pivot: Bold cells as normal | .qv-pt .cell.bold { font-weight: 300; } 4 | Multi-KPI: Hide object | .qv-object-qlik-multi-kpi { display:none; } .qv-mode-edit .qv-object-qlik-multi-kpi { display:flex; } 5 | Pivot: No dropdown listboxes | [tid="meta.rows"],[tid="meta.columns"] {display:none;} 6 | -------------------------------------------------------------------------------- /css/readme.md: -------------------------------------------------------------------------------- 1 | ## Tricks with CSS for Qlik Sense Objects 2 | 3 | Download the app here. 4 | 5 | Video resource: https://www.youtube.com/watch?v=9lhL3Nrel5Q 6 | 7 | The app loads this text file, you can also copy/paste the css right from here it you want. 8 | 9 | // lib://csshacks.txt points to a Webfile connection at 10 | // https://raw.githubusercontent.com/ChristofSchwarz/QlikScripts/master/css/csshacks.txt 11 | 12 | LOAD 13 | $Effect, 14 | $CSS 15 | FROM [lib://csshacks.txt] 16 | (txt, utf8, embedded labels, delimiter is '|', no quotes); 17 | 18 | -------------------------------------------------------------------------------- /cyclicgroups/__CyclicGroupsAlt.qvf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChristofSchwarz/QlikScripts/8f521fad886a813a94d4209e121002066b59cf5b/cyclicgroups/__CyclicGroupsAlt.qvf -------------------------------------------------------------------------------- /cyclicgroups/cyclicgroups.txt: -------------------------------------------------------------------------------- 1 | // CyclicGroups: a inline table with at least the following 3 2 | // columns: $cgId, $cgField, $cgLabel.EN 3 | // EN is the English label (the UI looks for the field ending 4 | // with two chars as per variable vLanguage ! 5 | // feel free to add/remove labels in different languages like $cgLabel.DE 6 | // Two hints: 7 | // - you can also use formulas in the $cgField column (a calculated dim), 8 | // put formula in "double-quotes" 9 | // - put field names with spaces or special characters in [square brackets] 10 | 11 | [CyclicGroups]: 12 | LOAD *, 13 | RecNo() AS $cgSort, 14 | Peek('$cgSetVars') & If($cgId <> Peek('$cgId'), CHR(10) & 15 | 'SET [vCG' & $cgId & '] = `' & $cgField & '`;') AS $cgSetVars 16 | INLINE 17 | /* ▼ Cyclic Group Definitions ▼ */ ` 18 | 19 | $cgId, $cgField, $cgLabel.EN, $cgLabel.DE 20 | 21 | 1, Year, Year, Jahr 22 | 1, Quarter, Quarter, Quartal 23 | 1, Month, Month, Monat 24 | // 1, "Date(MakeDate(2020,Month,1),'MMM')", Month, Monat 25 | 26 | 2, "'All'", Total, Total 27 | 2, [Insurance Company], per insurance, pro Versicherung 28 | 29 | `/* ▲ Cyclic Group Definitions ▲ */ 30 | WHERE NOT $cgId LIKE '//*'; 31 | 32 | // Create a variable vCG# for each Group from above table ... 
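// For the sample definitions above this produces e.g. SET [vCG1] = `Year`; for the first group;
// presumably the charts then use $(vCG1), $(vCG2), ... as calculated dimensions while the buttons shown
// in the readme/video rotate each variable through the fields of its group.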
33 | LET vCGTmp = Peek('$cgSetVars', -1, 'CyclicGroups') & CHR(10); 34 | TRACE `** initializing CyclicGroup variables **$(vCGTmp)`; 35 | $(vCGTmp); 36 | // Set all fields of CyclicGroups table to $hidden 37 | FOR vCGTmp = 1 To NoOfFields('CyclicGroups'); 38 | LET vCGField = FieldName(vCGTmp, 'CyclicGroups'); 39 | TAG FIELDS [$(vCGField)] WITH '$hidden'; 40 | NEXT vCGTmp; 41 | LET vCGTmp = Null(); // remove temp variable 42 | LET vCGField = Null(); // remove temp variable 43 | -------------------------------------------------------------------------------- /cyclicgroups/readme.md: -------------------------------------------------------------------------------- 1 | # Cyclic Groups substitute for Qlik Sense 2 | 3 | I am explaining the app and the script here: 4 | https://www.youtube.com/watch?v=0RwJXgEQZ8c 5 | -------------------------------------------------------------------------------- /databridge logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChristofSchwarz/QlikScripts/8f521fad886a813a94d4209e121002066b59cf5b/databridge logo.png -------------------------------------------------------------------------------- /dotted_worldmap.txt: -------------------------------------------------------------------------------- 1 | // download app from https://github.com/ChristofSchwarz/QlikScripts/raw/master/Worldmap%20dotted.qvf 2 | 3 | TextMap: 4 | LOAD Mid(@1,2) AS @1 5 | INLINE [ 6 | ^ #### ##### ############ #### #### 7 | ^ ####### ### ######### ## ######## ## 8 | ^ ## ######### ### ####### #### ## ####################### 9 | ^#################### ### ##### ######################################### 10 | ^################### #### ### ## ######################################### 11 | ^#### ############ ### ## # ### ################################### 12 | ^ # ########### ##### ### ## ############################# # 13 | ^ ################## ### ################################ 14 | ^ ############### # ################################## 15 | ^ ############### ####### ## #################### # 16 | ^ ############# ### ######### ################### # 17 | ^ ############ ## ### ######################## # # 18 | ^ ########## ###### ##################### # 19 | ^ ######### ################################ 20 | ^ #### # ############ ### ############### 21 | ^ ### # ## ############# ##### ########### 22 | ^ #### ## ############## ### ### ### # 23 | ^ ## ################ ## ### # 24 | ^ ###### ################ ## # # 25 | ^ ###### ############## ## # 26 | ^ ######## ######## ## ##### 27 | ^ ########## ####### ## ## ##### 28 | ^ ########### ####### ## # ### 29 | ^ ########## ####### ## # # 30 | ^ ######### ####### # ## # 31 | ^ ######## ####### ## ###### # 32 | ^ ####### ##### # ######### # 33 | ^ ##### ##### ########## 34 | ^ ##### #### ########## 35 | ^ ##### ## ### #### # 36 | ^ #### ### ## 37 | ^ ### # ## 38 | ^ ## # 39 | ^ ## 40 | ^ ## 41 | ^ # 42 | ] (no labels); 43 | 44 | ContinentForPoint: 45 | MAPPING LOAD 46 | Hash128(x, Subfield(ys, ',')+1), Continent 47 | INLINE [Continent x ys 48 | EU 34 -5 49 | EU 35 -5 50 | EU 37 -11,-12,-7,-8 51 | EU 38 -11,-12,-6,-7,-8 52 | EU 39 -10,-11,-7,-8,-9 53 | EU 40 -10,-9 54 | EU 41 -10,-5,-6,-7,-8,-9 55 | EU 42 -1,-10,-11,-12,-4,-5,-6,-7,-8,-9 56 | EU 43 -1,-10,-11,-4,-5,-6,-8,-9 57 | EU 44 -1,-10,-11,-12,-3,-4,-5,-7,-8,-9 58 | EU 45 -1,-10,-11,-3,-4,-5,-6,-7,-8,-9 59 | EU 46 -3,-4,-5,-6,-7,-8,-9 60 | EU 47 -3,-4,-5,-6,-7,-8,-9 61 | EU 48 -4,-5,-6,-7,-8,-9 62 | EU 49 -10,-4,-5,-6,-7,-8,-9 63 
| EU 50 -10,-4,-5,-6,-7,-8,-9 64 | EU 51 -4,-5,-6,-7,-8,-9 65 | EU 52 -3,-4,-5,-6,-7,-8 66 | EU 53 -2,-3,-4,-5,-6,-7 67 | EU 54 -2,-4,-5,-6 68 | SA 20 -21,-22,-23 69 | SA 21 -19,-20,-21,-22,-23,-24,-25,-34,-35 70 | SA 22 -19,-20,-21,-22,-23,-24,-25,-26,-30,-31,-32,-33,-34,-35,-36 71 | SA 23 -19,-20,-21,-22,-23,-24,-25,-26,-27,-28,-29,-30,-31,-32,-33 72 | SA 24 -19,-20,-21,-22,-23,-24,-25,-26,-27,-28,-29,-30,-31,-32 73 | SA 25 -20,-21,-22,-23,-24,-25,-26,-27,-28,-29,-30,-31 74 | SA 26 -20,-21,-22,-23,-24,-25,-26,-27,-28,-29,-30 75 | SA 27 -21,-22,-23,-24,-25,-26,-27,-28,-29 76 | SA 28 -22,-23,-24,-25,-26,-27 77 | SA 29 -22,-23,-24,-25,-26,-27 78 | SA 30 -23,-24 79 | AF 35 -16,-17,-18 80 | AF 36 -15,-16,-17,-18,-19 81 | AF 37 -13,-14,-15,-16,-17,-18,-19,-20 82 | AF 38 -13,-14,-15,-16,-17,-18,-19,-20 83 | AF 39 -13,-14,-15,-16,-17,-18,-19,-20 84 | AF 40 -12,-13,-14,-15,-16,-17,-18,-19,-20 85 | AF 41 -12,-13,-14,-15,-16,-17,-18,-19,-20 86 | AF 42 -13,-14,-15,-16,-17,-18,-19,-20,-21,-22,-23,-24,-25,-26 87 | AF 43 -14,-15,-16,-17,-18,-19,-20,-21,-22,-23,-24,-25,-26,-27,-28,-29 88 | AF 44 -14,-15,-16,-17,-18,-19,-20,-21,-22,-23,-24,-25,-26,-27,-28,-29,-30 89 | AF 45 -14,-15,-16,-17,-18,-19,-20,-21,-22,-23,-24,-25,-26,-27,-28,-29,-30 90 | AF 46 -14,-15,-16,-17,-18,-19,-20,-21,-22,-23,-24,-25,-26,-27,-28,-29 91 | AF 47 -14,-15,-16,-17,-18,-19,-20,-21,-22,-23,-24,-25,-26,-27,-28 92 | AF 48 -17,-18,-19,-20,-21,-22,-23,-24,-25,-26 93 | AF 49 -18,-19,-20,-21 94 | AF 50 -18,-19,-20,-26,-27 95 | AF 51 -19,-25,-26 96 | NA 0 -4,-5,-6 97 | NA 1 -3,-4,-5,-6,-7 98 | NA 2 -3,-4,-5,-6 99 | NA 3 -4,-5,-6 100 | NA 4 -4,-5 101 | NA 5 -4,-5,-6 102 | NA 6 -4,-5,-6 103 | NA 7 -4,-5,-6 104 | NA 8 -4,-5,-6,-7 105 | NA 9 -4,-5,-6,-7,-8 106 | NA 10 -10,-11,-12,-3,-4,-5,-6,-7,-8,-9 107 | NA 11 -10,-11,-12,-13,-3,-4,-5,-6,-7,-8,-9 108 | NA 12 -1,-10,-11,-12,-13,-14,-2,-3,-4,-5,-6,-7,-8,-9 109 | NA 13 -1,-10,-11,-12,-13,-14,-15,-2,-3,-4,-5,-6,-7,-8,-9 110 | NA 14 -1,-10,-11,-12,-13,-14,-15,-16,-2,-3,-4,-5,-6,-7,-8,-9 111 | NA 15 -1,-10,-11,-12,-13,-14,-15,-16,-17,-2,-3,-4,-5,-6,-7,-8,-9 112 | NA 16 -10,-11,-12,-13,-14,-15,-16,-17,-2,-3,-4,-5,-6,-7,-8,-9 113 | NA 17 -1,-10,-11,-12,-13,-14,-17,-2,-3,-4,-5,-7,-8,-9 114 | NA 18 -1,-10,-11,-12,-13,-14,-16,-17,-18,-2,-3,-4,-5,-7,-8,-9,0 115 | NA 19 -1,-10,-11,-12,-13,-14,-18,-19,-4,-8,-9,0 116 | NA 20 -1,-10,-11,-12,-13,-14,-15,-16,-19,-2,-3,-8,-9,0 117 | NA 21 -1,-10,-11,-12,-16,-2,-3,-5,-6,-7,-8,-9,0 118 | NA 22 -10,-11,-17,-2,-3,-4,-5,-6,-7,-8,-9,0 119 | NA 23 -1,-10,-17,-4,-5,-6,-7,-8,-9 120 | NA 24 -1,-10,-4,-5,-7,-8,-9,0 121 | NA 25 -1,-7,-8,0 122 | NA 26 -1,-2,-8,-9,0 123 | NA 27 -1,-2,-3,-4,-5,0 124 | NA 28 -1,-2,-3,-4,-5,-6,0 125 | NA 29 -1,-2,-3,-4,-5,-6,0 126 | NA 30 -1,-2,-3,-4,0 127 | NA 31 -1,-2,-3,-4,0 128 | NA 32 -1,-2,-3,0 129 | NA 33 -1,-2,-3,0 130 | NA 34 -1,-2,0 131 | NA 35 0 132 | AU 67 -27,-28,-29,-30 133 | AU 68 -27,-28,-29,-30 134 | AU 69 -26,-27,-28,-29,-30 135 | AU 70 -25,-26,-27,-28,-29 136 | AU 71 -25,-26,-27,-28,-29 137 | AU 72 -26,-27,-28,-29,-30 138 | AU 73 -22,-23,-25,-26,-27,-28,-29,-30,-31 139 | AU 74 -23,-24,-26,-27,-28,-29,-30,-31,-32 140 | AU 75 -27,-28,-29,-30,-31 141 | AU 76 -24,-28,-29 142 | AU 78 -26 143 | AU 79 -27 144 | AU 80 -32,-33 145 | AU 81 -30,-31,-32 146 | AU 82 -31 147 | ] (delimiter is '\t'); 148 | 149 | LET vWest = -163; 150 | LET vEast = 179; 151 | LET vNorth = 79; 152 | LET vSouth = -56; 153 | LET vX.Min = 0; 154 | LET vX.Max = Null(); 155 | LET vY.Min = -NoOfRows('TextMap') + 1; 156 | LET vY.Max = 0; 157 | 158 | FOR v = 1 TO 
NoOfRows('TextMap') 159 | LET vRow = Peek('@1', v-1, 'TextMap'); 160 | LET vX.Max = RangeMax(vX.Max, Len(vRow)-1); 161 | DottedMap: 162 | LOAD 163 | RowNo() AS dotId, 164 | -$(v) + 1 AS y, 165 | RecNo()-1 AS x, 166 | ApplyMap('ContinentForPoint', Hash128(RecNo()-1, -$(v) + 1), 'AS') AS Continent 167 | AUTOGENERATE(Len(vRow)) 168 | WHERE 169 | Mid('$(vRow)', RecNo(), 1) <> ' '; 170 | NEXT v 171 | 172 | DROP TABLE TextMap; 173 | 174 | // interpolate x and y into longitude/latitude 175 | LEFT JOIN (DottedMap) 176 | LOAD 177 | dotId, 178 | x * ($(vEast) - $(vWest)) / ($(vX.Max) - $(vX.Min)) + $(vWest) AS long, 179 | y * ($(vNorth) - $(vSouth)) / ($(vY.Max) - $(vY.Min)) + $(vNorth) AS lat 180 | RESIDENT DottedMap; 181 | 182 | FOR EACH v IN 'vWest','vEast','vNorth','vSouth','vX.Min','vX.Max','vY.Min','vY.Max','vRow' 183 | LET $(v) = Null(); // delete temp variable 184 | NEXT v; 185 | 186 | -------------------------------------------------------------------------------- /exec_script_from_google_drive.qvs: -------------------------------------------------------------------------------- 1 | SUB GetAllScripts(_connection1, _connection2, _folder, _pattern) 2 | 3 | /* 4 | will load and execute all script files (define file pattern as 4th argument e.g. '*.qvs' 5 | 6 | needs two connection to Google Drive 7 | - Google Drive & Spreadsheets connection to read meta data about the Google Drive (file list/folder list) 8 | - Google Drive connection to extract the text files 9 | 10 | All *.qvs files found are put into variables vScript1, vScript2 ... vScriptN (alphabetically sorted) 11 | and a variable vAllScripts is set to call the vScripts. 12 | 13 | So to execute script from Google Drive Folder do the following 2 commands: 14 | 15 | CALL GetAllScripts('GDrive_meta_connection', 'GDrive_files_connection', 'rootfolder/subfolder', '*.qvs'); 16 | $(vAllScripts); 17 | 18 | One script file must not exceed 64kB 19 | 20 | */ 21 | LIB CONNECT TO '$(_connection1)'; 22 | [~gDriveContent]: 23 | HIERARCHY (id, parents_id, title, 'parent_name', title, 'path', '/', 'hierarchy_level') 24 | LOAD 25 | id, title, parents_id, 26 | if(mimeType like '*folder*', id) AS folderId, 27 | if(NOT mimeType like '*folder*', title) AS fileName 28 | WHERE 29 | NOT labels_trashed LIKE 'true' 30 | ; 31 | SELECT 32 | title, id, mimeType, parents_id, labels_trashed 33 | FROM 34 | ListFiles 35 | WITH PROPERTIES ( 36 | driveId='', 37 | query='' 38 | ); 39 | 40 | // reduce the full list of files to the ones inside and below the given _folder 41 | INNER JOIN ([~gDriveContent]) 42 | LOAD DISTINCT 43 | path 44 | RESIDENT 45 | [~gDriveContent] 46 | WHERE 47 | path LIKE '$(_folder)*'; 48 | 49 | IF FieldValueCount('folderId') = 0 THEN 50 | [Folder "$(_folder)" not found in connection $(_connection1)]; 51 | ELSEIF FieldValueCount('folderId') > 1 THEN 52 | [More than one folder "$(_folder)" found in connection $(_connection1)]; 53 | ELSE 54 | LET v_GAS_folderId = FieldValue('folderId', 1); 55 | TRACE Folder "$(_folder)" has id $(v_GAS_folderId); 56 | 57 | LET vAllScripts = ''; 58 | 59 | [~scriptFiles]: 60 | LOAD title AS [~scriptFileName] 61 | RESIDENT [~gDriveContent] 62 | WHERE title LIKE '$(_pattern)' 63 | ORDER BY title ASC; 64 | 65 | IF NoOfRows('~scriptFiles') = 0 THEN 66 | [No $(_pattern) script files found in folder $(_folder)]; 67 | ELSE 68 | FOR v_GAS_i = 1 TO FieldValueCount('~scriptFileName') 69 | LET v_GAS_FileName = FieldValue('~scriptFileName', v_GAS_i); 70 | 71 | reading: 72 | LOAD Concat(@1,CHR(10),RecNo()) AS [$script] 73 | FROM 
[lib://$(_connection2)/$(v_GAS_folderId)/$(v_GAS_FileName)] 74 | (txt, utf8, no labels, delimiter is '\n', no quotes); 75 | 76 | LET vScript$(v_GAS_i) = Peek('$script'); 77 | DROP TABLE reading; 78 | 79 | LET vAllScripts = vAllScripts & 'TRACE executing script $(_folder)/$(v_GAS_FileName);' & CHR(10) 80 | & '$' & '(vScript$(v_GAS_i));' & CHR(10) 81 | & 'LET vScript$(v_GAS_i) = Null();' & CHR(10); 82 | NEXT v_GAS_i 83 | 84 | TRACE GetAllScripts found $(v_GAS_i) *.qvs files in folder $(_folder); 85 | END IF 86 | LET v_GAS_i = Null(); 87 | LET v_GAS_FileName = Null(); 88 | LET v_GAS_folderId = Null(); 89 | END IF 90 | DROP TABLES [~scriptFiles], [~gDriveContent]; 91 | END SUB 92 | -------------------------------------------------------------------------------- /fieldCompare.qvs: -------------------------------------------------------------------------------- 1 | SUB fieldCompare(param_table, param_option) 2 | 3 | // This sub can be called multiple times and it lists which fields were added or removed 4 | // between the current call and the previous one. It keeps a table about the fields of the 5 | // specific table in memory, until the param_option "last" is sent. That marks the last 6 | // comparison and it removes the temp table afterwards 7 | 8 | TRACE calling SUB fieldCompare ...; 9 | 10 | IF Alt(TableNumber(param_table), -1) = -1 THEN 11 | TRACE No such table in data model: $(param_table); 12 | ELSE 13 | IF Alt(TableNumber('~fields-$(param_table)'), -1) = -1 THEN 14 | 15 | [~fields-$(param_table)]: 16 | LOAD 17 | FieldName(RecNo(), '$(param_table)') AS [~field], 18 | 1 AS [~Snap1] 19 | AUTOGENERATE(NoOfFields(param_table)); 20 | 21 | LET priv_allFields = ''; 22 | FOR priv_iter = 1 TO NoOfRows('~fields-$(param_table)') 23 | LET priv_allFields = priv_allFields & IF(Len(priv_allFields), ', ') 24 | & '[' & Peek('~field', priv_iter - 1, '~fields-$(param_table)') & ']'; 25 | LET priv_Count = priv_Count + 1; 26 | NEXT priv_iter 27 | 28 | LET priv_Count = NoOfFields(param_table); 29 | TRACE The table "$(param_table)" has the following $(priv_Count) fields: 30 | $(priv_allFields); 31 | 32 | ELSE 33 | 34 | LET priv_snap = NoOfFields('~fields-$(param_table)'); 35 | OUTER JOIN ([~fields-$(param_table)]) 36 | LOAD 37 | // JOIN ON 38 | FieldName(RecNo(), '$(param_table)') AS [~field], 39 | 1 AS [~Snap$(priv_snap)] 40 | AUTOGENERATE(NoOfFields(param_table)); 41 | TRACE; 42 | 43 | LET priv_newFields = ''; 44 | LET priv_removedFields = ''; 45 | 46 | FOR priv_iter = 1 TO NoOfRows('~fields-$(param_table)') 47 | 48 | IF Peek('~Snap' & (priv_snap -1), priv_iter - 1, '~fields-$(param_table)') <> 1 THEN 49 | LET priv_newFields = priv_newFields & IF(Len(priv_newFields), ', ') 50 | & '[' & Peek('~field', priv_iter - 1, '~fields-$(param_table)') & ']'; 51 | END IF 52 | IF Peek('~Snap$(priv_snap)', priv_iter - 1, '~fields-$(param_table)') <> 1 THEN 53 | LET priv_removedFields = priv_removedFields & IF(Len(priv_removedFields), ', ') 54 | & '[' & Peek('~field', priv_iter - 1, '~fields-$(param_table)') & ']'; 55 | END IF 56 | NEXT priv_iter 57 | 58 | 59 | IF Len(priv_newFields) THEN 60 | LET priv_Count = 1 + SubStringCount(priv_newFields, '], ['); 61 | TRACE The following $(priv_Count) field(s) were added to table "$(param_table)": 62 | $(priv_newFields); 63 | END IF 64 | 65 | IF Len(priv_removedFields) THEN 66 | LET priv_Count = 1 + SubStringCount(priv_removedFields, '], ['); 67 | TRACE The following $(priv_Count) field(s) were removed from table "$(param_table)": 68 | $(priv_removedFields); 69 | END IF 70 | 71 | IF 
'$(priv_removedFields)$(priv_newFields)' = '' THEN 72 | TRACE Table "$(param_table)" still has same fields as before; 73 | END IF 74 | IF (param_option LIKE 'last') THEN 75 | DROP TABLE [~fields-$(param_table)]; 76 | END IF 77 | END IF 78 | END IF 79 | 80 | END SUB 81 | -------------------------------------------------------------------------------- /fillcalendar.md: -------------------------------------------------------------------------------- 1 | # Fill Calendar Events 2 | For each day between the Min and Max Event Date a filled row should be created, repeating the row of the last day before. Basically, all the rows in below table tagged "orig" were given, the ones with "filled" were automatically created. The script is below 3 | 4 | | ID | FilledEvent | More | | 5 | | --- | --- | --- | --- | 6 | | 1 | 2019-08-25 | C | orig | 7 | | 1 | 2019-08-26 | C | filled | 8 | | 1 | 2019-08-27 | D | orig | 9 | | 1 | 2019-08-28 | D | filled | 10 | | 1 | 2019-08-29 | E | orig | 11 | | 1 | 2019-08-30 | E | filled | 12 | | 1 | 2019-08-31 | E | filled | 13 | | 1 | 2019-09-01 | F | orig | 14 | | 1 | 2019-09-02 | F | filled | 15 | | 2 | 2019-08-26 | G | orig | 16 | | 2 | 2019-08-27 | G | filled | 17 | | 2 | 2019-08-28 | G | filled | 18 | | 2 | 2019-08-29 | G | filled | 19 | | 2 | 2019-08-30 | G | filled | 20 | | 2 | 2019-08-31 | H | orig | 21 | | 2 | 2019-09-01 | H | filled | 22 | | 2 | 2019-09-02 | I | orig | 23 | 24 | 25 | ``` 26 | PoT: 27 | LOAD * INLINE [ 28 | ID, Event, More, MuchMore 29 | 1, 2019-08-15, C, y 30 | 1, 2019-08-17, D, x 31 | 1, 2019-08-29, E, x 32 | 1, 2019-09-02, F, y 33 | 2, 2019-08-16, G, y 34 | 2, 2019-08-31, H, x 35 | 2, 2019-09-03, I, x 36 | ]; 37 | // Add a "SpanEventTo" column which is 1 second before the subsequent known Event 38 | // Find out the total Max and Min Event while loading each line 39 | PoT2: 40 | LOAD 41 | *, 42 | TimeStamp( 43 | IF(Peek('ID',-1,'PoT2') = ID, Peek('Event',-1,'PoT2')-1/24/3600, MakeDate(2100,12,31)) 44 | ) AS SpanEventTo, 45 | RangeMax(Peek('Event.Max',-1,'PoT2'), Event) AS Event.Max, 46 | RangeMin(Peek('Event.Min',-1,'PoT2'), Event) AS Event.Min 47 | RESIDENT 48 | PoT 49 | ORDER BY 50 | ID ASC, Event DESC; 51 | 52 | // get the last rows value for Min and Max (which is logically the total min/total max) 53 | LET vMaxDate = Peek('Event.Max',-1,'PoT2'); 54 | LET vMinDate = Peek('Event.Min',-1,'PoT2'); 55 | DROP FIELDS Event.Min, Event.Max; 56 | DROP TABLE PoT; 57 | 58 | // Create a calender with dates spanning from Min to Max 59 | AllTimes: 60 | LOAD 61 | Date(RecNo() -1 + $(vMinDate), 'YYYY-MM-DD') AS FilledEvent 62 | AUTOGENERATE ($(vMaxDate) - $(vMinDate) + 1); 63 | // Create a cartesean product with all existing IDs 64 | // (JOIN with no matching field) 65 | JOIN LOAD DISTINCT ID RESIDENT PoT2; 66 | // Merge the PoT2 table into the calendar. 
IntervalMatch 67 | // does the magic to find out into which From-To timespan a 68 | // given day falls 69 | INNER JOIN 70 | IntervalMatch(FilledEvent, ID) 71 | LOAD Event, SpanEventTo, ID RESIDENT PoT2; 72 | 73 | // Move all the other columns from original table into new calendar and drop old table 74 | INNER JOIN (AllTimes) 75 | LOAD * RESIDENT PoT2; 76 | DROP TABLE PoT2; 77 | //DROP FIELD SpanEventTo; 78 | ``` 79 | 80 | -------------------------------------------------------------------------------- /getCustomProp.qvs: -------------------------------------------------------------------------------- 1 | LET v_http_header_key = 'runas'; // set according to your Virtual Proxy 2 | LET v_http_header_val = 'script'; // set according to your Virtual Proxy 3 | 4 | SUB getCustomProp (param_LibConnection, param_custPropName, param_varPrefix, param_breakIfMissing, param_KeepQrsRepsonseTable); 5 | TRACE [Calling SUB getCustomProp(...,'$(param_custPropName)','$(param_varPrefix)',$(param_breakIfMissing),$(param_KeepQrsRepsonseTable))]; 6 | 7 | // This sub queries the QRS API to get the custom properties of the current application and 8 | // puts them into a variable (or variables, since wildcards are possible it can do all at once). 9 | 10 | // To setup the virtual proxy follow these instructions: 11 | // https://community.qlik.com/t5/Qlik-Sense-Documents/How-to-access-QRS-Repository-from-Load-Script/ta-p/1484264 12 | 13 | // The sub needs the following parameters: 14 | 15 | // 1) the name of a http-GET REST Connection (defined in your data connections) 16 | // note that you also have to set two global variables outside the script to 17 | // define the http-header key and value for authentication 18 | 19 | // 2) The name or pattern (wildcards allowed!) to look for in the custom property namespace 20 | 21 | // 3) a possible prefix (typically 'v') for the variable name (the variable name matches the 22 | // custom property name) 23 | 24 | // 4) set to 1 if your script MUST assign the given variable. If such a custom property doesn't 25 | // exist on this application, it breaks the script execution 26 | 27 | // 5) set to 1 to leave the tmp_CustProps table in the data model, either because you're curious 28 | // or because you plan to call this sub multiple times and want to save the time to query 29 | // the QRS API again (the result would be the same a second later ;-) 30 | 31 | // Christof Schwarz, 18-Dec-2020 32 | 33 | // Examples to call this sub: 34 | 35 | // Set explicitly custom properties "namespace" and "ShowInMashup" to a variable "namespace" and 36 | // "vShowInMashup". Break if not found. Don't drop REST response table until the last call. 37 | // CALL getCustomProp ('REST GET (vm1_christof.schwarz)', 'namespace', '', 1, 1); 38 | // CALL getCustomProp ('REST GET (vm1_christof.schwarz)', 'ShowInMashup', 'v', 1, 0); 39 | 40 | // Turn all Custom Properties into variables with a prefix "v". Continue, even 41 | // if no Custom Property is set. 
42 | // CALL getCustomProp ('$(vLibRestGET)', '*', 'v', 0, 0); 43 | 44 | LET v_tmp_XrfKey = 'databridge' & Left(PurgeChar(Repeat(Rand(),3),'.,'),6); 45 | LET v_tmp_ThisAppId = DocumentName(); 46 | 47 | IF IsNull(TableNumber('tmp_CustProps')) THEN 48 | 49 | LIB CONNECT TO '$(param_LibConnection)'; 50 | tmp_QrsResponse: 51 | SQL SELECT 52 | "__KEY_root", 53 | (SELECT 54 | "value" AS "customProp.value", 55 | "__KEY_customProperties", 56 | "__FK_customProperties", 57 | (SELECT 58 | "name" AS "customProp.name", 59 | "__KEY_definition", 60 | "__FK_definition" 61 | FROM "definition" PK "__KEY_definition" FK "__FK_definition") 62 | FROM "customProperties" PK "__KEY_customProperties" FK "__FK_customProperties") 63 | FROM JSON (wrap on) "root" PK "__KEY_root" 64 | WITH CONNECTION ( 65 | URL "$(vQRSAPIurl)/app/full", 66 | QUERY "filter" "id eq $(v_tmp_ThisAppId)", 67 | QUERY "xrfkey" "$(v_tmp_XrfKey)", 68 | HTTPHEADER "X-Qlik-Xrfkey" "$(v_tmp_XrfKey)", 69 | HTTPHEADER "$(v_http_header_key)" "$(v_http_header_val)" 70 | ); 71 | DISCONNECT; 72 | 73 | tmp_CustProps0: 74 | LOAD 75 | customProp.name, 76 | __FK_definition AS __KEY_customProperties 77 | RESIDENT tmp_QrsResponse 78 | WHERE NOT IsNull(__FK_definition); 79 | 80 | LEFT JOIN 81 | LOAD 82 | customProp.value, 83 | __KEY_customProperties 84 | RESIDENT tmp_QrsResponse 85 | WHERE NOT IsNull(__FK_customProperties); 86 | DROP TABLE tmp_QrsResponse; 87 | 88 | // in case of multiple values assigned to a custom property, concat them into a pipe-separated list 89 | tmp_CustProps: 90 | NOCONCATENATE LOAD 91 | customProp.name, 92 | Concat(customProp.value, '|') AS customProp.value 93 | RESIDENT 94 | tmp_CustProps0 95 | GROUP BY 96 | customProp.name; 97 | 98 | DROP TABLE tmp_CustProps0; 99 | ELSE 100 | TRACE [Table "tmp_CustProps" found in data model from previous call.]; 101 | END IF 102 | 103 | LET v_tmp_Found = 0; 104 | FOR v_tmp_Row = 1 TO NoOfRows('tmp_CustProps') 105 | LET v_tmp_pName = Peek('customProp.name', v_tmp_Row -1, 'tmp_CustProps'); 106 | IF v_tmp_pName LIKE param_custPropName THEN 107 | LET [$(param_varPrefix)$(v_tmp_pName)] = Peek('customProp.value', v_tmp_Row -1, 'tmp_CustProps'); 108 | TRACE [Variable "$(param_varPrefix)$(v_tmp_pName)" assigned from CustomProperty.]; 109 | LET v_tmp_Found = v_tmp_Found + 1; 110 | END IF 111 | NEXT v_tmp_Row 112 | TRACE [$(v_tmp_Found) variables assigned.]; 113 | 114 | // remove temp variables 115 | LET v_tmp_Row = Null(); 116 | LET v_tmp_XrfKey = Null(); 117 | LET v_tmp_ThisAppId = Null(); 118 | 119 | IF v_tmp_Found = 0 AND Alt('$(param_breakIfMissing)', 0) THEN 120 | // remove temp variable 121 | LET v_tmp_Found = Null(); 122 | [Error: Could not find custom property matching "$(param_custPropName)"]; 123 | END IF 124 | 125 | LET v_tmp_Found = Null(); 126 | 127 | IF Alt('$(param_KeepQrsRepsonseTable)', 0) THEN 128 | TRACE [Keeping table "tmp_CustProps" in data model.]; 129 | ELSE 130 | TRACE [Dropping table "tmp_CustProps" from data model.]; 131 | DROP TABLE tmp_CustProps; 132 | END IF 133 | 134 | END SUB 135 | 136 | -------------------------------------------------------------------------------- /json/1.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "Musterprojekt K21 Modell_Entwurf Überarbeitung V9", 4 | "description": null, 5 | "projectId": "227cc83c-bd1e-4967-9da4-f9597347e0b1", 6 | "userGroupName": "General", 7 | "userGroupId": "e4c3c196-d4cf-43d4-9876-f816898dd7c8", 8 | "topologyId": "054f0a63-36a2-40c8-85d1-4c4050386976", 9 | 
"revisionObjects": [ 10 | { 11 | "id": "05d3775f-688e-48ac-a21d-cadfe886e397", 12 | "importFileName": "Musterprojekt K21 Modell_Entwurf Überarbeitung V9.ifc", 13 | "projectId": "227cc83c-bd1e-4967-9da4-f9597347e0b1", 14 | "divisionId": "428b4d22-7e4f-48ac-8519-798fc3a62454", 15 | "revisionNr": 3, 16 | "created": "2020-04-13T17:55:25.7", 17 | "createdby": { 18 | "id": "3ffae35f-6fe6-520c-6c24-d09d6ef3cfc7", 19 | "email": "stefan@soares-kaufmann.de", 20 | "firstname": "Stefan", 21 | "lastname": "Kaufmann" 22 | }, 23 | "changed": "2019-08-16T10:14:21.38", 24 | "changedby": { 25 | "id": "3f311caa-3434-480c-8d5e-0da5171ddf76", 26 | "email": "dirk.bessert@etask.de", 27 | "firstname": "Dirk", 28 | "lastname": "Bessert" 29 | }, 30 | "applicationId": "0106c8ba-ad46-7c08-e26f-026852cb7525", 31 | "topologyId": "054f0a63-36a2-40c8-85d1-4c4050386976", 32 | "inputType": "IFC_IMPORT", 33 | "release": 0 34 | }, 35 | { 36 | "id": "ee8694f1-c835-4da1-8718-23fd8d4a7fd1", 37 | "importFileName": "Musterprojekt K21 Modell_Entwurf Überarbeitung V9", 38 | "description": "", 39 | "projectId": "227cc83c-bd1e-4967-9da4-f9597347e0b1", 40 | "divisionId": "428b4d22-7e4f-48ac-8519-798fc3a62454", 41 | "revisionNr": 2, 42 | "created": "2020-04-09T11:58:19.64", 43 | "createdby": { 44 | "id": "3ffae35f-6fe6-520c-6c24-d09d6ef3cfc7", 45 | "email": "stefan@soares-kaufmann.de", 46 | "firstname": "Stefan", 47 | "lastname": "Kaufmann" 48 | }, 49 | "changed": "2020-04-09T11:58:19.64", 50 | "changedby": { 51 | "id": "3ffae35f-6fe6-520c-6c24-d09d6ef3cfc7", 52 | "email": "stefan@soares-kaufmann.de", 53 | "firstname": "Stefan", 54 | "lastname": "Kaufmann" 55 | }, 56 | "applicationId": "0106c8ba-ad46-7c08-e26f-026852cb7525", 57 | "topologyId": "054f0a63-36a2-40c8-85d1-4c4050386976", 58 | "inputType": "IFC_IMPORT", 59 | "release": 1 60 | }, 61 | { 62 | "id": "c6c25e5b-26a1-4d67-9cf9-1d1d15e08c91", 63 | "importFileName": "Musterprojekt K21 Modell_Entwurf Überarbeitung V9.ifc", 64 | "projectId": "227cc83c-bd1e-4967-9da4-f9597347e0b1", 65 | "divisionId": "428b4d22-7e4f-48ac-8519-798fc3a62454", 66 | "revisionNr": 1, 67 | "created": "2019-08-14T09:48:03.76", 68 | "createdby": { 69 | "id": "3f311caa-3434-480c-8d5e-0da5171ddf76", 70 | "email": "dirk.bessert@etask.de", 71 | "firstname": "Dirk", 72 | "lastname": "Bessert" 73 | }, 74 | "changed": "2019-08-14T09:48:03.76", 75 | "changedby": { 76 | "id": "3f311caa-3434-480c-8d5e-0da5171ddf76", 77 | "email": "dirk.bessert@etask.de", 78 | "firstname": "Dirk", 79 | "lastname": "Bessert" 80 | }, 81 | "applicationId": "0106c8ba-ad46-7c08-e26f-026852cb7525", 82 | "topologyId": "054f0a63-36a2-40c8-85d1-4c4050386976", 83 | "inputType": "IFC_IMPORT", 84 | "release": 1 85 | } 86 | ], 87 | "teamSlug": "allplan-kaufmann", 88 | "url": "/allplan-kaufmann/divisions/428b4d22-7e4f-48ac-8519-798fc3a62454/download?revision=3", 89 | "fileType": "Ifc", 90 | "inputType": "IFC_IMPORT", 91 | "created": "2019-08-14T09:48:03", 92 | "createdby": { 93 | "id": "3f311caa-3434-480c-8d5e-0da5171ddf76", 94 | "email": "dirk.bessert@etask.de", 95 | "firstname": "Dirk", 96 | "lastname": "Bessert" 97 | }, 98 | "changed": "2019-08-16T10:14:21.38", 99 | "changedby": { 100 | "id": "3f311caa-3434-480c-8d5e-0da5171ddf76", 101 | "email": "dirk.bessert@etask.de", 102 | "firstname": "Dirk", 103 | "lastname": "Bessert" 104 | }, 105 | "size": 93945856, 106 | "sizeMB": "89.59", 107 | "importFileName": "Musterprojekt K21 Modell_Entwurf Überarbeitung V9.ifc", 108 | "importFileSize": 46881650, 109 | "ifcProject": "0SiGCfIO95Yv2oo1hHyCBB", 110 | 
"id": "428b4d22-7e4f-48ac-8519-798fc3a62454" 111 | }, 112 | { 113 | "name": "THM E10", 114 | "description": null, 115 | "projectId": "227cc83c-bd1e-4967-9da4-f9597347e0b1", 116 | "userGroupName": "General", 117 | "userGroupId": "e4c3c196-d4cf-43d4-9876-f816898dd7c8", 118 | "topologyId": "caa5b9d2-a0ab-4438-89ad-a0f5a343419b", 119 | "revisionObjects": [ 120 | { 121 | "id": "0c0bd3cb-9375-4e22-9109-501a8a16430f", 122 | "importFileName": "Modell E10.ifc", 123 | "projectId": "227cc83c-bd1e-4967-9da4-f9597347e0b1", 124 | "divisionId": "9c431bbb-a985-4154-a477-45a4fe6e5cd7", 125 | "revisionNr": 2, 126 | "created": "2020-04-14T10:23:19.59", 127 | "createdby": { 128 | "id": "3ffae35f-6fe6-520c-6c24-d09d6ef3cfc7", 129 | "email": "stefan@soares-kaufmann.de", 130 | "firstname": "Stefan", 131 | "lastname": "Kaufmann" 132 | }, 133 | "changed": "2019-08-28T10:29:06.623", 134 | "changedby": { 135 | "id": "3f311caa-3434-480c-8d5e-0da5171ddf76", 136 | "email": "dirk.bessert@etask.de", 137 | "firstname": "Dirk", 138 | "lastname": "Bessert" 139 | }, 140 | "applicationId": "0106c8ba-ad46-7c08-e26f-026852cb7525", 141 | "topologyId": "caa5b9d2-a0ab-4438-89ad-a0f5a343419b", 142 | "inputType": "IFC_IMPORT", 143 | "release": 0 144 | }, 145 | { 146 | "id": "bf17949f-303c-4eda-b565-5f5c03687a8a", 147 | "importFileName": "Modell E10.ifc", 148 | "projectId": "227cc83c-bd1e-4967-9da4-f9597347e0b1", 149 | "divisionId": "9c431bbb-a985-4154-a477-45a4fe6e5cd7", 150 | "revisionNr": 1, 151 | "created": "2019-08-28T10:26:21.7", 152 | "createdby": { 153 | "id": "3f311caa-3434-480c-8d5e-0da5171ddf76", 154 | "email": "dirk.bessert@etask.de", 155 | "firstname": "Dirk", 156 | "lastname": "Bessert" 157 | }, 158 | "changed": "2019-08-28T10:26:21.7", 159 | "changedby": { 160 | "id": "3f311caa-3434-480c-8d5e-0da5171ddf76", 161 | "email": "dirk.bessert@etask.de", 162 | "firstname": "Dirk", 163 | "lastname": "Bessert" 164 | }, 165 | "applicationId": "0106c8ba-ad46-7c08-e26f-026852cb7525", 166 | "topologyId": "caa5b9d2-a0ab-4438-89ad-a0f5a343419b", 167 | "inputType": "IFC_IMPORT", 168 | "release": 1 169 | } 170 | ], 171 | "teamSlug": "allplan-kaufmann", 172 | "url": "/allplan-kaufmann/divisions/9c431bbb-a985-4154-a477-45a4fe6e5cd7/download?revision=2", 173 | "fileType": "Ifc", 174 | "inputType": "IFC_IMPORT", 175 | "created": "2019-08-28T10:26:22", 176 | "createdby": { 177 | "id": "3f311caa-3434-480c-8d5e-0da5171ddf76", 178 | "email": "dirk.bessert@etask.de", 179 | "firstname": "Dirk", 180 | "lastname": "Bessert" 181 | }, 182 | "changed": "2019-08-28T10:29:06.623", 183 | "changedby": { 184 | "id": "3f311caa-3434-480c-8d5e-0da5171ddf76", 185 | "email": "dirk.bessert@etask.de", 186 | "firstname": "Dirk", 187 | "lastname": "Bessert" 188 | }, 189 | "size": 104109056, 190 | "sizeMB": "99.29", 191 | "importFileName": "Modell E10.ifc", 192 | "importFileSize": 58023657, 193 | "ifcProject": "08i3SuPYL9Zgjj7AsPMrPe", 194 | "id": "9c431bbb-a985-4154-a477-45a4fe6e5cd7" 195 | }, 196 | { 197 | "name": "Modell E10 Gebäude_test_1", 198 | "description": null, 199 | "projectId": "227cc83c-bd1e-4967-9da4-f9597347e0b1", 200 | "userGroupName": "General", 201 | "userGroupId": "e4c3c196-d4cf-43d4-9876-f816898dd7c8", 202 | "topologyId": "e4443e2d-889d-4787-83cf-7f3a120ef4b4", 203 | "revisionObjects": [ 204 | { 205 | "id": "fac1243d-31ee-4c07-9708-054f9b58a4e8", 206 | "importFileName": "Modell E10 Gebäude_test_1.ifc", 207 | "projectId": "227cc83c-bd1e-4967-9da4-f9597347e0b1", 208 | "divisionId": "9857584c-247a-400b-85b8-8d134e7269ed", 209 | "revisionNr": 1, 210 
| "created": "2019-10-07T13:00:03.26", 211 | "createdby": { 212 | "id": "3f311caa-3434-480c-8d5e-0da5171ddf76", 213 | "email": "dirk.bessert@etask.de", 214 | "firstname": "Dirk", 215 | "lastname": "Bessert" 216 | }, 217 | "changed": "2019-10-07T13:01:55.06", 218 | "changedby": { 219 | "id": "3f311caa-3434-480c-8d5e-0da5171ddf76", 220 | "email": "dirk.bessert@etask.de", 221 | "firstname": "Dirk", 222 | "lastname": "Bessert" 223 | }, 224 | "applicationId": "0106c8ba-ad46-7c08-e26f-026852cb7525", 225 | "topologyId": "e4443e2d-889d-4787-83cf-7f3a120ef4b4", 226 | "inputType": "IFC_IMPORT", 227 | "release": 0 228 | } 229 | ], 230 | "teamSlug": "allplan-kaufmann", 231 | "url": "/allplan-kaufmann/divisions/9857584c-247a-400b-85b8-8d134e7269ed/download?revision=1", 232 | "fileType": "Ifc", 233 | "inputType": "IFC_IMPORT", 234 | "created": "2019-10-07T13:00:03", 235 | "createdby": { 236 | "id": "3f311caa-3434-480c-8d5e-0da5171ddf76", 237 | "email": "dirk.bessert@etask.de", 238 | "firstname": "Dirk", 239 | "lastname": "Bessert" 240 | }, 241 | "changed": "2019-10-07T13:01:55.06", 242 | "changedby": { 243 | "id": "3f311caa-3434-480c-8d5e-0da5171ddf76", 244 | "email": "dirk.bessert@etask.de", 245 | "firstname": "Dirk", 246 | "lastname": "Bessert" 247 | }, 248 | "size": 39774208, 249 | "sizeMB": "37.93", 250 | "importFileName": "Modell E10 Gebäude_test_1.ifc", 251 | "importFileSize": 21099174, 252 | "ifcProject": "3V44Aa$JTCyAMmUjXvFU1P", 253 | "id": "9857584c-247a-400b-85b8-8d134e7269ed" 254 | } 255 | ] 256 | -------------------------------------------------------------------------------- /json/2.json: -------------------------------------------------------------------------------- 1 | { 2 | "children": [ 3 | { 4 | "children": [ 5 | { 6 | "children": [ 7 | { 8 | "children": [], 9 | "ifcType": "IfcName_None", 10 | "ifcId": "3Eu0KZv9jBCB9nbFKei9ZK#0", 11 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 12 | "parent": "5a6172bd-a35c-4094-8af3-3ad0b196a880", 13 | "name": "Oberfläche:278229", 14 | "type": "Topology", 15 | "number": 3, 16 | "id": "fe2f20c0-4a8b-4545-92a6-3fbc5c80898f" 17 | }, 18 | { 19 | "children": [ 20 | { 21 | "children": [], 22 | "ifcType": "IfcBuildingStorey", 23 | "ifcId": "0SiGCfIO95Yv2oo1ek2bVP", 24 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 25 | "parent": "0f5346be-8ae4-4d22-a35b-68a726e4d5e5", 26 | "name": "Floor -1", 27 | "type": "Topology", 28 | "number": 5, 29 | "id": "b5cf4f6d-58e5-4e0a-98f6-1d4a8000615b" 30 | }, 31 | { 32 | "children": [], 33 | "ifcType": "IfcBuildingStorey", 34 | "ifcId": "0SiGCfIO95Yv2oo1ek3pmC", 35 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 36 | "parent": "0f5346be-8ae4-4d22-a35b-68a726e4d5e5", 37 | "name": "Floor_0", 38 | "type": "Topology", 39 | "number": 6, 40 | "id": "9a2d1e86-a67b-460c-b903-4680851d49a4" 41 | }, 42 | { 43 | "children": [], 44 | "ifcType": "IfcBuildingStorey", 45 | "ifcId": "0SiGCfIO95Yv2oo1ek3p_D", 46 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 47 | "parent": "0f5346be-8ae4-4d22-a35b-68a726e4d5e5", 48 | "name": "Floor_1", 49 | "type": "Topology", 50 | "number": 7, 51 | "id": "79dfcb84-a9f6-4ea5-8b1b-54b504d96335" 52 | }, 53 | { 54 | "children": [], 55 | "ifcType": "IfcBuildingStorey", 56 | "ifcId": "0SiGCfIO95Yv2oo1ek3fZ0", 57 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 58 | "parent": "0f5346be-8ae4-4d22-a35b-68a726e4d5e5", 59 | "name": "Floor_2", 60 | "type": "Topology", 61 | "number": 8, 62 | "id": "edf37015-0590-47f7-8d0c-549b6d49f5d3" 63 | }, 64 | { 65 | 
"children": [], 66 | "ifcType": "IfcBuildingStorey", 67 | "ifcId": "0SiGCfIO95Yv2oo1ek3fGU", 68 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 69 | "parent": "0f5346be-8ae4-4d22-a35b-68a726e4d5e5", 70 | "name": "Floor_3", 71 | "type": "Topology", 72 | "number": 9, 73 | "id": "b083b0ca-e356-4a04-9160-13614cd96aff" 74 | }, 75 | { 76 | "children": [], 77 | "ifcType": "IfcBuildingStorey", 78 | "ifcId": "0SiGCfIO95Yv2oo1ek1bvY", 79 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 80 | "parent": "0f5346be-8ae4-4d22-a35b-68a726e4d5e5", 81 | "name": "Fußbodenaufbau Floor 3", 82 | "type": "Topology", 83 | "number": 10, 84 | "id": "21ada4a3-2d02-487f-94f9-2c59059032e8" 85 | }, 86 | { 87 | "children": [], 88 | "ifcType": "IfcBuildingStorey", 89 | "ifcId": "0SiGCfIO95Yv2oo1ek3esJ", 90 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 91 | "parent": "0f5346be-8ae4-4d22-a35b-68a726e4d5e5", 92 | "name": "Floor_4", 93 | "type": "Topology", 94 | "number": 11, 95 | "id": "d9560f77-e537-4dfa-b786-56ef8f6c781d" 96 | } 97 | ], 98 | "ifcType": "IfcBuilding", 99 | "ifcId": "0SiGCfIO95Yv2oo1hHyCBA", 100 | "elementTypeId": "d72696e0-6f5e-4883-a54d-f36ccb3b5f3b", 101 | "parent": "5a6172bd-a35c-4094-8af3-3ad0b196a880", 102 | "name": "", 103 | "type": "TopologyItem", 104 | "number": 4, 105 | "id": "0f5346be-8ae4-4d22-a35b-68a726e4d5e5" 106 | } 107 | ], 108 | "ifcType": "IfcSite", 109 | "ifcId": "3Eu0KZv9jBCB9nbFKei9ZK", 110 | "elementTypeId": "d5dcaf7d-adb3-4bba-a4a6-2bdaba9112e4", 111 | "parent": "054f0a63-36a2-40c8-85d1-4c4050386976", 112 | "name": "Oberfläche:278229", 113 | "type": "Site", 114 | "number": 2, 115 | "id": "5a6172bd-a35c-4094-8af3-3ad0b196a880" 116 | } 117 | ], 118 | "ifcType": "IfcProject", 119 | "ifcId": "0SiGCfIO95Yv2oo1hHyCBB", 120 | "elementTypeId": "e003e080-f21b-4f8a-8c4c-4c9c5026cf50", 121 | "parent": "227cc83c-bd1e-4967-9da4-f9597347e0b1", 122 | "name": "Musterprojekt K21 Modell_Entwurf Überarbeitung V9", 123 | "type": "TopologyDivision", 124 | "number": 1, 125 | "id": "054f0a63-36a2-40c8-85d1-4c4050386976" 126 | }, 127 | { 128 | "children": [ 129 | { 130 | "children": [], 131 | "ifcType": "IfcName_None", 132 | "ifcId": "08i3SuPYL9Zgjj7AsPMrPe#0", 133 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 134 | "parent": "caa5b9d2-a0ab-4438-89ad-a0f5a343419b", 135 | "name": "Projektnummer", 136 | "type": "Topology", 137 | "number": 2, 138 | "id": "9abffa37-d776-49b1-b978-816fb9da9cde" 139 | }, 140 | { 141 | "children": [ 142 | { 143 | "children": [], 144 | "ifcType": "IfcName_None", 145 | "ifcId": "29dFaj2cP0VR2$KAGBOyUO#0", 146 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 147 | "parent": "18a7bec6-9a33-4723-abad-fad0bf19a24f", 148 | "name": "Oberfläche:504933", 149 | "type": "Topology", 150 | "number": 4, 151 | "id": "1d847f4e-6edc-4736-bcc4-c4d17da132fd" 152 | }, 153 | { 154 | "children": [ 155 | { 156 | "children": [], 157 | "ifcType": "IfcBuildingStorey", 158 | "ifcId": "08i3SuPYL9Zgjj7ArcfCgt", 159 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 160 | "parent": "4c62ca4f-16f1-4548-9dc3-adc08eeea3be", 161 | "name": "-01 Untergeschoss", 162 | "type": "Topology", 163 | "number": 6, 164 | "id": "83a01b8f-5e55-4894-bb9c-a1f2f085d608" 165 | }, 166 | { 167 | "children": [], 168 | "ifcType": "IfcBuildingStorey", 169 | "ifcId": "08i3SuPYL9Zgjj7ArchCJs", 170 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 171 | "parent": "4c62ca4f-16f1-4548-9dc3-adc08eeea3be", 172 | "name": "GOK", 173 | "type": "Topology", 174 | "number": 7, 
175 | "id": "1b4da53d-0d2d-4c9d-9dc6-8b5214772e88" 176 | }, 177 | { 178 | "children": [], 179 | "ifcType": "IfcBuildingStorey", 180 | "ifcId": "08i3SuPYL9Zgjj7ArcfCbQ", 181 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 182 | "parent": "4c62ca4f-16f1-4548-9dc3-adc08eeea3be", 183 | "name": "00 Erdgeschoss", 184 | "type": "Topology", 185 | "number": 8, 186 | "id": "2947d3cf-c885-4c1a-8f95-ec87a413b613" 187 | }, 188 | { 189 | "children": [], 190 | "ifcType": "IfcBuildingStorey", 191 | "ifcId": "08i3SuPYL9Zgjj7ArcfCeQ", 192 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 193 | "parent": "4c62ca4f-16f1-4548-9dc3-adc08eeea3be", 194 | "name": "01 Obergeschoss", 195 | "type": "Topology", 196 | "number": 9, 197 | "id": "ff39749d-a139-4838-bf26-2edacdb7599a" 198 | }, 199 | { 200 | "children": [], 201 | "ifcType": "IfcBuildingStorey", 202 | "ifcId": "08i3SuPYL9Zgjj7ArcfPwJ", 203 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 204 | "parent": "4c62ca4f-16f1-4548-9dc3-adc08eeea3be", 205 | "name": "02 Obergeschoss", 206 | "type": "Topology", 207 | "number": 10, 208 | "id": "35ed22a3-c19f-443f-bc23-98c774b5cf57" 209 | }, 210 | { 211 | "children": [], 212 | "ifcType": "IfcBuildingStorey", 213 | "ifcId": "08i3SuPYL9Zgjj7ArceXCO", 214 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 215 | "parent": "4c62ca4f-16f1-4548-9dc3-adc08eeea3be", 216 | "name": "03 Dachgeschoss", 217 | "type": "Topology", 218 | "number": 11, 219 | "id": "0826fccf-808f-46aa-90e0-ba6bdb5e8497" 220 | } 221 | ], 222 | "ifcType": "IfcBuilding", 223 | "ifcId": "08i3SuPYL9Zgjj7AsPMrPf", 224 | "elementTypeId": "d72696e0-6f5e-4883-a54d-f36ccb3b5f3b", 225 | "parent": "18a7bec6-9a33-4723-abad-fad0bf19a24f", 226 | "name": "", 227 | "type": "TopologyItem", 228 | "number": 5, 229 | "id": "4c62ca4f-16f1-4548-9dc3-adc08eeea3be" 230 | } 231 | ], 232 | "ifcType": "IfcSite", 233 | "ifcId": "29dFaj2cP0VR2$KAGBOyUO", 234 | "elementTypeId": "d5dcaf7d-adb3-4bba-a4a6-2bdaba9112e4", 235 | "parent": "caa5b9d2-a0ab-4438-89ad-a0f5a343419b", 236 | "name": "Oberfläche:504933", 237 | "type": "Site", 238 | "number": 3, 239 | "id": "18a7bec6-9a33-4723-abad-fad0bf19a24f" 240 | } 241 | ], 242 | "ifcType": "IfcProject", 243 | "ifcId": "08i3SuPYL9Zgjj7AsPMrPe", 244 | "elementTypeId": "e003e080-f21b-4f8a-8c4c-4c9c5026cf50", 245 | "parent": "227cc83c-bd1e-4967-9da4-f9597347e0b1", 246 | "name": "THM E10", 247 | "type": "TopologyDivision", 248 | "number": 1, 249 | "id": "caa5b9d2-a0ab-4438-89ad-a0f5a343419b" 250 | }, 251 | { 252 | "children": [ 253 | { 254 | "children": [], 255 | "ifcType": "IfcName_None", 256 | "ifcId": "3V44Aa$JTCyAMmUjXvFU1P#0", 257 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 258 | "parent": "e4443e2d-889d-4787-83cf-7f3a120ef4b4", 259 | "name": "Projektnummer", 260 | "type": "Topology", 261 | "number": 2, 262 | "id": "8884af04-d4c2-4ce1-b3c2-7f08b17caa66" 263 | }, 264 | { 265 | "children": [ 266 | { 267 | "children": [ 268 | { 269 | "children": [], 270 | "ifcType": "IfcBuildingStorey", 271 | "ifcId": "3V44Aa$JTCyAMmUjY6mdo6", 272 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 273 | "parent": "f9dc125a-7054-4ff4-afbc-f45a4659604d", 274 | "name": "OK FFB KG", 275 | "type": "Topology", 276 | "number": 5, 277 | "id": "e0be06cd-0e09-4c4f-8da4-656d41b04d58" 278 | }, 279 | { 280 | "children": [], 281 | "ifcType": "IfcBuildingStorey", 282 | "ifcId": "3V44Aa$JTCyAMmUjY6nTEZ", 283 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 284 | "parent": "f9dc125a-7054-4ff4-afbc-f45a4659604d", 
285 | "name": "GOK", 286 | "type": "Topology", 287 | "number": 6, 288 | "id": "aed823c3-67aa-4020-8da7-2cf8f508b13f" 289 | }, 290 | { 291 | "children": [], 292 | "ifcType": "IfcBuildingStorey", 293 | "ifcId": "3V44Aa$JTCyAMmUjY6mdzh", 294 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 295 | "parent": "f9dc125a-7054-4ff4-afbc-f45a4659604d", 296 | "name": "OK FFB EG", 297 | "type": "Topology", 298 | "number": 7, 299 | "id": "df8aa35b-67de-417c-979e-c360fa68f704" 300 | }, 301 | { 302 | "children": [], 303 | "ifcType": "IfcBuildingStorey", 304 | "ifcId": "3V44Aa$JTCyAMmUjY6nNIv", 305 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 306 | "parent": "f9dc125a-7054-4ff4-afbc-f45a4659604d", 307 | "name": "OK Zwischenpodest EG", 308 | "type": "Topology", 309 | "number": 8, 310 | "id": "b088b603-4180-41a3-8f00-4e3e24fa6998" 311 | }, 312 | { 313 | "children": [], 314 | "ifcType": "IfcBuildingStorey", 315 | "ifcId": "3V44Aa$JTCyAMmUjY6mdmh", 316 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 317 | "parent": "f9dc125a-7054-4ff4-afbc-f45a4659604d", 318 | "name": "OK FFB 1.OG", 319 | "type": "Topology", 320 | "number": 9, 321 | "id": "b0822551-8ec6-4fec-9b57-fce876b2c11f" 322 | }, 323 | { 324 | "children": [], 325 | "ifcType": "IfcBuildingStorey", 326 | "ifcId": "3V44Aa$JTCyAMmUjY6nNEO", 327 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 328 | "parent": "f9dc125a-7054-4ff4-afbc-f45a4659604d", 329 | "name": "OK Zwischenpodest 1.OG", 330 | "type": "Topology", 331 | "number": 10, 332 | "id": "30469032-7e53-4796-81fb-11b2bc79d5bd" 333 | }, 334 | { 335 | "children": [], 336 | "ifcType": "IfcBuildingStorey", 337 | "ifcId": "3V44Aa$JTCyAMmUjY6moYY", 338 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 339 | "parent": "f9dc125a-7054-4ff4-afbc-f45a4659604d", 340 | "name": "OK FFB 2. 
OG", 341 | "type": "Topology", 342 | "number": 11, 343 | "id": "29306182-afc5-4b97-9407-d35677486cc2" 344 | }, 345 | { 346 | "children": [], 347 | "ifcType": "IfcBuildingStorey", 348 | "ifcId": "3V44Aa$JTCyAMmUjY6nLhp", 349 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 350 | "parent": "f9dc125a-7054-4ff4-afbc-f45a4659604d", 351 | "name": "OK FFB DG", 352 | "type": "Topology", 353 | "number": 12, 354 | "id": "7862390a-8a61-4283-91f9-852418f7eb18" 355 | }, 356 | { 357 | "children": [], 358 | "ifcType": "IfcBuildingStorey", 359 | "ifcId": "3V44Aa$JTCyAMmUjY6ojiI", 360 | "elementTypeId": "699c3feb-d9fa-4c3a-a91a-e1615a843d76", 361 | "parent": "f9dc125a-7054-4ff4-afbc-f45a4659604d", 362 | "name": "UK Decke Aula", 363 | "type": "Topology", 364 | "number": 13, 365 | "id": "9e0fa92d-8211-4d87-832b-cd583278d23e" 366 | } 367 | ], 368 | "ifcType": "IfcBuilding", 369 | "ifcId": "3V44Aa$JTCyAMmUjXvFU1O", 370 | "elementTypeId": "d72696e0-6f5e-4883-a54d-f36ccb3b5f3b", 371 | "parent": "2efaa4be-7275-47b2-8578-8ff22ad960c3", 372 | "name": "", 373 | "type": "TopologyItem", 374 | "number": 4, 375 | "id": "f9dc125a-7054-4ff4-afbc-f45a4659604d" 376 | } 377 | ], 378 | "ifcType": "IfcSite", 379 | "ifcId": "3V44Aa$JTCyAMmUjXvFU1R", 380 | "elementTypeId": "d5dcaf7d-adb3-4bba-a4a6-2bdaba9112e4", 381 | "parent": "e4443e2d-889d-4787-83cf-7f3a120ef4b4", 382 | "name": "Default", 383 | "type": "Site", 384 | "number": 3, 385 | "id": "2efaa4be-7275-47b2-8578-8ff22ad960c3" 386 | } 387 | ], 388 | "ifcType": "IfcProject", 389 | "ifcId": "3V44Aa$JTCyAMmUjXvFU1P", 390 | "elementTypeId": "e003e080-f21b-4f8a-8c4c-4c9c5026cf50", 391 | "parent": "227cc83c-bd1e-4967-9da4-f9597347e0b1", 392 | "name": "Modell E10 Gebäude_test_1", 393 | "type": "TopologyDivision", 394 | "number": 1, 395 | "id": "e4443e2d-889d-4787-83cf-7f3a120ef4b4" 396 | } 397 | ], 398 | "elementTypeId": "8d27ae6d-3c9a-4201-8a4d-bf0225861788", 399 | "parent": null, 400 | "name": "We are the Bimswarm", 401 | "type": "Project", 402 | "number": 0, 403 | "id": "227cc83c-bd1e-4967-9da4-f9597347e0b1" 404 | } 405 | -------------------------------------------------------------------------------- /json/README.md: -------------------------------------------------------------------------------- 1 | This folder has a simple solution for importing .json files into Qlik Sense. 2 | 3 | Include or paste the script found in _importJson.qvs 4 | to your app script. 5 | 6 | If you wwant to load the entire file into a QS table, call 7 | ``` 8 | CALL importJson('lib://connection/folder/file.json', 'myTable'); 9 | ``` 10 | 11 | If you want to load the file starting from a certain sub-object position and below, call 12 | ``` 13 | CALL importJson('lib://connection/folder/file.json', 'myTable', 'data'); 14 | ``` 15 | where data is the position. 16 | 17 | If you want to load json from a variable that holds it, provide the variable as the 1st parameter (the 18 | 2nd and optional 3rd parameter is as before) 19 | ``` 20 | SET vJson = `{"key1": 123, "key2": "abc", "key3": true, "arr": [5,6,7] }`; 21 | CALL importJson(vJson, 'myTable'); 22 | ``` 23 | 24 | Written by Christof Schwarz, databridge (csw@databridge.ch). Provided as is. 
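Nested JSON: according to the comments in _importJson.qvs, sub-objects and sub-arrays are loaded into additional tables whose names start with the table name you pass, linked through %-prefixed key fields. A minimal sketch of what to expect (the resulting table and key names below are an assumption derived from that script, not verified output):
```
// assumes the subs from _importJson.qvs are already included
SET vJson = `{"key1": 123, "arr": [5,6,7] }`;
CALL importJson(vJson, 'myTable');
// expected: a table "myTable" (field myTable.key1) plus a linked table
// "myTable.arr" holding the array values, joined via a %myTable.arr key
```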
25 | -------------------------------------------------------------------------------- /json/_importJson.qvs: -------------------------------------------------------------------------------- 1 | //--------------------------------------------------------------------- 2 | 3 | SUB analyzeJsonFields(p3_table, p3_position, p3_keyField) 4 | 5 | LET v2_renamings = ''; 6 | FOR v3_fldIdx = 1 TO NoOfFields('~tJson') 7 | 8 | LET v3_fld = FieldName(v3_fldIdx, '~tJson'); 9 | //TRACE analyzeJsonFields: $(v3_fld); 10 | IF v3_fld <> p3_keyField THEN 11 | LET v3_rowsBefore = NoOfRows('~tLoadNext'); 12 | CONCATENATE ([~tLoadNext]) 13 | LOAD DISTINCT 14 | SubField( 15 | '$(p3_position)/' & (RecNo() - 1) & '/$(v3_fld)' & CHR(10) 16 | & '$(p3_position)/$(v3_fld)', CHR(10)) AS __jsonPos 17 | ,'$(v3_fld)' AS __keyField 18 | //,PurgeChar('$(v2_fld_qlfd)', '%') AS [__keyPos] 19 | ,'$(p3_table).$(v3_fld)' AS __tableName 20 | RESIDENT 21 | [~tJson] 22 | WHERE 23 | Len([$(v3_fld)]) = 16 24 | AND Len(PurgeChar([$(v3_fld)],'0123456789ABCDEF')) = 0 25 | ; 26 | IF NoOfRows('~tLoadNext') > v3_rowsBefore THEN 27 | TRACE $(v3_fld) is a key to subobjects; 28 | LET v2_renamings = v2_renamings & If(Len(v2_renamings), CHR(10) & ',') 29 | & '[$(v3_fld)] AS [%$(p2_table).$(v3_fld)]'; 30 | ELSE 31 | TRACE $(v3_fld) is a simple value key; 32 | LET v2_renamings = v2_renamings & If(Len(v2_renamings), CHR(10) & ',') 33 | // & 'Trim([$(v3_fld)]) AS [$(p2_table).$(v3_fld)]'; 34 | & 'If([$(v3_fld)] <> ''null'', [$(v3_fld)]) AS [$(p2_table).$(v3_fld)]'; 35 | 36 | END IF 37 | ELSE 38 | TRACE Not adding keyfield $(v3_fld) again to renamings ...; 39 | END IF 40 | NEXT v3_fldIdx 41 | 42 | LET v3_fldIdx = Null(); 43 | LET v3_fld = Null(); 44 | LET v3_rowsBefore = Null(); 45 | 46 | // TRACE `Renamings: 47 | // $(v2_renamings)`; 48 | 49 | END SUB 50 | 51 | //--------------------------------------------------------------------- 52 | 53 | SUB importJsonPos(p2_table, p2_position, p2_keyField) 54 | 55 | // imports data from a given Json source (v1_FROM) and given position (p2_position) 56 | // (e.g. Root, Root/3/arr, Root/0/obj/1/arr) into given table (p2_table). If this is 57 | // a subsequent call, the keyFieldName will be provided to link the key values 58 | // to a previously loaded table (links they will be prefixed with "%") 59 | 60 | // Note: This SUB can show multiple errors, as it attempts to load a "naked" array, 61 | // where the values are coming into a pseudo-fieldname "NoName". If this is not 62 | // the case, it will show an error but continue to treat it as an Json subobject 63 | // with keys. 64 | 65 | // Those are script errors that are expected and they will be ignored. 66 | 67 | LET v1_impJsonPosCalls = v1_impJsonPosCalls + 1; 68 | 69 | TRACE 🟢$(v1_impJsonPosCalls). 
CALL importJson('$(p2_table)', '$(p2_position)', '$(p2_keyField)'); 70 | 71 | 72 | LET v2_LoadCmd = If(Alt(TableNumber(p2_table), -1) >= 0 73 | ,'CONCATENATE([$(p2_table)])' 74 | ,'[$(p2_table)]:'); 75 | 76 | LET v2_old_errorMode = ErrorMode; 77 | SET ErrorMode = 0; 78 | 79 | LET v2_keyFieldAlias = If(Len(p2_keyField) 80 | ,',[$(p2_keyField)] AS [%$(p2_table)]'); 81 | 82 | $(v2_LoadCmd) 83 | LOAD 84 | NoName AS [$(p2_table)] 85 | $(v2_keyFieldAlias) 86 | //,[$(p2_keyField)] AS [%$(p2_keyField)] 87 | $(v1_FROM) (json, table is '$(p2_position)'); 88 | 89 | LET v2_ScriptError = Num(ScriptError); 90 | 91 | 92 | SWITCH v2_ScriptError 93 | 94 | CASE 0 // field NoName exists, we are "inside" an array at this position 95 | 96 | TRACE `[] position '$(p2_position)' is an array of values`; 97 | // TRACE `$(v2_LoadCmd)`; 98 | // TRACE `NoName AS [$(p2_table)]`; 99 | // TRACE `$(v2_keyFieldAlias)`; 100 | 101 | LET v2_isArray = 1; 102 | LET ErrorMode = v2_old_errorMode; 103 | 104 | CASE 8 105 | 106 | TRACE `position '$(p2_position)' does not exist`; 107 | LET ErrorMode = v2_old_errorMode; 108 | 109 | CASE 11 110 | 111 | LET v2_isArray = 0; 112 | LET ErrorMode = v2_old_errorMode; 113 | 114 | TRACE `{} position '$(p2_position)' is an object with keys`; 115 | 116 | [~tJson]: 117 | LOAD 118 | * 119 | $(v1_FROM) (json, table is '$(p2_position)'); 120 | 121 | LET v2_renamings = ''; 122 | CALL analyzeJsonFields(p2_table, p2_position, p2_keyField); 123 | 124 | IF Len(v2_renamings) THEN 125 | 126 | TRACE `$(v2_LoadCmd)`; 127 | TRACE `$(v2_renamings)`; 128 | TRACE `$(v2_keyFieldAlias)`; 129 | 130 | $(v2_LoadCmd) 131 | LOAD 132 | $(v2_renamings) 133 | $(v2_keyFieldAlias) 134 | RESIDENT [~tJson]; 135 | 136 | END IF 137 | 138 | DROP TABLE [~tJson]; 139 | 140 | DEFAULT 141 | 142 | LET ErrorMode = v2_old_errorMode; 143 | [Unhandled error $(v2_ScriptError) at position '$(p2_position)']; 144 | 145 | END SWITCH 146 | 147 | LET v2_LoadCmd = Null(); 148 | LET v2_old_errorMode = Null(); 149 | LET v2_isArray = Null(); 150 | LET v2_fldIdx = Null(); 151 | LET v2_fld = Null(); 152 | LET v2_fld_qlfd = Null(); 153 | LET v2_rowsBefore = Null(); 154 | LET v2_keyFieldAlias = Null(); 155 | // LET v2_prefix = Null(); 156 | LET v2_ScriptError = Null(); 157 | LET v2_renamings = Null(); 158 | 159 | END SUB 160 | 161 | //--------------------------------------------------------------------- 162 | 163 | SUB importJson(p1_source, p1_table, p1_startPos) 164 | 165 | // Sub to import JSON from a VAR or a FILE 166 | 167 | // will call importJsonPos in a loop as long as more subobjects or subarrays 168 | // are found. In simplest case, it only goes over the Root position, which 169 | // is the starting point. 170 | 171 | // Parameters: 172 | // 1. either a lib://...json string of a lib-path to a .json file 173 | // or a variable that contains the entire JSON object from [..] or {..} 174 | // 2. The name of the in-memory table to load the JSON to; note, there 175 | // will be more tables when the JSON object has subobjects, subarrays 176 | // but all table-names will start with the given name (p1_table) 177 | // 3. starting position, will default to "Root" if omitted ... 
the word 178 | // Root must not be part of the starting position, so argument "data" 179 | // will internally translate into "Root/data" 180 | 181 | IF IsJson(p1_source) THEN 182 | LET v1_FROM = 'INLINE `$(p1_source)`'; 183 | ELSEIF p1_source LIKE 'lib:*' THEN 184 | LET v1_FROM = 'FROM [$(p1_source)]'; 185 | ELSE 186 | [Error in SUB importJson: 1st parameter is neither a file connection nor Json content.]; 187 | END IF 188 | 189 | [~tLoadNext]: 190 | LOAD 191 | '$(p1_table)' AS __tableName 192 | ,'Root' & If(Len('$(p1_startPos)'), '/') & '$(p1_startPos)' AS __jsonPos 193 | , If(Len('$(p1_startPos)'), SubField('$(p1_startPos)', '/', -1)) AS __keyField 194 | AUTOGENERATE(1); 195 | 196 | LET v1_impJsonPosCalls = 0; // just for trace-log a counter of how many times 197 | 198 | DO 199 | LET v1_JsonTable = Peek('__tableName', 0, '~tLoadNext'); 200 | LET v1_JsonPosition = Peek('__jsonPos' , 0, '~tLoadNext'); 201 | LET v1_JsonKeyField = Peek('__keyField' , 0, '~tLoadNext'); 202 | 203 | CALL importJsonPos(v1_JsonTable, v1_JsonPosition, v1_JsonKeyField); 204 | 205 | [~removing_entry]: 206 | NOCONCATENATE LOAD 207 | * 208 | RESIDENT 209 | [~tLoadNext] 210 | WHERE 211 | __jsonPos <> '$(v1_JsonPosition)'; 212 | 213 | DROP TABLE [~tLoadNext]; 214 | RENAME TABLE [~removing_entry] TO [~tLoadNext]; 215 | 216 | // IF v1_impJsonPosCalls = 1 THEN 217 | // EXIT SCRIPT; 218 | // END IF 219 | 220 | LOOP UNTIL NoOfRows('~tLoadNext') = 0; 221 | 222 | DROP TABLES [~tLoadNext]; //, [~tKeyPos]; 223 | 224 | LET v1_JsonTable = Null(); 225 | LET v1_JsonPosition = Null(); 226 | LET v1_JsonKeyField = Null(); 227 | LET v1_impJsonPosCalls = Null(); 228 | LET v1_FROM = Null(); 229 | 230 | END SUB 231 | -------------------------------------------------------------------------------- /json/diagnose-claims.json: -------------------------------------------------------------------------------- 1 | { 2 | "subType": "user", 3 | "internalClaims": { 4 | "sub": "PJG24Kuta2Zq3rw_HlYoXqEr5SUJtNKLJQ0MRxqfDmo", 5 | "tenantId": "2GgxmrcJqlqFqh3G6qyOzW6azMMlKHJn", 6 | "subType": "user", 7 | "userId": "xk--6oE9m6ZX8CEMrYTnwi3m0OKtRuGH", 8 | "roles": [ 9 | "TenantAdmin", 10 | "Developer", 11 | "SharedSpaceCreator" 12 | ], 13 | "name": "Christof Schwarz", 14 | "email": "christof.schwarz@databridge.ch", 15 | "email_verified": true, 16 | "sid": "+COZnOz4pA8TBJNfBiyDJqPybvdGmIoT7C7D7MX8BvA=", 17 | "jti": "dCfu4M_XSTZsnYYOKAkH8Ie91eGF8Ipm", 18 | "iat": 1642411663, 19 | "exp": 1642411963, 20 | "aud": "qlik.api.internal", 21 | "iss": "qlik.api.internal" 22 | }, 23 | "claimSource": "id-token", 24 | "claimsFromIdp": { 25 | "aud": "20af2134-75ca-4024-a95b-b7af44aeb44b", 26 | "iss": "https://login.microsoftonline.com/59a56ddf-6752-4573-a4f5-d971ad02a407/v2.0", 27 | "iat": 1642410858, 28 | "nbf": 1642410858, 29 | "exp": 1642414758, 30 | "groups": [ 31 | "3211b1a1-3c2c-427f-971f-a9c519b89be1", 32 | "ffed9459-bbc8-4367-a297-7504e453baa8", 33 | "7a71c2ee-1121-402a-915e-8450f8dc34e6", 34 | "472d8886-f4b0-4035-afb3-3eda24cba5b8" 35 | ], 36 | "name": "Christof Schwarz", 37 | "nonce": "-FnK6kjeqSQOEi1m-eT7gT8pxWBZofW94VaKel1fBKQ", 38 | "oid": "a6fa95f2-ea4b-4912-aa2e-57530c548da6", 39 | "preferred_username": "christof.schwarz@databridge.ch", 40 | "rh": "0.ATEA322lWVJnc0Wk9dlxrQKkBzQhryDKdSRAqVu3r0SutEsxAK0.", 41 | "sub": "PJG24Kuta2Zq3rw_HlYoXqEr5SUJtNKLJQ0MRxqfDmo", 42 | "tenant_ctry": "IL", 43 | "tid": "59a56ddf-6752-4573-a4f5-d971ad02a407", 44 | "upn": "christof.schwarz@databridge.ch", 45 | "uti": "10AXEgly4EakEGt-2j8SAg", 46 | "ver": "2.0" 47 | }, 48 | 
"mappedClaims": { 49 | "aud": "20af2134-75ca-4024-a95b-b7af44aeb44b", 50 | "iss": "https://login.microsoftonline.com/59a56ddf-6752-4573-a4f5-d971ad02a407/v2.0", 51 | "iat": 1642410858, 52 | "nbf": 1642410858, 53 | "exp": 1642414758, 54 | "nonce": "-FnK6kjeqSQOEi1m-eT7gT8pxWBZofW94VaKel1fBKQ", 55 | "oid": "a6fa95f2-ea4b-4912-aa2e-57530c548da6", 56 | "preferred_username": "christof.schwarz@databridge.ch", 57 | "rh": "0.ATEA322lWVJnc0Wk9dlxrQKkBzQhryDKdSRAqVu3r0SutEsxAK0.", 58 | "tenant_ctry": "IL", 59 | "tid": "59a56ddf-6752-4573-a4f5-d971ad02a407", 60 | "upn": "christof.schwarz@databridge.ch", 61 | "uti": "10AXEgly4EakEGt-2j8SAg", 62 | "ver": "2.0", 63 | "sub": "PJG24Kuta2Zq3rw_HlYoXqEr5SUJtNKLJQ0MRxqfDmo", 64 | "name": "Christof Schwarz", 65 | "groups": [ 66 | "3211b1a1-3c2c-427f-971f-a9c519b89be1", 67 | "ffed9459-bbc8-4367-a297-7504e453baa8", 68 | "7a71c2ee-1121-402a-915e-8450f8dc34e6", 69 | "472d8886-f4b0-4035-afb3-3eda24cba5b8" 70 | ], 71 | "email": "christof.schwarz@databridge.ch", 72 | "email_verified": true, 73 | "subType": "user" 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /json/flights2.json: -------------------------------------------------------------------------------- 1 | { 2 | "airportsFields": ["name","city","country","longitude","latitude"], 3 | "airlineFields":["name"], 4 | "airports":[ 5 | ["Innsbruck", "Innsbruck", "Austria", 11.343964, 47.260219], 6 | ["Salzburg", "Salzburg", "Austria", 13.004333, 47.793304], 7 | ["Schwechat", "Vienna", "Austria", 16.569722, 48.110278], 8 | ], 9 | "airlines":[ 10 | "Austrian","Austrian" 11 | ], 12 | "routes":[ 13 | [1,3,1], 14 | [1,2,3] 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /json/hierarchy.json: -------------------------------------------------------------------------------- 1 | { 2 | "firstname":"Maria", 3 | "children":[ 4 | { 5 | "firstname":"Traude", 6 | "children":[ 7 | { 8 | "firstname":"Christine", 9 | "children":[] 10 | }, 11 | { 12 | "firstname":"Christof", 13 | "children":[ 14 | { 15 | "firstname":"Julia" 16 | }, 17 | { 18 | "firstname":"Eva" 19 | } 20 | ] 21 | } 22 | ] 23 | }, 24 | { 25 | "firstname":"Marlies", 26 | "children":[ 27 | { 28 | "firstname":"Birgit", 29 | "children":[ 30 | { 31 | "firstname":"Marlene" 32 | }, 33 | { 34 | "firstname":"Johannes" 35 | } 36 | ] 37 | }, 38 | { 39 | "firstname":"Wolfgang", 40 | "children":[ 41 | { 42 | "firstname":"Annina", 43 | "children":[] 44 | }, 45 | { 46 | "firstname":"Benedikt", 47 | "children":[] 48 | }, 49 | { 50 | "firstname":"Clemens", 51 | "children":[] 52 | } 53 | ] 54 | }, 55 | { 56 | "firstname":"Heidi", 57 | "children":[ 58 | { 59 | "firstname":"Kirsten" 60 | }, 61 | { 62 | "firstname":"Rebekka" 63 | } 64 | ] 65 | } 66 | ] 67 | } 68 | ] 69 | } 70 | -------------------------------------------------------------------------------- /json/qrs.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "id": "b7a5ba3b-d9db-4302-904b-bdcd61658eae", 4 | "createdDate": "2020-12-22T07:14:46.384Z", 5 | "modifiedDate": "2020-12-22T10:26:03.406Z", 6 | "modifiedByUserName": "VM-I-QS-DEV\\christof.schwarz", 7 | "customProperties": [], 8 | "app": { 9 | "id": "39334b5c-330f-485a-8d22-6e9679a85899", 10 | "name": "Worldmap dotted(1)", 11 | "appId": "", 12 | "publishTime": "1753-01-01T00:00:00.000Z", 13 | "published": false, 14 | "stream": null, 15 | "savedInProductVersion": "12.763.5", 16 | "migrationHash": 
"504d4e39a7133ee172fbe29aa58348b1e4054149", 17 | "availabilityStatus": 0, 18 | "privileges": null 19 | }, 20 | "isManuallyTriggered": false, 21 | "operational": { 22 | "id": "37a02b9e-2657-417d-8bfd-6f17b2ccb1af", 23 | "lastExecutionResult": { 24 | "id": "f3414839-45fd-42c2-bf97-6fa709baf86d", 25 | "executingNodeName": "vm-i-qs-dev.internal.cloudapp.net", 26 | "status": 7, 27 | "startTime": "2020-12-22T10:43:55.642Z", 28 | "stopTime": "2020-12-22T10:44:17.429Z", 29 | "duration": 21787, 30 | "fileReferenceID": "714917e9-7f92-4ccc-a5e8-2c460c409da5", 31 | "scriptLogAvailable": false, 32 | "details": [ 33 | { 34 | "id": "a5e51d47-8f37-4098-8de1-14be4739207c", 35 | "detailsType": 2, 36 | "message": "Changing task state to Triggered", 37 | "detailCreatedDate": "2020-12-22T10:43:55.643Z", 38 | "privileges": null 39 | }, 40 | { 41 | "id": "e92e5f7e-5100-4541-8235-165f0235ffdb", 42 | "detailsType": 2, 43 | "message": "Reference to scriptlog added", 44 | "detailCreatedDate": "2020-12-22T10:44:17.392Z", 45 | "privileges": null 46 | }, 47 | { 48 | "id": "fc6b0e1d-58e0-42a7-b138-529509f15759", 49 | "detailsType": 2, 50 | "message": "Changing task state from Started to FinishedSuccess", 51 | "detailCreatedDate": "2020-12-22T10:44:17.475Z", 52 | "privileges": null 53 | }, 54 | { 55 | "id": "526806b6-729c-4e1f-a169-fb1f0a66553d", 56 | "detailsType": 2, 57 | "message": "Changing task state from Triggered to Started", 58 | "detailCreatedDate": "2020-12-22T10:43:55.872Z", 59 | "privileges": null 60 | }, 61 | { 62 | "id": "6de92b5a-d4c6-4253-bf9d-14e09e7ad901", 63 | "detailsType": 2, 64 | "message": "Trying to start task. Sending task to slave scheduler vm-i-qs-dev.internal.cloudapp.net", 65 | "detailCreatedDate": "2020-12-22T10:43:55.763Z", 66 | "privileges": null 67 | } 68 | ], 69 | "scriptLogLocation": "vm-i-qs-dev.internal.cloudapp.net\\Script\\39334b5c-330f-485a-8d22-6e9679a85899.20201222T104356.114+0000.2357CB850323E262E934.log", 70 | "scriptLogSize": 9216320, 71 | "privileges": null 72 | }, 73 | "nextExecution": "1753-01-01T00:00:00.000Z", 74 | "privileges": null 75 | }, 76 | "name": "Partial Reload", 77 | "taskType": 0, 78 | "enabled": true, 79 | "taskSessionTimeout": 1440, 80 | "maxRetries": 0, 81 | "tags": [ 82 | { 83 | "id": "71a51a83-0e84-481a-9fd7-b2cb03d010e1", 84 | "name": "partial", 85 | "privileges": null 86 | } 87 | ], 88 | "privileges": null, 89 | "schemaPath": "ReloadTask" 90 | } 91 | ] 92 | -------------------------------------------------------------------------------- /json/sample2.json: -------------------------------------------------------------------------------- 1 | [ 2 | {"key1": 123, "key2": "abc", "key3": true} 3 | , {"key1": 234, "key2": "def", "key4": "2023-09-15"} 4 | ] 5 | -------------------------------------------------------------------------------- /json/users.json: -------------------------------------------------------------------------------- 1 | { 2 | "links": { 3 | "self": { 4 | "href": "https://databridge-internal.eu.qlikcloud.com/api/v1/users?fields=id%2Cname%2Croles%2Cstatus%2Cemail&limit=0&tenantId=2GgxmrcJqlqFqh3G6qyOzW6azMMlKHJn" 5 | } 6 | }, 7 | "data": [ 8 | { 9 | "id": "LZusNVxhmfXTtUNgntZmIxtBSumq7vde", 10 | "status": "active", 11 | "name": "Gregor Haus", 12 | "roles": [ 13 | "TenantAdmin", 14 | "Developer", 15 | "SharedSpaceCreator", 16 | "DataSpaceCreator", 17 | "ManagedSpaceCreator", 18 | "AnalyticsAdmin", 19 | "DataAdmin" 20 | ], 21 | "email": "gregor.haus@data.br" 22 | }, 23 | { 24 | "id": "YXwVYjQ6FYXvyMTpklxNJ4QJvd0oOpqZ", 25 | "status": "active", 
26 | "name": "Patric Omatolli", 27 | "roles": [ 28 | "TenantAdmin", 29 | "Developer", 30 | "SharedSpaceCreator" 31 | ], 32 | "email": "patric@data.br" 33 | }, 34 | { 35 | "id": "t8api6x30tg7ynNeTjS-Vm7YjY86hyDG", 36 | "status": "active", 37 | "name": "Thomas Allein", 38 | "roles": [], 39 | "email": "thomas.allein@nothing.com" 40 | }, 41 | { 42 | "id": "iD3jQx5qZRBwOJCNhLZhAqe-xPr5SjnD", 43 | "status": "active", 44 | "name": "Valentin Cascapero", 45 | "roles": [], 46 | "email": "valentin.cascapero@something.com" 47 | } 48 | ] 49 | } 50 | -------------------------------------------------------------------------------- /load_all_qvd.txt: -------------------------------------------------------------------------------- 1 | // Script by Christof Schwarz (csw@qlik.com) 2 | // Version: 13-Jan-2015 3 | // 4 | // Description: 5 | // This include-script will load all tables which together 6 | // comprise a data model formerly saved with my other script 7 | // "STORE_ALL_TABLES.txt". It checks .meta.txt files for 8 | // each QVD file as well to make sure the save checksum is 9 | // the same for all QVD files. It throws an error if not 10 | // (which indicates that something went wrong during store). 11 | // 12 | // Parameters needed: 13 | // provide two variables with respective content: 14 | // "vTableQVD_LibConnect" and "vTableQVD_Prefix" 15 | // 16 | // Examples: 17 | /* 18 | LET vTableQVD_LibConnect = 'lib://QVDs (qtsel_csw)/BINARY'; 19 | LET vTableQVD_Prefix = DocumentTitle(); 20 | */ 21 | 22 | TRACE [*** LOAD ALL TABLES ***]; 23 | 24 | // Check if 2 parameters are provided correctly 25 | IF NOT '$(vTableQVD_LibConnect)' LIKE 'lib://*' THEN 26 | Error: Invalid variable vTableQVD_LibConnect content.; 27 | END IF 28 | IF Len('$(vTableQVD_Prefix)') < 2 THEN 29 | Error: Invalid variable vTableQVD_Prefix content.; 30 | END IF 31 | 32 | TRACE [*** PARAMETERS: ***]; 33 | TRACE [vTableQVD_LibConnect = $(vTableQVD_LibConnect)]; 34 | TRACE [vTableQVD_Prefix = $(vTableQVD_Prefix)]; 35 | 36 | IF NOT WildMatch(vTableQVD_LibConnect, '*/', '*\') THEN 37 | // Add missing trailing slash to the lib path 38 | LET vTableQVD_LibConnect = vTableQVD_LibConnect & '/'; 39 | END IF; 40 | 41 | // Create empty table 42 | $QVDMETADATA: LOAD * INLINE [$QVDFILENAME, $QVDCHECKSUM]; 43 | 44 | FOR EACH vTableQVD IN FileList(vTableQVD_LibConnect & vTableQVD_Prefix & '^*.qvd') 45 | LET vTableName = QvdTableName(vTableQVD); 46 | TRACE [Load $(vTableQVD) >> $(vTableName)]; 47 | 48 | [$(vTableName)]: LOAD * FROM [$(vTableQVD)] (qvd); 49 | LET vTableMetaData = Left(vTableQVD, Len(vTableQVD)-4) & '.meta.txt'; 50 | 51 | CONCATENATE ($QVDMETADATA) 52 | LOAD 53 | '$(vTableQVD)' AS $QVDFILENAME 54 | ,* 55 | FROM 56 | [$(vTableMetaData)] 57 | (txt, utf8, embedded labels, delimiter is '\t', msq); 58 | 59 | NEXT vTableQVD; 60 | 61 | IF FieldValueCount('$QVDCHECKSUM') > 1 THEN 62 | Error: The QVDCHECKSUM is not identical in the .meta.txt of the QVD folder; 63 | ELSE 64 | LET vQVDCheckSum = FieldValue('$QVDCHECKSUM', 1); 65 | TRACE [*** DONE LOADING ALL QVD. 
***]; 66 | TRACE [$QVDCHECKSUM = $(vQVDCheckSum)]; 67 | END IF 68 | 69 | // delete temporary variables 70 | LET vTableQVD = Null(); 71 | LET vTableName = Null(); 72 | LET vTableMetaData = Null(); 73 | LET vQVDCheckSum = Null(); 74 | -------------------------------------------------------------------------------- /next-prev-buttons.txt: -------------------------------------------------------------------------------- 1 | TRACE including https://github.com/ChristofSchwarz/QlikScripts/blob/master/next-prev-buttons.txt; 2 | TRACE Version 1.0; 3 | 4 | SUB CreatePrevNextVar(paramField, paramType, paramPrecision) 5 | 6 | /* 7 | Author: Christof Schwarz 8 | Version: 1.0 - 04-SEP-2021 9 | 10 | This sub will create 4 variables for a given fieldname (provided in paramField). The 11 | paramType is one of the following (case-insensitive): 12 | Txt or Text, Int or Integer, Num or Numeric, Dual, Date, DateTime or Timestamp 13 | 14 | The optional 3rd parameter is Precision and is needed for numeric values with decimals 15 | (a timestamp for example has up to 9 digits after the decimal sign!) and it defines how 16 | many digits after the decimal need to be identical for a match (e.g. 1e-8 or 1e-9). 17 | The 3rd parameter will get an intelligent default if you don't provide it. 18 | 19 | Examples: 20 | For a text field you'd CALL CreatePrevNextVar('ProductCategory', 'Text') 21 | and you get 4 variables: 22 | [vNext:ProductCategory] ... a search string for the [Next] Action Button 23 | [vPrev:ProductCategory] ... a search string for the [Prev] Action Button 24 | [vNextVal:ProductCategory] ... a text with the next value after the current 25 | [vPrevVal:ProductCategory] ... a text with the previous value after the current 26 | 27 | For a timestamp field you'd CALL CreatePrevNextVar('CreationDateTime', 'timestamp') 28 | and you get 4 variables: 29 | [vNext:CreationDateTime] ... a search string for the [Next] Action Button 30 | [vPrev:CreationDateTime] ... a search string for the [Prev] Action Button 31 | [vNextVal:CreationDateTime] ... a text with the next value after the current 32 | [vPrevVal:CreationDateTime] ... 
a text with the previous value after the current 33 | */ 34 | 35 | // Get an intelligent default for privPrecision, if paramPrecision was omitted 36 | IF '$(paramPrecision)' = '' THEN 37 | IF Wildmatch(paramType, 'Datetime','Timestamp') THEN 38 | LET privPrecision = Num(1/24/3600/1000/2, '', '.', ' '); 39 | ELSEIF Wildmatch(paramType, 'Num*') THEN 40 | SET privPrecision = 1E-9; 41 | ELSE 42 | LET privPrecision = 0; 43 | END IF 44 | ELSE 45 | LET privPrecision = Num(Alt('$(paramPrecision)', 0), '', '.', ' '); 46 | END IF 47 | 48 | TRACE `SUB CreatePrevNextVar('$(paramField)', '$(paramType)', $(privPrecision))`; 49 | 50 | // Create Next Value and Prev Value variables 51 | LET [vNextVal:$(paramField)] = 'If(IsNull([$(paramField)]), MinString([$(paramField)]), 52 | MinString(TOTAL {<[$(paramField)]={"=Only({<[$(paramField)]=>}[$(paramField)]) > Only(TOTAL [$(paramField)])"}>} [$(paramField)]) 53 | )'; 54 | LET [vPrevVal:$(paramField)] = 'If(IsNull([$(paramField)]), MaxString([$(paramField)]), 55 | MaxString(TOTAL {<[$(paramField)]={"=Only({<[$(paramField)]=>}[$(paramField)]) < Only(TOTAL [$(paramField)])"}>} [$(paramField)]) 56 | )'; 57 | 58 | // Create the search strings for the Next and Prev Action Buttons based on the type 59 | 60 | IF paramType LIKE 'T*xt' THEN 61 | 62 | LET [vNext:$(paramField)] = CHR(39) & '=Only({<[$(paramField)]=>}[$(paramField)])=''''$' & '(=' & [vNextVal:$(paramField)] & ')''''' & CHR(39); 63 | LET [vPrev:$(paramField)] = CHR(39) & '=Only({<[$(paramField)]=>}[$(paramField)])=''''$' & '(=' & [vPrevVal:$(paramField)] & ')''''' & CHR(39); ; 64 | LET privPrecision = Null(); 65 | 66 | ELSEIF Wildmatch(paramType, 'Num*', 'Int*', 'Dual', 'Date', 'Datetime', 'Timestamp') THEN 67 | 68 | LET [vNext:$(paramField)] = 'Num(If(IsNull([$(paramField)]), Min([$(paramField)]), 69 | Min(TOTAL {<[$(paramField)]={"=Only({<[$(paramField)]=>}[$(paramField)]) > Only(TOTAL [$(paramField)])"}>} [$(paramField)]) 70 | ),'''',''.'','' '')'; 71 | LET [vPrev:$(paramField)] = 'Num(If(IsNull([$(paramField)]), Max([$(paramField)]), 72 | Max(TOTAL {<[$(paramField)]={"=Only({<[$(paramField)]=>}[$(paramField)]) < Only(TOTAL [$(paramField)])"}>} [$(paramField)]) 73 | ),'''',''.'','' '')'; 74 | IF privPrecision = 0 THEN 75 | LET [vNext:$(paramField)] = CHR(39) & '=Only({<[$(paramField)]=>}[$(paramField)]) = $' & '(=' & [vNext:$(paramField)] & ')' & CHR(39); 76 | LET [vPrev:$(paramField)] = CHR(39) & '=Only({<[$(paramField)]=>}[$(paramField)]) = $' & '(=' & [vPrev:$(paramField)] & ')' & CHR(39); 77 | ELSE 78 | LET [vNext:$(paramField)] = CHR(39) & '=FAbs(Only({<[$(paramField)]=>}[$(paramField)]) - $' & '(=' & [vNext:$(paramField)] & ')) < ' & privPrecision & CHR(39); 79 | LET [vPrev:$(paramField)] = CHR(39) & '=FAbs(Only({<[$(paramField)]=>}[$(paramField)]) - $' & '(=' & [vPrev:$(paramField)] & ')) < ' & privPrecision & CHR(39); 80 | END IF 81 | LET privPrecision = Null(); 82 | 83 | ELSE 84 | 85 | LET privPrecision = Null(); 86 | // Throw error 87 | [SUB CreatePrevNextVar: Unknown Field type "$(paramType)"]; 88 | END IF 89 | 90 | END SUB 91 | -------------------------------------------------------------------------------- /pivotsort.txt: -------------------------------------------------------------------------------- 1 | PivotData: 2 | LOAD * INLINE [ 3 | Year, Country, Customer, Sales 4 | 2017, Canada, Chris, 200 5 | 2017, Canada, Anny, 100 6 | 2017, United Kingdom, Foo, 20 7 | 2017, United Kingdom, Kong, 100 8 | 2018, Canada, Chris, 90 9 | 2018, Canada, Anny, 110 10 | 2018, Canada, Foo, 30 11 | 2018, 
United Kingdom, Foo, 180 12 | 2018, United Kingdom, Kong, 170 13 | ]; 14 | 15 | SET fxSort2 = Dual($1, Num#(Left(KeepChar(Hash256($1,$2),'0123456789ABCDEF'),18),'(hex)')); 16 | SET fxSort3 = Dual($1, Num#(Left(KeepChar(Hash256($1,$2,$3),'0123456789ABCDEF'),18),'(hex)')); 17 | SET fxSort4 = Dual($1, Num#(Left(KeepChar(Hash256($1,$2,$3,$4),'0123456789ABCDEF'),18),'(hex)')); 18 | SET fxSort5 = Dual($1, Num#(Left(KeepChar(Hash256($1,$2,$3,$4,$5),'0123456789ABCDEF'),18),'(hex)')); 19 | -------------------------------------------------------------------------------- /removeFieldsLike.qvs: -------------------------------------------------------------------------------- 1 | SUB RemoveFieldsLike (param_table, param_fieldPattern) 2 | 3 | TRACE Calling RemoveFieldsLike on table $(param_table); 4 | // removes the fields matching the pattern from the table param_table. 5 | // The pattern can be a single value (in single-quotes) or multiple values, 6 | // where field names are escaped within square brackets ... 7 | 8 | // Examples: 9 | // CALL RemoveFieldsLike('contracts', 'File_Entry'); 10 | // CALL RemoveFieldsLike('contracts', '*_Feed'); 11 | // CALL RemoveFieldsLike('contracts', '[*_Feed],[*_Entry]'); 12 | 13 | LET v_priv_fieldPattern = Replace(Replace(param_fieldPattern, '[', CHR(39)), ']', CHR(39)); 14 | IF SubStringCount(v_priv_fieldPattern, CHR(39)) = 0 THEN 15 | LET v_priv_fieldPattern = CHR(39) & v_priv_fieldPattern & CHR(39); 16 | END IF; 17 | FOR v_priv_i = NoOfFields(param_table) TO 1 STEP -1; 18 | LET v_priv_Field = FieldName( v_priv_i, param_table); 19 | //TRACE $(v_priv_i) $(v_priv_Field); 20 | IF WildMatch(v_priv_Field, $(v_priv_fieldPattern)) THEN 21 | DROP FIELD [$(v_priv_Field)] FROM [$(param_table)]; 22 | TRACE Dropping field [$(v_priv_Field)] from [$(param_table)]; 23 | ENDIF 24 | 25 | NEXT 26 | LET v_priv_i = Null(); 27 | LET v_priv_Field = Null(); 28 | LET v_priv_fieldPattern = Null(); 29 | 30 | END SUB 31 | -------------------------------------------------------------------------------- /rest-connector/BEST REST.qvf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChristofSchwarz/QlikScripts/8f521fad886a813a94d4209e121002066b59cf5b/rest-connector/BEST REST.qvf -------------------------------------------------------------------------------- /rest-connector/ImportHierarchy.qvf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChristofSchwarz/QlikScripts/8f521fad886a813a94d4209e121002066b59cf5b/rest-connector/ImportHierarchy.qvf -------------------------------------------------------------------------------- /rest-connector/json-trees.txt: -------------------------------------------------------------------------------- 1 | 2 | // ▼ Replace with your connection string to the REST source. I am using a generic one, because 3 | // anything except for the Method (GET/POST) can be passed as "WITH CONNECTION" arguments below. 
4 | LIB CONNECT TO 'Generic Get Request (qmi-qs-sn_vagrant)'; 5 | 6 | // By Christof Schwarz 7 | // More info see https://github.com/ChristofSchwarz/QlikScripts/tree/master/rest-connector 8 | 9 | // ▼▼▼ Check the values of all variables below 10 | // ▼ target data model table 11 | LET vDataTable = 'JsonTree'; 12 | // ▼ list of fields in each JSON array, field names MUST be in double-quotes, comma-separated 13 | //SET vJsonFields = ["id", "name", "ifcType", "ifcId", "type", "number", "elementTypeId"]; 14 | SET vJsonFields = ["firstname"]; 15 | // ▼ same list of fields as used in the LOAD block, can be renamed or formulas applied, no double-quotes needed 16 | //SET vQlikFields = id, If(Len(name),name,id) AS name, ifcType, ifcId, type, number, elementTypeId; 17 | SET vQlikFields = firstname AS name; 18 | // ▼ Which field has the "name" of each node -> used in the hierarchy load (paths) 19 | SET vNameField = name; 20 | // ▼ Fieldname in the target table to hold the level counter 21 | SET vLevelField = 'HierarchyLevel'; 22 | // ▼ separator sign to join the path from the root e.g. grandma\mum\me 23 | SET vPathSep = '\'; 24 | // ▼ Key in your Json which has the child-node array 25 | LET vChildrenKey = 'children'; 26 | // ▼ change the "WITH CONNECTION" part accordingly, passing url and required other http arguments 27 | SET vQryFooter = FROM JSON (wrap on) "root" PK "%KEY_L1" // ◄ don't change this row 28 | WITH CONNECTION ( 29 | URL "https://raw.githubusercontent.com/ChristofSchwarz/QlikScripts/master/json/hierarchy.json" 30 | //URL "https://raw.githubusercontent.com/ChristofSchwarz/QlikScripts/master/json/2.json" 31 | //QUERY "" "", 32 | //HTTPHEADER "Content-Type" "application/json", 33 | //BODY "$(vAuthBody)" 34 | ); 35 | // ▼ Leave the below default values unchanged 36 | SET vQryHeader = 'SELECT'; 37 | LET vCurrLvl = 1; 38 | LET vParentLvl = 0; 39 | LET vIndent1 = ''; 40 | LET vIndent2 = ' '; 41 | LET vNL = CHR(10); 42 | 43 | 44 | // Create empty table 45 | [$(vDataTable)_tmp]: LOAD * INLINE [%$(vDataTable).NodeId, %$(vDataTable).ParentId]; 46 | 47 | DO 48 | // loop as long as more child records are found 49 | 50 | LET vQry = vQryHeader & vNL & vIndent2 & vJsonFields & ',' 51 | & vNL & vIndent2 & '"%KEY_L$(vCurrLvl)",' 52 | & vNL & vIndent2 & '"%FK_L$(vParentLvl)"' 53 | & vNL & vQryFooter; 54 | 55 | TRACE -----; 56 | TRACE $(vQry); 57 | TRACE -----; 58 | LET vRecsBefore = NoOfRows(vDataTable & '_tmp'); 59 | 60 | CONCATENATE ([$(vDataTable)_tmp]) 61 | LOAD 62 | '$(vCurrLvl):' & %KEY_L$(vCurrLvl) AS [%$(vDataTable).NodeId], 63 | // $(vCurrLvl) AS level, 64 | $(vQlikFields), 65 | If($(vParentLvl) > 0, '$(vParentLvl):' & %FK_L$(vParentLvl)) AS [%$(vDataTable).ParentId] 66 | ; 67 | SQL $(vQry); 68 | 69 | LET vRecsAdded = NoOfRows(vDataTable & '_tmp') - vRecsBefore; 70 | TRACE $(vRecsAdded) new records added.; 71 | 72 | IF vRecsAdded > 0 THEN 73 | LET vParentLvl = vCurrLvl; 74 | LET vCurrLvl = vCurrLvl + 1; 75 | LET vIndent1 = Repeat(' ',vParentLvl); 76 | LET vIndent2 = Repeat(' ',vCurrLvl); 77 | LET vQryHeader = vQryHeader & vNL & vIndent1 & '(SELECT'; 78 | LET vQryFooter = vIndent1 & 'FROM "$(vChildrenKey)" PK "%KEY_L$(vCurrLvl)" FK "%FK_L$(vParentLvl)")' & vNL & vQryFooter; 79 | END IF 80 | 81 | LOOP UNTIL vRecsAdded = 0; 82 | 83 | [$(vDataTable)]: 84 | HIERARCHY ([%$(vDataTable).NodeId], [%$(vDataTable).ParentId], [$(vNameField)], '$(vNameField).parent', [$(vNameField)], '$(vNameField).path', '$(vPathSep)', '$(vLevelField)') 85 | LOAD * RESIDENT [$(vDataTable)_tmp]; 86 | 87 | DROP TABLE
[$(vDataTable)_tmp]; 88 | 89 | 90 | 91 | // ▼ Create a recursive table so that a selection on a parent node also 92 | // selects all children, grandchildren ... the entire hierarchy below it 93 | [$(vDataTable)_recursive]: LOAD * INLINE [%$(vDataTable).NodeId]; 94 | 95 | FOR vCurrLvl = 1 TO vParentLvl 96 | CONCATENATE ([$(vDataTable)_recursive]) 97 | LOAD 98 | [%$(vDataTable).NodeId] 99 | ,SubField([$(vNameField).path], '$(vPathSep)', $(vCurrLvl)) AS [$(vNameField).tree] 100 | RESIDENT 101 | [$(vDataTable)] 102 | WHERE 103 | HierarchyLevel >= $(vCurrLvl); 104 | NEXT vCurrLvl 105 | 106 | // ▼ Deleting all temporary Variables 107 | //LET vDataTable = Null(); 108 | LET vJsonFields = Null(); 109 | LET vQlikFields = Null(); 110 | //LET vNameField = Null(); 111 | //LET vLevelField = Null(); 112 | LET vPathSep = Null(); 113 | LET vChildrenKey = Null(); 114 | LET vQryHeader = Null(); 115 | LET vQryFooter = Null(); 116 | LET vQry = Null(); 117 | LET vCurrLvl = Null(); 118 | LET vParentLvl = Null(); 119 | LET vIndent1 = Null(); 120 | LET vIndent2 = Null(); 121 | LET vNL = Null(); 122 | LET vRecsBefore = Null(); 123 | LET vRecsAdded = Null(); 124 | -------------------------------------------------------------------------------- /rest-connector/readme.md: -------------------------------------------------------------------------------- 1 | # Tricks with Qlik Sense REST Connector 2 | 3 | In this chapter I show advanced tricks to import JSON structures with Qlik Sense's native REST Connector. 4 | 5 | 6 | | Topic | Script | Example App | Video Explanation | 7 | | ----------------- | ------ | ----------- | ----------------- | 8 | | Good practices | [Link](https://github.com/ChristofSchwarz/qs_script_rest_api) | [App (QVF)](https://github.com/ChristofSchwarz/qs_script_rest_api/blob/master/REST_example.qvf?raw=true) | https://youtu.be/7m9ZejlzkkY | 9 | | Import Json Trees | [Link](json-trees.txt) | [App (QVF)](https://github.com/ChristofSchwarz/QlikScripts/blob/master/rest-connector/ImportHierarchy.qvf?raw=true) | https://youtu.be/8xkwFjDjO84 | 10 | | BEST REST | | [App (QVF)](https://github.com/ChristofSchwarz/QlikScripts/blob/master/rest-connector/BEST%20REST.qvf?raw=true) | | 11 | -------------------------------------------------------------------------------- /splitTimeSpans.txt: -------------------------------------------------------------------------------- 1 | SUB SplitTimeSpans (parTable, parFromField, parToField, parResolution, parFormat, parCutDateField, parCutFormat, parDurationField, parFillTo, parCheckSums) 2 | 3 | TRACE Calling sub "SplitTimeSpans" by Christof Schwarz; 4 | SET subCheckSumTolerance = 0.00001; // acceptable rounding differences between CheckSum1 and CheckSum2 5 | 6 | // This sub splits time spans into multiple rows when a span crosses a time-cut. The time-cut limits are defined by the 7 | // Resolution parameter: 8 | // 9 | // parTable (obligatory) : name of the table where your From and To dates are found 10 | // parFromField (obligatory) : name of the From date field 11 | // parToField (obligatory) : name of the To date field 12 | // parResolution (obligatory) : distance between splitting time-cuts, in multiples of days (1 = 1 per day, 13 | // 1/24 = 1 per hour) 14 | // parFormat (obligatory) : TimestampFormat to parse and render the newly generated From and To timestamp fields 15 | // e.g. 'YYYY-MM-DD hh:mm:ss' 16 | // parCutDateField (optional) : This field will hold the closest cut-date for the given period, which is handy 17 | // to link it to your MasterCalendar.
If the parameter is omitted, the field will 18 | // temporarily be created but removed before the Sub finishes 19 | // parCutFormat (obligatory) : Date or TimestampFormat for the newly generated parCutDateField 20 | // e.g. 'YYYY-MM-DD', can also be 'Num' to leave it as a numeric, not a timestamp/date value 21 | // parDurationField (optional): leaves a Duration field on the result rows, so you can easily do Sum(Duration) 22 | // in your frontend. If the parameter is omitted, the duration field will 23 | // temporarily be created but removed before the Sub finishes. It does so to 24 | // compare the initial Sum(~Duration) with the final Sum(~Duration) and will 25 | // break script execution in case they don't match (logic failed, shouldn't happen) 26 | // parFillTo (optional) : put 'Now()' (as string) if you want to fill missing To-Date/times up to Now(). If no 27 | // fill value or 'Null()' is given, no filling of missing To-values is made 28 | // parCheckSums (optional) : set to 0 to not compare the Sum of Duration before and after the operation (faster 29 | // script execution), default is 1 30 | // Examples: 31 | // CALL SplitTimeSpans ('My Intervals', 'FROM DATE', 'TO DATE', 1, 'YYYY-MM-DD hh:mm:ss','%CALDATE', 'YYYY-MM-DD', 'DURATION', 'Now()', 0); 32 | // CALL SplitTimeSpans ('My Intervals', 'FROM DATE', 'TO DATE', 1, 'YYYY-MM-DD hh:mm:ss','%CALDATE', 'Num', 'DURATION', 'Now()', 0); 33 | // CALL SplitTimeSpans ('tFACTS', 'FromDateTime', 'ToDateTime', 1/24, 'DD.MM.YYYY hh:mm:ss', Null(), Null(), 'Duration', Null(), 1); 34 | // CALL SplitTimeSpans ('Facts', 'Facts.From', 'Facts.To', 0.5, 'MM/DD/YYYY hh:mm:ss'); 35 | // 36 | // The resulting table has more rows than the original (provided that some durations spanned multiple time cuts). 37 | // It has the same field names as before but we added some columns and manipulated two: 38 | // manipulated fields: 39 | // - the parFromField field after the operation can be either the original or the closest cut-time 40 | // - the parToField field after the operation can be either the original or the closest cut-time 41 | // new fields: 42 | // - the original parFromField value is still kept in field parFromField.Original 43 | // - the original parToField value is still kept in field parToField.Original 44 | // - a field parFromField.IsOrig is added, which is 1 for the row that still shows the original parFromField, 0 for rows 45 | // which were created due to splitting of long periods 46 | // - the parCutDateField is added in the parCutFormat format to show the corresponding closest time cut in the past 47 | 48 | IF Len('$(parTable)')*Len('$(parFromField)')*Len('$(parToField)')*Len('$(parResolution)')*Len('$(parFormat)') = 0 THEN 49 | [Error: you did not provide all necessary parameters to sub SplitTimeSpans(...)]; 50 | END IF 51 | 52 | // set default values for optional parameters 53 | LET subDurationField = If(Len('$(parDurationField)'), '$(parDurationField)', '$(parTable)~Duration'); 54 | LET subCutDateField = If(Len('$(parCutDateField)'), '$(parCutDateField)', '$(parTable)~CutDate'); 55 | LET subCutFormat = If(Len('$(parCutFormat)'), '$(parCutFormat)', '$(parFormat)'); 56 | LET subResolution = Num(parResolution , '','.',' '); // Format parameter to US num format 57 | LET subRowsBefore = NoOfRows('$(parTable)'); 58 | LET subCheckSums = Alt('$(parCheckSums)', 1); 59 | LET subFormatFunct = If(parCutFormat LIKE 'NUM', 'Num', 'TimeStamp'); 60 | LET subFormatParam = If(parCutFormat LIKE 'NUM', '', ',''$(subCutFormat)'' '); 61 | LET subFillTo = If(Len('$(parFillTo)'), parFillTo, 'Null()'); 62 | 63 | // Get all existing fields of the table into a comma-separated list in
"subLoadBlock". If the fields 64 | // are the From- and To-fields as per the parameters, load them into fieldname + ".Original" 65 | LET subLoadBlock = ''; 66 | FOR subV = 1 TO NoOfFields(parTable) 67 | LET subField = FieldName(subV, parTable); 68 | IF Match(subField, parFromField, parToField) THEN 69 | LET subLoadBlock = subLoadBlock & If(Len(subLoadBlock),', ') & '[$(subField)] AS [$(subField).Original]'; 70 | ELSE 71 | LET subLoadBlock = subLoadBlock & If(Len(subLoadBlock),', ') & '[$(subField)]'; 72 | ENDIF 73 | NEXT subV; 74 | 75 | // Phase 1) add the Floor of FROM and TO 76 | TRACE SUB SplitTimeSpans - Phase 1; 77 | [$(parTable)~1]: 78 | LOAD 79 | $(subLoadBlock), 80 | [$(parFromField)], 81 | Alt([$(parToField)], TimeStamp($(parFillTo),'$(parFormat)')) AS [$(parToField)], 82 | Alt([$(parToField)], TimeStamp($(parFillTo),'$(parFormat)')) - [$(parFromField)] AS [$(subDurationField)], 83 | Floor([$(parFromField)], $(subResolution)) AS [$(parFromField).Floor], 84 | Floor(Alt([$(parToField)], TimeStamp($(parFillTo),'$(parFormat)')), $(subResolution)) AS [$(parToField).Floor] 85 | RESIDENT 86 | [$(parTable)]; 87 | 88 | IF subCheckSums THEN 89 | TRACE Creating Checksum (1)...; 90 | [~CheckSums1]: 91 | LOAD 92 | Sum([$(subDurationField)]) AS [~CheckSum1] 93 | RESIDENT [$(parTable)~1]; 94 | LET vCheckSum1 = Peek('~CheckSum1',0,'~CheckSums1'); 95 | TRACE CheckSum1 Duration = $(vCheckSum1); 96 | DROP TABLE [~CheckSums1]; 97 | END IF 98 | DROP FIELD [$(subDurationField)] FROM [$(parTable)~1]; 99 | DROP TABLE [$(parTable)]; 100 | 101 | // Phase 2) Find min and max date in fields FROM and TO 102 | TRACE SUB SplitTimeSpans - Phase 2; 103 | TRACE Looking for min date of field "$(parFromField)"; 104 | [~tmpMinDate]: 105 | LOAD Min(FieldValue('$(parFromField).Floor', RecNo())) as [~MinDate] 106 | AUTOGENERATE FieldValueCount('$(parFromField).Floor'); 107 | LET vCalMinDate= Num(peek('~MinDate', 0, '~tmpMinDate'), '','.',' '); 108 | DROP TABLE [~tmpMinDate]; 109 | 110 | TRACE Looking for max date of field "$(parToField)"; 111 | [~tmpMaxDate]: 112 | LOAD Max(FieldValue('$(parToField).Floor',RecNo())) as [~MaxDate] 113 | AUTOGENERATE FieldValueCount('$(parToField).Floor'); 114 | LET vCalMaxDate= Num(peek('~MaxDate', 0, '~tmpMaxDate'), '','.',' '); 115 | DROP TABLE [~tmpMaxDate]; 116 | 117 | // Phase 3) Build a table ~Cuts which has as many cut dates as 118 | // contained between the min FromDate and max ToDate 119 | TRACE SUB SplitTimeSpans - Phase 3; 120 | [~Cuts]: 121 | LOAD 122 | Num((RowNo() -1) * $(subResolution) + $(vCalMinDate),'','.',' ') as [$(subCutDateField)] 123 | AUTOGENERATE (($(vCalMaxDate) - $(vCalMinDate))/$(subResolution) + 1); 124 | 125 | 126 | // Phase 4) Split the rows in the data table into multiple rows 127 | // using interval match and a mapping table (ApplyMap) 128 | TRACE SUB SplitTimeSpans - Phase 4 (Interval Match); 129 | 130 | [$(parTable)~IM]: 131 | INTERVALMATCH([$(subCutDateField)]) 132 | LOAD DISTINCT [$(parFromField).Floor], [$(parToField).Floor] 133 | RESIDENT [$(parTable)~1]; 134 | DROP TABLE [~Cuts]; 135 | 136 | [$(parTable)~Exploder]: 137 | MAPPING LOAD 138 | Hash256([$(parFromField).Floor], [$(parToField).Floor]), 139 | Concat([$(subCutDateField)], CHR(9)) 140 | RESIDENT 141 | [$(parTable)~IM] 142 | GROUP BY 143 | [$(parFromField).Floor], [$(parToField).Floor]; 144 | DROP TABLE [$(parTable)~IM]; 145 | 146 | /* 147 | Alternative (disabled): join the cut dates directly. ApplyMap + SubField is used instead, to avoid joining large tables. 148 | TRACE SUB SplitTimeSpans - Phase 4 (Joining); 149 | INNER JOIN ([$(parTable)~1]) 150 | LOAD DISTINCT 151 |
// JOIN ON 152 | [$(parFromField).Floor], [$(parToField).Floor], 153 | // Add field 154 | [$(subCutDateField)] 155 | RESIDENT [$(parTable)~IM]; 156 | DROP TABLE [$(parTable)~IM]; 157 | */ 158 | 159 | // Phase 5) create final table 160 | TRACE SUB SplitTimeSpans - Phase 5; 161 | 162 | [$(parTable)~2]: 163 | LOAD 164 | *, 165 | //TimeStamp([$(parToField).New] - [$(parFromField).New], 'h:mm:ss') AS [$(subDurationField)], 166 | Num([$(parToField).New] - [$(parFromField).New]) AS [$(subDurationField)], 167 | Fabs([$(parFromField).New]=[$(parFromField)]) AS [$(parFromField).IsOrig] 168 | ; 169 | LOAD 170 | *, 171 | TimeStamp(RangeMax([$(parFromField)], [$(subCutDateField)]), '$(parFormat)') AS [$(parFromField).New], 172 | TimeStamp(RangeMin([$(parToField)], [$(subCutDateField)]+ $(subResolution)), '$(parFormat)') AS [$(parToField).New] 173 | ; 174 | LOAD // Explode rows into multiple rows using ApplyMap and SubField (to avoid joining large tables) 175 | $(subFormatFunct) (SubField( // this "syntax-error" is okay 176 | ApplyMap('$(parTable)~Exploder',Hash256([$(parFromField).Floor], [$(parToField).Floor]), Null()) 177 | ,Chr(9)) $(subFormatParam)) AS [$(subCutDateField)], 178 | * 179 | RESIDENT 180 | [$(parTable)~1]; 181 | 182 | DROP TABLE [$(parTable)~1]; 183 | DROP FIELDS [$(parFromField).Floor], [$(parToField).Floor] FROM [$(parTable)~2]; 184 | 185 | LET subRowsAfter = NoOfRows('$(parTable)~2'); 186 | TRACE Table "$(parTable)" had $(subRowsBefore) rows, now has $(subRowsAfter) rows; 187 | 188 | // Phase 6) Make a final check that the durations haven't changed 189 | TRACE SUB SplitTimeSpans - Phase 6; 190 | IF subCheckSums THEN 191 | TRACE Creating Checksum (2) ...; 192 | [~CheckSums2]: 193 | LOAD 194 | Sum([$(subDurationField)]) AS [~CheckSum2] 195 | RESIDENT [$(parTable)~2]; 196 | LET vCheckSum2 = Peek('~CheckSum2',0,'~CheckSums2'); 197 | LET subCheckSumDiff = Num(Fabs(vCheckSum1 - vCheckSum2),'0.0000000','.',' '); 198 | TRACE CheckSum1: $(vCheckSum1) | CheckSum2: $(vCheckSum2) | Diff: $(subCheckSumDiff); 199 | IF subCheckSumDiff > subCheckSumTolerance THEN 200 | [Error: The total duration has changed after the split-operation.
See the ~CheckSums2 table.]; 201 | ELSE 202 | DROP TABLE [~CheckSums2]; 203 | END IF 204 | END IF 205 | 206 | // Phase 7) Clean up 207 | RENAME TABLE [$(parTable)~2] TO [$(parTable)]; 208 | WHEN subCutDateField = '$(parTable)~CutDate' DROP FIELD [$(parTable)~CutDate] FROM [$(parTable)]; 209 | WHEN subDurationField = '$(parTable)~Duration' DROP FIELD [$(parTable)~Duration] FROM [$(parTable)]; 210 | DROP FIELD [$(parFromField)]; 211 | DROP FIELD [$(parToField)]; 212 | RENAME FIELD [$(parFromField).New] TO [$(parFromField)]; 213 | RENAME FIELD [$(parToField).New] TO [$(parToField)]; 214 | //RENAME FIELD [$(parFromField)] TO [$(parFromField).Original]; 215 | //RENAME FIELD [$(parToField)] TO [$(parToField).Original]; 216 | //RENAME FIELD [$(parFromField).New] TO [$(parFromField)]; 217 | //RENAME FIELD [$(parToField).New] TO [$(parToField)]; 218 | // remove temp variables (created inside this sub) 219 | LET subDurationField = Null(); 220 | LET subCutDateField = Null(); 221 | LET subCutFormat = Null(); 222 | LET subResolution = Null(); 223 | LET subRowsBefore = Null(); 224 | LET subCheckSums = Null(); 225 | LET subFormatFunct = Null(); 226 | LET subFormatParam = Null(); 227 | LET subCheckSumDiff = Null(); 228 | LET subCheckSumTolerance = Null(); 229 | LET vCalMinDate = Null(); 230 | LET vCalMaxDate = Null(); 231 | LET subV = Null(); 232 | LET subLoadBlock = Null(); 233 | END SUB 234 | -------------------------------------------------------------------------------- /store_all_qvd.txt: -------------------------------------------------------------------------------- 1 | // Script by Christof Schwarz (csw@qlik.com) 2 | // Version: 13-Jan-2015 3 | // 4 | // Description: 5 | // This include-script will store all tables of the data model 6 | // which exist at the point of execution into a given Lib-Folder- 7 | // connection. 8 | // It also stores a checksum into a separate text file, so that 9 | // the block of files can be loaded together using the other 10 | // include script. This is a substitute for a "BINARY" load.
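// For illustration only (the folder connection and prefix below are assumptions): with
//   LET vTableQVD_LibConnect = 'lib://QVDs (qtsel_csw)/BINARY';
//   LET vTableQVD_Prefix = 'SalesApp';
// a data model table called "Orders" would be written to
//   lib://QVDs (qtsel_csw)/BINARY/SalesApp^Orders.qvd
// together with SalesApp^Orders.meta.txt, which holds the checksum of this store run.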
11 | // 12 | // Parameters needed: 13 | // provide two variables with respective content: 14 | // "vTableQVD_LibConnect" and "vTableQVD_Prefix" 15 | // 16 | // Examples: 17 | /* 18 | LET vTableQVD_LibConnect = 'lib://QVDs (qtsel_csw)/BINARY'; 19 | LET vTableQVD_Prefix = DocumentTitle(); 20 | */ 21 | 22 | TRACE [*** SAVE ALL TABLES ***]; 23 | 24 | // Check if the 2 parameters are provided correctly 25 | IF NOT '$(vTableQVD_LibConnect)' LIKE 'lib://*' THEN 26 | Error: Invalid variable vTableQVD_LibConnect content.; 27 | END IF 28 | IF Len('$(vTableQVD_Prefix)') < 2 THEN 29 | Error: Invalid variable vTableQVD_Prefix content.; 30 | END IF 31 | 32 | 33 | TRACE [*** PARAMETERS: ***]; 34 | TRACE [vTableQVD_LibConnect = $(vTableQVD_LibConnect)]; 35 | TRACE [vTableQVD_Prefix = $(vTableQVD_Prefix)]; 36 | 37 | IF NOT WildMatch(vTableQVD_LibConnect, '*/', '*\') THEN 38 | // Add missing trailing slash to the lib path 39 | LET vTableQVD_LibConnect = vTableQVD_LibConnect & '/'; 40 | END IF; 41 | 42 | LET vTableQVD_CheckSum = TimeStamp(Now(),'YYYYMMDDhhmmss'); 43 | 44 | // Create Checksum Table 45 | $QVDMETADATA: LOAD '$(vTableQVD_CheckSum)' AS $QVDCHECKSUM AUTOGENERATE(1); 46 | 47 | FOR vTableIndex = 1 TO NoOfTables(); 48 | LET vTableName = TableName(vTableIndex - 1); 49 | IF vTableName <> '$QVDMETADATA' THEN 50 | LET vTableQVD = vTableQVD_LibConnect & vTableQVD_Prefix & '^' & vTableName ; 51 | TRACE [$(vTableIndex): $(vTableName) >> $(vTableQVD)]; 52 | STORE [$(vTableName)] INTO [$(vTableQVD).qvd] (qvd); 53 | STORE $QVDMETADATA INTO [$(vTableQVD).meta.txt] (txt, delimiter is '\t'); 54 | END IF 55 | NEXT vTableIndex; 56 | TRACE [*** DONE WITH ALL TABLES ***]; 57 | DROP TABLE $QVDMETADATA; 58 | 59 | // delete temporary variables 60 | LET vTableIndex = Null(); 61 | LET vTableName = Null(); 62 | LET vTableQVD = Null(); 63 | LET vTableQVD_CheckSum = Null(); 64 | -------------------------------------------------------------------------------- /syncSomeSelections.txt: -------------------------------------------------------------------------------- 1 | TRACE [Running syncSomeSelections snippet https://bit.ly/2unjnbX]; 2 | 3 | // This script allows you to define a number of fields as "global" fields even when used within 4 | // an alternate state. With that, you get "almost alternate states". Typically these are 5 | // date fields (year, month, ...) which should act as global, whereas all other selections 6 | // should stay private within an alternate state (which they are by default) 7 | 8 | // This script snippet will create three variables, two of which you can use within or as a 9 | // set analysis expression:
vSyncSelAll and vAddSyncSel 10 | // Usage examples: 11 | // Sum ( $(vSyncSelAll) Sales) 12 | // Sum ( {< field = {"value"} $(vAddSyncSel) >} Sales) 13 | 14 | // See https://www.youtube.com/watch?v=jQVzMaxABNA 15 | 16 | // To tell the script, which field(s) you want to sync into the alternate states, put 17 | // a pattern match list (in single quotes, comma-separated) into variable vSyncFields 18 | // before including the below lines of script, for example 19 | // SET vSyncFields = [ 'order_date*' , 'employee_id', 'shipping_date*' ]; 20 | 21 | WHEN Len(Trim('$(vSyncFields)'))=0 SET vSyncFields = ['*']; 22 | SET vFieldsWithSelections = [=Replace('ValueList('&CHR(39)&GetCurrentSelections('*/,'&CHR(39),CHR(39)&'/*','',1,'$')&'*/)','ValueList(''*/)','Null()')]; 23 | SET vOpenSet = ['{<'&]; 24 | SET vCloseSet = [&'>}']; 25 | SET vSetSep = [','&]; 26 | SET vMarker1 = ['/*`*/'&]; 27 | SET vMarker2 = [&'/*´*/']; 28 | SET vFixEmptySet = ['/*`*/{<>}/*´*/','']; 29 | SET vFixEmptySep = ['/*`*/,/*´*/','']; 30 | SET vSyncSel = Concat(If(WildMatch(§(vFieldsWithSelections), §(vSyncFields)) 31 | ,'[' & §(vFieldsWithSelections) & '] =P({$}[' & §(vFieldsWithSelections) & '])' ), ' , '); 32 | LET vSyncSel = Replace(vSyncSel, '§(', '$' & '('); 33 | //$Fix: MAPPING LOAD '§(', '$' & '(' AUTOGENERATE(1); 34 | //LET vSyncSel = MapSubString('$Fix', vSyncSel); 35 | LET vSyncSelAll = '=Replace(' & vMarker1 & vOpenSet & vSyncSel & vCloseSet & vMarker2 & ', $(vFixEmptySet) )'; 36 | LET vAddSyncSel = '=Replace(' & vMarker1 & vSetSep & vSyncSel & vMarker2 & ', $(vFixEmptySep) )'; 37 | // Delete all the temp variables 38 | LET vMarker1 = Null(); 39 | LET vMarker2 = Null(); 40 | LET vOpenSet = Null(); 41 | LET vCloseSet = Null(); 42 | LET vSetSep = Null(); 43 | LET vFixEmptySet = Null(); 44 | LET vFixEmptySep = Null(); 45 | -------------------------------------------------------------------------------- /who_reloads_me.qvs: -------------------------------------------------------------------------------- 1 | TRACE /\ including who_reloads_me; 2 | 3 | LET vScriptStart = TimeStamp(Now(),'YYYY-MM-DD hh:mm:ss.fff'); 4 | LET vDocId = DocumentName(); 5 | LET vRestConnection = 'HTTP-GET-REST (vm-i-qs-dev_christof.schwarz)'; 6 | LET vBaseAPIurl = 'https://qs-i-dev.databridge.ch/header/qrs'; 7 | SET vHttpHeader = "header" "christof.schwarz"; 8 | LET vReloadTaskStart = ''; 9 | LET vReloadTaskName = ''; 10 | LET vReloadTaskId = ''; 11 | LET vReloadTaskTag = ''; 12 | 13 | 14 | 15 | SUB ClearTempVariables 16 | LET vHttpHeader = Null(); 17 | LET vBaseAPIurl = Null(); 18 | LET vRestConnection = Null(); 19 | END SUB 20 | 21 | 22 | SUB GetExecuteSession(param_restConnection, param_baseAPIurl, param_httpHeader) 23 | 24 | TRACE [connection: $(param_restConnection)]; 25 | LIB CONNECT TO '$(param_restConnection)'; 26 | LET v_tmp_xrfkey = Right(PurgeChar(Repeat(Rand(),3),'.,'),6) & 'databridge'; 27 | LET v_tmp_docId = DocumentName(); 28 | 29 | LET vReloadTaskStart = ''; 30 | LET vReloadTaskName = ''; 31 | LET vReloadTaskId = ''; 32 | LET vReloadTaskTag = ''; 33 | 34 | // Calling QRS API executesession endpoint to figure out which task is reloading the script 35 | 36 | tmp_QRS_executionsession: 37 | SQL SELECT 38 | "id" AS "session.id", 39 | // "__KEY_root", 40 | (SELECT 41 | "id" AS "__execution.id", 42 | "executingNodeName" AS "__execution.executingNodeName", 43 | "status" AS "__execution.status", 44 | "startTime" AS "__execution.startTime" 45 | // "__KEY_executionResult", 46 | // "__FK_executionResult" 47 | FROM "executionResult" PK 
"__KEY_executionResult" FK "__FK_executionResult"), 48 | (SELECT 49 | "id" AS "__task.id", 50 | "name" AS "__task.name" 51 | // "__KEY_reloadTask", 52 | // "__FK_reloadTask" 53 | FROM "reloadTask" PK "__KEY_reloadTask" FK "__FK_reloadTask") 54 | FROM JSON (wrap on) "root" PK "__KEY_root" 55 | WITH CONNECTION ( 56 | URL "$(param_baseAPIurl)/executionsession", 57 | QUERY "xrfkey" "$(v_tmp_xrfkey)", 58 | QUERY "filter" "app.id eq $(v_tmp_docId)", 59 | QUERY "orderBy" "createdDate desc", 60 | HTTPHEADER "X-Qlik-Xrfkey" "$(v_tmp_xrfkey)", 61 | HTTPHEADER $(param_httpHeader) 62 | ); 63 | 64 | IF NoOfRows('tmp_QRS_executionsession') THEN 65 | 66 | LET vReloadTaskStart = TimeStamp(TimeStamp#(PurgeChar(FieldValue('__execution.startTime', 1),'TZ'), 'YYYY-MM-DDhh:mm:ss.fff'), 'YYYY-MM-DD hh:mm:ss.fff'); 67 | LET vReloadTaskName = FieldValue('__task.name', 1); 68 | LET vReloadTaskId = FieldValue('__task.id', 1); 69 | 70 | tmp_QRS_task_tags: 71 | LOAD 72 | Concat([__tag.name],';') AS [__tag.name]; 73 | SQL SELECT 74 | // "__KEY_root", 75 | (SELECT 76 | "id" AS "__tag.id", 77 | "name" AS "__tag.name" 78 | // "__FK_tags" 79 | FROM "tags" FK "__FK_tags") 80 | FROM JSON (wrap on) "root" PK "__KEY_root" 81 | WITH CONNECTION ( 82 | URL "$(param_baseAPIurl)/reloadtask/full", 83 | QUERY "xrfkey" "$(v_tmp_xrfkey)", 84 | QUERY "filter" "id eq $(vReloadTaskId)", 85 | HTTPHEADER "X-Qlik-Xrfkey" "$(v_tmp_xrfkey)", 86 | HTTPHEADER $(param_httpHeader) 87 | ); 88 | 89 | IF NoOfRows('tmp_QRS_task_tags') THEN 90 | LET vReloadTaskTag = FieldValue('__tag.name', 1); 91 | END IF 92 | DROP TABLE tmp_QRS_task_tags; 93 | 94 | END IF 95 | 96 | DROP TABLE tmp_QRS_executionsession; 97 | 98 | DISCONNECT; 99 | LET v_tmp_xrfkey = Null(); 100 | LET v_tmp_docId = Null(); 101 | END SUB 102 | --------------------------------------------------------------------------------