├── LICENSE ├── README.md ├── SAS Foundation ├── Aster │ └── bulkload.sas ├── CONNOPT.md ├── DB2 │ └── bulkload.sas ├── Greenplum │ └── bulkload.sas ├── HAWQ │ └── bulkload.sas ├── Hadoop │ ├── createTableAsSelect.sas │ ├── orcFileFormat.sas │ └── partitionedData.sas ├── Impala │ ├── bulkload.sas │ └── procAppend.sas ├── MySQL │ └── bulkload.sas ├── Netezza │ └── bulkload.sas ├── ODBC │ └── bulkload.sas ├── OLE DB │ └── bulkload.sas ├── Oracle │ └── bulkload.sas ├── PI & AF │ ├── AFSystemSamples.sas │ └── PISystemSamples.sas ├── Postgres │ └── bulkload.sas ├── README.md ├── Redshift │ └── bulkload.sas ├── SQLPassthrough.sas ├── Salesforce │ ├── authentication.sas │ └── qualifiers.sas ├── Snowflake │ ├── bulkload.sas │ └── bulkloadS3.sas ├── Teradata │ └── TPT.sas ├── coreSamples.sas ├── createLibname.sas ├── createSampleData.sas ├── createTableAsSelect.sas └── procAppend.sas └── SAS Viya ├── JDBC ├── AmazonAthena │ └── README.md ├── DataVirtuality │ └── README.md ├── Denodo │ └── README.md ├── DuckDB │ └── README.md ├── README.md ├── SQLite │ └── README.md ├── SQream │ └── README.md ├── Spanner │ ├── README.md │ └── images │ │ └── franir_2024-06-19-13-58-48.png ├── Trino │ └── README.md └── jdbcSmokeTesting_template.sasnb ├── README.md ├── sdcBulkload.sas ├── sdcCasutilSample.sas ├── sdcColumnInfoSample.sas ├── sdcFileInfoSample.sas ├── sdcLoadSample.sas └── sdcSaveSample.sas /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SAS/ACCESS Sample Repository (sas-access-samples) 2 | 3 | ## Overview 4 | 5 | This repository contains sample SAS programs that use SAS/ACCESS engines to connect to your data source. The samples are divided into two main directories: SAS Foundation and SAS Viya. 6 | 7 | * Programs at the top-level platform directory are generic and can be used for any of their corresponding SAS/ACCESS engines. 
8 | * Programs that run for a specific SAS/ACCESS interface are found in the folder for that interface. 9 | * Some engines have their own version of the generic programs, and those should be run instead. 10 | 11 | ## Prerequisites 12 | 13 | Before you can connect to your data source using these programs, you must know the following items: 14 | 15 | * User ID and password or authentication domain 16 | * Server host name and port number, if the port is different from the default value. 17 | 18 | ## Installation 19 | 20 | To load the programs into the editor in SAS Studio: 21 | 22 | 1. Click the download button on the right-hand side of the samples repository page on GitHub. 23 | 2. Save the project on your computer. 24 | 3. In the Explorer tab of the left-hand navigation pane, navigate to where you saved the sample files and open them. 25 | 26 | To load the programs into the program editor in the SAS windowing environment: 27 | 28 | 1. Click the download button on the right-hand side of the samples repository page on GitHub. 29 | 2. Save the project on your computer. 30 | 3. Click File > Open Program. 31 | 4. Navigate to where you saved the sample files and open them. 32 | 33 | To load the sample files using the Git Integration feature in SAS Studio, see the [SAS Studio documentation](https://go.documentation.sas.com/doc/en/webeditorcdc/default/webeditorug/p0puc7muifjjycn1uemlm9lj1jkt.htm). 34 | 35 | ## Getting Started 36 | 37 | To use the sample programs on your machine: 38 | 39 | 1. Execute the sample files in the following order: 40 | * createLibname 41 | * createSampleData 42 | * All sample files in your database-specific directory 43 | * The remaining sample files in the top-level platform directory 44 | 45 | 2. Replace the comments in the createLibname sample file with the appropriate values for your system. 46 | * For the dbms macro, enter your engine name. For example, if your engine is postgres, type Postgres. 47 | * For the CONNOPT macro, enter your database connection information. See CONNOPT.md for engine-specific parameters. 48 | 49 | ## License 50 | 51 | This project is licensed under the [Apache 2.0 License](LICENSE). 52 | 53 | ## Additional Resources 54 | 55 | For more information about the usage and options that are available for your interface, see the following resources: 56 | 57 | * [SAS/ACCESS for Relational Databases](https://go.documentation.sas.com/doc/en/pgmsascdc/default/acreldb/titlepage.htm) 58 | * Blog posts 59 | * SAS Communities 60 | -------------------------------------------------------------------------------- /SAS Foundation/Aster/bulkload.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: bulkload.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to Aster */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, */ 10 | /* createLibname.sas, then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive.
You may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. */ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | /* CREATE GLOBAL MACROS FOR BULKLOAD */ 21 | 22 | %GLOBAL bl_path; /* Path to Aster bulk loader */ 23 | %GLOBAL bl_dbname; /* Database name for bulk load */ 24 | %GLOBAL bl_host; /* Hostname for bulk load */ 25 | 26 | /* ASSIGN GLOBAL MACRO VALUES FOR BULKLOAD */ 27 | 28 | %let bl_path =; 29 | %let bl_dbname =; 30 | %let bl_host =; 31 | 32 | /*==========================*/ 33 | /* BULKLOAD Sample 1 */ 34 | /*==========================*/ 35 | 36 | 37 | /***********************/ 38 | /* CREATE DATASET */ 39 | /***********************/ 40 | 41 | data work.DUBLKDAT; 42 | input name $ age sex $ bdate mmddyy.; 43 | cards; 44 | amy 3 f 030185 45 | bill 12 m 121277 46 | charlie 35 m 010253 47 | david 19 m 101469 48 | elinor 42 f 080845 49 | pearl 78 f 051222 50 | vera 96 f 101200 51 | frank 24 m 092663 52 | georgia 1 f 040687 53 | henry 46 m 053042 54 | joann 27 f 020461 55 | buddy 66 m 101432 56 | ; 57 | run; 58 | 59 | 60 | /************************************/ 61 | /* CREATE DBMS TABLE WITH OPTIONS */ 62 | /************************************/ 63 | 64 | /* BULKLOAD VALUES ASSIGNED IN ATSMAUTO */ 65 | 66 | 67 | proc sql; 68 | create table mydblib.DUBLKTAB ( 69 | BULKLOAD=YES 70 | bl_path=&bl_path 71 | bl_dbname=&bl_dbname 72 | bl_host=&bl_host 73 | ) as select * from work.DUBLKDAT; 74 | quit; 75 | 76 | 77 | /****************************/ 78 | /* Read Table */ 79 | /****************************/ 80 | 81 | proc print data=mydblib.DUBLKTAB; 82 | format bdate date7.; 83 | title 'proc print of table'; 84 | run; 85 | 86 | 87 | /****************************/ 88 | /* CLEANUP */ 89 | /****************************/ 90 | 91 | 92 | 93 | proc delete data=mydblib.DUBLKTAB; 94 | run; 95 | -------------------------------------------------------------------------------- /SAS Foundation/CONNOPT.md: -------------------------------------------------------------------------------- 1 | # Sample CONNOPT strings for createLibname.sas 2 | 3 | The following samples are suggested parameters for the CONNOPT connection string for each SAS Foundation SAS/ACCESS Engine. A full CONNOPT connection string for SAS/ACCESS to Postgres may look like the following: 4 | 5 | ``` 6 | %let CONNOPT=%str(user=[username] 7 | password=[password] 8 | server=[server] 9 | port=0000 10 | database=[database]); 11 | ``` 12 | 13 | ## Aster: 14 | 15 | ``` 16 | user=???????? 17 | password=???????? 18 | database= 19 | server=???????? 20 | dimension=yes 21 | ``` 22 | 23 | ## DB2: 24 | ``` 25 | user=???????? 26 | password=???????? 27 | database=???????? 28 | ``` 29 | ## Greenplum: 30 | ``` 31 | user=???????? 32 | password=???????? 33 | path/database=???????? 34 | server=?????? 35 | port=?????? 36 | delete_mult_rows=yes 37 | ``` 38 | 39 | ## Hadoop: 40 | ``` 41 | user=???????? 42 | password=???????? 43 | database=???????? 44 | server=?????????? 45 | ``` 46 | 47 | ## Hawq: 48 | ``` 49 | user=???????? 50 | password=???????? 51 | db=??????? 52 | server=???????? 53 | ``` 54 | 55 | ## Impala: 56 | ``` 57 | user=???????? 58 | password=???????? 59 | database=???????? 60 | dsn=?????? 61 | ``` 62 | 63 | ## MySQL: 64 | ``` 65 | user=???????? 66 | password=???????? 67 | server=???????? 68 | database=????? 69 | port=?????? 70 | ``` 71 | 72 | ## Netezza: 73 | ``` 74 | user=???????? 75 | password=???????? 76 | database=????????
77 | server=??????? 78 | ``` 79 | 80 | ## ODBC: 81 | ``` 82 | user=???????? 83 | password=???????? 84 | dsn/path=???????? 85 | ``` 86 | 87 | ## OLE DB: 88 | ``` 89 | user=???????? 90 | provider=???????? 91 | dsn/path=???????? 92 | ``` 93 | 94 | ## Oracle: 95 | ``` 96 | user=??? 97 | password=??? 98 | path=??? 99 | ``` 100 | 101 | ## Postgres: 102 | ``` 103 | user=???????? 104 | password=???????? 105 | server=???????? 106 | port=???????? 107 | database=????? 108 | ``` 109 | 110 | ## Redshift: 111 | ``` 112 | user=???????? 113 | password=???????? 114 | server=???????? 115 | port=???????? 116 | database=???????? 117 | ``` 118 | 119 | ## SQL Server: 120 | ``` 121 | user=???????? 122 | password=???????? 123 | dsn/path=???????? 124 | ``` 125 | 126 | ## Teradata: 127 | ``` 128 | user=???????? 129 | password=???????? 130 | server=???????? 131 | ``` 132 | 133 | ## Vertica: 134 | ``` 135 | user=???????? 136 | password=???????? 137 | database=???????? 138 | server=??????? 139 | ``` 140 | -------------------------------------------------------------------------------- /SAS Foundation/DB2/bulkload.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: bulkload.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to DB2 */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, */ 10 | /* createLibname, then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. Your may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. 
*/ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | 21 | /*==========================*/ 22 | /* BULKLOAD Sample 1 */ 23 | /*==========================*/ 24 | 25 | /****************************/ 26 | /* CHECK DBMS TABLE DROPPED */ 27 | /****************************/ 28 | 29 | proc delete data=mydblib.DUBLKTAB; 30 | run; 31 | 32 | 33 | 34 | /***********************/ 35 | /* CREATE DATASET */ 36 | /***********************/ 37 | 38 | data work.DUBLKDAT; 39 | input name $ age sex $ bdate mmddyy.; 40 | cards; 41 | amy 3 f 030185 42 | bill 12 m 121277 43 | charlie 35 m 010253 44 | david 19 m 101469 45 | elinor 42 f 080845 46 | pearl 78 f 051222 47 | vera 96 f 101200 48 | frank 24 m 092663 49 | georgia 1 f 040687 50 | henry 46 m 053042 51 | joann 27 f 020461 52 | buddy 66 m 101432 53 | ; 54 | run; 55 | 56 | 57 | /************************************/ 58 | /* CREATE DBMS TABLE WITH OPTIONS */ 59 | /************************************/ 60 | 61 | proc sql; 62 | create table mydblib.DUBLKTAB ( 63 | BULKLOAD=YES 64 | ) as select * from work.DUBLKDAT; 65 | quit; 66 | 67 | 68 | /****************************/ 69 | /* Read Table */ 70 | /****************************/ 71 | 72 | 73 | 74 | proc print data=mydblib.DUBLKTAB; 75 | format bdate date7.; 76 | title 'proc print of table'; 77 | run; 78 | -------------------------------------------------------------------------------- /SAS Foundation/Greenplum/bulkload.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: bulkload.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to Greenplum */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, */ 10 | /* createLibname, then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. You may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. */ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | 21 | /*=========================*/ 22 | /* BULKLOAD Sample 1 */ 23 | /*=========================*/ 24 | 25 | /* CLEAN UP */ 26 | 27 | proc delete data=mydblib.testblkld1; run; 28 | proc delete data=mydblib.testblkld2; run; 29 | 30 | /* CREATE WORK DATA SET */ 31 | data work.testblkld; 32 | input name $ age sex $ bdate mmddyy.; 33 | cards; 34 | amy 3 f 030185 35 | bill 12 m 121277 36 | charlie 35 m 010253 37 | david 19 m 101469 38 | elinor 42 f 080845 39 | pearl 78 f 051222 40 | vera 96 f 101200 41 | frank 24 m 092663 42 | georgia 1 f 040687 43 | henry 46 m 053042 44 | joann 27 f 020461 45 | buddy 66 m 101432 46 | ; 47 | run; 48 | 49 | 50 | 51 | /*===================================*/ 52 | /* BULKLOAD THE DATA. 
YOU WILL NEED */ 53 | /* TO SET HOST AND PORT VALUES */ 54 | /* SET PROTOCOL TO GPFDIST */ 55 | /*===================================*/ 56 | 57 | 58 | proc sql; 59 | create table mydblib.testblkld1 60 | (BULKLOAD=YES 61 | BL_PORT=&port 62 | BL_HOST=&host 63 | BL_PROTOCOL="gpfdist" 64 | bl_format='CSV') 65 | as select * from work.testblkld; 66 | quit; 67 | 68 | data mydblib.testblkld2 ( 69 | BULKLOAD=YES 70 | BL_PORT=&port 71 | BL_HOST=&host 72 | BL_PROTOCOL="gpfdist" 73 | ); 74 | set work.testblkld; 75 | run; 76 | -------------------------------------------------------------------------------- /SAS Foundation/HAWQ/bulkload.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: bulkload.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to HAWQ */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, */ 10 | /* createLibname, then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. You may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. */ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | /* CREATE GLOBAL MACROS FOR BULKLOAD */ 21 | 22 | %GLOBAL PORT; /* Port for Hawq bulk loader */ 23 | %GLOBAL HOST; /* Client box for Hawq bulk load */ 24 | 25 | /* ASSIGN GLOBAL MACRO VALUES FOR BULKLOAD */ 26 | 27 | %let PORT =; 28 | %let HOST =; 29 | 30 | /*=========================*/ 31 | /* BULKLOAD Sample 1 */ 32 | /*=========================*/ 33 | 34 | /* CLEAN UP */ 35 | 36 | proc delete data=mydblib.testblkld1; run; 37 | proc delete data=mydblib.testblkld2; run; 38 | 39 | /* CREATE WORK DATA SET */ 40 | data work.testblkld; 41 | input name $ age sex $ bdate mmddyy.; 42 | cards; 43 | amy 3 f 030185 44 | bill 12 m 121277 45 | charlie 35 m 010253 46 | david 19 m 101469 47 | elinor 42 f 080845 48 | pearl 78 f 051222 49 | vera 96 f 101200 50 | frank 24 m 092663 51 | georgia 1 f 040687 52 | henry 46 m 053042 53 | joann 27 f 020461 54 | buddy 66 m 101432 55 | ; 56 | run; 57 | 58 | 59 | 60 | /*===================================*/ 61 | /* BULKLOAD THE DATA. 
YOU WILL NEED */ 62 | /* TO SET HOST AND PORT VALUES */ 63 | /* SET PROTOCOL TO GPFDIST */ 64 | /*===================================*/ 65 | 66 | 67 | proc sql; 68 | create table mydblib.testblkld1 69 | (BULKLOAD=YES 70 | BL_PORT=&port 71 | BL_HOST=&host 72 | BL_PROTOCOL="gpfdist" 73 | bl_format='CSV') 74 | as select * from work.testblkld; 75 | quit; 76 | 77 | data mydblib.testblkld2 ( 78 | BULKLOAD=YES 79 | BL_PORT=&port 80 | BL_HOST=&host 81 | BL_PROTOCOL="gpfdist" 82 | ); 83 | set work.testblkld; 84 | run; 85 | -------------------------------------------------------------------------------- /SAS Foundation/Hadoop/createTableAsSelect.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: createTableAsSelect.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to Hadoop */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, */ 10 | /* createLibname, then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. You may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. */ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | /*==========================*/ 21 | /* CTAS Sample #1 */ 22 | /*==========================*/ 23 | 24 | /* CLEAN UP */ 25 | proc delete data=mydblib.ts_STRING; run; 26 | proc delete data=mydblib.mydblibsas; run; 27 | proc delete data=mydblib.ysas; run; 28 | 29 | /* CREATE SAMPLE DATA */ 30 | 31 | data mydblib.ts_STRING; 32 | format ts datetime25.6; 33 | ts=datetime(); 34 | run; 35 | 36 | /*==============================*/ 37 | /* STANDARD READ OF SAMPLE DATA */ 38 | /*==============================*/ 39 | 40 | option sastrace=',,,d' sastraceloc=saslog nostsuffix; 41 | proc sql; 42 | connect to &dbms(&CONNOPT); 43 | create table mydblibsas as select * from connection to &dbms( 44 | SELECT ts FROM ts_STRING 45 | ); 46 | quit; 47 | 48 | option sastrace=',,,' sastraceloc=saslog nostsuffix; 49 | data _null_; set mydblibsas; put _all_; run; 50 | 51 | 52 | /*========================================*/ 53 | /* EXPLICIT SQL CTAS READ OF SAMPLE DATA */ 54 | /* USE READ_METHOD=JDBC WHEN HDFS NOT */ 55 | /* AVAILABLE */ 56 | /*========================================*/ 57 | 58 | 59 | option sastrace=',,,d' sastraceloc=saslog nostsuffix; 60 | proc sql; 61 | connect to &dbms(&CONNOPT READ_METHOD=JDBC); 62 | create table ysas as select * from connection to &dbms( 63 | SELECT ts FROM ts_STRING 64 | ); 65 | quit; 66 | 67 | option sastrace=',,,' sastraceloc=saslog nostsuffix; 68 | data _null_; set ysas; put _all_; run; 69 | -------------------------------------------------------------------------------- /SAS Foundation/Hadoop/orcFileFormat.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: orcFileFormat.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to Hadoop */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | 
/* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, */ 10 | /* createLibname, then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. You may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. */ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | 21 | /*=====================*/ 22 | /* ORC SAMPLE 1 */ 23 | /*=====================*/ 24 | 25 | 26 | /* CLEAN UP */ 27 | 28 | proc delete data=mydblib.seqtab1; run; 29 | proc delete data=mydblib.compare; run; 30 | 31 | 32 | /*===================================*/ 33 | /* CREATE ORC DATA WITH PARTIONED BY */ 34 | /*===================================*/ 35 | 36 | data mydblib.compare 37 | work.apd 38 | mydblib.seqtab1 (DBCREATE_TABLE_OPTS="PARTITIONED BY (dealer string) 39 | STORED AS ORC"); 40 | 41 | 42 | input car $ color $ dealer $ sales returns dest $; 43 | cards; 44 | Toyota Red Smith 10000 100 NC 45 | Toyota Red Smith 15000 110 CT 46 | Chevy Green Smith 17000 45 NJ 47 | Ford Blue Smith 12000 0 CA 48 | Toyota Red Jones 4000 20 NC 49 | Ford Blue Finch 10000 400 NC 50 | Toyota Red Smith 10000 100 NJ 51 | Ford White Smith 15000 110 NC 52 | Ford Blue Smith 12000 0 NJ 53 | Toyota Green Jones 4000 20 NC 54 | Toyota Black Jones 4000 20 FL 55 | Toyota Red Jones 4000 20 NC 56 | Ford Blue Finch 10000 400 AL 57 | Chevy White Finch 10000 400 SC 58 | Ford Black Finch 10000 400 SC 59 | Chrysler Blue Finch 20000 400 NC 60 | Chrysler Green Finch 20000 400 CT 61 | Toyota Green Jones 10000 100 NC 62 | Toyota Red Finch 15000 110 CT 63 | Ford Green Smith 17000 45 NJ 64 | ; 65 | run; 66 | 67 | 68 | 69 | title1 'PROC PRINT OF DBLIB.SEQTAB1'; 70 | proc print data=mydblib.seqtab1; run; 71 | 72 | title1 'PROC PRINT OF DBLIB.COMPARE'; 73 | proc print data=mydblib.compare; run; 74 | -------------------------------------------------------------------------------- /SAS Foundation/Hadoop/partitionedData.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: partitionedData.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to Hadoop */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, createLibname */ 10 | /* then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. You may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. 
*/ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | 21 | /*=============================*/ 22 | /* PARTITIONED DATA Sample 1 */ 23 | /*=============================*/ 24 | 25 | 26 | /* CLEAN UP */ 27 | 28 | proc delete data=mydblib.dpart; run; 29 | 30 | 31 | /*=============================*/ 32 | /* CREATE PARTITIONED DATA */ 33 | /*=============================*/ 34 | 35 | data mydblib.dpart (post_table_opts ="partitioned by (dates date) 36 | stored as sequencefile") ; 37 | input FLIGHT $3. +5 DATES date7. +2 ORIG $3. +3 DEST $3. +3 38 | DELAYCAT $15. +2 DESTYPE $15. +8 DELAY; 39 | informat DATES date7.; 40 | format DATES date9.; 41 | datalines; 42 | 114 01MAR98 LGA LAX 1-10 Minutes Domestic 8 43 | 202 01MAR98 LGA ORD No Delay Domestic -5 44 | 219 01MAR98 LGA LON 11+ Minutes International 18 45 | 114 02MAR98 LGA LAX No Delay Domestic 0 46 | 202 02MAR98 LGA ORD 1-10 Minutes Domestic 5 47 | 219 02MAR98 LGA LON 11+ Minutes International 18 48 | 622 02MAR98 LGA FRA No Delay International 0 49 | 114 03MAR98 LGA LAX No Delay Domestic -1 50 | 202 03MAR98 LGA ORD No Delay Domestic -1 51 | 219 03MAR98 LGA LON 1-10 Minutes International 4 52 | 302 03MAR98 LGA WAS 1-10 Minutes Domestic 5 53 | ; 54 | 55 | /*=============================*/ 56 | /* DATA PARTITIONED BY DATE */ 57 | /*=============================*/ 58 | 59 | title 'SHOULD SEE ONLY 3 ROWS'; 60 | proc sql; 61 | select flight from mydblib.dpart where dates = '01mar98'd; 62 | quit; 63 | -------------------------------------------------------------------------------- /SAS Foundation/Impala/bulkload.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: bulkload.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to Impala */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, createLibname */ 10 | /* then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. Your may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. 
*/ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | /* CREATE GLOBAL MACROS FOR BULKLOAD */ 21 | 22 | %GLOBAL BLDATF; 23 | %GLOBAL BLHOST; 24 | %GLOBAL BLPORT; 25 | 26 | /* ASSIGN GLOBAL MACRO VALUES FOR BULKLOAD */ 27 | 28 | %let BLDATF =; 29 | %let BLHOST =; 30 | %let BLPORT =; 31 | 32 | /*=======================================*/ 33 | /* BULKLOAD Sample 1 */ 34 | /*=======================================*/ 35 | 36 | options set=SAS_HADOOP_RESTFUL=1; 37 | 38 | /* CLEAN UP */ 39 | 40 | proc delete data=mydblib.testblkld1; run; 41 | proc delete data=mydblib.testblkld2; run; 42 | 43 | /* CREATE WORK DATA SET */ 44 | data work.testblkld; 45 | input name $ age sex $ bdate mmddyy.; 46 | cards; 47 | amy 3 f 030185 48 | bill 12 m 121277 49 | charlie 35 m 010253 50 | david 19 m 101469 51 | elinor 42 f 080845 52 | pearl 78 f 051222 53 | vera 96 f 101200 54 | frank 24 m 092663 55 | georgia 1 f 040687 56 | henry 46 m 053042 57 | joann 27 f 020461 58 | buddy 66 m 101432 59 | ; 60 | run; 61 | 62 | 63 | 64 | /*===================================*/ 65 | /* BULKLOAD THE DATA. YOU WILL NEED */ 66 | /* TO SET MACRO VALUES IN IMSMAUTO */ 67 | /*===================================*/ 68 | 69 | 70 | proc sql; 71 | create table mydblib.testblkld1 72 | (BULKLOAD=YES 73 | BL_DATAFILE=&bldatf 74 | BL_HOST=&blhost 75 | BL_PORT=&blport ) 76 | as select * from work.testblkld; 77 | quit; 78 | 79 | data mydblib.testblkld2 ( 80 | BULKLOAD=YES 81 | BL_DATAFILE=&bldatf 82 | BL_HOST=&blhost 83 | BL_PORT=&blport ); 84 | 85 | 86 | set work.testblkld; 87 | run; 88 | -------------------------------------------------------------------------------- /SAS Foundation/Impala/procAppend.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: procAppend.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to Impala */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, createLibname */ 10 | /* then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. Your may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. 
*/ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | /*================================*/ 21 | /* Test Errors Sample 1 */ 22 | /*================================*/ 23 | 24 | /* CLEAN UP */ 25 | 26 | proc delete data=mydblib.new1; 27 | run; 28 | 29 | /******************************************/ 30 | /* FAIL: TRY TO CREATE TABLE FROM DATASET */ 31 | /* THAT DOESN'T EXIST */ 32 | /******************************************/ 33 | 34 | data mydblib.new1; 35 | set work.new; 36 | run; 37 | 38 | /******************************************/ 39 | /* CREATE DATA SET WORK.NEW */ 40 | /* THAT DOESN'T EXIST NEW */ 41 | /*****************************************/ 42 | 43 | data work.new; 44 | x=1; 45 | run; 46 | 47 | /******************************************/ 48 | /* PASS: CREATE TABLE FROM DATASET */ 49 | /* */ 50 | /*****************************************/ 51 | 52 | 53 | data mydblib.new1; 54 | set work.new; 55 | run; 56 | 57 | proc delete data=mydblib.new1; 58 | run; 59 | -------------------------------------------------------------------------------- /SAS Foundation/MySQL/bulkload.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: bulkload.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to Mysql */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, createLibname */ 10 | /* then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. You may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. 
*/ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | /*=========================*/ 21 | /* BULKLOAD Sample 1 */ 22 | /*=========================*/ 23 | 24 | /* CLEAN UP */ 25 | 26 | proc delete data=mydblib.testblkld1; run; 27 | proc delete data=mydblib.testblkld2; run; 28 | 29 | /* CREATE WORK DATA SET */ 30 | 31 | data work.testblkld; 32 | input name $ age sex $ bdate mmddyy.; 33 | cards; 34 | amy 3 f 030185 35 | bill 12 m 121277 36 | charlie 35 m 010253 37 | david 19 m 101469 38 | elinor 42 f 080845 39 | pearl 78 f 051222 40 | vera 96 f 101200 41 | frank 24 m 092663 42 | georgia 1 f 040687 43 | henry 46 m 053042 44 | joann 27 f 020461 45 | buddy 66 m 101432 46 | ; 47 | run; 48 | 49 | /*=========================*/ 50 | /* BULKLOAD DATA */ 51 | /*=========================*/ 52 | 53 | data mydblib.testblkld1(bulkload=yes); 54 | set work.testblkld; 55 | run; 56 | -------------------------------------------------------------------------------- /SAS Foundation/Netezza/bulkload.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: bulkload.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to Netezza */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, createLibname */ 10 | /* then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. You may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. 
*/ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | /*=========================*/ 21 | /* BULKLOAD Sample 1 */ 22 | /*=========================*/ 23 | 24 | proc delete data=mydblib.NEBLKTAB; run; 25 | 26 | /***********************/ 27 | /* CREATE DATASET */ 28 | /***********************/ 29 | 30 | data work.NEBLKDAT; 31 | input name $ age sex $ bdate mmddyy.; 32 | cards; 33 | amy 3 f 030185 34 | bill 12 m 121277 35 | charlie 35 m 010253 36 | david 19 m 101469 37 | elinor 42 f 080845 38 | pearl 78 f 051222 39 | vera 96 f 101200 40 | frank 24 m 092663 41 | georgia 1 f 040687 42 | henry 46 m 053042 43 | joann 27 f 020461 44 | buddy 66 m 101432 45 | ; 46 | run; 47 | 48 | /************************************/ 49 | /* CREATE DBMS TABLE WITH OPTIONS */ 50 | /************************************/ 51 | 52 | proc sql; 53 | create table mydblib.NEBLKTAB (BULKLOAD=YES) as 54 | select * from work.NEBLKDAT; 55 | quit; 56 | 57 | /****************************/ 58 | /* Read Table */ 59 | /****************************/ 60 | 61 | proc print data=mydblib.NEBLKTAB(BULKUNLOAD=YES); 62 | format bdate date7.; 63 | title 'proc print of table'; 64 | run; 65 | -------------------------------------------------------------------------------- /SAS Foundation/ODBC/bulkload.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: bulkload.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to ODBC */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, createLibname */ 10 | /* then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. You may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. 
*/ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | /* ASSIGN ODBC TO SQL SERVER BULKLOAD LIBNAME */ 21 | 22 | libname mybulk &dbms &connopt bcp=yes; 23 | 24 | 25 | /*==========================*/ 26 | /* BULKLOAD Sample 1 */ 27 | /*==========================*/ 28 | 29 | /****************************/ 30 | /* CHECK DBMS TABLE DROPPED */ 31 | /****************************/ 32 | 33 | proc delete data=mybulk.DUBLKTAB; 34 | run; 35 | 36 | 37 | 38 | /***********************/ 39 | /* CREATE DATASET */ 40 | /***********************/ 41 | 42 | data work.DUBLKDAT; 43 | input name $ age sex $ bdate mmddyy.; 44 | cards; 45 | amy 3 f 030185 46 | bill 12 m 121277 47 | charlie 35 m 010253 48 | david 19 m 101469 49 | elinor 42 f 080845 50 | pearl 78 f 051222 51 | vera 96 f 101200 52 | frank 24 m 092663 53 | georgia 1 f 040687 54 | henry 46 m 053042 55 | joann 27 f 020461 56 | buddy 66 m 101432 57 | ; 58 | run; 59 | 60 | 61 | /************************************/ 62 | /* CREATE DBMS TABLE WITH OPTIONS */ 63 | /************************************/ 64 | 65 | 66 | data mybulk.DUBLKTAB; 67 | set work.DUBLKDAT; 68 | run; 69 | 70 | /****************************/ 71 | /* Read Table */ 72 | /****************************/ 73 | 74 | 75 | 76 | proc print data=mybulk.DUBLKTAB; 77 | format bdate date7.; 78 | title 'proc print of table'; 79 | run; 80 | -------------------------------------------------------------------------------- /SAS Foundation/OLE DB/bulkload.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: OLSAM08.SAS */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to OLE DB */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, createLibname, */ 10 | /* then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. You may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. 
*/ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | /* ASSIGN ODBC TO SQL SERVER BULKLOAD LIBNAME */ 21 | 22 | libname mybulk &dbms &connopt bcp=yes; 23 | 24 | 25 | /*==========================*/ 26 | /* BULKLOAD Sample 1 */ 27 | /*==========================*/ 28 | 29 | /****************************/ 30 | /* CHECK DBMS TABLE DROPPED */ 31 | /****************************/ 32 | 33 | proc delete data=mybulk.DUBLKTAB; 34 | run; 35 | 36 | 37 | 38 | /***********************/ 39 | /* CREATE DATASET */ 40 | /***********************/ 41 | 42 | data work.DUBLKDAT; 43 | input name $ age sex $ bdate mmddyy.; 44 | cards; 45 | amy 3 f 030185 46 | bill 12 m 121277 47 | charlie 35 m 010253 48 | david 19 m 101469 49 | elinor 42 f 080845 50 | pearl 78 f 051222 51 | vera 96 f 101200 52 | frank 24 m 092663 53 | georgia 1 f 040687 54 | henry 46 m 053042 55 | joann 27 f 020461 56 | buddy 66 m 101432 57 | ; 58 | run; 59 | 60 | 61 | /************************************/ 62 | /* CREATE DBMS TABLE WITH OPTIONS */ 63 | /************************************/ 64 | 65 | 66 | data mybulk.DUBLKTAB; 67 | set work.DUBLKDAT; 68 | run; 69 | 70 | /****************************/ 71 | /* Read Table */ 72 | /****************************/ 73 | 74 | 75 | 76 | proc print data=mybulk.DUBLKTAB; 77 | format bdate date7.; 78 | title 'proc print of table'; 79 | run; 80 | -------------------------------------------------------------------------------- /SAS Foundation/Oracle/bulkload.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: bulkload.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to Oracle */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, createLibname, */ 10 | /* then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. You may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. */ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | /*==========================*/ 21 | /* BULKLOAD Sample 1 */ 22 | /*==========================*/ 23 | 24 | 25 | proc delete data=mydblib.ORBULK13; run; 26 | 27 | /* BULKLOAD ON. 
LOAD ONLY 2000 ROWS */ 28 | data mydblib.ORBULK13 ( bulkload=yes 29 | BL_OPTIONS=' ERRORS=999, LOAD=2000' ); 30 | do i=1 to 10000 ; 31 | c1=1; output; 32 | end; 33 | run; 34 | 35 | /* DROP BULK TABLE */ 36 | proc delete data=mydblib.ORBULK13; run; 37 | -------------------------------------------------------------------------------- /SAS Foundation/PI & AF/AFSystemSamples.sas: -------------------------------------------------------------------------------- 1 | /********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: AFSystemSamples.sas */ 5 | /* TITLE: Source for Examples in Documentation */ 6 | /* PRODUCT: SAS/ACCESS Interface to the PI System Asset Framework */ 7 | /* SYSTEM: Windows ONLY */ 8 | /* MISC: To run these examples, you must install: */ 9 | /* - SAS/ACCESS Interface to the PI System Asset Framework */ 10 | /* - PI System client software library */ 11 | /* - Ensure you have the OSIsoft samples installed */ 12 | /* */ 13 | /********************************************************************/ 14 | 15 | 16 | TITLE1 '*** SAS/ACCESS Interface to the PI Asset Framework System Samples ***'; 17 | 18 | options linesize=150; 19 | 20 | /*******************************************/ 21 | TITLE1 'LIBNAME assign using default server'; 22 | libname db pisysaf; 23 | 24 | 25 | /*******************************************/ 26 | TITLE1 'List ELEMENTS & ATTRIBUTES'; 27 | 28 | TITLE1 'List all elements'; 29 | proc datasets lib=db; quit; 30 | 31 | TITLE1 'List contents for element attributes'; 32 | proc contents data=db.Atlanta_Data_Center varnum; run; 33 | 34 | TITLE1 'List attributes data for element "Atlanta_Data_Center"'; 35 | proc print data=db.Atlanta_Data_Center; run; 36 | 37 | TITLE1 'List attributes data for element'; 38 | TITLE1 ' "Atlanta Data Center\Server Rack1\ION 6200 Power Meter1"'; 39 | proc print data=db.ION_6200_Power_Meter1; run; 40 | 41 | TITLE1 'PROC SQL, selecting only specific columns'; 42 | proc sql; 43 | select AttributeName, Timestamp, Value, Path from db.ION_6200_Power_Meter1; 44 | quit; 45 | 46 | 47 | /*******************************************/ 48 | TITLE1 'ELEMENTS'; 49 | 50 | TITLE1 'List contents for element'; 51 | proc contents 52 | data=db.element 53 | (id="0355db1d-aee7-435e-87b9-869c079d3827" searchby=id) 54 | varnum; 55 | run; 56 | 57 | TITLE1 'List elements for given elementid'; 58 | proc print 59 | data=db.element 60 | (searchby=id id="0355db1d-aee7-435e-87b9-869c079d3827"); 61 | run; 62 | 63 | TITLE1 'List element(s) for given element name'; 64 | proc print 65 | data=db.element (searchby=name name="ION 6200 Power Meter1"); 66 | run; 67 | 68 | TITLE1 'List elements and children for given path name'; 69 | proc print 70 | data=db.element 71 | (searchby=path path="\\D78014\Joe\Atlanta Data Center\Server Rack1" 72 | child=all); 73 | run; 74 | 75 | TITLE1 'List element(s) for given template name'; 76 | proc print data=db.element(searchby=template template="Chemical Pump"); run; 77 | 78 | 79 | /*******************************************/ 80 | TITLE1 'ATTRIBUTES'; 81 | 82 | TITLE1 'List all attributes for element with given elementid'; 83 | proc print 84 | data=db.element_attribute 85 | (searchby=id id="0355db1d-aee7-435e-87b9-869c079d3827"); 86 | run; 87 | 88 | TITLE1 'List all attributes for all elements named "ION 6200 Power Meter1"'; 89 | proc print 90 | data=db.element_attribute(searchby=name name="ION 6200 Power Meter1"); 91 | run; 92 | 93 | 94 | 
/*******************************************/ 95 | TITLE1 'TIME-SERIES-DATA ATTRIBUTES'; 96 | 97 | TITLE1 'Support datamodel= recorded, count, plot and sampled'; 98 | proc print data=db.tsdata_attribute 99 | (searchby=name name="ION 6200 Power Meter1" start='*-1m' end='*' 100 | datamodel=archive); 101 | run; 102 | 103 | TITLE1 'Find the attributes with bad status'; 104 | proc print data=db.tsdata_attribute 105 | (searchby=name name="ION 6200 Power Meter1" start='*-1m' end='*' 106 | datamodel=archive hideoptvars=yes); 107 | where Status = 'Bad'; 108 | run; 109 | 110 | 111 | /*******************************************/ 112 | TITLE1 'TIME-SERIES-DATA'; 113 | 114 | TITLE1 'Time-Series data with INTERVAL option'; 115 | proc print data=db.tsdata_pipoint(searchby=name name="ION 6200 Power Meter1" 116 | attribute="VIn A" 117 | start='y' end='t' datamodel=sampled interval="8:00"); 118 | run; 119 | 120 | /****************************************************************/ 121 | /* END OF FILE */ 122 | /****************************************************************/ 123 | -------------------------------------------------------------------------------- /SAS Foundation/PI & AF/PISystemSamples.sas: -------------------------------------------------------------------------------- 1 | /******************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: PISystemSamples.sas */ 5 | /* TITLE: Source for Examples in Documentation */ 6 | /* PRODUCT: SAS/ACCESS Interface to the PI System */ 7 | /* SYSTEM: Windows ONLY */ 8 | /* MISC: To run these examples, you must install: */ 9 | /* - SAS/ACCESS Interface to the PI System */ 10 | /* - PI System client software library */ 11 | /* - Ensure you have the SINUSOID sample tag */ 12 | /* */ 13 | /******************************************************************/ 14 | 15 | TITLE1 '*** SAS/ACCESS Interface to the PI System Samples ***'; 16 | 17 | options linesize=150; 18 | 19 | /*******************************************/ 20 | TITLE1 'LIBNAME assign using default server'; 21 | libname db pisystem; 22 | 23 | /* Specify the server explicitly */ 24 | /*----------------------------------------*/ 25 | /* libname db pisystem "mypiserver"; */ 26 | /* libname db pisystem server=mypiserver; */ 27 | 28 | 29 | /*******************************************/ 30 | TITLE1 'PROC CONTENT'; 31 | proc contents data=db.pipoint varnum; run; 32 | proc contents data=db.sinusoid varnum; run; 33 | proc contents data=db.sinusoid(showflags=no) varnum; run; 34 | 35 | 36 | /*******************************************/ 37 | TITLE1 'PROC PRINT'; 38 | proc print data=db.sinusoid; run; 39 | proc print data=db.sinusoid (start="*-2d" end="*" interval="1:00:00"); run; 40 | proc print data=db.sinusoid (start="*-2d" end="*" count=10); run; 41 | proc print data=db.sinusoid (select=snapshot); run; 42 | 43 | 44 | /*******************************************/ 45 | TITLE1 'More selective PROC PRINT'; 46 | proc print data=db.sinusoid (start="*-2d" end="*" select=ok); run; 47 | proc print data=db.sinusoid (start="*-2d" end="*" select=bad); run; 48 | proc print data=db.sinusoid (start="*-2d" end="*" select=ok min=10); run; 49 | proc print data=db.sinusoid (start="*-2d" end="*" select=ok max=10); run; 50 | proc print data=db.sinusoid (start="*-2d" end="*" select=ok min=10 max=50); run; 51 | proc print data=db.sinusoid (start="*-2d" end="*" filter="'.' 
> 50"); run; 52 | 53 | 54 | /*******************************************/ 55 | TITLE1 'Digital state read'; 56 | libname db pisystem; 57 | data list; Tag="AMI.State.1"; output;run; 58 | proc print data=db.picomp (taglist=list start="*-2d" end="*" obs=20); run; 59 | 60 | 61 | /*******************************************/ 62 | TITLE1 'Create tag data from scratch'; 63 | proc delete data=db.temptag; run; 64 | data db.temptag (desc="Values are double"); format Timestamp DATETIME22.3; 65 | Timestamp='01jan2013:10:11:12.123'dt; Value=10.1; output; 66 | Timestamp='02feb2013:11:12:13'dt; Value=20.2; output; 67 | Timestamp='03mar2013:12:13:14'dt; Value=30.3; output; 68 | run; 69 | proc print data=db.temptag (delay=1);run; 70 | proc delete data=db.temptag; run; 71 | 72 | 73 | /*******************************************/ 74 | TITLE1 'PROC SQL'; 75 | libname db pisystem; 76 | proc sql noerrorstop; 77 | select * from db.sinusoid where (Value > 99); /* WHERE processed by SAS */ 78 | quit; 79 | 80 | proc sql noerrorstop; 81 | select * from db.sinusoid (min = 99); /* Filter pushed down to db */ 82 | quit; 83 | 84 | 85 | /*******************************************/ 86 | TITLE1 'Multi-tag read'; 87 | libname db pisystem showflags=no; 88 | data list; set db.pipoint (tagfilter="sinus*"); run; 89 | proc print data=db.picomp (taglist=list select=snap); run; 90 | proc print data=db.picomp (taglist=list min=99.99); run; 91 | 92 | libname db pisystem; 93 | data list; set db.pipoint (keep=tag zero where=(zero=100)); run; 94 | proc print data=list; run; 95 | proc print data=db.picomp (taglist=list select=snap drop=substituted annotated); 96 | run; 97 | 98 | 99 | /****************************************************************/ 100 | /* END OF FILE */ 101 | /****************************************************************/ 102 | -------------------------------------------------------------------------------- /SAS Foundation/Postgres/bulkload.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: bulkload.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to Postgres */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, createLibname */ 10 | /* then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. Your may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. 
*/ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | 21 | /****************************/ 22 | /* BULKLOAD Sample 1 */ 23 | /****************************/ 24 | 25 | 26 | 27 | /****************************/ 28 | /* CLEAN UP */ 29 | /****************************/ 30 | 31 | 32 | proc delete data=mydblib.NEBLKTAB; 33 | run; 34 | 35 | 36 | /***********************/ 37 | /* CREATE DATASET */ 38 | /***********************/ 39 | 40 | data work.NEBLKDAT; 41 | input name $ age sex $ bdate mmddyy.; 42 | cards; 43 | amy 3 f 030185 44 | bill 12 m 121277 45 | charlie 35 m 010253 46 | david 19 m 101469 47 | elinor 42 f 080845 48 | pearl 78 f 051222 49 | vera 96 f 101200 50 | frank 24 m 092663 51 | georgia 1 f 040687 52 | henry 46 m 053042 53 | joann 27 f 020461 54 | buddy 66 m 101432 55 | ; 56 | run; 57 | 58 | 59 | /************************************/ 60 | /* CREATE DBMS TABLE WITH BULKLOAD */ 61 | /************************************/ 62 | 63 | 64 | proc sql; 65 | create table mydblib.NEBLKTAB (BULKLOAD=YES BL_USE_PIPE=NO 66 | BL_DELETE_DATAFILE=NO ) 67 | as select * from work.NEBLKDAT; 68 | quit; 69 | 70 | 71 | 72 | /****************************/ 73 | /* Read Table */ 74 | /****************************/ 75 | 76 | proc print data=mydblib.NEBLKTAB; 77 | format bdate date7.; 78 | title 'proc print of table'; 79 | run; 80 | -------------------------------------------------------------------------------- /SAS Foundation/README.md: -------------------------------------------------------------------------------- 1 | # SAS/ACCESS Sample Files Index 2 | 3 | The following is an index for all of the SAS Foundation SAS/ACCESS sample files. 4 | 5 | ## Generic files 6 | 7 | * createLibname 8 | * createSampleData 9 | * coreSamples 10 | * SQLPassthrough 11 | * procAppend 12 | * createTableAsSelect 13 | 14 | ## Database-specific files 15 | 16 | * bulkload 17 | * Aster 18 | * DB2 19 | * Greenplum 20 | * HAWQ 21 | * Impala 22 | * MySQL 23 | * Netezza 24 | * ODBC 25 | * OLE DB 26 | * Oracle 27 | * Postgres 28 | * Redshift 29 | * Snowflake 30 | 31 | * orcFileFormat 32 | * Hadoop 33 | 34 | * partitionedData 35 | * Hadoop 36 | 37 | * TPT 38 | * Teradata 39 | 40 | * authentication 41 | * Salesforce 42 | 43 | * qualifiers 44 | * Salesforce 45 | 46 | ## Generic files that are overridden in sub-directories 47 | 48 | * createTableAsSelect 49 | * Hadoop 50 | 51 | * procAppend 52 | * Impala 53 | -------------------------------------------------------------------------------- /SAS Foundation/Redshift/bulkload.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: bulkload.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to Redshift */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, createLibname, */ 10 | /* then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. Your may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. 
*/ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | 21 | /****************************/ 22 | /* BULKLOAD Sample 1 */ 23 | /****************************/ 24 | 25 | 26 | 27 | /****************************/ 28 | /* CLEAN UP */ 29 | /****************************/ 30 | 31 | 32 | proc delete data=mydblib.NEBLKTAB; 33 | run; 34 | 35 | 36 | /***********************/ 37 | /* CREATE DATASET */ 38 | /***********************/ 39 | 40 | data work.NEBLKDAT; 41 | input name $ age sex $ bdate mmddyy.; 42 | cards; 43 | amy 3 f 030185 44 | bill 12 m 121277 45 | charlie 35 m 010253 46 | david 19 m 101469 47 | elinor 42 f 080845 48 | pearl 78 f 051222 49 | vera 96 f 101200 50 | frank 24 m 092663 51 | georgia 1 f 040687 52 | henry 46 m 053042 53 | joann 27 f 020461 54 | buddy 66 m 101432 55 | ; 56 | run; 57 | 58 | 59 | /**************************************/ 60 | /* CREATE DBMS TABLE WITH BULKLOAD */ 61 | /* USE &MYBUCKET FOR YOUR AWS BUCKET */ 62 | /***************************************/ 63 | 64 | %LET mybucket= ; 65 | 66 | 67 | proc sql; 68 | create table mydblib.NEBLKTAB (BULKLOAD=YES 69 | BL_BUCKET="&mybucket") 70 | as select * from work.NEBLKDAT; 71 | quit; 72 | 73 | 74 | 75 | /****************************/ 76 | /* Read Table */ 77 | /****************************/ 78 | 79 | proc print data=mydblib.NEBLKTAB; 80 | format bdate date7.; 81 | title 'proc print of table'; 82 | run; 83 | -------------------------------------------------------------------------------- /SAS Foundation/SQLPassthrough.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: SQLPassthrough.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS Software for Relational Databases */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, createLibname */ 10 | /* trsmauto, then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. Your may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. */ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | /******************************************************************/ 21 | /* SAS/ACCESS interface to Teradata users: */ 22 | /* */ 23 | /* When using the PROC SQL Pass-Through Facility, Teradata */ 24 | /* users must specify an explicit COMMIT statement to close a */ 25 | /* transaction. Teradata users must also specify an explicit */ 26 | /* COMMIT statement immediately after every Data Definition */ 27 | /* Language (DDL) statement. 
If your DBMS-name is TERA, then */ 28 | /* the COMMIT statement would be: execute (commit) by TERA; */ 29 | /* */ 30 | /******************************************************************/ 31 | 32 | /*=========================*/ 33 | /* Pass-Through Sample 1 */ 34 | /*=========================*/ 35 | 36 | title 'Pass-Through Sample 1: Brief Data for All Invoices'; 37 | 38 | options linesize=120; 39 | 40 | proc sql; 41 | connect to &dbms as mydb (&CONNOPT); 42 | %put %superq(sqlxmsg); 43 | 44 | select INVNUM, NAME, BILLEDON, 45 | AMTINUS format=dollar20.2 46 | from connection to mydb 47 | (select INVNUM, BILLEDON, AMTINUS, NAME 48 | from SAMDAT9, SAMDAT11 49 | where SAMDAT9.BILLEDTO=SAMDAT11.CUSTOMER 50 | order by BILLEDON, INVNUM); 51 | %put %superq(sqlxmsg); 52 | 53 | disconnect from mydb; 54 | quit; 55 | 56 | proc sql; 57 | connect to &dbms as mydb (&CONNOPT); 58 | %put %superq(sqlxmsg); 59 | 60 | create view samples.brief as 61 | select INVNUM, NAME, BILLEDON, 62 | AMTINUS format=dollar20.2 63 | from connection to mydb 64 | (select INVNUM, BILLEDON, AMTINUS, NAME 65 | from SAMDAT9, SAMDAT11 66 | where SAMDAT9.BILLEDTO=SAMDAT11.CUSTOMER); 67 | 68 | %put %superq(sqlxmsg); 69 | 70 | disconnect from mydb; 71 | 72 | options ls=120 label; 73 | 74 | select * from samples.brief 75 | order by BILLEDON, INVNUM; 76 | 77 | quit; 78 | 79 | /*=========================*/ 80 | /* Pass-Through Sample 2 */ 81 | /*=========================*/ 82 | 83 | options ls=120; 84 | 85 | title 'Pass-Through Sample 2: Interns Who Are Family Members of Employees'; 86 | 87 | proc sql; 88 | connect to &dbms as mydb (&CONNOPT); 89 | %put %superq(sqlxmsg); 90 | 91 | select samdat13.LASTNAME, samdat13.FIRSTNAM, 92 | samdat13.EMPID, samdat13.FAMILYID, 93 | samdat13.GENDER, samdat13.DEPT, 94 | samdat13.HIREDATE 95 | from connection to mydb 96 | (select * from SAMDAT10) as query1, samples.samdat13 97 | where query1.EMPID=samdat13.FAMILYID; 98 | 99 | %put %superq(sqlxmsg); 100 | 101 | disconnect from mydb; 102 | quit; 103 | 104 | 105 | /*=========================*/ 106 | /* Pass-Through Sample 3 */ 107 | /*=========================*/ 108 | 109 | proc sql; 110 | 111 | create view samples.allemp as 112 | select * from mydblib.SAMDAT10; 113 | 114 | quit; 115 | 116 | %let dept='ACC%'; 117 | 118 | proc sql stimer; 119 | title 'Pass-Through Sample 3: Employees Who Earn Below the Dept Average 120 | Salary'; 121 | connect to &dbms (&CONNOPT); 122 | 123 | select EMPID, LASTNAME 124 | from samples.allemp 125 | where DEPT like &dept and SALARY < 126 | (select avg(SALARY) 127 | from connection to &dbms 128 | (select SALARY from SAMDAT10 129 | where DEPT like &dept)); 130 | %put %superq(sqlxmsg); 131 | disconnect from &dbms; 132 | 133 | quit; 134 | -------------------------------------------------------------------------------- /SAS Foundation/Salesforce/authentication.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: authentication.sas */ 5 | /* TITLE: Authenticate to Salesforce */ 6 | /* PRODUCT: SAS/ACCESS to Salesforce */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, modify the below program to match your */ 10 | /* login scenario, and submit the relevant statement */ 11 | /* */ 12 | /*********************************************************************/ 13 | 14 | 
/*=========================================*/ 15 | /* Connect to production Salesforce org */ 16 | /*=========================================*/ 17 | /*===========================================================================*/ 18 | /* Salesforce requires a security token when connecting to its API */ 19 | /* If you do not know your security token, the following link provides */ 20 | /* information on how to reset it. When forming a libname, the security */ 21 | /* token should be directly appended to the end of your password. */ 22 | /* */ 23 | /* https://help.salesforce.com/articleView?id=user_security_token.htm */ 24 | /*===========================================================================*/ 25 | %let username="John.Doe@example.com"; 26 | %let password=%str(SamplePassword123); 27 | %let token=%str(ABC1234567890XYZ); 28 | %let passandtoken=&password&token; 29 | 30 | libname x sasioslf user=&username pass=&passandtoken; 31 | 32 | /*=========================================*/ 33 | /* Connect to a Salesforce sandbox */ 34 | /*=========================================*/ 35 | /*============================================================================*/ 36 | /* When creating a Salesforce sandbox, Salesforce asks for a name. This value */ 37 | /* is appended to your email when logging in to the sandbox. For instance, if */ 38 | /* your login is "John.Doe@example.com" and your sandbox name is "qa", your */ 39 | /* login username when connecting to the sandbox is "John.Doe@example.com.qa" */ 40 | /* In addition, you must alter your authendpoint as specified below. For more */ 41 | /* information, view the following Salesforce support document. */ 42 | /* */ 43 | /* https://help.salesforce.com/articleView?id=data_sandbox_create.htm */ 44 | /*============================================================================*/ 45 | 46 | libname x sasioslf user=&username pass=&passandtoken authendpoint="test.salesforce.com"; 47 | 48 | /*===========================================================*/ 49 | /* Connect to Salesforce using mutual authentication */ 50 | /*===========================================================*/ 51 | libname x sasioslf user=&username pass=&passandtoken mutual_auth=yes 52 | cert_path="/path/to/certificate" cert_pass="certificate_password"; 53 | -------------------------------------------------------------------------------- /SAS Foundation/Salesforce/qualifiers.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: qualifiers.sas */ 5 | /* TITLE: Sample programs showing Salesforce suffix qualifiers */ 6 | /* PRODUCT: SAS/ACCESS to Salesforce */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, */ 10 | /* createLibname, then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. You may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. 
*/ 17 | /* */ 18 | /*********************************************************************/ 19 | %let username="John.Doe@example.com"; 20 | %let password=%str(SamplePassword123); 21 | %let token=%str(ABC1234567890XYZ); 22 | %let passandtoken=&password&token; 23 | 24 | /*==================================================================*/ 25 | /* Accessing a standard Salesforce object with default SAS behavior */ 26 | /*==================================================================*/ 27 | libname mydblib sasioslf user=&username pass=&passandtoken; 28 | 29 | proc sql; 30 | SELECT FirstName, LastName FROM mydblib.Account; 31 | quit; 32 | 33 | /*==================================================================*/ 34 | /* Accessing a custom Salesforce object with default SAS behavior */ 35 | /*==================================================================*/ 36 | proc sql; 37 | SELECT lname, fname FROM mydblib.SAMDAT7; 38 | quit; 39 | 40 | /*==================================================================*/ 41 | /* Accessing a standard Salesforce object with use_native_names=yes */ 42 | /*==================================================================*/ 43 | libname mydblib sasioslf user=&username pass=&passandtoken use_native_names=yes; 44 | 45 | proc sql; 46 | SELECT FirstName, LastName FROM mydblib.Account; 47 | quit; 48 | 49 | /*==================================================================*/ 50 | /* Accessing a custom Salesforce object with use_native_names=yes */ 51 | /*==================================================================*/ 52 | proc sql; 53 | SELECT lname__c, fname__c FROM mydblib.SAMDAT7__c; 54 | quit; 55 | -------------------------------------------------------------------------------- /SAS Foundation/Snowflake/bulkload.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: bulkload.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to Snowflake */ 7 | /* SYSTEM: UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, createLibname, */ 10 | /* enter stage, then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. You may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. */ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | /* CREATE GLOBAL MACROS FOR BULKLOAD */ 21 | 22 | 23 | 24 | %GLOBAL bl_internal_stage; /* bulk load internal stage */ 25 | 26 | 27 | 28 | /* ASSIGN GLOBAL MACRO VALUES FOR BULKLOAD */ 29 | 30 | 31 | 32 | %let bl_internal_stage =; /* can use 'user', 'table', or 33 | 'internal stage' */ 34 | /* ex. 
'user/test1' or 'table/test1' or 'my_internal_stage/test1' */ 35 | 36 | 37 | /*=========================*/ 38 | /* BULKLOAD Sample 1 */ 39 | /*=========================*/ 40 | 41 | proc delete data=mydblib.SNBLKTAB; run; 42 | 43 | /***********************/ 44 | /* CREATE DATASET */ 45 | /***********************/ 46 | 47 | data work.SNBLKDAT; 48 | input name $ age sex $ bdate mmddyy.; 49 | cards; 50 | amy 3 f 030185 51 | bill 12 m 121277 52 | charlie 35 m 010253 53 | david 19 m 101469 54 | elinor 42 f 080845 55 | pearl 78 f 051222 56 | vera 96 f 101200 57 | frank 24 m 092663 58 | georgia 1 f 040687 59 | henry 46 m 053042 60 | joann 27 f 020461 61 | buddy 66 m 101432 62 | ; 63 | run; 64 | 65 | 66 | /************************************/ 67 | /* CREATE DBMS TABLE WITH OPTIONS */ 68 | /************************************/ 69 | 70 | proc sql; 71 | create table mydblib.SNBLKTAB ( 72 | BULKLOAD=YES 73 | BL_INTERNAL_STAGE=&bl_internal_stage 74 | ) as select * from work.SNBLKDAT; 75 | quit; 76 | 77 | /****************************/ 78 | /* Read Table */ 79 | /****************************/ 80 | 81 | proc print data=mydblib.SNBLKTAB; 82 | format bdate date7.; 83 | title 'proc print of table'; 84 | run; 85 | -------------------------------------------------------------------------------- /SAS Foundation/Snowflake/bulkloadS3.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: bulkloadS3.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to Snowflake */ 7 | /* SYSTEM: UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, createLibname, */ 10 | /* and then submit this program. */ 11 | /* PRE-REQUISITE: AWS S3_Bucket, Access_key, Secret_key, Region and */ 12 | /* Security_Token */ 13 | /* NOTE: Some interfaces require that you add the SAS data set */ 14 | /* option, SASDATEFMT=, to the name of the DBMS view */ 15 | /* or table to have the output format correctly. */ 16 | /* NOTE: Some interfaces are case sensitive. You may need to */ 17 | /* change the case of table or column names to comply */ 18 | /* with the requirements of your database. */ 19 | /* */ 20 | /*********************************************************************/ 21 | 22 | /* CREATE GLOBAL MACROS FOR BULKLOAD */ 23 | 24 | %GLOBAL m_bucket; /* specifies amazon s3 bucket */ 25 | %GLOBAL m_key; /* specifies an AWS key that you use to access the AWS environment */ 26 | %GLOBAL m_secret; /* specifies the secret access key to access a data source. */ 27 | %GLOBAL m_region; /* specifies the AWS region from which S3 data is being loaded */ 28 | %GLOBAL m_security_token; /* specifies a temporary token associated with the temporary credentials provided with BL_KEY= and BL_SECRET= options. 
*/ 29 | 30 | /* ASSIGN GLOBAL MACRO VALUES FOR BULKLOAD */ 31 | 32 | %let m_bucket ="bucket-name"; 33 | %let m_key =key-value; 34 | %let m_secret =secret-access-key; 35 | %let m_region ="region"; 36 | %let m_security_token =temporary-token; 37 | 38 | 39 | /*=========================*/ 40 | /* BULKLOAD Sample 1 */ 41 | /*=========================*/ 42 | 43 | /******************************************/ 44 | /* Clean up the table, if already exists */ 45 | /******************************************/ 46 | 47 | proc delete data=mydblib.SNBLKTAB; run; 48 | 49 | /***********************/ 50 | /* CREATE DATASET */ 51 | /***********************/ 52 | 53 | data work.SNBLKDAT; 54 | input name $ age sex $ bdate mmddyy.; 55 | cards; 56 | amy 3 f 030185 57 | bill 12 m 121277 58 | charlie 35 m 010253 59 | david 19 m 101469 60 | elinor 42 f 080845 61 | pearl 78 f 051222 62 | vera 96 f 101200 63 | frank 24 m 092663 64 | georgia 1 f 040687 65 | henry 46 m 053042 66 | joann 27 f 020461 67 | buddy 66 m 101432 68 | ; 69 | run; 70 | 71 | /************************************/ 72 | /* CREATE DBMS TABLE WITH OPTIONS */ 73 | /************************************/ 74 | 75 | /* Load via a Snowflake external stage (AWS S3 bucket) */ 76 | proc sql; 77 | create table mydblib.SNBLKTAB 78 | (BULKLOAD=YES bl_bucket=&m_bucket 79 | bl_key="&m_key" bl_secret="&m_secret" 80 | bl_region=&m_region bl_security_token="&m_security_token") as 81 | select * from work.SNBLKDAT; 82 | quit; 83 | 84 | /****************************/ 85 | /* READ TABLE */ 86 | /****************************/ 87 | 88 | proc print data=mydblib.SNBLKTAB (BULKUNLOAD=YES bl_bucket=&m_bucket 89 | bl_key="&m_key" bl_secret="&m_secret" 90 | bl_region=&m_region bl_security_token="&m_security_token"); 91 | format bdate date7.; 92 | title 'proc print of table'; 93 | run; 94 | -------------------------------------------------------------------------------- /SAS Foundation/Teradata/TPT.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: tpt.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS to Teradata */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, createLibname, */ 10 | /* then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. Your may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. */ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | /* LIBNAME FOR TERADATA TPT BULKLOAD. 
*/ 21 | libname mybulk &dbms &CONNOPT tpt=yes; 22 | 23 | /*==========================*/ 24 | /* TPT Sample 1 */ 25 | /*==========================*/ 26 | 27 | proc delete data=mybulk.test1; run; 28 | proc delete data=mybulk.test2; run; 29 | 30 | data work.test; 31 | x=1; output; 32 | run; 33 | 34 | proc append base=mybulk.test1 (tpt=yes fastload=yes) 35 | data=work.test; 36 | run; 37 | 38 | proc delete data=mybulk.test1; run; 39 | 40 | proc sql; 41 | create table mybulk.test2 (tpt=yes fastload=yes) as 42 | select * from work.test; 43 | quit; 44 | 45 | 46 | data work.test; 47 | x=1; output; 48 | x=2; output; 49 | x=3; output; 50 | x=4; output; 51 | x=5; output; 52 | run; 53 | 54 | 55 | proc sql; 56 | create table mybulk.test1 (tpt=yes fastload=yes) 57 | as select * from work.test; 58 | quit; 59 | 60 | proc delete data=mybulk.test1; run; 61 | 62 | proc sql; 63 | create table mybulk.test1 (tpt=yes multistmt=yes) 64 | as select * from work.test; 65 | quit; 66 | 67 | proc delete data=mybulk.test1; run; 68 | 69 | proc sql; 70 | create table mybulk.test1 (tpt=yes multiload=yes) 71 | as select * from work.test; 72 | quit; 73 | 74 | 75 | /* clean up */ 76 | proc delete data=mybulk.test1; run; 77 | proc delete data=mybulk.test2; run; 78 | -------------------------------------------------------------------------------- /SAS Foundation/coreSamples.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: coreSamples.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS Software for Relational Databases */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, createLibname, */ 10 | /* then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. Your may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. 
*/ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | /*=========================*/ 21 | /* LIBNAME Sample 1 */ 22 | /*=========================*/ 23 | 24 | proc print data=mydblib.SAMDAT7 25 | (keep=lname fname state hphone); 26 | where state = 'NJ'; 27 | title 'Libname Sample 1: New Jersey Phone List'; 28 | run; 29 | 30 | /*=========================*/ 31 | /* LIBNAME Sample 2 */ 32 | /*=========================*/ 33 | 34 | data work.highwage; 35 | set mydblib.SAMDAT5(drop=sex birth hired); 36 | if salary>60000 then 37 | CATEGORY='High'; 38 | else if salary<30000 then 39 | CATEGORY='Low'; 40 | else 41 | CATEGORY='Avg'; 42 | run; 43 | 44 | 45 | proc print data=work.highwage; 46 | title 'Libname Sample 2: Salary Analysis'; 47 | format SALARY dollar10.2; 48 | run; 49 | 50 | /*=========================*/ 51 | /* LIBNAME Sample 3 */ 52 | /*=========================*/ 53 | libname mydblib &dbms &CONNOPT connection=unique; 54 | data work.combined; 55 | merge mydblib.SAMDAT7 mydblib.SAMDAT8(in=super 56 | rename=(SUPID=IDNUM)); 57 | by IDNUM; 58 | if super; 59 | run; 60 | 61 | proc print data=work.combined; 62 | title 'Libname Sample 3: Supervisor Information'; 63 | run; 64 | 65 | /*=========================*/ 66 | /* LIBNAME Sample 4 */ 67 | /*=========================*/ 68 | 69 | data work.payroll; 70 | update mydblib.SAMDAT5 71 | mydblib.SAMDAT6; 72 | by IDNUM; 73 | run; 74 | libname mydblib &dbms &CONNOPT connection=sharedread; 75 | 76 | proc print data=work.payroll; 77 | title 'Libname Sample 4: Updated Payroll Data'; 78 | run; 79 | 80 | /*=========================*/ 81 | /* LIBNAME Sample 5 */ 82 | /*=========================*/ 83 | title 'Libname Sample 5: Total Salary by Jobcode'; 84 | 85 | proc sql; 86 | select JOBCODE label='Jobcode', 87 | sum(SALARY) as total 88 | label='Total for Group' 89 | format=dollar11.2 90 | from mydblib.SAMDAT5 91 | group by JOBCODE; 92 | quit; 93 | 94 | /*=========================*/ 95 | /* LIBNAME Sample 6 */ 96 | /*=========================*/ 97 | 98 | title 'Libname Sample 6: Flights to London and Frankfurt'; 99 | 100 | proc sql; 101 | select DATES, DEST from mydblib.SAMDAT2 102 | where (DEST eq "FRA") or 103 | (DEST eq "LON") 104 | order by DEST; 105 | quit; 106 | 107 | /*=========================*/ 108 | /* LIBNAME Sample 7 */ 109 | /*=========================*/ 110 | 111 | proc sql; 112 | title 'Libname Sample 7: International Flights by Flight Number'; 113 | title2 'with Over 200 Passengers'; 114 | select FLIGHT label="Flight Number", 115 | DATES label="Departure Date", 116 | DEST label="Destination", 117 | BOARDED label="Number Boarded" 118 | from mydblib.SAMDAT3 119 | where BOARDED > 200 120 | order by FLIGHT; 121 | quit; 122 | 123 | /*=========================*/ 124 | /* LIBNAME Sample 8 */ 125 | /*=========================*/ 126 | 127 | title 'Libname Sample 8: Employees with salary greater than $40,000'; 128 | 129 | proc sql; 130 | select a.LNAME, a.FNAME, b.SALARY 131 | format=dollar10.2 132 | from mydblib.SAMDAT7 a, mydblib.SAMDAT5 b 133 | where (a.IDNUM eq b.IDNUM) and 134 | (b.SALARY gt 40000); 135 | quit; 136 | 137 | /*==========================*/ 138 | /* LIBNAME Sample 9 */ 139 | /*==========================*/ 140 | 141 | /* SQL Implicit Passthru ON */ 142 | libname mydblib &dbms &CONNOPT direct_sql=yes; 143 | options debug=dbms_select; 144 | title 'Libname Sample 9a: Delayed International Flights in March'; 145 | 146 | proc sql; 147 | select distinct samdat1.FLIGHT, 148 | samdat1.DATES, 149 | 
DELAY format=2.0 150 | from mydblib.SAMDAT1, mydblib.SAMDAT2, mydblib.SAMDAT3 151 | where samdat1.FLIGHT=samdat2.FLIGHT and 152 | samdat1.DATES=samdat2.DATES and 153 | samdat1.FLIGHT=samdat3.FLIGHT and 154 | DELAY>0 155 | order by DELAY descending; 156 | quit; 157 | 158 | /*==========================*/ 159 | /* LIBNAME Sample 9b */ 160 | /*==========================*/ 161 | 162 | /* SQL Implicit Passthru OFF */ 163 | libname mydblib &dbms &CONNOPT direct_sql=no; 164 | 165 | title 'Libname Sample 9b: Delayed International Flights in March'; 166 | 167 | proc sql; 168 | select distinct samdat1.FLIGHT, 169 | samdat1.DATES, 170 | DELAY format=2.0 171 | from mydblib.SAMDAT1, mydblib.SAMDAT2, mydblib.SAMDAT3 172 | where samdat1.FLIGHT=samdat2.FLIGHT and 173 | samdat1.DATES=samdat2.DATES and 174 | samdat1.FLIGHT=samdat3.FLIGHT and 175 | DELAY>0 176 | order by DELAY descending; 177 | quit; 178 | 179 | /*==========================*/ 180 | /* LIBNAME Sample 9c */ 181 | /*==========================*/ 182 | 183 | libname mydblib &dbms &CONNOPT direct_sql=nomultoutjoins; 184 | 185 | title 'Libname Sample 9c: Delayed International Flights in March'; 186 | 187 | proc sql; 188 | select distinct samdat1.FLIGHT, 189 | samdat1.DATES, 190 | DELAY format=2.0 191 | from mydblib.SAMDAT1 192 | full join mydblib.SAMDAT2 on 193 | samdat1.FLIGHT = samdat2.FLIGHT 194 | full join mydblib.SAMDAT3 on 195 | samdat1.FLIGHT = samdat3.FLIGHT 196 | order by DELAY descending; 197 | quit; 198 | 199 | /*==========================*/ 200 | /* LIBNAME Sample 9d */ 201 | /*==========================*/ 202 | 203 | libname mydblib &dbms &CONNOPT direct_sql=nowhere; 204 | 205 | title 'Libname Sample 9d: Delayed International Flights in March'; 206 | 207 | proc sql; 208 | select distinct samdat1.FLIGHT, 209 | samdat1.DATES, 210 | DELAY format=2.0 211 | from mydblib.SAMDAT1, mydblib.SAMDAT2, mydblib.SAMDAT3 212 | where samdat1.FLIGHT=samdat2.FLIGHT and 213 | samdat1.DATES=samdat2.DATES and 214 | samdat1.FLIGHT=samdat3.FLIGHT and 215 | DELAY>0 216 | order by DELAY descending; 217 | quit; 218 | 219 | 220 | /* turn off debug option */ 221 | options debug=off; 222 | 223 | /*==========================*/ 224 | /* LIBNAME Sample 10 */ 225 | /*==========================*/ 226 | 227 | title 'Libname Sample 10: Payrolls 1 & 2'; 228 | 229 | proc sql; 230 | select IDNUM, SEX, JOBCODE, SALARY, 231 | BIRTH, 232 | HIRED 233 | from mydblib.SAMDAT5 234 | outer union corr 235 | select * 236 | from mydblib.SAMDAT6 237 | order by IDNUM, JOBCODE, SALARY; 238 | quit; 239 | 240 | /*==========================*/ 241 | /* LIBNAME Sample 11 */ 242 | /*==========================*/ 243 | 244 | %if (&enginename EQ ASTER) %then %do; 245 | proc sql undo_policy=none; 246 | insert into mydblib.SAMDAT8 247 | values(1588,'NY','FA'); 248 | quit; 249 | 250 | proc print data=mydblib.SAMDAT8; 251 | title 'Libname Sample 11: New Row in AIRLINE.SAMDAT8'; 252 | run; 253 | %end; 254 | %else %do; 255 | proc sql undo_policy=none; 256 | insert into mydblib.SAMDAT8 257 | values('1588','NY','FA'); 258 | quit; 259 | 260 | proc print data=mydblib.SAMDAT8; 261 | title 'Libname Sample 11: New Row in AIRLINE.SAMDAT8'; 262 | run; 263 | %end; 264 | 265 | /*==========================*/ 266 | /* LIBNAME Sample 12 */ 267 | /*==========================*/ 268 | 269 | /******************************************************************/ 270 | /* SAS/ACCESS interface to Impala and HAWQ users: */ 271 | /* Delete not supported, thus Test #12 is omitted for */ 272 | /* those databases */ 273 | /* */ 274 
| /* SAS/ACCESS interface to Hadoop users: */ 275 | /* Support for delete added post 9.4M3 */ 276 | /* Hive .14 or higher is needed for this feature */ 277 | /* */ 278 | /******************************************************************/ 279 | 280 | %if (&enginename NE HAWQ and &enginename NE IMPALA) %then %do; 281 | 282 | proc sql; 283 | delete from mydblib.SAMDAT7 284 | where STATE='CT'; 285 | quit; 286 | 287 | 288 | proc print data=mydblib.SAMDAT7; 289 | title 'Libname Sample 12: AIRLINE.SAMDAT7 After Deleting Connecticut Employees'; 290 | 291 | run; 292 | 293 | %end; 294 | 295 | /*==========================*/ 296 | /* LIBNAME Sample 13 */ 297 | /*==========================*/ 298 | 299 | proc sql; 300 | 301 | create table work.gtforty as 302 | select LNAME as lastname, 303 | FNAME as firstname, 304 | SALARY as Salary 305 | from mydblib.SAMDAT7 a, mydblib.SAMDAT5 b 306 | where (a.IDNUM eq b.IDNUM) and (SALARY gt 40000); 307 | 308 | quit; 309 | 310 | proc print data=work.gtforty noobs; 311 | title 'Libname Sample 13: Employees with salaries over $40,000'; 312 | format SALARY dollar10.2; 313 | 314 | run; 315 | 316 | /*==========================*/ 317 | /* LIBNAME Sample 14 */ 318 | /*==========================*/ 319 | 320 | title 'Libname Sample 14: Number of Passengers per Flight by Date'; 321 | 322 | 323 | proc print data=mydblib.SAMDAT1 noobs; 324 | var DATES BOARDED; 325 | by FLIGHT DEST; 326 | sumby FLIGHT; 327 | sum BOARDED; 328 | run; 329 | 330 | title 'Libname Sample 14: Maximum Number of Passengers per Flight'; 331 | 332 | 333 | proc means data=mydblib.SAMDAT1 fw=5 maxdec=1 max; 334 | var BOARDED; 335 | class FLIGHT; 336 | run; 337 | 338 | /*==========================*/ 339 | /* LIBNAME Sample 15 */ 340 | /*==========================*/ 341 | title 'Libname Sample 15: Table Listing'; 342 | 343 | options pageno=1; 344 | 345 | proc datasets lib=mydblib; 346 | contents data=_all_ nods; 347 | run; 348 | 349 | /*==========================*/ 350 | /* LIBNAME Sample 16 */ 351 | /*==========================*/ 352 | 353 | title 'Libname Sample 16: Contents of the SAMDAT2 Table'; 354 | 355 | proc contents data=mydblib.SAMDAT2; 356 | run; 357 | 358 | /*==========================*/ 359 | /* LIBNAME Sample 17 */ 360 | /*==========================*/ 361 | 362 | title 'Libname Sample 17: Ranking of Delayed Flights'; 363 | 364 | options pageno=1; 365 | 366 | proc rank data=mydblib.SAMDAT2 descending 367 | ties=low out=work.ranked; 368 | var DELAY; 369 | ranks RANKING; 370 | run; 371 | 372 | proc print data=work.ranked; 373 | format DELAY 2.0; 374 | run; 375 | 376 | /*==========================*/ 377 | /* LIBNAME Sample 17a */ 378 | /*==========================*/ 379 | 380 | data mydblib.SAMTEMP; 381 | set mydblib.SAMDAT2; 382 | run; 383 | proc delete data=mydblib.SAMTEMP; run; 384 | 385 | /*==========================*/ 386 | /* LIBNAME Sample 18 */ 387 | /*==========================*/ 388 | 389 | title 'Libname Sample 18: Number of Employees by Jobcode'; 390 | 391 | proc tabulate data=mydblib.SAMDAT5 format=3.0; 392 | class JOBCODE; 393 | table JOBCODE*n; 394 | keylabel n="#"; 395 | run; 396 | 397 | /*==========================*/ 398 | /* LIBNAME Sample 19 */ 399 | /*==========================*/ 400 | 401 | title 'Libname Sample 19: SAMAT5 After Appending SAMDAT6'; 402 | 403 | proc append base=mydblib.SAMDAT5 404 | data=mydblib.SAMDAT6; 405 | run; 406 | 407 | proc print data=mydblib.SAMDAT5; 408 | run; 409 | 410 | 411 | /*==========================*/ 412 | /* LIBNAME Sample 20 */ 413 | 
/*==========================*/ 414 | 415 | title 'Libname Sample 20: Invoice Frequency by Country'; 416 | 417 | proc freq data=mydblib.SAMDAT9 (keep=INVNUM COUNTRY); 418 | tables COUNTRY; 419 | run; 420 | 421 | /*==========================*/ 422 | /* LIBNAME Sample 21 */ 423 | /*==========================*/ 424 | 425 | title 'Libname Sample 21: High Bills--Not Paid'; 426 | 427 | proc sql; 428 | create view work.allinv as 429 | select PAIDON, BILLEDON, INVNUM, AMTINUS, BILLEDTO 430 | from mydblib.SAMDAT9 (obs=5); 431 | quit; 432 | 433 | data work.notpaid(keep=INVNUM BILLEDTO AMTINUS BILLEDON); 434 | 435 | set work.allinv; 436 | where PAIDON is missing and AMTINUS>=300000.00; 437 | run; 438 | 439 | proc print data=work.notpaid label; 440 | format AMTINUS dollar20.2; 441 | label AMTINUS=amountinus 442 | BILLEDON=billedon 443 | INVNUM=invoicenum 444 | BILLEDTO=billedto; 445 | run; 446 | 447 | /*==========================*/ 448 | /* LIBNAME Sample 22 */ 449 | /*==========================*/ 450 | 451 | title 'Libname Sample 22: Interns Who Are Family Members of Employees'; 452 | 453 | 454 | proc sql; 455 | create view emp_csr as 456 | select * from mydblib.SAMDAT10 457 | where dept in ('CSR010', 'CSR011', 'CSR004'); 458 | 459 | select samdat13.LASTNAME, samdat13.FIRSTNAM, samdat13.EMPID, 460 | samdat13.FAMILYID, samdat13.GENDER, 461 | samdat13.DEPT, samdat13.HIREDATE 462 | from emp_csr, samples.samdat13 463 | where emp_csr.EMPID=samdat13.FAMILYID; 464 | 465 | quit; 466 | 467 | /*==========================*/ 468 | /* LIBNAME Sample 23 */ 469 | /*==========================*/ 470 | 471 | title 'Libname Sample 23: FedSql Dictionary Tables'; 472 | 473 | 474 | proc fedsql; 475 | select * from dictionary.tables where table_name='SAMDAT1'; 476 | create table work.flight as 477 | select st.flight,st.dates,st.orig, st.dest from mydblib.SAMDAT1 as st 478 | where dest='WAS'; 479 | quit; 480 | 481 | 482 | /*==========================*/ 483 | /* LIBNAME Sample 24 */ 484 | /*==========================*/ 485 | 486 | 487 | title 'Libname Sample 24: Passthru With Connect Using'; 488 | 489 | proc sql noerrorstop; 490 | connect using mydblib; 491 | execute ( create table SAMTEMP( col1 int,TAB1_C1 char(3), 492 | col2 int,col3 int 493 | ) ) by mydblib; 494 | execute ( insert into SAMTEMP values (101,'pup',103,104) 495 | ) by mydblib; 496 | quit; 497 | 498 | proc sql noerrorstop; 499 | connect using mydblib; 500 | select * from connection to mydblib 501 | ( select * from SAMTEMP ); 502 | quit; 503 | 504 | proc sql noerrorstop; 505 | connect using mydblib; 506 | execute ( drop table SAMTEMP ) by mydblib; 507 | quit; 508 | run; 509 | 510 | -------------------------------------------------------------------------------- /SAS Foundation/createLibname.sas: -------------------------------------------------------------------------------- 1 | /**********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: createLibname.sas */ 5 | /* TITLE: Autoexec used in creating and running samples. */ 6 | /* PRODUCT: SAS/ACCESS Software for Relational Databases */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS, LINUX */ 8 | /* DBMS: All Relational DBMS supported with ACCESS Products */ 9 | /* REF: SAS/ACCESS Software For Relational Databases: */ 10 | /* Reference, Version 9 */ 11 | /* NOTE: Edit this file to replace the ???????? with your site's */ 12 | /* DBMS-specific connection information. 
This autoexec */ 13 | /* should be called to run the createSampleData and test */ 14 | /* programs which create sample data and execute samples. */ 15 | /**********************************************************************/ 16 | 17 | /* included for Year-2000 compliance */ 18 | options yearcutoff=1925 ls=120 nodate; 19 | 20 | /* dbms(engine) name assignment */ 21 | 22 | %let dbms=????????; /* for example, Postgres */ 23 | 24 | /* Assign connection options */ 25 | /* */ 26 | /* For example, the POSTGRES connection string */ 27 | /* may look something like the following: */ 28 | /* */ 29 | /* %let CONNOPT=%str(user=???????? */ 30 | /* password=???????? */ 31 | /* server=???????? */ 32 | /* port=???????? */ 33 | /* database=????? */ 34 | /* ); */ 35 | /* */ 36 | /* Refer to the documentation for parameters specific to your */ 37 | /* database */ 38 | /* */ 39 | /* Aster, Oracle, Teradata, DB2 z/OS you can remove the */ 40 | /* connections options user= password= and path/database= completely */ 41 | /* allowing the default Authid and Database to be used, or use the */ 42 | /* following to specify the authid and database: */ 43 | /* authid=???????? and in='database ???????' */ 44 | /* */ 45 | /* *** For Asterdata you will need to specify dimension=yes as shown */ 46 | /* below. You would remove this for other database references */ 47 | /* */ 48 | /* *** For Greenplum you will need to specify delete_mult_rows=yes */ 49 | /* as shown below. You would remove this for other database */ 50 | /* */ 51 | /* *** For Salesforce, you will need to append a security token to */ 52 | /* your password. For further details and examples, see the */ 53 | /* sample file Salesforce/auth.sas */ 54 | 55 | %let CONNOPT=%str(?????????); 56 | 57 | /* edit the following statement to include the path */ 58 | /* to a local directory on the host where you will be */ 59 | /* executing SAS; this will be used as a SAS library to */ 60 | /* store SAS data sets and PROC SQL Views */ 61 | 62 | libname samples base '????????'; 63 | 64 | /* ISSUE LIBNAME STATEMENT WITH A COMBINATION OF THE ABOVE OPTIONS */ 65 | 66 | libname mydblib &dbms &CONNOPT; 67 | 68 | /* The following allows for reference of the database type */ 69 | /* (e.g. Postgres, MySQL, Hadoop, etc.) to allow for certain */ 70 | /* tests to be run conditionally against certain databases, */ 71 | /* but not others. */ 72 | 73 | proc sql noprint; 74 | select distinct engine into :enginename from dictionary.members 75 | where upcase(libname) = 'MYDBLIB'; 76 | quit; 77 | -------------------------------------------------------------------------------- /SAS Foundation/createTableAsSelect.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: createTableAsSelect.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS Software for Relational Databases */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec, createLibname, */ 10 | /* then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. 
Your may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. */ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | /*=========================*/ 21 | /* CTAS Sample 1 */ 22 | /*=========================*/ 23 | 24 | /* CLEAN UP */ 25 | proc delete data=mydblib.RDTAB78; run; 26 | proc delete data=mydblib.CRTAB78A; run; 27 | proc delete data=mydblib.CRTAB78B; run; 28 | 29 | 30 | /* CREATE DBMS TABLE */ 31 | data mydblib.RDTAB78; 32 | do x = 1 to 10; 33 | do y = 1 to 10; 34 | output; end; end; run; 35 | 36 | /*=================================================*/ 37 | /* DO NOT ATTEMPT CREATE-TABLE-AS-SELECT */ 38 | /* PASSDOWN (CTAS) */ 39 | /*=================================================*/ 40 | options nodbidirectexec; 41 | 42 | proc sql noerrorstop; 43 | 44 | 45 | create table mydblib.CRTAB78A as 46 | select y from mydblib.RDTAB78 47 | where x gt 5 order by y; 48 | 49 | /* WITH IP TRIGGER */ 50 | create table mydblib.CRTAB78B as 51 | select distinct y from mydblib.RDTAB78 52 | where x gt 5 order by y; 53 | 54 | quit; 55 | 56 | 57 | data work.noexeA; set mydblib.CRTAB78A; by y; run; 58 | data work.noexeB; set mydblib.CRTAB78B; by y; run; 59 | 60 | proc delete data=mydblib.CRTAB78A; run; 61 | proc delete data=mydblib.CRTAB78B; run; 62 | 63 | 64 | /*=================================================*/ 65 | /* ATTEMPT CREATE-TABLE-AS-SELECT PASSDOWN (CTAS) */ 66 | /*=================================================*/ 67 | 68 | options dbidirectexec; 69 | 70 | proc sql noerrorstop; 71 | 72 | create table mydblib.CRTAB78A as 73 | select y from mydblib.RDTAB78 74 | where x gt 5 order by y; 75 | 76 | 77 | 78 | create table mydblib.CRTAB78B as 79 | select distinct y from mydblib.RDTAB78 80 | where x gt 5 order by y; 81 | 82 | quit; 83 | 84 | 85 | 86 | data work.exeA; set mydblib.CRTAB78A; by y; run; 87 | data work.exeB; set mydblib.CRTAB78B; by y; run; 88 | 89 | /* DATASETS SHOULD COMPARE EQUAL */ 90 | proc compare base=work.noexeA 91 | comp=work.exeA 92 | error briefsummary note;run; 93 | 94 | proc compare base=work.noexeB 95 | comp=work.exeB 96 | error briefsummary note;run; 97 | -------------------------------------------------------------------------------- /SAS Foundation/procAppend.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: procAppend.sas */ 5 | /* TITLE: Sample Programs */ 6 | /* PRODUCT: SAS/ACCESS Software for Relational Databases */ 7 | /* SYSTEM: z/OS, UNIX, WINDOWS */ 8 | /* REF: SAS/ACCESS 9 for Relational Databases: Reference */ 9 | /* USAGE: Invoke SAS, submit the autoexec createLibname, */ 10 | /* then submit this program. */ 11 | /* NOTE: Some interfaces require that you add the SAS data set */ 12 | /* option, SASDATEFMT=, to the name of the DBMS view */ 13 | /* or table to have the output format correctly. */ 14 | /* NOTE: Some interfaces are case sensitive. Your may need to */ 15 | /* change the case of table or column names to comply */ 16 | /* with the requirements of your database. 
*/ 17 | /* */ 18 | /*********************************************************************/ 19 | 20 | /*================================*/ 21 | /* Test Errors Sample 1 */ 22 | /*================================*/ 23 | 24 | /* CLEAN UP */ 25 | proc delete data=mydblib.TRFORCE; 26 | proc delete data=mydblib.TRFORCE1; 27 | run; 28 | 29 | data work.new; 30 | c1='you'; output; 31 | c1='me'; output; 32 | run; 33 | 34 | /*=============================================*/ 35 | /* PASS: APPEND TO NEW TABLE USING DBFORCE=NO */ 36 | /*=============================================*/ 37 | 38 | proc append base=mydblib.TRFORCE( DBFORCE=NO ) 39 | data=work.new; 40 | run; 41 | 42 | title 'PASS: APPEND 2 ROWS'; 43 | proc print data=mydblib.TRFORCE; 44 | run; 45 | 46 | proc delete data=mydblib.TRFORCE; 47 | run; 48 | 49 | /*=============================================*/ 50 | /* PASS: APPEND TO NEW TABLE USING DBFORCE=YES */ 51 | /*=============================================*/ 52 | 53 | 54 | proc append base=mydblib.TRFORCE( DBFORCE=YES ) 55 | data=work.new; 56 | run; 57 | 58 | title 'PASS: APPEND 2 ROWS'; 59 | proc print data=mydblib.TRFORCE; 60 | run; 61 | 62 | /* CREATE NEW TABLE WITH EXPLICIT COL LENGTH) */ 63 | 64 | data mydblib.TRFORCE1( DBTYPE = (C1='varchar(2)')); 65 | c1='we'; output; 66 | c1='us'; output; 67 | run; 68 | 69 | /*=============================================*/ 70 | /* FAIL: APPEND FAILS DUE TO DIFFERENT COLUMN */ 71 | /* LENGTHS. SHOULD FAIL WITH ERROR MESSAGE */ 72 | /*=============================================*/ 73 | 74 | proc append base=mydblib.TRFORCE1( DBFORCE=NO ) 75 | data=work.new; 76 | run; 77 | 78 | title 'APPEND FAILS. NO ROWS ADDED'; 79 | proc print data=mydblib.TRFORCE1; 80 | run; 81 | -------------------------------------------------------------------------------- /SAS Viya/JDBC/AmazonAthena/README.md: -------------------------------------------------------------------------------- 1 | # Amazon Athena 2 | 3 | ## Introduction 4 | 5 | Amazon Athena is an interactive query service that makes it easy to analyze data directly in Amazon Simple Storage Service (Amazon S3) using standard SQL. With a few actions in the AWS Management Console, you can point Athena at your data stored in Amazon S3 and begin using standard SQL to run ad-hoc queries and get results in seconds. 6 | 7 | For more information, see [Getting started](https://docs.aws.amazon.com/athena/latest/ug/getting-started.html). 8 | 9 | Amazon Athena also makes it easy to interactively run data analytics using Apache Spark without having to plan for, configure, or manage resources. When you run Apache Spark applications on Athena, you submit Spark code for processing and receive the results directly. Use the simplified notebook experience in Amazon Athena console to develop Apache Spark applications using Python or [Athena notebook APIs](https://docs.aws.amazon.com/athena/latest/ug/notebooks-spark-api-list.html). 10 | 11 | For more information, see [Getting started with Apache Spark on Amazon Athena](https://docs.aws.amazon.com/athena/latest/ug/notebooks-spark-getting-started.html). 12 | 13 | Athena SQL and Apache Spark on Amazon Athena are serverless, so there is no infrastructure to set up or manage, and you pay only for the queries you run. Athena scales automatically—running queries in parallel—so results are fast, even with large datasets and complex queries. 
14 | 15 | ## Resources 16 | 17 | - [Amazon Athena Documentation](https://docs.aws.amazon.com/athena/) 18 | - [Amazon Athena User Guide](https://docs.aws.amazon.com/athena/latest/ug/what-is.html) 19 | - [SAS/ACCESS Interface to JDBC](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/acreldb/n1usgr00wc9cvln1gnyp1807qu17.htm) 20 | - [JDBC Data Connector](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/casref/n1ldk5vubre9oen10bdqoqkfc1y7.htm) 21 | 22 | ## Prerequisites 23 | 24 | Before you can connect to Amazon Athena through JDBC from SAS Viya, you need to have the following: 25 | 26 | 1. Access to an Amazon Athena database: You must have access to an Amazon Athena database with the appropriate credentials. 27 | 28 | 2. Amazon Athena JDBC driver: You must download and install the [Amazon Athena JDBC driver](https://downloads.athena.us-east-1.amazonaws.com/drivers/JDBC/SimbaAthenaJDBC-2.1.5.1001/AthenaJDBC42-2.1.5.1001.jar). 29 | 30 | 3. Working installation of SAS Viya. 31 | 32 | **To install the JDBC driver:** 33 | 34 | 1. Download the [Amazon Athena JDBC driver](https://downloads.athena.us-east-1.amazonaws.com/drivers/JDBC/SimbaAthenaJDBC-2.1.5.1001/AthenaJDBC42-2.1.5.1001.jar). 35 | 36 | 2. Unzip the JDBC driver into a location on the SAS Viya server. 37 | 38 | ## Overview 39 | 40 | | Amazon Athena | | 41 | | ------------------- | --------------------------------------------------------------------------------- | 42 | | **Company URL** | https://aws.amazon.com/athena/ | 43 | | **JDBC JAR File** | [AthenaJDBC42-2.1.5.1001.jar](https://downloads.athena.us-east-1.amazonaws.com/drivers/JDBC/SimbaAthenaJDBC-2.1.5.1001/AthenaJDBC42-2.1.5.1001.jar) | 44 | | **JDBC URL Syntax** | jdbc:awsathena://AwsRegion=;S3OutputLocation=;AwsCredentialsProviderClass=com.simba.athena.amazonaws.auth.profile.ProfileCredentialsProvider;AwsCredentialsProviderArguments=default | 45 | | **Default Port** | 443 | 46 | | **JDBC Class** | com.simba.athena.jdbc.Driver | 47 | 48 | ## Setting up the connection 49 | 50 | This section provides step-by-step instructions on how to set up the connection to Amazon Athena using JDBC from SAS Viya Compute or CAS. 51 | 52 | - SAS Compute Library creation 53 | 54 | ```sas 55 | libname mylib sasiojdb 56 | URL="jdbc:awsathena://AwsRegion=;S3OutputLocation=;AwsCredentialsProviderClass=com.simba.athena.amazonaws.auth.profile.ProfileCredentialsProvider;AwsCredentialsProviderArguments=default" 57 | schema="myschema"; 58 | ``` 59 | 60 | - CAS Library creation 61 | 62 | ```sas 63 | caslib mycaslib desc='JDBC Caslib' 64 | dataSource=(srctype='jdbc', 65 | url="jdbc:awsathena://AwsRegion=;S3OutputLocation=;AwsCredentialsProviderClass=com.simba.athena.amazonaws.auth.profile.ProfileCredentialsProvider;AwsCredentialsProviderArguments=default", 66 | schema="myschema"); 67 | ``` 68 | 69 | ## Smoke Testing 70 | 71 | This section explains how to perform a smoke test to ensure that the connection is working correctly. A smoke test is a quick and simple test that verifies that the basic functionality of the connection is working as expected. 
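A quick way to confirm that the driver, URL, and credentials line up is a read-only check against the library defined above. The sketch below assumes the `mylib` libref from the previous section and a hypothetical table named `mytable`; substitute any table that exists in your Athena schema before running it.

```sas
/* List the tables that the Athena library exposes */
proc datasets lib=mylib;
quit;

/* Pull a handful of rows from one table to confirm read access */
proc sql outobs=5;
   select * from mylib.mytable;
quit;
```

The checks in the following table cover the same ground in more depth.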
72 | 73 | | [**Table Creation Tests**](..#table-creation-tests) | | | 74 | | ------------------------------------- | -------------------- | --- | 75 | | | Create Airline Table | | 76 | | | Create Cars Table | | 77 | | [**Proc SQL Tests**](..#proc-sql-tests) | | | 78 | | | Create Table | | 79 | | | Update Table | | 80 | | | Delete Table | | 81 | | | Insert into Table | | 82 | | | Drop Table | | 83 | | [**Implicit SQL Test**](..#implicit-sql-tests) | | | 84 | | [**FEDSql Test**](..#fedsql-test) | | | 85 | | **Information Catalog Crawler Agent** | | | 86 | 87 | ## Troubleshooting 88 | 89 | This section provides guidance on how to troubleshoot common issues that may arise when connecting to Amazon Athena through JDBC from SAS Studio on SAS Viya. This may include issues with the JDBC driver, connectivity issues, or issues with data access and retrieval. 90 | 91 | You can run the following code for getting full detailed log. 92 | 93 | ```sas 94 | options sastrace=',,,d' sastraceloc=saslog nostsuffix msglevel=i 95 | linesize=132 pagesize=max validvarname=any validmemname=extend noquotelenmax; 96 | ``` 97 | 98 | You can refer to the [Troubleshooting](https://downloads.athena.us-east-1.amazonaws.com/drivers/JDBC/SimbaAthenaJDBC-2.1.5.1000/docs/Simba+Amazon+Athena+JDBC+Connector+Install+and+Configuration+Guide.pdf) section in the Amazon Athena documentation 99 | -------------------------------------------------------------------------------- /SAS Viya/JDBC/DataVirtuality/README.md: -------------------------------------------------------------------------------- 1 | # Data Virtuality DB 2 | 3 | ## Introduction 4 | 5 | Data Virtuality is a data integration and management platform for instant data access, easy data centralization, and data governance. It empowers companies to get fast and direct insights from scattered data. By uniquely combining data virtualization and data replication, Data Virtuality Platform provides data teams the flexibility to always choose the right method for the specific requirement. It is an enabler for Data Fabric and Data Mesh by providing the self-service capabilities and data governance features that are indispensable for these frameworks. Enterprises around the world, such as BSH, PGGM, PartnerRe, Crédit Agricole, and Vontobel use the Data Virtuality Platform to build modern data architectures that meet today’s and tomorrow’s business requirements. 6 | 7 | ## Resources 8 | 9 | - [Data Virtuality Documentation](https://datavirtuality.com/en/docs-and-support/) 10 | - [Data Virtuality Support Portal](https://support.datavirtuality.com/hc/en-us) 11 | - [Data Virtuality JDBC Driver Installation and Connection](https://docs.datavirtuality.com/v3/jdbc-driver-installation-and-connection) 12 | - [JDBC Data Connector](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/casref/n1ldk5vubre9oen10bdqoqkfc1y7.htm) 13 | - [SAS/ACCESS Interface to JDBC](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/acreldb/n1usgr00wc9cvln1gnyp1807qu17.htm) 14 | 15 | ## Prerequisites 16 | 17 | Before you can connect to Data Virtuality through JDBC from SAS Viya, you need to have the following: 18 | 19 | 1. Access to a Data Virtuality platform: You must have access to a Data Virtuality instance with the appropriate credentials. 20 | 21 | 2. Data Virtuality JDBC driver: You must download and install the [Data Virtuality JDBC driver](https://datavirtuality.com/en/download-driver/). 22 | 23 | 3. Working installation of SAS Viya. 24 | 25 | **To install the JDBC driver:** 26 | 27 | 1. 
Download the [Data Virtuality JDBC driver](https://datavirtuality.com/en/download-driver/). 28 | 29 | 2. Install the JDBC driver according to the instructions provided by Data Virtuality. 30 | 31 | ## Overview 32 | 33 | | Data Virtuality | | 34 | | -------------------- | --------------------------------------------------------------------------------- | 35 | | **Company URL** | [https://datavirtuality.com/en](https://datavirtuality.com/en) | 36 | | **JDBC JAR File Path** | [datavirtuality-jdbc_4.0.3.jar](https://datavirtuality.com/wp-content/uploads/2023/07/datavirtuality-jdbc_4.0.3.jar) | 37 | | **JDBC URL Syntax** | jdbc:datavirtuality:datavirtuality@mms:// | 38 | | **Default Port** | 45012 | 39 | | **JDBC Driver Class Name** | com.Data Virtuality.vdp.jdbc.Driver | 40 | ## Setting up the connection 41 | 42 | This section provides step-by-step instructions on how to set up the connection to Data Virtuality using JDBC from SAS Viya Compute or CAS. 43 | 44 | - SAS Compute Library creation 45 | 46 | ```sas 47 | libname mylib jdbc 48 | driverclass="com.datavirtuality.dv.jdbc.Driver" 49 | url="jdbc:datavirtuality:datavirtuality@mms://:45012" 50 | schema="myschema" 51 | user="myuser" 52 | password="mypw" 53 | preserve_tab_names=yes 54 | preserve_col_names=yes; 55 | /*materialized=yes; //Allows for permanment table creation */ 56 | ``` 57 | 58 | - CAS Library creation 59 | 60 | ```sas 61 | cas; 62 | caslib dvcas desc='JDBC Caslib' 63 | dataSource=(srctype='jdbc', 64 | driverclass="com.datavirtuality.dv.jdbc.Driver", 65 | url="jdbc:datavirtuality:datavirtuality@mms://:45012", 66 | schema="myschema", 67 | user="myuser", 68 | password="mypw"); 69 | caslib _all_ assign; 70 | ``` 71 | `` 72 | ## Smoke Testing 73 | 74 | This section explains how to perform a smoke test to ensure that the connection is working correctly. A smoke test is a quick and simple test that verifies that the basic functionality of the connection is working as expected. 75 | 76 | | Smoke Tests | | | 77 | | ------------------------------------- | -------------------- | --- | 78 | | [**Table Creation Tests**](..#table-creation-tests) | | | 79 | | | Create Airline Table | ✓ | 80 | | | Create Cars Table | ✓ | 81 | | [**Proc SQL Tests**](..#proc-sql-tests) | | | 82 | | | Create Table | ✓ | 83 | | | Update Table | ✓ | 84 | | | Delete Table | ✓ | 85 | | | Insert into Table | ✓ | 86 | | | Drop Table | ✓ | 87 | | [**Implicit SQL Test**](..#implicit-sql-tests) | | ✓ | 88 | | [**FEDSql Test**](..#fedsql-test) | | ✓ | 89 | | **Information Catalog Crawler Agent** | | | 90 | | | CAS Library Based Discovery Agent | ✓ | 91 | | | SAS Compute Library Based Discovery Agent | ✓ | 92 | ## Troubleshooting 93 | 94 | This section provides guidance on how to troubleshoot common issues that may arise when connecting to Data Virtuality through JDBC from SAS Studio on SAS Viya. This may include issues with the JDBC driver, connectivity issues, or issues with data access and retrieval. 95 | 96 | You can run the following code for getting full detailed log. 97 | 98 | ```sas 99 | options sastrace=',,,d' sastraceloc=saslog nostsuffix msglevel=i 100 | linesize=132 pagesize=max validvarname=any validmemname=extend noquotelenmax; 101 | ``` 102 | 103 | You can refer to the [Data Virtuality Support](https://support.datavirtuality.com/hc/en-us) section in the Data Virtuality documentation & support. 
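When implicit SQL through the libref misbehaves, explicit pass-through can help separate driver problems from SAS query generation. The following is a minimal sketch, assuming the `mylib` libref from the connection section above; the probe query is intentionally trivial and may need adjusting to the platform's SQL dialect.

```sas
/* Explicit pass-through probe. Assumes the mylib libref defined earlier. */
proc sql;
   connect using mylib as dvcon;
   select * from connection to dvcon
      (select 1 as ping);        /* trivial query executed by Data Virtuality itself */
   disconnect from dvcon;
quit;
```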
-------------------------------------------------------------------------------- /SAS Viya/JDBC/Denodo/README.md: -------------------------------------------------------------------------------- 1 | # Denodo DB 2 | 3 | ## Introduction 4 | 5 | Denodo is a leading data virtualization platform that provides comprehensive data integration and data modeling capabilities across disparate systems. It centralizes the [security and governance of the unified data](https://community.denodo.com/docs/html/browse/8.0/en/), enabling business users to access it in real-time. Some of its most notable features include its ability to handle [large data volumes](https://hkrtrainings.com/denodo-platform) and its [advanced query optimizer](https://hkrtrainings.com/denodo-platform). 6 | 7 | ## Resources 8 | 9 | - [Denodo 8.0 Documentation](https://community.denodo.com/docs/html/browse/8.0/en/) 10 | - [Denodo Support Portal](https://support.denodo.com/) 11 | - [JDBC Data Connector](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/casref/n1ldk5vubre9oen10bdqoqkfc1y7.htm) 12 | - [SAS/ACCESS Interface to JDBC](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/acreldb/n1usgr00wc9cvln1gnyp1807qu17.htm) 13 | 14 | ## Prerequisites 15 | 16 | Before you can connect to Denodo through JDBC from SAS Viya, you need to have the following: 17 | 18 | 1. Access to a Denodo virtual database: You must have access to a Denodo virtual database with the appropriate credentials. 19 | 20 | 2. Denodo JDBC driver: You must download and install the [Denodo JDBC driver](https://community.denodo.com/drivers/jdbc/8.0). 21 | 22 | 3. Working installation of SAS Viya. 23 | 24 | **To install the JDBC driver:** 25 | 26 | 1. Download the [Denodo JDBC driver](https://community.denodo.com/drivers/jdbc/8.0). 27 | 28 | 2. Install the JDBC driver according to the instructions provided by Denodo. 29 | 30 | ## Overview 31 | 32 | | Denodo | | 33 | | -------------------- | --------------------------------------------------------------------------------- | 34 | | **Company URL** | https://denodo.com | 35 | | **JDBC JAR File Path** | [/tools/client-drivers/jdbc/vdp-jdbcdriver-core/denodo-vdp-jdbcdriver.jar](https://community.denodo.com/docs/html/browse/8.0/en/vdp/developer/access_through_jdbc/access_through_jdbc) | 36 | | **JDBC URL Syntax** | jdbc:denodo://<hostName>:<port>/<databaseName>[?<paramName>=<paramValue> [<paramName>=<paramValue>]*] | 37 | | **Default Port** | [9999 and 9997](https://community.denodo.com/docs/html/browse/7.0/platform/installation/appendix/default_ports_used_by_the_denodo_platform_modules/default_ports_used_by_the_denodo_platform_modules) | 38 | | **JDBC Driver Class Name** | com.denodo.vdp.jdbc.Driver | 39 | ## Setting up the connection 40 | 41 | This section provides step-by-step instructions on how to set up the connection to Denodo using JDBC from SAS Viya Compute or CAS.
42 | 43 | - SAS Compute Library creation 44 | 45 | ```sas 46 | libname mylib jdbc 47 | driverclass="com.denodo.vdb.jdbcdriver.VDBJDBCDriver" 48 | URL="jdbc:denodo://:9999/?ssl=true" 49 | schema="default" 50 | user="myuser" 51 | password="mypw"; 52 | /*materialized=yes; //Allows for permanment table creation */ 53 | ``` 54 | 55 | - CAS Library creation 56 | 57 | ```sas 58 | caslib mycaslib desc='JDBC Caslib' 59 | dataSource=(srctype='jdbc', 60 | driverclass="com.denodo.vdb.jdbcdriver.VDBJDBCDriver", 61 | url="jdbc:denodo://:9999/?ssl=true", 62 | schema="default" 63 | user="myuser", 64 | password="mypw"); 65 | ``` 66 | `` 67 | ## Smoke Testing 68 | 69 | This section explains how to perform a smoke test to ensure that the connection is working correctly. A smoke test is a quick and simple test that verifies that the basic functionality of the connection is working as expected. 70 | | Smoke Tests | | | 71 | | ------------------------------------- | -------------------- | --- | 72 | | [**Table Creation Tests**](..#table-creation-tests) | | | 73 | | | Create Airline Table | ✓ | 74 | | | Create Cars Table | ✓ | 75 | | [**Proc SQL Tests**](..#proc-sql-tests) | | | 76 | | | Create Table | ✓ | 77 | | | Update Table | X (Expected) | 78 | | | Delete Table | X (Expected) | 79 | | | Insert into Table | ✓ | 80 | | | Drop Table | ✓ | 81 | | [**Implicit SQL Test**](..#implicit-sql-tests) | * | ✓ | 82 | | [**FEDSql Test**](..#fedsql-test) | * | X MYLIB.CLASS does not exist | 83 | | **Information Catalog Crawler Agent** | * | N/A | 84 | 85 | ## Troubleshooting 86 | 87 | This section provides guidance on how to troubleshoot common issues that may arise when connecting to Denodo through JDBC from SAS Studio on SAS Viya. This may include issues with the JDBC driver, connectivity issues, or issues with data access and retrieval. 88 | 89 | You can run the following code for getting full detailed log. 90 | 91 | ```sas 92 | options sastrace=',,,d' sastraceloc=saslog nostsuffix msglevel=i 93 | linesize=132 pagesize=max validvarname=any validmemname=extend noquotelenmax; 94 | ``` 95 | 96 | You can refer to the [Troubleshooting](https://community.denodo.com/kb/en/category/Troubleshooting) section in the Denodo documentation -------------------------------------------------------------------------------- /SAS Viya/JDBC/DuckDB/README.md: -------------------------------------------------------------------------------- 1 | # DuckDB 2 | 3 | ## Introduction 4 | 5 | DuckDB is an open-source, in-process SQL database system designed for fast analytical queries on large datasets. It's optimized for single-node operation, making it ideal for data science and analytics tasks on laptops or servers without needing a separate database server. DuckDB supports SQL queries directly within applications and data pipelines, making it highly compatible with Python, R, and other data science tools. Its columnar storage format and vectorized execution make it efficient for handling complex queries on data stored in formats like Parquet and CSV, providing a lightweight, scalable alternative to heavier database systems. 
6 | 7 | ## Resources 8 | 9 | - [DuckDB Documentation](https://duckdb.org/docs/) 10 | - [DuckDB JDBC Driver Documentation](https://duckdb.org/docs/api/java) 11 | - [SAS JDBC Data Connector](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/casref/n1ldk5vubre9oen10bdqoqkfc1y7.htm) 12 | - [SAS/ACCESS Interface to JDBC](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/acreldb/n1usgr00wc9cvln1gnyp1807qu17.htm) 13 | 14 | ## Prerequisites 15 | 16 | Before you can connect to DuckDB through JDBC from SAS Viya, you need to have the following: 17 | 18 | 1. DuckDB JDBC driver: You must download and install the [DuckDB JDBC Driver](https://duckdb.org/docs/api/java). 19 | 2. Working installation of SAS Viya. 20 | 21 | **To install the JDBC driver:** 22 | 23 | 1. Download the [DuckDB JDBC Driver](https://duckdb.org/docs/api/java). 24 | 2. Install the JDBC driver according to the instructions provided by DuckDB. 25 | 26 | ## Overview 27 | 28 | 29 | | DuckDB | | 30 | | ---------------------------- | ------------------------------------------------------------------------------------------------------------ | 31 | | **Company URL** | [https://duckdb.org](https://duckdb.org) | 32 | | **JDBC JAR File Path** | [duckdb_jdbc-1.1.2.jar](https://repo1.maven.org/maven2/org/duckdb/duckdb_jdbc/1.1.2/duckdb_jdbc-1.1.2.jar) | 33 | | **JDBC URL Syntax** | jdbc:duckdb: | 34 | | **JDBC Driver Class Name** | org.duckdb.DuckDBDriver | 35 | 36 | ## Setting up the connection 37 | 38 | This section provides step-by-step instructions on how to set up the connection to DuckDB using JDBC from SAS Viya Compute or CAS. 39 | 40 | - SAS Compute Library creation 41 | 42 | ```sas 43 | libname mylib jdbc 44 | DRIVERCLASS="org.duckdb.DuckDBDriver" 45 | URL="jdbc:duckdb:/location/of/database" 46 | preserve_tab_names=yes 47 | preserve_col_names=yes 48 | dbmax_text=512; 49 | ``` 50 | 51 | - CAS Library creation 52 | 53 | ```sas 54 | cas; 55 | caslib mycaslib desc='JDBC Caslib' 56 | dataSource=(srctype='jdbc', 57 | driverclass="org.duckdb.DuckDBDriver", 58 | url="jdbc:duckdb:/location/of/database"); 59 | caslib _all_ assign; 60 | ``` 61 | 62 | 63 | 64 | ## Smoke Testing 65 | 66 | This section explains how to perform a smoke test to ensure that the connection is working correctly. A smoke test is a quick and simple test that verifies that the basic functionality of the connection is working as expected.
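Before reviewing the results below, a quick way to exercise the connection is to write a small SAS table through the libref and read it back. This is a minimal sketch, assuming the `mylib` libref defined above; `sashelp.class` is used only as convenient sample data. If character columns come back empty, that matches the behavior noted in the results table below.

```sas
/* Round-trip check through the DuckDB libref defined above. */
data mylib.class_smoke;                      /* write a small table into the DuckDB file */
   set sashelp.class;
run;

proc print data=mylib.class_smoke (obs=5);   /* read it back */
run;

proc delete data=mylib.class_smoke;          /* clean up */
run;
```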
67 | 68 | 69 | | Smoke Tests | | | 70 | | ----------------------------------------------------- | ------------------------------------------- | ------------------------------------------------------------------------- | 71 | | [**Table Creation Tests**](..#table-creation-tests) | | | 72 | | | Create Airline Table | Passed - Though Character fields appear to be null | 73 | | | Create Cars Table | Passed - Though Character fields appear to be null | 74 | | [**Proc SQL Tests**](..#proc-sql-tests) | | | 75 | | | Create Table | Passed | 76 | | | Update Table | Failed - Error - record level updates not supported by JDBC driver | 77 | | | Delete Table | Failed - Error - record level updates not supported by JDBC driverPasse | 78 | | | Insert into Table | Failed - Floating Point Overflow | 79 | | | Drop Table | Passed | 80 | | [**Implicit SQL Test**](..#implicit-sql-tests) | | Passed | 81 | | [**FEDSql Test**](..#fedsql-test) | | Passed | 82 | | **Information Catalog Crawler Agent** | | | 83 | | | CAS Library Based Discovery Agent | In progress | 84 | | | SAS Compute Library Based Discovery Agent | In progress | 85 | 86 | ## Troubleshooting 87 | 88 | This section provides guidance on how to troubleshoot common issues that may arise when connecting to DuckDB through JDBC from SAS Studio on SAS Viya. This may include issues with the JDBC driver, connectivity issues, or issues with data access and retrieval. 89 | 90 | You can run the following code for getting full detailed log. 91 | 92 | ```sas 93 | options sastrace=',,,d' sastraceloc=saslog nostsuffix msglevel=i 94 | linesize=132 pagesize=max validvarname=any validmemname=extend noquotelenmax; 95 | ``` 96 | 97 | You can refer to the [DuckDB Website](https://duckdb.org/) for documentation & support. 98 | 99 | In general testing has shown that when working with DuckDB use of explicit SQL has the best outcomes. for example when querying a parquet file stored on ADLS code that looks something like shown below will get good results. 
100 | 101 | libname myduckj jdbc url="jdbc:duckdb:/tmp/mydb" DRIVERCLASS="org.duckdb.DuckDBDriver" preserve_tab_names=yes preserve_col_names=yes dbmax_text=512; 102 | 103 | proc sql outobs=30; 104 | 105 | connect using myduckj as j_duck; 106 | 107 | execute by j_duck (set global extension_directory = '/tmp/duckdb'); 108 | 109 | execute by j_duck (set home_directory = '/tmp/mydb'); 110 | 111 | execute by j_duck (INSTALL azure); 112 | 113 | execute by j_duck (LOAD azure); 114 | 115 | execute by j_duck (SET azure_storage_connection_string = 'DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=mykey;EndpointSuffix=core.windows.net'); 116 | 117 | select * from connection to j_duck (select count(*) from 'azure://path/nyctiny.parquet'); 118 | 119 | execute by j_duck (DROP table IF EXISTS test); 120 | 121 | execute by j_duck ( CREATE TABLE test AS SELECT * FROM 'azure://path/nyctiny.parquet'); 122 | 123 | select * from connection to j_duck (select * from test); 124 | 125 | quit; 126 | -------------------------------------------------------------------------------- /SAS Viya/JDBC/README.md: -------------------------------------------------------------------------------- 1 | # 3rd Party JDBC Drivers Review 2 | 3 | ## Overview 4 | 5 | 6 | 7 | | Database | Smoke Testing Date | Driver Version | 8 | | --------------------------------- | ------------------ | -------------- | 9 | | [Amazon Athena](AmazonAthena/README.md) | In Progress | AthenaJDBC42-2.1.5.1001.jar | 10 | | [Data Virtuality](DataVirtuality/README.md) | 2023-10-19 | datavirtuality-jdbc_4.0.3.jar | 11 | | [Denodo](Denodo/README.md) | 2023-07-31 | denodo-vdp-jdbcdriver-8.0-update-20230301.jar | 12 | | [SQLite](SQLite/README.md) | In Progress | sqlite-jdbc-3.46.0.0.jar | 13 | | [SQream](SQream/README.md) | 2023-07-31 | sqream-jdbc-4.5.9 | 14 | | [Trino](Trino/README.md) | In progress | trino-jdbc-434.jar | 15 | | [Spanner](Spanner/README.md) | 2024-06-21 | google-cloud-spanner-jdbc-2.19.3-single-jar-with-dependencies.jar | 16 | 17 | 18 | ## Getting Started 19 | 20 | SAS/ACCESS Interface to JDBC includes SAS Data Connector to JDBC. SAS/ACCESS Interface to JDBC enables access to relational databases by means of SQL and the Java Database Connectivity (JDBC) API. A JDBC driver is required for the data source from which you want to access data. JDBC drivers are available from DBMS vendors and other third-party JDBC driver developers. 21 | 22 | - [SAS/ACCESS Interface to JDBC](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/acreldb/p1soj7yuc1975yn1nndc3s11dhsv.htm) 23 | 24 | - [Support for JDBC on the SAS Viya Platform](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/acreldb/p1soj7yuc1975yn1nndc3s11dhsv.htm) 25 | 26 | - [JDBC Data Connector](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/casref/n1ldk5vubre9oen10bdqoqkfc1y7.htm) 27 | 28 | ## Smoke Testing 29 | 30 | This section explains how to perform a smoke test to ensure that the connection is working correctly. A smoke test is a quick and simple test that verifies that the basic functionality of the connection is working as expected. 
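The tests in the following subsections read and write through a libref named `mylib` without defining it. The sketch below shows the kind of assignment they assume; the driver class, URL, and credentials are placeholders that should be replaced with the values from the database-specific pages above.

```sas
/* Placeholder libref assumed by the smoke tests below.                 */
/* Replace the driver class, URL, and credentials for your data source. */
libname mylib jdbc
   driverclass="com.example.jdbc.Driver"
   url="jdbc:example://myhost:1234/mydatabase"
   schema="myschema"
   user="myuser"
   password="mypw";
```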
31 | 32 | ### Table Creation Tests 33 | 34 | - Create Airline Table 35 | 36 | ```sas 37 | proc delete data=mylib.airline; 38 | run; 39 | data mylib.airline; set sashelp.airline; 40 | run; 41 | ``` 42 | 43 | - Create Class Table 44 | 45 | ```sas 46 | proc delete data=mylib.class; 47 | run; 48 | data mylib.class; set sashelp.class; 49 | run; 50 | ``` 51 | 52 | ### Proc SQL Tests 53 | 54 | - Create Table 55 | 56 | ```sas 57 | proc delete data=mylib.class2; 58 | run; 59 | proc sql; 60 | create table mylib.class2 as select * from mylib.class; 61 | quit; 62 | ``` 63 | 64 | - Update Table 65 | 66 | ```sas 67 | proc sql; 68 | update mylib.class set name='billyb' where name='Alfred'; 69 | quit; 70 | ``` 71 | 72 | - Delete Table 73 | 74 | ```sas 75 | proc sql; 76 | delete from mylib.class where name = 'Alfred'; 77 | quit; 78 | ``` 79 | 80 | - Insert into Table 81 | 82 | ```sas 83 | proc sql; 84 | insert into mylib.class (name) values ('ted'); 85 | quit; 86 | proc delete data=mylib.class2 mylib.class; 87 | run; 88 | ``` 89 | 90 | - Drop Table 91 | 92 | ```sas 93 | /* expect table not there, common problem */ 94 | /* an ERROR message means table not found wasn’t detected */ 95 | proc sql; 96 | drop table mylib.nosuch; 97 | quit; 98 | ``` 99 | 100 | ### Implicit SQL Tests 101 | 102 | ```sas 103 | data mylib.class; 104 | set sashelp.class; 105 | run; 106 | 107 | proc sql; 108 | select age **2 from mylib.class; 109 | quit; 110 | 111 | proc sql; 112 | select distinct age **2 from mylib.class; 113 | quit; 114 | ``` 115 | 116 | ### FedSQL Test 117 | 118 | Simple Proc fesql demonstrates LIBNAME options conveyed from LIBNAME to TKTS. 119 | 120 | ```sas 121 | /* IPTRACE shows the generated query */ 122 | options msglevel=n; 123 | 124 | proc fedsql iptrace; 125 | select * from mylib.class; 126 | quit; 127 | ``` 128 | -------------------------------------------------------------------------------- /SAS Viya/JDBC/SQLite/README.md: -------------------------------------------------------------------------------- 1 | # SQLite 2 | 3 | ## Introduction 4 | 5 | SQLite is an in-process library that implements a [self-contained](https://www.sqlite.org/selfcontained.html), [serverless](https://www.sqlite.org/serverless.html), [zero-configuration](https://www.sqlite.org/zeroconf.html), [transactional](https://www.sqlite.org/transactional.html) SQL database engine. The code for SQLite is in the [public domain](https://www.sqlite.org/copyright.html) and is thus free for use for any purpose, commercial or private. SQLite is the [most widely deployed](https://www.sqlite.org/mostdeployed.html) database in the world with more applications than we can count, including several [high-profile projects.](https://www.sqlite.org/famous.html) 6 | 7 | SQLite is an embedded SQL database engine. Unlike most other SQL databases, SQLite does not have a separate server process. SQLite reads and writes directly to ordinary disk files. A complete SQL database with multiple tables, indices, triggers, and views, is contained in a single disk file. The database [file format](https://www.sqlite.org/fileformat2.html) is cross-platform - you can freely copy a database between 32-bit and 64-bit systems or between [big-endian](http://en.wikipedia.org/wiki/Endianness) and [little-endian](http://en.wikipedia.org/wiki/Endianness) architectures. These features make SQLite a popular choice as an [Application File Format](https://www.sqlite.org/appfileformat.html). 
SQLite database files are a [recommended storage format](https://www.sqlite.org/locrsf.html) by the US Library of Congress. Think of SQLite not as a replacement for [Oracle](http://www.oracle.com/database/index.html) but as a replacement for [fopen()](http://man.he.net/man3/fopen) 8 | 9 | SQLite is a compact library. With all features enabled, the [library size](https://www.sqlite.org/footprint.html) can be less than 750KiB, depending on the target platform and compiler optimization settings. (64-bit code is larger. And some compiler optimizations such as aggressive function inlining and loop unrolling can cause the object code to be much larger.) There is a tradeoff between memory usage and speed. SQLite generally runs faster the more memory you give it. Nevertheless, performance is usually quite good even in low-memory environments. Depending on how it is used, SQLite can be [faster than direct filesystem I/O](https://www.sqlite.org/fasterthanfs.html). 10 | 11 | SQLite is [very carefully tested](https://www.sqlite.org/testing.html) prior to every release and has a reputation for being very reliable. Most of the SQLite source code is devoted purely to testing and verification. An automated test suite runs millions and millions of test cases involving hundreds of millions of individual SQL statements and achieves [100% branch test coverage](https://www.sqlite.org/testing.html#coverage). SQLite responds gracefully to memory allocation failures and disk I/O errors. Transactions are [ACID](http://en.wikipedia.org/wiki/ACID) even if interrupted by system crashes or power failures. All of this is verified by the automated tests using special test harnesses which simulate system failures. Of course, even with all this testing, there are still bugs. But unlike some similar projects (especially commercial competitors) SQLite is open and honest about all bugs and provides [bugs lists](https://www.sqlite.org/src/rptview?rn=1) and minute-by-minute [chronologies](https://www.sqlite.org/src/timeline) of code changes. 12 | 13 | The SQLite code base is supported by an [international team](https://www.sqlite.org/crew.html) of developers who work on SQLite full-time. The developers continue to expand the capabilities of SQLite and enhance its reliability and performance while maintaining backwards compatibility with the [published interface spec](https://www.sqlite.org/c3ref/intro.html), [SQL syntax](https://www.sqlite.org/lang.html), and database [file format](https://www.sqlite.org/fileformat2.html). The source code is absolutely free to anybody who wants it, but [professional support](https://www.sqlite.org/prosupport.html) is also available. 14 | 15 | The SQLite project was started on [2000-05-09](https://www.sqlite.org/src/timeline?c=2000-05-29+14:26:00). The future is always hard to predict, but the intent of the developers is to support SQLite through the year 2050. Design decisions are made with that objective in mind. 16 | 17 | We the developers hope that you find SQLite useful and we entreat you to use it well: to make good and beautiful products that are fast, reliable, and simple to use. Seek forgiveness for yourself as you forgive others. And just as you have received SQLite for free, so also freely give, paying the debt forward. 
18 | 19 | 20 | ## Resources 21 | 22 | - [SQLite Official Web Site](https://www.sqlite.org/index.html) 23 | - [SQLite Documentation](https://www.sqlite.org/docs.html) 24 | - [SAS/ACCESS Interface to JDBC](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/acreldb/n1usgr00wc9cvln1gnyp1807qu17.htm) 25 | - [JDBC Data Connector](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/casref/n1ldk5vubre9oen10bdqoqkfc1y7.htm) 26 | 27 | ## Prerequisites 28 | 29 | Before you can connect to SQLite through JDBC from SAS Viya, you need to have the following: 30 | 31 | 1. Access to a SQLite database: You must have access to a SQLite database with the appropriate credentials. 32 | 33 | 2. SQLite JDBC driver: You must download and install the [SQLite JDBC driver](https://github.com/xerial/sqlite-jdbc/releases/download/3.46.0.0/sqlite-jdbc-3.46.0.0.jar). 34 | 35 | 3. Working installation of SAS Viya 36 | 37 | **To install the JDBC driver:** 38 | 39 | 1. Download the [SQLite JDBC driver](https://github.com/xerial/sqlite-jdbc/releases/download/3.46.0.0/sqlite-jdbc-3.46.0.0.jar) 40 | 41 | 2. Unzip the JDBC driver into a location on the SAS Viya server. 42 | 43 | ## Overview 44 | 45 | | SQLite | | 46 | | ------------------- | --------------------------------------------------------------------------------- | 47 | | **Company URL** | https://www.sqlite.org | 48 | | **JDBC JAR File** | [sqlite-jdbc-3.46.0.0](https://github.com/xerial/sqlite-jdbc/releases/download/3.46.0.0/sqlite-jdbc-3.46.0.0.jar) | 49 | | **JDBC URL Syntax** | jdbc:sqlite:// | 50 | | **JDBC Class** | org.sqlite.JDBC | 51 | 52 | ## Setting up the connection 53 | 54 | This section provides step-by-step instructions on how to set up the connection to SQLite using JDBC from SAS Viya Compute or CAS. 55 | 56 | - SAS Compute Library creation 57 | 58 | ```sas 59 | libname mylib sasiojdb 60 | URL="jdbc:sqlite://" 61 | preserve_names=yes; 62 | ``` 63 | 64 | - CAS Library creation 65 | 66 | ```sas 67 | caslib mycaslib desc='JDBC Caslib' 68 | dataSource=(srctype='jdbc', 69 | url="jdbc:sqlite://"); 70 | ``` 71 | 72 | ## Smoke Testing 73 | 74 | This section explains how to perform a smoke test to ensure that the connection is working correctly. A smoke test is a quick and simple test that verifies that the basic functionality of the connection is working as expected. 75 | 76 | | [**Table Creation Tests**](..#table-creation-tests) | | | 77 | | ------------------------------------- | -------------------- | --- | 78 | | | Create Airline Table | ✓ | 79 | | | Create Cars Table | ✓ | 80 | | [**Proc SQL Tests**](..#proc-sql-tests) | | | 81 | | | Create Table | ✓ | 82 | | | Update Table | Failed | 83 | | | Delete Table | ✓ | 84 | | | Insert into Table | ✓ | 85 | | | Drop Table | ✓ | 86 | | [**Implicit SQL Test**](..#implicit-sql-tests) | | ✓ | 87 | | [**FEDSql Test**](..#fedsql-test) | | Failed | 88 | | **Information Catalog Crawler Agent** | | ✓ | 89 | 90 | ## Troubleshooting 91 | 92 | This section provides guidance on how to troubleshoot common issues that may arise when connecting to SQLite through JDBC from SAS Studio on SAS Viya. This may include issues with the JDBC driver, connectivity issues, or issues with data access and retrieval. 93 | 94 | You can run the following code for getting full detailed log. 
95 | 96 | ```sas 97 | options sastrace=',,,d' sastraceloc=saslog nostsuffix msglevel=i 98 | linesize=132 pagesize=max validvarname=any validmemname=extend noquotelenmax; 99 | ``` 100 | -------------------------------------------------------------------------------- /SAS Viya/JDBC/SQream/README.md: -------------------------------------------------------------------------------- 1 | # SQream DB 2 | 3 | ## Introduction 4 | 5 | SQream DB is a high-performance columnar analytic SQL database management system designed for big data analytics. SQream DB supports regular SQL including [a substantial amount of ANSI SQL](https://docs.sqream.com/en/latest/reference/sql_feature_support.html#sql-feature-support), uses [serializable transactions](https://docs.sqream.com/en/latest/feature_guides/transactions.html#transactions), and [scales horizontally](https://docs.sqream.com/en/latest/feature_guides/concurrency_and_scaling_in_sqream.html#concurrency-and-scaling-in-sqream) for concurrent statements. Even a [basic SQream DB machine](https://docs.sqream.com/en/latest/getting_started/hardware_guide.html#hardware-guide) can support tens to hundreds of terabytes of data. 6 | 7 | ## Resources 8 | 9 | - [SQream DB Documentation](https://docs.sqream.com/en/latest/index.html) 10 | - [Connect to SQream Using SAS Viya](https://docs.sqream.com/en/latest/connecting_to_sqream/client_platforms/sas_viya.html) 11 | - [SQream’s support portal](https://sqream.atlassian.net/servicedesk/) 12 | - [SAS/ACCESS Interface to JDBC](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/acreldb/n1usgr00wc9cvln1gnyp1807qu17.htm) 13 | - [JDBC Data Connector](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/casref/n1ldk5vubre9oen10bdqoqkfc1y7.htm) 14 | 15 | ## Prerequisites 16 | 17 | Before you can connect to SQream through JDBC from SAS Viya, you need to have the following: 18 | 19 | 1. Access to a SQream database: You must have access to a SQream database with the appropriate credentials. 20 | 21 | 2. SQream JDBC driver: You must download and install the [SQream JDBC driver](https://sq-ftp-public.s3.amazonaws.com/sqream-jdbc-4.5.9.jar). 22 | 23 | 3. Working installation of SAS Viya 24 | 25 | **To install the JDBC driver:** 26 | 27 | 1. Download the [SQream JDBC driver](https://sq-ftp-public.s3.amazonaws.com/sqream-jdbc-4.5.9.jar) 28 | 29 | 2. Unzip the JDBC driver into a location on the SAS Viya server. 30 | 31 | SQream recommends creating the directory `/opt/sqream` on the SAS Viya server. 32 | 33 | ## Overview 34 | 35 | | SQream | | 36 | | ------------------- | --------------------------------------------------------------------------------- | 37 | | **Company URL** | https://sqream.com | 38 | | **JDBC JAR File** | [sqream-jdbc-4.5.9](https://sq-ftp-public.s3.amazonaws.com/sqream-jdbc-4.5.9.jar) | 39 | | **JDBC URL Syntax** | jdbc:Sqream://:/;cluster=true | 40 | | **Default Port** | 5000 (cluster=false), 3108 (cluster=true) | 41 | | **JDBC Class** | com.sqream.jdbc.SQDriver | 42 | 43 | ## Setting up the connection 44 | 45 | This section provides step-by-step instructions on how to set up the connection to SQream using JDBC from SAS Viya Compute or CAS. 
46 | 47 | - SAS Compute Library creation 48 | 49 | ```sas 50 | libname mylib sasiojdb 51 | URL="jdbc:Sqream://:5000/master;cluster=false" 52 | schema="myschema" 53 | user="myuser" 54 | password="mypw" 55 | preserve_names=yes; 56 | ``` 57 | 58 | - CAS Library creation 59 | 60 | ```sas 61 | caslib mycaslib desc='JDBC Caslib' 62 | dataSource=(srctype='jdbc', 63 | url="jdbc:Sqream://:5000/master;cluster=false", 64 | schema="myschema", 65 | user="myuser", 66 | password="mypw"); 67 | ``` 68 | 69 | ## Smoke Testing 70 | 71 | This section explains how to perform a smoke test to ensure that the connection is working correctly. A smoke test is a quick and simple test that verifies that the basic functionality of the connection is working as expected. 72 | 73 | | [**Table Creation Tests**](..#table-creation-tests) | | | 74 | | ------------------------------------- | -------------------- | --- | 75 | | | Create Airline Table | ✓ | 76 | | | Create Cars Table | ✓ | 77 | | [**Proc SQL Tests**](..#proc-sql-tests) | | | 78 | | | Create Table | ✓ | 79 | | | Update Table | ✓ | 80 | | | Delete Table | ✓ | 81 | | | Insert into Table | ✓ | 82 | | | Drop Table | ✓ | 83 | | [**Implicit SQL Test**](..#implicit-sql-tests) | | ✓ | 84 | | [**FEDSql Test**](..#fedsql-test) | | ✓ | 85 | | **Information Catalog Crawler Agent** | | ✓ | 86 | 87 | ## Troubleshooting 88 | 89 | This section provides guidance on how to troubleshoot common issues that may arise when connecting to SQream through JDBC from SAS Studio on SAS Viya. This may include issues with the JDBC driver, connectivity issues, or issues with data access and retrieval. 90 | 91 | You can run the following code for getting full detailed log. 92 | 93 | ```sas 94 | options sastrace=',,,d' sastraceloc=saslog nostsuffix msglevel=i 95 | linesize=132 pagesize=max validvarname=any validmemname=extend noquotelenmax; 96 | ``` 97 | 98 | You can refer to the [Troubleshooting](https://docs.sqream.com/en/latest/connecting_to_sqream/client_platforms/sas_viya.html#troubleshooting-sas-viya) section in the SQream documentation 99 | -------------------------------------------------------------------------------- /SAS Viya/JDBC/Spanner/images/franir_2024-06-19-13-58-48.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sassoftware/sas-access-samples/6e2fb148ee64aeda1e8e0a5d49689a0145afee13/SAS Viya/JDBC/Spanner/images/franir_2024-06-19-13-58-48.png -------------------------------------------------------------------------------- /SAS Viya/JDBC/Trino/README.md: -------------------------------------------------------------------------------- 1 | # Trino 2 | 3 | ## Introduction 4 | 5 | Trino is a tool designed to efficiently query vast amounts of data using distributed queries. If you work with terabytes or petabytes of data, you are likely using tools that interact with Hadoop and HDFS. Trino was designed as an alternative to tools that query HDFS using pipelines of MapReduce jobs, such as Hive or Pig, but Trino is not limited to accessing HDFS. Trino can be and has been extended to operate over different kinds of data sources, including traditional relational databases and other data sources such as Cassandra. 6 | Trino was designed to handle data warehousing and analytics: data analysis, aggregating large amounts of data and producing reports. These workloads are often classified as Online Analytical Processing (OLAP). 
7 | 8 | ## Resources 9 | 10 | - [Trino Documentation](https://trino.io/docs/current/client/jdbc.html) 11 | - [Trino JDBC Driver Installation and Connection](https://trino.io/docs/current/client/jdbc.html) 12 | - [SAS JDBC Data Connector](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/casref/n1ldk5vubre9oen10bdqoqkfc1y7.htm) 13 | - [SAS/ACCESS Interface to JDBC](https://go.documentation.sas.com/doc/en/pgmsascdc/v_038/acreldb/n1usgr00wc9cvln1gnyp1807qu17.htm) 14 | 15 | ## Prerequisites 16 | 17 | Before you can connect to Trino through JDBC from SAS Viya, you need to have the following: 18 | 19 | 1. Access to a Trino platform: You must have access to a Trino instance with the appropriate credentials. 20 | 21 | 2. Trino JDBC driver: You must download and install the [Trino JDBC driver](https://trino.io/docs/current/client/jdbc.html). 22 | 23 | 3. Working installation of SAS Viya. 24 | 25 | 4. Setup a connection to a data source in Trino (see doc at https://trino.io/docs/current/connector.html) 26 | 4.1 In Smoke tests we used Iceberg connector, with data stored on ADLS, and a hive metastore. 27 | 28 | **To install the JDBC driver:** 29 | 30 | 1. Download the [Trino JDBC driver](https://trino.io/docs/current/client/jdbc.html). 31 | 32 | 2. Install the JDBC driver according to the instructions provided by Trino. 33 | 34 | ## Overview 35 | 36 | | Trino | | 37 | | -------------------- | --------------------------------------------------------------------------------- | 38 | | **Company URL** | [https://trino.io/](https://trino.io/) | 39 | | **JDBC JAR File Path** | [trino-jdbc-434.jar](https://repo1.maven.org/maven2/io/trino/trino-jdbc/434/trino-jdbc-434.jar) | 40 | | **JDBC URL Syntax** | jdbc:trino://:8080/ | 41 | | **JDBC Driver Class Name** | io.trino.jdbc.TrinoDriver | 42 | ## Setting up the connection 43 | 44 | This section provides step-by-step instructions on how to set up the connection to Trino using JDBC from SAS Viya Compute or CAS. 45 | 46 | - SAS Compute Library creation 47 | 48 | ```sas 49 | libname mylib jdbc 50 | driverclass="io.trino.jdbc.TrinoDriver" 51 | URL="jdbc:trino://:8080//" 52 | user="myuser" 53 | preserve_tab_names=yes 54 | preserve_col_names=yes 55 | dbmax_text=512 56 | ``` 57 | 58 | - CAS Library creation 59 | 60 | ```sas 61 | cas; 62 | caslib mycaslib desc='JDBC Caslib' 63 | dataSource=(srctype='jdbc', 64 | driverclass="io.trino.jdbc.TrinoDriver", 65 | url="jdbc:trino://:8080//", 66 | user="myuser"); 67 | caslib _all_ assign; 68 | ``` 69 | `` 70 | ## Smoke Testing 71 | 72 | This section explains how to perform a smoke test to ensure that the connection is working correctly. A smoke test is a quick and simple test that verifies that the basic functionality of the connection is working as expected. 
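With the caslib from the previous section in place, a quick CAS-side check before running the full matrix below is to list the tables the caslib can see and load one of them into memory. This is a minimal sketch, assuming the `mycaslib` caslib defined above; `mytable` is a placeholder for a table in your Trino catalog and schema.

```sas
/* Quick CAS-side check using the mycaslib caslib defined above. */
/* "mytable" is a placeholder for an existing Trino table.       */
proc casutil;
   list files incaslib="mycaslib";               /* tables visible through Trino */
   load casdata="mytable" incaslib="mycaslib"
        casout="mytable_check" outcaslib="mycaslib" replace;
   list tables incaslib="mycaslib";              /* confirm the in-memory copy */
quit;
```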
73 | 74 | | Smoke Tests | | | 75 | | ------------------------------------- | -------------------- | --- | 76 | | [**Table Creation Tests**](..#table-creation-tests) | | | 77 | | | Create Airline Table | Passed - V.Slow | 78 | | | Create Cars Table | Passed | 79 | | [**Proc SQL Tests**](..#proc-sql-tests) | | | 80 | | | Create Table | Passed | 81 | | | Update Table | Failed - Error - record level updates not supported by JDBC driver | 82 | | | Delete Table | Passed | 83 | | | Insert into Table | Passed | 84 | | | Drop Table | Passed | 85 | | [**Implicit SQL Test**](..#implicit-sql-tests) | | Passed | 86 | | [**FEDSql Test**](..#fedsql-test) | | Passed | 87 | | **Information Catalog Crawler Agent** | | | 88 | | | CAS Library Based Discovery Agent | In progress | 89 | | | SAS Compute Library Based Discovery Agent | In progress | 90 | ## Troubleshooting 91 | 92 | This section provides guidance on how to troubleshoot common issues that may arise when connecting to Trino through JDBC from SAS Studio on SAS Viya. This may include issues with the JDBC driver, connectivity issues, or issues with data access and retrieval. 93 | 94 | You can run the following code for getting full detailed log. 95 | 96 | ```sas 97 | options sastrace=',,,d' sastraceloc=saslog nostsuffix msglevel=i 98 | linesize=132 pagesize=max validvarname=any validmemname=extend noquotelenmax; 99 | ``` 100 | 101 | You can refer to the [Trino Website](https://trino.io/) section in the Trino documentation & support. -------------------------------------------------------------------------------- /SAS Viya/JDBC/jdbcSmokeTesting_template.sasnb: -------------------------------------------------------------------------------- 1 | [{"kind":1,"language":"markdown","value":"# 3rd Party JDBC Drivers Smoke Testing with SAS/ACCESS Interface to JDBC","outputs":[]},{"kind":1,"language":"markdown","value":"This notebook explains how to perform a smoke test to ensure that a connection using SAS/ACCESS Interface to JDBC and 3rd party JDBC driver is working correctly. 
A smoke test is a quick and simple test that verifies that the basic functionality of the connection is working as expected.","outputs":[]},{"kind":1,"language":"markdown","value":"## Libraries Creation","outputs":[]},{"kind":1,"language":"markdown","value":"### SAS Compute Library Creation","outputs":[]},{"kind":2,"language":"sas","value":"options sastrace=\"d,,,\" sastraceloc=saslog nostsuffix ;\r\nlibname mylib jdbc\r\ndriverclass=\"\"\r\nURL=\"\"\r\nschema=\"schema\"\r\nuser=\"usr\"\r\npassword=\"pw\"\r\npreserve_tab_names=yes\r\npreserve_col_names=yes;\r\n/*materialized=yes; //Allows for permanment table creation */","outputs":[]},{"kind":1,"language":"markdown","value":"### CAS Library Creation","outputs":[]},{"kind":2,"language":"sas","value":"cas;\r\ncaslib mycaslib desc='JDBC Caslib'\r\n dataSource=(srctype='jdbc',\r\n driverclass=\"\",\r\n url=\"\",\r\n user=\"usr\",\r\n password=\"pw\",\r\n schema=\"schema\");\r\ncaslib _all_ assign;","outputs":[]},{"kind":1,"language":"markdown","value":"## Table Creation Tests","outputs":[]},{"kind":2,"language":"sas","value":"/* Create Table Airline */\r\nproc delete data=mylib.airline;\r\nrun;\r\n\r\ndata mylib.airline; set sashelp.airline;\r\nrun;","outputs":[]},{"kind":2,"language":"sas","value":"/* Create Table Class */\r\nproc delete data=mylib.class;\r\nrun;\r\n\r\ndata mylib.class; set sashelp.class;\r\nrun;","outputs":[]},{"kind":1,"language":"markdown","value":"## Proc SQL Tests","outputs":[]},{"kind":2,"language":"sas","value":"/* Delete Table Proc Delete */\r\nproc delete data=mylib.class2;\r\nrun;","outputs":[]},{"kind":2,"language":"sql","value":"/* Create Table */\r\ncreate table mylib.class2 as select * from mylib.class;","outputs":[]},{"kind":2,"language":"sql","value":"/* Update Table */\r\nupdate mylib.class set name='billyb' where name='Alfred';","outputs":[]},{"kind":2,"language":"sql","value":"/* Delete Table SQL */\r\ndelete from mylib.class where name = 'Alfred';","outputs":[]},{"kind":2,"language":"sql","value":"/* Insert into Table */\r\ninsert into mylib.class (name) values ('ted');","outputs":[]},{"kind":2,"language":"sas","value":"proc delete data=mylib.class2 mylib.class;\r\nrun; ","outputs":[]},{"kind":2,"language":"sql","value":"/* Drop Table */\r\n/* expect table not there, common problem */\r\n/* an ERROR message means table not found wasn’t detected */\r\ndrop table mylib.nosuch;","outputs":[]},{"kind":1,"language":"markdown","value":"## Implicit SQL Tests","outputs":[]},{"kind":2,"language":"sas","value":"/* Create Table Class */\r\ndata mylib.class;\r\n set sashelp.class;\r\nrun;","outputs":[]},{"kind":2,"language":"sql","value":"select age **2 from mylib.class;","outputs":[]},{"kind":2,"language":"sql","value":"select distinct age **2 from mylib.class;","outputs":[]},{"kind":1,"language":"markdown","value":"## FedSQL Test","outputs":[]},{"kind":2,"language":"sas","value":"/* IPTRACE shows the generated query */\r\noptions msglevel=n;\r\n\r\nproc fedsql iptrace;\r\n select * from MYLIB.class;\r\nquit;","outputs":[]},{"kind":1,"language":"markdown","value":"## Load Data to Caslib","outputs":[]},{"kind":2,"language":"sas","value":"/* Use the DATA Step to Load Data */\r\ndata mycaslib.cars; \r\n set sashelp.cars;\r\n run;","outputs":[]},{"kind":2,"language":"sas","value":"/* Use PROC CASUTIL to Load Data */\r\nproc casutil outcaslib=\"mycaslib\" ;\r\n load data=sashelp.heart replace ;\r\n run;","outputs":[]},{"kind":2,"language":"sas","value":"proc cas;\r\n fedsql.execdirect query='select count(*) from 
mycaslib.heart';\r\nquit;","outputs":[]}] -------------------------------------------------------------------------------- /SAS Viya/README.md: -------------------------------------------------------------------------------- 1 | # SAS/ACCESS Sample Files Index 2 | 3 | SAS/ACCESS sample files for SAS Data Connector on SAS Viya 4. 4 | 5 | ## Overview 6 | The SAS Data Connector rovides connection between SAS Cloud Analytic Services (CAS) and various data sources. 7 | It uses Database client on CAS controller/worker nodes to communicate to the Database, load data, and perform CAS actions. 8 | 9 | 10 | This directory contains Database-specific files for the SAS Data Connector Samples on SAS Viya 4. 11 | The sample programs will demonstrate the Read, Save, and Where functionalities, by connecting to a CAS Server, connecting 12 | to data sources, and performing specified actions, such as Load Table, Save Table, Column Info, etc. 13 | 14 | ## Generic files 15 | * sdcBulkloadSample.sas 16 | * sdcCasutilSample.sas 17 | * sdcFileInfoSample.sas 18 | * sdcColumnInfoSample.sas 19 | * sdcLoadSample.sas 20 | * sdcSaveSample.sas 21 | 22 | 23 | ## Instructions 24 | 1. Download the samples repository onto your local computer 25 | 2. Within the samples replaced the indicated fields with your specific desired datasource and database login credentials (username, password, server, database, etc.) and server host and port. 26 | 3. You are now ready to run the sample test programs. 27 | 28 | 29 | ## Supported Data Sources 30 | 31 | - Amazon Redshift 32 | - Google BigQuery 33 | - Greenplum 34 | - Hadoop 35 | - Impala 36 | - JDBC 37 | - MS SQL Server 38 | - MongoDB 39 | - MySQL 40 | - Netezza 41 | - ODBC 42 | - Oracle 43 | - PostgreSQL 44 | - Salesforce 45 | - SAP HANA 46 | - Snowflake 47 | - Teradata 48 | - Vertica 49 | - Yellowbrick 50 | -------------------------------------------------------------------------------- /SAS Viya/sdcBulkload.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: sdcBulkload_cf.sas */ 5 | /* TITLE: SAS Data Connector Sample Program: Bulkload/Bulkunload */ 6 | /* PRODUCT: SAS/ACCESS */ 7 | /* SYSTEM: UNIX */ 8 | /* REF: SAS Viya Programming Documentation: CAS User's Guide */ 9 | /* USAGE: Showcases basic CASUTIL statements with bulkload */ 10 | /* and bulkupload, in an end-to-end sample program */ 11 | /* NOTE: Some interfaces are case sensitive. You may need to */ 12 | /* change the case of table or column names to comply */ 13 | /* with the requirements of your database. 
*/ 14 | /* */ 15 | /*********************************************************************/ 16 | 17 | /* Specify the DBMS engine, for example: ODBC */ 18 | %let srctype = postgres; 19 | 20 | 21 | /*Specify connection parameters within a string 22 | 23 | Edit the below variables with the required DBMS-specific conection options 24 | Also, specify such variables within the CONNOPS string, as this will be used for the other sample programs 25 | 26 | Most datasources will require username, password, database, and schema/server 27 | Please refer to the connopts.xlsx file for specific connection requirements for each DBMS engine 28 | 29 | BigQuery: requires protject and cred_path parameters 30 | Hadoop: requires hadoopJarPath, hadoopConfigDir, and dtm parameters 31 | Impala: requires port and conopts parameters 32 | JDBC: requires url, class, and classpath parameters 33 | Salesforce: requires authendpoint and catalog parameters 34 | SAP Hana: requires instance and tabletype parameters 35 | Teradata: requires dataTransferMode parameter 36 | Oracle, Netezza, and MSSWL require catalog parameter 37 | 38 | */ 39 | %let username = ????????; 40 | %let password = ????????; 41 | %let database = ????????; 42 | %let server = ????????; 43 | 44 | %let cas_session = mysess; 45 | %let caslib_alias = datalib; 46 | 47 | %let CONNOPTS=%str(user="????????" 48 | pass="????????" 49 | database="????????" 50 | server="????????"); 51 | 52 | /* Connect to CAS using cashost and casport, optional in SAS Studio */ 53 | /* options cashost="????????" casport=????; */ 54 | 55 | 56 | /* Create cas session and cas libname statement*/ 57 | cas &cas_session; 58 | libname caslib cas sessref=&cas_session tag=""; 59 | 60 | 61 | /* Execute addCaslib action*/ 62 | proc cas; 63 | session &cas_session; 64 | action addCaslib caslib="&caslib_alias" 65 | datasource={ srctype="&srctype", 66 | username="&username", 67 | password="&password", 68 | database="&database", 69 | server="&server", 70 | bulkload=true, 71 | bulkunload=true} ; 72 | run; 73 | quit; 74 | 75 | 76 | /* Create sample load table in DBMS: using DBMS-specific Libname engine*/ 77 | libname gridlib &srctype &CONNOPTS; 78 | 79 | 80 | /* Create sample dataset*/ 81 | data bulkloadsample; 82 | input name $ age sex $ bdate mmddyy.; 83 | datalines; 84 | Amy 3 F 030185 85 | Bill 12 M 121277 86 | Charlie 35 M 010253 87 | David 19 M 101469 88 | Elinor 42 F 080845 89 | Pearl 78 F 051222 90 | Vera 96 F 101200 91 | Frank 24 M 092663 92 | Georgia 1 F 040687 93 | Henry 46 M 053042 94 | ; 95 | run; 96 | 97 | 98 | /* creates and verifies data in CAS */ 99 | data caslib.bulksample; 100 | set work.bulkloadsample; 101 | run; 102 | 103 | proc print data=caslib.bulksample; 104 | run; 105 | 106 | 107 | /* proc casutil contents for the table in CAS */ 108 | proc casutil; 109 | contents casdata="bulksample"; 110 | quit; 111 | 112 | 113 | /* save via bulkload and verify */ 114 | proc casutil; 115 | save casdata="bulksample" casout="bulksamplesave" replace; 116 | quit; 117 | 118 | proc print data=gridlib.bulksamplesave; 119 | run; 120 | 121 | 122 | /* load via bulkunload and verify */ 123 | proc casutil; 124 | load casdata="bulksamplesave" casout="bulksampleload"; 125 | list tables; 126 | quit; 127 | 128 | proc print data=caslib.bulksampleload; 129 | run; 130 | 131 | 132 | /* clean up sources and drop tables */ 133 | proc casutil; 134 | deletesource casdata="bulksamplesave"; 135 | droptable casdata="bulksample"; 136 | droptable casdata="bulksampleload"; 137 | quit; 138 | 139 | 140 | /* remove CAS session 
*/ 141 | cas &cas_session. terminate; 142 | -------------------------------------------------------------------------------- /SAS Viya/sdcCasutilSample.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: sdcCasutilSample.sas */ 5 | /* TITLE: SAS Data Connector Sample Program: CASUTIL */ 6 | /* PRODUCT: SAS/ACCESS */ 7 | /* SYSTEM: UNIX */ 8 | /* REF: SAS Viya Programming Documentation: CAS User's Guide */ 9 | /* USAGE: Showcases basic CASUTIL statements */ 10 | /* NOTE: Some interfaces are case sensitive. You may need to */ 11 | /* change the case of table or column names to comply */ 12 | /* with the requirements of your database. */ 13 | /* */ 14 | /*********************************************************************/ 15 | 16 | /* Specify the DBMS engine, for example: ODBC */ 17 | %let srctype=odbc; 18 | 19 | %let cas_session = mysess; 20 | %let caslib_alias = datalib; 21 | 22 | 23 | /*Specify connection parameters within a string 24 | 25 | Edit the below variables with the required DBMS-specific conection options 26 | Also, specify such variables within the CONNOPS string, as this will be used for the other sample programs 27 | 28 | Most datasources will require username, password, database, and schema/server 29 | Please refer to the connopts.xlsx file for specific connection requirements for each DBMS engine 30 | 31 | BigQuery: requires protject and cred_path parameters 32 | Hadoop: requires hadoopJarPath, hadoopConfigDir, and dtm parameters 33 | Impala: requires port and conopts parameters 34 | JDBC: requires url, class, and classpath parameters 35 | Salesforce: requires authendpoint and catalog parameters 36 | SAP Hana: requires instance and tabletype parameters 37 | Teradata: requires dataTransferMode parameter 38 | Oracle, Netezza, and MSSWL require catalog parameter 39 | 40 | */ 41 | 42 | %let username = ???????; 43 | %let password = ???????; 44 | %let database = ???????; 45 | %let schema = ???????; 46 | 47 | %let CONNOPTS=%str(user=??????? 48 | pwd=??????? 49 | dsn=???????); 50 | 51 | /* Connect to CAS using cashost and casport, optional in SAS Studio */ 52 | /* options cashost="????????" casport=????; */ 53 | 54 | 55 | /* Create a CAS Session */ 56 | cas &cas_session; 57 | 58 | 59 | /* Run a proc cas addCasLib action using the specific data connector options specified above */ 60 | proc cas; 61 | session &cas_session; 62 | action addCaslib caslib="&caslib_alias" 63 | datasource={ srctype="&srctype", 64 | user="&username", 65 | pass="&password", 66 | database="&database", 67 | schema="&schema", 68 | catalog="*"}; 69 | run; 70 | quit; 71 | 72 | 73 | /* Create in-memory tables in CAS using CAS Libname engine */ 74 | libname caslib cas sessref=&cas_session tag=""; 75 | 76 | data caslib.SAVESAMPLE; 77 | input FLIGHT $3. +5 DATES $7. +3 DEPART $5. +2 ORIG $3. 78 | +3 DEST $3. 
+7 MILES +6 BOARDED +6 CAPACITY; 79 | datalines; 80 | 114 01MAR98 7:10 LGA LAX 2475 172 210 81 | 202 01MAR98 10:43 LGA ORD 740 151 210 82 | 219 01MAR98 9:31 LGA LON 3442 198 250 83 | 622 01MAR98 12:19 LGA FRA 3857 207 250 84 | 132 01MAR98 15:35 LGA YYZ 366 115 178 85 | 271 01MAR98 13:17 LGA PAR 3635 138 250 86 | 302 01MAR98 20:22 LGA WAS 229 105 180 87 | 114 02MAR98 7:10 LGA LAX 2475 119 210 88 | 202 02MAR98 10:43 LGA ORD 740 120 210 89 | 219 02MAR98 9:31 LGA LON 3442 147 250 90 | 622 02MAR98 12:19 LGA FRA 3857 176 250 91 | 132 02MAR98 15:35 LGA YYZ 366 106 178 92 | ; 93 | run; 94 | 95 | 96 | /* Create sample load table (for CASUTIL LOAD) in DBMS: using DBMS-specific Libname engine*/ 97 | libname utilload &srctype &CONNOPTS; 98 | 99 | data utilload.UTILLOADSAMPLE; 100 | set caslib.SAVESAMPLE; 101 | run; 102 | 103 | 104 | /* Proc CASUTIL LOAD statement: loads entire table*/ 105 | proc casutil; 106 | LOAD data=utilload.UTILLOADSAMPLE 107 | casout="CASUTILLOAD" 108 | replace; 109 | quit; 110 | 111 | 112 | /* Verify loadtable using caslib libref with CAS Libname engine to print in-memory tables*/ 113 | libname caslib cas sessref=&cas_session tag=""; 114 | proc print data=caslib.CASUTILLOAD; 115 | run; 116 | 117 | 118 | /* Proc CASUTIL SAVE statement: saves entire table*/ 119 | proc casutil; 120 | SAVE casdata="SAVESAMPLE" 121 | casout="CASUTILSAVE" 122 | incaslib="&caslib_alias" 123 | replace; 124 | quit; 125 | 126 | 127 | /* Verify the results: print saved table in the datasource using Libname engine */ 128 | libname utilsave &srctype &CONNOPTS; 129 | proc print data=utilsave.CASUTILSAVE; 130 | run; 131 | 132 | 133 | /* Proc CASUTIL CONTENTS statement: displays table metadata (column names, data types, etc)*/ 134 | proc casutil; 135 | CONTENTS casdata="SAVESAMPLE" incaslib="&caslib_alias"; 136 | quit; 137 | 138 | 139 | /* Proc CASUTIL LIST statement: Lists files or in-memory tables currenlty in the caslib's datasource*/ 140 | proc casutil; 141 | /*Speficy either "tables" or "files" based on desired list */ 142 | LIST tables; 143 | quit; 144 | 145 | 146 | /* Clean-up: Delete files from data source associated with caslib */ 147 | proc cas; 148 | session &cas_session; 149 | action deleteSource 150 | caslib="&caslib_alias" 151 | source="UTILLOADSAMPLE" 152 | ; 153 | run; 154 | quit; 155 | 156 | 157 | /* Remove CAS session */ 158 | cas &cas_session. terminate; -------------------------------------------------------------------------------- /SAS Viya/sdcColumnInfoSample.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: sdcColumninfo.sas */ 5 | /* TITLE: SAS Data Connector to ODBC Sample Program: Columninfo */ 6 | /* PRODUCT: SAS/ACCESS to ODBC */ 7 | /* SYSTEM: UNIX */ 8 | /* REF: SAS Viya Programming Documentation: CAS User's Guide */ 9 | /* USAGE: Shows specific column information of the desired table */ 10 | /* NOTE: Some interfaces are case sensitive. You may need to */ 11 | /* change the case of table or column names to comply */ 12 | /* with the requirements of your database. 
*/ 13 | /* */ 14 | /*********************************************************************/ 15 | 16 | 17 | /* Specify the DBMS engine, for example: ODBC */ 18 | %let srctype=odbc; 19 | 20 | %let cas_session = mysess; 21 | %let caslib_alias = datalib; 22 | 23 | 24 | /*Specify connection parameters within a string 25 | 26 | Edit the below variables with the required DBMS-specific conection options 27 | Also, specify such variables within the CONNOPS string, as this will be used for the other sample programs 28 | 29 | Most datasources will require username, password, database, and schema/server 30 | Please refer to the connopts.xlsx file for specific connection requirements for each DBMS engine 31 | 32 | BigQuery: requires protject and cred_path parameters 33 | Hadoop: requires hadoopJarPath, hadoopConfigDir, and dtm parameters 34 | Impala: requires port and conopts parameters 35 | JDBC: requires url, class, and classpath parameters 36 | Salesforce: requires authendpoint and catalog parameters 37 | SAP Hana: requires instance and tabletype parameters 38 | Teradata: requires dataTransferMode parameter 39 | Oracle, Netezza, and MSSWL require catalog parameter 40 | 41 | */ 42 | 43 | %let username = ???????; 44 | %let password = ???????; 45 | %let database = ???????; 46 | %let schema = ???????; 47 | 48 | %let CONNOPTS=%str(user=??????? 49 | pwd=??????? 50 | dsn=???????); 51 | 52 | /* Connect to CAS using cashost and casport, optional in SAS Studio */ 53 | /* options cashost="????????" casport=????; */ 54 | 55 | 56 | /* Create a CAS Session */ 57 | cas &cas_session; 58 | 59 | 60 | /* Run a proc cas addCasLib action using the specific data connector options specified above */ 61 | proc cas; 62 | session &cas_session; 63 | action addCaslib caslib="&caslib_alias" 64 | datasource={ srctype="&srctype", 65 | user="&username", 66 | pass="&password", 67 | database="&database", 68 | schema="&schema", 69 | catalog="*"}; 70 | run; 71 | quit; 72 | 73 | 74 | /* Create in-memory tables in CAS using CAS Libname engine */ 75 | libname caslib cas sessref=&cas_session tag=""; 76 | 77 | data caslib.COLUMNSAMPLE; 78 | input FLIGHT $3. +5 DATES $7. +3 DEPART $5. +2 ORIG $3. 79 | +3 DEST $3. 
+7 MILES +6 BOARDED +6 CAPACITY; 80 | datalines; 81 | 114 01MAR98 7:10 LGA LAX 2475 172 210 82 | 202 01MAR98 10:43 LGA ORD 740 151 210 83 | 219 01MAR98 9:31 LGA LON 3442 198 250 84 | 622 01MAR98 12:19 LGA FRA 3857 207 250 85 | 132 01MAR98 15:35 LGA YYZ 366 115 178 86 | 271 01MAR98 13:17 LGA PAR 3635 138 250 87 | 302 01MAR98 20:22 LGA WAS 229 105 180 88 | 114 02MAR98 7:10 LGA LAX 2475 119 210 89 | 202 02MAR98 10:43 LGA ORD 740 120 210 90 | 219 02MAR98 9:31 LGA LON 3442 147 250 91 | 622 02MAR98 12:19 LGA FRA 3857 176 250 92 | 132 02MAR98 15:35 LGA YYZ 366 106 178 93 | ; 94 | run; 95 | 96 | 97 | /* Proc cas columninfo action */ 98 | proc cas; 99 | table.columnInfo / 100 | table={name = "COLUMNSAMPLE", 101 | caslib = "&caslib_alias"}; 102 | run; 103 | quit; 104 | 105 | /*Proc cas columninfo action with specified vars variable */ 106 | proc cas; 107 | table.columnInfo / 108 | table={name="COLUMNSAMPLE", 109 | caslib = "&caslib_alias" 110 | vars={"FLIGHT", "DATES", "BOARDED", "CAPACITY"}}; 111 | run; 112 | quit; 113 | 114 | /*Proc cas columninfo action alternative method of specifying variables to display*/ 115 | proc cas; 116 | table.columnInfo / 117 | inputs = {{name="FLIGHT"},{name="BOARDED"}} 118 | table={name = "COLUMNSAMPLE", 119 | caslib = "&caslib_alias"}; 120 | run; 121 | quit; 122 | 123 | 124 | /* Remove CAS session */ 125 | cas &cas_session. terminate; -------------------------------------------------------------------------------- /SAS Viya/sdcFileInfoSample.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: sdcFileInfo.sas */ 5 | /* TITLE: SAS Data Connector to ODBC Sample Program: FileInfo */ 6 | /* PRODUCT: SAS/ACCESS to ODBC */ 7 | /* SYSTEM: UNIX */ 8 | /* REF: SAS Viya Programming Documentation: CAS User's Guide */ 9 | /* USAGE: Shows specific file information of the desired path */ 10 | /* NOTE: Some interfaces are case sensitive. You may need to */ 11 | /* change the case of table or column names to comply */ 12 | /* with the requirements of your database. */ 13 | /* */ 14 | /*********************************************************************/ 15 | 16 | /* Specify the DBMS engine, for example: ODBC */ 17 | %let srctype=odbc; 18 | 19 | %let cas_session = mysess; 20 | %let caslib_alias = datalib; 21 | 22 | 23 | /*Specify connection parameters within a string 24 | 25 | Edit the below variables with the required DBMS-specific conection options 26 | Also, specify such variables within the CONNOPS string, as this will be used for the other sample programs 27 | 28 | Most datasources will require username, password, database, and schema/server 29 | Please refer to the connopts.xlsx file for specific connection requirements for each DBMS engine 30 | 31 | BigQuery: requires protject and cred_path parameters 32 | Hadoop: requires hadoopJarPath, hadoopConfigDir, and dtm parameters 33 | Impala: requires port and conopts parameters 34 | JDBC: requires url, class, and classpath parameters 35 | Salesforce: requires authendpoint and catalog parameters 36 | SAP Hana: requires instance and tabletype parameters 37 | Teradata: requires dataTransferMode parameter 38 | Oracle, Netezza, and MSSWL require catalog parameter */ 39 | 40 | %let username = ???????; 41 | %let password = ???????; 42 | %let database = ???????; 43 | %let schema = ???????; 44 | 45 | %let CONNOPTS=%str(user=??????? 46 | pwd=??????? 
47 | dsn=???????); 48 | 49 | /* Connect to CAS using cashost and casport, optional in SAS Studio */ 50 | /* options cashost="????????" casport=????; */ 51 | 52 | 53 | /* Create a CAS Session */ 54 | cas &cas_session; 55 | 56 | 57 | /* Specify a path where the sample data is located */ 58 | %let path=???????; 59 | 60 | 61 | /* Add a caslib with source type "path" that references the specified path */ 62 | proc cas; 63 | session &cas_session; 64 | table.addCaslib / 65 | caslib="mydata" 66 | datasource={srctype="path"} 67 | path="&path"; 68 | run; 69 | quit; 70 | 71 | 72 | proc cas; 73 | /* Run the fileInfo action to list all files in the path */ 74 | table.fileInfo / 75 | allfiles=true; 76 | 77 | 78 | /* Run the fileInfo action on a specific file name */ 79 | table.fileInfo / 80 | path='samdat1.csv'; 81 | 82 | 83 | /* Run the fileInfo action on a specific file name and display the size in kilobytes */ 84 | table.fileInfo / 85 | path='samdat1.csv' 86 | kbytes=true; 87 | 88 | 89 | /* Run the fileInfo action with a wildcard in the file name */ 90 | table.fileInfo / 91 | path='%samdat1%'; 92 | 93 | 94 | /* Run the fileInfo action with a wildcard for the file type */ 95 | table.fileInfo / 96 | path='%.csv'; 97 | 98 | run; 99 | quit; 100 | 101 | 102 | /* Remove CAS session */ 103 | cas &cas_session. terminate; 104 | -------------------------------------------------------------------------------- /SAS Viya/sdcLoadSample.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: sdcLoadSample.sas */ 5 | /* TITLE: SAS Data Connector Sample Program: Load */ 6 | /* PRODUCT: SAS/ACCESS */ 7 | /* SYSTEM: UNIX */ 8 | /* REF: SAS Viya Programming Documentation: CAS User's Guide */ 9 | /* USAGE: Create a table in the DBMS, execute loadtable actions, */ 10 | /* verify the results, and delete the source table */ 11 | /* NOTE: Some interfaces are case sensitive. You may need to */ 12 | /* change the case of table or column names to comply */ 13 | /* with the requirements of your database. */ 14 | /* */ 15 | /*********************************************************************/ 16 | 17 | /* Specify the DBMS engine, for example: ODBC */ 18 | %let srctype=odbc; 19 | 20 | %let cas_session = mysess; 21 | %let caslib_alias = datalib; 22 | 23 | 24 | /* Specify connection parameters within a string 25 | 26 | Edit the variables below with the required DBMS-specific connection options 27 | Also, specify these values within the CONNOPTS string, as it is used by the other sample programs 28 | 29 | Most data sources require username, password, database, and schema/server 30 | Please refer to the connopts.xlsx file for specific connection requirements for each DBMS engine 31 | 32 | BigQuery: requires project and cred_path parameters 33 | Hadoop: requires hadoopJarPath, hadoopConfigDir, and dtm parameters 34 | Impala: requires port and conopts parameters 35 | JDBC: requires url, class, and classpath parameters 36 | Salesforce: requires authendpoint and catalog parameters 37 | SAP HANA: requires instance and tabletype parameters 38 | Teradata: requires dataTransferMode parameter 39 | Oracle, Netezza, and Microsoft SQL Server require the catalog parameter 40 | 41 | */ 42 | 43 | %let username = ???????; 44 | %let password = ???????; 45 | %let database = ???????; 46 | %let schema = ???????; 47 | 48 | %let CONNOPTS=%str(user=??????? 49 | pwd=???????
50 | dsn=???????); 51 | 52 | /* Connect to CAS using cashost and casport, optional in SAS Studio */ 53 | /* options cashost="????????" casport=????; */ 54 | 55 | 56 | /* Create a CAS Session */ 57 | cas &cas_session; 58 | 59 | 60 | /* Run a proc cas addCasLib action using the specific data connector options specified above */ 61 | proc cas; 62 | session &cas_session; 63 | action addCaslib caslib="&caslib_alias" 64 | datasource={ srctype="&srctype", 65 | user="&username", 66 | pass="&password", 67 | database="&database", 68 | schema="&schema", 69 | catalog="*"}; 70 | run; 71 | quit; 72 | 73 | 74 | /* Create a sample load table in the DBMS using the DBMS-specific LIBNAME engine */ 75 | libname loadlib &srctype &CONNOPTS; 76 | 77 | data loadlib.LOADSAMPLE; 78 | input FLIGHT $3. +5 DATES $7. +3 DEPART $5. +2 ORIG $3. 79 | +3 DEST $3. +7 MILES +6 BOARDED +6 CAPACITY; 80 | datalines; 81 | 114 01MAR98 7:10 LGA LAX 2475 172 210 82 | 202 01MAR98 10:43 LGA ORD 740 151 210 83 | 219 01MAR98 9:31 LGA LON 3442 198 250 84 | 622 01MAR98 12:19 LGA FRA 3857 207 250 85 | 132 01MAR98 15:35 LGA YYZ 366 115 178 86 | 271 01MAR98 13:17 LGA PAR 3635 138 250 87 | 302 01MAR98 20:22 LGA WAS 229 105 180 88 | 114 02MAR98 7:10 LGA LAX 2475 119 210 89 | 202 02MAR98 10:43 LGA ORD 740 120 210 90 | 219 02MAR98 9:31 LGA LON 3442 147 250 91 | 622 02MAR98 12:19 LGA FRA 3857 176 250 92 | 132 02MAR98 15:35 LGA YYZ 366 106 178 93 | ; 94 | run; 95 | 96 | 97 | /* Proc cas loadtable action: loads the entire table */ 98 | proc cas; 99 | session &cas_session; 100 | action loadtable 101 | casout={name="SAMDATLOAD" replace=true} 102 | caslib="&caslib_alias" 103 | path="LOADSAMPLE"; 104 | run; 105 | quit; 106 | 107 | 108 | /* Proc cas loadtable action with where parameter: subsets the input data (FLIGHT is a character column, so the value is quoted) */ 109 | proc cas; 110 | session &cas_session; 111 | action loadtable 112 | casout={name="SAMDATLOADWHERE" replace=true} 113 | caslib="&caslib_alias" 114 | where="FLIGHT = '114'" 115 | path="LOADSAMPLE"; 116 | run; 117 | quit; 118 | 119 | 120 | /* Proc cas loadtable action with vars parameter: specifies the variables to load */ 121 | proc cas; 122 | session &cas_session; 123 | action loadtable 124 | casout={name="SAMDATLOADVARS" replace=true} 125 | caslib="&caslib_alias" 126 | vars={"FLIGHT", "ORIG", "DEST"} 127 | path="LOADSAMPLE"; 128 | run; 129 | quit; 130 | 131 | 132 | /* Verify the loadtable results: use the CAS LIBNAME engine to print the in-memory tables */ 133 | libname caslib cas sessref=&cas_session tag=""; 134 | proc print data=caslib.SAMDATLOAD; 135 | run; 136 | proc print data=caslib.SAMDATLOADWHERE; 137 | run; 138 | proc print data=caslib.SAMDATLOADVARS; 139 | run; 140 | 141 | 142 | /* Clean up: delete the source table from the data source associated with the caslib */ 143 | proc cas; 144 | session &cas_session; 145 | action deleteSource 146 | caslib="&caslib_alias" 147 | source="LOADSAMPLE" 148 | ; 149 | run; 150 | quit; 151 | 152 | 153 | /* Remove CAS session */ 154 | cas &cas_session.
terminate; 155 | -------------------------------------------------------------------------------- /SAS Viya/sdcSaveSample.sas: -------------------------------------------------------------------------------- 1 | /*********************************************************************/ 2 | /* S A S S A M P L E L I B R A R Y */ 3 | /* */ 4 | /* NAME: sdcSaveSample.sas */ 5 | /* TITLE: SAS Data Connector Sample Program: Save */ 6 | /* PRODUCT: SAS/ACCESS */ 7 | /* SYSTEM: UNIX */ 8 | /* REF: SAS Viya Programming Documentation: CAS User's Guide */ 9 | /* USAGE: Create a table in CAS, execute save actions, */ 10 | /* verify the results */ 11 | /* NOTE: Some interfaces are case sensitive. You may need to */ 12 | /* change the case of table or column names to comply */ 13 | /* with the requirements of your database. */ 14 | /* */ 15 | /*********************************************************************/ 16 | 17 | 18 | /* Specify the DBMS engine, for example: ODBC */ 19 | %let srctype=odbc; 20 | 21 | %let cas_session = mysess; 22 | %let caslib_alias = datalib; 23 | 24 | 25 | /* Specify connection parameters within a string 26 | 27 | Edit the variables below with the required DBMS-specific connection options 28 | Also, specify these values within the CONNOPTS string, as it is used by the other sample programs 29 | 30 | Most data sources require username, password, database, and schema/server 31 | Please refer to the connopts.xlsx file for specific connection requirements for each DBMS engine 32 | 33 | BigQuery: requires project and cred_path parameters 34 | Hadoop: requires hadoopJarPath, hadoopConfigDir, and dtm parameters 35 | Impala: requires port and conopts parameters 36 | JDBC: requires url, class, and classpath parameters 37 | Salesforce: requires authendpoint and catalog parameters 38 | SAP HANA: requires instance and tabletype parameters 39 | Teradata: requires dataTransferMode parameter 40 | Oracle, Netezza, and Microsoft SQL Server require the catalog parameter 41 | 42 | */ 43 | 44 | %let username = ???????; 45 | %let password = ???????; 46 | %let database = ???????; 47 | %let schema = ???????; 48 | 49 | %let CONNOPTS=%str(user=??????? 50 | pwd=??????? 51 | dsn=???????); 52 | 53 | /* Connect to CAS using cashost and casport, optional in SAS Studio */ 54 | /* options cashost="????????" casport=????; */ 55 | 56 | 57 | /* Create a CAS Session */ 58 | cas &cas_session; 59 | 60 | 61 | /* Run a proc cas addCasLib action using the specific data connector options specified above */ 62 | proc cas; 63 | session &cas_session; 64 | action addCaslib caslib="&caslib_alias" 65 | datasource={ srctype="&srctype", 66 | user="&username", 67 | pass="&password", 68 | database="&database", 69 | schema="&schema", 70 | catalog="*"}; 71 | run; 72 | quit; 73 | 74 | 75 | /* Create in-memory tables in CAS using the CAS LIBNAME engine */ 76 | libname caslib cas sessref=&cas_session tag=""; 77 | 78 | data caslib.SAVESAMPLE; 79 | input FLIGHT $3. +5 DATES $7. +3 DEPART $5. +2 ORIG $3. 80 | +3 DEST $3.
+7 MILES +6 BOARDED +6 CAPACITY; 81 | datalines; 82 | 114 01MAR98 7:10 LGA LAX 2475 172 210 83 | 202 01MAR98 10:43 LGA ORD 740 151 210 84 | 219 01MAR98 9:31 LGA LON 3442 198 250 85 | 622 01MAR98 12:19 LGA FRA 3857 207 250 86 | 132 01MAR98 15:35 LGA YYZ 366 115 178 87 | 271 01MAR98 13:17 LGA PAR 3635 138 250 88 | 302 01MAR98 20:22 LGA WAS 229 105 180 89 | 114 02MAR98 7:10 LGA LAX 2475 119 210 90 | 202 02MAR98 10:43 LGA ORD 740 120 210 91 | 219 02MAR98 9:31 LGA LON 3442 147 250 92 | 622 02MAR98 12:19 LGA FRA 3857 176 250 93 | 132 02MAR98 15:35 LGA YYZ 366 106 178 94 | ; 95 | run; 96 | 97 | 98 | /* Proc cas save action using the SAS Data Connector to the database */ 99 | /* Creates a permanent copy of the in-memory table in the data source */ 100 | proc cas; 101 | save 102 | caslib="&caslib_alias" 103 | name="SAMDATSAVE" 104 | replace=TRUE 105 | table={caslib="&caslib_alias", name="SAVESAMPLE"}; 106 | run; 107 | quit; 108 | 109 | 110 | /* Proc cas save action with vars parameter: specifies the variables to save */ 111 | proc cas; 112 | save 113 | caslib="&caslib_alias" 114 | name="SAMDATSAVEVARS" 115 | replace=TRUE 116 | table={caslib="&caslib_alias", name="SAVESAMPLE", vars={"FLIGHT", "ORIG", "DEST"}}; 117 | 118 | run; 119 | quit; 120 | 121 | 122 | /* Verify the results: print the saved tables in the data source using the LIBNAME engine */ 123 | libname savelib &srctype &CONNOPTS; 124 | proc print data=savelib.SAMDATSAVE; 125 | run; 126 | proc print data=savelib.SAMDATSAVEVARS; 127 | run; 128 | 129 | 130 | /* Remove CAS session */ 131 | cas &cas_session. terminate; --------------------------------------------------------------------------------
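For reference when filling in the ??????? placeholders used throughout these samples, the sketch below shows one hypothetical way the shared connection settings could look for a generic ODBC DSN. Every value here (DSN name, credentials, database, and schema) is an illustrative assumption rather than anything shipped with the samples; substitute the options that your own data source requires, following the engine-specific notes in each sample's header comment.

/* Illustrative sketch only: hypothetical ODBC connection values                         */
/* Replace SALES_DSN, sasdemo, and the other placeholders with your own site's settings  */
%let srctype  = odbc;
%let username = sasdemo;
%let password = secretpw;
%let database = salesdb;
%let schema   = public;

%let CONNOPTS=%str(user=sasdemo
                   pwd=secretpw
                   dsn=SALES_DSN);

/* The same values then feed the DBMS-specific LIBNAME engine and the addCaslib action   */
/* in the samples above, for example:                                                    */
libname mylib &srctype &CONNOPTS;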