├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ └── bug_report.md ├── actions │ └── hxe │ │ └── action.yml └── workflows │ ├── conventional-commits.yml │ ├── release-please.yml │ ├── release.yml │ └── test.yml ├── .gitignore ├── .prettierignore ├── .prettierrc.js ├── .release-please-manifest.json ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── LICENSES ├── Apache-2.0.txt └── MIT.txt ├── README.md ├── REUSE.toml ├── SECURITY.md ├── attic ├── poc.md └── types.cds ├── db-service ├── CHANGELOG.md ├── LICENSE ├── README.md ├── index.js ├── lib │ ├── InsertResults.js │ ├── SQLService.js │ ├── common │ │ ├── DatabaseService.js │ │ ├── factory.d.ts │ │ ├── generic-pool.js │ │ └── session-context.js │ ├── converters.d.ts │ ├── cql-functions.js │ ├── cqn2sql.js │ ├── cqn4sql.js │ ├── deep-queries.js │ ├── fill-in-keys.js │ ├── infer │ │ ├── cqn.d.ts │ │ ├── index.js │ │ ├── join-tree.js │ │ └── pseudos.js │ ├── search.js │ └── utils.js ├── package.json └── test │ ├── assocs │ ├── schema1.cds │ ├── schema2.cds │ └── unmanaged-assocs.test.js │ ├── bookshop │ ├── db │ │ ├── booksWithExpr.cds │ │ ├── schema.cds │ │ └── search.cds │ └── srv │ │ ├── admin-service.cds │ │ └── cat-service.cds │ ├── cds-infer │ ├── api.test.js │ ├── calculated-elements.test.js │ ├── column.element.test.js │ ├── elements.test.js │ ├── model │ │ └── nestedProjections.cds │ ├── negative.test.js │ ├── nested-projections.test.js │ └── source.test.js │ ├── cqn4sql │ ├── A2J │ │ ├── classes.cds │ │ ├── schema.cds │ │ └── sharedFKIdentity.cds │ ├── API.test.js │ ├── DELETE.test.js │ ├── INSERT.test.js │ ├── UPDATE.test.js │ ├── assocs2joins.test.js │ ├── basic.test.js │ ├── calculated-elements.test.js │ ├── column.element.test.js │ ├── compare-structs.test.js │ ├── expand.test.js │ ├── flattening.test.js │ ├── functions.test.js │ ├── inline.test.js │ ├── keyless.test.js │ ├── localized.test.js │ ├── model │ │ ├── cap_issue.cds │ │ ├── collaborations.cds │ │ ├── keyless.cds │ │ ├── nestedProjections.cds │ │ ├── update.cds │ │ └── withParameters.cds │ ├── not-persisted.test.js │ ├── not-supported.test.js │ ├── path-in-from.test.js │ ├── pseudo-variable-replacement.test.js │ ├── replacements.test.js │ ├── search.test.js │ ├── structure-access.test.js │ ├── table-alias.test.js │ ├── tupleExpansion.test.js │ ├── where-exists.test.js │ ├── wildcards.test.js │ └── with-parameters.test.js │ ├── deep │ ├── deep.test.js │ ├── package.json │ └── schema.cds │ ├── etc │ └── cds.clone.test.js │ └── tsc │ ├── package-lock.json │ ├── package.json │ ├── tsconfig.json │ └── typescript.ts ├── eslint.config.mjs ├── hana ├── CHANGELOG.md ├── LICENSE ├── README.md ├── cds-plugin.js ├── index.js ├── lib │ ├── HANAService.js │ ├── collations.json │ ├── cql-functions.js │ ├── drivers │ │ ├── base.js │ │ ├── dynatrace.js │ │ ├── hana-client.js │ │ ├── hdb.js │ │ ├── index.js │ │ └── stream.js │ └── scripts │ │ ├── .hdiconfig │ │ ├── container-database.sql │ │ ├── container-tenant.sql │ │ └── deploy.sql ├── package.json ├── test │ ├── compliance │ ├── fuzzy.cds │ ├── fuzzy.test.js │ ├── hana-functions.test.js │ ├── param-views.cds │ ├── param-views.test.js │ ├── plain-sql.test.js │ ├── proc.cds │ ├── run.test.js │ ├── service.js │ ├── spatial.test.js │ ├── stream.test.js │ ├── temporal.test.js │ └── versioning.test.js └── tools │ ├── README.md │ ├── collation │ ├── collation.js │ ├── collation.wasm │ └── collation.wat │ └── docker │ ├── hce │ ├── hana.yml │ ├── jaeger.yaml │ ├── latest.js │ ├── otel.sh │ ├── prometheus.yml │ ├── ready.sh │ ├── 
start.sh │ └── update.sh │ └── hxe │ ├── Dockerfile │ ├── ci.yml │ ├── hana.yml │ ├── latest.js │ ├── ready.sh │ ├── setup.sh │ ├── start-hdi.sql │ └── start.sh ├── package-lock.json ├── package.json ├── postgres ├── CHANGELOG.md ├── LICENSE ├── README.md ├── cds-plugin.js ├── index.js ├── lib │ ├── PostgresService.js │ ├── ReservedWords.json │ ├── cql-functions.js │ └── session.json ├── package.json ├── pg-stack.yml └── test │ ├── beershop │ ├── db │ │ ├── _i18n │ │ │ └── i18n.properties │ │ ├── data │ │ │ ├── csw-Beers.csv │ │ │ ├── csw-Brewery.csv │ │ │ └── csw-TypeChecks.csv │ │ └── schema.cds │ ├── package-lock.json │ ├── package.json │ └── srv │ │ ├── beershop-admin-service.cds │ │ ├── beershop-admin-service.js │ │ ├── beershop-service.cds │ │ ├── beershop-service.js │ │ └── ui-annotations.cds │ ├── cds-build.test.js │ ├── compliance │ ├── connect.test.js │ ├── odata-string-functions.test.js │ ├── plain-sql.test.js │ ├── ql.test.js │ ├── service-admin.test.js │ ├── service-az.json │ ├── service-btp.json │ ├── service-types.test.js │ ├── service.json │ ├── service.test.js │ ├── streaming.test.js │ ├── timezone.test.js │ └── tiny-sample │ ├── db │ ├── data │ │ └── my.bookshop-Books.csv │ └── schema.cds │ ├── package.json │ └── srv │ └── cat-service.cds ├── release-please-config.json ├── renovate.json ├── samples.js ├── sqlite ├── CHANGELOG.md ├── LICENSE ├── README.md ├── cds-plugin.js ├── index.js ├── lib │ ├── SQLiteService.js │ └── cql-functions.js ├── package.json └── test │ ├── compliance │ ├── deep │ ├── deep.cds │ ├── deepInputProcessing.test.js │ ├── deepPersistenceSkip.test.js │ └── package.json │ ├── general │ ├── delete-rename.test.js │ ├── insert-entries-select.test.js │ ├── localized.test.js │ ├── managed.test.js │ ├── model.cds │ ├── model.js │ ├── package.json │ ├── samples │ │ ├── 1000.png │ │ ├── 1001.png │ │ ├── data.json │ │ └── test.jpg │ ├── stream.test.js │ ├── temporal.test.js │ ├── testModel.cds │ └── uuid.test.js │ ├── plain-sql.test.js │ ├── queries-without-models.test.js │ ├── recurse │ ├── genres-ancestors.sql │ ├── genres-descendants.sql │ └── genres-hierarchy.sql │ ├── service.json │ └── versioning.test.js └── test ├── bookshop ├── db │ ├── data │ │ ├── sap.capire.bookshop-Authors.csv │ │ ├── sap.capire.bookshop-Books.csv │ │ ├── sap.capire.bookshop-Books_texts.csv │ │ └── sap.capire.bookshop-Genres.csv │ ├── init.js │ └── schema.cds ├── package.json └── srv │ ├── admin-service.cds │ ├── admin-service.js │ ├── cat-service.cds │ ├── cat-service.js │ ├── draft-enabled-service.cds │ ├── genres.cds │ └── tree-service.cds ├── cds.js ├── compliance ├── CREATE.test.js ├── DELETE.test.js ├── INSERT.test.js ├── SELECT.test.js ├── UPDATE.test.js ├── UPSERT.test.js ├── client-options.test.js ├── functions.test.js ├── resources │ ├── db │ │ ├── basic │ │ │ ├── common.cds │ │ │ ├── common │ │ │ │ └── basic.common.default.js │ │ │ ├── index.cds │ │ │ ├── literals.cds │ │ │ ├── literals │ │ │ │ ├── basic.literals.array.js │ │ │ │ ├── basic.literals.binaries.js │ │ │ │ ├── basic.literals.date.js │ │ │ │ ├── basic.literals.dateTime.js │ │ │ │ ├── basic.literals.globals.js │ │ │ │ ├── basic.literals.map.js │ │ │ │ ├── basic.literals.number.js │ │ │ │ ├── basic.literals.string.js │ │ │ │ ├── basic.literals.time.js │ │ │ │ ├── basic.literals.timestamp.js │ │ │ │ └── basic.literals.vectors.js │ │ │ └── projection.cds │ │ ├── complex │ │ │ ├── associations.cds │ │ │ ├── associationsUnmanaged.cds │ │ │ ├── computed.cds │ │ │ ├── computed │ │ │ │ ├── complex.computed.dynamic.js │ 
│ │ │ └── complex.computed.static.js │ │ │ ├── index.cds │ │ │ ├── keywords.cds │ │ │ ├── keywords │ │ │ │ ├── complex.keywords.1234567890.js │ │ │ │ └── complex.keywords.~%60!%40%23%24%25%5E%26()_%2B-%3D'%22%5C%2F.%2C%5B%5D.js.js │ │ │ └── uniques.cds │ │ ├── data │ │ │ ├── basic.literals-dateTime.csv │ │ │ ├── basic.literals-globals.csv │ │ │ ├── basic.literals-string.csv │ │ │ ├── complex.associations.Authors.csv │ │ │ ├── complex.associations.Books.csv │ │ │ ├── complex.associations.unmanaged.Authors.csv │ │ │ ├── complex.associations.unmanaged.Books.csv │ │ │ └── edge.hana.functions-timestamps.csv │ │ ├── edge │ │ │ └── index.cds │ │ ├── hana │ │ │ ├── funcs │ │ │ │ ├── date-generator.js │ │ │ │ ├── datetime.cds │ │ │ │ ├── hana.js │ │ │ │ └── index.cds │ │ │ ├── index.cds │ │ │ ├── literals.cds │ │ │ ├── literals │ │ │ │ ├── edge.hana.literals.HANA_BINARY.js │ │ │ │ ├── edge.hana.literals.HANA_CHAR.js │ │ │ │ ├── edge.hana.literals.HANA_NCHAR.js │ │ │ │ ├── edge.hana.literals.HANA_NUMBER.js │ │ │ │ └── edge.hana.literals.HANA_ST.js │ │ │ └── versioning.cds │ │ └── index.cds │ ├── fts │ │ └── versioning │ │ │ ├── hana.cds │ │ │ └── sqlite.cds │ ├── package.json │ └── srv │ │ └── index.cds └── strictMode.test.js ├── deploy.js ├── index.js ├── scenarios ├── bookshop │ ├── delete.test.js │ ├── funcs.test.js │ ├── genres.json │ ├── genres.test.js │ ├── hierarchy.test.js │ ├── index.js │ ├── insert-large.test.js │ ├── insert.test.js │ ├── localization.test.js │ ├── orderBy.test.js │ ├── read.test.js │ ├── search.test.js │ ├── stream.perf.test.js │ ├── update.test.js │ └── upsert.test.js ├── index.js └── sflight │ ├── index.js │ ├── integration.test.js │ ├── lean-draft.test.js │ └── read.test.js └── sflight ├── app ├── labels.cds ├── services.cds ├── travel_analytics │ └── annotations.cds └── travel_processor │ ├── capabilities.cds │ └── field-control.cds ├── db ├── common.cds ├── data │ ├── sap.common-Countries.csv │ ├── sap.common-Currencies.csv │ ├── sap.fe.cap.travel-Airline.csv │ ├── sap.fe.cap.travel-Airport.csv │ ├── sap.fe.cap.travel-Booking.csv │ ├── sap.fe.cap.travel-BookingStatus.csv │ ├── sap.fe.cap.travel-BookingSupplement.csv │ ├── sap.fe.cap.travel-Flight.csv │ ├── sap.fe.cap.travel-FlightConnection.csv │ ├── sap.fe.cap.travel-Passenger.csv │ ├── sap.fe.cap.travel-Supplement.csv │ ├── sap.fe.cap.travel-SupplementType.csv │ ├── sap.fe.cap.travel-Travel.csv │ ├── sap.fe.cap.travel-TravelAgency.csv │ └── sap.fe.cap.travel-TravelStatus.csv ├── master-data.cds └── schema.cds ├── package.json ├── srv ├── analytics-service.cds ├── travel-service.cds └── travel-service.js └── test └── odata.test.js /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # cap-team 2 | 3 | ** @danjoa @johannes-vogel @stewsk @BobdenOs @patricebender @sjvans @David-Kunz 4 | 5 | # community 6 | 7 | postgres/ @BobdenOs @patricebender @gregorwolf @sebastianesch @vobu @danjoa @johannes-vogel @stewsk @sjvans @David-Kunz 8 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | 19 | 20 | 21 | ### Description of erroneous behaviour 22 | 23 | > Please test with the **latest release version** of the CAP runtime (see links below). 
24 | Replace this text with a **clear** and **concise** description of the issue, including examples and links to your content as well as log output. 25 | For example... 26 | 27 | > We have a [query](https://github.com/SAP-samples/cloud-cap-samples/blob/e29394eac0a59ef80489f28e9d0954f719e1cafa/bookshop/srv/admin-service.js#L11) defined as follows: 28 | 29 | ```js 30 | await SELECT.one.from(req.target).columns('max(ID) as ID') 31 | ``` 32 | 33 | > When deploying to sqlite we get the following errors: 34 | 35 | ```sh 36 | ... copy of log output ... 37 | ``` 38 | 39 | ### Detailed steps to reproduce 40 | 41 | > For example (→ replace by appropriate ones for your case): 42 | > 1. git clone https://github.com/your/repo 43 | > 2. npm install 44 | > 3. cds deploy srv -2 sqlite 45 | 46 | ### Details about your project 47 | 48 | > Remove the lines not applicable, and fill in versions for remaining ones: 49 | 50 | | Your Project Name | https://github.com/your/repo | 51 | |:------------------|---------------------------------------| 52 | | OData version | v4 / v2 | 53 | | Node.js version | v18.x.x | 54 | | @sap/cds | 6.x.x | 55 | | @sap/cds-compiler | 3.x.x | 56 | | @sap/cds-dk | 6.x.x | 57 | | @cap-js/postgres | 1.x.x | 58 | @cap-js/sqlite | 1.x.x | 59 | 60 | > Run `cds v -i` in your project root to generate this 61 | -------------------------------------------------------------------------------- /.github/actions/hxe/action.yml: -------------------------------------------------------------------------------- 1 | name: 'Start HANA' 2 | description: 'Starts an local HANA Express instance for isolated testing' 3 | inputs: 4 | GITHUB_TOKEN: 5 | description: 'Derivative token for using the GitHub REST API' 6 | required: true 7 | outputs: 8 | TAG: 9 | description: "The Image Tag" 10 | value: ${{ steps.find-hxe.outputs.TAG }} 11 | IMAGE_ID: 12 | description: "The " 13 | value: ${{ steps.find-hxe.outputs.IMAGE_ID }} 14 | runs: 15 | using: "composite" 16 | steps: 17 | - name: Find HXE image 18 | id: find-hxe 19 | shell: bash 20 | # TODO: replace hana/tools/docker/hxe/* with ${{ github.action_path }} 21 | run: | 22 | TAG="$(sha1sum hana/tools/docker/hxe/* | sha1sum --tag | grep '[^ ]*$' -o)"; 23 | IMAGE_ID=ghcr.io/${{ github.repository_owner }}/hanaexpress; 24 | IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]'); 25 | echo "TAG=${TAG}" >> $GITHUB_OUTPUT; 26 | echo "IMAGE_ID=${IMAGE_ID}" >> $GITHUB_OUTPUT; 27 | GHCR_TOKEN=$(echo ${{ inputs.GITHUB_TOKEN }} | base64); 28 | if 29 | curl -H "Authorization: Bearer ${GHCR_TOKEN}" https://ghcr.io/v2/${{ github.repository_owner }}/hanaexpress/manifests/$TAG | grep "MANIFEST_UNKNOWN"; 30 | then 31 | echo "BUILD_HXE=true" >> $GITHUB_OUTPUT 32 | else 33 | echo "BUILD_HXE=false" >> $GITHUB_OUTPUT 34 | fi; 35 | - name: Set up Docker Buildx 36 | if: ${{ steps.find-hxe.outputs.BUILD_HXE == 'true' }} 37 | uses: docker/setup-buildx-action@v3 38 | - name: Build HXE image 39 | if: ${{ steps.find-hxe.outputs.BUILD_HXE == 'true' }} 40 | shell: bash 41 | run: | 42 | echo "${{ inputs.GITHUB_TOKEN }}" | docker login ghcr.io -u $ --password-stdin; 43 | DOCKER_BUILDKIT=1 docker build -t $IMAGE_ID:$TAG ./hana/tools/docker/hxe; 44 | docker push $IMAGE_ID:$TAG; 45 | env: 46 | TAG: ${{ steps.find-hxe.outputs.TAG }} 47 | IMAGE_ID: ${{ steps.find-hxe.outputs.IMAGE_ID }} 48 | - name: Start HXE image 49 | shell: bash 50 | run: | 51 | echo "${{ inputs.GITHUB_TOKEN }}" | docker login ghcr.io -u $ --password-stdin; 52 | { npm start -w hana; } & 53 | env: 54 | TAG: ${{ steps.find-hxe.outputs.TAG }} 
55 | IMAGE_ID: ${{ steps.find-hxe.outputs.IMAGE_ID }} 56 | -------------------------------------------------------------------------------- /.github/workflows/conventional-commits.yml: -------------------------------------------------------------------------------- 1 | name: 'Adheres to conventional commit standard' 2 | 3 | on: 4 | pull_request_target: 5 | types: 6 | - opened 7 | - edited 8 | - synchronize 9 | 10 | permissions: 11 | pull-requests: read 12 | 13 | jobs: 14 | main: 15 | name: Validate PR title 16 | runs-on: ubuntu-latest 17 | steps: 18 | # v5.4.0 19 | - uses: amannn/action-semantic-pull-request@e9fabac35e210fea40ca5b14c0da95a099eff26f 20 | env: 21 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 22 | with: 23 | # Configure which types are allowed (newline-delimited). 24 | # Default: https://github.com/commitizen/conventional-commit-types 25 | types: | 26 | feat 27 | fix 28 | changed 29 | removed 30 | docs 31 | style 32 | refactor 33 | perf 34 | test 35 | build 36 | ci 37 | chore 38 | revert 39 | wip 40 | deps 41 | 42 | -------------------------------------------------------------------------------- /.github/workflows/release-please.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - main 5 | name: release-please 6 | jobs: 7 | release-please: 8 | permissions: 9 | contents: write 10 | pull-requests: write 11 | packages: write 12 | runs-on: ubuntu-latest 13 | environment: npm 14 | steps: 15 | # v4.1.3 16 | - uses: googleapis/release-please-action@1155c2c8dcbd6b15e85bae6bc6d1fed9552663cb 17 | id: release 18 | with: 19 | token: ${{secrets.CDS_DBS_TOKEN}} 20 | # The logic below handles the npm publication: 21 | - uses: actions/checkout@v4 22 | - uses: actions/setup-node@v4 23 | with: 24 | node-version: 20 25 | registry-url: 'https://registry.npmjs.org' 26 | 27 | ## debug info 28 | - run: echo '${{ toJSON(steps.release.outputs) }} ' 29 | 30 | # Publish packages 31 | - name: Publish db-service 32 | if: ${{ steps.release.outputs.db-service--release_created }} 33 | run: npm publish --workspace db-service --access public 34 | env: 35 | NODE_AUTH_TOKEN: ${{secrets.npm_token}} 36 | 37 | - name: Publish sqlite 38 | if: ${{ steps.release.outputs.sqlite--release_created }} 39 | run: npm publish --workspace sqlite --access public 40 | env: 41 | NODE_AUTH_TOKEN: ${{secrets.npm_token}} 42 | 43 | - name: Publish postgres 44 | if: ${{ steps.release.outputs.postgres--release_created }} 45 | run: npm publish --workspace postgres --access public 46 | env: 47 | NODE_AUTH_TOKEN: ${{secrets.npm_token}} 48 | 49 | - name: Publish SAP HANA 50 | if: ${{ steps.release.outputs.hana--release_created }} 51 | run: npm publish --workspace hana --access public 52 | env: 53 | NODE_AUTH_TOKEN: ${{secrets.npm_token}} 54 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | # This workflow will do a clean installation of node dependencies, cache/restore them, build the source code and run tests across different versions of node 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-nodejs 3 | 4 | name: Release 5 | 6 | on: 7 | workflow_dispatch: 8 | 9 | permissions: 10 | contents: write 11 | 12 | jobs: 13 | publish-npm: 14 | runs-on: ubuntu-latest 15 | environment: npm 16 | steps: 17 | - uses: actions/checkout@v4 18 | - uses: actions/setup-node@v4 
19 | with: 20 | node-version: 20 21 | registry-url: https://registry.npmjs.org/ 22 | - run: npm i 23 | - run: npm test -w db-service -w sqlite -w postgres 24 | env: 25 | cds_features_pool: true 26 | - name: get-version # this takes the version of the monorepo root 27 | id: package-version 28 | # v1.3.1 29 | uses: martinbeentjes/npm-get-version-action@3cf273023a0dda27efcd3164bdfb51908dd46a5b 30 | - name: Create a GitHub release 31 | # v1.15.0 32 | uses: ncipollo/release-action@6996c1bc95523d963d41deede8e4a8c7d3f45744 33 | with: 34 | tag: 'v${{ steps.package-version.outputs.current-version}}' 35 | name: 'Release v${{ steps.package-version.outputs.current-version}}' 36 | # prerelease: true 37 | # body: changelog... 38 | - run: npm publish --workspace db-service --access public 39 | env: 40 | NODE_AUTH_TOKEN: ${{secrets.npm_token}} 41 | - run: npm publish --workspace sqlite --access public 42 | env: 43 | NODE_AUTH_TOKEN: ${{secrets.npm_token}} 44 | - run: npm publish --workspace postgres --access public 45 | env: 46 | NODE_AUTH_TOKEN: ${{secrets.npm_token}} 47 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | types: [opened, synchronize, reopened, auto_merge_enabled] 8 | 9 | # Allow parallel jobs on `main`, so that each commit is tested. For PRs, run only the latest commit. 10 | concurrency: 11 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} 12 | cancel-in-progress: true 13 | 14 | jobs: 15 | test: 16 | runs-on: ubuntu-latest 17 | timeout-minutes: 20 18 | name: Tests 19 | permissions: 20 | packages: write 21 | 22 | strategy: 23 | fail-fast: true 24 | matrix: 25 | node: [22] 26 | 27 | steps: 28 | - uses: actions/checkout@v4 29 | - uses: actions/setup-node@v4 30 | with: 31 | node-version: ${{ matrix.node }} 32 | cache: 'npm' 33 | - run: npm ci 34 | - run: npm install -g @sap/cds-dk 35 | - run: npm ci 36 | - run: npm run lint 37 | - id: hxe 38 | uses: ./.github/actions/hxe 39 | with: 40 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 41 | # testing 42 | - run: npm test -ws 43 | env: 44 | cds_features_pool: true 45 | FORCE_COLOR: true 46 | TAG: ${{ steps.hxe.outputs.TAG }} 47 | IMAGE_ID: ${{ steps.hxe.outputs.IMAGE_ID }} 48 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | *.md 2 | *.html 3 | coverage/ 4 | cds/index.js 5 | cds/ql 6 | cds.ql/ 7 | -------------------------------------------------------------------------------- /.prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | arrowParens: 'avoid', 3 | bracketSpacing: true, 4 | embeddedLanguageFormatting: 'auto', 5 | endOfLine: 'auto', 6 | htmlWhitespaceSensitivity: 'css', 7 | insertPragma: false, 8 | jsxSingleQuote: false, 9 | printWidth: 120, 10 | proseWrap: 'preserve', 11 | quoteProps: 'as-needed', 12 | requirePragma: false, 13 | semi: false, 14 | singleQuote: true, 15 | tabWidth: 2, 16 | trailingComma: 'all', 17 | useTabs: false, 18 | vueIndentScriptAndStyle: false, 19 | } 20 | -------------------------------------------------------------------------------- /.release-please-manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "db-service": "2.1.2", 
3 | "sqlite": "2.0.1", 4 | "postgres": "2.0.2", 5 | "hana": "2.1.1" 6 | } 7 | -------------------------------------------------------------------------------- /LICENSES/MIT.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 6 | 7 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 10 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![REUSE status](https://api.reuse.software/badge/github.com/cap-js/cds-dbs)](https://api.reuse.software/info/github.com/cap-js/cds-dbs) 2 | 3 | # Welcome to the @cap-js/cds-dbs Monorepo 4 | 5 | This is a monorepo for our SQL Database Services. 6 | 7 | It contains subfolders for the [base database service implementation](./db-service/) as well as the implementation of this interface for [SQLite](./sqlite/), [PostgreSQL](./postgres/) and [SAP HANA](./hana/). 8 | Each of the subfolders is published as individual npm module. 9 | 10 | Documentation can be found at [cap.cloud.sap](https://cap.cloud.sap/docs/guides/databases). 11 | 12 | ## Prerequisites 13 | 14 | See [Getting Started](https://cap.cloud.sap/docs/get-started/in-a-nutshell) on how to jumpstart your development and grow as you go with [SAP Cloud Application Programming Model](https://cap.cloud.sap). 15 | 16 | ## Setup 17 | 18 | In general, all you need to do is to install one of the database packages, as follows: 19 | 20 | Using SQLite for development: 21 | 22 | ```sh 23 | npm add @cap-js/sqlite -D 24 | ``` 25 | 26 | Learn more about setup and usage in the [respective database guides](https://cap.cloud.sap/docs/guides/databases). 27 | 28 | ## Support 29 | 30 | This project is open to feature requests/suggestions, bug reports etc. via [GitHub issues](https://github.com/cap-js/cds-dbs/issues). 31 | 32 | ## Contribution 33 | 34 | Contribution and feedback are encouraged and always welcome. For more information about how to contribute, the project structure, as well as additional contribution information, see our [Contribution Guidelines](CONTRIBUTING.md). 35 | 36 | ## Code of Conduct 37 | 38 | We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone. By participating in this project, you agree to abide by its [Code of Conduct](CODE_OF_CONDUCT.md) at all times. 39 | 40 | ## Licensing 41 | 42 | Copyright 2024 SAP SE or an SAP affiliate company and cds-dbs contributors. 
Please see our [LICENSE](LICENSE) for copyright and license information. Detailed information including third-party components and their licensing/copyright information is available [via the REUSE tool](https://api.reuse.software/info/github.com/cap-js/cds-dbs). 43 | -------------------------------------------------------------------------------- /REUSE.toml: -------------------------------------------------------------------------------- 1 | version = 1 2 | SPDX-PackageName = "cds-dbs" 3 | SPDX-PackageSupplier = "The cap team " 4 | SPDX-PackageDownloadLocation = "https://github.com/cap-js/cds-dbs" 5 | SPDX-PackageComment = "The code in this project may include calls to APIs (\"API Calls\") of\n SAP or third-party products or services developed outside of this project\n (\"External Products\").\n \"APIs\" means application programming interfaces, as well as their respective\n specifications and implementing code that allows software to communicate with\n other software.\n API Calls to External Products are not licensed under the open source license\n that governs this project. The use of such API Calls and related External\n Products are subject to applicable additional agreements with the relevant\n provider of the External Products. In no event shall the open source license\n that governs this project grant any rights in or to any External Products,or\n alter, expand or supersede any terms of the applicable additional agreements.\n If you have a valid license agreement with SAP for the use of a particular SAP\n External Product, then you may make use of any API Calls included in this\n project's code for that SAP External Product, subject to the terms of such\n license agreement. If you do not have a valid license agreement for the use of\n a particular SAP External Product, then you may only make use of any API Calls\n in this project for that SAP External Product for your internal, non-productive\n and non-commercial test and evaluation of such API Calls. Nothing herein grants\n you any rights to use or access any SAP External Product, or provide any third\n parties the right to use of access any SAP External Product, through API Calls." 6 | 7 | [[annotations]] 8 | path = "**" 9 | precedence = "aggregate" 10 | SPDX-FileCopyrightText = "2022 SAP SE or an SAP affiliate company and cds-dbs contributors" 11 | SPDX-License-Identifier = "Apache-2.0" 12 | 13 | [[annotations]] 14 | path = ["postgres/test/beershop/**", "postgres/test/odata-string-functions.test.js", "postgres/test/ql.test.js", "postgres/test/service.test.js"] 15 | precedence = "aggregate" 16 | SPDX-FileCopyrightText = ["", "2022 SAP SE or an SAP affiliate company and cds-dbs contributors", "Copyright (c) 2020 SAP Mentors & Friends"] 17 | SPDX-License-Identifier = "MIT" 18 | SPDX-FileComment = "The content of these files, entirely or in part, comes from https://github.com/sapmentors/cds-pg, which is MIT licensed." 
19 | -------------------------------------------------------------------------------- /attic/types.cds: -------------------------------------------------------------------------------- 1 | entity Types { 2 | Boolean : Boolean; 3 | Integer : Integer; 4 | Integer64 : Integer64; 5 | Int64 : Int64; 6 | Int32 : Int32; 7 | Int16 : Int16; 8 | UInt8 : UInt8; 9 | Double : Double; 10 | Decimal : Decimal; 11 | String : String; 12 | LargeString : LargeString; 13 | Date : Date; 14 | Time : Time; 15 | DateTime : DateTime; 16 | Timestamp : Timestamp; 17 | Binary : Binary; 18 | LargeBinary : LargeBinary; 19 | } 20 | 21 | entity HANATypes { 22 | TINYINT : hana.TINYINT; 23 | SMALLINT : hana.SMALLINT; 24 | SMALLDECIMAL : hana.SMALLDECIMAL; 25 | REAL : hana.REAL; 26 | CHAR : hana.CHAR(7); 27 | CLOB : hana.CLOB; 28 | NCHAR : hana.NCHAR; 29 | BINARY : hana.BINARY; 30 | ST_POINT : hana.ST_POINT; 31 | ST_GEOMETRY : hana.ST_GEOMETRY; 32 | } 33 | -------------------------------------------------------------------------------- /db-service/README.md: -------------------------------------------------------------------------------- 1 | # CDS base database service 2 | 3 | Welcome to the base database service for [SAP Cloud Application Programming Model](https://cap.cloud.sap) Node.js. This service forms the core of all supported databases and is the base of our streamlined database architecture. 4 | 5 | Find documentation at 6 | 7 | ## Support 8 | 9 | This project is open to feature requests/suggestions, bug reports etc. via [GitHub issues](https://github.com/cap-js/cds-dbs/issues). 10 | 11 | ## Contribution 12 | 13 | Contribution and feedback are encouraged and always welcome. For more information about how to contribute, the project structure, as well as additional contribution information, see our [Contribution Guidelines](CONTRIBUTING.md). 14 | 15 | ## Versioning 16 | 17 | This library follows [Semantic Versioning](https://semver.org/). 18 | All notable changes are documented in [CHANGELOG.md](CHANGELOG.md). 19 | 20 | ## Code of Conduct 21 | 22 | We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone. By participating in this project, you agree to abide by its [Code of Conduct](CODE_OF_CONDUCT.md) at all times. 23 | 24 | ## Licensing 25 | 26 | Copyright 2024 SAP SE or an SAP affiliate company and cds-dbs contributors. Please see our [LICENSE](LICENSE) for copyright and license information. Detailed information including third-party components and their licensing/copyright information is available [via the REUSE tool](https://api.reuse.software/info/github.com/cap-js/cds-dbs). 
27 | -------------------------------------------------------------------------------- /db-service/index.js: -------------------------------------------------------------------------------- 1 | const DatabaseService = require('./lib/common/DatabaseService') 2 | const SQLService = require('./lib/SQLService') 3 | const CQN2SQL = require('./lib/cqn2sql').classDefinition 4 | 5 | /** 6 | * @template T 7 | * @typedef {import('./lib/common/factory').Factory} Factory 8 | */ 9 | 10 | /** 11 | * @typedef {import('./lib/SQLService').prototype.PreparedStatement} PreparedStatement 12 | */ 13 | 14 | module.exports = { 15 | DatabaseService, 16 | SQLService, 17 | CQN2SQL, 18 | } 19 | -------------------------------------------------------------------------------- /db-service/lib/common/factory.d.ts: -------------------------------------------------------------------------------- 1 | import { Factory as GenericFactory, Options } from 'generic-pool' 2 | 3 | export interface Factory extends GenericFactory { 4 | options: Options 5 | } 6 | -------------------------------------------------------------------------------- /db-service/lib/common/session-context.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | class SessionContext { 4 | constructor(ctx) { 5 | Object.defineProperty(this, 'ctx', { value: ctx }) 6 | } 7 | get '$user.id'() { 8 | return (super['$user.id'] = this.ctx.user?.id || 'anonymous') 9 | } 10 | get '$user.locale'() { 11 | return (super['$user.locale'] = this.ctx.locale || cds.env.i18n.default_language) 12 | } 13 | // REVISIT: should be decided in spec meeting for definitive name 14 | get $now() { 15 | return (super.$now = (this.ctx.timestamp || new Date()).toISOString()) 16 | } 17 | } 18 | 19 | class TemporalSessionContext extends SessionContext { 20 | get '$valid.from'() { 21 | return (super['$valid.from'] = this.ctx._?.['VALID-FROM'] ?? this.ctx._?.['VALID-AT'] ?? new Date().toISOString()) 22 | } 23 | get '$valid.to'() { 24 | return (super['$valid.to'] = 25 | this.ctx._?.['VALID-TO'] ?? 26 | this.ctx._?.['VALID-AT']?.replace(/\.(\d*)(Z?)$/, (_, d, z) => `.${parseInt(d) + 1}${z}`) ?? 27 | (new Date(Date.now() + 1)).toISOString()) 28 | } 29 | } 30 | 31 | // Set all getters as enumerable 32 | const iterate = { enumerable: true } 33 | const getters = (obj) => { 34 | const prot = obj.prototype 35 | const patch = {} 36 | for (const [key, value] of Object.entries(Object.getOwnPropertyDescriptors(prot))) { 37 | if (!value.get) continue 38 | patch[key] = iterate 39 | } 40 | Object.defineProperties(prot, patch) 41 | } 42 | getters(SessionContext) 43 | getters(TemporalSessionContext) 44 | 45 | // REVISIT: only set temporal context if required! 
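// Note on the getters above: each one caches its result by assigning through `super`
// (e.g. `return (super.$now = ...)`). Assigning via `super` inside a getter creates an own
// data property on the instance that shadows the prototype getter, so every session variable
// is computed at most once per SessionContext instance. The `getters()` helper then marks all
// prototype getters as enumerable, so the session variables also show up in `for..in`
// iteration over the context object.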
46 | module.exports = TemporalSessionContext 47 | -------------------------------------------------------------------------------- /db-service/lib/converters.d.ts: -------------------------------------------------------------------------------- 1 | declare function ConverterFunction(expression: string): string 2 | export type Converter = typeof ConverterFunction 3 | 4 | export type Converters = { 5 | UUID: Converter 6 | String: Converter 7 | LargeString: Converter 8 | Binary: Converter 9 | LargeBinary: Converter 10 | Boolean: Converter 11 | Integer: Converter 12 | UInt8: Converter 13 | Int16: Converter 14 | Int32: Converter 15 | Int64: Converter 16 | Float: Converter 17 | Double: Converter 18 | Decimal: Converter 19 | DecimalFloat: Converter 20 | Date: Converter 21 | Time: Converter 22 | DateTime: Converter 23 | Timestamp: Converter 24 | } 25 | -------------------------------------------------------------------------------- /db-service/lib/infer/cqn.d.ts: -------------------------------------------------------------------------------- 1 | import * as cqn from '@sap/cds/apis/cqn' 2 | import * as csn from '@sap/cds/apis/csn' 3 | 4 | type linkedQuery = { 5 | target: csn.Definition 6 | elements: elements 7 | } 8 | export type SELECT = cqn.SELECT & linkedQuery 9 | export type INSERT = cqn.INSERT & linkedQuery 10 | export type UPSERT = cqn.UPSERT & linkedQuery 11 | export type UPDATE = cqn.UPDATE & linkedQuery 12 | export type DELETE = cqn.DELETE & linkedQuery 13 | export type CREATE = cqn.CREATE & linkedQuery 14 | export type DROP = cqn.DROP & linkedQuery 15 | 16 | export type Query = SELECT | INSERT | UPSERT | UPDATE | DELETE | CREATE | DROP 17 | 18 | export type element = csn.Element & { 19 | key?: boolean 20 | virtual?: boolean 21 | unique?: boolean 22 | notNull?: boolean 23 | } 24 | export type elements = { 25 | [name: string]: element 26 | } 27 | 28 | export type col = cqn.column_expr & { element: element } 29 | 30 | export type list = { 31 | list: cqn.expr[] 32 | } 33 | // Passthrough 34 | export type source = cqn.source 35 | export type ref = cqn.ref 36 | export type val = cqn.val 37 | export type xpr = cqn.xpr 38 | export type expr = cqn.expr 39 | export type func = cqn.function_call 40 | export type predicate = cqn.predicate 41 | export type ordering_term = cqn.ordering_term 42 | export type limit = { rows: val; offset: val } 43 | -------------------------------------------------------------------------------- /db-service/lib/infer/pseudos.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | // REVISIT: we should always return cds.linked elements 4 | // > e.g. 
cds.linked({definitions:{pseudos}}) 5 | const pseudos = { 6 | elements: { 7 | $user: { 8 | elements: { 9 | id: { type: 'cds.String' }, 10 | locale: { type: 'cds.String' }, // deprecated 11 | tenant: { type: 'cds.String' }, // deprecated 12 | }, 13 | }, 14 | $now: { type: 'cds.Timestamp' }, 15 | $at: { type: 'cds.Timestamp' }, 16 | $from: { type: 'cds.Timestamp' }, 17 | $to: { type: 'cds.Timestamp' }, 18 | $locale: { type: 'cds.String' }, 19 | $tenant: { type: 'cds.String' }, 20 | }, 21 | } 22 | 23 | module.exports = { pseudos } 24 | -------------------------------------------------------------------------------- /db-service/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@cap-js/db-service", 3 | "version": "2.1.2", 4 | "description": "CDS base database service", 5 | "homepage": "https://github.com/cap-js/cds-dbs/tree/main/db-service#cds-base-database-service", 6 | "repository": { 7 | "type": "git", 8 | "url": "git+https://github.com/cap-js/cds-dbs.git" 9 | }, 10 | "bugs": { 11 | "url": "https://github.com/cap-js/cds-dbs/issues" 12 | }, 13 | "keywords": [ 14 | "CAP", 15 | "CDS" 16 | ], 17 | "author": "SAP SE (https://www.sap.com)", 18 | "main": "index.js", 19 | "files": [ 20 | "lib", 21 | "CHANGELOG.md" 22 | ], 23 | "scripts": { 24 | "test": "cds-test" 25 | }, 26 | "dependencies": { 27 | "generic-pool": "^3.9.0" 28 | }, 29 | "peerDependencies": { 30 | "@sap/cds": ">=9" 31 | }, 32 | "license": "Apache-2.0" 33 | } 34 | -------------------------------------------------------------------------------- /db-service/test/assocs/schema1.cds: -------------------------------------------------------------------------------- 1 | entity Books { 2 | key ID : Integer; 3 | title : String(111); 4 | descr : String(1111); 5 | author : Association to Authors; // on author.ID = $self.author_ID; 6 | // author_ID : UUID; 7 | } 8 | 9 | entity Authors { 10 | key ID : Integer; 11 | name : String(111); 12 | books : Association to many Books on books.author = $self; 13 | } 14 | -------------------------------------------------------------------------------- /db-service/test/assocs/schema2.cds: -------------------------------------------------------------------------------- 1 | entity Books { 2 | key ID : Integer; 3 | title : String(111); 4 | descr : String(1111); 5 | author : Association to Authors on author.ID = $self.author_ID; 6 | author_ID : UUID; 7 | } 8 | 9 | entity Authors { 10 | key ID : Integer; 11 | name : String(111); 12 | books : Association to many Books on books.author = $self; 13 | } 14 | -------------------------------------------------------------------------------- /db-service/test/assocs/unmanaged-assocs.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-console */ 2 | const cds = require('../../../test/cds') 3 | require('../../index') // to extend cds.ql query objects with .forSQL() and alike 4 | 5 | describe('where exists assoc', () => { 6 | it('should work with managed assocs', async () => { 7 | cds.model = await cds.load(__dirname + '/schema1').then(cds.linked) 8 | const { Books, Authors } = cds.model.entities 9 | let qb = SELECT.from(Books).where('exists author').forSQL() 10 | let qa = SELECT.from(Authors).where('exists books').forSQL() 11 | console.log(qa) 12 | console.log(qb) 13 | }) 14 | 15 | it('should work with unmanaged assocs', async () => { 16 | cds.model = await cds.load(__dirname + '/schema2').then(cds.linked) 17 | const { Books, Authors } = 
cds.model.entities 18 | let qb = SELECT.from(Books).where('exists author').forSQL() 19 | let qa = SELECT.from(Authors).where('exists books').forSQL() 20 | // let qx = q.forSQL() // FAILS with: 21 | /* 22 | TypeError: Cannot read properties of undefined (reading 'map') 23 | 24 | 880 | // for unmanaged associations, replace name of association (on target side of on condition) with explicit table alias 25 | 881 | // REVISIT: where not exists SiblingEntity -> definition is the source entity, not the assoc 26 | > 882 | on.push(...definition.on.map((t) => { 27 | | ^ 28 | 883 | if(t.ref?.length > 1 && t.ref[0] === definition.name) 29 | 884 | return {ref: [current.alias, ...t.ref.slice(1)]} 30 | 885 | else 31 | 32 | at map (cds-sqlite/lib/db/sql/cqn4sql.js:882:32) 33 | at getWhereExistsSubquery (cds-sqlite/lib/db/sql/cqn4sql.js:398:38) 34 | at Function.getTransformedTokenStream [as cqn4sql] (cds-sqlite/lib/db/sql/cqn4sql.js:55:31) 35 | at Query.cqn4sql [as forSQL] (cds-sqlite/cds/index.js:7:60) 36 | at Object.forSQL (cds-sqlite/test/unmanaged-assocs/unmanaged-assocs.test.js:12:16) 37 | */ 38 | console.log(JSON.stringify(qb, null, 2)) 39 | console.log(JSON.stringify(qa, null, 2)) 40 | }) 41 | }) 42 | -------------------------------------------------------------------------------- /db-service/test/bookshop/srv/admin-service.cds: -------------------------------------------------------------------------------- 1 | using { bookshop as my } from '../db/schema'; 2 | service AdminService { 3 | entity Books as projection on my.Books; 4 | entity Authors as projection on my.Authors; 5 | } 6 | -------------------------------------------------------------------------------- /db-service/test/bookshop/srv/cat-service.cds: -------------------------------------------------------------------------------- 1 | using { bookshop as my } from '../db/schema'; 2 | service CatalogService @(path:'/browse') { 3 | 4 | /** For displaying lists of Books */ 5 | @readonly entity ListOfBooks as projection on Books 6 | excluding { descr }; 7 | 8 | /** For display in details pages */ 9 | @readonly entity Books as projection on my.Books { *, 10 | author.name as author 11 | } excluding { createdBy, modifiedBy }; 12 | 13 | // @requires: 'authenticated-user' 14 | action submitOrder ( book: Books:ID, amount: Integer ) returns { stock: Integer }; 15 | event OrderedBook : { book: Books:ID; amount: Integer; buyer: String }; 16 | } 17 | -------------------------------------------------------------------------------- /db-service/test/cds-infer/calculated-elements.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const _inferred = require('../../lib/infer') 4 | const cds = require('@sap/cds') 5 | const { expect } = cds.test 6 | 7 | describe('Infer types of calculated elements in select list', () => { 8 | let model 9 | beforeAll(async () => { 10 | model = cds.model = await cds.load(__dirname + '/../bookshop/db/booksWithExpr').then(cds.linked) 11 | }) 12 | it('calc element has type or has cast', () => { 13 | let inferred = _inferred( 14 | cds.ql`SELECT from booksCalc.Books { 15 | ID, 16 | area, 17 | area as strArea : String, 18 | cast(area as Integer) as areaCastedToInt 19 | }`, 20 | model, 21 | ) 22 | let { Books } = model.entities 23 | expect(inferred.elements).to.deep.equal({ 24 | ID: Books.elements.ID, 25 | area: Books.elements.area, 26 | strArea: { 27 | type: 'cds.String', 28 | }, 29 | areaCastedToInt: { 30 | type: 'cds.Integer', 31 | }, 32 | }) 33 | }) 34 | it('calc elements 
via wildcard', () => { 35 | let inferred = _inferred( 36 | cds.ql`SELECT from booksCalc.Books { * } excluding { length, width, height, stock, price}`, 37 | model, 38 | ) 39 | let { Books } = model.entities 40 | expect(inferred.elements).to.deep.equal({ 41 | ID: Books.elements.ID, 42 | title: Books.elements.title, 43 | author: Books.elements.author, 44 | stock2: Books.elements.stock2, 45 | ctitle: Books.elements.ctitle, 46 | areaS: Books.elements.areaS, 47 | area: Books.elements.area, 48 | volume: Books.elements.volume, 49 | storageVolume: Books.elements.storageVolume, 50 | authorLastName: Books.elements.authorLastName, 51 | authorName: Books.elements.authorName, 52 | authorFullName: Books.elements.authorFullName, 53 | authorFullNameWithAddress: Books.elements.authorFullNameWithAddress, 54 | authorAdrText: Books.elements.authorAdrText, 55 | authorAge: Books.elements.authorAge, 56 | youngAuthorName: Books.elements.youngAuthorName, 57 | authorAgeNativePG: Books.elements.authorAgeNativePG, 58 | authorAgeInDogYears: Books.elements.authorAgeInDogYears, 59 | }) 60 | }) 61 | }) 62 | -------------------------------------------------------------------------------- /db-service/test/cds-infer/model/nestedProjections.cds: -------------------------------------------------------------------------------- 1 | // based on cds.compiler specification which can be found under 'internalDoc/NestedProjectionByExample.md' 2 | entity Employee { 3 | key id : String; 4 | name : String; 5 | job : String; 6 | department : Association to one Department; 7 | assets : Association to many Assets on assets.owner = $self; 8 | office { 9 | floor : String; 10 | room : String; 11 | building : Association to one Building; 12 | address { 13 | city : String; 14 | street : String; 15 | country : Association to one Country; 16 | }; 17 | furniture { 18 | chairs: Integer; 19 | desks: Integer; 20 | } 21 | } 22 | } 23 | // to test inline with `*` as it doesnt suppport unmanaged associations 24 | entity EmployeeNoUnmanaged { 25 | key id : String; 26 | name : String; 27 | job : String; 28 | department : Association to one Department; 29 | office { 30 | floor : String; 31 | room : String; 32 | building : Association to one Building; 33 | address { 34 | city : String; 35 | street : String; 36 | country : Association to one Country; 37 | }; 38 | furniture { 39 | chairs: Integer; 40 | desks: Integer; 41 | } 42 | } 43 | } 44 | 45 | entity Department { 46 | key id : String; 47 | name : String; 48 | costCenter : String; 49 | head : Association to one Employee; 50 | } 51 | entity Building { 52 | key id : String; 53 | name: String; 54 | } 55 | entity Country { 56 | key code : String; 57 | } 58 | entity Assets { 59 | key id : String; 60 | owner : Association to one Employee; 61 | descr : String; 62 | lifetime { 63 | start : String; 64 | end : String; 65 | } 66 | } 67 | 68 | entity foo as select from Employee { 69 | office.{ 70 | floor, 71 | room 72 | } 73 | }; 74 | -------------------------------------------------------------------------------- /db-service/test/cqn4sql/A2J/classes.cds: -------------------------------------------------------------------------------- 1 | // test many-to-many relations 2 | entity Classrooms { 3 | key ID : Integer; 4 | name: String; 5 | info: { 6 | capacity: Integer; 7 | location: String; 8 | }; 9 | pupils : Association to many ClassroomsPupils 10 | on pupils.classroom = $self 11 | } 12 | 13 | entity Pupils { 14 | key ID : Integer; 15 | classrooms : Association to many ClassroomsPupils 16 | on classrooms.pupil = $self 
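// (the many-to-many relation between Classrooms and Pupils is realized via the
// ClassroomsPupils link entity below, which holds both associations as keys)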
17 | } 18 | 19 | entity ClassroomsPupils { 20 | key classroom : Association to Classrooms; 21 | key pupil : Association to Pupils; 22 | } 23 | // ----------------------------------------------------- 24 | 25 | entity ForeignKeyIsAssoc { 26 | key ID : Integer; 27 | my: Association to TeachersRoom; 28 | } 29 | 30 | entity TeachersRoom { 31 | key room: Association to Classrooms { ID as number, name, info.location }; 32 | } 33 | -------------------------------------------------------------------------------- /db-service/test/cqn4sql/A2J/schema.cds: -------------------------------------------------------------------------------- 1 | namespace a2j; 2 | 3 | entity Header { 4 | key id : Integer; 5 | key id2 : Integer; 6 | elt: String(100); 7 | toItem_selfMgd : Association to many Item on $self.toItem_selfMgd.toHeader = $self; 8 | toItem_selfUmgd : Association to many Item on 9 | ((($self.toItem_selfUmgd.toHeaderUnmanaged = $self))); 10 | toItem_combined: association to Item on 11 | (toItem_combined.toHeader = $self OR toItem_combined.toHeaderUnmanaged = $self) and 5 != 4; 12 | toItem_fwd: association to Item on id = toItem_fwd.id; 13 | } 14 | 15 | entity Item { 16 | key id : Integer; 17 | elt2: String(100); 18 | toHeader: Association to one Header; 19 | toHeaderUnmanaged: association to Header on elt2 = toHeaderUnmanaged.elt; 20 | } 21 | 22 | entity Folder { 23 | key id: Integer; 24 | nodeCompanyCode: association to Folder; 25 | assignments: composition of Assignment on $self = assignments.toFolder; 26 | }; 27 | 28 | entity Assignment { 29 | key id: Integer; 30 | toFolder: association to Folder; 31 | data: String; 32 | }; 33 | 34 | entity E { 35 | key id: String; 36 | key toF: association to F; 37 | data: String; 38 | }; 39 | 40 | entity F { 41 | key id: String; 42 | // toE.id requires forwardAssocPathStep to be restored after converting ON cond of toF 43 | toE: association to E on $self = toE.toF and toE.id = $user.id; 44 | }; 45 | 46 | entity Foo { 47 | key ID : Integer; 48 | bar : Association to Bar; 49 | barRenamed : Association to Bar { ID as renameID, foo }; 50 | buz : Composition of many Buz 51 | on buz.bar = bar 52 | and buz.foo.ID = ID; 53 | buzUnmanaged : Composition of many Buz 54 | on buzUnmanaged.bar.foo.ID = bar.foo.ID 55 | and buzUnmanaged.bar.ID = bar.ID 56 | and buzUnmanaged.foo.ID = ID; 57 | buzRenamed : Composition of many Buz 58 | on buzRenamed.barRenamed = barRenamed 59 | and buzRenamed.foo.ID = ID; 60 | } 61 | 62 | entity Bar { 63 | key ID : String; 64 | key foo : Association to Foo; 65 | buz : Composition of many Buz 66 | on buz.bar = $self; 67 | } 68 | 69 | entity Buz { 70 | key ID : String; 71 | key bar : Association to Bar; 72 | key barRenamed : Association to Bar { ID as renameID, foo }; 73 | foo : Association to Foo; 74 | } 75 | -------------------------------------------------------------------------------- /db-service/test/cqn4sql/A2J/sharedFKIdentity.cds: -------------------------------------------------------------------------------- 1 | // Resolve correct foreign key if multiple FKs share the same 2 | // target element (here C:c.d.e.ID) 3 | 4 | entity C { 5 | key c { d { e { ID : String(30); } } }; 6 | } 7 | 8 | entity A { 9 | // toB has two FKs 10 | // a_b_c_toB_foo_boo with access path a.b.c.toB.b.c.d.parent.c.d.e.ID 11 | // a_b_c_toB_bar_bas with access path a.b.c.toB.e.f.g.child.c.d.e.ID 12 | // both FKs end up in same target element C:c.d.e.ID, artifact identity is 13 | // not sufficient to identify the correct foreign key 14 | key a { b { c { toB : 
Association to B { b.c.d.parent as foo, e.f.g.child as bar } } } }; 15 | } 16 | 17 | entity B { 18 | key b { c { d { parent : Association to C { c.d.e.ID as boo }; }; }; }; 19 | key e { f { g { child : Association to C { c.d.e.ID as bas }; }; }; }; 20 | } 21 | -------------------------------------------------------------------------------- /db-service/test/cqn4sql/API.test.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Make sure cqn4sql always works on a copy of the incoming query, enabling 3 | * extension scenarios and repetitive calls. 4 | */ 5 | 'use strict' 6 | 7 | const cqn4sql = require('../../lib/cqn4sql') 8 | const cds = require('@sap/cds') 9 | const { expect } = cds.test 10 | describe('Repetitive calls to cqn4sql must work', () => { 11 | let model 12 | beforeAll(async () => { 13 | model = await cds.load(__dirname + '/../bookshop/db/schema').then(cds.linked) 14 | }) 15 | 16 | it('query can be extended by another element', () => { 17 | const original = cds.ql`SELECT from bookshop.Books as Books { ID }` 18 | let query = cqn4sql(original, model) 19 | expect(query).to.deep.equal(cds.ql`SELECT from bookshop.Books as Books { Books.ID }`) 20 | original.SELECT.columns.push({ ref: ['title'] }) 21 | query = cqn4sql(original, model) 22 | expect(query).to.deep.equal(cds.ql`SELECT from bookshop.Books as Books { Books.ID, Books.title }`) 23 | original.SELECT.where = ['exists', { ref: ['author'] }] 24 | query = cqn4sql(original, model) 25 | expect(query).to.deep.equal( 26 | cds.ql` 27 | SELECT from bookshop.Books as Books 28 | { Books.ID, Books.title } 29 | WHERE EXISTS ( 30 | SELECT 1 from bookshop.Authors as $a where $a.ID = Books.author_ID 31 | ) 32 | `, 33 | ) 34 | }) 35 | 36 | it('accepts empty select list', () => { 37 | let query = cqn4sql(cds.ql`SELECT from bookshop.Books as Books { }`, model) 38 | expect(query).to.deep.equal(cds.ql`SELECT from bookshop.Books as Books { }`) 39 | }) 40 | 41 | it('yields the same result if same query is transformed multiple times', () => { 42 | const input = cds.ql`SELECT from bookshop.Books:author` 43 | let query = cqn4sql(input, model) 44 | let query2 = cqn4sql(input, model) 45 | expect(query).to.deep.equal(query2) 46 | }) 47 | it('yields the same result if same query is transformed multiple times (2)', () => { 48 | const input = cds.ql`SELECT from bookshop.Books where author.name like '%Poe'` 49 | let query = cqn4sql(input, model) 50 | let query2 = cqn4sql(input, model) 51 | expect(query2).to.deep.equal(query) 52 | }) 53 | }) 54 | -------------------------------------------------------------------------------- /db-service/test/cqn4sql/INSERT.test.js: -------------------------------------------------------------------------------- 1 | // not much to do for cqn4sql in case of INSERT/UPSERT 2 | 'use strict' 3 | const cqn4sql = require('../../lib/cqn4sql') 4 | const cds = require('@sap/cds') 5 | const { expect } = cds.test 6 | 7 | describe('INSERT', () => { 8 | let model 9 | beforeAll(async () => { 10 | model = cds.model = await cds.load(__dirname + '/../bookshop/db/schema').then(cds.linked) 11 | }) 12 | it('simple', () => { 13 | let i = INSERT.into('bookshop.Books') 14 | const query = cqn4sql(i, model) 15 | expect(query.INSERT.into).to.deep.equal({ ref: ['bookshop.Books'] }) 16 | }) 17 | it('path expression in into clause', () => { 18 | let i = INSERT.into('bookshop.Books:author') 19 | const query = cqn4sql(i, model) 20 | expect(query.INSERT.into).to.deep.equal({ ref: ['bookshop.Authors'] }) 21 | }) 22 | 
it('path expression in into clause with alias', () => { 23 | let i = { 24 | INSERT: { 25 | into: { ref: ['bookshop.Books', 'author'], as: 'Foo' }, 26 | }, 27 | } 28 | const result = cqn4sql(i, model) 29 | expect(result.INSERT.into).to.deep.equal({ ref: ['bookshop.Authors'], as: 'Foo' }) 30 | }) 31 | it('path expression in into clause with UPSERT', () => { 32 | let upsert = UPSERT.into('bookshop.Books:author') 33 | const result = cqn4sql(upsert, model) 34 | expect(result.UPSERT.into).to.deep.equal({ ref: ['bookshop.Authors'] }) 35 | }) 36 | }) 37 | -------------------------------------------------------------------------------- /db-service/test/cqn4sql/model/cap_issue.cds: -------------------------------------------------------------------------------- 1 | // here we gather special scenarios which came up through tickets 2 | // which are not easily reproducible by our standard models 3 | aspect cuid : { 4 | key ID : Int16; 5 | } 6 | 7 | entity Foo : cuid { 8 | text: localized String; 9 | owner : Composition of many Owner 10 | on owner.foo = $self; 11 | activeOwners : Association to many ActiveOwner 12 | on activeOwners.foo = $self; 13 | owner2 : Composition of many Owner2 14 | on owner2.foo = $self; 15 | specialOwners : Association to many SpecialOwner2 16 | on specialOwners.foo = $self; 17 | 18 | boos : Association to many Boo 19 | on boos.foo = $self; 20 | } 21 | 22 | entity ActiveOwner as projection on Owner where validFrom <= $now 23 | and validTo >= $now; 24 | 25 | entity SpecialOwner2 as projection on Owner2 where validFrom <= $now 26 | and validTo >= $now 27 | and isSpecial = true; 28 | 29 | entity Owner2 : cuid { 30 | foo : Association to one Foo; 31 | owner2 : Association to one Employees; 32 | isSpecial : Boolean default false; 33 | validFrom : Date; 34 | validTo : Date; 35 | } 36 | 37 | entity Owner : cuid { 38 | foo : Association to one Foo; 39 | owner : Association to one Employees; 40 | validFrom : Date; 41 | validTo : Date; 42 | } 43 | 44 | entity Employees { 45 | key userID : String; 46 | } 47 | 48 | entity Boo : cuid { 49 | foo_ID : UUID; 50 | text: localized String; 51 | foo : Association to one Foo on foo.ID = foo_ID; 52 | } 53 | -------------------------------------------------------------------------------- /db-service/test/cqn4sql/model/collaborations.cds: -------------------------------------------------------------------------------- 1 | // inspired by a customer bug report 2 | // where a nested expand on an association with 3 | // multiple conditions next to the `$self` backlink led to issues 4 | aspect cuid { 5 | key id: Integer; 6 | } 7 | 8 | entity Collaborations : cuid { 9 | subCollaborations: Composition of many SubCollaborations on subCollaborations.collaboration = $self; 10 | leads : Association to many CollaborationLeads on leads.collaboration = $self and leads.isLead = true; 11 | collaborationLogs: Association to many CollaborationLogs on collaborationLogs.collaboration = $self; 12 | activeOwners: Association to ActiveOwners on activeOwners.collaboration = $self; 13 | } 14 | entity ActiveOwners : cuid { 15 | collaboration: Association to Collaborations; 16 | owner_userID: Int16; 17 | } 18 | entity SubCollaborations : cuid { 19 | collaboration: Association to Collaborations; 20 | leads : Association to many SubCollaborationAssignments on leads.subCollaboration = $self and leads.isLead = true; 21 | } 22 | 23 | entity CollaborationLeads: cuid { 24 | collaboration: Association to Collaborations; 25 | scholar_userID: Int16; 26 | participant: Association to 
CollaborationParticipants; 27 | isLead: Boolean; 28 | } 29 | 30 | entity SubCollaborationAssignments : cuid { 31 | subCollaboration : Association to one SubCollaborations; 32 | isLead : Boolean default false; 33 | participant: Association to CollaborationParticipants; 34 | } 35 | entity CollaborationParticipants : cuid { 36 | scholar_userID: Int16; 37 | } 38 | entity CollaborationApplications : cuid { 39 | subCollaborations: Composition of many SubCollaborationApplications on subCollaborations.application = $self; 40 | } 41 | 42 | entity SubCollaborationApplications : cuid { 43 | application : Association to one CollaborationApplications; 44 | } 45 | 46 | entity CollaborationLogs : cuid { 47 | collaboration: Association to Collaborations; 48 | } 49 | -------------------------------------------------------------------------------- /db-service/test/cqn4sql/model/keyless.cds: -------------------------------------------------------------------------------- 1 | // path expressions along `Books:author` are not possible 2 | entity Books { 3 | key ID : Integer; 4 | title : String; 5 | stock : Integer; 6 | author : Association to Authors; 7 | authorName: String = author.name; 8 | authorWithExplicitForeignKey: Association to Authors { ID }; 9 | my: Association to Books; 10 | } 11 | 12 | entity Authors { 13 | ID : Integer; 14 | name : String; 15 | book: Association to Books; 16 | // backlink has no foreign keys... 17 | bookWithBackLink: Association to Books on bookWithBackLink.author = $self; 18 | } 19 | -------------------------------------------------------------------------------- /db-service/test/cqn4sql/model/nestedProjections.cds: -------------------------------------------------------------------------------- 1 | // based on cds.compiler specification which can be found under 'internalDoc/NestedProjectionByExample.md' 2 | entity Employee { 3 | key id : String; 4 | name : String; 5 | job : String; 6 | department : Association to one Department; 7 | assets : Association to many Assets on assets.owner = $self; 8 | office { 9 | floor : String; 10 | room : String; 11 | building : Association to one Building; 12 | address { 13 | city : String; 14 | street : String; 15 | country : Association to one Country; 16 | }; 17 | furniture { 18 | chairs: Integer; 19 | desks: Integer; 20 | } 21 | } 22 | } 23 | // to test inline with `*` as it doesn't support unmanaged associations 24 | entity EmployeeNoUnmanaged { 25 | key id : String; 26 | name : String; 27 | job : String; 28 | department : Association to one Department; 29 | office { 30 | floor : String; 31 | room : String; 32 | building : Association to one Building; 33 | address { 34 | city : String; 35 | street : String; 36 | country : Association to one Country; 37 | }; 38 | furniture { 39 | chairs: Integer; 40 | desks: Integer; 41 | } 42 | } 43 | } 44 | 45 | entity Department { 46 | key id : String; 47 | name : String; 48 | costCenter : String; 49 | head : Association to one Employee; 50 | } 51 | entity Building { 52 | key id : String; 53 | name: String; 54 | } 55 | entity Country { 56 | key code : String; 57 | } 58 | entity Assets { 59 | key id : String; 60 | owner : Association to one Employee; 61 | descr : String; 62 | lifetime { 63 | start : String; 64 | end : String; 65 | } 66 | } 67 | 68 | entity foo as select from Employee { 69 | office.{ 70 | floor, 71 | room 72 | } 73 | }; 74 | -------------------------------------------------------------------------------- /db-service/test/cqn4sql/model/update.cds:
-------------------------------------------------------------------------------- 1 | // don't use virtual key `isActiveEntity` in `UPDATE … where () in ` 2 | // in case of path expressions 3 | namespace bookshop; 4 | 5 | entity Books { 6 | key ID : Integer; 7 | title : String; 8 | stock : Integer; 9 | author : Association to Authors; 10 | } 11 | 12 | entity Authors { 13 | key ID : Integer; 14 | name : String; 15 | alive : Boolean; 16 | } 17 | 18 | entity Orders { 19 | key ID: UUID; 20 | Items: composition of many { 21 | key book: Association to Books; 22 | price: Decimal = book.stock * 2; 23 | } 24 | } 25 | 26 | service CatalogService { 27 | @odata.draft.enabled 28 | entity Books as projection on bookshop.Books; 29 | 30 | entity Authors as projection on bookshop.Authors; 31 | } 32 | -------------------------------------------------------------------------------- /db-service/test/cqn4sql/model/withParameters.cds: -------------------------------------------------------------------------------- 1 | // model with a view and a parameterized entity 2 | // model with a view and a parameterized entity 3 | entity Books { 4 | key ID : Integer; 5 | author: Association to Authors; 6 | }; 7 | 8 | @cds.persistence.exists 9 | entity Authors(P1: Integer, P2: String(100)) { 10 | key ID : Integer; 11 | name : String; 12 | }; 13 | 14 | @cds.persistence.exists 15 | @cds.persistence.udf 16 | entity BooksUDF { 17 | key ID : Integer; 18 | author: Association to AuthorsUDF; 19 | }; 20 | 21 | @cds.persistence.exists 22 | @cds.persistence.udf 23 | entity AuthorsUDF { 24 | key ID : Integer; 25 | name : String; 26 | }; 27 | 28 | entity PBooks(P1 : Integer, P2 : String(100)) as 29 | select from Books; 30 | -------------------------------------------------------------------------------- /db-service/test/cqn4sql/not-supported.test.js: -------------------------------------------------------------------------------- 1 | // here we can collect features which are not (yet) supported 2 | 'use strict' 3 | const cqn4sql = require('../../lib/cqn4sql') 4 | const cds = require('@sap/cds') 5 | const { expect } = cds.test 6 | const _inferred = require('../../lib/infer') 7 | 8 | describe('not supported features', () => { 9 | let model 10 | beforeAll(async () => { 11 | model = cds.model = await cds.load(__dirname + '/../bookshop/db/schema').then(cds.linked) 12 | }) 13 | 14 | it('does not transform queries with multiple query sources, but just returns the inferred query', () => { 15 | let query = cds.ql`SELECT from bookshop.Books, bookshop.Receipt` 16 | expect(cqn4sql(query, model)).to.deep.equal(_inferred(query, model)) 17 | // .to.throw(/Queries with multiple query sources are not supported/) 18 | }) 19 | }) 20 | -------------------------------------------------------------------------------- /db-service/test/cqn4sql/replacements.test.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Test replacements which cqn4sql performs.
3 | */ 4 | 'use strict' 5 | 6 | const cqn4sql = require('../../lib/cqn4sql') 7 | const cds = require('@sap/cds') 8 | const { expect } = cds.test 9 | describe('in where', () => { 10 | let model 11 | beforeAll(async () => { 12 | model = await cds.load(__dirname + '/../bookshop/db/schema').then(cds.linked) 13 | }) 14 | 15 | it('replace ` in ` in where', () => { 16 | const original = SELECT.from('bookshop.Books') 17 | .alias('Books') 18 | .columns(['ID']) 19 | .where({ ID: { in: [] } }) 20 | 21 | expect(cqn4sql(original, model)).to.deep.equal( 22 | cds.ql` 23 | SELECT from bookshop.Books as Books { Books.ID } where Books.ID = null 24 | `, 25 | ) 26 | }) 27 | it('replace `not in ` in where', () => { 28 | const original = SELECT.from('bookshop.Books').alias('Books').columns(['ID']) 29 | original.SELECT.where = [{ ref: ['ID'] }, 'not', 'in', { list: [] }] 30 | 31 | expect(cqn4sql(original, model)).to.deep.equal( 32 | cds.ql` 33 | SELECT from bookshop.Books as Books { Books.ID } where Books.ID is not null 34 | `, 35 | ) 36 | }) 37 | it('replace `in ` in join condition induced by infix filter', () => { 38 | const query = SELECT.from('bookshop.Books') 39 | .alias('Books') 40 | .columns({ 41 | ref: [{ id: 'author', where: [{ ref: ['name'] }, 'not', 'in', { list: [] }] }, 'ID'], 42 | }) 43 | 44 | expect(cqn4sql(query, model)).to.deep.equal( 45 | cds.ql` 46 | SELECT from bookshop.Books as Books 47 | left join bookshop.Authors as author 48 | on author.ID = Books.author_ID and author.name is not null 49 | { author.ID as author_ID } 50 | `, 51 | ) 52 | }) 53 | it('replace `in ` in where exists subquery induced by scoped query', () => { 54 | const query = SELECT.from({ 55 | ref: [{ id: 'bookshop.Books', where: [{ ref: ['title'] }, 'not', 'in', { list: [] }] }, 'author'], 56 | }).columns({ 57 | ref: ['ID'], 58 | }) 59 | 60 | expect(cqn4sql(query, model)).to.deep.equal( 61 | cds.ql` 62 | SELECT from bookshop.Authors as $a 63 | { $a.ID } 64 | where exists ( 65 | SELECT 1 from bookshop.Books as $B where $B.author_ID = $a.ID and $B.title is not null 66 | ) 67 | `, 68 | ) 69 | }) 70 | }) 71 | -------------------------------------------------------------------------------- /db-service/test/deep/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "@cap-js/sqlite": "*" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /db-service/test/etc/cds.clone.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | const { expect } = cds.test 3 | 4 | describe('Cloning queries', () => { 5 | it('should create effectively equal clones with cds.ql.clone()', () => { 6 | let q1 = SELECT.from('Books').where({ ID: 201 }) 7 | let q2 = cds.ql.clone(q1) 8 | expect(q2).to.not.equal(q1) // using strict equal (===) 9 | expect(q2).to.deep.equal(q1) // not using strict equal 10 | expect(q2).to.eql(q1) // shortcut for .to.deep.equal 11 | expect(q2).eqls(q1) // shortcut for .to.deep.equal 12 | }) 13 | 14 | it('creates flat queries with .flat()', () => { 15 | let q1 = SELECT.from('Books').where({ ID: 201 }) 16 | let q2 = cds.ql.clone(q1) 17 | 18 | expect( 19 | JSON.stringify(q1), //> {"SELECT":{"from":{"ref":["Books"]},"where":[{"ref":["ID"]},"=",{"val":201}]}} 20 | ).to.not.eql( 21 | JSON.stringify(q2), //> {"SELECT":{}} 22 | ) 23 | 24 | expect( 25 | JSON.stringify(q1), //> {"SELECT":{"from":{"ref":["Books"]},"where":[{"ref":["ID"]},"=",{"val":201}]}} 
26 | ).to.eql( 27 | JSON.stringify(q2.flat()), //> {"SELECT":{"from":{"ref":["Books"]},"where":[{"ref":["ID"]},"=",{"val":201}]}} 28 | ) 29 | 30 | // WARNING: q.flat() modifies q! -> never use that in productive code !!! 31 | }) 32 | 33 | it(`supports shallow clones`, () => { 34 | let q1 = SELECT.from('Books').where({ ID: 201 }) 35 | let q2 = { ...q1 } 36 | 37 | expect(q2).to.eql(q1) //> IMPORTANT: breaks when we add enumerable elements to cds.ql.Query.prototype !! 38 | 39 | // 1) compare content 40 | expect(q2.SELECT).to.eql(q1.SELECT) 41 | 42 | // 2) compare shallow copies 43 | expect({ ...q2 }).to.eql({ ...q1 }) 44 | 45 | // 3) force-assign the same proto 46 | Object.setPrototypeOf(q2, q1.__proto__) 47 | expect(q2).to.eql(q1) //> now it is equal 48 | }) 49 | 50 | it(`works well with JSON-clones`, () => { 51 | let q1 = SELECT.from('Books').where({ ID: 201 }) 52 | let q2 = JSON.parse(JSON.stringify(q1)) 53 | 54 | expect(q2).to.eql(q1) //> IMPORTANT: breaks when we add enumerable elements to cds.ql.Query.prototype !! 55 | 56 | // 1) compare content 57 | expect(q2.SELECT).to.eql(q1.SELECT) 58 | 59 | // 2) compare shallow copies 60 | expect({ ...q2 }).to.eql({ ...q1 }) 61 | 62 | // 3) force-assign the same proto 63 | Object.setPrototypeOf(q2, q1.__proto__) 64 | expect(q2).to.eql(q1) //> now it is equal 65 | }) 66 | }) 67 | -------------------------------------------------------------------------------- /db-service/test/tsc/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "tsc", 3 | "version": "1.0.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "tsc", 9 | "version": "1.0.0", 10 | "license": "ISC" 11 | } 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /db-service/test/tsc/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "tsc", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC" 11 | } 12 | -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | import cds from '@sap/cds/eslint.config.mjs' 2 | export default [ ...cds.recommended ] 3 | -------------------------------------------------------------------------------- /hana/README.md: -------------------------------------------------------------------------------- 1 | # CDS database service for SAP HANA 2 | 3 | Welcome to the SAP HANA database service for [SAP Cloud Application Programming Model](https://cap.cloud.sap) Node.js, based on streamlined database architecture. 4 | 5 | ## Setup 6 | 7 | All you need to do is to install the database package, as follows: 8 | 9 | ```sh 10 | npm add @cap-js/hana 11 | ``` 12 | 13 | Learn more about setup and usage in the [respective database guide](https://cap.cloud.sap/docs/guides/databases-hana). 14 | 15 | ## Support 16 | 17 | This project is open to feature requests/suggestions, bug reports etc. via [GitHub issues](https://github.com/cap-js/cds-dbs/issues). 18 | 19 | ## Contribution 20 | 21 | Contribution and feedback are encouraged and always welcome. 
For more information about how to contribute, the project structure, as well as additional contribution information, see our [Contribution Guidelines](CONTRIBUTING.md). 22 | 23 | ## Code of Conduct 24 | 25 | We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone. By participating in this project, you agree to abide by its [Code of Conduct](CODE_OF_CONDUCT.md) at all times. 26 | 27 | ## Licensing 28 | 29 | Copyright 2024 SAP SE or an SAP affiliate company and cds-dbs contributors. Please see our [LICENSE](LICENSE) for copyright and license information. Detailed information including third-party components and their licensing/copyright information is available [via the REUSE tool](https://api.reuse.software/info/github.com/cap-js/cds-dbs). 30 | -------------------------------------------------------------------------------- /hana/cds-plugin.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | if (!cds.env.fiori.lean_draft) { 4 | throw new Error('"@cap-js/hana" only works if cds.fiori.lean_draft is enabled. Please adapt your configuration.') 5 | } 6 | 7 | if (cds.requires.db?.impl === '@cap-js/hana') { 8 | cds.env.sql.dialect = 'hana' 9 | } 10 | -------------------------------------------------------------------------------- /hana/index.js: -------------------------------------------------------------------------------- 1 | module.exports = require('./lib/HANAService') 2 | -------------------------------------------------------------------------------- /hana/lib/drivers/index.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | Object.defineProperties(module.exports, { 4 | hdb: { get: () => require('./hdb') }, 5 | 'hana-client': { get: () => require('./hana-client') }, 6 | default: { 7 | get() { 8 | try { 9 | const projectPackage = require(cds.root + '/package.json') 10 | const dependencies = { 11 | ...projectPackage.dependencies, 12 | ...(process.env.NODE_ENV !== 'production' && projectPackage.devDependencies), 13 | } 14 | // Have a bias to hdb as the default driver 15 | if (dependencies.hdb) return module.exports.hdb 16 | if (dependencies['@sap/hana-client']) return module.exports['hana-client'] 17 | } catch { 18 | console.trace(`WARNING! Unable to require the project's package.json at "${cds.root + '/package.json'}". 
Please check your project setup.`) // eslint-disable-line no-console 19 | } 20 | 21 | // When no driver is installed still try to load any of the drivers 22 | try { 23 | return module.exports.hdb 24 | } catch { 25 | return module.exports['hana-client'] 26 | } 27 | }, 28 | }, 29 | }) 30 | -------------------------------------------------------------------------------- /hana/lib/scripts/deploy.sql: -------------------------------------------------------------------------------- 1 | DO 2 | BEGIN 3 | DECLARE userName NVARCHAR(100); 4 | 5 | -- Define HDI return types 6 | DECLARE RETURN_CODE int; 7 | DECLARE REQUEST_ID bigint; 8 | DECLARE MESSAGES _SYS_DI.TT_MESSAGES; 9 | DECLARE DIFF _SYS_DI.TT_FILESFOLDERS_STATUS; 10 | 11 | -- Define HDI input types 12 | DECLARE FILES _SYS_DI.TT_FILESFOLDERS_CONTENT; 13 | DECLARE DEPLOY _SYS_DI.TT_FILESFOLDERS; 14 | DECLARE UNDEPLOY _SYS_DI.TT_FILESFOLDERS; 15 | DECLARE FILES_PARAMS _SYS_DI.TT_FILESFOLDERS_PARAMETERS; 16 | 17 | NO_PARAMS = SELECT * FROM _SYS_DI.T_NO_PARAMETERS; 18 | 19 | FILES = SELECT * FROM JSON_TABLE('{{{JSON_FILES}}}', '$[*]' COLUMNS ( 20 | PATH NVARCHAR(511) PATH '$.path', 21 | CONTENT NVARCHAR(2147483647) PATH '$.content' 22 | )); 23 | CALL {{{CONTAINER_NAME}}}#DI.WRITE(:FILES, :NO_PARAMS, :RETURN_CODE, :REQUEST_ID, :MESSAGES); 24 | 25 | DEPLOY = SELECT PATH FROM :FILES; 26 | CALL {{{CONTAINER_NAME}}}#DI.STATUS(:DEPLOY, :NO_PARAMS, :RETURN_CODE, :REQUEST_ID, :MESSAGES, :DIFF); 27 | -- SELECT * FROM :DIFF; -- Return the changed files 28 | 29 | CALL {{{CONTAINER_NAME}}}#DI.MAKE(:DEPLOY, :UNDEPLOY, :FILES_PARAMS, :NO_PARAMS, :RETURN_CODE, :REQUEST_ID, :MESSAGES); 30 | -- SELECT * FROM :MESSAGES; -- Return the make log messages 31 | END; 32 | -------------------------------------------------------------------------------- /hana/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@cap-js/hana", 3 | "version": "2.1.1", 4 | "description": "CDS database service for SAP HANA", 5 | "homepage": "https://cap.cloud.sap/", 6 | "keywords": [ 7 | "CAP", 8 | "CDS", 9 | "HANA" 10 | ], 11 | "author": "SAP SE (https://www.sap.com)", 12 | "main": "index.js", 13 | "files": [ 14 | "cds-plugin.js", 15 | "lib", 16 | "CHANGELOG.md" 17 | ], 18 | "scripts": { 19 | "test": "(([ -z \"${HANA_HOST}\" ] && npm start) || true) && npm run test:plain && npm run test:bookshop:quoted", 20 | "test:bookshop:quoted": "cds_sql_names=quoted cds-test bookshop", 21 | "test:plain": "cds-test", 22 | "test:remote": "cds-test", 23 | "start": "npm run start:hce || npm run start:hxe", 24 | "start:hce": "cd ./tools/docker/hce/ && ./start.sh", 25 | "start:hxe": "cd ./tools/docker/hxe/ && ./start.sh" 26 | }, 27 | "dependencies": { 28 | "@cap-js/db-service": "^2.1.1", 29 | "hdb": "^0.19.5" 30 | }, 31 | "peerDependencies": { 32 | "@sap/hana-client": "^2", 33 | "@sap/cds": ">=9" 34 | }, 35 | "peerDependenciesMeta": { 36 | "@sap/hana-client": { 37 | "optional": true 38 | } 39 | }, 40 | "devDependencies": { 41 | "@sap/hana-client": ">=2" 42 | }, 43 | "cds": { 44 | "requires": { 45 | "kinds": { 46 | "sql": { 47 | "[production]": { 48 | "kind": "hana" 49 | } 50 | }, 51 | "hana": { 52 | "impl": "@cap-js/hana" 53 | } 54 | }, 55 | "db": "sql" 56 | } 57 | }, 58 | "license": "Apache-2.0" 59 | } 60 | -------------------------------------------------------------------------------- /hana/test/compliance: -------------------------------------------------------------------------------- 1 | ../../test 
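The driver resolution in `hana/lib/drivers/index.js` above is biased towards `hdb`: it only resolves `@sap/hana-client` when the consuming project declares that package but not `hdb` (or when `hdb` cannot be loaded) — which matches `hana/package.json`, where `hdb` is a regular dependency and `@sap/hana-client` an optional peer dependency. A minimal sketch of checking which driver would be resolved for the current project (hypothetical usage, not code from this repository):

```js
// Hypothetical check, not part of the repository: ask the lazy getters from
// hana/lib/drivers/index.js which driver module the `default` getter resolves to.
const drivers = require('@cap-js/hana/lib/drivers')
const picked = drivers.default === drivers.hdb ? 'hdb' : '@sap/hana-client'
console.log(picked) //> 'hdb' unless only @sap/hana-client is declared as a dependency
```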
-------------------------------------------------------------------------------- /hana/test/fuzzy.cds: -------------------------------------------------------------------------------- 1 | using {sap.capire.bookshop.BooksAnnotated as BooksAnnotated} from '../../test/bookshop/db/schema.cds'; 2 | 3 | annotate BooksAnnotated with @cds.search: {title, descr, currency.code}; 4 | annotate BooksAnnotated:title with @(Search.ranking: HIGH, Search.fuzzinessThreshold: 0.9); 5 | annotate BooksAnnotated:descr with @(Search.ranking: LOW, Search.fuzzinessThreshold: 0.9); -------------------------------------------------------------------------------- /hana/test/hana-functions.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('../../test/cds') 2 | 3 | describe('HANA native functions', () => { 4 | const { expect } = cds.test(__dirname, 'fuzzy.cds') 5 | 6 | describe('current_timestamp', () => { 7 | // TODO: resolve `hdb` driver behavior of rounding decimal numbers in timestamps 8 | test.skip('no arguments', async () => { 9 | const cqn = { SELECT: { 10 | one: true, 11 | from: {ref: ['DUMMY']}, 12 | columns: [{func: 'CURRENT_UTCTIMESTAMP', as: 'NO'}] 13 | }} 14 | 15 | const res = await cds.run(cqn) 16 | 17 | expect(res.NO.match(/\.(\d\d\d)0{0,4}/)).not.to.be.null // default 3 18 | }) 19 | 20 | // HXE does not allow args 21 | test.skip('0 skips ms precision', async () => { 22 | const cqn = { SELECT: { 23 | one: true, 24 | from: {ref: ['DUMMY']}, 25 | columns: [ 26 | {func: 'current_utctimestamp', as: 'NO'}, 27 | {func: 'current_utctimestamp', args: [{val: 0}], as: 'P0'}] 28 | }} 29 | 30 | const res = await cds.run(cqn) 31 | 32 | expect(res.P0.match(/\.0000000/)).not.to.be.null 33 | }) 34 | 35 | // HXE does not allow args 36 | test.skip('arbitrary values', async () => { 37 | const cqn = { SELECT: { 38 | one: true, 39 | from: {ref: ['DUMMY']}, 40 | columns: [ 41 | {func: 'current_utctimestamp', args: [{val: 3}], as: 'P3'}, 42 | {func: 'current_utctimestamp', args: [{val: 7}], as: 'P7'}] 43 | }} 44 | 45 | const res = await cds.run(cqn) 46 | 47 | expect(res.P3.match(/\.(\d\d\d)0000/)).not.to.be.null 48 | expect(res.P7.match(/\.(\d\d\d\d\d\d\d)/)).not.to.be.null 49 | }) 50 | }) 51 | }) -------------------------------------------------------------------------------- /hana/test/param-views.cds: -------------------------------------------------------------------------------- 1 | using {sap.capire.bookshop.Books as Books} from '../../test/bookshop/db/schema.cds'; 2 | 3 | namespace sap.capire.bookshop; 4 | 5 | entity ParamBooks(available : Integer) as 6 | select from Books { 7 | ID, 8 | title, 9 | stock, 10 | // Take foreign key for author association 11 | author.ID as author_ID, 12 | // author, Compiler does not like associations in parameterized views 13 | } 14 | where 15 | stock <= :available; 16 | -------------------------------------------------------------------------------- /hana/test/param-views.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('../../test/cds') 2 | 3 | describe('Parameterized view', () => { 4 | const { expect } = cds.test(__dirname, 'param-views.cds') 5 | 6 | const tests = [ 7 | // ===== required queries ===== 8 | { 9 | available: { val: 0 }, 10 | books: 0, 11 | }, { 12 | // all books with <= 12 stock 13 | available: { val: 12 }, 14 | books: 2, 15 | }, { 16 | // all books (with <= 1000 stock) 17 | available: { val: 1000 }, 18 | books: 5, 19 | }, 20 | // ===== just 
works queries ===== 21 | { 22 | // cast is required as the SQL becomes (? * ?) 23 | // all books with <= 22 stock 24 | available: CXL`cast(11 * 2 as cds.Integer)`, 25 | books: 3, 26 | }, { 27 | // the book with the least stock 28 | available: SELECT`min(stock)`.from('sap.capire.bookshop.Books'), 29 | books: 1, 30 | } 31 | ] 32 | 33 | test.each(tests)('select', async ({ available, books }) => { 34 | const { ParamBooks, Books } = cds.entities('sap.capire.bookshop') 35 | 36 | // Apply author association to parameterized view 37 | ParamBooks.elements.author = Books.elements.author 38 | 39 | const root = { 40 | id: ParamBooks.name, 41 | args: { available } 42 | } 43 | 44 | const [booksRes, authorsRes, expandRes] = await Promise.all([ 45 | SELECT.from({ ref: [root] }), 46 | SELECT.from({ ref: [root, 'author'] }), 47 | SELECT`ID,stock,author{ID}`.from({ ref: [root] }), 48 | ]) 49 | 50 | expect(booksRes).to.have.property('length').to.be.eq(books) 51 | const authorKeys = expandRes.map(r => r.author.ID) 52 | expect(authorsRes.filter(r => authorKeys.includes(r.ID))).to.have.property('length').to.be.eq(authorsRes.length) 53 | }) 54 | }) -------------------------------------------------------------------------------- /hana/test/plain-sql.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('../../test/cds.js') 2 | const bookshop = cds.utils.path.resolve(__dirname, '../../test/bookshop') 3 | 4 | describe('HANA Plain SQL', () => { 5 | describe.each([{mode: 'quoted'}, {mode: 'plain'}])('$mode', ({mode}) => { 6 | cds.env.sql.names = mode 7 | const { expect } = cds.test(bookshop) 8 | 9 | test('Plain sql', async () => { 10 | const res = await cds.run('SELECT * FROM sap_capire_bookshop_Books') 11 | expect(res.length).to.be.eq(5) 12 | 13 | const [res1, res2] = await cds.run([ 14 | 'SELECT * FROM sap_capire_bookshop_Books', 15 | 'SELECT * FROM sap_capire_bookshop_Books', 16 | ]) 17 | expect(res1.length).to.be.eq(5) 18 | expect(res2.length).to.be.eq(5) 19 | }) 20 | 21 | test('Plain sql with values', async () => { 22 | const res = await cds.run('SELECT * FROM sap_capire_bookshop_Books where ID = ?', [201]) 23 | expect(res.length).to.be.eq(1) 24 | }) 25 | 26 | test('Plain sql with multiple values', async () => { 27 | const res = await cds.run('SELECT * FROM sap_capire_bookshop_Books where ID = ?', [[201], [252]]) 28 | expect(res.length).to.be.eq(2) 29 | }) 30 | }) 31 | }) 32 | -------------------------------------------------------------------------------- /hana/test/proc.cds: -------------------------------------------------------------------------------- 1 | namespace sap.capire; 2 | 3 | entity TestEntity { 4 | ID: Integer; 5 | title: String(32); 6 | } 7 | 8 | service bla { 9 | entity STestEntity as projection on TestEntity; 10 | } 11 | -------------------------------------------------------------------------------- /hana/test/service.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | "impl": "@cap-js/hana", 3 | "credentials": { 4 | "user": process.env.HANA_USER || "SYSTEM", 5 | "password": process.env.HANA_PASSWORD || "Manager1", 6 | "host": process.env.HANA_HOST || "localhost", 7 | "port": process.env.HANA_PORT || "30041", 8 | "useTLS": true, 9 | "encrypt": true, 10 | "sslValidateCertificate": false, 11 | "disableCloudRedirect": true, 12 | "driver": "hdb" 13 | } 14 | } -------------------------------------------------------------------------------- /hana/test/spatial.test.js: 
-------------------------------------------------------------------------------- 1 | const cds = require('../../test/cds.js') 2 | 3 | describe('Spatial Types', () => { 4 | const { data, expect } = cds.test(__dirname + '/../../test/compliance/resources') 5 | data.autoIsolation(true) 6 | data.autoReset() 7 | 8 | test('point', async () => { 9 | const { HANA_ST } = cds.entities('edge.hana.literals') 10 | const point = 'POINT(1 1)' 11 | await INSERT({ point: null }).into(HANA_ST) 12 | await UPDATE(HANA_ST).data({ point }) 13 | const result = await SELECT.one.from(HANA_ST) 14 | expect(result.point).to.contain('POINT') 15 | }) 16 | 17 | test('geometry', async () => { 18 | const { HANA_ST } = cds.entities('edge.hana.literals') 19 | const geometry = 'POINT(1 1)' 20 | await INSERT({ geometry: null }).into(HANA_ST) 21 | await UPDATE(HANA_ST).data({ geometry }) 22 | const result = await SELECT.one.from(HANA_ST) 23 | expect(result.geometry).to.contain('POINT') 24 | }) 25 | }) 26 | -------------------------------------------------------------------------------- /hana/test/stream.test.js: -------------------------------------------------------------------------------- 1 | require('../../test/cds.js') 2 | describe('hana', () => { 3 | // REVISIT: fix streaming SQL syntax errors 4 | require('../../sqlite/test/general/stream.test') 5 | }) 6 | -------------------------------------------------------------------------------- /hana/test/temporal.test.js: -------------------------------------------------------------------------------- 1 | require('../../sqlite/test/general/temporal.test') 2 | -------------------------------------------------------------------------------- /hana/test/versioning.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('../../test/cds') 2 | 3 | describe('Versioned table', () => { 4 | before(() => { 5 | // Include the versioning feature model extension 6 | cds.requires.toggles = true 7 | }) 8 | 9 | const { expect } = cds.test( 10 | __dirname + '/../../test/compliance/resources', 11 | // Additional model definition is required, because feature flags don't work correctly without mtx 12 | __dirname + '/../../test/compliance/resources/fts/versioning/hana.cds' 13 | ) 14 | 15 | test('validation', async () => { 16 | const { versioned } = cds.entities('edge.hana.versioning') 17 | const { history } = cds.entities('edge.hana.versioning.versioned') 18 | 19 | const sel = SELECT.one`*, history[order by validFrom asc] {*}`.from(versioned) 20 | 21 | const ID = cds.utils.uuid() 22 | await INSERT([{ ID, data: 'original' }]).into(versioned) 23 | const org = await sel.clone() 24 | 25 | await UPSERT([{ ID, data: 'upserted' }]).into(versioned) 26 | await UPDATE(versioned).data({ data: 'updated' }).where({ ID }) 27 | const upd = await sel.clone() 28 | 29 | await DELETE(versioned) 30 | const del = await sel.clone() 31 | const his = await SELECT.from(history).orderBy('validFrom') 32 | 33 | expect(org).property('data').eq('original') 34 | expect(upd).property('data').eq('updated') 35 | expect(del).falsy 36 | 37 | expect(org).property('history').length(0) 38 | expect(upd).property('history').length(2) 39 | expect(upd).property('history').property('0').property('data').eq('original') 40 | expect(upd).property('history').property('1').property('data').eq('upserted') 41 | 42 | expect(his).length(3) 43 | }) 44 | 45 | }) -------------------------------------------------------------------------------- /hana/tools/README.md: 
-------------------------------------------------------------------------------- 1 | # Tools 2 | 3 | This folder contains some tools that are used to maintain the `HANA` service `lib` folder with up-to-date data. 4 | 5 | ## collation 6 | 7 | Takes the collation dictionary and converts it to the `collations.json` file for easy consumption. In case the collation dictionary changes, the `collation.js` file can be used to update the `collations.json` file. 8 | 9 | ## docker 10 | 11 | Contains scripts that allow anyone to start a `HANA` instance in their local Docker. These scripts will be automatically used when running `npm run setup` in the `cds-dbs/hana` folder. The scripts automatically detect the latest `HANA` version available for the current system, `pull` the image, run any additional configuration scripts, and run the respective health check to verify that the system was fully initialized. 12 | 13 | Initial setup takes significantly longer than subsequent setups; most of that time is spent pulling the image. Once the image is on the system, the initial boot takes a few minutes. After that, restarting the container takes under a minute. 14 | 15 | To keep GitHub Actions as fast as possible, a prepared image is pushed to the GitHub repository. This means that the download happens within GitHub infrastructure and the boot time is equivalent to the restart time rather than the initial boot time. 16 | -------------------------------------------------------------------------------- /hana/tools/collation/collation.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs') 2 | 3 | // source: Expression/Dictionary/csv/collations/collations.csv 4 | // Read collation.csv as collation dictionary 5 | const src = fs.readFileSync(__dirname + '/collations.csv') 6 | 7 | // Load collation.wasm binary 8 | const wasmModule = new global.WebAssembly.Module(fs.readFileSync(__dirname + '/collation.wasm')) 9 | // Prepare memory object for parsing 10 | const wasmMemory = new global.WebAssembly.Memory({ initial: Math.ceil(src.length / (1 << 16)) }) 11 | // Create wasm instance for execution 12 | const wasmInstance = new global.WebAssembly.Instance(wasmModule, { 13 | js: { 14 | mem: wasmMemory, 15 | }, 16 | }) 17 | const { extract } = wasmInstance.exports 18 | 19 | // Copy collation.csv data into wasm memory 20 | const buf = new Uint8Array(wasmMemory.buffer, 0, wasmMemory.buffer.byteLength) 21 | src.copy(buf, 0, 0, buf.length) 22 | 23 | // Parse collation.csv into JSON map 24 | const start = extract(src.length) 25 | const result = Buffer.from(wasmMemory.buffer.slice(start - 1, src.length)) 26 | // Add JSON wrapper 27 | result[0] = '{'.charCodeAt(0) 28 | result[result.length - 1] = '}'.charCodeAt(0) 29 | 30 | // Validate JSON result 31 | JSON.parse(result) 32 | 33 | // Write JSON to lib folder 34 | fs.writeFileSync(__dirname + '/../../lib/collations.json', result) 35 | -------------------------------------------------------------------------------- /hana/tools/collation/collation.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cap-js/cds-dbs/591e1aa7942b9b430d80e116de12bf6d3847a3ab/hana/tools/collation/collation.wasm -------------------------------------------------------------------------------- /hana/tools/docker/hce/hana.yml: -------------------------------------------------------------------------------- 1 | version: '3.1' 2 | 3 | services: 4 | hana: 5
| image: hana-master:current 6 | restart: always 7 | hostname: hcehost 8 | networks: 9 | - backend 10 | command: 11 | - --init 12 | - role=worker:services=indexserver,dpserver,diserver:database=H00:create 13 | - --system-password 14 | - text:Manager1 15 | - --database-password 16 | - text:Manager1 17 | ports: 18 | # Currently the only port being used is 30041 19 | - '30041:30041' 20 | # - '30013:30013' 21 | # - '30017:30017' 22 | # - '30040:30040' 23 | # - '30042:30042' 24 | # - '30043:30043' 25 | 26 | jaeger: 27 | networks: 28 | backend: 29 | # This is the host name used in Prometheus scrape configuration. 30 | aliases: [ spm_metrics_source ] 31 | image: jaegertracing/jaeger:${JAEGER_VERSION:-latest} 32 | volumes: 33 | - "./jaeger.yaml:/etc/jaeger/config.yml" 34 | command: ["--config", "/etc/jaeger/config.yml"] 35 | ports: 36 | - "16686:16686" 37 | - "8888:8888" 38 | - "8889:8889" 39 | - "4317:4317" 40 | - "4318:4318" 41 | 42 | prometheus: 43 | networks: 44 | - backend 45 | image: prom/prometheus:v3.1.0 46 | volumes: 47 | - "./prometheus.yml:/etc/prometheus/prometheus.yml" 48 | ports: 49 | - "9090:9090" 50 | 51 | networks: 52 | backend: 53 | -------------------------------------------------------------------------------- /hana/tools/docker/hce/jaeger.yaml: -------------------------------------------------------------------------------- 1 | service: 2 | extensions: [jaeger_storage, jaeger_query] 3 | pipelines: 4 | traces: 5 | receivers: [otlp] 6 | processors: [batch] 7 | exporters: [jaeger_storage_exporter, spanmetrics] 8 | metrics/spanmetrics: 9 | receivers: [spanmetrics] 10 | exporters: [prometheus] 11 | telemetry: 12 | resource: 13 | service.name: jaeger 14 | metrics: 15 | level: detailed 16 | address: 0.0.0.0:8888 17 | logs: 18 | level: DEBUG 19 | 20 | extensions: 21 | jaeger_query: 22 | max_clock_skew_adjust: 30s 23 | storage: 24 | traces: some_storage 25 | metrics: some_metrics_storage 26 | jaeger_storage: 27 | backends: 28 | some_storage: 29 | memory: 30 | max_traces: 100000 31 | metric_backends: 32 | some_metrics_storage: 33 | prometheus: 34 | endpoint: http://prometheus:9090 35 | normalize_calls: true 36 | normalize_duration: true 37 | 38 | connectors: 39 | spanmetrics: 40 | 41 | receivers: 42 | otlp: 43 | protocols: 44 | grpc: 45 | http: 46 | endpoint: "0.0.0.0:4318" 47 | 48 | processors: 49 | batch: 50 | 51 | exporters: 52 | jaeger_storage_exporter: 53 | trace_storage: some_storage 54 | prometheus: 55 | endpoint: "0.0.0.0:8889" 56 | -------------------------------------------------------------------------------- /hana/tools/docker/hce/latest.js: -------------------------------------------------------------------------------- 1 | const dns = require('dns') 2 | const https = require('https') 3 | 4 | const host = 'repositories.cloud.sap' 5 | 6 | const hasAccess = () => { 7 | dns.lookup(host, { all: true }, (err, res) => { 8 | if (err || res.length < 4) return process.exit(1) 9 | fetchLatest() 10 | }) 11 | } 12 | 13 | const fetchLatest = () => { 14 | const req = https.request({ 15 | hostname: `public.int.${host}`, 16 | port: '443', 17 | path: '/ui/api/v1/mds/versions', 18 | method: 'POST', 19 | headers: { 20 | 'Content-Type': 'application/json', 21 | 'X-Requested-With': 'XMLHttpRequest', 22 | }, 23 | }) 24 | 25 | req.on('response', async res => { 26 | let response = '' 27 | for await (const chunk of res) { 28 | response += chunk 29 | } 30 | console.log(JSON.parse(response).data.versions.edges[0].node.name) // eslint-disable-line no-console 31 | process.exit(0) 32 | }) 33 | 
req.on('error', error => { 34 | console.error(error) // eslint-disable-line no-console 35 | process.exit(1) 36 | }) 37 | 38 | req.end( 39 | '{"graphQL":{"query":"query ($filter: VersionFilter\u0021, $first: Int, $orderBy: VersionOrder) { versions (filter: $filter, first: $first, orderBy: $orderBy) { edges { node { name } } } }","variables":{"filter":{"packageId":"gav://com.sap.hana.cloud.hana:hana-master","name":"*","ignorePreRelease":true},"first":1,"orderBy":{"field":"NAME_SEMVER","direction":"DESC"}}}}', 40 | ) 41 | } 42 | 43 | hasAccess() 44 | -------------------------------------------------------------------------------- /hana/tools/docker/hce/prometheus.yml: -------------------------------------------------------------------------------- 1 | global: 2 | scrape_interval: 15s # Set the scrape interval to every 15 seconds. Default is every 1 minute. 3 | evaluation_interval: 15s # Evaluate rules every 15 seconds. The default is every 1 minute. 4 | # scrape_timeout is set to the global default (10s). 5 | 6 | scrape_configs: 7 | - job_name: aggregated-trace-metrics 8 | static_configs: 9 | - targets: ['spm_metrics_source:8889'] 10 | -------------------------------------------------------------------------------- /hana/tools/docker/hce/ready.sh: -------------------------------------------------------------------------------- 1 | until docker cp ./otel.sh hce-hana-1:/otel.sh 2 | do 3 | sleep 1 4 | done 5 | 6 | docker exec hce-hana-1 /bin/bash -c "while ! ./check_hana_health ; do sleep 10 ; done;/otel.sh &" 7 | docker exec -it hce-hana-1 /bin/bash -c "\ 8 | cd /usr/sap/H00/HDB00;\ 9 | . ./hdbenv.sh;\ 10 | hdbuserstore -i SET SYSDBKEY localhost:30013@SYSTEMDB SYSTEM Manager1;\ 11 | hdbsql -U \"SYSDBKEY\" -e -ssltrustcert \"SELECT COUNT(ACTIVE_STATUS) FROM SYS_DATABASES.M_SERVICES WHERE ACTIVE_STATUS='YES'\";\ 12 | hdbsql -U \"SYSDBKEY\" -e -ssltrustcert \"ALTER SYSTEM ALTER CONFIGURATION ('indexserver.ini', 'DATABASE', 'H00') SET ('session', 'enable_proxy_protocol') = 'false' WITH RECONFIGURE;\";\ 13 | hdbsql -U \"SYSDBKEY\" -e -ssltrustcert \"ALTER SYSTEM ALTER CONFIGURATION ('global.ini', 'System') SET ('public_hostname_resolution', 'use_default_route') = 'name' WITH RECONFIGURE;\";\ 14 | hdbsql -U \"SYSDBKEY\" -e -ssltrustcert \"ALTER SYSTEM ALTER CONFIGURATION ('global.ini', 'System') SET ('expensive_statement', 'enable') = 'TRUE' WITH RECONFIGURE;\";\ 15 | hdbsql -U \"SYSDBKEY\" -e -ssltrustcert \"ALTER SYSTEM ALTER CONFIGURATION ('global.ini', 'System') SET ('expensive_statement', 'threshold_duration') = '0' WITH RECONFIGURE;\";\ 16 | hdbsql -U \"SYSDBKEY\" -e -ssltrustcert \"ALTER SYSTEM ALTER CONFIGURATION ('global.ini', 'System') SET ('expensive_statement', 'trace_parameter_values') = 'FALSE' WITH RECONFIGURE;\";\ 17 | hdbsql -U \"SYSDBKEY\" -e -ssltrustcert \"ALTER SYSTEM ALTER CONFIGURATION ('global.ini', 'System') SET ('expensive_statement', 'use_in_memory_tracing') = 'FALSE' WITH RECONFIGURE;\";\ 18 | " 19 | -------------------------------------------------------------------------------- /hana/tools/docker/hce/start.sh: -------------------------------------------------------------------------------- 1 | exists=$(docker images hana-master:current -q); 2 | if [ $exists ]; then 3 | docker compose -f hana.yml up -d; 4 | ./ready.sh; 5 | else 6 | ./update.sh; 7 | if [ $? 
-ne 0 ]; then 8 | echo "hana-master:current image not found"; 9 | exit 1; 10 | fi 11 | ./start.sh; 12 | fi 13 | -------------------------------------------------------------------------------- /hana/tools/docker/hce/update.sh: -------------------------------------------------------------------------------- 1 | HOST=repositories.cloud.sap 2 | VERSION=$(node ./latest.js) 3 | if [ -z "$VERSION" ]; then 4 | echo "No version found" 5 | exit 1 6 | fi 7 | 8 | IMAGE=public.int.$HOST/com.sap.hana.cloud.hana/hana-master:$VERSION 9 | 10 | echo $VERSION 11 | 12 | if [ $(docker images $IMAGE -q) ]; then 13 | echo 'latest image is up-to-date'; 14 | else 15 | docker pull $IMAGE; 16 | echo 'latest image has been updated' 17 | fi 18 | 19 | docker tag $IMAGE hana-master:current 20 | -------------------------------------------------------------------------------- /hana/tools/docker/hxe/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM saplabs/hanaexpress:latest 2 | 3 | COPY ./start-hdi.sql /usr/sap/HXE/start-hdi.sql 4 | COPY ./setup.sh /setup 5 | 6 | # Do initial boot 7 | RUN /setup 8 | -------------------------------------------------------------------------------- /hana/tools/docker/hxe/ci.yml: -------------------------------------------------------------------------------- 1 | # Use postgres/example user/password credentials 2 | version: '3.1' 3 | 4 | services: 5 | hana: 6 | image: ${IMAGE_ID}:${TAG} 7 | restart: always 8 | hostname: buildkitsandbox 9 | ulimits: 10 | nofile: 11 | soft: 1048576 12 | hard: 1048576 13 | #sysctls: 14 | # - kernel.shmmax=1073741824 15 | # - net.ipv4.ip_local_port_range='60000 65535' 16 | # - kernel.shmmni=4096 17 | # - kernel.shmall=8388608 18 | ports: 19 | # Currently the only port being used is 39041 20 | - '30041:39041' 21 | # - '30013:39013' 22 | # - '30015:39015' 23 | # - '30041-30045:39041-39045' 24 | # - '1128-1129:1128-1129' 25 | # - '50013-50014:59013-59014' 26 | # - '30030-30033:39030-39033' 27 | # - '51000-51060:51000-51060' 28 | # - '53075:53075' 29 | -------------------------------------------------------------------------------- /hana/tools/docker/hxe/hana.yml: -------------------------------------------------------------------------------- 1 | # Use postgres/example user/password credentials 2 | version: '3.1' 3 | 4 | services: 5 | hana: 6 | image: saplabs/hanaexpress:${VERSION} 7 | restart: always 8 | hostname: hxehost 9 | command: 10 | - --agree-to-sap-license 11 | - --dont-check-system 12 | - --dont-check-mount-points 13 | - --master-password 14 | - Manager1 15 | ulimits: 16 | nofile: 17 | soft: 1048576 18 | hard: 1048576 19 | #sysctls: 20 | # - kernel.shmmax=1073741824 21 | # - net.ipv4.ip_local_port_range='60000 65535' 22 | # - kernel.shmmni=4096 23 | # - kernel.shmall=8388608 24 | ports: 25 | # Currently the only port being used is 39041 26 | - '30041:39041' 27 | # - '30013:39013' 28 | # - '30015:39015' 29 | # - '30041-30045:39041-39045' 30 | # - '1128-1129:1128-1129' 31 | # - '50013-50014:59013-59014' 32 | # - '30030-30033:39030-39033' 33 | # - '51000-51060:51000-51060' 34 | # - '53075:53075' 35 | -------------------------------------------------------------------------------- /hana/tools/docker/hxe/latest.js: -------------------------------------------------------------------------------- 1 | const https = require('https') 2 | 3 | const host = 'docker.com' 4 | const fetchLatest = () => { 5 | const req = https.request({ 6 | hostname: `hub.${host}`, 7 | port: '443', 8 | path: 
'/v2/repositories/saplabs/hanaexpress/tags/?page_size=1&page=1&name&ordering', 9 | }) 10 | 11 | req.on('response', async res => { 12 | let response = '' 13 | for await (const chunk of res) { 14 | response += chunk 15 | } 16 | console.log(JSON.parse(response).results[0].name) // eslint-disable-line no-console 17 | process.exit(0) 18 | }) 19 | req.on('error', error => { 20 | console.error(error) // eslint-disable-line no-console 21 | process.exit(1) 22 | }) 23 | 24 | req.end() 25 | } 26 | 27 | fetchLatest() 28 | -------------------------------------------------------------------------------- /hana/tools/docker/hxe/ready.sh: -------------------------------------------------------------------------------- 1 | until docker cp ./start-hdi.sql hxe-hana-1:/usr/sap/HXE/start-hdi.sql 2 | do 3 | sleep 1 4 | done 5 | 6 | docker exec hxe-hana-1 bash -c "until /check_hana_health -n -e ready-status > /dev/null; do sleep 1; done;" 7 | echo "HANA has started" 8 | docker exec hxe-hana-1 bash -c "/usr/sap/HXE/HDB90/exe/hdbsql -i 90 -d SYSTEMDB -u SYSTEM -p Manager1 -I /usr/sap/HXE/start-hdi.sql > /dev/null && sleep 10" 9 | echo "HDI has been enabled" 10 | -------------------------------------------------------------------------------- /hana/tools/docker/hxe/setup.sh: -------------------------------------------------------------------------------- 1 | /run_hana --agree-to-sap-license --dont-check-system --dont-check-mount-points --master-password Manager1 & 2 | until /check_hana_health -n -e ready-status > /dev/null; do sleep 1; done; 3 | /usr/sap/HXE/HDB90/exe/hdbsql -i 90 -d SYSTEMDB -u SYSTEM -p Manager1 -I /usr/sap/HXE/start-hdi.sql 4 | 5 | kill -TERM -- -0 6 | wait 7 | -------------------------------------------------------------------------------- /hana/tools/docker/hxe/start-hdi.sql: -------------------------------------------------------------------------------- 1 | -- Ensures that the HDI is enabled on the system 2 | DO 3 | BEGIN 4 | DECLARE dbName NVARCHAR(25) = 'HXE'; 5 | DECLARE diserverCount INT = 0; 6 | SELECT COUNT(*) INTO diserverCount FROM SYS_DATABASES.M_SERVICES WHERE SERVICE_NAME = 'diserver' AND DATABASE_NAME = :dbName AND ACTIVE_STATUS = 'YES'; 7 | IF diserverCount = 0 THEN 8 | EXEC 'ALTER DATABASE ' || :dbName || ' ADD ''diserver'''; 9 | END IF; 10 | END; 11 | 12 | -- Grants HDI privileges to SYSTEM 13 | CREATE LOCAL TEMPORARY TABLE #PRIVILEGES LIKE _SYS_DI.TT_API_PRIVILEGES; 14 | INSERT INTO #PRIVILEGES (PRINCIPAL_NAME, PRIVILEGE_NAME, OBJECT_NAME) SELECT 'SYSTEM', PRIVILEGE_NAME, OBJECT_NAME FROM _SYS_DI.T_DEFAULT_DI_ADMIN_PRIVILEGES; 15 | CALL _SYS_DI.GRANT_CONTAINER_GROUP_API_PRIVILEGES('_SYS_DI', #PRIVILEGES, _SYS_DI.T_NO_PARAMETERS, ?, ?, ?); 16 | DROP TABLE #PRIVILEGES; 17 | 18 | -- Forces all statistics tables to use NSE 19 | CALL _SYS_STATISTICS.SHARED_ALTER_PAGE_LOADABLE; 20 | 21 | -- Selects all tables that are loaded and unloads them from memory 22 | DO 23 | BEGIN 24 | DECLARE v_isbn VARCHAR(20) = ''; 25 | DECLARE CURSOR c_cursor1 (v_isbn VARCHAR(20)) FOR 26 | SELECT schema_name,table_name FROM m_cs_tables WHERE loaded != 'NO'; 27 | 28 | FOR cur_row AS c_cursor1(v_isbn) DO 29 | EXEC 'UNLOAD ' || :cur_row.schema_name || '.' 
|| :cur_row.table_name || ' DELETE PERSISTENT MEMORY'; 30 | END FOR; 31 | END; 32 | 33 | -- Configure maximum memory allocation to 8192MiB as this does not translate to physical memory 34 | ALTER SYSTEM ALTER CONFIGURATION ('global.ini', 'system') SET ('memorymanager', 'global_allocation_limit') = '10240' WITH RECONFIGURE; 35 | -------------------------------------------------------------------------------- /hana/tools/docker/hxe/start.sh: -------------------------------------------------------------------------------- 1 | if [ $IMAGE_ID ] && [ $TAG ]; then 2 | echo "Using prepared HXE image" 3 | docker compose -f ci.yml up -d; 4 | else 5 | export VERSION=$(node ./latest.js); 6 | docker compose -f hana.yml up -d; 7 | fi 8 | ./ready.sh; 9 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@cap-js/db-services", 3 | "version": "1.3.1", 4 | "description": "This is a monorepo for our SQL Database Services.", 5 | "author": "SAP SE (https://www.sap.com)", 6 | "workspaces": [ 7 | "db-service", 8 | "sqlite", 9 | "postgres", 10 | "hana" 11 | ], 12 | "devDependencies": { 13 | "@cap-js/cds-test": ">=0.2.0", 14 | "axios": "^1" 15 | }, 16 | "scripts": { 17 | "test": "npm t -w db-service -w sqlite", 18 | "lint": "npx eslint ." 19 | }, 20 | "license": "Apache-2.0" 21 | } 22 | -------------------------------------------------------------------------------- /postgres/cds-plugin.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | const { fs, path } = cds.utils 3 | 4 | if (!cds.env.fiori.lean_draft) { 5 | throw new Error('"@cap-js/postgres" only works if cds.fiori.lean_draft is enabled. Please adapt your configuration.') 6 | } 7 | 8 | // requires @sap/cds-dk version >= 7.5.0 9 | cds.build?.register?.('postgres', class PostgresBuildPlugin extends cds.build.Plugin { 10 | 11 | static taskDefaults = { src: cds.env.folders.db } 12 | 13 | static hasTask() { return cds.requires.db?.kind === 'postgres' } 14 | 15 | init() { 16 | // different from the default build output structure 17 | this.task.dest = path.join(cds.root, cds.env.build.target !== '.' ? 
cds.env.build.target : 'gen', 'pg') 18 | } 19 | 20 | async build() { 21 | const model = await this.model() 22 | if (!model) return 23 | 24 | const promises = [] 25 | if (fs.existsSync(path.join(this.task.src, 'package.json'))) { 26 | promises.push(this.copy(path.join(this.task.src, 'package.json')).to('package.json')) 27 | } else { 28 | const packageJson = { 29 | dependencies: { 30 | '@sap/cds': '^8', 31 | '@cap-js/postgres': '^1' 32 | }, 33 | scripts: { 34 | start: 'cds-deploy' 35 | } 36 | } 37 | const assertIntegrity = cds.env?.features?.assert_integrity 38 | if (assertIntegrity) { 39 | packageJson.cds ??= {} 40 | packageJson.cds.features ??= {} 41 | packageJson.cds.features.assert_integrity = assertIntegrity 42 | } 43 | promises.push( 44 | this.write(packageJson).to('package.json') 45 | ) 46 | } 47 | promises.push(this.write(cds.compile.to.json(model)).to(path.join('db', 'csn.json'))) 48 | 49 | let data 50 | if (fs.existsSync(path.join(this.task.src, 'data'))) { 51 | data = 'data' 52 | } else if (fs.existsSync(path.join(this.task.src, 'csv'))) { 53 | data = 'csv' 54 | } 55 | if (data) { 56 | promises.push(this.copy(data).to(path.join('db', 'data'))) 57 | } 58 | return Promise.all(promises) 59 | } 60 | }) 61 | -------------------------------------------------------------------------------- /postgres/index.js: -------------------------------------------------------------------------------- 1 | module.exports = require('./lib/PostgresService.js') 2 | -------------------------------------------------------------------------------- /postgres/lib/ReservedWords.json: -------------------------------------------------------------------------------- 1 | { 2 | "ALL": 1, 3 | "ANALYSE": 1, 4 | "ANALYZE": 1, 5 | "AND": 1, 6 | "ANY": 1, 7 | "ARRAY": 1, 8 | "AS": 1, 9 | "ASC": 1, 10 | "ASYMMETRIC": 1, 11 | "AUTHORIZATION": 1, 12 | "BINARY": 1, 13 | "BOTH": 1, 14 | "CASE": 1, 15 | "CAST": 1, 16 | "CHECK": 1, 17 | "COLLATE": 1, 18 | "COLLATION": 1, 19 | "COLUMN": 1, 20 | "CONCURRENTLY": 1, 21 | "CONSTRAINT": 1, 22 | "CREATE": 1, 23 | "CROSS": 1, 24 | "CURRENT_CATALOG": 1, 25 | "CURRENT_DATE": 1, 26 | "CURRENT_ROLE": 1, 27 | "CURRENT_SCHEMA": 1, 28 | "CURRENT_TIME": 1, 29 | "CURRENT_TIMESTAMP": 1, 30 | "CURRENT_USER": 1, 31 | "DEFAULT": 1, 32 | "DEFERRABLE": 1, 33 | "DESC": 1, 34 | "DISTINCT": 1, 35 | "DO": 1, 36 | "ELSE": 1, 37 | "END": 1, 38 | "EXCEPT": 1, 39 | "FALSE": 1, 40 | "FETCH": 1, 41 | "FOR": 1, 42 | "FOREIGN": 1, 43 | "FREEZE": 1, 44 | "FROM": 1, 45 | "FULL": 1, 46 | "GRANT": 1, 47 | "GROUP": 1, 48 | "HAVING": 1, 49 | "ILIKE": 1, 50 | "IN": 1, 51 | "INITIALLY": 1, 52 | "INNER": 1, 53 | "INTERSECT": 1, 54 | "INTO": 1, 55 | "IS": 1, 56 | "ISNULL": 1, 57 | "JOIN": 1, 58 | "LATERAL": 1, 59 | "LEADING": 1, 60 | "LEFT": 1, 61 | "LIKE": 1, 62 | "LIMIT": 1, 63 | "LOCALTIME": 1, 64 | "LOCALTIMESTAMP": 1, 65 | "NATURAL": 1, 66 | "NOT": 1, 67 | "NOTNULL": 1, 68 | "NULL": 1, 69 | "OFFSET": 1, 70 | "ON": 1, 71 | "ONLY": 1, 72 | "OR": 1, 73 | "ORDER": 1, 74 | "OUTER": 1, 75 | "OVERLAPS": 1, 76 | "PLACING": 1, 77 | "PRIMARY": 1, 78 | "REFERENCES": 1, 79 | "RETURNING": 1, 80 | "RIGHT": 1, 81 | "SELECT": 1, 82 | "SESSION_USER": 1, 83 | "SIMILAR": 1, 84 | "SOME": 1, 85 | "SYMMETRIC": 1, 86 | "TABLE": 1, 87 | "TABLESAMPLE": 1, 88 | "THEN": 1, 89 | "TO": 1, 90 | "TRAILING": 1, 91 | "TRUE": 1, 92 | "UNION": 1, 93 | "UNIQUE": 1, 94 | "USER": 1, 95 | "USING": 1, 96 | "VARIADIC": 1, 97 | "VERBOSE": 1, 98 | "WHEN": 1, 99 | "WHERE": 1, 100 | "WINDOW": 1, 101 | "WITH": 1 102 | } 103 | 
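`ReservedWords.json` above lists the Postgres keywords that cannot be used as bare identifiers. A small sketch of the kind of quoting decision such a map supports (an assumed usage pattern, not the actual `PostgresService` implementation):

```js
// Assumed usage pattern — not the actual PostgresService code: identifiers that
// collide with a Postgres reserved word get double-quoted in generated SQL.
const ReservedWords = require('@cap-js/postgres/lib/ReservedWords.json')
const quote = name => (name.toUpperCase() in ReservedWords ? `"${name}"` : name)
console.log(quote('order')) //> "order"
console.log(quote('stock')) //> stock
```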
-------------------------------------------------------------------------------- /postgres/lib/session.json: -------------------------------------------------------------------------------- 1 | { 2 | "$user.id": "cap.applicationuser", 3 | "$user.locale": "cap.locale", 4 | "$now": "cap.now", 5 | "$valid.from": "cap.valid_from", 6 | "$valid.to": "cap.valid_to" 7 | } 8 | -------------------------------------------------------------------------------- /postgres/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@cap-js/postgres", 3 | "version": "2.0.2", 4 | "description": "CDS database service for Postgres", 5 | "homepage": "https://github.com/cap-js/cds-dbs/tree/main/postgres#cds-database-service-for-postgres", 6 | "repository": { 7 | "type": "git", 8 | "url": "git+https://github.com/cap-js/cds-dbs.git" 9 | }, 10 | "bugs": { 11 | "url": "https://github.com/cap-js/cds-dbs/issues" 12 | }, 13 | "keywords": [ 14 | "CAP", 15 | "CDS", 16 | "Postgres" 17 | ], 18 | "author": "SAP SE (https://www.sap.com)", 19 | "main": "index.js", 20 | "files": [ 21 | "cds-plugin.js", 22 | "lib", 23 | "CHANGELOG.md" 24 | ], 25 | "scripts": { 26 | "test": "npm start && cds-test", 27 | "start": "docker compose -f pg-stack.yml up -d" 28 | }, 29 | "dependencies": { 30 | "@cap-js/db-service": "^2", 31 | "pg": "^8" 32 | }, 33 | "peerDependencies": { 34 | "@sap/cds": ">=9", 35 | "@sap/cds-dk": ">=9" 36 | }, 37 | "peerDependenciesMeta": { 38 | "@sap/cds-dk": { 39 | "optional": true 40 | } 41 | }, 42 | "cds": { 43 | "requires": { 44 | "kinds": { 45 | "sql": { 46 | "[production]": { 47 | "kind": "postgres" 48 | }, 49 | "[pg!]": { 50 | "kind": "postgres", 51 | "credentials": { 52 | "host": "localhost", 53 | "port": 5432, 54 | "user": "postgres", 55 | "password": "postgres", 56 | "database": "postgres" 57 | } 58 | } 59 | }, 60 | "postgres": { 61 | "impl": "@cap-js/postgres", 62 | "kind": "postgres", 63 | "dialect": "postgres", 64 | "vcap": { 65 | "label": "postgresql-db" 66 | }, 67 | "schema_evolution": "auto" 68 | } 69 | }, 70 | "db": "sql" 71 | }, 72 | "schema": { 73 | "buildTaskType": { 74 | "name": "postgres", 75 | "description": "Postgres database build plugin" 76 | } 77 | } 78 | }, 79 | "license": "Apache-2.0" 80 | } 81 | -------------------------------------------------------------------------------- /postgres/pg-stack.yml: -------------------------------------------------------------------------------- 1 | # Use postgres/example user/password credentials 2 | version: '3.1' 3 | 4 | services: 5 | db: 6 | image: postgres:16-alpine 7 | restart: always 8 | environment: 9 | POSTGRES_PASSWORD: postgres 10 | ports: 11 | - '5432:5432' 12 | command: ['postgres', '-c', 'log_statement=all'] 13 | ### use at will at dev time - save mem on ci time 14 | # adminer: 15 | # image: adminer 16 | # restart: always 17 | # ports: 18 | # - 8080:8080 19 | -------------------------------------------------------------------------------- /postgres/test/beershop/db/_i18n/i18n.properties: -------------------------------------------------------------------------------- 1 | TypeCheck=Type Check 2 | TypeChecks=Type Checks 3 | Details=Details -------------------------------------------------------------------------------- /postgres/test/beershop/db/data/csw-Beers.csv: -------------------------------------------------------------------------------- 1 | ID,name,abv,ibu,brewery_ID 2 | b8c3fc14-22e2-4f42-837a-e6134775a186,Lagerbier Hell,5.2,12,9c937100-d459-491f-a72d-81b2929af10f 3 | 
9e1704e3-6fd0-4a5d-bfb1-13ac47f7976b,Schönramer Hell,5,20,fa6b959e-3a01-40ef-872e-6030ee4de4e5 4 | 9473beeb-1a74-4589-82b8-4fdcce1d66d5,Vollbier,4.9,0,6832ba00-c20e-48b4-a685-dca4aff2ca13 5 | 31c9bc4c-7ba3-4feb-b94c-96e396d848e0,Festbier,5.5,0,6832ba00-c20e-48b4-a685-dca4aff2ca13 6 | f5578f62-9ae1-4829-8cf1-b8f13e4c5dc3,Grünerla,4.9,11,d265037f-4996-4195-a1fb-b0a8ab5c6f30 7 | 8a67be38-be68-401f-a2ee-0accd920f7c2,Hallerndorfer Landbier Hell,4.9,0,4aeebbed-90c2-4bdd-aa70-d8eecb8eaebb 8 | fe9479d4-6f60-47e5-b565-a215e6292935,Hallerndorfer Hausbrauerbier,5,0,4aeebbed-90c2-4bdd-aa70-d8eecb8eaebb 9 | 1efd2b35-6fd4-4ac5-a73e-64dfc3fb123b,Bitter 42,5.5,42,4aeebbed-90c2-4bdd-aa70-d8eecb8eaebb 10 | ba398ee9-9bfe-45b0-b586-9445af402a1a,Summer 69,5.9,12,4aeebbed-90c2-4bdd-aa70-d8eecb8eaebb 11 | 08d5b3fb-549c-4be7-9ac8-e039d75823e7,Dunkles Lagerbier,0,0,0465e9ca-6255-4f5c-b8ba-7439531f8d28 12 | 3b9af296-f7d7-4436-bf49-f09f65ba3970,Leichtes Dunkel,0,0,0465e9ca-6255-4f5c-b8ba-7439531f8d28 -------------------------------------------------------------------------------- /postgres/test/beershop/db/data/csw-Brewery.csv: -------------------------------------------------------------------------------- 1 | ID,name 2 | 9c937100-d459-491f-a72d-81b2929af10f,Augustiner-Bräu Wagner KG 3 | fa6b959e-3a01-40ef-872e-6030ee4de4e5,Private Landbrauerei Schönram GmbH & Co. KG 4 | 6832ba00-c20e-48b4-a685-dca4aff2ca13,Brauerei Meister 5 | d265037f-4996-4195-a1fb-b0a8ab5c6f30,Grüner Bier 6 | 4aeebbed-90c2-4bdd-aa70-d8eecb8eaebb,Rittmayer Hallerndorf 7 | 0465e9ca-6255-4f5c-b8ba-7439531f8d28,Kathi-Bräu Heckenhof -------------------------------------------------------------------------------- /postgres/test/beershop/db/data/csw-TypeChecks.csv: -------------------------------------------------------------------------------- 1 | ID,type_String,type_LargeString 2 | 5e4ca9ef-7c4c-4b22-8e85-7cadefa02c94,Guía del autoestopista galáctico,"At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, At accusam aliquyam diam diam dolore dolores duo eirmod eos erat, et nonumy sed tempor et et invidunt justo labore Stet clita ea et gubergren, kasd magna no rebum. sanctus sea sed takimata ut vero voluptua. est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat." 
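The `postgres/lib/session.json` mapping shown a little earlier translates CAP pseudo variables such as `$user.id` into Postgres session settings. A short sketch of how that mapping can be consulted (an assumption about typical usage, not code taken from this repository):

```js
// Assumption about typical usage — the actual PostgresService wiring may differ.
const session = require('@cap-js/postgres/lib/session.json')
console.log(session['$user.id'])     //> 'cap.applicationuser'
console.log(session['$user.locale']) //> 'cap.locale'
// On the SQL side, such a setting can be read back with, e.g.:
//   SELECT current_setting('cap.applicationuser', true)
```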
-------------------------------------------------------------------------------- /postgres/test/beershop/db/schema.cds: -------------------------------------------------------------------------------- 1 | namespace csw; 2 | 3 | using { 4 | cuid, 5 | managed 6 | } from '@sap/cds/common'; 7 | 8 | entity Beers : cuid, managed { 9 | name : String(100); 10 | abv : Decimal(3, 1); 11 | ibu : Integer; 12 | brewery : Association to one Brewery; 13 | virtual rating : Integer 14 | } 15 | 16 | entity Brewery : cuid, managed { 17 | name : String(150); 18 | beers : Composition of many Beers 19 | on beers.brewery = $self; 20 | } 21 | 22 | entity TypeChecks : cuid { 23 | type_Boolean : Boolean; 24 | type_Int32 : Integer; 25 | type_Int64 : Integer64; 26 | type_Decimal : Decimal(2, 1); 27 | type_Double : Double; 28 | type_Date : Date; 29 | type_Time : Time; 30 | type_DateTime : DateTime; 31 | type_Timestamp : Timestamp; 32 | type_String : String; 33 | type_Binary : Binary(100); 34 | type_LargeBinary : LargeBinary; 35 | type_LargeString : LargeString; 36 | virtual type_virtual : Integer; 37 | } 38 | -------------------------------------------------------------------------------- /postgres/test/beershop/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "beershop", 3 | "version": "0.0.1", 4 | "description": "the beershop. what are you going to drink today?", 5 | "dependencies": { 6 | "cds-pg": "*" 7 | }, 8 | "scripts": { 9 | "// note!": "you need credentials for the pg db in your cds env!", 10 | "start": "cds run", 11 | "watch": "cds watch" 12 | }, 13 | "cds": { 14 | "requires": { 15 | "db": { 16 | "kind": "pg" 17 | }, 18 | "features": { 19 | "lean_draft": true 20 | } 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /postgres/test/beershop/srv/beershop-admin-service.cds: -------------------------------------------------------------------------------- 1 | using {csw} from '../db/schema'; 2 | 3 | @(requires : 'authenticated-user', path: '/beershop-admin') 4 | service BeershopAdminService { 5 | @restrict : [{ 6 | grant : [ 7 | 'READ', 8 | 'WRITE', 9 | 'DELETE' 10 | ], 11 | where : 'createdBy = $user' 12 | }] 13 | entity Beers as projection on csw.Beers; 14 | 15 | entity Breweries as projection on csw.Brewery; 16 | 17 | @readonly 18 | entity UserScopes { 19 | key username : String; 20 | is_admin : Boolean; 21 | }; 22 | } 23 | -------------------------------------------------------------------------------- /postgres/test/beershop/srv/beershop-admin-service.js: -------------------------------------------------------------------------------- 1 | module.exports = async function (srv) { 2 | srv.on('READ', 'UserScopes', async req => { 3 | const users = [ 4 | { 5 | username: req.user.id, 6 | is_admin: req.user.is('admin'), 7 | }, 8 | ] 9 | return users 10 | }) 11 | } 12 | -------------------------------------------------------------------------------- /postgres/test/beershop/srv/beershop-service.cds: -------------------------------------------------------------------------------- 1 | using {csw} from '../db/schema'; 2 | 3 | @path: '/beershop' 4 | service BeershopService { 5 | 6 | entity Beers as projection on csw.Beers; 7 | entity Breweries as projection on csw.Brewery; 8 | entity TypeChecks as projection on csw.TypeChecks; 9 | 10 | @odata.draft.enabled 11 | entity TypeChecksWithDraft as projection on csw.TypeChecks; 12 | } 13 | 14 | extend service BeershopService with { 15 | action reset(); 16 | 
action createBeer(); 17 | } 18 | -------------------------------------------------------------------------------- /postgres/test/beershop/srv/beershop-service.js: -------------------------------------------------------------------------------- 1 | const cds = require('../../../../test/cds.js') 2 | 3 | module.exports = srv => { 4 | srv.on('reset', async () => { 5 | let db 6 | try { 7 | db = await cds.connect.to('db') 8 | } catch { 9 | db = cds.db 10 | } 11 | await cds.deploy('./srv/', {}).to(db) 12 | }) 13 | srv.on('createBeer', async () => { 14 | const { Beers } = cds.entities('csw') 15 | const entries = [{ name: 'Beer1', abv: 1.0, ibu: 1, brewery_ID: '0465e9ca-6255-4f5c-b8ba-7439531f8d28' }] 16 | const insertResult = await cds.run(INSERT.into(Beers).entries(entries)) 17 | // eslint-disable-next-line no-console 18 | console.log(insertResult) 19 | }) 20 | srv.before('READ', '*', async req => { 21 | if (req.headers.schema) { 22 | req.user.schema = req.headers.schema 23 | } 24 | }) 25 | } 26 | -------------------------------------------------------------------------------- /postgres/test/beershop/srv/ui-annotations.cds: -------------------------------------------------------------------------------- 1 | using {BeershopService} from '../srv/beershop-service'; 2 | 3 | annotate BeershopService.TypeChecksWithDraft with @( 4 | Common.SemanticKey : [ID], 5 | Identification : [{Value : code}], 6 | UI : { 7 | SelectionFields : [ 8 | type_String, 9 | type_Date 10 | ], 11 | LineItem : [ 12 | {Value : type_String}, 13 | {Value : type_Date}, 14 | ], 15 | HeaderInfo : { 16 | TypeName : '{i18n>TypeCheck}', 17 | TypeNamePlural : '{i18n>TypeChecks}', 18 | Title : {Value : type_String}, 19 | Description : {Value : type_Date} 20 | }, 21 | Facets : [{ 22 | $Type : 'UI.ReferenceFacet', 23 | Label : '{i18n>Details}', 24 | Target : '@UI.FieldGroup#Details' 25 | }, ], 26 | FieldGroup #Details : {Data : [ 27 | {Value : type_Boolean}, 28 | {Value : type_Int32}, 29 | {Value : type_Int64}, 30 | {Value : type_Decimal}, 31 | {Value : type_Double}, 32 | {Value : type_Date}, 33 | {Value : type_Time}, 34 | {Value : type_DateTime}, 35 | {Value : type_Timestamp}, 36 | {Value : type_String}, 37 | {Value : type_LargeString}, 38 | ]}, 39 | } 40 | ); 41 | -------------------------------------------------------------------------------- /postgres/test/cds-build.test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | // make sure the build plugin works 4 | 5 | const path = require('path') 6 | const fs = require('fs') 7 | const { execSync } = require('child_process') 8 | const cds = require('../../test/cds.js') 9 | 10 | const workDir = path.join(__dirname, 'tiny-sample') 11 | const genDir = path.join(workDir, 'gen') 12 | const pgDest = path.join(genDir, 'pg') 13 | const dbDest = path.join(pgDest, 'db') 14 | 15 | // delete the generated folder after each test 16 | afterEach(() => { 17 | if (fs.existsSync(genDir)) fs.rmSync(genDir, { recursive: true }) 18 | }) 19 | 20 | describe('cds build plugin', () => { 21 | const { expect } = cds.test 22 | test('should run pg build with explicit build task', () => { 23 | execSync('npx cds build --for postgres', { cwd: workDir }) 24 | expect(fs.existsSync(path.join(dbDest, 'csn.json'))).to.be.true 25 | }) 26 | 27 | test('should run pg build with production profile', () => { 28 | execSync('npx cds build --production', { cwd: workDir }) 29 | expect(fs.existsSync(path.join(dbDest, 'csn.json'))).to.be.true 30 | }) 31 | 32 | test('should retain 
assert_integrity setting', () => { 33 | execSync('npx cds build --production', { cwd: workDir }) 34 | const packageJson = require(path.join(pgDest, 'package.json')) 35 | expect(packageJson.cds?.features?.assert_integrity).to.equal('db') 36 | const ddl = String(execSync('npx cds deploy --dry', { cwd: workDir })) 37 | expect(ddl).to.contain('REFERENCES') 38 | }) 39 | }) 40 | -------------------------------------------------------------------------------- /postgres/test/compliance: -------------------------------------------------------------------------------- 1 | ../../test -------------------------------------------------------------------------------- /postgres/test/connect.test.js: -------------------------------------------------------------------------------- 1 | const { Client } = require('pg') 2 | const PgService = require('../lib/PostgresService') 3 | 4 | const cds = require('../../test/cds.js') 5 | 6 | process.env.DEBUG && jest.setTimeout(100000) 7 | 8 | // fake the manifestation of the db connection 9 | Client.prototype.connect = () => { } 10 | 11 | describe('connect to pg db', () => { 12 | const { expect } = cds.test 13 | 14 | test('in docker', async () => { 15 | cds.env.requires.db = require('@cap-js/postgres/test/service.json') 16 | const pgService = new PgService() 17 | pgService.options.credentials = cds.env.requires.db.credentials 18 | const con = await pgService.factory.create() 19 | expect(con.host).to.equal(cds.env.requires.db.credentials.host) 20 | expect(con.user).to.equal(cds.env.requires.db.credentials.user) 21 | expect(con.database).to.equal(cds.env.requires.db.credentials.database) 22 | expect(con.ssl).to.equal(false) 23 | }) 24 | test('for btp pg hyperscaler', async () => { 25 | cds.env.requires.db = require('@cap-js/postgres/test/service-btp.json') 26 | const pgService = new PgService() 27 | pgService.options.credentials = cds.env.requires.db.credentials 28 | const con = await pgService.factory.create() 29 | expect(con.host).to.equal(cds.env.requires.db.credentials.hostname) 30 | expect(con.user).to.equal(cds.env.requires.db.credentials.username) 31 | expect(con.database).to.equal(cds.env.requires.db.credentials.dbname) 32 | expect(con.ssl.ca).to.equal(cds.env.requires.db.credentials.sslrootcert) 33 | expect(con.ssl.rejectUnauthorized).to.be.false 34 | }) 35 | test('with azure pg compatible settings', async () => { 36 | cds.env.requires.db = require('@cap-js/postgres/test/service-az.json') 37 | const pgService = new PgService() 38 | pgService.options.credentials = cds.env.requires.db.credentials 39 | const con = await pgService.factory.create() 40 | expect(con.ssl).to.equal(true) 41 | }) 42 | }) 43 | -------------------------------------------------------------------------------- /postgres/test/plain-sql.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('../../test/cds.js') 2 | const bookshop = cds.utils.path.resolve(__dirname, '../../test/bookshop') 3 | 4 | describe('Postgres Plain SQL', () => { 5 | const { expect } = cds.test(bookshop) 6 | 7 | test('Plain sql', async () => { 8 | const res = await cds.run('SELECT * FROM sap_capire_bookshop_Books') 9 | expect(res.length).to.be.eq(5) 10 | const [res1, res2] = await cds.run([ 11 | 'SELECT * FROM sap_capire_bookshop_Books', 12 | 'SELECT * FROM sap_capire_bookshop_Books', 13 | ]) 14 | expect(res1.length).to.be.eq(5) 15 | expect(res2.length).to.be.eq(5) 16 | }) 17 | 18 | test('Plain sql with values', async () => { 19 | const res = await cds.run('SELECT * 
FROM sap_capire_bookshop_Books where ID = $1', [201]) 20 | expect(res.length).to.be.eq(1) 21 | }) 22 | 23 | test('Plain sql with multiple values', async () => { 24 | const res = await cds.run('SELECT * FROM sap_capire_bookshop_Books where ID = $1', [[201], [252]]) 25 | expect(res.length).to.be.eq(2) 26 | }) 27 | }) 28 | -------------------------------------------------------------------------------- /postgres/test/service-az.json: -------------------------------------------------------------------------------- 1 | { 2 | "credentials": { 3 | "host": "some-azure-postgres.postgres.database.azure.com", 4 | "port": "5432", 5 | "database": "some-db", 6 | "user": "wannaberoot", 7 | "password": "notimportant", 8 | "ssl": true 9 | }, 10 | "dialect": "postgres", 11 | "impl": "@cap-js/postgres" 12 | } 13 | -------------------------------------------------------------------------------- /postgres/test/service-btp.json: -------------------------------------------------------------------------------- 1 | { 2 | "credentials": { 3 | "hostname": "some-btp-postgres.cf10.eu.ondemand.com", 4 | "port": "4711", 5 | "dbname": "some-db", 6 | "username": "btpuser", 7 | "password": "notimportant", 8 | "sslrootcert": { "who": "cares" } 9 | }, 10 | "dialect": "postgres", 11 | "impl": "@cap-js/postgres" 12 | } 13 | -------------------------------------------------------------------------------- /postgres/test/service.json: -------------------------------------------------------------------------------- 1 | { 2 | "credentials": { 3 | "host": "localhost", 4 | "port": "5432", 5 | "database": "postgres", 6 | "user": "postgres", 7 | "password": "postgres" 8 | }, 9 | "dialect": "postgres", 10 | "impl": "@cap-js/postgres" 11 | } 12 | -------------------------------------------------------------------------------- /postgres/test/streaming.test.js: -------------------------------------------------------------------------------- 1 | require('../../test/cds.js') 2 | describe('postgres', () => { 3 | require('../../sqlite/test/general/stream.test.js') 4 | }) 5 | -------------------------------------------------------------------------------- /postgres/test/timezone.test.js: -------------------------------------------------------------------------------- 1 | const { resolve } = require('path') 2 | const cds = require('../../test/cds.js') 3 | const project = resolve(__dirname, 'beershop') 4 | 5 | process.env.DEBUG && jest.setTimeout(100000) 6 | 7 | describe('CAP PostgreSQL Adapter', () => { 8 | const { GET, PUT, expect, data } = cds.test('serve', '--project', project).verbose() 9 | 10 | data.autoIsolation(true) 11 | data.autoReset(true) 12 | 13 | describe('Timezone Handling', () => { 14 | test('should respect db users timezone settings', async () => { 15 | //Set Different TimeZone 16 | //await cds.run(`alter user postgres set timezone = 'EST'`, []) //UTC,EST 17 | const beforeTimestamp = new Date() 18 | beforeTimestamp.setMilliseconds(0) 19 | await PUT( 20 | '/beershop/Beers/9e1704e3-6fd0-4a5d-bfb1-13ac47f7976b', 21 | { 22 | name: 'Changed name', 23 | ibu: 10, 24 | }, 25 | 26 | { 27 | headers: { 28 | 'Content-Type': 'application/json;charset=UTF-8;IEEE754Compatible=true', 29 | }, 30 | }, 31 | ) 32 | 33 | //await cds.run(`alter user postgres set timezone = 'UTC'`, []) 34 | const response = await GET('/beershop/Beers/9e1704e3-6fd0-4a5d-bfb1-13ac47f7976b') 35 | const afterTimestamp = new Date() 36 | 37 | const modifiedAt = new Date(response.data.modifiedAt) 38 | expect(beforeTimestamp).to.be.lessThanOrEqual(modifiedAt) 39 | 
expect(modifiedAt).to.be.lessThanOrEqual(afterTimestamp) 40 | }) 41 | }) 42 | }) 43 | -------------------------------------------------------------------------------- /postgres/test/tiny-sample/db/data/my.bookshop-Books.csv: -------------------------------------------------------------------------------- 1 | ID,title,stock 2 | 1,Wuthering Heights,100 3 | 2,Jane Eyre,500 4 | -------------------------------------------------------------------------------- /postgres/test/tiny-sample/db/schema.cds: -------------------------------------------------------------------------------- 1 | namespace my.bookshop; 2 | 3 | entity Books { 4 | key ID : Integer; 5 | title : String; 6 | stock : Integer; 7 | author : Association to Authors; 8 | } 9 | 10 | entity Authors { 11 | key ID : Integer; 12 | } 13 | -------------------------------------------------------------------------------- /postgres/test/tiny-sample/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "tiny-sample", 3 | "version": "1.0.0", 4 | "description": "A simple CAP project, to test the build plugin", 5 | "dependencies": { 6 | "@cap-js/postgres": "../../." 7 | }, 8 | "cds": { 9 | "features": { 10 | "assert_integrity": "db" 11 | } 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /postgres/test/tiny-sample/srv/cat-service.cds: -------------------------------------------------------------------------------- 1 | using my.bookshop as my from '../db/schema'; 2 | 3 | service CatalogService { 4 | @readonly entity Books as projection on my.Books; 5 | } 6 | -------------------------------------------------------------------------------- /release-please-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "packages": { 3 | "db-service": { 4 | }, 5 | "sqlite": { 6 | }, 7 | "postgres": { 8 | }, 9 | "hana": { 10 | } 11 | }, 12 | "changelog-sections": [ 13 | { 14 | "type": "feat", 15 | "section": "Added", 16 | "hidden": false 17 | }, 18 | { 19 | "type": "fix", 20 | "section": "Fixed", 21 | "hidden": false 22 | }, 23 | { 24 | "type": "deps", 25 | "section": "Changed", 26 | "hidden": false 27 | }, 28 | { 29 | "type": "perf", 30 | "section": "Changed", 31 | "hidden": false 32 | }, 33 | { 34 | "type": "removed", 35 | "section": "Removed", 36 | "hidden": false 37 | }, 38 | { 39 | "type": "changed", 40 | "section": "Changed", 41 | "hidden": false 42 | } 43 | ] 44 | } 45 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": ["config:recommended", ":disableDependencyDashboard"], 4 | "ignoreDeps": ["saplabs/hanaexpress", "amannn/action-semantic-pull-request", "martinbeentjes/npm-get-version-action"] 5 | } 6 | -------------------------------------------------------------------------------- /sqlite/README.md: -------------------------------------------------------------------------------- 1 | # CDS database service for SQLite 2 | 3 | Welcome to the SQLite database service for [SAP Cloud Application Programming Model](https://cap.cloud.sap) Node.js, based on streamlined database architecture and [*better-sqlite* driver](https://www.npmjs.com/package/better-sqlite3). 
4 | 5 | ## Setup 6 | 7 | If you want to use SQLite for development, all you need to do is install the database package, as follows: 8 | 9 | ```sh 10 | npm add @cap-js/sqlite -D 11 | ``` 12 | 13 | Learn more about setup and usage in the [respective database guide](https://cap.cloud.sap/docs/guides/databases-sqlite). 14 | 15 | ## Support 16 | 17 | This project is open to feature requests, suggestions, bug reports, etc. via [GitHub issues](https://github.com/cap-js/cds-dbs/issues). 18 | 19 | ## Contribution 20 | 21 | Contribution and feedback are encouraged and always welcome. For more information about how to contribute, the project structure, as well as additional contribution information, see our [Contribution Guidelines](CONTRIBUTING.md). 22 | 23 | ## Versioning 24 | 25 | This library follows [Semantic Versioning](https://semver.org/). 26 | All notable changes are documented in [CHANGELOG.md](CHANGELOG.md). 27 | 28 | ## Code of Conduct 29 | 30 | We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone. By participating in this project, you agree to abide by its [Code of Conduct](CODE_OF_CONDUCT.md) at all times. 31 | 32 | ## Licensing 33 | 34 | Copyright 2024 SAP SE or an SAP affiliate company and cds-dbs contributors. Please see our [LICENSE](LICENSE) for copyright and license information. Detailed information, including third-party components and their licensing/copyright information, is available [via the REUSE tool](https://api.reuse.software/info/github.com/cap-js/cds-dbs). 35 | -------------------------------------------------------------------------------- /sqlite/cds-plugin.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | if (!cds.env.fiori.lean_draft) { 4 | throw new Error('"@cap-js/sqlite" only works if cds.fiori.lean_draft is enabled.
Please adapt your configuration.') 5 | } 6 | -------------------------------------------------------------------------------- /sqlite/index.js: -------------------------------------------------------------------------------- 1 | module.exports = require('./lib/SQLiteService.js') 2 | -------------------------------------------------------------------------------- /sqlite/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@cap-js/sqlite", 3 | "version": "2.0.1", 4 | "description": "CDS database service for SQLite", 5 | "homepage": "https://github.com/cap-js/cds-dbs/tree/main/sqlite#cds-database-service-for-sqlite", 6 | "repository": { 7 | "type": "git", 8 | "url": "git+https://github.com/cap-js/cds-dbs.git" 9 | }, 10 | "bugs": { 11 | "url": "https://github.com/cap-js/cds-dbs/issues" 12 | }, 13 | "keywords": [ 14 | "CAP", 15 | "CDS", 16 | "SQLite" 17 | ], 18 | "author": "SAP SE (https://www.sap.com)", 19 | "main": "index.js", 20 | "files": [ 21 | "cds-plugin.js", 22 | "lib", 23 | "CHANGELOG.md" 24 | ], 25 | "scripts": { 26 | "test": "cds-test" 27 | }, 28 | "dependencies": { 29 | "@cap-js/db-service": "^2", 30 | "better-sqlite3": "^11.0.0" 31 | }, 32 | "peerDependencies": { 33 | "@sap/cds": ">=9" 34 | }, 35 | "cds": { 36 | "requires": { 37 | "kinds": { 38 | "sql": { 39 | "[development]": { 40 | "kind": "sqlite", 41 | "credentials": { 42 | "url": ":memory:" 43 | } 44 | } 45 | }, 46 | "sqlite": { 47 | "impl": "@cap-js/sqlite" 48 | } 49 | }, 50 | "db": "sql" 51 | } 52 | }, 53 | "license": "Apache-2.0" 54 | } 55 | -------------------------------------------------------------------------------- /sqlite/test/compliance: -------------------------------------------------------------------------------- 1 | ../../test -------------------------------------------------------------------------------- /sqlite/test/deep/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@test/cds-db-layer", 3 | "version": "1.0.0", 4 | "description": "Base for db layer validations", 5 | "cds": { 6 | "requires": { 7 | "db": { 8 | "impl": "@cap-js/sqlite" 9 | } 10 | }, 11 | "features": { 12 | "ieee754compatible": true 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /sqlite/test/general/delete-rename.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('../../../test/cds.js') 2 | 3 | describe('delete on rename', () => { 4 | const { POST, DELETE, expect } = cds.test(__dirname, 'testModel.cds') 5 | 6 | test('delete on projection with renamed elements', async () => { 7 | let res 8 | res = await POST('/rename/SProjDeep', { 9 | IDRename: 1, 10 | parentRename: 1, 11 | otherNameRename: 'some name', 12 | otherName2Rename: 'some name2', 13 | childrenRename: [ 14 | { IDRename: 1, otherNameRename: 'children name', otherName2Rename: 'children name 1' }, 15 | { IDRename: 2, otherNameRename: 'children name', otherName2Rename: 'children name 2' }, 16 | ], 17 | }) 18 | expect(res).to.containSubset({ status: 201 }) 19 | 20 | // make sure the resulting query is resolved all the way to the database table 21 | res = await DELETE('/rename/SProjDeep(1)/childrenRename(1)') 22 | 23 | expect(res).to.containSubset({ status: 204 }) 24 | }) 25 | }) 26 | -------------------------------------------------------------------------------- /sqlite/test/general/insert-entries-select.test.js: 
-------------------------------------------------------------------------------- 1 | const cds = require('../../../test/cds.js') 2 | const assert = require('assert') 3 | 4 | describe('insert from select', () => { 5 | cds.test(__dirname, 'testModel.cds') 6 | 7 | test('make sure that the placeholder values of the prepared statement are passed to the database', async () => { 8 | // fill other table first 9 | await cds.run(INSERT({ ID: 42, name: 'Foo2' }).into('Foo2')) 10 | const insert = INSERT.into('Foo') 11 | .columns(['ID', 'a']) 12 | .from( 13 | SELECT.from('Foo2') 14 | .columns(['ID', 'name']) 15 | .where({ ref: ['name'] }, '=', { val: 'Foo2' }), 16 | ) 17 | // insert from select 18 | const insertRes = await cds.run(insert) 19 | assert.strictEqual(insertRes.affectedRows, 1, 'One row should have been inserted') 20 | // select the inserted column 21 | const selectRes = await cds.run(SELECT.from('Foo').where({ ref: ['ID'] }, '=', { val: 42 })) 22 | assert.strictEqual(selectRes.length, 1, 'One row should have been inserted') 23 | }) 24 | }) 25 | -------------------------------------------------------------------------------- /sqlite/test/general/localized.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('../../../test/cds.js') 2 | 3 | describe('localized', () => { 4 | const { GET, POST, expect } = cds.test(__dirname, 'model.cds') 5 | 6 | beforeAll(async () => { 7 | return await POST('/test/fooLocalized', { 8 | ID: 5, 9 | text: 'english', 10 | texts: [{ locale: 'de', text: 'deutsch' }], 11 | }) 12 | }) 13 | 14 | test('generic request without language header falls back to default', async () => { 15 | const res = await GET('/test/fooLocalized') 16 | expect(res.status).to.equal(200) 17 | 18 | expect(res.data).to.deep.equal({ 19 | '@odata.context': '$metadata#fooLocalized', 20 | value: [ 21 | { 22 | ID: 5, 23 | text: 'english', 24 | }, 25 | ], 26 | }) 27 | }) 28 | 29 | test('generic request with language header is localized', async () => { 30 | const res = await GET('/test/fooLocalized', { headers: { 'Accept-Language': 'de' } }) 31 | expect(res.status).to.equal(200) 32 | 33 | expect(res.data).to.deep.equal({ 34 | '@odata.context': '$metadata#fooLocalized', 35 | value: [ 36 | { 37 | ID: 5, 38 | text: 'deutsch', 39 | }, 40 | ], 41 | }) 42 | }) 43 | 44 | test('custom handler does not return localized by default', async () => { 45 | const db = await cds.connect.to('test') 46 | 47 | cds.context = { locale: 'de' } 48 | return db.tx(async () => { 49 | const result = await SELECT.from('test.fooLocalized') 50 | expect(result).to.deep.equal([{ ID: 5, text: 'english' }]) 51 | 52 | const resultLocalized = await SELECT.localized('test.fooLocalized') 53 | expect(resultLocalized).to.deep.equal([{ ID: 5, text: 'deutsch' }]) 54 | }) 55 | }) 56 | }) 57 | -------------------------------------------------------------------------------- /sqlite/test/general/model.cds: -------------------------------------------------------------------------------- 1 | using { 2 | managed, 3 | temporal 4 | } from '@sap/cds/common'; 5 | 6 | entity db.fooTemporal : managed, temporal { 7 | key ID : Integer; 8 | } 9 | 10 | @path: '/test' 11 | service test { 12 | entity foo : managed { 13 | key ID : Integer; 14 | defaultValue: Integer default 100; 15 | } 16 | 17 | entity bar { 18 | key ID : UUID; 19 | } 20 | 21 | entity BooksWithAssocAsKey { 22 | key author: Association to AuthorAssoc; 23 | title : String; 24 | stock : Integer; 25 | } 26 | 27 | entity AuthorAssoc { 28 | key 
ID: UUID; 29 | } 30 | 31 | entity fooLocalized { 32 | key ID : Integer; 33 | text : localized String; 34 | } 35 | 36 | entity fooTemporal as projection on db.fooTemporal; 37 | 38 | entity Images { 39 | key ID : Integer; 40 | data : LargeBinary @Core.MediaType: 'image/jpeg'; 41 | data2 : LargeBinary @Core.MediaType: 'image/jpeg'; 42 | } 43 | 44 | entity ImagesView as projection on Images { 45 | *, 46 | data as renamedData 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /sqlite/test/general/model.js: -------------------------------------------------------------------------------- 1 | module.exports = srv => { 2 | const { fooTemporal } = srv.entities 3 | 4 | srv.on('CREATE', fooTemporal, async function (req) { 5 | // without the fix, this UPSERT throws 6 | await UPSERT(req.data).into(fooTemporal) 7 | return req.data 8 | }) 9 | } 10 | -------------------------------------------------------------------------------- /sqlite/test/general/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "@cap-js/sqlite": "*" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /sqlite/test/general/samples/1000.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cap-js/cds-dbs/591e1aa7942b9b430d80e116de12bf6d3847a3ab/sqlite/test/general/samples/1000.png -------------------------------------------------------------------------------- /sqlite/test/general/samples/1001.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cap-js/cds-dbs/591e1aa7942b9b430d80e116de12bf6d3847a3ab/sqlite/test/general/samples/1001.png -------------------------------------------------------------------------------- /sqlite/test/general/samples/test.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cap-js/cds-dbs/591e1aa7942b9b430d80e116de12bf6d3847a3ab/sqlite/test/general/samples/test.jpg -------------------------------------------------------------------------------- /sqlite/test/general/temporal.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('../../../test/cds.js') 2 | 3 | describe('temporal', () => { 4 | const { GET, POST, expect } = cds.test(__dirname, 'model.cds') 5 | 6 | beforeAll(async () => { 7 | const db = await cds.connect.to('db') 8 | const { fooTemporal } = db.model.entities('test') 9 | await db.create(fooTemporal).entries([ 10 | { ID: 1, validFrom: '1990-01-01T00:00:00.000Z', validTo: '9999-12-31T23:59:59.999Z' }, 11 | { ID: 2, validFrom: '2000-01-01T00:00:00.000Z', validTo: '9999-12-31T23:59:59.999Z' } 12 | ]) 13 | }) 14 | 15 | test('READ', async () => { 16 | let validAt, res 17 | 18 | validAt = '1970-01-01T00:00:00.000Z' 19 | res = await GET(`/test/fooTemporal?sap-valid-at=${validAt}`) 20 | expect(res.data.value.length).equals(0) 21 | 22 | validAt = '1995-01-01T00:00:00.000Z' 23 | res = await GET(`/test/fooTemporal?sap-valid-at=${validAt}`) 24 | expect(res.data.value.length).equals(1) 25 | const it = res.data.value[0] 26 | expect(it).to.containSubset({ ID: 1 }) 27 | // managed and temporal shall not clash 28 | expect(it.createdAt).not.equals(it.validFrom) 29 | 30 | validAt = '2010-01-01T00:00:00.000Z' 31 | res = await GET(`/test/fooTemporal?sap-valid-at=${validAt}`) 32 | 
expect(res.data.value.length).equals(2) 33 | }) 34 | 35 | test('UPSERT', async () => { 36 | const validFrom = '2000-01-01T00:00:00.000Z' 37 | const url = `/test/fooTemporal?sap-valid-from=${validFrom}` 38 | const data = { ID: 42, validFrom } 39 | const res = await POST(url, data) 40 | expect(res.data).to.containSubset({ validFrom }) 41 | }) 42 | }) 43 | -------------------------------------------------------------------------------- /sqlite/test/general/testModel.cds: -------------------------------------------------------------------------------- 1 | entity Foo { 2 | key ID: Integer; 3 | a: String; 4 | b: String; 5 | c: String; 6 | x: Integer; 7 | } 8 | 9 | entity Foo2 { 10 | key ID: Integer; 11 | name: String; 12 | a: Integer; 13 | virtual something : String(11); 14 | } 15 | 16 | entity Books { 17 | key ID : Integer; 18 | author : Composition of Author ; 19 | descr : String; 20 | code : String; 21 | } 22 | 23 | entity Author { 24 | key id : Integer; 25 | key version : String; 26 | parent : Association to Books; 27 | } 28 | 29 | entity Travel { 30 | key TravelUUID : UUID; 31 | TravelID : Integer @readonly default 0; 32 | BeginDate : Date; 33 | EndDate : Date; 34 | BookingFee : Decimal(16, 3); 35 | TotalPrice : Decimal(16, 3) @readonly; 36 | Description : String(1024); 37 | to_Booking : Composition of many Booking on to_Booking.to_Travel = $self; 38 | }; 39 | 40 | entity Booking { 41 | key BookingUUID : UUID; 42 | FlightPrice : Decimal(16, 3); 43 | to_BookSupplement : Composition of many BookingSupplement on to_BookSupplement.to_Booking = $self; 44 | to_Travel : Association to Travel; 45 | }; 46 | 47 | entity BookingSupplement { 48 | key BookSupplUUID : UUID; 49 | Price : Decimal(16, 3); 50 | to_Booking : Association to Booking; 51 | to_Travel : Association to Travel; 52 | }; 53 | 54 | entity DBDeepEntityChild { 55 | key ID : Integer; 56 | parent : Integer; 57 | otherName : String; 58 | otherName2 : String; 59 | } 60 | 61 | entity EProjChild as projection on DBDeepEntityChild { 62 | ID as IDRename, 63 | parent as parentRename, 64 | otherName as otherNameRename, 65 | otherName2 as otherName2Rename 66 | } 67 | 68 | entity DBDeepEntity { 69 | key ID : Integer; 70 | parent : Integer; 71 | otherName : String; 72 | otherName2 : String; 73 | children : Composition of many EProjChild 74 | on children.parentRename = ID; 75 | } 76 | 77 | entity FProjDeep as projection on DBDeepEntity { 78 | ID as IDRename, 79 | parent as parentRename, 80 | otherName as otherNameRename, 81 | otherName2 as otherName2Rename, 82 | children as childrenRename 83 | } 84 | 85 | service RenameService @(path:'/rename') { 86 | entity SProjDeep as projection on FProjDeep; 87 | } 88 | -------------------------------------------------------------------------------- /sqlite/test/general/uuid.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('../../../test/cds.js') 2 | 3 | describe('UUID Generation', () => { 4 | const {expect} = cds.test(__dirname, 'model.cds') 5 | 6 | test('INSERT with one entry', async () => { 7 | const db = await cds.connect.to('db') 8 | return db.tx(async () => { 9 | await INSERT.into('test.bar').entries({}) 10 | 11 | const result = await SELECT.from('test.bar') 12 | expect(result).to.have.nested.property('0.ID').to.be.a('string') 13 | 14 | await DELETE('test.bar') 15 | }) 16 | }) 17 | test('INSERT with multiple entries', async () => { 18 | const db = await cds.connect.to('db') 19 | return db.tx(async () => { 20 | await 
INSERT.into('test.bar').entries([{}, {}]) 21 | 22 | const result = await SELECT.from('test.bar') 23 | expect(result).to.have.length(2) 24 | expect(result).to.have.nested.property('0.ID').to.be.a('string') 25 | expect(result).to.have.nested.property('1.ID').to.be.a('string') 26 | expect(result[0].ID).not.to.equal(result[1].ID) 27 | 28 | await DELETE('test.bar') 29 | }) 30 | }) 31 | 32 | test('INSERT entity with missing key as association throws error', async () => { 33 | await expect( 34 | INSERT.into('test.BooksWithAssocAsKey').entries([{}]) 35 | ).rejectedWith({code:'SQLITE_CONSTRAINT_NOTNULL'}) 36 | }) 37 | }) 38 | -------------------------------------------------------------------------------- /sqlite/test/plain-sql.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('../../test/cds.js') 2 | const bookshop = cds.utils.path.resolve(__dirname, '../../test/bookshop') 3 | 4 | 5 | describe('SQLite Plain SQL', () => { 6 | const { expect } = cds.test(bookshop) 7 | 8 | test('Plain sql', async () => { 9 | const res = await cds.run('SELECT * FROM sap_capire_bookshop_Books') 10 | expect(res.length).to.be.eq(5) 11 | const [res1, res2] = await cds.run([ 12 | 'SELECT * FROM sap_capire_bookshop_Books', 13 | 'SELECT * FROM sap_capire_bookshop_Books', 14 | ]) 15 | expect(res1.length).to.be.eq(5) 16 | expect(res2.length).to.be.eq(5) 17 | }) 18 | 19 | test('Plain sql with values', async () => { 20 | const res = await cds.run('SELECT * FROM sap_capire_bookshop_Books where ID = ?', [201]) 21 | expect(res.length).to.be.eq(1) 22 | }) 23 | 24 | test('Plain sql with multiple values', async () => { 25 | const res = await cds.run('SELECT * FROM sap_capire_bookshop_Books where ID = ?', [[201], [252]]) 26 | expect(res.length).to.be.eq(2) 27 | }) 28 | }) 29 | -------------------------------------------------------------------------------- /sqlite/test/recurse/genres-ancestors.sql: -------------------------------------------------------------------------------- 1 | WITH RECURSIVE 2 | Hierarchy(HIERARCHY_LEVEL,HIERARCHY_PARENT_RANK,HIERARCHY_RANK,HIERARCHY_ROOT_RANK,HIERARCHY_TREE_SIZE,NODE_ID,PARENT_ID,ID,name) AS ( 3 | SELECT 1,0,rowid,rowid,(SELECT COUNT(*) + 1 FROM sap_capire_bookshop_Genres as children WHERE children.parent_ID=root.ID),ID,parent_ID,ID,name 4 | FROM sap_capire_bookshop_Genres AS root 5 | WHERE parent_ID IS NULL 6 | UNION ALL 7 | SELECT 8 | Hierarchy.HIERARCHY_LEVEL + 1, 9 | Hierarchy.HIERARCHY_RANK, 10 | Genres.rowid, 11 | Hierarchy.HIERARCHY_ROOT_RANK, 12 | (SELECT COUNT(*) + 1 FROM sap_capire_bookshop_Genres as children WHERE children.parent_ID=Genres.ID), 13 | Genres.ID, 14 | Genres.parent_ID, 15 | Genres.ID, 16 | Genres.name 17 | FROM sap_capire_bookshop_Genres AS Genres 18 | JOIN Hierarchy ON Genres.PARENT_ID=Hierarchy.NODE_ID 19 | ORDER BY 1 DESC 20 | ), 21 | Ancestors(HIERARCHY_LEVEL,HIERARCHY_PARENT_RANK,HIERARCHY_RANK,HIERARCHY_ROOT_RANK,HIERARCHY_TREE_SIZE,HIERARCHY_DISTANCE,NODE_ID,PARENT_ID,ID,name) AS ( 22 | SELECT HIERARCHY_LEVEL,HIERARCHY_PARENT_RANK,HIERARCHY_RANK,HIERARCHY_ROOT_RANK,HIERARCHY_TREE_SIZE,0,NODE_ID,PARENT_ID,ID,name 23 | FROM Hierarchy AS root 24 | WHERE name = 'Drama' 25 | UNION ALL 26 | SELECT 27 | Hierarchy.HIERARCHY_LEVEL, 28 | Hierarchy.HIERARCHY_PARENT_RANK, 29 | Hierarchy.HIERARCHY_RANK, 30 | Hierarchy.HIERARCHY_ROOT_RANK, 31 | Hierarchy.HIERARCHY_TREE_SIZE, 32 | Ancestors.HIERARCHY_DISTANCE - 1, 33 | Hierarchy.NODE_ID, 34 | Hierarchy.PARENT_ID, 35 | Hierarchy.ID, 36 | Hierarchy.name 37 | FROM 
Ancestors AS Ancestors 38 | JOIN Hierarchy AS Hierarchy ON Hierarchy.NODE_ID=Ancestors.PARENT_ID 39 | ORDER BY 1 40 | ) 41 | SELECT 42 | Hierarchy.HIERARCHY_LEVEL, 43 | Hierarchy.HIERARCHY_PARENT_RANK, 44 | Hierarchy.HIERARCHY_RANK, 45 | Hierarchy.HIERARCHY_ROOT_RANK, 46 | Hierarchy.HIERARCHY_TREE_SIZE, 47 | Ancestors.HIERARCHY_DISTANCE, 48 | Hierarchy.NODE_ID, 49 | Hierarchy.PARENT_ID, 50 | Hierarchy.ID, 51 | Hierarchy.name 52 | FROM Hierarchy 53 | INNER JOIN Ancestors 54 | ON Hierarchy.HIERARCHY_RANK = Ancestors.HIERARCHY_RANK -------------------------------------------------------------------------------- /sqlite/test/recurse/genres-descendants.sql: -------------------------------------------------------------------------------- 1 | WITH RECURSIVE 2 | Hierarchy(HIERARCHY_LEVEL,HIERARCHY_PARENT_RANK,HIERARCHY_RANK,HIERARCHY_ROOT_RANK,HIERARCHY_TREE_SIZE,NODE_ID,PARENT_ID,ID,name) AS ( 3 | SELECT 1,0,rowid,rowid,(SELECT COUNT(*) + 1 FROM sap_capire_bookshop_Genres as children WHERE children.parent_ID=root.ID),ID,parent_ID,ID,name 4 | FROM sap_capire_bookshop_Genres AS root 5 | WHERE parent_ID IS NULL 6 | UNION ALL 7 | SELECT 8 | Hierarchy.HIERARCHY_LEVEL + 1, 9 | Hierarchy.HIERARCHY_RANK, 10 | Genres.rowid, 11 | Hierarchy.HIERARCHY_ROOT_RANK, 12 | (SELECT COUNT(*) + 1 FROM sap_capire_bookshop_Genres as children WHERE children.parent_ID=Genres.ID), 13 | Genres.ID, 14 | Genres.parent_ID, 15 | Genres.ID, 16 | Genres.name 17 | FROM sap_capire_bookshop_Genres AS Genres 18 | JOIN Hierarchy ON Genres.PARENT_ID=Hierarchy.NODE_ID 19 | ORDER BY 1 DESC 20 | ), 21 | Descendants(HIERARCHY_LEVEL,HIERARCHY_PARENT_RANK,HIERARCHY_RANK,HIERARCHY_ROOT_RANK,HIERARCHY_TREE_SIZE,HIERARCHY_DISTANCE,NODE_ID,PARENT_ID,ID,name) AS ( 22 | SELECT HIERARCHY_LEVEL,HIERARCHY_PARENT_RANK,HIERARCHY_RANK,HIERARCHY_ROOT_RANK,HIERARCHY_TREE_SIZE,0,NODE_ID,PARENT_ID,ID,name 23 | FROM Hierarchy AS root 24 | WHERE name = 'Non-Fiction' 25 | UNION ALL 26 | SELECT 27 | Hierarchy.HIERARCHY_LEVEL, 28 | Hierarchy.HIERARCHY_PARENT_RANK, 29 | Hierarchy.HIERARCHY_RANK, 30 | Hierarchy.HIERARCHY_ROOT_RANK, 31 | Hierarchy.HIERARCHY_TREE_SIZE, 32 | Descendants.HIERARCHY_DISTANCE + 1, 33 | Hierarchy.NODE_ID, 34 | Hierarchy.PARENT_ID, 35 | Hierarchy.ID, 36 | Hierarchy.name 37 | FROM Descendants AS Descendants 38 | JOIN Hierarchy AS Hierarchy ON Hierarchy.PARENT_ID=Descendants.NODE_ID 39 | ORDER BY 1 40 | ) 41 | SELECT 42 | Hierarchy.HIERARCHY_LEVEL, 43 | Hierarchy.HIERARCHY_PARENT_RANK, 44 | Hierarchy.HIERARCHY_RANK, 45 | Hierarchy.HIERARCHY_ROOT_RANK, 46 | Hierarchy.HIERARCHY_TREE_SIZE, 47 | Descendants.HIERARCHY_DISTANCE, 48 | Hierarchy.NODE_ID, 49 | Hierarchy.PARENT_ID, 50 | Hierarchy.ID, 51 | Hierarchy.name 52 | FROM Hierarchy 53 | INNER JOIN Descendants 54 | ON Hierarchy.HIERARCHY_RANK = Descendants.HIERARCHY_RANK -------------------------------------------------------------------------------- /sqlite/test/recurse/genres-hierarchy.sql: -------------------------------------------------------------------------------- 1 | WITH RECURSIVE 2 | Hierarchy(HIERARCHY_LEVEL,HIERARCHY_PARENT_RANK,HIERARCHY_RANK,HIERARCHY_ROOT_RANK,HIERARCHY_TREE_SIZE,NODE_ID,PARENT_ID,ID,name) AS ( 3 | SELECT 1,0,rowid,rowid,(SELECT COUNT(*) + 1 FROM sap_capire_bookshop_Genres as children WHERE children.parent_ID=root.ID),ID,parent_ID,ID,name 4 | FROM sap_capire_bookshop_Genres AS root 5 | WHERE parent_ID IS NULL 6 | UNION ALL 7 | SELECT 8 | Hierarchy.HIERARCHY_LEVEL + 1, 9 | Hierarchy.HIERARCHY_RANK, 10 | Genres.rowid, 11 | Hierarchy.HIERARCHY_ROOT_RANK, 12 | 
(SELECT COUNT(*) + 1 FROM sap_capire_bookshop_Genres as children WHERE children.parent_ID=Genres.ID), 13 | Genres.ID, 14 | Genres.parent_ID, 15 | Genres.ID, 16 | Genres.name 17 | FROM sap_capire_bookshop_Genres AS Genres 18 | JOIN Hierarchy ON Genres.PARENT_ID=Hierarchy.NODE_ID 19 | ORDER BY 1 DESC 20 | ) 21 | SELECT * FROM Hierarchy -------------------------------------------------------------------------------- /sqlite/test/service.json: -------------------------------------------------------------------------------- 1 | { 2 | "impl": "@cap-js/sqlite" 3 | } 4 | -------------------------------------------------------------------------------- /test/bookshop/db/data/sap.capire.bookshop-Authors.csv: -------------------------------------------------------------------------------- 1 | ID;name;dateOfBirth;placeOfBirth;dateOfDeath;placeOfDeath; city; street; 2 | 101;Emily Brontë;1818-07-30;Thornton, Yorkshire;1848-12-19;Haworth, Yorkshire; Bradford; 1 Main Street 3 | 107;Charlotte Brontë;1818-04-21;Thornton, Yorkshire;1855-03-31;Haworth, Yorkshire; Bradford; 2 Main Street 4 | 150;Edgar Allen Poe;1809-01-19;Boston, Massachusetts;1849-10-07;Baltimore, Maryland; Baltimore; 1 Main Street 5 | 170;Richard Carpenter;1929-08-14;King’s Lynn, Norfolk;2012-02-26;Hertfordshire, England; London; 1 Main Street 6 | -------------------------------------------------------------------------------- /test/bookshop/db/data/sap.capire.bookshop-Books_texts.csv: -------------------------------------------------------------------------------- 1 | ID;locale;title;descr 2 | 201;de;Sturmhöhe;Sturmhöhe (Originaltitel: Wuthering Heights) ist der einzige Roman der englischen Schriftstellerin Emily Brontë (1818–1848). Der 1847 unter dem Pseudonym Ellis Bell veröffentlichte Roman wurde vom viktorianischen Publikum weitgehend abgelehnt, heute gilt er als ein Klassiker der britischen Romanliteratur des 19. Jahrhunderts. 3 | 201;fr;Les Hauts de Hurlevent;Les Hauts de Hurlevent (titre original : Wuthering Heights), parfois orthographié Les Hauts de Hurle-Vent, est l'unique roman d'Emily Brontë, publié pour la première fois en 1847 sous le pseudonyme d’Ellis Bell. Loin d'être un récit moralisateur, Emily Brontë achève néanmoins le roman dans une atmosphère sereine, suggérant le triomphe de la paix et du Bien sur la vengeance et le Mal. 4 | 207;de;Jane Eyre;Jane Eyre. Eine Autobiographie (Originaltitel: Jane Eyre. An Autobiography), erstmals erschienen im Jahr 1847 unter dem Pseudonym Currer Bell, ist der erste veröffentlichte Roman der britischen Autorin Charlotte Brontë und ein Klassiker der viktorianischen Romanliteratur des 19. Jahrhunderts. Der Roman erzählt in Form einer Ich-Erzählung die Lebensgeschichte von Jane Eyre (ausgesprochen /ˌdʒeɪn ˈɛə/), die nach einer schweren Kindheit eine Stelle als Gouvernante annimmt und sich in ihren Arbeitgeber verliebt, jedoch immer wieder um ihre Freiheit und Selbstbestimmung kämpfen muss. Als klein, dünn, blass, stets schlicht dunkel gekleidet und mit strengem Mittelscheitel beschrieben, gilt die Heldin des Romans Jane Eyre nicht zuletzt aufgrund der Kino- und Fernsehversionen der melodramatischen Romanvorlage als die bekannteste englische Gouvernante der Literaturgeschichte 5 | 252;de;Eleonora;“Eleonora” ist eine Erzählung von Edgar Allan Poe. Sie wurde 1841 erstveröffentlicht. In ihr geht es um das Paradox der Treue in der Treulosigkeit. 
-------------------------------------------------------------------------------- /test/bookshop/db/data/sap.capire.bookshop-Genres.csv: -------------------------------------------------------------------------------- 1 | ID;parent_ID;name 2 | 10;;Fiction 3 | 11;10;Drama 4 | 12;10;Poetry 5 | 13;10;Fantasy 6 | 14;10;Science Fiction 7 | 15;10;Romance 8 | 16;10;Mystery 9 | 17;10;Thriller 10 | 18;10;Dystopia 11 | 19;10;Fairy Tale 12 | 20;;Non-Fiction 13 | 21;20;Biography 14 | 22;21;Autobiography 15 | 23;20;Essay 16 | 24;20;Speech 17 | -------------------------------------------------------------------------------- /test/bookshop/db/init.js: -------------------------------------------------------------------------------- 1 | /** 2 | * In order to keep basic bookshop sample as simple as possible, we don't add 3 | * reuse dependencies. This db/init.js ensures we still have a minimum set of 4 | * currencies, if not obtained through @capire/common. 5 | */ 6 | 7 | module.exports = async tx => { 8 | const has_common = tx.model.definitions['sap.common.Currencies']?.elements.numcode 9 | if (has_common) return 10 | 11 | const already_filled = await tx.exists('sap.common.Currencies', { code: 'EUR' }) 12 | if (already_filled) return 13 | 14 | await tx.run( 15 | INSERT.into('sap.common.Currencies') 16 | .columns(['code', 'symbol', 'name']) 17 | .rows( 18 | ['EUR', '€', 'Euro'], 19 | ['USD', '$', 'US Dollar'], 20 | ['GBP', '£', 'British Pound'], 21 | ['ILS', '₪', 'Shekel'], 22 | ['JPY', '¥', 'Yen'], 23 | ), 24 | ) 25 | } 26 | -------------------------------------------------------------------------------- /test/bookshop/db/schema.cds: -------------------------------------------------------------------------------- 1 | using { 2 | Currency, 3 | managed, 4 | sap 5 | } from '@sap/cds/common'; 6 | 7 | namespace sap.capire.bookshop; 8 | 9 | entity Books : managed { 10 | key ID : Integer; 11 | title : localized String(111); 12 | descr : localized String(1111); 13 | author : Association to Authors; 14 | genre : Association to Genres default 10; 15 | stock : Integer; 16 | price : Decimal; 17 | currency : Currency; 18 | image : LargeBinary @Core.MediaType: 'image/png'; 19 | footnotes : array of String; 20 | authorsAddress : String = author.address; 21 | } 22 | 23 | entity Authors : managed { 24 | key ID : Integer; 25 | name : String(111); 26 | dateOfBirth : Date; 27 | dateOfDeath : Date; 28 | placeOfBirth : String; 29 | placeOfDeath : String; 30 | books : Association to many Books 31 | on books.author = $self; 32 | 33 | street : String; 34 | city : String; 35 | address : String = street || ', ' || city; 36 | } 37 | 38 | /** Hierarchically organized Code List for Genres */ 39 | entity Genres : sap.common.CodeList { 40 | key ID : Integer; 41 | parent : Association to Genres; 42 | children : Composition of many Genres 43 | on children.parent = $self; 44 | } 45 | 46 | entity A : managed { 47 | key ID : Integer; 48 | B : Integer; 49 | toB : Composition of many B 50 | on toB.ID = $self.B; 51 | C : Integer; 52 | toC : Composition of many C 53 | on toC.ID = $self.C; 54 | } 55 | 56 | entity B : managed { 57 | key ID : Integer; 58 | A : Integer; 59 | toA : Composition of many A 60 | on toA.ID = $self.A; 61 | 62 | C : Integer; 63 | toC : Composition of many C 64 | on toC.ID = $self.C; 65 | } 66 | 67 | entity C : managed { 68 | key ID : Integer; 69 | A : Integer; 70 | toA : Composition of many A 71 | on toA.ID = $self.A; 72 | B : Integer; 73 | toB : Composition of many B 74 | on toB.ID = $self.B; 75 | } 76 | 77 | entity Values { 
78 | key ID : Integer; 79 | value : String; 80 | } 81 | 82 | entity BooksAnnotated as projection on Books; 83 | -------------------------------------------------------------------------------- /test/bookshop/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@capire/bookshop", 3 | "version": "1.0.0", 4 | "description": "A simple self-contained bookshop service.", 5 | "files": [ 6 | "app", 7 | "srv", 8 | "db", 9 | "index.cds", 10 | "index.js" 11 | ], 12 | "dependencies": { 13 | "@cap-js/sqlite": "*", 14 | "@sap/cds": "*", 15 | "express": "^4.17.1" 16 | }, 17 | "scripts": { 18 | "genres": "cds serve test/genres.cds", 19 | "start": "cds run", 20 | "watch": "cds watch" 21 | }, 22 | "cds": { 23 | "features": { 24 | "ieee754compatible": true 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /test/bookshop/srv/admin-service.cds: -------------------------------------------------------------------------------- 1 | using { sap.capire.bookshop as my } from '../db/schema'; 2 | service AdminService @(requires:'admin', path:'/admin') { 3 | entity Books as projection on my.Books; 4 | entity Authors as projection on my.Authors; 5 | entity A as projection on my.A; 6 | 7 | @cds.redirection.target: false 8 | entity RenameKeys as projection on my.Books { 9 | key ID as foo, 10 | author, 11 | author.name 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /test/bookshop/srv/admin-service.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | module.exports = class AdminService extends cds.ApplicationService { 4 | init() { 5 | this.before('NEW', 'Authors', genid) 6 | this.before('NEW', 'Books', genid) 7 | return super.init() 8 | } 9 | } 10 | 11 | /** Generate primary keys for target entity in request */ 12 | async function genid(req) { 13 | const { ID } = await SELECT.one.from(req.target).columns('max(ID) as ID') 14 | req.data.ID = ID - (ID % 100) + 100 + 1 15 | } 16 | -------------------------------------------------------------------------------- /test/bookshop/srv/cat-service.cds: -------------------------------------------------------------------------------- 1 | using { sap.capire.bookshop as my } from '../db/schema'; 2 | service CatalogService @(path:'/browse') { 3 | 4 | /** For displaying lists of Books */ 5 | @readonly entity ListOfBooks as projection on Books 6 | excluding { descr }; 7 | 8 | /** For display in details pages */ 9 | @readonly entity Books as projection on my.Books { *, 10 | author.name as author 11 | } excluding { createdBy, modifiedBy }; 12 | 13 | @requires: 'authenticated-user' 14 | action submitOrder ( book: Books:ID, quantity: Integer ) returns { stock: Integer }; 15 | event OrderedBook : { book: Books:ID; quantity: Integer; buyer: String }; 16 | } 17 | -------------------------------------------------------------------------------- /test/bookshop/srv/cat-service.js: -------------------------------------------------------------------------------- 1 | const cds = require('../../cds.js') 2 | 3 | class CatalogService extends cds.ApplicationService { 4 | init() { 5 | const { Books } = cds.entities('sap.capire.bookshop') 6 | const { ListOfBooks } = this.entities 7 | 8 | // Reduce stock of ordered books if available stock suffices 9 | this.on('submitOrder', async req => { 10 | const { book, quantity } = req.data 11 | if (quantity < 1) return 
req.reject(400, `quantity has to be 1 or more`) 12 | let b = await SELECT`stock`.from(Books, book) 13 | if (!b) return req.error(404, `Book #${book} doesn't exist`) 14 | let { stock } = b 15 | if (quantity > stock) return req.reject(409, `${quantity} exceeds stock for book #${book}`) 16 | await UPDATE(Books, book).with({ stock: (stock -= quantity) }) 17 | await this.emit('OrderedBook', { book, quantity, buyer: req.user.id }) 18 | return { stock } 19 | }) 20 | 21 | // Add some discount for overstocked books 22 | this.after('READ', ListOfBooks, each => { 23 | if (each.stock > 111) each.title += ` -- 11% discount!` 24 | }) 25 | 26 | return super.init() 27 | } 28 | } 29 | 30 | module.exports = { CatalogService } 31 | -------------------------------------------------------------------------------- /test/bookshop/srv/draft-enabled-service.cds: -------------------------------------------------------------------------------- 1 | using { sap.capire.bookshop as my } from '../db/schema'; 2 | service DraftService { 3 | @odata.draft.enabled 4 | entity DraftEnabledBooks 5 | { 6 | key ID : Integer; 7 | title : String; 8 | } 9 | 10 | @odata.draft.enabled 11 | entity MoreDraftEnabledBooks as projection on my.Books; 12 | } 13 | -------------------------------------------------------------------------------- /test/bookshop/srv/genres.cds: -------------------------------------------------------------------------------- 1 | using { sap.capire.bookshop as my } from '../db/schema'; 2 | 3 | @path: '/test' 4 | service TestService { 5 | entity Genres as projection on my.Genres; 6 | entity A as projection on my.A; 7 | } 8 | 9 | annotate my.Genres:children with @depth: 5; 10 | -------------------------------------------------------------------------------- /test/bookshop/srv/tree-service.cds: -------------------------------------------------------------------------------- 1 | using {sap.capire.bookshop as my} from '../db/schema'; 2 | 3 | 4 | @path: '/tree' 5 | service TreeService { 6 | entity Genres as 7 | projection on my.Genres { 8 | *, 9 | null as LimitedDescendantCount, 10 | null as DistanceFromRoot, 11 | null as DrillState, 12 | null as Matched, 13 | null as MatchedDescendantCount, 14 | null as LimitedRank, 15 | }; 16 | 17 | annotate Genres with @Aggregation.RecursiveHierarchy#GenresHierarchy: { 18 | $Type : 'Aggregation.RecursiveHierarchyType', 19 | NodeProperty : ID, 20 | ParentNavigationProperty: parent 21 | }; 22 | } 23 | -------------------------------------------------------------------------------- /test/compliance/UPSERT.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('../cds.js') 2 | 3 | describe('UPSERT', () => { 4 | const { data, expect } = cds.test(__dirname + '/resources') 5 | data.autoIsolation(true) 6 | 7 | describe('into', () => { 8 | test('Apply default for keys before join to existing data', async () => { 9 | const { keys } = cds.entities('basic.common') 10 | // HXE cannot handle the default key logic when using @sap/hana-client 11 | await INSERT([{ id: 0, data: 'insert' }, { id: 0, default: 'overwritten', data: 'insert' }]).into(keys) 12 | const insert = await SELECT.from(keys) 13 | 14 | await UPSERT([{ id: 0, data: 'upsert' }, { id: 0, default: 'overwritten', data: 'upsert' }]).into(keys) 15 | const upsert = await SELECT.from(keys) 16 | 17 | for (let i = 0; i < insert.length; i++) { 18 | const ins = insert[i] 19 | const ups = upsert[i] 20 | expect(ups.id).to.eq(ins.id) 21 | expect(ups.default).to.eq(ins.default) 22 | 
expect(ins.data).to.eq('insert') 23 | expect(ups.data).to.eq('upsert') 24 | } 25 | }) 26 | }) 27 | 28 | describe('entries', () => { 29 | test('smart quoting', async () => { 30 | const { ASC } = cds.entities('complex.keywords') 31 | await UPSERT.into(ASC).entries({ ID: 42, select: 4711 }) 32 | await UPSERT.into(ASC).entries({ ID: 42, alias: 9 }) 33 | const select = await SELECT.one.from(ASC).where('ID = 42') 34 | expect(select).to.eql({ ID: 42, select: 4711, alias: 9 }) 35 | }) 36 | }) 37 | 38 | describe('columns', () => { 39 | describe('values', () => { 40 | test.skip('missing', () => { 41 | throw new Error('not supported') 42 | }) 43 | }) 44 | 45 | describe('rows', () => { 46 | test('smart quoting', async () => { 47 | const { ASC } = cds.entities('complex.keywords') 48 | await UPSERT.into(ASC) 49 | .columns(['ID', 'select']) 50 | .rows([[42, 4711]]) 51 | let select = await SELECT.one.from(ASC, ['ID', 'select']).where('ID = 42') 52 | expect(select).to.eql({ ID: 42, select: 4711 }) 53 | }) 54 | }) 55 | }) 56 | 57 | describe('as', () => { 58 | test.skip('missing', () => { 59 | throw new Error('not supported') 60 | }) 61 | }) 62 | 63 | test('affected row', async () => { 64 | const affectedRows = await UPSERT.into('complex.associations.Books').entries({ ID: 9999999, title: 'Book' }) 65 | expect(affectedRows).to.be.eq(1) 66 | }) 67 | }) 68 | -------------------------------------------------------------------------------- /test/compliance/client-options.test.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert') 2 | const cds = require('../cds.js') 3 | cds.test.in(__dirname + '/resources') 4 | 5 | const clientOption = cds.env.requires.db.client 6 | let called = 0 7 | Object.defineProperty(cds.env.requires.db, 'client', { 8 | get: () => { 9 | called++ 10 | return clientOption 11 | } 12 | }) 13 | /** 14 | * Tests explicitly that all DBs access the specific client options 15 | */ 16 | describe('client options', () => { 17 | cds.test() 18 | 19 | test('client option is called during bootstrapping', async () => { 20 | assert.strictEqual(called >= 1, true) 21 | }) 22 | }) 23 | -------------------------------------------------------------------------------- /test/compliance/resources/db/basic/common.cds: -------------------------------------------------------------------------------- 1 | namespace basic.common; 2 | 3 | using { 4 | cuid as _cuid, 5 | managed as _managed, 6 | temporal as _temporal 7 | } from '@sap/cds/common'; 8 | 9 | entity cuid : _cuid {} 10 | entity managed : _cuid, _managed {} 11 | entity temporal : _cuid, _temporal {} 12 | 13 | // Set default values for all literals from ./literals.cds 14 | entity ![default] : _cuid { 15 | uuidDflt : UUID default '00000000-0000-0000-4000-000000000000'; 16 | bool : Boolean default false; 17 | integer8 : UInt8 default 8; 18 | integer16 : Int16 default 9; 19 | integer32 : Int32 default 10; 20 | integer64 : Int64 default 11; 21 | double : cds.Double default 1.1; 22 | float : cds.Decimal default 1.1; 23 | decimal : cds.Decimal(5, 4) default 1.11111; 24 | string : String default 'default'; 25 | char : String(1) default 'd'; 26 | short : String(10) default 'default'; 27 | medium : String(100) default 'default'; 28 | large : String(5000) default 'default'; 29 | // HANA Does not support default values on BLOB types 30 | // default value cannot be created on column of data type NCLOB: BLOB 31 | // blob : LargeString default 'default'; 32 | date : Date default '1970-01-01'; 33 | date_lit : Date
default date'2021-05-05'; 34 | time : Time default '01:02:03'; 35 | dateTime : DateTime default '1970-01-01T01:02:03Z'; 36 | timestamp : Timestamp default '1970-01-01T01:02:03.123456789Z'; 37 | // Comment out, when HANA supports default functions or compiler generates them not as defaults 38 | // func : String(100) default tolower('DEfAUlT'); 39 | // Binary default values don't make sense. while technically possible 40 | // binary : Binary default 'YmluYXJ5'; // base64 encoded 'binary'; 41 | // largebinary : LargeBinary default 'YmluYXJ5'; // base64 encoded 'binary'; 42 | // Vector default values probably also don't make sense 43 | // vector : Vector default '[1.0,0.5,0.0,...]'; 44 | } 45 | 46 | entity dollar_now_default { 47 | key id : Integer; 48 | date : Date default $now; 49 | time : Time default $now; 50 | dateTime : DateTime default $now; 51 | timestamp : Timestamp default $now; 52 | } 53 | 54 | entity keys { 55 | key id : Integer; 56 | key default : String default 'defaulted'; 57 | data : String; 58 | } 59 | -------------------------------------------------------------------------------- /test/compliance/resources/db/basic/common/basic.common.default.js: -------------------------------------------------------------------------------- 1 | const dstring = size => ({ d: 'default'.slice(0, size), o: 'not default'.slice(0, size) }) 2 | 3 | const columns = { 4 | uuidDflt: { d: '00000000-0000-0000-4000-000000000000', o: '11111111-1111-1111-4111-111111111111'}, 5 | bool: { d: false, o: true }, 6 | integer8: { d: 8, o: 18 }, 7 | integer16: { d: 9, o: 19 }, 8 | integer32: { d: 10, o: 20 }, 9 | integer64: { d: '11', o: '21' }, 10 | double: { d: 1.1, o: 2.2 }, 11 | float: { d: '1.1', o: '2.2' }, 12 | decimal: { d: '1.1111', o: '2.1111' }, 13 | string: dstring(255), 14 | char: dstring(1), 15 | short: dstring(10), 16 | medium: dstring(100), 17 | large: dstring(5000), 18 | // blob: dstring(5001), 19 | date: { d: '1970-01-01', o: '2000-01-01' }, 20 | date_lit: { d: '2021-05-05', o: '2011-08-01' }, 21 | time: { d: '01:02:03', o: '21:02:03' }, 22 | dateTime: { d: '1970-01-01T01:02:03Z', o: '2000-01-01T21:02:03Z' }, 23 | timestamp: { d: '1970-01-01T01:02:03.123Z', o: '2000-01-01T21:02:03.123Z' }, 24 | // func: { d: 'default', o: 'DefaULT' }, 25 | // Binary default values don't make sense. 
while technically possible 26 | // binary: { d: Buffer.from('binary'), o: Buffer.from('...') }, 27 | // largebinary: { d: Buffer.from('binary'), o: Buffer.from('...') }, 28 | } 29 | 30 | module.exports = Object.keys(columns).map(c => { 31 | const vals = columns[c] 32 | return [{ 33 | [c]: null // Make sure that null still works 34 | }, { 35 | [c]: vals.o // Make sure that overwriting the default works 36 | }, { 37 | [c]: vals.d // Make sure that the default can also be written 38 | }, { 39 | [`=${c}`]: vals.d // Make sure when excluded in the data that default is returned 40 | }] 41 | }).flat() -------------------------------------------------------------------------------- /test/compliance/resources/db/basic/index.cds: -------------------------------------------------------------------------------- 1 | namespace basic; 2 | 3 | using from './projection'; 4 | using from './literals'; 5 | using from './common'; 6 | -------------------------------------------------------------------------------- /test/compliance/resources/db/basic/literals.cds: -------------------------------------------------------------------------------- 1 | namespace basic.literals; 2 | 3 | entity globals { 4 | bool : Boolean; 5 | } 6 | 7 | entity uuid { 8 | uuid : UUID; 9 | } 10 | 11 | entity number { 12 | integer8 : UInt8; 13 | integer16 : Int16; 14 | integer32 : Int32; 15 | integer64 : Int64; 16 | double : cds.Double; 17 | // Decimal: (p,s) p = 1 - 38, s = 0 - p 18 | // p = number of total decimal digits 19 | // s = number of decimal digits after decimal seperator 20 | float : cds.Decimal; // implied float 21 | decimal : cds.Decimal(5, 4); // 𝝅 -> 3.1415 22 | } 23 | 24 | // NVARCHAR: Unicode string between 1 and 5000 length (default: 5000) 25 | entity string { 26 | string : String; 27 | char : String(1); 28 | short : String(10); 29 | medium : String(100); 30 | large : String(5000); // TODO: should be broken on HANA || switch to Binary 31 | blob : LargeString; // NCLOB: Unicode binary (max size 2 GiB) 32 | } 33 | 34 | // ISO Date format (1970-01-01) 35 | entity date { 36 | date : Date; 37 | } 38 | 39 | // ISO Time format (00:00:00) 40 | entity time { 41 | time : Time; 42 | } 43 | 44 | // ISO DateTime format (1970-1-1T00:00:00Z) 45 | entity dateTime { 46 | dateTime : DateTime; 47 | } 48 | 49 | // TODO: Verify that everyone agrees to only allow UTC timestamps 50 | // ISO timestamp format (1970-1-1T00:00:00.000Z) 51 | // HANA timestamp format (1970-1-1T00:00:00.0000000Z) 52 | entity timestamp { 53 | timestamp : Timestamp; 54 | } 55 | 56 | entity array { 57 | string : array of String; 58 | integer : array of Integer; 59 | } 60 | 61 | entity map { 62 | map : Map; 63 | } 64 | 65 | entity binaries { 66 | binary : Binary; 67 | largebinary : LargeBinary; 68 | } 69 | 70 | /* Excluded from the tests until fully supported 71 | entity vectors { 72 | vector : Vector; 73 | } 74 | */ 75 | -------------------------------------------------------------------------------- /test/compliance/resources/db/basic/literals/basic.literals.array.js: -------------------------------------------------------------------------------- 1 | module.exports = [ 2 | { 3 | string: null, 4 | }, 5 | { 6 | integer: null, 7 | }, 8 | { 9 | string: ["0","1","2","3"], 10 | '=string': ['0', '1', '2', '3'], 11 | }, 12 | { 13 | integer: [0,1,2,3], 14 | '=integer': [0, 1, 2, 3], 15 | }, 16 | ] 17 | -------------------------------------------------------------------------------- /test/compliance/resources/db/basic/literals/basic.literals.binaries.js: 
-------------------------------------------------------------------------------- 1 | const { Readable } = require('stream') 2 | 3 | const generator = function* () { 4 | yield Buffer.from('Simple Large Binary') 5 | } 6 | 7 | module.exports = [ 8 | { 9 | binary: null, 10 | largebinary: null, 11 | }, 12 | { 13 | binary: Buffer.from('Simple Binary') 14 | }, 15 | { 16 | binary: Buffer.from('Simple Binary').toString('base64'), 17 | '=binary': Buffer.from('Simple Binary') 18 | }, 19 | { 20 | largebinary: Buffer.from('Simple Large Binary'), 21 | '=largebinary': () => Readable.from(generator()) 22 | }, 23 | { 24 | largebinary: Buffer.from('Simple Large Binary').toString('base64'), 25 | '=largebinary': () => Readable.from(generator()) 26 | }, 27 | { 28 | largebinary: () => Readable.from(generator()), 29 | '=largebinary': () => Readable.from(generator()) 30 | } 31 | ] 32 | -------------------------------------------------------------------------------- /test/compliance/resources/db/basic/literals/basic.literals.date.js: -------------------------------------------------------------------------------- 1 | module.exports = [ 2 | { 3 | date: null, 4 | }, 5 | { 6 | date: '1970-01-01', 7 | }, 8 | { 9 | // HANA supports up-to 9999-12-31 10 | date: '9999-12-31', 11 | }, 12 | /* Ignoring transformations 13 | { 14 | date: '1970-1-1', 15 | '=date': '1970-01-01' 16 | }, 17 | { 18 | date: '2', 19 | '=date': '0002-01-01' 20 | }, 21 | { 22 | // HANA supports left trim 23 | date: ' 2', 24 | '=date': '0002-01-01' 25 | }, 26 | { 27 | // HANA does not support right trim 28 | date: '2 ', 29 | '!': 'Invalid cds.Date "2 "' 30 | }, 31 | { 32 | // HANA does not support date expansion when time is included 33 | date: '2 00:00:00', 34 | '!': 'Invalid cds.Date "2 00:00:00"' 35 | }, 36 | { 37 | date: '2-2', 38 | '=date': '0002-02-01' 39 | }, 40 | { 41 | date: '2-2-2', 42 | '=date': '0002-02-02' 43 | }, 44 | { 45 | date: () => new Date('1970-01-01Z'), 46 | '=date': '1970-01-01' 47 | }, 48 | { 49 | // Z+2359 is supported by javascript, but HANA does not support timezones without a time being defined 50 | date: '1970-01-01Z+2359', 51 | '!': 'Invalid cds.Date "1970-01-01Z+2359"' 52 | }, 53 | { 54 | date: '1970-01-01Z-2359', 55 | '!': 'Invalid cds.Date "1970-01-01Z-2359"' 56 | }, 57 | { 58 | // Missing 'Z' before the +/-, because - is a valid separator for the date section 59 | date: '1970-01-01+2359', 60 | '!': 'Invalid cds.Date "1970-01-01+2359"' 61 | }, 62 | { 63 | date: '1970-01-01T01:10:59', 64 | '=date': '1970-01-01' 65 | }, 66 | { 67 | date: '1970-01-01T00:00:00-2359', 68 | '=date': '1970-01-01' 69 | }, 70 | { 71 | // HANA DATE does not support year 0 or lower 72 | date: '0000-01-01', 73 | '!': 'Invalid cds.Date "0000-01-01"' 74 | }, 75 | { 76 | // HANA DATE does not support year 10000 or higher 77 | date: '10000-01-01', 78 | '!': 'Invalid cds.Date "10000-01-01"' 79 | }, 80 | { 81 | // HANA DATE does not assume date information 82 | date: '00:00:00', 83 | '!': 'Invalid cds.Date "00:00:00"' 84 | } 85 | */ 86 | ] 87 | -------------------------------------------------------------------------------- /test/compliance/resources/db/basic/literals/basic.literals.globals.js: -------------------------------------------------------------------------------- 1 | module.exports = [ 2 | { 3 | bool: true, 4 | }, 5 | { 6 | bool: false, 7 | }, 8 | { 9 | bool: null, 10 | }, 11 | ] 12 | -------------------------------------------------------------------------------- /test/compliance/resources/db/basic/literals/basic.literals.map.js: 
-------------------------------------------------------------------------------- 1 | module.exports = [ 2 | { 3 | map: null, 4 | }, 5 | { 6 | map: {}, 7 | }, 8 | { 9 | map: { key: null }, 10 | }, 11 | { 12 | map: { key: 'value' }, 13 | }, 14 | { 15 | map: { a: { b: { c: 3 } } }, 16 | }, 17 | { 18 | map: { a: [{ b: 1 }, { c: 2 }] }, 19 | }, 20 | ] 21 | -------------------------------------------------------------------------------- /test/compliance/resources/db/basic/literals/basic.literals.number.js: -------------------------------------------------------------------------------- 1 | module.exports = [ 2 | { 3 | integer8: null, 4 | }, 5 | { 6 | integer8: 0, 7 | }, 8 | { 9 | integer8: 255, 10 | }, 11 | /* REVISIT: UInt8 is not allowed to overflow/underflow the range 0-255 12 | { 13 | integer8: -1, 14 | '!': /./, 15 | }, 16 | { 17 | integer8: 256, 18 | '!': /./, 19 | }, 20 | */ 21 | { 22 | integer16: null, 23 | }, 24 | { 25 | integer16: 32767, 26 | }, 27 | { 28 | integer16: -32768, 29 | }, 30 | /* REVISIT: Int16 is not allowed to overflow/underflow the range -32768 - 32767 31 | { 32 | integer16: 32768, 33 | '!': /./, 34 | }, 35 | { 36 | integer16: -32769, 37 | '!': /./, 38 | }, 39 | */ 40 | { 41 | integer32: null, 42 | }, 43 | { 44 | integer32: -2147483648, 45 | }, 46 | { 47 | integer32: 2147483647, 48 | }, 49 | { 50 | integer64: null, 51 | }, 52 | { 53 | integer64: '9223372036854775806', 54 | }, 55 | { 56 | integer64: '-9223372036854775808', 57 | }, 58 | { 59 | decimal: null 60 | }, 61 | { 62 | decimal: 0, 63 | '=decimal': '0.0000' 64 | }, 65 | { 66 | decimal: 1, 67 | '=decimal': '1.0000' 68 | }, 69 | { 70 | decimal: '3.14153', 71 | '=decimal': '3.1415' 72 | }, 73 | { 74 | decimal: 3.14, 75 | '=decimal': '3.1400' 76 | }, 77 | { 78 | double: 3.14159265358979 79 | }, 80 | { 81 | float: '3.14159265358979', 82 | '=float': /^3\.14159265358979/ 83 | }, 84 | { 85 | float: '-9007199254740991', 86 | '=float': /-9007199254740991/ 87 | }, 88 | { 89 | float: '9007199254740991', 90 | '=float': /^9007199254740991/ 91 | }, 92 | /* Ignoring transformations 93 | { 94 | decimal: 3.141592653589793, 95 | '=decimal': 3.1415 96 | }, 97 | { 98 | decimal: 31415, 99 | '=decimal': 5 100 | }, 101 | */ 102 | ] 103 | -------------------------------------------------------------------------------- /test/compliance/resources/db/basic/literals/basic.literals.string.js: -------------------------------------------------------------------------------- 1 | const alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 2 | 3 | const alphabetize = (_, i) => alphabet[i % alphabet.length] 4 | 5 | module.exports = [ 6 | { 7 | string: null, 8 | char: null, 9 | short: null, 10 | medium: null, 11 | large: null, 12 | blob: null, 13 | }, 14 | { 15 | string: undefined, 16 | char: undefined, 17 | short: undefined, 18 | medium: undefined, 19 | large: undefined, 20 | blob: undefined, 21 | '=string': null, 22 | '=char': null, 23 | '=short': null, 24 | '=medium': null, 25 | '=large': null, 26 | '=blob': null, 27 | }, 28 | { 29 | string: 'Simple String', 30 | }, 31 | { 32 | char: 'A', 33 | }, 34 | { 35 | char: '대', // Ensure multi-byte UTF-8 characters also fit into a single character column 36 | }, 37 | { 38 | large: () => [...new Array(1000)].map(alphabetize).join(''), 39 | }, 40 | { 41 | string: '💾', 42 | }, 43 | { 44 | string: 45 | 'Simplified Chinese: 中国, Traditional Chinese: 中國, Korean: 대한민국, Japanese: 日本国, Russian: Российская Федерация, Greek: Ελληνική Δημοκρατία', 46 | }, 47 | /* Ignoring transformations 48 | { 49 | char: () => alphabet, 50 | '=char': () =>
alphabet.substring(0, 1) 51 | }, 52 | { 53 | short: () => alphabet, 54 | '=short': () => alphabet.substring(0, 10) 55 | }, 56 | { 57 | medium: () => [...new Array(100)].map(alphabetize).join('') 58 | }, 59 | { 60 | medium: () => [...new Array(101)].map(alphabetize).join(''), 61 | '=medium': () => [...new Array(100)].map(alphabetize).join('') 62 | }, 63 | { 64 | large: () => [...new Array(10001)].map(alphabetize).join(''), 65 | '=large': () => [...new Array(10000)].map(alphabetize).join('') 66 | }, 67 | /* 68 | { // Have to fix jest worker max memory settings for 2gb limit testing 69 | blob: () => [...new Array(1 << 30)].map(alphabetize).join('') 70 | } 71 | */ 72 | /* 73 | { 74 | blob: () => Buffer.from([...new Array(1 << 20)].map(alphabetize).join('')) 75 | }, 76 | */ 77 | ] 78 | -------------------------------------------------------------------------------- /test/compliance/resources/db/basic/literals/basic.literals.time.js: -------------------------------------------------------------------------------- 1 | module.exports = [ 2 | { 3 | time: null, 4 | }, 5 | { 6 | time: '00:00:00', 7 | }, 8 | /* Ignoring transformations 9 | { 10 | time: '0', 11 | '=time': '00:00:00' 12 | }, 13 | { 14 | time: '0:0', 15 | '=time': '00:00:00' 16 | }, 17 | { 18 | time: '0:0:0', 19 | '=time': '00:00:00' 20 | }, 21 | { 22 | time: '1-1-1 0', 23 | '=time': '00:00:00' 24 | }, 25 | { 26 | time: '25:99:99', 27 | '!': 'Invalid cds.Time "25:99:99"' 28 | }, 29 | { 30 | time: '12:99:99', 31 | '!': 'Invalid cds.Time "12:99:99"' 32 | }, 33 | { 34 | time: '0:0:0.0', 35 | '=time': '00:00:00' 36 | }, 37 | { 38 | time: '0:0:0.000000000000000000000000000000', 39 | '=time': '00:00:00' 40 | }, 41 | { 42 | time: '1', 43 | '=time': '01:00:00' 44 | }, 45 | { 46 | time: '12', 47 | '=time': '12:00:00' 48 | }, 49 | { 50 | time: '24', 51 | '=time': '24:00:00' 52 | }, 53 | { 54 | time: '25', 55 | '!': 'Invalid cds.Time "25"' 56 | }, 57 | { 58 | time: '24:01', 59 | '!': 'Invalid cds.Time "24:01"' 60 | }, 61 | { 62 | time: '24:00:01', 63 | '!': 'Invalid cds.Time "24:00:01"' 64 | }, 65 | { 66 | time: '24:00:00.1', 67 | '=time': '24:00:00' 68 | } 69 | */ 70 | ] 71 | -------------------------------------------------------------------------------- /test/compliance/resources/db/basic/literals/basic.literals.vectors.js: -------------------------------------------------------------------------------- 1 | 2 | module.exports = [ 3 | { 4 | vector: null, 5 | }, 6 | { 7 | vector: '[0.1,0.2,0.3]', 8 | }, 9 | ] 10 | -------------------------------------------------------------------------------- /test/compliance/resources/db/complex/associations.cds: -------------------------------------------------------------------------------- 1 | namespace complex.associations; 2 | 3 | entity Books { 4 | key ID : Integer; 5 | title : String(111); 6 | author : Association to Authors; 7 | name : Association to Authors on $self.author.ID = name.ID; 8 | } 9 | 10 | entity Authors { 11 | key ID : Integer; 12 | name : String(111); 13 | books : Association to many Books on books.author = $self; 14 | } 15 | -------------------------------------------------------------------------------- /test/compliance/resources/db/complex/associationsUnmanaged.cds: -------------------------------------------------------------------------------- 1 | namespace complex.associations.unmanaged; 2 | 3 | entity Books { 4 | key ID : Integer; 5 | title : String(111); 6 | author_ID: Integer; 7 | author : Association to Authors on author.ID = $self.author_ID; 8 | } 9 | 10 | entity Authors { 11 | 
key ID : Integer; 12 | name : String(111); 13 | books : Association to many Books on books.author = $self; 14 | static : Association to many Books on static.author = $self and static.ID > 0 and name != null; 15 | } 16 | -------------------------------------------------------------------------------- /test/compliance/resources/db/complex/computed.cds: -------------------------------------------------------------------------------- 1 | namespace complex.computed; 2 | 3 | entity static { 4 | value : Integer; 5 | integer : Integer = 1; 6 | double : Double = 0.1; 7 | string : String = ''; 8 | } 9 | 10 | entity dynamic { 11 | integer : Integer; 12 | @(Core.Computed: true,readonly) 13 | ![case] : String = ( 14 | case 15 | when 16 | integer = 0 17 | then 18 | 'zero' 19 | when 20 | integer = 1 21 | then 22 | 'one' 23 | when 24 | integer = 2 25 | then 26 | 'two' 27 | end 28 | ); 29 | lambda : String = (integer = 0 ? 'none' : 'some') 30 | } 31 | -------------------------------------------------------------------------------- /test/compliance/resources/db/complex/computed/complex.computed.dynamic.js: -------------------------------------------------------------------------------- 1 | module.exports = [ 2 | { 3 | integer: 0, 4 | '=case': 'zero', 5 | '=lambda': 'none', 6 | }, 7 | { 8 | integer: 1, 9 | '=case': 'one', 10 | '=lambda': 'some', 11 | }, 12 | { 13 | integer: 2, 14 | '=case': 'two', 15 | '=lambda': 'some', 16 | } 17 | ] 18 | -------------------------------------------------------------------------------- /test/compliance/resources/db/complex/computed/complex.computed.static.js: -------------------------------------------------------------------------------- 1 | module.exports = [ 2 | { 3 | '=integer': 1, 4 | '=double': 0.1, 5 | '=string': '', 6 | } 7 | ] 8 | -------------------------------------------------------------------------------- /test/compliance/resources/db/complex/index.cds: -------------------------------------------------------------------------------- 1 | namespace complex; 2 | 3 | using from './computed'; 4 | using from './associations'; 5 | using from './associationsUnmanaged'; 6 | using from './uniques'; 7 | using from './keywords'; 8 | 9 | entity Root { 10 | key ID : Integer; 11 | fooRoot : String; 12 | children : Composition of many Child 13 | on children.parent = $self; 14 | } 15 | 16 | entity Child { 17 | key ID : Integer; 18 | fooChild : String; 19 | parent : Association to one Root; 20 | children : Composition of many GrandChild 21 | on children.parent = $self 22 | } 23 | 24 | entity GrandChild { 25 | key ID : Integer; 26 | fooGrandChild : String; 27 | parent : Association to one Child; 28 | } 29 | 30 | entity RootPWithKeys as 31 | projection on Root { 32 | key ID, 33 | fooRoot, 34 | children 35 | } 36 | 37 | entity ChildP as 38 | projection on Child { 39 | key ID, 40 | fooChild, 41 | parent 42 | } 43 | 44 | entity ChildPWithWhere as projection on Child where fooChild = 'bar' 45 | -------------------------------------------------------------------------------- /test/compliance/resources/db/complex/keywords.cds: -------------------------------------------------------------------------------- 1 | namespace complex.keywords; 2 | 3 | // ORDER / ALTER / ASC / NUMBER are reserved words in ANSI SQL standard 4 | entity Order { 5 | key ID : Integer; 6 | alter : Composition of many Alter 7 | on alter.order = $self; 8 | } 9 | 10 | entity Alter { 11 | key ID : Integer; 12 | number : Integer; 13 | order : Association to Order; 14 | } 15 | 16 | entity ASC { 17 | key ID : Integer; 18 | 
alias : Integer; 19 | ![select] : Integer; 20 | } 21 | 22 | entity ![1234567890] { 23 | ![1] : Boolean; 24 | ![2] : UUID; 25 | ![3] : UInt8; 26 | ![4] : Int16; 27 | ![5] : Int32; 28 | ![6] : Int64; 29 | ![7] : cds.Double; 30 | ![8] : cds.Decimal; 31 | ![9] : cds.Decimal(5, 4); 32 | ![10] : String; 33 | ![11] : String(1); 34 | ![12] : String(10); 35 | ![13] : String(100); 36 | ![14] : String(5000); 37 | ![15] : LargeString; 38 | ![16] : Date; 39 | ![17] : Time; 40 | ![18] : DateTime; 41 | ![19] : Timestamp; 42 | ![20] : LargeString; 43 | ![21] : LargeString; 44 | ![22] : Binary; 45 | ![23] : LargeBinary; 46 | ![24] : Boolean; // Vector; 47 | } 48 | 49 | entity ![~`!@#$%^&()_+-='"\/.,[]]] { 50 | ![~] : Boolean; 51 | ![`] : UUID; 52 | ![!!] : UInt8; 53 | ![@] : Int16; 54 | ![#] : Int32; 55 | ![$] : Int64; 56 | ![%] : cds.Double; 57 | ![^] : cds.Decimal; 58 | ![&] : cds.Decimal(5, 4); 59 | ![*] : String; 60 | ![(] : String(1); 61 | ![)] : String(10); 62 | ![_] : String(100); 63 | ![+] : String(5000); 64 | ![-] : LargeString; 65 | ![=] : Date; 66 | !['] : Time; 67 | !["] : DateTime; 68 | ![\1] : Timestamp; 69 | ![/] : LargeString; 70 | ![,] : LargeString; 71 | ![[] : Binary; 72 | ![]]] : LargeBinary; 73 | ![💾] : Boolean; // Vector; 74 | } 75 | -------------------------------------------------------------------------------- /test/compliance/resources/db/complex/keywords/complex.keywords.1234567890.js: -------------------------------------------------------------------------------- 1 | const isHana = /hana/.test(process.argv[1]) 2 | module.exports = isHana ? [] : [ 3 | { 4 | "1": null, 5 | "2": null, 6 | "3": null, 7 | "4": null, 8 | "5": null, 9 | "6": null, 10 | "7": null, 11 | "8": null, 12 | "9": null, 13 | "10": null, 14 | "11": null, 15 | "12": null, 16 | "13": null, 17 | "14": null, 18 | "15": null, 19 | "16": null, 20 | "17": null, 21 | "18": null, 22 | "19": null, 23 | "20": null, 24 | "21": null, 25 | "22": null, 26 | "23": null, 27 | "24": null, 28 | }, 29 | ] 30 | -------------------------------------------------------------------------------- /test/compliance/resources/db/complex/keywords/complex.keywords.~%60!%40%23%24%25%5E%26()_%2B-%3D'%22%5C%2F.%2C%5B%5D.js.js: -------------------------------------------------------------------------------- 1 | const isHana = /hana/.test(process.argv[1]) 2 | module.exports = isHana ? 
[] : [ 3 | { 4 | "~": null, 5 | "`": null, 6 | "!!": null, 7 | "@": null, 8 | "#": null, 9 | "$": null, 10 | "%": null, 11 | "^": null, 12 | "&": null, 13 | "*": null, 14 | "(": null, 15 | ")": null, 16 | "_": null, 17 | "+": null, 18 | "-": null, 19 | "=": null, 20 | "'": null, 21 | "\"": null, 22 | "\\1": null, 23 | "/": null, 24 | ',': null, 25 | "[": null, 26 | "]": null, 27 | "💾": null, 28 | }, 29 | ] 30 | -------------------------------------------------------------------------------- /test/compliance/resources/db/complex/uniques.cds: -------------------------------------------------------------------------------- 1 | namespace complex.uniques; 2 | 3 | entity Books { 4 | key ID : Integer; 5 | title : String(111); 6 | pages : Composition of many Pages on pages.book = $self; 7 | } 8 | 9 | @assert.unique: { number: [number, book] } 10 | entity Pages { 11 | key ID : Integer; 12 | book : Association to Books; 13 | number : Integer; 14 | } 15 | -------------------------------------------------------------------------------- /test/compliance/resources/db/data/basic.literals-dateTime.csv: -------------------------------------------------------------------------------- 1 | dateTime; 2 | 1970-02-02T10:09:34Z; 3 | -------------------------------------------------------------------------------- /test/compliance/resources/db/data/basic.literals-globals.csv: -------------------------------------------------------------------------------- 1 | bool 2 | true 3 | false 4 | null 5 | -------------------------------------------------------------------------------- /test/compliance/resources/db/data/basic.literals-string.csv: -------------------------------------------------------------------------------- 1 | string; 2 | yes; 3 | no; 4 | ; 5 | -------------------------------------------------------------------------------- /test/compliance/resources/db/data/complex.associations.Authors.csv: -------------------------------------------------------------------------------- 1 | ID;name 2 | 1;Emily 3 | -------------------------------------------------------------------------------- /test/compliance/resources/db/data/complex.associations.Books.csv: -------------------------------------------------------------------------------- 1 | ID;title;author_ID 2 | 1;Wuthering Heights;1 3 | -------------------------------------------------------------------------------- /test/compliance/resources/db/data/complex.associations.unmanaged.Authors.csv: -------------------------------------------------------------------------------- 1 | ID;name 2 | 1;Emily 3 | -------------------------------------------------------------------------------- /test/compliance/resources/db/data/complex.associations.unmanaged.Books.csv: -------------------------------------------------------------------------------- 1 | ID;title;author_ID 2 | 1;Wuthering Heights;1 3 | -------------------------------------------------------------------------------- /test/compliance/resources/db/edge/index.cds: -------------------------------------------------------------------------------- 1 | namespace edge; 2 | -------------------------------------------------------------------------------- /test/compliance/resources/db/hana/funcs/date-generator.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs') 2 | const path = require('path') 3 | const hana = require('./hana') 4 | 5 | const allDates = [] 6 | for (let year = 1970; year < 9999; year++) { 7 | const start = new Date(year + '') 8 | const end = new 
Date(year + 1 + '') 9 | let day = 2 10 | allDates.push(start) 11 | while (allDates[allDates.length - 1] < end) { 12 | const cur = new Date(start) 13 | cur.setDate(day++) 14 | allDates.push(cur) 15 | } 16 | } 17 | 18 | let seed = 1 19 | const next = function (i) { 20 | seed = (seed << 5) - seed + i 21 | return (seed >>> 0) % allDates.length 22 | } 23 | for (let i = 0; i < 10; i++) next(i) 24 | 25 | const csv = fs.createWriteStream(path.resolve(__dirname, '../../data/edge.hana.functions-timestamps.csv')) 26 | csv.on('ready', async () => { 27 | try { 28 | const columns = ['a', 'b', 'years', 'months', 'days', 'seconds', 'nano100'] 29 | const funcs = { 30 | a: () => `'${allDates[next(1)].toISOString()}'`, 31 | b: () => `'${allDates[next(1)].toISOString()}'`, 32 | years: () => 'YEARS_BETWEEN(:a,:b)', 33 | months: () => 'MONTHS_BETWEEN(:a,:b)', 34 | days: () => 'DAYS_BETWEEN(:a,:b)', 35 | seconds: () => 'SECONDS_BETWEEN(:a,:b)', 36 | nano100: () => 'NANO100_BETWEEN(:a,:b)', 37 | } 38 | csv.write(columns.join(';')) 39 | csv.write('\n') 40 | for (let i = 0; i < 1000; i++) { 41 | const vals = columns.map(n => funcs[n]()) 42 | const sql = `SELECT ${vals.map((c, i) => `${c} AS "${columns[i]}"`).join(',')} FROM DUMMY`.replace( 43 | /:(\D)/g, 44 | (_, n) => vals[columns.indexOf(n)], 45 | ) 46 | const res = (await hana(sql))[0] 47 | csv.write(columns.map(c => res[c]).join(';')) 48 | csv.write('\n') 49 | } 50 | } catch (e) { 51 | // eslint-disable-next-line no-console 52 | console.error(e.stack) 53 | process.exit(1) 54 | } 55 | csv.close() 56 | }) 57 | 58 | csv.on('close', () => process.exit()) 59 | -------------------------------------------------------------------------------- /test/compliance/resources/db/hana/funcs/datetime.cds: -------------------------------------------------------------------------------- 1 | namespace edge.hana.functions; 2 | 3 | entity timestamps { 4 | a : Timestamp; 5 | b : Timestamp; 6 | years : Integer64; 7 | months : Integer64; 8 | days : Integer64; 9 | seconds : Integer64; 10 | nano100 : Integer64; 11 | } 12 | -------------------------------------------------------------------------------- /test/compliance/resources/db/hana/funcs/hana.js: -------------------------------------------------------------------------------- 1 | const hdb = require('hdb') 2 | 3 | const creds = { 4 | rejectUnauthorized: false, // Turns off TLS validation on the Node.js socket 5 | user: 'SYSTEM', // This is the default `SYSTEM` user on `HANA-cloud` 6 | password: 'Manager1', // This needs to be equal to the password defined in the docker run command 7 | host: 'localhost', // This needs to be the host of the docker machine 8 | port: '30041', // The default port 9 | useTLS: true, 10 | encrypt: true, // All HANA-cloud connections HAVE to be encrypted 11 | sslValidateCertificate: true, // The HANA-cloud docker image has self-signed SSL certificates 12 | driver: 'com.sap.db.jdbc.Driver', 13 | url: 'jdbc:sap://localhost:30041?encrypt=true&validateCertificate=false', 14 | disableCloudRedirect: true, 15 | } 16 | 17 | let client 18 | 19 | const connection = async function () { 20 | if (client) return client 21 | return new Promise((res, rej) => { 22 | const con = hdb.createClient() // hdb.createConnection(); 23 | con.connect(creds, async err => { 24 | if (err) return rej(err) 25 | client = con 26 | res(client) 27 | }) 28 | }) 29 | } 30 | 31 | module.exports = async function (sql, values = []) { 32 | const con = await connection() 33 | 34 | return new Promise((res, rej) => { 35 | con.prepare(sql, (err, stmt) => { 36
| if (err) return rej(err) 37 | stmt.exec(values, (err, ret) => { 38 | if (err) return rej(err) 39 | res(ret) 40 | }) 41 | }) 42 | }) 43 | } 44 | -------------------------------------------------------------------------------- /test/compliance/resources/db/hana/funcs/index.cds: -------------------------------------------------------------------------------- 1 | using from './datetime'; 2 | -------------------------------------------------------------------------------- /test/compliance/resources/db/hana/index.cds: -------------------------------------------------------------------------------- 1 | // namespace edge.hana; // Would overwrite default hana namespace 2 | 3 | using from './literals'; 4 | using from './funcs'; 5 | using from './versioning'; 6 | -------------------------------------------------------------------------------- /test/compliance/resources/db/hana/literals.cds: -------------------------------------------------------------------------------- 1 | namespace edge.hana.literals; 2 | 3 | entity HANA_NUMBER { 4 | tinyint : hana.TINYINT; 5 | smallint : hana.SMALLINT; 6 | smalldecimal : hana.SMALLDECIMAL; 7 | real : hana.REAL; 8 | } 9 | 10 | // VARCHAR: ASCII string between 1 and 2000 length (default: 1) 11 | entity HANA_CHAR { 12 | char : hana.CHAR; // implied length 1 13 | short : hana.CHAR(10); 14 | medium : hana.CHAR(100); 15 | large : hana.CHAR(2000); 16 | blob : hana.CLOB; // CLOB: ASCII binary (max size 2 GiB) 17 | } 18 | 19 | // NVARCHAR: unicode string between 1 and 2000 length (default: 1) 20 | entity HANA_NCHAR { 21 | char : hana.NCHAR; // implied length 1 22 | short : hana.NCHAR(10); 23 | medium : hana.NCHAR(100); 24 | large : hana.NCHAR(2000); 25 | } 26 | 27 | // BLOB: binary (max size 2 GiB) 28 | entity HANA_BINARY { 29 | binary : hana.BINARY; 30 | } 31 | 32 | // All of this: 33 | // https://help.sap.com/docs/HANA_CLOUD_DATABASE/bc9e455fe75541b8a248b4c09b086cf5/7a2d5618787c10148dc4da810379e15b.html 34 | entity HANA_ST { 35 | point : hana.ST_POINT; // 2D point 36 | geometry : hana.ST_GEOMETRY; // 3D geometry 37 | } 38 | -------------------------------------------------------------------------------- /test/compliance/resources/db/hana/literals/edge.hana.literals.HANA_BINARY.js: -------------------------------------------------------------------------------- 1 | // TODO: Add HANA TYPE EXPECTATIONS 2 | module.exports = [ 3 | { 4 | binary: null, 5 | }, 6 | /* 7 | REVISIT: investigate why this is always considered binary by sqlite 8 | { 9 | binary: () => Buffer.from('binary'), 10 | '=binary': 'binary' 11 | }, 12 | { 13 | binary: 'binary' 14 | } 15 | */ 16 | ] 17 | -------------------------------------------------------------------------------- /test/compliance/resources/db/hana/literals/edge.hana.literals.HANA_CHAR.js: -------------------------------------------------------------------------------- 1 | // TODO: Add HANA TYPE EXPECTATIONS 2 | module.exports = [ 3 | { 4 | char: null, 5 | }, 6 | { 7 | char: 'a', 8 | }, 9 | /* 10 | { 11 | char: 'ab', 12 | '=char': 'a' 13 | }, 14 | { 15 | blob: () => Buffer.from('binary'), 16 | '=blob': 'binary' 17 | } 18 | */ 19 | ] 20 | -------------------------------------------------------------------------------- /test/compliance/resources/db/hana/literals/edge.hana.literals.HANA_NCHAR.js: -------------------------------------------------------------------------------- 1 | // TODO: Add HANA TYPE EXPECTATIONS 2 | module.exports = [ 3 | { 4 | char: null, 5 | }, 6 | { 7 | char: 'a', 8 | }, 9 | /* 10 | { 11 | char: 'ab', 12 | '=char': 'a' 
13 | } 14 | */ 15 | ] 16 | -------------------------------------------------------------------------------- /test/compliance/resources/db/hana/literals/edge.hana.literals.HANA_NUMBER.js: -------------------------------------------------------------------------------- 1 | // TODO: Add HANA TYPE EXPECTATIONS 2 | module.exports = [ 3 | { 4 | tinyint: null, 5 | }, 6 | { 7 | tinyint: 0, 8 | }, 9 | { 10 | tinyint: 255, 11 | }, 12 | /* 13 | { 14 | tinyint: -1, 15 | '!': 'Invalid cds.hana.TINYINT "-1"' 16 | }, 17 | { 18 | tinyint: 256, 19 | '!': 'Invalid cds.hana.TINYINT "256"' 20 | } 21 | */ 22 | ] 23 | -------------------------------------------------------------------------------- /test/compliance/resources/db/hana/literals/edge.hana.literals.HANA_ST.js: -------------------------------------------------------------------------------- 1 | // TODO: Add HANA TYPE EXPECTATIONS 2 | module.exports = [/* 3 | { 4 | point: null, 5 | }, 6 | { 7 | point: 'POINT(1 1)', 8 | }, 9 | { 10 | point: '0101000000000000000000F03F000000000000F03F', 11 | }, 12 | { 13 | // GeoJSON specification: https://www.rfc-editor.org/rfc/rfc7946 14 | point: '{"x":1,"y":1,"spatialReference":{"wkid":4326}}', 15 | '=point': /\{\W*"x"\W*:\W*1\W*,\W*"y"\W*:\W*1(,.*)?\}/, 16 | },*/ 17 | ] 18 | -------------------------------------------------------------------------------- /test/compliance/resources/db/hana/versioning.cds: -------------------------------------------------------------------------------- 1 | using {cuid} from '@sap/cds/common'; 2 | 3 | namespace edge.hana.versioning; 4 | 5 | // The history table has to be defined before the origin table 6 | // As the compiler doesn't know the dependency between the two entities 7 | @readonly 8 | entity versioned.history { // : temporal 9 | validFrom : Timestamp; 10 | validTo : Timestamp; 11 | ID : UUID; // cuid doesn't work as it would make the ID column a key 12 | data : String(5000); 13 | } 14 | 15 | entity versioned : cuid { // : temporal 16 | validFrom : Timestamp; 17 | validTo : Timestamp; 18 | // Expose own history as an association 19 | history : Association to many versioned.history 20 | on history.ID = ID; 21 | data : String(5000); 22 | } 23 | -------------------------------------------------------------------------------- /test/compliance/resources/db/index.cds: -------------------------------------------------------------------------------- 1 | using from './basic'; 2 | using from './complex'; 3 | using from './edge'; 4 | using from './hana'; 5 | -------------------------------------------------------------------------------- /test/compliance/resources/fts/versioning/hana.cds: -------------------------------------------------------------------------------- 1 | using {edge.hana.versioning as versioning} from '../../db/hana/versioning'; 2 | 3 | annotate versioning.versioned with @sql.append: 'WITH SYSTEM VERSIONING HISTORY TABLE edge_hana_versioning_versioned_history'; 4 | 5 | annotate versioning.versioned with { 6 | validFrom @( 7 | hana.systemversioned, 8 | sql.append: 'NOT NULL GENERATED ALWAYS AS ROW START' 9 | ); 10 | validTo @( 11 | hana.systemversioned, 12 | sql.append: 'NOT NULL GENERATED ALWAYS AS ROW END' 13 | ); 14 | // IMPORTANT: the `data` column must always be the last defined column 15 | // As the @sql.append is doing a classic bit of SQL injection 16 | // to include the required period definition for the history table 17 | data @(sql.append: ', PERIOD FOR SYSTEM_TIME (validFrom, validTo)') 18 | } 19 | 
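20 | // Illustration only, not part of the original model: given the @sql.append fragments above, the 21 | // deployed HANA CREATE TABLE for `versioned` is expected (assumption) to end roughly with 22 | //   ..., data NVARCHAR(5000), PERIOD FOR SYSTEM_TIME (validFrom, validTo)) 23 | //   WITH SYSTEM VERSIONING HISTORY TABLE edge_hana_versioning_versioned_history;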
-------------------------------------------------------------------------------- /test/compliance/resources/fts/versioning/sqlite.cds: -------------------------------------------------------------------------------- 1 | using {edge.hana.versioning as versioning} from '../../db/hana/versioning'; 2 | 3 | annotate versioning.versioned with { 4 | validFrom @( 5 | cds.on.insert : $now, 6 | cds.on.update : $now, 7 | ); 8 | }; 9 | -------------------------------------------------------------------------------- /test/compliance/resources/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@test/cds-db-layer", 3 | "version": "1.0.0", 4 | "description": "Base for db layer validations", 5 | "cds": { 6 | "requires": { 7 | "db": { 8 | "impl": "@cap-js/sqlite", 9 | "client": { 10 | "xy": 51 11 | } 12 | } 13 | }, 14 | "features": { 15 | "ieee754compatible": true 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /test/compliance/resources/srv/index.cds: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cap-js/cds-dbs/591e1aa7942b9b430d80e116de12bf6d3847a3ab/test/compliance/resources/srv/index.cds -------------------------------------------------------------------------------- /test/deploy.js: -------------------------------------------------------------------------------- 1 | var { deploy } = require('@cap-js/postgres') 2 | // eslint-disable-next-line no-console 3 | deploy('*').to('db').catch(console.error) 4 | -------------------------------------------------------------------------------- /test/index.js: -------------------------------------------------------------------------------- 1 | require('./compliance') 2 | require('./scenarios') 3 | -------------------------------------------------------------------------------- /test/scenarios/bookshop/delete.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('../../cds.js') 2 | const bookshop = cds.utils.path.resolve(__dirname, '../../bookshop') 3 | 4 | describe('Bookshop - Delete', () => { 5 | const { expect } = cds.test(bookshop) 6 | 7 | test('Deep delete works for queries with multiple where clauses', async () => { 8 | const del = DELETE.from('sap.capire.bookshop.Genres[ID = 4711]').where('ID = 4712') 9 | const affectedRows = await cds.db.run(del) 10 | expect(affectedRows).to.be.eq(0) 11 | }) 12 | 13 | test(`Deep delete rejects transitive circular dependencies`, async () => { 14 | await INSERT.into('sap.capire.bookshop.A').entries([ 15 | { ID: 999 }, 16 | { ID: 998 }, 17 | { 18 | ID: 1, 19 | toB: { 20 | ID: 12, 21 | toA: [{ ID: 121 }], 22 | toC: [ 23 | { 24 | ID: 123, 25 | toB: [ 26 | { 27 | ID: 1232, 28 | toC: [ 29 | { 30 | ID: 12323, 31 | toA: [{ ID: 123231 }], 32 | }, 33 | ], 34 | }, 35 | ], 36 | }, 37 | ], 38 | }, 39 | toC: { 40 | ID: 13, 41 | toA: [{ ID: 13 }], 42 | }, 43 | }, 44 | ]) 45 | const del = DELETE.from('sap.capire.bookshop.A').where('ID = 1') 46 | await expect(cds.db.run(del)).to.be.eventually.rejectedWith('Transitive circular composition detected') 47 | }) 48 | 49 | test('Delete with path expressions', async () => { 50 | const deleteEmilysBooks = DELETE.from('AdminService.RenameKeys').where(`author.name = 'Emily Brontë'`) 51 | const selectEmilysBooks = cds.ql`SELECT * FROM AdminService.Books where author.name = 'Emily Brontë'` 52 | 53 | const beforeDelete = await cds.run(selectEmilysBooks) 54 | 
await cds.run(deleteEmilysBooks) 55 | const afterDelete = await cds.run(selectEmilysBooks) 56 | expect(beforeDelete).to.have.lengthOf(1) 57 | expect(afterDelete).to.have.lengthOf(0) 58 | }) 59 | }) 60 | -------------------------------------------------------------------------------- /test/scenarios/bookshop/genres.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Some Sample Genres...", 3 | "descr": null, 4 | "ID": 100, 5 | "parent_ID": null, 6 | "children": [ 7 | { 8 | "name": "Cat", 9 | "descr": null, 10 | "ID": 101, 11 | "parent_ID": 100, 12 | "children": [ 13 | { 14 | "name": "Kitty", 15 | "descr": null, 16 | "ID": 102, 17 | "parent_ID": 101, 18 | "children": [ 19 | { 20 | "name": "Kitty Cat", 21 | "descr": null, 22 | "ID": 103, 23 | "parent_ID": 102, 24 | "children": [{ "name": "Aristocat", "descr": null, "ID": 104, "parent_ID": 103 }] 25 | }, 26 | { "name": "Kitty Bat", "descr": null, "ID": 105, "parent_ID": 102, "children": [] } 27 | ] 28 | }, 29 | { 30 | "name": "Catwoman", 31 | "descr": null, 32 | "ID": 106, 33 | "parent_ID": 101, 34 | "children": [{ "name": "Catalina", "descr": null, "ID": 107, "parent_ID": 106, "children": [] }] 35 | } 36 | ] 37 | }, 38 | { "name": "Catweazle", "descr": null, "ID": 108, "parent_ID": 100, "children": [] } 39 | ] 40 | } 41 | -------------------------------------------------------------------------------- /test/scenarios/bookshop/index.js: -------------------------------------------------------------------------------- 1 | require('./search.test') 2 | require('./read.test') 3 | require('./insert.test') 4 | require('./delete.test') 5 | require('./update.test') 6 | require('./funcs.test') 7 | require('./orderBy.test') 8 | require('./genres.test') 9 | require('./localization.test') 10 | -------------------------------------------------------------------------------- /test/scenarios/bookshop/insert-large.test.js: -------------------------------------------------------------------------------- 1 | const { Readable } = require('stream') 2 | const cds = require('../../cds.js') 3 | const bookshop = cds.utils.path.resolve(__dirname, '../../bookshop') 4 | 5 | // Stress test should not be run in the pipeline 6 | describe.skip('Bookshop - Insert', () => { 7 | cds.test(bookshop) 8 | 9 | test('Large (~33 mil rows)', async () => { 10 | const { Books } = cds.entities('sap.capire.bookshop') 11 | 12 | // Postgres 13 | // json (1 << 25) -> 5 min (with WAL warnings) 14 | // jsonb (1 << 24) -> size limit reached 15 | // json (1 << 23) -> 82.148 sec 16 | // jsonb (1 << 23) -> 52.148 sec 17 | // json (1 << 10) -> 2.35 sec 18 | // jsonb (1 << 10) -> 2.62 sec 19 | 20 | let totalRows = (1 << 20) 21 | let totalSize = 0 22 | const bufferSize = 1 << 16 23 | const stream = Readable.from((function* () { 24 | let buffer = '[' 25 | let i = 1000 26 | const target = i + totalRows 27 | buffer += `{"ID":${i++}}` 28 | for (; i < target;) { 29 | buffer += `,{"ID":${i++}}` 30 | if (buffer.length >= bufferSize) { 31 | totalSize += buffer.length 32 | yield buffer 33 | buffer = '' 34 | } 35 | } 36 | buffer += ']' 37 | totalSize += buffer.length 38 | yield buffer 39 | })(), { objectMode: false }) 40 | const s = performance.now() 41 | await INSERT(stream).into(Books) 42 | process.stdout.write(`total size: ${totalSize} total rows: ${totalRows} rows/ms: (${totalRows / (performance.now() - s)})\n`) 43 | }, 60 * 60 * 1000) 44 | }) 45 | -------------------------------------------------------------------------------- 
/test/scenarios/bookshop/upsert.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('../../cds.js') 2 | const bookshop = cds.utils.path.resolve(__dirname, '../../bookshop') 3 | 4 | describe('Bookshop - Upsert', () => { 5 | const { expect } = cds.test(bookshop) 6 | 7 | test('upsert data with "value" as column name', async () => { 8 | // in our UPSERT logic we used "value" as internal column 9 | // which led to an ambiguous column error if the entity has an element with the same name 10 | const { Values } = cds.entities 11 | const upsert = UPSERT({ ID: 201, value: 42 }).into(Values) 12 | const res = await upsert; 13 | expect(res).to.eql(1) 14 | }) 15 | 16 | }) 17 | -------------------------------------------------------------------------------- /test/scenarios/index.js: -------------------------------------------------------------------------------- 1 | require('./bookshop') 2 | require('./sflight') 3 | -------------------------------------------------------------------------------- /test/scenarios/sflight/index.js: -------------------------------------------------------------------------------- 1 | require('./read.test') 2 | require('./integration.test') 3 | -------------------------------------------------------------------------------- /test/sflight/app/services.cds: -------------------------------------------------------------------------------- 1 | using from './travel_processor/capabilities'; 2 | using from './travel_processor/field-control'; 3 | using from './travel_analytics/annotations'; 4 | -------------------------------------------------------------------------------- /test/sflight/app/travel_processor/capabilities.cds: -------------------------------------------------------------------------------- 1 | using TravelService from '../../srv/travel-service'; 2 | 3 | annotate TravelService.Travel with @odata.draft.enabled; 4 | annotate TravelService.Travel with @Common.SemanticKey: [TravelID]; 5 | annotate TravelService.Booking with @Common.SemanticKey: [BookingID]; 6 | annotate TravelService.BookingSupplement with @Common.SemanticKey: [BookingSupplementID]; 7 | -------------------------------------------------------------------------------- /test/sflight/db/common.cds: -------------------------------------------------------------------------------- 1 | using { sap, managed } from '@sap/cds/common'; 2 | 3 | extend sap.common.Currencies with { 4 | // Currencies.code = ISO 4217 alphabetic three-letter code 5 | // with the first two letters being equal to ISO 3166 alphabetic country codes 6 | // See also: 7 | // [1] https://www.iso.org/iso-4217-currency-codes.html 8 | // [2] https://www.currency-iso.org/en/home/tables/table-a1.html 9 | // [3] https://www.ibm.com/support/knowledgecenter/en/SSZLC2_7.0.0/com.ibm.commerce.payments.developer.doc/refs/rpylerl2mst97.htm 10 | numcode : Integer; 11 | exponent : Integer; //> e.g. 2 --> 1 Dollar = 10^2 Cent 12 | minor : String; //> e.g. 
'Cent' 13 | } 14 | 15 | 16 | aspect custom.managed { 17 | createdAt : managed:createdAt; 18 | createdBy : managed:createdBy; 19 | LastChangedAt : managed:modifiedAt; 20 | LastChangedBy : managed:modifiedBy; 21 | } 22 | -------------------------------------------------------------------------------- /test/sflight/db/data/sap.common-Countries.csv: -------------------------------------------------------------------------------- 1 | code;name;descr 2 | AU;Australia;Commonwealth of Australia 3 | CA;Canada;Canada 4 | CN;China;People's Republic of China (PRC) 5 | FR;France;French Republic 6 | DE;Germany;Federal Republic of Germany 7 | IN;India;Republic of India 8 | IL;Israel;State of Israel 9 | MM;Myanmar;Republic of the Union of Myanmar 10 | GB;United Kingdom;United Kingdom of Great Britain and Northern Ireland 11 | US;United States;United States of America (USA) 12 | EU;European Union;European Union 13 | IT;Italy;Italy 14 | SG;Singapore;Singapore 15 | AT;Austria;Austria 16 | SE;Sweden;Sweden 17 | CH;Switzerland;Switzerland 18 | RU;Russia;Russia 19 | JP;Japan;Japan 20 | ZA;South Africa;South Africa 21 | ES;Spain;Spain 22 | BE;Belgium;Belgium 23 | SI;Slovenia;Slovenia 24 | NL;Netherlands;Netherlands 25 | MX;Mexico;Mexico 26 | BR;Brazil;Brazil 27 | CU;Cuba;Cuba 28 | ZW;Zimbabwe;Zimbabwe 29 | MY;Malaysia;Malaysia 30 | TH;Thailand;Thailand -------------------------------------------------------------------------------- /test/sflight/db/data/sap.common-Currencies.csv: -------------------------------------------------------------------------------- 1 | code;symbol;name;descr;numcode;minor;exponent 2 | EUR;€;Euro;European Euro;978;Cent;2 3 | USD;$;US Dollar;United States Dollar;840;Cent;2 4 | CAD;$;Canadian Dollar;Canadian Dollar;124;Cent;2 5 | AUD;$;Australian Dollar;Australian Dollar;036;Cent;2 6 | GBP;£;British Pound;Great Britain Pound;826;Penny;2 7 | ILS;₪;Shekel;Israeli New Shekel;376;Agorat;2 8 | INR;₹;Rupee;Indian Rupee;356;Paise;2 9 | QAR;﷼;Riyal;Katar Riyal;356;Dirham;2 10 | SAR;﷼;Riyal;Saudi Riyal;682;Halala;2 11 | JPY;¥;Yen;Japanese Yen;392;Sen;2 12 | CNY;¥;Yuan;Chinese Yuan Renminbi;156;Jiao;1 13 | SGD;S$;Singapore Dollar;Singapore Dollar;702;Cent;2 14 | ZAR;R;Rand;South African Rand;710;Cent;2 -------------------------------------------------------------------------------- /test/sflight/db/data/sap.fe.cap.travel-Airline.csv: -------------------------------------------------------------------------------- 1 | AirlineID;Name;CurrencyCode_code;AirlinePicURL 2 | GA;Green Albatros;CAD;https://raw.githubusercontent.com/SAP-samples/fiori-elements-opensap/main/week1/images/airlines/Green-Albatross-logo.png 3 | FA;Fly Africa;ZAR;https://raw.githubusercontent.com/SAP-samples/fiori-elements-opensap/main/week1/images/airlines/Fly-Africa-logo.png 4 | EA;European Airlines;EUR;https://raw.githubusercontent.com/SAP-samples/fiori-elements-opensap/main/week1/images/airlines/European-Airlines-logo.png 5 | OC;Oceania;USD;https://raw.githubusercontent.com/SAP-samples/fiori-elements-opensap/main/week1/images/airlines/Oceania-logo.png 6 | SW;Sunset Wings;USD;https://raw.githubusercontent.com/SAP-samples/fiori-elements-opensap/main/week1/images/airlines/Sunset-Wings-logo.png -------------------------------------------------------------------------------- /test/sflight/db/data/sap.fe.cap.travel-Airport.csv: -------------------------------------------------------------------------------- 1 | AirportID;Name;City;CountryCode_code 2 | FRA;Frankfurt Airport;Frankfurt/Main;DE 3 | HAM;Hamburg Airport;Hamburg;DE 4 | 
MUC;Munich Airport;Munich;DE 5 | SXF;Berlin Schönefeld Airport;Berlin;DE 6 | THF;Berlin Tempelhof Airport;Berlin;DE 7 | TXL;Berlin Tegel Airport;Berlin;DE 8 | CDG;Charles de Gaulle Airport;Paris;FR 9 | ORY;Orly Airport;Paris;FR 10 | VIE;Vienna International Airport;Vienna;AT 11 | ZRH;Zürich Airport;Zurich;CH 12 | RTM;Rotterdam The Hague Airport;Rotterdam;NL 13 | FCO;Leonardo da Vinci–Fiumicino Airport;Rome;IT 14 | VCE;Venice Marco Polo Airport;Venice;IT 15 | LCY;London City Airport;London;GB 16 | LGW;Gatwick Airport;London;GB 17 | LHR;Heathrow Airport;London;GB 18 | MAD;Adolfo Suárez Madrid–Barajas Airport;Madrid;ES 19 | VKO;Vnukovo International Airport;Moscow;RU 20 | SVO;Sheremetyevo International Airport;Moscow;RU 21 | JFK;John F. Kennedy International Airport;New York City, New York;US 22 | BNA;Nashville International Airport;Nashville, Tennessee;US 23 | BOS;Logan International Airport;Boston, Massachusetts;US 24 | ELP;El Paso International Airport;El Paso, Texas;US 25 | DEN;Denver International Airport;Denver, Colorado;US 26 | HOU;William P. Hobby Airport;Houston, Texas;US 27 | LAS;McCarran International Airport;Las Vegas, Nevada;US 28 | LAX;Los Angeles International Airport;Los Angeles, California;US 29 | MCI;Kansas City International Airport;Kansas City, Missouri;US 30 | MIA;Miami International Airport;Miami, Florida;US 31 | SFO;San Francisco International Airport;San Francisco, California;US 32 | EWR;Newark Liberty International Airport;Newark, New Jersey;US 33 | YOW;Ottawa Macdonald–Cartier Int. Airport;Ottawa, Ontario;CA 34 | ACA;General Juan N. Álvarez Int. Airport;Acapulco, Guerrero;MX 35 | GIG;Rio de Janeiro–Galeão Int. Airport;Rio de Janeiro;BR 36 | HAV;José Martí International Airport;Havana;CU 37 | ASP;Alice Springs Airport;Alice Springs, Northern Territory;AU 38 | ACE;Lanzarote Airport;Lanzarote, Canary Islands;ES 39 | HRE;Harare International Airport;Harare;ZW 40 | GCJ;Grand Central Airport;Johannesburg;ZA 41 | NRT;Narita International Airport;Tokyo, Honshu;JP 42 | ITM;Osaka International Airport;Osaka, Honshu;JP 43 | KIX;Kansai International Airport;Osaka, Honshu;JP 44 | HIJ;Hiroshima Airport;Hiroshima, Honshu;JP 45 | SIN;Singapore Changi Airport;Singapore;SG 46 | KUL;Kuala Lumpur International Airport;Kuala Lumpur;MY 47 | HKG;Hong Kong International Airport;Hongkong;CN 48 | BKK;Suvarnabhumi Airport;Bangkok;TH -------------------------------------------------------------------------------- /test/sflight/db/data/sap.fe.cap.travel-Booking.csv: -------------------------------------------------------------------------------- 1 | BookingUUID ;to_Travel_TravelUUID ;BookingID ;BookingDate ;to_Customer_CustomerID ;to_Carrier_AirlineID ;ConnectionID ;FlightDate ;FlightPrice ;CurrencyCode_code ;BookingStatus_code ;LastChangedAt 2 | 7A757221A8E4645C17002DF03754AB66 ;52657221A8E4645C17002DF03754AB66 ; 0001 ;2023-08-02 ; 000099 ;SW ; 1537 ;2023-08-04 ; 438.00 ;USD ;N ;2023-07-21T18:48:08Z 3 | 4A787221A8E4645C17002DF03754AB66 ;EE657221A8E4645C17002DF03754AB66 ; 0001 ;2023-07-12 ; 000473 ;EA ; 0403 ;2023-07-31 ; 2300.00 ;EUR ;B ;2023-07-20T07:58:08Z 4 | 3A997221A8E4645C17002DF03754AB66 ;76757221A8E4645C17002DF03754AB66 ; 0001 ;2024-05-13 ; 000115 ;GA ; 0018 ;2024-05-30 ; 3657.00 ;USD ;N ;2023-10-20T11:03:30Z 5 | 3B997221A8E4645C17002DF03754AB66 ;76757221A8E4645C17002DF03754AB66 ; 0002 ;2024-05-13 ; 000096 ;FA ; 0018 ;2024-05-30 ; 3657.00 ;USD ;N ;2023-10-20T11:03:30Z -------------------------------------------------------------------------------- 
/test/sflight/db/data/sap.fe.cap.travel-BookingStatus.csv:
--------------------------------------------------------------------------------
1 | code;name
2 | N;New
3 | X;Canceled
4 | B;Booked
--------------------------------------------------------------------------------
/test/sflight/db/data/sap.fe.cap.travel-Flight.csv:
--------------------------------------------------------------------------------
1 | AirlineID;ConnectionID;FlightDate;Price;CurrencyCode_code;PlaneType;MaximumSeats;OccupiedSeats
2 | SW;0001;2024-05-29;10818.00;SGD;767-200;260;223
3 | SW;0001;2023-08-02;5950.00;SGD;A340-600;330;168
4 | SW;0002;2024-05-30;11765.00;SGD;747-400;385;350
5 | SW;0002;2023-08-03;10953.00;SGD;747-400;385;334
6 | SW;0011;2024-05-30;2359.00;SGD;767-200;260;132
7 | SW;0011;2023-08-03;4880.00;SGD;A340-600;330;310
8 | SW;0012;2024-06-01;4665.00;SGD;767-200;260;236
9 | SW;0012;2023-08-05;2574.00;SGD;747-400;385;215
10 | SW;0058;2024-05-27;6629.00;USD;767-200;260;200
11 | SW;0058;2023-07-31;4996.00;USD;747-400;385;231
12 | SW;0059;2024-05-28;4131.00;USD;A340-600;330;161
13 | SW;0059;2023-08-01;6053.00;USD;A340-600;330;237
14 | SW;1537;2024-05-31;893.00;USD;A321-200;150;88
15 | SW;1537;2023-08-04;805.00;USD;737-800;140;75
16 | GA;0322;2024-06-02;1103.00;USD;A320-200;130;93
17 | GA;0322;2023-08-05;1611.00;USD;A320-200;130;123
18 | GA;0017;2024-05-29;462.00;USD;A321-200;150;139
19 | GA;0017;2023-08-02;478.00;USD;737-800;140;133
20 | GA;2678;2024-06-01;473.00;USD;A321-200;150;141
21 | GA;2678;2023-08-05;473.00;USD;A320-200;130;122
22 | GA;0015;2024-05-31;1911.00;USD;767-200;260;137
23 | GA;0015;2023-08-04;3117.00;USD;767-200;260;213
24 | GA;0018;2024-05-30;3781.00;USD;A380-800;475;446
25 | GA;0018;2023-08-03;3823.00;USD;767-200;260;247
26 | EA;0400;2024-06-01;5484.00;EUR;A340-600;330;306
27 | EA;0400;2023-08-05;2649.00;EUR;767-200;260;130
28 | EA;0401;2024-05-31;3697.00;EUR;747-400;385;265
29 | EA;0401;2023-08-04;4867.00;EUR;A380-800;475;403
30 | EA;0402;2024-05-27;4911.00;EUR;767-200;260;221
31 | EA;0402;2023-07-31;3232.00;EUR;747-400;385;231
32 | EA;0403;2024-05-27;2797.00;EUR;A340-600;330;171
33 | EA;0403;2023-07-31;2486.00;EUR;767-200;260;117
34 | OC;0407;2024-05-31;5346.00;JPY;747-400;385;254
35 | OC;0407;2023-08-04;4032.00;JPY;A340-600;330;165
36 | OC;0408;2024-06-01;8159.00;JPY;A380-800;475;432
37 | OC;0408;2023-08-05;6471.00;JPY;747-400;385;296
38 | FA;0788;2024-06-01;7580.00;EUR;767-200;260;221
39 | FA;0788;2023-08-05;8059.00;EUR;A380-800;475;422
40 | FA;0789;2024-05-31;8539.00;EUR;A380-800;475;441
41 | FA;0789;2023-08-04;5852.00;EUR;A380-800;475;325
--------------------------------------------------------------------------------
/test/sflight/db/data/sap.fe.cap.travel-FlightConnection.csv:
--------------------------------------------------------------------------------
1 | AirlineID;ConnectionID;DepartureAirport_AirportID;DestinationAirport_AirportID;DepartureTime;ArrivalTime;Distance;DistanceUnit
2 | SW;0001;SFO;SIN;01:15:00;11:50:00;13523;KM
3 | SW;0002;SIN;SFO;06:30:00;09:15:00;13523;KM
4 | SW;0011;NRT;SIN;14:55:00;20:50:00;5363;KM
5 | SW;0012;SIN;NRT;09:53:00;17:54:00;5363;KM
6 | SW;0058;SFO;FRA;13:45:00;09:55:00;9608;KM
7 | SW;0059;FRA;SFO;13:55:00;16:30:00;9608;KM
8 | SW;1537;EWR;MIA;21:56:00;12:47:00;1752;KM
9 | GA;0322;MIA;EWR;20:17:00;23:19:00;1752;KM
10 | GA;0017;MIA;HAV;07:19:00;08:03:00;520;KM
11 | GA;2678;HAV;MIA;06:15:00;10:30:00;520;KM
12 | GA;0015;JFK;SFO;07:13:00;10:04:00;4156;KM
13 | GA;0018;SFO;JFK;06:40:00;15:06:00;4156;KM
14 | EA;0400;FRA;JFK;10:10:00;11:34:00;6162;KM
15 | EA;0401;JFK;FRA;18:30:00;07:45:00;6162;KM
16 | EA;0402;FRA;EWR;13:30:00;15:35:00;6217;KM
17 | EA;0403;EWR;FRA;18:09:00;07:30:00;6217;KM
18 | OC;0407;NRT;FRA;13:23:00;15:56:00;9379;KM
19 | OC;0408;FRA;NRT;20:25:00;15:40:00;9379;KM
20 | FA;0788;VCE;NRT;13:25:00;10:13:00;9595;KM
21 | FA;0789;NRT;VCE;14:26:00;21:31:00;9595;KM
--------------------------------------------------------------------------------
/test/sflight/db/data/sap.fe.cap.travel-Passenger.csv:
--------------------------------------------------------------------------------
1 | CustomerID;FirstName;LastName;Title;Street;PostalCode;City;CountryCode_code;PhoneNumber;EMailAddress
2 | 000008;August;Buchholm;Mr.;Lerchenstr. 23;64342;Seeheim-Jugenheim;DE;+49-184-089871;august.buchholm@flight.example.de
3 | 000115;Laura;Benz;Mrs.;Am Deich 35;79312;Emmendingen;DE;+49-871-814327;laura.benz@flight.example.de
4 | 000506;Stephen;Moyano;Mr.;Muehltalstr. 19;69190;Walldorf;DE;+49-433-618960;stephen.moyano@flight.example.de
5 | 000608;Benjamin;Prinz;Mr.;route de Pégomas 94;75839;Paris;FR;+33-722-645204;benjamin.prinz@flight.example.fr
6 |
--------------------------------------------------------------------------------
/test/sflight/db/data/sap.fe.cap.travel-Supplement.csv:
--------------------------------------------------------------------------------
1 | SupplementID;Price;Type_code;Description;CurrencyCode_code
2 | BV-0001;2.30;BV;Hot Chocolate;EUR
3 | BV-0002;7.50;BV;Alcohol free Champagne;EUR
4 | BV-0003;3.50;BV;Coke;EUR
5 | BV-0004;3.50;BV;Orange Lemonade;EUR
6 | BV-0005;3.50;BV;Apple Juice;EUR
7 | BV-0006;3.50;BV;Pear Juice;EUR
8 | BV-0007;3.50;BV;Mango Juice;EUR
9 | BV-0008;3.50;BV;Lemon Lemonade;EUR
10 | BV-0009;4.50;BV;Tomato Juice;EUR
11 | ML-0001;3.00;ML;Black Forest Cake;EUR
12 | ML-0002;2.00;ML;Chocolate Cake;EUR
13 | ML-0003;1.50;ML;Apple Pie;EUR
14 | ML-0004;1.50;ML;Pear Pie;EUR
15 | ML-0005;8.00;ML;Nice Salad;EUR
16 | ML-0006;9.00;ML;Paris Salad;EUR
17 | ML-0007;12.00;ML;Hamburg Salad with Eggs;EUR
18 | ML-0008;25.00;ML;Quail with French Salad and Black Forest Cake;EUR
19 | ML-0009;13.00;ML;Duck on Lettuce;EUR
20 | ML-0010;5.00;ML;Carpaccio;EUR
21 | ML-0011;7.00;ML;Seasonal Salad;EUR
22 | ML-0012;16.00;ML;Hamburg Salad with Fresh Shrimps;EUR
23 | ML-0013;17.00;ML;Quail;EUR
24 | ML-0014;14.00;ML;Wiener Schnitzel;EUR
25 | ML-0015;13.00;ML;Pork Schnitzel;EUR
26 | ML-0016;14.00;ML;Schnitzel with Pepper Sauce;EUR
27 | ML-0017;11.00;ML;Chicken and French Fries;EUR
28 | ML-0018;12.00;ML;Turkey Steak;EUR
29 | ML-0019;15.00;ML;Bavarian Duck;EUR
30 | ML-0020;14.00;ML;Knuckle of Pork;EUR
31 | ML-0021;22.00;ML;Fillet of Beef;EUR
32 | ML-0022;21.00;ML;Trout Au Bleu;EUR
33 | ML-0023;20.00;ML;Trout Meuniere;EUR
34 | ML-0024;17.00;ML;Monkfish;EUR
35 | ML-0025;12.00;ML;Sole;EUR
36 | ML-0026;6.00;ML;Mini Fried Sole;EUR
37 | ML-0027;14.00;ML;Salmon in a Bearnaise Sauce;EUR
38 | ML-0028;15.00;ML;Salmon Lasagne;EUR
39 | ML-0029;3.00;ML;Chocolate Ice Cream;EUR
40 | ML-0030;2.50;ML;Vanilla Ice Cream;EUR
41 | ML-0031;4.50;ML;Vanilla Ice Cream with Hot Cherries;EUR
42 | ML-0032;4.50;ML;Vanilla Ice Cream with Hot Raspberries;EUR
43 | ML-0033;4.00;ML;Apple Strudel;EUR
44 | ML-0034;4.00;ML;Raspberry Sorbet;EUR
45 | ML-0035;4.00;ML;Strawberry Sorbet;EUR
46 | ML-0036;4.00;ML;Lemon Sorbet;EUR
47 | LU-0001;40.00;LU;Extra baggage 5 kgs;EUR
48 | LU-0002;15.00;LU;Luggage transfer from airport to hotel;EUR
49 | LU-0003;75.00;LU;Luggage pickup from home and return;EUR
50 | LU-0004;80.00;LU;Bulky goods like sports equipment;EUR
--------------------------------------------------------------------------------
/test/sflight/db/data/sap.fe.cap.travel-SupplementType.csv:
--------------------------------------------------------------------------------
1 | code;name
2 | BV;Beverage
3 | ML;Meal
4 | LU;Luggage
5 | EX;Extra
--------------------------------------------------------------------------------
/test/sflight/db/data/sap.fe.cap.travel-TravelStatus.csv:
--------------------------------------------------------------------------------
1 | code;name;fieldControl;createDeleteHidden;insertDeleteRestriction
2 | O;Open;7;false;true
3 | A;Accepted;1;true;false
4 | X;Canceled;7;true;false
5 |
--------------------------------------------------------------------------------
/test/sflight/package.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "@capire/sflight",
3 |   "version": "1.0.0",
4 |   "devDependencies": {
5 |     "@cap-js/sqlite": "*"
6 |   },
7 |   "private": true
8 | }
--------------------------------------------------------------------------------
/test/sflight/srv/travel-service.cds:
--------------------------------------------------------------------------------
1 | using { sap.fe.cap.travel as my } from '../db/schema';
2 |
3 | service TravelService @(path:'/processor') {
4 |
5 |   @(restrict: [
6 |     { grant: 'READ', to: 'authenticated-user'},
7 |     { grant: ['rejectTravel','acceptTravel','deductDiscount'], to: 'reviewer'},
8 |     { grant: ['*'], to: 'processor'},
9 |     { grant: ['*'], to: 'admin'}
10 |   ])
11 |   entity Travel as projection on my.Travel actions {
12 |     action createTravelByTemplate() returns Travel;
13 |     action rejectTravel();
14 |     action acceptTravel();
15 |     action deductDiscount( percent: Percentage not null ) returns Travel;
16 |   };
17 |
18 | }
19 |
20 | type Percentage : Integer @assert.range: [1,100];
--------------------------------------------------------------------------------
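
The travel-service.cds above exposes the Travel projection at /processor, grants READ to any authenticated user, and constrains the deductDiscount parameter via the Percentage type's @assert.range: [1,100]. The snippet below is a minimal sketch, not a file in this repository, of how the sflight test app could be booted in-memory with cds.test and read through that endpoint; the project path passed to cds.test, the test description, and the mocked basic-auth user 'alice' are assumptions for illustration only.

// Minimal sketch (assumed, not part of this repo): serve test/sflight in-memory
// and read Travel data through the restricted TravelService endpoint.
const cds = require('@sap/cds')

describe('TravelService (sflight sample)', () => {
  // Boots the app from test/sflight with an in-memory database
  // and loads the CSV files shown above as initial data.
  const { GET, expect } = cds.test('test/sflight')

  it('serves Travel data to authenticated users', async () => {
    const { status, data } = await GET('/processor/Travel?$top=2', {
      auth: { username: 'alice', password: '' }, // mocked dev user, assumed
    })
    expect(status).to.equal(200)          // READ is granted to authenticated-user
    expect(data.value.length).to.be.at.most(2)
  })
})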