├── .editorconfig ├── exercises ├── 00 │ ├── assets │ │ ├── bas-clone-from-git.png │ │ ├── bas-create-dev-space.png │ │ ├── github-create-codespace.png │ │ ├── vscode-reopen-in-container.png │ │ ├── vscode-shell-cds-version.png │ │ └── run │ └── README.md ├── 05 │ ├── assets │ │ ├── reviews-start-page.png │ │ └── run │ └── README.md ├── 06 │ ├── assets │ │ ├── devtools-controls.png │ │ ├── open-folder-dialog.png │ │ ├── btp-trial-cf-environment.png │ │ ├── space-debugging-facility.png │ │ ├── devtools-inspector-attached.png │ │ └── devtools-remote-target-list.png │ └── README.md ├── 01 │ ├── assets │ │ ├── csv │ │ │ ├── sap.capire.bookshop-Authors.csv │ │ │ ├── sap.capire.bookshop-Authors.json │ │ │ ├── sap.capire.bookshop-Books.csv │ │ │ └── sap.capire.bookshop-Books.json │ │ ├── data │ │ │ ├── sap.capire.bookshop-Authors.csv │ │ │ ├── csv │ │ │ │ ├── sap.capire.bookshop-Authors.csv │ │ │ │ └── sap.capire.bookshop-Books.csv │ │ │ ├── sap.capire.bookshop-Authors.json │ │ │ ├── json │ │ │ │ ├── sap.capire.bookshop-Authors.json │ │ │ │ └── sap.capire.bookshop-Books.json │ │ │ ├── sap.capire.bookshop-Books.csv │ │ │ └── sap.capire.bookshop-Books.json │ │ ├── json │ │ │ ├── sap.capire.bookshop-Authors.json │ │ │ └── sap.capire.bookshop-Books.json │ │ ├── test │ │ │ └── data │ │ │ │ └── csv │ │ │ │ └── sap.capire.bookshop-Books.csv │ │ └── run │ └── README.md ├── 02 │ ├── assets │ │ ├── data │ │ │ ├── csv │ │ │ │ ├── sap.capire.bookshop-Authors.csv │ │ │ │ └── sap.capire.bookshop-Books.csv │ │ │ └── json │ │ │ │ ├── sap.capire.bookshop-Authors.json │ │ │ │ └── sap.capire.bookshop-Books.json │ │ └── run │ └── README.md ├── 04 │ ├── assets │ │ └── run │ └── README.md └── 03 │ ├── assets │ ├── run │ └── northbreeze.edmx │ └── README.md ├── .markdownlintrc ├── .devcontainer ├── Dockerfile └── devcontainer.json ├── REUSE.toml ├── README.md ├── LICENSE └── LICENSES └── Apache-2.0.txt /.editorconfig: 
-------------------------------------------------------------------------------- 1 | [*] 2 | trim_trailing_whitespace = true 3 | insert_final_newline = true 4 | -------------------------------------------------------------------------------- /exercises/00/assets/bas-clone-from-git.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SAP-samples/cap-local-development-workshop/HEAD/exercises/00/assets/bas-clone-from-git.png -------------------------------------------------------------------------------- /exercises/05/assets/reviews-start-page.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SAP-samples/cap-local-development-workshop/HEAD/exercises/05/assets/reviews-start-page.png -------------------------------------------------------------------------------- /exercises/06/assets/devtools-controls.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SAP-samples/cap-local-development-workshop/HEAD/exercises/06/assets/devtools-controls.png -------------------------------------------------------------------------------- /exercises/06/assets/open-folder-dialog.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SAP-samples/cap-local-development-workshop/HEAD/exercises/06/assets/open-folder-dialog.png -------------------------------------------------------------------------------- /exercises/00/assets/bas-create-dev-space.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SAP-samples/cap-local-development-workshop/HEAD/exercises/00/assets/bas-create-dev-space.png -------------------------------------------------------------------------------- /exercises/00/assets/github-create-codespace.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/SAP-samples/cap-local-development-workshop/HEAD/exercises/00/assets/github-create-codespace.png -------------------------------------------------------------------------------- /exercises/00/assets/vscode-reopen-in-container.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SAP-samples/cap-local-development-workshop/HEAD/exercises/00/assets/vscode-reopen-in-container.png -------------------------------------------------------------------------------- /exercises/00/assets/vscode-shell-cds-version.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SAP-samples/cap-local-development-workshop/HEAD/exercises/00/assets/vscode-shell-cds-version.png -------------------------------------------------------------------------------- /exercises/06/assets/btp-trial-cf-environment.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SAP-samples/cap-local-development-workshop/HEAD/exercises/06/assets/btp-trial-cf-environment.png -------------------------------------------------------------------------------- /exercises/06/assets/space-debugging-facility.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SAP-samples/cap-local-development-workshop/HEAD/exercises/06/assets/space-debugging-facility.png -------------------------------------------------------------------------------- /exercises/06/assets/devtools-inspector-attached.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SAP-samples/cap-local-development-workshop/HEAD/exercises/06/assets/devtools-inspector-attached.png 
-------------------------------------------------------------------------------- /exercises/06/assets/devtools-remote-target-list.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SAP-samples/cap-local-development-workshop/HEAD/exercises/06/assets/devtools-remote-target-list.png -------------------------------------------------------------------------------- /exercises/01/assets/csv/sap.capire.bookshop-Authors.csv: -------------------------------------------------------------------------------- 1 | ID,name,dateOfBirth,placeOfBirth,dateOfDeath,placeOfDeath 2 | 42,"Douglas Adams",1952-03-11,"Cambridge, England",2001-05-11,"Montecito, California" 3 | -------------------------------------------------------------------------------- /exercises/01/assets/data/sap.capire.bookshop-Authors.csv: -------------------------------------------------------------------------------- 1 | ID,name,dateOfBirth,placeOfBirth,dateOfDeath,placeOfDeath 2 | 42,"Douglas Adams",1952-03-11,"Cambridge, England",2001-05-11,"Montecito, California" 3 | -------------------------------------------------------------------------------- /exercises/01/assets/data/csv/sap.capire.bookshop-Authors.csv: -------------------------------------------------------------------------------- 1 | ID,name,dateOfBirth,placeOfBirth,dateOfDeath,placeOfDeath 2 | 42,"Douglas Adams",1952-03-11,"Cambridge, England",2001-05-11,"Montecito, California" 3 | -------------------------------------------------------------------------------- /exercises/02/assets/data/csv/sap.capire.bookshop-Authors.csv: -------------------------------------------------------------------------------- 1 | ID,name,dateOfBirth,placeOfBirth,dateOfDeath,placeOfDeath 2 | 42,"Douglas Adams",1952-03-11,"Cambridge, England",2001-05-11,"Montecito, California" 3 | -------------------------------------------------------------------------------- 
/exercises/01/assets/csv/sap.capire.bookshop-Authors.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "ID": 42, 4 | "name": "Douglas Adams", 5 | "dateOfBirth": "1952-03-11", 6 | "dateOfDeath": "2001-05-11", 7 | "placeOfBirth": "Cambridge, England", 8 | "placeOfDeath": "Montecito, California" 9 | } 10 | ] 11 | -------------------------------------------------------------------------------- /exercises/01/assets/data/sap.capire.bookshop-Authors.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "ID": 42, 4 | "name": "Douglas Adams", 5 | "dateOfBirth": "1952-03-11", 6 | "dateOfDeath": "2001-05-11", 7 | "placeOfBirth": "Cambridge, England", 8 | "placeOfDeath": "Montecito, California" 9 | } 10 | ] 11 | -------------------------------------------------------------------------------- /exercises/01/assets/json/sap.capire.bookshop-Authors.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "ID": 42, 4 | "name": "Douglas Adams", 5 | "dateOfBirth": "1952-03-11", 6 | "dateOfDeath": "2001-05-11", 7 | "placeOfBirth": "Cambridge, England", 8 | "placeOfDeath": "Montecito, California" 9 | } 10 | ] 11 | -------------------------------------------------------------------------------- /exercises/01/assets/data/json/sap.capire.bookshop-Authors.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "ID": 42, 4 | "name": "Douglas Adams", 5 | "dateOfBirth": "1952-03-11", 6 | "dateOfDeath": "2001-05-11", 7 | "placeOfBirth": "Cambridge, England", 8 | "placeOfDeath": "Montecito, California" 9 | } 10 | ] 11 | -------------------------------------------------------------------------------- /exercises/02/assets/data/json/sap.capire.bookshop-Authors.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "ID": 42, 4 | "name": "Douglas Adams", 5 | 
"dateOfBirth": "1952-03-11", 6 | "dateOfDeath": "2001-05-11", 7 | "placeOfBirth": "Cambridge, England", 8 | "placeOfDeath": "Montecito, California" 9 | } 10 | ] 11 | -------------------------------------------------------------------------------- /.markdownlintrc: -------------------------------------------------------------------------------- 1 | { 2 | "comment": "Rules for tutorial and workshop Markdown content", 3 | "ul-style": { "style": "consistent" }, 4 | "no-trailing-spaces": { "br_spaces": 0 }, 5 | "line-length": false, 6 | "no-inline-html": { "allowed_elements": [ "a" ] }, 7 | "blanks-around-headings": { "lines_above": 0 } 8 | } 9 | -------------------------------------------------------------------------------- /exercises/00/assets/run: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -eo pipefail 4 | 5 | declare projname=myproj 6 | 7 | main() { 8 | 9 | read -r -p "Hit to clean out $projname and start setup ..." 
10 | 11 | rm -rf $projname 12 | 13 | # Unpack from tarball if there is one 14 | if [[ -f "$projname.tgz" ]]; then 15 | tar xzf "$projname.tgz" 16 | else 17 | echo \ 18 | cds init --add sample $projname \ 19 | && cd "$_" \ 20 | && npm install 21 | fi 22 | 23 | echo "👉 Now 'cd $projname/ && cds watch'" 24 | 25 | } 26 | 27 | main "$@" 28 | -------------------------------------------------------------------------------- /exercises/05/assets/run: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -eo pipefail 4 | 5 | createMonoRepo() { 6 | 7 | mkdir capire && cd $_ 8 | jq -n '{name: "@capire/samples", workspaces: ["*"]}' > package.json 9 | git init 10 | cat << EOF > .gitignore 11 | node_modules 12 | gen 13 | EOF 14 | } 15 | 16 | addSubmodules() { 17 | for project in bookstore reviews orders common bookshop data-viewer; do 18 | git submodule add "https://github.com/capire/$project" 19 | done 20 | git submodule update --init 21 | } 22 | 23 | npmInstall() { 24 | npm install 25 | } 26 | 27 | main() { 28 | 29 | createMonoRepo 30 | addSubmodules 31 | npmInstall 32 | 33 | } 34 | 35 | if [[ ! 
$0 == "-bash" ]]; then 36 | main "$@" 37 | fi 38 | -------------------------------------------------------------------------------- /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | # syntax=docker/dockerfile:1 2 | 3 | ARG VARIANT="22" 4 | FROM mcr.microsoft.com/devcontainers/javascript-node:${VARIANT} 5 | 6 | # Install some generally useful tools 7 | RUN apt-get update && apt-get -y install --no-install-recommends curl git sqlite3 8 | 9 | # Install the cf CLI (linux64) v8 10 | RUN curl -L "https://packages.cloudfoundry.org/stable?release=linux64-binary&version=v8&source=github" \ 11 | | tar -C /usr/local/bin/ -z -x cf cf8 12 | 13 | # Install SAP CAP SDK globally 14 | USER node 15 | RUN npm install -g @sap/cds-dk 16 | 17 | # Make environment look as much like the BAS version as possible 18 | RUN mkdir /home/node/projects 19 | RUN echo 'export PS1="\[\e]0;\w\a\]\[\033[33;1m\]\u: \[\033[36m\]\$(basename \w) \$\[\033[m\] "' >> /home/node/.bashrc 20 | 21 | WORKDIR /home/node 22 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "CAP Local Development Workshop Dev Container", 3 | "build": { 4 | "dockerfile": "Dockerfile", 5 | "args": { 6 | "VARIANT": "22" 7 | } 8 | }, 9 | "customizations": { 10 | "vscode": { 11 | "extensions": [ 12 | "mechatroner.rainbow-csv", 13 | "dbaeumer.vscode-eslint", 14 | "sapse.vscode-cds", 15 | "sapse.vsc-extension-odata-csdl-modeler", 16 | "sapse.vscode-wing-cds-editor-vsc", 17 | "saposs.xml-toolkit", 18 | "humao.rest-client" 19 | ] 20 | } 21 | }, 22 | "features": { 23 | "ghcr.io/devcontainers/features/sshd:1": { 24 | "version": "latest" 25 | } 26 | }, 27 | "forwardPorts": [ 28 | 4004, 29 | 4005, 30 | 5005, 31 | 9229 32 | ], 33 | "remoteUser": "node" 34 | } 35 | 
-------------------------------------------------------------------------------- /exercises/04/assets/run: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -eo pipefail 4 | 5 | addMessagingToRequires() { 6 | local tempfile 7 | tempfile="$(mktemp)" 8 | cp package.json "$tempfile" \ 9 | && jq '. * { cds: { requires: { "messaging": true } } }' "$tempfile" > package.json \ 10 | && rm "$tempfile" 11 | } 12 | 13 | addBookRemovedEventDefinition() { 14 | sed -i -e '/entity Books as projection on my.Books/a\' -e ' event bookremoved: { ID: Books:ID; }' srv/ex01-service.cds 15 | } 16 | 17 | addEx01Implementation() { 18 | cat << EOF > srv/ex01-service.js 19 | const cds = require('@sap/cds') 20 | 21 | class Ex01Service extends cds.ApplicationService { init() { 22 | this.after (['DELETE'], 'Books', (_, req) => { 23 | this.emit('bookremoved', req.data) 24 | }) 25 | return super.init() 26 | }} 27 | 28 | module.exports = Ex01Service 29 | EOF 30 | } 31 | 32 | main() { 33 | 34 | addMessagingToRequires 35 | addBookRemovedEventDefinition 36 | addEx01Implementation 37 | 38 | } 39 | 40 | main "$@" 41 | -------------------------------------------------------------------------------- /exercises/01/assets/test/data/csv/sap.capire.bookshop-Books.csv: -------------------------------------------------------------------------------- 1 | ID,title,descr,author_ID,stock,price,currency_code,genre_ID 2 | 501,"Dirk Gently's Holistic Detective Agency","Dirk Gently's Holistic Detective Agency is a humorous detective novel by English writer Douglas Adams, published in 1987. 
It is described by the author on its cover as a ""thumping good detective-ghost-horror-who dunnit-time travel-romantic-musical-comedy-epic"".",42,750,8.99,GBP,10 3 | 502,"The Long Dark Tea-Time of the Soul","The title is a phrase that appeared in Adams' novel Life, the Universe and Everything to describe the wretched boredom of immortal being Wowbagger, the Infinitely Prolonged, and is a play on the theological treatise Dark Night of the Soul, by Saint John of the Cross.",42,750,8.99,GBP,10 4 | 503,"The Salmon of Doubt","The Salmon of Doubt: Hitchhiking the Galaxy One Last Time is a posthumous collection of previously published and unpublished material by Douglas Adams. It consists largely of essays, interviews, and newspaper/magazine columns about technology and life experiences, but its major selling point is the inclusion of the incomplete novel on which Adams was working at the time of his death, The Salmon of Doubt (from which the collection gets its title, a reference to the Irish myth of the Salmon of Knowledge)",42,750,8.99,GBP,10 5 | -------------------------------------------------------------------------------- /exercises/01/assets/run: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -eo pipefail 4 | 5 | addEx01Service() { 6 | cat << EOF > srv/ex01-service.cds 7 | using { sap.capire.bookshop as my } from '../db/schema'; 8 | @path: '/ex01' service Ex01Service { 9 | entity Books as projection on my.Books; 10 | } 11 | EOF 12 | } 13 | 14 | addCdsRequiresDbSqlite() { 15 | local tempfile 16 | tempfile="$(mktemp)" 17 | cp package.json "$tempfile" \ 18 | && jq '. 
+ { cds: { requires: { db: { kind: "sqlite" } } } }' "$tempfile" > package.json \ 19 | && rm "$tempfile" 20 | } 21 | 22 | deployToSqlite() { 23 | cds deploy --to sqlite 24 | } 25 | 26 | updateStockForCatweazle() { 27 | sqlite3 db.sqlite 'update sap_capire_bookshop_Books set stock = 1000 where ID = 271' 28 | } 29 | 30 | extendServiceWithSales() { 31 | cat << EOF > services.cds 32 | using { cuid } from '@sap/cds/common'; 33 | using { Ex01Service } from './srv/ex01-service'; 34 | 35 | extend service Ex01Service with { 36 | entity Sales : cuid { 37 | date: Date; 38 | book: Association to Ex01Service.Books; 39 | quantity: Integer; 40 | } 41 | } 42 | EOF 43 | } 44 | 45 | generateSampleSalesData() { 46 | cds add data \ 47 | --filter Sales \ 48 | --records 3 \ 49 | --out test/data/ \ 50 | --force 51 | } 52 | 53 | runBuild() { 54 | cds build --for hana 55 | } 56 | 57 | main() { 58 | 59 | addEx01Service 60 | addCdsRequiresDbSqlite 61 | deployToSqlite 62 | updateStockForCatweazle 63 | extendServiceWithSales 64 | generateSampleSalesData 65 | deployToSqlite 66 | runBuild 67 | 68 | } 69 | 70 | main "$@" 71 | -------------------------------------------------------------------------------- /exercises/03/assets/run: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -eo pipefail 4 | 5 | addAnnotationsToEx01() { 6 | 7 | cat << EOF1 >> srv/ex01-service.cds 8 | annotate Ex01Service with @requires: 'authenticated-user'; 9 | annotate Ex01Service.Books with @restrict: [ 10 | { grant: 'READ' }, 11 | { grant: 'WRITE', to: 'backoffice' } 12 | ]; 13 | EOF1 14 | 15 | } 16 | 17 | addUserInCdsrcFile() { 18 | 19 | cat << EOF2 > .cdsrc.json 20 | { 21 | "requires": { 22 | "auth": { 23 | "users": { 24 | "milton": { 25 | "password": "dontmovemydesk", 26 | "roles": [ 27 | "stapler", 28 | "backoffice" 29 | ] 30 | } 31 | } 32 | } 33 | } 34 | } 35 | EOF2 36 | } 37 | 38 | retrieveNorthbreezeEdmx() { 39 | curl -s \ 40 | --url 
'https://developer-challenge.cfapps.eu10.hana.ondemand.com/odata/v4/northbreeze/$metadata' \ 41 | > northbreeze.edmx 42 | } 43 | 44 | importNorthbreezeEdmx() { 45 | cds import northbreeze.edmx 46 | } 47 | 48 | generateDataForSuppliers() { 49 | mkdir srv/external/data/ \ 50 | && cds add data \ 51 | --filter Suppliers \ 52 | --records 5 \ 53 | --out srv/external/data/ 54 | } 55 | 56 | fetchDataForNorthbreeze() { 57 | for entity in Products Suppliers Categories; do 58 | echo -n "$entity: " 59 | curl \ 60 | --silent \ 61 | --url "https://developer-challenge.cfapps.eu10.hana.ondemand.com/odata/v4/northbreeze/$entity" \ 62 | | jq .value \ 63 | | tee "srv/external/data/northbreeze-$entity.json" \ 64 | | jq length 65 | done 66 | } 67 | 68 | installSapCloudSdk() { 69 | npm add @sap-cloud-sdk/http-client 70 | } 71 | 72 | main() { 73 | 74 | addAnnotationsToEx01 75 | addUserInCdsrcFile 76 | retrieveNorthbreezeEdmx 77 | importNorthbreezeEdmx 78 | generateDataForSuppliers 79 | fetchDataForNorthbreeze 80 | installSapCloudSdk 81 | 82 | } 83 | 84 | if [[ ! $0 == "-bash" ]]; then 85 | main "$@" 86 | fi 87 | -------------------------------------------------------------------------------- /REUSE.toml: -------------------------------------------------------------------------------- 1 | version = 1 2 | SPDX-PackageName = "cap-local-development-workshop" 3 | SPDX-PackageSupplier = "dj.adams@sap.com" 4 | SPDX-PackageDownloadLocation = "https://github.com/SAP-samples/cap-local-development-workshop" 5 | SPDX-PackageComment = "The code in this project may include calls to APIs (\"API Calls\") of\n SAP or third-party products or services developed outside of this project\n (\"External Products\").\n \"APIs\" means application programming interfaces, as well as their respective\n specifications and implementing code that allows software to communicate with\n other software.\n API Calls to External Products are not licensed under the open source license\n that governs this project. 
The use of such API Calls and related External\n Products are subject to applicable additional agreements with the relevant\n provider of the External Products. In no event shall the open source license\n that governs this project grant any rights in or to any External Products, or\n alter, expand or supersede any terms of the applicable additional agreements.\n If you have a valid license agreement with SAP for the use of a particular SAP\n External Product, then you may make use of any API Calls included in this\n project's code for that SAP External Product, subject to the terms of such\n license agreement. If you do not have a valid license agreement for the use of\n a particular SAP External Product, then you may only make use of any API Calls\n in this project for that SAP External Product for your internal, non-productive\n and non-commercial test and evaluation of such API Calls. Nothing herein grants\n you any rights to use or access any SAP External Product, or provide any third\n parties the right to use or access any SAP External Product, through API Calls."
6 | 7 | [[annotations]] 8 | path = "**" 9 | precedence = "aggregate" 10 | SPDX-FileCopyrightText = "2025 SAP SE or an SAP affiliate company and cap-local-development-workshop contributors" 11 | SPDX-License-Identifier = "Apache-2.0" 12 | -------------------------------------------------------------------------------- /exercises/02/assets/run: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -eo pipefail 4 | 5 | removeTestDir() { 6 | rm -rf test/ 7 | } 8 | 9 | addCredentialsUrlInMemory() { 10 | local tempfile 11 | tempfile="$(mktemp)" 12 | cp package.json "$tempfile" \ 13 | && jq '.cds.requires.db += { credentials: { url: ":memory:" } }' "$tempfile" > package.json \ 14 | && rm "$tempfile" 15 | } 16 | 17 | deploy() { 18 | cds deploy 19 | } 20 | 21 | moveDataToClassics() { 22 | mkdir db/classics/ \ 23 | && mv db/data/ db/classics/ 24 | } 25 | 26 | addClassicsConfig() { 27 | local tempfile 28 | tempfile="$(mktemp)" 29 | cp package.json "$tempfile" \ 30 | && jq '.cds.requires += { "[classics]": { initdata: { model: "db/classics/" } } }' "$tempfile" > package.json \ 31 | && rm "$tempfile" 32 | } 33 | 34 | touchDbClassicsIndex() { 35 | touch db/classics/index.cds 36 | } 37 | 38 | createHitchhikersDataCollection() { 39 | mkdir -p db/hitchhikers/data/ \ 40 | && cp ../exercises/02/assets/data/json/* "$_" \ 41 | && touch db/hitchhikers/index.cds 42 | } 43 | 44 | addHitchhikersConfig() { 45 | local tempfile 46 | tempfile="$(mktemp)" 47 | cp package.json "$tempfile" \ 48 | && jq '.cds.requires += { "[hitchhikers]": { initdata: { model: "db/hitchhikers/" } } }' "$tempfile" > package.json \ 49 | && rm "$tempfile" 50 | } 51 | 52 | deployToHitchhikersDbFile() { 53 | cds deploy --to sqlite:hitchhikers.db --profile hitchhikers 54 | } 55 | 56 | addSqliteInfoToHitchhikersConfig() { 57 | local tempfile 58 | tempfile="$(mktemp)" 59 | cp package.json "$tempfile" \ 60 | && jq '.cds.requires["[hitchhikers]"] += { db: { 
kind: "sqlite", credentials: { url: "hitchhikers.db" } } }' "$tempfile" > package.json \ 61 | && rm "$tempfile" 62 | } 63 | 64 | addCdsTestPackage() { 65 | npm add -D @cap-js/cds-test 66 | } 67 | 68 | main() { 69 | 70 | removeTestDir 71 | deploy 72 | addCredentialsUrlInMemory 73 | moveDataToClassics 74 | addClassicsConfig 75 | touchDbClassicsIndex 76 | createHitchhikersDataCollection 77 | addHitchhikersConfig 78 | deployToHitchhikersDbFile 79 | addCdsTestPackage 80 | 81 | } 82 | 83 | if [[ ! $0 == "-bash" ]]; then 84 | main "$@" 85 | fi 86 | -------------------------------------------------------------------------------- /exercises/01/assets/csv/sap.capire.bookshop-Books.csv: -------------------------------------------------------------------------------- 1 | ID,title,descr,author_ID,stock,price,currency_code,genre_ID 2 | 401,"The Hitchhiker's Guide to the Galaxy","The Hitchhiker's Guide to the Galaxy is the first book in the Hitchhiker's Guide to the Galaxy comedy science fiction ""trilogy of five books"" by Douglas Adams with a sixth book written by Eoin Colfer. The novel is an adaptation of the first four parts of Adams's radio series of the same name, centring on the adventures of the only man to survive the destruction of Earth. While roaming outer space, he comes to learn the truth behind Earth's existence.",42,1000,9.99,GBP,14 3 | 402,"The Restaurant at the End of the Universe","The Restaurant at the End of the Universe is the second book in the Hitchhiker's Guide to the Galaxy science fiction comedy ""trilogy"" by Douglas Adams. It was originally published by Pan Books as a paperback in 1980. 
Like the preceding novel, it was adapted from Adams' radio series, and became a critically acclaimed cult classic.",42,1000,9.99,GBP,14 4 | 403,"Life, the Universe and Everything","Life, the Universe and Everything (1982, ISBN 0-345-39182-9) is the third book in the six-volume Hitchhiker's Guide to the Galaxy science fiction ""trilogy of six books"" by British writer Douglas Adams. The title refers to the Answer to Life, the Universe, and Everything.",42,1000,9.99,GBP,14 5 | 404,"So Long, and Thanks for All the Fish","So Long, and Thanks for All the Fish is the fourth book of the Hitchhiker's Guide to the Galaxy ""trilogy of six books"" written by Douglas Adams. Its title is the message left by the dolphins when they departed Planet Earth just before it was demolished to make way for a hyperspace bypass, as described in The Hitchhiker's Guide to the Galaxy. A song of the same name was featured in the 2005 film adaptation of The Hitchhiker's Guide to the Galaxy.",42,1000,9.99,GBP,14 6 | 405,"Mostly Harmless","Mostly Harmless is a 1992 novel by Douglas Adams and the fifth book in the Hitchhiker's Guide to the Galaxy series. It is described on the cover of the first edition as ""The fifth book in the increasingly inaccurately named Hitch Hiker's Guide to the Galaxy trilogy"". It was the last Hitchhiker's book written by Adams and his final book released in his lifetime.",42,1000,9.99,GBP,14 7 | 406,"And Another Thing... (novel)","And Another Thing... is the sixth and final installment of Douglas Adams' The Hitchhiker's Guide to the Galaxy ""trilogy of six books"". The book, written by Eoin Colfer, was published on the thirtieth anniversary of the first book, 12 October 2009, in hardback. It was published by Penguin Books in the UK and by Hyperion Books in the US. 
Colfer was given permission to write the book by Adams' widow Jane Belson.",42,1000,9.99,GBP,14 8 | -------------------------------------------------------------------------------- /exercises/01/assets/data/sap.capire.bookshop-Books.csv: -------------------------------------------------------------------------------- 1 | ID,title,descr,author_ID,stock,price,currency_code,genre_ID 2 | 401,"The Hitchhiker's Guide to the Galaxy","The Hitchhiker's Guide to the Galaxy is the first book in the Hitchhiker's Guide to the Galaxy comedy science fiction ""trilogy of five books"" by Douglas Adams with a sixth book written by Eoin Colfer. The novel is an adaptation of the first four parts of Adams's radio series of the same name, centring on the adventures of the only man to survive the destruction of Earth. While roaming outer space, he comes to learn the truth behind Earth's existence.",42,1000,9.99,GBP,14 3 | 402,"The Restaurant at the End of the Universe","The Restaurant at the End of the Universe is the second book in the Hitchhiker's Guide to the Galaxy science fiction comedy ""trilogy"" by Douglas Adams. It was originally published by Pan Books as a paperback in 1980. Like the preceding novel, it was adapted from Adams' radio series, and became a critically acclaimed cult classic.",42,1000,9.99,GBP,14 4 | 403,"Life, the Universe and Everything","Life, the Universe and Everything (1982, ISBN 0-345-39182-9) is the third book in the six-volume Hitchhiker's Guide to the Galaxy science fiction ""trilogy of six books"" by British writer Douglas Adams. The title refers to the Answer to Life, the Universe, and Everything.",42,1000,9.99,GBP,14 5 | 404,"So Long, and Thanks for All the Fish","So Long, and Thanks for All the Fish is the fourth book of the Hitchhiker's Guide to the Galaxy ""trilogy of six books"" written by Douglas Adams. 
Its title is the message left by the dolphins when they departed Planet Earth just before it was demolished to make way for a hyperspace bypass, as described in The Hitchhiker's Guide to the Galaxy. A song of the same name was featured in the 2005 film adaptation of The Hitchhiker's Guide to the Galaxy.",42,1000,9.99,GBP,14 6 | 405,"Mostly Harmless","Mostly Harmless is a 1992 novel by Douglas Adams and the fifth book in the Hitchhiker's Guide to the Galaxy series. It is described on the cover of the first edition as ""The fifth book in the increasingly inaccurately named Hitch Hiker's Guide to the Galaxy trilogy"". It was the last Hitchhiker's book written by Adams and his final book released in his lifetime.",42,1000,9.99,GBP,14 7 | 406,"And Another Thing... (novel)","And Another Thing... is the sixth and final installment of Douglas Adams' The Hitchhiker's Guide to the Galaxy ""trilogy of six books"". The book, written by Eoin Colfer, was published on the thirtieth anniversary of the first book, 12 October 2009, in hardback. It was published by Penguin Books in the UK and by Hyperion Books in the US. Colfer was given permission to write the book by Adams' widow Jane Belson.",42,1000,9.99,GBP,14 8 | -------------------------------------------------------------------------------- /exercises/01/assets/data/csv/sap.capire.bookshop-Books.csv: -------------------------------------------------------------------------------- 1 | ID,title,descr,author_ID,stock,price,currency_code,genre_ID 2 | 401,"The Hitchhiker's Guide to the Galaxy","The Hitchhiker's Guide to the Galaxy is the first book in the Hitchhiker's Guide to the Galaxy comedy science fiction ""trilogy of five books"" by Douglas Adams with a sixth book written by Eoin Colfer. The novel is an adaptation of the first four parts of Adams's radio series of the same name, centring on the adventures of the only man to survive the destruction of Earth. 
While roaming outer space, he comes to learn the truth behind Earth's existence.",42,1000,9.99,GBP,14 3 | 402,"The Restaurant at the End of the Universe","The Restaurant at the End of the Universe is the second book in the Hitchhiker's Guide to the Galaxy science fiction comedy ""trilogy"" by Douglas Adams. It was originally published by Pan Books as a paperback in 1980. Like the preceding novel, it was adapted from Adams' radio series, and became a critically acclaimed cult classic.",42,1000,9.99,GBP,14 4 | 403,"Life, the Universe and Everything","Life, the Universe and Everything (1982, ISBN 0-345-39182-9) is the third book in the six-volume Hitchhiker's Guide to the Galaxy science fiction ""trilogy of six books"" by British writer Douglas Adams. The title refers to the Answer to Life, the Universe, and Everything.",42,1000,9.99,GBP,14 5 | 404,"So Long, and Thanks for All the Fish","So Long, and Thanks for All the Fish is the fourth book of the Hitchhiker's Guide to the Galaxy ""trilogy of six books"" written by Douglas Adams. Its title is the message left by the dolphins when they departed Planet Earth just before it was demolished to make way for a hyperspace bypass, as described in The Hitchhiker's Guide to the Galaxy. A song of the same name was featured in the 2005 film adaptation of The Hitchhiker's Guide to the Galaxy.",42,1000,9.99,GBP,14 6 | 405,"Mostly Harmless","Mostly Harmless is a 1992 novel by Douglas Adams and the fifth book in the Hitchhiker's Guide to the Galaxy series. It is described on the cover of the first edition as ""The fifth book in the increasingly inaccurately named Hitch Hiker's Guide to the Galaxy trilogy"". It was the last Hitchhiker's book written by Adams and his final book released in his lifetime.",42,1000,9.99,GBP,14 7 | 406,"And Another Thing... (novel)","And Another Thing... is the sixth and final installment of Douglas Adams' The Hitchhiker's Guide to the Galaxy ""trilogy of six books"". 
The book, written by Eoin Colfer, was published on the thirtieth anniversary of the first book, 12 October 2009, in hardback. It was published by Penguin Books in the UK and by Hyperion Books in the US. Colfer was given permission to write the book by Adams' widow Jane Belson.",42,1000,9.99,GBP,14 8 | -------------------------------------------------------------------------------- /exercises/02/assets/data/csv/sap.capire.bookshop-Books.csv: -------------------------------------------------------------------------------- 1 | ID,title,descr,author_ID,stock,price,currency_code,genre_ID 2 | 401,"The Hitchhiker's Guide to the Galaxy","The Hitchhiker's Guide to the Galaxy is the first book in the Hitchhiker's Guide to the Galaxy comedy science fiction ""trilogy of five books"" by Douglas Adams with a sixth book written by Eoin Colfer. The novel is an adaptation of the first four parts of Adams's radio series of the same name, centring on the adventures of the only man to survive the destruction of Earth. While roaming outer space, he comes to learn the truth behind Earth's existence.",42,1000,9.99,GBP,14 3 | 402,"The Restaurant at the End of the Universe","The Restaurant at the End of the Universe is the second book in the Hitchhiker's Guide to the Galaxy science fiction comedy ""trilogy"" by Douglas Adams. It was originally published by Pan Books as a paperback in 1980. Like the preceding novel, it was adapted from Adams' radio series, and became a critically acclaimed cult classic.",42,1000,9.99,GBP,14 4 | 403,"Life, the Universe and Everything","Life, the Universe and Everything (1982, ISBN 0-345-39182-9) is the third book in the six-volume Hitchhiker's Guide to the Galaxy science fiction ""trilogy of six books"" by British writer Douglas Adams. 
The title refers to the Answer to Life, the Universe, and Everything.",42,1000,9.99,GBP,14 5 | 404,"So Long, and Thanks for All the Fish","So Long, and Thanks for All the Fish is the fourth book of the Hitchhiker's Guide to the Galaxy ""trilogy of six books"" written by Douglas Adams. Its title is the message left by the dolphins when they departed Planet Earth just before it was demolished to make way for a hyperspace bypass, as described in The Hitchhiker's Guide to the Galaxy. A song of the same name was featured in the 2005 film adaptation of The Hitchhiker's Guide to the Galaxy.",42,1000,9.99,GBP,14 6 | 405,"Mostly Harmless","Mostly Harmless is a 1992 novel by Douglas Adams and the fifth book in the Hitchhiker's Guide to the Galaxy series. It is described on the cover of the first edition as ""The fifth book in the increasingly inaccurately named Hitch Hiker's Guide to the Galaxy trilogy"". It was the last Hitchhiker's book written by Adams and his final book released in his lifetime.",42,1000,9.99,GBP,14 7 | 406,"And Another Thing... (novel)","And Another Thing... is the sixth and final installment of Douglas Adams' The Hitchhiker's Guide to the Galaxy ""trilogy of six books"". The book, written by Eoin Colfer, was published on the thirtieth anniversary of the first book, 12 October 2009, in hardback. It was published by Penguin Books in the UK and by Hyperion Books in the US. 
Colfer was given permission to write the book by Adams' widow Jane Belson.",42,1000,9.99,GBP,14 8 | -------------------------------------------------------------------------------- /exercises/01/assets/csv/sap.capire.bookshop-Books.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "ID": 401, 4 | "title": "The Hitchhiker's Guide to the Galaxy", 5 | "descr": "The Hitchhiker's Guide to the Galaxy is the first book in the Hitchhiker's Guide to the Galaxy comedy science fiction \"trilogy of five books\" by Douglas Adams with a sixth book written by Eoin Colfer. The novel is an adaptation of the first four parts of Adams's radio series of the same name, centring on the adventures of the only man to survive the destruction of Earth. While roaming outer space, he comes to learn the truth behind Earth's existence.", 6 | "author_ID": 42, 7 | "genre_ID": 14, 8 | "stock": 1000, 9 | "price": 9.99, 10 | "currency_code": "GBP" 11 | }, 12 | { 13 | "ID": 402, 14 | "title": "The Restaurant at the End of the Universe", 15 | "descr": "The Restaurant at the End of the Universe is the second book in the Hitchhiker's Guide to the Galaxy science fiction comedy \"trilogy\" by Douglas Adams. It was originally published by Pan Books as a paperback in 1980. Like the preceding novel, it was adapted from Adams' radio series, and became a critically acclaimed cult classic.", 16 | "author_ID": 42, 17 | "genre_ID": 14, 18 | "stock": 1000, 19 | "price": 9.99, 20 | "currency_code": "GBP" 21 | }, 22 | { 23 | "ID": 403, 24 | "title": "Life, the Universe and Everything", 25 | "descr": "Life, the Universe and Everything (1982, ISBN 0-345-39182-9) is the third book in the six-volume Hitchhiker's Guide to the Galaxy science fiction \"trilogy of six books\" by British writer Douglas Adams. 
The title refers to the Answer to Life, the Universe, and Everything.", 26 | "author_ID": 42, 27 | "genre_ID": 14, 28 | "stock": 1000, 29 | "price": 9.99, 30 | "currency_code": "GBP" 31 | }, 32 | { 33 | "ID": 404, 34 | "title": "So Long, and Thanks for All the Fish", 35 | "descr": "So Long, and Thanks for All the Fish is the fourth book of the Hitchhiker's Guide to the Galaxy \"trilogy of six books\" written by Douglas Adams. Its title is the message left by the dolphins when they departed Planet Earth just before it was demolished to make way for a hyperspace bypass, as described in The Hitchhiker's Guide to the Galaxy. A song of the same name was featured in the 2005 film adaptation of The Hitchhiker's Guide to the Galaxy.", 36 | "author_ID": 42, 37 | "genre_ID": 14, 38 | "stock": 1000, 39 | "price": 9.99, 40 | "currency_code": "GBP" 41 | }, 42 | { 43 | "ID": 405, 44 | "title": "Mostly Harmless", 45 | "descr": "Mostly Harmless is a 1992 novel by Douglas Adams and the fifth book in the Hitchhiker's Guide to the Galaxy series. It is described on the cover of the first edition as \"The fifth book in the increasingly inaccurately named Hitch Hiker's Guide to the Galaxy trilogy\". It was the last Hitchhiker's book written by Adams and his final book released in his lifetime.", 46 | "author_ID": 42, 47 | "genre_ID": 14, 48 | "stock": 1000, 49 | "price": 9.99, 50 | "currency_code": "GBP" 51 | }, 52 | { 53 | "ID": 406, 54 | "title": "And Another Thing... (novel)", 55 | "descr": "And Another Thing... is the sixth and final installment of Douglas Adams' The Hitchhiker's Guide to the Galaxy \"trilogy of six books\". The book, written by Eoin Colfer, was published on the thirtieth anniversary of the first book, 12 October 2009, in hardback. It was published by Penguin Books in the UK and by Hyperion Books in the US. 
Colfer was given permission to write the book by Adams' widow Jane Belson.", 56 | "author_ID": 42, 57 | "genre_ID": 14, 58 | "stock": 1000, 59 | "price": 9.99, 60 | "currency_code": "GBP" 61 | } 62 | ] 63 | -------------------------------------------------------------------------------- /exercises/02/assets/data/json/sap.capire.bookshop-Books.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "ID": 401, 4 | "title": "The Hitchhiker's Guide to the Galaxy", 5 | "descr": "The Hitchhiker's Guide to the Galaxy is the first book in the Hitchhiker's Guide to the Galaxy comedy science fiction \"trilogy of five books\" by Douglas Adams with a sixth book written by Eoin Colfer. The novel is an adaptation of the first four parts of Adams's radio series of the same name, centring on the adventures of the only man to survive the destruction of Earth. While roaming outer space, he comes to learn the truth behind Earth's existence.", 6 | "author_ID": 42, 7 | "genre_ID": 14, 8 | "stock": 1000, 9 | "price": 9.99, 10 | "currency_code": "GBP" 11 | }, 12 | { 13 | "ID": 402, 14 | "title": "The Restaurant at the End of the Universe", 15 | "descr": "The Restaurant at the End of the Universe is the second book in the Hitchhiker's Guide to the Galaxy science fiction comedy \"trilogy\" by Douglas Adams. It was originally published by Pan Books as a paperback in 1980. Like the preceding novel, it was adapted from Adams' radio series, and became a critically acclaimed cult classic.", 16 | "author_ID": 42, 17 | "genre_ID": 14, 18 | "stock": 1000, 19 | "price": 9.99, 20 | "currency_code": "GBP" 21 | }, 22 | { 23 | "ID": 403, 24 | "title": "Life, the Universe and Everything", 25 | "descr": "Life, the Universe and Everything (1982, ISBN 0-345-39182-9) is the third book in the six-volume Hitchhiker's Guide to the Galaxy science fiction \"trilogy of six books\" by British writer Douglas Adams. 
The title refers to the Answer to Life, the Universe, and Everything.", 26 | "author_ID": 42, 27 | "genre_ID": 14, 28 | "stock": 1000, 29 | "price": 9.99, 30 | "currency_code": "GBP" 31 | }, 32 | { 33 | "ID": 404, 34 | "title": "So Long, and Thanks for All the Fish", 35 | "descr": "So Long, and Thanks for All the Fish is the fourth book of the Hitchhiker's Guide to the Galaxy \"trilogy of six books\" written by Douglas Adams. Its title is the message left by the dolphins when they departed Planet Earth just before it was demolished to make way for a hyperspace bypass, as described in The Hitchhiker's Guide to the Galaxy. A song of the same name was featured in the 2005 film adaptation of The Hitchhiker's Guide to the Galaxy.", 36 | "author_ID": 42, 37 | "genre_ID": 14, 38 | "stock": 1000, 39 | "price": 9.99, 40 | "currency_code": "GBP" 41 | }, 42 | { 43 | "ID": 405, 44 | "title": "Mostly Harmless", 45 | "descr": "Mostly Harmless is a 1992 novel by Douglas Adams and the fifth book in the Hitchhiker's Guide to the Galaxy series. It is described on the cover of the first edition as \"The fifth book in the increasingly inaccurately named Hitch Hiker's Guide to the Galaxy trilogy\". It was the last Hitchhiker's book written by Adams and his final book released in his lifetime.", 46 | "author_ID": 42, 47 | "genre_ID": 14, 48 | "stock": 1000, 49 | "price": 9.99, 50 | "currency_code": "GBP" 51 | }, 52 | { 53 | "ID": 406, 54 | "title": "And Another Thing...", 55 | "descr": "And Another Thing... is the sixth and final installment of Douglas Adams' The Hitchhiker's Guide to the Galaxy \"trilogy of six books\". The book, written by Eoin Colfer, was published on the thirtieth anniversary of the first book, 12 October 2009, in hardback. It was published by Penguin Books in the UK and by Hyperion Books in the US. 
Colfer was given permission to write the book by Adams' widow Jane Belson.", 56 | "author_ID": 42, 57 | "genre_ID": 14, 58 | "stock": 1000, 59 | "price": 9.99, 60 | "currency_code": "GBP" 61 | } 62 | ] 63 | -------------------------------------------------------------------------------- /exercises/01/assets/data/sap.capire.bookshop-Books.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "ID": 401, 4 | "title": "The Hitchhiker's Guide to the Galaxy", 5 | "descr": "The Hitchhiker's Guide to the Galaxy is the first book in the Hitchhiker's Guide to the Galaxy comedy science fiction \"trilogy of five books\" by Douglas Adams with a sixth book written by Eoin Colfer. The novel is an adaptation of the first four parts of Adams's radio series of the same name, centring on the adventures of the only man to survive the destruction of Earth. While roaming outer space, he comes to learn the truth behind Earth's existence.", 6 | "author_ID": 42, 7 | "genre_ID": 14, 8 | "stock": 1000, 9 | "price": 9.99, 10 | "currency_code": "GBP" 11 | }, 12 | { 13 | "ID": 402, 14 | "title": "The Restaurant at the End of the Universe", 15 | "descr": "The Restaurant at the End of the Universe is the second book in the Hitchhiker's Guide to the Galaxy science fiction comedy \"trilogy\" by Douglas Adams. It was originally published by Pan Books as a paperback in 1980. Like the preceding novel, it was adapted from Adams' radio series, and became a critically acclaimed cult classic.", 16 | "author_ID": 42, 17 | "genre_ID": 14, 18 | "stock": 1000, 19 | "price": 9.99, 20 | "currency_code": "GBP" 21 | }, 22 | { 23 | "ID": 403, 24 | "title": "Life, the Universe and Everything", 25 | "descr": "Life, the Universe and Everything (1982, ISBN 0-345-39182-9) is the third book in the six-volume Hitchhiker's Guide to the Galaxy science fiction \"trilogy of six books\" by British writer Douglas Adams. 
The title refers to the Answer to Life, the Universe, and Everything.", 26 | "author_ID": 42, 27 | "genre_ID": 14, 28 | "stock": 1000, 29 | "price": 9.99, 30 | "currency_code": "GBP" 31 | }, 32 | { 33 | "ID": 404, 34 | "title": "So Long, and Thanks for All the Fish", 35 | "descr": "So Long, and Thanks for All the Fish is the fourth book of the Hitchhiker's Guide to the Galaxy \"trilogy of six books\" written by Douglas Adams. Its title is the message left by the dolphins when they departed Planet Earth just before it was demolished to make way for a hyperspace bypass, as described in The Hitchhiker's Guide to the Galaxy. A song of the same name was featured in the 2005 film adaptation of The Hitchhiker's Guide to the Galaxy.", 36 | "author_ID": 42, 37 | "genre_ID": 14, 38 | "stock": 1000, 39 | "price": 9.99, 40 | "currency_code": "GBP" 41 | }, 42 | { 43 | "ID": 405, 44 | "title": "Mostly Harmless", 45 | "descr": "Mostly Harmless is a 1992 novel by Douglas Adams and the fifth book in the Hitchhiker's Guide to the Galaxy series. It is described on the cover of the first edition as \"The fifth book in the increasingly inaccurately named Hitch Hiker's Guide to the Galaxy trilogy\". It was the last Hitchhiker's book written by Adams and his final book released in his lifetime.", 46 | "author_ID": 42, 47 | "genre_ID": 14, 48 | "stock": 1000, 49 | "price": 9.99, 50 | "currency_code": "GBP" 51 | }, 52 | { 53 | "ID": 406, 54 | "title": "And Another Thing... (novel)", 55 | "descr": "And Another Thing... is the sixth and final installment of Douglas Adams' The Hitchhiker's Guide to the Galaxy \"trilogy of six books\". The book, written by Eoin Colfer, was published on the thirtieth anniversary of the first book, 12 October 2009, in hardback. It was published by Penguin Books in the UK and by Hyperion Books in the US. 
Colfer was given permission to write the book by Adams' widow Jane Belson.", 56 | "author_ID": 42, 57 | "genre_ID": 14, 58 | "stock": 1000, 59 | "price": 9.99, 60 | "currency_code": "GBP" 61 | } 62 | ] 63 | -------------------------------------------------------------------------------- /exercises/01/assets/json/sap.capire.bookshop-Books.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "ID": 401, 4 | "title": "The Hitchhiker's Guide to the Galaxy", 5 | "descr": "The Hitchhiker's Guide to the Galaxy is the first book in the Hitchhiker's Guide to the Galaxy comedy science fiction \"trilogy of five books\" by Douglas Adams with a sixth book written by Eoin Colfer. The novel is an adaptation of the first four parts of Adams's radio series of the same name, centring on the adventures of the only man to survive the destruction of Earth. While roaming outer space, he comes to learn the truth behind Earth's existence.", 6 | "author_ID": 42, 7 | "genre_ID": 14, 8 | "stock": 1000, 9 | "price": 9.99, 10 | "currency_code": "GBP" 11 | }, 12 | { 13 | "ID": 402, 14 | "title": "The Restaurant at the End of the Universe", 15 | "descr": "The Restaurant at the End of the Universe is the second book in the Hitchhiker's Guide to the Galaxy science fiction comedy \"trilogy\" by Douglas Adams. It was originally published by Pan Books as a paperback in 1980. Like the preceding novel, it was adapted from Adams' radio series, and became a critically acclaimed cult classic.", 16 | "author_ID": 42, 17 | "genre_ID": 14, 18 | "stock": 1000, 19 | "price": 9.99, 20 | "currency_code": "GBP" 21 | }, 22 | { 23 | "ID": 403, 24 | "title": "Life, the Universe and Everything", 25 | "descr": "Life, the Universe and Everything (1982, ISBN 0-345-39182-9) is the third book in the six-volume Hitchhiker's Guide to the Galaxy science fiction \"trilogy of six books\" by British writer Douglas Adams. 
The title refers to the Answer to Life, the Universe, and Everything.", 26 | "author_ID": 42, 27 | "genre_ID": 14, 28 | "stock": 1000, 29 | "price": 9.99, 30 | "currency_code": "GBP" 31 | }, 32 | { 33 | "ID": 404, 34 | "title": "So Long, and Thanks for All the Fish", 35 | "descr": "So Long, and Thanks for All the Fish is the fourth book of the Hitchhiker's Guide to the Galaxy \"trilogy of six books\" written by Douglas Adams. Its title is the message left by the dolphins when they departed Planet Earth just before it was demolished to make way for a hyperspace bypass, as described in The Hitchhiker's Guide to the Galaxy. A song of the same name was featured in the 2005 film adaptation of The Hitchhiker's Guide to the Galaxy.", 36 | "author_ID": 42, 37 | "genre_ID": 14, 38 | "stock": 1000, 39 | "price": 9.99, 40 | "currency_code": "GBP" 41 | }, 42 | { 43 | "ID": 405, 44 | "title": "Mostly Harmless", 45 | "descr": "Mostly Harmless is a 1992 novel by Douglas Adams and the fifth book in the Hitchhiker's Guide to the Galaxy series. It is described on the cover of the first edition as \"The fifth book in the increasingly inaccurately named Hitch Hiker's Guide to the Galaxy trilogy\". It was the last Hitchhiker's book written by Adams and his final book released in his lifetime.", 46 | "author_ID": 42, 47 | "genre_ID": 14, 48 | "stock": 1000, 49 | "price": 9.99, 50 | "currency_code": "GBP" 51 | }, 52 | { 53 | "ID": 406, 54 | "title": "And Another Thing... (novel)", 55 | "descr": "And Another Thing... is the sixth and final installment of Douglas Adams' The Hitchhiker's Guide to the Galaxy \"trilogy of six books\". The book, written by Eoin Colfer, was published on the thirtieth anniversary of the first book, 12 October 2009, in hardback. It was published by Penguin Books in the UK and by Hyperion Books in the US. 
Colfer was given permission to write the book by Adams' widow Jane Belson.", 56 | "author_ID": 42, 57 | "genre_ID": 14, 58 | "stock": 1000, 59 | "price": 9.99, 60 | "currency_code": "GBP" 61 | } 62 | ] 63 | -------------------------------------------------------------------------------- /exercises/01/assets/data/json/sap.capire.bookshop-Books.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "ID": 401, 4 | "title": "The Hitchhiker's Guide to the Galaxy", 5 | "descr": "The Hitchhiker's Guide to the Galaxy is the first book in the Hitchhiker's Guide to the Galaxy comedy science fiction \"trilogy of five books\" by Douglas Adams with a sixth book written by Eoin Colfer. The novel is an adaptation of the first four parts of Adams's radio series of the same name, centring on the adventures of the only man to survive the destruction of Earth. While roaming outer space, he comes to learn the truth behind Earth's existence.", 6 | "author_ID": 42, 7 | "genre_ID": 14, 8 | "stock": 1000, 9 | "price": 9.99, 10 | "currency_code": "GBP" 11 | }, 12 | { 13 | "ID": 402, 14 | "title": "The Restaurant at the End of the Universe", 15 | "descr": "The Restaurant at the End of the Universe is the second book in the Hitchhiker's Guide to the Galaxy science fiction comedy \"trilogy\" by Douglas Adams. It was originally published by Pan Books as a paperback in 1980. Like the preceding novel, it was adapted from Adams' radio series, and became a critically acclaimed cult classic.", 16 | "author_ID": 42, 17 | "genre_ID": 14, 18 | "stock": 1000, 19 | "price": 9.99, 20 | "currency_code": "GBP" 21 | }, 22 | { 23 | "ID": 403, 24 | "title": "Life, the Universe and Everything", 25 | "descr": "Life, the Universe and Everything (1982, ISBN 0-345-39182-9) is the third book in the six-volume Hitchhiker's Guide to the Galaxy science fiction \"trilogy of six books\" by British writer Douglas Adams. 
The title refers to the Answer to Life, the Universe, and Everything.", 26 | "author_ID": 42, 27 | "genre_ID": 14, 28 | "stock": 1000, 29 | "price": 9.99, 30 | "currency_code": "GBP" 31 | }, 32 | { 33 | "ID": 404, 34 | "title": "So Long, and Thanks for All the Fish", 35 | "descr": "So Long, and Thanks for All the Fish is the fourth book of the Hitchhiker's Guide to the Galaxy \"trilogy of six books\" written by Douglas Adams. Its title is the message left by the dolphins when they departed Planet Earth just before it was demolished to make way for a hyperspace bypass, as described in The Hitchhiker's Guide to the Galaxy. A song of the same name was featured in the 2005 film adaptation of The Hitchhiker's Guide to the Galaxy.", 36 | "author_ID": 42, 37 | "genre_ID": 14, 38 | "stock": 1000, 39 | "price": 9.99, 40 | "currency_code": "GBP" 41 | }, 42 | { 43 | "ID": 405, 44 | "title": "Mostly Harmless", 45 | "descr": "Mostly Harmless is a 1992 novel by Douglas Adams and the fifth book in the Hitchhiker's Guide to the Galaxy series. It is described on the cover of the first edition as \"The fifth book in the increasingly inaccurately named Hitch Hiker's Guide to the Galaxy trilogy\". It was the last Hitchhiker's book written by Adams and his final book released in his lifetime.", 46 | "author_ID": 42, 47 | "genre_ID": 14, 48 | "stock": 1000, 49 | "price": 9.99, 50 | "currency_code": "GBP" 51 | }, 52 | { 53 | "ID": 406, 54 | "title": "And Another Thing... (novel)", 55 | "descr": "And Another Thing... is the sixth and final installment of Douglas Adams' The Hitchhiker's Guide to the Galaxy \"trilogy of six books\". The book, written by Eoin Colfer, was published on the thirtieth anniversary of the first book, 12 October 2009, in hardback. It was published by Penguin Books in the UK and by Hyperion Books in the US. 
Colfer was given permission to write the book by Adams' widow Jane Belson.", 56 | "author_ID": 42, 57 | "genre_ID": 14, 58 | "stock": 1000, 59 | "price": 9.99, 60 | "currency_code": "GBP" 61 | } 62 | ] 63 | -------------------------------------------------------------------------------- /exercises/03/assets/northbreeze.edmx: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # CAP local development workshop 2 | 3 | [![REUSE status](https://api.reuse.software/badge/github.com/SAP-samples/cap-local-development-workshop)](https://api.reuse.software/info/github.com/SAP-samples/cap-local-development-workshop) 4 | 5 | ## Description 6 | 7 | The content of this repository is for use in a [reCAP] 2025 hands-on workshop: 8 | 9 | Title: Stay cool, stay local. 10 | 11 | Abstract: CAP has myriad features to help developers develop. And that means local first, in a tight feedback loop. In this session you'll learn about those features and tools at your disposal as a CAP developer (predominantly Node.js) and get the chance to try some of them out yourself. 
12 | 13 | ## Prerequisites 14 | 15 | To participate in this workshop, the following prerequisites are required: 16 | 17 | Ideally: 18 | 19 | - The following installed on your laptop: 20 | - [VS Code](https://code.visualstudio.com/download) 21 | - [Docker Desktop](https://www.docker.com/products/docker-desktop/) (or equivalent container runtime engine) 22 | - the [Dev Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) installed in VS Code 23 | - [git](https://git-scm.com/) (to be able to clone a repository from GitHub) 24 | 25 | Alternatively: 26 | 27 | Your own development environment for CAP Node.js already set up and working, with CAP Node.js [9.0.0+](https://cap.cloud.sap/docs/releases/may25) installed and a Bash-compatible shell (e.g. via WSL on Windows), plus [jq](https://jqlang.org/) and [curl](https://curl.se/) installed. 28 | 29 | In both cases: 30 | 31 | - An active trial account on SAP Business Technology Platform, with a Cloud Foundry environment instance (see [this tutorial] for details) - this will be needed for the last exercise, and we may be able to supply temporary access for participants 32 | 33 | ## Exercises 34 | 35 | Work through each of the following exercises one at a time, each of which cover one or more topics that are relevant for local development (and are shown in brackets following the exercise titles). When reading through the exercises, actions for you to take, things you have to do yourself, are indicated with the 👉 symbol. 
36 | 37 | - [00 - setting up and getting to a running server](exercises/00/) ([cds watch]) 38 | - [01 - cds watch, SQLite, initial data and sample data](exercises/01/) ([cds watch], [SQLite], [initial data]) 39 | - [02 - configuration profiles, more on initial data, and the cds REPL](exercises/02/) ([configuration profiles], [initial data], [cds REPL]) 40 | - [03 - mocking auth and required services](exercises/03) ([mock user authentication], [mocking of required services]) 41 | - [04 - a first look at local messaging and events](exercises/04) ([local messaging], [file-based messaging]) 42 | - [05 - workspaces, monorepos and more on messaging and events](exercises/05) ([workspaces and monorepos], [plugins], [file-based messaging]) 43 | - [06 - debugging local and remote servers](exercises/06) ([debugging remote applications]) 44 | 45 | ## Other topics 46 | 47 | There are other topics in Capire that also are relevant for local development, but not covered in this workshop (except in passing): 48 | 49 | - [hybrid testing] 50 | - [linting] 51 | - [cds init] 52 | - [cds add] 53 | - [serving UIs] 54 | 55 | ## How to obtain support 56 | 57 | Support for the content in this repository is available during the actual time of the workshop event for which this content has been designed. 58 | 59 | ## License 60 | 61 | Copyright (c) 2025 SAP SE or an SAP affiliate company. All rights reserved. This project is licensed under the Apache Software License, version 2.0 except as noted otherwise in the [LICENSE](LICENSES/Apache-2.0.txt) file. 
62 | 63 | [reCAP]: https://recap-conf.dev/ 64 | [this tutorial]: https://developers.sap.com/tutorials/hcp-create-trial-account.html 65 | [hybrid testing]: https://cap.cloud.sap/docs/advanced/hybrid-testing 66 | [configuration profiles]: https://cap.cloud.sap/docs/node.js/cds-env#profiles 67 | [SQLite]: https://cap.cloud.sap/docs/guides/databases-sqlite 68 | [initial data]: https://cap.cloud.sap/docs/guides/databases#providing-initial-data 69 | [cds watch]: https://cap.cloud.sap/docs/tools/cds-cli#cds-watch 70 | [cds REPL]: https://cap.cloud.sap/docs/tools/cds-cli#cds-repl 71 | [mock user authentication]: https://cap.cloud.sap/docs/guides/security/authorization#prerequisite-authentication 72 | [debugging remote applications]: https://cap.cloud.sap/docs/tools/cds-cli#remote-applications 73 | [mocking of required services]: https://cap.cloud.sap/docs/guides/using-services#mock-remote-service-as-odata-service-node-js 74 | [plugins]: https://cap.cloud.sap/docs/plugins/#support-for-plugins 75 | [workspaces and monorepos]: https://cap.cloud.sap/docs/guides/deployment/microservices#create-a-solution-monorepo 76 | [linting]: https://cap.cloud.sap/docs/tools/cds-lint/#usage-lint-cli 77 | [cds init]: https://cap.cloud.sap/docs/tools/cds-cli#cds-init 78 | [cds add]: https://cap.cloud.sap/docs/tools/cds-cli#cds-add 79 | [local messaging]: https://cap.cloud.sap/docs/node.js/messaging#local-messaging 80 | [file-based messaging]: https://cap.cloud.sap/docs/node.js/messaging#file-based 81 | [serving UIs]: https://cap.cloud.sap/docs/get-started/in-a-nutshell#uis 82 | -------------------------------------------------------------------------------- /exercises/00/README.md: -------------------------------------------------------------------------------- 1 | # Exercise 00 - setting up and getting to a running server 2 | 3 | This workshop is all about local development, but we want to ensure that 4 | everyone has the same experience regardless of the machine, operating system 5 | and 
admin rights to install software. So there are different options you can 6 | take to set up and get ready to run through the exercises. 7 | 8 | If you have everything set up for CAP Node.js development already, including 9 | Node.js 22 or 24, the latest release of CAP Node.js ([9.0.0+]), and an editor 10 | you're comfortable using, then you're all set and you can jump straight to and 11 | continue from the [Check CAP Node.js version](#check-cap-nodejs-version) 12 | section. 13 | 14 | If not, then we have various options for you from which to choose. 15 | 16 | Once you've got everything set up, you'll finish this exercise by initializing 17 | a new CAP Node.js project and starting up the CAP server. 18 | 19 | ## Set up working environment 20 | 21 | Here are the options. 22 | 23 | > Only option 1 will allow for the completion of the `cds debug` based 24 | > debugging in the last exercise, due to the construct and online context of 25 | > the codespace / dev space; see the footnote in that exercise which shows an 26 | > alternative way of debugging. 27 | 28 | 👉 Choose one of them, work through the steps, and then jump forward to 29 | continue on from the [check the CAP Node.js version](#check-cap-nodejs-version) 30 | section. 31 | 32 | ### Option 1 - dev container and VS Code installed locally 33 | 34 | If you already have or can install git, Docker Desktop (see [footnote 35 | 1](#footnote-1)) and VS Code on your machine, then this option may be for you. 
36 | 37 | - Ensure Docker Desktop is running (and if you have a context set, ensure it 38 | points to the local engine) 39 | - Check that you have the [Dev Containers] extension installed in VS Code 40 | - Clone this repo using `git clone 41 | https://github.com/SAP-samples/cap-local-development-workshop` 42 | - Open the directory containing the clone with VS Code, for example like this: 43 | `code cap-local-development-workshop`, or via the VS Code menu 44 | - Choose to "Reopen in Container" when asked: ![reopen in container dialogue 45 | box](assets/vscode-reopen-in-container.png) 46 | 47 | Then, so that everyone's working location is the same in all of the explicit 48 | options here: 49 | 50 | - Open a terminal and create a symbolic link so that the cloned repository is 51 | (also, virtually) in the `$HOME/projects/` directory: 52 | 53 | ```bash 54 | cd $HOME/projects/ \ 55 | && ln -s /workspaces/cap-local-development-workshop . \ 56 | && cd cap-local-development-workshop 57 | ``` 58 | 59 | ### Option 2 - dev container in a GitHub codespace 60 | 61 | For our purposes, [GitHub codespaces] are essentially the same as a locally 62 | running container and VS Code ... but provided by GitHub and accessed via the 63 | browser. 64 | 65 | - At the [home of this repo] on GitHub, use the "Code" button. 66 | - Select the "Codespaces" tab. 67 | - Choose to "Create codespace on main", and when the codespace is ready, you're 68 | all set: ![github-create-codespace](assets/github-create-codespace.png) 69 | 70 | Then, just like option 1, so that everyone's working location is the same in 71 | all of the explicit options here: 72 | 73 | - Open a terminal and create a symbolic link so that the cloned repository is 74 | (also, virtually) in the `$HOME/projects/` directory: 75 | 76 | ```bash 77 | cd $HOME/projects/ \ 78 | && ln -s /workspaces/cap-local-development-workshop . 
\ 79 | && cd cap-local-development-workshop 80 | ``` 81 | 82 | > 🚨 At the end of this workshop, when you're done, remember to delete the 83 | > codespace so as not to incur costs! 84 | 85 | ### Option 3 - dev space in SAP Business Application Studio 86 | 87 | This option is very much similar to the previous two options, in that it 88 | provides a VS Code based development environment and container. If you have a 89 | [trial account on the SAP Business Technology Platform], a [subscription to the 90 | SAP Business Application Studio], and the appropriate role collections 91 | assigned, then you can use this option. 92 | 93 | - Go to the SAP Business Application Studio from your [SAP BTP trial landing 94 | page] 95 | - Choose to "Create Dev Space", giving it a name and selecting the "Full Stack 96 | Cloud Application" type: ![creating a dev 97 | space](assets/bas-create-dev-space.png) 98 | - Once the dev space is started, enter it, use the "Clone from Git" option to 99 | clone this repo, and choose to open it when prompted: ![cloning this repo 100 | from git](assets/bas-clone-from-git.png) 101 | 102 | ## Check CAP Node.js version 103 | 104 | 👉 _Regardless of how you got to your working environment, continue this 105 | exercise from this point._ 106 | 107 | 👉 Once you have your working environment set up, check that CAP Node.js is 108 | installed (it should be) by opening up a terminal (menu option "Terminal -> New 109 | Terminal") and running `cds v`. The version for `@sap/cds-dk` should be 110 | [9.0.0+]. Here's an example from a terminal prompt from Option 1, but 111 | regardless of the option you chose, it should look similar: 112 | 113 | ![running cds v in a terminal prompt](assets/vscode-shell-cds-version.png) 114 | 115 | ## Create new CAP Node.js project 116 | 117 | We'll explore local development friendly features starting with a simple CAP 118 | Node.js project, which you should create now. 
It's based on the `sample` 119 | [facet], which provides a ready-to-go sample schema and set of services, plus 120 | annotations and some actual data. 121 | 122 | 👉 From a terminal prompt, initialize a new project called `myproj`, adding the 123 | `sample` facet; then move into that project's directory and install the NPM 124 | package dependencies: 125 | 126 | ```bash 127 | cd $HOME/projects/cap-local-development-workshop/ \ 128 | && cds init --add sample myproj \ 129 | && cd $_ \ 130 | && npm install 131 | ``` 132 | 133 | The output should appear similar to this: 134 | 135 | ```text 136 | creating new CAP project in ./myproj 137 | 138 | adding nodejs 139 | adding sample 140 | 141 | successfully created project – continue with cd myproj 142 | 143 | find samples on https://github.com/SAP-samples/cloud-cap-samples 144 | learn about next steps at https://cap.cloud.sap 145 | 146 | added 127 packages, and audited 128 packages in 1m 147 | 148 | 23 packages are looking for funding 149 | run `npm fund` for details 150 | 151 | found 0 vulnerabilities 152 | ``` 153 | 154 | ## Start the fast development cycle with cds watch 155 | 156 | With the previous shell invocation you created a project from scratch. And now 157 | it's time to invoke `cds watch`, a command so powerful and fundamental to CAP 158 | Node.js rapid development that the humble [Capire entry for it] doesn't really 159 | do it justice :-) 160 | 161 | 👉 Do that now: 162 | 163 | ```bash 164 | cds watch 165 | ``` 166 | 167 | Take a brief look at the output, we'll be revisiting some of this in the next 168 | exercise! 169 | 170 | ```text 171 | cds serve all --with-mocks --in-memory? 
172 | ( live reload enabled for browsers ) 173 | 174 | ___________________________ 175 | 176 | [cds] - loaded model from 9 file(s): 177 | 178 | node_modules/@sap/cds/srv/outbox.cds 179 | app/services.cds 180 | app/common.cds 181 | app/browse/fiori-service.cds 182 | app/admin-books/fiori-service.cds 183 | srv/cat-service.cds 184 | srv/admin-service.cds 185 | db/schema.cds 186 | node_modules/@sap/cds/common.cds 187 | 188 | [cds] - connect using bindings from: { registry: '~/.cds-services.json' } 189 | [cds] - connect to db > sqlite { url: ':memory:' } 190 | > init from db/data/sap.capire.bookshop-Genres.csv 191 | > init from db/data/sap.capire.bookshop-Books_texts.csv 192 | > init from db/data/sap.capire.bookshop-Books.csv 193 | > init from db/data/sap.capire.bookshop-Authors.csv 194 | /> successfully deployed to in-memory database. 195 | 196 | [cds] - using auth strategy { 197 | kind: 'mocked', 198 | impl: 'node_modules/@sap/cds/lib/srv/middlewares/auth/basic-auth' 199 | } 200 | 201 | [cds] - serving AdminService { impl: 'srv/admin-service.js', path: '/odata/v4/admin' } 202 | [cds] - serving CatalogService { impl: 'srv/cat-service.js', path: '/odata/v4/catalog' } 203 | 204 | [cds] - server listening on { url: 'http://localhost:4004' } 205 | [cds] - server launched in: 497.749ms 206 | [cds] - [ terminate with ^C ] 207 | ``` 208 | 209 | Here are a few things to notice, given that by default we're in development 210 | mode here (i.e. 
we haven't specified "production" mode): 211 | 212 | - the CDS model that is loaded is made up of content from different sources 213 | - by default there's a SQLite powered in-memory database in play 214 | - there has been some initial data loaded for various entities 215 | - mock authentication is used automatically 216 | - two services are made available, via the OData adapter, and there are custom 217 | implementations for each of them 218 | 219 | > From here on in, any commands entered in subsequent exercises should be 220 | > entered in the context of where we are right now, which is in this project's 221 | > root, i.e. the `myproj/` directory ... unless otherwise stated. 222 | 223 | --- 224 | 225 | [Next exercise](../01) 226 | 227 | --- 228 | 229 | ## Footnotes 230 | 231 | 232 | ### Footnote 1 233 | 234 | There's no reason why e.g. [Podman] won't work instead of Docker; you're 235 | welcome to use that, but we cannot provide support for any issues arising in 236 | this workshop due to time constraints. 
237 | 238 | [GitHub codespaces]: https://github.com/features/codespaces 239 | [9.0.0+]: https://cap.cloud.sap/docs/releases/may25 240 | [Dev Containers]: https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers 241 | [home of this repo]: https://github.com/SAP-samples/cap-local-development-workshop 242 | [trial account on the SAP Business Technology Platform]: https://developers.sap.com/tutorials/hcp-create-trial-account.html 243 | [subscription to the SAP Business Application Studio]: https://developers.sap.com/tutorials/appstudio-onboarding.html 244 | [SAP BTP trial landing page]: https://account.hanatrial.ondemand.com/trial/#/home/trial 245 | [facet]: https://cap.cloud.sap/docs/tools/cds-cli#cds-add 246 | [Capire entry for it]: https://cap.cloud.sap/docs/tools/cds-cli#cds-watch 247 | [Podman]: https://podman.io/ 248 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 
22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [2025] [SAP SE] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /LICENSES/Apache-2.0.txt: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [2025] [SAP SE] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /exercises/04/README.md: -------------------------------------------------------------------------------- 1 | # Exercise 04 - a first look at local messaging and events 2 | 3 | In [The Art and Science of CAP] series Daniel Hutzel shared with us many of the 4 | influences that informed CAP's design, and explained in great detail some of 5 | the core axioms. There are key philosophical truths that are inherent in what 6 | CAP is, two of which are: 7 | 8 | - [Everything is a service] 9 | - [Everything is an event] 10 | 11 | In this exercise we'll explore [events and messaging] in CAP, and in 12 | particular, the facilities in this space that are available to us when 13 | developing locally. 14 | 15 | The general idea, as you might expect, is that CAP's eventing is agnostic at 16 | the definition and API level; the actual mechanism used to manage the sending, 17 | receiving, queueing and relaying of messages is an implementation and platform 18 | context detail. 
19 | 20 | Whether the "message channel" facilities are provided by SAP Cloud Application 21 | Event Hub, SAP Event Mesh, or another mechanism, is largely irrelevant from a 22 | developer perspective, especially in a local context, where, in addition to an 23 | in-process facility, [file-based messaging] is available and the main focus of 24 | this exercise. 25 | 26 | ## Take a quick look at in-process eventing 27 | 28 | Like [in-process mocking of required services], CAP supports in-process 29 | eventing. It's worth taking a brief look here, not least to have the chance to 30 | practice embracing the cds REPL, and to see how simple things are. 31 | 32 | ### Start the cds REPL 33 | 34 | 👉 Before you begin, stop any CAP server processes for now. Then launch the 35 | REPL: 36 | 37 | ```bash 38 | cds repl 39 | ``` 40 | 41 | 👉 At the prompt, define a simple service with a handler for a 42 | "widget-produced" event: 43 | 44 | ```javascript 45 | (srv = new cds.Service).on('widget-produced', x => console.log('Received:', x)) 46 | ``` 47 | 48 | This should emit the basic `Service` definition just created, which includes 49 | the `on` phase handler: 50 | 51 | ```javascript 52 | Service { 53 | name: 'Service', 54 | options: {}, 55 | handlers: EventHandlers { 56 | _initial: [], 57 | before: [], 58 | on: [ { on: 'widget-produced', handler: [Function (anonymous)] } ], 59 | after: [], 60 | _error: [] 61 | }, 62 | definition: undefined 63 | } 64 | ``` 65 | 66 | 👉 Now emit a few events, with a simple data payload: 67 | 68 | ```javascript 69 | ['small', 'medium', 'large'].forEach(size => srv.emit('widget-produced', { size: size })) 70 | ``` 71 | 72 | Each of the three events emitted reaches the recipient, causing this to be shown: 73 | 74 | ```log 75 | Received: EventMessage { event: 'widget-produced', data: { size: 'small' } } 76 | Received: EventMessage { event: 'widget-produced', data: { size: 'medium' } } 77 | Received: EventMessage { event: 'widget-produced', data: { size: 'large' 
} } 78 | ``` 79 | 80 | This is a simple example of in-process eventing - emission, transmission, 81 | receipt and handling of messages happened in the same process. It can be as 82 | simple as that. 83 | 84 | 👉 Exit the REPL session. 85 | 86 | > For more info on using the cds REPL, see the [Further 87 | > reading](#further-reading) section below. 88 | 89 | ## Explore file-based messaging 90 | 91 | This is what our Ex01Service definition looks like right now, from 92 | `srv/ex01-service.cds`: 93 | 94 | ```cds 95 | using { sap.capire.bookshop as my } from '../db/schema'; 96 | @path: '/ex01' service Ex01Service { 97 | entity Books as projection on my.Books; 98 | } 99 | 100 | annotate Ex01Service with @requires: 'authenticated-user'; 101 | annotate Ex01Service.Books with @restrict: [ 102 | { grant: 'READ' }, 103 | { grant: 'WRITE', to: 'backoffice' } 104 | ]; 105 | ``` 106 | 107 | and `srv/ex01-sales.cds`: 108 | 109 | ```cds 110 | using { cuid } from '@sap/cds/common'; 111 | using { Ex01Service } from './ex01-service'; 112 | 113 | extend service Ex01Service with { 114 | entity Sales : cuid { 115 | date: Date; 116 | book: Association to Ex01Service.Books; 117 | quantity: Integer; 118 | } 119 | } 120 | ``` 121 | 122 | Let's define an event, and emit it. 123 | 124 | ### Switch the classics data back to the default development profile 125 | 126 | Before we start, and to keep things simple, let's switch back the authors, 127 | books and genres data from within the classics profile back to the default, 128 | i.e. 
let's move: 129 | 130 | ```text 131 | db 132 | ├── classics 133 | │ ├── data 134 | │ │ ├── sap.capire.bookshop-Authors.csv 135 | │ │ ├── sap.capire.bookshop-Books.csv 136 | │ │ ├── sap.capire.bookshop-Books_texts.csv 137 | │ │ └── sap.capire.bookshop-Genres.csv 138 | │ └── index.cds 139 | ├── hitchhikers 140 | │ ├── data 141 | │ │ ├── sap.capire.bookshop-Authors.json 142 | │ │ └── sap.capire.bookshop-Books.json 143 | │ └── index.cds 144 | └── schema.cds 145 | ``` 146 | 147 | to be: 148 | 149 | ```text 150 | db 151 | ├── data 152 | │ ├── sap.capire.bookshop-Authors.csv 153 | │ ├── sap.capire.bookshop-Books.csv 154 | │ ├── sap.capire.bookshop-Books_texts.csv 155 | │ └── sap.capire.bookshop-Genres.csv 156 | ├── hitchhikers 157 | │ ├── data 158 | │ │ ├── sap.capire.bookshop-Authors.json 159 | │ │ └── sap.capire.bookshop-Books.json 160 | │ └── index.cds 161 | └── schema.cds 162 | ``` 163 | 164 | 👉 Do this now: 165 | 166 | ```bash 167 | mv db/classics/data/ db/ && rm -rf db/classics/ 168 | ``` 169 | 170 | 👉 Also, to keep things clean, remove the corresponding entry in 171 | `package.json#cds.requires`, and stay in the file as we'll be adding something 172 | in the next part: 173 | 174 | ```text 175 | "cds": { 176 | "requires": { 177 | "db": { 178 | "kind": "sqlite", 179 | "credentials": { 180 | "url": ":memory:" 181 | } 182 | }, 183 | "[classics]": { -+ 184 | "initdata": { | 185 | "model": "db/classics/" | remove this 186 | } | 187 | }, -+ 188 | "[hitchhikers]": { 189 | "initdata": { 190 | "model": "db/hitchhikers/" 191 | } 192 | }, 193 | "northbreeze": { 194 | "kind": "odata", 195 | "model": "srv/external/northbreeze" 196 | } 197 | } 198 | } 199 | ``` 200 | 201 | ### Declare a requirement for file-based messaging 202 | 203 | OK, the first thing we need to do is define a requirement for messaging. And 204 | for our local development scenario, we should use file-based messaging, which 205 | is the default. 
206 | 207 | 👉 Add a `messaging` section within `package.json#cds.requires`, so it looks 208 | like this: 209 | 210 | ```json 211 | "cds": { 212 | "requires": { 213 | "db": { 214 | "kind": "sqlite", 215 | "credentials": { 216 | "url": ":memory:" 217 | } 218 | }, 219 | "[hitchhikers]": { 220 | "initdata": { 221 | "model": "db/hitchhikers/" 222 | } 223 | }, 224 | "northbreeze": { 225 | "kind": "odata", 226 | "model": "srv/external/northbreeze" 227 | }, 228 | "messaging": true 229 | } 230 | } 231 | ``` 232 | 233 | > We could also have been more explicit, specifying the value for `messaging` like this: 234 | > 235 | > ```json 236 | > { 237 | > "messaging": { 238 | > "kind": "file-based-messaging" 239 | > } 240 | > } 241 | > ``` 242 | 243 | ### Define an event 244 | 245 | In the previous exercise we created the "milton" user and gave them the 246 | "backoffice" role which allowed them to perform `WRITE` semantic operations on 247 | books. Let's define an event that should be emitted when a book is deleted. 248 | 249 | 👉 First, declare that by adding a [custom event definition] "bookremoved" to 250 | the service in `srv/ex01-service.cds`: 251 | 252 | ```cds 253 | ... 254 | @path: '/ex01' service Ex01Service { 255 | entity Books as projection on my.Books; 256 | event bookremoved: { ID: Books:ID; } // <--- 257 | } 258 | ... 259 | ``` 260 | 261 | The type structure (`{ ... }`) is deliberately as compact as possible for this 262 | example, designed to convey just the ID of the book that was removed. 263 | 264 | ### Add handler code to emit the event 265 | 266 | Now it's time to define when and how that event should be emitted. Let's start 267 | simple, with a temporary `console.log` statement. 
268 | 269 | 👉 Create `srv/ex01-service.js` with the following content: 270 | 271 | ```javascript 272 | const cds = require('@sap/cds') 273 | 274 | class Ex01Service extends cds.ApplicationService { init() { 275 | 276 | this.after (['DELETE'], 'Books', (_, req) => { 277 | console.log('bookremoved', req.data) 278 | }) 279 | 280 | return super.init() 281 | 282 | }} 283 | 284 | module.exports = Ex01Service 285 | ``` 286 | 287 | This defines an "after" phase handler for DELETE events (yes, let's use the 288 | word "event" here too) relating to the `Books` entity. The signature of an 289 | [after handler] is such that the first parameter is the data relating to the 290 | event, and the second parameter is the request object. In the request object 291 | there's the data; let's have a look at what that is in this context. 292 | 293 | 👉 Start up the CAP server again, like this: 294 | 295 | ```bash 296 | cds w 297 | ``` 298 | 299 | Just out of interest, notice in the log output that instead of the built-in 300 | service implementation: 301 | 302 | ```log 303 | [cds] - serving Ex01Service { 304 | impl: 'node_modules/@sap/cds/libx/_runtime/common/Service.js', 305 | path: '/ex01' 306 | } 307 | ``` 308 | 309 | our new custom implementation is now in play for the Ex01Service: 310 | 311 | ```log 312 | [cds] - serving Ex01Service { impl: 'srv/ex01-service.js', path: '/ex01' } 313 | ``` 314 | 315 | While we're looking at the log output from the CAP server, we can also see that 316 | the "file-based-messaging" channel is active, owing to the `messaging` entry we 317 | added to `package.json#cds.requires`: 318 | 319 | ```log 320 | [cds] - connect to messaging > file-based-messaging 321 | ``` 322 | 323 | 👉 Now, in another terminal session, delete "The Raven": 324 | 325 | ```bash 326 | curl -X DELETE -u milton:dontmovemydesk localhost:4004/ex01/Books/251 327 | ``` 328 | 329 | In the CAP server log, we should see: 330 | 331 | ```log 332 | [odata] - DELETE /ex01/Books/251 333 | 
bookremoved { ID: 251 } 334 | ``` 335 | 336 | Great - the `req.data` is exactly what we need for the event payload. 337 | 338 | 👉 Now adjust the code in the "after" phase handler, changing: 339 | 340 | ```javascript 341 | console.log('bookremoved', req.data) 342 | ``` 343 | 344 | to: 345 | 346 | ```javascript 347 | this.emit('bookremoved', req.data) 348 | ``` 349 | 350 | 👉 At this point, the CAP server should have restarted; if not, give it a nudge 351 | by heading over to the terminal session in which it's running and pressing 352 | Enter. 353 | 354 | ### Start monitoring the message channel 355 | 356 | The message channel we're using here for this local context is [file-based 357 | messaging]. Event messages are stored and queued in a file. Where is that file? 358 | It's called `.cds-msg-box` and sits alongside another local development related 359 | file (`.cds-services.json`) in your home directory. 360 | 361 | > If a CAP file is in your home directory, it's a big clue that it's for local 362 | > development only. 363 | 364 | 👉 In a separate terminal session, ensure the file exists and start monitoring 365 | the contents: 366 | 367 | ```bash 368 | touch ~/.cds-msg-box \ 369 | && tail -f $_ 370 | ``` 371 | 372 | ### Delete a book 373 | 374 | The moment of truth is upon us! 375 | 376 | 👉 In the terminal session where you recently deleted "The Raven", bring up the 377 | command again and resend the request: 378 | 379 | > Because the CAP server is running with an in-memory SQLite database, we 380 | > benefit from the book data being restored on each restart. 
381 | 382 | ```bash 383 | curl -X DELETE -u milton:dontmovemydesk localhost:4004/ex01/Books/251 384 | ``` 385 | 386 | ### Observe the event message 387 | 388 | While the CAP server log emits the usual: 389 | 390 | ```log 391 | [odata] - DELETE /ex01/Books/251 392 | ``` 393 | 394 | we also now see that something has been written to our file-based messaging 395 | store: 396 | 397 | ```log 398 | Ex01Service.bookremoved {"data":{"ID":251},"headers":{"x-correlation-id":"c72e2a47-2faf-4bcb-9f54-37ebb2ca88a6"}} 399 | ``` 400 | 401 | But what happens now? How is such a message subsequently received? We'll find 402 | out with a more comprehensive example in the next exercise where we also learn 403 | how to manage a larger scale CAP project, with independent services, locally. 404 | 405 | --- 406 | 407 | ## Further reading 408 | 409 | - [Level up your CAP skills by learning how to use the cds REPL] 410 | 411 | --- 412 | 413 | [Next exercise](../05) 414 | 415 | --- 416 | 417 | [The Art and Science of CAP]: https://qmacro.org/blog/posts/2024/12/06/the-art-and-science-of-cap/ 418 | [Everything is a service]: https://qmacro.org/blog/posts/2024/12/10/tasc-notes-part-4/#everything-is-a-service 419 | [Everything is an event]: https://qmacro.org/blog/posts/2024/11/07/five-reasons-to-use-cap/#:~:text=Everything%20is%20an%20event 420 | [events and messaging]: https://cap.cloud.sap/docs/guides/messaging/ 421 | [in-process mocking of required services]: ../03/README.md#footnote-1 422 | [file-based messaging]: https://cap.cloud.sap/docs/guides/messaging/#_1-use-file-based-messaging-in-development 423 | [custom event definition]: https://cap.cloud.sap/docs/cds/cdl#events 424 | [after handler]: https://cap.cloud.sap/docs/node.js/core-services#srv-after-request 425 | [Level up your CAP skills by learning how to use the cds REPL]: https://qmacro.org/blog/posts/2025/03/21/level-up-your-cap-skills-by-learning-how-to-use-the-cds-repl/ 426 | 
-------------------------------------------------------------------------------- /exercises/06/README.md: -------------------------------------------------------------------------------- 1 | # Exercise 06 - debugging local and remote servers 2 | 3 | The [Debugging] section of Capire tells us that with `cds debug` we can "_debug 4 | applications running locally or remotely on SAP BTP Cloud Foundry. Local 5 | applications will be started in debug mode, while (already running) remote 6 | applications are put into debug mode._". 7 | 8 | The benefit of using the same procedure and in fact the same debugging tools 9 | regardless of whether the CAP server is local or remote is enormous. We can use 10 | our local development tools and the same techniques, and while connected to a 11 | remote server we can still remain "local" in our minds. 12 | 13 | In this exercise we'll create a simple CAP project, run and debug it locally, 14 | then deploy it to Cloud Foundry (CF), connect to it and debug it remotely. 15 | 16 | ## Initialize a new CAP Node.js project 17 | 18 | For this topic we'll limit ourselves to a simple CAP service. 19 | 20 | 👉 In a new terminal session window, initialize a new CAP Node.js project 21 | "debugtest" in the workshop root directory: 22 | 23 | ```bash 24 | cd $HOME/projects/cap-local-development-workshop/ \ 25 | && cds init --add tiny-sample debugtest \ 26 | && cd $_ \ 27 | && npm install 28 | ``` 29 | 30 | The `tiny-sample` facet brings in a super small books based service with a 31 | couple of data records, and it relies on the built-in service implementation as 32 | there are no JavaScript files alongside the service definition, as we can see: 33 | 34 | ```bash 35 | ; tree -I node_modules 36 | . 
37 | ├── app 38 | ├── db 39 | │ ├── data 40 | │ │ └── my.bookshop-Books.csv 41 | │ └── schema.cds 42 | ├── eslint.config.mjs 43 | ├── package.json 44 | ├── README.md 45 | └── srv 46 | └── cat-service.cds 47 | ``` 48 | 49 | 👉 So that we have something simple to which we can attach a breakpoint when 50 | debugging, add a `srv/cat-service.js` file with this content: 51 | 52 | ```javascript 53 | const cds = require('@sap/cds') 54 | module.exports = cds.service.impl(function() { 55 | this.after('each', 'Books', book => { 56 | console.log(book) 57 | }) 58 | }) 59 | ``` 60 | 61 | When debugging, we'll set a breakpoint on the `console.log(book)` line shortly. 62 | 63 | ## Debug locally 64 | 65 | We're already all set - it's straightforward. 66 | 67 | However, as [mentioned in the first 68 | exercise](../00#set-up-working-environment), if you're using a GitHub codespace 69 | or a Business Application Studio dev space, at this point you won't be able to 70 | avail yourself of the `cds debug` facilities as they (deliberately) make use of 71 | localised websocket connections. See [footnote 1](#footnote-1) for a quick 72 | summary of an alternative. 73 | 74 | ### Start the CAP server in debug mode 75 | 76 | 👉 Start debugging the service with `cds debug`, which as we'll see from the 77 | output is just shorthand for `cds watch --debug`: 78 | 79 | ```bash 80 | cds debug 81 | ``` 82 | 83 | The output contains information relevant for our debugging session, but 84 | otherwise the CAP server is operating pretty much the same way as it does 85 | normally when started with `cds watch`: 86 | 87 | ```log 88 | Starting 'cds watch --debug' 89 | 90 | cds serve all --with-mocks --in-memory? 91 | ( live reload enabled for browsers ) 92 | 93 | ___________________________ 94 | 95 | Debugger listening on ws://127.0.0.1:9229/2f95339d-33e9-4b40-9e24-461d1a75cc6c 96 | For help, see: https://nodejs.org/en/docs/inspector 97 | ... 
98 | 99 | [cds] - serving CatalogService { impl: 'srv/cat-service.js', path: '/odata/v4/catalog' } 100 | 101 | [cds] - server listening on { url: 'http://localhost:4004' } 102 | ``` 103 | 104 | The difference is that the Node.js process has been started with the 105 | `--inspect` option. See the link to the Node.js debugging content in the 106 | [Further reading](#further-reading) section. 107 | 108 | Note the websocket address given: `ws://127.0.0.1:9229/`. 109 | 110 | 👉 That port 9229 is the default for Node.js debugging connections; let's see 111 | for ourselves, in another terminal session, that it's listening for 112 | connections: 113 | 114 | ```bash 115 | netstat -atn | grep LISTEN 116 | ``` 117 | 118 | This should elicit output like this: 119 | 120 | ```log 121 | tcp 0 0 127.0.0.1:33791 0.0.0.0:* LISTEN 122 | tcp 0 0 127.0.0.1:9229 0.0.0.0:* LISTEN 123 | tcp 0 0 0.0.0.0:2222 0.0.0.0:* LISTEN 124 | tcp6 0 0 :::4004 :::* LISTEN 125 | tcp6 0 0 :::2222 :::* LISTEN 126 | tcp6 0 0 ::1:35729 :::* LISTEN 127 | ``` 128 | 129 | In this list we can see the `127.0.0.1:9229` socket, as well as the 130 | `:4004` representing the socket which is ready to accept 131 | incoming connections to the OData service served by the CAP server. 132 | 133 | > An IP address or hostname combined with a port number is commonly referred to 134 | > as a "socket". 135 | 136 | ### Start a debugging client 137 | 138 | There are various debugging clients generally but the "classic" for this type 139 | of Node.js debugging is the Chrome Developer Tools, specifically the 140 | "Inspector". 141 | 142 | 👉 In a new tab in your Chrome (or Chromium) browser, go to address 143 | `chrome://inspect` where you should see something like this: 144 | 145 | ![Chrome DevTools Inspector showing a remote process debug 146 | target](assets/devtools-remote-target-list.png) 147 | 148 | This shows a single debugging target ready to be attached to and inspected. 149 | From the detail we can see it's our CAP server. 
150 | 151 | > Don't be thrown by the fact that this target is listed in the "Remote Target" 152 | > section; from the DevTools Inspector point of view, all these websocket based 153 | > targets are "remote". 154 | 155 | ### Attach the inspector to the target 156 | 157 | 👉 Select the "inspect" link next to the target. 158 | 159 | You should see this in the CAP server log: 160 | 161 | ```log 162 | Debugger attached. 163 | ``` 164 | 165 | You should also be presented with an Inspector window that, if you've used 166 | DevTools in Chrome before, should be familiar: 167 | 168 | ![Chrome DevTools Inspector attached](assets/devtools-inspector-attached.png) 169 | 170 | 👉 Initially your window may not look like what you see in this screenshot; 171 | follow these steps to get there: 172 | 173 | - In the "Scripts" section (top left), drill down the `file://` tree to find 174 | and select `cat-service.js`. 175 | - Once the `cat-service.js` content is displayed, select the `console.log` 176 | line with the line number to set a breakpoint there (this is indicated with 177 | the blue arrow). 178 | - Press the Esc key to bring up the Console drawer in the bottom half, where 179 | you'll see the CAP server log output, and also have a prompt (`>`) which is 180 | effectively a regular DevTools console but in the context of the running 181 | process being debugged. 182 | 183 | ### Make a request for the books data 184 | 185 | Beyond having an inspector attachment, the CAP server is otherwise running 186 | normally. So we can make requests as normal too. 187 | 188 | 👉 Make a request for the books data by visiting 189 | <http://localhost:4004/odata/v4/catalog/Books>. 190 | 191 | The response will not immediately be returned, as the processing should have 192 | paused at the breakpoint. 
193 | 194 | 👉 Switch over to the Inspector and explore; if you're stuck for something to 195 | do, use the prompt in the Console to change the title of the book(s) being 196 | returned, with e.g.: 197 | 198 | ```javascript 199 | book.title = "my test book 1" 200 | ``` 201 | 202 | > Note that this is in the context of a handler in the "after" phase, 203 | > specifically with the special "each" event, so the breakpoint will be visited 204 | > twice, once for each book record being served. See the link in the [Further 205 | > reading](#further-reading) section below for more on this. 206 | 207 | 👉 Use the controls in the Inspector to control the debugging flow: 208 | 209 | ![controls in the Inspector](assets/devtools-controls.png) 210 | 211 | ## Push to Cloud Foundry 212 | 213 | Now we have done a little bit of debugging with a locally running CAP server, 214 | let's push it to CF so we can try out debugging a remotely deployed instance. 215 | We'll push it in the simplest possible way, keeping the machinery to a minimum 216 | and retaining the "local" nature of the server, to avoid getting distracted 217 | with making it production-ready. 218 | 219 | ### Log in with the cf CLI 220 | 221 | We'll use the `cf` CLI to make the push to CF. 222 | 223 | 👉 Use the credentials and API endpoint that you have been given in this 224 | workshop to log in. 
225 | 226 | If you have an active trial account on the SAP Business Technology Platform 227 | (BTP), with a CF environment instance, then you can of course use that instead; 228 | the API endpoint is indicated in the CF environment section of your trial 229 | account overview in the cockpit, as shown here: 230 | 231 | ![BTP trial account showing CF environment instance 232 | details](assets/btp-trial-cf-environment.png) 233 | 234 | You can use your SAP BTP trial account's username (email address) and password, 235 | but we recommend you use the `--sso` option to enable you to use your already 236 | signed-in status in the browser to get a code to authenticate with; the flow 237 | looks like this: 238 | 239 | ```log 240 | ; cf login -a https://api.cf.us10-001.hana.ondemand.com --sso 241 | API endpoint: https://api.cf.us10-001.hana.ondemand.com 242 | 243 | Temporary Authentication Code ( Get one at https://login.cf.us10-001.hana.ondemand.com/passcode ): 244 | Authenticating... 245 | OK 246 | 247 | 248 | Targeted org 013e7c57trial. 249 | 250 | Targeted space dev. 251 | 252 | API endpoint: https://api.cf.us10-001.hana.ondemand.com 253 | API version: 3.194.0 254 | user: dj.adams@sap.com 255 | org: 013e7c57trial 256 | space: dev 257 | ``` 258 | 259 | ### Push the app 260 | 261 | Once you're logged in, you can push the app. Let's do it in stages. 262 | 263 | > Notice the terminology change slightly here - while we've been talking and 264 | > thinking about what we have here as a service, provided by a running CAP 265 | > server, from a CF perspective though it's an "app" ... as opposed to a 266 | > (backing) "service". 267 | 268 | 👉 Determine your specific workshop identifier, which you'll be allocated and 269 | should use as a suffix to the app name on CF. The identifier will most likely 270 | be a number; in the following illustrations, we'll assume the identifier is 271 | `000`. 
272 | 273 | 👉 First, push the app using the most basic form, specifying that it shouldn't 274 | yet be started (because we have a few settings we want to make before that 275 | happens): 276 | 277 | ```bash 278 | cf push debugtest-000 -m 256M --no-start 279 | ``` 280 | 281 | 👉 Now set "non-production" values for a couple of the app's environment 282 | variables (so that we can avoid the "productive" requirements such as using a 283 | production-ready DB like SAP HANA, and avoid having to configure and wire up a 284 | connection to a real authorization service): 285 | 286 | ```bash 287 | cf set-env debugtest-000 NPM_CONFIG_PRODUCTION false 288 | cf set-env debugtest-000 NODE_ENV testing 289 | ``` 290 | 291 | > See the link to NPM config in the [Further reading](#further-reading) section 292 | > for more information. 293 | 294 | At this point we'd be ready to start the app up in the Cloud Foundry 295 | environment. But there's one thing we need to do specifically to be able to 296 | debug it. 297 | 298 | ### Enable SSH for the app 299 | 300 | In order for us to connect to the app when it's being inspected, we need to be 301 | able to connect remotely to the websocket. That connection is achieved by means of 302 | an [SSH tunnel], i.e. a connection carried through a secure (remote) shell 303 | (SSH) session. 304 | 305 | We need to ensure that we can make such SSH connections to the app, and CF 306 | allows [access to apps via SSH]. It's likely that SSH is by default not enabled 307 | for the app that we've just pushed; we can check, like this: 308 | 309 | ```bash 310 | ; cf ssh-enabled debugtest-000 311 | ssh support is disabled for app 'debugtest-000'. 312 | ssh is disabled for app 313 | ``` 314 | 315 | 👉 Enable SSH for the app now, like this: 316 | 317 | ```bash 318 | cf enable-ssh debugtest-000 319 | ``` 320 | 321 | This will emit something like this: 322 | 323 | ```log 324 | Enabling ssh support for app debugtest-000 as dj.adams@sap.com... 
325 | OK 326 | 327 | TIP: An app restart is required for the change to take effect. 328 | ``` 329 | 330 | We haven't yet started the app, so a start now will effect the SSH enablement 331 | too. 332 | 333 | ### Start the app 334 | 335 | 👉 Do that now, like this: 336 | 337 | ```bash 338 | cf start debugtest-000 339 | ``` 340 | 341 | There's a lot of output that occurs, but as this exercise is not about CF, 342 | we'll just briefly show what you should see as a sign of success: 343 | 344 | ```log 345 | Starting app debugtest-000 in org 013e7c57trial / space dev as dj.adams@sap.com... 346 | 347 | Waiting for app to start... 348 | 349 | Instances starting... 350 | 351 | name: debugtest-000 352 | requested state: started 353 | isolation segment: trial 354 | routes: debugtest-000.cfapps.us10-001.hana.ondemand.com 355 | last uploaded: Wed 02 Jul 06:31:39 UTC 2025 356 | stack: cflinuxfs4 357 | buildpacks: 358 | isolation segment: trial 359 | name version detect output buildpack name 360 | nodejs_buildpack 1.8.38 nodejs nodejs 361 | 362 | type: web 363 | sidecars: 364 | instances: 1/1 365 | memory usage: 256M 366 | state since cpu memory disk logging cpu entitlement details 367 | #0 running 2025-07-02T06:31:53Z 0.0% 0B of 0B 0B of 0B 0B/s of 0B/s 0.0% 368 | ``` 369 | 370 | ## Debug remotely 371 | 372 | The moment of truth has arrived. 373 | 374 | 👉 Use the same `cds debug` command as earlier, but this time specify the app 375 | name: 376 | 377 | ```bash 378 | cds debug debugtest-000 379 | ``` 380 | 381 | This time, the output is slightly different: 382 | 383 | ```log 384 | Found process of type node, ID: 256 385 | 386 | Opening SSH tunnel on 9229:127.0.0.1:9229 387 | Opening Chrome DevTools at devtools://devtools/bundled/inspector.html?ws=localhost:9229/16af26a1-064e-4994-8b3a-97f29780e61e 388 | 389 | > Now attach a debugger to port 9229. 390 | > Keep this terminal open while debugging. 391 | > See https://cap.cloud.sap/docs/tools/cds-cli#cds-debug for more. 392 | ... 
393 | ``` 394 | 395 | You can see that an SSH tunnel has been established, with this description: 396 | `9229:127.0.0.1:9229`. This means that the `cds debug` mechanism has made an 397 | SSH connection to the app on CF, and used SSH's tunnel facility to create a 398 | tunnel, between port 9229 on the "local" host (that's the first `9229` in the 399 | description string) and port 9229, listening on 127.0.0.1 on the "remote" host 400 | (that's the `127.0.0.1:9229` in the description string). The upshot of this is 401 | that if a connection is made to "local" port 9229, it's forwarded through the 402 | tunnel to port 9229 on the "remote" host, which is the host where our debugtest 403 | app is running. 404 | 405 | This means that we can use our DevTools just like before, and connect just like 406 | before as well, as though the app being inspected were "local". 407 | 408 | Follow the same procedure as we did earlier in [Start a debugging 409 | client](#start-a-debugging-client) and debug just like you did before too. 410 | 411 | It's (almost) magic! 412 | 413 | --- 414 | 415 | ## Further reading 416 | 417 | - The [Debugging] topic in Capire 418 | - [Node.js debugging] 419 | - [Debugging JavaScript with Chrome DevTools] 420 | - [Method: srv.after(request)] 421 | - [NPM config] 422 | 423 | --- 424 | 425 | ## Footnotes 426 | 427 | ### Footnote 1 428 | 429 | Debugging in a codespace or dev space can be achieved in the context of a CAP 430 | Node.js process using the debugging tools built in to the environment. Here's 431 | how to proceed. 
432 | 433 | First, re-open the space at the `debugtest/` directory, with the "File -> Open 434 | Folder" menu option, specifying 435 | `/workspaces/cap-local-development-workshop/debugtest/` as the location: 436 | 437 | ![the Open Folder dialog](assets/open-folder-dialog.png) 438 | 439 | Then select the "Debugging" perspective (via the icon on the left hand side, 440 | highlighted with a "1" in blue in the screenshot below), and within that 441 | perspective, open the `srv/cat-service.js` file, set a breakpoint on the 442 | `console.log` line, and then select the "run" icon for the "cds serve" 443 | debugging configuration in the top left: 444 | 445 | ![the debugging perspective](assets/space-debugging-facility.png) 446 | 447 | [Debugging]: https://cap.cloud.sap/docs/tools/cds-cli#cds-debug 448 | [Node.js debugging]: https://nodejs.org/en/learn/getting-started/debugging 449 | [Debugging JavaScript with Chrome DevTools]: https://developer.chrome.com/docs/devtools/javascript 450 | [Method: srv.after(request)]: https://cap.cloud.sap/docs/node.js/core-services#srv-after-request 451 | [NPM config]: https://docs.npmjs.com/cli/v9/using-npm/config 452 | [SSH tunnel]: https://www.ssh.com/academy/ssh/tunneling 453 | [access to apps via SSH]: https://docs.cloudfoundry.org/devguide/deploy-apps/ssh-apps.html 454 | -------------------------------------------------------------------------------- /exercises/05/README.md: -------------------------------------------------------------------------------- 1 | # Exercise 05 - workspaces, monorepos and more on messaging and events 2 | 3 | As the late English Renaissance poet John Donne penned: [No man is an island]. 4 | And no CAP project is an island, either. While what we can create based on an 5 | invocation of `cds init` is enough for a standalone CAP service, it likely does 6 | not ... stand alone. 
7 | 8 | In Node.js the [workspaces] concept in NPM is often used to manage dependencies 9 | and supporting or related projects in the form of NPM packages. Think of 10 | workspaces as the next step up from the lower level [npm-link] concept. A key 11 | feature of workspace-based organization of related projects (packages) is that 12 | the development and use of those related projects ... can be done locally, that 13 | is without the need to involve the NPM package registry. 14 | 15 | ## Explore a simple workspace example for a CDS plugin package 16 | 17 | CAP Node.js typically uses the workspace concept for the development of [CDS 18 | plugin packages]. There's a simple example of this in [CAP Node.js plugins - 19 | part 1 - how things work], which is worth exploring here first. 20 | 21 | 👉 We are done with the project we've been working on thus far, so first stop 22 | any running CAP servers and close any terminal and editor sessions. 23 | 24 | ### Initialize a new CAP Node.js project 25 | 26 | 👉 In a new terminal session window, initialize a new CAP Node.js project 27 | "plugintest" in the workshop root directory: 28 | 29 | ```bash 30 | cd $HOME/projects/cap-local-development-workshop/ \ 31 | && cds init plugintest && cd $_ 32 | ``` 33 | 34 | 👉 Examine the contents of `package.json`: 35 | 36 | ```bash 37 | jq . package.json 38 | ``` 39 | 40 | The contents should look like this: 41 | 42 | ```json 43 | { 44 | "name": "plugintest", 45 | "version": "1.0.0", 46 | "description": "A simple CAP project.", 47 | "repository": "", 48 | "license": "UNLICENSED", 49 | "private": true, 50 | "dependencies": { 51 | "@sap/cds": "^9", 52 | "express": "^4" 53 | }, 54 | "engines": { 55 | "node": ">=20" 56 | }, 57 | "devDependencies": { 58 | "@cap-js/sqlite": "^2", 59 | "@cap-js/cds-types": "^0.11.0" 60 | }, 61 | "scripts": { 62 | "start": "cds-serve" 63 | } 64 | } 65 | ``` 66 | 67 | So far so good. At this point let's imagine we're going to start working on a 68 | new plugin. 
69 | 70 | ### Create the plugin package 71 | 72 | 👉 Create the plugin package using the workspace concept: 73 | 74 | ```bash 75 | npm init -y --workspace myplugin 76 | ``` 77 | 78 | The output will typically look something like this: 79 | 80 | ```log 81 | Wrote to /tmp/plugintest/myplugin/package.json: 82 | 83 | { 84 | "name": "myplugin", 85 | "version": "1.0.0", 86 | "description": "", 87 | "main": "index.js", 88 | "scripts": { 89 | "test": "echo \"Error: no test specified\" && exit 1" 90 | }, 91 | "keywords": [], 92 | "author": "", 93 | "license": "ISC", 94 | "type": "commonjs" 95 | } 96 | 97 | added 128 packages in 1m 98 | ``` 99 | 100 | ### Add the dependency 101 | 102 | 👉 We can now declare a dependency, that our CAP project `plugintest` relies 103 | upon this new `myplugin` package. Do that now (remembering we're still in the 104 | top level `plugintest/` directory): 105 | 106 | ```bash 107 | npm add myplugin 108 | ``` 109 | 110 | ### Examine the package structure and relationship 111 | 112 | Let's look at what we have now in terms of structure and declaration. 113 | 114 | 👉 First, check the contents of `plugintest`'s `package.json` file again: 115 | 116 | ```bash 117 | jq . 
package.json 118 | ``` 119 | 120 | ```json 121 | { 122 | "name": "plugintest", 123 | "version": "1.0.0", 124 | "description": "A simple CAP project.", 125 | "repository": "", 126 | "license": "UNLICENSED", 127 | "private": true, 128 | "dependencies": { 129 | "@sap/cds": "^9", 130 | "express": "^4", 131 | "myplugin": "^1.0.0" 132 | }, 133 | "engines": { 134 | "node": ">=20" 135 | }, 136 | "devDependencies": { 137 | "@cap-js/cds-types": "^0.11.0", 138 | "@cap-js/sqlite": "^2" 139 | }, 140 | "scripts": { 141 | "start": "cds-serve" 142 | }, 143 | "workspaces": [ 144 | "myplugin" 145 | ] 146 | } 147 | ``` 148 | 149 | We now have: 150 | 151 | - a new `workspaces` section listing the `myplugin` package location (from the 152 | `npm init -y --workspace myplugin`) 153 | - an entry in the `dependencies` section for `myplugin` (from the `npm add 154 | myplugin`) 155 | 156 | 👉 Now check the overall filesystem structure, limiting the directory hierarchy 157 | levels displayed (with `-L`): 158 | 159 | ```bash 160 | tree -L 2 161 | ``` 162 | 163 | This should show us (heavily reduced): 164 | 165 | ```log 166 | . 167 | ├── README.md 168 | ├── app 169 | ├── db 170 | ├── eslint.config.mjs 171 | ├── myplugin 172 | │ └── package.json 173 | ├── node_modules 174 | │ ├── @cap-js 175 | │ ├── @eslint 176 | │ ├── @sap 177 | │ ├── @types 178 | │ ├── accepts 179 | │ ├── ... 180 | │ ├── myplugin -> ../myplugin 181 | │ └── ... 182 | ├── package-lock.json 183 | ├── package.json 184 | └── srv 185 | ``` 186 | 187 | Notice how the dependency (`plugintest` -> `myplugin`) is resolved via a 188 | _symbolic link_ to the workspace. 189 | 190 | ### Add a basic plugin hook 191 | 192 | Just to complete this short exploration, let's create a basic plugin hook and 193 | start the `plugintest` CAP server. 194 | 195 | 👉 Add this to a new file `myplugin/cds-plugin.js`: 196 | 197 | ```javascript 198 | console.log('Hello from myplugin') 199 | ``` 200 | 201 | > The `cds-plugin.js` name is [special]. 
202 | 203 | 👉 Now start the server: 204 | 205 | ```bash 206 | cds w 207 | ``` 208 | 209 | whereupon you should see that the dependency is indeed realised (and also that 210 | the CDS plugin concept is really that simple): 211 | 212 | ```log 213 | cds serve all --with-mocks --in-memory? 214 | ( live reload enabled for browsers ) 215 | 216 | ___________________________ 217 | 218 | Hello from myplugin 219 | 220 | No models found in db/,srv/,app/,schema,services. 221 | Waiting for some to arrive... 222 | ``` 223 | 224 | 👉 At this point we're done with this section, so delete this `plugintest/` 225 | directory: 226 | 227 | ```bash 228 | cd .. && rm -rf plugintest/ 229 | ``` 230 | 231 | ## Explore monorepos powered by git submodules and NPM workspaces 232 | 233 | Now we understand the basics of NPM workspaces, let's dig in further and add in 234 | the concept of [git submodules], which we can think of as a source code control 235 | system relation of NPM workspaces. 236 | 237 | With git submodules, not only can we organize packages in workspaces, but also 238 | manage their sources with git, and combine a project and its dependencies into 239 | a single repository called a "monorepo" (where "mono" is short for 240 | "monolithic", which itself means "single [usually large] stone"). 241 | 242 | The [Microservices with CAP] topic in Capire covers this concept as a basis for 243 | the construction and management of a set of related and interdependent 244 | microservices, and has an example of a "deconstructed" version of the classic 245 | [cloud-cap-samples] monorepo. In this section we'll build up that monorepo 246 | manually using workspaces and pulling in individual git repositories as 247 | submodules. 248 | 249 | ### Create the top level project 250 | 251 | The individual repository versions of CAP projects within the 252 | [cloud-cap-samples] monorepo are all to be found in the GitHub organization at 253 | , "the home for CAP samples and reference apps". 
254 | 255 | We'll create a top level project to be the "head" of the monorepo itself, and 256 | bring in individual CAP Node.js project repositories as git submodules, 257 | organizing them using the NPM workspace concept. 258 | 259 | 👉 Do that now in the simplest possible way: 260 | 261 | ```bash 262 | cd $HOME/projects/cap-local-development-workshop/ \ 263 | && mkdir capire \ 264 | && cd $_ \ 265 | && jq -n '{name: "@capire/samples", workspaces: ["*"]}' > package.json \ 266 | && git init -b main \ 267 | && printf "node_modules\ngen\n" > .gitignore 268 | ``` 269 | 270 | This is [the simplest thing that could possibly work] - a basic NPM project 271 | with just a name (in the form of a namespaced NPM package name) and a 272 | declaration allowing any subdirectory to be a workspace (that is, contain a 273 | dependent or related project package). 274 | 275 | ### Add the individual interdependent projects 276 | 277 | The project has already been initialized from a git perspective, so we can now 278 | bring in the individual projects as submodules, and manage them locally in 279 | separate NPM workspaces. 280 | 281 | 👉 Do that now: 282 | 283 | ```bash 284 | for project in bookstore reviews orders common bookshop data-viewer; do 285 | git submodule add "https://github.com/capire/$project" 286 | done 287 | git submodule update --init 288 | ``` 289 | 290 | ### Install, wire up and inspect the dependencies 291 | 292 | At this point there are multiple projects, as git submodule controlled NPM 293 | packages, each with their own `package.json` file, in the monorepo. While there 294 | are no dependencies declared in the top-level `package.json` file (there isn't 295 | even a `dependencies` section), the individual projects have dependencies. 296 | Let's examine the "bookstore" project's dependencies. 
297 | 298 | 👉 Look at the `dependencies` list for the "bookstore" project: 299 | 300 | ```bash 301 | jq .dependencies bookstore/package.json 302 | ``` 303 | 304 | We can see that this depends on (amongst other things) other peer projects in 305 | this monorepo: 306 | 307 | ```json 308 | { 309 | "@capire/bookshop": "*", 310 | "@capire/reviews": "*", 311 | "@capire/orders": "*", 312 | "@capire/common": "*", 313 | "@capire/data-viewer": "*", 314 | "@sap-cloud-sdk/http-client": "^4", 315 | "@sap-cloud-sdk/resilience": "^4", 316 | "@sap/cds": ">=5", 317 | "express": "^4.17.1", 318 | "@cap-js/hana": ">=1" 319 | } 320 | ``` 321 | 322 | At this point we're ready to have dependencies installed, so do that now (while 323 | still within the project root `capire/` directory): 324 | 325 | ```bash 326 | npm install 327 | ``` 328 | 329 | 👉 Now take a look at where packages have been installed: 330 | 331 | ```bash 332 | find . -type d -name node_modules 333 | ``` 334 | 335 | This shows us that there is only a single `node_modules/` directory (at this 336 | level) for the entire monorepo: 337 | 338 | ```log 339 | ./node_modules 340 | ./node_modules/color/node_modules 341 | ./node_modules/body-parser/node_modules 342 | ./node_modules/send/node_modules 343 | ./node_modules/send/node_modules/debug/node_modules 344 | ./node_modules/express/node_modules 345 | ./node_modules/finalhandler/node_modules 346 | ``` 347 | 348 | Nothing has been installed in any of the project workspace directories. Indeed, 349 | part of the workspace concept enables the workspace projects to "look upwards" 350 | to the monorepo project root for what they need. 351 | 352 | In the light of this, what's in the monorepo project's root `node_modules/` 353 | directory? 
354 | 355 | 👉 Let's take a look, with: 356 | 357 | ```bash 358 | tree -L 2 node_modules | more 359 | ``` 360 | 361 | This shows us something now familiar: 362 | 363 | ```log 364 | node_modules 365 | ├── @cap-js 366 | │ ├── db-service 367 | │ ├── hana 368 | │ └── sqlite 369 | ├── @capire 370 | │ ├── bookshop -> ../../bookshop 371 | │ ├── bookstore -> ../../bookstore 372 | │ ├── common -> ../../common 373 | │ ├── data-viewer -> ../../data-viewer 374 | │ ├── orders -> ../../orders 375 | │ └── reviews -> ../../reviews 376 | ├── @colors 377 | │ └── colors 378 | ├── @dabh 379 | │ └── diagnostics 380 | ├── @eslint 381 | │ └── js 382 | ├── @sap 383 | │ ├── cds 384 | ... 385 | ``` 386 | 387 | The dependencies to the various workspace project packages in the `@capire` 388 | namespace (`bookshop`, `bookstore`, etc) are realized ... _via symbolic links_. 389 | 390 | This is a great way to organize interdependent projects (such as those in 391 | composite applications or in microservices scenarios), especially in a local 392 | development context. 393 | 394 | 👉 Keep this new `capire/` project setup as we'll use it in the next section. 395 | 396 | ## Produce and consume an event message 397 | 398 | At the end of the previous exercise we had seen what an event message looks 399 | like "in the pipe", i.e. in the default `~/.cds-msg-box` file in the context of 400 | the file-based messaging channel. 401 | 402 | To round out this exploration of monorepos powered by NPM workspaces and git 403 | submodules for local development, let's fire up a couple of the projects in our 404 | monorepo here, and have them communicate asynchronously, also using the 405 | file-based messaging channel. 406 | 407 | ### Clean out any current in-flight event messages 408 | 409 | 👉 To keep things tidy and clean up any in-flight event messages in our 410 | file-based messaging channel, let's remove the default `~/.cds-msg-box` file. 
411 | It will get recreated when required: 412 | 413 | ```bash 414 | rm ~/.cds-msg-box 415 | ``` 416 | 417 | ### Start the reviews project 418 | 419 | 👉 Now start the "reviews" project, from the monorepo root, with: 420 | 421 | ```bash 422 | cds w reviews 423 | ``` 424 | 425 | The "reviews" project provides a `ReviewsService` and also has a GUI served 426 | from the `app/` directory, available via the `/vue` link exposed on the [start 427 | page]; because of the setting in an `.env` file in the "reviews" project's root 428 | directory the CAP server listens on port 4005: 429 | 430 | ![The start page for the "reviews" project, with one Web Application at /vue 431 | and one service endpoint at /reviews](assets/reviews-start-page.png) 432 | 433 | The log shows us that the "reviews" project uses the file-based messaging 434 | channel: 435 | 436 | ```log 437 | [cds] - connect to messaging > file-based-messaging 438 | ``` 439 | 440 | and at this point the `~/.cds-msg-box` file is created anew. 441 | 442 | ### Add a review 443 | 444 | 👉 In the GUI at (you'll need to 445 | authenticate, use one of the [pre-defined test users] we learned about in a 446 | previous exercise, say, "bob"), add a review, and then check both the CAP 447 | server log, and the contents of the `~/.cds-msg-box` file. 448 | 449 | The log should show something like this: 450 | 451 | ```log 452 | [odata] - PATCH /reviews/Reviews/1689144d-3b10-4849-bcbe-2408a13e161d 453 | < emitting: reviewed { subject: '201', count: 2, rating: 4.5 } 454 | ``` 455 | 456 | And there should be a corresponding event message record written to the 457 | `~/.cds-msg-box` file, something like this: 458 | 459 | ```log 460 | ReviewsService.reviewed {"data":{"subject":"201","count":2,"rating":4.5},"headers":{"x-correlation-id":"6ae4fb7a-884e-4da4-9b39-f0c174c096a4"}} 461 | ``` 462 | 463 | This brings us to the stage that is the equivalent of where we were at the end 464 | of the previous exercise. 
465 | 466 | ### Examine the bookstore project and its requirements 467 | 468 | 👉 In another terminal session, move into this new `capire/` project directory 469 | and examine the "bookstore" project's CAP requirements, defined in 470 | `bookstore/package.json#cds.requires`: 471 | 472 | ```bash 473 | cd $HOME/projects/cap-local-development-workshop/capire/ \ 474 | && jq .cds.requires bookstore/package.json 475 | ``` 476 | 477 | > We could of course use `cds env requires` from within the `bookstore/` 478 | > directory too. 479 | 480 | This shows us that amonst other things, the "bookstore" project relies upon 481 | (will need to connect to) the `ReviewsService`: 482 | 483 | ```json 484 | { 485 | "ReviewsService": { 486 | "kind": "odata", 487 | "model": "@capire/reviews" 488 | }, 489 | "OrdersService": { 490 | "kind": "odata", 491 | "model": "@capire/orders" 492 | }, 493 | "messaging": true, 494 | "db": true 495 | } 496 | ``` 497 | 498 | Notice how the `model` references are specified, in NPM package name form. 499 | 500 | From an earlier exercise we know that the `~/.cds-services.json` file acts as a 501 | local binding registry, a sort of "stock exchange" for required and provided 502 | services. 503 | 504 | 👉 Take a look at what's currently in that file: 505 | 506 | ```bash 507 | jq . ~/.cds-services.json 508 | ``` 509 | 510 | It should show that there indeed is a `ReviewsService` being provided: 511 | 512 | ```json 513 | { 514 | "cds": { 515 | "provides": { 516 | "ReviewsService": { 517 | "kind": "odata", 518 | "credentials": { 519 | "url": "http://localhost:4005/reviews" 520 | }, 521 | "server": 74989 522 | } 523 | }, 524 | "servers": { 525 | "74989": { 526 | "root": "file:///tmp/capire/reviews", 527 | "url": "http://localhost:4005" 528 | } 529 | } 530 | } 531 | } 532 | ``` 533 | 534 | So we can work out what's probably going to happen next. 
535 | 536 | ### Start the bookstore project 537 | 538 | 👉 Start this "bookstore" project up, again from the monorepo project root, 539 | with: 540 | 541 | ```bash 542 | cds w bookstore 543 | ``` 544 | 545 | Here are some lines from the log output: 546 | 547 | ```log 548 | [cds] - connect using bindings from: { registry: '~/.cds-services.json' } 549 | ... 550 | [cds] - connect to messaging > file-based-messaging 551 | ... 552 | [cds] - connect to ReviewsService > odata { url: 'http://localhost:4005/reviews' } 553 | ... 554 | 555 | > received: reviewed { subject: '201', count: 2, rating: 4.5 } 556 | ``` 557 | 558 | This tells us: 559 | 560 | - it looked in the `~/.cds-services.json` registry (and therefore found that 561 | the `ReviewsService`, which it requires, is available, and knows how to get 562 | to it) 563 | - it also uses the file-based messaging channel 564 | - it successfully marshalled the information needed to be able to make calls to 565 | the `ReviewsService` 566 | 567 | > We should refrain from saying "it successfully connected to ..." because at 568 | > this point no connection has yet been attempted. 569 | 570 | It also shows us that it received an event message! 571 | 572 | 👉 Take a look at the contents of `~/.cds-msg-box` now: 573 | 574 | ```bash 575 | cat ~/.cds-msg-box 576 | ``` 577 | 578 | It's empty! The "bookstore" project consumed the event message. 579 | 580 | Well done. 581 | 582 | 👉 Stop the CAP servers and close all but one of the terminals.
583 | 584 | --- 585 | 586 | ## Further reading 587 | 588 | - [CAP Node.js Plugins], a three part blog post series 589 | - [Reusable components for CAP with cds-plugin] 590 | - [Microservices with CAP] 591 | 592 | --- 593 | 594 | [Next exercise](../06) 595 | 596 | --- 597 | 598 | [No man is an island]: https://allpoetry.com/No-man-is-an-island 599 | [workspaces]: https://docs.npmjs.com/cli/v7/using-npm/workspaces 600 | [CDS plugin packages]: https://cap.cloud.sap/docs/node.js/cds-plugins 601 | [CAP Node.js plugins - part 1 - how things work]: https://qmacro.org/blog/posts/2024/10/05/cap-node.js-plugins-part-1-how-things-work/ 602 | [CAP Node.js Plugins]: https://qmacro.org/blog/posts/2024/12/30/cap-node.js-plugins/ 603 | [Reusable components for CAP with cds-plugin]: https://community.sap.com/t5/technology-blog-posts-by-sap/reusable-components-for-cap-with-cds-plugin/ba-p/13562077 604 | [git submodules]: https://git-scm.com/book/en/v2/Git-Tools-Submodules 605 | [Microservices with CAP]: https://cap.cloud.sap/docs/guides/deployment/microservices 606 | [cloud-cap-samples]: https://github.com/SAP-samples/cloud-cap-samples 607 | [the simplest thing that could possibly work]: https://creators.spotify.com/pod/profile/tech-aloud/episodes/The-Simplest-Thing-that-Could-Possibly-Work--A-conversation-with-Ward-Cunningham--Part-V---Bill-Venners-e5dpts 608 | [npm-link]: https://docs.npmjs.com/cli/v9/commands/npm-link 609 | [special]: https://cap.cloud.sap/docs/node.js/cds-plugins#add-a-cds-plugin-js 610 | [start page]: http://localhost:4005/ 611 | [pre-defined test users]: https://cap.cloud.sap/docs/node.js/authentication#mock-users 612 | -------------------------------------------------------------------------------- /exercises/02/README.md: -------------------------------------------------------------------------------- 1 | # Exercise 02 - configuration profiles, more on initial data, and the cds REPL 2 | 3 | The [profile] concept is a great way to organize different 
collections of 4 | configuration. There are some built-in profiles named "production", 5 | "development" and (in one particular context) "hybrid" but we are otherwise 6 | free to use profiles in whatever way we choose. They can help us manage our 7 | local development in many ways; in this exercise we'll extend our look at 8 | initial data and use that to explore profiles. We'll also take a first look at 9 | the cds REPL. 10 | 11 | > Throughout this exercise keep the `cds watch` process running and in its own 12 | > terminal instance; if necessary, open a second terminal to run any other 13 | > commands you need, so you've always got the `cds watch` process running and 14 | > visible. 15 | 16 | ## Modify the data organization 17 | 18 | From the previous exercise, here's what we have. The initial and sample data 19 | looks like this: 20 | 21 | ```text 22 | db 23 | ├── data 24 | │ ├── sap.capire.bookshop-Authors.csv 25 | │ ├── sap.capire.bookshop-Books.csv 26 | │ ├── sap.capire.bookshop-Books_texts.csv 27 | │ └── sap.capire.bookshop-Genres.csv 28 | └── schema.cds 29 | test 30 | └── data 31 | └── Ex01Service.Sales.csv 32 | ``` 33 | 34 | > You can generate this with `tree db test`. 35 | 36 | And the `package.json#cds.requires.db` section, implicitly reflecting the 37 | persistent file `db.sqlite`, looks like this: 38 | 39 | ```json 40 | "cds": { 41 | "requires": { 42 | "db": { 43 | "kind": "sqlite" 44 | } 45 | } 46 | } 47 | ``` 48 | 49 | ### Remove the sample data and switch back to in-memory 50 | 51 | 👉 To keep things simple and keep "noise" to a minimum, remove the sample data 52 | entirely and re-deploy, as we don't need it any more: 53 | 54 | ```bash 55 | rm -rf test/ \ 56 | && cds deploy 57 | ``` 58 | 59 | Now we should switch back to in-memory, mostly so we can more comfortably and 60 | immediately see the effects of what we're going to do in this next section. 61 | 62 | There are different ways we can switch back to in-memory. 
Here are a few: 63 | 64 | We could just remove the current `package.json#cds.requires.db` configuration 65 | entirely. 66 | 67 | We could add an explicit `credentials` section to the current 68 | `package.json#cds.requires.db` configuration, reflecting explicitly the 69 | implicit default configuration: 70 | 71 | ```json 72 | "cds": { 73 | "requires": { 74 | "db": { 75 | "kind": "sqlite", 76 | "credentials": { 77 | "url": ":memory:" 78 | } 79 | } 80 | } 81 | } 82 | ``` 83 | 84 | > This is where having such an explicit section might have been helpful, as 85 | > mentioned in the note in the [Deploy to a persistent file] section of the 86 | > previous exercise. 87 | 88 | We could invoke `cds serve all` using the `--in-memory` option without the 89 | trailing question mark. 90 | 91 | 👉 Let's go for the option of adding an explicit `credentials` section; edit 92 | the configuration in `package.json` so it looks like the sample just above. 93 | Then make sure the CAP server has restarted (you may want to nudge it with 94 | Enter). 95 | 96 | When the CAP server restarts it emits some familiar log lines: 97 | 98 | ```log 99 | [cds] - connect to db > sqlite { url: ':memory:' } 100 | > init from db/data/sap.capire.bookshop-Genres.csv 101 | > init from db/data/sap.capire.bookshop-Books_texts.csv 102 | > init from db/data/sap.capire.bookshop-Books.csv 103 | > init from db/data/sap.capire.bookshop-Authors.csv 104 | /> successfully deployed to in-memory database. 105 | ``` 106 | 107 | This reminds us that data is being loaded from CSV files in `db/data/`, 108 | according to convention. 109 | 110 | ### Maintain a separate initial data collection 111 | 112 | Sometimes it's useful to maintain and use different starting sets of initial 113 | data. You can manage this with the combination of convention (the mechanism 114 | looks for `data/` directories directly within the `db/`, `srv/` and `app/` 115 | directories and any other referenced locations) and the [profile] concept.
116 | Let's try this out. 117 | 118 | OK. The name of the `data/` directory is special (see [footnote 119 | 1](#footnote-1)), and its relative location is also special; if we move it to 120 | somewhere else, the files containing the initial data won't get picked up 121 | automatically. 122 | 123 | 👉 Let's try that now: 124 | 125 | ```bash 126 | mkdir db/classics/ \ 127 | && mv db/data/ db/classics/ 128 | ``` 129 | 130 | At this point, the log output from the restarted CAP server doesn't show any 131 | "init from ..." lines, because no initial data was found ... in the expected / 132 | default location(s). 133 | 134 | But we can tell the CAP server about this "classics" initial data collection 135 | and assign a name to it, in the form of a [profile]. 136 | 137 | 👉 Let's do that now, by adding a new node (note the square brackets round 138 | "classics") to `package.json#cds.requires` so that it looks like this: 139 | 140 | ```json 141 | "cds": { 142 | "requires": { 143 | "db": { 144 | "kind": "sqlite", 145 | "credentials": { 146 | "url": ":memory:" 147 | } 148 | }, 149 | "[classics]": { 150 | "initdata": { 151 | "model": "db/classics/" 152 | } 153 | } 154 | } 155 | } 156 | ``` 157 | 158 | > The "initdata" name is arbitrary - see [footnote 2](#footnote-2). 159 | 160 | This doesn't have any positive effect yet; we need a couple more things. 
161 | 162 | 👉 First, we need to add an empty CDS model file in the form of `index.cds` to 163 | the new `db/classics/` directory; this is so the CDS model compiler 164 | acknowledges this new "classics" directory and treats it as part of the model, 165 | including any initial data loading requirements: 166 | 167 | ```bash 168 | touch db/classics/index.cds 169 | ``` 170 | 171 | 👉 Now we need to actually go to the CAP server, stop it (with Ctrl-C) and 172 | restart it, specifying this new "classics" name as a profile: 173 | 174 | ```bash 175 | cds w --profile classics 176 | ``` 177 | 178 | [Lo and behold], the initial data in the CSV files in `db/classics/data/` is 179 | now loaded! 180 | 181 | ```log 182 | > init from db/classics/data/sap.capire.bookshop-Genres.csv 183 | > init from db/classics/data/sap.capire.bookshop-Books_texts.csv 184 | > init from db/classics/data/sap.capire.bookshop-Books.csv 185 | > init from db/classics/data/sap.capire.bookshop-Authors.csv 186 | ``` 187 | 188 | ### Add a second initial data collection 189 | 190 | To illustrate this technique more fully, let's add a second initial data 191 | collection in a similar way. In this workshop repository are a couple of data 192 | files for the author Douglas Adams and the books in his (increasingly 193 | inaccurately named) [Hitchhiker's Guide To The Galaxy] trilogy. 194 | 195 | 👉 Create a new "hitchhikers" directory, copy the data in (from this exercise's 196 | [assets/] directory) and create an empty `index.cds` file in there too, like 197 | this: 198 | 199 | ```bash 200 | mkdir -p db/hitchhikers/data/ \ 201 | && cp ../exercises/02/assets/data/json/* "$_" \ 202 | && touch db/hitchhikers/index.cds 203 | ``` 204 | 205 | > Note that this time, just to illustrate the possibility, the data files are 206 | > JSON not CSV. This works too, but JSON files are only supported in 207 | > development mode. See [footnote 3](#footnote-3) for why JSON might be a 208 | > useful choice sometimes. 
209 | 210 | At this point the `db/` directory should look like this (you can check yourself 211 | with `tree db` if you want): 212 | 213 | ```text 214 | db 215 | ├── classics 216 | │ ├── data 217 | │ │ ├── sap.capire.bookshop-Authors.csv 218 | │ │ ├── sap.capire.bookshop-Books.csv 219 | │ │ ├── sap.capire.bookshop-Books_texts.csv 220 | │ │ └── sap.capire.bookshop-Genres.csv 221 | │ └── index.cds 222 | ├── hitchhikers 223 | │ ├── data 224 | │ │ ├── sap.capire.bookshop-Authors.json 225 | │ │ └── sap.capire.bookshop-Books.json 226 | │ └── index.cds 227 | └── schema.cds 228 | ``` 229 | 230 | It's now time to define a further stanza in `package.json#cds.requires` to 231 | "require" (effectively _include_) this new "hitchhikers" model too. Like the 232 | "classics" model earlier, there isn't actually any more [CDL] to load, but the 233 | very fact that there's an `index.cds` there, even an empty one, will cause the 234 | data loading mechanism to scoop up anything in any conventional relative 235 | `data/` locations. 
236 | 237 | 👉 Add this stanza next to the "classics" one so that the entire `cds` stanza 238 | looks like this: 239 | 240 | ```json 241 | "cds": { 242 | "requires": { 243 | "db": { 244 | "kind": "sqlite", 245 | "credentials": { 246 | "url": ":memory:" 247 | } 248 | }, 249 | "[classics]": { 250 | "initdata": { 251 | "model": "db/classics/" 252 | } 253 | }, 254 | "[hitchhikers]": { 255 | "initdata": { 256 | "model": "db/hitchhikers/" 257 | } 258 | } 259 | } 260 | } 261 | ``` 262 | 263 | 👉 Now restart the CAP server using the "hitchhikers" profile: 264 | 265 | ```bash 266 | cds w --profile hitchhikers 267 | ``` 268 | 269 | and enjoy a different initial data set: 270 | 271 | ```log 272 | > init from db/hitchhikers/data/sap.capire.bookshop-Books.json 273 | > init from db/hitchhikers/data/sap.capire.bookshop-Authors.json 274 | ``` 275 | 276 | Naturally this also works the same way when deploying, for example: 277 | 278 | ```bash 279 | cds deploy --to sqlite:hitchhikers.db --profile hitchhikers 280 | ``` 281 | 282 | which results in: 283 | 284 | ```log 285 | > init from db/hitchhikers/data/sap.capire.bookshop-Books.json 286 | > init from db/hitchhikers/data/sap.capire.bookshop-Authors.json 287 | /> successfully deployed to hitchhikers.db 288 | ``` 289 | 290 | At this point, only the novels in Douglas Adams' "trilogy" are in that 291 | `hitchhikers.db` file: 292 | 293 | ```log 294 | ; sqlite3 hitchhikers.db 'select title from sap_capire_bookshop_Books;' 295 | The Hitchhiker's Guide to the Galaxy 296 | The Restaurant at the End of the Universe 297 | Life, the Universe and Everything 298 | So Long, and Thanks for All the Fish 299 | Mostly Harmless 300 | And Another Thing... 301 | ``` 302 | 303 | ## Use the cds REPL to explore path expression features with SQLite 304 | 305 | Using SQLite for local development doesn't mean sacrificing database features. 
306 | The new database services, including the one for SQLite, offer a common set of 307 | [features] including all kinds of [path expressions & filters]. This is a good 308 | opportunity to try some of these out, directly, interactively, with the [cds 309 | REPL]. 310 | 311 | 312 | 313 | Before we continue, we need to add the [@cap-js/cds-test] package, which will 314 | make it easy for us to have the REPL start CAP servers for us. Add it now as a 315 | local development dependency: 316 | 317 | ```bash 318 | npm add -D @cap-js/cds-test 319 | ``` 320 | 321 | 👉 Start up the REPL in its basic form, specifying the "classics" profile (so 322 | the initial data in `db/classics/data/` is loaded): 323 | 324 | ```bash 325 | cds repl --profile classics 326 | ``` 327 | 328 | This should present a simple prompt that looks like this: 329 | 330 | ```text 331 | Welcome to cds repl v 9.0.4 332 | > 333 | ``` 334 | 335 | 👉 At the prompt, ask for help with `.help`, which should show: 336 | 337 | ```log 338 | .break Sometimes you get stuck, this gets you out 339 | .clear Alias for .break 340 | .editor Enter editor mode 341 | .exit Exit the REPL 342 | .help Print this help message 343 | .inspect Sets options for util.inspect, e.g. `.inspect .depth=1`. 344 | .load Load JS from a file into the REPL session 345 | .run Runs a cds server from a given CAP project folder, or module name like @capire/bookshop. 
346 | .save Save all evaluated commands in this REPL session to a file 347 | 348 | Press Ctrl+C to abort current expression, Ctrl+D to exit the REPL 349 | ``` 350 | 351 | 👉 Get a taste of what's possible and available by using the cds specific 352 | command `.inspect` to look at the entire CDS facade at a high level, noting 353 | that much of it is [lazily loaded]: 354 | 355 | ```text 356 | .inspect cds .depth=0 357 | ``` 358 | 359 | This should show the current top level properties of the facade, like this: 360 | 361 | ```log 362 | cds: cds_facade { 363 | _events: [Object: null prototype], 364 | _eventsCount: 2, 365 | _maxListeners: undefined, 366 | model: undefined, 367 | db: undefined, 368 | cli: [Object], 369 | root: '/work/scratch/myproj', 370 | services: {}, 371 | extend: [Function (anonymous)], 372 | version: '9.0.4', 373 | builtin: [Object], 374 | service: [Function], 375 | log: [Function], 376 | parse: [Function], 377 | home: '/work/scratch/myproj/node_modules/@sap/cds', 378 | env: [Config], 379 | requires: {}, 380 | Symbol(shapeMode): false, 381 | Symbol(kCapture): false 382 | } 383 | ``` 384 | 385 | Certain properties in the facade such as `model` and `db` are still 386 | `undefined`, because there is no model loaded, no CAP server running and 387 | providing services. 388 | 389 | 👉 Let's change that situation now, and use the `.run` command to comfortably 390 | start a CAP server in the context of the REPL, for the current project (pointed 391 | to by the `.` symbol, the normal representation for "current directory"): 392 | 393 | ```text 394 | .run . 395 | ``` 396 | 397 | > You can also use the `--run` option when you invoke `cds repl` to have a CAP 398 | > server started up in the REPL context immediately. 
399 | 400 | This should emit the usual CAP server startup log lines, plus something like 401 | this: 402 | 403 | ```log 404 | Following variables are made available in your repl's global context: 405 | 406 | from cds.entities: { 407 | Books, 408 | Authors, 409 | Genres, 410 | } 411 | 412 | from cds.services: { 413 | db, 414 | AdminService, 415 | CatalogService, 416 | Ex01Service, 417 | } 418 | 419 | Simply type e.g. Ex01Service in the prompt to use the respective objects. 420 | ``` 421 | 422 | We [defined our Ex01Service in the simplest way], but we can see in the REPL 423 | that there are already plenty of handlers attached. 424 | 425 | 👉 Have a look at them with `.inspect Ex01Service.handlers`: 426 | 427 | ```text 428 | .inspect Ex01Service.handlers 429 | ``` 430 | 431 | This should show handlers, grouped by phase, for the service; it's the ones in 432 | the `on` phase that provide the built-in handling for CRUD operations: 433 | 434 | ```text 435 | Ex01Service.handlers: EventHandlers { 436 | _initial: [ 437 | { 438 | before: '*', 439 | handler: [Function: check_service_level_restrictions] 440 | }, 441 | { before: '*', handler: [Function: check_auth_privileges] }, 442 | { before: '*', handler: [Function: check_readonly] }, 443 | { before: '*', handler: [Function: check_insertonly] }, 444 | { before: '*', handler: [Function: check_odata_constraints] }, 445 | { before: '*', handler: [Function: check_autoexposed] }, 446 | { before: '*', handler: [AsyncFunction: enforce_auth] }, 447 | { before: 'READ', handler: [Function: restrict_expand] }, 448 | { before: 'CREATE', handler: [AsyncFunction: validate_input] }, 449 | { before: 'UPDATE', handler: [AsyncFunction: validate_input] }, 450 | { before: 'NEW', handler: [AsyncFunction: validate_input] }, 451 | { before: 'READ', handler: [Function: handle_paging] }, 452 | { before: 'READ', handler: [Function: handle_sorting] } 453 | ], 454 | before: [], 455 | on: [ 456 | { on: 'CREATE', handler: [AsyncFunction: 
handle_crud_requests] }, 457 | { on: 'READ', handler: [AsyncFunction: handle_crud_requests] }, 458 | { on: 'UPDATE', handler: [AsyncFunction: handle_crud_requests] }, 459 | { on: 'DELETE', handler: [AsyncFunction: handle_crud_requests] }, 460 | { on: 'UPSERT', handler: [AsyncFunction: handle_crud_requests] } 461 | ], 462 | after: [], 463 | _error: [] 464 | } 465 | ``` 466 | 467 | Now it's time to try one of those [CQL] path expressions with infix filters 468 | that are supported by all the new database services (including SQLite, HANA and 469 | Postgres). 470 | 471 | At the REPL prompt, declare a query like this: 472 | 473 | ```text 474 | yorkshireBooks = SELECT `from ${Books}:author[placeOfBirth like '%Yorkshire%'] {placeOfBirth, books.title as book, name as author }` 475 | ``` 476 | 477 | This should emit the query object that results: 478 | 479 | ```text 480 | cds.ql { 481 | SELECT: { 482 | from: { 483 | ref: [ 484 | 'sap.capire.bookshop.Books', 485 | { 486 | id: 'author', 487 | where: [ 488 | { ref: [ 'placeOfBirth' ] }, 489 | 'like', 490 | { val: '%Yorkshire%' } 491 | ] 492 | } 493 | ] 494 | }, 495 | columns: [ 496 | { ref: [ 'placeOfBirth' ] }, 497 | { ref: [ 'books', 'title' ], as: 'book' }, 498 | { ref: [ 'name' ], as: 'author' } 499 | ] 500 | } 501 | } 502 | ``` 503 | 504 | As well as [stare at] it for a bit, we can also execute it (technically: send 505 | it to the default database service). Do that now: 506 | 507 | ```text 508 | await yorkshireBooks 509 | ``` 510 | 511 | It should return the books from the two Brontë sisters: 512 | 513 | ```text 514 | [ 515 | { 516 | placeOfBirth: 'Thornton, Yorkshire', 517 | book: 'Wuthering Heights', 518 | author: 'Emily Brontë' 519 | }, 520 | { 521 | placeOfBirth: 'Thornton, Yorkshire', 522 | book: 'Jane Eyre', 523 | author: 'Charlotte Brontë' 524 | } 525 | ] 526 | ``` 527 | 528 | There's plenty more to explore - see the links in the [Further 529 | reading](#further-reading) section below. 
530 | 531 | 👉 When you're done, exit the REPL. 532 | 533 | --- 534 | 535 | ## Further reading 536 | 537 | - [Level up your CAP skills by learning how to use the cds 538 | REPL](https://qmacro.org/blog/posts/2025/03/21/level-up-your-cap-skills-by-learning-how-to-use-the-cds-repl/) 539 | - [A reCAP intro to the cds 540 | REPL](https://qmacro.org/blog/posts/2025/07/21/a-recap-intro-to-the-cds-repl/) 541 | - [The Art and Science of 542 | CAP](https://qmacro.org/blog/posts/2024/12/06/the-art-and-science-of-cap/) 543 | 544 | --- 545 | 546 | [Next exercise](../03) 547 | 548 | --- 549 | 550 | ## Footnotes 551 | 552 | 553 | ### Footnote 1 554 | 555 | The name can also be `csv/` which is also "special". 556 | 557 | 558 | ### Footnote 2 559 | 560 | The name `initdata` could indeed be anything. It could just as well be 561 | `banana`, and things would work the same. If we were to replace the name: 562 | 563 | ```bash 564 | sed -i 's/initdata/banana/' package.json \ 565 | && jq .cds package.json \ 566 | && cds env get requires --keys --profile classics 567 | ``` 568 | 569 | giving: 570 | 571 | ```json 572 | { 573 | "requires": { 574 | "db": { 575 | "kind": "sqlite", 576 | "credentials": { 577 | "url": ":memory:" 578 | } 579 | }, 580 | "[classics]": { 581 | "banana": { 582 | "model": "db/classics/" 583 | } 584 | } 585 | } 586 | } 587 | ``` 588 | 589 | and then were to retrieve the "requires" section of the effective configuraion: 590 | 591 | ```bash 592 | cds env get requires \ 593 | --json \ 594 | --profile classics 595 | ``` 596 | 597 | then "banana" would appear, all fine (output reduced for brevity): 598 | 599 | ```json 600 | { 601 | "middlewares": true, 602 | "queue": { 603 | "...": {} 604 | }, 605 | "auth": { 606 | "...": {} 607 | }, 608 | "db": { 609 | "impl": "@cap-js/sqlite", 610 | "credentials": { 611 | "url": ":memory:" 612 | }, 613 | "kind": "sqlite" 614 | }, 615 | "banana": { 616 | "model": "db/classics/" 617 | } 618 | } 619 | ``` 620 | 621 | 622 | ### Footnote 3 
623 | 624 | JSON may be a useful choice for the format of initial data if you're creating 625 | it for a mocked version of an external service where the representations of 626 | data are also in JSON, such as OData entitysets. 627 | 628 | In fact we'll be doing exactly this in the next exercise. 629 | 630 | ```bash 631 | curl -s https://developer-challenge.cfapps.eu10.hana.ondemand.com/odata/v4/northbreeze/Products \ 632 | | jq .value \ 633 | > sap.capire.northbreeze-Products.json 634 | ``` 635 | 636 | [profile]: https://cap.cloud.sap/docs/node.js/cds-env#profiles 637 | [Hitchhiker's Guide To The Galaxy]: https://en.wikipedia.org/wiki/The_Hitchhiker%27s_Guide_to_the_Galaxy 638 | [Deploy to a persistent file]: ../01/README.md#deploy-to-a-persistent-file 639 | [assets/]: assets/ 640 | [CDL]: https://cap.cloud.sap/docs/cds/cdl 641 | [features]: https://cap.cloud.sap/docs/guides/databases-sqlite#features 642 | [path expressions & filters]: https://cap.cloud.sap/docs/guides/databases-sqlite#path-expressions-filters 643 | [cds REPL]: https://cap.cloud.sap/docs/tools/cds-cli#cds-repl 644 | [@cap-js/cds-test]: https://github.com/cap-js/cds-test 645 | [lazily loaded]: https://qmacro.org/blog/posts/2024/12/10/tasc-notes-part-4/#lazy-loading-of-the-cds-facades-many-features 646 | [defined our Ex01Service in the simplest way]: ../01/README.md#add-a-new-service-definition 647 | [CQL]: https://cap.cloud.sap/docs/cds/cql 648 | [stare at]: https://qmacro.org/blog/posts/2017/02/19/the-beauty-of-recursion-and-list-machinery/#initial-recognition 649 | [Lo and behold]: https://en.wikipedia.org/wiki/Lo_and_Behold 650 | -------------------------------------------------------------------------------- /exercises/01/README.md: -------------------------------------------------------------------------------- 1 | # Exercise 01 - cds watch, SQLite, initial data and sample data 2 | 3 | SQLite shines in local development environments and allows for the tightest 4 | feedback loop. 
It's no second class database system either, as you'll see; via 5 | the modern `@cap-js/sqlite` database service implementation it provides full 6 | support for all kinds of CQL constructions such as path expressions (see the 7 | [Further reading](#further-reading) section for more info). And with the 8 | [command line shell for SQLite], it's easy to interact with locally and 9 | natively. Along with one of CAP's great features for local development and 10 | fast bootstrapping - the ability to [provide initial data] - it's a combination 11 | that's hard to beat. 12 | 13 | In this exercise you'll explore the facilities on offer in this space, using 14 | the sample project you created at the end of the previous exercise. The sample 15 | project is a "bookshop" style affair with authors, books and genres as the main 16 | players. 17 | 18 | > Throughout this exercise keep the `cds watch` process from the previous 19 | > exercise running and in its own terminal instance; if necessary, open a 20 | > second terminal and move to the `myproj/` project root directory (with `cd 21 | > $HOME/projects/cap-local-development-workshop/myproj/`) to run any other 22 | > commands you need, so you've always got the CAP server running and the log 23 | > output visible. 24 | 25 | ## Add a new service definition 26 | 27 | To illustrate the simple power of `cds watch` plus the ultimate [developer 28 | friendly version of no-code] (the code is in the framework, not anything that 29 | you have to write yourself or even generate as boilerplate), add a new service 30 | definition to expose the books in a straightforward (non-administrative) way to 31 | keep things simple (see [footnote 1](#footnote-1)).
32 | 33 | 👉 Add the following to a new file `srv/ex01-service.cds`: 34 | 35 | ```cds 36 | using {sap.capire.bookshop as my} from '../db/schema'; 37 | 38 | @path: '/ex01' 39 | service Ex01Service { 40 | entity Books as projection on my.Books; 41 | } 42 | ``` 43 | 44 | When you save the file the CAP server process restarts automatically, and you 45 | should notice a few things. The service definition is gathered up in the 46 | collection of CDS model sources: 47 | 48 | ```log 49 | [cds] - loaded model from 10 file(s): 50 | 51 | srv/ex01-service.cds 52 | ... 53 | ``` 54 | 55 | Also, it is automatically made available via the (default) OData adapter too: 56 | 57 | ```log 58 | [cds] - serving Ex01Service { 59 | impl: 'node_modules/@sap/cds/libx/_runtime/common/Service.js', 60 | path: '/ex01' 61 | } 62 | ``` 63 | 64 | > It's worth pausing here to reflect on this; while not specifically a "local 65 | > development" facility, the fact that we get CRUD+Q handled completely and 66 | > automatically for us for any service like this, without writing a line of 67 | > code, is [insanely great]. 68 | 69 | In addition, there is some loading of data into an in-memory database: 70 | 71 | ```log 72 | [cds] - connect to db > sqlite { url: ':memory:' } 73 | > init from db/data/sap.capire.bookshop-Genres.csv 74 | > init from db/data/sap.capire.bookshop-Books_texts.csv 75 | > init from db/data/sap.capire.bookshop-Books.csv 76 | > init from db/data/sap.capire.bookshop-Authors.csv 77 | /> successfully deployed to in-memory database. 
78 | ``` 79 | 80 | ## Dig in to the SQLite storage 81 | 82 | 👉 Inspect the books data via a QUERY operation on the corresponding entityset 83 | in the new OData service that is now available, like this: 84 | 85 | ```bash 86 | curl -s localhost:4004/ex01/Books \ 87 | | jq -r '.value|map([.ID, .title])[]|@tsv' 88 | ``` 89 | 90 | This should emit: 91 | 92 | ```text 93 | 201 Wuthering Heights 94 | 207 Jane Eyre 95 | 251 The Raven 96 | 252 Eleonora 97 | 271 Catweazle 98 | ``` 99 | 100 | ### Experience the default in-memory mode 101 | 102 | From the CAP server log output we observed that there's a SQLite powered 103 | in-memory database in play. Let's see how that affects things. 104 | 105 | 👉 Banish "The Raven": 106 | 107 | ```bash 108 | curl -X DELETE localhost:4004/ex01/Books/251 109 | ``` 110 | 111 | and you can check with the previous `curl` invocation that it's really gone. 112 | 113 | 👉 Move to the terminal where the CAP server is running and hit Enter, which 114 | will cause it to restart. 115 | 116 | As the default mode for the use of SQLite, with no explicit configuration, is 117 | in-memory (see [footnote 2](#footnote-2)), deployment of the initial data to 118 | the in-memory SQLite database is redone: 119 | 120 | ```log 121 | [cds] - connect to db > sqlite { url: ':memory:' } 122 | > init from db/data/sap.capire.bookshop-Genres.csv 123 | > init from db/data/sap.capire.bookshop-Books_texts.csv 124 | > init from db/data/sap.capire.bookshop-Books.csv 125 | > init from db/data/sap.capire.bookshop-Authors.csv 126 | /> successfully deployed to in-memory database. 127 | ``` 128 | 129 | and "The Raven" is back (check with the previous `curl` invocation again) ... 130 | no doubt to [continue repeating the word "Nevermore"]. 
131 | 132 | 👉 Check this default configuration with [cds env]: 133 | 134 | ```bash 135 | cds env requires.db 136 | ``` 137 | 138 | which should return something like this, reflecting the implicit out-of-the-box 139 | default for development (see [footnote 5](#footnote-5)): 140 | 141 | ```text 142 | { 143 | impl: '@cap-js/sqlite', 144 | credentials: { url: ':memory:' }, 145 | kind: 'sqlite' 146 | } 147 | ``` 148 | 149 | Note that there is no `cds` section within `package.json` at this point; this 150 | really is a built-in default. 151 | 152 | ### Deploy to a persistent file 153 | 154 | We can also use a persistent database file, useful if we want the outcome of 155 | our OData requests to persist across CAP server restarts. 156 | 157 | 👉 Use: 158 | 159 | ```bash 160 | cds deploy --to sqlite 161 | ``` 162 | 163 | to deploy the CDS model, and the initial data, to a file whose name defaults to 164 | `db.sqlite` (see [footnote 3](#footnote-3)). 165 | 166 | 👉 Nudge the CAP server to restart as before (move to the terminal where it's 167 | running and hit Enter) ... and notice that nothing has changed: 168 | 169 | ```log 170 | [cds] - connect to db > sqlite { url: ':memory:' } 171 | ``` 172 | 173 | That's because we need to explicitly configure this setup. 174 | 175 | 👉 So let's do that now, by adding this to `package.json`: 176 | 177 | ```json 178 | "cds": { 179 | "requires": { 180 | "db": { 181 | "kind": "sqlite" 182 | } 183 | } 184 | } 185 | ``` 186 | 187 | > We could have specified a custom name for the database file, such as 188 | > `bookshop.db`, but why fight CAP's wonderful [convention over configuration]? 
189 | > Incidentally, we would have had to specify the custom name within a 190 | > `credentials` section of what we've just added: 191 | > 192 | > ```json 193 | > "cds": { 194 | > "requires": { 195 | > "db": { 196 | > "kind": "sqlite", 197 | > "credentials": { 198 | > "url": "bookshop.db" 199 | > } 200 | > } 201 | > } 202 | > } 203 | > ``` 204 | > 205 | > That's not to say that we can't be explicit here even with the default 206 | > filename: 207 | > 208 | > ```json 209 | > "cds": { 210 | > "requires": { 211 | > "db": { 212 | > "kind": "sqlite", 213 | > "credentials": { 214 | > "url": "db.sqlite" 215 | > } 216 | > } 217 | > } 218 | > } 219 | > ``` 220 | > 221 | > As we'll see in the next exercise, this comes in handy sometimes! 222 | 223 | 👉 While we're here thinking about configuration and the [cds env], let's check 224 | that same node now: 225 | 226 | ```bash 227 | cds env requires.db 228 | ``` 229 | 230 | The output should reflect what we've added, replacing the earlier default. 231 | Specifically the value for `url` is different, going from `:memory:` to 232 | `db.sqlite`: 233 | 234 | ```text 235 | { 236 | impl: '@cap-js/sqlite', 237 | credentials: { url: 'db.sqlite' }, 238 | kind: 'sqlite' 239 | } 240 | ``` 241 | 242 | And when the CAP server is restarted you should see something like this: 243 | 244 | ```log 245 | [cds] - connect to db > sqlite { url: 'db.sqlite' } 246 | ``` 247 | 248 | Notice too what you _don't_ see - there are no "init from ..." log lines now as 249 | there is no deployment (which is the mechanism to which this data loading 250 | belongs) to be done - the CAP server will not try and deploy to something 251 | that's persistent like our database file, without our say so. 252 | 253 | > That's not to say there isn't anything in `db.sqlite` yet - did you notice 254 | > the "init from ..." lines when you executed the `cds deploy` command just 255 | > now? 
256 | 257 | Now when you make modifications to the data, the modifications persist within 258 | the database file, across CAP server restarts too of course. 259 | 260 | ### Work directly at the database layer with the SQLite CLI 261 | 262 | Now that we have some data (and the schema into which it fits) that we can look 263 | at, let's do that. The database engine is local, the database is local, so 264 | everything is at hand. 265 | 266 | The `sqlite3` executable is known as SQLite's "command line shell" as it offers 267 | a prompt-based environment where we can explore (see [footnote 268 | 4](#footnote-4)). 269 | 270 | 👉 Invoke the executable, specifying our database file: 271 | 272 | ```bash 273 | sqlite3 db.sqlite 274 | ``` 275 | 276 | This should land us in the command line shell: 277 | 278 | ```text 279 | SQLite version 3.40.1 2022-12-28 14:03:47 280 | Enter ".help" for usage hints. 281 | sqlite> 282 | ``` 283 | 284 | How about looking at what artifacts are in there? 285 | 286 | 👉 Try the `.tables` command: 287 | 288 | ```text 289 | sqlite> .tables 290 | AdminService_Authors cds_outbox_Messages 291 | AdminService_Books localized_AdminService_Books 292 | AdminService_Books_drafts localized_AdminService_Currencies 293 | AdminService_Books_texts localized_AdminService_Genres 294 | AdminService_Books_texts_drafts localized_AdminService_Languages 295 | AdminService_Currencies localized_CatalogService_Books 296 | AdminService_Currencies_texts localized_CatalogService_Currencies 297 | AdminService_DraftAdministrativeData localized_CatalogService_Genres 298 | AdminService_Genres localized_CatalogService_ListOfBooks 299 | AdminService_Genres_texts localized_Ex01Service_Books 300 | AdminService_Languages localized_Ex01Service_Currencies 301 | AdminService_Languages_texts localized_Ex01Service_Genres 302 | CatalogService_Books localized_sap_capire_bookshop_Books 303 | CatalogService_Books_texts localized_sap_capire_bookshop_Genres 304 | CatalogService_Currencies 
localized_sap_common_Currencies 305 | CatalogService_Currencies_texts localized_sap_common_Languages 306 | CatalogService_Genres sap_capire_bookshop_Authors 307 | CatalogService_Genres_texts sap_capire_bookshop_Books 308 | CatalogService_ListOfBooks sap_capire_bookshop_Books_texts 309 | DRAFT_DraftAdministrativeData sap_capire_bookshop_Genres 310 | Ex01Service_Books sap_capire_bookshop_Genres_texts 311 | Ex01Service_Books_texts sap_common_Currencies 312 | Ex01Service_Currencies sap_common_Currencies_texts 313 | Ex01Service_Currencies_texts sap_common_Languages 314 | Ex01Service_Genres sap_common_Languages_texts 315 | Ex01Service_Genres_texts 316 | ``` 317 | 318 | Knowing that - at the Data Definition Language ([DDL]) layer - the CDS model 319 | consists predominantly of tables and views, we can dig in and see the artifacts 320 | and their types with something we learned from [The Art and Science of CAP], in 321 | particular [in Episode 8] where we looked at the `sqlite_schema`. 322 | 323 | 👉 In the SQLite shell, where you'll execute this and the next few commands, 324 | try this: 325 | 326 | ```sql 327 | select type,name from sqlite_schema order by type; 328 | ``` 329 | 330 | which should emit a long list, similar to this (output reduced for brevity): 331 | 332 | ```text 333 | index|sqlite_autoindex_sap_common_Languages_1 334 | index|sqlite_autoindex_sap_common_Currencies_1 335 | index|sqlite_autoindex_sap_capire_bookshop_Books_texts_1 336 | index|sqlite_autoindex_sap_capire_bookshop_Books_texts_2 337 | ... 338 | table|sap_capire_bookshop_Books 339 | table|sap_capire_bookshop_Authors 340 | table|sap_capire_bookshop_Genres 341 | table|cds_outbox_Messages 342 | table|sap_common_Languages 343 | table|sap_common_Currencies 344 | table|sap_capire_bookshop_Books_texts 345 | ... 
346 | view|AdminService_Books 347 | view|AdminService_Authors 348 | view|CatalogService_Books 349 | view|Ex01Service_Books 350 | view|AdminService_Languages 351 | view|AdminService_Genres 352 | view|AdminService_Currencies 353 | view|AdminService_Books_texts 354 | view|CatalogService_Genres 355 | view|CatalogService_Currencies 356 | view|CatalogService_Books_texts 357 | view|Ex01Service_Genres 358 | view|Ex01Service_Currencies 359 | ... 360 | ``` 361 | 362 | What about the data? 363 | 364 | 👉 Try this: 365 | 366 | ```sql 367 | select title,stock from sap_capire_bookshop_Books; 368 | ``` 369 | 370 | which should show this (with or without The Raven, depending on the recent 371 | requests): 372 | 373 | ```text 374 | Wuthering Heights|12 375 | Jane Eyre|11 376 | The Raven|333 377 | Eleonora|555 378 | Catweazle|22 379 | ``` 380 | 381 | > The `sqlite3` shell has completion; you might want to try it out, it's 382 | > triggered with the Tab key, and especially useful for long table names such 383 | > as the one here (don't forget to terminate the statement with a semicolon 384 | > `;`). 385 | 386 | Sometimes we will want to adjust or augment the data in the database directly 387 | for testing purposes (to avoid having to modify the source initial data and 388 | then re-deploy and re-start the CAP server). That's easy because everything is 389 | local. 390 | 391 | 👉 Let's try that now: 392 | 393 | ```sql 394 | update sap_capire_bookshop_Books set stock = 1000 where ID = 271; 395 | ``` 396 | 397 | 👉 Now, exit the SQLite shell, then perform an OData READ operation to see if 398 | that has taken effect: 399 | 400 | ```bash 401 | curl -s 'localhost:4004/ex01/Books/271?$select=title,stock' | jq . 
402 | ``` 403 | 404 | Yep, looks like it did: 405 | 406 | ```json 407 | { 408 | "@odata.context": "$metadata#Books/$entity", 409 | "title": "Catweazle", 410 | "stock": 1000, 411 | "ID": 271 412 | } 413 | ``` 414 | 415 | ## Understand the difference between initial and sample data 416 | 417 | Thus far we've been managing and using _initial_ data. There's also support for 418 | using _sample_ data locally. 419 | 420 | Briefly, sample data is exclusively for tests and demos, in other words for 421 | local development only, not for production. 422 | 423 | The CAP server will look for and load sample data from `data/` directories 424 | _not_ within the standard `db/`, `srv/` and `app/` directories, but inside a 425 | project root based `test/` directory parent: 426 | 427 | ```text 428 | . 429 | ├── db 430 | │ └── data <-- initial data location (what we've been using so far) 431 | ├── srv 432 | └── test 433 | └── data <-- sample data location 434 | ``` 435 | 436 | Let's explore this sample data concept now. 437 | 438 | ### Add a temporary Sales entity to the service 439 | 440 | For the sake of keeping things simple, let's assume we want to think of our 441 | authors, books and genres as initial "master" data ultimately destined for 442 | production, and explore the sample data concept with some "transactional" data 443 | in the form of some basic sales records, sample data that we only want while 444 | we're developing locally.
445 | 446 | 👉 In a new file called `srv/ex01-sales.cds` add this: 447 | 448 | ```cds 449 | using { cuid } from '@sap/cds/common'; 450 | using { Ex01Service } from './ex01-service'; 451 | 452 | extend service Ex01Service with { 453 | entity Sales : cuid { 454 | date: Date; 455 | book: Association to Ex01Service.Books; 456 | quantity: Integer; 457 | } 458 | } 459 | ``` 460 | 461 | ### Generate some sample sales data 462 | 463 | 👉 Next, use `cds add` with the `data` facet to create a CSV file with sample 464 | data for this new entity: 465 | 466 | ```bash 467 | cds add data \ 468 | --filter Sales \ 469 | --records 3 \ 470 | --out test/data/ \ 471 | --force 472 | ``` 473 | 474 | > The `--force` option here isn't strictly necessary but useful in case we want 475 | > to re-run this invocation later. 476 | 477 | You should see something like this: 478 | 479 | ```log 480 | using '--force' ... existing files will be overwritten 481 | adding data 482 | creating test/data/Ex01Service.Sales.csv 483 | 484 | successfully added features to your project 485 | ``` 486 | 487 | ### Re-deploy to db.sqlite 488 | 489 | The CAP server will have restarted, but we'll need to re-deploy, because 490 | currently our persistent storage (the SQLite `db.sqlite` file) contains neither 491 | DDL statements for the new `Sales` entity nor the sales records themselves, as 492 | we can see: 493 | 494 | ```bash 495 | ; sqlite3 db.sqlite 496 | SQLite version 3.40.1 2022-12-28 14:03:47 497 | Enter ".help" for usage hints. 498 | sqlite> select type,name from sqlite_schema where name like '%Sales%'; 499 | sqlite> 500 | ``` 501 | 502 | So let's re-deploy. Now that we have the persistent (and default `db.sqlite` 503 | filename based) configuration defined in `package.json#cds.requires.db` we can 504 | simply invoke `cds deploy` without any options, as it will look at 505 | `cds.requires.db` to work out what to do.
506 | 507 | 👉 Make the deployment: 508 | 509 | ```bash 510 | cds deploy 511 | ``` 512 | 513 | This should show: 514 | 515 | ```log 516 | > init from test/data/Ex01Service.Sales.csv 517 | > init from db/data/sap.capire.bookshop-Genres.csv 518 | > init from db/data/sap.capire.bookshop-Books_texts.csv 519 | > init from db/data/sap.capire.bookshop-Books.csv 520 | > init from db/data/sap.capire.bookshop-Authors.csv 521 | /> successfully deployed to db.sqlite 522 | ``` 523 | 524 | Hey, look at that - the sales data from `test/data/Ex01Service.Sales.csv` is 525 | included! 526 | 527 | 👉 Let's check that (first making sure the CAP server has restarted - give it a 528 | nudge with Enter if needed): 529 | 530 | ```bash 531 | curl -s localhost:4004/ex01/Sales | jq . 532 | ``` 533 | 534 | This should emit something similar to this: 535 | 536 | ```json 537 | { 538 | "@odata.context": "$metadata#Sales", 539 | "value": [ 540 | { 541 | "ID": "26243430-a307-4ba4-a72e-c4ce44653fa2", 542 | "date": "2003-09-29", 543 | "book_ID": 271, 544 | "quantity": 26 545 | }, 546 | { 547 | "ID": "26243431-d479-4891-9c09-b2c88f7a43c2", 548 | "date": "2011-05-03", 549 | "book_ID": 207, 550 | "quantity": 29 551 | }, 552 | { 553 | "ID": "26243432-68ce-4a10-b1e1-678e40f51346", 554 | "date": "2009-08-24", 555 | "book_ID": 207, 556 | "quantity": 69 557 | } 558 | ] 559 | } 560 | ``` 561 | 562 | Great! 563 | 564 | > Did you notice that the values for the `book_ID` property ... are not just 565 | > random? 566 | 567 | But let's make sure these sales records really are considered just local sample 568 | data. 569 | 570 | ### Perform a build 571 | 572 | With the cds [build] command we can prepare a deployment for the cloud. Let's 573 | do that. 
574 | 575 | 👉 Using `DEBUG=build` to see everything that happens, including all the files 576 | that are taken into account, run `build`: 577 | 578 | ```bash 579 | DEBUG=build cds build --for hana 580 | ``` 581 | 582 | This produces a lot of log output, much of which has been omitted here for 583 | brevity: 584 | 585 | ```log 586 | [cli] - determining build tasks for project [/work/scratch/myproj]. 587 | ... 588 | [cli] - model: db/schema.cds, srv/admin-service.cds, srv/cat-service.cds, srv/ex01-sales.cds, srv/ex01-service.cds, app/common.cds, app/services.cds, node_modules/@sap/cds/srv/outbox.cds 589 | [cli] - compile.to.hana returned 590 | done > wrote output to: 591 | gen/db/package.json 592 | gen/db/src/gen/.hdiconfig 593 | ... 594 | gen/db/src/gen/AdminService.Authors.hdbview 595 | gen/db/src/gen/AdminService.Books.hdbview 596 | ... 597 | gen/db/src/gen/CatalogService.Books.hdbview 598 | gen/db/src/gen/CatalogService.Books_texts.hdbview 599 | ... 600 | gen/db/src/gen/data/sap.capire.bookshop-Authors.csv 601 | gen/db/src/gen/data/sap.capire.bookshop-Authors.hdbtabledata 602 | ... 603 | gen/db/src/gen/sap.capire.bookshop.Authors.hdbtable 604 | gen/db/src/gen/sap.capire.bookshop.Books.hdbtable 605 | ... 
606 | 607 | build completed in 411 ms 608 | ``` 609 | 610 | The data files containing the _initial_ data (files in `db/data/`) are 611 | included, but the sample data file (in `test/data/`) is not: 612 | 613 | ```bash 614 | ; DEBUG=build cds build --for hana | grep -E '\/data\/' 615 | gen/db/src/gen/data/sap.capire.bookshop-Authors.csv 616 | gen/db/src/gen/data/sap.capire.bookshop-Authors.hdbtabledata 617 | gen/db/src/gen/data/sap.capire.bookshop-Books.csv 618 | gen/db/src/gen/data/sap.capire.bookshop-Books.hdbtabledata 619 | gen/db/src/gen/data/sap.capire.bookshop-Books_texts.csv 620 | gen/db/src/gen/data/sap.capire.bookshop-Books_texts.hdbtabledata 621 | gen/db/src/gen/data/sap.capire.bookshop-Genres.csv 622 | gen/db/src/gen/data/sap.capire.bookshop-Genres.hdbtabledata 623 | ``` 624 | 625 | We can see that the sample data, in `test/`, is only for local test and demo 626 | purposes. 627 | 628 | That's the end of this exercise! 629 | 630 | --- 631 | 632 | ## Further reading 633 | 634 | - [SQLite features] 635 | 636 | --- 637 | 638 | [Next exercise](../02) 639 | 640 | --- 641 | 642 | ## Footnotes 643 | 644 | 645 | ### Footnote 1 646 | 647 | The books are already exposed but only in the `AdminService` which is annotated 648 | to protect it, with: 649 | 650 | ```cds 651 | service AdminService @(requires:'admin') { ... } 652 | ``` 653 | 654 | (in `srv/admin-service.cds`). Yes, we can embrace the mock authentication: 655 | 656 | ```bash 657 | curl -s -u 'alice:' localhost:4004/odata/v4/admin/Books 658 | ``` 659 | 660 | But to be honest there's another reason, which is that they're also annotated 661 | (in `app/admin-books/fiori-service.cds`) as being draft-enabled: 662 | 663 | ```cds 664 | annotate sap.capire.bookshop.Books with @fiori.draft.enabled; 665 | ``` 666 | 667 | This adds a second key (`isActiveEntity`) to the entity, and we don't want to 668 | get into that at this early stage.
669 | 670 | 671 | ### Footnote 2 672 | 673 | Note the `--in-memory?` option in the expanded version of `cds w` which is `cds 674 | serve all --with-mocks --in-memory?`. The meaning of the question mark is 675 | important here - this is what the help says for the option: 676 | 677 | ```text 678 | Automatically adds a transient in-memory database bootstrapped on 679 | each (re-)start in the same way cds deploy would do, based on defaults 680 | or configuration in package.json#cds.requires.db. Add a question 681 | mark to apply a more defensive variant which respects the configured 682 | database, if any, and only adds an in-memory database if no 683 | persistent one is configured. 684 | ``` 685 | 686 | 687 | ### Footnote 3 688 | 689 | There is [no particular strict convention for SQLite database filename 690 | extensions]; either `.db` or `.sqlite` are decent choices though. 691 | 692 | 693 | ### Footnote 4 694 | 695 | We can also invoke one-shot commands too. An example of a one-shot command, 696 | i.e. a single `sqlite3` invocation at the shell prompt, is: 697 | 698 | ```bash 699 | sqlite3 db.sqlite 'select count(*) from sap_capire_bookshop_Authors;' 700 | ``` 701 | 702 | 703 | ### Footnote 5 704 | 705 | The development profile is the default; with `cds env requires.db --profile 706 | production` we get: 707 | 708 | ```text 709 | undefined 710 | ``` 711 | 712 | See the next exercise for more on profiles. 
713 | 714 | [command line shell for SQLite]: https://sqlite.org/cli.html 715 | [provide initial data]: https://cap.cloud.sap/docs/guides/databases#providing-initial-data 716 | [developer friendly version of no-code]: https://qmacro.org/blog/posts/2024/11/07/five-reasons-to-use-cap/#1-the-code-is-in-the-framework-not-outside-of-it 717 | [continue repeating the word "Nevermore"]: https://en.wikipedia.org/wiki/The_Raven#:~:text=the%20raven%20seems%20to%20further%20antagonize%20the%20protagonist%20with%20its%20repetition%20of%20the%20word%20%22nevermore%22 718 | [insanely great]: https://www.inc.com/jason-aten/the-2-word-phrase-steve-jobs-used-to-inspire-his-team-to-make-worlds-most-iconic-products.html 719 | [no particular strict convention for SQLite database filename extensions]: https://stackoverflow.com/questions/808499/does-it-matter-what-extension-is-used-for-sqlite-database-files 720 | [convention over configuration]: https://qmacro.org/blog/posts/2019/11/06/cap-is-important-because-it's-not-important/#start-smart 721 | [The Art and Science of CAP]: https://qmacro.org/blog/posts/2024/12/06/the-art-and-science-of-cap/ 722 | [in Episode 8]: https://qmacro.org/blog/posts/2025/02/14/tasc-notes-part-8/#exploring-in-sqlite 723 | [DDL]: https://cap.cloud.sap/docs/guides/databases#rules-for-generated-ddl 724 | [cds env]: https://cap.cloud.sap/docs/tools/cds-cli#cds-env 725 | [build]: https://cap.cloud.sap/docs/guides/deployment/custom-builds#build-task-properties 726 | [SQLite features]: https://cap.cloud.sap/docs/guides/databases-sqlite#features 727 | -------------------------------------------------------------------------------- /exercises/03/README.md: -------------------------------------------------------------------------------- 1 | # Exercise 03 - mocking auth and required services 2 | 3 | There's no avoiding the fact that if you want your CAP apps and services to be 4 | useful, they're going to have to make use of authentication mechanisms, and 5 | consume 
other APIs, in the cloud. In local development mode, however, these 6 | requirements can get in the way and hinder progress. 7 | 8 | Fortunately the "developer centric" nature of CAP's local-first strategy 9 | provides various ways to not _ignore_ the reality, but to _embrace_ it locally 10 | ready for when it's really needed. With the "mocking" approach, we can design 11 | and declare our domain model and adorn it with annotations relating to 12 | authentication, and rely on mocked authentication while still thinking about, 13 | defining and testing user role and attribute based access control. We can also 14 | have required services mocked for us so we can connect to them from our own 15 | local development context. 16 | 17 | In this exercise we'll explore both those mockable areas. 18 | 19 | ## Explore the auth in play 20 | 21 | You may have already noticed the mocked authentication approach declared in the 22 | CAP server's log lines. 23 | 24 | 👉 Check this by stopping any currently running CAP server (the `cds watch` 25 | process), restarting it with: 26 | 27 | ```bash 28 | DEBUG=auth cds w --profile classics 29 | ``` 30 | 31 | and observe this output: 32 | 33 | ```log 34 | [cds] - using auth strategy { 35 | kind: 'mocked', 36 | impl: 'node_modules/@sap/cds/lib/srv/middlewares/auth/basic-auth' 37 | } 38 | ``` 39 | 40 | The "mocked" [authentication strategy] uses HTTP Basic Authentication (with 41 | simple usernames and passwords) combined with assignment of sample roles to 42 | [pre-defined test users]. It's more or less the same as the "basic" 43 | authentication strategy (as we can see from the implementation file in the log 44 | output), but with this set of users and roles pre-configured. 
45 | 46 | ### Try accessing AdminService resources 47 | 48 | The `AdminService` (currently made available at `/odata/v4/admin`) has a 49 | [@requires] annotation which declares that only users with the "admin" role 50 | have access (in `srv/admin-service.cds`): 51 | 52 | ```cds 53 | using {sap.capire.bookshop as my} from '../db/schema'; 54 | 55 | service AdminService @(requires: 'admin') { 56 | entity Books as projection on my.Books; 57 | entity Authors as projection on my.Authors; 58 | } 59 | ``` 60 | 61 | #### Attempt an unauthenticated request 62 | 63 | 👉 Try to access the `Books` resource within that service without supplying any 64 | authentication detail: 65 | 66 | ```bash 67 | curl -i localhost:4004/odata/v4/admin/Books 68 | ``` 69 | 70 | > The `--include` (or `-i`) option causes `curl` to emit the response headers. 71 | 72 | This should result in an HTTP 401 response that will look like this (other 73 | response headers have been omitted for brevity): 74 | 75 | ```log 76 | HTTP/1.1 401 Unauthorized 77 | WWW-Authenticate: Basic realm="Users" 78 | ``` 79 | 80 | This will also have been logged by the CAP server due to the `DEBUG=auth` 81 | setting: 82 | 83 | ```log 84 | [basic] - 401 > login required 85 | ``` 86 | 87 | As authorization checks (in play here) are predicated on verified identity 88 | claims (i.e. you have to authenticate yourself to provide an identity against 89 | which authorization checks can be made), [authentication is a prerequisite] 90 | here. 91 | 92 | #### Attempt a request authenticated as a user without the requisite role 93 | 94 | 👉 Try authenticating with the pre-defined user "yves", who has the role 95 | "internal-user" (see the [pre-defined test users]): 96 | 97 | ```bash 98 | curl -i -u 'yves:' localhost:4004/odata/v4/admin/Books 99 | ``` 100 | 101 | > The colon separates username and password values when supplying such 102 | > credentials via `curl`'s `--user` (or `-u`) option. 
None of the pre-defined 103 | > users have passwords (i.e. the passwords are "empty"). If you omit the colon 104 | > from the value supplied to `-u` here, `curl` will prompt you for a password 105 | > (you can just hit Enter to send an empty password in this case). 106 | 107 | This request should elicit an HTTP status thus: 108 | 109 | ```log 110 | HTTP/1.1 403 Forbidden 111 | ``` 112 | 113 | with some extra info in the CAP server log output too: 114 | 115 | ```log 116 | [basic] - authenticated: { user: 'yves', tenant: undefined, features: undefined } 117 | [odata] - GET /odata/v4/admin/Books 118 | [error] - 403 - Error: Forbidden 119 | at requires_check (/work/scratch/myproj/node_modules/@sap/cds/lib/srv/protocols/http.js:54:32) 120 | at http_log (/work/scratch/myproj/node_modules/@sap/cds/lib/srv/protocols/http.js:42:59) { 121 | code: '403', 122 | reason: "User 'yves' is lacking required roles: [admin]", 123 | user: User { id: 'yves', roles: { 'internal-user': 1 } }, 124 | required: [ 'admin' ], 125 | '@Common.numericSeverity': 4 126 | } 127 | ``` 128 | 129 | Incidentally, this nicely underlines the difference between HTTP [401] and HTTP 130 | [403] responses: 131 | 132 | HTTP Response Code | Description | Meaning | Summary 133 | -|-|-|- 134 | 401|The request lacked valid authentication credentials|Can't verify who you are|Not authenticated 135 | 403|The request did contain valid credentials (i.e. 
was properly authenticated) but the authenticated user does not have the requisite permissions|Your identify is verified but you don't have access|Not authorized 136 | 137 | #### Make a request authenticated as a user with the requisite role 138 | 139 | 👉 Repeat the same resource request but this time as user "alice", who does 140 | have the "admin" role listed in the `@requires` annotation: 141 | 142 | ```bash 143 | curl -s -u 'alice:' localhost:4004/odata/v4/admin/Books \ 144 | | jq .value[].title 145 | ``` 146 | 147 | > The `--silent` (or `-s`) option turns on silent mode which means we don't get 148 | > the typical response resource retrieval progress info: 149 | > 150 | > ```log 151 | > % Total % Received % Xferd Average Speed Time Time Time Current 152 | > Dload Upload Total Spent Left Speed 153 | > 100 4572 100 4572 0 0 611k 0 --:--:-- --:--:-- --:--:-- 637k 154 | 155 | Success! Here's what the CAP server shows: 156 | 157 | ```log 158 | [basic] - authenticated: { user: 'alice', tenant: undefined, features: undefined } 159 | [odata] - GET /odata/v4/admin/Books 160 | ``` 161 | 162 | and here's what's returned: 163 | 164 | ```log 165 | "Wuthering Heights" 166 | "Jane Eyre" 167 | "The Raven" 168 | "Eleonora" 169 | "Catweazle" 170 | ``` 171 | 172 | ### Explore the @requires and @restrict annotations with the Ex01Service 173 | 174 | In a previous exercise we [added a new service definition] in 175 | `srv/ex01-service.cds`; now we'll add some annotations to that to get a feel 176 | for how they work, but more importantly to see how the mocked strategy supports 177 | exactly what would be supported in production. 
178 | 179 | #### Set up restrictions 180 | 181 | 👉 Append a couple of annotation declarations to the `srv/ex01-service.cds` 182 | file so the entire content ends up looking like this: 183 | 184 | ```cds 185 | using {sap.capire.bookshop as my} from '../db/schema'; 186 | 187 | @path: '/ex01' 188 | service Ex01Service { 189 | entity Books as projection on my.Books; 190 | } 191 | 192 | annotate Ex01Service with @requires: 'authenticated-user'; 193 | 194 | annotate Ex01Service.Books with @restrict: [ 195 | {grant: 'READ'}, 196 | { 197 | grant: 'WRITE', 198 | to : 'backoffice' 199 | } 200 | ]; 201 | ``` 202 | 203 | These annotations set up: 204 | 205 | - a requirement that any request to the service be authenticated (i.e. made 206 | with a verified identity) 207 | - a restriction on the `Books` entity in that any (authenticated) user can 208 | perform read operations, but only (authenticated) users with the "backoffice" 209 | role can perform write operations 210 | 211 | > If you're wondering about `@(requires: ...)` vs `@requires: ...`, see the 212 | > link to the "Expressing multiple annotations with @(...)" section of a blog 213 | > post on OData and CDS annotations in the [Further reading](#further-reading) 214 | > section below. 
215 | 216 | #### Make an unauthenticated request 217 | 218 | 👉 Try to retrieve the details of the book with ID 207 ("Jane Eyre") without 219 | providing any authentication details: 220 | 221 | ```bash 222 | curl -i localhost:4004/ex01/Books/207 223 | ``` 224 | 225 | As expected, we don't get very far with this; the output includes: 226 | 227 | ```log 228 | HTTP/1.1 401 Unauthorized 229 | WWW-Authenticate: Basic realm="Users" 230 | 231 | Unauthorized 232 | ``` 233 | 234 | and there's this line in the CAP server log: 235 | 236 | ```log 237 | [basic] - 401 > login required 238 | ``` 239 | 240 | #### Make authenticated requests with an administrative user 241 | 242 | 👉 Try that again, authenticating as the pre-defined administrative user 243 | "alice": 244 | 245 | ```bash 246 | curl -i -u 'alice:' localhost:4004/ex01/Books/207 247 | ``` 248 | 249 | The conditions of the (rather general) `READ` grant, combined with the 250 | `authenticated-user` requirement, are both fulfilled, meaning success for 251 | Alice: 252 | 253 | ```log 254 | HTTP/1.1 200 OK 255 | Content-Type: application/json; charset=utf-8 256 | 257 | {"@odata.context":"$metadata#Books/$entity","createdAt":"2025-06-23T13:26:48.247Z","createdBy":"anonymous","modifiedAt":"2025-06-23T13:26:48.247Z","modifiedBy":"anonymous","ID":207,"title":"Jane Eyre","descr":"..."} 258 | ``` 259 | 260 | But can Alice perform operations with the `WRITE` semantic? 261 | 262 | 👉 Try it: 263 | 264 | ```bash 265 | curl -X DELETE -i -u 'alice:' localhost:4004/ex01/Books/207 266 | ``` 267 | 268 | No! 
269 | 270 | ```log 271 | HTTP/1.1 403 Forbidden 272 | 273 | {"error":{"message":"Forbidden","code":"403","@Common.numericSeverity":4}} 274 | ``` 275 | 276 | #### Define a new office user 277 | 278 | While there are pre-defined users we can make use of in the mocked 279 | authentication strategy, we can define our own too, which is especially helpful 280 | when we're iterating locally on building out the domain model and including the 281 | security considerations with that, which often means we're also defining our 282 | own roles (such as "backoffice" in this `{ grant: 'WRITE', to: 'backoffice'}` 283 | example here). 284 | 285 | We could put this configuration in `package.json#cds` but for a change, let's 286 | use a [project-local .cdsrc.json] file. 287 | 288 | 👉 Create a `.cdsrc.json` file in the project root, with this content for 289 | Milton, the [stapler guy]: 290 | 291 | ```json 292 | { 293 | "requires": { 294 | "auth": { 295 | "users": { 296 | "milton": { 297 | "password": "dontmovemydesk", 298 | "roles": [ 299 | "stapler" 300 | ] 301 | } 302 | } 303 | } 304 | } 305 | } 306 | ``` 307 | 308 | Once this is saved, the CAP server will restart. 
309 | 310 | 👉 To satisfy our curiosity, check the effective environment, specifically for 311 | the auth details: 312 | 313 | ```bash 314 | cds env requires.auth 315 | ``` 316 | 317 | This should emit: 318 | 319 | ```log 320 | { 321 | restrict_all_services: false, 322 | kind: 'mocked', 323 | users: { 324 | alice: { tenant: 't1', roles: [ 'admin' ] }, 325 | bob: { tenant: 't1', roles: [ 'cds.ExtensionDeveloper' ] }, 326 | carol: { tenant: 't1', roles: [ 'admin', 'cds.ExtensionDeveloper' ] }, 327 | dave: { tenant: 't1', roles: [ 'admin' ], features: [] }, 328 | erin: { tenant: 't2', roles: [ 'admin', 'cds.ExtensionDeveloper' ] }, 329 | fred: { tenant: 't2', features: [ 'isbn' ] }, 330 | me: { tenant: 't1', features: [ '*' ] }, 331 | yves: { roles: [ 'internal-user' ] }, 332 | '*': true, 333 | milton: { password: 'dontmovemydesk', roles: [ 'stapler' ] } 334 | }, 335 | tenants: { t1: { features: [ 'isbn' ] }, t2: { features: '*' } } 336 | } 337 | ``` 338 | 339 | Alongside the pre-defined users we can see Milton. 340 | 341 | > Adding the `--profile classics` option here is also possible, but the end 342 | > result is the same in this case. 343 | 344 | #### Authenticate a request with the new office user 345 | 346 | 👉 Let's try this new user, like this: 347 | 348 | ```bash 349 | curl -X DELETE -i -u 'milton:dontmovemydesk' localhost:4004/ex01/Books/207 350 | ``` 351 | 352 | Not quite! 353 | 354 | ```log 355 | HTTP/1.1 403 Forbidden 356 | 357 | {"error":{"message":"Forbidden","code":"403","@Common.numericSeverity":4}} 358 | ``` 359 | 360 | Of course, we need to give him the "backoffice" role. 361 | 362 | 👉 Do that now by adding it to the `[ ... ]` list of roles in `.cdsrc.json` so 363 | that it looks like this: 364 | 365 | ```json 366 | "roles": [ 367 | "stapler", 368 | "backoffice" 369 | ] 370 | ``` 371 | 372 | Now try again: 373 | 374 | ```bash 375 | curl -X DELETE -i -u 'milton:dontmovemydesk' localhost:4004/ex01/Books/207 376 | ``` 377 | 378 | Success! 
379 | 380 | ```log 381 | HTTP/1.1 204 No Content 382 | ``` 383 | 384 | This just scratches the surface of what's possible; remember that the power of 385 | all of the abstracted authentication and authorisation layers (including users) 386 | is available to all authentication strategies, even (or "especially") the ones 387 | designed for local development. And there's no change when one moves to 388 | production, at that level. 389 | 390 | See the [Further reading](#further-reading) section for more information. 391 | 392 | ## Mock an external service 393 | 394 | Working locally doesn't mean that we need to avoid development that involves 395 | remote services. A remote service API definition can be downloaded and 396 | imported, so that it becomes known to the CAP server (as a "required" service, 397 | rather than a "provided" service), and via the translation of the API 398 | definition to an internal model representation in Core Schema Notation [CSN] it 399 | can also be given active behavior and even test data. 400 | 401 | Everyone loves Northwind (don't they?) so let's use a cut-down version of 402 | Northwind, called Northbreeze, which is available as an OData v4 service at 403 | . 404 | 405 | ### Import the API definition 406 | 407 | The API definition of the Northbreeze service is essentially the EDMX available 408 | in the service's metadata document. 
409 | 410 | 👉 Retrieve the metadata document resource and store the representation in a 411 | file: 412 | 413 | ```bash 414 | curl -s \ 415 | --url 'https://developer-challenge.cfapps.eu10.hana.ondemand.com/odata/v4/northbreeze/$metadata' \ 416 | > northbreeze.edmx 417 | ``` 418 | 419 | 👉 Now use the `cds import` command to import the API definition (in this EDMX 420 | form) and convert it to CSN: 421 | 422 | ```bash 423 | cds import northbreeze.edmx 424 | ``` 425 | 426 | You should see something like this: 427 | 428 | ```log 429 | [cds] - imported API to srv/external/northbreeze 430 | > use it in your CDS models through the like of: 431 | 432 | using { northbreeze as external } from './external/northbreeze' 433 | ``` 434 | 435 | 👉 Let's have a look at where the imported CSN is, in relation to other content 436 | in `srv/`: 437 | 438 | ```bash 439 | tree srv 440 | ``` 441 | 442 | We can see that the default location that `cds import` uses makes a lot of 443 | sense, in that it's a service, but not part of our own overall CDS model: 444 | 445 | ```log 446 | srv 447 | ├── admin-service.cds 448 | ├── admin-service.js 449 | ├── cat-service.cds 450 | ├── cat-service.js 451 | ├── ex01-service.cds 452 | └── external 453 | ├── northbreeze.csn 454 | └── northbreeze.edmx 455 | ``` 456 | 457 | Moreover, a reference to this as a "required" service has been added to the 458 | `package.json#cds` based configuration, which we can perhaps better observe by 459 | looking at the effective `requires` configuration. 
460 | 461 | 👉 Do that now: 462 | 463 | ```bash 464 | cds env requires 465 | ``` 466 | 467 | As well as the sections for `middlewares`, `queue`, `auth` and `db` (which have 468 | been reduced for brevity here), we now have `northbreeze` listed, an external 469 | OData resource whose model is known and which is implemented by a built-in 470 | remote-service module: 471 | 472 | ```log 473 | { 474 | middlewares: true, 475 | queue: { 476 | model: '@sap/cds/srv/outbox', 477 | ... 478 | kind: 'persistent-queue' 479 | }, 480 | auth: { 481 | restrict_all_services: false, 482 | kind: 'mocked', 483 | users: { 484 | alice: { tenant: 't1', roles: [ 'admin' ] }, 485 | ... 486 | milton: { 487 | password: 'dontmovemydesk', 488 | roles: [ 'stapler', 'backoffice' ] 489 | } 490 | }, 491 | tenants: { t1: { features: [ 'isbn' ] }, t2: { features: '*' } } 492 | }, 493 | db: { 494 | impl: '@cap-js/sqlite', 495 | credentials: { url: ':memory:' }, 496 | kind: 'sqlite' 497 | }, 498 | northbreeze: { 499 | impl: '@sap/cds/libx/_runtime/remote/Service.js', 500 | external: true, 501 | kind: 'odata', 502 | model: 'srv/external/northbreeze' 503 | } 504 | } 505 | ``` 506 | 507 | ### Have the service mocked 508 | 509 | From this point until the end of this exercise, you'll be digging into the 510 | mocking of this Northbreeze service (see [footnote 1](#footnote-1)). 511 | 512 | 👉 So for now, stop the CAP server that's still running and listening on port 513 | 4004, and then, in that same terminal session, start mocking this service 514 | (using the same terminal session here is just to keep the "noise" to a minimum, 515 | not because of any technical requirement or restriction): 516 | 517 | ```bash 518 | cds mock northbreeze --port 5005 519 | ``` 520 | 521 | > Normally, without the `--port` option, a random port will be chosen, but for 522 | > the sake of this workshop and consistency of instructions, we'll use a 523 | > specific port. 
524 | 525 | This will start a CAP server just for this service: 526 | 527 | ```log 528 | ... 529 | [cds] - connect using bindings from: { registry: '~/.cds-services.json' } 530 | ... 531 | [cds] - mocking northbreeze { 532 | impl: 'node_modules/@sap/cds/libx/_runtime/common/Service.js', 533 | path: '/odata/v4/northbreeze' 534 | } 535 | [cds] - server listening on { url: 'http://localhost:5005' } 536 | ... 537 | ``` 538 | 539 | But there's no data right now, as illustrated with simple request like this: 540 | 541 | ```bash 542 | ; curl 'localhost:5005/odata/v4/northbreeze/Suppliers/$count' 543 | 0 544 | ``` 545 | 546 | ### Add some data 547 | 548 | The sensible place to put data is "next to" the model definition for this 549 | external service, which means here: 550 | 551 | ```text 552 | srv 553 | ├── admin-service.cds 554 | ├── admin-service.js 555 | ├── cat-service.cds 556 | ├── cat-service.js 557 | ├── ex01-service.cds 558 | └── external 559 | ├── data 560 | │ └── [data files go here] 561 | ├── northbreeze.csn 562 | └── northbreeze.edmx 563 | ``` 564 | 565 | #### Use generated data 566 | 567 | 👉 So, after stopping the mocking server process, create a `data/` directory in 568 | `srv/external/`, use the "data" facet with `cds add` to generate a few records 569 | of mock data for the `Suppliers` entity, and then restart the mocking: 570 | 571 | ```bash 572 | mkdir srv/external/data/ \ 573 | && cds add data \ 574 | --filter Suppliers \ 575 | --records 5 \ 576 | --out srv/external/data/ \ 577 | && cds mock northbreeze --port 5005 578 | ``` 579 | 580 | This results in: 581 | 582 | ```log 583 | adding data 584 | creating srv/external/data/northbreeze.Suppliers.csv 585 | 586 | successfully added features to your project 587 | ``` 588 | 589 | 👉 That data is pretty useful already - check with: 590 | 591 | ```bash 592 | curl -s localhost:5005/odata/v4/northbreeze/Suppliers | jq .value 593 | ``` 594 | 595 | which should show output similar to this (massively reduced here for 
brevity): 596 | 597 | ```json 598 | [ 599 | { 600 | "SupplierID": 3865327, 601 | "CompanyName": "CompanyName-3865327", 602 | "ContactName": "ContactName-3865327", 603 | "ContactTitle": "ContactTitle-3865327", 604 | "Address": "Address-3865327", 605 | "City": "City-3865327", 606 | "Region": "Region-3865327", 607 | "PostalCode": "PostalCode-3865327", 608 | "Country": "Country-3865327", 609 | "Phone": "Phone-3865327", 610 | "Fax": "Fax-3865327", 611 | "HomePage": "HomePage-3865327" 612 | }, 613 | { 614 | "SupplierID": 3865328, 615 | "CompanyName": "...", 616 | } 617 | ] 618 | ``` 619 | 620 | #### Retrieve, store and use data from the real service 621 | 622 | But we can do better. Why not grab and store some "real" data from the actual 623 | service, and use it when we mock? 624 | 625 | 👉 First, stop the mock server process again. 626 | 627 | > Remember that the monitor-and-auto-restart feature comes with `cds watch`, 628 | > not `cds mock`; that's why we're stopping and starting the `cds mock` server. 
629 | 630 | 👉 Now, remove the CSV data we just generated: 631 | 632 | ```bash 633 | rm srv/external/data/*.csv 634 | ``` 635 | 636 | 👉 Now retrieve the entityset resources (in their default JSON representation) 637 | and put the data into JSON files in that `srv/external/data/` directory: 638 | 639 | ```bash 640 | for entity in Products Suppliers Categories; do 641 | echo -n "$entity: " 642 | curl \ 643 | --silent \ 644 | --url "https://developer-challenge.cfapps.eu10.hana.ondemand.com/odata/v4/northbreeze/$entity" \ 645 | | jq .value \ 646 | | tee "srv/external/data/northbreeze-$entity.json" \ 647 | | jq length 648 | done 649 | ``` 650 | 651 | This should result in a number of records for each entityset: 652 | 653 | ```log 654 | Products: 77 655 | Suppliers: 29 656 | Categories: 8 657 | ``` 658 | 659 | and the creation of the corresponding files, this time in JSON format: 660 | 661 | ```text 662 | srv 663 | ├── admin-service.cds 664 | ├── admin-service.js 665 | ├── cat-service.cds 666 | ├── cat-service.js 667 | ├── ex01-service.cds 668 | └── external 669 | ├── data 670 | │ ├── northbreeze-Categories.json 671 | │ ├── northbreeze-Products.json 672 | │ └── northbreeze-Suppliers.json 673 | ├── northbreeze.csn 674 | └── northbreeze.edmx 675 | ``` 676 | 677 | 👉 Now restart the mock service: 678 | 679 | ```bash 680 | cds mock northbreeze --port 5005 681 | ``` 682 | 683 | 👉 and (in another terminal session) re-request the same entityset: 684 | 685 | ```bash 686 | curl -s localhost:5005/odata/v4/northbreeze/Suppliers | jq .value 687 | ``` 688 | 689 | This time, the data is more realistic, as it's the actual data we fetched from 690 | the real service (again, heavily reduced here for brevity): 691 | 692 | ```json 693 | [ 694 | { 695 | "SupplierID": 1, 696 | "CompanyName": "Exotic Liquids", 697 | "ContactName": "Charlotte Cooper", 698 | "ContactTitle": "Purchasing Manager", 699 | "Address": "49 Gilbert St.", 700 | "City": "London", 701 | "Region": "NULL", 702 | "PostalCode": 
"EC1 4SD", 703 | "Country": "UK", 704 | "Phone": "(171) 555-2222", 705 | "Fax": "NULL", 706 | "HomePage": "NULL" 707 | }, 708 | { 709 | "SupplierID": 2, 710 | "CompanyName": "..." 711 | } 712 | ] 713 | ``` 714 | 715 | Great! Now we have a fully mocked external service complete with real data. 716 | 717 | ### Access the mocked remote service from the cds REPL (bonus) 718 | 719 | If you have time, you can build on your confidence with an interactive REPL 720 | context by connecting to this mocked remote service from within the cds REPL. 721 | 722 | 👉 First, make sure the mock service is still running and listening on port 723 | 5005 (i.e. you haven't stopped it just now). 724 | 725 | 👉 Now, let's have a look at the "wiring" for this mocked remote service in our 726 | local development mode context; take a peek in the `.cds-services.json` file in 727 | your home directory: 728 | 729 | ```bash 730 | jq . ~/.cds-services.json 731 | ``` 732 | 733 | This is a file that the CAP server runtime uses in local development mode to 734 | declare and detail which services are (being) provided, and where. It will look 735 | something like this (the server IDs are just process IDs so they will be 736 | different for you): 737 | 738 | ```json 739 | { 740 | "cds": { 741 | "provides": { 742 | "northbreeze": { 743 | "kind": "odata", 744 | "credentials": { 745 | "url": "http://localhost:5005/odata/v4/northbreeze" 746 | }, 747 | "server": 124021 748 | } 749 | }, 750 | "servers": { 751 | "124021": { 752 | "root": "file:///work/scratch/myproj", 753 | "url": "http://localhost:5005" 754 | } 755 | } 756 | } 757 | } 758 | ``` 759 | 760 | We can see from this that any local CAP server requiring the "northbreeze" 761 | service knows that it's available, and how to reach it (via the 762 | `credentials.url` property). 763 | 764 | 👉 CAP makes use of the [SAP Cloud SDK] for management of destination 765 | information about, and connectivity to, remote services. 
So before we continue 766 | at this point, let's add the SDK packages: 767 | 768 | ```bash 769 | npm add @sap-cloud-sdk/http-client@4 770 | ``` 771 | 772 | 👉 Now start the cds REPL: 773 | 774 | ```bash 775 | cds repl 776 | ``` 777 | 778 | 👉 and in the prompt, [connect to the remote service] and store that connection 779 | in a variable: 780 | 781 | ```text 782 | nb = await cds.connect.to("northbreeze") 783 | ``` 784 | 785 | This will emit the internal representation of this connection, which you can 786 | get a summary of using the `.inspect` command which you learned about [in a 787 | previous exercise], like this: 788 | 789 | ```bash 790 | > .inspect nb .depth=0 791 | nb: RemoteService { 792 | name: 'northbreeze', 793 | options: [Object], 794 | kind: 'odata', 795 | model: [LinkedCSN], 796 | handlers: [EventHandlers], 797 | definition: [service], 798 | namespace: 'northbreeze', 799 | actions: [LinkedDefinitions], 800 | selectProduct: [Function: northbreeze.selectProduct], 801 | entities: [LinkedDefinitions], 802 | _source: '/work/scratch/myproj/node_modules/@sap/cds/libx/_runtime/remote/Service.js', 803 | datasource: undefined, 804 | destinationOptions: undefined, 805 | destination: [Object], 806 | path: undefined, 807 | requestTimeout: 60000, 808 | csrf: undefined, 809 | csrfInBatch: undefined, 810 | middlewares: [Object] 811 | } 812 | ``` 813 | 814 | We can indeed see that the connection object in `nb` contains information on 815 | "how to reach" the service: 816 | 817 | ```text 818 | > nb.destination 819 | { 820 | name: 'northbreeze', 821 | url: 'http://localhost:5005/odata/v4/northbreeze' 822 | } 823 | ``` 824 | 825 | (Does this structure [remind you of something]? Good, because that's 826 | essentially what it is!) 
827 | 828 | 👉 Now, still at the cds REPL prompt, construct a query on the fly and send it 829 | across the connection to your locally mocked version of the remote Northbreeze 830 | service: 831 | 832 | ```text 833 | await nb.run(SELECT `CompanyName` .from `Suppliers`) 834 | ``` 835 | 836 | > Remember that pretty much everything in this context is going to be 837 | > asynchronous, i.e. in a Promise wrapper, so `await` is needed here to resolve 838 | > the calls and the values they evaluate to. 839 | 840 | This results in: 841 | 842 | - an actual OData call from the cds REPL context across to the mocked 843 | Northbreeze service on port 5005 844 | - the retrieval of the Suppliers entityset, specifically the `CompanyName` 845 | property for each entity 846 | 847 | ```text 848 | [ 849 | { CompanyName: 'Exotic Liquids', SupplierID: 1 }, 850 | { CompanyName: 'New Orleans Cajun Delights', SupplierID: 2 }, 851 | { CompanyName: "Grandma Kelly's Homestead", SupplierID: 3 }, 852 | { CompanyName: 'Tokyo Traders', SupplierID: 4 }, 853 | { CompanyName: "Cooperativa de Quesos 'Las Cabras'", SupplierID: 5 }, 854 | { CompanyName: "Mayumi's", SupplierID: 6 }, 855 | { CompanyName: 'Pavlova Ltd.', SupplierID: 7 }, 856 | ... 857 | { CompanyName: 'Gai pâturage', SupplierID: 28 }, 858 | { CompanyName: "Forêts d'érables", SupplierID: 29 } 859 | ] 860 | ``` 861 | 862 | Excellent! 
It's worth pausing for a second to take this in: 863 | 864 | - everything is happening locally 865 | - but even in this local context, we're still connecting "remotely" to the 866 | mocked Northbreeze service 867 | - there are local development specific affordances in play here (such as the 868 | `~/.cds-services.json` file) that make coordination of service management 869 | simple when it needs to be 870 | - even though everything is happening locally, the SAP Cloud SDK is still in 871 | play and doesn't really care about the difference between one (mocked) remote 872 | service and another, an abstraction which is of great benefit to us 873 | 874 | --- 875 | 876 | ## Further reading 877 | 878 | - The [Authentication] topic in Capire 879 | - The [CDS-based Authorization] topic in Capire 880 | - The contents of the [Service integration with SAP Cloud Application 881 | Programming Model] CodeJam 882 | - [Part 4 - digging deeper] of [Level up your CAP skills by learning to use the 883 | cds REPL] 884 | - The [Expressing multiple annotations with @(...)] section of [A deep dive 885 | into OData and CDS annotations] 886 | 887 | --- 888 | 889 | [Next exercise](../04) 890 | 891 | --- 892 | 893 | ## Footnotes 894 | 895 | 896 | ### Footnote 1 897 | 898 | We are going to be mocking in a separate CAP server process, for a more 899 | realistic scenario, albeit still local. It is also possible to use in-process 900 | mocking, where the same single CAP server provides services and also mocks the 901 | required services, but we won't be covering that here. See [Run local with 902 | mocks] in Capire for more info. 
903 | 904 | [authentication strategy]: https://cap.cloud.sap/docs/node.js/authentication#strategies 905 | [pre-defined test users]: https://cap.cloud.sap/docs/node.js/authentication#mock-users 906 | [@requires]: https://cap.cloud.sap/docs/guides/security/authorization#requires 907 | [Authentication]: https://cap.cloud.sap/docs/node.js/authentication 908 | [CDS-based Authorization]: https://cap.cloud.sap/docs/guides/security/authorization 909 | [authentication is a prerequisite]: https://cap.cloud.sap/docs/guides/security/authorization#prerequisite-authentication 910 | [401]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Status/401 911 | [403]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Status/403 912 | [added a new service definition]: ../01/README.md#add-a-new-service-definition 913 | [project-local .cdsrc.json]: https://cap.cloud.sap/docs/node.js/cds-env#in-cdsrc-json 914 | [CSN]: https://cap.cloud.sap/docs/cds/csn 915 | [Service integration with SAP Cloud Application Programming Model]: https://github.com/SAP-samples/cap-service-integration-codejam/ 916 | [connect to the remote service]: https://cap.cloud.sap/docs/node.js/cds-connect#cds-connect-to-1 917 | [in a previous exercise]: ../02/README.md#use-the-cds-repl-to-explore-path-expression-features-with-sqlite 918 | [Part 4 - digging deeper]: https://qmacro.org/blog/posts/2025/03/21/level-up-your-cap-skills-by-learning-how-to-use-the-cds-repl/#part-4-digging-deeper 919 | [Level up your CAP skills by learning to use the cds REPL]: https://qmacro.org/blog/posts/2025/03/21/level-up-your-cap-skills-by-learning-how-to-use-the-cds-repl/ 920 | [A deep dive into OData and CDS annotations]: https://qmacro.org/blog/posts/2023/03/10/a-deep-dive-into-odata-and-cds-annotations/ 921 | [Expressing multiple annotations with @(...)]: https://qmacro.org/blog/posts/2023/03/10/a-deep-dive-into-odata-and-cds-annotations/#expressing-multiple-annotations-with- 922 | [remind you of something]: 
https://sap.github.io/cloud-sdk/docs/js/features/connectivity/destinations 923 | [Run local with mocks]: https://cap.cloud.sap/docs/guides/using-services#run-local-with-mocks 924 | [SAP Cloud SDK]: https://sap.github.io/cloud-sdk/ 925 | [stapler guy]: https://en.wikipedia.org/wiki/Office_Space#Red_stapler 926 | --------------------------------------------------------------------------------