├── .simple-git-hooks.json ├── eslint.config.mjs ├── test ├── personal-data │ ├── db │ │ ├── data │ │ │ ├── sap.auditlog.test.personal_data.db.Pages.csv │ │ │ ├── sap.auditlog.test.personal_data.db.LastOne.csv │ │ │ ├── sap.auditlog.test.personal_data.db.StatusChange.csv │ │ │ ├── sap.auditlog.test.personal_data.db.CustomerStatus.csv │ │ │ ├── sap.auditlog.test.personal_data.db.Customers.csv │ │ │ ├── sap.auditlog.test.personal_data.db.Comments.csv │ │ │ ├── sap.auditlog.test.personal_data.db.Notes.csv │ │ │ ├── sap.auditlog.test.personal_data.db.AddressAttachment.csv │ │ │ └── sap.auditlog.test.personal_data.db.CustomerPostalAddress.csv │ │ └── schema.cds │ ├── package.json │ ├── handle.test.js │ ├── srv │ │ ├── fiori-service.cds │ │ └── crud-service.cds │ └── fiori.test.js ├── integration │ ├── srv │ │ ├── integration-service.cds │ │ └── integration-service.js │ ├── package.json │ ├── standard.test.js │ ├── oauth2.test.js │ ├── premium.test.js │ ├── tests.js │ └── ng.test.js ├── api │ ├── srv │ │ ├── api-service.cds │ │ ├── api-service.js │ │ └── server.js │ ├── MyAuditLogService.js │ ├── custom.test.js │ └── api.test.js ├── jest.setup.js └── utils │ └── logger.js ├── jest.config.js ├── srv ├── log2als.js ├── log2console.js ├── service.js ├── log2restv2.js └── log2alsng.js ├── .github ├── workflows │ ├── label-issues.yml │ ├── ci.yml │ └── release.yml └── dependabot.yml ├── index.cds ├── REUSE.toml ├── package.json ├── CONTRIBUTING.md ├── cds-plugin.js ├── .gitignore ├── lib ├── access.js ├── _relation.js ├── modification.js └── utils.js ├── CHANGELOG.md ├── README.md ├── LICENSES └── Apache-2.0.txt └── LICENSE /.simple-git-hooks.json: -------------------------------------------------------------------------------- 1 | { 2 | "pre-commit": "npx pretty-quick --staged" 3 | } 4 | -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | import cds from '@sap/cds/eslint.config.mjs' 2 | export default [...cds.recommended] 3 | -------------------------------------------------------------------------------- /test/personal-data/db/data/sap.auditlog.test.personal_data.db.Pages.csv: -------------------------------------------------------------------------------- 1 | ID;sensitive;personal 2 | 1;111;222 3 | -------------------------------------------------------------------------------- /test/personal-data/db/data/sap.auditlog.test.personal_data.db.LastOne.csv: -------------------------------------------------------------------------------- 1 | ID;lastOneField 2 | 74d4a37a-6319-4d52-bb48-02fd06b9f3r4;some last value 3 | -------------------------------------------------------------------------------- /test/integration/srv/integration-service.cds: -------------------------------------------------------------------------------- 1 | @path: '/integration' 2 | service IntegrationService { 3 | 4 | action passthrough(event : String, data : String); 5 | 6 | } 7 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | const config = { 2 | testTimeout: 42222, 3 | testMatch: ['**/*.test.js'], 4 | setupFilesAfterEnv: ['./test/jest.setup.js'] 5 | } 6 | 7 | module.exports = config 8 | -------------------------------------------------------------------------------- /test/personal-data/db/data/sap.auditlog.test.personal_data.db.StatusChange.csv: 
-------------------------------------------------------------------------------- 1 | ID;secondKey;description;last_ID 2 | 59d4a37a-6319-4d52-bb48-02fd06b9fbc2;some value;new change;74d4a37a-6319-4d52-bb48-02fd06b9f3r4 3 | -------------------------------------------------------------------------------- /test/integration/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "@cap-js/audit-logging": "*" 4 | }, 5 | "cds": { 6 | "requires": { 7 | "audit-log": "audit-log-to-restv2" 8 | } 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /test/personal-data/db/data/sap.auditlog.test.personal_data.db.CustomerStatus.csv: -------------------------------------------------------------------------------- 1 | ID;description;todo;change_ID;change_secondKey 2 | 23d4a37a-6319-4d52-bb48-02fd06b9ffa4;active;send reminder;59d4a37a-6319-4d52-bb48-02fd06b9fbc2;some value 3 | -------------------------------------------------------------------------------- /test/personal-data/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "@cap-js/audit-logging": "*" 4 | }, 5 | "cds": { 6 | "runtime": { 7 | "patch_as_upsert": true, 8 | "_put_as_replace": true 9 | } 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /srv/log2als.js: -------------------------------------------------------------------------------- 1 | const credentials = JSON.parse(process.env.VCAP_SERVICES) || {} 2 | const isV3 = credentials['user-provided']?.some(obj => obj.tags.includes('auditlog-ng')) 3 | 4 | module.exports = isV3 ? require('./log2alsng') : require('./log2restv2') 5 | -------------------------------------------------------------------------------- /test/personal-data/db/data/sap.auditlog.test.personal_data.db.Customers.csv: -------------------------------------------------------------------------------- 1 | ID;emailAddress;firstName;lastName;creditCardNo;someOtherField;status_ID 2 | bcd4a37a-6319-4d52-bb48-02fd06b9ffe9;foo@bar.com;foo;bar;12345;dummy;23d4a37a-6319-4d52-bb48-02fd06b9ffa4 3 | -------------------------------------------------------------------------------- /test/personal-data/db/data/sap.auditlog.test.personal_data.db.Comments.csv: -------------------------------------------------------------------------------- 1 | ID;customer_ID;text 2 | 35bdc8d0-dcaf-4727-9377-9ae6930b0f2b;bcd4a37a-6319-4d52-bb48-02fd06b9ffe9;what 3 | 85c4ccdc-6e78-4671-a3c8-2bf10e6a85de;bcd4a37a-6319-4d52-bb48-02fd06b9ffe9;ever 4 | -------------------------------------------------------------------------------- /test/personal-data/db/data/sap.auditlog.test.personal_data.db.Notes.csv: -------------------------------------------------------------------------------- 1 | ID;attachment_ID;customerStatus_ID;note 2 | 35bdc8d0-dcaf-4727-9377-9ae6930b0f2c;3cd71292-ef69-4571-8cfb-10b9d5d1437e;;start 3 | 35bdc8d0-dcaf-4727-9377-9ae693055555;;23d4a37a-6319-4d52-bb48-02fd06b9ffa4;initial status note 4 | -------------------------------------------------------------------------------- /test/personal-data/db/data/sap.auditlog.test.personal_data.db.AddressAttachment.csv: -------------------------------------------------------------------------------- 1 | ID;address_ID;description;todo;notAnnotated 2 | 3cd71292-ef69-4571-8cfb-10b9d5d1437e;1ab71292-ef69-4571-8cfb-10b9d5d1459e;moo;shu;dummy 3 | 
595225db-6eeb-4b4f-9439-dbe5fcb4ce5a;1ab71292-ef69-4571-8cfb-10b9d5d1459e;sue;lou;dummy 4 | -------------------------------------------------------------------------------- /test/personal-data/db/data/sap.auditlog.test.personal_data.db.CustomerPostalAddress.csv: -------------------------------------------------------------------------------- 1 | ID;customer_ID;street;town;someOtherField 2 | 1ab71292-ef69-4571-8cfb-10b9d5d1459e;bcd4a37a-6319-4d52-bb48-02fd06b9ffe9;moo;shu;dummy 3 | 285225db-6eeb-4b4f-9439-dbe5fcb4ce82;bcd4a37a-6319-4d52-bb48-02fd06b9ffe9;sue;lou;dummy 4 | -------------------------------------------------------------------------------- /test/integration/srv/integration-service.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | module.exports = async function () { 4 | const audit = await cds.connect.to('audit-log') 5 | 6 | this.on('passthrough', async function (req) { 7 | const { event, data } = req.data 8 | await audit.logSync(event, JSON.parse(data)) 9 | }) 10 | } 11 | -------------------------------------------------------------------------------- /srv/log2console.js: -------------------------------------------------------------------------------- 1 | const AuditLogService = require('./service') 2 | 3 | module.exports = class AuditLog2Console extends AuditLogService { 4 | async init() { 5 | this.on('*', function (req) { 6 | const { event, data } = req 7 | 8 | // eslint-disable-next-line no-console 9 | console.log(`[audit-log] - ${event}:`, data) 10 | }) 11 | 12 | // call AuditLogService's init 13 | await super.init() 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /test/integration/standard.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | const { POST } = cds.test().in(__dirname) 4 | 5 | cds.env.requires['audit-log'].credentials = process.env.ALS_CREDS_STANDARD && JSON.parse(process.env.ALS_CREDS_STANDARD) 6 | 7 | describe('Log to Audit Log Service with standard plan', () => { 8 | if (!cds.env.requires['audit-log'].credentials) 9 | return test.skip('Skipping tests due to missing credentials', () => {}) 10 | 11 | require('./tests')(POST) 12 | }) 13 | -------------------------------------------------------------------------------- /test/api/srv/api-service.cds: -------------------------------------------------------------------------------- 1 | @path: '/api' 2 | @requires: 'admin' 3 | service APIService { 4 | 5 | // default 6 | action testEmit(); 7 | action testSend(); 8 | 9 | // new 10 | action testLog(); 11 | action testLogSync(); 12 | 13 | @requires: 'cds.ExtensionDeveloper' 14 | entity Books { 15 | key ID : Integer; 16 | title : String; 17 | } 18 | 19 | // test helpers 20 | function getSequence() returns many String; 21 | action resetSequence(); 22 | 23 | } 24 | -------------------------------------------------------------------------------- /test/api/MyAuditLogService.js: -------------------------------------------------------------------------------- 1 | const { AuditLogService } = require('../../') //> package root 2 | 3 | class MyAuditLogService extends AuditLogService { 4 | async init() { 5 | this.on('*', function (req) { 6 | const { event, data } = req 7 | 8 | // eslint-disable-next-line no-console 9 | console.log(`[my-audit-log] - ${event}:`, data) 10 | }) 11 | 12 | // call AuditLogService's init 13 | await super.init() 14 | } 15 | } 16 | 17 | 
module.exports = MyAuditLogService 18 | -------------------------------------------------------------------------------- /.github/workflows/label-issues.yml: -------------------------------------------------------------------------------- 1 | name: Label issues 2 | 3 | permissions: 4 | issues: write 5 | 6 | on: 7 | issues: 8 | types: 9 | - reopened 10 | - opened 11 | 12 | jobs: 13 | label_issues: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - run: gh issue edit "$NUMBER" --add-label "$LABELS" 17 | env: 18 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 19 | GH_REPO: ${{ github.repository }} 20 | NUMBER: ${{ github.event.issue.number }} 21 | LABELS: new 22 | -------------------------------------------------------------------------------- /test/jest.setup.js: -------------------------------------------------------------------------------- 1 | function toContainMatchObject(received, expected) { 2 | let pass = false 3 | for (const each of received) { 4 | try { 5 | expect(each).toMatchObject(expected) 6 | pass = true 7 | } catch { 8 | // ignore 9 | } 10 | 11 | if (pass) break 12 | } 13 | 14 | const message = () => `expected 15 | ${JSON.stringify(received, null, 2)} 16 | to include an object matching 17 | ${JSON.stringify(expected, null, 2)}` 18 | 19 | return { pass, message } 20 | } 21 | 22 | function toBeDateLike(received) { 23 | return { 24 | pass: received instanceof Date || typeof received === 'string' && !!Date.parse(received), 25 | message: () => `expected ${received} to be date-like` 26 | } 27 | } 28 | 29 | expect.extend({ toContainMatchObject, toBeDateLike }) 30 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 
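# production and development dependencies are grouped so that minor and patch bumps arrive as one combined pull request per group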
3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: 'npm' # See documentation for possible values 9 | directory: '/' # Location of package manifests 10 | schedule: 11 | interval: 'daily' 12 | groups: 13 | prod-dependencies: 14 | dependency-type: 'production' 15 | update-types: 16 | - 'minor' 17 | - 'patch' 18 | dev-dependencies: 19 | dependency-type: 'development' 20 | update-types: 21 | - 'minor' 22 | - 'patch' 23 | -------------------------------------------------------------------------------- /test/api/srv/api-service.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | module.exports = async function () { 4 | const audit = await cds.connect.to('audit-log') 5 | 6 | this.on('testEmit', async function () { 7 | await audit.emit('foo', { bar: 'baz' }) 8 | }) 9 | 10 | this.on('testSend', async function () { 11 | // only works with cds^7.5, but we don't execute the tests with prior cds versions 12 | await cds.unboxed(audit).send('foo', { bar: 'baz' }) 13 | }) 14 | 15 | this.on('testLog', async function () { 16 | await audit.log('foo', { bar: 'baz' }) 17 | }) 18 | 19 | this.on('testLogSync', async function () { 20 | await audit.logSync('foo', { bar: 'baz' }) 21 | }) 22 | 23 | // test helpers 24 | let _sequence = [] 25 | this.before( 26 | '*', 27 | req => !req.event.match(/sequence/i) && req.on('succeeded', () => _sequence.push('request succeeded')) 28 | ) 29 | this.on('getSequence', req => req.reply(_sequence)) 30 | this.on('resetSequence', () => (_sequence = [])) 31 | audit.after('*', () => _sequence.push('audit log logged')) 32 | } 33 | -------------------------------------------------------------------------------- /test/api/custom.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | // set cwd for resolving impl 4 | cds.test().in(__dirname) 5 | 6 | cds.env.requires['audit-log'] = { 7 | impl: 'MyAuditLogService.js' 8 | } 9 | 10 | describe('Custom Implementation', () => { 11 | let __log, _logs 12 | const _log = (...args) => { 13 | if (!(args.length === 2 && typeof args[0] === 'string' && args[0].match(/\[my-audit-log\]/i))) { 14 | // > not an audit log (most likely, anyway) 15 | return __log(...args) 16 | } 17 | 18 | _logs.push(args[1]) 19 | } 20 | 21 | beforeAll(() => { 22 | __log = global.console.log 23 | global.console.log = _log 24 | }) 25 | 26 | afterAll(() => { 27 | global.console.log = __log 28 | }) 29 | 30 | beforeEach(async () => { 31 | _logs = [] 32 | }) 33 | 34 | test('extending AuditLogService exported by plugin', async () => { 35 | const audit = await cds.connect.to('audit-log') 36 | await audit.log('foo', { data_subject: { ID: { bar: 'baz' } } }) 37 | expect(_logs).toContainMatchObject({ data_subject: { ID: { bar: 'baz' } } }) 38 | }) 39 | }) 40 | -------------------------------------------------------------------------------- /srv/service.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | module.exports = class AuditLogService extends cds.Service { 4 | async init() { 5 | // add common audit log entry fields 6 | this.before('*', req => { 7 | const { tenant, user, timestamp } = cds.context 8 | req.data.uuid ??= cds.utils.uuid() 9 | // allows to specify null as tenant in 
order to log to provider in multi-tenant scenarios 10 | // NOTE: tenant: null is not a public API! 11 | if (!('tenant' in req.data)) req.data.tenant = tenant 12 | req.data.user ??= user.id 13 | req.data.time ??= timestamp 14 | }) 15 | 16 | // call OutboxService's init 17 | await super.init() 18 | 19 | // add self-explanatory api (await audit.log/logSync(event, data)) 20 | this.log = this.emit 21 | // NOTE: logSync is not a public API! 22 | this.logSync = (...args) => { 23 | if (cds.unboxed) return cds.unboxed(this).send(...args) //> cds >= 7.5 24 | if (this.immediate instanceof cds.Service) return this.immediate.send(...args) //> cds ~ 7.4 25 | return this.send(...args) //> cds <= 7.3 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | workflow_dispatch: 8 | push: 9 | branches: [main] 10 | pull_request: 11 | branches: [main] 12 | 13 | jobs: 14 | lint: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - uses: actions/checkout@v2 18 | - run: npm i 19 | - run: npm run lint 20 | test: 21 | runs-on: ubuntu-latest 22 | strategy: 23 | fail-fast: false 24 | matrix: 25 | node-version: [22.x, 20.x] 26 | cds-version: [9, 8] 27 | steps: 28 | - uses: actions/checkout@v2 29 | - name: Use Node.js ${{ matrix.node-version }} 30 | uses: actions/setup-node@v2 31 | with: 32 | node-version: ${{ matrix.node-version }} 33 | - run: npm i -g @sap/cds-dk@${{ matrix.cds-version }} 34 | - run: npm i 35 | - run: if [ ${{ matrix.cds-version }} -eq 8 ]; then npm i -f @sap/cds@8 @cap-js/sqlite@1; fi 36 | - run: cds v 37 | - run: npm run test 38 | env: 39 | ALS_CREDS_OAUTH2: ${{ secrets.ALS_CREDS_OAUTH2 }} 40 | ALS_CREDS_STANDARD: ${{ secrets.ALS_CREDS_STANDARD }} 41 | ALS_CREDS_PREMIUM: ${{ secrets.ALS_CREDS_PREMIUM }} 42 | ALS_CREDS_NG: ${{ secrets.ALS_CREDS_NG }} 43 | -------------------------------------------------------------------------------- /index.cds: -------------------------------------------------------------------------------- 1 | namespace sap.auditlog; 2 | 3 | service AuditLogService { 4 | 5 | action log (event : String, data : LogEntry); 6 | action logSync(event : String, data : LogEntry); 7 | 8 | event SensitiveDataRead : LogEntry { 9 | data_subject : DataSubject; 10 | object : DataObject; 11 | attributes : many { 12 | name : String; 13 | }; 14 | attachments : many { 15 | id : String; 16 | name : String; 17 | }; 18 | channel : String; 19 | } 20 | 21 | event PersonalDataModified : LogEntry { 22 | data_subject : DataSubject; 23 | object : DataObject; 24 | attributes : many Modification; 25 | success : Boolean default true; 26 | } 27 | 28 | event ConfigurationModified : LogEntry { 29 | object : DataObject; 30 | attributes : many Modification; 31 | } 32 | 33 | event SecurityEvent : LogEntry { 34 | data : {}; 35 | ip : String; 36 | } 37 | 38 | } 39 | 40 | type LogEntry { 41 | uuid : UUID; 42 | tenant : String; 43 | user : String; 44 | time : Timestamp; 45 | } 46 | 47 | type DataObject { 48 | type : String; 49 | id : {}; 50 | } 51 | 52 | type DataSubject : DataObject { 53 | role : String; 54 | } 55 | 56 | type Modification { 57 | name : String; 58 | old : String; 59 | new : String; 60 | } 61 | -------------------------------------------------------------------------------- /test/api/srv/server.js: -------------------------------------------------------------------------------- 1 | const 
cds = require('@sap/cds') 2 | 3 | let audit 4 | 5 | cds.on('served', async () => { 6 | audit = await cds.connect.to('audit-log') 7 | }) 8 | 9 | const audit_log_403 = (resource, ip) => { 10 | // we need to start our own tx because the default tx may be burnt 11 | audit.tx(async () => { 12 | await audit.log('SecurityEvent', { 13 | data: { 14 | user: cds.context.user?.id || 'unknown', 15 | action: `Attempt to access restricted resource "${resource}" with insufficient authority` 16 | }, 17 | ip 18 | }) 19 | }) 20 | } 21 | 22 | // log for requests that are rejected with 403 23 | cds.on('bootstrap', app => { 24 | app.use((req, res, next) => { 25 | req.on('close', () => { 26 | if (res.statusCode == 403) { 27 | const { originalUrl, ip } = req 28 | audit_log_403(originalUrl, ip) 29 | } 30 | }) 31 | next() 32 | }) 33 | }) 34 | 35 | // log for batch subrequests that are rejected with 403 (but the batch request itself is successful) 36 | cds.on('serving', srv => { 37 | if (srv instanceof cds.ApplicationService) { 38 | srv.on('error', (err, req) => { 39 | if (err.code == 403) { 40 | const { originalUrl, ip } = req.http.req 41 | if (originalUrl.endsWith('/$batch')) audit_log_403(originalUrl.replace('/$batch', req.req.url), ip) 42 | } 43 | }) 44 | } 45 | }) 46 | 47 | module.exports = cds.server 48 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | permissions: 4 | contents: write 5 | id-token: write 6 | 7 | on: 8 | workflow_dispatch: 9 | 10 | jobs: 11 | publish-npm: 12 | runs-on: ubuntu-latest 13 | environment: npm 14 | steps: 15 | - uses: actions/checkout@v3 16 | - uses: actions/setup-node@v3 17 | with: 18 | node-version: 24 19 | registry-url: https://registry.npmjs.org/ 20 | - name: run tests 21 | run: | 22 | npm i -g @sap/cds-dk 23 | npm i 24 | npm run lint 25 | npm run test 26 | env: 27 | ALS_CREDS_OAUTH2: ${{ secrets.ALS_CREDS_OAUTH2 }} 28 | ALS_CREDS_STANDARD: ${{ secrets.ALS_CREDS_STANDARD }} 29 | ALS_CREDS_PREMIUM: ${{ secrets.ALS_CREDS_PREMIUM }} 30 | - name: get version 31 | id: package-version 32 | uses: martinbeentjes/npm-get-version-action@v1.2.3 33 | - name: parse changelog 34 | id: parse-changelog 35 | uses: schwma/parse-changelog-action@v1.0.0 36 | with: 37 | version: '${{ steps.package-version.outputs.current-version }}' 38 | - name: create a GitHub release 39 | uses: ncipollo/release-action@v1 40 | with: 41 | tag: 'v${{ steps.package-version.outputs.current-version }}' 42 | body: '${{ steps.parse-changelog.outputs.body }}' 43 | - run: npm publish --access public --provenance 44 | -------------------------------------------------------------------------------- /test/integration/oauth2.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | const { POST } = cds.test().in(__dirname) 4 | const log = cds.test.log() 5 | 6 | cds.env.requires['audit-log'].credentials = process.env.ALS_CREDS_OAUTH2 && JSON.parse(process.env.ALS_CREDS_OAUTH2) 7 | 8 | // stay in provider account (i.e., use "$PROVIDER" and avoid x-zid header when fetching oauth2 token) 9 | cds.env.requires.auth.users.alice.tenant = cds.env.requires['audit-log'].credentials.uaa.tenantid 10 | 11 | cds.env.log.levels['audit-log'] = 'debug' 12 | 13 | describe('Log to Audit Log Service with oauth2 plan', () => { 14 | if (!cds.env.requires['audit-log'].credentials) 15 | return 
test.skip('Skipping tests due to missing credentials', () => {}) 16 | 17 | // required for tests to exit correctly (cf. token expiration timeouts) 18 | jest.useFakeTimers() 19 | 20 | require('./tests')(POST) 21 | 22 | test('no tenant is handled correctly', async () => { 23 | const data = JSON.stringify({ data: { foo: 'bar' } }) 24 | const res = await POST('/integration/passthrough', { event: 'SecurityEvent', data }) 25 | expect(res).toMatchObject({ status: 204 }) 26 | expect(log.output.match(/\$PROVIDER/)).toBeTruthy() 27 | }) 28 | 29 | // NOTE: unoffcial feature 30 | test('tenant $PROVIDER is handled correctly', async () => { 31 | const data = JSON.stringify({ data: { foo: 'bar' }, tenant: '$PROVIDER' }) 32 | const res = await POST('/integration/passthrough', { event: 'SecurityEvent', data }) 33 | expect(res).toMatchObject({ status: 204 }) 34 | expect(log.output.match(/\$PROVIDER/)).toBeTruthy() 35 | }) 36 | }) 37 | -------------------------------------------------------------------------------- /test/integration/premium.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | const { POST } = cds.test().in(__dirname) 4 | const log = cds.test.log() 5 | 6 | cds.env.requires['audit-log'].credentials = process.env.ALS_CREDS_PREMIUM && JSON.parse(process.env.ALS_CREDS_PREMIUM) 7 | 8 | // stay in provider account (i.e., use "$PROVIDER" and avoid x-zid header when fetching oauth2 token) 9 | cds.env.requires.auth.users.alice.tenant = cds.env.requires['audit-log'].credentials.uaa.tenantid 10 | 11 | cds.env.log.levels['audit-log'] = 'debug' 12 | 13 | describe('Log to Audit Log Service with premium plan', () => { 14 | if (!cds.env.requires['audit-log'].credentials) 15 | return test.skip('Skipping tests due to missing credentials', () => {}) 16 | 17 | // required for tests to exit correctly (cf. token expiration timeouts) 18 | jest.useFakeTimers() 19 | 20 | require('./tests')(POST) 21 | 22 | test('no tenant is handled correctly', async () => { 23 | const data = JSON.stringify({ data: { foo: 'bar' } }) 24 | const res = await POST('/integration/passthrough', { event: 'SecurityEvent', data }) 25 | expect(res).toMatchObject({ status: 204 }) 26 | expect(log.output.match(/\$PROVIDER/)).toBeTruthy() 27 | }) 28 | 29 | // NOTE: unoffcial feature 30 | test('tenant $PROVIDER is handled correctly', async () => { 31 | const data = JSON.stringify({ data: { foo: 'bar' }, tenant: '$PROVIDER' }) 32 | const res = await POST('/integration/passthrough', { event: 'SecurityEvent', data }) 33 | expect(res).toMatchObject({ status: 204 }) 34 | expect(log.output.match(/\$PROVIDER/)).toBeTruthy() 35 | }) 36 | }) 37 | -------------------------------------------------------------------------------- /REUSE.toml: -------------------------------------------------------------------------------- 1 | version = 1 2 | SPDX-PackageName = "audit-logging" 3 | SPDX-PackageSupplier = "The CAP team " 4 | SPDX-PackageDownloadLocation = "https://github.com/cap-js/audit-logging" 5 | SPDX-PackageComment = "The code in this project may include calls to APIs (\"API Calls\") of\n SAP or third-party products or services developed outside of this project\n (\"External Products\").\n \"APIs\" means application programming interfaces, as well as their respective\n specifications and implementing code that allows software to communicate with\n other software.\n API Calls to External Products are not licensed under the open source license\n that governs this project. 
The use of such API Calls and related External\n Products are subject to applicable additional agreements with the relevant\n provider of the External Products. In no event shall the open source license\n that governs this project grant any rights in or to any External Products, or\n alter, expand or supersede any terms of the applicable additional agreements.\n If you have a valid license agreement with SAP for the use of a particular SAP\n External Product, then you may make use of any API Calls included in this\n project's code for that SAP External Product, subject to the terms of such\n license agreement. If you do not have a valid license agreement for the use of\n a particular SAP External Product, then you may only make use of any API Calls\n in this project for that SAP External Product for your internal, non-productive\n and non-commercial test and evaluation of such API Calls. Nothing herein grants\n you any rights to use or access any SAP External Product, or provide any third\n parties the right to use of access any SAP External Product, through API Calls." 6 | 7 | [[annotations]] 8 | path = "**" 9 | precedence = "aggregate" 10 | SPDX-FileCopyrightText = "2023 SAP SE or an SAP affiliate company and audit-logging contributors." 11 | SPDX-License-Identifier = "Apache-2.0" 12 | -------------------------------------------------------------------------------- /test/utils/logger.js: -------------------------------------------------------------------------------- 1 | const _deepCopy = arg => { 2 | if (Buffer.isBuffer(arg)) return Buffer.from(arg) 3 | if (Array.isArray(arg)) return _deepCopyArray(arg) 4 | if (typeof arg === 'object') return _deepCopyObject(arg) 5 | return arg 6 | } 7 | 8 | const _deepCopyArray = arr => { 9 | if (!arr) return arr 10 | const clone = [] 11 | for (const item of arr) clone.push(_deepCopy(item)) 12 | return clone 13 | } 14 | 15 | const _deepCopyObject = obj => { 16 | if (!obj) return obj 17 | const clone = {} 18 | for (const key in obj) clone[key] = _deepCopy(obj[key]) 19 | return clone 20 | } 21 | 22 | const deepCopy = data => { 23 | if (Array.isArray(data)) return _deepCopyArray(data) 24 | return _deepCopyObject(data) 25 | } 26 | 27 | module.exports = (levels = {}) => { 28 | const _logs = {} 29 | 30 | const _push = (level, ...args) => { 31 | if (args.length > 1 || typeof args[0] !== 'object') return _logs[level].push(...args) 32 | const copy = deepCopy(args[0]) 33 | args[0].message && (copy.message = args[0].message) 34 | // args[0].stack && (copy.stack = args[0].stack) 35 | _logs[level].push(copy) 36 | } 37 | 38 | const fn = () => { 39 | return { 40 | trace: (...args) => _push('trace', ...args), 41 | debug: (...args) => _push('debug', ...args), 42 | log: (...args) => _push('log', ...args), 43 | info: (...args) => _push('info', ...args), 44 | warn: (...args) => _push('warn', ...args), 45 | error: (...args) => _push('error', ...args), 46 | _trace: levels.trace || false, 47 | _debug: levels.debug || false, 48 | _info: levels.info || false, 49 | _warn: levels.warn || false, 50 | _error: levels.error || false 51 | } 52 | } 53 | 54 | fn._logs = _logs 55 | fn._resetLogs = () => { 56 | _logs.trace = [] 57 | _logs.debug = [] 58 | _logs.log = [] 59 | _logs.info = [] 60 | _logs.warn = [] 61 | _logs.error = [] 62 | } 63 | 64 | fn._resetLogs() 65 | 66 | return fn 67 | } 68 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": 
"@cap-js/audit-logging", 3 | "version": "1.1.1", 4 | "description": "CDS plugin providing integration to the SAP Audit Log service as well as out-of-the-box personal data-related audit logging based on annotations.", 5 | "repository": "cap-js/audit-logging", 6 | "author": "SAP SE (https://www.sap.com)", 7 | "homepage": "https://cap.cloud.sap/", 8 | "license": "Apache-2.0", 9 | "main": "cds-plugin.js", 10 | "files": [ 11 | "lib", 12 | "srv" 13 | ], 14 | "scripts": { 15 | "lint": "npx eslint . --max-warnings=0", 16 | "test": "npx jest --silent" 17 | }, 18 | "peerDependencies": { 19 | "@sap/cds": ">=8" 20 | }, 21 | "devDependencies": { 22 | "@cap-js/audit-logging": "file:.", 23 | "@cap-js/cds-test": ">=0", 24 | "@cap-js/sqlite": ">=1", 25 | "axios": "^1", 26 | "eslint": "^9", 27 | "express": "^4", 28 | "jest": "^30", 29 | "pretty-quick": "^4.2.2", 30 | "simple-git-hooks": "^2.13.1" 31 | }, 32 | "cds": { 33 | "requires": { 34 | "audit-log": { 35 | "handle": [ 36 | "READ", 37 | "WRITE" 38 | ], 39 | "outbox": true, 40 | "[development]": { 41 | "kind": "audit-log-to-console" 42 | }, 43 | "[hybrid]": { 44 | "kind": "audit-log-to-restv2" 45 | }, 46 | "[production]": { 47 | "kind": "audit-log-to-restv2" 48 | } 49 | }, 50 | "kinds": { 51 | "audit-log-to-console": { 52 | "impl": "@cap-js/audit-logging/srv/log2console" 53 | }, 54 | "audit-log-to-als": { 55 | "impl": "@cap-js/audit-logging/srv/log2als" 56 | }, 57 | "audit-log-to-restv2": { 58 | "impl": "@cap-js/audit-logging/srv/log2restv2", 59 | "vcap": { 60 | "label": "auditlog" 61 | } 62 | }, 63 | "audit-log-to-alsng": { 64 | "impl": "@cap-js/audit-logging/srv/log2alsng" 65 | } 66 | } 67 | } 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /test/personal-data/handle.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | let { GET: _GET } = cds.test().in(__dirname) 4 | 5 | // the persistent outbox adds a delay 6 | const wait = require('node:timers/promises').setTimeout 7 | const GET = (...args) => _GET(...args).then(async res => (await wait(42), res)) 8 | 9 | cds.env.requires['audit-log'].handle = ['WRITE'] 10 | 11 | describe('handle', () => { 12 | let __log, _logs 13 | const _log = (...args) => { 14 | if (!(args.length === 2 && typeof args[0] === 'string' && args[0].match(/\[audit-log\]/i))) { 15 | // > not an audit log (most likely, anyway) 16 | return __log(...args) 17 | } 18 | 19 | _logs.push(args[1]) 20 | } 21 | 22 | const ALICE = { username: 'alice', password: 'password' } 23 | 24 | let _handle 25 | 26 | beforeAll(() => { 27 | __log = global.console.log 28 | global.console.log = _log 29 | _handle = cds.env.requires['audit-log'].handle 30 | }) 31 | 32 | afterAll(() => { 33 | global.console.log = __log 34 | }) 35 | 36 | beforeEach(() => { 37 | cds.env.requires['audit-log'].handle = _handle 38 | _logs = [] 39 | }) 40 | 41 | test('defaults to WRITE', async () => { 42 | expect(cds.env.requires['audit-log'].handle).toEqual(['WRITE']) 43 | }) 44 | 45 | test('data access is not logged by default', async () => { 46 | const response = await GET('/crud-1/Customers', { auth: ALICE }) 47 | 48 | expect(response).toMatchObject({ status: 200 }) 49 | expect(_logs.length).toBe(0) 50 | }) 51 | 52 | test('data access can be handled out of the box', async () => { 53 | cds.env.requires['audit-log'].handle = ['READ', 'WRITE'] 54 | 55 | const response = await GET('/crud-1/Customers', { auth: ALICE }) 56 | 57 | 
expect(response).toMatchObject({ status: 200 }) 58 | expect(_logs.length).toBe(1) 59 | expect(_logs).toContainMatchObject({ 60 | user: 'alice', 61 | object: { 62 | type: 'CRUD_1.Customers', 63 | id: { ID: expect.any(String) } 64 | }, 65 | data_subject: { 66 | type: 'CRUD_1.Customers', 67 | id: { ID: expect.any(String) }, 68 | role: expect.any(String) 69 | }, 70 | attributes: [{ name: 'creditCardNo' }] 71 | }) 72 | }) 73 | }) 74 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | ## Code of Conduct 4 | 5 | All members of the project community must abide by the [SAP Open Source Code of Conduct](https://github.com/SAP/.github/blob/main/CODE_OF_CONDUCT.md). 6 | Only by respecting each other can we develop a productive, collaborative community. 7 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting [a project maintainer](.reuse/dep5). 8 | 9 | ## Engaging in Our Project 10 | 11 | We use GitHub to manage reviews of pull requests. 12 | 13 | * If you are a new contributor, see: [Steps to Contribute](#steps-to-contribute) 14 | 15 | * Before implementing your change, create an issue that describes the problem you would like to solve or the code that should be enhanced. Please note in the issue that you are willing to work on it. 16 | 17 | * The team will review the issue and decide whether it should be implemented as a pull request. In that case, they will assign the issue to you. If the team decides against picking up the issue, the team will post a comment with an explanation. 18 | 19 | ## Steps to Contribute 20 | 21 | Should you wish to work on an issue, please claim it first by commenting on the GitHub issue that you want to work on. This is to prevent duplicated efforts from other contributors on the same issue. 22 | 23 | If you have questions about one of the issues, please comment on it, and one of the maintainers will clarify. 24 | 25 | ## Contributing Code or Documentation 26 | 27 | You are welcome to contribute code in order to fix a bug or to implement a new feature that is logged as an issue. 28 | 29 | The following rules govern code contributions: 30 | 31 | * Contributions must be licensed under the [Apache 2.0 License](./LICENSE) 32 | * Due to legal reasons, contributors will be asked to accept a Developer Certificate of Origin (DCO) when they create the first pull request to this project. This happens in an automated fashion during the submission process. SAP uses [the standard DCO text of the Linux Foundation](https://developercertificate.org/). 33 | 34 | ## Issues and Planning 35 | 36 | * We use GitHub issues to track bugs and enhancement requests. 37 | 38 | * Please provide as much context as possible when you open an issue. The information you provide must be comprehensive enough to reproduce that issue for the assignee.
39 | -------------------------------------------------------------------------------- /cds-plugin.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | const { auditAccess } = require('./lib/access') 4 | const { addDiffToCtx, calcModLogs4Before, calcModLogs4After, emitModLogs } = require('./lib/modification') 5 | const { hasPersonalData } = require('./lib/utils') 6 | 7 | const WRITE = ['CREATE', 'UPDATE', 'DELETE'] 8 | 9 | /* 10 | * Add generic audit logging handlers 11 | */ 12 | cds.on('served', services => { 13 | const db = cds.db 14 | 15 | for (const service of services) { 16 | if (!(service instanceof cds.ApplicationService)) continue 17 | 18 | const relevantEntities = [] 19 | for (const entity of service.entities) if (hasPersonalData(entity)) relevantEntities.push(entity) 20 | if (!relevantEntities.length) continue 21 | 22 | // automatically promote entities that are associated with data subjects 23 | for (const entity of relevantEntities) { 24 | if (entity['@PersonalData.EntitySemantics'] !== 'DataSubject') continue 25 | for (const e of service.entities) { 26 | for (const k in e.associations) { 27 | if (e.associations[k].target === entity.name && k !== 'SiblingEntity') { 28 | e['@PersonalData.EntitySemantics'] ??= 'Other' 29 | e.associations[k]['@PersonalData.FieldSemantics'] ??= 'DataSubjectID' 30 | if (!relevantEntities.includes(e)) relevantEntities.push(e) 31 | } 32 | } 33 | } 34 | } 35 | 36 | for (const entity of relevantEntities) { 37 | /* 38 | * data access 39 | */ 40 | service.after('READ', entity, auditAccess) 41 | 42 | /* 43 | * data modification 44 | */ 45 | // common 46 | db.before(WRITE, entity, addDiffToCtx) 47 | service.after(WRITE, entity, emitModLogs) 48 | /* 49 | * for new or modified data, modifications are calculated in after phase 50 | * for deleted data, modifications are calculated in before phase 51 | * deep updates can contain new, modified and deleted data -> both phases 52 | */ 53 | // create 54 | db.after('CREATE', entity, calcModLogs4After) 55 | // update 56 | db.before('UPDATE', entity, calcModLogs4Before) 57 | db.after('UPDATE', entity, calcModLogs4After) 58 | // delete 59 | db.before('DELETE', entity, calcModLogs4Before) 60 | } 61 | } 62 | }) 63 | 64 | /* 65 | * Export base class for extending in custom implementations 66 | */ 67 | module.exports = { 68 | AuditLogService: require('./srv/service') 69 | } 70 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # macOS 2 | .DS_Store 3 | 4 | # Logs 5 | logs 6 | *.log 7 | npm-debug.log* 8 | yarn-debug.log* 9 | yarn-error.log* 10 | lerna-debug.log* 11 | .pnpm-debug.log* 12 | 13 | # Diagnostic reports (https://nodejs.org/api/report.html) 14 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 15 | 16 | # Runtime data 17 | pids 18 | *.pid 19 | *.seed 20 | *.pid.lock 21 | 22 | # Directory for instrumented libs generated by jscoverage/JSCover 23 | lib-cov 24 | 25 | # Coverage directory used by tools like istanbul 26 | coverage 27 | *.lcov 28 | 29 | # nyc test coverage 30 | .nyc_output 31 | 32 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 33 | .grunt 34 | 35 | # Bower dependency directory (https://bower.io/) 36 | bower_components 37 | 38 | # node-waf configuration 39 | .lock-wscript 40 | 41 | # Compiled binary addons (https://nodejs.org/api/addons.html) 42 | build/Release 43 | 44 | # 
Dependency directories 45 | node_modules/ 46 | jspm_packages/ 47 | 48 | # Snowpack dependency directory (https://snowpack.dev/) 49 | web_modules/ 50 | 51 | # TypeScript cache 52 | *.tsbuildinfo 53 | 54 | # Optional npm cache directory 55 | .npm 56 | 57 | # Optional eslint cache 58 | .eslintcache 59 | 60 | # Optional stylelint cache 61 | .stylelintcache 62 | 63 | # Microbundle cache 64 | .rpt2_cache/ 65 | .rts2_cache_cjs/ 66 | .rts2_cache_es/ 67 | .rts2_cache_umd/ 68 | 69 | # Optional REPL history 70 | .node_repl_history 71 | 72 | # Output of 'npm pack' 73 | *.tgz 74 | 75 | # Yarn Integrity file 76 | .yarn-integrity 77 | 78 | # dotenv environment variable files 79 | .env 80 | .env.development.local 81 | .env.test.local 82 | .env.production.local 83 | .env.local 84 | 85 | # parcel-bundler cache (https://parceljs.org/) 86 | .cache 87 | .parcel-cache 88 | 89 | # Next.js build output 90 | .next 91 | out 92 | 93 | # Nuxt.js build / generate output 94 | .nuxt 95 | dist 96 | 97 | # Gatsby files 98 | .cache/ 99 | # Comment in the public line in if your project uses Gatsby and not Next.js 100 | # https://nextjs.org/blog/next-9-1#public-directory-support 101 | # public 102 | 103 | # vuepress build output 104 | .vuepress/dist 105 | 106 | # vuepress v2.x temp and cache directory 107 | .temp 108 | .cache 109 | 110 | # Docusaurus cache and generated files 111 | .docusaurus 112 | 113 | # Serverless directories 114 | .serverless/ 115 | 116 | # FuseBox cache 117 | .fusebox/ 118 | 119 | # DynamoDB Local files 120 | .dynamodb/ 121 | 122 | # TernJS port file 123 | .tern-port 124 | 125 | # Stores VSCode versions used for testing VSCode extensions 126 | .vscode-test 127 | 128 | # yarn v2 129 | .yarn/cache 130 | .yarn/unplugged 131 | .yarn/build-state.yml 132 | .yarn/install-state.gz 133 | .pnp.* 134 | 135 | # custom 136 | package-lock.json 137 | .npmrc 138 | .babelrc 139 | .prettierrc.js 140 | .vscode/ 141 | .vscode-test/ 142 | -------------------------------------------------------------------------------- /test/integration/tests.js: -------------------------------------------------------------------------------- 1 | module.exports = POST => { 2 | const object = { type: 'foo.bar', id: { foo: 'bar' } } 3 | const data_subject = Object.assign({ role: 'foo.bar' }, object) 4 | const create_attributes = [{ name: 'foo', new: 'baz' }] 5 | const update_attributes = [{ name: 'foo', old: 'bar', new: 'baz' }] 6 | const delete_attributes = [{ name: 'foo', old: 'bar' }] 7 | 8 | const ALICE = { username: 'alice', password: 'password' } 9 | 10 | test('sensitive data read', async () => { 11 | const data = JSON.stringify({ object, data_subject, attributes: [{ name: 'foo' }] }) 12 | const res = await POST('/integration/passthrough', { event: 'SensitiveDataRead', data }, { auth: ALICE }) 13 | expect(res).toMatchObject({ status: 204 }) 14 | }) 15 | 16 | describe('personal data modified', () => { 17 | test('create', async () => { 18 | const data = JSON.stringify({ object, data_subject, attributes: create_attributes }) 19 | const res = await POST('/integration/passthrough', { event: 'PersonalDataModified', data }, { auth: ALICE }) 20 | expect(res).toMatchObject({ status: 204 }) 21 | }) 22 | 23 | test('update', async () => { 24 | const data = JSON.stringify({ object, data_subject, attributes: update_attributes }) 25 | const res = await POST('/integration/passthrough', { event: 'PersonalDataModified', data }, { auth: ALICE }) 26 | expect(res).toMatchObject({ status: 204 }) 27 | }) 28 | 29 | test('delete', async () => { 30 | const data 
= JSON.stringify({ object, data_subject, attributes: delete_attributes }) 31 | const res = await POST('/integration/passthrough', { event: 'PersonalDataModified', data }, { auth: ALICE }) 32 | expect(res).toMatchObject({ status: 204 }) 33 | }) 34 | }) 35 | 36 | describe('configuration modified', () => { 37 | test('create', async () => { 38 | const data = JSON.stringify({ object, attributes: create_attributes }) 39 | const res = await POST('/integration/passthrough', { event: 'ConfigurationModified', data }, { auth: ALICE }) 40 | expect(res).toMatchObject({ status: 204 }) 41 | }) 42 | 43 | test('update', async () => { 44 | const data = JSON.stringify({ object, attributes: update_attributes }) 45 | const res = await POST('/integration/passthrough', { event: 'ConfigurationModified', data }, { auth: ALICE }) 46 | expect(res).toMatchObject({ status: 204 }) 47 | }) 48 | 49 | test('delete', async () => { 50 | const data = JSON.stringify({ object, attributes: delete_attributes }) 51 | const res = await POST('/integration/passthrough', { event: 'ConfigurationModified', data }, { auth: ALICE }) 52 | expect(res).toMatchObject({ status: 204 }) 53 | }) 54 | }) 55 | 56 | test('security event', async () => { 57 | const data = JSON.stringify({ data: { foo: 'bar' } }) 58 | const res = await POST('/integration/passthrough', { event: 'SecurityEvent', data }, { auth: ALICE }) 59 | expect(res).toMatchObject({ status: 204 }) 60 | }) 61 | } 62 | -------------------------------------------------------------------------------- /test/integration/ng.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | const { POST } = cds.test().in(__dirname) 4 | 5 | cds.env.requires['audit-log'].kind = 'audit-log-to-alsng' 6 | cds.env.requires['audit-log'].impl = '@cap-js/audit-logging/srv/log2alsng' 7 | const VCAP_SERVICES = { 8 | 'user-provided': [ 9 | { 10 | tags: ['auditlog-ng'], 11 | credentials: process.env.ALS_CREDS_NG && JSON.parse(process.env.ALS_CREDS_NG) 12 | } 13 | ] 14 | } 15 | process.env.VCAP_SERVICES = JSON.stringify(VCAP_SERVICES) 16 | 17 | describe('Log to Audit Log Service NG ', () => { 18 | if (!VCAP_SERVICES['user-provided'][0].credentials) 19 | return test.skip('Skipping tests due to missing credentials', () => {}) 20 | 21 | require('./tests')(POST) 22 | 23 | const ALICE = { username: 'alice', password: 'password' } 24 | const update_attributes = [{ name: 'foo', old: 'bar', new: 'baz' }] 25 | 26 | test('id flattening', async () => { 27 | expect( 28 | cds.services['audit-log'].flattenAndSortIdObject({ foo: 'bar', alpha: 'omega', ping: 'pong', fizz: 'buzz' }) 29 | ).toBe('alpha:omega fizz:buzz foo:bar ping:pong') 30 | }) 31 | 32 | test('writes log with multiple id attributes in object and data subject', async () => { 33 | const object = { 34 | type: 'foo.bar', 35 | id: { foo: 'bar', alpha: 'omega', ping: 'pong', fizz: 'buzz' } 36 | } 37 | const data_subject = { ...object, role: 'foo.bar' } 38 | const data = JSON.stringify({ object, data_subject, attributes: update_attributes }) 39 | const res = await POST('/integration/passthrough', { event: 'PersonalDataModified', data }, { auth: ALICE }) 40 | expect(res).toMatchObject({ status: 204 }) 41 | }) 42 | 43 | test('writes log without id attributes in object and data subject', async () => { 44 | const object = { type: 'foo.bar', id: {} } 45 | const data_subject = { ...object, role: 'foo.bar' } 46 | const data = JSON.stringify({ object, data_subject, attributes: update_attributes }) 47 | 
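// even without any id attributes in object and data subject, the NG backend is expected to accept the entry (204)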
const res = await POST('/integration/passthrough', { event: 'PersonalDataModified', data }, { auth: ALICE }) 48 | expect(res).toMatchObject({ status: 204 }) 49 | }) 50 | 51 | test('rejects log with invalid data', async () => { 52 | await expect( 53 | POST('/integration/passthrough', { event: 'PersonalDataModified', data: '{}' }, { auth: ALICE }) 54 | ).rejects.toThrow('Request failed with: 403 - Forbidden') 55 | }) 56 | 57 | test('writes log for custom event tenantOnboarding', async () => { 58 | const customEvent = 'tenantOnboarding' 59 | const data = JSON.stringify({ 60 | tenantId: 'test-tenant' 61 | }) 62 | const res = await POST('/integration/passthrough', { event: customEvent, data }, { auth: ALICE }) 63 | expect(res).toMatchObject({ status: 204 }) 64 | }) 65 | 66 | test('writes log for custom event userLogoff', async () => { 67 | const customEvent = 'userLogoff' 68 | const data = JSON.stringify({ 69 | logoffType: 'UNSPECIFIED' 70 | }) 71 | const res = await POST('/integration/passthrough', { event: customEvent, data }, { auth: ALICE }) 72 | expect(res).toMatchObject({ status: 204 }) 73 | }) 74 | }) 75 | -------------------------------------------------------------------------------- /lib/access.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | // REVISIT: don't require internal stuff 4 | const getTemplate = require('@sap/cds/libx/_runtime/common/utils/template') 5 | 6 | const { 7 | getRootEntity, 8 | getPick, 9 | createLogEntry, 10 | addObjectID, 11 | addDataSubject, 12 | addDataSubjectForDetailsEntity, 13 | resolveDataSubjects 14 | } = require('./utils') 15 | 16 | let audit 17 | 18 | const _processorFnAccess = (accessLogs, model, req) => { 19 | return ({ row, key, element, plain }) => { 20 | if (row.IsActiveEntity === false) return 21 | 22 | const entity = getRootEntity(element) 23 | 24 | // create or augment log entry 25 | const entry = createLogEntry(accessLogs, entity, row) 26 | 27 | // process categories 28 | for (const category of plain.categories) { 29 | if (category === 'ObjectID') addObjectID(entry, row, key) 30 | else if (category === 'DataSubjectID') addDataSubject(entry, row, key, entity) 31 | else if (category === 'IsPotentiallySensitive' && key in row) { 32 | if (!entry.attributes.some(e => e.name === key)) entry.attributes.push({ name: key }) 33 | // REVISIT: attribute vs. attachment? 
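// the `some` check above ensures each sensitive attribute is recorded only once per log entry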
34 | } 35 | } 36 | 37 | // add promise to determine data subject if a DataSubjectDetails entity 38 | const semantics = entity['@PersonalData.EntitySemantics'] 39 | if ( 40 | (semantics === 'DataSubjectDetails' || semantics === 'Other') && 41 | Object.keys(entry.data_subject.id).length === 0 // > id still an empty object -> promise not yet set 42 | ) { 43 | addDataSubjectForDetailsEntity(row, entry, req, entity, model) 44 | } 45 | } 46 | } 47 | 48 | const auditAccess = async function (data, req) { 49 | if (!cds.env.requires['audit-log'].handle?.includes('READ')) return 50 | 51 | if (typeof data !== 'object' || data == null) return 52 | 53 | const mock = Object.assign({ name: req.target._service.name, model: this.model }) 54 | const template = getTemplate('personal_read', mock, req.target, { pick: getPick('READ') }) 55 | if (!template.elements.size) return 56 | 57 | const accessLogs = {} 58 | const processFn = _processorFnAccess(accessLogs, this.model, req) 59 | // cds internal templating mechanism api changed in 8.2.0 -> polyfill 60 | if (!template.process) { 61 | module.exports._templateProcessor ??= require('@sap/cds/libx/_runtime/common/utils/templateProcessor') 62 | template.process = (data, processFn) => { 63 | const _data = Array.isArray(data) ? data : [data] 64 | _data.forEach(row => module.exports._templateProcessor({ processFn, row, template })) 65 | } 66 | } 67 | template.process(data, processFn) 68 | 69 | for (const each of Object.keys(accessLogs)) if (!accessLogs[each].attributes.length) delete accessLogs[each] 70 | if (!Object.keys(accessLogs).length) return 71 | 72 | await resolveDataSubjects(accessLogs, req) 73 | const accesses = Object.values(accessLogs).filter(ele => ele.attributes.length) 74 | if (!accesses.length) return 75 | 76 | audit = audit || (await cds.connect.to('audit-log')) 77 | 78 | await Promise.all(accesses.map(access => audit.log('SensitiveDataRead', access))) 79 | } 80 | 81 | module.exports = { 82 | auditAccess 83 | } 84 | -------------------------------------------------------------------------------- /test/personal-data/srv/fiori-service.cds: -------------------------------------------------------------------------------- 1 | using {sap.auditlog.test.personal_data.db as db} from '../db/schema'; 2 | 3 | @path : '/fiori-1' 4 | @requires: 'admin' 5 | service Fiori_1 { 6 | @odata.draft.enabled 7 | entity Orders as projection on db.Orders; 8 | 9 | entity OrderHeader as projection on db.OrderHeader; 10 | entity OrderItems as projection on db.OrderItems; 11 | entity Pages as projection on db.Pages; 12 | 13 | @odata.draft.enabled 14 | entity Customers as projection on db.Customers; 15 | 16 | entity CustomerPostalAddress as projection on db.CustomerPostalAddress; 17 | entity Comments as projection on db.Comments; 18 | entity CustomerStatus as projection on db.CustomerStatus; 19 | entity StatusChange as projection on db.StatusChange; 20 | entity LastOne as projection on db.LastOne; 21 | entity Notes as projection on db.Notes; 22 | 23 | entity AddressAttachment as 24 | projection on db.AddressAttachment { 25 | *, 26 | address.customer as customer 27 | } 28 | 29 | annotate Orders with @PersonalData: {EntitySemantics: 'Other'} { 30 | misc @PersonalData.IsPotentiallySensitive; 31 | } 32 | 33 | annotate OrderHeader with @PersonalData: {EntitySemantics: 'Other'} { 34 | description @PersonalData.IsPotentiallySensitive; 35 | } 36 | 37 | annotate OrderHeader.sensitiveData with @PersonalData: {EntitySemantics: 'Other'} { 38 | note @PersonalData.IsPotentiallySensitive; 39 | } 
40 | 41 | annotate Pages with @PersonalData : {EntitySemantics: 'DataSubject' 42 | // no DataSubjectRole for testing purposes 43 | } { 44 | ID @PersonalData.FieldSemantics: 'DataSubjectID'; 45 | sensitive @PersonalData.IsPotentiallySensitive; 46 | personal @PersonalData.IsPotentiallyPersonal; 47 | } 48 | 49 | annotate Customers with @PersonalData : { 50 | EntitySemantics: 'DataSubject', 51 | DataSubjectRole: 'Customer' 52 | } { 53 | ID @PersonalData.FieldSemantics: 'DataSubjectID'; 54 | emailAddress @PersonalData.IsPotentiallyPersonal @PersonalData.FieldSemantics: 'DataSubjectID'; 55 | firstName @PersonalData.IsPotentiallyPersonal; 56 | lastName @PersonalData.IsPotentiallyPersonal; 57 | creditCardNo @PersonalData.IsPotentiallySensitive; 58 | } 59 | 60 | annotate CustomerPostalAddress with @PersonalData: {EntitySemantics: 'DataSubjectDetails'} { 61 | customer @PersonalData.FieldSemantics : 'DataSubjectID'; 62 | street @PersonalData.IsPotentiallySensitive; 63 | town @PersonalData.IsPotentiallyPersonal; 64 | } 65 | 66 | annotate CustomerStatus with @PersonalData: {EntitySemantics: 'DataSubjectDetails'} { 67 | description @PersonalData.IsPotentiallySensitive; 68 | todo @PersonalData.IsPotentiallyPersonal; 69 | } 70 | 71 | annotate StatusChange with @PersonalData: {EntitySemantics: 'DataSubjectDetails'} { 72 | description @PersonalData.IsPotentiallySensitive; 73 | secondKey @PersonalData.IsPotentiallyPersonal; 74 | } 75 | 76 | annotate LastOne with @PersonalData: {EntitySemantics: 'DataSubjectDetails'} { 77 | lastOneField @PersonalData.IsPotentiallySensitive; 78 | } 79 | 80 | annotate AddressAttachment with @PersonalData: {EntitySemantics: 'DataSubjectDetails'} { 81 | customer @PersonalData.FieldSemantics : 'DataSubjectID'; 82 | description @PersonalData.IsPotentiallySensitive; 83 | todo @PersonalData.IsPotentiallyPersonal; 84 | } 85 | 86 | annotate Notes with @PersonalData: {EntitySemantics: 'Other'} { 87 | note @PersonalData.IsPotentiallySensitive; 88 | dummyArray @PersonalData.IsPotentiallyPersonal; 89 | } 90 | } 91 | 92 | @path : '/fiori-2' 93 | @requires: 'admin' 94 | service Fiori_2 { 95 | @odata.draft.enabled 96 | entity Customers as projection on db.Customers; 97 | 98 | entity CustomerPostalAddress as projection on db.CustomerPostalAddress; 99 | 100 | entity AddressAttachment as 101 | projection on db.AddressAttachment { 102 | *, 103 | address.customer as customer 104 | } 105 | 106 | annotate Customers with @PersonalData : {EntitySemantics: 'Other'} { 107 | addresses @PersonalData.FieldSemantics: 'DataSubjectID'; 108 | } 109 | 110 | annotate CustomerPostalAddress with @PersonalData: { 111 | EntitySemantics: 'DataSubject', 112 | DataSubjectRole: 'Address' 113 | } { 114 | ID @PersonalData.FieldSemantics : 'DataSubjectID'; 115 | street @PersonalData.IsPotentiallyPersonal @PersonalData.FieldSemantics: 'DataSubjectID'; 116 | town @PersonalData.IsPotentiallyPersonal @PersonalData.FieldSemantics: 'DataSubjectID'; 117 | someOtherField @PersonalData.IsPotentiallySensitive; 118 | } 119 | } 120 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | 3 | All notable changes to this project will be documented in this file. 4 | This project adheres to [Semantic Versioning](http://semver.org/). 5 | The format is based on [Keep a Changelog](http://keepachangelog.com/). 
6 | 7 | ## Version 1.1.1 - 2025-10-27 8 | 9 | ### Fixed 10 | 11 | - Data subject lookup when data subject is not exposed in the same service 12 | 13 | ## Version 1.1.0 - 2025-09-08 14 | 15 | ### Added 16 | 17 | - `audit-log-to-alsng`: Support for sending generic audit log events 18 | 19 | ### Fixed 20 | 21 | - `audit-log-to-alsng`: Correctly retrieve `appId` from the `VCAP_APPLICATION` environment variable 22 | 23 | ## Version 1.0.1 - 2025-08-05 24 | 25 | ### Fixed 26 | 27 | - `audit-log-to-alsng`: EventDataPayload to Support Multi-Key Object and DataSubject IDs 28 | 29 | ## Version 1.0.0 - 2025-07-11 30 | 31 | ### Added 32 | 33 | - Beta support for next generation SAP Audit Log Service 34 | - Use explicit kind `audit-log-to-alsng` or alpha auto-detect kind `audit-log-to-als` 35 | 36 | ## Version 0.9.0 - 2025-06-05 37 | 38 | ### Added 39 | 40 | - Support for `@sap/cds^9` 41 | 42 | ## Version 0.8.3 - 2025-04-09 43 | 44 | ### Fixed 45 | 46 | - Preparation for `@sap/cds^9` 47 | 48 | ## Version 0.8.2 - 2024-11-27 49 | 50 | ### Fixed 51 | 52 | - Erroneous modification log for non-updated key properties 53 | - Error during non-modifying queries on database level 54 | - Specify charset UTF-8 for requests to SAP Audit Log Service 55 | 56 | ## Version 0.8.1 - 2024-09-13 57 | 58 | ### Fixed 59 | 60 | - Support for `@sap/cds^8.2` 61 | - Reduce clutter in error raised for outbound requests 62 | 63 | ## Version 0.8.0 - 2024-05-24 64 | 65 | ### Added 66 | 67 | - Allow to specify undefined tenant in order to log to provider account in multi-tenant scenarios 68 | 69 | ### Changed 70 | 71 | - Use kind `audit-log-to-restv2` in profile `hybrid` 72 | 73 | ## Version 0.7.0 - 2024-05-15 74 | 75 | ### Added 76 | 77 | - Automatically promote entities that are associated with data subjects 78 | 79 | ## Version 0.6.0 - 2024-02-05 80 | 81 | ### Added 82 | 83 | - Support for `@sap/cds^7.5` 84 | 85 | ### Fixed 86 | 87 | - Automatic personal data modification logging for data subject details with renamed keys 88 | - Data subject resolution in circular models 89 | 90 | ## Version 0.5.2 - 2023-12-08 91 | 92 | ### Fixed 93 | 94 | - Automatic personal data modification logging for deep data structures with renamings 95 | 96 | ## Version 0.5.1 - 2023-11-30 97 | 98 | ### Fixed 99 | 100 | - Falsy early exit during bootstrapping in case a service does not contain personal data 101 | 102 | ## Version 0.5.0 - 2023-11-22 103 | 104 | ### Added 105 | 106 | - Common log entry fields `uuid`, `tenant`, `user` and `time` can be provided manually 107 | 108 | ## Version 0.4.0 - 2023-10-24 109 | 110 | ### Added 111 | 112 | - Support for Premium plan of SAP Audit Log Service 113 | - Support for XSUAA credential type `x509` 114 | - Support for generic outbox 115 | 116 | ### Changed 117 | 118 | - Always use outbox (as configured in project) 119 | 120 | ### Fixed 121 | 122 | - Avoid dangling `SELECT`s to resolve data subject IDs, which resulted in "Transaction already closed" errors 123 | 124 | ## Version 0.3.2 - 2023-10-11 125 | 126 | ### Fixed 127 | 128 | - If the request has no tenant (e.g., Unauthorized), the audit log shall be sent to the provider account 129 | 130 | ## Version 0.3.1 - 2023-09-25 131 | 132 | ### Fixed 133 | 134 | - Defaulting of `@PersonalData.DataSubjectRole` to entity name 135 | - Overriding service configuration 136 | 137 | ## Version 0.3.0 - 2023-09-05 138 | 139 | ### Changed 140 | 141 | - Default value for `cds.requires['audit-log'].handle` changed to `['READ', 'WRITE']`, i.e., accessing sensitive data is now logged
by default. 142 | 143 | ## Version 0.2.0 - 2023-09-01 144 | 145 | ### Added 146 | 147 | - Export class `AuditLogService` for extending in custom implementations as follows: 148 | ```js 149 | const { AuditLogService } = require('@cap-js/audit-logging') 150 | class MyAuditLogService extends AuditLogService { 151 | async init() { 152 | [...] 153 | // call AuditLogService's init 154 | await super.init() 155 | } 156 | } 157 | module.exports = MyAuditLogService 158 | ``` 159 | 160 | ## Version 0.1.0 - 2023-08-18 161 | 162 | ### Added 163 | 164 | - New API: 165 | - `await audit.log('', )` for asynchronous logs (cf. `emit`) 166 | - `await audit.logSync('', )` for synchronous logs (cf. `send`) 167 | - New REST API-based schema with auto-filled `LogEntry` aspect 168 | - New events `SensitiveDataRead`, `PersonalDataModified`, `ConfigurationModified`, and `SecurityEvent` 169 | - Full support for OAuth2 plan of SAP Audit Log Service 170 | 171 | ### Changed 172 | 173 | - Whether reading sensitive data and modifying personal data is logged is determined by `cds.requires['audit-log'].handle: [...]`. 174 | Possible values in the array are `READ` and/ or `WRITE`, with `WRITE` as the sole default entry. 175 | Hence, accessing sensitive data is not logged by default. 176 | - Integration with SAP Audit Log Service via REST API instead of client library (`@sap/audit-logging`) 177 | 178 | ### Fixed 179 | 180 | - Various glitches in log calculation 181 | 182 | ### Removed 183 | 184 | - Old events `dataAccessLog`, `dataModificationLog`, `configChangeLog`, and `securityLog` 185 | - `@AuditLog.Operation` annotations are ignored. Having the plugin as dependency signals the intent to audit log. 186 | - `cds.features.audit_personal_data: true` is no longer necessary. Instead, simply add the plugin as a dependency. 
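For illustration only (not part of the original release notes), a minimal usage sketch of the `log`/`logSync` API added in version 0.1.0, assuming an app with this plugin installed; the wrapping module and concrete payload values are assumptions:

```js
const cds = require('@sap/cds')

module.exports = async function () {
  // connect to the configured audit-log service (kind/impl per cds.requires['audit-log'])
  const audit = await cds.connect.to('audit-log')

  // asynchronous, outboxed log (cf. emit)
  await audit.log('SecurityEvent', { data: { action: 'login attempt' }, ip: '127.0.0.1' })

  // synchronous log (cf. send)
  await audit.logSync('ConfigurationModified', {
    object: { type: 'my.Settings', id: { ID: '42' } },
    attributes: [{ name: 'theme', old: 'light', new: 'dark' }]
  })
}
```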
187 | -------------------------------------------------------------------------------- /test/personal-data/db/schema.cds: -------------------------------------------------------------------------------- 1 | using {cuid} from '@sap/cds/common'; 2 | 3 | namespace sap.auditlog.test.personal_data.db; 4 | 5 | entity Orders : cuid { 6 | header : Composition of one OrderHeader; 7 | items : Composition of many OrderItems 8 | on $self = items.order; 9 | misc : String; 10 | } 11 | 12 | entity OrderItems : cuid { 13 | name : String; 14 | order : Association to Orders; 15 | customer : Association to Customers; 16 | } 17 | 18 | entity OrderHeader : cuid { 19 | description : String; 20 | sensitiveData : Composition of one SensitiveData; 21 | } 22 | 23 | aspect SensitiveData : cuid { 24 | customer : Association to Customers; 25 | note : String; 26 | } 27 | 28 | entity Pages { 29 | key ID : Integer; 30 | personal : Integer; 31 | sensitive : Integer; 32 | } 33 | 34 | entity Customers : cuid { 35 | emailAddress : String; 36 | firstName : String; 37 | lastName : String; 38 | creditCardNo : String(16); 39 | someOtherField : String(128); 40 | addresses : Composition of many CustomerPostalAddress 41 | on addresses.customer = $self; 42 | comments : Composition of many Comments 43 | on comments.customer = $self; 44 | status : Composition of CustomerStatus; 45 | } 46 | 47 | entity CustomerPostalAddress : cuid { 48 | customer : Association to one Customers @assert.integrity: false; 49 | street : String(128); 50 | town : String(128); 51 | someOtherField : String(128); 52 | attachments : Composition of many AddressAttachment 53 | on attachments.address = $self; 54 | } 55 | 56 | entity Comments : cuid { 57 | customer : Association to one Customers; 58 | text : String; 59 | } 60 | 61 | entity CustomerStatus : cuid { 62 | description : String; 63 | todo : String; 64 | change : Composition of StatusChange; 65 | notes : Composition of many Notes 66 | on notes.customerStatus = $self; 67 | } 68 | 69 | entity StatusChange { 70 | key ID : UUID; 71 | key secondKey : String; 72 | description : String; 73 | last : Composition of LastOne; 74 | } 75 | 76 | entity LastOne : cuid { 77 | lastOneField : String; 78 | } 79 | 80 | entity AddressAttachment : cuid { 81 | description : String; 82 | todo : String; 83 | notAnnotated : String; 84 | address : Association to one CustomerPostalAddress; 85 | notes : Composition of many Notes 86 | on notes.attachment = $self; 87 | } 88 | 89 | type dummies { 90 | dummy : String; 91 | } 92 | 93 | entity Notes : cuid { 94 | note : String; 95 | attachment : Association to AddressAttachment; 96 | customerStatus : Association to CustomerStatus; 97 | dummyArray : many dummies; 98 | } 99 | 100 | entity Employees : cuid { 101 | name : { 102 | first : String; 103 | last : String; 104 | }; 105 | notes : many String; 106 | skills : many String; 107 | } 108 | 109 | entity RBase : cuid { 110 | emailAddress : String; 111 | firstName : String; 112 | lastName : String; 113 | creditCardNo : String(16); 114 | } 115 | 116 | annotate RBase with @PersonalData : { 117 | EntitySemantics: 'DataSubject', 118 | DataSubjectRole: 'RBase' 119 | } { 120 | ID @PersonalData.FieldSemantics: 'DataSubjectID'; 121 | emailAddress @PersonalData.IsPotentiallyPersonal; 122 | firstName @PersonalData.IsPotentiallyPersonal; 123 | lastName @PersonalData.IsPotentiallyPersonal; 124 | creditCardNo @PersonalData.IsPotentiallySensitive; 125 | } 126 | 127 | entity MainEntities { 128 | key ID : UUID; 129 | name : String; 130 | subEntities : 
Composition of many SubEntities 131 | on subEntities.mainEntity = $self; 132 | } 133 | 134 | entity SubEntities { 135 | key ID : UUID; 136 | name : String; 137 | mainEntity : Association to MainEntities; 138 | } 139 | 140 | annotate MainEntities with @PersonalData: { 141 | EntitySemantics: 'DataSubject', 142 | DataSubjectRole: 'MainEntity' 143 | } { 144 | ID @PersonalData.FieldSemantics : 'DataSubjectID'; 145 | name @PersonalData.IsPotentiallyPersonal; 146 | } 147 | 148 | annotate SubEntities with @PersonalData : {EntitySemantics: 'DataSubjectDetails'} { 149 | mainEntity @PersonalData.FieldSemantics: 'DataSubjectID'; 150 | name @PersonalData.IsPotentiallyPersonal; 151 | } 152 | 153 | entity A { 154 | key ID : UUID; 155 | text : String; 156 | b : Association to B; 157 | c : Association to C; 158 | } 159 | 160 | entity B { 161 | key ID : UUID; 162 | text : String; 163 | a : Association to A; 164 | c : Association to C; 165 | } 166 | 167 | entity C { 168 | key ID : UUID; 169 | text : String; 170 | } 171 | 172 | entity D { 173 | key ID : UUID; 174 | text : String; 175 | c : Association to C; 176 | } 177 | 178 | annotate A with @PersonalData : {EntitySemantics: 'DataSubjectDetails'} { 179 | c @PersonalData.FieldSemantics: 'DataSubjectID'; 180 | text @PersonalData.IsPotentiallyPersonal; 181 | } 182 | 183 | annotate B with @PersonalData : {EntitySemantics: 'DataSubjectDetails'} { 184 | c @PersonalData.FieldSemantics: 'DataSubjectID'; 185 | text @PersonalData.IsPotentiallyPersonal; 186 | } 187 | 188 | annotate C with @PersonalData : {EntitySemantics: 'DataSubject'} { 189 | ID @PersonalData.FieldSemantics: 'DataSubjectID'; 190 | text @PersonalData.IsPotentiallyPersonal; 191 | } 192 | 193 | annotate D with @PersonalData : {EntitySemantics: 'Other'} { 194 | c @PersonalData.FieldSemantics: 'DataSubjectID'; 195 | text @PersonalData.IsPotentiallyPersonal; 196 | } 197 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Welcome to @cap-js/audit-logging 2 | 3 | [![REUSE status](https://api.reuse.software/badge/github.com/cap-js/audit-logging)](https://api.reuse.software/info/github.com/cap-js/audit-logging) 4 | 5 | `@cap-js/audit-logging` is a CDS plugin providing integration to the SAP Audit Log service as well as out-of-the-box personal data-related audit logging based on annotations. 6 | 7 | Documentation can be found at [cap.cloud.sap](https://cap.cloud.sap/docs/guides/data-privacy). 8 | 9 | > [!IMPORTANT] 10 | > The information in this file is by no means complete but enables you to get started quickly. Make sure to read the provided documentation at [cap.cloud.sap](https://cap.cloud.sap/docs/guides/data-privacy) to get the full picture. 11 | 12 | 13 | ## Preliminaries 14 | 15 | In this guide, we use the [Incidents Management reference sample app](https://github.com/cap-js/incidents-app) as the base to add audit logging to. Clone the repository and apply the step-by-step instructions: 16 | 17 | ```sh 18 | git clone https://github.com/cap-js/incidents-app 19 | cd incidents-app 20 | npm i 21 | ``` 22 | 23 | 24 | ## Setup 25 | 26 | To enable audit logging, simply add this self-configuring plugin package to your project: 27 | 28 | ```sh 29 | npm add @cap-js/audit-logging 30 | ``` 31 | 32 | 33 | ## Annotate Personal Data 34 | 35 | Identify entities and elements (potentially) holding personal data using `@PersonalData` annotations.
Create a `db/data-privacy.cds` file and add the following: 36 | 37 | ```cds 38 | using { sap.capire.incidents as my } from './schema'; 39 | 40 | annotate my.Customers with @PersonalData : { 41 | DataSubjectRole : 'Customer', 42 | EntitySemantics : 'DataSubject' 43 | } { 44 | ID @PersonalData.FieldSemantics: 'DataSubjectID'; 45 | firstName @PersonalData.IsPotentiallyPersonal; 46 | lastName @PersonalData.IsPotentiallyPersonal; 47 | email @PersonalData.IsPotentiallyPersonal; 48 | phone @PersonalData.IsPotentiallyPersonal; 49 | creditCardNo @PersonalData.IsPotentiallySensitive; 50 | }; 51 | 52 | annotate my.Addresses with @PersonalData: { 53 | EntitySemantics : 'DataSubjectDetails' 54 | } { 55 | customer @PersonalData.FieldSemantics: 'DataSubjectID'; 56 | city @PersonalData.IsPotentiallyPersonal; 57 | postCode @PersonalData.IsPotentiallyPersonal; 58 | streetAddress @PersonalData.IsPotentiallyPersonal; 59 | }; 60 | 61 | annotate my.Incidents with @PersonalData : { 62 | EntitySemantics : 'Other' 63 | } { 64 | customer @PersonalData.FieldSemantics: 'DataSubjectID'; 65 | }; 66 | 67 | ``` 68 | Learn more about the annotations in capire: 69 | - [@PersonalData.EntitySemantics](https://cap.cloud.sap/docs/guides/data-privacy/annotations#entitysemantics) 70 | - [@PersonalData.EntitySemantics: 'DataSubject'](https://cap.cloud.sap/docs/guides/data-privacy/annotations#datasubjectrole) 71 | - [@PersonalData.FieldSemantics: 'DataSubjectID'](https://cap.cloud.sap/docs/guides/data-privacy/annotations#fieldsemantics-datasubjectid) 72 | - [@PersonalData.IsPotentiallyPersonal](https://cap.cloud.sap/docs/guides/data-privacy/annotations#ispotentiallypersonal) 73 | - [@PersonalData.IsPotentiallySensitive](https://cap.cloud.sap/docs/guides/data-privacy/annotations#ispotentiallysensitive) 74 | 75 | 76 | ## Test-Drive Locally 77 | 78 | You've prepared everything to log personal data-related events. Let's see that in action. 79 | 80 | Start the server as usual: 81 | ```sh 82 | cds watch 83 | ``` 84 | 85 | Send an update request that changes personal data: 86 | ```http 87 | PATCH http://localhost:4004/odata/v4/admin/Customers('1004155') 88 | Authorization: Basic alice:in-wonderland 89 | Content-Type: application/json 90 | 91 | { 92 | "firstName": "Danny", 93 | "lastName": "Joules" 94 | } 95 | ``` 96 | 97 | See the audit logs in the server's console output: 98 | ```sh 99 | [audit-log] - PersonalDataModified: { 100 | data_subject: { 101 | id: { ID: '1004155' }, 102 | role: 'Customer', 103 | type: 'AdminService.Customers' 104 | }, 105 | object: { 106 | type: 'AdminService.Customers', 107 | id: { ID: '1004155' } 108 | }, 109 | attributes: [ 110 | { name: 'firstName', old: 'Daniel', new: 'Danny' }, 111 | { name: 'lastName', old: 'Watts', new: 'Joules' } 112 | ], 113 | uuid: '71fa93d9-c993-405f-ba1b-a9ef42668199', 114 | tenant: 't1', 115 | user: 'alice', 116 | time: 2023-02-26T08:13:48.287Z 117 | } 118 | ``` 119 | 120 | 121 | ## In Production 122 | 123 | The end-to-end out-of-the-box functionality provided by this plugin requires a paid-for instance of the [SAP Audit Log service for customers](https://help.sap.com/docs/btp/sap-business-technology-platform/audit-log-write-api-for-customers?locale=en-US). However, it is possible to provide an own implementation that writes the audit logs to a custom store. 
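For illustration, a minimal sketch of such a custom implementation; the file name, log target, and configuration values below are assumptions, not part of this plugin:

```js
// srv/my-audit-log.js (hypothetical path)
const { AuditLogService } = require('@cap-js/audit-logging')

class MyAuditLogService extends AuditLogService {
  async init() {
    // handle all audit log events and write them to a custom store
    this.on('*', function (req) {
      const { event, data } = req
      // replace this with a write to your own persistence (database, SIEM, etc.)
      console.log(`[my-audit-log] - ${event}:`, data)
    })
    // call AuditLogService's init
    await super.init()
  }
}

module.exports = MyAuditLogService
```

Such an implementation could then be activated via the `audit-log` service configuration, for example by pointing `cds.requires['audit-log'].impl` to the file above in your project configuration.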
124 | 125 | [_Learn more about using the SAP Audit Log service._](https://cap.cloud.sap/docs/guides/data-privacy/audit-logging#use-sap-audit-log-service) 126 | 127 | [_Learn more about custom audit logging._](https://cap.cloud.sap/docs/guides/data-privacy/audit-logging#custom-audit-logging) 128 | 129 | 130 | ## Support, Feedback, Contributing 131 | 132 | This project is open to feature requests/suggestions, bug reports etc. via [GitHub issues](https://github.com/cap-js/audit-logging/issues). Contribution and feedback are encouraged and always welcome. For more information about how to contribute, the project structure, as well as additional contribution information, see our [Contribution Guidelines](CONTRIBUTING.md). 133 | 134 | 135 | ## Code of Conduct 136 | 137 | We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone. By participating in this project, you agree to abide by its [Code of Conduct](CODE_OF_CONDUCT.md) at all times. 138 | 139 | 140 | ## Licensing 141 | 142 | Copyright 2023 SAP SE or an SAP affiliate company and contributors. Please see our [LICENSE](LICENSE) for copyright and license information. Detailed information including third-party components and their licensing/copyright information is available [via the REUSE tool](https://api.reuse.software/info/github.com/cap-js/audit-logging). 143 | -------------------------------------------------------------------------------- /lib/_relation.js: -------------------------------------------------------------------------------- 1 | let initializing = false 2 | 3 | class Relation { 4 | constructor(csn, path = []) { 5 | if (!initializing) throw new Error(`Do not new a relation, use 'Relation.to()' instead`) 6 | Object.defineProperty(this, 'csn', { get: () => csn }) 7 | Object.defineProperty(this, 'path', { 8 | get: () => path, 9 | set: _ => { 10 | path = _ 11 | } 12 | }) 13 | if (csn.target) Object.defineProperty(this, 'target', { get: () => csn.target }) 14 | initializing = false 15 | } 16 | 17 | static to(from, name) { 18 | initializing = true 19 | if (!name) return new Relation(from) 20 | return from._elements[name] && new Relation(from._elements[name], [...from.path, name]) 21 | } 22 | 23 | _has(prop) { 24 | return Reflect.has(this, prop) 25 | } 26 | 27 | get _elements() { 28 | if (this.csn.elements) return this.csn.elements 29 | if (this.csn._target && this.csn._target.elements) return this.csn._target.elements 30 | // if (csn.targetAspect) relation.elements = model.definitions[csn.targetAspect].elements 31 | // if (csn.kind = 'type') relation.elements = model.definitions[csn.type].element 32 | return {} 33 | } 34 | 35 | join(fromAlias = '', toAlias = '') { 36 | return _getOnCond(this.csn, this.path, { select: fromAlias, join: toAlias }) 37 | } 38 | } 39 | 40 | const exposeRelation = relation => Object.defineProperty({}, '_', { get: () => relation }) 41 | 42 | const relationHandler = relation => ({ 43 | get: (target, name) => { 44 | const path = name.split(',') 45 | const prop = path.join('_') 46 | if (!target[prop]) { 47 | if (path.length === 1) { 48 | // REVISIT: property 'join' must not be used in CSN to make this working 49 | if (relation._has(prop)) return relation[prop] 50 | const newRelation = Relation.to(relation, prop) 51 | if (newRelation) { 52 | target[prop] = new Proxy(exposeRelation(newRelation), relationHandler(newRelation)) 53 | } 54 | 55 | return target[prop] 56 | } 57 | 58 | target[prop] = path.reduce((relation, value) => relation[value] 
|| relation.csn._relations[value], relation) 59 | target[prop].path = path 60 | } 61 | 62 | return target[prop] 63 | } 64 | }) 65 | 66 | module.exports = { 67 | Relation, 68 | exposeRelation, 69 | relationHandler 70 | } 71 | 72 | // 73 | // ----- utils 74 | // 75 | 76 | const _prefixForStruct = element => { 77 | const prefixes = [] 78 | let parent = element.parent 79 | while (parent && parent.kind !== 'entity') { 80 | prefixes.push(parent.name) 81 | parent = parent.parent 82 | } 83 | return prefixes.length ? prefixes.reverse().join('_') + '_' : '' 84 | } 85 | 86 | const _toRef = (alias, column) => { 87 | if (Array.isArray(column)) column = column.join('_') 88 | return { ref: alias ? [alias, column] : [column] } 89 | } 90 | 91 | const _adaptRefs = (onCond, path, { select, join }) => { 92 | const _adaptEl = el => { 93 | const ref = el.ref 94 | 95 | if (ref) { 96 | if (ref[0] === path.join('_') && ref[1]) { 97 | return _toRef(select, ref.slice(1)) 98 | } 99 | 100 | // no alias for special $user of canonical localized association 101 | if (ref[0] === '$user' && path[0] === 'localized') { 102 | return _toRef(undefined, ref.slice(0)) 103 | } 104 | 105 | return _toRef(join, ref.slice(0)) 106 | } 107 | 108 | if (el.xpr) return { xpr: el.xpr.map(_adaptEl) } 109 | return el 110 | } 111 | 112 | return onCond.map(_adaptEl) 113 | } 114 | 115 | const _replace$selfAndAliasOnCond = (xpr, csnElement, aliases, path) => { 116 | const selfIndex = xpr.findIndex(({ ref }) => ref?.[0] === '$self') 117 | if (selfIndex != -1) { 118 | let backLinkIndex 119 | if (xpr[selfIndex + 1] && xpr[selfIndex + 1] === '=') backLinkIndex = selfIndex + 2 120 | if (xpr[selfIndex - 1] && xpr[selfIndex - 1] === '=') backLinkIndex = selfIndex - 2 121 | if (backLinkIndex != null) { 122 | const ref = xpr[backLinkIndex].ref 123 | const backlinkName = ref[ref.length - 1] 124 | const mutOnCond = _newOnConditions(csnElement._backlink, [backlinkName], { 125 | select: aliases.join, 126 | join: aliases.select 127 | }) 128 | 129 | xpr.splice(Math.min(backLinkIndex, selfIndex), 3, ...mutOnCond) 130 | } 131 | } 132 | 133 | for (let i = 0; i < xpr.length; i++) { 134 | const element = xpr[i] 135 | if (element.xpr) { 136 | _replace$selfAndAliasOnCond(element.xpr, csnElement, aliases, path) 137 | continue 138 | } 139 | 140 | if (element.ref) { 141 | if (element.ref[0] === path.join('_') && element.ref[1]) { 142 | element.ref = _toRef(aliases.select, element.ref.slice(1)).ref 143 | continue 144 | } 145 | 146 | // no alias for special $user of canonical localized association 147 | if (element.ref[0] === '$user' && path[0] === 'localized') { 148 | element.ref = _toRef(undefined, element.ref.slice(0)).ref 149 | continue 150 | } 151 | //no alias for special $now variable 152 | if (element.ref[0] === '$now') { 153 | continue 154 | } 155 | 156 | if (element.ref[0] === aliases.join || element.ref[0] === aliases.select) { 157 | // nothing todo here, as already right alias 158 | continue 159 | } 160 | 161 | element.ref = _toRef(aliases.join, element.ref.slice(0)).ref 162 | } 163 | } 164 | } 165 | 166 | const _args = (csnElement, path, aliases) => { 167 | const onCond = csnElement.on 168 | if (!onCond || onCond.length === 0) return [] 169 | if (onCond.length < 3 && !onCond[0]?.xpr) return onCond 170 | if (!csnElement._isSelfManaged) return _adaptRefs(onCond, path, aliases) 171 | 172 | const onCondCopy = JSON.parse(JSON.stringify(onCond)) 173 | _replace$selfAndAliasOnCond(onCondCopy, csnElement, aliases, path) 174 | 175 | return onCondCopy 176 | } 177 | 178 | 
// this is only for 2one managed w/o on-conditions, i.e. no static values are possible 179 | const _foreignToOn = (csnElement, path, { select, join }) => { 180 | const on = [] 181 | 182 | for (const key of csnElement._foreignKeys) { 183 | if (on.length !== 0) { 184 | on.push('and') 185 | } 186 | 187 | const prefixChild = _prefixForStruct(key.childElement) 188 | const ref1 = _toRef(select, prefixChild + key.childElement.name) 189 | const structPrefix = path.length > 1 ? path.slice(0, -1) : [] 190 | const ref2 = _toRef(join, [...structPrefix, key.parentElement.name]) 191 | on.push(ref1, '=', ref2) 192 | } 193 | 194 | return on 195 | } 196 | 197 | const _newOnConditions = (csnElement, path, aliases) => { 198 | if (csnElement.keys) { 199 | return _foreignToOn(csnElement, path, aliases) 200 | } 201 | 202 | return _args(csnElement, path, aliases) 203 | } 204 | 205 | const _getOnCond = (csnElement, path = [], aliases = { select: '', join: '' }) => { 206 | const onCond = _newOnConditions(csnElement, path, aliases) 207 | return [{ xpr: onCond }] 208 | } 209 | -------------------------------------------------------------------------------- /test/api/api.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | const { axios, POST, GET } = cds.test().in(__dirname) 4 | 5 | // do not throw for 4xx responses 6 | axios.defaults.validateStatus = () => true 7 | 8 | cds.env.requires['audit-log'] = { 9 | kind: 'audit-log-to-console', 10 | impl: '../../srv/log2console', 11 | outbox: { kind: 'in-memory-outbox' } 12 | } 13 | 14 | const wait = require('node:timers/promises').setTimeout 15 | 16 | // Matcher for localhost IPs (handles both IPv6 and IPv4-mapped IPv6) 17 | const localhostIP = expect.stringMatching(/^(::1|::ffff:127\.0\.0\.1)$/) 18 | 19 | describe('AuditLogService API', () => { 20 | let __log, _logs 21 | const _log = (...args) => { 22 | if (!(args.length === 2 && typeof args[0] === 'string' && args[0].match(/\[audit-log\]/i))) { 23 | // > not an audit log (most likely, anyway) 24 | return __log(...args) 25 | } 26 | 27 | _logs.push(args[1]) 28 | } 29 | 30 | const ALICE = { username: 'alice', password: 'password' } 31 | const BOB = { username: 'bob', password: 'password' } 32 | 33 | beforeAll(() => { 34 | __log = global.console.log 35 | global.console.log = _log 36 | }) 37 | 38 | afterAll(() => { 39 | global.console.log = __log 40 | }) 41 | 42 | beforeEach(async () => { 43 | await POST('/api/resetSequence', {}, { auth: ALICE }) 44 | _logs = [] 45 | }) 46 | 47 | describe('default', () => { 48 | test('emit is deferred', async () => { 49 | const response = await POST('/api/testEmit', {}, { auth: ALICE }) 50 | expect(response).toMatchObject({ status: 204 }) 51 | await wait(42) 52 | const { 53 | data: { value: sequence } 54 | } = await GET('/api/getSequence()', { auth: ALICE }) 55 | expect(sequence).toEqual(['request succeeded', 'audit log logged']) 56 | expect(_logs.length).toBe(1) 57 | expect(_logs).toContainMatchObject({ user: 'alice', bar: 'baz' }) 58 | }) 59 | 60 | test('send is immediate', async () => { 61 | const response = await POST('/api/testSend', {}, { auth: ALICE }) 62 | expect(response).toMatchObject({ status: 204 }) 63 | await wait(42) 64 | const { 65 | data: { value: sequence } 66 | } = await GET('/api/getSequence()', { auth: ALICE }) 67 | expect(sequence).toEqual(['audit log logged', 'request succeeded']) 68 | expect(_logs.length).toBe(1) 69 | expect(_logs).toContainMatchObject({ user: 'alice', bar: 'baz' }) 70 | }) 
71 | }) 72 | 73 | describe('new', () => { 74 | test('log is deferred', async () => { 75 | const response = await POST('/api/testLog', {}, { auth: ALICE }) 76 | expect(response).toMatchObject({ status: 204 }) 77 | await wait(42) 78 | const { 79 | data: { value: sequence } 80 | } = await GET('/api/getSequence()', { auth: ALICE }) 81 | expect(sequence).toEqual(['request succeeded', 'audit log logged']) 82 | expect(_logs.length).toBe(1) 83 | expect(_logs).toContainMatchObject({ user: 'alice', bar: 'baz' }) 84 | }) 85 | 86 | test('logSync is immediate', async () => { 87 | const response = await POST('/api/testLogSync', {}, { auth: ALICE }) 88 | expect(response).toMatchObject({ status: 204 }) 89 | await wait(42) 90 | const { 91 | data: { value: sequence } 92 | } = await GET('/api/getSequence()', { auth: ALICE }) 93 | expect(sequence).toEqual(['audit log logged', 'request succeeded']) 94 | expect(_logs.length).toBe(1) 95 | expect(_logs).toContainMatchObject({ user: 'alice', bar: 'baz' }) 96 | }) 97 | }) 98 | 99 | test('the default inspect depth of 2 is enough', async () => { 100 | const audit = await cds.connect.to('audit-log') 101 | await audit.log('foo', { data_subject: { ID: { bar: 'baz' } } }) 102 | await wait(42) 103 | expect(_logs).toContainMatchObject({ data_subject: { ID: { bar: 'baz' } } }) 104 | }) 105 | 106 | describe('common log entry fields', () => { 107 | test('are automatically filled', async () => { 108 | await cds.tx({ tenant: 'bar' }, async () => { 109 | const audit = await cds.connect.to('audit-log') 110 | await audit.log('foo', {}) 111 | }) 112 | await wait(42) 113 | expect(_logs).toContainMatchObject({ 114 | uuid: expect.any(String), 115 | tenant: 'bar', 116 | user: 'anonymous', 117 | time: expect.any(Date) 118 | }) 119 | }) 120 | 121 | test('can be provided manually', async () => { 122 | const time = new Date('2021-01-01T00:00:00.000Z') 123 | await cds.tx({ tenant: 'bar' }, async () => { 124 | const audit = await cds.connect.to('audit-log') 125 | await audit.log('foo', { uuid: 'baz', tenant: 'baz', user: 'baz', time }) 126 | }) 127 | await wait(42) 128 | expect(_logs).toContainMatchObject({ 129 | uuid: 'baz', 130 | tenant: 'baz', 131 | user: 'baz', 132 | time: expect.toBeDateLike() 133 | }) 134 | }) 135 | 136 | test('tenant can be null', async () => { 137 | await cds.tx({ tenant: 'bar' }, async () => { 138 | const audit = await cds.connect.to('audit-log') 139 | await audit.log('foo', { uuid: 'baz', tenant: null, user: 'baz' }) 140 | }) 141 | await wait(42) 142 | expect(_logs).toContainMatchObject({ 143 | uuid: 'baz', 144 | tenant: null, 145 | user: 'baz' 146 | }) 147 | }) 148 | }) 149 | 150 | describe('custom log 403', () => { 151 | test('early reject', async () => { 152 | const response = await GET('/api/Books', { auth: BOB }) 153 | expect(response).toMatchObject({ status: 403 }) 154 | await wait(42) 155 | expect(_logs.length).toBe(1) 156 | expect(_logs).toContainMatchObject({ user: 'bob', ip: localhostIP }) 157 | }) 158 | 159 | test('late reject', async () => { 160 | const response = await GET('/api/Books', { auth: ALICE }) 161 | expect(response).toMatchObject({ status: 403 }) 162 | await wait(42) 163 | expect(_logs.length).toBe(1) 164 | expect(_logs).toContainMatchObject({ user: 'alice', ip: localhostIP }) 165 | }) 166 | 167 | test('early reject in batch', async () => { 168 | const response = await POST( 169 | '/api/$batch', 170 | { requests: [{ method: 'GET', url: '/Books', id: 'r1' }] }, 171 | { auth: BOB } 172 | ) 173 | expect(response).toMatchObject({ status: 403 }) 
174 | await wait(42) 175 | expect(_logs.length).toBeGreaterThan(0) //> coding in ./srv/server.js results in 2 logs on @sap/cds^7 176 | expect(_logs).toContainMatchObject({ user: 'bob', ip: localhostIP }) 177 | }) 178 | 179 | test('late reject in batch', async () => { 180 | const response = await POST( 181 | '/api/$batch', 182 | { requests: [{ method: 'GET', url: '/Books', id: 'r1' }] }, 183 | { auth: ALICE } 184 | ) 185 | expect(response).toMatchObject({ status: 200 }) 186 | expect(response.data.responses[0]).toMatchObject({ status: 403 }) 187 | await wait(42) 188 | expect(_logs.length).toBe(1) 189 | expect(_logs).toContainMatchObject({ user: 'alice', ip: localhostIP }) 190 | }) 191 | }) 192 | }) 193 | -------------------------------------------------------------------------------- /srv/log2restv2.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | const LOG = cds.log('audit-log') 4 | 5 | const AuditLogService = require('./service') 6 | 7 | module.exports = class AuditLog2RESTv2 extends AuditLogService { 8 | async init() { 9 | // credentials stuff 10 | const { credentials } = this.options 11 | if (!credentials) throw new Error('No or malformed credentials for "audit-log"') 12 | if (!credentials.uaa) { 13 | this._plan = 'standard' 14 | this._auth = 'Basic ' + Buffer.from(credentials.user + ':' + credentials.password).toString('base64') 15 | } else { 16 | this._plan = credentials.url.match(/6081/) ? 'premium' : 'oauth2' 17 | this._tokens = new Map() 18 | this._provider = credentials.uaa.tenantid 19 | } 20 | this._vcap = process.env.VCAP_APPLICATION ? JSON.parse(process.env.VCAP_APPLICATION) : null 21 | 22 | this.on('*', function (req) { 23 | const { event, data } = req 24 | 25 | // event.match() is used to support the old event names 26 | if (event === 'SensitiveDataRead' || event.match(/^dataAccess/i)) { 27 | return this._handle(data, 'DATA_ACCESS') 28 | } 29 | if (event === 'PersonalDataModified' || event.match(/^dataModification/i)) { 30 | data.success = true 31 | return this._handle(data, 'DATA_MODIFICATION') 32 | } 33 | if (event === 'ConfigurationModified' || event.match(/^configChange/i)) { 34 | data.success = true 35 | return this._handle(data, 'CONFIGURATION_CHANGE') 36 | } 37 | if (event === 'SecurityEvent' || event.match(/^security/i)) { 38 | if (typeof data.data === 'object') data.data = JSON.stringify(data.data) 39 | return this._handle(data, 'SECURITY_EVENT') 40 | } 41 | 42 | LOG._warn && LOG.warn(`Event "${event}" is not implemented`) 43 | }) 44 | 45 | // call AuditLogService's init 46 | await super.init() 47 | } 48 | 49 | async _getToken(tenant) { 50 | const { _tokens: tokens } = this 51 | if (tokens.has(tenant)) return tokens.get(tenant) 52 | 53 | const { uaa } = this.options.credentials 54 | const url = (uaa.certurl || uaa.url) + '/oauth/token' 55 | const data = { grant_type: 'client_credentials', response_type: 'token', client_id: uaa.clientid } 56 | const options = { headers: { 'content-type': 'application/x-www-form-urlencoded' } } 57 | if (tenant !== this._provider) options.headers['x-zid'] = tenant 58 | // certificate or secret? 59 | if (uaa['credential-type'] === 'x509') { 60 | options.agent = new https.Agent({ cert: uaa.certificate, key: uaa.key }) 61 | } else { 62 | data.client_secret = uaa.clientsecret 63 | } 64 | const urlencoded = Object.keys(data).reduce((acc, cur) => { 65 | acc += (acc ? 
'&' : '') + cur + '=' + data[cur] 66 | return acc 67 | }, '') 68 | try { 69 | const { access_token, expires_in } = await _post(url, urlencoded, options) 70 | tokens.set(tenant, access_token) 71 | // remove token from cache 60 seconds before it expires 72 | setTimeout(() => tokens.delete(tenant), (expires_in - 60) * 1000) 73 | return access_token 74 | } catch (err) { 75 | LOG._trace && LOG.trace('error during token fetch:', err) 76 | // 401 could also mean x-zid is not valid 77 | if (String(err.response?.statusCode).match(/^4\d\d$/)) err.unrecoverable = true 78 | throw err 79 | } 80 | } 81 | 82 | async _send(data, path) { 83 | const headers = { 'content-type': 'application/json;charset=utf-8' } 84 | if (this._vcap) { 85 | headers.XS_AUDIT_ORG = this._vcap.organization_name 86 | headers.XS_AUDIT_SPACE = this._vcap.space_name 87 | headers.XS_AUDIT_APP = this._vcap.application_name 88 | } 89 | let url 90 | if (this._plan === 'standard') { 91 | url = this.options.credentials.url + PATHS.STANDARD[path] 92 | headers.authorization = this._auth 93 | } else { 94 | url = this.options.credentials.url + PATHS.OAUTH2[path] 95 | data.tenant ??= this._provider //> if request has no tenant, stay in provider account 96 | if (data.tenant === '$PROVIDER') data.tenant = this._provider 97 | headers.authorization = 'Bearer ' + (await this._getToken(data.tenant)) 98 | data.tenant = data.tenant === this._provider ? '$PROVIDER' : '$SUBSCRIBER' 99 | } 100 | if (LOG._debug) { 101 | const _headers = Object.assign({}, headers, { authorization: headers.authorization.split(' ')[0] + ' ***' }) 102 | LOG.debug(`sending audit log to ${url} with tenant "${data.tenant}", user "${data.user}", and headers`, _headers) 103 | } 104 | try { 105 | await _post(url, data, { headers }) 106 | } catch (err) { 107 | LOG._trace && LOG.trace('error during log send:', err) 108 | // 429 (rate limit) is not unrecoverable 109 | if (String(err.response?.statusCode).match(/^4\d\d$/) && err.response?.statusCode !== 429) 110 | err.unrecoverable = true 111 | throw err 112 | } 113 | } 114 | 115 | async _handle(logs, path) { 116 | if (!Array.isArray(logs)) logs = [logs] 117 | 118 | // write the logs 119 | const errors = [] 120 | await Promise.all(logs.map(log => this._send(log, path).catch(err => errors.push(err)))) 121 | if (errors.length) throw _getErrorToThrow(errors) 122 | } 123 | } 124 | 125 | /* 126 | * consts 127 | */ 128 | 129 | const PATHS = { 130 | STANDARD: { 131 | DATA_ACCESS: '/audit-log/v2/data-accesses', 132 | DATA_MODIFICATION: '/audit-log/v2/data-modifications', 133 | CONFIGURATION_CHANGE: '/audit-log/v2/configuration-changes', 134 | SECURITY_EVENT: '/audit-log/v2/security-events' 135 | }, 136 | OAUTH2: { 137 | DATA_ACCESS: '/audit-log/oauth2/v2/data-accesses', 138 | DATA_MODIFICATION: '/audit-log/oauth2/v2/data-modifications', 139 | CONFIGURATION_CHANGE: '/audit-log/oauth2/v2/configuration-changes', 140 | SECURITY_EVENT: '/audit-log/oauth2/v2/security-events' 141 | } 142 | } 143 | 144 | /* 145 | * utils 146 | */ 147 | 148 | const https = require('https') 149 | 150 | async function _post(url, data, options) { 151 | options.method ??= 'POST' 152 | return new Promise((resolve, reject) => { 153 | const req = https.request(url, options, res => { 154 | const chunks = [] 155 | res.on('data', chunk => chunks.push(chunk)) 156 | res.on('end', () => { 157 | const { statusCode, statusMessage } = res 158 | let body = Buffer.concat(chunks).toString() 159 | if (res.headers['content-type']?.match(/json/)) body = JSON.parse(body) 160 | if 
(res.statusCode >= 400) { 161 | // prettier-ignore 162 | const err = new Error(`Request failed with${statusMessage ? `: ${statusCode} - ${statusMessage}` : ` status ${statusCode}`}`) 163 | err.request = { method: options.method, url, headers: options.headers, body: data } 164 | if (err.request.headers.authorization) 165 | err.request.headers.authorization = err.request.headers.authorization.split(' ')[0] + ' ***' 166 | err.response = { statusCode, statusMessage, headers: res.headers, body } 167 | reject(err) 168 | } else { 169 | resolve(body) 170 | } 171 | }) 172 | }) 173 | req.on('error', reject) 174 | req.write(typeof data === 'object' ? JSON.stringify(data) : data) 175 | req.end() 176 | }) 177 | } 178 | 179 | function _getErrorToThrow(errors) { 180 | if (errors.length === 1) return errors[0] 181 | const error = new cds.error('MULTIPLE_ERRORS') 182 | error.details = errors 183 | if (errors.some(e => e.unrecoverable)) error.unrecoverable = true 184 | return error 185 | } 186 | -------------------------------------------------------------------------------- /srv/log2alsng.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | const LOG = cds.log('audit-log') 3 | 4 | const https = require('https') 5 | 6 | const AuditLogService = require('./service') 7 | 8 | module.exports = class AuditLog2ALSNG extends AuditLogService { 9 | constructor() { 10 | super() 11 | this._vcap = JSON.parse(process.env.VCAP_SERVICES || '{}') 12 | this._userProvided = this._vcap['user-provided']?.find(obj => obj.tags.includes('auditlog-ng')) || {} 13 | if (!this._userProvided.credentials) throw new Error('No credentials found for SAP Audit Log Service NG') 14 | this._vcapApplication = JSON.parse(process.env.VCAP_APPLICATION || '{}') 15 | } 16 | 17 | async init() { 18 | this.on('*', function (req) { 19 | const { event, data } = req 20 | return this.eventMapper(event, data) 21 | }) 22 | await super.init() 23 | } 24 | 25 | eventMapper(event, data) { 26 | const known = { 27 | PersonalDataModified: () => this.logEvent('dppDataModification', data), 28 | SensitiveDataRead: () => this.logEvent('dppDataAccess', data), 29 | ConfigurationModified: () => this.logEvent('configurationChange', data), 30 | SecurityEvent: () => this.logEvent('legacySecurityWrapper', data) 31 | } 32 | const dfault = () => this.logEvent(event, data) 33 | return (known[event] ?? dfault)() 34 | } 35 | 36 | flattenAndSortIdObject(id) { 37 | if (!id || !Object.keys(id).length) return 'not provided' 38 | 39 | let s = '' 40 | for (const k of Object.keys(id).sort()) s += `${k}:${id[k]} ` 41 | return s.trim() 42 | } 43 | 44 | eventDataPayload(event, data) { 45 | const object = data['object'] || { type: 'not provided', id: { ID: 'not provided' } } 46 | const channel = data['channel'] || { type: 'not specified', id: 'not specified' } 47 | const subject = data['data_subject'] || { type: 'not provided', id: { ID: 'not provided' } } 48 | const attributes = data['attributes'] || [{ name: 'not provided', old: 'not provided', new: 'not provided' }] 49 | const objectId = this.flattenAndSortIdObject(object['id']) 50 | const oldValue = attributes[0]['old'] ?? '' 51 | const newValue = attributes[0]['new'] ?? 
'' 52 | const dataSubjectId = this.flattenAndSortIdObject(subject['id']) 53 | 54 | const known = { 55 | dppDataModification: { 56 | objectType: object['type'], 57 | objectId: objectId, 58 | attribute: attributes[0]['name'], 59 | oldValue: oldValue, 60 | newValue: newValue, 61 | dataSubjectType: subject['type'], 62 | dataSubjectId: dataSubjectId 63 | }, 64 | dppDataAccess: { 65 | channelType: channel['type'], 66 | channelId: channel['id'], 67 | dataSubjectType: subject['type'], 68 | dataSubjectId: dataSubjectId, 69 | objectType: object['type'], 70 | objectId: objectId, 71 | attribute: attributes[0]['name'] 72 | }, 73 | configurationChange: { 74 | propertyName: attributes[0]['name'], 75 | oldValue: oldValue, 76 | newValue: newValue, 77 | objectType: object['type'], 78 | objectId: objectId 79 | }, 80 | legacySecurityWrapper: { 81 | origEvent: JSON.stringify({ 82 | ...data, 83 | data: 84 | typeof data.data === 'object' && data.data !== null && !Array.isArray(data.data) 85 | ? JSON.stringify(data.data) 86 | : data.data 87 | }) 88 | } 89 | } 90 | if (event in known) return known[event] 91 | 92 | // For unknown events, remove common audit log entry fields from the event payload 93 | if (typeof data === 'object' && data !== null) { 94 | const rest = this.removeCommonAuditLogFields(data) 95 | return rest 96 | } 97 | 98 | return data 99 | } 100 | 101 | removeCommonAuditLogFields(obj) { 102 | if (typeof obj !== 'object' || obj === null) return obj 103 | const { ...rest } = obj 104 | delete rest.uuid 105 | delete rest.user 106 | delete rest.time 107 | delete rest.tenant 108 | return rest 109 | } 110 | 111 | eventPayload(event, data) { 112 | const tenant = cds.context?.tenant || null 113 | const timestamp = new Date().toISOString() 114 | 115 | const eventData = { 116 | id: cds.utils.uuid(), 117 | specversion: 1, 118 | source: `/${this._userProvided.credentials?.region}/${this._userProvided.credentials?.namespace}/${tenant}`, 119 | type: event, 120 | time: timestamp, 121 | data: { 122 | metadata: { 123 | ts: timestamp, 124 | appId: this._vcapApplication.application_id || 'default app', 125 | infrastructure: { 126 | other: { 127 | runtimeType: 'Node.js' 128 | } 129 | }, 130 | platform: { 131 | other: { 132 | platformName: 'CAP' 133 | } 134 | } 135 | }, 136 | data: { 137 | [event]: this.eventDataPayload(event, data) 138 | } 139 | } 140 | } 141 | 142 | return eventData 143 | } 144 | 145 | formatEventData(event, data) { 146 | if (event === 'legacySecurityWrapper') { 147 | return JSON.stringify([this.eventPayload(event, data)]) 148 | } 149 | 150 | if (event in { dppDataModification: 1, dppDataAccess: 1, configurationChange: 1 }) { 151 | const eventData = data['attributes']?.map(attr => { 152 | return this.eventPayload(event, { 153 | ...data, 154 | attributes: [attr] 155 | }) 156 | }) 157 | return JSON.stringify(eventData || []) 158 | } 159 | 160 | // Always wrap event in an envelope for custom events 161 | return JSON.stringify([this.eventPayload(event, data)]) 162 | } 163 | 164 | logEvent(event, data) { 165 | const passphrase = this._userProvided.credentials?.keyPassphrase 166 | const url = new URL(`${this._userProvided.credentials?.url}/ingestion/v1/events`) 167 | const eventData = this.formatEventData(event, data) 168 | 169 | const options = { 170 | method: 'POST', 171 | headers: { 172 | 'Content-Type': 'application/json', 173 | 'Content-Length': Buffer.byteLength(eventData) 174 | }, 175 | key: this._userProvided.credentials?.key, 176 | cert: this._userProvided.credentials?.cert, 177 | ...(passphrase !== 
undefined && { passphrase }) 178 | } 179 | 180 | return new Promise((resolve, reject) => { 181 | const req = https.request(url, options, res => { 182 | LOG.trace('🛰️ Status Code:', res.statusCode) 183 | 184 | const chunks = [] 185 | res.on('data', chunk => chunks.push(chunk)) 186 | 187 | res.on('end', () => { 188 | const { statusCode, statusMessage } = res 189 | let body = Buffer.concat(chunks).toString() 190 | if (res.headers['content-type']?.match(/json/)) body = JSON.parse(body) 191 | if (res.statusCode >= 400) { 192 | // prettier-ignore 193 | const err = new Error(`Request failed with${statusMessage ? `: ${statusCode} - ${statusMessage}` : ` status ${statusCode}`}`) 194 | err.request = { method: options.method, url, headers: options.headers, body: data } 195 | if (err.request.headers.authorization) 196 | err.request.headers.authorization = err.request.headers.authorization.split(' ')[0] + ' ***' 197 | err.response = { statusCode, statusMessage, headers: res.headers, body } 198 | reject(err) 199 | } else { 200 | resolve(body) 201 | } 202 | }) 203 | }) 204 | 205 | req.on('error', e => { 206 | reject(e.message) 207 | LOG.trace(`Problem with request: ${e.message}`) 208 | }) 209 | 210 | req.write(eventData) 211 | req.end() 212 | }) 213 | } 214 | } 215 | -------------------------------------------------------------------------------- /test/personal-data/srv/crud-service.cds: -------------------------------------------------------------------------------- 1 | using {sap.auditlog.test.personal_data.db as db} from '../db/schema'; 2 | 3 | @path : '/crud-1' 4 | @requires: 'admin' 5 | service CRUD_1 { 6 | 7 | entity Orders as projection on db.Orders; 8 | entity OrderHeader as projection on db.OrderHeader; 9 | entity OrderItems as projection on db.OrderItems; 10 | entity Pages as projection on db.Pages; 11 | entity Customers as projection on db.Customers; 12 | entity CustomerPostalAddress as projection on db.CustomerPostalAddress; 13 | entity Comments as projection on db.Comments; 14 | entity CustomerStatus as projection on db.CustomerStatus; 15 | entity StatusChange as projection on db.StatusChange; 16 | entity LastOne as projection on db.LastOne; 17 | entity Notes as projection on db.Notes; 18 | entity MainEntities as projection on db.MainEntities; 19 | entity SubEntities as projection on db.SubEntities; 20 | 21 | entity AddressAttachment as 22 | projection on db.AddressAttachment { 23 | *, 24 | address.customer as customer 25 | } 26 | 27 | annotate Orders with @PersonalData: {EntitySemantics: 'Other'} { 28 | misc @PersonalData.IsPotentiallySensitive; 29 | } 30 | 31 | annotate OrderHeader with @PersonalData: {EntitySemantics: 'Other'} { 32 | description @PersonalData.IsPotentiallySensitive; 33 | } 34 | 35 | annotate OrderHeader.sensitiveData with @PersonalData: {EntitySemantics: 'Other'} { 36 | note @PersonalData.IsPotentiallySensitive; 37 | } 38 | 39 | annotate Pages with @PersonalData : {EntitySemantics: 'DataSubject' 40 | // no DataSubjectRole for testing purposes 41 | } { 42 | ID @PersonalData.FieldSemantics: 'DataSubjectID'; 43 | sensitive @PersonalData.IsPotentiallySensitive; 44 | personal @PersonalData.IsPotentiallyPersonal; 45 | } 46 | 47 | annotate Customers with @PersonalData : { 48 | EntitySemantics: 'DataSubject', 49 | DataSubjectRole: 'Customer' 50 | } { 51 | ID @PersonalData.FieldSemantics: 'DataSubjectID'; 52 | emailAddress @PersonalData.IsPotentiallyPersonal; 53 | firstName @PersonalData.IsPotentiallyPersonal; 54 | lastName @PersonalData.IsPotentiallyPersonal; 55 | creditCardNo 
@PersonalData.IsPotentiallySensitive; 56 | } 57 | 58 | annotate CustomerPostalAddress with @PersonalData: {EntitySemantics: 'DataSubjectDetails'} { 59 | customer @PersonalData.FieldSemantics : 'DataSubjectID'; 60 | street @PersonalData.IsPotentiallySensitive; 61 | town @PersonalData.IsPotentiallyPersonal; 62 | } 63 | 64 | annotate CustomerStatus with @PersonalData: {EntitySemantics: 'DataSubjectDetails'} { 65 | description @PersonalData.IsPotentiallySensitive; 66 | todo @PersonalData.IsPotentiallyPersonal; 67 | } 68 | 69 | annotate StatusChange with @PersonalData: {EntitySemantics: 'DataSubjectDetails'} { 70 | description @PersonalData.IsPotentiallySensitive; 71 | secondKey @PersonalData.IsPotentiallyPersonal; 72 | } 73 | 74 | annotate LastOne with @PersonalData: {EntitySemantics: 'DataSubjectDetails'} { 75 | lastOneField @PersonalData.IsPotentiallySensitive; 76 | } 77 | 78 | annotate AddressAttachment with @PersonalData: {EntitySemantics: 'DataSubjectDetails'} { 79 | customer @PersonalData.FieldSemantics : 'DataSubjectID'; 80 | description @PersonalData.IsPotentiallySensitive; 81 | todo @PersonalData.IsPotentiallyPersonal; 82 | } 83 | 84 | annotate Notes with @PersonalData: {EntitySemantics: 'Other'} { 85 | note @PersonalData.IsPotentiallySensitive; 86 | dummyArray @PersonalData.IsPotentiallyPersonal; 87 | } 88 | 89 | entity Employees as projection on db.Employees; 90 | 91 | annotate Employees with @PersonalData: { 92 | EntitySemantics: 'DataSubject', 93 | DataSubjectRole: 'Employee' 94 | } { 95 | ID @PersonalData.FieldSemantics: 'DataSubjectID'; 96 | name @PersonalData.IsPotentiallyPersonal; 97 | notes @PersonalData.IsPotentiallySensitive @PersonalData.IsPotentiallyPersonal; 98 | skills @PersonalData.IsPotentiallyPersonal; 99 | } 100 | 101 | annotate SubEntities with @PersonalData : {EntitySemantics: 'DataSubjectDetails'} { 102 | mainEntity @PersonalData.FieldSemantics: 'DataSubjectID'; 103 | ID @PersonalData.IsPotentiallyPersonal; 104 | name @PersonalData.IsPotentiallyPersonal; 105 | } 106 | } 107 | 108 | @path : '/crud-2' 109 | @requires: 'admin' 110 | service CRUD_2 { 111 | entity Customers as projection on db.Customers; 112 | entity CustomerPostalAddress as projection on db.CustomerPostalAddress; 113 | entity CustomerStatus as projection on db.CustomerStatus; 114 | 115 | entity AddressAttachment as 116 | projection on db.AddressAttachment { 117 | *, 118 | address.customer as customer 119 | } 120 | 121 | annotate Customers with @PersonalData : {EntitySemantics: 'Other'} { 122 | addresses @PersonalData.FieldSemantics: 'DataSubjectID'; 123 | } 124 | 125 | annotate CustomerPostalAddress with @PersonalData: { 126 | EntitySemantics: 'DataSubject', 127 | DataSubjectRole: 'Address' 128 | } { 129 | ID @PersonalData.FieldSemantics : 'DataSubjectID'; 130 | street @PersonalData.IsPotentiallyPersonal @PersonalData.FieldSemantics: 'DataSubjectID'; 131 | town @PersonalData.IsPotentiallyPersonal @PersonalData.FieldSemantics: 'DataSubjectID'; 132 | someOtherField @PersonalData.IsPotentiallySensitive; 133 | } 134 | 135 | // invalid modeling (nothing personal/ sensitive), must have no effect 136 | annotate CustomerStatus with @PersonalData: {EntitySemantics: 'DataSubjectDetails'}; 137 | } 138 | 139 | @path : '/crud-3' 140 | @requires: 'admin' 141 | service CRUD_3 { 142 | 143 | entity R1 as 144 | projection on db.RBase { 145 | key ID as r1_ID, 146 | emailAddress as r1_emailAddress, 147 | firstName as r1_firstName, 148 | lastName as r1_lastName, 149 | creditCardNo as r1_creditCardNo 150 | } 151 | 152 | 
annotate R1 with @PersonalData: { 153 | EntitySemantics: 'DataSubject', 154 | DataSubjectRole: 'Renamed Customer' 155 | }; 156 | 157 | entity R2 as 158 | projection on R1 { 159 | key r1_ID as r2_ID, 160 | r1_emailAddress as r2_emailAddress, 161 | r1_firstName as r2_firstName, 162 | r1_lastName as r2_lastName, 163 | r1_creditCardNo as r2_creditCardNo 164 | } 165 | 166 | annotate R2 with @PersonalData: { 167 | EntitySemantics: 'DataSubject', 168 | DataSubjectRole: 'Twice Renamed Customer' 169 | }; 170 | 171 | entity C as 172 | projection on CRUD_1.Customers { 173 | key ID as c_id, 174 | emailAddress as c_emailAddress, 175 | addresses as c_addresses 176 | }; 177 | 178 | 179 | entity CPA as 180 | projection on CRUD_1.CustomerPostalAddress { 181 | key ID as cpa_id, 182 | town as cpa_town, 183 | customer as cpa_customer, 184 | attachments as cpa_attachments 185 | }; 186 | 187 | entity AA as 188 | projection on CRUD_1.AddressAttachment { 189 | key ID as aa_id, 190 | todo as aa_todo, 191 | address as aa_address 192 | }; 193 | } 194 | 195 | @path : '/crud-4' 196 | @requires: 'admin' 197 | service CRUD_4 { 198 | 199 | entity RenamedMainEntities as projection on db.MainEntities; 200 | 201 | entity RenamedSubEntities as 202 | projection on db.SubEntities { 203 | key ID as renamedID, 204 | name, 205 | mainEntity 206 | }; 207 | 208 | } 209 | 210 | @path : '/crud-5' 211 | @requires: 'admin' 212 | service CRUD_5 { 213 | 214 | entity A as projection on db.A; 215 | entity B as projection on db.B; 216 | entity C as projection on db.C; 217 | 218 | } 219 | 220 | @path : '/crud-6' 221 | @requires: 'admin' 222 | service CRUD_6 { 223 | entity D as projection on db.D; 224 | } 225 | -------------------------------------------------------------------------------- /lib/modification.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | // REVISIT: don't require internal stuff 4 | const getTemplate = require('@sap/cds/libx/_runtime/common/utils/template') 5 | 6 | const { 7 | getMapKeyForCurrentRequest, 8 | getRootEntity, 9 | getPick, 10 | createLogEntry, 11 | addObjectID, 12 | addDataSubject, 13 | addDataSubjectForDetailsEntity, 14 | resolveDataSubjects 15 | } = require('./utils') 16 | 17 | let audit 18 | 19 | const _applyTransitionRecursively = (transition, data, d, old = {}) => { 20 | for (const [k, v] of transition.mapping) { 21 | if (v.transition) { 22 | const ok = v.ref?.[0] || k 23 | if (!data[ok]) continue 24 | if (Array.isArray(data[ok])) { 25 | d[k] = [] 26 | for (let i = 0; i < data[ok].length; i++) { 27 | const _op = data[ok][i]._op || d._op 28 | d[k].push(_op ? 
{ _op } : {}) 29 | const _old = old[ok]?.[i] || data[ok][i]._old 30 | if (_old) d[k][i]._old = _old 31 | _applyTransitionRecursively(v.transition, data[ok][i], d[k][i], _old) 32 | } 33 | if (old[ok] && data[ok].length !== old[ok].length) { 34 | for (const each of Object.values(old[ok]).filter(ele => !ele.__visited)) { 35 | const i = d[k].push({ _op: 'delete' }) 36 | d[k][i - 1]._old = each 37 | _applyTransitionRecursively(v.transition, each, d[k][i - 1], each) 38 | } 39 | } 40 | } else { 41 | d[k] = { _op: data[ok]._op || d._op } 42 | const _old = old[ok] || data[ok]._old 43 | if (_old) d[k]._old = _old 44 | _applyTransitionRecursively(v.transition, data[ok], d[k], _old) 45 | } 46 | } else if (v.ref) { 47 | if (v.ref[0] in data) d[k] = data[v.ref[0]] 48 | if (v.ref[0] in old) d._old[k] = old[v.ref[0]] 49 | } 50 | } 51 | if (Object.keys(old).length) old.__visited = true 52 | } 53 | 54 | // REVISIT: remove once old database impl is removed 55 | const _getDataWithAppliedTransitions = (data, req) => { 56 | let d 57 | const query = req.query.INSERT || req.query.UPDATE || req.query.DELETE 58 | // NOTE: there will only be transitions if old database impl is used 59 | const transition = query._transitions?.find(t => t.queryTarget.name === req.target.name) 60 | if (transition) { 61 | d = data._op ? { _op: data._op } : {} 62 | if (data._old) d._old = {} 63 | _applyTransitionRecursively(transition, data, d, data._old) 64 | } 65 | return d || data 66 | } 67 | 68 | /* 69 | * REVISIT: diff() doesn't work in srv after phase but foreign key propagation has not yet taken place in srv before phase 70 | * -> calc diff in db layer and store in audit data structure at context 71 | * -> REVISIT for GA: clear req._.partialPersistentState? 72 | */ 73 | const addDiffToCtx = async function (req) { 74 | // store diff in audit data structure at context 75 | const _audit = (req.context._audit ??= {}) 76 | if (!_audit.diffs) _audit.diffs = new Map() 77 | 78 | // get diff 79 | let diff = (await req.diff()) || {} 80 | diff = _getDataWithAppliedTransitions(diff, req) 81 | 82 | // add keys, if necessary 83 | let keys = _getDataWithAppliedTransitions(Object.assign({}, req.data), req) 84 | for (const key in keys) if (!(key in req.target.keys)) delete keys[key] 85 | Object.assign(diff, keys) 86 | 87 | _audit.diffs.set(req._.query, diff) 88 | } 89 | addDiffToCtx._initial = true 90 | 91 | const _getOldAndNew = (action, row, key, entity) => { 92 | let oldValue = action === 'Create' ? null : row._old && row._old[key] 93 | if (oldValue === undefined) oldValue = action === 'Update' && key in entity.keys ? row[key] : null 94 | else if (Array.isArray(oldValue)) oldValue = JSON.stringify(oldValue) 95 | let newValue = action === 'Delete' ? 
null : row[key] 96 | if (newValue === undefined) newValue = null 97 | else if (Array.isArray(newValue)) newValue = JSON.stringify(newValue) 98 | return { oldValue, newValue } 99 | } 100 | 101 | const _addAttribute = (log, action, row, key, entity) => { 102 | if (!log.attributes.find(ele => ele.name === key)) { 103 | const { oldValue, newValue } = _getOldAndNew(action, row, key, entity) 104 | if (oldValue !== newValue) { 105 | const attr = { name: key } 106 | if (action !== 'Create') attr.old = oldValue 107 | if (action !== 'Delete') attr.new = newValue 108 | log.attributes.push(attr) 109 | } 110 | } 111 | } 112 | 113 | const _maskAttribute = (attributes, key) => { 114 | const attribute = attributes?.find(ele => ele.name === key) 115 | if (attribute) { 116 | if ('old' in attribute) attribute.old = '***' 117 | if ('new' in attribute) attribute.new = '***' 118 | } 119 | } 120 | 121 | const _processorFnModification = (modificationLogs, model, req, beforeWrite) => elementInfo => { 122 | if (!elementInfo.row?._op) return 123 | 124 | let { row, key, element, plain } = elementInfo 125 | 126 | // delete in before phase, create and update in after phase 127 | if ((row._op === 'delete') !== !!beforeWrite) return 128 | 129 | const entity = getRootEntity(element) 130 | const action = row._op[0].toUpperCase() + row._op.slice(1) 131 | 132 | // create or augment log entry 133 | const modificationLog = createLogEntry(modificationLogs, entity, row) 134 | 135 | // process categories 136 | for (const category of plain.categories) { 137 | if (category === 'ObjectID') { 138 | addObjectID(modificationLog, row, key) 139 | } else if (category === 'DataSubjectID') { 140 | addDataSubject(modificationLog, row, key, entity) 141 | } else if (category === 'IsPotentiallyPersonal' || category === 'IsPotentiallySensitive') { 142 | _addAttribute(modificationLog, action, row, key, entity) 143 | // do not log the value of a sensitive attribute 144 | if (element['@PersonalData.IsPotentiallySensitive']) _maskAttribute(modificationLog.attributes, key) 145 | } 146 | } 147 | 148 | // add promise to determine data subject if a DataSubjectDetails entity 149 | if ( 150 | (entity['@PersonalData.EntitySemantics'] === 'DataSubjectDetails' || 151 | entity['@PersonalData.EntitySemantics'] === 'Other') && 152 | Object.keys(modificationLog.data_subject.id).length === 0 // > id still an empty object -> promise not yet set 153 | ) { 154 | addDataSubjectForDetailsEntity(row, modificationLog, req, entity, model) 155 | } 156 | } 157 | 158 | const _getDataModificationLogs = (req, tx, diff, beforeWrite) => { 159 | const template = getTemplate( 160 | `personal_${req.event}`.toLowerCase(), 161 | Object.assign({ name: req.target._service.name, model: tx.model }), 162 | req.target, 163 | { pick: getPick(req.event) } 164 | ) 165 | 166 | const modificationLogs = {} 167 | const processFn = _processorFnModification(modificationLogs, tx.model, req, beforeWrite) 168 | // cds internal templating mechanism api changed in 8.2.0 -> polyfill 169 | if (!template.process) { 170 | module.exports._templateProcessor ??= require('@sap/cds/libx/_runtime/common/utils/templateProcessor') 171 | template.process = (data, processFn) => { 172 | module.exports._templateProcessor({ processFn, row: data, template }) 173 | } 174 | } 175 | template.process(diff, processFn) 176 | 177 | return modificationLogs 178 | } 179 | 180 | const _calcModificationLogsHandler = async function (req, beforeWrite, that) { 181 | const mapKey = getMapKeyForCurrentRequest(req) 182 | 183 | const 
_audit = (req.context._audit ??= {}) 184 | const modificationLogs = _getDataModificationLogs(req, that, _audit.diffs.get(mapKey), beforeWrite) 185 | 186 | // store modificationLogs in audit data structure at context 187 | if (!_audit.modificationLogs) _audit.modificationLogs = new Map() 188 | const existingLogs = _audit.modificationLogs.get(mapKey) || {} 189 | _audit.modificationLogs.set(mapKey, Object.assign(existingLogs, modificationLogs)) 190 | 191 | // execute the data subject promises before going along to on phase 192 | // guarantees that the reads are executed before the data is modified 193 | await resolveDataSubjects(modificationLogs, req) 194 | } 195 | 196 | const calcModLogs4Before = function (req) { 197 | return _calcModificationLogsHandler(req, true, this) 198 | } 199 | 200 | const calcModLogs4After = function (_, req) { 201 | return _calcModificationLogsHandler(req, false, this) 202 | } 203 | 204 | const emitModLogs = async function (_, req) { 205 | const modificationLogs = req.context?._audit?.modificationLogs?.get(req.query) 206 | if (!modificationLogs) return 207 | 208 | audit = audit || (await cds.connect.to('audit-log')) 209 | 210 | const modifications = Object.keys(modificationLogs) 211 | .map(k => modificationLogs[k]) 212 | .filter(log => log.attributes.length) 213 | 214 | await Promise.all(modifications.map(modification => audit.log('PersonalDataModified', modification))) 215 | } 216 | 217 | module.exports = { 218 | addDiffToCtx, 219 | calcModLogs4Before, 220 | calcModLogs4After, 221 | emitModLogs 222 | } 223 | -------------------------------------------------------------------------------- /LICENSES/Apache-2.0.txt: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. 10 | 11 | "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 12 | 13 | "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. 14 | 15 | "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 16 | 17 | "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. 18 | 19 | "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. 20 | 21 | "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 
22 | 23 | "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 24 | 25 | "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 26 | 27 | "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 28 | 29 | 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 30 | 31 | 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 32 | 33 | 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: 34 | 35 | (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and 36 | 37 | (b) You must cause any modified files to carry prominent notices stating that You changed the files; and 38 | 39 | (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and 40 | 41 | (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. 42 | 43 | You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 44 | 45 | 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 46 | 47 | 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 48 | 49 | 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 50 | 51 | 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 52 | 53 | 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 54 | 55 | END OF TERMS AND CONDITIONS 56 | 57 | APPENDIX: How to apply the Apache License to your work. 58 | 59 | To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 60 | 61 | Copyright [yyyy] [name of copyright owner] 62 | 63 | Licensed under the Apache License, Version 2.0 (the "License"); 64 | you may not use this file except in compliance with the License. 65 | You may obtain a copy of the License at 66 | 67 | http://www.apache.org/licenses/LICENSE-2.0 68 | 69 | Unless required by applicable law or agreed to in writing, software 70 | distributed under the License is distributed on an "AS IS" BASIS, 71 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 72 | See the License for the specific language governing permissions and 73 | limitations under the License. 74 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /lib/utils.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | const { Relation, exposeRelation, relationHandler } = require('./_relation') 4 | 5 | const WRITE = { CREATE: 1, UPDATE: 1, DELETE: 1 } 6 | 7 | const $hasPersonalData = Symbol('@cap-js/audit-logging:hasPersonalData') 8 | const $dataSubject = Symbol('@cap-js/audit-logging:dataSubject') 9 | const $parents = Symbol('@cap-js/audit-logging:parents') 10 | const $visitedUp = Symbol('@cap-js/audit-logging:visitedUp') 11 | const $visitedDown = Symbol('@cap-js/audit-logging:visitedDown') 12 | 13 | const hasPersonalData = entity => { 14 | if (entity.own($hasPersonalData) == null) { 15 | if (!entity['@PersonalData.EntitySemantics']) entity.set($hasPersonalData, false) 16 | else { 17 | // default role to entity name 18 | if (entity['@PersonalData.EntitySemantics'] === 'DataSubject' && !entity['@PersonalData.DataSubjectRole']) 19 | entity['@PersonalData.DataSubjectRole'] = entity.name.match(/\w+/g).pop() 20 | // prettier-ignore 21 | const hasPersonalData = !!Object.values(entity.elements).some(element => 22 | element['@PersonalData.IsPotentiallyPersonal'] || 23 | element['@PersonalData.IsPotentiallySensitive'] || 24 | (element['@PersonalData.FieldSemantics'] && element['@PersonalData.FieldSemantics'] === 'DataSubjectID')) 25 | entity.set($hasPersonalData, hasPersonalData) 26 | } 27 | } 28 | return entity.own($hasPersonalData) 29 | } 30 | 31 | const getMapKeyForCurrentRequest = req => { 32 | // running in srv or db layer? -> srv's req.query used as key of diff and logs maps at req.context 33 | // REVISIT: req._tx should not be used like that! 34 | return req.tx.isDatabaseService ? 
req._.query : req.query 35 | } 36 | 37 | const getRootEntity = element => { 38 | let entity = element.parent 39 | while (entity.kind !== 'entity') entity = entity.parent 40 | return entity 41 | } 42 | 43 | const _isDataSubject = (element, target) => { 44 | return ( 45 | !element.isAssociation && 46 | element['@PersonalData.FieldSemantics'] === 'DataSubjectID' && 47 | target['@PersonalData.EntitySemantics'] === 'DataSubject' 48 | ) 49 | } 50 | 51 | const getPick = event => { 52 | return (element, target) => { 53 | if (!hasPersonalData(target)) return 54 | const categories = [] 55 | if (!element.isAssociation && element.key) categories.push('ObjectID') 56 | if (_isDataSubject(element, target)) categories.push('DataSubjectID') 57 | if (event in WRITE && element['@PersonalData.IsPotentiallyPersonal']) categories.push('IsPotentiallyPersonal') 58 | if (element['@PersonalData.IsPotentiallySensitive']) categories.push('IsPotentiallySensitive') 59 | if (categories.length) return { categories } 60 | } 61 | } 62 | 63 | const _getHash = (entity, row) => { 64 | return `${entity.name}(${Object.keys(entity.keys) 65 | .map(k => `${k}=${row[k]}`) 66 | .join(',')})` 67 | } 68 | 69 | const createLogEntry = (logs, entity, row) => { 70 | const hash = _getHash(entity, row) 71 | let log = logs[hash] 72 | if (!log) { 73 | logs[hash] = { 74 | data_subject: { id: {}, role: entity['@PersonalData.DataSubjectRole'] }, 75 | object: { type: entity.name, id: {} }, 76 | attributes: [] 77 | } 78 | log = logs[hash] 79 | } 80 | return log 81 | } 82 | 83 | const addObjectID = (log, row, key) => { 84 | if (!(key in log.object.id) && key !== 'IsActiveEntity') log.object.id[key] = row[key] || row._old?.[key] 85 | } 86 | 87 | const addDataSubject = (log, row, key, entity) => { 88 | if (!log.data_subject.type) log.data_subject.type = entity.name 89 | if (!(key in log.data_subject.id)) { 90 | const value = row[key] || row._old?.[key] 91 | log.data_subject.id[key] = value 92 | } 93 | } 94 | 95 | const _addKeysToWhere = (keys, row, alias) => { 96 | return keys 97 | .filter(key => !key.isAssociation && key.name !== 'IsActiveEntity') 98 | .reduce((keys, key) => { 99 | if (keys.length) keys.push('and') 100 | keys.push({ ref: [alias, key.name] }, '=', { val: row[key.name] || row._old?.[key.name] }) 101 | return keys 102 | }, []) 103 | } 104 | 105 | const _keyColumns = (keys, alias) => { 106 | return keys 107 | .filter(key => !key.isAssociation && key.name !== 'IsActiveEntity') 108 | .map(key => ({ ref: [alias, key.name] })) 109 | } 110 | 111 | const _alias = entity => { 112 | // REVISIT: we should not rely on entity._service (but I don't want to break existing behavior right now) 113 | if (!entity._service) return `${entity}` 114 | return entity.name.replace(`${entity._service.name}.`, '').replace(/\./g, '_') 115 | } 116 | 117 | const _buildSubSelect = (model, { entity, relative, element, next }, row, previousCqn) => { 118 | // relative is a parent or an entity itself 119 | 120 | const keys = Object.values(entity.keys) 121 | 122 | const entityName = entity.name 123 | const as = _alias(entity) 124 | 125 | const childCqn = SELECT.from({ ref: [entityName], as }).columns(_keyColumns(keys, as)) 126 | 127 | const targetAlias = _alias(element._target) 128 | const relativeAlias = _alias(relative) 129 | 130 | // REVISIT: there seems to be a caching issue in cds^9 when elements are renamed 131 | if (!('_relations' in relative) || !relative._relations[element.name]) { 132 | const newRelation = Relation.to(relative) 133 | relative._relations = new
Proxy(exposeRelation(newRelation), relationHandler(newRelation)) 134 | } 135 | 136 | let w = relative._relations[element.name].join(targetAlias, relativeAlias) 137 | 138 | // REVISIT: rewrite to path expression, if alias for relative is already used in subselect to avoid sql error 139 | if (previousCqn?._aliases.has(relativeAlias)) { 140 | let t 141 | for (const a in entity.associations) if (entity.associations[a].target === relative.name) t = entity.associations[a] 142 | if (t && w[0]?.xpr) for (const ele of w[0].xpr) if (ele.ref?.[0] === relativeAlias) ele.ref.splice(0, 1, as, t.name) 143 | } 144 | childCqn._aliases = new Set(previousCqn ? [...previousCqn._aliases.values(), as] : [as]) 145 | 146 | childCqn.where(w) 147 | 148 | if (previousCqn) childCqn.where('exists', previousCqn) 149 | else childCqn.where(_addKeysToWhere(keys, row, as)) 150 | 151 | if (next) return _buildSubSelect(model, next, {}, childCqn) 152 | 153 | return childCqn 154 | } 155 | 156 | const _getDataSubjectIdQuery = ({ dataSubjectEntity, subs }, row, model) => { 157 | const keys = Object.values(dataSubjectEntity.keys) 158 | const as = _alias(dataSubjectEntity) 159 | 160 | const cqn = SELECT.one 161 | .from({ ref: [dataSubjectEntity.name], as }) 162 | .columns(_keyColumns(keys, as)) 163 | .where(['exists', _buildSubSelect(model, subs[0], row)]) 164 | 165 | // entity reused in different branches => must check all 166 | for (let i = 1; i < subs.length; i++) cqn.or(['exists', _buildSubSelect(model, subs[i], row)]) 167 | 168 | return cqn 169 | } 170 | 171 | const _getUps = (entity, model) => { 172 | if (entity.own($parents) == null) { 173 | const ups = [] 174 | for (const def of Object.values(model.definitions)) { 175 | if (def.kind !== 'entity' || !def.associations) continue 176 | for (const element of Object.values(def.associations)) { 177 | if (element.target !== entity.name || element._isBacklink || element.name === 'SiblingEntity') continue 178 | ups.push(element) 179 | } 180 | } 181 | entity.set($parents, ups) 182 | } 183 | return entity.own($parents) 184 | } 185 | 186 | const _getDataSubjectUp = (root, model, entity, prev, next, result) => { 187 | for (const element of _getUps(entity, model)) { 188 | // cycle detection 189 | if (element.own($visitedUp) == null) element.set($visitedUp, new Set()) 190 | if (element.own($visitedUp).has(root)) continue 191 | element.own($visitedUp).add(root) 192 | 193 | const me = { entity, relative: element.parent, element } 194 | if (prev) prev.next = me 195 | if (element.parent['@PersonalData.EntitySemantics'] === 'DataSubject') { 196 | if (!result) result = { dataSubjectEntity: element.parent, subs: [] } 197 | result.subs.push(next || me) 198 | return result 199 | } else { 200 | // dfs is a must here 201 | result = _getDataSubjectUp(root, model, element.parent, me, next || me, result) 202 | } 203 | } 204 | return result 205 | } 206 | 207 | const _getDataSubjectDown = (root, entity, prev, next) => { 208 | const associations = Object.values(entity.associations || {}).filter(e => !e._isBacklink) 209 | // bfs makes more sense here -> check all own assocs first before going deeper 210 | for (const element of associations) { 211 | const me = { entity, relative: entity, element } 212 | if (element._target['@PersonalData.EntitySemantics'] === 'DataSubject') { 213 | if (prev) prev.next = me 214 | return { dataSubjectEntity: element._target, subs: [next || me] } 215 | } 216 | } 217 | for (const element of associations) { 218 | // cycle detection 219 | if (element.own($visitedDown) == null) 
element.set($visitedDown, new Set()) 220 | if (element.own($visitedDown).has(root)) continue 221 | element.own($visitedDown).add(root) 222 | 223 | const me = { entity, relative: entity, element } 224 | if (prev) prev.next = me 225 | const dataSubject = _getDataSubjectDown(root, element._target, me, next || me) 226 | if (dataSubject) return dataSubject 227 | } 228 | } 229 | 230 | const getDataSubject = (entity, model) => { 231 | if (entity.own($dataSubject) == null) { 232 | // entities with EntitySemantics 'DataSubjectDetails' or 'Other' are not necessarily 233 | // always below or always above the 'DataSubject' entity in the CSN tree 234 | let dataSubjectInfo = _getDataSubjectUp(entity.name, model, entity) 235 | if (!dataSubjectInfo) dataSubjectInfo = _getDataSubjectDown(entity.name, entity) 236 | entity.set($dataSubject, dataSubjectInfo) 237 | } 238 | return entity.own($dataSubject) 239 | } 240 | 241 | const _getDataSubjectsMap = req => { 242 | const mapKey = getMapKeyForCurrentRequest(req) 243 | const _audit = (req.context._audit ??= {}) 244 | if (!_audit.dataSubjects) _audit.dataSubjects = new Map() 245 | if (!_audit.dataSubjects.has(mapKey)) _audit.dataSubjects.set(mapKey, new Map()) 246 | return _audit.dataSubjects.get(mapKey) 247 | } 248 | 249 | const addDataSubjectForDetailsEntity = (row, log, req, entity, model) => { 250 | const dataSubjectInfo = getDataSubject(entity, model) 251 | const role = dataSubjectInfo.dataSubjectEntity['@PersonalData.DataSubjectRole'] 252 | log.data_subject.role ??= role 253 | log.data_subject.type = dataSubjectInfo.dataSubjectEntity.name 254 | /* 255 | * for each req (cf. $batch with atomicity) and data subject role (e.g., customer vs supplier), 256 | * store (in audit data structure at context) and reuse a single promise to look up the respective data subject 257 | */ 258 | const map = _getDataSubjectsMap(req) 259 | if (map.has(role)) log.data_subject.id = map.get(role) 260 | // REVISIT: with downward lookups, row might already contain the ID - some potential to optimize 261 | else map.set(role, _getDataSubjectIdQuery(dataSubjectInfo, row, model)) 262 | } 263 | 264 | const resolveDataSubjects = (logs, req) => { 265 | const ps = [] 266 | 267 | const map = _getDataSubjectsMap(req) 268 | 269 | for (const each of Object.values(logs)) { 270 | if (each.data_subject.id instanceof cds.ql.Query) { 271 | const q = each.data_subject.id 272 | if (!map.has(q)) { 273 | const p = cds.run(q).then(res => map.set(q, res)) 274 | map.set(q, p) 275 | ps.push(p) 276 | } 277 | } 278 | } 279 | 280 | return Promise.all(ps).then(() => { 281 | for (const each of Object.values(logs)) { 282 | if (each.data_subject.id instanceof cds.ql.Query) { 283 | each.data_subject.id = map.get(each.data_subject.id) 284 | } 285 | } 286 | }) 287 | } 288 | 289 | module.exports = { 290 | hasPersonalData, 291 | getMapKeyForCurrentRequest, 292 | getRootEntity, 293 | getPick, 294 | createLogEntry, 295 | addObjectID, 296 | addDataSubject, 297 | addDataSubjectForDetailsEntity, 298 | resolveDataSubjects 299 | } 300 | -------------------------------------------------------------------------------- /test/personal-data/fiori.test.js: -------------------------------------------------------------------------------- 1 | const cds = require('@sap/cds') 2 | 3 | const { POST: _POST, PATCH: _PATCH, GET: _GET, DELETE: _DELETE, data } = cds.test().in(__dirname) 4 | 5 | // the persistent outbox adds a delay 6 | const wait = require('node:timers/promises').setTimeout 7 | const POST = (...args) => _POST(...args).then(async res =>
(await wait(42), res)) 8 | const PATCH = (...args) => _PATCH(...args).then(async res => (await wait(42), res)) 9 | const GET = (...args) => _GET(...args).then(async res => (await wait(42), res)) 10 | const DELETE = (...args) => _DELETE(...args).then(async res => (await wait(42), res)) 11 | 12 | const _logger = require('../utils/logger')({ debug: true }) 13 | cds.log.Logger = _logger 14 | 15 | describe('personal data audit logging in Fiori', () => { 16 | let __log, _logs 17 | const _log = (...args) => { 18 | if (!(args.length === 2 && typeof args[0] === 'string' && args[0].match(/\[audit-log\]/i))) { 19 | // > not an audit log (most likely, anyway) 20 | return __log(...args) 21 | } 22 | 23 | _logs.push(args[1]) 24 | } 25 | 26 | const CUSTOMER_ID = 'bcd4a37a-6319-4d52-bb48-02fd06b9ffe9' 27 | const DATA_SUBJECT = { 28 | type: 'Fiori_1.Customers', 29 | role: 'Customer', 30 | id: { ID: CUSTOMER_ID } 31 | } 32 | 33 | const ALICE = { username: 'alice', password: 'password' } 34 | 35 | beforeAll(() => { 36 | __log = global.console.log 37 | global.console.log = _log 38 | }) 39 | 40 | afterAll(() => { 41 | global.console.log = __log 42 | }) 43 | 44 | beforeEach(async () => { 45 | await data.reset() 46 | _logs = [] 47 | _logger._resetLogs() 48 | }) 49 | 50 | describe('data access logging for active draft enabled entities', () => { 51 | test('read with another data subject and sensitive data only in composition children', async () => { 52 | const { data: customer } = await GET( 53 | `/fiori-2/Customers(ID=${CUSTOMER_ID},IsActiveEntity=true)?$expand=addresses`, 54 | { auth: ALICE } 55 | ) 56 | const addressID1 = customer.addresses[0].ID 57 | const addressID2 = customer.addresses[1].ID 58 | expect(_logs.length).toBe(2) 59 | expect(_logs).toContainMatchObject({ 60 | user: 'alice', 61 | object: { 62 | type: 'Fiori_2.CustomerPostalAddress', 63 | id: { ID: addressID1 } 64 | }, 65 | data_subject: { 66 | type: 'Fiori_2.CustomerPostalAddress', 67 | role: 'Address', 68 | id: { 69 | ID: addressID1, 70 | street: 'moo', 71 | town: 'shu' 72 | } 73 | }, 74 | attributes: [{ name: 'someOtherField' }] 75 | }) 76 | expect(_logs).toContainMatchObject({ 77 | user: 'alice', 78 | object: { 79 | type: 'Fiori_2.CustomerPostalAddress', 80 | id: { ID: addressID2 } 81 | }, 82 | data_subject: { 83 | type: 'Fiori_2.CustomerPostalAddress', 84 | role: 'Address', 85 | id: { 86 | ID: addressID2, 87 | street: 'sue', 88 | town: 'lou' 89 | } 90 | }, 91 | attributes: [{ name: 'someOtherField' }] 92 | }) 93 | }) 94 | 95 | test('read all Customers', async () => { 96 | const response = await GET('/fiori-1/Customers', { auth: ALICE }) 97 | 98 | expect(response).toMatchObject({ status: 200 }) 99 | expect(_logs.length).toBe(1) 100 | expect(_logs).toContainMatchObject({ 101 | user: 'alice', 102 | object: { 103 | type: 'Fiori_1.Customers', 104 | id: { ID: CUSTOMER_ID } 105 | }, 106 | data_subject: DATA_SUBJECT, 107 | attributes: [{ name: 'creditCardNo' }] 108 | }) 109 | }) 110 | 111 | test('read single Customer', async () => { 112 | const response = await GET(`/fiori-1/Customers(ID=${CUSTOMER_ID},IsActiveEntity=true)`, { auth: ALICE }) 113 | 114 | expect(response).toMatchObject({ status: 200 }) 115 | expect(_logs.length).toBe(1) 116 | expect(_logs).toContainMatchObject({ 117 | user: 'alice', 118 | object: { 119 | type: 'Fiori_1.Customers', 120 | id: { ID: CUSTOMER_ID } 121 | }, 122 | data_subject: DATA_SUBJECT, 123 | attributes: [{ name: 'creditCardNo' }] 124 | }) 125 | }) 126 | 127 | test('read Customer expanding addresses and comments - comp of 
many', async () => { 128 | const response = await GET( 129 | `/fiori-1/Customers(ID=${CUSTOMER_ID},IsActiveEntity=true)?$expand=addresses($expand=attachments),comments`, 130 | { auth: ALICE } 131 | ) 132 | 133 | expect(response).toMatchObject({ status: 200 }) 134 | expect(_logs.length).toBe(5) 135 | expect(_logs).toContainMatchObject({ 136 | user: 'alice', 137 | object: { 138 | type: 'Fiori_1.Customers', 139 | id: { ID: CUSTOMER_ID } 140 | }, 141 | data_subject: DATA_SUBJECT, 142 | attributes: [{ name: 'creditCardNo' }] 143 | }) 144 | 145 | expect(_logs).toContainMatchObject({ 146 | user: 'alice', 147 | object: { 148 | type: 'Fiori_1.CustomerPostalAddress', 149 | id: { ID: '1ab71292-ef69-4571-8cfb-10b9d5d1459e' } 150 | }, 151 | data_subject: DATA_SUBJECT, 152 | attributes: [{ name: 'street' }] 153 | }) 154 | 155 | expect(_logs).toContainMatchObject({ 156 | user: 'alice', 157 | object: { 158 | type: 'Fiori_1.AddressAttachment', 159 | id: { ID: '3cd71292-ef69-4571-8cfb-10b9d5d1437e' } 160 | }, 161 | data_subject: DATA_SUBJECT, 162 | attributes: [{ name: 'description' }] 163 | }) 164 | expect(_logs).toContainMatchObject({ 165 | user: 'alice', 166 | object: { 167 | type: 'Fiori_1.AddressAttachment', 168 | id: { ID: '595225db-6eeb-4b4f-9439-dbe5fcb4ce5a' } 169 | }, 170 | data_subject: DATA_SUBJECT, 171 | attributes: [{ name: 'description' }] 172 | }) 173 | expect(_logs).toContainMatchObject({ 174 | user: 'alice', 175 | object: { 176 | type: 'Fiori_1.CustomerPostalAddress', 177 | id: { ID: '285225db-6eeb-4b4f-9439-dbe5fcb4ce82' } 178 | }, 179 | data_subject: DATA_SUBJECT, 180 | attributes: [{ name: 'street' }] 181 | }) 182 | }) 183 | 184 | test('read Customer expanding deep nested comp of one', async () => { 185 | const response = await GET( 186 | `/fiori-1/Customers(ID=${CUSTOMER_ID},IsActiveEntity=true)?$expand=status($expand=change($expand=last))`, 187 | { auth: ALICE } 188 | ) 189 | expect(response).toMatchObject({ status: 200 }) 190 | expect(_logs.length).toBe(4) 191 | expect(_logs).toContainMatchObject({ 192 | user: 'alice', 193 | object: { 194 | type: 'Fiori_1.Customers', 195 | id: { ID: CUSTOMER_ID } 196 | }, 197 | data_subject: DATA_SUBJECT, 198 | attributes: [{ name: 'creditCardNo' }] 199 | }) 200 | expect(_logs).toContainMatchObject({ 201 | user: 'alice', 202 | object: { 203 | type: 'Fiori_1.CustomerStatus', 204 | id: { ID: '23d4a37a-6319-4d52-bb48-02fd06b9ffa4' } 205 | }, 206 | data_subject: DATA_SUBJECT, 207 | attributes: [{ name: 'description' }] 208 | }) 209 | expect(_logs).toContainMatchObject({ 210 | user: 'alice', 211 | object: { 212 | type: 'Fiori_1.StatusChange', 213 | id: { ID: '59d4a37a-6319-4d52-bb48-02fd06b9fbc2', secondKey: 'some value' } 214 | }, 215 | data_subject: DATA_SUBJECT, 216 | attributes: [{ name: 'description' }] 217 | }) 218 | expect(_logs).toContainMatchObject({ 219 | user: 'alice', 220 | object: { 221 | type: 'Fiori_1.LastOne', 222 | id: { ID: '74d4a37a-6319-4d52-bb48-02fd06b9f3r4' } 223 | }, 224 | data_subject: DATA_SUBJECT, 225 | attributes: [{ name: 'lastOneField' }] 226 | }) 227 | }) 228 | 229 | test('read all CustomerStatus', async () => { 230 | const response = await GET('/fiori-1/CustomerStatus', { auth: ALICE }) 231 | expect(response).toMatchObject({ status: 200 }) 232 | expect(_logs.length).toBe(1) 233 | expect(_logs).toContainMatchObject({ 234 | user: 'alice', 235 | object: { 236 | type: 'Fiori_1.CustomerStatus', 237 | id: { ID: '23d4a37a-6319-4d52-bb48-02fd06b9ffa4' } 238 | }, 239 | data_subject: DATA_SUBJECT, 240 | attributes: [{ name: 
'description' }] 241 | }) 242 | }) 243 | 244 | test('read all CustomerPostalAddress', async () => { 245 | const response = await GET('/fiori-1/CustomerPostalAddress', { auth: ALICE }) 246 | 247 | expect(response).toMatchObject({ status: 200 }) 248 | expect(_logs.length).toBe(2) 249 | expect(_logs).toContainMatchObject({ 250 | user: 'alice', 251 | object: { 252 | type: 'Fiori_1.CustomerPostalAddress', 253 | id: { ID: '1ab71292-ef69-4571-8cfb-10b9d5d1459e' } 254 | }, 255 | data_subject: DATA_SUBJECT, 256 | attributes: [{ name: 'street' }] 257 | }) 258 | 259 | expect(_logs).toContainMatchObject({ 260 | user: 'alice', 261 | object: { 262 | type: 'Fiori_1.CustomerPostalAddress', 263 | id: { ID: '285225db-6eeb-4b4f-9439-dbe5fcb4ce82' } 264 | }, 265 | data_subject: DATA_SUBJECT, 266 | attributes: [{ name: 'street' }] 267 | }) 268 | }) 269 | 270 | test('read all CustomerPostalAddress expanding Customer', async () => { 271 | const response = await GET('/fiori-1/CustomerPostalAddress?$expand=customer', { auth: ALICE }) 272 | 273 | expect(response).toMatchObject({ status: 200 }) 274 | expect(_logs.length).toBe(3) 275 | expect(_logs).toContainMatchObject({ 276 | user: 'alice', 277 | object: { 278 | type: 'Fiori_1.Customers', 279 | id: { ID: CUSTOMER_ID } 280 | }, 281 | data_subject: DATA_SUBJECT, 282 | attributes: [{ name: 'creditCardNo' }] 283 | }) 284 | 285 | expect(_logs).toContainMatchObject({ 286 | user: 'alice', 287 | object: { 288 | type: 'Fiori_1.CustomerPostalAddress', 289 | id: { ID: '1ab71292-ef69-4571-8cfb-10b9d5d1459e' } 290 | }, 291 | data_subject: DATA_SUBJECT, 292 | attributes: [{ name: 'street' }] 293 | }) 294 | 295 | expect(_logs).toContainMatchObject({ 296 | user: 'alice', 297 | object: { 298 | type: 'Fiori_1.CustomerPostalAddress', 299 | id: { ID: '285225db-6eeb-4b4f-9439-dbe5fcb4ce82' } 300 | }, 301 | data_subject: DATA_SUBJECT, 302 | attributes: [{ name: 'street' }] 303 | }) 304 | }) 305 | 306 | test('draft union', async () => { 307 | const response = await GET( 308 | '/fiori-1/Customers?$filter=(IsActiveEntity eq false or SiblingEntity/IsActiveEntity eq null)', 309 | { auth: ALICE } 310 | ) 311 | 312 | expect(response).toMatchObject({ status: 200 }) 313 | expect(_logs.length).toBe(1) 314 | expect(_logs).toContainMatchObject({ 315 | user: 'alice', 316 | object: { 317 | type: 'Fiori_1.Customers', 318 | id: { ID: CUSTOMER_ID } 319 | }, 320 | data_subject: DATA_SUBJECT, 321 | attributes: [{ name: 'creditCardNo' }] 322 | }) 323 | }) 324 | }) 325 | 326 | describe('modification and read draft logging', () => { 327 | test('draft edit, patch and activate with another data subject and sensitive data only in composition children', async () => { 328 | const { data: customer } = await GET( 329 | `/fiori-2/Customers(ID=${CUSTOMER_ID},IsActiveEntity=true)?$expand=addresses`, 330 | { auth: ALICE } 331 | ) 332 | const address_1 = customer.addresses[0] 333 | const address_2 = customer.addresses[1] 334 | 335 | // reset logs 336 | _logs = [] 337 | 338 | // draftEdit transfers active data to draft tables -> read sensitive data -> logs 339 | await POST(`/fiori-2/Customers(ID=${CUSTOMER_ID},IsActiveEntity=true)/draftEdit`, {}, { auth: ALICE }) 340 | expect(_logs.length).toBe(2) 341 | expect(_logs.length).toBe(2) 342 | expect(_logs).toContainMatchObject({ 343 | user: 'alice', 344 | object: { 345 | type: 'Fiori_2.CustomerPostalAddress', 346 | id: { ID: address_1.ID } 347 | }, 348 | data_subject: { 349 | type: 'Fiori_2.CustomerPostalAddress', 350 | role: 'Address', 351 | id: { 352 | ID: address_1.ID, 353 
| street: address_1.street, 354 | town: address_1.town 355 | } 356 | }, 357 | attributes: [{ name: 'someOtherField' }] 358 | }) 359 | expect(_logs).toContainMatchObject({ 360 | user: 'alice', 361 | object: { 362 | type: 'Fiori_2.CustomerPostalAddress', 363 | id: { ID: address_2.ID } 364 | }, 365 | data_subject: { 366 | type: 'Fiori_2.CustomerPostalAddress', 367 | role: 'Address', 368 | id: { 369 | ID: address_2.ID, 370 | street: address_2.street, 371 | town: address_2.town 372 | } 373 | }, 374 | attributes: [{ name: 'someOtherField' }] 375 | }) 376 | 377 | // reset logs 378 | _logs = [] 379 | 380 | // draft data is never logged 381 | await GET(`/fiori-2/Customers(ID=${CUSTOMER_ID},IsActiveEntity=false)?$expand=addresses`, { auth: ALICE }) 382 | expect(_logs.length).toBe(0) 383 | 384 | await PATCH( 385 | `/fiori-2/Customers(ID=${CUSTOMER_ID},IsActiveEntity=false)/addresses(ID=${address_1.ID},IsActiveEntity=false)`, 386 | { 387 | street: 'updated', 388 | town: 'updated town' 389 | }, 390 | { auth: ALICE } 391 | ) 392 | const response = await POST( 393 | `/fiori-2/Customers(ID=${CUSTOMER_ID},IsActiveEntity=false)/draftActivate`, 394 | {}, 395 | { auth: ALICE } 396 | ) 397 | 398 | expect(response).toMatchObject({ status: 200 }) 399 | expect(_logs.length).toBe(1) 400 | expect(_logs).toContainMatchObject({ 401 | user: 'alice', 402 | object: { 403 | type: 'Fiori_2.CustomerPostalAddress', 404 | id: { ID: address_1.ID } 405 | }, 406 | data_subject: { 407 | type: 'Fiori_2.CustomerPostalAddress', 408 | role: 'Address', 409 | id: { 410 | ID: address_1.ID, 411 | street: 'updated', 412 | town: 'updated town' 413 | } 414 | }, 415 | attributes: [ 416 | { name: 'street', new: 'updated', old: 'moo' }, 417 | { name: 'town', new: 'updated town', old: 'shu' } 418 | ] 419 | }) 420 | }) 421 | 422 | test('create, patch, read and activate', async () => { 423 | const customer = { 424 | emailAddress: 'bla@blub.com', 425 | firstName: 'bla', 426 | lastName: 'blub', 427 | creditCardNo: '98765', 428 | someOtherField: 'dummy' 429 | } 430 | 431 | let response = await POST('/fiori-1/Customers', {}, { auth: ALICE }) 432 | 433 | expect(response).toMatchObject({ status: 201 }) 434 | customer.ID = response.data.ID 435 | expect(_logs.length).toBe(0) 436 | 437 | response = await PATCH(`/fiori-1/Customers(ID=${customer.ID},IsActiveEntity=false)`, customer, { auth: ALICE }) 438 | 439 | expect(response).toMatchObject({ status: 200 }) 440 | expect(_logs.length).toBe(0) 441 | 442 | response = await GET(`/fiori-1/Customers(ID=${customer.ID},IsActiveEntity=false)`, { auth: ALICE }) 443 | 444 | expect(response).toMatchObject({ status: 200 }) 445 | expect(_logs.length).toBe(0) 446 | 447 | response = await POST( 448 | `/fiori-1/Customers(ID=${customer.ID},IsActiveEntity=false)/Fiori_1.draftActivate`, 449 | {}, 450 | { auth: ALICE } 451 | ) 452 | 453 | expect(_logs.length).toBe(2) 454 | expect(_logs).toContainMatchObject({ 455 | user: 'alice', 456 | object: { 457 | type: 'Fiori_1.Customers', 458 | id: { ID: customer.ID } 459 | }, 460 | data_subject: { 461 | type: 'Fiori_1.Customers', 462 | role: 'Customer', 463 | id: { ID: customer.ID } 464 | }, 465 | attributes: [ 466 | { name: 'emailAddress', new: customer.emailAddress }, 467 | { name: 'firstName', new: customer.firstName }, 468 | { name: 'lastName', new: customer.lastName }, 469 | { name: 'creditCardNo', new: '***' } 470 | ] 471 | }) 472 | expect(_logs).toContainMatchObject({ 473 | user: 'alice', 474 | object: { 475 | type: 'Fiori_1.Customers', 476 | id: { ID: customer.ID } 477 | }, 478 
data_subject: {
479 |           type: 'Fiori_1.Customers',
480 |           role: 'Customer',
481 |           id: { ID: customer.ID }
482 |         },
483 |         attributes: [{ name: 'creditCardNo' }]
484 |       })
485 |     })
486 |
487 |     test('draft edit, read union, delete draft', async () => {
488 |       let response = await POST(
489 |         `/fiori-1/Customers(ID=${CUSTOMER_ID},IsActiveEntity=true)/Fiori_1.draftEdit`,
490 |         { PreserveChanges: true },
491 |         { auth: ALICE }
492 |       )
493 |       expect(response).toMatchObject({ status: 201 })
494 |       expect(_logs.length).toBe(10)
495 |       for (const l of _logs) expect(l).toMatchObject({ data_subject: { id: { ID: CUSTOMER_ID } } })
496 |
497 |       // reset logs
498 |       _logs = []
499 |
500 |       // draft data is never logged and since cds^7.6, we only read actives for which no draft exists -> now 0 logs
501 |       response = await GET(
502 |         '/fiori-1/Customers?$filter=(IsActiveEntity eq false or SiblingEntity/IsActiveEntity eq null)',
503 |         { auth: ALICE }
504 |       )
505 |       expect(response).toMatchObject({ status: 200 })
506 |       expect(_logs.length).toBe(0)
507 |
508 |       // reset logs
509 |       _logs = []
510 |
511 |       response = await DELETE(`/fiori-1/Customers(ID=${CUSTOMER_ID},IsActiveEntity=false)`, { auth: ALICE })
512 |       expect(response).toMatchObject({ status: 204 })
513 |       expect(_logs.length).toBe(0)
514 |     })
515 |
516 |     test('draft edit, patch and activate', async () => {
517 |       let response
518 |
519 |       // draftEdit transfers active data to draft tables -> read sensitive data -> logs
520 |       response = await POST(
521 |         `/fiori-1/Customers(ID=bcd4a37a-6319-4d52-bb48-02fd06b9ffe9,IsActiveEntity=true)/Fiori_1.draftEdit`,
522 |         { PreserveChanges: true },
523 |         { auth: ALICE }
524 |       )
525 |       expect(response).toMatchObject({ status: 201 })
526 |       expect(_logs.length).toBe(10)
527 |
528 |       // reset logs
529 |       _logs = []
530 |
531 |       const customer = {
532 |         ID: response.data.ID,
533 |         emailAddress: 'bla@blub.com',
534 |         firstName: 'bla',
535 |         lastName: 'blub',
536 |         creditCardNo: '98765',
537 |         someOtherField: 'dummy'
538 |       }
539 |
540 |       response = await PATCH(`/fiori-1/Customers(ID=${customer.ID},IsActiveEntity=false)`, customer, { auth: ALICE })
541 |       expect(response).toMatchObject({ status: 200 })
542 |       expect(_logs.length).toBe(0)
543 |
544 |       // reset logs
545 |       _logs = []
546 |
547 |       response = await POST(
548 |         `/fiori-1/Customers(ID=${customer.ID},IsActiveEntity=false)/Fiori_1.draftActivate`,
549 |         {},
550 |         { auth: ALICE }
551 |       )
552 |       expect(_logs.length).toBe(2)
553 |       expect(_logs).toContainMatchObject({
554 |         user: 'alice',
555 |         object: {
556 |           type: 'Fiori_1.Customers',
557 |           id: { ID: customer.ID }
558 |         },
559 |         data_subject: {
560 |           type: 'Fiori_1.Customers',
561 |           role: 'Customer',
562 |           id: { ID: customer.ID }
563 |         },
564 |         attributes: [
565 |           { name: 'emailAddress', old: 'foo@bar.com', new: customer.emailAddress },
566 |           { name: 'firstName', old: 'foo', new: customer.firstName },
567 |           { name: 'lastName', old: 'bar', new: customer.lastName },
568 |           { name: 'creditCardNo', old: '***', new: '***' }
569 |         ]
570 |       })
571 |       expect(_logs).toContainMatchObject({
572 |         user: 'alice',
573 |         object: {
574 |           type: 'Fiori_1.Customers',
575 |           id: { ID: customer.ID }
576 |         },
577 |         data_subject: {
578 |           type: 'Fiori_1.Customers',
579 |           role: 'Customer',
580 |           id: { ID: customer.ID }
581 |         },
582 |         attributes: [{ name: 'creditCardNo' }]
583 |       })
584 |     })
585 |
586 |     test('create, patch, and activate - deep', async () => {
587 |       let response = await POST('/fiori-1/Customers', {}, { auth: ALICE })
588 |
589 |       expect(response).toMatchObject({ status: 201 })
590 |       expect(_logs.length).toBe(0)
591 |
592 |       const customer = {
593 |         ID: response.data.ID,
594 |         emailAddress: 'bla@blub.com',
595 |         firstName: 'bla',
596 |         lastName: 'blub',
597 |         creditCardNo: '98765',
598 |         someOtherField: 'dummy'
599 |       }
600 |       response = await PATCH(`/fiori-1/Customers(ID=${customer.ID},IsActiveEntity=false)`, customer, { auth: ALICE })
601 |
602 |       expect(response).toMatchObject({ status: 200 })
603 |       expect(_logs.length).toBe(0)
604 |
605 |       response = await POST(`/fiori-1/Customers(ID=${customer.ID},IsActiveEntity=false)/addresses`, {}, { auth: ALICE })
606 |
607 |       expect(response).toMatchObject({ status: 201 })
608 |       expect(_logs.length).toBe(0)
609 |
610 |       const address = {
611 |         ID: response.data.ID,
612 |         street: 'A1',
613 |         town: 'Monnem',
614 |         someOtherField: 'Beschde'
615 |       }
616 |
617 |       response = await PATCH(
618 |         `/fiori-1/Customers(ID=${customer.ID},IsActiveEntity=false)/addresses(ID=${address.ID},IsActiveEntity=false)`,
619 |         address,
620 |         { auth: ALICE }
621 |       )
622 |
623 |       expect(response).toMatchObject({ status: 200 })
624 |       expect(_logs.length).toBe(0)
625 |
626 |       response = await POST(
627 |         `/fiori-1/Customers(ID=${customer.ID},IsActiveEntity=false)/Fiori_1.draftActivate`,
628 |         {},
629 |         { auth: ALICE }
630 |       )
631 |
632 |       const data_subject = {
633 |         type: 'Fiori_1.Customers',
634 |         role: 'Customer',
635 |         id: { ID: customer.ID }
636 |       }
637 |
638 |       expect(_logs.length).toBe(3)
639 |       expect(_logs).toContainMatchObject({
640 |         user: 'alice',
641 |         object: {
642 |           type: 'Fiori_1.Customers',
643 |           id: { ID: customer.ID }
644 |         },
645 |         data_subject,
646 |         attributes: [
647 |           { name: 'emailAddress', new: customer.emailAddress },
648 |           { name: 'firstName', new: customer.firstName },
649 |           { name: 'lastName', new: customer.lastName },
650 |           { name: 'creditCardNo', new: '***' }
651 |         ]
652 |       })
653 |
654 |       expect(_logs).toContainMatchObject({
655 |         user: 'alice',
656 |         object: {
657 |           type: 'Fiori_1.Customers',
658 |           id: { ID: customer.ID }
659 |         },
660 |         data_subject: {
661 |           type: 'Fiori_1.Customers',
662 |           role: 'Customer',
663 |           id: { ID: customer.ID }
664 |         },
665 |         attributes: [{ name: 'creditCardNo' }]
666 |       })
667 |
668 |       expect(_logs).toContainMatchObject({
669 |         user: 'alice',
670 |         object: {
671 |           type: 'Fiori_1.CustomerPostalAddress',
672 |           id: { ID: address.ID }
673 |         },
674 |         data_subject,
675 |         attributes: [
676 |           { name: 'street', new: '***' },
677 |           { name: 'town', new: address.town }
678 |         ]
679 |       })
680 |     })
681 |
682 |     test('delete active Customer - deep', async () => {
683 |       let response = await GET(
684 |         `/fiori-1/Customers(ID=${CUSTOMER_ID},IsActiveEntity=true)?$expand=addresses($expand=attachments),status($expand=change($expand=last)),comments`,
685 |         { auth: ALICE }
686 |       )
687 |
688 |       const oldAddresses = response.data.addresses
689 |       const oldAttachments = response.data.addresses[0].attachments
690 |       const oldStatus = response.data.status
691 |       const oldChange = response.data.status.change
692 |       const oldLast = response.data.status.change.last
693 |
694 |       // reset logs
695 |       _logs = []
696 |       _logger._resetLogs()
697 |
698 |       response = await DELETE(`/fiori-1/Customers(ID=${CUSTOMER_ID},IsActiveEntity=true)`, { auth: ALICE })
699 |
700 |       expect(response).toMatchObject({ status: 204 })
701 |       expect(_logs.length).toBe(10)
702 |       expect(_logs).toContainMatchObject({
703 |         user: 'alice',
704 |         object: {
705 |           type: 'Fiori_1.Customers',
706 |           id: { ID: CUSTOMER_ID }
707 |         },
708 |         data_subject: DATA_SUBJECT,
709 |         attributes: [
710 |           { name: 'emailAddress', old: 'foo@bar.com' },
711 |           { name: 'firstName', old: 'foo' },
712 |           { name: 'lastName', old: 'bar' },
713 |           { name: 'creditCardNo', old: '***' }
714 |         ]
715 |       })
716 |
717 |       expect(_logs).toContainMatchObject({
718 |         user: 'alice',
719 |         object: {
720 |           type: 'Fiori_1.CustomerPostalAddress',
721 |           id: { ID: oldAddresses[0].ID }
722 |         },
723 |         data_subject: DATA_SUBJECT,
724 |         attributes: [
725 |           { name: 'street', old: '***' },
726 |           { name: 'town', old: oldAddresses[0].town }
727 |         ]
728 |       })
729 |
730 |       expect(_logs).toContainMatchObject({
731 |         user: 'alice',
732 |         object: {
733 |           type: 'Fiori_1.AddressAttachment',
734 |           id: { ID: oldAttachments[0].ID }
735 |         },
736 |         data_subject: DATA_SUBJECT,
737 |         attributes: [
738 |           { name: 'description', old: '***' },
739 |           { name: 'todo', old: oldAttachments[0].todo }
740 |         ]
741 |       })
742 |       expect(_logs).toContainMatchObject({
743 |         user: 'alice',
744 |         object: {
745 |           type: 'Fiori_1.AddressAttachment',
746 |           id: { ID: oldAttachments[1].ID }
747 |         },
748 |         data_subject: DATA_SUBJECT,
749 |         attributes: [
750 |           { name: 'description', old: '***' },
751 |           { name: 'todo', old: oldAttachments[1].todo }
752 |         ]
753 |       })
754 |       expect(_logs).toContainMatchObject({
755 |         user: 'alice',
756 |         object: {
757 |           type: 'Fiori_1.CustomerPostalAddress',
758 |           id: { ID: oldAddresses[1].ID }
759 |         },
760 |         data_subject: DATA_SUBJECT,
761 |         attributes: [
762 |           { name: 'street', old: '***' },
763 |           { name: 'town', old: oldAddresses[1].town }
764 |         ]
765 |       })
766 |       expect(_logs).toContainMatchObject({
767 |         user: 'alice',
768 |         object: {
769 |           type: 'Fiori_1.CustomerStatus',
770 |           id: { ID: oldStatus.ID }
771 |         },
772 |         data_subject: DATA_SUBJECT,
773 |         attributes: [
774 |           { name: 'description', old: '***' },
775 |           { name: 'todo', old: 'send reminder' }
776 |         ]
777 |       })
778 |       expect(_logs).toContainMatchObject({
779 |         user: 'alice',
780 |         object: {
781 |           type: 'Fiori_1.StatusChange',
782 |           id: { ID: oldChange.ID, secondKey: oldChange.secondKey }
783 |         },
784 |         data_subject: DATA_SUBJECT,
785 |         attributes: [{ name: 'description', old: '***' }]
786 |       })
787 |       expect(_logs).toContainMatchObject({
788 |         user: 'alice',
789 |         object: {
790 |           type: 'Fiori_1.LastOne',
791 |           id: { ID: oldLast.ID }
792 |         },
793 |         data_subject: DATA_SUBJECT,
794 |         attributes: [{ name: 'lastOneField', old: '***' }]
795 |       })
796 |
797 |       const selects = _logger._logs.debug.filter(
798 |         l => typeof l === 'string' && l.match(/^SELECT/) && l.match(/SELECT [Customers.]*ID FROM Fiori_1_Customers/)
799 |       )
800 |       expect(selects.length).toBe(1)
801 |     })
802 |
803 |     test('with atomicity group', async () => {
804 |       let response = await GET(
805 |         `/fiori-1/Customers(ID=${CUSTOMER_ID},IsActiveEntity=true)?$expand=addresses($expand=attachments($expand=notes)),status($expand=change($expand=last),notes)`,
806 |         { auth: ALICE }
807 |       )
808 |       const oldAddresses = response.data.addresses
809 |       const oldAttachments = response.data.addresses[0].attachments
810 |       const oldAttachmentNotes = response.data.addresses[0].attachments[0].notes
811 |
812 |       // reset logs
813 |       _logs = []
814 |
815 |       response = await POST(
816 |         `/fiori-1/Customers(ID=bcd4a37a-6319-4d52-bb48-02fd06b9ffe9,IsActiveEntity=true)/Fiori_1.draftEdit`,
817 |         { PreserveChanges: true },
818 |         { auth: ALICE }
819 |       )
820 |
821 |       expect(response).toMatchObject({ status: 201 })
822 |       expect(_logs.length).toBe(10)
823 |
824 |       // reset logs
825 |       _logs = []
826 |
827 |       response = await PATCH(
828 |         `/fiori-1/Customers(ID=bcd4a37a-6319-4d52-bb48-02fd06b9ffe9,IsActiveEntity=false)`,
829 |         { status: null },
830 |         { auth: ALICE }
831 |       )
832 |
833 |       expect(response).toMatchObject({ status: 200 })
834 |       expect(_logs.length).toBe(0)
835 |
836 |       // reset logs
837 |       _logs = []
838 |
839 |       const body = {
840 |         requests: [
841 |           {
842 |             method: 'POST',
843 |             url: `/Customers(ID=bcd4a37a-6319-4d52-bb48-02fd06b9ffe9,IsActiveEntity=false)/Fiori_1.draftActivate`,
844 |             headers: { 'content-type': 'application/json', 'odata-version': '4.0' },
845 |             id: 'r1',
846 |             atomicityGroup: 'g1'
847 |           },
848 |           {
849 |             method: 'DELETE',
850 |             url: `/Customers(ID=bcd4a37a-6319-4d52-bb48-02fd06b9ffe9,IsActiveEntity=true)`,
851 |             headers: { 'content-type': 'application/json', 'odata-version': '4.0' },
852 |             id: 'r2',
853 |             atomicityGroup: 'g1',
854 |             dependsOn: ['r1']
855 |           }
856 |         ]
857 |       }
858 |       response = await POST('/fiori-1/$batch', body, { auth: ALICE })
859 |       expect(response).toMatchObject({ status: 200 })
860 |       expect(response.data.responses.every(r => r.status >= 200 && r.status < 300)).toBeTruthy()
861 |       expect(_logs.length).toBe(11)
862 |       expect(_logs).toContainMatchObject({
863 |         user: 'alice',
864 |         object: {
865 |           type: 'Fiori_1.CustomerPostalAddress',
866 |           id: { ID: oldAddresses[0].ID }
867 |         },
868 |         data_subject: DATA_SUBJECT,
869 |         attributes: [
870 |           { name: 'street', old: '***' },
871 |           { name: 'town', old: oldAddresses[0].town }
872 |         ]
873 |       })
874 |       expect(_logs).toContainMatchObject({
875 |         user: 'alice',
876 |         object: {
877 |           type: 'Fiori_1.AddressAttachment',
878 |           id: { ID: oldAttachments[0].ID }
879 |         },
880 |         data_subject: DATA_SUBJECT,
881 |         attributes: [
882 |           { name: 'description', old: '***' },
883 |           { name: 'todo', old: oldAttachments[0].todo }
884 |         ]
885 |       })
886 |       expect(_logs).toContainMatchObject({
887 |         user: 'alice',
888 |         object: {
889 |           type: 'Fiori_1.AddressAttachment',
890 |           id: { ID: oldAttachments[1].ID }
891 |         },
892 |         data_subject: DATA_SUBJECT,
893 |         attributes: [
894 |           { name: 'description', old: '***' },
895 |           { name: 'todo', old: oldAttachments[1].todo }
896 |         ]
897 |       })
898 |       expect(_logs).toContainMatchObject({
899 |         user: 'alice',
900 |         object: {
901 |           type: 'Fiori_1.CustomerPostalAddress',
902 |           id: { ID: oldAddresses[1].ID }
903 |         },
904 |         data_subject: DATA_SUBJECT,
905 |         attributes: [
906 |           { name: 'street', old: '***' },
907 |           { name: 'town', old: oldAddresses[1].town }
908 |         ]
909 |       })
910 |       expect(_logs).toContainMatchObject({
911 |         user: 'alice',
912 |         object: {
913 |           type: 'Fiori_1.Notes',
914 |           id: { ID: oldAttachmentNotes[0].ID }
915 |         },
916 |         data_subject: DATA_SUBJECT,
917 |         attributes: [{ name: 'note', old: '***' }]
918 |       })
919 |     })
920 |   })
921 | })
922 |
--------------------------------------------------------------------------------
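Note on the custom matcher used above: `toContainMatchObject` is not a built-in Jest matcher; the test setup registers it (the setup file itself is not part of this excerpt). A minimal sketch of such a matcher, an illustration only and not the project's actual implementation, could look like this, assuming it simply checks that at least one element of the received array satisfies `toMatchObject` semantics:

    // Hypothetical registration, e.g. in a Jest setup file loaded via setupFilesAfterEnv.
    // Reuses Jest's own toMatchObject to get recursive-subset matching per array element.
    expect.extend({
      toContainMatchObject(received, expected) {
        const pass =
          Array.isArray(received) &&
          received.some(item => {
            try {
              expect(item).toMatchObject(expected)
              return true
            } catch {
              return false
            }
          })
        return {
          pass,
          message: () =>
            `expected array ${pass ? 'not ' : ''}to contain an entry matching ${this.utils.printExpected(expected)}`
        }
      }
    })

With a matcher along these lines, an assertion such as `expect(_logs).toContainMatchObject({ user: 'alice', ... })` passes as soon as any collected audit-log entry matches the given partial shape, which is why the tests can assert individual log entries without depending on their order.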