├── website ├── static │ ├── .nojekyll │ └── img │ │ └── kafkajs-logoV2.svg ├── src │ ├── pages │ │ ├── markdown-page.md │ │ ├── index.module.css │ │ └── index.tsx │ ├── components │ │ └── HomepageFeatures │ │ │ ├── styles.module.css │ │ │ └── index.tsx │ └── css │ │ └── custom.css ├── tsconfig.json ├── sidebars.ts ├── package.json └── docusaurus.config.ts ├── .dockerignore ├── fixtures ├── proto │ ├── encodedAnotherPersonV2.ts │ └── encodedNestedV2.ts ├── avdl │ ├── simple.avdl │ ├── enum.avdl │ ├── import.avdl │ ├── two.avdl │ ├── enum_union.avdl │ ├── array.avdl │ ├── union.avdl │ ├── import_multiple_namespaces.avdl │ ├── array_union.avdl │ ├── multiple.avdl │ ├── multiple_union.avdl │ ├── multiple_namespaces.avdl │ └── complex.avdl ├── avsc │ ├── invalid │ │ ├── missingFields.avsc │ │ ├── missingName.avsc │ │ ├── missingType.avsc │ │ └── invalidType.avsc │ ├── non_namespaced.avsc │ └── person.avsc ├── wrongMagicByte.ts ├── avro │ └── encodedAnotherPersonV2.ts └── json │ └── encodedAnotherPersonV2.ts ├── src ├── utils │ ├── index.ts │ ├── readAVSC.spec.ts │ ├── readAVSC.ts │ ├── avdlToAVSC.ts │ └── avdlToAVSC.spec.ts ├── wireDecoder.ts ├── index.ts ├── wireEncoder.ts ├── constants.ts ├── api │ ├── middleware │ │ ├── userAgent.ts │ │ ├── confluentEncoderMiddleware.ts │ │ ├── errorMiddleware.ts │ │ ├── userAgent.spec.ts │ │ └── errorMiddleware.spec.ts │ ├── index.spec.ts │ └── index.ts ├── errors.ts ├── JsonHelper.ts ├── ProtoHelper.ts ├── cache.ts ├── ProtoSchema.ts ├── @types.ts ├── JsonSchema.ts ├── AvroHelper.ts ├── schemaTypeResolver.ts ├── SchemaRegistry.spec.ts ├── SchemaRegistry.protobuf.spec.ts ├── SchemaRegistry.json.spec.ts ├── SchemaRegistry.ts ├── SchemaRegistry.avro.spec.ts └── SchemaRegistry.newApi.spec.ts ├── dockest-error.json ├── .prettierrc.js ├── Dockerfile ├── bin └── avdlToAVSC.sh ├── pipeline ├── prepareRelease.sh └── updateGithubRelease.js ├── jest.config.js ├── .gitignore ├── .npmignore ├── tsconfig.json ├── LICENSE ├── docs ├── 
introduction.md ├── custom-types.md ├── schema-protobuf.md ├── development.md ├── usage-with-kafkajs.md ├── schema-avro.md ├── schema-json.md ├── advanced-usage.md ├── schema.md ├── v2.md └── usage.md ├── .eslintrc.js ├── package.json ├── docker-compose.yml ├── README.md ├── jest.setup.ts ├── azure-pipelines.yml └── CHANGELOG.md /website/static/.nojekyll: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | */node_modules 2 | *.log 3 | -------------------------------------------------------------------------------- /fixtures/proto/encodedAnotherPersonV2.ts: -------------------------------------------------------------------------------- 1 | export default [0, 0, 0, 0, 3, 10, 8, 74, 111, 104, 110, 32, 68, 111, 101] 2 | -------------------------------------------------------------------------------- /src/utils/index.ts: -------------------------------------------------------------------------------- 1 | export { avdlToAVSC, avdlToAVSCAsync } from './avdlToAVSC' 2 | export { readAVSC, readAVSCAsync } from './readAVSC' 3 | -------------------------------------------------------------------------------- /fixtures/avdl/simple.avdl: -------------------------------------------------------------------------------- 1 | @namespace("com.org.domain.fixtures") 2 | protocol SimpleProto { 3 | record Simple { 4 | string foo; 5 | } 6 | } -------------------------------------------------------------------------------- /fixtures/avsc/invalid/missingFields.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "MissingFields", 4 | "namespace": "com.org.domain.fixtures" 5 | } 6 | -------------------------------------------------------------------------------- /dockest-error.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "errorPayload": { 3 | "trap": "SIGINT", 4 | "signal": "SIGINT" 5 | }, 6 | "timestamp": "2024-12-18T22:39:59.117Z" 7 | } -------------------------------------------------------------------------------- /.prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | 'max-len': 'off', 3 | printWidth: 100, 4 | semi: false, 5 | singleQuote: true, 6 | tabWidth: 2, 7 | trailingComma: 'all', 8 | } 9 | -------------------------------------------------------------------------------- /website/src/pages/markdown-page.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Markdown page example 3 | --- 4 | 5 | # Markdown page example 6 | 7 | You don't need React to write simple standalone pages. 8 | -------------------------------------------------------------------------------- /fixtures/avdl/enum.avdl: -------------------------------------------------------------------------------- 1 | @namespace("com.org.app.track") 2 | protocol EnumProto { 3 | enum Foos { 4 | foo, bar, baz 5 | } 6 | 7 | record Enum { 8 | Foos foos; 9 | } 10 | } -------------------------------------------------------------------------------- /src/wireDecoder.ts: -------------------------------------------------------------------------------- 1 | export default (buffer: Buffer) => ({ 2 | magicByte: buffer.slice(0, 1), 3 | registryId: buffer.slice(1, 5).readInt32BE(0), 4 | payload: buffer.slice(5, buffer.length), 5 | }) 6 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:8.11.4 2 | 3 | WORKDIR /app/website 4 | 5 | EXPOSE 3000 35729 6 | COPY ./docs /app/docs 7 | COPY ./website /app/website 8 | RUN yarn install 9 | 10 | CMD ["yarn", "start"] 11 | 
-------------------------------------------------------------------------------- /fixtures/avdl/import.avdl: -------------------------------------------------------------------------------- 1 | @namespace("com.org.domain.fixtures") 2 | protocol ImportProto { 3 | import idl "simple.avdl"; 4 | 5 | record Import { 6 | com.org.domain.fixtures.Simple simple; 7 | } 8 | } -------------------------------------------------------------------------------- /fixtures/avdl/two.avdl: -------------------------------------------------------------------------------- 1 | @namespace("com.org.domain.fixtures") 2 | protocol TwoProto { 3 | record Bar { 4 | string baz; 5 | } 6 | 7 | record Two { 8 | string foo; 9 | Bar bar; 10 | } 11 | } -------------------------------------------------------------------------------- /fixtures/avsc/non_namespaced.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "NoNamespacePerson", 4 | "fields": [ 5 | { 6 | "type": "string", 7 | "name": "full_name" 8 | } 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /fixtures/avdl/enum_union.avdl: -------------------------------------------------------------------------------- 1 | @namespace("com.org.app.track") 2 | protocol EnumUnionProto { 3 | enum Foos { 4 | foo, bar, baz 5 | } 6 | 7 | record EnumUnion { 8 | union {null, Foos} foos = null; 9 | } 10 | } -------------------------------------------------------------------------------- /fixtures/avsc/invalid/missingName.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "namespace": "com.org.domain.fixtures", 4 | "fields": [ 5 | { 6 | "type": "string", 7 | "name": "full_name" 8 | } 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /fixtures/avdl/array.avdl: 
-------------------------------------------------------------------------------- 1 | @namespace("com.org.app.track") 2 | protocol ArrayProto { 3 | record KeyValue { 4 | string key; 5 | string value; 6 | } 7 | 8 | record Array { 9 | array properties; 10 | } 11 | } -------------------------------------------------------------------------------- /fixtures/avsc/invalid/missingType.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "name": "MissingType", 3 | "namespace": "com.org.domain.fixtures", 4 | "fields": [ 5 | { 6 | "type": "string", 7 | "name": "full_name" 8 | } 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /fixtures/avsc/person.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "Person", 4 | "namespace": "com.org.domain.fixtures", 5 | "fields": [ 6 | { 7 | "type": "string", 8 | "name": "fullName" 9 | } 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /fixtures/avdl/union.avdl: -------------------------------------------------------------------------------- 1 | @namespace("com.org.domain.fixtures") 2 | protocol UnionProto { 3 | record Bar { 4 | string baz; 5 | } 6 | 7 | record Union { 8 | string foo; 9 | union {null, Bar} bar = null; 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import { default as SchemaRegistry, DecodeOptions } from './SchemaRegistry' 2 | export { SchemaRegistry, DecodeOptions } 3 | export * from './utils' 4 | export { SchemaType } from './@types' 5 | export { COMPATIBILITY } from './constants' 6 | -------------------------------------------------------------------------------- /fixtures/avsc/invalid/invalidType.avsc: 
-------------------------------------------------------------------------------- 1 | { 2 | "type": "invalid", 3 | "name": "InvalidType", 4 | "namespace": "com.org.domain.fixtures", 5 | "fields": [ 6 | { 7 | "type": "string", 8 | "name": "full_name" 9 | } 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /fixtures/avdl/import_multiple_namespaces.avdl: -------------------------------------------------------------------------------- 1 | @namespace("com.org.domain.fixtures") 2 | protocol ImportMultipleNamespacesProto { 3 | import idl "multiple_namespaces.avdl"; 4 | 5 | record ImportMultipleNamespaces { 6 | com.org.messaging.Metadata metadata; 7 | } 8 | } -------------------------------------------------------------------------------- /fixtures/avdl/array_union.avdl: -------------------------------------------------------------------------------- 1 | @namespace("com.org.app.track") 2 | protocol ArrayUnionProto { 3 | record KeyValue { 4 | string key; 5 | string value; 6 | } 7 | 8 | record ArrayUnion { 9 | union {null, array } properties = null; 10 | } 11 | } -------------------------------------------------------------------------------- /fixtures/avdl/multiple.avdl: -------------------------------------------------------------------------------- 1 | @namespace("com.org.domain.fixtures") 2 | protocol MultipleProto { 3 | record Baz { 4 | string bam; 5 | } 6 | 7 | record Bar { 8 | Baz baz; 9 | } 10 | 11 | record Multiple { 12 | string foo; 13 | Bar bar; 14 | } 15 | } -------------------------------------------------------------------------------- /website/src/components/HomepageFeatures/styles.module.css: -------------------------------------------------------------------------------- 1 | .features { 2 | display: flex; 3 | align-items: center; 4 | padding: 2rem 0; 5 | width: 100%; 6 | margin-top: 100px; 7 | margin-bottom: 100px; 8 | } 9 | 10 | .featureSvg { 11 | height: 200px; 12 | width: 200px; 13 | } 14 | 
-------------------------------------------------------------------------------- /fixtures/proto/encodedNestedV2.ts: -------------------------------------------------------------------------------- 1 | export default [0, 0, 0, 0, 32, 10, 10, 100, 97, 116, 97, 45, 118, 97, 108, 117, 101, 18, 17, 10, 15, 115, 111, 109, 101, 70, 105, 101, 108, 100, 45, 118, 97, 108, 117, 101, 26, 22, 10, 20, 115, 111, 109, 101, 79, 116, 104, 101, 114, 70, 105, 101, 108, 100, 45, 118, 97, 108, 117, 101 ] -------------------------------------------------------------------------------- /bin/avdlToAVSC.sh: -------------------------------------------------------------------------------- 1 | avdl_path=$1 2 | avsc_name=$2 3 | 4 | if [ -z "${avdl_path}" ]; then 5 | echo "AVDL path not defined. e.g. ./bin/protocolToSchema.sh /path/to.avdl" 6 | exit; 7 | fi 8 | 9 | docker run --rm -v "$(pwd)":/avro kpnnl/avro-tools:1.12.0 idl2schemata ${avdl_path} tmp && cat tmp/${avsc_name}.avsc 10 | -------------------------------------------------------------------------------- /fixtures/wrongMagicByte.ts: -------------------------------------------------------------------------------- 1 | export default [ 2 | 48, 3 | 0, 4 | 0, 5 | 0, 6 | 3, 7 | 16, 8 | 74, 9 | 111, 10 | 104, 11 | 110, 12 | 32, 13 | 68, 14 | 111, 15 | 101, 16 | 18, 17 | 83, 18 | 116, 19 | 111, 20 | 99, 21 | 107, 22 | 104, 23 | 111, 24 | 108, 25 | 109, 26 | ] 27 | -------------------------------------------------------------------------------- /website/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | // This file is not used in compilation. It is here just for a nice editor experience. 
3 | "extends": "@docusaurus/tsconfig", 4 | "compilerOptions": { 5 | "baseUrl": ".", 6 | "types": ["@docusaurus/theme-classic"] 7 | }, 8 | "exclude": [".docusaurus", "build"] 9 | } 10 | -------------------------------------------------------------------------------- /fixtures/avro/encodedAnotherPersonV2.ts: -------------------------------------------------------------------------------- 1 | export default [ 2 | 0, 3 | 0, 4 | 0, 5 | 0, 6 | 3, 7 | 16, 8 | 74, 9 | 111, 10 | 104, 11 | 110, 12 | 32, 13 | 68, 14 | 111, 15 | 101, 16 | 18, 17 | 83, 18 | 116, 19 | 111, 20 | 99, 21 | 107, 22 | 104, 23 | 111, 24 | 108, 25 | 109, 26 | ] 27 | -------------------------------------------------------------------------------- /fixtures/avdl/multiple_union.avdl: -------------------------------------------------------------------------------- 1 | @namespace("com.org.domain.fixtures") 2 | protocol MultipleUnionProto { 3 | record Baz { 4 | string bam; 5 | } 6 | 7 | record Bar { 8 | union {null, Baz} baz = null; 9 | } 10 | 11 | record MultipleUnion { 12 | string foo; 13 | union {null, Bar} bar = null; 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /pipeline/prepareRelease.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euv 3 | 4 | rm -rf dist/ 5 | rm -rf release/ 6 | 7 | npm run build 8 | 9 | mkdir -p release 10 | mv ./dist ./release/dist 11 | cp package.json ./release/package.json 12 | cp README.md ./release/README.md 13 | cp CHANGELOG.md ./release/CHANGELOG.md 14 | cp LICENSE ./release/LICENSE 15 | -------------------------------------------------------------------------------- /src/wireEncoder.ts: -------------------------------------------------------------------------------- 1 | const DEFAULT_OFFSET = 0 2 | 3 | export const MAGIC_BYTE = Buffer.alloc(1) 4 | 5 | export const encode = (registryId: number, payload: Buffer) => { 6 | const registryIdBuffer = Buffer.alloc(4) 7 
| registryIdBuffer.writeInt32BE(registryId, DEFAULT_OFFSET) 8 | 9 | return Buffer.concat([MAGIC_BYTE, registryIdBuffer, payload]) 10 | } 11 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | preset: 'ts-jest', 3 | testEnvironment: 'node', 4 | globals: { 5 | 'ts-jest': { 6 | diagnostics: false, 7 | }, 8 | }, 9 | testPathIgnorePatterns: ['/node_modules/'], 10 | watchPathIgnorePatterns: ['/node_modules/'], 11 | roots: ['.'], 12 | setupFilesAfterEnv: ['/jest.setup.ts'], 13 | } 14 | -------------------------------------------------------------------------------- /fixtures/json/encodedAnotherPersonV2.ts: -------------------------------------------------------------------------------- 1 | export default [ 2 | 0, 3 | 0, 4 | 0, 5 | 0, 6 | 3, 7 | 123, 8 | 34, 9 | 102, 10 | 117, 11 | 108, 12 | 108, 13 | 78, 14 | 97, 15 | 109, 16 | 101, 17 | 34, 18 | 58, 19 | 34, 20 | 74, 21 | 111, 22 | 104, 23 | 110, 24 | 32, 25 | 68, 26 | 111, 27 | 101, 28 | 34, 29 | 125, 30 | ] 31 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # OSX 2 | # 3 | .DS_Store 4 | 5 | # node.js 6 | # 7 | node_modules 8 | npm-debug.log 9 | yarn-error.log 10 | junit.xml 11 | test-report.xml 12 | .eslintcache 13 | 14 | # Others 15 | # 16 | trash 17 | 18 | \.vscode/ 19 | \.vs/ 20 | 21 | # confluent-schema-registry 22 | # 23 | tmp 24 | dist 25 | release 26 | 27 | # website 28 | # 29 | build 30 | *.swp 31 | *~ 32 | 33 | .docusaurus -------------------------------------------------------------------------------- /website/sidebars.ts: -------------------------------------------------------------------------------- 1 | import { SidebarsConfig } from '@docusaurus/plugin-content-docs' 2 | 3 | const sidebars: SidebarsConfig = { 4 | docs: { 5 
| 'Getting started': ['introduction', 'usage', 'advanced-usage'], 6 | 'How-to': ['custom-types', 'usage-with-kafkajs', 'schemas'], 7 | 'Migration guides': ['v2'], 8 | Contributing: ['development'], 9 | }, 10 | } 11 | 12 | export default sidebars 13 | -------------------------------------------------------------------------------- /src/constants.ts: -------------------------------------------------------------------------------- 1 | export enum COMPATIBILITY { 2 | NONE = 'NONE', 3 | FULL = 'FULL', 4 | BACKWARD = 'BACKWARD', 5 | FORWARD = 'FORWARD', 6 | BACKWARD_TRANSITIVE = 'BACKWARD_TRANSITIVE', 7 | FORWARD_TRANSITIVE = 'FORWARD_TRANSITIVE', 8 | FULL_TRANSITIVE = 'FULL_TRANSITIVE', 9 | } 10 | 11 | export const DEFAULT_SEPARATOR = '.' 12 | 13 | export const DEFAULT_API_CLIENT_ID = 'Confluent_Schema_Registry' 14 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | test-report.xml 3 | junit.xml 4 | coverage/ 5 | yarn-error.log 6 | yarn.lock 7 | **/*.spec.js 8 | __tests__ 9 | __snapshots__ 10 | fixtures 11 | declarations/ 12 | docs/ 13 | pipeline/ 14 | website/ 15 | docker-compose.yml 16 | docker-compose.*.yml 17 | jest.config.js 18 | jest.setup.js 19 | .prettierrc 20 | .prettierignore 21 | .eslintrc.js 22 | .eslintignore 23 | .travis.yml 24 | azure-pipelines.yml 25 | *.png 26 | *.svg 27 | *.tgz 28 | .vscode/ 29 | .tsconfig.json 30 | *.swp 31 | *~ 32 | -------------------------------------------------------------------------------- /website/src/pages/index.module.css: -------------------------------------------------------------------------------- 1 | /** 2 | * CSS files with the .module.css suffix will be treated as CSS modules 3 | * and scoped locally. 
4 | */ 5 | 6 | .heroBanner { 7 | padding: 4rem 0; 8 | text-align: center; 9 | position: relative; 10 | overflow: hidden; 11 | } 12 | 13 | @media screen and (max-width: 996px) { 14 | .heroBanner { 15 | padding: 2rem; 16 | } 17 | } 18 | 19 | .buttons { 20 | display: flex; 21 | align-items: center; 22 | justify-content: center; 23 | gap: 12px; 24 | } 25 | 26 | .mainImage { 27 | max-width: 150px; 28 | } 29 | -------------------------------------------------------------------------------- /fixtures/avdl/multiple_namespaces.avdl: -------------------------------------------------------------------------------- 1 | @namespace("com.org.confluentschemaregistry") 2 | protocol MultipleNamespacesProto { 3 | @namespace("com.org.messaging") 4 | record Metadata { 5 | string event_id; 6 | string publisher_system_id; 7 | @logicalType("timestamp-millis") 8 | long occurred_at; 9 | @logicalType("timestamp-millis") 10 | long published_at; 11 | string correlation_id; 12 | } 13 | 14 | record MultipleNamespaces { 15 | com.org.messaging.Metadata metadata; 16 | string id; 17 | int amount; 18 | string description; 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "baseUrl": ".", 4 | "paths": { 5 | "*": ["src/*", "declarations/*"] 6 | }, 7 | "outDir": "dist", 8 | "lib": ["es2019", "dom"], 9 | "module": "commonjs", 10 | "moduleResolution": "node", 11 | "sourceMap": true, 12 | "strict": true, 13 | "target": "es2019", 14 | "noUnusedParameters": true, 15 | "noUnusedLocals": true, 16 | "esModuleInterop": true, 17 | "declaration": true, 18 | "resolveJsonModule": true 19 | }, 20 | "include": ["./src/**/*.ts", "declarations/**/*.ts"], 21 | "exclude": ["dist", "node_modules", "**/*.spec.ts"] 22 | } 23 | -------------------------------------------------------------------------------- /src/api/middleware/userAgent.ts: 
-------------------------------------------------------------------------------- 1 | import { Middleware } from 'mappersmith' 2 | import { DEFAULT_API_CLIENT_ID } from '../../constants' 3 | 4 | const product = '@kafkajs/confluent-schema-registry' 5 | 6 | const userAgentMiddleware: Middleware = ({ clientId }) => { 7 | const comment = clientId !== DEFAULT_API_CLIENT_ID ? clientId : undefined 8 | const userAgent = comment ? `${product} (${comment})` : product 9 | const headers = { 10 | 'User-Agent': userAgent, 11 | } 12 | return { 13 | prepareRequest: next => { 14 | return next().then(req => req.enhance({ headers })) 15 | }, 16 | } 17 | } 18 | 19 | export default userAgentMiddleware 20 | -------------------------------------------------------------------------------- /fixtures/avdl/complex.avdl: -------------------------------------------------------------------------------- 1 | @namespace("com.org.app.track") 2 | protocol ComplexProto { 3 | enum Foos { 4 | foo, bar, baz 5 | } 6 | 7 | record Bam { 8 | string bam; 9 | } 10 | 11 | record Array3 { 12 | union {null, Bam} bam = null; 13 | } 14 | 15 | record Array2 { 16 | string array2_name; 17 | union {null, Foos} foos = null; 18 | } 19 | 20 | record Array1 { 21 | union {null, array } array2 = null; 22 | } 23 | 24 | record Baz { 25 | union {null, array } array1 = null; 26 | } 27 | 28 | record Bar { 29 | Baz baz; 30 | } 31 | 32 | record Complex { 33 | union {null, array } array1 = null; 34 | array array3; 35 | Bar bar; 36 | } 37 | } -------------------------------------------------------------------------------- /src/utils/readAVSC.spec.ts: -------------------------------------------------------------------------------- 1 | import path from 'path' 2 | 3 | import { readAVSC, readAVSCAsync } from './readAVSC' 4 | import { ConfluentSchemaRegistryInvalidSchemaError } from '../errors' 5 | 6 | describe('readAVSC', () => { 7 | const invalidSchemaFiles = ['invalidType', 'missingFields', 'missingName', 'missingType'] 8 | 
invalidSchemaFiles.forEach(schemaName => { 9 | it(`throws an exception for invalid schema definitions - ${schemaName}`, () => { 10 | expect(() => 11 | readAVSC(path.join(__dirname, `../../fixtures/avsc/invalid/${schemaName}.avsc`)), 12 | ).toThrow(ConfluentSchemaRegistryInvalidSchemaError) 13 | }) 14 | }) 15 | }) 16 | 17 | describe('readAVSCAsync', () => { 18 | it('returns a validated schema asynchronously', async () => { 19 | return expect( 20 | readAVSCAsync(path.join(__dirname, `../../fixtures/avsc/person.avsc`)), 21 | ).resolves.toHaveProperty('name', 'Person') 22 | }) 23 | }) 24 | -------------------------------------------------------------------------------- /src/errors.ts: -------------------------------------------------------------------------------- 1 | class ConfluentSchemaRegistryError extends Error { 2 | constructor(error: any) { 3 | super(error.message || error) 4 | this.name = this.constructor.name 5 | } 6 | } 7 | 8 | class ConfluentSchemaRegistryArgumentError extends ConfluentSchemaRegistryError {} 9 | class ConfluentSchemaRegistryCompatibilityError extends ConfluentSchemaRegistryError {} 10 | class ConfluentSchemaRegistryInvalidSchemaError extends ConfluentSchemaRegistryError {} 11 | class ConfluentSchemaRegistryValidationError extends ConfluentSchemaRegistryError { 12 | public paths: string[][] 13 | 14 | constructor(error: any, paths: string[][]) { 15 | super(error) 16 | this.paths = paths 17 | } 18 | } 19 | 20 | export { 21 | ConfluentSchemaRegistryError, 22 | ConfluentSchemaRegistryArgumentError, 23 | ConfluentSchemaRegistryCompatibilityError, 24 | ConfluentSchemaRegistryInvalidSchemaError, 25 | ConfluentSchemaRegistryValidationError, 26 | } 27 | -------------------------------------------------------------------------------- /src/api/middleware/confluentEncoderMiddleware.ts: -------------------------------------------------------------------------------- 1 | import { Middleware, Response } from 'mappersmith' 2 | 3 | const REQUEST_HEADERS = { 4 
| 'Content-Type': 'application/vnd.schemaregistry.v1+json', 5 | } 6 | 7 | const updateContentType = (response: Response) => 8 | response.enhance({ 9 | headers: { 10 | 'content-type': 'application/json', 11 | }, 12 | }) 13 | 14 | const confluentEncoderMiddleware: Middleware = () => ({ 15 | request: req => { 16 | try { 17 | if (req.body()) { 18 | return req.enhance({ 19 | headers: REQUEST_HEADERS, 20 | body: JSON.stringify(req.body()), 21 | }) 22 | } 23 | } catch (_) {} 24 | 25 | return req.enhance({ headers: REQUEST_HEADERS }) 26 | }, 27 | 28 | response: next => 29 | next() 30 | .then(updateContentType) 31 | .catch((response: Response) => { 32 | throw updateContentType(response) 33 | }), 34 | }) 35 | 36 | export default confluentEncoderMiddleware 37 | -------------------------------------------------------------------------------- /src/JsonHelper.ts: -------------------------------------------------------------------------------- 1 | import { 2 | Schema, 3 | SchemaHelper, 4 | ConfluentSubject, 5 | SchemaResponse, 6 | SchemaType, 7 | ProtocolOptions, 8 | JsonConfluentSchema, 9 | } from './@types' 10 | import { ConfluentSchemaRegistryError } from './errors' 11 | 12 | export default class JsonHelper implements SchemaHelper { 13 | public validate(_schema: Schema): void { 14 | return 15 | } 16 | 17 | public getSubject( 18 | _confluentSchema: JsonConfluentSchema, 19 | _schema: Schema, 20 | _separator: string, 21 | ): ConfluentSubject { 22 | throw new ConfluentSchemaRegistryError('not implemented yet') 23 | } 24 | 25 | public toConfluentSchema(data: SchemaResponse): JsonConfluentSchema { 26 | return { type: SchemaType.JSON, schema: data.schema, references: data.references } 27 | } 28 | 29 | updateOptionsFromSchemaReferences( 30 | referencedSchemas: JsonConfluentSchema[], 31 | options: ProtocolOptions = {}, 32 | ): ProtocolOptions { 33 | return { ...options, [SchemaType.JSON]: { ...options[SchemaType.JSON], referencedSchemas } } 34 | } 35 | } 36 | 
-------------------------------------------------------------------------------- /src/ProtoHelper.ts: -------------------------------------------------------------------------------- 1 | import { 2 | Schema, 3 | SchemaHelper, 4 | ConfluentSubject, 5 | SchemaResponse, 6 | SchemaType, 7 | ProtocolOptions, 8 | ProtoConfluentSchema, 9 | } from './@types' 10 | import { ConfluentSchemaRegistryError } from './errors' 11 | 12 | export default class ProtoHelper implements SchemaHelper { 13 | public validate(_schema: Schema): void { 14 | return 15 | } 16 | 17 | public getSubject( 18 | _confluentSchema: ProtoConfluentSchema, 19 | _schema: Schema, 20 | _separator: string, 21 | ): ConfluentSubject { 22 | throw new ConfluentSchemaRegistryError('not implemented yet') 23 | } 24 | 25 | public toConfluentSchema(data: SchemaResponse): ProtoConfluentSchema { 26 | return { type: SchemaType.PROTOBUF, schema: data.schema, references: data.references } 27 | } 28 | 29 | updateOptionsFromSchemaReferences( 30 | referencedSchemas: ProtoConfluentSchema[], 31 | options: ProtocolOptions = {}, 32 | ): ProtocolOptions { 33 | return { 34 | ...options, 35 | [SchemaType.PROTOBUF]: { ...options[SchemaType.PROTOBUF], referencedSchemas }, 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /src/cache.ts: -------------------------------------------------------------------------------- 1 | import { AvroSchema, Schema, SchemaType } from './@types' 2 | 3 | type CacheEntry = { type: SchemaType; schema: Schema | AvroSchema } 4 | 5 | export default class Cache { 6 | registryIdBySubject: { [key: string]: number } 7 | schemasByRegistryId: { [key: string]: CacheEntry } 8 | 9 | constructor() { 10 | this.registryIdBySubject = {} 11 | this.schemasByRegistryId = {} 12 | } 13 | 14 | getLatestRegistryId = (subject: string): number | undefined => this.registryIdBySubject[subject] 15 | 16 | setLatestRegistryId = (subject: string, id: number): number => { 17 | 
this.registryIdBySubject[subject] = id 18 | 19 | return this.registryIdBySubject[subject] 20 | } 21 | 22 | getSchema = (registryId: number): CacheEntry | undefined => this.schemasByRegistryId[registryId] 23 | 24 | setSchema = (registryId: number, type: SchemaType, schema: Schema): CacheEntry => { 25 | this.schemasByRegistryId[registryId] = { type, schema } 26 | 27 | return this.schemasByRegistryId[registryId] 28 | } 29 | 30 | clear = (): void => { 31 | this.registryIdBySubject = {} 32 | this.schemasByRegistryId = {} 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Erik Engervall (erik.engervall@gmail.com) 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
-------------------------------------------------------------------------------- /src/api/middleware/errorMiddleware.ts: -------------------------------------------------------------------------------- 1 | import { Middleware, Response } from 'mappersmith' 2 | 3 | interface ConfluenceResponse extends Omit { 4 | data: () => { 5 | message: string 6 | } 7 | } 8 | 9 | class ResponseError extends Error { 10 | status: number 11 | unauthorized: boolean 12 | url: string 13 | 14 | constructor(clientName: string, response: ConfluenceResponse) { 15 | super( 16 | `${clientName} - ${response.data().message || 17 | `Error, status ${response.status()}${response.data() ? `: ${response.data()}` : ''}`}`, 18 | ) 19 | 20 | const request = response.request() 21 | this.name = this.constructor.name 22 | this.status = response.status() 23 | this.unauthorized = this.status === 401 24 | this.url = `${request.method()} ${request.url()}` 25 | } 26 | } 27 | 28 | const errorMiddleware: Middleware = ({ clientId }) => ({ 29 | response: next => 30 | new Promise((resolve, reject) => 31 | next() 32 | .then(resolve) 33 | .catch((response: Response) => reject(new ResponseError(clientId ?? 
'', response))), 34 | ), 35 | }) 36 | 37 | export default errorMiddleware 38 | -------------------------------------------------------------------------------- /src/utils/readAVSC.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import { promisify } from 'util' 3 | 4 | import { RawAvroSchema } from '../@types' 5 | import { ConfluentSchemaRegistryInvalidSchemaError } from '../errors' 6 | 7 | const readFileAsync = promisify(fs.readFile) 8 | const ENCODING = 'utf-8' 9 | 10 | function isValidSchema(rawSchema: any): rawSchema is RawAvroSchema { 11 | return ( 12 | 'name' in rawSchema && 13 | 'type' in rawSchema && 14 | rawSchema.type === 'record' && 15 | 'fields' in rawSchema 16 | ) 17 | } 18 | 19 | function validatedSchema(path: string, rawSchema: any): RawAvroSchema { 20 | if (!isValidSchema(rawSchema)) { 21 | throw new ConfluentSchemaRegistryInvalidSchemaError( 22 | `${path} is not recognized as a valid AVSC file (expecting valid top-level name, type and fields attributes)`, 23 | ) 24 | } 25 | return rawSchema 26 | } 27 | 28 | export function readAVSC(path: string): RawAvroSchema { 29 | const rawSchema = JSON.parse(fs.readFileSync(path, ENCODING)) 30 | return validatedSchema(path, rawSchema) 31 | } 32 | 33 | export async function readAVSCAsync(path: string): Promise { 34 | const rawSchema = JSON.parse(await readFileAsync(path, ENCODING)) 35 | return validatedSchema(path, rawSchema) 36 | } 37 | -------------------------------------------------------------------------------- /src/api/index.spec.ts: -------------------------------------------------------------------------------- 1 | import { Middleware } from 'mappersmith' 2 | import API from '.' 
3 | import { mockClient, install, uninstall } from 'mappersmith/test' 4 | 5 | const customMiddleware: Middleware = jest.fn(() => { 6 | return { 7 | async request(request) { 8 | return request.enhance({ 9 | headers: { 10 | Authorization: 'Bearer Random', 11 | }, 12 | }) 13 | }, 14 | async response(next) { 15 | return next() 16 | }, 17 | } 18 | }) 19 | 20 | const client = API({ 21 | clientId: 'test-client', 22 | host: 'http://example.com', 23 | middlewares: [customMiddleware], 24 | }) 25 | const mock = mockClient(client) 26 | .resource('Schema') 27 | .method('find') 28 | .with({ id: 'abc' }) 29 | .response({}) 30 | .assertObject() 31 | 32 | describe('API Client', () => { 33 | beforeEach(() => install()) 34 | 35 | afterEach(() => uninstall()) 36 | 37 | it('should include a user agent header and call custom middleware', async () => { 38 | const response = await client.Schema.find({ id: 'abc' }) 39 | 40 | expect(mock.callsCount()).toBe(1) 41 | expect(response.request().header('User-Agent')).not.toBeUndefined() 42 | expect(response.request().header('Authorization')).toBe('Bearer Random') 43 | expect(customMiddleware).toHaveBeenCalled() 44 | }) 45 | }) 46 | -------------------------------------------------------------------------------- /website/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "website-new", 3 | "version": "0.0.0", 4 | "private": true, 5 | "scripts": { 6 | "docusaurus": "docusaurus", 7 | "start": "docusaurus start", 8 | "build": "docusaurus build", 9 | "swizzle": "docusaurus swizzle", 10 | "deploy": "docusaurus deploy", 11 | "clear": "docusaurus clear", 12 | "serve": "docusaurus serve", 13 | "write-translations": "docusaurus write-translations", 14 | "write-heading-ids": "docusaurus write-heading-ids", 15 | "typecheck": "tsc" 16 | }, 17 | "dependencies": { 18 | "@docusaurus/core": "3.7.0", 19 | "@docusaurus/preset-classic": "3.7.0", 20 | "@mdx-js/react": "^3.0.0", 21 | "clsx": "^2.0.0", 22 | 
"prism-react-renderer": "^2.3.0", 23 | "react": "^19.0.0", 24 | "react-dom": "^19.0.0" 25 | }, 26 | "devDependencies": { 27 | "@docusaurus/module-type-aliases": "3.7.0", 28 | "@docusaurus/tsconfig": "3.7.0", 29 | "@docusaurus/types": "3.7.0", 30 | "typescript": "~5.7.3" 31 | }, 32 | "browserslist": { 33 | "production": [ 34 | ">0.5%", 35 | "not dead", 36 | "not op_mini all" 37 | ], 38 | "development": [ 39 | "last 3 chrome version", 40 | "last 3 firefox version", 41 | "last 5 safari version" 42 | ] 43 | }, 44 | "engines": { 45 | "node": ">=18.0" 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /website/src/components/HomepageFeatures/index.tsx: -------------------------------------------------------------------------------- 1 | import { ReactNode } from 'react' 2 | import clsx from 'clsx' 3 | import Heading from '@theme/Heading' 4 | import styles from './styles.module.css' 5 | 6 | type FeatureItem = { 7 | title: string 8 | description: ReactNode 9 | } 10 | 11 | const FeatureList: FeatureItem[] = [ 12 | { 13 | title: 'Simple interface', 14 | description: <>Communicate with Schema Registry via an easily understood interface, 15 | }, 16 | { 17 | title: 'Solid and heavily used', 18 | description: ( 19 | <>The source for this project has been running on large scale production projects for years 20 | ), 21 | }, 22 | { 23 | title: 'All your schemas in one place', 24 | description: <>Full support for Avro, JSON Schema and Protobuf, 25 | }, 26 | ] 27 | 28 | function Feature({ title, description }: FeatureItem) { 29 | return ( 30 |
31 |
32 | {title} 33 |

{description}

34 |
35 |
36 | ) 37 | } 38 | 39 | export default function HomepageFeatures(): ReactNode { 40 | return ( 41 |
42 |
43 |
44 | {FeatureList.map((props, idx) => ( 45 | 46 | ))} 47 |
48 |
49 |
50 | ) 51 | } 52 | -------------------------------------------------------------------------------- /docs/introduction.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: introduction 3 | title: Introduction 4 | sidebar_label: Introduction 5 | --- 6 | 7 | `confluent-schema-registry` is a library that makes it easier to interact with the Confluent schema registry, it provides convenient methods to encode, decode and register new schemas using the Apache Avro, JSON Schema and Protobuf serialization formats and Confluent's [wire format](https://docs.confluent.io/current/schema-registry/docs/serializer-formatter.html#wire-format). 8 | 9 | ## Install 10 | 11 | ```sh 12 | npm install @kafkajs/confluent-schema-registry 13 | # yarn add @kafkajs/confluent-schema-registry 14 | ``` 15 | 16 | ## Example 17 | 18 | This example uses an AVRO schema. For more detailed explanations and examples of other schema types, [see usage](./usage.md). 19 | 20 | ```js 21 | const path = require('path') 22 | const { SchemaRegistry, SchemaType } = require('@kafkajs/confluent-schema-registry') 23 | 24 | const registry = new SchemaRegistry({ host: 'http://localhost:8081' }) 25 | 26 | // Upload a schema to the registry 27 | const schema = ` 28 | { 29 | "type": "record", 30 | "name": "RandomTest", 31 | "namespace": "examples", 32 | "fields": [{ "type": "string", "name": "fullName" }] 33 | } 34 | ` 35 | const { id } = await registry.register({ 36 | type: SchemaType.AVRO, 37 | schema, 38 | }) 39 | 40 | // Encode using the uploaded schema 41 | const payload = { fullName: 'John Doe' } 42 | const encodedPayload = await registry.encode(id, payload) 43 | 44 | // Decode the payload 45 | const decodedPayload = await registry.decode(encodedPayload) 46 | ``` 47 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 
| root: true, 3 | extends: [ 4 | 'prettier', 5 | 'plugin:prettier/recommended', // Enables eslint-plugin-prettier and eslint-config-prettier. This will display prettier errors as ESLint errors. Make sure this is always the last configuration in the extends array. 6 | ], 7 | parser: '@typescript-eslint/parser', 8 | plugins: ['@typescript-eslint', 'no-only-tests'], 9 | parserOptions: { 10 | ecmaVersion: 2019, // Allows for the parsing of modern ECMAScript features 11 | sourceType: 'module', // Allows for the use of imports 12 | ecmaFeatures: { 13 | jsx: false, 14 | }, 15 | }, 16 | env: { 17 | node: true, 18 | }, 19 | overrides: [ 20 | { 21 | files: ['**/*.ts'], 22 | extends: [ 23 | 'plugin:@typescript-eslint/recommended', // Uses the recommended rules from the @typescript-eslint/eslint-plugin 24 | 'prettier/@typescript-eslint', // Uses eslint-config-prettier to disable ESLint rules from @typescript-eslint/eslint-plugin that would conflict with prettier 25 | ], 26 | rules: { 27 | 'no-console': 'error', 28 | 'no-only-tests/no-only-tests': [ 29 | 'error', 30 | { block: ['test', 'it', 'assert'], focus: ['only', 'focus'] }, 31 | ], 32 | 33 | // Typescript 34 | '@typescript-eslint/explicit-function-return-type': 'off', 35 | '@typescript-eslint/no-use-before-define': 'error', 36 | '@typescript-eslint/no-explicit-any': 'warn', 37 | '@typescript-eslint/ban-ts-ignore': 'warn', 38 | }, 39 | }, 40 | ], 41 | } 42 | -------------------------------------------------------------------------------- /src/api/middleware/userAgent.spec.ts: -------------------------------------------------------------------------------- 1 | import { Request } from 'mappersmith' 2 | 3 | import UserAgentMiddleware from './userAgent' 4 | 5 | const middlewareParams = (clientId?: string) => ({ 6 | resourceName: 'resourceNameMock', 7 | resourceMethod: 'resourceMethodMock', 8 | context: { context: 'contextMock' }, 9 | clientId, 10 | }) 11 | 12 | describe('UserAgentMiddleware', () => { 13 | let next, request 14 
| 15 | beforeEach(() => { 16 | request = ({ 17 | enhance: jest.fn(), 18 | } as unknown) as jest.Mocked 19 | next = jest.fn().mockResolvedValue(request) 20 | }) 21 | 22 | describe('When the user has provided a clientId', () => { 23 | const params = middlewareParams('some-client-id') 24 | 25 | it('should add the client id as a user agent comment', async () => { 26 | const middleware = UserAgentMiddleware(params) 27 | 28 | await middleware.prepareRequest(next, jest.fn()) 29 | 30 | expect(request.enhance).toHaveBeenCalledWith({ 31 | headers: { 32 | 'User-Agent': `@kafkajs/confluent-schema-registry (${params.clientId})`, 33 | }, 34 | }) 35 | }) 36 | }) 37 | 38 | describe('When the user has not provided a clientId', () => { 39 | const params = middlewareParams() 40 | 41 | it('should not include a comment in the user agent', async () => { 42 | const middleware = UserAgentMiddleware(params) 43 | 44 | await middleware.prepareRequest(next, jest.fn()) 45 | 46 | expect(request.enhance).toHaveBeenCalledWith({ 47 | headers: { 48 | 'User-Agent': `@kafkajs/confluent-schema-registry`, 49 | }, 50 | }) 51 | }) 52 | }) 53 | }) 54 | -------------------------------------------------------------------------------- /website/src/pages/index.tsx: -------------------------------------------------------------------------------- 1 | import { ReactNode } from 'react' 2 | import clsx from 'clsx' 3 | import Link from '@docusaurus/Link' 4 | import useDocusaurusContext from '@docusaurus/useDocusaurusContext' 5 | import Layout from '@theme/Layout' 6 | import HomepageFeatures from '@site/src/components/HomepageFeatures' 7 | import Heading from '@theme/Heading' 8 | 9 | import styles from './index.module.css' 10 | 11 | function HomepageHeader() { 12 | const { siteConfig } = useDocusaurusContext() 13 | const SVG = require('@site/static/img/kafkajs-logoV2.svg').default 14 | return ( 15 |
16 |
17 | 18 |

{siteConfig.tagline}

19 |
20 | 21 | Documentation 22 | 23 | 27 | Github 28 | 29 |
30 |
31 |
32 | ) 33 | } 34 | 35 | export default function Home(): ReactNode { 36 | const { siteConfig } = useDocusaurusContext() 37 | return ( 38 | 42 | 43 |
44 | 45 |
46 |
47 | ) 48 | } 49 | -------------------------------------------------------------------------------- /website/src/css/custom.css: -------------------------------------------------------------------------------- 1 | /** 2 | * Any CSS included here will be global. The classic template 3 | * bundles Infima by default. Infima is a CSS framework designed to 4 | * work well for content-centric websites. 5 | */ 6 | 7 | /* You can override the default Infima variables here. */ 8 | :root { 9 | --ifm-color-primary-lightest: #6f9576; 10 | --ifm-color-primary-lighter: #628468; 11 | --ifm-color-primary-light: #5e7f63; 12 | --ifm-color-primary: #55735a; 13 | --ifm-color-primary-dark: #4d6851; 14 | --ifm-color-primary-darker: #48624d; 15 | --ifm-color-primary-darkest: #3c513f; 16 | --ifm-code-font-size: 95%; 17 | --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.1); 18 | } 19 | 20 | /* For readability concerns, you should choose a lighter palette in dark mode. */ 21 | [data-theme='dark'] { 22 | --ifm-color-primary: #25c2a0; 23 | --ifm-color-primary-dark: #21af90; 24 | --ifm-color-primary-darker: #1fa588; 25 | --ifm-color-primary-darkest: #1a8870; 26 | --ifm-color-primary-light: #29d5b0; 27 | --ifm-color-primary-lighter: #32d8b4; 28 | --ifm-color-primary-lightest: #4fddbf; 29 | --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3); 30 | } 31 | 32 | .footer-container { 33 | display: flex; 34 | align-items: center; 35 | justify-content: center; 36 | gap: 12px; 37 | } 38 | 39 | .footer-section { 40 | display: flex; 41 | align-items: center; 42 | justify-content: space-between; 43 | } 44 | 45 | .footer__link-item { 46 | width: 100%; 47 | } 48 | 49 | h1 { 50 | font-size: 30px; 51 | } 52 | 53 | h2 { 54 | font-size: 24px; 55 | } 56 | 57 | .licence { 58 | display: flex; 59 | align-items: center; 60 | } 61 | 62 | .nav-home { 63 | opacity: 0.7; 64 | transition: all 1s ease-out; 65 | } 66 | 67 | .nav-home:hover { 68 | opacity: 1; 69 | } 70 | 
-------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@kafkajs/confluent-schema-registry", 3 | "version": "3.9.0", 4 | "main": "dist/index.js", 5 | "description": "ConfluentSchemaRegistry is a library that makes it easier to interact with the Confluent schema registry, it provides convenient methods to encode, decode and register new schemas using the Apache Avro serialization format.", 6 | "keywords": [ 7 | "confluent schema registry", 8 | "kafka" 9 | ], 10 | "repository": { 11 | "type": "git", 12 | "url": "https://github.com/kafkajs/confluent-schema-registry.git" 13 | }, 14 | "scripts": { 15 | "prepare:release": "./pipeline/prepareRelease.sh", 16 | "build": "rm -rf ./dist && tsc", 17 | "build:watch": "rm -rf ./dist && tsc --watch", 18 | "test:unit:watch": "yarn test:unit --watch", 19 | "test:unit": "jest", 20 | "test": "docker compose up -d --wait schemaRegistry && jest", 21 | "lint": "eslint './src/**/*.ts'", 22 | "check:types": "tsc --noEmit", 23 | "format": "yarn lint --fix" 24 | }, 25 | "dependencies": { 26 | "ajv": "^7.1.0", 27 | "avsc": ">= 5.4.13 < 6", 28 | "mappersmith": ">= 2.44.0 < 3", 29 | "protobufjs": "^7.4.0" 30 | }, 31 | "devDependencies": { 32 | "@types/jest": "^29.5.14", 33 | "@types/node": "^18.19.70", 34 | "@types/prettier": "^1.18.2", 35 | "@typescript-eslint/eslint-plugin": "^2.1.0", 36 | "@typescript-eslint/parser": "^2.1.0", 37 | "@typescript-eslint/typescript-estree": "^2.1.0", 38 | "ajv8": "npm:ajv@^8.6.3", 39 | "eslint": "^6.3.0", 40 | "eslint-config-prettier": "^6.1.0", 41 | "eslint-plugin-no-only-tests": "^2.3.1", 42 | "eslint-plugin-prettier": "^3.1.0", 43 | "jest": "^29.7.0", 44 | "prettier": "^1.18.2", 45 | "ts-jest": "^29.2.5", 46 | "ts-node": "^8.3.0", 47 | "typescript": "^5.7.3", 48 | "uuid": "^11.0.5" 49 | } 50 | } 51 | 
-------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.8' 2 | 3 | services: 4 | kafka: # https://hub.docker.com/r/confluentinc/cp-kafka 5 | environment: 6 | KAFKA_ZOOKEEPER_CONNECT: 'zooKeeper:2181' 7 | KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092' 8 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT' 9 | KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' 10 | KAFKA_BROKER_ID: 1 11 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 12 | depends_on: 13 | zooKeeper: 14 | condition: service_healthy 15 | image: 'confluentinc/cp-kafka:5.5.3' 16 | ports: 17 | - '9092:9092' 18 | healthcheck: 19 | test: ["CMD", "nc", "-z", "localhost", "9092"] 20 | interval: 1s 21 | timeout: 60s 22 | retries: 60 23 | 24 | zooKeeper: # https://hub.docker.com/r/confluentinc/cp-zookeeper 25 | environment: 26 | ZOOKEEPER_CLIENT_PORT: '2181' 27 | image: 'confluentinc/cp-zookeeper:5.2.2' 28 | ports: 29 | - '2181:2181' 30 | healthcheck: 31 | test: echo ruok | nc 127.0.0.1 2181 || exit 1 32 | interval: 2s 33 | timeout: 5s 34 | retries: 20 35 | 36 | schemaRegistry: # https://hub.docker.com/r/confluentinc/cp-schema-registry 37 | environment: 38 | SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zooKeeper:2181' 39 | SCHEMA_REGISTRY_HOST_NAME: localhost 40 | image: 'confluentinc/cp-schema-registry:5.5.3' 41 | ports: 42 | - '8982:8081' 43 | depends_on: 44 | kafka: 45 | condition: service_healthy 46 | healthcheck: 47 | test: "curl --output /dev/null --silent --head --fail http://localhost:8081/subjects" 48 | interval: 30s 49 | timeout: 10s 50 | retries: 10 51 | 52 | avro_tools: # https://hub.docker.com/r/kpnnl/avro-tools 53 | image: kpnnl/avro-tools:1.12.0 54 | ports: 55 | - '9999:9999' 56 | -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | # confluent-schema-registry 2 | 3 | `@kafkajs/confluent-schema-registry` is a library that makes it easier to interact with the Confluent schema registry, it provides convenient methods to encode, decode and register new schemas using the Apache Avro serialization format and Confluent's [wire format](https://docs.confluent.io/current/schema-registry/docs/serializer-formatter.html#wire-format). 4 | 5 | [![Build Status](https://dev.azure.com/tulios/ConfluentSchemaRegistry/_apis/build/status/kafkajs.confluent-schema-registry?branchName=master)](https://dev.azure.com/tulios/ConfluentSchemaRegistry/_build/latest?definitionId=3&branchName=master) 6 | 7 | ## Getting started 8 | 9 | ```sh 10 | npm install @kafkajs/confluent-schema-registry 11 | # yarn add @kafkajs/confluent-schema-registry 12 | ``` 13 | 14 | ```javascript 15 | const { Kafka } = require('kafkajs') 16 | const { SchemaRegistry } = require('@kafkajs/confluent-schema-registry') 17 | 18 | const kafka = new Kafka({ clientId: 'my-app', brokers: ['kafka1:9092'] }) 19 | const registry = new SchemaRegistry({ host: 'http://registry:8081/' }) 20 | const consumer = kafka.consumer({ groupId: 'test-group' }) 21 | 22 | const run = async () => { 23 | await consumer.connect() 24 | await consumer.subscribe({ topic: 'test-topic', fromBeginning: true }) 25 | 26 | await consumer.run({ 27 | eachMessage: async ({ topic, partition, message }) => { 28 | const decodedKey = await registry.decode(message.key) 29 | const decodedValue = await registry.decode(message.value) 30 | console.log({ decodedKey, decodedValue }) 31 | }, 32 | }) 33 | } 34 | 35 | run().catch(console.error) 36 | ``` 37 | 38 | ## Documentation 39 | 40 | Learn more about using [KafkaJS Confluent Schema registry on the official site!](https://kafkajs.github.io/confluent-schema-registry/) 41 | 42 | ## License 43 | 44 | See 
[LICENSE](https://github.com/kafkajs/confluent-schema-registry/blob/master/LICENSE) for more details. 45 | -------------------------------------------------------------------------------- /jest.setup.ts: -------------------------------------------------------------------------------- 1 | import { MAGIC_BYTE } from './src/wireEncoder' 2 | import decode from './src/wireDecoder' 3 | import { MatcherFunction } from 'expect' 4 | 5 | const toMatchConfluentEncodedPayload: MatcherFunction<[{ payload: Buffer }]> = function( 6 | received, 7 | { payload: expectedPayload }, 8 | ) { 9 | const { printExpected, printReceived, printWithType } = this.utils 10 | 11 | if (!Buffer.isBuffer(expectedPayload)) { 12 | const error = [ 13 | 'Expect payload to be a Buffer', 14 | printWithType('Got', expectedPayload, printExpected), 15 | ].join('\n') 16 | 17 | throw new Error(error) 18 | } 19 | 20 | const { magicByte, payload } = decode(received as Buffer) 21 | const expectedMessage = decode(expectedPayload) 22 | 23 | if (!Buffer.isBuffer(received)) { 24 | return { 25 | pass: false, 26 | message: () => 27 | [ 28 | 'Received value must be a Buffer', 29 | printWithType('Received', received, printReceived), 30 | ].join('\n'), 31 | } 32 | } 33 | 34 | if (Buffer.compare(MAGIC_BYTE, magicByte) !== 0) { 35 | return { 36 | pass: false, 37 | message: () => 38 | [ 39 | 'expected magic byte', 40 | printExpected(MAGIC_BYTE), 41 | '\nreceived', 42 | printReceived(magicByte), 43 | ].join('\n'), 44 | } 45 | } 46 | 47 | return { 48 | pass: this.equals(payload, expectedMessage.payload), 49 | message: () => 50 | [ 51 | 'expected payload', 52 | printExpected(expectedMessage.payload), 53 | '\nreceived', 54 | printReceived(payload), 55 | ].join('\n'), 56 | } 57 | } 58 | 59 | expect.extend({ 60 | toMatchConfluentEncodedPayload, 61 | }) 62 | 63 | declare global { 64 | // eslint-disable-next-line @typescript-eslint/no-namespace 65 | namespace jest { 66 | interface Matchers { 67 | toMatchConfluentEncodedPayload(args: 
{ registryId: number; payload: Buffer }): R 68 | } 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /docs/custom-types.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: custom-types 3 | title: Configuring Custom Types 4 | sidebar_label: Custom Types 5 | --- 6 | 7 | Schemas can contain types that cannot be represented by Javascript primitive types, or 8 | where there's more than one possible type to deserialize into. For example, 9 | the Avro logical type `timestamp-millis` represents a date, but is serialized as a `long`. 10 | In order to deserialize that into a `Date` object, we need to configure the schema library. 11 | 12 | ## Avro 13 | 14 | `@kafkajs/confluent-schema-registry` uses the [`avsc`](https://github.com/mtth/avsc/wiki/Advanced-usage#logical-types) 15 | library for Avro encoding/decoding. The [Schema Type Options](./usage.md#schema-type-options) 16 | for Avro are passed to [`avsc.Type.forSchema`](https://github.com/mtth/avsc/wiki/API#typeforschemaschema-opts), 17 | which allows us to set up a mapping between the logical type and the type we want to deserialize 18 | into: 19 | 20 | ```ts 21 | import { SchemaRegistry, SchemaType } from '@kafkajs/confluent-schema-registry' 22 | import avro from 'avsc' 23 | 24 | class DateType extends avro.types.LogicalType { 25 | _fromValue(val: string) { 26 | return new Date(val); 27 | } 28 | _toValue(date: Date): number { 29 | return +date 30 | } 31 | _resolve(type: any) { 32 | if (avro.Type.isType(type, 'long', 'string', 'logical:timestamp-millis')) { 33 | return this._fromValue; 34 | } 35 | } 36 | } 37 | 38 | 39 | const options = { 40 | [SchemaType.AVRO]: { 41 | logicalTypes: { 'timestamp-millis': DateType } 42 | } 43 | } 44 | const registry = new SchemaRegistry({ host: 'http://localhost:9092' }, options) 45 | ``` 46 | 47 | ### Custom long type 48 | 49 | JavaScript represents all numbers as doubles internally, which 
means that it is possible 50 | to lose precision when using very large numbers. In order to use a type that can 51 | accomodate such large numbers, you can use the same configuration option as above to 52 | have avsc use for example `BigInt` or `longjs` instead of the native `number` type. 53 | 54 | See [Custom Long Types](https://github.com/mtth/avsc/wiki/Advanced-usage#custom-long-types) 55 | for more details. -------------------------------------------------------------------------------- /docs/schema-protobuf.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: schema-protobuf 3 | title: Example Protobuf Schemas 4 | sidebar_label: Example Protobuf Schemas 5 | --- 6 | 7 | ## Schema with references to other schemas 8 | 9 | You might want to split the Protobuf definition into several schemas, one for each type. 10 | 11 | ```protobuf 12 | syntax = "proto3"; 13 | package test; 14 | import "test/B.proto"; 15 | 16 | message A { 17 | int32 id = 1; 18 | B b = 2; 19 | } 20 | ``` 21 | 22 | ```protobuf 23 | syntax = "proto3"; 24 | package test; 25 | 26 | message B { 27 | int32 id = 1; 28 | } 29 | ``` 30 | 31 | To register schemas with references, the schemas have to be registered in reverse order. The schema that references another schema has to be registered after the schema it references. In this example B has to be registered before A. Furthermore, when registering A, a list of references have to be provided. A reference consist of: 32 | 33 | * `name` - String matching the import statement. For example: `test/B.proto` 34 | * `subject` - the subject the schema is registered under in the registry 35 | * `version` - the version of the schema you want to use 36 | 37 | The library will handle an arbitrary number of nested levels. 
38 | 39 | ```js 40 | const schemaA = ` 41 | syntax = "proto3"; 42 | package test; 43 | import "test/B.proto"; 44 | 45 | message A { 46 | int32 id = 1; 47 | B b = 2; 48 | }` 49 | 50 | const schemaB = ` 51 | syntax = "proto3"; 52 | package test; 53 | 54 | message B { 55 | int32 id = 1; 56 | }` 57 | 58 | await schemaRegistry.register( 59 | { type: SchemaType.PROTOBUF, schema: schemaB }, 60 | { subject: 'Proto:B' }, 61 | ) 62 | 63 | const response = await schemaRegistry.api.Subject.latestVersion({ subject: 'Proto:B' }) 64 | const { version } = JSON.parse(response.responseData) 65 | 66 | const { id } = await schemaRegistry.register( 67 | { 68 | type: SchemaType.PROTOBUF, 69 | schema: schemaA, 70 | references: [ 71 | { 72 | name: 'test/B.proto', 73 | subject: 'Proto:B', 74 | version, 75 | }, 76 | ], 77 | }, 78 | { subject: 'Proto:A' }, 79 | ) 80 | 81 | const obj = { id: 1, b: { id: 2 } } 82 | 83 | const buffer = await schemaRegistry.encode(id, obj) 84 | const decodedObj = await schemaRegistry.decode(buffer) 85 | ``` 86 | -------------------------------------------------------------------------------- /docs/development.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: development 3 | title: Development 4 | sidebar_label: Development 5 | --- 6 | 7 | To run the registry locally: 8 | 9 | ```sh 10 | docker-compose up 11 | ``` 12 | 13 | To tail the logs: 14 | 15 | ```sh 16 | # Replace CONTAINER_ID with the container's ID 17 | docker exec -it CONTAINER_ID bash -c "supervisorctl tail -f schema-registry" 18 | ``` 19 | 20 | ### Glossary 21 | 22 | This glossary shall help you when reading the code and understanding Schema Registry at large. 23 | 24 | | Term | Description | 25 | | ---------- | ------------------------------------------------------------------------------------------------- | 26 | | subject | The full name to be used to group a schema history. Consists of a namespace and a name. 
| 27 | | namespace | The initial part of a subject. e.g. domain.your-project.some-feature | 28 | | name | The final part of a subject. e.g. User | 29 | | version | The object containing id and the schema. Its own id is _not_ global, but unique for each subject. | 30 | | registryId | The global id of a schema version. Retrieved by `register`. | 31 | 32 | ### References 33 | 34 | - [Confluent wire format](https://docs.confluent.io/current/schema-registry/docs/serializer-formatter.html#wire-format) 35 | - [Java version](https://github.com/confluentinc/schema-registry/tree/master/avro-serializer/src/main/java/io/confluent/kafka/serializers) 36 | - [Compatibility and schema evolution](https://docs.confluent.io/current/avro.html) 37 | 38 | ### Publishing a new version 39 | 40 | - Create a new branch 41 | - Update `CHANGELOG.md` with all the relevant changes since the last release by [comparing commits](https://github.com/kafkajs/confluent-schema-registry/compare/v1.0.5...master) since last release 42 | - Bump the `package.json` version and create a corresponding tag using `npm version ` 43 | - Push changes to your branch 44 | - Create PR, wait for successful builds 45 | - Merge PR 46 | - Push tags `git push --tags`, this will trigger a CI job which publishes the new version on `npm`. 47 | -------------------------------------------------------------------------------- /docs/usage-with-kafkajs.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: usage-with-kafkajs 3 | title: Usage with KafkaJS 4 | sidebar_label: Usage with KafkaJS 5 | --- 6 | 7 | Although Confluent Schema Registry can be used with any Kafka client, or outside 8 | of Kafka entirely, it is commonly used together with [KafkaJS](https://kafka.js.org). 9 | 10 | The following is an example of an application that consumes from a topic of 11 | AVRO encoded messages and produces to another topic after encoding the messages 12 | with a different schema. 
13 | 14 | 15 | ```js 16 | const path = require('path') 17 | const { Kafka } = require('kafkajs') 18 | const { SchemaRegistry, SchemaType, avdlToAVSCAsync } = require('@kafkajs/confluent-schema-registry') 19 | 20 | const registry = new SchemaRegistry({ host: 'http://localhost:8081' }) 21 | const kafka = new Kafka({ 22 | brokers: ['localhost:9092'], 23 | clientId: 'example-consumer', 24 | }) 25 | const consumer = kafka.consumer({ groupId: 'test-group' }) 26 | const producer = kafka.producer() 27 | 28 | const incomingTopic = 'incoming' 29 | const outgoingTopic = 'outgoing' 30 | 31 | const run = async () => { 32 | const schema = await avdlToAVSCAsync(path.join(__dirname, 'schema.avdl')) 33 | const { id } = await registry.register({ type: SchemaType.AVRO, schema: JSON.stringify(schema) }) 34 | 35 | await consumer.connect() 36 | await producer.connect() 37 | 38 | await consumer.subscribe({ topic: incomingTopic }) 39 | 40 | await consumer.run({ 41 | eachMessage: async ({ topic, partition, message }) => { 42 | const decodedMessage = { 43 | ...message, 44 | value: await registry.decode(message.value) 45 | } 46 | 47 | const outgoingMessage = { 48 | key: message.key, 49 | value: await registry.encode(id, decodedMessage.value) 50 | } 51 | 52 | await producer.send({ 53 | topic: outgoingTopic, 54 | messages: [ outgoingMessage ] 55 | }) 56 | }, 57 | }) 58 | } 59 | 60 | run().catch(async e => { 61 | console.error(e) 62 | consumer && await consumer.disconnect() 63 | producer && await producer.disconnect() 64 | process.exit(1) 65 | }) 66 | ``` 67 | 68 | Note that this example is only intended as a simple visualization of how 69 | to use Confluent Schema Registry together with KafkaJS. It is not necessarily 70 | intended to be a production-ready application. 
71 | -------------------------------------------------------------------------------- /docs/schema-avro.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: schema-avro 3 | title: Example Avro Schemas 4 | sidebar_label: Example Avro Schemas 5 | --- 6 | 7 | ## Schema with references to other schemas 8 | 9 | You might want to split the Avro definition into several schemas one for each type. 10 | 11 | ```json 12 | { 13 | "type" : "record", 14 | "namespace" : "test", 15 | "name" : "A", 16 | "fields" : [ 17 | { "name" : "id" , "type" : "int" }, 18 | { "name" : "b" , "type" : "test.B" } 19 | ] 20 | } 21 | ``` 22 | 23 | ```json 24 | { 25 | "type" : "record", 26 | "namespace" : "test", 27 | "name" : "B", 28 | "fields" : [ 29 | { "name" : "id" , "type" : "int" } 30 | ] 31 | } 32 | ``` 33 | 34 | To register schemas with references, the schemas have to be registered in reverse order. The schema that references another schema has to be registered after the schema it references. In this example B has to be registered before A. Furthermore, when registering A, a list of references have to be provided. A reference consist of: 35 | 36 | * `name` - the fully qualified name of the referenced schema. Example: `test.B` 37 | * `subject` - the subject the schema is registered under in the registry 38 | * `version` - the version of the schema you want to use 39 | 40 | The library will handle an arbitrary number of nested levels. 
41 | 42 | ```js 43 | const schemaA = { 44 | type: 'record', 45 | namespace: 'test', 46 | name: 'A', 47 | fields: [ 48 | { name: 'id', type: 'int' }, 49 | { name: 'b', type: 'test.B' }, 50 | ], 51 | } 52 | 53 | const schemaB = { 54 | type: 'record', 55 | namespace: 'test', 56 | name: 'B', 57 | fields: [{ name: 'id', type: 'int' }], 58 | } 59 | 60 | await schemaRegistry.register( 61 | { type: SchemaType.AVRO, schema: JSON.stringify(schemaB) }, 62 | { subject: 'Avro:B' }, 63 | ) 64 | 65 | const response = await schemaRegistry.api.Subject.latestVersion({ subject: 'Avro:B' }) 66 | const { version } = JSON.parse(response.responseData) 67 | 68 | const { id } = await schemaRegistry.register( 69 | { 70 | type: SchemaType.AVRO, 71 | schema: JSON.stringify(schemaA), 72 | references: [ 73 | { 74 | name: 'test.B', 75 | subject: 'Avro:B', 76 | version, 77 | }, 78 | ], 79 | }, 80 | { subject: 'Avro:A' }, 81 | ) 82 | 83 | const obj = { id: 1, b: { id: 2 } } 84 | 85 | const buffer = await schemaRegistry.encode(id, obj) 86 | const decodedObj = await schemaRegistry.decode(buffer) 87 | ``` 88 | -------------------------------------------------------------------------------- /src/api/middleware/errorMiddleware.spec.ts: -------------------------------------------------------------------------------- 1 | import { MiddlewareDescriptor } from 'mappersmith' 2 | 3 | import ErrorMiddleware from './errorMiddleware' 4 | 5 | const middlewareParams = { 6 | resourceName: 'resourceNameMock', 7 | resourceMethod: 'resourceMethodMock', 8 | context: { context: 'contextMock' }, 9 | clientId: 'clientIdMock', 10 | } 11 | 12 | describe('ErrorMiddleware', () => { 13 | let executedMiddleware: MiddlewareDescriptor 14 | 15 | beforeEach(() => { 16 | executedMiddleware = ErrorMiddleware(middlewareParams) 17 | }) 18 | 19 | describe('when the request succeeds', () => { 20 | it('does not interfere with the promise', async () => { 21 | await expect( 22 | // @ts-ignore 23 | executedMiddleware.response(() => 
Promise.resolve('arbitrary value'), undefined), 24 | ).resolves.toBe('arbitrary value') 25 | }) 26 | }) 27 | 28 | describe('when the request fails', () => { 29 | const createResponse = data => ({ 30 | data: jest.fn(() => data), 31 | status: jest.fn(() => 500), 32 | request: jest.fn(() => ({ 33 | method: jest.fn(() => 'get'), 34 | url: jest.fn(() => 'url'), 35 | })), 36 | }) 37 | 38 | it('raise an error with message', async () => { 39 | const message = 'error message' 40 | const response = createResponse({ message }) 41 | 42 | await expect( 43 | executedMiddleware.response(() => Promise.reject(response), undefined), 44 | ).rejects.toHaveProperty('message', `${middlewareParams.clientId} - ${message}`) 45 | }) 46 | 47 | it('raises an error with a message in case of client-side errors', async () => { 48 | const message = 'error message' 49 | const response = createResponse(message) 50 | 51 | await expect( 52 | executedMiddleware.response(() => Promise.reject(response), undefined), 53 | ).rejects.toHaveProperty( 54 | 'message', 55 | `${middlewareParams.clientId} - Error, status 500: ${message}`, 56 | ) 57 | }) 58 | 59 | it('raise an error with a default message if the error payload is empty', async () => { 60 | const response = createResponse('') 61 | 62 | await expect( 63 | executedMiddleware.response(() => Promise.reject(response), undefined), 64 | ).rejects.toHaveProperty('message', `${middlewareParams.clientId} - Error, status 500`) 65 | }) 66 | }) 67 | }) 68 | -------------------------------------------------------------------------------- /docs/schema-json.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: schema-json 3 | title: Example JSON Schemas 4 | sidebar_label: Example JSON Schemas 5 | --- 6 | 7 | ## Schema with references to other schemas 8 | 9 | You might want to split the JSON definition into several schemas one for each type. 
10 | 11 | ```JSON 12 | { 13 | "$id": "https://example.com/schemas/A", 14 | "type": "object", 15 | "properties": { 16 | "id": { "type": "number" }, 17 | "b": { "$ref": "https://example.com/schemas/B" } 18 | } 19 | } 20 | ``` 21 | 22 | ```JSON 23 | { 24 | "$id": "https://example.com/schemas/B", 25 | "type": "object", 26 | "properties": { 27 | "id": { "type": "number" } 28 | } 29 | } 30 | ``` 31 | 32 | To register schemas with references, the schemas have to be registered in reverse order. The schema that references another schema has to be registered after the schema it references. In this example B has to be registered before A. Furthermore, when registering A, a list of references have to be provided. A reference consist of: 33 | 34 | * `name` - A URL matching the `$ref` from the schema 35 | * `subject` - the subject the schema is registered under in the registry 36 | * `version` - the version of the schema you want to use 37 | 38 | The library will handle an arbitrary number of nested levels. 
39 | 40 | ```js 41 | const schemaA = { 42 | $id: 'https://example.com/schemas/A', 43 | type: 'object', 44 | properties: { 45 | id: { type: 'number' }, 46 | b: { $ref: 'https://example.com/schemas/B' }, 47 | }, 48 | } 49 | 50 | const schemaB = { 51 | $id: 'https://example.com/schemas/B', 52 | type: 'object', 53 | properties: { 54 | id: { type: 'number' }, 55 | }, 56 | } 57 | 58 | await schemaRegistry.register( 59 | { type: SchemaType.JSON, schema: JSON.stringify(schemaB) }, 60 | { subject: 'JSON:B' }, 61 | ) 62 | 63 | const response = await schemaRegistry.api.Subject.latestVersion({ subject: 'JSON:B' }) 64 | const { version } = JSON.parse(response.responseData) 65 | 66 | const { id } = await schemaRegistry.register( 67 | { 68 | type: SchemaType.JSON, 69 | schema: JSON.stringify(schemaA), 70 | references: [ 71 | { 72 | name: 'https://example.com/schemas/B', 73 | subject: 'JSON:B', 74 | version, 75 | }, 76 | ], 77 | }, 78 | { subject: 'JSON:A' }, 79 | ) 80 | 81 | const obj = { id: 1, b: { id: 2 } } 82 | 83 | const buffer = await schemaRegistry.encode(id, obj) 84 | const decodedObj = await schemaRegistry.decode(buffer) 85 | ``` 86 | -------------------------------------------------------------------------------- /docs/advanced-usage.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: advanced-usage 3 | title: Advanced Usage 4 | sidebar_label: Advanced Usage 5 | --- 6 | 7 | While typical usage is covered in [Usage](./usage), Confluent Schema Registry also 8 | provides functionality for more advanced usage. 
9 | 10 | ## Get latest schema id by subject 11 | 12 | ```js 13 | const subject = 'com.example.Simple' 14 | const id = await registry.getLatestSchemaId(subject) 15 | ``` 16 | 17 | ## Get schema id by subject and version 18 | 19 | ```js 20 | const subject = 'com.example.Simple' 21 | const version = 1 22 | 23 | const id = await registry.getRegistryId(subject, version) 24 | ``` 25 | 26 | > _Note:_ Currently there is no way to list versions by subject. 27 | 28 | ## Get schema id by schema 29 | 30 | Returns the schema id if the schema has already been registered for the provided 31 | subject. 32 | 33 | If a matching schema does not exist for the subject, it throws a 34 | `ConfluentSchemaRegistryError` 35 | 36 | ```js 37 | const subject = 'com.example.Simple' 38 | const schema = await avdlToAVSCAsync('path/to/protocol.avdl') 39 | 40 | const id = await registry.getRegistryIdBySchema(subject, { 41 | type: SchemaType.AVRO, 42 | schema: JSON.stringify(schema), 43 | }) 44 | ``` 45 | 46 | ## Getting schema by schema id 47 | 48 | Normally Confluent Schema Registry keeps the schemas internally and don't require 49 | the user to handle them to encode/decode data, but if you need to get a schema 50 | from the registry, you can do so by its schema id: 51 | 52 | ```js 53 | // See https://github.com/kafkajs/confluent-schema-registry/blob/master/src/%40types.ts#L30-L46 54 | // for a complete return type 55 | const schema = await registry.getSchema(id) 56 | ``` 57 | 58 | ## Using custom middlewares 59 | 60 | The Schema Registry Client now supports adding custom mappersmith middlewares, providing flexibility to handle various use cases such as OAuth authentication or other custom request/response transformations. 
61 | 62 | To use custom middlewares, include them when initializing the Schema Registry Client: 63 | 64 | ```ts 65 | import { Middleware } from 'mappersmith' 66 | 67 | const customMiddleware: Middleware = jest.fn(() => { 68 | return { 69 | async request(request) { 70 | return request.enhance({ 71 | headers: { 72 | Authorization: 'Bearer Random', 73 | }, 74 | }) 75 | }, 76 | async response(next) { 77 | return next() 78 | }, 79 | } 80 | }) 81 | 82 | const registry = new SchemaRegistry({ 83 | host: 'your_host', 84 | middlewares: [customMiddleware], 85 | }) 86 | ``` 87 | -------------------------------------------------------------------------------- /docs/schema.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: schemas 3 | title: Example Avro Schemas 4 | sidebar_label: Example Avro Schemas 5 | --- 6 | 7 | ## Unions with null 8 | 9 | Schemas using unions with `null` are simple. You just have to provide the data or omit in case of `null`, example: 10 | 11 | ```avdl 12 | @namespace("com.org.domain.examples") 13 | protocol MyProtocol { 14 | record Picture { 15 | string url; 16 | } 17 | 18 | record Event { 19 | string name; 20 | union {null, Picture} picture = null; 21 | } 22 | } 23 | ``` 24 | 25 | This schema can be satisfied with: 26 | 27 | ```JSON 28 | { 29 | "name": "John", 30 | "picture": { 31 | "url": "https://..." 32 | } 33 | } 34 | ``` 35 | 36 | or 37 | 38 | ```JSON 39 | { 40 | "name": "John" 41 | } 42 | ``` 43 | 44 | ## Unions with different records 45 | 46 | Unions with different records can have ambiguities, the data can be the same but for a different type. 
In these cases you have to provide a wrapped union type, example: 47 | 48 | ```avdl 49 | @namespace("com.org.domain.examples") 50 | protocol MyProtocol { 51 | record Picture { 52 | string url; 53 | } 54 | record Photo { 55 | string url; 56 | } 57 | 58 | record Event { 59 | string name; 60 | union {Picture, Photo} asset; 61 | } 62 | } 63 | ``` 64 | 65 | This schema can be satisfied with: 66 | 67 | ```JSON 68 | { 69 | "name": "John", 70 | "asset": { 71 | "com.org.domain.examples.Picture": { 72 | "url": "https://..." 73 | } 74 | } 75 | } 76 | ``` 77 | 78 | or 79 | 80 | ```JSON 81 | { 82 | "name": "John", 83 | "asset": { 84 | "com.org.domain.examples.Photo": { 85 | "url": "https://..." 86 | } 87 | } 88 | } 89 | ``` 90 | 91 | ## Imported schemas 92 | 93 | Schemas can be imported from other AVDL or AVSC files using [the import declaration](https://avro.apache.org/docs/1.8.2/idl.html#imports). **Note** that this only works using `avdlToAVSCAsync`, not `avdlToAVSC`. Import paths are defined relative to the AVDL file they are imported from. In the following example, `person.avdl` is located next to this AVDL file. 
94 | 95 | ```avdl 96 | @namespace("com.org.domain.examples") 97 | protocol MyProtocol { 98 | // AVDL files can be imported with "import idl" 99 | import idl 'person.avdl'; 100 | 101 | // AVSC files can be imported with "import schema" 102 | import schema 'place.avsc'; 103 | 104 | record Picture { 105 | string url; 106 | } 107 | 108 | record Event { 109 | Picture picture; 110 | com.org.domain.examples.Person person; 111 | com.org.domain.examples.Place place; 112 | } 113 | } 114 | ``` -------------------------------------------------------------------------------- /website/docusaurus.config.ts: -------------------------------------------------------------------------------- 1 | import { themes as prismThemes } from 'prism-react-renderer' 2 | import { Config } from '@docusaurus/types' 3 | 4 | const config: Config = { 5 | title: 'Confluent Schema Registry', 6 | tagline: 'A library that makes it easier to interact with the Confluent schema registry', 7 | favicon: 'img/kafkajs-logoV2.svg', 8 | url: 'https://kafkajs.github.io', 9 | baseUrl: '/confluent-schema-registry', 10 | organizationName: 'kafkajs', 11 | projectName: 'confluent-schema-registry', 12 | onBrokenLinks: 'throw', 13 | onBrokenMarkdownLinks: 'warn', 14 | i18n: { 15 | defaultLocale: 'en', 16 | locales: ['en'], 17 | }, 18 | presets: [ 19 | [ 20 | 'classic', 21 | { 22 | docs: { 23 | path: '../docs', 24 | sidebarPath: './sidebars.ts', 25 | editUrl: 'https://github.com/kafkajs/confluent-schema-registry/edit/master/s', 26 | }, 27 | theme: { 28 | customCss: './src/css/custom.css', 29 | }, 30 | }, 31 | ], 32 | ], 33 | themeConfig: { 34 | navbar: { 35 | title: 'Confluent Schema Registry', 36 | logo: { 37 | alt: 'Confluent Schema Registry Logo', 38 | src: 'img/kafkajs-logoV2.svg', 39 | }, 40 | items: [ 41 | { to: '/docs/introduction', label: 'Docs', position: 'left' }, 42 | { 43 | href: 'https://github.com/kafkajs/confluent-schema-registry', 44 | label: 'GitHub', 45 | position: 'right', 46 | }, 47 | ], 48 | }, 49 | 
footer: { 50 | style: 'dark', 51 | copyright: `Copyright © ${new Date().getFullYear()} Kafkajs`, 52 | links: [ 53 | { 54 | html: ` 55 | 70 | `, 71 | }, 72 | ], 73 | }, 74 | prism: { 75 | theme: prismThemes.github, 76 | darkTheme: prismThemes.dracula, 77 | }, 78 | }, 79 | } 80 | 81 | export default config 82 | -------------------------------------------------------------------------------- /website/static/img/kafkajs-logoV2.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/ProtoSchema.ts: -------------------------------------------------------------------------------- 1 | import { Schema, ProtoOptions, ProtoConfluentSchema } from './@types' 2 | import protobuf from 'protobufjs' 3 | import { IParserResult, ReflectionObject, Namespace, Type } from 'protobufjs/light' 4 | import { 5 | ConfluentSchemaRegistryArgumentError, 6 | ConfluentSchemaRegistryValidationError, 7 | } from './errors' 8 | 9 | export default class ProtoSchema implements Schema { 10 | private message: Type 11 | 12 | constructor(schema: ProtoConfluentSchema, opts?: ProtoOptions) { 13 | const parsedMessage = protobuf.parse(schema.schema) 14 | const root = parsedMessage.root 15 | const referencedSchemas = opts?.referencedSchemas 16 | 17 | // handle all schema references independent on nested references 18 | if (referencedSchemas) { 19 | referencedSchemas.forEach(rawSchema => protobuf.parse(rawSchema.schema as string, root)) 20 | } 21 | 22 | this.message = root.lookupType(this.getTypeName(parsedMessage, opts)) 23 | } 24 | 25 | private getNestedTypeName(parent: { [k: string]: ReflectionObject } | undefined): string { 26 | if (!parent) throw new ConfluentSchemaRegistryArgumentError('no nested fields') 27 | const keys = Object.keys(parent) 28 | const reflection = parent[keys[0]] 29 | 30 | // Traverse down the nested Namespaces until we find a message Type instance (which extends 
Namespace) 31 | if (reflection instanceof Namespace && !(reflection instanceof Type) && reflection.nested) 32 | return this.getNestedTypeName(reflection.nested) 33 | return keys[0] 34 | } 35 | 36 | private getTypeName(parsedMessage: IParserResult, opts?: ProtoOptions): string { 37 | const root = parsedMessage.root 38 | const pkg = parsedMessage.package 39 | const name = opts && opts.messageName ? opts.messageName : this.getNestedTypeName(root.nested) 40 | return `${pkg ? pkg : ''}.${name}` 41 | } 42 | 43 | private trimStart(buffer: Buffer): Buffer { 44 | const index = buffer.findIndex((value: number) => value != 0) 45 | return buffer.slice(index) 46 | } 47 | 48 | public toBuffer(payload: object): Buffer { 49 | const paths: string[][] = [] 50 | if ( 51 | !this.isValid(payload, { 52 | errorHook: (path: Array) => paths.push(path), 53 | }) 54 | ) { 55 | throw new ConfluentSchemaRegistryValidationError('invalid payload', paths) 56 | } 57 | 58 | const protoPayload = this.message.create(payload) 59 | return Buffer.from(this.message.encode(protoPayload).finish()) 60 | } 61 | 62 | public fromBuffer(buffer: Buffer): any { 63 | const newBuffer = this.trimStart(buffer) 64 | return this.message.decode(newBuffer) 65 | } 66 | 67 | public isValid( 68 | payload: object, 69 | opts?: { errorHook: (path: Array, value: any, type?: any) => void }, 70 | ): boolean { 71 | const errMsg: null | string = this.message.verify(payload) 72 | if (errMsg) { 73 | if (opts?.errorHook) { 74 | opts.errorHook([errMsg], payload) 75 | } 76 | return false 77 | } 78 | return true 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /src/@types.ts: -------------------------------------------------------------------------------- 1 | import { Resolver, ForSchemaOptions, Type } from 'avsc' 2 | import { ValidateFunction } from './JsonSchema' 3 | import Ajv from 'ajv' 4 | 5 | export enum SchemaType { 6 | AVRO = 'AVRO', 7 | JSON = 'JSON', 8 | PROTOBUF = 'PROTOBUF', 9 
| UNKNOWN = 'UNKNOWN', 10 | } 11 | export interface SchemaHelper { 12 | validate(schema: Schema): void 13 | getSubject(confluentSchema: ConfluentSchema, schema: Schema, separator: string): ConfluentSubject 14 | toConfluentSchema(data: SchemaResponse): ConfluentSchema 15 | updateOptionsFromSchemaReferences( 16 | referencedSchemas: ConfluentSchema[], 17 | options?: ProtocolOptions, 18 | ): ProtocolOptions 19 | } 20 | 21 | export type AvroOptions = Partial & { 22 | referencedSchemas?: AvroConfluentSchema[] 23 | } 24 | 25 | export type JsonOptions = ConstructorParameters[0] & { 26 | ajvInstance?: 27 | | { 28 | addSchema: Ajv['addSchema'] 29 | compile: (schema: any) => ValidateFunction 30 | } 31 | | Ajv 32 | referencedSchemas?: JsonConfluentSchema[] 33 | detailedErrorPaths?: boolean 34 | } 35 | export type ProtoOptions = { messageName?: string; referencedSchemas?: ProtoConfluentSchema[] } 36 | 37 | export interface LegacyOptions { 38 | forSchemaOptions?: AvroOptions 39 | } 40 | export interface ProtocolOptions { 41 | [SchemaType.AVRO]?: AvroOptions 42 | [SchemaType.JSON]?: JsonOptions 43 | [SchemaType.PROTOBUF]?: ProtoOptions 44 | } 45 | export type SchemaRegistryAPIClientOptions = ProtocolOptions | LegacyOptions 46 | 47 | export interface Schema { 48 | toBuffer(payload: object): Buffer // FIXME: 49 | fromBuffer(buffer: Buffer, resolver?: Resolver, noCheck?: boolean): any 50 | isValid( 51 | payload: object, 52 | opts?: { errorHook: (path: Array, value: any, type?: any) => void }, 53 | ): boolean 54 | } 55 | 56 | export interface RawAvroSchema { 57 | name: string 58 | namespace?: string 59 | type: 'record' 60 | fields: any[] 61 | } 62 | 63 | export interface AvroSchema 64 | extends Schema, 65 | RawAvroSchema, 66 | Pick {} 67 | 68 | export interface ConfluentSubject { 69 | name: string 70 | } 71 | 72 | export interface AvroConfluentSchema { 73 | type: SchemaType.AVRO 74 | schema: string | RawAvroSchema 75 | references?: SchemaReference[] 76 | } 77 | 78 | export type 
SchemaReference = { 79 | name: string 80 | subject: string 81 | version: number 82 | } 83 | export interface ProtoConfluentSchema { 84 | type: SchemaType.PROTOBUF 85 | schema: string 86 | references?: SchemaReference[] 87 | } 88 | export interface JsonConfluentSchema { 89 | type: SchemaType.JSON 90 | schema: string 91 | references?: SchemaReference[] 92 | } 93 | export interface SchemaResponse { 94 | schema: string 95 | schemaType: string 96 | references?: SchemaReference[] 97 | } 98 | 99 | export type ConfluentSchema = AvroConfluentSchema | ProtoConfluentSchema | JsonConfluentSchema 100 | -------------------------------------------------------------------------------- /src/JsonSchema.ts: -------------------------------------------------------------------------------- 1 | import { Schema, JsonOptions, JsonConfluentSchema } from './@types' 2 | import Ajv from 'ajv' 3 | import { ConfluentSchemaRegistryValidationError } from './errors' 4 | 5 | interface BaseAjvValidationError { 6 | data?: unknown 7 | schema?: unknown 8 | message?: string 9 | } 10 | interface OldAjvValidationError extends BaseAjvValidationError { 11 | dataPath: string 12 | instancePath?: string 13 | } 14 | interface NewAjvValidationError extends BaseAjvValidationError { 15 | instancePath: string 16 | } 17 | 18 | type AjvValidationError = OldAjvValidationError | NewAjvValidationError 19 | 20 | export interface ValidateFunction { 21 | (this: any, data: any): boolean 22 | errors?: null | AjvValidationError[] 23 | } 24 | export default class JsonSchema implements Schema { 25 | private validate: ValidateFunction 26 | private detailedErrorPaths: boolean 27 | 28 | constructor(schema: JsonConfluentSchema, opts?: JsonOptions) { 29 | this.validate = this.getJsonSchema(schema, opts) 30 | this.detailedErrorPaths = opts?.detailedErrorPaths ? opts?.detailedErrorPaths : false 31 | } 32 | 33 | private getJsonSchema(schema: JsonConfluentSchema, opts?: JsonOptions) { 34 | const ajv = opts?.ajvInstance ?? 
new Ajv(opts) 35 | const referencedSchemas = opts?.referencedSchemas 36 | if (referencedSchemas) { 37 | referencedSchemas.forEach(rawSchema => { 38 | const $schema = JSON.parse(rawSchema.schema) 39 | ajv.addSchema($schema, $schema['$id']) 40 | }) 41 | } 42 | const validate = ajv.compile(JSON.parse(schema.schema)) 43 | return validate 44 | } 45 | 46 | private validatePayload(payload: any) { 47 | const paths: any[] = [] 48 | 49 | if ( 50 | !this.isValid(payload, { 51 | errorHook: (path, message) => { 52 | if (this.detailedErrorPaths) { 53 | paths.push({ path, message }) 54 | } else { 55 | paths.push(path) 56 | } 57 | }, 58 | }) 59 | ) { 60 | throw new ConfluentSchemaRegistryValidationError('invalid payload', paths) 61 | } 62 | } 63 | 64 | public toBuffer(payload: object): Buffer { 65 | this.validatePayload(payload) 66 | return Buffer.from(JSON.stringify(payload)) 67 | } 68 | 69 | public fromBuffer(buffer: Buffer): any { 70 | const payload = JSON.parse(buffer.toString()) 71 | this.validatePayload(payload) 72 | return payload 73 | } 74 | 75 | public isValid( 76 | payload: object, 77 | opts?: { errorHook: (path: Array, value: any, type?: any) => void }, 78 | ): boolean { 79 | if (!this.validate(payload)) { 80 | if (opts?.errorHook) { 81 | for (const err of this.validate.errors as AjvValidationError[]) { 82 | const path = this.isOldAjvValidationError(err) ? err.dataPath : err.instancePath 83 | opts.errorHook([path], err.message ?? 
err.data, err.schema) 84 | } 85 | return false 86 | } 87 | } 88 | return true 89 | } 90 | 91 | private isOldAjvValidationError(error: AjvValidationError): error is OldAjvValidationError { 92 | return (error as OldAjvValidationError).dataPath != null 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /src/AvroHelper.ts: -------------------------------------------------------------------------------- 1 | import { 2 | AvroSchema, 3 | RawAvroSchema, 4 | AvroOptions, 5 | ConfluentSchema, 6 | SchemaHelper, 7 | ConfluentSubject, 8 | ProtocolOptions, 9 | AvroConfluentSchema, 10 | } from './@types' 11 | import { ConfluentSchemaRegistryArgumentError } from './errors' 12 | import avro, { ForSchemaOptions, Schema, Type } from 'avsc' 13 | import { SchemaResponse, SchemaType } from './@types' 14 | 15 | type TypeHook = (schema: Schema, opts: ForSchemaOptions) => Type | undefined 16 | export default class AvroHelper implements SchemaHelper { 17 | private getRawAvroSchema(schema: ConfluentSchema): RawAvroSchema { 18 | return (typeof schema.schema === 'string' 19 | ? JSON.parse(schema.schema) 20 | : schema.schema) as RawAvroSchema 21 | } 22 | 23 | public getAvroSchema(schema: ConfluentSchema | RawAvroSchema, opts?: AvroOptions) { 24 | const rawSchema: RawAvroSchema = this.isRawAvroSchema(schema) 25 | ? schema 26 | : this.getRawAvroSchema(schema) 27 | // @ts-ignore TODO: Fix typings for Schema... 
28 | 29 | const addReferencedSchemas = (userHook?: TypeHook): TypeHook => ( 30 | schema: avro.Schema, 31 | opts: ForSchemaOptions, 32 | ) => { 33 | const avroOpts = opts as AvroOptions 34 | avroOpts?.referencedSchemas?.forEach(subSchema => { 35 | const rawSubSchema = this.getRawAvroSchema(subSchema) 36 | avroOpts.typeHook = userHook 37 | avro.Type.forSchema(rawSubSchema, avroOpts) 38 | }) 39 | if (userHook) { 40 | return userHook(schema, opts) 41 | } 42 | } 43 | 44 | const avroSchema = avro.Type.forSchema(rawSchema, { 45 | ...opts, 46 | typeHook: addReferencedSchemas(opts?.typeHook), 47 | }) 48 | 49 | return avroSchema 50 | } 51 | 52 | public validate(avroSchema: AvroSchema): void { 53 | if (!avroSchema.name) { 54 | throw new ConfluentSchemaRegistryArgumentError(`Invalid name: ${avroSchema.name}`) 55 | } 56 | } 57 | 58 | public getSubject( 59 | schema: AvroConfluentSchema, 60 | _avroSchema: AvroSchema, 61 | separator: string, 62 | ): ConfluentSubject { 63 | const rawSchema: RawAvroSchema = this.getRawAvroSchema(schema) 64 | 65 | if (!rawSchema.namespace) { 66 | throw new ConfluentSchemaRegistryArgumentError(`Invalid namespace: ${rawSchema.namespace}`) 67 | } 68 | 69 | const subject: ConfluentSubject = { 70 | name: [rawSchema.namespace, rawSchema.name].join(separator), 71 | } 72 | return subject 73 | } 74 | 75 | private isRawAvroSchema(schema: ConfluentSchema | RawAvroSchema): schema is RawAvroSchema { 76 | const asRawAvroSchema = schema as RawAvroSchema 77 | return asRawAvroSchema.name != null && asRawAvroSchema.type != null 78 | } 79 | 80 | public toConfluentSchema(data: SchemaResponse): AvroConfluentSchema { 81 | return { type: SchemaType.AVRO, schema: data.schema, references: data.references } 82 | } 83 | 84 | updateOptionsFromSchemaReferences( 85 | referencedSchemas: AvroConfluentSchema[], 86 | options: ProtocolOptions = {}, 87 | ): ProtocolOptions { 88 | return { ...options, [SchemaType.AVRO]: { ...options[SchemaType.AVRO], referencedSchemas } } 89 | } 90 | } 
91 | -------------------------------------------------------------------------------- /src/schemaTypeResolver.ts: -------------------------------------------------------------------------------- 1 | import AvroHelper from './AvroHelper' 2 | import JsonHelper from './JsonHelper' 3 | import JsonSchema from './JsonSchema' 4 | import ProtoHelper from './ProtoHelper' 5 | import ProtoSchema from './ProtoSchema' 6 | import { 7 | SchemaType, 8 | SchemaHelper, 9 | ConfluentSchema, 10 | SchemaRegistryAPIClientOptions, 11 | LegacyOptions, 12 | ProtocolOptions, 13 | AvroOptions, 14 | JsonOptions, 15 | ProtoOptions, 16 | Schema, 17 | AvroSchema, 18 | } from './@types' 19 | import { ConfluentSchemaRegistryArgumentError } from './errors' 20 | 21 | const helperTypeFromSchemaTypeMap: Record = {} 22 | 23 | export const schemaTypeFromString = (schemaTypeString: string) => { 24 | switch (schemaTypeString) { 25 | case 'AVRO': 26 | case undefined: 27 | return SchemaType.AVRO 28 | case 'JSON': 29 | return SchemaType.JSON 30 | case 'PROTOBUF': 31 | return SchemaType.PROTOBUF 32 | default: 33 | return SchemaType.UNKNOWN 34 | } 35 | } 36 | 37 | export const helperTypeFromSchemaType = ( 38 | schemaType: SchemaType = SchemaType.AVRO, 39 | ): SchemaHelper => { 40 | const schemaTypeStr = schemaType.toString() 41 | 42 | if (!helperTypeFromSchemaTypeMap[schemaTypeStr]) { 43 | let helper 44 | switch (schemaType) { 45 | case SchemaType.AVRO: { 46 | helper = new AvroHelper() 47 | break 48 | } 49 | case SchemaType.JSON: { 50 | helper = new JsonHelper() 51 | break 52 | } 53 | case SchemaType.PROTOBUF: { 54 | helper = new ProtoHelper() 55 | break 56 | } 57 | default: 58 | throw new ConfluentSchemaRegistryArgumentError('invalid schemaType') 59 | } 60 | helperTypeFromSchemaTypeMap[schemaTypeStr] = helper 61 | } 62 | return helperTypeFromSchemaTypeMap[schemaTypeStr] 63 | } 64 | 65 | export const schemaFromConfluentSchema = ( 66 | confluentSchema: ConfluentSchema, 67 | options?: 
SchemaRegistryAPIClientOptions, 68 | ): Schema | AvroSchema => { 69 | try { 70 | let schema: Schema 71 | 72 | switch (confluentSchema.type) { 73 | case SchemaType.AVRO: { 74 | const opts: AvroOptions | undefined = 75 | (options as LegacyOptions)?.forSchemaOptions || 76 | (options as ProtocolOptions)?.[SchemaType.AVRO] 77 | schema = (helperTypeFromSchemaType(confluentSchema.type) as AvroHelper).getAvroSchema( 78 | confluentSchema, 79 | opts, 80 | ) 81 | break 82 | } 83 | case SchemaType.JSON: { 84 | const opts: JsonOptions | undefined = (options as ProtocolOptions)?.[SchemaType.JSON] 85 | schema = new JsonSchema(confluentSchema, opts) 86 | break 87 | } 88 | case SchemaType.PROTOBUF: { 89 | const opts: ProtoOptions | undefined = (options as ProtocolOptions)?.[SchemaType.PROTOBUF] 90 | schema = new ProtoSchema(confluentSchema, opts) 91 | break 92 | } 93 | default: 94 | throw new ConfluentSchemaRegistryArgumentError('invalid schemaType') 95 | } 96 | 97 | return schema 98 | } catch (err) { 99 | if (err instanceof Error) throw new ConfluentSchemaRegistryArgumentError(err.message) 100 | throw err 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /src/utils/avdlToAVSC.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs' 2 | import { assembleProtocol, readProtocol } from 'avsc' 3 | 4 | import { ConfluentSchemaRegistryError } from '../errors' 5 | 6 | interface AssembleProtocolError extends Error { 7 | path: string 8 | } 9 | interface Obj { 10 | [key: string]: any 11 | } 12 | interface Iterable extends Obj { 13 | map: any 14 | } 15 | interface Field { 16 | type: { 17 | type: string 18 | items: any 19 | } 20 | } 21 | 22 | let cache: any 23 | const merge = Object.assign 24 | const isObject = (obj: unknown): obj is Obj => !!obj && typeof obj === 'object' 25 | const isIterable = (obj: unknown): obj is Iterable => 26 | isObject(obj) && typeof obj.map !== 'undefined' 27 
| const isFieldArray = (field: unknown): field is Field => 28 | isObject(field) && isObject(field.type) && field.type.type === 'array' 29 | 30 | const combine = (rootType: any, types: any) => { 31 | if (!rootType.fields) { 32 | return rootType 33 | } 34 | 35 | const find = (name: any) => { 36 | if (typeof name === 'string') { 37 | name = name.toLowerCase() 38 | } 39 | 40 | const typeToCombine = types.find((t: any) => { 41 | const names = [] 42 | if (t.namespace) { 43 | names.push(`${t.namespace}.`) 44 | } 45 | names.push(t.name.toLowerCase()) 46 | 47 | return names.join('') === name 48 | }) 49 | 50 | if (!typeToCombine || cache[typeToCombine.name]) { 51 | return null 52 | } 53 | 54 | cache[typeToCombine.name] = 1 55 | 56 | return combine(typeToCombine, types) 57 | } 58 | 59 | const combinedFields = rootType.fields.map((field: any) => { 60 | if (isFieldArray(field)) { 61 | const typeToCombine = find(field.type.items) 62 | return typeToCombine 63 | ? merge(field, { type: merge(field.type, { items: typeToCombine }) }) 64 | : field 65 | } else if (isIterable(field.type)) { 66 | const type = field.type.map((unionType: any) => { 67 | if (isObject(unionType)) { 68 | const typeToCombine = find(unionType.items) 69 | return typeToCombine ? merge(unionType, { items: typeToCombine }) : unionType 70 | } else { 71 | return find(unionType) || unionType 72 | } 73 | }) 74 | 75 | return merge(field, { type }) 76 | } 77 | 78 | const typeToCombine = find(field.type) 79 | return typeToCombine ? 
merge(field, { type: typeToCombine }) : field 80 | }) 81 | 82 | return merge(rootType, { fields: combinedFields }) 83 | } 84 | 85 | export function avdlToAVSC(path: any) { 86 | cache = {} 87 | const protocol = readProtocol(fs.readFileSync(path, 'utf8')) 88 | 89 | return merge({ namespace: protocol.namespace }, combine(protocol.types.pop(), protocol.types)) 90 | } 91 | 92 | export async function avdlToAVSCAsync(path: string) { 93 | cache = {} 94 | 95 | const protocol: Record = await new Promise((resolve, reject) => { 96 | assembleProtocol(path, (err: AssembleProtocolError, schema) => { 97 | if (err) { 98 | reject(new ConfluentSchemaRegistryError(`${err.message}. Caused by: ${err.path}`)) 99 | } else { 100 | resolve(schema as Record) 101 | } 102 | }) 103 | }) 104 | 105 | return merge({ namespace: protocol.namespace }, combine(protocol.types.pop(), protocol.types)) 106 | } 107 | -------------------------------------------------------------------------------- /src/utils/avdlToAVSC.spec.ts: -------------------------------------------------------------------------------- 1 | import path from 'path' 2 | import avro from 'avsc' 3 | import { exec } from 'child_process' 4 | import fs from 'node:fs' 5 | 6 | import SchemaRegistry from '../SchemaRegistry' 7 | import { avdlToAVSCAsync } from './avdlToAVSC' 8 | 9 | const registry = new SchemaRegistry({ host: 'http://localhost:8982' }) 10 | const absolutePath = (...paths: string[]) => path.join(__dirname, '../..', ...paths) 11 | 12 | const promisifiedExec = async (command: string): Promise => { 13 | return new Promise((resolve, reject) => { 14 | exec(command, (error, stdout) => { 15 | if (error) { 16 | return reject(error) 17 | } 18 | 19 | return resolve(stdout) 20 | }) 21 | }) 22 | } 23 | 24 | const compareWithJavaImplementation = (avdlPath: string, name: string) => async () => { 25 | const absolutePathToAvdlToAVSC = absolutePath('./bin/avdlToAVSC.sh') 26 | 27 | let expectedAVSC 28 | try { 29 | const result = await 
promisifiedExec( 30 | `${absolutePathToAvdlToAVSC} ./fixtures/avdl/${avdlPath} ${name}`, 31 | ) 32 | expectedAVSC = JSON.parse(result) 33 | } catch (error) { 34 | console.error(`Error when running ${absolutePathToAvdlToAVSC}`, error) // eslint-disable-line no-console 35 | throw error 36 | } 37 | 38 | const avsc = await avdlToAVSCAsync(absolutePath('./fixtures/avdl', avdlPath)) 39 | 40 | expect(avsc).toEqual(expectedAVSC) 41 | expect(avro.Type.forSchema(avsc)).toBeTruthy() 42 | expect(await registry.register(avsc)).toBeTruthy() 43 | } 44 | 45 | beforeAll(async () => { 46 | jest.setTimeout(10000) 47 | 48 | // deletes all the files from tmp dir 49 | const tmpDirectory = absolutePath('./tmp') 50 | try { 51 | fs.statSync(tmpDirectory) 52 | } catch (e) { 53 | fs.mkdirSync(tmpDirectory) 54 | } 55 | for (const file of fs.readdirSync(tmpDirectory)) { 56 | fs.unlinkSync(path.join(tmpDirectory, file)) 57 | } 58 | }) 59 | 60 | test('simple protocol', compareWithJavaImplementation('simple.avdl', 'Simple')) 61 | 62 | test('protocol with two levels', compareWithJavaImplementation('two.avdl', 'Two')) 63 | 64 | test('protocol with multiple levels', compareWithJavaImplementation('multiple.avdl', 'Multiple')) 65 | 66 | test('protocol with union', compareWithJavaImplementation('union.avdl', 'Union')) 67 | 68 | test( 69 | 'protocol with multiple union levels', 70 | compareWithJavaImplementation('multiple_union.avdl', 'MultipleUnion'), 71 | ) 72 | 73 | test('protocol with enum', compareWithJavaImplementation('enum.avdl', 'Enum')) 74 | 75 | test('protocol with enum & union', compareWithJavaImplementation('enum_union.avdl', 'EnumUnion')) 76 | 77 | test('protocol with array', compareWithJavaImplementation('array.avdl', 'Array')) 78 | 79 | test('protocol with array & union', compareWithJavaImplementation('array_union.avdl', 'ArrayUnion')) 80 | 81 | test('protocol with really complex stuff', compareWithJavaImplementation('complex.avdl', 'Complex')) 82 | 83 | test( 84 | 'protocol with 
multiple namespaces', 85 | compareWithJavaImplementation('multiple_namespaces.avdl', 'MultipleNamespaces'), 86 | ) 87 | 88 | /* 89 | * AVSC includes the namespace of imported records even if they are being imported 90 | * into the same namespace, causing a difference against the Java version. 91 | * 92 | * @issue: https://github.com/mtth/avsc/issues/281 93 | */ 94 | test.skip( 95 | 'protocol with import from same namespace', 96 | compareWithJavaImplementation('import.avdl', 'Import'), 97 | ) 98 | 99 | test( 100 | 'protocol with import from different namespace', 101 | compareWithJavaImplementation('import_multiple_namespaces.avdl', 'ImportMultipleNamespaces'), 102 | ) 103 | -------------------------------------------------------------------------------- /azure-pipelines.yml: -------------------------------------------------------------------------------- 1 | trigger: 2 | tags: 3 | include: 4 | - 'v*' 5 | 6 | branches: 7 | include: 8 | - master 9 | 10 | pr: 11 | branches: 12 | include: 13 | - master 14 | 15 | variables: 16 | - group: secrets 17 | - name: COMPOSE_FILE 18 | value: docker-compose.yml 19 | 20 | jobs: 21 | - job: lint 22 | displayName: Lint 23 | pool: 24 | vmImage: 'ubuntu-latest' 25 | steps: 26 | - task: NodeTool@0 27 | inputs: 28 | versionSpec: '18.x' 29 | - bash: yarn install 30 | displayName: yarn install 31 | - bash: yarn lint 32 | displayName: yarn lint 33 | - bash: yarn check:types 34 | displayName: Check type 35 | 36 | - job: build 37 | displayName: Build 38 | pool: 39 | vmImage: 'ubuntu-latest' 40 | steps: 41 | - task: NodeTool@0 42 | inputs: 43 | versionSpec: '18.x' 44 | - bash: yarn install 45 | displayName: yarn install 46 | - bash: yarn build 47 | displayName: yarn build 48 | 49 | - job: unit_tests 50 | displayName: Tests 51 | dependsOn: lint 52 | pool: 53 | vmImage: 'ubuntu-latest' 54 | steps: 55 | - task: NodeTool@0 56 | inputs: 57 | versionSpec: '18.x' 58 | - bash: yarn install 59 | displayName: yarn install 60 | - bash: docker compose -f 
${COMPOSE_FILE} pull 61 | displayName: docker compose pull 62 | - bash: yarn test 63 | displayName: integration test with docker compose 64 | 65 | - job: npm_release 66 | displayName: 'NPM release' 67 | condition: and(succeeded(), contains(variables['Build.SourceBranch'], 'refs/tags/')) 68 | dependsOn: 69 | - lint 70 | - build 71 | - unit_tests 72 | pool: 73 | vmImage: 'ubuntu-latest' 74 | steps: 75 | - task: NodeTool@0 76 | inputs: 77 | versionSpec: '18.x' 78 | - bash: yarn install 79 | displayName: yarn install 80 | - bash: yarn prepare:release 81 | displayName: yarn prepare:release 82 | - task: Npm@1 83 | inputs: 84 | command: custom 85 | customCommand: publish release/ --access public 86 | customEndpoint: npm_registry 87 | publishEndpoint: npm_registry 88 | - bash: TOKEN=${GH_TOKEN} TAG=$(Build.SourceBranch) ./pipeline/updateGithubRelease.js 89 | displayName: github release 90 | env: 91 | GH_TOKEN: $(GH_TOKEN) 92 | 93 | - job: website_deploy 94 | displayName: 'Website' 95 | condition: and(succeeded(), contains(variables['Build.SourceBranch'], 'refs/heads/master')) 96 | dependsOn: 97 | - lint 98 | - unit_tests 99 | pool: 100 | vmImage: 'ubuntu-latest' 101 | steps: 102 | - task: NodeTool@0 103 | inputs: 104 | versionSpec: '18.x' 105 | - bash: git config core.autocrlf true || test true 106 | displayName: git config core.autocrlf 107 | - bash: git config --global user.name "${GH_NAME}" || test true 108 | displayName: git config username 109 | env: 110 | GH_NAME: $(GH_NAME) 111 | - bash: git config --global user.email "${GH_EMAIL}" || test true 112 | displayName: git config email 113 | env: 114 | GH_EMAIL: $(GH_EMAIL) 115 | - bash: echo "machine github.com login ${GH_NAME} password ${GH_TOKEN}" > ~/.netrc || test true 116 | displayName: git config machine 117 | env: 118 | GH_NAME: $(GH_NAME) 119 | GH_TOKEN: $(GH_TOKEN) 120 | - bash: cd website && yarn install && GIT_USER="${GH_NAME}" yarn run publish-gh-pages || test true 121 | displayName: publish to gh-pages 122 | 
env: 123 | GH_NAME: $(GH_NAME) 124 | -------------------------------------------------------------------------------- /src/api/index.ts: -------------------------------------------------------------------------------- 1 | import { Agent } from 'http' 2 | import forge, { 3 | Authorization, 4 | Client, 5 | GatewayConfiguration, 6 | Middleware, 7 | ManifestOptions, 8 | } from 'mappersmith' 9 | import RetryMiddleware, { RetryMiddlewareOptions } from 'mappersmith/middleware/retry/v2' 10 | import BasicAuthMiddleware from 'mappersmith/middleware/basic-auth' 11 | 12 | import { DEFAULT_API_CLIENT_ID } from '../constants' 13 | import errorMiddleware from './middleware/errorMiddleware' 14 | import confluentEncoder from './middleware/confluentEncoderMiddleware' 15 | import userAgentMiddleware from './middleware/userAgent' 16 | 17 | const DEFAULT_RETRY = { 18 | maxRetryTimeInSecs: 5, 19 | initialRetryTimeInSecs: 0.1, 20 | factor: 0.2, // randomization factor 21 | multiplier: 2, // exponential factor 22 | retries: 3, // max retries 23 | } 24 | 25 | export interface SchemaRegistryAPIClientArgs { 26 | host: string 27 | auth?: Authorization 28 | clientId?: string 29 | retry?: Partial 30 | /** HTTP Agent that will be passed to underlying API calls */ 31 | agent?: Agent 32 | middlewares?: Middleware[] 33 | } 34 | 35 | // TODO: Improve typings 36 | export type SchemaRegistryAPIClient = Client<{ 37 | Schema: { 38 | find: (_: any) => any 39 | } 40 | Subject: { 41 | all: (_: any) => any 42 | latestVersion: (_: any) => any 43 | version: (_: any) => any 44 | config: (_: any) => any 45 | updateConfig: (_: any) => any 46 | register: (_: any) => any 47 | registered: (_: any) => any 48 | compatible: (_: any) => any 49 | } 50 | }> 51 | 52 | export default ({ 53 | auth, 54 | clientId: userClientId, 55 | host, 56 | retry = {}, 57 | agent, 58 | middlewares = [], 59 | }: SchemaRegistryAPIClientArgs): SchemaRegistryAPIClient => { 60 | const clientId = userClientId || DEFAULT_API_CLIENT_ID 61 | // 
FIXME: ResourcesType typings is not exposed by mappersmith 62 | const manifest: ManifestOptions = { 63 | clientId, 64 | ignoreGlobalMiddleware: true, 65 | host, 66 | middleware: [ 67 | userAgentMiddleware, 68 | confluentEncoder, 69 | RetryMiddleware(Object.assign(DEFAULT_RETRY, retry)), 70 | errorMiddleware, 71 | ...(auth ? [BasicAuthMiddleware(auth)] : []), 72 | ...middlewares, 73 | ], 74 | resources: { 75 | Schema: { 76 | find: { 77 | method: 'get', 78 | path: '/schemas/ids/{id}', 79 | }, 80 | }, 81 | Subject: { 82 | all: { 83 | method: 'get', 84 | path: '/subjects', 85 | }, 86 | latestVersion: { 87 | method: 'get', 88 | path: '/subjects/{subject}/versions/latest', 89 | }, 90 | version: { 91 | method: 'get', 92 | path: '/subjects/{subject}/versions/{version}', 93 | }, 94 | registered: { 95 | method: 'post', 96 | path: '/subjects/{subject}', 97 | }, 98 | 99 | config: { 100 | method: 'get', 101 | path: '/config/{subject}', 102 | }, 103 | updateConfig: { 104 | method: 'put', 105 | path: '/config/{subject}', 106 | }, 107 | 108 | register: { 109 | method: 'post', 110 | path: '/subjects/{subject}/versions', 111 | }, 112 | compatible: { 113 | method: 'post', 114 | path: '/compatibility/subjects/{subject}/versions/{version}', 115 | params: { version: 'latest' }, 116 | }, 117 | }, 118 | }, 119 | } 120 | // if an agent was provided, bind the agent to the mappersmith configs 121 | if (agent) { 122 | // gatewayConfigs is not listed as a type on manifest object in mappersmith 123 | ;((manifest as unknown) as { gatewayConfigs: Partial }).gatewayConfigs = { 124 | HTTP: { 125 | configure: () => ({ agent }), 126 | }, 127 | } 128 | } 129 | return forge(manifest) 130 | } 131 | -------------------------------------------------------------------------------- /pipeline/updateGithubRelease.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const fs = require('fs') 4 | const path = require('path') 5 | const https = 
require('https') 6 | 7 | let { TAG, TOKEN } = process.env 8 | 9 | if (!TAG) { 10 | throw new Error('Missing TAG env variable') 11 | } 12 | 13 | if (!TOKEN) { 14 | throw new Error('Missing TOKEN env variable') 15 | } 16 | 17 | if (TAG.startsWith('refs/tags/')) { 18 | TAG = TAG.replace('refs/tags/', '') 19 | } 20 | 21 | console.log(`-> Updating release ${TAG}`) 22 | 23 | const changelog = fs.readFileSync(path.join(__dirname, '../CHANGELOG.md'), 'utf-8') 24 | const lines = changelog.split('\n') 25 | 26 | const USER_AGENT = 'kafkajs/confluent-schema-registry Azure pipeline' 27 | const RELEASE_HEADER = /^\s*##\s*\[\d+\.\d+\.\d+(-beta\.\d)?\]\s*-\s*\d{4}-\d{2}-\d{2}\s*$/ 28 | 29 | while (!RELEASE_HEADER.test(lines[0])) { 30 | lines.shift() 31 | } 32 | 33 | const releases = {} 34 | let buffer = [] 35 | 36 | const getVersionNumber = () => buffer[0].match(/\[(.*)\]/)[1] 37 | 38 | for (const line of lines) { 39 | if (RELEASE_HEADER.test(line)) { 40 | if (buffer.length !== 0) { 41 | releases[`v${getVersionNumber()}`] = buffer.join('\n') 42 | buffer = [] 43 | } 44 | 45 | buffer.push(line) 46 | continue 47 | } 48 | 49 | buffer.push(line) 50 | } 51 | 52 | if (buffer.length !== 0) { 53 | releases[`v${getVersionNumber()}`] = buffer.join('\n') 54 | } 55 | 56 | const getTag = async () => 57 | new Promise((resolve, reject) => { 58 | const request = https.request( 59 | { 60 | protocol: 'https:', 61 | host: 'api.github.com', 62 | path: `/repos/kafkajs/confluent-schema-registry/tags`, 63 | headers: { 64 | 'User-Agent': USER_AGENT, 65 | Accept: 'application/vnd.github.v3+json', 66 | Authorization: `token ${TOKEN}`, 67 | }, 68 | }, 69 | res => { 70 | let rawData = '' 71 | 72 | res.setEncoding('utf8') 73 | res.on('data', chunk => (rawData += chunk)) 74 | res.on('end', () => { 75 | try { 76 | if (res.statusCode !== 200) { 77 | return reject(new Error(`Error getting tag: ${res.statusCode} - ${rawData}`)) 78 | } 79 | 80 | const data = JSON.parse(rawData) 81 | const tag = data.find(({ name }) 
=> name === TAG) 82 | 83 | resolve(tag) 84 | } catch (e) { 85 | reject(e) 86 | } 87 | }) 88 | }, 89 | ) 90 | 91 | request.on('error', reject) 92 | request.end() 93 | }) 94 | 95 | const createRelease = async tag => 96 | new Promise((resolve, reject) => { 97 | const request = https.request( 98 | { 99 | method: 'post', 100 | protocol: 'https:', 101 | host: 'api.github.com', 102 | path: `/repos/kafkajs/confluent-schema-registry/releases`, 103 | headers: { 104 | 'User-Agent': USER_AGENT, 105 | 'Content-Type': 'application/json', 106 | Accept: 'application/vnd.github.v3+json', 107 | Authorization: `token ${TOKEN}`, 108 | }, 109 | }, 110 | res => { 111 | let rawData = '' 112 | 113 | res.setEncoding('utf8') 114 | res.on('data', chunk => (rawData += chunk)) 115 | res.on('end', () => { 116 | try { 117 | if (res.statusCode !== 201) { 118 | return reject(new Error(`Error creating release: ${res.statusCode}`)) 119 | } 120 | 121 | const data = JSON.parse(rawData) 122 | resolve(data) 123 | } catch (e) { 124 | reject(e) 125 | } 126 | }) 127 | }, 128 | ) 129 | 130 | request.on('error', reject) 131 | request.write( 132 | /* eslint-disable */ 133 | JSON.stringify({ 134 | tag_name: TAG, 135 | target_commitish: tag.commit.sha, 136 | name: TAG, 137 | body: releases[TAG], 138 | draft: false, 139 | prerelease: false, 140 | }), 141 | ) 142 | request.end() 143 | }) 144 | 145 | getTag() 146 | .then(tag => createRelease(tag)) 147 | .then(release => console.log(`Release ${TAG} created`, release)) 148 | .catch(console.error) 149 | -------------------------------------------------------------------------------- /docs/v2.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: v2 3 | title: Migrating from v1 to v2 4 | sidebar_label: Migrating to version 2 5 | --- 6 | 7 | In version 1, `@kafkajs/confluent-schema-registry` only supported Avro schemas and 8 | the API was very Avro specific. 
In version 2, support for JSON Schema and Protobuf 9 | was added, which necessitated some changes in the public api. 10 | 11 | We have tried to make these changes as unintrusive as we can, by making the vast 12 | majority of changes backwards compatible. In fact, for Javascript users, the changes 13 | are completely backwards compatible for Avro schemas. 14 | 15 | For Typescript users, however, or if you are migrating from using Avro to using 16 | one of the other schema types, there are a few things to keep in mind. 17 | 18 | The main change is that the core `Schema` type has changed from an Avro specific 19 | schema to a generic schema interface for all schema types. The old schema looked 20 | like this: 21 | 22 | ```ts 23 | interface AvroSchema { 24 | name: string 25 | namespace?: string 26 | type: 'record' 27 | fields: any[] 28 | toBuffer(payload: object): Buffer 29 | fromBuffer(buffer: Buffer, resolver?: Resolver, noCheck?: boolean): any 30 | isValid( 31 | payload: object, 32 | opts?: { errorHook: (path: Array, value: any, type?: any) => void }, 33 | ): boolean 34 | } 35 | ``` 36 | 37 | This is still the case for Avro schemas, but for Protobuf or Json Schema, a 38 | more generic schema type is used: 39 | 40 | ```ts 41 | interface Schema { 42 | toBuffer(payload: object): Buffer 43 | fromBuffer(buffer: Buffer, resolver?: Resolver, noCheck?: boolean): any 44 | isValid( 45 | payload: object, 46 | opts?: { errorHook: (path: Array, value: any, type?: any) => void }, 47 | ): boolean 48 | } 49 | ``` 50 | 51 | If you have code that uses any of the Avro specific fields on the schema 52 | (for example returned by `getSchema`), you may need to first narrow the type 53 | to `AvroSchema`: 54 | 55 | ```ts 56 | import { AvroSchema, Schema } from '@kafkajs/confluent-schema-registry' 57 | 58 | function isAvroSchema(schema: AvroSchema | Schema): schema is AvroSchema { 59 | return (schema as AvroSchema).name != null 60 | } 61 | 62 | const schema = await 
registry.getSchema(registryId) 63 | 64 | if (isAvroSchema(schema)) { 65 | // schema is now `AvroSchema` 66 | const { name, namespace, type, fields } = schema 67 | } 68 | ``` 69 | 70 | ## Adapting to new APIs 71 | 72 | This is **entirely optional**, as all the old APIs should be retained (with the above caveat 73 | about the schema type), but if you want to adapt your code to the new API to make 74 | future migrations easier, these are the changes you would need to make. 75 | 76 | ### Configuring serialization libraries 77 | 78 | In version 1, the client constructor took a second `options` argument with 79 | a single option `forSchemaOptions` that was passed directly to 80 | [`avsc.Type.forSchema`](https://github.com/mtth/avsc/wiki/API#typeforschemaschema-opts) 81 | as the `opts` argument. 82 | 83 | ```js 84 | const { SchemaRegistry } = require('@kafkajs/confluent-schema-registry') 85 | 86 | const registry = new SchemaRegistry( 87 | { host: 'http://localhost:8081' }, 88 | { forSchemaOptions: { noAnonymousTypes: true }} 89 | ) 90 | ``` 91 | 92 | Since we now support multiple schema types, these options have been moved 93 | one level into a schema type specific option without the `forSchemaOptions` 94 | key: 95 | 96 | ```js 97 | const { SchemaRegistry, SchemaType } = require('@kafkajs/confluent-schema-registry') 98 | 99 | const registry = new SchemaRegistry( 100 | { host: 'http://localhost:8081' }, 101 | { 102 | [SchemaType.AVRO]: { noAnonymousTypes: true }, 103 | 104 | // This allows you to also pass options for Protobuf and JSON Schema 105 | [SchemaType.JSON]: { strict: true } 106 | 107 | [SchemaType.PROTOBUF]: { messageName: 'CustomMessage' } 108 | } 109 | ) 110 | ``` 111 | 112 | See [Schema Type Options](./usage#schema-type-options) for more information. 
113 | 114 | ### Registering schemas 115 | 116 | In version 1, the schema type was implicitly Avro, so you would just pass in the 117 | schema from `readAVSCAsync` directly: 118 | 119 | ```js 120 | const schema = await readAVSCAsync('path/to/schema.avsc') 121 | await registry.register(schema) 122 | ``` 123 | 124 | In version 2, there are two major changes: 125 | 126 | 1. The `type` of the schema needs to be set to one of `SchemaType`. 127 | 2. The `schema` itself is now a string, instead of an object. 128 | 129 | We call this new type a `ConfluentSchema` 130 | 131 | ```ts 132 | interface ConfluentSchema { 133 | type: SchemaType 134 | schema: string 135 | } 136 | ``` 137 | 138 | ```js 139 | const { SchemaType } = require('@kafkajs/confluent-schema-registry') 140 | 141 | const schema = await readAVSCAsync('path/to/schema.avsc') 142 | const schemaString = JSON.stringify(schema) 143 | await registry.register({ type: SchemaType.AVRO, schema: schemaString }) 144 | ``` 145 | 146 | ### Getting registry id by schema 147 | 148 | Similar to `register`, getting the registry id by schema used to take an 149 | `AvroSchema`, and now takes a `ConfluentSchema`. 150 | 151 | Version 1: 152 | 153 | ```js 154 | const schema = await readAVSCAsync('path/to/schema.avsc') 155 | await registry.getRegistryIdBySchema('subject', schema) 156 | ``` 157 | 158 | Version 2: 159 | 160 | ```js 161 | const schema = await readAVSCAsync('path/to/schema.avsc') 162 | const schemaString = JSON.stringify(schema) 163 | await registry.getRegistryIdBySchema('subject', { type: SchemaType.AVRO, schema: schemaString }) 164 | ``` -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 
4 | 5 | The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) 6 | and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). 7 | 8 | 9 | ## [3.3.2] - 2025-01-29 10 | ## Fixed 11 | Json schema validation return, adds the error message 12 | Fixes docker compose dependency 13 | 14 | ### Added 15 | 16 | ## [3.3.0] - 2022-10-04 17 | 18 | ### Added 19 | 20 | - Support [schema references](https://docs.confluent.io/platform/current/schema-registry/serdes-develop/index.html#schema-references) for Avro, Protocol Buffer, and JSON schema [#197](https://github.com/kafkajs/confluent-schema-registry/pull/197) 21 | 22 | ### Fixed 23 | 24 | - Fix Apicurio compatibility with register function [#201](https://github.com/kafkajs/confluent-schema-registry/pull/201) 25 | 26 | ## [3.2.1] - 2022-01-28 27 | 28 | ### Fixed 29 | 30 | - Don't swallow error message from client-side errors from registry requests [#176](https://github.com/kafkajs/confluent-schema-registry/pull/176) 31 | 32 | ## [3.2.0] - 2021-11-22 33 | 34 | ### Added 35 | 36 | - Add reader schema option when decoding Avro messages [#166](https://github.com/kafkajs/confluent-schema-registry/pull/166) 37 | 38 | ## [3.1.1] - 2021-11-03 39 | 40 | ### Fixed 41 | 42 | - Support backwards incompatible changes in Ajv 8 when passing in Ajv instance in JSON Schema options [#163](https://github.com/kafkajs/confluent-schema-registry/pull/163) 43 | 44 | ## [3.1.0] - 2021-11-03 45 | 46 | ### Added 47 | 48 | - Allow passing in Ajv instance in JSON Schema options [#133](https://github.com/kafkajs/confluent-schema-registry/pull/133) 49 | 50 | ### Fixed 51 | 52 | - Fix backwards compatibility with older Schema Registry versions [#158](https://github.com/kafkajs/confluent-schema-registry/pull/158) 53 | 54 | ### Fixed 55 | 56 | - Fix gateway config for when setting HTTP agent [#127](https://github.com/kafkajs/confluent-schema-registry/pull/127) 57 | 58 | ## [3.0.1] - 2021-06-11 59 | ### Fixed 60 
| 61 | - Fix gateway config for when setting HTTP agent [#127](https://github.com/kafkajs/confluent-schema-registry/pull/127) 62 | 63 | ## [3.0.0] - 2021-05-20 64 | 65 | This version is non-breaking for the overwhelming majority of users. 66 | 67 | When creating an instance of SchemaRegistry for Protobuf without the [`messageName` 68 | parameter](https://kafkajs.github.io/confluent-schema-registry/docs/usage#protobuf-1) 69 | confluent-schema-registry would, under certain circumstances, default 70 | to the wrong message type in the schema. Specifically, instead of defaulting to 71 | the first message type in the schema it would erroneously default to the first 72 | message type that did not define a nested type. 73 | 74 | **If you were relying on this behavior may need to either**: 75 | 76 | * Start passing [the `messageName` parameter](https://kafkajs.github.io/confluent-schema-registry/docs/usage#protobuf-1) instead of relying on the default behavior 77 | * Update your schemas and re-ingest messages accordingly 78 | 79 | See issue [#112](https://github.com/kafkajs/confluent-schema-registry/issues/112) for 80 | more info 81 | 82 | ### Added 83 | 84 | - Allow setting HTTP agent [#108](https://github.com/kafkajs/confluent-schema-registry/pull/108) 85 | 86 | ### Fixed 87 | 88 | - Fix default nested Protobuf type [#113](https://github.com/kafkajs/confluent-schema-registry/pull/113) 89 | 90 | ## [2.0.1] - 2021-04-02 91 | 92 | - Fix export of SchemaType [#100](https://github.com/kafkajs/confluent-schema-registry/pull/100) 93 | 94 | ## [2.0.0] - 2021-02-28 95 | 96 | This version adds support for Protobuf and JSON Schema, in addition to the already 97 | supported Avro format! 98 | 99 | See [Migrating to v2](https://kafkajs.github.io/confluent-schema-registry/docs/v2) 100 | for information on how to adapt your application to the new API. For most users, 101 | the change should be rather minor. 
102 | 103 | Big thanks to @dskatz22 and @Malkiz for their significant contributions! 104 | 105 | ### Added 106 | 107 | - Support Protobuf and JSON Schema [#93](https://github.com/kafkajs/confluent-schema-registry/pull/93) 108 | 109 | ## [1.0.6] - 2020-07-02 110 | 111 | ### Added 112 | 113 | - Support pre registered schemas by adding method `getRegistryIdBySchema` [#58](https://github.com/kafkajs/confluent-schema-registry/pull/58) 114 | 115 | ## [1.0.5] - 2020-03-18 116 | 117 | ### Added 118 | 119 | - Prevent Unnecessary Requests on Cache Misses [#48](https://github.com/kafkajs/confluent-schema-registry/pull/48) 120 | 121 | ## [1.0.4] - 2020-03-07 122 | 123 | ### Added 124 | 125 | - Support ForSchemaOption to call avro.Type.forSchema() [#47](https://github.com/kafkajs/confluent-schema-registry/pull/47) 126 | 127 | ## [1.0.3] - 2020-02-11 128 | 129 | ### Added 130 | 131 | - Support sharing of types between protocols [#43](https://github.com/kafkajs/confluent-schema-registry/pull/43) 132 | 133 | ### Fixed 134 | 135 | - Fix SchemaRegistry host port [#40](https://github.com/kafkajs/confluent-schema-registry/pull/40) 136 | - Add string as possible type for subject version [#38](https://github.com/kafkajs/confluent-schema-registry/pull/38) 137 | 138 | ## [1.0.2] - 2019-11-28 139 | 140 | ### Added 141 | 142 | - Allow for specifying subjects explicitly [#19](https://github.com/kafkajs/confluent-schema-registry/pull/19) 143 | 144 | ### Fixed 145 | 146 | - Fix `@types/jest` issue [#29](https://github.com/kafkajs/confluent-schema-registry/pull/29) 147 | - Fix `es-abstract` version issue [28](https://github.com/kafkajs/confluent-schema-registry/pull/28) 148 | 149 | ## [1.0.1] - 2019-10-25 150 | 151 | ### Added 152 | 153 | - Added the schema compatibility remaining constants [#14](https://github.com/kafkajs/confluent-schema-registry/pull/14) 154 | - Added method to fetch latest schema id by subject [#17](https://github.com/kafkajs/confluent-schema-registry/issues/17) 155 | - 
Added method to get the schemaID based on subject [#18](https://github.com/kafkajs/confluent-schema-registry/pull/18) 156 | - Support basic auth authentication [#21](https://github.com/kafkajs/confluent-schema-registry/pull/21) 157 | 158 | ## [1.0.0] - 2019-09-13 159 | 160 | ### Changed 161 | 162 | - See `0.2.0` version 163 | 164 | ## [0.2.0] - 2019-09-13 165 | 166 | ### Changed 167 | 168 | - Version `0.1.0` didn't transpile the Typescript files 169 | 170 | ## [0.1.0] - 2019-09-12 171 | 172 | ### Added 173 | 174 | - Encode, decode and sync operations 175 | -------------------------------------------------------------------------------- /docs/usage.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: usage 3 | title: Usage 4 | sidebar_label: Usage 5 | --- 6 | 7 | Typical usage consists of [uploading one or more schemas](#uploading-schemas) to the registry, [encoding 8 | data](#encoding-data) using the registered schemas, and/or [decoding encoded data](#decoding-data) by getting 9 | the schemas from the registry. 10 | 11 | ## Creating the registry client 12 | 13 | ```js 14 | const { SchemaRegistry } = require('@kafkajs/confluent-schema-registry') 15 | 16 | const registry = new SchemaRegistry({ host: 'http://localhost:8081' }) 17 | ``` 18 | 19 | For more configuration options, [see configuration](#configuration). 20 | 21 | ## Uploading schemas 22 | 23 | The schemas can be registered with the schema registry using 24 | `registry.register({ type: SchemaType, schema: string })`, which resolves to an object containing the 25 | schema id. This schema id is later [used when encoding](#encoding-data). 
26 | 27 | ```js 28 | const schema = { 29 | type: SchemaType.AVRO | SchemaType.JSON | SchemaType.PROTOBUF, 30 | schema: "string" 31 | } 32 | 33 | const options = { 34 | subject: "string" 35 | } 36 | 37 | await registry.register(schema, options) 38 | ``` 39 | 40 | ### Avro 41 | 42 | ```js 43 | const schema = ` 44 | { 45 | "type": "record", 46 | "name": "RandomTest", 47 | "namespace": "examples", 48 | "fields": [{ "type": "string", "name": "fullName" }] 49 | } 50 | ` 51 | const { id } = await registry.register({ type: SchemaType.AVRO, schema }) 52 | ``` 53 | 54 | To simplify working with Avro schemas and integrating with existing tooling, 55 | some utility functions are available. Schemas can be defined in either `AVSC` 56 | or `AVDL` format, and are read using `readAVSCAsync` and `avdlToAVSCAsync` 57 | respectively. 58 | 59 | Note that these functions return objects rather than strings, but they can 60 | be passed directly to `register` as the `schema` argument and will be 61 | stringified internally. 62 | 63 | ```js 64 | const { SchemaType, readAVSCAsync, avdlToAVSCAsync } = require('@kafkajs/confluent-schema-registry') 65 | 66 | // From an avsc file 67 | const schema = await readAVSCAsync('path/to/schema.avsc') 68 | const { id } = await registry.register({ type: SchemaType.AVRO, schema }) // { id: 2 } 69 | 70 | // From an avdl file 71 | const schema = await avdlToAVSCAsync('path/to/protocol.avdl') 72 | const { id } = await registry.register({ type: SchemaType.AVRO, schema }) // { id: 3 } 73 | ``` 74 | 75 | #### Subject 76 | 77 | For Avro schemas, the subject is automatically inferred from the schema if 78 | `options.subject` is not set. 
 79 | 80 | > See [Subjects](#subjects) for more information on subjects 81 | 82 | ### JSON Schema 83 | 84 | ```js 85 | const { SchemaType } = require('@kafkajs/confluent-schema-registry') 86 | 87 | const schema = ` 88 | { 89 | "definitions" : { 90 | "record:examples.Person" : { 91 | "type" : "object", 92 | "required" : [ "fullName" ], 93 | "additionalProperties" : false, 94 | "properties" : { 95 | "fullName" : { 96 | "type" : "string" 97 | } 98 | } 99 | } 100 | }, 101 | "$ref" : "#/definitions/record:examples.Person" 102 | } 103 | ` 104 | const { id } = await registry.register({ type: SchemaType.JSON, schema }) 105 | ``` 106 | 107 | ### Protobuf 108 | 109 | ```js 110 | const { SchemaType } = require('@kafkajs/confluent-schema-registry') 111 | 112 | const schema = ` 113 | package examples; 114 | message RandomTest { 115 | required string fullName = 1; 116 | } 117 | ` 118 | const { id } = await registry.register({ type: SchemaType.PROTOBUF, schema }) 119 | ``` 120 | 121 | ### Compatibility 122 | 123 | The [compatibility](https://docs.confluent.io/current/schema-registry/avro.html#compatibility-types) of the schema will be whatever the global default is (typically `BACKWARD`). 124 | It's possible to override this for the specific subject by setting it like so: 125 | 126 | ```js 127 | const { 128 | COMPATIBILITY: { NONE }, 129 | } = require('@kafkajs/confluent-schema-registry') 130 | await registry.register(schema, { compatibility: NONE }) 131 | ``` 132 | 133 | **NOTE:** 134 | If the subject already has an overridden compatibility setting and it's different, 135 | the client will throw an error (`ConfluentSchemaRegistryCompatibilityError`) 136 | 137 | ### Subjects 138 | 139 | Each schema is registered under a [subject](https://docs.confluent.io/current/schema-registry/serializer-formatter.html#sr-avro-subject-name-strategy). 140 | In Avro, this subject is generated by concatenating the schema namespace and the schema name 141 | with a separator. 
For example, the following schema would get the subject `com.example.Simple`: 142 | 143 | ```avdl 144 | @namespace("com.example") 145 | protocol SimpleProto { 146 | record Simple { 147 | string foo; 148 | } 149 | } 150 | ``` 151 | 152 | `registry.register` accepts a `subject` option to override the subject entirely: 153 | 154 | ```js 155 | await registry.register(schema, { subject: 'my-fixed-subject' }) 156 | ``` 157 | 158 | If you just want to change the separator used when automatically creating the subject, use 159 | the `separator` option: 160 | 161 | ```js 162 | // This would result in "com.example-Simple" 163 | await registry.register(schema, { separator: '-' }) 164 | ``` 165 | 166 | #### Other schema types 167 | 168 | For non-Avro schema types, `subject` is required and the method will throw if not provided. 169 | 170 | ## Encoding data 171 | 172 | To encode data, call `registry.encode` with the schema id and the payload to encode. 173 | 174 | ```js 175 | const payload = { full_name: 'John Doe' } 176 | await registry.encode(id, payload) 177 | ``` 178 | 179 | ## Decoding data 180 | 181 | The encoded payload contains the schema id of the schema used to decode it, 182 | so to decode, simply call `registry.decode` with the encoded payload. The 183 | corresponding schema will be downloaded from the registry if needed in order 184 | to decode the payload. 185 | 186 | ```js 187 | const payload = await registry.decode(buffer) 188 | // { full_name: 'John Doe' } 189 | ``` 190 | 191 | `registry.decode` has an optional second `options` argument with options 192 | specific to each schema type. 193 | 194 | ### Avro 195 | 196 | With Avro you can specify a specific reader schema to use to decode the 197 | message, rather than using the schema registered in the registry. This can 198 | be useful if you need a projection that is different from the writer schema, 199 | or if you want to decode a message with a different version than was 200 | used to encode the message. 
201 | 202 | ```js 203 | import avro from 'avsc' 204 | import { readAVSCAsync } from '@kafkajs/confluent-schema-registry' 205 | 206 | const readerSchema = await readAVSCAsync('path/to/schema.avsc') 207 | 208 | const payload = await registry.decode(buffer, { 209 | [SchemaType.AVRO]: { readerSchema } 210 | }) 211 | ``` 212 | 213 | ## Configuration 214 | 215 | ### Retry 216 | 217 | By default, all `GET` requests will retry three times in case of failure. If you want to tweak this config you can do: 218 | 219 | ```js 220 | const registry = new SchemaRegistry({ 221 | host: 'http://localhost:8081', 222 | retry: { 223 | maxRetryTimeInSecs: 5, 224 | initialRetryTimeInSecs: 0.1, 225 | factor: 0.2, // randomization factor 226 | multiplier: 2, // exponential factor 227 | retries: 3, // max retries 228 | }, 229 | }) 230 | ``` 231 | 232 | ### Basic auth 233 | 234 | It's also possible to configure basic auth: 235 | 236 | ```js 237 | const registry = new SchemaRegistry({ 238 | host: 'http://localhost:8081', 239 | auth: { 240 | username: '***', 241 | password: '***', 242 | }, 243 | }) 244 | ``` 245 | 246 | ### HTTP Agent 247 | 248 | Configuring the behavior of the HTTP requests towards the schema registry API 249 | can be done by passing in an instance of an [Agent](https://nodejs.org/api/https.html#https_class_https_agent). 250 | 251 | ```ts 252 | import { Agent } from 'http' 253 | 254 | const agent = new Agent({ keepAlive: true }) 255 | const registry = new SchemaRegistry({ 256 | host: 'http://localhost:8081', 257 | agent 258 | }) 259 | ``` 260 | 261 | ### Schema type options 262 | 263 | The second argument to the `SchemaRegistry` constructor is an object with keys for each `SchemaType`. 264 | 265 | #### Avro 266 | 267 | The Avro schema type options are passed directly to 268 | [`avsc.Type.forSchema` as the `opts` argument](https://github.com/mtth/avsc/wiki/API#typeforschemaschema-opts).
269 | For example: 270 | 271 | ```ts 272 | import { SchemaRegistry, SchemaType } from '@kafkajs/confluent-schema-registry' 273 | 274 | const options = { 275 | [SchemaType.AVRO]: { 276 | logicalTypes: { decimal: DecimalType } 277 | } 278 | } 279 | 280 | const registry = new SchemaRegistry({ host: 'http://localhost:8081' }, options) 281 | ``` 282 | 283 | #### Protobuf 284 | 285 | The only available option is `messageName`, which is used to select which message 286 | in a schema containing multiple messages to use for encoding/decoding the payload. 287 | If omitted, the first message type in the schema is used. 288 | 289 | ```ts 290 | const options = { 291 | [SchemaType.PROTOBUF]: { 292 | messageName: 'CustomMessage' 293 | } 294 | } 295 | ``` 296 | 297 | 298 | #### JSON Schema 299 | 300 | The JSON Schema schema type options are passed to the [Ajv constructor](https://ajv.js.org/options.html). 301 | For example: 302 | 303 | ```ts 304 | const options = { 305 | [SchemaType.JSON]: { 306 | strict: true 307 | } 308 | } 309 | ``` 310 | 311 | Alternatively, you can provide a custom Ajv instance using the `ajvInstance` option. This can be useful if you 312 | need to configure Ajv outside of what the constructor parameters allow. 
313 | 314 | ```ts 315 | const options = { 316 | [SchemaType.JSON]: { 317 | ajvInstance: new Ajv() 318 | } 319 | } 320 | ``` -------------------------------------------------------------------------------- /src/SchemaRegistry.spec.ts: -------------------------------------------------------------------------------- 1 | import path from 'path' 2 | import { v4 as uuid } from 'uuid' 3 | 4 | import { readAVSC } from './utils' 5 | import SchemaRegistry from './SchemaRegistry' 6 | import API, { SchemaRegistryAPIClient } from './api' 7 | import { COMPATIBILITY, DEFAULT_API_CLIENT_ID } from './constants' 8 | import encodedAnotherPersonV2 from '../fixtures/avro/encodedAnotherPersonV2' 9 | import wrongMagicByte from '../fixtures/wrongMagicByte' 10 | import { RawAvroSchema } from './@types' 11 | 12 | const REGISTRY_HOST = 'http://localhost:8982' 13 | const schemaRegistryAPIClientArgs = { host: REGISTRY_HOST } 14 | const schemaRegistryArgs = { host: REGISTRY_HOST } 15 | 16 | const personSchema = readAVSC(path.join(__dirname, '../fixtures/avsc/person.avsc')) 17 | const payload = { fullName: 'John Doe' } // eslint-disable-line @typescript-eslint/camelcase 18 | 19 | describe('SchemaRegistry - old AVRO api', () => { 20 | let schemaRegistry: SchemaRegistry 21 | 22 | beforeEach(async () => { 23 | schemaRegistry = new SchemaRegistry(schemaRegistryArgs) 24 | await schemaRegistry.register(personSchema) 25 | }) 26 | 27 | describe('#register', () => { 28 | let namespace: string, Schema: RawAvroSchema, subject: string, api: SchemaRegistryAPIClient 29 | 30 | beforeEach(() => { 31 | api = API(schemaRegistryAPIClientArgs) 32 | namespace = `N${uuid().replace(/-/g, '_')}` 33 | subject = `${namespace}.RandomTest` 34 | Schema = { 35 | namespace, 36 | type: 'record', 37 | name: 'RandomTest', 38 | fields: [{ type: 'string', name: 'fullName' }], 39 | } 40 | }) 41 | 42 | it('uploads the new schema', async () => { 43 | await expect(api.Subject.latestVersion({ subject })).rejects.toHaveProperty( 44 | 
'message', 45 | `${DEFAULT_API_CLIENT_ID} - Subject '${namespace}.${Schema.name}' not found.`, 46 | ) 47 | 48 | await expect(schemaRegistry.register(Schema)).resolves.toEqual({ id: expect.any(Number) }) 49 | }) 50 | 51 | it('automatically cache the id and schema', async () => { 52 | const { id } = await schemaRegistry.register(Schema) 53 | 54 | expect(schemaRegistry.cache.getSchema(id)).toBeTruthy() 55 | }) 56 | 57 | it('fetch and validate the latest schema id after registering a new schema', async () => { 58 | const { id } = await schemaRegistry.register(Schema) 59 | const latestSchemaId = await schemaRegistry.getLatestSchemaId(subject) 60 | 61 | expect(id).toBe(latestSchemaId) 62 | }) 63 | 64 | it('set the default compatibility to BACKWARD', async () => { 65 | await schemaRegistry.register(Schema) 66 | const response = await api.Subject.config({ subject }) 67 | expect(response.data()).toEqual({ compatibilityLevel: COMPATIBILITY.BACKWARD }) 68 | }) 69 | 70 | it('sets the compatibility according to param', async () => { 71 | await schemaRegistry.register(Schema, { compatibility: COMPATIBILITY.NONE }) 72 | const response = await api.Subject.config({ subject }) 73 | expect(response.data()).toEqual({ compatibilityLevel: COMPATIBILITY.NONE }) 74 | }) 75 | 76 | it('throws an error when schema does not have a name', async () => { 77 | delete Schema.name 78 | await expect(schemaRegistry.register(Schema)).rejects.toHaveProperty( 79 | 'message', 80 | 'Invalid name: undefined', 81 | ) 82 | }) 83 | 84 | it('throws an error when schema does not have a namespace', async () => { 85 | delete Schema.namespace 86 | await expect(schemaRegistry.register(Schema)).rejects.toHaveProperty( 87 | 'message', 88 | 'Invalid namespace: undefined', 89 | ) 90 | }) 91 | 92 | it('accepts schema without a namespace when subject is specified', async () => { 93 | delete Schema.namespace 94 | const nonNamespaced = readAVSC(path.join(__dirname, '../fixtures/avsc/non_namespaced.avsc')) 95 | await 
expect(schemaRegistry.register(nonNamespaced, { subject })).resolves.toEqual({ 96 | id: expect.any(Number), 97 | }) 98 | }) 99 | 100 | it('throws an error when the configured compatibility is different than defined in the client', async () => { 101 | await schemaRegistry.register(Schema) 102 | await api.Subject.updateConfig({ subject, body: { compatibility: COMPATIBILITY.FULL } }) 103 | await expect(schemaRegistry.register(Schema)).rejects.toHaveProperty( 104 | 'message', 105 | 'Compatibility does not match the configuration (BACKWARD != FULL)', 106 | ) 107 | }) 108 | }) 109 | 110 | describe('#encode', () => { 111 | beforeEach(async () => { 112 | await schemaRegistry.register(personSchema) 113 | }) 114 | 115 | it('throws an error if registryId is empty', async () => { 116 | await expect(schemaRegistry.encode(undefined, payload)).rejects.toHaveProperty( 117 | 'message', 118 | 'Invalid registryId: undefined', 119 | ) 120 | }) 121 | 122 | it('encodes using a defined registryId', async () => { 123 | const SchemaV1 = Object.assign({}, personSchema, { 124 | name: 'AnotherPerson', 125 | fields: [{ type: 'string', name: 'fullName' }], 126 | }) 127 | const SchemaV2 = Object.assign({}, SchemaV1, { 128 | fields: [ 129 | { type: 'string', name: 'fullName' }, 130 | { type: 'string', name: 'city', default: 'Stockholm' }, 131 | ], 132 | }) 133 | 134 | const schema1 = await schemaRegistry.register(SchemaV1) 135 | const schema2 = await schemaRegistry.register(SchemaV2) 136 | expect(schema2.id).not.toEqual(schema1.id) 137 | 138 | const data = await schemaRegistry.encode(schema2.id, payload) 139 | expect(data).toMatchConfluentEncodedPayload({ 140 | registryId: schema2.id, 141 | payload: Buffer.from(encodedAnotherPersonV2), 142 | }) 143 | }) 144 | }) 145 | 146 | describe('#decode', () => { 147 | let registryId: number 148 | 149 | beforeEach(async () => { 150 | registryId = (await schemaRegistry.register(personSchema)).id 151 | }) 152 | 153 | it('decodes data', async () => { 154 | 
const buffer = Buffer.from(await schemaRegistry.encode(registryId, payload)) 155 | const data = await schemaRegistry.decode(buffer) 156 | 157 | expect(data).toEqual(payload) 158 | }) 159 | 160 | it('throws an error if the magic byte is not supported', async () => { 161 | const buffer = Buffer.from(wrongMagicByte) 162 | await expect(schemaRegistry.decode(buffer)).rejects.toHaveProperty( 163 | 'message', 164 | 'Message encoded with magic byte {"type":"Buffer","data":[48]}, expected {"type":"Buffer","data":[0]}', 165 | ) 166 | }) 167 | 168 | it('caches the schema', async () => { 169 | const buffer = Buffer.from(await schemaRegistry.encode(registryId, payload)) 170 | 171 | schemaRegistry.cache.clear() 172 | await schemaRegistry.decode(buffer) 173 | 174 | expect(schemaRegistry.cache.getSchema(registryId)).toBeTruthy() 175 | }) 176 | 177 | it('creates a single origin request for a schema cache-miss', async () => { 178 | const buffer = Buffer.from(await schemaRegistry.encode(registryId, payload)) 179 | 180 | schemaRegistry.cache.clear() 181 | 182 | const spy = jest.spyOn((schemaRegistry as any).api.Schema, 'find') 183 | 184 | await Promise.all([ 185 | schemaRegistry.decode(buffer), 186 | schemaRegistry.decode(buffer), 187 | schemaRegistry.decode(buffer), 188 | ]) 189 | 190 | expect(spy).toHaveBeenCalledTimes(1) 191 | }) 192 | 193 | describe('when the cache is populated', () => { 194 | it('uses the cache data', async () => { 195 | const buffer = Buffer.from(await schemaRegistry.encode(registryId, payload)) 196 | expect(schemaRegistry.cache.getSchema(registryId)).toBeTruthy() 197 | 198 | jest.spyOn(schemaRegistry.cache, 'setSchema') 199 | await schemaRegistry.decode(buffer) 200 | 201 | expect(schemaRegistry.cache.setSchema).not.toHaveBeenCalled() 202 | }) 203 | }) 204 | }) 205 | 206 | describe('#getRegistryIdBySchema', () => { 207 | let namespace: string, Schema: RawAvroSchema, subject: string 208 | 209 | beforeEach(() => { 210 | namespace = `N${uuid().replace(/-/g, '_')}` 
211 | subject = `${namespace}.RandomTest` 212 | Schema = JSON.parse(` 213 | { 214 | "type": "record", 215 | "name": "RandomTest", 216 | "namespace": "${namespace}", 217 | "fields": [{ "type": "string", "name": "fullName" }] 218 | } 219 | `) 220 | }) 221 | 222 | it('returns the registry id if the schema has already been registered under that subject', async () => { 223 | const { id } = await schemaRegistry.register(Schema, { subject }) 224 | 225 | await expect(schemaRegistry.getRegistryIdBySchema(subject, Schema)).resolves.toEqual(id) 226 | }) 227 | 228 | it('throws an error if the subject does not exist', async () => { 229 | await expect(schemaRegistry.getRegistryIdBySchema(subject, Schema)).rejects.toHaveProperty( 230 | 'message', 231 | `Confluent_Schema_Registry - Subject '${namespace}.${Schema.name}' not found.`, 232 | ) 233 | }) 234 | 235 | it('throws an error if the schema has not been registered under that subject', async () => { 236 | const otherSchema = JSON.parse(` 237 | { 238 | "type": "record", 239 | "name": "RandomTest", 240 | "namespace": "${namespace}", 241 | "fields": [{ "type": "string", "name": "notFullName" }] 242 | } 243 | `) 244 | await schemaRegistry.register(otherSchema, { subject }) 245 | 246 | await expect(schemaRegistry.getRegistryIdBySchema(subject, Schema)).rejects.toHaveProperty( 247 | 'message', 248 | 'Confluent_Schema_Registry - Schema not found', 249 | ) 250 | }) 251 | }) 252 | }) 253 | 254 | describe('SchemaRegistry - Custom Middleware', () => { 255 | const customMiddleware = jest.fn() 256 | 257 | const schemaRegistry = new SchemaRegistry({ 258 | ...schemaRegistryArgs, 259 | middlewares: [customMiddleware], 260 | }) 261 | 262 | it('should have called the custom middleware', async () => { 263 | await schemaRegistry.register(personSchema) 264 | 265 | expect(customMiddleware).toHaveBeenCalled() 266 | }) 267 | }) 268 | -------------------------------------------------------------------------------- /src/SchemaRegistry.protobuf.spec.ts: 
-------------------------------------------------------------------------------- 1 | import SchemaRegistry, { RegisteredSchema } from './SchemaRegistry' 2 | import API from './api' 3 | import { ProtoConfluentSchema, SchemaType } from './@types' 4 | 5 | const REGISTRY_HOST = 'http://localhost:8982' 6 | const schemaRegistryAPIClientArgs = { host: REGISTRY_HOST } 7 | const schemaRegistryArgs = { host: REGISTRY_HOST } 8 | 9 | const TestSchemas = { 10 | FirstLevelSchema: { 11 | type: SchemaType.PROTOBUF, 12 | schema: ` 13 | syntax = "proto3"; 14 | package test; 15 | import "test/second_level_A.proto"; 16 | import "test/second_level_B.proto"; 17 | 18 | message FirstLevel { 19 | int32 id1 = 1; 20 | SecondLevelA level1a = 2; 21 | SecondLevelB level1b = 3; 22 | }`, 23 | references: [ 24 | { 25 | name: 'test/second_level_A.proto', 26 | subject: 'Proto:SecondLevelA', 27 | version: undefined, 28 | }, 29 | { 30 | name: 'test/second_level_B.proto', 31 | subject: 'Proto:SecondLevelB', 32 | version: undefined, 33 | }, 34 | ], 35 | } as ProtoConfluentSchema, 36 | 37 | SecondLevelASchema: { 38 | type: SchemaType.PROTOBUF, 39 | schema: ` 40 | syntax = "proto3"; 41 | package test; 42 | import "test/third_level.proto"; 43 | 44 | message SecondLevelA { 45 | int32 id2a = 1; 46 | ThirdLevel level2a = 2; 47 | }`, 48 | references: [ 49 | { 50 | name: 'test/third_level.proto', 51 | subject: 'Proto:ThirdLevel', 52 | version: undefined, 53 | }, 54 | ], 55 | } as ProtoConfluentSchema, 56 | 57 | SecondLevelBSchema: { 58 | type: SchemaType.PROTOBUF, 59 | schema: ` 60 | syntax = "proto3"; 61 | package test; 62 | import "test/third_level.proto"; 63 | 64 | message SecondLevelB { 65 | int32 id2b = 1; 66 | ThirdLevel level2b = 2; 67 | }`, 68 | references: [ 69 | { 70 | name: 'test/third_level.proto', 71 | subject: 'Proto:ThirdLevel', 72 | version: undefined, 73 | }, 74 | ], 75 | } as ProtoConfluentSchema, 76 | 77 | ThirdLevelSchema: { 78 | type: SchemaType.PROTOBUF, 79 | schema: ` 80 | syntax = 
"proto3"; 81 | package test; 82 | 83 | message ThirdLevel { 84 | int32 id3 = 1; 85 | }`, 86 | } as ProtoConfluentSchema, 87 | } 88 | 89 | function apiResponse(result) { 90 | return JSON.parse(result.responseData) 91 | } 92 | 93 | describe('SchemaRegistry', () => { 94 | let schemaRegistry: SchemaRegistry 95 | let registeredSchema: RegisteredSchema 96 | let api 97 | 98 | beforeEach(async () => { 99 | api = API(schemaRegistryAPIClientArgs) 100 | schemaRegistry = new SchemaRegistry(schemaRegistryArgs) 101 | }) 102 | 103 | describe('when register', () => { 104 | describe('when no reference', () => { 105 | beforeEach(async () => { 106 | registeredSchema = await schemaRegistry.register(TestSchemas.ThirdLevelSchema, { 107 | subject: 'Proto:ThirdLevel', 108 | }) 109 | }) 110 | it('should return schema id', async () => { 111 | expect(registeredSchema.id).toEqual(expect.any(Number)) 112 | }) 113 | 114 | it('should be able to encode/decode', async () => { 115 | const obj = { id3: 3 } 116 | 117 | const buffer = await schemaRegistry.encode(registeredSchema.id, obj) 118 | const resultObj = await schemaRegistry.decode(buffer) 119 | 120 | expect(resultObj).toEqual(obj) 121 | }) 122 | }) 123 | 124 | describe('with reference', () => { 125 | let schemaId 126 | let referenceSchema 127 | 128 | beforeEach(async () => { 129 | await schemaRegistry.register(TestSchemas.ThirdLevelSchema, { 130 | subject: 'Proto:ThirdLevel', 131 | }) 132 | 133 | const latest = apiResponse(await api.Subject.latestVersion({ subject: 'Proto:ThirdLevel' })) 134 | TestSchemas.SecondLevelASchema.references[0].version = latest.version 135 | registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelASchema, { 136 | subject: 'Proto:SecondLevelA', 137 | }) 138 | schemaId = registeredSchema.id 139 | 140 | const schemaRaw = apiResponse(await api.Schema.find({ id: schemaId })) 141 | referenceSchema = schemaRaw.references[0].subject 142 | }) 143 | 144 | it('should return schema id', async () => { 145 | 
expect(schemaId).toEqual(expect.any(Number)) 146 | }) 147 | it('should create a schema with reference', async () => { 148 | expect(referenceSchema).toEqual('Proto:ThirdLevel') 149 | }) 150 | 151 | it('should be able to encode/decode', async () => { 152 | const obj = { id2a: 2, level2a: { id3: 3 } } 153 | 154 | const buffer = await schemaRegistry.encode(registeredSchema.id, obj) 155 | const resultObj = await schemaRegistry.decode(buffer) 156 | 157 | expect(resultObj).toEqual(obj) 158 | }) 159 | }) 160 | 161 | describe('with multiple reference', () => { 162 | beforeEach(async () => { 163 | let latest 164 | 165 | await schemaRegistry.register(TestSchemas.ThirdLevelSchema, { 166 | subject: 'Proto:ThirdLevel', 167 | }) 168 | 169 | latest = apiResponse(await api.Subject.latestVersion({ subject: 'Proto:ThirdLevel' })) 170 | TestSchemas.SecondLevelASchema.references[0].version = latest.version 171 | registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelASchema, { 172 | subject: 'Proto:SecondLevelA', 173 | }) 174 | 175 | latest = apiResponse(await api.Subject.latestVersion({ subject: 'Proto:ThirdLevel' })) 176 | TestSchemas.SecondLevelBSchema.references[0].version = latest.version 177 | registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelBSchema, { 178 | subject: 'Proto:SecondLevelB', 179 | }) 180 | 181 | latest = apiResponse(await api.Subject.latestVersion({ subject: 'Proto:SecondLevelA' })) 182 | TestSchemas.FirstLevelSchema.references[0].version = latest.version 183 | latest = apiResponse(await api.Subject.latestVersion({ subject: 'Proto:SecondLevelB' })) 184 | TestSchemas.FirstLevelSchema.references[1].version = latest.version 185 | registeredSchema = await schemaRegistry.register(TestSchemas.FirstLevelSchema, { 186 | subject: 'Proto:FirstLevel', 187 | }) 188 | }) 189 | 190 | it('should be able to encode/decode', async () => { 191 | const obj = { 192 | id1: 1, 193 | level1a: { id2a: 2, level2a: { id3: 3 } }, 194 | level1b: { id2b: 
4, level2b: { id3: 5 } }, 195 | } 196 | 197 | const buffer = await schemaRegistry.encode(registeredSchema.id, obj) 198 | const resultObj = await schemaRegistry.decode(buffer) 199 | 200 | expect(resultObj).toEqual(obj) 201 | }) 202 | 203 | it('should be able to encode/decode independent', async () => { 204 | const obj = { 205 | id1: 1, 206 | level1a: { id2a: 2, level2a: { id3: 3 } }, 207 | level1b: { id2b: 4, level2b: { id3: 5 } }, 208 | } 209 | 210 | schemaRegistry = new SchemaRegistry(schemaRegistryArgs) 211 | const buffer = await schemaRegistry.encode(registeredSchema.id, obj) 212 | 213 | schemaRegistry = new SchemaRegistry(schemaRegistryArgs) 214 | const resultObj = await schemaRegistry.decode(buffer) 215 | 216 | expect(resultObj).toEqual(obj) 217 | }) 218 | }) 219 | }) 220 | 221 | describe('_getSchema', () => { 222 | let schema 223 | 224 | describe('no references', () => { 225 | beforeEach(async () => { 226 | registeredSchema = await schemaRegistry.register(TestSchemas.ThirdLevelSchema, { 227 | subject: 'Proto:ThirdLevel', 228 | }) 229 | ;({ schema } = await schemaRegistry['_getSchema'](registeredSchema.id)) 230 | }) 231 | 232 | it('should return schema that match message', async () => { 233 | expect(schema.message.name).toEqual('ThirdLevel') 234 | }) 235 | 236 | it('should be able to encode/decode', async () => { 237 | const obj = { id3: 3 } 238 | 239 | const buffer = await schema.toBuffer(obj) 240 | const resultObj = await schema.fromBuffer(buffer) 241 | 242 | expect(resultObj).toEqual(obj) 243 | }) 244 | }) 245 | 246 | describe('with references', () => { 247 | beforeEach(async () => { 248 | await schemaRegistry.register(TestSchemas.ThirdLevelSchema, { subject: 'Proto:ThirdLevel' }) 249 | 250 | const latest = apiResponse(await api.Subject.latestVersion({ subject: 'Proto:ThirdLevel' })) 251 | TestSchemas.SecondLevelASchema.references[0].version = latest.version 252 | registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelASchema, { 253 | 
subject: 'Proto:SecondLevelA', 254 | }) 255 | ;({ schema } = await schemaRegistry['_getSchema'](registeredSchema.id)) 256 | }) 257 | 258 | it('should return schema that match message', async () => { 259 | expect(schema.message.name).toEqual('SecondLevelA') 260 | }) 261 | 262 | it('should be able to encode/decode', async () => { 263 | const obj = { id2a: 2, level2a: { id3: 3 } } 264 | 265 | const buffer = await schema.toBuffer(obj) 266 | const resultObj = await schema.fromBuffer(buffer) 267 | 268 | expect(resultObj).toEqual(obj) 269 | }) 270 | }) 271 | 272 | describe('with multi references', () => { 273 | beforeEach(async () => { 274 | let latest 275 | 276 | await schemaRegistry.register(TestSchemas.ThirdLevelSchema, { 277 | subject: 'Proto:ThirdLevel', 278 | }) 279 | 280 | latest = apiResponse(await api.Subject.latestVersion({ subject: 'Proto:ThirdLevel' })) 281 | TestSchemas.SecondLevelASchema.references[0].version = latest.version 282 | registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelASchema, { 283 | subject: 'Proto:SecondLevelA', 284 | }) 285 | 286 | latest = apiResponse(await api.Subject.latestVersion({ subject: 'Proto:ThirdLevel' })) 287 | TestSchemas.SecondLevelBSchema.references[0].version = latest.version 288 | registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelBSchema, { 289 | subject: 'Proto:SecondLevelB', 290 | }) 291 | 292 | latest = apiResponse(await api.Subject.latestVersion({ subject: 'Proto:SecondLevelA' })) 293 | TestSchemas.FirstLevelSchema.references[0].version = latest.version 294 | latest = apiResponse(await api.Subject.latestVersion({ subject: 'Proto:SecondLevelB' })) 295 | TestSchemas.FirstLevelSchema.references[1].version = latest.version 296 | registeredSchema = await schemaRegistry.register(TestSchemas.FirstLevelSchema, { 297 | subject: 'Proto:FirstLevel', 298 | }) 299 | ;({ schema } = await schemaRegistry['_getSchema'](registeredSchema.id)) 300 | }) 301 | 302 | it('should return schema that 
match message', async () => { 303 | expect(schema.message.name).toEqual('FirstLevel') 304 | }) 305 | 306 | it('should be able to encode/decode', async () => { 307 | const obj = { 308 | id1: 1, 309 | level1a: { id2a: 2, level2a: { id3: 3 } }, 310 | level1b: { id2b: 4, level2b: { id3: 5 } }, 311 | } 312 | 313 | const buffer = await schema.toBuffer(obj) 314 | const resultObj = await schema.fromBuffer(buffer) 315 | 316 | expect(resultObj).toEqual(obj) 317 | }) 318 | }) 319 | }) 320 | 321 | describe('when document example', () => { 322 | it('should encode/decode', async () => { 323 | const schemaA = ` 324 | syntax = "proto3"; 325 | package test; 326 | import "test/B.proto"; 327 | 328 | message A { 329 | int32 id = 1; 330 | B b = 2; 331 | }` 332 | 333 | const schemaB = ` 334 | syntax = "proto3"; 335 | package test; 336 | 337 | message B { 338 | int32 id = 1; 339 | }` 340 | 341 | await schemaRegistry.register( 342 | { type: SchemaType.PROTOBUF, schema: schemaB }, 343 | { subject: 'Proto:B' }, 344 | ) 345 | 346 | const response = await schemaRegistry.api.Subject.latestVersion({ subject: 'Proto:B' }) 347 | const { version } = JSON.parse(response.responseData) 348 | 349 | const { id } = await schemaRegistry.register( 350 | { 351 | type: SchemaType.PROTOBUF, 352 | schema: schemaA, 353 | references: [ 354 | { 355 | name: 'test/B.proto', 356 | subject: 'Proto:B', 357 | version, 358 | }, 359 | ], 360 | }, 361 | { subject: 'Proto:A' }, 362 | ) 363 | 364 | const obj = { id: 1, b: { id: 2 } } 365 | 366 | const buffer = await schemaRegistry.encode(id, obj) 367 | const decodedObj = await schemaRegistry.decode(buffer) 368 | 369 | expect(decodedObj).toEqual(obj) 370 | }) 371 | }) 372 | }) 373 | -------------------------------------------------------------------------------- /src/SchemaRegistry.json.spec.ts: -------------------------------------------------------------------------------- 1 | import SchemaRegistry, { RegisteredSchema } from './SchemaRegistry' 2 | import API from './api' 
3 | import { JsonConfluentSchema, SchemaType } from './@types' 4 | import Ajv from 'ajv' 5 | import { ConfluentSchemaRegistryValidationError } from './errors' 6 | 7 | const REGISTRY_HOST = 'http://localhost:8982' 8 | const schemaRegistryAPIClientArgs = { host: REGISTRY_HOST } 9 | const schemaRegistryArgs = { host: REGISTRY_HOST } 10 | 11 | const TestSchemas = { 12 | ThirdLevelSchema: { 13 | type: SchemaType.JSON, 14 | schema: ` 15 | { 16 | "$id": "https://example.com/schemas/ThirdLevel", 17 | "type": "object", 18 | "properties": { 19 | "id3": { "type": "number" } 20 | } 21 | } 22 | `, 23 | } as JsonConfluentSchema, 24 | 25 | SecondLevelASchema: { 26 | type: SchemaType.JSON, 27 | schema: ` 28 | { 29 | "$id": "https://example.com/schemas/SecondLevelA", 30 | "type": "object", 31 | "properties": { 32 | "id2a": { "type": "number" }, 33 | "level2a": { "$ref": "https://example.com/schemas/ThirdLevel" } 34 | } 35 | } 36 | `, 37 | references: [ 38 | { 39 | name: 'https://example.com/schemas/ThirdLevel', 40 | subject: 'JSON:ThirdLevel', 41 | version: undefined, 42 | }, 43 | ], 44 | } as JsonConfluentSchema, 45 | 46 | SecondLevelBSchema: { 47 | type: SchemaType.JSON, 48 | schema: ` 49 | { 50 | "$id": "https://example.com/schemas/SecondLevelB", 51 | "type": "object", 52 | "properties": { 53 | "id2b": { "type": "number" }, 54 | "level2b": { "$ref": "https://example.com/schemas/ThirdLevel" } 55 | } 56 | } 57 | `, 58 | references: [ 59 | { 60 | name: 'https://example.com/schemas/ThirdLevel', 61 | subject: 'JSON:ThirdLevel', 62 | version: undefined, 63 | }, 64 | ], 65 | } as JsonConfluentSchema, 66 | 67 | FirstLevelSchema: { 68 | type: SchemaType.JSON, 69 | schema: ` 70 | { 71 | "$id": "https://example.com/schemas/FirstLevel", 72 | "type": "object", 73 | "properties": { 74 | "id1": { "type": "number" }, 75 | "level1a": { "$ref": "https://example.com/schemas/SecondLevelA" }, 76 | "level1b": { "$ref": "https://example.com/schemas/SecondLevelB" } 77 | } 78 | } 79 | `, 80 | 
references: [ 81 | { 82 | name: 'https://example.com/schemas/SecondLevelA', 83 | subject: 'JSON:SecondLevelA', 84 | version: undefined, 85 | }, 86 | { 87 | name: 'https://example.com/schemas/SecondLevelB', 88 | subject: 'JSON:SecondLevelB', 89 | version: undefined, 90 | }, 91 | ], 92 | } as JsonConfluentSchema, 93 | } 94 | 95 | function apiResponse(result) { 96 | return JSON.parse(result.responseData) 97 | } 98 | 99 | describe('SchemaRegistry', () => { 100 | let schemaRegistry: SchemaRegistry 101 | let registeredSchema: RegisteredSchema 102 | let api 103 | 104 | beforeEach(async () => { 105 | const options = { 106 | [SchemaType.JSON]: { 107 | allErrors: true, 108 | detailedErrorPaths: true, 109 | }, 110 | } 111 | api = API(schemaRegistryAPIClientArgs) 112 | schemaRegistry = new SchemaRegistry(schemaRegistryArgs, options) 113 | }) 114 | 115 | describe('when register', () => { 116 | describe('when no reference', () => { 117 | beforeEach(async () => { 118 | registeredSchema = await schemaRegistry.register(TestSchemas.ThirdLevelSchema, { 119 | subject: 'JSON:ThirdLevel', 120 | }) 121 | }) 122 | it('should return schema id', async () => { 123 | expect(registeredSchema.id).toEqual(expect.any(Number)) 124 | }) 125 | 126 | it('should be able to encode/decode', async () => { 127 | const obj = { id3: 3 } 128 | 129 | const buffer = await schemaRegistry.encode(registeredSchema.id, obj) 130 | const resultObj = await schemaRegistry.decode(buffer) 131 | 132 | expect(resultObj).toEqual(obj) 133 | }) 134 | }) 135 | 136 | describe('with reference', () => { 137 | let schemaId 138 | let referenceSchema 139 | 140 | beforeEach(async () => { 141 | await schemaRegistry.register(TestSchemas.ThirdLevelSchema, { 142 | subject: 'JSON:ThirdLevel', 143 | }) 144 | 145 | const latest = apiResponse(await api.Subject.latestVersion({ subject: 'JSON:ThirdLevel' })) 146 | TestSchemas.SecondLevelASchema.references[0].version = latest.version 147 | registeredSchema = await 
schemaRegistry.register(TestSchemas.SecondLevelASchema, { 148 | subject: 'JSON:SecondLevelA', 149 | }) 150 | schemaId = registeredSchema.id 151 | 152 | const schemaRaw = apiResponse(await api.Schema.find({ id: schemaId })) 153 | referenceSchema = schemaRaw.references[0].subject 154 | }) 155 | 156 | it('should return schema id', async () => { 157 | expect(schemaId).toEqual(expect.any(Number)) 158 | }) 159 | 160 | it('should create a schema with reference', async () => { 161 | expect(referenceSchema).toEqual('JSON:ThirdLevel') 162 | }) 163 | 164 | it('should be able to encode/decode', async () => { 165 | const obj = { id2a: 2, level2a: { id3: 3 } } 166 | 167 | const buffer = await schemaRegistry.encode(registeredSchema.id, obj) 168 | const resultObj = await schemaRegistry.decode(buffer) 169 | 170 | expect(resultObj).toEqual(obj) 171 | }) 172 | 173 | it('should return error message', async () => { 174 | const obj = { id2a: 'sdfsdfsdf', level2a: 1 } 175 | try { 176 | await schemaRegistry.encode(registeredSchema.id, obj) 177 | } catch (ex) { 178 | expect(ex.paths[0].message).toBeDefined() 179 | expect(ex.paths[0].message).toEqual('should be number') 180 | } 181 | }) 182 | }) 183 | 184 | describe('with multiple reference', () => { 185 | beforeEach(async () => { 186 | let latest 187 | 188 | await schemaRegistry.register(TestSchemas.ThirdLevelSchema, { 189 | subject: 'JSON:ThirdLevel', 190 | }) 191 | 192 | latest = apiResponse(await api.Subject.latestVersion({ subject: 'JSON:ThirdLevel' })) 193 | TestSchemas.SecondLevelASchema.references[0].version = latest.version 194 | registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelASchema, { 195 | subject: 'JSON:SecondLevelA', 196 | }) 197 | 198 | latest = apiResponse(await api.Subject.latestVersion({ subject: 'JSON:ThirdLevel' })) 199 | TestSchemas.SecondLevelBSchema.references[0].version = latest.version 200 | registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelBSchema, { 201 | subject: 
'JSON:SecondLevelB', 202 | }) 203 | 204 | latest = apiResponse(await api.Subject.latestVersion({ subject: 'JSON:SecondLevelA' })) 205 | TestSchemas.FirstLevelSchema.references[0].version = latest.version 206 | latest = apiResponse(await api.Subject.latestVersion({ subject: 'JSON:SecondLevelB' })) 207 | TestSchemas.FirstLevelSchema.references[1].version = latest.version 208 | registeredSchema = await schemaRegistry.register(TestSchemas.FirstLevelSchema, { 209 | subject: 'JSON:FirstLevel', 210 | }) 211 | }) 212 | 213 | it('should be able to encode/decode', async () => { 214 | const obj = { 215 | id1: 1, 216 | level1a: { id2a: 2, level2a: { id3: 3 } }, 217 | level1b: { id2b: 4, level2b: { id3: 5 } }, 218 | } 219 | 220 | const buffer = await schemaRegistry.encode(registeredSchema.id, obj) 221 | const resultObj = await schemaRegistry.decode(buffer) 222 | 223 | expect(resultObj).toEqual(obj) 224 | }) 225 | 226 | it('should be able to encode/decode independent', async () => { 227 | const obj = { 228 | id1: 1, 229 | level1a: { id2a: 2, level2a: { id3: 3 } }, 230 | level1b: { id2b: 4, level2b: { id3: 5 } }, 231 | } 232 | 233 | schemaRegistry = new SchemaRegistry(schemaRegistryArgs) 234 | const buffer = await schemaRegistry.encode(registeredSchema.id, obj) 235 | 236 | schemaRegistry = new SchemaRegistry(schemaRegistryArgs) 237 | const resultObj = await schemaRegistry.decode(buffer) 238 | 239 | expect(resultObj).toEqual(obj) 240 | }) 241 | }) 242 | }) 243 | 244 | describe('_getSchema', () => { 245 | let schema 246 | 247 | describe('no references', () => { 248 | beforeEach(async () => { 249 | registeredSchema = await schemaRegistry.register(TestSchemas.ThirdLevelSchema, { 250 | subject: 'JSON:ThirdLevel', 251 | }) 252 | ;({ schema } = await schemaRegistry['_getSchema'](registeredSchema.id)) 253 | }) 254 | 255 | it('should be able to encode/decode', async () => { 256 | const obj = { id3: 3 } 257 | 258 | const buffer = await schema.toBuffer(obj) 259 | const resultObj = await 
schema.fromBuffer(buffer) 260 | 261 | expect(resultObj).toEqual(obj) 262 | }) 263 | }) 264 | 265 | describe('with references', () => { 266 | beforeEach(async () => { 267 | await schemaRegistry.register(TestSchemas.ThirdLevelSchema, { subject: 'JSON:ThirdLevel' }) 268 | 269 | const latest = apiResponse(await api.Subject.latestVersion({ subject: 'JSON:ThirdLevel' })) 270 | TestSchemas.SecondLevelASchema.references[0].version = latest.version 271 | registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelASchema, { 272 | subject: 'JSON:SecondLevelA', 273 | }) 274 | ;({ schema } = await schemaRegistry['_getSchema'](registeredSchema.id)) 275 | }) 276 | 277 | it('should be able to encode/decode', async () => { 278 | const obj = { id2a: 2, level2a: { id3: 3 } } 279 | 280 | const buffer = await schema.toBuffer(obj) 281 | const resultObj = await schema.fromBuffer(buffer) 282 | 283 | expect(resultObj).toEqual(obj) 284 | }) 285 | }) 286 | 287 | describe('with multi references', () => { 288 | beforeEach(async () => { 289 | let latest 290 | 291 | await schemaRegistry.register(TestSchemas.ThirdLevelSchema, { 292 | subject: 'JSON:ThirdLevel', 293 | }) 294 | 295 | latest = apiResponse(await api.Subject.latestVersion({ subject: 'JSON:ThirdLevel' })) 296 | TestSchemas.SecondLevelASchema.references[0].version = latest.version 297 | registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelASchema, { 298 | subject: 'JSON:SecondLevelA', 299 | }) 300 | 301 | latest = apiResponse(await api.Subject.latestVersion({ subject: 'JSON:ThirdLevel' })) 302 | TestSchemas.SecondLevelBSchema.references[0].version = latest.version 303 | registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelBSchema, { 304 | subject: 'JSON:SecondLevelB', 305 | }) 306 | 307 | latest = apiResponse(await api.Subject.latestVersion({ subject: 'JSON:SecondLevelA' })) 308 | TestSchemas.FirstLevelSchema.references[0].version = latest.version 309 | latest = apiResponse(await 
api.Subject.latestVersion({ subject: 'JSON:SecondLevelB' })) 310 | TestSchemas.FirstLevelSchema.references[1].version = latest.version 311 | registeredSchema = await schemaRegistry.register(TestSchemas.FirstLevelSchema, { 312 | subject: 'JSON:FirstLevel', 313 | }) 314 | ;({ schema } = await schemaRegistry['_getSchema'](registeredSchema.id)) 315 | }) 316 | 317 | it('should be able to encode/decode', async () => { 318 | const obj = { 319 | id1: 1, 320 | level1a: { id2a: 2, level2a: { id3: 3 } }, 321 | level1b: { id2b: 4, level2b: { id3: 5 } }, 322 | } 323 | 324 | const buffer = await schema.toBuffer(obj) 325 | const resultObj = await schema.fromBuffer(buffer) 326 | 327 | expect(resultObj).toEqual(obj) 328 | }) 329 | }) 330 | }) 331 | 332 | describe('when document example', () => { 333 | it('should encode/decode', async () => { 334 | const schemaA = { 335 | $id: 'https://example.com/schemas/A', 336 | type: 'object', 337 | properties: { 338 | id: { type: 'number' }, 339 | b: { $ref: 'https://example.com/schemas/B' }, 340 | }, 341 | } 342 | 343 | const schemaB = { 344 | $id: 'https://example.com/schemas/B', 345 | type: 'object', 346 | properties: { 347 | id: { type: 'number' }, 348 | }, 349 | } 350 | 351 | await schemaRegistry.register( 352 | { type: SchemaType.JSON, schema: JSON.stringify(schemaB) }, 353 | { subject: 'JSON:B' }, 354 | ) 355 | 356 | const response = await schemaRegistry.api.Subject.latestVersion({ subject: 'JSON:B' }) 357 | const { version } = JSON.parse(response.responseData) 358 | 359 | const { id } = await schemaRegistry.register( 360 | { 361 | type: SchemaType.JSON, 362 | schema: JSON.stringify(schemaA), 363 | references: [ 364 | { 365 | name: 'https://example.com/schemas/B', 366 | subject: 'JSON:B', 367 | version, 368 | }, 369 | ], 370 | }, 371 | { subject: 'JSON:A' }, 372 | ) 373 | 374 | const obj = { id: 1, b: { id: 2 } } 375 | 376 | const buffer = await schemaRegistry.encode(id, obj) 377 | const decodedObj = await schemaRegistry.decode(buffer) 
378 | 379 | expect(decodedObj).toEqual(obj) 380 | }) 381 | }) 382 | }) 383 | -------------------------------------------------------------------------------- /src/SchemaRegistry.ts: -------------------------------------------------------------------------------- 1 | import { Type } from 'avsc' 2 | import { Response } from 'mappersmith' 3 | 4 | import { encode, MAGIC_BYTE } from './wireEncoder' 5 | import decode from './wireDecoder' 6 | import { COMPATIBILITY, DEFAULT_SEPARATOR } from './constants' 7 | import API, { SchemaRegistryAPIClientArgs, SchemaRegistryAPIClient } from './api' 8 | import Cache from './cache' 9 | import { 10 | ConfluentSchemaRegistryError, 11 | ConfluentSchemaRegistryArgumentError, 12 | ConfluentSchemaRegistryCompatibilityError, 13 | ConfluentSchemaRegistryValidationError, 14 | } from './errors' 15 | import { 16 | Schema, 17 | RawAvroSchema, 18 | AvroSchema, 19 | SchemaType, 20 | ConfluentSchema, 21 | ConfluentSubject, 22 | SchemaRegistryAPIClientOptions, 23 | AvroConfluentSchema, 24 | SchemaResponse, 25 | ProtocolOptions, 26 | SchemaHelper, 27 | SchemaReference, 28 | LegacyOptions, 29 | } from './@types' 30 | import { 31 | helperTypeFromSchemaType, 32 | schemaTypeFromString, 33 | schemaFromConfluentSchema, 34 | } from './schemaTypeResolver' 35 | 36 | export interface RegisteredSchema { 37 | id: number 38 | } 39 | 40 | interface Opts { 41 | compatibility?: COMPATIBILITY 42 | separator?: string 43 | subject: string 44 | } 45 | 46 | interface AvroDecodeOptions { 47 | readerSchema?: RawAvroSchema | AvroSchema | Schema 48 | } 49 | export interface DecodeOptions { 50 | [SchemaType.AVRO]?: AvroDecodeOptions 51 | } 52 | 53 | const DEFAULT_OPTS = { 54 | compatibility: COMPATIBILITY.BACKWARD, 55 | separator: DEFAULT_SEPARATOR, 56 | } 57 | export default class SchemaRegistry { 58 | private api: SchemaRegistryAPIClient 59 | private cacheMissRequests: { [key: number]: Promise } = {} 60 | private options: SchemaRegistryAPIClientOptions | undefined 61 | 62 | 
public cache: Cache 63 | 64 | constructor( 65 | { auth, clientId, host, retry, agent, middlewares }: SchemaRegistryAPIClientArgs, 66 | options?: SchemaRegistryAPIClientOptions, 67 | ) { 68 | this.api = API({ auth, clientId, host, retry, agent, middlewares }) 69 | this.cache = new Cache() 70 | this.options = options 71 | } 72 | 73 | private isConfluentSchema( 74 | schema: RawAvroSchema | AvroSchema | ConfluentSchema, 75 | ): schema is ConfluentSchema { 76 | return (schema as ConfluentSchema).schema != null 77 | } 78 | 79 | private getConfluentSchema( 80 | schema: RawAvroSchema | AvroSchema | ConfluentSchema, 81 | ): ConfluentSchema { 82 | let confluentSchema: ConfluentSchema 83 | // convert data from old api (for backwards compatibility) 84 | if (!this.isConfluentSchema(schema)) { 85 | // schema is instanceof RawAvroSchema or AvroSchema 86 | confluentSchema = { 87 | type: SchemaType.AVRO, 88 | schema: JSON.stringify(schema), 89 | } 90 | } else { 91 | confluentSchema = schema as ConfluentSchema 92 | } 93 | return confluentSchema 94 | } 95 | 96 | public async register( 97 | schema: Exclude, 98 | userOpts: Opts, 99 | ): Promise 100 | public async register( 101 | schema: RawAvroSchema | AvroConfluentSchema, 102 | userOpts?: Omit & { subject?: string }, 103 | ): Promise 104 | public async register( 105 | schema: RawAvroSchema | ConfluentSchema, 106 | userOpts: Opts, 107 | ): Promise 108 | public async register( 109 | schema: RawAvroSchema | ConfluentSchema, 110 | userOpts?: Opts, 111 | ): Promise { 112 | const { compatibility, separator } = { ...DEFAULT_OPTS, ...userOpts } 113 | 114 | const confluentSchema: ConfluentSchema = this.getConfluentSchema(schema) 115 | 116 | const helper = helperTypeFromSchemaType(confluentSchema.type) 117 | 118 | const options = await this.updateOptionsWithSchemaReferences(confluentSchema, this.options) 119 | const schemaInstance = schemaFromConfluentSchema(confluentSchema, options) 120 | helper.validate(schemaInstance) 121 | let 
isFirstTimeRegistration = false 122 | let subject: ConfluentSubject 123 | if (userOpts?.subject) { 124 | subject = { 125 | name: userOpts.subject, 126 | } 127 | } else { 128 | subject = helper.getSubject(confluentSchema, schemaInstance, separator) 129 | } 130 | 131 | try { 132 | const response = await this.api.Subject.config({ subject: subject.name }) 133 | const { compatibilityLevel }: { compatibilityLevel: COMPATIBILITY } = response.data() 134 | 135 | if (compatibilityLevel.toUpperCase() !== compatibility) { 136 | throw new ConfluentSchemaRegistryCompatibilityError( 137 | `Compatibility does not match the configuration (${compatibility} != ${compatibilityLevel.toUpperCase()})`, 138 | ) 139 | } 140 | } catch (error) { 141 | if (!error || typeof error !== 'object' || !('status' in error) || error.status !== 404) { 142 | throw error 143 | } else { 144 | isFirstTimeRegistration = true 145 | } 146 | } 147 | 148 | const response = await this.api.Subject.register({ 149 | subject: subject.name, 150 | body: { 151 | schemaType: confluentSchema.type === SchemaType.AVRO ? 
undefined : confluentSchema.type, 152 | schema: confluentSchema.schema, 153 | references: confluentSchema.references, 154 | }, 155 | }) 156 | 157 | if (compatibility && isFirstTimeRegistration) { 158 | await this.api.Subject.updateConfig({ subject: subject.name, body: { compatibility } }) 159 | } 160 | 161 | const registeredSchema: RegisteredSchema = response.data() 162 | this.cache.setLatestRegistryId(subject.name, registeredSchema.id) 163 | this.cache.setSchema(registeredSchema.id, confluentSchema.type, schemaInstance) 164 | 165 | return registeredSchema 166 | } 167 | 168 | private async updateOptionsWithSchemaReferences( 169 | schema: ConfluentSchema, 170 | options?: SchemaRegistryAPIClientOptions, 171 | ) { 172 | const helper = helperTypeFromSchemaType(schema.type) 173 | const referencedSchemas = await this.getreferencedSchemas(schema, helper) 174 | 175 | const protocolOptions = this.asProtocolOptions(options) 176 | return helper.updateOptionsFromSchemaReferences(referencedSchemas, protocolOptions) 177 | } 178 | 179 | private asProtocolOptions(options?: SchemaRegistryAPIClientOptions): ProtocolOptions | undefined { 180 | if (!(options as LegacyOptions)?.forSchemaOptions) { 181 | return options as ProtocolOptions | undefined 182 | } 183 | return { 184 | [SchemaType.AVRO]: (options as LegacyOptions)?.forSchemaOptions, 185 | } 186 | } 187 | 188 | private async getreferencedSchemas( 189 | schema: ConfluentSchema, 190 | helper: SchemaHelper, 191 | ): Promise { 192 | const referencesSet = new Set() 193 | return this.getreferencedSchemasRecursive(schema, helper, referencesSet) 194 | } 195 | 196 | private async getreferencedSchemasRecursive( 197 | schema: ConfluentSchema, 198 | helper: SchemaHelper, 199 | referencesSet: Set, 200 | ): Promise { 201 | const references = schema.references || [] 202 | 203 | let referencedSchemas: ConfluentSchema[] = [] 204 | for (const reference of references) { 205 | const schemas = await this.getreferencedSchemasFromReference(reference, 
helper, referencesSet) 206 | referencedSchemas = referencedSchemas.concat(schemas) 207 | } 208 | return referencedSchemas 209 | } 210 | 211 | async getreferencedSchemasFromReference( 212 | reference: SchemaReference, 213 | helper: SchemaHelper, 214 | referencesSet: Set, 215 | ): Promise { 216 | const { name, subject, version } = reference 217 | const key = `${name}-${subject}-${version}` 218 | 219 | // avoid duplicates 220 | if (referencesSet.has(key)) { 221 | return [] 222 | } 223 | referencesSet.add(key) 224 | 225 | const versionResponse = await this.api.Subject.version(reference) 226 | const foundSchema = versionResponse.data() as SchemaResponse 227 | 228 | const schema = helper.toConfluentSchema(foundSchema) 229 | const referencedSchemas = await this.getreferencedSchemasRecursive( 230 | schema, 231 | helper, 232 | referencesSet, 233 | ) 234 | 235 | referencedSchemas.push(schema) 236 | return referencedSchemas 237 | } 238 | 239 | private async _getSchema( 240 | registryId: number, 241 | ): Promise<{ type: SchemaType; schema: Schema | AvroSchema }> { 242 | const cacheEntry = this.cache.getSchema(registryId) 243 | 244 | if (cacheEntry) { 245 | return cacheEntry 246 | } 247 | 248 | const response = await this.getSchemaOriginRequest(registryId) 249 | const foundSchema: SchemaResponse = response.data() 250 | 251 | const schemaType = schemaTypeFromString(foundSchema.schemaType) 252 | 253 | const helper = helperTypeFromSchemaType(schemaType) 254 | const confluentSchema = helper.toConfluentSchema(foundSchema) 255 | 256 | const options = await this.updateOptionsWithSchemaReferences(confluentSchema, this.options) 257 | const schemaInstance = schemaFromConfluentSchema(confluentSchema, options) 258 | return this.cache.setSchema(registryId, schemaType, schemaInstance) 259 | } 260 | 261 | public async getSchema(registryId: number): Promise { 262 | return await (await this._getSchema(registryId)).schema 263 | } 264 | 265 | public async encode(registryId: number, payload: any): 
Promise { 266 | if (!registryId) { 267 | throw new ConfluentSchemaRegistryArgumentError( 268 | `Invalid registryId: ${JSON.stringify(registryId)}`, 269 | ) 270 | } 271 | 272 | const { schema } = await this._getSchema(registryId) 273 | try { 274 | const serializedPayload = schema.toBuffer(payload) 275 | return encode(registryId, serializedPayload) 276 | } catch (error) { 277 | if (error instanceof ConfluentSchemaRegistryValidationError) throw error 278 | 279 | const paths = this.collectInvalidPaths(schema, payload) 280 | throw new ConfluentSchemaRegistryValidationError(error, paths) 281 | } 282 | } 283 | 284 | private collectInvalidPaths(schema: Schema, jsonPayload: object) { 285 | const paths: string[][] = [] 286 | schema.isValid(jsonPayload, { 287 | errorHook: path => paths.push(path), 288 | }) 289 | 290 | return paths 291 | } 292 | 293 | public async decode(buffer: Buffer, options?: DecodeOptions): Promise { 294 | if (!Buffer.isBuffer(buffer)) { 295 | throw new ConfluentSchemaRegistryArgumentError('Invalid buffer') 296 | } 297 | 298 | const { magicByte, registryId, payload } = decode(buffer) 299 | if (Buffer.compare(MAGIC_BYTE, magicByte) !== 0) { 300 | throw new ConfluentSchemaRegistryArgumentError( 301 | `Message encoded with magic byte ${JSON.stringify(magicByte)}, expected ${JSON.stringify( 302 | MAGIC_BYTE, 303 | )}`, 304 | ) 305 | } 306 | 307 | const { type, schema: writerSchema } = await this._getSchema(registryId) 308 | 309 | let rawReaderSchema 310 | switch (type) { 311 | case SchemaType.AVRO: 312 | rawReaderSchema = options?.[SchemaType.AVRO]?.readerSchema as RawAvroSchema | AvroSchema 313 | } 314 | if (rawReaderSchema) { 315 | const readerSchema = schemaFromConfluentSchema( 316 | { type: SchemaType.AVRO, schema: rawReaderSchema }, 317 | this.options, 318 | ) as AvroSchema 319 | if (readerSchema.equals(writerSchema as Type)) { 320 | /* Even when schemas are considered equal by `avsc`, 321 | * they still aren't interchangeable: 322 | * provided 
`readerSchema` may have different `opts` (e.g. logicalTypes / unionWrap flags) 323 | * see https://github.com/mtth/avsc/issues/362 */ 324 | return readerSchema.fromBuffer(payload) 325 | } else { 326 | // decode using a resolver from writer type into reader type 327 | return readerSchema.fromBuffer(payload, readerSchema.createResolver(writerSchema as Type)) 328 | } 329 | } 330 | 331 | return writerSchema.fromBuffer(payload) 332 | } 333 | 334 | public async getRegistryId(subject: string, version: number | string): Promise { 335 | const response = await this.api.Subject.version({ subject, version }) 336 | const { id }: { id: number } = response.data() 337 | 338 | return id 339 | } 340 | 341 | public async getRegistryIdBySchema( 342 | subject: string, 343 | schema: AvroSchema | RawAvroSchema | ConfluentSchema, 344 | ): Promise { 345 | try { 346 | const confluentSchema: ConfluentSchema = this.getConfluentSchema(schema) 347 | const response = await this.api.Subject.registered({ 348 | subject, 349 | body: { 350 | schemaType: confluentSchema.type === SchemaType.AVRO ? 
// Integration tests for SchemaRegistry with Avro schemas.
// NOTE(review): these hit a live schema registry at localhost:8982
// (started via docker-compose) — they are not unit tests.
import SchemaRegistry, { RegisteredSchema } from './SchemaRegistry'
import API from './api'
import { AvroConfluentSchema, SchemaType } from './@types'
import avro from 'avsc'

const REGISTRY_HOST = 'http://localhost:8982'
const schemaRegistryAPIClientArgs = { host: REGISTRY_HOST }
const schemaRegistryArgs = { host: REGISTRY_HOST }

// TypeScript numeric enums used to exercise the enum typeHook scenarios below.
enum Color {
  RED = 1,
  GREEN = 2,
  BLUE = 3,
}

enum Direction {
  UP = 1,
  DOWN = 2,
}

// Fixture schemas forming a reference chain:
// FirstLevel -> {SecondLevelA, SecondLevelB} -> ThirdLevel.
// `version: undefined` placeholders are filled in by the tests after the
// referenced subject has been registered.
const TestSchemas = {
  FirstLevelSchema: {
    type: SchemaType.AVRO,
    schema: `
    {
      "type" : "record",
      "namespace" : "test",
      "name" : "FirstLevel",
      "fields" : [
        { "name" : "id1" , "type" : "int" },
        { "name" : "level1a" , "type" : "test.SecondLevelA" },
        { "name" : "level1b" , "type" : "test.SecondLevelB" }
      ]
    }`,
    references: [
      {
        name: 'test.SecondLevelA',
        subject: 'Avro:SecondLevelA',
        version: undefined,
      },
      {
        name: 'test.SecondLevelB',
        subject: 'Avro:SecondLevelB',
        version: undefined,
      },
    ],
  } as AvroConfluentSchema,

  SecondLevelASchema: {
    type: SchemaType.AVRO,
    schema: `
    {
      "type" : "record",
      "namespace" : "test",
      "name" : "SecondLevelA",
      "fields" : [
        { "name" : "id2a" , "type" : "int" },
        { "name" : "level2a" , "type" : "test.ThirdLevel" }
      ]
    }`,
    references: [
      {
        name: 'test.ThirdLevel',
        subject: 'Avro:ThirdLevel',
        version: undefined,
      },
    ],
  } as AvroConfluentSchema,

  SecondLevelBSchema: {
    type: SchemaType.AVRO,
    schema: `
    {
      "type" : "record",
      "namespace" : "test",
      "name" : "SecondLevelB",
      "fields" : [
        { "name" : "id2b" , "type" : "int" },
        { "name" : "level2b" , "type" : "test.ThirdLevel" }
      ]
    }`,
    references: [
      {
        name: 'test.ThirdLevel',
        subject: 'Avro:ThirdLevel',
        version: undefined,
      },
    ],
  } as AvroConfluentSchema,

  ThirdLevelSchema: {
    type: SchemaType.AVRO,
    schema: `
    {
      "type" : "record",
      "namespace" : "test",
      "name" : "ThirdLevel",
      "fields" : [
        { "name" : "id3" , "type" : "int" }
      ]
    }`,
  } as AvroConfluentSchema,

  EnumSchema: {
    type: SchemaType.AVRO,
    schema: `
    {
      "type" : "record",
      "namespace" : "test",
      "name" : "EnumSchema",
      "fields" : [
        {
          "name": "color",
          "type": ["null", {
            "type": "enum",
            "name": "Color",
            "symbols": ["RED", "GREEN", "BLUE"]
            }
          ]
        }
      ]
    }`,
  } as AvroConfluentSchema,

  EnumWithReferencesSchema: {
    type: SchemaType.AVRO,
    schema: `
    {
      "type" : "record",
      "namespace" : "test",
      "name" : "EnumWithReferences",
      "fields" : [
        {
          "name": "direction",
          "type": ["null", {
            "type": "enum",
            "name": "Direction",
            "symbols": ["UP", "DOWN"]
            }
          ]
        },
        { "name" : "attributes" , "type" : "test.EnumSchema" }
      ]
    }`,
    references: [
      {
        name: 'test.EnumSchema',
        subject: 'Avro:EnumSchema',
        version: undefined,
      },
    ],
  } as AvroConfluentSchema,
}

// Unwraps a raw mappersmith result into its parsed JSON body.
function apiResponse(result) {
  return JSON.parse(result.responseData)
}

describe('SchemaRegistry', () => {
  let schemaRegistry: SchemaRegistry
  let registeredSchema: RegisteredSchema
  let api

  beforeEach(async () => {
    api = API(schemaRegistryAPIClientArgs)
    schemaRegistry = new SchemaRegistry(schemaRegistryArgs)
  })

  describe('when register', () => {
    describe('when no reference', () => {
      beforeEach(async () => {
        registeredSchema = await schemaRegistry.register(TestSchemas.ThirdLevelSchema, {
          subject: 'Avro:ThirdLevel',
        })
      })

      it('should return schema id', async () => {
        expect(registeredSchema.id).toEqual(expect.any(Number))
      })

      it('should be able to encode/decode', async () => {
        const obj = { id3: 3 }

        const buffer = await schemaRegistry.encode(registeredSchema.id, obj)
        const resultObj = await schemaRegistry.decode(buffer)

        expect(resultObj).toEqual(obj)
      })
    })

    describe('with reference', () => {
      let schemaId
      let referenceSchema

      beforeEach(async () => {
        // Register the dependency first, then wire its version into the
        // referencing schema before registering it.
        await schemaRegistry.register(TestSchemas.ThirdLevelSchema, {
          subject: 'Avro:ThirdLevel',
        })

        const latest = apiResponse(await api.Subject.latestVersion({ subject: 'Avro:ThirdLevel' }))
        TestSchemas.SecondLevelASchema.references[0].version = latest.version
        registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelASchema, {
          subject: 'Avro:SecondLevelA',
        })
        schemaId = registeredSchema.id

        const schemaRaw = apiResponse(await api.Schema.find({ id: schemaId }))
        referenceSchema = schemaRaw.references[0].subject
      })

      it('should return schema id', async () => {
        expect(schemaId).toEqual(expect.any(Number))
      })

      it('should create a schema with reference', async () => {
        expect(referenceSchema).toEqual('Avro:ThirdLevel')
      })

      it('should be able to encode/decode', async () => {
        const obj = { id2a: 2, level2a: { id3: 3 } }

        const buffer = await schemaRegistry.encode(registeredSchema.id, obj)
        const resultObj = await schemaRegistry.decode(buffer)

        expect(resultObj).toEqual(obj)
      })
    })

    describe('with multiple reference', () => {
      beforeEach(async () => {
        let latest

        // Build the full chain bottom-up: ThirdLevel, both SecondLevels,
        // then FirstLevel referencing the two SecondLevels.
        registeredSchema = await schemaRegistry.register(TestSchemas.ThirdLevelSchema, {
          subject: 'Avro:ThirdLevel',
        })

        latest = apiResponse(await api.Subject.latestVersion({ subject: 'Avro:ThirdLevel' }))
        TestSchemas.SecondLevelASchema.references[0].version = latest.version
        registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelASchema, {
          subject: 'Avro:SecondLevelA',
        })

        latest = apiResponse(await api.Subject.latestVersion({ subject: 'Avro:ThirdLevel' }))
        TestSchemas.SecondLevelBSchema.references[0].version = latest.version
        registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelBSchema, {
          subject: 'Avro:SecondLevelB',
        })

        latest = apiResponse(await api.Subject.latestVersion({ subject: 'Avro:SecondLevelA' }))
        TestSchemas.FirstLevelSchema.references[0].version = latest.version
        latest = apiResponse(await api.Subject.latestVersion({ subject: 'Avro:SecondLevelB' }))
        TestSchemas.FirstLevelSchema.references[1].version = latest.version
        registeredSchema = await schemaRegistry.register(TestSchemas.FirstLevelSchema, {
          subject: 'Avro:FirstLevel',
        })
      })

      it('should be able to encode/decode', async () => {
        const obj = {
          id1: 1,
          level1a: { id2a: 2, level2a: { id3: 3 } },
          level1b: { id2b: 4, level2b: { id3: 5 } },
        }

        const buffer = await schemaRegistry.encode(registeredSchema.id, obj)
        const resultObj = await schemaRegistry.decode(buffer)

        expect(resultObj).toEqual(obj)
      })

      it('should be able to encode/decode independent', async () => {
        const obj = {
          id1: 1,
          level1a: { id2a: 2, level2a: { id3: 3 } },
          level1b: { id2b: 4, level2b: { id3: 5 } },
        }

        // Fresh instances force cache misses, so references are resolved
        // from the registry rather than from the local cache.
        schemaRegistry = new SchemaRegistry(schemaRegistryArgs)
        const buffer = await schemaRegistry.encode(registeredSchema.id, obj)

        schemaRegistry = new SchemaRegistry(schemaRegistryArgs)
        const resultObj = await schemaRegistry.decode(buffer)

        expect(resultObj).toEqual(obj)
      })
    })
  })

  describe('_getSchema', () => {
    let schema

    describe('no references', () => {
      beforeEach(async () => {
        registeredSchema = await schemaRegistry.register(TestSchemas.ThirdLevelSchema, {
          subject: 'Avro:ThirdLevel',
        })
        ;({ schema } = await schemaRegistry['_getSchema'](registeredSchema.id))
      })

      it('should return schema that match name', async () => {
        expect(schema.name).toEqual('test.ThirdLevel')
      })

      it('should be able to encode/decode', async () => {
        const obj = { id3: 3 }

        const buffer = await schema.toBuffer(obj)
        const resultObj = await schema.fromBuffer(buffer)

        expect(resultObj).toEqual(obj)
      })
    })

    describe('with references', () => {
      beforeEach(async () => {
        await schemaRegistry.register(TestSchemas.ThirdLevelSchema, { subject: 'Avro:ThirdLevel' })

        const latest = apiResponse(await api.Subject.latestVersion({ subject: 'Avro:ThirdLevel' }))
        TestSchemas.SecondLevelASchema.references[0].version = latest.version
        registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelASchema, {
          subject: 'Avro:SecondLevelA',
        })
        ;({ schema } = await schemaRegistry['_getSchema'](registeredSchema.id))
      })

      it('should return schema that match name', async () => {
        expect(schema.name).toEqual('test.SecondLevelA')
      })

      it('should be able to encode/decode', async () => {
        const obj = { id2a: 2, level2a: { id3: 3 } }

        const buffer = await schema.toBuffer(obj)
        const resultObj = await schema.fromBuffer(buffer)

        expect(resultObj).toEqual(obj)
      })
    })

    describe('with multi references', () => {
      beforeEach(async () => {
        let latest

        await schemaRegistry.register(TestSchemas.ThirdLevelSchema, {
          subject: 'Avro:ThirdLevel',
        })

        latest = apiResponse(await api.Subject.latestVersion({ subject: 'Avro:ThirdLevel' }))
        TestSchemas.SecondLevelASchema.references[0].version = latest.version
        registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelASchema, {
          subject: 'Avro:SecondLevelA',
        })

        latest = apiResponse(await api.Subject.latestVersion({ subject: 'Avro:ThirdLevel' }))
        TestSchemas.SecondLevelBSchema.references[0].version = latest.version
        registeredSchema = await schemaRegistry.register(TestSchemas.SecondLevelBSchema, {
          subject: 'Avro:SecondLevelB',
        })

        latest = apiResponse(await api.Subject.latestVersion({ subject: 'Avro:SecondLevelA' }))
        TestSchemas.FirstLevelSchema.references[0].version = latest.version
        latest = apiResponse(await api.Subject.latestVersion({ subject: 'Avro:SecondLevelB' }))
        TestSchemas.FirstLevelSchema.references[1].version = latest.version
        registeredSchema = await schemaRegistry.register(TestSchemas.FirstLevelSchema, {
          subject: 'Avro:FirstLevel',
        })
        ;({ schema } = await schemaRegistry['_getSchema'](registeredSchema.id))
      })

      it('should return schema that match name', async () => {
        expect(schema.name).toEqual('test.FirstLevel')
      })

      it('should be able to encode/decode', async () => {
        const obj = {
          id1: 1,
          level1a: { id2a: 2, level2a: { id3: 3 } },
          level1b: { id2b: 4, level2b: { id3: 5 } },
        }

        const buffer = await schema.toBuffer(obj)
        const resultObj = await schema.fromBuffer(buffer)

        expect(resultObj).toEqual(obj)
      })
    })
  })

  // Mirrors the reference example from the docs.
  describe('when document example', () => {
    it('should encode/decode', async () => {
      const schemaA = {
        type: 'record',
        namespace: 'test',
        name: 'A',
        fields: [
          { name: 'id', type: 'int' },
          { name: 'b', type: 'test.B' },
        ],
      }

      const schemaB = {
        type: 'record',
        namespace: 'test',
        name: 'B',
        fields: [{ name: 'id', type: 'int' }],
      }

      await schemaRegistry.register(
        { type: SchemaType.AVRO, schema: JSON.stringify(schemaB) },
        { subject: 'Avro:B' },
      )

      const response = await schemaRegistry.api.Subject.latestVersion({ subject: 'Avro:B' })
      const { version } = JSON.parse(response.responseData)

      const { id } = await schemaRegistry.register(
        {
          type: SchemaType.AVRO,
          schema: JSON.stringify(schemaA),
          references: [
            {
              name: 'test.B',
              subject: 'Avro:B',
              version,
            },
          ],
        },
        { subject: 'Avro:A' },
      )

      const obj = { id: 1, b: { id: 2 } }

      const buffer = await schemaRegistry.encode(id, obj)
      const decodedObj = await schemaRegistry.decode(buffer)

      expect(decodedObj).toEqual(obj)
    })
  })

  describe('with EnumType types and nested schemas', () => {
    /**
     * Hook which will decode/encode enums to/from integers.
     *
     * The default `EnumType` implementation represents enum values as strings
     * (consistent with the JSON representation). This hook can be used to provide
     * an alternate representation (which is for example compatible with TypeScript
     * enums).
     *
     * For simplicity, we don't do any bound checking here but we could by
     * implementing a "bounded long" logical type and returning that instead.
     *
     * https://gist.github.com/mtth/c0088c745de048c4e466#file-long-enum-js
     */
    function typeHook(attrs, opts) {
      if (attrs.type === 'enum') {
        return avro.parse('long', opts)
      }
    }

    let schema

    describe('with no enum typeHook defined', () => {
      beforeEach(async () => {
        const schemaRegistry = new SchemaRegistry(schemaRegistryArgs)

        await schemaRegistry.register(TestSchemas.EnumSchema, {
          subject: 'Avro:EnumSchema',
        })

        const latest = apiResponse(await api.Subject.latestVersion({ subject: 'Avro:EnumSchema' }))
        TestSchemas.EnumWithReferencesSchema.references[0].version = latest.version
        const registeredSchema = await schemaRegistry.register(
          TestSchemas.EnumWithReferencesSchema,
          {
            subject: 'Avro:EnumWithReferences',
          },
        )
        ;({ schema } = await schemaRegistry['_getSchema'](registeredSchema.id))
      })

      it('should not be able to encode/decode enums schemas', async () => {
        const obj = {
          direction: Direction.UP,
          attributes: { color: Color.BLUE },
        }

        // Numeric TS enum values don't match the string-based default EnumType.
        expect(() => schema.toBuffer(obj)).toThrow(Error)
      })
    })

    describe('with enum typeHook defined', () => {
      beforeEach(async () => {
        const schemaRegistry = new SchemaRegistry(schemaRegistryArgs, {
          [SchemaType.AVRO]: { typeHook },
        })

        await schemaRegistry.register(TestSchemas.EnumSchema, {
          subject: 'Avro:EnumSchema',
        })

        const latest = apiResponse(await api.Subject.latestVersion({ subject: 'Avro:EnumSchema' }))
        TestSchemas.EnumWithReferencesSchema.references[0].version = latest.version
        const registeredSchema = await schemaRegistry.register(
          TestSchemas.EnumWithReferencesSchema,
          {
            subject: 'Avro:EnumWithReferences',
          },
        )
        ;({ schema } = await schemaRegistry['_getSchema'](registeredSchema.id))
      })

      it('should be able to encode/decode enums schemas', async () => {
        const obj = {
          direction: Direction.UP,
          attributes: { color: Color.BLUE },
        }

        const buffer = await schema.toBuffer(obj)
        const resultObj = await schema.fromBuffer(buffer)

        expect(resultObj).toEqual(obj)
      })
    })
    // Same scenario via the deprecated `forSchemaOptions` options shape.
    describe('with enum typeHook defined as LegacyOptions', () => {
      beforeEach(async () => {
        const schemaRegistry = new SchemaRegistry(schemaRegistryArgs, {
          forSchemaOptions: { typeHook },
        })

        await schemaRegistry.register(TestSchemas.EnumSchema, {
          subject: 'Avro:EnumSchema',
        })

        const latest = apiResponse(await api.Subject.latestVersion({ subject: 'Avro:EnumSchema' }))
        TestSchemas.EnumWithReferencesSchema.references[0].version = latest.version
        const registeredSchema = await schemaRegistry.register(
          TestSchemas.EnumWithReferencesSchema,
          {
            subject: 'Avro:EnumWithReferences',
          },
        )
        ;({ schema } = await schemaRegistry['_getSchema'](registeredSchema.id))
      })

      it('should be able to encode/decode enums schemas', async () => {
        const obj = {
          direction: Direction.UP,
          attributes: { color: Color.BLUE },
        }

        const buffer = await schema.toBuffer(obj)
        const resultObj = await schema.fromBuffer(buffer)

        expect(resultObj).toEqual(obj)
      })
    })
  })
})
"string", "name": "fullName" }] 36 | } 37 | `, 38 | otherRandom: (namespace: string) => ` 39 | { 40 | "type": "record", 41 | "name": "RandomTest", 42 | "namespace": "${namespace}", 43 | "fields": [{ "type": "string", "name": "notFullName" }] 44 | } 45 | `, 46 | v1: `{ 47 | "type": "record", 48 | "name": "AnotherPerson", 49 | "namespace": "com.org.domain.fixtures", 50 | "fields": [ { "type": "string", "name": "fullName" } ] 51 | }`, 52 | v2: `{ 53 | "type": "record", 54 | "name": "AnotherPerson", 55 | "namespace": "com.org.domain.fixtures", 56 | "fields": [ 57 | { "type": "string", "name": "fullName" }, 58 | { "type": "string", "name": "city", "default": "Stockholm" } 59 | ] 60 | }`, 61 | encodedAnotherPersonV2: encodedAnotherPersonV2Avro, 62 | }, 63 | [SchemaType.JSON]: { 64 | random: (namespace: string) => ` 65 | { 66 | "definitions" : { 67 | "record:${namespace}.RandomTest" : { 68 | "type" : "object", 69 | "required" : [ "fullName" ], 70 | "additionalProperties" : false, 71 | "properties" : { 72 | "fullName" : { 73 | "type" : "string" 74 | } 75 | } 76 | } 77 | }, 78 | "$ref" : "#/definitions/record:${namespace}.RandomTest" 79 | } 80 | `, 81 | otherRandom: (namespace: string) => ` 82 | { 83 | "definitions" : { 84 | "record:${namespace}.RandomTest" : { 85 | "type" : "object", 86 | "required" : [ "notFullName" ], 87 | "additionalProperties" : false, 88 | "properties" : { 89 | "notFullName" : { 90 | "type" : "string" 91 | } 92 | } 93 | } 94 | }, 95 | "$ref" : "#/definitions/record:${namespace}.RandomTest" 96 | } 97 | `, 98 | v1: ` 99 | { 100 | "title": "AnotherPerson", 101 | "type": "object", 102 | "required": [ 103 | "fullName" 104 | ], 105 | "properties": { 106 | "fullName": { 107 | "type": "string", 108 | "pattern": "^.*$" 109 | } 110 | } 111 | } 112 | `, 113 | v2: ` 114 | { 115 | "title": "AnotherPerson", 116 | "type": "object", 117 | "required": [ 118 | "fullName" 119 | ], 120 | "properties": { 121 | "fullName": { 122 | "type": "string", 123 | "pattern": "^.*$" 
124 | }, 125 | "city": { 126 | "type": "string", 127 | "pattern": "^.*$" 128 | } 129 | } 130 | } 131 | `, 132 | encodedAnotherPersonV2: encodedAnotherPersonV2Json, 133 | }, 134 | [SchemaType.PROTOBUF]: { 135 | random: (namespace: string) => ` 136 | package ${namespace}; 137 | message RandomTest { 138 | required string fullName = 1; 139 | } 140 | `, 141 | otherRandom: (namespace: string) => ` 142 | package ${namespace}; 143 | message RandomTest { 144 | required string notFullName = 1; 145 | } 146 | `, 147 | v1: ` 148 | syntax = "proto2"; 149 | package com.org.domain.fixtures; 150 | message AnotherPerson { 151 | required string fullName = 1; 152 | } 153 | `, 154 | v2: ` 155 | syntax = "proto2"; 156 | package com.org.domain.fixtures; 157 | message AnotherPerson { 158 | required string fullName = 1; 159 | optional string city = 2 [default = "Stockholm"]; 160 | } 161 | `, 162 | encodedAnotherPersonV2: encodedAnotherPersonV2Proto, 163 | }, 164 | } 165 | const types = Object.keys(schemaStringsByType).map(str => SchemaType[str]) as KnownSchemaTypes[] 166 | 167 | types.forEach(type => 168 | describe(`${type}`, () => { 169 | const subject: ConfluentSubject = { 170 | name: [type, 'com.org.domain.fixtures', 'AnotherPerson'].join('.'), 171 | } 172 | const schema: ConfluentSchema = { 173 | type, 174 | schema: schemaStringsByType[type].v1, 175 | } 176 | 177 | beforeEach(async () => { 178 | schemaRegistry = new SchemaRegistry(schemaRegistryArgs) 179 | await schemaRegistry.register(schema, { subject: subject.name }) 180 | }) 181 | 182 | describe('#register', () => { 183 | let namespace, 184 | Schema, 185 | subject: string, 186 | api: SchemaRegistryAPIClient, 187 | confluentSubject: ConfluentSubject, 188 | confluentSchema: ConfluentSchema 189 | 190 | beforeEach(() => { 191 | api = API(schemaRegistryAPIClientArgs) 192 | namespace = `N${uuid().replace(/-/g, '_')}` 193 | subject = `${namespace}.RandomTest` 194 | Schema = schemaStringsByType[type].random(namespace) 195 | 
confluentSubject = { name: subject } 196 | confluentSchema = { type, schema: Schema } 197 | }) 198 | 199 | it('uploads the new schema', async () => { 200 | await expect(api.Subject.latestVersion({ subject })).rejects.toHaveProperty( 201 | 'message', 202 | `${DEFAULT_API_CLIENT_ID} - Subject '${subject}' not found.`, 203 | ) 204 | 205 | await expect( 206 | schemaRegistry.register(confluentSchema, { subject: confluentSubject.name }), 207 | ).resolves.toEqual({ 208 | id: expect.any(Number), 209 | }) 210 | }) 211 | 212 | it('automatically cache the id and schema', async () => { 213 | const { id } = await schemaRegistry.register(confluentSchema, { 214 | subject: confluentSubject.name, 215 | }) 216 | 217 | expect(schemaRegistry.cache.getSchema(id)).toBeTruthy() 218 | }) 219 | 220 | it('fetch and validate the latest schema id after registering a new schema', async () => { 221 | const { id } = await schemaRegistry.register(confluentSchema, { 222 | subject: confluentSubject.name, 223 | }) 224 | const latestSchemaId = await schemaRegistry.getLatestSchemaId(subject) 225 | 226 | expect(id).toBe(latestSchemaId) 227 | }) 228 | 229 | it('set the default compatibility to BACKWARD', async () => { 230 | await schemaRegistry.register(confluentSchema, { subject: confluentSubject.name }) 231 | const response = await api.Subject.config({ subject }) 232 | expect(response.data()).toEqual({ compatibilityLevel: COMPATIBILITY.BACKWARD }) 233 | }) 234 | 235 | it('sets the compatibility according to param', async () => { 236 | await schemaRegistry.register(confluentSchema, { 237 | subject: confluentSubject.name, 238 | compatibility: COMPATIBILITY.NONE, 239 | }) 240 | const response = await api.Subject.config({ subject }) 241 | expect(response.data()).toEqual({ compatibilityLevel: COMPATIBILITY.NONE }) 242 | }) 243 | 244 | it('throws an error when the configured compatibility is different than defined in the client', async () => { 245 | await schemaRegistry.register(confluentSchema, { subject: 
confluentSubject.name }) 246 | await api.Subject.updateConfig({ subject, body: { compatibility: COMPATIBILITY.FULL } }) 247 | await expect( 248 | schemaRegistry.register(confluentSchema, { subject: confluentSubject.name }), 249 | ).rejects.toHaveProperty( 250 | 'message', 251 | 'Compatibility does not match the configuration (BACKWARD != FULL)', 252 | ) 253 | }) 254 | 255 | it('throws an error when the given schema string is invalid', async () => { 256 | const invalidSchema = `asdf` 257 | const invalidConfluentSchema: ConfluentSchema = { 258 | type, 259 | schema: invalidSchema, 260 | } 261 | await expect( 262 | schemaRegistry.register(invalidConfluentSchema, { subject: confluentSubject.name }), 263 | ).rejects.toHaveProperty('name', 'ConfluentSchemaRegistryArgumentError') 264 | }) 265 | }) 266 | 267 | describe('#encode', () => { 268 | beforeEach(async () => { 269 | await schemaRegistry.register(schema, { subject: subject.name }) 270 | }) 271 | 272 | it('throws an error if registryId is empty', async () => { 273 | await expect(schemaRegistry.encode(undefined, payload)).rejects.toHaveProperty( 274 | 'message', 275 | 'Invalid registryId: undefined', 276 | ) 277 | }) 278 | 279 | it('encodes using a defined registryId', async () => { 280 | const confluentSchemaV1: ConfluentSchema = { 281 | type, 282 | schema: schemaStringsByType[type].v1, 283 | } 284 | const confluentSchemaV2: ConfluentSchema = { 285 | type, 286 | schema: schemaStringsByType[type].v2, 287 | } 288 | 289 | const schema1 = await schemaRegistry.register(confluentSchemaV1, { 290 | subject: `${type}_test1`, 291 | }) 292 | const schema2 = await schemaRegistry.register(confluentSchemaV2, { 293 | subject: `${type}_test2`, 294 | }) 295 | expect(schema2.id).not.toEqual(schema1.id) 296 | 297 | const data = await schemaRegistry.encode(schema2.id, payload) 298 | 299 | expect(data).toMatchConfluentEncodedPayload({ 300 | registryId: schema2.id, 301 | payload: 
Buffer.from(schemaStringsByType[type].encodedAnotherPersonV2), 302 | }) 303 | }) 304 | 305 | it('throws an error if the payload does not match the schema', async () => { 306 | const confluentSchema: ConfluentSchema = { 307 | type, 308 | schema: schemaStringsByType[type].v1, 309 | } 310 | const schema = await schemaRegistry.register(confluentSchema, { 311 | subject: `${type}_test`, 312 | }) 313 | 314 | const badPayload = { asdf: 123 } 315 | 316 | await expect(schemaRegistry.encode(schema.id, badPayload)).rejects.toHaveProperty( 317 | 'name', 318 | 'ConfluentSchemaRegistryValidationError', 319 | ) 320 | }) 321 | }) 322 | 323 | describe('#decode', () => { 324 | let registryId: number 325 | 326 | beforeEach(async () => { 327 | registryId = (await schemaRegistry.register(schema, { subject: subject.name })).id 328 | }) 329 | 330 | it('decodes data', async () => { 331 | const buffer = Buffer.from(await schemaRegistry.encode(registryId, payload)) 332 | const data = await schemaRegistry.decode(buffer) 333 | 334 | expect(data).toEqual(payload) 335 | }) 336 | 337 | it('throws an error if the magic byte is not supported', async () => { 338 | const buffer = Buffer.from(wrongMagicByte) 339 | await expect(schemaRegistry.decode(buffer)).rejects.toHaveProperty( 340 | 'message', 341 | 'Message encoded with magic byte {"type":"Buffer","data":[48]}, expected {"type":"Buffer","data":[0]}', 342 | ) 343 | }) 344 | 345 | it.skip('throws an error if the payload does not match the schema', async () => { 346 | const badPayload = { asdf: 123 } 347 | // TODO: find a way to encode the bad payload with the registryId 348 | const buffer = Buffer.from(await schemaRegistry.encode(registryId, badPayload)) 349 | 350 | await expect(schemaRegistry.decode(buffer)).rejects.toHaveProperty( 351 | 'name', 352 | 'ConfluentSchemaRegistryValidationError', 353 | ) 354 | }) 355 | 356 | it('caches the schema', async () => { 357 | const buffer = Buffer.from(await schemaRegistry.encode(registryId, payload)) 358 | 
359 | schemaRegistry.cache.clear() 360 | await schemaRegistry.decode(buffer) 361 | 362 | expect(schemaRegistry.cache.getSchema(registryId)).toBeTruthy() 363 | }) 364 | 365 | it('creates a single origin request for a schema cache-miss', async () => { 366 | const buffer = Buffer.from(await schemaRegistry.encode(registryId, payload)) 367 | 368 | schemaRegistry.cache.clear() 369 | 370 | const spy = jest.spyOn((schemaRegistry as any).api.Schema, 'find') 371 | 372 | await Promise.all([ 373 | schemaRegistry.decode(buffer), 374 | schemaRegistry.decode(buffer), 375 | schemaRegistry.decode(buffer), 376 | ]) 377 | 378 | expect(spy).toHaveBeenCalledTimes(1) 379 | }) 380 | 381 | describe('when the cache is populated', () => { 382 | it('uses the cache data', async () => { 383 | const buffer = Buffer.from(await schemaRegistry.encode(registryId, payload)) 384 | expect(schemaRegistry.cache.getSchema(registryId)).toBeTruthy() 385 | 386 | jest.spyOn(schemaRegistry.cache, 'setSchema') 387 | await schemaRegistry.decode(buffer) 388 | 389 | expect(schemaRegistry.cache.setSchema).not.toHaveBeenCalled() 390 | }) 391 | }) 392 | }) 393 | 394 | describe('#getRegistryIdBySchema', () => { 395 | let namespace: string, confluentSubject: ConfluentSubject, confluentSchema: ConfluentSchema 396 | 397 | beforeEach(() => { 398 | namespace = `N${uuid().replace(/-/g, '_')}` 399 | const subject = `${namespace}.RandomTest` 400 | const schema = schemaStringsByType[type].random(namespace) 401 | confluentSubject = { name: subject } 402 | confluentSchema = { type, schema: schema } 403 | }) 404 | 405 | it('returns the registry id if the schema has already been registered under that subject', async () => { 406 | const { id } = await schemaRegistry.register(confluentSchema, { 407 | subject: confluentSubject.name, 408 | }) 409 | 410 | await expect( 411 | schemaRegistry.getRegistryIdBySchema(confluentSubject.name, confluentSchema), 412 | ).resolves.toEqual(id) 413 | }) 414 | 415 | it('throws an error if the subject 
does not exist', async () => { 416 | await expect( 417 | schemaRegistry.getRegistryIdBySchema(confluentSubject.name, confluentSchema), 418 | ).rejects.toHaveProperty( 419 | 'message', 420 | `Confluent_Schema_Registry - Subject '${confluentSubject.name}' not found.`, 421 | ) 422 | }) 423 | 424 | it('throws an error if the schema has not been registered under that subject', async () => { 425 | const otherSchema = schemaStringsByType[type].otherRandom(namespace) 426 | const confluentOtherSchema: ConfluentSchema = { 427 | type, 428 | schema: otherSchema, 429 | } 430 | 431 | await schemaRegistry.register(confluentOtherSchema, { subject: confluentSubject.name }) 432 | 433 | await expect( 434 | schemaRegistry.getRegistryIdBySchema(confluentSubject.name, confluentSchema), 435 | ).rejects.toHaveProperty('message', 'Confluent_Schema_Registry - Schema not found') 436 | }) 437 | }) 438 | }), 439 | ) 440 | 441 | describe('PROTOBUF tests', () => { 442 | const v3 = ` 443 | syntax = "proto2"; 444 | package com.org.domain.fixtures; 445 | message SomeOtherMessage { 446 | required string bla = 1; 447 | required string foo = 2; 448 | } 449 | message AnotherPerson { 450 | required string fullName = 1; 451 | optional string city = 2 [default = "Stockholm"]; 452 | } 453 | `, 454 | v3Opts = { [SchemaType.PROTOBUF]: { messageName: 'AnotherPerson' } }, 455 | type = SchemaType.PROTOBUF 456 | 457 | beforeAll(() => { 458 | schemaRegistry = new SchemaRegistry(schemaRegistryArgs, v3Opts) 459 | }) 460 | 461 | it('encodes using schemaOptions', async () => { 462 | const confluentSchemaV3: ConfluentSchema = { 463 | type, 464 | schema: v3, 465 | } 466 | 467 | const schema3 = await schemaRegistry.register(confluentSchemaV3, { 468 | subject: `${type}_test3`, 469 | }) 470 | 471 | const data = await schemaRegistry.encode(schema3.id, payload) 472 | 473 | expect(data).toMatchConfluentEncodedPayload({ 474 | registryId: schema3.id, 475 | payload: Buffer.from(schemaStringsByType[type].encodedAnotherPersonV2), 
476 | }) 477 | }) 478 | 479 | it('decodes using schemaOptions', async () => { 480 | const confluentSchemaV3: ConfluentSchema = { 481 | type, 482 | schema: v3, 483 | } 484 | 485 | const schema3 = await schemaRegistry.register(confluentSchemaV3, { 486 | subject: `${type}_test3`, 487 | }) 488 | 489 | const buffer = Buffer.from(await schemaRegistry.encode(schema3.id, payload)) 490 | const data = await schemaRegistry.decode(buffer) 491 | 492 | expect(data).toEqual(payload) 493 | }) 494 | 495 | describe('nested message types tests', () => { 496 | const v4 = ` 497 | syntax = "proto2"; 498 | package com.org.domain.fixtures; 499 | message OuterMessageType { 500 | required string data = 1; 501 | required InnerMessageType1 innerMessageType1 = 2; 502 | required InnerMessageType2 innerMessageType2 = 3; 503 | 504 | message InnerMessageType1 { 505 | required string someField = 1; 506 | } 507 | message InnerMessageType2 { 508 | required string someOtherField = 1; 509 | } 510 | } 511 | `, 512 | type = SchemaType.PROTOBUF, 513 | nestedPayload = { 514 | data: 'data-value', 515 | innerMessageType1: { 516 | someField: 'someField-value', 517 | }, 518 | innerMessageType2: { 519 | someOtherField: 'someOtherField-value', 520 | }, 521 | } 522 | 523 | beforeAll(() => { 524 | schemaRegistry = new SchemaRegistry(schemaRegistryArgs) 525 | }) 526 | 527 | it('encodes', async () => { 528 | const confluentSchemaV4: ConfluentSchema = { 529 | type, 530 | schema: v4, 531 | } 532 | 533 | const schema4 = await schemaRegistry.register(confluentSchemaV4, { 534 | subject: `${type}_test4`, 535 | }) 536 | 537 | const data = await schemaRegistry.encode(schema4.id, nestedPayload) 538 | 539 | expect(data).toMatchConfluentEncodedPayload({ 540 | registryId: schema4.id, 541 | payload: Buffer.from(encodedNestedV2Proto), 542 | }) 543 | }) 544 | 545 | it('decodes', async () => { 546 | const confluentSchemaV4: ConfluentSchema = { 547 | type, 548 | schema: v4, 549 | } 550 | 551 | const schema4 = await 
schemaRegistry.register(confluentSchemaV4, { 552 | subject: `${type}_test4`, 553 | }) 554 | 555 | const buffer = Buffer.from(await schemaRegistry.encode(schema4.id, nestedPayload)) 556 | const data = await schemaRegistry.decode(buffer) 557 | 558 | expect(data).toEqual(nestedPayload) 559 | }) 560 | }) 561 | }) 562 | 563 | describe('JSON Schema tests', () => { 564 | describe('passing an Ajv instance in the constructor', () => { 565 | test.each([ 566 | ['Ajv 7', new Ajv()], 567 | ['Ajv2020', new Ajv2020()], 568 | ])( 569 | 'Errors are thrown with their path in %s when the validation fails', 570 | async (_, ajvInstance) => { 571 | expect.assertions(3) 572 | const registry = new SchemaRegistry(schemaRegistryArgs, { 573 | [SchemaType.JSON]: { ajvInstance, detailedErrorPaths: true }, 574 | }) 575 | const subject: ConfluentSubject = { 576 | name: [SchemaType.JSON, 'com.org.domain.fixtures', 'AnotherPerson'].join('.'), 577 | } 578 | const schema: ConfluentSchema = { 579 | type: SchemaType.JSON, 580 | schema: schemaStringsByType[SchemaType.JSON].v1, 581 | } 582 | 583 | const { id: schemaId } = await registry.register(schema, { subject: subject.name }) 584 | 585 | try { 586 | await registry.encode(schemaId, { fullName: true }) 587 | } catch (error) { 588 | expect(error).toBeInstanceOf(ConfluentSchemaRegistryValidationError) 589 | expect(error.message).toEqual('invalid payload') 590 | expect(error.paths[0].path).toEqual(['/fullName']) 591 | } 592 | }, 593 | ) 594 | }) 595 | }) 596 | 597 | describe('Avro tests', () => { 598 | it('uses reader schema if specified (avro-only)', async () => { 599 | const subject: ConfluentSubject = { 600 | name: [SchemaType.AVRO, 'com.org.domain.fixtures', 'AnotherPerson'].join('.'), 601 | } 602 | const schema: ConfluentSchema = { 603 | type: SchemaType.AVRO, 604 | schema: schemaStringsByType[SchemaType.AVRO].v1, 605 | } 606 | const registryId = (await schemaRegistry.register(schema, { subject: subject.name })).id 607 | const writerBuffer = 
Buffer.from(await schemaRegistry.encode(registryId, payload)) 608 | const readerSchema = JSON.parse(schemaStringsByType[SchemaType.AVRO].v2) 609 | 610 | await expect( 611 | schemaRegistry.decode(writerBuffer, { [SchemaType.AVRO]: { readerSchema } }), 612 | ).resolves.toHaveProperty('city', 'Stockholm') 613 | 614 | const registeredReaderSchema = await schemaRegistry.getSchema(registryId) 615 | await expect( 616 | schemaRegistry.decode(writerBuffer, { 617 | [SchemaType.AVRO]: { readerSchema: registeredReaderSchema }, 618 | }), 619 | ) 620 | }) 621 | }) 622 | }) 623 | --------------------------------------------------------------------------------