schema-safe-kafkajs v1.2.0
Schema Safe Kafka Producer
This is a simple Kafka producer that uses the Schema Registry to ensure that the data sent to Kafka is compatible with the schema registered for the topic.
Usage
import { KafkaClient, Partitioners } from "schema-safe-kafkajs";

const client = new KafkaClient({
  cluster: {
    clientId: "CLIENT_ID",
    brokers: ["BOOTSTRAP_SERVER"],
    ssl: true,
    sasl: {
      mechanism: "plain",
      username: "API_KEY",
      password: "API_SECRET"
    }
  },
  schemaRegistry: {
    host: "SCHEMA_REGISTRY_URL",
    auth: {
      username: "SCHEMA_REGISTRY_API_KEY",
      password: "SCHEMA_REGISTRY_API_SECRET"
    }
  }
})

const producer = client.producer({
  createPartitioner: Partitioners.LegacyPartitioner,
  allowAutoTopicCreation: false,
  idempotent: true,
})

async function run() {
  await producer.connect()
  await client.publish(producer, {
    topic: "topic-name",
    messages: [{
      key: "key",
      value: {
        "foo": "bar",
        "baz": 1
      },
      headers: {
        "meta": "data"
      },
      // id of the schema registered for this topic in the Schema Registry
      schemaId: 1
    }]
  })
  await producer.disconnect()
}

run().catch(console.error)
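The schemaId passed to publish is the numeric id the Schema Registry assigns when a schema is registered. This package's own API for registering schemas (if any) is not shown here; as a hedged sketch, one common way to register an Avro schema and obtain that id is the standalone @kafkajs/confluent-schema-registry client. The schema, subject name, and credentials below are placeholders matching the example message above, not values defined by this package.

import { SchemaRegistry, SchemaType } from "@kafkajs/confluent-schema-registry";

// Illustrative Avro schema matching the example message value ({ foo, baz }).
const exampleSchema = {
  type: "record",
  name: "Example",
  namespace: "com.example",
  fields: [
    { name: "foo", type: "string" },
    { name: "baz", type: "int" }
  ]
};

const registry = new SchemaRegistry({
  host: "SCHEMA_REGISTRY_URL",
  auth: {
    username: "SCHEMA_REGISTRY_API_KEY",
    password: "SCHEMA_REGISTRY_API_SECRET"
  }
});

async function registerExampleSchema(): Promise<number> {
  // Register under the conventional "<topic>-value" subject; the returned id
  // is the kind of value you would pass as schemaId when publishing.
  const { id } = await registry.register(
    { type: SchemaType.AVRO, schema: JSON.stringify(exampleSchema) },
    { subject: "topic-name-value" }
  );
  return id;
}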
Deploying a New Version
npm version patch
git push
gh release create