// Source: an archived GitHub repository (read-only since Jan 1, 2021).
// Original file: AAA.js — 96 lines (80 loc), 2.5 KB.
// (GitHub page chrome and the line-number gutter from the scrape were
// replaced by this header so the file parses as JavaScript.)
// Round-trip sanity check for the generated protobuf bindings:
// build an AAA message, serialize it to bytes, then decode those
// bytes back and print the recovered field.
const pb = require("./AAA_pb");

const aaa = new pb.AAA();
aaa.setMessage("AAA_XXX");

// `data` is reused further down by the Kafka producer path.
const data = Buffer.from(aaa.serializeBinary());
console.log("AAA", "buffer", data.join(","));

const decoded = pb.AAA.deserializeBinary(data);
console.log("AAA", "data.message", decoded.getMessage());
// ========================================
// ========================================
// ========================================
// ========================================
const DEFAULT_OFFSET = 0;
// Based on /~https://github.com/mtth/avsc/issues/140
// const collectInvalidPaths = (schema: Schema, jsonPayload: object) => {
//   const paths: any = [];
//   schema.isValid(jsonPayload, {
//     errorHook: (path) => paths.push(path),
//   });
//   return paths;
// };

// Confluent-style wire format framing:
//   [magic byte 0x00][4-byte big-endian registry id][message index][payload]
const MAGIC_BYTE = Buffer.alloc(1); // zero-filled => 0x00 magic byte
const MESSAGE_INDEX_BYTES = Buffer.alloc(1); // single 0x00 message-index byte

/**
 * Frame a serialized payload in the schema-registry wire format.
 *
 * @param {unknown} schema - unused; kept for signature compatibility with the
 *   commented-out avro-based encoder above.
 * @param {number} registryId - schema id, written as a 4-byte big-endian int.
 * @param {Buffer|unknown} jsonPayload - when a Buffer, it is used as the
 *   payload. Otherwise falls back to the module-level `data` buffer, which is
 *   what the original code always did (it ignored every parameter), so
 *   existing callers like encode(0, 53, 0) behave identically.
 * @returns {Buffer} magic byte + registry id + message index + payload.
 */
const encode = (schema, registryId, jsonPayload) => {
  const registryIdBuffer = Buffer.alloc(4);
  registryIdBuffer.writeInt32BE(registryId, DEFAULT_OFFSET);
  // Fix: the original referenced the outer `data` variable directly and
  // ignored its arguments. Accept an explicit Buffer payload, but keep the
  // fallback so existing call sites are unchanged.
  const payload = Buffer.isBuffer(jsonPayload) ? jsonPayload : data;
  return Buffer.concat([MAGIC_BYTE, registryIdBuffer, MESSAGE_INDEX_BYTES, payload]);
};
// ========================================
// ========================================
// ========================================
// ========================================
// Kafka client setup (kafkajs). DEBUG logging is very verbose — this is
// clearly a local experiment against a docker-compose broker.
const { Kafka, logLevel } = require("kafkajs");
const kafka = new Kafka({
  brokers: ["broker:29092"], // NOTE(review): docker-internal listener address — confirm it is reachable from where this runs
  clientId: "AAA",
  logLevel: logLevel.DEBUG,
});
/**
 * Produce a single wire-format-framed protobuf message to topic "AAA".
 * Connects, sends one record, and disconnects; errors propagate to the caller.
 */
async function producer() {
  const producer = kafka.producer();
  await producer.connect();
  await producer.send({
    messages: [
      {
        // encode() ignores its first argument; 53 is the registry id and
        // `data` is the serialized AAA protobuf built at the top of the file
        // (the original passed 0 here and relied on encode() reading `data`
        // from module scope — passing it explicitly is behavior-identical).
        value: encode(0, 53, data),
      },
    ],
    topic: "AAA",
  });
  await producer.disconnect();
}
// Fire-and-forget, but surface failures instead of leaving an
// unhandled promise rejection (the original had no .catch).
void producer().catch((err) => console.error("producer failed", err));
/**
 * Consume topic "AAA" from the beginning, decoding each record's value as an
 * AAA protobuf message and logging the decoded field.
 *
 * NOTE(review): message.value can be null (Kafka tombstone records); the
 * original passed it straight to deserializeBinary, which would throw and
 * kill the consumer loop. Null values are now skipped.
 */
async function consumer() {
  const consumer = kafka.consumer({
    groupId: "AAA",
  });
  await consumer.connect();
  await consumer.subscribe({
    fromBeginning: true,
    topic: "AAA",
  });
  await consumer.run({
    eachMessage: async ({ topic, partition, message }) => {
      console.log("topic", topic);
      console.log("partition", partition);
      console.log("message", message);
      if (message.value == null) {
        // Tombstone / empty record: nothing to decode.
        return;
      }
      const aaaNew = pb.AAA.deserializeBinary(message.value);
      const message2 = aaaNew.getMessage();
      console.log("DATA.message =>", message2);
    },
  });
}
// void consumer();