mz-sources.td

# Copyright Materialize, Inc. and contributors. All rights reserved.
#
# Use of this software is governed by the Business Source License
# included in the LICENSE file at the root of this repository.
#
# As of the Change Date specified in that file, in accordance with
# the Business Source License, use of this software will be governed
# by the Apache License, Version 2.0.

$ set-arg-default default-storage-size=1

# Verify that envelope types are correctly reported in mz_sources

> CREATE CONNECTION kafka_conn
  TO KAFKA (BROKER '${testdrive.kafka-addr}', SECURITY PROTOCOL PLAINTEXT);

> CREATE CONNECTION csr_conn TO CONFLUENT SCHEMA REGISTRY (
    URL '${testdrive.schema-registry-url}'
  );

$ kafka-create-topic topic=none-topic partitions=1
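# ENVELOPE NONE: one source that includes the TEXT key and one that is value-only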
> CREATE CLUSTER none_source_cluster SIZE '${arg.default-storage-size}';
> CREATE SOURCE none_source
  IN CLUSTER none_source_cluster
  FROM KAFKA CONNECTION kafka_conn (TOPIC 'testdrive-none-topic-${testdrive.seed}')
  KEY FORMAT TEXT
  VALUE FORMAT TEXT
  INCLUDE KEY
  ENVELOPE NONE

> CREATE CLUSTER none_source_no_key_cluster SIZE '${arg.default-storage-size}';
> CREATE SOURCE none_source_no_key
  IN CLUSTER none_source_no_key_cluster
  FROM KAFKA CONNECTION kafka_conn (TOPIC 'testdrive-none-topic-${testdrive.seed}')
  FORMAT TEXT
  ENVELOPE NONE
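# Debezium-style Avro key and value schemas for the ENVELOPE DEBEZIUM source below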
$ set keyschema={
    "type": "record",
    "name": "Key",
    "fields": [
        {"name": "id", "type": "long"}
    ]
  }

$ set schema={
  "type" : "record",
  "name" : "envelope",
  "fields" : [
    {
      "name": "before",
      "type": [
        {
          "name": "row",
          "type": "record",
          "fields": [
            {
              "name": "id",
              "type": "long"
            },
            {
              "name": "creature",
              "type": "string"
            }]
        },
        "null"
      ]
    },
    { "name": "op", "type": "string" },
    {
      "name": "after",
      "type": ["row", "null"]
    },
    {
      "name": "source",
      "type": {
        "type": "record",
        "name": "Source",
        "namespace": "io.debezium.connector.mysql",
        "fields": [
          {
            "name": "file",
            "type": "string"
          },
          {
            "name": "pos",
            "type": "long"
          },
          {
            "name": "row",
            "type": "int"
          },
          {
            "name": "snapshot",
            "type": [
              {
                "type": "boolean",
                "connect.default": false
              },
              "null"
            ],
            "default": false
          }
        ],
        "connect.name": "io.debezium.connector.mysql.Source"
      }
    }
  ]
  }
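# Ingest a single Debezium update event and create a source with ENVELOPE DEBEZIUM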
$ kafka-create-topic topic=dbzupsert partitions=1

$ kafka-ingest format=avro topic=dbzupsert key-format=avro key-schema=${keyschema} schema=${schema} timestamp=1
{"id": 1} {"before": {"row": {"id": 1, "creature": "fish"}}, "after": {"row": {"id": 1, "creature": "mudskipper"}}, "op": "u", "source": {"file": "binlog1", "pos": 1, "row": 1, "snapshot": {"boolean": false}}}

> CREATE CLUSTER debezium_source_cluster SIZE '${arg.default-storage-size}';
> CREATE SOURCE debezium_source
  IN CLUSTER debezium_source_cluster
  FROM KAFKA CONNECTION kafka_conn (TOPIC 'testdrive-dbzupsert-${testdrive.seed}')
  FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY CONNECTION csr_conn
  ENVELOPE DEBEZIUM
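# ENVELOPE UPSERT: Avro key and value over a simple keyed topic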
$ kafka-create-topic topic=upsert-topic

$ set keyschema={
    "type": "record",
    "name": "Key",
    "fields": [
        {"name": "key", "type": "string"}
    ]
  }

$ set schema={
    "type" : "record",
    "name" : "test",
    "fields" : [
        {"name":"f1", "type":"string"},
        {"name":"f2", "type":"long"}
    ]
  }

$ kafka-ingest format=avro topic=upsert-topic key-format=avro key-schema=${keyschema} schema=${schema}
{"key": "fish"} {"f1": "fish", "f2": 1000}

> CREATE CLUSTER upsert_source_cluster SIZE '${arg.default-storage-size}';
> CREATE SOURCE upsert_source
  IN CLUSTER upsert_source_cluster
  FROM KAFKA CONNECTION kafka_conn (TOPIC 'testdrive-upsert-topic-${testdrive.seed}')
  FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY CONNECTION csr_conn
  ENVELOPE UPSERT
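# Confirm the envelope types and key/value formats reported in mz_sources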
> SELECT envelope_type FROM mz_sources WHERE name = 'none_source'
none

> SELECT envelope_type FROM mz_sources WHERE name = 'debezium_source'
debezium

> SELECT envelope_type FROM mz_sources WHERE name = 'upsert_source'
upsert

> SELECT key_format, value_format FROM mz_sources WHERE name = 'none_source'
text text

> SELECT key_format, value_format FROM mz_sources WHERE name = 'none_source_no_key'
<null> text

> SELECT key_format, value_format FROM mz_sources WHERE name = 'debezium_source'
avro avro