source-linear-operators.td

# Copyright Materialize, Inc. and contributors. All rights reserved.
#
# Use of this software is governed by the Business Source License
# included in the LICENSE file at the root of this repository.
#
# As of the Change Date specified in that file, in accordance with
# the Business Source License, use of this software will be governed
# by the Apache License, Version 2.0.

$ set-arg-default single-replica-cluster=quickstart

# Test that filter and demand information are properly propagated from a view
# down to an unmaterialized source.
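#
# "Filter" here means WHERE predicates and "demand" means (roughly) the set of
# columns a query actually uses; both are linear operators that the optimizer
# can apply while reading from the source instead of shipping every column of
# every row into the dataflow. The checks below only assert query results, not
# plans; to eyeball the planning by hand one could run something like
#
#   EXPLAIN SELECT * FROM data WHERE a = 1 AND d = 3;
#
# though the exact plan rendering is version-dependent and is not asserted here.
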
$ set schema={
    "type": "record",
    "name": "row",
    "fields": [
      {"name": "a", "type": ["long", "null"]},
      {"name": "b", "type": ["long", "null"]},
      {"name": "c", "type": ["long", "null"]},
      {"name": "d", "type": ["long", "null"]}
    ]
  }

$ kafka-create-topic topic=data

$ kafka-ingest format=avro topic=data schema=${schema}
{"a": {"long": 1}, "b": {"long": 1}, "c": {"long": 3}, "d": {"long": 4}}
{"a": {"long": 2}, "b": {"long": 1}, "c": {"long": 5}, "d": {"long": 4}}
{"a": {"long": 3}, "b": {"long": 1}, "c": {"long": 3}, "d": {"long": 5}}
{"a": {"long": 1}, "b": {"long": 2}, "c": {"long": 2}, "d": {"long": 3}}

> CREATE CONNECTION kafka_conn
  TO KAFKA (BROKER '${testdrive.kafka-addr}', SECURITY PROTOCOL PLAINTEXT);

> CREATE SOURCE data
  IN CLUSTER ${arg.single-replica-cluster}
  FROM KAFKA CONNECTION kafka_conn (TOPIC 'testdrive-data-${testdrive.seed}')
  FORMAT AVRO USING SCHEMA '${schema}'

$ set-regex match=u\d+ replacement=UID

# basic test: pushing filters down to sources
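# The predicate `a = 1 AND d = 3` is written directly against the source, so it
# should be applied as the source is read and only the single matching row
# should reach the dataflow that maintains the index on v.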
> CREATE VIEW v as SELECT * from data where a = 1 and d = 3;

> CREATE DEFAULT INDEX ON v;

> SELECT * FROM v
1 2 2 3

> DROP VIEW v;

# basic test: pushing demand down to sources
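# Only column b is referenced anywhere in the view, so the demand propagated to
# the source should cover just b; the values of a, c, and d can be dropped (or
# dummied out) before the rest of the dataflow sees them.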
> CREATE VIEW v as SELECT b from data where b = 1;

> CREATE DEFAULT INDEX ON v;

> SELECT * FROM v
1
1
1

> DROP VIEW v;

> CREATE VIEW inner_view as SELECT a, b, d from data where d = 4;

# Filter gets pushed through intervening view.
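# v filters on a = 1 and inner_view itself filters on d = 4, so both predicates
# should end up applied at the source even though v never names the source
# directly.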
> CREATE VIEW v as SELECT b from inner_view where a = 1

> CREATE DEFAULT INDEX ON v;

> SELECT * FROM v
1

> DROP VIEW v;

# Demand gets pushed through intervening view.
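# v reads only d, plus a for the predicate, so the demand reaching the source
# should exclude b and c even though inner_view nominally exposes b.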
> CREATE VIEW v as SELECT d from inner_view where a = 1;

> CREATE DEFAULT INDEX ON v;

> SELECT * FROM v
4

> DROP VIEW v;

> CREATE VIEW v as SELECT s1.a from data s1, data s2 where s1.a = s2.b and s2.d = 4;

> CREATE DEFAULT INDEX ON v;

> SELECT * FROM v
1
1
1
1

> DROP VIEW v;

# filters and demand can be inferred in more complicated queries
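# In the self-join below, d = 4 is stated for both inputs and s1.a = s2.b
# relates the join keys, so each use of `data` should receive its own
# pushed-down filter and a demand limited to the columns that input actually
# uses.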
> CREATE VIEW v as SELECT s2.a from data s1, data s2 where s1.a = s2.b and s2.d = 4 and s1.d = 4;

> CREATE DEFAULT INDEX ON v;

> SELECT * FROM v
1
2

> DROP VIEW v;

> CREATE VIEW v as SELECT s2.c from data s1, data s2 where s1.a = s2.a

> CREATE DEFAULT INDEX ON v;

> SELECT * FROM v
3
5
3
2
3
2

> DROP VIEW v;

> CREATE VIEW v as SELECT * FROM (SELECT a, sum(b) FROM data GROUP BY a UNION ALL SELECT a, (a + c)::numeric FROM data) WHERE a = 1;

> CREATE DEFAULT INDEX ON v;

> SELECT * FROM v
1 3
1 3
1 4

> DROP VIEW v;

# multiple source test
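# Same idea across two distinct sources: data2 reuses the schema on its own
# topic, and in the EXCEPT ALL query below the `d is null` predicate applies
# only to data2, so it should be pushed to data2 alone while data is read with
# demand limited to a and c.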
$ kafka-create-topic topic=data2

$ kafka-ingest format=avro topic=data2 schema=${schema}
{"a": {"long": 3}, "b": {"long": 2}, "c": null, "d": {"long": 4}}
{"a": {"long": 2}, "b": {"long": 1}, "c": {"long": 5}, "d": null}

> CREATE SOURCE data2
  IN CLUSTER ${arg.single-replica-cluster}
  FROM KAFKA CONNECTION kafka_conn (TOPIC 'testdrive-data2-${testdrive.seed}')
  FORMAT AVRO USING SCHEMA '${schema}'

> CREATE VIEW v as SELECT a, c FROM data EXCEPT ALL SELECT a, c FROM data2 where d is null

> CREATE DEFAULT INDEX ON v;

> SELECT * FROM v
1 2
1 3
3 3

> DROP VIEW v;